summaryrefslogtreecommitdiff
path: root/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
diff options
context:
space:
mode:
authorLorry Tar Creator <lorry-tar-importer@lorry>2017-06-27 06:07:23 +0000
committerLorry Tar Creator <lorry-tar-importer@lorry>2017-06-27 06:07:23 +0000
commit1bf1084f2b10c3b47fd1a588d85d21ed0eb41d0c (patch)
tree46dcd36c86e7fbc6e5df36deb463b33e9967a6f7 /Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
parent32761a6cee1d0dee366b885b7b9c777e67885688 (diff)
downloadWebKitGtk-tarball-master.tar.gz
Diffstat (limited to 'Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp')
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp1511
1 files changed, 864 insertions, 647 deletions
diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
index 165c5a06a..cadf905ed 100644
--- a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
@@ -3,7 +3,9 @@
* Copyright (C) 2007 Collabora Ltd. All rights reserved.
* Copyright (C) 2007 Alp Toker <alp@atoker.com>
* Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
- * Copyright (C) 2009, 2010, 2011, 2012, 2013 Igalia S.L
+ * Copyright (C) 2009, 2010, 2011, 2012, 2013, 2015, 2016 Igalia S.L
+ * Copyright (C) 2014 Cable Television Laboratories, Inc.
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
@@ -26,18 +28,24 @@
#if ENABLE(VIDEO) && USE(GSTREAMER)
+#include "FileSystem.h"
#include "GStreamerUtilities.h"
#include "URL.h"
#include "MIMETypeRegistry.h"
#include "MediaPlayer.h"
+#include "MediaPlayerRequestInstallMissingPluginsCallback.h"
#include "NotImplemented.h"
#include "SecurityOrigin.h"
#include "TimeRanges.h"
#include "WebKitWebSourceGStreamer.h"
+#include <glib.h>
#include <gst/gst.h>
#include <gst/pbutils/missing-plugins.h>
#include <limits>
-#include <wtf/gobject/GUniquePtr.h>
+#include <wtf/HexNumber.h>
+#include <wtf/MediaTime.h>
+#include <wtf/NeverDestroyed.h>
+#include <wtf/glib/GUniquePtr.h>
#include <wtf/text/CString.h>
#if ENABLE(VIDEO_TRACK)
@@ -49,6 +57,11 @@
#include "VideoTrackPrivateGStreamer.h"
#endif
+#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
+#define GST_USE_UNSTABLE_API
+#include <gst/mpegts/mpegts.h>
+#undef GST_USE_UNSTABLE_API
+#endif
#include <gst/audio/streamvolume.h>
#if ENABLE(MEDIA_SOURCE)
@@ -56,24 +69,9 @@
#include "WebKitMediaSourceGStreamer.h"
#endif
-// GstPlayFlags flags from playbin2. It is the policy of GStreamer to
-// not publicly expose element-specific enums. That's why this
-// GstPlayFlags enum has been copied here.
-typedef enum {
- GST_PLAY_FLAG_VIDEO = 0x00000001,
- GST_PLAY_FLAG_AUDIO = 0x00000002,
- GST_PLAY_FLAG_TEXT = 0x00000004,
- GST_PLAY_FLAG_VIS = 0x00000008,
- GST_PLAY_FLAG_SOFT_VOLUME = 0x00000010,
- GST_PLAY_FLAG_NATIVE_AUDIO = 0x00000020,
- GST_PLAY_FLAG_NATIVE_VIDEO = 0x00000040,
- GST_PLAY_FLAG_DOWNLOAD = 0x00000080,
- GST_PLAY_FLAG_BUFFERING = 0x000000100
-} GstPlayFlags;
-
-// Max interval in seconds to stay in the READY state on manual
-// state change requests.
-static const guint gReadyStateTimerInterval = 60;
+#if ENABLE(WEB_AUDIO)
+#include "AudioSourceProviderGStreamer.h"
+#endif
GST_DEBUG_CATEGORY_EXTERN(webkit_media_player_debug);
#define GST_CAT_DEFAULT webkit_media_player_debug
@@ -82,127 +80,34 @@ using namespace std;
namespace WebCore {
-static gboolean mediaPlayerPrivateMessageCallback(GstBus*, GstMessage* message, MediaPlayerPrivateGStreamer* player)
-{
- return player->handleMessage(message);
-}
-
-static void mediaPlayerPrivateSourceChangedCallback(GObject*, GParamSpec*, MediaPlayerPrivateGStreamer* player)
-{
- player->sourceChanged();
-}
-
-static void mediaPlayerPrivateVideoSinkCapsChangedCallback(GObject*, GParamSpec*, MediaPlayerPrivateGStreamer* player)
-{
- player->videoCapsChanged();
-}
-
-static void mediaPlayerPrivateVideoChangedCallback(GObject*, MediaPlayerPrivateGStreamer* player)
-{
- player->videoChanged();
-}
-
-static void mediaPlayerPrivateAudioChangedCallback(GObject*, MediaPlayerPrivateGStreamer* player)
+static void busMessageCallback(GstBus*, GstMessage* message, MediaPlayerPrivateGStreamer* player)
{
- player->audioChanged();
+ player->handleMessage(message);
}
-static gboolean mediaPlayerPrivateAudioChangeTimeoutCallback(MediaPlayerPrivateGStreamer* player)
-{
- // This is the callback of the timeout source created in ::audioChanged.
- player->notifyPlayerOfAudio();
- return FALSE;
-}
-
-static void setAudioStreamPropertiesCallback(GstChildProxy*, GObject* object, gchar*,
- MediaPlayerPrivateGStreamer* player)
+void MediaPlayerPrivateGStreamer::setAudioStreamPropertiesCallback(MediaPlayerPrivateGStreamer* player, GObject* object)
{
player->setAudioStreamProperties(object);
}
-static gboolean mediaPlayerPrivateVideoChangeTimeoutCallback(MediaPlayerPrivateGStreamer* player)
-{
- // This is the callback of the timeout source created in ::videoChanged.
- player->notifyPlayerOfVideo();
- return FALSE;
-}
-
-static gboolean mediaPlayerPrivateVideoCapsChangeTimeoutCallback(MediaPlayerPrivateGStreamer* player)
-{
- // This is the callback of the timeout source created in ::videoCapsChanged.
- player->notifyPlayerOfVideoCaps();
- return FALSE;
-}
-
-#if ENABLE(VIDEO_TRACK)
-static void mediaPlayerPrivateTextChangedCallback(GObject*, MediaPlayerPrivateGStreamer* player)
-{
- player->textChanged();
-}
-
-static gboolean mediaPlayerPrivateTextChangeTimeoutCallback(MediaPlayerPrivateGStreamer* player)
-{
- // This is the callback of the timeout source created in ::textChanged.
- player->notifyPlayerOfText();
- return FALSE;
-}
-
-static GstFlowReturn mediaPlayerPrivateNewTextSampleCallback(GObject*, MediaPlayerPrivateGStreamer* player)
-{
- player->newTextSample();
- return GST_FLOW_OK;
-}
-#endif
-
-static gboolean mediaPlayerPrivateReadyStateTimeoutCallback(MediaPlayerPrivateGStreamer* player)
-{
- // This is the callback of the timeout source created in ::changePipelineState.
- // Reset pipeline if we are sitting on READY state when timeout is reached
- player->changePipelineState(GST_STATE_NULL);
- return FALSE;
-}
-
-static void mediaPlayerPrivatePluginInstallerResultFunction(GstInstallPluginsReturn result, gpointer userData)
-{
- MediaPlayerPrivateGStreamer* player = reinterpret_cast<MediaPlayerPrivateGStreamer*>(userData);
- player->handlePluginInstallerResult(result);
-}
-
-static GstClockTime toGstClockTime(float time)
-{
- // Extract the integer part of the time (seconds) and the fractional part (microseconds). Attempt to
- // round the microseconds so no floating point precision is lost and we can perform an accurate seek.
- float seconds;
- float microSeconds = modf(time, &seconds) * 1000000;
- GTimeVal timeValue;
- timeValue.tv_sec = static_cast<glong>(seconds);
- timeValue.tv_usec = static_cast<glong>(roundf(microSeconds / 10000) * 10000);
- return GST_TIMEVAL_TO_TIME(timeValue);
-}
-
void MediaPlayerPrivateGStreamer::setAudioStreamProperties(GObject* object)
{
if (g_strcmp0(G_OBJECT_TYPE_NAME(object), "GstPulseSink"))
return;
- const char* role = m_player->mediaPlayerClient() && m_player->mediaPlayerClient()->mediaPlayerIsVideo()
- ? "video" : "music";
- GstStructure* structure = gst_structure_new("stream-properties", "media.role", G_TYPE_STRING, role, NULL);
- g_object_set(object, "stream-properties", structure, NULL);
+ const char* role = m_player->client().mediaPlayerIsVideo() ? "video" : "music";
+ GstStructure* structure = gst_structure_new("stream-properties", "media.role", G_TYPE_STRING, role, nullptr);
+ g_object_set(object, "stream-properties", structure, nullptr);
gst_structure_free(structure);
GUniquePtr<gchar> elementName(gst_element_get_name(GST_ELEMENT(object)));
- LOG_MEDIA_MESSAGE("Set media.role as %s at %s", role, elementName.get());
-}
-
-PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateGStreamer::create(MediaPlayer* player)
-{
- return adoptPtr(new MediaPlayerPrivateGStreamer(player));
+ GST_DEBUG("Set media.role as %s at %s", role, elementName.get());
}
void MediaPlayerPrivateGStreamer::registerMediaEngine(MediaEngineRegistrar registrar)
{
if (isAvailable())
- registrar(create, getSupportedTypes, supportsType, 0, 0, 0);
+ registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateGStreamer>(player); },
+ getSupportedTypes, supportsType, nullptr, nullptr, nullptr, supportsKeySystem);
}
bool initializeGStreamerAndRegisterWebKitElements()
@@ -210,17 +115,14 @@ bool initializeGStreamerAndRegisterWebKitElements()
if (!initializeGStreamer())
return false;
- GRefPtr<GstElementFactory> srcFactory = gst_element_factory_find("webkitwebsrc");
+ registerWebKitGStreamerElements();
+
+ GRefPtr<GstElementFactory> srcFactory = adoptGRef(gst_element_factory_find("webkitwebsrc"));
if (!srcFactory) {
GST_DEBUG_CATEGORY_INIT(webkit_media_player_debug, "webkitmediaplayer", 0, "WebKit media player");
- gst_element_register(0, "webkitwebsrc", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_WEB_SRC);
+ gst_element_register(nullptr, "webkitwebsrc", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_WEB_SRC);
}
-#if ENABLE(MEDIA_SOURCE)
- GRefPtr<GstElementFactory> WebKitMediaSrcFactory = gst_element_factory_find("webkitmediasrc");
- if (!WebKitMediaSrcFactory)
- gst_element_register(0, "webkitmediasrc", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_MEDIA_SRC);
-#endif
return true;
}
@@ -229,52 +131,50 @@ bool MediaPlayerPrivateGStreamer::isAvailable()
if (!initializeGStreamerAndRegisterWebKitElements())
return false;
- GRefPtr<GstElementFactory> factory = gst_element_factory_find("playbin");
+ GRefPtr<GstElementFactory> factory = adoptGRef(gst_element_factory_find("playbin"));
return factory;
}
MediaPlayerPrivateGStreamer::MediaPlayerPrivateGStreamer(MediaPlayer* player)
: MediaPlayerPrivateGStreamerBase(player)
- , m_source(0)
- , m_seekTime(0)
+ , m_buffering(false)
+ , m_bufferingPercentage(0)
+ , m_canFallBackToLastFinishedSeekPosition(false)
, m_changingRate(false)
- , m_endTime(numeric_limits<float>::infinity())
+ , m_downloadFinished(false)
+ , m_errorOccured(false)
, m_isEndReached(false)
, m_isStreaming(false)
- , m_mediaLocations(0)
- , m_mediaLocationCurrentIndex(0)
- , m_resetPipeline(false)
+ , m_durationAtEOS(0)
, m_paused(true)
- , m_playbackRatePause(false)
+ , m_playbackRate(1)
+ , m_requestedState(GST_STATE_VOID_PENDING)
+ , m_resetPipeline(false)
, m_seeking(false)
, m_seekIsPending(false)
+ , m_seekTime(0)
+ , m_source(nullptr)
+ , m_volumeAndMuteInitialized(false)
+ , m_weakPtrFactory(this)
+ , m_mediaLocations(nullptr)
+ , m_mediaLocationCurrentIndex(0)
+ , m_playbackRatePause(false)
, m_timeOfOverlappingSeek(-1)
- , m_buffering(false)
- , m_playbackRate(1)
, m_lastPlaybackRate(1)
- , m_errorOccured(false)
- , m_mediaDuration(0)
- , m_downloadFinished(false)
- , m_fillTimer(this, &MediaPlayerPrivateGStreamer::fillTimerFired)
+ , m_fillTimer(*this, &MediaPlayerPrivateGStreamer::fillTimerFired)
, m_maxTimeLoaded(0)
- , m_bufferingPercentage(0)
, m_preload(player->preload())
, m_delayingLoad(false)
- , m_mediaDurationKnown(true)
, m_maxTimeLoadedAtLastDidLoadingProgress(0)
- , m_volumeAndMuteInitialized(false)
, m_hasVideo(false)
, m_hasAudio(false)
- , m_audioTimerHandler(0)
- , m_textTimerHandler(0)
- , m_videoTimerHandler(0)
- , m_videoCapsTimerHandler(0)
- , m_readyTimerHandler(0)
- , m_totalBytes(-1)
+ , m_readyTimerHandler(RunLoop::main(), this, &MediaPlayerPrivateGStreamer::readyTimerFired)
+ , m_totalBytes(0)
, m_preservesPitch(false)
- , m_requestedState(GST_STATE_VOID_PENDING)
- , m_missingPlugins(false)
{
+#if USE(GLIB)
+ m_readyTimerHandler.setPriority(G_PRIORITY_DEFAULT_IDLE);
+#endif
}
MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer()
@@ -294,50 +194,35 @@ MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer()
if (m_mediaLocations) {
gst_structure_free(m_mediaLocations);
- m_mediaLocations = 0;
+ m_mediaLocations = nullptr;
}
+ if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
+ g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);
+
if (m_autoAudioSink)
g_signal_handlers_disconnect_by_func(G_OBJECT(m_autoAudioSink.get()),
reinterpret_cast<gpointer>(setAudioStreamPropertiesCallback), this);
- if (m_readyTimerHandler)
- g_source_remove(m_readyTimerHandler);
-
- if (m_playBin) {
- GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_playBin.get())));
- ASSERT(bus);
- g_signal_handlers_disconnect_by_func(bus.get(), reinterpret_cast<gpointer>(mediaPlayerPrivateMessageCallback), this);
- gst_bus_remove_signal_watch(bus.get());
-
- g_signal_handlers_disconnect_by_func(m_playBin.get(), reinterpret_cast<gpointer>(mediaPlayerPrivateSourceChangedCallback), this);
- g_signal_handlers_disconnect_by_func(m_playBin.get(), reinterpret_cast<gpointer>(mediaPlayerPrivateVideoChangedCallback), this);
- g_signal_handlers_disconnect_by_func(m_playBin.get(), reinterpret_cast<gpointer>(mediaPlayerPrivateAudioChangedCallback), this);
-#if ENABLE(VIDEO_TRACK)
- g_signal_handlers_disconnect_by_func(m_playBin.get(), reinterpret_cast<gpointer>(mediaPlayerPrivateNewTextSampleCallback), this);
- g_signal_handlers_disconnect_by_func(m_playBin.get(), reinterpret_cast<gpointer>(mediaPlayerPrivateTextChangedCallback), this);
-#endif
-
- gst_element_set_state(m_playBin.get(), GST_STATE_NULL);
- m_playBin.clear();
+ m_readyTimerHandler.stop();
+ if (m_missingPluginsCallback) {
+ m_missingPluginsCallback->invalidate();
+ m_missingPluginsCallback = nullptr;
}
- if (m_webkitVideoSink) {
- GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_webkitVideoSink.get(), "sink"));
- g_signal_handlers_disconnect_by_func(videoSinkPad.get(), reinterpret_cast<gpointer>(mediaPlayerPrivateVideoSinkCapsChangedCallback), this);
+ if (m_videoSink) {
+ GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
+ g_signal_handlers_disconnect_matched(videoSinkPad.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
}
- if (m_videoTimerHandler)
- g_source_remove(m_videoTimerHandler);
-
- if (m_audioTimerHandler)
- g_source_remove(m_audioTimerHandler);
-
- if (m_textTimerHandler)
- g_source_remove(m_textTimerHandler);
-
- if (m_videoCapsTimerHandler)
- g_source_remove(m_videoCapsTimerHandler);
+ if (m_pipeline) {
+ GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
+ ASSERT(bus);
+ g_signal_handlers_disconnect_by_func(bus.get(), gpointer(busMessageCallback), this);
+ gst_bus_remove_signal_watch(bus.get());
+ gst_bus_set_sync_handler(bus.get(), nullptr, nullptr, nullptr);
+ g_signal_handlers_disconnect_matched(m_pipeline.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
+ }
}
void MediaPlayerPrivateGStreamer::load(const String& urlString)
@@ -354,18 +239,21 @@ void MediaPlayerPrivateGStreamer::load(const String& urlString)
if (url.isLocalFile())
cleanURL = cleanURL.substring(0, url.pathEnd());
- if (!m_playBin)
+ if (!m_pipeline)
createGSTPlayBin();
- ASSERT(m_playBin);
+ if (m_fillTimer.isActive())
+ m_fillTimer.stop();
+
+ ASSERT(m_pipeline);
m_url = URL(URL(), cleanURL);
- g_object_set(m_playBin.get(), "uri", cleanURL.utf8().data(), NULL);
+ g_object_set(m_pipeline.get(), "uri", cleanURL.utf8().data(), nullptr);
- INFO_MEDIA_MESSAGE("Load %s", cleanURL.utf8().data());
+ GST_INFO("Load %s", cleanURL.utf8().data());
if (m_preload == MediaPlayer::None) {
- LOG_MEDIA_MESSAGE("Delaying load.");
+ GST_DEBUG("Delaying load.");
m_delayingLoad = true;
}
@@ -376,24 +264,32 @@ void MediaPlayerPrivateGStreamer::load(const String& urlString)
m_readyState = MediaPlayer::HaveNothing;
m_player->readyStateChanged();
m_volumeAndMuteInitialized = false;
+ m_durationAtEOS = 0;
if (!m_delayingLoad)
commitLoad();
}
#if ENABLE(MEDIA_SOURCE)
-void MediaPlayerPrivateGStreamer::load(const String& url, PassRefPtr<HTMLMediaSource> mediaSource)
+void MediaPlayerPrivateGStreamer::load(const String&, MediaSourcePrivateClient*)
+{
+ // Properly fail so the global MediaPlayer tries to fallback to the next MediaPlayerPrivate.
+ m_networkState = MediaPlayer::FormatError;
+ m_player->networkStateChanged();
+}
+#endif
+
+#if ENABLE(MEDIA_STREAM)
+void MediaPlayerPrivateGStreamer::load(MediaStreamPrivate&)
{
- String mediasourceUri = String::format("mediasource%s", url.utf8().data());
- m_mediaSource = mediaSource;
- load(mediasourceUri);
+ notImplemented();
}
#endif
void MediaPlayerPrivateGStreamer::commitLoad()
{
ASSERT(!m_delayingLoad);
- LOG_MEDIA_MESSAGE("Committing load.");
+ GST_DEBUG("Committing load.");
// GStreamer needs to have the pipeline set to a paused state to
// start providing anything useful.
@@ -403,7 +299,7 @@ void MediaPlayerPrivateGStreamer::commitLoad()
updateStates();
}
-float MediaPlayerPrivateGStreamer::playbackPosition() const
+double MediaPlayerPrivateGStreamer::playbackPosition() const
{
if (m_isEndReached) {
// Position queries on a null pipeline return 0. If we're at
@@ -412,48 +308,56 @@ float MediaPlayerPrivateGStreamer::playbackPosition() const
// what the Media element spec expects us to do.
if (m_seeking)
return m_seekTime;
- if (m_mediaDuration)
- return m_mediaDuration;
+
+ MediaTime mediaDuration = durationMediaTime();
+ if (mediaDuration)
+ return mediaDuration.toDouble();
return 0;
}
// Position is only available if no async state change is going on and the state is either paused or playing.
gint64 position = GST_CLOCK_TIME_NONE;
GstQuery* query= gst_query_new_position(GST_FORMAT_TIME);
- if (gst_element_query(m_playBin.get(), query))
+ if (gst_element_query(m_pipeline.get(), query))
gst_query_parse_position(query, 0, &position);
+ gst_query_unref(query);
- float result = 0.0f;
- if (static_cast<GstClockTime>(position) != GST_CLOCK_TIME_NONE)
- result = static_cast<double>(position) / GST_SECOND;
- else if (m_canFallBackToLastFinishedSeekPositon)
- result = m_seekTime;
-
- LOG_MEDIA_MESSAGE("Position %" GST_TIME_FORMAT, GST_TIME_ARGS(position));
+ GST_DEBUG("Position %" GST_TIME_FORMAT, GST_TIME_ARGS(position));
- gst_query_unref(query);
+ double result = 0.0f;
+ if (static_cast<GstClockTime>(position) != GST_CLOCK_TIME_NONE) {
+ GTimeVal timeValue;
+ GST_TIME_TO_TIMEVAL(position, timeValue);
+ result = static_cast<double>(timeValue.tv_sec + (timeValue.tv_usec / 1000000.0));
+ } else if (m_canFallBackToLastFinishedSeekPosition)
+ result = m_seekTime;
return result;
}
+void MediaPlayerPrivateGStreamer::readyTimerFired()
+{
+ changePipelineState(GST_STATE_NULL);
+}
+
bool MediaPlayerPrivateGStreamer::changePipelineState(GstState newState)
{
- ASSERT(m_playBin);
+ ASSERT(m_pipeline);
GstState currentState;
GstState pending;
- gst_element_get_state(m_playBin.get(), &currentState, &pending, 0);
+ gst_element_get_state(m_pipeline.get(), &currentState, &pending, 0);
if (currentState == newState || pending == newState) {
- LOG_MEDIA_MESSAGE("Rejected state change to %s from %s with %s pending", gst_element_state_get_name(newState),
+ GST_DEBUG("Rejected state change to %s from %s with %s pending", gst_element_state_get_name(newState),
gst_element_state_get_name(currentState), gst_element_state_get_name(pending));
return true;
}
- LOG_MEDIA_MESSAGE("Changing state change to %s from %s with %s pending", gst_element_state_get_name(newState),
+ GST_DEBUG("Changing state change to %s from %s with %s pending", gst_element_state_get_name(newState),
gst_element_state_get_name(currentState), gst_element_state_get_name(pending));
- GstStateChangeReturn setStateResult = gst_element_set_state(m_playBin.get(), newState);
+ GstStateChangeReturn setStateResult = gst_element_set_state(m_pipeline.get(), newState);
GstState pausedOrPlaying = newState == GST_STATE_PLAYING ? GST_STATE_PAUSED : GST_STATE_PLAYING;
if (currentState != pausedOrPlaying && setStateResult == GST_STATE_CHANGE_FAILURE) {
return false;
@@ -463,13 +367,13 @@ bool MediaPlayerPrivateGStreamer::changePipelineState(GstState newState)
// if we stay for too long on READY.
// Also lets remove the timer if we request a state change for any state other than READY.
// See also https://bugs.webkit.org/show_bug.cgi?id=117354
- if (newState == GST_STATE_READY && !m_readyTimerHandler) {
- m_readyTimerHandler = g_timeout_add_seconds(gReadyStateTimerInterval, reinterpret_cast<GSourceFunc>(mediaPlayerPrivateReadyStateTimeoutCallback), this);
- g_source_set_name_by_id(m_readyTimerHandler, "[WebKit] mediaPlayerPrivateReadyStateTimeoutCallback");
- } else if (newState != GST_STATE_READY && m_readyTimerHandler) {
- g_source_remove(m_readyTimerHandler);
- m_readyTimerHandler = 0;
- }
+ if (newState == GST_STATE_READY && !m_readyTimerHandler.isActive()) {
+ // Max interval in seconds to stay in the READY state on manual
+ // state change requests.
+ static const double readyStateTimerDelay = 60;
+ m_readyTimerHandler.startOneShot(readyStateTimerDelay);
+ } else if (newState != GST_STATE_READY)
+ m_readyTimerHandler.stop();
return true;
}
@@ -495,7 +399,7 @@ void MediaPlayerPrivateGStreamer::play()
m_delayingLoad = false;
m_preload = MediaPlayer::Auto;
setDownloadBuffering();
- LOG_MEDIA_MESSAGE("Play");
+ GST_DEBUG("Play");
} else {
loadingFailed(MediaPlayer::Empty);
}
@@ -505,57 +409,56 @@ void MediaPlayerPrivateGStreamer::pause()
{
m_playbackRatePause = false;
GstState currentState, pendingState;
- gst_element_get_state(m_playBin.get(), &currentState, &pendingState, 0);
+ gst_element_get_state(m_pipeline.get(), &currentState, &pendingState, 0);
if (currentState < GST_STATE_PAUSED && pendingState <= GST_STATE_PAUSED)
return;
if (changePipelineState(GST_STATE_PAUSED))
- INFO_MEDIA_MESSAGE("Pause");
+ GST_INFO("Pause");
else
loadingFailed(MediaPlayer::Empty);
}
-float MediaPlayerPrivateGStreamer::duration() const
+MediaTime MediaPlayerPrivateGStreamer::durationMediaTime() const
{
- if (!m_playBin)
- return 0.0f;
+ if (!m_pipeline)
+ return { };
if (m_errorOccured)
- return 0.0f;
+ return { };
- // Media duration query failed already, don't attempt new useless queries.
- if (!m_mediaDurationKnown)
- return numeric_limits<float>::infinity();
+ if (m_durationAtEOS)
+ return MediaTime::createWithDouble(m_durationAtEOS);
- if (m_mediaDuration)
- return m_mediaDuration;
+ // The duration query would fail on a not-prerolled pipeline.
+ if (GST_STATE(m_pipeline.get()) < GST_STATE_PAUSED)
+ return { };
GstFormat timeFormat = GST_FORMAT_TIME;
gint64 timeLength = 0;
- bool failure = !gst_element_query_duration(m_playBin.get(), timeFormat, &timeLength) || static_cast<guint64>(timeLength) == GST_CLOCK_TIME_NONE;
+ bool failure = !gst_element_query_duration(m_pipeline.get(), timeFormat, &timeLength) || static_cast<guint64>(timeLength) == GST_CLOCK_TIME_NONE;
if (failure) {
- LOG_MEDIA_MESSAGE("Time duration query failed for %s", m_url.string().utf8().data());
- return numeric_limits<float>::infinity();
+ GST_DEBUG("Time duration query failed for %s", m_url.string().utf8().data());
+ return MediaTime::positiveInfiniteTime();
}
- LOG_MEDIA_MESSAGE("Duration: %" GST_TIME_FORMAT, GST_TIME_ARGS(timeLength));
+ GST_DEBUG("Duration: %" GST_TIME_FORMAT, GST_TIME_ARGS(timeLength));
- m_mediaDuration = static_cast<double>(timeLength) / GST_SECOND;
- return m_mediaDuration;
+ return MediaTime::createWithDouble(static_cast<double>(timeLength) / GST_SECOND);
// FIXME: handle 3.14.9.5 properly
}
-float MediaPlayerPrivateGStreamer::currentTime() const
+MediaTime MediaPlayerPrivateGStreamer::currentMediaTime() const
{
- if (!m_playBin)
- return 0.0f;
+ if (!m_pipeline)
+ return { };
if (m_errorOccured)
- return 0.0f;
+ return { };
if (m_seeking)
- return m_seekTime;
+ return MediaTime::createWithFloat(m_seekTime);
// Workaround for
// https://bugzilla.gnome.org/show_bug.cgi?id=639941 In GStreamer
@@ -563,30 +466,30 @@ float MediaPlayerPrivateGStreamer::currentTime() const
// negative playback rate. There's no upstream accepted patch for
// this bug yet, hence this temporary workaround.
if (m_isEndReached && m_playbackRate < 0)
- return 0.0f;
+ return { };
- return playbackPosition();
+ return MediaTime::createWithDouble(playbackPosition());
}
void MediaPlayerPrivateGStreamer::seek(float time)
{
- if (!m_playBin)
+ if (!m_pipeline)
return;
if (m_errorOccured)
return;
- INFO_MEDIA_MESSAGE("[Seek] seek attempt to %f secs", time);
+ GST_INFO("[Seek] seek attempt to %f secs", time);
// Avoid useless seeking.
- if (time == currentTime())
+ if (MediaTime::createWithFloat(time) == currentMediaTime())
return;
if (isLiveStream())
return;
GstClockTime clockTime = toGstClockTime(time);
- INFO_MEDIA_MESSAGE("[Seek] seeking to %" GST_TIME_FORMAT " (%f)", GST_TIME_ARGS(clockTime), time);
+ GST_INFO("[Seek] seeking to %" GST_TIME_FORMAT " (%f)", GST_TIME_ARGS(clockTime), time);
if (m_seeking) {
m_timeOfOverlappingSeek = time;
@@ -597,15 +500,15 @@ void MediaPlayerPrivateGStreamer::seek(float time)
}
GstState state;
- GstStateChangeReturn getStateResult = gst_element_get_state(m_playBin.get(), &state, 0, 0);
+ GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
if (getStateResult == GST_STATE_CHANGE_FAILURE || getStateResult == GST_STATE_CHANGE_NO_PREROLL) {
- LOG_MEDIA_MESSAGE("[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult));
+ GST_DEBUG("[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult));
return;
}
if (getStateResult == GST_STATE_CHANGE_ASYNC || state < GST_STATE_PAUSED || m_isEndReached) {
m_seekIsPending = true;
if (m_isEndReached) {
- LOG_MEDIA_MESSAGE("[Seek] reset pipeline");
+ GST_DEBUG("[Seek] reset pipeline");
m_resetPipeline = true;
if (!changePipelineState(GST_STATE_PAUSED))
loadingFailed(MediaPlayer::Empty);
@@ -613,7 +516,7 @@ void MediaPlayerPrivateGStreamer::seek(float time)
} else {
// We can seek now.
if (!doSeek(clockTime, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE))) {
- LOG_MEDIA_MESSAGE("[Seek] seeking to %f failed", time);
+ GST_DEBUG("[Seek] seeking to %f failed", time);
return;
}
}
@@ -627,6 +530,11 @@ bool MediaPlayerPrivateGStreamer::doSeek(gint64 position, float rate, GstSeekFla
{
gint64 startTime, endTime;
+ // TODO: Should do more than that, need to notify the media source
+ // and probably flush the pipeline at least.
+ if (isMediaSource())
+ return true;
+
if (rate > 0) {
startTime = position;
endTime = GST_CLOCK_TIME_NONE;
@@ -635,7 +543,7 @@ bool MediaPlayerPrivateGStreamer::doSeek(gint64 position, float rate, GstSeekFla
// If we are at beginning of media, start from the end to
// avoid immediate EOS.
if (position < 0)
- endTime = static_cast<gint64>(duration() * GST_SECOND);
+ endTime = static_cast<gint64>(durationMediaTime().toDouble() * GST_SECOND);
else
endTime = position;
}
@@ -643,7 +551,7 @@ bool MediaPlayerPrivateGStreamer::doSeek(gint64 position, float rate, GstSeekFla
if (!rate)
rate = 1.0;
- return gst_element_seek(m_playBin.get(), rate, GST_FORMAT_TIME, seekType,
+ return gst_element_seek(m_pipeline.get(), rate, GST_FORMAT_TIME, seekType,
GST_SEEK_TYPE_SET, startTime, GST_SEEK_TYPE_SET, endTime);
}
@@ -655,7 +563,7 @@ void MediaPlayerPrivateGStreamer::updatePlaybackRate()
float currentPosition = static_cast<float>(playbackPosition() * GST_SECOND);
bool mute = false;
- INFO_MEDIA_MESSAGE("Set Rate to %f", m_playbackRate);
+ GST_INFO("Set Rate to %f", m_playbackRate);
if (m_playbackRate > 0) {
// Mute the sound if the playback rate is too extreme and
@@ -667,20 +575,20 @@ void MediaPlayerPrivateGStreamer::updatePlaybackRate()
mute = true;
}
- INFO_MEDIA_MESSAGE("Need to mute audio?: %d", (int) mute);
+ GST_INFO("Need to mute audio?: %d", (int) mute);
if (doSeek(currentPosition, m_playbackRate, static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH))) {
- g_object_set(m_playBin.get(), "mute", mute, NULL);
+ g_object_set(m_pipeline.get(), "mute", mute, nullptr);
m_lastPlaybackRate = m_playbackRate;
} else {
m_playbackRate = m_lastPlaybackRate;
- ERROR_MEDIA_MESSAGE("Set rate to %f failed", m_playbackRate);
+ GST_ERROR("Set rate to %f failed", m_playbackRate);
}
if (m_playbackRatePause) {
GstState state;
GstState pending;
- gst_element_get_state(m_playBin.get(), &state, &pending, 0);
+ gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
if (state != GST_STATE_PLAYING && pending != GST_STATE_PLAYING)
changePipelineState(GST_STATE_PLAYING);
m_playbackRatePause = false;
@@ -693,7 +601,7 @@ void MediaPlayerPrivateGStreamer::updatePlaybackRate()
bool MediaPlayerPrivateGStreamer::paused() const
{
if (m_isEndReached) {
- LOG_MEDIA_MESSAGE("Ignoring pause at EOS");
+ GST_DEBUG("Ignoring pause at EOS");
return true;
}
@@ -701,8 +609,8 @@ bool MediaPlayerPrivateGStreamer::paused() const
return false;
GstState state;
- gst_element_get_state(m_playBin.get(), &state, 0, 0);
- return state == GST_STATE_PAUSED;
+ gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
+ return state <= GST_STATE_PAUSED;
}
bool MediaPlayerPrivateGStreamer::seeking() const
@@ -710,34 +618,35 @@ bool MediaPlayerPrivateGStreamer::seeking() const
return m_seeking;
}
-void MediaPlayerPrivateGStreamer::videoChanged()
-{
- if (m_videoTimerHandler)
- g_source_remove(m_videoTimerHandler);
- m_videoTimerHandler = g_idle_add_full(G_PRIORITY_DEFAULT, reinterpret_cast<GSourceFunc>(mediaPlayerPrivateVideoChangeTimeoutCallback), this, 0);
-}
-
-void MediaPlayerPrivateGStreamer::videoCapsChanged()
+void MediaPlayerPrivateGStreamer::videoChangedCallback(MediaPlayerPrivateGStreamer* player)
{
- if (m_videoCapsTimerHandler)
- g_source_remove(m_videoCapsTimerHandler);
- m_videoCapsTimerHandler = g_timeout_add(0, reinterpret_cast<GSourceFunc>(mediaPlayerPrivateVideoCapsChangeTimeoutCallback), this);
+ player->m_notifier->notify(MainThreadNotification::VideoChanged, [player] { player->notifyPlayerOfVideo(); });
}
void MediaPlayerPrivateGStreamer::notifyPlayerOfVideo()
{
- m_videoTimerHandler = 0;
+ if (UNLIKELY(!m_pipeline || !m_source))
+ return;
gint numTracks = 0;
- if (m_playBin)
- g_object_get(m_playBin.get(), "n-video", &numTracks, NULL);
+ bool useMediaSource = isMediaSource();
+ GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
+ g_object_get(element, "n-video", &numTracks, nullptr);
m_hasVideo = numTracks > 0;
+ if (m_hasVideo)
+ m_player->sizeChanged();
+
+ if (useMediaSource) {
+ GST_DEBUG("Tracks managed by source element. Bailing out now.");
+ m_player->client().mediaPlayerEngineUpdated(m_player);
+ return;
+ }
#if ENABLE(VIDEO_TRACK)
for (gint i = 0; i < numTracks; ++i) {
GRefPtr<GstPad> pad;
- g_signal_emit_by_name(m_playBin.get(), "get-video-pad", i, &pad.outPtr(), NULL);
+ g_signal_emit_by_name(m_pipeline.get(), "get-video-pad", i, &pad.outPtr(), nullptr);
ASSERT(pad);
if (i < static_cast<gint>(m_videoTracks.size())) {
@@ -747,50 +656,60 @@ void MediaPlayerPrivateGStreamer::notifyPlayerOfVideo()
continue;
}
- RefPtr<VideoTrackPrivateGStreamer> track = VideoTrackPrivateGStreamer::create(m_playBin, i, pad);
+ RefPtr<VideoTrackPrivateGStreamer> track = VideoTrackPrivateGStreamer::create(m_pipeline, i, pad);
m_videoTracks.append(track);
- m_player->addVideoTrack(track.release());
+ m_player->addVideoTrack(*track);
}
while (static_cast<gint>(m_videoTracks.size()) > numTracks) {
RefPtr<VideoTrackPrivateGStreamer> track = m_videoTracks.last();
track->disconnect();
m_videoTracks.removeLast();
- m_player->removeVideoTrack(track.release());
+ m_player->removeVideoTrack(*track);
}
#endif
- m_player->mediaPlayerClient()->mediaPlayerEngineUpdated(m_player);
+ m_player->client().mediaPlayerEngineUpdated(m_player);
+}
+
+void MediaPlayerPrivateGStreamer::videoSinkCapsChangedCallback(MediaPlayerPrivateGStreamer* player)
+{
+ player->m_notifier->notify(MainThreadNotification::VideoCapsChanged, [player] { player->notifyPlayerOfVideoCaps(); });
}
void MediaPlayerPrivateGStreamer::notifyPlayerOfVideoCaps()
{
- m_videoCapsTimerHandler = 0;
m_videoSize = IntSize();
- m_player->mediaPlayerClient()->mediaPlayerEngineUpdated(m_player);
+ m_player->client().mediaPlayerEngineUpdated(m_player);
}
-void MediaPlayerPrivateGStreamer::audioChanged()
+void MediaPlayerPrivateGStreamer::audioChangedCallback(MediaPlayerPrivateGStreamer* player)
{
- if (m_audioTimerHandler)
- g_source_remove(m_audioTimerHandler);
- m_audioTimerHandler = g_idle_add_full(G_PRIORITY_DEFAULT, reinterpret_cast<GSourceFunc>(mediaPlayerPrivateAudioChangeTimeoutCallback), this, 0);
+ player->m_notifier->notify(MainThreadNotification::AudioChanged, [player] { player->notifyPlayerOfAudio(); });
}
void MediaPlayerPrivateGStreamer::notifyPlayerOfAudio()
{
- m_audioTimerHandler = 0;
+ if (UNLIKELY(!m_pipeline || !m_source))
+ return;
gint numTracks = 0;
- if (m_playBin)
- g_object_get(m_playBin.get(), "n-audio", &numTracks, NULL);
+ bool useMediaSource = isMediaSource();
+ GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
+ g_object_get(element, "n-audio", &numTracks, nullptr);
m_hasAudio = numTracks > 0;
+ if (useMediaSource) {
+ GST_DEBUG("Tracks managed by source element. Bailing out now.");
+ m_player->client().mediaPlayerEngineUpdated(m_player);
+ return;
+ }
+
#if ENABLE(VIDEO_TRACK)
for (gint i = 0; i < numTracks; ++i) {
GRefPtr<GstPad> pad;
- g_signal_emit_by_name(m_playBin.get(), "get-audio-pad", i, &pad.outPtr(), NULL);
+ g_signal_emit_by_name(m_pipeline.get(), "get-audio-pad", i, &pad.outPtr(), nullptr);
ASSERT(pad);
if (i < static_cast<gint>(m_audioTracks.size())) {
@@ -800,41 +719,46 @@ void MediaPlayerPrivateGStreamer::notifyPlayerOfAudio()
continue;
}
- RefPtr<AudioTrackPrivateGStreamer> track = AudioTrackPrivateGStreamer::create(m_playBin, i, pad);
+ RefPtr<AudioTrackPrivateGStreamer> track = AudioTrackPrivateGStreamer::create(m_pipeline, i, pad);
m_audioTracks.insert(i, track);
- m_player->addAudioTrack(track.release());
+ m_player->addAudioTrack(*track);
}
while (static_cast<gint>(m_audioTracks.size()) > numTracks) {
RefPtr<AudioTrackPrivateGStreamer> track = m_audioTracks.last();
track->disconnect();
m_audioTracks.removeLast();
- m_player->removeAudioTrack(track.release());
+ m_player->removeAudioTrack(*track);
}
#endif
- m_player->mediaPlayerClient()->mediaPlayerEngineUpdated(m_player);
+ m_player->client().mediaPlayerEngineUpdated(m_player);
}
#if ENABLE(VIDEO_TRACK)
-void MediaPlayerPrivateGStreamer::textChanged()
+void MediaPlayerPrivateGStreamer::textChangedCallback(MediaPlayerPrivateGStreamer* player)
{
- if (m_textTimerHandler)
- g_source_remove(m_textTimerHandler);
- m_textTimerHandler = g_timeout_add(0, reinterpret_cast<GSourceFunc>(mediaPlayerPrivateTextChangeTimeoutCallback), this);
+ player->m_notifier->notify(MainThreadNotification::TextChanged, [player] { player->notifyPlayerOfText(); });
}
void MediaPlayerPrivateGStreamer::notifyPlayerOfText()
{
- m_textTimerHandler = 0;
+ if (UNLIKELY(!m_pipeline || !m_source))
+ return;
gint numTracks = 0;
- if (m_playBin)
- g_object_get(m_playBin.get(), "n-text", &numTracks, NULL);
+ bool useMediaSource = isMediaSource();
+ GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
+ g_object_get(element, "n-text", &numTracks, nullptr);
+
+ if (useMediaSource) {
+ GST_DEBUG("Tracks managed by source element. Bailing out now.");
+ return;
+ }
for (gint i = 0; i < numTracks; ++i) {
GRefPtr<GstPad> pad;
- g_signal_emit_by_name(m_playBin.get(), "get-text-pad", i, &pad.outPtr(), NULL);
+ g_signal_emit_by_name(m_pipeline.get(), "get-text-pad", i, &pad.outPtr(), nullptr);
ASSERT(pad);
if (i < static_cast<gint>(m_textTracks.size())) {
@@ -846,17 +770,23 @@ void MediaPlayerPrivateGStreamer::notifyPlayerOfText()
RefPtr<InbandTextTrackPrivateGStreamer> track = InbandTextTrackPrivateGStreamer::create(i, pad);
m_textTracks.insert(i, track);
- m_player->addTextTrack(track.release());
+ m_player->addTextTrack(*track);
}
while (static_cast<gint>(m_textTracks.size()) > numTracks) {
RefPtr<InbandTextTrackPrivateGStreamer> track = m_textTracks.last();
track->disconnect();
m_textTracks.removeLast();
- m_player->removeTextTrack(track.release());
+ m_player->removeTextTrack(*track);
}
}
+GstFlowReturn MediaPlayerPrivateGStreamer::newTextSampleCallback(MediaPlayerPrivateGStreamer* player)
+{
+ player->newTextSample();
+ return GST_FLOW_OK;
+}
+
void MediaPlayerPrivateGStreamer::newTextSample()
{
if (!m_textAppSink)
@@ -866,7 +796,7 @@ void MediaPlayerPrivateGStreamer::newTextSample()
gst_pad_get_sticky_event(m_textAppSinkPad.get(), GST_EVENT_STREAM_START, 0));
GRefPtr<GstSample> sample;
- g_signal_emit_by_name(m_textAppSink.get(), "pull-sample", &sample.outPtr(), NULL);
+ g_signal_emit_by_name(m_textAppSink.get(), "pull-sample", &sample.outPtr(), nullptr);
ASSERT(sample);
if (streamStartEvent) {
@@ -882,9 +812,9 @@ void MediaPlayerPrivateGStreamer::newTextSample()
}
}
if (!found)
- WARN_MEDIA_MESSAGE("Got sample with unknown stream ID.");
+ GST_WARNING("Got sample with unknown stream ID.");
} else
- WARN_MEDIA_MESSAGE("Unable to handle sample with no stream start event.");
+ GST_WARNING("Unable to handle sample with no stream start event.");
}
#endif
@@ -915,7 +845,7 @@ void MediaPlayerPrivateGStreamer::setRate(float rate)
m_playbackRate = rate;
m_changingRate = true;
- gst_element_get_state(m_playBin.get(), &state, &pending, 0);
+ gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
if (!rate) {
m_changingRate = false;
@@ -932,52 +862,53 @@ void MediaPlayerPrivateGStreamer::setRate(float rate)
updatePlaybackRate();
}
+double MediaPlayerPrivateGStreamer::rate() const
+{
+ return m_playbackRate;
+}
+
void MediaPlayerPrivateGStreamer::setPreservesPitch(bool preservesPitch)
{
m_preservesPitch = preservesPitch;
}
-PassRefPtr<TimeRanges> MediaPlayerPrivateGStreamer::buffered() const
+std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateGStreamer::buffered() const
{
- RefPtr<TimeRanges> timeRanges = TimeRanges::create();
+ auto timeRanges = std::make_unique<PlatformTimeRanges>();
if (m_errorOccured || isLiveStream())
- return timeRanges.release();
+ return timeRanges;
-#if GST_CHECK_VERSION(0, 10, 31)
- float mediaDuration(duration());
+ float mediaDuration(durationMediaTime().toDouble());
if (!mediaDuration || std::isinf(mediaDuration))
- return timeRanges.release();
+ return timeRanges;
GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);
- if (!gst_element_query(m_playBin.get(), query)) {
+ if (!gst_element_query(m_pipeline.get(), query)) {
gst_query_unref(query);
- return timeRanges.release();
+ return timeRanges;
}
- for (guint index = 0; index < gst_query_get_n_buffering_ranges(query); index++) {
+ guint numBufferingRanges = gst_query_get_n_buffering_ranges(query);
+ for (guint index = 0; index < numBufferingRanges; index++) {
gint64 rangeStart = 0, rangeStop = 0;
if (gst_query_parse_nth_buffering_range(query, index, &rangeStart, &rangeStop))
- timeRanges->add(static_cast<float>((rangeStart * mediaDuration) / GST_FORMAT_PERCENT_MAX),
- static_cast<float>((rangeStop * mediaDuration) / GST_FORMAT_PERCENT_MAX));
+ timeRanges->add(MediaTime::createWithDouble((rangeStart * mediaDuration) / GST_FORMAT_PERCENT_MAX),
+ MediaTime::createWithDouble((rangeStop * mediaDuration) / GST_FORMAT_PERCENT_MAX));
}
// Fallback to the more general maxTimeLoaded() if no range has
// been found.
if (!timeRanges->length())
if (float loaded = maxTimeLoaded())
- timeRanges->add(0, loaded);
+ timeRanges->add(MediaTime::zeroTime(), MediaTime::createWithDouble(loaded));
gst_query_unref(query);
-#else
- float loaded = maxTimeLoaded();
- if (!m_errorOccured && !isLiveStream() && loaded > 0)
- timeRanges->add(0, loaded);
-#endif
- return timeRanges.release();
+
+ return timeRanges;
}
-gboolean MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
+void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
{
GUniqueOutPtr<GError> err;
GUniqueOutPtr<gchar> debug;
@@ -987,7 +918,7 @@ gboolean MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
const GstStructure* structure = gst_message_get_structure(message);
GstState requestedState, currentState;
- m_canFallBackToLastFinishedSeekPositon = false;
+ m_canFallBackToLastFinishedSeekPosition = false;
if (structure) {
const gchar* messageTypeName = gst_structure_get_name(structure);
@@ -996,40 +927,37 @@ gboolean MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
// notify of the new location(s) of the media.
if (!g_strcmp0(messageTypeName, "redirect")) {
mediaLocationChanged(message);
- return TRUE;
+ return;
}
}
// We ignore state changes from internal elements. They are forwarded to playbin2 anyway.
- bool messageSourceIsPlaybin = GST_MESSAGE_SRC(message) == reinterpret_cast<GstObject*>(m_playBin.get());
+ bool messageSourceIsPlaybin = GST_MESSAGE_SRC(message) == reinterpret_cast<GstObject*>(m_pipeline.get());
- LOG_MEDIA_MESSAGE("Message %s received from element %s", GST_MESSAGE_TYPE_NAME(message), GST_MESSAGE_SRC_NAME(message));
+ GST_DEBUG("Message %s received from element %s", GST_MESSAGE_TYPE_NAME(message), GST_MESSAGE_SRC_NAME(message));
switch (GST_MESSAGE_TYPE(message)) {
case GST_MESSAGE_ERROR:
- if (m_resetPipeline)
- break;
- if (m_missingPlugins)
+ if (m_resetPipeline || m_missingPluginsCallback || m_errorOccured)
break;
gst_message_parse_error(message, &err.outPtr(), &debug.outPtr());
- ERROR_MEDIA_MESSAGE("Error %d: %s (url=%s)", err->code, err->message, m_url.string().utf8().data());
+ GST_ERROR("Error %d: %s (url=%s)", err->code, err->message, m_url.string().utf8().data());
- GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_playBin.get()), GST_DEBUG_GRAPH_SHOW_ALL, "webkit-video.error");
+ GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "webkit-video.error");
error = MediaPlayer::Empty;
- if (err->code == GST_STREAM_ERROR_CODEC_NOT_FOUND
- || err->code == GST_STREAM_ERROR_WRONG_TYPE
- || err->code == GST_STREAM_ERROR_FAILED
- || err->code == GST_CORE_ERROR_MISSING_PLUGIN
- || err->code == GST_RESOURCE_ERROR_NOT_FOUND)
+ if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_CODEC_NOT_FOUND)
+ || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE)
+ || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_FAILED)
+ || g_error_matches(err.get(), GST_CORE_ERROR, GST_CORE_ERROR_MISSING_PLUGIN)
+ || g_error_matches(err.get(), GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_NOT_FOUND))
error = MediaPlayer::FormatError;
- else if (err->domain == GST_STREAM_ERROR) {
+ else if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_TYPE_NOT_FOUND)) {
// Let the mediaPlayerClient handle the stream error, in
// this case the HTMLMediaElement will emit a stalled
// event.
- if (err->code == GST_STREAM_ERROR_TYPE_NOT_FOUND) {
- ERROR_MEDIA_MESSAGE("Decode error, let the Media element emit a stalled event.");
- break;
- }
+ GST_ERROR("Decode error, let the Media element emit a stalled event.");
+ break;
+ } else if (err->domain == GST_STREAM_ERROR) {
error = MediaPlayer::DecodeError;
attemptNextLocation = true;
} else if (err->domain == GST_RESOURCE_ERROR)
@@ -1055,9 +983,9 @@ gboolean MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
// Construct a filename for the graphviz dot file output.
GstState newState;
- gst_message_parse_state_changed(message, &currentState, &newState, 0);
+ gst_message_parse_state_changed(message, &currentState, &newState, nullptr);
CString dotFileName = String::format("webkit-video.%s_%s", gst_element_state_get_name(currentState), gst_element_state_get_name(newState)).utf8();
- GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_playBin.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.data());
+ GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.data());
break;
}
@@ -1065,71 +993,176 @@ gboolean MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
processBufferingStats(message);
break;
case GST_MESSAGE_DURATION_CHANGED:
- if (messageSourceIsPlaybin)
+ // Duration in MSE is managed by MediaSource, SourceBuffer and AppendPipeline.
+ if (messageSourceIsPlaybin && !isMediaSource())
durationChanged();
break;
case GST_MESSAGE_REQUEST_STATE:
gst_message_parse_request_state(message, &requestedState);
- gst_element_get_state(m_playBin.get(), &currentState, NULL, 250);
+ gst_element_get_state(m_pipeline.get(), &currentState, nullptr, 250 * GST_NSECOND);
if (requestedState < currentState) {
GUniquePtr<gchar> elementName(gst_element_get_name(GST_ELEMENT(message)));
- INFO_MEDIA_MESSAGE("Element %s requested state change to %s", elementName.get(),
+ GST_INFO("Element %s requested state change to %s", elementName.get(),
gst_element_state_get_name(requestedState));
m_requestedState = requestedState;
if (!changePipelineState(requestedState))
loadingFailed(MediaPlayer::Empty);
}
break;
+ case GST_MESSAGE_CLOCK_LOST:
+ // This can only happen in PLAYING state and we should just
+ // get a new clock by moving back to PAUSED and then to
+ // PLAYING again.
+ // This can happen if the stream that ends in a sink that
+ // provides the current clock disappears, for example if
+ // the audio sink provides the clock and the audio stream
+ // is disabled. It also happens relatively often with
+ // HTTP adaptive streams when switching between different
+ // variants of a stream.
+ gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
+ gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
+ break;
+ case GST_MESSAGE_LATENCY:
+ // Recalculate the latency, we don't need any special handling
+ // here other than the GStreamer default.
+ // This can happen if the latency of live elements changes, or
+ // for one reason or another a new live element is added or
+ // removed from the pipeline.
+ gst_bin_recalculate_latency(GST_BIN(m_pipeline.get()));
+ break;
case GST_MESSAGE_ELEMENT:
if (gst_is_missing_plugin_message(message)) {
- gchar* detail = gst_missing_plugin_message_get_installer_detail(message);
- gchar* detailArray[2] = {detail, 0};
- GstInstallPluginsReturn result = gst_install_plugins_async(detailArray, 0, mediaPlayerPrivatePluginInstallerResultFunction, this);
- m_missingPlugins = result == GST_INSTALL_PLUGINS_STARTED_OK;
- g_free(detail);
+ if (gst_install_plugins_supported()) {
+ m_missingPluginsCallback = MediaPlayerRequestInstallMissingPluginsCallback::create([this](uint32_t result) {
+ m_missingPluginsCallback = nullptr;
+ if (result != GST_INSTALL_PLUGINS_SUCCESS)
+ return;
+
+ changePipelineState(GST_STATE_READY);
+ changePipelineState(GST_STATE_PAUSED);
+ });
+ GUniquePtr<char> detail(gst_missing_plugin_message_get_installer_detail(message));
+ GUniquePtr<char> description(gst_missing_plugin_message_get_description(message));
+ m_player->client().requestInstallMissingPlugins(String::fromUTF8(detail.get()), String::fromUTF8(description.get()), *m_missingPluginsCallback);
+ }
+ }
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+ else if (gst_structure_has_name(structure, "drm-key-needed")) {
+ GST_DEBUG("drm-key-needed message from %s", GST_MESSAGE_SRC_NAME(message));
+ GRefPtr<GstEvent> event;
+ gst_structure_get(structure, "event", GST_TYPE_EVENT, &event.outPtr(), nullptr);
+ handleProtectionEvent(event.get());
+ }
+#endif
+#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
+ else {
+ GstMpegtsSection* section = gst_message_parse_mpegts_section(message);
+ if (section) {
+ processMpegTsSection(section);
+ gst_mpegts_section_unref(section);
+ }
}
+#endif
break;
#if ENABLE(VIDEO_TRACK)
case GST_MESSAGE_TOC:
processTableOfContents(message);
break;
#endif
+ case GST_MESSAGE_TAG: {
+ GstTagList* tags = nullptr;
+ GUniqueOutPtr<gchar> tag;
+ gst_message_parse_tag(message, &tags);
+ if (gst_tag_list_get_string(tags, GST_TAG_IMAGE_ORIENTATION, &tag.outPtr())) {
+ if (!g_strcmp0(tag.get(), "rotate-90"))
+ setVideoSourceOrientation(ImageOrientation(OriginRightTop));
+ else if (!g_strcmp0(tag.get(), "rotate-180"))
+ setVideoSourceOrientation(ImageOrientation(OriginBottomRight));
+ else if (!g_strcmp0(tag.get(), "rotate-270"))
+ setVideoSourceOrientation(ImageOrientation(OriginLeftBottom));
+ }
+ gst_tag_list_unref(tags);
+ break;
+ }
default:
- LOG_MEDIA_MESSAGE("Unhandled GStreamer message type: %s",
+ GST_DEBUG("Unhandled GStreamer message type: %s",
GST_MESSAGE_TYPE_NAME(message));
break;
}
- return TRUE;
-}
-
-void MediaPlayerPrivateGStreamer::handlePluginInstallerResult(GstInstallPluginsReturn result)
-{
- m_missingPlugins = false;
- if (result == GST_INSTALL_PLUGINS_SUCCESS) {
- changePipelineState(GST_STATE_READY);
- changePipelineState(GST_STATE_PAUSED);
- }
+ return;
}
void MediaPlayerPrivateGStreamer::processBufferingStats(GstMessage* message)
{
m_buffering = true;
- const GstStructure *structure = gst_message_get_structure(message);
- gst_structure_get_int(structure, "buffer-percent", &m_bufferingPercentage);
+ gst_message_parse_buffering(message, &m_bufferingPercentage);
- LOG_MEDIA_MESSAGE("[Buffering] Buffering: %d%%.", m_bufferingPercentage);
+ GST_DEBUG("[Buffering] Buffering: %d%%.", m_bufferingPercentage);
updateStates();
}
+#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
+void MediaPlayerPrivateGStreamer::processMpegTsSection(GstMpegtsSection* section)
+{
+ ASSERT(section);
+
+ if (section->section_type == GST_MPEGTS_SECTION_PMT) {
+ const GstMpegtsPMT* pmt = gst_mpegts_section_get_pmt(section);
+ m_metadataTracks.clear();
+ for (guint i = 0; i < pmt->streams->len; ++i) {
+ const GstMpegtsPMTStream* stream = static_cast<const GstMpegtsPMTStream*>(g_ptr_array_index(pmt->streams, i));
+ if (stream->stream_type == 0x05 || stream->stream_type >= 0x80) {
+ AtomicString pid = String::number(stream->pid);
+ RefPtr<InbandMetadataTextTrackPrivateGStreamer> track = InbandMetadataTextTrackPrivateGStreamer::create(
+ InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data, pid);
+
+ // 4.7.10.12.2 Sourcing in-band text tracks
+ // If the new text track's kind is metadata, then set the text track in-band metadata track dispatch
+ // type as follows, based on the type of the media resource:
+ // Let stream type be the value of the "stream_type" field describing the text track's type in the
+ // file's program map section, interpreted as an 8-bit unsigned integer. Let length be the value of
+ // the "ES_info_length" field for the track in the same part of the program map section, interpreted
+ // as an integer as defined by the MPEG-2 specification. Let descriptor bytes be the length bytes
+ // following the "ES_info_length" field. The text track in-band metadata track dispatch type must be
+ // set to the concatenation of the stream type byte and the zero or more descriptor bytes bytes,
+ // expressed in hexadecimal using uppercase ASCII hex digits.
+ String inbandMetadataTrackDispatchType;
+ appendUnsignedAsHexFixedSize(stream->stream_type, inbandMetadataTrackDispatchType, 2);
+ for (guint j = 0; j < stream->descriptors->len; ++j) {
+ const GstMpegtsDescriptor* descriptor = static_cast<const GstMpegtsDescriptor*>(g_ptr_array_index(stream->descriptors, j));
+ for (guint k = 0; k < descriptor->length; ++k)
+ appendByteAsHex(descriptor->data[k], inbandMetadataTrackDispatchType);
+ }
+ track->setInBandMetadataTrackDispatchType(inbandMetadataTrackDispatchType);
+
+ m_metadataTracks.add(pid, track);
+ m_player->addTextTrack(*track);
+ }
+ }
+ } else {
+ AtomicString pid = String::number(section->pid);
+ RefPtr<InbandMetadataTextTrackPrivateGStreamer> track = m_metadataTracks.get(pid);
+ if (!track)
+ return;
+
+ GRefPtr<GBytes> data = gst_mpegts_section_get_data(section);
+ gsize size;
+ const void* bytes = g_bytes_get_data(data.get(), &size);
+
+ track->addDataCue(currentMediaTime(), currentMediaTime(), bytes, size);
+ }
+}
+#endif
+
#if ENABLE(VIDEO_TRACK)
void MediaPlayerPrivateGStreamer::processTableOfContents(GstMessage* message)
{
if (m_chaptersTrack)
- m_player->removeTextTrack(m_chaptersTrack);
+ m_player->removeTextTrack(*m_chaptersTrack);
- m_chaptersTrack = InbandMetadataTextTrackPrivateGStreamer::create(InbandTextTrackPrivate::Chapters);
- m_player->addTextTrack(m_chaptersTrack);
+ m_chaptersTrack = InbandMetadataTextTrackPrivateGStreamer::create(InbandTextTrackPrivate::Chapters, InbandTextTrackPrivate::Generic);
+ m_player->addTextTrack(*m_chaptersTrack);
GRefPtr<GstToc> toc;
gboolean updated;
@@ -1137,12 +1170,11 @@ void MediaPlayerPrivateGStreamer::processTableOfContents(GstMessage* message)
ASSERT(toc);
for (GList* i = gst_toc_get_entries(toc.get()); i; i = i->next)
- processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data), 0);
+ processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
}
-void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry, GstTocEntry* parent)
+void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry)
{
- UNUSED_PARAM(parent);
ASSERT(entry);
RefPtr<GenericCueData> cue = GenericCueData::create();
@@ -1150,13 +1182,13 @@ void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry
gint64 start = -1, stop = -1;
gst_toc_entry_get_start_stop_times(entry, &start, &stop);
if (start != -1)
- cue->setStartTime(static_cast<double>(start) / GST_SECOND);
+ cue->setStartTime(MediaTime(start, GST_SECOND));
if (stop != -1)
- cue->setEndTime(static_cast<double>(stop) / GST_SECOND);
+ cue->setEndTime(MediaTime(stop, GST_SECOND));
GstTagList* tags = gst_toc_entry_get_tags(entry);
if (tags) {
- gchar* title = 0;
+ gchar* title = nullptr;
gst_tag_list_get_string(tags, GST_TAG_TITLE, &title);
if (title) {
cue->setContent(title);
@@ -1164,18 +1196,18 @@ void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry
}
}
- m_chaptersTrack->client()->addGenericCue(m_chaptersTrack.get(), cue.release());
+ m_chaptersTrack->addGenericCue(cue.release());
for (GList* i = gst_toc_entry_get_sub_entries(entry); i; i = i->next)
- processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data), entry);
+ processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
}
#endif
-void MediaPlayerPrivateGStreamer::fillTimerFired(Timer<MediaPlayerPrivateGStreamer>*)
+void MediaPlayerPrivateGStreamer::fillTimerFired()
{
GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);
- if (!gst_element_query(m_playBin.get(), query)) {
+ if (!gst_element_query(m_pipeline.get(), query)) {
gst_query_unref(query);
return;
}
@@ -1183,25 +1215,24 @@ void MediaPlayerPrivateGStreamer::fillTimerFired(Timer<MediaPlayerPrivateGStream
gint64 start, stop;
gdouble fillStatus = 100.0;
- gst_query_parse_buffering_range(query, 0, &start, &stop, 0);
+ gst_query_parse_buffering_range(query, nullptr, &start, &stop, nullptr);
gst_query_unref(query);
if (stop != -1)
fillStatus = 100.0 * stop / GST_FORMAT_PERCENT_MAX;
- LOG_MEDIA_MESSAGE("[Buffering] Download buffer filled up to %f%%", fillStatus);
+ GST_DEBUG("[Buffering] Download buffer filled up to %f%%", fillStatus);
- if (!m_mediaDuration)
- durationChanged();
+ float mediaDuration = durationMediaTime().toDouble();
// Update maxTimeLoaded only if the media duration is
// available. Otherwise we can't compute it.
- if (m_mediaDuration) {
+ if (mediaDuration) {
if (fillStatus == 100.0)
- m_maxTimeLoaded = m_mediaDuration;
+ m_maxTimeLoaded = mediaDuration;
else
- m_maxTimeLoaded = static_cast<float>((fillStatus * m_mediaDuration) / 100.0);
- LOG_MEDIA_MESSAGE("[Buffering] Updated maxTimeLoaded: %f", m_maxTimeLoaded);
+ m_maxTimeLoaded = static_cast<float>((fillStatus * mediaDuration) / 100.0);
+ GST_DEBUG("[Buffering] Updated maxTimeLoaded: %f", m_maxTimeLoaded);
}
m_downloadFinished = fillStatus == 100.0;
@@ -1222,12 +1253,13 @@ float MediaPlayerPrivateGStreamer::maxTimeSeekable() const
if (m_errorOccured)
return 0.0f;
- LOG_MEDIA_MESSAGE("maxTimeSeekable");
+ float mediaDuration = durationMediaTime().toDouble();
+ GST_DEBUG("maxTimeSeekable, duration: %f", mediaDuration);
// infinite duration means live stream
- if (std::isinf(duration()))
+ if (std::isinf(mediaDuration))
return 0.0f;
- return duration();
+ return mediaDuration;
}
float MediaPlayerPrivateGStreamer::maxTimeLoaded() const
@@ -1236,29 +1268,29 @@ float MediaPlayerPrivateGStreamer::maxTimeLoaded() const
return 0.0f;
float loaded = m_maxTimeLoaded;
- if (m_isEndReached && m_mediaDuration)
- loaded = m_mediaDuration;
- LOG_MEDIA_MESSAGE("maxTimeLoaded: %f", loaded);
+ if (m_isEndReached)
+ loaded = durationMediaTime().toDouble();
+ GST_DEBUG("maxTimeLoaded: %f", loaded);
return loaded;
}
bool MediaPlayerPrivateGStreamer::didLoadingProgress() const
{
- if (!m_playBin || !m_mediaDuration || !totalBytes())
+ if (UNLIKELY(!m_pipeline || !durationMediaTime() || (!isMediaSource() && !totalBytes())))
return false;
float currentMaxTimeLoaded = maxTimeLoaded();
bool didLoadingProgress = currentMaxTimeLoaded != m_maxTimeLoadedAtLastDidLoadingProgress;
m_maxTimeLoadedAtLastDidLoadingProgress = currentMaxTimeLoaded;
- LOG_MEDIA_MESSAGE("didLoadingProgress: %d", didLoadingProgress);
+ GST_DEBUG("didLoadingProgress: %d", didLoadingProgress);
return didLoadingProgress;
}
-unsigned MediaPlayerPrivateGStreamer::totalBytes() const
+unsigned long long MediaPlayerPrivateGStreamer::totalBytes() const
{
if (m_errorOccured)
return 0;
- if (m_totalBytes != -1)
+ if (m_totalBytes)
return m_totalBytes;
if (!m_source)
@@ -1267,8 +1299,8 @@ unsigned MediaPlayerPrivateGStreamer::totalBytes() const
GstFormat fmt = GST_FORMAT_BYTES;
gint64 length = 0;
if (gst_element_query_duration(m_source.get(), fmt, &length)) {
- INFO_MEDIA_MESSAGE("totalBytes %" G_GINT64_FORMAT, length);
- m_totalBytes = static_cast<unsigned>(length);
+ GST_INFO("totalBytes %" G_GINT64_FORMAT, length);
+ m_totalBytes = static_cast<unsigned long long>(length);
m_isStreaming = !length;
return m_totalBytes;
}
@@ -1302,25 +1334,105 @@ unsigned MediaPlayerPrivateGStreamer::totalBytes() const
gst_iterator_free(iter);
- INFO_MEDIA_MESSAGE("totalBytes %" G_GINT64_FORMAT, length);
- m_totalBytes = static_cast<unsigned>(length);
+ GST_INFO("totalBytes %" G_GINT64_FORMAT, length);
+ m_totalBytes = static_cast<unsigned long long>(length);
m_isStreaming = !length;
return m_totalBytes;
}
+void MediaPlayerPrivateGStreamer::sourceChangedCallback(MediaPlayerPrivateGStreamer* player)
+{
+ player->sourceChanged();
+}
+
+void MediaPlayerPrivateGStreamer::uriDecodeBinElementAddedCallback(GstBin* bin, GstElement* element, MediaPlayerPrivateGStreamer* player)
+{
+ if (g_strcmp0(G_OBJECT_CLASS_NAME(G_OBJECT_GET_CLASS(G_OBJECT(element))), "GstDownloadBuffer"))
+ return;
+
+ player->m_downloadBuffer = element;
+ g_signal_handlers_disconnect_by_func(bin, reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), player);
+ g_signal_connect_swapped(element, "notify::temp-location", G_CALLBACK(downloadBufferFileCreatedCallback), player);
+
+ GUniqueOutPtr<char> oldDownloadTemplate;
+ g_object_get(element, "temp-template", &oldDownloadTemplate.outPtr(), nullptr);
+
+ GUniquePtr<char> newDownloadTemplate(g_build_filename(G_DIR_SEPARATOR_S, "var", "tmp", "WebKit-Media-XXXXXX", nullptr));
+ g_object_set(element, "temp-template", newDownloadTemplate.get(), nullptr);
+ GST_TRACE("Reconfigured file download template from '%s' to '%s'", oldDownloadTemplate.get(), newDownloadTemplate.get());
+
+ player->purgeOldDownloadFiles(oldDownloadTemplate.get());
+}
+
+void MediaPlayerPrivateGStreamer::downloadBufferFileCreatedCallback(MediaPlayerPrivateGStreamer* player)
+{
+ ASSERT(player->m_downloadBuffer);
+
+ g_signal_handlers_disconnect_by_func(player->m_downloadBuffer.get(), reinterpret_cast<gpointer>(downloadBufferFileCreatedCallback), player);
+
+ GUniqueOutPtr<char> downloadFile;
+ g_object_get(player->m_downloadBuffer.get(), "temp-location", &downloadFile.outPtr(), nullptr);
+ player->m_downloadBuffer = nullptr;
+
+ if (UNLIKELY(!deleteFile(downloadFile.get()))) {
+ GST_WARNING("Couldn't unlink media temporary file %s after creation", downloadFile.get());
+ return;
+ }
+
+ GST_TRACE("Unlinked media temporary file %s after creation", downloadFile.get());
+}
+
+void MediaPlayerPrivateGStreamer::purgeOldDownloadFiles(const char* downloadFileTemplate)
+{
+ if (!downloadFileTemplate)
+ return;
+
+ GUniquePtr<char> templatePath(g_path_get_dirname(downloadFileTemplate));
+ GUniquePtr<char> templateFile(g_path_get_basename(downloadFileTemplate));
+ String templatePattern = String(templateFile.get()).replace("X", "?");
+
+ for (auto& filePath : listDirectory(templatePath.get(), templatePattern)) {
+ if (UNLIKELY(!deleteFile(filePath))) {
+ GST_WARNING("Couldn't unlink legacy media temporary file: %s", filePath.utf8().data());
+ continue;
+ }
+
+ GST_TRACE("Unlinked legacy media temporary file: %s", filePath.utf8().data());
+ }
+}
+
void MediaPlayerPrivateGStreamer::sourceChanged()
{
+ if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
+ g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);
+
m_source.clear();
- g_object_get(m_playBin.get(), "source", &m_source.outPtr(), NULL);
+ g_object_get(m_pipeline.get(), "source", &m_source.outPtr(), nullptr);
- if (WEBKIT_IS_WEB_SRC(m_source.get()))
+ if (WEBKIT_IS_WEB_SRC(m_source.get())) {
webKitWebSrcSetMediaPlayer(WEBKIT_WEB_SRC(m_source.get()), m_player);
-#if ENABLE(MEDIA_SOURCE)
- if (m_mediaSource && WEBKIT_IS_MEDIA_SRC(m_source.get())) {
- MediaSourceGStreamer::open(m_mediaSource.get(), WEBKIT_MEDIA_SRC(m_source.get()));
- webKitMediaSrcSetPlayBin(WEBKIT_MEDIA_SRC(m_source.get()), m_playBin.get());
+ g_signal_connect(GST_ELEMENT_PARENT(m_source.get()), "element-added", G_CALLBACK(uriDecodeBinElementAddedCallback), this);
}
-#endif
+}
+
+bool MediaPlayerPrivateGStreamer::hasSingleSecurityOrigin() const
+{
+ if (!m_source)
+ return false;
+
+ if (!WEBKIT_IS_WEB_SRC(m_source.get()))
+ return true;
+
+ GUniqueOutPtr<char> originalURI, resolvedURI;
+ g_object_get(m_source.get(), "location", &originalURI.outPtr(), "resolved-location", &resolvedURI.outPtr(), nullptr);
+ if (!originalURI || !resolvedURI)
+ return false;
+ if (!g_strcmp0(originalURI.get(), resolvedURI.get()))
+ return true;
+
+ Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::createFromString(String::fromUTF8(resolvedURI.get())));
+ Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(String::fromUTF8(originalURI.get())));
+ return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
}
void MediaPlayerPrivateGStreamer::cancelLoad()
@@ -1328,20 +1440,20 @@ void MediaPlayerPrivateGStreamer::cancelLoad()
if (m_networkState < MediaPlayer::Loading || m_networkState == MediaPlayer::Loaded)
return;
- if (m_playBin)
+ if (m_pipeline)
changePipelineState(GST_STATE_READY);
}
void MediaPlayerPrivateGStreamer::asyncStateChangeDone()
{
- if (!m_playBin || m_errorOccured)
+ if (!m_pipeline || m_errorOccured)
return;
if (m_seeking) {
if (m_seekIsPending)
updateStates();
else {
- LOG_MEDIA_MESSAGE("[Seek] seeked to %f", m_seekTime);
+ GST_DEBUG("[Seek] seeked to %f", m_seekTime);
m_seeking = false;
if (m_timeOfOverlappingSeek != m_seekTime && m_timeOfOverlappingSeek != -1) {
seek(m_timeOfOverlappingSeek);
@@ -1352,7 +1464,7 @@ void MediaPlayerPrivateGStreamer::asyncStateChangeDone()
// The pipeline can still have a pending state. In this case a position query will fail.
// Right now we can use m_seekTime as a fallback.
- m_canFallBackToLastFinishedSeekPositon = true;
+ m_canFallBackToLastFinishedSeekPosition = true;
timeChanged();
}
} else
@@ -1361,7 +1473,7 @@ void MediaPlayerPrivateGStreamer::asyncStateChangeDone()
void MediaPlayerPrivateGStreamer::updateStates()
{
- if (!m_playBin)
+ if (!m_pipeline)
return;
if (m_errorOccured)
@@ -1372,25 +1484,19 @@ void MediaPlayerPrivateGStreamer::updateStates()
GstState state;
GstState pending;
- GstStateChangeReturn getStateResult = gst_element_get_state(m_playBin.get(), &state, &pending, 250 * GST_NSECOND);
+ GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &pending, 250 * GST_NSECOND);
bool shouldUpdatePlaybackState = false;
switch (getStateResult) {
case GST_STATE_CHANGE_SUCCESS: {
- LOG_MEDIA_MESSAGE("State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
+ GST_DEBUG("State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
// Do nothing if on EOS and state changed to READY to avoid recreating the player
// on HTMLMediaElement and properly generate the video 'ended' event.
if (m_isEndReached && state == GST_STATE_READY)
break;
- if (state <= GST_STATE_READY) {
- m_resetPipeline = true;
- m_mediaDuration = 0;
- } else {
- m_resetPipeline = false;
- cacheDuration();
- }
+ m_resetPipeline = state <= GST_STATE_READY;
bool didBuffering = m_buffering;
@@ -1408,7 +1514,7 @@ void MediaPlayerPrivateGStreamer::updateStates()
case GST_STATE_PLAYING:
if (m_buffering) {
if (m_bufferingPercentage == 100) {
- LOG_MEDIA_MESSAGE("[Buffering] Complete.");
+ GST_DEBUG("[Buffering] Complete.");
m_buffering = false;
m_readyState = MediaPlayer::HaveEnoughData;
m_networkState = m_downloadFinished ? MediaPlayer::Idle : MediaPlayer::Loading;
@@ -1439,14 +1545,14 @@ void MediaPlayerPrivateGStreamer::updateStates()
}
if (didBuffering && !m_buffering && !m_paused && m_playbackRate) {
- LOG_MEDIA_MESSAGE("[Buffering] Restarting playback.");
+ GST_DEBUG("[Buffering] Restarting playback.");
changePipelineState(GST_STATE_PLAYING);
}
} else if (state == GST_STATE_PLAYING) {
m_paused = false;
if ((m_buffering && !isLiveStream()) || !m_playbackRate) {
- LOG_MEDIA_MESSAGE("[Buffering] Pausing stream for buffering.");
+ GST_DEBUG("[Buffering] Pausing stream for buffering.");
changePipelineState(GST_STATE_PAUSED);
}
} else
@@ -1454,21 +1560,21 @@ void MediaPlayerPrivateGStreamer::updateStates()
if (m_requestedState == GST_STATE_PAUSED && state == GST_STATE_PAUSED) {
shouldUpdatePlaybackState = true;
- LOG_MEDIA_MESSAGE("Requested state change to %s was completed", gst_element_state_get_name(state));
+ GST_DEBUG("Requested state change to %s was completed", gst_element_state_get_name(state));
}
break;
}
case GST_STATE_CHANGE_ASYNC:
- LOG_MEDIA_MESSAGE("Async: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
+ GST_DEBUG("Async: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
// Change in progress.
break;
case GST_STATE_CHANGE_FAILURE:
- LOG_MEDIA_MESSAGE("Failure: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
+ GST_DEBUG("Failure: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
// Change failed
return;
case GST_STATE_CHANGE_NO_PREROLL:
- LOG_MEDIA_MESSAGE("No preroll: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
+ GST_DEBUG("No preroll: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
// Live pipelines go in PAUSED without prerolling.
m_isStreaming = true;
@@ -1488,7 +1594,7 @@ void MediaPlayerPrivateGStreamer::updateStates()
m_networkState = MediaPlayer::Loading;
break;
default:
- LOG_MEDIA_MESSAGE("Else : %d", getStateResult);
+ GST_DEBUG("Else : %d", getStateResult);
break;
}
@@ -1498,22 +1604,22 @@ void MediaPlayerPrivateGStreamer::updateStates()
m_player->playbackStateChanged();
if (m_networkState != oldNetworkState) {
- LOG_MEDIA_MESSAGE("Network State Changed from %u to %u", oldNetworkState, m_networkState);
+ GST_DEBUG("Network State Changed from %u to %u", oldNetworkState, m_networkState);
m_player->networkStateChanged();
}
if (m_readyState != oldReadyState) {
- LOG_MEDIA_MESSAGE("Ready State Changed from %u to %u", oldReadyState, m_readyState);
+ GST_DEBUG("Ready State Changed from %u to %u", oldReadyState, m_readyState);
m_player->readyStateChanged();
}
if (getStateResult == GST_STATE_CHANGE_SUCCESS && state >= GST_STATE_PAUSED) {
updatePlaybackRate();
if (m_seekIsPending) {
- LOG_MEDIA_MESSAGE("[Seek] committing pending seek to %f", m_seekTime);
+ GST_DEBUG("[Seek] committing pending seek to %f", m_seekTime);
m_seekIsPending = false;
m_seeking = doSeek(toGstClockTime(m_seekTime), m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE));
if (!m_seeking)
- LOG_MEDIA_MESSAGE("[Seek] seeking to %f failed", m_seekTime);
+ GST_DEBUG("[Seek] seeking to %f failed", m_seekTime);
}
}
}
@@ -1544,7 +1650,7 @@ bool MediaPlayerPrivateGStreamer::loadNextLocation()
return false;
const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
- const gchar* newLocation = 0;
+ const gchar* newLocation = nullptr;
if (!locations) {
// Fallback on new-location string.
@@ -1555,7 +1661,7 @@ bool MediaPlayerPrivateGStreamer::loadNextLocation()
if (!newLocation) {
if (m_mediaLocationCurrentIndex < 0) {
- m_mediaLocations = 0;
+ m_mediaLocations = nullptr;
return false;
}
@@ -1580,7 +1686,7 @@ bool MediaPlayerPrivateGStreamer::loadNextLocation()
RefPtr<SecurityOrigin> securityOrigin = SecurityOrigin::create(m_url);
if (securityOrigin->canRequest(newUrl)) {
- INFO_MEDIA_MESSAGE("New media url: %s", newUrl.string().utf8().data());
+ GST_INFO("New media url: %s", newUrl.string().utf8().data());
// Reset player states.
m_networkState = MediaPlayer::Loading;
@@ -1593,16 +1699,16 @@ bool MediaPlayerPrivateGStreamer::loadNextLocation()
changePipelineState(GST_STATE_READY);
GstState state;
- gst_element_get_state(m_playBin.get(), &state, 0, 0);
+ gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
if (state <= GST_STATE_READY) {
// Set the new uri and start playing.
- g_object_set(m_playBin.get(), "uri", newUrl.string().utf8().data(), NULL);
+ g_object_set(m_pipeline.get(), "uri", newUrl.string().utf8().data(), nullptr);
m_url = newUrl;
changePipelineState(GST_STATE_PLAYING);
return true;
}
} else
- INFO_MEDIA_MESSAGE("Not allowed to load new media location: %s", newUrl.string().utf8().data());
+ GST_INFO("Not allowed to load new media location: %s", newUrl.string().utf8().data());
}
m_mediaLocationCurrentIndex--;
return false;
@@ -1624,49 +1730,29 @@ void MediaPlayerPrivateGStreamer::didEnd()
// Synchronize position and duration values to not confuse the
// HTMLMediaElement. In some cases like reverse playback the
// position is not always reported as 0 for instance.
- float now = currentTime();
- if (now > 0 && now <= duration() && m_mediaDuration != now) {
- m_mediaDurationKnown = true;
- m_mediaDuration = now;
+ MediaTime now = currentMediaTime();
+ if (now > MediaTime { } && now <= durationMediaTime())
m_player->durationChanged();
- }
m_isEndReached = true;
timeChanged();
- if (!m_player->mediaPlayerClient()->mediaPlayerIsLooping()) {
+ if (!m_player->client().mediaPlayerIsLooping()) {
m_paused = true;
+ m_durationAtEOS = durationMediaTime().toDouble();
changePipelineState(GST_STATE_READY);
m_downloadFinished = false;
}
}
-void MediaPlayerPrivateGStreamer::cacheDuration()
-{
- if (m_mediaDuration || !m_mediaDurationKnown)
- return;
-
- float newDuration = duration();
- if (std::isinf(newDuration)) {
- // Only pretend that duration is not available if the the query failed in a stable pipeline state.
- GstState state;
- if (gst_element_get_state(m_playBin.get(), &state, 0, 0) == GST_STATE_CHANGE_SUCCESS && state > GST_STATE_READY)
- m_mediaDurationKnown = false;
- return;
- }
-
- m_mediaDuration = newDuration;
-}
-
void MediaPlayerPrivateGStreamer::durationChanged()
{
- float previousDuration = m_mediaDuration;
+ float previousDuration = durationMediaTime().toDouble();
- cacheDuration();
// Avoid emiting durationchanged in the case where the previous
// duration was 0 because that case is already handled by the
// HTMLMediaElement.
- if (previousDuration && m_mediaDuration != previousDuration)
+ if (previousDuration && durationMediaTime().toDouble() != previousDuration)
m_player->durationChanged();
}
@@ -1683,154 +1769,187 @@ void MediaPlayerPrivateGStreamer::loadingFailed(MediaPlayer::NetworkState error)
}
// Loading failed, remove ready timer.
- if (m_readyTimerHandler) {
- g_source_remove(m_readyTimerHandler);
- m_readyTimerHandler = 0;
- }
-}
-
-static HashSet<String> mimeTypeCache()
-{
- initializeGStreamerAndRegisterWebKitElements();
-
- DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
- static bool typeListInitialized = false;
-
- if (typeListInitialized)
- return cache;
-
- const char* mimeTypes[] = {
- "application/ogg",
- "application/vnd.apple.mpegurl",
- "application/vnd.rn-realmedia",
- "application/x-3gp",
- "application/x-pn-realaudio",
- "audio/3gpp",
- "audio/aac",
- "audio/flac",
- "audio/iLBC-sh",
- "audio/midi",
- "audio/mobile-xmf",
- "audio/mp1",
- "audio/mp2",
- "audio/mp3",
- "audio/mp4",
- "audio/mpeg",
- "audio/ogg",
- "audio/opus",
- "audio/qcelp",
- "audio/riff-midi",
- "audio/speex",
- "audio/wav",
- "audio/webm",
- "audio/x-ac3",
- "audio/x-aiff",
- "audio/x-amr-nb-sh",
- "audio/x-amr-wb-sh",
- "audio/x-au",
- "audio/x-ay",
- "audio/x-celt",
- "audio/x-dts",
- "audio/x-flac",
- "audio/x-gbs",
- "audio/x-gsm",
- "audio/x-gym",
- "audio/x-imelody",
- "audio/x-ircam",
- "audio/x-kss",
- "audio/x-m4a",
- "audio/x-mod",
- "audio/x-mp3",
- "audio/x-mpeg",
- "audio/x-musepack",
- "audio/x-nist",
- "audio/x-nsf",
- "audio/x-paris",
- "audio/x-sap",
- "audio/x-sbc",
- "audio/x-sds",
- "audio/x-shorten",
- "audio/x-sid",
- "audio/x-spc",
- "audio/x-speex",
- "audio/x-svx",
- "audio/x-ttafile",
- "audio/x-vgm",
- "audio/x-voc",
- "audio/x-vorbis+ogg",
- "audio/x-w64",
- "audio/x-wav",
- "audio/x-wavpack",
- "audio/x-wavpack-correction",
- "video/3gpp",
- "video/mj2",
- "video/mp4",
- "video/mpeg",
- "video/mpegts",
- "video/ogg",
- "video/quicktime",
- "video/vivo",
- "video/webm",
- "video/x-cdxa",
- "video/x-dirac",
- "video/x-dv",
- "video/x-fli",
- "video/x-flv",
- "video/x-h263",
- "video/x-ivf",
- "video/x-m4v",
- "video/x-matroska",
- "video/x-mng",
- "video/x-ms-asf",
- "video/x-msvideo",
- "video/x-mve",
- "video/x-nuv",
- "video/x-vcd"
- };
-
- for (unsigned i = 0; i < (sizeof(mimeTypes) / sizeof(*mimeTypes)); ++i)
- cache.add(String(mimeTypes[i]));
-
- typeListInitialized = true;
- return cache;
-}
-
-void MediaPlayerPrivateGStreamer::getSupportedTypes(HashSet<String>& types)
-{
- types = mimeTypeCache();
+ m_readyTimerHandler.stop();
+}
+
+static HashSet<String, ASCIICaseInsensitiveHash>& mimeTypeSet()
+{
+ static NeverDestroyed<HashSet<String, ASCIICaseInsensitiveHash>> mimeTypes = []()
+ {
+ initializeGStreamerAndRegisterWebKitElements();
+ HashSet<String, ASCIICaseInsensitiveHash> set;
+
+ GList* audioDecoderFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO, GST_RANK_MARGINAL);
+ GList* videoDecoderFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO, GST_RANK_MARGINAL);
+ GList* demuxerFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DEMUXER, GST_RANK_MARGINAL);
+
+ enum ElementType {
+ AudioDecoder = 0,
+ VideoDecoder,
+ Demuxer
+ };
+ struct GstCapsWebKitMapping {
+ ElementType elementType;
+ const char* capsString;
+ Vector<AtomicString> webkitMimeTypes;
+ };
+
+ Vector<GstCapsWebKitMapping> mapping = {
+ {AudioDecoder, "audio/midi", {"audio/midi", "audio/riff-midi"}},
+ {AudioDecoder, "audio/x-sbc", { }},
+ {AudioDecoder, "audio/x-sid", { }},
+ {AudioDecoder, "audio/x-flac", {"audio/x-flac", "audio/flac"}},
+ {AudioDecoder, "audio/x-wav", {"audio/x-wav", "audio/wav"}},
+ {AudioDecoder, "audio/x-wavpack", {"audio/x-wavpack"}},
+ {AudioDecoder, "audio/x-speex", {"audio/speex", "audio/x-speex"}},
+ {AudioDecoder, "audio/x-ac3", { }},
+ {AudioDecoder, "audio/x-eac3", {"audio/x-ac3"}},
+ {AudioDecoder, "audio/x-dts", { }},
+ {VideoDecoder, "video/x-h264, profile=(string)high", {"video/mp4", "video/x-m4v"}},
+ {VideoDecoder, "video/x-msvideocodec", {"video/x-msvideo"}},
+ {VideoDecoder, "video/x-h263", { }},
+ {VideoDecoder, "video/mpegts", { }},
+ {VideoDecoder, "video/mpeg, mpegversion=(int){1,2}, systemstream=(boolean)false", {"video/mpeg"}},
+ {VideoDecoder, "video/x-dirac", { }},
+ {VideoDecoder, "video/x-flash-video", {"video/flv", "video/x-flv"}},
+ {Demuxer, "video/quicktime", { }},
+ {Demuxer, "video/quicktime, variant=(string)3gpp", {"video/3gpp"}},
+ {Demuxer, "application/x-3gp", { }},
+ {Demuxer, "video/x-ms-asf", { }},
+ {Demuxer, "audio/x-aiff", { }},
+ {Demuxer, "application/x-pn-realaudio", { }},
+ {Demuxer, "application/vnd.rn-realmedia", { }},
+ {Demuxer, "audio/x-wav", {"audio/x-wav", "audio/wav"}},
+ {Demuxer, "application/x-hls", {"application/vnd.apple.mpegurl", "application/x-mpegurl"}}
+ };
+
+ for (auto& current : mapping) {
+ GList* factories = demuxerFactories;
+ if (current.elementType == AudioDecoder)
+ factories = audioDecoderFactories;
+ else if (current.elementType == VideoDecoder)
+ factories = videoDecoderFactories;
+
+ if (gstRegistryHasElementForMediaType(factories, current.capsString)) {
+ if (!current.webkitMimeTypes.isEmpty()) {
+ for (const auto& mimeType : current.webkitMimeTypes)
+ set.add(mimeType);
+ } else
+ set.add(AtomicString(current.capsString));
+ }
+ }
+
+ bool opusSupported = false;
+ if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/x-opus")) {
+ opusSupported = true;
+ set.add(AtomicString("audio/opus"));
+ }
+
+ bool vorbisSupported = false;
+ if (gstRegistryHasElementForMediaType(demuxerFactories, "application/ogg")) {
+ set.add(AtomicString("application/ogg"));
+
+ vorbisSupported = gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/x-vorbis");
+ if (vorbisSupported) {
+ set.add(AtomicString("audio/ogg"));
+ set.add(AtomicString("audio/x-vorbis+ogg"));
+ }
+
+ if (gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-theora"))
+ set.add(AtomicString("video/ogg"));
+ }
+
+ bool audioMpegSupported = false;
+ if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/mpeg, mpegversion=(int)1, layer=(int)[1, 3]")) {
+ audioMpegSupported = true;
+ set.add(AtomicString("audio/mp1"));
+ set.add(AtomicString("audio/mp3"));
+ set.add(AtomicString("audio/x-mp3"));
+ }
+
+ if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/mpeg, mpegversion=(int){2, 4}")) {
+ audioMpegSupported = true;
+ set.add(AtomicString("audio/aac"));
+ set.add(AtomicString("audio/mp2"));
+ set.add(AtomicString("audio/mp4"));
+ set.add(AtomicString("audio/x-m4a"));
+ }
+
+ if (audioMpegSupported) {
+ set.add(AtomicString("audio/mpeg"));
+ set.add(AtomicString("audio/x-mpeg"));
+ }
+
+ if (gstRegistryHasElementForMediaType(demuxerFactories, "video/x-matroska")) {
+ set.add(AtomicString("video/x-matroska"));
+
+ if (gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp8")
+ || gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp9")
+ || gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp10"))
+ set.add(AtomicString("video/webm"));
+
+ if (vorbisSupported || opusSupported)
+ set.add(AtomicString("audio/webm"));
+ }
+
+ gst_plugin_feature_list_free(audioDecoderFactories);
+ gst_plugin_feature_list_free(videoDecoderFactories);
+ gst_plugin_feature_list_free(demuxerFactories);
+ return set;
+ }();
+ return mimeTypes;
+}
+
+void MediaPlayerPrivateGStreamer::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
+{
+ types = mimeTypeSet();
}
MediaPlayer::SupportsType MediaPlayerPrivateGStreamer::supportsType(const MediaEngineSupportParameters& parameters)
{
+ MediaPlayer::SupportsType result = MediaPlayer::IsNotSupported;
+#if ENABLE(MEDIA_SOURCE)
+ // MediaPlayerPrivateGStreamerMSE is in charge of mediasource playback, not us.
+ if (parameters.isMediaSource)
+ return result;
+#endif
+
+ // MediaStream playback is handled by the OpenWebRTC player.
+ if (parameters.isMediaStream)
+ return result;
+
if (parameters.type.isNull() || parameters.type.isEmpty())
- return MediaPlayer::IsNotSupported;
+ return result;
// spec says we should not return "probably" if the codecs string is empty
- if (mimeTypeCache().contains(parameters.type))
- return parameters.codecs.isEmpty() ? MediaPlayer::MayBeSupported : MediaPlayer::IsSupported;
- return MediaPlayer::IsNotSupported;
+ if (mimeTypeSet().contains(parameters.type))
+ result = parameters.codecs.isEmpty() ? MediaPlayer::MayBeSupported : MediaPlayer::IsSupported;
+
+ return extendedSupportsType(parameters, result);
}
void MediaPlayerPrivateGStreamer::setDownloadBuffering()
{
- if (!m_playBin)
+ if (!m_pipeline)
return;
- GstPlayFlags flags;
- g_object_get(m_playBin.get(), "flags", &flags, NULL);
+ unsigned flags;
+ g_object_get(m_pipeline.get(), "flags", &flags, nullptr);
+
+ unsigned flagDownload = getGstPlayFlag("download");
// We don't want to stop downloading if we already started it.
- if (flags & GST_PLAY_FLAG_DOWNLOAD && m_readyState > MediaPlayer::HaveNothing && !m_resetPipeline)
+ if (flags & flagDownload && m_readyState > MediaPlayer::HaveNothing && !m_resetPipeline)
return;
bool shouldDownload = !isLiveStream() && m_preload == MediaPlayer::Auto;
if (shouldDownload) {
- LOG_MEDIA_MESSAGE("Enabling on-disk buffering");
- g_object_set(m_playBin.get(), "flags", flags | GST_PLAY_FLAG_DOWNLOAD, NULL);
+ GST_DEBUG("Enabling on-disk buffering");
+ g_object_set(m_pipeline.get(), "flags", flags | flagDownload, nullptr);
m_fillTimer.startRepeating(0.2);
} else {
- LOG_MEDIA_MESSAGE("Disabling on-disk buffering");
- g_object_set(m_playBin.get(), "flags", flags & ~GST_PLAY_FLAG_DOWNLOAD, NULL);
+ GST_DEBUG("Disabling on-disk buffering");
+ g_object_set(m_pipeline.get(), "flags", flags & ~flagDownload, nullptr);
m_fillTimer.stop();
}
}
@@ -1851,93 +1970,172 @@ void MediaPlayerPrivateGStreamer::setPreload(MediaPlayer::Preload preload)
GstElement* MediaPlayerPrivateGStreamer::createAudioSink()
{
- m_autoAudioSink = gst_element_factory_make("autoaudiosink", 0);
- g_signal_connect(m_autoAudioSink.get(), "child-added", G_CALLBACK(setAudioStreamPropertiesCallback), this);
+ m_autoAudioSink = gst_element_factory_make("autoaudiosink", nullptr);
+ if (!m_autoAudioSink) {
+ GST_WARNING("GStreamer's autoaudiosink not found. Please check your gst-plugins-good installation");
+ return nullptr;
+ }
- // Construct audio sink only if pitch preserving is enabled.
- if (!m_preservesPitch)
- return m_autoAudioSink.get();
+ g_signal_connect_swapped(m_autoAudioSink.get(), "child-added", G_CALLBACK(setAudioStreamPropertiesCallback), this);
+
+ GstElement* audioSinkBin;
- GstElement* scale = gst_element_factory_make("scaletempo", 0);
- if (!scale) {
- GST_WARNING("Failed to create scaletempo");
+ if (webkitGstCheckVersion(1, 4, 2)) {
+#if ENABLE(WEB_AUDIO)
+ audioSinkBin = gst_bin_new("audio-sink");
+ ensureAudioSourceProvider();
+ m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
+ return audioSinkBin;
+#else
return m_autoAudioSink.get();
+#endif
}
- GstElement* audioSinkBin = gst_bin_new("audio-sink");
- GstElement* convert = gst_element_factory_make("audioconvert", 0);
- GstElement* resample = gst_element_factory_make("audioresample", 0);
+ // Construct audio sink only if pitch preserving is enabled.
+ // If GStreamer 1.4.2 or newer is used, the audio-filter playbin property is used instead.
+ if (m_preservesPitch) {
+ GstElement* scale = gst_element_factory_make("scaletempo", nullptr);
+ if (!scale) {
+ GST_WARNING("Failed to create scaletempo");
+ return m_autoAudioSink.get();
+ }
- gst_bin_add_many(GST_BIN(audioSinkBin), scale, convert, resample, m_autoAudioSink.get(), NULL);
+ audioSinkBin = gst_bin_new("audio-sink");
+ gst_bin_add(GST_BIN(audioSinkBin), scale);
+ GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(scale, "sink"));
+ gst_element_add_pad(audioSinkBin, gst_ghost_pad_new("sink", pad.get()));
- if (!gst_element_link_many(scale, convert, resample, m_autoAudioSink.get(), NULL)) {
- GST_WARNING("Failed to link audio sink elements");
- gst_object_unref(audioSinkBin);
- return m_autoAudioSink.get();
+#if ENABLE(WEB_AUDIO)
+ ensureAudioSourceProvider();
+ m_audioSourceProvider->configureAudioBin(audioSinkBin, scale);
+#else
+ GstElement* convert = gst_element_factory_make("audioconvert", nullptr);
+ GstElement* resample = gst_element_factory_make("audioresample", nullptr);
+
+ gst_bin_add_many(GST_BIN(audioSinkBin), convert, resample, m_autoAudioSink.get(), nullptr);
+
+ if (!gst_element_link_many(scale, convert, resample, m_autoAudioSink.get(), nullptr)) {
+ GST_WARNING("Failed to link audio sink elements");
+ gst_object_unref(audioSinkBin);
+ return m_autoAudioSink.get();
+ }
+#endif
+ return audioSinkBin;
}
- GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(scale, "sink"));
- gst_element_add_pad(audioSinkBin, gst_ghost_pad_new("sink", pad.get()));
+#if ENABLE(WEB_AUDIO)
+ audioSinkBin = gst_bin_new("audio-sink");
+ ensureAudioSourceProvider();
+ m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
return audioSinkBin;
+#endif
+ ASSERT_NOT_REACHED();
+ return nullptr;
}
GstElement* MediaPlayerPrivateGStreamer::audioSink() const
{
GstElement* sink;
- g_object_get(m_playBin.get(), "audio-sink", &sink, nullptr);
+ g_object_get(m_pipeline.get(), "audio-sink", &sink, nullptr);
return sink;
}
+#if ENABLE(WEB_AUDIO)
+void MediaPlayerPrivateGStreamer::ensureAudioSourceProvider()
+{
+ if (!m_audioSourceProvider)
+ m_audioSourceProvider = std::make_unique<AudioSourceProviderGStreamer>();
+}
+
+AudioSourceProvider* MediaPlayerPrivateGStreamer::audioSourceProvider()
+{
+ ensureAudioSourceProvider();
+ return m_audioSourceProvider.get();
+}
+#endif
+
void MediaPlayerPrivateGStreamer::createGSTPlayBin()
{
- ASSERT(!m_playBin);
+ ASSERT(!m_pipeline);
// gst_element_factory_make() returns a floating reference so
// we should not adopt.
- m_playBin = gst_element_factory_make("playbin", "play");
- setStreamVolumeElement(GST_STREAM_VOLUME(m_playBin.get()));
+ setPipeline(gst_element_factory_make("playbin", "play"));
+ setStreamVolumeElement(GST_STREAM_VOLUME(m_pipeline.get()));
+
+ GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
+ gst_bus_set_sync_handler(bus.get(), [](GstBus*, GstMessage* message, gpointer userData) {
+ auto& player = *static_cast<MediaPlayerPrivateGStreamer*>(userData);
+
+ if (player.handleSyncMessage(message)) {
+ gst_message_unref(message);
+ return GST_BUS_DROP;
+ }
- GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_playBin.get())));
+ return GST_BUS_PASS;
+ }, this, nullptr);
+
+ // Also let other listeners subscribe to (application) messages in this bus.
gst_bus_add_signal_watch(bus.get());
- g_signal_connect(bus.get(), "message", G_CALLBACK(mediaPlayerPrivateMessageCallback), this);
+ g_signal_connect(bus.get(), "message", G_CALLBACK(busMessageCallback), this);
- g_object_set(m_playBin.get(), "mute", m_player->muted(), NULL);
+ g_object_set(m_pipeline.get(), "mute", m_player->muted(), nullptr);
- g_signal_connect(m_playBin.get(), "notify::source", G_CALLBACK(mediaPlayerPrivateSourceChangedCallback), this);
- g_signal_connect(m_playBin.get(), "video-changed", G_CALLBACK(mediaPlayerPrivateVideoChangedCallback), this);
- g_signal_connect(m_playBin.get(), "audio-changed", G_CALLBACK(mediaPlayerPrivateAudioChangedCallback), this);
+ g_signal_connect_swapped(m_pipeline.get(), "notify::source", G_CALLBACK(sourceChangedCallback), this);
+ g_signal_connect_swapped(m_pipeline.get(), "video-changed", G_CALLBACK(videoChangedCallback), this);
+ g_signal_connect_swapped(m_pipeline.get(), "audio-changed", G_CALLBACK(audioChangedCallback), this);
#if ENABLE(VIDEO_TRACK)
- if (webkitGstCheckVersion(1, 1, 2)) {
- g_signal_connect(m_playBin.get(), "text-changed", G_CALLBACK(mediaPlayerPrivateTextChangedCallback), this);
+ g_signal_connect_swapped(m_pipeline.get(), "text-changed", G_CALLBACK(textChangedCallback), this);
- GstElement* textCombiner = webkitTextCombinerNew();
- ASSERT(textCombiner);
- g_object_set(m_playBin.get(), "text-stream-combiner", textCombiner, NULL);
+ GstElement* textCombiner = webkitTextCombinerNew();
+ ASSERT(textCombiner);
+ g_object_set(m_pipeline.get(), "text-stream-combiner", textCombiner, nullptr);
- m_textAppSink = webkitTextSinkNew();
- ASSERT(m_textAppSink);
+ m_textAppSink = webkitTextSinkNew();
+ ASSERT(m_textAppSink);
- m_textAppSinkPad = adoptGRef(gst_element_get_static_pad(m_textAppSink.get(), "sink"));
- ASSERT(m_textAppSinkPad);
+ m_textAppSinkPad = adoptGRef(gst_element_get_static_pad(m_textAppSink.get(), "sink"));
+ ASSERT(m_textAppSinkPad);
- g_object_set(m_textAppSink.get(), "emit-signals", true, "enable-last-sample", false, "caps", gst_caps_new_empty_simple("text/vtt"), NULL);
- g_signal_connect(m_textAppSink.get(), "new-sample", G_CALLBACK(mediaPlayerPrivateNewTextSampleCallback), this);
+ g_object_set(m_textAppSink.get(), "emit-signals", true, "enable-last-sample", false, "caps", gst_caps_new_empty_simple("text/vtt"), nullptr);
+ g_signal_connect_swapped(m_textAppSink.get(), "new-sample", G_CALLBACK(newTextSampleCallback), this);
- g_object_set(m_playBin.get(), "text-sink", m_textAppSink.get(), NULL);
- }
+ g_object_set(m_pipeline.get(), "text-sink", m_textAppSink.get(), nullptr);
#endif
- g_object_set(m_playBin.get(), "video-sink", createVideoSink(), "audio-sink", createAudioSink(), nullptr);
+ g_object_set(m_pipeline.get(), "video-sink", createVideoSink(), "audio-sink", createAudioSink(), nullptr);
+
+ configurePlaySink();
+
+ // On 1.4.2 and newer we use the audio-filter property instead.
+ // See https://bugzilla.gnome.org/show_bug.cgi?id=735748 for
+ // the reason for using >= 1.4.2 instead of >= 1.4.0.
+ if (m_preservesPitch && webkitGstCheckVersion(1, 4, 2)) {
+ GstElement* scale = gst_element_factory_make("scaletempo", nullptr);
- GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_webkitVideoSink.get(), "sink"));
+ if (!scale)
+ GST_WARNING("Failed to create scaletempo");
+ else
+ g_object_set(m_pipeline.get(), "audio-filter", scale, nullptr);
+ }
+
+ if (!m_renderingCanBeAccelerated) {
+ // If not using accelerated compositing, let GStreamer handle
+ // the image-orientation tag.
+ GstElement* videoFlip = gst_element_factory_make("videoflip", nullptr);
+ g_object_set(videoFlip, "method", 8, nullptr);
+ g_object_set(m_pipeline.get(), "video-filter", videoFlip, nullptr);
+ }
+
+ GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
if (videoSinkPad)
- g_signal_connect(videoSinkPad.get(), "notify::caps", G_CALLBACK(mediaPlayerPrivateVideoSinkCapsChangedCallback), this);
+ g_signal_connect_swapped(videoSinkPad.get(), "notify::caps", G_CALLBACK(videoSinkCapsChangedCallback), this);
}
void MediaPlayerPrivateGStreamer::simulateAudioInterruption()
{
- GstMessage* message = gst_message_new_request_state(GST_OBJECT(m_playBin.get()), GST_STATE_PAUSED);
- gst_element_post_message(m_playBin.get(), message);
+ GstMessage* message = gst_message_new_request_state(GST_OBJECT(m_pipeline.get()), GST_STATE_PAUSED);
+ gst_element_post_message(m_pipeline.get(), message);
}
bool MediaPlayerPrivateGStreamer::didPassCORSAccessCheck() const
@@ -1947,6 +2145,25 @@ bool MediaPlayerPrivateGStreamer::didPassCORSAccessCheck() const
return false;
}
+bool MediaPlayerPrivateGStreamer::canSaveMediaData() const
+{
+ if (isLiveStream())
+ return false;
+
+ if (m_url.isLocalFile())
+ return true;
+
+ if (m_url.protocolIsInHTTPFamily())
+ return true;
+
+ return false;
+}
+
+bool MediaPlayerPrivateGStreamer::handleSyncMessage(GstMessage* message)
+{
+ return MediaPlayerPrivateGStreamerBase::handleSyncMessage(message);
+}
+
}
#endif // USE(GSTREAMER)