summaryrefslogtreecommitdiff
path: root/Source/WebCore/platform/graphics/gstreamer/mse
diff options
context:
space:
mode:
Diffstat (limited to 'Source/WebCore/platform/graphics/gstreamer/mse')
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.cpp1188
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.h165
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaDescription.cpp75
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaDescription.h58
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaSample.cpp120
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaSample.h76
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp860
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.h132
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceClientGStreamerMSE.cpp216
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceClientGStreamerMSE.h70
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceGStreamer.cpp139
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceGStreamer.h88
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/PlaybackPipeline.cpp449
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/PlaybackPipeline.h79
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/SourceBufferPrivateGStreamer.cpp177
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/SourceBufferPrivateGStreamer.h94
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamer.cpp776
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamer.h80
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamerPrivate.h143
19 files changed, 4985 insertions, 0 deletions
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.cpp b/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.cpp
new file mode 100644
index 000000000..c4f2b06bc
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.cpp
@@ -0,0 +1,1188 @@
+/*
+ * Copyright (C) 2016 Metrological Group B.V.
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "config.h"
+#include "AppendPipeline.h"
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+#include "AudioTrackPrivateGStreamer.h"
+#include "GRefPtrGStreamer.h"
+#include "GStreamerMediaDescription.h"
+#include "GStreamerMediaSample.h"
+#include "GStreamerUtilities.h"
+#include "InbandTextTrackPrivateGStreamer.h"
+#include "MediaDescription.h"
+#include "SourceBufferPrivateGStreamer.h"
+#include "VideoTrackPrivateGStreamer.h"
+
+#include <gst/app/gstappsink.h>
+#include <gst/app/gstappsrc.h>
+#include <gst/gst.h>
+#include <gst/pbutils/pbutils.h>
+#include <gst/video/video.h>
+#include <wtf/Condition.h>
+#include <wtf/glib/GLibUtilities.h>
+
+GST_DEBUG_CATEGORY_EXTERN(webkit_mse_debug);
+#define GST_CAT_DEFAULT webkit_mse_debug
+
+namespace WebCore {
+
+static const char* dumpAppendState(AppendPipeline::AppendState appendState)
+{
+ switch (appendState) {
+ case AppendPipeline::AppendState::Invalid:
+ return "Invalid";
+ case AppendPipeline::AppendState::NotStarted:
+ return "NotStarted";
+ case AppendPipeline::AppendState::Ongoing:
+ return "Ongoing";
+ case AppendPipeline::AppendState::KeyNegotiation:
+ return "KeyNegotiation";
+ case AppendPipeline::AppendState::DataStarve:
+ return "DataStarve";
+ case AppendPipeline::AppendState::Sampling:
+ return "Sampling";
+ case AppendPipeline::AppendState::LastSample:
+ return "LastSample";
+ case AppendPipeline::AppendState::Aborting:
+ return "Aborting";
+ default:
+ return "(unknown)";
+ }
+}
+
+static void appendPipelineAppsrcNeedData(GstAppSrc*, guint, AppendPipeline*);
+static void appendPipelineDemuxerPadAdded(GstElement*, GstPad*, AppendPipeline*);
+static void appendPipelineDemuxerPadRemoved(GstElement*, GstPad*, AppendPipeline*);
+static void appendPipelineAppsinkCapsChanged(GObject*, GParamSpec*, AppendPipeline*);
+static GstPadProbeReturn appendPipelineAppsrcDataLeaving(GstPad*, GstPadProbeInfo*, AppendPipeline*);
+#if !LOG_DISABLED
+static GstPadProbeReturn appendPipelinePadProbeDebugInformation(GstPad*, GstPadProbeInfo*, struct PadProbeInformation*);
+#endif
+static GstPadProbeReturn appendPipelineDemuxerBlackHolePadProbe(GstPad*, GstPadProbeInfo*, gpointer);
+static GstFlowReturn appendPipelineAppsinkNewSample(GstElement*, AppendPipeline*);
+static void appendPipelineAppsinkEOS(GstElement*, AppendPipeline*);
+
+static void appendPipelineNeedContextMessageCallback(GstBus*, GstMessage* message, AppendPipeline* appendPipeline)
+{
+ GST_TRACE("received callback");
+ appendPipeline->handleNeedContextSyncMessage(message);
+}
+
+static void appendPipelineApplicationMessageCallback(GstBus*, GstMessage* message, AppendPipeline* appendPipeline)
+{
+ appendPipeline->handleApplicationMessage(message);
+}
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+static void appendPipelineElementMessageCallback(GstBus*, GstMessage* message, AppendPipeline* appendPipeline)
+{
+ appendPipeline->handleElementMessage(message);
+}
+#endif
+
+AppendPipeline::AppendPipeline(Ref<MediaSourceClientGStreamerMSE> mediaSourceClient, Ref<SourceBufferPrivateGStreamer> sourceBufferPrivate, MediaPlayerPrivateGStreamerMSE& playerPrivate)
+ : m_mediaSourceClient(mediaSourceClient.get())
+ , m_sourceBufferPrivate(sourceBufferPrivate.get())
+ , m_playerPrivate(&playerPrivate)
+ , m_id(0)
+ , m_appsrcAtLeastABufferLeft(false)
+ , m_appsrcNeedDataReceived(false)
+ , m_appsrcDataLeavingProbeId(0)
+ , m_appendState(AppendState::NotStarted)
+ , m_abortPending(false)
+ , m_streamType(Unknown)
+{
+ ASSERT(WTF::isMainThread());
+
+ GST_TRACE("Creating AppendPipeline (%p)", this);
+
+ // FIXME: give a name to the pipeline, maybe related with the track it's managing.
+ // The track name is still unknown at this time, though.
+ m_pipeline = gst_pipeline_new(nullptr);
+
+ m_bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
+ gst_bus_add_signal_watch(m_bus.get());
+ gst_bus_enable_sync_message_emission(m_bus.get());
+
+ g_signal_connect(m_bus.get(), "sync-message::need-context", G_CALLBACK(appendPipelineNeedContextMessageCallback), this);
+ g_signal_connect(m_bus.get(), "message::application", G_CALLBACK(appendPipelineApplicationMessageCallback), this);
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+ g_signal_connect(m_bus.get(), "message::element", G_CALLBACK(appendPipelineElementMessageCallback), this);
+#endif
+
+ // We assign the created instances here instead of adoptRef() because gst_bin_add_many()
+ // below will already take the initial reference and we need an additional one for us.
+ m_appsrc = gst_element_factory_make("appsrc", nullptr);
+ m_demux = gst_element_factory_make("qtdemux", nullptr);
+ m_appsink = gst_element_factory_make("appsink", nullptr);
+
+ gst_app_sink_set_emit_signals(GST_APP_SINK(m_appsink.get()), TRUE);
+ gst_base_sink_set_sync(GST_BASE_SINK(m_appsink.get()), FALSE);
+
+ GRefPtr<GstPad> appsinkPad = adoptGRef(gst_element_get_static_pad(m_appsink.get(), "sink"));
+ g_signal_connect(appsinkPad.get(), "notify::caps", G_CALLBACK(appendPipelineAppsinkCapsChanged), this);
+
+ setAppsrcDataLeavingProbe();
+
+#if !LOG_DISABLED
+ GRefPtr<GstPad> demuxerPad = adoptGRef(gst_element_get_static_pad(m_demux.get(), "sink"));
+ m_demuxerDataEnteringPadProbeInformation.appendPipeline = this;
+ m_demuxerDataEnteringPadProbeInformation.description = "demuxer data entering";
+ m_demuxerDataEnteringPadProbeInformation.probeId = gst_pad_add_probe(demuxerPad.get(), GST_PAD_PROBE_TYPE_BUFFER, reinterpret_cast<GstPadProbeCallback>(appendPipelinePadProbeDebugInformation), &m_demuxerDataEnteringPadProbeInformation, nullptr);
+ m_appsinkDataEnteringPadProbeInformation.appendPipeline = this;
+ m_appsinkDataEnteringPadProbeInformation.description = "appsink data entering";
+ m_appsinkDataEnteringPadProbeInformation.probeId = gst_pad_add_probe(appsinkPad.get(), GST_PAD_PROBE_TYPE_BUFFER, reinterpret_cast<GstPadProbeCallback>(appendPipelinePadProbeDebugInformation), &m_appsinkDataEnteringPadProbeInformation, nullptr);
+#endif
+
+ // These signals won't be connected outside of the lifetime of "this".
+ g_signal_connect(m_appsrc.get(), "need-data", G_CALLBACK(appendPipelineAppsrcNeedData), this);
+ g_signal_connect(m_demux.get(), "pad-added", G_CALLBACK(appendPipelineDemuxerPadAdded), this);
+ g_signal_connect(m_demux.get(), "pad-removed", G_CALLBACK(appendPipelineDemuxerPadRemoved), this);
+ g_signal_connect(m_appsink.get(), "new-sample", G_CALLBACK(appendPipelineAppsinkNewSample), this);
+ g_signal_connect(m_appsink.get(), "eos", G_CALLBACK(appendPipelineAppsinkEOS), this);
+
+ // Add_many will take ownership of a reference. That's why we used an assignment before.
+ gst_bin_add_many(GST_BIN(m_pipeline.get()), m_appsrc.get(), m_demux.get(), nullptr);
+ gst_element_link(m_appsrc.get(), m_demux.get());
+
+ gst_element_set_state(m_pipeline.get(), GST_STATE_READY);
+};
+
+AppendPipeline::~AppendPipeline()
+{
+ ASSERT(WTF::isMainThread());
+
+ {
+ LockHolder locker(m_newSampleLock);
+ setAppendState(AppendState::Invalid);
+ m_newSampleCondition.notifyOne();
+ }
+
+ {
+ LockHolder locker(m_padAddRemoveLock);
+ m_playerPrivate = nullptr;
+ m_padAddRemoveCondition.notifyOne();
+ }
+
+ GST_TRACE("Destroying AppendPipeline (%p)", this);
+
+ // FIXME: Maybe notify appendComplete here?
+
+ if (m_pipeline) {
+ ASSERT(m_bus);
+ gst_bus_remove_signal_watch(m_bus.get());
+ gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);
+ m_pipeline = nullptr;
+ }
+
+ if (m_appsrc) {
+ removeAppsrcDataLeavingProbe();
+ g_signal_handlers_disconnect_by_data(m_appsrc.get(), this);
+ m_appsrc = nullptr;
+ }
+
+ if (m_demux) {
+#if !LOG_DISABLED
+ GRefPtr<GstPad> demuxerPad = adoptGRef(gst_element_get_static_pad(m_demux.get(), "sink"));
+ gst_pad_remove_probe(demuxerPad.get(), m_demuxerDataEnteringPadProbeInformation.probeId);
+#endif
+
+ g_signal_handlers_disconnect_by_data(m_demux.get(), this);
+ m_demux = nullptr;
+ }
+
+ if (m_appsink) {
+ GRefPtr<GstPad> appsinkPad = adoptGRef(gst_element_get_static_pad(m_appsink.get(), "sink"));
+ g_signal_handlers_disconnect_by_data(appsinkPad.get(), this);
+ g_signal_handlers_disconnect_by_data(m_appsink.get(), this);
+
+#if !LOG_DISABLED
+ gst_pad_remove_probe(appsinkPad.get(), m_appsinkDataEnteringPadProbeInformation.probeId);
+#endif
+
+ m_appsink = nullptr;
+ }
+
+ m_appsinkCaps = nullptr;
+ m_demuxerSrcPadCaps = nullptr;
+};
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+void AppendPipeline::dispatchPendingDecryptionKey()
+{
+ ASSERT(m_decryptor);
+ ASSERT(m_pendingKey);
+ ASSERT(m_appendState == KeyNegotiation);
+ GST_TRACE("dispatching key to append pipeline %p", this);
+ gst_element_send_event(m_pipeline.get(), gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM_OOB,
+ gst_structure_new("drm-cipher", "key", GST_TYPE_BUFFER, m_pendingKey.get(), nullptr)));
+ m_pendingKey.clear();
+ setAppendState(AppendState::Ongoing);
+}
+
+void AppendPipeline::dispatchDecryptionKey(GstBuffer* buffer)
+{
+ if (m_appendState == AppendState::KeyNegotiation) {
+ GST_TRACE("append pipeline %p in key negotiation", this);
+ m_pendingKey = buffer;
+ if (m_decryptor)
+ dispatchPendingDecryptionKey();
+ else
+ GST_TRACE("no decryptor yet, waiting for it");
+ } else
+ GST_TRACE("append pipeline %p not in key negotiation", this);
+}
+#endif
+
// Detaches this AppendPipeline from its MediaPlayerPrivateGStreamerMSE.
// Invalidates the state machine, wakes any streaming-thread operation blocked
// waiting for the main thread, then stops the pipeline. Main thread only.
void AppendPipeline::clearPlayerPrivate()
{
    ASSERT(WTF::isMainThread());
    GST_DEBUG("cleaning private player");

    {
        LockHolder locker(m_newSampleLock);
        // Make sure that AppendPipeline won't process more data from now on and
        // instruct handleNewSample to abort itself from now on as well.
        setAppendState(AppendState::Invalid);

        // Awake any pending handleNewSample operation in the streaming thread.
        m_newSampleCondition.notifyOne();
    }

    {
        LockHolder locker(m_padAddRemoveLock);
        m_playerPrivate = nullptr;
        m_padAddRemoveCondition.notifyOne();
    }

    // And now that no handleNewSample operations will remain stalled waiting
    // for the main thread, stop the pipeline.
    if (m_pipeline)
        gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);
}
+
// Sync bus handler for "need-context" messages. A request for the preferred
// decryption system id means key negotiation is starting; the actual context
// setup is delegated to the player private.
void AppendPipeline::handleNeedContextSyncMessage(GstMessage* message)
{
    const gchar* contextType = nullptr;
    gst_message_parse_context_type(message, &contextType);
    GST_TRACE("context type: %s", contextType);
    if (!g_strcmp0(contextType, "drm-preferred-decryption-system-id"))
        setAppendState(AppendPipeline::AppendState::KeyNegotiation);

    // MediaPlayerPrivateGStreamerBase will take care of setting up encryption.
    if (m_playerPrivate)
        m_playerPrivate->handleSyncMessage(message);
}
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+void AppendPipeline::handleElementMessage(GstMessage* message)
+{
+ ASSERT(WTF::isMainThread());
+
+ const GstStructure* structure = gst_message_get_structure(message);
+ GST_TRACE("%s message from %s", gst_structure_get_name(structure), GST_MESSAGE_SRC_NAME(message));
+ if (m_playerPrivate && gst_structure_has_name(structure, "drm-key-needed")) {
+ setAppendState(AppendPipeline::AppendState::KeyNegotiation);
+
+ GST_DEBUG("sending drm-key-needed message from %s to the player", GST_MESSAGE_SRC_NAME(message));
+ GRefPtr<GstEvent> event;
+ gst_structure_get(structure, "event", GST_TYPE_EVENT, &event.outPtr(), nullptr);
+ m_playerPrivate->handleProtectionEvent(event.get());
+ }
+}
+#endif
+
+void AppendPipeline::handleApplicationMessage(GstMessage* message)
+{
+ ASSERT(WTF::isMainThread());
+
+ const GstStructure* structure = gst_message_get_structure(message);
+
+ if (gst_structure_has_name(structure, "appsrc-need-data")) {
+ handleAppsrcNeedDataReceived();
+ return;
+ }
+
+ if (gst_structure_has_name(structure, "appsrc-buffer-left")) {
+ handleAppsrcAtLeastABufferLeft();
+ return;
+ }
+
+ if (gst_structure_has_name(structure, "demuxer-connect-to-appsink")) {
+ GRefPtr<GstPad> demuxerSrcPad;
+ gst_structure_get(structure, "demuxer-src-pad", G_TYPE_OBJECT, &demuxerSrcPad.outPtr(), nullptr);
+ ASSERT(demuxerSrcPad);
+ connectDemuxerSrcPadToAppsink(demuxerSrcPad.get());
+ return;
+ }
+
+ if (gst_structure_has_name(structure, "appsink-caps-changed")) {
+ appsinkCapsChanged();
+ return;
+ }
+
+ if (gst_structure_has_name(structure, "appsink-new-sample")) {
+ GRefPtr<GstSample> newSample;
+ gst_structure_get(structure, "new-sample", GST_TYPE_SAMPLE, &newSample.outPtr(), nullptr);
+
+ appsinkNewSample(newSample.get());
+ return;
+ }
+
+ if (gst_structure_has_name(structure, "appsink-eos")) {
+ appsinkEOS();
+ return;
+ }
+
+ ASSERT_NOT_REACHED();
+}
+
// Main-thread handler for appsrc's need-data notification. need-data is only
// meaningful once at least one buffer has left appsrc (earlier signals are
// spurious and dropped); after that it marks the end of the appended data.
void AppendPipeline::handleAppsrcNeedDataReceived()
{
    if (!m_appsrcAtLeastABufferLeft) {
        GST_TRACE("discarding until at least a buffer leaves appsrc");
        return;
    }

    ASSERT(m_appendState == AppendState::Ongoing || m_appendState == AppendState::Sampling);
    ASSERT(!m_appsrcNeedDataReceived);

    GST_TRACE("received need-data from appsrc");

    m_appsrcNeedDataReceived = true;
    checkEndOfAppend();
}
+
// Records that at least one buffer has flowed out of appsrc for the current
// append. When logging is disabled the data-leaving probe has then served its
// only purpose and is removed; with logging enabled it stays for tracing.
void AppendPipeline::handleAppsrcAtLeastABufferLeft()
{
    m_appsrcAtLeastABufferLeft = true;
    GST_TRACE("received buffer-left from appsrc");
#if LOG_DISABLED
    removeAppsrcDataLeavingProbe();
#endif
}
+
+gint AppendPipeline::id()
+{
+ ASSERT(WTF::isMainThread());
+
+ if (m_id)
+ return m_id;
+
+ static gint s_totalAudio = 0;
+ static gint s_totalVideo = 0;
+ static gint s_totalText = 0;
+
+ switch (m_streamType) {
+ case Audio:
+ m_id = ++s_totalAudio;
+ break;
+ case Video:
+ m_id = ++s_totalVideo;
+ break;
+ case Text:
+ m_id = ++s_totalText;
+ break;
+ case Unknown:
+ case Invalid:
+ GST_ERROR("Trying to get id for a pipeline of Unknown/Invalid type");
+ ASSERT_NOT_REACHED();
+ break;
+ }
+
+ GST_DEBUG("streamType=%d, id=%d", static_cast<int>(m_streamType), m_id);
+
+ return m_id;
+}
+
// Drives the append state machine. Only the transitions in the diagram below
// are legal; anything else trips the assertion and is logged as an error.
// Some transitions have side effects (starting the pipeline, notifying the
// SourceBuffer) and may chain into an automatic follow-up transition through
// nextAppendState.
void AppendPipeline::setAppendState(AppendState newAppendState)
{
    ASSERT(WTF::isMainThread());
    // Valid transitions:
    // NotStarted-->Ongoing-->DataStarve-->NotStarted
    // | | `->Aborting-->NotStarted
    // | `->Sampling-···->Sampling-->LastSample-->NotStarted
    // | | `->Aborting-->NotStarted
    // | `->KeyNegotiation-->Ongoing-->[...]
    // `->Aborting-->NotStarted
    AppendState oldAppendState = m_appendState;
    AppendState nextAppendState = AppendState::Invalid;

    if (oldAppendState != newAppendState)
        GST_TRACE("%s --> %s", dumpAppendState(oldAppendState), dumpAppendState(newAppendState));

    // "ok" records whether the requested transition is legal from the current
    // state; "nextAppendState" optionally queues an automatic follow-up.
    bool ok = false;

    switch (oldAppendState) {
    case AppendState::NotStarted:
        switch (newAppendState) {
        case AppendState::Ongoing:
            ok = true;
            // A new append is starting: get the pipeline rolling.
            gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
            break;
        case AppendState::NotStarted:
            ok = true;
            // A buffer deferred during an abort is pushed now, which
            // immediately re-enters the Ongoing state.
            if (m_pendingBuffer) {
                GST_TRACE("pushing pending buffer %p", m_pendingBuffer.get());
                gst_app_src_push_buffer(GST_APP_SRC(appsrc()), m_pendingBuffer.leakRef());
                nextAppendState = AppendState::Ongoing;
            }
            break;
        case AppendState::Aborting:
            ok = true;
            nextAppendState = AppendState::NotStarted;
            break;
        case AppendState::Invalid:
            ok = true;
            break;
        default:
            break;
        }
        break;
    case AppendState::KeyNegotiation:
        switch (newAppendState) {
        case AppendState::Ongoing:
        case AppendState::Invalid:
            ok = true;
            break;
        default:
            break;
        }
        break;
    case AppendState::Ongoing:
        switch (newAppendState) {
        case AppendState::KeyNegotiation:
        case AppendState::Sampling:
        case AppendState::Invalid:
            ok = true;
            break;
        case AppendState::DataStarve:
            // The append finished without producing samples.
            ok = true;
            GST_DEBUG("received all pending samples");
            m_sourceBufferPrivate->didReceiveAllPendingSamples();
            if (m_abortPending)
                nextAppendState = AppendState::Aborting;
            else
                nextAppendState = AppendState::NotStarted;
            break;
        default:
            break;
        }
        break;
    case AppendState::DataStarve:
        switch (newAppendState) {
        case AppendState::NotStarted:
        case AppendState::Invalid:
            ok = true;
            break;
        case AppendState::Aborting:
            ok = true;
            nextAppendState = AppendState::NotStarted;
            break;
        default:
            break;
        }
        break;
    case AppendState::Sampling:
        switch (newAppendState) {
        case AppendState::Sampling:
        case AppendState::Invalid:
            ok = true;
            break;
        case AppendState::LastSample:
            // The append produced samples and they have all been processed.
            ok = true;
            GST_DEBUG("received all pending samples");
            m_sourceBufferPrivate->didReceiveAllPendingSamples();
            if (m_abortPending)
                nextAppendState = AppendState::Aborting;
            else
                nextAppendState = AppendState::NotStarted;
            break;
        default:
            break;
        }
        break;
    case AppendState::LastSample:
        switch (newAppendState) {
        case AppendState::NotStarted:
        case AppendState::Invalid:
            ok = true;
            break;
        case AppendState::Aborting:
            ok = true;
            nextAppendState = AppendState::NotStarted;
            break;
        default:
            break;
        }
        break;
    case AppendState::Aborting:
        switch (newAppendState) {
        case AppendState::NotStarted:
            // Abort completed: reset the pipeline and clear the pending flag.
            ok = true;
            resetPipeline();
            m_abortPending = false;
            nextAppendState = AppendState::NotStarted;
            break;
        case AppendState::Invalid:
            ok = true;
            break;
        default:
            break;
        }
        break;
    case AppendState::Invalid:
        // Invalid is terminal: every transition is accepted but ignored.
        ok = true;
        break;
    }

    if (ok)
        m_appendState = newAppendState;
    else
        GST_ERROR("Invalid append state transition %s --> %s", dumpAppendState(oldAppendState), dumpAppendState(newAppendState));

    ASSERT(ok);

    // Chain into the queued follow-up transition, if any.
    if (nextAppendState != AppendState::Invalid)
        setAppendState(nextAppendState);
}
+
+void AppendPipeline::parseDemuxerSrcPadCaps(GstCaps* demuxerSrcPadCaps)
+{
+ ASSERT(WTF::isMainThread());
+
+ m_demuxerSrcPadCaps = adoptGRef(demuxerSrcPadCaps);
+ m_streamType = WebCore::MediaSourceStreamTypeGStreamer::Unknown;
+
+ GstStructure* structure = gst_caps_get_structure(m_demuxerSrcPadCaps.get(), 0);
+ bool sizeConfigured = false;
+
+#if GST_CHECK_VERSION(1, 5, 3) && (ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA))
+ if (gst_structure_has_name(structure, "application/x-cenc")) {
+ // Any previous decryptor should have been removed from the pipeline by disconnectFromAppSinkFromStreamingThread()
+ ASSERT(!m_decryptor);
+
+ m_decryptor = WebCore::createGstDecryptor(gst_structure_get_string(structure, "protection-system"));
+ if (!m_decryptor) {
+ GST_ERROR("decryptor not found for caps: %" GST_PTR_FORMAT, m_demuxerSrcPadCaps.get());
+ return;
+ }
+
+ const gchar* originalMediaType = gst_structure_get_string(structure, "original-media-type");
+
+ if (!MediaPlayerPrivateGStreamerMSE::supportsCodecs(originalMediaType)) {
+ m_presentationSize = WebCore::FloatSize();
+ m_streamType = WebCore::MediaSourceStreamTypeGStreamer::Invalid;
+ } else if (g_str_has_prefix(originalMediaType, "video/")) {
+ int width = 0;
+ int height = 0;
+ float finalHeight = 0;
+
+ if (gst_structure_get_int(structure, "width", &width) && gst_structure_get_int(structure, "height", &height)) {
+ int ratioNumerator = 1;
+ int ratioDenominator = 1;
+
+ gst_structure_get_fraction(structure, "pixel-aspect-ratio", &ratioNumerator, &ratioDenominator);
+ finalHeight = height * ((float) ratioDenominator / (float) ratioNumerator);
+ }
+
+ m_presentationSize = WebCore::FloatSize(width, finalHeight);
+ m_streamType = WebCore::MediaSourceStreamTypeGStreamer::Video;
+ } else {
+ m_presentationSize = WebCore::FloatSize();
+ if (g_str_has_prefix(originalMediaType, "audio/"))
+ m_streamType = WebCore::MediaSourceStreamTypeGStreamer::Audio;
+ else if (g_str_has_prefix(originalMediaType, "text/"))
+ m_streamType = WebCore::MediaSourceStreamTypeGStreamer::Text;
+ }
+ sizeConfigured = true;
+ }
+#endif
+
+ if (!sizeConfigured) {
+ const char* structureName = gst_structure_get_name(structure);
+ GstVideoInfo info;
+
+ if (!MediaPlayerPrivateGStreamerMSE::supportsCodecs(structureName)) {
+ m_presentationSize = WebCore::FloatSize();
+ m_streamType = WebCore::MediaSourceStreamTypeGStreamer::Invalid;
+ } else if (g_str_has_prefix(structureName, "video/") && gst_video_info_from_caps(&info, demuxerSrcPadCaps)) {
+ float width, height;
+
+ width = info.width;
+ height = info.height * ((float) info.par_d / (float) info.par_n);
+
+ m_presentationSize = WebCore::FloatSize(width, height);
+ m_streamType = WebCore::MediaSourceStreamTypeGStreamer::Video;
+ } else {
+ m_presentationSize = WebCore::FloatSize();
+ if (g_str_has_prefix(structureName, "audio/"))
+ m_streamType = WebCore::MediaSourceStreamTypeGStreamer::Audio;
+ else if (g_str_has_prefix(structureName, "text/"))
+ m_streamType = WebCore::MediaSourceStreamTypeGStreamer::Text;
+ }
+ }
+}
+
+void AppendPipeline::appsinkCapsChanged()
+{
+ ASSERT(WTF::isMainThread());
+
+ if (!m_appsink)
+ return;
+
+ GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(m_appsink.get(), "sink"));
+ GRefPtr<GstCaps> caps = adoptGRef(gst_pad_get_current_caps(pad.get()));
+
+ if (!caps)
+ return;
+
+ // This means that we're right after a new track has appeared. Otherwise, it's a caps change inside the same track.
+ bool previousCapsWereNull = !m_appsinkCaps;
+
+ if (m_appsinkCaps != caps) {
+ m_appsinkCaps = WTFMove(caps);
+ if (m_playerPrivate && previousCapsWereNull)
+ m_playerPrivate->trackDetected(this, m_oldTrack, m_track);
+ didReceiveInitializationSegment();
+ gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
+ }
+}
+
+void AppendPipeline::checkEndOfAppend()
+{
+ ASSERT(WTF::isMainThread());
+
+ if (!m_appsrcNeedDataReceived || (m_appendState != AppendState::Ongoing && m_appendState != AppendState::Sampling))
+ return;
+
+ GST_TRACE("end of append data mark was received");
+
+ switch (m_appendState) {
+ case AppendState::Ongoing:
+ GST_TRACE("DataStarve");
+ m_appsrcNeedDataReceived = false;
+ setAppendState(AppendState::DataStarve);
+ break;
+ case AppendState::Sampling:
+ GST_TRACE("LastSample");
+ m_appsrcNeedDataReceived = false;
+ setAppendState(AppendState::LastSample);
+ break;
+ default:
+ ASSERT_NOT_REACHED();
+ break;
+ }
+}
+
// Main-thread half of sample processing: wraps the GstSample into a
// GStreamerMediaSample, feeds it to the SourceBuffer and wakes the streaming
// thread blocked in handleNewAppsinkSample(), reporting the result through
// m_flowReturn.
void AppendPipeline::appsinkNewSample(GstSample* sample)
{
    ASSERT(WTF::isMainThread());

    {
        LockHolder locker(m_newSampleLock);

        // Ignore samples if we're not expecting them. Refuse processing if we're in Invalid state.
        if (m_appendState != AppendState::Ongoing && m_appendState != AppendState::Sampling) {
            GST_WARNING("Unexpected sample, appendState=%s", dumpAppendState(m_appendState));
            // FIXME: Return ERROR and find a more robust way to detect that all the
            // data has been processed, so we don't need to resort to these hacks.
            // All in all, return OK, even if it's not the proper thing to do. We don't want to break the demuxer.
            m_flowReturn = GST_FLOW_OK;
            m_newSampleCondition.notifyOne();
            return;
        }

        RefPtr<GStreamerMediaSample> mediaSample = WebCore::GStreamerMediaSample::create(sample, m_presentationSize, trackId());

        GST_TRACE("append: trackId=%s PTS=%f presentationSize=%.0fx%.0f", mediaSample->trackID().string().utf8().data(), mediaSample->presentationTime().toFloat(), mediaSample->presentationSize().width(), mediaSample->presentationSize().height());

        // If we're beyond the duration, ignore this sample and the remaining ones.
        // NOTE(review): "!duration.indefiniteTime()" calls a factory through the
        // instance and negates its result; an indefinite-duration check would
        // normally be "duration != MediaTime::indefiniteTime()" — confirm intent.
        MediaTime duration = m_mediaSourceClient->duration();
        if (duration.isValid() && !duration.indefiniteTime() && mediaSample->presentationTime() > duration) {
            GST_DEBUG("Detected sample (%f) beyond the duration (%f), declaring LastSample", mediaSample->presentationTime().toFloat(), duration.toFloat());
            setAppendState(AppendState::LastSample);
            m_flowReturn = GST_FLOW_OK;
            m_newSampleCondition.notifyOne();
            return;
        }

        // Add a gap sample if a gap is detected before the first sample.
        // NOTE(review): the applied offset is zero, so this branch only logs;
        // presumably it was meant to snap the first sample's PTS to 0 — confirm.
        if (mediaSample->decodeTime() == MediaTime::zeroTime()
            && mediaSample->presentationTime() > MediaTime::zeroTime()
            && mediaSample->presentationTime() <= MediaTime::createWithDouble(0.1)) {
            GST_DEBUG("Adding gap offset");
            mediaSample->applyPtsOffset(MediaTime::zeroTime());
        }

        m_sourceBufferPrivate->didReceiveSample(*mediaSample);
        setAppendState(AppendState::Sampling);
        m_flowReturn = GST_FLOW_OK;
        m_newSampleCondition.notifyOne();
    }

    checkEndOfAppend();
}
+
+void AppendPipeline::appsinkEOS()
+{
+ ASSERT(WTF::isMainThread());
+
+ switch (m_appendState) {
+ case AppendState::Aborting:
+ // Ignored. Operation completion will be managed by the Aborting->NotStarted transition.
+ return;
+ case AppendState::Ongoing:
+ // Finish Ongoing and Sampling states.
+ setAppendState(AppendState::DataStarve);
+ break;
+ case AppendState::Sampling:
+ setAppendState(AppendState::LastSample);
+ break;
+ default:
+ GST_DEBUG("Unexpected EOS");
+ break;
+ }
+}
+
+void AppendPipeline::didReceiveInitializationSegment()
+{
+ ASSERT(WTF::isMainThread());
+
+ WebCore::SourceBufferPrivateClient::InitializationSegment initializationSegment;
+
+ GST_DEBUG("Notifying SourceBuffer for track %s", (m_track) ? m_track->id().string().utf8().data() : nullptr);
+ initializationSegment.duration = m_mediaSourceClient->duration();
+
+ switch (m_streamType) {
+ case Audio: {
+ WebCore::SourceBufferPrivateClient::InitializationSegment::AudioTrackInformation info;
+ info.track = static_cast<AudioTrackPrivateGStreamer*>(m_track.get());
+ info.description = WebCore::GStreamerMediaDescription::create(m_demuxerSrcPadCaps.get());
+ initializationSegment.audioTracks.append(info);
+ break;
+ }
+ case Video: {
+ WebCore::SourceBufferPrivateClient::InitializationSegment::VideoTrackInformation info;
+ info.track = static_cast<VideoTrackPrivateGStreamer*>(m_track.get());
+ info.description = WebCore::GStreamerMediaDescription::create(m_demuxerSrcPadCaps.get());
+ initializationSegment.videoTracks.append(info);
+ break;
+ }
+ default:
+ GST_ERROR("Unsupported stream type or codec");
+ break;
+ }
+
+ m_sourceBufferPrivate->didReceiveInitializationSegment(initializationSegment);
+}
+
+AtomicString AppendPipeline::trackId()
+{
+ ASSERT(WTF::isMainThread());
+
+ if (!m_track)
+ return AtomicString();
+
+ return m_track->id();
+}
+
// Returns the pipeline to READY so a fresh append can start, re-arming the
// appsrc data-leaving probe. Wakes any streaming-thread sample handler first
// so it is not left blocked across the state change. Main thread only.
void AppendPipeline::resetPipeline()
{
    ASSERT(WTF::isMainThread());
    GST_DEBUG("resetting pipeline");
    m_appsrcAtLeastABufferLeft = false;
    setAppsrcDataLeavingProbe();

    {
        LockHolder locker(m_newSampleLock);
        m_newSampleCondition.notifyOne();
        gst_element_set_state(m_pipeline.get(), GST_STATE_READY);
        // Zero timeout: query the state-change result without blocking.
        gst_element_get_state(m_pipeline.get(), nullptr, nullptr, 0);
    }

#if (!(LOG_DISABLED || defined(GST_DISABLE_GST_DEBUG)))
    {
        static unsigned i = 0;
        // This is here for debugging purposes. It does not make sense to have it as class member.
        WTF::String dotFileName = String::format("reset-pipeline-%d", ++i);
        gst_debug_bin_to_dot_file(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.utf8().data());
    }
#endif

}
+
+void AppendPipeline::setAppsrcDataLeavingProbe()
+{
+ if (m_appsrcDataLeavingProbeId)
+ return;
+
+ GST_TRACE("setting appsrc data leaving probe");
+
+ GRefPtr<GstPad> appsrcPad = adoptGRef(gst_element_get_static_pad(m_appsrc.get(), "src"));
+ m_appsrcDataLeavingProbeId = gst_pad_add_probe(appsrcPad.get(), GST_PAD_PROBE_TYPE_BUFFER, reinterpret_cast<GstPadProbeCallback>(appendPipelineAppsrcDataLeaving), this, nullptr);
+}
+
+void AppendPipeline::removeAppsrcDataLeavingProbe()
+{
+ if (!m_appsrcDataLeavingProbeId)
+ return;
+
+ GST_TRACE("removing appsrc data leaving probe");
+
+ GRefPtr<GstPad> appsrcPad = adoptGRef(gst_element_get_static_pad(m_appsrc.get(), "src"));
+ gst_pad_remove_probe(appsrcPad.get(), m_appsrcDataLeavingProbeId);
+ m_appsrcDataLeavingProbeId = 0;
+}
+
+void AppendPipeline::abort()
+{
+ ASSERT(WTF::isMainThread());
+ GST_DEBUG("aborting");
+
+ m_pendingBuffer = nullptr;
+
+ // Abort already ongoing.
+ if (m_abortPending)
+ return;
+
+ m_abortPending = true;
+ if (m_appendState == AppendState::NotStarted)
+ setAppendState(AppendState::Aborting);
+ // Else, the automatic state transitions will take care when the ongoing append finishes.
+}
+
+GstFlowReturn AppendPipeline::pushNewBuffer(GstBuffer* buffer)
+{
+ GstFlowReturn result;
+
+ if (m_abortPending) {
+ m_pendingBuffer = adoptGRef(buffer);
+ result = GST_FLOW_OK;
+ } else {
+ setAppendState(AppendPipeline::AppendState::Ongoing);
+ GST_TRACE("pushing new buffer %p", buffer);
+ result = gst_app_src_push_buffer(GST_APP_SRC(appsrc()), buffer);
+ }
+
+ return result;
+}
+
+void AppendPipeline::reportAppsrcAtLeastABufferLeft()
+{
+ GST_TRACE("buffer left appsrc, reposting to bus");
+ GstStructure* structure = gst_structure_new_empty("appsrc-buffer-left");
+ GstMessage* message = gst_message_new_application(GST_OBJECT(m_appsrc.get()), structure);
+ gst_bus_post(m_bus.get(), message);
+}
+
+void AppendPipeline::reportAppsrcNeedDataReceived()
+{
+ GST_TRACE("received need-data signal at appsrc, reposting to bus");
+ GstStructure* structure = gst_structure_new_empty("appsrc-need-data");
+ GstMessage* message = gst_message_new_application(GST_OBJECT(m_appsrc.get()), structure);
+ gst_bus_post(m_bus.get(), message);
+}
+
// Streaming-thread half of sample handling: pulls the sample out of appsink,
// posts it to the bus for main-thread processing and blocks until the main
// thread reports the result through m_flowReturn.
GstFlowReturn AppendPipeline::handleNewAppsinkSample(GstElement* appsink)
{
    ASSERT(!WTF::isMainThread());

    // Even if we're disabled, it's important to pull the sample out anyway to
    // avoid deadlocks when changing to GST_STATE_NULL having a non empty appsink.
    GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_sample(GST_APP_SINK(appsink)));
    LockHolder locker(m_newSampleLock);

    // Bail out when the pipeline has been detached from the player or invalidated.
    if (!m_playerPrivate || m_appendState == AppendState::Invalid) {
        GST_WARNING("AppendPipeline has been disabled, ignoring this sample");
        return GST_FLOW_ERROR;
    }

    GstStructure* structure = gst_structure_new("appsink-new-sample", "new-sample", GST_TYPE_SAMPLE, sample.get(), nullptr);
    GstMessage* message = gst_message_new_application(GST_OBJECT(appsink), structure);
    gst_bus_post(m_bus.get(), message);
    GST_TRACE("appsink-new-sample message posted to bus");

    // Block until appsinkNewSample() (main thread) sets m_flowReturn and signals.
    m_newSampleCondition.wait(m_newSampleLock);
    // We've been awaken because the sample was processed or because of
    // an exceptional condition (entered in Invalid state, destructor, etc.).
    // We can't reliably delete info here, appendPipelineAppsinkNewSampleMainThread will do it.

    return m_flowReturn;
}
+
+// Reacts to a new demuxer src pad (pad-added), from whichever thread the
+// callback fires on. Track setup happens on the main thread (directly or via a
+// bus message plus wait); the actual pad linking is done here, on the calling
+// thread, which is usually the demuxer's streaming thread.
+void AppendPipeline::connectDemuxerSrcPadToAppsinkFromAnyThread(GstPad* demuxerSrcPad)
+{
+    if (!m_appsink)
+        return;
+
+    GST_DEBUG("connecting to appsink");
+
+    // Extra streams are not supported: swallow their buffers with a probe so
+    // they can't stall the pipeline.
+    if (m_demux->numsrcpads > 1) {
+        GST_WARNING("Only one stream per SourceBuffer is allowed! Ignoring stream %d by adding a black hole probe.", m_demux->numsrcpads);
+        gulong probeId = gst_pad_add_probe(demuxerSrcPad, GST_PAD_PROBE_TYPE_BUFFER, reinterpret_cast<GstPadProbeCallback>(appendPipelineDemuxerBlackHolePadProbe), nullptr, nullptr);
+        g_object_set_data(G_OBJECT(demuxerSrcPad), "blackHoleProbeId", GULONG_TO_POINTER(probeId));
+        return;
+    }
+
+    GRefPtr<GstPad> appsinkSinkPad = adoptGRef(gst_element_get_static_pad(m_appsink.get(), "sink"));
+
+    // Only one stream per demuxer is supported.
+    ASSERT(!gst_pad_is_linked(appsinkSinkPad.get()));
+
+    // Capture the duration reported by the demuxer; used later on the main
+    // thread to possibly grow the MediaSource duration.
+    gint64 timeLength = 0;
+    if (gst_element_query_duration(m_demux.get(), GST_FORMAT_TIME, &timeLength)
+        && static_cast<guint64>(timeLength) != GST_CLOCK_TIME_NONE)
+        m_initialDuration = MediaTime(GST_TIME_AS_USECONDS(timeLength), G_USEC_PER_SEC);
+    else
+        m_initialDuration = MediaTime::positiveInfiniteTime();
+
+    if (WTF::isMainThread())
+        connectDemuxerSrcPadToAppsink(demuxerSrcPad);
+    else {
+        // Call connectDemuxerSrcPadToAppsink() in the main thread and wait.
+        LockHolder locker(m_padAddRemoveLock);
+        if (!m_playerPrivate)
+            return;
+
+        GstStructure* structure = gst_structure_new("demuxer-connect-to-appsink", "demuxer-src-pad", G_TYPE_OBJECT, demuxerSrcPad, nullptr);
+        GstMessage* message = gst_message_new_application(GST_OBJECT(m_demux.get()), structure);
+        gst_bus_post(m_bus.get(), message);
+        GST_TRACE("demuxer-connect-to-appsink message posted to bus");
+
+        // Woken by connectDemuxerSrcPadToAppsink() once track setup is done.
+        m_padAddRemoveCondition.wait(m_padAddRemoveLock);
+    }
+
+    // Must be done in the thread we were called from (usually streaming thread).
+    bool isData = (m_streamType == WebCore::MediaSourceStreamTypeGStreamer::Audio)
+        || (m_streamType == WebCore::MediaSourceStreamTypeGStreamer::Video)
+        || (m_streamType == WebCore::MediaSourceStreamTypeGStreamer::Text);
+
+    if (isData) {
+        // FIXME: Only add appsink one time. This method can be called several times.
+        GRefPtr<GstObject> parent = adoptGRef(gst_element_get_parent(m_appsink.get()));
+        if (!parent)
+            gst_bin_add(GST_BIN(m_pipeline.get()), m_appsink.get());
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
+        // With encrypted content the decryptor sits between demuxer and appsink:
+        // demuxer ! decryptor ! appsink. Otherwise the demuxer links directly.
+        if (m_decryptor) {
+            gst_object_ref(m_decryptor.get());
+            gst_bin_add(GST_BIN(m_pipeline.get()), m_decryptor.get());
+
+            GRefPtr<GstPad> decryptorSinkPad = adoptGRef(gst_element_get_static_pad(m_decryptor.get(), "sink"));
+            gst_pad_link(demuxerSrcPad, decryptorSinkPad.get());
+
+            GRefPtr<GstPad> decryptorSrcPad = adoptGRef(gst_element_get_static_pad(m_decryptor.get(), "src"));
+            gst_pad_link(decryptorSrcPad.get(), appsinkSinkPad.get());
+
+            gst_element_sync_state_with_parent(m_appsink.get());
+            gst_element_sync_state_with_parent(m_decryptor.get());
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+            // Deliver a key that arrived before the decryptor was linked.
+            if (m_pendingKey)
+                dispatchPendingDecryptionKey();
+#endif
+        } else {
+#endif
+            gst_pad_link(demuxerSrcPad, appsinkSinkPad.get());
+            gst_element_sync_state_with_parent(m_appsink.get());
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
+        }
+#endif
+        gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
+    }
+}
+
+// Main-thread half of demuxer pad connection: parses the pad caps, possibly
+// grows the MediaSource duration, and creates the matching WebCore track
+// object. Always notifies m_padAddRemoveCondition so the streaming thread
+// blocked in connectDemuxerSrcPadToAppsinkFromAnyThread() can resume.
+void AppendPipeline::connectDemuxerSrcPadToAppsink(GstPad* demuxerSrcPad)
+{
+    ASSERT(WTF::isMainThread());
+    GST_DEBUG("Connecting to appsink");
+
+    LockHolder locker(m_padAddRemoveLock);
+    GRefPtr<GstPad> sinkSinkPad = adoptGRef(gst_element_get_static_pad(m_appsink.get(), "sink"));
+
+    // Only one stream per demuxer is supported.
+    ASSERT(!gst_pad_is_linked(sinkSinkPad.get()));
+
+    GRefPtr<GstCaps> caps = adoptGRef(gst_pad_get_current_caps(GST_PAD(demuxerSrcPad)));
+
+    // Nothing to do if there are no caps or the pipeline is being torn down,
+    // but the waiting thread must still be woken up.
+    if (!caps || m_appendState == AppendState::Invalid || !m_playerPrivate) {
+        m_padAddRemoveCondition.notifyOne();
+        return;
+    }
+
+#ifndef GST_DISABLE_GST_DEBUG
+    {
+        GUniquePtr<gchar> strcaps(gst_caps_to_string(caps.get()));
+        GST_DEBUG("%s", strcaps.get());
+    }
+#endif
+
+    // Grow the MediaSource duration if the appended media is longer (or if no
+    // valid duration was known yet).
+    if (m_initialDuration > m_mediaSourceClient->duration()
+        || (m_mediaSourceClient->duration().isInvalid() && m_initialDuration > MediaTime::zeroTime()))
+        m_mediaSourceClient->durationChanged(m_initialDuration);
+
+    m_oldTrack = m_track;
+
+    // parseDemuxerSrcPadCaps() takes ownership of the passed caps reference.
+    parseDemuxerSrcPadCaps(gst_caps_ref(caps.get()));
+
+    switch (m_streamType) {
+    case WebCore::MediaSourceStreamTypeGStreamer::Audio:
+        if (m_playerPrivate)
+            m_track = WebCore::AudioTrackPrivateGStreamer::create(m_playerPrivate->pipeline(), id(), sinkSinkPad.get());
+        break;
+    case WebCore::MediaSourceStreamTypeGStreamer::Video:
+        if (m_playerPrivate)
+            m_track = WebCore::VideoTrackPrivateGStreamer::create(m_playerPrivate->pipeline(), id(), sinkSinkPad.get());
+        break;
+    case WebCore::MediaSourceStreamTypeGStreamer::Text:
+        m_track = WebCore::InbandTextTrackPrivateGStreamer::create(id(), sinkSinkPad.get());
+        break;
+    case WebCore::MediaSourceStreamTypeGStreamer::Invalid:
+        {
+            GUniquePtr<gchar> strcaps(gst_caps_to_string(caps.get()));
+            GST_DEBUG("Unsupported track codec: %s", strcaps.get());
+        }
+        // This is going to cause an error which will detach the SourceBuffer and tear down this
+        // AppendPipeline, so we need the padAddRemove lock released before continuing.
+        m_track = nullptr;
+        m_padAddRemoveCondition.notifyOne();
+        locker.unlockEarly();
+        didReceiveInitializationSegment();
+        return;
+    default:
+        // No useful data, but notify anyway to complete the append operation.
+        GST_DEBUG("Received all pending samples (no data)");
+        m_sourceBufferPrivate->didReceiveAllPendingSamples();
+        break;
+    }
+
+    m_padAddRemoveCondition.notifyOne();
+}
+
+// Reacts to a demuxer src pad going away (pad-removed): unlinks the pad from
+// the (optional) decryptor and appsink, or removes the black hole probe that
+// was installed for an unsupported secondary stream.
+void AppendPipeline::disconnectDemuxerSrcPadFromAppsinkFromAnyThread(GstPad* demuxerSrcPad)
+{
+    // Must be done in the thread we were called from (usually streaming thread).
+    if (!gst_pad_is_linked(demuxerSrcPad)) {
+        // An unlinked pad may still carry the black hole probe; drop it.
+        gulong probeId = GPOINTER_TO_ULONG(g_object_get_data(G_OBJECT(demuxerSrcPad), "blackHoleProbeId"));
+        if (probeId) {
+            GST_DEBUG("Disconnecting black hole probe.");
+            g_object_set_data(G_OBJECT(demuxerSrcPad), "blackHoleProbeId", nullptr);
+            gst_pad_remove_probe(demuxerSrcPad, probeId);
+        } else
+            GST_WARNING("Not disconnecting demuxer src pad because it wasn't linked");
+        return;
+    }
+
+    GST_DEBUG("Disconnecting appsink");
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
+    // With a decryptor in place the chain demuxer ! decryptor ! appsink has to
+    // be taken apart and the decryptor removed from the bin.
+    if (m_decryptor) {
+        gst_element_unlink(m_decryptor.get(), m_appsink.get());
+        gst_element_unlink(m_demux.get(), m_decryptor.get());
+        gst_element_set_state(m_decryptor.get(), GST_STATE_NULL);
+        gst_bin_remove(GST_BIN(m_pipeline.get()), m_decryptor.get());
+    } else
+#endif
+        gst_element_unlink(m_demux.get(), m_appsink.get());
+}
+
+// notify::caps handler on the appsink sink pad: reposts the change on the
+// append pipeline bus so it is processed on the main thread.
+static void appendPipelineAppsinkCapsChanged(GObject* appsinkPad, GParamSpec*, AppendPipeline* appendPipeline)
+{
+    GstMessage* message = gst_message_new_application(GST_OBJECT(appsinkPad), gst_structure_new_empty("appsink-caps-changed"));
+    gst_bus_post(appendPipeline->bus(), message);
+    GST_TRACE("appsink-caps-changed message posted to bus");
+}
+
+// Buffer probe on the appsrc src pad: traces the buffer and tells the
+// AppendPipeline that at least one buffer has left appsrc. Data passes through.
+static GstPadProbeReturn appendPipelineAppsrcDataLeaving(GstPad*, GstPadProbeInfo* info, AppendPipeline* appendPipeline)
+{
+    ASSERT(GST_PAD_PROBE_INFO_TYPE(info) & GST_PAD_PROBE_TYPE_BUFFER);
+
+    GST_TRACE("buffer of size %" G_GSIZE_FORMAT " going thru", gst_buffer_get_size(GST_PAD_PROBE_INFO_BUFFER(info)));
+
+    appendPipeline->reportAppsrcAtLeastABufferLeft();
+
+    return GST_PAD_PROBE_OK;
+}
+
+#if !LOG_DISABLED
+// Logging-only pad probe: traces the size of every buffer flowing through the
+// pad it is attached to, tagged with the probe's description. Never drops data.
+static GstPadProbeReturn appendPipelinePadProbeDebugInformation(GstPad*, GstPadProbeInfo* info, struct PadProbeInformation* padProbeInformation)
+{
+    ASSERT(GST_PAD_PROBE_INFO_TYPE(info) & GST_PAD_PROBE_TYPE_BUFFER);
+    GstBuffer* buffer = GST_PAD_PROBE_INFO_BUFFER(info);
+    GST_TRACE("%s: buffer of size %" G_GSIZE_FORMAT " going thru", padProbeInformation->description, gst_buffer_get_size(buffer));
+    return GST_PAD_PROBE_OK;
+}
+#endif
+
+// Probe installed on unsupported secondary demuxer streams: drops every buffer
+// so the extra stream can't stall the pipeline (see
+// connectDemuxerSrcPadToAppsinkFromAnyThread()).
+static GstPadProbeReturn appendPipelineDemuxerBlackHolePadProbe(GstPad*, GstPadProbeInfo* info, gpointer)
+{
+    ASSERT(GST_PAD_PROBE_INFO_TYPE(info) & GST_PAD_PROBE_TYPE_BUFFER);
+    GstBuffer* buffer = GST_PAD_PROBE_INFO_BUFFER(info);
+    GST_TRACE("buffer of size %" G_GSIZE_FORMAT " ignored", gst_buffer_get_size(buffer));
+    return GST_PAD_PROBE_DROP;
+}
+
+// Trampoline for the appsrc "need-data" signal.
+static void appendPipelineAppsrcNeedData(GstAppSrc*, guint, AppendPipeline* appendPipeline)
+{
+    appendPipeline->reportAppsrcNeedDataReceived();
+}
+
+// Trampoline for the demuxer "pad-added" signal; runs on a streaming thread.
+static void appendPipelineDemuxerPadAdded(GstElement*, GstPad* demuxerSrcPad, AppendPipeline* appendPipeline)
+{
+    appendPipeline->connectDemuxerSrcPadToAppsinkFromAnyThread(demuxerSrcPad);
+}
+
+// Trampoline for the demuxer "pad-removed" signal; runs on a streaming thread.
+static void appendPipelineDemuxerPadRemoved(GstElement*, GstPad* demuxerSrcPad, AppendPipeline* appendPipeline)
+{
+    appendPipeline->disconnectDemuxerSrcPadFromAppsinkFromAnyThread(demuxerSrcPad);
+}
+
+// Trampoline for the appsink "new-sample" signal; blocks the streaming thread
+// until the sample has been handled on the main thread.
+static GstFlowReturn appendPipelineAppsinkNewSample(GstElement* appsink, AppendPipeline* appendPipeline)
+{
+    return appendPipeline->handleNewAppsinkSample(appsink);
+}
+
+// Handler for the appsink "eos" signal: dispatches directly when already on
+// the main thread, otherwise reposts an application message to the bus for
+// main-thread processing.
+static void appendPipelineAppsinkEOS(GstElement*, AppendPipeline* appendPipeline)
+{
+    if (WTF::isMainThread())
+        appendPipeline->appsinkEOS();
+    else {
+        GstStructure* structure = gst_structure_new_empty("appsink-eos");
+        GstMessage* message = gst_message_new_application(GST_OBJECT(appendPipeline->appsink()), structure);
+        gst_bus_post(appendPipeline->bus(), message);
+        GST_TRACE("appsink-eos message posted to bus");
+    }
+
+    GST_DEBUG("%s main thread", (WTF::isMainThread()) ? "Is" : "Not");
+}
+
+
+
+} // namespace WebCore.
+
+#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.h b/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.h
new file mode 100644
index 000000000..301265eb9
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.h
@@ -0,0 +1,165 @@
+/*
+ * Copyright (C) 2016 Metrological Group B.V.
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+#include "GRefPtrGStreamer.h"
+#include "MediaPlayerPrivateGStreamerMSE.h"
+#include "MediaSourceClientGStreamerMSE.h"
+#include "SourceBufferPrivateGStreamer.h"
+
+#include <gst/gst.h>
+#include <wtf/Condition.h>
+
+namespace WebCore {
+
+#if !LOG_DISABLED
+// Context handed to logging-only pad probes: the owning pipeline, a
+// human-readable tag for trace output, and the id needed to remove the probe.
+struct PadProbeInformation {
+    AppendPipeline* appendPipeline;
+    const char* description;
+    gulong probeId;
+};
+#endif
+
+// Pipeline used to process data appended to a SourceBuffer:
+// appsrc ! demuxer [! decryptor] ! appsink. Samples pulled from the appsink
+// are handed over to the SourceBufferPrivate. Thread-safe ref-counting is
+// required because GStreamer callbacks run on streaming threads while the
+// object is owned by main-thread code.
+class AppendPipeline : public ThreadSafeRefCounted<AppendPipeline> {
+public:
+    // States of the append processing state machine; see setAppendState() for
+    // the valid transitions.
+    enum class AppendState { Invalid, NotStarted, Ongoing, KeyNegotiation, DataStarve, Sampling, LastSample, Aborting };
+
+    AppendPipeline(Ref<MediaSourceClientGStreamerMSE>, Ref<SourceBufferPrivateGStreamer>, MediaPlayerPrivateGStreamerMSE&);
+    virtual ~AppendPipeline();
+
+    void handleNeedContextSyncMessage(GstMessage*);
+    void handleApplicationMessage(GstMessage*);
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+    void handleElementMessage(GstMessage*);
+#endif
+
+    gint id();
+    AppendState appendState() { return m_appendState; }
+    void setAppendState(AppendState);
+
+    GstFlowReturn handleNewAppsinkSample(GstElement*);
+    GstFlowReturn pushNewBuffer(GstBuffer*);
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+    void dispatchDecryptionKey(GstBuffer*);
+#endif
+
+    // Takes ownership of caps.
+    void parseDemuxerSrcPadCaps(GstCaps*);
+    void appsinkCapsChanged();
+    void appsinkNewSample(GstSample*);
+    void appsinkEOS();
+    void didReceiveInitializationSegment();
+    AtomicString trackId();
+    void abort();
+
+    // Detaches the pipeline from the player before the player goes away.
+    void clearPlayerPrivate();
+    Ref<SourceBufferPrivateGStreamer> sourceBufferPrivate() { return m_sourceBufferPrivate.get(); }
+    GstBus* bus() { return m_bus.get(); }
+    GstElement* pipeline() { return m_pipeline.get(); }
+    GstElement* appsrc() { return m_appsrc.get(); }
+    GstElement* appsink() { return m_appsink.get(); }
+    GstCaps* demuxerSrcPadCaps() { return m_demuxerSrcPadCaps.get(); }
+    GstCaps* appsinkCaps() { return m_appsinkCaps.get(); }
+    RefPtr<WebCore::TrackPrivateBase> track() { return m_track; }
+    WebCore::MediaSourceStreamTypeGStreamer streamType() { return m_streamType; }
+
+    void disconnectDemuxerSrcPadFromAppsinkFromAnyThread(GstPad*);
+    void connectDemuxerSrcPadToAppsinkFromAnyThread(GstPad*);
+    void connectDemuxerSrcPadToAppsink(GstPad*);
+
+    void reportAppsrcAtLeastABufferLeft();
+    void reportAppsrcNeedDataReceived();
+
+private:
+    void resetPipeline();
+    void checkEndOfAppend();
+    void handleAppsrcAtLeastABufferLeft();
+    void handleAppsrcNeedDataReceived();
+    void removeAppsrcDataLeavingProbe();
+    void setAppsrcDataLeavingProbe();
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+    void dispatchPendingDecryptionKey();
+#endif
+
+private:
+    Ref<MediaSourceClientGStreamerMSE> m_mediaSourceClient;
+    Ref<SourceBufferPrivateGStreamer> m_sourceBufferPrivate;
+    // Raw pointer; cleared via clearPlayerPrivate() when the player is destroyed.
+    MediaPlayerPrivateGStreamerMSE* m_playerPrivate;
+
+    // (m_mediaType, m_id) is unique.
+    gint m_id;
+
+    MediaTime m_initialDuration;
+
+    // Result of the last sample processing, read by the streaming thread after
+    // being woken through m_newSampleCondition.
+    GstFlowReturn m_flowReturn;
+
+    GRefPtr<GstElement> m_pipeline;
+    GRefPtr<GstBus> m_bus;
+    GRefPtr<GstElement> m_appsrc;
+    GRefPtr<GstElement> m_demux;
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
+    GRefPtr<GstElement> m_decryptor;
+#endif
+    // The demuxer has one src stream only, so only one appsink is needed and linked to it.
+    GRefPtr<GstElement> m_appsink;
+
+    // Synchronize the streaming thread with main-thread sample processing and
+    // demuxer pad add/remove handling respectively.
+    Lock m_newSampleLock;
+    Condition m_newSampleCondition;
+    Lock m_padAddRemoveLock;
+    Condition m_padAddRemoveCondition;
+
+    GRefPtr<GstCaps> m_appsinkCaps;
+    GRefPtr<GstCaps> m_demuxerSrcPadCaps;
+    FloatSize m_presentationSize;
+
+    bool m_appsrcAtLeastABufferLeft;
+    bool m_appsrcNeedDataReceived;
+
+    gulong m_appsrcDataLeavingProbeId;
+#if !LOG_DISABLED
+    struct PadProbeInformation m_demuxerDataEnteringPadProbeInformation;
+    struct PadProbeInformation m_appsinkDataEnteringPadProbeInformation;
+#endif
+
+    // Keeps track of the states of append processing, to avoid performing actions inappropriate for the current state
+    // (eg: processing more samples when the last one has been detected, etc.). See setAppendState() for valid
+    // transitions.
+    AppendState m_appendState;
+
+    // Aborts can only be completed when the normal sample detection has finished. Meanwhile, the willing to abort is
+    // expressed in this field.
+    bool m_abortPending;
+
+    WebCore::MediaSourceStreamTypeGStreamer m_streamType;
+    RefPtr<WebCore::TrackPrivateBase> m_oldTrack;
+    RefPtr<WebCore::TrackPrivateBase> m_track;
+
+    // Buffer parked by pushNewBuffer() while an abort is pending.
+    GRefPtr<GstBuffer> m_pendingBuffer;
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+    GRefPtr<GstBuffer> m_pendingKey;
+#endif
+};
+
+} // namespace WebCore.
+
+#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaDescription.cpp b/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaDescription.cpp
new file mode 100644
index 000000000..776a0be9b
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaDescription.cpp
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2016 Metrological Group B.V.
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "config.h"
+#include "GStreamerMediaDescription.h"
+
+#include "GUniquePtrGStreamer.h"
+
+#include <gst/pbutils/pbutils.h>
+#include <wtf/text/AtomicString.h>
+#include <wtf/text/WTFString.h>
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+namespace WebCore {
+
+// Returns a human-readable codec name derived from the caps via
+// gst_pb_utils_get_codec_description(). H.264 profile decorations are stripped
+// so that Main/High profile variants compare equal at the SourceBuffer layer.
+AtomicString GStreamerMediaDescription::codec() const
+{
+    GUniquePtr<gchar> description(gst_pb_utils_get_codec_description(m_caps.get()));
+    String codecName(description.get());
+
+    // Report "H.264 (Main Profile)" and "H.264 (High Profile)" just as "H.264" to allow changes between both variants
+    // go unnoticed to the SourceBuffer layer.
+    if (codecName.startsWith("H.264")) {
+        size_t braceStart = codecName.find(" (");
+        size_t braceEnd = codecName.find(")");
+        // Remove the whole " (...)" suffix. The length must include the closing
+        // parenthesis (+ 1), otherwise a stray ')' is left behind ("H.264)").
+        if (braceStart != notFound && braceEnd != notFound && braceEnd > braceStart)
+            codecName.remove(braceStart, braceEnd - braceStart + 1);
+    }
+
+    return codecName;
+}
+
+// True when the first caps structure carries a "video/" media type.
+bool GStreamerMediaDescription::isVideo() const
+{
+    const gchar* mediaType = gst_structure_get_name(gst_caps_get_structure(m_caps.get(), 0));
+    return g_str_has_prefix(mediaType, "video/");
+}
+
+// True when the first caps structure carries an "audio/" media type.
+bool GStreamerMediaDescription::isAudio() const
+{
+    const gchar* mediaType = gst_structure_get_name(gst_caps_get_structure(m_caps.get(), 0));
+    return g_str_has_prefix(mediaType, "audio/");
+}
+
+// Text tracks are not handled by this backend yet, so no caps are ever
+// reported as text.
+bool GStreamerMediaDescription::isText() const
+{
+    // FIXME: Implement proper text track support.
+    return false;
+}
+
+} // namespace WebCore.
+
+#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaDescription.h b/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaDescription.h
new file mode 100644
index 000000000..84e263caa
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaDescription.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2016 Metrological Group B.V.
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+#include "GRefPtrGStreamer.h"
+#include "MediaDescription.h"
+
+#include <gst/gst.h>
+
+namespace WebCore {
+
+// MediaDescription backed by GstCaps: the track kind (audio/video/text) and
+// the codec name are derived on demand from the stored caps.
+class GStreamerMediaDescription : public MediaDescription {
+public:
+    static Ref<GStreamerMediaDescription> create(GstCaps* caps)
+    {
+        return adoptRef(*new GStreamerMediaDescription(caps));
+    }
+
+    virtual ~GStreamerMediaDescription() = default;
+
+    AtomicString codec() const override;
+    bool isVideo() const override;
+    bool isAudio() const override;
+    bool isText() const override;
+
+private:
+    GStreamerMediaDescription(GstCaps* caps)
+        : MediaDescription()
+        , m_caps(caps)
+    {
+    }
+
+    GRefPtr<GstCaps> m_caps;
+};
+
+} // namespace WebCore.
+
+#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaSample.cpp b/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaSample.cpp
new file mode 100644
index 000000000..86d4329df
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaSample.cpp
@@ -0,0 +1,120 @@
+/*
+ * Copyright (C) 2016 Metrological Group B.V.
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "config.h"
+#include "GStreamerMediaSample.h"
+
+#include "GStreamerUtilities.h"
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+namespace WebCore {
+
+// Wraps a GstSample as a WebCore MediaSample, extracting PTS/DTS/duration and
+// the sync / non-displaying flags from the underlying GstBuffer. A null sample
+// (or a sample without a buffer) produces a zero-timed, empty sync sample.
+GStreamerMediaSample::GStreamerMediaSample(GstSample* sample, const FloatSize& presentationSize, const AtomicString& trackId)
+    : MediaSample()
+    , m_pts(MediaTime::zeroTime())
+    , m_dts(MediaTime::zeroTime())
+    , m_duration(MediaTime::zeroTime())
+    , m_trackId(trackId)
+    , m_size(0)
+    , m_presentationSize(presentationSize)
+    , m_flags(MediaSample::IsSync)
+{
+
+    if (!sample)
+        return;
+
+    GstBuffer* buffer = gst_sample_get_buffer(sample);
+    if (!buffer)
+        return;
+
+    // Converts GstClockTime (nanoseconds) to MediaTime at microsecond precision.
+    auto createMediaTime =
+        [](GstClockTime time) -> MediaTime {
+            return MediaTime(GST_TIME_AS_USECONDS(time), G_USEC_PER_SEC);
+        };
+
+    if (GST_BUFFER_PTS_IS_VALID(buffer))
+        m_pts = createMediaTime(GST_BUFFER_PTS(buffer));
+    if (GST_BUFFER_DTS_IS_VALID(buffer))
+        m_dts = createMediaTime(GST_BUFFER_DTS(buffer));
+    if (GST_BUFFER_DURATION_IS_VALID(buffer))
+        m_duration = createMediaTime(GST_BUFFER_DURATION(buffer));
+
+    m_size = gst_buffer_get_size(buffer);
+    m_sample = sample;
+
+    // A delta unit depends on earlier frames, so it is not a sync sample.
+    if (GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT))
+        m_flags = MediaSample::None;
+
+    // Decode-only buffers are fed to the decoder but never presented.
+    if (GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DECODE_ONLY))
+        m_flags = static_cast<MediaSample::SampleFlags>(m_flags | MediaSample::IsNonDisplaying);
+}
+
+// Builds a non-displaying sample with no backing GstSample, carrying only the
+// given timing information. Used as a placeholder where real media data is not
+// available.
+Ref<GStreamerMediaSample> GStreamerMediaSample::createFakeSample(GstCaps*, MediaTime pts, MediaTime dts, MediaTime duration, const FloatSize& presentationSize, const AtomicString& trackId)
+{
+    auto fakeSample = adoptRef(*new GStreamerMediaSample(nullptr, presentationSize, trackId));
+    fakeSample->m_pts = pts;
+    fakeSample->m_dts = dts;
+    fakeSample->m_duration = duration;
+    fakeSample->m_flags = MediaSample::IsNonDisplaying;
+    return fakeSample;
+}
+
+// Pulls the presentation time back to timestampOffset when the sample starts
+// later than it, growing the duration so the sample still ends at the same
+// time. No-op when m_pts <= timestampOffset.
+// NOTE(review): only PTS is adjusted, not DTS or the GstBuffer — confirm
+// callers expect that.
+void GStreamerMediaSample::applyPtsOffset(MediaTime timestampOffset)
+{
+    if (m_pts > timestampOffset) {
+        m_duration = m_duration + (m_pts - timestampOffset);
+        m_pts = timestampOffset;
+    }
+}
+
+// Shifts both PTS and DTS by timestampOffset and writes the new values back
+// into the underlying GstBuffer, when one exists.
+// NOTE(review): the write-back converts through toFloat(), which may lose
+// precision for large timestamps — confirm that is acceptable.
+void GStreamerMediaSample::offsetTimestampsBy(const MediaTime& timestampOffset)
+{
+    if (!timestampOffset)
+        return;
+    m_pts += timestampOffset;
+    m_dts += timestampOffset;
+    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
+    if (buffer) {
+        GST_BUFFER_PTS(buffer) = toGstClockTime(m_pts.toFloat());
+        GST_BUFFER_DTS(buffer) = toGstClockTime(m_dts.toFloat());
+    }
+}
+
+// Returns a copy of this sample flagged as non-displaying (decode-only).
+// Falls back to a fake sample when there is no backing GstSample.
+// NOTE(review): GST_BUFFER_FLAG_DECODE_ONLY is set on the *shared* GstBuffer,
+// so the original sample's buffer is mutated as well — confirm intended.
+Ref<MediaSample> GStreamerMediaSample::createNonDisplayingCopy() const
+{
+    if (!m_sample)
+        return createFakeSample(nullptr, m_pts, m_dts, m_duration, m_presentationSize, m_trackId);
+
+    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
+    GST_BUFFER_FLAG_SET(buffer, GST_BUFFER_FLAG_DECODE_ONLY);
+
+    // Re-wrap the (now flagged) buffer with copies of the caps/segment/info so
+    // the new sample is independent of this one's GstSample.
+    GstCaps* caps = gst_sample_get_caps(m_sample.get());
+    GstSegment* segment = gst_sample_get_segment(m_sample.get());
+    const GstStructure* originalInfo = gst_sample_get_info(m_sample.get());
+    GstStructure* info = originalInfo ? gst_structure_copy(originalInfo) : nullptr;
+    GRefPtr<GstSample> sample = adoptGRef(gst_sample_new(buffer, caps, segment, info));
+
+    return adoptRef(*new GStreamerMediaSample(sample.get(), m_presentationSize, m_trackId));
+}
+
+} // namespace WebCore.
+
+#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaSample.h b/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaSample.h
new file mode 100644
index 000000000..49e12b5c3
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaSample.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2016 Metrological Group B.V.
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+#include "FloatSize.h"
+#include "GRefPtrGStreamer.h"
+#include "MediaSample.h"
+#include <gst/gst.h>
+#include <wtf/text/AtomicString.h>
+
+namespace WebCore {
+
+// MediaSample implementation backed by a GstSample. Timing fields are cached
+// at construction from the sample's GstBuffer; a "fake" variant with no
+// backing sample is available for placeholder purposes.
+class GStreamerMediaSample : public MediaSample {
+public:
+    static Ref<GStreamerMediaSample> create(GstSample* sample, const FloatSize& presentationSize, const AtomicString& trackId)
+    {
+        return adoptRef(*new GStreamerMediaSample(sample, presentationSize, trackId));
+    }
+
+    // Builds a non-displaying sample with no backing GstSample (caps are ignored).
+    static Ref<GStreamerMediaSample> createFakeSample(GstCaps*, MediaTime pts, MediaTime dts, MediaTime duration, const FloatSize& presentationSize, const AtomicString& trackId);
+
+    void applyPtsOffset(MediaTime);
+    MediaTime presentationTime() const override { return m_pts; }
+    MediaTime decodeTime() const override { return m_dts; }
+    MediaTime duration() const override { return m_duration; }
+    AtomicString trackID() const override { return m_trackId; }
+    void setTrackID(const String& trackId) override { m_trackId = trackId; }
+    size_t sizeInBytes() const override { return m_size; }
+    GstSample* sample() const { return m_sample.get(); }
+    FloatSize presentationSize() const override { return m_presentationSize; }
+    void offsetTimestampsBy(const MediaTime&) override;
+    // Retiming and splitting are not supported by this backend.
+    void setTimestamps(const MediaTime&, const MediaTime&) override { }
+    bool isDivisable() const override { return false; }
+    std::pair<RefPtr<MediaSample>, RefPtr<MediaSample>> divide(const MediaTime&) override { return { nullptr, nullptr }; }
+    Ref<MediaSample> createNonDisplayingCopy() const override;
+    SampleFlags flags() const override { return m_flags; }
+    PlatformSample platformSample() override { return PlatformSample(); }
+    void dump(PrintStream&) const override { }
+
+private:
+    GStreamerMediaSample(GstSample*, const FloatSize& presentationSize, const AtomicString& trackId);
+    virtual ~GStreamerMediaSample() = default;
+
+    MediaTime m_pts;
+    MediaTime m_dts;
+    MediaTime m_duration;
+    AtomicString m_trackId;
+    size_t m_size;
+    GRefPtr<GstSample> m_sample;
+    FloatSize m_presentationSize;
+    MediaSample::SampleFlags m_flags;
+};
+
+} // namespace WebCore.
+
+#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp b/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp
new file mode 100644
index 000000000..4614eb9b9
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp
@@ -0,0 +1,860 @@
+/*
+ * Copyright (C) 2007, 2009 Apple Inc. All rights reserved.
+ * Copyright (C) 2007 Collabora Ltd. All rights reserved.
+ * Copyright (C) 2007 Alp Toker <alp@atoker.com>
+ * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
+ * Copyright (C) 2009, 2010, 2011, 2012, 2013, 2016 Igalia S.L
+ * Copyright (C) 2015 Sebastian Dröge <sebastian@centricular.com>
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "config.h"
+#include "MediaPlayerPrivateGStreamerMSE.h"
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+#include "AppendPipeline.h"
+#include "AudioTrackPrivateGStreamer.h"
+#include "GStreamerUtilities.h"
+#include "InbandTextTrackPrivateGStreamer.h"
+#include "MIMETypeRegistry.h"
+#include "MediaDescription.h"
+#include "MediaPlayer.h"
+#include "NotImplemented.h"
+#include "SourceBufferPrivateGStreamer.h"
+#include "TimeRanges.h"
+#include "URL.h"
+#include "VideoTrackPrivateGStreamer.h"
+
+#include <fnmatch.h>
+#include <gst/app/gstappsink.h>
+#include <gst/app/gstappsrc.h>
+#include <gst/gst.h>
+#include <gst/pbutils/pbutils.h>
+#include <gst/video/video.h>
+#include <wtf/Condition.h>
+#include <wtf/NeverDestroyed.h>
+
+// Maps a MediaPlayer ready state to a human-readable name for GST_DEBUG logging.
+static const char* dumpReadyState(WebCore::MediaPlayer::ReadyState readyState)
+{
+    switch (readyState) {
+    case WebCore::MediaPlayer::HaveNothing: return "HaveNothing";
+    case WebCore::MediaPlayer::HaveMetadata: return "HaveMetadata";
+    case WebCore::MediaPlayer::HaveCurrentData: return "HaveCurrentData";
+    case WebCore::MediaPlayer::HaveFutureData: return "HaveFutureData";
+    case WebCore::MediaPlayer::HaveEnoughData: return "HaveEnoughData";
+    default: return "(unknown)";
+    }
+}
+
+GST_DEBUG_CATEGORY(webkit_mse_debug);
+#define GST_CAT_DEFAULT webkit_mse_debug
+
+namespace WebCore {
+
+// Registers this engine with the global MediaPlayer factory. Only registers
+// when GStreamer and the required WebKit elements are available.
+void MediaPlayerPrivateGStreamerMSE::registerMediaEngine(MediaEngineRegistrar registrar)
+{
+    if (isAvailable()) {
+        registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateGStreamerMSE>(player); },
+            getSupportedTypes, supportsType, nullptr, nullptr, nullptr, supportsKeySystem);
+    }
+}
+
+// Initializes GStreamer (if needed), sets up the MSE debug category and makes
+// sure the "webkitmediasrc" element is registered with a rank high enough to be
+// picked by playbin. Returns false only if GStreamer itself failed to initialize.
+// Safe to call multiple times: registration is skipped once the factory exists.
+bool initializeGStreamerAndRegisterWebKitMSEElement()
+{
+    if (UNLIKELY(!initializeGStreamer()))
+        return false;
+
+    registerWebKitGStreamerElements();
+
+    GST_DEBUG_CATEGORY_INIT(webkit_mse_debug, "webkitmse", 0, "WebKit MSE media player");
+
+    GRefPtr<GstElementFactory> WebKitMediaSrcFactory = adoptGRef(gst_element_factory_find("webkitmediasrc"));
+    if (UNLIKELY(!WebKitMediaSrcFactory))
+        gst_element_register(nullptr, "webkitmediasrc", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_MEDIA_SRC);
+    return true;
+}
+
+// The engine is available when GStreamer initializes and a "playbin" factory
+// exists, since the playback pipeline is built on playbin.
+bool MediaPlayerPrivateGStreamerMSE::isAvailable()
+{
+    if (UNLIKELY(!initializeGStreamerAndRegisterWebKitMSEElement()))
+        return false;
+
+    GRefPtr<GstElementFactory> factory = adoptGRef(gst_element_factory_find("playbin"));
+    return factory;
+}
+
+// Construction is cheap: the pipeline is only built later, in load().
+MediaPlayerPrivateGStreamerMSE::MediaPlayerPrivateGStreamerMSE(MediaPlayer* player)
+    : MediaPlayerPrivateGStreamer(player)
+{
+    GST_TRACE("creating the player (%p)", this);
+}
+
+// Breaks back-references from the append pipelines and the source element so
+// no callback can reach this object after destruction.
+MediaPlayerPrivateGStreamerMSE::~MediaPlayerPrivateGStreamerMSE()
+{
+    GST_TRACE("destroying the player (%p)", this);
+
+    // Append pipelines keep a raw pointer to this player; clear it first.
+    for (auto iterator : m_appendPipelinesMap)
+        iterator.value->clearPlayerPrivate();
+
+    if (m_source) {
+        webKitMediaSrcSetMediaPlayerPrivate(WEBKIT_MEDIA_SRC(m_source.get()), nullptr);
+        g_signal_handlers_disconnect_by_data(m_source.get(), this);
+    }
+
+    if (m_playbackPipeline)
+        m_playbackPipeline->setWebKitMediaSrc(nullptr);
+}
+
+// Accepts only "mediasource..." URLs (synthesized by the MSE load() overload
+// below); anything else is reported as a format error so another
+// MediaPlayerPrivate implementation can be tried.
+void MediaPlayerPrivateGStreamerMSE::load(const String& urlString)
+{
+    if (!urlString.startsWith("mediasource")) {
+        // Properly fail so the global MediaPlayer tries to fallback to the next MediaPlayerPrivate.
+        m_networkState = MediaPlayer::FormatError;
+        m_player->networkStateChanged();
+        return;
+    }
+
+    if (UNLIKELY(!initializeGStreamerAndRegisterWebKitMSEElement()))
+        return;
+
+    if (!m_playbackPipeline)
+        m_playbackPipeline = PlaybackPipeline::create();
+
+    MediaPlayerPrivateGStreamer::load(urlString);
+}
+
+// MSE entry point: remembers the MediaSource client and funnels into the
+// string overload with a "mediasource"-prefixed URL so it passes the check there.
+void MediaPlayerPrivateGStreamerMSE::load(const String& url, MediaSourcePrivateClient* mediaSource)
+{
+    m_mediaSource = mediaSource;
+    load(String::format("mediasource%s", url.utf8().data()));
+}
+
+// Records the paused flag eagerly before delegating, so seeking/update logic
+// sees the right value even while the async state change is in flight.
+void MediaPlayerPrivateGStreamerMSE::pause()
+{
+    m_paused = true;
+    MediaPlayerPrivateGStreamer::pause();
+}
+
+// Duration comes from the MediaSource client (cached in m_mediaTimeDuration),
+// not from a pipeline query. Invalid time when the pipeline is gone or errored.
+MediaTime MediaPlayerPrivateGStreamerMSE::durationMediaTime() const
+{
+    if (UNLIKELY(!m_pipeline || m_errorOccured))
+        return MediaTime();
+
+    return m_mediaTimeDuration;
+}
+
+// Public seek request. Filters out no-op seeks, coalesces a new target into a
+// pending seek, and otherwise kicks off doSeek(), restoring the previous
+// target on failure.
+void MediaPlayerPrivateGStreamerMSE::seek(float time)
+{
+    if (UNLIKELY(!m_pipeline || m_errorOccured))
+        return;
+
+    GST_INFO("[Seek] seek attempt to %f secs", time);
+
+    // Avoid useless seeking.
+    // Exact float comparison is intentional: only a request for literally the
+    // same position is treated as a no-op.
+    float current = currentMediaTime().toFloat();
+    if (time == current) {
+        if (!m_seeking)
+            timeChanged();
+        return;
+    }
+
+    if (isLiveStream())
+        return;
+
+    // A seek is already queued; just update its target time.
+    if (m_seeking && m_seekIsPending) {
+        m_seekTime = time;
+        return;
+    }
+
+    GST_DEBUG("Seeking from %f to %f seconds", current, time);
+
+    float prevSeekTime = m_seekTime;
+    m_seekTime = time;
+
+    if (!doSeek()) {
+        m_seekTime = prevSeekTime;
+        GST_WARNING("Seeking to %f failed", time);
+        return;
+    }
+
+    m_isEndReached = false;
+    GST_DEBUG("m_seeking=%s, m_seekTime=%f", m_seeking ? "true" : "false", m_seekTime);
+}
+
+// Extends the base configuration with an MSE-specific playsink tweak.
+void MediaPlayerPrivateGStreamerMSE::configurePlaySink()
+{
+    MediaPlayerPrivateGStreamer::configurePlaySink();
+
+    GRefPtr<GstElement> playsink = adoptGRef(gst_bin_get_by_name(GST_BIN(m_pipeline.get()), "playsink"));
+    if (playsink) {
+        // The default value (0) means "send events to all the sinks", instead
+        // of "only to the first that returns true". This is needed for MSE seek.
+        g_object_set(G_OBJECT(playsink.get()), "send-event-mode", 0, nullptr);
+    }
+}
+
+// Refuses state changes while a seek is in progress; the seek machinery drives
+// the pipeline state itself. Returning true tells callers the request was
+// accepted (and deliberately dropped), not that it failed.
+bool MediaPlayerPrivateGStreamerMSE::changePipelineState(GstState newState)
+{
+    if (seeking()) {
+        GST_DEBUG("Rejected state change to %s while seeking",
+            gst_element_state_get_name(newState));
+        return true;
+    }
+
+    return MediaPlayerPrivateGStreamer::changePipelineState(newState);
+}
+
+// Called when the pipeline is ready for samples at the seek target: asks the
+// MediaSource to re-enqueue data for that time and marks the GStreamer side of
+// the seek as finished.
+void MediaPlayerPrivateGStreamerMSE::notifySeekNeedsDataForTime(const MediaTime& seekTime)
+{
+    // Reenqueue samples needed to resume playback in the new position.
+    m_mediaSource->seekToTime(seekTime);
+
+    GST_DEBUG("MSE seek to %f finished", seekTime.toDouble());
+
+    if (!m_gstSeekCompleted) {
+        m_gstSeekCompleted = true;
+        maybeFinishSeek();
+    }
+}
+
+// The base-class seek entry point is not supported in MSE mode.
+bool MediaPlayerPrivateGStreamerMSE::doSeek(gint64, float, GstSeekFlags)
+{
+    // Use doSeek() instead. If anybody is calling this version of doSeek(), something is wrong.
+    ASSERT_NOT_REACHED();
+    return false;
+}
+
+// Core of the MSE seek state machine. A seek can be (a) delayed because the
+// pipeline is mid-transition or at EOS, (b) deferred until MSE has buffered
+// data for the target time, or (c) executed immediately via gst_element_seek().
+// Returns whether the player is (still) considered to be seeking.
+bool MediaPlayerPrivateGStreamerMSE::doSeek()
+{
+    GstClockTime position = toGstClockTime(m_seekTime);
+    MediaTime seekTime = MediaTime::createWithDouble(m_seekTime);
+    double rate = m_player->rate();
+    GstSeekFlags seekType = static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE);
+
+    // Always move to seeking state to report correct 'currentTime' while pending for actual seek to complete.
+    m_seeking = true;
+
+    // Check if playback pipeline is ready for seek.
+    GstState state, newState;
+    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &newState, 0);
+    if (getStateResult == GST_STATE_CHANGE_FAILURE || getStateResult == GST_STATE_CHANGE_NO_PREROLL) {
+        GST_DEBUG("[Seek] cannot seek, current state change is %s", gst_element_state_get_name(getStateResult));
+        webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
+        m_seeking = false;
+        return false;
+    }
+    // Delay the seek when a state change is still in flight (other than the
+    // benign PLAYING->PAUSED case), the pipeline is below PAUSED, we are at
+    // EOS, or a previous GStreamer seek hasn't completed yet.
+    if ((getStateResult == GST_STATE_CHANGE_ASYNC
+        && !(state == GST_STATE_PLAYING && newState == GST_STATE_PAUSED))
+        || state < GST_STATE_PAUSED
+        || m_isEndReached
+        || !m_gstSeekCompleted) {
+        CString reason = "Unknown reason";
+        if (getStateResult == GST_STATE_CHANGE_ASYNC) {
+            reason = String::format("In async change %s --> %s",
+                gst_element_state_get_name(state),
+                gst_element_state_get_name(newState)).utf8();
+        } else if (state < GST_STATE_PAUSED)
+            reason = "State less than PAUSED";
+        else if (m_isEndReached)
+            reason = "End reached";
+        else if (!m_gstSeekCompleted)
+            reason = "Previous seek is not finished yet";
+
+        GST_DEBUG("[Seek] Delaying the seek: %s", reason.data());
+
+        m_seekIsPending = true;
+
+        if (m_isEndReached) {
+            GST_DEBUG("[Seek] reset pipeline");
+            m_resetPipeline = true;
+            // m_seeking must be false temporarily: changePipelineState()
+            // rejects requests while seeking.
+            m_seeking = false;
+            if (!changePipelineState(GST_STATE_PAUSED))
+                loadingFailed(MediaPlayer::Empty);
+            else
+                m_seeking = true;
+        }
+
+        return m_seeking;
+    }
+
+    // Stop accepting new samples until actual seek is finished.
+    webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), false);
+
+    // Correct seek time if it helps to fix a small gap.
+    if (!isTimeBuffered(seekTime)) {
+        // Look if a near future time (<0.1 sec.) is buffered and change the seek target time.
+        if (m_mediaSource) {
+            const MediaTime miniGap = MediaTime::createWithDouble(0.1);
+            MediaTime nearest = m_mediaSource->buffered()->nearest(seekTime);
+            if (nearest.isValid() && nearest > seekTime && (nearest - seekTime) <= miniGap && isTimeBuffered(nearest + miniGap)) {
+                GST_DEBUG("[Seek] Changed the seek target time from %f to %f, a near point in the future", seekTime.toFloat(), nearest.toFloat());
+                seekTime = nearest;
+            }
+        }
+    }
+
+    // Check if MSE has samples for requested time and defer actual seek if needed.
+    if (!isTimeBuffered(seekTime)) {
+        GST_DEBUG("[Seek] Delaying the seek: MSE is not ready");
+        GstStateChangeReturn setStateResult = gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
+        if (setStateResult == GST_STATE_CHANGE_FAILURE) {
+            GST_DEBUG("[Seek] Cannot seek, failed to pause playback pipeline.");
+            webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
+            m_seeking = false;
+            return false;
+        }
+        m_readyState = MediaPlayer::HaveMetadata;
+        notifySeekNeedsDataForTime(seekTime);
+        ASSERT(!m_mseSeekCompleted);
+        return true;
+    }
+
+    // Complete previous MSE seek if needed.
+    if (!m_mseSeekCompleted) {
+        m_mediaSource->monitorSourceBuffers();
+        ASSERT(m_mseSeekCompleted);
+        // Note: seekCompleted will recursively call us.
+        return m_seeking;
+    }
+
+    GST_DEBUG("We can seek now");
+
+    gint64 startTime = position, endTime = GST_CLOCK_TIME_NONE;
+    if (rate < 0) {
+        startTime = 0;
+        endTime = position;
+    }
+
+    // A zero rate would make gst_element_seek() fail; treat it as normal speed.
+    if (!rate)
+        rate = 1;
+
+    GST_DEBUG("Actual seek to %" GST_TIME_FORMAT ", end time: %" GST_TIME_FORMAT ", rate: %f", GST_TIME_ARGS(startTime), GST_TIME_ARGS(endTime), rate);
+
+    // This will call notifySeekNeedsData() after some time to tell that the pipeline is ready for sample enqueuing.
+    webKitMediaSrcPrepareSeek(WEBKIT_MEDIA_SRC(m_source.get()), seekTime);
+
+    m_gstSeekCompleted = false;
+    if (!gst_element_seek(m_pipeline.get(), rate, GST_FORMAT_TIME, seekType, GST_SEEK_TYPE_SET, startTime, GST_SEEK_TYPE_SET, endTime)) {
+        webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
+        m_seeking = false;
+        m_gstSeekCompleted = true;
+        GST_DEBUG("doSeek(): gst_element_seek() failed, returning false");
+        return false;
+    }
+
+    // The samples will be enqueued in notifySeekNeedsData().
+    GST_DEBUG("doSeek(): gst_element_seek() succeeded, returning true");
+    return true;
+}
+
+// Finishes a seek once both sides (MSE and GStreamer) report completion and
+// the pipeline is no longer mid-transition. May instead commit a queued seek.
+void MediaPlayerPrivateGStreamerMSE::maybeFinishSeek()
+{
+    if (!m_seeking || !m_mseSeekCompleted || !m_gstSeekCompleted)
+        return;
+
+    GstState state, newState;
+    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &newState, 0);
+
+    if (getStateResult == GST_STATE_CHANGE_ASYNC
+        && !(state == GST_STATE_PLAYING && newState == GST_STATE_PAUSED)) {
+        GST_DEBUG("[Seek] Delaying seek finish");
+        return;
+    }
+
+    if (m_seekIsPending) {
+        GST_DEBUG("[Seek] Committing pending seek to %f", m_seekTime);
+        m_seekIsPending = false;
+        if (!doSeek()) {
+            GST_WARNING("[Seek] Seeking to %f failed", m_seekTime);
+            // Invalidate the cached position so the next query hits the pipeline.
+            m_cachedPosition = -1;
+        }
+        return;
+    }
+
+    GST_DEBUG("[Seek] Seeked to %f", m_seekTime);
+
+    webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
+    m_seeking = false;
+    m_cachedPosition = -1;
+    // The pipeline can still have a pending state. In this case a position query will fail.
+    // Right now we can use m_seekTime as a fallback.
+    m_canFallBackToLastFinishedSeekPosition = true;
+    timeChanged();
+}
+
+// Rate changes are not supported yet in MSE mode.
+void MediaPlayerPrivateGStreamerMSE::updatePlaybackRate()
+{
+    notImplemented();
+}
+
+// True while a seek (including a pending/deferred one) is in progress.
+bool MediaPlayerPrivateGStreamerMSE::seeking() const
+{
+    return m_seeking;
+}
+
+// FIXME: MediaPlayerPrivateGStreamer manages the ReadyState on its own. We shouldn't change it manually.
+// FIXME: MediaPlayerPrivateGStreamer manages the ReadyState on its own. We shouldn't change it manually.
+// Manually overrides the ready state (skipped during seeks), notifies the
+// player, and pauses the pipeline when dropping back to HaveMetadata while playing.
+void MediaPlayerPrivateGStreamerMSE::setReadyState(MediaPlayer::ReadyState readyState)
+{
+    if (readyState == m_readyState)
+        return;
+
+    if (seeking()) {
+        GST_DEBUG("Skip ready state change(%s -> %s) due to seek\n", dumpReadyState(m_readyState), dumpReadyState(readyState));
+        return;
+    }
+
+    GST_DEBUG("Ready State Changed manually from %u to %u", m_readyState, readyState);
+    MediaPlayer::ReadyState oldReadyState = m_readyState;
+    m_readyState = readyState;
+    GST_DEBUG("m_readyState: %s -> %s", dumpReadyState(oldReadyState), dumpReadyState(m_readyState));
+
+    // Crossing into HaveCurrentData may unblock a seek waiting for data.
+    if (oldReadyState < MediaPlayer::HaveCurrentData && m_readyState >= MediaPlayer::HaveCurrentData) {
+        GST_DEBUG("[Seek] Reporting load state changed to trigger seek continuation");
+        loadStateChanged();
+    }
+    m_player->readyStateChanged();
+
+    GstState pipelineState;
+    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &pipelineState, nullptr, 250 * GST_NSECOND);
+    bool isPlaying = (getStateResult == GST_STATE_CHANGE_SUCCESS && pipelineState == GST_STATE_PLAYING);
+
+    if (m_readyState == MediaPlayer::HaveMetadata && oldReadyState > MediaPlayer::HaveMetadata && isPlaying) {
+        GST_TRACE("Changing pipeline to PAUSED...");
+        bool ok = changePipelineState(GST_STATE_PAUSED);
+        GST_TRACE("Changed pipeline to PAUSED: %s", ok ? "Success" : "Error");
+    }
+}
+
+// Marks the MSE half of a seek as pending; seekCompleted() clears it later.
+void MediaPlayerPrivateGStreamerMSE::waitForSeekCompleted()
+{
+    if (!m_seeking)
+        return;
+
+    GST_DEBUG("Waiting for MSE seek completed");
+    m_mseSeekCompleted = false;
+}
+
+// Called when the MediaSource has the data for the seek target. Re-enters
+// doSeek() to resume the pipeline-side seek, then resumes playback or reports
+// the time change if seeking is fully done.
+void MediaPlayerPrivateGStreamerMSE::seekCompleted()
+{
+    if (m_mseSeekCompleted)
+        return;
+
+    GST_DEBUG("MSE seek completed");
+    m_mseSeekCompleted = true;
+
+    doSeek();
+
+    if (!seeking() && m_readyState >= MediaPlayer::HaveFutureData)
+        changePipelineState(GST_STATE_PLAYING);
+
+    if (!seeking())
+        m_player->timeChanged();
+}
+
+// Rate changes are not supported yet in MSE mode.
+void MediaPlayerPrivateGStreamerMSE::setRate(float)
+{
+    notImplemented();
+}
+
+// Buffered ranges come from the MediaSource; empty ranges when there is none.
+std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateGStreamerMSE::buffered() const
+{
+    return m_mediaSource ? m_mediaSource->buffered() : std::make_unique<PlatformTimeRanges>();
+}
+
+// Called when playbin exposes its source element: wires the WebKitMediaSrc to
+// the playback pipeline, opens the MediaSource, and connects track signals.
+// NOTE(review): assumes load() ran first so m_playbackPipeline and
+// m_mediaSource are non-null — confirm against callers.
+void MediaPlayerPrivateGStreamerMSE::sourceChanged()
+{
+    m_source = nullptr;
+    g_object_get(m_pipeline.get(), "source", &m_source.outPtr(), nullptr);
+
+    ASSERT(WEBKIT_IS_MEDIA_SRC(m_source.get()));
+
+    m_playbackPipeline->setWebKitMediaSrc(WEBKIT_MEDIA_SRC(m_source.get()));
+
+    MediaSourceGStreamer::open(*m_mediaSource.get(), *this);
+    g_signal_connect_swapped(m_source.get(), "video-changed", G_CALLBACK(videoChangedCallback), this);
+    g_signal_connect_swapped(m_source.get(), "audio-changed", G_CALLBACK(audioChangedCallback), this);
+    g_signal_connect_swapped(m_source.get(), "text-changed", G_CALLBACK(textChangedCallback), this);
+    webKitMediaSrcSetMediaPlayerPrivate(WEBKIT_MEDIA_SRC(m_source.get()), this);
+}
+
+// Polls the pipeline state and derives ready/network states from it, also
+// handling buffering, EOS, live (NO_PREROLL) pipelines, and play/pause
+// synchronization. Notifies the player of any state changes at the end.
+void MediaPlayerPrivateGStreamerMSE::updateStates()
+{
+    if (UNLIKELY(!m_pipeline || m_errorOccured))
+        return;
+
+    MediaPlayer::NetworkState oldNetworkState = m_networkState;
+    MediaPlayer::ReadyState oldReadyState = m_readyState;
+    GstState state, pending;
+
+    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &pending, 250 * GST_NSECOND);
+
+    bool shouldUpdatePlaybackState = false;
+    switch (getStateResult) {
+    case GST_STATE_CHANGE_SUCCESS: {
+        GST_DEBUG("State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
+
+        // Do nothing if on EOS and state changed to READY to avoid recreating the player
+        // on HTMLMediaElement and properly generate the video 'ended' event.
+        if (m_isEndReached && state == GST_STATE_READY)
+            break;
+
+        m_resetPipeline = (state <= GST_STATE_READY);
+        if (m_resetPipeline)
+            m_mediaTimeDuration = MediaTime::zeroTime();
+
+        // Update ready and network states.
+        switch (state) {
+        case GST_STATE_NULL:
+            m_readyState = MediaPlayer::HaveNothing;
+            GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
+            m_networkState = MediaPlayer::Empty;
+            break;
+        case GST_STATE_READY:
+            m_readyState = MediaPlayer::HaveMetadata;
+            GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
+            m_networkState = MediaPlayer::Empty;
+            break;
+        case GST_STATE_PAUSED:
+        case GST_STATE_PLAYING:
+            if (seeking()) {
+                m_readyState = MediaPlayer::HaveMetadata;
+                // FIXME: Should we manage NetworkState too?
+                GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
+            } else if (m_buffering) {
+                if (m_bufferingPercentage == 100) {
+                    GST_DEBUG("[Buffering] Complete.");
+                    m_buffering = false;
+                    m_readyState = MediaPlayer::HaveEnoughData;
+                    GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
+                    m_networkState = m_downloadFinished ? MediaPlayer::Idle : MediaPlayer::Loading;
+                } else {
+                    m_readyState = MediaPlayer::HaveCurrentData;
+                    GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
+                    m_networkState = MediaPlayer::Loading;
+                }
+            } else if (m_downloadFinished) {
+                m_readyState = MediaPlayer::HaveEnoughData;
+                GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
+                m_networkState = MediaPlayer::Loaded;
+            } else {
+                m_readyState = MediaPlayer::HaveFutureData;
+                GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
+                m_networkState = MediaPlayer::Loading;
+            }
+
+            // Defer the EOS report until we actually reach PLAYING.
+            if (m_eosMarked && state == GST_STATE_PLAYING)
+                m_eosPending = true;
+
+            break;
+        default:
+            ASSERT_NOT_REACHED();
+            break;
+        }
+
+        // Sync states where needed.
+        if (state == GST_STATE_PAUSED) {
+            if (!m_volumeAndMuteInitialized) {
+                notifyPlayerOfVolumeChange();
+                notifyPlayerOfMute();
+                m_volumeAndMuteInitialized = true;
+            }
+
+            if (!seeking() && !m_buffering && !m_paused && m_playbackRate) {
+                GST_DEBUG("[Buffering] Restarting playback.");
+                changePipelineState(GST_STATE_PLAYING);
+            }
+        } else if (state == GST_STATE_PLAYING) {
+            m_paused = false;
+
+            if ((m_buffering && !isLiveStream()) || !m_playbackRate) {
+                GST_DEBUG("[Buffering] Pausing stream for buffering.");
+                changePipelineState(GST_STATE_PAUSED);
+            }
+        } else
+            m_paused = true;
+
+        if (m_requestedState == GST_STATE_PAUSED && state == GST_STATE_PAUSED) {
+            shouldUpdatePlaybackState = true;
+            GST_DEBUG("Requested state change to %s was completed", gst_element_state_get_name(state));
+        }
+
+        break;
+    }
+    case GST_STATE_CHANGE_ASYNC:
+        GST_DEBUG("Async: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
+        // Change in progress.
+        break;
+    case GST_STATE_CHANGE_FAILURE:
+        GST_WARNING("Failure: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
+        // Change failed.
+        return;
+    case GST_STATE_CHANGE_NO_PREROLL:
+        GST_DEBUG("No preroll: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
+
+        // Live pipelines go in PAUSED without prerolling.
+        m_isStreaming = true;
+
+        if (state == GST_STATE_READY) {
+            m_readyState = MediaPlayer::HaveNothing;
+            GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
+        } else if (state == GST_STATE_PAUSED) {
+            m_readyState = MediaPlayer::HaveEnoughData;
+            GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
+            m_paused = true;
+        } else if (state == GST_STATE_PLAYING)
+            m_paused = false;
+
+        if (!m_paused && m_playbackRate)
+            changePipelineState(GST_STATE_PLAYING);
+
+        m_networkState = MediaPlayer::Loading;
+        break;
+    default:
+        GST_DEBUG("Else : %d", getStateResult);
+        break;
+    }
+
+    m_requestedState = GST_STATE_VOID_PENDING;
+
+    if (shouldUpdatePlaybackState)
+        m_player->playbackStateChanged();
+
+    if (m_networkState != oldNetworkState) {
+        GST_DEBUG("Network State Changed from %u to %u", oldNetworkState, m_networkState);
+        m_player->networkStateChanged();
+    }
+    if (m_readyState != oldReadyState) {
+        GST_DEBUG("Ready State Changed from %u to %u", oldReadyState, m_readyState);
+        m_player->readyStateChanged();
+    }
+
+    // A settled pipeline at PAUSED or above may allow a pending seek to finish.
+    if (getStateResult == GST_STATE_CHANGE_SUCCESS && state >= GST_STATE_PAUSED) {
+        updatePlaybackRate();
+        maybeFinishSeek();
+    }
+}
+// Called when an async pipeline state change completes; either advances a
+// pending seek or refreshes the derived player states.
+void MediaPlayerPrivateGStreamerMSE::asyncStateChangeDone()
+{
+    if (UNLIKELY(!m_pipeline || m_errorOccured))
+        return;
+
+    if (m_seeking)
+        maybeFinishSeek();
+    else
+        updateStates();
+}
+
+// True when the MediaSource's buffered ranges contain the given time.
+bool MediaPlayerPrivateGStreamerMSE::isTimeBuffered(const MediaTime &time) const
+{
+    bool result = m_mediaSource && m_mediaSource->buffered()->contain(time);
+    GST_DEBUG("Time %f buffered? %s", time.toDouble(), result ? "Yes" : "No");
+    return result;
+}
+
+// Stores a (non-owning via RefPtr) reference to the MediaSource client.
+void MediaPlayerPrivateGStreamerMSE::setMediaSourceClient(Ref<MediaSourceClientGStreamerMSE> client)
+{
+    m_mediaSourceClient = client.ptr();
+}
+
+// Accessor for the MediaSource client set in setMediaSourceClient(); may be null.
+RefPtr<MediaSourceClientGStreamerMSE> MediaPlayerPrivateGStreamerMSE::mediaSourceClient()
+{
+    return m_mediaSourceClient;
+}
+
+// Refreshes the cached duration from the MediaSource client and propagates the
+// change to the player, the playback pipeline and the MediaSource.
+void MediaPlayerPrivateGStreamerMSE::durationChanged()
+{
+    if (!m_mediaSourceClient) {
+        GST_DEBUG("m_mediaSourceClient is null, doing nothing");
+        return;
+    }
+
+    MediaTime previousDuration = m_mediaTimeDuration;
+    m_mediaTimeDuration = m_mediaSourceClient->duration();
+
+    GST_TRACE("previous=%f, new=%f", previousDuration.toFloat(), m_mediaTimeDuration.toFloat());
+
+    // Avoid emiting durationchanged in the case where the previous duration was 0 because that case is already handled
+    // by the HTMLMediaElement.
+    // NOTE(review): this branch dereferences m_mediaSource without a null
+    // check; presumably a client implies a source by this point — confirm.
+    if (m_mediaTimeDuration != previousDuration && m_mediaTimeDuration.isValid() && previousDuration.isValid()) {
+        m_player->durationChanged();
+        m_playbackPipeline->notifyDurationChanged();
+        m_mediaSource->durationChanged(m_mediaTimeDuration);
+    }
+}
+
+// Lazily-built, process-wide set of container MIME types this engine accepts.
+// The lambda runs once; GStreamer initialization is a side effect of that first use.
+static HashSet<String, ASCIICaseInsensitiveHash>& mimeTypeCache()
+{
+    static NeverDestroyed<HashSet<String, ASCIICaseInsensitiveHash>> cache = []()
+    {
+        initializeGStreamerAndRegisterWebKitMSEElement();
+        HashSet<String, ASCIICaseInsensitiveHash> set;
+        const char* mimeTypes[] = {
+            "video/mp4",
+            "audio/mp4"
+        };
+        for (auto& type : mimeTypes)
+            set.add(type);
+        return set;
+    }();
+    return cache;
+}
+
+// Copies the cached MIME type set into the caller-provided set.
+void MediaPlayerPrivateGStreamerMSE::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
+{
+    types = mimeTypeCache();
+}
+
+// Invoked by an AppendPipeline when it has parsed a track. Updates the natural
+// video size from the caps (for video tracks) and attaches or reattaches the
+// track to the playback pipeline.
+void MediaPlayerPrivateGStreamerMSE::trackDetected(RefPtr<AppendPipeline> appendPipeline, RefPtr<WebCore::TrackPrivateBase> oldTrack, RefPtr<WebCore::TrackPrivateBase> newTrack)
+{
+    ASSERT(appendPipeline->track() == newTrack);
+
+    GstCaps* caps = appendPipeline->appsinkCaps();
+    ASSERT(caps);
+    GST_DEBUG("track ID: %s, caps: %" GST_PTR_FORMAT, newTrack->id().string().latin1().data(), caps);
+
+    GstStructure* structure = gst_caps_get_structure(caps, 0);
+    const gchar* mediaType = gst_structure_get_name(structure);
+    GstVideoInfo info;
+
+    if (g_str_has_prefix(mediaType, "video/") && gst_video_info_from_caps(&info, caps)) {
+        float width, height;
+
+        // Apply the pixel-aspect-ratio to the height to get display size.
+        // NOTE(review): assumes par_n is non-zero for valid video caps — confirm.
+        width = info.width;
+        height = info.height * ((float) info.par_d / (float) info.par_n);
+        m_videoSize.setWidth(width);
+        m_videoSize.setHeight(height);
+    }
+
+    if (!oldTrack)
+        m_playbackPipeline->attachTrack(appendPipeline->sourceBufferPrivate(), newTrack, structure, caps);
+    else
+        m_playbackPipeline->reattachTrack(appendPipeline->sourceBufferPrivate(), newTrack);
+}
+
+// Returns true only if every entry in the comma-separated codecs string
+// matches one of the fnmatch() patterns below (after stripping an optional
+// "type/" mimetype prefix).
+bool MediaPlayerPrivateGStreamerMSE::supportsCodecs(const String& codecs)
+{
+    static Vector<const char*> supportedCodecs = { "avc*", "mp4a*", "mpeg", "x-h264" };
+    Vector<String> codecEntries;
+    codecs.split(',', false, codecEntries);
+
+    for (String codec : codecEntries) {
+        bool isCodecSupported = false;
+
+        // If the codec is named like a mimetype (eg: video/avc) remove the "video/" part.
+        size_t slashIndex = codec.find('/');
+        if (slashIndex != WTF::notFound)
+            codec = codec.substring(slashIndex+1);
+
+        // Keep the CString alive for the whole matching loop. The previous
+        // code stored codec.utf8().data(), a pointer into a temporary CString
+        // destroyed at the end of the full expression, so the fnmatch() loop
+        // read through a dangling pointer.
+        CString codecString = codec.utf8();
+        const char* codecData = codecString.data();
+        for (const auto& pattern : supportedCodecs) {
+            isCodecSupported = !fnmatch(pattern, codecData, 0);
+            if (isCodecSupported)
+                break;
+        }
+        if (!isCodecSupported)
+            return false;
+    }
+
+    return true;
+}
+
+// MIME/codec support query for the media engine selection machinery. Only
+// answers for MSE requests; per spec, an empty codecs string can at best yield
+// "maybe supported".
+MediaPlayer::SupportsType MediaPlayerPrivateGStreamerMSE::supportsType(const MediaEngineSupportParameters& parameters)
+{
+    MediaPlayer::SupportsType result = MediaPlayer::IsNotSupported;
+    if (!parameters.isMediaSource)
+        return result;
+
+    // Disable VPX/Opus on MSE for now, mp4/avc1 seems way more reliable currently.
+    if (parameters.type.endsWith("webm"))
+        return result;
+
+    // YouTube TV provides empty types for some videos and we want to be selected as best media engine for them.
+    if (parameters.type.isEmpty()) {
+        result = MediaPlayer::MayBeSupported;
+        return result;
+    }
+
+    // Spec says we should not return "probably" if the codecs string is empty.
+    if (mimeTypeCache().contains(parameters.type)) {
+        if (parameters.codecs.isEmpty())
+            result = MediaPlayer::MayBeSupported;
+        else
+            result = supportsCodecs(parameters.codecs) ? MediaPlayer::IsSupported : MediaPlayer::IsNotSupported;
+    }
+
+    return extendedSupportsType(parameters, result);
+}
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+// Forwards a decryption key to every append pipeline (legacy EME path).
+void MediaPlayerPrivateGStreamerMSE::dispatchDecryptionKey(GstBuffer* buffer)
+{
+    for (auto it : m_appendPipelinesMap)
+        it.value->dispatchDecryptionKey(buffer);
+}
+#endif
+
+// Records that the MediaSource signalled a clean end of stream; the actual EOS
+// handling happens later in updateStates()/currentMediaTime(). Error statuses
+// are ignored here.
+void MediaPlayerPrivateGStreamerMSE::markEndOfStream(MediaSourcePrivate::EndOfStreamStatus status)
+{
+    if (status != MediaSourcePrivate::EosNoError)
+        return;
+
+    GST_DEBUG("Marking end of stream");
+    m_eosMarked = true;
+    updateStates();
+}
+
+// Returns the playback position, additionally turning a pending EOS into the
+// actual end-reached transition (duration as position, 'timeChanged' event)
+// once playback pauses or reaches the duration.
+MediaTime MediaPlayerPrivateGStreamerMSE::currentMediaTime() const
+{
+    MediaTime position = MediaPlayerPrivateGStreamer::currentMediaTime();
+
+    if (m_eosPending && (paused() || (position >= durationMediaTime()))) {
+        if (m_networkState != MediaPlayer::Loaded) {
+            m_networkState = MediaPlayer::Loaded;
+            m_player->networkStateChanged();
+        }
+
+        // Mutated members here are mutable: this is a const query method.
+        m_eosPending = false;
+        m_isEndReached = true;
+        m_cachedPosition = m_mediaTimeDuration.toFloat();
+        m_durationAtEOS = m_mediaTimeDuration.toFloat();
+        m_player->timeChanged();
+    }
+    return position;
+}
+
+// Seekable range ends at the duration; for an infinite duration (live-like
+// stream) it falls back to the highest buffered end time.
+float MediaPlayerPrivateGStreamerMSE::maxTimeSeekable() const
+{
+    if (UNLIKELY(m_errorOccured))
+        return 0;
+
+    GST_DEBUG("maxTimeSeekable");
+    float result = durationMediaTime().toFloat();
+    // Infinite duration means live stream.
+    if (std::isinf(result)) {
+        MediaTime maxBufferedTime = buffered()->maximumBufferedTime();
+        // Return the highest end time reported by the buffered attribute.
+        result = maxBufferedTime.isValid() ? maxBufferedTime.toFloat() : 0;
+    }
+
+    return result;
+}
+
+} // namespace WebCore.
+
+#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.h b/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.h
new file mode 100644
index 000000000..0d3ebb902
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.h
@@ -0,0 +1,132 @@
+/*
+ * Copyright (C) 2007, 2009 Apple Inc. All rights reserved.
+ * Copyright (C) 2007 Collabora Ltd. All rights reserved.
+ * Copyright (C) 2007 Alp Toker <alp@atoker.com>
+ * Copyright (C) 2009, 2010, 2016 Igalia S.L
+ * Copyright (C) 2015 Sebastian Dröge <sebastian@centricular.com>
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+#include "GRefPtrGStreamer.h"
+#include "MediaPlayerPrivateGStreamer.h"
+#include "MediaSample.h"
+#include "MediaSourceGStreamer.h"
+#include "PlaybackPipeline.h"
+#include "WebKitMediaSourceGStreamer.h"
+
+namespace WebCore {
+
+class MediaSourceClientGStreamerMSE;
+class AppendPipeline;
+class PlaybackPipeline;
+
+// MSE specialization of the GStreamer media player. Owns the playback pipeline
+// wrapper, a map of per-SourceBuffer append pipelines, and the seek state
+// shared between the MSE side (m_mseSeekCompleted) and the GStreamer side
+// (m_gstSeekCompleted).
+class MediaPlayerPrivateGStreamerMSE : public MediaPlayerPrivateGStreamer {
+    WTF_MAKE_NONCOPYABLE(MediaPlayerPrivateGStreamerMSE); WTF_MAKE_FAST_ALLOCATED;
+
+    friend class MediaSourceClientGStreamerMSE;
+
+public:
+    explicit MediaPlayerPrivateGStreamerMSE(MediaPlayer*);
+    virtual ~MediaPlayerPrivateGStreamerMSE();
+
+    static void registerMediaEngine(MediaEngineRegistrar);
+
+    void load(const String&) override;
+    void load(const String&, MediaSourcePrivateClient*) override;
+
+    void setDownloadBuffering() override { };
+
+    bool isLiveStream() const override { return false; }
+    MediaTime currentMediaTime() const override;
+
+    void pause() override;
+    bool seeking() const override;
+    void seek(float) override;
+    void configurePlaySink() override;
+    bool changePipelineState(GstState) override;
+
+    void durationChanged() override;
+    MediaTime durationMediaTime() const override;
+
+    void setRate(float) override;
+    std::unique_ptr<PlatformTimeRanges> buffered() const override;
+    float maxTimeSeekable() const override;
+
+    void sourceChanged() override;
+
+    void setReadyState(MediaPlayer::ReadyState);
+    void waitForSeekCompleted();
+    void seekCompleted();
+    MediaSourcePrivateClient* mediaSourcePrivateClient() { return m_mediaSource.get(); }
+
+    void markEndOfStream(MediaSourcePrivate::EndOfStreamStatus);
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+    void dispatchDecryptionKey(GstBuffer*) override;
+#endif
+
+    void trackDetected(RefPtr<AppendPipeline>, RefPtr<WebCore::TrackPrivateBase> oldTrack, RefPtr<WebCore::TrackPrivateBase> newTrack);
+    void notifySeekNeedsDataForTime(const MediaTime&);
+
+    static bool supportsCodecs(const String& codecs);
+
+private:
+    static void getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>&);
+    static MediaPlayer::SupportsType supportsType(const MediaEngineSupportParameters&);
+
+    static bool isAvailable();
+
+    // FIXME: Reduce code duplication.
+    void updateStates() override;
+
+    bool doSeek(gint64, float, GstSeekFlags) override;
+    bool doSeek();
+    void maybeFinishSeek();
+    void updatePlaybackRate() override;
+    void asyncStateChangeDone() override;
+
+    // FIXME: Implement.
+    unsigned long totalVideoFrames() override { return 0; }
+    unsigned long droppedVideoFrames() override { return 0; }
+    unsigned long corruptedVideoFrames() override { return 0; }
+    MediaTime totalFrameDelay() override { return MediaTime::zeroTime(); }
+    bool isTimeBuffered(const MediaTime&) const;
+
+    bool isMediaSource() const override { return true; }
+
+    void setMediaSourceClient(Ref<MediaSourceClientGStreamerMSE>);
+    RefPtr<MediaSourceClientGStreamerMSE> mediaSourceClient();
+
+    // One append pipeline per SourceBuffer.
+    HashMap<RefPtr<SourceBufferPrivateGStreamer>, RefPtr<AppendPipeline>> m_appendPipelinesMap;
+    bool m_eosMarked = false;
+    // mutable: flipped from the const currentMediaTime() query.
+    mutable bool m_eosPending = false;
+    bool m_gstSeekCompleted = true;
+    RefPtr<MediaSourcePrivateClient> m_mediaSource;
+    RefPtr<MediaSourceClientGStreamerMSE> m_mediaSourceClient;
+    MediaTime m_mediaTimeDuration;
+    bool m_mseSeekCompleted = true;
+    RefPtr<PlaybackPipeline> m_playbackPipeline;
+};
+
+} // namespace WebCore
+
+#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceClientGStreamerMSE.cpp b/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceClientGStreamerMSE.cpp
new file mode 100644
index 000000000..441401e6a
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceClientGStreamerMSE.cpp
@@ -0,0 +1,216 @@
+/*
+ * Copyright (C) 2016 Metrological Group B.V.
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "config.h"
+#include "MediaSourceClientGStreamerMSE.h"
+
+#include "AppendPipeline.h"
+#include "MediaPlayerPrivateGStreamerMSE.h"
+#include "WebKitMediaSourceGStreamer.h"
+#include <gst/gst.h>
+
+GST_DEBUG_CATEGORY_EXTERN(webkit_mse_debug);
+#define GST_CAT_DEFAULT webkit_mse_debug
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+namespace WebCore {
+
+// Factory: creates the client and immediately registers it with the player private,
+// which keeps its own reference to the new object.
+Ref<MediaSourceClientGStreamerMSE> MediaSourceClientGStreamerMSE::create(MediaPlayerPrivateGStreamerMSE& playerPrivate)
+{
+    ASSERT(WTF::isMainThread());
+
+    // No return adoptRef(new MediaSourceClientGStreamerMSE(playerPrivate)) because the ownership has already been transferred to MediaPlayerPrivateGStreamerMSE.
+    Ref<MediaSourceClientGStreamerMSE> client(adoptRef(*new MediaSourceClientGStreamerMSE(playerPrivate)));
+    playerPrivate.setMediaSourceClient(client.get());
+    return client;
+}
+
+// The duration stays invalid until durationChanged() is called with a valid value.
+MediaSourceClientGStreamerMSE::MediaSourceClientGStreamerMSE(MediaPlayerPrivateGStreamerMSE& playerPrivate)
+    : m_playerPrivate(&playerPrivate)
+    , m_duration(MediaTime::invalidTime())
+{
+    ASSERT(WTF::isMainThread());
+}
+
+// Destruction must happen on the main thread, like every other entry point here.
+MediaSourceClientGStreamerMSE::~MediaSourceClientGStreamerMSE()
+{
+    ASSERT(WTF::isMainThread());
+}
+
+// Creates an AppendPipeline for the new SourceBuffer, registers it in the player's
+// map and attaches the buffer to the playback pipeline.
+MediaSourcePrivate::AddStatus MediaSourceClientGStreamerMSE::addSourceBuffer(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate, const ContentType&)
+{
+    ASSERT(WTF::isMainThread());
+
+    // Once the player private has been cleared, no new buffers can be wired up.
+    if (!m_playerPrivate)
+        return MediaSourcePrivate::AddStatus::NotSupported;
+
+    ASSERT(m_playerPrivate->m_playbackPipeline);
+    ASSERT(sourceBufferPrivate);
+
+    RefPtr<AppendPipeline> appendPipeline = adoptRef(new AppendPipeline(*this, *sourceBufferPrivate, *m_playerPrivate));
+    GST_TRACE("Adding SourceBuffer to AppendPipeline: this=%p sourceBuffer=%p appendPipeline=%p", this, sourceBufferPrivate.get(), appendPipeline.get());
+    m_playerPrivate->m_appendPipelinesMap.add(sourceBufferPrivate, appendPipeline);
+
+    return m_playerPrivate->m_playbackPipeline->addSourceBuffer(sourceBufferPrivate);
+}
+
+// Returns the last duration accepted by durationChanged() (invalid until then).
+const MediaTime& MediaSourceClientGStreamerMSE::duration()
+{
+    ASSERT(WTF::isMainThread());
+
+    return m_duration;
+}
+
+// Caches the new duration and notifies the player. Invalid and infinite durations
+// are deliberately dropped (early return) and leave the cached value untouched.
+void MediaSourceClientGStreamerMSE::durationChanged(const MediaTime& duration)
+{
+    ASSERT(WTF::isMainThread());
+
+    GST_TRACE("duration: %f", duration.toFloat());
+    if (!duration.isValid() || duration.isPositiveInfinite() || duration.isNegativeInfinite())
+        return;
+
+    m_duration = duration;
+    if (m_playerPrivate)
+        m_playerPrivate->durationChanged();
+}
+
+// Aborts any in-flight append on the pipeline associated with |sourceBufferPrivate|.
+void MediaSourceClientGStreamerMSE::abort(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate)
+{
+    ASSERT(WTF::isMainThread());
+
+    GST_DEBUG("aborting");
+
+    if (!m_playerPrivate)
+        return;
+
+    RefPtr<AppendPipeline> appendPipeline = m_playerPrivate->m_appendPipelinesMap.get(sourceBufferPrivate);
+
+    ASSERT(appendPipeline);
+
+    appendPipeline->abort();
+}
+
+// NOTE(review): "reset parser state" is implemented here as a full abort of the
+// append pipeline, which is coarser than a parser reset — confirm this is intended.
+void MediaSourceClientGStreamerMSE::resetParserState(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate)
+{
+    ASSERT(WTF::isMainThread());
+
+    GST_DEBUG("resetting parser state");
+
+    if (!m_playerPrivate)
+        return;
+
+    RefPtr<AppendPipeline> appendPipeline = m_playerPrivate->m_appendPipelinesMap.get(sourceBufferPrivate);
+
+    ASSERT(appendPipeline);
+
+    appendPipeline->abort();
+}
+
+// Copies |data| into a new GstBuffer and pushes it into the SourceBuffer's append
+// pipeline. Returns true when the buffer was accepted (GST_FLOW_OK).
+bool MediaSourceClientGStreamerMSE::append(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate, const unsigned char* data, unsigned length)
+{
+    ASSERT(WTF::isMainThread());
+
+    GST_DEBUG("Appending %u bytes", length);
+
+    if (!m_playerPrivate)
+        return false;
+
+    RefPtr<AppendPipeline> appendPipeline = m_playerPrivate->m_appendPipelinesMap.get(sourceBufferPrivate);
+
+    ASSERT(appendPipeline);
+
+    // Let GStreamer own the backing memory instead of pairing fastMalloc()/fastFree()
+    // by hand through gst_buffer_new_wrapped_full(): gst_buffer_new_allocate() plus
+    // gst_buffer_fill() copies the caller's data with no custom destroy-notify bookkeeping.
+    GstBuffer* buffer = gst_buffer_new_allocate(nullptr, length, nullptr);
+    if (UNLIKELY(!buffer))
+        return false;
+    gst_buffer_fill(buffer, 0, data, length);
+
+    return appendPipeline->pushNewBuffer(buffer) == GST_FLOW_OK;
+}
+
+// Forwards the end-of-stream mark to the player private, which drives the pipelines.
+void MediaSourceClientGStreamerMSE::markEndOfStream(MediaSourcePrivate::EndOfStreamStatus status)
+{
+    ASSERT(WTF::isMainThread());
+
+    if (!m_playerPrivate)
+        return;
+
+    m_playerPrivate->markEndOfStream(status);
+}
+
+// Detaches the SourceBuffer: unlinks its AppendPipeline from the player and removes
+// the buffer from the playback pipeline.
+void MediaSourceClientGStreamerMSE::removedFromMediaSource(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate)
+{
+    ASSERT(WTF::isMainThread());
+
+    if (!m_playerPrivate)
+        return;
+
+    ASSERT(m_playerPrivate->m_playbackPipeline);
+
+    RefPtr<AppendPipeline> appendPipeline = m_playerPrivate->m_appendPipelinesMap.get(sourceBufferPrivate);
+
+    ASSERT(appendPipeline);
+
+    appendPipeline->clearPlayerPrivate();
+    m_playerPrivate->m_appendPipelinesMap.remove(sourceBufferPrivate);
+    // AppendPipeline destructor will take care of cleaning up when appropriate.
+
+    m_playerPrivate->m_playbackPipeline->removeSourceBuffer(sourceBufferPrivate);
+}
+
+// Forwards a per-track flush request to the playback pipeline.
+void MediaSourceClientGStreamerMSE::flush(AtomicString trackId)
+{
+    ASSERT(WTF::isMainThread());
+
+    if (m_playerPrivate)
+        m_playerPrivate->m_playbackPipeline->flush(trackId);
+}
+
+// Hands a decoded-ready sample to the playback pipeline; dropped if the player is gone.
+void MediaSourceClientGStreamerMSE::enqueueSample(PassRefPtr<MediaSample> prpSample)
+{
+    ASSERT(WTF::isMainThread());
+
+    if (m_playerPrivate)
+        m_playerPrivate->m_playbackPipeline->enqueueSample(prpSample);
+}
+
+// Returns the player's source element downcast to WebKitMediaSrc (the ASSERT below
+// validates the cast), or null once the player private has been cleared.
+GRefPtr<WebKitMediaSrc> MediaSourceClientGStreamerMSE::webKitMediaSrc()
+{
+    ASSERT(WTF::isMainThread());
+
+    if (!m_playerPrivate)
+        return nullptr;
+
+    WebKitMediaSrc* source = WEBKIT_MEDIA_SRC(m_playerPrivate->m_source.get());
+
+    ASSERT(WEBKIT_IS_MEDIA_SRC(source));
+
+    return source;
+}
+
+// Severs the back-pointer to the player; later calls become no-ops instead of
+// dereferencing a destroyed object.
+void MediaSourceClientGStreamerMSE::clearPlayerPrivate()
+{
+    ASSERT(WTF::isMainThread());
+
+    m_playerPrivate = nullptr;
+}
+
+} // namespace WebCore.
+
+#endif // ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceClientGStreamerMSE.h b/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceClientGStreamerMSE.h
new file mode 100644
index 000000000..c3d4ac7bc
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceClientGStreamerMSE.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2016 Metrological Group B.V.
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+#include "GRefPtrGStreamer.h"
+#include "MediaSourcePrivate.h"
+#include "MediaSourcePrivateClient.h"
+#include "WebKitMediaSourceGStreamer.h"
+#include <wtf/MediaTime.h>
+
+namespace WebCore {
+
+class ContentType;
+class MediaPlayerPrivateGStreamerMSE;
+class MediaSample;
+class SourceBufferPrivateGStreamer;
+
+// Mediator between the MSE platform objects (MediaSourceGStreamer,
+// SourceBufferPrivateGStreamer) and the MediaPlayerPrivateGStreamerMSE that owns
+// the append and playback pipelines. All entry points run on the main thread.
+class MediaSourceClientGStreamerMSE : public RefCounted<MediaSourceClientGStreamerMSE> {
+public:
+    static Ref<MediaSourceClientGStreamerMSE> create(MediaPlayerPrivateGStreamerMSE&);
+    virtual ~MediaSourceClientGStreamerMSE();
+
+    // From MediaSourceGStreamer.
+    MediaSourcePrivate::AddStatus addSourceBuffer(RefPtr<SourceBufferPrivateGStreamer>, const ContentType&);
+    void durationChanged(const MediaTime&);
+    void markEndOfStream(MediaSourcePrivate::EndOfStreamStatus);
+
+    // From SourceBufferPrivateGStreamer.
+    void abort(RefPtr<SourceBufferPrivateGStreamer>);
+    void resetParserState(RefPtr<SourceBufferPrivateGStreamer>);
+    bool append(RefPtr<SourceBufferPrivateGStreamer>, const unsigned char*, unsigned);
+    void removedFromMediaSource(RefPtr<SourceBufferPrivateGStreamer>);
+    void flush(AtomicString);
+    void enqueueSample(PassRefPtr<MediaSample>);
+
+    // Called when the player private goes away so we never dereference a stale pointer.
+    void clearPlayerPrivate();
+
+    const MediaTime& duration();
+    GRefPtr<WebKitMediaSrc> webKitMediaSrc();
+
+private:
+    MediaSourceClientGStreamerMSE(MediaPlayerPrivateGStreamerMSE&);
+
+    MediaPlayerPrivateGStreamerMSE* m_playerPrivate; // Not owned; nulled by clearPlayerPrivate().
+    MediaTime m_duration; // Invalid until durationChanged() accepts a finite value.
+};
+
+} // namespace WebCore.
+
+#endif // ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceGStreamer.cpp
new file mode 100644
index 000000000..92095b610
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceGStreamer.cpp
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2013 Google Inc. All rights reserved.
+ * Copyright (C) 2013 Orange
+ * Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com>
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
+ * Copyright (C) 2015, 2016 Igalia, S.L
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ * * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "MediaSourceGStreamer.h"
+
+#if ENABLE(MEDIA_SOURCE) && USE(GSTREAMER)
+
+#include "ContentType.h"
+#include "MediaPlayerPrivateGStreamer.h"
+#include "MediaPlayerPrivateGStreamerMSE.h"
+#include "MediaSourceClientGStreamerMSE.h"
+#include "NotImplemented.h"
+#include "SourceBufferPrivateGStreamer.h"
+#include "TimeRanges.h"
+#include "WebKitMediaSourceGStreamer.h"
+#include <wtf/PassRefPtr.h>
+#include <wtf/glib/GRefPtr.h>
+
+namespace WebCore {
+
+// Entry point: wires a new MediaSourceGStreamer to |mediaSource|, which takes ownership.
+void MediaSourceGStreamer::open(MediaSourcePrivateClient& mediaSource, MediaPlayerPrivateGStreamerMSE& playerPrivate)
+{
+    mediaSource.setPrivateAndOpen(adoptRef(*new MediaSourceGStreamer(mediaSource, playerPrivate)));
+}
+
+// Creating the client also registers it with |playerPrivate| (see
+// MediaSourceClientGStreamerMSE::create).
+MediaSourceGStreamer::MediaSourceGStreamer(MediaSourcePrivateClient& mediaSource, MediaPlayerPrivateGStreamerMSE& playerPrivate)
+    : MediaSourcePrivate()
+    , m_client(MediaSourceClientGStreamerMSE::create(playerPrivate))
+    , m_mediaSource(mediaSource)
+    , m_playerPrivate(playerPrivate)
+{
+}
+
+// Detach every remaining SourceBuffer from this (about to be destroyed) media source.
+MediaSourceGStreamer::~MediaSourceGStreamer()
+{
+    for (auto& sourceBufferPrivate : m_sourceBuffers)
+        sourceBufferPrivate->clearMediaSource();
+}
+
+// Creates the platform SourceBuffer, tracks it locally and registers it with the client.
+MediaSourceGStreamer::AddStatus MediaSourceGStreamer::addSourceBuffer(const ContentType& contentType, RefPtr<SourceBufferPrivate>& sourceBufferPrivate)
+{
+    sourceBufferPrivate = SourceBufferPrivateGStreamer::create(this, m_client.get(), contentType);
+    // Downcast is safe: the object was created just above as a SourceBufferPrivateGStreamer.
+    RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivateGStreamer = static_cast<SourceBufferPrivateGStreamer*>(sourceBufferPrivate.get());
+    m_sourceBuffers.add(sourceBufferPrivateGStreamer);
+    return m_client->addSourceBuffer(sourceBufferPrivateGStreamer, contentType);
+}
+
+// Removes the buffer from both the full and the active set after detaching it.
+void MediaSourceGStreamer::removeSourceBuffer(SourceBufferPrivate* sourceBufferPrivate)
+{
+    RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivateGStreamer = static_cast<SourceBufferPrivateGStreamer*>(sourceBufferPrivate);
+    ASSERT(m_sourceBuffers.contains(sourceBufferPrivateGStreamer));
+
+    sourceBufferPrivateGStreamer->clearMediaSource();
+    m_sourceBuffers.remove(sourceBufferPrivateGStreamer);
+    m_activeSourceBuffers.remove(sourceBufferPrivateGStreamer.get());
+}
+
+// Propagates the client-side duration from the MediaSource document object.
+void MediaSourceGStreamer::durationChanged()
+{
+    m_client->durationChanged(m_mediaSource->duration());
+}
+
+// Forwards the end-of-stream mark to the client (and from there to the player).
+void MediaSourceGStreamer::markEndOfStream(EndOfStreamStatus status)
+{
+    m_client->markEndOfStream(status);
+}
+
+// Un-marking end-of-stream is not supported by this backend yet.
+void MediaSourceGStreamer::unmarkEndOfStream()
+{
+    notImplemented();
+}
+
+// Ready state is delegated to the player private.
+MediaPlayer::ReadyState MediaSourceGStreamer::readyState() const
+{
+    return m_playerPrivate.readyState();
+}
+
+// Ready state changes are delegated to the player private.
+void MediaSourceGStreamer::setReadyState(MediaPlayer::ReadyState state)
+{
+    m_playerPrivate.setReadyState(state);
+}
+
+// Seek bookkeeping is delegated to the player private.
+void MediaSourceGStreamer::waitForSeekCompleted()
+{
+    m_playerPrivate.waitForSeekCompleted();
+}
+
+// Seek bookkeeping is delegated to the player private.
+void MediaSourceGStreamer::seekCompleted()
+{
+    m_playerPrivate.seekCompleted();
+}
+
+// Keeps m_activeSourceBuffers in sync with a buffer's active flag.
+void MediaSourceGStreamer::sourceBufferPrivateDidChangeActiveState(SourceBufferPrivateGStreamer* sourceBufferPrivate, bool isActive)
+{
+    // HashSet::add() is already a no-op for an existing entry, so the previous
+    // explicit contains() guard on the activation path was redundant.
+    if (isActive)
+        m_activeSourceBuffers.add(sourceBufferPrivate);
+    else
+        m_activeSourceBuffers.remove(sourceBufferPrivate);
+}
+
+// Buffered ranges come straight from the client-side MediaSource object.
+std::unique_ptr<PlatformTimeRanges> MediaSourceGStreamer::buffered()
+{
+    return m_mediaSource->buffered();
+}
+
+} // namespace WebCore
+#endif // ENABLE(MEDIA_SOURCE) && USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceGStreamer.h
new file mode 100644
index 000000000..c9a09fa04
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceGStreamer.h
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2013 Google Inc. All rights reserved.
+ * Copyright (C) 2013 Orange
+ * Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com>
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
+ * Copyright (C) 2015, 2016 Igalia, S.L
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ * * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+
+#if ENABLE(MEDIA_SOURCE) && USE(GSTREAMER)
+#include "MediaSourcePrivate.h"
+
+#include <wtf/Forward.h>
+#include <wtf/HashSet.h>
+
+typedef struct _WebKitMediaSrc WebKitMediaSrc;
+
+namespace WebCore {
+
+class SourceBufferPrivateGStreamer;
+class MediaSourceClientGStreamerMSE;
+class MediaPlayerPrivateGStreamerMSE;
+class PlatformTimeRanges;
+
+// FIXME: Should this be called MediaSourcePrivateGStreamer?
+// Platform MediaSourcePrivate implementation for the GStreamer MSE backend. Owns the
+// client object shared with the player private and tracks the SourceBuffers.
+class MediaSourceGStreamer final : public MediaSourcePrivate {
+public:
+    static void open(MediaSourcePrivateClient&, MediaPlayerPrivateGStreamerMSE&);
+    virtual ~MediaSourceGStreamer();
+
+    MediaSourceClientGStreamerMSE& client() { return m_client.get(); }
+    AddStatus addSourceBuffer(const ContentType&, RefPtr<SourceBufferPrivate>&) override;
+    void removeSourceBuffer(SourceBufferPrivate*);
+
+    void durationChanged() override;
+    void markEndOfStream(EndOfStreamStatus) override;
+    void unmarkEndOfStream() override;
+
+    MediaPlayer::ReadyState readyState() const override;
+    void setReadyState(MediaPlayer::ReadyState) override;
+
+    void waitForSeekCompleted() override;
+    void seekCompleted() override;
+
+    void sourceBufferPrivateDidChangeActiveState(SourceBufferPrivateGStreamer*, bool);
+
+    std::unique_ptr<PlatformTimeRanges> buffered();
+
+private:
+    MediaSourceGStreamer(MediaSourcePrivateClient&, MediaPlayerPrivateGStreamerMSE&);
+
+    HashSet<RefPtr<SourceBufferPrivateGStreamer>> m_sourceBuffers; // Every buffer added to this media source.
+    HashSet<SourceBufferPrivateGStreamer*> m_activeSourceBuffers; // Active subset; raw pointers into m_sourceBuffers entries.
+    Ref<MediaSourceClientGStreamerMSE> m_client;
+    Ref<MediaSourcePrivateClient> m_mediaSource;
+    MediaPlayerPrivateGStreamerMSE& m_playerPrivate;
+};
+
+} // namespace WebCore
+
+#endif // ENABLE(MEDIA_SOURCE) && USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/PlaybackPipeline.cpp b/Source/WebCore/platform/graphics/gstreamer/mse/PlaybackPipeline.cpp
new file mode 100644
index 000000000..95df6d947
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/PlaybackPipeline.cpp
@@ -0,0 +1,449 @@
+/*
+ * Copyright (C) 2014, 2015 Sebastian Dröge <sebastian@centricular.com>
+ * Copyright (C) 2016 Metrological Group B.V.
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "config.h"
+#include "PlaybackPipeline.h"
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+#include "AudioTrackPrivateGStreamer.h"
+#include "GStreamerMediaSample.h"
+#include "GStreamerUtilities.h"
+#include "MediaSample.h"
+#include "SourceBufferPrivateGStreamer.h"
+#include "VideoTrackPrivateGStreamer.h"
+
+#include <gst/app/gstappsrc.h>
+#include <gst/gst.h>
+#include <wtf/MainThread.h>
+#include <wtf/RefCounted.h>
+#include <wtf/glib/GMutexLocker.h>
+#include <wtf/glib/GRefPtr.h>
+#include <wtf/glib/GUniquePtr.h>
+#include <wtf/text/AtomicString.h>
+
+GST_DEBUG_CATEGORY_EXTERN(webkit_mse_debug);
+#define GST_CAT_DEFAULT webkit_mse_debug
+
+static Stream* getStreamByTrackId(WebKitMediaSrc*, AtomicString);
+static Stream* getStreamBySourceBufferPrivate(WebKitMediaSrc*, WebCore::SourceBufferPrivateGStreamer*);
+
+// Looks up the stream whose attached audio or video track matches |trackIdString|.
+// Returns null when no configured stream carries that track id.
+static Stream* getStreamByTrackId(WebKitMediaSrc* source, AtomicString trackIdString)
+{
+    // WebKitMediaSrc should be locked at this point.
+    for (Stream* stream : source->priv->streams) {
+        if (stream->type == WebCore::Invalid)
+            continue;
+        if (stream->audioTrack && stream->audioTrack->id() == trackIdString)
+            return stream;
+        if (stream->videoTrack && stream->videoTrack->id() == trackIdString)
+            return stream;
+    }
+    return nullptr;
+}
+
+// Looks up the stream backed by |sourceBufferPrivate|; returns null when not found.
+static Stream* getStreamBySourceBufferPrivate(WebKitMediaSrc* source, WebCore::SourceBufferPrivateGStreamer* sourceBufferPrivate)
+{
+    for (Stream* stream : source->priv->streams) {
+        if (stream->sourceBuffer == sourceBufferPrivate)
+            return stream;
+    }
+    return nullptr;
+}
+
+// FIXME: Use gst_app_src_push_sample() instead when we switch to the appropriate GStreamer version.
+static GstFlowReturn pushSample(GstAppSrc* appsrc, GstSample* sample)
+{
+ g_return_val_if_fail(GST_IS_SAMPLE(sample), GST_FLOW_ERROR);
+
+ GstCaps* caps = gst_sample_get_caps(sample);
+ if (caps)
+ gst_app_src_set_caps(appsrc, caps);
+ else
+ GST_WARNING_OBJECT(appsrc, "received sample without caps");
+
+ GstBuffer* buffer = gst_sample_get_buffer(sample);
+ if (UNLIKELY(!buffer)) {
+ GST_WARNING_OBJECT(appsrc, "received sample without buffer");
+ return GST_FLOW_OK;
+ }
+
+ // gst_app_src_push_buffer() steals the reference, we need an additional one.
+ return gst_app_src_push_buffer(appsrc, gst_buffer_ref(buffer));
+}
+
+namespace WebCore {
+
+// Associates this playback pipeline with the player's WebKitMediaSrc element.
+void PlaybackPipeline::setWebKitMediaSrc(WebKitMediaSrc* webKitMediaSrc)
+{
+    GST_DEBUG("webKitMediaSrc=%p", webKitMediaSrc);
+    m_webKitMediaSrc = webKitMediaSrc;
+}
+
+// Borrowed pointer to the associated source element (may be null before setWebKitMediaSrc()).
+WebKitMediaSrc* PlaybackPipeline::webKitMediaSrc()
+{
+    return m_webKitMediaSrc.get();
+}
+
+// Creates a new Stream backed by an appsrc for |sourceBufferPrivate| and adds it to
+// the WebKitMediaSrc bin. Fails when tracks have already been configured, since
+// adding pads after the first data is not supported yet.
+MediaSourcePrivate::AddStatus PlaybackPipeline::addSourceBuffer(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate)
+{
+    WebKitMediaSrcPrivate* priv = m_webKitMediaSrc->priv;
+
+    if (priv->allTracksConfigured) {
+        GST_ERROR_OBJECT(m_webKitMediaSrc.get(), "Adding new source buffers after first data not supported yet");
+        return MediaSourcePrivate::NotSupported;
+    }
+
+    GST_DEBUG_OBJECT(m_webKitMediaSrc.get(), "State %d", int(GST_STATE(m_webKitMediaSrc.get())));
+
+    // The Stream is owned by priv->streams until removeSourceBuffer() frees it via
+    // webKitMediaSrcFreeStream().
+    Stream* stream = new Stream{ };
+    stream->parent = m_webKitMediaSrc.get();
+    stream->appsrc = gst_element_factory_make("appsrc", nullptr);
+    stream->appsrcNeedDataFlag = false;
+    stream->sourceBuffer = sourceBufferPrivate.get();
+
+    // No track has been attached yet.
+    stream->type = Invalid;
+    stream->parser = nullptr;
+    stream->caps = nullptr;
+    stream->audioTrack = nullptr;
+    stream->videoTrack = nullptr;
+    stream->presentationSize = WebCore::FloatSize();
+    stream->lastEnqueuedTime = MediaTime::invalidTime();
+
+    gst_app_src_set_callbacks(GST_APP_SRC(stream->appsrc), &enabledAppsrcCallbacks, stream->parent, nullptr);
+    gst_app_src_set_emit_signals(GST_APP_SRC(stream->appsrc), FALSE);
+    gst_app_src_set_stream_type(GST_APP_SRC(stream->appsrc), GST_APP_STREAM_TYPE_SEEKABLE);
+
+    gst_app_src_set_max_bytes(GST_APP_SRC(stream->appsrc), 2 * WTF::MB);
+    g_object_set(G_OBJECT(stream->appsrc), "block", FALSE, "min-percent", 20, nullptr);
+
+    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
+    priv->streams.prepend(stream);
+    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());
+
+    gst_bin_add(GST_BIN(m_webKitMediaSrc.get()), stream->appsrc);
+    gst_element_sync_state_with_parent(stream->appsrc);
+
+    return MediaSourcePrivate::Ok;
+}
+
+// Unregisters the Stream associated with |sourceBufferPrivate| (under the object
+// lock) and frees it afterwards, outside the lock.
+void PlaybackPipeline::removeSourceBuffer(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate)
+{
+    ASSERT(WTF::isMainThread());
+
+    GST_DEBUG_OBJECT(m_webKitMediaSrc.get(), "Element removed from MediaSource");
+    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
+    WebKitMediaSrcPrivate* priv = m_webKitMediaSrc->priv;
+    Stream* stream = nullptr;
+    Deque<Stream*>::iterator streamPosition = priv->streams.begin();
+
+    for (; streamPosition != priv->streams.end(); ++streamPosition) {
+        if ((*streamPosition)->sourceBuffer == sourceBufferPrivate.get()) {
+            stream = *streamPosition;
+            break;
+        }
+    }
+    if (stream)
+        priv->streams.remove(streamPosition);
+    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());
+
+    if (stream)
+        webKitMediaSrcFreeStream(m_webKitMediaSrc.get(), stream);
+}
+
+// Builds a "parser ! capsfilter" bin that enforces au-aligned output. Shared by the
+// H.264 and H.265 paths, which previously duplicated this construction verbatim.
+static GstElement* createVideoParserBin(const gchar* binName, const char* parserFactoryName, const char* capsMediaType)
+{
+    GRefPtr<GstCaps> filterCaps = adoptGRef(gst_caps_new_simple(capsMediaType, "alignment", G_TYPE_STRING, "au", nullptr));
+    GstElement* capsfilter = gst_element_factory_make("capsfilter", nullptr);
+    g_object_set(capsfilter, "caps", filterCaps.get(), nullptr);
+
+    GstElement* parserBin = gst_bin_new(binName);
+
+    GstElement* parser = gst_element_factory_make(parserFactoryName, nullptr);
+    gst_bin_add_many(GST_BIN(parserBin), parser, capsfilter, nullptr);
+    gst_element_link_pads(parser, "src", capsfilter, "sink");
+
+    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(parser, "sink"));
+    gst_element_add_pad(parserBin, gst_ghost_pad_new("sink", pad.get()));
+
+    pad = adoptGRef(gst_element_get_static_pad(capsfilter, "src"));
+    gst_element_add_pad(parserBin, gst_ghost_pad_new("src", pad.get()));
+
+    return parserBin;
+}
+
+// Wraps a single audio parser element (mpegaudioparse or aacparse) in a ghost-padded bin.
+static GstElement* createAudioParserBin(const gchar* binName, GstElement* parser)
+{
+    GstElement* parserBin = gst_bin_new(binName);
+    gst_bin_add(GST_BIN(parserBin), parser);
+
+    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(parser, "sink"));
+    gst_element_add_pad(parserBin, gst_ghost_pad_new("sink", pad.get()));
+
+    pad = adoptGRef(gst_element_get_static_pad(parser, "src"));
+    gst_element_add_pad(parserBin, gst_ghost_pad_new("src", pad.get()));
+
+    return parserBin;
+}
+
+// Attaches |trackPrivate| to the Stream backed by |sourceBufferPrivate|: builds the
+// media-type-specific parser bin, links appsrc (through the parser, when present) to
+// the source pad and emits the matching {audio,video,text}-changed signal.
+void PlaybackPipeline::attachTrack(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate, RefPtr<TrackPrivateBase> trackPrivate, GstStructure* structure, GstCaps* caps)
+{
+    WebKitMediaSrc* webKitMediaSrc = m_webKitMediaSrc.get();
+
+    GST_OBJECT_LOCK(webKitMediaSrc);
+    Stream* stream = getStreamBySourceBufferPrivate(webKitMediaSrc, sourceBufferPrivate.get());
+    GST_OBJECT_UNLOCK(webKitMediaSrc);
+
+    ASSERT(stream);
+
+    GST_OBJECT_LOCK(webKitMediaSrc);
+    unsigned padId = stream->parent->priv->numberOfPads;
+    stream->parent->priv->numberOfPads++;
+    GST_OBJECT_UNLOCK(webKitMediaSrc);
+
+    const gchar* mediaType = gst_structure_get_name(structure);
+
+    GST_DEBUG_OBJECT(webKitMediaSrc, "Configured track %s: appsrc=%s, padId=%u, mediaType=%s", trackPrivate->id().string().utf8().data(), GST_ELEMENT_NAME(stream->appsrc), padId, mediaType);
+
+    GUniquePtr<gchar> parserBinName(g_strdup_printf("streamparser%u", padId));
+
+    if (!g_strcmp0(mediaType, "video/x-h264"))
+        stream->parser = createVideoParserBin(parserBinName.get(), "h264parse", "video/x-h264");
+    else if (!g_strcmp0(mediaType, "video/x-h265"))
+        stream->parser = createVideoParserBin(parserBinName.get(), "h265parse", "video/x-h265");
+    else if (!g_strcmp0(mediaType, "audio/mpeg")) {
+        gint mpegversion = -1;
+        gst_structure_get_int(structure, "mpegversion", &mpegversion);
+
+        GstElement* parser = nullptr;
+        if (mpegversion == 1)
+            parser = gst_element_factory_make("mpegaudioparse", nullptr);
+        else if (mpegversion == 2 || mpegversion == 4)
+            parser = gst_element_factory_make("aacparse", nullptr);
+        else
+            ASSERT_NOT_REACHED();
+
+        stream->parser = createAudioParserBin(parserBinName.get(), parser);
+    } else if (!g_strcmp0(mediaType, "video/x-vp9"))
+        stream->parser = nullptr;
+    else {
+        GST_ERROR_OBJECT(stream->parent, "Unsupported media format: %s", mediaType);
+        return;
+    }
+
+    GST_OBJECT_LOCK(webKitMediaSrc);
+    stream->type = Unknown;
+    GST_OBJECT_UNLOCK(webKitMediaSrc);
+
+    GRefPtr<GstPad> sourcePad;
+    if (stream->parser) {
+        gst_bin_add(GST_BIN(stream->parent), stream->parser);
+        gst_element_sync_state_with_parent(stream->parser);
+
+        GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(stream->parser, "sink"));
+        sourcePad = adoptGRef(gst_element_get_static_pad(stream->appsrc, "src"));
+        gst_pad_link(sourcePad.get(), sinkPad.get());
+        sourcePad = adoptGRef(gst_element_get_static_pad(stream->parser, "src"));
+    } else {
+        GST_DEBUG_OBJECT(m_webKitMediaSrc.get(), "Stream of type %s doesn't require a parser bin", mediaType);
+        sourcePad = adoptGRef(gst_element_get_static_pad(stream->appsrc, "src"));
+    }
+    ASSERT(sourcePad);
+
+    // FIXME: Is padId the best way to identify the Stream? What about trackId?
+    g_object_set_data(G_OBJECT(sourcePad.get()), "padId", GINT_TO_POINTER(padId));
+    webKitMediaSrcLinkParser(sourcePad.get(), caps, stream);
+
+    ASSERT(stream->parent->priv->mediaPlayerPrivate);
+    int signal = -1;
+
+    GST_OBJECT_LOCK(webKitMediaSrc);
+    if (g_str_has_prefix(mediaType, "audio")) {
+        stream->type = Audio;
+        stream->parent->priv->numberOfAudioStreams++;
+        signal = SIGNAL_AUDIO_CHANGED;
+        stream->audioTrack = RefPtr<WebCore::AudioTrackPrivateGStreamer>(static_cast<WebCore::AudioTrackPrivateGStreamer*>(trackPrivate.get()));
+    } else if (g_str_has_prefix(mediaType, "video")) {
+        stream->type = Video;
+        stream->parent->priv->numberOfVideoStreams++;
+        signal = SIGNAL_VIDEO_CHANGED;
+        stream->videoTrack = RefPtr<WebCore::VideoTrackPrivateGStreamer>(static_cast<WebCore::VideoTrackPrivateGStreamer*>(trackPrivate.get()));
+    } else if (g_str_has_prefix(mediaType, "text")) {
+        stream->type = Text;
+        stream->parent->priv->numberOfTextStreams++;
+        signal = SIGNAL_TEXT_CHANGED;
+
+        // FIXME: Support text tracks.
+    }
+    GST_OBJECT_UNLOCK(webKitMediaSrc);
+
+    if (signal != -1)
+        g_signal_emit(G_OBJECT(stream->parent), webKitMediaSrcSignals[signal], 0, nullptr);
+}
+
+// Re-associates a track with its already-configured Stream and re-emits the
+// corresponding changed signal. The media type is recovered from the appsrc caps.
+void PlaybackPipeline::reattachTrack(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate, RefPtr<TrackPrivateBase> trackPrivate)
+{
+    GST_DEBUG("Re-attaching track");
+
+    // FIXME: Maybe remove this method. Now the caps change is managed by gst_appsrc_push_sample() in enqueueSample()
+    // and flushAndEnqueueNonDisplayingSamples().
+
+    WebKitMediaSrc* webKitMediaSrc = m_webKitMediaSrc.get();
+
+    GST_OBJECT_LOCK(webKitMediaSrc);
+    Stream* stream = getStreamBySourceBufferPrivate(webKitMediaSrc, sourceBufferPrivate.get());
+    GST_OBJECT_UNLOCK(webKitMediaSrc);
+
+    ASSERT(stream && stream->type != Invalid);
+
+    // The caps change is managed by gst_appsrc_push_sample() in enqueueSample() and
+    // flushAndEnqueueNonDisplayingSamples(), so the caps aren't set from here.
+    GRefPtr<GstCaps> appsrcCaps = adoptGRef(gst_app_src_get_caps(GST_APP_SRC(stream->appsrc)));
+    const gchar* mediaType = gst_structure_get_name(gst_caps_get_structure(appsrcCaps.get(), 0));
+    int signal = -1;
+
+    GST_OBJECT_LOCK(webKitMediaSrc);
+    if (g_str_has_prefix(mediaType, "audio")) {
+        ASSERT(stream->type == Audio);
+        signal = SIGNAL_AUDIO_CHANGED;
+        stream->audioTrack = RefPtr<WebCore::AudioTrackPrivateGStreamer>(static_cast<WebCore::AudioTrackPrivateGStreamer*>(trackPrivate.get()));
+    } else if (g_str_has_prefix(mediaType, "video")) {
+        ASSERT(stream->type == Video);
+        signal = SIGNAL_VIDEO_CHANGED;
+        stream->videoTrack = RefPtr<WebCore::VideoTrackPrivateGStreamer>(static_cast<WebCore::VideoTrackPrivateGStreamer*>(trackPrivate.get()));
+    } else if (g_str_has_prefix(mediaType, "text")) {
+        ASSERT(stream->type == Text);
+        signal = SIGNAL_TEXT_CHANGED;
+
+        // FIXME: Support text tracks.
+    }
+    GST_OBJECT_UNLOCK(webKitMediaSrc);
+
+    if (signal != -1)
+        g_signal_emit(G_OBJECT(stream->parent), webKitMediaSrcSignals[signal], 0, nullptr);
+}
+
+void PlaybackPipeline::notifyDurationChanged()
+{
+    // Only post the notification here. WebKitMediaSrc will ask
+    // MediaPlayerPrivateGStreamerMSE for the new duration later, when somebody asks for it.
+    GstElement* source = GST_ELEMENT(m_webKitMediaSrc.get());
+    gst_element_post_message(source, gst_message_new_duration_changed(GST_OBJECT(source)));
+}
+
+// Marks the media source as ended: finishes the track-configuration phase if
+// it is still pending, then pushes EOS into every stream's appsrc.
+void PlaybackPipeline::markEndOfStream(MediaSourcePrivate::EndOfStreamStatus)
+{
+    WebKitMediaSrcPrivate* priv = m_webKitMediaSrc->priv;
+
+    GST_DEBUG_OBJECT(m_webKitMediaSrc.get(), "Have EOS");
+
+    // Check-and-set allTracksConfigured under the object lock so the
+    // no-more-pads / async-done notifications below run at most once.
+    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
+    bool allTracksConfigured = priv->allTracksConfigured;
+    if (!allTracksConfigured)
+        priv->allTracksConfigured = true;
+    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());
+
+    if (!allTracksConfigured) {
+        gst_element_no_more_pads(GST_ELEMENT(m_webKitMediaSrc.get()));
+        webKitMediaSrcDoAsyncDone(m_webKitMediaSrc.get());
+    }
+
+    // Collect the appsrcs while holding the lock, but signal EOS outside of it,
+    // so the object lock is not held while calling into GStreamer.
+    Vector<GstAppSrc*> appsrcs;
+
+    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
+    for (Stream* stream : priv->streams) {
+        if (stream->appsrc)
+            appsrcs.append(GST_APP_SRC(stream->appsrc));
+    }
+    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());
+
+    for (GstAppSrc* appsrc : appsrcs)
+        gst_app_src_end_of_stream(appsrc);
+}
+
+void PlaybackPipeline::flush(AtomicString trackId)
+{
+    ASSERT(WTF::isMainThread());
+
+    GST_DEBUG("flush: trackId=%s", trackId.string().utf8().data());
+
+    // Reset the last enqueued time for this track, so the next sample pushed
+    // after the flush is accepted regardless of its timestamp.
+    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
+    if (Stream* stream = getStreamByTrackId(m_webKitMediaSrc.get(), trackId))
+        stream->lastEnqueuedTime = MediaTime::invalidTime();
+    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());
+}
+
+// Pushes one media sample into the appsrc of the stream matching the sample's
+// track, provided the SourceBuffer is currently accepting samples.
+void PlaybackPipeline::enqueueSample(RefPtr<MediaSample> mediaSample)
+{
+    ASSERT(WTF::isMainThread());
+
+    AtomicString trackId = mediaSample->trackID();
+
+    GST_TRACE("enqueing sample trackId=%s PTS=%f presentationSize=%.0fx%.0f at %" GST_TIME_FORMAT " duration: %" GST_TIME_FORMAT,
+        trackId.string().utf8().data(), mediaSample->presentationTime().toFloat(),
+        mediaSample->presentationSize().width(), mediaSample->presentationSize().height(),
+        GST_TIME_ARGS(WebCore::toGstClockTime(mediaSample->presentationTime().toDouble())),
+        GST_TIME_ARGS(WebCore::toGstClockTime(mediaSample->duration().toDouble())));
+
+    Stream* stream = getStreamByTrackId(m_webKitMediaSrc.get(), trackId);
+    if (!stream) {
+        GST_WARNING("No stream!");
+        return;
+    }
+
+    if (!stream->sourceBuffer->isReadyForMoreSamples(trackId)) {
+        GST_DEBUG("enqueueSample: skip adding new sample for trackId=%s, SB is not ready yet", trackId.string().utf8().data());
+        return;
+    }
+
+    GStreamerMediaSample* sample = static_cast<GStreamerMediaSample*>(mediaSample.get());
+    GRefPtr<GstSample> gstSample = sample->sample();
+    if (gstSample && gst_sample_get_buffer(gstSample.get())) {
+        GST_BUFFER_FLAG_UNSET(gst_sample_get_buffer(gstSample.get()), GST_BUFFER_FLAG_DECODE_ONLY);
+        pushSample(GST_APP_SRC(stream->appsrc), gstSample.get());
+        // gst_app_src_push_sample() uses transfer-none for gstSample.
+
+        stream->lastEnqueuedTime = sample->presentationTime();
+    }
+}
+
+// Returns the grandparent element of the WebKitMediaSrc, or null when the
+// source isn't set or isn't placed in a parent bin yet.
+GstElement* PlaybackPipeline::pipeline()
+{
+    if (!m_webKitMediaSrc)
+        return nullptr;
+
+    GstElement* source = GST_ELEMENT(m_webKitMediaSrc.get());
+    GstElement* parent = GST_ELEMENT_PARENT(source);
+    if (!parent)
+        return nullptr;
+
+    return GST_ELEMENT_PARENT(parent);
+}
+
+} // namespace WebCore.
+
+#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/PlaybackPipeline.h b/Source/WebCore/platform/graphics/gstreamer/mse/PlaybackPipeline.h
new file mode 100644
index 000000000..08f0e60d3
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/PlaybackPipeline.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2016 Metrological Group B.V.
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+// PlaybackPipeline is (sort of) a friend class of WebKitMediaSourceGStreamer.
+
+#include "WebKitMediaSourceGStreamer.h"
+#include "WebKitMediaSourceGStreamerPrivate.h"
+
+#include <gst/gst.h>
+#include <wtf/Condition.h>
+#include <wtf/glib/GRefPtr.h>
+
+namespace WTF {
+template<> GRefPtr<WebKitMediaSrc> adoptGRef(WebKitMediaSrc*);
+template<> WebKitMediaSrc* refGPtr<WebKitMediaSrc>(WebKitMediaSrc*);
+template<> void derefGPtr<WebKitMediaSrc>(WebKitMediaSrc*);
+};
+
+namespace WebCore {
+
+class ContentType;
+class SourceBufferPrivateGStreamer;
+class MediaSourceGStreamer;
+
+// Facade used by the MSE private classes to drive the WebKitMediaSrc element:
+// it adds/removes per-SourceBuffer streams, feeds them samples, and forwards
+// duration and end-of-stream notifications.
+class PlaybackPipeline: public RefCounted<PlaybackPipeline> {
+public:
+    static Ref<PlaybackPipeline> create()
+    {
+        return adoptRef(*new PlaybackPipeline());
+    }
+
+    virtual ~PlaybackPipeline() = default;
+
+    // The WebKitMediaSrc must be set before any of the operations below.
+    void setWebKitMediaSrc(WebKitMediaSrc*);
+    WebKitMediaSrc* webKitMediaSrc();
+
+    MediaSourcePrivate::AddStatus addSourceBuffer(RefPtr<SourceBufferPrivateGStreamer>);
+    void removeSourceBuffer(RefPtr<SourceBufferPrivateGStreamer>);
+    void attachTrack(RefPtr<SourceBufferPrivateGStreamer>, RefPtr<TrackPrivateBase>, GstStructure*, GstCaps*);
+    void reattachTrack(RefPtr<SourceBufferPrivateGStreamer>, RefPtr<TrackPrivateBase>);
+    void notifyDurationChanged();
+
+    // From MediaSourceGStreamer.
+    void markEndOfStream(MediaSourcePrivate::EndOfStreamStatus);
+
+    // From SourceBufferPrivateGStreamer.
+    void flush(AtomicString);
+    void enqueueSample(RefPtr<MediaSample>);
+
+    // Grandparent of the WebKitMediaSrc element, or null if not parented yet.
+    GstElement* pipeline();
+private:
+    PlaybackPipeline() = default;
+    GRefPtr<WebKitMediaSrc> m_webKitMediaSrc;
+};
+
+} // namespace WebCore.
+
+#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/SourceBufferPrivateGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/mse/SourceBufferPrivateGStreamer.cpp
new file mode 100644
index 000000000..e4b107f70
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/SourceBufferPrivateGStreamer.cpp
@@ -0,0 +1,177 @@
+/*
+ * Copyright (C) 2013 Google Inc. All rights reserved.
+ * Copyright (C) 2013 Orange
+ * Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com>
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
+ * Copyright (C) 2015, 2016 Igalia, S.L
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ * * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "SourceBufferPrivateGStreamer.h"
+
+#if ENABLE(MEDIA_SOURCE) && USE(GSTREAMER)
+
+#include "ContentType.h"
+#include "GStreamerUtilities.h"
+#include "MediaPlayerPrivateGStreamerMSE.h"
+#include "MediaSample.h"
+#include "MediaSourceClientGStreamerMSE.h"
+#include "MediaSourceGStreamer.h"
+#include "NotImplemented.h"
+#include "WebKitMediaSourceGStreamer.h"
+
+namespace WebCore {
+
+// Factory: construction always goes through adoptRef so the instance is
+// reference-counted from the start.
+Ref<SourceBufferPrivateGStreamer> SourceBufferPrivateGStreamer::create(MediaSourceGStreamer* mediaSource, Ref<MediaSourceClientGStreamerMSE> client, const ContentType& contentType)
+{
+    return adoptRef(*new SourceBufferPrivateGStreamer(mediaSource, client.get(), contentType));
+}
+
+// NOTE(review): m_sourceBufferPrivateClient is not initialized here; it is
+// null-checked by append() and the didReceive*() callbacks, so it should be
+// zero-initialized at declaration — confirm the header does so.
+SourceBufferPrivateGStreamer::SourceBufferPrivateGStreamer(MediaSourceGStreamer* mediaSource, Ref<MediaSourceClientGStreamerMSE> client, const ContentType& contentType)
+    : SourceBufferPrivate()
+    , m_mediaSource(mediaSource)
+    , m_type(contentType)
+    , m_client(client.get())
+{
+}
+
+// Registers the SourceBuffer-side client that receives append results,
+// init segments and samples.
+void SourceBufferPrivateGStreamer::setClient(SourceBufferPrivateClient* client)
+{
+    m_sourceBufferPrivateClient = client;
+}
+
+// Forwards appended bytes to the MSE client, which owns the append pipeline.
+// If the client rejects the append, the SourceBuffer is told the stream read failed.
+void SourceBufferPrivateGStreamer::append(const unsigned char* data, unsigned length)
+{
+    ASSERT(m_mediaSource);
+
+    // Nobody to report results to: drop the data silently.
+    if (!m_sourceBufferPrivateClient)
+        return;
+
+    if (m_client->append(this, data, length))
+        return;
+
+    m_sourceBufferPrivateClient->sourceBufferPrivateAppendComplete(SourceBufferPrivateClient::ReadStreamFailed);
+}
+
+// Delegates the SourceBuffer abort() algorithm to the MSE client.
+void SourceBufferPrivateGStreamer::abort()
+{
+    m_client->abort(this);
+}
+
+// Delegates the parser-state reset to the MSE client.
+void SourceBufferPrivateGStreamer::resetParserState()
+{
+    m_client->resetParserState(this);
+}
+
+// Detaches this buffer from its media source (if still attached) and notifies
+// the MSE client so it can tear down the corresponding stream.
+void SourceBufferPrivateGStreamer::removedFromMediaSource()
+{
+    if (m_mediaSource)
+        m_mediaSource->removeSourceBuffer(this);
+    m_client->removedFromMediaSource(this);
+}
+
+// NOTE(review): m_mediaSource is dereferenced without a null check, unlike in
+// removedFromMediaSource()/setActive() — presumably never called after
+// clearMediaSource(); confirm.
+MediaPlayer::ReadyState SourceBufferPrivateGStreamer::readyState() const
+{
+    return m_mediaSource->readyState();
+}
+
+// Propagates the ready state to the media source.
+// NOTE(review): m_mediaSource is not null-checked here — confirm callers.
+void SourceBufferPrivateGStreamer::setReadyState(MediaPlayer::ReadyState state)
+{
+    m_mediaSource->setReadyState(state);
+}
+
+// Flushes the given track in the playback pipeline, via the MSE client.
+void SourceBufferPrivateGStreamer::flush(const AtomicString& trackId)
+{
+    m_client->flush(trackId);
+}
+
+// Hands a sample to the MSE client for playback. The pending
+// notify-when-ready request is cleared: samples are flowing again.
+void SourceBufferPrivateGStreamer::enqueueSample(Ref<MediaSample>&& sample, const AtomicString&)
+{
+    m_notifyWhenReadyForMoreSamples = false;
+
+    m_client->enqueueSample(WTFMove(sample));
+}
+
+// The track id is ignored: readiness is tracked per SourceBuffer, not per track.
+bool SourceBufferPrivateGStreamer::isReadyForMoreSamples(const AtomicString&)
+{
+    return m_isReadyForMoreSamples;
+}
+
+// Main-thread-only setter, driven by the appsrc need-data/enough-data callbacks.
+void SourceBufferPrivateGStreamer::setReadyForMoreSamples(bool isReady)
+{
+    ASSERT(WTF::isMainThread());
+    m_isReadyForMoreSamples = isReady;
+}
+
+// Marks the buffer ready again and, if the client asked to be told, notifies it.
+// NOTE(review): m_sourceBufferPrivateClient is not null-checked here, unlike in
+// the didReceive*() methods — confirm setClient() always precedes this call.
+void SourceBufferPrivateGStreamer::notifyReadyForMoreSamples()
+{
+    ASSERT(WTF::isMainThread());
+    setReadyForMoreSamples(true);
+    if (m_notifyWhenReadyForMoreSamples)
+        m_sourceBufferPrivateClient->sourceBufferPrivateDidBecomeReadyForMoreSamples(m_trackId);
+}
+
+// Reports active-state changes to the media source, if still attached.
+void SourceBufferPrivateGStreamer::setActive(bool isActive)
+{
+    if (m_mediaSource)
+        m_mediaSource->sourceBufferPrivateDidChangeActiveState(this, isActive);
+}
+
+// Not implemented for the GStreamer backend.
+void SourceBufferPrivateGStreamer::stopAskingForMoreSamples(const AtomicString&)
+{
+    notImplemented();
+}
+
+// Records that the client wants a callback (for trackId) the next time
+// notifyReadyForMoreSamples() runs.
+void SourceBufferPrivateGStreamer::notifyClientWhenReadyForMoreSamples(const AtomicString& trackId)
+{
+    ASSERT(WTF::isMainThread());
+    m_notifyWhenReadyForMoreSamples = true;
+    m_trackId = trackId;
+}
+
+// Called by the append machinery: forwards the init segment to the client, if any.
+void SourceBufferPrivateGStreamer::didReceiveInitializationSegment(const SourceBufferPrivateClient::InitializationSegment& initializationSegment)
+{
+    if (m_sourceBufferPrivateClient)
+        m_sourceBufferPrivateClient->sourceBufferPrivateDidReceiveInitializationSegment(initializationSegment);
+}
+
+// Called by the append machinery: forwards a parsed sample to the client, if any.
+void SourceBufferPrivateGStreamer::didReceiveSample(MediaSample& sample)
+{
+    if (m_sourceBufferPrivateClient)
+        m_sourceBufferPrivateClient->sourceBufferPrivateDidReceiveSample(sample);
+}
+
+// Called when the append pipeline has drained: reports a successful append.
+void SourceBufferPrivateGStreamer::didReceiveAllPendingSamples()
+{
+    if (m_sourceBufferPrivateClient)
+        m_sourceBufferPrivateClient->sourceBufferPrivateAppendComplete(SourceBufferPrivateClient::AppendSucceeded);
+}
+
+}
+#endif
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/SourceBufferPrivateGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/mse/SourceBufferPrivateGStreamer.h
new file mode 100644
index 000000000..5671310ff
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/SourceBufferPrivateGStreamer.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2013 Google Inc. All rights reserved.
+ * Copyright (C) 2013 Orange
+ * Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com>
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
+ * Copyright (C) 2015, 2016 Igalia, S.L
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ * * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+
+#if ENABLE(MEDIA_SOURCE) && USE(GSTREAMER)
+
+#include "ContentType.h"
+#include "MediaPlayerPrivateGStreamerMSE.h"
+#include "SourceBufferPrivate.h"
+#include "SourceBufferPrivateClient.h"
+#include "WebKitMediaSourceGStreamer.h"
+
+namespace WebCore {
+
+class MediaSourceGStreamer;
+
+// GStreamer implementation of SourceBufferPrivate. Most operations are
+// delegated to the shared MediaSourceClientGStreamerMSE, which owns the
+// append pipeline; this class mainly tracks per-buffer state.
+class SourceBufferPrivateGStreamer final : public SourceBufferPrivate {
+
+public:
+    static Ref<SourceBufferPrivateGStreamer> create(MediaSourceGStreamer*, Ref<MediaSourceClientGStreamerMSE>, const ContentType&);
+    virtual ~SourceBufferPrivateGStreamer() = default;
+
+    // Called when the MediaSource detaches; later calls must not touch it.
+    void clearMediaSource() { m_mediaSource = nullptr; }
+
+    void setClient(SourceBufferPrivateClient*) final;
+    void append(const unsigned char*, unsigned) final;
+    void abort() final;
+    void resetParserState() final;
+    void removedFromMediaSource() final;
+    MediaPlayer::ReadyState readyState() const final;
+    void setReadyState(MediaPlayer::ReadyState) final;
+
+    void flush(const AtomicString&) final;
+    void enqueueSample(Ref<MediaSample>&&, const AtomicString&) final;
+    bool isReadyForMoreSamples(const AtomicString&) final;
+    void setActive(bool) final;
+    void stopAskingForMoreSamples(const AtomicString&) final;
+    void notifyClientWhenReadyForMoreSamples(const AtomicString&) final;
+
+    // Driven by the appsrc need-data/enough-data callbacks.
+    void setReadyForMoreSamples(bool);
+    void notifyReadyForMoreSamples();
+
+    // Entry points used by the append machinery to report parsing results.
+    void didReceiveInitializationSegment(const SourceBufferPrivateClient::InitializationSegment&);
+    void didReceiveSample(MediaSample&);
+    void didReceiveAllPendingSamples();
+
+private:
+    SourceBufferPrivateGStreamer(MediaSourceGStreamer*, Ref<MediaSourceClientGStreamerMSE>, const ContentType&);
+    friend class MediaSourceClientGStreamerMSE;
+
+    MediaSourceGStreamer* m_mediaSource;
+    ContentType m_type;
+    Ref<MediaSourceClientGStreamerMSE> m_client;
+    // Must be null-initialized: the constructor doesn't set it, and append()
+    // and the didReceive*() methods null-check it. Leaving it uninitialized
+    // would read an indeterminate pointer (undefined behavior).
+    SourceBufferPrivateClient* m_sourceBufferPrivateClient { nullptr };
+    bool m_isReadyForMoreSamples = true;
+    bool m_notifyWhenReadyForMoreSamples = false;
+    AtomicString m_trackId;
+};
+
+}
+
+#endif
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamer.cpp
new file mode 100644
index 000000000..52ca66867
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamer.cpp
@@ -0,0 +1,776 @@
+/*
+ * Copyright (C) 2009, 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * Copyright (C) 2013 Collabora Ltd.
+ * Copyright (C) 2013 Orange
+ * Copyright (C) 2014, 2015 Sebastian Dröge <sebastian@centricular.com>
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
+ * Copyright (C) 2015, 2016 Igalia, S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include "config.h"
+#include "WebKitMediaSourceGStreamer.h"
+
+#include "PlaybackPipeline.h"
+
+#if ENABLE(VIDEO) && ENABLE(MEDIA_SOURCE) && USE(GSTREAMER)
+
+#include "AudioTrackPrivateGStreamer.h"
+#include "GStreamerUtilities.h"
+#include "MediaDescription.h"
+#include "MediaPlayerPrivateGStreamerMSE.h"
+#include "MediaSample.h"
+#include "MediaSourceGStreamer.h"
+#include "NotImplemented.h"
+#include "SourceBufferPrivateGStreamer.h"
+#include "TimeRanges.h"
+#include "VideoTrackPrivateGStreamer.h"
+#include "WebKitMediaSourceGStreamerPrivate.h"
+
+#include <gst/app/app.h>
+#include <gst/app/gstappsrc.h>
+#include <gst/gst.h>
+#include <gst/pbutils/missing-plugins.h>
+#include <gst/pbutils/pbutils.h>
+#include <gst/video/video.h>
+#include <wtf/Condition.h>
+#include <wtf/MainThread.h>
+#include <wtf/glib/GMutexLocker.h>
+#include <wtf/glib/GUniquePtr.h>
+#include <wtf/text/CString.h>
+
+GST_DEBUG_CATEGORY_STATIC(webkit_media_src_debug);
+#define GST_CAT_DEFAULT webkit_media_src_debug
+
+#define webkit_media_src_parent_class parent_class
+#define WEBKIT_MEDIA_SRC_CATEGORY_INIT GST_DEBUG_CATEGORY_INIT(webkit_media_src_debug, "webkitmediasrc", 0, "websrc element");
+
+static GstStaticPadTemplate srcTemplate = GST_STATIC_PAD_TEMPLATE("src_%u", GST_PAD_SRC,
+ GST_PAD_SOMETIMES, GST_STATIC_CAPS_ANY);
+
+static void enabledAppsrcNeedData(GstAppSrc*, guint, gpointer);
+static void enabledAppsrcEnoughData(GstAppSrc*, gpointer);
+static gboolean enabledAppsrcSeekData(GstAppSrc*, guint64, gpointer);
+
+// No-op appsrc callbacks, installed while a stream is disabled.
+static void disabledAppsrcNeedData(GstAppSrc*, guint, gpointer) { };
+static void disabledAppsrcEnoughData(GstAppSrc*, gpointer) { };
+static gboolean disabledAppsrcSeekData(GstAppSrc*, guint64, gpointer)
+{
+    return FALSE;
+};
+
+// Callback tables handed to gst_app_src_set_callbacks() depending on whether
+// the stream is enabled.
+GstAppSrcCallbacks enabledAppsrcCallbacks = {
+    enabledAppsrcNeedData,
+    enabledAppsrcEnoughData,
+    enabledAppsrcSeekData,
+    { 0 }
+};
+
+GstAppSrcCallbacks disabledAppsrcCallbacks = {
+    disabledAppsrcNeedData,
+    disabledAppsrcEnoughData,
+    disabledAppsrcSeekData,
+    { 0 }
+};
+
+static Stream* getStreamByAppsrc(WebKitMediaSrc*, GstElement*);
+
+// Called by an appsrc when it wants more data. Besides the normal
+// "ready for more samples" notification, this implements the post-seek
+// handshake: after a seek each appsrc reports seekData and then needData;
+// once every stream has done both, the pending seek action is run.
+static void enabledAppsrcNeedData(GstAppSrc* appsrc, guint, gpointer userData)
+{
+    WebKitMediaSrc* webKitMediaSrc = static_cast<WebKitMediaSrc*>(userData);
+    ASSERT(WEBKIT_IS_MEDIA_SRC(webKitMediaSrc));
+
+    GST_OBJECT_LOCK(webKitMediaSrc);
+    OnSeekDataAction appsrcSeekDataNextAction = webKitMediaSrc->priv->appsrcSeekDataNextAction;
+    Stream* appsrcStream = getStreamByAppsrc(webKitMediaSrc, GST_ELEMENT(appsrc));
+    bool allAppsrcNeedDataAfterSeek = false;
+
+    // A seek is in progress: count this needData once per stream (guarded by
+    // appsrcNeedDataFlag) and check whether every stream has now reported
+    // both seekData and needData.
+    if (webKitMediaSrc->priv->appsrcSeekDataCount > 0) {
+        if (appsrcStream && !appsrcStream->appsrcNeedDataFlag) {
+            ++webKitMediaSrc->priv->appsrcNeedDataCount;
+            appsrcStream->appsrcNeedDataFlag = true;
+        }
+        int numAppsrcs = webKitMediaSrc->priv->streams.size();
+        if (webKitMediaSrc->priv->appsrcSeekDataCount == numAppsrcs && webKitMediaSrc->priv->appsrcNeedDataCount == numAppsrcs) {
+            GST_DEBUG("All needDatas completed");
+            allAppsrcNeedDataAfterSeek = true;
+            // Reset the handshake state for the next seek.
+            webKitMediaSrc->priv->appsrcSeekDataCount = 0;
+            webKitMediaSrc->priv->appsrcNeedDataCount = 0;
+            webKitMediaSrc->priv->appsrcSeekDataNextAction = Nothing;
+
+            for (Stream* stream : webKitMediaSrc->priv->streams)
+                stream->appsrcNeedDataFlag = false;
+        }
+    }
+    GST_OBJECT_UNLOCK(webKitMediaSrc);
+
+    if (allAppsrcNeedDataAfterSeek) {
+        GST_DEBUG("All expected appsrcSeekData() and appsrcNeedData() calls performed. Running next action (%d)", static_cast<int>(appsrcSeekDataNextAction));
+
+        switch (appsrcSeekDataNextAction) {
+        case MediaSourceSeekToTime: {
+            // Notify interested parties through the bus (handled on the main
+            // thread) that the seek can proceed now.
+            GstStructure* structure = gst_structure_new_empty("seek-needs-data");
+            GstMessage* message = gst_message_new_application(GST_OBJECT(appsrc), structure);
+            gst_bus_post(webKitMediaSrc->priv->bus.get(), message);
+            GST_TRACE("seek-needs-data message posted to the bus");
+            break;
+        }
+        case Nothing:
+            break;
+        }
+    } else if (appsrcSeekDataNextAction == Nothing) {
+        // Normal (non-seek) operation: tell the SourceBuffer, via the bus,
+        // that it may push more samples.
+        LockHolder locker(webKitMediaSrc->priv->streamLock);
+
+        GST_OBJECT_LOCK(webKitMediaSrc);
+
+        // Search again for the Stream, just in case it was removed between the previous lock and this one.
+        appsrcStream = getStreamByAppsrc(webKitMediaSrc, GST_ELEMENT(appsrc));
+
+        if (appsrcStream && appsrcStream->type != WebCore::Invalid) {
+            GstStructure* structure = gst_structure_new("ready-for-more-samples", "appsrc-stream", G_TYPE_POINTER, appsrcStream, nullptr);
+            GstMessage* message = gst_message_new_application(GST_OBJECT(appsrc), structure);
+            gst_bus_post(webKitMediaSrc->priv->bus.get(), message);
+            GST_TRACE("ready-for-more-samples message posted to the bus");
+        }
+
+        GST_OBJECT_UNLOCK(webKitMediaSrc);
+    }
+}
+
+// Called by an appsrc when its internal queue is full: stop feeding it.
+static void enabledAppsrcEnoughData(GstAppSrc *appsrc, gpointer userData)
+{
+    // No need to lock on webKitMediaSrc, we're on the main thread and nobody is going to remove the stream in the meantime.
+    ASSERT(WTF::isMainThread());
+
+    WebKitMediaSrc* webKitMediaSrc = static_cast<WebKitMediaSrc*>(userData);
+    ASSERT(WEBKIT_IS_MEDIA_SRC(webKitMediaSrc));
+    Stream* stream = getStreamByAppsrc(webKitMediaSrc, GST_ELEMENT(appsrc));
+
+    // This callback might have been scheduled from a child thread before the stream was removed.
+    // Then, the removal code might have run, and later this callback.
+    // This check solves the race condition.
+    if (!stream || stream->type == WebCore::Invalid)
+        return;
+
+    stream->sourceBuffer->setReadyForMoreSamples(false);
+}
+
+// Called by an appsrc on seek. Counts the call so that enabledAppsrcNeedData()
+// can detect when every stream has completed the post-seek handshake.
+static gboolean enabledAppsrcSeekData(GstAppSrc*, guint64, gpointer userData)
+{
+    ASSERT(WTF::isMainThread());
+
+    auto* webKitMediaSrc = static_cast<WebKitMediaSrc*>(userData);
+    ASSERT(WEBKIT_IS_MEDIA_SRC(webKitMediaSrc));
+
+    GST_OBJECT_LOCK(webKitMediaSrc);
+    ++webKitMediaSrc->priv->appsrcSeekDataCount;
+    GST_OBJECT_UNLOCK(webKitMediaSrc);
+
+    return TRUE;
+}
+
+// Finds the Stream owning the given appsrc. Linear search is fine: there is
+// one stream per SourceBuffer track.
+static Stream* getStreamByAppsrc(WebKitMediaSrc* source, GstElement* appsrc)
+{
+    for (Stream* candidate : source->priv->streams) {
+        if (candidate->appsrc == appsrc)
+            return candidate;
+    }
+
+    return nullptr;
+}
+
+G_DEFINE_TYPE_WITH_CODE(WebKitMediaSrc, webkit_media_src, GST_TYPE_BIN,
+ G_IMPLEMENT_INTERFACE(GST_TYPE_URI_HANDLER, webKitMediaSrcUriHandlerInit);
+ WEBKIT_MEDIA_SRC_CATEGORY_INIT);
+
+guint webKitMediaSrcSignals[LAST_SIGNAL] = { 0 };
+
+// GObject class initializer: wires vfuncs, installs properties, registers the
+// track-change signals and the state-change override.
+static void webkit_media_src_class_init(WebKitMediaSrcClass* klass)
+{
+    GObjectClass* oklass = G_OBJECT_CLASS(klass);
+    GstElementClass* eklass = GST_ELEMENT_CLASS(klass);
+
+    oklass->finalize = webKitMediaSrcFinalize;
+    oklass->set_property = webKitMediaSrcSetProperty;
+    oklass->get_property = webKitMediaSrcGetProperty;
+
+    gst_element_class_add_pad_template(eklass, gst_static_pad_template_get(&srcTemplate));
+
+    gst_element_class_set_static_metadata(eklass, "WebKit Media source element", "Source", "Handles Blob uris", "Stephane Jadaud <sjadaud@sii.fr>, Sebastian Dröge <sebastian@centricular.com>, Enrique Ocaña González <eocanha@igalia.com>");
+
+    // Allows setting the uri using the 'location' property, which is used for example by gst_element_make_from_uri().
+    g_object_class_install_property(oklass,
+        PROP_LOCATION,
+        g_param_spec_string("location", "location", "Location to read from", nullptr,
+        GParamFlags(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
+    g_object_class_install_property(oklass,
+        PROP_N_AUDIO,
+        g_param_spec_int("n-audio", "Number Audio", "Total number of audio streams",
+        0, G_MAXINT, 0, GParamFlags(G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
+    g_object_class_install_property(oklass,
+        PROP_N_VIDEO,
+        g_param_spec_int("n-video", "Number Video", "Total number of video streams",
+        0, G_MAXINT, 0, GParamFlags(G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
+    g_object_class_install_property(oklass,
+        PROP_N_TEXT,
+        g_param_spec_int("n-text", "Number Text", "Total number of text streams",
+        0, G_MAXINT, 0, GParamFlags(G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
+
+    // Signals emitted whenever the set of tracks of each kind changes
+    // (mirrors playbin's signals of the same names).
+    webKitMediaSrcSignals[SIGNAL_VIDEO_CHANGED] =
+        g_signal_new("video-changed", G_TYPE_FROM_CLASS(oklass),
+        G_SIGNAL_RUN_LAST,
+        G_STRUCT_OFFSET(WebKitMediaSrcClass, videoChanged), nullptr, nullptr,
+        g_cclosure_marshal_generic, G_TYPE_NONE, 0, G_TYPE_NONE);
+    webKitMediaSrcSignals[SIGNAL_AUDIO_CHANGED] =
+        g_signal_new("audio-changed", G_TYPE_FROM_CLASS(oklass),
+        G_SIGNAL_RUN_LAST,
+        G_STRUCT_OFFSET(WebKitMediaSrcClass, audioChanged), nullptr, nullptr,
+        g_cclosure_marshal_generic, G_TYPE_NONE, 0, G_TYPE_NONE);
+    webKitMediaSrcSignals[SIGNAL_TEXT_CHANGED] =
+        g_signal_new("text-changed", G_TYPE_FROM_CLASS(oklass),
+        G_SIGNAL_RUN_LAST,
+        G_STRUCT_OFFSET(WebKitMediaSrcClass, textChanged), nullptr, nullptr,
+        g_cclosure_marshal_generic, G_TYPE_NONE, 0, G_TYPE_NONE);
+
+    eklass->change_state = webKitMediaSrcChangeState;
+
+    g_type_class_add_private(klass, sizeof(WebKitMediaSrcPrivate));
+}
+
+// GObject instance initializer. The private struct holds C++ members, so it is
+// constructed with placement new here and destroyed manually in
+// webKitMediaSrcFinalize().
+static void webkit_media_src_init(WebKitMediaSrc* source)
+{
+    source->priv = WEBKIT_MEDIA_SRC_GET_PRIVATE(source);
+    new (source->priv) WebKitMediaSrcPrivate();
+    source->priv->seekTime = MediaTime::invalidTime();
+    source->priv->appsrcSeekDataCount = 0;
+    source->priv->appsrcNeedDataCount = 0;
+    source->priv->appsrcSeekDataNextAction = Nothing;
+
+    // No need to reset Stream.appsrcNeedDataFlag because there are no Streams at this point yet.
+}
+
+// GObject finalizer: frees all streams, detaches the player private, and
+// manually destroys the placement-new'ed private struct (see webkit_media_src_init()).
+void webKitMediaSrcFinalize(GObject* object)
+{
+    ASSERT(WTF::isMainThread());
+
+    WebKitMediaSrc* source = WEBKIT_MEDIA_SRC(object);
+    WebKitMediaSrcPrivate* priv = source->priv;
+
+    // Move the streams out of the private struct before freeing them.
+    Deque<Stream*> oldStreams;
+    source->priv->streams.swap(oldStreams);
+
+    for (Stream* stream : oldStreams)
+        webKitMediaSrcFreeStream(source, stream);
+
+    priv->seekTime = MediaTime::invalidTime();
+
+    if (priv->mediaPlayerPrivate)
+        webKitMediaSrcSetMediaPlayerPrivate(source, nullptr);
+
+    // We used a placement new for construction, the destructor won't be called automatically.
+    priv->~_WebKitMediaSrcPrivate();
+
+    GST_CALL_PARENT(G_OBJECT_CLASS, finalize, (object));
+}
+
+// GObject property setter. Only "location" is writable; it is delegated to the
+// GstURIHandler interface implementation.
+void webKitMediaSrcSetProperty(GObject* object, guint propId, const GValue* value, GParamSpec* pspec)
+{
+    WebKitMediaSrc* source = WEBKIT_MEDIA_SRC(object);
+
+    switch (propId) {
+    case PROP_LOCATION:
+        gst_uri_handler_set_uri(reinterpret_cast<GstURIHandler*>(source), g_value_get_string(value), nullptr);
+        break;
+    default:
+        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propId, pspec);
+        break;
+    }
+}
+
+// GObject property getter. Reads are performed under the object lock.
+void webKitMediaSrcGetProperty(GObject* object, guint propId, GValue* value, GParamSpec* pspec)
+{
+    WebKitMediaSrc* source = WEBKIT_MEDIA_SRC(object);
+    WebKitMediaSrcPrivate* priv = source->priv;
+
+    GST_OBJECT_LOCK(source);
+    switch (propId) {
+    case PROP_LOCATION:
+        g_value_set_string(value, priv->location.get());
+        break;
+    case PROP_N_AUDIO:
+        g_value_set_int(value, priv->numberOfAudioStreams);
+        break;
+    case PROP_N_VIDEO:
+        g_value_set_int(value, priv->numberOfVideoStreams);
+        break;
+    case PROP_N_TEXT:
+        g_value_set_int(value, priv->numberOfTextStreams);
+        break;
+    default:
+        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propId, pspec);
+        break;
+    }
+    GST_OBJECT_UNLOCK(source);
+}
+
+// Posts async-start so the READY->PAUSED transition is reported as
+// asynchronous until webKitMediaSrcDoAsyncDone() completes it.
+void webKitMediaSrcDoAsyncStart(WebKitMediaSrc* source)
+{
+    source->priv->asyncStart = true;
+    GST_BIN_CLASS(parent_class)->handle_message(GST_BIN(source),
+        gst_message_new_async_start(GST_OBJECT(source)));
+}
+
+// Completes a pending asynchronous state change, if one was started with
+// webKitMediaSrcDoAsyncStart(); otherwise does nothing.
+void webKitMediaSrcDoAsyncDone(WebKitMediaSrc* source)
+{
+    WebKitMediaSrcPrivate* priv = source->priv;
+    if (priv->asyncStart) {
+        GST_BIN_CLASS(parent_class)->handle_message(GST_BIN(source),
+            gst_message_new_async_done(GST_OBJECT(source), GST_CLOCK_TIME_NONE));
+        priv->asyncStart = false;
+    }
+}
+
+// State-change override: READY->PAUSED is turned into an asynchronous
+// transition (completed later, when all tracks are configured or at EOS).
+GstStateChangeReturn webKitMediaSrcChangeState(GstElement* element, GstStateChange transition)
+{
+    WebKitMediaSrc* source = WEBKIT_MEDIA_SRC(element);
+    WebKitMediaSrcPrivate* priv = source->priv;
+
+    switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+        priv->allTracksConfigured = false;
+        webKitMediaSrcDoAsyncStart(source);
+        break;
+    default:
+        break;
+    }
+
+    GstStateChangeReturn result = GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
+    if (G_UNLIKELY(result == GST_STATE_CHANGE_FAILURE)) {
+        GST_WARNING_OBJECT(source, "State change failed");
+        // Cancel the pending async transition started above, if any.
+        webKitMediaSrcDoAsyncDone(source);
+        return result;
+    }
+
+    switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+        result = GST_STATE_CHANGE_ASYNC;
+        break;
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+        webKitMediaSrcDoAsyncDone(source);
+        priv->allTracksConfigured = false;
+        break;
+    default:
+        break;
+    }
+
+    return result;
+}
+
+// Reports the overall size in bytes as the size of the largest appsrc.
+gint64 webKitMediaSrcGetSize(WebKitMediaSrc* webKitMediaSrc)
+{
+    gint64 maximumSize = 0;
+    for (Stream* stream : webKitMediaSrc->priv->streams) {
+        gint64 streamSize = gst_app_src_get_size(GST_APP_SRC(stream->appsrc));
+        if (streamSize > maximumSize)
+            maximumSize = streamSize;
+    }
+    return maximumSize;
+}
+
+// Pad query handler: answers duration (time and bytes) and URI queries
+// directly; everything else is forwarded to the ghost pad's target.
+gboolean webKitMediaSrcQueryWithParent(GstPad* pad, GstObject* parent, GstQuery* query)
+{
+    WebKitMediaSrc* source = WEBKIT_MEDIA_SRC(GST_ELEMENT(parent));
+    gboolean result = FALSE;
+
+    switch (GST_QUERY_TYPE(query)) {
+    case GST_QUERY_DURATION: {
+        GstFormat format;
+        gst_query_parse_duration(query, &format, nullptr);
+
+        GST_DEBUG_OBJECT(source, "duration query in format %s", gst_format_get_name(format));
+        GST_OBJECT_LOCK(source);
+        switch (format) {
+        case GST_FORMAT_TIME: {
+            // Time duration comes from the player private (MSE duration).
+            if (source->priv && source->priv->mediaPlayerPrivate) {
+                float duration = source->priv->mediaPlayerPrivate->durationMediaTime().toFloat();
+                if (duration > 0) {
+                    gst_query_set_duration(query, format, WebCore::toGstClockTime(duration));
+                    GST_DEBUG_OBJECT(source, "Answering: duration=%" GST_TIME_FORMAT, GST_TIME_ARGS(WebCore::toGstClockTime(duration)));
+                    result = TRUE;
+                }
+            }
+            break;
+        }
+        case GST_FORMAT_BYTES: {
+            // Byte duration is the largest appsrc size (see webKitMediaSrcGetSize()).
+            if (source->priv) {
+                gint64 duration = webKitMediaSrcGetSize(source);
+                if (duration) {
+                    gst_query_set_duration(query, format, duration);
+                    GST_DEBUG_OBJECT(source, "size: %" G_GINT64_FORMAT, duration);
+                    result = TRUE;
+                }
+            }
+            break;
+        }
+        default:
+            break;
+        }
+
+        GST_OBJECT_UNLOCK(source);
+        break;
+    }
+    case GST_QUERY_URI:
+        if (source) {
+            GST_OBJECT_LOCK(source);
+            if (source->priv)
+                gst_query_set_uri(query, source->priv->location.get());
+            GST_OBJECT_UNLOCK(source);
+        }
+        result = TRUE;
+        break;
+    default: {
+        GRefPtr<GstPad> target = adoptGRef(gst_ghost_pad_get_target(GST_GHOST_PAD_CAST(pad)));
+        // Forward the query to the proxy target pad.
+        if (target)
+            result = gst_pad_query(target.get(), query);
+        break;
+    }
+    }
+
+    return result;
+}
+
+// Computes the track presentation size from the negotiated caps and stores a
+// new reference to the caps in the Stream. For video caps, the height is
+// scaled by the pixel-aspect-ratio; non-video tracks get an empty size.
+void webKitMediaSrcUpdatePresentationSize(GstCaps* caps, Stream* stream)
+{
+    GstStructure* structure = gst_caps_get_structure(caps, 0);
+    const gchar* structureName = gst_structure_get_name(structure);
+    GstVideoInfo info;
+
+    GST_OBJECT_LOCK(stream->parent);
+    if (g_str_has_prefix(structureName, "video/") && gst_video_info_from_caps(&info, caps)) {
+        float width = info.width;
+        float height = info.height;
+
+        // FIXME: Check whether the PAR correction should apply to the width
+        // (par_n/par_d) instead of the height.
+        // Guard against a malformed caps PAR numerator of 0, which would
+        // otherwise cause a division by zero; treat it as a 1:1 PAR.
+        if (info.par_n)
+            height *= static_cast<float>(info.par_d) / static_cast<float>(info.par_n);
+        stream->presentationSize = WebCore::FloatSize(width, height);
+    } else
+        stream->presentationSize = WebCore::FloatSize();
+
+    // Take an extra reference for the Stream; adoptGRef() assumes ownership
+    // of exactly the reference added here.
+    gst_caps_ref(caps);
+    stream->caps = adoptGRef(caps);
+    GST_OBJECT_UNLOCK(stream->parent);
+}
+
+// Exposes |sourcePad| (a parser/appsrc src pad) as a new ghost src pad of the
+// WebKitMediaSrc bin, and relinks it to a previously used decodebin sink pad
+// when one exists (pad reuse after reconfiguration).
+void webKitMediaSrcLinkStreamToSrcPad(GstPad* sourcePad, Stream* stream)
+{
+ // The pad id was stored on the pad as object data when it was created.
+ unsigned padId = static_cast<unsigned>(GPOINTER_TO_INT(g_object_get_data(G_OBJECT(sourcePad), "padId")));
+ GST_DEBUG_OBJECT(stream->parent, "linking stream to src pad (id: %u)", padId);
+
+ GUniquePtr<gchar> padName(g_strdup_printf("src_%u", padId));
+ GstPad* ghostpad = WebCore::webkitGstGhostPadFromStaticTemplate(&srcTemplate, padName.get(), sourcePad);
+
+ // Intercept queries so duration/URI can be answered by this element.
+ gst_pad_set_query_function(ghostpad, webKitMediaSrcQueryWithParent);
+
+ gst_pad_set_active(ghostpad, TRUE);
+ gst_element_add_pad(GST_ELEMENT(stream->parent), ghostpad);
+
+ if (stream->decodebinSinkPad) {
+ GST_DEBUG_OBJECT(stream->parent, "A decodebin was previously used for this source, trying to reuse it.");
+ // FIXME: error checking here. Not sure what to do if linking
+ // fails though, because decodebin is out of this source
+ // element's scope, in theory.
+ gst_pad_link(ghostpad, stream->decodebinSinkPad);
+ }
+}
+
+// Caps-notify/pad callback for a parser's src pad: records the presentation
+// size, exposes a ghost src pad if the pad is not linked yet, and re-checks
+// whether every track is now configured.
+void webKitMediaSrcLinkParser(GstPad* sourcePad, GstCaps* caps, Stream* stream)
+{
+    ASSERT(caps && stream->parent);
+    if (!caps || !stream->parent) {
+        GST_ERROR("Unable to link parser");
+        return;
+    }
+
+    webKitMediaSrcUpdatePresentationSize(caps, stream);
+
+    // FIXME: drop webKitMediaSrcLinkStreamToSrcPad() and move its code here.
+    bool padAlreadyLinked = gst_pad_is_linked(sourcePad);
+    if (!padAlreadyLinked) {
+        GST_DEBUG_OBJECT(stream->parent, "pad not linked yet");
+        webKitMediaSrcLinkStreamToSrcPad(sourcePad, stream);
+    }
+
+    webKitMediaSrcCheckAllTracksConfigured(stream->parent);
+}
+
+// Tears down a Stream: disables its appsrc callbacks, pushes EOS, releases
+// track objects under the stream lock, emits the per-type "*-changed" signal,
+// wakes waiters on streamCondition, and finally deletes the Stream struct.
+void webKitMediaSrcFreeStream(WebKitMediaSrc* source, Stream* stream)
+{
+ if (stream->appsrc) {
+ // Don't trigger callbacks from this appsrc to avoid using the stream anymore.
+ gst_app_src_set_callbacks(GST_APP_SRC(stream->appsrc), &disabledAppsrcCallbacks, nullptr, nullptr);
+ gst_app_src_end_of_stream(GST_APP_SRC(stream->appsrc));
+ }
+
+ if (stream->type != WebCore::Invalid) {
+ GST_DEBUG("Freeing track-related info on stream %p", stream);
+
+ // Serializes track-info release against other threads using it.
+ LockHolder locker(source->priv->streamLock);
+
+ if (stream->caps)
+ stream->caps = nullptr;
+
+ if (stream->audioTrack)
+ stream->audioTrack = nullptr;
+ if (stream->videoTrack)
+ stream->videoTrack = nullptr;
+
+ // Pick the signal matching the track type so observers can refresh
+ // their track lists; -1 means no signal to emit.
+ int signal = -1;
+ switch (stream->type) {
+ case WebCore::Audio:
+ signal = SIGNAL_AUDIO_CHANGED;
+ break;
+ case WebCore::Video:
+ signal = SIGNAL_VIDEO_CHANGED;
+ break;
+ case WebCore::Text:
+ signal = SIGNAL_TEXT_CHANGED;
+ break;
+ default:
+ break;
+ }
+ stream->type = WebCore::Invalid;
+
+ if (signal != -1)
+ g_signal_emit(G_OBJECT(source), webKitMediaSrcSignals[signal], 0, nullptr);
+
+ // Wake up any thread waiting for the track info to be released.
+ source->priv->streamCondition.notifyOne();
+ }
+
+ GST_DEBUG("Releasing stream: %p", stream);
+ delete stream;
+}
+
+// Completes the pending async READY->PAUSED state change (no-more-pads +
+// async-done) once every stream has a configured track type. Emits nothing if
+// the completion already happened or some stream is still pending.
+void webKitMediaSrcCheckAllTracksConfigured(WebKitMediaSrc* webKitMediaSrc)
+{
+    bool justCompleted = false;
+
+    GST_OBJECT_LOCK(webKitMediaSrc);
+    if (!webKitMediaSrc->priv->allTracksConfigured) {
+        bool anyTrackPending = false;
+        for (Stream* stream : webKitMediaSrc->priv->streams) {
+            if (stream->type == WebCore::Invalid) {
+                anyTrackPending = true;
+                break;
+            }
+        }
+        if (!anyTrackPending) {
+            webKitMediaSrc->priv->allTracksConfigured = true;
+            justCompleted = true;
+        }
+    }
+    GST_OBJECT_UNLOCK(webKitMediaSrc);
+
+    // Signal completion outside the object lock.
+    if (justCompleted) {
+        GST_DEBUG("All tracks attached. Completing async state change operation.");
+        gst_element_no_more_pads(GST_ELEMENT(webKitMediaSrc));
+        webKitMediaSrcDoAsyncDone(webKitMediaSrc);
+    }
+}
+
+// Uri handler interface.
+// GstURIHandler interface: this element acts as a URI source.
+GstURIType webKitMediaSrcUriGetType(GType)
+{
+ return GST_URI_SRC;
+}
+
+// GstURIHandler interface: the only scheme handled is "mediasourceblob".
+const gchar* const* webKitMediaSrcGetProtocols(GType)
+{
+    static const char* supportedProtocols[] = { "mediasourceblob", nullptr };
+    return supportedProtocols;
+}
+
+// GstURIHandler interface: returns a newly-allocated copy of the current URI
+// (the caller owns and frees it).
+gchar* webKitMediaSrcGetUri(GstURIHandler* handler)
+{
+    WebKitMediaSrc* source = WEBKIT_MEDIA_SRC(handler);
+
+    GST_OBJECT_LOCK(source);
+    gchar* uriCopy = g_strdup(source->priv->location.get());
+    GST_OBJECT_UNLOCK(source);
+    return uriCopy;
+}
+
+// GstURIHandler interface: stores the URI. Only permitted while the element
+// is below PAUSED. A null URI simply clears the stored location.
+gboolean webKitMediaSrcSetUri(GstURIHandler* handler, const gchar* uri, GError**)
+{
+    WebKitMediaSrc* source = WEBKIT_MEDIA_SRC(handler);
+
+    if (GST_STATE(source) >= GST_STATE_PAUSED) {
+        GST_ERROR_OBJECT(source, "URI can only be set in states < PAUSED");
+        return FALSE;
+    }
+
+    GST_OBJECT_LOCK(source);
+    WebKitMediaSrcPrivate* priv = source->priv;
+    priv->location = nullptr;
+    if (uri) {
+        // Normalize the URI through WebCore::URL before storing it.
+        WebCore::URL url(WebCore::URL(), uri);
+        priv->location = GUniquePtr<gchar>(g_strdup(url.string().utf8().data()));
+    }
+    GST_OBJECT_UNLOCK(source);
+    return TRUE;
+}
+
+// GInterfaceInitFunc: wires the GstURIHandler vtable entries.
+void webKitMediaSrcUriHandlerInit(gpointer gIface, gpointer)
+{
+    GstURIHandlerInterface* iface = static_cast<GstURIHandlerInterface*>(gIface);
+
+    iface->get_type = webKitMediaSrcUriGetType;
+    iface->get_protocols = webKitMediaSrcGetProtocols;
+    iface->get_uri = webKitMediaSrcGetUri;
+    iface->set_uri = webKitMediaSrcSetUri;
+}
+
+// Main-thread handler for the "seek-needs-data" application message: marks
+// every configured stream ready for more samples again and tells the player
+// that buffering is required at the seek target.
+static void seekNeedsDataMainThread(WebKitMediaSrc* source)
+{
+ GST_DEBUG("Buffering needed before seek");
+
+ ASSERT(WTF::isMainThread());
+
+ GST_OBJECT_LOCK(source);
+ MediaTime seekTime = source->priv->seekTime;
+ WebCore::MediaPlayerPrivateGStreamerMSE* mediaPlayerPrivate = source->priv->mediaPlayerPrivate;
+
+ if (!mediaPlayerPrivate) {
+ GST_OBJECT_UNLOCK(source);
+ return;
+ }
+
+ for (Stream* stream : source->priv->streams) {
+ if (stream->type != WebCore::Invalid)
+ stream->sourceBuffer->setReadyForMoreSamples(true);
+ }
+ GST_OBJECT_UNLOCK(source);
+ // Deliberately called after releasing the object lock.
+ mediaPlayerPrivate->notifySeekNeedsDataForTime(seekTime);
+}
+
+// Main-thread handler for the "ready-for-more-samples" application message.
+// Ignores streams that have already been removed, and does nothing while the
+// player is in the middle of a seek.
+static void notifyReadyForMoreSamplesMainThread(WebKitMediaSrc* source, Stream* appsrcStream)
+{
+    GST_OBJECT_LOCK(source);
+
+    Deque<Stream*>& streams = source->priv->streams;
+    bool streamStillAlive = std::find(streams.begin(), streams.end(), appsrcStream) != streams.end();
+    if (!streamStillAlive) {
+        GST_OBJECT_UNLOCK(source);
+        return;
+    }
+
+    WebCore::MediaPlayerPrivateGStreamerMSE* mediaPlayerPrivate = source->priv->mediaPlayerPrivate;
+    if (mediaPlayerPrivate && !mediaPlayerPrivate->seeking())
+        appsrcStream->sourceBuffer->notifyReadyForMoreSamples();
+
+    GST_OBJECT_UNLOCK(source);
+}
+
+// Bus handler for GST_MESSAGE_APPLICATION messages posted from the streaming
+// thread; dispatches each one to the matching main-thread handler.
+static void applicationMessageCallback(GstBus*, GstMessage* message, WebKitMediaSrc* source)
+{
+    ASSERT(WTF::isMainThread());
+    ASSERT(GST_MESSAGE_TYPE(message) == GST_MESSAGE_APPLICATION);
+
+    const GstStructure* structure = gst_message_get_structure(message);
+
+    if (gst_structure_has_name(structure, "seek-needs-data"))
+        seekNeedsDataMainThread(source);
+    else if (gst_structure_has_name(structure, "ready-for-more-samples")) {
+        // The originating Stream is carried as a raw pointer in the message.
+        Stream* appsrcStream = nullptr;
+        gst_structure_get(structure, "appsrc-stream", G_TYPE_POINTER, &appsrcStream, nullptr);
+        ASSERT(appsrcStream);
+        notifyReadyForMoreSamplesMainThread(source, appsrcStream);
+    } else
+        ASSERT_NOT_REACHED();
+}
+
+// Associates the media player with this source (or clears it when null), and
+// (re)subscribes to application messages on the player's pipeline bus,
+// disconnecting first from a previous player's bus if there was one.
+void webKitMediaSrcSetMediaPlayerPrivate(WebKitMediaSrc* source, WebCore::MediaPlayerPrivateGStreamerMSE* mediaPlayerPrivate)
+{
+ GST_OBJECT_LOCK(source);
+ // Avoid a stale handler if the player (and therefore the bus) changes.
+ if (source->priv->mediaPlayerPrivate && source->priv->mediaPlayerPrivate != mediaPlayerPrivate && source->priv->bus)
+ g_signal_handlers_disconnect_by_func(source->priv->bus.get(), gpointer(applicationMessageCallback), source);
+
+ // Set to nullptr on MediaPlayerPrivateGStreamer destruction, never a dangling pointer.
+ source->priv->mediaPlayerPrivate = mediaPlayerPrivate;
+ source->priv->bus = mediaPlayerPrivate ? adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(mediaPlayerPrivate->pipeline()))) : nullptr;
+ if (source->priv->bus) {
+ // MediaPlayerPrivateGStreamer has called gst_bus_add_signal_watch() at this point, so we can subscribe.
+ g_signal_connect(source->priv->bus.get(), "message::application", G_CALLBACK(applicationMessageCallback), source);
+ }
+ GST_OBJECT_UNLOCK(source);
+}
+
+// Toggles, for every stream, whether its SourceBuffer may enqueue samples.
+// A null source is tolerated and ignored.
+void webKitMediaSrcSetReadyForSamples(WebKitMediaSrc* source, bool isReady)
+{
+    if (!source)
+        return;
+
+    GST_OBJECT_LOCK(source);
+    for (Stream* stream : source->priv->streams)
+        stream->sourceBuffer->setReadyForMoreSamples(isReady);
+    GST_OBJECT_UNLOCK(source);
+}
+
+// Records the seek target and resets the seek-data/need-data bookkeeping so
+// that once every appsrc reports seek-data, the deferred MediaSourceSeekToTime
+// action is executed (in enabledAppsrcSeekData()).
+void webKitMediaSrcPrepareSeek(WebKitMediaSrc* source, const MediaTime& time)
+{
+    GST_OBJECT_LOCK(source);
+    WebKitMediaSrcPrivate* priv = source->priv;
+    priv->seekTime = time;
+    priv->appsrcSeekDataCount = 0;
+    priv->appsrcNeedDataCount = 0;
+
+    for (Stream* stream : priv->streams) {
+        stream->appsrcNeedDataFlag = false;
+        // Don't allow samples away from the seekTime to be enqueued.
+        stream->lastEnqueuedTime = time;
+    }
+
+    // The pending action will be performed in enabledAppsrcSeekData().
+    priv->appsrcSeekDataNextAction = MediaSourceSeekToTime;
+    GST_OBJECT_UNLOCK(source);
+}
+
+namespace WTF {
+// GRefPtr support for WebKitMediaSrc: ref/unref go through GstObject, with
+// ref_sink to take ownership of a floating reference on first ref.
+template <> GRefPtr<WebKitMediaSrc> adoptGRef(WebKitMediaSrc* ptr)
+{
+ // Adopting a floating reference would be a ref-count bug; sink it first.
+ ASSERT(!ptr || !g_object_is_floating(G_OBJECT(ptr)));
+ return GRefPtr<WebKitMediaSrc>(ptr, GRefPtrAdopt);
+}
+
+template <> WebKitMediaSrc* refGPtr<WebKitMediaSrc>(WebKitMediaSrc* ptr)
+{
+ if (ptr)
+ gst_object_ref_sink(GST_OBJECT(ptr));
+
+ return ptr;
+}
+
+template <> void derefGPtr<WebKitMediaSrc>(WebKitMediaSrc* ptr)
+{
+ if (ptr)
+ gst_object_unref(ptr);
+}
+};
+
+#endif // USE(GSTREAMER)
+
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamer.h
new file mode 100644
index 000000000..79086054c
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamer.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2009, 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * Copyright (C) 2013 Collabora Ltd.
+ * Copyright (C) 2013 Orange
+ * Copyright (C) 2014, 2015 Sebastian Dröge <sebastian@centricular.com>
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
+ * Copyright (C) 2015, 2016 Igalia, S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#pragma once
+
+#if ENABLE(VIDEO) && ENABLE(MEDIA_SOURCE) && USE(GSTREAMER)
+
+#include "GRefPtrGStreamer.h"
+#include "MediaPlayer.h"
+#include "MediaSource.h"
+#include "MediaSourcePrivate.h"
+#include "SourceBufferPrivate.h"
+#include "SourceBufferPrivateClient.h"
+#include <gst/gst.h>
+
+namespace WebCore {
+
+class MediaPlayerPrivateGStreamerMSE;
+
+// Track type attached to a Stream. Invalid means the track has not been
+// configured yet (or its info has been released).
+enum MediaSourceStreamTypeGStreamer { Invalid, Unknown, Audio, Video, Text };
+
+}
+
+G_BEGIN_DECLS
+
+#define WEBKIT_TYPE_MEDIA_SRC (webkit_media_src_get_type ())
+#define WEBKIT_MEDIA_SRC(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), WEBKIT_TYPE_MEDIA_SRC, WebKitMediaSrc))
+#define WEBKIT_MEDIA_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), WEBKIT_TYPE_MEDIA_SRC, WebKitMediaSrcClass))
+#define WEBKIT_IS_MEDIA_SRC(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), WEBKIT_TYPE_MEDIA_SRC))
+#define WEBKIT_IS_MEDIA_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), WEBKIT_TYPE_MEDIA_SRC))
+
+typedef struct _WebKitMediaSrc WebKitMediaSrc;
+typedef struct _WebKitMediaSrcClass WebKitMediaSrcClass;
+typedef struct _WebKitMediaSrcPrivate WebKitMediaSrcPrivate;
+
+struct _WebKitMediaSrc {
+    GstBin parent;
+
+    WebKitMediaSrcPrivate* priv;
+};
+
+struct _WebKitMediaSrcClass {
+    GstBinClass parentClass;
+
+    // Notify app that number of audio/video/text streams changed.
+    void (*videoChanged)(WebKitMediaSrc*);
+    void (*audioChanged)(WebKitMediaSrc*);
+    void (*textChanged)(WebKitMediaSrc*);
+};
+
+GType webkit_media_src_get_type(void);
+
+// Associates (or clears, when null) the media player so this source can answer
+// duration queries and post application messages on the player's bus.
+void webKitMediaSrcSetMediaPlayerPrivate(WebKitMediaSrc*, WebCore::MediaPlayerPrivateGStreamerMSE*);
+
+// Resets seek bookkeeping; the actual seek runs once all appsrcs report seek-data.
+void webKitMediaSrcPrepareSeek(WebKitMediaSrc*, const MediaTime&);
+void webKitMediaSrcSetReadyForSamples(WebKitMediaSrc*, bool);
+
+G_END_DECLS
+
+#endif // ENABLE(VIDEO) && ENABLE(MEDIA_SOURCE) && USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamerPrivate.h b/Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamerPrivate.h
new file mode 100644
index 000000000..83b523f7d
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamerPrivate.h
@@ -0,0 +1,143 @@
+/*
+ * Copyright (C) 2016 Metrological Group B.V.
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+#include "AudioTrackPrivateGStreamer.h"
+#include "SourceBufferPrivateGStreamer.h"
+#include "VideoTrackPrivateGStreamer.h"
+#include "WebKitMediaSourceGStreamer.h"
+
+#include <gst/app/gstappsrc.h>
+#include <gst/gst.h>
+#include <wtf/Condition.h>
+#include <wtf/RefPtr.h>
+#include <wtf/glib/GRefPtr.h>
+
+namespace WebCore {
+
+class MediaPlayerPrivateGStreamerMSE;
+
+}
+
+#define WEBKIT_MEDIA_SRC_GET_PRIVATE(obj) (G_TYPE_INSTANCE_GET_PRIVATE((obj), WEBKIT_TYPE_MEDIA_SRC, WebKitMediaSrcPrivate))
+
+typedef struct _Stream Stream;
+
+// Per-SourceBuffer stream state owned by WebKitMediaSrc.
+struct _Stream {
+    // Fields filled when the Stream is created.
+    WebKitMediaSrc* parent;
+
+    // AppSrc.
+    GstElement* appsrc;
+    GstPad* decodebinSinkPad;
+    WebCore::SourceBufferPrivateGStreamer* sourceBuffer;
+
+    // Fields filled when the track is attached.
+    WebCore::MediaSourceStreamTypeGStreamer type;
+    // Might be 0, e.g. for VP8/VP9.
+    GstElement* parser;
+    GRefPtr<GstCaps> caps;
+    RefPtr<WebCore::AudioTrackPrivateGStreamer> audioTrack;
+    RefPtr<WebCore::VideoTrackPrivateGStreamer> videoTrack;
+    WebCore::FloatSize presentationSize;
+
+    // This helps WebKitMediaSrcPrivate.appsrcNeedDataCount, ensuring that needDatas are
+    // counted only once per each appsrc.
+    bool appsrcNeedDataFlag;
+
+    // Used to enforce continuity in the appended data and avoid breaking the decoder.
+    MediaTime lastEnqueuedTime;
+};
+
+// GObject property ids.
+enum {
+    PROP_0,
+    PROP_LOCATION,
+    PROP_N_AUDIO,
+    PROP_N_VIDEO,
+    PROP_N_TEXT,
+    PROP_LAST
+};
+
+// Indices into webKitMediaSrcSignals.
+enum {
+    SIGNAL_VIDEO_CHANGED,
+    SIGNAL_AUDIO_CHANGED,
+    SIGNAL_TEXT_CHANGED,
+    LAST_SIGNAL
+};
+
+enum OnSeekDataAction {
+    Nothing,
+    MediaSourceSeekToTime
+};
+
+struct _WebKitMediaSrcPrivate {
+    // Used to coordinate the release of Stream track info.
+    Lock streamLock;
+    Condition streamCondition;
+
+    Deque<Stream*> streams;
+    GUniquePtr<gchar> location;
+    int numberOfAudioStreams;
+    int numberOfVideoStreams;
+    int numberOfTextStreams;
+    bool asyncStart;
+    bool allTracksConfigured;
+    unsigned numberOfPads;
+
+    MediaTime seekTime;
+
+    // On seek, we wait for all the seekDatas, then for all the needDatas, and then run the nextAction.
+    OnSeekDataAction appsrcSeekDataNextAction;
+    int appsrcSeekDataCount;
+    int appsrcNeedDataCount;
+
+    GRefPtr<GstBus> bus;
+    WebCore::MediaPlayerPrivateGStreamerMSE* mediaPlayerPrivate;
+};
+
+extern guint webKitMediaSrcSignals[LAST_SIGNAL];
+extern GstAppSrcCallbacks enabledAppsrcCallbacks;
+extern GstAppSrcCallbacks disabledAppsrcCallbacks;
+
+// Internal functions shared between the WebKitMediaSrc implementation files.
+void webKitMediaSrcUriHandlerInit(gpointer gIface, gpointer ifaceData);
+void webKitMediaSrcFinalize(GObject*);
+void webKitMediaSrcSetProperty(GObject*, guint propertyId, const GValue*, GParamSpec*);
+void webKitMediaSrcGetProperty(GObject*, guint propertyId, GValue*, GParamSpec*);
+void webKitMediaSrcDoAsyncStart(WebKitMediaSrc*);
+void webKitMediaSrcDoAsyncDone(WebKitMediaSrc*);
+GstStateChangeReturn webKitMediaSrcChangeState(GstElement*, GstStateChange);
+gint64 webKitMediaSrcGetSize(WebKitMediaSrc*);
+gboolean webKitMediaSrcQueryWithParent(GstPad*, GstObject*, GstQuery*);
+void webKitMediaSrcUpdatePresentationSize(GstCaps*, Stream*);
+void webKitMediaSrcLinkStreamToSrcPad(GstPad*, Stream*);
+void webKitMediaSrcLinkParser(GstPad*, GstCaps*, Stream*);
+void webKitMediaSrcFreeStream(WebKitMediaSrc*, Stream*);
+void webKitMediaSrcCheckAllTracksConfigured(WebKitMediaSrc*);
+GstURIType webKitMediaSrcUriGetType(GType);
+const gchar* const* webKitMediaSrcGetProtocols(GType);
+gchar* webKitMediaSrcGetUri(GstURIHandler*);
+gboolean webKitMediaSrcSetUri(GstURIHandler*, const gchar*, GError**);
+
+#endif // ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)