summaryrefslogtreecommitdiff
path: root/Source/WebCore/platform/graphics/gstreamer
diff options
context:
space:
mode:
authorLorry Tar Creator <lorry-tar-importer@lorry>2017-06-27 06:07:23 +0000
committerLorry Tar Creator <lorry-tar-importer@lorry>2017-06-27 06:07:23 +0000
commit1bf1084f2b10c3b47fd1a588d85d21ed0eb41d0c (patch)
tree46dcd36c86e7fbc6e5df36deb463b33e9967a6f7 /Source/WebCore/platform/graphics/gstreamer
parent32761a6cee1d0dee366b885b7b9c777e67885688 (diff)
downloadWebKitGtk-tarball-master.tar.gz
Diffstat (limited to 'Source/WebCore/platform/graphics/gstreamer')
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/AudioTrackPrivateGStreamer.cpp4
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/AudioTrackPrivateGStreamer.h14
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/GRefPtrGStreamer.cpp142
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/GRefPtrGStreamer.h39
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/GStreamerUtilities.cpp127
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/GStreamerUtilities.h33
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/GUniquePtrGStreamer.h36
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/ImageGStreamer.h8
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/ImageGStreamerCairo.cpp10
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/InbandMetadataTextTrackPrivateGStreamer.h33
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/InbandTextTrackPrivateGStreamer.cpp78
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/InbandTextTrackPrivateGStreamer.h20
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/MainThreadNotifier.h105
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp1511
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h238
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp1118
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.h218
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerOwr.cpp501
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerOwr.h128
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/MediaPlayerRequestInstallMissingPluginsCallback.h61
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/MediaSourceGStreamer.cpp84
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/SourceBufferPrivateGStreamer.cpp68
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/SourceBufferPrivateGStreamer.h65
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/TextCombinerGStreamer.cpp8
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/TextSinkGStreamer.cpp2
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/TrackPrivateBaseGStreamer.cpp123
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/TrackPrivateBaseGStreamer.h34
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/VideoSinkGStreamer.cpp340
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/VideoTextureCopierGStreamer.cpp190
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/VideoTextureCopierGStreamer.h59
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/VideoTrackPrivateGStreamer.cpp4
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/VideoTrackPrivateGStreamer.h14
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/WebKitMediaSourceGStreamer.cpp846
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/WebKitWebSourceGStreamer.cpp1083
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/eme/WebKitClearKeyDecryptorGStreamer.cpp260
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/eme/WebKitClearKeyDecryptorGStreamer.h57
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/eme/WebKitCommonEncryptionDecryptorGStreamer.cpp362
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/eme/WebKitCommonEncryptionDecryptorGStreamer.h64
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.cpp1188
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.h165
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaDescription.cpp75
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaDescription.h58
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaSample.cpp120
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaSample.h76
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp860
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.h132
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceClientGStreamerMSE.cpp216
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceClientGStreamerMSE.h70
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceGStreamer.cpp139
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceGStreamer.h (renamed from Source/WebCore/platform/graphics/gstreamer/MediaSourceGStreamer.h)61
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/PlaybackPipeline.cpp449
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/PlaybackPipeline.h79
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/SourceBufferPrivateGStreamer.cpp177
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/SourceBufferPrivateGStreamer.h94
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamer.cpp776
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamer.h (renamed from Source/WebCore/platform/graphics/gstreamer/WebKitMediaSourceGStreamer.h)46
-rw-r--r--Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamerPrivate.h143
57 files changed, 10047 insertions, 2964 deletions
diff --git a/Source/WebCore/platform/graphics/gstreamer/AudioTrackPrivateGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/AudioTrackPrivateGStreamer.cpp
index 2f0debcc0..fc2d67471 100644
--- a/Source/WebCore/platform/graphics/gstreamer/AudioTrackPrivateGStreamer.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/AudioTrackPrivateGStreamer.cpp
@@ -37,6 +37,8 @@ AudioTrackPrivateGStreamer::AudioTrackPrivateGStreamer(GRefPtr<GstElement> playb
: TrackPrivateBaseGStreamer(this, index, pad)
, m_playbin(playbin)
{
+ // FIXME: Get a real ID from the tkhd atom.
+ m_id = "A" + String::number(index);
notifyTrackOfActiveChanged();
}
@@ -53,7 +55,7 @@ void AudioTrackPrivateGStreamer::setEnabled(bool enabled)
AudioTrackPrivate::setEnabled(enabled);
if (enabled && m_playbin)
- g_object_set(m_playbin.get(), "current-audio", m_index, NULL);
+ g_object_set(m_playbin.get(), "current-audio", m_index, nullptr);
}
} // namespace WebCore
diff --git a/Source/WebCore/platform/graphics/gstreamer/AudioTrackPrivateGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/AudioTrackPrivateGStreamer.h
index 1775b2021..bd18e5c58 100644
--- a/Source/WebCore/platform/graphics/gstreamer/AudioTrackPrivateGStreamer.h
+++ b/Source/WebCore/platform/graphics/gstreamer/AudioTrackPrivateGStreamer.h
@@ -41,19 +41,21 @@ public:
return adoptRef(new AudioTrackPrivateGStreamer(playbin, index, pad));
}
- virtual void disconnect() override;
+ void disconnect() override;
- virtual void setEnabled(bool) override;
- virtual void setActive(bool enabled) override { setEnabled(enabled); }
+ void setEnabled(bool) override;
+ void setActive(bool enabled) override { setEnabled(enabled); }
- virtual int trackIndex() const override { return m_index; }
+ int trackIndex() const override { return m_index; }
- virtual AtomicString label() const override { return m_label; }
- virtual AtomicString language() const override { return m_language; }
+ AtomicString id() const override { return m_id; }
+ AtomicString label() const override { return m_label; }
+ AtomicString language() const override { return m_language; }
private:
AudioTrackPrivateGStreamer(GRefPtr<GstElement> playbin, gint index, GRefPtr<GstPad>);
+ AtomicString m_id;
GRefPtr<GstElement> m_playbin;
};
diff --git a/Source/WebCore/platform/graphics/gstreamer/GRefPtrGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/GRefPtrGStreamer.cpp
index f7ef46dd8..0ed9b5633 100644
--- a/Source/WebCore/platform/graphics/gstreamer/GRefPtrGStreamer.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/GRefPtrGStreamer.cpp
@@ -27,7 +27,7 @@ namespace WTF {
template <> GRefPtr<GstElement> adoptGRef(GstElement* ptr)
{
- ASSERT(!ptr || !g_object_is_floating(G_OBJECT(ptr)));
+ ASSERT(!ptr || !g_object_is_floating(ptr));
return GRefPtr<GstElement>(ptr, GRefPtrAdopt);
}
@@ -47,7 +47,7 @@ template <> void derefGPtr<GstElement>(GstElement* ptr)
template <> GRefPtr<GstPad> adoptGRef(GstPad* ptr)
{
- ASSERT(!ptr || !g_object_is_floating(G_OBJECT(ptr)));
+ ASSERT(!ptr || !g_object_is_floating(ptr));
return GRefPtr<GstPad>(ptr, GRefPtrAdopt);
}
@@ -67,7 +67,7 @@ template <> void derefGPtr<GstPad>(GstPad* ptr)
template <> GRefPtr<GstPadTemplate> adoptGRef(GstPadTemplate* ptr)
{
- ASSERT(!ptr || !g_object_is_floating(G_OBJECT(ptr)));
+ ASSERT(!ptr || !g_object_is_floating(ptr));
return GRefPtr<GstPadTemplate>(ptr, GRefPtrAdopt);
}
@@ -103,10 +103,28 @@ template <> void derefGPtr<GstCaps>(GstCaps* ptr)
gst_caps_unref(ptr);
}
+template <> GRefPtr<GstContext> adoptGRef(GstContext* ptr)
+{
+ return GRefPtr<GstContext>(ptr, GRefPtrAdopt);
+}
+
+template <> GstContext* refGPtr<GstContext>(GstContext* ptr)
+{
+ if (ptr)
+ gst_context_ref(ptr);
+ return ptr;
+}
+
+template <> void derefGPtr<GstContext>(GstContext* ptr)
+{
+ if (ptr)
+ gst_context_unref(ptr);
+}
template <> GRefPtr<GstTask> adoptGRef(GstTask* ptr)
{
- ASSERT(!g_object_is_floating(G_OBJECT(ptr)));
+ // There is no need to check the object reference is floating here because
+ // gst_task_init() always sinks it.
return GRefPtr<GstTask>(ptr, GRefPtrAdopt);
}
@@ -126,7 +144,7 @@ template <> void derefGPtr<GstTask>(GstTask* ptr)
template <> GRefPtr<GstBus> adoptGRef(GstBus* ptr)
{
- ASSERT(!g_object_is_floating(G_OBJECT(ptr)));
+ ASSERT(!ptr || !g_object_is_floating(ptr));
return GRefPtr<GstBus>(ptr, GRefPtrAdopt);
}
@@ -146,7 +164,7 @@ template <> void derefGPtr<GstBus>(GstBus* ptr)
template <> GRefPtr<GstElementFactory> adoptGRef(GstElementFactory* ptr)
{
- ASSERT(!g_object_is_floating(G_OBJECT(ptr)));
+ ASSERT(!ptr || !g_object_is_floating(ptr));
return GRefPtr<GstElementFactory>(ptr, GRefPtrAdopt);
}
@@ -183,6 +201,45 @@ template<> void derefGPtr<GstBuffer>(GstBuffer* ptr)
gst_buffer_unref(ptr);
}
+template<> GRefPtr<GstBufferList> adoptGRef(GstBufferList* ptr)
+{
+ return GRefPtr<GstBufferList>(ptr, GRefPtrAdopt);
+}
+
+template<> GstBufferList* refGPtr<GstBufferList>(GstBufferList* ptr)
+{
+ if (ptr)
+ gst_buffer_list_ref(ptr);
+
+ return ptr;
+}
+
+template<> void derefGPtr<GstBufferList>(GstBufferList* ptr)
+{
+ if (ptr)
+ gst_buffer_list_unref(ptr);
+}
+
+template<> GRefPtr<GstBufferPool> adoptGRef(GstBufferPool* ptr)
+{
+ ASSERT(!ptr || !g_object_is_floating(ptr));
+ return GRefPtr<GstBufferPool>(ptr, GRefPtrAdopt);
+}
+
+template<> GstBufferPool* refGPtr<GstBufferPool>(GstBufferPool* ptr)
+{
+ if (ptr)
+ gst_object_ref_sink(GST_OBJECT(ptr));
+
+ return ptr;
+}
+
+template<> void derefGPtr<GstBufferPool>(GstBufferPool* ptr)
+{
+ if (ptr)
+ gst_object_unref(ptr);
+}
+
template<> GRefPtr<GstSample> adoptGRef(GstSample* ptr)
{
return GRefPtr<GstSample>(ptr, GRefPtrAdopt);
@@ -248,7 +305,7 @@ template<> GRefPtr<GstToc> adoptGRef(GstToc* ptr)
template<> GstToc* refGPtr<GstToc>(GstToc* ptr)
{
if (ptr)
- gst_toc_ref(ptr);
+ return gst_toc_ref(ptr);
return ptr;
}
@@ -258,5 +315,76 @@ template<> void derefGPtr<GstToc>(GstToc* ptr)
if (ptr)
gst_toc_unref(ptr);
}
+
+template<> GRefPtr<GstMessage> adoptGRef(GstMessage* ptr)
+{
+ return GRefPtr<GstMessage>(ptr, GRefPtrAdopt);
}
+
+template<> GstMessage* refGPtr<GstMessage>(GstMessage* ptr)
+{
+ if (ptr)
+ return gst_message_ref(ptr);
+
+ return ptr;
+}
+
+template<> void derefGPtr<GstMessage>(GstMessage* ptr)
+{
+ if (ptr)
+ gst_message_unref(ptr);
+}
+
+template <> GRefPtr<WebKitVideoSink> adoptGRef(WebKitVideoSink* ptr)
+{
+ ASSERT(!ptr || !g_object_is_floating(ptr));
+ return GRefPtr<WebKitVideoSink>(ptr, GRefPtrAdopt);
+}
+
+template <> WebKitVideoSink* refGPtr<WebKitVideoSink>(WebKitVideoSink* ptr)
+{
+ if (ptr)
+ gst_object_ref_sink(GST_OBJECT(ptr));
+
+ return ptr;
+}
+
+template <> void derefGPtr<WebKitVideoSink>(WebKitVideoSink* ptr)
+{
+ if (ptr)
+ gst_object_unref(GST_OBJECT(ptr));
+}
+
+template <> GRefPtr<WebKitWebSrc> adoptGRef(WebKitWebSrc* ptr)
+{
+ ASSERT(!ptr || !g_object_is_floating(ptr));
+ return GRefPtr<WebKitWebSrc>(ptr, GRefPtrAdopt);
+}
+
+// This method is only available for WebKitWebSrc and should not be used for any other type.
+// This is only to work around a bug in GST where the URI downloader is not taking the ownership of WebKitWebSrc.
+// See https://bugs.webkit.org/show_bug.cgi?id=144040.
+GRefPtr<WebKitWebSrc> ensureGRef(WebKitWebSrc* ptr)
+{
+ if (ptr && g_object_is_floating(ptr))
+ gst_object_ref_sink(GST_OBJECT(ptr));
+ return GRefPtr<WebKitWebSrc>(ptr);
+}
+
+template <> WebKitWebSrc* refGPtr<WebKitWebSrc>(WebKitWebSrc* ptr)
+{
+ if (ptr)
+ gst_object_ref_sink(GST_OBJECT(ptr));
+
+ return ptr;
+}
+
+template <> void derefGPtr<WebKitWebSrc>(WebKitWebSrc* ptr)
+{
+ if (ptr)
+ gst_object_unref(GST_OBJECT(ptr));
+}
+
+} // namespace WTF
+
#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/GRefPtrGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/GRefPtrGStreamer.h
index 2ce0a1d80..9f9bb6d17 100644
--- a/Source/WebCore/platform/graphics/gstreamer/GRefPtrGStreamer.h
+++ b/Source/WebCore/platform/graphics/gstreamer/GRefPtrGStreamer.h
@@ -21,20 +21,26 @@
#define GRefPtrGStreamer_h
#if USE(GSTREAMER)
-#include <wtf/gobject/GRefPtr.h>
+#include <wtf/glib/GRefPtr.h>
typedef struct _GstElement GstElement;
typedef struct _GstPad GstPad;
typedef struct _GstPadTemplate GstPadTemplate;
typedef struct _GstCaps GstCaps;
+typedef struct _GstContext GstContext;
typedef struct _GstTask GstTask;
typedef struct _GstBus GstBus;
typedef struct _GstElementFactory GstElementFactory;
typedef struct _GstBuffer GstBuffer;
+typedef struct _GstBufferList GstBufferList;
+typedef struct _GstBufferPool GstBufferPool;
typedef struct _GstSample GstSample;
typedef struct _GstTagList GstTagList;
typedef struct _GstEvent GstEvent;
typedef struct _GstToc GstToc;
+typedef struct _GstMessage GstMessage;
+typedef struct _WebKitVideoSink WebKitVideoSink;
+typedef struct _WebKitWebSrc WebKitWebSrc;
namespace WTF {
@@ -54,6 +60,10 @@ template<> GRefPtr<GstCaps> adoptGRef(GstCaps* ptr);
template<> GstCaps* refGPtr<GstCaps>(GstCaps* ptr);
template<> void derefGPtr<GstCaps>(GstCaps* ptr);
+template<> GRefPtr<GstContext> adoptGRef(GstContext* ptr);
+template<> GstContext* refGPtr<GstContext>(GstContext* ptr);
+template<> void derefGPtr<GstContext>(GstContext* ptr);
+
template<> GRefPtr<GstTask> adoptGRef(GstTask* ptr);
template<> GstTask* refGPtr<GstTask>(GstTask* ptr);
template<> void derefGPtr<GstTask>(GstTask* ptr);
@@ -70,6 +80,14 @@ template<> GRefPtr<GstBuffer> adoptGRef(GstBuffer* ptr);
template<> GstBuffer* refGPtr<GstBuffer>(GstBuffer* ptr);
template<> void derefGPtr<GstBuffer>(GstBuffer* ptr);
+template<> GRefPtr<GstBufferList> adoptGRef(GstBufferList*);
+template<> GstBufferList* refGPtr<GstBufferList>(GstBufferList*);
+template<> void derefGPtr<GstBufferList>(GstBufferList*);
+
+template<> GRefPtr<GstBufferPool> adoptGRef(GstBufferPool*);
+template<> GstBufferPool* refGPtr<GstBufferPool>(GstBufferPool*);
+template<> void derefGPtr<GstBufferPool>(GstBufferPool*);
+
template<> GRefPtr<GstSample> adoptGRef(GstSample* ptr);
template<> GstSample* refGPtr<GstSample>(GstSample* ptr);
template<> void derefGPtr<GstSample>(GstSample* ptr);
@@ -85,7 +103,22 @@ template<> void derefGPtr<GstEvent>(GstEvent* ptr);
template<> GRefPtr<GstToc> adoptGRef(GstToc* ptr);
template<> GstToc* refGPtr<GstToc>(GstToc* ptr);
template<> void derefGPtr<GstToc>(GstToc* ptr);
-}
+
+template<> GRefPtr<GstMessage> adoptGRef(GstMessage*);
+template<> GstMessage* refGPtr<GstMessage>(GstMessage*);
+template<> void derefGPtr<GstMessage>(GstMessage*);
+
+template<> GRefPtr<WebKitVideoSink> adoptGRef(WebKitVideoSink* ptr);
+template<> WebKitVideoSink* refGPtr<WebKitVideoSink>(WebKitVideoSink* ptr);
+template<> void derefGPtr<WebKitVideoSink>(WebKitVideoSink* ptr);
+
+template<> GRefPtr<WebKitWebSrc> adoptGRef(WebKitWebSrc* ptr);
+GRefPtr<WebKitWebSrc> ensureGRef(WebKitWebSrc* ptr);
+template<> WebKitWebSrc* refGPtr<WebKitWebSrc>(WebKitWebSrc* ptr);
+template<> void derefGPtr<WebKitWebSrc>(WebKitWebSrc* ptr);
+
+} // namespace WTF
#endif // USE(GSTREAMER)
-#endif
+
+#endif // GRefPtrGStreamer_h
diff --git a/Source/WebCore/platform/graphics/gstreamer/GStreamerUtilities.cpp b/Source/WebCore/platform/graphics/gstreamer/GStreamerUtilities.cpp
index 7b666944e..770675266 100644
--- a/Source/WebCore/platform/graphics/gstreamer/GStreamerUtilities.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/GStreamerUtilities.cpp
@@ -1,5 +1,6 @@
/*
- * Copyright (C) 2012 Igalia S.L
+ * Copyright (C) 2012, 2015, 2016 Igalia S.L
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
@@ -22,11 +23,19 @@
#if USE(GSTREAMER)
#include "GStreamerUtilities.h"
+#include "GRefPtrGStreamer.h"
#include "IntSize.h"
-#include <gst/audio/audio.h>
+#include <gst/audio/audio-info.h>
#include <gst/gst.h>
-#include <wtf/gobject/GUniquePtr.h>
+#include <wtf/MathExtras.h>
+#include <wtf/glib/GUniquePtr.h>
+
+#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
+#define GST_USE_UNSTABLE_API
+#include <gst/mpegts/mpegts.h>
+#undef GST_USE_UNSTABLE_API
+#endif
namespace WebCore {
@@ -65,6 +74,22 @@ bool getVideoSizeAndFormatFromCaps(GstCaps* caps, WebCore::IntSize& size, GstVid
return true;
}
+
+bool getSampleVideoInfo(GstSample* sample, GstVideoInfo& videoInfo)
+{
+ if (!GST_IS_SAMPLE(sample))
+ return false;
+
+ GstCaps* caps = gst_sample_get_caps(sample);
+ if (!caps)
+ return false;
+
+ gst_video_info_init(&videoInfo);
+ if (!gst_video_info_from_caps(&videoInfo, caps))
+ return false;
+
+ return true;
+}
#endif
GstBuffer* createGstBuffer(GstBuffer* buffer)
@@ -95,17 +120,17 @@ char* getGstBufferDataPointer(GstBuffer* buffer)
return reinterpret_cast<char*>(mapInfo->data);
}
-void mapGstBuffer(GstBuffer* buffer)
+void mapGstBuffer(GstBuffer* buffer, uint32_t flags)
{
- GstMapInfo* mapInfo = g_slice_new(GstMapInfo);
- if (!gst_buffer_map(buffer, mapInfo, GST_MAP_WRITE)) {
- g_slice_free(GstMapInfo, mapInfo);
+ GstMapInfo* mapInfo = static_cast<GstMapInfo*>(fastMalloc(sizeof(GstMapInfo)));
+ if (!gst_buffer_map(buffer, mapInfo, static_cast<GstMapFlags>(flags))) {
+ fastFree(mapInfo);
gst_buffer_unref(buffer);
return;
}
GstMiniObject* miniObject = reinterpret_cast<GstMiniObject*>(buffer);
- gst_mini_object_set_qdata(miniObject, g_quark_from_static_string(webkitGstMapInfoQuarkString), mapInfo, 0);
+ gst_mini_object_set_qdata(miniObject, g_quark_from_static_string(webkitGstMapInfoQuarkString), mapInfo, nullptr);
}
void unmapGstBuffer(GstBuffer* buffer)
@@ -117,23 +142,101 @@ void unmapGstBuffer(GstBuffer* buffer)
return;
gst_buffer_unmap(buffer, mapInfo);
- g_slice_free(GstMapInfo, mapInfo);
+ fastFree(mapInfo);
}
bool initializeGStreamer()
{
-#if GST_CHECK_VERSION(0, 10, 31)
if (gst_is_initialized())
return true;
-#endif
GUniqueOutPtr<GError> error;
// FIXME: We should probably pass the arguments from the command line.
- bool gstInitialized = gst_init_check(0, 0, &error.outPtr());
+ bool gstInitialized = gst_init_check(nullptr, nullptr, &error.outPtr());
ASSERT_WITH_MESSAGE(gstInitialized, "GStreamer initialization failed: %s", error ? error->message : "unknown error occurred");
+
+#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
+ if (gstInitialized)
+ gst_mpegts_initialize();
+#endif
+
return gstInitialized;
}
+unsigned getGstPlayFlag(const char* nick)
+{
+ static GFlagsClass* flagsClass = static_cast<GFlagsClass*>(g_type_class_ref(g_type_from_name("GstPlayFlags")));
+ ASSERT(flagsClass);
+
+ GFlagsValue* flag = g_flags_get_value_by_nick(flagsClass, nick);
+ if (!flag)
+ return 0;
+
+ return flag->value;
+}
+
+GstClockTime toGstClockTime(float time)
+{
+ // Extract the integer part of the time (seconds) and the fractional part (microseconds). Attempt to
+ // round the microseconds so no floating point precision is lost and we can perform an accurate seek.
+ float seconds;
+ float microSeconds = modff(time, &seconds) * 1000000;
+ GTimeVal timeValue;
+ timeValue.tv_sec = static_cast<glong>(seconds);
+ timeValue.tv_usec = static_cast<glong>(floor(microSeconds + 0.5));
+ return GST_TIMEVAL_TO_TIME(timeValue);
+}
+
+bool gstRegistryHasElementForMediaType(GList* elementFactories, const char* capsString)
+{
+ GRefPtr<GstCaps> caps = adoptGRef(gst_caps_from_string(capsString));
+ GList* candidates = gst_element_factory_list_filter(elementFactories, caps.get(), GST_PAD_SINK, false);
+ bool result = candidates;
+
+ gst_plugin_feature_list_free(candidates);
+ return result;
+}
+
+#if GST_CHECK_VERSION(1, 5, 3) && (ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA))
+GstElement* createGstDecryptor(const gchar* protectionSystem)
+{
+ GstElement* decryptor = nullptr;
+ GList* decryptors = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECRYPTOR, GST_RANK_MARGINAL);
+
+ GST_TRACE("looking for decryptor for %s", protectionSystem);
+
+ for (GList* walk = decryptors; !decryptor && walk; walk = g_list_next(walk)) {
+ GstElementFactory* factory = reinterpret_cast<GstElementFactory*>(walk->data);
+
+ GST_TRACE("checking factory %s", GST_OBJECT_NAME(factory));
+
+ for (const GList* current = gst_element_factory_get_static_pad_templates(factory); current && !decryptor; current = g_list_next(current)) {
+ GstStaticPadTemplate* staticPadTemplate = static_cast<GstStaticPadTemplate*>(current->data);
+ GRefPtr<GstCaps> caps = adoptGRef(gst_static_pad_template_get_caps(staticPadTemplate));
+ unsigned length = gst_caps_get_size(caps.get());
+
+ GST_TRACE("factory %s caps has size %u", GST_OBJECT_NAME(factory), length);
+ for (unsigned i = 0; !decryptor && i < length; ++i) {
+ GstStructure* structure = gst_caps_get_structure(caps.get(), i);
+ GST_TRACE("checking structure %s", gst_structure_get_name(structure));
+ if (gst_structure_has_field_typed(structure, GST_PROTECTION_SYSTEM_ID_CAPS_FIELD, G_TYPE_STRING)) {
+ const gchar* sysId = gst_structure_get_string(structure, GST_PROTECTION_SYSTEM_ID_CAPS_FIELD);
+ GST_TRACE("structure %s has protection system %s", gst_structure_get_name(structure), sysId);
+ if (!g_ascii_strcasecmp(protectionSystem, sysId)) {
+ GST_DEBUG("found decryptor %s for %s", GST_OBJECT_NAME(factory), protectionSystem);
+ decryptor = gst_element_factory_create(factory, nullptr);
+ break;
+ }
+ }
+ }
+ }
+ }
+ gst_plugin_feature_list_free(decryptors);
+ GST_TRACE("returning decryptor %p", decryptor);
+ return decryptor;
+}
+#endif
+
}
#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/GStreamerUtilities.h b/Source/WebCore/platform/graphics/gstreamer/GStreamerUtilities.h
index 755dbdb72..f79a8cf6f 100644
--- a/Source/WebCore/platform/graphics/gstreamer/GStreamerUtilities.h
+++ b/Source/WebCore/platform/graphics/gstreamer/GStreamerUtilities.h
@@ -1,5 +1,6 @@
/*
- * Copyright (C) 2012 Igalia S.L
+ * Copyright (C) 2012, 2015, 2016 Igalia S.L
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
@@ -16,26 +17,13 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
+#pragma once
+
#include "Logging.h"
#include <gst/gst.h>
-#include <gst/video/video.h>
-
-#define LOG_MEDIA_MESSAGE(...) do { \
- GST_DEBUG(__VA_ARGS__); \
- LOG_VERBOSE(Media, __VA_ARGS__); } while (0)
-
-#define ERROR_MEDIA_MESSAGE(...) do { \
- GST_ERROR(__VA_ARGS__); \
- LOG_VERBOSE(Media, __VA_ARGS__); } while (0)
-
-#define INFO_MEDIA_MESSAGE(...) do { \
- GST_INFO(__VA_ARGS__); \
- LOG_VERBOSE(Media, __VA_ARGS__); } while (0)
-
-#define WARN_MEDIA_MESSAGE(...) do { \
- GST_WARNING(__VA_ARGS__); \
- LOG_VERBOSE(Media, __VA_ARGS__); } while (0)
+#include <gst/video/video-format.h>
+#include <gst/video/video-info.h>
namespace WebCore {
@@ -65,12 +53,19 @@ inline bool webkitGstCheckVersion(guint major, guint minor, guint micro)
GstPad* webkitGstGhostPadFromStaticTemplate(GstStaticPadTemplate*, const gchar* name, GstPad* target);
#if ENABLE(VIDEO)
bool getVideoSizeAndFormatFromCaps(GstCaps*, WebCore::IntSize&, GstVideoFormat&, int& pixelAspectRatioNumerator, int& pixelAspectRatioDenominator, int& stride);
+bool getSampleVideoInfo(GstSample*, GstVideoInfo&);
#endif
GstBuffer* createGstBuffer(GstBuffer*);
GstBuffer* createGstBufferForData(const char* data, int length);
char* getGstBufferDataPointer(GstBuffer*);
-void mapGstBuffer(GstBuffer*);
+void mapGstBuffer(GstBuffer*, uint32_t);
void unmapGstBuffer(GstBuffer*);
bool initializeGStreamer();
+unsigned getGstPlayFlag(const char* nick);
+GstClockTime toGstClockTime(float time);
+bool gstRegistryHasElementForMediaType(GList* elementFactories, const char* capsString);
+#if GST_CHECK_VERSION(1, 5, 3) && (ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA))
+GstElement* createGstDecryptor(const gchar* protectionSystem);
+#endif
}
diff --git a/Source/WebCore/platform/graphics/gstreamer/GUniquePtrGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/GUniquePtrGStreamer.h
new file mode 100644
index 000000000..693990884
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/GUniquePtrGStreamer.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2015 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef GUniquePtrGStreamer_h
+#define GUniquePtrGStreamer_h
+#if USE(GSTREAMER)
+
+#include <gst/gststructure.h>
+#include <gst/pbutils/install-plugins.h>
+#include <wtf/glib/GUniquePtr.h>
+
+namespace WTF {
+
+WTF_DEFINE_GPTR_DELETER(GstStructure, gst_structure_free)
+WTF_DEFINE_GPTR_DELETER(GstInstallPluginsContext, gst_install_plugins_context_free)
+
+}
+
+#endif // USE(GSTREAMER)
+#endif
diff --git a/Source/WebCore/platform/graphics/gstreamer/ImageGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/ImageGStreamer.h
index c9d2fb4e9..1ecd334ba 100644
--- a/Source/WebCore/platform/graphics/gstreamer/ImageGStreamer.h
+++ b/Source/WebCore/platform/graphics/gstreamer/ImageGStreamer.h
@@ -27,7 +27,7 @@
#include "GRefPtrGStreamer.h"
#include <gst/gst.h>
-#include <gst/video/video.h>
+#include <gst/video/video-frame.h>
#include <wtf/PassRefPtr.h>
#include <wtf/RefCounted.h>
@@ -38,9 +38,9 @@ class IntSize;
class ImageGStreamer : public RefCounted<ImageGStreamer> {
public:
- static PassRefPtr<ImageGStreamer> createImage(GstBuffer* buffer, GstCaps* caps)
+ static PassRefPtr<ImageGStreamer> createImage(GstSample* sample)
{
- return adoptRef(new ImageGStreamer(buffer, caps));
+ return adoptRef(new ImageGStreamer(sample));
}
~ImageGStreamer();
@@ -60,7 +60,7 @@ class ImageGStreamer : public RefCounted<ImageGStreamer> {
}
private:
- ImageGStreamer(GstBuffer*, GstCaps*);
+ ImageGStreamer(GstSample*);
RefPtr<BitmapImage> m_image;
FloatRect m_cropRect;
diff --git a/Source/WebCore/platform/graphics/gstreamer/ImageGStreamerCairo.cpp b/Source/WebCore/platform/graphics/gstreamer/ImageGStreamerCairo.cpp
index b153f09ec..c55cfdf97 100644
--- a/Source/WebCore/platform/graphics/gstreamer/ImageGStreamerCairo.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/ImageGStreamerCairo.cpp
@@ -32,8 +32,9 @@
using namespace std;
using namespace WebCore;
-ImageGStreamer::ImageGStreamer(GstBuffer* buffer, GstCaps* caps)
+ImageGStreamer::ImageGStreamer(GstSample* sample)
{
+ GstCaps* caps = gst_sample_get_caps(sample);
GstVideoInfo videoInfo;
gst_video_info_init(&videoInfo);
if (!gst_video_info_from_caps(&videoInfo, caps))
@@ -42,6 +43,7 @@ ImageGStreamer::ImageGStreamer(GstBuffer* buffer, GstCaps* caps)
// Right now the TextureMapper only supports chromas with one plane
ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);
+ GstBuffer* buffer = gst_sample_get_buffer(sample);
if (!gst_video_frame_map(&m_videoFrame, &videoInfo, buffer, GST_MAP_READ))
return;
@@ -60,7 +62,7 @@ ImageGStreamer::ImageGStreamer(GstBuffer* buffer, GstCaps* caps)
RefPtr<cairo_surface_t> surface = adoptRef(cairo_image_surface_create_for_data(bufferData, cairoFormat, width, height, stride));
ASSERT(cairo_surface_status(surface.get()) == CAIRO_STATUS_SUCCESS);
- m_image = BitmapImage::create(surface.release());
+ m_image = BitmapImage::create(WTFMove(surface));
if (GstVideoCropMeta* cropMeta = gst_buffer_get_video_crop_meta(buffer))
setCropRect(FloatRect(cropMeta->x, cropMeta->y, cropMeta->width, cropMeta->height));
@@ -69,9 +71,7 @@ ImageGStreamer::ImageGStreamer(GstBuffer* buffer, GstCaps* caps)
ImageGStreamer::~ImageGStreamer()
{
if (m_image)
- m_image.clear();
-
- m_image = 0;
+ m_image = nullptr;
// We keep the buffer memory mapped until the image is destroyed because the internal
// cairo_surface_t was created using cairo_image_surface_create_for_data().
diff --git a/Source/WebCore/platform/graphics/gstreamer/InbandMetadataTextTrackPrivateGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/InbandMetadataTextTrackPrivateGStreamer.h
index b8adc641f..e942a2c5f 100644
--- a/Source/WebCore/platform/graphics/gstreamer/InbandMetadataTextTrackPrivateGStreamer.h
+++ b/Source/WebCore/platform/graphics/gstreamer/InbandMetadataTextTrackPrivateGStreamer.h
@@ -23,8 +23,7 @@
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-#ifndef InbandMetadataTextTrackPrivateGStreamer_h
-#define InbandMetadataTextTrackPrivateGStreamer_h
+#pragma once
#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(VIDEO_TRACK)
@@ -35,28 +34,44 @@ namespace WebCore {
class InbandMetadataTextTrackPrivateGStreamer : public InbandTextTrackPrivate {
public:
- static PassRefPtr<InbandMetadataTextTrackPrivateGStreamer> create(Kind kind)
+ static PassRefPtr<InbandMetadataTextTrackPrivateGStreamer> create(Kind kind, CueFormat cueFormat, const AtomicString& id = emptyAtom)
{
- return adoptRef(new InbandMetadataTextTrackPrivateGStreamer(kind));
+ return adoptRef(new InbandMetadataTextTrackPrivateGStreamer(kind, cueFormat, id));
}
~InbandMetadataTextTrackPrivateGStreamer() { }
- virtual Kind kind() const override { return m_kind; }
+ Kind kind() const override { return m_kind; }
+ AtomicString id() const override { return m_id; }
+ AtomicString inBandMetadataTrackDispatchType() const override { return m_inBandMetadataTrackDispatchType; }
+ void setInBandMetadataTrackDispatchType(const AtomicString& value) { m_inBandMetadataTrackDispatchType = value; }
+
+ void addDataCue(const MediaTime& start, const MediaTime& end, const void* data, unsigned length)
+ {
+ ASSERT(cueFormat() == Data);
+ client()->addDataCue(start, end, data, length);
+ }
+
+ void addGenericCue(PassRefPtr<GenericCueData> data)
+ {
+ ASSERT(cueFormat() == Generic);
+ client()->addGenericCue(*data);
+ }
private:
- InbandMetadataTextTrackPrivateGStreamer(Kind kind)
- : InbandTextTrackPrivate(Generic)
+ InbandMetadataTextTrackPrivateGStreamer(Kind kind, CueFormat cueFormat, const AtomicString& id)
+ : InbandTextTrackPrivate(cueFormat)
, m_kind(kind)
+ , m_id(id)
{
}
Kind m_kind;
+ AtomicString m_id;
+ AtomicString m_inBandMetadataTrackDispatchType;
};
} // namespace WebCore
#endif // ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(VIDEO_TRACK)
-
-#endif // InbandMetadataTextTrackPrivateGStreamer_h
diff --git a/Source/WebCore/platform/graphics/gstreamer/InbandTextTrackPrivateGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/InbandTextTrackPrivateGStreamer.cpp
index bed10faa7..24e58f160 100644
--- a/Source/WebCore/platform/graphics/gstreamer/InbandTextTrackPrivateGStreamer.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/InbandTextTrackPrivateGStreamer.cpp
@@ -39,38 +39,20 @@ GST_DEBUG_CATEGORY_EXTERN(webkit_media_player_debug);
namespace WebCore {
-static GstPadProbeReturn textTrackPrivateEventCallback(GstPad*, GstPadProbeInfo* info, InbandTextTrackPrivateGStreamer* track)
-{
- GstEvent* event = gst_pad_probe_info_get_event(info);
- switch (GST_EVENT_TYPE(event)) {
- case GST_EVENT_STREAM_START:
- track->streamChanged();
- break;
- default:
- break;
- }
- return GST_PAD_PROBE_OK;
-}
-
-static gboolean textTrackPrivateSampleTimeoutCallback(InbandTextTrackPrivateGStreamer* track)
-{
- track->notifyTrackOfSample();
- return FALSE;
-}
-
-static gboolean textTrackPrivateStreamTimeoutCallback(InbandTextTrackPrivateGStreamer* track)
-{
- track->notifyTrackOfStreamChanged();
- return FALSE;
-}
-
InbandTextTrackPrivateGStreamer::InbandTextTrackPrivateGStreamer(gint index, GRefPtr<GstPad> pad)
: InbandTextTrackPrivate(WebVTT), TrackPrivateBaseGStreamer(this, index, pad)
- , m_sampleTimerHandler(0)
- , m_streamTimerHandler(0)
{
- m_eventProbe = gst_pad_add_probe(m_pad.get(), GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
- reinterpret_cast<GstPadProbeCallback>(textTrackPrivateEventCallback), this, 0);
+ m_eventProbe = gst_pad_add_probe(m_pad.get(), GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, [] (GstPad*, GstPadProbeInfo* info, gpointer userData) -> GstPadProbeReturn {
+ auto* track = static_cast<InbandTextTrackPrivateGStreamer*>(userData);
+ switch (GST_EVENT_TYPE(gst_pad_probe_info_get_event(info))) {
+ case GST_EVENT_STREAM_START:
+ track->streamChanged();
+ break;
+ default:
+ break;
+ }
+ return GST_PAD_PROBE_OK;
+ }, this, nullptr);
notifyTrackOfStreamChanged();
}
@@ -82,39 +64,35 @@ void InbandTextTrackPrivateGStreamer::disconnect()
gst_pad_remove_probe(m_pad.get(), m_eventProbe);
- if (m_streamTimerHandler)
- g_source_remove(m_streamTimerHandler);
-
TrackPrivateBaseGStreamer::disconnect();
}
void InbandTextTrackPrivateGStreamer::handleSample(GRefPtr<GstSample> sample)
{
- if (m_sampleTimerHandler)
- g_source_remove(m_sampleTimerHandler);
{
- MutexLocker lock(m_sampleMutex);
+ LockHolder lock(m_sampleMutex);
m_pendingSamples.append(sample);
}
- m_sampleTimerHandler = g_timeout_add(0,
- reinterpret_cast<GSourceFunc>(textTrackPrivateSampleTimeoutCallback), this);
+
+ RefPtr<InbandTextTrackPrivateGStreamer> protectedThis(this);
+ m_notifier->notify(MainThreadNotification::NewSample, [protectedThis] {
+ protectedThis->notifyTrackOfSample();
+ });
}
void InbandTextTrackPrivateGStreamer::streamChanged()
{
- if (m_streamTimerHandler)
- g_source_remove(m_streamTimerHandler);
- m_streamTimerHandler = g_timeout_add(0,
- reinterpret_cast<GSourceFunc>(textTrackPrivateStreamTimeoutCallback), this);
+ RefPtr<InbandTextTrackPrivateGStreamer> protectedThis(this);
+ m_notifier->notify(MainThreadNotification::StreamChanged, [protectedThis] {
+ protectedThis->notifyTrackOfStreamChanged();
+ });
}
void InbandTextTrackPrivateGStreamer::notifyTrackOfSample()
{
- m_sampleTimerHandler = 0;
-
Vector<GRefPtr<GstSample> > samples;
{
- MutexLocker lock(m_sampleMutex);
+ LockHolder lock(m_sampleMutex);
m_pendingSamples.swap(samples);
}
@@ -122,28 +100,26 @@ void InbandTextTrackPrivateGStreamer::notifyTrackOfSample()
GRefPtr<GstSample> sample = samples[i];
GstBuffer* buffer = gst_sample_get_buffer(sample.get());
if (!buffer) {
- WARN_MEDIA_MESSAGE("Track %d got sample with no buffer.", m_index);
+ GST_WARNING("Track %d got sample with no buffer.", m_index);
continue;
}
GstMapInfo info;
gboolean ret = gst_buffer_map(buffer, &info, GST_MAP_READ);
ASSERT(ret);
if (!ret) {
- WARN_MEDIA_MESSAGE("Track %d unable to map buffer.", m_index);
+ GST_WARNING("Track %d unable to map buffer.", m_index);
continue;
}
- INFO_MEDIA_MESSAGE("Track %d parsing sample: %.*s", m_index, static_cast<int>(info.size),
+ GST_INFO("Track %d parsing sample: %.*s", m_index, static_cast<int>(info.size),
reinterpret_cast<char*>(info.data));
- client()->parseWebVTTCueData(this, reinterpret_cast<char*>(info.data), info.size);
+ client()->parseWebVTTCueData(reinterpret_cast<char*>(info.data), info.size);
gst_buffer_unmap(buffer, &info);
}
}
void InbandTextTrackPrivateGStreamer::notifyTrackOfStreamChanged()
{
- m_streamTimerHandler = 0;
-
GRefPtr<GstEvent> event = adoptGRef(gst_pad_get_sticky_event(m_pad.get(),
GST_EVENT_STREAM_START, 0));
if (!event)
@@ -151,7 +127,7 @@ void InbandTextTrackPrivateGStreamer::notifyTrackOfStreamChanged()
const gchar* streamId;
gst_event_parse_stream_start(event.get(), &streamId);
- INFO_MEDIA_MESSAGE("Track %d got stream start for stream %s.", m_index, streamId);
+ GST_INFO("Track %d got stream start for stream %s.", m_index, streamId);
m_streamId = streamId;
}
diff --git a/Source/WebCore/platform/graphics/gstreamer/InbandTextTrackPrivateGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/InbandTextTrackPrivateGStreamer.h
index 285cd86c8..cc3a6f0e9 100644
--- a/Source/WebCore/platform/graphics/gstreamer/InbandTextTrackPrivateGStreamer.h
+++ b/Source/WebCore/platform/graphics/gstreamer/InbandTextTrackPrivateGStreamer.h
@@ -31,6 +31,7 @@
#include "GRefPtrGStreamer.h"
#include "InbandTextTrackPrivate.h"
#include "TrackPrivateBaseGStreamer.h"
+#include <wtf/Lock.h>
namespace WebCore {
@@ -44,29 +45,28 @@ public:
return adoptRef(new InbandTextTrackPrivateGStreamer(index, pad));
}
- virtual void disconnect() override;
+ void disconnect() override;
- virtual AtomicString label() const override { return m_label; }
- virtual AtomicString language() const override { return m_language; }
+ AtomicString label() const override { return m_label; }
+ AtomicString language() const override { return m_language; }
- virtual int trackIndex() const override { return m_index; }
+ int trackIndex() const override { return m_index; }
String streamId() const { return m_streamId; }
void handleSample(GRefPtr<GstSample>);
+
+private:
+ InbandTextTrackPrivateGStreamer(gint index, GRefPtr<GstPad>);
+
void streamChanged();
void notifyTrackOfSample();
void notifyTrackOfStreamChanged();
-private:
- InbandTextTrackPrivateGStreamer(gint index, GRefPtr<GstPad>);
-
- guint m_sampleTimerHandler;
- guint m_streamTimerHandler;
gulong m_eventProbe;
Vector<GRefPtr<GstSample> > m_pendingSamples;
String m_streamId;
- Mutex m_sampleMutex;
+ Lock m_sampleMutex;
};
} // namespace WebCore
diff --git a/Source/WebCore/platform/graphics/gstreamer/MainThreadNotifier.h b/Source/WebCore/platform/graphics/gstreamer/MainThreadNotifier.h
new file mode 100644
index 000000000..96b587ff9
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/MainThreadNotifier.h
@@ -0,0 +1,105 @@
+/*
+ * Copyright (C) 2015 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#pragma once
+
+#include <wtf/Atomics.h>
+#include <wtf/Lock.h>
+#include <wtf/MainThread.h>
+#include <wtf/RunLoop.h>
+#include <wtf/ThreadSafeRefCounted.h>
+
+namespace WebCore {
+
+template <typename T>
+class MainThreadNotifier final : public ThreadSafeRefCounted<MainThreadNotifier<T>> {
+public:
+ static Ref<MainThreadNotifier> create()
+ {
+ return adoptRef(*new MainThreadNotifier());
+ }
+
+ template<typename F>
+ void notify(T notificationType, const F& callbackFunctor)
+ {
+ ASSERT(m_isValid.load());
+ if (isMainThread()) {
+ removePendingNotification(notificationType);
+ callbackFunctor();
+ return;
+ }
+
+ if (!addPendingNotification(notificationType))
+ return;
+
+ RunLoop::main().dispatch([this, protectedThis = makeRef(*this), notificationType, callback = std::function<void()>(callbackFunctor)] {
+ if (!m_isValid.load())
+ return;
+ if (removePendingNotification(notificationType))
+ callback();
+ });
+ }
+
+ void cancelPendingNotifications(unsigned mask = 0)
+ {
+ ASSERT(m_isValid.load());
+ LockHolder locker(m_pendingNotificationsLock);
+ if (mask)
+ m_pendingNotifications &= ~mask;
+ else
+ m_pendingNotifications = 0;
+ }
+
+ void invalidate()
+ {
+ ASSERT(m_isValid.load());
+ m_isValid.store(false);
+ }
+
+private:
+ MainThreadNotifier()
+ {
+ m_isValid.store(true);
+ }
+
+ bool addPendingNotification(T notificationType)
+ {
+ LockHolder locker(m_pendingNotificationsLock);
+ if (notificationType & m_pendingNotifications)
+ return false;
+ m_pendingNotifications |= notificationType;
+ return true;
+ }
+
+ bool removePendingNotification(T notificationType)
+ {
+ LockHolder locker(m_pendingNotificationsLock);
+ if (notificationType & m_pendingNotifications) {
+ m_pendingNotifications &= ~notificationType;
+ return true;
+ }
+ return false;
+ }
+
+ Lock m_pendingNotificationsLock;
+ unsigned m_pendingNotifications { 0 };
+ Atomic<bool> m_isValid;
+};
+
+} // namespace WebCore
+
diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
index 165c5a06a..cadf905ed 100644
--- a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
@@ -3,7 +3,9 @@
* Copyright (C) 2007 Collabora Ltd. All rights reserved.
* Copyright (C) 2007 Alp Toker <alp@atoker.com>
* Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
- * Copyright (C) 2009, 2010, 2011, 2012, 2013 Igalia S.L
+ * Copyright (C) 2009, 2010, 2011, 2012, 2013, 2015, 2016 Igalia S.L
+ * Copyright (C) 2014 Cable Television Laboratories, Inc.
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
@@ -26,18 +28,24 @@
#if ENABLE(VIDEO) && USE(GSTREAMER)
+#include "FileSystem.h"
#include "GStreamerUtilities.h"
#include "URL.h"
#include "MIMETypeRegistry.h"
#include "MediaPlayer.h"
+#include "MediaPlayerRequestInstallMissingPluginsCallback.h"
#include "NotImplemented.h"
#include "SecurityOrigin.h"
#include "TimeRanges.h"
#include "WebKitWebSourceGStreamer.h"
+#include <glib.h>
#include <gst/gst.h>
#include <gst/pbutils/missing-plugins.h>
#include <limits>
-#include <wtf/gobject/GUniquePtr.h>
+#include <wtf/HexNumber.h>
+#include <wtf/MediaTime.h>
+#include <wtf/NeverDestroyed.h>
+#include <wtf/glib/GUniquePtr.h>
#include <wtf/text/CString.h>
#if ENABLE(VIDEO_TRACK)
@@ -49,6 +57,11 @@
#include "VideoTrackPrivateGStreamer.h"
#endif
+#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
+#define GST_USE_UNSTABLE_API
+#include <gst/mpegts/mpegts.h>
+#undef GST_USE_UNSTABLE_API
+#endif
#include <gst/audio/streamvolume.h>
#if ENABLE(MEDIA_SOURCE)
@@ -56,24 +69,9 @@
#include "WebKitMediaSourceGStreamer.h"
#endif
-// GstPlayFlags flags from playbin2. It is the policy of GStreamer to
-// not publicly expose element-specific enums. That's why this
-// GstPlayFlags enum has been copied here.
-typedef enum {
- GST_PLAY_FLAG_VIDEO = 0x00000001,
- GST_PLAY_FLAG_AUDIO = 0x00000002,
- GST_PLAY_FLAG_TEXT = 0x00000004,
- GST_PLAY_FLAG_VIS = 0x00000008,
- GST_PLAY_FLAG_SOFT_VOLUME = 0x00000010,
- GST_PLAY_FLAG_NATIVE_AUDIO = 0x00000020,
- GST_PLAY_FLAG_NATIVE_VIDEO = 0x00000040,
- GST_PLAY_FLAG_DOWNLOAD = 0x00000080,
- GST_PLAY_FLAG_BUFFERING = 0x000000100
-} GstPlayFlags;
-
-// Max interval in seconds to stay in the READY state on manual
-// state change requests.
-static const guint gReadyStateTimerInterval = 60;
+#if ENABLE(WEB_AUDIO)
+#include "AudioSourceProviderGStreamer.h"
+#endif
GST_DEBUG_CATEGORY_EXTERN(webkit_media_player_debug);
#define GST_CAT_DEFAULT webkit_media_player_debug
@@ -82,127 +80,34 @@ using namespace std;
namespace WebCore {
-static gboolean mediaPlayerPrivateMessageCallback(GstBus*, GstMessage* message, MediaPlayerPrivateGStreamer* player)
-{
- return player->handleMessage(message);
-}
-
-static void mediaPlayerPrivateSourceChangedCallback(GObject*, GParamSpec*, MediaPlayerPrivateGStreamer* player)
-{
- player->sourceChanged();
-}
-
-static void mediaPlayerPrivateVideoSinkCapsChangedCallback(GObject*, GParamSpec*, MediaPlayerPrivateGStreamer* player)
-{
- player->videoCapsChanged();
-}
-
-static void mediaPlayerPrivateVideoChangedCallback(GObject*, MediaPlayerPrivateGStreamer* player)
-{
- player->videoChanged();
-}
-
-static void mediaPlayerPrivateAudioChangedCallback(GObject*, MediaPlayerPrivateGStreamer* player)
+static void busMessageCallback(GstBus*, GstMessage* message, MediaPlayerPrivateGStreamer* player)
{
- player->audioChanged();
+ player->handleMessage(message);
}
-static gboolean mediaPlayerPrivateAudioChangeTimeoutCallback(MediaPlayerPrivateGStreamer* player)
-{
- // This is the callback of the timeout source created in ::audioChanged.
- player->notifyPlayerOfAudio();
- return FALSE;
-}
-
-static void setAudioStreamPropertiesCallback(GstChildProxy*, GObject* object, gchar*,
- MediaPlayerPrivateGStreamer* player)
+void MediaPlayerPrivateGStreamer::setAudioStreamPropertiesCallback(MediaPlayerPrivateGStreamer* player, GObject* object)
{
player->setAudioStreamProperties(object);
}
-static gboolean mediaPlayerPrivateVideoChangeTimeoutCallback(MediaPlayerPrivateGStreamer* player)
-{
- // This is the callback of the timeout source created in ::videoChanged.
- player->notifyPlayerOfVideo();
- return FALSE;
-}
-
-static gboolean mediaPlayerPrivateVideoCapsChangeTimeoutCallback(MediaPlayerPrivateGStreamer* player)
-{
- // This is the callback of the timeout source created in ::videoCapsChanged.
- player->notifyPlayerOfVideoCaps();
- return FALSE;
-}
-
-#if ENABLE(VIDEO_TRACK)
-static void mediaPlayerPrivateTextChangedCallback(GObject*, MediaPlayerPrivateGStreamer* player)
-{
- player->textChanged();
-}
-
-static gboolean mediaPlayerPrivateTextChangeTimeoutCallback(MediaPlayerPrivateGStreamer* player)
-{
- // This is the callback of the timeout source created in ::textChanged.
- player->notifyPlayerOfText();
- return FALSE;
-}
-
-static GstFlowReturn mediaPlayerPrivateNewTextSampleCallback(GObject*, MediaPlayerPrivateGStreamer* player)
-{
- player->newTextSample();
- return GST_FLOW_OK;
-}
-#endif
-
-static gboolean mediaPlayerPrivateReadyStateTimeoutCallback(MediaPlayerPrivateGStreamer* player)
-{
- // This is the callback of the timeout source created in ::changePipelineState.
- // Reset pipeline if we are sitting on READY state when timeout is reached
- player->changePipelineState(GST_STATE_NULL);
- return FALSE;
-}
-
-static void mediaPlayerPrivatePluginInstallerResultFunction(GstInstallPluginsReturn result, gpointer userData)
-{
- MediaPlayerPrivateGStreamer* player = reinterpret_cast<MediaPlayerPrivateGStreamer*>(userData);
- player->handlePluginInstallerResult(result);
-}
-
-static GstClockTime toGstClockTime(float time)
-{
- // Extract the integer part of the time (seconds) and the fractional part (microseconds). Attempt to
- // round the microseconds so no floating point precision is lost and we can perform an accurate seek.
- float seconds;
- float microSeconds = modf(time, &seconds) * 1000000;
- GTimeVal timeValue;
- timeValue.tv_sec = static_cast<glong>(seconds);
- timeValue.tv_usec = static_cast<glong>(roundf(microSeconds / 10000) * 10000);
- return GST_TIMEVAL_TO_TIME(timeValue);
-}
-
void MediaPlayerPrivateGStreamer::setAudioStreamProperties(GObject* object)
{
if (g_strcmp0(G_OBJECT_TYPE_NAME(object), "GstPulseSink"))
return;
- const char* role = m_player->mediaPlayerClient() && m_player->mediaPlayerClient()->mediaPlayerIsVideo()
- ? "video" : "music";
- GstStructure* structure = gst_structure_new("stream-properties", "media.role", G_TYPE_STRING, role, NULL);
- g_object_set(object, "stream-properties", structure, NULL);
+ const char* role = m_player->client().mediaPlayerIsVideo() ? "video" : "music";
+ GstStructure* structure = gst_structure_new("stream-properties", "media.role", G_TYPE_STRING, role, nullptr);
+ g_object_set(object, "stream-properties", structure, nullptr);
gst_structure_free(structure);
GUniquePtr<gchar> elementName(gst_element_get_name(GST_ELEMENT(object)));
- LOG_MEDIA_MESSAGE("Set media.role as %s at %s", role, elementName.get());
-}
-
-PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateGStreamer::create(MediaPlayer* player)
-{
- return adoptPtr(new MediaPlayerPrivateGStreamer(player));
+ GST_DEBUG("Set media.role as %s at %s", role, elementName.get());
}
void MediaPlayerPrivateGStreamer::registerMediaEngine(MediaEngineRegistrar registrar)
{
if (isAvailable())
- registrar(create, getSupportedTypes, supportsType, 0, 0, 0);
+ registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateGStreamer>(player); },
+ getSupportedTypes, supportsType, nullptr, nullptr, nullptr, supportsKeySystem);
}
bool initializeGStreamerAndRegisterWebKitElements()
@@ -210,17 +115,14 @@ bool initializeGStreamerAndRegisterWebKitElements()
if (!initializeGStreamer())
return false;
- GRefPtr<GstElementFactory> srcFactory = gst_element_factory_find("webkitwebsrc");
+ registerWebKitGStreamerElements();
+
+ GRefPtr<GstElementFactory> srcFactory = adoptGRef(gst_element_factory_find("webkitwebsrc"));
if (!srcFactory) {
GST_DEBUG_CATEGORY_INIT(webkit_media_player_debug, "webkitmediaplayer", 0, "WebKit media player");
- gst_element_register(0, "webkitwebsrc", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_WEB_SRC);
+ gst_element_register(nullptr, "webkitwebsrc", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_WEB_SRC);
}
-#if ENABLE(MEDIA_SOURCE)
- GRefPtr<GstElementFactory> WebKitMediaSrcFactory = gst_element_factory_find("webkitmediasrc");
- if (!WebKitMediaSrcFactory)
- gst_element_register(0, "webkitmediasrc", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_MEDIA_SRC);
-#endif
return true;
}
@@ -229,52 +131,50 @@ bool MediaPlayerPrivateGStreamer::isAvailable()
if (!initializeGStreamerAndRegisterWebKitElements())
return false;
- GRefPtr<GstElementFactory> factory = gst_element_factory_find("playbin");
+ GRefPtr<GstElementFactory> factory = adoptGRef(gst_element_factory_find("playbin"));
return factory;
}
MediaPlayerPrivateGStreamer::MediaPlayerPrivateGStreamer(MediaPlayer* player)
: MediaPlayerPrivateGStreamerBase(player)
- , m_source(0)
- , m_seekTime(0)
+ , m_buffering(false)
+ , m_bufferingPercentage(0)
+ , m_canFallBackToLastFinishedSeekPosition(false)
, m_changingRate(false)
- , m_endTime(numeric_limits<float>::infinity())
+ , m_downloadFinished(false)
+ , m_errorOccured(false)
, m_isEndReached(false)
, m_isStreaming(false)
- , m_mediaLocations(0)
- , m_mediaLocationCurrentIndex(0)
- , m_resetPipeline(false)
+ , m_durationAtEOS(0)
, m_paused(true)
- , m_playbackRatePause(false)
+ , m_playbackRate(1)
+ , m_requestedState(GST_STATE_VOID_PENDING)
+ , m_resetPipeline(false)
, m_seeking(false)
, m_seekIsPending(false)
+ , m_seekTime(0)
+ , m_source(nullptr)
+ , m_volumeAndMuteInitialized(false)
+ , m_weakPtrFactory(this)
+ , m_mediaLocations(nullptr)
+ , m_mediaLocationCurrentIndex(0)
+ , m_playbackRatePause(false)
, m_timeOfOverlappingSeek(-1)
- , m_buffering(false)
- , m_playbackRate(1)
, m_lastPlaybackRate(1)
- , m_errorOccured(false)
- , m_mediaDuration(0)
- , m_downloadFinished(false)
- , m_fillTimer(this, &MediaPlayerPrivateGStreamer::fillTimerFired)
+ , m_fillTimer(*this, &MediaPlayerPrivateGStreamer::fillTimerFired)
, m_maxTimeLoaded(0)
- , m_bufferingPercentage(0)
, m_preload(player->preload())
, m_delayingLoad(false)
- , m_mediaDurationKnown(true)
, m_maxTimeLoadedAtLastDidLoadingProgress(0)
- , m_volumeAndMuteInitialized(false)
, m_hasVideo(false)
, m_hasAudio(false)
- , m_audioTimerHandler(0)
- , m_textTimerHandler(0)
- , m_videoTimerHandler(0)
- , m_videoCapsTimerHandler(0)
- , m_readyTimerHandler(0)
- , m_totalBytes(-1)
+ , m_readyTimerHandler(RunLoop::main(), this, &MediaPlayerPrivateGStreamer::readyTimerFired)
+ , m_totalBytes(0)
, m_preservesPitch(false)
- , m_requestedState(GST_STATE_VOID_PENDING)
- , m_missingPlugins(false)
{
+#if USE(GLIB)
+ m_readyTimerHandler.setPriority(G_PRIORITY_DEFAULT_IDLE);
+#endif
}
MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer()
@@ -294,50 +194,35 @@ MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer()
if (m_mediaLocations) {
gst_structure_free(m_mediaLocations);
- m_mediaLocations = 0;
+ m_mediaLocations = nullptr;
}
+ if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
+ g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);
+
if (m_autoAudioSink)
g_signal_handlers_disconnect_by_func(G_OBJECT(m_autoAudioSink.get()),
reinterpret_cast<gpointer>(setAudioStreamPropertiesCallback), this);
- if (m_readyTimerHandler)
- g_source_remove(m_readyTimerHandler);
-
- if (m_playBin) {
- GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_playBin.get())));
- ASSERT(bus);
- g_signal_handlers_disconnect_by_func(bus.get(), reinterpret_cast<gpointer>(mediaPlayerPrivateMessageCallback), this);
- gst_bus_remove_signal_watch(bus.get());
-
- g_signal_handlers_disconnect_by_func(m_playBin.get(), reinterpret_cast<gpointer>(mediaPlayerPrivateSourceChangedCallback), this);
- g_signal_handlers_disconnect_by_func(m_playBin.get(), reinterpret_cast<gpointer>(mediaPlayerPrivateVideoChangedCallback), this);
- g_signal_handlers_disconnect_by_func(m_playBin.get(), reinterpret_cast<gpointer>(mediaPlayerPrivateAudioChangedCallback), this);
-#if ENABLE(VIDEO_TRACK)
- g_signal_handlers_disconnect_by_func(m_playBin.get(), reinterpret_cast<gpointer>(mediaPlayerPrivateNewTextSampleCallback), this);
- g_signal_handlers_disconnect_by_func(m_playBin.get(), reinterpret_cast<gpointer>(mediaPlayerPrivateTextChangedCallback), this);
-#endif
-
- gst_element_set_state(m_playBin.get(), GST_STATE_NULL);
- m_playBin.clear();
+ m_readyTimerHandler.stop();
+ if (m_missingPluginsCallback) {
+ m_missingPluginsCallback->invalidate();
+ m_missingPluginsCallback = nullptr;
}
- if (m_webkitVideoSink) {
- GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_webkitVideoSink.get(), "sink"));
- g_signal_handlers_disconnect_by_func(videoSinkPad.get(), reinterpret_cast<gpointer>(mediaPlayerPrivateVideoSinkCapsChangedCallback), this);
+ if (m_videoSink) {
+ GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
+ g_signal_handlers_disconnect_matched(videoSinkPad.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
}
- if (m_videoTimerHandler)
- g_source_remove(m_videoTimerHandler);
-
- if (m_audioTimerHandler)
- g_source_remove(m_audioTimerHandler);
-
- if (m_textTimerHandler)
- g_source_remove(m_textTimerHandler);
-
- if (m_videoCapsTimerHandler)
- g_source_remove(m_videoCapsTimerHandler);
+ if (m_pipeline) {
+ GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
+ ASSERT(bus);
+ g_signal_handlers_disconnect_by_func(bus.get(), gpointer(busMessageCallback), this);
+ gst_bus_remove_signal_watch(bus.get());
+ gst_bus_set_sync_handler(bus.get(), nullptr, nullptr, nullptr);
+ g_signal_handlers_disconnect_matched(m_pipeline.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
+ }
}
void MediaPlayerPrivateGStreamer::load(const String& urlString)
@@ -354,18 +239,21 @@ void MediaPlayerPrivateGStreamer::load(const String& urlString)
if (url.isLocalFile())
cleanURL = cleanURL.substring(0, url.pathEnd());
- if (!m_playBin)
+ if (!m_pipeline)
createGSTPlayBin();
- ASSERT(m_playBin);
+ if (m_fillTimer.isActive())
+ m_fillTimer.stop();
+
+ ASSERT(m_pipeline);
m_url = URL(URL(), cleanURL);
- g_object_set(m_playBin.get(), "uri", cleanURL.utf8().data(), NULL);
+ g_object_set(m_pipeline.get(), "uri", cleanURL.utf8().data(), nullptr);
- INFO_MEDIA_MESSAGE("Load %s", cleanURL.utf8().data());
+ GST_INFO("Load %s", cleanURL.utf8().data());
if (m_preload == MediaPlayer::None) {
- LOG_MEDIA_MESSAGE("Delaying load.");
+ GST_DEBUG("Delaying load.");
m_delayingLoad = true;
}
@@ -376,24 +264,32 @@ void MediaPlayerPrivateGStreamer::load(const String& urlString)
m_readyState = MediaPlayer::HaveNothing;
m_player->readyStateChanged();
m_volumeAndMuteInitialized = false;
+ m_durationAtEOS = 0;
if (!m_delayingLoad)
commitLoad();
}
#if ENABLE(MEDIA_SOURCE)
-void MediaPlayerPrivateGStreamer::load(const String& url, PassRefPtr<HTMLMediaSource> mediaSource)
+void MediaPlayerPrivateGStreamer::load(const String&, MediaSourcePrivateClient*)
+{
+ // Properly fail so the global MediaPlayer tries to fallback to the next MediaPlayerPrivate.
+ m_networkState = MediaPlayer::FormatError;
+ m_player->networkStateChanged();
+}
+#endif
+
+#if ENABLE(MEDIA_STREAM)
+void MediaPlayerPrivateGStreamer::load(MediaStreamPrivate&)
{
- String mediasourceUri = String::format("mediasource%s", url.utf8().data());
- m_mediaSource = mediaSource;
- load(mediasourceUri);
+ notImplemented();
}
#endif
void MediaPlayerPrivateGStreamer::commitLoad()
{
ASSERT(!m_delayingLoad);
- LOG_MEDIA_MESSAGE("Committing load.");
+ GST_DEBUG("Committing load.");
// GStreamer needs to have the pipeline set to a paused state to
// start providing anything useful.
@@ -403,7 +299,7 @@ void MediaPlayerPrivateGStreamer::commitLoad()
updateStates();
}
-float MediaPlayerPrivateGStreamer::playbackPosition() const
+double MediaPlayerPrivateGStreamer::playbackPosition() const
{
if (m_isEndReached) {
// Position queries on a null pipeline return 0. If we're at
@@ -412,48 +308,56 @@ float MediaPlayerPrivateGStreamer::playbackPosition() const
// what the Media element spec expects us to do.
if (m_seeking)
return m_seekTime;
- if (m_mediaDuration)
- return m_mediaDuration;
+
+ MediaTime mediaDuration = durationMediaTime();
+ if (mediaDuration)
+ return mediaDuration.toDouble();
return 0;
}
// Position is only available if no async state change is going on and the state is either paused or playing.
gint64 position = GST_CLOCK_TIME_NONE;
GstQuery* query= gst_query_new_position(GST_FORMAT_TIME);
- if (gst_element_query(m_playBin.get(), query))
+ if (gst_element_query(m_pipeline.get(), query))
gst_query_parse_position(query, 0, &position);
+ gst_query_unref(query);
- float result = 0.0f;
- if (static_cast<GstClockTime>(position) != GST_CLOCK_TIME_NONE)
- result = static_cast<double>(position) / GST_SECOND;
- else if (m_canFallBackToLastFinishedSeekPositon)
- result = m_seekTime;
-
- LOG_MEDIA_MESSAGE("Position %" GST_TIME_FORMAT, GST_TIME_ARGS(position));
+ GST_DEBUG("Position %" GST_TIME_FORMAT, GST_TIME_ARGS(position));
- gst_query_unref(query);
+ double result = 0.0f;
+ if (static_cast<GstClockTime>(position) != GST_CLOCK_TIME_NONE) {
+ GTimeVal timeValue;
+ GST_TIME_TO_TIMEVAL(position, timeValue);
+ result = static_cast<double>(timeValue.tv_sec + (timeValue.tv_usec / 1000000.0));
+ } else if (m_canFallBackToLastFinishedSeekPosition)
+ result = m_seekTime;
return result;
}
+void MediaPlayerPrivateGStreamer::readyTimerFired()
+{
+ changePipelineState(GST_STATE_NULL);
+}
+
bool MediaPlayerPrivateGStreamer::changePipelineState(GstState newState)
{
- ASSERT(m_playBin);
+ ASSERT(m_pipeline);
GstState currentState;
GstState pending;
- gst_element_get_state(m_playBin.get(), &currentState, &pending, 0);
+ gst_element_get_state(m_pipeline.get(), &currentState, &pending, 0);
if (currentState == newState || pending == newState) {
- LOG_MEDIA_MESSAGE("Rejected state change to %s from %s with %s pending", gst_element_state_get_name(newState),
+ GST_DEBUG("Rejected state change to %s from %s with %s pending", gst_element_state_get_name(newState),
gst_element_state_get_name(currentState), gst_element_state_get_name(pending));
return true;
}
- LOG_MEDIA_MESSAGE("Changing state change to %s from %s with %s pending", gst_element_state_get_name(newState),
+ GST_DEBUG("Changing state change to %s from %s with %s pending", gst_element_state_get_name(newState),
gst_element_state_get_name(currentState), gst_element_state_get_name(pending));
- GstStateChangeReturn setStateResult = gst_element_set_state(m_playBin.get(), newState);
+ GstStateChangeReturn setStateResult = gst_element_set_state(m_pipeline.get(), newState);
GstState pausedOrPlaying = newState == GST_STATE_PLAYING ? GST_STATE_PAUSED : GST_STATE_PLAYING;
if (currentState != pausedOrPlaying && setStateResult == GST_STATE_CHANGE_FAILURE) {
return false;
@@ -463,13 +367,13 @@ bool MediaPlayerPrivateGStreamer::changePipelineState(GstState newState)
// if we stay for too long on READY.
// Also lets remove the timer if we request a state change for any state other than READY.
// See also https://bugs.webkit.org/show_bug.cgi?id=117354
- if (newState == GST_STATE_READY && !m_readyTimerHandler) {
- m_readyTimerHandler = g_timeout_add_seconds(gReadyStateTimerInterval, reinterpret_cast<GSourceFunc>(mediaPlayerPrivateReadyStateTimeoutCallback), this);
- g_source_set_name_by_id(m_readyTimerHandler, "[WebKit] mediaPlayerPrivateReadyStateTimeoutCallback");
- } else if (newState != GST_STATE_READY && m_readyTimerHandler) {
- g_source_remove(m_readyTimerHandler);
- m_readyTimerHandler = 0;
- }
+ if (newState == GST_STATE_READY && !m_readyTimerHandler.isActive()) {
+ // Max interval in seconds to stay in the READY state on manual
+ // state change requests.
+ static const double readyStateTimerDelay = 60;
+ m_readyTimerHandler.startOneShot(readyStateTimerDelay);
+ } else if (newState != GST_STATE_READY)
+ m_readyTimerHandler.stop();
return true;
}
@@ -495,7 +399,7 @@ void MediaPlayerPrivateGStreamer::play()
m_delayingLoad = false;
m_preload = MediaPlayer::Auto;
setDownloadBuffering();
- LOG_MEDIA_MESSAGE("Play");
+ GST_DEBUG("Play");
} else {
loadingFailed(MediaPlayer::Empty);
}
@@ -505,57 +409,56 @@ void MediaPlayerPrivateGStreamer::pause()
{
m_playbackRatePause = false;
GstState currentState, pendingState;
- gst_element_get_state(m_playBin.get(), &currentState, &pendingState, 0);
+ gst_element_get_state(m_pipeline.get(), &currentState, &pendingState, 0);
if (currentState < GST_STATE_PAUSED && pendingState <= GST_STATE_PAUSED)
return;
if (changePipelineState(GST_STATE_PAUSED))
- INFO_MEDIA_MESSAGE("Pause");
+ GST_INFO("Pause");
else
loadingFailed(MediaPlayer::Empty);
}
-float MediaPlayerPrivateGStreamer::duration() const
+MediaTime MediaPlayerPrivateGStreamer::durationMediaTime() const
{
- if (!m_playBin)
- return 0.0f;
+ if (!m_pipeline)
+ return { };
if (m_errorOccured)
- return 0.0f;
+ return { };
- // Media duration query failed already, don't attempt new useless queries.
- if (!m_mediaDurationKnown)
- return numeric_limits<float>::infinity();
+ if (m_durationAtEOS)
+ return MediaTime::createWithDouble(m_durationAtEOS);
- if (m_mediaDuration)
- return m_mediaDuration;
+ // The duration query would fail on a not-prerolled pipeline.
+ if (GST_STATE(m_pipeline.get()) < GST_STATE_PAUSED)
+ return { };
GstFormat timeFormat = GST_FORMAT_TIME;
gint64 timeLength = 0;
- bool failure = !gst_element_query_duration(m_playBin.get(), timeFormat, &timeLength) || static_cast<guint64>(timeLength) == GST_CLOCK_TIME_NONE;
+ bool failure = !gst_element_query_duration(m_pipeline.get(), timeFormat, &timeLength) || static_cast<guint64>(timeLength) == GST_CLOCK_TIME_NONE;
if (failure) {
- LOG_MEDIA_MESSAGE("Time duration query failed for %s", m_url.string().utf8().data());
- return numeric_limits<float>::infinity();
+ GST_DEBUG("Time duration query failed for %s", m_url.string().utf8().data());
+ return MediaTime::positiveInfiniteTime();
}
- LOG_MEDIA_MESSAGE("Duration: %" GST_TIME_FORMAT, GST_TIME_ARGS(timeLength));
+ GST_DEBUG("Duration: %" GST_TIME_FORMAT, GST_TIME_ARGS(timeLength));
- m_mediaDuration = static_cast<double>(timeLength) / GST_SECOND;
- return m_mediaDuration;
+ return MediaTime::createWithDouble(static_cast<double>(timeLength) / GST_SECOND);
// FIXME: handle 3.14.9.5 properly
}
-float MediaPlayerPrivateGStreamer::currentTime() const
+MediaTime MediaPlayerPrivateGStreamer::currentMediaTime() const
{
- if (!m_playBin)
- return 0.0f;
+ if (!m_pipeline)
+ return { };
if (m_errorOccured)
- return 0.0f;
+ return { };
if (m_seeking)
- return m_seekTime;
+ return MediaTime::createWithFloat(m_seekTime);
// Workaround for
// https://bugzilla.gnome.org/show_bug.cgi?id=639941 In GStreamer
@@ -563,30 +466,30 @@ float MediaPlayerPrivateGStreamer::currentTime() const
// negative playback rate. There's no upstream accepted patch for
// this bug yet, hence this temporary workaround.
if (m_isEndReached && m_playbackRate < 0)
- return 0.0f;
+ return { };
- return playbackPosition();
+ return MediaTime::createWithDouble(playbackPosition());
}
void MediaPlayerPrivateGStreamer::seek(float time)
{
- if (!m_playBin)
+ if (!m_pipeline)
return;
if (m_errorOccured)
return;
- INFO_MEDIA_MESSAGE("[Seek] seek attempt to %f secs", time);
+ GST_INFO("[Seek] seek attempt to %f secs", time);
// Avoid useless seeking.
- if (time == currentTime())
+ if (MediaTime::createWithFloat(time) == currentMediaTime())
return;
if (isLiveStream())
return;
GstClockTime clockTime = toGstClockTime(time);
- INFO_MEDIA_MESSAGE("[Seek] seeking to %" GST_TIME_FORMAT " (%f)", GST_TIME_ARGS(clockTime), time);
+ GST_INFO("[Seek] seeking to %" GST_TIME_FORMAT " (%f)", GST_TIME_ARGS(clockTime), time);
if (m_seeking) {
m_timeOfOverlappingSeek = time;
@@ -597,15 +500,15 @@ void MediaPlayerPrivateGStreamer::seek(float time)
}
GstState state;
- GstStateChangeReturn getStateResult = gst_element_get_state(m_playBin.get(), &state, 0, 0);
+ GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
if (getStateResult == GST_STATE_CHANGE_FAILURE || getStateResult == GST_STATE_CHANGE_NO_PREROLL) {
- LOG_MEDIA_MESSAGE("[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult));
+ GST_DEBUG("[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult));
return;
}
if (getStateResult == GST_STATE_CHANGE_ASYNC || state < GST_STATE_PAUSED || m_isEndReached) {
m_seekIsPending = true;
if (m_isEndReached) {
- LOG_MEDIA_MESSAGE("[Seek] reset pipeline");
+ GST_DEBUG("[Seek] reset pipeline");
m_resetPipeline = true;
if (!changePipelineState(GST_STATE_PAUSED))
loadingFailed(MediaPlayer::Empty);
@@ -613,7 +516,7 @@ void MediaPlayerPrivateGStreamer::seek(float time)
} else {
// We can seek now.
if (!doSeek(clockTime, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE))) {
- LOG_MEDIA_MESSAGE("[Seek] seeking to %f failed", time);
+ GST_DEBUG("[Seek] seeking to %f failed", time);
return;
}
}
@@ -627,6 +530,11 @@ bool MediaPlayerPrivateGStreamer::doSeek(gint64 position, float rate, GstSeekFla
{
gint64 startTime, endTime;
+ // TODO: Should do more than that, need to notify the media source
+ // and probably flush the pipeline at least.
+ if (isMediaSource())
+ return true;
+
if (rate > 0) {
startTime = position;
endTime = GST_CLOCK_TIME_NONE;
@@ -635,7 +543,7 @@ bool MediaPlayerPrivateGStreamer::doSeek(gint64 position, float rate, GstSeekFla
// If we are at beginning of media, start from the end to
// avoid immediate EOS.
if (position < 0)
- endTime = static_cast<gint64>(duration() * GST_SECOND);
+ endTime = static_cast<gint64>(durationMediaTime().toDouble() * GST_SECOND);
else
endTime = position;
}
@@ -643,7 +551,7 @@ bool MediaPlayerPrivateGStreamer::doSeek(gint64 position, float rate, GstSeekFla
if (!rate)
rate = 1.0;
- return gst_element_seek(m_playBin.get(), rate, GST_FORMAT_TIME, seekType,
+ return gst_element_seek(m_pipeline.get(), rate, GST_FORMAT_TIME, seekType,
GST_SEEK_TYPE_SET, startTime, GST_SEEK_TYPE_SET, endTime);
}
@@ -655,7 +563,7 @@ void MediaPlayerPrivateGStreamer::updatePlaybackRate()
float currentPosition = static_cast<float>(playbackPosition() * GST_SECOND);
bool mute = false;
- INFO_MEDIA_MESSAGE("Set Rate to %f", m_playbackRate);
+ GST_INFO("Set Rate to %f", m_playbackRate);
if (m_playbackRate > 0) {
// Mute the sound if the playback rate is too extreme and
@@ -667,20 +575,20 @@ void MediaPlayerPrivateGStreamer::updatePlaybackRate()
mute = true;
}
- INFO_MEDIA_MESSAGE("Need to mute audio?: %d", (int) mute);
+ GST_INFO("Need to mute audio?: %d", (int) mute);
if (doSeek(currentPosition, m_playbackRate, static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH))) {
- g_object_set(m_playBin.get(), "mute", mute, NULL);
+ g_object_set(m_pipeline.get(), "mute", mute, nullptr);
m_lastPlaybackRate = m_playbackRate;
} else {
m_playbackRate = m_lastPlaybackRate;
- ERROR_MEDIA_MESSAGE("Set rate to %f failed", m_playbackRate);
+ GST_ERROR("Set rate to %f failed", m_playbackRate);
}
if (m_playbackRatePause) {
GstState state;
GstState pending;
- gst_element_get_state(m_playBin.get(), &state, &pending, 0);
+ gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
if (state != GST_STATE_PLAYING && pending != GST_STATE_PLAYING)
changePipelineState(GST_STATE_PLAYING);
m_playbackRatePause = false;
@@ -693,7 +601,7 @@ void MediaPlayerPrivateGStreamer::updatePlaybackRate()
bool MediaPlayerPrivateGStreamer::paused() const
{
if (m_isEndReached) {
- LOG_MEDIA_MESSAGE("Ignoring pause at EOS");
+ GST_DEBUG("Ignoring pause at EOS");
return true;
}
@@ -701,8 +609,8 @@ bool MediaPlayerPrivateGStreamer::paused() const
return false;
GstState state;
- gst_element_get_state(m_playBin.get(), &state, 0, 0);
- return state == GST_STATE_PAUSED;
+ gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
+ return state <= GST_STATE_PAUSED;
}
bool MediaPlayerPrivateGStreamer::seeking() const
@@ -710,34 +618,35 @@ bool MediaPlayerPrivateGStreamer::seeking() const
return m_seeking;
}
-void MediaPlayerPrivateGStreamer::videoChanged()
-{
- if (m_videoTimerHandler)
- g_source_remove(m_videoTimerHandler);
- m_videoTimerHandler = g_idle_add_full(G_PRIORITY_DEFAULT, reinterpret_cast<GSourceFunc>(mediaPlayerPrivateVideoChangeTimeoutCallback), this, 0);
-}
-
-void MediaPlayerPrivateGStreamer::videoCapsChanged()
+void MediaPlayerPrivateGStreamer::videoChangedCallback(MediaPlayerPrivateGStreamer* player)
{
- if (m_videoCapsTimerHandler)
- g_source_remove(m_videoCapsTimerHandler);
- m_videoCapsTimerHandler = g_timeout_add(0, reinterpret_cast<GSourceFunc>(mediaPlayerPrivateVideoCapsChangeTimeoutCallback), this);
+ player->m_notifier->notify(MainThreadNotification::VideoChanged, [player] { player->notifyPlayerOfVideo(); });
}
void MediaPlayerPrivateGStreamer::notifyPlayerOfVideo()
{
- m_videoTimerHandler = 0;
+ if (UNLIKELY(!m_pipeline || !m_source))
+ return;
gint numTracks = 0;
- if (m_playBin)
- g_object_get(m_playBin.get(), "n-video", &numTracks, NULL);
+ bool useMediaSource = isMediaSource();
+ GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
+ g_object_get(element, "n-video", &numTracks, nullptr);
m_hasVideo = numTracks > 0;
+ if (m_hasVideo)
+ m_player->sizeChanged();
+
+ if (useMediaSource) {
+ GST_DEBUG("Tracks managed by source element. Bailing out now.");
+ m_player->client().mediaPlayerEngineUpdated(m_player);
+ return;
+ }
#if ENABLE(VIDEO_TRACK)
for (gint i = 0; i < numTracks; ++i) {
GRefPtr<GstPad> pad;
- g_signal_emit_by_name(m_playBin.get(), "get-video-pad", i, &pad.outPtr(), NULL);
+ g_signal_emit_by_name(m_pipeline.get(), "get-video-pad", i, &pad.outPtr(), nullptr);
ASSERT(pad);
if (i < static_cast<gint>(m_videoTracks.size())) {
@@ -747,50 +656,60 @@ void MediaPlayerPrivateGStreamer::notifyPlayerOfVideo()
continue;
}
- RefPtr<VideoTrackPrivateGStreamer> track = VideoTrackPrivateGStreamer::create(m_playBin, i, pad);
+ RefPtr<VideoTrackPrivateGStreamer> track = VideoTrackPrivateGStreamer::create(m_pipeline, i, pad);
m_videoTracks.append(track);
- m_player->addVideoTrack(track.release());
+ m_player->addVideoTrack(*track);
}
while (static_cast<gint>(m_videoTracks.size()) > numTracks) {
RefPtr<VideoTrackPrivateGStreamer> track = m_videoTracks.last();
track->disconnect();
m_videoTracks.removeLast();
- m_player->removeVideoTrack(track.release());
+ m_player->removeVideoTrack(*track);
}
#endif
- m_player->mediaPlayerClient()->mediaPlayerEngineUpdated(m_player);
+ m_player->client().mediaPlayerEngineUpdated(m_player);
+}
+
+void MediaPlayerPrivateGStreamer::videoSinkCapsChangedCallback(MediaPlayerPrivateGStreamer* player)
+{
+ player->m_notifier->notify(MainThreadNotification::VideoCapsChanged, [player] { player->notifyPlayerOfVideoCaps(); });
}
void MediaPlayerPrivateGStreamer::notifyPlayerOfVideoCaps()
{
- m_videoCapsTimerHandler = 0;
m_videoSize = IntSize();
- m_player->mediaPlayerClient()->mediaPlayerEngineUpdated(m_player);
+ m_player->client().mediaPlayerEngineUpdated(m_player);
}
-void MediaPlayerPrivateGStreamer::audioChanged()
+void MediaPlayerPrivateGStreamer::audioChangedCallback(MediaPlayerPrivateGStreamer* player)
{
- if (m_audioTimerHandler)
- g_source_remove(m_audioTimerHandler);
- m_audioTimerHandler = g_idle_add_full(G_PRIORITY_DEFAULT, reinterpret_cast<GSourceFunc>(mediaPlayerPrivateAudioChangeTimeoutCallback), this, 0);
+ player->m_notifier->notify(MainThreadNotification::AudioChanged, [player] { player->notifyPlayerOfAudio(); });
}
void MediaPlayerPrivateGStreamer::notifyPlayerOfAudio()
{
- m_audioTimerHandler = 0;
+ if (UNLIKELY(!m_pipeline || !m_source))
+ return;
gint numTracks = 0;
- if (m_playBin)
- g_object_get(m_playBin.get(), "n-audio", &numTracks, NULL);
+ bool useMediaSource = isMediaSource();
+ GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
+ g_object_get(element, "n-audio", &numTracks, nullptr);
m_hasAudio = numTracks > 0;
+ if (useMediaSource) {
+ GST_DEBUG("Tracks managed by source element. Bailing out now.");
+ m_player->client().mediaPlayerEngineUpdated(m_player);
+ return;
+ }
+
#if ENABLE(VIDEO_TRACK)
for (gint i = 0; i < numTracks; ++i) {
GRefPtr<GstPad> pad;
- g_signal_emit_by_name(m_playBin.get(), "get-audio-pad", i, &pad.outPtr(), NULL);
+ g_signal_emit_by_name(m_pipeline.get(), "get-audio-pad", i, &pad.outPtr(), nullptr);
ASSERT(pad);
if (i < static_cast<gint>(m_audioTracks.size())) {
@@ -800,41 +719,46 @@ void MediaPlayerPrivateGStreamer::notifyPlayerOfAudio()
continue;
}
- RefPtr<AudioTrackPrivateGStreamer> track = AudioTrackPrivateGStreamer::create(m_playBin, i, pad);
+ RefPtr<AudioTrackPrivateGStreamer> track = AudioTrackPrivateGStreamer::create(m_pipeline, i, pad);
m_audioTracks.insert(i, track);
- m_player->addAudioTrack(track.release());
+ m_player->addAudioTrack(*track);
}
while (static_cast<gint>(m_audioTracks.size()) > numTracks) {
RefPtr<AudioTrackPrivateGStreamer> track = m_audioTracks.last();
track->disconnect();
m_audioTracks.removeLast();
- m_player->removeAudioTrack(track.release());
+ m_player->removeAudioTrack(*track);
}
#endif
- m_player->mediaPlayerClient()->mediaPlayerEngineUpdated(m_player);
+ m_player->client().mediaPlayerEngineUpdated(m_player);
}
#if ENABLE(VIDEO_TRACK)
-void MediaPlayerPrivateGStreamer::textChanged()
+void MediaPlayerPrivateGStreamer::textChangedCallback(MediaPlayerPrivateGStreamer* player)
{
- if (m_textTimerHandler)
- g_source_remove(m_textTimerHandler);
- m_textTimerHandler = g_timeout_add(0, reinterpret_cast<GSourceFunc>(mediaPlayerPrivateTextChangeTimeoutCallback), this);
+ player->m_notifier->notify(MainThreadNotification::TextChanged, [player] { player->notifyPlayerOfText(); });
}
void MediaPlayerPrivateGStreamer::notifyPlayerOfText()
{
- m_textTimerHandler = 0;
+ if (UNLIKELY(!m_pipeline || !m_source))
+ return;
gint numTracks = 0;
- if (m_playBin)
- g_object_get(m_playBin.get(), "n-text", &numTracks, NULL);
+ bool useMediaSource = isMediaSource();
+ GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
+ g_object_get(element, "n-text", &numTracks, nullptr);
+
+ if (useMediaSource) {
+ GST_DEBUG("Tracks managed by source element. Bailing out now.");
+ return;
+ }
for (gint i = 0; i < numTracks; ++i) {
GRefPtr<GstPad> pad;
- g_signal_emit_by_name(m_playBin.get(), "get-text-pad", i, &pad.outPtr(), NULL);
+ g_signal_emit_by_name(m_pipeline.get(), "get-text-pad", i, &pad.outPtr(), nullptr);
ASSERT(pad);
if (i < static_cast<gint>(m_textTracks.size())) {
@@ -846,17 +770,23 @@ void MediaPlayerPrivateGStreamer::notifyPlayerOfText()
RefPtr<InbandTextTrackPrivateGStreamer> track = InbandTextTrackPrivateGStreamer::create(i, pad);
m_textTracks.insert(i, track);
- m_player->addTextTrack(track.release());
+ m_player->addTextTrack(*track);
}
while (static_cast<gint>(m_textTracks.size()) > numTracks) {
RefPtr<InbandTextTrackPrivateGStreamer> track = m_textTracks.last();
track->disconnect();
m_textTracks.removeLast();
- m_player->removeTextTrack(track.release());
+ m_player->removeTextTrack(*track);
}
}
+GstFlowReturn MediaPlayerPrivateGStreamer::newTextSampleCallback(MediaPlayerPrivateGStreamer* player)
+{
+ player->newTextSample();
+ return GST_FLOW_OK;
+}
+
void MediaPlayerPrivateGStreamer::newTextSample()
{
if (!m_textAppSink)
@@ -866,7 +796,7 @@ void MediaPlayerPrivateGStreamer::newTextSample()
gst_pad_get_sticky_event(m_textAppSinkPad.get(), GST_EVENT_STREAM_START, 0));
GRefPtr<GstSample> sample;
- g_signal_emit_by_name(m_textAppSink.get(), "pull-sample", &sample.outPtr(), NULL);
+ g_signal_emit_by_name(m_textAppSink.get(), "pull-sample", &sample.outPtr(), nullptr);
ASSERT(sample);
if (streamStartEvent) {
@@ -882,9 +812,9 @@ void MediaPlayerPrivateGStreamer::newTextSample()
}
}
if (!found)
- WARN_MEDIA_MESSAGE("Got sample with unknown stream ID.");
+ GST_WARNING("Got sample with unknown stream ID.");
} else
- WARN_MEDIA_MESSAGE("Unable to handle sample with no stream start event.");
+ GST_WARNING("Unable to handle sample with no stream start event.");
}
#endif
@@ -915,7 +845,7 @@ void MediaPlayerPrivateGStreamer::setRate(float rate)
m_playbackRate = rate;
m_changingRate = true;
- gst_element_get_state(m_playBin.get(), &state, &pending, 0);
+ gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
if (!rate) {
m_changingRate = false;
@@ -932,52 +862,53 @@ void MediaPlayerPrivateGStreamer::setRate(float rate)
updatePlaybackRate();
}
+double MediaPlayerPrivateGStreamer::rate() const
+{
+ return m_playbackRate;
+}
+
void MediaPlayerPrivateGStreamer::setPreservesPitch(bool preservesPitch)
{
m_preservesPitch = preservesPitch;
}
-PassRefPtr<TimeRanges> MediaPlayerPrivateGStreamer::buffered() const
+std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateGStreamer::buffered() const
{
- RefPtr<TimeRanges> timeRanges = TimeRanges::create();
+ auto timeRanges = std::make_unique<PlatformTimeRanges>();
if (m_errorOccured || isLiveStream())
- return timeRanges.release();
+ return timeRanges;
-#if GST_CHECK_VERSION(0, 10, 31)
- float mediaDuration(duration());
+ float mediaDuration(durationMediaTime().toDouble());
if (!mediaDuration || std::isinf(mediaDuration))
- return timeRanges.release();
+ return timeRanges;
GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);
- if (!gst_element_query(m_playBin.get(), query)) {
+ if (!gst_element_query(m_pipeline.get(), query)) {
gst_query_unref(query);
- return timeRanges.release();
+ return timeRanges;
}
- for (guint index = 0; index < gst_query_get_n_buffering_ranges(query); index++) {
+ guint numBufferingRanges = gst_query_get_n_buffering_ranges(query);
+ for (guint index = 0; index < numBufferingRanges; index++) {
gint64 rangeStart = 0, rangeStop = 0;
if (gst_query_parse_nth_buffering_range(query, index, &rangeStart, &rangeStop))
- timeRanges->add(static_cast<float>((rangeStart * mediaDuration) / GST_FORMAT_PERCENT_MAX),
- static_cast<float>((rangeStop * mediaDuration) / GST_FORMAT_PERCENT_MAX));
+ timeRanges->add(MediaTime::createWithDouble((rangeStart * mediaDuration) / GST_FORMAT_PERCENT_MAX),
+ MediaTime::createWithDouble((rangeStop * mediaDuration) / GST_FORMAT_PERCENT_MAX));
}
// Fallback to the more general maxTimeLoaded() if no range has
// been found.
if (!timeRanges->length())
if (float loaded = maxTimeLoaded())
- timeRanges->add(0, loaded);
+ timeRanges->add(MediaTime::zeroTime(), MediaTime::createWithDouble(loaded));
gst_query_unref(query);
-#else
- float loaded = maxTimeLoaded();
- if (!m_errorOccured && !isLiveStream() && loaded > 0)
- timeRanges->add(0, loaded);
-#endif
- return timeRanges.release();
+
+ return timeRanges;
}
-gboolean MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
+void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
{
GUniqueOutPtr<GError> err;
GUniqueOutPtr<gchar> debug;
@@ -987,7 +918,7 @@ gboolean MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
const GstStructure* structure = gst_message_get_structure(message);
GstState requestedState, currentState;
- m_canFallBackToLastFinishedSeekPositon = false;
+ m_canFallBackToLastFinishedSeekPosition = false;
if (structure) {
const gchar* messageTypeName = gst_structure_get_name(structure);
@@ -996,40 +927,37 @@ gboolean MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
// notify of the new location(s) of the media.
if (!g_strcmp0(messageTypeName, "redirect")) {
mediaLocationChanged(message);
- return TRUE;
+ return;
}
}
// We ignore state changes from internal elements. They are forwarded to playbin2 anyway.
- bool messageSourceIsPlaybin = GST_MESSAGE_SRC(message) == reinterpret_cast<GstObject*>(m_playBin.get());
+ bool messageSourceIsPlaybin = GST_MESSAGE_SRC(message) == reinterpret_cast<GstObject*>(m_pipeline.get());
- LOG_MEDIA_MESSAGE("Message %s received from element %s", GST_MESSAGE_TYPE_NAME(message), GST_MESSAGE_SRC_NAME(message));
+ GST_DEBUG("Message %s received from element %s", GST_MESSAGE_TYPE_NAME(message), GST_MESSAGE_SRC_NAME(message));
switch (GST_MESSAGE_TYPE(message)) {
case GST_MESSAGE_ERROR:
- if (m_resetPipeline)
- break;
- if (m_missingPlugins)
+ if (m_resetPipeline || m_missingPluginsCallback || m_errorOccured)
break;
gst_message_parse_error(message, &err.outPtr(), &debug.outPtr());
- ERROR_MEDIA_MESSAGE("Error %d: %s (url=%s)", err->code, err->message, m_url.string().utf8().data());
+ GST_ERROR("Error %d: %s (url=%s)", err->code, err->message, m_url.string().utf8().data());
- GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_playBin.get()), GST_DEBUG_GRAPH_SHOW_ALL, "webkit-video.error");
+ GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "webkit-video.error");
error = MediaPlayer::Empty;
- if (err->code == GST_STREAM_ERROR_CODEC_NOT_FOUND
- || err->code == GST_STREAM_ERROR_WRONG_TYPE
- || err->code == GST_STREAM_ERROR_FAILED
- || err->code == GST_CORE_ERROR_MISSING_PLUGIN
- || err->code == GST_RESOURCE_ERROR_NOT_FOUND)
+ if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_CODEC_NOT_FOUND)
+ || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE)
+ || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_FAILED)
+ || g_error_matches(err.get(), GST_CORE_ERROR, GST_CORE_ERROR_MISSING_PLUGIN)
+ || g_error_matches(err.get(), GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_NOT_FOUND))
error = MediaPlayer::FormatError;
- else if (err->domain == GST_STREAM_ERROR) {
+ else if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_TYPE_NOT_FOUND)) {
// Let the mediaPlayerClient handle the stream error, in
// this case the HTMLMediaElement will emit a stalled
// event.
- if (err->code == GST_STREAM_ERROR_TYPE_NOT_FOUND) {
- ERROR_MEDIA_MESSAGE("Decode error, let the Media element emit a stalled event.");
- break;
- }
+ GST_ERROR("Decode error, let the Media element emit a stalled event.");
+ break;
+ } else if (err->domain == GST_STREAM_ERROR) {
error = MediaPlayer::DecodeError;
attemptNextLocation = true;
} else if (err->domain == GST_RESOURCE_ERROR)
@@ -1055,9 +983,9 @@ gboolean MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
// Construct a filename for the graphviz dot file output.
GstState newState;
- gst_message_parse_state_changed(message, &currentState, &newState, 0);
+ gst_message_parse_state_changed(message, &currentState, &newState, nullptr);
CString dotFileName = String::format("webkit-video.%s_%s", gst_element_state_get_name(currentState), gst_element_state_get_name(newState)).utf8();
- GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_playBin.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.data());
+ GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.data());
break;
}
@@ -1065,71 +993,176 @@ gboolean MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
processBufferingStats(message);
break;
case GST_MESSAGE_DURATION_CHANGED:
- if (messageSourceIsPlaybin)
+ // Duration in MSE is managed by MediaSource, SourceBuffer and AppendPipeline.
+ if (messageSourceIsPlaybin && !isMediaSource())
durationChanged();
break;
case GST_MESSAGE_REQUEST_STATE:
gst_message_parse_request_state(message, &requestedState);
- gst_element_get_state(m_playBin.get(), &currentState, NULL, 250);
+ gst_element_get_state(m_pipeline.get(), &currentState, nullptr, 250 * GST_NSECOND);
if (requestedState < currentState) {
GUniquePtr<gchar> elementName(gst_element_get_name(GST_ELEMENT(message)));
- INFO_MEDIA_MESSAGE("Element %s requested state change to %s", elementName.get(),
+ GST_INFO("Element %s requested state change to %s", elementName.get(),
gst_element_state_get_name(requestedState));
m_requestedState = requestedState;
if (!changePipelineState(requestedState))
loadingFailed(MediaPlayer::Empty);
}
break;
+ case GST_MESSAGE_CLOCK_LOST:
+ // This can only happen in PLAYING state and we should just
+ // get a new clock by moving back to PAUSED and then to
+ // PLAYING again.
+ // This can happen if the stream that ends in a sink that
+ // provides the current clock disappears, for example if
+ // the audio sink provides the clock and the audio stream
+ // is disabled. It also happens relatively often with
+ // HTTP adaptive streams when switching between different
+ // variants of a stream.
+ gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
+ gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
+ break;
+ case GST_MESSAGE_LATENCY:
+ // Recalculate the latency, we don't need any special handling
+ // here other than the GStreamer default.
+ // This can happen if the latency of live elements changes, or
+ // for one reason or another a new live element is added or
+ // removed from the pipeline.
+ gst_bin_recalculate_latency(GST_BIN(m_pipeline.get()));
+ break;
case GST_MESSAGE_ELEMENT:
if (gst_is_missing_plugin_message(message)) {
- gchar* detail = gst_missing_plugin_message_get_installer_detail(message);
- gchar* detailArray[2] = {detail, 0};
- GstInstallPluginsReturn result = gst_install_plugins_async(detailArray, 0, mediaPlayerPrivatePluginInstallerResultFunction, this);
- m_missingPlugins = result == GST_INSTALL_PLUGINS_STARTED_OK;
- g_free(detail);
+ if (gst_install_plugins_supported()) {
+ m_missingPluginsCallback = MediaPlayerRequestInstallMissingPluginsCallback::create([this](uint32_t result) {
+ m_missingPluginsCallback = nullptr;
+ if (result != GST_INSTALL_PLUGINS_SUCCESS)
+ return;
+
+ changePipelineState(GST_STATE_READY);
+ changePipelineState(GST_STATE_PAUSED);
+ });
+ GUniquePtr<char> detail(gst_missing_plugin_message_get_installer_detail(message));
+ GUniquePtr<char> description(gst_missing_plugin_message_get_description(message));
+ m_player->client().requestInstallMissingPlugins(String::fromUTF8(detail.get()), String::fromUTF8(description.get()), *m_missingPluginsCallback);
+ }
+ }
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+ else if (gst_structure_has_name(structure, "drm-key-needed")) {
+ GST_DEBUG("drm-key-needed message from %s", GST_MESSAGE_SRC_NAME(message));
+ GRefPtr<GstEvent> event;
+ gst_structure_get(structure, "event", GST_TYPE_EVENT, &event.outPtr(), nullptr);
+ handleProtectionEvent(event.get());
+ }
+#endif
+#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
+ else {
+ GstMpegtsSection* section = gst_message_parse_mpegts_section(message);
+ if (section) {
+ processMpegTsSection(section);
+ gst_mpegts_section_unref(section);
+ }
}
+#endif
break;
#if ENABLE(VIDEO_TRACK)
case GST_MESSAGE_TOC:
processTableOfContents(message);
break;
#endif
+ case GST_MESSAGE_TAG: {
+ GstTagList* tags = nullptr;
+ GUniqueOutPtr<gchar> tag;
+ gst_message_parse_tag(message, &tags);
+ if (gst_tag_list_get_string(tags, GST_TAG_IMAGE_ORIENTATION, &tag.outPtr())) {
+ if (!g_strcmp0(tag.get(), "rotate-90"))
+ setVideoSourceOrientation(ImageOrientation(OriginRightTop));
+ else if (!g_strcmp0(tag.get(), "rotate-180"))
+ setVideoSourceOrientation(ImageOrientation(OriginBottomRight));
+ else if (!g_strcmp0(tag.get(), "rotate-270"))
+ setVideoSourceOrientation(ImageOrientation(OriginLeftBottom));
+ }
+ gst_tag_list_unref(tags);
+ break;
+ }
default:
- LOG_MEDIA_MESSAGE("Unhandled GStreamer message type: %s",
+ GST_DEBUG("Unhandled GStreamer message type: %s",
GST_MESSAGE_TYPE_NAME(message));
break;
}
- return TRUE;
-}
-
-void MediaPlayerPrivateGStreamer::handlePluginInstallerResult(GstInstallPluginsReturn result)
-{
- m_missingPlugins = false;
- if (result == GST_INSTALL_PLUGINS_SUCCESS) {
- changePipelineState(GST_STATE_READY);
- changePipelineState(GST_STATE_PAUSED);
- }
+ return;
}
void MediaPlayerPrivateGStreamer::processBufferingStats(GstMessage* message)
{
m_buffering = true;
- const GstStructure *structure = gst_message_get_structure(message);
- gst_structure_get_int(structure, "buffer-percent", &m_bufferingPercentage);
+ gst_message_parse_buffering(message, &m_bufferingPercentage);
- LOG_MEDIA_MESSAGE("[Buffering] Buffering: %d%%.", m_bufferingPercentage);
+ GST_DEBUG("[Buffering] Buffering: %d%%.", m_bufferingPercentage);
updateStates();
}
+#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
+void MediaPlayerPrivateGStreamer::processMpegTsSection(GstMpegtsSection* section)
+{
+ ASSERT(section);
+
+ if (section->section_type == GST_MPEGTS_SECTION_PMT) {
+ const GstMpegtsPMT* pmt = gst_mpegts_section_get_pmt(section);
+ m_metadataTracks.clear();
+ for (guint i = 0; i < pmt->streams->len; ++i) {
+ const GstMpegtsPMTStream* stream = static_cast<const GstMpegtsPMTStream*>(g_ptr_array_index(pmt->streams, i));
+ if (stream->stream_type == 0x05 || stream->stream_type >= 0x80) {
+ AtomicString pid = String::number(stream->pid);
+ RefPtr<InbandMetadataTextTrackPrivateGStreamer> track = InbandMetadataTextTrackPrivateGStreamer::create(
+ InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data, pid);
+
+ // 4.7.10.12.2 Sourcing in-band text tracks
+ // If the new text track's kind is metadata, then set the text track in-band metadata track dispatch
+ // type as follows, based on the type of the media resource:
+ // Let stream type be the value of the "stream_type" field describing the text track's type in the
+ // file's program map section, interpreted as an 8-bit unsigned integer. Let length be the value of
+ // the "ES_info_length" field for the track in the same part of the program map section, interpreted
+ // as an integer as defined by the MPEG-2 specification. Let descriptor bytes be the length bytes
+ // following the "ES_info_length" field. The text track in-band metadata track dispatch type must be
+ // set to the concatenation of the stream type byte and the zero or more descriptor bytes bytes,
+ // expressed in hexadecimal using uppercase ASCII hex digits.
+ String inbandMetadataTrackDispatchType;
+ appendUnsignedAsHexFixedSize(stream->stream_type, inbandMetadataTrackDispatchType, 2);
+ for (guint j = 0; j < stream->descriptors->len; ++j) {
+ const GstMpegtsDescriptor* descriptor = static_cast<const GstMpegtsDescriptor*>(g_ptr_array_index(stream->descriptors, j));
+ for (guint k = 0; k < descriptor->length; ++k)
+ appendByteAsHex(descriptor->data[k], inbandMetadataTrackDispatchType);
+ }
+ track->setInBandMetadataTrackDispatchType(inbandMetadataTrackDispatchType);
+
+ m_metadataTracks.add(pid, track);
+ m_player->addTextTrack(*track);
+ }
+ }
+ } else {
+ AtomicString pid = String::number(section->pid);
+ RefPtr<InbandMetadataTextTrackPrivateGStreamer> track = m_metadataTracks.get(pid);
+ if (!track)
+ return;
+
+ GRefPtr<GBytes> data = gst_mpegts_section_get_data(section);
+ gsize size;
+ const void* bytes = g_bytes_get_data(data.get(), &size);
+
+ track->addDataCue(currentMediaTime(), currentMediaTime(), bytes, size);
+ }
+}
+#endif
+
#if ENABLE(VIDEO_TRACK)
void MediaPlayerPrivateGStreamer::processTableOfContents(GstMessage* message)
{
if (m_chaptersTrack)
- m_player->removeTextTrack(m_chaptersTrack);
+ m_player->removeTextTrack(*m_chaptersTrack);
- m_chaptersTrack = InbandMetadataTextTrackPrivateGStreamer::create(InbandTextTrackPrivate::Chapters);
- m_player->addTextTrack(m_chaptersTrack);
+ m_chaptersTrack = InbandMetadataTextTrackPrivateGStreamer::create(InbandTextTrackPrivate::Chapters, InbandTextTrackPrivate::Generic);
+ m_player->addTextTrack(*m_chaptersTrack);
GRefPtr<GstToc> toc;
gboolean updated;
@@ -1137,12 +1170,11 @@ void MediaPlayerPrivateGStreamer::processTableOfContents(GstMessage* message)
ASSERT(toc);
for (GList* i = gst_toc_get_entries(toc.get()); i; i = i->next)
- processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data), 0);
+ processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
}
-void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry, GstTocEntry* parent)
+void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry)
{
- UNUSED_PARAM(parent);
ASSERT(entry);
RefPtr<GenericCueData> cue = GenericCueData::create();
@@ -1150,13 +1182,13 @@ void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry
gint64 start = -1, stop = -1;
gst_toc_entry_get_start_stop_times(entry, &start, &stop);
if (start != -1)
- cue->setStartTime(static_cast<double>(start) / GST_SECOND);
+ cue->setStartTime(MediaTime(start, GST_SECOND));
if (stop != -1)
- cue->setEndTime(static_cast<double>(stop) / GST_SECOND);
+ cue->setEndTime(MediaTime(stop, GST_SECOND));
GstTagList* tags = gst_toc_entry_get_tags(entry);
if (tags) {
- gchar* title = 0;
+ gchar* title = nullptr;
gst_tag_list_get_string(tags, GST_TAG_TITLE, &title);
if (title) {
cue->setContent(title);
@@ -1164,18 +1196,18 @@ void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry
}
}
- m_chaptersTrack->client()->addGenericCue(m_chaptersTrack.get(), cue.release());
+ m_chaptersTrack->addGenericCue(cue.release());
for (GList* i = gst_toc_entry_get_sub_entries(entry); i; i = i->next)
- processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data), entry);
+ processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
}
#endif
-void MediaPlayerPrivateGStreamer::fillTimerFired(Timer<MediaPlayerPrivateGStreamer>*)
+void MediaPlayerPrivateGStreamer::fillTimerFired()
{
GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);
- if (!gst_element_query(m_playBin.get(), query)) {
+ if (!gst_element_query(m_pipeline.get(), query)) {
gst_query_unref(query);
return;
}
@@ -1183,25 +1215,24 @@ void MediaPlayerPrivateGStreamer::fillTimerFired(Timer<MediaPlayerPrivateGStream
gint64 start, stop;
gdouble fillStatus = 100.0;
- gst_query_parse_buffering_range(query, 0, &start, &stop, 0);
+ gst_query_parse_buffering_range(query, nullptr, &start, &stop, nullptr);
gst_query_unref(query);
if (stop != -1)
fillStatus = 100.0 * stop / GST_FORMAT_PERCENT_MAX;
- LOG_MEDIA_MESSAGE("[Buffering] Download buffer filled up to %f%%", fillStatus);
+ GST_DEBUG("[Buffering] Download buffer filled up to %f%%", fillStatus);
- if (!m_mediaDuration)
- durationChanged();
+ float mediaDuration = durationMediaTime().toDouble();
// Update maxTimeLoaded only if the media duration is
// available. Otherwise we can't compute it.
- if (m_mediaDuration) {
+ if (mediaDuration) {
if (fillStatus == 100.0)
- m_maxTimeLoaded = m_mediaDuration;
+ m_maxTimeLoaded = mediaDuration;
else
- m_maxTimeLoaded = static_cast<float>((fillStatus * m_mediaDuration) / 100.0);
- LOG_MEDIA_MESSAGE("[Buffering] Updated maxTimeLoaded: %f", m_maxTimeLoaded);
+ m_maxTimeLoaded = static_cast<float>((fillStatus * mediaDuration) / 100.0);
+ GST_DEBUG("[Buffering] Updated maxTimeLoaded: %f", m_maxTimeLoaded);
}
m_downloadFinished = fillStatus == 100.0;
@@ -1222,12 +1253,13 @@ float MediaPlayerPrivateGStreamer::maxTimeSeekable() const
if (m_errorOccured)
return 0.0f;
- LOG_MEDIA_MESSAGE("maxTimeSeekable");
+ float mediaDuration = durationMediaTime().toDouble();
+ GST_DEBUG("maxTimeSeekable, duration: %f", mediaDuration);
// infinite duration means live stream
- if (std::isinf(duration()))
+ if (std::isinf(mediaDuration))
return 0.0f;
- return duration();
+ return mediaDuration;
}
float MediaPlayerPrivateGStreamer::maxTimeLoaded() const
@@ -1236,29 +1268,29 @@ float MediaPlayerPrivateGStreamer::maxTimeLoaded() const
return 0.0f;
float loaded = m_maxTimeLoaded;
- if (m_isEndReached && m_mediaDuration)
- loaded = m_mediaDuration;
- LOG_MEDIA_MESSAGE("maxTimeLoaded: %f", loaded);
+ if (m_isEndReached)
+ loaded = durationMediaTime().toDouble();
+ GST_DEBUG("maxTimeLoaded: %f", loaded);
return loaded;
}
bool MediaPlayerPrivateGStreamer::didLoadingProgress() const
{
- if (!m_playBin || !m_mediaDuration || !totalBytes())
+ if (UNLIKELY(!m_pipeline || !durationMediaTime() || (!isMediaSource() && !totalBytes())))
return false;
float currentMaxTimeLoaded = maxTimeLoaded();
bool didLoadingProgress = currentMaxTimeLoaded != m_maxTimeLoadedAtLastDidLoadingProgress;
m_maxTimeLoadedAtLastDidLoadingProgress = currentMaxTimeLoaded;
- LOG_MEDIA_MESSAGE("didLoadingProgress: %d", didLoadingProgress);
+ GST_DEBUG("didLoadingProgress: %d", didLoadingProgress);
return didLoadingProgress;
}
-unsigned MediaPlayerPrivateGStreamer::totalBytes() const
+unsigned long long MediaPlayerPrivateGStreamer::totalBytes() const
{
if (m_errorOccured)
return 0;
- if (m_totalBytes != -1)
+ if (m_totalBytes)
return m_totalBytes;
if (!m_source)
@@ -1267,8 +1299,8 @@ unsigned MediaPlayerPrivateGStreamer::totalBytes() const
GstFormat fmt = GST_FORMAT_BYTES;
gint64 length = 0;
if (gst_element_query_duration(m_source.get(), fmt, &length)) {
- INFO_MEDIA_MESSAGE("totalBytes %" G_GINT64_FORMAT, length);
- m_totalBytes = static_cast<unsigned>(length);
+ GST_INFO("totalBytes %" G_GINT64_FORMAT, length);
+ m_totalBytes = static_cast<unsigned long long>(length);
m_isStreaming = !length;
return m_totalBytes;
}
@@ -1302,25 +1334,105 @@ unsigned MediaPlayerPrivateGStreamer::totalBytes() const
gst_iterator_free(iter);
- INFO_MEDIA_MESSAGE("totalBytes %" G_GINT64_FORMAT, length);
- m_totalBytes = static_cast<unsigned>(length);
+ GST_INFO("totalBytes %" G_GINT64_FORMAT, length);
+ m_totalBytes = static_cast<unsigned long long>(length);
m_isStreaming = !length;
return m_totalBytes;
}
+void MediaPlayerPrivateGStreamer::sourceChangedCallback(MediaPlayerPrivateGStreamer* player)
+{
+ player->sourceChanged();
+}
+
+void MediaPlayerPrivateGStreamer::uriDecodeBinElementAddedCallback(GstBin* bin, GstElement* element, MediaPlayerPrivateGStreamer* player)
+{
+ if (g_strcmp0(G_OBJECT_CLASS_NAME(G_OBJECT_GET_CLASS(G_OBJECT(element))), "GstDownloadBuffer"))
+ return;
+
+ player->m_downloadBuffer = element;
+ g_signal_handlers_disconnect_by_func(bin, reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), player);
+ g_signal_connect_swapped(element, "notify::temp-location", G_CALLBACK(downloadBufferFileCreatedCallback), player);
+
+ GUniqueOutPtr<char> oldDownloadTemplate;
+ g_object_get(element, "temp-template", &oldDownloadTemplate.outPtr(), nullptr);
+
+ GUniquePtr<char> newDownloadTemplate(g_build_filename(G_DIR_SEPARATOR_S, "var", "tmp", "WebKit-Media-XXXXXX", nullptr));
+ g_object_set(element, "temp-template", newDownloadTemplate.get(), nullptr);
+ GST_TRACE("Reconfigured file download template from '%s' to '%s'", oldDownloadTemplate.get(), newDownloadTemplate.get());
+
+ player->purgeOldDownloadFiles(oldDownloadTemplate.get());
+}
+
+void MediaPlayerPrivateGStreamer::downloadBufferFileCreatedCallback(MediaPlayerPrivateGStreamer* player)
+{
+ ASSERT(player->m_downloadBuffer);
+
+ g_signal_handlers_disconnect_by_func(player->m_downloadBuffer.get(), reinterpret_cast<gpointer>(downloadBufferFileCreatedCallback), player);
+
+ GUniqueOutPtr<char> downloadFile;
+ g_object_get(player->m_downloadBuffer.get(), "temp-location", &downloadFile.outPtr(), nullptr);
+ player->m_downloadBuffer = nullptr;
+
+ if (UNLIKELY(!deleteFile(downloadFile.get()))) {
+ GST_WARNING("Couldn't unlink media temporary file %s after creation", downloadFile.get());
+ return;
+ }
+
+ GST_TRACE("Unlinked media temporary file %s after creation", downloadFile.get());
+}
+
+void MediaPlayerPrivateGStreamer::purgeOldDownloadFiles(const char* downloadFileTemplate)
+{
+ if (!downloadFileTemplate)
+ return;
+
+ GUniquePtr<char> templatePath(g_path_get_dirname(downloadFileTemplate));
+ GUniquePtr<char> templateFile(g_path_get_basename(downloadFileTemplate));
+ String templatePattern = String(templateFile.get()).replace("X", "?");
+
+ for (auto& filePath : listDirectory(templatePath.get(), templatePattern)) {
+ if (UNLIKELY(!deleteFile(filePath))) {
+ GST_WARNING("Couldn't unlink legacy media temporary file: %s", filePath.utf8().data());
+ continue;
+ }
+
+ GST_TRACE("Unlinked legacy media temporary file: %s", filePath.utf8().data());
+ }
+}
+
void MediaPlayerPrivateGStreamer::sourceChanged()
{
+ if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
+ g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);
+
m_source.clear();
- g_object_get(m_playBin.get(), "source", &m_source.outPtr(), NULL);
+ g_object_get(m_pipeline.get(), "source", &m_source.outPtr(), nullptr);
- if (WEBKIT_IS_WEB_SRC(m_source.get()))
+ if (WEBKIT_IS_WEB_SRC(m_source.get())) {
webKitWebSrcSetMediaPlayer(WEBKIT_WEB_SRC(m_source.get()), m_player);
-#if ENABLE(MEDIA_SOURCE)
- if (m_mediaSource && WEBKIT_IS_MEDIA_SRC(m_source.get())) {
- MediaSourceGStreamer::open(m_mediaSource.get(), WEBKIT_MEDIA_SRC(m_source.get()));
- webKitMediaSrcSetPlayBin(WEBKIT_MEDIA_SRC(m_source.get()), m_playBin.get());
+ g_signal_connect(GST_ELEMENT_PARENT(m_source.get()), "element-added", G_CALLBACK(uriDecodeBinElementAddedCallback), this);
}
-#endif
+}
+
+bool MediaPlayerPrivateGStreamer::hasSingleSecurityOrigin() const
+{
+ if (!m_source)
+ return false;
+
+ if (!WEBKIT_IS_WEB_SRC(m_source.get()))
+ return true;
+
+ GUniqueOutPtr<char> originalURI, resolvedURI;
+ g_object_get(m_source.get(), "location", &originalURI.outPtr(), "resolved-location", &resolvedURI.outPtr(), nullptr);
+ if (!originalURI || !resolvedURI)
+ return false;
+ if (!g_strcmp0(originalURI.get(), resolvedURI.get()))
+ return true;
+
+ Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::createFromString(String::fromUTF8(resolvedURI.get())));
+ Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(String::fromUTF8(originalURI.get())));
+ return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
}
void MediaPlayerPrivateGStreamer::cancelLoad()
@@ -1328,20 +1440,20 @@ void MediaPlayerPrivateGStreamer::cancelLoad()
if (m_networkState < MediaPlayer::Loading || m_networkState == MediaPlayer::Loaded)
return;
- if (m_playBin)
+ if (m_pipeline)
changePipelineState(GST_STATE_READY);
}
void MediaPlayerPrivateGStreamer::asyncStateChangeDone()
{
- if (!m_playBin || m_errorOccured)
+ if (!m_pipeline || m_errorOccured)
return;
if (m_seeking) {
if (m_seekIsPending)
updateStates();
else {
- LOG_MEDIA_MESSAGE("[Seek] seeked to %f", m_seekTime);
+ GST_DEBUG("[Seek] seeked to %f", m_seekTime);
m_seeking = false;
if (m_timeOfOverlappingSeek != m_seekTime && m_timeOfOverlappingSeek != -1) {
seek(m_timeOfOverlappingSeek);
@@ -1352,7 +1464,7 @@ void MediaPlayerPrivateGStreamer::asyncStateChangeDone()
// The pipeline can still have a pending state. In this case a position query will fail.
// Right now we can use m_seekTime as a fallback.
- m_canFallBackToLastFinishedSeekPositon = true;
+ m_canFallBackToLastFinishedSeekPosition = true;
timeChanged();
}
} else
@@ -1361,7 +1473,7 @@ void MediaPlayerPrivateGStreamer::asyncStateChangeDone()
void MediaPlayerPrivateGStreamer::updateStates()
{
- if (!m_playBin)
+ if (!m_pipeline)
return;
if (m_errorOccured)
@@ -1372,25 +1484,19 @@ void MediaPlayerPrivateGStreamer::updateStates()
GstState state;
GstState pending;
- GstStateChangeReturn getStateResult = gst_element_get_state(m_playBin.get(), &state, &pending, 250 * GST_NSECOND);
+ GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &pending, 250 * GST_NSECOND);
bool shouldUpdatePlaybackState = false;
switch (getStateResult) {
case GST_STATE_CHANGE_SUCCESS: {
- LOG_MEDIA_MESSAGE("State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
+ GST_DEBUG("State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
// Do nothing if on EOS and state changed to READY to avoid recreating the player
// on HTMLMediaElement and properly generate the video 'ended' event.
if (m_isEndReached && state == GST_STATE_READY)
break;
- if (state <= GST_STATE_READY) {
- m_resetPipeline = true;
- m_mediaDuration = 0;
- } else {
- m_resetPipeline = false;
- cacheDuration();
- }
+ m_resetPipeline = state <= GST_STATE_READY;
bool didBuffering = m_buffering;
@@ -1408,7 +1514,7 @@ void MediaPlayerPrivateGStreamer::updateStates()
case GST_STATE_PLAYING:
if (m_buffering) {
if (m_bufferingPercentage == 100) {
- LOG_MEDIA_MESSAGE("[Buffering] Complete.");
+ GST_DEBUG("[Buffering] Complete.");
m_buffering = false;
m_readyState = MediaPlayer::HaveEnoughData;
m_networkState = m_downloadFinished ? MediaPlayer::Idle : MediaPlayer::Loading;
@@ -1439,14 +1545,14 @@ void MediaPlayerPrivateGStreamer::updateStates()
}
if (didBuffering && !m_buffering && !m_paused && m_playbackRate) {
- LOG_MEDIA_MESSAGE("[Buffering] Restarting playback.");
+ GST_DEBUG("[Buffering] Restarting playback.");
changePipelineState(GST_STATE_PLAYING);
}
} else if (state == GST_STATE_PLAYING) {
m_paused = false;
if ((m_buffering && !isLiveStream()) || !m_playbackRate) {
- LOG_MEDIA_MESSAGE("[Buffering] Pausing stream for buffering.");
+ GST_DEBUG("[Buffering] Pausing stream for buffering.");
changePipelineState(GST_STATE_PAUSED);
}
} else
@@ -1454,21 +1560,21 @@ void MediaPlayerPrivateGStreamer::updateStates()
if (m_requestedState == GST_STATE_PAUSED && state == GST_STATE_PAUSED) {
shouldUpdatePlaybackState = true;
- LOG_MEDIA_MESSAGE("Requested state change to %s was completed", gst_element_state_get_name(state));
+ GST_DEBUG("Requested state change to %s was completed", gst_element_state_get_name(state));
}
break;
}
case GST_STATE_CHANGE_ASYNC:
- LOG_MEDIA_MESSAGE("Async: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
+ GST_DEBUG("Async: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
// Change in progress.
break;
case GST_STATE_CHANGE_FAILURE:
- LOG_MEDIA_MESSAGE("Failure: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
+ GST_DEBUG("Failure: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
// Change failed
return;
case GST_STATE_CHANGE_NO_PREROLL:
- LOG_MEDIA_MESSAGE("No preroll: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
+ GST_DEBUG("No preroll: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
// Live pipelines go in PAUSED without prerolling.
m_isStreaming = true;
@@ -1488,7 +1594,7 @@ void MediaPlayerPrivateGStreamer::updateStates()
m_networkState = MediaPlayer::Loading;
break;
default:
- LOG_MEDIA_MESSAGE("Else : %d", getStateResult);
+ GST_DEBUG("Else : %d", getStateResult);
break;
}
@@ -1498,22 +1604,22 @@ void MediaPlayerPrivateGStreamer::updateStates()
m_player->playbackStateChanged();
if (m_networkState != oldNetworkState) {
- LOG_MEDIA_MESSAGE("Network State Changed from %u to %u", oldNetworkState, m_networkState);
+ GST_DEBUG("Network State Changed from %u to %u", oldNetworkState, m_networkState);
m_player->networkStateChanged();
}
if (m_readyState != oldReadyState) {
- LOG_MEDIA_MESSAGE("Ready State Changed from %u to %u", oldReadyState, m_readyState);
+ GST_DEBUG("Ready State Changed from %u to %u", oldReadyState, m_readyState);
m_player->readyStateChanged();
}
if (getStateResult == GST_STATE_CHANGE_SUCCESS && state >= GST_STATE_PAUSED) {
updatePlaybackRate();
if (m_seekIsPending) {
- LOG_MEDIA_MESSAGE("[Seek] committing pending seek to %f", m_seekTime);
+ GST_DEBUG("[Seek] committing pending seek to %f", m_seekTime);
m_seekIsPending = false;
m_seeking = doSeek(toGstClockTime(m_seekTime), m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE));
if (!m_seeking)
- LOG_MEDIA_MESSAGE("[Seek] seeking to %f failed", m_seekTime);
+ GST_DEBUG("[Seek] seeking to %f failed", m_seekTime);
}
}
}
@@ -1544,7 +1650,7 @@ bool MediaPlayerPrivateGStreamer::loadNextLocation()
return false;
const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
- const gchar* newLocation = 0;
+ const gchar* newLocation = nullptr;
if (!locations) {
// Fallback on new-location string.
@@ -1555,7 +1661,7 @@ bool MediaPlayerPrivateGStreamer::loadNextLocation()
if (!newLocation) {
if (m_mediaLocationCurrentIndex < 0) {
- m_mediaLocations = 0;
+ m_mediaLocations = nullptr;
return false;
}
@@ -1580,7 +1686,7 @@ bool MediaPlayerPrivateGStreamer::loadNextLocation()
RefPtr<SecurityOrigin> securityOrigin = SecurityOrigin::create(m_url);
if (securityOrigin->canRequest(newUrl)) {
- INFO_MEDIA_MESSAGE("New media url: %s", newUrl.string().utf8().data());
+ GST_INFO("New media url: %s", newUrl.string().utf8().data());
// Reset player states.
m_networkState = MediaPlayer::Loading;
@@ -1593,16 +1699,16 @@ bool MediaPlayerPrivateGStreamer::loadNextLocation()
changePipelineState(GST_STATE_READY);
GstState state;
- gst_element_get_state(m_playBin.get(), &state, 0, 0);
+ gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
if (state <= GST_STATE_READY) {
// Set the new uri and start playing.
- g_object_set(m_playBin.get(), "uri", newUrl.string().utf8().data(), NULL);
+ g_object_set(m_pipeline.get(), "uri", newUrl.string().utf8().data(), nullptr);
m_url = newUrl;
changePipelineState(GST_STATE_PLAYING);
return true;
}
} else
- INFO_MEDIA_MESSAGE("Not allowed to load new media location: %s", newUrl.string().utf8().data());
+ GST_INFO("Not allowed to load new media location: %s", newUrl.string().utf8().data());
}
m_mediaLocationCurrentIndex--;
return false;
@@ -1624,49 +1730,29 @@ void MediaPlayerPrivateGStreamer::didEnd()
// Synchronize position and duration values to not confuse the
// HTMLMediaElement. In some cases like reverse playback the
// position is not always reported as 0 for instance.
- float now = currentTime();
- if (now > 0 && now <= duration() && m_mediaDuration != now) {
- m_mediaDurationKnown = true;
- m_mediaDuration = now;
+ MediaTime now = currentMediaTime();
+ if (now > MediaTime { } && now <= durationMediaTime())
m_player->durationChanged();
- }
m_isEndReached = true;
timeChanged();
- if (!m_player->mediaPlayerClient()->mediaPlayerIsLooping()) {
+ if (!m_player->client().mediaPlayerIsLooping()) {
m_paused = true;
+ m_durationAtEOS = durationMediaTime().toDouble();
changePipelineState(GST_STATE_READY);
m_downloadFinished = false;
}
}
-void MediaPlayerPrivateGStreamer::cacheDuration()
-{
- if (m_mediaDuration || !m_mediaDurationKnown)
- return;
-
- float newDuration = duration();
- if (std::isinf(newDuration)) {
- // Only pretend that duration is not available if the the query failed in a stable pipeline state.
- GstState state;
- if (gst_element_get_state(m_playBin.get(), &state, 0, 0) == GST_STATE_CHANGE_SUCCESS && state > GST_STATE_READY)
- m_mediaDurationKnown = false;
- return;
- }
-
- m_mediaDuration = newDuration;
-}
-
void MediaPlayerPrivateGStreamer::durationChanged()
{
- float previousDuration = m_mediaDuration;
+ float previousDuration = durationMediaTime().toDouble();
- cacheDuration();
// Avoid emiting durationchanged in the case where the previous
// duration was 0 because that case is already handled by the
// HTMLMediaElement.
- if (previousDuration && m_mediaDuration != previousDuration)
+ if (previousDuration && durationMediaTime().toDouble() != previousDuration)
m_player->durationChanged();
}
@@ -1683,154 +1769,187 @@ void MediaPlayerPrivateGStreamer::loadingFailed(MediaPlayer::NetworkState error)
}
// Loading failed, remove ready timer.
- if (m_readyTimerHandler) {
- g_source_remove(m_readyTimerHandler);
- m_readyTimerHandler = 0;
- }
-}
-
-static HashSet<String> mimeTypeCache()
-{
- initializeGStreamerAndRegisterWebKitElements();
-
- DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
- static bool typeListInitialized = false;
-
- if (typeListInitialized)
- return cache;
-
- const char* mimeTypes[] = {
- "application/ogg",
- "application/vnd.apple.mpegurl",
- "application/vnd.rn-realmedia",
- "application/x-3gp",
- "application/x-pn-realaudio",
- "audio/3gpp",
- "audio/aac",
- "audio/flac",
- "audio/iLBC-sh",
- "audio/midi",
- "audio/mobile-xmf",
- "audio/mp1",
- "audio/mp2",
- "audio/mp3",
- "audio/mp4",
- "audio/mpeg",
- "audio/ogg",
- "audio/opus",
- "audio/qcelp",
- "audio/riff-midi",
- "audio/speex",
- "audio/wav",
- "audio/webm",
- "audio/x-ac3",
- "audio/x-aiff",
- "audio/x-amr-nb-sh",
- "audio/x-amr-wb-sh",
- "audio/x-au",
- "audio/x-ay",
- "audio/x-celt",
- "audio/x-dts",
- "audio/x-flac",
- "audio/x-gbs",
- "audio/x-gsm",
- "audio/x-gym",
- "audio/x-imelody",
- "audio/x-ircam",
- "audio/x-kss",
- "audio/x-m4a",
- "audio/x-mod",
- "audio/x-mp3",
- "audio/x-mpeg",
- "audio/x-musepack",
- "audio/x-nist",
- "audio/x-nsf",
- "audio/x-paris",
- "audio/x-sap",
- "audio/x-sbc",
- "audio/x-sds",
- "audio/x-shorten",
- "audio/x-sid",
- "audio/x-spc",
- "audio/x-speex",
- "audio/x-svx",
- "audio/x-ttafile",
- "audio/x-vgm",
- "audio/x-voc",
- "audio/x-vorbis+ogg",
- "audio/x-w64",
- "audio/x-wav",
- "audio/x-wavpack",
- "audio/x-wavpack-correction",
- "video/3gpp",
- "video/mj2",
- "video/mp4",
- "video/mpeg",
- "video/mpegts",
- "video/ogg",
- "video/quicktime",
- "video/vivo",
- "video/webm",
- "video/x-cdxa",
- "video/x-dirac",
- "video/x-dv",
- "video/x-fli",
- "video/x-flv",
- "video/x-h263",
- "video/x-ivf",
- "video/x-m4v",
- "video/x-matroska",
- "video/x-mng",
- "video/x-ms-asf",
- "video/x-msvideo",
- "video/x-mve",
- "video/x-nuv",
- "video/x-vcd"
- };
-
- for (unsigned i = 0; i < (sizeof(mimeTypes) / sizeof(*mimeTypes)); ++i)
- cache.add(String(mimeTypes[i]));
-
- typeListInitialized = true;
- return cache;
-}
-
-void MediaPlayerPrivateGStreamer::getSupportedTypes(HashSet<String>& types)
-{
- types = mimeTypeCache();
+ m_readyTimerHandler.stop();
+}
+
+static HashSet<String, ASCIICaseInsensitiveHash>& mimeTypeSet()
+{
+ static NeverDestroyed<HashSet<String, ASCIICaseInsensitiveHash>> mimeTypes = []()
+ {
+ initializeGStreamerAndRegisterWebKitElements();
+ HashSet<String, ASCIICaseInsensitiveHash> set;
+
+ GList* audioDecoderFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO, GST_RANK_MARGINAL);
+ GList* videoDecoderFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO, GST_RANK_MARGINAL);
+ GList* demuxerFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DEMUXER, GST_RANK_MARGINAL);
+
+ enum ElementType {
+ AudioDecoder = 0,
+ VideoDecoder,
+ Demuxer
+ };
+ struct GstCapsWebKitMapping {
+ ElementType elementType;
+ const char* capsString;
+ Vector<AtomicString> webkitMimeTypes;
+ };
+
+ Vector<GstCapsWebKitMapping> mapping = {
+ {AudioDecoder, "audio/midi", {"audio/midi", "audio/riff-midi"}},
+ {AudioDecoder, "audio/x-sbc", { }},
+ {AudioDecoder, "audio/x-sid", { }},
+ {AudioDecoder, "audio/x-flac", {"audio/x-flac", "audio/flac"}},
+ {AudioDecoder, "audio/x-wav", {"audio/x-wav", "audio/wav"}},
+ {AudioDecoder, "audio/x-wavpack", {"audio/x-wavpack"}},
+ {AudioDecoder, "audio/x-speex", {"audio/speex", "audio/x-speex"}},
+ {AudioDecoder, "audio/x-ac3", { }},
+ {AudioDecoder, "audio/x-eac3", {"audio/x-ac3"}},
+ {AudioDecoder, "audio/x-dts", { }},
+ {VideoDecoder, "video/x-h264, profile=(string)high", {"video/mp4", "video/x-m4v"}},
+ {VideoDecoder, "video/x-msvideocodec", {"video/x-msvideo"}},
+ {VideoDecoder, "video/x-h263", { }},
+ {VideoDecoder, "video/mpegts", { }},
+ {VideoDecoder, "video/mpeg, mpegversion=(int){1,2}, systemstream=(boolean)false", {"video/mpeg"}},
+ {VideoDecoder, "video/x-dirac", { }},
+ {VideoDecoder, "video/x-flash-video", {"video/flv", "video/x-flv"}},
+ {Demuxer, "video/quicktime", { }},
+ {Demuxer, "video/quicktime, variant=(string)3gpp", {"video/3gpp"}},
+ {Demuxer, "application/x-3gp", { }},
+ {Demuxer, "video/x-ms-asf", { }},
+ {Demuxer, "audio/x-aiff", { }},
+ {Demuxer, "application/x-pn-realaudio", { }},
+ {Demuxer, "application/vnd.rn-realmedia", { }},
+ {Demuxer, "audio/x-wav", {"audio/x-wav", "audio/wav"}},
+ {Demuxer, "application/x-hls", {"application/vnd.apple.mpegurl", "application/x-mpegurl"}}
+ };
+
+ for (auto& current : mapping) {
+ GList* factories = demuxerFactories;
+ if (current.elementType == AudioDecoder)
+ factories = audioDecoderFactories;
+ else if (current.elementType == VideoDecoder)
+ factories = videoDecoderFactories;
+
+ if (gstRegistryHasElementForMediaType(factories, current.capsString)) {
+ if (!current.webkitMimeTypes.isEmpty()) {
+ for (const auto& mimeType : current.webkitMimeTypes)
+ set.add(mimeType);
+ } else
+ set.add(AtomicString(current.capsString));
+ }
+ }
+
+ bool opusSupported = false;
+ if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/x-opus")) {
+ opusSupported = true;
+ set.add(AtomicString("audio/opus"));
+ }
+
+ bool vorbisSupported = false;
+ if (gstRegistryHasElementForMediaType(demuxerFactories, "application/ogg")) {
+ set.add(AtomicString("application/ogg"));
+
+ vorbisSupported = gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/x-vorbis");
+ if (vorbisSupported) {
+ set.add(AtomicString("audio/ogg"));
+ set.add(AtomicString("audio/x-vorbis+ogg"));
+ }
+
+ if (gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-theora"))
+ set.add(AtomicString("video/ogg"));
+ }
+
+ bool audioMpegSupported = false;
+ if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/mpeg, mpegversion=(int)1, layer=(int)[1, 3]")) {
+ audioMpegSupported = true;
+ set.add(AtomicString("audio/mp1"));
+ set.add(AtomicString("audio/mp3"));
+ set.add(AtomicString("audio/x-mp3"));
+ }
+
+ if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/mpeg, mpegversion=(int){2, 4}")) {
+ audioMpegSupported = true;
+ set.add(AtomicString("audio/aac"));
+ set.add(AtomicString("audio/mp2"));
+ set.add(AtomicString("audio/mp4"));
+ set.add(AtomicString("audio/x-m4a"));
+ }
+
+ if (audioMpegSupported) {
+ set.add(AtomicString("audio/mpeg"));
+ set.add(AtomicString("audio/x-mpeg"));
+ }
+
+ if (gstRegistryHasElementForMediaType(demuxerFactories, "video/x-matroska")) {
+ set.add(AtomicString("video/x-matroska"));
+
+ if (gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp8")
+ || gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp9")
+ || gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp10"))
+ set.add(AtomicString("video/webm"));
+
+ if (vorbisSupported || opusSupported)
+ set.add(AtomicString("audio/webm"));
+ }
+
+ gst_plugin_feature_list_free(audioDecoderFactories);
+ gst_plugin_feature_list_free(videoDecoderFactories);
+ gst_plugin_feature_list_free(demuxerFactories);
+ return set;
+ }();
+ return mimeTypes;
+}
+
+void MediaPlayerPrivateGStreamer::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
+{
+ types = mimeTypeSet();
}
MediaPlayer::SupportsType MediaPlayerPrivateGStreamer::supportsType(const MediaEngineSupportParameters& parameters)
{
+ MediaPlayer::SupportsType result = MediaPlayer::IsNotSupported;
+#if ENABLE(MEDIA_SOURCE)
+ // MediaPlayerPrivateGStreamerMSE is in charge of mediasource playback, not us.
+ if (parameters.isMediaSource)
+ return result;
+#endif
+
+ // MediaStream playback is handled by the OpenWebRTC player.
+ if (parameters.isMediaStream)
+ return result;
+
if (parameters.type.isNull() || parameters.type.isEmpty())
- return MediaPlayer::IsNotSupported;
+ return result;
// spec says we should not return "probably" if the codecs string is empty
- if (mimeTypeCache().contains(parameters.type))
- return parameters.codecs.isEmpty() ? MediaPlayer::MayBeSupported : MediaPlayer::IsSupported;
- return MediaPlayer::IsNotSupported;
+ if (mimeTypeSet().contains(parameters.type))
+ result = parameters.codecs.isEmpty() ? MediaPlayer::MayBeSupported : MediaPlayer::IsSupported;
+
+ return extendedSupportsType(parameters, result);
}
void MediaPlayerPrivateGStreamer::setDownloadBuffering()
{
- if (!m_playBin)
+ if (!m_pipeline)
return;
- GstPlayFlags flags;
- g_object_get(m_playBin.get(), "flags", &flags, NULL);
+ unsigned flags;
+ g_object_get(m_pipeline.get(), "flags", &flags, nullptr);
+
+ unsigned flagDownload = getGstPlayFlag("download");
// We don't want to stop downloading if we already started it.
- if (flags & GST_PLAY_FLAG_DOWNLOAD && m_readyState > MediaPlayer::HaveNothing && !m_resetPipeline)
+ if (flags & flagDownload && m_readyState > MediaPlayer::HaveNothing && !m_resetPipeline)
return;
bool shouldDownload = !isLiveStream() && m_preload == MediaPlayer::Auto;
if (shouldDownload) {
- LOG_MEDIA_MESSAGE("Enabling on-disk buffering");
- g_object_set(m_playBin.get(), "flags", flags | GST_PLAY_FLAG_DOWNLOAD, NULL);
+ GST_DEBUG("Enabling on-disk buffering");
+ g_object_set(m_pipeline.get(), "flags", flags | flagDownload, nullptr);
m_fillTimer.startRepeating(0.2);
} else {
- LOG_MEDIA_MESSAGE("Disabling on-disk buffering");
- g_object_set(m_playBin.get(), "flags", flags & ~GST_PLAY_FLAG_DOWNLOAD, NULL);
+ GST_DEBUG("Disabling on-disk buffering");
+ g_object_set(m_pipeline.get(), "flags", flags & ~flagDownload, nullptr);
m_fillTimer.stop();
}
}
@@ -1851,93 +1970,172 @@ void MediaPlayerPrivateGStreamer::setPreload(MediaPlayer::Preload preload)
GstElement* MediaPlayerPrivateGStreamer::createAudioSink()
{
- m_autoAudioSink = gst_element_factory_make("autoaudiosink", 0);
- g_signal_connect(m_autoAudioSink.get(), "child-added", G_CALLBACK(setAudioStreamPropertiesCallback), this);
+ m_autoAudioSink = gst_element_factory_make("autoaudiosink", nullptr);
+ if (!m_autoAudioSink) {
+ GST_WARNING("GStreamer's autoaudiosink not found. Please check your gst-plugins-good installation");
+ return nullptr;
+ }
- // Construct audio sink only if pitch preserving is enabled.
- if (!m_preservesPitch)
- return m_autoAudioSink.get();
+ g_signal_connect_swapped(m_autoAudioSink.get(), "child-added", G_CALLBACK(setAudioStreamPropertiesCallback), this);
+
+ GstElement* audioSinkBin;
- GstElement* scale = gst_element_factory_make("scaletempo", 0);
- if (!scale) {
- GST_WARNING("Failed to create scaletempo");
+ if (webkitGstCheckVersion(1, 4, 2)) {
+#if ENABLE(WEB_AUDIO)
+ audioSinkBin = gst_bin_new("audio-sink");
+ ensureAudioSourceProvider();
+ m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
+ return audioSinkBin;
+#else
return m_autoAudioSink.get();
+#endif
}
- GstElement* audioSinkBin = gst_bin_new("audio-sink");
- GstElement* convert = gst_element_factory_make("audioconvert", 0);
- GstElement* resample = gst_element_factory_make("audioresample", 0);
+ // Construct audio sink only if pitch preserving is enabled.
+ // If GStreamer 1.4.2 is used the audio-filter playbin property is used instead.
+ if (m_preservesPitch) {
+ GstElement* scale = gst_element_factory_make("scaletempo", nullptr);
+ if (!scale) {
+ GST_WARNING("Failed to create scaletempo");
+ return m_autoAudioSink.get();
+ }
- gst_bin_add_many(GST_BIN(audioSinkBin), scale, convert, resample, m_autoAudioSink.get(), NULL);
+ audioSinkBin = gst_bin_new("audio-sink");
+ gst_bin_add(GST_BIN(audioSinkBin), scale);
+ GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(scale, "sink"));
+ gst_element_add_pad(audioSinkBin, gst_ghost_pad_new("sink", pad.get()));
- if (!gst_element_link_many(scale, convert, resample, m_autoAudioSink.get(), NULL)) {
- GST_WARNING("Failed to link audio sink elements");
- gst_object_unref(audioSinkBin);
- return m_autoAudioSink.get();
+#if ENABLE(WEB_AUDIO)
+ ensureAudioSourceProvider();
+ m_audioSourceProvider->configureAudioBin(audioSinkBin, scale);
+#else
+ GstElement* convert = gst_element_factory_make("audioconvert", nullptr);
+ GstElement* resample = gst_element_factory_make("audioresample", nullptr);
+
+ gst_bin_add_many(GST_BIN(audioSinkBin), convert, resample, m_autoAudioSink.get(), nullptr);
+
+ if (!gst_element_link_many(scale, convert, resample, m_autoAudioSink.get(), nullptr)) {
+ GST_WARNING("Failed to link audio sink elements");
+ gst_object_unref(audioSinkBin);
+ return m_autoAudioSink.get();
+ }
+#endif
+ return audioSinkBin;
}
- GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(scale, "sink"));
- gst_element_add_pad(audioSinkBin, gst_ghost_pad_new("sink", pad.get()));
+#if ENABLE(WEB_AUDIO)
+ audioSinkBin = gst_bin_new("audio-sink");
+ ensureAudioSourceProvider();
+ m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
return audioSinkBin;
+#endif
+ ASSERT_NOT_REACHED();
+ return nullptr;
}
GstElement* MediaPlayerPrivateGStreamer::audioSink() const
{
GstElement* sink;
- g_object_get(m_playBin.get(), "audio-sink", &sink, nullptr);
+ g_object_get(m_pipeline.get(), "audio-sink", &sink, nullptr);
return sink;
}
+#if ENABLE(WEB_AUDIO)
+void MediaPlayerPrivateGStreamer::ensureAudioSourceProvider()
+{
+ if (!m_audioSourceProvider)
+ m_audioSourceProvider = std::make_unique<AudioSourceProviderGStreamer>();
+}
+
+AudioSourceProvider* MediaPlayerPrivateGStreamer::audioSourceProvider()
+{
+ ensureAudioSourceProvider();
+ return m_audioSourceProvider.get();
+}
+#endif
+
void MediaPlayerPrivateGStreamer::createGSTPlayBin()
{
- ASSERT(!m_playBin);
+ ASSERT(!m_pipeline);
// gst_element_factory_make() returns a floating reference so
// we should not adopt.
- m_playBin = gst_element_factory_make("playbin", "play");
- setStreamVolumeElement(GST_STREAM_VOLUME(m_playBin.get()));
+ setPipeline(gst_element_factory_make("playbin", "play"));
+ setStreamVolumeElement(GST_STREAM_VOLUME(m_pipeline.get()));
+
+ GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
+ gst_bus_set_sync_handler(bus.get(), [](GstBus*, GstMessage* message, gpointer userData) {
+ auto& player = *static_cast<MediaPlayerPrivateGStreamer*>(userData);
+
+ if (player.handleSyncMessage(message)) {
+ gst_message_unref(message);
+ return GST_BUS_DROP;
+ }
- GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_playBin.get())));
+ return GST_BUS_PASS;
+ }, this, nullptr);
+
+ // Let also other listeners subscribe to (application) messages in this bus.
gst_bus_add_signal_watch(bus.get());
- g_signal_connect(bus.get(), "message", G_CALLBACK(mediaPlayerPrivateMessageCallback), this);
+ g_signal_connect(bus.get(), "message", G_CALLBACK(busMessageCallback), this);
- g_object_set(m_playBin.get(), "mute", m_player->muted(), NULL);
+ g_object_set(m_pipeline.get(), "mute", m_player->muted(), nullptr);
- g_signal_connect(m_playBin.get(), "notify::source", G_CALLBACK(mediaPlayerPrivateSourceChangedCallback), this);
- g_signal_connect(m_playBin.get(), "video-changed", G_CALLBACK(mediaPlayerPrivateVideoChangedCallback), this);
- g_signal_connect(m_playBin.get(), "audio-changed", G_CALLBACK(mediaPlayerPrivateAudioChangedCallback), this);
+ g_signal_connect_swapped(m_pipeline.get(), "notify::source", G_CALLBACK(sourceChangedCallback), this);
+ g_signal_connect_swapped(m_pipeline.get(), "video-changed", G_CALLBACK(videoChangedCallback), this);
+ g_signal_connect_swapped(m_pipeline.get(), "audio-changed", G_CALLBACK(audioChangedCallback), this);
#if ENABLE(VIDEO_TRACK)
- if (webkitGstCheckVersion(1, 1, 2)) {
- g_signal_connect(m_playBin.get(), "text-changed", G_CALLBACK(mediaPlayerPrivateTextChangedCallback), this);
+ g_signal_connect_swapped(m_pipeline.get(), "text-changed", G_CALLBACK(textChangedCallback), this);
- GstElement* textCombiner = webkitTextCombinerNew();
- ASSERT(textCombiner);
- g_object_set(m_playBin.get(), "text-stream-combiner", textCombiner, NULL);
+ GstElement* textCombiner = webkitTextCombinerNew();
+ ASSERT(textCombiner);
+ g_object_set(m_pipeline.get(), "text-stream-combiner", textCombiner, nullptr);
- m_textAppSink = webkitTextSinkNew();
- ASSERT(m_textAppSink);
+ m_textAppSink = webkitTextSinkNew();
+ ASSERT(m_textAppSink);
- m_textAppSinkPad = adoptGRef(gst_element_get_static_pad(m_textAppSink.get(), "sink"));
- ASSERT(m_textAppSinkPad);
+ m_textAppSinkPad = adoptGRef(gst_element_get_static_pad(m_textAppSink.get(), "sink"));
+ ASSERT(m_textAppSinkPad);
- g_object_set(m_textAppSink.get(), "emit-signals", true, "enable-last-sample", false, "caps", gst_caps_new_empty_simple("text/vtt"), NULL);
- g_signal_connect(m_textAppSink.get(), "new-sample", G_CALLBACK(mediaPlayerPrivateNewTextSampleCallback), this);
+ g_object_set(m_textAppSink.get(), "emit-signals", true, "enable-last-sample", false, "caps", gst_caps_new_empty_simple("text/vtt"), nullptr);
+ g_signal_connect_swapped(m_textAppSink.get(), "new-sample", G_CALLBACK(newTextSampleCallback), this);
- g_object_set(m_playBin.get(), "text-sink", m_textAppSink.get(), NULL);
- }
+ g_object_set(m_pipeline.get(), "text-sink", m_textAppSink.get(), nullptr);
#endif
- g_object_set(m_playBin.get(), "video-sink", createVideoSink(), "audio-sink", createAudioSink(), nullptr);
+ g_object_set(m_pipeline.get(), "video-sink", createVideoSink(), "audio-sink", createAudioSink(), nullptr);
+
+ configurePlaySink();
+
+ // On 1.4.2 and newer we use the audio-filter property instead.
+ // See https://bugzilla.gnome.org/show_bug.cgi?id=735748 for
+ // the reason for using >= 1.4.2 instead of >= 1.4.0.
+ if (m_preservesPitch && webkitGstCheckVersion(1, 4, 2)) {
+ GstElement* scale = gst_element_factory_make("scaletempo", nullptr);
- GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_webkitVideoSink.get(), "sink"));
+ if (!scale)
+ GST_WARNING("Failed to create scaletempo");
+ else
+ g_object_set(m_pipeline.get(), "audio-filter", scale, nullptr);
+ }
+
+ if (!m_renderingCanBeAccelerated) {
+ // If not using accelerated compositing, let GStreamer handle
+ // the image-orientation tag.
+ GstElement* videoFlip = gst_element_factory_make("videoflip", nullptr);
+ g_object_set(videoFlip, "method", 8, nullptr);
+ g_object_set(m_pipeline.get(), "video-filter", videoFlip, nullptr);
+ }
+
+ GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
if (videoSinkPad)
- g_signal_connect(videoSinkPad.get(), "notify::caps", G_CALLBACK(mediaPlayerPrivateVideoSinkCapsChangedCallback), this);
+ g_signal_connect_swapped(videoSinkPad.get(), "notify::caps", G_CALLBACK(videoSinkCapsChangedCallback), this);
}
void MediaPlayerPrivateGStreamer::simulateAudioInterruption()
{
- GstMessage* message = gst_message_new_request_state(GST_OBJECT(m_playBin.get()), GST_STATE_PAUSED);
- gst_element_post_message(m_playBin.get(), message);
+ GstMessage* message = gst_message_new_request_state(GST_OBJECT(m_pipeline.get()), GST_STATE_PAUSED);
+ gst_element_post_message(m_pipeline.get(), message);
}
bool MediaPlayerPrivateGStreamer::didPassCORSAccessCheck() const
@@ -1947,6 +2145,25 @@ bool MediaPlayerPrivateGStreamer::didPassCORSAccessCheck() const
return false;
}
+bool MediaPlayerPrivateGStreamer::canSaveMediaData() const
+{
+ if (isLiveStream())
+ return false;
+
+ if (m_url.isLocalFile())
+ return true;
+
+ if (m_url.protocolIsInHTTPFamily())
+ return true;
+
+ return false;
+}
+
+bool MediaPlayerPrivateGStreamer::handleSyncMessage(GstMessage* message)
+{
+ return MediaPlayerPrivateGStreamerBase::handleSyncMessage(message);
+}
+
}
#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h
index 1990bb20b..953239b58 100644
--- a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h
+++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h
@@ -2,7 +2,9 @@
* Copyright (C) 2007, 2009 Apple Inc. All rights reserved.
* Copyright (C) 2007 Collabora Ltd. All rights reserved.
* Copyright (C) 2007 Alp Toker <alp@atoker.com>
- * Copyright (C) 2009, 2010 Igalia S.L
+ * Copyright (C) 2009, 2010, 2011, 2012, 2013, 2015, 2016 Igalia S.L
+ * Copyright (C) 2014 Cable Television Laboratories, Inc.
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
@@ -32,185 +34,233 @@
#include <gst/gst.h>
#include <gst/pbutils/install-plugins.h>
#include <wtf/Forward.h>
+#include <wtf/RunLoop.h>
+#include <wtf/WeakPtr.h>
-#if ENABLE(MEDIA_SOURCE)
-#include "MediaSourceGStreamer.h"
+#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
+#include <wtf/text/AtomicStringHash.h>
#endif
typedef struct _GstBuffer GstBuffer;
typedef struct _GstMessage GstMessage;
typedef struct _GstElement GstElement;
+typedef struct _GstMpegtsSection GstMpegtsSection;
namespace WebCore {
+#if ENABLE(WEB_AUDIO)
+class AudioSourceProvider;
+class AudioSourceProviderGStreamer;
+#endif
+
class AudioTrackPrivateGStreamer;
+class InbandMetadataTextTrackPrivateGStreamer;
class InbandTextTrackPrivateGStreamer;
+class MediaPlayerRequestInstallMissingPluginsCallback;
class VideoTrackPrivateGStreamer;
+#if ENABLE(MEDIA_SOURCE)
+class MediaSourcePrivateClient;
+#endif
+
class MediaPlayerPrivateGStreamer : public MediaPlayerPrivateGStreamerBase {
public:
- ~MediaPlayerPrivateGStreamer();
+ explicit MediaPlayerPrivateGStreamer(MediaPlayer*);
+ virtual ~MediaPlayerPrivateGStreamer();
+
static void registerMediaEngine(MediaEngineRegistrar);
- gboolean handleMessage(GstMessage*);
+ void handleMessage(GstMessage*);
void handlePluginInstallerResult(GstInstallPluginsReturn);
- bool hasVideo() const { return m_hasVideo; }
- bool hasAudio() const { return m_hasAudio; }
+ bool hasVideo() const override { return m_hasVideo; }
+ bool hasAudio() const override { return m_hasAudio; }
- void load(const String &url);
+ void load(const String &url) override;
#if ENABLE(MEDIA_SOURCE)
- void load(const String& url, PassRefPtr<HTMLMediaSource>);
+ void load(const String& url, MediaSourcePrivateClient*) override;
+#endif
+#if ENABLE(MEDIA_STREAM)
+ void load(MediaStreamPrivate&) override;
#endif
void commitLoad();
- void cancelLoad();
+ void cancelLoad() override;
- void prepareToPlay();
- void play();
- void pause();
+ void prepareToPlay() override;
+ void play() override;
+ void pause() override;
- bool paused() const;
- bool seeking() const;
+ bool paused() const override;
+ bool seeking() const override;
- float duration() const;
- float currentTime() const;
- void seek(float);
+ MediaTime durationMediaTime() const override;
+ MediaTime currentMediaTime() const override;
+ void seek(float) override;
- void setRate(float);
- void setPreservesPitch(bool);
+ void setRate(float) override;
+ double rate() const override;
+ void setPreservesPitch(bool) override;
- void setPreload(MediaPlayer::Preload);
- void fillTimerFired(Timer<MediaPlayerPrivateGStreamer>*);
+ void setPreload(MediaPlayer::Preload) override;
+ void fillTimerFired();
- PassRefPtr<TimeRanges> buffered() const;
- float maxTimeSeekable() const;
- bool didLoadingProgress() const;
- unsigned totalBytes() const;
- float maxTimeLoaded() const;
+ std::unique_ptr<PlatformTimeRanges> buffered() const override;
+ float maxTimeSeekable() const override;
+ bool didLoadingProgress() const override;
+ unsigned long long totalBytes() const override;
+ float maxTimeLoaded() const override;
+
+ bool hasSingleSecurityOrigin() const override;
void loadStateChanged();
void timeChanged();
void didEnd();
- void durationChanged();
+ virtual void durationChanged();
void loadingFailed(MediaPlayer::NetworkState);
- void videoChanged();
- void videoCapsChanged();
- void audioChanged();
- void notifyPlayerOfVideo();
- void notifyPlayerOfVideoCaps();
- void notifyPlayerOfAudio();
+ virtual void sourceChanged();
-#if ENABLE(VIDEO_TRACK)
- void textChanged();
- void notifyPlayerOfText();
+ GstElement* audioSink() const override;
+ virtual void configurePlaySink() { }
- void newTextSample();
- void notifyPlayerOfNewTextSample();
-#endif
+ void simulateAudioInterruption() override;
- void sourceChanged();
- GstElement* audioSink() const;
+ virtual bool changePipelineState(GstState);
- void setAudioStreamProperties(GObject*);
+#if ENABLE(WEB_AUDIO)
+ AudioSourceProvider* audioSourceProvider() override;
+#endif
- void simulateAudioInterruption();
+ bool isLiveStream() const override { return m_isStreaming; }
- bool changePipelineState(GstState);
+ bool handleSyncMessage(GstMessage*) override;
private:
- MediaPlayerPrivateGStreamer(MediaPlayer*);
-
- static PassOwnPtr<MediaPlayerPrivateInterface> create(MediaPlayer*);
-
- static void getSupportedTypes(HashSet<String>&);
+ static void getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>&);
static MediaPlayer::SupportsType supportsType(const MediaEngineSupportParameters&);
static bool isAvailable();
- GstElement* createAudioSink();
+ WeakPtr<MediaPlayerPrivateGStreamer> createWeakPtr() { return m_weakPtrFactory.createWeakPtr(); }
- float playbackPosition() const;
+ GstElement* createAudioSink() override;
- void cacheDuration();
- void updateStates();
- void asyncStateChangeDone();
+ double playbackPosition() const;
+
+ virtual void updateStates();
+ virtual void asyncStateChangeDone();
void createGSTPlayBin();
bool loadNextLocation();
void mediaLocationChanged(GstMessage*);
- void setDownloadBuffering();
+ virtual void setDownloadBuffering();
void processBufferingStats(GstMessage*);
+#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
+ void processMpegTsSection(GstMpegtsSection*);
+#endif
#if ENABLE(VIDEO_TRACK)
void processTableOfContents(GstMessage*);
- void processTableOfContentsEntry(GstTocEntry*, GstTocEntry* parent);
+ void processTableOfContentsEntry(GstTocEntry*);
#endif
- bool doSeek(gint64 position, float rate, GstSeekFlags seekType);
- void updatePlaybackRate();
+ virtual bool doSeek(gint64 position, float rate, GstSeekFlags seekType);
+ virtual void updatePlaybackRate();
+ String engineDescription() const override { return "GStreamer"; }
+ bool didPassCORSAccessCheck() const override;
+ bool canSaveMediaData() const override;
- virtual String engineDescription() const { return "GStreamer"; }
- virtual bool isLiveStream() const { return m_isStreaming; }
- virtual bool didPassCORSAccessCheck() const;
+ void purgeOldDownloadFiles(const char*);
+ static void uriDecodeBinElementAddedCallback(GstBin*, GstElement*, MediaPlayerPrivateGStreamer*);
+ static void downloadBufferFileCreatedCallback(MediaPlayerPrivateGStreamer*);
-private:
- GRefPtr<GstElement> m_playBin;
+protected:
+ void cacheDuration();
+
+ bool m_buffering;
+ int m_bufferingPercentage;
+ mutable float m_cachedPosition;
+ bool m_canFallBackToLastFinishedSeekPosition;
+ bool m_changingRate;
+ bool m_downloadFinished;
+ bool m_errorOccured;
+ mutable bool m_isEndReached;
+ mutable bool m_isStreaming;
+ mutable gdouble m_durationAtEOS;
+ bool m_paused;
+ float m_playbackRate;
+ GstState m_requestedState;
+ bool m_resetPipeline;
+ bool m_seeking;
+ bool m_seekIsPending;
+ float m_seekTime;
GRefPtr<GstElement> m_source;
+ bool m_volumeAndMuteInitialized;
+
+ void readyTimerFired();
+
+ void notifyPlayerOfVideo();
+ void notifyPlayerOfVideoCaps();
+ void notifyPlayerOfAudio();
+
+#if ENABLE(VIDEO_TRACK)
+ void notifyPlayerOfText();
+ void newTextSample();
+#endif
+
+ void ensureAudioSourceProvider();
+ void setAudioStreamProperties(GObject*);
+
+ static void setAudioStreamPropertiesCallback(MediaPlayerPrivateGStreamer*, GObject*);
+
+ static void sourceChangedCallback(MediaPlayerPrivateGStreamer*);
+ static void videoChangedCallback(MediaPlayerPrivateGStreamer*);
+ static void videoSinkCapsChangedCallback(MediaPlayerPrivateGStreamer*);
+ static void audioChangedCallback(MediaPlayerPrivateGStreamer*);
+#if ENABLE(VIDEO_TRACK)
+ static void textChangedCallback(MediaPlayerPrivateGStreamer*);
+ static GstFlowReturn newTextSampleCallback(MediaPlayerPrivateGStreamer*);
+#endif
+
+private:
+ WeakPtrFactory<MediaPlayerPrivateGStreamer> m_weakPtrFactory;
+
#if ENABLE(VIDEO_TRACK)
GRefPtr<GstElement> m_textAppSink;
GRefPtr<GstPad> m_textAppSinkPad;
#endif
- float m_seekTime;
- bool m_changingRate;
- float m_endTime;
- bool m_isEndReached;
- mutable bool m_isStreaming;
GstStructure* m_mediaLocations;
int m_mediaLocationCurrentIndex;
- bool m_resetPipeline;
- bool m_paused;
bool m_playbackRatePause;
- bool m_seeking;
- bool m_seekIsPending;
float m_timeOfOverlappingSeek;
- bool m_canFallBackToLastFinishedSeekPositon;
- bool m_buffering;
- float m_playbackRate;
float m_lastPlaybackRate;
- bool m_errorOccured;
- mutable gfloat m_mediaDuration;
- bool m_downloadFinished;
- Timer<MediaPlayerPrivateGStreamer> m_fillTimer;
+ Timer m_fillTimer;
float m_maxTimeLoaded;
- int m_bufferingPercentage;
MediaPlayer::Preload m_preload;
bool m_delayingLoad;
- bool m_mediaDurationKnown;
mutable float m_maxTimeLoadedAtLastDidLoadingProgress;
- bool m_volumeAndMuteInitialized;
bool m_hasVideo;
bool m_hasAudio;
- guint m_audioTimerHandler;
- guint m_textTimerHandler;
- guint m_videoTimerHandler;
- guint m_videoCapsTimerHandler;
- guint m_readyTimerHandler;
- mutable long m_totalBytes;
+ RunLoop::Timer<MediaPlayerPrivateGStreamer> m_readyTimerHandler;
+ mutable unsigned long long m_totalBytes;
URL m_url;
bool m_preservesPitch;
- GstState m_requestedState;
+#if ENABLE(WEB_AUDIO)
+ std::unique_ptr<AudioSourceProviderGStreamer> m_audioSourceProvider;
+#endif
GRefPtr<GstElement> m_autoAudioSink;
- bool m_missingPlugins;
+ GRefPtr<GstElement> m_downloadBuffer;
+ RefPtr<MediaPlayerRequestInstallMissingPluginsCallback> m_missingPluginsCallback;
#if ENABLE(VIDEO_TRACK)
Vector<RefPtr<AudioTrackPrivateGStreamer>> m_audioTracks;
Vector<RefPtr<InbandTextTrackPrivateGStreamer>> m_textTracks;
Vector<RefPtr<VideoTrackPrivateGStreamer>> m_videoTracks;
- RefPtr<InbandTextTrackPrivate> m_chaptersTrack;
+ RefPtr<InbandMetadataTextTrackPrivateGStreamer> m_chaptersTrack;
#endif
-#if ENABLE(MEDIA_SOURCE)
- RefPtr<HTMLMediaSource> m_mediaSource;
+#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
+ HashMap<AtomicString, RefPtr<InbandMetadataTextTrackPrivateGStreamer>> m_metadataTracks;
#endif
+ virtual bool isMediaSource() const { return false; }
};
}
diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp
index c6564730a..9332aab49 100644
--- a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp
@@ -3,7 +3,8 @@
* Copyright (C) 2007 Collabora Ltd. All rights reserved.
* Copyright (C) 2007 Alp Toker <alp@atoker.com>
* Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
- * Copyright (C) 2009, 2010 Igalia S.L
+ * Copyright (C) 2009, 2010, 2015, 2016 Igalia S.L
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
@@ -26,7 +27,6 @@
#if ENABLE(VIDEO) && USE(GSTREAMER)
-#include "ColorSpace.h"
#include "GStreamerUtilities.h"
#include "GraphicsContext.h"
#include "GraphicsTypes.h"
@@ -37,16 +37,71 @@
#include "NotImplemented.h"
#include "VideoSinkGStreamer.h"
#include "WebKitWebSourceGStreamer.h"
-#include <gst/gst.h>
-#include <wtf/gobject/GMutexLocker.h>
+#include <wtf/glib/GMutexLocker.h>
+#include <wtf/glib/GUniquePtr.h>
+#include <wtf/text/AtomicString.h>
#include <wtf/text/CString.h>
+#include <wtf/MathExtras.h>
#include <gst/audio/streamvolume.h>
#include <gst/video/gstvideometa.h>
-#if GST_CHECK_VERSION(1, 1, 0) && USE(ACCELERATED_COMPOSITING) && USE(TEXTURE_MAPPER_GL)
+#if USE(GSTREAMER_GL)
+#include <gst/app/gstappsink.h>
+#define GST_USE_UNSTABLE_API
+#include <gst/gl/gl.h>
+#undef GST_USE_UNSTABLE_API
+
+#include "GLContext.h"
+#if USE(GLX)
+#include "GLContextGLX.h"
+#include <gst/gl/x11/gstgldisplay_x11.h>
+#endif
+
+#if USE(EGL)
+#include "GLContextEGL.h"
+#include <gst/gl/egl/gstgldisplay_egl.h>
+#endif
+
+#if PLATFORM(X11)
+#include "PlatformDisplayX11.h"
+#endif
+
+#if PLATFORM(WAYLAND)
+#include "PlatformDisplayWayland.h"
+#endif
+
+// gstglapi.h may include eglplatform.h and it includes X.h, which
+// defines None, breaking MediaPlayer::None enum
+#if PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
+#undef None
+#endif // PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
+#include "VideoTextureCopierGStreamer.h"
+#endif // USE(GSTREAMER_GL)
+
+#if USE(TEXTURE_MAPPER_GL)
+#include "BitmapTextureGL.h"
+#include "BitmapTexturePool.h"
#include "TextureMapperGL.h"
#endif
+#if USE(COORDINATED_GRAPHICS_THREADED)
+#include "TextureMapperPlatformLayerBuffer.h"
+#endif
+
+#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
+#include <cairo-gl.h>
+#endif
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
+#include "SharedBuffer.h"
+#include "WebKitClearKeyDecryptorGStreamer.h"
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+#include "UUID.h"
+#include <runtime/JSCInlines.h>
+#include <runtime/TypedArrayInlines.h>
+#include <runtime/Uint8Array.h>
+#endif
+#endif
GST_DEBUG_CATEGORY(webkit_media_player_debug);
#define GST_CAT_DEFAULT webkit_media_player_debug
@@ -55,6 +110,22 @@ using namespace std;
namespace WebCore {
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+static AtomicString keySystemIdToUuid(const AtomicString&);
+#endif
+
+void registerWebKitGStreamerElements()
+{
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
+ if (!webkitGstCheckVersion(1, 6, 1))
+ return;
+
+ GRefPtr<GstElementFactory> clearKeyDecryptorFactory = gst_element_factory_find("webkitclearkey");
+ if (!clearKeyDecryptorFactory)
+ gst_element_register(nullptr, "webkitclearkey", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_MEDIA_CK_DECRYPT);
+#endif
+}
+
static int greatestCommonDivisor(int a, int b)
{
while (b) {
@@ -66,112 +137,345 @@ static int greatestCommonDivisor(int a, int b)
return ABS(a);
}
-static void mediaPlayerPrivateVolumeChangedCallback(GObject*, GParamSpec*, MediaPlayerPrivateGStreamerBase* player)
+#if USE(TEXTURE_MAPPER_GL)
+static inline TextureMapperGL::Flags texMapFlagFromOrientation(const ImageOrientation& orientation)
{
- // This is called when m_volumeElement receives the notify::volume signal.
- LOG_MEDIA_MESSAGE("Volume changed to: %f", player->volume());
- player->volumeChanged();
+ switch (orientation) {
+ case DefaultImageOrientation:
+ return 0;
+ case OriginRightTop:
+ return TextureMapperGL::ShouldRotateTexture90;
+ case OriginBottomRight:
+ return TextureMapperGL::ShouldRotateTexture180;
+ case OriginLeftBottom:
+ return TextureMapperGL::ShouldRotateTexture270;
+ default:
+ ASSERT_NOT_REACHED();
+ }
+
+ return 0;
}
+#endif
+
+#if USE(COORDINATED_GRAPHICS_THREADED) && USE(GSTREAMER_GL)
+class GstVideoFrameHolder : public TextureMapperPlatformLayerBuffer::UnmanagedBufferDataHolder {
+public:
+ explicit GstVideoFrameHolder(GstSample* sample, TextureMapperGL::Flags flags)
+ {
+ GstVideoInfo videoInfo;
+ if (UNLIKELY(!getSampleVideoInfo(sample, videoInfo)))
+ return;
+
+ m_size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
+ m_flags = flags | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0);
+
+ GstBuffer* buffer = gst_sample_get_buffer(sample);
+ if (UNLIKELY(!gst_video_frame_map(&m_videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL))))
+ return;
-static gboolean mediaPlayerPrivateVolumeChangeTimeoutCallback(MediaPlayerPrivateGStreamerBase* player)
+ m_textureID = *reinterpret_cast<GLuint*>(m_videoFrame.data[0]);
+ m_isValid = true;
+ }
+
+ virtual ~GstVideoFrameHolder()
+ {
+ if (UNLIKELY(!m_isValid))
+ return;
+
+ gst_video_frame_unmap(&m_videoFrame);
+ }
+
+ const IntSize& size() const { return m_size; }
+ TextureMapperGL::Flags flags() const { return m_flags; }
+ GLuint textureID() const { return m_textureID; }
+ bool isValid() const { return m_isValid; }
+
+private:
+ GstVideoFrame m_videoFrame;
+ IntSize m_size;
+ TextureMapperGL::Flags m_flags;
+ GLuint m_textureID;
+ bool m_isValid { false };
+};
+#endif // USE(COORDINATED_GRAPHICS_THREADED) && USE(GSTREAMER_GL)
+
+MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* player)
+ : m_notifier(MainThreadNotifier<MainThreadNotification>::create())
+ , m_player(player)
+ , m_fpsSink(nullptr)
+ , m_readyState(MediaPlayer::HaveNothing)
+ , m_networkState(MediaPlayer::Empty)
+#if USE(GSTREAMER_GL) || USE(COORDINATED_GRAPHICS_THREADED)
+ , m_drawTimer(RunLoop::main(), this, &MediaPlayerPrivateGStreamerBase::repaint)
+#endif
+ , m_usingFallbackVideoSink(false)
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+ , m_cdmSession(nullptr)
+#endif
{
- // This is the callback of the timeout source created in ::volumeChanged.
- player->notifyPlayerOfVolumeChange();
- return FALSE;
+ g_mutex_init(&m_sampleMutex);
+#if USE(COORDINATED_GRAPHICS_THREADED)
+ m_platformLayerProxy = adoptRef(new TextureMapperPlatformLayerProxy());
+#endif
}
-static void mediaPlayerPrivateMuteChangedCallback(GObject*, GParamSpec*, MediaPlayerPrivateGStreamerBase* player)
+MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
{
- // This is called when m_volumeElement receives the notify::mute signal.
- player->muteChanged();
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+ m_protectionCondition.notifyOne();
+#endif
+
+ m_notifier->invalidate();
+
+ cancelRepaint();
+
+ if (m_videoSink) {
+ g_signal_handlers_disconnect_matched(m_videoSink.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
+#if USE(GSTREAMER_GL)
+ if (GST_IS_BIN(m_videoSink.get())) {
+ GRefPtr<GstElement> appsink = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_videoSink.get()), "webkit-gl-video-sink"));
+ g_signal_handlers_disconnect_by_data(appsink.get(), this);
+ }
+#endif
+ }
+
+ g_mutex_clear(&m_sampleMutex);
+
+ m_player = nullptr;
+
+ if (m_volumeElement)
+ g_signal_handlers_disconnect_matched(m_volumeElement.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
+
+#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
+ if (client())
+ client()->platformLayerWillBeDestroyed();
+#endif
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+ m_cdmSession = nullptr;
+#endif
+
+ if (m_pipeline)
+ gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);
}
-static gboolean mediaPlayerPrivateMuteChangeTimeoutCallback(MediaPlayerPrivateGStreamerBase* player)
+void MediaPlayerPrivateGStreamerBase::setPipeline(GstElement* pipeline)
{
- // This is the callback of the timeout source created in ::muteChanged.
- player->notifyPlayerOfMute();
- return FALSE;
+ m_pipeline = pipeline;
}
-static void mediaPlayerPrivateRepaintCallback(WebKitVideoSink*, GstBuffer *buffer, MediaPlayerPrivateGStreamerBase* playerPrivate)
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+static std::pair<Vector<GRefPtr<GstEvent>>, Vector<String>> extractEventsAndSystemsFromMessage(GstMessage* message)
{
- playerPrivate->triggerRepaint(buffer);
+ const GstStructure* structure = gst_message_get_structure(message);
+
+ const GValue* streamEncryptionAllowedSystemsValue = gst_structure_get_value(structure, "stream-encryption-systems");
+ ASSERT(streamEncryptionAllowedSystemsValue && G_VALUE_HOLDS(streamEncryptionAllowedSystemsValue, G_TYPE_STRV));
+ const char** streamEncryptionAllowedSystems = reinterpret_cast<const char**>(g_value_get_boxed(streamEncryptionAllowedSystemsValue));
+ ASSERT(streamEncryptionAllowedSystems);
+ Vector<String> streamEncryptionAllowedSystemsVector;
+ unsigned i;
+ for (i = 0; streamEncryptionAllowedSystems[i]; ++i)
+ streamEncryptionAllowedSystemsVector.append(streamEncryptionAllowedSystems[i]);
+
+ const GValue* streamEncryptionEventsList = gst_structure_get_value(structure, "stream-encryption-events");
+ ASSERT(streamEncryptionEventsList && GST_VALUE_HOLDS_LIST(streamEncryptionEventsList));
+ unsigned streamEncryptionEventsListSize = gst_value_list_get_size(streamEncryptionEventsList);
+ Vector<GRefPtr<GstEvent>> streamEncryptionEventsVector;
+ for (i = 0; i < streamEncryptionEventsListSize; ++i)
+ streamEncryptionEventsVector.append(GRefPtr<GstEvent>(static_cast<GstEvent*>(g_value_get_boxed(gst_value_list_get_value(streamEncryptionEventsList, i)))));
+
+ return std::make_pair(streamEncryptionEventsVector, streamEncryptionAllowedSystemsVector);
}
-
-MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* player)
- : m_player(player)
- , m_fpsSink(0)
- , m_readyState(MediaPlayer::HaveNothing)
- , m_networkState(MediaPlayer::Empty)
- , m_buffer(0)
- , m_volumeTimerHandler(0)
- , m_muteTimerHandler(0)
- , m_repaintHandler(0)
- , m_volumeSignalHandler(0)
- , m_muteSignalHandler(0)
-{
-#if GLIB_CHECK_VERSION(2, 31, 0)
- m_bufferMutex = new GMutex;
- g_mutex_init(m_bufferMutex);
-#else
- m_bufferMutex = g_mutex_new();
#endif
+
+bool MediaPlayerPrivateGStreamerBase::handleSyncMessage(GstMessage* message)
+{
+ UNUSED_PARAM(message);
+ if (GST_MESSAGE_TYPE(message) != GST_MESSAGE_NEED_CONTEXT)
+ return false;
+
+ const gchar* contextType;
+ gst_message_parse_context_type(message, &contextType);
+
+#if USE(GSTREAMER_GL)
+ GRefPtr<GstContext> elementContext = adoptGRef(requestGLContext(contextType, this));
+ if (elementContext) {
+ gst_element_set_context(GST_ELEMENT(message->src), elementContext.get());
+ return true;
+ }
+#endif // USE(GSTREAMER_GL)
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+ if (!g_strcmp0(contextType, "drm-preferred-decryption-system-id")) {
+ if (isMainThread()) {
+ GST_ERROR("can't handle drm-preferred-decryption-system-id need context message in the main thread");
+ ASSERT_NOT_REACHED();
+ return false;
+ }
+ GST_DEBUG("handling drm-preferred-decryption-system-id need context message");
+ std::pair<Vector<GRefPtr<GstEvent>>, Vector<String>> streamEncryptionInformation = extractEventsAndSystemsFromMessage(message);
+ GST_TRACE("found %" G_GSIZE_FORMAT " protection events", streamEncryptionInformation.first.size());
+ Vector<uint8_t> concatenatedInitDataChunks;
+ unsigned concatenatedInitDataChunksNumber = 0;
+ String eventKeySystemIdString;
+ for (auto& event : streamEncryptionInformation.first) {
+ GST_TRACE("handling protection event %u", GST_EVENT_SEQNUM(event.get()));
+ const char* eventKeySystemId = nullptr;
+ GstBuffer* data = nullptr;
+ gst_event_parse_protection(event.get(), &eventKeySystemId, &data, nullptr);
+
+ // Here we receive the DRM init data from the pipeline: we will emit
+ // the needkey event with that data and the browser might create a
+ // CDMSession from this event handler. If such a session was created
+ // We will emit the message event from the session to provide the
+ // DRM challenge to the browser and wait for an update. If on the
+ // contrary no session was created we won't wait and let the pipeline
+ // error out by itself.
+ GstMapInfo mapInfo;
+ if (!gst_buffer_map(data, &mapInfo, GST_MAP_READ)) {
+ GST_WARNING("cannot map %s protection data", eventKeySystemId);
+ break;
+ }
+
+ GST_TRACE("appending init data for %s of size %" G_GSIZE_FORMAT, eventKeySystemId, mapInfo.size);
+ GST_MEMDUMP("init data", reinterpret_cast<const unsigned char *>(mapInfo.data), mapInfo.size);
+ concatenatedInitDataChunks.append(mapInfo.data, mapInfo.size);
+ ++concatenatedInitDataChunksNumber;
+ eventKeySystemIdString = eventKeySystemId;
+ if (streamEncryptionInformation.second.contains(eventKeySystemId)) {
+ GST_TRACE("considering init data handled for %s", eventKeySystemId);
+ m_handledProtectionEvents.add(GST_EVENT_SEQNUM(event.get()));
+ }
+ gst_buffer_unmap(data, &mapInfo);
+ }
+
+ if (!concatenatedInitDataChunksNumber)
+ return false;
+
+ if (concatenatedInitDataChunksNumber > 1)
+ eventKeySystemIdString = emptyString();
+
+ RunLoop::main().dispatch([this, eventKeySystemIdString, initData = WTFMove(concatenatedInitDataChunks)] {
+ GST_DEBUG("scheduling keyNeeded event for %s with concatenated init datas size of %" G_GSIZE_FORMAT, eventKeySystemIdString.utf8().data(), initData.size());
+ GST_MEMDUMP("init datas", initData.data(), initData.size());
+
+ // FIXME: Provide a somehow valid sessionId.
+ RefPtr<Uint8Array> initDataArray = Uint8Array::create(initData.data(), initData.size());
+ needKey(initDataArray);
+ });
+
+ GST_INFO("waiting for a key request to arrive");
+ LockHolder lock(m_protectionMutex);
+ m_protectionCondition.waitFor(m_protectionMutex, Seconds(4), [this] {
+ return !this->m_lastGenerateKeyRequestKeySystemUuid.isEmpty();
+ });
+ if (!m_lastGenerateKeyRequestKeySystemUuid.isEmpty()) {
+ GST_INFO("got a key request, continuing with %s on %s", m_lastGenerateKeyRequestKeySystemUuid.utf8().data(), GST_MESSAGE_SRC_NAME(message));
+
+ GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-preferred-decryption-system-id", FALSE));
+ GstStructure* contextStructure = gst_context_writable_structure(context.get());
+ gst_structure_set(contextStructure, "decryption-system-id", G_TYPE_STRING, m_lastGenerateKeyRequestKeySystemUuid.utf8().data(), nullptr);
+ gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context.get());
+ } else
+ GST_WARNING("did not get a proper key request");
+
+ return true;
+ }
+#endif // ENABLE(LEGACY_ENCRYPTED_MEDIA)
+
+ return false;
}
-MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
+#if USE(GSTREAMER_GL)
+GstContext* MediaPlayerPrivateGStreamerBase::requestGLContext(const gchar* contextType, MediaPlayerPrivateGStreamerBase* player)
{
- if (m_repaintHandler) {
- g_signal_handler_disconnect(m_webkitVideoSink.get(), m_repaintHandler);
- m_repaintHandler = 0;
+ if (!player->ensureGstGLContext())
+ return nullptr;
+
+ if (!g_strcmp0(contextType, GST_GL_DISPLAY_CONTEXT_TYPE)) {
+ GstContext* displayContext = gst_context_new(GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
+ gst_context_set_gl_display(displayContext, player->gstGLDisplay());
+ return displayContext;
}
-#if GLIB_CHECK_VERSION(2, 31, 0)
- g_mutex_clear(m_bufferMutex);
- delete m_bufferMutex;
+ if (!g_strcmp0(contextType, "gst.gl.app_context")) {
+ GstContext* appContext = gst_context_new("gst.gl.app_context", TRUE);
+ GstStructure* structure = gst_context_writable_structure(appContext);
+#if GST_CHECK_VERSION(1, 11, 0)
+ gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, player->gstGLContext(), nullptr);
#else
- g_mutex_free(m_bufferMutex);
+ gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, player->gstGLContext(), nullptr);
#endif
+ return appContext;
+ }
- if (m_buffer)
- gst_buffer_unref(m_buffer);
- m_buffer = 0;
-
- m_player = 0;
+ return nullptr;
+}
- if (m_muteTimerHandler)
- g_source_remove(m_muteTimerHandler);
+bool MediaPlayerPrivateGStreamerBase::ensureGstGLContext()
+{
+ if (m_glContext)
+ return true;
+
+ auto& sharedDisplay = PlatformDisplay::sharedDisplayForCompositing();
+ if (!m_glDisplay) {
+#if PLATFORM(X11)
+#if USE(GLX)
+ if (is<PlatformDisplayX11>(sharedDisplay))
+ m_glDisplay = GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native()));
+#elif USE(EGL)
+ if (is<PlatformDisplayX11>(sharedDisplay))
+ m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay()));
+#endif
+#endif
- if (m_volumeTimerHandler)
- g_source_remove(m_volumeTimerHandler);
+#if PLATFORM(WAYLAND)
+ if (is<PlatformDisplayWayland>(sharedDisplay))
+ m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay()));
+#endif
- if (m_volumeSignalHandler) {
- g_signal_handler_disconnect(m_volumeElement.get(), m_volumeSignalHandler);
- m_volumeSignalHandler = 0;
+ ASSERT(m_glDisplay);
}
- if (m_muteSignalHandler) {
- g_signal_handler_disconnect(m_volumeElement.get(), m_muteSignalHandler);
- m_muteSignalHandler = 0;
- }
+ GLContext* webkitContext = sharedDisplay.sharingGLContext();
+ // EGL and GLX are mutually exclusive, no need for ifdefs here.
+ GstGLPlatform glPlatform = webkitContext->isEGLContext() ? GST_GL_PLATFORM_EGL : GST_GL_PLATFORM_GLX;
-#if USE(ACCELERATED_COMPOSITING) && USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
- if (client())
- client()->platformLayerWillBeDestroyed();
+#if USE(OPENGL_ES_2)
+ GstGLAPI glAPI = GST_GL_API_GLES2;
+#elif USE(OPENGL)
+ GstGLAPI glAPI = GST_GL_API_OPENGL;
+#else
+ ASSERT_NOT_REACHED();
#endif
+
+ PlatformGraphicsContext3D contextHandle = webkitContext->platformContext();
+ if (!contextHandle)
+ return false;
+
+ m_glContext = gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI);
+
+ return true;
}
+#endif // USE(GSTREAMER_GL)
// Returns the size of the video
-IntSize MediaPlayerPrivateGStreamerBase::naturalSize() const
+FloatSize MediaPlayerPrivateGStreamerBase::naturalSize() const
{
if (!hasVideo())
- return IntSize();
+ return FloatSize();
if (!m_videoSize.isEmpty())
return m_videoSize;
- GRefPtr<GstCaps> caps = currentVideoSinkCaps();
+ WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
+ if (!GST_IS_SAMPLE(m_sample.get()))
+ return FloatSize();
+
+ GstCaps* caps = gst_sample_get_caps(m_sample.get());
if (!caps)
- return IntSize();
+ return FloatSize();
// TODO: handle possible clean aperture data. See
@@ -184,11 +488,19 @@ IntSize MediaPlayerPrivateGStreamerBase::naturalSize() const
int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
IntSize originalSize;
GstVideoFormat format;
- if (!getVideoSizeAndFormatFromCaps(caps.get(), originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
- return IntSize();
+ if (!getVideoSizeAndFormatFromCaps(caps, originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
+ return FloatSize();
+
+#if USE(TEXTURE_MAPPER_GL)
+ // When using accelerated compositing, if the video is tagged as rotated 90 or 270 degrees, swap width and height.
+ if (m_renderingCanBeAccelerated) {
+ if (m_videoSourceOrientation.usesWidthAsHeight())
+ originalSize = originalSize.transposedSize();
+ }
+#endif
- LOG_MEDIA_MESSAGE("Original video size: %dx%d", originalSize.width(), originalSize.height());
- LOG_MEDIA_MESSAGE("Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);
+ GST_DEBUG("Original video size: %dx%d", originalSize.width(), originalSize.height());
+ GST_DEBUG("Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);
// Calculate DAR based on PAR and video size.
int displayWidth = originalSize.width() * pixelAspectRatioNumerator;
@@ -202,21 +514,21 @@ IntSize MediaPlayerPrivateGStreamerBase::naturalSize() const
// Apply DAR to original video size. This is the same behavior as in xvimagesink's setcaps function.
guint64 width = 0, height = 0;
if (!(originalSize.height() % displayHeight)) {
- LOG_MEDIA_MESSAGE("Keeping video original height");
+ GST_DEBUG("Keeping video original height");
width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
height = static_cast<guint64>(originalSize.height());
} else if (!(originalSize.width() % displayWidth)) {
- LOG_MEDIA_MESSAGE("Keeping video original width");
+ GST_DEBUG("Keeping video original width");
height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth);
width = static_cast<guint64>(originalSize.width());
} else {
- LOG_MEDIA_MESSAGE("Approximating while keeping original video height");
+ GST_DEBUG("Approximating while keeping original video height");
width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
height = static_cast<guint64>(originalSize.height());
}
- LOG_MEDIA_MESSAGE("Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
- m_videoSize = IntSize(static_cast<int>(width), static_cast<int>(height));
+ GST_DEBUG("Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
+ m_videoSize = FloatSize(static_cast<int>(width), static_cast<int>(height));
return m_videoSize;
}
@@ -225,7 +537,7 @@ void MediaPlayerPrivateGStreamerBase::setVolume(float volume)
if (!m_volumeElement)
return;
- LOG_MEDIA_MESSAGE("Setting volume: %f", volume);
+ GST_DEBUG("Setting volume: %f", volume);
gst_stream_volume_set_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC, static_cast<double>(volume));
}
@@ -240,8 +552,6 @@ float MediaPlayerPrivateGStreamerBase::volume() const
void MediaPlayerPrivateGStreamerBase::notifyPlayerOfVolumeChange()
{
- m_volumeTimerHandler = 0;
-
if (!m_player || !m_volumeElement)
return;
double volume;
@@ -253,11 +563,12 @@ void MediaPlayerPrivateGStreamerBase::notifyPlayerOfVolumeChange()
m_player->volumeChanged(static_cast<float>(volume));
}
-void MediaPlayerPrivateGStreamerBase::volumeChanged()
+void MediaPlayerPrivateGStreamerBase::volumeChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
- if (m_volumeTimerHandler)
- g_source_remove(m_volumeTimerHandler);
- m_volumeTimerHandler = g_idle_add_full(G_PRIORITY_DEFAULT, reinterpret_cast<GSourceFunc>(mediaPlayerPrivateVolumeChangeTimeoutCallback), this, 0);
+ // This is called when m_volumeElement receives the notify::volume signal.
+ GST_DEBUG("Volume changed to: %f", player->volume());
+
+ player->m_notifier->notify(MainThreadNotification::VolumeChanged, [player] { player->notifyPlayerOfVolumeChange(); });
}
MediaPlayer::NetworkState MediaPlayerPrivateGStreamerBase::networkState() const
@@ -280,7 +591,7 @@ void MediaPlayerPrivateGStreamerBase::setMuted(bool muted)
if (!m_volumeElement)
return;
- g_object_set(m_volumeElement.get(), "mute", muted, NULL);
+ g_object_set(m_volumeElement.get(), "mute", muted, nullptr);
}
bool MediaPlayerPrivateGStreamerBase::muted() const
@@ -289,145 +600,456 @@ bool MediaPlayerPrivateGStreamerBase::muted() const
return false;
bool muted;
- g_object_get(m_volumeElement.get(), "mute", &muted, NULL);
+ g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
return muted;
}
void MediaPlayerPrivateGStreamerBase::notifyPlayerOfMute()
{
- m_muteTimerHandler = 0;
-
if (!m_player || !m_volumeElement)
return;
gboolean muted;
- g_object_get(m_volumeElement.get(), "mute", &muted, NULL);
+ g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
m_player->muteChanged(static_cast<bool>(muted));
}
-void MediaPlayerPrivateGStreamerBase::muteChanged()
+void MediaPlayerPrivateGStreamerBase::muteChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
- if (m_muteTimerHandler)
- g_source_remove(m_muteTimerHandler);
- m_muteTimerHandler = g_idle_add_full(G_PRIORITY_DEFAULT, reinterpret_cast<GSourceFunc>(mediaPlayerPrivateMuteChangeTimeoutCallback), this, 0);
+ // This is called when m_volumeElement receives the notify::mute signal.
+ player->m_notifier->notify(MainThreadNotification::MuteChanged, [player] { player->notifyPlayerOfMute(); });
}
-
-#if USE(ACCELERATED_COMPOSITING) && USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
-PassRefPtr<BitmapTexture> MediaPlayerPrivateGStreamerBase::updateTexture(TextureMapper* textureMapper)
+void MediaPlayerPrivateGStreamerBase::acceleratedRenderingStateChanged()
{
- WTF::GMutexLocker lock(m_bufferMutex);
- if (!m_buffer)
- return nullptr;
-
- GRefPtr<GstCaps> caps = currentVideoSinkCaps();
- if (!caps)
- return nullptr;
-
- GstVideoInfo videoInfo;
- gst_video_info_init(&videoInfo);
- if (!gst_video_info_from_caps(&videoInfo, caps.get()))
- return nullptr;
+ m_renderingCanBeAccelerated = m_player && m_player->client().mediaPlayerAcceleratedCompositingEnabled() && m_player->client().mediaPlayerRenderingCanBeAccelerated(m_player);
+}
- IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
- RefPtr<BitmapTexture> texture = textureMapper->acquireTextureFromPool(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
+#if USE(TEXTURE_MAPPER_GL)
+void MediaPlayerPrivateGStreamerBase::updateTexture(BitmapTextureGL& texture, GstVideoInfo& videoInfo)
+{
+ GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
-#if GST_CHECK_VERSION(1, 1, 0)
GstVideoGLTextureUploadMeta* meta;
- if ((meta = gst_buffer_get_video_gl_texture_upload_meta(m_buffer))) {
+ if ((meta = gst_buffer_get_video_gl_texture_upload_meta(buffer))) {
if (meta->n_textures == 1) { // BRGx & BGRA formats use only one texture.
- const BitmapTextureGL* textureGL = static_cast<const BitmapTextureGL*>(texture.get());
- guint ids[4] = { textureGL->id(), 0, 0, 0 };
+ guint ids[4] = { texture.id(), 0, 0, 0 };
if (gst_video_gl_texture_upload_meta_upload(meta, ids))
- return texture;
+ return;
}
}
-#endif
// Right now the TextureMapper only supports chromas with one plane
ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);
GstVideoFrame videoFrame;
- if (!gst_video_frame_map(&videoFrame, &videoInfo, m_buffer, GST_MAP_READ))
- return nullptr;
+ if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, GST_MAP_READ))
+ return;
int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0);
const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0);
- texture->updateContents(srcData, WebCore::IntRect(WebCore::IntPoint(0, 0), size), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData);
+ texture.updateContents(srcData, WebCore::IntRect(0, 0, GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo)), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData);
gst_video_frame_unmap(&videoFrame);
-
- return texture;
}
#endif
-void MediaPlayerPrivateGStreamerBase::triggerRepaint(GstBuffer* buffer)
+#if USE(COORDINATED_GRAPHICS_THREADED)
+void MediaPlayerPrivateGStreamerBase::pushTextureToCompositor()
{
- g_return_if_fail(GST_IS_BUFFER(buffer));
+#if !USE(GSTREAMER_GL)
+ class ConditionNotifier {
+ public:
+ ConditionNotifier(Lock& lock, Condition& condition)
+ : m_locker(lock), m_condition(condition)
+ {
+ }
+ ~ConditionNotifier()
+ {
+ m_condition.notifyOne();
+ }
+ private:
+ LockHolder m_locker;
+ Condition& m_condition;
+ };
+ ConditionNotifier notifier(m_drawMutex, m_drawCondition);
+#endif
- {
- WTF::GMutexLocker lock(m_bufferMutex);
- gst_buffer_replace(&m_buffer, buffer);
+ WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
+ if (!GST_IS_SAMPLE(m_sample.get()))
+ return;
+
+ LockHolder holder(m_platformLayerProxy->lock());
+
+ if (!m_platformLayerProxy->isActive()) {
+ // Consume the buffer (so it gets eventually unreffed) but keep the rest of the info.
+ const GstStructure* info = gst_sample_get_info(m_sample.get());
+ GstStructure* infoCopy = nullptr;
+ if (info)
+ infoCopy = gst_structure_copy(info);
+ m_sample = adoptGRef(gst_sample_new(nullptr, gst_sample_get_caps(m_sample.get()),
+ gst_sample_get_segment(m_sample.get()), infoCopy));
+ return;
}
-#if USE(ACCELERATED_COMPOSITING) && USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
- if (supportsAcceleratedRendering() && m_player->mediaPlayerClient()->mediaPlayerRenderingCanBeAccelerated(m_player) && client()) {
+#if USE(GSTREAMER_GL)
+ std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), texMapFlagFromOrientation(m_videoSourceOrientation));
+ if (UNLIKELY(!frameHolder->isValid()))
+ return;
+
+ std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(frameHolder->textureID(), frameHolder->size(), frameHolder->flags());
+ layerBuffer->setUnmanagedBufferDataHolder(WTFMove(frameHolder));
+ m_platformLayerProxy->pushNextBuffer(WTFMove(layerBuffer));
+#else
+ GstVideoInfo videoInfo;
+ if (UNLIKELY(!getSampleVideoInfo(m_sample.get(), videoInfo)))
+ return;
+
+ IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
+ std::unique_ptr<TextureMapperPlatformLayerBuffer> buffer = m_platformLayerProxy->getAvailableBuffer(size, GraphicsContext3D::DONT_CARE);
+ if (UNLIKELY(!buffer)) {
+ if (UNLIKELY(!m_context3D))
+ m_context3D = GraphicsContext3D::create(GraphicsContext3DAttributes(), nullptr, GraphicsContext3D::RenderToCurrentGLContext);
+
+ auto texture = BitmapTextureGL::create(*m_context3D);
+ texture->reset(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
+ buffer = std::make_unique<TextureMapperPlatformLayerBuffer>(WTFMove(texture));
+ }
+ updateTexture(buffer->textureGL(), videoInfo);
+ buffer->setExtraFlags(texMapFlagFromOrientation(m_videoSourceOrientation) | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0));
+ m_platformLayerProxy->pushNextBuffer(WTFMove(buffer));
+#endif
+}
+#endif
+
+void MediaPlayerPrivateGStreamerBase::repaint()
+{
+ ASSERT(m_sample);
+ ASSERT(isMainThread());
+
+#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
+ if (m_renderingCanBeAccelerated && client()) {
client()->setPlatformLayerNeedsDisplay();
+#if USE(GSTREAMER_GL)
+ LockHolder lock(m_drawMutex);
+ m_drawCondition.notifyOne();
+#endif
return;
}
#endif
m_player->repaint();
+
+#if USE(GSTREAMER_GL) || USE(COORDINATED_GRAPHICS_THREADED)
+ LockHolder lock(m_drawMutex);
+ m_drawCondition.notifyOne();
+#endif
}
-void MediaPlayerPrivateGStreamerBase::setSize(const IntSize& size)
+void MediaPlayerPrivateGStreamerBase::triggerRepaint(GstSample* sample)
{
- m_size = size;
+ bool triggerResize;
+ {
+ WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
+ triggerResize = !m_sample;
+ m_sample = sample;
+ }
+
+ if (triggerResize) {
+ GST_DEBUG("First sample reached the sink, triggering video dimensions update");
+ m_notifier->notify(MainThreadNotification::SizeChanged, [this] { m_player->sizeChanged(); });
+ }
+
+#if USE(COORDINATED_GRAPHICS_THREADED)
+ if (!m_renderingCanBeAccelerated) {
+ LockHolder locker(m_drawMutex);
+ m_drawTimer.startOneShot(0);
+ m_drawCondition.wait(m_drawMutex);
+ return;
+ }
+
+#if USE(GSTREAMER_GL)
+ pushTextureToCompositor();
+#else
+ {
+ LockHolder lock(m_drawMutex);
+ if (!m_platformLayerProxy->scheduleUpdateOnCompositorThread([this] { this->pushTextureToCompositor(); }))
+ return;
+ m_drawCondition.wait(m_drawMutex);
+ }
+#endif
+ return;
+#else
+#if USE(GSTREAMER_GL)
+ {
+ ASSERT(!isMainThread());
+
+ LockHolder locker(m_drawMutex);
+ m_drawTimer.startOneShot(0);
+ m_drawCondition.wait(m_drawMutex);
+ }
+#else
+ repaint();
+#endif
+#endif
}
-void MediaPlayerPrivateGStreamerBase::paint(GraphicsContext* context, const IntRect& rect)
+void MediaPlayerPrivateGStreamerBase::repaintCallback(MediaPlayerPrivateGStreamerBase* player, GstSample* sample)
{
-#if USE(ACCELERATED_COMPOSITING) && USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
- if (client())
- return;
+ player->triggerRepaint(sample);
+}
+
+void MediaPlayerPrivateGStreamerBase::cancelRepaint()
+{
+#if USE(TEXTURE_MAPPER_GL) || USE(COORDINATED_GRAPHICS_THREADED)
+ m_drawTimer.stop();
+ LockHolder locker(m_drawMutex);
+ m_drawCondition.notifyOne();
#endif
+}
+
+void MediaPlayerPrivateGStreamerBase::repaintCancelledCallback(MediaPlayerPrivateGStreamerBase* player)
+{
+ player->cancelRepaint();
+}
+
+#if USE(GSTREAMER_GL)
+GstFlowReturn MediaPlayerPrivateGStreamerBase::newSampleCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
+{
+ GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_sample(GST_APP_SINK(sink)));
+ player->triggerRepaint(sample.get());
+ return GST_FLOW_OK;
+}
+
+GstFlowReturn MediaPlayerPrivateGStreamerBase::newPrerollCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
+{
+ GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_preroll(GST_APP_SINK(sink)));
+ player->triggerRepaint(sample.get());
+ return GST_FLOW_OK;
+}
+#endif
+
+void MediaPlayerPrivateGStreamerBase::setSize(const IntSize& size)
+{
+ m_size = size;
+}
- if (context->paintingDisabled())
+void MediaPlayerPrivateGStreamerBase::paint(GraphicsContext& context, const FloatRect& rect)
+{
+ if (context.paintingDisabled())
return;
if (!m_player->visible())
return;
- WTF::GMutexLocker lock(m_bufferMutex);
- if (!m_buffer)
+ WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
+ if (!GST_IS_SAMPLE(m_sample.get()))
return;
- GRefPtr<GstCaps> caps = currentVideoSinkCaps();
- if (!caps)
- return;
+ ImagePaintingOptions paintingOptions(CompositeCopy);
+ if (m_renderingCanBeAccelerated)
+ paintingOptions.m_orientationDescription.setImageOrientationEnum(m_videoSourceOrientation);
- RefPtr<ImageGStreamer> gstImage = ImageGStreamer::createImage(m_buffer, caps.get());
+ RefPtr<ImageGStreamer> gstImage = ImageGStreamer::createImage(m_sample.get());
if (!gstImage)
return;
- context->drawImage(reinterpret_cast<Image*>(gstImage->image().get()), ColorSpaceSRGB,
- rect, gstImage->rect(), CompositeCopy, ImageOrientationDescription(), false);
+ if (Image* image = reinterpret_cast<Image*>(gstImage->image().get()))
+ context.drawImage(*image, rect, gstImage->rect(), paintingOptions);
}
-#if USE(ACCELERATED_COMPOSITING) && USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
-void MediaPlayerPrivateGStreamerBase::paintToTextureMapper(TextureMapper* textureMapper, const FloatRect& targetRect, const TransformationMatrix& matrix, float opacity)
+#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
+void MediaPlayerPrivateGStreamerBase::paintToTextureMapper(TextureMapper& textureMapper, const FloatRect& targetRect, const TransformationMatrix& matrix, float opacity)
{
- if (textureMapper->accelerationMode() != TextureMapper::OpenGLMode)
+ if (!m_player->visible())
return;
- if (!m_player->visible())
+ if (m_usingFallbackVideoSink) {
+ RefPtr<BitmapTexture> texture;
+ IntSize size;
+ TextureMapperGL::Flags flags;
+ {
+ WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
+
+ GstVideoInfo videoInfo;
+ if (UNLIKELY(!getSampleVideoInfo(m_sample.get(), videoInfo)))
+ return;
+
+ size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
+ flags = texMapFlagFromOrientation(m_videoSourceOrientation) | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0);
+ texture = textureMapper.acquireTextureFromPool(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
+ updateTexture(static_cast<BitmapTextureGL&>(*texture), videoInfo);
+ }
+ TextureMapperGL& texmapGL = reinterpret_cast<TextureMapperGL&>(textureMapper);
+ BitmapTextureGL* textureGL = static_cast<BitmapTextureGL*>(texture.get());
+ texmapGL.drawTexture(textureGL->id(), flags, textureGL->size(), targetRect, matrix, opacity);
return;
+ }
+
+#if USE(GSTREAMER_GL)
+ WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
+
+ GstVideoInfo videoInfo;
+ if (!getSampleVideoInfo(m_sample.get(), videoInfo))
+ return;
+
+ GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
+ GstVideoFrame videoFrame;
+ if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
+ return;
+
+ unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);
+ TextureMapperGL::Flags flags = texMapFlagFromOrientation(m_videoSourceOrientation) | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0);
+
+ IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
+ TextureMapperGL& textureMapperGL = reinterpret_cast<TextureMapperGL&>(textureMapper);
+ textureMapperGL.drawTexture(textureID, flags, size, targetRect, matrix, opacity);
+ gst_video_frame_unmap(&videoFrame);
+#endif
+}
+#endif
+
+#if USE(GSTREAMER_GL)
+#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
+// This should be called with the sample mutex locked.
+GLContext* MediaPlayerPrivateGStreamerBase::prepareContextForCairoPaint(GstVideoInfo& videoInfo, IntSize& size, IntSize& rotatedSize)
+{
+ if (!getSampleVideoInfo(m_sample.get(), videoInfo))
+ return nullptr;
+
+ GLContext* context = PlatformDisplay::sharedDisplayForCompositing().sharingGLContext();
+ context->makeContextCurrent();
+
+ // Thread-awareness is a huge performance hit on non-Intel drivers.
+ cairo_gl_device_set_thread_aware(context->cairoDevice(), FALSE);
+
+ size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
+ rotatedSize = m_videoSourceOrientation.usesWidthAsHeight() ? size.transposedSize() : size;
+
+ return context;
+}
+
+// This should be called with the sample mutex locked.
+bool MediaPlayerPrivateGStreamerBase::paintToCairoSurface(cairo_surface_t* outputSurface, cairo_device_t* device, GstVideoInfo& videoInfo, const IntSize& size, const IntSize& rotatedSize, bool flipY)
+{
+ GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
+ GstVideoFrame videoFrame;
+ if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
+ return false;
+
+ unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);
+ RefPtr<cairo_surface_t> surface = adoptRef(cairo_gl_surface_create_for_texture(device, CAIRO_CONTENT_COLOR_ALPHA, textureID, size.width(), size.height()));
+ RefPtr<cairo_t> cr = adoptRef(cairo_create(outputSurface));
+
+ switch (m_videoSourceOrientation) {
+ case DefaultImageOrientation:
+ break;
+ case OriginRightTop:
+ cairo_translate(cr.get(), rotatedSize.width() * 0.5, rotatedSize.height() * 0.5);
+ cairo_rotate(cr.get(), piOverTwoDouble);
+ cairo_translate(cr.get(), -rotatedSize.height() * 0.5, -rotatedSize.width() * 0.5);
+ break;
+ case OriginBottomRight:
+ cairo_translate(cr.get(), rotatedSize.width() * 0.5, rotatedSize.height() * 0.5);
+ cairo_rotate(cr.get(), piDouble);
+ cairo_translate(cr.get(), -rotatedSize.width() * 0.5, -rotatedSize.height() * 0.5);
+ break;
+ case OriginLeftBottom:
+ cairo_translate(cr.get(), rotatedSize.width() * 0.5, rotatedSize.height() * 0.5);
+ cairo_rotate(cr.get(), 3 * piOverTwoDouble);
+ cairo_translate(cr.get(), -rotatedSize.height() * 0.5, -rotatedSize.width() * 0.5);
+ break;
+ default:
+ ASSERT_NOT_REACHED();
+ break;
+ }
+
+ if (flipY) {
+ cairo_scale(cr.get(), 1.0f, -1.0f);
+ cairo_translate(cr.get(), 0.0f, -size.height());
+ }
+
+ cairo_set_source_surface(cr.get(), surface.get(), 0, 0);
+ cairo_set_operator(cr.get(), CAIRO_OPERATOR_SOURCE);
+ cairo_paint(cr.get());
+
+ gst_video_frame_unmap(&videoFrame);
+
+ return true;
+}
+#endif // USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
+
+bool MediaPlayerPrivateGStreamerBase::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
+{
+#if USE(GSTREAMER_GL)
+ UNUSED_PARAM(context);
+
+ if (m_usingFallbackVideoSink)
+ return false;
+
+ if (premultiplyAlpha)
+ return false;
+
+ WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
+
+ GstVideoInfo videoInfo;
+ if (!getSampleVideoInfo(m_sample.get(), videoInfo))
+ return false;
+
+ GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
+ GstVideoFrame videoFrame;
+ if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
+ return false;
+
+ IntSize size(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
+ if (m_videoSourceOrientation.usesWidthAsHeight())
+ size = size.transposedSize();
+ unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);
+
+ if (!m_videoTextureCopier)
+ m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>();
+
+ bool copied = m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, outputTexture, outputTarget, level, internalFormat, format, type, flipY, m_videoSourceOrientation);
+
+ gst_video_frame_unmap(&videoFrame);
- RefPtr<BitmapTexture> texture = updateTexture(textureMapper);
- if (texture)
- textureMapper->drawTexture(*texture.get(), targetRect, matrix, opacity);
+ return copied;
+#else
+ return false;
+#endif
}
+
+NativeImagePtr MediaPlayerPrivateGStreamerBase::nativeImageForCurrentTime()
+{
+#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
+ if (m_usingFallbackVideoSink)
+ return nullptr;
+
+ GstVideoInfo videoInfo;
+ IntSize size, rotatedSize;
+ WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
+ GLContext* context = prepareContextForCairoPaint(videoInfo, size, rotatedSize);
+ if (!context)
+ return nullptr;
+
+ RefPtr<cairo_surface_t> rotatedSurface = adoptRef(cairo_gl_surface_create(context->cairoDevice(), CAIRO_CONTENT_COLOR_ALPHA, rotatedSize.width(), rotatedSize.height()));
+ if (!paintToCairoSurface(rotatedSurface.get(), context->cairoDevice(), videoInfo, size, rotatedSize, false))
+ return nullptr;
+
+ return rotatedSurface;
+#else
+ return nullptr;
#endif
+}
+#endif
+
+void MediaPlayerPrivateGStreamerBase::setVideoSourceOrientation(const ImageOrientation& orientation)
+{
+ if (m_videoSourceOrientation == orientation)
+ return;
+
+ m_videoSourceOrientation = orientation;
+}
bool MediaPlayerPrivateGStreamerBase::supportsFullscreen() const
{
@@ -450,25 +1072,88 @@ MediaPlayer::MovieLoadType MediaPlayerPrivateGStreamerBase::movieLoadType() cons
return MediaPlayer::Download;
}
-GRefPtr<GstCaps> MediaPlayerPrivateGStreamerBase::currentVideoSinkCaps() const
+#if USE(GSTREAMER_GL)
+GstElement* MediaPlayerPrivateGStreamerBase::createGLAppSink()
+{
+ if (!webkitGstCheckVersion(1, 8, 0))
+ return nullptr;
+
+ GstElement* appsink = gst_element_factory_make("appsink", "webkit-gl-video-sink");
+ if (!appsink)
+ return nullptr;
+
+ g_object_set(appsink, "enable-last-sample", FALSE, "emit-signals", TRUE, "max-buffers", 1, nullptr);
+ g_signal_connect(appsink, "new-sample", G_CALLBACK(newSampleCallback), this);
+ g_signal_connect(appsink, "new-preroll", G_CALLBACK(newPrerollCallback), this);
+
+ return appsink;
+}
+
+GstElement* MediaPlayerPrivateGStreamerBase::createVideoSinkGL()
{
- if (!m_webkitVideoSink)
+ // FIXME: Currently it's not possible to get the video frames and caps using this approach until
+ // the pipeline gets into playing state. Due to this, trying to grab a frame and painting it by some
+ // other mean (canvas or webgl) before playing state can result in a crash.
+ // This is being handled in https://bugs.webkit.org/show_bug.cgi?id=159460.
+ if (!webkitGstCheckVersion(1, 8, 0))
return nullptr;
- GRefPtr<GstCaps> currentCaps;
- g_object_get(G_OBJECT(m_webkitVideoSink.get()), "current-caps", &currentCaps.outPtr(), NULL);
- return currentCaps;
+ gboolean result = TRUE;
+ GstElement* videoSink = gst_bin_new(nullptr);
+ GstElement* upload = gst_element_factory_make("glupload", nullptr);
+ GstElement* colorconvert = gst_element_factory_make("glcolorconvert", nullptr);
+ GstElement* appsink = createGLAppSink();
+
+ if (!appsink || !upload || !colorconvert) {
+ GST_WARNING("Failed to create GstGL elements");
+ gst_object_unref(videoSink);
+
+ if (upload)
+ gst_object_unref(upload);
+ if (colorconvert)
+ gst_object_unref(colorconvert);
+ if (appsink)
+ gst_object_unref(appsink);
+
+ return nullptr;
+ }
+
+ gst_bin_add_many(GST_BIN(videoSink), upload, colorconvert, appsink, nullptr);
+
+ GRefPtr<GstCaps> caps = adoptGRef(gst_caps_from_string("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY "), format = (string) { RGBA }"));
+
+ result &= gst_element_link_pads(upload, "src", colorconvert, "sink");
+ result &= gst_element_link_pads_filtered(colorconvert, "src", appsink, "sink", caps.get());
+
+ GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(upload, "sink"));
+ gst_element_add_pad(videoSink, gst_ghost_pad_new("sink", pad.get()));
+
+ if (!result) {
+ GST_WARNING("Failed to link GstGL elements");
+ gst_object_unref(videoSink);
+ videoSink = nullptr;
+ }
+ return videoSink;
}
+#endif
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink()
{
- ASSERT(initializeGStreamer());
+ acceleratedRenderingStateChanged();
- GstElement* videoSink = nullptr;
- m_webkitVideoSink = webkitVideoSinkNew();
+#if USE(GSTREAMER_GL)
+ if (m_renderingCanBeAccelerated)
+ m_videoSink = createVideoSinkGL();
+#endif
- m_repaintHandler = g_signal_connect(m_webkitVideoSink.get(), "repaint-requested", G_CALLBACK(mediaPlayerPrivateRepaintCallback), this);
+ if (!m_videoSink) {
+ m_usingFallbackVideoSink = true;
+ m_videoSink = webkitVideoSinkNew();
+ g_signal_connect_swapped(m_videoSink.get(), "repaint-requested", G_CALLBACK(repaintCallback), this);
+ g_signal_connect_swapped(m_videoSink.get(), "repaint-cancelled", G_CALLBACK(repaintCancelledCallback), this);
+ }
+ GstElement* videoSink = nullptr;
m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
if (m_fpsSink) {
g_object_set(m_fpsSink.get(), "silent", TRUE , nullptr);
@@ -477,20 +1162,19 @@ GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink()
#if LOG_DISABLED
g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);
#else
- WTFLogChannel* channel = logChannelByName("Media");
- if (channel->state != WTFLogChannelOn)
+ if (!isLogChannelEnabled("Media"))
g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);
#endif // LOG_DISABLED
if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink.get()), "video-sink")) {
- g_object_set(m_fpsSink.get(), "video-sink", m_webkitVideoSink.get(), nullptr);
+ g_object_set(m_fpsSink.get(), "video-sink", m_videoSink.get(), nullptr);
videoSink = m_fpsSink.get();
} else
m_fpsSink = nullptr;
}
if (!m_fpsSink)
- videoSink = m_webkitVideoSink.get();
+ videoSink = m_videoSink.get();
ASSERT(videoSink);
@@ -505,23 +1189,23 @@ void MediaPlayerPrivateGStreamerBase::setStreamVolumeElement(GstStreamVolume* vo
// We don't set the initial volume because we trust the sink to keep it for us. See
// https://bugs.webkit.org/show_bug.cgi?id=118974 for more information.
if (!m_player->platformVolumeConfigurationRequired()) {
- LOG_MEDIA_MESSAGE("Setting stream volume to %f", m_player->volume());
- g_object_set(m_volumeElement.get(), "volume", m_player->volume(), NULL);
+ GST_DEBUG("Setting stream volume to %f", m_player->volume());
+ g_object_set(m_volumeElement.get(), "volume", m_player->volume(), nullptr);
} else
- LOG_MEDIA_MESSAGE("Not setting stream volume, trusting system one");
+ GST_DEBUG("Not setting stream volume, trusting system one");
- LOG_MEDIA_MESSAGE("Setting stream muted %d", m_player->muted());
- g_object_set(m_volumeElement.get(), "mute", m_player->muted(), NULL);
+ GST_DEBUG("Setting stream muted %d", m_player->muted());
+ g_object_set(m_volumeElement.get(), "mute", m_player->muted(), nullptr);
- m_volumeSignalHandler = g_signal_connect(m_volumeElement.get(), "notify::volume", G_CALLBACK(mediaPlayerPrivateVolumeChangedCallback), this);
- m_muteSignalHandler = g_signal_connect(m_volumeElement.get(), "notify::mute", G_CALLBACK(mediaPlayerPrivateMuteChangedCallback), this);
+ g_signal_connect_swapped(m_volumeElement.get(), "notify::volume", G_CALLBACK(volumeChangedCallback), this);
+ g_signal_connect_swapped(m_volumeElement.get(), "notify::mute", G_CALLBACK(muteChangedCallback), this);
}
unsigned MediaPlayerPrivateGStreamerBase::decodedFrameCount() const
{
guint64 decodedFrames = 0;
if (m_fpsSink)
- g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, NULL);
+ g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, nullptr);
return static_cast<unsigned>(decodedFrames);
}
@@ -529,7 +1213,7 @@ unsigned MediaPlayerPrivateGStreamerBase::droppedFrameCount() const
{
guint64 framesDropped = 0;
if (m_fpsSink)
- g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, NULL);
+ g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, nullptr);
return static_cast<unsigned>(framesDropped);
}
@@ -550,13 +1234,95 @@ unsigned MediaPlayerPrivateGStreamerBase::videoDecodedByteCount() const
GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
gint64 position = 0;
- if (gst_element_query(m_webkitVideoSink.get(), query))
+ if (gst_element_query(m_videoSink.get(), query))
gst_query_parse_position(query, 0, &position);
gst_query_unref(query);
return static_cast<unsigned>(position);
}
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+void MediaPlayerPrivateGStreamerBase::needKey(RefPtr<Uint8Array> initData)
+{
+ if (!m_player->keyNeeded(initData.get()))
+ GST_INFO("no event handler for key needed");
+}
+
+void MediaPlayerPrivateGStreamerBase::setCDMSession(CDMSession* session)
+{
+ GST_DEBUG("setting CDM session to %p", session);
+ m_cdmSession = session;
+}
+
+void MediaPlayerPrivateGStreamerBase::keyAdded()
+{
+}
+
+void MediaPlayerPrivateGStreamerBase::handleProtectionEvent(GstEvent* event)
+{
+ if (m_handledProtectionEvents.contains(GST_EVENT_SEQNUM(event))) {
+ GST_DEBUG("event %u already handled", GST_EVENT_SEQNUM(event));
+ m_handledProtectionEvents.remove(GST_EVENT_SEQNUM(event));
+ return;
+ }
+
+ const gchar* eventKeySystemId = nullptr;
+ GstBuffer* data = nullptr;
+ gst_event_parse_protection(event, &eventKeySystemId, &data, nullptr);
+
+ GstMapInfo mapInfo;
+ if (!gst_buffer_map(data, &mapInfo, GST_MAP_READ)) {
+ GST_WARNING("cannot map %s protection data", eventKeySystemId);
+ return;
+ }
+
+ GST_DEBUG("scheduling keyNeeded event for %s with init data size of %" G_GSIZE_FORMAT, eventKeySystemId, mapInfo.size);
+ GST_MEMDUMP("init datas", mapInfo.data, mapInfo.size);
+ RefPtr<Uint8Array> initDataArray = Uint8Array::create(mapInfo.data, mapInfo.size);
+ needKey(initDataArray);
+ gst_buffer_unmap(data, &mapInfo);
+}
+
+void MediaPlayerPrivateGStreamerBase::receivedGenerateKeyRequest(const String& keySystem)
+{
+ GST_DEBUG("received generate key request for %s", keySystem.utf8().data());
+ m_lastGenerateKeyRequestKeySystemUuid = keySystemIdToUuid(keySystem);
+ m_protectionCondition.notifyOne();
+}
+
+static AtomicString keySystemIdToUuid(const AtomicString& id)
+{
+ if (equalIgnoringASCIICase(id, CLEAR_KEY_PROTECTION_SYSTEM_ID))
+ return AtomicString(CLEAR_KEY_PROTECTION_SYSTEM_UUID);
+
+ return { };
+}
+
+std::unique_ptr<CDMSession> MediaPlayerPrivateGStreamerBase::createSession(const String& keySystem, CDMSessionClient*)
+{
+ GST_INFO("Requested CDMSession for KeySystem %s: Returning null.", keySystem.utf8().data());
+ return nullptr;
+}
+
+void MediaPlayerPrivateGStreamerBase::dispatchDecryptionKey(GstBuffer* buffer)
+{
+ gst_element_send_event(m_pipeline.get(), gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM_OOB,
+ gst_structure_new("drm-cipher", "key", GST_TYPE_BUFFER, buffer, nullptr)));
+}
+#endif
+
+bool MediaPlayerPrivateGStreamerBase::supportsKeySystem(const String& keySystem, const String& mimeType)
+{
+ GST_INFO("Checking for KeySystem support with %s and type %s: false.", keySystem.utf8().data(), mimeType.utf8().data());
+ return false;
+}
+
+MediaPlayer::SupportsType MediaPlayerPrivateGStreamerBase::extendedSupportsType(const MediaEngineSupportParameters& parameters, MediaPlayer::SupportsType result)
+{
+ UNUSED_PARAM(parameters);
+ return result;
+}
+
}
#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.h b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.h
index dfcab5994..3092c1eff 100644
--- a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.h
+++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.h
@@ -2,7 +2,8 @@
* Copyright (C) 2007, 2009 Apple Inc. All rights reserved.
* Copyright (C) 2007 Collabora Ltd. All rights reserved.
* Copyright (C) 2007 Alp Toker <alp@atoker.com>
- * Copyright (C) 2009, 2010 Igalia S.L
+ * Copyright (C) 2009, 2010, 2015, 2016 Igalia S.L
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
@@ -25,110 +26,227 @@
#if ENABLE(VIDEO) && USE(GSTREAMER)
#include "GRefPtrGStreamer.h"
+#include "MainThreadNotifier.h"
#include "MediaPlayerPrivate.h"
-
+#include "PlatformLayer.h"
#include <glib.h>
-
+#include <gst/gst.h>
+#include <wtf/Condition.h>
#include <wtf/Forward.h>
+#include <wtf/RunLoop.h>
-#if USE(ACCELERATED_COMPOSITING) && USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
+#if USE(TEXTURE_MAPPER)
#include "TextureMapperPlatformLayer.h"
+#include "TextureMapperPlatformLayerProxy.h"
#endif
-typedef struct _GstBuffer GstBuffer;
-typedef struct _GstElement GstElement;
-typedef struct _GstMessage GstMessage;
typedef struct _GstStreamVolume GstStreamVolume;
-typedef struct _WebKitVideoSink WebKitVideoSink;
+typedef struct _GstVideoInfo GstVideoInfo;
+typedef struct _GstGLContext GstGLContext;
+typedef struct _GstGLDisplay GstGLDisplay;
namespace WebCore {
+class BitmapTextureGL;
+class GLContext;
class GraphicsContext;
+class GraphicsContext3D;
class IntSize;
class IntRect;
+class VideoTextureCopierGStreamer;
+
+void registerWebKitGStreamerElements();
class MediaPlayerPrivateGStreamerBase : public MediaPlayerPrivateInterface
-#if USE(ACCELERATED_COMPOSITING) && USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
- , public TextureMapperPlatformLayer
+#if USE(COORDINATED_GRAPHICS_THREADED) || (USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS))
+ , public PlatformLayer
#endif
{
public:
virtual ~MediaPlayerPrivateGStreamerBase();
- IntSize naturalSize() const;
+ FloatSize naturalSize() const override;
- void setVolume(float);
- float volume() const;
- void volumeChanged();
- void notifyPlayerOfVolumeChange();
+ void setVolume(float) override;
+ virtual float volume() const;
+
+#if USE(GSTREAMER_GL)
+ bool ensureGstGLContext();
+ static GstContext* requestGLContext(const gchar* contextType, MediaPlayerPrivateGStreamerBase*);
+#endif
- bool supportsMuting() const { return true; }
- void setMuted(bool);
+ bool supportsMuting() const override { return true; }
+ void setMuted(bool) override;
bool muted() const;
- void muteChanged();
- void notifyPlayerOfMute();
- MediaPlayer::NetworkState networkState() const;
- MediaPlayer::ReadyState readyState() const;
+ MediaPlayer::NetworkState networkState() const override;
+ MediaPlayer::ReadyState readyState() const override;
- void setVisible(bool) { }
- void setSize(const IntSize&);
+ void setVisible(bool) override { }
+ void setSize(const IntSize&) override;
void sizeChanged();
- void triggerRepaint(GstBuffer*);
- void paint(GraphicsContext*, const IntRect&);
+ void paint(GraphicsContext&, const FloatRect&) override;
- virtual bool hasSingleSecurityOrigin() const { return true; }
+ bool hasSingleSecurityOrigin() const override { return true; }
virtual float maxTimeLoaded() const { return 0.0; }
- bool supportsFullscreen() const;
- PlatformMedia platformMedia() const;
+ bool supportsFullscreen() const override;
+ PlatformMedia platformMedia() const override;
- MediaPlayer::MovieLoadType movieLoadType() const;
+ MediaPlayer::MovieLoadType movieLoadType() const override;
virtual bool isLiveStream() const = 0;
MediaPlayer* mediaPlayer() const { return m_player; }
- unsigned decodedFrameCount() const;
- unsigned droppedFrameCount() const;
- unsigned audioDecodedByteCount() const;
- unsigned videoDecodedByteCount() const;
+ unsigned decodedFrameCount() const override;
+ unsigned droppedFrameCount() const override;
+ unsigned audioDecodedByteCount() const override;
+ unsigned videoDecodedByteCount() const override;
+
+ void acceleratedRenderingStateChanged() override;
+
+#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
+ PlatformLayer* platformLayer() const override { return const_cast<MediaPlayerPrivateGStreamerBase*>(this); }
+#if PLATFORM(WIN_CAIRO)
+ // FIXME: Accelerated rendering has not been implemented for WinCairo yet.
+ bool supportsAcceleratedRendering() const override { return false; }
+#else
+ bool supportsAcceleratedRendering() const override { return true; }
+#endif
+ void paintToTextureMapper(TextureMapper&, const FloatRect&, const TransformationMatrix&, float) override;
+#endif
+
+#if USE(COORDINATED_GRAPHICS_THREADED)
+ PlatformLayer* platformLayer() const override { return const_cast<MediaPlayerPrivateGStreamerBase*>(this); }
+ bool supportsAcceleratedRendering() const override { return true; }
+#endif
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+ void needKey(RefPtr<Uint8Array>);
+ void setCDMSession(CDMSession*) override;
+ void keyAdded() override;
+ virtual void dispatchDecryptionKey(GstBuffer*);
+ void handleProtectionEvent(GstEvent*);
+ void receivedGenerateKeyRequest(const String&);
+#endif
+
+ static bool supportsKeySystem(const String& keySystem, const String& mimeType);
+ static MediaPlayer::SupportsType extendedSupportsType(const MediaEngineSupportParameters&, MediaPlayer::SupportsType);
-#if USE(ACCELERATED_COMPOSITING) && USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
- virtual PlatformLayer* platformLayer() const { return const_cast<MediaPlayerPrivateGStreamerBase*>(this); }
- virtual bool supportsAcceleratedRendering() const { return true; }
- virtual void paintToTextureMapper(TextureMapper*, const FloatRect&, const TransformationMatrix&, float);
+#if USE(GSTREAMER_GL)
+ bool copyVideoTextureToPlatformTexture(GraphicsContext3D*, Platform3DObject, GC3Denum, GC3Dint, GC3Denum, GC3Denum, GC3Denum, bool, bool) override;
+ NativeImagePtr nativeImageForCurrentTime() override;
#endif
+ void setVideoSourceOrientation(const ImageOrientation&);
+ GstElement* pipeline() const { return m_pipeline.get(); }
+
+ virtual bool handleSyncMessage(GstMessage*);
+
protected:
MediaPlayerPrivateGStreamerBase(MediaPlayer*);
virtual GstElement* createVideoSink();
- GRefPtr<GstCaps> currentVideoSinkCaps() const;
+
+#if USE(GSTREAMER_GL)
+ static GstFlowReturn newSampleCallback(GstElement*, MediaPlayerPrivateGStreamerBase*);
+ static GstFlowReturn newPrerollCallback(GstElement*, MediaPlayerPrivateGStreamerBase*);
+ GstElement* createGLAppSink();
+ GstElement* createVideoSinkGL();
+ GstGLContext* gstGLContext() const { return m_glContext.get(); }
+ GstGLDisplay* gstGLDisplay() const { return m_glDisplay.get(); }
+#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
+ GLContext* prepareContextForCairoPaint(GstVideoInfo&, IntSize&, IntSize&);
+ bool paintToCairoSurface(cairo_surface_t*, cairo_device_t*, GstVideoInfo&, const IntSize&, const IntSize&, bool);
+#endif
+#endif
+
+ GstElement* videoSink() const { return m_videoSink.get(); }
void setStreamVolumeElement(GstStreamVolume*);
virtual GstElement* createAudioSink() { return 0; }
virtual GstElement* audioSink() const { return 0; }
+ void setPipeline(GstElement*);
+
+ void triggerRepaint(GstSample*);
+ void repaint();
+ void cancelRepaint();
+
+ static void repaintCallback(MediaPlayerPrivateGStreamerBase*, GstSample*);
+ static void repaintCancelledCallback(MediaPlayerPrivateGStreamerBase*);
+
+ void notifyPlayerOfVolumeChange();
+ void notifyPlayerOfMute();
+
+ static void volumeChangedCallback(MediaPlayerPrivateGStreamerBase*);
+ static void muteChangedCallback(MediaPlayerPrivateGStreamerBase*);
+
+ enum MainThreadNotification {
+ VideoChanged = 1 << 0,
+ VideoCapsChanged = 1 << 1,
+ AudioChanged = 1 << 2,
+ VolumeChanged = 1 << 3,
+ MuteChanged = 1 << 4,
+#if ENABLE(VIDEO_TRACK)
+ TextChanged = 1 << 5,
+#endif
+ SizeChanged = 1 << 6
+ };
+
+ Ref<MainThreadNotifier<MainThreadNotification>> m_notifier;
MediaPlayer* m_player;
+ GRefPtr<GstElement> m_pipeline;
GRefPtr<GstStreamVolume> m_volumeElement;
- GRefPtr<GstElement> m_webkitVideoSink;
+ GRefPtr<GstElement> m_videoSink;
GRefPtr<GstElement> m_fpsSink;
MediaPlayer::ReadyState m_readyState;
- MediaPlayer::NetworkState m_networkState;
+ mutable MediaPlayer::NetworkState m_networkState;
IntSize m_size;
- GMutex* m_bufferMutex;
- GstBuffer* m_buffer;
- unsigned long m_volumeTimerHandler;
- unsigned long m_muteTimerHandler;
- unsigned long m_repaintHandler;
- unsigned long m_volumeSignalHandler;
- unsigned long m_muteSignalHandler;
- mutable IntSize m_videoSize;
-#if USE(ACCELERATED_COMPOSITING) && USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
- PassRefPtr<BitmapTexture> updateTexture(TextureMapper*);
+ mutable GMutex m_sampleMutex;
+ GRefPtr<GstSample> m_sample;
+#if USE(GSTREAMER_GL) || USE(COORDINATED_GRAPHICS_THREADED)
+ RunLoop::Timer<MediaPlayerPrivateGStreamerBase> m_drawTimer;
+#endif
+ mutable FloatSize m_videoSize;
+ bool m_usingFallbackVideoSink;
+ bool m_renderingCanBeAccelerated { false };
+#if USE(TEXTURE_MAPPER_GL)
+ void updateTexture(BitmapTextureGL&, GstVideoInfo&);
+#endif
+#if USE(GSTREAMER_GL)
+ GRefPtr<GstGLContext> m_glContext;
+ GRefPtr<GstGLDisplay> m_glDisplay;
+#endif
+
+#if USE(COORDINATED_GRAPHICS_THREADED)
+ RefPtr<TextureMapperPlatformLayerProxy> proxy() const override { return m_platformLayerProxy.copyRef(); }
+ void swapBuffersIfNeeded() override { };
+ void pushTextureToCompositor();
+ RefPtr<TextureMapperPlatformLayerProxy> m_platformLayerProxy;
+#endif
+
+#if USE(GSTREAMER_GL) || USE(COORDINATED_GRAPHICS_THREADED)
+ RefPtr<GraphicsContext3D> m_context3D;
+ Condition m_drawCondition;
+ Lock m_drawMutex;
+#endif
+
+ ImageOrientation m_videoSourceOrientation;
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+ std::unique_ptr<CDMSession> createSession(const String&, CDMSessionClient*) override;
+ CDMSession* m_cdmSession;
+ Lock m_protectionMutex;
+ Condition m_protectionCondition;
+ String m_lastGenerateKeyRequestKeySystemUuid;
+ HashSet<uint32_t> m_handledProtectionEvents;
+#endif
+#if USE(GSTREAMER_GL)
+ std::unique_ptr<VideoTextureCopierGStreamer> m_videoTextureCopier;
#endif
};
+
}
#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerOwr.cpp b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerOwr.cpp
new file mode 100644
index 000000000..8e3736c8b
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerOwr.cpp
@@ -0,0 +1,501 @@
+/*
+ * Copyright (C) 2012 Collabora Ltd. All rights reserved.
+ * Copyright (C) 2014, 2015 Igalia S.L. All rights reserved.
+ * Copyright (C) 2015 Metrological All rights reserved.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include "config.h"
+
+#include "MediaPlayerPrivateGStreamerOwr.h"
+
+#if ENABLE(VIDEO) && ENABLE(MEDIA_STREAM) && USE(GSTREAMER) && USE(OPENWEBRTC)
+
+#include "GStreamerUtilities.h"
+#include "MediaPlayer.h"
+#include "MediaStreamPrivate.h"
+#include "NotImplemented.h"
+#include "RealtimeMediaSourceOwr.h"
+#include "URL.h"
+#include <owr/owr.h>
+#include <owr/owr_gst_audio_renderer.h>
+#include <owr/owr_gst_video_renderer.h>
+#include <wtf/NeverDestroyed.h>
+#include <wtf/text/CString.h>
+
+GST_DEBUG_CATEGORY(webkit_openwebrtc_debug);
+#define GST_CAT_DEFAULT webkit_openwebrtc_debug
+
+namespace WebCore {
+
+MediaPlayerPrivateGStreamerOwr::MediaPlayerPrivateGStreamerOwr(MediaPlayer* player)
+ : MediaPlayerPrivateGStreamerBase(player)
+{
+ initializeGStreamerAndGStreamerDebugging();
+}
+
+MediaPlayerPrivateGStreamerOwr::~MediaPlayerPrivateGStreamerOwr()
+{
+ GST_TRACE("Destroying");
+
+ if (hasAudio())
+ m_audioTrack->removeObserver(*this);
+ if (hasVideo())
+ m_videoTrack->removeObserver(*this);
+
+ m_audioTrackMap.clear();
+ m_videoTrackMap.clear();
+
+ stop();
+}
+
+void MediaPlayerPrivateGStreamerOwr::play()
+{
+ GST_DEBUG("Play");
+
+ if (!m_streamPrivate || !m_streamPrivate->active()) {
+ m_readyState = MediaPlayer::HaveNothing;
+ loadingFailed(MediaPlayer::Empty);
+ return;
+ }
+
+ m_ended = false;
+ m_paused = false;
+
+ GST_DEBUG("Connecting to live stream, descriptor: %p", m_streamPrivate.get());
+
+ if (m_videoTrack)
+ maybeHandleChangeMutedState(*m_videoTrack.get());
+
+ if (m_audioTrack)
+ maybeHandleChangeMutedState(*m_audioTrack.get());
+}
+
+void MediaPlayerPrivateGStreamerOwr::pause()
+{
+ GST_DEBUG("Pause");
+ m_paused = true;
+ disableMediaTracks();
+}
+
+bool MediaPlayerPrivateGStreamerOwr::hasVideo() const
+{
+ return m_videoTrack;
+}
+
+bool MediaPlayerPrivateGStreamerOwr::hasAudio() const
+{
+ return m_audioTrack;
+}
+
+void MediaPlayerPrivateGStreamerOwr::setVolume(float volume)
+{
+ if (!m_audioTrack)
+ return;
+
+ auto& realTimeMediaSource = static_cast<RealtimeMediaSourceOwr&>(m_audioTrack->source());
+ auto mediaSource = OWR_MEDIA_SOURCE(realTimeMediaSource.mediaSource());
+
+ GST_DEBUG("Setting volume: %f", volume);
+ g_object_set(mediaSource, "volume", static_cast<gdouble>(volume), nullptr);
+}
+
+void MediaPlayerPrivateGStreamerOwr::setMuted(bool muted)
+{
+ if (!m_audioTrack)
+ return;
+
+ auto& realTimeMediaSource = static_cast<RealtimeMediaSourceOwr&>(m_audioTrack->source());
+ auto mediaSource = OWR_MEDIA_SOURCE(realTimeMediaSource.mediaSource());
+ if (!mediaSource)
+ return;
+
+ GST_DEBUG("Setting mute: %s", muted ? "on":"off");
+ g_object_set(mediaSource, "mute", muted, nullptr);
+}
+
+float MediaPlayerPrivateGStreamerOwr::currentTime() const
+{
+ gint64 position = GST_CLOCK_TIME_NONE;
+ GstQuery* query = gst_query_new_position(GST_FORMAT_TIME);
+
+ if (m_videoTrack && gst_element_query(m_videoSink.get(), query))
+ gst_query_parse_position(query, 0, &position);
+ else if (m_audioTrack && gst_element_query(m_audioSink.get(), query))
+ gst_query_parse_position(query, 0, &position);
+
+ float result = 0;
+ if (static_cast<GstClockTime>(position) != GST_CLOCK_TIME_NONE)
+ result = static_cast<double>(position) / GST_SECOND;
+
+ GST_LOG("Position %" GST_TIME_FORMAT, GST_TIME_ARGS(position));
+ gst_query_unref(query);
+
+ return result;
+}
+
+void MediaPlayerPrivateGStreamerOwr::load(const String &)
+{
+ // Properly fail so the global MediaPlayer tries to fallback to the next MediaPlayerPrivate.
+ m_networkState = MediaPlayer::FormatError;
+ m_player->networkStateChanged();
+}
+
+#if ENABLE(MEDIA_SOURCE)
+void MediaPlayerPrivateGStreamerOwr::load(const String&, MediaSourcePrivateClient*)
+{
+ // Properly fail so the global MediaPlayer tries to fallback to the next MediaPlayerPrivate.
+ m_networkState = MediaPlayer::FormatError;
+ m_player->networkStateChanged();
+}
+#endif
+
+void MediaPlayerPrivateGStreamerOwr::load(MediaStreamPrivate& streamPrivate)
+{
+ if (!initializeGStreamer())
+ return;
+
+ m_streamPrivate = &streamPrivate;
+ if (!m_streamPrivate->active()) {
+ loadingFailed(MediaPlayer::NetworkError);
+ return;
+ }
+
+ if (streamPrivate.hasVideo() && !m_videoSink)
+ createVideoSink();
+
+ if (streamPrivate.hasAudio() && !m_audioSink)
+ createGSTAudioSinkBin();
+
+ GST_DEBUG("Loading MediaStreamPrivate %p video: %s, audio: %s", &streamPrivate, streamPrivate.hasVideo() ? "yes":"no", streamPrivate.hasAudio() ? "yes":"no");
+
+ m_readyState = MediaPlayer::HaveNothing;
+ m_networkState = MediaPlayer::Loading;
+ m_player->networkStateChanged();
+ m_player->readyStateChanged();
+
+ for (auto track : m_streamPrivate->tracks()) {
+ if (!track->enabled()) {
+ GST_DEBUG("Track %s disabled", track->label().ascii().data());
+ continue;
+ }
+
+ GST_DEBUG("Processing track %s", track->label().ascii().data());
+
+ bool observeTrack = false;
+
+ // TODO: Support for multiple tracks of the same type.
+
+ switch (track->type()) {
+ case RealtimeMediaSource::Audio:
+ if (!m_audioTrack) {
+ String preSelectedDevice = getenv("WEBKIT_AUDIO_DEVICE");
+ if (!preSelectedDevice || (preSelectedDevice == track->label())) {
+ m_audioTrack = track;
+ auto audioTrack = AudioTrackPrivateMediaStream::create(*m_audioTrack.get());
+ m_player->addAudioTrack(*audioTrack);
+ m_audioTrackMap.add(track->id(), audioTrack);
+ observeTrack = true;
+ }
+ }
+ break;
+ case RealtimeMediaSource::Video:
+ if (!m_videoTrack) {
+ String preSelectedDevice = getenv("WEBKIT_VIDEO_DEVICE");
+ if (!preSelectedDevice || (preSelectedDevice == track->label())) {
+ m_videoTrack = track;
+ auto videoTrack = VideoTrackPrivateMediaStream::create(*m_videoTrack.get());
+ m_player->addVideoTrack(*videoTrack);
+ videoTrack->setSelected(true);
+ m_videoTrackMap.add(track->id(), videoTrack);
+ observeTrack = true;
+ }
+ }
+ break;
+ case RealtimeMediaSource::None:
+ GST_WARNING("Loading a track with None type");
+ }
+
+ if (observeTrack)
+ track->addObserver(*this);
+ }
+
+ m_readyState = MediaPlayer::HaveEnoughData;
+ m_player->readyStateChanged();
+}
+
+void MediaPlayerPrivateGStreamerOwr::loadingFailed(MediaPlayer::NetworkState error)
+{
+ if (m_networkState != error) {
+ GST_WARNING("Loading failed, error: %d", error);
+ m_networkState = error;
+ m_player->networkStateChanged();
+ }
+ if (m_readyState != MediaPlayer::HaveNothing) {
+ m_readyState = MediaPlayer::HaveNothing;
+ m_player->readyStateChanged();
+ }
+}
+
+bool MediaPlayerPrivateGStreamerOwr::didLoadingProgress() const
+{
+ // FIXME: Implement loading progress support.
+ return true;
+}
+
+void MediaPlayerPrivateGStreamerOwr::disableMediaTracks()
+{
+ if (m_audioTrack) {
+ GST_DEBUG("Stop: disconnecting audio");
+ g_object_set(m_audioRenderer.get(), "disabled", true, nullptr);
+ owr_media_renderer_set_source(OWR_MEDIA_RENDERER(m_audioRenderer.get()), nullptr);
+ }
+
+ if (m_videoTrack) {
+ GST_DEBUG("Stop: disconnecting video");
+ g_object_set(m_videoRenderer.get(), "disabled", true, nullptr);
+ owr_media_renderer_set_source(OWR_MEDIA_RENDERER(m_videoRenderer.get()), nullptr);
+ }
+}
+
+void MediaPlayerPrivateGStreamerOwr::stop()
+{
+ disableMediaTracks();
+ if (m_videoTrack) {
+ auto videoTrack = m_videoTrackMap.get(m_videoTrack->id());
+ if (videoTrack)
+ videoTrack->setSelected(false);
+ }
+}
+
+void MediaPlayerPrivateGStreamerOwr::registerMediaEngine(MediaEngineRegistrar registrar)
+{
+ if (initializeGStreamerAndGStreamerDebugging()) {
+ registrar([](MediaPlayer* player) {
+ return std::make_unique<MediaPlayerPrivateGStreamerOwr>(player);
+ }, getSupportedTypes, supportsType, nullptr, nullptr, nullptr, nullptr);
+ }
+}
+
+void MediaPlayerPrivateGStreamerOwr::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
+{
+ // Not supported in this media player.
+ static NeverDestroyed<HashSet<String, ASCIICaseInsensitiveHash>> cache;
+ types = cache;
+}
+
+MediaPlayer::SupportsType MediaPlayerPrivateGStreamerOwr::supportsType(const MediaEngineSupportParameters& parameters)
+{
+ if (parameters.isMediaStream)
+ return MediaPlayer::IsSupported;
+ return MediaPlayer::IsNotSupported;
+}
+
+bool MediaPlayerPrivateGStreamerOwr::initializeGStreamerAndGStreamerDebugging()
+{
+ if (!initializeGStreamer())
+ return false;
+
+ static std::once_flag debugRegisteredFlag;
+ std::call_once(debugRegisteredFlag, [] {
+ GST_DEBUG_CATEGORY_INIT(webkit_openwebrtc_debug, "webkitowrplayer", 0, "WebKit OpenWebRTC player");
+ });
+
+ return true;
+}
+
+void MediaPlayerPrivateGStreamerOwr::createGSTAudioSinkBin()
+{
+ ASSERT(!m_audioSink);
+ GST_DEBUG("Creating audio sink");
+ // FIXME: volume/mute support: https://webkit.org/b/153828.
+
+ // Pre-roll an autoaudiosink so that the platform audio sink is created and
+ // can be retrieved from the autoaudiosink bin.
+ GRefPtr<GstElement> sink = gst_element_factory_make("autoaudiosink", nullptr);
+ GstChildProxy* childProxy = GST_CHILD_PROXY(sink.get());
+ gst_element_set_state(sink.get(), GST_STATE_READY);
+ GRefPtr<GstElement> platformSink = adoptGRef(GST_ELEMENT(gst_child_proxy_get_child_by_index(childProxy, 0)));
+ GstElementFactory* factory = gst_element_get_factory(platformSink.get());
+
+ // Dispose now un-needed autoaudiosink.
+ gst_element_set_state(sink.get(), GST_STATE_NULL);
+
+ // Create a fresh new audio sink compatible with the platform.
+ m_audioSink = gst_element_factory_create(factory, nullptr);
+ m_audioRenderer = adoptGRef(owr_gst_audio_renderer_new(m_audioSink.get()));
+}
+
+void MediaPlayerPrivateGStreamerOwr::trackEnded(MediaStreamTrackPrivate& track)
+{
+ GST_DEBUG("Track ended");
+
+ if (!m_streamPrivate || !m_streamPrivate->active()) {
+ stop();
+ return;
+ }
+
+ if (&track == m_audioTrack)
+ g_object_set(m_audioRenderer.get(), "disabled", true, nullptr);
+ else if (&track == m_videoTrack) {
+ g_object_set(m_videoRenderer.get(), "disabled", true, nullptr);
+ auto& realTimeMediaSource = static_cast<RealtimeMediaSourceOwr&>(m_videoTrack->source());
+ realTimeMediaSource.setWidth(0);
+ realTimeMediaSource.setHeight(0);
+ auto videoTrack = m_videoTrackMap.get(m_videoTrack->id());
+ if (videoTrack)
+ videoTrack->setSelected(false);
+ }
+
+ bool audioDisabled;
+ bool videoDisabled;
+ g_object_get(m_audioRenderer.get(), "disabled", &audioDisabled, nullptr);
+ g_object_get(m_videoRenderer.get(), "disabled", &videoDisabled, nullptr);
+ if (audioDisabled && videoDisabled) {
+ m_ended = true;
+ m_player->timeChanged();
+ }
+}
+
+void MediaPlayerPrivateGStreamerOwr::trackMutedChanged(MediaStreamTrackPrivate& track)
+{
+ GST_DEBUG("Track muted state changed");
+
+ maybeHandleChangeMutedState(track);
+}
+
+void MediaPlayerPrivateGStreamerOwr::maybeHandleChangeMutedState(MediaStreamTrackPrivate& track)
+{
+ auto& realTimeMediaSource = static_cast<RealtimeMediaSourceOwr&>(track.source());
+ auto mediaSource = OWR_MEDIA_SOURCE(realTimeMediaSource.mediaSource());
+
+ GST_DEBUG("%s track now %s", track.type() == RealtimeMediaSource::Audio ? "audio":"video", realTimeMediaSource.muted() ? "muted":"un-muted");
+ switch (track.type()) {
+ case RealtimeMediaSource::Audio:
+ if (!realTimeMediaSource.muted()) {
+ g_object_set(m_audioRenderer.get(), "disabled", false, nullptr);
+ owr_media_renderer_set_source(OWR_MEDIA_RENDERER(m_audioRenderer.get()), mediaSource);
+ } else {
+ g_object_set(m_audioRenderer.get(), "disabled", true, nullptr);
+ owr_media_renderer_set_source(OWR_MEDIA_RENDERER(m_audioRenderer.get()), nullptr);
+ }
+ if (mediaSource)
+ g_object_set(mediaSource, "mute", !track.enabled(), nullptr);
+ break;
+ case RealtimeMediaSource::Video:
+ if (!realTimeMediaSource.muted()) {
+ g_object_set(m_videoRenderer.get(), "disabled", false, nullptr);
+ owr_media_renderer_set_source(OWR_MEDIA_RENDERER(m_videoRenderer.get()), mediaSource);
+ } else {
+ g_object_set(m_videoRenderer.get(), "disabled", true, nullptr);
+ owr_media_renderer_set_source(OWR_MEDIA_RENDERER(m_videoRenderer.get()), nullptr);
+ }
+ break;
+ case RealtimeMediaSource::None:
+ GST_WARNING("Trying to change mute state of a track with None type");
+ }
+}
+
+void MediaPlayerPrivateGStreamerOwr::trackSettingsChanged(MediaStreamTrackPrivate&)
+{
+ GST_DEBUG("Track settings changed");
+}
+
+void MediaPlayerPrivateGStreamerOwr::trackEnabledChanged(MediaStreamTrackPrivate& track)
+{
+ GST_DEBUG("%s track now %s", track.type() == RealtimeMediaSource::Audio ? "audio":"video", track.enabled() ? "enabled":"disabled");
+
+ switch (track.type()) {
+ case RealtimeMediaSource::Audio:
+ g_object_set(m_audioRenderer.get(), "disabled", !track.enabled(), nullptr);
+ break;
+ case RealtimeMediaSource::Video:
+ g_object_set(m_videoRenderer.get(), "disabled", !track.enabled(), nullptr);
+ break;
+ case RealtimeMediaSource::None:
+ GST_WARNING("Trying to change enabled state of a track with None type");
+ }
+}
+
+GstElement* MediaPlayerPrivateGStreamerOwr::createVideoSink()
+{
+ GstElement* sink;
+#if USE(GSTREAMER_GL)
+ // No need to create glupload and glcolorconvert here because they are
+ // already created by the video renderer.
+ // FIXME: This should probably return a RefPtr. See https://bugs.webkit.org/show_bug.cgi?id=164709.
+ sink = MediaPlayerPrivateGStreamerBase::createGLAppSink();
+ m_videoSink = sink;
+#else
+ if (m_streamPrivate->getVideoRenderer()) {
+ m_videoRenderer = m_streamPrivate->getVideoRenderer();
+ m_videoSink = m_streamPrivate->getVideoSinkElement();
+ g_signal_connect_swapped(m_videoSink.get(), "repaint-requested", G_CALLBACK(MediaPlayerPrivateGStreamerBase::repaintCallback), this);
+ g_object_get(m_videoRenderer.get(), "sink", &sink, nullptr);
+ } else {
+ GstElement* gldownload = gst_element_factory_make("gldownload", nullptr);
+ GstElement* videoconvert = gst_element_factory_make("videoconvert", nullptr);
+ GstElement* webkitSink = MediaPlayerPrivateGStreamerBase::createVideoSink();
+ sink = gst_bin_new(nullptr);
+ gst_bin_add_many(GST_BIN(sink), gldownload, videoconvert, webkitSink, nullptr);
+ gst_element_link_many(gldownload, videoconvert, webkitSink, nullptr);
+ GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(gldownload, "sink"));
+ gst_element_add_pad(sink, gst_ghost_pad_new("sink", pad.get()));
+ }
+#endif
+ if (!m_videoRenderer) {
+ m_videoRenderer = adoptGRef(owr_gst_video_renderer_new(sink));
+#if USE(GSTREAMER_GL)
+ owr_video_renderer_set_request_context_callback(OWR_VIDEO_RENDERER(m_videoRenderer.get()), (OwrVideoRendererRequestContextCallback) MediaPlayerPrivateGStreamerBase::requestGLContext, this, nullptr);
+#endif
+ m_streamPrivate->setVideoRenderer(m_videoRenderer.get(), videoSink());
+ }
+ return sink;
+}
+
+void MediaPlayerPrivateGStreamerOwr::setSize(const IntSize& size)
+{
+ if (size == m_size)
+ return;
+
+ MediaPlayerPrivateGStreamerBase::setSize(size);
+ if (m_videoRenderer)
+ g_object_set(m_videoRenderer.get(), "width", size.width(), "height", size.height(), nullptr);
+
+ if (!m_videoTrack)
+ return;
+
+ auto& realTimeMediaSource = static_cast<RealtimeMediaSourceOwr&>(m_videoTrack->source());
+ realTimeMediaSource.setWidth(size.width());
+ realTimeMediaSource.setHeight(size.height());
+}
+
+FloatSize MediaPlayerPrivateGStreamerOwr::naturalSize() const
+{
+ auto size = MediaPlayerPrivateGStreamerBase::naturalSize();
+
+ // In case we are not playing the video we return the size we set to the media source.
+ if (m_videoTrack && size.isZero()) {
+ auto& realTimeMediaSource = static_cast<RealtimeMediaSourceOwr&>(m_videoTrack->source());
+ return realTimeMediaSource.size();
+ }
+
+ return size;
+}
+
+} // namespace WebCore
+
+#endif // ENABLE(VIDEO) && ENABLE(MEDIA_STREAM) && USE(GSTREAMER) && USE(OPENWEBRTC)
diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerOwr.h b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerOwr.h
new file mode 100644
index 000000000..334630e45
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerOwr.h
@@ -0,0 +1,128 @@
+/*
+ * Copyright (C) 2015 Igalia S.L. All rights reserved.
+ * Copyright (C) 2015 Metrological. All rights reserved.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#ifndef MediaPlayerPrivateGStreamerOwr_h
+#define MediaPlayerPrivateGStreamerOwr_h
+
+#if ENABLE(VIDEO) && ENABLE(MEDIA_STREAM) && USE(GSTREAMER) && USE(OPENWEBRTC)
+
+#include "AudioTrackPrivateMediaStream.h"
+#include "MediaPlayerPrivateGStreamerBase.h"
+#include "MediaStreamTrackPrivate.h"
+#include "VideoTrackPrivateMediaStream.h"
+
+typedef struct _OwrGstVideoRenderer OwrGstVideoRenderer;
+typedef struct _OwrGstAudioRenderer OwrGstAudioRenderer;
+
+namespace WebCore {
+
+class MediaStreamPrivate;
+class RealtimeMediaSourceOwr;
+
+class MediaPlayerPrivateGStreamerOwr : public MediaPlayerPrivateGStreamerBase, private MediaStreamTrackPrivate::Observer {
+public:
+ explicit MediaPlayerPrivateGStreamerOwr(MediaPlayer*);
+ ~MediaPlayerPrivateGStreamerOwr();
+
+ static void registerMediaEngine(MediaEngineRegistrar);
+
+ void setSize(const IntSize&) final;
+
+ FloatSize naturalSize() const final;
+
+private:
+ GstElement* createVideoSink() final;
+ GstElement* audioSink() const final { return m_audioSink.get(); }
+ bool isLiveStream() const final { return true; }
+
+ String engineDescription() const final { return "OpenWebRTC"; }
+
+ void load(const String&) final;
+#if ENABLE(MEDIA_SOURCE)
+ void load(const String&, MediaSourcePrivateClient*) final;
+#endif
+ void load(MediaStreamPrivate&) final;
+ void cancelLoad() final { }
+
+ void prepareToPlay() final { }
+ void play() final;
+ void pause() final;
+
+ bool hasVideo() const final;
+ bool hasAudio() const final;
+
+ float duration() const final { return 0; }
+
+ float currentTime() const final;
+ void seek(float) final { }
+ bool seeking() const final { return false; }
+
+ void setRate(float) final { }
+ void setPreservesPitch(bool) final { }
+ bool paused() const final { return m_paused; }
+
+ void setVolume(float) final;
+ void setMuted(bool) final;
+
+ bool hasClosedCaptions() const final { return false; }
+ void setClosedCaptionsVisible(bool) final { };
+
+ float maxTimeSeekable() const final { return 0; }
+ std::unique_ptr<PlatformTimeRanges> buffered() const final { return std::make_unique<PlatformTimeRanges>(); }
+ bool didLoadingProgress() const final;
+
+ unsigned long long totalBytes() const final { return 0; }
+
+ bool canLoadPoster() const final { return false; }
+ void setPoster(const String&) final { }
+ bool ended() const final { return m_ended; }
+
+ // MediaStreamTrackPrivate::Observer implementation.
+ void trackEnded(MediaStreamTrackPrivate&) final;
+ void trackMutedChanged(MediaStreamTrackPrivate&) final;
+ void trackSettingsChanged(MediaStreamTrackPrivate&) final;
+ void trackEnabledChanged(MediaStreamTrackPrivate&) final;
+
+ static void getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>&);
+ static MediaPlayer::SupportsType supportsType(const MediaEngineSupportParameters&);
+ static bool initializeGStreamerAndGStreamerDebugging();
+ void createGSTAudioSinkBin();
+ void loadingFailed(MediaPlayer::NetworkState error);
+ void stop();
+ void maybeHandleChangeMutedState(MediaStreamTrackPrivate&);
+ void disableMediaTracks();
+
+ bool m_paused { true };
+ bool m_ended { false };
+ RefPtr<MediaStreamTrackPrivate> m_videoTrack;
+ RefPtr<MediaStreamTrackPrivate> m_audioTrack;
+ GRefPtr<GstElement> m_audioSink;
+ RefPtr<MediaStreamPrivate> m_streamPrivate;
+ GRefPtr<OwrGstVideoRenderer> m_videoRenderer;
+ GRefPtr<OwrGstAudioRenderer> m_audioRenderer;
+
+ HashMap<String, RefPtr<AudioTrackPrivateMediaStream>> m_audioTrackMap;
+ HashMap<String, RefPtr<VideoTrackPrivateMediaStream>> m_videoTrackMap;
+};
+
+} // namespace WebCore
+
+#endif // ENABLE(VIDEO) && ENABLE(MEDIA_STREAM) && USE(GSTREAMER) && USE(OPENWEBRTC)
+
+#endif // MediaPlayerPrivateGStreamerOwr_h
diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerRequestInstallMissingPluginsCallback.h b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerRequestInstallMissingPluginsCallback.h
new file mode 100644
index 000000000..95ed2da63
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerRequestInstallMissingPluginsCallback.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2015 Igalia S.L.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef MediaPlayerRequestInstallMissingPluginsCallback_h
+#define MediaPlayerRequestInstallMissingPluginsCallback_h
+
+#if ENABLE(VIDEO) && USE(GSTREAMER)
+#include <wtf/RefCounted.h>
+
+namespace WebCore {
+
+class MediaPlayerRequestInstallMissingPluginsCallback : public RefCounted<MediaPlayerRequestInstallMissingPluginsCallback> {
+ WTF_MAKE_FAST_ALLOCATED();
+public:
+ static Ref<MediaPlayerRequestInstallMissingPluginsCallback> create(std::function<void (uint32_t)>&& function)
+ {
+ return adoptRef(*new MediaPlayerRequestInstallMissingPluginsCallback(WTFMove(function)));
+ }
+
+ void invalidate()
+ {
+ m_function = nullptr;
+ }
+
+ void complete(uint32_t result)
+ {
+ if (!m_function)
+ return;
+ m_function(result);
+ m_function = nullptr;
+ }
+
+private:
+ MediaPlayerRequestInstallMissingPluginsCallback(std::function<void (uint32_t)>&& function)
+ : m_function(WTFMove(function))
+ {
+ }
+
+ std::function<void (uint32_t)> m_function;
+};
+
+} // namespace WebCore
+
+#endif // ENABLE(VIDEO) && USE(GSTREAMER)
+#endif // MediaPlayerRequestInstallMissingPluginsCallback_h
diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaSourceGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/MediaSourceGStreamer.cpp
deleted file mode 100644
index 5982f80b9..000000000
--- a/Source/WebCore/platform/graphics/gstreamer/MediaSourceGStreamer.cpp
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright (C) 2013 Google Inc. All rights reserved.
- * Copyright (C) 2013 Orange
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are
- * met:
- *
- * * Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * * Redistributions in binary form must reproduce the above
- * copyright notice, this list of conditions and the following disclaimer
- * in the documentation and/or other materials provided with the
- * distribution.
- * * Neither the name of Google Inc. nor the names of its
- * contributors may be used to endorse or promote products derived from
- * this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
- * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
- * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
- * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
- * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
- * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
- * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
- * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "config.h"
-#include "MediaSourceGStreamer.h"
-
-#if ENABLE(MEDIA_SOURCE) && USE(GSTREAMER)
-
-#include "SourceBufferPrivateGStreamer.h"
-#include "WebKitMediaSourceGStreamer.h"
-#include <wtf/gobject/GRefPtr.h>
-
-namespace WebCore {
-
-void MediaSourceGStreamer::open(PassRefPtr<HTMLMediaSource> mediaSource, WebKitMediaSrc* src)
-{
- mediaSource->setPrivateAndOpen(adoptRef(*new MediaSourceGStreamer(src)));
-}
-
-MediaSourceGStreamer::MediaSourceGStreamer(WebKitMediaSrc* src)
- : m_client(adoptRef(new MediaSourceClientGstreamer(src)))
- , m_duration(0.0)
- , m_readyState(MediaPlayer::HaveNothing)
-{
-}
-
-MediaSourceGStreamer::~MediaSourceGStreamer()
-{
-}
-
-MediaSourceGStreamer::AddStatus MediaSourceGStreamer::addSourceBuffer(const ContentType& contentType, RefPtr<SourceBufferPrivate>& sourceBufferPrivate)
-{
- sourceBufferPrivate = adoptRef(new SourceBufferPrivateGStreamer(m_client.get(), contentType));
- return MediaSourceGStreamer::Ok;
-}
-
-void MediaSourceGStreamer::setDuration(double duration)
-{
- ASSERT(m_client);
- m_duration = duration;
- m_client->didReceiveDuration(duration);
-}
-
-void MediaSourceGStreamer::markEndOfStream(EndOfStreamStatus)
-{
- ASSERT(m_client);
- m_client->didFinishLoading(0);
-}
-
-void MediaSourceGStreamer::unmarkEndOfStream()
-{
- ASSERT(m_client);
-}
-
-}
-#endif
diff --git a/Source/WebCore/platform/graphics/gstreamer/SourceBufferPrivateGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/SourceBufferPrivateGStreamer.cpp
deleted file mode 100644
index 7068a558c..000000000
--- a/Source/WebCore/platform/graphics/gstreamer/SourceBufferPrivateGStreamer.cpp
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright (C) 2013 Google Inc. All rights reserved.
- * Copyright (C) 2013 Orange
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are
- * met:
- *
- * * Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * * Redistributions in binary form must reproduce the above
- * copyright notice, this list of conditions and the following disclaimer
- * in the documentation and/or other materials provided with the
- * distribution.
- * * Neither the name of Google Inc. nor the names of its
- * contributors may be used to endorse or promote products derived from
- * this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
- * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
- * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
- * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
- * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
- * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
- * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
- * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "config.h"
-#include "SourceBufferPrivateGStreamer.h"
-
-#if ENABLE(MEDIA_SOURCE) && USE(GSTREAMER)
-
-#include "ContentType.h"
-#include "NotImplemented.h"
-
-namespace WebCore {
-
-SourceBufferPrivateGStreamer::SourceBufferPrivateGStreamer(PassRefPtr<MediaSourceClientGstreamer> client, const ContentType& contentType)
- : m_readyState(MediaPlayer::HaveNothing)
-{
- m_client = client;
- m_type = contentType.type();
-}
-
-SourceBufferPrivate::AppendResult SourceBufferPrivateGStreamer::append(const unsigned char* data, unsigned length)
-{
- AppendResult result = AppendSucceeded;
- ASSERT(m_client);
- m_client->didReceiveData(reinterpret_cast_ptr<const char*>(data), length, m_type);
- return result;
-}
-
-void SourceBufferPrivateGStreamer::abort()
-{
- notImplemented();
-}
-
-void SourceBufferPrivateGStreamer::removedFromMediaSource()
-{
- notImplemented();
-}
-
-}
-#endif
diff --git a/Source/WebCore/platform/graphics/gstreamer/SourceBufferPrivateGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/SourceBufferPrivateGStreamer.h
deleted file mode 100644
index 4a9eb4abd..000000000
--- a/Source/WebCore/platform/graphics/gstreamer/SourceBufferPrivateGStreamer.h
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright (C) 2013 Google Inc. All rights reserved.
- * Copyright (C) 2013 Orange
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are
- * met:
- *
- * * Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * * Redistributions in binary form must reproduce the above
- * copyright notice, this list of conditions and the following disclaimer
- * in the documentation and/or other materials provided with the
- * distribution.
- * * Neither the name of Google Inc. nor the names of its
- * contributors may be used to endorse or promote products derived from
- * this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
- * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
- * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
- * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
- * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
- * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
- * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
- * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef SourceBufferPrivateGStreamer_h
-#define SourceBufferPrivateGStreamer_h
-
-#if ENABLE(MEDIA_SOURCE) && USE(GSTREAMER)
-
-#include "SourceBufferPrivate.h"
-#include "WebKitMediaSourceGStreamer.h"
-
-namespace WebCore {
-
-class SourceBufferPrivateGStreamer final : public SourceBufferPrivate {
-public:
- SourceBufferPrivateGStreamer(PassRefPtr<MediaSourceClientGstreamer>, const ContentType&);
- ~SourceBufferPrivateGStreamer() { }
-
- void setClient(SourceBufferPrivateClient*) { }
- AppendResult append(const unsigned char*, unsigned);
- void abort();
- void removedFromMediaSource();
- MediaPlayer::ReadyState readyState() const { return m_readyState; }
- void setReadyState(MediaPlayer::ReadyState readyState) { m_readyState = readyState; }
- void evictCodedFrames() { }
- bool isFull() { return false; }
-
-private:
- String m_type;
- RefPtr<MediaSourceClientGstreamer> m_client;
- MediaPlayer::ReadyState m_readyState;
-};
-
-}
-
-#endif
-#endif
diff --git a/Source/WebCore/platform/graphics/gstreamer/TextCombinerGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/TextCombinerGStreamer.cpp
index 339ca37eb..02de1e8bd 100644
--- a/Source/WebCore/platform/graphics/gstreamer/TextCombinerGStreamer.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/TextCombinerGStreamer.cpp
@@ -76,7 +76,7 @@ static gboolean webkitTextCombinerPadEvent(GstPad*, GstObject* parent, GstEvent*
static void webkit_text_combiner_init(WebKitTextCombiner* combiner)
{
- combiner->funnel = gst_element_factory_make("funnel", NULL);
+ combiner->funnel = gst_element_factory_make("funnel", nullptr);
ASSERT(combiner->funnel);
gboolean ret = gst_bin_add(GST_BIN(combiner), combiner->funnel);
@@ -147,7 +147,7 @@ static gboolean webkitTextCombinerPadEvent(GstPad* pad, GstObject* parent, GstEv
* the funnel */
if (targetParent == combiner->funnel) {
/* Setup a WebVTT encoder */
- GstElement* encoder = gst_element_factory_make("webvttenc", NULL);
+ GstElement* encoder = gst_element_factory_make("webvttenc", nullptr);
ASSERT(encoder);
ret = gst_bin_add(GST_BIN(combiner), encoder);
@@ -232,7 +232,7 @@ static GstPad* webkitTextCombinerRequestNewPad(GstElement * element,
GstPad* pad = gst_element_request_pad(combiner->funnel, templ, name, caps);
ASSERT(pad);
- GstPad* ghostPad = GST_PAD(g_object_new(WEBKIT_TYPE_TEXT_COMBINER_PAD, "direction", gst_pad_get_direction(pad), NULL));
+ GstPad* ghostPad = GST_PAD(g_object_new(WEBKIT_TYPE_TEXT_COMBINER_PAD, "direction", gst_pad_get_direction(pad), nullptr));
ASSERT(ghostPad);
ret = gst_ghost_pad_construct(GST_GHOST_PAD(ghostPad));
@@ -295,7 +295,7 @@ static void webkit_text_combiner_pad_class_init(WebKitTextCombinerPadClass* klas
GstElement* webkitTextCombinerNew()
{
- return GST_ELEMENT(g_object_new(WEBKIT_TYPE_TEXT_COMBINER, 0));
+ return GST_ELEMENT(g_object_new(WEBKIT_TYPE_TEXT_COMBINER, nullptr));
}
#endif // ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(VIDEO_TRACK)
diff --git a/Source/WebCore/platform/graphics/gstreamer/TextSinkGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/TextSinkGStreamer.cpp
index 678e7ac35..e651debfe 100644
--- a/Source/WebCore/platform/graphics/gstreamer/TextSinkGStreamer.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/TextSinkGStreamer.cpp
@@ -95,7 +95,7 @@ static void webkit_text_sink_class_init(WebKitTextSinkClass* klass)
GstElement* webkitTextSinkNew()
{
- return GST_ELEMENT(g_object_new(WEBKIT_TYPE_TEXT_SINK, 0));
+ return GST_ELEMENT(g_object_new(WEBKIT_TYPE_TEXT_SINK, nullptr));
}
#endif // ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(VIDEO_TRACK)
diff --git a/Source/WebCore/platform/graphics/gstreamer/TrackPrivateBaseGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/TrackPrivateBaseGStreamer.cpp
index 16bd494a9..af068cb08 100644
--- a/Source/WebCore/platform/graphics/gstreamer/TrackPrivateBaseGStreamer.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/TrackPrivateBaseGStreamer.cpp
@@ -34,46 +34,25 @@
#include "TrackPrivateBase.h"
#include <glib-object.h>
#include <gst/gst.h>
-#include <wtf/gobject/GUniquePtr.h>
+#include <gst/tag/tag.h>
+#include <wtf/glib/GUniquePtr.h>
+#include <wtf/text/CString.h>
GST_DEBUG_CATEGORY_EXTERN(webkit_media_player_debug);
#define GST_CAT_DEFAULT webkit_media_player_debug
namespace WebCore {
-static void trackPrivateActiveChangedCallback(GObject*, GParamSpec*, TrackPrivateBaseGStreamer* track)
-{
- track->activeChanged();
-}
-
-static void trackPrivateTagsChangedCallback(GObject*, GParamSpec*, TrackPrivateBaseGStreamer* track)
-{
- track->tagsChanged();
-}
-
-static gboolean trackPrivateActiveChangeTimeoutCallback(TrackPrivateBaseGStreamer* track)
-{
- track->notifyTrackOfActiveChanged();
- return FALSE;
-}
-
-static gboolean trackPrivateTagsChangeTimeoutCallback(TrackPrivateBaseGStreamer* track)
-{
- track->notifyTrackOfTagsChanged();
- return FALSE;
-}
-
TrackPrivateBaseGStreamer::TrackPrivateBaseGStreamer(TrackPrivateBase* owner, gint index, GRefPtr<GstPad> pad)
- : m_index(index)
+ : m_notifier(MainThreadNotifier<MainThreadNotification>::create())
+ , m_index(index)
, m_pad(pad)
, m_owner(owner)
- , m_activeTimerHandler(0)
- , m_tagTimerHandler(0)
{
ASSERT(m_pad);
- g_signal_connect(m_pad.get(), "notify::active", G_CALLBACK(trackPrivateActiveChangedCallback), this);
- g_signal_connect(m_pad.get(), "notify::tags", G_CALLBACK(trackPrivateTagsChangedCallback), this);
+ g_signal_connect_swapped(m_pad.get(), "notify::active", G_CALLBACK(activeChangedCallback), this);
+ g_signal_connect_swapped(m_pad.get(), "notify::tags", G_CALLBACK(tagsChangedCallback), this);
// We can't call notifyTrackOfTagsChanged() directly, because we need tagsChanged()
// to setup m_tags.
@@ -83,6 +62,7 @@ TrackPrivateBaseGStreamer::TrackPrivateBaseGStreamer(TrackPrivateBase* owner, gi
TrackPrivateBaseGStreamer::~TrackPrivateBaseGStreamer()
{
disconnect();
+ m_notifier->invalidate();
}
void TrackPrivateBaseGStreamer::disconnect()
@@ -90,67 +70,71 @@ void TrackPrivateBaseGStreamer::disconnect()
if (!m_pad)
return;
- g_signal_handlers_disconnect_by_func(m_pad.get(),
- reinterpret_cast<gpointer>(trackPrivateActiveChangedCallback), this);
- g_signal_handlers_disconnect_by_func(m_pad.get(),
- reinterpret_cast<gpointer>(trackPrivateTagsChangedCallback), this);
-
- if (m_activeTimerHandler)
- g_source_remove(m_activeTimerHandler);
- m_activeTimerHandler = 0;
-
- if (m_tagTimerHandler)
- g_source_remove(m_tagTimerHandler);
- m_tagTimerHandler = 0;
+ m_notifier->cancelPendingNotifications();
+ g_signal_handlers_disconnect_matched(m_pad.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
m_pad.clear();
m_tags.clear();
}
-void TrackPrivateBaseGStreamer::activeChanged()
+void TrackPrivateBaseGStreamer::activeChangedCallback(TrackPrivateBaseGStreamer* track)
{
- if (m_activeTimerHandler)
- g_source_remove(m_activeTimerHandler);
- m_activeTimerHandler = g_timeout_add(0,
- reinterpret_cast<GSourceFunc>(trackPrivateActiveChangeTimeoutCallback), this);
- g_source_set_name_by_id(m_activeTimerHandler, "[WebKit] trackPrivateActiveChangeTimeoutCallback");
+ track->m_notifier->notify(MainThreadNotification::ActiveChanged, [track] { track->notifyTrackOfActiveChanged(); });
}
-void TrackPrivateBaseGStreamer::tagsChanged()
+void TrackPrivateBaseGStreamer::tagsChangedCallback(TrackPrivateBaseGStreamer* track)
{
- if (m_tagTimerHandler)
- g_source_remove(m_tagTimerHandler);
+ track->tagsChanged();
+}
+void TrackPrivateBaseGStreamer::tagsChanged()
+{
GRefPtr<GstTagList> tags;
- g_object_get(m_pad.get(), "tags", &tags.outPtr(), NULL);
+ if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_pad.get()), "tags"))
+ g_object_get(m_pad.get(), "tags", &tags.outPtr(), nullptr);
+ else
+ tags = adoptGRef(gst_tag_list_new_empty());
+
{
- MutexLocker lock(m_tagMutex);
+ LockHolder lock(m_tagMutex);
m_tags.swap(tags);
}
- m_tagTimerHandler = g_timeout_add(0,
- reinterpret_cast<GSourceFunc>(trackPrivateTagsChangeTimeoutCallback), this);
- g_source_set_name_by_id(m_tagTimerHandler, "[WebKit] trackPrivateTagsChangeTimeoutCallback");
+ m_notifier->notify(MainThreadNotification::TagsChanged, [this] { notifyTrackOfTagsChanged(); });
}
void TrackPrivateBaseGStreamer::notifyTrackOfActiveChanged()
{
- m_activeTimerHandler = 0;
if (!m_pad)
return;
gboolean active = false;
- if (m_pad)
- g_object_get(m_pad.get(), "active", &active, NULL);
+ if (m_pad && g_object_class_find_property(G_OBJECT_GET_CLASS(m_pad.get()), "active"))
+ g_object_get(m_pad.get(), "active", &active, nullptr);
setActive(active);
}
-bool TrackPrivateBaseGStreamer::getTag(GstTagList* tags, const gchar* tagName, String& value)
+bool TrackPrivateBaseGStreamer::getLanguageCode(GstTagList* tags, AtomicString& value)
+{
+ String language;
+ if (getTag(tags, GST_TAG_LANGUAGE_CODE, language)) {
+ language = gst_tag_get_language_code_iso_639_1(language.utf8().data());
+ GST_INFO("Converted track %d's language code to %s.", m_index, language.utf8().data());
+ if (language != value) {
+ value = language;
+ return true;
+ }
+ }
+ return false;
+}
+
+template<class StringType>
+bool TrackPrivateBaseGStreamer::getTag(GstTagList* tags, const gchar* tagName, StringType& value)
{
GUniqueOutPtr<gchar> tagValue;
if (gst_tag_list_get_string(tags, tagName, &tagValue.outPtr())) {
- INFO_MEDIA_MESSAGE("Track %d got %s %s.", m_index, tagName, tagValue.get());
+ GST_INFO("Track %d got %s %s.", m_index, tagName, tagValue.get());
value = tagValue.get();
return true;
}
@@ -159,24 +143,33 @@ bool TrackPrivateBaseGStreamer::getTag(GstTagList* tags, const gchar* tagName, S
void TrackPrivateBaseGStreamer::notifyTrackOfTagsChanged()
{
- m_tagTimerHandler = 0;
if (!m_pad)
return;
TrackPrivateBaseClient* client = m_owner->client();
+ if (!client)
+ return;
+
GRefPtr<GstTagList> tags;
{
- MutexLocker lock(m_tagMutex);
+ LockHolder lock(m_tagMutex);
tags.swap(m_tags);
}
if (!tags)
return;
- if (getTag(tags.get(), GST_TAG_TITLE, m_label) && client)
- client->labelChanged(m_owner, m_label);
+ if (getTag(tags.get(), GST_TAG_TITLE, m_label))
+ client->labelChanged(m_label);
+
+ AtomicString language;
+ if (!getLanguageCode(tags.get(), language))
+ return;
+
+ if (language == m_language)
+ return;
- if (getTag(tags.get(), GST_TAG_LANGUAGE_CODE, m_language) && client)
- client->languageChanged(m_owner, m_language);
+ m_language = language;
+ client->languageChanged(m_language);
}
} // namespace WebCore
diff --git a/Source/WebCore/platform/graphics/gstreamer/TrackPrivateBaseGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/TrackPrivateBaseGStreamer.h
index 1e3b8c898..8e3488497 100644
--- a/Source/WebCore/platform/graphics/gstreamer/TrackPrivateBaseGStreamer.h
+++ b/Source/WebCore/platform/graphics/gstreamer/TrackPrivateBaseGStreamer.h
@@ -29,6 +29,8 @@
#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(VIDEO_TRACK)
#include "GRefPtrGStreamer.h"
+#include "MainThreadNotifier.h"
+#include <wtf/Lock.h>
#include <wtf/ThreadingPrimitives.h>
#include <wtf/text/WTFString.h>
@@ -48,28 +50,38 @@ public:
void setIndex(int index) { m_index = index; }
- void activeChanged();
- void tagsChanged();
+protected:
+ TrackPrivateBaseGStreamer(TrackPrivateBase* owner, gint index, GRefPtr<GstPad>);
void notifyTrackOfActiveChanged();
void notifyTrackOfTagsChanged();
-protected:
- TrackPrivateBaseGStreamer(TrackPrivateBase* owner, gint index, GRefPtr<GstPad>);
+ enum MainThreadNotification {
+ ActiveChanged = 1 << 0,
+ TagsChanged = 1 << 1,
+ NewSample = 1 << 2,
+ StreamChanged = 1 << 3
+ };
+ Ref<MainThreadNotifier<MainThreadNotification>> m_notifier;
gint m_index;
- String m_label;
- String m_language;
+ AtomicString m_label;
+ AtomicString m_language;
GRefPtr<GstPad> m_pad;
private:
- bool getTag(GstTagList* tags, const gchar* tagName, String& value);
+ bool getLanguageCode(GstTagList* tags, AtomicString& value);
- TrackPrivateBase* m_owner;
- guint m_activeTimerHandler;
- guint m_tagTimerHandler;
+ template<class StringType>
+ bool getTag(GstTagList* tags, const gchar* tagName, StringType& value);
+
+ static void activeChangedCallback(TrackPrivateBaseGStreamer*);
+ static void tagsChangedCallback(TrackPrivateBaseGStreamer*);
- Mutex m_tagMutex;
+ void tagsChanged();
+
+ TrackPrivateBase* m_owner;
+ Lock m_tagMutex;
GRefPtr<GstTagList> m_tags;
};
diff --git a/Source/WebCore/platform/graphics/gstreamer/VideoSinkGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/VideoSinkGStreamer.cpp
index 7dd894e28..9adaaa0bb 100644
--- a/Source/WebCore/platform/graphics/gstreamer/VideoSinkGStreamer.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/VideoSinkGStreamer.cpp
@@ -1,7 +1,8 @@
/*
* Copyright (C) 2007 OpenedHand
* Copyright (C) 2007 Alp Toker <alp@atoker.com>
- * Copyright (C) 2009, 2010, 2011, 2012 Igalia S.L
+ * Copyright (C) 2009, 2010, 2011, 2012, 2015, 2016 Igalia S.L
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
@@ -35,8 +36,8 @@
#include <glib.h>
#include <gst/gst.h>
#include <gst/video/gstvideometa.h>
-#include <wtf/OwnPtr.h>
-#include <wtf/gobject/GMutexLocker.h>
+#include <wtf/Condition.h>
+#include <wtf/RunLoop.h>
using namespace WebCore;
@@ -46,13 +47,8 @@ using namespace WebCore;
#else
#define GST_CAPS_FORMAT "{ xRGB, ARGB }"
#endif
-#if GST_CHECK_VERSION(1, 1, 0)
-#define GST_FEATURED_CAPS GST_VIDEO_CAPS_MAKE_WITH_FEATURES(GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, GST_CAPS_FORMAT) ";"
-#else
-#define GST_FEATURED_CAPS
-#endif
-#define WEBKIT_VIDEO_SINK_PAD_CAPS GST_FEATURED_CAPS GST_VIDEO_CAPS_MAKE(GST_CAPS_FORMAT)
+#define WEBKIT_VIDEO_SINK_PAD_CAPS GST_VIDEO_CAPS_MAKE_WITH_FEATURES(GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, GST_CAPS_FORMAT) ";" GST_VIDEO_CAPS_MAKE(GST_CAPS_FORMAT)
static GstStaticPadTemplate s_sinkTemplate = GST_STATIC_PAD_TEMPLATE("sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS(WEBKIT_VIDEO_SINK_PAD_CAPS));
@@ -62,35 +58,123 @@ GST_DEBUG_CATEGORY_STATIC(webkitVideoSinkDebug);
enum {
REPAINT_REQUESTED,
+ REPAINT_CANCELLED,
LAST_SIGNAL
};
-enum {
- PROP_0,
- PROP_CAPS
-};
-
static guint webkitVideoSinkSignals[LAST_SIGNAL] = { 0, };
-struct _WebKitVideoSinkPrivate {
- GstBuffer* buffer;
- guint timeoutId;
- GMutex* bufferMutex;
- GCond* dataCondition;
+static void webkitVideoSinkRepaintRequested(WebKitVideoSink*, GstSample*);
+static GRefPtr<GstSample> webkitVideoSinkRequestRender(WebKitVideoSink*, GstBuffer*);
- GstVideoInfo info;
+class VideoRenderRequestScheduler {
+public:
+ VideoRenderRequestScheduler()
+#if !USE(COORDINATED_GRAPHICS_THREADED)
+ : m_timer(RunLoop::main(), this, &VideoRenderRequestScheduler::render)
+#endif
+ {
+#if PLATFORM(GTK) && !USE(COORDINATED_GRAPHICS_THREADED)
+ // Use a higher priority than WebCore timers (G_PRIORITY_HIGH_IDLE + 20).
+ m_timer.setPriority(G_PRIORITY_HIGH_IDLE + 19);
+#endif
+ }
- GstCaps* currentCaps;
+ void start()
+ {
+ LockHolder locker(m_sampleMutex);
+ m_unlocked = false;
+ }
+
+ void stop()
+ {
+ LockHolder locker(m_sampleMutex);
+ m_sample = nullptr;
+ m_unlocked = true;
+#if !USE(COORDINATED_GRAPHICS_THREADED)
+ m_timer.stop();
+ m_dataCondition.notifyOne();
+#endif
+ }
+
+ void drain()
+ {
+ LockHolder locker(m_sampleMutex);
+ m_sample = nullptr;
+ }
+
+ bool requestRender(WebKitVideoSink* sink, GstBuffer* buffer)
+ {
+ LockHolder locker(m_sampleMutex);
+ if (m_unlocked)
+ return true;
+
+ m_sample = webkitVideoSinkRequestRender(sink, buffer);
+ if (!m_sample)
+ return false;
+
+#if USE(COORDINATED_GRAPHICS_THREADED)
+ auto sample = WTFMove(m_sample);
+ locker.unlockEarly();
+ if (LIKELY(GST_IS_SAMPLE(sample.get())))
+ webkitVideoSinkRepaintRequested(sink, sample.get());
+#else
+ m_sink = sink;
+ m_timer.startOneShot(0);
+ m_dataCondition.wait(m_sampleMutex);
+#endif
+ return true;
+ }
- // If this is TRUE all processing should finish ASAP
+private:
+
+#if !USE(COORDINATED_GRAPHICS_THREADED)
+ void render()
+ {
+ LockHolder locker(m_sampleMutex);
+ GRefPtr<GstSample> sample = WTFMove(m_sample);
+ GRefPtr<WebKitVideoSink> sink = WTFMove(m_sink);
+ if (sample && !m_unlocked && LIKELY(GST_IS_SAMPLE(sample.get())))
+ webkitVideoSinkRepaintRequested(sink.get(), sample.get());
+ m_dataCondition.notifyOne();
+ }
+#endif
+
+ Lock m_sampleMutex;
+ GRefPtr<GstSample> m_sample;
+
+#if !USE(COORDINATED_GRAPHICS_THREADED)
+ RunLoop::Timer<VideoRenderRequestScheduler> m_timer;
+ Condition m_dataCondition;
+ GRefPtr<WebKitVideoSink> m_sink;
+#endif
+
+ // If this is true all processing should finish ASAP
// This is necessary because there could be a race between
// unlock() and render(), where unlock() wins, signals the
- // GCond, then render() tries to render a frame although
+ // Condition, then render() tries to render a frame although
// everything else isn't running anymore. This will lead
// to deadlocks because render() holds the stream lock.
//
- // Protected by the buffer mutex
- bool unlocked;
+ // Protected by the sample mutex
+ bool m_unlocked { false };
+};
+
+struct _WebKitVideoSinkPrivate {
+ _WebKitVideoSinkPrivate()
+ {
+ gst_video_info_init(&info);
+ }
+
+ ~_WebKitVideoSinkPrivate()
+ {
+ if (currentCaps)
+ gst_caps_unref(currentCaps);
+ }
+
+ VideoRenderRequestScheduler scheduler;
+ GstVideoInfo info;
+ GstCaps* currentCaps;
};
#define webkit_video_sink_parent_class parent_class
@@ -100,59 +184,29 @@ G_DEFINE_TYPE_WITH_CODE(WebKitVideoSink, webkit_video_sink, GST_TYPE_VIDEO_SINK,
static void webkit_video_sink_init(WebKitVideoSink* sink)
{
sink->priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, WEBKIT_TYPE_VIDEO_SINK, WebKitVideoSinkPrivate);
-#if GLIB_CHECK_VERSION(2, 31, 0)
- sink->priv->dataCondition = new GCond;
- g_cond_init(sink->priv->dataCondition);
- sink->priv->bufferMutex = new GMutex;
- g_mutex_init(sink->priv->bufferMutex);
-#else
- sink->priv->dataCondition = g_cond_new();
- sink->priv->bufferMutex = g_mutex_new();
-#endif
-
- gst_video_info_init(&sink->priv->info);
+ g_object_set(GST_BASE_SINK(sink), "enable-last-sample", FALSE, nullptr);
+ new (sink->priv) WebKitVideoSinkPrivate();
}
-static gboolean webkitVideoSinkTimeoutCallback(gpointer data)
+static void webkitVideoSinkRepaintRequested(WebKitVideoSink* sink, GstSample* sample)
{
- WebKitVideoSink* sink = reinterpret_cast<WebKitVideoSink*>(data);
- WebKitVideoSinkPrivate* priv = sink->priv;
-
- WTF::GMutexLocker lock(priv->bufferMutex);
- GstBuffer* buffer = priv->buffer;
- priv->buffer = 0;
- priv->timeoutId = 0;
-
- if (!buffer || priv->unlocked || UNLIKELY(!GST_IS_BUFFER(buffer))) {
- g_cond_signal(priv->dataCondition);
- return FALSE;
- }
-
- g_signal_emit(sink, webkitVideoSinkSignals[REPAINT_REQUESTED], 0, buffer);
- gst_buffer_unref(buffer);
- g_cond_signal(priv->dataCondition);
+ g_signal_emit(sink, webkitVideoSinkSignals[REPAINT_REQUESTED], 0, sample);
+}
- return FALSE;
+static void webkitVideoSinkRepaintCancelled(WebKitVideoSink* sink)
+{
+ g_signal_emit(sink, webkitVideoSinkSignals[REPAINT_CANCELLED], 0);
}
-static GstFlowReturn webkitVideoSinkRender(GstBaseSink* baseSink, GstBuffer* buffer)
+static GRefPtr<GstSample> webkitVideoSinkRequestRender(WebKitVideoSink* sink, GstBuffer* buffer)
{
- WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
WebKitVideoSinkPrivate* priv = sink->priv;
-
- WTF::GMutexLocker lock(priv->bufferMutex);
-
- if (priv->unlocked)
- return GST_FLOW_OK;
-
- priv->buffer = gst_buffer_ref(buffer);
+ GRefPtr<GstSample> sample = adoptGRef(gst_sample_new(buffer, priv->currentCaps, nullptr, nullptr));
// The video info structure is valid only if the sink handled an allocation query.
GstVideoFormat format = GST_VIDEO_INFO_FORMAT(&priv->info);
- if (format == GST_VIDEO_FORMAT_UNKNOWN) {
- gst_buffer_unref(buffer);
- return GST_FLOW_ERROR;
- }
+ if (format == GST_VIDEO_FORMAT_UNKNOWN)
+ return nullptr;
#if !(USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS))
// Cairo's ARGB has pre-multiplied alpha while GStreamer's doesn't.
@@ -166,10 +220,8 @@ static GstFlowReturn webkitVideoSinkRender(GstBaseSink* baseSink, GstBuffer* buf
GstBuffer* newBuffer = WebCore::createGstBuffer(buffer);
// Check if allocation failed.
- if (UNLIKELY(!newBuffer)) {
- gst_buffer_unref(buffer);
- return GST_FLOW_ERROR;
- }
+ if (UNLIKELY(!newBuffer))
+ return nullptr;
// We don't use Color::premultipliedARGBFromColor() here because
// one function call per video pixel is just too expensive:
@@ -179,15 +231,13 @@ static GstFlowReturn webkitVideoSinkRender(GstBaseSink* baseSink, GstBuffer* buf
GstVideoFrame destinationFrame;
if (!gst_video_frame_map(&sourceFrame, &priv->info, buffer, GST_MAP_READ)) {
- gst_buffer_unref(buffer);
gst_buffer_unref(newBuffer);
- return GST_FLOW_ERROR;
+ return nullptr;
}
if (!gst_video_frame_map(&destinationFrame, &priv->info, newBuffer, GST_MAP_WRITE)) {
gst_video_frame_unmap(&sourceFrame);
- gst_buffer_unref(buffer);
gst_buffer_unref(newBuffer);
- return GST_FLOW_ERROR;
+ return nullptr;
}
const guint8* source = static_cast<guint8*>(GST_VIDEO_FRAME_PLANE_DATA(&sourceFrame, 0));
@@ -215,87 +265,32 @@ static GstFlowReturn webkitVideoSinkRender(GstBaseSink* baseSink, GstBuffer* buf
gst_video_frame_unmap(&sourceFrame);
gst_video_frame_unmap(&destinationFrame);
- gst_buffer_unref(buffer);
- buffer = priv->buffer = newBuffer;
- }
-#endif
-
- // This should likely use a lower priority, but glib currently starves
- // lower priority sources.
- // See: https://bugzilla.gnome.org/show_bug.cgi?id=610830.
- priv->timeoutId = g_timeout_add_full(G_PRIORITY_DEFAULT, 0, webkitVideoSinkTimeoutCallback,
- gst_object_ref(sink), reinterpret_cast<GDestroyNotify>(gst_object_unref));
- g_source_set_name_by_id(priv->timeoutId, "[WebKit] webkitVideoSinkTimeoutCallback");
-
- g_cond_wait(priv->dataCondition, priv->bufferMutex);
- return GST_FLOW_OK;
-}
-
-static void webkitVideoSinkDispose(GObject* object)
-{
- WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(object);
- WebKitVideoSinkPrivate* priv = sink->priv;
-
- if (priv->dataCondition) {
-#if GLIB_CHECK_VERSION(2, 31, 0)
- g_cond_clear(priv->dataCondition);
- delete priv->dataCondition;
-#else
- g_cond_free(priv->dataCondition);
-#endif
- priv->dataCondition = 0;
+ sample = adoptGRef(gst_sample_new(newBuffer, priv->currentCaps, nullptr, nullptr));
+ gst_buffer_unref(newBuffer);
}
-
- if (priv->bufferMutex) {
-#if GLIB_CHECK_VERSION(2, 31, 0)
- g_mutex_clear(priv->bufferMutex);
- delete priv->bufferMutex;
-#else
- g_mutex_free(priv->bufferMutex);
#endif
- priv->bufferMutex = 0;
- }
- G_OBJECT_CLASS(parent_class)->dispose(object);
+ return sample;
}
-static void webkitVideoSinkGetProperty(GObject* object, guint propertyId, GValue* value, GParamSpec* parameterSpec)
+static GstFlowReturn webkitVideoSinkRender(GstBaseSink* baseSink, GstBuffer* buffer)
{
- WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(object);
- WebKitVideoSinkPrivate* priv = sink->priv;
-
- switch (propertyId) {
- case PROP_CAPS: {
- GstCaps* caps = priv->currentCaps;
- if (caps)
- gst_caps_ref(caps);
- g_value_take_boxed(value, caps);
- break;
- }
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propertyId, parameterSpec);
- }
+ WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
+ return sink->priv->scheduler.requestRender(sink, buffer) ? GST_FLOW_OK : GST_FLOW_ERROR;
}
-static void unlockBufferMutex(WebKitVideoSinkPrivate* priv)
+static void webkitVideoSinkFinalize(GObject* object)
{
- WTF::GMutexLocker lock(priv->bufferMutex);
-
- if (priv->buffer) {
- gst_buffer_unref(priv->buffer);
- priv->buffer = 0;
- }
-
- priv->unlocked = true;
-
- g_cond_signal(priv->dataCondition);
+ WEBKIT_VIDEO_SINK(object)->priv->~WebKitVideoSinkPrivate();
+ G_OBJECT_CLASS(parent_class)->finalize(object);
}
static gboolean webkitVideoSinkUnlock(GstBaseSink* baseSink)
{
- WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
+ WebKitVideoSinkPrivate* priv = WEBKIT_VIDEO_SINK(baseSink)->priv;
- unlockBufferMutex(sink->priv);
+ priv->scheduler.stop();
+ webkitVideoSinkRepaintCancelled(WEBKIT_VIDEO_SINK(baseSink));
return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock, (baseSink), TRUE);
}
@@ -304,10 +299,7 @@ static gboolean webkitVideoSinkUnlockStop(GstBaseSink* baseSink)
{
WebKitVideoSinkPrivate* priv = WEBKIT_VIDEO_SINK(baseSink)->priv;
- {
- WTF::GMutexLocker lock(priv->bufferMutex);
- priv->unlocked = false;
- }
+ priv->scheduler.start();
return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop, (baseSink), TRUE);
}
@@ -316,11 +308,11 @@ static gboolean webkitVideoSinkStop(GstBaseSink* baseSink)
{
WebKitVideoSinkPrivate* priv = WEBKIT_VIDEO_SINK(baseSink)->priv;
- unlockBufferMutex(priv);
-
+ priv->scheduler.stop();
+ webkitVideoSinkRepaintCancelled(WEBKIT_VIDEO_SINK(baseSink));
if (priv->currentCaps) {
gst_caps_unref(priv->currentCaps);
- priv->currentCaps = 0;
+ priv->currentCaps = nullptr;
}
return TRUE;
@@ -330,8 +322,8 @@ static gboolean webkitVideoSinkStart(GstBaseSink* baseSink)
{
WebKitVideoSinkPrivate* priv = WEBKIT_VIDEO_SINK(baseSink)->priv;
- WTF::GMutexLocker lock(priv->bufferMutex);
- priv->unlocked = false;
+ priv->scheduler.start();
+
return TRUE;
}
@@ -357,7 +349,7 @@ static gboolean webkitVideoSinkSetCaps(GstBaseSink* baseSink, GstCaps* caps)
static gboolean webkitVideoSinkProposeAllocation(GstBaseSink* baseSink, GstQuery* query)
{
GstCaps* caps;
- gst_query_parse_allocation(query, &caps, 0);
+ gst_query_parse_allocation(query, &caps, nullptr);
if (!caps)
return FALSE;
@@ -365,14 +357,27 @@ static gboolean webkitVideoSinkProposeAllocation(GstBaseSink* baseSink, GstQuery
if (!gst_video_info_from_caps(&sink->priv->info, caps))
return FALSE;
- gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, 0);
- gst_query_add_allocation_meta(query, GST_VIDEO_CROP_META_API_TYPE, 0);
-#if GST_CHECK_VERSION(1, 1, 0)
- gst_query_add_allocation_meta(query, GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, 0);
-#endif
+ gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, nullptr);
+ gst_query_add_allocation_meta(query, GST_VIDEO_CROP_META_API_TYPE, nullptr);
+ gst_query_add_allocation_meta(query, GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, nullptr);
return TRUE;
}
+// GstBaseSink::event vfunc. Intercepts flush-start to drop any sample the
+// scheduler is still holding, then delegates every event (including
+// flush-start itself) to the base class default handler.
+static gboolean webkitVideoSinkEvent(GstBaseSink* baseSink, GstEvent* event)
+{
+ if (GST_EVENT_TYPE(event) == GST_EVENT_FLUSH_START) {
+ WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
+ sink->priv->scheduler.drain();
+
+ GST_DEBUG_OBJECT(sink, "Flush-start, releasing m_sample");
+ }
+
+ return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, event, (baseSink, event), TRUE);
+}
+
static void webkit_video_sink_class_init(WebKitVideoSinkClass* klass)
{
GObjectClass* gobjectClass = G_OBJECT_CLASS(klass);
@@ -384,8 +389,7 @@ static void webkit_video_sink_class_init(WebKitVideoSinkClass* klass)
g_type_class_add_private(klass, sizeof(WebKitVideoSinkPrivate));
- gobjectClass->dispose = webkitVideoSinkDispose;
- gobjectClass->get_property = webkitVideoSinkGetProperty;
+ gobjectClass->finalize = webkitVideoSinkFinalize;
baseSinkClass->unlock = webkitVideoSinkUnlock;
baseSinkClass->unlock_stop = webkitVideoSinkUnlockStop;
@@ -395,9 +399,7 @@ static void webkit_video_sink_class_init(WebKitVideoSinkClass* klass)
baseSinkClass->start = webkitVideoSinkStart;
baseSinkClass->set_caps = webkitVideoSinkSetCaps;
baseSinkClass->propose_allocation = webkitVideoSinkProposeAllocation;
-
- g_object_class_install_property(gobjectClass, PROP_CAPS,
- g_param_spec_boxed("current-caps", "Current-Caps", "Current caps", GST_TYPE_CAPS, G_PARAM_READABLE));
+ baseSinkClass->event = webkitVideoSinkEvent;
webkitVideoSinkSignals[REPAINT_REQUESTED] = g_signal_new("repaint-requested",
G_TYPE_FROM_CLASS(klass),
@@ -408,13 +410,23 @@ static void webkit_video_sink_class_init(WebKitVideoSinkClass* klass)
g_cclosure_marshal_generic,
G_TYPE_NONE, // Return type
1, // Only one parameter
- GST_TYPE_BUFFER);
+ GST_TYPE_SAMPLE);
+ webkitVideoSinkSignals[REPAINT_CANCELLED] = g_signal_new("repaint-cancelled",
+ G_TYPE_FROM_CLASS(klass),
+ G_SIGNAL_RUN_LAST,
+ 0, // Class offset
+ nullptr, // Accumulator
+ nullptr, // Accumulator data
+ g_cclosure_marshal_generic,
+ G_TYPE_NONE, // Return type
+ 0, // No parameters
+ G_TYPE_NONE);
}
// Factory function: instantiates a new WebKitVideoSink element.
GstElement* webkitVideoSinkNew()
{
- return GST_ELEMENT(g_object_new(WEBKIT_TYPE_VIDEO_SINK, 0));
+ return GST_ELEMENT(g_object_new(WEBKIT_TYPE_VIDEO_SINK, nullptr));
}
#endif // ENABLE(VIDEO) && USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/VideoTextureCopierGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/VideoTextureCopierGStreamer.cpp
new file mode 100644
index 000000000..abb8b9bcd
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/VideoTextureCopierGStreamer.cpp
@@ -0,0 +1,190 @@
+/*
+ Copyright (C) 2016 Igalia S.L.
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Library General Public
+ License as published by the Free Software Foundation; either
+ version 2 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Library General Public License for more details.
+
+ You should have received a copy of the GNU Library General Public License
+ along with this library; see the file COPYING.LIB. If not, write to
+ the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ Boston, MA 02110-1301, USA.
+ */
+
+
+#include "config.h"
+#include "VideoTextureCopierGStreamer.h"
+
+#if USE(GSTREAMER_GL)
+
+#include "GLContext.h"
+#include "ImageOrientation.h"
+#include "TextureMapperShaderProgram.h"
+
+namespace WebCore {
+
+// Creates the GL resources (context wrapper, shader, FBO, quad VBO) used for
+// texture copies. All allocation happens on the compositing display's sharing
+// GL context; the caller's current context is restored before returning.
+VideoTextureCopierGStreamer::VideoTextureCopierGStreamer()
+{
+ // Remember the caller's context so it can be re-activated at the end.
+ GLContext* previousContext = GLContext::current();
+ ASSERT(previousContext);
+ PlatformDisplay::sharedDisplayForCompositing().sharingGLContext()->makeContextCurrent();
+
+ m_context3D = GraphicsContext3D::createForCurrentGLContext();
+
+ m_shaderProgram = TextureMapperShaderProgram::create(*m_context3D, TextureMapperShaderProgram::Texture);
+
+ m_framebuffer = m_context3D->createFramebuffer();
+
+ // Unit quad (two triangles via TRIANGLE_FAN); scaled to the target rect
+ // by the model-view matrix at draw time.
+ static const GLfloat vertices[] = { 0, 0, 1, 0, 1, 1, 0, 1 };
+ m_vbo = m_context3D->createBuffer();
+ m_context3D->bindBuffer(GraphicsContext3D::ARRAY_BUFFER, m_vbo);
+ m_context3D->bufferData(GraphicsContext3D::ARRAY_BUFFER, sizeof(GC3Dfloat) * 8, vertices, GraphicsContext3D::STATIC_DRAW);
+
+ // Initialize the texture-space matrix for the default orientation/flip.
+ updateTextureSpaceMatrix();
+
+ previousContext->makeContextCurrent();
+}
+
+// Releases the GL resources on the same sharing context they were created on,
+// then restores the caller's context.
+VideoTextureCopierGStreamer::~VideoTextureCopierGStreamer()
+{
+ GLContext* previousContext = GLContext::current();
+ ASSERT(previousContext);
+ PlatformDisplay::sharedDisplayForCompositing().sharingGLContext()->makeContextCurrent();
+
+ m_context3D->deleteFramebuffer(m_framebuffer);
+ m_context3D->deleteBuffer(m_vbo);
+ // Drop the GL-context-backed objects while their context is current.
+ m_shaderProgram = nullptr;
+ m_context3D = nullptr;
+
+ previousContext->makeContextCurrent();
+}
+
+// Rebuilds the texture-coordinate transform from the source orientation and
+// the flip-Y request. Called from the constructor and whenever either input
+// changes in copyVideoTextureToPlatformTexture().
+void VideoTextureCopierGStreamer::updateTextureSpaceMatrix()
+{
+ m_textureSpaceMatrix.makeIdentity();
+
+ switch (m_orientation) {
+ case OriginTopLeft:
+ // Default orientation: identity. This is the state at construction
+ // time, so it must not fall into ASSERT_NOT_REACHED() below.
+ break;
+ case OriginRightTop:
+ m_textureSpaceMatrix.rotate(-90);
+ m_textureSpaceMatrix.translate(-1, 0);
+ break;
+ case OriginBottomRight:
+ m_textureSpaceMatrix.rotate(180);
+ m_textureSpaceMatrix.translate(-1, -1);
+ break;
+ case OriginLeftBottom:
+ m_textureSpaceMatrix.rotate(-270);
+ m_textureSpaceMatrix.translate(0, -1);
+ break;
+ default:
+ // Mirrored orientations are not expected from video frames.
+ ASSERT_NOT_REACHED();
+ }
+
+ // Note the inverted condition: when the caller does NOT ask for a flip we
+ // still flip here to convert between GL's bottom-left origin and the
+ // top-left origin of the source -- presumably intentional; confirm against
+ // callers before changing.
+ if (!m_flipY) {
+ m_textureSpaceMatrix.flipY();
+ m_textureSpaceMatrix.translate(0, -1);
+ }
+}
+
+// Recomputes the model-view and orthographic projection matrices for the
+// current output size. Called whenever m_size changes.
+void VideoTextureCopierGStreamer::updateTransformationMatrix()
+{
+ // Scale the unit quad stored in the VBO up to the target rectangle.
+ // (The previous code multiplied rectToRect() by an identity matrix,
+ // which is a no-op; assign it directly.)
+ FloatRect targetRect = FloatRect(FloatPoint(), m_size);
+ m_modelViewMatrix = TransformationMatrix::rectToRect(FloatRect(0, 0, 1, 1), targetRect);
+
+ // Taken from TextureMapperGL.
+ const float nearValue = 9999999;
+ const float farValue = -99999;
+
+ m_projectionMatrix = TransformationMatrix(2.0 / float(m_size.width()), 0, 0, 0,
+ 0, (-2.0) / float(m_size.height()), 0, 0,
+ 0, 0, -2.f / (farValue - nearValue), 0,
+ -1, 1, -(farValue + nearValue) / (farValue - nearValue), 1);
+}
+
+// Draws inputTexture into outputTexture through the member FBO, applying the
+// orientation/flip texture-space transform. All GL work runs on the
+// compositing display's sharing context; the previously current context,
+// framebuffer binding, texture binding and viewport are restored before
+// returning. Returns true when no GL error was raised, false on bad
+// preconditions (missing GL resources or empty frame).
+bool VideoTextureCopierGStreamer::copyVideoTextureToPlatformTexture(Platform3DObject inputTexture, IntSize& frameSize, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool flipY, ImageOrientation& sourceOrientation)
+{
+ if (!m_shaderProgram || !m_framebuffer || !m_vbo || frameSize.isEmpty())
+ return false;
+
+ // Lazily recompute the matrices only when their inputs change.
+ if (m_size != frameSize) {
+ m_size = frameSize;
+ updateTransformationMatrix();
+ }
+
+ if (m_flipY != flipY || m_orientation != sourceOrientation) {
+ m_flipY = flipY;
+ m_orientation = sourceOrientation;
+ updateTextureSpaceMatrix();
+ }
+
+ // Save previous context and activate the sharing one.
+ GLContext* previousContext = GLContext::current();
+ ASSERT(previousContext);
+ PlatformDisplay::sharedDisplayForCompositing().sharingGLContext()->makeContextCurrent();
+
+ // Save previous bound framebuffer, texture and viewport.
+ GC3Dint boundFramebuffer = 0;
+ GC3Dint boundTexture = 0;
+ GC3Dint previousViewport[4] = { 0, 0, 0, 0};
+ m_context3D->getIntegerv(GraphicsContext3D::FRAMEBUFFER_BINDING, &boundFramebuffer);
+ m_context3D->getIntegerv(GraphicsContext3D::TEXTURE_BINDING_2D, &boundTexture);
+ m_context3D->getIntegerv(GraphicsContext3D::VIEWPORT, previousViewport);
+
+ // Set proper parameters to the output texture and allocate uninitialized memory for it.
+ // NOTE(review): the texParameter calls below target TEXTURE_2D while the
+ // bind uses outputTarget -- harmless when outputTarget is TEXTURE_2D, but
+ // confirm callers never pass another target.
+ m_context3D->bindTexture(outputTarget, outputTexture);
+ m_context3D->texParameteri(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_MIN_FILTER, GraphicsContext3D::LINEAR);
+ m_context3D->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_S, GraphicsContext3D::CLAMP_TO_EDGE);
+ m_context3D->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_T, GraphicsContext3D::CLAMP_TO_EDGE);
+ m_context3D->texImage2DDirect(outputTarget, level, internalFormat, m_size.width(), m_size.height(), 0, format, type, nullptr);
+
+ // Bind framebuffer to paint and attach the destination texture to it.
+ m_context3D->bindFramebuffer(GraphicsContext3D::FRAMEBUFFER, m_framebuffer);
+ m_context3D->framebufferTexture2D(GraphicsContext3D::FRAMEBUFFER, GraphicsContext3D::COLOR_ATTACHMENT0, GL_TEXTURE_2D, outputTexture, 0);
+
+ // Set proper wrap parameter to the source texture.
+ m_context3D->bindTexture(GL_TEXTURE_2D, inputTexture);
+ m_context3D->texParameteri(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_MIN_FILTER, GraphicsContext3D::LINEAR);
+ m_context3D->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_S, GraphicsContext3D::CLAMP_TO_EDGE);
+ m_context3D->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_T, GraphicsContext3D::CLAMP_TO_EDGE);
+
+ // Set the viewport.
+ m_context3D->viewport(0, 0, m_size.width(), m_size.height());
+
+ // Set program parameters.
+ m_context3D->useProgram(m_shaderProgram->programID());
+ m_context3D->uniform1i(m_shaderProgram->samplerLocation(), 0);
+ m_shaderProgram->setMatrix(m_shaderProgram->modelViewMatrixLocation(), m_modelViewMatrix);
+ m_shaderProgram->setMatrix(m_shaderProgram->projectionMatrixLocation(), m_projectionMatrix);
+ m_shaderProgram->setMatrix(m_shaderProgram->textureSpaceMatrixLocation(), m_textureSpaceMatrix);
+
+ // Perform the copy: draw the unit quad set up in the constructor.
+ m_context3D->enableVertexAttribArray(m_shaderProgram->vertexLocation());
+ m_context3D->bindBuffer(GraphicsContext3D::ARRAY_BUFFER, m_vbo);
+ m_context3D->vertexAttribPointer(m_shaderProgram->vertexLocation(), 2, GraphicsContext3D::FLOAT, false, 0, 0);
+ m_context3D->drawArrays(GraphicsContext3D::TRIANGLE_FAN, 0, 4);
+ m_context3D->bindBuffer(GraphicsContext3D::ARRAY_BUFFER, 0);
+ m_context3D->disableVertexAttribArray(m_shaderProgram->vertexLocation());
+ m_context3D->useProgram(0);
+
+ // Restore previous bindings and viewport.
+ m_context3D->bindFramebuffer(GraphicsContext3D::FRAMEBUFFER, boundFramebuffer);
+ m_context3D->bindTexture(outputTarget, boundTexture);
+ m_context3D->viewport(previousViewport[0], previousViewport[1], previousViewport[2], previousViewport[3]);
+
+ bool ok = (m_context3D->getError() == GraphicsContext3D::NO_ERROR);
+
+ // Restore previous context.
+ previousContext->makeContextCurrent();
+ return ok;
+}
+
+} // namespace WebCore
+
+#endif // USE(GSTREAMER_GL)
diff --git a/Source/WebCore/platform/graphics/gstreamer/VideoTextureCopierGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/VideoTextureCopierGStreamer.h
new file mode 100644
index 000000000..945a7b3b4
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/VideoTextureCopierGStreamer.h
@@ -0,0 +1,59 @@
+/*
+ Copyright (C) 2016 Igalia S.L.
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Library General Public
+ License as published by the Free Software Foundation; either
+ version 2 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Library General Public License for more details.
+
+ You should have received a copy of the GNU Library General Public License
+ along with this library; see the file COPYING.LIB. If not, write to
+ the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ Boston, MA 02110-1301, USA.
+ */
+
+#ifndef VideoTextureCopierGStreamer_h
+#define VideoTextureCopierGStreamer_h
+
+#if USE(GSTREAMER_GL)
+
+#include "GraphicsContext3D.h"
+#include "TransformationMatrix.h"
+
+namespace WebCore {
+
+class TextureMapperShaderProgram;
+class ImageOrientation;
+
+// Copies a GStreamer-provided GL video texture into a caller-supplied texture
+// on the compositing display's sharing context, applying orientation and
+// flip-Y corrections. Owns the GL shader, FBO and quad VBO it draws with.
+class VideoTextureCopierGStreamer {
+public:
+ VideoTextureCopierGStreamer();
+ ~VideoTextureCopierGStreamer();
+
+ // Returns true when the draw completed without a GL error. Restores the
+ // caller's GL context/bindings/viewport before returning.
+ bool copyVideoTextureToPlatformTexture(Platform3DObject inputTexture, IntSize& frameSize, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool flipY, ImageOrientation& sourceOrientation);
+ // Recompute cached matrices; invoked internally when orientation/flip or
+ // output size change.
+ void updateTextureSpaceMatrix();
+ void updateTransformationMatrix();
+
+private:
+ RefPtr<GraphicsContext3D> m_context3D;
+ RefPtr<TextureMapperShaderProgram> m_shaderProgram;
+ Platform3DObject m_framebuffer { 0 };
+ Platform3DObject m_vbo { 0 };
+ // Cached inputs of updateTextureSpaceMatrix()/updateTransformationMatrix()
+ // so the matrices are only rebuilt when something changed.
+ bool m_flipY { false };
+ ImageOrientation m_orientation;
+ IntSize m_size;
+ TransformationMatrix m_modelViewMatrix;
+ TransformationMatrix m_projectionMatrix;
+ TransformationMatrix m_textureSpaceMatrix;
+};
+
+} // namespace WebCore
+
+#endif // USE(GSTREAMER_GL)
+
+#endif // VideoTextureCopierGStreamer_h
diff --git a/Source/WebCore/platform/graphics/gstreamer/VideoTrackPrivateGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/VideoTrackPrivateGStreamer.cpp
index e3652c350..a6f94b82c 100644
--- a/Source/WebCore/platform/graphics/gstreamer/VideoTrackPrivateGStreamer.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/VideoTrackPrivateGStreamer.cpp
@@ -37,6 +37,8 @@ VideoTrackPrivateGStreamer::VideoTrackPrivateGStreamer(GRefPtr<GstElement> playb
: TrackPrivateBaseGStreamer(this, index, pad)
, m_playbin(playbin)
{
+ // FIXME: Get a real ID from the tkhd atom.
+ m_id = "V" + String::number(index);
notifyTrackOfActiveChanged();
}
@@ -53,7 +55,7 @@ void VideoTrackPrivateGStreamer::setSelected(bool selected)
VideoTrackPrivate::setSelected(selected);
if (selected && m_playbin)
- g_object_set(m_playbin.get(), "current-video", m_index, NULL);
+ g_object_set(m_playbin.get(), "current-video", m_index, nullptr);
}
} // namespace WebCore
diff --git a/Source/WebCore/platform/graphics/gstreamer/VideoTrackPrivateGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/VideoTrackPrivateGStreamer.h
index b216221e0..ba46a69b2 100644
--- a/Source/WebCore/platform/graphics/gstreamer/VideoTrackPrivateGStreamer.h
+++ b/Source/WebCore/platform/graphics/gstreamer/VideoTrackPrivateGStreamer.h
@@ -41,19 +41,21 @@ public:
return adoptRef(new VideoTrackPrivateGStreamer(playbin, index, pad));
}
- virtual void disconnect() override;
+ void disconnect() override;
- virtual void setSelected(bool) override;
- virtual void setActive(bool enabled) override { setSelected(enabled); }
+ void setSelected(bool) override;
+ void setActive(bool enabled) override { setSelected(enabled); }
- virtual int trackIndex() const override { return m_index; }
+ int trackIndex() const override { return m_index; }
- virtual AtomicString label() const override { return m_label; }
- virtual AtomicString language() const override { return m_language; }
+ AtomicString id() const override { return m_id; }
+ AtomicString label() const override { return m_label; }
+ AtomicString language() const override { return m_language; }
private:
VideoTrackPrivateGStreamer(GRefPtr<GstElement> playbin, gint index, GRefPtr<GstPad>);
+ AtomicString m_id;
GRefPtr<GstElement> m_playbin;
};
diff --git a/Source/WebCore/platform/graphics/gstreamer/WebKitMediaSourceGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/WebKitMediaSourceGStreamer.cpp
deleted file mode 100644
index bade219f8..000000000
--- a/Source/WebCore/platform/graphics/gstreamer/WebKitMediaSourceGStreamer.cpp
+++ /dev/null
@@ -1,846 +0,0 @@
-/*
- * Copyright (C) 2009, 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
- * Copyright (C) 2013 Collabora Ltd.
- * Copyright (C) 2013 Orange
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with this library; if not, write to the Free Software
- * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- */
-
-#include "config.h"
-#include "WebKitMediaSourceGStreamer.h"
-
-#if ENABLE(VIDEO) && ENABLE(MEDIA_SOURCE) && USE(GSTREAMER)
-
-#include "GRefPtrGStreamer.h"
-#include "GStreamerUtilities.h"
-#include "NotImplemented.h"
-#include "TimeRanges.h"
-#include <gst/app/gstappsrc.h>
-#include <gst/gst.h>
-#include <gst/pbutils/missing-plugins.h>
-#include <wtf/gobject/GUniquePtr.h>
-#include <wtf/text/CString.h>
-
-typedef struct _Source {
- GstElement* appsrc;
- guint sourceid; /* To control the GSource */
- GstPad* srcpad;
- gboolean padAdded;
-
- guint64 offset;
- guint64 size;
- gboolean paused;
-
- guint startId;
- guint stopId;
- guint needDataId;
- guint enoughDataId;
- guint seekId;
-
- guint64 requestedOffset;
-} Source;
-
-
-#define WEBKIT_MEDIA_SRC_GET_PRIVATE(obj) (G_TYPE_INSTANCE_GET_PRIVATE((obj), WEBKIT_TYPE_MEDIA_SRC, WebKitMediaSrcPrivate))
-
-struct _WebKitMediaSrcPrivate {
- gchar* uri;
- Source sourceVideo;
- Source sourceAudio;
- WebCore::MediaPlayer* player;
- GstElement* playbin;
- gint64 duration;
- gboolean seekable;
- gboolean noMorePad;
- // TRUE if appsrc's version is >= 0.10.27, see
- // https://bugzilla.gnome.org/show_bug.cgi?id=609423
- gboolean haveAppSrc27;
- guint nbSource;
-};
-
-enum {
- PropLocation = 1,
- ProLast
-};
-
-static GstStaticPadTemplate srcTemplate = GST_STATIC_PAD_TEMPLATE("src_%u", GST_PAD_SRC, GST_PAD_SOMETIMES, GST_STATIC_CAPS_ANY);
-
-GST_DEBUG_CATEGORY_STATIC(webkit_media_src_debug);
-#define GST_CAT_DEFAULT webkit_media_src_debug
-
-static void webKitMediaSrcUriHandlerInit(gpointer gIface, gpointer ifaceData);
-static void webKitMediaSrcFinalize(GObject*);
-static void webKitMediaSrcSetProperty(GObject*, guint propertyId, const GValue*, GParamSpec*);
-static void webKitMediaSrcGetProperty(GObject*, guint propertyId, GValue*, GParamSpec*);
-static GstStateChangeReturn webKitMediaSrcChangeState(GstElement*, GstStateChange);
-static gboolean webKitMediaSrcQueryWithParent(GstPad*, GstObject*, GstQuery*);
-
-static void webKitMediaVideoSrcNeedDataCb(GstAppSrc*, guint, gpointer);
-static void webKitMediaVideoSrcEnoughDataCb(GstAppSrc*, gpointer);
-static gboolean webKitMediaVideoSrcSeekDataCb(GstAppSrc*, guint64, gpointer);
-static void webKitMediaAudioSrcNeedDataCb(GstAppSrc*, guint, gpointer);
-static void webKitMediaAudioSrcEnoughDataCb(GstAppSrc*, gpointer);
-static gboolean webKitMediaAudioSrcSeekDataCb(GstAppSrc*, guint64, gpointer);
-static GstAppSrcCallbacks appsrcCallbacksVideo = {
- webKitMediaVideoSrcNeedDataCb,
- webKitMediaVideoSrcEnoughDataCb,
- webKitMediaVideoSrcSeekDataCb,
- { 0 }
-};
-static GstAppSrcCallbacks appsrcCallbacksAudio = {
- webKitMediaAudioSrcNeedDataCb,
- webKitMediaAudioSrcEnoughDataCb,
- webKitMediaAudioSrcSeekDataCb,
- { 0 }
-};
-#define webkit_media_src_parent_class parent_class
-// We split this out into another macro to avoid a check-webkit-style error.
-#define WEBKIT_MEDIA_SRC_CATEGORY_INIT GST_DEBUG_CATEGORY_INIT(webkit_media_src_debug, "webkitmediasrc", 0, "websrc element");
-G_DEFINE_TYPE_WITH_CODE(WebKitMediaSrc, webkit_media_src, GST_TYPE_BIN,
- G_IMPLEMENT_INTERFACE(GST_TYPE_URI_HANDLER, webKitMediaSrcUriHandlerInit);
- WEBKIT_MEDIA_SRC_CATEGORY_INIT);
-
-static void webkit_media_src_class_init(WebKitMediaSrcClass* klass)
-{
- GObjectClass* oklass = G_OBJECT_CLASS(klass);
- GstElementClass* eklass = GST_ELEMENT_CLASS(klass);
-
- oklass->finalize = webKitMediaSrcFinalize;
- oklass->set_property = webKitMediaSrcSetProperty;
- oklass->get_property = webKitMediaSrcGetProperty;
-
- gst_element_class_add_pad_template(eklass, gst_static_pad_template_get(&srcTemplate));
-
- gst_element_class_set_metadata(eklass, "WebKit Media source element", "Source", "Handles Blob uris", "Stephane Jadaud <sjadaud@sii.fr>");
-
- /* Allows setting the uri using the 'location' property, which is used
- * for example by gst_element_make_from_uri() */
- g_object_class_install_property(oklass,
- PropLocation,
- g_param_spec_string("location", "location", "Location to read from", 0,
- (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
-
- eklass->change_state = webKitMediaSrcChangeState;
-
- g_type_class_add_private(klass, sizeof(WebKitMediaSrcPrivate));
-}
-
-static void webKitMediaSrcAddSrc(WebKitMediaSrc* src, GstElement* element)
-{
- GstPad* ghostPad;
- WebKitMediaSrcPrivate* priv = src->priv;
-
- if (!gst_bin_add(GST_BIN(src), element)) {
- GST_DEBUG_OBJECT(src, "Src element not added");
- return;
- }
- GRefPtr<GstPad> targetsrc = adoptGRef(gst_element_get_static_pad(element, "src"));
- if (!targetsrc) {
- GST_DEBUG_OBJECT(src, "Pad not found");
- return;
- }
-
- gst_element_sync_state_with_parent(element);
- GUniquePtr<gchar> name(g_strdup_printf("src_%u", priv->nbSource));
- ghostPad = WebCore::webkitGstGhostPadFromStaticTemplate(&srcTemplate, name.get(), targetsrc.get());
- gst_pad_set_active(ghostPad, TRUE);
-
- priv->nbSource++;
-
- if (priv->sourceVideo.appsrc == element)
- priv->sourceVideo.srcpad = ghostPad;
- else if (priv->sourceAudio.appsrc == element)
- priv->sourceAudio.srcpad = ghostPad;
-
- GST_OBJECT_FLAG_SET(ghostPad, GST_PAD_FLAG_NEED_PARENT);
- gst_pad_set_query_function(ghostPad, webKitMediaSrcQueryWithParent);
-}
-
-static void webkit_media_src_init(WebKitMediaSrc* src)
-{
- WebKitMediaSrcPrivate* priv = WEBKIT_MEDIA_SRC_GET_PRIVATE(src);
- src->priv = priv;
-
- priv->sourceVideo.appsrc = gst_element_factory_make("appsrc", "videoappsrc");
- gst_app_src_set_callbacks(GST_APP_SRC(priv->sourceVideo.appsrc), &appsrcCallbacksVideo, src, 0);
- webKitMediaSrcAddSrc(src, priv->sourceVideo.appsrc);
-
- priv->sourceAudio.appsrc = gst_element_factory_make("appsrc", "audioappsrc");
- gst_app_src_set_callbacks(GST_APP_SRC(priv->sourceAudio.appsrc), &appsrcCallbacksAudio, src, 0);
- webKitMediaSrcAddSrc(src, priv->sourceAudio.appsrc);
-}
-
-static void webKitMediaSrcFinalize(GObject* object)
-{
- WebKitMediaSrc* src = WEBKIT_MEDIA_SRC(object);
- WebKitMediaSrcPrivate* priv = src->priv;
-
- g_free(priv->uri);
-
- GST_CALL_PARENT(G_OBJECT_CLASS, finalize, (object));
-}
-
-static void webKitMediaSrcSetProperty(GObject* object, guint propId, const GValue* value, GParamSpec* pspec)
-{
- WebKitMediaSrc* src = WEBKIT_MEDIA_SRC(object);
- switch (propId) {
- case PropLocation:
- gst_uri_handler_set_uri(reinterpret_cast<GstURIHandler*>(src), g_value_get_string(value), 0);
- break;
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propId, pspec);
- break;
- }
-}
-
-static void webKitMediaSrcGetProperty(GObject* object, guint propId, GValue* value, GParamSpec* pspec)
-{
- WebKitMediaSrc* src = WEBKIT_MEDIA_SRC(object);
- WebKitMediaSrcPrivate* priv = src->priv;
-
- GST_OBJECT_LOCK(src);
- switch (propId) {
- case PropLocation:
- g_value_set_string(value, priv->uri);
- break;
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propId, pspec);
- break;
- }
- GST_OBJECT_UNLOCK(src);
-}
-
-// must be called on main thread and with object unlocked
-static gboolean webKitMediaVideoSrcStop(WebKitMediaSrc* src)
-{
- WebKitMediaSrcPrivate* priv = src->priv;
- gboolean seeking;
-
- GST_OBJECT_LOCK(src);
-
- seeking = priv->sourceVideo.seekId;
-
- if (priv->sourceVideo.startId) {
- g_source_remove(priv->sourceVideo.startId);
- priv->sourceVideo.startId = 0;
- }
-
- priv->player = 0;
- priv->playbin = 0;
-
- if (priv->sourceVideo.needDataId)
- g_source_remove(priv->sourceVideo.needDataId);
- priv->sourceVideo.needDataId = 0;
-
- if (priv->sourceVideo.enoughDataId)
- g_source_remove(priv->sourceVideo.enoughDataId);
- priv->sourceVideo.enoughDataId = 0;
-
- if (priv->sourceVideo.seekId)
- g_source_remove(priv->sourceVideo.seekId);
-
- priv->sourceVideo.seekId = 0;
-
- priv->sourceVideo.paused = FALSE;
- priv->sourceVideo.offset = 0;
- priv->seekable = FALSE;
-
- priv->duration = 0;
- priv->nbSource = 0;
-
- priv->sourceVideo.stopId = 0;
-
- GST_OBJECT_UNLOCK(src);
-
- if (priv->sourceVideo.appsrc) {
- gst_app_src_set_caps(GST_APP_SRC(priv->sourceVideo.appsrc), 0);
- if (!seeking)
- gst_app_src_set_size(GST_APP_SRC(priv->sourceVideo.appsrc), -1);
- }
-
- GST_DEBUG_OBJECT(src, "Stopped request");
-
- return FALSE;
-}
-
-static gboolean webKitMediaAudioSrcStop(WebKitMediaSrc* src)
-{
- WebKitMediaSrcPrivate* priv = src->priv;
- gboolean seeking;
-
- GST_OBJECT_LOCK(src);
-
- seeking = priv->sourceAudio.seekId;
-
- if (priv->sourceAudio.startId) {
- g_source_remove(priv->sourceAudio.startId);
- priv->sourceAudio.startId = 0;
- }
-
- priv->player = 0;
-
- priv->playbin = 0;
-
- if (priv->sourceAudio.needDataId)
- g_source_remove(priv->sourceAudio.needDataId);
- priv->sourceAudio.needDataId = 0;
-
- if (priv->sourceAudio.enoughDataId)
- g_source_remove(priv->sourceAudio.enoughDataId);
- priv->sourceAudio.enoughDataId = 0;
-
- if (priv->sourceAudio.seekId)
- g_source_remove(priv->sourceAudio.seekId);
-
- priv->sourceAudio.seekId = 0;
-
- priv->sourceAudio.paused = FALSE;
-
- priv->sourceAudio.offset = 0;
-
- priv->seekable = FALSE;
-
- priv->duration = 0;
- priv->nbSource = 0;
-
- priv->sourceAudio.stopId = 0;
-
- GST_OBJECT_UNLOCK(src);
-
- if (priv->sourceAudio.appsrc) {
- gst_app_src_set_caps(GST_APP_SRC(priv->sourceAudio.appsrc), 0);
- if (!seeking)
- gst_app_src_set_size(GST_APP_SRC(priv->sourceAudio.appsrc), -1);
- }
-
- GST_DEBUG_OBJECT(src, "Stopped request");
-
- return FALSE;
-}
-
-// must be called on main thread and with object unlocked
-static gboolean webKitMediaVideoSrcStart(WebKitMediaSrc* src)
-{
- WebKitMediaSrcPrivate* priv = src->priv;
-
- GST_OBJECT_LOCK(src);
- if (!priv->uri) {
- GST_ERROR_OBJECT(src, "No URI provided");
- GST_OBJECT_UNLOCK(src);
- webKitMediaVideoSrcStop(src);
- return FALSE;
- }
-
- priv->sourceVideo.startId = 0;
-
- GST_OBJECT_UNLOCK(src);
- GST_DEBUG_OBJECT(src, "Started request");
-
- return FALSE;
-}
-
-// must be called on main thread and with object unlocked
-static gboolean webKitMediaAudioSrcStart(WebKitMediaSrc* src)
-{
- WebKitMediaSrcPrivate* priv = src->priv;
-
- GST_OBJECT_LOCK(src);
- if (!priv->uri) {
- GST_ERROR_OBJECT(src, "No URI provided");
- GST_OBJECT_UNLOCK(src);
- webKitMediaAudioSrcStop(src);
- return FALSE;
- }
-
- priv->sourceAudio.startId = 0;
-
- GST_OBJECT_UNLOCK(src);
- GST_DEBUG_OBJECT(src, "Started request");
-
- return FALSE;
-}
-
-static GstStateChangeReturn webKitMediaSrcChangeState(GstElement* element, GstStateChange transition)
-{
- GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
- WebKitMediaSrc* src = WEBKIT_MEDIA_SRC(element);
- WebKitMediaSrcPrivate* priv = src->priv;
-
- switch (transition) {
- case GST_STATE_CHANGE_NULL_TO_READY:
- if (!priv->sourceVideo.appsrc && !priv->sourceAudio.appsrc) {
- gst_element_post_message(element,
- gst_missing_element_message_new(element, "appsrc"));
- GST_ELEMENT_ERROR(src, CORE, MISSING_PLUGIN, (0), ("no appsrc"));
- return GST_STATE_CHANGE_FAILURE;
- }
- break;
- default:
- break;
- }
-
- ret = GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
- if (G_UNLIKELY(ret == GST_STATE_CHANGE_FAILURE)) {
- GST_DEBUG_OBJECT(src, "State change failed");
- return ret;
- }
-
- switch (transition) {
- case GST_STATE_CHANGE_READY_TO_PAUSED:
- GST_DEBUG_OBJECT(src, "READY->PAUSED");
- GST_OBJECT_LOCK(src);
- priv->sourceVideo.startId = g_timeout_add_full(G_PRIORITY_DEFAULT, 0, (GSourceFunc) webKitMediaVideoSrcStart, gst_object_ref(src), (GDestroyNotify) gst_object_unref);
- g_source_set_name_by_id(priv->sourceVideo.startId, "[WebKit] webKitMediaVideoSrcStart");
- priv->sourceAudio.startId = g_timeout_add_full(G_PRIORITY_DEFAULT, 0, (GSourceFunc) webKitMediaAudioSrcStart, gst_object_ref(src), (GDestroyNotify) gst_object_unref);
- g_source_set_name_by_id(priv->sourceAudio.startId, "[WebKit] webKitMediaAudioSrcStart");
- GST_OBJECT_UNLOCK(src);
- break;
- case GST_STATE_CHANGE_PAUSED_TO_READY:
- GST_DEBUG_OBJECT(src, "PAUSED->READY");
- GST_OBJECT_LOCK(src);
- priv->sourceVideo.stopId = g_timeout_add_full(G_PRIORITY_DEFAULT, 0, (GSourceFunc) webKitMediaVideoSrcStop, gst_object_ref(src), (GDestroyNotify) gst_object_unref);
- g_source_set_name_by_id(priv->sourceVideo.stopId, "[WebKit] webKitMediaVideoSrcStop");
- priv->sourceAudio.stopId = g_timeout_add_full(G_PRIORITY_DEFAULT, 0, (GSourceFunc) webKitMediaAudioSrcStop, gst_object_ref(src), (GDestroyNotify) gst_object_unref);
- g_source_set_name_by_id(priv->sourceAudio.stopId, "[WebKit] webKitMediaAudioSrcStop");
- GST_OBJECT_UNLOCK(src);
- break;
- default:
- break;
- }
-
- return ret;
-}
-
-static gboolean webKitMediaSrcQueryWithParent(GstPad* pad, GstObject* parent, GstQuery* query)
-{
- WebKitMediaSrc* src = WEBKIT_MEDIA_SRC(GST_ELEMENT(parent));
- gboolean result = FALSE;
-
- switch (GST_QUERY_TYPE(query)) {
- case GST_QUERY_DURATION: {
- GstFormat format;
- gst_query_parse_duration(query, &format, NULL);
-
- GST_DEBUG_OBJECT(src, "duration query in format %s", gst_format_get_name(format));
- GST_OBJECT_LOCK(src);
- if ((format == GST_FORMAT_TIME) && (src->priv->duration > 0)) {
- gst_query_set_duration(query, format, src->priv->duration);
- result = TRUE;
- }
- GST_OBJECT_UNLOCK(src);
- break;
- }
- case GST_QUERY_URI: {
- GST_OBJECT_LOCK(src);
- gst_query_set_uri(query, src->priv->uri);
- GST_OBJECT_UNLOCK(src);
- result = TRUE;
- break;
- }
- default: {
- GRefPtr<GstPad> target = adoptGRef(gst_ghost_pad_get_target(GST_GHOST_PAD_CAST(pad)));
- // Forward the query to the proxy target pad.
- if (target)
- result = gst_pad_query(target.get(), query);
- break;
- }
- }
-
- return result;
-}
-
-// uri handler interface
-static GstURIType webKitMediaSrcUriGetType(GType)
-{
- return GST_URI_SRC;
-}
-
-const gchar* const* webKitMediaSrcGetProtocols(GType)
-{
- static const char* protocols[] = {"mediasourceblob", 0 };
- return protocols;
-}
-
-static gchar* webKitMediaSrcGetUri(GstURIHandler* handler)
-{
- WebKitMediaSrc* src = WEBKIT_MEDIA_SRC(handler);
- gchar* ret;
-
- GST_OBJECT_LOCK(src);
- ret = g_strdup(src->priv->uri);
- GST_OBJECT_UNLOCK(src);
- return ret;
-}
-
-static gboolean webKitMediaSrcSetUri(GstURIHandler* handler, const gchar* uri, GError** error)
-{
- WebKitMediaSrc* src = WEBKIT_MEDIA_SRC(handler);
- WebKitMediaSrcPrivate* priv = src->priv;
- if (GST_STATE(src) >= GST_STATE_PAUSED) {
- GST_ERROR_OBJECT(src, "URI can only be set in states < PAUSED");
- return FALSE;
- }
-
- GST_OBJECT_LOCK(src);
- g_free(priv->uri);
- priv->uri = 0;
- if (!uri) {
- GST_OBJECT_UNLOCK(src);
- return TRUE;
- }
-
- WebCore::URL url(WebCore::URL(), uri);
-
- priv->uri = g_strdup(url.string().utf8().data());
- GST_OBJECT_UNLOCK(src);
- return TRUE;
-}
-
-static void webKitMediaSrcUriHandlerInit(gpointer gIface, gpointer)
-{
- GstURIHandlerInterface* iface = (GstURIHandlerInterface *) gIface;
-
- iface->get_type = webKitMediaSrcUriGetType;
- iface->get_protocols = webKitMediaSrcGetProtocols;
- iface->get_uri = webKitMediaSrcGetUri;
- iface->set_uri = webKitMediaSrcSetUri;
-}
-
-// appsrc callbacks
-static gboolean webKitMediaVideoSrcNeedDataMainCb(WebKitMediaSrc* src)
-{
- WebKitMediaSrcPrivate* priv = src->priv;
-
- GST_OBJECT_LOCK(src);
- // already stopped
- if (!priv->sourceVideo.needDataId) {
- GST_OBJECT_UNLOCK(src);
- return FALSE;
- }
-
- priv->sourceVideo.paused = FALSE;
- priv->sourceVideo.needDataId = 0;
- GST_OBJECT_UNLOCK(src);
-
- return FALSE;
-}
-
-static gboolean webKitMediaAudioSrcNeedDataMainCb(WebKitMediaSrc* src)
-{
- WebKitMediaSrcPrivate* priv = src->priv;
-
- GST_OBJECT_LOCK(src);
- // already stopped
- if (!priv->sourceAudio.needDataId) {
- GST_OBJECT_UNLOCK(src);
- return FALSE;
- }
-
- priv->sourceAudio.paused = FALSE;
- priv->sourceAudio.needDataId = 0;
- GST_OBJECT_UNLOCK(src);
-
- return FALSE;
-}
-
-static void webKitMediaVideoSrcNeedDataCb(GstAppSrc*, guint length, gpointer userData)
-{
- WebKitMediaSrc* src = WEBKIT_MEDIA_SRC(userData);
- WebKitMediaSrcPrivate* priv = src->priv;
-
- GST_DEBUG_OBJECT(src, "Need more data: %u", length);
-
- GST_OBJECT_LOCK(src);
- if (priv->sourceVideo.needDataId || !priv->sourceVideo.paused) {
- GST_OBJECT_UNLOCK(src);
- return;
- }
-
- priv->sourceVideo.needDataId = g_timeout_add_full(G_PRIORITY_DEFAULT, 0, (GSourceFunc) webKitMediaVideoSrcNeedDataMainCb, gst_object_ref(src), (GDestroyNotify) gst_object_unref);
- g_source_set_name_by_id(priv->sourceVideo.needDataId, "[WebKit] webKitMediaVideoSrcNeedDataMainCb");
- GST_OBJECT_UNLOCK(src);
-}
-
-static void webKitMediaAudioSrcNeedDataCb(GstAppSrc*, guint length, gpointer userData)
-{
- WebKitMediaSrc* src = WEBKIT_MEDIA_SRC(userData);
- WebKitMediaSrcPrivate* priv = src->priv;
-
- GST_DEBUG_OBJECT(src, "Need more data: %u", length);
-
- GST_OBJECT_LOCK(src);
- if (priv->sourceAudio.needDataId || !priv->sourceAudio.paused) {
- GST_OBJECT_UNLOCK(src);
- return;
- }
-
- priv->sourceAudio.needDataId = g_timeout_add_full(G_PRIORITY_DEFAULT, 0, (GSourceFunc) webKitMediaAudioSrcNeedDataMainCb, gst_object_ref(src), (GDestroyNotify) gst_object_unref);
- g_source_set_name_by_id(priv->sourceAudio.needDataId, "[WebKit] webKitMediaAudioSrcNeedDataMainCb");
- GST_OBJECT_UNLOCK(src);
-}
-
-static gboolean webKitMediaVideoSrcEnoughDataMainCb(WebKitMediaSrc* src)
-{
- WebKitMediaSrcPrivate* priv = src->priv;
-
- GST_OBJECT_LOCK(src);
- // already stopped
- if (!priv->sourceVideo.enoughDataId) {
- GST_OBJECT_UNLOCK(src);
- return FALSE;
- }
-
- priv->sourceVideo.paused = TRUE;
- priv->sourceVideo.enoughDataId = 0;
- GST_OBJECT_UNLOCK(src);
-
- return FALSE;
-}
-
-static gboolean webKitMediaAudioSrcEnoughDataMainCb(WebKitMediaSrc* src)
-{
- WebKitMediaSrcPrivate* priv = src->priv;
-
- GST_OBJECT_LOCK(src);
- // already stopped
- if (!priv->sourceAudio.enoughDataId) {
- GST_OBJECT_UNLOCK(src);
- return FALSE;
- }
-
- priv->sourceAudio.paused = TRUE;
- priv->sourceAudio.enoughDataId = 0;
- GST_OBJECT_UNLOCK(src);
-
- return FALSE;
-}
-
-static void webKitMediaVideoSrcEnoughDataCb(GstAppSrc*, gpointer userData)
-{
- WebKitMediaSrc* src = WEBKIT_MEDIA_SRC(userData);
- WebKitMediaSrcPrivate* priv = src->priv;
-
- GST_DEBUG_OBJECT(src, "Have enough data");
-
- GST_OBJECT_LOCK(src);
- if (priv->sourceVideo.enoughDataId || priv->sourceVideo.paused) {
- GST_OBJECT_UNLOCK(src);
- return;
- }
-
- priv->sourceVideo.enoughDataId = g_timeout_add_full(G_PRIORITY_DEFAULT, 0, (GSourceFunc) webKitMediaVideoSrcEnoughDataMainCb, gst_object_ref(src), (GDestroyNotify) gst_object_unref);
- g_source_set_name_by_id(priv->sourceVideo.enoughDataId, "[WebKit] webKitMediaVideoSrcEnoughDataMainCb");
- GST_OBJECT_UNLOCK(src);
-}
-
-static void webKitMediaAudioSrcEnoughDataCb(GstAppSrc*, gpointer userData)
-{
- WebKitMediaSrc* src = WEBKIT_MEDIA_SRC(userData);
- WebKitMediaSrcPrivate* priv = src->priv;
-
- GST_DEBUG_OBJECT(src, "Have enough data");
-
- GST_OBJECT_LOCK(src);
- if (priv->sourceAudio.enoughDataId || priv->sourceAudio.paused) {
- GST_OBJECT_UNLOCK(src);
- return;
- }
-
- priv->sourceAudio.enoughDataId = g_timeout_add_full(G_PRIORITY_DEFAULT, 0, (GSourceFunc) webKitMediaAudioSrcEnoughDataMainCb, gst_object_ref(src), (GDestroyNotify) gst_object_unref);
- g_source_set_name_by_id(priv->sourceAudio.enoughDataId, "[WebKit] webKitMediaAudioSrcEnoughDataMainCb");
- GST_OBJECT_UNLOCK(src);
-}
-
-static gboolean webKitMediaVideoSrcSeekMainCb(WebKitMediaSrc* src)
-{
- notImplemented();
- src->priv->sourceVideo.seekId = 0;
- return FALSE;
-}
-
-static gboolean webKitMediaAudioSrcSeekMainCb(WebKitMediaSrc* src)
-{
- notImplemented();
- src->priv->sourceAudio.seekId = 0;
- return FALSE;
-}
-
-static gboolean webKitMediaVideoSrcSeekDataCb(GstAppSrc*, guint64 offset, gpointer userData)
-{
- WebKitMediaSrc* src = WEBKIT_MEDIA_SRC(userData);
- WebKitMediaSrcPrivate* priv = src->priv;
-
- GST_DEBUG_OBJECT(src, "Seeking to offset: %" G_GUINT64_FORMAT, offset);
- GST_OBJECT_LOCK(src);
- if (offset == priv->sourceVideo.offset && priv->sourceVideo.requestedOffset == priv->sourceVideo.offset) {
- GST_OBJECT_UNLOCK(src);
- return TRUE;
- }
-
- if (!priv->seekable) {
- GST_OBJECT_UNLOCK(src);
- return FALSE;
- }
- if (offset > priv->sourceVideo.size) {
- GST_OBJECT_UNLOCK(src);
- return FALSE;
- }
-
- GST_DEBUG_OBJECT(src, "Doing range-request seek");
- priv->sourceVideo.requestedOffset = offset;
-
- if (priv->sourceVideo.seekId)
- g_source_remove(priv->sourceVideo.seekId);
- priv->sourceVideo.seekId = g_timeout_add_full(G_PRIORITY_DEFAULT, 0, (GSourceFunc) webKitMediaVideoSrcSeekMainCb, gst_object_ref(src), (GDestroyNotify) gst_object_unref);
- g_source_set_name_by_id(priv->sourceVideo.seekId, "[WebKit] webKitMediaVideoSrcSeekMainCb");
- GST_OBJECT_UNLOCK(src);
-
- return TRUE;
-}
-
-static gboolean webKitMediaAudioSrcSeekDataCb(GstAppSrc*, guint64 offset, gpointer userData)
-{
- WebKitMediaSrc* src = WEBKIT_MEDIA_SRC(userData);
- WebKitMediaSrcPrivate* priv = src->priv;
-
- GST_DEBUG_OBJECT(src, "Seeking to offset: %" G_GUINT64_FORMAT, offset);
- GST_OBJECT_LOCK(src);
- if (offset == priv->sourceAudio.offset && priv->sourceAudio.requestedOffset == priv->sourceAudio.offset) {
- GST_OBJECT_UNLOCK(src);
- return TRUE;
- }
-
- if (!priv->seekable) {
- GST_OBJECT_UNLOCK(src);
- return FALSE;
- }
- if (offset > priv->sourceAudio.size) {
- GST_OBJECT_UNLOCK(src);
- return FALSE;
- }
-
- GST_DEBUG_OBJECT(src, "Doing range-request seek");
- priv->sourceAudio.requestedOffset = offset;
-
- if (priv->sourceAudio.seekId)
- g_source_remove(priv->sourceAudio.seekId);
- priv->sourceAudio.seekId = g_timeout_add_full(G_PRIORITY_DEFAULT, 0, (GSourceFunc) webKitMediaAudioSrcSeekMainCb, gst_object_ref(src), (GDestroyNotify) gst_object_unref);
- g_source_set_name_by_id(priv->sourceAudio.seekId, "[WebKit] webKitMediaAudioSrcSeekMainCb");
- GST_OBJECT_UNLOCK(src);
-
- return TRUE;
-}
-
-void webKitMediaSrcSetMediaPlayer(WebKitMediaSrc* src, WebCore::MediaPlayer* player)
-{
- WebKitMediaSrcPrivate* priv = src->priv;
- priv->player = player;
-}
-
-void webKitMediaSrcSetPlayBin(WebKitMediaSrc* src, GstElement* playBin)
-{
- WebKitMediaSrcPrivate* priv = src->priv;
- priv->playbin = playBin;
-}
-
-MediaSourceClientGstreamer::MediaSourceClientGstreamer(WebKitMediaSrc* src)
- : m_src(static_cast<WebKitMediaSrc*>(gst_object_ref(src)))
-{
-}
-
-MediaSourceClientGstreamer::~MediaSourceClientGstreamer()
-{
- gst_object_unref(m_src);
-}
-
-void MediaSourceClientGstreamer::didReceiveDuration(double duration)
-{
- WebKitMediaSrcPrivate* priv = m_src->priv;
- GST_DEBUG_OBJECT(m_src, "Received duration: %lf", duration);
-
- GST_OBJECT_LOCK(m_src);
- priv->duration = duration >= 0.0 ? static_cast<gint64>(duration*GST_SECOND) : 0;
- GST_OBJECT_UNLOCK(m_src);
-}
-
-void MediaSourceClientGstreamer::didReceiveData(const char* data, int length, String type)
-{
- WebKitMediaSrcPrivate* priv = m_src->priv;
- GstFlowReturn ret = GST_FLOW_OK;
- GstBuffer * buffer;
-
- if (type.startsWith("video")) {
- if (priv->noMorePad == FALSE && priv->sourceVideo.padAdded == TRUE) {
- gst_element_no_more_pads(GST_ELEMENT(m_src));
- priv->noMorePad = TRUE;
- }
- if (priv->noMorePad == FALSE && priv->sourceVideo.padAdded == FALSE) {
- gst_element_add_pad(GST_ELEMENT(m_src), priv->sourceVideo.srcpad);
- priv->sourceVideo.padAdded = TRUE;
- }
- GST_OBJECT_LOCK(m_src);
- buffer = WebCore::createGstBufferForData(data, length);
- GST_OBJECT_UNLOCK(m_src);
-
- ret = gst_app_src_push_buffer(GST_APP_SRC(priv->sourceVideo.appsrc), buffer);
- } else if (type.startsWith("audio")) {
- if (priv->noMorePad == FALSE && priv->sourceAudio.padAdded == TRUE) {
- gst_element_no_more_pads(GST_ELEMENT(m_src));
- priv->noMorePad = TRUE;
- }
- if (priv->noMorePad == FALSE && priv->sourceAudio.padAdded == FALSE) {
- gst_element_add_pad(GST_ELEMENT(m_src), priv->sourceAudio.srcpad);
- priv->sourceAudio.padAdded = TRUE;
- }
- GST_OBJECT_LOCK(m_src);
- buffer = WebCore::createGstBufferForData(data, length);
- GST_OBJECT_UNLOCK(m_src);
-
- ret = gst_app_src_push_buffer(GST_APP_SRC(priv->sourceAudio.appsrc), buffer);
- }
-
- if (ret != GST_FLOW_OK && ret != GST_FLOW_EOS)
- GST_ELEMENT_ERROR(m_src, CORE, FAILED, (0), (0));
-}
-
-void MediaSourceClientGstreamer::didFinishLoading(double)
-{
- WebKitMediaSrcPrivate* priv = m_src->priv;
-
- GST_DEBUG_OBJECT(m_src, "Have EOS");
-
- GST_OBJECT_LOCK(m_src);
- if (!priv->sourceVideo.seekId) {
- GST_OBJECT_UNLOCK(m_src);
- gst_app_src_end_of_stream(GST_APP_SRC(priv->sourceVideo.appsrc));
- } else
- GST_OBJECT_UNLOCK(m_src);
-
- GST_OBJECT_LOCK(m_src);
- if (!priv->sourceAudio.seekId) {
- GST_OBJECT_UNLOCK(m_src);
- gst_app_src_end_of_stream(GST_APP_SRC(priv->sourceAudio.appsrc));
- } else
- GST_OBJECT_UNLOCK(m_src);
-}
-
-void MediaSourceClientGstreamer::didFail()
-{
- gst_app_src_end_of_stream(GST_APP_SRC(m_src->priv->sourceVideo.appsrc));
- gst_app_src_end_of_stream(GST_APP_SRC(m_src->priv->sourceAudio.appsrc));
-}
-
-#endif // USE(GSTREAMER)
-
diff --git a/Source/WebCore/platform/graphics/gstreamer/WebKitWebSourceGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/WebKitWebSourceGStreamer.cpp
index c7d8eca76..1b31b380b 100644
--- a/Source/WebCore/platform/graphics/gstreamer/WebKitWebSourceGStreamer.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/WebKitWebSourceGStreamer.cpp
@@ -22,144 +22,160 @@
#if ENABLE(VIDEO) && USE(GSTREAMER)
-#include "CachedRawResource.h"
-#include "CachedRawResourceClient.h"
-#include "CachedResourceHandle.h"
-#include "CachedResourceLoader.h"
-#include "CachedResourceRequest.h"
-#include "CrossOriginAccessControl.h"
#include "GRefPtrGStreamer.h"
#include "GStreamerUtilities.h"
+#include "GUniquePtrGStreamer.h"
+#include "HTTPHeaderNames.h"
+#include "MainThreadNotifier.h"
#include "MediaPlayer.h"
#include "NotImplemented.h"
+#include "PlatformMediaResourceLoader.h"
+#include "ResourceError.h"
#include "ResourceHandle.h"
#include "ResourceHandleClient.h"
#include "ResourceRequest.h"
#include "ResourceResponse.h"
-#include "SecurityOrigin.h"
#include "SharedBuffer.h"
#include <gst/app/gstappsrc.h>
#include <gst/gst.h>
#include <gst/pbutils/missing-plugins.h>
+#include <wtf/MainThread.h>
#include <wtf/Noncopyable.h>
-#include <wtf/gobject/GMutexLocker.h>
-#include <wtf/gobject/GRefPtr.h>
-#include <wtf/gobject/GUniquePtr.h>
+#include <wtf/glib/GMutexLocker.h>
+#include <wtf/glib/GRefPtr.h>
+#include <wtf/glib/GUniquePtr.h>
#include <wtf/text/CString.h>
+#if USE(SOUP)
+#include "SoupNetworkSession.h"
+#endif
+
using namespace WebCore;
-enum CORSAccessCheckResult {
- CORSNoCheck,
- CORSSuccess,
- CORSFailure
+class StreamingClient {
+public:
+ StreamingClient(WebKitWebSrc*, ResourceRequest&&);
+ virtual ~StreamingClient();
+
+protected:
+ char* createReadBuffer(size_t requestedSize, size_t& actualSize);
+ void handleResponseReceived(const ResourceResponse&);
+ void handleDataReceived(const char*, int);
+ void handleNotifyFinished();
+
+ GRefPtr<GstElement> m_src;
+ ResourceRequest m_request;
};
-class StreamingClient {
- public:
- StreamingClient(WebKitWebSrc*);
- virtual ~StreamingClient();
+class CachedResourceStreamingClient final : public PlatformMediaResourceClient, public StreamingClient {
+ WTF_MAKE_NONCOPYABLE(CachedResourceStreamingClient);
+public:
+ CachedResourceStreamingClient(WebKitWebSrc*, ResourceRequest&&);
+ virtual ~CachedResourceStreamingClient();
+
+private:
+ // PlatformMediaResourceClient virtual methods.
+#if USE(SOUP)
+ char* getOrCreateReadBuffer(PlatformMediaResource&, size_t requestedSize, size_t& actualSize) override;
+#endif
+ void responseReceived(PlatformMediaResource&, const ResourceResponse&) override;
+ void dataReceived(PlatformMediaResource&, const char*, int) override;
+ void accessControlCheckFailed(PlatformMediaResource&, const ResourceError&) override;
+ void loadFailed(PlatformMediaResource&, const ResourceError&) override;
+ void loadFinished(PlatformMediaResource&) override;
+};
- virtual bool loadFailed() const = 0;
- virtual void setDefersLoading(bool) = 0;
+class ResourceHandleStreamingClient : public ThreadSafeRefCounted<ResourceHandleStreamingClient>, public ResourceHandleClient, public StreamingClient {
+public:
+ static Ref<ResourceHandleStreamingClient> create(WebKitWebSrc* src, ResourceRequest&& request)
+ {
+ return adoptRef(*new ResourceHandleStreamingClient(src, WTFMove(request)));
+ }
+ virtual ~ResourceHandleStreamingClient();
- protected:
- char* createReadBuffer(size_t requestedSize, size_t& actualSize);
- void handleResponseReceived(const ResourceResponse&, CORSAccessCheckResult);
- void handleDataReceived(const char*, int);
- void handleNotifyFinished();
+ void invalidate();
- GstElement* m_src;
-};
+ // StreamingClient virtual methods.
+ bool loadFailed() const;
+ void setDefersLoading(bool);
-class CachedResourceStreamingClient : public CachedRawResourceClient, public StreamingClient {
- WTF_MAKE_NONCOPYABLE(CachedResourceStreamingClient); WTF_MAKE_FAST_ALLOCATED;
- public:
- CachedResourceStreamingClient(WebKitWebSrc*, CachedResourceLoader*, const ResourceRequest&, MediaPlayerClient::CORSMode);
- virtual ~CachedResourceStreamingClient();
-
- // StreamingClient virtual methods.
- virtual bool loadFailed() const;
- virtual void setDefersLoading(bool);
-
- private:
- // CachedResourceClient virtual methods.
- virtual char* getOrCreateReadBuffer(CachedResource*, size_t requestedSize, size_t& actualSize);
- virtual void responseReceived(CachedResource*, const ResourceResponse&);
- virtual void dataReceived(CachedResource*, const char*, int);
- virtual void notifyFinished(CachedResource*);
-
- CachedResourceHandle<CachedRawResource> m_resource;
- RefPtr<SecurityOrigin> m_origin;
+private:
+ ResourceHandleStreamingClient(WebKitWebSrc*, ResourceRequest&&);
+ void cleanupAndStopRunLoop();
+
+ // ResourceHandleClient virtual methods.
+#if USE(SOUP)
+ char* getOrCreateReadBuffer(size_t requestedSize, size_t& actualSize) override;
+#endif
+ ResourceRequest willSendRequest(ResourceHandle*, ResourceRequest&&, ResourceResponse&&) override;
+ void didReceiveResponse(ResourceHandle*, ResourceResponse&&) override;
+ void didReceiveData(ResourceHandle*, const char*, unsigned, int) override;
+ void didReceiveBuffer(ResourceHandle*, Ref<SharedBuffer>&&, int encodedLength) override;
+ void didFinishLoading(ResourceHandle*, double) override;
+ void didFail(ResourceHandle*, const ResourceError&) override;
+ void wasBlocked(ResourceHandle*) override;
+ void cannotShowURL(ResourceHandle*) override;
+
+ ThreadIdentifier m_thread { 0 };
+ Lock m_initializeRunLoopConditionMutex;
+ Condition m_initializeRunLoopCondition;
+ RunLoop* m_runLoop { nullptr };
+ Lock m_terminateRunLoopConditionMutex;
+ Condition m_terminateRunLoopCondition;
+ RefPtr<ResourceHandle> m_resource;
+#if USE(SOUP)
+ std::unique_ptr<SoupNetworkSession> m_session;
+#endif
};
-class ResourceHandleStreamingClient : public ResourceHandleClient, public StreamingClient {
- WTF_MAKE_NONCOPYABLE(ResourceHandleStreamingClient); WTF_MAKE_FAST_ALLOCATED;
- public:
- ResourceHandleStreamingClient(WebKitWebSrc*, const ResourceRequest&);
- virtual ~ResourceHandleStreamingClient();
-
- // StreamingClient virtual methods.
- virtual bool loadFailed() const;
- virtual void setDefersLoading(bool);
-
- private:
- // ResourceHandleClient virtual methods.
- virtual char* getOrCreateReadBuffer(size_t requestedSize, size_t& actualSize);
- virtual void willSendRequest(ResourceHandle*, ResourceRequest&, const ResourceResponse&);
- virtual void didReceiveResponse(ResourceHandle*, const ResourceResponse&);
- virtual void didReceiveData(ResourceHandle*, const char*, unsigned, int);
- virtual void didReceiveBuffer(ResourceHandle*, PassRefPtr<SharedBuffer>, int encodedLength);
- virtual void didFinishLoading(ResourceHandle*, double /*finishTime*/);
- virtual void didFail(ResourceHandle*, const ResourceError&);
- virtual void wasBlocked(ResourceHandle*);
- virtual void cannotShowURL(ResourceHandle*);
-
- RefPtr<ResourceHandle> m_resource;
+enum MainThreadSourceNotification {
+ Start = 1 << 0,
+ Stop = 1 << 1,
+ NeedData = 1 << 2,
+ EnoughData = 1 << 3,
+ Seek = 1 << 4
};
#define WEBKIT_WEB_SRC_GET_PRIVATE(obj) (G_TYPE_INSTANCE_GET_PRIVATE((obj), WEBKIT_TYPE_WEB_SRC, WebKitWebSrcPrivate))
struct _WebKitWebSrcPrivate {
GstAppSrc* appsrc;
GstPad* srcpad;
- gchar* uri;
+ CString originalURI;
+ CString redirectedURI;
+ bool keepAlive;
+ GUniquePtr<GstStructure> extraHeaders;
+ bool compress;
+ GUniquePtr<gchar> httpMethod;
WebCore::MediaPlayer* player;
- StreamingClient* client;
+ RefPtr<PlatformMediaResourceLoader> loader;
+ RefPtr<PlatformMediaResource> resource;
+ RefPtr<ResourceHandleStreamingClient> client;
- CORSAccessCheckResult corsAccessCheck;
+ bool didPassAccessControlCheck;
guint64 offset;
guint64 size;
gboolean seekable;
- gboolean paused;
+ bool paused;
+ bool isSeeking;
guint64 requestedOffset;
- guint startID;
- guint stopID;
- guint needDataID;
- guint enoughDataID;
- guint seekID;
-
+ bool createdInMainThread;
+ RefPtr<MainThreadNotifier<MainThreadSourceNotification>> notifier;
GRefPtr<GstBuffer> buffer;
-
- // icecast stuff
- gboolean iradioMode;
- gchar* iradioName;
- gchar* iradioGenre;
- gchar* iradioUrl;
- gchar* iradioTitle;
};
enum {
- PROP_IRADIO_MODE = 1,
- PROP_IRADIO_NAME,
- PROP_IRADIO_GENRE,
- PROP_IRADIO_URL,
- PROP_IRADIO_TITLE,
- PROP_LOCATION
+ PROP_0,
+ PROP_LOCATION,
+ PROP_RESOLVED_LOCATION,
+ PROP_KEEP_ALIVE,
+ PROP_EXTRA_HEADERS,
+ PROP_COMPRESS,
+ PROP_METHOD
};
static GstStaticPadTemplate srcTemplate = GST_STATIC_PAD_TEMPLATE("src",
@@ -180,15 +196,24 @@ static GstStateChangeReturn webKitWebSrcChangeState(GstElement*, GstStateChange)
static gboolean webKitWebSrcQueryWithParent(GstPad*, GstObject*, GstQuery*);
-static void webKitWebSrcNeedDataCb(GstAppSrc*, guint length, gpointer userData);
-static void webKitWebSrcEnoughDataCb(GstAppSrc*, gpointer userData);
-static gboolean webKitWebSrcSeekDataCb(GstAppSrc*, guint64 offset, gpointer userData);
+static void webKitWebSrcNeedData(WebKitWebSrc*);
+static void webKitWebSrcEnoughData(WebKitWebSrc*);
+static gboolean webKitWebSrcSeek(WebKitWebSrc*, guint64);
static GstAppSrcCallbacks appsrcCallbacks = {
- webKitWebSrcNeedDataCb,
- webKitWebSrcEnoughDataCb,
- webKitWebSrcSeekDataCb,
- { 0 }
+ // need_data
+ [](GstAppSrc*, guint, gpointer userData) {
+ webKitWebSrcNeedData(WEBKIT_WEB_SRC(userData));
+ },
+ // enough_data
+ [](GstAppSrc*, gpointer userData) {
+ webKitWebSrcEnoughData(WEBKIT_WEB_SRC(userData));
+ },
+ // seek_data
+ [](GstAppSrc*, guint64 offset, gpointer userData) -> gboolean {
+ return webKitWebSrcSeek(WEBKIT_WEB_SRC(userData), offset);
+ },
+ { nullptr }
};
#define webkit_web_src_parent_class parent_class
@@ -213,57 +238,32 @@ static void webkit_web_src_class_init(WebKitWebSrcClass* klass)
gst_element_class_set_metadata(eklass, "WebKit Web source element", "Source", "Handles HTTP/HTTPS uris",
"Sebastian Dröge <sebastian.droege@collabora.co.uk>");
- // icecast stuff
- g_object_class_install_property(oklass,
- PROP_IRADIO_MODE,
- g_param_spec_boolean("iradio-mode",
- "iradio-mode",
- "Enable internet radio mode (extraction of shoutcast/icecast metadata)",
- FALSE,
- (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
-
- g_object_class_install_property(oklass,
- PROP_IRADIO_NAME,
- g_param_spec_string("iradio-name",
- "iradio-name",
- "Name of the stream",
- 0,
- (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
-
- g_object_class_install_property(oklass,
- PROP_IRADIO_GENRE,
- g_param_spec_string("iradio-genre",
- "iradio-genre",
- "Genre of the stream",
- 0,
- (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
-
- g_object_class_install_property(oklass,
- PROP_IRADIO_URL,
- g_param_spec_string("iradio-url",
- "iradio-url",
- "Homepage URL for radio stream",
- 0,
- (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
-
- g_object_class_install_property(oklass,
- PROP_IRADIO_TITLE,
- g_param_spec_string("iradio-title",
- "iradio-title",
- "Name of currently playing song",
- 0,
- (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
-
-
/* Allows setting the uri using the 'location' property, which is used
* for example by gst_element_make_from_uri() */
- g_object_class_install_property(oklass,
- PROP_LOCATION,
- g_param_spec_string("location",
- "location",
- "Location to read from",
- 0,
- (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
+ g_object_class_install_property(oklass, PROP_LOCATION,
+ g_param_spec_string("location", "location", "Location to read from",
+ nullptr, static_cast<GParamFlags>(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
+
+ g_object_class_install_property(oklass, PROP_RESOLVED_LOCATION,
+ g_param_spec_string("resolved-location", "Resolved location", "The location resolved by the server",
+ nullptr, static_cast<GParamFlags>(G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
+
+ g_object_class_install_property(oklass, PROP_KEEP_ALIVE,
+ g_param_spec_boolean("keep-alive", "keep-alive", "Use HTTP persistent connections",
+ FALSE, static_cast<GParamFlags>(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
+
+ g_object_class_install_property(oklass, PROP_EXTRA_HEADERS,
+ g_param_spec_boxed("extra-headers", "Extra Headers", "Extra headers to append to the HTTP request",
+ GST_TYPE_STRUCTURE, static_cast<GParamFlags>(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
+
+ g_object_class_install_property(oklass, PROP_COMPRESS,
+ g_param_spec_boolean("compress", "Compress", "Allow compressed content encodings",
+ FALSE, static_cast<GParamFlags>(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
+
+ g_object_class_install_property(oklass, PROP_METHOD,
+ g_param_spec_string("method", "method", "The HTTP method to use (default: GET)",
+ nullptr, static_cast<GParamFlags>(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
+
eklass->change_state = webKitWebSrcChangeState;
g_type_class_add_private(klass, sizeof(WebKitWebSrcPrivate));
@@ -274,8 +274,12 @@ static void webkit_web_src_init(WebKitWebSrc* src)
WebKitWebSrcPrivate* priv = WEBKIT_WEB_SRC_GET_PRIVATE(src);
src->priv = priv;
+ new (priv) WebKitWebSrcPrivate();
+
+ priv->createdInMainThread = isMainThread();
+ priv->notifier = MainThreadNotifier<MainThreadSourceNotification>::create();
- priv->appsrc = GST_APP_SRC(gst_element_factory_make("appsrc", 0));
+ priv->appsrc = GST_APP_SRC(gst_element_factory_make("appsrc", nullptr));
if (!priv->appsrc) {
GST_ERROR_OBJECT(src, "Failed to create appsrc");
return;
@@ -292,7 +296,7 @@ static void webkit_web_src_init(WebKitWebSrc* src)
GST_OBJECT_FLAG_SET(priv->srcpad, GST_PAD_FLAG_NEED_PARENT);
gst_pad_set_query_function(priv->srcpad, webKitWebSrcQueryWithParent);
- gst_app_src_set_callbacks(priv->appsrc, &appsrcCallbacks, src, 0);
+ gst_app_src_set_callbacks(priv->appsrc, &appsrcCallbacks, src, nullptr);
gst_app_src_set_emit_signals(priv->appsrc, FALSE);
gst_app_src_set_stream_type(priv->appsrc, GST_APP_STREAM_TYPE_SEEKABLE);
@@ -313,28 +317,32 @@ static void webkit_web_src_init(WebKitWebSrc* src)
// likely that libsoup already provides new data before
// the queue is really empty.
// This might need tweaking for ports not using libsoup.
- g_object_set(priv->appsrc, "min-percent", 20, NULL);
+ g_object_set(priv->appsrc, "min-percent", 20, nullptr);
- gst_app_src_set_caps(priv->appsrc, 0);
+ gst_base_src_set_automatic_eos(GST_BASE_SRC(priv->appsrc), FALSE);
+
+ gst_app_src_set_caps(priv->appsrc, nullptr);
gst_app_src_set_size(priv->appsrc, -1);
}
static void webKitWebSrcDispose(GObject* object)
{
- WebKitWebSrc* src = WEBKIT_WEB_SRC(object);
- WebKitWebSrcPrivate* priv = src->priv;
+ WebKitWebSrcPrivate* priv = WEBKIT_WEB_SRC(object)->priv;
+ if (priv->notifier) {
+ priv->notifier->invalidate();
+ priv->notifier = nullptr;
+ }
- priv->player = 0;
+ priv->player = nullptr;
GST_CALL_PARENT(G_OBJECT_CLASS, dispose, (object));
}
static void webKitWebSrcFinalize(GObject* object)
{
- WebKitWebSrc* src = WEBKIT_WEB_SRC(object);
- WebKitWebSrcPrivate* priv = src->priv;
+ WebKitWebSrcPrivate* priv = WEBKIT_WEB_SRC(object)->priv;
- g_free(priv->uri);
+ priv->~WebKitWebSrcPrivate();
GST_CALL_PARENT(G_OBJECT_CLASS, finalize, (object));
}
@@ -342,16 +350,24 @@ static void webKitWebSrcFinalize(GObject* object)
static void webKitWebSrcSetProperty(GObject* object, guint propID, const GValue* value, GParamSpec* pspec)
{
WebKitWebSrc* src = WEBKIT_WEB_SRC(object);
- WebKitWebSrcPrivate* priv = src->priv;
switch (propID) {
- case PROP_IRADIO_MODE: {
- WTF::GMutexLocker locker(GST_OBJECT_GET_LOCK(src));
- priv->iradioMode = g_value_get_boolean(value);
+ case PROP_LOCATION:
+ gst_uri_handler_set_uri(reinterpret_cast<GstURIHandler*>(src), g_value_get_string(value), nullptr);
+ break;
+ case PROP_KEEP_ALIVE:
+ src->priv->keepAlive = g_value_get_boolean(value);
+ break;
+ case PROP_EXTRA_HEADERS: {
+ const GstStructure* s = gst_value_get_structure(value);
+ src->priv->extraHeaders.reset(s ? gst_structure_copy(s) : nullptr);
break;
}
- case PROP_LOCATION:
- gst_uri_handler_set_uri(reinterpret_cast<GstURIHandler*>(src), g_value_get_string(value), 0);
+ case PROP_COMPRESS:
+ src->priv->compress = g_value_get_boolean(value);
+ break;
+ case PROP_METHOD:
+ src->priv->httpMethod.reset(g_value_dup_string(value));
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propID, pspec);
@@ -364,25 +380,25 @@ static void webKitWebSrcGetProperty(GObject* object, guint propID, GValue* value
WebKitWebSrc* src = WEBKIT_WEB_SRC(object);
WebKitWebSrcPrivate* priv = src->priv;
- WTF::GMutexLocker locker(GST_OBJECT_GET_LOCK(src));
+ WTF::GMutexLocker<GMutex> locker(*GST_OBJECT_GET_LOCK(src));
switch (propID) {
- case PROP_IRADIO_MODE:
- g_value_set_boolean(value, priv->iradioMode);
+ case PROP_LOCATION:
+ g_value_set_string(value, priv->originalURI.data());
break;
- case PROP_IRADIO_NAME:
- g_value_set_string(value, priv->iradioName);
+ case PROP_RESOLVED_LOCATION:
+ g_value_set_string(value, priv->redirectedURI.isNull() ? priv->originalURI.data() : priv->redirectedURI.data());
break;
- case PROP_IRADIO_GENRE:
- g_value_set_string(value, priv->iradioGenre);
+ case PROP_KEEP_ALIVE:
+ g_value_set_boolean(value, priv->keepAlive);
break;
- case PROP_IRADIO_URL:
- g_value_set_string(value, priv->iradioUrl);
+ case PROP_EXTRA_HEADERS:
+ gst_value_set_structure(value, priv->extraHeaders.get());
break;
- case PROP_IRADIO_TITLE:
- g_value_set_string(value, priv->iradioTitle);
+ case PROP_COMPRESS:
+ g_value_set_boolean(value, priv->compress);
break;
- case PROP_LOCATION:
- g_value_set_string(value, priv->uri);
+ case PROP_METHOD:
+ g_value_set_string(value, priv->httpMethod.get());
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propID, pspec);
@@ -390,123 +406,159 @@ static void webKitWebSrcGetProperty(GObject* object, guint propID, GValue* value
}
}
-static void removeTimeoutSources(WebKitWebSrc* src)
+static void webKitWebSrcStop(WebKitWebSrc* src)
{
WebKitWebSrcPrivate* priv = src->priv;
- if (priv->startID)
- g_source_remove(priv->startID);
- priv->startID = 0;
-
- if (priv->needDataID)
- g_source_remove(priv->needDataID);
- priv->needDataID = 0;
-
- if (priv->enoughDataID)
- g_source_remove(priv->enoughDataID);
- priv->enoughDataID = 0;
-
- if (priv->seekID)
- g_source_remove(priv->seekID);
- priv->seekID = 0;
-}
-
-static gboolean webKitWebSrcStop(WebKitWebSrc* src)
-{
- WebKitWebSrcPrivate* priv = src->priv;
-
- ASSERT(isMainThread());
-
- WTF::GMutexLocker locker(GST_OBJECT_GET_LOCK(src));
-
- bool seeking = priv->seekID;
-
- removeTimeoutSources(src);
- priv->stopID = 0;
+ if (priv->resource || (priv->loader && !priv->keepAlive)) {
+ GRefPtr<WebKitWebSrc> protector = WTF::ensureGRef(src);
+ priv->notifier->cancelPendingNotifications(MainThreadSourceNotification::NeedData | MainThreadSourceNotification::EnoughData | MainThreadSourceNotification::Seek);
+ priv->notifier->notify(MainThreadSourceNotification::Stop, [protector, keepAlive = priv->keepAlive] {
+ WebKitWebSrcPrivate* priv = protector->priv;
+
+ WTF::GMutexLocker<GMutex> locker(*GST_OBJECT_GET_LOCK(protector.get()));
+ if (priv->resource) {
+ priv->resource->stop();
+ priv->resource->setClient(nullptr);
+ priv->resource = nullptr;
+ }
+
+ if (!keepAlive)
+ priv->loader = nullptr;
+ });
+ }
if (priv->client) {
- delete priv->client;
- priv->client = 0;
+ priv->client->invalidate();
+ priv->client = nullptr;
}
+ WTF::GMutexLocker<GMutex> locker(*GST_OBJECT_GET_LOCK(src));
+
+ bool wasSeeking = std::exchange(priv->isSeeking, false);
+
if (priv->buffer) {
unmapGstBuffer(priv->buffer.get());
priv->buffer.clear();
}
- priv->paused = FALSE;
-
- g_free(priv->iradioName);
- priv->iradioName = 0;
-
- g_free(priv->iradioGenre);
- priv->iradioGenre = 0;
-
- g_free(priv->iradioUrl);
- priv->iradioUrl = 0;
-
- g_free(priv->iradioTitle);
- priv->iradioTitle = 0;
+ priv->paused = false;
priv->offset = 0;
priv->seekable = FALSE;
- if (!seeking) {
+ if (!wasSeeking) {
priv->size = 0;
priv->requestedOffset = 0;
- priv->player = 0;
+ priv->player = nullptr;
}
locker.unlock();
if (priv->appsrc) {
- gst_app_src_set_caps(priv->appsrc, 0);
- if (!seeking)
+ gst_app_src_set_caps(priv->appsrc, nullptr);
+ if (!wasSeeking)
gst_app_src_set_size(priv->appsrc, -1);
}
GST_DEBUG_OBJECT(src, "Stopped request");
+}
+
+static bool webKitWebSrcSetExtraHeader(GQuark fieldId, const GValue* value, gpointer userData)
+{
+ GUniquePtr<gchar> fieldContent;
+
+ if (G_VALUE_HOLDS_STRING(value))
+ fieldContent.reset(g_value_dup_string(value));
+ else {
+ GValue dest = G_VALUE_INIT;
+
+ g_value_init(&dest, G_TYPE_STRING);
+ if (g_value_transform(value, &dest))
+ fieldContent.reset(g_value_dup_string(&dest));
+ }
+
+ const gchar* fieldName = g_quark_to_string(fieldId);
+ if (!fieldContent.get()) {
+ GST_ERROR("extra-headers field '%s' contains no value or can't be converted to a string", fieldName);
+ return false;
+ }
- return FALSE;
+ GST_DEBUG("Appending extra header: \"%s: %s\"", fieldName, fieldContent.get());
+ ResourceRequest* request = static_cast<ResourceRequest*>(userData);
+ request->setHTTPHeaderField(fieldName, fieldContent.get());
+ return true;
}
-static gboolean webKitWebSrcStart(WebKitWebSrc* src)
+static gboolean webKitWebSrcProcessExtraHeaders(GQuark fieldId, const GValue* value, gpointer userData)
{
- WebKitWebSrcPrivate* priv = src->priv;
+ if (G_VALUE_TYPE(value) == GST_TYPE_ARRAY) {
+ unsigned size = gst_value_array_get_size(value);
- ASSERT(isMainThread());
+ for (unsigned i = 0; i < size; i++) {
+ if (!webKitWebSrcSetExtraHeader(fieldId, gst_value_array_get_value(value, i), userData))
+ return FALSE;
+ }
+ return TRUE;
+ }
- WTF::GMutexLocker locker(GST_OBJECT_GET_LOCK(src));
+ if (G_VALUE_TYPE(value) == GST_TYPE_LIST) {
+ unsigned size = gst_value_list_get_size(value);
+
+ for (unsigned i = 0; i < size; i++) {
+ if (!webKitWebSrcSetExtraHeader(fieldId, gst_value_list_get_value(value, i), userData))
+ return FALSE;
+ }
+ return TRUE;
+ }
+
+ return webKitWebSrcSetExtraHeader(fieldId, value, userData);
+}
+
+static void webKitWebSrcStart(WebKitWebSrc* src)
+{
+ WebKitWebSrcPrivate* priv = src->priv;
- priv->startID = 0;
- priv->corsAccessCheck = CORSNoCheck;
+ WTF::GMutexLocker<GMutex> locker(*GST_OBJECT_GET_LOCK(src));
- if (!priv->uri) {
+ priv->didPassAccessControlCheck = false;
+
+ if (priv->originalURI.isNull()) {
GST_ERROR_OBJECT(src, "No URI provided");
locker.unlock();
webKitWebSrcStop(src);
- return FALSE;
+ return;
}
ASSERT(!priv->client);
- URL url = URL(URL(), priv->uri);
+ GST_DEBUG_OBJECT(src, "Fetching %s", priv->originalURI.data());
+ URL url = URL(URL(), priv->originalURI.data());
ResourceRequest request(url);
request.setAllowCookies(true);
request.setFirstPartyForCookies(url);
+ priv->size = 0;
+
if (priv->player)
request.setHTTPReferrer(priv->player->referrer());
+ if (priv->httpMethod.get())
+ request.setHTTPMethod(priv->httpMethod.get());
+
#if USE(SOUP)
- // Let's disable HTTP Accept-Encoding here as we don't want the received response to be
- // encoded in any way as we need to rely on the proper size of the returned data on
+ // By default, HTTP Accept-Encoding is disabled here as we don't
+ // want the received response to be encoded in any way as we need
+ // to rely on the proper size of the returned data on
// didReceiveResponse.
// If Accept-Encoding is used, the server may send the data in encoded format and
// request.expectedContentLength() will have the "wrong" size (the size of the
// compressed data), even though the data received in didReceiveData is uncompressed.
- request.setAcceptEncoding(false);
+ // This is however useful to enable for adaptive streaming
+ // scenarios, when the demuxer needs to download playlists.
+ if (!priv->compress)
+ request.setAcceptEncoding(false);
#endif
// Let Apple web servers know we want to access their nice movie trailers.
@@ -516,36 +568,55 @@ static gboolean webKitWebSrcStart(WebKitWebSrc* src)
if (priv->requestedOffset) {
GUniquePtr<gchar> val(g_strdup_printf("bytes=%" G_GUINT64_FORMAT "-", priv->requestedOffset));
- request.setHTTPHeaderField("Range", val.get());
+ request.setHTTPHeaderField(HTTPHeaderName::Range, val.get());
}
priv->offset = priv->requestedOffset;
- if (priv->iradioMode)
- request.setHTTPHeaderField("icy-metadata", "1");
+ if (!priv->keepAlive) {
+ GST_DEBUG_OBJECT(src, "Persistent connection support disabled");
+ request.setHTTPHeaderField(HTTPHeaderName::Connection, "close");
+ }
- // Needed to use DLNA streaming servers
- request.setHTTPHeaderField("transferMode.dlna", "Streaming");
+ if (priv->extraHeaders)
+ gst_structure_foreach(priv->extraHeaders.get(), webKitWebSrcProcessExtraHeaders, &request);
- if (priv->player) {
- if (CachedResourceLoader* loader = priv->player->cachedResourceLoader())
- priv->client = new CachedResourceStreamingClient(src, loader, request, priv->player->mediaPlayerClient()->mediaPlayerCORSMode());
- }
+ // We always request Icecast/Shoutcast metadata, just in case ...
+ request.setHTTPHeaderField(HTTPHeaderName::IcyMetadata, "1");
- if (!priv->client)
- priv->client = new ResourceHandleStreamingClient(src, request);
+ if (!priv->player || !priv->createdInMainThread) {
+ priv->client = ResourceHandleStreamingClient::create(src, WTFMove(request));
+ if (priv->client->loadFailed()) {
+ GST_ERROR_OBJECT(src, "Failed to setup streaming client");
+ locker.unlock();
+ webKitWebSrcStop(src);
+ } else
+ GST_DEBUG_OBJECT(src, "Started request");
+ return;
+ }
- if (!priv->client || priv->client->loadFailed()) {
- GST_ERROR_OBJECT(src, "Failed to setup streaming client");
- if (priv->client) {
- delete priv->client;
- priv->client = 0;
+ locker.unlock();
+ GRefPtr<WebKitWebSrc> protector = WTF::ensureGRef(src);
+ priv->notifier->notify(MainThreadSourceNotification::Start, [protector, request = WTFMove(request)] {
+ WebKitWebSrcPrivate* priv = protector->priv;
+
+ WTF::GMutexLocker<GMutex> locker(*GST_OBJECT_GET_LOCK(protector.get()));
+ if (!priv->loader)
+ priv->loader = priv->player->createResourceLoader();
+
+ PlatformMediaResourceLoader::LoadOptions loadOptions = 0;
+ if (request.url().protocolIsBlob())
+ loadOptions |= PlatformMediaResourceLoader::LoadOption::BufferData;
+ priv->resource = priv->loader->requestResource(ResourceRequest(request), loadOptions);
+ if (priv->resource) {
+ priv->resource->setClient(std::make_unique<CachedResourceStreamingClient>(protector.get(), ResourceRequest(request)));
+ GST_DEBUG_OBJECT(protector.get(), "Started request");
+ } else {
+ GST_ERROR_OBJECT(protector.get(), "Failed to setup streaming client");
+ priv->loader = nullptr;
+ locker.unlock();
+ webKitWebSrcStop(protector.get());
}
- locker.unlock();
- webKitWebSrcStop(src);
- return FALSE;
- }
- GST_DEBUG_OBJECT(src, "Started request");
- return FALSE;
+ });
}
static GstStateChangeReturn webKitWebSrcChangeState(GstElement* element, GstStateChange transition)
@@ -559,7 +630,7 @@ static GstStateChangeReturn webKitWebSrcChangeState(GstElement* element, GstStat
if (!priv->appsrc) {
gst_element_post_message(element,
gst_missing_element_message_new(element, "appsrc"));
- GST_ELEMENT_ERROR(src, CORE, MISSING_PLUGIN, (0), ("no appsrc"));
+ GST_ELEMENT_ERROR(src, CORE, MISSING_PLUGIN, (nullptr), ("no appsrc"));
return GST_STATE_CHANGE_FAILURE;
}
break;
@@ -573,18 +644,19 @@ static GstStateChangeReturn webKitWebSrcChangeState(GstElement* element, GstStat
return ret;
}
- WTF::GMutexLocker locker(GST_OBJECT_GET_LOCK(src));
switch (transition) {
case GST_STATE_CHANGE_READY_TO_PAUSED:
+ {
GST_DEBUG_OBJECT(src, "READY->PAUSED");
- priv->startID = g_idle_add_full(G_PRIORITY_DEFAULT, (GSourceFunc) webKitWebSrcStart, gst_object_ref(src), (GDestroyNotify) gst_object_unref);
+ webKitWebSrcStart(src);
break;
+ }
case GST_STATE_CHANGE_PAUSED_TO_READY:
+ {
GST_DEBUG_OBJECT(src, "PAUSED->READY");
- // cancel pending sources
- removeTimeoutSources(src);
- priv->stopID = g_idle_add_full(G_PRIORITY_DEFAULT, (GSourceFunc) webKitWebSrcStop, gst_object_ref(src), (GDestroyNotify) gst_object_unref);
+ webKitWebSrcStop(src);
break;
+ }
default:
break;
}
@@ -601,10 +673,10 @@ static gboolean webKitWebSrcQueryWithParent(GstPad* pad, GstObject* parent, GstQ
case GST_QUERY_DURATION: {
GstFormat format;
- gst_query_parse_duration(query, &format, NULL);
+ gst_query_parse_duration(query, &format, nullptr);
GST_DEBUG_OBJECT(src, "duration query in format %s", gst_format_get_name(format));
- WTF::GMutexLocker locker(GST_OBJECT_GET_LOCK(src));
+ WTF::GMutexLocker<GMutex> locker(*GST_OBJECT_GET_LOCK(src));
if (format == GST_FORMAT_BYTES && src->priv->size > 0) {
gst_query_set_duration(query, format, src->priv->size);
result = TRUE;
@@ -612,8 +684,19 @@ static gboolean webKitWebSrcQueryWithParent(GstPad* pad, GstObject* parent, GstQ
break;
}
case GST_QUERY_URI: {
- WTF::GMutexLocker locker(GST_OBJECT_GET_LOCK(src));
- gst_query_set_uri(query, src->priv->uri);
+ WTF::GMutexLocker<GMutex> locker(*GST_OBJECT_GET_LOCK(src));
+ gst_query_set_uri(query, src->priv->originalURI.data());
+ if (!src->priv->redirectedURI.isNull())
+ gst_query_set_uri_redirection(query, src->priv->redirectedURI.data());
+ result = TRUE;
+ break;
+ }
+ case GST_QUERY_SCHEDULING: {
+ GstSchedulingFlags flags;
+ int minSize, maxSize, align;
+
+ gst_query_parse_scheduling(query, &flags, &minSize, &maxSize, &align);
+ gst_query_set_scheduling(query, static_cast<GstSchedulingFlags>(flags | GST_SCHEDULING_FLAG_BANDWIDTH_LIMITED), minSize, maxSize, align);
result = TRUE;
break;
}
@@ -632,7 +715,7 @@ static gboolean webKitWebSrcQueryWithParent(GstPad* pad, GstObject* parent, GstQ
static bool urlHasSupportedProtocol(const URL& url)
{
- return url.isValid() && (url.protocolIsInHTTPFamily() || url.protocolIs("blob"));
+ return url.isValid() && (url.protocolIsInHTTPFamily() || url.protocolIsBlob());
}
// uri handler interface
@@ -644,7 +727,7 @@ static GstURIType webKitWebSrcUriGetType(GType)
const gchar* const* webKitWebSrcGetProtocols(GType)
{
- static const char* protocols[] = {"http", "https", "blob", 0 };
+ static const char* protocols[] = {"http", "https", "blob", nullptr };
return protocols;
}
@@ -653,8 +736,8 @@ static gchar* webKitWebSrcGetUri(GstURIHandler* handler)
WebKitWebSrc* src = WEBKIT_WEB_SRC(handler);
gchar* ret;
- WTF::GMutexLocker locker(GST_OBJECT_GET_LOCK(src));
- ret = g_strdup(src->priv->uri);
+ WTF::GMutexLocker<GMutex> locker(*GST_OBJECT_GET_LOCK(src));
+ ret = g_strdup(src->priv->originalURI.data());
return ret;
}
@@ -668,11 +751,10 @@ static gboolean webKitWebSrcSetUri(GstURIHandler* handler, const gchar* uri, GEr
return FALSE;
}
- WTF::GMutexLocker locker(GST_OBJECT_GET_LOCK(src));
-
- g_free(priv->uri);
- priv->uri = 0;
+ WTF::GMutexLocker<GMutex> locker(*GST_OBJECT_GET_LOCK(src));
+ priv->redirectedURI = CString();
+ priv->originalURI = CString();
if (!uri)
return TRUE;
@@ -682,7 +764,7 @@ static gboolean webKitWebSrcSetUri(GstURIHandler* handler, const gchar* uri, GEr
return FALSE;
}
- priv->uri = g_strdup(url.string().utf8().data());
+ priv->originalURI = url.string().utf8();
return TRUE;
}
@@ -696,152 +778,122 @@ static void webKitWebSrcUriHandlerInit(gpointer gIface, gpointer)
iface->set_uri = webKitWebSrcSetUri;
}
-// appsrc callbacks
-
-static gboolean webKitWebSrcNeedDataMainCb(WebKitWebSrc* src)
-{
- WebKitWebSrcPrivate* priv = src->priv;
-
- ASSERT(isMainThread());
-
- WTF::GMutexLocker locker(GST_OBJECT_GET_LOCK(src));
- // already stopped
- if (!priv->needDataID)
- return FALSE;
-
- priv->paused = FALSE;
- priv->needDataID = 0;
- locker.unlock();
-
- if (priv->client)
- priv->client->setDefersLoading(false);
- return FALSE;
-}
-
-static void webKitWebSrcNeedDataCb(GstAppSrc*, guint length, gpointer userData)
+static void webKitWebSrcNeedData(WebKitWebSrc* src)
{
- WebKitWebSrc* src = WEBKIT_WEB_SRC(userData);
WebKitWebSrcPrivate* priv = src->priv;
- GST_DEBUG_OBJECT(src, "Need more data: %u", length);
+ GST_DEBUG_OBJECT(src, "Need more data");
- WTF::GMutexLocker locker(GST_OBJECT_GET_LOCK(src));
- if (priv->needDataID || !priv->paused) {
- return;
+ {
+ WTF::GMutexLocker<GMutex> locker(*GST_OBJECT_GET_LOCK(src));
+ if (!priv->paused)
+ return;
+ priv->paused = false;
+ if (priv->client) {
+ priv->client->setDefersLoading(false);
+ return;
+ }
}
- priv->needDataID = g_idle_add_full(G_PRIORITY_DEFAULT, (GSourceFunc) webKitWebSrcNeedDataMainCb, gst_object_ref(src), (GDestroyNotify) gst_object_unref);
+ GRefPtr<WebKitWebSrc> protector = WTF::ensureGRef(src);
+ priv->notifier->notify(MainThreadSourceNotification::NeedData, [protector] {
+ WebKitWebSrcPrivate* priv = protector->priv;
+ if (priv->resource)
+ priv->resource->setDefersLoading(false);
+ });
}
-static gboolean webKitWebSrcEnoughDataMainCb(WebKitWebSrc* src)
+static void webKitWebSrcEnoughData(WebKitWebSrc* src)
{
WebKitWebSrcPrivate* priv = src->priv;
- ASSERT(isMainThread());
-
- WTF::GMutexLocker locker(GST_OBJECT_GET_LOCK(src));
- // already stopped
- if (!priv->enoughDataID)
- return FALSE;
-
- priv->paused = TRUE;
- priv->enoughDataID = 0;
- locker.unlock();
-
- if (priv->client)
- priv->client->setDefersLoading(true);
- return FALSE;
-}
-
-static void webKitWebSrcEnoughDataCb(GstAppSrc*, gpointer userData)
-{
- WebKitWebSrc* src = WEBKIT_WEB_SRC(userData);
- WebKitWebSrcPrivate* priv = src->priv;
-
GST_DEBUG_OBJECT(src, "Have enough data");
- WTF::GMutexLocker locker(GST_OBJECT_GET_LOCK(src));
- if (priv->enoughDataID || priv->paused) {
- return;
+ {
+ WTF::GMutexLocker<GMutex> locker(*GST_OBJECT_GET_LOCK(src));
+ if (priv->paused)
+ return;
+ priv->paused = true;
+ if (priv->client) {
+ priv->client->setDefersLoading(true);
+ return;
+ }
}
- priv->enoughDataID = g_idle_add_full(G_PRIORITY_DEFAULT, (GSourceFunc) webKitWebSrcEnoughDataMainCb, gst_object_ref(src), (GDestroyNotify) gst_object_unref);
+ GRefPtr<WebKitWebSrc> protector = WTF::ensureGRef(src);
+ priv->notifier->notify(MainThreadSourceNotification::EnoughData, [protector] {
+ WebKitWebSrcPrivate* priv = protector->priv;
+ if (priv->resource)
+ priv->resource->setDefersLoading(true);
+ });
}
-static gboolean webKitWebSrcSeekMainCb(WebKitWebSrc* src)
+static gboolean webKitWebSrcSeek(WebKitWebSrc* src, guint64 offset)
{
WebKitWebSrcPrivate* priv = src->priv;
- ASSERT(isMainThread());
+ {
+ WTF::GMutexLocker<GMutex> locker(*GST_OBJECT_GET_LOCK(src));
+ if (offset == priv->offset && priv->requestedOffset == priv->offset)
+ return TRUE;
- WTF::GMutexLocker locker(GST_OBJECT_GET_LOCK(src));
- // already stopped
- if (!priv->seekID)
- return FALSE;
- locker.unlock();
+ if (!priv->seekable)
+ return FALSE;
- webKitWebSrcStop(src);
- webKitWebSrcStart(src);
-
- return FALSE;
-}
-
-static gboolean webKitWebSrcSeekDataCb(GstAppSrc*, guint64 offset, gpointer userData)
-{
- WebKitWebSrc* src = WEBKIT_WEB_SRC(userData);
- WebKitWebSrcPrivate* priv = src->priv;
+ priv->isSeeking = true;
+ priv->requestedOffset = offset;
+ }
- GST_DEBUG_OBJECT(src, "Seeking to offset: %" G_GUINT64_FORMAT, offset);
- WTF::GMutexLocker locker(GST_OBJECT_GET_LOCK(src));
- if (offset == priv->offset && priv->requestedOffset == priv->offset)
+ GST_DEBUG_OBJECT(src, "Seeking to offset: %" G_GUINT64_FORMAT, src->priv->requestedOffset);
+ if (priv->client) {
+ webKitWebSrcStop(src);
+ webKitWebSrcStart(src);
return TRUE;
+ }
- if (!priv->seekable)
- return FALSE;
-
- GST_DEBUG_OBJECT(src, "Doing range-request seek");
- priv->requestedOffset = offset;
-
- if (priv->seekID)
- g_source_remove(priv->seekID);
- priv->seekID = g_idle_add_full(G_PRIORITY_DEFAULT, (GSourceFunc) webKitWebSrcSeekMainCb, gst_object_ref(src), (GDestroyNotify) gst_object_unref);
+ GRefPtr<WebKitWebSrc> protector = WTF::ensureGRef(src);
+ priv->notifier->notify(MainThreadSourceNotification::Seek, [protector] {
+ webKitWebSrcStop(protector.get());
+ webKitWebSrcStart(protector.get());
+ });
return TRUE;
}
void webKitWebSrcSetMediaPlayer(WebKitWebSrc* src, WebCore::MediaPlayer* player)
{
ASSERT(player);
- WTF::GMutexLocker locker(GST_OBJECT_GET_LOCK(src));
+ ASSERT(src->priv->createdInMainThread);
+ WTF::GMutexLocker<GMutex> locker(*GST_OBJECT_GET_LOCK(src));
src->priv->player = player;
}
bool webKitSrcPassedCORSAccessCheck(WebKitWebSrc* src)
{
- return src->priv->corsAccessCheck == CORSSuccess;
+ return src->priv->didPassAccessControlCheck;
}
-StreamingClient::StreamingClient(WebKitWebSrc* src)
- : m_src(static_cast<GstElement*>(gst_object_ref(src)))
+StreamingClient::StreamingClient(WebKitWebSrc* src, ResourceRequest&& request)
+ : m_src(GST_ELEMENT(src))
+ , m_request(WTFMove(request))
{
}
StreamingClient::~StreamingClient()
{
- gst_object_unref(m_src);
}
char* StreamingClient::createReadBuffer(size_t requestedSize, size_t& actualSize)
{
- WebKitWebSrc* src = WEBKIT_WEB_SRC(m_src);
+ WebKitWebSrc* src = WEBKIT_WEB_SRC(m_src.get());
WebKitWebSrcPrivate* priv = src->priv;
ASSERT(!priv->buffer);
GstBuffer* buffer = gst_buffer_new_and_alloc(requestedSize);
- mapGstBuffer(buffer);
+ mapGstBuffer(buffer, GST_MAP_WRITE);
- WTF::GMutexLocker locker(GST_OBJECT_GET_LOCK(src));
+ WTF::GMutexLocker<GMutex> locker(*GST_OBJECT_GET_LOCK(src));
priv->buffer = adoptGRef(buffer);
locker.unlock();
@@ -849,29 +901,27 @@ char* StreamingClient::createReadBuffer(size_t requestedSize, size_t& actualSize
return getGstBufferDataPointer(buffer);
}
-void StreamingClient::handleResponseReceived(const ResourceResponse& response, CORSAccessCheckResult corsAccessCheck)
+void StreamingClient::handleResponseReceived(const ResourceResponse& response)
{
- WebKitWebSrc* src = WEBKIT_WEB_SRC(m_src);
+ WebKitWebSrc* src = WEBKIT_WEB_SRC(m_src.get());
WebKitWebSrcPrivate* priv = src->priv;
GST_DEBUG_OBJECT(src, "Received response: %d", response.httpStatusCode());
- if (response.httpStatusCode() >= 400 || corsAccessCheck == CORSFailure) {
- // Received error code or CORS check failed
- if (corsAccessCheck == CORSFailure)
- GST_ELEMENT_ERROR(src, RESOURCE, READ, ("Cross-origin stream load denied by Cross-Origin Resource Sharing policy."), (nullptr));
- else
- GST_ELEMENT_ERROR(src, RESOURCE, READ, ("Received %d HTTP error code", response.httpStatusCode()), (nullptr));
+ auto responseURI = response.url().string().utf8();
+ if (priv->originalURI != responseURI)
+ priv->redirectedURI = WTFMove(responseURI);
+
+ if (response.httpStatusCode() >= 400) {
+ GST_ELEMENT_ERROR(src, RESOURCE, READ, ("Received %d HTTP error code", response.httpStatusCode()), (nullptr));
gst_app_src_end_of_stream(priv->appsrc);
webKitWebSrcStop(src);
return;
}
- WTF::GMutexLocker locker(GST_OBJECT_GET_LOCK(src));
+ WTF::GMutexLocker<GMutex> locker(*GST_OBJECT_GET_LOCK(src));
- priv->corsAccessCheck = corsAccessCheck;
-
- if (priv->seekID) {
+ if (priv->isSeeking) {
GST_DEBUG_OBJECT(src, "Seek in progress, ignoring response");
return;
}
@@ -896,43 +946,9 @@ void StreamingClient::handleResponseReceived(const ResourceResponse& response, C
length += priv->requestedOffset;
priv->size = length >= 0 ? length : 0;
- priv->seekable = length > 0 && g_ascii_strcasecmp("none", response.httpHeaderField("Accept-Ranges").utf8().data());
-
- // Wait until we unlock to send notifications
- g_object_freeze_notify(G_OBJECT(src));
-
- GstTagList* tags = gst_tag_list_new_empty();
- String value = response.httpHeaderField("icy-name");
- if (!value.isEmpty()) {
- g_free(priv->iradioName);
- priv->iradioName = g_strdup(value.utf8().data());
- g_object_notify(G_OBJECT(src), "iradio-name");
- gst_tag_list_add(tags, GST_TAG_MERGE_REPLACE, GST_TAG_ORGANIZATION, priv->iradioName, NULL);
- }
- value = response.httpHeaderField("icy-genre");
- if (!value.isEmpty()) {
- g_free(priv->iradioGenre);
- priv->iradioGenre = g_strdup(value.utf8().data());
- g_object_notify(G_OBJECT(src), "iradio-genre");
- gst_tag_list_add(tags, GST_TAG_MERGE_REPLACE, GST_TAG_GENRE, priv->iradioGenre, NULL);
- }
- value = response.httpHeaderField("icy-url");
- if (!value.isEmpty()) {
- g_free(priv->iradioUrl);
- priv->iradioUrl = g_strdup(value.utf8().data());
- g_object_notify(G_OBJECT(src), "iradio-url");
- gst_tag_list_add(tags, GST_TAG_MERGE_REPLACE, GST_TAG_LOCATION, priv->iradioUrl, NULL);
- }
- value = response.httpHeaderField("icy-title");
- if (!value.isEmpty()) {
- g_free(priv->iradioTitle);
- priv->iradioTitle = g_strdup(value.utf8().data());
- g_object_notify(G_OBJECT(src), "iradio-title");
- gst_tag_list_add(tags, GST_TAG_MERGE_REPLACE, GST_TAG_TITLE, priv->iradioTitle, NULL);
- }
+ priv->seekable = length > 0 && g_ascii_strcasecmp("none", response.httpHeaderField(HTTPHeaderName::AcceptRanges).utf8().data());
locker.unlock();
- g_object_thaw_notify(G_OBJECT(src));
// notify size/duration
if (length > 0) {
@@ -940,33 +956,30 @@ void StreamingClient::handleResponseReceived(const ResourceResponse& response, C
} else
gst_app_src_set_size(priv->appsrc, -1);
- // icecast stuff
- value = response.httpHeaderField("icy-metaint");
- if (!value.isEmpty()) {
- gchar* endptr = 0;
- gint64 icyMetaInt = g_ascii_strtoll(value.utf8().data(), &endptr, 10);
-
- if (endptr && *endptr == '\0' && icyMetaInt > 0) {
- GRefPtr<GstCaps> caps = adoptGRef(gst_caps_new_simple("application/x-icy", "metadata-interval", G_TYPE_INT, (gint) icyMetaInt, NULL));
-
- gst_app_src_set_caps(priv->appsrc, caps.get());
- }
- } else
- gst_app_src_set_caps(priv->appsrc, 0);
-
- // notify tags
- if (gst_tag_list_is_empty(tags))
- gst_tag_list_unref(tags);
- else
- gst_pad_push_event(priv->srcpad, gst_event_new_tag(tags));
+ gst_app_src_set_caps(priv->appsrc, nullptr);
+
+ // Emit a GST_EVENT_CUSTOM_DOWNSTREAM_STICKY event to let GStreamer know about the HTTP headers sent and received.
+ GstStructure* httpHeaders = gst_structure_new_empty("http-headers");
+ gst_structure_set(httpHeaders, "uri", G_TYPE_STRING, priv->originalURI.data(), nullptr);
+ if (!priv->redirectedURI.isNull())
+ gst_structure_set(httpHeaders, "redirection-uri", G_TYPE_STRING, priv->redirectedURI.data(), nullptr);
+ GUniquePtr<GstStructure> headers(gst_structure_new_empty("request-headers"));
+ for (const auto& header : m_request.httpHeaderFields())
+ gst_structure_set(headers.get(), header.key.utf8().data(), G_TYPE_STRING, header.value.utf8().data(), nullptr);
+ gst_structure_set(httpHeaders, "request-headers", GST_TYPE_STRUCTURE, headers.get(), nullptr);
+ headers.reset(gst_structure_new_empty("response-headers"));
+ for (const auto& header : response.httpHeaderFields())
+ gst_structure_set(headers.get(), header.key.utf8().data(), G_TYPE_STRING, header.value.utf8().data(), nullptr);
+ gst_structure_set(httpHeaders, "response-headers", GST_TYPE_STRUCTURE, headers.get(), nullptr);
+ gst_pad_push_event(GST_BASE_SRC_PAD(priv->appsrc), gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM_STICKY, httpHeaders));
}
void StreamingClient::handleDataReceived(const char* data, int length)
{
- WebKitWebSrc* src = WEBKIT_WEB_SRC(m_src);
+ WebKitWebSrc* src = WEBKIT_WEB_SRC(m_src.get());
WebKitWebSrcPrivate* priv = src->priv;
- WTF::GMutexLocker locker(GST_OBJECT_GET_LOCK(src));
+ WTF::GMutexLocker<GMutex> locker(*GST_OBJECT_GET_LOCK(src));
GST_LOG_OBJECT(src, "Have %lld bytes of data", priv->buffer ? static_cast<long long>(gst_buffer_get_size(priv->buffer.get())) : length);
@@ -975,7 +988,7 @@ void StreamingClient::handleDataReceived(const char* data, int length)
if (priv->buffer)
unmapGstBuffer(priv->buffer.get());
- if (priv->seekID) {
+ if (priv->isSeeking) {
GST_DEBUG_OBJECT(src, "Seek in progress, ignoring data");
priv->buffer.clear();
return;
@@ -1025,111 +1038,135 @@ void StreamingClient::handleDataReceived(const char* data, int length)
GstFlowReturn ret = gst_app_src_push_buffer(priv->appsrc, priv->buffer.leakRef());
if (ret != GST_FLOW_OK && ret != GST_FLOW_EOS)
- GST_ELEMENT_ERROR(src, CORE, FAILED, (0), (0));
+ GST_ELEMENT_ERROR(src, CORE, FAILED, (nullptr), (nullptr));
}
void StreamingClient::handleNotifyFinished()
{
- WebKitWebSrc* src = WEBKIT_WEB_SRC(m_src);
+ WebKitWebSrc* src = WEBKIT_WEB_SRC(m_src.get());
WebKitWebSrcPrivate* priv = src->priv;
GST_DEBUG_OBJECT(src, "Have EOS");
- WTF::GMutexLocker locker(GST_OBJECT_GET_LOCK(src));
- if (!priv->seekID) {
+ WTF::GMutexLocker<GMutex> locker(*GST_OBJECT_GET_LOCK(src));
+ if (!priv->isSeeking) {
locker.unlock();
gst_app_src_end_of_stream(priv->appsrc);
}
}
-CachedResourceStreamingClient::CachedResourceStreamingClient(WebKitWebSrc* src, CachedResourceLoader* resourceLoader, const ResourceRequest& request, MediaPlayerClient::CORSMode corsMode)
- : StreamingClient(src)
+CachedResourceStreamingClient::CachedResourceStreamingClient(WebKitWebSrc* src, ResourceRequest&& request)
+ : StreamingClient(src, WTFMove(request))
{
- DataBufferingPolicy bufferingPolicy = request.url().protocolIs("blob") ? BufferData : DoNotBufferData;
- RequestOriginPolicy corsPolicy = corsMode != MediaPlayerClient::Unspecified ? PotentiallyCrossOriginEnabled : UseDefaultOriginRestrictionsForType;
- StoredCredentials allowCredentials = corsMode == MediaPlayerClient::UseCredentials ? AllowStoredCredentials : DoNotAllowStoredCredentials;
- ResourceLoaderOptions options(SendCallbacks, DoNotSniffContent, bufferingPolicy, allowCredentials, DoNotAskClientForCrossOriginCredentials, DoSecurityCheck, corsPolicy);
-
- CachedResourceRequest cacheRequest(request, options);
-
- if (corsMode != MediaPlayerClient::Unspecified) {
- m_origin = resourceLoader->document() ? resourceLoader->document()->securityOrigin() : nullptr;
- updateRequestForAccessControl(cacheRequest.mutableResourceRequest(), m_origin.get(), allowCredentials);
- }
-
- // TODO: Decide whether to use preflight mode for cross-origin requests (see http://wkbug.com/131484).
- m_resource = resourceLoader->requestRawResource(cacheRequest);
- if (m_resource)
- m_resource->addClient(this);
}
CachedResourceStreamingClient::~CachedResourceStreamingClient()
{
- if (m_resource) {
- m_resource->removeClient(this);
- m_resource = 0;
- }
}
-bool CachedResourceStreamingClient::loadFailed() const
+#if USE(SOUP)
+char* CachedResourceStreamingClient::getOrCreateReadBuffer(PlatformMediaResource&, size_t requestedSize, size_t& actualSize)
{
- return !m_resource;
+ return createReadBuffer(requestedSize, actualSize);
}
+#endif
-void CachedResourceStreamingClient::setDefersLoading(bool defers)
+void CachedResourceStreamingClient::responseReceived(PlatformMediaResource&, const ResourceResponse& response)
{
- if (m_resource)
- m_resource->setDefersLoading(defers);
+ WebKitWebSrcPrivate* priv = WEBKIT_WEB_SRC(m_src.get())->priv;
+ priv->didPassAccessControlCheck = priv->resource->didPassAccessControlCheck();
+ handleResponseReceived(response);
}
-char* CachedResourceStreamingClient::getOrCreateReadBuffer(CachedResource*, size_t requestedSize, size_t& actualSize)
+void CachedResourceStreamingClient::dataReceived(PlatformMediaResource&, const char* data, int length)
{
- return createReadBuffer(requestedSize, actualSize);
+ handleDataReceived(data, length);
}
-void CachedResourceStreamingClient::responseReceived(CachedResource* resource, const ResourceResponse& response)
+void CachedResourceStreamingClient::accessControlCheckFailed(PlatformMediaResource&, const ResourceError& error)
{
- CORSAccessCheckResult corsAccessCheck = CORSNoCheck;
- if (m_origin)
- corsAccessCheck = (m_origin->canRequest(response.url()) || resource->passesAccessControlCheck(m_origin.get())) ? CORSSuccess : CORSFailure;
- handleResponseReceived(response, corsAccessCheck);
+ WebKitWebSrc* src = WEBKIT_WEB_SRC(m_src.get());
+ GST_ELEMENT_ERROR(src, RESOURCE, READ, ("%s", error.localizedDescription().utf8().data()), (nullptr));
+ gst_app_src_end_of_stream(src->priv->appsrc);
+ webKitWebSrcStop(src);
}
-void CachedResourceStreamingClient::dataReceived(CachedResource*, const char* data, int length)
+void CachedResourceStreamingClient::loadFailed(PlatformMediaResource&, const ResourceError& error)
{
- handleDataReceived(data, length);
+ WebKitWebSrc* src = WEBKIT_WEB_SRC(m_src.get());
+
+ if (!error.isCancellation()) {
+ GST_ERROR_OBJECT(src, "Have failure: %s", error.localizedDescription().utf8().data());
+ GST_ELEMENT_ERROR(src, RESOURCE, FAILED, ("%s", error.localizedDescription().utf8().data()), (nullptr));
+ }
+
+ gst_app_src_end_of_stream(src->priv->appsrc);
}
-void CachedResourceStreamingClient::notifyFinished(CachedResource* resource)
+void CachedResourceStreamingClient::loadFinished(PlatformMediaResource&)
{
- if (resource->loadFailedOrCanceled()) {
- WebKitWebSrc* src = WEBKIT_WEB_SRC(m_src);
+ handleNotifyFinished();
+}
- if (!resource->wasCanceled()) {
- const ResourceError& error = resource->resourceError();
- GST_ERROR_OBJECT(src, "Have failure: %s", error.localizedDescription().utf8().data());
- GST_ELEMENT_ERROR(src, RESOURCE, FAILED, ("%s", error.localizedDescription().utf8().data()), (0));
+ResourceHandleStreamingClient::ResourceHandleStreamingClient(WebKitWebSrc* src, ResourceRequest&& request)
+ : StreamingClient(src, WTFMove(request))
+{
+ LockHolder locker(m_initializeRunLoopConditionMutex);
+ m_thread = createThread("ResourceHandleStreamingClient", [this] {
+ {
+ LockHolder locker(m_initializeRunLoopConditionMutex);
+ m_runLoop = &RunLoop::current();
+#if USE(SOUP)
+ m_session = std::make_unique<SoupNetworkSession>();
+ m_resource = ResourceHandle::create(*m_session, m_request, this, true, false);
+#else
+ // FIXME: This create will hit an assert in debug builds. See https://bugs.webkit.org/show_bug.cgi?id=167003.
+ m_resource = ResourceHandle::create(nullptr, m_request, this, true, false);
+#endif
+ m_initializeRunLoopCondition.notifyOne();
}
- gst_app_src_end_of_stream(src->priv->appsrc);
- return;
- }
+ if (!m_resource)
+ return;
- handleNotifyFinished();
+ m_runLoop->dispatch([this] { m_resource->setDefersLoading(false); });
+ m_runLoop->run();
+ });
+ m_initializeRunLoopCondition.wait(m_initializeRunLoopConditionMutex);
}
-ResourceHandleStreamingClient::ResourceHandleStreamingClient(WebKitWebSrc* src, const ResourceRequest& request)
- : StreamingClient(src)
+ResourceHandleStreamingClient::~ResourceHandleStreamingClient()
{
- m_resource = ResourceHandle::create(0 /*context*/, request, this, false, false);
+ if (m_thread) {
+ detachThread(m_thread);
+ m_thread = 0;
+ }
}
-ResourceHandleStreamingClient::~ResourceHandleStreamingClient()
+void ResourceHandleStreamingClient::cleanupAndStopRunLoop()
+{
+ m_resource->clearClient();
+ m_resource->cancel();
+ m_resource = nullptr;
+#if USE(SOUP)
+ m_session = nullptr;
+#endif
+ m_runLoop->stop();
+}
+
+void ResourceHandleStreamingClient::invalidate()
{
- if (m_resource) {
- m_resource->cancel();
- m_resource.release();
- m_resource = 0;
+ if (m_runLoop == &RunLoop::current()) {
+ cleanupAndStopRunLoop();
+ return;
}
+
+ LockHolder locker(m_terminateRunLoopConditionMutex);
+ m_runLoop->dispatch([this, protectedThis = makeRef(*this)] {
+ cleanupAndStopRunLoop();
+ LockHolder locker(m_terminateRunLoopConditionMutex);
+ m_terminateRunLoopCondition.notifyOne();
+ });
+ m_terminateRunLoopCondition.wait(m_terminateRunLoopConditionMutex);
}
bool ResourceHandleStreamingClient::loadFailed() const
@@ -1139,31 +1176,40 @@ bool ResourceHandleStreamingClient::loadFailed() const
void ResourceHandleStreamingClient::setDefersLoading(bool defers)
{
- if (m_resource)
- m_resource->setDefersLoading(defers);
+ m_runLoop->dispatch([this, protectedThis = makeRef(*this), defers] {
+ if (m_resource)
+ m_resource->setDefersLoading(defers);
+ });
}
+#if USE(SOUP)
char* ResourceHandleStreamingClient::getOrCreateReadBuffer(size_t requestedSize, size_t& actualSize)
{
return createReadBuffer(requestedSize, actualSize);
}
+#endif
-void ResourceHandleStreamingClient::willSendRequest(ResourceHandle*, ResourceRequest&, const ResourceResponse&)
+ResourceRequest ResourceHandleStreamingClient::willSendRequest(ResourceHandle*, ResourceRequest&& request, ResourceResponse&&)
{
+ return WTFMove(request);
}
-void ResourceHandleStreamingClient::didReceiveResponse(ResourceHandle*, const ResourceResponse& response)
+void ResourceHandleStreamingClient::didReceiveResponse(ResourceHandle*, ResourceResponse&& response)
{
- handleResponseReceived(response, CORSNoCheck);
+ if (m_resource)
+ handleResponseReceived(response);
}
-void ResourceHandleStreamingClient::didReceiveData(ResourceHandle*, const char* data, unsigned length, int)
+void ResourceHandleStreamingClient::didReceiveData(ResourceHandle*, const char* /* data */, unsigned /* length */, int)
{
ASSERT_NOT_REACHED();
}
-void ResourceHandleStreamingClient::didReceiveBuffer(ResourceHandle*, PassRefPtr<SharedBuffer> buffer, int /* encodedLength */)
+void ResourceHandleStreamingClient::didReceiveBuffer(ResourceHandle*, Ref<SharedBuffer>&& buffer, int /* encodedLength */)
{
+ if (!m_resource)
+ return;
+
// This pattern is suggested by SharedBuffer.h.
const char* segment;
unsigned position = 0;
@@ -1175,44 +1221,45 @@ void ResourceHandleStreamingClient::didReceiveBuffer(ResourceHandle*, PassRefPtr
void ResourceHandleStreamingClient::didFinishLoading(ResourceHandle*, double)
{
- handleNotifyFinished();
+ if (m_resource)
+ handleNotifyFinished();
}
void ResourceHandleStreamingClient::didFail(ResourceHandle*, const ResourceError& error)
{
- WebKitWebSrc* src = WEBKIT_WEB_SRC(m_src);
+ WebKitWebSrc* src = WEBKIT_WEB_SRC(m_src.get());
GST_ERROR_OBJECT(src, "Have failure: %s", error.localizedDescription().utf8().data());
- GST_ELEMENT_ERROR(src, RESOURCE, FAILED, ("%s", error.localizedDescription().utf8().data()), (0));
+ GST_ELEMENT_ERROR(src, RESOURCE, FAILED, ("%s", error.localizedDescription().utf8().data()), (nullptr));
gst_app_src_end_of_stream(src->priv->appsrc);
}
void ResourceHandleStreamingClient::wasBlocked(ResourceHandle*)
{
- WebKitWebSrc* src = WEBKIT_WEB_SRC(m_src);
+ WebKitWebSrc* src = WEBKIT_WEB_SRC(m_src.get());
GUniquePtr<gchar> uri;
GST_ERROR_OBJECT(src, "Request was blocked");
- WTF::GMutexLocker locker(GST_OBJECT_GET_LOCK(src));
- uri.reset(g_strdup(src->priv->uri));
+ WTF::GMutexLocker<GMutex> locker(*GST_OBJECT_GET_LOCK(src));
+ uri.reset(g_strdup(src->priv->originalURI.data()));
locker.unlock();
- GST_ELEMENT_ERROR(src, RESOURCE, OPEN_READ, ("Access to \"%s\" was blocked", uri.get()), (0));
+ GST_ELEMENT_ERROR(src, RESOURCE, OPEN_READ, ("Access to \"%s\" was blocked", uri.get()), (nullptr));
}
void ResourceHandleStreamingClient::cannotShowURL(ResourceHandle*)
{
- WebKitWebSrc* src = WEBKIT_WEB_SRC(m_src);
+ WebKitWebSrc* src = WEBKIT_WEB_SRC(m_src.get());
GUniquePtr<gchar> uri;
GST_ERROR_OBJECT(src, "Cannot show URL");
- WTF::GMutexLocker locker(GST_OBJECT_GET_LOCK(src));
- uri.reset(g_strdup(src->priv->uri));
+ WTF::GMutexLocker<GMutex> locker(*GST_OBJECT_GET_LOCK(src));
+ uri.reset(g_strdup(src->priv->originalURI.data()));
locker.unlock();
- GST_ELEMENT_ERROR(src, RESOURCE, OPEN_READ, ("Can't show \"%s\"", uri.get()), (0));
+ GST_ELEMENT_ERROR(src, RESOURCE, OPEN_READ, ("Can't show \"%s\"", uri.get()), (nullptr));
}
#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/eme/WebKitClearKeyDecryptorGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/eme/WebKitClearKeyDecryptorGStreamer.cpp
new file mode 100644
index 000000000..dc697089f
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/eme/WebKitClearKeyDecryptorGStreamer.cpp
@@ -0,0 +1,260 @@
+/* GStreamer ClearKey common encryption decryptor
+ *
+ * Copyright (C) 2016 Metrological
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
+ * Boston, MA 02110-1335, USA.
+ */
+
+#include "config.h"
+#include "WebKitClearKeyDecryptorGStreamer.h"
+
+#if (ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)) && USE(GSTREAMER)
+
+#include "GRefPtrGStreamer.h"
+#include <gcrypt.h>
+#include <gst/base/gstbytereader.h>
+#include <wtf/RunLoop.h>
+
+#define CLEARKEY_SIZE 16
+
+#define WEBKIT_MEDIA_CK_DECRYPT_GET_PRIVATE(obj) (G_TYPE_INSTANCE_GET_PRIVATE((obj), WEBKIT_TYPE_MEDIA_CK_DECRYPT, WebKitMediaClearKeyDecryptPrivate))
+struct _WebKitMediaClearKeyDecryptPrivate {
+ GRefPtr<GstBuffer> key;
+ gcry_cipher_hd_t handle;
+};
+
+static void webKitMediaClearKeyDecryptorFinalize(GObject*);
+static gboolean webKitMediaClearKeyDecryptorHandleKeyResponse(WebKitMediaCommonEncryptionDecrypt* self, GstEvent*);
+static gboolean webKitMediaClearKeyDecryptorSetupCipher(WebKitMediaCommonEncryptionDecrypt*);
+static gboolean webKitMediaClearKeyDecryptorDecrypt(WebKitMediaCommonEncryptionDecrypt*, GstBuffer* iv, GstBuffer* sample, unsigned subSamplesCount, GstBuffer* subSamples);
+static void webKitMediaClearKeyDecryptorReleaseCipher(WebKitMediaCommonEncryptionDecrypt*);
+
+GST_DEBUG_CATEGORY_STATIC(webkit_media_clear_key_decrypt_debug_category);
+#define GST_CAT_DEFAULT webkit_media_clear_key_decrypt_debug_category
+
+static GstStaticPadTemplate sinkTemplate = GST_STATIC_PAD_TEMPLATE("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS("application/x-cenc, original-media-type=(string)video/x-h264, protection-system=(string)" CLEAR_KEY_PROTECTION_SYSTEM_UUID "; "
+ "application/x-cenc, original-media-type=(string)audio/mpeg, protection-system=(string)" CLEAR_KEY_PROTECTION_SYSTEM_UUID));
+
+static GstStaticPadTemplate srcTemplate = GST_STATIC_PAD_TEMPLATE("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS("video/x-h264; audio/mpeg"));
+
+#define webkit_media_clear_key_decrypt_parent_class parent_class
+G_DEFINE_TYPE(WebKitMediaClearKeyDecrypt, webkit_media_clear_key_decrypt, WEBKIT_TYPE_MEDIA_CENC_DECRYPT);
+
+static void webkit_media_clear_key_decrypt_class_init(WebKitMediaClearKeyDecryptClass* klass)
+{
+ GObjectClass* gobjectClass = G_OBJECT_CLASS(klass);
+ gobjectClass->finalize = webKitMediaClearKeyDecryptorFinalize;
+
+ GstElementClass* elementClass = GST_ELEMENT_CLASS(klass);
+ gst_element_class_add_pad_template(elementClass, gst_static_pad_template_get(&sinkTemplate));
+ gst_element_class_add_pad_template(elementClass, gst_static_pad_template_get(&srcTemplate));
+
+ gst_element_class_set_static_metadata(elementClass,
+ "Decrypt content encrypted using ISOBMFF ClearKey Common Encryption",
+ GST_ELEMENT_FACTORY_KLASS_DECRYPTOR,
+ "Decrypts media that has been encrypted using ISOBMFF ClearKey Common Encryption.",
+ "Philippe Normand <philn@igalia.com>");
+
+ GST_DEBUG_CATEGORY_INIT(webkit_media_clear_key_decrypt_debug_category,
+ "webkitclearkey", 0, "ClearKey decryptor");
+
+ WebKitMediaCommonEncryptionDecryptClass* cencClass = WEBKIT_MEDIA_CENC_DECRYPT_CLASS(klass);
+ cencClass->protectionSystemId = CLEAR_KEY_PROTECTION_SYSTEM_UUID;
+ cencClass->handleKeyResponse = GST_DEBUG_FUNCPTR(webKitMediaClearKeyDecryptorHandleKeyResponse);
+ cencClass->setupCipher = GST_DEBUG_FUNCPTR(webKitMediaClearKeyDecryptorSetupCipher);
+ cencClass->decrypt = GST_DEBUG_FUNCPTR(webKitMediaClearKeyDecryptorDecrypt);
+ cencClass->releaseCipher = GST_DEBUG_FUNCPTR(webKitMediaClearKeyDecryptorReleaseCipher);
+
+ g_type_class_add_private(klass, sizeof(WebKitMediaClearKeyDecryptPrivate));
+}
+
+static void webkit_media_clear_key_decrypt_init(WebKitMediaClearKeyDecrypt* self)
+{
+ WebKitMediaClearKeyDecryptPrivate* priv = WEBKIT_MEDIA_CK_DECRYPT_GET_PRIVATE(self);
+
+ if (!gcry_check_version(GCRYPT_VERSION))
+ GST_ERROR_OBJECT(self, "Libgcrypt failed to initialize");
+
+ // Allocate a pool of 16k secure memory. This make the secure memory
+ // available and also drops privileges where needed.
+ gcry_control(GCRYCTL_INIT_SECMEM, 16384, 0);
+
+ gcry_control(GCRYCTL_INITIALIZATION_FINISHED, 0);
+
+ self->priv = priv;
+ new (priv) WebKitMediaClearKeyDecryptPrivate();
+}
+
+static void webKitMediaClearKeyDecryptorFinalize(GObject* object)
+{
+ WebKitMediaClearKeyDecrypt* self = WEBKIT_MEDIA_CK_DECRYPT(object);
+ WebKitMediaClearKeyDecryptPrivate* priv = self->priv;
+
+ priv->~WebKitMediaClearKeyDecryptPrivate();
+
+ GST_CALL_PARENT(G_OBJECT_CLASS, finalize, (object));
+}
+
+static gboolean webKitMediaClearKeyDecryptorHandleKeyResponse(WebKitMediaCommonEncryptionDecrypt* self, GstEvent* event)
+{
+ WebKitMediaClearKeyDecryptPrivate* priv = WEBKIT_MEDIA_CK_DECRYPT_GET_PRIVATE(WEBKIT_MEDIA_CK_DECRYPT(self));
+ const GstStructure* structure = gst_event_get_structure(event);
+
+ if (!gst_structure_has_name(structure, "drm-cipher"))
+ return FALSE;
+
+ const GValue* value = gst_structure_get_value(structure, "key");
+ priv->key.clear();
+ priv->key = adoptGRef(gst_buffer_copy(gst_value_get_buffer(value)));
+ return TRUE;
+}
+
+static gboolean webKitMediaClearKeyDecryptorSetupCipher(WebKitMediaCommonEncryptionDecrypt* self)
+{
+ WebKitMediaClearKeyDecryptPrivate* priv = WEBKIT_MEDIA_CK_DECRYPT_GET_PRIVATE(WEBKIT_MEDIA_CK_DECRYPT(self));
+ gcry_error_t error;
+
+ ASSERT(priv->key);
+ if (!priv->key) {
+ GST_ERROR_OBJECT(self, "Decryption key not provided");
+ return false;
+ }
+
+ error = gcry_cipher_open(&(priv->handle), GCRY_CIPHER_AES128, GCRY_CIPHER_MODE_CTR, GCRY_CIPHER_SECURE);
+ if (error) {
+ GST_ERROR_OBJECT(self, "Failed to create AES 128 CTR cipher handle: %s", gpg_strerror(error));
+ return false;
+ }
+
+ GstMapInfo keyMap;
+ if (!gst_buffer_map(priv->key.get(), &keyMap, GST_MAP_READ)) {
+ GST_ERROR_OBJECT(self, "Failed to map decryption key");
+ return false;
+ }
+
+ ASSERT(keyMap.size == CLEARKEY_SIZE);
+ error = gcry_cipher_setkey(priv->handle, keyMap.data, keyMap.size);
+ gst_buffer_unmap(priv->key.get(), &keyMap);
+ if (error) {
+ GST_ERROR_OBJECT(self, "gcry_cipher_setkey failed: %s", gpg_strerror(error));
+ return false;
+ }
+
+ return true;
+}
+
+static gboolean webKitMediaClearKeyDecryptorDecrypt(WebKitMediaCommonEncryptionDecrypt* self, GstBuffer* ivBuffer, GstBuffer* buffer, unsigned subSampleCount, GstBuffer* subSamplesBuffer)
+{
+ GstMapInfo ivMap;
+ if (!gst_buffer_map(ivBuffer, &ivMap, GST_MAP_READ)) {
+ GST_ERROR_OBJECT(self, "Failed to map IV");
+ return false;
+ }
+
+ uint8_t ctr[CLEARKEY_SIZE];
+ if (ivMap.size == 8) {
+ memset(ctr + 8, 0, 8);
+ memcpy(ctr, ivMap.data, 8);
+ } else {
+ ASSERT(ivMap.size == CLEARKEY_SIZE);
+ memcpy(ctr, ivMap.data, CLEARKEY_SIZE);
+ }
+ gst_buffer_unmap(ivBuffer, &ivMap);
+
+ WebKitMediaClearKeyDecryptPrivate* priv = WEBKIT_MEDIA_CK_DECRYPT_GET_PRIVATE(WEBKIT_MEDIA_CK_DECRYPT(self));
+ gcry_error_t error = gcry_cipher_setctr(priv->handle, ctr, CLEARKEY_SIZE);
+ if (error) {
+ GST_ERROR_OBJECT(self, "gcry_cipher_setctr failed: %s", gpg_strerror(error));
+ return false;
+ }
+
+ GstMapInfo map;
+ gboolean bufferMapped = gst_buffer_map(buffer, &map, static_cast<GstMapFlags>(GST_MAP_READWRITE));
+ if (!bufferMapped) {
+ GST_ERROR_OBJECT(self, "Failed to map buffer");
+ return false;
+ }
+
+ GstMapInfo subSamplesMap;
+ gboolean subsamplesBufferMapped = gst_buffer_map(subSamplesBuffer, &subSamplesMap, GST_MAP_READ);
+ if (!subsamplesBufferMapped) {
+ GST_ERROR_OBJECT(self, "Failed to map subsample buffer");
+ gst_buffer_unmap(buffer, &map);
+ return false;
+ }
+
+ GstByteReader* reader = gst_byte_reader_new(subSamplesMap.data, subSamplesMap.size);
+ unsigned position = 0;
+ unsigned sampleIndex = 0;
+
+ GST_DEBUG_OBJECT(self, "position: %d, size: %zu", position, map.size);
+
+ while (position < map.size) {
+ guint16 nBytesClear = 0;
+ guint32 nBytesEncrypted = 0;
+
+ if (sampleIndex < subSampleCount) {
+ if (!gst_byte_reader_get_uint16_be(reader, &nBytesClear)
+ || !gst_byte_reader_get_uint32_be(reader, &nBytesEncrypted)) {
+ GST_DEBUG_OBJECT(self, "unsupported");
+ gst_byte_reader_free(reader);
+ gst_buffer_unmap(buffer, &map);
+ gst_buffer_unmap(subSamplesBuffer, &subSamplesMap);
+ return false;
+ }
+
+ sampleIndex++;
+ } else {
+ nBytesClear = 0;
+ nBytesEncrypted = map.size - position;
+ }
+
+ GST_TRACE_OBJECT(self, "%d bytes clear (todo=%zu)", nBytesClear, map.size - position);
+ position += nBytesClear;
+ if (nBytesEncrypted) {
+ GST_TRACE_OBJECT(self, "%d bytes encrypted (todo=%zu)", nBytesEncrypted, map.size - position);
+ error = gcry_cipher_decrypt(priv->handle, map.data + position, nBytesEncrypted, 0, 0);
+ if (error) {
+ GST_ERROR_OBJECT(self, "decryption failed: %s", gpg_strerror(error));
+ gst_byte_reader_free(reader);
+ gst_buffer_unmap(buffer, &map);
+ gst_buffer_unmap(subSamplesBuffer, &subSamplesMap);
+ return false;
+ }
+ position += nBytesEncrypted;
+ }
+ }
+
+ gst_byte_reader_free(reader);
+ gst_buffer_unmap(buffer, &map);
+ gst_buffer_unmap(subSamplesBuffer, &subSamplesMap);
+ return true;
+}
+
+static void webKitMediaClearKeyDecryptorReleaseCipher(WebKitMediaCommonEncryptionDecrypt* self)
+{
+ WebKitMediaClearKeyDecryptPrivate* priv = WEBKIT_MEDIA_CK_DECRYPT_GET_PRIVATE(WEBKIT_MEDIA_CK_DECRYPT(self));
+ gcry_cipher_close(priv->handle);
+}
+
+#endif // (ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)) && USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/eme/WebKitClearKeyDecryptorGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/eme/WebKitClearKeyDecryptorGStreamer.h
new file mode 100644
index 000000000..30cfa299b
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/eme/WebKitClearKeyDecryptorGStreamer.h
@@ -0,0 +1,57 @@
+/* GStreamer ClearKey common encryption decryptor
+ *
+ * Copyright (C) 2016 Metrological
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#if (ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)) && USE(GSTREAMER)
+
+#include "WebKitCommonEncryptionDecryptorGStreamer.h"
+
+#define CLEAR_KEY_PROTECTION_SYSTEM_UUID "58147ec8-0423-4659-92e6-f52c5ce8c3cc"
+#define CLEAR_KEY_PROTECTION_SYSTEM_ID "org.w3.clearkey"
+
+G_BEGIN_DECLS
+
+#define WEBKIT_TYPE_MEDIA_CK_DECRYPT (webkit_media_clear_key_decrypt_get_type())
+#define WEBKIT_MEDIA_CK_DECRYPT(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj), WEBKIT_TYPE_MEDIA_CK_DECRYPT, WebKitMediaClearKeyDecrypt))
+#define WEBKIT_MEDIA_CK_DECRYPT_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), WEBKIT_TYPE_MEDIA_CK_DECRYPT, WebKitMediaClearKeyDecryptClass))
+#define WEBKIT_IS_MEDIA_CK_DECRYPT(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj), WEBKIT_TYPE_MEDIA_CK_DECRYPT))
+#define WEBKIT_IS_MEDIA_CK_DECRYPT_CLASS(obj) (G_TYPE_CHECK_CLASS_TYPE((klass), WEBKIT_TYPE_MEDIA_CK_DECRYPT))
+
+typedef struct _WebKitMediaClearKeyDecrypt WebKitMediaClearKeyDecrypt;
+typedef struct _WebKitMediaClearKeyDecryptClass WebKitMediaClearKeyDecryptClass;
+typedef struct _WebKitMediaClearKeyDecryptPrivate WebKitMediaClearKeyDecryptPrivate;
+
+GType webkit_media_clear_key_decrypt_get_type(void);
+
+struct _WebKitMediaClearKeyDecrypt {
+ WebKitMediaCommonEncryptionDecrypt parent;
+
+ WebKitMediaClearKeyDecryptPrivate* priv;
+};
+
+struct _WebKitMediaClearKeyDecryptClass {
+ WebKitMediaCommonEncryptionDecryptClass parentClass;
+};
+
+G_END_DECLS
+
+#endif // (ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)) && USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/eme/WebKitCommonEncryptionDecryptorGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/eme/WebKitCommonEncryptionDecryptorGStreamer.cpp
new file mode 100644
index 000000000..389808050
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/eme/WebKitCommonEncryptionDecryptorGStreamer.cpp
@@ -0,0 +1,362 @@
+/* GStreamer ClearKey common encryption decryptor
+ *
+ * Copyright (C) 2013 YouView TV Ltd. <alex.ashley@youview.com>
+ * Copyright (C) 2016 Metrological
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
+ * Boston, MA 02110-1335, USA.
+ */
+
+#include "config.h"
+#include "WebKitCommonEncryptionDecryptorGStreamer.h"
+
+#if (ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)) && USE(GSTREAMER)
+
+#include "GRefPtrGStreamer.h"
+#include <wtf/Condition.h>
+#include <wtf/RunLoop.h>
+
+#define WEBKIT_MEDIA_CENC_DECRYPT_GET_PRIVATE(obj) (G_TYPE_INSTANCE_GET_PRIVATE((obj), WEBKIT_TYPE_MEDIA_CENC_DECRYPT, WebKitMediaCommonEncryptionDecryptPrivate))
+struct _WebKitMediaCommonEncryptionDecryptPrivate {
+ GRefPtr<GstEvent> protectionEvent;
+
+ bool keyReceived;
+ Lock mutex;
+ Condition condition;
+};
+
+static GstStateChangeReturn webKitMediaCommonEncryptionDecryptorChangeState(GstElement*, GstStateChange transition);
+static void webKitMediaCommonEncryptionDecryptorFinalize(GObject*);
+static GstCaps* webkitMediaCommonEncryptionDecryptTransformCaps(GstBaseTransform*, GstPadDirection, GstCaps*, GstCaps*);
+static GstFlowReturn webkitMediaCommonEncryptionDecryptTransformInPlace(GstBaseTransform*, GstBuffer*);
+static gboolean webkitMediaCommonEncryptionDecryptSinkEventHandler(GstBaseTransform*, GstEvent*);
+
+static gboolean webKitMediaCommonEncryptionDecryptDefaultSetupCipher(WebKitMediaCommonEncryptionDecrypt*);
+static void webKitMediaCommonEncryptionDecryptDefaultReleaseCipher(WebKitMediaCommonEncryptionDecrypt*);
+
+GST_DEBUG_CATEGORY_STATIC(webkit_media_common_encryption_decrypt_debug_category);
+#define GST_CAT_DEFAULT webkit_media_common_encryption_decrypt_debug_category
+
+#define webkit_media_common_encryption_decrypt_parent_class parent_class
+G_DEFINE_TYPE(WebKitMediaCommonEncryptionDecrypt, webkit_media_common_encryption_decrypt, GST_TYPE_BASE_TRANSFORM);
+
+static void webkit_media_common_encryption_decrypt_class_init(WebKitMediaCommonEncryptionDecryptClass* klass)
+{
+ GObjectClass* gobjectClass = G_OBJECT_CLASS(klass);
+ gobjectClass->finalize = webKitMediaCommonEncryptionDecryptorFinalize;
+
+ GST_DEBUG_CATEGORY_INIT(webkit_media_common_encryption_decrypt_debug_category,
+ "webkitcenc", 0, "Common Encryption base class");
+
+ GstElementClass* elementClass = GST_ELEMENT_CLASS(klass);
+ elementClass->change_state = GST_DEBUG_FUNCPTR(webKitMediaCommonEncryptionDecryptorChangeState);
+
+ GstBaseTransformClass* baseTransformClass = GST_BASE_TRANSFORM_CLASS(klass);
+ baseTransformClass->transform_ip = GST_DEBUG_FUNCPTR(webkitMediaCommonEncryptionDecryptTransformInPlace);
+ baseTransformClass->transform_caps = GST_DEBUG_FUNCPTR(webkitMediaCommonEncryptionDecryptTransformCaps);
+ baseTransformClass->transform_ip_on_passthrough = FALSE;
+ baseTransformClass->sink_event = GST_DEBUG_FUNCPTR(webkitMediaCommonEncryptionDecryptSinkEventHandler);
+
+ klass->setupCipher = GST_DEBUG_FUNCPTR(webKitMediaCommonEncryptionDecryptDefaultSetupCipher);
+ klass->releaseCipher = GST_DEBUG_FUNCPTR(webKitMediaCommonEncryptionDecryptDefaultReleaseCipher);
+
+ g_type_class_add_private(klass, sizeof(WebKitMediaCommonEncryptionDecryptPrivate));
+}
+
+static void webkit_media_common_encryption_decrypt_init(WebKitMediaCommonEncryptionDecrypt* self)
+{
+ WebKitMediaCommonEncryptionDecryptPrivate* priv = WEBKIT_MEDIA_CENC_DECRYPT_GET_PRIVATE(self);
+
+ self->priv = priv;
+ new (priv) WebKitMediaCommonEncryptionDecryptPrivate();
+
+ GstBaseTransform* base = GST_BASE_TRANSFORM(self);
+ gst_base_transform_set_in_place(base, TRUE);
+ gst_base_transform_set_passthrough(base, FALSE);
+ gst_base_transform_set_gap_aware(base, FALSE);
+}
+
+static void webKitMediaCommonEncryptionDecryptorFinalize(GObject* object)
+{
+ WebKitMediaCommonEncryptionDecrypt* self = WEBKIT_MEDIA_CENC_DECRYPT(object);
+ WebKitMediaCommonEncryptionDecryptPrivate* priv = self->priv;
+
+ priv->~WebKitMediaCommonEncryptionDecryptPrivate();
+ GST_CALL_PARENT(G_OBJECT_CLASS, finalize, (object));
+}
+
+static GstCaps* webkitMediaCommonEncryptionDecryptTransformCaps(GstBaseTransform* base, GstPadDirection direction, GstCaps* caps, GstCaps* filter)
+{
+ if (direction == GST_PAD_UNKNOWN)
+ return nullptr;
+
+ GST_DEBUG_OBJECT(base, "direction: %s, caps: %" GST_PTR_FORMAT " filter: %" GST_PTR_FORMAT, (direction == GST_PAD_SRC) ? "src" : "sink", caps, filter);
+
+ GstCaps* transformedCaps = gst_caps_new_empty();
+ WebKitMediaCommonEncryptionDecrypt* self = WEBKIT_MEDIA_CENC_DECRYPT(base);
+ WebKitMediaCommonEncryptionDecryptClass* klass = WEBKIT_MEDIA_CENC_DECRYPT_GET_CLASS(self);
+
+ unsigned size = gst_caps_get_size(caps);
+ for (unsigned i = 0; i < size; ++i) {
+ GstStructure* incomingStructure = gst_caps_get_structure(caps, i);
+ GRefPtr<GstStructure> outgoingStructure = nullptr;
+
+ if (direction == GST_PAD_SINK) {
+ if (!gst_structure_has_field(incomingStructure, "original-media-type"))
+ continue;
+
+ outgoingStructure = adoptGRef(gst_structure_copy(incomingStructure));
+ gst_structure_set_name(outgoingStructure.get(), gst_structure_get_string(outgoingStructure.get(), "original-media-type"));
+
+ // Filter out the DRM related fields from the down-stream caps.
+ for (int j = 0; j < gst_structure_n_fields(incomingStructure); ++j) {
+ const gchar* fieldName = gst_structure_nth_field_name(incomingStructure, j);
+
+ if (g_str_has_prefix(fieldName, "protection-system")
+ || g_str_has_prefix(fieldName, "original-media-type"))
+ gst_structure_remove_field(outgoingStructure.get(), fieldName);
+ }
+ } else {
+ outgoingStructure = adoptGRef(gst_structure_copy(incomingStructure));
+ // Filter out the video related fields from the up-stream caps,
+ // because they are not relevant to the input caps of this element and
+ // can cause caps negotiation failures with adaptive bitrate streams.
+ for (int index = gst_structure_n_fields(outgoingStructure.get()) - 1; index >= 0; --index) {
+ const gchar* fieldName = gst_structure_nth_field_name(outgoingStructure.get(), index);
+ GST_TRACE("Check field \"%s\" for removal", fieldName);
+
+ if (!g_strcmp0(fieldName, "base-profile")
+ || !g_strcmp0(fieldName, "codec_data")
+ || !g_strcmp0(fieldName, "height")
+ || !g_strcmp0(fieldName, "framerate")
+ || !g_strcmp0(fieldName, "level")
+ || !g_strcmp0(fieldName, "pixel-aspect-ratio")
+ || !g_strcmp0(fieldName, "profile")
+ || !g_strcmp0(fieldName, "rate")
+ || !g_strcmp0(fieldName, "width")) {
+ gst_structure_remove_field(outgoingStructure.get(), fieldName);
+ GST_TRACE("Removing field %s", fieldName);
+ }
+ }
+
+ gst_structure_set(outgoingStructure.get(), "protection-system", G_TYPE_STRING, klass->protectionSystemId,
+ "original-media-type", G_TYPE_STRING, gst_structure_get_name(incomingStructure), nullptr);
+
+ gst_structure_set_name(outgoingStructure.get(), "application/x-cenc");
+ }
+
+ bool duplicate = false;
+ unsigned size = gst_caps_get_size(transformedCaps);
+
+ for (unsigned index = 0; !duplicate && index < size; ++index) {
+ GstStructure* structure = gst_caps_get_structure(transformedCaps, index);
+ if (gst_structure_is_equal(structure, outgoingStructure.get()))
+ duplicate = true;
+ }
+
+ if (!duplicate)
+ gst_caps_append_structure(transformedCaps, outgoingStructure.leakRef());
+ }
+
+ if (filter) {
+ GstCaps* intersection;
+
+ GST_DEBUG_OBJECT(base, "Using filter caps %" GST_PTR_FORMAT, filter);
+ intersection = gst_caps_intersect_full(transformedCaps, filter, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref(transformedCaps);
+ transformedCaps = intersection;
+ }
+
+ GST_DEBUG_OBJECT(base, "returning %" GST_PTR_FORMAT, transformedCaps);
+ return transformedCaps;
+}
+
// In-place transform: decrypts one buffer using the GstProtectionMeta
// attached to it. Blocks (up to 5 seconds) until a decryption key has been
// delivered via the sink event handler. Every error path must strip the
// protection meta before returning so downstream never sees stale DRM data.
static GstFlowReturn webkitMediaCommonEncryptionDecryptTransformInPlace(GstBaseTransform* base, GstBuffer* buffer)
{
    WebKitMediaCommonEncryptionDecrypt* self = WEBKIT_MEDIA_CENC_DECRYPT(base);
    WebKitMediaCommonEncryptionDecryptPrivate* priv = WEBKIT_MEDIA_CENC_DECRYPT_GET_PRIVATE(self);
    LockHolder locker(priv->mutex);

    // The key might not have been received yet. Wait for it.
    if (!priv->keyReceived) {
        GST_DEBUG_OBJECT(self, "key not available yet, waiting for it");
        // Below PAUSED no key exchange can make progress, so waiting would
        // only stall the streaming thread.
        if (GST_STATE(GST_ELEMENT(self)) < GST_STATE_PAUSED || (GST_STATE_TARGET(GST_ELEMENT(self)) != GST_STATE_VOID_PENDING && GST_STATE_TARGET(GST_ELEMENT(self)) < GST_STATE_PAUSED)) {
            GST_ERROR_OBJECT(self, "can't process key requests in less than PAUSED state");
            return GST_FLOW_NOT_SUPPORTED;
        }
        // The condition is also notified on PAUSED->READY (teardown); in that
        // case keyReceived stays false and we fail below instead of hanging.
        priv->condition.waitFor(priv->mutex, Seconds(5), [priv] {
            return priv->keyReceived;
        });
        if (!priv->keyReceived) {
            GST_ERROR_OBJECT(self, "key not available");
            return GST_FLOW_NOT_SUPPORTED;
        }
        GST_DEBUG_OBJECT(self, "key received, continuing");
    }

    GstProtectionMeta* protectionMeta = reinterpret_cast<GstProtectionMeta*>(gst_buffer_get_protection_meta(buffer));
    if (!protectionMeta) {
        GST_ERROR_OBJECT(self, "Failed to get GstProtection metadata from buffer %p", buffer);
        return GST_FLOW_NOT_SUPPORTED;
    }

    unsigned ivSize;
    if (!gst_structure_get_uint(protectionMeta->info, "iv_size", &ivSize)) {
        GST_ERROR_OBJECT(self, "Failed to get iv_size");
        gst_buffer_remove_meta(buffer, reinterpret_cast<GstMeta*>(protectionMeta));
        return GST_FLOW_NOT_SUPPORTED;
    }

    gboolean encrypted;
    if (!gst_structure_get_boolean(protectionMeta->info, "encrypted", &encrypted)) {
        GST_ERROR_OBJECT(self, "Failed to get encrypted flag");
        gst_buffer_remove_meta(buffer, reinterpret_cast<GstMeta*>(protectionMeta));
        return GST_FLOW_NOT_SUPPORTED;
    }

    // A zero IV size or a clear buffer means there is nothing to decrypt:
    // drop the meta and pass the buffer through untouched.
    if (!ivSize || !encrypted) {
        gst_buffer_remove_meta(buffer, reinterpret_cast<GstMeta*>(protectionMeta));
        return GST_FLOW_OK;
    }

    GST_DEBUG_OBJECT(base, "protection meta: %" GST_PTR_FORMAT, protectionMeta->info);

    unsigned subSampleCount;
    if (!gst_structure_get_uint(protectionMeta->info, "subsample_count", &subSampleCount)) {
        GST_ERROR_OBJECT(self, "Failed to get subsample_count");
        gst_buffer_remove_meta(buffer, reinterpret_cast<GstMeta*>(protectionMeta));
        return GST_FLOW_NOT_SUPPORTED;
    }

    // Subsample layout (clear/encrypted byte ranges) is only present for
    // subsample encryption; full-sample encryption has a count of zero.
    const GValue* value;
    GstBuffer* subSamplesBuffer = nullptr;
    if (subSampleCount) {
        value = gst_structure_get_value(protectionMeta->info, "subsamples");
        if (!value) {
            GST_ERROR_OBJECT(self, "Failed to get subsamples");
            gst_buffer_remove_meta(buffer, reinterpret_cast<GstMeta*>(protectionMeta));
            return GST_FLOW_NOT_SUPPORTED;
        }
        subSamplesBuffer = gst_value_get_buffer(value);
    }

    // Cipher lifetime brackets the decrypt call; releaseCipher must run on
    // every path after a successful setupCipher.
    WebKitMediaCommonEncryptionDecryptClass* klass = WEBKIT_MEDIA_CENC_DECRYPT_GET_CLASS(self);
    if (!klass->setupCipher(self)) {
        GST_ERROR_OBJECT(self, "Failed to configure cipher");
        gst_buffer_remove_meta(buffer, reinterpret_cast<GstMeta*>(protectionMeta));
        return GST_FLOW_NOT_SUPPORTED;
    }

    value = gst_structure_get_value(protectionMeta->info, "iv");
    if (!value) {
        GST_ERROR_OBJECT(self, "Failed to get IV for sample");
        klass->releaseCipher(self);
        gst_buffer_remove_meta(buffer, reinterpret_cast<GstMeta*>(protectionMeta));
        return GST_FLOW_NOT_SUPPORTED;
    }

    GstBuffer* ivBuffer = gst_value_get_buffer(value);
    GST_TRACE_OBJECT(self, "decrypting");
    if (!klass->decrypt(self, ivBuffer, buffer, subSampleCount, subSamplesBuffer)) {
        GST_ERROR_OBJECT(self, "Decryption failed");
        klass->releaseCipher(self);
        gst_buffer_remove_meta(buffer, reinterpret_cast<GstMeta*>(protectionMeta));
        return GST_FLOW_NOT_SUPPORTED;
    }

    klass->releaseCipher(self);
    gst_buffer_remove_meta(buffer, reinterpret_cast<GstMeta*>(protectionMeta));
    return GST_FLOW_OK;
}
+
+
+static gboolean webkitMediaCommonEncryptionDecryptSinkEventHandler(GstBaseTransform* trans, GstEvent* event)
+{
+ WebKitMediaCommonEncryptionDecrypt* self = WEBKIT_MEDIA_CENC_DECRYPT(trans);
+ WebKitMediaCommonEncryptionDecryptPrivate* priv = WEBKIT_MEDIA_CENC_DECRYPT_GET_PRIVATE(self);
+ WebKitMediaCommonEncryptionDecryptClass* klass = WEBKIT_MEDIA_CENC_DECRYPT_GET_CLASS(self);
+ gboolean result = FALSE;
+
+ switch (GST_EVENT_TYPE(event)) {
+ case GST_EVENT_PROTECTION: {
+ const char* systemId = nullptr;
+
+ gst_event_parse_protection(event, &systemId, nullptr, nullptr);
+ GST_TRACE_OBJECT(self, "received protection event for %s", systemId);
+
+ if (!g_strcmp0(systemId, klass->protectionSystemId)) {
+ GST_DEBUG_OBJECT(self, "sending protection event to the pipeline");
+ gst_element_post_message(GST_ELEMENT(self),
+ gst_message_new_element(GST_OBJECT(self),
+ gst_structure_new("drm-key-needed", "event", GST_TYPE_EVENT, event, nullptr)));
+ }
+
+ gst_event_unref(event);
+ result = TRUE;
+ break;
+ }
+ case GST_EVENT_CUSTOM_DOWNSTREAM_OOB: {
+ if (klass->handleKeyResponse(self, event)) {
+ GST_DEBUG_OBJECT(self, "key received");
+ priv->keyReceived = true;
+ priv->condition.notifyOne();
+ }
+
+ gst_event_unref(event);
+ result = TRUE;
+ break;
+ }
+ default:
+ result = GST_BASE_TRANSFORM_CLASS(parent_class)->sink_event(trans, event);
+ break;
+ }
+
+ return result;
+}
+
+static GstStateChangeReturn webKitMediaCommonEncryptionDecryptorChangeState(GstElement* element, GstStateChange transition)
+{
+ WebKitMediaCommonEncryptionDecrypt* self = WEBKIT_MEDIA_CENC_DECRYPT(element);
+ WebKitMediaCommonEncryptionDecryptPrivate* priv = WEBKIT_MEDIA_CENC_DECRYPT_GET_PRIVATE(self);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ GST_DEBUG_OBJECT(self, "PAUSED->READY");
+ priv->condition.notifyOne();
+ break;
+ default:
+ break;
+ }
+
+ GstStateChangeReturn result = GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
+
+ // Add post-transition code here.
+
+ return result;
+}
+
+
+static gboolean webKitMediaCommonEncryptionDecryptDefaultSetupCipher(WebKitMediaCommonEncryptionDecrypt*)
+{
+ return true;
+}
+
+
// Default releaseCipher vfunc: nothing to release; subclasses with cipher
// state override this.
static void webKitMediaCommonEncryptionDecryptDefaultReleaseCipher(WebKitMediaCommonEncryptionDecrypt*)
{
}
+
+#endif // (ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)) && USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/eme/WebKitCommonEncryptionDecryptorGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/eme/WebKitCommonEncryptionDecryptorGStreamer.h
new file mode 100644
index 000000000..dcae82790
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/eme/WebKitCommonEncryptionDecryptorGStreamer.h
@@ -0,0 +1,64 @@
+/* GStreamer ClearKey common encryption decryptor
+ *
+ * Copyright (C) 2013 YouView TV Ltd. <alex.ashley@youview.com>
+ * Copyright (C) 2016 Metrological
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#if (ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)) && USE(GSTREAMER)
+
+#include <gst/base/gstbasetransform.h>
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+#define WEBKIT_TYPE_MEDIA_CENC_DECRYPT (webkit_media_common_encryption_decrypt_get_type())
+#define WEBKIT_MEDIA_CENC_DECRYPT(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj), WEBKIT_TYPE_MEDIA_CENC_DECRYPT, WebKitMediaCommonEncryptionDecrypt))
+#define WEBKIT_MEDIA_CENC_DECRYPT_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), WEBKIT_TYPE_MEDIA_CENC_DECRYPT, WebKitMediaCommonEncryptionDecryptClass))
+#define WEBKIT_MEDIA_CENC_DECRYPT_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj), WEBKIT_TYPE_MEDIA_CENC_DECRYPT, WebKitMediaCommonEncryptionDecryptClass))
+
+#define WEBKIT_IS_MEDIA_CENC_DECRYPT(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj), WEBKIT_TYPE_MEDIA_CENC_DECRYPT))
+#define WEBKIT_IS_MEDIA_CENC_DECRYPT_CLASS(obj) (G_TYPE_CHECK_CLASS_TYPE((klass), WEBKIT_TYPE_MEDIA_CENC_DECRYPT))
+
+typedef struct _WebKitMediaCommonEncryptionDecrypt WebKitMediaCommonEncryptionDecrypt;
+typedef struct _WebKitMediaCommonEncryptionDecryptClass WebKitMediaCommonEncryptionDecryptClass;
+typedef struct _WebKitMediaCommonEncryptionDecryptPrivate WebKitMediaCommonEncryptionDecryptPrivate;
+
+GType webkit_media_common_encryption_decrypt_get_type(void);
+
+struct _WebKitMediaCommonEncryptionDecrypt {
+ GstBaseTransform parent;
+
+ WebKitMediaCommonEncryptionDecryptPrivate* priv;
+};
+
+struct _WebKitMediaCommonEncryptionDecryptClass {
+ GstBaseTransformClass parentClass;
+
+ const char* protectionSystemId;
+ gboolean (*handleKeyResponse)(WebKitMediaCommonEncryptionDecrypt*, GstEvent* event);
+ gboolean (*setupCipher)(WebKitMediaCommonEncryptionDecrypt*);
+ gboolean (*decrypt)(WebKitMediaCommonEncryptionDecrypt*, GstBuffer* ivBuffer, GstBuffer* buffer, unsigned subSamplesCount, GstBuffer* subSamplesBuffer);
+ void (*releaseCipher)(WebKitMediaCommonEncryptionDecrypt*);
+};
+
+G_END_DECLS
+
+#endif // (ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)) && USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.cpp b/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.cpp
new file mode 100644
index 000000000..c4f2b06bc
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.cpp
@@ -0,0 +1,1188 @@
+/*
+ * Copyright (C) 2016 Metrological Group B.V.
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * aint with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "config.h"
+#include "AppendPipeline.h"
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+#include "AudioTrackPrivateGStreamer.h"
+#include "GRefPtrGStreamer.h"
+#include "GStreamerMediaDescription.h"
+#include "GStreamerMediaSample.h"
+#include "GStreamerUtilities.h"
+#include "InbandTextTrackPrivateGStreamer.h"
+#include "MediaDescription.h"
+#include "SourceBufferPrivateGStreamer.h"
+#include "VideoTrackPrivateGStreamer.h"
+
+#include <gst/app/gstappsink.h>
+#include <gst/app/gstappsrc.h>
+#include <gst/gst.h>
+#include <gst/pbutils/pbutils.h>
+#include <gst/video/video.h>
+#include <wtf/Condition.h>
+#include <wtf/glib/GLibUtilities.h>
+
+GST_DEBUG_CATEGORY_EXTERN(webkit_mse_debug);
+#define GST_CAT_DEFAULT webkit_mse_debug
+
+namespace WebCore {
+
// Maps an AppendState to a human-readable name; used only for trace logging.
static const char* dumpAppendState(AppendPipeline::AppendState appendState)
{
    switch (appendState) {
    case AppendPipeline::AppendState::Invalid:
        return "Invalid";
    case AppendPipeline::AppendState::NotStarted:
        return "NotStarted";
    case AppendPipeline::AppendState::Ongoing:
        return "Ongoing";
    case AppendPipeline::AppendState::KeyNegotiation:
        return "KeyNegotiation";
    case AppendPipeline::AppendState::DataStarve:
        return "DataStarve";
    case AppendPipeline::AppendState::Sampling:
        return "Sampling";
    case AppendPipeline::AppendState::LastSample:
        return "LastSample";
    case AppendPipeline::AppendState::Aborting:
        return "Aborting";
    default:
        return "(unknown)";
    }
}
+
+static void appendPipelineAppsrcNeedData(GstAppSrc*, guint, AppendPipeline*);
+static void appendPipelineDemuxerPadAdded(GstElement*, GstPad*, AppendPipeline*);
+static void appendPipelineDemuxerPadRemoved(GstElement*, GstPad*, AppendPipeline*);
+static void appendPipelineAppsinkCapsChanged(GObject*, GParamSpec*, AppendPipeline*);
+static GstPadProbeReturn appendPipelineAppsrcDataLeaving(GstPad*, GstPadProbeInfo*, AppendPipeline*);
+#if !LOG_DISABLED
+static GstPadProbeReturn appendPipelinePadProbeDebugInformation(GstPad*, GstPadProbeInfo*, struct PadProbeInformation*);
+#endif
+static GstPadProbeReturn appendPipelineDemuxerBlackHolePadProbe(GstPad*, GstPadProbeInfo*, gpointer);
+static GstFlowReturn appendPipelineAppsinkNewSample(GstElement*, AppendPipeline*);
+static void appendPipelineAppsinkEOS(GstElement*, AppendPipeline*);
+
// Bus "sync-message::need-context" handler; forwards to the AppendPipeline.
// NOTE(review): sync bus messages are delivered from the posting thread, not
// the main thread — confirm handleNeedContextSyncMessage is safe there.
static void appendPipelineNeedContextMessageCallback(GstBus*, GstMessage* message, AppendPipeline* appendPipeline)
{
    GST_TRACE("received callback");
    appendPipeline->handleNeedContextSyncMessage(message);
}
+
// Bus "message::application" handler (signal watch); forwards to the
// AppendPipeline's main-thread application-message dispatcher.
static void appendPipelineApplicationMessageCallback(GstBus*, GstMessage* message, AppendPipeline* appendPipeline)
{
    appendPipeline->handleApplicationMessage(message);
}
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Bus "message::element" handler; used to route "drm-key-needed" element
// messages to the AppendPipeline (legacy EME only).
static void appendPipelineElementMessageCallback(GstBus*, GstMessage* message, AppendPipeline* appendPipeline)
{
    appendPipeline->handleElementMessage(message);
}
+#endif
+
+AppendPipeline::AppendPipeline(Ref<MediaSourceClientGStreamerMSE> mediaSourceClient, Ref<SourceBufferPrivateGStreamer> sourceBufferPrivate, MediaPlayerPrivateGStreamerMSE& playerPrivate)
+ : m_mediaSourceClient(mediaSourceClient.get())
+ , m_sourceBufferPrivate(sourceBufferPrivate.get())
+ , m_playerPrivate(&playerPrivate)
+ , m_id(0)
+ , m_appsrcAtLeastABufferLeft(false)
+ , m_appsrcNeedDataReceived(false)
+ , m_appsrcDataLeavingProbeId(0)
+ , m_appendState(AppendState::NotStarted)
+ , m_abortPending(false)
+ , m_streamType(Unknown)
+{
+ ASSERT(WTF::isMainThread());
+
+ GST_TRACE("Creating AppendPipeline (%p)", this);
+
+ // FIXME: give a name to the pipeline, maybe related with the track it's managing.
+ // The track name is still unknown at this time, though.
+ m_pipeline = gst_pipeline_new(nullptr);
+
+ m_bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
+ gst_bus_add_signal_watch(m_bus.get());
+ gst_bus_enable_sync_message_emission(m_bus.get());
+
+ g_signal_connect(m_bus.get(), "sync-message::need-context", G_CALLBACK(appendPipelineNeedContextMessageCallback), this);
+ g_signal_connect(m_bus.get(), "message::application", G_CALLBACK(appendPipelineApplicationMessageCallback), this);
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+ g_signal_connect(m_bus.get(), "message::element", G_CALLBACK(appendPipelineElementMessageCallback), this);
+#endif
+
+ // We assign the created instances here instead of adoptRef() because gst_bin_add_many()
+ // below will already take the initial reference and we need an additional one for us.
+ m_appsrc = gst_element_factory_make("appsrc", nullptr);
+ m_demux = gst_element_factory_make("qtdemux", nullptr);
+ m_appsink = gst_element_factory_make("appsink", nullptr);
+
+ gst_app_sink_set_emit_signals(GST_APP_SINK(m_appsink.get()), TRUE);
+ gst_base_sink_set_sync(GST_BASE_SINK(m_appsink.get()), FALSE);
+
+ GRefPtr<GstPad> appsinkPad = adoptGRef(gst_element_get_static_pad(m_appsink.get(), "sink"));
+ g_signal_connect(appsinkPad.get(), "notify::caps", G_CALLBACK(appendPipelineAppsinkCapsChanged), this);
+
+ setAppsrcDataLeavingProbe();
+
+#if !LOG_DISABLED
+ GRefPtr<GstPad> demuxerPad = adoptGRef(gst_element_get_static_pad(m_demux.get(), "sink"));
+ m_demuxerDataEnteringPadProbeInformation.appendPipeline = this;
+ m_demuxerDataEnteringPadProbeInformation.description = "demuxer data entering";
+ m_demuxerDataEnteringPadProbeInformation.probeId = gst_pad_add_probe(demuxerPad.get(), GST_PAD_PROBE_TYPE_BUFFER, reinterpret_cast<GstPadProbeCallback>(appendPipelinePadProbeDebugInformation), &m_demuxerDataEnteringPadProbeInformation, nullptr);
+ m_appsinkDataEnteringPadProbeInformation.appendPipeline = this;
+ m_appsinkDataEnteringPadProbeInformation.description = "appsink data entering";
+ m_appsinkDataEnteringPadProbeInformation.probeId = gst_pad_add_probe(appsinkPad.get(), GST_PAD_PROBE_TYPE_BUFFER, reinterpret_cast<GstPadProbeCallback>(appendPipelinePadProbeDebugInformation), &m_appsinkDataEnteringPadProbeInformation, nullptr);
+#endif
+
+ // These signals won't be connected outside of the lifetime of "this".
+ g_signal_connect(m_appsrc.get(), "need-data", G_CALLBACK(appendPipelineAppsrcNeedData), this);
+ g_signal_connect(m_demux.get(), "pad-added", G_CALLBACK(appendPipelineDemuxerPadAdded), this);
+ g_signal_connect(m_demux.get(), "pad-removed", G_CALLBACK(appendPipelineDemuxerPadRemoved), this);
+ g_signal_connect(m_appsink.get(), "new-sample", G_CALLBACK(appendPipelineAppsinkNewSample), this);
+ g_signal_connect(m_appsink.get(), "eos", G_CALLBACK(appendPipelineAppsinkEOS), this);
+
+ // Add_many will take ownership of a reference. That's why we used an assignment before.
+ gst_bin_add_many(GST_BIN(m_pipeline.get()), m_appsrc.get(), m_demux.get(), nullptr);
+ gst_element_link(m_appsrc.get(), m_demux.get());
+
+ gst_element_set_state(m_pipeline.get(), GST_STATE_READY);
+};
+
+AppendPipeline::~AppendPipeline()
+{
+ ASSERT(WTF::isMainThread());
+
+ {
+ LockHolder locker(m_newSampleLock);
+ setAppendState(AppendState::Invalid);
+ m_newSampleCondition.notifyOne();
+ }
+
+ {
+ LockHolder locker(m_padAddRemoveLock);
+ m_playerPrivate = nullptr;
+ m_padAddRemoveCondition.notifyOne();
+ }
+
+ GST_TRACE("Destroying AppendPipeline (%p)", this);
+
+ // FIXME: Maybe notify appendComplete here?
+
+ if (m_pipeline) {
+ ASSERT(m_bus);
+ gst_bus_remove_signal_watch(m_bus.get());
+ gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);
+ m_pipeline = nullptr;
+ }
+
+ if (m_appsrc) {
+ removeAppsrcDataLeavingProbe();
+ g_signal_handlers_disconnect_by_data(m_appsrc.get(), this);
+ m_appsrc = nullptr;
+ }
+
+ if (m_demux) {
+#if !LOG_DISABLED
+ GRefPtr<GstPad> demuxerPad = adoptGRef(gst_element_get_static_pad(m_demux.get(), "sink"));
+ gst_pad_remove_probe(demuxerPad.get(), m_demuxerDataEnteringPadProbeInformation.probeId);
+#endif
+
+ g_signal_handlers_disconnect_by_data(m_demux.get(), this);
+ m_demux = nullptr;
+ }
+
+ if (m_appsink) {
+ GRefPtr<GstPad> appsinkPad = adoptGRef(gst_element_get_static_pad(m_appsink.get(), "sink"));
+ g_signal_handlers_disconnect_by_data(appsinkPad.get(), this);
+ g_signal_handlers_disconnect_by_data(m_appsink.get(), this);
+
+#if !LOG_DISABLED
+ gst_pad_remove_probe(appsinkPad.get(), m_appsinkDataEnteringPadProbeInformation.probeId);
+#endif
+
+ m_appsink = nullptr;
+ }
+
+ m_appsinkCaps = nullptr;
+ m_demuxerSrcPadCaps = nullptr;
+};
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+void AppendPipeline::dispatchPendingDecryptionKey()
+{
+ ASSERT(m_decryptor);
+ ASSERT(m_pendingKey);
+ ASSERT(m_appendState == KeyNegotiation);
+ GST_TRACE("dispatching key to append pipeline %p", this);
+ gst_element_send_event(m_pipeline.get(), gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM_OOB,
+ gst_structure_new("drm-cipher", "key", GST_TYPE_BUFFER, m_pendingKey.get(), nullptr)));
+ m_pendingKey.clear();
+ setAppendState(AppendState::Ongoing);
+}
+
+void AppendPipeline::dispatchDecryptionKey(GstBuffer* buffer)
+{
+ if (m_appendState == AppendState::KeyNegotiation) {
+ GST_TRACE("append pipeline %p in key negotiation", this);
+ m_pendingKey = buffer;
+ if (m_decryptor)
+ dispatchPendingDecryptionKey();
+ else
+ GST_TRACE("no decryptor yet, waiting for it");
+ } else
+ GST_TRACE("append pipeline %p not in key negotiation", this);
+}
+#endif
+
// Detaches this AppendPipeline from the player before the player dies:
// invalidates the append state, wakes any streaming-thread waiters under
// both locks, and only then stops the pipeline so nothing can deadlock.
void AppendPipeline::clearPlayerPrivate()
{
    ASSERT(WTF::isMainThread());
    GST_DEBUG("cleaning private player");

    {
        LockHolder locker(m_newSampleLock);
        // Make sure that AppendPipeline won't process more data from now on and
        // instruct handleNewSample to abort itself from now on as well.
        setAppendState(AppendState::Invalid);

        // Awake any pending handleNewSample operation in the streaming thread.
        m_newSampleCondition.notifyOne();
    }

    {
        LockHolder locker(m_padAddRemoveLock);
        m_playerPrivate = nullptr;
        m_padAddRemoveCondition.notifyOne();
    }

    // And now that no handleNewSample operations will remain stalled waiting
    // for the main thread, stop the pipeline.
    if (m_pipeline)
        gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);
}
+
// Handles bus "need-context" sync messages. A request for the preferred
// decryption system switches the append into key negotiation; the message is
// then forwarded to the player, which owns the actual context setup.
void AppendPipeline::handleNeedContextSyncMessage(GstMessage* message)
{
    const gchar* contextType = nullptr;
    gst_message_parse_context_type(message, &contextType);
    GST_TRACE("context type: %s", contextType);
    if (!g_strcmp0(contextType, "drm-preferred-decryption-system-id"))
        setAppendState(AppendPipeline::AppendState::KeyNegotiation);

    // MediaPlayerPrivateGStreamerBase will take care of setting up encryption.
    if (m_playerPrivate)
        m_playerPrivate->handleSyncMessage(message);
}
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Handles bus element messages (main thread). A "drm-key-needed" message from
// the decryptor moves the append into key negotiation and hands the embedded
// protection event to the player so it can run the license exchange.
void AppendPipeline::handleElementMessage(GstMessage* message)
{
    ASSERT(WTF::isMainThread());

    const GstStructure* structure = gst_message_get_structure(message);
    GST_TRACE("%s message from %s", gst_structure_get_name(structure), GST_MESSAGE_SRC_NAME(message));
    if (m_playerPrivate && gst_structure_has_name(structure, "drm-key-needed")) {
        setAppendState(AppendPipeline::AppendState::KeyNegotiation);

        GST_DEBUG("sending drm-key-needed message from %s to the player", GST_MESSAGE_SRC_NAME(message));
        GRefPtr<GstEvent> event;
        gst_structure_get(structure, "event", GST_TYPE_EVENT, &event.outPtr(), nullptr);
        m_playerPrivate->handleProtectionEvent(event.get());
    }
}
+#endif
+
// Main-thread dispatcher for application messages that the streaming-thread
// callbacks post to the bus; each structure name maps to one private handler.
void AppendPipeline::handleApplicationMessage(GstMessage* message)
{
    ASSERT(WTF::isMainThread());

    const GstStructure* structure = gst_message_get_structure(message);

    // appsrc ran dry and asks for more data.
    if (gst_structure_has_name(structure, "appsrc-need-data")) {
        handleAppsrcNeedDataReceived();
        return;
    }

    // First buffer has left appsrc; need-data can be trusted from now on.
    if (gst_structure_has_name(structure, "appsrc-buffer-left")) {
        handleAppsrcAtLeastABufferLeft();
        return;
    }

    // The demuxer exposed a new source pad; link it to the appsink.
    if (gst_structure_has_name(structure, "demuxer-connect-to-appsink")) {
        GRefPtr<GstPad> demuxerSrcPad;
        gst_structure_get(structure, "demuxer-src-pad", G_TYPE_OBJECT, &demuxerSrcPad.outPtr(), nullptr);
        ASSERT(demuxerSrcPad);
        connectDemuxerSrcPadToAppsink(demuxerSrcPad.get());
        return;
    }

    if (gst_structure_has_name(structure, "appsink-caps-changed")) {
        appsinkCapsChanged();
        return;
    }

    // A parsed sample is ready to be handed to the SourceBuffer.
    if (gst_structure_has_name(structure, "appsink-new-sample")) {
        GRefPtr<GstSample> newSample;
        gst_structure_get(structure, "new-sample", GST_TYPE_SAMPLE, &newSample.outPtr(), nullptr);

        appsinkNewSample(newSample.get());
        return;
    }

    if (gst_structure_has_name(structure, "appsink-eos")) {
        appsinkEOS();
        return;
    }

    // Unknown application messages indicate a programming error.
    ASSERT_NOT_REACHED();
}
+
// Records that appsrc requested more data. Early need-data signals (before
// any buffer has actually left appsrc) are discarded as unreliable.
void AppendPipeline::handleAppsrcNeedDataReceived()
{
    if (!m_appsrcAtLeastABufferLeft) {
        GST_TRACE("discarding until at least a buffer leaves appsrc");
        return;
    }

    ASSERT(m_appendState == AppendState::Ongoing || m_appendState == AppendState::Sampling);
    ASSERT(!m_appsrcNeedDataReceived);

    GST_TRACE("received need-data from appsrc");

    m_appsrcNeedDataReceived = true;
    // need-data is one of the conditions for finishing the current append.
    checkEndOfAppend();
}
+
+// Marks that at least one buffer has flowed out of appsrc, enabling
+// need-data processing from now on (see handleAppsrcNeedDataReceived()).
+void AppendPipeline::handleAppsrcAtLeastABufferLeft()
+{
+    m_appsrcAtLeastABufferLeft = true;
+    GST_TRACE("received buffer-left from appsrc");
+#if LOG_DISABLED
+    // With logging disabled the probe has served its only purpose; when
+    // logging is enabled it is deliberately kept installed so it can keep
+    // tracing every buffer that leaves appsrc.
+    removeAppsrcDataLeavingProbe();
+#endif
+}
+
+// Lazily assigns and returns an id unique within this pipeline's stream type
+// (audio/video/text counters are independent, so uniqueness is the pair
+// (m_streamType, m_id)). Must only be called once the stream type is known.
+gint AppendPipeline::id()
+{
+    ASSERT(WTF::isMainThread());
+
+    if (m_id)
+        return m_id;
+
+    // Per-type monotonic counters; ids start at 1 so 0 means "unassigned".
+    static gint s_totalAudio = 0;
+    static gint s_totalVideo = 0;
+    static gint s_totalText = 0;
+
+    switch (m_streamType) {
+    case Audio:
+        m_id = ++s_totalAudio;
+        break;
+    case Video:
+        m_id = ++s_totalVideo;
+        break;
+    case Text:
+        m_id = ++s_totalText;
+        break;
+    case Unknown:
+    case Invalid:
+        GST_ERROR("Trying to get id for a pipeline of Unknown/Invalid type");
+        ASSERT_NOT_REACHED();
+        break;
+    }
+
+    GST_DEBUG("streamType=%d, id=%d", static_cast<int>(m_streamType), m_id);
+
+    return m_id;
+}
+
+// Drives the append state machine. Validates the requested transition
+// against the diagram below, performs its side effects (starting the
+// pipeline, flushing a pending buffer, resetting after an abort, notifying
+// the SourceBuffer) and, for some transitions, immediately chains into a
+// follow-up transition via nextAppendState. Invalid transitions are logged
+// and asserted, but otherwise leave the current state untouched.
+void AppendPipeline::setAppendState(AppendState newAppendState)
+{
+    ASSERT(WTF::isMainThread());
+    // Valid transitions:
+    // NotStarted-->Ongoing-->DataStarve-->NotStarted
+    //           |         |            `->Aborting-->NotStarted
+    //           |         `->Sampling-···->Sampling-->LastSample-->NotStarted
+    //           |         |                        `->Aborting-->NotStarted
+    //           |         `->KeyNegotiation-->Ongoing-->[...]
+    //           `->Aborting-->NotStarted
+    AppendState oldAppendState = m_appendState;
+    AppendState nextAppendState = AppendState::Invalid;
+
+    if (oldAppendState != newAppendState)
+        GST_TRACE("%s --> %s", dumpAppendState(oldAppendState), dumpAppendState(newAppendState));
+
+    bool ok = false;
+
+    switch (oldAppendState) {
+    case AppendState::NotStarted:
+        switch (newAppendState) {
+        case AppendState::Ongoing:
+            ok = true;
+            gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
+            break;
+        case AppendState::NotStarted:
+            ok = true;
+            // A buffer queued while an abort was pending is pushed now that
+            // the machine is idle again.
+            if (m_pendingBuffer) {
+                GST_TRACE("pushing pending buffer %p", m_pendingBuffer.get());
+                gst_app_src_push_buffer(GST_APP_SRC(appsrc()), m_pendingBuffer.leakRef());
+                nextAppendState = AppendState::Ongoing;
+            }
+            break;
+        case AppendState::Aborting:
+            ok = true;
+            nextAppendState = AppendState::NotStarted;
+            break;
+        case AppendState::Invalid:
+            ok = true;
+            break;
+        default:
+            break;
+        }
+        break;
+    case AppendState::KeyNegotiation:
+        switch (newAppendState) {
+        case AppendState::Ongoing:
+        case AppendState::Invalid:
+            ok = true;
+            break;
+        default:
+            break;
+        }
+        break;
+    case AppendState::Ongoing:
+        switch (newAppendState) {
+        case AppendState::KeyNegotiation:
+        case AppendState::Sampling:
+        case AppendState::Invalid:
+            ok = true;
+            break;
+        case AppendState::DataStarve:
+            // The append finished without producing any sample.
+            ok = true;
+            GST_DEBUG("received all pending samples");
+            m_sourceBufferPrivate->didReceiveAllPendingSamples();
+            if (m_abortPending)
+                nextAppendState = AppendState::Aborting;
+            else
+                nextAppendState = AppendState::NotStarted;
+            break;
+        default:
+            break;
+        }
+        break;
+    case AppendState::DataStarve:
+        switch (newAppendState) {
+        case AppendState::NotStarted:
+        case AppendState::Invalid:
+            ok = true;
+            break;
+        case AppendState::Aborting:
+            ok = true;
+            nextAppendState = AppendState::NotStarted;
+            break;
+        default:
+            break;
+        }
+        break;
+    case AppendState::Sampling:
+        switch (newAppendState) {
+        case AppendState::Sampling:
+        case AppendState::Invalid:
+            ok = true;
+            break;
+        case AppendState::LastSample:
+            // All samples of this append have been processed.
+            ok = true;
+            GST_DEBUG("received all pending samples");
+            m_sourceBufferPrivate->didReceiveAllPendingSamples();
+            if (m_abortPending)
+                nextAppendState = AppendState::Aborting;
+            else
+                nextAppendState = AppendState::NotStarted;
+            break;
+        default:
+            break;
+        }
+        break;
+    case AppendState::LastSample:
+        switch (newAppendState) {
+        case AppendState::NotStarted:
+        case AppendState::Invalid:
+            ok = true;
+            break;
+        case AppendState::Aborting:
+            ok = true;
+            nextAppendState = AppendState::NotStarted;
+            break;
+        default:
+            break;
+        }
+        break;
+    case AppendState::Aborting:
+        switch (newAppendState) {
+        case AppendState::NotStarted:
+            // Completing an abort: tear the pipeline back down to READY and
+            // clear the pending-abort flag.
+            ok = true;
+            resetPipeline();
+            m_abortPending = false;
+            nextAppendState = AppendState::NotStarted;
+            break;
+        case AppendState::Invalid:
+            ok = true;
+            break;
+        default:
+            break;
+        }
+        break;
+    case AppendState::Invalid:
+        // Invalid is terminal: anything is accepted but nothing happens.
+        ok = true;
+        break;
+    }
+
+    if (ok)
+        m_appendState = newAppendState;
+    else
+        GST_ERROR("Invalid append state transition %s --> %s", dumpAppendState(oldAppendState), dumpAppendState(newAppendState));
+
+    ASSERT(ok);
+
+    if (nextAppendState != AppendState::Invalid)
+        setAppendState(nextAppendState);
+}
+
+void AppendPipeline::parseDemuxerSrcPadCaps(GstCaps* demuxerSrcPadCaps)
+{
+ ASSERT(WTF::isMainThread());
+
+ m_demuxerSrcPadCaps = adoptGRef(demuxerSrcPadCaps);
+ m_streamType = WebCore::MediaSourceStreamTypeGStreamer::Unknown;
+
+ GstStructure* structure = gst_caps_get_structure(m_demuxerSrcPadCaps.get(), 0);
+ bool sizeConfigured = false;
+
+#if GST_CHECK_VERSION(1, 5, 3) && (ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA))
+ if (gst_structure_has_name(structure, "application/x-cenc")) {
+ // Any previous decryptor should have been removed from the pipeline by disconnectFromAppSinkFromStreamingThread()
+ ASSERT(!m_decryptor);
+
+ m_decryptor = WebCore::createGstDecryptor(gst_structure_get_string(structure, "protection-system"));
+ if (!m_decryptor) {
+ GST_ERROR("decryptor not found for caps: %" GST_PTR_FORMAT, m_demuxerSrcPadCaps.get());
+ return;
+ }
+
+ const gchar* originalMediaType = gst_structure_get_string(structure, "original-media-type");
+
+ if (!MediaPlayerPrivateGStreamerMSE::supportsCodecs(originalMediaType)) {
+ m_presentationSize = WebCore::FloatSize();
+ m_streamType = WebCore::MediaSourceStreamTypeGStreamer::Invalid;
+ } else if (g_str_has_prefix(originalMediaType, "video/")) {
+ int width = 0;
+ int height = 0;
+ float finalHeight = 0;
+
+ if (gst_structure_get_int(structure, "width", &width) && gst_structure_get_int(structure, "height", &height)) {
+ int ratioNumerator = 1;
+ int ratioDenominator = 1;
+
+ gst_structure_get_fraction(structure, "pixel-aspect-ratio", &ratioNumerator, &ratioDenominator);
+ finalHeight = height * ((float) ratioDenominator / (float) ratioNumerator);
+ }
+
+ m_presentationSize = WebCore::FloatSize(width, finalHeight);
+ m_streamType = WebCore::MediaSourceStreamTypeGStreamer::Video;
+ } else {
+ m_presentationSize = WebCore::FloatSize();
+ if (g_str_has_prefix(originalMediaType, "audio/"))
+ m_streamType = WebCore::MediaSourceStreamTypeGStreamer::Audio;
+ else if (g_str_has_prefix(originalMediaType, "text/"))
+ m_streamType = WebCore::MediaSourceStreamTypeGStreamer::Text;
+ }
+ sizeConfigured = true;
+ }
+#endif
+
+ if (!sizeConfigured) {
+ const char* structureName = gst_structure_get_name(structure);
+ GstVideoInfo info;
+
+ if (!MediaPlayerPrivateGStreamerMSE::supportsCodecs(structureName)) {
+ m_presentationSize = WebCore::FloatSize();
+ m_streamType = WebCore::MediaSourceStreamTypeGStreamer::Invalid;
+ } else if (g_str_has_prefix(structureName, "video/") && gst_video_info_from_caps(&info, demuxerSrcPadCaps)) {
+ float width, height;
+
+ width = info.width;
+ height = info.height * ((float) info.par_d / (float) info.par_n);
+
+ m_presentationSize = WebCore::FloatSize(width, height);
+ m_streamType = WebCore::MediaSourceStreamTypeGStreamer::Video;
+ } else {
+ m_presentationSize = WebCore::FloatSize();
+ if (g_str_has_prefix(structureName, "audio/"))
+ m_streamType = WebCore::MediaSourceStreamTypeGStreamer::Audio;
+ else if (g_str_has_prefix(structureName, "text/"))
+ m_streamType = WebCore::MediaSourceStreamTypeGStreamer::Text;
+ }
+ }
+}
+
+// Main-thread reaction to a caps change on the appsink sink pad. A change
+// from null caps means a new track just appeared (notify the player); any
+// change means a (new) initialization segment should be reported upwards.
+void AppendPipeline::appsinkCapsChanged()
+{
+    ASSERT(WTF::isMainThread());
+
+    if (!m_appsink)
+        return;
+
+    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(m_appsink.get(), "sink"));
+    GRefPtr<GstCaps> caps = adoptGRef(gst_pad_get_current_caps(pad.get()));
+
+    if (!caps)
+        return;
+
+    // This means that we're right after a new track has appeared. Otherwise, it's a caps change inside the same track.
+    bool previousCapsWereNull = !m_appsinkCaps;
+
+    if (m_appsinkCaps != caps) {
+        m_appsinkCaps = WTFMove(caps);
+        if (m_playerPrivate && previousCapsWereNull)
+            m_playerPrivate->trackDetected(this, m_oldTrack, m_track);
+        didReceiveInitializationSegment();
+        gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
+    }
+}
+
+// Called when appsrc signalled need-data (the end-of-append mark) or after a
+// sample was processed. If the mark has arrived while an append is active,
+// closes the append: Ongoing (no samples seen) ends in DataStarve, Sampling
+// ends in LastSample.
+void AppendPipeline::checkEndOfAppend()
+{
+    ASSERT(WTF::isMainThread());
+
+    if (!m_appsrcNeedDataReceived || (m_appendState != AppendState::Ongoing && m_appendState != AppendState::Sampling))
+        return;
+
+    GST_TRACE("end of append data mark was received");
+
+    switch (m_appendState) {
+    case AppendState::Ongoing:
+        GST_TRACE("DataStarve");
+        m_appsrcNeedDataReceived = false;
+        setAppendState(AppendState::DataStarve);
+        break;
+    case AppendState::Sampling:
+        GST_TRACE("LastSample");
+        m_appsrcNeedDataReceived = false;
+        setAppendState(AppendState::LastSample);
+        break;
+    default:
+        ASSERT_NOT_REACHED();
+        break;
+    }
+}
+
+void AppendPipeline::appsinkNewSample(GstSample* sample)
+{
+ ASSERT(WTF::isMainThread());
+
+ {
+ LockHolder locker(m_newSampleLock);
+
+ // Ignore samples if we're not expecting them. Refuse processing if we're in Invalid state.
+ if (m_appendState != AppendState::Ongoing && m_appendState != AppendState::Sampling) {
+ GST_WARNING("Unexpected sample, appendState=%s", dumpAppendState(m_appendState));
+ // FIXME: Return ERROR and find a more robust way to detect that all the
+ // data has been processed, so we don't need to resort to these hacks.
+ // All in all, return OK, even if it's not the proper thing to do. We don't want to break the demuxer.
+ m_flowReturn = GST_FLOW_OK;
+ m_newSampleCondition.notifyOne();
+ return;
+ }
+
+ RefPtr<GStreamerMediaSample> mediaSample = WebCore::GStreamerMediaSample::create(sample, m_presentationSize, trackId());
+
+ GST_TRACE("append: trackId=%s PTS=%f presentationSize=%.0fx%.0f", mediaSample->trackID().string().utf8().data(), mediaSample->presentationTime().toFloat(), mediaSample->presentationSize().width(), mediaSample->presentationSize().height());
+
+ // If we're beyond the duration, ignore this sample and the remaining ones.
+ MediaTime duration = m_mediaSourceClient->duration();
+ if (duration.isValid() && !duration.indefiniteTime() && mediaSample->presentationTime() > duration) {
+ GST_DEBUG("Detected sample (%f) beyond the duration (%f), declaring LastSample", mediaSample->presentationTime().toFloat(), duration.toFloat());
+ setAppendState(AppendState::LastSample);
+ m_flowReturn = GST_FLOW_OK;
+ m_newSampleCondition.notifyOne();
+ return;
+ }
+
+ // Add a gap sample if a gap is detected before the first sample.
+ if (mediaSample->decodeTime() == MediaTime::zeroTime()
+ && mediaSample->presentationTime() > MediaTime::zeroTime()
+ && mediaSample->presentationTime() <= MediaTime::createWithDouble(0.1)) {
+ GST_DEBUG("Adding gap offset");
+ mediaSample->applyPtsOffset(MediaTime::zeroTime());
+ }
+
+ m_sourceBufferPrivate->didReceiveSample(*mediaSample);
+ setAppendState(AppendState::Sampling);
+ m_flowReturn = GST_FLOW_OK;
+ m_newSampleCondition.notifyOne();
+ }
+
+ checkEndOfAppend();
+}
+
+// Main-thread reaction to appsink reaching end-of-stream: closes the current
+// append the same way checkEndOfAppend() would.
+void AppendPipeline::appsinkEOS()
+{
+    ASSERT(WTF::isMainThread());
+
+    switch (m_appendState) {
+    case AppendState::Aborting:
+        // Ignored. Operation completion will be managed by the Aborting->NotStarted transition.
+        return;
+    case AppendState::Ongoing:
+        // Finish Ongoing and Sampling states.
+        setAppendState(AppendState::DataStarve);
+        break;
+    case AppendState::Sampling:
+        setAppendState(AppendState::LastSample);
+        break;
+    default:
+        GST_DEBUG("Unexpected EOS");
+        break;
+    }
+}
+
+// Builds an InitializationSegment (duration + the single audio or video
+// track of this pipeline) and reports it to the SourceBuffer.
+void AppendPipeline::didReceiveInitializationSegment()
+{
+    ASSERT(WTF::isMainThread());
+
+    WebCore::SourceBufferPrivateClient::InitializationSegment initializationSegment;
+
+    // NOTE(review): a null m_track passes nullptr to %s here; glib's printf
+    // prints "(null)" but this relies on that extension -- confirm.
+    GST_DEBUG("Notifying SourceBuffer for track %s", (m_track) ? m_track->id().string().utf8().data() : nullptr);
+    initializationSegment.duration = m_mediaSourceClient->duration();
+
+    switch (m_streamType) {
+    case Audio: {
+        WebCore::SourceBufferPrivateClient::InitializationSegment::AudioTrackInformation info;
+        info.track = static_cast<AudioTrackPrivateGStreamer*>(m_track.get());
+        info.description = WebCore::GStreamerMediaDescription::create(m_demuxerSrcPadCaps.get());
+        initializationSegment.audioTracks.append(info);
+        break;
+    }
+    case Video: {
+        WebCore::SourceBufferPrivateClient::InitializationSegment::VideoTrackInformation info;
+        info.track = static_cast<VideoTrackPrivateGStreamer*>(m_track.get());
+        info.description = WebCore::GStreamerMediaDescription::create(m_demuxerSrcPadCaps.get());
+        initializationSegment.videoTracks.append(info);
+        break;
+    }
+    default:
+        // Text and Unknown/Invalid streams don't populate any track info.
+        GST_ERROR("Unsupported stream type or codec");
+        break;
+    }
+
+    m_sourceBufferPrivate->didReceiveInitializationSegment(initializationSegment);
+}
+
+// Returns the id of the track handled by this pipeline, or the null
+// AtomicString when no track has been created yet.
+AtomicString AppendPipeline::trackId()
+{
+    ASSERT(WTF::isMainThread());
+
+    if (!m_track)
+        return AtomicString();
+
+    return m_track->id();
+}
+
+// Brings the pipeline back to READY (synchronously), re-arms the
+// data-leaving probe and wakes up any streaming thread blocked waiting for a
+// sample result. Used when completing an abort.
+void AppendPipeline::resetPipeline()
+{
+    ASSERT(WTF::isMainThread());
+    GST_DEBUG("resetting pipeline");
+    m_appsrcAtLeastABufferLeft = false;
+    setAppsrcDataLeavingProbe();
+
+    {
+        LockHolder locker(m_newSampleLock);
+        // Wake any handleNewAppsinkSample() waiter before tearing down.
+        m_newSampleCondition.notifyOne();
+        gst_element_set_state(m_pipeline.get(), GST_STATE_READY);
+        // Wait for the state change to actually complete.
+        gst_element_get_state(m_pipeline.get(), nullptr, nullptr, 0);
+    }
+
+#if (!(LOG_DISABLED || defined(GST_DISABLE_GST_DEBUG)))
+    {
+        static unsigned i = 0;
+        // This is here for debugging purposes. It does not make sense to have it as class member.
+        // FIX: use %u for the unsigned counter (was %d).
+        WTF::String dotFileName = String::format("reset-pipeline-%u", ++i);
+        gst_debug_bin_to_dot_file(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.utf8().data());
+    }
+#endif
+
+}
+
+// Installs (once) a buffer probe on the appsrc src pad that reports when the
+// first buffer leaves appsrc; see appendPipelineAppsrcDataLeaving().
+void AppendPipeline::setAppsrcDataLeavingProbe()
+{
+    if (m_appsrcDataLeavingProbeId)
+        return;
+
+    GST_TRACE("setting appsrc data leaving probe");
+
+    GRefPtr<GstPad> appsrcPad = adoptGRef(gst_element_get_static_pad(m_appsrc.get(), "src"));
+    m_appsrcDataLeavingProbeId = gst_pad_add_probe(appsrcPad.get(), GST_PAD_PROBE_TYPE_BUFFER, reinterpret_cast<GstPadProbeCallback>(appendPipelineAppsrcDataLeaving), this, nullptr);
+}
+
+// Removes the probe installed by setAppsrcDataLeavingProbe(), if any.
+void AppendPipeline::removeAppsrcDataLeavingProbe()
+{
+    if (!m_appsrcDataLeavingProbeId)
+        return;
+
+    GST_TRACE("removing appsrc data leaving probe");
+
+    GRefPtr<GstPad> appsrcPad = adoptGRef(gst_element_get_static_pad(m_appsrc.get(), "src"));
+    gst_pad_remove_probe(appsrcPad.get(), m_appsrcDataLeavingProbeId);
+    m_appsrcDataLeavingProbeId = 0;
+}
+
+// Requests an abort of the current append. Drops any buffer queued behind a
+// previous abort. If the machine is idle the abort completes immediately;
+// otherwise it is flagged and completed when the ongoing append finishes
+// (see setAppendState()).
+void AppendPipeline::abort()
+{
+    ASSERT(WTF::isMainThread());
+    GST_DEBUG("aborting");
+
+    m_pendingBuffer = nullptr;
+
+    // Abort already ongoing.
+    if (m_abortPending)
+        return;
+
+    m_abortPending = true;
+    if (m_appendState == AppendState::NotStarted)
+        setAppendState(AppendState::Aborting);
+    // Else, the automatic state transitions will take care when the ongoing append finishes.
+}
+
+GstFlowReturn AppendPipeline::pushNewBuffer(GstBuffer* buffer)
+{
+ GstFlowReturn result;
+
+ if (m_abortPending) {
+ m_pendingBuffer = adoptGRef(buffer);
+ result = GST_FLOW_OK;
+ } else {
+ setAppendState(AppendPipeline::AppendState::Ongoing);
+ GST_TRACE("pushing new buffer %p", buffer);
+ result = gst_app_src_push_buffer(GST_APP_SRC(appsrc()), buffer);
+ }
+
+ return result;
+}
+
+// Called from the streaming thread (pad probe): reposts the event to the bus
+// so it is handled on the main thread by handleAppsrcAtLeastABufferLeft().
+void AppendPipeline::reportAppsrcAtLeastABufferLeft()
+{
+    GST_TRACE("buffer left appsrc, reposting to bus");
+    GstStructure* structure = gst_structure_new_empty("appsrc-buffer-left");
+    GstMessage* message = gst_message_new_application(GST_OBJECT(m_appsrc.get()), structure);
+    gst_bus_post(m_bus.get(), message);
+}
+
+// Called from the streaming thread (appsrc need-data signal): reposts to the
+// bus so it is handled on the main thread by handleAppsrcNeedDataReceived().
+void AppendPipeline::reportAppsrcNeedDataReceived()
+{
+    GST_TRACE("received need-data signal at appsrc, reposting to bus");
+    GstStructure* structure = gst_structure_new_empty("appsrc-need-data");
+    GstMessage* message = gst_message_new_application(GST_OBJECT(m_appsrc.get()), structure);
+    gst_bus_post(m_bus.get(), message);
+}
+
+GstFlowReturn AppendPipeline::handleNewAppsinkSample(GstElement* appsink)
+{
+ ASSERT(!WTF::isMainThread());
+
+ // Even if we're disabled, it's important to pull the sample out anyway to
+ // avoid deadlocks when changing to GST_STATE_NULL having a non empty appsink.
+ GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_sample(GST_APP_SINK(appsink)));
+ LockHolder locker(m_newSampleLock);
+
+ if (!m_playerPrivate || m_appendState == AppendState::Invalid) {
+ GST_WARNING("AppendPipeline has been disabled, ignoring this sample");
+ return GST_FLOW_ERROR;
+ }
+
+ GstStructure* structure = gst_structure_new("appsink-new-sample", "new-sample", GST_TYPE_SAMPLE, sample.get(), nullptr);
+ GstMessage* message = gst_message_new_application(GST_OBJECT(appsink), structure);
+ gst_bus_post(m_bus.get(), message);
+ GST_TRACE("appsink-new-sample message posted to bus");
+
+ m_newSampleCondition.wait(m_newSampleLock);
+ // We've been awaken because the sample was processed or because of
+ // an exceptional condition (entered in Invalid state, destructor, etc.).
+ // We can't reliably delete info here, appendPipelineAppsinkNewSampleMainThread will do it.
+
+ return m_flowReturn;
+}
+
+void AppendPipeline::connectDemuxerSrcPadToAppsinkFromAnyThread(GstPad* demuxerSrcPad)
+{
+ if (!m_appsink)
+ return;
+
+ GST_DEBUG("connecting to appsink");
+
+ if (m_demux->numsrcpads > 1) {
+ GST_WARNING("Only one stream per SourceBuffer is allowed! Ignoring stream %d by adding a black hole probe.", m_demux->numsrcpads);
+ gulong probeId = gst_pad_add_probe(demuxerSrcPad, GST_PAD_PROBE_TYPE_BUFFER, reinterpret_cast<GstPadProbeCallback>(appendPipelineDemuxerBlackHolePadProbe), nullptr, nullptr);
+ g_object_set_data(G_OBJECT(demuxerSrcPad), "blackHoleProbeId", GULONG_TO_POINTER(probeId));
+ return;
+ }
+
+ GRefPtr<GstPad> appsinkSinkPad = adoptGRef(gst_element_get_static_pad(m_appsink.get(), "sink"));
+
+ // Only one stream per demuxer is supported.
+ ASSERT(!gst_pad_is_linked(appsinkSinkPad.get()));
+
+ gint64 timeLength = 0;
+ if (gst_element_query_duration(m_demux.get(), GST_FORMAT_TIME, &timeLength)
+ && static_cast<guint64>(timeLength) != GST_CLOCK_TIME_NONE)
+ m_initialDuration = MediaTime(GST_TIME_AS_USECONDS(timeLength), G_USEC_PER_SEC);
+ else
+ m_initialDuration = MediaTime::positiveInfiniteTime();
+
+ if (WTF::isMainThread())
+ connectDemuxerSrcPadToAppsink(demuxerSrcPad);
+ else {
+ // Call connectDemuxerSrcPadToAppsink() in the main thread and wait.
+ LockHolder locker(m_padAddRemoveLock);
+ if (!m_playerPrivate)
+ return;
+
+ GstStructure* structure = gst_structure_new("demuxer-connect-to-appsink", "demuxer-src-pad", G_TYPE_OBJECT, demuxerSrcPad, nullptr);
+ GstMessage* message = gst_message_new_application(GST_OBJECT(m_demux.get()), structure);
+ gst_bus_post(m_bus.get(), message);
+ GST_TRACE("demuxer-connect-to-appsink message posted to bus");
+
+ m_padAddRemoveCondition.wait(m_padAddRemoveLock);
+ }
+
+ // Must be done in the thread we were called from (usually streaming thread).
+ bool isData = (m_streamType == WebCore::MediaSourceStreamTypeGStreamer::Audio)
+ || (m_streamType == WebCore::MediaSourceStreamTypeGStreamer::Video)
+ || (m_streamType == WebCore::MediaSourceStreamTypeGStreamer::Text);
+
+ if (isData) {
+ // FIXME: Only add appsink one time. This method can be called several times.
+ GRefPtr<GstObject> parent = adoptGRef(gst_element_get_parent(m_appsink.get()));
+ if (!parent)
+ gst_bin_add(GST_BIN(m_pipeline.get()), m_appsink.get());
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
+ if (m_decryptor) {
+ gst_object_ref(m_decryptor.get());
+ gst_bin_add(GST_BIN(m_pipeline.get()), m_decryptor.get());
+
+ GRefPtr<GstPad> decryptorSinkPad = adoptGRef(gst_element_get_static_pad(m_decryptor.get(), "sink"));
+ gst_pad_link(demuxerSrcPad, decryptorSinkPad.get());
+
+ GRefPtr<GstPad> decryptorSrcPad = adoptGRef(gst_element_get_static_pad(m_decryptor.get(), "src"));
+ gst_pad_link(decryptorSrcPad.get(), appsinkSinkPad.get());
+
+ gst_element_sync_state_with_parent(m_appsink.get());
+ gst_element_sync_state_with_parent(m_decryptor.get());
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+ if (m_pendingKey)
+ dispatchPendingDecryptionKey();
+#endif
+ } else {
+#endif
+ gst_pad_link(demuxerSrcPad, appsinkSinkPad.get());
+ gst_element_sync_state_with_parent(m_appsink.get());
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
+ }
+#endif
+ gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
+ }
+}
+
+void AppendPipeline::connectDemuxerSrcPadToAppsink(GstPad* demuxerSrcPad)
+{
+ ASSERT(WTF::isMainThread());
+ GST_DEBUG("Connecting to appsink");
+
+ LockHolder locker(m_padAddRemoveLock);
+ GRefPtr<GstPad> sinkSinkPad = adoptGRef(gst_element_get_static_pad(m_appsink.get(), "sink"));
+
+ // Only one stream per demuxer is supported.
+ ASSERT(!gst_pad_is_linked(sinkSinkPad.get()));
+
+ GRefPtr<GstCaps> caps = adoptGRef(gst_pad_get_current_caps(GST_PAD(demuxerSrcPad)));
+
+ if (!caps || m_appendState == AppendState::Invalid || !m_playerPrivate) {
+ m_padAddRemoveCondition.notifyOne();
+ return;
+ }
+
+#ifndef GST_DISABLE_GST_DEBUG
+ {
+ GUniquePtr<gchar> strcaps(gst_caps_to_string(caps.get()));
+ GST_DEBUG("%s", strcaps.get());
+ }
+#endif
+
+ if (m_initialDuration > m_mediaSourceClient->duration()
+ || (m_mediaSourceClient->duration().isInvalid() && m_initialDuration > MediaTime::zeroTime()))
+ m_mediaSourceClient->durationChanged(m_initialDuration);
+
+ m_oldTrack = m_track;
+
+ parseDemuxerSrcPadCaps(gst_caps_ref(caps.get()));
+
+ switch (m_streamType) {
+ case WebCore::MediaSourceStreamTypeGStreamer::Audio:
+ if (m_playerPrivate)
+ m_track = WebCore::AudioTrackPrivateGStreamer::create(m_playerPrivate->pipeline(), id(), sinkSinkPad.get());
+ break;
+ case WebCore::MediaSourceStreamTypeGStreamer::Video:
+ if (m_playerPrivate)
+ m_track = WebCore::VideoTrackPrivateGStreamer::create(m_playerPrivate->pipeline(), id(), sinkSinkPad.get());
+ break;
+ case WebCore::MediaSourceStreamTypeGStreamer::Text:
+ m_track = WebCore::InbandTextTrackPrivateGStreamer::create(id(), sinkSinkPad.get());
+ break;
+ case WebCore::MediaSourceStreamTypeGStreamer::Invalid:
+ {
+ GUniquePtr<gchar> strcaps(gst_caps_to_string(caps.get()));
+ GST_DEBUG("Unsupported track codec: %s", strcaps.get());
+ }
+ // This is going to cause an error which will detach the SourceBuffer and tear down this
+ // AppendPipeline, so we need the padAddRemove lock released before continuing.
+ m_track = nullptr;
+ m_padAddRemoveCondition.notifyOne();
+ locker.unlockEarly();
+ didReceiveInitializationSegment();
+ return;
+ default:
+ // No useful data, but notify anyway to complete the append operation.
+ GST_DEBUG("Received all pending samples (no data)");
+ m_sourceBufferPrivate->didReceiveAllPendingSamples();
+ break;
+ }
+
+ m_padAddRemoveCondition.notifyOne();
+}
+
+void AppendPipeline::disconnectDemuxerSrcPadFromAppsinkFromAnyThread(GstPad* demuxerSrcPad)
+{
+ // Must be done in the thread we were called from (usually streaming thread).
+ if (!gst_pad_is_linked(demuxerSrcPad)) {
+ gulong probeId = GPOINTER_TO_ULONG(g_object_get_data(G_OBJECT(demuxerSrcPad), "blackHoleProbeId"));
+ if (probeId) {
+ GST_DEBUG("Disconnecting black hole probe.");
+ g_object_set_data(G_OBJECT(demuxerSrcPad), "blackHoleProbeId", nullptr);
+ gst_pad_remove_probe(demuxerSrcPad, probeId);
+ } else
+ GST_WARNING("Not disconnecting demuxer src pad because it wasn't linked");
+ return;
+ }
+
+ GST_DEBUG("Disconnecting appsink");
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
+ if (m_decryptor) {
+ gst_element_unlink(m_decryptor.get(), m_appsink.get());
+ gst_element_unlink(m_demux.get(), m_decryptor.get());
+ gst_element_set_state(m_decryptor.get(), GST_STATE_NULL);
+ gst_bin_remove(GST_BIN(m_pipeline.get()), m_decryptor.get());
+ } else
+#endif
+ gst_element_unlink(m_demux.get(), m_appsink.get());
+}
+
+static void appendPipelineAppsinkCapsChanged(GObject* appsinkPad, GParamSpec*, AppendPipeline* appendPipeline)
+{
+ GstStructure* structure = gst_structure_new_empty("appsink-caps-changed");
+ GstMessage* message = gst_message_new_application(GST_OBJECT(appsinkPad), structure);
+ gst_bus_post(appendPipeline->bus(), message);
+ GST_TRACE("appsink-caps-changed message posted to bus");
+}
+
+// Buffer probe on the appsrc src pad (streaming thread): reports that data
+// is flowing out of appsrc. Buffers always pass through untouched.
+static GstPadProbeReturn appendPipelineAppsrcDataLeaving(GstPad*, GstPadProbeInfo* info, AppendPipeline* appendPipeline)
+{
+    ASSERT(GST_PAD_PROBE_INFO_TYPE(info) & GST_PAD_PROBE_TYPE_BUFFER);
+
+    GstBuffer* buffer = GST_PAD_PROBE_INFO_BUFFER(info);
+    gsize bufferSize = gst_buffer_get_size(buffer);
+
+    GST_TRACE("buffer of size %" G_GSIZE_FORMAT " going thru", bufferSize);
+
+    appendPipeline->reportAppsrcAtLeastABufferLeft();
+
+    return GST_PAD_PROBE_OK;
+}
+
+#if !LOG_DISABLED
+// Logging-only buffer probe: traces every buffer passing through the pad it
+// is attached to, tagged with the probe's description string.
+static GstPadProbeReturn appendPipelinePadProbeDebugInformation(GstPad*, GstPadProbeInfo* info, struct PadProbeInformation* padProbeInformation)
+{
+    ASSERT(GST_PAD_PROBE_INFO_TYPE(info) & GST_PAD_PROBE_TYPE_BUFFER);
+    GstBuffer* buffer = GST_PAD_PROBE_INFO_BUFFER(info);
+    GST_TRACE("%s: buffer of size %" G_GSIZE_FORMAT " going thru", padProbeInformation->description, gst_buffer_get_size(buffer));
+    return GST_PAD_PROBE_OK;
+}
+#endif
+
+// Probe attached to ignored extra demuxer streams: silently drops every
+// buffer (only one stream per SourceBuffer is supported).
+static GstPadProbeReturn appendPipelineDemuxerBlackHolePadProbe(GstPad*, GstPadProbeInfo* info, gpointer)
+{
+    ASSERT(GST_PAD_PROBE_INFO_TYPE(info) & GST_PAD_PROBE_TYPE_BUFFER);
+    GstBuffer* buffer = GST_PAD_PROBE_INFO_BUFFER(info);
+    GST_TRACE("buffer of size %" G_GSIZE_FORMAT " ignored", gst_buffer_get_size(buffer));
+    return GST_PAD_PROBE_DROP;
+}
+
+// appsrc need-data signal trampoline (any thread).
+static void appendPipelineAppsrcNeedData(GstAppSrc*, guint, AppendPipeline* appendPipeline)
+{
+    appendPipeline->reportAppsrcNeedDataReceived();
+}
+
+// Demuxer pad-added signal trampoline (usually the streaming thread).
+static void appendPipelineDemuxerPadAdded(GstElement*, GstPad* demuxerSrcPad, AppendPipeline* appendPipeline)
+{
+    appendPipeline->connectDemuxerSrcPadToAppsinkFromAnyThread(demuxerSrcPad);
+}
+
+// Demuxer pad-removed signal trampoline (usually the streaming thread).
+static void appendPipelineDemuxerPadRemoved(GstElement*, GstPad* demuxerSrcPad, AppendPipeline* appendPipeline)
+{
+    appendPipeline->disconnectDemuxerSrcPadFromAppsinkFromAnyThread(demuxerSrcPad);
+}
+
+static GstFlowReturn appendPipelineAppsinkNewSample(GstElement* appsink, AppendPipeline* appendPipeline)
+{
+ return appendPipeline->handleNewAppsinkSample(appsink);
+}
+
+// appsink eos signal trampoline: handles directly when already on the main
+// thread, otherwise reposts to the bus for main-thread handling.
+static void appendPipelineAppsinkEOS(GstElement*, AppendPipeline* appendPipeline)
+{
+    if (WTF::isMainThread())
+        appendPipeline->appsinkEOS();
+    else {
+        GstStructure* structure = gst_structure_new_empty("appsink-eos");
+        GstMessage* message = gst_message_new_application(GST_OBJECT(appendPipeline->appsink()), structure);
+        gst_bus_post(appendPipeline->bus(), message);
+        GST_TRACE("appsink-eos message posted to bus");
+    }
+
+    GST_DEBUG("%s main thread", (WTF::isMainThread()) ? "Is" : "Not");
+}
+
+
+
+} // namespace WebCore.
+
+#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.h b/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.h
new file mode 100644
index 000000000..301265eb9
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.h
@@ -0,0 +1,165 @@
+/*
+ * Copyright (C) 2016 Metrological Group B.V.
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB.  If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+#include "GRefPtrGStreamer.h"
+#include "MediaPlayerPrivateGStreamerMSE.h"
+#include "MediaSourceClientGStreamerMSE.h"
+#include "SourceBufferPrivateGStreamer.h"
+
+#include <gst/gst.h>
+#include <wtf/Condition.h>
+
+namespace WebCore {
+
+#if !LOG_DISABLED
+// Context passed to the logging-only pad probes, so traces can be attributed
+// to the pipeline and pad they came from.
+struct PadProbeInformation {
+    AppendPipeline* appendPipeline;
+    const char* description; // Human-readable label used in the trace output.
+    gulong probeId; // Id returned by gst_pad_add_probe(), for later removal.
+};
+#endif
+
+// Owns the appsrc->demuxer->(decryptor)->appsink pipeline used to parse the
+// data appended to one MSE SourceBuffer, and the state machine that tracks
+// each append operation. Instances are shared between the main thread and
+// GStreamer streaming threads, hence the locks/conditions below.
+class AppendPipeline : public ThreadSafeRefCounted<AppendPipeline> {
+public:
+    enum class AppendState { Invalid, NotStarted, Ongoing, KeyNegotiation, DataStarve, Sampling, LastSample, Aborting };
+
+    AppendPipeline(Ref<MediaSourceClientGStreamerMSE>, Ref<SourceBufferPrivateGStreamer>, MediaPlayerPrivateGStreamerMSE&);
+    virtual ~AppendPipeline();
+
+    void handleNeedContextSyncMessage(GstMessage*);
+    void handleApplicationMessage(GstMessage*);
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+    void handleElementMessage(GstMessage*);
+#endif
+
+    gint id();
+    AppendState appendState() { return m_appendState; }
+    void setAppendState(AppendState);
+
+    GstFlowReturn handleNewAppsinkSample(GstElement*);
+    GstFlowReturn pushNewBuffer(GstBuffer*);
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+    void dispatchDecryptionKey(GstBuffer*);
+#endif
+
+    // Takes ownership of caps.
+    void parseDemuxerSrcPadCaps(GstCaps*);
+    void appsinkCapsChanged();
+    void appsinkNewSample(GstSample*);
+    void appsinkEOS();
+    void didReceiveInitializationSegment();
+    AtomicString trackId();
+    void abort();
+
+    void clearPlayerPrivate();
+    Ref<SourceBufferPrivateGStreamer> sourceBufferPrivate() { return m_sourceBufferPrivate.get(); }
+    GstBus* bus() { return m_bus.get(); }
+    GstElement* pipeline() { return m_pipeline.get(); }
+    GstElement* appsrc() { return m_appsrc.get(); }
+    GstElement* appsink() { return m_appsink.get(); }
+    GstCaps* demuxerSrcPadCaps() { return m_demuxerSrcPadCaps.get(); }
+    GstCaps* appsinkCaps() { return m_appsinkCaps.get(); }
+    RefPtr<WebCore::TrackPrivateBase> track() { return m_track; }
+    WebCore::MediaSourceStreamTypeGStreamer streamType() { return m_streamType; }
+
+    void disconnectDemuxerSrcPadFromAppsinkFromAnyThread(GstPad*);
+    void connectDemuxerSrcPadToAppsinkFromAnyThread(GstPad*);
+    void connectDemuxerSrcPadToAppsink(GstPad*);
+
+    // Streaming-thread entry points that repost work to the bus.
+    void reportAppsrcAtLeastABufferLeft();
+    void reportAppsrcNeedDataReceived();
+
+private:
+    void resetPipeline();
+    void checkEndOfAppend();
+    void handleAppsrcAtLeastABufferLeft();
+    void handleAppsrcNeedDataReceived();
+    void removeAppsrcDataLeavingProbe();
+    void setAppsrcDataLeavingProbe();
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+    void dispatchPendingDecryptionKey();
+#endif
+
+private:
+    Ref<MediaSourceClientGStreamerMSE> m_mediaSourceClient;
+    Ref<SourceBufferPrivateGStreamer> m_sourceBufferPrivate;
+    // Cleared by clearPlayerPrivate() when the player goes away; always
+    // null-checked before use.
+    MediaPlayerPrivateGStreamerMSE* m_playerPrivate;
+
+    // (m_mediaType, m_id) is unique.
+    gint m_id;
+
+    MediaTime m_initialDuration;
+
+    // Result handed back to the streaming thread after main-thread sample
+    // processing (see handleNewAppsinkSample()).
+    GstFlowReturn m_flowReturn;
+
+    GRefPtr<GstElement> m_pipeline;
+    GRefPtr<GstBus> m_bus;
+    GRefPtr<GstElement> m_appsrc;
+    GRefPtr<GstElement> m_demux;
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
+    GRefPtr<GstElement> m_decryptor;
+#endif
+    // The demuxer has one src stream only, so only one appsink is needed and linked to it.
+    GRefPtr<GstElement> m_appsink;
+
+    // Rendezvous between the streaming thread (waits) and the main thread
+    // (processes and notifies) for new samples and pad add/remove.
+    Lock m_newSampleLock;
+    Condition m_newSampleCondition;
+    Lock m_padAddRemoveLock;
+    Condition m_padAddRemoveCondition;
+
+    GRefPtr<GstCaps> m_appsinkCaps;
+    GRefPtr<GstCaps> m_demuxerSrcPadCaps;
+    FloatSize m_presentationSize;
+
+    // Gate for need-data handling: need-data is ignored until at least one
+    // buffer has left appsrc.
+    bool m_appsrcAtLeastABufferLeft;
+    bool m_appsrcNeedDataReceived;
+
+    gulong m_appsrcDataLeavingProbeId;
+#if !LOG_DISABLED
+    struct PadProbeInformation m_demuxerDataEnteringPadProbeInformation;
+    struct PadProbeInformation m_appsinkDataEnteringPadProbeInformation;
+#endif
+
+    // Keeps track of the states of append processing, to avoid performing actions inappropriate for the current state
+    // (eg: processing more samples when the last one has been detected, etc.). See setAppendState() for valid
+    // transitions.
+    AppendState m_appendState;
+
+    // Aborts can only be completed when the normal sample detection has finished. Meanwhile, the willing to abort is
+    // expressed in this field.
+    bool m_abortPending;
+
+    WebCore::MediaSourceStreamTypeGStreamer m_streamType;
+    RefPtr<WebCore::TrackPrivateBase> m_oldTrack;
+    RefPtr<WebCore::TrackPrivateBase> m_track;
+
+    // Buffer parked by pushNewBuffer() while an abort is pending.
+    GRefPtr<GstBuffer> m_pendingBuffer;
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+    GRefPtr<GstBuffer> m_pendingKey;
+#endif
+};
+
+} // namespace WebCore.
+
+#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaDescription.cpp b/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaDescription.cpp
new file mode 100644
index 000000000..776a0be9b
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaDescription.cpp
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2016 Metrological Group B.V.
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "config.h"
+#include "GStreamerMediaDescription.h"
+
+#include "GUniquePtrGStreamer.h"
+
+#include <gst/pbutils/pbutils.h>
+#include <wtf/text/AtomicString.h>
+#include <wtf/text/WTFString.h>
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+namespace WebCore {
+
+AtomicString GStreamerMediaDescription::codec() const
+{
+ GUniquePtr<gchar> description(gst_pb_utils_get_codec_description(m_caps.get()));
+ String codecName(description.get());
+
+ // Report "H.264 (Main Profile)" and "H.264 (High Profile)" just as "H.264" to allow changes between both variants
+ // go unnoticed to the SourceBuffer layer.
+ if (codecName.startsWith("H.264")) {
+ size_t braceStart = codecName.find(" (");
+ size_t braceEnd = codecName.find(")");
+ if (braceStart != notFound && braceEnd != notFound)
+ codecName.remove(braceStart, braceEnd-braceStart);
+ }
+
+ return codecName;
+}
+
+bool GStreamerMediaDescription::isVideo() const
+{
+ GstStructure* structure = gst_caps_get_structure(m_caps.get(), 0);
+ const gchar* name = gst_structure_get_name(structure);
+
+ return g_str_has_prefix(name, "video/");
+}
+
+bool GStreamerMediaDescription::isAudio() const
+{
+ GstStructure* structure = gst_caps_get_structure(m_caps.get(), 0);
+ const gchar* name = gst_structure_get_name(structure);
+
+ return g_str_has_prefix(name, "audio/");
+}
+
bool GStreamerMediaDescription::isText() const
{
    // FIXME: Implement proper text track support.
    // Until then, no GStreamer-backed media description ever reports itself as text.
    return false;
}
+
+} // namespace WebCore.
+
+#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaDescription.h b/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaDescription.h
new file mode 100644
index 000000000..84e263caa
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaDescription.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2016 Metrological Group B.V.
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+#include "GRefPtrGStreamer.h"
+#include "MediaDescription.h"
+
+#include <gst/gst.h>
+
+namespace WebCore {
+
// MediaDescription backed by a set of GStreamer caps. The caps' first structure
// media type determines whether the track is audio or video; the codec string is
// derived from the caps via pbutils (see GStreamerMediaDescription.cpp).
class GStreamerMediaDescription : public MediaDescription {
public:
    // The caps are ref'ed by the GRefPtr member; the caller keeps its own reference.
    static Ref<GStreamerMediaDescription> create(GstCaps* caps)
    {
        return adoptRef(*new GStreamerMediaDescription(caps));
    }

    virtual ~GStreamerMediaDescription() = default;

    AtomicString codec() const override;
    bool isVideo() const override;
    bool isAudio() const override;
    bool isText() const override;

private:
    GStreamerMediaDescription(GstCaps* caps)
        : MediaDescription()
        , m_caps(caps)
    {
    }

    // Caps describing the track; never null for objects created through create().
    GRefPtr<GstCaps> m_caps;
};
+
+} // namespace WebCore.
+
+#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaSample.cpp b/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaSample.cpp
new file mode 100644
index 000000000..86d4329df
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaSample.cpp
@@ -0,0 +1,120 @@
+/*
+ * Copyright (C) 2016 Metrological Group B.V.
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "config.h"
+#include "GStreamerMediaSample.h"
+
+#include "GStreamerUtilities.h"
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+namespace WebCore {
+
// Wraps a GstSample (may be null for "fake" samples, see createFakeSample()).
// Timing fields default to zero and are only filled in when the sample's buffer
// carries valid PTS/DTS/duration.
GStreamerMediaSample::GStreamerMediaSample(GstSample* sample, const FloatSize& presentationSize, const AtomicString& trackId)
    : MediaSample()
    , m_pts(MediaTime::zeroTime())
    , m_dts(MediaTime::zeroTime())
    , m_duration(MediaTime::zeroTime())
    , m_trackId(trackId)
    , m_size(0)
    , m_presentationSize(presentationSize)
    , m_flags(MediaSample::IsSync)
{

    if (!sample)
        return;

    GstBuffer* buffer = gst_sample_get_buffer(sample);
    if (!buffer)
        return;

    // Timestamps are stored with microsecond precision.
    auto createMediaTime =
        [](GstClockTime time) -> MediaTime {
            return MediaTime(GST_TIME_AS_USECONDS(time), G_USEC_PER_SEC);
        };

    if (GST_BUFFER_PTS_IS_VALID(buffer))
        m_pts = createMediaTime(GST_BUFFER_PTS(buffer));
    if (GST_BUFFER_DTS_IS_VALID(buffer))
        m_dts = createMediaTime(GST_BUFFER_DTS(buffer));
    if (GST_BUFFER_DURATION_IS_VALID(buffer))
        m_duration = createMediaTime(GST_BUFFER_DURATION(buffer));

    m_size = gst_buffer_get_size(buffer);
    m_sample = sample;

    // A delta unit is not independently decodable, so it is not a sync sample.
    // Note the assignment (not OR): it clears the IsSync default first.
    if (GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT))
        m_flags = MediaSample::None;

    if (GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DECODE_ONLY))
        m_flags = static_cast<MediaSample::SampleFlags>(m_flags | MediaSample::IsNonDisplaying);
}
+
+Ref<GStreamerMediaSample> GStreamerMediaSample::createFakeSample(GstCaps*, MediaTime pts, MediaTime dts, MediaTime duration, const FloatSize& presentationSize, const AtomicString& trackId)
+{
+ GStreamerMediaSample* gstreamerMediaSample = new GStreamerMediaSample(nullptr, presentationSize, trackId);
+ gstreamerMediaSample->m_pts = pts;
+ gstreamerMediaSample->m_dts = dts;
+ gstreamerMediaSample->m_duration = duration;
+ gstreamerMediaSample->m_flags = MediaSample::IsNonDisplaying;
+ return adoptRef(*gstreamerMediaSample);
+}
+
+void GStreamerMediaSample::applyPtsOffset(MediaTime timestampOffset)
+{
+ if (m_pts > timestampOffset) {
+ m_duration = m_duration + (m_pts - timestampOffset);
+ m_pts = timestampOffset;
+ }
+}
+
+void GStreamerMediaSample::offsetTimestampsBy(const MediaTime& timestampOffset)
+{
+ if (!timestampOffset)
+ return;
+ m_pts += timestampOffset;
+ m_dts += timestampOffset;
+ GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
+ if (buffer) {
+ GST_BUFFER_PTS(buffer) = toGstClockTime(m_pts.toFloat());
+ GST_BUFFER_DTS(buffer) = toGstClockTime(m_dts.toFloat());
+ }
+}
+
// Returns a copy of this sample marked decode-only, used by SourceBuffer to feed
// the decoder with frames that must not be rendered.
Ref<MediaSample> GStreamerMediaSample::createNonDisplayingCopy() const
{
    if (!m_sample)
        return createFakeSample(nullptr, m_pts, m_dts, m_duration, m_presentationSize, m_trackId);

    // NOTE(review): this sets DECODE_ONLY on the *shared* buffer, so the original
    // sample's buffer is flagged too — the copy only isolates the GstSample wrapper.
    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
    GST_BUFFER_FLAG_SET(buffer, GST_BUFFER_FLAG_DECODE_ONLY);

    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    GstSegment* segment = gst_sample_get_segment(m_sample.get());
    // gst_sample_new() takes ownership of the info structure, hence the copy.
    const GstStructure* originalInfo = gst_sample_get_info(m_sample.get());
    GstStructure* info = originalInfo ? gst_structure_copy(originalInfo) : nullptr;
    GRefPtr<GstSample> sample = adoptGRef(gst_sample_new(buffer, caps, segment, info));

    return adoptRef(*new GStreamerMediaSample(sample.get(), m_presentationSize, m_trackId));
}
+
+} // namespace WebCore.
+
+#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaSample.h b/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaSample.h
new file mode 100644
index 000000000..49e12b5c3
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/GStreamerMediaSample.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2016 Metrological Group B.V.
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+#include "FloatSize.h"
+#include "GRefPtrGStreamer.h"
+#include "MediaSample.h"
+#include <gst/gst.h>
+#include <wtf/text/AtomicString.h>
+
+namespace WebCore {
+
// MediaSample implementation wrapping a GstSample produced by the append pipeline.
// Timing metadata is cached in MediaTime members at construction; a null GstSample
// is allowed for metadata-only "fake" samples (see createFakeSample()).
class GStreamerMediaSample : public MediaSample {
public:
    static Ref<GStreamerMediaSample> create(GstSample* sample, const FloatSize& presentationSize, const AtomicString& trackId)
    {
        return adoptRef(*new GStreamerMediaSample(sample, presentationSize, trackId));
    }

    // Creates a non-displaying, bufferless sample carrying only the given timing data.
    static Ref<GStreamerMediaSample> createFakeSample(GstCaps*, MediaTime pts, MediaTime dts, MediaTime duration, const FloatSize& presentationSize, const AtomicString& trackId);

    // Moves the PTS back to the given offset, extending the duration accordingly.
    void applyPtsOffset(MediaTime);
    MediaTime presentationTime() const override { return m_pts; }
    MediaTime decodeTime() const override { return m_dts; }
    MediaTime duration() const override { return m_duration; }
    AtomicString trackID() const override { return m_trackId; }
    void setTrackID(const String& trackId) override { m_trackId = trackId; }
    size_t sizeInBytes() const override { return m_size; }
    GstSample* sample() const { return m_sample.get(); }
    FloatSize presentationSize() const override { return m_presentationSize; }
    void offsetTimestampsBy(const MediaTime&) override;
    void setTimestamps(const MediaTime&, const MediaTime&) override { }
    // Sample splitting is not supported for GStreamer-backed samples.
    bool isDivisable() const override { return false; }
    std::pair<RefPtr<MediaSample>, RefPtr<MediaSample>> divide(const MediaTime&) override { return { nullptr, nullptr }; }
    Ref<MediaSample> createNonDisplayingCopy() const override;
    SampleFlags flags() const override { return m_flags; }
    PlatformSample platformSample() override { return PlatformSample(); }
    void dump(PrintStream&) const override { }

private:
    GStreamerMediaSample(GstSample*, const FloatSize& presentationSize, const AtomicString& trackId);
    virtual ~GStreamerMediaSample() = default;

    MediaTime m_pts;
    MediaTime m_dts;
    MediaTime m_duration;
    AtomicString m_trackId;
    size_t m_size; // Buffer size in bytes, 0 for fake samples.
    GRefPtr<GstSample> m_sample; // May be null for fake samples.
    FloatSize m_presentationSize;
    MediaSample::SampleFlags m_flags;
};
+
+} // namespace WebCore.
+
+#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp b/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp
new file mode 100644
index 000000000..4614eb9b9
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp
@@ -0,0 +1,860 @@
+/*
+ * Copyright (C) 2007, 2009 Apple Inc. All rights reserved.
+ * Copyright (C) 2007 Collabora Ltd. All rights reserved.
+ * Copyright (C) 2007 Alp Toker <alp@atoker.com>
+ * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
+ * Copyright (C) 2009, 2010, 2011, 2012, 2013, 2016 Igalia S.L
+ * Copyright (C) 2015 Sebastian Dröge <sebastian@centricular.com>
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "config.h"
+#include "MediaPlayerPrivateGStreamerMSE.h"
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+#include "AppendPipeline.h"
+#include "AudioTrackPrivateGStreamer.h"
+#include "GStreamerUtilities.h"
+#include "InbandTextTrackPrivateGStreamer.h"
+#include "MIMETypeRegistry.h"
+#include "MediaDescription.h"
+#include "MediaPlayer.h"
+#include "NotImplemented.h"
+#include "SourceBufferPrivateGStreamer.h"
+#include "TimeRanges.h"
+#include "URL.h"
+#include "VideoTrackPrivateGStreamer.h"
+
+#include <fnmatch.h>
+#include <gst/app/gstappsink.h>
+#include <gst/app/gstappsrc.h>
+#include <gst/gst.h>
+#include <gst/pbutils/pbutils.h>
+#include <gst/video/video.h>
+#include <wtf/Condition.h>
+#include <wtf/NeverDestroyed.h>
+
+static const char* dumpReadyState(WebCore::MediaPlayer::ReadyState readyState)
+{
+ switch (readyState) {
+ case WebCore::MediaPlayer::HaveNothing: return "HaveNothing";
+ case WebCore::MediaPlayer::HaveMetadata: return "HaveMetadata";
+ case WebCore::MediaPlayer::HaveCurrentData: return "HaveCurrentData";
+ case WebCore::MediaPlayer::HaveFutureData: return "HaveFutureData";
+ case WebCore::MediaPlayer::HaveEnoughData: return "HaveEnoughData";
+ default: return "(unknown)";
+ }
+}
+
+GST_DEBUG_CATEGORY(webkit_mse_debug);
+#define GST_CAT_DEFAULT webkit_mse_debug
+
+namespace WebCore {
+
+void MediaPlayerPrivateGStreamerMSE::registerMediaEngine(MediaEngineRegistrar registrar)
+{
+ if (isAvailable()) {
+ registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateGStreamerMSE>(player); },
+ getSupportedTypes, supportsType, nullptr, nullptr, nullptr, supportsKeySystem);
+ }
+}
+
+bool initializeGStreamerAndRegisterWebKitMSEElement()
+{
+ if (UNLIKELY(!initializeGStreamer()))
+ return false;
+
+ registerWebKitGStreamerElements();
+
+ GST_DEBUG_CATEGORY_INIT(webkit_mse_debug, "webkitmse", 0, "WebKit MSE media player");
+
+ GRefPtr<GstElementFactory> WebKitMediaSrcFactory = adoptGRef(gst_element_factory_find("webkitmediasrc"));
+ if (UNLIKELY(!WebKitMediaSrcFactory))
+ gst_element_register(nullptr, "webkitmediasrc", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_MEDIA_SRC);
+ return true;
+}
+
+bool MediaPlayerPrivateGStreamerMSE::isAvailable()
+{
+ if (UNLIKELY(!initializeGStreamerAndRegisterWebKitMSEElement()))
+ return false;
+
+ GRefPtr<GstElementFactory> factory = adoptGRef(gst_element_factory_find("playbin"));
+ return factory;
+}
+
// All real setup is deferred to load(); the constructor only hooks into the base player.
MediaPlayerPrivateGStreamerMSE::MediaPlayerPrivateGStreamerMSE(MediaPlayer* player)
    : MediaPlayerPrivateGStreamer(player)
{
    GST_TRACE("creating the player (%p)", this);
}
+
MediaPlayerPrivateGStreamerMSE::~MediaPlayerPrivateGStreamerMSE()
{
    GST_TRACE("destroying the player (%p)", this);

    // Detach every append pipeline first, so in-flight append callbacks can no
    // longer reach this (about to be destroyed) player.
    for (auto iterator : m_appendPipelinesMap)
        iterator.value->clearPlayerPrivate();

    if (m_source) {
        // Break the source element's back-reference to us and drop our signal handlers.
        webKitMediaSrcSetMediaPlayerPrivate(WEBKIT_MEDIA_SRC(m_source.get()), nullptr);
        g_signal_handlers_disconnect_by_data(m_source.get(), this);
    }

    if (m_playbackPipeline)
        m_playbackPipeline->setWebKitMediaSrc(nullptr);
}
+
void MediaPlayerPrivateGStreamerMSE::load(const String& urlString)
{
    // Only synthetic "mediasource..." URLs (built by the two-argument load() below)
    // are handled here.
    if (!urlString.startsWith("mediasource")) {
        // Properly fail so the global MediaPlayer tries to fallback to the next MediaPlayerPrivate.
        m_networkState = MediaPlayer::FormatError;
        m_player->networkStateChanged();
        return;
    }

    if (UNLIKELY(!initializeGStreamerAndRegisterWebKitMSEElement()))
        return;

    // Lazily create the helper that feeds enqueued samples into the playback pipeline.
    if (!m_playbackPipeline)
        m_playbackPipeline = PlaybackPipeline::create();

    MediaPlayerPrivateGStreamer::load(urlString);
}
+
+void MediaPlayerPrivateGStreamerMSE::load(const String& url, MediaSourcePrivateClient* mediaSource)
+{
+ m_mediaSource = mediaSource;
+ load(String::format("mediasource%s", url.utf8().data()));
+}
+
void MediaPlayerPrivateGStreamerMSE::pause()
{
    // Record the paused intent before delegating, so state updates see it immediately.
    m_paused = true;
    MediaPlayerPrivateGStreamer::pause();
}
+
MediaTime MediaPlayerPrivateGStreamerMSE::durationMediaTime() const
{
    // Without a pipeline (or after an error) there is no meaningful duration.
    if (UNLIKELY(!m_pipeline || m_errorOccured))
        return MediaTime();

    // For MSE the duration is tracked explicitly (set by the MediaSource), not
    // queried from GStreamer.
    return m_mediaTimeDuration;
}
+
// Entry point for seeks requested by the MediaPlayer. Records the target time and
// delegates the actual work to doSeek(), which may defer the seek until the
// pipeline and the MediaSource are ready.
void MediaPlayerPrivateGStreamerMSE::seek(float time)
{
    if (UNLIKELY(!m_pipeline || m_errorOccured))
        return;

    GST_INFO("[Seek] seek attempt to %f secs", time);

    // Avoid useless seeking.
    float current = currentMediaTime().toFloat();
    if (time == current) {
        if (!m_seeking)
            timeChanged();
        return;
    }

    if (isLiveStream())
        return;

    // A seek is already queued: just retarget it.
    if (m_seeking && m_seekIsPending) {
        m_seekTime = time;
        return;
    }

    GST_DEBUG("Seeking from %f to %f seconds", current, time);

    // Keep the previous target around so it can be restored if doSeek() fails.
    float prevSeekTime = m_seekTime;
    m_seekTime = time;

    if (!doSeek()) {
        m_seekTime = prevSeekTime;
        GST_WARNING("Seeking to %f failed", time);
        return;
    }

    m_isEndReached = false;
    GST_DEBUG("m_seeking=%s, m_seekTime=%f", m_seeking ? "true" : "false", m_seekTime);
}
+
+void MediaPlayerPrivateGStreamerMSE::configurePlaySink()
+{
+ MediaPlayerPrivateGStreamer::configurePlaySink();
+
+ GRefPtr<GstElement> playsink = adoptGRef(gst_bin_get_by_name(GST_BIN(m_pipeline.get()), "playsink"));
+ if (playsink) {
+ // The default value (0) means "send events to all the sinks", instead
+ // of "only to the first that returns true". This is needed for MSE seek.
+ g_object_set(G_OBJECT(playsink.get()), "send-event-mode", 0, nullptr);
+ }
+}
+
+bool MediaPlayerPrivateGStreamerMSE::changePipelineState(GstState newState)
+{
+ if (seeking()) {
+ GST_DEBUG("Rejected state change to %s while seeking",
+ gst_element_state_get_name(newState));
+ return true;
+ }
+
+ return MediaPlayerPrivateGStreamer::changePipelineState(newState);
+}
+
// Called (indirectly, via webKitMediaSrcPrepareSeek()) when the pipeline is ready
// to receive samples for the seek target position.
void MediaPlayerPrivateGStreamerMSE::notifySeekNeedsDataForTime(const MediaTime& seekTime)
{
    // Reenqueue samples needed to resume playback in the new position.
    m_mediaSource->seekToTime(seekTime);

    GST_DEBUG("MSE seek to %f finished", seekTime.toDouble());

    // Mark the GStreamer half of the seek as done and try to complete the whole seek.
    if (!m_gstSeekCompleted) {
        m_gstSeekCompleted = true;
        maybeFinishSeek();
    }
}
+
// Overrides the base class seek primitive to make it unreachable: MSE seeks must
// go through the zero-argument doSeek() below, which handles sample availability.
bool MediaPlayerPrivateGStreamerMSE::doSeek(gint64, float, GstSeekFlags)
{
    // Use doSeek() instead. If anybody is calling this version of doSeek(), something is wrong.
    ASSERT_NOT_REACHED();
    return false;
}
+
// Performs (or defers) the actual seek to m_seekTime. Returns true when the seek
// is either done or successfully queued, false when it cannot be performed.
// The seek completes asynchronously through notifySeekNeedsDataForTime(),
// seekCompleted() and maybeFinishSeek().
bool MediaPlayerPrivateGStreamerMSE::doSeek()
{
    GstClockTime position = toGstClockTime(m_seekTime);
    MediaTime seekTime = MediaTime::createWithDouble(m_seekTime);
    double rate = m_player->rate();
    GstSeekFlags seekType = static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE);

    // Always move to seeking state to report correct 'currentTime' while pending for actual seek to complete.
    m_seeking = true;

    // Check if playback pipeline is ready for seek.
    GstState state, newState;
    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &newState, 0);
    if (getStateResult == GST_STATE_CHANGE_FAILURE || getStateResult == GST_STATE_CHANGE_NO_PREROLL) {
        GST_DEBUG("[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult));
        webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
        m_seeking = false;
        return false;
    }
    // Defer the seek while an async state change (other than PLAYING->PAUSED) is in
    // progress, before preroll, at EOS, or while a previous GStreamer seek runs.
    if ((getStateResult == GST_STATE_CHANGE_ASYNC
        && !(state == GST_STATE_PLAYING && newState == GST_STATE_PAUSED))
        || state < GST_STATE_PAUSED
        || m_isEndReached
        || !m_gstSeekCompleted) {
        CString reason = "Unknown reason";
        if (getStateResult == GST_STATE_CHANGE_ASYNC) {
            reason = String::format("In async change %s --> %s",
                gst_element_state_get_name(state),
                gst_element_state_get_name(newState)).utf8();
        } else if (state < GST_STATE_PAUSED)
            reason = "State less than PAUSED";
        else if (m_isEndReached)
            reason = "End reached";
        else if (!m_gstSeekCompleted)
            reason = "Previous seek is not finished yet";

        GST_DEBUG("[Seek] Delaying the seek: %s", reason.data());

        m_seekIsPending = true;

        if (m_isEndReached) {
            GST_DEBUG("[Seek] reset pipeline");
            m_resetPipeline = true;
            m_seeking = false;
            if (!changePipelineState(GST_STATE_PAUSED))
                loadingFailed(MediaPlayer::Empty);
            else
                m_seeking = true;
        }

        // maybeFinishSeek() will retry the pending seek later.
        return m_seeking;
    }

    // Stop accepting new samples until actual seek is finished.
    webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), false);

    // Correct seek time if it helps to fix a small gap.
    if (!isTimeBuffered(seekTime)) {
        // Look if a near future time (<0.1 sec.) is buffered and change the seek target time.
        if (m_mediaSource) {
            const MediaTime miniGap = MediaTime::createWithDouble(0.1);
            MediaTime nearest = m_mediaSource->buffered()->nearest(seekTime);
            if (nearest.isValid() && nearest > seekTime && (nearest - seekTime) <= miniGap && isTimeBuffered(nearest + miniGap)) {
                GST_DEBUG("[Seek] Changed the seek target time from %f to %f, a near point in the future", seekTime.toFloat(), nearest.toFloat());
                seekTime = nearest;
            }
        }
    }

    // Check if MSE has samples for requested time and defer actual seek if needed.
    if (!isTimeBuffered(seekTime)) {
        GST_DEBUG("[Seek] Delaying the seek: MSE is not ready");
        GstStateChangeReturn setStateResult = gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
        if (setStateResult == GST_STATE_CHANGE_FAILURE) {
            GST_DEBUG("[Seek] Cannot seek, failed to pause playback pipeline.");
            webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
            m_seeking = false;
            return false;
        }
        m_readyState = MediaPlayer::HaveMetadata;
        notifySeekNeedsDataForTime(seekTime);
        ASSERT(!m_mseSeekCompleted);
        return true;
    }

    // Complete previous MSE seek if needed.
    if (!m_mseSeekCompleted) {
        m_mediaSource->monitorSourceBuffers();
        ASSERT(m_mseSeekCompleted);
        // Note: seekCompleted will recursively call us.
        return m_seeking;
    }

    GST_DEBUG("We can seek now");

    // For negative rates GStreamer plays the [0, position] segment backwards.
    gint64 startTime = position, endTime = GST_CLOCK_TIME_NONE;
    if (rate < 0) {
        startTime = 0;
        endTime = position;
    }

    if (!rate)
        rate = 1;

    GST_DEBUG("Actual seek to %" GST_TIME_FORMAT ", end time: %" GST_TIME_FORMAT ", rate: %f", GST_TIME_ARGS(startTime), GST_TIME_ARGS(endTime), rate);

    // This will call notifySeekNeedsData() after some time to tell that the pipeline is ready for sample enqueuing.
    webKitMediaSrcPrepareSeek(WEBKIT_MEDIA_SRC(m_source.get()), seekTime);

    m_gstSeekCompleted = false;
    if (!gst_element_seek(m_pipeline.get(), rate, GST_FORMAT_TIME, seekType, GST_SEEK_TYPE_SET, startTime, GST_SEEK_TYPE_SET, endTime)) {
        webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
        m_seeking = false;
        m_gstSeekCompleted = true;
        GST_DEBUG("doSeek(): gst_element_seek() failed, returning false");
        return false;
    }

    // The samples will be enqueued in notifySeekNeedsData().
    GST_DEBUG("doSeek(): gst_element_seek() succeeded, returning true");
    return true;
}
+
// Finishes the seek once both halves (MSE sample reenqueueing and the GStreamer
// pipeline seek) have completed; otherwise retries a pending seek or bails out.
void MediaPlayerPrivateGStreamerMSE::maybeFinishSeek()
{
    if (!m_seeking || !m_mseSeekCompleted || !m_gstSeekCompleted)
        return;

    GstState state, newState;
    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &newState, 0);

    // Wait for async state transitions (other than PLAYING->PAUSED) to settle first.
    if (getStateResult == GST_STATE_CHANGE_ASYNC
        && !(state == GST_STATE_PLAYING && newState == GST_STATE_PAUSED)) {
        GST_DEBUG("[Seek] Delaying seek finish");
        return;
    }

    // A new target arrived while this seek was in flight: run it now.
    if (m_seekIsPending) {
        GST_DEBUG("[Seek] Committing pending seek to %f", m_seekTime);
        m_seekIsPending = false;
        if (!doSeek()) {
            GST_WARNING("[Seek] Seeking to %f failed", m_seekTime);
            m_cachedPosition = -1;
        }
        return;
    }

    GST_DEBUG("[Seek] Seeked to %f", m_seekTime);

    webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
    m_seeking = false;
    m_cachedPosition = -1; // Invalidate the cached position so it gets re-queried.
    // The pipeline can still have a pending state. In this case a position query will fail.
    // Right now we can use m_seekTime as a fallback.
    m_canFallBackToLastFinishedSeekPosition = true;
    timeChanged();
}
+
// Rate changes are not supported by the MSE player yet; overrides the base
// implementation with a no-op.
void MediaPlayerPrivateGStreamerMSE::updatePlaybackRate()
{
    notImplemented();
}
+
// True from the moment a seek is requested until maybeFinishSeek() completes it.
bool MediaPlayerPrivateGStreamerMSE::seeking() const
{
    return m_seeking;
}
+
// FIXME: MediaPlayerPrivateGStreamer manages the ReadyState on its own. We shouldn't change it manually.
void MediaPlayerPrivateGStreamerMSE::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (readyState == m_readyState)
        return;

    // While seeking, updateStates() owns the ready state; don't fight it.
    if (seeking()) {
        GST_DEBUG("Skip ready state change(%s -> %s) due to seek\n", dumpReadyState(m_readyState), dumpReadyState(readyState));
        return;
    }

    GST_DEBUG("Ready State Changed manually from %u to %u", m_readyState, readyState);
    MediaPlayer::ReadyState oldReadyState = m_readyState;
    m_readyState = readyState;
    GST_DEBUG("m_readyState: %s -> %s", dumpReadyState(oldReadyState), dumpReadyState(m_readyState));

    // Crossing into HaveCurrentData may unblock a seek waiting for data.
    if (oldReadyState < MediaPlayer::HaveCurrentData && m_readyState >= MediaPlayer::HaveCurrentData) {
        GST_DEBUG("[Seek] Reporting load state changed to trigger seek continuation");
        loadStateChanged();
    }
    m_player->readyStateChanged();

    GstState pipelineState;
    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &pipelineState, nullptr, 250 * GST_NSECOND);
    bool isPlaying = (getStateResult == GST_STATE_CHANGE_SUCCESS && pipelineState == GST_STATE_PLAYING);

    // Dropping back to HaveMetadata while playing means data ran out: pause the pipeline.
    if (m_readyState == MediaPlayer::HaveMetadata && oldReadyState > MediaPlayer::HaveMetadata && isPlaying) {
        GST_TRACE("Changing pipeline to PAUSED...");
        bool ok = changePipelineState(GST_STATE_PAUSED);
        GST_TRACE("Changed pipeline to PAUSED: %s", ok ? "Success" : "Error");
    }
}
+
// Called when the MediaSource needs more time to fulfil the seek: marks the MSE
// half of the seek as incomplete until seekCompleted() arrives.
void MediaPlayerPrivateGStreamerMSE::waitForSeekCompleted()
{
    if (!m_seeking)
        return;

    GST_DEBUG("Waiting for MSE seek completed");
    m_mseSeekCompleted = false;
}
+
// Called by the MediaSource when it has finished providing data for the seek
// position. Resumes the deferred seek and, if everything is done, playback.
void MediaPlayerPrivateGStreamerMSE::seekCompleted()
{
    if (m_mseSeekCompleted)
        return;

    GST_DEBUG("MSE seek completed");
    m_mseSeekCompleted = true;

    // Retry the pipeline seek now that samples are available.
    doSeek();

    if (!seeking() && m_readyState >= MediaPlayer::HaveFutureData)
        changePipelineState(GST_STATE_PLAYING);

    if (!seeking())
        m_player->timeChanged();
}
+
// Playback rate changes are not supported by the MSE player yet.
void MediaPlayerPrivateGStreamerMSE::setRate(float)
{
    notImplemented();
}
+
+std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateGStreamerMSE::buffered() const
+{
+ return m_mediaSource ? m_mediaSource->buffered() : std::make_unique<PlatformTimeRanges>();
+}
+
// Called when playbin exposes its source element, which must be our webkitmediasrc.
// Wires the source to the playback pipeline, the MediaSource and this player.
void MediaPlayerPrivateGStreamerMSE::sourceChanged()
{
    m_source = nullptr;
    // g_object_get() returns a new reference, captured by outPtr().
    g_object_get(m_pipeline.get(), "source", &m_source.outPtr(), nullptr);

    ASSERT(WEBKIT_IS_MEDIA_SRC(m_source.get()));

    m_playbackPipeline->setWebKitMediaSrc(WEBKIT_MEDIA_SRC(m_source.get()));

    MediaSourceGStreamer::open(*m_mediaSource.get(), *this);
    // Swapped connections: the callbacks receive this player as their first argument.
    g_signal_connect_swapped(m_source.get(), "video-changed", G_CALLBACK(videoChangedCallback), this);
    g_signal_connect_swapped(m_source.get(), "audio-changed", G_CALLBACK(audioChangedCallback), this);
    g_signal_connect_swapped(m_source.get(), "text-changed", G_CALLBACK(textChangedCallback), this);
    webKitMediaSrcSetMediaPlayerPrivate(WEBKIT_MEDIA_SRC(m_source.get()), this);
}
+
+void MediaPlayerPrivateGStreamerMSE::updateStates()
+{
+ if (UNLIKELY(!m_pipeline || m_errorOccured))
+ return;
+
+ MediaPlayer::NetworkState oldNetworkState = m_networkState;
+ MediaPlayer::ReadyState oldReadyState = m_readyState;
+ GstState state, pending;
+
+ GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &pending, 250 * GST_NSECOND);
+
+ bool shouldUpdatePlaybackState = false;
+ switch (getStateResult) {
+ case GST_STATE_CHANGE_SUCCESS: {
+ GST_DEBUG("State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
+
+ // Do nothing if on EOS and state changed to READY to avoid recreating the player
+ // on HTMLMediaElement and properly generate the video 'ended' event.
+ if (m_isEndReached && state == GST_STATE_READY)
+ break;
+
+ m_resetPipeline = (state <= GST_STATE_READY);
+ if (m_resetPipeline)
+ m_mediaTimeDuration = MediaTime::zeroTime();
+
+ // Update ready and network states.
+ switch (state) {
+ case GST_STATE_NULL:
+ m_readyState = MediaPlayer::HaveNothing;
+ GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
+ m_networkState = MediaPlayer::Empty;
+ break;
+ case GST_STATE_READY:
+ m_readyState = MediaPlayer::HaveMetadata;
+ GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
+ m_networkState = MediaPlayer::Empty;
+ break;
+ case GST_STATE_PAUSED:
+ case GST_STATE_PLAYING:
+ if (seeking()) {
+ m_readyState = MediaPlayer::HaveMetadata;
+ // FIXME: Should we manage NetworkState too?
+ GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
+ } else if (m_buffering) {
+ if (m_bufferingPercentage == 100) {
+ GST_DEBUG("[Buffering] Complete.");
+ m_buffering = false;
+ m_readyState = MediaPlayer::HaveEnoughData;
+ GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
+ m_networkState = m_downloadFinished ? MediaPlayer::Idle : MediaPlayer::Loading;
+ } else {
+ m_readyState = MediaPlayer::HaveCurrentData;
+ GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
+ m_networkState = MediaPlayer::Loading;
+ }
+ } else if (m_downloadFinished) {
+ m_readyState = MediaPlayer::HaveEnoughData;
+ GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
+ m_networkState = MediaPlayer::Loaded;
+ } else {
+ m_readyState = MediaPlayer::HaveFutureData;
+ GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
+ m_networkState = MediaPlayer::Loading;
+ }
+
+ if (m_eosMarked && state == GST_STATE_PLAYING)
+ m_eosPending = true;
+
+ break;
+ default:
+ ASSERT_NOT_REACHED();
+ break;
+ }
+
+ // Sync states where needed.
+ if (state == GST_STATE_PAUSED) {
+ if (!m_volumeAndMuteInitialized) {
+ notifyPlayerOfVolumeChange();
+ notifyPlayerOfMute();
+ m_volumeAndMuteInitialized = true;
+ }
+
+ if (!seeking() && !m_buffering && !m_paused && m_playbackRate) {
+ GST_DEBUG("[Buffering] Restarting playback.");
+ changePipelineState(GST_STATE_PLAYING);
+ }
+ } else if (state == GST_STATE_PLAYING) {
+ m_paused = false;
+
+ if ((m_buffering && !isLiveStream()) || !m_playbackRate) {
+ GST_DEBUG("[Buffering] Pausing stream for buffering.");
+ changePipelineState(GST_STATE_PAUSED);
+ }
+ } else
+ m_paused = true;
+
+ if (m_requestedState == GST_STATE_PAUSED && state == GST_STATE_PAUSED) {
+ shouldUpdatePlaybackState = true;
+ GST_DEBUG("Requested state change to %s was completed", gst_element_state_get_name(state));
+ }
+
+ break;
+ }
+ case GST_STATE_CHANGE_ASYNC:
+ GST_DEBUG("Async: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
+ // Change in progress.
+ break;
+ case GST_STATE_CHANGE_FAILURE:
+ GST_WARNING("Failure: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
+ // Change failed.
+ return;
+ case GST_STATE_CHANGE_NO_PREROLL:
+ GST_DEBUG("No preroll: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
+
+ // Live pipelines go in PAUSED without prerolling.
+ m_isStreaming = true;
+
+ if (state == GST_STATE_READY) {
+ m_readyState = MediaPlayer::HaveNothing;
+ GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
+ } else if (state == GST_STATE_PAUSED) {
+ m_readyState = MediaPlayer::HaveEnoughData;
+ GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
+ m_paused = true;
+ } else if (state == GST_STATE_PLAYING)
+ m_paused = false;
+
+ if (!m_paused && m_playbackRate)
+ changePipelineState(GST_STATE_PLAYING);
+
+ m_networkState = MediaPlayer::Loading;
+ break;
+ default:
+ GST_DEBUG("Else : %d", getStateResult);
+ break;
+ }
+
+ m_requestedState = GST_STATE_VOID_PENDING;
+
+ if (shouldUpdatePlaybackState)
+ m_player->playbackStateChanged();
+
+ if (m_networkState != oldNetworkState) {
+ GST_DEBUG("Network State Changed from %u to %u", oldNetworkState, m_networkState);
+ m_player->networkStateChanged();
+ }
+ if (m_readyState != oldReadyState) {
+ GST_DEBUG("Ready State Changed from %u to %u", oldReadyState, m_readyState);
+ m_player->readyStateChanged();
+ }
+
+ if (getStateResult == GST_STATE_CHANGE_SUCCESS && state >= GST_STATE_PAUSED) {
+ updatePlaybackRate();
+ maybeFinishSeek();
+ }
+}
+// Called when the pipeline finishes an asynchronous state change. If a seek is
+// in flight, try to complete it; otherwise refresh the ready/network states.
+void MediaPlayerPrivateGStreamerMSE::asyncStateChangeDone()
+{
+    // Nothing to do without a pipeline or after a fatal error.
+    if (UNLIKELY(!m_pipeline || m_errorOccured))
+        return;
+
+    if (m_seeking)
+        maybeFinishSeek();
+    else
+        updateStates();
+}
+
+// Returns true when the attached MediaSource reports |time| as lying inside one
+// of its buffered ranges. False when no MediaSource is attached.
+bool MediaPlayerPrivateGStreamerMSE::isTimeBuffered(const MediaTime &time) const
+{
+    bool result = m_mediaSource && m_mediaSource->buffered()->contain(time);
+    GST_DEBUG("Time %f buffered? %s", time.toDouble(), result ? "Yes" : "No");
+    return result;
+}
+
+// Stores the client created by MediaSourceClientGStreamerMSE::create(); the
+// RefPtr member keeps it alive for the lifetime of this player.
+void MediaPlayerPrivateGStreamerMSE::setMediaSourceClient(Ref<MediaSourceClientGStreamerMSE> client)
+{
+    m_mediaSourceClient = client.ptr();
+}
+
+// Accessor for the media source client; may return null before load().
+RefPtr<MediaSourceClientGStreamerMSE> MediaPlayerPrivateGStreamerMSE::mediaSourceClient()
+{
+    return m_mediaSourceClient;
+}
+
+// Pulls the new duration from the media source client and, when it actually
+// changed, notifies the MediaPlayer, the playback pipeline and the media source.
+void MediaPlayerPrivateGStreamerMSE::durationChanged()
+{
+    if (!m_mediaSourceClient) {
+        GST_DEBUG("m_mediaSourceClient is null, doing nothing");
+        return;
+    }
+
+    MediaTime previousDuration = m_mediaTimeDuration;
+    m_mediaTimeDuration = m_mediaSourceClient->duration();
+
+    GST_TRACE("previous=%f, new=%f", previousDuration.toFloat(), m_mediaTimeDuration.toFloat());
+
+    // Avoid emitting durationchanged in the case where the previous duration was 0 because that case is already handled
+    // by the HTMLMediaElement.
+    // NOTE(review): the guard below actually tests isValid(), not zero — presumably the
+    // initial duration is the invalid time; confirm this matches the comment's intent.
+    if (m_mediaTimeDuration != previousDuration && m_mediaTimeDuration.isValid() && previousDuration.isValid()) {
+        m_player->durationChanged();
+        m_playbackPipeline->notifyDurationChanged();
+        m_mediaSource->durationChanged(m_mediaTimeDuration);
+    }
+}
+
+// Lazily-built, process-wide set of container MIME types this engine accepts
+// for MSE. Building the cache also registers the WebKit MSE GStreamer element.
+static HashSet<String, ASCIICaseInsensitiveHash>& mimeTypeCache()
+{
+    static NeverDestroyed<HashSet<String, ASCIICaseInsensitiveHash>> cache = []()
+    {
+        initializeGStreamerAndRegisterWebKitMSEElement();
+        HashSet<String, ASCIICaseInsensitiveHash> set;
+        // Only ISO BMFF (MP4) containers are advertised; see supportsType() for
+        // the explicit WebM rejection.
+        const char* mimeTypes[] = {
+            "video/mp4",
+            "audio/mp4"
+        };
+        for (auto& type : mimeTypes)
+            set.add(type);
+        return set;
+    }();
+    return cache;
+}
+
+void MediaPlayerPrivateGStreamerMSE::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
+{
+ types = mimeTypeCache();
+}
+
+// Invoked by an AppendPipeline once it has identified a track in the appended
+// data. Updates the natural video size (for video tracks) and attaches the
+// track to the playback pipeline (or re-attaches it if it was known before).
+void MediaPlayerPrivateGStreamerMSE::trackDetected(RefPtr<AppendPipeline> appendPipeline, RefPtr<WebCore::TrackPrivateBase> oldTrack, RefPtr<WebCore::TrackPrivateBase> newTrack)
+{
+    ASSERT(appendPipeline->track() == newTrack);
+
+    GstCaps* caps = appendPipeline->appsinkCaps();
+    ASSERT(caps);
+    GST_DEBUG("track ID: %s, caps: %" GST_PTR_FORMAT, newTrack->id().string().latin1().data(), caps);
+
+    GstStructure* structure = gst_caps_get_structure(caps, 0);
+    const gchar* mediaType = gst_structure_get_name(structure);
+    GstVideoInfo info;
+
+    if (g_str_has_prefix(mediaType, "video/") && gst_video_info_from_caps(&info, caps)) {
+        float width, height;
+
+        // Correct the height by the pixel aspect ratio so m_videoSize reflects
+        // the display size, not the coded size.
+        width = info.width;
+        height = info.height * ((float) info.par_d / (float) info.par_n);
+        m_videoSize.setWidth(width);
+        m_videoSize.setHeight(height);
+    }
+
+    if (!oldTrack)
+        m_playbackPipeline->attachTrack(appendPipeline->sourceBufferPrivate(), newTrack, structure, caps);
+    else
+        m_playbackPipeline->reattachTrack(appendPipeline->sourceBufferPrivate(), newTrack);
+}
+
+// Returns true only when every entry of the comma-separated |codecs| string
+// matches one of the fnmatch()-style glob patterns below.
+bool MediaPlayerPrivateGStreamerMSE::supportsCodecs(const String& codecs)
+{
+    static Vector<const char*> supportedCodecs = { "avc*", "mp4a*", "mpeg", "x-h264" };
+    Vector<String> codecEntries;
+    codecs.split(',', false, codecEntries);
+
+    for (String codec : codecEntries) {
+        bool isCodecSupported = false;
+
+        // If the codec is named like a mimetype (eg: video/avc) remove the "video/" part.
+        size_t slashIndex = codec.find('/');
+        if (slashIndex != WTF::notFound)
+            codec = codec.substring(slashIndex+1);
+
+        // Keep the CString alive for the whole matching loop: String::utf8()
+        // returns a temporary, so caching only its data() pointer (as the code
+        // previously did) left fnmatch() reading freed memory.
+        CString codecData = codec.utf8();
+        for (const auto& pattern : supportedCodecs) {
+            isCodecSupported = !fnmatch(pattern, codecData.data(), 0);
+            if (isCodecSupported)
+                break;
+        }
+        // One unsupported entry rejects the whole codecs string.
+        if (!isCodecSupported)
+            return false;
+    }
+
+    return true;
+}
+
+// MediaPlayer engine hook: decides whether this engine can play the given
+// container/codecs combination. Only MSE loads are accepted.
+MediaPlayer::SupportsType MediaPlayerPrivateGStreamerMSE::supportsType(const MediaEngineSupportParameters& parameters)
+{
+    MediaPlayer::SupportsType result = MediaPlayer::IsNotSupported;
+    if (!parameters.isMediaSource)
+        return result;
+
+    // Disable VPX/Opus on MSE for now, mp4/avc1 seems way more reliable currently.
+    if (parameters.type.endsWith("webm"))
+        return result;
+
+    // YouTube TV provides empty types for some videos and we want to be selected as best media engine for them.
+    if (parameters.type.isEmpty()) {
+        result = MediaPlayer::MayBeSupported;
+        return result;
+    }
+
+    // Spec says we should not return "probably" if the codecs string is empty.
+    if (mimeTypeCache().contains(parameters.type)) {
+        if (parameters.codecs.isEmpty())
+            result = MediaPlayer::MayBeSupported;
+        else
+            result = supportsCodecs(parameters.codecs) ? MediaPlayer::IsSupported : MediaPlayer::IsNotSupported;
+    }
+
+    // Give platform-specific code a chance to refine the verdict.
+    return extendedSupportsType(parameters, result);
+}
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+// Forwards a decryption key to every append pipeline so pending decryptors can proceed.
+void MediaPlayerPrivateGStreamerMSE::dispatchDecryptionKey(GstBuffer* buffer)
+{
+    // Iterate by const reference: copying each HashMap entry would churn two
+    // RefPtr reference counts per pipeline for no benefit.
+    for (const auto& it : m_appendPipelinesMap)
+        it.value->dispatchDecryptionKey(buffer);
+}
+#endif
+
+// Records that the MediaSource reached end-of-stream without error; the EOS
+// itself is emitted later by updateStates()/currentMediaTime().
+void MediaPlayerPrivateGStreamerMSE::markEndOfStream(MediaSourcePrivate::EndOfStreamStatus status)
+{
+    // Error statuses are reported through other paths; only the clean EOS is handled here.
+    if (status != MediaSourcePrivate::EosNoError)
+        return;
+
+    GST_DEBUG("Marking end of stream");
+    m_eosMarked = true;
+    updateStates();
+}
+
+// Returns the current playback position. As a side effect, when an EOS is
+// pending and playback is paused or past the duration, latches end-of-stream
+// state (mutable members allow this in a const accessor) and notifies the player.
+MediaTime MediaPlayerPrivateGStreamerMSE::currentMediaTime() const
+{
+    MediaTime position = MediaPlayerPrivateGStreamer::currentMediaTime();
+
+    if (m_eosPending && (paused() || (position >= durationMediaTime()))) {
+        if (m_networkState != MediaPlayer::Loaded) {
+            m_networkState = MediaPlayer::Loaded;
+            m_player->networkStateChanged();
+        }
+
+        m_eosPending = false;
+        m_isEndReached = true;
+        // Clamp the reported position and duration-at-EOS to the media duration.
+        m_cachedPosition = m_mediaTimeDuration.toFloat();
+        m_durationAtEOS = m_mediaTimeDuration.toFloat();
+        m_player->timeChanged();
+    }
+    return position;
+}
+
+// Upper bound of the seekable range: the duration, or for infinite-duration
+// (live-like) sources the highest buffered end time.
+float MediaPlayerPrivateGStreamerMSE::maxTimeSeekable() const
+{
+    if (UNLIKELY(m_errorOccured))
+        return 0;
+
+    GST_DEBUG("maxTimeSeekable");
+    float result = durationMediaTime().toFloat();
+    // Infinite duration means live stream.
+    if (std::isinf(result)) {
+        MediaTime maxBufferedTime = buffered()->maximumBufferedTime();
+        // Return the highest end time reported by the buffered attribute.
+        result = maxBufferedTime.isValid() ? maxBufferedTime.toFloat() : 0;
+    }
+
+    return result;
+}
+
+} // namespace WebCore.
+
+#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.h b/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.h
new file mode 100644
index 000000000..0d3ebb902
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.h
@@ -0,0 +1,132 @@
+/*
+ * Copyright (C) 2007, 2009 Apple Inc. All rights reserved.
+ * Copyright (C) 2007 Collabora Ltd. All rights reserved.
+ * Copyright (C) 2007 Alp Toker <alp@atoker.com>
+ * Copyright (C) 2009, 2010, 2016 Igalia S.L
+ * Copyright (C) 2015 Sebastian Dröge <sebastian@centricular.com>
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+#include "GRefPtrGStreamer.h"
+#include "MediaPlayerPrivateGStreamer.h"
+#include "MediaSample.h"
+#include "MediaSourceGStreamer.h"
+#include "PlaybackPipeline.h"
+#include "WebKitMediaSourceGStreamer.h"
+
+namespace WebCore {
+
+class MediaSourceClientGStreamerMSE;
+class AppendPipeline;
+class PlaybackPipeline;
+
+// MediaPlayer private implementation for Media Source Extensions playback on
+// top of the generic GStreamer player. Owns one AppendPipeline per
+// SourceBuffer and a shared PlaybackPipeline that feeds the demuxed samples.
+class MediaPlayerPrivateGStreamerMSE : public MediaPlayerPrivateGStreamer {
+    WTF_MAKE_NONCOPYABLE(MediaPlayerPrivateGStreamerMSE); WTF_MAKE_FAST_ALLOCATED;
+
+    // The client drives append/remove/duration operations and reaches into the
+    // private members above (m_appendPipelinesMap, m_playbackPipeline).
+    friend class MediaSourceClientGStreamerMSE;
+
+public:
+    explicit MediaPlayerPrivateGStreamerMSE(MediaPlayer*);
+    virtual ~MediaPlayerPrivateGStreamerMSE();
+
+    static void registerMediaEngine(MediaEngineRegistrar);
+
+    void load(const String&) override;
+    void load(const String&, MediaSourcePrivateClient*) override;
+
+    // MSE data arrives through appends, so on-disk download buffering is disabled.
+    void setDownloadBuffering() override { };
+
+    bool isLiveStream() const override { return false; }
+    MediaTime currentMediaTime() const override;
+
+    void pause() override;
+    bool seeking() const override;
+    void seek(float) override;
+    void configurePlaySink() override;
+    bool changePipelineState(GstState) override;
+
+    void durationChanged() override;
+    MediaTime durationMediaTime() const override;
+
+    void setRate(float) override;
+    std::unique_ptr<PlatformTimeRanges> buffered() const override;
+    float maxTimeSeekable() const override;
+
+    void sourceChanged() override;
+
+    void setReadyState(MediaPlayer::ReadyState);
+    void waitForSeekCompleted();
+    void seekCompleted();
+    MediaSourcePrivateClient* mediaSourcePrivateClient() { return m_mediaSource.get(); }
+
+    void markEndOfStream(MediaSourcePrivate::EndOfStreamStatus);
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+    void dispatchDecryptionKey(GstBuffer*) override;
+#endif
+
+    void trackDetected(RefPtr<AppendPipeline>, RefPtr<WebCore::TrackPrivateBase> oldTrack, RefPtr<WebCore::TrackPrivateBase> newTrack);
+    void notifySeekNeedsDataForTime(const MediaTime&);
+
+    static bool supportsCodecs(const String& codecs);
+
+private:
+    static void getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>&);
+    static MediaPlayer::SupportsType supportsType(const MediaEngineSupportParameters&);
+
+    static bool isAvailable();
+
+    // FIXME: Reduce code duplication.
+    void updateStates() override;
+
+    bool doSeek(gint64, float, GstSeekFlags) override;
+    bool doSeek();
+    void maybeFinishSeek();
+    void updatePlaybackRate() override;
+    void asyncStateChangeDone() override;
+
+    // FIXME: Implement.
+    unsigned long totalVideoFrames() override { return 0; }
+    unsigned long droppedVideoFrames() override { return 0; }
+    unsigned long corruptedVideoFrames() override { return 0; }
+    MediaTime totalFrameDelay() override { return MediaTime::zeroTime(); }
+    bool isTimeBuffered(const MediaTime&) const;
+
+    bool isMediaSource() const override { return true; }
+
+    void setMediaSourceClient(Ref<MediaSourceClientGStreamerMSE>);
+    RefPtr<MediaSourceClientGStreamerMSE> mediaSourceClient();
+
+    // One append pipeline per SourceBuffer, keyed by its private object.
+    HashMap<RefPtr<SourceBufferPrivateGStreamer>, RefPtr<AppendPipeline>> m_appendPipelinesMap;
+    bool m_eosMarked = false;
+    // Mutable: latched from the const currentMediaTime() accessor.
+    mutable bool m_eosPending = false;
+    bool m_gstSeekCompleted = true;
+    RefPtr<MediaSourcePrivateClient> m_mediaSource;
+    RefPtr<MediaSourceClientGStreamerMSE> m_mediaSourceClient;
+    MediaTime m_mediaTimeDuration;
+    bool m_mseSeekCompleted = true;
+    RefPtr<PlaybackPipeline> m_playbackPipeline;
+};
+
+} // namespace WebCore
+
+#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceClientGStreamerMSE.cpp b/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceClientGStreamerMSE.cpp
new file mode 100644
index 000000000..441401e6a
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceClientGStreamerMSE.cpp
@@ -0,0 +1,216 @@
+/*
+ * Copyright (C) 2016 Metrological Group B.V.
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "config.h"
+#include "MediaSourceClientGStreamerMSE.h"
+
+#include "AppendPipeline.h"
+#include "MediaPlayerPrivateGStreamerMSE.h"
+#include "WebKitMediaSourceGStreamer.h"
+#include <gst/gst.h>
+
+GST_DEBUG_CATEGORY_EXTERN(webkit_mse_debug);
+#define GST_CAT_DEFAULT webkit_mse_debug
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+namespace WebCore {
+
+// Factory: creates the client and immediately registers it with the player,
+// which keeps its own reference. Main thread only.
+Ref<MediaSourceClientGStreamerMSE> MediaSourceClientGStreamerMSE::create(MediaPlayerPrivateGStreamerMSE& playerPrivate)
+{
+    ASSERT(WTF::isMainThread());
+
+    // No return adoptRef(new MediaSourceClientGStreamerMSE(playerPrivate)) because the ownership has already been transferred to MediaPlayerPrivateGStreamerMSE.
+    Ref<MediaSourceClientGStreamerMSE> client(adoptRef(*new MediaSourceClientGStreamerMSE(playerPrivate)));
+    playerPrivate.setMediaSourceClient(client.get());
+    return client;
+}
+
+// The duration starts as the invalid time until the MediaSource reports one.
+MediaSourceClientGStreamerMSE::MediaSourceClientGStreamerMSE(MediaPlayerPrivateGStreamerMSE& playerPrivate)
+    : m_playerPrivate(&playerPrivate)
+    , m_duration(MediaTime::invalidTime())
+{
+    ASSERT(WTF::isMainThread());
+}
+
+// Main thread only; the player pointer is cleared separately via clearPlayerPrivate().
+MediaSourceClientGStreamerMSE::~MediaSourceClientGStreamerMSE()
+{
+    ASSERT(WTF::isMainThread());
+}
+
+// Creates an AppendPipeline for the new SourceBuffer, registers it in the
+// player's map and wires the buffer into the playback pipeline.
+MediaSourcePrivate::AddStatus MediaSourceClientGStreamerMSE::addSourceBuffer(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate, const ContentType&)
+{
+    ASSERT(WTF::isMainThread());
+
+    // The player may already have been detached (see clearPlayerPrivate()).
+    if (!m_playerPrivate)
+        return MediaSourcePrivate::AddStatus::NotSupported;
+
+    ASSERT(m_playerPrivate->m_playbackPipeline);
+    ASSERT(sourceBufferPrivate);
+
+    RefPtr<AppendPipeline> appendPipeline = adoptRef(new AppendPipeline(*this, *sourceBufferPrivate, *m_playerPrivate));
+    GST_TRACE("Adding SourceBuffer to AppendPipeline: this=%p sourceBuffer=%p appendPipeline=%p", this, sourceBufferPrivate.get(), appendPipeline.get());
+    m_playerPrivate->m_appendPipelinesMap.add(sourceBufferPrivate, appendPipeline);
+
+    return m_playerPrivate->m_playbackPipeline->addSourceBuffer(sourceBufferPrivate);
+}
+
+// Last duration reported through durationChanged(); invalid time until then.
+const MediaTime& MediaSourceClientGStreamerMSE::duration()
+{
+    ASSERT(WTF::isMainThread());
+
+    return m_duration;
+}
+
+// Caches the new duration and forwards the change to the player. Invalid and
+// infinite durations are ignored (the cached value is left untouched).
+void MediaSourceClientGStreamerMSE::durationChanged(const MediaTime& duration)
+{
+    ASSERT(WTF::isMainThread());
+
+    GST_TRACE("duration: %f", duration.toFloat());
+    if (!duration.isValid() || duration.isPositiveInfinite() || duration.isNegativeInfinite())
+        return;
+
+    m_duration = duration;
+    if (m_playerPrivate)
+        m_playerPrivate->durationChanged();
+}
+
+// Aborts any in-progress append on the given SourceBuffer's pipeline.
+void MediaSourceClientGStreamerMSE::abort(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate)
+{
+    ASSERT(WTF::isMainThread());
+
+    GST_DEBUG("aborting");
+
+    if (!m_playerPrivate)
+        return;
+
+    RefPtr<AppendPipeline> appendPipeline = m_playerPrivate->m_appendPipelinesMap.get(sourceBufferPrivate);
+
+    ASSERT(appendPipeline);
+
+    appendPipeline->abort();
+}
+
+// Resets the append pipeline's parser state. Currently implemented by reusing
+// AppendPipeline::abort(), which discards any partially-parsed data.
+void MediaSourceClientGStreamerMSE::resetParserState(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate)
+{
+    ASSERT(WTF::isMainThread());
+
+    GST_DEBUG("resetting parser state");
+
+    if (!m_playerPrivate)
+        return;
+
+    RefPtr<AppendPipeline> appendPipeline = m_playerPrivate->m_appendPipelinesMap.get(sourceBufferPrivate);
+
+    ASSERT(appendPipeline);
+
+    appendPipeline->abort();
+}
+
+// Copies |data| into a GstBuffer and pushes it into the SourceBuffer's append
+// pipeline. Returns true when the push was accepted (GST_FLOW_OK).
+bool MediaSourceClientGStreamerMSE::append(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate, const unsigned char* data, unsigned length)
+{
+    ASSERT(WTF::isMainThread());
+
+    GST_DEBUG("Appending %u bytes", length);
+
+    if (!m_playerPrivate)
+        return false;
+
+    RefPtr<AppendPipeline> appendPipeline = m_playerPrivate->m_appendPipelinesMap.get(sourceBufferPrivate);
+
+    ASSERT(appendPipeline);
+
+    // The buffer wraps fastMalloc'ed memory (freed by fastFree when the buffer
+    // is destroyed) and the caller's bytes are copied into it.
+    void* bufferData = fastMalloc(length);
+    GstBuffer* buffer = gst_buffer_new_wrapped_full(static_cast<GstMemoryFlags>(0), bufferData, length, 0, length, bufferData, fastFree);
+    gst_buffer_fill(buffer, 0, data, length);
+
+    return appendPipeline->pushNewBuffer(buffer) == GST_FLOW_OK;
+}
+
+// Forwards the MediaSource end-of-stream signal to the player.
+void MediaSourceClientGStreamerMSE::markEndOfStream(MediaSourcePrivate::EndOfStreamStatus status)
+{
+    ASSERT(WTF::isMainThread());
+
+    if (!m_playerPrivate)
+        return;
+
+    m_playerPrivate->markEndOfStream(status);
+}
+
+// Detaches a SourceBuffer: unlinks its AppendPipeline from the player and
+// removes the buffer from the playback pipeline.
+void MediaSourceClientGStreamerMSE::removedFromMediaSource(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate)
+{
+    ASSERT(WTF::isMainThread());
+
+    if (!m_playerPrivate)
+        return;
+
+    ASSERT(m_playerPrivate->m_playbackPipeline);
+
+    RefPtr<AppendPipeline> appendPipeline = m_playerPrivate->m_appendPipelinesMap.get(sourceBufferPrivate);
+
+    ASSERT(appendPipeline);
+
+    appendPipeline->clearPlayerPrivate();
+    m_playerPrivate->m_appendPipelinesMap.remove(sourceBufferPrivate);
+    // AppendPipeline destructor will take care of cleaning up when appropriate.
+
+    m_playerPrivate->m_playbackPipeline->removeSourceBuffer(sourceBufferPrivate);
+}
+
+// Flushes the playback pipeline branch for the given track.
+void MediaSourceClientGStreamerMSE::flush(AtomicString trackId)
+{
+    ASSERT(WTF::isMainThread());
+
+    if (m_playerPrivate)
+        m_playerPrivate->m_playbackPipeline->flush(trackId);
+}
+
+// Hands a demuxed media sample over to the playback pipeline for rendering.
+void MediaSourceClientGStreamerMSE::enqueueSample(PassRefPtr<MediaSample> prpSample)
+{
+    ASSERT(WTF::isMainThread());
+
+    if (m_playerPrivate)
+        m_playerPrivate->m_playbackPipeline->enqueueSample(prpSample);
+}
+
+// Returns the player's WebKitMediaSrc element (or null when detached); the
+// GRefPtr return value takes its own reference.
+GRefPtr<WebKitMediaSrc> MediaSourceClientGStreamerMSE::webKitMediaSrc()
+{
+    ASSERT(WTF::isMainThread());
+
+    if (!m_playerPrivate)
+        return nullptr;
+
+    WebKitMediaSrc* source = WEBKIT_MEDIA_SRC(m_playerPrivate->m_source.get());
+
+    ASSERT(WEBKIT_IS_MEDIA_SRC(source));
+
+    return source;
+}
+
+// Severs the back-pointer to the player (called when the player is destroyed);
+// subsequent client calls become no-ops.
+void MediaSourceClientGStreamerMSE::clearPlayerPrivate()
+{
+    ASSERT(WTF::isMainThread());
+
+    m_playerPrivate = nullptr;
+}
+
+} // namespace WebCore.
+
+#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceClientGStreamerMSE.h b/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceClientGStreamerMSE.h
new file mode 100644
index 000000000..c3d4ac7bc
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceClientGStreamerMSE.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2016 Metrological Group B.V.
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+#include "GRefPtrGStreamer.h"
+#include "MediaSourcePrivate.h"
+#include "MediaSourcePrivateClient.h"
+#include "WebKitMediaSourceGStreamer.h"
+#include <wtf/MediaTime.h>
+
+namespace WebCore {
+
+class ContentType;
+class MediaPlayerPrivateGStreamerMSE;
+class MediaSample;
+class SourceBufferPrivateGStreamer;
+
+// Mediator between the MSE platform objects (MediaSourceGStreamer,
+// SourceBufferPrivateGStreamer) and MediaPlayerPrivateGStreamerMSE. All calls
+// must be made on the main thread.
+class MediaSourceClientGStreamerMSE : public RefCounted<MediaSourceClientGStreamerMSE> {
+public:
+    static Ref<MediaSourceClientGStreamerMSE> create(MediaPlayerPrivateGStreamerMSE&);
+    virtual ~MediaSourceClientGStreamerMSE();
+
+    // From MediaSourceGStreamer.
+    MediaSourcePrivate::AddStatus addSourceBuffer(RefPtr<SourceBufferPrivateGStreamer>, const ContentType&);
+    void durationChanged(const MediaTime&);
+    void markEndOfStream(MediaSourcePrivate::EndOfStreamStatus);
+
+    // From SourceBufferPrivateGStreamer.
+    void abort(RefPtr<SourceBufferPrivateGStreamer>);
+    void resetParserState(RefPtr<SourceBufferPrivateGStreamer>);
+    bool append(RefPtr<SourceBufferPrivateGStreamer>, const unsigned char*, unsigned);
+    void removedFromMediaSource(RefPtr<SourceBufferPrivateGStreamer>);
+    void flush(AtomicString);
+    void enqueueSample(PassRefPtr<MediaSample>);
+
+    // Called when the player goes away; later calls become no-ops.
+    void clearPlayerPrivate();
+
+    const MediaTime& duration();
+    GRefPtr<WebKitMediaSrc> webKitMediaSrc();
+
+private:
+    MediaSourceClientGStreamerMSE(MediaPlayerPrivateGStreamerMSE&);
+
+    // Raw back-pointer; lifetime managed via clearPlayerPrivate().
+    MediaPlayerPrivateGStreamerMSE* m_playerPrivate;
+    MediaTime m_duration;
+};
+
+} // namespace WebCore.
+
+#endif // USE(GSTREAMER)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceGStreamer.cpp
new file mode 100644
index 000000000..92095b610
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceGStreamer.cpp
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2013 Google Inc. All rights reserved.
+ * Copyright (C) 2013 Orange
+ * Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com>
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
+ * Copyright (C) 2015, 2016 Igalia, S.L
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ * * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "MediaSourceGStreamer.h"
+
+#if ENABLE(MEDIA_SOURCE) && USE(GSTREAMER)
+
+#include "ContentType.h"
+#include "MediaPlayerPrivateGStreamer.h"
+#include "MediaPlayerPrivateGStreamerMSE.h"
+#include "MediaSourceClientGStreamerMSE.h"
+#include "NotImplemented.h"
+#include "SourceBufferPrivateGStreamer.h"
+#include "TimeRanges.h"
+#include "WebKitMediaSourceGStreamer.h"
+#include <wtf/PassRefPtr.h>
+#include <wtf/glib/GRefPtr.h>
+
+namespace WebCore {
+
+// Entry point: creates the private and hands it to the MediaSource client.
+void MediaSourceGStreamer::open(MediaSourcePrivateClient& mediaSource, MediaPlayerPrivateGStreamerMSE& playerPrivate)
+{
+    mediaSource.setPrivateAndOpen(adoptRef(*new MediaSourceGStreamer(mediaSource, playerPrivate)));
+}
+
+// Creating the client also registers it with the player (see
+// MediaSourceClientGStreamerMSE::create()).
+MediaSourceGStreamer::MediaSourceGStreamer(MediaSourcePrivateClient& mediaSource, MediaPlayerPrivateGStreamerMSE& playerPrivate)
+    : MediaSourcePrivate()
+    , m_client(MediaSourceClientGStreamerMSE::create(playerPrivate))
+    , m_mediaSource(mediaSource)
+    , m_playerPrivate(playerPrivate)
+{
+}
+
+// Detach surviving SourceBuffers so they stop referencing this media source.
+MediaSourceGStreamer::~MediaSourceGStreamer()
+{
+    for (auto& sourceBufferPrivate : m_sourceBuffers)
+        sourceBufferPrivate->clearMediaSource();
+}
+
+MediaSourceGStreamer::AddStatus MediaSourceGStreamer::addSourceBuffer(const ContentType& contentType, RefPtr<SourceBufferPrivate>& sourceBufferPrivate)
+{
+ sourceBufferPrivate = SourceBufferPrivateGStreamer::create(this, m_client.get(), contentType);
+ RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivateGStreamer = static_cast<SourceBufferPrivateGStreamer*>(sourceBufferPrivate.get());
+ m_sourceBuffers.add(sourceBufferPrivateGStreamer);
+ return m_client->addSourceBuffer(sourceBufferPrivateGStreamer, contentType);
+}
+
+// Drops a SourceBuffer from both the owned and active sets.
+void MediaSourceGStreamer::removeSourceBuffer(SourceBufferPrivate* sourceBufferPrivate)
+{
+    RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivateGStreamer = static_cast<SourceBufferPrivateGStreamer*>(sourceBufferPrivate);
+    ASSERT(m_sourceBuffers.contains(sourceBufferPrivateGStreamer));
+
+    sourceBufferPrivateGStreamer->clearMediaSource();
+    m_sourceBuffers.remove(sourceBufferPrivateGStreamer);
+    m_activeSourceBuffers.remove(sourceBufferPrivateGStreamer.get());
+}
+
+// Pushes the MediaSource's current duration to the client.
+void MediaSourceGStreamer::durationChanged()
+{
+    m_client->durationChanged(m_mediaSource->duration());
+}
+
+// Forwards end-of-stream to the client (and ultimately to the player).
+void MediaSourceGStreamer::markEndOfStream(EndOfStreamStatus status)
+{
+    m_client->markEndOfStream(status);
+}
+
+// Re-opening after end-of-stream is not supported by this backend yet.
+void MediaSourceGStreamer::unmarkEndOfStream()
+{
+    notImplemented();
+}
+
+// Ready state is owned by the player; this just delegates.
+MediaPlayer::ReadyState MediaSourceGStreamer::readyState() const
+{
+    return m_playerPrivate.readyState();
+}
+
+// Delegates ready-state updates to the player.
+void MediaSourceGStreamer::setReadyState(MediaPlayer::ReadyState state)
+{
+    m_playerPrivate.setReadyState(state);
+}
+
+// Tells the player that the seek must wait for MSE data before completing.
+void MediaSourceGStreamer::waitForSeekCompleted()
+{
+    m_playerPrivate.waitForSeekCompleted();
+}
+
+// Tells the player that the MSE side of the seek has finished.
+void MediaSourceGStreamer::seekCompleted()
+{
+    m_playerPrivate.seekCompleted();
+}
+
+// Keeps the active-SourceBuffer set in sync with the buffer's reported state.
+void MediaSourceGStreamer::sourceBufferPrivateDidChangeActiveState(SourceBufferPrivateGStreamer* sourceBufferPrivate, bool isActive)
+{
+    if (!isActive)
+        m_activeSourceBuffers.remove(sourceBufferPrivate);
+    else if (!m_activeSourceBuffers.contains(sourceBufferPrivate))
+        m_activeSourceBuffers.add(sourceBufferPrivate);
+}
+
+// Buffered ranges are computed by the MediaSource client object.
+std::unique_ptr<PlatformTimeRanges> MediaSourceGStreamer::buffered()
+{
+    return m_mediaSource->buffered();
+}
+
+}
+#endif
diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaSourceGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceGStreamer.h
index ad27d3602..c9a09fa04 100644
--- a/Source/WebCore/platform/graphics/gstreamer/MediaSourceGStreamer.h
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceGStreamer.h
@@ -1,6 +1,9 @@
/*
* Copyright (C) 2013 Google Inc. All rights reserved.
* Copyright (C) 2013 Orange
+ * Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com>
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
+ * Copyright (C) 2015, 2016 Igalia, S.L
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
@@ -29,35 +32,57 @@
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-#ifndef MediaSourceGStreamer_h
-#define MediaSourceGStreamer_h
+#pragma once
#if ENABLE(MEDIA_SOURCE) && USE(GSTREAMER)
-#include "MediaSource.h"
-#include "WebKitMediaSourceGStreamer.h"
+#include "MediaSourcePrivate.h"
+
+#include <wtf/Forward.h>
+#include <wtf/HashSet.h>
+
+typedef struct _WebKitMediaSrc WebKitMediaSrc;
namespace WebCore {
+class SourceBufferPrivateGStreamer;
+class MediaSourceClientGStreamerMSE;
+class MediaPlayerPrivateGStreamerMSE;
+class PlatformTimeRanges;
+
+// FIXME: Should this be called MediaSourcePrivateGStreamer?
class MediaSourceGStreamer final : public MediaSourcePrivate {
public:
- static void open(PassRefPtr<HTMLMediaSource>, WebKitMediaSrc*);
- ~MediaSourceGStreamer();
- AddStatus addSourceBuffer(const ContentType&, RefPtr<SourceBufferPrivate>&);
- double duration() { return m_duration; }
- void setDuration(double);
- void markEndOfStream(EndOfStreamStatus);
- void unmarkEndOfStream();
- MediaPlayer::ReadyState readyState() const { return m_readyState; }
- void setReadyState(MediaPlayer::ReadyState readyState) { m_readyState = readyState; }
+ static void open(MediaSourcePrivateClient&, MediaPlayerPrivateGStreamerMSE&);
+ virtual ~MediaSourceGStreamer();
+
+ MediaSourceClientGStreamerMSE& client() { return m_client.get(); }
+ AddStatus addSourceBuffer(const ContentType&, RefPtr<SourceBufferPrivate>&) override;
+ void removeSourceBuffer(SourceBufferPrivate*);
+
+ void durationChanged() override;
+ void markEndOfStream(EndOfStreamStatus) override;
+ void unmarkEndOfStream() override;
+
+ MediaPlayer::ReadyState readyState() const override;
+ void setReadyState(MediaPlayer::ReadyState) override;
+
+ void waitForSeekCompleted() override;
+ void seekCompleted() override;
+
+ void sourceBufferPrivateDidChangeActiveState(SourceBufferPrivateGStreamer*, bool);
+
+ std::unique_ptr<PlatformTimeRanges> buffered();
private:
- RefPtr<MediaSourceClientGstreamer> m_client;
- MediaSourceGStreamer(WebKitMediaSrc*);
- double m_duration;
- MediaPlayer::ReadyState m_readyState;
+ MediaSourceGStreamer(MediaSourcePrivateClient&, MediaPlayerPrivateGStreamerMSE&);
+
+ HashSet<RefPtr<SourceBufferPrivateGStreamer>> m_sourceBuffers;
+ HashSet<SourceBufferPrivateGStreamer*> m_activeSourceBuffers;
+ Ref<MediaSourceClientGStreamerMSE> m_client;
+ Ref<MediaSourcePrivateClient> m_mediaSource;
+ MediaPlayerPrivateGStreamerMSE& m_playerPrivate;
};
}
#endif
-#endif
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/PlaybackPipeline.cpp b/Source/WebCore/platform/graphics/gstreamer/mse/PlaybackPipeline.cpp
new file mode 100644
index 000000000..95df6d947
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/PlaybackPipeline.cpp
@@ -0,0 +1,449 @@
+/*
+ * Copyright (C) 2014, 2015 Sebastian Dröge <sebastian@centricular.com>
+ * Copyright (C) 2016 Metrological Group B.V.
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "config.h"
+#include "PlaybackPipeline.h"
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+#include "AudioTrackPrivateGStreamer.h"
+#include "GStreamerMediaSample.h"
+#include "GStreamerUtilities.h"
+#include "MediaSample.h"
+#include "SourceBufferPrivateGStreamer.h"
+#include "VideoTrackPrivateGStreamer.h"
+
+#include <gst/app/gstappsrc.h>
+#include <gst/gst.h>
+#include <wtf/MainThread.h>
+#include <wtf/RefCounted.h>
+#include <wtf/glib/GMutexLocker.h>
+#include <wtf/glib/GRefPtr.h>
+#include <wtf/glib/GUniquePtr.h>
+#include <wtf/text/AtomicString.h>
+
+GST_DEBUG_CATEGORY_EXTERN(webkit_mse_debug);
+#define GST_CAT_DEFAULT webkit_mse_debug
+
+static Stream* getStreamByTrackId(WebKitMediaSrc*, AtomicString);
+static Stream* getStreamBySourceBufferPrivate(WebKitMediaSrc*, WebCore::SourceBufferPrivateGStreamer*);
+
+static Stream* getStreamByTrackId(WebKitMediaSrc* source, AtomicString trackIdString)
+{
+ // WebKitMediaSrc should be locked at this point.
+ for (Stream* stream : source->priv->streams) {
+ if (stream->type != WebCore::Invalid
+ && ((stream->audioTrack && stream->audioTrack->id() == trackIdString)
+ || (stream->videoTrack && stream->videoTrack->id() == trackIdString) ) ) {
+ return stream;
+ }
+ }
+ return nullptr;
+}
+
+static Stream* getStreamBySourceBufferPrivate(WebKitMediaSrc* source, WebCore::SourceBufferPrivateGStreamer* sourceBufferPrivate)
+{
+ for (Stream* stream : source->priv->streams) {
+ if (stream->sourceBuffer == sourceBufferPrivate)
+ return stream;
+ }
+ return nullptr;
+}
+
+// FIXME: Use gst_app_src_push_sample() instead when we switch to the appropriate GStreamer version.
+static GstFlowReturn pushSample(GstAppSrc* appsrc, GstSample* sample)
+{
+ g_return_val_if_fail(GST_IS_SAMPLE(sample), GST_FLOW_ERROR);
+
+ GstCaps* caps = gst_sample_get_caps(sample);
+ if (caps)
+ gst_app_src_set_caps(appsrc, caps);
+ else
+ GST_WARNING_OBJECT(appsrc, "received sample without caps");
+
+ GstBuffer* buffer = gst_sample_get_buffer(sample);
+ if (UNLIKELY(!buffer)) {
+ GST_WARNING_OBJECT(appsrc, "received sample without buffer");
+ return GST_FLOW_OK;
+ }
+
+ // gst_app_src_push_buffer() steals the reference, we need an additional one.
+ return gst_app_src_push_buffer(appsrc, gst_buffer_ref(buffer));
+}
+
+namespace WebCore {
+
// Stores (and refs, via GRefPtr) the WebKitMediaSrc element this pipeline operates on.
void PlaybackPipeline::setWebKitMediaSrc(WebKitMediaSrc* webKitMediaSrc)
{
    GST_DEBUG("webKitMediaSrc=%p", webKitMediaSrc);
    m_webKitMediaSrc = webKitMediaSrc;
}

// Returns the WebKitMediaSrc element, or nullptr if none has been set yet.
WebKitMediaSrc* PlaybackPipeline::webKitMediaSrc()
{
    return m_webKitMediaSrc.get();
}
+
// Creates an appsrc-backed Stream for |sourceBufferPrivate| and adds it to the
// WebKitMediaSrc bin. Fails with NotSupported once allTracksConfigured is set,
// because adding source buffers after the first data is not implemented yet.
MediaSourcePrivate::AddStatus PlaybackPipeline::addSourceBuffer(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate)
{
    WebKitMediaSrcPrivate* priv = m_webKitMediaSrc->priv;

    if (priv->allTracksConfigured) {
        GST_ERROR_OBJECT(m_webKitMediaSrc.get(), "Adding new source buffers after first data not supported yet");
        return MediaSourcePrivate::NotSupported;
    }

    GST_DEBUG_OBJECT(m_webKitMediaSrc.get(), "State %d", int(GST_STATE(m_webKitMediaSrc.get())));

    // Raw new: the Stream is owned by the WebKitMediaSrc and released through
    // webKitMediaSrcFreeStream() (see removeSourceBuffer()).
    Stream* stream = new Stream{ };
    stream->parent = m_webKitMediaSrc.get();
    stream->appsrc = gst_element_factory_make("appsrc", nullptr);
    stream->appsrcNeedDataFlag = false;
    stream->sourceBuffer = sourceBufferPrivate.get();

    // No track has been attached yet.
    stream->type = Invalid;
    stream->parser = nullptr;
    stream->caps = nullptr;
    stream->audioTrack = nullptr;
    stream->videoTrack = nullptr;
    stream->presentationSize = WebCore::FloatSize();
    stream->lastEnqueuedTime = MediaTime::invalidTime();

    // Use C callbacks instead of GObject signals, and mark the stream seekable
    // so that appsrc forwards seek events to our seek-data callback.
    gst_app_src_set_callbacks(GST_APP_SRC(stream->appsrc), &enabledAppsrcCallbacks, stream->parent, nullptr);
    gst_app_src_set_emit_signals(GST_APP_SRC(stream->appsrc), FALSE);
    gst_app_src_set_stream_type(GST_APP_SRC(stream->appsrc), GST_APP_STREAM_TYPE_SEEKABLE);

    // Queue at most 2 MB per appsrc; never block the pusher, and ask for more
    // data when the queue drops below 20%.
    gst_app_src_set_max_bytes(GST_APP_SRC(stream->appsrc), 2 * WTF::MB);
    g_object_set(G_OBJECT(stream->appsrc), "block", FALSE, "min-percent", 20, nullptr);

    // The streams deque is shared with other threads; mutate it under the object lock.
    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
    priv->streams.prepend(stream);
    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());

    gst_bin_add(GST_BIN(m_webKitMediaSrc.get()), stream->appsrc);
    gst_element_sync_state_with_parent(stream->appsrc);

    return MediaSourcePrivate::Ok;
}
+
+void PlaybackPipeline::removeSourceBuffer(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate)
+{
+ ASSERT(WTF::isMainThread());
+
+ GST_DEBUG_OBJECT(m_webKitMediaSrc.get(), "Element removed from MediaSource");
+ GST_OBJECT_LOCK(m_webKitMediaSrc.get());
+ WebKitMediaSrcPrivate* priv = m_webKitMediaSrc->priv;
+ Stream* stream = nullptr;
+ Deque<Stream*>::iterator streamPosition = priv->streams.begin();
+
+ for (; streamPosition != priv->streams.end(); ++streamPosition) {
+ if ((*streamPosition)->sourceBuffer == sourceBufferPrivate.get()) {
+ stream = *streamPosition;
+ break;
+ }
+ }
+ if (stream)
+ priv->streams.remove(streamPosition);
+ GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());
+
+ if (stream)
+ webKitMediaSrcFreeStream(m_webKitMediaSrc.get(), stream);
+}
+
+void PlaybackPipeline::attachTrack(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate, RefPtr<TrackPrivateBase> trackPrivate, GstStructure* structure, GstCaps* caps)
+{
+ WebKitMediaSrc* webKitMediaSrc = m_webKitMediaSrc.get();
+
+ GST_OBJECT_LOCK(webKitMediaSrc);
+ Stream* stream = getStreamBySourceBufferPrivate(webKitMediaSrc, sourceBufferPrivate.get());
+ GST_OBJECT_UNLOCK(webKitMediaSrc);
+
+ ASSERT(stream);
+
+ GST_OBJECT_LOCK(webKitMediaSrc);
+ unsigned padId = stream->parent->priv->numberOfPads;
+ stream->parent->priv->numberOfPads++;
+ GST_OBJECT_UNLOCK(webKitMediaSrc);
+
+ const gchar* mediaType = gst_structure_get_name(structure);
+
+ GST_DEBUG_OBJECT(webKitMediaSrc, "Configured track %s: appsrc=%s, padId=%u, mediaType=%s", trackPrivate->id().string().utf8().data(), GST_ELEMENT_NAME(stream->appsrc), padId, mediaType);
+
+ GUniquePtr<gchar> parserBinName(g_strdup_printf("streamparser%u", padId));
+
+ if (!g_strcmp0(mediaType, "video/x-h264")) {
+ GRefPtr<GstCaps> filterCaps = adoptGRef(gst_caps_new_simple("video/x-h264", "alignment", G_TYPE_STRING, "au", nullptr));
+ GstElement* capsfilter = gst_element_factory_make("capsfilter", nullptr);
+ g_object_set(capsfilter, "caps", filterCaps.get(), nullptr);
+
+ stream->parser = gst_bin_new(parserBinName.get());
+
+ GstElement* parser = gst_element_factory_make("h264parse", nullptr);
+ gst_bin_add_many(GST_BIN(stream->parser), parser, capsfilter, nullptr);
+ gst_element_link_pads(parser, "src", capsfilter, "sink");
+
+ GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(parser, "sink"));
+ gst_element_add_pad(stream->parser, gst_ghost_pad_new("sink", pad.get()));
+
+ pad = adoptGRef(gst_element_get_static_pad(capsfilter, "src"));
+ gst_element_add_pad(stream->parser, gst_ghost_pad_new("src", pad.get()));
+ } else if (!g_strcmp0(mediaType, "video/x-h265")) {
+ GRefPtr<GstCaps> filterCaps = adoptGRef(gst_caps_new_simple("video/x-h265", "alignment", G_TYPE_STRING, "au", nullptr));
+ GstElement* capsfilter = gst_element_factory_make("capsfilter", nullptr);
+ g_object_set(capsfilter, "caps", filterCaps.get(), nullptr);
+
+ stream->parser = gst_bin_new(parserBinName.get());
+
+ GstElement* parser = gst_element_factory_make("h265parse", nullptr);
+ gst_bin_add_many(GST_BIN(stream->parser), parser, capsfilter, nullptr);
+ gst_element_link_pads(parser, "src", capsfilter, "sink");
+
+ GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(parser, "sink"));
+ gst_element_add_pad(stream->parser, gst_ghost_pad_new("sink", pad.get()));
+
+ pad = adoptGRef(gst_element_get_static_pad(capsfilter, "src"));
+ gst_element_add_pad(stream->parser, gst_ghost_pad_new("src", pad.get()));
+ } else if (!g_strcmp0(mediaType, "audio/mpeg")) {
+ gint mpegversion = -1;
+ gst_structure_get_int(structure, "mpegversion", &mpegversion);
+
+ GstElement* parser = nullptr;
+ if (mpegversion == 1)
+ parser = gst_element_factory_make("mpegaudioparse", nullptr);
+ else if (mpegversion == 2 || mpegversion == 4)
+ parser = gst_element_factory_make("aacparse", nullptr);
+ else
+ ASSERT_NOT_REACHED();
+
+ stream->parser = gst_bin_new(parserBinName.get());
+ gst_bin_add(GST_BIN(stream->parser), parser);
+
+ GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(parser, "sink"));
+ gst_element_add_pad(stream->parser, gst_ghost_pad_new("sink", pad.get()));
+
+ pad = adoptGRef(gst_element_get_static_pad(parser, "src"));
+ gst_element_add_pad(stream->parser, gst_ghost_pad_new("src", pad.get()));
+ } else if (!g_strcmp0(mediaType, "video/x-vp9"))
+ stream->parser = nullptr;
+ else {
+ GST_ERROR_OBJECT(stream->parent, "Unsupported media format: %s", mediaType);
+ return;
+ }
+
+ GST_OBJECT_LOCK(webKitMediaSrc);
+ stream->type = Unknown;
+ GST_OBJECT_UNLOCK(webKitMediaSrc);
+
+ GRefPtr<GstPad> sourcePad;
+ if (stream->parser) {
+ gst_bin_add(GST_BIN(stream->parent), stream->parser);
+ gst_element_sync_state_with_parent(stream->parser);
+
+ GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(stream->parser, "sink"));
+ sourcePad = adoptGRef(gst_element_get_static_pad(stream->appsrc, "src"));
+ gst_pad_link(sourcePad.get(), sinkPad.get());
+ sourcePad = adoptGRef(gst_element_get_static_pad(stream->parser, "src"));
+ } else {
+ GST_DEBUG_OBJECT(m_webKitMediaSrc.get(), "Stream of type %s doesn't require a parser bin", mediaType);
+ sourcePad = adoptGRef(gst_element_get_static_pad(stream->appsrc, "src"));
+ }
+ ASSERT(sourcePad);
+
+ // FIXME: Is padId the best way to identify the Stream? What about trackId?
+ g_object_set_data(G_OBJECT(sourcePad.get()), "padId", GINT_TO_POINTER(padId));
+ webKitMediaSrcLinkParser(sourcePad.get(), caps, stream);
+
+ ASSERT(stream->parent->priv->mediaPlayerPrivate);
+ int signal = -1;
+
+ GST_OBJECT_LOCK(webKitMediaSrc);
+ if (g_str_has_prefix(mediaType, "audio")) {
+ stream->type = Audio;
+ stream->parent->priv->numberOfAudioStreams++;
+ signal = SIGNAL_AUDIO_CHANGED;
+ stream->audioTrack = RefPtr<WebCore::AudioTrackPrivateGStreamer>(static_cast<WebCore::AudioTrackPrivateGStreamer*>(trackPrivate.get()));
+ } else if (g_str_has_prefix(mediaType, "video")) {
+ stream->type = Video;
+ stream->parent->priv->numberOfVideoStreams++;
+ signal = SIGNAL_VIDEO_CHANGED;
+ stream->videoTrack = RefPtr<WebCore::VideoTrackPrivateGStreamer>(static_cast<WebCore::VideoTrackPrivateGStreamer*>(trackPrivate.get()));
+ } else if (g_str_has_prefix(mediaType, "text")) {
+ stream->type = Text;
+ stream->parent->priv->numberOfTextStreams++;
+ signal = SIGNAL_TEXT_CHANGED;
+
+ // FIXME: Support text tracks.
+ }
+ GST_OBJECT_UNLOCK(webKitMediaSrc);
+
+ if (signal != -1)
+ g_signal_emit(G_OBJECT(stream->parent), webKitMediaSrcSignals[signal], 0, nullptr);
+}
+
// Re-associates |trackPrivate| with the stream backing |sourceBufferPrivate|
// and re-emits the corresponding {audio,video,text}-changed signal. The track
// kind is derived from the caps currently set on the stream's appsrc.
void PlaybackPipeline::reattachTrack(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate, RefPtr<TrackPrivateBase> trackPrivate)
{
    GST_DEBUG("Re-attaching track");

    // FIXME: Maybe remove this method. Now the caps change is managed by gst_appsrc_push_sample() in enqueueSample()
    // and flushAndEnqueueNonDisplayingSamples().

    WebKitMediaSrc* webKitMediaSrc = m_webKitMediaSrc.get();

    GST_OBJECT_LOCK(webKitMediaSrc);
    Stream* stream = getStreamBySourceBufferPrivate(webKitMediaSrc, sourceBufferPrivate.get());
    GST_OBJECT_UNLOCK(webKitMediaSrc);

    // attachTrack() must have run before; the stream has to exist and be typed.
    ASSERT(stream && stream->type != Invalid);

    // The caps change is managed by gst_appsrc_push_sample() in enqueueSample() and
    // flushAndEnqueueNonDisplayingSamples(), so the caps aren't set from here.
    GRefPtr<GstCaps> appsrcCaps = adoptGRef(gst_app_src_get_caps(GST_APP_SRC(stream->appsrc)));
    const gchar* mediaType = gst_structure_get_name(gst_caps_get_structure(appsrcCaps.get(), 0));
    int signal = -1;

    // Update the track reference under the lock; emit the signal outside of it.
    GST_OBJECT_LOCK(webKitMediaSrc);
    if (g_str_has_prefix(mediaType, "audio")) {
        ASSERT(stream->type == Audio);
        signal = SIGNAL_AUDIO_CHANGED;
        stream->audioTrack = RefPtr<WebCore::AudioTrackPrivateGStreamer>(static_cast<WebCore::AudioTrackPrivateGStreamer*>(trackPrivate.get()));
    } else if (g_str_has_prefix(mediaType, "video")) {
        ASSERT(stream->type == Video);
        signal = SIGNAL_VIDEO_CHANGED;
        stream->videoTrack = RefPtr<WebCore::VideoTrackPrivateGStreamer>(static_cast<WebCore::VideoTrackPrivateGStreamer*>(trackPrivate.get()));
    } else if (g_str_has_prefix(mediaType, "text")) {
        ASSERT(stream->type == Text);
        signal = SIGNAL_TEXT_CHANGED;

        // FIXME: Support text tracks.
    }
    GST_OBJECT_UNLOCK(webKitMediaSrc);

    if (signal != -1)
        g_signal_emit(G_OBJECT(stream->parent), webKitMediaSrcSignals[signal], 0, nullptr);
}
+
// Posts a duration-changed message on the bus; the actual value is not carried here.
void PlaybackPipeline::notifyDurationChanged()
{
    gst_element_post_message(GST_ELEMENT(m_webKitMediaSrc.get()), gst_message_new_duration_changed(GST_OBJECT(m_webKitMediaSrc.get())));
    // WebKitMediaSrc will ask MediaPlayerPrivateGStreamerMSE for the new duration later, when somebody asks for it.
}
+
// Signals end-of-stream: finishes the initial track configuration if it is
// still pending (no-more-pads + async-done), then sends EOS to every appsrc.
// The EndOfStreamStatus argument is currently ignored.
void PlaybackPipeline::markEndOfStream(MediaSourcePrivate::EndOfStreamStatus)
{
    WebKitMediaSrcPrivate* priv = m_webKitMediaSrc->priv;

    GST_DEBUG_OBJECT(m_webKitMediaSrc.get(), "Have EOS");

    // Read-and-set allTracksConfigured atomically under the object lock.
    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
    bool allTracksConfigured = priv->allTracksConfigured;
    if (!allTracksConfigured)
        priv->allTracksConfigured = true;
    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());

    // First EOS before any data: no more pads will appear, complete the async state change.
    if (!allTracksConfigured) {
        gst_element_no_more_pads(GST_ELEMENT(m_webKitMediaSrc.get()));
        webKitMediaSrcDoAsyncDone(m_webKitMediaSrc.get());
    }

    // Snapshot the appsrcs under the lock, then send EOS outside of it —
    // presumably because gst_app_src_end_of_stream() is unsafe to call with
    // the object lock held (TODO confirm).
    Vector<GstAppSrc*> appsrcs;

    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
    for (Stream* stream : priv->streams) {
        if (stream->appsrc)
            appsrcs.append(GST_APP_SRC(stream->appsrc));
    }
    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());

    for (GstAppSrc* appsrc : appsrcs)
        gst_app_src_end_of_stream(appsrc);
}
+
+void PlaybackPipeline::flush(AtomicString trackId)
+{
+ ASSERT(WTF::isMainThread());
+
+ GST_DEBUG("flush: trackId=%s", trackId.string().utf8().data());
+
+ GST_OBJECT_LOCK(m_webKitMediaSrc.get());
+ Stream* stream = getStreamByTrackId(m_webKitMediaSrc.get(), trackId);
+
+ if (!stream) {
+ GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());
+ return;
+ }
+
+ stream->lastEnqueuedTime = MediaTime::invalidTime();
+ GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());
+}
+
// Pushes |mediaSample|'s GStreamer buffer into the appsrc of the stream whose
// track id matches, updating the stream's lastEnqueuedTime. The sample is
// dropped when no stream matches or the SourceBuffer is not ready for more data.
void PlaybackPipeline::enqueueSample(RefPtr<MediaSample> mediaSample)
{
    ASSERT(WTF::isMainThread());

    AtomicString trackId = mediaSample->trackID();

    GST_TRACE("enqueing sample trackId=%s PTS=%f presentationSize=%.0fx%.0f at %" GST_TIME_FORMAT " duration: %" GST_TIME_FORMAT,
        trackId.string().utf8().data(), mediaSample->presentationTime().toFloat(),
        mediaSample->presentationSize().width(), mediaSample->presentationSize().height(),
        GST_TIME_ARGS(WebCore::toGstClockTime(mediaSample->presentationTime().toDouble())),
        GST_TIME_ARGS(WebCore::toGstClockTime(mediaSample->duration().toDouble())));

    // NOTE(review): getStreamByTrackId() expects the WebKitMediaSrc object lock
    // to be held (flush() takes it) but it is not taken here — confirm whether
    // main-thread-only access makes this safe.
    Stream* stream = getStreamByTrackId(m_webKitMediaSrc.get(), trackId);

    if (!stream) {
        GST_WARNING("No stream!");
        return;
    }

    if (!stream->sourceBuffer->isReadyForMoreSamples(trackId)) {
        GST_DEBUG("enqueueSample: skip adding new sample for trackId=%s, SB is not ready yet", trackId.string().utf8().data());
        return;
    }

    GstElement* appsrc = stream->appsrc;
    MediaTime lastEnqueuedTime = stream->lastEnqueuedTime;

    GStreamerMediaSample* sample = static_cast<GStreamerMediaSample*>(mediaSample.get());
    // Only push samples that actually carry a buffer.
    if (sample->sample() && gst_sample_get_buffer(sample->sample())) {
        GRefPtr<GstSample> gstSample = sample->sample();
        GstBuffer* buffer = gst_sample_get_buffer(gstSample.get());
        lastEnqueuedTime = sample->presentationTime();

        // The sample must be displayed once it reaches the sink.
        GST_BUFFER_FLAG_UNSET(buffer, GST_BUFFER_FLAG_DECODE_ONLY);
        pushSample(GST_APP_SRC(appsrc), gstSample.get());
        // gst_app_src_push_sample() uses transfer-none for gstSample.

        stream->lastEnqueuedTime = lastEnqueuedTime;
    }
}
+
+GstElement* PlaybackPipeline::pipeline()
+{
+ if (!m_webKitMediaSrc || !GST_ELEMENT_PARENT(GST_ELEMENT(m_webKitMediaSrc.get())))
+ return nullptr;
+
+ return GST_ELEMENT_PARENT(GST_ELEMENT_PARENT(GST_ELEMENT(m_webKitMediaSrc.get())));
+}
+
+} // namespace WebCore.
+
+#endif // ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/PlaybackPipeline.h b/Source/WebCore/platform/graphics/gstreamer/mse/PlaybackPipeline.h
new file mode 100644
index 000000000..08f0e60d3
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/PlaybackPipeline.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2016 Metrological Group B.V.
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+// PlaybackPipeline is (sort of) a friend class of WebKitMediaSourceGStreamer.
+
+#include "WebKitMediaSourceGStreamer.h"
+#include "WebKitMediaSourceGStreamerPrivate.h"
+
+#include <gst/gst.h>
+#include <wtf/Condition.h>
+#include <wtf/glib/GRefPtr.h>
+
+namespace WTF {
+template<> GRefPtr<WebKitMediaSrc> adoptGRef(WebKitMediaSrc*);
+template<> WebKitMediaSrc* refGPtr<WebKitMediaSrc>(WebKitMediaSrc*);
+template<> void derefGPtr<WebKitMediaSrc>(WebKitMediaSrc*);
+};
+
+namespace WebCore {
+
+class ContentType;
+class SourceBufferPrivateGStreamer;
+class MediaSourceGStreamer;
+
// Thin wrapper around the WebKitMediaSrc element: translates MediaSource /
// SourceBuffer operations (add/remove buffers, track attachment, EOS, flush,
// sample enqueueing) into actions on the underlying GStreamer elements.
class PlaybackPipeline: public RefCounted<PlaybackPipeline> {
public:
    static Ref<PlaybackPipeline> create()
    {
        return adoptRef(*new PlaybackPipeline());
    }

    virtual ~PlaybackPipeline() = default;

    // The WebKitMediaSrc element this pipeline drives; must be set before use.
    void setWebKitMediaSrc(WebKitMediaSrc*);
    WebKitMediaSrc* webKitMediaSrc();

    MediaSourcePrivate::AddStatus addSourceBuffer(RefPtr<SourceBufferPrivateGStreamer>);
    void removeSourceBuffer(RefPtr<SourceBufferPrivateGStreamer>);
    void attachTrack(RefPtr<SourceBufferPrivateGStreamer>, RefPtr<TrackPrivateBase>, GstStructure*, GstCaps*);
    void reattachTrack(RefPtr<SourceBufferPrivateGStreamer>, RefPtr<TrackPrivateBase>);
    // Posts a duration-changed message on the bus; the new value is queried lazily.
    void notifyDurationChanged();

    // From MediaSourceGStreamer.
    void markEndOfStream(MediaSourcePrivate::EndOfStreamStatus);

    // From SourceBufferPrivateGStreamer.
    void flush(AtomicString);
    void enqueueSample(RefPtr<MediaSample>);

    // Grandparent of the WebKitMediaSrc (the top-level pipeline), or nullptr.
    GstElement* pipeline();
private:
    PlaybackPipeline() = default;
    GRefPtr<WebKitMediaSrc> m_webKitMediaSrc;
};
+
+} // namespace WebCore.
+
+#endif // ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/SourceBufferPrivateGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/mse/SourceBufferPrivateGStreamer.cpp
new file mode 100644
index 000000000..e4b107f70
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/SourceBufferPrivateGStreamer.cpp
@@ -0,0 +1,177 @@
+/*
+ * Copyright (C) 2013 Google Inc. All rights reserved.
+ * Copyright (C) 2013 Orange
+ * Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com>
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
+ * Copyright (C) 2015, 2016 Igalia, S.L
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ * * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "SourceBufferPrivateGStreamer.h"
+
+#if ENABLE(MEDIA_SOURCE) && USE(GSTREAMER)
+
+#include "ContentType.h"
+#include "GStreamerUtilities.h"
+#include "MediaPlayerPrivateGStreamerMSE.h"
+#include "MediaSample.h"
+#include "MediaSourceClientGStreamerMSE.h"
+#include "MediaSourceGStreamer.h"
+#include "NotImplemented.h"
+#include "WebKitMediaSourceGStreamer.h"
+
+namespace WebCore {
+
+Ref<SourceBufferPrivateGStreamer> SourceBufferPrivateGStreamer::create(MediaSourceGStreamer* mediaSource, Ref<MediaSourceClientGStreamerMSE> client, const ContentType& contentType)
+{
+ return adoptRef(*new SourceBufferPrivateGStreamer(mediaSource, client.get(), contentType));
+}
+
+SourceBufferPrivateGStreamer::SourceBufferPrivateGStreamer(MediaSourceGStreamer* mediaSource, Ref<MediaSourceClientGStreamerMSE> client, const ContentType& contentType)
+ : SourceBufferPrivate()
+ , m_mediaSource(mediaSource)
+ , m_type(contentType)
+ , m_client(client.get())
+{
+}
+
// Registers the SourceBufferPrivateClient that receives append results,
// initialization segments and samples. May be null to detach.
void SourceBufferPrivateGStreamer::setClient(SourceBufferPrivateClient* client)
{
    m_sourceBufferPrivateClient = client;
}

// Forwards appended bytes to the MSE client for processing; on synchronous
// failure reports ReadStreamFailed back to the SourceBufferPrivateClient.
void SourceBufferPrivateGStreamer::append(const unsigned char* data, unsigned length)
{
    ASSERT(m_mediaSource);

    // Without a client there is nobody to report the result to; drop the data.
    if (!m_sourceBufferPrivateClient)
        return;

    if (m_client->append(this, data, length))
        return;

    m_sourceBufferPrivateClient->sourceBufferPrivateAppendComplete(SourceBufferPrivateClient::ReadStreamFailed);
}

// Aborts the current append sequence (SourceBuffer.abort()).
void SourceBufferPrivateGStreamer::abort()
{
    m_client->abort(this);
}

// Discards any partially-parsed append data held by the MSE client.
void SourceBufferPrivateGStreamer::resetParserState()
{
    m_client->resetParserState(this);
}

// Detaches this buffer from its MediaSource (if still attached) and tells the
// MSE client to tear down the associated stream.
void SourceBufferPrivateGStreamer::removedFromMediaSource()
{
    if (m_mediaSource)
        m_mediaSource->removeSourceBuffer(this);
    m_client->removedFromMediaSource(this);
}
+
+MediaPlayer::ReadyState SourceBufferPrivateGStreamer::readyState() const
+{
+ return m_mediaSource->readyState();
+}
+
+void SourceBufferPrivateGStreamer::setReadyState(MediaPlayer::ReadyState state)
+{
+ m_mediaSource->setReadyState(state);
+}
+
// Flushes pipeline data for the given track (delegated to the MSE client).
void SourceBufferPrivateGStreamer::flush(const AtomicString& trackId)
{
    m_client->flush(trackId);
}

// Hands one sample to the MSE client for pushing into the pipeline. Clears any
// pending "notify when ready" request, since the caller evidently has samples.
// The track id parameter is unused: readiness is tracked per SourceBuffer.
void SourceBufferPrivateGStreamer::enqueueSample(Ref<MediaSample>&& sample, const AtomicString&)
{
    m_notifyWhenReadyForMoreSamples = false;

    m_client->enqueueSample(WTFMove(sample));
}

// A single readiness flag is kept per SourceBuffer, not per track.
bool SourceBufferPrivateGStreamer::isReadyForMoreSamples(const AtomicString&)
{
    return m_isReadyForMoreSamples;
}

// Main-thread-only setter for the readiness flag.
void SourceBufferPrivateGStreamer::setReadyForMoreSamples(bool isReady)
{
    ASSERT(WTF::isMainThread());
    m_isReadyForMoreSamples = isReady;
}
+
+void SourceBufferPrivateGStreamer::notifyReadyForMoreSamples()
+{
+ ASSERT(WTF::isMainThread());
+ setReadyForMoreSamples(true);
+ if (m_notifyWhenReadyForMoreSamples)
+ m_sourceBufferPrivateClient->sourceBufferPrivateDidBecomeReadyForMoreSamples(m_trackId);
+}
+
// Reports (de)activation to the MediaSource so it can track active buffers.
void SourceBufferPrivateGStreamer::setActive(bool isActive)
{
    if (m_mediaSource)
        m_mediaSource->sourceBufferPrivateDidChangeActiveState(this, isActive);
}

// No-op: appsrc backpressure is handled via isReadyForMoreSamples() instead.
void SourceBufferPrivateGStreamer::stopAskingForMoreSamples(const AtomicString&)
{
    notImplemented();
}

// Arms a one-shot notification: when notifyReadyForMoreSamples() fires, the
// client is told that |trackId| can accept more samples.
void SourceBufferPrivateGStreamer::notifyClientWhenReadyForMoreSamples(const AtomicString& trackId)
{
    ASSERT(WTF::isMainThread());
    m_notifyWhenReadyForMoreSamples = true;
    m_trackId = trackId;
}
+
// Forwards a parsed initialization segment to the client, if one is attached.
void SourceBufferPrivateGStreamer::didReceiveInitializationSegment(const SourceBufferPrivateClient::InitializationSegment& initializationSegment)
{
    if (m_sourceBufferPrivateClient)
        m_sourceBufferPrivateClient->sourceBufferPrivateDidReceiveInitializationSegment(initializationSegment);
}

// Forwards one demuxed sample to the client, if one is attached.
void SourceBufferPrivateGStreamer::didReceiveSample(MediaSample& sample)
{
    if (m_sourceBufferPrivateClient)
        m_sourceBufferPrivateClient->sourceBufferPrivateDidReceiveSample(sample);
}

// Reports that the current append completed successfully.
void SourceBufferPrivateGStreamer::didReceiveAllPendingSamples()
{
    if (m_sourceBufferPrivateClient)
        m_sourceBufferPrivateClient->sourceBufferPrivateAppendComplete(SourceBufferPrivateClient::AppendSucceeded);
}
+
+}
+#endif
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/SourceBufferPrivateGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/mse/SourceBufferPrivateGStreamer.h
new file mode 100644
index 000000000..5671310ff
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/SourceBufferPrivateGStreamer.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2013 Google Inc. All rights reserved.
+ * Copyright (C) 2013 Orange
+ * Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com>
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
+ * Copyright (C) 2015, 2016 Igalia, S.L
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ * * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+
+#if ENABLE(MEDIA_SOURCE) && USE(GSTREAMER)
+
+#include "ContentType.h"
+#include "MediaPlayerPrivateGStreamerMSE.h"
+#include "SourceBufferPrivate.h"
+#include "SourceBufferPrivateClient.h"
+#include "WebKitMediaSourceGStreamer.h"
+
+namespace WebCore {
+
+class MediaSourceGStreamer;
+
// GStreamer MSE implementation of SourceBufferPrivate. Thin delegating layer:
// append/flush/enqueue operations go to the shared MediaSourceClientGStreamerMSE;
// parse results come back through the didReceive*() methods and are forwarded
// to the SourceBufferPrivateClient (the cross-platform SourceBuffer).
class SourceBufferPrivateGStreamer final : public SourceBufferPrivate {

public:
    static Ref<SourceBufferPrivateGStreamer> create(MediaSourceGStreamer*, Ref<MediaSourceClientGStreamerMSE>, const ContentType&);
    virtual ~SourceBufferPrivateGStreamer() = default;

    // Called when the owning MediaSource goes away; m_mediaSource is a raw back-pointer.
    void clearMediaSource() { m_mediaSource = nullptr; }

    void setClient(SourceBufferPrivateClient*) final;
    void append(const unsigned char*, unsigned) final;
    void abort() final;
    void resetParserState() final;
    void removedFromMediaSource() final;
    MediaPlayer::ReadyState readyState() const final;
    void setReadyState(MediaPlayer::ReadyState) final;

    void flush(const AtomicString&) final;
    void enqueueSample(Ref<MediaSample>&&, const AtomicString&) final;
    bool isReadyForMoreSamples(const AtomicString&) final;
    void setActive(bool) final;
    void stopAskingForMoreSamples(const AtomicString&) final;
    void notifyClientWhenReadyForMoreSamples(const AtomicString&) final;

    // Readiness bookkeeping, driven by the appsrc need-data/enough-data side.
    void setReadyForMoreSamples(bool);
    void notifyReadyForMoreSamples();

    // Parser results, forwarded to m_sourceBufferPrivateClient when attached.
    void didReceiveInitializationSegment(const SourceBufferPrivateClient::InitializationSegment&);
    void didReceiveSample(MediaSample&);
    void didReceiveAllPendingSamples();

private:
    SourceBufferPrivateGStreamer(MediaSourceGStreamer*, Ref<MediaSourceClientGStreamerMSE>, const ContentType&);
    friend class MediaSourceClientGStreamerMSE;

    // Raw back-pointer; nulled by clearMediaSource().
    MediaSourceGStreamer* m_mediaSource;
    ContentType m_type;
    Ref<MediaSourceClientGStreamerMSE> m_client;
    // NOTE(review): no initializer here or in the constructor — presumably
    // meant to start as nullptr (the methods null-check it); confirm.
    SourceBufferPrivateClient* m_sourceBufferPrivateClient;
    bool m_isReadyForMoreSamples = true;
    bool m_notifyWhenReadyForMoreSamples = false;
    AtomicString m_trackId;
};
+
+}
+
+#endif
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamer.cpp
new file mode 100644
index 000000000..52ca66867
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamer.cpp
@@ -0,0 +1,776 @@
+/*
+ * Copyright (C) 2009, 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * Copyright (C) 2013 Collabora Ltd.
+ * Copyright (C) 2013 Orange
+ * Copyright (C) 2014, 2015 Sebastian Dröge <sebastian@centricular.com>
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
+ * Copyright (C) 2015, 2016 Igalia, S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include "config.h"
+#include "WebKitMediaSourceGStreamer.h"
+
+#include "PlaybackPipeline.h"
+
+#if ENABLE(VIDEO) && ENABLE(MEDIA_SOURCE) && USE(GSTREAMER)
+
+#include "AudioTrackPrivateGStreamer.h"
+#include "GStreamerUtilities.h"
+#include "MediaDescription.h"
+#include "MediaPlayerPrivateGStreamerMSE.h"
+#include "MediaSample.h"
+#include "MediaSourceGStreamer.h"
+#include "NotImplemented.h"
+#include "SourceBufferPrivateGStreamer.h"
+#include "TimeRanges.h"
+#include "VideoTrackPrivateGStreamer.h"
+#include "WebKitMediaSourceGStreamerPrivate.h"
+
+#include <gst/app/app.h>
+#include <gst/app/gstappsrc.h>
+#include <gst/gst.h>
+#include <gst/pbutils/missing-plugins.h>
+#include <gst/pbutils/pbutils.h>
+#include <gst/video/video.h>
+#include <wtf/Condition.h>
+#include <wtf/MainThread.h>
+#include <wtf/glib/GMutexLocker.h>
+#include <wtf/glib/GUniquePtr.h>
+#include <wtf/text/CString.h>
+
+GST_DEBUG_CATEGORY_STATIC(webkit_media_src_debug);
+#define GST_CAT_DEFAULT webkit_media_src_debug
+
+#define webkit_media_src_parent_class parent_class
+#define WEBKIT_MEDIA_SRC_CATEGORY_INIT GST_DEBUG_CATEGORY_INIT(webkit_media_src_debug, "webkitmediasrc", 0, "websrc element");
+
+// Template for the per-stream ghost source pads ("src_0", "src_1", ...),
+// added dynamically (GST_PAD_SOMETIMES) as tracks are attached.
+static GstStaticPadTemplate srcTemplate = GST_STATIC_PAD_TEMPLATE("src_%u", GST_PAD_SRC,
+    GST_PAD_SOMETIMES, GST_STATIC_CAPS_ANY);
+
+static void enabledAppsrcNeedData(GstAppSrc*, guint, gpointer);
+static void enabledAppsrcEnoughData(GstAppSrc*, gpointer);
+static gboolean enabledAppsrcSeekData(GstAppSrc*, guint64, gpointer);
+
+// No-op callbacks installed on an appsrc while its Stream is being torn down
+// (see webKitMediaSrcFreeStream()), so late calls from GStreamer streaming
+// threads cannot touch freed state. Note: stray semicolons after the function
+// bodies ("{ };") removed — they trigger -Wextra-semi.
+static void disabledAppsrcNeedData(GstAppSrc*, guint, gpointer) { }
+static void disabledAppsrcEnoughData(GstAppSrc*, gpointer) { }
+static gboolean disabledAppsrcSeekData(GstAppSrc*, guint64, gpointer)
+{
+    return FALSE;
+}
+
+// Callback set used while a Stream is live.
+GstAppSrcCallbacks enabledAppsrcCallbacks = {
+    enabledAppsrcNeedData,
+    enabledAppsrcEnoughData,
+    enabledAppsrcSeekData,
+    { 0 }
+};
+
+// Callback set swapped in during Stream teardown.
+GstAppSrcCallbacks disabledAppsrcCallbacks = {
+    disabledAppsrcNeedData,
+    disabledAppsrcEnoughData,
+    disabledAppsrcSeekData,
+    { 0 }
+};
+
+static Stream* getStreamByAppsrc(WebKitMediaSrc*, GstElement*);
+
+// need-data: the appsrc can accept more samples. May run on a streaming
+// thread, so actual work is forwarded to the main thread via application
+// messages on the pipeline bus. While a seek is pending, waits until every
+// stream has signalled both seek-data and need-data before posting
+// "seek-needs-data"; otherwise posts "ready-for-more-samples" for this
+// stream alone.
+static void enabledAppsrcNeedData(GstAppSrc* appsrc, guint, gpointer userData)
+{
+    WebKitMediaSrc* webKitMediaSrc = static_cast<WebKitMediaSrc*>(userData);
+    ASSERT(WEBKIT_IS_MEDIA_SRC(webKitMediaSrc));
+
+    GST_OBJECT_LOCK(webKitMediaSrc);
+    OnSeekDataAction appsrcSeekDataNextAction = webKitMediaSrc->priv->appsrcSeekDataNextAction;
+    Stream* appsrcStream = getStreamByAppsrc(webKitMediaSrc, GST_ELEMENT(appsrc));
+    bool allAppsrcNeedDataAfterSeek = false;
+
+    // A pending seek: count this stream's need-data exactly once.
+    if (webKitMediaSrc->priv->appsrcSeekDataCount > 0) {
+        if (appsrcStream && !appsrcStream->appsrcNeedDataFlag) {
+            ++webKitMediaSrc->priv->appsrcNeedDataCount;
+            appsrcStream->appsrcNeedDataFlag = true;
+        }
+        int numAppsrcs = webKitMediaSrc->priv->streams.size();
+        if (webKitMediaSrc->priv->appsrcSeekDataCount == numAppsrcs && webKitMediaSrc->priv->appsrcNeedDataCount == numAppsrcs) {
+            GST_DEBUG("All needDatas completed");
+            allAppsrcNeedDataAfterSeek = true;
+            // Reset the per-seek bookkeeping for the next seek.
+            webKitMediaSrc->priv->appsrcSeekDataCount = 0;
+            webKitMediaSrc->priv->appsrcNeedDataCount = 0;
+            webKitMediaSrc->priv->appsrcSeekDataNextAction = Nothing;
+
+            for (Stream* stream : webKitMediaSrc->priv->streams)
+                stream->appsrcNeedDataFlag = false;
+        }
+    }
+    GST_OBJECT_UNLOCK(webKitMediaSrc);
+
+    if (allAppsrcNeedDataAfterSeek) {
+        GST_DEBUG("All expected appsrcSeekData() and appsrcNeedData() calls performed. Running next action (%d)", static_cast<int>(appsrcSeekDataNextAction));
+
+        switch (appsrcSeekDataNextAction) {
+        case MediaSourceSeekToTime: {
+            // Handled by seekNeedsDataMainThread() via applicationMessageCallback().
+            GstStructure* structure = gst_structure_new_empty("seek-needs-data");
+            GstMessage* message = gst_message_new_application(GST_OBJECT(appsrc), structure);
+            gst_bus_post(webKitMediaSrc->priv->bus.get(), message);
+            GST_TRACE("seek-needs-data message posted to the bus");
+            break;
+        }
+        case Nothing:
+            break;
+        }
+    } else if (appsrcSeekDataNextAction == Nothing) {
+        LockHolder locker(webKitMediaSrc->priv->streamLock);
+
+        GST_OBJECT_LOCK(webKitMediaSrc);
+
+        // Search again for the Stream, just in case it was removed between the previous lock and this one.
+        appsrcStream = getStreamByAppsrc(webKitMediaSrc, GST_ELEMENT(appsrc));
+
+        if (appsrcStream && appsrcStream->type != WebCore::Invalid) {
+            // Handled by notifyReadyForMoreSamplesMainThread().
+            GstStructure* structure = gst_structure_new("ready-for-more-samples", "appsrc-stream", G_TYPE_POINTER, appsrcStream, nullptr);
+            GstMessage* message = gst_message_new_application(GST_OBJECT(appsrc), structure);
+            gst_bus_post(webKitMediaSrc->priv->bus.get(), message);
+            GST_TRACE("ready-for-more-samples message posted to the bus");
+        }
+
+        GST_OBJECT_UNLOCK(webKitMediaSrc);
+    }
+}
+
+// enough-data: the appsrc queue is full. Stops the SourceBuffer from pushing
+// more samples until need-data fires again. (Pointer declarator moved next to
+// the type for consistency with the rest of the file.)
+static void enabledAppsrcEnoughData(GstAppSrc* appsrc, gpointer userData)
+{
+    // No need to lock on webKitMediaSrc, we're on the main thread and nobody is going to remove the stream in the meantime.
+    ASSERT(WTF::isMainThread());
+
+    WebKitMediaSrc* webKitMediaSrc = static_cast<WebKitMediaSrc*>(userData);
+    ASSERT(WEBKIT_IS_MEDIA_SRC(webKitMediaSrc));
+    Stream* stream = getStreamByAppsrc(webKitMediaSrc, GST_ELEMENT(appsrc));
+
+    // This callback might have been scheduled from a child thread before the stream was removed.
+    // Then, the removal code might have run, and later this callback.
+    // This check solves the race condition.
+    if (!stream || stream->type == WebCore::Invalid)
+        return;
+
+    stream->sourceBuffer->setReadyForMoreSamples(false);
+}
+
+// seek-data: counts per-appsrc seek notifications under the object lock.
+// enabledAppsrcNeedData() uses this count to detect when every stream has
+// resynchronized after a seek.
+static gboolean enabledAppsrcSeekData(GstAppSrc*, guint64, gpointer userData)
+{
+    ASSERT(WTF::isMainThread());
+
+    WebKitMediaSrc* webKitMediaSrc = static_cast<WebKitMediaSrc*>(userData);
+
+    ASSERT(WEBKIT_IS_MEDIA_SRC(webKitMediaSrc));
+
+    GST_OBJECT_LOCK(webKitMediaSrc);
+    webKitMediaSrc->priv->appsrcSeekDataCount++;
+    GST_OBJECT_UNLOCK(webKitMediaSrc);
+
+    return TRUE;
+}
+
+// Returns the Stream whose appsrc element matches, or nullptr when no such
+// stream is registered. Caller is expected to hold whatever lock protects
+// priv->streams.
+static Stream* getStreamByAppsrc(WebKitMediaSrc* source, GstElement* appsrc)
+{
+    auto& streams = source->priv->streams;
+    auto match = std::find_if(streams.begin(), streams.end(),
+        [appsrc](Stream* candidate) { return candidate->appsrc == appsrc; });
+    return match != streams.end() ? *match : nullptr;
+}
+
+// GObject boilerplate: WebKitMediaSrc derives from GstBin, implements the
+// GstURIHandler interface and registers the "webkitmediasrc" debug category.
+G_DEFINE_TYPE_WITH_CODE(WebKitMediaSrc, webkit_media_src, GST_TYPE_BIN,
+    G_IMPLEMENT_INTERFACE(GST_TYPE_URI_HANDLER, webKitMediaSrcUriHandlerInit);
+    WEBKIT_MEDIA_SRC_CATEGORY_INIT);
+
+// Signal ids filled in by webkit_media_src_class_init().
+guint webKitMediaSrcSignals[LAST_SIGNAL] = { 0 };
+
+// Class initializer: installs GObject vfuncs, the "location" and
+// "n-audio"/"n-video"/"n-text" properties, the {video,audio,text}-changed
+// signals, the src pad template and the state-change vfunc.
+static void webkit_media_src_class_init(WebKitMediaSrcClass* klass)
+{
+    GObjectClass* oklass = G_OBJECT_CLASS(klass);
+    GstElementClass* eklass = GST_ELEMENT_CLASS(klass);
+
+    oklass->finalize = webKitMediaSrcFinalize;
+    oklass->set_property = webKitMediaSrcSetProperty;
+    oklass->get_property = webKitMediaSrcGetProperty;
+
+    gst_element_class_add_pad_template(eklass, gst_static_pad_template_get(&srcTemplate));
+
+    gst_element_class_set_static_metadata(eklass, "WebKit Media source element", "Source", "Handles Blob uris", "Stephane Jadaud <sjadaud@sii.fr>, Sebastian Dröge <sebastian@centricular.com>, Enrique Ocaña González <eocanha@igalia.com>");
+
+    // Allows setting the uri using the 'location' property, which is used for example by gst_element_make_from_uri().
+    g_object_class_install_property(oklass,
+        PROP_LOCATION,
+        g_param_spec_string("location", "location", "Location to read from", nullptr,
+            GParamFlags(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
+    g_object_class_install_property(oklass,
+        PROP_N_AUDIO,
+        g_param_spec_int("n-audio", "Number Audio", "Total number of audio streams",
+            0, G_MAXINT, 0, GParamFlags(G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
+    g_object_class_install_property(oklass,
+        PROP_N_VIDEO,
+        g_param_spec_int("n-video", "Number Video", "Total number of video streams",
+            0, G_MAXINT, 0, GParamFlags(G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
+    g_object_class_install_property(oklass,
+        PROP_N_TEXT,
+        g_param_spec_int("n-text", "Number Text", "Total number of text streams",
+            0, G_MAXINT, 0, GParamFlags(G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
+
+    // Emitted (see webKitMediaSrcFreeStream()) when the stream set changes.
+    webKitMediaSrcSignals[SIGNAL_VIDEO_CHANGED] =
+        g_signal_new("video-changed", G_TYPE_FROM_CLASS(oklass),
+            G_SIGNAL_RUN_LAST,
+            G_STRUCT_OFFSET(WebKitMediaSrcClass, videoChanged), nullptr, nullptr,
+            g_cclosure_marshal_generic, G_TYPE_NONE, 0, G_TYPE_NONE);
+    webKitMediaSrcSignals[SIGNAL_AUDIO_CHANGED] =
+        g_signal_new("audio-changed", G_TYPE_FROM_CLASS(oklass),
+            G_SIGNAL_RUN_LAST,
+            G_STRUCT_OFFSET(WebKitMediaSrcClass, audioChanged), nullptr, nullptr,
+            g_cclosure_marshal_generic, G_TYPE_NONE, 0, G_TYPE_NONE);
+    webKitMediaSrcSignals[SIGNAL_TEXT_CHANGED] =
+        g_signal_new("text-changed", G_TYPE_FROM_CLASS(oklass),
+            G_SIGNAL_RUN_LAST,
+            G_STRUCT_OFFSET(WebKitMediaSrcClass, textChanged), nullptr, nullptr,
+            g_cclosure_marshal_generic, G_TYPE_NONE, 0, G_TYPE_NONE);
+
+    eklass->change_state = webKitMediaSrcChangeState;
+
+    g_type_class_add_private(klass, sizeof(WebKitMediaSrcPrivate));
+}
+
+// Instance initializer for WebKitMediaSrc.
+static void webkit_media_src_init(WebKitMediaSrc* source)
+{
+    source->priv = WEBKIT_MEDIA_SRC_GET_PRIVATE(source);
+    // The private struct contains C++ members, so it is constructed with a
+    // placement new here and destroyed explicitly in webKitMediaSrcFinalize().
+    new (source->priv) WebKitMediaSrcPrivate();
+    source->priv->seekTime = MediaTime::invalidTime();
+    source->priv->appsrcSeekDataCount = 0;
+    source->priv->appsrcNeedDataCount = 0;
+    source->priv->appsrcSeekDataNextAction = Nothing;
+
+    // No need to reset Stream.appsrcNeedDataFlag because there are no Streams at this point yet.
+}
+
+// GObject finalize: frees every Stream, detaches from the media player and
+// runs the explicit destructor matching the placement new in init().
+void webKitMediaSrcFinalize(GObject* object)
+{
+    ASSERT(WTF::isMainThread());
+
+    WebKitMediaSrc* source = WEBKIT_MEDIA_SRC(object);
+    WebKitMediaSrcPrivate* priv = source->priv;
+
+    // Move the stream list aside, then free each stream.
+    Deque<Stream*> oldStreams;
+    source->priv->streams.swap(oldStreams);
+
+    for (Stream* stream : oldStreams)
+        webKitMediaSrcFreeStream(source, stream);
+
+    priv->seekTime = MediaTime::invalidTime();
+
+    // Also disconnects the bus "message::application" handler.
+    if (priv->mediaPlayerPrivate)
+        webKitMediaSrcSetMediaPlayerPrivate(source, nullptr);
+
+    // We used a placement new for construction, the destructor won't be called automatically.
+    priv->~_WebKitMediaSrcPrivate();
+
+    GST_CALL_PARENT(G_OBJECT_CLASS, finalize, (object));
+}
+
+// GObject set_property: only "location" is writable; it is routed through the
+// GstURIHandler interface (webKitMediaSrcSetUri()).
+void webKitMediaSrcSetProperty(GObject* object, guint propId, const GValue* value, GParamSpec* pspec)
+{
+    WebKitMediaSrc* source = WEBKIT_MEDIA_SRC(object);
+
+    switch (propId) {
+    case PROP_LOCATION:
+        gst_uri_handler_set_uri(reinterpret_cast<GstURIHandler*>(source), g_value_get_string(value), nullptr);
+        break;
+    default:
+        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propId, pspec);
+        break;
+    }
+}
+
+// GObject get_property: reads the location and per-type stream counts under
+// the object lock.
+void webKitMediaSrcGetProperty(GObject* object, guint propId, GValue* value, GParamSpec* pspec)
+{
+    WebKitMediaSrc* source = WEBKIT_MEDIA_SRC(object);
+    WebKitMediaSrcPrivate* priv = source->priv;
+
+    GST_OBJECT_LOCK(source);
+    switch (propId) {
+    case PROP_LOCATION:
+        g_value_set_string(value, priv->location.get());
+        break;
+    case PROP_N_AUDIO:
+        g_value_set_int(value, priv->numberOfAudioStreams);
+        break;
+    case PROP_N_VIDEO:
+        g_value_set_int(value, priv->numberOfVideoStreams);
+        break;
+    case PROP_N_TEXT:
+        g_value_set_int(value, priv->numberOfTextStreams);
+        break;
+    default:
+        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propId, pspec);
+        break;
+    }
+    GST_OBJECT_UNLOCK(source);
+}
+
+// Posts async-start so the pipeline knows the READY->PAUSED transition will
+// complete asynchronously (paired with webKitMediaSrcDoAsyncDone()).
+void webKitMediaSrcDoAsyncStart(WebKitMediaSrc* source)
+{
+    source->priv->asyncStart = true;
+    GST_BIN_CLASS(parent_class)->handle_message(GST_BIN(source),
+        gst_message_new_async_start(GST_OBJECT(source)));
+}
+
+// Posts async-done, but only if a matching async-start was posted before.
+void webKitMediaSrcDoAsyncDone(WebKitMediaSrc* source)
+{
+    WebKitMediaSrcPrivate* priv = source->priv;
+    if (priv->asyncStart) {
+        GST_BIN_CLASS(parent_class)->handle_message(GST_BIN(source),
+            gst_message_new_async_done(GST_OBJECT(source), GST_CLOCK_TIME_NONE));
+        priv->asyncStart = false;
+    }
+}
+
+// GstElement state-change vfunc. READY->PAUSED is completed asynchronously:
+// async-start is posted here and async-done once all tracks are configured
+// (see webKitMediaSrcCheckAllTracksConfigured()).
+GstStateChangeReturn webKitMediaSrcChangeState(GstElement* element, GstStateChange transition)
+{
+    WebKitMediaSrc* source = WEBKIT_MEDIA_SRC(element);
+    WebKitMediaSrcPrivate* priv = source->priv;
+
+    switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+        priv->allTracksConfigured = false;
+        webKitMediaSrcDoAsyncStart(source);
+        break;
+    default:
+        break;
+    }
+
+    GstStateChangeReturn result = GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
+    if (G_UNLIKELY(result == GST_STATE_CHANGE_FAILURE)) {
+        GST_WARNING_OBJECT(source, "State change failed");
+        // Balance the async-start posted above before bailing out.
+        webKitMediaSrcDoAsyncDone(source);
+        return result;
+    }
+
+    switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+        result = GST_STATE_CHANGE_ASYNC;
+        break;
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+        webKitMediaSrcDoAsyncDone(source);
+        priv->allTracksConfigured = false;
+        break;
+    default:
+        break;
+    }
+
+    return result;
+}
+
+// Largest size reported by any stream's appsrc, used to answer byte-format
+// duration queries in webKitMediaSrcQueryWithParent().
+gint64 webKitMediaSrcGetSize(WebKitMediaSrc* webKitMediaSrc)
+{
+    gint64 maximumSize = 0;
+    for (Stream* stream : webKitMediaSrc->priv->streams)
+        maximumSize = std::max<gint64>(maximumSize, gst_app_src_get_size(GST_APP_SRC(stream->appsrc)));
+    return maximumSize;
+}
+
+// Query handler installed on the ghost src pads: answers DURATION (time via
+// the media player, bytes via the appsrc sizes) and URI queries itself, and
+// forwards everything else to the proxied target pad.
+gboolean webKitMediaSrcQueryWithParent(GstPad* pad, GstObject* parent, GstQuery* query)
+{
+    WebKitMediaSrc* source = WEBKIT_MEDIA_SRC(GST_ELEMENT(parent));
+    gboolean result = FALSE;
+
+    switch (GST_QUERY_TYPE(query)) {
+    case GST_QUERY_DURATION: {
+        GstFormat format;
+        gst_query_parse_duration(query, &format, nullptr);
+
+        GST_DEBUG_OBJECT(source, "duration query in format %s", gst_format_get_name(format));
+        GST_OBJECT_LOCK(source);
+        switch (format) {
+        case GST_FORMAT_TIME: {
+            if (source->priv && source->priv->mediaPlayerPrivate) {
+                float duration = source->priv->mediaPlayerPrivate->durationMediaTime().toFloat();
+                // Only answer when a positive duration is known.
+                if (duration > 0) {
+                    gst_query_set_duration(query, format, WebCore::toGstClockTime(duration));
+                    GST_DEBUG_OBJECT(source, "Answering: duration=%" GST_TIME_FORMAT, GST_TIME_ARGS(WebCore::toGstClockTime(duration)));
+                    result = TRUE;
+                }
+            }
+            break;
+        }
+        case GST_FORMAT_BYTES: {
+            if (source->priv) {
+                gint64 duration = webKitMediaSrcGetSize(source);
+                if (duration) {
+                    gst_query_set_duration(query, format, duration);
+                    GST_DEBUG_OBJECT(source, "size: %" G_GINT64_FORMAT, duration);
+                    result = TRUE;
+                }
+            }
+            break;
+        }
+        default:
+            break;
+        }
+
+        GST_OBJECT_UNLOCK(source);
+        break;
+    }
+    case GST_QUERY_URI:
+        if (source) {
+            GST_OBJECT_LOCK(source);
+            if (source->priv)
+                gst_query_set_uri(query, source->priv->location.get());
+            GST_OBJECT_UNLOCK(source);
+        }
+        result = TRUE;
+        break;
+    default: {
+        GRefPtr<GstPad> target = adoptGRef(gst_ghost_pad_get_target(GST_GHOST_PAD_CAST(pad)));
+        // Forward the query to the proxy target pad.
+        if (target)
+            result = gst_pad_query(target.get(), query);
+        break;
+    }
+    }
+
+    return result;
+}
+
+// Derives the track's presentation size from the caps (applying the pixel
+// aspect ratio for video) and stores a reference to the caps in the Stream.
+void webKitMediaSrcUpdatePresentationSize(GstCaps* caps, Stream* stream)
+{
+    GstStructure* structure = gst_caps_get_structure(caps, 0);
+    const gchar* structureName = gst_structure_get_name(structure);
+    GstVideoInfo info;
+
+    GST_OBJECT_LOCK(stream->parent);
+    if (g_str_has_prefix(structureName, "video/") && gst_video_info_from_caps(&info, caps)) {
+        float width, height;
+
+        // FIXME: Correct? Height is scaled by par_d/par_n; assumes par_n is
+        // never zero — presumably guaranteed by gst_video_info_from_caps()
+        // defaulting the PAR to 1/1. TODO confirm.
+        width = info.width;
+        height = info.height * ((float) info.par_d / (float) info.par_n);
+        stream->presentationSize = WebCore::FloatSize(width, height);
+    } else
+        stream->presentationSize = WebCore::FloatSize();
+
+    // Take an extra ref before adopting: adoptGRef() does not add one, so
+    // this keeps the GRefPtr from stealing the caller's reference.
+    gst_caps_ref(caps);
+    stream->caps = adoptGRef(caps);
+    GST_OBJECT_UNLOCK(stream->parent);
+}
+
+// Wraps the stream's source pad in a ghost pad named "src_<padId>",
+// activates it and adds it to the bin; relinks a previously used decodebin
+// sink pad if one exists.
+void webKitMediaSrcLinkStreamToSrcPad(GstPad* sourcePad, Stream* stream)
+{
+    unsigned padId = static_cast<unsigned>(GPOINTER_TO_INT(g_object_get_data(G_OBJECT(sourcePad), "padId")));
+    GST_DEBUG_OBJECT(stream->parent, "linking stream to src pad (id: %u)", padId);
+
+    GUniquePtr<gchar> padName(g_strdup_printf("src_%u", padId));
+    GstPad* ghostpad = WebCore::webkitGstGhostPadFromStaticTemplate(&srcTemplate, padName.get(), sourcePad);
+
+    // Route queries on the exposed pad through webKitMediaSrcQueryWithParent().
+    gst_pad_set_query_function(ghostpad, webKitMediaSrcQueryWithParent);
+
+    gst_pad_set_active(ghostpad, TRUE);
+    gst_element_add_pad(GST_ELEMENT(stream->parent), ghostpad);
+
+    if (stream->decodebinSinkPad) {
+        GST_DEBUG_OBJECT(stream->parent, "A decodebin was previously used for this source, trying to reuse it.");
+        // FIXME: error checking here. Not sure what to do if linking
+        // fails though, because decodebin is out of this source
+        // element's scope, in theory.
+        gst_pad_link(ghostpad, stream->decodebinSinkPad);
+    }
+}
+
+// Caps/pad handler: records presentation size and caps for the stream,
+// exposes the pad if it is not linked yet, then re-checks whether all tracks
+// are configured so the pending async state change can complete.
+void webKitMediaSrcLinkParser(GstPad* sourcePad, GstCaps* caps, Stream* stream)
+{
+    ASSERT(caps && stream->parent);
+    if (!caps || !stream->parent) {
+        GST_ERROR("Unable to link parser");
+        return;
+    }
+
+    webKitMediaSrcUpdatePresentationSize(caps, stream);
+
+    // FIXME: drop webKitMediaSrcLinkStreamToSrcPad() and move its code here.
+    if (!gst_pad_is_linked(sourcePad)) {
+        GST_DEBUG_OBJECT(stream->parent, "pad not linked yet");
+        webKitMediaSrcLinkStreamToSrcPad(sourcePad, stream);
+    }
+
+    webKitMediaSrcCheckAllTracksConfigured(stream->parent);
+}
+
+// Destroys a Stream: silences its appsrc first so streaming threads cannot
+// re-enter, drops track data, emits the matching *-changed signal and wakes
+// any thread waiting on streamCondition.
+void webKitMediaSrcFreeStream(WebKitMediaSrc* source, Stream* stream)
+{
+    if (stream->appsrc) {
+        // Don't trigger callbacks from this appsrc to avoid using the stream anymore.
+        gst_app_src_set_callbacks(GST_APP_SRC(stream->appsrc), &disabledAppsrcCallbacks, nullptr, nullptr);
+        gst_app_src_end_of_stream(GST_APP_SRC(stream->appsrc));
+    }
+
+    if (stream->type != WebCore::Invalid) {
+        GST_DEBUG("Freeing track-related info on stream %p", stream);
+
+        LockHolder locker(source->priv->streamLock);
+
+        if (stream->caps)
+            stream->caps = nullptr;
+
+        if (stream->audioTrack)
+            stream->audioTrack = nullptr;
+        if (stream->videoTrack)
+            stream->videoTrack = nullptr;
+
+        // Pick the signal matching the track type so listeners can refresh
+        // their track lists.
+        int signal = -1;
+        switch (stream->type) {
+        case WebCore::Audio:
+            signal = SIGNAL_AUDIO_CHANGED;
+            break;
+        case WebCore::Video:
+            signal = SIGNAL_VIDEO_CHANGED;
+            break;
+        case WebCore::Text:
+            signal = SIGNAL_TEXT_CHANGED;
+            break;
+        default:
+            break;
+        }
+        stream->type = WebCore::Invalid;
+
+        if (signal != -1)
+            g_signal_emit(G_OBJECT(source), webKitMediaSrcSignals[signal], 0, nullptr);
+
+        source->priv->streamCondition.notifyOne();
+    }
+
+    GST_DEBUG("Releasing stream: %p", stream);
+    delete stream;
+}
+
+// Once every Stream has a valid type, announces no-more-pads and finishes the
+// async state change started in READY->PAUSED. The decision is made under the
+// object lock; the GStreamer calls happen outside it.
+void webKitMediaSrcCheckAllTracksConfigured(WebKitMediaSrc* webKitMediaSrc)
+{
+    bool allTracksConfigured = false;
+
+    GST_OBJECT_LOCK(webKitMediaSrc);
+    if (!webKitMediaSrc->priv->allTracksConfigured) {
+        allTracksConfigured = true;
+        for (Stream* stream : webKitMediaSrc->priv->streams) {
+            if (stream->type == WebCore::Invalid) {
+                allTracksConfigured = false;
+                break;
+            }
+        }
+        if (allTracksConfigured)
+            webKitMediaSrc->priv->allTracksConfigured = true;
+    }
+    GST_OBJECT_UNLOCK(webKitMediaSrc);
+
+    if (allTracksConfigured) {
+        GST_DEBUG("All tracks attached. Completing async state change operation.");
+        gst_element_no_more_pads(GST_ELEMENT(webKitMediaSrc));
+        webKitMediaSrcDoAsyncDone(webKitMediaSrc);
+    }
+}
+
+// Uri handler interface.
+// Uri handler interface.
+// This element is a source-side URI handler.
+GstURIType webKitMediaSrcUriGetType(GType)
+{
+    return GST_URI_SRC;
+}
+
+// Only the custom "mediasourceblob" protocol is handled.
+const gchar* const* webKitMediaSrcGetProtocols(GType)
+{
+    static const char* protocols[] = {"mediasourceblob", nullptr };
+    return protocols;
+}
+
+// GstURIHandler::get_uri: returns a copy of the stored location; the caller
+// owns (and frees) the returned string.
+gchar* webKitMediaSrcGetUri(GstURIHandler* handler)
+{
+    WebKitMediaSrc* source = WEBKIT_MEDIA_SRC(handler);
+    gchar* result;
+
+    GST_OBJECT_LOCK(source);
+    result = g_strdup(source->priv->location.get());
+    GST_OBJECT_UNLOCK(source);
+    return result;
+}
+
+// GstURIHandler::set_uri: only allowed in states below PAUSED. Stores the
+// normalized URL string in priv->location; a null uri just clears it.
+gboolean webKitMediaSrcSetUri(GstURIHandler* handler, const gchar* uri, GError**)
+{
+    WebKitMediaSrc* source = WEBKIT_MEDIA_SRC(handler);
+
+    if (GST_STATE(source) >= GST_STATE_PAUSED) {
+        GST_ERROR_OBJECT(source, "URI can only be set in states < PAUSED");
+        return FALSE;
+    }
+
+    GST_OBJECT_LOCK(source);
+    WebKitMediaSrcPrivate* priv = source->priv;
+    priv->location = nullptr;
+    if (!uri) {
+        GST_OBJECT_UNLOCK(source);
+        return TRUE;
+    }
+
+    // Normalize through WebCore::URL before storing.
+    WebCore::URL url(WebCore::URL(), uri);
+
+    priv->location = GUniquePtr<gchar>(g_strdup(url.string().utf8().data()));
+    GST_OBJECT_UNLOCK(source);
+    return TRUE;
+}
+
+// GstURIHandler interface initializer: wires the vfuncs for the
+// "mediasourceblob" protocol. C-style cast replaced with static_cast for
+// consistency with the rest of the file.
+void webKitMediaSrcUriHandlerInit(gpointer gIface, gpointer)
+{
+    GstURIHandlerInterface* iface = static_cast<GstURIHandlerInterface*>(gIface);
+
+    iface->get_type = webKitMediaSrcUriGetType;
+    iface->get_protocols = webKitMediaSrcGetProtocols;
+    iface->get_uri = webKitMediaSrcGetUri;
+    iface->set_uri = webKitMediaSrcSetUri;
+}
+
+// Main-thread half of the "seek-needs-data" application message: re-enables
+// sample flow on every valid stream and tells the player private the seek
+// time now has data available.
+static void seekNeedsDataMainThread(WebKitMediaSrc* source)
+{
+    GST_DEBUG("Buffering needed before seek");
+
+    ASSERT(WTF::isMainThread());
+
+    GST_OBJECT_LOCK(source);
+    MediaTime seekTime = source->priv->seekTime;
+    WebCore::MediaPlayerPrivateGStreamerMSE* mediaPlayerPrivate = source->priv->mediaPlayerPrivate;
+
+    if (!mediaPlayerPrivate) {
+        GST_OBJECT_UNLOCK(source);
+        return;
+    }
+
+    for (Stream* stream : source->priv->streams) {
+        if (stream->type != WebCore::Invalid)
+            stream->sourceBuffer->setReadyForMoreSamples(true);
+    }
+    GST_OBJECT_UNLOCK(source);
+    // Called outside the lock.
+    mediaPlayerPrivate->notifySeekNeedsDataForTime(seekTime);
+}
+
+// Main-thread half of the "ready-for-more-samples" application message.
+static void notifyReadyForMoreSamplesMainThread(WebKitMediaSrc* source, Stream* appsrcStream)
+{
+    GST_OBJECT_LOCK(source);
+
+    // The stream may have been removed between posting the message and its
+    // delivery; validate the pointer against the current stream list.
+    auto it = std::find(source->priv->streams.begin(), source->priv->streams.end(), appsrcStream);
+    if (it == source->priv->streams.end()) {
+        GST_OBJECT_UNLOCK(source);
+        return;
+    }
+
+    // Suppressed while a seek is in flight.
+    WebCore::MediaPlayerPrivateGStreamerMSE* mediaPlayerPrivate = source->priv->mediaPlayerPrivate;
+    if (mediaPlayerPrivate && !mediaPlayerPrivate->seeking())
+        appsrcStream->sourceBuffer->notifyReadyForMoreSamples();
+
+    GST_OBJECT_UNLOCK(source);
+}
+
+// Bus "message::application" handler: dispatches the messages posted from
+// enabledAppsrcNeedData() to their main-thread handlers.
+static void applicationMessageCallback(GstBus*, GstMessage* message, WebKitMediaSrc* source)
+{
+    ASSERT(WTF::isMainThread());
+    ASSERT(GST_MESSAGE_TYPE(message) == GST_MESSAGE_APPLICATION);
+
+    const GstStructure* structure = gst_message_get_structure(message);
+
+    if (gst_structure_has_name(structure, "seek-needs-data")) {
+        seekNeedsDataMainThread(source);
+        return;
+    }
+
+    if (gst_structure_has_name(structure, "ready-for-more-samples")) {
+        Stream* appsrcStream = nullptr;
+        gst_structure_get(structure, "appsrc-stream", G_TYPE_POINTER, &appsrcStream, nullptr);
+        ASSERT(appsrcStream);
+
+        notifyReadyForMoreSamplesMainThread(source, appsrcStream);
+        return;
+    }
+
+    // Only the two structures above are ever posted to this bus.
+    ASSERT_NOT_REACHED();
+}
+
+// Attaches (or detaches, with nullptr) the MSE player private, rewiring the
+// bus application-message subscription accordingly.
+void webKitMediaSrcSetMediaPlayerPrivate(WebKitMediaSrc* source, WebCore::MediaPlayerPrivateGStreamerMSE* mediaPlayerPrivate)
+{
+    GST_OBJECT_LOCK(source);
+    // Disconnect from the previous player's bus before switching.
+    if (source->priv->mediaPlayerPrivate && source->priv->mediaPlayerPrivate != mediaPlayerPrivate && source->priv->bus)
+        g_signal_handlers_disconnect_by_func(source->priv->bus.get(), gpointer(applicationMessageCallback), source);
+
+    // Set to nullptr on MediaPlayerPrivateGStreamer destruction, never a dangling pointer.
+    source->priv->mediaPlayerPrivate = mediaPlayerPrivate;
+    source->priv->bus = mediaPlayerPrivate ? adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(mediaPlayerPrivate->pipeline()))) : nullptr;
+    if (source->priv->bus) {
+        // MediaPlayerPrivateGStreamer has called gst_bus_add_signal_watch() at this point, so we can subscribe.
+        g_signal_connect(source->priv->bus.get(), "message::application", G_CALLBACK(applicationMessageCallback), source);
+    }
+    GST_OBJECT_UNLOCK(source);
+}
+
+// Toggles sample flow for every stream at once (e.g. around seeks).
+void webKitMediaSrcSetReadyForSamples(WebKitMediaSrc* source, bool isReady)
+{
+    if (source) {
+        GST_OBJECT_LOCK(source);
+        for (Stream* stream : source->priv->streams)
+            stream->sourceBuffer->setReadyForMoreSamples(isReady);
+        GST_OBJECT_UNLOCK(source);
+    }
+}
+
+// Arms the seek machinery: resets the per-stream seek/need-data counters and
+// records the target time so samples away from it are not enqueued.
+void webKitMediaSrcPrepareSeek(WebKitMediaSrc* source, const MediaTime& time)
+{
+    GST_OBJECT_LOCK(source);
+    source->priv->seekTime = time;
+    source->priv->appsrcSeekDataCount = 0;
+    source->priv->appsrcNeedDataCount = 0;
+
+    for (Stream* stream : source->priv->streams) {
+        stream->appsrcNeedDataFlag = false;
+        // Don't allow samples away from the seekTime to be enqueued.
+        stream->lastEnqueuedTime = time;
+    }
+
+    // The pending action will be performed in enabledAppsrcSeekData().
+    source->priv->appsrcSeekDataNextAction = MediaSourceSeekToTime;
+    GST_OBJECT_UNLOCK(source);
+}
+
+namespace WTF {
+// GRefPtr support for WebKitMediaSrc. A freshly created WebKitMediaSrc holds
+// a floating GObject reference; refGPtr() sinks it so the GRefPtr owns a real
+// one, and adoptGRef() asserts the reference was already sunk.
+template <> GRefPtr<WebKitMediaSrc> adoptGRef(WebKitMediaSrc* ptr)
+{
+    ASSERT(!ptr || !g_object_is_floating(G_OBJECT(ptr)));
+    return GRefPtr<WebKitMediaSrc>(ptr, GRefPtrAdopt);
+}
+
+template <> WebKitMediaSrc* refGPtr<WebKitMediaSrc>(WebKitMediaSrc* ptr)
+{
+    if (ptr)
+        gst_object_ref_sink(GST_OBJECT(ptr));
+
+    return ptr;
+}
+
+template <> void derefGPtr<WebKitMediaSrc>(WebKitMediaSrc* ptr)
+{
+    if (ptr)
+        gst_object_unref(ptr);
+}
+} // namespace WTF (stray ';' after the closing brace removed)
+
+#endif // USE(GSTREAMER)
+
diff --git a/Source/WebCore/platform/graphics/gstreamer/WebKitMediaSourceGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamer.h
index 78892c3e3..79086054c 100644
--- a/Source/WebCore/platform/graphics/gstreamer/WebKitMediaSourceGStreamer.h
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamer.h
@@ -2,6 +2,9 @@
* Copyright (C) 2009, 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
* Copyright (C) 2013 Collabora Ltd.
* Copyright (C) 2013 Orange
+ * Copyright (C) 2014, 2015 Sebastian Dröge <sebastian@centricular.com>
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
+ * Copyright (C) 2015, 2016 Igalia, S.L
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
@@ -18,13 +21,26 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
-#ifndef WebKitMediaSourceGStreamer_h
-#define WebKitMediaSourceGStreamer_h
+#pragma once
+
#if ENABLE(VIDEO) && ENABLE(MEDIA_SOURCE) && USE(GSTREAMER)
+#include "GRefPtrGStreamer.h"
#include "MediaPlayer.h"
+#include "MediaSource.h"
+#include "MediaSourcePrivate.h"
+#include "SourceBufferPrivate.h"
+#include "SourceBufferPrivateClient.h"
#include <gst/gst.h>
+namespace WebCore {
+
+class MediaPlayerPrivateGStreamerMSE;
+
+enum MediaSourceStreamTypeGStreamer { Invalid, Unknown, Audio, Video, Text };
+
+}
+
G_BEGIN_DECLS
#define WEBKIT_TYPE_MEDIA_SRC (webkit_media_src_get_type ())
@@ -45,28 +61,20 @@ struct _WebKitMediaSrc {
struct _WebKitMediaSrcClass {
GstBinClass parentClass;
+
+ // Notify app that number of audio/video/text streams changed.
+ void (*videoChanged)(WebKitMediaSrc*);
+ void (*audioChanged)(WebKitMediaSrc*);
+ void (*textChanged)(WebKitMediaSrc*);
};
GType webkit_media_src_get_type(void);
-void webKitMediaSrcSetMediaPlayer(WebKitMediaSrc*, WebCore::MediaPlayer*);
-void webKitMediaSrcSetPlayBin(WebKitMediaSrc*, GstElement*);
-G_END_DECLS
-
-class MediaSourceClientGstreamer: public RefCounted<MediaSourceClientGstreamer> {
- public:
- MediaSourceClientGstreamer(WebKitMediaSrc*);
- ~MediaSourceClientGstreamer();
+void webKitMediaSrcSetMediaPlayerPrivate(WebKitMediaSrc*, WebCore::MediaPlayerPrivateGStreamerMSE*);
- void didReceiveDuration(double);
- void didReceiveData(const char*, int, String);
- void didFinishLoading(double);
- void didFail();
-
- private:
- WebKitMediaSrc* m_src;
-};
+void webKitMediaSrcPrepareSeek(WebKitMediaSrc*, const MediaTime&);
+void webKitMediaSrcSetReadyForSamples(WebKitMediaSrc*, bool);
+G_END_DECLS
#endif // USE(GSTREAMER)
-#endif
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamerPrivate.h b/Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamerPrivate.h
new file mode 100644
index 000000000..83b523f7d
--- /dev/null
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamerPrivate.h
@@ -0,0 +1,143 @@
+/*
+ * Copyright (C) 2016 Metrological Group B.V.
+ * Copyright (C) 2016 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
+
+#include "AudioTrackPrivateGStreamer.h"
+#include "SourceBufferPrivateGStreamer.h"
+#include "VideoTrackPrivateGStreamer.h"
+#include "WebKitMediaSourceGStreamer.h"
+
+#include <gst/app/gstappsrc.h>
+#include <gst/gst.h>
+#include <wtf/Condition.h>
+#include <wtf/RefPtr.h>
+#include <wtf/glib/GRefPtr.h>
+
+namespace WebCore {
+
+class MediaPlayerPrivateGStreamerMSE;
+
+}; // namespace WebCore
+
+void webKitMediaSrcUriHandlerInit(gpointer, gpointer);
+
+#define WEBKIT_MEDIA_SRC_GET_PRIVATE(obj) (G_TYPE_INSTANCE_GET_PRIVATE((obj), WEBKIT_TYPE_MEDIA_SRC, WebKitMediaSrcPrivate))
+
+typedef struct _Stream Stream;
+
+struct _Stream {
+ // Fields filled when the Stream is created.
+ WebKitMediaSrc* parent;
+
+ // AppSrc.
+ GstElement* appsrc;
+ GstPad* decodebinSinkPad;
+ WebCore::SourceBufferPrivateGStreamer* sourceBuffer;
+
+ // Fields filled when the track is attached.
+ WebCore::MediaSourceStreamTypeGStreamer type;
+ // Might be 0, e.g. for VP8/VP9.
+ GstElement* parser;
+ GRefPtr<GstCaps> caps;
+ RefPtr<WebCore::AudioTrackPrivateGStreamer> audioTrack;
+ RefPtr<WebCore::VideoTrackPrivateGStreamer> videoTrack;
+ WebCore::FloatSize presentationSize;
+
+ // This helps WebKitMediaSrcPrivate.appsrcNeedDataCount, ensuring that needDatas are
+ // counted only once per each appsrc.
+ bool appsrcNeedDataFlag;
+
+ // Used to enforce continuity in the appended data and avoid breaking the decoder.
+ MediaTime lastEnqueuedTime;
+};
+
+enum {
+ PROP_0,
+ PROP_LOCATION,
+ PROP_N_AUDIO,
+ PROP_N_VIDEO,
+ PROP_N_TEXT,
+ PROP_LAST
+};
+
+enum {
+ SIGNAL_VIDEO_CHANGED,
+ SIGNAL_AUDIO_CHANGED,
+ SIGNAL_TEXT_CHANGED,
+ LAST_SIGNAL
+};
+
+enum OnSeekDataAction {
+ Nothing,
+ MediaSourceSeekToTime
+};
+
+struct _WebKitMediaSrcPrivate {
+ // Used to coordinate the release of Stream track info.
+ Lock streamLock;
+ Condition streamCondition;
+
+ Deque<Stream*> streams;
+ GUniquePtr<gchar> location;
+ int numberOfAudioStreams;
+ int numberOfVideoStreams;
+ int numberOfTextStreams;
+ bool asyncStart;
+ bool allTracksConfigured;
+ unsigned numberOfPads;
+
+ MediaTime seekTime;
+
+ // On seek, we wait for all the seekDatas, then for all the needDatas, and then run the nextAction.
+ OnSeekDataAction appsrcSeekDataNextAction;
+ int appsrcSeekDataCount;
+ int appsrcNeedDataCount;
+
+ GRefPtr<GstBus> bus;
+ WebCore::MediaPlayerPrivateGStreamerMSE* mediaPlayerPrivate;
+};
+
+extern guint webKitMediaSrcSignals[LAST_SIGNAL];
+extern GstAppSrcCallbacks enabledAppsrcCallbacks;
+extern GstAppSrcCallbacks disabledAppsrcCallbacks;
+
+void webKitMediaSrcUriHandlerInit(gpointer gIface, gpointer ifaceData);
+void webKitMediaSrcFinalize(GObject*);
+void webKitMediaSrcSetProperty(GObject*, guint propertyId, const GValue*, GParamSpec*);
+void webKitMediaSrcGetProperty(GObject*, guint propertyId, GValue*, GParamSpec*);
+void webKitMediaSrcDoAsyncStart(WebKitMediaSrc*);
+void webKitMediaSrcDoAsyncDone(WebKitMediaSrc*);
+GstStateChangeReturn webKitMediaSrcChangeState(GstElement*, GstStateChange);
+gint64 webKitMediaSrcGetSize(WebKitMediaSrc*);
+gboolean webKitMediaSrcQueryWithParent(GstPad*, GstObject*, GstQuery*);
+void webKitMediaSrcUpdatePresentationSize(GstCaps*, Stream*);
+void webKitMediaSrcLinkStreamToSrcPad(GstPad*, Stream*);
+void webKitMediaSrcLinkParser(GstPad*, GstCaps*, Stream*);
+void webKitMediaSrcFreeStream(WebKitMediaSrc*, Stream*);
+void webKitMediaSrcCheckAllTracksConfigured(WebKitMediaSrc*);
+GstURIType webKitMediaSrcUriGetType(GType);
+const gchar* const* webKitMediaSrcGetProtocols(GType);
+gchar* webKitMediaSrcGetUri(GstURIHandler*);
+gboolean webKitMediaSrcSetUri(GstURIHandler*, const gchar*, GError**);
+
+#endif // ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)