author     Lorry Tar Creator <lorry-tar-importer@lorry>  2017-06-27 06:07:23 +0000
committer  Lorry Tar Creator <lorry-tar-importer@lorry>  2017-06-27 06:07:23 +0000
commit     1bf1084f2b10c3b47fd1a588d85d21ed0eb41d0c (patch)
tree       46dcd36c86e7fbc6e5df36deb463b33e9967a6f7 /Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp
parent     32761a6cee1d0dee366b885b7b9c777e67885688 (diff)
Diffstat (limited to 'Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp')

-rw-r--r--  Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp  1118

1 file changed, 942 insertions, 176 deletions
diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp
index c6564730a..9332aab49 100644
--- a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp
@@ -3,7 +3,8 @@
  * Copyright (C) 2007 Collabora Ltd. All rights reserved.
  * Copyright (C) 2007 Alp Toker <alp@atoker.com>
  * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
- * Copyright (C) 2009, 2010 Igalia S.L
+ * Copyright (C) 2009, 2010, 2015, 2016 Igalia S.L
+ * Copyright (C) 2015, 2016 Metrological Group B.V.
  *
  * This library is free software; you can redistribute it and/or
  * modify it under the terms of the GNU Library General Public
@@ -26,7 +27,6 @@
 
 #if ENABLE(VIDEO) && USE(GSTREAMER)
 
-#include "ColorSpace.h"
 #include "GStreamerUtilities.h"
 #include "GraphicsContext.h"
 #include "GraphicsTypes.h"
@@ -37,16 +37,71 @@
 #include "NotImplemented.h"
 #include "VideoSinkGStreamer.h"
 #include "WebKitWebSourceGStreamer.h"
-#include <gst/gst.h>
-#include <wtf/gobject/GMutexLocker.h>
+#include <wtf/glib/GMutexLocker.h>
+#include <wtf/glib/GUniquePtr.h>
+#include <wtf/text/AtomicString.h>
 #include <wtf/text/CString.h>
+#include <wtf/MathExtras.h>
 #include <gst/audio/streamvolume.h>
 #include <gst/video/gstvideometa.h>
 
-#if GST_CHECK_VERSION(1, 1, 0) && USE(ACCELERATED_COMPOSITING) && USE(TEXTURE_MAPPER_GL)
+#if USE(GSTREAMER_GL)
+#include <gst/app/gstappsink.h>
+#define GST_USE_UNSTABLE_API
+#include <gst/gl/gl.h>
+#undef GST_USE_UNSTABLE_API
+
+#include "GLContext.h"
+#if USE(GLX)
+#include "GLContextGLX.h"
+#include <gst/gl/x11/gstgldisplay_x11.h>
+#endif
+
+#if USE(EGL)
+#include "GLContextEGL.h"
+#include <gst/gl/egl/gstgldisplay_egl.h>
+#endif
+
+#if PLATFORM(X11)
+#include "PlatformDisplayX11.h"
+#endif
+
+#if PLATFORM(WAYLAND)
+#include "PlatformDisplayWayland.h"
+#endif
+
+// gstglapi.h may include eglplatform.h and it includes X.h, which
+// defines None, breaking MediaPlayer::None enum
+#if PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
+#undef None
+#endif // PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
+#include "VideoTextureCopierGStreamer.h"
+#endif // USE(GSTREAMER_GL)
+
+#if USE(TEXTURE_MAPPER_GL)
+#include "BitmapTextureGL.h"
+#include "BitmapTexturePool.h"
 #include "TextureMapperGL.h"
 #endif
 
+#if USE(COORDINATED_GRAPHICS_THREADED)
+#include "TextureMapperPlatformLayerBuffer.h"
+#endif
+
+#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
+#include <cairo-gl.h>
+#endif
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
+#include "SharedBuffer.h"
+#include "WebKitClearKeyDecryptorGStreamer.h"
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+#include "UUID.h"
+#include <runtime/JSCInlines.h>
+#include <runtime/TypedArrayInlines.h>
+#include <runtime/Uint8Array.h>
+#endif
+#endif
+
 GST_DEBUG_CATEGORY(webkit_media_player_debug);
 #define GST_CAT_DEFAULT webkit_media_player_debug
@@ -55,6 +110,22 @@ using namespace std;
 
 namespace WebCore {
 
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+static AtomicString keySystemIdToUuid(const AtomicString&);
+#endif
+
+void registerWebKitGStreamerElements()
+{
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
+    if (!webkitGstCheckVersion(1, 6, 1))
+        return;
+
+    GRefPtr<GstElementFactory> clearKeyDecryptorFactory = gst_element_factory_find("webkitclearkey");
+    if (!clearKeyDecryptorFactory)
+        gst_element_register(nullptr, "webkitclearkey", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_MEDIA_CK_DECRYPT);
+#endif
+}
+
 static int greatestCommonDivisor(int a, int b)
 {
     while (b) {
@@ -66,112 +137,345 @@ static int greatestCommonDivisor(int a, int b)
     return ABS(a);
 }
 
-static void mediaPlayerPrivateVolumeChangedCallback(GObject*, GParamSpec*, MediaPlayerPrivateGStreamerBase* player)
+#if USE(TEXTURE_MAPPER_GL)
+static inline TextureMapperGL::Flags texMapFlagFromOrientation(const ImageOrientation& orientation)
 {
-    // This is called when m_volumeElement receives the notify::volume signal.
-    LOG_MEDIA_MESSAGE("Volume changed to: %f", player->volume());
-    player->volumeChanged();
+    switch (orientation) {
+    case DefaultImageOrientation:
+        return 0;
+    case OriginRightTop:
+        return TextureMapperGL::ShouldRotateTexture90;
+    case OriginBottomRight:
+        return TextureMapperGL::ShouldRotateTexture180;
+    case OriginLeftBottom:
+        return TextureMapperGL::ShouldRotateTexture270;
+    default:
+        ASSERT_NOT_REACHED();
+    }
+
+    return 0;
 }
+#endif
+
+#if USE(COORDINATED_GRAPHICS_THREADED) && USE(GSTREAMER_GL)
+class GstVideoFrameHolder : public TextureMapperPlatformLayerBuffer::UnmanagedBufferDataHolder {
+public:
+    explicit GstVideoFrameHolder(GstSample* sample, TextureMapperGL::Flags flags)
+    {
+        GstVideoInfo videoInfo;
+        if (UNLIKELY(!getSampleVideoInfo(sample, videoInfo)))
+            return;
+
+        m_size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
+        m_flags = flags | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0);
+
+        GstBuffer* buffer = gst_sample_get_buffer(sample);
+        if (UNLIKELY(!gst_video_frame_map(&m_videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL))))
+            return;
 
-static gboolean mediaPlayerPrivateVolumeChangeTimeoutCallback(MediaPlayerPrivateGStreamerBase* player)
+        m_textureID = *reinterpret_cast<GLuint*>(m_videoFrame.data[0]);
+        m_isValid = true;
+    }
+
+    virtual ~GstVideoFrameHolder()
+    {
+        if (UNLIKELY(!m_isValid))
+            return;
+
+        gst_video_frame_unmap(&m_videoFrame);
+    }
+
+    const IntSize& size() const { return m_size; }
+    TextureMapperGL::Flags flags() const { return m_flags; }
+    GLuint textureID() const { return m_textureID; }
+    bool isValid() const { return m_isValid; }
+
+private:
+    GstVideoFrame m_videoFrame;
+    IntSize m_size;
+    TextureMapperGL::Flags m_flags;
+    GLuint m_textureID;
+    bool m_isValid { false };
+};
+#endif // USE(COORDINATED_GRAPHICS_THREADED) && USE(GSTREAMER_GL)
+
+MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* player)
+    : m_notifier(MainThreadNotifier<MainThreadNotification>::create())
+    , m_player(player)
+    , m_fpsSink(nullptr)
+    , m_readyState(MediaPlayer::HaveNothing)
+    , m_networkState(MediaPlayer::Empty)
+#if USE(GSTREAMER_GL) || USE(COORDINATED_GRAPHICS_THREADED)
+    , m_drawTimer(RunLoop::main(), this, &MediaPlayerPrivateGStreamerBase::repaint)
+#endif
+    , m_usingFallbackVideoSink(false)
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+    , m_cdmSession(nullptr)
+#endif
 {
-    // This is the callback of the timeout source created in ::volumeChanged.
-    player->notifyPlayerOfVolumeChange();
-    return FALSE;
+    g_mutex_init(&m_sampleMutex);
+#if USE(COORDINATED_GRAPHICS_THREADED)
+    m_platformLayerProxy = adoptRef(new TextureMapperPlatformLayerProxy());
+#endif
 }
 
-static void mediaPlayerPrivateMuteChangedCallback(GObject*, GParamSpec*, MediaPlayerPrivateGStreamerBase* player)
+MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
 {
-    // This is called when m_volumeElement receives the notify::mute signal.
-    player->muteChanged();
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+    m_protectionCondition.notifyOne();
+#endif
+
+    m_notifier->invalidate();
+
+    cancelRepaint();
+
+    if (m_videoSink) {
+        g_signal_handlers_disconnect_matched(m_videoSink.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
+#if USE(GSTREAMER_GL)
+        if (GST_IS_BIN(m_videoSink.get())) {
+            GRefPtr<GstElement> appsink = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_videoSink.get()), "webkit-gl-video-sink"));
+            g_signal_handlers_disconnect_by_data(appsink.get(), this);
+        }
+#endif
+    }
+
+    g_mutex_clear(&m_sampleMutex);
+
+    m_player = nullptr;
+
+    if (m_volumeElement)
+        g_signal_handlers_disconnect_matched(m_volumeElement.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
+
+#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
+    if (client())
+        client()->platformLayerWillBeDestroyed();
+#endif
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+    m_cdmSession = nullptr;
+#endif
+
+    if (m_pipeline)
+        gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);
 }
 
-static gboolean mediaPlayerPrivateMuteChangeTimeoutCallback(MediaPlayerPrivateGStreamerBase* player)
+void MediaPlayerPrivateGStreamerBase::setPipeline(GstElement* pipeline)
 {
-    // This is the callback of the timeout source created in ::muteChanged.
-    player->notifyPlayerOfMute();
-    return FALSE;
+    m_pipeline = pipeline;
 }
 
-static void mediaPlayerPrivateRepaintCallback(WebKitVideoSink*, GstBuffer *buffer, MediaPlayerPrivateGStreamerBase* playerPrivate)
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+static std::pair<Vector<GRefPtr<GstEvent>>, Vector<String>> extractEventsAndSystemsFromMessage(GstMessage* message)
 {
-    playerPrivate->triggerRepaint(buffer);
+    const GstStructure* structure = gst_message_get_structure(message);
+
+    const GValue* streamEncryptionAllowedSystemsValue = gst_structure_get_value(structure, "stream-encryption-systems");
+    ASSERT(streamEncryptionAllowedSystemsValue && G_VALUE_HOLDS(streamEncryptionAllowedSystemsValue, G_TYPE_STRV));
+    const char** streamEncryptionAllowedSystems = reinterpret_cast<const char**>(g_value_get_boxed(streamEncryptionAllowedSystemsValue));
+    ASSERT(streamEncryptionAllowedSystems);
+    Vector<String> streamEncryptionAllowedSystemsVector;
+    unsigned i;
+    for (i = 0; streamEncryptionAllowedSystems[i]; ++i)
+        streamEncryptionAllowedSystemsVector.append(streamEncryptionAllowedSystems[i]);
+
+    const GValue* streamEncryptionEventsList = gst_structure_get_value(structure, "stream-encryption-events");
+    ASSERT(streamEncryptionEventsList && GST_VALUE_HOLDS_LIST(streamEncryptionEventsList));
+    unsigned streamEncryptionEventsListSize = gst_value_list_get_size(streamEncryptionEventsList);
+    Vector<GRefPtr<GstEvent>> streamEncryptionEventsVector;
+    for (i = 0; i < streamEncryptionEventsListSize; ++i)
+        streamEncryptionEventsVector.append(GRefPtr<GstEvent>(static_cast<GstEvent*>(g_value_get_boxed(gst_value_list_get_value(streamEncryptionEventsList, i)))));
+
+    return std::make_pair(streamEncryptionEventsVector, streamEncryptionAllowedSystemsVector);
 }
-
-MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* player)
-    : m_player(player)
-    , m_fpsSink(0)
-    , m_readyState(MediaPlayer::HaveNothing)
-    , m_networkState(MediaPlayer::Empty)
-    , m_buffer(0)
-    , m_volumeTimerHandler(0)
-    , m_muteTimerHandler(0)
-    , m_repaintHandler(0)
-    , m_volumeSignalHandler(0)
-    , m_muteSignalHandler(0)
-{
-#if GLIB_CHECK_VERSION(2, 31, 0)
-    m_bufferMutex = new GMutex;
-    g_mutex_init(m_bufferMutex);
-#else
-    m_bufferMutex = g_mutex_new();
 #endif
+
+bool MediaPlayerPrivateGStreamerBase::handleSyncMessage(GstMessage* message)
+{
+    UNUSED_PARAM(message);
+    if (GST_MESSAGE_TYPE(message) != GST_MESSAGE_NEED_CONTEXT)
+        return false;
+
+    const gchar* contextType;
+    gst_message_parse_context_type(message, &contextType);
+
+#if USE(GSTREAMER_GL)
+    GRefPtr<GstContext> elementContext = adoptGRef(requestGLContext(contextType, this));
+    if (elementContext) {
+        gst_element_set_context(GST_ELEMENT(message->src), elementContext.get());
+        return true;
+    }
+#endif // USE(GSTREAMER_GL)
+
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+    if (!g_strcmp0(contextType, "drm-preferred-decryption-system-id")) {
+        if (isMainThread()) {
+            GST_ERROR("can't handle drm-preferred-decryption-system-id need context message in the main thread");
+            ASSERT_NOT_REACHED();
+            return false;
+        }
+        GST_DEBUG("handling drm-preferred-decryption-system-id need context message");
+        std::pair<Vector<GRefPtr<GstEvent>>, Vector<String>> streamEncryptionInformation = extractEventsAndSystemsFromMessage(message);
+        GST_TRACE("found %" G_GSIZE_FORMAT " protection events", streamEncryptionInformation.first.size());
+        Vector<uint8_t> concatenatedInitDataChunks;
+        unsigned concatenatedInitDataChunksNumber = 0;
+        String eventKeySystemIdString;
+        for (auto& event : streamEncryptionInformation.first) {
+            GST_TRACE("handling protection event %u", GST_EVENT_SEQNUM(event.get()));
+            const char* eventKeySystemId = nullptr;
+            GstBuffer* data = nullptr;
+            gst_event_parse_protection(event.get(), &eventKeySystemId, &data, nullptr);
+
+            // Here we receive the DRM init data from the pipeline: we will emit
+            // the needkey event with that data and the browser might create a
+            // CDMSession from this event handler. If such a session was created
+            // We will emit the message event from the session to provide the
+            // DRM challenge to the browser and wait for an update. If on the
+            // contrary no session was created we won't wait and let the pipeline
+            // error out by itself.
+            GstMapInfo mapInfo;
+            if (!gst_buffer_map(data, &mapInfo, GST_MAP_READ)) {
+                GST_WARNING("cannot map %s protection data", eventKeySystemId);
+                break;
+            }
+
+            GST_TRACE("appending init data for %s of size %" G_GSIZE_FORMAT, eventKeySystemId, mapInfo.size);
+            GST_MEMDUMP("init data", reinterpret_cast<const unsigned char *>(mapInfo.data), mapInfo.size);
+            concatenatedInitDataChunks.append(mapInfo.data, mapInfo.size);
+            ++concatenatedInitDataChunksNumber;
+            eventKeySystemIdString = eventKeySystemId;
+            if (streamEncryptionInformation.second.contains(eventKeySystemId)) {
+                GST_TRACE("considering init data handled for %s", eventKeySystemId);
+                m_handledProtectionEvents.add(GST_EVENT_SEQNUM(event.get()));
+            }
+            gst_buffer_unmap(data, &mapInfo);
+        }
+
+        if (!concatenatedInitDataChunksNumber)
+            return false;
+
+        if (concatenatedInitDataChunksNumber > 1)
+            eventKeySystemIdString = emptyString();
+
+        RunLoop::main().dispatch([this, eventKeySystemIdString, initData = WTFMove(concatenatedInitDataChunks)] {
+            GST_DEBUG("scheduling keyNeeded event for %s with concatenated init datas size of %" G_GSIZE_FORMAT, eventKeySystemIdString.utf8().data(), initData.size());
+            GST_MEMDUMP("init datas", initData.data(), initData.size());
+
+            // FIXME: Provide a somehow valid sessionId.
+            RefPtr<Uint8Array> initDataArray = Uint8Array::create(initData.data(), initData.size());
+            needKey(initDataArray);
+        });
+
+        GST_INFO("waiting for a key request to arrive");
+        LockHolder lock(m_protectionMutex);
+        m_protectionCondition.waitFor(m_protectionMutex, Seconds(4), [this] {
+            return !this->m_lastGenerateKeyRequestKeySystemUuid.isEmpty();
+        });
+        if (!m_lastGenerateKeyRequestKeySystemUuid.isEmpty()) {
+            GST_INFO("got a key request, continuing with %s on %s", m_lastGenerateKeyRequestKeySystemUuid.utf8().data(), GST_MESSAGE_SRC_NAME(message));
+
+            GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-preferred-decryption-system-id", FALSE));
+            GstStructure* contextStructure = gst_context_writable_structure(context.get());
+            gst_structure_set(contextStructure, "decryption-system-id", G_TYPE_STRING, m_lastGenerateKeyRequestKeySystemUuid.utf8().data(), nullptr);
+            gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context.get());
+        } else
+            GST_WARNING("did not get a proper key request");
+
+        return true;
+    }
+#endif // ENABLE(LEGACY_ENCRYPTED_MEDIA)
+
+    return false;
 }
 
-MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
+#if USE(GSTREAMER_GL)
+GstContext* MediaPlayerPrivateGStreamerBase::requestGLContext(const gchar* contextType, MediaPlayerPrivateGStreamerBase* player)
 {
-    if (m_repaintHandler) {
-        g_signal_handler_disconnect(m_webkitVideoSink.get(), m_repaintHandler);
-        m_repaintHandler = 0;
+    if (!player->ensureGstGLContext())
+        return nullptr;
+
+    if (!g_strcmp0(contextType, GST_GL_DISPLAY_CONTEXT_TYPE)) {
+        GstContext* displayContext = gst_context_new(GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
+        gst_context_set_gl_display(displayContext, player->gstGLDisplay());
+        return displayContext;
     }
-#if GLIB_CHECK_VERSION(2, 31, 0)
-    g_mutex_clear(m_bufferMutex);
-    delete m_bufferMutex;
+
+    if (!g_strcmp0(contextType, "gst.gl.app_context")) {
+        GstContext* appContext = gst_context_new("gst.gl.app_context", TRUE);
+        GstStructure* structure = gst_context_writable_structure(appContext);
+#if GST_CHECK_VERSION(1, 11, 0)
+        gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, player->gstGLContext(), nullptr);
 #else
-    g_mutex_free(m_bufferMutex);
+        gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, player->gstGLContext(), nullptr);
 #endif
+        return appContext;
+    }
 
-    if (m_buffer)
-        gst_buffer_unref(m_buffer);
-    m_buffer = 0;
-
-    m_player = 0;
+    return nullptr;
+}
 
-    if (m_muteTimerHandler)
-        g_source_remove(m_muteTimerHandler);
+bool MediaPlayerPrivateGStreamerBase::ensureGstGLContext()
+{
+    if (m_glContext)
+        return true;
+
+    auto& sharedDisplay = PlatformDisplay::sharedDisplayForCompositing();
+    if (!m_glDisplay) {
+#if PLATFORM(X11)
+#if USE(GLX)
+        if (is<PlatformDisplayX11>(sharedDisplay))
+            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native()));
+#elif USE(EGL)
+        if (is<PlatformDisplayX11>(sharedDisplay))
+            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay()));
+#endif
+#endif
 
-    if (m_volumeTimerHandler)
-        g_source_remove(m_volumeTimerHandler);
+#if PLATFORM(WAYLAND)
+        if (is<PlatformDisplayWayland>(sharedDisplay))
+            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay()));
+#endif
 
-    if (m_volumeSignalHandler) {
-        g_signal_handler_disconnect(m_volumeElement.get(), m_volumeSignalHandler);
-        m_volumeSignalHandler = 0;
+        ASSERT(m_glDisplay);
     }
 
-    if (m_muteSignalHandler) {
-        g_signal_handler_disconnect(m_volumeElement.get(), m_muteSignalHandler);
-        m_muteSignalHandler = 0;
-    }
+    GLContext* webkitContext = sharedDisplay.sharingGLContext();
+    // EGL and GLX are mutually exclusive, no need for ifdefs here.
+    GstGLPlatform glPlatform = webkitContext->isEGLContext() ? GST_GL_PLATFORM_EGL : GST_GL_PLATFORM_GLX;
 
-#if USE(ACCELERATED_COMPOSITING) && USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
-    if (client())
-        client()->platformLayerWillBeDestroyed();
+#if USE(OPENGL_ES_2)
+    GstGLAPI glAPI = GST_GL_API_GLES2;
+#elif USE(OPENGL)
+    GstGLAPI glAPI = GST_GL_API_OPENGL;
+#else
+    ASSERT_NOT_REACHED();
 #endif
+
+    PlatformGraphicsContext3D contextHandle = webkitContext->platformContext();
+    if (!contextHandle)
+        return false;
+
+    m_glContext = gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI);
+
+    return true;
 }
+#endif // USE(GSTREAMER_GL)
 
 // Returns the size of the video
-IntSize MediaPlayerPrivateGStreamerBase::naturalSize() const
+FloatSize MediaPlayerPrivateGStreamerBase::naturalSize() const
 {
     if (!hasVideo())
-        return IntSize();
+        return FloatSize();
 
     if (!m_videoSize.isEmpty())
         return m_videoSize;
 
-    GRefPtr<GstCaps> caps = currentVideoSinkCaps();
+    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
+    if (!GST_IS_SAMPLE(m_sample.get()))
+        return FloatSize();
+
+    GstCaps* caps = gst_sample_get_caps(m_sample.get());
     if (!caps)
-        return IntSize();
+        return FloatSize();
 
     // TODO: handle possible clean aperture data. See
@@ -184,11 +488,19 @@ IntSize MediaPlayerPrivateGStreamerBase::naturalSize() const
     int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
     IntSize originalSize;
     GstVideoFormat format;
-    if (!getVideoSizeAndFormatFromCaps(caps.get(), originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
-        return IntSize();
+    if (!getVideoSizeAndFormatFromCaps(caps, originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
+        return FloatSize();
+
+#if USE(TEXTURE_MAPPER_GL)
+    // When using accelerated compositing, if the video is tagged as rotated 90 or 270 degrees, swap width and height.
+    if (m_renderingCanBeAccelerated) {
+        if (m_videoSourceOrientation.usesWidthAsHeight())
+            originalSize = originalSize.transposedSize();
+    }
+#endif
 
-    LOG_MEDIA_MESSAGE("Original video size: %dx%d", originalSize.width(), originalSize.height());
-    LOG_MEDIA_MESSAGE("Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);
+    GST_DEBUG("Original video size: %dx%d", originalSize.width(), originalSize.height());
+    GST_DEBUG("Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);
 
     // Calculate DAR based on PAR and video size.
     int displayWidth = originalSize.width() * pixelAspectRatioNumerator;
@@ -202,21 +514,21 @@ IntSize MediaPlayerPrivateGStreamerBase::naturalSize() const
 
     // Apply DAR to original video size. This is the same behavior as in xvimagesink's setcaps function.
     guint64 width = 0, height = 0;
     if (!(originalSize.height() % displayHeight)) {
-        LOG_MEDIA_MESSAGE("Keeping video original height");
+        GST_DEBUG("Keeping video original height");
         width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
         height = static_cast<guint64>(originalSize.height());
     } else if (!(originalSize.width() % displayWidth)) {
-        LOG_MEDIA_MESSAGE("Keeping video original width");
+        GST_DEBUG("Keeping video original width");
        height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth);
        width = static_cast<guint64>(originalSize.width());
    } else {
-        LOG_MEDIA_MESSAGE("Approximating while keeping original video height");
+        GST_DEBUG("Approximating while keeping original video height");
         width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
         height = static_cast<guint64>(originalSize.height());
     }
 
-    LOG_MEDIA_MESSAGE("Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
-    m_videoSize = IntSize(static_cast<int>(width), static_cast<int>(height));
+    GST_DEBUG("Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
+    m_videoSize = FloatSize(static_cast<int>(width), static_cast<int>(height));
 
     return m_videoSize;
 }
@@ -225,7 +537,7 @@ void MediaPlayerPrivateGStreamerBase::setVolume(float volume)
     if (!m_volumeElement)
         return;
 
-    LOG_MEDIA_MESSAGE("Setting volume: %f", volume);
+    GST_DEBUG("Setting volume: %f", volume);
     gst_stream_volume_set_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC, static_cast<double>(volume));
 }
 
@@ -240,8 +552,6 @@ float MediaPlayerPrivateGStreamerBase::volume() const
 
 void MediaPlayerPrivateGStreamerBase::notifyPlayerOfVolumeChange()
 {
-    m_volumeTimerHandler = 0;
-
     if (!m_player || !m_volumeElement)
         return;
 
     double volume;
@@ -253,11 +563,12 @@ void MediaPlayerPrivateGStreamerBase::notifyPlayerOfVolumeChange()
     m_player->volumeChanged(static_cast<float>(volume));
 }
 
-void MediaPlayerPrivateGStreamerBase::volumeChanged()
+void MediaPlayerPrivateGStreamerBase::volumeChangedCallback(MediaPlayerPrivateGStreamerBase* player)
 {
-    if (m_volumeTimerHandler)
-        g_source_remove(m_volumeTimerHandler);
-    m_volumeTimerHandler = g_idle_add_full(G_PRIORITY_DEFAULT, reinterpret_cast<GSourceFunc>(mediaPlayerPrivateVolumeChangeTimeoutCallback), this, 0);
+    // This is called when m_volumeElement receives the notify::volume signal.
+    GST_DEBUG("Volume changed to: %f", player->volume());
+
+    player->m_notifier->notify(MainThreadNotification::VolumeChanged, [player] { player->notifyPlayerOfVolumeChange(); });
 }
 
 MediaPlayer::NetworkState MediaPlayerPrivateGStreamerBase::networkState() const
@@ -280,7 +591,7 @@ void MediaPlayerPrivateGStreamerBase::setMuted(bool muted)
     if (!m_volumeElement)
         return;
 
-    g_object_set(m_volumeElement.get(), "mute", muted, NULL);
+    g_object_set(m_volumeElement.get(), "mute", muted, nullptr);
 }
 
 bool MediaPlayerPrivateGStreamerBase::muted() const
@@ -289,145 +600,456 @@ bool MediaPlayerPrivateGStreamerBase::muted() const
         return false;
 
     bool muted;
-    g_object_get(m_volumeElement.get(), "mute", &muted, NULL);
+    g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
     return muted;
 }
 
 void MediaPlayerPrivateGStreamerBase::notifyPlayerOfMute()
 {
-    m_muteTimerHandler = 0;
-
     if (!m_player || !m_volumeElement)
         return;
 
     gboolean muted;
-    g_object_get(m_volumeElement.get(), "mute", &muted, NULL);
+    g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
     m_player->muteChanged(static_cast<bool>(muted));
 }
 
-void MediaPlayerPrivateGStreamerBase::muteChanged()
+void MediaPlayerPrivateGStreamerBase::muteChangedCallback(MediaPlayerPrivateGStreamerBase* player)
 {
-    if (m_muteTimerHandler)
-        g_source_remove(m_muteTimerHandler);
-    m_muteTimerHandler = g_idle_add_full(G_PRIORITY_DEFAULT, reinterpret_cast<GSourceFunc>(mediaPlayerPrivateMuteChangeTimeoutCallback), this, 0);
+    // This is called when m_volumeElement receives the notify::mute signal.
+    player->m_notifier->notify(MainThreadNotification::MuteChanged, [player] { player->notifyPlayerOfMute(); });
 }
-
-#if USE(ACCELERATED_COMPOSITING) && USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
-PassRefPtr<BitmapTexture> MediaPlayerPrivateGStreamerBase::updateTexture(TextureMapper* textureMapper)
+
+void MediaPlayerPrivateGStreamerBase::acceleratedRenderingStateChanged()
 {
-    WTF::GMutexLocker lock(m_bufferMutex);
-    if (!m_buffer)
-        return nullptr;
-
-    GRefPtr<GstCaps> caps = currentVideoSinkCaps();
-    if (!caps)
-        return nullptr;
-
-    GstVideoInfo videoInfo;
-    gst_video_info_init(&videoInfo);
-    if (!gst_video_info_from_caps(&videoInfo, caps.get()))
-        return nullptr;
+    m_renderingCanBeAccelerated = m_player && m_player->client().mediaPlayerAcceleratedCompositingEnabled() && m_player->client().mediaPlayerRenderingCanBeAccelerated(m_player);
+}
 
-    IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
-    RefPtr<BitmapTexture> texture = textureMapper->acquireTextureFromPool(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
+#if USE(TEXTURE_MAPPER_GL)
+void MediaPlayerPrivateGStreamerBase::updateTexture(BitmapTextureGL& texture, GstVideoInfo& videoInfo)
+{
+    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
 
-#if GST_CHECK_VERSION(1, 1, 0)
     GstVideoGLTextureUploadMeta* meta;
-    if ((meta = gst_buffer_get_video_gl_texture_upload_meta(m_buffer))) {
+    if ((meta = gst_buffer_get_video_gl_texture_upload_meta(buffer))) {
         if (meta->n_textures == 1) { // BRGx & BGRA formats use only one texture.
-            const BitmapTextureGL* textureGL = static_cast<const BitmapTextureGL*>(texture.get());
-            guint ids[4] = { textureGL->id(), 0, 0, 0 };
+            guint ids[4] = { texture.id(), 0, 0, 0 };
 
             if (gst_video_gl_texture_upload_meta_upload(meta, ids))
-                return texture;
+                return;
         }
     }
-#endif
 
     // Right now the TextureMapper only supports chromas with one plane
     ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);
 
     GstVideoFrame videoFrame;
-    if (!gst_video_frame_map(&videoFrame, &videoInfo, m_buffer, GST_MAP_READ))
-        return nullptr;
+    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, GST_MAP_READ))
+        return;
 
     int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0);
     const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0);
-    texture->updateContents(srcData, WebCore::IntRect(WebCore::IntPoint(0, 0), size), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData);
+    texture.updateContents(srcData, WebCore::IntRect(0, 0, GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo)), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData);
     gst_video_frame_unmap(&videoFrame);
-
-    return texture;
 }
 #endif
 
-void MediaPlayerPrivateGStreamerBase::triggerRepaint(GstBuffer* buffer)
+#if USE(COORDINATED_GRAPHICS_THREADED)
+void MediaPlayerPrivateGStreamerBase::pushTextureToCompositor()
 {
-    g_return_if_fail(GST_IS_BUFFER(buffer));
+#if !USE(GSTREAMER_GL)
+    class ConditionNotifier {
+    public:
+        ConditionNotifier(Lock& lock, Condition& condition)
+            : m_locker(lock), m_condition(condition)
+        {
+        }
+        ~ConditionNotifier()
+        {
+            m_condition.notifyOne();
+        }
+    private:
+        LockHolder m_locker;
+        Condition& m_condition;
+    };
+    ConditionNotifier notifier(m_drawMutex, m_drawCondition);
+#endif
 
-    {
-        WTF::GMutexLocker lock(m_bufferMutex);
-        gst_buffer_replace(&m_buffer, buffer);
+    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
+    if (!GST_IS_SAMPLE(m_sample.get()))
+        return;
+
+    LockHolder holder(m_platformLayerProxy->lock());
+
+    if (!m_platformLayerProxy->isActive()) {
+        // Consume the buffer (so it gets eventually unreffed) but keep the rest of the info.
+        const GstStructure* info = gst_sample_get_info(m_sample.get());
+        GstStructure* infoCopy = nullptr;
+        if (info)
+            infoCopy = gst_structure_copy(info);
+        m_sample = adoptGRef(gst_sample_new(nullptr, gst_sample_get_caps(m_sample.get()),
+            gst_sample_get_segment(m_sample.get()), infoCopy));
+        return;
     }
 
-#if USE(ACCELERATED_COMPOSITING) && USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
-    if (supportsAcceleratedRendering() && m_player->mediaPlayerClient()->mediaPlayerRenderingCanBeAccelerated(m_player) && client()) {
+#if USE(GSTREAMER_GL)
+    std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), texMapFlagFromOrientation(m_videoSourceOrientation));
+    if (UNLIKELY(!frameHolder->isValid()))
+        return;
+
+    std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(frameHolder->textureID(), frameHolder->size(), frameHolder->flags());
+    layerBuffer->setUnmanagedBufferDataHolder(WTFMove(frameHolder));
+    m_platformLayerProxy->pushNextBuffer(WTFMove(layerBuffer));
+#else
+    GstVideoInfo videoInfo;
+    if (UNLIKELY(!getSampleVideoInfo(m_sample.get(), videoInfo)))
+        return;
+
+    IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
+    std::unique_ptr<TextureMapperPlatformLayerBuffer> buffer = m_platformLayerProxy->getAvailableBuffer(size, GraphicsContext3D::DONT_CARE);
+    if (UNLIKELY(!buffer)) {
+        if (UNLIKELY(!m_context3D))
+            m_context3D = GraphicsContext3D::create(GraphicsContext3DAttributes(), nullptr, GraphicsContext3D::RenderToCurrentGLContext);
+
+        auto texture = BitmapTextureGL::create(*m_context3D);
+        texture->reset(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
+        buffer = std::make_unique<TextureMapperPlatformLayerBuffer>(WTFMove(texture));
+    }
+    updateTexture(buffer->textureGL(), videoInfo);
+    buffer->setExtraFlags(texMapFlagFromOrientation(m_videoSourceOrientation) | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0));
+    m_platformLayerProxy->pushNextBuffer(WTFMove(buffer));
+#endif
+}
+#endif
+
+void MediaPlayerPrivateGStreamerBase::repaint()
+{
+    ASSERT(m_sample);
+    ASSERT(isMainThread());
+
+#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
+    if (m_renderingCanBeAccelerated && client()) {
         client()->setPlatformLayerNeedsDisplay();
+#if USE(GSTREAMER_GL)
+        LockHolder lock(m_drawMutex);
+        m_drawCondition.notifyOne();
+#endif
         return;
     }
 #endif
 
     m_player->repaint();
+
+#if USE(GSTREAMER_GL) || USE(COORDINATED_GRAPHICS_THREADED)
+    LockHolder lock(m_drawMutex);
+    m_drawCondition.notifyOne();
+#endif
 }
 
-void MediaPlayerPrivateGStreamerBase::setSize(const IntSize& size)
+void MediaPlayerPrivateGStreamerBase::triggerRepaint(GstSample* sample)
 {
-    m_size = size;
+    bool triggerResize;
+    {
+        WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
+        triggerResize = !m_sample;
+        m_sample = sample;
+    }
+
+    if (triggerResize) {
+        GST_DEBUG("First sample reached the sink, triggering video dimensions update");
+        m_notifier->notify(MainThreadNotification::SizeChanged, [this] { m_player->sizeChanged(); });
+    }
+
+#if USE(COORDINATED_GRAPHICS_THREADED)
+    if (!m_renderingCanBeAccelerated) {
+        LockHolder locker(m_drawMutex);
+        m_drawTimer.startOneShot(0);
+        m_drawCondition.wait(m_drawMutex);
+        return;
+    }
+
+#if USE(GSTREAMER_GL)
+    pushTextureToCompositor();
+#else
+    {
+        LockHolder lock(m_drawMutex);
+        if (!m_platformLayerProxy->scheduleUpdateOnCompositorThread([this] { this->pushTextureToCompositor(); }))
+            return;
+        m_drawCondition.wait(m_drawMutex);
+    }
+#endif
+    return;
+#else
+#if USE(GSTREAMER_GL)
+    {
+        ASSERT(!isMainThread());
+
+        LockHolder locker(m_drawMutex);
+        m_drawTimer.startOneShot(0);
+        m_drawCondition.wait(m_drawMutex);
+    }
+#else
+    repaint();
+#endif
+#endif
 }
 
-void MediaPlayerPrivateGStreamerBase::paint(GraphicsContext* context, const IntRect& rect)
+void MediaPlayerPrivateGStreamerBase::repaintCallback(MediaPlayerPrivateGStreamerBase* player, GstSample* sample)
 {
-#if USE(ACCELERATED_COMPOSITING) && USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
-    if (client())
-        return;
+    player->triggerRepaint(sample);
+}
+
+void MediaPlayerPrivateGStreamerBase::cancelRepaint()
+{
+#if USE(TEXTURE_MAPPER_GL) || USE(COORDINATED_GRAPHICS_THREADED)
+    m_drawTimer.stop();
+    LockHolder locker(m_drawMutex);
+    m_drawCondition.notifyOne();
 #endif
+}
+
+void MediaPlayerPrivateGStreamerBase::repaintCancelledCallback(MediaPlayerPrivateGStreamerBase* player)
+{
+    player->cancelRepaint();
+}
+
+#if USE(GSTREAMER_GL)
+GstFlowReturn MediaPlayerPrivateGStreamerBase::newSampleCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
+{
+    GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_sample(GST_APP_SINK(sink)));
+    player->triggerRepaint(sample.get());
+    return GST_FLOW_OK;
+}
+
+GstFlowReturn MediaPlayerPrivateGStreamerBase::newPrerollCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
+{
+    GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_preroll(GST_APP_SINK(sink)));
+    player->triggerRepaint(sample.get());
+    return GST_FLOW_OK;
+}
+#endif
+
+void MediaPlayerPrivateGStreamerBase::setSize(const IntSize& size)
+{
+    m_size = size;
+}
 
-    if (context->paintingDisabled())
+void MediaPlayerPrivateGStreamerBase::paint(GraphicsContext& context, const FloatRect& rect)
+{
+    if (context.paintingDisabled())
         return;
 
     if (!m_player->visible())
         return;
 
-    WTF::GMutexLocker lock(m_bufferMutex);
-    if (!m_buffer)
+    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
+    if (!GST_IS_SAMPLE(m_sample.get()))
         return;
 
-    GRefPtr<GstCaps> caps = currentVideoSinkCaps();
-    if (!caps)
-        return;
+    ImagePaintingOptions paintingOptions(CompositeCopy);
+    if (m_renderingCanBeAccelerated)
+        paintingOptions.m_orientationDescription.setImageOrientationEnum(m_videoSourceOrientation);
 
-    RefPtr<ImageGStreamer> gstImage = ImageGStreamer::createImage(m_buffer, caps.get());
+    RefPtr<ImageGStreamer> gstImage = ImageGStreamer::createImage(m_sample.get());
     if (!gstImage)
         return;
 
-    context->drawImage(reinterpret_cast<Image*>(gstImage->image().get()), ColorSpaceSRGB,
-        rect, gstImage->rect(), CompositeCopy, ImageOrientationDescription(), false);
+    if (Image* image = reinterpret_cast<Image*>(gstImage->image().get()))
+        context.drawImage(*image, rect, gstImage->rect(), paintingOptions);
 }
 
-#if USE(ACCELERATED_COMPOSITING) && USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
-void MediaPlayerPrivateGStreamerBase::paintToTextureMapper(TextureMapper* textureMapper, const FloatRect& targetRect, const TransformationMatrix& matrix, float opacity)
+#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
+void MediaPlayerPrivateGStreamerBase::paintToTextureMapper(TextureMapper& textureMapper, const FloatRect& targetRect, const TransformationMatrix& matrix, float opacity)
 {
-    if (textureMapper->accelerationMode() != TextureMapper::OpenGLMode)
+    if (!m_player->visible())
         return;
 
-    if (!m_player->visible())
+    if (m_usingFallbackVideoSink) {
+        RefPtr<BitmapTexture> texture;
+        IntSize size;
+        TextureMapperGL::Flags flags;
+        {
+            WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
+
+            GstVideoInfo videoInfo;
+            if (UNLIKELY(!getSampleVideoInfo(m_sample.get(), videoInfo)))
+                return;
+
+            size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
+            flags = texMapFlagFromOrientation(m_videoSourceOrientation) | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0);
+            texture = textureMapper.acquireTextureFromPool(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
+            updateTexture(static_cast<BitmapTextureGL&>(*texture), videoInfo);
+        }
+        TextureMapperGL& texmapGL = reinterpret_cast<TextureMapperGL&>(textureMapper);
+        BitmapTextureGL* textureGL = static_cast<BitmapTextureGL*>(texture.get());
+        texmapGL.drawTexture(textureGL->id(), flags, textureGL->size(), targetRect, matrix, opacity);
         return;
+    }
+
+#if USE(GSTREAMER_GL)
+    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
+
+    GstVideoInfo videoInfo;
+    if (!getSampleVideoInfo(m_sample.get(), videoInfo))
+        return;
+
+    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
+    GstVideoFrame videoFrame;
+    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
+        return;
+
+    unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);
+    TextureMapperGL::Flags flags = texMapFlagFromOrientation(m_videoSourceOrientation) | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0);
+
+    IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
+    TextureMapperGL& textureMapperGL = reinterpret_cast<TextureMapperGL&>(textureMapper);
+    textureMapperGL.drawTexture(textureID, flags, size, targetRect, matrix, opacity);
+    gst_video_frame_unmap(&videoFrame);
+#endif
+}
+#endif
+
+#if USE(GSTREAMER_GL)
+#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
+// This should be called with the sample mutex locked.
+GLContext* MediaPlayerPrivateGStreamerBase::prepareContextForCairoPaint(GstVideoInfo& videoInfo, IntSize& size, IntSize& rotatedSize)
+{
+    if (!getSampleVideoInfo(m_sample.get(), videoInfo))
+        return nullptr;
+
+    GLContext* context = PlatformDisplay::sharedDisplayForCompositing().sharingGLContext();
+    context->makeContextCurrent();
+
+    // Thread-awareness is a huge performance hit on non-Intel drivers.
+    cairo_gl_device_set_thread_aware(context->cairoDevice(), FALSE);
+
+    size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
+    rotatedSize = m_videoSourceOrientation.usesWidthAsHeight() ? size.transposedSize() : size;
+
+    return context;
+}
+
+// This should be called with the sample mutex locked.
+bool MediaPlayerPrivateGStreamerBase::paintToCairoSurface(cairo_surface_t* outputSurface, cairo_device_t* device, GstVideoInfo& videoInfo, const IntSize& size, const IntSize& rotatedSize, bool flipY)
+{
+    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
+    GstVideoFrame videoFrame;
+    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
+        return false;
+
+    unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);
+    RefPtr<cairo_surface_t> surface = adoptRef(cairo_gl_surface_create_for_texture(device, CAIRO_CONTENT_COLOR_ALPHA, textureID, size.width(), size.height()));
+    RefPtr<cairo_t> cr = adoptRef(cairo_create(outputSurface));
+
+    switch (m_videoSourceOrientation) {
+    case DefaultImageOrientation:
+        break;
+    case OriginRightTop:
+        cairo_translate(cr.get(), rotatedSize.width() * 0.5, rotatedSize.height() * 0.5);
+        cairo_rotate(cr.get(), piOverTwoDouble);
+        cairo_translate(cr.get(), -rotatedSize.height() * 0.5, -rotatedSize.width() * 0.5);
+        break;
+    case OriginBottomRight:
+        cairo_translate(cr.get(), rotatedSize.width() * 0.5, rotatedSize.height() * 0.5);
+        cairo_rotate(cr.get(), piDouble);
+        cairo_translate(cr.get(), -rotatedSize.width() * 0.5, -rotatedSize.height() * 0.5);
+        break;
+    case OriginLeftBottom:
+        cairo_translate(cr.get(), rotatedSize.width() * 0.5, rotatedSize.height() * 0.5);
+        cairo_rotate(cr.get(), 3 * piOverTwoDouble);
+        cairo_translate(cr.get(), -rotatedSize.height() * 0.5, -rotatedSize.width() * 0.5);
+        break;
+    default:
+        ASSERT_NOT_REACHED();
+        break;
+    }
+
+    if (flipY) {
+        cairo_scale(cr.get(), 1.0f, -1.0f);
+        cairo_translate(cr.get(), 0.0f, -size.height());
+    }
+
+    cairo_set_source_surface(cr.get(), surface.get(), 0, 0);
+    cairo_set_operator(cr.get(), CAIRO_OPERATOR_SOURCE);
+    cairo_paint(cr.get());
+
+    gst_video_frame_unmap(&videoFrame);
+
+    return true;
+}
+#endif // USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
+
+bool MediaPlayerPrivateGStreamerBase::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
+{
+#if USE(GSTREAMER_GL)
+    UNUSED_PARAM(context);
+
+    if (m_usingFallbackVideoSink)
+        return false;
+
+    if (premultiplyAlpha)
+        return false;
+
+    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
+
+    GstVideoInfo videoInfo;
+    if (!getSampleVideoInfo(m_sample.get(), videoInfo))
+        return false;
+
+    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
+    GstVideoFrame videoFrame;
+    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
+        return false;
+
+    IntSize size(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
+    if (m_videoSourceOrientation.usesWidthAsHeight())
+        size = size.transposedSize();
+    unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);
+
+    if (!m_videoTextureCopier)
+        m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>();
+
+    bool copied = m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, outputTexture, outputTarget, level, internalFormat, format, type, flipY, m_videoSourceOrientation);
+
+    gst_video_frame_unmap(&videoFrame);
 
-    RefPtr<BitmapTexture> texture = updateTexture(textureMapper);
-    if (texture)
-        textureMapper->drawTexture(*texture.get(), targetRect, matrix, opacity);
+    return copied;
+#else
+    return false;
+#endif
 }
+
+NativeImagePtr MediaPlayerPrivateGStreamerBase::nativeImageForCurrentTime()
+{
+#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
+    if (m_usingFallbackVideoSink)
+        return nullptr;
+
+    GstVideoInfo videoInfo;
+    IntSize size, rotatedSize;
+    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
+    GLContext* context = prepareContextForCairoPaint(videoInfo, size, rotatedSize);
+    if (!context)
+        return nullptr;
+
+    RefPtr<cairo_surface_t> rotatedSurface = adoptRef(cairo_gl_surface_create(context->cairoDevice(), CAIRO_CONTENT_COLOR_ALPHA, rotatedSize.width(), rotatedSize.height()));
+    if (!paintToCairoSurface(rotatedSurface.get(), context->cairoDevice(), videoInfo, size, rotatedSize, false))
+        return nullptr;
+
+    return rotatedSurface;
+#else
+    return nullptr;
 #endif
+}
+#endif
+
+void MediaPlayerPrivateGStreamerBase::setVideoSourceOrientation(const ImageOrientation& orientation)
+{
+    if (m_videoSourceOrientation == orientation)
+        return;
+
+    m_videoSourceOrientation = orientation;
+}
 
 bool MediaPlayerPrivateGStreamerBase::supportsFullscreen() const
 {
@@ -450,25 +1072,88 @@ MediaPlayer::MovieLoadType MediaPlayerPrivateGStreamerBase::movieLoadType() cons
     return MediaPlayer::Download;
 }
 
-GRefPtr<GstCaps> MediaPlayerPrivateGStreamerBase::currentVideoSinkCaps() const
+#if USE(GSTREAMER_GL)
+GstElement* MediaPlayerPrivateGStreamerBase::createGLAppSink()
+{
+    if (!webkitGstCheckVersion(1, 8, 0))
+        return nullptr;
+
+    GstElement* appsink = gst_element_factory_make("appsink", "webkit-gl-video-sink");
+    if (!appsink)
+        return nullptr;
+
+    g_object_set(appsink, "enable-last-sample", FALSE, "emit-signals", TRUE, "max-buffers", 1, nullptr);
+    g_signal_connect(appsink, "new-sample", G_CALLBACK(newSampleCallback), this);
+    g_signal_connect(appsink, "new-preroll", G_CALLBACK(newPrerollCallback), this);
+
+    return appsink;
+}
+
+GstElement* MediaPlayerPrivateGStreamerBase::createVideoSinkGL()
 {
-    if (!m_webkitVideoSink)
+    // FIXME: Currently it's not possible to get the video frames and caps using this approach until
+    // the pipeline gets into playing state. Due to this, trying to grab a frame and painting it by some
+    // other mean (canvas or webgl) before playing state can result in a crash.
+    // This is being handled in https://bugs.webkit.org/show_bug.cgi?id=159460.
+    if (!webkitGstCheckVersion(1, 8, 0))
         return nullptr;
 
-    GRefPtr<GstCaps> currentCaps;
-    g_object_get(G_OBJECT(m_webkitVideoSink.get()), "current-caps", &currentCaps.outPtr(), NULL);
-    return currentCaps;
+    gboolean result = TRUE;
+    GstElement* videoSink = gst_bin_new(nullptr);
+    GstElement* upload = gst_element_factory_make("glupload", nullptr);
+    GstElement* colorconvert = gst_element_factory_make("glcolorconvert", nullptr);
+    GstElement* appsink = createGLAppSink();
+
+    if (!appsink || !upload || !colorconvert) {
+        GST_WARNING("Failed to create GstGL elements");
+        gst_object_unref(videoSink);
+
+        if (upload)
+            gst_object_unref(upload);
+        if (colorconvert)
+            gst_object_unref(colorconvert);
+        if (appsink)
+            gst_object_unref(appsink);
+
+        return nullptr;
+    }
+
+    gst_bin_add_many(GST_BIN(videoSink), upload, colorconvert, appsink, nullptr);
+
+    GRefPtr<GstCaps> caps = adoptGRef(gst_caps_from_string("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY "), format = (string) { RGBA }"));
+
+    result &= gst_element_link_pads(upload, "src", colorconvert, "sink");
+    result &= gst_element_link_pads_filtered(colorconvert, "src", appsink, "sink", caps.get());
+
+    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(upload, "sink"));
+    gst_element_add_pad(videoSink, gst_ghost_pad_new("sink", pad.get()));
+
+    if (!result) {
+        GST_WARNING("Failed to link GstGL elements");
+        gst_object_unref(videoSink);
+        videoSink = nullptr;
+    }
+
+    return videoSink;
 }
+#endif
 
 GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink()
 {
-    ASSERT(initializeGStreamer());
+    acceleratedRenderingStateChanged();
 
-    GstElement* videoSink = nullptr;
-    m_webkitVideoSink = webkitVideoSinkNew();
+#if USE(GSTREAMER_GL)
+    if (m_renderingCanBeAccelerated)
+        m_videoSink = createVideoSinkGL();
+#endif
 
-    m_repaintHandler = g_signal_connect(m_webkitVideoSink.get(), "repaint-requested", G_CALLBACK(mediaPlayerPrivateRepaintCallback), this);
+    if (!m_videoSink) {
+        m_usingFallbackVideoSink = true;
+        m_videoSink = webkitVideoSinkNew();
+        g_signal_connect_swapped(m_videoSink.get(), "repaint-requested", G_CALLBACK(repaintCallback), this);
+        g_signal_connect_swapped(m_videoSink.get(), "repaint-cancelled", G_CALLBACK(repaintCancelledCallback), this);
    }
 
+    GstElement* videoSink = nullptr;
     m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
     if (m_fpsSink) {
         g_object_set(m_fpsSink.get(), "silent", TRUE , nullptr);
@@ -477,20 +1162,19 @@ GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink()
 #if LOG_DISABLED
         g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);
 #else
-        WTFLogChannel* channel = logChannelByName("Media");
-        if (channel->state != WTFLogChannelOn)
+        if (!isLogChannelEnabled("Media"))
             g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);
 #endif // LOG_DISABLED
 
         if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink.get()), "video-sink")) {
-            g_object_set(m_fpsSink.get(), "video-sink", m_webkitVideoSink.get(), nullptr);
+            g_object_set(m_fpsSink.get(), "video-sink", m_videoSink.get(), nullptr);
             videoSink = m_fpsSink.get();
         } else
             m_fpsSink = nullptr;
     }
 
     if (!m_fpsSink)
-        videoSink = m_webkitVideoSink.get();
+        videoSink = m_videoSink.get();
 
     ASSERT(videoSink);
 
@@ -505,23 +1189,23 @@ void MediaPlayerPrivateGStreamerBase::setStreamVolumeElement(GstStreamVolume* vo
 
     // We don't set the initial volume because we trust the sink to keep it for us. See
     // https://bugs.webkit.org/show_bug.cgi?id=118974 for more information.
     if (!m_player->platformVolumeConfigurationRequired()) {
-        LOG_MEDIA_MESSAGE("Setting stream volume to %f", m_player->volume());
-        g_object_set(m_volumeElement.get(), "volume", m_player->volume(), NULL);
+        GST_DEBUG("Setting stream volume to %f", m_player->volume());
+        g_object_set(m_volumeElement.get(), "volume", m_player->volume(), nullptr);
     } else
-        LOG_MEDIA_MESSAGE("Not setting stream volume, trusting system one");
+        GST_DEBUG("Not setting stream volume, trusting system one");
 
-    LOG_MEDIA_MESSAGE("Setting stream muted %d", m_player->muted());
-    g_object_set(m_volumeElement.get(), "mute", m_player->muted(), NULL);
+    GST_DEBUG("Setting stream muted %d", m_player->muted());
+    g_object_set(m_volumeElement.get(), "mute", m_player->muted(), nullptr);
 
-    m_volumeSignalHandler = g_signal_connect(m_volumeElement.get(), "notify::volume", G_CALLBACK(mediaPlayerPrivateVolumeChangedCallback), this);
-    m_muteSignalHandler = g_signal_connect(m_volumeElement.get(), "notify::mute", G_CALLBACK(mediaPlayerPrivateMuteChangedCallback), this);
+    g_signal_connect_swapped(m_volumeElement.get(), "notify::volume", G_CALLBACK(volumeChangedCallback), this);
+    g_signal_connect_swapped(m_volumeElement.get(), "notify::mute", G_CALLBACK(muteChangedCallback), this);
 }
 
 unsigned MediaPlayerPrivateGStreamerBase::decodedFrameCount() const
 {
     guint64 decodedFrames = 0;
     if (m_fpsSink)
-        g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, NULL);
+        g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, nullptr);
     return static_cast<unsigned>(decodedFrames);
 }
 
@@ -529,7 +1213,7 @@ unsigned MediaPlayerPrivateGStreamerBase::droppedFrameCount() const
 {
     guint64 framesDropped = 0;
     if (m_fpsSink)
-        g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, NULL);
+        g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, nullptr);
     return static_cast<unsigned>(framesDropped);
 }
 
@@ -550,13 +1234,95 @@ unsigned MediaPlayerPrivateGStreamerBase::videoDecodedByteCount() const
     GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
     gint64 position = 0;
 
-    if (gst_element_query(m_webkitVideoSink.get(), query))
+    if (gst_element_query(m_videoSink.get(), query))
         gst_query_parse_position(query, 0, &position);
 
     gst_query_unref(query);
     return static_cast<unsigned>(position);
 }
 
+#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
+void MediaPlayerPrivateGStreamerBase::needKey(RefPtr<Uint8Array> initData)
+{
+    if (!m_player->keyNeeded(initData.get()))
+        GST_INFO("no event handler for key needed");
+}
+
+void MediaPlayerPrivateGStreamerBase::setCDMSession(CDMSession* session)
+{
+    GST_DEBUG("setting CDM session to %p", session);
+    m_cdmSession = session;
+}
+
+void MediaPlayerPrivateGStreamerBase::keyAdded()
+{
+}
+
+void MediaPlayerPrivateGStreamerBase::handleProtectionEvent(GstEvent* event)
+{
+    if (m_handledProtectionEvents.contains(GST_EVENT_SEQNUM(event))) {
+        GST_DEBUG("event %u already handled", GST_EVENT_SEQNUM(event));
+        m_handledProtectionEvents.remove(GST_EVENT_SEQNUM(event));
+        return;
+    }
+
+    const gchar* eventKeySystemId = nullptr;
+    GstBuffer* data = nullptr;
+    gst_event_parse_protection(event, &eventKeySystemId, &data, nullptr);
+
+    GstMapInfo mapInfo;
+    if (!gst_buffer_map(data, &mapInfo, GST_MAP_READ)) {
+        GST_WARNING("cannot map %s protection data", eventKeySystemId);
+        return;
+    }
+
+    GST_DEBUG("scheduling keyNeeded event for %s with init data size of %" G_GSIZE_FORMAT, eventKeySystemId, mapInfo.size);
+    GST_MEMDUMP("init datas", mapInfo.data, mapInfo.size);
+    RefPtr<Uint8Array> initDataArray = Uint8Array::create(mapInfo.data, mapInfo.size);
+    needKey(initDataArray);
+    gst_buffer_unmap(data, &mapInfo);
+}
+
+void MediaPlayerPrivateGStreamerBase::receivedGenerateKeyRequest(const String& keySystem)
+{
+    GST_DEBUG("received generate key request for %s", keySystem.utf8().data());
+    m_lastGenerateKeyRequestKeySystemUuid = keySystemIdToUuid(keySystem);
+    m_protectionCondition.notifyOne();
+}
+
+static AtomicString keySystemIdToUuid(const AtomicString& id)
+{
+    if (equalIgnoringASCIICase(id, CLEAR_KEY_PROTECTION_SYSTEM_ID))
+        return AtomicString(CLEAR_KEY_PROTECTION_SYSTEM_UUID);
+
+    return { };
+}
+
+std::unique_ptr<CDMSession> MediaPlayerPrivateGStreamerBase::createSession(const String& keySystem, CDMSessionClient*)
+{
+    GST_INFO("Requested CDMSession for KeySystem %s: Returning null.", keySystem.utf8().data());
+    return nullptr;
+}
+
+void MediaPlayerPrivateGStreamerBase::dispatchDecryptionKey(GstBuffer* buffer)
+{
+    gst_element_send_event(m_pipeline.get(), gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM_OOB,
+        gst_structure_new("drm-cipher", "key", GST_TYPE_BUFFER, buffer, nullptr)));
+}
+#endif
+
+bool MediaPlayerPrivateGStreamerBase::supportsKeySystem(const String& keySystem, const String& mimeType)
+{
+    GST_INFO("Checking for KeySystem support with %s and type %s: false.", keySystem.utf8().data(), mimeType.utf8().data());
+    return false;
+}
+
+MediaPlayer::SupportsType MediaPlayerPrivateGStreamerBase::extendedSupportsType(const MediaEngineSupportParameters& parameters, MediaPlayer::SupportsType result)
+{
+    UNUSED_PARAM(parameters);
+    return result;
+}
+
 }
 
 #endif // USE(GSTREAMER)
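
Note on the GL sink this patch builds: createVideoSinkGL() wires glupload ! glcolorconvert ! appsink and constrains the appsink input to RGBA textures in GL memory. The following standalone sketch reproduces the same element graph with only public GStreamer API. It is illustrative, not WebKit code: the function name buildGLVideoSink is invented here, error handling is reduced, and it assumes GStreamer >= 1.8 with the gst-plugins-base GL elements installed.

// Sketch: an appsink-terminated GL bin equivalent in shape to the one
// createVideoSinkGL() assembles in the patch above.
#include <gst/gst.h>

static GstElement* buildGLVideoSink()
{
    GstElement* bin = gst_bin_new("gl-video-sink");
    GstElement* upload = gst_element_factory_make("glupload", nullptr);
    GstElement* convert = gst_element_factory_make("glcolorconvert", nullptr);
    GstElement* appsink = gst_element_factory_make("appsink", nullptr);
    if (!bin || !upload || !convert || !appsink)
        return nullptr; // a production version would unref whatever was created

    // Same appsink knobs the patch sets: no last-sample cache, signal
    // emission enabled, and a single queued buffer to bound latency.
    g_object_set(appsink, "enable-last-sample", FALSE, "emit-signals", TRUE, "max-buffers", 1, nullptr);

    gst_bin_add_many(GST_BIN(bin), upload, convert, appsink, nullptr);

    // Negotiate RGBA textures living in GL memory at the appsink pad.
    GstCaps* caps = gst_caps_from_string("video/x-raw(memory:GLMemory), format=(string)RGBA");
    gboolean linked = gst_element_link(upload, convert)
        && gst_element_link_filtered(convert, appsink, caps);
    gst_caps_unref(caps);
    if (!linked) {
        gst_object_unref(bin);
        return nullptr;
    }

    // Expose the uploader's sink pad as the bin's sink, as the patch does.
    GstPad* pad = gst_element_get_static_pad(upload, "sink");
    gst_element_add_pad(bin, gst_ghost_pad_new("sink", pad));
    gst_object_unref(pad);
    return bin;
}

A bin like this can be handed to a playback pipeline (for example via playbin's "video-sink" property); frames then arrive through the appsink "new-sample"/"new-preroll" signals, which is exactly how newSampleCallback() and newPrerollCallback() above receive them.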
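The GstContext handshake in handleSyncMessage()/requestGLContext() follows the standard GStreamer pattern: GL elements post GST_MESSAGE_NEED_CONTEXT on the bus, and the application answers synchronously with a context wrapping its own display and GL context. Below is a minimal sketch of that bus-side idiom under stated assumptions: s_glDisplay and s_glContext are application-owned wrappers created elsewhere (for example with gst_gl_context_new_wrapped(), as ensureGstGLContext() does), and GST_TYPE_GL_CONTEXT needs GStreamer >= 1.11, matching the version check in requestGLContext().

// Sketch of a sync bus handler answering GL need-context queries.
#define GST_USE_UNSTABLE_API
#include <gst/gl/gl.h>

static GstGLDisplay* s_glDisplay; // created elsewhere by the application
static GstGLContext* s_glContext; // wrapped application GL context

static GstBusSyncReply onSyncMessage(GstBus*, GstMessage* message, gpointer)
{
    if (GST_MESSAGE_TYPE(message) != GST_MESSAGE_NEED_CONTEXT)
        return GST_BUS_PASS;

    const gchar* contextType = nullptr;
    gst_message_parse_context_type(message, &contextType);

    if (!g_strcmp0(contextType, GST_GL_DISPLAY_CONTEXT_TYPE)) {
        // Hand the shared display to the element that asked for it.
        GstContext* context = gst_context_new(GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
        gst_context_set_gl_display(context, s_glDisplay);
        gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context);
        gst_context_unref(context);
    } else if (!g_strcmp0(contextType, "gst.gl.app_context")) {
        // Hand over the wrapped application GL context.
        GstContext* context = gst_context_new("gst.gl.app_context", TRUE);
        GstStructure* structure = gst_context_writable_structure(context);
        gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, s_glContext, nullptr);
        gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context);
        gst_context_unref(context);
    }
    return GST_BUS_PASS;
}

Such a handler would be installed with gst_bus_set_sync_handler(bus, onSyncMessage, nullptr, nullptr); WebKit reaches the equivalent code path through handleSyncMessage() instead of a standalone handler.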