author     Tor Arne Vestbø <tor.arne.vestbo@qt.io>    2021-11-29 14:53:59 +0100
committer  Tor Arne Vestbø <tor.arne.vestbo@qt.io>    2021-12-07 16:32:05 +0100
commit     d9cdfeebd9eb6a067b97316daa149c7f58e1c7ec (patch)
tree       9ad8ae07190f6e491397272114f0e7c4539e5c1d
parent     466a4f29c20b6d797af9d67029a7329b2124b276 (diff)
download   qtmultimedia-d9cdfeebd9eb6a067b97316daa149c7f58e1c7ec.tar.gz
Use AVPlayerItemVideoOutput to generate video frames
This fixes rendering problems on M1 based Macs. It also unifies the rendering
pipeline between macOS and iOS as much as possible, and avoids an intermediate
copy to an FBO.

Since AVPlayerItemVideoOutput produces GL_TEXTURE_RECTANGLE textures on macOS,
a new QAbstractVideoBuffer handle type has been added that explicitly maps to
GL_TEXTURE_RECTANGLE. We use this handle type internally in
QSGVideoMaterial_Texture, where we know how to blit GL_TEXTURE_RECTANGLE
textures. To maintain compatibility with QAbstractVideoSurface consumers that
expect GL_TEXTURE_2D textures, we blit the rectangle texture to an FBO and
return that as QAbstractVideoBuffer::GLTextureHandle.

Fixes: QTBUG-89803
Done-with: Lars Knoll <lars.knoll@qt.io>
Change-Id: I36d22eafb63902ecc1097e138705812ef6a8cb71
Reviewed-by: Lars Knoll <lars.knoll@qt.io>
Reviewed-by: Doris Verria <doris.verria@qt.io>
 src/multimedia/video/qabstractvideobuffer.cpp                      |   5
 src/multimedia/video/qabstractvideobuffer.h                        |   1
 src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm      |  21
 src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.h       | 102
 src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm      | 513
 src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.h   | 113
 src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.mm  | 261
 src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.h     |   5
 src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.mm    | 140
 src/plugins/avfoundation/mediaplayer/mediaplayer.pro               |  35
 src/qtmultimediaquicktools/qsgvideonode_texture.cpp                | 147
 src/qtmultimediaquicktools/qtmultimediaquicktools.qrc              |   5
 src/qtmultimediaquicktools/shaders/rectsampler.vert                |  10
 src/qtmultimediaquicktools/shaders/rectsampler_core.vert           |  11
 src/qtmultimediaquicktools/shaders/rectsampler_rgb.frag            |   8
 src/qtmultimediaquicktools/shaders/rectsampler_rgb_core.frag       |  10
 16 files changed, 514 insertions(+), 873 deletions(-)
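Before the per-file diffs, a minimal sketch of the AVPlayerItemVideoOutput polling flow the new renderer is built around. It only restates the mechanism described in the commit message; the helper names (makeVideoOutput, copyLatestFrame) are illustrative and not part of the patch.

    // Sketch: attach an AVPlayerItemVideoOutput to a playing item and poll it for
    // the latest frame. Error handling elided; manual retain/release assumed.
    #import <AVFoundation/AVFoundation.h>
    #import <QuartzCore/QuartzCore.h>

    static AVPlayerItemVideoOutput *makeVideoOutput()
    {
        // BGRA, OpenGL-compatible pixel buffers, matching the attributes used by the patch.
        NSDictionary *attrs = @{
            (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
            (NSString *)kCVPixelBufferOpenGLCompatibilityKey: @YES
        };
        // Caller owns the returned object and releases it when done.
        return [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attrs];
    }

    // Called from a display-link callback (hypothetical helper).
    static CVPixelBufferRef copyLatestFrame(AVPlayerItemVideoOutput *output, AVPlayerItem *item)
    {
        if (![item.outputs containsObject:output])
            [item addOutput:output];

        CMTime itemTime = [output itemTimeForHostTime:CACurrentMediaTime()];
        if (![output hasNewPixelBufferForItemTime:itemTime])
            return nullptr;

        // Caller owns the returned buffer and must CVPixelBufferRelease() it.
        return [output copyPixelBufferForItemTime:itemTime itemTimeForDisplay:nil];
    }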
diff --git a/src/multimedia/video/qabstractvideobuffer.cpp b/src/multimedia/video/qabstractvideobuffer.cpp
index 357155e5e..428e47de6 100644
--- a/src/multimedia/video/qabstractvideobuffer.cpp
+++ b/src/multimedia/video/qabstractvideobuffer.cpp
@@ -96,12 +96,15 @@ int QAbstractVideoBufferPrivate::map(
Identifies the type of a video buffers handle.
\value NoHandle The buffer has no handle, its data can only be accessed by mapping the buffer.
- \value GLTextureHandle The handle of the buffer is an OpenGL texture ID.
+ \value GLTextureHandle The handle of the buffer is an OpenGL texture ID
+ of an undefined and platform dependent target type.
\value XvShmImageHandle The handle contains pointer to shared memory XVideo image.
\value CoreImageHandle The handle contains pointer to \macos CIImage.
\value QPixmapHandle The handle of the buffer is a QPixmap.
\value EGLImageHandle The handle of the buffer is an EGLImageKHR.
\value UserHandle Start value for user defined handle types.
+ \value GLTextureRectangleHandle The handle of the buffer is an OpenGL texture ID
+ of target type \c GL_TEXTURE_RECTANGLE.
\sa handleType()
*/
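A short consumer-side sketch of what the documentation above implies: a surface that receives GLTextureRectangleHandle frames binds GL_TEXTURE_RECTANGLE instead of GL_TEXTURE_2D. This assumes desktop OpenGL on macOS; the helper name bindFrameTexture is illustrative and not code from the patch.

    // Sketch of a consumer reacting to the new handle type.
    // Assumes a current OpenGL context and a valid QVideoFrame 'frame'.
    #include <QtMultimedia/qabstractvideobuffer.h>
    #include <QtMultimedia/qvideoframe.h>
    #include <QtGui/QOpenGLContext>
    #include <QtGui/QOpenGLFunctions>

    static void bindFrameTexture(const QVideoFrame &frame)
    {
        QOpenGLFunctions *f = QOpenGLContext::currentContext()->functions();
        const GLuint textureId = frame.handle().toUInt();

        if (frame.handleType() == QAbstractVideoBuffer::GLTextureRectangleHandle)
            f->glBindTexture(GL_TEXTURE_RECTANGLE, textureId);   // unnormalized texel coords
        else if (frame.handleType() == QAbstractVideoBuffer::GLTextureHandle)
            f->glBindTexture(GL_TEXTURE_2D, textureId);          // normalized [0, 1] coords
    }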
diff --git a/src/multimedia/video/qabstractvideobuffer.h b/src/multimedia/video/qabstractvideobuffer.h
index 6a3714dcb..adb8a07c8 100644
--- a/src/multimedia/video/qabstractvideobuffer.h
+++ b/src/multimedia/video/qabstractvideobuffer.h
@@ -64,6 +64,7 @@ public:
CoreImageHandle,
QPixmapHandle,
EGLImageHandle,
+ GLTextureRectangleHandle,
UserHandle = 1000
};
diff --git a/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm b/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
index ea54fe6be..ae2234764 100644
--- a/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
+++ b/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
@@ -110,6 +110,12 @@ static void *AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext
self->m_session = session;
self->m_bufferIsLikelyToKeepUp = FALSE;
+
+ m_playerLayer = [AVPlayerLayer playerLayerWithPlayer:nil];
+ [m_playerLayer retain];
+ m_playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
+ m_playerLayer.anchorPoint = CGPointMake(0.0f, 0.0f);
+
return self;
}
@@ -172,10 +178,6 @@ static void *AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext
[m_player release];
m_player = 0;
}
- if (m_playerLayer) {
- [m_playerLayer release];
- m_playerLayer = 0;
- }
}
- (void) prepareToPlayAsset:(AVURLAsset *)asset
@@ -260,14 +262,8 @@ static void *AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext
[m_player setMuted:m_session->isMuted()];
}
- //Create a new player layer if we don't have one already
- if (!m_playerLayer)
- {
- m_playerLayer = [AVPlayerLayer playerLayerWithPlayer:m_player];
- [m_playerLayer retain];
- m_playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
- m_playerLayer.anchorPoint = CGPointMake(0.0f, 0.0f);
- }
+ //Assign the output layer to the new player
+ m_playerLayer.player = m_player;
//Observe the AVPlayer "currentItem" property to find out when any
//AVPlayer replaceCurrentItemWithPlayerItem: replacement will/did
@@ -413,6 +409,7 @@ static void *AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext
}
[m_mimeType release];
+ [m_playerLayer release];
[super dealloc];
}
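The hunks above switch to creating the AVPlayerLayer once, up front, and only reassigning its player property whenever a new AVPlayer is created. A minimal sketch of that pattern, assuming manual retain/release as in the plugin; the function names are illustrative.

    #import <AVFoundation/AVFoundation.h>

    // Sketch of the "one retained layer, many players" pattern the hunks above adopt.
    static AVPlayerLayer *createSharedPlayerLayer()
    {
        AVPlayerLayer *layer = [AVPlayerLayer playerLayerWithPlayer:nil];
        [layer retain];  // balanced by a single release in dealloc
        layer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        layer.anchorPoint = CGPointMake(0.0f, 0.0f);
        return layer;
    }

    static void attachPlayer(AVPlayerLayer *layer, AVPlayer *currentPlayer)
    {
        // Reassigning the player is enough; the layer itself never has to be recreated.
        layer.player = currentPlayer;
    }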
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.h b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.h
index 28b47ac57..886722744 100644
--- a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.h
+++ b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.h
@@ -43,22 +43,48 @@
#include <QtCore/QObject>
#include <QtGui/QImage>
#include <QtGui/QOpenGLContext>
+#include <QtGui/QOpenGLTextureBlitter>
#include <QtCore/QSize>
-#import "Metal/Metal.h"
-#import "MetalKit/MetalKit.h"
-
-@class CARenderer;
@class AVPlayerLayer;
+@class AVPlayerItemVideoOutput;
QT_BEGIN_NAMESPACE
+class QOpenGLContext;
class QOpenGLFramebufferObject;
class QOpenGLShaderProgram;
-class QWindow;
-class QOpenGLContext;
+class QOffscreenSurface;
class QAbstractVideoSurface;
+typedef struct __CVBuffer *CVBufferRef;
+typedef CVBufferRef CVImageBufferRef;
+typedef CVImageBufferRef CVPixelBufferRef;
+
+#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+ typedef struct __CVOpenGLESTextureCache *CVOpenGLESTextureCacheRef;
+ typedef CVImageBufferRef CVOpenGLESTextureRef;
+ // helpers to avoid boring if def
+ typedef CVOpenGLESTextureCacheRef CVOGLTextureCacheRef;
+ typedef CVOpenGLESTextureRef CVOGLTextureRef;
+ #define CVOGLTextureGetTarget CVOpenGLESTextureGetTarget
+ #define CVOGLTextureGetName CVOpenGLESTextureGetName
+ #define CVOGLTextureCacheCreate CVOpenGLESTextureCacheCreate
+ #define CVOGLTextureCacheCreateTextureFromImage CVOpenGLESTextureCacheCreateTextureFromImage
+ #define CVOGLTextureCacheFlush CVOpenGLESTextureCacheFlush
+#else
+ typedef struct __CVOpenGLTextureCache *CVOpenGLTextureCacheRef;
+ typedef CVImageBufferRef CVOpenGLTextureRef;
+ // helpers to avoid boring if def
+ typedef CVOpenGLTextureCacheRef CVOGLTextureCacheRef;
+ typedef CVOpenGLTextureRef CVOGLTextureRef;
+ #define CVOGLTextureGetTarget CVOpenGLTextureGetTarget
+ #define CVOGLTextureGetName CVOpenGLTextureGetName
+ #define CVOGLTextureCacheCreate CVOpenGLTextureCacheCreate
+ #define CVOGLTextureCacheCreateTextureFromImage CVOpenGLTextureCacheCreateTextureFromImage
+ #define CVOGLTextureCacheFlush CVOpenGLTextureCacheFlush
+#endif
+
class AVFVideoFrameRenderer : public QObject
{
public:
@@ -66,57 +92,31 @@ public:
virtual ~AVFVideoFrameRenderer();
- GLuint renderLayerToTexture(AVPlayerLayer *layer);
- QImage renderLayerToImage(AVPlayerLayer *layer);
-
- static GLuint createGLTexture(CGLContextObj cglContextObj, CGLPixelFormatObj cglPixelFormtObj,
- CVOpenGLTextureCacheRef cvglTextureCache,
- CVPixelBufferRef cvPixelBufferRef,
- CVOpenGLTextureRef cvOpenGLTextureRef);
+ void setPlayerLayer(AVPlayerLayer *layer);
- static id<MTLTexture> createMetalTexture(id<MTLDevice> mtlDevice,
- CVMetalTextureCacheRef cvMetalTextureCacheRef,
- CVPixelBufferRef cvPixelBufferRef,
- MTLPixelFormat pixelFormat, size_t width, size_t height,
- CVMetalTextureRef cvMetalTextureRef);
+ CVOGLTextureRef renderLayerToTexture(AVPlayerLayer *layer, QSize *size);
+#ifdef Q_OS_MACOS
+ GLuint renderLayerToFBO(AVPlayerLayer *layer, QSize *size);
+#endif
+ QImage renderLayerToImage(AVPlayerLayer *layer, QSize *size);
private:
- QOpenGLFramebufferObject* initRenderer(AVPlayerLayer *layer);
- void renderLayerToFBO(AVPlayerLayer *layer, QOpenGLFramebufferObject *fbo);
- void renderLayerToFBOCoreOpenGL(AVPlayerLayer *layer, QOpenGLFramebufferObject *fbo);
+ void initRenderer();
+ CVPixelBufferRef copyPixelBufferFromLayer(AVPlayerLayer *layer, size_t& width, size_t& height);
+ CVOGLTextureRef createCacheTextureFromLayer(AVPlayerLayer *layer, size_t& width, size_t& height);
- CARenderer *m_videoLayerRenderer;
- QAbstractVideoSurface *m_surface;
- QOpenGLFramebufferObject *m_fbo[2];
- QOpenGLShaderProgram *m_shader = nullptr;
- QWindow *m_offscreenSurface;
QOpenGLContext *m_glContext;
- QSize m_targetSize;
-
- bool m_useCoreProfile = false;
-
- // Shared pixel buffer
- CVPixelBufferRef m_CVPixelBuffer;
-
- // OpenGL Texture
- CVOpenGLTextureCacheRef m_CVGLTextureCache;
- CVOpenGLTextureRef m_CVGLTexture;
- CGLPixelFormatObj m_CGLPixelFormat;
- GLuint m_textureName = 0;
-
- // Metal Texture
- CVMetalTextureRef m_CVMTLTexture;
- CVMetalTextureCacheRef m_CVMTLTextureCache;
- id<MTLDevice> m_metalDevice = nil;
- id<MTLTexture> m_metalTexture = nil;
-
- NSOpenGLContext *m_NSGLContext = nullptr;
-
- GLuint m_quadVao = 0;
- GLuint m_quadVbos[2];
-
- uint m_currentBuffer;
+ QOffscreenSurface *m_offscreenSurface;
+ QAbstractVideoSurface *m_surface;
+ CVOGLTextureCacheRef m_textureCache;
+ AVPlayerItemVideoOutput* m_videoOutput;
bool m_isContextShared;
+
+#ifdef Q_OS_MACOS
+ QOpenGLFramebufferObject *m_fbo[2];
+ uint m_currentFBO;
+ QOpenGLTextureBlitter m_blitter;
+#endif
};
QT_END_NAMESPACE
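A sketch of how a caller drives the reworked renderer interface declared above, assuming the header is included from an Objective-C++ file. AVFVideoRendererControl further down is the real caller; renderOneFrame here is purely illustrative.

    // Sketch of rendering one frame from a display-link tick; error handling elided.
    void renderOneFrame(AVFVideoFrameRenderer *renderer, AVPlayerLayer *layer)
    {
        QSize frameSize;

    #ifdef Q_OS_MACOS
        // Consumers that only accept GL_TEXTURE_2D get the frame blitted into an FBO.
        GLuint texture2D = renderer->renderLayerToFBO(layer, &frameSize);
        Q_UNUSED(texture2D);
    #endif

        // The direct path hands back the CoreVideo-backed texture; ownership moves to
        // the caller, which must CFRelease() it once the frame is no longer needed.
        if (CVOGLTextureRef texture = renderer->renderLayerToTexture(layer, &frameSize))
            CFRelease(texture);
    }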
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm
index 4c7364a11..766764ee3 100644
--- a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm
+++ b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm
@@ -41,14 +41,20 @@
#include <QtMultimedia/qabstractvideosurface.h>
#include <QtGui/QOpenGLFramebufferObject>
-#include <QtGui/QWindow>
-#include <QOpenGLShaderProgram>
-#include <QtPlatformHeaders/QCocoaNativeContext>
+#include <QtGui/QOpenGLShaderProgram>
+#include <QtGui/QOffscreenSurface>
+
+#include <QtCore/private/qcore_mac_p.h>
#ifdef QT_DEBUG_AVF
#include <QtCore/qdebug.h>
#endif
+#ifdef Q_OS_MACOS
+#import <AppKit/AppKit.h>
+#include <CoreVideo/CVOpenGLTextureCache.h>
+#endif
+
#import <CoreVideo/CVBase.h>
#import <AVFoundation/AVFoundation.h>
@@ -56,15 +62,23 @@ QT_USE_NAMESPACE
AVFVideoFrameRenderer::AVFVideoFrameRenderer(QAbstractVideoSurface *surface, QObject *parent)
: QObject(parent)
- , m_videoLayerRenderer(nullptr)
- , m_surface(surface)
- , m_offscreenSurface(nullptr)
, m_glContext(nullptr)
- , m_currentBuffer(1)
+ , m_offscreenSurface(nullptr)
+ , m_surface(surface)
+ , m_textureCache(nullptr)
+ , m_videoOutput(nullptr)
, m_isContextShared(true)
{
+ m_videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:@{
+ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
+ (NSString *)kCVPixelBufferOpenGLCompatibilityKey: @YES
+ }];
+ [m_videoOutput setDelegate:nil queue:nil];
+
+#ifdef Q_OS_MACOS
m_fbo[0] = nullptr;
m_fbo[1] = nullptr;
+#endif
}
AVFVideoFrameRenderer::~AVFVideoFrameRenderer()
@@ -73,355 +87,244 @@ AVFVideoFrameRenderer::~AVFVideoFrameRenderer()
qDebug() << Q_FUNC_INFO;
#endif
- [m_videoLayerRenderer release];
- delete m_fbo[0];
- delete m_fbo[1];
+ [m_videoOutput release];
+ if (m_textureCache)
+ CFRelease(m_textureCache);
delete m_offscreenSurface;
delete m_glContext;
- if (m_useCoreProfile) {
- glDeleteVertexArrays(1, &m_quadVao);
- glDeleteBuffers(2, m_quadVbos);
- delete m_shader;
- }
+#ifdef Q_OS_MACOS
+ delete m_fbo[0];
+ delete m_fbo[1];
+#endif
}
-GLuint AVFVideoFrameRenderer::renderLayerToTexture(AVPlayerLayer *layer)
+#ifdef Q_OS_MACOS
+GLuint AVFVideoFrameRenderer::renderLayerToFBO(AVPlayerLayer *layer, QSize *size)
{
- //Is layer valid
- if (!layer)
+ QCFType<CVOGLTextureRef> texture = renderLayerToTexture(layer, size);
+ if (!texture)
return 0;
- //If the glContext isn't shared, it doesn't make sense to return a texture for us
- if (m_offscreenSurface && !m_isContextShared)
- return 0;
+ Q_ASSERT(size);
- QOpenGLFramebufferObject *fbo = initRenderer(layer);
+ // Do we have FBO's already?
+ if ((!m_fbo[0] && !m_fbo[0]) || (m_fbo[0]->size() != *size)) {
+ delete m_fbo[0];
+ delete m_fbo[1];
+ m_fbo[0] = new QOpenGLFramebufferObject(*size);
+ m_fbo[1] = new QOpenGLFramebufferObject(*size);
+ }
- if (!fbo)
- return 0;
+ // Switch buffer target
+ m_currentFBO = !m_currentFBO;
+ QOpenGLFramebufferObject *fbo = m_fbo[m_currentFBO];
- renderLayerToFBO(layer, fbo);
- if (m_glContext)
- m_glContext->doneCurrent();
+ if (!fbo || !fbo->bind())
+ return 0;
- return fbo->texture();
-}
+ glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+ glClear(GL_COLOR_BUFFER_BIT);
-QImage AVFVideoFrameRenderer::renderLayerToImage(AVPlayerLayer *layer)
-{
- //Is layer valid
- if (!layer) {
- return QImage();
- }
+ glViewport(0, 0, size->width(), size->height());
- QOpenGLFramebufferObject *fbo = initRenderer(layer);
+ if (!m_blitter.isCreated())
+ m_blitter.create();
- if (!fbo)
- return QImage();
+ m_blitter.bind(GL_TEXTURE_RECTANGLE);
+ m_blitter.blit(CVOpenGLTextureGetName(texture), QMatrix4x4(), QMatrix3x3());
+ m_blitter.release();
- renderLayerToFBO(layer, fbo);
- QImage fboImage = fbo->toImage();
- if (m_glContext)
- m_glContext->doneCurrent();
+ glFinish();
- return fboImage;
+ fbo->release();
+ return fbo->texture();
}
+#endif
-QOpenGLFramebufferObject *AVFVideoFrameRenderer::initRenderer(AVPlayerLayer *layer)
+CVOGLTextureRef AVFVideoFrameRenderer::renderLayerToTexture(AVPlayerLayer *layer, QSize *size)
{
+ initRenderer();
- //Get size from AVPlayerLayer
- m_targetSize = QSize(layer.bounds.size.width, layer.bounds.size.height);
-
- QOpenGLContext *shareContext = !m_glContext && m_surface
- ? qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>())
- : nullptr;
+ // If the glContext isn't shared, it doesn't make sense to return a texture for us
+ if (!m_isContextShared)
+ return nullptr;
- //Make sure we have an OpenGL context to make current
- if ((shareContext && shareContext != QOpenGLContext::currentContext())
- || (!QOpenGLContext::currentContext() && !m_glContext)) {
-
- //Create Hidden QWindow surface to create context in this thread
- delete m_offscreenSurface;
- m_offscreenSurface = new QWindow();
- m_offscreenSurface->setSurfaceType(QWindow::OpenGLSurface);
- //Needs geometry to be a valid surface, but size is not important
- m_offscreenSurface->setGeometry(0, 0, 1, 1);
- m_offscreenSurface->create();
-
- delete m_glContext;
- m_glContext = new QOpenGLContext();
- m_glContext->setFormat(m_offscreenSurface->requestedFormat());
+ size_t width = 0, height = 0;
+ auto texture = createCacheTextureFromLayer(layer, width, height);
+ if (size)
+ *size = QSize(width, height);
+ return texture;
+}
- if (shareContext) {
- m_glContext->setShareContext(shareContext);
- m_isContextShared = true;
- } else {
+CVPixelBufferRef AVFVideoFrameRenderer::copyPixelBufferFromLayer(AVPlayerLayer *layer,
+ size_t& width, size_t& height)
+{
+ //Is layer valid
+ if (!layer) {
#ifdef QT_DEBUG_AVF
- qWarning("failed to get Render Thread context");
+ qWarning("copyPixelBufferFromLayer: invalid layer");
#endif
- m_isContextShared = false;
- }
- if (!m_glContext->create()) {
- qWarning("failed to create QOpenGLContext");
- return nullptr;
- }
-
- // CARenderer must be re-created with different current context, so release it now.
- // See lines below where m_videoLayerRenderer is constructed.
- if (m_videoLayerRenderer) {
- [m_videoLayerRenderer release];
- m_videoLayerRenderer = nullptr;
- }
-
- if (m_useCoreProfile) {
- glDeleteVertexArrays(1, &m_quadVao);
- glDeleteBuffers(2, m_quadVbos);
- delete m_shader;
- m_shader = nullptr;
- }
+ return nullptr;
}
- //Need current context
- if (m_glContext)
- m_glContext->makeCurrent(m_offscreenSurface);
+ AVPlayerItem *item = layer.player.currentItem;
+ if (![item.outputs containsObject:m_videoOutput])
+ [item addOutput:m_videoOutput];
- if (!m_metalDevice)
- m_metalDevice = MTLCreateSystemDefaultDevice();
+ CFTimeInterval currentCAFrameTime = CACurrentMediaTime();
+ CMTime currentCMFrameTime = [m_videoOutput itemTimeForHostTime:currentCAFrameTime];
- if (@available(macOS 10.13, *)) {
- m_useCoreProfile = m_metalDevice && (QOpenGLContext::currentContext()->format().profile() ==
- QSurfaceFormat::CoreProfile);
- } else {
- m_useCoreProfile = false;
- }
-
- // Create the CARenderer if needed for no Core OpenGL
- if (!m_videoLayerRenderer) {
- if (!m_useCoreProfile) {
- m_videoLayerRenderer = [CARenderer rendererWithCGLContext: CGLGetCurrentContext()
- options: nil];
- [m_videoLayerRenderer retain];
- } else if (@available(macOS 10.13, *)) {
- // This is always true when m_useCoreProfile is true, but the compiler wants the check
- // anyway
- // Setup Core OpenGL shader, VAO, VBOs and metal renderer
- m_shader = new QOpenGLShaderProgram();
- m_shader->create();
- if (!m_shader->addShaderFromSourceCode(QOpenGLShader::Vertex, R"(#version 150 core
- in vec2 qt_VertexPosition;
- in vec2 qt_VertexTexCoord;
- out vec2 qt_TexCoord;
- void main()
- {
- qt_TexCoord = qt_VertexTexCoord;
- gl_Position = vec4(qt_VertexPosition, 0.0f, 1.0f);
- })")) {
- qCritical() << "Vertex shader compilation failed" << m_shader->log();
- }
- if (!m_shader->addShaderFromSourceCode(QOpenGLShader::Fragment, R"(#version 150 core
- in vec2 qt_TexCoord;
- out vec4 fragColor;
- uniform sampler2DRect videoFrame;
- void main(void)
- {
- ivec2 textureDim = textureSize(videoFrame);
- fragColor = texture(videoFrame, qt_TexCoord * textureDim);
- })")) {
- qCritical() << "Fragment shader compilation failed" << m_shader->log();
- }
-
- // Setup quad where the video frame will be attached
- GLfloat vertices[] = {
- -1.0f, -1.0f,
- 1.0f, -1.0f,
- -1.0f, 1.0f,
- 1.0f, 1.0f,
- };
-
- GLfloat uvs[] = {
- 0.0f, 0.0f,
- 1.0f, 0.0f,
- 0.0f, 1.0f,
- 1.0f, 1.0f,
- };
-
- glGenVertexArrays(1, &m_quadVao);
- glBindVertexArray(m_quadVao);
-
- // Create vertex buffer objects for vertices
- glGenBuffers(2, m_quadVbos);
-
- // Setup vertices
- glBindBuffer(GL_ARRAY_BUFFER, m_quadVbos[0]);
- glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
- glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(float), nullptr);
- glEnableVertexAttribArray(0);
-
- // Setup uvs
- glBindBuffer(GL_ARRAY_BUFFER, m_quadVbos[1]);
- glBufferData(GL_ARRAY_BUFFER, sizeof(uvs), uvs, GL_STATIC_DRAW);
- glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(float), nullptr);
- glEnableVertexAttribArray(1);
-
- glBindVertexArray(0);
-
- // Setup shared Metal/OpenGL pixel buffer and textures
- m_NSGLContext = static_cast<QCocoaNativeContext*>((QOpenGLContext::currentContext()->nativeHandle().data()))->context();
- m_CGLPixelFormat = m_NSGLContext.pixelFormat.CGLPixelFormatObj;
-
- NSDictionary* cvBufferProperties = @{
- static_cast<NSString*>(kCVPixelBufferOpenGLCompatibilityKey) : @YES,
- static_cast<NSString*>(kCVPixelBufferMetalCompatibilityKey): @YES,
- };
-
- CVPixelBufferCreate(kCFAllocatorDefault, static_cast<size_t>(m_targetSize.width()),
- static_cast<size_t>(m_targetSize.height()), kCVPixelFormatType_32BGRA,
- static_cast<CFDictionaryRef>(cvBufferProperties), &m_CVPixelBuffer);
-
- m_textureName = createGLTexture(reinterpret_cast<CGLContextObj>(m_NSGLContext.CGLContextObj),
- m_CGLPixelFormat, m_CVGLTextureCache, m_CVPixelBuffer,
- m_CVGLTexture);
- m_metalTexture = createMetalTexture(m_metalDevice, m_CVMTLTextureCache, m_CVPixelBuffer,
- MTLPixelFormatBGRA8Unorm,
- static_cast<size_t>(m_targetSize.width()),
- static_cast<size_t>(m_targetSize.height()),
- m_CVMTLTexture);
-
- m_videoLayerRenderer = [CARenderer rendererWithMTLTexture:m_metalTexture options:nil];
- [m_videoLayerRenderer retain];
- }
- }
+ // Happens when buffering / loading
+ if (CMTimeCompare(currentCMFrameTime, kCMTimeZero) < 0)
+ return nullptr;
- //Set/Change render source if needed
- if (m_videoLayerRenderer.layer != layer) {
- m_videoLayerRenderer.layer = layer;
- m_videoLayerRenderer.bounds = layer.bounds;
- }
+ if (![m_videoOutput hasNewPixelBufferForItemTime:currentCMFrameTime])
+ return nullptr;
- //Do we have FBO's already?
- if ((!m_fbo[0] && !m_fbo[0]) || (m_fbo[0]->size() != m_targetSize)) {
- delete m_fbo[0];
- delete m_fbo[1];
- m_fbo[0] = new QOpenGLFramebufferObject(m_targetSize);
- m_fbo[1] = new QOpenGLFramebufferObject(m_targetSize);
+ CVPixelBufferRef pixelBuffer = [m_videoOutput copyPixelBufferForItemTime:currentCMFrameTime
+ itemTimeForDisplay:nil];
+ if (!pixelBuffer) {
+#ifdef QT_DEBUG_AVF
+ qWarning("copyPixelBufferForItemTime returned nil");
+ CMTimeShow(currentCMFrameTime);
+#endif
+ return nullptr;
}
- //Switch buffer target
- m_currentBuffer = !m_currentBuffer;
- return m_fbo[m_currentBuffer];
+ width = CVPixelBufferGetWidth(pixelBuffer);
+ height = CVPixelBufferGetHeight(pixelBuffer);
+ return pixelBuffer;
}
-void AVFVideoFrameRenderer::renderLayerToFBO(AVPlayerLayer *layer, QOpenGLFramebufferObject *fbo)
+CVOGLTextureRef AVFVideoFrameRenderer::createCacheTextureFromLayer(AVPlayerLayer *layer,
+ size_t& width, size_t& height)
{
- //Start Rendering
- //NOTE: This rendering method will NOT work on iOS as there is no CARenderer in iOS
- if (!fbo->bind()) {
- qWarning("AVFVideoRender FBO failed to bind");
- return;
+ CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(layer, width, height);
+
+ if (!pixelBuffer)
+ return nullptr;
+
+ CVOGLTextureCacheFlush(m_textureCache, 0);
+
+ CVOGLTextureRef texture = nullptr;
+#ifdef Q_OS_MACOS
+ CVReturn err = CVOpenGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
+ m_textureCache,
+ pixelBuffer,
+ nil,
+ &texture);
+#else
+ CVReturn err = CVOGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault, m_textureCache, pixelBuffer, nullptr,
+ GL_TEXTURE_2D, GL_RGBA,
+ (GLsizei) width, (GLsizei) height,
+ GL_BGRA, GL_UNSIGNED_BYTE, 0,
+ &texture);
+#endif
+
+ if (!texture || err) {
+ qWarning() << "CVOGLTextureCacheCreateTextureFromImage failed error:" << err << m_textureCache;
}
- glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
- glClear(GL_COLOR_BUFFER_BIT);
+ CVPixelBufferRelease(pixelBuffer);
- glViewport(0, 0, m_targetSize.width(), m_targetSize.height());
+ return texture;
+}
- if (m_useCoreProfile) {
- CGLLockContext(m_NSGLContext.CGLContextObj);
- m_shader->bind();
- glBindVertexArray(m_quadVao);
- } else {
- glMatrixMode(GL_PROJECTION);
- glPushMatrix();
- glLoadIdentity();
+QImage AVFVideoFrameRenderer::renderLayerToImage(AVPlayerLayer *layer, QSize *size)
+{
+ size_t width = 0;
+ size_t height = 0;
+ CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(layer, width, height);
+ if (size)
+ *size = QSize(width, height);
- // Render to FBO with inverted Y
- glOrtho(0.0, m_targetSize.width(), 0.0, m_targetSize.height(), 0.0, 1.0);
+ if (!pixelBuffer)
+ return QImage();
- glMatrixMode(GL_MODELVIEW);
- glPushMatrix();
- glLoadIdentity();
+ OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
+ if (pixelFormat != kCVPixelFormatType_32BGRA) {
+#ifdef QT_DEBUG_AVF
+ qWarning("CVPixelBuffer format is not BGRA32 (got: %d)", static_cast<quint32>(pixelFormat));
+#endif
+ return QImage();
}
- [m_videoLayerRenderer beginFrameAtTime:CACurrentMediaTime() timeStamp:nullptr];
- [m_videoLayerRenderer addUpdateRect:layer.bounds];
- [m_videoLayerRenderer render];
- [m_videoLayerRenderer endFrame];
-
- if (m_useCoreProfile) {
- glActiveTexture(0);
- glBindTexture(GL_TEXTURE_RECTANGLE, m_textureName);
+ CVPixelBufferLockBaseAddress(pixelBuffer, 0);
+ char *data = (char *)CVPixelBufferGetBaseAddress(pixelBuffer);
+ size_t stride = CVPixelBufferGetBytesPerRow(pixelBuffer);
- glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
+ // format here is not relevant, only using for storage
+ QImage img = QImage(width, height, QImage::Format_ARGB32);
+ for (size_t j = 0; j < height; j++) {
+ memcpy(img.scanLine(j), data, width * 4);
+ data += stride;
+ }
- glBindTexture(GL_TEXTURE_RECTANGLE, 0);
+ CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
+ CVPixelBufferRelease(pixelBuffer);
+ return img;
+}
- glBindVertexArray(0);
+void AVFVideoFrameRenderer::initRenderer()
+{
+ // even for using a texture directly, we need to be able to make a context current,
+ // so we need an offscreen, and we shouldn't assume we can make the surface context
+ // current on that offscreen, so use our own (sharing with it). Slightly
+ // excessive but no performance penalty and makes the QImage path easier to maintain
- m_shader->release();
+ //Make sure we have an OpenGL context to make current
+ if (!m_glContext) {
+ //Create OpenGL context and set share context from surface
+ QOpenGLContext *shareContext = nullptr;
+ if (m_surface)
+ shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>());
- CGLFlushDrawable(m_NSGLContext.CGLContextObj);
- CGLUnlockContext(m_NSGLContext.CGLContextObj);
- } else {
- glMatrixMode(GL_MODELVIEW);
- glPopMatrix();
- glMatrixMode(GL_PROJECTION);
- glPopMatrix();
+ m_glContext = new QOpenGLContext();
+ if (shareContext) {
+ m_glContext->setShareContext(shareContext);
+ m_isContextShared = true;
+ } else {
+#ifdef QT_DEBUG_AVF
+ qWarning("failed to get Render Thread context");
+#endif
+ m_isContextShared = false;
+ }
+ if (!m_glContext->create()) {
+#ifdef QT_DEBUG_AVF
+ qWarning("failed to create QOpenGLContext");
+#endif
+ return;
+ }
}
- glFinish(); //Rendering needs to be done before passing texture to video frame
-
- fbo->release();
-}
+ if (!m_offscreenSurface) {
+ m_offscreenSurface = new QOffscreenSurface();
+ m_offscreenSurface->setFormat(m_glContext->format());
+ m_offscreenSurface->create();
+ }
-GLuint AVFVideoFrameRenderer::createGLTexture(CGLContextObj cglContextObj, CGLPixelFormatObj cglPixelFormtObj, CVOpenGLTextureCacheRef cvglTextureCache,
- CVPixelBufferRef cvPixelBufferRef, CVOpenGLTextureRef cvOpenGLTextureRef)
-{
- CVReturn cvret;
- // Create an OpenGL CoreVideo texture cache from the pixel buffer.
- cvret = CVOpenGLTextureCacheCreate(
- kCFAllocatorDefault,
- nil,
- cglContextObj,
- cglPixelFormtObj,
- nil,
- &cvglTextureCache);
-
- // Create a CVPixelBuffer-backed OpenGL texture image from the texture cache.
- cvret = CVOpenGLTextureCacheCreateTextureFromImage(
- kCFAllocatorDefault,
- cvglTextureCache,
- cvPixelBufferRef,
- nil,
- &cvOpenGLTextureRef);
-
- // Get an OpenGL texture name from the CVPixelBuffer-backed OpenGL texture image.
- return CVOpenGLTextureGetName(cvOpenGLTextureRef);
-}
+ // Need current context
+ m_glContext->makeCurrent(m_offscreenSurface);
+
+ if (!m_textureCache) {
+#ifdef Q_OS_MACOS
+ auto *currentContext = NSOpenGLContext.currentContext;
+ // Create an OpenGL CoreVideo texture cache from the pixel buffer.
+ auto err = CVOpenGLTextureCacheCreate(
+ kCFAllocatorDefault,
+ nullptr,
+ currentContext.CGLContextObj,
+ currentContext.pixelFormat.CGLPixelFormatObj,
+ nil,
+ &m_textureCache);
+#else
+ CVReturn err = CVOGLTextureCacheCreate(kCFAllocatorDefault, nullptr,
+ [EAGLContext currentContext],
+ nullptr, &m_textureCache);
+#endif
+ if (err)
+ qWarning("Error at CVOGLTextureCacheCreate %d", err);
+ }
-id<MTLTexture> AVFVideoFrameRenderer::createMetalTexture(id<MTLDevice> mtlDevice, CVMetalTextureCacheRef cvMetalTextureCacheRef, CVPixelBufferRef cvPixelBufferRef,
- MTLPixelFormat pixelFormat, size_t width, size_t height, CVMetalTextureRef cvMetalTextureRef)
-{
- CVReturn cvret;
- // Create a Metal Core Video texture cache from the pixel buffer.
- cvret = CVMetalTextureCacheCreate(
- kCFAllocatorDefault,
- nil,
- mtlDevice,
- nil,
- &cvMetalTextureCacheRef);
-
- // Create a CoreVideo pixel buffer backed Metal texture image from the texture cache.
- cvret = CVMetalTextureCacheCreateTextureFromImage(
- kCFAllocatorDefault,
- cvMetalTextureCacheRef,
- cvPixelBufferRef, nil,
- pixelFormat,
- width, height,
- 0,
- &cvMetalTextureRef);
-
- // Get a Metal texture using the CoreVideo Metal texture reference.
- return CVMetalTextureGetTexture(cvMetalTextureRef);
}
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.h b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.h
deleted file mode 100644
index 6ad676ad7..000000000
--- a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.h
+++ /dev/null
@@ -1,113 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2021 The Qt Company Ltd.
-** Contact: https://www.qt.io/licensing/
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:COMM$
-**
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see https://www.qt.io/terms-conditions. For further
-** information use the contact form at https://www.qt.io/contact-us.
-**
-** $QT_END_LICENSE$
-**
-**
-**
-**
-**
-**
-**
-**
-**
-**
-**
-**
-**
-**
-**
-**
-**
-**
-**
-****************************************************************************/
-
-#ifndef AVFVIDEOFRAMERENDERER_H
-#define AVFVIDEOFRAMERENDERER_H
-
-#include <QtCore/QObject>
-#include <QtGui/QImage>
-#include <QtGui/QOpenGLContext>
-#include <QtCore/QSize>
-
-@class AVPlayerLayer;
-@class AVPlayerItemVideoOutput;
-
-QT_BEGIN_NAMESPACE
-
-class QOpenGLContext;
-class QOpenGLFramebufferObject;
-class QOpenGLShaderProgram;
-class QOffscreenSurface;
-class QAbstractVideoSurface;
-
-typedef struct __CVBuffer *CVBufferRef;
-typedef CVBufferRef CVImageBufferRef;
-typedef CVImageBufferRef CVPixelBufferRef;
-#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
-typedef struct __CVOpenGLESTextureCache *CVOpenGLESTextureCacheRef;
-typedef CVImageBufferRef CVOpenGLESTextureRef;
-// helpers to avoid boring if def
-typedef CVOpenGLESTextureCacheRef CVOGLTextureCacheRef;
-typedef CVOpenGLESTextureRef CVOGLTextureRef;
-#define CVOGLTextureGetTarget CVOpenGLESTextureGetTarget
-#define CVOGLTextureGetName CVOpenGLESTextureGetName
-#define CVOGLTextureCacheCreate CVOpenGLESTextureCacheCreate
-#define CVOGLTextureCacheCreateTextureFromImage CVOpenGLESTextureCacheCreateTextureFromImage
-#define CVOGLTextureCacheFlush CVOpenGLESTextureCacheFlush
-#else
-typedef struct __CVOpenGLTextureCache *CVOpenGLTextureCacheRef;
-typedef CVImageBufferRef CVOpenGLTextureRef;
-// helpers to avoid boring if def
-typedef CVOpenGLTextureCacheRef CVOGLTextureCacheRef;
-typedef CVOpenGLTextureRef CVOGLTextureRef;
-#define CVOGLTextureGetTarget CVOpenGLTextureGetTarget
-#define CVOGLTextureGetName CVOpenGLTextureGetName
-#define CVOGLTextureCacheCreate CVOpenGLTextureCacheCreate
-#define CVOGLTextureCacheCreateTextureFromImage CVOpenGLTextureCacheCreateTextureFromImage
-#define CVOGLTextureCacheFlush CVOpenGLTextureCacheFlush
-#endif
-
-class AVFVideoFrameRenderer : public QObject
-{
-public:
- AVFVideoFrameRenderer(QAbstractVideoSurface *surface, QObject *parent = nullptr);
-
- virtual ~AVFVideoFrameRenderer();
-
- void setPlayerLayer(AVPlayerLayer *layer);
-
- CVOGLTextureRef renderLayerToTexture(AVPlayerLayer *layer);
- QImage renderLayerToImage(AVPlayerLayer *layer);
-
-private:
- void initRenderer();
- CVPixelBufferRef copyPixelBufferFromLayer(AVPlayerLayer *layer, size_t& width, size_t& height);
- CVOGLTextureRef createCacheTextureFromLayer(AVPlayerLayer *layer, size_t& width, size_t& height);
-
- QOpenGLContext *m_glContext;
- QOffscreenSurface *m_offscreenSurface;
- QAbstractVideoSurface *m_surface;
- CVOGLTextureCacheRef m_textureCache;
- AVPlayerItemVideoOutput* m_videoOutput;
- bool m_isContextShared;
-};
-
-QT_END_NAMESPACE
-
-#endif // AVFVIDEOFRAMERENDERER_H
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.mm b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.mm
deleted file mode 100644
index 078898aa7..000000000
--- a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.mm
+++ /dev/null
@@ -1,261 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2021 The Qt Company Ltd and/or its subsidiary(-ies).
-** Contact: https://www.qt.io/licensing/
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:COMM$
-**
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see https://www.qt.io/terms-conditions. For further
-** information use the contact form at https://www.qt.io/contact-us.
-**
-** $QT_END_LICENSE$
-**
-**
-**
-**
-**
-**
-**
-**
-**
-**
-**
-**
-**
-**
-**
-**
-**
-**
-**
-****************************************************************************/
-
-#include "avfvideoframerenderer_ios.h"
-
-#include <QtMultimedia/qabstractvideosurface.h>
-#include <QtGui/QOpenGLFramebufferObject>
-#include <QtGui/QOpenGLShaderProgram>
-#include <QtGui/QOffscreenSurface>
-
-#ifdef QT_DEBUG_AVF
-#include <QtCore/qdebug.h>
-#endif
-
-#import <CoreVideo/CVBase.h>
-#import <AVFoundation/AVFoundation.h>
-QT_USE_NAMESPACE
-
-AVFVideoFrameRenderer::AVFVideoFrameRenderer(QAbstractVideoSurface *surface, QObject *parent)
- : QObject(parent)
- , m_glContext(nullptr)
- , m_offscreenSurface(nullptr)
- , m_surface(surface)
- , m_textureCache(nullptr)
- , m_videoOutput(nullptr)
- , m_isContextShared(true)
-{
-}
-
-AVFVideoFrameRenderer::~AVFVideoFrameRenderer()
-{
-#ifdef QT_DEBUG_AVF
- qDebug() << Q_FUNC_INFO;
-#endif
-
- [m_videoOutput release]; // sending to nil is fine
- if (m_textureCache)
- CFRelease(m_textureCache);
- delete m_offscreenSurface;
- delete m_glContext;
-}
-
-void AVFVideoFrameRenderer::setPlayerLayer(AVPlayerLayer *layer)
-{
- Q_UNUSED(layer)
- if (m_videoOutput) {
- [m_videoOutput release];
- m_videoOutput = nullptr;
- // will be re-created in first call to copyPixelBufferFromLayer
- }
-}
-
-CVOGLTextureRef AVFVideoFrameRenderer::renderLayerToTexture(AVPlayerLayer *layer)
-{
- initRenderer();
-
- // If the glContext isn't shared, it doesn't make sense to return a texture for us
- if (!m_isContextShared)
- return nullptr;
-
- size_t dummyWidth = 0, dummyHeight = 0;
- return createCacheTextureFromLayer(layer, dummyWidth, dummyHeight);
-}
-
-static NSString* const AVF_PIXEL_FORMAT_KEY = (NSString*)kCVPixelBufferPixelFormatTypeKey;
-static NSNumber* const AVF_PIXEL_FORMAT_VALUE = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
-static NSDictionary* const AVF_OUTPUT_SETTINGS = [NSDictionary dictionaryWithObject:AVF_PIXEL_FORMAT_VALUE forKey:AVF_PIXEL_FORMAT_KEY];
-
-
-CVPixelBufferRef AVFVideoFrameRenderer::copyPixelBufferFromLayer(AVPlayerLayer *layer,
- size_t& width, size_t& height)
-{
- //Is layer valid
- if (!layer) {
-#ifdef QT_DEBUG_AVF
- qWarning("copyPixelBufferFromLayer: invalid layer");
-#endif
- return nullptr;
- }
-
- if (!m_videoOutput) {
- m_videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:AVF_OUTPUT_SETTINGS];
- [m_videoOutput setDelegate:nil queue:nil];
- AVPlayerItem * item = [[layer player] currentItem];
- [item addOutput:m_videoOutput];
- }
-
- CFTimeInterval currentCAFrameTime = CACurrentMediaTime();
- CMTime currentCMFrameTime = [m_videoOutput itemTimeForHostTime:currentCAFrameTime];
- // happens when buffering / loading
- if (CMTimeCompare(currentCMFrameTime, kCMTimeZero) < 0) {
- return nullptr;
- }
-
- CVPixelBufferRef pixelBuffer = [m_videoOutput copyPixelBufferForItemTime:currentCMFrameTime
- itemTimeForDisplay:nil];
- if (!pixelBuffer) {
-#ifdef QT_DEBUG_AVF
- qWarning("copyPixelBufferForItemTime returned nil");
- CMTimeShow(currentCMFrameTime);
-#endif
- return nullptr;
- }
-
- width = CVPixelBufferGetWidth(pixelBuffer);
- height = CVPixelBufferGetHeight(pixelBuffer);
- return pixelBuffer;
-}
-
-CVOGLTextureRef AVFVideoFrameRenderer::createCacheTextureFromLayer(AVPlayerLayer *layer,
- size_t& width, size_t& height)
-{
- CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(layer, width, height);
-
- if (!pixelBuffer)
- return nullptr;
-
- CVOGLTextureCacheFlush(m_textureCache, 0);
-
- CVOGLTextureRef texture = nullptr;
- CVReturn err = CVOGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault, m_textureCache, pixelBuffer, nullptr,
- GL_TEXTURE_2D, GL_RGBA,
- (GLsizei) width, (GLsizei) height,
- GL_BGRA, GL_UNSIGNED_BYTE, 0,
- &texture);
-
- if (!texture || err) {
-#ifdef QT_DEBUG_AVF
- qWarning("CVOGLTextureCacheCreateTextureFromImage failed (error: %d)", err);
-#endif
- }
-
- CVPixelBufferRelease(pixelBuffer);
-
- return texture;
-}
-
-QImage AVFVideoFrameRenderer::renderLayerToImage(AVPlayerLayer *layer)
-{
- size_t width = 0;
- size_t height = 0;
- CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(layer, width, height);
-
- if (!pixelBuffer)
- return QImage();
-
- OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
- if (pixelFormat != kCVPixelFormatType_32BGRA) {
-#ifdef QT_DEBUG_AVF
- qWarning("CVPixelBuffer format is not BGRA32 (got: %d)", static_cast<quint32>(pixelFormat));
-#endif
- return QImage();
- }
-
- CVPixelBufferLockBaseAddress(pixelBuffer, 0);
- char *data = (char *)CVPixelBufferGetBaseAddress(pixelBuffer);
- size_t stride = CVPixelBufferGetBytesPerRow(pixelBuffer);
-
- // format here is not relevant, only using for storage
- QImage img = QImage(width, height, QImage::Format_ARGB32);
- for (size_t j = 0; j < height; j++) {
- memcpy(img.scanLine(j), data, width * 4);
- data += stride;
- }
-
- CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
- CVPixelBufferRelease(pixelBuffer);
- return img;
-}
-
-void AVFVideoFrameRenderer::initRenderer()
-{
- // even for using a texture directly, we need to be able to make a context current,
- // so we need an offscreen, and we shouldn't assume we can make the surface context
- // current on that offscreen, so use our own (sharing with it). Slightly
- // excessive but no performance penalty and makes the QImage path easier to maintain
-
- //Make sure we have an OpenGL context to make current
- if (!m_glContext) {
- //Create OpenGL context and set share context from surface
- QOpenGLContext *shareContext = nullptr;
- if (m_surface) {
- shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>());
- }
-
- m_glContext = new QOpenGLContext();
- if (shareContext) {
- m_glContext->setShareContext(shareContext);
- m_isContextShared = true;
- } else {
-#ifdef QT_DEBUG_AVF
- qWarning("failed to get Render Thread context");
-#endif
- m_isContextShared = false;
- }
- if (!m_glContext->create()) {
-#ifdef QT_DEBUG_AVF
- qWarning("failed to create QOpenGLContext");
-#endif
- return;
- }
- }
-
- if (!m_offscreenSurface) {
- m_offscreenSurface = new QOffscreenSurface();
- m_offscreenSurface->setFormat(m_glContext->format());
- m_offscreenSurface->create();
- }
-
- //Need current context
- m_glContext->makeCurrent(m_offscreenSurface);
-
- if (!m_textureCache) {
- // Create a new open gl texture cache
- CVReturn err = CVOGLTextureCacheCreate(kCFAllocatorDefault, nullptr,
- [EAGLContext currentContext],
- nullptr, &m_textureCache);
- if (err) {
- #ifdef QT_DEBUG_AVF
- qWarning("Error at CVOGLTextureCacheCreate %d", err);
- #endif
- }
- }
-
-}
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.h b/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.h
index db675cd96..fa76fb33c 100644
--- a/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.h
+++ b/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.h
@@ -41,6 +41,8 @@
#define AVFVIDEORENDERERCONTROL_H
#include <QtMultimedia/QVideoRendererControl>
+#include <QtMultimedia/qabstractvideobuffer.h>
+
#include <QtCore/QMutex>
#include <QtCore/QSize>
@@ -82,8 +84,7 @@ private:
AVFVideoFrameRenderer *m_frameRenderer;
AVFDisplayLink *m_displayLink;
- QSize m_nativeSize;
- bool m_enableOpenGL;
+ QAbstractVideoBuffer::HandleType m_surfaceType = QAbstractVideoBuffer::NoHandle;
};
QT_END_NAMESPACE
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.mm b/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.mm
index 2e68b36f9..b267c1f25 100644
--- a/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.mm
+++ b/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.mm
@@ -40,11 +40,7 @@
#include "avfvideorenderercontrol.h"
#include "avfdisplaylink.h"
-#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
-#include "avfvideoframerenderer_ios.h"
-#else
#include "avfvideoframerenderer.h"
-#endif
#include <QtMultimedia/qabstractvideobuffer.h>
#include <QtMultimedia/qabstractvideosurface.h>
@@ -58,69 +54,52 @@
QT_USE_NAMESPACE
-#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
-class TextureCacheVideoBuffer : public QAbstractVideoBuffer
+class TextureVideoBuffer : public QAbstractVideoBuffer
{
public:
- TextureCacheVideoBuffer(CVOGLTextureRef texture)
- : QAbstractVideoBuffer(GLTextureHandle)
+ TextureVideoBuffer(GLuint texture, QAbstractVideoBuffer::HandleType type)
+ : QAbstractVideoBuffer(type)
, m_texture(texture)
{}
- virtual ~TextureCacheVideoBuffer()
- {
- // absolutely critical that we drop this
- // reference of textures will stay in the cache
- CFRelease(m_texture);
- }
-
MapMode mapMode() const { return NotMapped; }
uchar *map(MapMode, int*, int*) { return nullptr; }
void unmap() {}
QVariant handle() const
{
- GLuint texId = CVOGLTextureGetName(m_texture);
- return QVariant::fromValue<unsigned int>(texId);
+ return QVariant::fromValue<unsigned int>(m_texture);
}
private:
- CVOGLTextureRef m_texture;
+ GLuint m_texture;
};
-#else
-class TextureVideoBuffer : public QAbstractVideoBuffer
+
+class CoreVideoTextureVideoBuffer : public TextureVideoBuffer
{
public:
- TextureVideoBuffer(GLuint tex)
- : QAbstractVideoBuffer(GLTextureHandle)
- , m_texture(tex)
+ CoreVideoTextureVideoBuffer(CVOGLTextureRef texture, QAbstractVideoBuffer::HandleType type)
+ : TextureVideoBuffer(CVOGLTextureGetName(texture), type)
+ , m_coreVideoTexture(texture)
{}
- virtual ~TextureVideoBuffer()
+ virtual ~CoreVideoTextureVideoBuffer()
{
- }
-
- MapMode mapMode() const { return NotMapped; }
- uchar *map(MapMode, int*, int*) { return 0; }
- void unmap() {}
-
- QVariant handle() const
- {
- return QVariant::fromValue<unsigned int>(m_texture);
+ // absolutely critical that we drop this
+ // reference of textures will stay in the cache
+ CFRelease(m_coreVideoTexture);
}
private:
- GLuint m_texture;
+ CVOGLTextureRef m_coreVideoTexture;
};
-#endif
+
AVFVideoRendererControl::AVFVideoRendererControl(QObject *parent)
: QVideoRendererControl(parent)
, m_surface(nullptr)
, m_playerLayer(nullptr)
, m_frameRenderer(nullptr)
- , m_enableOpenGL(false)
-
{
m_displayLink = new AVFDisplayLink(this);
connect(m_displayLink, SIGNAL(tick(CVTimeStamp)), SLOT(updateVideoFrame(CVTimeStamp)));
@@ -170,18 +149,26 @@ void AVFVideoRendererControl::setSurface(QAbstractVideoSurface *surface)
//Surface changed, so we need a new frame renderer
m_frameRenderer = new AVFVideoFrameRenderer(m_surface, this);
-#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
- if (m_playerLayer) {
- m_frameRenderer->setPlayerLayer(static_cast<AVPlayerLayer*>(m_playerLayer));
- }
-#endif
- //Check for needed formats to render as OpenGL Texture
- auto handleGlEnabled = [this] {
- m_enableOpenGL = m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle).contains(QVideoFrame::Format_BGR32);
+ auto updateSurfaceType = [this] {
+ auto preferredOpenGLSurfaceTypes = {
+#ifdef Q_OS_MACOS
+ QAbstractVideoBuffer::GLTextureRectangleHandle, // GL_TEXTURE_RECTANGLE
+#endif
+ QAbstractVideoBuffer::GLTextureHandle // GL_TEXTURE_2D
+ };
+
+ for (auto surfaceType : preferredOpenGLSurfaceTypes) {
+ auto supportedFormats = m_surface->supportedPixelFormats(surfaceType);
+ if (supportedFormats.contains(QVideoFrame::Format_BGR32)) {
+ m_surfaceType = surfaceType;
+ return;
+ }
+ m_surfaceType = QAbstractVideoBuffer::NoHandle; // QImage
+ }
};
- handleGlEnabled();
- connect(m_surface, &QAbstractVideoSurface::supportedFormatsChanged, this, handleGlEnabled);
+ updateSurfaceType();
+ connect(m_surface, &QAbstractVideoSurface::supportedFormatsChanged, this, updateSurfaceType);
//If we already have a layer, but changed surfaces start rendering again
if (m_playerLayer && !m_displayLink->isActive()) {
@@ -204,12 +191,6 @@ void AVFVideoRendererControl::setLayer(void *playerLayer)
if (m_surface && m_surface->isActive())
m_surface->stop();
-#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
- if (m_frameRenderer) {
- m_frameRenderer->setPlayerLayer(static_cast<AVPlayerLayer*>(playerLayer));
- }
-#endif
-
//If there is no layer to render, stop scheduling updates
if (m_playerLayer == nullptr) {
m_displayLink->stop();
@@ -238,36 +219,39 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
if (!playerLayer.readyForDisplay)
return;
- if (m_enableOpenGL) {
-#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
- CVOGLTextureRef tex = m_frameRenderer->renderLayerToTexture(playerLayer);
-
- //Make sure we got a valid texture
- if (tex == nullptr)
- return;
-
- QAbstractVideoBuffer *buffer = new TextureCacheVideoBuffer(tex);
+ if (m_surfaceType == QAbstractVideoBuffer::GLTextureHandle
+ || m_surfaceType == QAbstractVideoBuffer::GLTextureRectangleHandle) {
+ QSize size;
+ QAbstractVideoBuffer *buffer = nullptr;
+
+#ifdef Q_OS_MACOS
+ if (m_surfaceType == QAbstractVideoBuffer::GLTextureRectangleHandle) {
+ // Render to GL_TEXTURE_RECTANGLE directly
+ if (CVOGLTextureRef tex = m_frameRenderer->renderLayerToTexture(playerLayer, &size))
+ buffer = new CoreVideoTextureVideoBuffer(tex, m_surfaceType);
+ } else {
+ // Render to GL_TEXTURE_2D via FBO
+ if (GLuint tex = m_frameRenderer->renderLayerToFBO(playerLayer, &size))
+ buffer = new TextureVideoBuffer(tex, m_surfaceType);
+ }
#else
- GLuint tex = m_frameRenderer->renderLayerToTexture(playerLayer);
- //Make sure we got a valid texture
- if (tex == 0)
+ Q_ASSERT(m_surfaceType != QAbstractVideoBuffer::GLTextureRectangleHandle);
+ // Render to GL_TEXTURE_2D directly
+ if (CVOGLTextureRef tex = m_frameRenderer->renderLayerToTexture(playerLayer, &size))
+ buffer = new CoreVideoTextureVideoBuffer(tex, m_surfaceType);
+#endif
+ if (!buffer)
return;
- QAbstractVideoBuffer *buffer = new TextureVideoBuffer(tex);
-#endif
- QVideoFrame frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_BGR32);
+ QVideoFrame frame = QVideoFrame(buffer, size, QVideoFrame::Format_BGR32);
if (m_surface && frame.isValid()) {
if (m_surface->isActive() && m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat())
m_surface->stop();
if (!m_surface->isActive()) {
- QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), QAbstractVideoBuffer::GLTextureHandle);
-#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+ QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), m_surfaceType);
format.setScanLineDirection(QVideoSurfaceFormat::TopToBottom);
-#else
- format.setScanLineDirection(QVideoSurfaceFormat::BottomToTop);
-#endif
if (!m_surface->start(format)) {
//Surface doesn't support GLTextureHandle
qWarning("Failed to activate video surface");
@@ -279,20 +263,21 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
}
} else {
//fallback to rendering frames to QImages
- QImage frameData = m_frameRenderer->renderLayerToImage(playerLayer);
+ QSize size;
+ QImage frameData = m_frameRenderer->renderLayerToImage(playerLayer, &size);
if (frameData.isNull()) {
return;
}
QAbstractVideoBuffer *buffer = new QImageVideoBuffer(frameData);
- QVideoFrame frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_ARGB32);
+ QVideoFrame frame = QVideoFrame(buffer, size, QVideoFrame::Format_ARGB32);
if (m_surface && frame.isValid()) {
if (m_surface->isActive() && m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat())
m_surface->stop();
if (!m_surface->isActive()) {
- QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), QAbstractVideoBuffer::NoHandle);
+ QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), m_surfaceType);
if (!m_surface->start(format)) {
qWarning("Failed to activate video surface");
@@ -308,7 +293,4 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
void AVFVideoRendererControl::setupVideoOutput()
{
- AVPlayerLayer *playerLayer = static_cast<AVPlayerLayer*>(m_playerLayer);
- if (playerLayer)
- m_nativeSize = QSize(playerLayer.bounds.size.width, playerLayer.bounds.size.height);
}
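For completeness, a sketch of a custom QAbstractVideoSurface that opts in to the rectangle-texture path selected by updateSurfaceType() above. The class is hypothetical and not part of the patch; a real surface would also render the frames it receives in present().

    #include <QtMultimedia/qabstractvideosurface.h>
    #include <QtMultimedia/qvideosurfaceformat.h>

    class RectangleTextureSurface : public QAbstractVideoSurface
    {
    public:
        QList<QVideoFrame::PixelFormat> supportedPixelFormats(
                QAbstractVideoBuffer::HandleType type = QAbstractVideoBuffer::NoHandle) const override
        {
            // Advertising BGR32 for GLTextureRectangleHandle makes updateSurfaceType()
            // prefer the zero-copy rectangle-texture path on macOS.
            if (type == QAbstractVideoBuffer::GLTextureRectangleHandle
                    || type == QAbstractVideoBuffer::GLTextureHandle)
                return { QVideoFrame::Format_BGR32 };
            return { QVideoFrame::Format_ARGB32 }; // QImage fallback
        }

        bool present(const QVideoFrame &frame) override
        {
            // Bind frame.handle().toUInt() to GL_TEXTURE_RECTANGLE or GL_TEXTURE_2D
            // depending on surfaceFormat().handleType(); rendering elided here.
            Q_UNUSED(frame);
            return true;
        }
    };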
diff --git a/src/plugins/avfoundation/mediaplayer/mediaplayer.pro b/src/plugins/avfoundation/mediaplayer/mediaplayer.pro
index 174220f37..f71e0c3b3 100644
--- a/src/plugins/avfoundation/mediaplayer/mediaplayer.pro
+++ b/src/plugins/avfoundation/mediaplayer/mediaplayer.pro
@@ -39,34 +39,23 @@ OBJECTIVE_SOURCES += \
avfvideowidget.mm
}
-ios|tvos {
- qtConfig(opengl) {
- HEADERS += \
- avfvideoframerenderer_ios.h \
- avfvideorenderercontrol.h \
- avfdisplaylink.h
+qtConfig(opengl) {
+ HEADERS += \
+ avfvideoframerenderer.h \
+ avfvideorenderercontrol.h \
+ avfdisplaylink.h
- OBJECTIVE_SOURCES += \
- avfvideoframerenderer_ios.mm \
- avfvideorenderercontrol.mm \
- avfdisplaylink.mm
- }
+ OBJECTIVE_SOURCES += \
+ avfvideoframerenderer.mm \
+ avfvideorenderercontrol.mm \
+ avfdisplaylink.mm
+}
+
+ios|tvos {
LIBS += -framework Foundation
} else {
INCLUDEPATH += $$[QT_INSTALL_HEADERS]
LIBS += -framework AppKit -framework Metal
-
- qtConfig(opengl) {
- HEADERS += \
- avfvideoframerenderer.h \
- avfvideorenderercontrol.h \
- avfdisplaylink.h
-
- OBJECTIVE_SOURCES += \
- avfvideoframerenderer.mm \
- avfvideorenderercontrol.mm \
- avfdisplaylink.mm
- }
}
OTHER_FILES += \
diff --git a/src/qtmultimediaquicktools/qsgvideonode_texture.cpp b/src/qtmultimediaquicktools/qsgvideonode_texture.cpp
index 589b1359a..65c57f903 100644
--- a/src/qtmultimediaquicktools/qsgvideonode_texture.cpp
+++ b/src/qtmultimediaquicktools/qsgvideonode_texture.cpp
@@ -53,6 +53,13 @@ QList<QVideoFrame::PixelFormat> QSGVideoNodeFactory_Texture::supportedPixelForma
{
QList<QVideoFrame::PixelFormat> pixelFormats;
+#ifdef Q_OS_MACOS
+ if (handleType == QAbstractVideoBuffer::GLTextureRectangleHandle) {
+ pixelFormats.append(QVideoFrame::Format_BGR32);
+ pixelFormats.append(QVideoFrame::Format_BGRA32);
+ }
+#endif
+
if (handleType == QAbstractVideoBuffer::GLTextureHandle) {
pixelFormats.append(QVideoFrame::Format_RGB565);
pixelFormats.append(QVideoFrame::Format_RGB32);
@@ -82,8 +89,6 @@ public:
QSGVideoMaterialShader_Texture()
: QSGMaterialShader()
{
- setShaderSourceFile(QOpenGLShader::Vertex, QStringLiteral(":/qtmultimediaquicktools/shaders/monoplanarvideo.vert"));
- setShaderSourceFile(QOpenGLShader::Fragment, QStringLiteral(":/qtmultimediaquicktools/shaders/rgbvideo.frag"));
}
void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial) override;
@@ -109,10 +114,20 @@ protected:
int m_id_opacity;
};
-class QSGVideoMaterialShader_Texture_swizzle : public QSGVideoMaterialShader_Texture
+class QSGVideoMaterialShader_Texture_2D : public QSGVideoMaterialShader_Texture
{
public:
- QSGVideoMaterialShader_Texture_swizzle(bool hasAlpha)
+ QSGVideoMaterialShader_Texture_2D()
+ {
+ setShaderSourceFile(QOpenGLShader::Vertex, QStringLiteral(":/qtmultimediaquicktools/shaders/monoplanarvideo.vert"));
+ setShaderSourceFile(QOpenGLShader::Fragment, QStringLiteral(":/qtmultimediaquicktools/shaders/rgbvideo.frag"));
+ }
+};
+
+class QSGVideoMaterialShader_Texture_2D_swizzle : public QSGVideoMaterialShader_Texture_2D
+{
+public:
+ QSGVideoMaterialShader_Texture_2D_swizzle(bool hasAlpha)
: m_hasAlpha(hasAlpha)
{
setShaderSourceFile(QOpenGLShader::Fragment, QStringLiteral(":/qtmultimediaquicktools/shaders/rgbvideo_swizzle.frag"));
@@ -120,14 +135,13 @@ public:
protected:
void initialize() override {
- QSGVideoMaterialShader_Texture::initialize();
+ QSGVideoMaterialShader_Texture_2D::initialize();
program()->setUniformValue(program()->uniformLocation("hasAlpha"), GLboolean(m_hasAlpha));
}
int m_hasAlpha;
};
-
class QSGVideoMaterial_Texture : public QSGMaterial
{
public:
@@ -149,12 +163,6 @@ public:
return needsSwizzling() ? &swizzleType : &normalType;
}
- QSGMaterialShader *createShader() const override {
- const bool hasAlpha = m_format.pixelFormat() == QVideoFrame::Format_ARGB32;
- return needsSwizzling() ? new QSGVideoMaterialShader_Texture_swizzle(hasAlpha)
- : new QSGVideoMaterialShader_Texture;
- }
-
int compare(const QSGMaterial *other) const override {
const QSGVideoMaterial_Texture *m = static_cast<const QSGVideoMaterial_Texture *>(other);
@@ -179,9 +187,42 @@ public:
void setVideoFrame(const QVideoFrame &frame) {
QMutexLocker lock(&m_frameMutex);
m_frame = frame;
+ m_textureSize = frame.size();
+ }
+
+ virtual void bind() = 0;
+
+ QVideoFrame m_frame;
+ QMutex m_frameMutex;
+ QSize m_textureSize;
+ QVideoSurfaceFormat m_format;
+ GLuint m_textureId;
+ qreal m_opacity;
+
+protected:
+ bool needsSwizzling() const {
+ return !QMediaOpenGLHelper::isANGLE()
+ && (m_format.pixelFormat() == QVideoFrame::Format_RGB32
+ || m_format.pixelFormat() == QVideoFrame::Format_ARGB32);
+ }
+};
+
+class QSGVideoMaterial_Texture_2D : public QSGVideoMaterial_Texture
+{
+public:
+ QSGVideoMaterial_Texture_2D(const QVideoSurfaceFormat &format) :
+ QSGVideoMaterial_Texture(format)
+ {
}
- void bind()
+ QSGMaterialShader *createShader() const override
+ {
+ const bool hasAlpha = m_format.pixelFormat() == QVideoFrame::Format_ARGB32;
+ return needsSwizzling() ? new QSGVideoMaterialShader_Texture_2D_swizzle(hasAlpha)
+ : new QSGVideoMaterialShader_Texture_2D;
+ }
+
+ void bind() override
{
QMutexLocker lock(&m_frameMutex);
if (m_frame.isValid()) {
@@ -197,28 +238,84 @@ public:
m_textureId = 0;
}
}
+};
+
+#ifdef Q_OS_MACOS
+class QSGVideoMaterialShader_Texture_Rectangle : public QSGVideoMaterialShader_Texture
+{
+public:
+ QSGVideoMaterialShader_Texture_Rectangle()
+ {
+ setShaderSourceFile(QOpenGLShader::Vertex, QStringLiteral(":/qtmultimediaquicktools/shaders/rectsampler.vert"));
+ setShaderSourceFile(QOpenGLShader::Fragment, QStringLiteral(":/qtmultimediaquicktools/shaders/rectsampler_rgb.frag"));
+ }
- QVideoFrame m_frame;
- QMutex m_frameMutex;
- QSize m_textureSize;
- QVideoSurfaceFormat m_format;
- GLuint m_textureId;
- qreal m_opacity;
+ void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial) override
+ {
+ QSGVideoMaterial_Texture *mat = static_cast<QSGVideoMaterial_Texture *>(newMaterial);
+ QVector2D size(mat->m_textureSize.width(), mat->m_textureSize.height());
+ program()->setUniformValue(m_id_videoSize, size);
-private:
- bool needsSwizzling() const {
- return !QMediaOpenGLHelper::isANGLE()
- && (m_format.pixelFormat() == QVideoFrame::Format_RGB32
- || m_format.pixelFormat() == QVideoFrame::Format_ARGB32);
+ QSGVideoMaterialShader_Texture::updateState(state, newMaterial, oldMaterial);
}
+
+protected:
+ void initialize() override
+ {
+ QSGVideoMaterialShader_Texture::initialize();
+ m_id_videoSize = program()->uniformLocation("qt_videoSize");
+ }
+
+ int m_id_videoSize;
};
+class QSGVideoMaterial_Texture_Rectangle : public QSGVideoMaterial_Texture
+{
+public:
+ QSGVideoMaterial_Texture_Rectangle(const QVideoSurfaceFormat &format) :
+ QSGVideoMaterial_Texture(format)
+ {
+ }
+
+ QSGMaterialShader *createShader() const override
+ {
+ Q_ASSERT(!needsSwizzling());
+ return new QSGVideoMaterialShader_Texture_Rectangle;
+ }
+
+ void bind() override
+ {
+ QMutexLocker lock(&m_frameMutex);
+ if (m_frame.isValid()) {
+ m_textureId = m_frame.handle().toUInt();
+ QOpenGLFunctions *functions = QOpenGLContext::currentContext()->functions();
+ functions->glActiveTexture(GL_TEXTURE0);
+ functions->glBindTexture(GL_TEXTURE_RECTANGLE, m_textureId);
+
+ functions->glTexParameteri(GL_TEXTURE_RECTANGLE, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ functions->glTexParameteri(GL_TEXTURE_RECTANGLE, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ functions->glTexParameteri(GL_TEXTURE_RECTANGLE, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ functions->glTexParameteri(GL_TEXTURE_RECTANGLE, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ } else {
+ m_textureId = 0;
+ }
+ }
+};
+#endif
QSGVideoNode_Texture::QSGVideoNode_Texture(const QVideoSurfaceFormat &format) :
m_format(format)
{
setFlag(QSGNode::OwnsMaterial);
- m_material = new QSGVideoMaterial_Texture(format);
+
+#ifdef Q_OS_MACOS
+ if (format.handleType() == QAbstractVideoBuffer::GLTextureRectangleHandle)
+ m_material = new QSGVideoMaterial_Texture_Rectangle(format);
+#endif
+
+ if (!m_material)
+ m_material = new QSGVideoMaterial_Texture_2D(format);
+
setMaterial(m_material);
}
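The rectangle-texture material above needs the new rectsampler shaders because GL_TEXTURE_RECTANGLE is sampled with unnormalized texel coordinates rather than [0, 1]. A tiny illustrative helper (not in the patch) showing the same mapping the qt_videoSize uniform performs in rectsampler.vert:

    #include <QPointF>
    #include <QSize>

    // Illustration only: sampler2DRect is addressed in texels, so normalized
    // coordinates must be scaled by the texture size before sampling.
    static QPointF toRectangleTexCoord(const QPointF &normalized, const QSize &textureSize)
    {
        // (0.5, 0.5) on a 1920x1080 frame becomes (960, 540) for sampler2DRect.
        return { normalized.x() * textureSize.width(),
                 normalized.y() * textureSize.height() };
    }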
diff --git a/src/qtmultimediaquicktools/qtmultimediaquicktools.qrc b/src/qtmultimediaquicktools/qtmultimediaquicktools.qrc
index b8180e31f..86523e771 100644
--- a/src/qtmultimediaquicktools/qtmultimediaquicktools.qrc
+++ b/src/qtmultimediaquicktools/qtmultimediaquicktools.qrc
@@ -11,7 +11,6 @@
<file>shaders/triplanaryuvvideo.vert</file>
<file>shaders/uyvyvideo.frag</file>
<file>shaders/yuyvvideo.frag</file>
-
<file>shaders/monoplanarvideo_core.vert</file>
<file>shaders/rgbvideo_core.frag</file>
<file>shaders/rgbvideo_swizzle_core.frag</file>
@@ -23,5 +22,9 @@
<file>shaders/triplanaryuvvideo_core.vert</file>
<file>shaders/uyvyvideo_core.frag</file>
<file>shaders/yuyvvideo_core.frag</file>
+ <file>shaders/rectsampler.vert</file>
+ <file>shaders/rectsampler_rgb.frag</file>
+ <file>shaders/rectsampler_core.vert</file>
+ <file>shaders/rectsampler_rgb_core.frag</file>
</qresource>
</RCC>
diff --git a/src/qtmultimediaquicktools/shaders/rectsampler.vert b/src/qtmultimediaquicktools/shaders/rectsampler.vert
new file mode 100644
index 000000000..762ec7e7e
--- /dev/null
+++ b/src/qtmultimediaquicktools/shaders/rectsampler.vert
@@ -0,0 +1,10 @@
+uniform highp mat4 qt_Matrix;
+uniform highp vec2 qt_videoSize;
+attribute highp vec4 qt_VertexPosition;
+attribute highp vec2 qt_VertexTexCoord;
+varying highp vec2 qt_TexCoord;
+
+void main() {
+ qt_TexCoord = vec2(qt_VertexTexCoord.x * qt_videoSize.x, qt_VertexTexCoord.y * qt_videoSize.y);
+ gl_Position = qt_Matrix * qt_VertexPosition;
+}
diff --git a/src/qtmultimediaquicktools/shaders/rectsampler_core.vert b/src/qtmultimediaquicktools/shaders/rectsampler_core.vert
new file mode 100644
index 000000000..f0fe02349
--- /dev/null
+++ b/src/qtmultimediaquicktools/shaders/rectsampler_core.vert
@@ -0,0 +1,11 @@
+#version 150 core
+uniform highp mat4 qt_Matrix;
+uniform highp vec2 qt_videoSize;
+in highp vec4 qt_VertexPosition;
+in highp vec2 qt_VertexTexCoord;
+out highp vec2 qt_TexCoord;
+
+void main() {
+ qt_TexCoord = vec2(qt_VertexTexCoord.x * qt_videoSize.x, qt_VertexTexCoord.y * qt_videoSize.y);
+ gl_Position = qt_Matrix * qt_VertexPosition;
+}
diff --git a/src/qtmultimediaquicktools/shaders/rectsampler_rgb.frag b/src/qtmultimediaquicktools/shaders/rectsampler_rgb.frag
new file mode 100644
index 000000000..2a30f7c3d
--- /dev/null
+++ b/src/qtmultimediaquicktools/shaders/rectsampler_rgb.frag
@@ -0,0 +1,8 @@
+uniform sampler2DRect rgbTexture;
+uniform lowp float opacity;
+varying highp vec2 qt_TexCoord;
+
+void main()
+{
+ gl_FragColor = texture2DRect(rgbTexture, qt_TexCoord) * opacity;
+}
diff --git a/src/qtmultimediaquicktools/shaders/rectsampler_rgb_core.frag b/src/qtmultimediaquicktools/shaders/rectsampler_rgb_core.frag
new file mode 100644
index 000000000..17f306456
--- /dev/null
+++ b/src/qtmultimediaquicktools/shaders/rectsampler_rgb_core.frag
@@ -0,0 +1,10 @@
+#version 150 core
+uniform sampler2DRect rgbTexture;
+uniform lowp float opacity;
+in highp vec2 qt_TexCoord;
+out vec4 fragColor;
+
+void main()
+{
+ fragColor = texture(rgbTexture, qt_TexCoord) * opacity;
+}