summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorAndré de la Rocha <andre.rocha@qt.io>2022-02-10 12:36:12 +0100
committerAndré de la Rocha <andre.rocha@qt.io>2022-04-01 09:12:26 -0300
commitcf33c31b06190f853635242b34eea0820d9fb041 (patch)
treeb6b8b3e8f6550dd522b977bbbb27ff409f61e29c
parent2a7babe9e6f695e9effa5334734bd8e6d997644a (diff)
downloadqtmultimedia-cf33c31b06190f853635242b34eea0820d9fb041.tar.gz
Do QVideoFrame to QImage conversion using shaders
Use existing shaders to do format conversion, in order to reduce overall CPU use, rendering the frame in an offscreen texture that is read back and used to generate a QImage. Task-number: QTBUG-99969 Change-Id: Iaae9acb573c49e81c177dbcb6c1ec4d3a9cab89d Reviewed-by: Qt CI Bot <qt_ci_bot@qt-project.org> Reviewed-by: Piotr Srebrny <piotr.srebrny@qt.io> Reviewed-by: Lars Knoll <lars.knoll@qt.io>
-rw-r--r--src/multimedia/CMakeLists.txt1
-rw-r--r--src/multimedia/video/qabstractvideobuffer.cpp10
-rw-r--r--src/multimedia/video/qabstractvideobuffer_p.h3
-rw-r--r--src/multimedia/video/qvideoframe.cpp78
-rw-r--r--src/multimedia/video/qvideoframeconverter.cpp451
-rw-r--r--src/multimedia/video/qvideoframeconverter_p.h63
-rw-r--r--src/plugins/multimedia/android/common/qandroidvideooutput.cpp4
-rw-r--r--src/plugins/multimedia/darwin/avfvideobuffer.mm10
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstvideobuffer.cpp8
-rw-r--r--src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp9
10 files changed, 545 insertions, 92 deletions
diff --git a/src/multimedia/CMakeLists.txt b/src/multimedia/CMakeLists.txt
index 41a80420c..9a77fa117 100644
--- a/src/multimedia/CMakeLists.txt
+++ b/src/multimedia/CMakeLists.txt
@@ -58,6 +58,7 @@ qt_internal_add_module(Multimedia
video/qvideotexturehelper.cpp video/qvideotexturehelper_p.h
video/qvideoframeconversionhelper.cpp video/qvideoframeconversionhelper_p.h
video/qvideooutputorientationhandler.cpp video/qvideooutputorientationhandler_p.h
+ video/qvideoframeconverter.cpp video/qvideoframeconverter_p.h
video/qvideoframeformat.cpp video/qvideoframeformat.h
video/qvideowindow.cpp video/qvideowindow_p.h
INCLUDE_DIRECTORIES
diff --git a/src/multimedia/video/qabstractvideobuffer.cpp b/src/multimedia/video/qabstractvideobuffer.cpp
index dc6f77212..803c09c8d 100644
--- a/src/multimedia/video/qabstractvideobuffer.cpp
+++ b/src/multimedia/video/qabstractvideobuffer.cpp
@@ -115,7 +115,7 @@ QT_BEGIN_NAMESPACE
*/
QAbstractVideoBuffer::QAbstractVideoBuffer(QVideoFrame::HandleType type, QRhi *rhi)
: m_type(type),
- rhi(rhi)
+ m_rhi(rhi)
{
}
@@ -141,6 +141,14 @@ std::unique_ptr<QRhiTexture> QAbstractVideoBuffer::texture(int /*plane*/) const
return {};
}
+/*!
+ Returns the QRhi instance.
+*/
+QRhi *QAbstractVideoBuffer::rhi() const
+{
+ return m_rhi;
+}
+
/*! \fn uchar *QAbstractVideoBuffer::map(MapMode mode, int *numBytes, int *bytesPerLine)
Independently maps the planes of a video buffer to memory.
diff --git a/src/multimedia/video/qabstractvideobuffer_p.h b/src/multimedia/video/qabstractvideobuffer_p.h
index af9af5b75..7fe0d1ad9 100644
--- a/src/multimedia/video/qabstractvideobuffer_p.h
+++ b/src/multimedia/video/qabstractvideobuffer_p.h
@@ -74,6 +74,7 @@ public:
virtual ~QAbstractVideoBuffer();
QVideoFrame::HandleType handleType() const;
+ QRhi *rhi() const;
struct MapData
{
@@ -94,7 +95,7 @@ public:
virtual QMatrix4x4 externalTextureMatrix() const { return {}; }
protected:
QVideoFrame::HandleType m_type;
- QRhi *rhi = nullptr;
+ QRhi *m_rhi = nullptr;
private:
Q_DISABLE_COPY(QAbstractVideoBuffer)
diff --git a/src/multimedia/video/qvideoframe.cpp b/src/multimedia/video/qvideoframe.cpp
index 6ad601c5e..0894cc466 100644
--- a/src/multimedia/video/qvideoframe.cpp
+++ b/src/multimedia/video/qvideoframe.cpp
@@ -41,7 +41,7 @@
#include "qvideotexturehelper_p.h"
#include "qmemoryvideobuffer_p.h"
-#include "qvideoframeconversionhelper_p.h"
+#include "qvideoframeconverter_p.h"
#include "qvideoframeformat.h"
#include "qpainter.h"
#include <qtextlayout.h>
@@ -56,45 +56,6 @@
#include <QDebug>
QT_BEGIN_NAMESPACE
-static bool pixelFormatHasAlpha[QVideoFrameFormat::NPixelFormats] =
-{
- false, //Format_Invalid,
- true, //Format_ARGB32,
- true, //Format_ARGB32_Premultiplied,
- false, //Format_XRGB32,
- true, //Format_BGRA32,
- true, //Format_BGRA32_Premultiplied,
- false, //Format_BGRX32,
- true, //Format_ABGR32,
- false, //Format_XBGR32,
- true, //Format_RGBA32,
- false, //Format_RGBX32,
-
- true, //Format_AYUV,
- true, //Format_AYUV_Premultiplied,
- false, //Format_YUV420P,
- false, //Format_YUV422P,
- false, //Format_YV12,
- false, //Format_UYVY,
- false, //Format_YUYV,
- false, //Format_NV12,
- false, //Format_NV21,
- false, //Format_IMC1,
- false, //Format_IMC2,
- false, //Format_IMC3,
- false, //Format_IMC4,
- false, //Format_Y8,
- false, //Format_Y16,
-
- false, //Format_P010,
- false, //Format_P016,
-
- false, //Format_SamplerExternalOES
- false, //Format_Jpeg,
- false, //Format_SamplerRect
-
-};
-
class QVideoFramePrivate : public QSharedData
{
@@ -772,41 +733,8 @@ QImage QVideoFrame::toImage() const
if (!d->image.isNull())
return d->image;
- QVideoFrame frame = *this;
-
- if (!frame.map(QVideoFrame::ReadOnly))
- return d->image;
-
- if (frame.pixelFormat() == QVideoFrameFormat::Format_Jpeg) {
- // Load from JPG
- d->image.loadFromData(frame.bits(0), frame.mappedBytes(0), "JPG");
- }
-
- // Need conversion
- else {
- VideoFrameConvertFunc convert = qConverterForFormat(frame.pixelFormat());
- if (!convert) {
- qWarning() << Q_FUNC_INFO << ": unsupported pixel format" << frame.pixelFormat();
- } else {
- auto format = pixelFormatHasAlpha[frame.pixelFormat()] ? QImage::Format_ARGB32_Premultiplied : QImage::Format_RGB32;
- d->image = QImage(frame.width(), frame.height(), format);
- convert(frame, d->image.bits());
- }
- }
-
- frame.unmap();
-
- QTransform t;
- if (mirrored())
- t.scale(-1.f, 1.f);
- if (rotationAngle() != Rotation0)
- t.rotate(float(rotationAngle()));
- if (surfaceFormat().scanLineDirection() != QVideoFrameFormat::TopToBottom)
- t.scale(1.f, -1.f);
-
- if (!t.isIdentity())
- d->image = d->image.transformed(t);
-
+ d->image = qImageFromVideoFrame(*this, rotationAngle(), mirrored(),
+ surfaceFormat().scanLineDirection() != QVideoFrameFormat::TopToBottom);
return d->image;
}
diff --git a/src/multimedia/video/qvideoframeconverter.cpp b/src/multimedia/video/qvideoframeconverter.cpp
new file mode 100644
index 000000000..f93d32def
--- /dev/null
+++ b/src/multimedia/video/qvideoframeconverter.cpp
@@ -0,0 +1,451 @@
+/****************************************************************************
+**
+** Copyright (C) 2022 The Qt Company Ltd.
+** Contact: https://www.qt.io/licensing/
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and The Qt Company. For licensing terms
+** and conditions see https://www.qt.io/terms-conditions. For further
+** information use the contact form at https://www.qt.io/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 3 as published by the Free Software
+** Foundation and appearing in the file LICENSE.LGPL3 included in the
+** packaging of this file. Please review the following information to
+** ensure the GNU Lesser General Public License version 3 requirements
+** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU
+** General Public License version 2.0 or (at your option) the GNU General
+** Public license version 3 or any later version approved by the KDE Free
+** Qt Foundation. The licenses are as published by the Free Software
+** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
+** included in the packaging of this file. Please review the following
+** information to ensure the GNU General Public License requirements will
+** be met: https://www.gnu.org/licenses/gpl-2.0.html and
+** https://www.gnu.org/licenses/gpl-3.0.html.
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include "qvideoframeconverter_p.h"
+#include "qvideoframeconversionhelper_p.h"
+#include "qvideoframeformat.h"
+
+#include <QtGui/private/qrhinull_p.h>
+#if QT_CONFIG(opengl)
+#include <QtGui/private/qrhigles2_p.h>
+#include <QOffscreenSurface>
+#endif
+#if QT_CONFIG(vulkan)
+#include <QtGui/private/qrhivulkan_p.h>
+#endif
+#ifdef Q_OS_WIN
+#include <QtGui/private/qrhid3d11_p.h>
+#endif
+#if defined(Q_OS_MACOS) || defined(Q_OS_IOS)
+#include <QtGui/private/qrhimetal_p.h>
+#endif
+
+#include <QtCore/qcoreapplication.h>
+#include <QtCore/qsize.h>
+#include <QtCore/qhash.h>
+#include <QtCore/qfile.h>
+#include <QtCore/qthreadstorage.h>
+#include <QtGui/qimage.h>
+#include <qpa/qplatformintegration.h>
+#include <private/qvideotexturehelper_p.h>
+#include <private/qabstractvideobuffer_p.h>
+#include <private/qguiapplication_p.h>
+#include <private/qrhi_p.h>
+
+
+QT_BEGIN_NAMESPACE
+
+Q_LOGGING_CATEGORY(qLcVideoFrameConverter, "qt.multimedia.video.frameconverter")
+
+namespace {
+
// Per-thread conversion state, held in a QThreadStorage so that every thread
// gets its own QRhi (a QRhi is only usable on the thread that created it).
struct State
{
    QRhi *rhi = nullptr;
#if QT_CONFIG(opengl)
    // Owned offscreen surface passed to the GLES2 backend as fallbackSurface.
    QOffscreenSurface *fallbackSurface = nullptr;
#endif
    // Set once RHI creation has failed, so we don't retry for every frame.
    bool cpuOnly = false;
    ~State() {
        // Delete the QRhi first — the GLES2 backend can still reference
        // fallbackSurface (it was handed in via QRhiGles2InitParams).
        delete rhi;
#if QT_CONFIG(opengl)
        delete fallbackSurface;
#endif
    }
};
+
+}
+
// One State per thread (see State above).
static QThreadStorage<State> g_state;
// Cache of deserialized baked shaders, keyed by resource file name.
// NOTE(review): this hash is process-global but is read/written without
// synchronization; confirm conversions are confined to a single thread at a
// time, or guard the cache.
static QHash<QString, QShader> g_shaderCache;

// Fullscreen quad as a triangle strip, with one vertex-data variant per
// clockwise rotation angle. Each vertex is { x, y, u, v }; rotation is
// baked into the texture coordinates.
static const float g_quad[] = {
    // Rotation 0 CW
    1.f, -1.f, 1.f, 1.f,
    1.f, 1.f, 1.f, 0.f,
    -1.f, -1.f, 0.f, 1.f,
    -1.f, 1.f, 0.f, 0.f,
    // Rotation 90 CW
    1.f, -1.f, 1.f, 0.f,
    1.f, 1.f, 0.f, 0.f,
    -1.f, -1.f, 1.f, 1.f,
    -1.f, 1.f, 0.f, 1.f,
    // Rotation 180 CW
    1.f, -1.f, 0.f, 0.f,
    1.f, 1.f, 0.f, 1.f,
    -1.f, -1.f, 1.f, 0.f,
    -1.f, 1.f, 1.f, 1.f,
    // Rotation 270 CW
    1.f, -1.f, 0.f, 1.f,
    1.f, 1.f, 1.f, 1.f,
    -1.f, -1.f, 0.f, 0.f,
    -1.f, 1.f, 1.f, 0.f,
};
+
+static bool pixelFormatHasAlpha(QVideoFrameFormat::PixelFormat format)
+{
+ switch (format) {
+ case QVideoFrameFormat::Format_ARGB8888:
+ case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
+ case QVideoFrameFormat::Format_BGRA8888:
+ case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
+ case QVideoFrameFormat::Format_ABGR8888:
+ case QVideoFrameFormat::Format_RGBA8888:
+ case QVideoFrameFormat::Format_AYUV:
+ case QVideoFrameFormat::Format_AYUV_Premultiplied:
+ return true;
+ default:
+ return false;
+ }
+};
+
+static QShader getShader(const QString &name)
+{
+ QShader shader = g_shaderCache.value(name);
+ if (shader.isValid())
+ return shader;
+
+ QFile f(name);
+ if (f.open(QIODevice::ReadOnly))
+ shader = QShader::fromSerialized(f.readAll());
+
+ if (shader.isValid())
+ g_shaderCache[name] = shader;
+
+ return shader;
+}
+
+static void rasterTransform(QImage &image, QVideoFrame::RotationAngle rotation,
+ bool mirrorX, bool mirrorY)
+{
+ QTransform t;
+ if (mirrorX)
+ t.scale(-1.f, 1.f);
+ if (rotation != QVideoFrame::Rotation0)
+ t.rotate(float(rotation));
+ if (mirrorY)
+ t.scale(1.f, -1.f);
+ if (!t.isIdentity())
+ image = image.transformed(t);
+}
+
+static void imageCleanupHandler(void *info)
+{
+ QByteArray *imageData = reinterpret_cast<QByteArray *>(info);
+ delete imageData;
+}
+
/*!
    \internal
    Lazily creates the thread-local QRhi used for shader-based conversion.
    \a backend is the backend of the frame's own QRhi when it has one
    (QRhi::Null means "no preference"); only a matching backend is created so
    the frame's textures can be used. Returns null when no RHI backend could
    be created, in which case the caller uses CPU conversion.
*/
static QRhi *initializeRHI(QRhi::Implementation backend)
{
    // Already initialized on this thread, or initialization already failed.
    if (g_state.localData().rhi || g_state.localData().cpuOnly)
        return g_state.localData().rhi;

    if (QGuiApplicationPrivate::platformIntegration()->hasCapability(QPlatformIntegration::RhiBasedRendering)) {

#if defined(Q_OS_MACOS) || defined(Q_OS_IOS)
        // Native backend first when the frame doesn't dictate another one.
        if (backend == QRhi::Metal || backend == QRhi::Null) {
            QRhiMetalInitParams params;
            g_state.localData().rhi = QRhi::create(QRhi::Metal, &params);
        }
#endif

#if defined(Q_OS_WIN)
        if (backend == QRhi::D3D11 || backend == QRhi::Null) {
            QRhiD3D11InitParams params;
            g_state.localData().rhi = QRhi::create(QRhi::D3D11, &params);
        }
#endif

#if QT_CONFIG(opengl)
        // OpenGL fallback, only if no native backend was created above.
        if (!g_state.localData().rhi && (backend == QRhi::OpenGLES2 || backend == QRhi::Null)) {
            if (QGuiApplicationPrivate::platformIntegration()->hasCapability(QPlatformIntegration::OpenGL)
                && QGuiApplicationPrivate::platformIntegration()->hasCapability(QPlatformIntegration::RasterGLSurface)
                && !QCoreApplication::testAttribute(Qt::AA_ForceRasterWidgets)) {

                // An offscreen surface lets us make a GL context current
                // without a window; owned by State and deleted with it.
                g_state.localData().fallbackSurface = QRhiGles2InitParams::newFallbackSurface();
                QRhiGles2InitParams params;
                params.fallbackSurface = g_state.localData().fallbackSurface;
                g_state.localData().rhi = QRhi::create(QRhi::OpenGLES2, &params);
            }
        }
#endif
    }

    if (!g_state.localData().rhi) {
        // Remember the failure so later frames go straight to the CPU path.
        g_state.localData().cpuOnly = true;
        qWarning() << Q_FUNC_INFO << ": No RHI backend. Using CPU conversion.";
    }

    return g_state.localData().rhi;
}
+
/*!
    \internal
    Uploads the frame's planes into \a textures via the update batch \a rub,
    and (re)builds the shader resource bindings and graphics pipeline used to
    render the frame. Returns \c false when the per-format shaders cannot be
    loaded; the pipeline is left uncreated in that case.
*/
static bool updateTextures(QRhi *rhi,
                           QRhiResourceUpdateBatch *rub,
                           std::unique_ptr<QRhiBuffer> &uniformBuffer,
                           std::unique_ptr<QRhiSampler> &textureSampler,
                           std::unique_ptr<QRhiShaderResourceBindings> &shaderResourceBindings,
                           std::unique_ptr<QRhiGraphicsPipeline> &graphicsPipeline,
                           std::unique_ptr<QRhiRenderPassDescriptor> &renderPass,
                           const QVideoFrame &frame,
                           std::unique_ptr<QRhiTexture> (&textures)[QVideoTextureHelper::TextureDescription::maxPlanes])
{
    auto pixelFormat = frame.pixelFormat();

    auto textureDesc = QVideoTextureHelper::textureDescription(pixelFormat);

    for (int i = 0; i < QVideoTextureHelper::TextureDescription::maxPlanes; ++i)
        QVideoTextureHelper::updateRhiTexture(frame, rhi, rub, i, textures[i]);

    // Binding 0 is the uniform buffer; bindings 1..nplanes are the plane
    // textures (at most maxPlanes, hence the fixed-size array of 4).
    QRhiShaderResourceBinding bindings[4];
    auto *b = bindings;
    *b++ = QRhiShaderResourceBinding::uniformBuffer(0, QRhiShaderResourceBinding::VertexStage | QRhiShaderResourceBinding::FragmentStage,
                                                    uniformBuffer.get());
    for (int i = 0; i < textureDesc->nplanes; ++i)
        *b++ = QRhiShaderResourceBinding::sampledTexture(i + 1, QRhiShaderResourceBinding::FragmentStage,
                                                         textures[i].get(), textureSampler.get());
    shaderResourceBindings->setBindings(bindings, b);
    shaderResourceBindings->create();

    graphicsPipeline.reset(rhi->newGraphicsPipeline());
    graphicsPipeline->setTopology(QRhiGraphicsPipeline::TriangleStrip);

    // Shader pair is chosen per pixel format; the fragment shader does the
    // actual color-space/packing conversion.
    QShader vs = getShader(QVideoTextureHelper::vertexShaderFileName(pixelFormat));
    if (!vs.isValid())
        return false;

    QShader fs = getShader(QVideoTextureHelper::fragmentShaderFileName(pixelFormat));
    if (!fs.isValid())
        return false;

    graphicsPipeline->setShaderStages({
        { QRhiShaderStage::Vertex, vs },
        { QRhiShaderStage::Fragment, fs }
    });

    // Interleaved vertex layout matching g_quad: vec2 position, vec2 texcoord.
    QRhiVertexInputLayout inputLayout;
    inputLayout.setBindings({
        { 4 * sizeof(float) }
    });
    inputLayout.setAttributes({
        { 0, 0, QRhiVertexInputAttribute::Float2, 0 },
        { 0, 1, QRhiVertexInputAttribute::Float2, 2 * sizeof(float) }
    });

    graphicsPipeline->setVertexInputLayout(inputLayout);
    graphicsPipeline->setShaderResourceBindings(shaderResourceBindings.get());
    graphicsPipeline->setRenderPassDescriptor(renderPass.get());
    graphicsPipeline->create();

    return true;
}
+
+static QImage convertJPEG(const QVideoFrame &frame, QVideoFrame::RotationAngle rotation, bool mirrorX, bool mirrorY)
+{
+ QVideoFrame varFrame = frame;
+ if (!varFrame.map(QVideoFrame::ReadOnly)) {
+ qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": frame mapping failed";
+ return {};
+ }
+ QImage image;
+ image.loadFromData(varFrame.bits(0), varFrame.mappedBytes(0), "JPG");
+ varFrame.unmap();
+ rasterTransform(image, rotation, mirrorX, mirrorY);
+ return image;
+}
+
+static QImage convertCPU(const QVideoFrame &frame, QVideoFrame::RotationAngle rotation, bool mirrorX, bool mirrorY)
+{
+ VideoFrameConvertFunc convert = qConverterForFormat(frame.pixelFormat());
+ if (!convert) {
+ qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": unsupported pixel format" << frame.pixelFormat();
+ return {};
+ } else {
+ QVideoFrame varFrame = frame;
+ if (!varFrame.map(QVideoFrame::ReadOnly)) {
+ qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": frame mapping failed";
+ return {};
+ }
+ auto format = pixelFormatHasAlpha(varFrame.pixelFormat()) ? QImage::Format_ARGB32_Premultiplied : QImage::Format_RGB32;
+ QImage image = QImage(varFrame.width(), varFrame.height(), format);
+ convert(varFrame, image.bits());
+ varFrame.unmap();
+ rasterTransform(image, rotation, mirrorX, mirrorY);
+ return image;
+ }
+}
+
+QImage qImageFromVideoFrame(const QVideoFrame &frame, QVideoFrame::RotationAngle rotation, bool mirrorX, bool mirrorY)
+{
+#ifdef Q_OS_DARWIN
+ QMacAutoReleasePool releasePool;
+#endif
+
+ if (!g_state.hasLocalData())
+ g_state.setLocalData({});
+
+ std::unique_ptr<QRhiRenderPassDescriptor> renderPass;
+ std::unique_ptr<QRhiBuffer> vertexBuffer;
+ std::unique_ptr<QRhiBuffer> uniformBuffer;
+ std::unique_ptr<QRhiTexture> targetTexture;
+ std::unique_ptr<QRhiTextureRenderTarget> renderTarget;
+ std::unique_ptr<QRhiSampler> textureSampler;
+ std::unique_ptr<QRhiShaderResourceBindings> shaderResourceBindings;
+ std::unique_ptr<QRhiGraphicsPipeline> graphicsPipeline;
+ std::unique_ptr<QRhiTexture> frameTextures[QVideoTextureHelper::TextureDescription::maxPlanes];
+
+ if (frame.size().isEmpty() || frame.pixelFormat() == QVideoFrameFormat::Format_Invalid)
+ return {};
+
+ if (frame.pixelFormat() == QVideoFrameFormat::Format_Jpeg)
+ return convertJPEG(frame, rotation, mirrorX, mirrorY);
+
+ QRhi *rhi = nullptr;
+ QRhi::Implementation backend = QRhi::Null;
+
+ if (frame.videoBuffer()) {
+ rhi = frame.videoBuffer()->rhi();
+ if (rhi)
+ backend = rhi->backend();
+ }
+
+ if (!rhi || rhi->thread() != QThread::currentThread())
+ rhi = initializeRHI(backend);
+
+ if (!rhi || rhi->isRecordingFrame())
+ return convertCPU(frame, rotation, mirrorX, mirrorY);
+
+ // Do conversion using shaders
+
+ const int rotationIndex = (rotation / 90) % 4;
+
+ QSize frameSize = frame.size();
+ if (rotationIndex % 2)
+ frameSize.transpose();
+
+ vertexBuffer.reset(rhi->newBuffer(QRhiBuffer::Immutable, QRhiBuffer::VertexBuffer, sizeof(g_quad)));
+ vertexBuffer->create();
+
+ uniformBuffer.reset(rhi->newBuffer(QRhiBuffer::Dynamic, QRhiBuffer::UniformBuffer, 64 + 64 + 4 + 4));
+ uniformBuffer->create();
+
+ textureSampler.reset(rhi->newSampler(QRhiSampler::Linear, QRhiSampler::Linear, QRhiSampler::None,
+ QRhiSampler::ClampToEdge, QRhiSampler::ClampToEdge));
+ textureSampler->create();
+
+ shaderResourceBindings.reset(rhi->newShaderResourceBindings());
+
+ targetTexture.reset(rhi->newTexture(QRhiTexture::RGBA8, frameSize, 1, QRhiTexture::RenderTarget));
+ if (!targetTexture->create()) {
+ qCDebug(qLcVideoFrameConverter) << "Failed to create target texture. Using CPU conversion.";
+ return convertCPU(frame, rotation, mirrorX, mirrorY);
+ }
+
+ renderTarget.reset(rhi->newTextureRenderTarget({ { targetTexture.get() } }));
+ renderPass.reset(renderTarget->newCompatibleRenderPassDescriptor());
+ renderTarget->setRenderPassDescriptor(renderPass.get());
+ renderTarget->create();
+
+ QRhiCommandBuffer *cb = nullptr;
+ QRhi::FrameOpResult r = rhi->beginOffscreenFrame(&cb);
+ if (r != QRhi::FrameOpSuccess) {
+ qCDebug(qLcVideoFrameConverter) << "Failed to set up offscreen frame. Using CPU conversion.";
+ return convertCPU(frame, rotation, mirrorX, mirrorY);
+ }
+
+ QRhiResourceUpdateBatch *rub = rhi->nextResourceUpdateBatch();
+
+ rub->uploadStaticBuffer(vertexBuffer.get(), g_quad);
+
+ if (!updateTextures(rhi, rub, uniformBuffer, textureSampler, shaderResourceBindings,
+ graphicsPipeline, renderPass, frame, frameTextures)) {
+ qCDebug(qLcVideoFrameConverter) << "Failed to update textures. Using CPU conversion.";
+ return convertCPU(frame, rotation, mirrorX, mirrorY);
+ }
+
+ float xScale = mirrorX ? -1.0 : 1.0;
+ float yScale = mirrorY ? -1.0 : 1.0;
+
+ if (rhi->isYUpInFramebuffer())
+ yScale = -yScale;
+
+ QMatrix4x4 transform;
+ transform.scale(xScale, yScale);
+
+ QByteArray uniformData(64 + 64 + 4 + 4, Qt::Uninitialized);
+ QVideoTextureHelper::updateUniformData(&uniformData, frame.surfaceFormat(), frame, transform, 1.f);
+ rub->updateDynamicBuffer(uniformBuffer.get(), 0, uniformData.size(), uniformData.constData());
+
+ cb->beginPass(renderTarget.get(), Qt::black, { 1.0f, 0 }, rub);
+ cb->setGraphicsPipeline(graphicsPipeline.get());
+
+ cb->setViewport({ 0, 0, float(frameSize.width()), float(frameSize.height()) });
+ cb->setShaderResources(shaderResourceBindings.get());
+
+ quint32 vertexOffset = quint32(sizeof(float)) * 16 * rotationIndex;
+ const QRhiCommandBuffer::VertexInput vbufBinding(vertexBuffer.get(), vertexOffset);
+ cb->setVertexInput(0, 1, &vbufBinding);
+ cb->draw(4);
+
+ QRhiReadbackDescription readDesc(targetTexture.get());
+ QRhiReadbackResult readResult;
+ bool readCompleted = false;
+
+ readResult.completed = [&readCompleted] { readCompleted = true; };
+
+ rub = rhi->nextResourceUpdateBatch();
+ rub->readBackTexture(readDesc, &readResult);
+
+ cb->endPass(rub);
+
+ rhi->endOffscreenFrame();
+
+ if (!readCompleted) {
+ qCDebug(qLcVideoFrameConverter) << "Failed to read back texture. Using CPU conversion.";
+ return convertCPU(frame, rotation, mirrorX, mirrorY);
+ }
+
+ QByteArray *imageData = new QByteArray(readResult.data);
+
+ return QImage(reinterpret_cast<const uchar *>(imageData->constData()),
+ readResult.pixelSize.width(), readResult.pixelSize.height(),
+ QImage::Format_RGBA8888_Premultiplied, imageCleanupHandler, imageData);
+}
+
+QT_END_NAMESPACE
+
diff --git a/src/multimedia/video/qvideoframeconverter_p.h b/src/multimedia/video/qvideoframeconverter_p.h
new file mode 100644
index 000000000..d912728fb
--- /dev/null
+++ b/src/multimedia/video/qvideoframeconverter_p.h
@@ -0,0 +1,63 @@
+/****************************************************************************
+**
+** Copyright (C) 2022 The Qt Company Ltd.
+** Contact: https://www.qt.io/licensing/
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and The Qt Company. For licensing terms
+** and conditions see https://www.qt.io/terms-conditions. For further
+** information use the contact form at https://www.qt.io/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 3 as published by the Free Software
+** Foundation and appearing in the file LICENSE.LGPL3 included in the
+** packaging of this file. Please review the following information to
+** ensure the GNU Lesser General Public License version 3 requirements
+** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU
+** General Public License version 2.0 or (at your option) the GNU General
+** Public license version 3 or any later version approved by the KDE Free
+** Qt Foundation. The licenses are as published by the Free Software
+** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
+** included in the packaging of this file. Please review the following
+** information to ensure the GNU General Public License requirements will
+** be met: https://www.gnu.org/licenses/gpl-2.0.html and
+** https://www.gnu.org/licenses/gpl-3.0.html.
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#ifndef QVIDEOFRAMECONVERTER_H
+#define QVIDEOFRAMECONVERTER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qvideoframe.h>
+
+QT_BEGIN_NAMESPACE
+
// Converts a video frame to a QImage, applying the given rotation and
// mirroring to the result (implemented with GPU shaders where available,
// with a CPU fallback).
Q_MULTIMEDIA_EXPORT QImage qImageFromVideoFrame(const QVideoFrame &frame, QVideoFrame::RotationAngle rotation = QVideoFrame::Rotation0, bool mirrorX = false, bool mirrorY = false);
+
+QT_END_NAMESPACE
+
+#endif
+
diff --git a/src/plugins/multimedia/android/common/qandroidvideooutput.cpp b/src/plugins/multimedia/android/common/qandroidvideooutput.cpp
index 8187aa1c8..99710a1c4 100644
--- a/src/plugins/multimedia/android/common/qandroidvideooutput.cpp
+++ b/src/plugins/multimedia/android/common/qandroidvideooutput.cpp
@@ -123,10 +123,10 @@ static QMatrix4x4 extTransformMatrix(AndroidSurfaceTexture *surfaceTexture)
quint64 AndroidTextureVideoBuffer::textureHandle(int plane) const
{
- if (plane != 0 || !rhi || !m_output->m_nativeSize.isValid())
+ if (plane != 0 || !m_rhi || !m_output->m_nativeSize.isValid())
return 0;
- m_output->ensureExternalTexture(rhi);
+ m_output->ensureExternalTexture(m_rhi);
m_output->m_surfaceTexture->updateTexImage();
m_externalMatrix = extTransformMatrix(m_output->m_surfaceTexture);
return m_output->m_externalTex->nativeTexture().object;
diff --git a/src/plugins/multimedia/darwin/avfvideobuffer.mm b/src/plugins/multimedia/darwin/avfvideobuffer.mm
index d6b152517..d567efbfd 100644
--- a/src/plugins/multimedia/darwin/avfvideobuffer.mm
+++ b/src/plugins/multimedia/darwin/avfvideobuffer.mm
@@ -59,7 +59,7 @@ AVFVideoBuffer::AVFVideoBuffer(AVFVideoSinkInterface *sink, CVImageBufferRef buf
m_buffer(buffer)
{
// m_type = QVideoFrame::NoHandle;
-// qDebug() << "RHI" << rhi;
+// qDebug() << "RHI" << m_rhi;
CVPixelBufferRetain(m_buffer);
m_pixelFormat = fromCVVideoPixelFormat(CVPixelBufferGetPixelFormatType(m_buffer));
}
@@ -158,12 +158,12 @@ quint64 AVFVideoBuffer::textureHandle(int plane) const
{
auto *textureDescription = QVideoTextureHelper::textureDescription(m_pixelFormat);
int bufferPlanes = CVPixelBufferGetPlaneCount(m_buffer);
-// qDebug() << "texture handle" << plane << rhi << (rhi->backend() == QRhi::Metal) << bufferPlanes;
+// qDebug() << "texture handle" << plane << m_rhi << (m_rhi->backend() == QRhi::Metal) << bufferPlanes;
if (plane > 0 && plane >= bufferPlanes)
return 0;
- if (!rhi)
+ if (!m_rhi)
return 0;
- if (rhi->backend() == QRhi::Metal) {
+ if (m_rhi->backend() == QRhi::Metal) {
if (!cvMetalTexture[plane]) {
size_t width = CVPixelBufferGetWidth(m_buffer);
size_t height = CVPixelBufferGetHeight(m_buffer);
@@ -190,7 +190,7 @@ quint64 AVFVideoBuffer::textureHandle(int plane) const
// Get a Metal texture using the CoreVideo Metal texture reference.
// qDebug() << " -> " << quint64(CVMetalTextureGetTexture(cvMetalTexture[plane]));
return cvMetalTexture[plane] ? quint64(CVMetalTextureGetTexture(cvMetalTexture[plane])) : 0;
- } else if (rhi->backend() == QRhi::OpenGLES2) {
+ } else if (m_rhi->backend() == QRhi::OpenGLES2) {
#if QT_CONFIG(opengl)
#ifdef Q_OS_MACOS
CVOpenGLTextureCacheFlush(sink->cvOpenGLTextureCache, 0);
diff --git a/src/plugins/multimedia/gstreamer/common/qgstvideobuffer.cpp b/src/plugins/multimedia/gstreamer/common/qgstvideobuffer.cpp
index 8d57ea1c9..0d22999b2 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstvideobuffer.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstvideobuffer.cpp
@@ -120,8 +120,8 @@ QGstVideoBuffer::~QGstVideoBuffer()
break;
}
#if QT_CONFIG(gstreamer_gl)
- if (rhi) {
- rhi->makeThreadLocalNativeContextCurrent();
+ if (m_rhi) {
+ m_rhi->makeThreadLocalNativeContextCurrent();
QOpenGLFunctions functions(glContext);
functions.glDeleteTextures(planes, m_textures);
}
@@ -266,7 +266,7 @@ fourccFromVideoInfo(const GstVideoInfo * info, int plane)
void QGstVideoBuffer::mapTextures()
{
- if (!rhi)
+ if (!m_rhi)
return;
#if QT_CONFIG(gstreamer_gl)
@@ -300,7 +300,7 @@ void QGstVideoBuffer::mapTextures()
Q_ASSERT(eglDisplay);
Q_ASSERT(eglImageTargetTexture2D);
- auto *nativeHandles = static_cast<const QRhiGles2NativeHandles *>(rhi->nativeHandles());
+ auto *nativeHandles = static_cast<const QRhiGles2NativeHandles *>(m_rhi->nativeHandles());
glContext = nativeHandles->context;
if (!glContext) {
qWarning() << "no GL context";
diff --git a/src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp b/src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp
index 88bb8ebb8..96c7836cf 100644
--- a/src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp
+++ b/src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp
@@ -161,7 +161,7 @@ public:
std::unique_ptr<QRhiTexture> texture(int plane) const override
{
- if (!rhi || !m_d2d11tex || plane > 0)
+ if (!m_rhi || !m_d2d11tex || plane > 0)
return {};
D3D11_TEXTURE2D_DESC desc = {};
m_d2d11tex->GetDesc(&desc);
@@ -173,7 +173,7 @@ public:
else
return {};
- std::unique_ptr<QRhiTexture> tex(rhi->newTexture(format, QSize{int(desc.Width), int(desc.Height)}, 1, {}));
+ std::unique_ptr<QRhiTexture> tex(m_rhi->newTexture(format, QSize{int(desc.Width), int(desc.Height)}, 1, {}));
tex->createFrom({quint64(m_d2d11tex.get()), 0});
return tex;
}
@@ -243,6 +243,7 @@ public:
m_d3dglHandle = m_wgl.wglDXOpenDeviceNV(m_device.get());
if (!m_d3dglHandle) {
+ m_texture.reset();
qCDebug(qLcEvrD3DPresentEngine) << "Failed to open D3D device";
return;
}
@@ -274,7 +275,7 @@ public:
std::unique_ptr<QRhiTexture> texture(int plane) const override
{
- if (!rhi || !m_texture || plane > 0)
+ if (!m_rhi || !m_texture || plane > 0)
return {};
D3DSURFACE_DESC desc;
@@ -287,7 +288,7 @@ public:
else
return {};
- std::unique_ptr<QRhiTexture> tex(rhi->newTexture(format, QSize{int(desc.Width), int(desc.Height)}, 1, {}));
+ std::unique_ptr<QRhiTexture> tex(m_rhi->newTexture(format, QSize{int(desc.Width), int(desc.Height)}, 1, {}));
tex->createFrom({quint64(m_glTextureName), 0});
return tex;
}