| author | Laszlo Agocs <laszlo.agocs@theqtcompany.com> | 2015-01-08 14:32:41 +0100 |
|---|---|---|
| committer | Yoann Lopes <yoann.lopes@theqtcompany.com> | 2015-02-03 14:41:39 +0000 |
| commit | 3e94b7ce2d8166767ec47425d2cefbc77cb5fde2 (patch) | |
| tree | c27061139c94b04d8237d4fabba05801a39f9369 /src/plugins | |
| parent | 2f494446388e314286506eae335439b7bbc75736 (diff) | |
| download | qtmultimedia-3e94b7ce2d8166767ec47425d2cefbc77cb5fde2.tar.gz | |
Add video filtering support to VideoOutput
Add the QAbstractVideoFilter base class and integrate it with VideoOutput.
This can be used to perform arbitrary filtering or image processing
on the frames of a VideoOutput element's video stream, right before
the video node hands the OpenGL texture to the scenegraph.
This opens up the possibility of integrating computer vision
frameworks or accelerated image processing with Qt Quick applications
that display video streams via Qt Multimedia.
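To make the hook concrete, here is a minimal sketch of a filter built on this
API, assuming the QAbstractVideoFilter / QVideoFilterRunnable split as it
shipped with this feature in Qt 5.5; the class names and the (empty) CPU
processing step are illustrative only:

// Sketch only: a QVideoFilterRunnable does the per-frame work, the
// QAbstractVideoFilter subclass is the QML-facing factory for it.
#include <QAbstractVideoFilter>
#include <QVideoFilterRunnable>
#include <QVideoFrame>
#include <QVideoSurfaceFormat>

class MyFilterRunnable : public QVideoFilterRunnable
{
public:
    // Invoked on the render thread for every frame, right before the video
    // node turns the frame into an OpenGL texture for the scenegraph.
    QVideoFrame run(QVideoFrame *input, const QVideoSurfaceFormat &surfaceFormat,
                    RunFlags flags) Q_DECL_OVERRIDE
    {
        Q_UNUSED(surfaceFormat);
        Q_UNUSED(flags);

        // CPU-side example: map the frame and read or modify the pixels.
        // A real filter could hand the data to OpenCV, OpenCL, etc. instead.
        if (input->map(QAbstractVideoBuffer::ReadWrite)) {
            // ... process input->bits() here ...
            input->unmap();
        }

        // Returning the (possibly modified) input frame passes it on to the
        // video node; a filter may also return an entirely new QVideoFrame.
        return *input;
    }
};

class MyFilter : public QAbstractVideoFilter
{
    Q_OBJECT
public:
    QVideoFilterRunnable *createFilterRunnable() Q_DECL_OVERRIDE
    {
        return new MyFilterRunnable;
    }
};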
Conceptually it is somewhat similar to QVideoProbe; however, this
approach allows modifying the frame in real time, is tightly
integrated with the scenegraph video node, and targets Qt Quick:
setting up the filter and processing the results of the computations
happen entirely in QML.
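Wiring this up from QML would then only require registering the type; a sketch
reusing the hypothetical MyFilter class from above, and assuming the filters
list property that VideoOutput gains with this feature (the import URI and
main.qml file are likewise hypothetical):

// Sketch only: expose the C++ filter to QML and attach it to a VideoOutput.
#include <QGuiApplication>
#include <QQmlApplicationEngine>
#include <QtQml>

int main(int argc, char **argv)
{
    QGuiApplication app(argc, argv);

    // Make the C++ filter instantiable from QML.
    qmlRegisterType<MyFilter>("MyFilters", 1, 0, "MyFilter");

    // main.qml can then create the filter and attach it to a VideoOutput:
    //
    //     MyFilter { id: myFilter }
    //     VideoOutput { source: camera; filters: [ myFilter ] }
    //
    QQmlApplicationEngine engine(QUrl(QStringLiteral("qrc:/main.qml")));
    return app.exec();
}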
[ChangeLog] Added QAbstractVideoFilter that serves as a base class for QML
video filtering elements that integrate compute, vision, and image processing
frameworks with VideoOutput.
Change-Id: Ice1483f8c2daec5a43536978627a7bbb64549480
Reviewed-by: Yoann Lopes <yoann.lopes@theqtcompany.com>
Diffstat (limited to 'src/plugins')
8 files changed, 115 insertions, 52 deletions
diff --git a/src/plugins/android/videonode/qandroidsgvideonode.cpp b/src/plugins/android/videonode/qandroidsgvideonode.cpp
index 4e1b2a89b..f094c0d8d 100644
--- a/src/plugins/android/videonode/qandroidsgvideonode.cpp
+++ b/src/plugins/android/videonode/qandroidsgvideonode.cpp
@@ -165,7 +165,7 @@ QAndroidSGVideoNode::~QAndroidSGVideoNode()
     m_frame = QVideoFrame();
 }
 
-void QAndroidSGVideoNode::setCurrentFrame(const QVideoFrame &frame)
+void QAndroidSGVideoNode::setCurrentFrame(const QVideoFrame &frame, FrameFlags)
 {
     QMutexLocker lock(&m_frameMutex);
     m_frame = frame;
diff --git a/src/plugins/android/videonode/qandroidsgvideonode.h b/src/plugins/android/videonode/qandroidsgvideonode.h
index f3f838331..0c50d8cf9 100644
--- a/src/plugins/android/videonode/qandroidsgvideonode.h
+++ b/src/plugins/android/videonode/qandroidsgvideonode.h
@@ -47,8 +47,9 @@ public:
     QAndroidSGVideoNode(const QVideoSurfaceFormat &format);
     ~QAndroidSGVideoNode();
 
-    void setCurrentFrame(const QVideoFrame &frame);
+    void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags);
     QVideoFrame::PixelFormat pixelFormat() const { return m_format.pixelFormat(); }
+    QAbstractVideoBuffer::HandleType handleType() const { return QAbstractVideoBuffer::GLTextureHandle; }
 
     void preprocess();
diff --git a/src/plugins/videonode/egl/qsgvideonode_egl.cpp b/src/plugins/videonode/egl/qsgvideonode_egl.cpp
index 4e63c0dba..15af8b5e7 100644
--- a/src/plugins/videonode/egl/qsgvideonode_egl.cpp
+++ b/src/plugins/videonode/egl/qsgvideonode_egl.cpp
@@ -187,7 +187,7 @@ QSGVideoNode_EGL::~QSGVideoNode_EGL()
 {
 }
 
-void QSGVideoNode_EGL::setCurrentFrame(const QVideoFrame &frame)
+void QSGVideoNode_EGL::setCurrentFrame(const QVideoFrame &frame, FrameFlags)
 {
     EGLImageKHR image = frame.handle().value<void *>();
     m_material.setImage(image);
@@ -199,6 +199,11 @@ QVideoFrame::PixelFormat QSGVideoNode_EGL::pixelFormat() const
     return m_pixelFormat;
 }
 
+QAbstractVideoBuffer::HandleType QSGVideoNode_EGL::handleType() const
+{
+    return QAbstractVideoBuffer::EGLImageHandle;
+}
+
 static bool isExtensionSupported()
 {
     static const bool supported = eglGetProcAddress("glEGLImageTargetTexture2DOES");
diff --git a/src/plugins/videonode/egl/qsgvideonode_egl.h b/src/plugins/videonode/egl/qsgvideonode_egl.h
index b6f70abfb..7e9cfe871 100644
--- a/src/plugins/videonode/egl/qsgvideonode_egl.h
+++ b/src/plugins/videonode/egl/qsgvideonode_egl.h
@@ -74,8 +74,9 @@ public:
     QSGVideoNode_EGL(const QVideoSurfaceFormat &format);
    ~QSGVideoNode_EGL();
 
-    void setCurrentFrame(const QVideoFrame &frame);
+    void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags);
     QVideoFrame::PixelFormat pixelFormat() const;
+    QAbstractVideoBuffer::HandleType handleType() const;
 
 private:
     QSGVideoMaterial_EGL m_material;
diff --git a/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp b/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp
index 948e9f9c8..073ffb34e 100644
--- a/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp
+++ b/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp
@@ -51,7 +51,9 @@ QSGVivanteVideoMaterial::QSGVivanteVideoMaterial() :
     mWidth(0),
     mHeight(0),
     mFormat(QVideoFrame::Format_Invalid),
-    mCurrentTexture(0)
+    mCurrentTexture(0),
+    mMappable(true),
+    mTexDirectTexture(0)
 {
 #ifdef QT_VIVANTE_VIDEO_DEBUG
     qDebug() << Q_FUNC_INFO;
@@ -62,12 +64,7 @@ QSGVivanteVideoMaterial::QSGVivanteVideoMaterial() :
 
 QSGVivanteVideoMaterial::~QSGVivanteVideoMaterial()
 {
-    Q_FOREACH (GLuint id, mBitsToTextureMap.values()) {
-#ifdef QT_VIVANTE_VIDEO_DEBUG
-        qDebug() << "delete texture: " << id;
-#endif
-        glDeleteTextures(1, &id);
-    }
+    clearTextures();
 }
 
 QSGMaterialType *QSGVivanteVideoMaterial::type() const {
@@ -94,9 +91,11 @@ void QSGVivanteVideoMaterial::updateBlending() {
     setFlag(Blending, qFuzzyCompare(mOpacity, qreal(1.0)) ? false : true);
 }
 
-void QSGVivanteVideoMaterial::setCurrentFrame(const QVideoFrame &frame) {
+void QSGVivanteVideoMaterial::setCurrentFrame(const QVideoFrame &frame, QSGVideoNode::FrameFlags flags)
+{
     QMutexLocker lock(&mFrameMutex);
     mNextFrame = frame;
+    mMappable = !flags.testFlag(QSGVideoNode::FrameFiltered);
 
 #ifdef QT_VIVANTE_VIDEO_DEBUG
     qDebug() << Q_FUNC_INFO << " new frame: " << frame;
@@ -122,6 +121,22 @@ void QSGVivanteVideoMaterial::bind()
     glBindTexture(GL_TEXTURE_2D, mCurrentTexture);
 }
 
+void QSGVivanteVideoMaterial::clearTextures()
+{
+    Q_FOREACH (GLuint id, mBitsToTextureMap.values()) {
+#ifdef QT_VIVANTE_VIDEO_DEBUG
+        qDebug() << "delete texture: " << id;
+#endif
+        glDeleteTextures(1, &id);
+    }
+    mBitsToTextureMap.clear();
+
+    if (mTexDirectTexture) {
+        glDeleteTextures(1, &mTexDirectTexture);
+        mTexDirectTexture = 0;
+    }
+}
+
 GLuint QSGVivanteVideoMaterial::vivanteMapping(QVideoFrame vF)
 {
     QOpenGLContext *glcontext = QOpenGLContext::currentContext();
@@ -130,14 +145,16 @@ GLuint QSGVivanteVideoMaterial::vivanteMapping(QVideoFrame vF)
         return 0;
     }
 
+    static PFNGLTEXDIRECTVIVPROC glTexDirectVIV_LOCAL = 0;
     static PFNGLTEXDIRECTVIVMAPPROC glTexDirectVIVMap_LOCAL = 0;
     static PFNGLTEXDIRECTINVALIDATEVIVPROC glTexDirectInvalidateVIV_LOCAL = 0;
-    if (glTexDirectVIVMap_LOCAL == 0 || glTexDirectInvalidateVIV_LOCAL == 0) {
+    if (glTexDirectVIV_LOCAL == 0 || glTexDirectVIVMap_LOCAL == 0 || glTexDirectInvalidateVIV_LOCAL == 0) {
+        glTexDirectVIV_LOCAL = reinterpret_cast<PFNGLTEXDIRECTVIVPROC>(glcontext->getProcAddress("glTexDirectVIV"));
         glTexDirectVIVMap_LOCAL = reinterpret_cast<PFNGLTEXDIRECTVIVMAPPROC>(glcontext->getProcAddress("glTexDirectVIVMap"));
         glTexDirectInvalidateVIV_LOCAL = reinterpret_cast<PFNGLTEXDIRECTINVALIDATEVIVPROC>(glcontext->getProcAddress("glTexDirectInvalidateVIV"));
     }
-    if (glTexDirectVIVMap_LOCAL == 0 || glTexDirectInvalidateVIV_LOCAL == 0) {
+    if (glTexDirectVIV_LOCAL == 0 || glTexDirectVIVMap_LOCAL == 0 || glTexDirectInvalidateVIV_LOCAL == 0) {
         qWarning() << Q_FUNC_INFO << "couldn't find \"glTexDirectVIVMap\" and/or \"glTexDirectInvalidateVIV\" => do nothing and return";
         return 0;
     }
@@ -146,49 +163,80 @@ GLuint QSGVivanteVideoMaterial::vivanteMapping(QVideoFrame vF)
         mWidth = vF.width();
         mHeight = vF.height();
         mFormat = vF.pixelFormat();
-        Q_FOREACH (GLuint id, mBitsToTextureMap.values()) {
-#ifdef QT_VIVANTE_VIDEO_DEBUG
-            qDebug() << "delete texture: " << id;
-#endif
-            glDeleteTextures(1, &id);
-        }
-        mBitsToTextureMap.clear();
+        clearTextures();
     }
 
     if (vF.map(QAbstractVideoBuffer::ReadOnly)) {
-        if (!mBitsToTextureMap.contains(vF.bits())) {
-            GLuint tmpTexId;
-            glGenTextures(1, &tmpTexId);
-            mBitsToTextureMap.insert(vF.bits(), tmpTexId);
+        if (mMappable) {
+            if (!mBitsToTextureMap.contains(vF.bits())) {
+                // Haven't yet seen this logical address: map to texture.
+                GLuint tmpTexId;
+                glGenTextures(1, &tmpTexId);
+                mBitsToTextureMap.insert(vF.bits(), tmpTexId);
 
-            const uchar *constBits = vF.bits();
-            void *bits = (void*)constBits;
+                const uchar *constBits = vF.bits();
+                void *bits = (void*)constBits;
 
 #ifdef QT_VIVANTE_VIDEO_DEBUG
-            qDebug() << Q_FUNC_INFO << "new texture, texId: " << tmpTexId << "; constBits: " << constBits;
+                qDebug() << Q_FUNC_INFO << "new texture, texId: " << tmpTexId << "; constBits: " << constBits;
#endif
 
-            GLuint physical = ~0U;
-
-            glBindTexture(GL_TEXTURE_2D, tmpTexId);
-            glTexDirectVIVMap_LOCAL(GL_TEXTURE_2D,
-                                    vF.width(), vF.height(),
-                                    QSGVivanteVideoNode::getVideoFormat2GLFormatMap().value(vF.pixelFormat()),
-                                    &bits, &physical);
-
-            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
-            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
-            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
-            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
-            glTexDirectInvalidateVIV_LOCAL(GL_TEXTURE_2D);
-
-            return tmpTexId;
-        }
-        else {
-            glBindTexture(GL_TEXTURE_2D, mBitsToTextureMap.value(vF.bits()));
+                GLuint physical = ~0U;
+
+                glBindTexture(GL_TEXTURE_2D, tmpTexId);
+                glTexDirectVIVMap_LOCAL(GL_TEXTURE_2D,
+                                        vF.width(), vF.height(),
+                                        QSGVivanteVideoNode::getVideoFormat2GLFormatMap().value(vF.pixelFormat()),
+                                        &bits, &physical);
+
+                glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+                glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+                glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+                glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+                glTexDirectInvalidateVIV_LOCAL(GL_TEXTURE_2D);
+
+                return tmpTexId;
+            } else {
+                // Fastest path: already seen this logical address. Just
+                // indicate that the data belonging to the texture has changed.
+                glBindTexture(GL_TEXTURE_2D, mBitsToTextureMap.value(vF.bits()));
+                glTexDirectInvalidateVIV_LOCAL(GL_TEXTURE_2D);
+                return mBitsToTextureMap.value(vF.bits());
+            }
+        } else {
+            // Cannot map. So copy.
+            if (!mTexDirectTexture) {
+                glGenTextures(1, &mTexDirectTexture);
+                glBindTexture(GL_TEXTURE_2D, mTexDirectTexture);
+                glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+                glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+                glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+                glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+                glTexDirectVIV_LOCAL(GL_TEXTURE_2D, mCurrentFrame.width(), mCurrentFrame.height(),
+                                     QSGVivanteVideoNode::getVideoFormat2GLFormatMap().value(mCurrentFrame.pixelFormat()),
+                                     (GLvoid **) &mTexDirectPlanes);
+            } else {
+                glBindTexture(GL_TEXTURE_2D, mTexDirectTexture);
+            }
+            switch (mCurrentFrame.pixelFormat()) {
+            case QVideoFrame::Format_YUV420P:
+            case QVideoFrame::Format_YV12:
+                memcpy(mTexDirectPlanes[0], mCurrentFrame.bits(0), mCurrentFrame.height() * mCurrentFrame.bytesPerLine(0));
+                memcpy(mTexDirectPlanes[1], mCurrentFrame.bits(1), mCurrentFrame.height() * mCurrentFrame.bytesPerLine(1));
+                memcpy(mTexDirectPlanes[2], mCurrentFrame.bits(2), mCurrentFrame.height() * mCurrentFrame.bytesPerLine(2));
+                break;
+            case QVideoFrame::Format_NV12:
+            case QVideoFrame::Format_NV21:
+                memcpy(mTexDirectPlanes[0], mCurrentFrame.bits(0), mCurrentFrame.height() * mCurrentFrame.bytesPerLine(0));
+                memcpy(mTexDirectPlanes[1], mCurrentFrame.bits(1), mCurrentFrame.height() / 2 * mCurrentFrame.bytesPerLine(1));
+                break;
+            default:
+                memcpy(mTexDirectPlanes[0], mCurrentFrame.bits(), mCurrentFrame.height() * mCurrentFrame.bytesPerLine());
+                break;
+            }
             glTexDirectInvalidateVIV_LOCAL(GL_TEXTURE_2D);
-            return mBitsToTextureMap.value(vF.bits());
+            return mTexDirectTexture;
         }
     }
     else {
diff --git a/src/plugins/videonode/imx6/qsgvivantevideomaterial.h b/src/plugins/videonode/imx6/qsgvivantevideomaterial.h
index 227bc633c..910595836 100644
--- a/src/plugins/videonode/imx6/qsgvivantevideomaterial.h
+++ b/src/plugins/videonode/imx6/qsgvivantevideomaterial.h
@@ -41,6 +41,7 @@
 #include <QVideoFrame>
 #include <QMutex>
+#include <private/qsgvideonode_p.h>
 
 class QSGVivanteVideoMaterial : public QSGMaterial
 {
@@ -52,7 +53,7 @@ public:
     virtual QSGMaterialShader *createShader() const;
     virtual int compare(const QSGMaterial *other) const;
     void updateBlending();
-    void setCurrentFrame(const QVideoFrame &frame);
+    void setCurrentFrame(const QVideoFrame &frame, QSGVideoNode::FrameFlags flags);
 
     void bind();
     GLuint vivanteMapping(QVideoFrame texIdVideoFramePair);
@@ -60,6 +61,8 @@ public:
     void setOpacity(float o) { mOpacity = o; }
 
 private:
+    void clearTextures();
+
     qreal mOpacity;
 
     int mWidth;
@@ -69,8 +72,12 @@ private:
     QMap<const uchar*, GLuint> mBitsToTextureMap;
     QVideoFrame mCurrentFrame, mNextFrame;
     GLuint mCurrentTexture;
+    bool mMappable;
 
     QMutex mFrameMutex;
+
+    GLuint mTexDirectTexture;
+    GLvoid *mTexDirectPlanes[3];
 };
 
 #endif // QSGVIDEOMATERIAL_VIVMAP_H
diff --git a/src/plugins/videonode/imx6/qsgvivantevideonode.cpp b/src/plugins/videonode/imx6/qsgvivantevideonode.cpp
index 1c1c1008b..e24ab3962 100644
--- a/src/plugins/videonode/imx6/qsgvivantevideonode.cpp
+++ b/src/plugins/videonode/imx6/qsgvivantevideonode.cpp
@@ -52,9 +52,9 @@ QSGVivanteVideoNode::~QSGVivanteVideoNode()
 {
 }
 
-void QSGVivanteVideoNode::setCurrentFrame(const QVideoFrame &frame)
+void QSGVivanteVideoNode::setCurrentFrame(const QVideoFrame &frame, FrameFlags flags)
 {
-    mMaterial->setCurrentFrame(frame);
+    mMaterial->setCurrentFrame(frame, flags);
     markDirty(DirtyMaterial);
 }
 
diff --git a/src/plugins/videonode/imx6/qsgvivantevideonode.h b/src/plugins/videonode/imx6/qsgvivantevideonode.h
index 5830cc3b4..79b6e9e57 100644
--- a/src/plugins/videonode/imx6/qsgvivantevideonode.h
+++ b/src/plugins/videonode/imx6/qsgvivantevideonode.h
@@ -44,8 +44,9 @@ public:
     QSGVivanteVideoNode(const QVideoSurfaceFormat &format);
     ~QSGVivanteVideoNode();
 
-    virtual QVideoFrame::PixelFormat pixelFormat() const { return mFormat.pixelFormat(); }
-    void setCurrentFrame(const QVideoFrame &frame);
+    QVideoFrame::PixelFormat pixelFormat() const { return mFormat.pixelFormat(); }
+    QAbstractVideoBuffer::HandleType handleType() const { return QAbstractVideoBuffer::NoHandle; }
+    void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags);
 
     static const QMap<QVideoFrame::PixelFormat, GLenum>& getVideoFormat2GLFormatMap();