author     Lars Knoll <lars.knoll@qt.io>    2021-03-24 13:05:41 +0100
committer  Lars Knoll <lars.knoll@qt.io>    2021-04-06 08:20:31 +0000
commit     3c4be415b55e0ea67eb17b30cc50f9f3e64a775d (patch)
tree       92ea2d1bcb74d338f1e978854fac091a388312b9
parent     80c24e91131362d33555465ca4675a2cb009add4 (diff)
Start adding infrastructure to retrieve video frames as texture
We're using RHI here, as that's what we need for Qt Quick anyway. You can now
set a QRhi object on QVideoSink. This can then be used internally to create
textures of the required format instead of memory buffers. QVideoSurfaceFormat
will now tell you how many planes are required for the format, and you can
retrieve the textures for each plane from QVideoFrame.

Change-Id: I86430db60a8f1aba07ec3b38e22b977cdaefaa0a
Reviewed-by: Doris Verria <doris.verria@qt.io>
Reviewed-by: Lars Knoll <lars.knoll@qt.io>
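[Editor's note] A minimal usage sketch of the API introduced by this change,
not part of the commit itself; the helper name is illustrative only. The sink
is handed the QRhi it should create textures on, after which frames can arrive
with the new RhiTextureHandle handle type.

    #include <qvideosink.h>

    class QRhi; // only the pointer is passed through; QRhi is private API here

    // Hypothetical setup helper: tell the sink which QRhi to allocate
    // textures on, so frames are delivered as textures instead of memory.
    static void configureSink(QVideoSink *sink, QRhi *rhi)
    {
        sink->setRhi(rhi); // new in this change
    }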
-rw-r--r--  src/multimedia/platform/gstreamer/common/qgstvideorenderersink.cpp |  6
-rw-r--r--  src/multimedia/video/qabstractvideobuffer_p.h                      |  1
-rw-r--r--  src/multimedia/video/qvideoframe.cpp                               | 25
-rw-r--r--  src/multimedia/video/qvideoframe.h                                 |  5
-rw-r--r--  src/multimedia/video/qvideosink.cpp                                | 11
-rw-r--r--  src/multimedia/video/qvideosink.h                                  | 10
-rw-r--r--  src/multimedia/video/qvideosurfaceformat.cpp                       | 46
-rw-r--r--  src/multimedia/video/qvideosurfaceformat.h                         |  2
-rw-r--r--  src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp      |  3
-rw-r--r--  src/qtmultimediaquicktools/qsgvideonode_yuv.cpp                    | 14
-rw-r--r--  src/qtmultimediaquicktools/qsgvideotexture.cpp                     |  4
-rw-r--r--  src/qtmultimediaquicktools/qsgvideotexture_p.h                     |  2
-rw-r--r--  tests/auto/unit/multimedia/qabstractvideobuffer/tst_qabstractvideobuffer.cpp |  3
-rw-r--r--  tests/auto/unit/multimedia/qvideoframe/tst_qvideoframe.cpp         |  8
14 files changed, 101 insertions, 39 deletions
diff --git a/src/multimedia/platform/gstreamer/common/qgstvideorenderersink.cpp b/src/multimedia/platform/gstreamer/common/qgstvideorenderersink.cpp
index 412a482f8..b6b9704dc 100644
--- a/src/multimedia/platform/gstreamer/common/qgstvideorenderersink.cpp
+++ b/src/multimedia/platform/gstreamer/common/qgstvideorenderersink.cpp
@@ -86,7 +86,7 @@ QGstMutableCaps QGstVideoRenderer::getCaps()
// All the formats that both we and gstreamer support
#if QT_CONFIG(gstreamer_gl)
if (QGstUtils::useOpenGL()) {
- m_handleType = QVideoFrame::GLTextureHandle;
+ m_handleType = QVideoFrame::RhiTextureHandle;
auto formats = QList<QVideoSurfaceFormat::PixelFormat>()
<< QVideoSurfaceFormat::Format_YUV420P
<< QVideoSurfaceFormat::Format_YUV422P
@@ -119,7 +119,7 @@ QGstMutableCaps QGstVideoRenderer::getCaps()
if (!formats.isEmpty()) {
QGstMutableCaps caps = QGstUtils::capsForFormats(formats);
for (int i = 0; i < caps.size(); ++i)
- gst_caps_set_features(caps.get(), i, gst_caps_features_from_string("memory:GLMemory"));
+ gst_caps_set_features(caps.get(), i, gst_caps_features_from_string(GST_CAPS_FEATURE_MEMORY_GL_MEMORY));
return caps;
}
@@ -172,7 +172,7 @@ bool QGstVideoRenderer::present(QVideoSink *sink, GstBuffer *buffer)
QGstVideoBuffer *videoBuffer = nullptr;
#if QT_CONFIG(gstreamer_gl)
- if (m_handleType == QVideoFrame::GLTextureHandle) {
+ if (m_handleType == QVideoFrame::RhiTextureHandle) {
GstGLMemory *glmem = GST_GL_MEMORY_CAST(gst_buffer_peek_memory(buffer, 0));
guint textureId = gst_gl_memory_get_texture_id(glmem);
videoBuffer = new QGstVideoBuffer(buffer, m_videoInfo, m_handleType, textureId);
diff --git a/src/multimedia/video/qabstractvideobuffer_p.h b/src/multimedia/video/qabstractvideobuffer_p.h
index 3ab1b60e4..e3a0fb86c 100644
--- a/src/multimedia/video/qabstractvideobuffer_p.h
+++ b/src/multimedia/video/qabstractvideobuffer_p.h
@@ -82,6 +82,7 @@ public:
virtual void unmap() = 0;
virtual QVariant handle() const;
+ virtual quint64 textureHandle(int /*plane*/) const { return 0; }
protected:
QVideoFrame::HandleType m_type;
diff --git a/src/multimedia/video/qvideoframe.cpp b/src/multimedia/video/qvideoframe.cpp
index 0ff9d2ace..316366d53 100644
--- a/src/multimedia/video/qvideoframe.cpp
+++ b/src/multimedia/video/qvideoframe.cpp
@@ -633,7 +633,11 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
case QVideoSurfaceFormat::Format_NV12:
case QVideoSurfaceFormat::Format_NV21:
case QVideoSurfaceFormat::Format_IMC2:
- case QVideoSurfaceFormat::Format_IMC4: {
+ case QVideoSurfaceFormat::Format_IMC4:
+ case QVideoSurfaceFormat::Format_P010BE:
+ case QVideoSurfaceFormat::Format_P010LE:
+ case QVideoSurfaceFormat::Format_P016BE:
+ case QVideoSurfaceFormat::Format_P016LE: {
// Semi planar, Full resolution Y plane with interleaved subsampled U and V planes.
d->mapData.nPlanes = 2;
d->mapData.bytesPerLine[1] = d->mapData.bytesPerLine[0];
@@ -650,8 +654,6 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
d->mapData.data[2] = d->mapData.data[1] + (d->mapData.bytesPerLine[1] * height() / 2);
break;
}
- default:
- break;
}
}
@@ -797,15 +799,18 @@ int QVideoFrame::mappedBytes() const
/*!
Returns the number of planes in the video frame.
- This value is only valid while the frame data is \l {map()}{mapped}.
-
- \sa map()
+ \sa map(), textureHandle()
\since 5.4
*/
int QVideoFrame::planeCount() const
{
- return d->mapData.nPlanes;
+ return d->format.nPlanes();
+}
+
+quint64 QVideoFrame::textureHandle(int plane)
+{
+ return d->buffer->textureHandle(plane);
}
/*!
@@ -982,10 +987,8 @@ QDebug operator<<(QDebug dbg, QVideoFrame::HandleType type)
switch (type) {
case QVideoFrame::NoHandle:
return dbg << "NoHandle";
- case QVideoFrame::GLTextureHandle:
- return dbg << "GLTextureHandle";
- case QVideoFrame::MTLTextureHandle:
- return dbg << "MTLTextureHandle";
+ case QVideoFrame::RhiTextureHandle:
+ return dbg << "RhiTextureHandle";
}
}
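[Editor's note] With planeCount() now coming from the surface format and
textureHandle() exposed per plane, a consumer of texture-backed frames could
look like the sketch below. This is an assumed helper, not code from the
commit; only handleType(), planeCount() and textureHandle() are from the API
changed here.

    #include <qvideoframe.h>

    // Hypothetical consumer: collect the native texture object of every
    // plane. textureHandle() is only meaningful for RhiTextureHandle frames;
    // the base buffer implementation returns 0 when no texture is available.
    static void collectPlaneTextures(QVideoFrame frame, quint64 *out, int maxPlanes)
    {
        if (frame.handleType() != QVideoFrame::RhiTextureHandle)
            return;
        const int planes = qMin(frame.planeCount(), maxPlanes);
        for (int i = 0; i < planes; ++i)
            out[i] = frame.textureHandle(i);
    }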
diff --git a/src/multimedia/video/qvideoframe.h b/src/multimedia/video/qvideoframe.h
index c3eba5585..5e90b6ca1 100644
--- a/src/multimedia/video/qvideoframe.h
+++ b/src/multimedia/video/qvideoframe.h
@@ -61,8 +61,7 @@ public:
enum HandleType
{
NoHandle,
- GLTextureHandle,
- MTLTextureHandle,
+ RhiTextureHandle
};
enum MapMode
@@ -114,6 +113,8 @@ public:
int mappedBytes() const;
int planeCount() const;
+ quint64 textureHandle(int plane);
+
QVariant handle() const;
qint64 startTime() const;
diff --git a/src/multimedia/video/qvideosink.cpp b/src/multimedia/video/qvideosink.cpp
index 639d00583..f5112b173 100644
--- a/src/multimedia/video/qvideosink.cpp
+++ b/src/multimedia/video/qvideosink.cpp
@@ -69,6 +69,7 @@ public:
QSize nativeResolution;
bool active = false;
WId window = 0;
+ QRhi *rhi = nullptr;
Qt::AspectRatioMode aspectRatioMode = Qt::KeepAspectRatio;
QRectF targetRect;
int brightness = 0;
@@ -118,6 +119,16 @@ void QVideoSink::setNativeWindowId(WId id)
d->videoSink->setWinId(id);
}
+QRhi *QVideoSink::rhi() const
+{
+ return d->rhi;
+}
+
+void QVideoSink::setRhi(QRhi *rhi)
+{
+ d->rhi = rhi;
+}
+
void QVideoSink::setFullScreen(bool fullscreen)
{
d->videoSink->setFullScreen(fullscreen);
diff --git a/src/multimedia/video/qvideosink.h b/src/multimedia/video/qvideosink.h
index ec89c425d..f94a43d2e 100644
--- a/src/multimedia/video/qvideosink.h
+++ b/src/multimedia/video/qvideosink.h
@@ -52,6 +52,7 @@ class QVideoFrame;
class QVideoSinkPrivate;
class QPlatformVideoSink;
+class QRhi;
class Q_MULTIMEDIA_EXPORT QVideoSink : public QObject
{
@@ -61,11 +62,7 @@ public:
{
Memory,
NativeWindow,
- NativeTexture,
- OpenGL,
- Metal,
- Direct3D11,
- Vulkan
+ RhiTexture
};
QVideoSink(QObject *parent = nullptr);
@@ -80,6 +77,9 @@ public:
WId nativeWindowId() const;
void setNativeWindowId(WId id);
+ QRhi *rhi() const;
+ void setRhi(QRhi *rhi);
+
void setFullScreen(bool fullscreen);
bool isFullscreen() const;
diff --git a/src/multimedia/video/qvideosurfaceformat.cpp b/src/multimedia/video/qvideosurfaceformat.cpp
index 3ba143079..ad83aa954 100644
--- a/src/multimedia/video/qvideosurfaceformat.cpp
+++ b/src/multimedia/video/qvideosurfaceformat.cpp
@@ -251,6 +251,52 @@ int QVideoSurfaceFormat::frameHeight() const
return d->frameSize.height();
}
+int QVideoSurfaceFormat::nPlanes() const
+{
+ switch (d->pixelFormat) {
+ case Format_Invalid:
+ case Format_ARGB32:
+ case Format_ARGB32_Premultiplied:
+ case Format_RGB32:
+ case Format_RGB24:
+ case Format_RGB565:
+ case Format_RGB555:
+ case Format_ARGB8565_Premultiplied:
+ case Format_BGRA32:
+ case Format_BGRA32_Premultiplied:
+ case Format_ABGR32:
+ case Format_BGR32:
+ case Format_BGR24:
+ case Format_BGR565:
+ case Format_BGR555:
+ case Format_BGRA5658_Premultiplied:
+ case Format_AYUV444:
+ case Format_AYUV444_Premultiplied:
+ case Format_YUV444:
+ case Format_UYVY:
+ case Format_YUYV:
+ case Format_Y8:
+ case Format_Y16:
+ case Format_Jpeg:
+ return 1;
+ case Format_NV12:
+ case Format_NV21:
+ case Format_IMC2:
+ case Format_IMC4:
+ case Format_P010LE:
+ case Format_P010BE:
+ case Format_P016LE:
+ case Format_P016BE:
+ return 2;
+ case Format_YUV420P:
+ case Format_YUV422P:
+ case Format_YV12:
+ case Format_IMC1:
+ case Format_IMC3:
+ return 3;
+ }
+}
+
/*!
Sets the size of frames in a video stream to \a size.
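[Editor's note] A quick illustration of the mapping implemented by nPlanes()
above (illustration only, not test code from the commit):

    #include <qvideosurfaceformat.h>

    // Packed RGB/YUV formats occupy one plane, NV12/P010-style formats two
    // (full-resolution Y plus interleaved UV), planar YUV formats three.
    static void planeCountExamples()
    {
        QVideoSurfaceFormat argb(QSize(64, 64), QVideoSurfaceFormat::Format_ARGB32);
        QVideoSurfaceFormat nv12(QSize(64, 64), QVideoSurfaceFormat::Format_NV12);
        QVideoSurfaceFormat i420(QSize(64, 64), QVideoSurfaceFormat::Format_YUV420P);
        // argb.nPlanes() == 1, nv12.nPlanes() == 2, i420.nPlanes() == 3
    }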
diff --git a/src/multimedia/video/qvideosurfaceformat.h b/src/multimedia/video/qvideosurfaceformat.h
index 5b9c9fcb8..837d9545a 100644
--- a/src/multimedia/video/qvideosurfaceformat.h
+++ b/src/multimedia/video/qvideosurfaceformat.h
@@ -144,6 +144,8 @@ public:
int frameWidth() const;
int frameHeight() const;
+ int nPlanes() const;
+
QRect viewport() const;
void setViewport(const QRect &viewport);
diff --git a/src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp b/src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp
index c52b168e6..fad92bc5f 100644
--- a/src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp
+++ b/src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp
@@ -48,6 +48,7 @@
#include <qvideosink.h>
#include <QtQuick/QQuickWindow>
+#include <private/qquickwindow_p.h>
#include <QtCore/QRunnable>
QT_BEGIN_NAMESPACE
@@ -62,7 +63,7 @@ QDeclarativeVideoBackend::QDeclarativeVideoBackend(QDeclarativeVideoOutput *pare
m_frameChanged(false)
{
m_sink = new QVideoSink(q);
-// m_sink->setGraphicsType(QVideoSink::Metal);
+ m_sink->setRhi(QQuickWindowPrivate::get(q->window())->rhi);
qRegisterMetaType<QVideoSurfaceFormat>();
QObject::connect(m_sink, SIGNAL(newVideoFrame(const QVideoFrame &)),
q, SLOT(_q_newFrame(const QVideoFrame &)), Qt::QueuedConnection);
diff --git a/src/qtmultimediaquicktools/qsgvideonode_yuv.cpp b/src/qtmultimediaquicktools/qsgvideonode_yuv.cpp
index 460e6ed2e..efefaf0f1 100644
--- a/src/qtmultimediaquicktools/qsgvideonode_yuv.cpp
+++ b/src/qtmultimediaquicktools/qsgvideonode_yuv.cpp
@@ -352,14 +352,14 @@ void QSGVideoMaterialRhiShader_NV12::mapFrame(QSGVideoMaterial_YUV *m)
if (!m->m_frame.isValid())
return;
- if (m->m_frame.handleType() == QVideoFrame::GLTextureHandle || m->m_frame.handleType() == QVideoFrame::MTLTextureHandle) {
+ if (m->m_frame.handleType() == QVideoFrame::RhiTextureHandle) {
m->m_planeWidth[0] = m->m_planeWidth[1] = 1;
auto textures = m->m_frame.handle().toList();
if (!textures.isEmpty()) {
auto w = m->m_frame.size().width();
auto h = m->m_frame.size().height();
- m->m_textures[0]->setNativeObject(textures[0].toULongLong(), {w, h});
- m->m_textures[1]->setNativeObject(textures[1].toULongLong(), {w / 2, h / 2});
+ m->m_textures[0]->setNativeObject(textures[0].toULongLong(), {w, h}, QRhiTexture::R8);
+ m->m_textures[1]->setNativeObject(textures[1].toULongLong(), {w / 2, h / 2}, QRhiTexture::RG8);
} else {
qWarning() << "NV12/NV21 requires 2 textures";
}
@@ -390,22 +390,20 @@ void QSGVideoMaterialRhiShader_P010::mapFrame(QSGVideoMaterial_YUV *m)
if (!m->m_frame.isValid())
return;
-#if 0
- if (m->m_frame.handleType() == QVideoFrame::GLTextureHandle || m->m_frame.handleType() == QVideoFrame::MTLTextureHandle) {
+ if (m->m_frame.handleType() == QVideoFrame::RhiTextureHandle) {
m->m_planeWidth[0] = m->m_planeWidth[1] = 1;
auto textures = m->m_frame.handle().toList();
if (!textures.isEmpty()) {
auto w = m->m_frame.size().width();
auto h = m->m_frame.size().height();
- m->m_textures[0]->setNativeObject(textures[0].toULongLong(), {w, h});
- m->m_textures[1]->setNativeObject(textures[1].toULongLong(), {w / 2, h / 2});
+ m->m_textures[0]->setNativeObject(textures[0].toULongLong(), {w, h}, QRhiTexture::RG8);
+ m->m_textures[1]->setNativeObject(textures[1].toULongLong(), {w / 2, h / 2}, QRhiTexture::BGRA8);
} else {
qWarning() << "P010/P016 requires 2 textures";
}
return;
}
-#endif
if (!m->m_frame.map(QVideoFrame::ReadOnly))
return;
diff --git a/src/qtmultimediaquicktools/qsgvideotexture.cpp b/src/qtmultimediaquicktools/qsgvideotexture.cpp
index 1c3ff30f3..18c060de7 100644
--- a/src/qtmultimediaquicktools/qsgvideotexture.cpp
+++ b/src/qtmultimediaquicktools/qsgvideotexture.cpp
@@ -109,10 +109,10 @@ void QSGVideoTexture::setData(QRhiTexture::Format f, const QSize &s, const uchar
d->m_data = {reinterpret_cast<const char *>(data), bytes};
}
-void QSGVideoTexture::setNativeObject(quint64 obj, const QSize &s)
+void QSGVideoTexture::setNativeObject(quint64 obj, const QSize &s, QRhiTexture::Format f)
{
Q_D(QSGVideoTexture);
- setData(QRhiTexture::RGBA8, s, nullptr, 0);
+ setData(f, s, nullptr, 0);
if (d->m_nativeObject != obj) {
d->m_nativeObject = obj;
d->m_texture.reset();
diff --git a/src/qtmultimediaquicktools/qsgvideotexture_p.h b/src/qtmultimediaquicktools/qsgvideotexture_p.h
index 1cae713b8..221b891e3 100644
--- a/src/qtmultimediaquicktools/qsgvideotexture_p.h
+++ b/src/qtmultimediaquicktools/qsgvideotexture_p.h
@@ -73,7 +73,7 @@ public:
bool hasMipmaps() const override;
void commitTextureOperations(QRhi *rhi, QRhiResourceUpdateBatch *resourceUpdates) override;
void setData(QRhiTexture::Format f, const QSize &s, const uchar *data, int bytes);
- void setNativeObject(quint64 obj, const QSize &s);
+ void setNativeObject(quint64 obj, const QSize &s, QRhiTexture::Format f = QRhiTexture::RGBA8);
protected:
QScopedPointer<QSGVideoTexturePrivate> d_ptr;
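[Editor's note] The new format argument matters because the planes of a
semi-planar frame have different channel layouts, as seen in the
qsgvideonode_yuv.cpp hunk above. A sketch of a caller wrapping two native
texture ids for an NV12 frame; the helper name and include path are assumed,
not part of the commit.

    #include <private/qsgvideotexture_p.h>

    // Assumed helper: the Y plane is a single 8-bit channel at full
    // resolution, the interleaved UV plane two 8-bit channels at half
    // resolution, hence the different QRhiTexture formats.
    static void wrapNV12Planes(QSGVideoTexture *y, QSGVideoTexture *uv,
                               quint64 yHandle, quint64 uvHandle, const QSize &size)
    {
        y->setNativeObject(yHandle, size, QRhiTexture::R8);
        uv->setNativeObject(uvHandle, QSize(size.width() / 2, size.height() / 2),
                            QRhiTexture::RG8);
    }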
diff --git a/tests/auto/unit/multimedia/qabstractvideobuffer/tst_qabstractvideobuffer.cpp b/tests/auto/unit/multimedia/qabstractvideobuffer/tst_qabstractvideobuffer.cpp
index 38647bb6b..cd6e756ab 100644
--- a/tests/auto/unit/multimedia/qabstractvideobuffer/tst_qabstractvideobuffer.cpp
+++ b/tests/auto/unit/multimedia/qabstractvideobuffer/tst_qabstractvideobuffer.cpp
@@ -101,8 +101,7 @@ void tst_QAbstractVideoBuffer::handleType_data()
QTest::addColumn<QString>("stringized");
ADD_ENUM_TEST(NoHandle);
- ADD_ENUM_TEST(GLTextureHandle);
- ADD_ENUM_TEST(MTLTextureHandle);
+ ADD_ENUM_TEST(RhiTextureHandle);
}
void tst_QAbstractVideoBuffer::handleType()
diff --git a/tests/auto/unit/multimedia/qvideoframe/tst_qvideoframe.cpp b/tests/auto/unit/multimedia/qvideoframe/tst_qvideoframe.cpp
index 3f4cf9e6a..6db33c7f6 100644
--- a/tests/auto/unit/multimedia/qvideoframe/tst_qvideoframe.cpp
+++ b/tests/auto/unit/multimedia/qvideoframe/tst_qvideoframe.cpp
@@ -254,7 +254,7 @@ void tst_QVideoFrame::createFromBuffer_data()
<< QSize(64, 64)
<< QVideoSurfaceFormat::Format_ARGB32;
QTest::newRow("64x64 ARGB32 gl handle")
- << QVideoFrame::GLTextureHandle
+ << QVideoFrame::RhiTextureHandle
<< QSize(64, 64)
<< QVideoSurfaceFormat::Format_ARGB32;
}
@@ -399,13 +399,13 @@ void tst_QVideoFrame::copy_data()
QTest::addColumn<qint64>("endTime");
QTest::newRow("64x64 ARGB32")
- << QVideoFrame::GLTextureHandle
+ << QVideoFrame::RhiTextureHandle
<< QSize(64, 64)
<< QVideoSurfaceFormat::Format_ARGB32
<< qint64(63641740)
<< qint64(63641954);
QTest::newRow("64x64 ARGB32")
- << QVideoFrame::GLTextureHandle
+ << QVideoFrame::RhiTextureHandle
<< QSize(64, 64)
<< QVideoSurfaceFormat::Format_ARGB32
<< qint64(63641740)
@@ -506,7 +506,7 @@ void tst_QVideoFrame::assign_data()
QTest::addColumn<qint64>("endTime");
QTest::newRow("64x64 ARGB32")
- << QVideoFrame::GLTextureHandle
+ << QVideoFrame::RhiTextureHandle
<< QSize(64, 64)
<< QVideoSurfaceFormat::Format_ARGB32
<< qint64(63641740)