-rw-r--r-- dist/changes-5.13.1 | 2
-rw-r--r-- dist/changes-5.13.2 | 6
-rw-r--r-- dist/changes-5.14.0 | 2
-rw-r--r-- dist/changes-5.15.1 | 2
-rw-r--r-- examples/multimedia/video/qmlvideofilter_opencl/main.cpp | 4
-rw-r--r-- examples/multimedia/video/qmlvideofilter_opencl/rgbframehelper.h | 2
-rw-r--r-- examples/multimediawidgets/player/histogramwidget.cpp | 6
-rw-r--r-- src/multimedia/camera/qcameraimagecapture.cpp | 2
-rw-r--r-- src/multimedia/camera/qcamerainfo.cpp | 2
-rw-r--r-- src/multimedia/camera/qcamerainfo.h | 2
-rw-r--r-- src/multimedia/camera/qcamerainfo_p.h | 2
-rw-r--r-- src/multimedia/doc/snippets/multimedia-snippets/camera.cpp | 4
-rw-r--r-- src/multimedia/doc/snippets/multimedia-snippets/media.cpp | 4
-rw-r--r-- src/multimedia/doc/snippets/multimedia-snippets/video.cpp | 8
-rw-r--r-- src/multimedia/platform/android/common/qandroidmultimediautils.cpp | 26
-rw-r--r-- src/multimedia/platform/android/common/qandroidmultimediautils_p.h | 4
-rw-r--r-- src/multimedia/platform/android/common/qandroidvideooutput.cpp | 2
-rw-r--r-- src/multimedia/platform/android/mediacapture/qandroidcamerasession.cpp | 42
-rw-r--r-- src/multimedia/platform/android/mediacapture/qandroidcamerasession_p.h | 6
-rw-r--r-- src/multimedia/platform/android/mediacapture/qandroidcameravideorenderercontrol.cpp | 12
-rw-r--r-- src/multimedia/platform/darwin/camera/avfcamera.mm | 36
-rw-r--r-- src/multimedia/platform/darwin/camera/avfcamera_p.h | 4
-rw-r--r-- src/multimedia/platform/darwin/camera/avfcameraimagecapture.mm | 2
-rw-r--r-- src/multimedia/platform/darwin/camera/avfcamerarenderer.mm | 5
-rw-r--r-- src/multimedia/platform/darwin/mediaplayer/avfvideorenderercontrol.mm | 6
-rw-r--r-- src/multimedia/platform/darwin/qdarwindevicemanager.mm | 2
-rw-r--r-- src/multimedia/platform/gstreamer/common/qgst_p.h | 2
-rw-r--r-- src/multimedia/platform/gstreamer/common/qgstutils.cpp | 74
-rw-r--r-- src/multimedia/platform/gstreamer/common/qgstutils_p.h | 2
-rw-r--r-- src/multimedia/platform/gstreamer/common/qgstvideorenderersink.cpp | 96
-rw-r--r-- src/multimedia/platform/qnx/camera/bbcamerasession.cpp | 4
-rw-r--r-- src/multimedia/platform/qnx/mediaplayer/mmrendererplayervideorenderercontrol.cpp | 10
-rw-r--r-- src/multimedia/platform/windows/evr/evrcustompresenter.cpp | 42
-rw-r--r-- src/multimedia/platform/windows/evr/evrd3dpresentengine.cpp | 2
-rw-r--r-- src/multimedia/platform/windows/evr/evrhelpers.cpp | 56
-rw-r--r-- src/multimedia/platform/windows/evr/evrhelpers_p.h | 4
-rw-r--r-- src/multimedia/platform/windows/player/mftvideo.cpp | 26
-rw-r--r-- src/multimedia/platform/windows/player/mftvideo_p.h | 2
-rw-r--r-- src/multimedia/platform/windows/player/mfvideorenderercontrol.cpp | 60
-rw-r--r-- src/multimedia/platform/windows/qwindowsdevicemanager.cpp | 2
-rw-r--r-- src/multimedia/video/qabstractvideosurface.h | 2
-rw-r--r-- src/multimedia/video/qvideoframe.cpp | 265
-rw-r--r-- src/multimedia/video/qvideoframe.h | 54
-rw-r--r-- src/multimedia/video/qvideoframeconversionhelper.cpp | 22
-rw-r--r-- src/multimedia/video/qvideoframeconversionhelper_p.h | 2
-rw-r--r-- src/multimedia/video/qvideosink.cpp | 2
-rw-r--r-- src/multimedia/video/qvideosurfaceformat.cpp | 193
-rw-r--r-- src/multimedia/video/qvideosurfaceformat.h | 58
-rw-r--r-- src/multimedia/video/qvideosurfaces.cpp | 6
-rw-r--r-- src/multimedia/video/qvideosurfaces_p.h | 2
-rw-r--r-- src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp | 10
-rw-r--r-- src/plugins/videonode/imx6/qsgvivantevideomaterial.h | 2
-rw-r--r-- src/plugins/videonode/imx6/qsgvivantevideonode.cpp | 50
-rw-r--r-- src/plugins/videonode/imx6/qsgvivantevideonode.h | 8
-rw-r--r-- src/plugins/videonode/imx6/qsgvivantevideonodefactory.cpp | 4
-rw-r--r-- src/plugins/videonode/imx6/qsgvivantevideonodefactory.h | 2
-rw-r--r-- src/qtmultimediaquicktools/qsgvideonode_p.h | 6
-rw-r--r-- src/qtmultimediaquicktools/qsgvideonode_rgb.cpp | 24
-rw-r--r-- src/qtmultimediaquicktools/qsgvideonode_rgb_p.h | 4
-rw-r--r-- src/qtmultimediaquicktools/qsgvideonode_texture.cpp | 20
-rw-r--r-- src/qtmultimediaquicktools/qsgvideonode_texture_p.h | 4
-rw-r--r-- src/qtmultimediaquicktools/qsgvideonode_yuv.cpp | 32
-rw-r--r-- src/qtmultimediaquicktools/qsgvideonode_yuv_p.h | 4
-rw-r--r-- tests/auto/integration/qdeclarativevideooutput/tst_qdeclarativevideooutput.cpp | 8
-rw-r--r-- tests/auto/integration/qmediaplayerbackend/tst_qmediaplayerbackend.cpp | 40
-rw-r--r-- tests/auto/unit/multimedia/qvideoframe/tst_qvideoframe.cpp | 294
-rw-r--r-- tests/auto/unit/multimedia/qvideosurfaceformat/tst_qvideosurfaceformat.cpp | 58
-rw-r--r-- tests/auto/unit/multimediawidgets/qgraphicsvideoitem/tst_qgraphicsvideoitem.cpp | 14
-rw-r--r-- tests/auto/unit/multimediawidgets/qvideowidget/tst_qvideowidget.cpp | 10
-rw-r--r-- tests/auto/unit/qml/qdeclarativevideo/tst_qdeclarativevideo.cpp | 2
70 files changed, 892 insertions, 887 deletions
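
The patch is, for the most part, a mechanical move of the pixel-format enum from QVideoFrame to QVideoSurfaceFormat. A minimal before/after sketch of client code affected by the rename (handleFrame is a hypothetical function, not part of the patch):

#include <QVideoFrame>
#include <QVideoSurfaceFormat>

// Before this commit the enum was scoped to QVideoFrame:
//     QVideoFrame::PixelFormat fmt = frame.pixelFormat();
//     if (fmt == QVideoFrame::Format_NV12) { ... }

// After this commit the same values are scoped to QVideoSurfaceFormat:
void handleFrame(const QVideoFrame &frame)
{
    QVideoSurfaceFormat::PixelFormat fmt = frame.pixelFormat();
    if (fmt == QVideoSurfaceFormat::Format_NV12) {
        // bi-planar YUV path
    }
}
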
diff --git a/dist/changes-5.13.1 b/dist/changes-5.13.1
index 5167a49b8..78a724322 100644
--- a/dist/changes-5.13.1
+++ b/dist/changes-5.13.1
@@ -32,7 +32,7 @@ information about a particular change.
* [QTBUG-65574] Added error when attempting to play a video without a video output set.
* [QTBUG-65574] Added fix to prevent the same media content from being set twice.
* [QTBUG-65574] Returns QMediaPlayer::NoMedia if an empty url is provided.
- * [QTBUG-75959] Mapped MEDIASUBTYPE_RGB24 to QVideoFrame::Format_BGR24.
+ * [QTBUG-75959] Mapped MEDIASUBTYPE_RGB24 to QVideoSurfaceFormat::Format_BGR24.
* [QTBUG-70655] Fixed build issues with newer MinGW versions.
* Fixed memory leak introduced by using GetFrameRateList.
diff --git a/dist/changes-5.13.2 b/dist/changes-5.13.2
index 62dffcb02..a132744ab 100644
--- a/dist/changes-5.13.2
+++ b/dist/changes-5.13.2
@@ -1,7 +1,7 @@
Qt 5.13.2 is a bug-fix release. It maintains both forward and backward
source compatibility with Qt 5.13.0 through 5.13.1.
In Qt 5.13.0, binary compatibility was broken due to the usage of the enum
-QVideoFrame::PixelFormat, the break has been reverted,
+QVideoSurfaceFormat::PixelFormat, the break has been reverted,
thus introducing a binary compatibility break with earlier Qt 5.13.0 and 5.13.1.
For more details, refer to the online documentation included in this
@@ -25,8 +25,8 @@ information about a particular change.
****************************************************************************
- In Qt 5.13.0 binary compatibility was broken
- for usage of the enum QVideoFrame::PixelFormat by introducing
- QVideoFrame::Format_ABGR32.
+ for usage of the enum QVideoSurfaceFormat::PixelFormat by introducing
+ QVideoSurfaceFormat::Format_ABGR32.
To minimize the impact of this, the break has been reverted,
thus introducing a binary compatibility break with earlier Qt 5.13.x
versions, but restoring compatibility with all earlier versions of Qt 5.
diff --git a/dist/changes-5.14.0 b/dist/changes-5.14.0
index 4f99255e2..fb4f46ba4 100644
--- a/dist/changes-5.14.0
+++ b/dist/changes-5.14.0
@@ -20,7 +20,7 @@ information about a particular change.
****************************************************************************
- [QTBUG-73878] Removed the Mir client code as it is no longer used.
- - Added QVideoFrame::Format_YUV422P.
+ - Added QVideoSurfaceFormat::Format_YUV422P.
- Deprecated canonicalUrl and canonicalRequest in QMediaContent.
- [QTBUG-74422] Moved dtors of QCameraExposure, QCameraFocus,
QCameraImageProcessing from private to protected.
diff --git a/dist/changes-5.15.1 b/dist/changes-5.15.1
index bffbec190..d7ec2c5dc 100644
--- a/dist/changes-5.15.1
+++ b/dist/changes-5.15.1
@@ -22,7 +22,7 @@ information about a particular change.
****************************************************************************
- [QTBUG-85202] QMemoryVideoBuffer is not mapped when there is no data inside.
-- [QTBUG-52455] QVideoFrame::Format_Y8 is mapped to QImage::Format_Grayscale8.
+- [QTBUG-52455] QVideoSurfaceFormat::Format_Y8 is mapped to QImage::Format_Grayscale8.
****************************************************************************
* Platform Specific Changes *
diff --git a/examples/multimedia/video/qmlvideofilter_opencl/main.cpp b/examples/multimedia/video/qmlvideofilter_opencl/main.cpp
index 58c8967f0..2ee2d8bf4 100644
--- a/examples/multimedia/video/qmlvideofilter_opencl/main.cpp
+++ b/examples/multimedia/video/qmlvideofilter_opencl/main.cpp
@@ -323,8 +323,8 @@ QVideoFrame CLFilterRunnable::run(QVideoFrame *input, const QVideoSurfaceFormat
return *input;
}
- if (input->pixelFormat() == QVideoFrame::Format_YUV420P
- || input->pixelFormat() == QVideoFrame::Format_YV12) {
+ if (input->pixelFormat() == QVideoSurfaceFormat::Format_YUV420P
+ || input->pixelFormat() == QVideoSurfaceFormat::Format_YV12) {
qWarning("YUV data is not supported");
return *input;
}
diff --git a/examples/multimedia/video/qmlvideofilter_opencl/rgbframehelper.h b/examples/multimedia/video/qmlvideofilter_opencl/rgbframehelper.h
index ea82b267e..79e577a40 100644
--- a/examples/multimedia/video/qmlvideofilter_opencl/rgbframehelper.h
+++ b/examples/multimedia/video/qmlvideofilter_opencl/rgbframehelper.h
@@ -129,7 +129,7 @@ private:
QVideoFrame::imageFormatFromPixelFormat() to get a suitable format. Ownership is not
altered, the new QVideoFrame will not destroy the texture.
*/
-QVideoFrame frameFromTexture(uint textureId, const QSize &size, QVideoFrame::PixelFormat format)
+QVideoFrame frameFromTexture(uint textureId, const QSize &size, QVideoSurfaceFormat::PixelFormat format)
{
#ifndef QT_NO_OPENGL
return QVideoFrame(new TextureBuffer(textureId), size, format);
diff --git a/examples/multimediawidgets/player/histogramwidget.cpp b/examples/multimediawidgets/player/histogramwidget.cpp
index 78a666c6d..fadb32656 100644
--- a/examples/multimediawidgets/player/histogramwidget.cpp
+++ b/examples/multimediawidgets/player/histogramwidget.cpp
@@ -211,8 +211,8 @@ void FrameProcessor::processFrame(QVideoFrame frame, int levels)
if (!frame.map(QVideoFrame::ReadOnly))
break;
- if (frame.pixelFormat() == QVideoFrame::Format_YUV420P ||
- frame.pixelFormat() == QVideoFrame::Format_NV12) {
+ if (frame.pixelFormat() == QVideoSurfaceFormat::Format_YUV420P ||
+ frame.pixelFormat() == QVideoSurfaceFormat::Format_NV12) {
// Process YUV data
uchar *b = frame.bits();
for (int y = 0; y < frame.height(); ++y) {
@@ -222,7 +222,7 @@ void FrameProcessor::processFrame(QVideoFrame frame, int levels)
b += frame.bytesPerLine();
}
} else {
- QImage::Format imageFormat = QVideoFrame::imageFormatFromPixelFormat(frame.pixelFormat());
+ QImage::Format imageFormat = QVideoSurfaceFormat::imageFormatFromPixelFormat(frame.pixelFormat());
if (imageFormat != QImage::Format_Invalid) {
// Process RGB data
QImage image(frame.bits(), frame.width(), frame.height(), imageFormat);
diff --git a/src/multimedia/camera/qcameraimagecapture.cpp b/src/multimedia/camera/qcameraimagecapture.cpp
index 0682729c8..7569a35dc 100644
--- a/src/multimedia/camera/qcameraimagecapture.cpp
+++ b/src/multimedia/camera/qcameraimagecapture.cpp
@@ -342,7 +342,7 @@ int QCameraImageCapture::captureToBuffer()
*/
/*!
- \fn QCameraImageCapture::bufferFormatChanged(QVideoFrame::PixelFormat format)
+ \fn QCameraImageCapture::bufferFormatChanged(QVideoSurfaceFormat::PixelFormat format)
Signal emitted when the buffer \a format for the buffer image capture has changed.
*/
diff --git a/src/multimedia/camera/qcamerainfo.cpp b/src/multimedia/camera/qcamerainfo.cpp
index 9c0de17ee..ca8ab59be 100644
--- a/src/multimedia/camera/qcamerainfo.cpp
+++ b/src/multimedia/camera/qcamerainfo.cpp
@@ -49,7 +49,7 @@ QCameraFormat &QCameraFormat::operator=(const QCameraFormat &other) = default;
QCameraFormat::~QCameraFormat() = default;
-QVideoFrame::PixelFormat QCameraFormat::pixelFormat() const
+QVideoSurfaceFormat::PixelFormat QCameraFormat::pixelFormat() const
{
return d->pixelFormat;
}
diff --git a/src/multimedia/camera/qcamerainfo.h b/src/multimedia/camera/qcamerainfo.h
index afe9965a6..eab517e30 100644
--- a/src/multimedia/camera/qcamerainfo.h
+++ b/src/multimedia/camera/qcamerainfo.h
@@ -54,7 +54,7 @@ public:
QCameraFormat &operator=(const QCameraFormat &other);
~QCameraFormat();
- QVideoFrame::PixelFormat pixelFormat() const;
+ QVideoSurfaceFormat::PixelFormat pixelFormat() const;
QSize resolution() const;
float minFrameRate() const;
float maxFrameRate() const;
diff --git a/src/multimedia/camera/qcamerainfo_p.h b/src/multimedia/camera/qcamerainfo_p.h
index 710bd0971..8d81e46a1 100644
--- a/src/multimedia/camera/qcamerainfo_p.h
+++ b/src/multimedia/camera/qcamerainfo_p.h
@@ -59,7 +59,7 @@ QT_BEGIN_NAMESPACE
class QCameraFormatPrivate : public QSharedData
{
public:
- QVideoFrame::PixelFormat pixelFormat;
+ QVideoSurfaceFormat::PixelFormat pixelFormat;
QSize resolution;
float minFrameRate = 0;
float maxFrameRate = 0;
diff --git a/src/multimedia/doc/snippets/multimedia-snippets/camera.cpp b/src/multimedia/doc/snippets/multimedia-snippets/camera.cpp
index 85b2072dd..7f72e2e21 100644
--- a/src/multimedia/doc/snippets/multimedia-snippets/camera.cpp
+++ b/src/multimedia/doc/snippets/multimedia-snippets/camera.cpp
@@ -87,10 +87,10 @@ void overview_camera_by_position()
// -.-
class MyVideoSurface : public QAbstractVideoSurface
{
- QList<QVideoFrame::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType handleType) const
+ QList<QVideoSurfaceFormat::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType handleType) const
{
Q_UNUSED(handleType);
- return QList<QVideoFrame::PixelFormat>();
+ return QList<QVideoSurfaceFormat::PixelFormat>();
}
bool present(const QVideoFrame &frame)
{
diff --git a/src/multimedia/doc/snippets/multimedia-snippets/media.cpp b/src/multimedia/doc/snippets/multimedia-snippets/media.cpp
index 2f10a5e58..d4832bfa9 100644
--- a/src/multimedia/doc/snippets/multimedia-snippets/media.cpp
+++ b/src/multimedia/doc/snippets/multimedia-snippets/media.cpp
@@ -174,10 +174,10 @@ void MediaExample::MediaPlayer()
{
public:
Surface(QObject *p) : QAbstractVideoSurface(p) { }
- QList<QVideoFrame::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType) const override
+ QList<QVideoSurfaceFormat::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType) const override
{
// Make sure that the driver supports this pixel format.
- return QList<QVideoFrame::PixelFormat>() << QVideoFrame::Format_YUYV;
+ return QList<QVideoSurfaceFormat::PixelFormat>() << QVideoSurfaceFormat::Format_YUYV;
}
// Video frames are handled here.
diff --git a/src/multimedia/doc/snippets/multimedia-snippets/video.cpp b/src/multimedia/doc/snippets/multimedia-snippets/video.cpp
index 561a852b5..1b877ba0b 100644
--- a/src/multimedia/doc/snippets/multimedia-snippets/video.cpp
+++ b/src/multimedia/doc/snippets/multimedia-snippets/video.cpp
@@ -52,13 +52,13 @@
//! [Derived Surface]
class MyVideoSurface : public QAbstractVideoSurface
{
- QList<QVideoFrame::PixelFormat> supportedPixelFormats(
+ QList<QVideoSurfaceFormat::PixelFormat> supportedPixelFormats(
QVideoFrame::HandleType handleType = QVideoFrame::NoHandle) const
{
Q_UNUSED(handleType);
// Return the formats you will support
- return QList<QVideoFrame::PixelFormat>() << QVideoFrame::Format_RGB565;
+ return QList<QVideoSurfaceFormat::PixelFormat>() << QVideoSurfaceFormat::Format_RGB565;
}
bool present(const QVideoFrame &frame)
@@ -156,7 +156,7 @@ void VideoExample::VideoSurface()
{
//! [Widget Surface]
QImage img = QImage("images/qt-logo.png").convertToFormat(QImage::Format_ARGB32);
- QVideoSurfaceFormat format(img.size(), QVideoFrame::Format_ARGB32);
+ QVideoSurfaceFormat format(img.size(), QVideoSurfaceFormat::Format_ARGB32);
videoWidget = new QVideoWidget;
videoWidget->videoSurface()->start(format);
videoWidget->videoSurface()->present(img);
@@ -168,7 +168,7 @@ void VideoExample::VideoSurface()
graphicsView->scene()->addItem(item);
graphicsView->show();
QImage img = QImage("images/qt-logo.png").convertToFormat(QImage::Format_ARGB32);
- QVideoSurfaceFormat format(img.size(), QVideoFrame::Format_ARGB32);
+ QVideoSurfaceFormat format(img.size(), QVideoSurfaceFormat::Format_ARGB32);
item->videoSurface()->start(format);
item->videoSurface()->present(img);
//! [GraphicsVideoItem Surface]
diff --git a/src/multimedia/platform/android/common/qandroidmultimediautils.cpp b/src/multimedia/platform/android/common/qandroidmultimediautils.cpp
index 850b3d7ea..395cea43a 100644
--- a/src/multimedia/platform/android/common/qandroidmultimediautils.cpp
+++ b/src/multimedia/platform/android/common/qandroidmultimediautils.cpp
@@ -77,36 +77,36 @@ bool qt_sizeLessThan(const QSize &s1, const QSize &s2)
return s1.width() * s1.height() < s2.width() * s2.height();
}
-QVideoFrame::PixelFormat qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat f)
+QVideoSurfaceFormat::PixelFormat qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat f)
{
switch (f) {
case AndroidCamera::NV21:
- return QVideoFrame::Format_NV21;
+ return QVideoSurfaceFormat::Format_NV21;
case AndroidCamera::YV12:
- return QVideoFrame::Format_YV12;
+ return QVideoSurfaceFormat::Format_YV12;
case AndroidCamera::RGB565:
- return QVideoFrame::Format_RGB565;
+ return QVideoSurfaceFormat::Format_RGB565;
case AndroidCamera::YUY2:
- return QVideoFrame::Format_YUYV;
+ return QVideoSurfaceFormat::Format_YUYV;
case AndroidCamera::JPEG:
- return QVideoFrame::Format_Jpeg;
+ return QVideoSurfaceFormat::Format_Jpeg;
default:
- return QVideoFrame::Format_Invalid;
+ return QVideoSurfaceFormat::Format_Invalid;
}
}
-AndroidCamera::ImageFormat qt_androidImageFormatFromPixelFormat(QVideoFrame::PixelFormat f)
+AndroidCamera::ImageFormat qt_androidImageFormatFromPixelFormat(QVideoSurfaceFormat::PixelFormat f)
{
switch (f) {
- case QVideoFrame::Format_NV21:
+ case QVideoSurfaceFormat::Format_NV21:
return AndroidCamera::NV21;
- case QVideoFrame::Format_YV12:
+ case QVideoSurfaceFormat::Format_YV12:
return AndroidCamera::YV12;
- case QVideoFrame::Format_RGB565:
+ case QVideoSurfaceFormat::Format_RGB565:
return AndroidCamera::RGB565;
- case QVideoFrame::Format_YUYV:
+ case QVideoSurfaceFormat::Format_YUYV:
return AndroidCamera::YUY2;
- case QVideoFrame::Format_Jpeg:
+ case QVideoSurfaceFormat::Format_Jpeg:
return AndroidCamera::JPEG;
default:
return AndroidCamera::UnknownImageFormat;
diff --git a/src/multimedia/platform/android/common/qandroidmultimediautils_p.h b/src/multimedia/platform/android/common/qandroidmultimediautils_p.h
index 205244eb5..af91de75d 100644
--- a/src/multimedia/platform/android/common/qandroidmultimediautils_p.h
+++ b/src/multimedia/platform/android/common/qandroidmultimediautils_p.h
@@ -63,8 +63,8 @@ int qt_findClosestValue(const QList<int> &list, int value);
bool qt_sizeLessThan(const QSize &s1, const QSize &s2);
-QVideoFrame::PixelFormat qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat f);
-AndroidCamera::ImageFormat qt_androidImageFormatFromPixelFormat(QVideoFrame::PixelFormat f);
+QVideoSurfaceFormat::PixelFormat qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat f);
+AndroidCamera::ImageFormat qt_androidImageFormatFromPixelFormat(QVideoSurfaceFormat::PixelFormat f);
bool qt_androidRequestCameraPermission();
bool qt_androidRequestRecordingPermission();
diff --git a/src/multimedia/platform/android/common/qandroidvideooutput.cpp b/src/multimedia/platform/android/common/qandroidvideooutput.cpp
index 266b651eb..909a5b573 100644
--- a/src/multimedia/platform/android/common/qandroidvideooutput.cpp
+++ b/src/multimedia/platform/android/common/qandroidvideooutput.cpp
@@ -323,7 +323,7 @@ void QAndroidTextureVideoOutput::onFrameAvailable()
return;
QAbstractVideoBuffer *buffer = new AndroidTextureVideoBuffer(this, m_nativeSize);
- QVideoFrame frame(buffer, m_nativeSize, QVideoFrame::Format_ABGR32);
+ QVideoFrame frame(buffer, m_nativeSize, QVideoSurfaceFormat::Format_ABGR32);
if (m_surface->isActive() && (m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat()
|| m_surface->surfaceFormat().frameSize() != frame.size())) {
diff --git a/src/multimedia/platform/android/mediacapture/qandroidcamerasession.cpp b/src/multimedia/platform/android/mediacapture/qandroidcamerasession.cpp
index f42899aae..73ff2043e 100644
--- a/src/multimedia/platform/android/mediacapture/qandroidcamerasession.cpp
+++ b/src/multimedia/platform/android/mediacapture/qandroidcamerasession.cpp
@@ -336,9 +336,9 @@ QList<QSize> QAndroidCameraSession::getSupportedPreviewSizes() const
return m_camera ? m_camera->getSupportedPreviewSizes() : QList<QSize>();
}
-QList<QVideoFrame::PixelFormat> QAndroidCameraSession::getSupportedPixelFormats() const
+QList<QVideoSurfaceFormat::PixelFormat> QAndroidCameraSession::getSupportedPixelFormats() const
{
- QList<QVideoFrame::PixelFormat> formats;
+ QList<QVideoSurfaceFormat::PixelFormat> formats;
if (!m_camera)
return formats;
@@ -348,8 +348,8 @@ QList<QVideoFrame::PixelFormat> QAndroidCameraSession::getSupportedPixelFormats(
formats.reserve(nativeFormats.size());
for (AndroidCamera::ImageFormat nativeFormat : nativeFormats) {
- QVideoFrame::PixelFormat format = QtPixelFormatFromAndroidImageFormat(nativeFormat);
- if (format != QVideoFrame::Format_Invalid)
+ QVideoSurfaceFormat::PixelFormat format = QtPixelFormatFromAndroidImageFormat(nativeFormat);
+ if (format != QVideoSurfaceFormat::Format_Invalid)
formats.append(format);
}
@@ -364,12 +364,12 @@ QList<AndroidCamera::FpsRange> QAndroidCameraSession::getSupportedPreviewFpsRang
struct NullSurface : QAbstractVideoSurface
{
NullSurface(QObject *parent = nullptr) : QAbstractVideoSurface(parent) { }
- QList<QVideoFrame::PixelFormat> supportedPixelFormats(
+ QList<QVideoSurfaceFormat::PixelFormat> supportedPixelFormats(
QVideoFrame::HandleType type = QVideoFrame::NoHandle) const override
{
- QList<QVideoFrame::PixelFormat> result;
+ QList<QVideoSurfaceFormat::PixelFormat> result;
if (type == QVideoFrame::NoHandle)
- result << QVideoFrame::Format_NV21;
+ result << QVideoSurfaceFormat::Format_NV21;
return result;
}
@@ -720,41 +720,41 @@ void QAndroidCameraSession::processCapturedImage(int id,
emit imageCaptureError(id, QCameraImageCapture::ResourceError, errorMessage);
}
} else {
- QVideoFrame frame(new QMemoryVideoBuffer(data, -1), resolution, QVideoFrame::Format_Jpeg);
+ QVideoFrame frame(new QMemoryVideoBuffer(data, -1), resolution, QVideoSurfaceFormat::Format_Jpeg);
emit imageAvailable(id, frame);
}
}
-QVideoFrame::PixelFormat QAndroidCameraSession::QtPixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat format)
+QVideoSurfaceFormat::PixelFormat QAndroidCameraSession::QtPixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat format)
{
switch (format) {
case AndroidCamera::RGB565:
- return QVideoFrame::Format_RGB565;
+ return QVideoSurfaceFormat::Format_RGB565;
case AndroidCamera::NV21:
- return QVideoFrame::Format_NV21;
+ return QVideoSurfaceFormat::Format_NV21;
case AndroidCamera::YUY2:
- return QVideoFrame::Format_YUYV;
+ return QVideoSurfaceFormat::Format_YUYV;
case AndroidCamera::JPEG:
- return QVideoFrame::Format_Jpeg;
+ return QVideoSurfaceFormat::Format_Jpeg;
case AndroidCamera::YV12:
- return QVideoFrame::Format_YV12;
+ return QVideoSurfaceFormat::Format_YV12;
default:
- return QVideoFrame::Format_Invalid;
+ return QVideoSurfaceFormat::Format_Invalid;
}
}
-AndroidCamera::ImageFormat QAndroidCameraSession::AndroidImageFormatFromQtPixelFormat(QVideoFrame::PixelFormat format)
+AndroidCamera::ImageFormat QAndroidCameraSession::AndroidImageFormatFromQtPixelFormat(QVideoSurfaceFormat::PixelFormat format)
{
switch (format) {
- case QVideoFrame::Format_RGB565:
+ case QVideoSurfaceFormat::Format_RGB565:
return AndroidCamera::RGB565;
- case QVideoFrame::Format_NV21:
+ case QVideoSurfaceFormat::Format_NV21:
return AndroidCamera::NV21;
- case QVideoFrame::Format_YUYV:
+ case QVideoSurfaceFormat::Format_YUYV:
return AndroidCamera::YUY2;
- case QVideoFrame::Format_Jpeg:
+ case QVideoSurfaceFormat::Format_Jpeg:
return AndroidCamera::JPEG;
- case QVideoFrame::Format_YV12:
+ case QVideoSurfaceFormat::Format_YV12:
return AndroidCamera::YV12;
default:
return AndroidCamera::UnknownImageFormat;
diff --git a/src/multimedia/platform/android/mediacapture/qandroidcamerasession_p.h b/src/multimedia/platform/android/mediacapture/qandroidcamerasession_p.h
index bb7a82cdd..a444915ec 100644
--- a/src/multimedia/platform/android/mediacapture/qandroidcamerasession_p.h
+++ b/src/multimedia/platform/android/mediacapture/qandroidcamerasession_p.h
@@ -91,7 +91,7 @@ public:
void setVideoOutput(QAndroidVideoOutput *output);
QList<QSize> getSupportedPreviewSizes() const;
- QList<QVideoFrame::PixelFormat> getSupportedPixelFormats() const;
+ QList<QVideoSurfaceFormat::PixelFormat> getSupportedPixelFormats() const;
QList<AndroidCamera::FpsRange> getSupportedPreviewFpsRange() const;
QImageEncoderSettings imageSettings() const { return m_actualImageSettings; }
@@ -164,8 +164,8 @@ private:
bool captureToBuffer,
const QString &fileName);
- static QVideoFrame::PixelFormat QtPixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat);
- static AndroidCamera::ImageFormat AndroidImageFormatFromQtPixelFormat(QVideoFrame::PixelFormat);
+ static QVideoSurfaceFormat::PixelFormat QtPixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat);
+ static AndroidCamera::ImageFormat AndroidImageFormatFromQtPixelFormat(QVideoSurfaceFormat::PixelFormat);
void setActiveHelper(bool active);
diff --git a/src/multimedia/platform/android/mediacapture/qandroidcameravideorenderercontrol.cpp b/src/multimedia/platform/android/mediacapture/qandroidcameravideorenderercontrol.cpp
index 7cb397d13..4e88a1337 100644
--- a/src/multimedia/platform/android/mediacapture/qandroidcameravideorenderercontrol.cpp
+++ b/src/multimedia/platform/android/mediacapture/qandroidcameravideorenderercontrol.cpp
@@ -76,14 +76,14 @@ private:
QAndroidCameraVideoRendererControl *m_control;
AndroidSurfaceView *m_surfaceView;
QMutex m_mutex;
- QVideoFrame::PixelFormat m_pixelFormat;
+ QVideoSurfaceFormat::PixelFormat m_pixelFormat;
QVideoFrame m_lastFrame;
};
QAndroidCameraDataVideoOutput::QAndroidCameraDataVideoOutput(QAndroidCameraVideoRendererControl *control)
: QAndroidVideoOutput(control)
, m_control(control)
- , m_pixelFormat(QVideoFrame::Format_Invalid)
+ , m_pixelFormat(QVideoSurfaceFormat::Format_Invalid)
{
// The camera preview cannot be started unless we set a SurfaceTexture or a
// SurfaceHolder. In this case we don't actually care about either of these, but since
@@ -129,15 +129,15 @@ void QAndroidCameraDataVideoOutput::onSurfaceCreated()
void QAndroidCameraDataVideoOutput::configureFormat()
{
- m_pixelFormat = QVideoFrame::Format_Invalid;
+ m_pixelFormat = QVideoSurfaceFormat::Format_Invalid;
if (!m_control->cameraSession()->camera())
return;
- QList<QVideoFrame::PixelFormat> surfaceFormats = m_control->surface()->supportedPixelFormats();
+ QList<QVideoSurfaceFormat::PixelFormat> surfaceFormats = m_control->surface()->supportedPixelFormats();
QList<AndroidCamera::ImageFormat> previewFormats = m_control->cameraSession()->camera()->getSupportedPreviewFormats();
for (int i = 0; i < surfaceFormats.size(); ++i) {
- QVideoFrame::PixelFormat pixFormat = surfaceFormats.at(i);
+ QVideoSurfaceFormat::PixelFormat pixFormat = surfaceFormats.at(i);
AndroidCamera::ImageFormat f = qt_androidImageFormatFromPixelFormat(pixFormat);
if (previewFormats.contains(f)) {
m_pixelFormat = pixFormat;
@@ -145,7 +145,7 @@ void QAndroidCameraDataVideoOutput::configureFormat()
}
}
- if (m_pixelFormat == QVideoFrame::Format_Invalid) {
+ if (m_pixelFormat == QVideoSurfaceFormat::Format_Invalid) {
m_control->cameraSession()->setPreviewCallback(nullptr);
qWarning("The video surface is not compatible with any format supported by the camera");
} else {
diff --git a/src/multimedia/platform/darwin/camera/avfcamera.mm b/src/multimedia/platform/darwin/camera/avfcamera.mm
index 98d2b4ded..2acf529f5 100644
--- a/src/multimedia/platform/darwin/camera/avfcamera.mm
+++ b/src/multimedia/platform/darwin/camera/avfcamera.mm
@@ -115,64 +115,64 @@ void AVFCamera::updateStatus()
}
}
-QVideoFrame::PixelFormat AVFCamera::QtPixelFormatFromCVFormat(unsigned avPixelFormat)
+QVideoSurfaceFormat::PixelFormat AVFCamera::QtPixelFormatFromCVFormat(unsigned avPixelFormat)
{
// BGRA <-> ARGB "swap" is intentional:
// to work correctly with GL_RGBA, color swap shaders
// (in QSG node renderer etc.).
switch (avPixelFormat) {
case kCVPixelFormatType_32ARGB:
- return QVideoFrame::Format_BGRA32;
+ return QVideoSurfaceFormat::Format_BGRA32;
case kCVPixelFormatType_32BGRA:
- return QVideoFrame::Format_ARGB32;
+ return QVideoSurfaceFormat::Format_ARGB32;
case kCVPixelFormatType_24RGB:
- return QVideoFrame::Format_RGB24;
+ return QVideoSurfaceFormat::Format_RGB24;
case kCVPixelFormatType_24BGR:
- return QVideoFrame::Format_BGR24;
+ return QVideoSurfaceFormat::Format_BGR24;
case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
- return QVideoFrame::Format_NV12;
+ return QVideoSurfaceFormat::Format_NV12;
case kCVPixelFormatType_422YpCbCr8:
- return QVideoFrame::Format_UYVY;
+ return QVideoSurfaceFormat::Format_UYVY;
case kCVPixelFormatType_422YpCbCr8_yuvs:
- return QVideoFrame::Format_YUYV;
+ return QVideoSurfaceFormat::Format_YUYV;
case kCMVideoCodecType_JPEG:
case kCMVideoCodecType_JPEG_OpenDML:
- return QVideoFrame::Format_Jpeg;
+ return QVideoSurfaceFormat::Format_Jpeg;
default:
- return QVideoFrame::Format_Invalid;
+ return QVideoSurfaceFormat::Format_Invalid;
}
}
-bool AVFCamera::CVPixelFormatFromQtFormat(QVideoFrame::PixelFormat qtFormat, unsigned &conv)
+bool AVFCamera::CVPixelFormatFromQtFormat(QVideoSurfaceFormat::PixelFormat qtFormat, unsigned &conv)
{
// BGRA <-> ARGB "swap" is intentional:
// to work correctly with GL_RGBA, color swap shaders
// (in QSG node renderer etc.).
switch (qtFormat) {
- case QVideoFrame::Format_ARGB32:
+ case QVideoSurfaceFormat::Format_ARGB32:
conv = kCVPixelFormatType_32BGRA;
break;
- case QVideoFrame::Format_BGRA32:
+ case QVideoSurfaceFormat::Format_BGRA32:
conv = kCVPixelFormatType_32ARGB;
break;
- case QVideoFrame::Format_NV12:
+ case QVideoSurfaceFormat::Format_NV12:
conv = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
break;
- case QVideoFrame::Format_UYVY:
+ case QVideoSurfaceFormat::Format_UYVY:
conv = kCVPixelFormatType_422YpCbCr8;
break;
- case QVideoFrame::Format_YUYV:
+ case QVideoSurfaceFormat::Format_YUYV:
conv = kCVPixelFormatType_422YpCbCr8_yuvs;
break;
// These two formats below are not supported
// by QSGVideoNodeFactory_RGB, so for now I have to
// disable them.
/*
- case QVideoFrame::Format_RGB24:
+ case QVideoSurfaceFormat::Format_RGB24:
conv = kCVPixelFormatType_24RGB;
break;
- case QVideoFrame::Format_BGR24:
+ case QVideoSurfaceFormat::Format_BGR24:
conv = kCVPixelFormatType_24BGR;
break;
*/
diff --git a/src/multimedia/platform/darwin/camera/avfcamera_p.h b/src/multimedia/platform/darwin/camera/avfcamera_p.h
index ae8cb3292..9d4c69471 100644
--- a/src/multimedia/platform/darwin/camera/avfcamera_p.h
+++ b/src/multimedia/platform/darwin/camera/avfcamera_p.h
@@ -85,8 +85,8 @@ public:
QPlatformCameraImageProcessing *imageProcessingControl() override;
// "Converters":
- static QVideoFrame::PixelFormat QtPixelFormatFromCVFormat(unsigned avPixelFormat);
- static bool CVPixelFormatFromQtFormat(QVideoFrame::PixelFormat qtFormat, unsigned &conv);
+ static QVideoSurfaceFormat::PixelFormat QtPixelFormatFromCVFormat(unsigned avPixelFormat);
+ static bool CVPixelFormatFromQtFormat(QVideoSurfaceFormat::PixelFormat qtFormat, unsigned &conv);
AVCaptureConnection *videoConnection() const;
diff --git a/src/multimedia/platform/darwin/camera/avfcameraimagecapture.mm b/src/multimedia/platform/darwin/camera/avfcameraimagecapture.mm
index 94e2841b8..a6fb7bbe6 100644
--- a/src/multimedia/platform/darwin/camera/avfcameraimagecapture.mm
+++ b/src/multimedia/platform/darwin/camera/avfcameraimagecapture.mm
@@ -153,7 +153,7 @@ int AVFCameraImageCapture::doCapture(const QString &actualFileName)
QBuffer data(&jpgData);
QImageReader reader(&data, "JPEG");
QSize size = reader.size();
- QVideoFrame frame(new QMemoryVideoBuffer(QByteArray(jpgData.constData(), jpgData.size()), -1), size, QVideoFrame::Format_Jpeg);
+ QVideoFrame frame(new QMemoryVideoBuffer(QByteArray(jpgData.constData(), jpgData.size()), -1), size, QVideoSurfaceFormat::Format_Jpeg);
QMetaObject::invokeMethod(this, "imageAvailable", Qt::QueuedConnection,
Q_ARG(int, request.captureId),
Q_ARG(QVideoFrame, frame));
diff --git a/src/multimedia/platform/darwin/camera/avfcamerarenderer.mm b/src/multimedia/platform/darwin/camera/avfcamerarenderer.mm
index 52032e600..429ca1e81 100644
--- a/src/multimedia/platform/darwin/camera/avfcamerarenderer.mm
+++ b/src/multimedia/platform/darwin/camera/avfcamerarenderer.mm
@@ -251,9 +251,10 @@ private:
int width = CVPixelBufferGetWidth(imageBuffer);
int height = CVPixelBufferGetHeight(imageBuffer);
- QVideoFrame::PixelFormat format =
+ QVideoSurfaceFormat::PixelFormat format =
AVFCamera::QtPixelFormatFromCVFormat(CVPixelBufferGetPixelFormatType(imageBuffer));
- if (format == QVideoFrame::Format_Invalid)
+ if (format == QVideoSurfaceFormat::Format_Invalid)
+avfcamerarenderercontrol.mm
return;
QVideoFrame frame(new CVImageVideoBuffer(imageBuffer, m_renderer),
diff --git a/src/multimedia/platform/darwin/mediaplayer/avfvideorenderercontrol.mm b/src/multimedia/platform/darwin/mediaplayer/avfvideorenderercontrol.mm
index d0fcfe00a..da5f56875 100644
--- a/src/multimedia/platform/darwin/mediaplayer/avfvideorenderercontrol.mm
+++ b/src/multimedia/platform/darwin/mediaplayer/avfvideorenderercontrol.mm
@@ -160,7 +160,7 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
return;
auto buffer = new TextureVideoBuffer(QVideoFrame::MTLTextureHandle, tex);
- frame = QVideoFrame(buffer, nativeSize(), QVideoFrame::Format_BGR32);
+ frame = QVideoFrame(buffer, nativeSize(), QVideoSurfaceFormat::Format_BGR32);
if (!frame.isValid())
return;
@@ -178,7 +178,7 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
return;
QAbstractVideoBuffer *buffer = new TextureVideoBuffer(QVideoFrame::GLTextureHandle, tex);
- frame = QVideoFrame(buffer, nativeSize(), QVideoFrame::Format_BGR32);
+ frame = QVideoFrame(buffer, nativeSize(), QVideoSurfaceFormat::Format_BGR32);
if (!frame.isValid())
return;
@@ -197,7 +197,7 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
return;
QAbstractVideoBuffer *buffer = new QImageVideoBuffer(frameData);
- frame = QVideoFrame(buffer, nativeSize(), QVideoFrame::Format_ARGB32_Premultiplied);
+ frame = QVideoFrame(buffer, nativeSize(), QVideoSurfaceFormat::Format_ARGB32_Premultiplied);
QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), QVideoFrame::NoHandle);
}
diff --git a/src/multimedia/platform/darwin/qdarwindevicemanager.mm b/src/multimedia/platform/darwin/qdarwindevicemanager.mm
index 46c73d629..8ec79355c 100644
--- a/src/multimedia/platform/darwin/qdarwindevicemanager.mm
+++ b/src/multimedia/platform/darwin/qdarwindevicemanager.mm
@@ -266,7 +266,7 @@ void QDarwinDeviceManager::updateCameraDevices()
auto encoding = CMVideoFormatDescriptionGetCodecType(format.formatDescription);
auto pixelFormat = AVFCamera::QtPixelFormatFromCVFormat(encoding);
// Ignore pixel formats we can't handle
- if (pixelFormat == QVideoFrame::Format_Invalid)
+ if (pixelFormat == QVideoSurfaceFormat::Format_Invalid)
continue;
for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) {
diff --git a/src/multimedia/platform/gstreamer/common/qgst_p.h b/src/multimedia/platform/gstreamer/common/qgst_p.h
index b4c6ee732..d4ee13cd9 100644
--- a/src/multimedia/platform/gstreamer/common/qgst_p.h
+++ b/src/multimedia/platform/gstreamer/common/qgst_p.h
@@ -175,7 +175,7 @@ public:
}
Q_MULTIMEDIA_EXPORT QSize resolution() const;
- Q_MULTIMEDIA_EXPORT QVideoFrame::PixelFormat pixelFormat() const;
+ Q_MULTIMEDIA_EXPORT QVideoSurfaceFormat::PixelFormat pixelFormat() const;
Q_MULTIMEDIA_EXPORT QGRange<float> frameRateRange() const;
QByteArray toString() const { return gst_structure_to_string(structure); }
diff --git a/src/multimedia/platform/gstreamer/common/qgstutils.cpp b/src/multimedia/platform/gstreamer/common/qgstutils.cpp
index 49d5a527c..be121bdcd 100644
--- a/src/multimedia/platform/gstreamer/common/qgstutils.cpp
+++ b/src/multimedia/platform/gstreamer/common/qgstutils.cpp
@@ -165,49 +165,49 @@ namespace {
struct VideoFormat
{
- QVideoFrame::PixelFormat pixelFormat;
+ QVideoSurfaceFormat::PixelFormat pixelFormat;
GstVideoFormat gstFormat;
};
static const VideoFormat qt_videoFormatLookup[] =
{
- { QVideoFrame::Format_YUV420P, GST_VIDEO_FORMAT_I420 },
- { QVideoFrame::Format_YUV422P, GST_VIDEO_FORMAT_Y42B },
- { QVideoFrame::Format_YV12 , GST_VIDEO_FORMAT_YV12 },
- { QVideoFrame::Format_UYVY , GST_VIDEO_FORMAT_UYVY },
- { QVideoFrame::Format_YUYV , GST_VIDEO_FORMAT_YUY2 },
- { QVideoFrame::Format_NV12 , GST_VIDEO_FORMAT_NV12 },
- { QVideoFrame::Format_NV21 , GST_VIDEO_FORMAT_NV21 },
- { QVideoFrame::Format_AYUV444, GST_VIDEO_FORMAT_AYUV },
- { QVideoFrame::Format_YUV444, GST_VIDEO_FORMAT_Y444 },
- { QVideoFrame::Format_P010LE , GST_VIDEO_FORMAT_P010_10LE },
- { QVideoFrame::Format_P010BE , GST_VIDEO_FORMAT_P010_10BE },
- { QVideoFrame::Format_Y8 , GST_VIDEO_FORMAT_GRAY8 },
+ { QVideoSurfaceFormat::Format_YUV420P, GST_VIDEO_FORMAT_I420 },
+ { QVideoSurfaceFormat::Format_YUV422P, GST_VIDEO_FORMAT_Y42B },
+ { QVideoSurfaceFormat::Format_YV12 , GST_VIDEO_FORMAT_YV12 },
+ { QVideoSurfaceFormat::Format_UYVY , GST_VIDEO_FORMAT_UYVY },
+ { QVideoSurfaceFormat::Format_YUYV , GST_VIDEO_FORMAT_YUY2 },
+ { QVideoSurfaceFormat::Format_NV12 , GST_VIDEO_FORMAT_NV12 },
+ { QVideoSurfaceFormat::Format_NV21 , GST_VIDEO_FORMAT_NV21 },
+ { QVideoSurfaceFormat::Format_AYUV444, GST_VIDEO_FORMAT_AYUV },
+ { QVideoSurfaceFormat::Format_YUV444, GST_VIDEO_FORMAT_Y444 },
+ { QVideoSurfaceFormat::Format_P010LE , GST_VIDEO_FORMAT_P010_10LE },
+ { QVideoSurfaceFormat::Format_P010BE , GST_VIDEO_FORMAT_P010_10BE },
+ { QVideoSurfaceFormat::Format_Y8 , GST_VIDEO_FORMAT_GRAY8 },
#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN
- { QVideoFrame::Format_RGB32 , GST_VIDEO_FORMAT_BGRx },
- { QVideoFrame::Format_BGR32 , GST_VIDEO_FORMAT_RGBx },
- { QVideoFrame::Format_ARGB32, GST_VIDEO_FORMAT_BGRA },
- { QVideoFrame::Format_ABGR32, GST_VIDEO_FORMAT_RGBA },
- { QVideoFrame::Format_BGRA32, GST_VIDEO_FORMAT_ARGB },
- { QVideoFrame::Format_RGB555 , GST_VIDEO_FORMAT_BGR15 },
- { QVideoFrame::Format_BGR555 , GST_VIDEO_FORMAT_RGB15 },
- { QVideoFrame::Format_Y16 , GST_VIDEO_FORMAT_GRAY16_LE },
+ { QVideoSurfaceFormat::Format_RGB32 , GST_VIDEO_FORMAT_BGRx },
+ { QVideoSurfaceFormat::Format_BGR32 , GST_VIDEO_FORMAT_RGBx },
+ { QVideoSurfaceFormat::Format_ARGB32, GST_VIDEO_FORMAT_BGRA },
+ { QVideoSurfaceFormat::Format_ABGR32, GST_VIDEO_FORMAT_RGBA },
+ { QVideoSurfaceFormat::Format_BGRA32, GST_VIDEO_FORMAT_ARGB },
+ { QVideoSurfaceFormat::Format_RGB555 , GST_VIDEO_FORMAT_BGR15 },
+ { QVideoSurfaceFormat::Format_BGR555 , GST_VIDEO_FORMAT_RGB15 },
+ { QVideoSurfaceFormat::Format_Y16 , GST_VIDEO_FORMAT_GRAY16_LE },
#else
- { QVideoFrame::Format_RGB32 , GST_VIDEO_FORMAT_xRGB },
- { QVideoFrame::Format_BGR32 , GST_VIDEO_FORMAT_xBGR },
- { QVideoFrame::Format_ARGB32, GST_VIDEO_FORMAT_ARGB },
- { QVideoFrame::Format_ABGR32, GST_VIDEO_FORMAT_ABGR },
- { QVideoFrame::Format_BGRA32, GST_VIDEO_FORMAT_BGRA },
- { QVideoFrame::Format_RGB555 , GST_VIDEO_FORMAT_RGB15 },
- { QVideoFrame::Format_BGR555 , GST_VIDEO_FORMAT_BGR15 },
- { QVideoFrame::Format_Y16 , GST_VIDEO_FORMAT_GRAY16_BE },
+ { QVideoSurfaceFormat::Format_RGB32 , GST_VIDEO_FORMAT_xRGB },
+ { QVideoSurfaceFormat::Format_BGR32 , GST_VIDEO_FORMAT_xBGR },
+ { QVideoSurfaceFormat::Format_ARGB32, GST_VIDEO_FORMAT_ARGB },
+ { QVideoSurfaceFormat::Format_ABGR32, GST_VIDEO_FORMAT_ABGR },
+ { QVideoSurfaceFormat::Format_BGRA32, GST_VIDEO_FORMAT_BGRA },
+ { QVideoSurfaceFormat::Format_RGB555 , GST_VIDEO_FORMAT_RGB15 },
+ { QVideoSurfaceFormat::Format_BGR555 , GST_VIDEO_FORMAT_BGR15 },
+ { QVideoSurfaceFormat::Format_Y16 , GST_VIDEO_FORMAT_GRAY16_BE },
#endif
- { QVideoFrame::Format_RGB24 , GST_VIDEO_FORMAT_RGB },
- { QVideoFrame::Format_BGR24 , GST_VIDEO_FORMAT_BGR },
- { QVideoFrame::Format_RGB565, GST_VIDEO_FORMAT_RGB16 }
+ { QVideoSurfaceFormat::Format_RGB24 , GST_VIDEO_FORMAT_RGB },
+ { QVideoSurfaceFormat::Format_BGR24 , GST_VIDEO_FORMAT_BGR },
+ { QVideoSurfaceFormat::Format_RGB565, GST_VIDEO_FORMAT_RGB16 }
};
-static int indexOfVideoFormat(QVideoFrame::PixelFormat format)
+static int indexOfVideoFormat(QVideoSurfaceFormat::PixelFormat format)
{
for (int i = 0; i < lengthOf(qt_videoFormatLookup); ++i)
if (qt_videoFormatLookup[i].pixelFormat == format)
@@ -250,11 +250,11 @@ QVideoSurfaceFormat QGstUtils::formatForCaps(
return QVideoSurfaceFormat();
}
-QGstMutableCaps QGstUtils::capsForFormats(const QList<QVideoFrame::PixelFormat> &formats)
+QGstMutableCaps QGstUtils::capsForFormats(const QList<QVideoSurfaceFormat::PixelFormat> &formats)
{
GstCaps *caps = gst_caps_new_empty();
- for (QVideoFrame::PixelFormat format : formats) {
+ for (QVideoSurfaceFormat::PixelFormat format : formats) {
int index = indexOfVideoFormat(format);
if (index != -1) {
@@ -303,9 +303,9 @@ QSize QGstStructure::resolution() const
return size;
}
-QVideoFrame::PixelFormat QGstStructure::pixelFormat() const
+QVideoSurfaceFormat::PixelFormat QGstStructure::pixelFormat() const
{
- QVideoFrame::PixelFormat pixelFormat = QVideoFrame::Format_Invalid;
+ QVideoSurfaceFormat::PixelFormat pixelFormat = QVideoSurfaceFormat::Format_Invalid;
if (!structure)
return pixelFormat;
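
(Sketch, not part of the patch: the lookup table above is symmetric, so the reverse mapping from GStreamer formats can follow the same pattern; indexOfGstFormat and pixelFormatFromGstFormat below are hypothetical helper names.)

// Find the table entry for a GStreamer format, mirroring indexOfVideoFormat().
static int indexOfGstFormat(GstVideoFormat format)
{
    for (int i = 0; i < lengthOf(qt_videoFormatLookup); ++i)
        if (qt_videoFormatLookup[i].gstFormat == format)
            return i;
    return -1;
}

// Map a GStreamer format back to the Qt pixel format, or Format_Invalid.
static QVideoSurfaceFormat::PixelFormat pixelFormatFromGstFormat(GstVideoFormat format)
{
    const int index = indexOfGstFormat(format);
    return index != -1 ? qt_videoFormatLookup[index].pixelFormat
                       : QVideoSurfaceFormat::Format_Invalid;
}
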
diff --git a/src/multimedia/platform/gstreamer/common/qgstutils_p.h b/src/multimedia/platform/gstreamer/common/qgstutils_p.h
index 81fd4d31c..39063b45a 100644
--- a/src/multimedia/platform/gstreamer/common/qgstutils_p.h
+++ b/src/multimedia/platform/gstreamer/common/qgstutils_p.h
@@ -78,7 +78,7 @@ namespace QGstUtils {
GstCaps *caps,
GstVideoInfo *info = 0);
- Q_MULTIMEDIA_EXPORT QGstMutableCaps capsForFormats(const QList<QVideoFrame::PixelFormat> &formats);
+ Q_MULTIMEDIA_EXPORT QGstMutableCaps capsForFormats(const QList<QVideoSurfaceFormat::PixelFormat> &formats);
void setFrameTimeStamps(QVideoFrame *frame, GstBuffer *buffer);
Q_MULTIMEDIA_EXPORT bool useOpenGL();
diff --git a/src/multimedia/platform/gstreamer/common/qgstvideorenderersink.cpp b/src/multimedia/platform/gstreamer/common/qgstvideorenderersink.cpp
index 990ab2bd4..412a482f8 100644
--- a/src/multimedia/platform/gstreamer/common/qgstvideorenderersink.cpp
+++ b/src/multimedia/platform/gstreamer/common/qgstvideorenderersink.cpp
@@ -87,30 +87,30 @@ QGstMutableCaps QGstVideoRenderer::getCaps()
#if QT_CONFIG(gstreamer_gl)
if (QGstUtils::useOpenGL()) {
m_handleType = QVideoFrame::GLTextureHandle;
- auto formats = QList<QVideoFrame::PixelFormat>()
- << QVideoFrame::Format_YUV420P
- << QVideoFrame::Format_YUV422P
- << QVideoFrame::Format_YV12
- << QVideoFrame::Format_UYVY
- << QVideoFrame::Format_YUYV
- << QVideoFrame::Format_NV12
- << QVideoFrame::Format_NV21
- << QVideoFrame::Format_AYUV444
- << QVideoFrame::Format_YUV444
-// << QVideoFrame::Format_P010LE
-// << QVideoFrame::Format_P010BE
-// << QVideoFrame::Format_Y8
- << QVideoFrame::Format_RGB32
- << QVideoFrame::Format_BGR32
- << QVideoFrame::Format_ARGB32
- << QVideoFrame::Format_ABGR32
- << QVideoFrame::Format_BGRA32
- << QVideoFrame::Format_RGB555
- << QVideoFrame::Format_BGR555
-// << QVideoFrame::Format_Y16
-// << QVideoFrame::Format_RGB24
-// << QVideoFrame::Format_BGR24
-// << QVideoFrame::Format_RGB565
+ auto formats = QList<QVideoSurfaceFormat::PixelFormat>()
+ << QVideoSurfaceFormat::Format_YUV420P
+ << QVideoSurfaceFormat::Format_YUV422P
+ << QVideoSurfaceFormat::Format_YV12
+ << QVideoSurfaceFormat::Format_UYVY
+ << QVideoSurfaceFormat::Format_YUYV
+ << QVideoSurfaceFormat::Format_NV12
+ << QVideoSurfaceFormat::Format_NV21
+ << QVideoSurfaceFormat::Format_AYUV444
+ << QVideoSurfaceFormat::Format_YUV444
+// << QVideoSurfaceFormat::Format_P010LE
+// << QVideoSurfaceFormat::Format_P010BE
+// << QVideoSurfaceFormat::Format_Y8
+ << QVideoSurfaceFormat::Format_RGB32
+ << QVideoSurfaceFormat::Format_BGR32
+ << QVideoSurfaceFormat::Format_ARGB32
+ << QVideoSurfaceFormat::Format_ABGR32
+ << QVideoSurfaceFormat::Format_BGRA32
+ << QVideoSurfaceFormat::Format_RGB555
+ << QVideoSurfaceFormat::Format_BGR555
+// << QVideoSurfaceFormat::Format_Y16
+// << QVideoSurfaceFormat::Format_RGB24
+// << QVideoSurfaceFormat::Format_BGR24
+// << QVideoSurfaceFormat::Format_RGB565
;
// Even if the surface does not support gl textures,
// glupload will be added to the pipeline and GLMemory will be requested.
@@ -126,30 +126,30 @@ QGstMutableCaps QGstVideoRenderer::getCaps()
m_handleType = QVideoFrame::NoHandle;
}
#endif
- auto formats = QList<QVideoFrame::PixelFormat>()
- << QVideoFrame::Format_YUV420P
- << QVideoFrame::Format_YUV422P
- << QVideoFrame::Format_YV12
- << QVideoFrame::Format_UYVY
- << QVideoFrame::Format_YUYV
- << QVideoFrame::Format_NV12
- << QVideoFrame::Format_NV21
- << QVideoFrame::Format_AYUV444
- << QVideoFrame::Format_YUV444
- << QVideoFrame::Format_P010LE
- << QVideoFrame::Format_P010BE
- << QVideoFrame::Format_Y8
- << QVideoFrame::Format_RGB32
- << QVideoFrame::Format_BGR32
- << QVideoFrame::Format_ARGB32
- << QVideoFrame::Format_ABGR32
- << QVideoFrame::Format_BGRA32
- << QVideoFrame::Format_RGB555
- << QVideoFrame::Format_BGR555
- << QVideoFrame::Format_Y16
- << QVideoFrame::Format_RGB24
- << QVideoFrame::Format_BGR24
- << QVideoFrame::Format_RGB565;
+ auto formats = QList<QVideoSurfaceFormat::PixelFormat>()
+ << QVideoSurfaceFormat::Format_YUV420P
+ << QVideoSurfaceFormat::Format_YUV422P
+ << QVideoSurfaceFormat::Format_YV12
+ << QVideoSurfaceFormat::Format_UYVY
+ << QVideoSurfaceFormat::Format_YUYV
+ << QVideoSurfaceFormat::Format_NV12
+ << QVideoSurfaceFormat::Format_NV21
+ << QVideoSurfaceFormat::Format_AYUV444
+ << QVideoSurfaceFormat::Format_YUV444
+ << QVideoSurfaceFormat::Format_P010LE
+ << QVideoSurfaceFormat::Format_P010BE
+ << QVideoSurfaceFormat::Format_Y8
+ << QVideoSurfaceFormat::Format_RGB32
+ << QVideoSurfaceFormat::Format_BGR32
+ << QVideoSurfaceFormat::Format_ARGB32
+ << QVideoSurfaceFormat::Format_ABGR32
+ << QVideoSurfaceFormat::Format_BGRA32
+ << QVideoSurfaceFormat::Format_RGB555
+ << QVideoSurfaceFormat::Format_BGR555
+ << QVideoSurfaceFormat::Format_Y16
+ << QVideoSurfaceFormat::Format_RGB24
+ << QVideoSurfaceFormat::Format_BGR24
+ << QVideoSurfaceFormat::Format_RGB565;
return QGstUtils::capsForFormats(formats);
}
diff --git a/src/multimedia/platform/qnx/camera/bbcamerasession.cpp b/src/multimedia/platform/qnx/camera/bbcamerasession.cpp
index 89d51ab20..109708eff 100644
--- a/src/multimedia/platform/qnx/camera/bbcamerasession.cpp
+++ b/src/multimedia/platform/qnx/camera/bbcamerasession.cpp
@@ -732,7 +732,7 @@ void BbCameraSession::viewfinderFrameGrabbed(const QImage &image)
if (m_surface) {
if (frame.size() != m_surface->surfaceFormat().frameSize()) {
m_surface->stop();
- m_surface->start(QVideoSurfaceFormat(frame.size(), QVideoFrame::Format_ARGB32));
+ m_surface->start(QVideoSurfaceFormat(frame.size(), QVideoSurfaceFormat::Format_ARGB32));
}
QVideoFrame videoFrame(frame);
@@ -850,7 +850,7 @@ bool BbCameraSession::startViewFinder()
m_surfaceMutex.lock();
if (m_surface) {
- const bool ok = m_surface->start(QVideoSurfaceFormat(rotatedSize, QVideoFrame::Format_ARGB32));
+ const bool ok = m_surface->start(QVideoSurfaceFormat(rotatedSize, QVideoSurfaceFormat::Format_ARGB32));
if (!ok)
qWarning() << "Unable to start camera viewfinder surface";
}
diff --git a/src/multimedia/platform/qnx/mediaplayer/mmrendererplayervideorenderercontrol.cpp b/src/multimedia/platform/qnx/mediaplayer/mmrendererplayervideorenderercontrol.cpp
index b44fbe64e..5862765b8 100644
--- a/src/multimedia/platform/qnx/mediaplayer/mmrendererplayervideorenderercontrol.cpp
+++ b/src/multimedia/platform/qnx/mediaplayer/mmrendererplayervideorenderercontrol.cpp
@@ -172,19 +172,19 @@ void MmRendererPlayerVideoRendererControl::updateScene(const QSize &size)
if (m_surface) {
if (!m_surface->isActive()) {
if (m_windowGrabber->eglImageSupported()) {
- m_surface->start(QVideoSurfaceFormat(size, QVideoFrame::Format_BGR32,
+ m_surface->start(QVideoSurfaceFormat(size, QVideoSurfaceFormat::Format_BGR32,
QVideoFrame::GLTextureHandle));
} else {
- m_surface->start(QVideoSurfaceFormat(size, QVideoFrame::Format_ARGB32));
+ m_surface->start(QVideoSurfaceFormat(size, QVideoSurfaceFormat::Format_ARGB32));
}
} else {
if (m_surface->surfaceFormat().frameSize() != size) {
m_surface->stop();
if (m_windowGrabber->eglImageSupported()) {
- m_surface->start(QVideoSurfaceFormat(size, QVideoFrame::Format_BGR32,
+ m_surface->start(QVideoSurfaceFormat(size, QVideoSurfaceFormat::Format_BGR32,
QVideoFrame::GLTextureHandle));
} else {
- m_surface->start(QVideoSurfaceFormat(size, QVideoFrame::Format_ARGB32));
+ m_surface->start(QVideoSurfaceFormat(size, QVideoSurfaceFormat::Format_ARGB32));
}
}
}
@@ -193,7 +193,7 @@ void MmRendererPlayerVideoRendererControl::updateScene(const QSize &size)
// handle or a copy of the image data
if (m_windowGrabber->eglImageSupported()) {
QnxTextureBuffer *textBuffer = new QnxTextureBuffer(m_windowGrabber);
- QVideoFrame actualFrame(textBuffer, size, QVideoFrame::Format_BGR32);
+ QVideoFrame actualFrame(textBuffer, size, QVideoSurfaceFormat::Format_BGR32);
m_surface->present(actualFrame);
} else {
m_surface->present(m_windowGrabber->getNextImage().copy());
diff --git a/src/multimedia/platform/windows/evr/evrcustompresenter.cpp b/src/multimedia/platform/windows/evr/evrcustompresenter.cpp
index ed398675e..f23debc81 100644
--- a/src/multimedia/platform/windows/evr/evrcustompresenter.cpp
+++ b/src/multimedia/platform/windows/evr/evrcustompresenter.cpp
@@ -67,7 +67,7 @@ static const LONG ONE_MSEC = 1000;
static HRESULT setDesiredSampleTime(IMFSample *sample, const LONGLONG& hnsSampleTime, const LONGLONG& hnsDuration);
static HRESULT clearDesiredSampleTime(IMFSample *sample);
static HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect& nrcSource);
-static QVideoFrame::PixelFormat pixelFormatFromMediaType(IMFMediaType *type);
+static QVideoSurfaceFormat::PixelFormat pixelFormatFromMediaType(IMFMediaType *type);
static inline LONG MFTimeToMsec(const LONGLONG& time)
{
@@ -1026,13 +1026,13 @@ void EVRCustomPresenter::supportedFormatsChanged()
// check if we can render to the surface (compatible formats)
if (m_surface) {
- QList<QVideoFrame::PixelFormat> formats = m_surface->supportedPixelFormats(QVideoFrame::GLTextureHandle);
- if (m_presentEngine->supportsTextureRendering() && formats.contains(QVideoFrame::Format_RGB32)) {
+ QList<QVideoSurfaceFormat::PixelFormat> formats = m_surface->supportedPixelFormats(QVideoFrame::GLTextureHandle);
+ if (m_presentEngine->supportsTextureRendering() && formats.contains(QVideoSurfaceFormat::Format_RGB32)) {
m_presentEngine->setHint(D3DPresentEngine::RenderToTexture, true);
m_canRenderToSurface = true;
} else {
formats = m_surface->supportedPixelFormats(QVideoFrame::NoHandle);
- for (QVideoFrame::PixelFormat format : qAsConst(formats)) {
+ for (QVideoSurfaceFormat::PixelFormat format : qAsConst(formats)) {
if (SUCCEEDED(m_presentEngine->checkFormat(qt_evr_D3DFormatFromPixelFormat(format)))) {
m_canRenderToSurface = true;
break;
@@ -1479,8 +1479,8 @@ HRESULT EVRCustomPresenter::isMediaTypeSupported(IMFMediaType *proposed)
if (FAILED(hr))
return hr;
- QVideoFrame::PixelFormat pixelFormat = pixelFormatFromMediaType(proposed);
- if (pixelFormat == QVideoFrame::Format_Invalid)
+ QVideoSurfaceFormat::PixelFormat pixelFormat = pixelFormatFromMediaType(proposed);
+ if (pixelFormat == QVideoSurfaceFormat::Format_Invalid)
return MF_E_INVALIDMEDIATYPE;
// When not rendering to texture, only accept pixel formats supported by the video surface
@@ -2023,40 +2023,40 @@ HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect &sou
return hr;
}
-static QVideoFrame::PixelFormat pixelFormatFromMediaType(IMFMediaType *type)
+static QVideoSurfaceFormat::PixelFormat pixelFormatFromMediaType(IMFMediaType *type)
{
GUID majorType;
if (FAILED(type->GetMajorType(&majorType)))
- return QVideoFrame::Format_Invalid;
+ return QVideoSurfaceFormat::Format_Invalid;
if (majorType != MFMediaType_Video)
- return QVideoFrame::Format_Invalid;
+ return QVideoSurfaceFormat::Format_Invalid;
GUID subtype;
if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &subtype)))
- return QVideoFrame::Format_Invalid;
+ return QVideoSurfaceFormat::Format_Invalid;
if (subtype == MFVideoFormat_RGB32)
- return QVideoFrame::Format_RGB32;
+ return QVideoSurfaceFormat::Format_RGB32;
if (subtype == MFVideoFormat_ARGB32)
- return QVideoFrame::Format_ARGB32;
+ return QVideoSurfaceFormat::Format_ARGB32;
if (subtype == MFVideoFormat_RGB24)
- return QVideoFrame::Format_RGB24;
+ return QVideoSurfaceFormat::Format_RGB24;
if (subtype == MFVideoFormat_RGB565)
- return QVideoFrame::Format_RGB565;
+ return QVideoSurfaceFormat::Format_RGB565;
if (subtype == MFVideoFormat_RGB555)
- return QVideoFrame::Format_RGB555;
+ return QVideoSurfaceFormat::Format_RGB555;
if (subtype == MFVideoFormat_AYUV)
- return QVideoFrame::Format_AYUV444;
+ return QVideoSurfaceFormat::Format_AYUV444;
if (subtype == MFVideoFormat_I420)
- return QVideoFrame::Format_YUV420P;
+ return QVideoSurfaceFormat::Format_YUV420P;
if (subtype == MFVideoFormat_UYVY)
- return QVideoFrame::Format_UYVY;
+ return QVideoSurfaceFormat::Format_UYVY;
if (subtype == MFVideoFormat_YV12)
- return QVideoFrame::Format_YV12;
+ return QVideoSurfaceFormat::Format_YV12;
if (subtype == MFVideoFormat_NV12)
- return QVideoFrame::Format_NV12;
+ return QVideoSurfaceFormat::Format_NV12;
- return QVideoFrame::Format_Invalid;
+ return QVideoSurfaceFormat::Format_Invalid;
}
QT_END_NAMESPACE
diff --git a/src/multimedia/platform/windows/evr/evrd3dpresentengine.cpp b/src/multimedia/platform/windows/evr/evrd3dpresentengine.cpp
index 4f5eadc64..afbbbcb3a 100644
--- a/src/multimedia/platform/windows/evr/evrd3dpresentengine.cpp
+++ b/src/multimedia/platform/windows/evr/evrd3dpresentengine.cpp
@@ -369,7 +369,7 @@ HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format, QList<IMFSamp
done:
if (SUCCEEDED(hr)) {
m_surfaceFormat = QVideoSurfaceFormat(QSize(width, height),
- m_useTextureRendering ? QVideoFrame::Format_RGB32
+ m_useTextureRendering ? QVideoSurfaceFormat::Format_RGB32
: qt_evr_pixelFormatFromD3DFormat(d3dFormat),
m_useTextureRendering ? QVideoFrame::GLTextureHandle
: QVideoFrame::NoHandle);
diff --git a/src/multimedia/platform/windows/evr/evrhelpers.cpp b/src/multimedia/platform/windows/evr/evrhelpers.cpp
index aa2311f46..4c81228b0 100644
--- a/src/multimedia/platform/windows/evr/evrhelpers.cpp
+++ b/src/multimedia/platform/windows/evr/evrhelpers.cpp
@@ -117,67 +117,67 @@ bool qt_evr_isSampleTimePassed(IMFClock *clock, IMFSample *sample)
return false;
}
-QVideoFrame::PixelFormat qt_evr_pixelFormatFromD3DFormat(DWORD format)
+QVideoSurfaceFormat::PixelFormat qt_evr_pixelFormatFromD3DFormat(DWORD format)
{
switch (format) {
case D3DFMT_R8G8B8:
- return QVideoFrame::Format_RGB24;
+ return QVideoSurfaceFormat::Format_RGB24;
case D3DFMT_A8R8G8B8:
- return QVideoFrame::Format_ARGB32;
+ return QVideoSurfaceFormat::Format_ARGB32;
case D3DFMT_X8R8G8B8:
- return QVideoFrame::Format_RGB32;
+ return QVideoSurfaceFormat::Format_RGB32;
case D3DFMT_R5G6B5:
- return QVideoFrame::Format_RGB565;
+ return QVideoSurfaceFormat::Format_RGB565;
case D3DFMT_X1R5G5B5:
- return QVideoFrame::Format_RGB555;
+ return QVideoSurfaceFormat::Format_RGB555;
case D3DFMT_A8:
- return QVideoFrame::Format_Y8;
+ return QVideoSurfaceFormat::Format_Y8;
case D3DFMT_A8B8G8R8:
- return QVideoFrame::Format_BGRA32;
+ return QVideoSurfaceFormat::Format_BGRA32;
case D3DFMT_X8B8G8R8:
- return QVideoFrame::Format_BGR32;
+ return QVideoSurfaceFormat::Format_BGR32;
case D3DFMT_UYVY:
- return QVideoFrame::Format_UYVY;
+ return QVideoSurfaceFormat::Format_UYVY;
case D3DFMT_YUY2:
- return QVideoFrame::Format_YUYV;
+ return QVideoSurfaceFormat::Format_YUYV;
case D3DFMT_NV12:
- return QVideoFrame::Format_NV12;
+ return QVideoSurfaceFormat::Format_NV12;
case D3DFMT_YV12:
- return QVideoFrame::Format_YV12;
+ return QVideoSurfaceFormat::Format_YV12;
case D3DFMT_UNKNOWN:
default:
- return QVideoFrame::Format_Invalid;
+ return QVideoSurfaceFormat::Format_Invalid;
}
}
-D3DFORMAT qt_evr_D3DFormatFromPixelFormat(QVideoFrame::PixelFormat format)
+D3DFORMAT qt_evr_D3DFormatFromPixelFormat(QVideoSurfaceFormat::PixelFormat format)
{
switch (format) {
- case QVideoFrame::Format_RGB24:
+ case QVideoSurfaceFormat::Format_RGB24:
return D3DFMT_R8G8B8;
- case QVideoFrame::Format_ARGB32:
+ case QVideoSurfaceFormat::Format_ARGB32:
return D3DFMT_A8R8G8B8;
- case QVideoFrame::Format_RGB32:
+ case QVideoSurfaceFormat::Format_RGB32:
return D3DFMT_X8R8G8B8;
- case QVideoFrame::Format_RGB565:
+ case QVideoSurfaceFormat::Format_RGB565:
return D3DFMT_R5G6B5;
- case QVideoFrame::Format_RGB555:
+ case QVideoSurfaceFormat::Format_RGB555:
return D3DFMT_X1R5G5B5;
- case QVideoFrame::Format_Y8:
+ case QVideoSurfaceFormat::Format_Y8:
return D3DFMT_A8;
- case QVideoFrame::Format_BGRA32:
+ case QVideoSurfaceFormat::Format_BGRA32:
return D3DFMT_A8B8G8R8;
- case QVideoFrame::Format_BGR32:
+ case QVideoSurfaceFormat::Format_BGR32:
return D3DFMT_X8B8G8R8;
- case QVideoFrame::Format_UYVY:
+ case QVideoSurfaceFormat::Format_UYVY:
return D3DFMT_UYVY;
- case QVideoFrame::Format_YUYV:
+ case QVideoSurfaceFormat::Format_YUYV:
return D3DFMT_YUY2;
- case QVideoFrame::Format_NV12:
+ case QVideoSurfaceFormat::Format_NV12:
return D3DFMT_NV12;
- case QVideoFrame::Format_YV12:
+ case QVideoSurfaceFormat::Format_YV12:
return D3DFMT_YV12;
- case QVideoFrame::Format_Invalid:
+ case QVideoSurfaceFormat::Format_Invalid:
default:
return D3DFMT_UNKNOWN;
}
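
The two helpers above are inverses over the formats they list. A minimal round-trip sketch, assuming access to the private evrhelpers_p.h header as the EVR code here has (roundTripsCleanly is an illustrative name, Windows-only):

#include "evrhelpers_p.h"   // private EVR header; Windows/Direct3D builds only

bool roundTripsCleanly()
{
    // D3DFMT_X8R8G8B8 maps to the 32-bit RGB format without alpha,
    // and the reverse helper restores the original D3D constant.
    QVideoSurfaceFormat::PixelFormat pf = qt_evr_pixelFormatFromD3DFormat(D3DFMT_X8R8G8B8);
    return pf == QVideoSurfaceFormat::Format_RGB32
        && qt_evr_D3DFormatFromPixelFormat(pf) == D3DFMT_X8R8G8B8;
}
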
diff --git a/src/multimedia/platform/windows/evr/evrhelpers_p.h b/src/multimedia/platform/windows/evr/evrhelpers_p.h
index 89bff6288..34e992624 100644
--- a/src/multimedia/platform/windows/evr/evrhelpers_p.h
+++ b/src/multimedia/platform/windows/evr/evrhelpers_p.h
@@ -103,8 +103,8 @@ inline HRESULT qt_evr_getFrameRate(IMFMediaType *pType, MFRatio *pRatio)
reinterpret_cast<UINT32*>(&pRatio->Denominator));
}
-QVideoFrame::PixelFormat qt_evr_pixelFormatFromD3DFormat(DWORD format);
-D3DFORMAT qt_evr_D3DFormatFromPixelFormat(QVideoFrame::PixelFormat format);
+QVideoSurfaceFormat::PixelFormat qt_evr_pixelFormatFromD3DFormat(DWORD format);
+D3DFORMAT qt_evr_D3DFormatFromPixelFormat(QVideoSurfaceFormat::PixelFormat format);
QT_END_NAMESPACE
diff --git a/src/multimedia/platform/windows/player/mftvideo.cpp b/src/multimedia/platform/windows/player/mftvideo.cpp
index 8d318a6f6..c97c479d6 100644
--- a/src/multimedia/platform/windows/player/mftvideo.cpp
+++ b/src/multimedia/platform/windows/player/mftvideo.cpp
@@ -603,30 +603,30 @@ HRESULT MFTransform::OnFlush()
return S_OK;
}
-QVideoFrame::PixelFormat MFTransform::formatFromSubtype(const GUID& subtype)
+QVideoSurfaceFormat::PixelFormat MFTransform::formatFromSubtype(const GUID& subtype)
{
if (subtype == MFVideoFormat_ARGB32)
- return QVideoFrame::Format_ARGB32;
+ return QVideoSurfaceFormat::Format_ARGB32;
else if (subtype == MFVideoFormat_RGB32)
- return QVideoFrame::Format_RGB32;
+ return QVideoSurfaceFormat::Format_RGB32;
else if (subtype == MFVideoFormat_RGB24)
- return QVideoFrame::Format_RGB24;
+ return QVideoSurfaceFormat::Format_RGB24;
else if (subtype == MFVideoFormat_RGB565)
- return QVideoFrame::Format_RGB565;
+ return QVideoSurfaceFormat::Format_RGB565;
else if (subtype == MFVideoFormat_RGB555)
- return QVideoFrame::Format_RGB555;
+ return QVideoSurfaceFormat::Format_RGB555;
else if (subtype == MFVideoFormat_AYUV)
- return QVideoFrame::Format_AYUV444;
+ return QVideoSurfaceFormat::Format_AYUV444;
else if (subtype == MFVideoFormat_I420)
- return QVideoFrame::Format_YUV420P;
+ return QVideoSurfaceFormat::Format_YUV420P;
else if (subtype == MFVideoFormat_UYVY)
- return QVideoFrame::Format_UYVY;
+ return QVideoSurfaceFormat::Format_UYVY;
else if (subtype == MFVideoFormat_YV12)
- return QVideoFrame::Format_YV12;
+ return QVideoSurfaceFormat::Format_YV12;
else if (subtype == MFVideoFormat_NV12)
- return QVideoFrame::Format_NV12;
+ return QVideoSurfaceFormat::Format_NV12;
- return QVideoFrame::Format_Invalid;
+ return QVideoSurfaceFormat::Format_Invalid;
}
QVideoSurfaceFormat MFTransform::videoFormatForMFMediaType(IMFMediaType *mediaType, int *bytesPerLine)
@@ -651,7 +651,7 @@ QVideoSurfaceFormat MFTransform::videoFormatForMFMediaType(IMFMediaType *mediaTy
if (FAILED(mediaType->GetGUID(MF_MT_SUBTYPE, &subtype)))
return QVideoSurfaceFormat();
- QVideoFrame::PixelFormat pixelFormat = formatFromSubtype(subtype);
+ QVideoSurfaceFormat::PixelFormat pixelFormat = formatFromSubtype(subtype);
QVideoSurfaceFormat format(size, pixelFormat);
if (SUCCEEDED(MFGetAttributeRatio(mediaType, MF_MT_FRAME_RATE, &num, &den))) {
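
formatFromSubtype() remains the single point that maps a Media Foundation subtype GUID onto the pixel-format enum, now scoped to QVideoSurfaceFormat, and videoFormatForMFMediaType() wraps the result in a surface format. A sketch of that flow, assuming access to the private mftvideo_p.h header (the 1920x1080 size is arbitrary):

#include "mftvideo_p.h"   // private Windows-backend header

QVideoSurfaceFormat describeNV12Stream()
{
    // Resolve the MF subtype to the rescoped enum, then describe the stream.
    QVideoSurfaceFormat::PixelFormat pf = MFTransform::formatFromSubtype(MFVideoFormat_NV12);
    return QVideoSurfaceFormat(QSize(1920, 1080), pf);   // pf == QVideoSurfaceFormat::Format_NV12
}
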
diff --git a/src/multimedia/platform/windows/player/mftvideo_p.h b/src/multimedia/platform/windows/player/mftvideo_p.h
index 63cba906f..26f12b889 100644
--- a/src/multimedia/platform/windows/player/mftvideo_p.h
+++ b/src/multimedia/platform/windows/player/mftvideo_p.h
@@ -72,7 +72,7 @@ public:
void setVideoSink(IUnknown *videoSink);
- static QVideoFrame::PixelFormat formatFromSubtype(const GUID& subtype);
+ static QVideoSurfaceFormat::PixelFormat formatFromSubtype(const GUID& subtype);
// IUnknown methods
STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
diff --git a/src/multimedia/platform/windows/player/mfvideorenderercontrol.cpp b/src/multimedia/platform/windows/player/mfvideorenderercontrol.cpp
index 2ef77a9e4..5a7ff6d2b 100644
--- a/src/multimedia/platform/windows/player/mfvideorenderercontrol.cpp
+++ b/src/multimedia/platform/windows/player/mfvideorenderercontrol.cpp
@@ -791,8 +791,8 @@ namespace
clearMediaTypes();
if (!m_surface)
return;
- const QList<QVideoFrame::PixelFormat> formats = m_surface->supportedPixelFormats();
- for (QVideoFrame::PixelFormat format : formats) {
+ const QList<QVideoSurfaceFormat::PixelFormat> formats = m_surface->supportedPixelFormats();
+ for (QVideoSurfaceFormat::PixelFormat format : formats) {
IMFMediaType *mediaType;
if (FAILED(MFCreateMediaType(&mediaType))) {
qWarning("Failed to create mf media type!");
@@ -803,36 +803,36 @@ namespace
mediaType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
mediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
switch (format) {
- case QVideoFrame::Format_ARGB32:
- case QVideoFrame::Format_ARGB32_Premultiplied:
+ case QVideoSurfaceFormat::Format_ARGB32:
+ case QVideoSurfaceFormat::Format_ARGB32_Premultiplied:
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_ARGB32);
break;
- case QVideoFrame::Format_RGB32:
+ case QVideoSurfaceFormat::Format_RGB32:
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32);
break;
- case QVideoFrame::Format_BGR24: // MFVideoFormat_RGB24 has a BGR layout
+ case QVideoSurfaceFormat::Format_BGR24: // MFVideoFormat_RGB24 has a BGR layout
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24);
break;
- case QVideoFrame::Format_RGB565:
+ case QVideoSurfaceFormat::Format_RGB565:
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB565);
break;
- case QVideoFrame::Format_RGB555:
+ case QVideoSurfaceFormat::Format_RGB555:
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB555);
break;
- case QVideoFrame::Format_AYUV444:
- case QVideoFrame::Format_AYUV444_Premultiplied:
+ case QVideoSurfaceFormat::Format_AYUV444:
+ case QVideoSurfaceFormat::Format_AYUV444_Premultiplied:
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_AYUV);
break;
- case QVideoFrame::Format_YUV420P:
+ case QVideoSurfaceFormat::Format_YUV420P:
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_I420);
break;
- case QVideoFrame::Format_UYVY:
+ case QVideoSurfaceFormat::Format_UYVY:
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_UYVY);
break;
- case QVideoFrame::Format_YV12:
+ case QVideoSurfaceFormat::Format_YV12:
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_YV12);
break;
- case QVideoFrame::Format_NV12:
+ case QVideoSurfaceFormat::Format_NV12:
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12);
break;
default:
@@ -1038,7 +1038,7 @@ namespace
bool m_shutdown;
QList<IMFMediaType*> m_mediaTypes;
- QList<QVideoFrame::PixelFormat> m_pixelFormats;
+ QList<QVideoSurfaceFormat::PixelFormat> m_pixelFormats;
int m_currentFormatIndex;
int m_bytesPerLine;
QVideoSurfaceFormat m_surfaceFormat;
@@ -1077,27 +1077,27 @@ namespace
{
switch (format.pixelFormat()) {
// 32 bpp packed formats.
- case QVideoFrame::Format_RGB32:
- case QVideoFrame::Format_AYUV444:
+ case QVideoSurfaceFormat::Format_RGB32:
+ case QVideoSurfaceFormat::Format_AYUV444:
return format.frameWidth() * 4;
// 24 bpp packed formats.
- case QVideoFrame::Format_RGB24:
- case QVideoFrame::Format_BGR24:
+ case QVideoSurfaceFormat::Format_RGB24:
+ case QVideoSurfaceFormat::Format_BGR24:
return PAD_TO_DWORD(format.frameWidth() * 3);
// 16 bpp packed formats.
- case QVideoFrame::Format_RGB565:
- case QVideoFrame::Format_RGB555:
- case QVideoFrame::Format_YUYV:
- case QVideoFrame::Format_UYVY:
+ case QVideoSurfaceFormat::Format_RGB565:
+ case QVideoSurfaceFormat::Format_RGB555:
+ case QVideoSurfaceFormat::Format_YUYV:
+ case QVideoSurfaceFormat::Format_UYVY:
return PAD_TO_DWORD(format.frameWidth() * 2);
// Planar formats.
- case QVideoFrame::Format_IMC1:
- case QVideoFrame::Format_IMC2:
- case QVideoFrame::Format_IMC3:
- case QVideoFrame::Format_IMC4:
- case QVideoFrame::Format_YV12:
- case QVideoFrame::Format_NV12:
- case QVideoFrame::Format_YUV420P:
+ case QVideoSurfaceFormat::Format_IMC1:
+ case QVideoSurfaceFormat::Format_IMC2:
+ case QVideoSurfaceFormat::Format_IMC3:
+ case QVideoSurfaceFormat::Format_IMC4:
+ case QVideoSurfaceFormat::Format_YV12:
+ case QVideoSurfaceFormat::Format_NV12:
+ case QVideoSurfaceFormat::Format_YUV420P:
return PAD_TO_DWORD(format.frameWidth());
default:
return 0;
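
The stride helper above keeps its per-format rules and only changes the enum scope: 32 bpp packed formats use width * 4, narrower packed formats and the Y plane of planar formats are padded to a DWORD boundary. A standalone mirror of that rule, where padToDword stands in for the file-local PAD_TO_DWORD macro (assumed to round up to a multiple of four bytes):

#include <QtMultimedia/qvideosurfaceformat.h>

// Simplified mirror of the default-stride logic above.
static int padToDword(int bytes) { return (bytes + 3) & ~3; }

static int defaultStride(QVideoSurfaceFormat::PixelFormat pf, int width)
{
    switch (pf) {
    case QVideoSurfaceFormat::Format_RGB32:
    case QVideoSurfaceFormat::Format_AYUV444:
        return width * 4;                 // 32 bpp packed
    case QVideoSurfaceFormat::Format_RGB565:
    case QVideoSurfaceFormat::Format_UYVY:
        return padToDword(width * 2);     // 16 bpp packed
    case QVideoSurfaceFormat::Format_YV12:
    case QVideoSurfaceFormat::Format_NV12:
        return padToDword(width);         // planar: stride of the Y plane
    default:
        return 0;
    }
}
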
diff --git a/src/multimedia/platform/windows/qwindowsdevicemanager.cpp b/src/multimedia/platform/windows/qwindowsdevicemanager.cpp
index 0aa85f34e..6567bb665 100644
--- a/src/multimedia/platform/windows/qwindowsdevicemanager.cpp
+++ b/src/multimedia/platform/windows/qwindowsdevicemanager.cpp
@@ -227,7 +227,7 @@ QList<QCameraInfo> QWindowsDeviceManager::videoInputs() const
if (mediaFormatResult == MF_E_NO_MORE_TYPES)
break;
else if (SUCCEEDED(mediaFormatResult)) {
- QVideoFrame::PixelFormat pixelFormat = QVideoFrame::Format_Invalid;
+ QVideoSurfaceFormat::PixelFormat pixelFormat = QVideoSurfaceFormat::Format_Invalid;
QSize resolution;
float minFr = .0;
float maxFr = .0;
diff --git a/src/multimedia/video/qabstractvideosurface.h b/src/multimedia/video/qabstractvideosurface.h
index f47b7ac69..a437d7565 100644
--- a/src/multimedia/video/qabstractvideosurface.h
+++ b/src/multimedia/video/qabstractvideosurface.h
@@ -67,7 +67,7 @@ public:
explicit QAbstractVideoSurface(QObject *parent = nullptr);
~QAbstractVideoSurface();
- virtual QList<QVideoFrame::PixelFormat> supportedPixelFormats(
+ virtual QList<QVideoSurfaceFormat::PixelFormat> supportedPixelFormats(
QVideoFrame::HandleType type = QVideoFrame::NoHandle) const = 0;
bool isFormatSupported(const QVideoSurfaceFormat &format) const;
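
For surface subclasses the only change is the element type of the returned list. A minimal sketch of an updated override (SurfaceExample is a hypothetical class; present() is assumed to remain the other pure virtual, as in Qt 5):

#include <QtMultimedia/qabstractvideosurface.h>

class SurfaceExample : public QAbstractVideoSurface
{
public:
    QList<QVideoSurfaceFormat::PixelFormat> supportedPixelFormats(
            QVideoFrame::HandleType type) const override
    {
        if (type != QVideoFrame::NoHandle)
            return {};
        // RGB formats this surface can present directly.
        return { QVideoSurfaceFormat::Format_RGB32, QVideoSurfaceFormat::Format_ARGB32 };
    }
    bool present(const QVideoFrame &) override { return true; }
};
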
diff --git a/src/multimedia/video/qvideoframe.cpp b/src/multimedia/video/qvideoframe.cpp
index 8ca606b4a..f9b15e0df 100644
--- a/src/multimedia/video/qvideoframe.cpp
+++ b/src/multimedia/video/qvideoframe.cpp
@@ -53,7 +53,7 @@
#include <QDebug>
QT_BEGIN_NAMESPACE
-static bool pixelFormatHasAlpha[QVideoFrame::NPixelFormats] =
+static bool pixelFormatHasAlpha[QVideoSurfaceFormat::NPixelFormats] =
{
false, //Format_Invalid,
true, //Format_ARGB32,
@@ -159,7 +159,7 @@ private:
*/
/*!
- \enum QVideoFrame::PixelFormat
+ \enum QVideoSurfaceFormat::PixelFormat
Enumerates video data types.
@@ -341,7 +341,8 @@ QVideoFrame::QVideoFrame(int bytes, int bytesPerLine, const QVideoSurfaceFormat
\sa pixelFormatFromImageFormat()
*/
QVideoFrame::QVideoFrame(const QImage &image)
- : d(new QVideoFramePrivate(QVideoSurfaceFormat(image.size(), pixelFormatFromImageFormat(image.format()))))
+ : d(new QVideoFramePrivate(QVideoSurfaceFormat(image.size(),
+ QVideoSurfaceFormat::pixelFormatFromImageFormat(image.format()))))
{
d->buffer = new QImageVideoBuffer(image);
}
@@ -416,7 +417,7 @@ bool QVideoFrame::isValid() const
/*!
Returns the pixel format of this video frame.
*/
-QVideoFrame::PixelFormat QVideoFrame::pixelFormat() const
+QVideoSurfaceFormat::PixelFormat QVideoFrame::pixelFormat() const
{
return d->format.pixelFormat();
}
@@ -588,35 +589,35 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
auto pixelFmt = d->format.pixelFormat();
// If the plane count is 1 derive the additional planes for planar formats.
switch (pixelFmt) {
- case Format_Invalid:
- case Format_ARGB32:
- case Format_ARGB32_Premultiplied:
- case Format_RGB32:
- case Format_RGB24:
- case Format_RGB565:
- case Format_RGB555:
- case Format_ARGB8565_Premultiplied:
- case Format_BGRA32:
- case Format_BGRA32_Premultiplied:
- case Format_ABGR32:
- case Format_BGR32:
- case Format_BGR24:
- case Format_BGR565:
- case Format_BGR555:
- case Format_BGRA5658_Premultiplied:
- case Format_AYUV444:
- case Format_AYUV444_Premultiplied:
- case Format_YUV444:
- case Format_UYVY:
- case Format_YUYV:
- case Format_Y8:
- case Format_Y16:
- case Format_Jpeg:
+ case QVideoSurfaceFormat::Format_Invalid:
+ case QVideoSurfaceFormat::Format_ARGB32:
+ case QVideoSurfaceFormat::Format_ARGB32_Premultiplied:
+ case QVideoSurfaceFormat::Format_RGB32:
+ case QVideoSurfaceFormat::Format_RGB24:
+ case QVideoSurfaceFormat::Format_RGB565:
+ case QVideoSurfaceFormat::Format_RGB555:
+ case QVideoSurfaceFormat::Format_ARGB8565_Premultiplied:
+ case QVideoSurfaceFormat::Format_BGRA32:
+ case QVideoSurfaceFormat::Format_BGRA32_Premultiplied:
+ case QVideoSurfaceFormat::Format_ABGR32:
+ case QVideoSurfaceFormat::Format_BGR32:
+ case QVideoSurfaceFormat::Format_BGR24:
+ case QVideoSurfaceFormat::Format_BGR565:
+ case QVideoSurfaceFormat::Format_BGR555:
+ case QVideoSurfaceFormat::Format_BGRA5658_Premultiplied:
+ case QVideoSurfaceFormat::Format_AYUV444:
+ case QVideoSurfaceFormat::Format_AYUV444_Premultiplied:
+ case QVideoSurfaceFormat::Format_YUV444:
+ case QVideoSurfaceFormat::Format_UYVY:
+ case QVideoSurfaceFormat::Format_YUYV:
+ case QVideoSurfaceFormat::Format_Y8:
+ case QVideoSurfaceFormat::Format_Y16:
+ case QVideoSurfaceFormat::Format_Jpeg:
// Single plane or opaque format.
break;
- case Format_YUV420P:
- case Format_YUV422P:
- case Format_YV12: {
+ case QVideoSurfaceFormat::Format_YUV420P:
+ case QVideoSurfaceFormat::Format_YUV422P:
+ case QVideoSurfaceFormat::Format_YV12: {
// The UV stride is usually half the Y stride and is 32-bit aligned.
// However it's not always the case, at least on Windows where the
// UV planes are sometimes not aligned.
@@ -624,7 +625,7 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
// have a correct stride.
const int height = this->height();
const int yStride = d->mapData.bytesPerLine[0];
- const int uvHeight = pixelFmt == Format_YUV422P ? height : height / 2;
+ const int uvHeight = pixelFmt == QVideoSurfaceFormat::Format_YUV422P ? height : height / 2;
const int uvStride = (d->mapData.nBytes - (yStride * height)) / uvHeight / 2;
// Three planes, the second and third vertically (and horizontally for other than Format_YUV422P formats) subsampled.
@@ -634,18 +635,18 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
d->mapData.data[2] = d->mapData.data[1] + (uvStride * uvHeight);
break;
}
- case Format_NV12:
- case Format_NV21:
- case Format_IMC2:
- case Format_IMC4: {
+ case QVideoSurfaceFormat::Format_NV12:
+ case QVideoSurfaceFormat::Format_NV21:
+ case QVideoSurfaceFormat::Format_IMC2:
+ case QVideoSurfaceFormat::Format_IMC4: {
// Semi planar, Full resolution Y plane with interleaved subsampled U and V planes.
d->mapData.nPlanes = 2;
d->mapData.bytesPerLine[1] = d->mapData.bytesPerLine[0];
d->mapData.data[1] = d->mapData.data[0] + (d->mapData.bytesPerLine[0] * height());
break;
}
- case Format_IMC1:
- case Format_IMC3: {
+ case QVideoSurfaceFormat::Format_IMC1:
+ case QVideoSurfaceFormat::Format_IMC3: {
// Three planes, the second and third vertically and horizontally subsampled,
// but with lines padded to the width of the first plane.
d->mapData.nPlanes = 3;
@@ -697,7 +698,7 @@ void QVideoFrame::unmap()
Returns the number of bytes in a scan line.
\note For planar formats this is the bytes per line of the first plane only. The bytes per line of subsequent
- planes should be calculated as per the frame \l{QVideoFrame::PixelFormat}{pixel format}.
+ planes should be calculated as per the frame \l{QVideoSurfaceFormat::PixelFormat}{pixel format}.
This value is only valid while the frame data is \l {map()}{mapped}.
@@ -868,103 +869,6 @@ void QVideoFrame::setEndTime(qint64 time)
d->endTime = time;
}
-/*!
- Returns a video pixel format equivalent to an image \a format. If there is no equivalent
- format QVideoFrame::InvalidType is returned instead.
-
- \note In general \l QImage does not handle YUV formats.
-
-*/
-QVideoFrame::PixelFormat QVideoFrame::pixelFormatFromImageFormat(QImage::Format format)
-{
- switch (format) {
- case QImage::Format_RGB32:
- case QImage::Format_RGBX8888:
- return Format_RGB32;
- case QImage::Format_ARGB32:
- case QImage::Format_RGBA8888:
- return Format_ARGB32;
- case QImage::Format_ARGB32_Premultiplied:
- case QImage::Format_RGBA8888_Premultiplied:
- return Format_ARGB32_Premultiplied;
- case QImage::Format_RGB16:
- return Format_RGB565;
- case QImage::Format_ARGB8565_Premultiplied:
- return Format_ARGB8565_Premultiplied;
- case QImage::Format_RGB555:
- return Format_RGB555;
- case QImage::Format_RGB888:
- return Format_RGB24;
- case QImage::Format_Grayscale8:
- return Format_Y8;
- case QImage::Format_Grayscale16:
- return Format_Y16;
- default:
- return Format_Invalid;
- }
-}
-
-/*!
- Returns an image format equivalent to a video frame pixel \a format. If there is no equivalent
- format QImage::Format_Invalid is returned instead.
-
- \note In general \l QImage does not handle YUV formats.
-
-*/
-QImage::Format QVideoFrame::imageFormatFromPixelFormat(PixelFormat format)
-{
- switch (format) {
- case Format_ARGB32:
- return QImage::Format_ARGB32;
- case Format_ARGB32_Premultiplied:
- return QImage::Format_ARGB32_Premultiplied;
- case Format_RGB32:
- return QImage::Format_RGB32;
- case Format_RGB24:
- return QImage::Format_RGB888;
- case Format_RGB565:
- return QImage::Format_RGB16;
- case Format_RGB555:
- return QImage::Format_RGB555;
- case Format_ARGB8565_Premultiplied:
- return QImage::Format_ARGB8565_Premultiplied;
- case Format_Y8:
- return QImage::Format_Grayscale8;
- case Format_Y16:
- return QImage::Format_Grayscale16;
- case Format_ABGR32:
- case Format_BGRA32:
- case Format_BGRA32_Premultiplied:
- case Format_BGR32:
- case Format_BGR24:
- case Format_BGR565:
- case Format_BGR555:
- case Format_BGRA5658_Premultiplied:
- case Format_AYUV444:
- case Format_AYUV444_Premultiplied:
- case Format_YUV444:
- case Format_YUV420P:
- case Format_YUV422P:
- case Format_YV12:
- case Format_UYVY:
- case Format_YUYV:
- case Format_NV12:
- case Format_NV21:
- case Format_IMC1:
- case Format_IMC2:
- case Format_IMC3:
- case Format_IMC4:
- case Format_P010LE:
- case Format_P010BE:
- case Format_P016LE:
- case Format_P016BE:
- case Format_Jpeg:
- case Format_Invalid:
- return QImage::Format_Invalid;
- }
- return QImage::Format_Invalid;
-}
-
/*!
Based on the pixel format converts current video frame to image.
@@ -979,13 +883,13 @@ QImage QVideoFrame::image() const
return result;
// Formats supported by QImage don't need conversion
- QImage::Format imageFormat = QVideoFrame::imageFormatFromPixelFormat(frame.pixelFormat());
+ QImage::Format imageFormat = QVideoSurfaceFormat::imageFormatFromPixelFormat(frame.pixelFormat());
if (imageFormat != QImage::Format_Invalid) {
result = QImage(frame.bits(), frame.width(), frame.height(), frame.bytesPerLine(), imageFormat).copy();
}
// Load from JPG
- else if (frame.pixelFormat() == QVideoFrame::Format_Jpeg) {
+ else if (frame.pixelFormat() == QVideoSurfaceFormat::Format_Jpeg) {
result.loadFromData(frame.bits(), frame.mappedBytes(), "JPG");
}
@@ -1007,91 +911,6 @@ QImage QVideoFrame::image() const
}
#ifndef QT_NO_DEBUG_STREAM
-QDebug operator<<(QDebug dbg, QVideoFrame::PixelFormat pf)
-{
- QDebugStateSaver saver(dbg);
- dbg.nospace();
- switch (pf) {
- case QVideoFrame::Format_Invalid:
- return dbg << "Format_Invalid";
- case QVideoFrame::Format_ARGB32:
- return dbg << "Format_ARGB32";
- case QVideoFrame::Format_ARGB32_Premultiplied:
- return dbg << "Format_ARGB32_Premultiplied";
- case QVideoFrame::Format_RGB32:
- return dbg << "Format_RGB32";
- case QVideoFrame::Format_RGB24:
- return dbg << "Format_RGB24";
- case QVideoFrame::Format_RGB565:
- return dbg << "Format_RGB565";
- case QVideoFrame::Format_RGB555:
- return dbg << "Format_RGB555";
- case QVideoFrame::Format_ARGB8565_Premultiplied:
- return dbg << "Format_ARGB8565_Premultiplied";
- case QVideoFrame::Format_BGRA32:
- return dbg << "Format_BGRA32";
- case QVideoFrame::Format_BGRA32_Premultiplied:
- return dbg << "Format_BGRA32_Premultiplied";
- case QVideoFrame::Format_ABGR32:
- return dbg << "Format_ABGR32";
- case QVideoFrame::Format_BGR32:
- return dbg << "Format_BGR32";
- case QVideoFrame::Format_BGR24:
- return dbg << "Format_BGR24";
- case QVideoFrame::Format_BGR565:
- return dbg << "Format_BGR565";
- case QVideoFrame::Format_BGR555:
- return dbg << "Format_BGR555";
- case QVideoFrame::Format_BGRA5658_Premultiplied:
- return dbg << "Format_BGRA5658_Premultiplied";
- case QVideoFrame::Format_AYUV444:
- return dbg << "Format_AYUV444";
- case QVideoFrame::Format_AYUV444_Premultiplied:
- return dbg << "Format_AYUV444_Premultiplied";
- case QVideoFrame::Format_YUV444:
- return dbg << "Format_YUV444";
- case QVideoFrame::Format_YUV420P:
- return dbg << "Format_YUV420P";
- case QVideoFrame::Format_YUV422P:
- return dbg << "Format_YUV422P";
- case QVideoFrame::Format_YV12:
- return dbg << "Format_YV12";
- case QVideoFrame::Format_UYVY:
- return dbg << "Format_UYVY";
- case QVideoFrame::Format_YUYV:
- return dbg << "Format_YUYV";
- case QVideoFrame::Format_NV12:
- return dbg << "Format_NV12";
- case QVideoFrame::Format_NV21:
- return dbg << "Format_NV21";
- case QVideoFrame::Format_IMC1:
- return dbg << "Format_IMC1";
- case QVideoFrame::Format_IMC2:
- return dbg << "Format_IMC2";
- case QVideoFrame::Format_IMC3:
- return dbg << "Format_IMC3";
- case QVideoFrame::Format_IMC4:
- return dbg << "Format_IMC4";
- case QVideoFrame::Format_Y8:
- return dbg << "Format_Y8";
- case QVideoFrame::Format_Y16:
- return dbg << "Format_Y16";
- case QVideoFrame::Format_P010LE:
- return dbg << "Format_P010LE";
- case QVideoFrame::Format_P010BE:
- return dbg << "Format_P010BE";
- case QVideoFrame::Format_P016LE:
- return dbg << "Format_P016LE";
- case QVideoFrame::Format_P016BE:
- return dbg << "Format_P016BE";
- case QVideoFrame::Format_Jpeg:
- return dbg << "Format_Jpeg";
-
- default:
- return dbg << QString(QLatin1String("UserType(%1)" )).arg(int(pf)).toLatin1().constData();
- }
-}
-
static QString qFormatTimeStamps(qint64 start, qint64 end)
{
// Early out for invalid.
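
map() still derives the chroma plane pointers and strides for planar formats; only the case labels move to QVideoSurfaceFormat. A short sketch of reading the planes of a mapped frame, where the frame is assumed to be a valid, mappable YUV420P QVideoFrame:

#include <QtMultimedia/qvideoframe.h>

static void readPlanes(QVideoFrame &frame)
{
    if (frame.pixelFormat() == QVideoSurfaceFormat::Format_YUV420P
            && frame.map(QVideoFrame::ReadOnly)) {
        const uchar *y = frame.bits(0);   // full-resolution luma plane
        const uchar *u = frame.bits(1);   // chroma planes derived by map() as above
        const uchar *v = frame.bits(2);
        Q_UNUSED(y); Q_UNUSED(u); Q_UNUSED(v);
        frame.unmap();
    }
}
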
diff --git a/src/multimedia/video/qvideoframe.h b/src/multimedia/video/qvideoframe.h
index bd69095da..028286426 100644
--- a/src/multimedia/video/qvideoframe.h
+++ b/src/multimedia/video/qvideoframe.h
@@ -41,6 +41,7 @@
#define QVIDEOFRAME_H
#include <QtMultimedia/qtmultimediaglobal.h>
+#include <QtMultimedia/qvideosurfaceformat.h>
#include <QtCore/qmetatype.h>
#include <QtCore/qshareddata.h>
@@ -52,57 +53,10 @@ QT_BEGIN_NAMESPACE
class QSize;
class QVideoFramePrivate;
class QAbstractVideoBuffer;
-class QVideoSurfaceFormat;
class Q_MULTIMEDIA_EXPORT QVideoFrame
{
public:
- enum PixelFormat
- {
- Format_Invalid,
- Format_ARGB32,
- Format_ARGB32_Premultiplied,
- Format_RGB32,
- Format_RGB24,
- Format_RGB565,
- Format_RGB555,
- Format_ARGB8565_Premultiplied,
- Format_BGRA32,
- Format_BGRA32_Premultiplied,
- Format_ABGR32,
- Format_BGR32,
- Format_BGR24,
- Format_BGR565,
- Format_BGR555,
- Format_BGRA5658_Premultiplied,
-
- Format_AYUV444,
- Format_AYUV444_Premultiplied,
- Format_YUV444,
- Format_YUV420P,
- Format_YUV422P,
- Format_YV12,
- Format_UYVY,
- Format_YUYV,
- Format_NV12,
- Format_NV21,
- Format_IMC1,
- Format_IMC2,
- Format_IMC3,
- Format_IMC4,
- Format_Y8,
- Format_Y16,
-
- Format_P010LE,
- Format_P010BE,
- Format_P016LE,
- Format_P016BE,
-
- Format_Jpeg,
- };
-#ifndef Q_QDOC
- static constexpr int NPixelFormats = Format_Jpeg + 1;
-#endif
enum HandleType
{
@@ -133,7 +87,7 @@ public:
QAbstractVideoBuffer *buffer() const;
bool isValid() const;
- PixelFormat pixelFormat() const;
+ QVideoSurfaceFormat::PixelFormat pixelFormat() const;
QVideoSurfaceFormat surfaceFormat() const;
QVideoFrame::HandleType handleType() const;
@@ -171,9 +125,6 @@ public:
QImage image() const;
- static PixelFormat pixelFormatFromImageFormat(QImage::Format format);
- static QImage::Format imageFormatFromPixelFormat(PixelFormat format);
-
private:
QExplicitlySharedDataPointer<QVideoFramePrivate> d;
};
@@ -183,7 +134,6 @@ Q_DECLARE_METATYPE(QVideoFrame);
#ifndef QT_NO_DEBUG_STREAM
Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, const QVideoFrame&);
Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, QVideoFrame::HandleType);
-Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, QVideoFrame::PixelFormat);
#endif
QT_END_NAMESPACE
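
Callers of the removed static helpers only need to rescope the calls; the enum value names themselves are unchanged. A before/after sketch (the wrapper functions are illustrative only):

#include <QtMultimedia/qvideoframe.h>

static QImage::Format toImageFormat(const QVideoFrame &frame)
{
    // Before this change: QVideoFrame::imageFormatFromPixelFormat(frame.pixelFormat());
    return QVideoSurfaceFormat::imageFormatFromPixelFormat(frame.pixelFormat());
}

static QVideoSurfaceFormat::PixelFormat fromImage(const QImage &image)
{
    // Before this change: QVideoFrame::pixelFormatFromImageFormat(image.format());
    return QVideoSurfaceFormat::pixelFormatFromImageFormat(image.format());
}
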
diff --git a/src/multimedia/video/qvideoframeconversionhelper.cpp b/src/multimedia/video/qvideoframeconversionhelper.cpp
index b2d724703..7612c88c3 100644
--- a/src/multimedia/video/qvideoframeconversionhelper.cpp
+++ b/src/multimedia/video/qvideoframeconversionhelper.cpp
@@ -624,7 +624,7 @@ static void QT_FASTCALL qt_convert_P016BE_to_ARGB32(const QVideoFrame &frame, uc
}
-static VideoFrameConvertFunc qConvertFuncs[QVideoFrame::NPixelFormats] = {
+static VideoFrameConvertFunc qConvertFuncs[QVideoSurfaceFormat::NPixelFormats] = {
/* Format_Invalid */ nullptr, // Not needed
/* Format_ARGB32 */ nullptr, // Not needed
/* Format_ARGB32_Premultiplied */ nullptr, // Not needed
@@ -669,30 +669,30 @@ static void qInitConvertFuncsAsm()
#ifdef QT_COMPILER_SUPPORTS_SSE2
extern void QT_FASTCALL qt_convert_BGRA32_to_ARGB32_sse2(const QVideoFrame&, uchar*);
if (qCpuHasFeature(SSE2)){
- qConvertFuncs[QVideoFrame::Format_BGRA32] = qt_convert_BGRA32_to_ARGB32_sse2;
- qConvertFuncs[QVideoFrame::Format_BGRA32_Premultiplied] = qt_convert_BGRA32_to_ARGB32_sse2;
- qConvertFuncs[QVideoFrame::Format_BGR32] = qt_convert_BGRA32_to_ARGB32_sse2;
+ qConvertFuncs[QVideoSurfaceFormat::Format_BGRA32] = qt_convert_BGRA32_to_ARGB32_sse2;
+ qConvertFuncs[QVideoSurfaceFormat::Format_BGRA32_Premultiplied] = qt_convert_BGRA32_to_ARGB32_sse2;
+ qConvertFuncs[QVideoSurfaceFormat::Format_BGR32] = qt_convert_BGRA32_to_ARGB32_sse2;
}
#endif
#ifdef QT_COMPILER_SUPPORTS_SSSE3
extern void QT_FASTCALL qt_convert_BGRA32_to_ARGB32_ssse3(const QVideoFrame&, uchar*);
if (qCpuHasFeature(SSSE3)){
- qConvertFuncs[QVideoFrame::Format_BGRA32] = qt_convert_BGRA32_to_ARGB32_ssse3;
- qConvertFuncs[QVideoFrame::Format_BGRA32_Premultiplied] = qt_convert_BGRA32_to_ARGB32_ssse3;
- qConvertFuncs[QVideoFrame::Format_BGR32] = qt_convert_BGRA32_to_ARGB32_ssse3;
+ qConvertFuncs[QVideoSurfaceFormat::Format_BGRA32] = qt_convert_BGRA32_to_ARGB32_ssse3;
+ qConvertFuncs[QVideoSurfaceFormat::Format_BGRA32_Premultiplied] = qt_convert_BGRA32_to_ARGB32_ssse3;
+ qConvertFuncs[QVideoSurfaceFormat::Format_BGR32] = qt_convert_BGRA32_to_ARGB32_ssse3;
}
#endif
#ifdef QT_COMPILER_SUPPORTS_AVX2
extern void QT_FASTCALL qt_convert_BGRA32_to_ARGB32_avx2(const QVideoFrame&, uchar*);
if (qCpuHasFeature(AVX2)){
- qConvertFuncs[QVideoFrame::Format_BGRA32] = qt_convert_BGRA32_to_ARGB32_avx2;
- qConvertFuncs[QVideoFrame::Format_BGRA32_Premultiplied] = qt_convert_BGRA32_to_ARGB32_avx2;
- qConvertFuncs[QVideoFrame::Format_BGR32] = qt_convert_BGRA32_to_ARGB32_avx2;
+ qConvertFuncs[QVideoSurfaceFormat::Format_BGRA32] = qt_convert_BGRA32_to_ARGB32_avx2;
+ qConvertFuncs[QVideoSurfaceFormat::Format_BGRA32_Premultiplied] = qt_convert_BGRA32_to_ARGB32_avx2;
+ qConvertFuncs[QVideoSurfaceFormat::Format_BGR32] = qt_convert_BGRA32_to_ARGB32_avx2;
}
#endif
}
-VideoFrameConvertFunc qConverterForFormat(QVideoFrame::PixelFormat format)
+VideoFrameConvertFunc qConverterForFormat(QVideoSurfaceFormat::PixelFormat format)
{
static bool initAsmFuncsDone = false;
if (!initAsmFuncsDone) {
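
qConverterForFormat() installs the SSE2/SSSE3/AVX2 variants on first use and then returns a plain function pointer indexed by the rescoped enum. A sketch of the private conversion path, where the frame is assumed to be a BGRA32 QVideoFrame and the destination buffer holds width * height ARGB32 pixels:

#include "qvideoframeconversionhelper_p.h"   // private header

static QByteArray toARGB32(const QVideoFrame &frame)
{
    // Lazily initialised, SIMD-dispatched converter for the frame's format.
    VideoFrameConvertFunc convert = qConverterForFormat(QVideoSurfaceFormat::Format_BGRA32);
    QByteArray dst(frame.width() * frame.height() * 4, '\0');
    if (convert)
        convert(frame, reinterpret_cast<uchar *>(dst.data()));
    return dst;
}
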
diff --git a/src/multimedia/video/qvideoframeconversionhelper_p.h b/src/multimedia/video/qvideoframeconversionhelper_p.h
index 0fdcbf8a3..58edd48f2 100644
--- a/src/multimedia/video/qvideoframeconversionhelper_p.h
+++ b/src/multimedia/video/qvideoframeconversionhelper_p.h
@@ -57,7 +57,7 @@
// Converts to RGB32 or ARGB32_Premultiplied
typedef void (QT_FASTCALL *VideoFrameConvertFunc)(const QVideoFrame &frame, uchar *output);
-VideoFrameConvertFunc qConverterForFormat(QVideoFrame::PixelFormat format);
+VideoFrameConvertFunc qConverterForFormat(QVideoSurfaceFormat::PixelFormat format);
inline quint32 qConvertBGRA32ToARGB32(quint32 bgra)
{
diff --git a/src/multimedia/video/qvideosink.cpp b/src/multimedia/video/qvideosink.cpp
index 8b6b088d2..fa9177e21 100644
--- a/src/multimedia/video/qvideosink.cpp
+++ b/src/multimedia/video/qvideosink.cpp
@@ -233,7 +233,7 @@ void QVideoSink::paint(QPainter *painter, const QVideoFrame &f)
return;
}
- auto imageFormat = QVideoFrame::imageFormatFromPixelFormat(frame.pixelFormat());
+ auto imageFormat = QVideoSurfaceFormat::imageFormatFromPixelFormat(frame.pixelFormat());
// Do not render into ARGB32 images using QPainter.
// Using QImage::Format_ARGB32_Premultiplied is significantly faster.
if (imageFormat == QImage::Format_ARGB32)
diff --git a/src/multimedia/video/qvideosurfaceformat.cpp b/src/multimedia/video/qvideosurfaceformat.cpp
index 986734a65..3ba143079 100644
--- a/src/multimedia/video/qvideosurfaceformat.cpp
+++ b/src/multimedia/video/qvideosurfaceformat.cpp
@@ -54,7 +54,7 @@ public:
QVideoSurfaceFormatPrivate(
const QSize &size,
- QVideoFrame::PixelFormat format)
+ QVideoSurfaceFormat::PixelFormat format)
: pixelFormat(format)
, frameSize(size)
, viewport(QPoint(0, 0), size)
@@ -80,7 +80,7 @@ public:
return qAbs(r1 - r2) <= 0.00001 * qMin(qAbs(r1), qAbs(r2));
}
- QVideoFrame::PixelFormat pixelFormat = QVideoFrame::Format_Invalid;
+ QVideoSurfaceFormat::PixelFormat pixelFormat = QVideoSurfaceFormat::Format_Invalid;
QVideoSurfaceFormat::Direction scanLineDirection = QVideoSurfaceFormat::TopToBottom;
QSize frameSize;
QVideoSurfaceFormat::YCbCrColorSpace ycbcrColorSpace = QVideoSurfaceFormat::YCbCr_Undefined;
@@ -169,7 +169,7 @@ QVideoSurfaceFormat::QVideoSurfaceFormat()
\a size and pixel \a format.
*/
QVideoSurfaceFormat::QVideoSurfaceFormat(
- const QSize& size, QVideoFrame::PixelFormat format)
+ const QSize& size, QVideoSurfaceFormat::PixelFormat format)
: d(new QVideoSurfaceFormatPrivate(size, format))
{
}
@@ -196,7 +196,7 @@ QVideoSurfaceFormat::~QVideoSurfaceFormat() = default;
*/
bool QVideoSurfaceFormat::isValid() const
{
- return d->pixelFormat != QVideoFrame::Format_Invalid && d->frameSize.isValid();
+ return d->pixelFormat != Format_Invalid && d->frameSize.isValid();
}
/*!
@@ -218,7 +218,7 @@ bool QVideoSurfaceFormat::operator !=(const QVideoSurfaceFormat &other) const
/*!
Returns the pixel format of frames in a video stream.
*/
-QVideoFrame::PixelFormat QVideoSurfaceFormat::pixelFormat() const
+QVideoSurfaceFormat::PixelFormat QVideoSurfaceFormat::pixelFormat() const
{
return d->pixelFormat;
}
@@ -383,6 +383,104 @@ QSize QVideoSurfaceFormat::sizeHint() const
return d->viewport.size();
}
+
+/*!
+ Returns a video pixel format equivalent to an image \a format. If there is no equivalent
+ format, QVideoSurfaceFormat::Format_Invalid is returned instead.
+
+ \note In general \l QImage does not handle YUV formats.
+
+*/
+QVideoSurfaceFormat::PixelFormat QVideoSurfaceFormat::pixelFormatFromImageFormat(QImage::Format format)
+{
+ switch (format) {
+ case QImage::Format_RGB32:
+ case QImage::Format_RGBX8888:
+ return QVideoSurfaceFormat::Format_RGB32;
+ case QImage::Format_ARGB32:
+ case QImage::Format_RGBA8888:
+ return QVideoSurfaceFormat::Format_ARGB32;
+ case QImage::Format_ARGB32_Premultiplied:
+ case QImage::Format_RGBA8888_Premultiplied:
+ return QVideoSurfaceFormat::Format_ARGB32_Premultiplied;
+ case QImage::Format_RGB16:
+ return QVideoSurfaceFormat::Format_RGB565;
+ case QImage::Format_ARGB8565_Premultiplied:
+ return QVideoSurfaceFormat::Format_ARGB8565_Premultiplied;
+ case QImage::Format_RGB555:
+ return QVideoSurfaceFormat::Format_RGB555;
+ case QImage::Format_RGB888:
+ return QVideoSurfaceFormat::Format_RGB24;
+ case QImage::Format_Grayscale8:
+ return QVideoSurfaceFormat::Format_Y8;
+ case QImage::Format_Grayscale16:
+ return QVideoSurfaceFormat::Format_Y16;
+ default:
+ return QVideoSurfaceFormat::Format_Invalid;
+ }
+}
+
+/*!
+ Returns an image format equivalent to a video frame pixel \a format. If there is no equivalent
+ format, QImage::Format_Invalid is returned instead.
+
+ \note In general \l QImage does not handle YUV formats.
+
+*/
+QImage::Format QVideoSurfaceFormat::imageFormatFromPixelFormat(QVideoSurfaceFormat::PixelFormat format)
+{
+ switch (format) {
+ case QVideoSurfaceFormat::Format_ARGB32:
+ return QImage::Format_ARGB32;
+ case QVideoSurfaceFormat::Format_ARGB32_Premultiplied:
+ return QImage::Format_ARGB32_Premultiplied;
+ case QVideoSurfaceFormat::Format_RGB32:
+ return QImage::Format_RGB32;
+ case QVideoSurfaceFormat::Format_RGB24:
+ return QImage::Format_RGB888;
+ case QVideoSurfaceFormat::Format_RGB565:
+ return QImage::Format_RGB16;
+ case QVideoSurfaceFormat::Format_RGB555:
+ return QImage::Format_RGB555;
+ case QVideoSurfaceFormat::Format_ARGB8565_Premultiplied:
+ return QImage::Format_ARGB8565_Premultiplied;
+ case QVideoSurfaceFormat::Format_Y8:
+ return QImage::Format_Grayscale8;
+ case QVideoSurfaceFormat::Format_Y16:
+ return QImage::Format_Grayscale16;
+ case QVideoSurfaceFormat::Format_ABGR32:
+ case QVideoSurfaceFormat::Format_BGRA32:
+ case QVideoSurfaceFormat::Format_BGRA32_Premultiplied:
+ case QVideoSurfaceFormat::Format_BGR32:
+ case QVideoSurfaceFormat::Format_BGR24:
+ case QVideoSurfaceFormat::Format_BGR565:
+ case QVideoSurfaceFormat::Format_BGR555:
+ case QVideoSurfaceFormat::Format_BGRA5658_Premultiplied:
+ case QVideoSurfaceFormat::Format_AYUV444:
+ case QVideoSurfaceFormat::Format_AYUV444_Premultiplied:
+ case QVideoSurfaceFormat::Format_YUV444:
+ case QVideoSurfaceFormat::Format_YUV420P:
+ case QVideoSurfaceFormat::Format_YUV422P:
+ case QVideoSurfaceFormat::Format_YV12:
+ case QVideoSurfaceFormat::Format_UYVY:
+ case QVideoSurfaceFormat::Format_YUYV:
+ case QVideoSurfaceFormat::Format_NV12:
+ case QVideoSurfaceFormat::Format_NV21:
+ case QVideoSurfaceFormat::Format_IMC1:
+ case QVideoSurfaceFormat::Format_IMC2:
+ case QVideoSurfaceFormat::Format_IMC3:
+ case QVideoSurfaceFormat::Format_IMC4:
+ case QVideoSurfaceFormat::Format_P010LE:
+ case QVideoSurfaceFormat::Format_P010BE:
+ case QVideoSurfaceFormat::Format_P016LE:
+ case QVideoSurfaceFormat::Format_P016BE:
+ case QVideoSurfaceFormat::Format_Jpeg:
+ case QVideoSurfaceFormat::Format_Invalid:
+ return QImage::Format_Invalid;
+ }
+ return QImage::Format_Invalid;
+}
+
#ifndef QT_NO_DEBUG_STREAM
QDebug operator<<(QDebug dbg, QVideoSurfaceFormat::YCbCrColorSpace cs)
{
@@ -443,6 +541,91 @@ QDebug operator<<(QDebug dbg, const QVideoSurfaceFormat &f)
return dbg;
}
+
+QDebug operator<<(QDebug dbg, QVideoSurfaceFormat::PixelFormat pf)
+{
+ QDebugStateSaver saver(dbg);
+ dbg.nospace();
+ switch (pf) {
+ case QVideoSurfaceFormat::Format_Invalid:
+ return dbg << "Format_Invalid";
+ case QVideoSurfaceFormat::Format_ARGB32:
+ return dbg << "Format_ARGB32";
+ case QVideoSurfaceFormat::Format_ARGB32_Premultiplied:
+ return dbg << "Format_ARGB32_Premultiplied";
+ case QVideoSurfaceFormat::Format_RGB32:
+ return dbg << "Format_RGB32";
+ case QVideoSurfaceFormat::Format_RGB24:
+ return dbg << "Format_RGB24";
+ case QVideoSurfaceFormat::Format_RGB565:
+ return dbg << "Format_RGB565";
+ case QVideoSurfaceFormat::Format_RGB555:
+ return dbg << "Format_RGB555";
+ case QVideoSurfaceFormat::Format_ARGB8565_Premultiplied:
+ return dbg << "Format_ARGB8565_Premultiplied";
+ case QVideoSurfaceFormat::Format_BGRA32:
+ return dbg << "Format_BGRA32";
+ case QVideoSurfaceFormat::Format_BGRA32_Premultiplied:
+ return dbg << "Format_BGRA32_Premultiplied";
+ case QVideoSurfaceFormat::Format_ABGR32:
+ return dbg << "Format_ABGR32";
+ case QVideoSurfaceFormat::Format_BGR32:
+ return dbg << "Format_BGR32";
+ case QVideoSurfaceFormat::Format_BGR24:
+ return dbg << "Format_BGR24";
+ case QVideoSurfaceFormat::Format_BGR565:
+ return dbg << "Format_BGR565";
+ case QVideoSurfaceFormat::Format_BGR555:
+ return dbg << "Format_BGR555";
+ case QVideoSurfaceFormat::Format_BGRA5658_Premultiplied:
+ return dbg << "Format_BGRA5658_Premultiplied";
+ case QVideoSurfaceFormat::Format_AYUV444:
+ return dbg << "Format_AYUV444";
+ case QVideoSurfaceFormat::Format_AYUV444_Premultiplied:
+ return dbg << "Format_AYUV444_Premultiplied";
+ case QVideoSurfaceFormat::Format_YUV444:
+ return dbg << "Format_YUV444";
+ case QVideoSurfaceFormat::Format_YUV420P:
+ return dbg << "Format_YUV420P";
+ case QVideoSurfaceFormat::Format_YUV422P:
+ return dbg << "Format_YUV422P";
+ case QVideoSurfaceFormat::Format_YV12:
+ return dbg << "Format_YV12";
+ case QVideoSurfaceFormat::Format_UYVY:
+ return dbg << "Format_UYVY";
+ case QVideoSurfaceFormat::Format_YUYV:
+ return dbg << "Format_YUYV";
+ case QVideoSurfaceFormat::Format_NV12:
+ return dbg << "Format_NV12";
+ case QVideoSurfaceFormat::Format_NV21:
+ return dbg << "Format_NV21";
+ case QVideoSurfaceFormat::Format_IMC1:
+ return dbg << "Format_IMC1";
+ case QVideoSurfaceFormat::Format_IMC2:
+ return dbg << "Format_IMC2";
+ case QVideoSurfaceFormat::Format_IMC3:
+ return dbg << "Format_IMC3";
+ case QVideoSurfaceFormat::Format_IMC4:
+ return dbg << "Format_IMC4";
+ case QVideoSurfaceFormat::Format_Y8:
+ return dbg << "Format_Y8";
+ case QVideoSurfaceFormat::Format_Y16:
+ return dbg << "Format_Y16";
+ case QVideoSurfaceFormat::Format_P010LE:
+ return dbg << "Format_P010LE";
+ case QVideoSurfaceFormat::Format_P010BE:
+ return dbg << "Format_P010BE";
+ case QVideoSurfaceFormat::Format_P016LE:
+ return dbg << "Format_P016LE";
+ case QVideoSurfaceFormat::Format_P016BE:
+ return dbg << "Format_P016BE";
+ case QVideoSurfaceFormat::Format_Jpeg:
+ return dbg << "Format_Jpeg";
+
+ default:
+ return dbg << QString(QLatin1String("UserType(%1)" )).arg(int(pf)).toLatin1().constData();
+ }
+}
#endif
QT_END_NAMESPACE
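
With the conversion helpers and the PixelFormat debug streamer now defined next to QVideoSurfaceFormat, a quick check looks like this (dumpFormat is an illustrative name; note the documented caveat that YUV formats have no QImage equivalent):

#include <QtMultimedia/qvideosurfaceformat.h>
#include <QDebug>

static void dumpFormat()
{
    qDebug() << QVideoSurfaceFormat::Format_NV12;   // "Format_NV12", via the operator<< above
    // YUV formats map to QImage::Format_Invalid:
    Q_ASSERT(QVideoSurfaceFormat::imageFormatFromPixelFormat(QVideoSurfaceFormat::Format_NV12)
             == QImage::Format_Invalid);
}
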
diff --git a/src/multimedia/video/qvideosurfaceformat.h b/src/multimedia/video/qvideosurfaceformat.h
index 694dd9a79..5b9c9fcb8 100644
--- a/src/multimedia/video/qvideosurfaceformat.h
+++ b/src/multimedia/video/qvideosurfaceformat.h
@@ -40,12 +40,13 @@
#ifndef QVIDEOSURFACEFORMAT_H
#define QVIDEOSURFACEFORMAT_H
+#include <QtMultimedia/qtmultimediaglobal.h>
+
#include <QtCore/qlist.h>
#include <QtCore/qpair.h>
#include <QtCore/qshareddata.h>
#include <QtCore/qsize.h>
#include <QtGui/qimage.h>
-#include <QtMultimedia/qvideoframe.h>
QT_BEGIN_NAMESPACE
@@ -57,6 +58,53 @@ class QVideoSurfaceFormatPrivate;
class Q_MULTIMEDIA_EXPORT QVideoSurfaceFormat
{
public:
+ enum PixelFormat
+ {
+ Format_Invalid,
+ Format_ARGB32,
+ Format_ARGB32_Premultiplied,
+ Format_RGB32,
+ Format_RGB24,
+ Format_RGB565,
+ Format_RGB555,
+ Format_ARGB8565_Premultiplied,
+ Format_BGRA32,
+ Format_BGRA32_Premultiplied,
+ Format_ABGR32,
+ Format_BGR32,
+ Format_BGR24,
+ Format_BGR565,
+ Format_BGR555,
+ Format_BGRA5658_Premultiplied,
+
+ Format_AYUV444,
+ Format_AYUV444_Premultiplied,
+ Format_YUV444,
+ Format_YUV420P,
+ Format_YUV422P,
+ Format_YV12,
+ Format_UYVY,
+ Format_YUYV,
+ Format_NV12,
+ Format_NV21,
+ Format_IMC1,
+ Format_IMC2,
+ Format_IMC3,
+ Format_IMC4,
+ Format_Y8,
+ Format_Y16,
+
+ Format_P010LE,
+ Format_P010BE,
+ Format_P016LE,
+ Format_P016BE,
+
+ Format_Jpeg,
+ };
+#ifndef Q_QDOC
+ static constexpr int NPixelFormats = Format_Jpeg + 1;
+#endif
+
enum Direction
{
TopToBottom,
@@ -76,7 +124,7 @@ public:
QVideoSurfaceFormat();
QVideoSurfaceFormat(
const QSize &size,
- QVideoFrame::PixelFormat pixelFormat);
+ QVideoSurfaceFormat::PixelFormat pixelFormat);
QVideoSurfaceFormat(const QVideoSurfaceFormat &format);
~QVideoSurfaceFormat();
@@ -87,7 +135,7 @@ public:
bool isValid() const;
- QVideoFrame::PixelFormat pixelFormat() const;
+ QVideoSurfaceFormat::PixelFormat pixelFormat() const;
QSize frameSize() const;
void setFrameSize(const QSize &size);
@@ -113,6 +161,9 @@ public:
QSize sizeHint() const;
+ static PixelFormat pixelFormatFromImageFormat(QImage::Format format);
+ static QImage::Format imageFormatFromPixelFormat(PixelFormat format);
+
private:
QSharedDataPointer<QVideoSurfaceFormatPrivate> d;
};
@@ -121,6 +172,7 @@ private:
Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, const QVideoSurfaceFormat &);
Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, QVideoSurfaceFormat::Direction);
Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, QVideoSurfaceFormat::YCbCrColorSpace);
+Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, QVideoSurfaceFormat::PixelFormat);
#endif
QT_END_NAMESPACE
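
The include direction is also inverted: qvideosurfaceformat.h no longer pulls in qvideoframe.h, and qvideoframe.h includes it instead. Code that only needs the enum or a surface format can use the lighter header on its own, for example (the function name is illustrative):

#include <QtMultimedia/qvideosurfaceformat.h>

QVideoSurfaceFormat make720pNV12()
{
    return QVideoSurfaceFormat(QSize(1280, 720), QVideoSurfaceFormat::Format_NV12);
}
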
diff --git a/src/multimedia/video/qvideosurfaces.cpp b/src/multimedia/video/qvideosurfaces.cpp
index 1870e6ebf..b3e07fb26 100644
--- a/src/multimedia/video/qvideosurfaces.cpp
+++ b/src/multimedia/video/qvideosurfaces.cpp
@@ -57,10 +57,10 @@ QVideoSurfaces::QVideoSurfaces(const QList<QAbstractVideoSurface *> &s, QObject
QVideoSurfaces::~QVideoSurfaces() = default;
-QList<QVideoFrame::PixelFormat> QVideoSurfaces::supportedPixelFormats(QVideoFrame::HandleType type) const
+QList<QVideoSurfaceFormat::PixelFormat> QVideoSurfaces::supportedPixelFormats(QVideoFrame::HandleType type) const
{
- QList<QVideoFrame::PixelFormat> result;
- QMap<QVideoFrame::PixelFormat, int> formats;
+ QList<QVideoSurfaceFormat::PixelFormat> result;
+ QMap<QVideoSurfaceFormat::PixelFormat, int> formats;
for (auto &s : m_surfaces) {
for (auto &p : s->supportedPixelFormats(type)) {
if (++formats[p] == m_surfaces.size())
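
QVideoSurfaces advertises only the formats every wrapped surface supports: it counts how often each format is reported and keeps those whose count reaches the number of surfaces. The same idea in isolation, with hypothetical per-surface lists standing in for s->supportedPixelFormats(type):

#include <QtMultimedia/qvideosurfaceformat.h>
#include <QList>
#include <QMap>

static QList<QVideoSurfaceFormat::PixelFormat> commonFormats(
        const QList<QList<QVideoSurfaceFormat::PixelFormat>> &perSurface)
{
    QMap<QVideoSurfaceFormat::PixelFormat, int> counts;
    QList<QVideoSurfaceFormat::PixelFormat> common;
    for (const auto &formats : perSurface)
        for (auto f : formats)
            if (++counts[f] == perSurface.size())   // reported by every surface
                common.append(f);
    return common;
}
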
diff --git a/src/multimedia/video/qvideosurfaces_p.h b/src/multimedia/video/qvideosurfaces_p.h
index f72bd0565..4d9a8a9df 100644
--- a/src/multimedia/video/qvideosurfaces_p.h
+++ b/src/multimedia/video/qvideosurfaces_p.h
@@ -62,7 +62,7 @@ public:
QVideoSurfaces(const QList<QAbstractVideoSurface *> &surfaces, QObject *parent = nullptr);
~QVideoSurfaces();
- QList<QVideoFrame::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType type) const override;
+ QList<QVideoSurfaceFormat::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType type) const override;
bool start(const QVideoSurfaceFormat &format) override;
void stop() override;
bool present(const QVideoFrame &frame) override;
diff --git a/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp b/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp
index b238dff4f..10e065ca1 100644
--- a/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp
+++ b/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp
@@ -60,7 +60,7 @@ QSGVivanteVideoMaterial::QSGVivanteVideoMaterial() :
mOpacity(1.0),
mWidth(0),
mHeight(0),
- mFormat(QVideoFrame::Format_Invalid),
+ mFormat(QVideoSurfaceFormat::Format_Invalid),
mCurrentTexture(0),
mMappable(true),
mTexDirectTexture(0)
@@ -273,14 +273,14 @@ GLuint QSGVivanteVideoMaterial::vivanteMapping(QVideoFrame vF)
glBindTexture(GL_TEXTURE_2D, mTexDirectTexture);
}
switch (mCurrentFrame.pixelFormat()) {
- case QVideoFrame::Format_YUV420P:
- case QVideoFrame::Format_YV12:
+ case QVideoSurfaceFormat::Format_YUV420P:
+ case QVideoSurfaceFormat::Format_YV12:
memcpy(mTexDirectPlanes[0], mCurrentFrame.bits(0), mCurrentFrame.height() * mCurrentFrame.bytesPerLine(0));
memcpy(mTexDirectPlanes[1], mCurrentFrame.bits(1), mCurrentFrame.height() / 2 * mCurrentFrame.bytesPerLine(1));
memcpy(mTexDirectPlanes[2], mCurrentFrame.bits(2), mCurrentFrame.height() / 2 * mCurrentFrame.bytesPerLine(2));
break;
- case QVideoFrame::Format_NV12:
- case QVideoFrame::Format_NV21:
+ case QVideoSurfaceFormat::Format_NV12:
+ case QVideoSurfaceFormat::Format_NV21:
memcpy(mTexDirectPlanes[0], mCurrentFrame.bits(0), mCurrentFrame.height() * mCurrentFrame.bytesPerLine(0));
memcpy(mTexDirectPlanes[1], mCurrentFrame.bits(1), mCurrentFrame.height() / 2 * mCurrentFrame.bytesPerLine(1));
break;
diff --git a/src/plugins/videonode/imx6/qsgvivantevideomaterial.h b/src/plugins/videonode/imx6/qsgvivantevideomaterial.h
index 10fd5447f..5fe567814 100644
--- a/src/plugins/videonode/imx6/qsgvivantevideomaterial.h
+++ b/src/plugins/videonode/imx6/qsgvivantevideomaterial.h
@@ -75,7 +75,7 @@ private:
int mWidth;
int mHeight;
- QVideoFrame::PixelFormat mFormat;
+ QVideoSurfaceFormat::PixelFormat mFormat;
QMap<const uchar*, GLuint> mBitsToTextureMap;
QVideoFrame mCurrentFrame;
diff --git a/src/plugins/videonode/imx6/qsgvivantevideonode.cpp b/src/plugins/videonode/imx6/qsgvivantevideonode.cpp
index c8d83b4b2..810536715 100644
--- a/src/plugins/videonode/imx6/qsgvivantevideonode.cpp
+++ b/src/plugins/videonode/imx6/qsgvivantevideonode.cpp
@@ -44,7 +44,7 @@
#include "qsgvivantevideomaterialshader.h"
#include "qsgvivantevideomaterial.h"
-QMap<QVideoFrame::PixelFormat, GLenum> QSGVivanteVideoNode::static_VideoFormat2GLFormatMap = QMap<QVideoFrame::PixelFormat, GLenum>();
+QMap<QVideoSurfaceFormat::PixelFormat, GLenum> QSGVivanteVideoNode::static_VideoFormat2GLFormatMap = QMap<QVideoSurfaceFormat::PixelFormat, GLenum>();
QSGVivanteVideoNode::QSGVivanteVideoNode(const QVideoSurfaceFormat &format) :
mFormat(format)
@@ -64,40 +64,40 @@ void QSGVivanteVideoNode::setCurrentFrame(const QVideoFrame &frame, FrameFlags f
markDirty(DirtyMaterial);
}
-const QMap<QVideoFrame::PixelFormat, GLenum>& QSGVivanteVideoNode::getVideoFormat2GLFormatMap()
+const QMap<QVideoSurfaceFormat::PixelFormat, GLenum>& QSGVivanteVideoNode::getVideoFormat2GLFormatMap()
{
if (static_VideoFormat2GLFormatMap.isEmpty()) {
- static_VideoFormat2GLFormatMap.insert(QVideoFrame::Format_YUV420P, GL_VIV_I420);
- static_VideoFormat2GLFormatMap.insert(QVideoFrame::Format_YV12, GL_VIV_YV12);
- static_VideoFormat2GLFormatMap.insert(QVideoFrame::Format_NV12, GL_VIV_NV12);
- static_VideoFormat2GLFormatMap.insert(QVideoFrame::Format_NV21, GL_VIV_NV21);
- static_VideoFormat2GLFormatMap.insert(QVideoFrame::Format_UYVY, GL_VIV_UYVY);
- static_VideoFormat2GLFormatMap.insert(QVideoFrame::Format_YUYV, GL_VIV_YUY2);
- static_VideoFormat2GLFormatMap.insert(QVideoFrame::Format_RGB32, GL_BGRA_EXT);
- static_VideoFormat2GLFormatMap.insert(QVideoFrame::Format_ARGB32, GL_BGRA_EXT);
- static_VideoFormat2GLFormatMap.insert(QVideoFrame::Format_BGR32, GL_RGBA);
- static_VideoFormat2GLFormatMap.insert(QVideoFrame::Format_BGRA32, GL_RGBA);
- static_VideoFormat2GLFormatMap.insert(QVideoFrame::Format_RGB565, GL_RGB565);
+ static_VideoFormat2GLFormatMap.insert(QVideoSurfaceFormat::Format_YUV420P, GL_VIV_I420);
+ static_VideoFormat2GLFormatMap.insert(QVideoSurfaceFormat::Format_YV12, GL_VIV_YV12);
+ static_VideoFormat2GLFormatMap.insert(QVideoSurfaceFormat::Format_NV12, GL_VIV_NV12);
+ static_VideoFormat2GLFormatMap.insert(QVideoSurfaceFormat::Format_NV21, GL_VIV_NV21);
+ static_VideoFormat2GLFormatMap.insert(QVideoSurfaceFormat::Format_UYVY, GL_VIV_UYVY);
+ static_VideoFormat2GLFormatMap.insert(QVideoSurfaceFormat::Format_YUYV, GL_VIV_YUY2);
+ static_VideoFormat2GLFormatMap.insert(QVideoSurfaceFormat::Format_RGB32, GL_BGRA_EXT);
+ static_VideoFormat2GLFormatMap.insert(QVideoSurfaceFormat::Format_ARGB32, GL_BGRA_EXT);
+ static_VideoFormat2GLFormatMap.insert(QVideoSurfaceFormat::Format_BGR32, GL_RGBA);
+ static_VideoFormat2GLFormatMap.insert(QVideoSurfaceFormat::Format_BGRA32, GL_RGBA);
+ static_VideoFormat2GLFormatMap.insert(QVideoSurfaceFormat::Format_RGB565, GL_RGB565);
}
return static_VideoFormat2GLFormatMap;
}
-int QSGVivanteVideoNode::getBytesForPixelFormat(QVideoFrame::PixelFormat pixelformat)
+int QSGVivanteVideoNode::getBytesForPixelFormat(QVideoSurfaceFormat::PixelFormat pixelformat)
{
switch (pixelformat) {
- case QVideoFrame::Format_YUV420P: return 1;
- case QVideoFrame::Format_YV12: return 1;
- case QVideoFrame::Format_NV12: return 1;
- case QVideoFrame::Format_NV21: return 1;
- case QVideoFrame::Format_UYVY: return 2;
- case QVideoFrame::Format_YUYV: return 2;
- case QVideoFrame::Format_RGB32: return 4;
- case QVideoFrame::Format_ARGB32: return 4;
- case QVideoFrame::Format_BGR32: return 4;
- case QVideoFrame::Format_BGRA32: return 4;
- case QVideoFrame::Format_RGB565: return 2;
+ case QVideoSurfaceFormat::Format_YUV420P: return 1;
+ case QVideoSurfaceFormat::Format_YV12: return 1;
+ case QVideoSurfaceFormat::Format_NV12: return 1;
+ case QVideoSurfaceFormat::Format_NV21: return 1;
+ case QVideoSurfaceFormat::Format_UYVY: return 2;
+ case QVideoSurfaceFormat::Format_YUYV: return 2;
+ case QVideoSurfaceFormat::Format_RGB32: return 4;
+ case QVideoSurfaceFormat::Format_ARGB32: return 4;
+ case QVideoSurfaceFormat::Format_BGR32: return 4;
+ case QVideoSurfaceFormat::Format_BGRA32: return 4;
+ case QVideoSurfaceFormat::Format_RGB565: return 2;
default: return 1;
}
}
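
The Vivante node keeps two small lookups, both now keyed by the rescoped enum: a map from pixel format to the Vivante GL texture format, and the bytes per pixel used when copying planes. A sketch of querying them (QSGVivanteVideoNode is internal to the imx6 plugin):

#include "qsgvivantevideonode.h"   // internal to the imx6 videonode plugin

static void queryNV12Mapping()
{
    GLenum glFormat = QSGVivanteVideoNode::getVideoFormat2GLFormatMap()
                          .value(QVideoSurfaceFormat::Format_NV12);                          // GL_VIV_NV12
    int bpp = QSGVivanteVideoNode::getBytesForPixelFormat(QVideoSurfaceFormat::Format_NV12); // 1 byte per luma sample
    Q_UNUSED(glFormat);
    Q_UNUSED(bpp);
}
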
diff --git a/src/plugins/videonode/imx6/qsgvivantevideonode.h b/src/plugins/videonode/imx6/qsgvivantevideonode.h
index 1d2f3d342..f0a11f22a 100644
--- a/src/plugins/videonode/imx6/qsgvivantevideonode.h
+++ b/src/plugins/videonode/imx6/qsgvivantevideonode.h
@@ -50,18 +50,18 @@ public:
QSGVivanteVideoNode(const QVideoSurfaceFormat &format);
~QSGVivanteVideoNode();
- QVideoFrame::PixelFormat pixelFormat() const { return mFormat.pixelFormat(); }
+ QVideoSurfaceFormat::PixelFormat pixelFormat() const { return mFormat.pixelFormat(); }
QVideoFrame::HandleType handleType() const { return QVideoFrame::NoHandle; }
void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags);
- static const QMap<QVideoFrame::PixelFormat, GLenum>& getVideoFormat2GLFormatMap();
- static int getBytesForPixelFormat(QVideoFrame::PixelFormat pixelformat);
+ static const QMap<QVideoSurfaceFormat::PixelFormat, GLenum>& getVideoFormat2GLFormatMap();
+ static int getBytesForPixelFormat(QVideoSurfaceFormat::PixelFormat pixelformat);
private:
QVideoSurfaceFormat mFormat;
QSGVivanteVideoMaterial *mMaterial;
- static QMap<QVideoFrame::PixelFormat, GLenum> static_VideoFormat2GLFormatMap;
+ static QMap<QVideoSurfaceFormat::PixelFormat, GLenum> static_VideoFormat2GLFormatMap;
};
#endif // QSGVIDEONODE_VIVANTE_H
diff --git a/src/plugins/videonode/imx6/qsgvivantevideonodefactory.cpp b/src/plugins/videonode/imx6/qsgvivantevideonodefactory.cpp
index a4982ad46..5369bfd2b 100644
--- a/src/plugins/videonode/imx6/qsgvivantevideonodefactory.cpp
+++ b/src/plugins/videonode/imx6/qsgvivantevideonodefactory.cpp
@@ -41,14 +41,14 @@
#include "qsgvivantevideonode.h"
#include <QtGui/QGuiApplication>
-QList<QVideoFrame::PixelFormat> QSGVivanteVideoNodeFactory::supportedPixelFormats(
+QList<QVideoSurfaceFormat::PixelFormat> QSGVivanteVideoNodeFactory::supportedPixelFormats(
QVideoFrame::HandleType handleType) const
{
const bool isWebGl = QGuiApplication::platformName() == QLatin1String("webgl");
if (!isWebGl && handleType == QVideoFrame::NoHandle)
return QSGVivanteVideoNode::getVideoFormat2GLFormatMap().keys();
else
- return QList<QVideoFrame::PixelFormat>();
+ return QList<QVideoSurfaceFormat::PixelFormat>();
}
QSGVideoNode *QSGVivanteVideoNodeFactory::createNode(const QVideoSurfaceFormat &format)
diff --git a/src/plugins/videonode/imx6/qsgvivantevideonodefactory.h b/src/plugins/videonode/imx6/qsgvivantevideonodefactory.h
index 6dd068d55..1f24251ad 100644
--- a/src/plugins/videonode/imx6/qsgvivantevideonodefactory.h
+++ b/src/plugins/videonode/imx6/qsgvivantevideonodefactory.h
@@ -50,7 +50,7 @@ public:
Q_PLUGIN_METADATA(IID QSGVideoNodeFactoryInterface_iid FILE "imx6.json")
Q_INTERFACES(QSGVideoNodeFactoryInterface)
- QList<QVideoFrame::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType handleType) const;
+ QList<QVideoSurfaceFormat::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType handleType) const;
QSGVideoNode *createNode(const QVideoSurfaceFormat &format);
};
#endif // QSGVIDEONODEFACTORY_VIVANTE_H
diff --git a/src/qtmultimediaquicktools/qsgvideonode_p.h b/src/qtmultimediaquicktools/qsgvideonode_p.h
index 7f6a04e01..7d71cb9f5 100644
--- a/src/qtmultimediaquicktools/qsgvideonode_p.h
+++ b/src/qtmultimediaquicktools/qsgvideonode_p.h
@@ -71,7 +71,7 @@ public:
QSGVideoNode();
virtual void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags) = 0;
- virtual QVideoFrame::PixelFormat pixelFormat() const = 0;
+ virtual QVideoSurfaceFormat::PixelFormat pixelFormat() const = 0;
void setTexturedRectGeometry(const QRectF &boundingRect, const QRectF &textureRect, int orientation);
@@ -88,7 +88,7 @@ class Q_MULTIMEDIAQUICK_EXPORT QSGVideoNodeFactoryInterface
public:
virtual ~QSGVideoNodeFactoryInterface();
- virtual QList<QVideoFrame::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType handleType) const = 0;
+ virtual QList<QVideoSurfaceFormat::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType handleType) const = 0;
virtual QSGVideoNode *createNode(const QVideoSurfaceFormat &format) = 0;
};
@@ -100,7 +100,7 @@ class Q_MULTIMEDIAQUICK_EXPORT QSGVideoNodeFactoryPlugin : public QObject, publi
Q_OBJECT
Q_INTERFACES(QSGVideoNodeFactoryInterface)
public:
- QList<QVideoFrame::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType handleType) const override = 0;
+ QList<QVideoSurfaceFormat::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType handleType) const override = 0;
QSGVideoNode *createNode(const QVideoSurfaceFormat &format) override = 0;
};
diff --git a/src/qtmultimediaquicktools/qsgvideonode_rgb.cpp b/src/qtmultimediaquicktools/qsgvideonode_rgb.cpp
index 7345291c8..4a6c74573 100644
--- a/src/qtmultimediaquicktools/qsgvideonode_rgb.cpp
+++ b/src/qtmultimediaquicktools/qsgvideonode_rgb.cpp
@@ -43,18 +43,18 @@
QT_BEGIN_NAMESPACE
-QList<QVideoFrame::PixelFormat> QSGVideoNodeFactory_RGB::supportedPixelFormats(
+QList<QVideoSurfaceFormat::PixelFormat> QSGVideoNodeFactory_RGB::supportedPixelFormats(
QVideoFrame::HandleType handleType) const
{
- QList<QVideoFrame::PixelFormat> pixelFormats;
+ QList<QVideoSurfaceFormat::PixelFormat> pixelFormats;
if (handleType == QVideoFrame::NoHandle) {
- pixelFormats.append(QVideoFrame::Format_RGB32);
- pixelFormats.append(QVideoFrame::Format_ARGB32);
- pixelFormats.append(QVideoFrame::Format_ARGB32_Premultiplied);
- pixelFormats.append(QVideoFrame::Format_BGR32);
- pixelFormats.append(QVideoFrame::Format_BGRA32);
- pixelFormats.append(QVideoFrame::Format_RGB565);
+ pixelFormats.append(QVideoSurfaceFormat::Format_RGB32);
+ pixelFormats.append(QVideoSurfaceFormat::Format_ARGB32);
+ pixelFormats.append(QVideoSurfaceFormat::Format_ARGB32_Premultiplied);
+ pixelFormats.append(QVideoSurfaceFormat::Format_BGR32);
+ pixelFormats.append(QVideoSurfaceFormat::Format_BGRA32);
+ pixelFormats.append(QVideoSurfaceFormat::Format_RGB565);
}
return pixelFormats;
@@ -165,13 +165,13 @@ void QSGVideoMaterialRhiShader_RGB::updateSampledImage(RenderState &state, int b
m->m_frameMutex.lock();
auto frame = m->m_frame;
- if (frame.pixelFormat() == QVideoFrame::Format_RGB565) // Format_RGB565 requires GL_UNSIGNED_SHORT_5_6_5
+ if (frame.pixelFormat() == QVideoSurfaceFormat::Format_RGB565) // Format_RGB565 requires GL_UNSIGNED_SHORT_5_6_5
frame = frame.image().convertToFormat(QImage::Format_RGBA8888_Premultiplied);
auto format = QRhiTexture::RGBA8;
- if (frame.pixelFormat() == QVideoFrame::Format_RGB32
- || frame.pixelFormat() == QVideoFrame::Format_ARGB32
- || frame.pixelFormat() == QVideoFrame::Format_ARGB32_Premultiplied)
+ if (frame.pixelFormat() == QVideoSurfaceFormat::Format_RGB32
+ || frame.pixelFormat() == QVideoSurfaceFormat::Format_ARGB32
+ || frame.pixelFormat() == QVideoSurfaceFormat::Format_ARGB32_Premultiplied)
{
format = QRhiTexture::BGRA8;
}
diff --git a/src/qtmultimediaquicktools/qsgvideonode_rgb_p.h b/src/qtmultimediaquicktools/qsgvideonode_rgb_p.h
index 165781475..b42509c1f 100644
--- a/src/qtmultimediaquicktools/qsgvideonode_rgb_p.h
+++ b/src/qtmultimediaquicktools/qsgvideonode_rgb_p.h
@@ -64,7 +64,7 @@ public:
QSGVideoNode_RGB(const QVideoSurfaceFormat &format);
~QSGVideoNode_RGB();
- QVideoFrame::PixelFormat pixelFormat() const override {
+ QVideoSurfaceFormat::PixelFormat pixelFormat() const override {
return m_format.pixelFormat();
}
void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags) override;
@@ -77,7 +77,7 @@ private:
class QSGVideoNodeFactory_RGB : public QSGVideoNodeFactoryInterface {
public:
- QList<QVideoFrame::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType handleType) const override;
+ QList<QVideoSurfaceFormat::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType handleType) const override;
QSGVideoNode *createNode(const QVideoSurfaceFormat &format) override;
};
diff --git a/src/qtmultimediaquicktools/qsgvideonode_texture.cpp b/src/qtmultimediaquicktools/qsgvideonode_texture.cpp
index 0a52f06c4..cc345d61a 100644
--- a/src/qtmultimediaquicktools/qsgvideonode_texture.cpp
+++ b/src/qtmultimediaquicktools/qsgvideonode_texture.cpp
@@ -45,18 +45,18 @@
QT_BEGIN_NAMESPACE
-QList<QVideoFrame::PixelFormat> QSGVideoNodeFactory_Texture::supportedPixelFormats(
+QList<QVideoSurfaceFormat::PixelFormat> QSGVideoNodeFactory_Texture::supportedPixelFormats(
QVideoFrame::HandleType) const
{
- QList<QVideoFrame::PixelFormat> pixelFormats;
+ QList<QVideoSurfaceFormat::PixelFormat> pixelFormats;
- pixelFormats.append(QVideoFrame::Format_RGB565);
- pixelFormats.append(QVideoFrame::Format_RGB32);
- pixelFormats.append(QVideoFrame::Format_ARGB32);
- pixelFormats.append(QVideoFrame::Format_BGR32);
- pixelFormats.append(QVideoFrame::Format_BGRA32);
+ pixelFormats.append(QVideoSurfaceFormat::Format_RGB565);
+ pixelFormats.append(QVideoSurfaceFormat::Format_RGB32);
+ pixelFormats.append(QVideoSurfaceFormat::Format_ARGB32);
+ pixelFormats.append(QVideoSurfaceFormat::Format_BGR32);
+ pixelFormats.append(QVideoSurfaceFormat::Format_BGRA32);
#if !QT_CONFIG(gpu_vivante)
- pixelFormats.append(QVideoFrame::Format_ABGR32);
+ pixelFormats.append(QVideoSurfaceFormat::Format_ABGR32);
#endif
return pixelFormats;
@@ -143,8 +143,8 @@ public:
private:
[[nodiscard]] bool needsSwizzling() const {
- return m_format.pixelFormat() == QVideoFrame::Format_RGB32
- || m_format.pixelFormat() == QVideoFrame::Format_ARGB32;
+ return m_format.pixelFormat() == QVideoSurfaceFormat::Format_RGB32
+ || m_format.pixelFormat() == QVideoSurfaceFormat::Format_ARGB32;
}
};
diff --git a/src/qtmultimediaquicktools/qsgvideonode_texture_p.h b/src/qtmultimediaquicktools/qsgvideonode_texture_p.h
index c8ccf8cd9..61aa04873 100644
--- a/src/qtmultimediaquicktools/qsgvideonode_texture_p.h
+++ b/src/qtmultimediaquicktools/qsgvideonode_texture_p.h
@@ -64,7 +64,7 @@ public:
QSGVideoNode_Texture(const QVideoSurfaceFormat &format);
~QSGVideoNode_Texture();
- QVideoFrame::PixelFormat pixelFormat() const override {
+ QVideoSurfaceFormat::PixelFormat pixelFormat() const override {
return m_format.pixelFormat();
}
void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags) override;
@@ -77,7 +77,7 @@ private:
class QSGVideoNodeFactory_Texture : public QSGVideoNodeFactoryInterface {
public:
- QList<QVideoFrame::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType handleType) const override;
+ QList<QVideoSurfaceFormat::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType handleType) const override;
QSGVideoNode *createNode(const QVideoSurfaceFormat &format) override;
};
diff --git a/src/qtmultimediaquicktools/qsgvideonode_yuv.cpp b/src/qtmultimediaquicktools/qsgvideonode_yuv.cpp
index 729aa33ad..8fe7752eb 100644
--- a/src/qtmultimediaquicktools/qsgvideonode_yuv.cpp
+++ b/src/qtmultimediaquicktools/qsgvideonode_yuv.cpp
@@ -43,14 +43,14 @@
QT_BEGIN_NAMESPACE
-QList<QVideoFrame::PixelFormat> QSGVideoNodeFactory_YUV::supportedPixelFormats(
+QList<QVideoSurfaceFormat::PixelFormat> QSGVideoNodeFactory_YUV::supportedPixelFormats(
QVideoFrame::HandleType) const
{
- QList<QVideoFrame::PixelFormat> formats;
+ QList<QVideoSurfaceFormat::PixelFormat> formats;
- formats << QVideoFrame::Format_YUV420P << QVideoFrame::Format_YV12 << QVideoFrame::Format_YUV422P
- << QVideoFrame::Format_NV12 << QVideoFrame::Format_NV21
- << QVideoFrame::Format_UYVY << QVideoFrame::Format_YUYV;
+ formats << QVideoSurfaceFormat::Format_YUV420P << QVideoSurfaceFormat::Format_YV12 << QVideoSurfaceFormat::Format_YUV422P
+ << QVideoSurfaceFormat::Format_NV12 << QVideoSurfaceFormat::Format_NV21
+ << QVideoSurfaceFormat::Format_UYVY << QVideoSurfaceFormat::Format_YUYV;
return formats;
}
@@ -144,13 +144,13 @@ public:
static QSGMaterialType biPlanarType, biPlanarSwizzleType, triPlanarType, uyvyType, yuyvType;
switch (m_format.pixelFormat()) {
- case QVideoFrame::Format_NV12:
+ case QVideoSurfaceFormat::Format_NV12:
return &biPlanarType;
- case QVideoFrame::Format_NV21:
+ case QVideoSurfaceFormat::Format_NV21:
return &biPlanarSwizzleType;
- case QVideoFrame::Format_UYVY:
+ case QVideoSurfaceFormat::Format_UYVY:
return &uyvyType;
- case QVideoFrame::Format_YUYV:
+ case QVideoSurfaceFormat::Format_YUYV:
return &yuyvType;
default: // Currently: YUV420P, YUV422P and YV12
return &triPlanarType;
@@ -159,13 +159,13 @@ public:
[[nodiscard]] QSGMaterialShader *createShader(QSGRendererInterface::RenderMode) const override {
switch (m_format.pixelFormat()) {
- case QVideoFrame::Format_NV12:
+ case QVideoSurfaceFormat::Format_NV12:
return new QSGVideoMaterialRhiShader_NV12;
- case QVideoFrame::Format_NV21:
+ case QVideoSurfaceFormat::Format_NV21:
return new QSGVideoMaterialRhiShader_NV21;
- case QVideoFrame::Format_UYVY:
+ case QVideoSurfaceFormat::Format_UYVY:
return new QSGVideoMaterialRhiShader_UYVY;
- case QVideoFrame::Format_YUYV:
+ case QVideoSurfaceFormat::Format_YUYV:
return new QSGVideoMaterialRhiShader_YUYV;
default: // Currently: YUV420P, YUV422P and YV12
return new QSGVideoMaterialRhiShader_YUV_YV;
@@ -288,11 +288,11 @@ void QSGVideoMaterialRhiShader_YUV_YV::mapFrame(QSGVideoMaterial_YUV *m)
return;
int y = 0;
- int u = m->m_frame.pixelFormat() == QVideoFrame::Format_YV12 ? 2 : 1;
- int v = m->m_frame.pixelFormat() == QVideoFrame::Format_YV12 ? 1 : 2;
+ int u = m->m_frame.pixelFormat() == QVideoSurfaceFormat::Format_YV12 ? 2 : 1;
+ int v = m->m_frame.pixelFormat() == QVideoSurfaceFormat::Format_YV12 ? 1 : 2;
int fw = m->m_frame.width();
int fh = m->m_frame.height();
- int uvHeight = m->m_frame.pixelFormat() == QVideoFrame::Format_YUV422P ? fh : fh / 2;
+ int uvHeight = m->m_frame.pixelFormat() == QVideoSurfaceFormat::Format_YUV422P ? fh : fh / 2;
m->m_planeWidth[0] = float(fw) / m->m_frame.bytesPerLine(y);
m->m_planeWidth[1] = m->m_planeWidth[2] = float(fw) / (2 * m->m_frame.bytesPerLine(u));
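
The mapFrame() hunk above preserves the existing plane arithmetic: YV12 swaps the U and V plane indices, and YUV422P keeps full-height chroma; only the enum scope changes. A small self-contained sketch of that layout rule (the struct and helper are illustrative, not part of Qt):

    // Sketch of the plane-index / chroma-height rules used in the hunk above.
    struct YuvPlaneLayout { int y, u, v, uvHeight; };

    static YuvPlaneLayout planeLayout(QVideoSurfaceFormat::PixelFormat pf, int frameHeight)
    {
        const bool yv12 = (pf == QVideoSurfaceFormat::Format_YV12);
        YuvPlaneLayout layout;
        layout.y = 0;
        layout.u = yv12 ? 2 : 1;   // YV12 stores V before U, so the indices swap
        layout.v = yv12 ? 1 : 2;
        layout.uvHeight = (pf == QVideoSurfaceFormat::Format_YUV422P)
                              ? frameHeight        // 4:2:2 keeps full chroma height
                              : frameHeight / 2;   // 4:2:0 formats halve it
        return layout;
    }
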
diff --git a/src/qtmultimediaquicktools/qsgvideonode_yuv_p.h b/src/qtmultimediaquicktools/qsgvideonode_yuv_p.h
index 89257ab8d..55b3bc764 100644
--- a/src/qtmultimediaquicktools/qsgvideonode_yuv_p.h
+++ b/src/qtmultimediaquicktools/qsgvideonode_yuv_p.h
@@ -63,7 +63,7 @@ public:
QSGVideoNode_YUV(const QVideoSurfaceFormat &format);
~QSGVideoNode_YUV();
- QVideoFrame::PixelFormat pixelFormat() const override {
+ QVideoSurfaceFormat::PixelFormat pixelFormat() const override {
return m_format.pixelFormat();
}
void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags) override;
@@ -77,7 +77,7 @@ private:
class QSGVideoNodeFactory_YUV : public QSGVideoNodeFactoryInterface {
public:
- QList<QVideoFrame::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType handleType) const override;
+ QList<QVideoSurfaceFormat::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType handleType) const override;
QSGVideoNode *createNode(const QVideoSurfaceFormat &format) override;
};
diff --git a/tests/auto/integration/qdeclarativevideooutput/tst_qdeclarativevideooutput.cpp b/tests/auto/integration/qdeclarativevideooutput/tst_qdeclarativevideooutput.cpp
index b995f94a1..667194f41 100644
--- a/tests/auto/integration/qdeclarativevideooutput/tst_qdeclarativevideooutput.cpp
+++ b/tests/auto/integration/qdeclarativevideooutput/tst_qdeclarativevideooutput.cpp
@@ -74,7 +74,7 @@ private:
void SurfaceHolder::presentDummyFrame(const QSize &size)
{
if (m_surface && m_surface->supportedPixelFormats().count() > 0) {
- QVideoFrame::PixelFormat pixelFormat = m_surface->supportedPixelFormats().value(0);
+ QVideoSurfaceFormat::PixelFormat pixelFormat = m_surface->supportedPixelFormats().value(0);
QVideoSurfaceFormat format(size, pixelFormat);
QVideoFrame frame(size.width() * size.height() * 4, size.width() * 4, QVideoSurfaceFormat(size, pixelFormat));
@@ -273,11 +273,11 @@ void tst_QDeclarativeVideoOutput::surfaceSource()
QVERIFY(holder.videoSurface() != nullptr);
// Now we could do things with the surface..
- const QList<QVideoFrame::PixelFormat> formats = holder.videoSurface()->supportedPixelFormats();
+ const QList<QVideoSurfaceFormat::PixelFormat> formats = holder.videoSurface()->supportedPixelFormats();
QVERIFY(formats.count() > 0);
// See if we can start and stop each pixel format (..)
- for (QVideoFrame::PixelFormat format : formats) {
+ for (QVideoSurfaceFormat::PixelFormat format : formats) {
QVideoSurfaceFormat surfaceFormat(QSize(200,100), format);
QVERIFY(holder.videoSurface()->isFormatSupported(surfaceFormat)); // This does kind of depend on node factories
@@ -362,7 +362,7 @@ void tst_QDeclarativeVideoOutput::paintSurface()
QVERIFY(surface);
QVERIFY(!surface->isActive());
videoOutput->setSize(QSize(2, 2));
- QVideoSurfaceFormat format(QSize(2, 2), QVideoFrame::Format_RGB32);
+ QVideoSurfaceFormat format(QSize(2, 2), QVideoSurfaceFormat::Format_RGB32);
QVERIFY(surface->isFormatSupported(format));
QVERIFY(surface->start(format));
QVERIFY(surface->isActive());
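
The integration test above walks every pixel format the surface reports and starts it with a matching QVideoSurfaceFormat. A condensed sketch of that loop under the renamed enum (cycleFormats() is a hypothetical helper; the 200x100 size matches the test, and error handling is omitted):

    #include <QAbstractVideoSurface>
    #include <QSize>
    #include <QVideoSurfaceFormat>

    // Sketch: start and stop the surface once per supported pixel format,
    // following the loop in tst_QDeclarativeVideoOutput::surfaceSource() above.
    void cycleFormats(QAbstractVideoSurface *surface)
    {
        const QList<QVideoSurfaceFormat::PixelFormat> formats = surface->supportedPixelFormats();
        for (QVideoSurfaceFormat::PixelFormat pf : formats) {
            QVideoSurfaceFormat format(QSize(200, 100), pf);
            if (surface->isFormatSupported(format) && surface->start(format))
                surface->stop();
        }
    }
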
diff --git a/tests/auto/integration/qmediaplayerbackend/tst_qmediaplayerbackend.cpp b/tests/auto/integration/qmediaplayerbackend/tst_qmediaplayerbackend.cpp
index b28afaf9b..8fd6cf19c 100644
--- a/tests/auto/integration/qmediaplayerbackend/tst_qmediaplayerbackend.cpp
+++ b/tests/auto/integration/qmediaplayerbackend/tst_qmediaplayerbackend.cpp
@@ -120,7 +120,7 @@ public:
private:
bool m_storeFrames;
- QList<QVideoFrame::PixelFormat> m_supported;
+ QList<QVideoSurfaceFormat::PixelFormat> m_supported;
};
void tst_QMediaPlayerBackend::init()
@@ -780,7 +780,7 @@ void tst_QMediaPlayerBackend::seekPauseSeek()
// create QImage for QVideoFrame to verify RGB pixel colors
QVERIFY(frame.map(QVideoFrame::ReadOnly));
- QImage image(frame.bits(), frame.width(), frame.height(), QVideoFrame::imageFormatFromPixelFormat(frame.pixelFormat()));
+ QImage image(frame.bits(), frame.width(), frame.height(), QVideoSurfaceFormat::imageFormatFromPixelFormat(frame.pixelFormat()));
QVERIFY(!image.isNull());
QVERIFY(qRed(image.pixel(0, 0)) >= 230); // conversion from YUV => RGB, that's why it's not 255
QVERIFY(qGreen(image.pixel(0, 0)) < 20);
@@ -804,7 +804,7 @@ void tst_QMediaPlayerBackend::seekPauseSeek()
QCOMPARE(frame.height(), 120);
QVERIFY(frame.map(QVideoFrame::ReadOnly));
- QImage image(frame.bits(), frame.width(), frame.height(), QVideoFrame::imageFormatFromPixelFormat(frame.pixelFormat()));
+ QImage image(frame.bits(), frame.width(), frame.height(), QVideoSurfaceFormat::imageFormatFromPixelFormat(frame.pixelFormat()));
QVERIFY(!image.isNull());
QVERIFY(qRed(image.pixel(0, 0)) < 20);
QVERIFY(qGreen(image.pixel(0, 0)) >= 230);
@@ -974,22 +974,22 @@ void tst_QMediaPlayerBackend::subsequentPlayback()
void tst_QMediaPlayerBackend::surfaceTest_data()
{
- QTest::addColumn< QList<QVideoFrame::PixelFormat> >("formatsList");
-
- QList<QVideoFrame::PixelFormat> formatsRGB;
- formatsRGB << QVideoFrame::Format_RGB32
- << QVideoFrame::Format_ARGB32
- << QVideoFrame::Format_RGB565
- << QVideoFrame::Format_BGRA32;
-
- QList<QVideoFrame::PixelFormat> formatsYUV;
- formatsYUV << QVideoFrame::Format_YUV420P
- << QVideoFrame::Format_YUV422P
- << QVideoFrame::Format_YV12
- << QVideoFrame::Format_UYVY
- << QVideoFrame::Format_YUYV
- << QVideoFrame::Format_NV12
- << QVideoFrame::Format_NV21;
+ QTest::addColumn< QList<QVideoSurfaceFormat::PixelFormat> >("formatsList");
+
+ QList<QVideoSurfaceFormat::PixelFormat> formatsRGB;
+ formatsRGB << QVideoSurfaceFormat::Format_RGB32
+ << QVideoSurfaceFormat::Format_ARGB32
+ << QVideoSurfaceFormat::Format_RGB565
+ << QVideoSurfaceFormat::Format_BGRA32;
+
+ QList<QVideoSurfaceFormat::PixelFormat> formatsYUV;
+ formatsYUV << QVideoSurfaceFormat::Format_YUV420P
+ << QVideoSurfaceFormat::Format_YUV422P
+ << QVideoSurfaceFormat::Format_YV12
+ << QVideoSurfaceFormat::Format_UYVY
+ << QVideoSurfaceFormat::Format_YUYV
+ << QVideoSurfaceFormat::Format_NV12
+ << QVideoSurfaceFormat::Format_NV21;
QTest::newRow("RGB formats")
<< formatsRGB;
@@ -1007,7 +1007,7 @@ void tst_QMediaPlayerBackend::surfaceTest()
if (localVideoFile.isEmpty())
QSKIP("No supported video file");
- QFETCH(QList<QVideoFrame::PixelFormat>, formatsList);
+ QFETCH(QList<QVideoSurfaceFormat::PixelFormat>, formatsList);
TestVideoSink surface(false);
QMediaPlayer player;
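
Two of the hunks above replace QVideoFrame::imageFormatFromPixelFormat() with the QVideoSurfaceFormat static of the same name when wrapping a mapped frame in a QImage for pixel checks. A compact sketch of that pattern (frameToImage() is a hypothetical helper; the copy() keeps the pixels valid past unmap()):

    #include <QImage>
    #include <QVideoFrame>
    #include <QVideoSurfaceFormat>

    // Sketch: wrap a mapped QVideoFrame in a QImage via the relocated helper,
    // mirroring tst_QMediaPlayerBackend::seekPauseSeek() above.
    QImage frameToImage(QVideoFrame &frame)
    {
        if (!frame.map(QVideoFrame::ReadOnly))
            return QImage();
        QImage image(frame.bits(), frame.width(), frame.height(),
                     QVideoSurfaceFormat::imageFormatFromPixelFormat(frame.pixelFormat()));
        image = image.copy();   // detach from the frame's buffer before unmapping
        frame.unmap();
        return image;
    }
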
diff --git a/tests/auto/unit/multimedia/qvideoframe/tst_qvideoframe.cpp b/tests/auto/unit/multimedia/qvideoframe/tst_qvideoframe.cpp
index e83bdbf7c..dbccbd32c 100644
--- a/tests/auto/unit/multimedia/qvideoframe/tst_qvideoframe.cpp
+++ b/tests/auto/unit/multimedia/qvideoframe/tst_qvideoframe.cpp
@@ -168,18 +168,18 @@ void tst_QVideoFrame::cleanup()
void tst_QVideoFrame::create_data()
{
QTest::addColumn<QSize>("size");
- QTest::addColumn<QVideoFrame::PixelFormat>("pixelFormat");
+ QTest::addColumn<QVideoSurfaceFormat::PixelFormat>("pixelFormat");
QTest::addColumn<int>("bytes");
QTest::addColumn<int>("bytesPerLine");
QTest::newRow("64x64 ARGB32")
<< QSize(64, 64)
- << QVideoFrame::Format_ARGB32
+ << QVideoSurfaceFormat::Format_ARGB32
<< 16384
<< 256;
QTest::newRow("32x256 YUV420P")
<< QSize(32, 256)
- << QVideoFrame::Format_YUV420P
+ << QVideoSurfaceFormat::Format_YUV420P
<< 13288
<< 32;
}
@@ -187,7 +187,7 @@ void tst_QVideoFrame::create_data()
void tst_QVideoFrame::create()
{
QFETCH(QSize, size);
- QFETCH(QVideoFrame::PixelFormat, pixelFormat);
+ QFETCH(QVideoSurfaceFormat::PixelFormat, pixelFormat);
QFETCH(int, bytes);
QFETCH(int, bytesPerLine);
@@ -207,18 +207,18 @@ void tst_QVideoFrame::create()
void tst_QVideoFrame::createInvalid_data()
{
QTest::addColumn<QSize>("size");
- QTest::addColumn<QVideoFrame::PixelFormat>("pixelFormat");
+ QTest::addColumn<QVideoSurfaceFormat::PixelFormat>("pixelFormat");
QTest::addColumn<int>("bytes");
QTest::addColumn<int>("bytesPerLine");
QTest::newRow("64x64 ARGB32 0 size")
<< QSize(64, 64)
- << QVideoFrame::Format_ARGB32
+ << QVideoSurfaceFormat::Format_ARGB32
<< 0
<< 45;
QTest::newRow("32x256 YUV420P negative size")
<< QSize(32, 256)
- << QVideoFrame::Format_YUV420P
+ << QVideoSurfaceFormat::Format_YUV420P
<< -13288
<< 32;
}
@@ -226,7 +226,7 @@ void tst_QVideoFrame::createInvalid_data()
void tst_QVideoFrame::createInvalid()
{
QFETCH(QSize, size);
- QFETCH(QVideoFrame::PixelFormat, pixelFormat);
+ QFETCH(QVideoSurfaceFormat::PixelFormat, pixelFormat);
QFETCH(int, bytes);
QFETCH(int, bytesPerLine);
@@ -247,23 +247,23 @@ void tst_QVideoFrame::createFromBuffer_data()
{
QTest::addColumn<QVideoFrame::HandleType>("handleType");
QTest::addColumn<QSize>("size");
- QTest::addColumn<QVideoFrame::PixelFormat>("pixelFormat");
+ QTest::addColumn<QVideoSurfaceFormat::PixelFormat>("pixelFormat");
QTest::newRow("64x64 ARGB32 no handle")
<< QVideoFrame::NoHandle
<< QSize(64, 64)
- << QVideoFrame::Format_ARGB32;
+ << QVideoSurfaceFormat::Format_ARGB32;
QTest::newRow("64x64 ARGB32 gl handle")
<< QVideoFrame::GLTextureHandle
<< QSize(64, 64)
- << QVideoFrame::Format_ARGB32;
+ << QVideoSurfaceFormat::Format_ARGB32;
}
void tst_QVideoFrame::createFromBuffer()
{
QFETCH(QVideoFrame::HandleType, handleType);
QFETCH(QSize, size);
- QFETCH(QVideoFrame::PixelFormat, pixelFormat);
+ QFETCH(QVideoSurfaceFormat::PixelFormat, pixelFormat);
QVideoFrame frame(new QtTestDummyVideoBuffer(handleType), QVideoSurfaceFormat(size, pixelFormat));
@@ -281,27 +281,27 @@ void tst_QVideoFrame::createFromImage_data()
{
QTest::addColumn<QSize>("size");
QTest::addColumn<QImage::Format>("imageFormat");
- QTest::addColumn<QVideoFrame::PixelFormat>("pixelFormat");
+ QTest::addColumn<QVideoSurfaceFormat::PixelFormat>("pixelFormat");
QTest::newRow("64x64 RGB32")
<< QSize(64, 64)
<< QImage::Format_RGB32
- << QVideoFrame::Format_RGB32;
+ << QVideoSurfaceFormat::Format_RGB32;
QTest::newRow("12x45 RGB16")
<< QSize(12, 45)
<< QImage::Format_RGB16
- << QVideoFrame::Format_RGB565;
+ << QVideoSurfaceFormat::Format_RGB565;
QTest::newRow("19x46 ARGB32_Premultiplied")
<< QSize(19, 46)
<< QImage::Format_ARGB32_Premultiplied
- << QVideoFrame::Format_ARGB32_Premultiplied;
+ << QVideoSurfaceFormat::Format_ARGB32_Premultiplied;
}
void tst_QVideoFrame::createFromImage()
{
QFETCH(QSize, size);
QFETCH(QImage::Format, imageFormat);
- QFETCH(QVideoFrame::PixelFormat, pixelFormat);
+ QFETCH(QVideoSurfaceFormat::PixelFormat, pixelFormat);
const QImage image(size.width(), size.height(), imageFormat);
@@ -325,7 +325,7 @@ void tst_QVideoFrame::createFromIncompatibleImage()
QVERIFY(!frame.isValid());
QCOMPARE(frame.handleType(), QVideoFrame::NoHandle);
- QCOMPARE(frame.pixelFormat(), QVideoFrame::Format_Invalid);
+ QCOMPARE(frame.pixelFormat(), QVideoSurfaceFormat::Format_Invalid);
QCOMPARE(frame.size(), QSize(64, 64));
QCOMPARE(frame.width(), 64);
QCOMPARE(frame.height(), 64);
@@ -341,7 +341,7 @@ void tst_QVideoFrame::createNull()
QVERIFY(!frame.isValid());
QCOMPARE(frame.handleType(), QVideoFrame::NoHandle);
- QCOMPARE(frame.pixelFormat(), QVideoFrame::Format_Invalid);
+ QCOMPARE(frame.pixelFormat(), QVideoSurfaceFormat::Format_Invalid);
QCOMPARE(frame.size(), QSize());
QCOMPARE(frame.width(), -1);
QCOMPARE(frame.height(), -1);
@@ -359,10 +359,10 @@ void tst_QVideoFrame::createNull()
// Null buffer (shouldn't crash)
{
- QVideoFrame frame(nullptr, QVideoSurfaceFormat(QSize(1024,768), QVideoFrame::Format_ARGB32));
+ QVideoFrame frame(nullptr, QVideoSurfaceFormat(QSize(1024,768), QVideoSurfaceFormat::Format_ARGB32));
QVERIFY(!frame.isValid());
QCOMPARE(frame.handleType(), QVideoFrame::NoHandle);
- QCOMPARE(frame.pixelFormat(), QVideoFrame::Format_ARGB32);
+ QCOMPARE(frame.pixelFormat(), QVideoSurfaceFormat::Format_ARGB32);
QCOMPARE(frame.size(), QSize(1024, 768));
QCOMPARE(frame.width(), 1024);
QCOMPARE(frame.height(), 768);
@@ -384,7 +384,7 @@ void tst_QVideoFrame::destructor()
QPointer<QtTestDummyVideoBuffer> buffer = new QtTestDummyVideoBuffer;
{
- QVideoFrame frame(buffer, QVideoSurfaceFormat(QSize(4, 1), QVideoFrame::Format_ARGB32));
+ QVideoFrame frame(buffer, QVideoSurfaceFormat(QSize(4, 1), QVideoSurfaceFormat::Format_ARGB32));
}
QVERIFY(buffer.isNull());
@@ -394,38 +394,38 @@ void tst_QVideoFrame::copy_data()
{
QTest::addColumn<QVideoFrame::HandleType>("handleType");
QTest::addColumn<QSize>("size");
- QTest::addColumn<QVideoFrame::PixelFormat>("pixelFormat");
+ QTest::addColumn<QVideoSurfaceFormat::PixelFormat>("pixelFormat");
QTest::addColumn<qint64>("startTime");
QTest::addColumn<qint64>("endTime");
QTest::newRow("64x64 ARGB32")
<< QVideoFrame::GLTextureHandle
<< QSize(64, 64)
- << QVideoFrame::Format_ARGB32
+ << QVideoSurfaceFormat::Format_ARGB32
<< qint64(63641740)
<< qint64(63641954);
QTest::newRow("64x64 ARGB32")
<< QVideoFrame::GLTextureHandle
<< QSize(64, 64)
- << QVideoFrame::Format_ARGB32
+ << QVideoSurfaceFormat::Format_ARGB32
<< qint64(63641740)
<< qint64(63641954);
QTest::newRow("32x256 YUV420P")
<< QVideoFrame::NoHandle
<< QSize(32, 256)
- << QVideoFrame::Format_YUV420P
+ << QVideoSurfaceFormat::Format_YUV420P
<< qint64(12345)
<< qint64(12389);
QTest::newRow("1052x756 ARGB32")
<< QVideoFrame::NoHandle
<< QSize(1052, 756)
- << QVideoFrame::Format_ARGB32
+ << QVideoSurfaceFormat::Format_ARGB32
<< qint64(12345)
<< qint64(12389);
QTest::newRow("32x256 YUV420P")
<< QVideoFrame::NoHandle
<< QSize(32, 256)
- << QVideoFrame::Format_YUV420P
+ << QVideoSurfaceFormat::Format_YUV420P
<< qint64(12345)
<< qint64(12389);
}
@@ -434,7 +434,7 @@ void tst_QVideoFrame::copy()
{
QFETCH(QVideoFrame::HandleType, handleType);
QFETCH(QSize, size);
- QFETCH(QVideoFrame::PixelFormat, pixelFormat);
+ QFETCH(QVideoSurfaceFormat::PixelFormat, pixelFormat);
QFETCH(qint64, startTime);
QFETCH(qint64, endTime);
@@ -501,20 +501,20 @@ void tst_QVideoFrame::assign_data()
{
QTest::addColumn<QVideoFrame::HandleType>("handleType");
QTest::addColumn<QSize>("size");
- QTest::addColumn<QVideoFrame::PixelFormat>("pixelFormat");
+ QTest::addColumn<QVideoSurfaceFormat::PixelFormat>("pixelFormat");
QTest::addColumn<qint64>("startTime");
QTest::addColumn<qint64>("endTime");
QTest::newRow("64x64 ARGB32")
<< QVideoFrame::GLTextureHandle
<< QSize(64, 64)
- << QVideoFrame::Format_ARGB32
+ << QVideoSurfaceFormat::Format_ARGB32
<< qint64(63641740)
<< qint64(63641954);
QTest::newRow("32x256 YUV420P")
<< QVideoFrame::NoHandle
<< QSize(32, 256)
- << QVideoFrame::Format_YUV420P
+ << QVideoSurfaceFormat::Format_YUV420P
<< qint64(12345)
<< qint64(12389);
}
@@ -523,7 +523,7 @@ void tst_QVideoFrame::assign()
{
QFETCH(QVideoFrame::HandleType, handleType);
QFETCH(QSize, size);
- QFETCH(QVideoFrame::PixelFormat, pixelFormat);
+ QFETCH(QVideoSurfaceFormat::PixelFormat, pixelFormat);
QFETCH(qint64, startTime);
QFETCH(qint64, endTime);
@@ -579,7 +579,7 @@ void tst_QVideoFrame::assign()
QVERIFY(!frame.isValid());
QCOMPARE(frame.handleType(), QVideoFrame::NoHandle);
- QCOMPARE(frame.pixelFormat(), QVideoFrame::Format_Invalid);
+ QCOMPARE(frame.pixelFormat(), QVideoSurfaceFormat::Format_Invalid);
QCOMPARE(frame.size(), QSize());
QCOMPARE(frame.width(), -1);
QCOMPARE(frame.height(), -1);
@@ -592,28 +592,28 @@ void tst_QVideoFrame::map_data()
QTest::addColumn<QSize>("size");
QTest::addColumn<int>("mappedBytes");
QTest::addColumn<int>("bytesPerLine");
- QTest::addColumn<QVideoFrame::PixelFormat>("pixelFormat");
+ QTest::addColumn<QVideoSurfaceFormat::PixelFormat>("pixelFormat");
QTest::addColumn<QVideoFrame::MapMode>("mode");
QTest::newRow("read-only")
<< QSize(64, 64)
<< 16384
<< 256
- << QVideoFrame::Format_ARGB32
+ << QVideoSurfaceFormat::Format_ARGB32
<< QVideoFrame::ReadOnly;
QTest::newRow("write-only")
<< QSize(64, 64)
<< 16384
<< 256
- << QVideoFrame::Format_ARGB32
+ << QVideoSurfaceFormat::Format_ARGB32
<< QVideoFrame::WriteOnly;
QTest::newRow("read-write")
<< QSize(64, 64)
<< 16384
<< 256
- << QVideoFrame::Format_ARGB32
+ << QVideoSurfaceFormat::Format_ARGB32
<< QVideoFrame::ReadWrite;
}
@@ -622,7 +622,7 @@ void tst_QVideoFrame::map()
QFETCH(QSize, size);
QFETCH(int, mappedBytes);
QFETCH(int, bytesPerLine);
- QFETCH(QVideoFrame::PixelFormat, pixelFormat);
+ QFETCH(QVideoSurfaceFormat::PixelFormat, pixelFormat);
QFETCH(QVideoFrame::MapMode, mode);
QVideoFrame frame(mappedBytes, bytesPerLine, QVideoSurfaceFormat(size, pixelFormat));
@@ -740,43 +740,43 @@ void tst_QVideoFrame::mapPlanes_data()
planarBuffer->m_numBytes = sizeof(bufferData);
QTest::newRow("Planar")
- << QVideoFrame(planarBuffer, QVideoSurfaceFormat(QSize(64, 64), QVideoFrame::Format_YUV420P))
+ << QVideoFrame(planarBuffer, QVideoSurfaceFormat(QSize(64, 64), QVideoSurfaceFormat::Format_YUV420P))
<< (QList<int>() << 64 << 36 << 36)
<< (QList<int>() << 512 << 765);
QTest::newRow("Format_YUV420P")
- << QVideoFrame(8096, 64, QVideoSurfaceFormat(QSize(60, 64), QVideoFrame::Format_YUV420P))
+ << QVideoFrame(8096, 64, QVideoSurfaceFormat(QSize(60, 64), QVideoSurfaceFormat::Format_YUV420P))
<< (QList<int>() << 64 << 62 << 62)
<< (QList<int>() << 4096 << 6080);
QTest::newRow("Format_YV12")
- << QVideoFrame(8096, 64, QVideoSurfaceFormat(QSize(60, 64), QVideoFrame::Format_YV12))
+ << QVideoFrame(8096, 64, QVideoSurfaceFormat(QSize(60, 64), QVideoSurfaceFormat::Format_YV12))
<< (QList<int>() << 64 << 62 << 62)
<< (QList<int>() << 4096 << 6080);
QTest::newRow("Format_NV12")
- << QVideoFrame(8096, 64, QVideoSurfaceFormat(QSize(60, 64), QVideoFrame::Format_NV12))
+ << QVideoFrame(8096, 64, QVideoSurfaceFormat(QSize(60, 64), QVideoSurfaceFormat::Format_NV12))
<< (QList<int>() << 64 << 64)
<< (QList<int>() << 4096);
QTest::newRow("Format_NV21")
- << QVideoFrame(8096, 64, QVideoSurfaceFormat(QSize(60, 64), QVideoFrame::Format_NV21))
+ << QVideoFrame(8096, 64, QVideoSurfaceFormat(QSize(60, 64), QVideoSurfaceFormat::Format_NV21))
<< (QList<int>() << 64 << 64)
<< (QList<int>() << 4096);
QTest::newRow("Format_IMC2")
- << QVideoFrame(8096, 64, QVideoSurfaceFormat(QSize(60, 64), QVideoFrame::Format_IMC2))
+ << QVideoFrame(8096, 64, QVideoSurfaceFormat(QSize(60, 64), QVideoSurfaceFormat::Format_IMC2))
<< (QList<int>() << 64 << 64)
<< (QList<int>() << 4096);
QTest::newRow("Format_IMC4")
- << QVideoFrame(8096, 64, QVideoSurfaceFormat(QSize(60, 64), QVideoFrame::Format_IMC4))
+ << QVideoFrame(8096, 64, QVideoSurfaceFormat(QSize(60, 64), QVideoSurfaceFormat::Format_IMC4))
<< (QList<int>() << 64 << 64)
<< (QList<int>() << 4096);
QTest::newRow("Format_IMC1")
- << QVideoFrame(8096, 64, QVideoSurfaceFormat(QSize(60, 64), QVideoFrame::Format_IMC1))
+ << QVideoFrame(8096, 64, QVideoSurfaceFormat(QSize(60, 64), QVideoSurfaceFormat::Format_IMC1))
<< (QList<int>() << 64 << 64 << 64)
<< (QList<int>() << 4096 << 6144);
QTest::newRow("Format_IMC3")
- << QVideoFrame(8096, 64, QVideoSurfaceFormat(QSize(60, 64), QVideoFrame::Format_IMC3))
+ << QVideoFrame(8096, 64, QVideoSurfaceFormat(QSize(60, 64), QVideoSurfaceFormat::Format_IMC3))
<< (QList<int>() << 64 << 64 << 64)
<< (QList<int>() << 4096 << 6144);
QTest::newRow("Format_ARGB32")
- << QVideoFrame(8096, 256, QVideoSurfaceFormat(QSize(60, 64), QVideoFrame::Format_ARGB32))
+ << QVideoFrame(8096, 256, QVideoSurfaceFormat(QSize(60, 64), QVideoSurfaceFormat::Format_ARGB32))
<< (QList<int>() << 256)
<< (QList<int>());
}
@@ -840,133 +840,133 @@ void tst_QVideoFrame::imageDetach()
void tst_QVideoFrame::formatConversion_data()
{
QTest::addColumn<QImage::Format>("imageFormat");
- QTest::addColumn<QVideoFrame::PixelFormat>("pixelFormat");
+ QTest::addColumn<QVideoSurfaceFormat::PixelFormat>("pixelFormat");
- QTest::newRow("QImage::Format_RGB32 | QVideoFrame::Format_RGB32")
+ QTest::newRow("QImage::Format_RGB32 | QVideoSurfaceFormat::Format_RGB32")
<< QImage::Format_RGB32
- << QVideoFrame::Format_RGB32;
- QTest::newRow("QImage::Format_ARGB32 | QVideoFrame::Format_ARGB32")
+ << QVideoSurfaceFormat::Format_RGB32;
+ QTest::newRow("QImage::Format_ARGB32 | QVideoSurfaceFormat::Format_ARGB32")
<< QImage::Format_ARGB32
- << QVideoFrame::Format_ARGB32;
- QTest::newRow("QImage::Format_ARGB32_Premultiplied | QVideoFrame::Format_ARGB32_Premultiplied")
+ << QVideoSurfaceFormat::Format_ARGB32;
+ QTest::newRow("QImage::Format_ARGB32_Premultiplied | QVideoSurfaceFormat::Format_ARGB32_Premultiplied")
<< QImage::Format_ARGB32_Premultiplied
- << QVideoFrame::Format_ARGB32_Premultiplied;
- QTest::newRow("QImage::Format_RGB16 | QVideoFrame::Format_RGB565")
+ << QVideoSurfaceFormat::Format_ARGB32_Premultiplied;
+ QTest::newRow("QImage::Format_RGB16 | QVideoSurfaceFormat::Format_RGB565")
<< QImage::Format_RGB16
- << QVideoFrame::Format_RGB565;
- QTest::newRow("QImage::Format_ARGB8565_Premultiplied | QVideoFrame::Format_ARGB8565_Premultiplied")
+ << QVideoSurfaceFormat::Format_RGB565;
+ QTest::newRow("QImage::Format_ARGB8565_Premultiplied | QVideoSurfaceFormat::Format_ARGB8565_Premultiplied")
<< QImage::Format_ARGB8565_Premultiplied
- << QVideoFrame::Format_ARGB8565_Premultiplied;
- QTest::newRow("QImage::Format_RGB555 | QVideoFrame::Format_RGB555")
+ << QVideoSurfaceFormat::Format_ARGB8565_Premultiplied;
+ QTest::newRow("QImage::Format_RGB555 | QVideoSurfaceFormat::Format_RGB555")
<< QImage::Format_RGB555
- << QVideoFrame::Format_RGB555;
- QTest::newRow("QImage::Format_RGB888 | QVideoFrame::Format_RGB24")
+ << QVideoSurfaceFormat::Format_RGB555;
+ QTest::newRow("QImage::Format_RGB888 | QVideoSurfaceFormat::Format_RGB24")
<< QImage::Format_RGB888
- << QVideoFrame::Format_RGB24;
+ << QVideoSurfaceFormat::Format_RGB24;
QTest::newRow("QImage::Format_MonoLSB")
<< QImage::Format_MonoLSB
- << QVideoFrame::Format_Invalid;
+ << QVideoSurfaceFormat::Format_Invalid;
QTest::newRow("QImage::Format_Indexed8")
<< QImage::Format_Indexed8
- << QVideoFrame::Format_Invalid;
+ << QVideoSurfaceFormat::Format_Invalid;
QTest::newRow("QImage::Format_ARGB6666_Premultiplied")
<< QImage::Format_ARGB6666_Premultiplied
- << QVideoFrame::Format_Invalid;
+ << QVideoSurfaceFormat::Format_Invalid;
QTest::newRow("QImage::Format_ARGB8555_Premultiplied")
<< QImage::Format_ARGB8555_Premultiplied
- << QVideoFrame::Format_Invalid;
+ << QVideoSurfaceFormat::Format_Invalid;
QTest::newRow("QImage::Format_RGB666")
<< QImage::Format_RGB666
- << QVideoFrame::Format_Invalid;
+ << QVideoSurfaceFormat::Format_Invalid;
QTest::newRow("QImage::Format_RGB444")
<< QImage::Format_RGB444
- << QVideoFrame::Format_Invalid;
+ << QVideoSurfaceFormat::Format_Invalid;
QTest::newRow("QImage::Format_ARGB4444_Premultiplied")
<< QImage::Format_ARGB4444_Premultiplied
- << QVideoFrame::Format_Invalid;
+ << QVideoSurfaceFormat::Format_Invalid;
- QTest::newRow("QVideoFrame::Format_BGRA32")
+ QTest::newRow("QVideoSurfaceFormat::Format_BGRA32")
<< QImage::Format_Invalid
- << QVideoFrame::Format_BGRA32;
- QTest::newRow("QVideoFrame::Format_BGRA32_Premultiplied")
+ << QVideoSurfaceFormat::Format_BGRA32;
+ QTest::newRow("QVideoSurfaceFormat::Format_BGRA32_Premultiplied")
<< QImage::Format_Invalid
- << QVideoFrame::Format_BGRA32_Premultiplied;
- QTest::newRow("QVideoFrame::Format_BGR32")
+ << QVideoSurfaceFormat::Format_BGRA32_Premultiplied;
+ QTest::newRow("QVideoSurfaceFormat::Format_BGR32")
<< QImage::Format_Invalid
- << QVideoFrame::Format_BGR32;
- QTest::newRow("QVideoFrame::Format_BGR24")
+ << QVideoSurfaceFormat::Format_BGR32;
+ QTest::newRow("QVideoSurfaceFormat::Format_BGR24")
<< QImage::Format_Invalid
- << QVideoFrame::Format_BGR24;
- QTest::newRow("QVideoFrame::Format_BGR565")
+ << QVideoSurfaceFormat::Format_BGR24;
+ QTest::newRow("QVideoSurfaceFormat::Format_BGR565")
<< QImage::Format_Invalid
- << QVideoFrame::Format_BGR565;
- QTest::newRow("QVideoFrame::Format_BGR555")
+ << QVideoSurfaceFormat::Format_BGR565;
+ QTest::newRow("QVideoSurfaceFormat::Format_BGR555")
<< QImage::Format_Invalid
- << QVideoFrame::Format_BGR555;
- QTest::newRow("QVideoFrame::Format_BGRA5658_Premultiplied")
+ << QVideoSurfaceFormat::Format_BGR555;
+ QTest::newRow("QVideoSurfaceFormat::Format_BGRA5658_Premultiplied")
<< QImage::Format_Invalid
- << QVideoFrame::Format_BGRA5658_Premultiplied;
- QTest::newRow("QVideoFrame::Format_AYUV444")
+ << QVideoSurfaceFormat::Format_BGRA5658_Premultiplied;
+ QTest::newRow("QVideoSurfaceFormat::Format_AYUV444")
<< QImage::Format_Invalid
- << QVideoFrame::Format_AYUV444;
- QTest::newRow("QVideoFrame::Format_AYUV444_Premultiplied")
+ << QVideoSurfaceFormat::Format_AYUV444;
+ QTest::newRow("QVideoSurfaceFormat::Format_AYUV444_Premultiplied")
<< QImage::Format_Invalid
- << QVideoFrame::Format_AYUV444_Premultiplied;
- QTest::newRow("QVideoFrame::Format_YUV444")
+ << QVideoSurfaceFormat::Format_AYUV444_Premultiplied;
+ QTest::newRow("QVideoSurfaceFormat::Format_YUV444")
<< QImage::Format_Invalid
- << QVideoFrame::Format_YUV444;
- QTest::newRow("QVideoFrame::Format_YUV420P")
+ << QVideoSurfaceFormat::Format_YUV444;
+ QTest::newRow("QVideoSurfaceFormat::Format_YUV420P")
<< QImage::Format_Invalid
- << QVideoFrame::Format_YUV420P;
- QTest::newRow("QVideoFrame::Format_YV12")
+ << QVideoSurfaceFormat::Format_YUV420P;
+ QTest::newRow("QVideoSurfaceFormat::Format_YV12")
<< QImage::Format_Invalid
- << QVideoFrame::Format_YV12;
- QTest::newRow("QVideoFrame::Format_UYVY")
+ << QVideoSurfaceFormat::Format_YV12;
+ QTest::newRow("QVideoSurfaceFormat::Format_UYVY")
<< QImage::Format_Invalid
- << QVideoFrame::Format_UYVY;
- QTest::newRow("QVideoFrame::Format_YUYV")
+ << QVideoSurfaceFormat::Format_UYVY;
+ QTest::newRow("QVideoSurfaceFormat::Format_YUYV")
<< QImage::Format_Invalid
- << QVideoFrame::Format_YUYV;
- QTest::newRow("QVideoFrame::Format_NV12")
+ << QVideoSurfaceFormat::Format_YUYV;
+ QTest::newRow("QVideoSurfaceFormat::Format_NV12")
<< QImage::Format_Invalid
- << QVideoFrame::Format_NV12;
- QTest::newRow("QVideoFrame::Format_NV21")
+ << QVideoSurfaceFormat::Format_NV12;
+ QTest::newRow("QVideoSurfaceFormat::Format_NV21")
<< QImage::Format_Invalid
- << QVideoFrame::Format_NV21;
- QTest::newRow("QVideoFrame::Format_IMC1")
+ << QVideoSurfaceFormat::Format_NV21;
+ QTest::newRow("QVideoSurfaceFormat::Format_IMC1")
<< QImage::Format_Invalid
- << QVideoFrame::Format_IMC1;
- QTest::newRow("QVideoFrame::Format_IMC2")
+ << QVideoSurfaceFormat::Format_IMC1;
+ QTest::newRow("QVideoSurfaceFormat::Format_IMC2")
<< QImage::Format_Invalid
- << QVideoFrame::Format_IMC2;
- QTest::newRow("QVideoFrame::Format_IMC3")
+ << QVideoSurfaceFormat::Format_IMC2;
+ QTest::newRow("QVideoSurfaceFormat::Format_IMC3")
<< QImage::Format_Invalid
- << QVideoFrame::Format_IMC3;
- QTest::newRow("QVideoFrame::Format_IMC4")
+ << QVideoSurfaceFormat::Format_IMC3;
+ QTest::newRow("QVideoSurfaceFormat::Format_IMC4")
<< QImage::Format_Invalid
- << QVideoFrame::Format_IMC4;
- QTest::newRow("QVideoFrame::Format_Y8")
+ << QVideoSurfaceFormat::Format_IMC4;
+ QTest::newRow("QVideoSurfaceFormat::Format_Y8")
<< QImage::Format_Invalid
- << QVideoFrame::Format_Y8;
- QTest::newRow("QVideoFrame::Format_Y16")
+ << QVideoSurfaceFormat::Format_Y8;
+ QTest::newRow("QVideoSurfaceFormat::Format_Y16")
<< QImage::Format_Invalid
- << QVideoFrame::Format_Y16;
- QTest::newRow("QVideoFrame::Format_Jpeg")
+ << QVideoSurfaceFormat::Format_Y16;
+ QTest::newRow("QVideoSurfaceFormat::Format_Jpeg")
<< QImage::Format_Invalid
- << QVideoFrame::Format_Jpeg;
+ << QVideoSurfaceFormat::Format_Jpeg;
}
void tst_QVideoFrame::formatConversion()
{
QFETCH(QImage::Format, imageFormat);
- QFETCH(QVideoFrame::PixelFormat, pixelFormat);
+ QFETCH(QVideoSurfaceFormat::PixelFormat, pixelFormat);
- QCOMPARE(QVideoFrame::pixelFormatFromImageFormat(imageFormat) == pixelFormat,
+ QCOMPARE(QVideoSurfaceFormat::pixelFormatFromImageFormat(imageFormat) == pixelFormat,
imageFormat != QImage::Format_Invalid);
- QCOMPARE(QVideoFrame::imageFormatFromPixelFormat(pixelFormat) == imageFormat,
- pixelFormat != QVideoFrame::Format_Invalid);
+ QCOMPARE(QVideoSurfaceFormat::imageFormatFromPixelFormat(pixelFormat) == imageFormat,
+ pixelFormat != QVideoSurfaceFormat::Format_Invalid);
}
#define TEST_MAPPED(frame, mode) \
@@ -989,7 +989,7 @@ do { \
void tst_QVideoFrame::isMapped()
{
- QVideoFrame frame(16384, 256, QVideoSurfaceFormat(QSize(64, 64), QVideoFrame::Format_ARGB32));
+ QVideoFrame frame(16384, 256, QVideoSurfaceFormat(QSize(64, 64), QVideoSurfaceFormat::Format_ARGB32));
const QVideoFrame& constFrame(frame);
TEST_UNMAPPED(frame);
@@ -1019,7 +1019,7 @@ void tst_QVideoFrame::isMapped()
void tst_QVideoFrame::isReadable()
{
- QVideoFrame frame(16384, 256, QVideoSurfaceFormat(QSize(64, 64), QVideoFrame::Format_ARGB32));
+ QVideoFrame frame(16384, 256, QVideoSurfaceFormat(QSize(64, 64), QVideoSurfaceFormat::Format_ARGB32));
QVERIFY(!frame.isMapped());
QVERIFY(!frame.isReadable());
@@ -1042,7 +1042,7 @@ void tst_QVideoFrame::isReadable()
void tst_QVideoFrame::isWritable()
{
- QVideoFrame frame(16384, 256, QVideoSurfaceFormat(QSize(64, 64), QVideoFrame::Format_ARGB32));
+ QVideoFrame frame(16384, 256, QVideoSurfaceFormat(QSize(64, 64), QVideoSurfaceFormat::Format_ARGB32));
QVERIFY(!frame.isMapped());
QVERIFY(!frame.isWritable());
@@ -1066,146 +1066,146 @@ void tst_QVideoFrame::isWritable()
void tst_QVideoFrame::image_data()
{
QTest::addColumn<QSize>("size");
- QTest::addColumn<QVideoFrame::PixelFormat>("pixelFormat");
+ QTest::addColumn<QVideoSurfaceFormat::PixelFormat>("pixelFormat");
QTest::addColumn<int>("bytes");
QTest::addColumn<int>("bytesPerLine");
QTest::addColumn<QImage::Format>("imageFormat");
QTest::newRow("64x64 ARGB32")
<< QSize(64, 64)
- << QVideoFrame::Format_ARGB32
+ << QVideoSurfaceFormat::Format_ARGB32
<< 16384
<< 256
<< QImage::Format_ARGB32;
QTest::newRow("64x64 ARGB32_Premultiplied")
<< QSize(64, 64)
- << QVideoFrame::Format_ARGB32_Premultiplied
+ << QVideoSurfaceFormat::Format_ARGB32_Premultiplied
<< 16384
<< 256
<< QImage::Format_ARGB32_Premultiplied;
QTest::newRow("64x64 RGB32")
<< QSize(64, 64)
- << QVideoFrame::Format_RGB32
+ << QVideoSurfaceFormat::Format_RGB32
<< 16384
<< 256
<< QImage::Format_RGB32;
QTest::newRow("64x64 RGB24")
<< QSize(64, 64)
- << QVideoFrame::Format_RGB24
+ << QVideoSurfaceFormat::Format_RGB24
<< 16384
<< 192
<< QImage::Format_RGB888;
QTest::newRow("64x64 RGB565")
<< QSize(64, 64)
- << QVideoFrame::Format_RGB565
+ << QVideoSurfaceFormat::Format_RGB565
<< 16384
<< 128
<< QImage::Format_RGB16;
QTest::newRow("64x64 RGB555")
<< QSize(64, 64)
- << QVideoFrame::Format_RGB555
+ << QVideoSurfaceFormat::Format_RGB555
<< 16384
<< 128
<< QImage::Format_RGB555;
QTest::newRow("64x64 BGRA32")
<< QSize(64, 64)
- << QVideoFrame::Format_BGRA32
+ << QVideoSurfaceFormat::Format_BGRA32
<< 16384
<< 256
<< QImage::Format_ARGB32;
QTest::newRow("64x64 BGRA32_Premultiplied")
<< QSize(64, 64)
- << QVideoFrame::Format_BGRA32_Premultiplied
+ << QVideoSurfaceFormat::Format_BGRA32_Premultiplied
<< 16384
<< 256
<< QImage::Format_ARGB32;
QTest::newRow("64x64 BGR32")
<< QSize(64, 64)
- << QVideoFrame::Format_BGR32
+ << QVideoSurfaceFormat::Format_BGR32
<< 16384
<< 256
<< QImage::Format_ARGB32;
QTest::newRow("64x64 BGR24")
<< QSize(64, 64)
- << QVideoFrame::Format_BGR24
+ << QVideoSurfaceFormat::Format_BGR24
<< 16384
<< 256
<< QImage::Format_ARGB32;
QTest::newRow("64x64 BGR565")
<< QSize(64, 64)
- << QVideoFrame::Format_BGR565
+ << QVideoSurfaceFormat::Format_BGR565
<< 16384
<< 256
<< QImage::Format_ARGB32;
QTest::newRow("64x64 BGR555")
<< QSize(64, 64)
- << QVideoFrame::Format_BGR555
+ << QVideoSurfaceFormat::Format_BGR555
<< 16384
<< 256
<< QImage::Format_ARGB32;
QTest::newRow("64x64 AYUV444")
<< QSize(64, 64)
- << QVideoFrame::Format_AYUV444
+ << QVideoSurfaceFormat::Format_AYUV444
<< 16384
<< 256
<< QImage::Format_ARGB32;
QTest::newRow("64x64 YUV444")
<< QSize(64, 64)
- << QVideoFrame::Format_YUV444
+ << QVideoSurfaceFormat::Format_YUV444
<< 16384
<< 256
<< QImage::Format_ARGB32;
QTest::newRow("64x64 YUV420P")
<< QSize(64, 64)
- << QVideoFrame::Format_YUV420P
+ << QVideoSurfaceFormat::Format_YUV420P
<< 13288
<< 256
<< QImage::Format_ARGB32;
QTest::newRow("64x64 YV12")
<< QSize(64, 64)
- << QVideoFrame::Format_YV12
+ << QVideoSurfaceFormat::Format_YV12
<< 16384
<< 256
<< QImage::Format_ARGB32;
QTest::newRow("64x64 UYVY")
<< QSize(64, 64)
- << QVideoFrame::Format_UYVY
+ << QVideoSurfaceFormat::Format_UYVY
<< 16384
<< 256
<< QImage::Format_ARGB32;
QTest::newRow("64x64 YUYV")
<< QSize(64, 64)
- << QVideoFrame::Format_YUYV
+ << QVideoSurfaceFormat::Format_YUYV
<< 16384
<< 256
<< QImage::Format_ARGB32;
QTest::newRow("64x64 NV12")
<< QSize(64, 64)
- << QVideoFrame::Format_NV12
+ << QVideoSurfaceFormat::Format_NV12
<< 16384
<< 256
<< QImage::Format_ARGB32;
QTest::newRow("64x64 NV21")
<< QSize(64, 64)
- << QVideoFrame::Format_NV21
+ << QVideoSurfaceFormat::Format_NV21
<< 16384
<< 256
<< QImage::Format_ARGB32;
@@ -1214,7 +1214,7 @@ void tst_QVideoFrame::image_data()
void tst_QVideoFrame::image()
{
QFETCH(QSize, size);
- QFETCH(QVideoFrame::PixelFormat, pixelFormat);
+ QFETCH(QVideoSurfaceFormat::PixelFormat, pixelFormat);
QFETCH(int, bytes);
QFETCH(int, bytesPerLine);
QFETCH(QImage::Format, imageFormat);
@@ -1232,7 +1232,7 @@ void tst_QVideoFrame::emptyData()
{
QByteArray data(nullptr, 0);
QVideoFrame f(new QMemoryVideoBuffer(data, 600),
- QVideoSurfaceFormat(QSize(800, 600), QVideoFrame::Format_ARGB32));
+ QVideoSurfaceFormat(QSize(800, 600), QVideoSurfaceFormat::Format_ARGB32));
QVERIFY(!f.map(QVideoFrame::ReadOnly));
}
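
The formatConversion() hunks above exercise the two mapping helpers in their new home on QVideoSurfaceFormat: only formats with a direct QImage equivalent map in both directions, everything else yields Format_Invalid. A minimal round-trip sketch of that rule (roundTrips() is illustrative only):

    #include <QImage>
    #include <QVideoSurfaceFormat>

    // Sketch: round-trip between QImage::Format and the video pixel format,
    // mirroring tst_QVideoFrame::formatConversion() above.
    bool roundTrips(QImage::Format imageFormat)
    {
        const auto pf = QVideoSurfaceFormat::pixelFormatFromImageFormat(imageFormat);
        if (pf == QVideoSurfaceFormat::Format_Invalid)
            return false;   // no direct video equivalent (e.g. QImage::Format_Indexed8)
        return QVideoSurfaceFormat::imageFormatFromPixelFormat(pf) == imageFormat;
    }
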
diff --git a/tests/auto/unit/multimedia/qvideosurfaceformat/tst_qvideosurfaceformat.cpp b/tests/auto/unit/multimedia/qvideosurfaceformat/tst_qvideosurfaceformat.cpp
index 2ecfb34b1..99799d476 100644
--- a/tests/auto/unit/multimedia/qvideosurfaceformat/tst_qvideosurfaceformat.cpp
+++ b/tests/auto/unit/multimedia/qvideosurfaceformat/tst_qvideosurfaceformat.cpp
@@ -105,7 +105,7 @@ void tst_QVideoSurfaceFormat::constructNull()
QVideoSurfaceFormat format;
QVERIFY(!format.isValid());
- QCOMPARE(format.pixelFormat(), QVideoFrame::Format_Invalid);
+ QCOMPARE(format.pixelFormat(), QVideoSurfaceFormat::Format_Invalid);
QCOMPARE(format.frameSize(), QSize());
QCOMPARE(format.frameWidth(), -1);
QCOMPARE(format.frameHeight(), -1);
@@ -118,39 +118,39 @@ void tst_QVideoSurfaceFormat::constructNull()
void tst_QVideoSurfaceFormat::construct_data()
{
QTest::addColumn<QSize>("frameSize");
- QTest::addColumn<QVideoFrame::PixelFormat>("pixelFormat");
+ QTest::addColumn<QVideoSurfaceFormat::PixelFormat>("pixelFormat");
QTest::addColumn<bool>("valid");
QTest::newRow("32x32 rgb32 no handle")
<< QSize(32, 32)
- << QVideoFrame::Format_RGB32
+ << QVideoSurfaceFormat::Format_RGB32
<< true;
QTest::newRow("1024x768 YUV444 GL texture")
<< QSize(32, 32)
- << QVideoFrame::Format_YUV444
+ << QVideoSurfaceFormat::Format_YUV444
<< true;
QTest::newRow("32x32 invalid no handle")
<< QSize(32, 32)
- << QVideoFrame::Format_Invalid
+ << QVideoSurfaceFormat::Format_Invalid
<< false;
QTest::newRow("invalid size, rgb32 no handle")
<< QSize()
- << QVideoFrame::Format_RGB32
+ << QVideoSurfaceFormat::Format_RGB32
<< false;
QTest::newRow("0x0 rgb32 no handle")
<< QSize(0,0)
- << QVideoFrame::Format_RGB32
+ << QVideoSurfaceFormat::Format_RGB32
<< true;
}
void tst_QVideoSurfaceFormat::construct()
{
QFETCH(QSize, frameSize);
- QFETCH(QVideoFrame::PixelFormat, pixelFormat);
+ QFETCH(QVideoSurfaceFormat::PixelFormat, pixelFormat);
QFETCH(bool, valid);
QRect viewport(QPoint(0, 0), frameSize);
@@ -189,7 +189,7 @@ void tst_QVideoSurfaceFormat::frameSize()
QFETCH(QSize, initialSize);
QFETCH(QSize, newSize);
- QVideoSurfaceFormat format(initialSize, QVideoFrame::Format_RGB32);
+ QVideoSurfaceFormat format(initialSize, QVideoSurfaceFormat::Format_RGB32);
format.setFrameSize(newSize);
@@ -231,7 +231,7 @@ void tst_QVideoSurfaceFormat::viewport()
QRect initialViewport(QPoint(0, 0), initialSize);
- QVideoSurfaceFormat format(initialSize, QVideoFrame::Format_RGB32);
+ QVideoSurfaceFormat format(initialSize, QVideoSurfaceFormat::Format_RGB32);
format.setViewport(viewport);
@@ -256,7 +256,7 @@ void tst_QVideoSurfaceFormat::scanLineDirection()
QFETCH(QVideoSurfaceFormat::Direction, direction);
QFETCH(QString, stringized);
- QVideoSurfaceFormat format(QSize(16, 16), QVideoFrame::Format_RGB32);
+ QVideoSurfaceFormat format(QSize(16, 16), QVideoSurfaceFormat::Format_RGB32);
format.setScanLineDirection(direction);
@@ -285,7 +285,7 @@ void tst_QVideoSurfaceFormat::yCbCrColorSpaceEnum()
QFETCH(QVideoSurfaceFormat::YCbCrColorSpace, colorspace);
QFETCH(QString, stringized);
- QVideoSurfaceFormat format(QSize(64, 64), QVideoFrame::Format_RGB32);
+ QVideoSurfaceFormat format(QSize(64, 64), QVideoSurfaceFormat::Format_RGB32);
format.setYCbCrColorSpace(colorspace);
QCOMPARE(format.yCbCrColorSpace(), colorspace);
@@ -313,7 +313,7 @@ void tst_QVideoSurfaceFormat::frameRate()
{
QFETCH(qreal, frameRate);
- QVideoSurfaceFormat format(QSize(64, 64), QVideoFrame::Format_RGB32);
+ QVideoSurfaceFormat format(QSize(64, 64), QVideoSurfaceFormat::Format_RGB32);
format.setFrameRate(frameRate);
@@ -342,7 +342,7 @@ void tst_QVideoSurfaceFormat::sizeHint()
QFETCH(QRect, viewport);
QFETCH(QSize, sizeHint);
- QVideoSurfaceFormat format(frameSize, QVideoFrame::Format_RGB32);
+ QVideoSurfaceFormat format(frameSize, QVideoSurfaceFormat::Format_RGB32);
format.setViewport(viewport);
QCOMPARE(format.sizeHint(), sizeHint);
@@ -351,13 +351,13 @@ void tst_QVideoSurfaceFormat::sizeHint()
void tst_QVideoSurfaceFormat::compare()
{
QVideoSurfaceFormat format1(
- QSize(16, 16), QVideoFrame::Format_RGB32);
+ QSize(16, 16), QVideoSurfaceFormat::Format_RGB32);
QVideoSurfaceFormat format2(
- QSize(16, 16), QVideoFrame::Format_RGB32);
+ QSize(16, 16), QVideoSurfaceFormat::Format_RGB32);
QVideoSurfaceFormat format3(
- QSize(32, 32), QVideoFrame::Format_YUV444);
+ QSize(32, 32), QVideoSurfaceFormat::Format_YUV444);
QVideoSurfaceFormat format4(
- QSize(16, 16), QVideoFrame::Format_RGB32);
+ QSize(16, 16), QVideoSurfaceFormat::Format_RGB32);
QCOMPARE(format1 == format2, true);
QCOMPARE(format1 != format2, false);
@@ -432,12 +432,12 @@ void tst_QVideoSurfaceFormat::compare()
void tst_QVideoSurfaceFormat::copy()
{
QVideoSurfaceFormat original(
- QSize(1024, 768), QVideoFrame::Format_ARGB32);
+ QSize(1024, 768), QVideoSurfaceFormat::Format_ARGB32);
original.setScanLineDirection(QVideoSurfaceFormat::BottomToTop);
QVideoSurfaceFormat copy(original);
- QCOMPARE(copy.pixelFormat(), QVideoFrame::Format_ARGB32);
+ QCOMPARE(copy.pixelFormat(), QVideoSurfaceFormat::Format_ARGB32);
QCOMPARE(copy.frameSize(), QSize(1024, 768));
QCOMPARE(copy.scanLineDirection(), QVideoSurfaceFormat::BottomToTop);
@@ -457,15 +457,15 @@ void tst_QVideoSurfaceFormat::copy()
void tst_QVideoSurfaceFormat::assign()
{
QVideoSurfaceFormat copy(
- QSize(64, 64), QVideoFrame::Format_AYUV444);
+ QSize(64, 64), QVideoSurfaceFormat::Format_AYUV444);
QVideoSurfaceFormat original(
- QSize(1024, 768), QVideoFrame::Format_ARGB32);
+ QSize(1024, 768), QVideoSurfaceFormat::Format_ARGB32);
original.setScanLineDirection(QVideoSurfaceFormat::BottomToTop);
copy = original;
- QCOMPARE(copy.pixelFormat(), QVideoFrame::Format_ARGB32);
+ QCOMPARE(copy.pixelFormat(), QVideoSurfaceFormat::Format_ARGB32);
QCOMPARE(copy.frameSize(), QSize(1024, 768));
QCOMPARE(copy.scanLineDirection(), QVideoSurfaceFormat::BottomToTop);
@@ -496,7 +496,7 @@ void tst_QVideoSurfaceFormat::isValid()
QVERIFY(!format.isValid());
/* When both the pixel format and frame size are valid. */
- QVideoSurfaceFormat format1(QSize(32, 32), QVideoFrame::Format_AYUV444);
+ QVideoSurfaceFormat format1(QSize(32, 32), QVideoSurfaceFormat::Format_AYUV444);
QVERIFY(format1.isValid());
/* When pixel format is valid and frame size is not valid */
@@ -510,7 +510,7 @@ void tst_QVideoSurfaceFormat::copyAllParameters()
{
/* Create the instance and set all the parameters. */
QVideoSurfaceFormat original(
- QSize(1024, 768), QVideoFrame::Format_ARGB32);
+ QSize(1024, 768), QVideoSurfaceFormat::Format_ARGB32);
original.setScanLineDirection(QVideoSurfaceFormat::BottomToTop);
original.setViewport(QRect(0, 0, 1024, 1024));
@@ -521,7 +521,7 @@ void tst_QVideoSurfaceFormat::copyAllParameters()
have the same parameters. */
QVideoSurfaceFormat copy(original);
- QCOMPARE(copy.pixelFormat(), QVideoFrame::Format_ARGB32);
+ QCOMPARE(copy.pixelFormat(), QVideoSurfaceFormat::Format_ARGB32);
QCOMPARE(copy.frameSize(), QSize(1024, 768));
QCOMPARE(copy.scanLineDirection(), QVideoSurfaceFormat::BottomToTop);
QCOMPARE(copy.viewport(), QRect(0, 0, 1024, 1024));
@@ -538,7 +538,7 @@ void tst_QVideoSurfaceFormat::assignAllParameters()
{
/* Create the instance and set all the parameters. */
QVideoSurfaceFormat copy(
- QSize(64, 64), QVideoFrame::Format_AYUV444);
+ QSize(64, 64), QVideoSurfaceFormat::Format_AYUV444);
copy.setScanLineDirection(QVideoSurfaceFormat::TopToBottom);
copy.setViewport(QRect(0, 0, 640, 320));
copy.setFrameRate(qreal(7.5));
@@ -546,7 +546,7 @@ void tst_QVideoSurfaceFormat::assignAllParameters()
/* Create the instance and set all the parameters. */
QVideoSurfaceFormat original(
- QSize(1024, 768), QVideoFrame::Format_ARGB32);
+ QSize(1024, 768), QVideoSurfaceFormat::Format_ARGB32);
original.setScanLineDirection(QVideoSurfaceFormat::BottomToTop);
original.setViewport(QRect(0, 0, 1024, 1024));
original.setFrameRate(qreal(15.0));
@@ -556,7 +556,7 @@ void tst_QVideoSurfaceFormat::assignAllParameters()
have the same parameters. */
copy = original;
- QCOMPARE(copy.pixelFormat(), QVideoFrame::Format_ARGB32);
+ QCOMPARE(copy.pixelFormat(), QVideoSurfaceFormat::Format_ARGB32);
QCOMPARE(copy.frameSize(), QSize(1024, 768));
QCOMPARE(copy.scanLineDirection(), QVideoSurfaceFormat::BottomToTop);
QCOMPARE(copy.viewport(), QRect(0, 0, 1024, 1024));
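
The surface-format tests above only change how the pixel format constant is spelled; construction and the setters are untouched. A short usage sketch with the scoped enum, using the same values that appear in the hunks (makeExampleFormat() is a hypothetical helper):

    #include <QRect>
    #include <QSize>
    #include <QVideoSurfaceFormat>

    // Sketch: constructing and configuring a QVideoSurfaceFormat with the
    // pixel format enum now scoped to QVideoSurfaceFormat.
    static QVideoSurfaceFormat makeExampleFormat()
    {
        QVideoSurfaceFormat format(QSize(1024, 768), QVideoSurfaceFormat::Format_ARGB32);
        format.setScanLineDirection(QVideoSurfaceFormat::BottomToTop);
        format.setViewport(QRect(0, 0, 1024, 1024));
        format.setFrameRate(qreal(15.0));
        // format.isValid() is true only when both the pixel format and frame size are valid.
        return format;
    }
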
diff --git a/tests/auto/unit/multimediawidgets/qgraphicsvideoitem/tst_qgraphicsvideoitem.cpp b/tests/auto/unit/multimediawidgets/qgraphicsvideoitem/tst_qgraphicsvideoitem.cpp
index 43c0e2870..6a4a26a32 100644
--- a/tests/auto/unit/multimediawidgets/qgraphicsvideoitem/tst_qgraphicsvideoitem.cpp
+++ b/tests/auto/unit/multimediawidgets/qgraphicsvideoitem/tst_qgraphicsvideoitem.cpp
@@ -290,7 +290,7 @@ void tst_QGraphicsVideoItem::show()
QVERIFY(item->boundingRect().isEmpty());
- QVideoSurfaceFormat format(QSize(320,240),QVideoFrame::Format_RGB32);
+ QVideoSurfaceFormat format(QSize(320,240),QVideoSurfaceFormat::Format_RGB32);
QVERIFY(object.testService->rendererControl->surface()->start(format));
QCoreApplication::processEvents();
@@ -385,7 +385,7 @@ void tst_QGraphicsVideoItem::nativeSize()
QSignalSpy spy(&item, SIGNAL(nativeSizeChanged(QSizeF)));
- QVideoSurfaceFormat format(frameSize, QVideoFrame::Format_ARGB32);
+ QVideoSurfaceFormat format(frameSize, QVideoSurfaceFormat::Format_ARGB32);
format.setViewport(viewport);
{ // Surface setup is deferred until after the first paint.
@@ -527,7 +527,7 @@ void tst_QGraphicsVideoItem::boundingRect()
item.setSize(size);
item.setAspectRatioMode(aspectRatioMode);
- QVideoSurfaceFormat format(frameSize, QVideoFrame::Format_ARGB32);
+ QVideoSurfaceFormat format(frameSize, QVideoSurfaceFormat::Format_ARGB32);
{ // Surface setup is deferred until after the first paint.
QImage image(320, 240, QImage::Format_RGB32);
@@ -565,7 +565,7 @@ void tst_QGraphicsVideoItem::paint()
if (!surface)
QSKIP("QGraphicsVideoItem is not QPainterVideoSurface based");
- QVideoSurfaceFormat format(QSize(2, 2), QVideoFrame::Format_RGB32);
+ QVideoSurfaceFormat format(QSize(2, 2), QVideoSurfaceFormat::Format_RGB32);
QVERIFY(surface->start(format));
QCOMPARE(surface->isActive(), true);
@@ -576,7 +576,7 @@ void tst_QGraphicsVideoItem::paint()
QCOMPARE(surface->isActive(), true);
QCOMPARE(surface->isReady(), true);
- QVideoFrame frame(sizeof(rgb32ImageData), QSize(2, 2), 8, QVideoFrame::Format_RGB32);
+ QVideoFrame frame(sizeof(rgb32ImageData), QSize(2, 2), 8, QVideoSurfaceFormat::Format_RGB32);
frame.map(QVideoFrame::WriteOnly);
memcpy(frame.bits(), rgb32ImageData, frame.mappedBytes());
@@ -608,7 +608,7 @@ void tst_QGraphicsVideoItem::paintSurface()
if (!surface)
QSKIP("QGraphicsVideoItem is not QPainterVideoSurface based");
- QVideoSurfaceFormat format(QSize(2, 2), QVideoFrame::Format_RGB32);
+ QVideoSurfaceFormat format(QSize(2, 2), QVideoSurfaceFormat::Format_RGB32);
QVERIFY(surface->start(format));
QCOMPARE(surface->isActive(), true);
@@ -619,7 +619,7 @@ void tst_QGraphicsVideoItem::paintSurface()
QCOMPARE(surface->isActive(), true);
QCOMPARE(surface->isReady(), true);
- QVideoFrame frame(sizeof(rgb32ImageData), QSize(2, 2), 8, QVideoFrame::Format_RGB32);
+ QVideoFrame frame(sizeof(rgb32ImageData), QSize(2, 2), 8, QVideoSurfaceFormat::Format_RGB32);
frame.map(QVideoFrame::WriteOnly);
memcpy(frame.bits(), rgb32ImageData, frame.mappedBytes());
diff --git a/tests/auto/unit/multimediawidgets/qvideowidget/tst_qvideowidget.cpp b/tests/auto/unit/multimediawidgets/qvideowidget/tst_qvideowidget.cpp
index 5e36dd322..aff23c11e 100644
--- a/tests/auto/unit/multimediawidgets/qvideowidget/tst_qvideowidget.cpp
+++ b/tests/auto/unit/multimediawidgets/qvideowidget/tst_qvideowidget.cpp
@@ -255,7 +255,7 @@ void tst_QVideoWidget::sizeHintWindowControl()
widget.show();
QVERIFY(QTest::qWaitForWindowExposed(&widget));
- QVideoSurfaceFormat format(frameSize, QVideoFrame::Format_ARGB32);
+ QVideoSurfaceFormat format(frameSize, QVideoSurfaceFormat::Format_ARGB32);
format.setViewport(viewport);
QVERIFY(object.testService->rendererControl->surface()->start(format));
@@ -651,7 +651,7 @@ void tst_QVideoWidget::paintRendererControl()
QPainterVideoSurface *surface = qobject_cast<QPainterVideoSurface *>(
object.testService->rendererControl->surface());
- QVideoSurfaceFormat format(QSize(2, 2), QVideoFrame::Format_RGB32);
+ QVideoSurfaceFormat format(QSize(2, 2), QVideoSurfaceFormat::Format_RGB32);
QVERIFY(surface->start(format));
QCOMPARE(surface->isActive(), true);
@@ -662,7 +662,7 @@ void tst_QVideoWidget::paintRendererControl()
QCOMPARE(surface->isActive(), true);
QCOMPARE(surface->isReady(), true);
- QVideoFrame frame(sizeof(rgb32ImageData), QSize(2, 2), 8, QVideoFrame::Format_RGB32);
+ QVideoFrame frame(sizeof(rgb32ImageData), QSize(2, 2), 8, QVideoSurfaceFormat::Format_RGB32);
frame.map(QVideoFrame::WriteOnly);
memcpy(frame.bits(), rgb32ImageData, frame.mappedBytes());
@@ -689,11 +689,11 @@ void tst_QVideoWidget::paintSurface()
widget.videoSurface());
QVERIFY(surface);
- QVideoSurfaceFormat format(QSize(2, 2), QVideoFrame::Format_RGB32);
+ QVideoSurfaceFormat format(QSize(2, 2), QVideoSurfaceFormat::Format_RGB32);
QVERIFY(surface->start(format));
QCOMPARE(surface->isActive(), true);
- QVideoFrame frame(sizeof(rgb32ImageData), QSize(2, 2), 8, QVideoFrame::Format_RGB32);
+ QVideoFrame frame(sizeof(rgb32ImageData), QSize(2, 2), 8, QVideoSurfaceFormat::Format_RGB32);
frame.map(QVideoFrame::WriteOnly);
memcpy(frame.bits(), rgb32ImageData, frame.mappedBytes());
frame.unmap();
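
The widget tests above all follow the same fill pattern: allocate a tiny RGB32 frame, map it write-only, copy the raw pixel data in, and unmap before presenting. A compact sketch of that pattern with the renamed enum (makeTestFrame() is a hypothetical helper; the 2x2 size and 8-byte stride match the tests):

    #include <cstring>
    #include <QVideoFrame>
    #include <QVideoSurfaceFormat>

    // Sketch: fill a 2x2 RGB32 QVideoFrame from a raw pixel buffer, as the
    // qvideowidget and qgraphicsvideoitem tests above do before presenting it.
    QVideoFrame makeTestFrame(const uchar *rgb32ImageData, int byteCount)
    {
        QVideoFrame frame(byteCount, QSize(2, 2), /*bytesPerLine*/ 8,
                          QVideoSurfaceFormat::Format_RGB32);
        if (frame.map(QVideoFrame::WriteOnly)) {
            std::memcpy(frame.bits(), rgb32ImageData, frame.mappedBytes());
            frame.unmap();
        }
        return frame;
    }
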
diff --git a/tests/auto/unit/qml/qdeclarativevideo/tst_qdeclarativevideo.cpp b/tests/auto/unit/qml/qdeclarativevideo/tst_qdeclarativevideo.cpp
index 0c586eb21..7086bb0b1 100644
--- a/tests/auto/unit/qml/qdeclarativevideo/tst_qdeclarativevideo.cpp
+++ b/tests/auto/unit/qml/qdeclarativevideo/tst_qdeclarativevideo.cpp
@@ -956,7 +956,7 @@ void tst_QDeclarativeVideo::geometry()
// video item can use overlay, QVideoSink is not used then.
if (surface) {
- QVideoSurfaceFormat format(QSize(640, 480), QVideoFrame::Format_RGB32);
+ QVideoSurfaceFormat format(QSize(640, 480), QVideoSurfaceFormat::Format_RGB32);
QVERIFY(surface->start(format));
QCoreApplication::processEvents();