author    Lars Knoll <lars.knoll@qt.io>    2021-03-23 13:31:22 +0100
committer Lars Knoll <lars.knoll@qt.io>    2021-04-06 08:11:45 +0000
commit    d6e52bea2da1d31bce91d1a175f9b9e84b8e37f5 (patch)
tree      90e41da7f8a6abdd069da5e7fcd410d8accb8843 /src/multimedia
parent    9aaec39aef634a9f2d972f35484a566bdd01ecd6 (diff)
Move the PixelFormat enum from QVideoFrame to QVideoSurfaceFormat
Change-Id: Ifa888c74c397c640b19387a9ce624dfcf8269c2c
Reviewed-by: Lars Knoll <lars.knoll@qt.io>
Reviewed-by: Doris Verria <doris.verria@qt.io>
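For code using these APIs the change is mechanical: the enum values keep their names and only their enclosing class moves, so QVideoFrame::Format_RGB565 becomes QVideoSurfaceFormat::Format_RGB565, while QVideoFrame::HandleType is unaffected. A minimal sketch of the adjustment, patterned on the MyVideoSurface documentation snippets updated below (illustration only, not part of this patch):

#include <QAbstractVideoSurface>
#include <QVideoSurfaceFormat>

class MyVideoSurface : public QAbstractVideoSurface
{
    // Was: QList<QVideoFrame::PixelFormat> supportedPixelFormats(...)
    QList<QVideoSurfaceFormat::PixelFormat> supportedPixelFormats(
            QVideoFrame::HandleType handleType = QVideoFrame::NoHandle) const override
    {
        Q_UNUSED(handleType);
        // Was: QVideoFrame::Format_RGB565
        return QList<QVideoSurfaceFormat::PixelFormat>() << QVideoSurfaceFormat::Format_RGB565;
    }

    bool present(const QVideoFrame &frame) override
    {
        // Frame handling itself is unchanged by this patch.
        return frame.isValid();
    }
};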
Diffstat (limited to 'src/multimedia')
-rw-r--r--  src/multimedia/camera/qcameraimagecapture.cpp | 2
-rw-r--r--  src/multimedia/camera/qcamerainfo.cpp | 2
-rw-r--r--  src/multimedia/camera/qcamerainfo.h | 2
-rw-r--r--  src/multimedia/camera/qcamerainfo_p.h | 2
-rw-r--r--  src/multimedia/doc/snippets/multimedia-snippets/camera.cpp | 4
-rw-r--r--  src/multimedia/doc/snippets/multimedia-snippets/media.cpp | 4
-rw-r--r--  src/multimedia/doc/snippets/multimedia-snippets/video.cpp | 8
-rw-r--r--  src/multimedia/platform/android/common/qandroidmultimediautils.cpp | 26
-rw-r--r--  src/multimedia/platform/android/common/qandroidmultimediautils_p.h | 4
-rw-r--r--  src/multimedia/platform/android/common/qandroidvideooutput.cpp | 2
-rw-r--r--  src/multimedia/platform/android/mediacapture/qandroidcamerasession.cpp | 42
-rw-r--r--  src/multimedia/platform/android/mediacapture/qandroidcamerasession_p.h | 6
-rw-r--r--  src/multimedia/platform/android/mediacapture/qandroidcameravideorenderercontrol.cpp | 12
-rw-r--r--  src/multimedia/platform/darwin/camera/avfcamera.mm | 36
-rw-r--r--  src/multimedia/platform/darwin/camera/avfcamera_p.h | 4
-rw-r--r--  src/multimedia/platform/darwin/camera/avfcameraimagecapture.mm | 2
-rw-r--r--  src/multimedia/platform/darwin/camera/avfcamerarenderer.mm | 5
-rw-r--r--  src/multimedia/platform/darwin/mediaplayer/avfvideorenderercontrol.mm | 6
-rw-r--r--  src/multimedia/platform/darwin/qdarwindevicemanager.mm | 2
-rw-r--r--  src/multimedia/platform/gstreamer/common/qgst_p.h | 2
-rw-r--r--  src/multimedia/platform/gstreamer/common/qgstutils.cpp | 74
-rw-r--r--  src/multimedia/platform/gstreamer/common/qgstutils_p.h | 2
-rw-r--r--  src/multimedia/platform/gstreamer/common/qgstvideorenderersink.cpp | 96
-rw-r--r--  src/multimedia/platform/qnx/camera/bbcamerasession.cpp | 4
-rw-r--r--  src/multimedia/platform/qnx/mediaplayer/mmrendererplayervideorenderercontrol.cpp | 10
-rw-r--r--  src/multimedia/platform/windows/evr/evrcustompresenter.cpp | 42
-rw-r--r--  src/multimedia/platform/windows/evr/evrd3dpresentengine.cpp | 2
-rw-r--r--  src/multimedia/platform/windows/evr/evrhelpers.cpp | 56
-rw-r--r--  src/multimedia/platform/windows/evr/evrhelpers_p.h | 4
-rw-r--r--  src/multimedia/platform/windows/player/mftvideo.cpp | 26
-rw-r--r--  src/multimedia/platform/windows/player/mftvideo_p.h | 2
-rw-r--r--  src/multimedia/platform/windows/player/mfvideorenderercontrol.cpp | 60
-rw-r--r--  src/multimedia/platform/windows/qwindowsdevicemanager.cpp | 2
-rw-r--r--  src/multimedia/video/qabstractvideosurface.h | 2
-rw-r--r--  src/multimedia/video/qvideoframe.cpp | 265
-rw-r--r--  src/multimedia/video/qvideoframe.h | 54
-rw-r--r--  src/multimedia/video/qvideoframeconversionhelper.cpp | 22
-rw-r--r--  src/multimedia/video/qvideoframeconversionhelper_p.h | 2
-rw-r--r--  src/multimedia/video/qvideosink.cpp | 2
-rw-r--r--  src/multimedia/video/qvideosurfaceformat.cpp | 193
-rw-r--r--  src/multimedia/video/qvideosurfaceformat.h | 58
-rw-r--r--  src/multimedia/video/qvideosurfaces.cpp | 6
-rw-r--r--  src/multimedia/video/qvideosurfaces_p.h | 2
43 files changed, 582 insertions, 577 deletions
diff --git a/src/multimedia/camera/qcameraimagecapture.cpp b/src/multimedia/camera/qcameraimagecapture.cpp
index 0682729c8..7569a35dc 100644
--- a/src/multimedia/camera/qcameraimagecapture.cpp
+++ b/src/multimedia/camera/qcameraimagecapture.cpp
@@ -342,7 +342,7 @@ int QCameraImageCapture::captureToBuffer()
*/
/*!
- \fn QCameraImageCapture::bufferFormatChanged(QVideoFrame::PixelFormat format)
+ \fn QCameraImageCapture::bufferFormatChanged(QVideoSurfaceFormat::PixelFormat format)
Signal emitted when the buffer \a format for the buffer image capture has changed.
*/
diff --git a/src/multimedia/camera/qcamerainfo.cpp b/src/multimedia/camera/qcamerainfo.cpp
index 9c0de17ee..ca8ab59be 100644
--- a/src/multimedia/camera/qcamerainfo.cpp
+++ b/src/multimedia/camera/qcamerainfo.cpp
@@ -49,7 +49,7 @@ QCameraFormat &QCameraFormat::operator=(const QCameraFormat &other) = default;
QCameraFormat::~QCameraFormat() = default;
-QVideoFrame::PixelFormat QCameraFormat::pixelFormat() const
+QVideoSurfaceFormat::PixelFormat QCameraFormat::pixelFormat() const
{
return d->pixelFormat;
}
diff --git a/src/multimedia/camera/qcamerainfo.h b/src/multimedia/camera/qcamerainfo.h
index afe9965a6..eab517e30 100644
--- a/src/multimedia/camera/qcamerainfo.h
+++ b/src/multimedia/camera/qcamerainfo.h
@@ -54,7 +54,7 @@ public:
QCameraFormat &operator=(const QCameraFormat &other);
~QCameraFormat();
- QVideoFrame::PixelFormat pixelFormat() const;
+ QVideoSurfaceFormat::PixelFormat pixelFormat() const;
QSize resolution() const;
float minFrameRate() const;
float maxFrameRate() const;
diff --git a/src/multimedia/camera/qcamerainfo_p.h b/src/multimedia/camera/qcamerainfo_p.h
index 710bd0971..8d81e46a1 100644
--- a/src/multimedia/camera/qcamerainfo_p.h
+++ b/src/multimedia/camera/qcamerainfo_p.h
@@ -59,7 +59,7 @@ QT_BEGIN_NAMESPACE
class QCameraFormatPrivate : public QSharedData
{
public:
- QVideoFrame::PixelFormat pixelFormat;
+ QVideoSurfaceFormat::PixelFormat pixelFormat;
QSize resolution;
float minFrameRate = 0;
float maxFrameRate = 0;
diff --git a/src/multimedia/doc/snippets/multimedia-snippets/camera.cpp b/src/multimedia/doc/snippets/multimedia-snippets/camera.cpp
index 85b2072dd..7f72e2e21 100644
--- a/src/multimedia/doc/snippets/multimedia-snippets/camera.cpp
+++ b/src/multimedia/doc/snippets/multimedia-snippets/camera.cpp
@@ -87,10 +87,10 @@ void overview_camera_by_position()
// -.-
class MyVideoSurface : public QAbstractVideoSurface
{
- QList<QVideoFrame::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType handleType) const
+ QList<QVideoSurfaceFormat::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType handleType) const
{
Q_UNUSED(handleType);
- return QList<QVideoFrame::PixelFormat>();
+ return QList<QVideoSurfaceFormat::PixelFormat>();
}
bool present(const QVideoFrame &frame)
{
diff --git a/src/multimedia/doc/snippets/multimedia-snippets/media.cpp b/src/multimedia/doc/snippets/multimedia-snippets/media.cpp
index 2f10a5e58..d4832bfa9 100644
--- a/src/multimedia/doc/snippets/multimedia-snippets/media.cpp
+++ b/src/multimedia/doc/snippets/multimedia-snippets/media.cpp
@@ -174,10 +174,10 @@ void MediaExample::MediaPlayer()
{
public:
Surface(QObject *p) : QAbstractVideoSurface(p) { }
- QList<QVideoFrame::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType) const override
+ QList<QVideoSurfaceFormat::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType) const override
{
// Make sure that the driver supports this pixel format.
- return QList<QVideoFrame::PixelFormat>() << QVideoFrame::Format_YUYV;
+ return QList<QVideoSurfaceFormat::PixelFormat>() << QVideoSurfaceFormat::Format_YUYV;
}
// Video frames are handled here.
diff --git a/src/multimedia/doc/snippets/multimedia-snippets/video.cpp b/src/multimedia/doc/snippets/multimedia-snippets/video.cpp
index 561a852b5..1b877ba0b 100644
--- a/src/multimedia/doc/snippets/multimedia-snippets/video.cpp
+++ b/src/multimedia/doc/snippets/multimedia-snippets/video.cpp
@@ -52,13 +52,13 @@
//! [Derived Surface]
class MyVideoSurface : public QAbstractVideoSurface
{
- QList<QVideoFrame::PixelFormat> supportedPixelFormats(
+ QList<QVideoSurfaceFormat::PixelFormat> supportedPixelFormats(
QVideoFrame::HandleType handleType = QVideoFrame::NoHandle) const
{
Q_UNUSED(handleType);
// Return the formats you will support
- return QList<QVideoFrame::PixelFormat>() << QVideoFrame::Format_RGB565;
+ return QList<QVideoSurfaceFormat::PixelFormat>() << QVideoSurfaceFormat::Format_RGB565;
}
bool present(const QVideoFrame &frame)
@@ -156,7 +156,7 @@ void VideoExample::VideoSurface()
{
//! [Widget Surface]
QImage img = QImage("images/qt-logo.png").convertToFormat(QImage::Format_ARGB32);
- QVideoSurfaceFormat format(img.size(), QVideoFrame::Format_ARGB32);
+ QVideoSurfaceFormat format(img.size(), QVideoSurfaceFormat::Format_ARGB32);
videoWidget = new QVideoWidget;
videoWidget->videoSurface()->start(format);
videoWidget->videoSurface()->present(img);
@@ -168,7 +168,7 @@ void VideoExample::VideoSurface()
graphicsView->scene()->addItem(item);
graphicsView->show();
QImage img = QImage("images/qt-logo.png").convertToFormat(QImage::Format_ARGB32);
- QVideoSurfaceFormat format(img.size(), QVideoFrame::Format_ARGB32);
+ QVideoSurfaceFormat format(img.size(), QVideoSurfaceFormat::Format_ARGB32);
item->videoSurface()->start(format);
item->videoSurface()->present(img);
//! [GraphicsVideoItem Surface]
diff --git a/src/multimedia/platform/android/common/qandroidmultimediautils.cpp b/src/multimedia/platform/android/common/qandroidmultimediautils.cpp
index 850b3d7ea..395cea43a 100644
--- a/src/multimedia/platform/android/common/qandroidmultimediautils.cpp
+++ b/src/multimedia/platform/android/common/qandroidmultimediautils.cpp
@@ -77,36 +77,36 @@ bool qt_sizeLessThan(const QSize &s1, const QSize &s2)
return s1.width() * s1.height() < s2.width() * s2.height();
}
-QVideoFrame::PixelFormat qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat f)
+QVideoSurfaceFormat::PixelFormat qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat f)
{
switch (f) {
case AndroidCamera::NV21:
- return QVideoFrame::Format_NV21;
+ return QVideoSurfaceFormat::Format_NV21;
case AndroidCamera::YV12:
- return QVideoFrame::Format_YV12;
+ return QVideoSurfaceFormat::Format_YV12;
case AndroidCamera::RGB565:
- return QVideoFrame::Format_RGB565;
+ return QVideoSurfaceFormat::Format_RGB565;
case AndroidCamera::YUY2:
- return QVideoFrame::Format_YUYV;
+ return QVideoSurfaceFormat::Format_YUYV;
case AndroidCamera::JPEG:
- return QVideoFrame::Format_Jpeg;
+ return QVideoSurfaceFormat::Format_Jpeg;
default:
- return QVideoFrame::Format_Invalid;
+ return QVideoSurfaceFormat::Format_Invalid;
}
}
-AndroidCamera::ImageFormat qt_androidImageFormatFromPixelFormat(QVideoFrame::PixelFormat f)
+AndroidCamera::ImageFormat qt_androidImageFormatFromPixelFormat(QVideoSurfaceFormat::PixelFormat f)
{
switch (f) {
- case QVideoFrame::Format_NV21:
+ case QVideoSurfaceFormat::Format_NV21:
return AndroidCamera::NV21;
- case QVideoFrame::Format_YV12:
+ case QVideoSurfaceFormat::Format_YV12:
return AndroidCamera::YV12;
- case QVideoFrame::Format_RGB565:
+ case QVideoSurfaceFormat::Format_RGB565:
return AndroidCamera::RGB565;
- case QVideoFrame::Format_YUYV:
+ case QVideoSurfaceFormat::Format_YUYV:
return AndroidCamera::YUY2;
- case QVideoFrame::Format_Jpeg:
+ case QVideoSurfaceFormat::Format_Jpeg:
return AndroidCamera::JPEG;
default:
return AndroidCamera::UnknownImageFormat;
diff --git a/src/multimedia/platform/android/common/qandroidmultimediautils_p.h b/src/multimedia/platform/android/common/qandroidmultimediautils_p.h
index 205244eb5..af91de75d 100644
--- a/src/multimedia/platform/android/common/qandroidmultimediautils_p.h
+++ b/src/multimedia/platform/android/common/qandroidmultimediautils_p.h
@@ -63,8 +63,8 @@ int qt_findClosestValue(const QList<int> &list, int value);
bool qt_sizeLessThan(const QSize &s1, const QSize &s2);
-QVideoFrame::PixelFormat qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat f);
-AndroidCamera::ImageFormat qt_androidImageFormatFromPixelFormat(QVideoFrame::PixelFormat f);
+QVideoSurfaceFormat::PixelFormat qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat f);
+AndroidCamera::ImageFormat qt_androidImageFormatFromPixelFormat(QVideoSurfaceFormat::PixelFormat f);
bool qt_androidRequestCameraPermission();
bool qt_androidRequestRecordingPermission();
diff --git a/src/multimedia/platform/android/common/qandroidvideooutput.cpp b/src/multimedia/platform/android/common/qandroidvideooutput.cpp
index 266b651eb..909a5b573 100644
--- a/src/multimedia/platform/android/common/qandroidvideooutput.cpp
+++ b/src/multimedia/platform/android/common/qandroidvideooutput.cpp
@@ -323,7 +323,7 @@ void QAndroidTextureVideoOutput::onFrameAvailable()
return;
QAbstractVideoBuffer *buffer = new AndroidTextureVideoBuffer(this, m_nativeSize);
- QVideoFrame frame(buffer, m_nativeSize, QVideoFrame::Format_ABGR32);
+ QVideoFrame frame(buffer, m_nativeSize, QVideoSurfaceFormat::Format_ABGR32);
if (m_surface->isActive() && (m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat()
|| m_surface->surfaceFormat().frameSize() != frame.size())) {
diff --git a/src/multimedia/platform/android/mediacapture/qandroidcamerasession.cpp b/src/multimedia/platform/android/mediacapture/qandroidcamerasession.cpp
index f42899aae..73ff2043e 100644
--- a/src/multimedia/platform/android/mediacapture/qandroidcamerasession.cpp
+++ b/src/multimedia/platform/android/mediacapture/qandroidcamerasession.cpp
@@ -336,9 +336,9 @@ QList<QSize> QAndroidCameraSession::getSupportedPreviewSizes() const
return m_camera ? m_camera->getSupportedPreviewSizes() : QList<QSize>();
}
-QList<QVideoFrame::PixelFormat> QAndroidCameraSession::getSupportedPixelFormats() const
+QList<QVideoSurfaceFormat::PixelFormat> QAndroidCameraSession::getSupportedPixelFormats() const
{
- QList<QVideoFrame::PixelFormat> formats;
+ QList<QVideoSurfaceFormat::PixelFormat> formats;
if (!m_camera)
return formats;
@@ -348,8 +348,8 @@ QList<QVideoFrame::PixelFormat> QAndroidCameraSession::getSupportedPixelFormats(
formats.reserve(nativeFormats.size());
for (AndroidCamera::ImageFormat nativeFormat : nativeFormats) {
- QVideoFrame::PixelFormat format = QtPixelFormatFromAndroidImageFormat(nativeFormat);
- if (format != QVideoFrame::Format_Invalid)
+ QVideoSurfaceFormat::PixelFormat format = QtPixelFormatFromAndroidImageFormat(nativeFormat);
+ if (format != QVideoSurfaceFormat::Format_Invalid)
formats.append(format);
}
@@ -364,12 +364,12 @@ QList<AndroidCamera::FpsRange> QAndroidCameraSession::getSupportedPreviewFpsRang
struct NullSurface : QAbstractVideoSurface
{
NullSurface(QObject *parent = nullptr) : QAbstractVideoSurface(parent) { }
- QList<QVideoFrame::PixelFormat> supportedPixelFormats(
+ QList<QVideoSurfaceFormat::PixelFormat> supportedPixelFormats(
QVideoFrame::HandleType type = QVideoFrame::NoHandle) const override
{
- QList<QVideoFrame::PixelFormat> result;
+ QList<QVideoSurfaceFormat::PixelFormat> result;
if (type == QVideoFrame::NoHandle)
- result << QVideoFrame::Format_NV21;
+ result << QVideoSurfaceFormat::Format_NV21;
return result;
}
@@ -720,41 +720,41 @@ void QAndroidCameraSession::processCapturedImage(int id,
emit imageCaptureError(id, QCameraImageCapture::ResourceError, errorMessage);
}
} else {
- QVideoFrame frame(new QMemoryVideoBuffer(data, -1), resolution, QVideoFrame::Format_Jpeg);
+ QVideoFrame frame(new QMemoryVideoBuffer(data, -1), resolution, QVideoSurfaceFormat::Format_Jpeg);
emit imageAvailable(id, frame);
}
}
-QVideoFrame::PixelFormat QAndroidCameraSession::QtPixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat format)
+QVideoSurfaceFormat::PixelFormat QAndroidCameraSession::QtPixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat format)
{
switch (format) {
case AndroidCamera::RGB565:
- return QVideoFrame::Format_RGB565;
+ return QVideoSurfaceFormat::Format_RGB565;
case AndroidCamera::NV21:
- return QVideoFrame::Format_NV21;
+ return QVideoSurfaceFormat::Format_NV21;
case AndroidCamera::YUY2:
- return QVideoFrame::Format_YUYV;
+ return QVideoSurfaceFormat::Format_YUYV;
case AndroidCamera::JPEG:
- return QVideoFrame::Format_Jpeg;
+ return QVideoSurfaceFormat::Format_Jpeg;
case AndroidCamera::YV12:
- return QVideoFrame::Format_YV12;
+ return QVideoSurfaceFormat::Format_YV12;
default:
- return QVideoFrame::Format_Invalid;
+ return QVideoSurfaceFormat::Format_Invalid;
}
}
-AndroidCamera::ImageFormat QAndroidCameraSession::AndroidImageFormatFromQtPixelFormat(QVideoFrame::PixelFormat format)
+AndroidCamera::ImageFormat QAndroidCameraSession::AndroidImageFormatFromQtPixelFormat(QVideoSurfaceFormat::PixelFormat format)
{
switch (format) {
- case QVideoFrame::Format_RGB565:
+ case QVideoSurfaceFormat::Format_RGB565:
return AndroidCamera::RGB565;
- case QVideoFrame::Format_NV21:
+ case QVideoSurfaceFormat::Format_NV21:
return AndroidCamera::NV21;
- case QVideoFrame::Format_YUYV:
+ case QVideoSurfaceFormat::Format_YUYV:
return AndroidCamera::YUY2;
- case QVideoFrame::Format_Jpeg:
+ case QVideoSurfaceFormat::Format_Jpeg:
return AndroidCamera::JPEG;
- case QVideoFrame::Format_YV12:
+ case QVideoSurfaceFormat::Format_YV12:
return AndroidCamera::YV12;
default:
return AndroidCamera::UnknownImageFormat;
diff --git a/src/multimedia/platform/android/mediacapture/qandroidcamerasession_p.h b/src/multimedia/platform/android/mediacapture/qandroidcamerasession_p.h
index bb7a82cdd..a444915ec 100644
--- a/src/multimedia/platform/android/mediacapture/qandroidcamerasession_p.h
+++ b/src/multimedia/platform/android/mediacapture/qandroidcamerasession_p.h
@@ -91,7 +91,7 @@ public:
void setVideoOutput(QAndroidVideoOutput *output);
QList<QSize> getSupportedPreviewSizes() const;
- QList<QVideoFrame::PixelFormat> getSupportedPixelFormats() const;
+ QList<QVideoSurfaceFormat::PixelFormat> getSupportedPixelFormats() const;
QList<AndroidCamera::FpsRange> getSupportedPreviewFpsRange() const;
QImageEncoderSettings imageSettings() const { return m_actualImageSettings; }
@@ -164,8 +164,8 @@ private:
bool captureToBuffer,
const QString &fileName);
- static QVideoFrame::PixelFormat QtPixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat);
- static AndroidCamera::ImageFormat AndroidImageFormatFromQtPixelFormat(QVideoFrame::PixelFormat);
+ static QVideoSurfaceFormat::PixelFormat QtPixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat);
+ static AndroidCamera::ImageFormat AndroidImageFormatFromQtPixelFormat(QVideoSurfaceFormat::PixelFormat);
void setActiveHelper(bool active);
diff --git a/src/multimedia/platform/android/mediacapture/qandroidcameravideorenderercontrol.cpp b/src/multimedia/platform/android/mediacapture/qandroidcameravideorenderercontrol.cpp
index 7cb397d13..4e88a1337 100644
--- a/src/multimedia/platform/android/mediacapture/qandroidcameravideorenderercontrol.cpp
+++ b/src/multimedia/platform/android/mediacapture/qandroidcameravideorenderercontrol.cpp
@@ -76,14 +76,14 @@ private:
QAndroidCameraVideoRendererControl *m_control;
AndroidSurfaceView *m_surfaceView;
QMutex m_mutex;
- QVideoFrame::PixelFormat m_pixelFormat;
+ QVideoSurfaceFormat::PixelFormat m_pixelFormat;
QVideoFrame m_lastFrame;
};
QAndroidCameraDataVideoOutput::QAndroidCameraDataVideoOutput(QAndroidCameraVideoRendererControl *control)
: QAndroidVideoOutput(control)
, m_control(control)
- , m_pixelFormat(QVideoFrame::Format_Invalid)
+ , m_pixelFormat(QVideoSurfaceFormat::Format_Invalid)
{
// The camera preview cannot be started unless we set a SurfaceTexture or a
// SurfaceHolder. In this case we don't actually care about either of these, but since
@@ -129,15 +129,15 @@ void QAndroidCameraDataVideoOutput::onSurfaceCreated()
void QAndroidCameraDataVideoOutput::configureFormat()
{
- m_pixelFormat = QVideoFrame::Format_Invalid;
+ m_pixelFormat = QVideoSurfaceFormat::Format_Invalid;
if (!m_control->cameraSession()->camera())
return;
- QList<QVideoFrame::PixelFormat> surfaceFormats = m_control->surface()->supportedPixelFormats();
+ QList<QVideoSurfaceFormat::PixelFormat> surfaceFormats = m_control->surface()->supportedPixelFormats();
QList<AndroidCamera::ImageFormat> previewFormats = m_control->cameraSession()->camera()->getSupportedPreviewFormats();
for (int i = 0; i < surfaceFormats.size(); ++i) {
- QVideoFrame::PixelFormat pixFormat = surfaceFormats.at(i);
+ QVideoSurfaceFormat::PixelFormat pixFormat = surfaceFormats.at(i);
AndroidCamera::ImageFormat f = qt_androidImageFormatFromPixelFormat(pixFormat);
if (previewFormats.contains(f)) {
m_pixelFormat = pixFormat;
@@ -145,7 +145,7 @@ void QAndroidCameraDataVideoOutput::configureFormat()
}
}
- if (m_pixelFormat == QVideoFrame::Format_Invalid) {
+ if (m_pixelFormat == QVideoSurfaceFormat::Format_Invalid) {
m_control->cameraSession()->setPreviewCallback(nullptr);
qWarning("The video surface is not compatible with any format supported by the camera");
} else {
diff --git a/src/multimedia/platform/darwin/camera/avfcamera.mm b/src/multimedia/platform/darwin/camera/avfcamera.mm
index 98d2b4ded..2acf529f5 100644
--- a/src/multimedia/platform/darwin/camera/avfcamera.mm
+++ b/src/multimedia/platform/darwin/camera/avfcamera.mm
@@ -115,64 +115,64 @@ void AVFCamera::updateStatus()
}
}
-QVideoFrame::PixelFormat AVFCamera::QtPixelFormatFromCVFormat(unsigned avPixelFormat)
+QVideoSurfaceFormat::PixelFormat AVFCamera::QtPixelFormatFromCVFormat(unsigned avPixelFormat)
{
// BGRA <-> ARGB "swap" is intentional:
// to work correctly with GL_RGBA, color swap shaders
// (in QSG node renderer etc.).
switch (avPixelFormat) {
case kCVPixelFormatType_32ARGB:
- return QVideoFrame::Format_BGRA32;
+ return QVideoSurfaceFormat::Format_BGRA32;
case kCVPixelFormatType_32BGRA:
- return QVideoFrame::Format_ARGB32;
+ return QVideoSurfaceFormat::Format_ARGB32;
case kCVPixelFormatType_24RGB:
- return QVideoFrame::Format_RGB24;
+ return QVideoSurfaceFormat::Format_RGB24;
case kCVPixelFormatType_24BGR:
- return QVideoFrame::Format_BGR24;
+ return QVideoSurfaceFormat::Format_BGR24;
case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
- return QVideoFrame::Format_NV12;
+ return QVideoSurfaceFormat::Format_NV12;
case kCVPixelFormatType_422YpCbCr8:
- return QVideoFrame::Format_UYVY;
+ return QVideoSurfaceFormat::Format_UYVY;
case kCVPixelFormatType_422YpCbCr8_yuvs:
- return QVideoFrame::Format_YUYV;
+ return QVideoSurfaceFormat::Format_YUYV;
case kCMVideoCodecType_JPEG:
case kCMVideoCodecType_JPEG_OpenDML:
- return QVideoFrame::Format_Jpeg;
+ return QVideoSurfaceFormat::Format_Jpeg;
default:
- return QVideoFrame::Format_Invalid;
+ return QVideoSurfaceFormat::Format_Invalid;
}
}
-bool AVFCamera::CVPixelFormatFromQtFormat(QVideoFrame::PixelFormat qtFormat, unsigned &conv)
+bool AVFCamera::CVPixelFormatFromQtFormat(QVideoSurfaceFormat::PixelFormat qtFormat, unsigned &conv)
{
// BGRA <-> ARGB "swap" is intentional:
// to work correctly with GL_RGBA, color swap shaders
// (in QSG node renderer etc.).
switch (qtFormat) {
- case QVideoFrame::Format_ARGB32:
+ case QVideoSurfaceFormat::Format_ARGB32:
conv = kCVPixelFormatType_32BGRA;
break;
- case QVideoFrame::Format_BGRA32:
+ case QVideoSurfaceFormat::Format_BGRA32:
conv = kCVPixelFormatType_32ARGB;
break;
- case QVideoFrame::Format_NV12:
+ case QVideoSurfaceFormat::Format_NV12:
conv = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
break;
- case QVideoFrame::Format_UYVY:
+ case QVideoSurfaceFormat::Format_UYVY:
conv = kCVPixelFormatType_422YpCbCr8;
break;
- case QVideoFrame::Format_YUYV:
+ case QVideoSurfaceFormat::Format_YUYV:
conv = kCVPixelFormatType_422YpCbCr8_yuvs;
break;
// These two formats below are not supported
// by QSGVideoNodeFactory_RGB, so for now I have to
// disable them.
/*
- case QVideoFrame::Format_RGB24:
+ case QVideoSurfaceFormat::Format_RGB24:
conv = kCVPixelFormatType_24RGB;
break;
- case QVideoFrame::Format_BGR24:
+ case QVideoSurfaceFormat::Format_BGR24:
conv = kCVPixelFormatType_24BGR;
break;
*/
diff --git a/src/multimedia/platform/darwin/camera/avfcamera_p.h b/src/multimedia/platform/darwin/camera/avfcamera_p.h
index ae8cb3292..9d4c69471 100644
--- a/src/multimedia/platform/darwin/camera/avfcamera_p.h
+++ b/src/multimedia/platform/darwin/camera/avfcamera_p.h
@@ -85,8 +85,8 @@ public:
QPlatformCameraImageProcessing *imageProcessingControl() override;
// "Converters":
- static QVideoFrame::PixelFormat QtPixelFormatFromCVFormat(unsigned avPixelFormat);
- static bool CVPixelFormatFromQtFormat(QVideoFrame::PixelFormat qtFormat, unsigned &conv);
+ static QVideoSurfaceFormat::PixelFormat QtPixelFormatFromCVFormat(unsigned avPixelFormat);
+ static bool CVPixelFormatFromQtFormat(QVideoSurfaceFormat::PixelFormat qtFormat, unsigned &conv);
AVCaptureConnection *videoConnection() const;
diff --git a/src/multimedia/platform/darwin/camera/avfcameraimagecapture.mm b/src/multimedia/platform/darwin/camera/avfcameraimagecapture.mm
index 94e2841b8..a6fb7bbe6 100644
--- a/src/multimedia/platform/darwin/camera/avfcameraimagecapture.mm
+++ b/src/multimedia/platform/darwin/camera/avfcameraimagecapture.mm
@@ -153,7 +153,7 @@ int AVFCameraImageCapture::doCapture(const QString &actualFileName)
QBuffer data(&jpgData);
QImageReader reader(&data, "JPEG");
QSize size = reader.size();
- QVideoFrame frame(new QMemoryVideoBuffer(QByteArray(jpgData.constData(), jpgData.size()), -1), size, QVideoFrame::Format_Jpeg);
+ QVideoFrame frame(new QMemoryVideoBuffer(QByteArray(jpgData.constData(), jpgData.size()), -1), size, QVideoSurfaceFormat::Format_Jpeg);
QMetaObject::invokeMethod(this, "imageAvailable", Qt::QueuedConnection,
Q_ARG(int, request.captureId),
Q_ARG(QVideoFrame, frame));
diff --git a/src/multimedia/platform/darwin/camera/avfcamerarenderer.mm b/src/multimedia/platform/darwin/camera/avfcamerarenderer.mm
index 52032e600..429ca1e81 100644
--- a/src/multimedia/platform/darwin/camera/avfcamerarenderer.mm
+++ b/src/multimedia/platform/darwin/camera/avfcamerarenderer.mm
@@ -251,9 +251,10 @@ private:
int width = CVPixelBufferGetWidth(imageBuffer);
int height = CVPixelBufferGetHeight(imageBuffer);
- QVideoFrame::PixelFormat format =
+ QVideoSurfaceFormat::PixelFormat format =
AVFCamera::QtPixelFormatFromCVFormat(CVPixelBufferGetPixelFormatType(imageBuffer));
- if (format == QVideoFrame::Format_Invalid)
+ if (format == QVideoSurfaceFormat::Format_Invalid)
return;
QVideoFrame frame(new CVImageVideoBuffer(imageBuffer, m_renderer),
diff --git a/src/multimedia/platform/darwin/mediaplayer/avfvideorenderercontrol.mm b/src/multimedia/platform/darwin/mediaplayer/avfvideorenderercontrol.mm
index d0fcfe00a..da5f56875 100644
--- a/src/multimedia/platform/darwin/mediaplayer/avfvideorenderercontrol.mm
+++ b/src/multimedia/platform/darwin/mediaplayer/avfvideorenderercontrol.mm
@@ -160,7 +160,7 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
return;
auto buffer = new TextureVideoBuffer(QVideoFrame::MTLTextureHandle, tex);
- frame = QVideoFrame(buffer, nativeSize(), QVideoFrame::Format_BGR32);
+ frame = QVideoFrame(buffer, nativeSize(), QVideoSurfaceFormat::Format_BGR32);
if (!frame.isValid())
return;
@@ -178,7 +178,7 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
return;
QAbstractVideoBuffer *buffer = new TextureVideoBuffer(QVideoFrame::GLTextureHandle, tex);
- frame = QVideoFrame(buffer, nativeSize(), QVideoFrame::Format_BGR32);
+ frame = QVideoFrame(buffer, nativeSize(), QVideoSurfaceFormat::Format_BGR32);
if (!frame.isValid())
return;
@@ -197,7 +197,7 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
return;
QAbstractVideoBuffer *buffer = new QImageVideoBuffer(frameData);
- frame = QVideoFrame(buffer, nativeSize(), QVideoFrame::Format_ARGB32_Premultiplied);
+ frame = QVideoFrame(buffer, nativeSize(), QVideoSurfaceFormat::Format_ARGB32_Premultiplied);
QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), QVideoFrame::NoHandle);
}
diff --git a/src/multimedia/platform/darwin/qdarwindevicemanager.mm b/src/multimedia/platform/darwin/qdarwindevicemanager.mm
index 46c73d629..8ec79355c 100644
--- a/src/multimedia/platform/darwin/qdarwindevicemanager.mm
+++ b/src/multimedia/platform/darwin/qdarwindevicemanager.mm
@@ -266,7 +266,7 @@ void QDarwinDeviceManager::updateCameraDevices()
auto encoding = CMVideoFormatDescriptionGetCodecType(format.formatDescription);
auto pixelFormat = AVFCamera::QtPixelFormatFromCVFormat(encoding);
// Ignore pixel formats we can't handle
- if (pixelFormat == QVideoFrame::Format_Invalid)
+ if (pixelFormat == QVideoSurfaceFormat::Format_Invalid)
continue;
for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) {
diff --git a/src/multimedia/platform/gstreamer/common/qgst_p.h b/src/multimedia/platform/gstreamer/common/qgst_p.h
index b4c6ee732..d4ee13cd9 100644
--- a/src/multimedia/platform/gstreamer/common/qgst_p.h
+++ b/src/multimedia/platform/gstreamer/common/qgst_p.h
@@ -175,7 +175,7 @@ public:
}
Q_MULTIMEDIA_EXPORT QSize resolution() const;
- Q_MULTIMEDIA_EXPORT QVideoFrame::PixelFormat pixelFormat() const;
+ Q_MULTIMEDIA_EXPORT QVideoSurfaceFormat::PixelFormat pixelFormat() const;
Q_MULTIMEDIA_EXPORT QGRange<float> frameRateRange() const;
QByteArray toString() const { return gst_structure_to_string(structure); }
diff --git a/src/multimedia/platform/gstreamer/common/qgstutils.cpp b/src/multimedia/platform/gstreamer/common/qgstutils.cpp
index 49d5a527c..be121bdcd 100644
--- a/src/multimedia/platform/gstreamer/common/qgstutils.cpp
+++ b/src/multimedia/platform/gstreamer/common/qgstutils.cpp
@@ -165,49 +165,49 @@ namespace {
struct VideoFormat
{
- QVideoFrame::PixelFormat pixelFormat;
+ QVideoSurfaceFormat::PixelFormat pixelFormat;
GstVideoFormat gstFormat;
};
static const VideoFormat qt_videoFormatLookup[] =
{
- { QVideoFrame::Format_YUV420P, GST_VIDEO_FORMAT_I420 },
- { QVideoFrame::Format_YUV422P, GST_VIDEO_FORMAT_Y42B },
- { QVideoFrame::Format_YV12 , GST_VIDEO_FORMAT_YV12 },
- { QVideoFrame::Format_UYVY , GST_VIDEO_FORMAT_UYVY },
- { QVideoFrame::Format_YUYV , GST_VIDEO_FORMAT_YUY2 },
- { QVideoFrame::Format_NV12 , GST_VIDEO_FORMAT_NV12 },
- { QVideoFrame::Format_NV21 , GST_VIDEO_FORMAT_NV21 },
- { QVideoFrame::Format_AYUV444, GST_VIDEO_FORMAT_AYUV },
- { QVideoFrame::Format_YUV444, GST_VIDEO_FORMAT_Y444 },
- { QVideoFrame::Format_P010LE , GST_VIDEO_FORMAT_P010_10LE },
- { QVideoFrame::Format_P010BE , GST_VIDEO_FORMAT_P010_10BE },
- { QVideoFrame::Format_Y8 , GST_VIDEO_FORMAT_GRAY8 },
+ { QVideoSurfaceFormat::Format_YUV420P, GST_VIDEO_FORMAT_I420 },
+ { QVideoSurfaceFormat::Format_YUV422P, GST_VIDEO_FORMAT_Y42B },
+ { QVideoSurfaceFormat::Format_YV12 , GST_VIDEO_FORMAT_YV12 },
+ { QVideoSurfaceFormat::Format_UYVY , GST_VIDEO_FORMAT_UYVY },
+ { QVideoSurfaceFormat::Format_YUYV , GST_VIDEO_FORMAT_YUY2 },
+ { QVideoSurfaceFormat::Format_NV12 , GST_VIDEO_FORMAT_NV12 },
+ { QVideoSurfaceFormat::Format_NV21 , GST_VIDEO_FORMAT_NV21 },
+ { QVideoSurfaceFormat::Format_AYUV444, GST_VIDEO_FORMAT_AYUV },
+ { QVideoSurfaceFormat::Format_YUV444, GST_VIDEO_FORMAT_Y444 },
+ { QVideoSurfaceFormat::Format_P010LE , GST_VIDEO_FORMAT_P010_10LE },
+ { QVideoSurfaceFormat::Format_P010BE , GST_VIDEO_FORMAT_P010_10BE },
+ { QVideoSurfaceFormat::Format_Y8 , GST_VIDEO_FORMAT_GRAY8 },
#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN
- { QVideoFrame::Format_RGB32 , GST_VIDEO_FORMAT_BGRx },
- { QVideoFrame::Format_BGR32 , GST_VIDEO_FORMAT_RGBx },
- { QVideoFrame::Format_ARGB32, GST_VIDEO_FORMAT_BGRA },
- { QVideoFrame::Format_ABGR32, GST_VIDEO_FORMAT_RGBA },
- { QVideoFrame::Format_BGRA32, GST_VIDEO_FORMAT_ARGB },
- { QVideoFrame::Format_RGB555 , GST_VIDEO_FORMAT_BGR15 },
- { QVideoFrame::Format_BGR555 , GST_VIDEO_FORMAT_RGB15 },
- { QVideoFrame::Format_Y16 , GST_VIDEO_FORMAT_GRAY16_LE },
+ { QVideoSurfaceFormat::Format_RGB32 , GST_VIDEO_FORMAT_BGRx },
+ { QVideoSurfaceFormat::Format_BGR32 , GST_VIDEO_FORMAT_RGBx },
+ { QVideoSurfaceFormat::Format_ARGB32, GST_VIDEO_FORMAT_BGRA },
+ { QVideoSurfaceFormat::Format_ABGR32, GST_VIDEO_FORMAT_RGBA },
+ { QVideoSurfaceFormat::Format_BGRA32, GST_VIDEO_FORMAT_ARGB },
+ { QVideoSurfaceFormat::Format_RGB555 , GST_VIDEO_FORMAT_BGR15 },
+ { QVideoSurfaceFormat::Format_BGR555 , GST_VIDEO_FORMAT_RGB15 },
+ { QVideoSurfaceFormat::Format_Y16 , GST_VIDEO_FORMAT_GRAY16_LE },
#else
- { QVideoFrame::Format_RGB32 , GST_VIDEO_FORMAT_xRGB },
- { QVideoFrame::Format_BGR32 , GST_VIDEO_FORMAT_xBGR },
- { QVideoFrame::Format_ARGB32, GST_VIDEO_FORMAT_ARGB },
- { QVideoFrame::Format_ABGR32, GST_VIDEO_FORMAT_ABGR },
- { QVideoFrame::Format_BGRA32, GST_VIDEO_FORMAT_BGRA },
- { QVideoFrame::Format_RGB555 , GST_VIDEO_FORMAT_RGB15 },
- { QVideoFrame::Format_BGR555 , GST_VIDEO_FORMAT_BGR15 },
- { QVideoFrame::Format_Y16 , GST_VIDEO_FORMAT_GRAY16_BE },
+ { QVideoSurfaceFormat::Format_RGB32 , GST_VIDEO_FORMAT_xRGB },
+ { QVideoSurfaceFormat::Format_BGR32 , GST_VIDEO_FORMAT_xBGR },
+ { QVideoSurfaceFormat::Format_ARGB32, GST_VIDEO_FORMAT_ARGB },
+ { QVideoSurfaceFormat::Format_ABGR32, GST_VIDEO_FORMAT_ABGR },
+ { QVideoSurfaceFormat::Format_BGRA32, GST_VIDEO_FORMAT_BGRA },
+ { QVideoSurfaceFormat::Format_RGB555 , GST_VIDEO_FORMAT_RGB15 },
+ { QVideoSurfaceFormat::Format_BGR555 , GST_VIDEO_FORMAT_BGR15 },
+ { QVideoSurfaceFormat::Format_Y16 , GST_VIDEO_FORMAT_GRAY16_BE },
#endif
- { QVideoFrame::Format_RGB24 , GST_VIDEO_FORMAT_RGB },
- { QVideoFrame::Format_BGR24 , GST_VIDEO_FORMAT_BGR },
- { QVideoFrame::Format_RGB565, GST_VIDEO_FORMAT_RGB16 }
+ { QVideoSurfaceFormat::Format_RGB24 , GST_VIDEO_FORMAT_RGB },
+ { QVideoSurfaceFormat::Format_BGR24 , GST_VIDEO_FORMAT_BGR },
+ { QVideoSurfaceFormat::Format_RGB565, GST_VIDEO_FORMAT_RGB16 }
};
-static int indexOfVideoFormat(QVideoFrame::PixelFormat format)
+static int indexOfVideoFormat(QVideoSurfaceFormat::PixelFormat format)
{
for (int i = 0; i < lengthOf(qt_videoFormatLookup); ++i)
if (qt_videoFormatLookup[i].pixelFormat == format)
@@ -250,11 +250,11 @@ QVideoSurfaceFormat QGstUtils::formatForCaps(
return QVideoSurfaceFormat();
}
-QGstMutableCaps QGstUtils::capsForFormats(const QList<QVideoFrame::PixelFormat> &formats)
+QGstMutableCaps QGstUtils::capsForFormats(const QList<QVideoSurfaceFormat::PixelFormat> &formats)
{
GstCaps *caps = gst_caps_new_empty();
- for (QVideoFrame::PixelFormat format : formats) {
+ for (QVideoSurfaceFormat::PixelFormat format : formats) {
int index = indexOfVideoFormat(format);
if (index != -1) {
@@ -303,9 +303,9 @@ QSize QGstStructure::resolution() const
return size;
}
-QVideoFrame::PixelFormat QGstStructure::pixelFormat() const
+QVideoSurfaceFormat::PixelFormat QGstStructure::pixelFormat() const
{
- QVideoFrame::PixelFormat pixelFormat = QVideoFrame::Format_Invalid;
+ QVideoSurfaceFormat::PixelFormat pixelFormat = QVideoSurfaceFormat::Format_Invalid;
if (!structure)
return pixelFormat;
diff --git a/src/multimedia/platform/gstreamer/common/qgstutils_p.h b/src/multimedia/platform/gstreamer/common/qgstutils_p.h
index 81fd4d31c..39063b45a 100644
--- a/src/multimedia/platform/gstreamer/common/qgstutils_p.h
+++ b/src/multimedia/platform/gstreamer/common/qgstutils_p.h
@@ -78,7 +78,7 @@ namespace QGstUtils {
GstCaps *caps,
GstVideoInfo *info = 0);
- Q_MULTIMEDIA_EXPORT QGstMutableCaps capsForFormats(const QList<QVideoFrame::PixelFormat> &formats);
+ Q_MULTIMEDIA_EXPORT QGstMutableCaps capsForFormats(const QList<QVideoSurfaceFormat::PixelFormat> &formats);
void setFrameTimeStamps(QVideoFrame *frame, GstBuffer *buffer);
Q_MULTIMEDIA_EXPORT bool useOpenGL();
diff --git a/src/multimedia/platform/gstreamer/common/qgstvideorenderersink.cpp b/src/multimedia/platform/gstreamer/common/qgstvideorenderersink.cpp
index 990ab2bd4..412a482f8 100644
--- a/src/multimedia/platform/gstreamer/common/qgstvideorenderersink.cpp
+++ b/src/multimedia/platform/gstreamer/common/qgstvideorenderersink.cpp
@@ -87,30 +87,30 @@ QGstMutableCaps QGstVideoRenderer::getCaps()
#if QT_CONFIG(gstreamer_gl)
if (QGstUtils::useOpenGL()) {
m_handleType = QVideoFrame::GLTextureHandle;
- auto formats = QList<QVideoFrame::PixelFormat>()
- << QVideoFrame::Format_YUV420P
- << QVideoFrame::Format_YUV422P
- << QVideoFrame::Format_YV12
- << QVideoFrame::Format_UYVY
- << QVideoFrame::Format_YUYV
- << QVideoFrame::Format_NV12
- << QVideoFrame::Format_NV21
- << QVideoFrame::Format_AYUV444
- << QVideoFrame::Format_YUV444
-// << QVideoFrame::Format_P010LE
-// << QVideoFrame::Format_P010BE
-// << QVideoFrame::Format_Y8
- << QVideoFrame::Format_RGB32
- << QVideoFrame::Format_BGR32
- << QVideoFrame::Format_ARGB32
- << QVideoFrame::Format_ABGR32
- << QVideoFrame::Format_BGRA32
- << QVideoFrame::Format_RGB555
- << QVideoFrame::Format_BGR555
-// << QVideoFrame::Format_Y16
-// << QVideoFrame::Format_RGB24
-// << QVideoFrame::Format_BGR24
-// << QVideoFrame::Format_RGB565
+ auto formats = QList<QVideoSurfaceFormat::PixelFormat>()
+ << QVideoSurfaceFormat::Format_YUV420P
+ << QVideoSurfaceFormat::Format_YUV422P
+ << QVideoSurfaceFormat::Format_YV12
+ << QVideoSurfaceFormat::Format_UYVY
+ << QVideoSurfaceFormat::Format_YUYV
+ << QVideoSurfaceFormat::Format_NV12
+ << QVideoSurfaceFormat::Format_NV21
+ << QVideoSurfaceFormat::Format_AYUV444
+ << QVideoSurfaceFormat::Format_YUV444
+// << QVideoSurfaceFormat::Format_P010LE
+// << QVideoSurfaceFormat::Format_P010BE
+// << QVideoSurfaceFormat::Format_Y8
+ << QVideoSurfaceFormat::Format_RGB32
+ << QVideoSurfaceFormat::Format_BGR32
+ << QVideoSurfaceFormat::Format_ARGB32
+ << QVideoSurfaceFormat::Format_ABGR32
+ << QVideoSurfaceFormat::Format_BGRA32
+ << QVideoSurfaceFormat::Format_RGB555
+ << QVideoSurfaceFormat::Format_BGR555
+// << QVideoSurfaceFormat::Format_Y16
+// << QVideoSurfaceFormat::Format_RGB24
+// << QVideoSurfaceFormat::Format_BGR24
+// << QVideoSurfaceFormat::Format_RGB565
;
// Even if the surface does not support gl textures,
// glupload will be added to the pipeline and GLMemory will be requested.
@@ -126,30 +126,30 @@ QGstMutableCaps QGstVideoRenderer::getCaps()
m_handleType = QVideoFrame::NoHandle;
}
#endif
- auto formats = QList<QVideoFrame::PixelFormat>()
- << QVideoFrame::Format_YUV420P
- << QVideoFrame::Format_YUV422P
- << QVideoFrame::Format_YV12
- << QVideoFrame::Format_UYVY
- << QVideoFrame::Format_YUYV
- << QVideoFrame::Format_NV12
- << QVideoFrame::Format_NV21
- << QVideoFrame::Format_AYUV444
- << QVideoFrame::Format_YUV444
- << QVideoFrame::Format_P010LE
- << QVideoFrame::Format_P010BE
- << QVideoFrame::Format_Y8
- << QVideoFrame::Format_RGB32
- << QVideoFrame::Format_BGR32
- << QVideoFrame::Format_ARGB32
- << QVideoFrame::Format_ABGR32
- << QVideoFrame::Format_BGRA32
- << QVideoFrame::Format_RGB555
- << QVideoFrame::Format_BGR555
- << QVideoFrame::Format_Y16
- << QVideoFrame::Format_RGB24
- << QVideoFrame::Format_BGR24
- << QVideoFrame::Format_RGB565;
+ auto formats = QList<QVideoSurfaceFormat::PixelFormat>()
+ << QVideoSurfaceFormat::Format_YUV420P
+ << QVideoSurfaceFormat::Format_YUV422P
+ << QVideoSurfaceFormat::Format_YV12
+ << QVideoSurfaceFormat::Format_UYVY
+ << QVideoSurfaceFormat::Format_YUYV
+ << QVideoSurfaceFormat::Format_NV12
+ << QVideoSurfaceFormat::Format_NV21
+ << QVideoSurfaceFormat::Format_AYUV444
+ << QVideoSurfaceFormat::Format_YUV444
+ << QVideoSurfaceFormat::Format_P010LE
+ << QVideoSurfaceFormat::Format_P010BE
+ << QVideoSurfaceFormat::Format_Y8
+ << QVideoSurfaceFormat::Format_RGB32
+ << QVideoSurfaceFormat::Format_BGR32
+ << QVideoSurfaceFormat::Format_ARGB32
+ << QVideoSurfaceFormat::Format_ABGR32
+ << QVideoSurfaceFormat::Format_BGRA32
+ << QVideoSurfaceFormat::Format_RGB555
+ << QVideoSurfaceFormat::Format_BGR555
+ << QVideoSurfaceFormat::Format_Y16
+ << QVideoSurfaceFormat::Format_RGB24
+ << QVideoSurfaceFormat::Format_BGR24
+ << QVideoSurfaceFormat::Format_RGB565;
return QGstUtils::capsForFormats(formats);
}
diff --git a/src/multimedia/platform/qnx/camera/bbcamerasession.cpp b/src/multimedia/platform/qnx/camera/bbcamerasession.cpp
index 89d51ab20..109708eff 100644
--- a/src/multimedia/platform/qnx/camera/bbcamerasession.cpp
+++ b/src/multimedia/platform/qnx/camera/bbcamerasession.cpp
@@ -732,7 +732,7 @@ void BbCameraSession::viewfinderFrameGrabbed(const QImage &image)
if (m_surface) {
if (frame.size() != m_surface->surfaceFormat().frameSize()) {
m_surface->stop();
- m_surface->start(QVideoSurfaceFormat(frame.size(), QVideoFrame::Format_ARGB32));
+ m_surface->start(QVideoSurfaceFormat(frame.size(), QVideoSurfaceFormat::Format_ARGB32));
}
QVideoFrame videoFrame(frame);
@@ -850,7 +850,7 @@ bool BbCameraSession::startViewFinder()
m_surfaceMutex.lock();
if (m_surface) {
- const bool ok = m_surface->start(QVideoSurfaceFormat(rotatedSize, QVideoFrame::Format_ARGB32));
+ const bool ok = m_surface->start(QVideoSurfaceFormat(rotatedSize, QVideoSurfaceFormat::Format_ARGB32));
if (!ok)
qWarning() << "Unable to start camera viewfinder surface";
}
diff --git a/src/multimedia/platform/qnx/mediaplayer/mmrendererplayervideorenderercontrol.cpp b/src/multimedia/platform/qnx/mediaplayer/mmrendererplayervideorenderercontrol.cpp
index b44fbe64e..5862765b8 100644
--- a/src/multimedia/platform/qnx/mediaplayer/mmrendererplayervideorenderercontrol.cpp
+++ b/src/multimedia/platform/qnx/mediaplayer/mmrendererplayervideorenderercontrol.cpp
@@ -172,19 +172,19 @@ void MmRendererPlayerVideoRendererControl::updateScene(const QSize &size)
if (m_surface) {
if (!m_surface->isActive()) {
if (m_windowGrabber->eglImageSupported()) {
- m_surface->start(QVideoSurfaceFormat(size, QVideoFrame::Format_BGR32,
+ m_surface->start(QVideoSurfaceFormat(size, QVideoSurfaceFormat::Format_BGR32,
QVideoFrame::GLTextureHandle));
} else {
- m_surface->start(QVideoSurfaceFormat(size, QVideoFrame::Format_ARGB32));
+ m_surface->start(QVideoSurfaceFormat(size, QVideoSurfaceFormat::Format_ARGB32));
}
} else {
if (m_surface->surfaceFormat().frameSize() != size) {
m_surface->stop();
if (m_windowGrabber->eglImageSupported()) {
- m_surface->start(QVideoSurfaceFormat(size, QVideoFrame::Format_BGR32,
+ m_surface->start(QVideoSurfaceFormat(size, QVideoSurfaceFormat::Format_BGR32,
QVideoFrame::GLTextureHandle));
} else {
- m_surface->start(QVideoSurfaceFormat(size, QVideoFrame::Format_ARGB32));
+ m_surface->start(QVideoSurfaceFormat(size, QVideoSurfaceFormat::Format_ARGB32));
}
}
}
@@ -193,7 +193,7 @@ void MmRendererPlayerVideoRendererControl::updateScene(const QSize &size)
// handle or a copy of the image data
if (m_windowGrabber->eglImageSupported()) {
QnxTextureBuffer *textBuffer = new QnxTextureBuffer(m_windowGrabber);
- QVideoFrame actualFrame(textBuffer, size, QVideoFrame::Format_BGR32);
+ QVideoFrame actualFrame(textBuffer, size, QVideoSurfaceFormat::Format_BGR32);
m_surface->present(actualFrame);
} else {
m_surface->present(m_windowGrabber->getNextImage().copy());
diff --git a/src/multimedia/platform/windows/evr/evrcustompresenter.cpp b/src/multimedia/platform/windows/evr/evrcustompresenter.cpp
index ed398675e..f23debc81 100644
--- a/src/multimedia/platform/windows/evr/evrcustompresenter.cpp
+++ b/src/multimedia/platform/windows/evr/evrcustompresenter.cpp
@@ -67,7 +67,7 @@ static const LONG ONE_MSEC = 1000;
static HRESULT setDesiredSampleTime(IMFSample *sample, const LONGLONG& hnsSampleTime, const LONGLONG& hnsDuration);
static HRESULT clearDesiredSampleTime(IMFSample *sample);
static HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect& nrcSource);
-static QVideoFrame::PixelFormat pixelFormatFromMediaType(IMFMediaType *type);
+static QVideoSurfaceFormat::PixelFormat pixelFormatFromMediaType(IMFMediaType *type);
static inline LONG MFTimeToMsec(const LONGLONG& time)
{
@@ -1026,13 +1026,13 @@ void EVRCustomPresenter::supportedFormatsChanged()
// check if we can render to the surface (compatible formats)
if (m_surface) {
- QList<QVideoFrame::PixelFormat> formats = m_surface->supportedPixelFormats(QVideoFrame::GLTextureHandle);
- if (m_presentEngine->supportsTextureRendering() && formats.contains(QVideoFrame::Format_RGB32)) {
+ QList<QVideoSurfaceFormat::PixelFormat> formats = m_surface->supportedPixelFormats(QVideoFrame::GLTextureHandle);
+ if (m_presentEngine->supportsTextureRendering() && formats.contains(QVideoSurfaceFormat::Format_RGB32)) {
m_presentEngine->setHint(D3DPresentEngine::RenderToTexture, true);
m_canRenderToSurface = true;
} else {
formats = m_surface->supportedPixelFormats(QVideoFrame::NoHandle);
- for (QVideoFrame::PixelFormat format : qAsConst(formats)) {
+ for (QVideoSurfaceFormat::PixelFormat format : qAsConst(formats)) {
if (SUCCEEDED(m_presentEngine->checkFormat(qt_evr_D3DFormatFromPixelFormat(format)))) {
m_canRenderToSurface = true;
break;
@@ -1479,8 +1479,8 @@ HRESULT EVRCustomPresenter::isMediaTypeSupported(IMFMediaType *proposed)
if (FAILED(hr))
return hr;
- QVideoFrame::PixelFormat pixelFormat = pixelFormatFromMediaType(proposed);
- if (pixelFormat == QVideoFrame::Format_Invalid)
+ QVideoSurfaceFormat::PixelFormat pixelFormat = pixelFormatFromMediaType(proposed);
+ if (pixelFormat == QVideoSurfaceFormat::Format_Invalid)
return MF_E_INVALIDMEDIATYPE;
// When not rendering to texture, only accept pixel formats supported by the video surface
@@ -2023,40 +2023,40 @@ HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect &sou
return hr;
}
-static QVideoFrame::PixelFormat pixelFormatFromMediaType(IMFMediaType *type)
+static QVideoSurfaceFormat::PixelFormat pixelFormatFromMediaType(IMFMediaType *type)
{
GUID majorType;
if (FAILED(type->GetMajorType(&majorType)))
- return QVideoFrame::Format_Invalid;
+ return QVideoSurfaceFormat::Format_Invalid;
if (majorType != MFMediaType_Video)
- return QVideoFrame::Format_Invalid;
+ return QVideoSurfaceFormat::Format_Invalid;
GUID subtype;
if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &subtype)))
- return QVideoFrame::Format_Invalid;
+ return QVideoSurfaceFormat::Format_Invalid;
if (subtype == MFVideoFormat_RGB32)
- return QVideoFrame::Format_RGB32;
+ return QVideoSurfaceFormat::Format_RGB32;
if (subtype == MFVideoFormat_ARGB32)
- return QVideoFrame::Format_ARGB32;
+ return QVideoSurfaceFormat::Format_ARGB32;
if (subtype == MFVideoFormat_RGB24)
- return QVideoFrame::Format_RGB24;
+ return QVideoSurfaceFormat::Format_RGB24;
if (subtype == MFVideoFormat_RGB565)
- return QVideoFrame::Format_RGB565;
+ return QVideoSurfaceFormat::Format_RGB565;
if (subtype == MFVideoFormat_RGB555)
- return QVideoFrame::Format_RGB555;
+ return QVideoSurfaceFormat::Format_RGB555;
if (subtype == MFVideoFormat_AYUV)
- return QVideoFrame::Format_AYUV444;
+ return QVideoSurfaceFormat::Format_AYUV444;
if (subtype == MFVideoFormat_I420)
- return QVideoFrame::Format_YUV420P;
+ return QVideoSurfaceFormat::Format_YUV420P;
if (subtype == MFVideoFormat_UYVY)
- return QVideoFrame::Format_UYVY;
+ return QVideoSurfaceFormat::Format_UYVY;
if (subtype == MFVideoFormat_YV12)
- return QVideoFrame::Format_YV12;
+ return QVideoSurfaceFormat::Format_YV12;
if (subtype == MFVideoFormat_NV12)
- return QVideoFrame::Format_NV12;
+ return QVideoSurfaceFormat::Format_NV12;
- return QVideoFrame::Format_Invalid;
+ return QVideoSurfaceFormat::Format_Invalid;
}
QT_END_NAMESPACE
diff --git a/src/multimedia/platform/windows/evr/evrd3dpresentengine.cpp b/src/multimedia/platform/windows/evr/evrd3dpresentengine.cpp
index 4f5eadc64..afbbbcb3a 100644
--- a/src/multimedia/platform/windows/evr/evrd3dpresentengine.cpp
+++ b/src/multimedia/platform/windows/evr/evrd3dpresentengine.cpp
@@ -369,7 +369,7 @@ HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format, QList<IMFSamp
done:
if (SUCCEEDED(hr)) {
m_surfaceFormat = QVideoSurfaceFormat(QSize(width, height),
- m_useTextureRendering ? QVideoFrame::Format_RGB32
+ m_useTextureRendering ? QVideoSurfaceFormat::Format_RGB32
: qt_evr_pixelFormatFromD3DFormat(d3dFormat),
m_useTextureRendering ? QVideoFrame::GLTextureHandle
: QVideoFrame::NoHandle);
diff --git a/src/multimedia/platform/windows/evr/evrhelpers.cpp b/src/multimedia/platform/windows/evr/evrhelpers.cpp
index aa2311f46..4c81228b0 100644
--- a/src/multimedia/platform/windows/evr/evrhelpers.cpp
+++ b/src/multimedia/platform/windows/evr/evrhelpers.cpp
@@ -117,67 +117,67 @@ bool qt_evr_isSampleTimePassed(IMFClock *clock, IMFSample *sample)
return false;
}
-QVideoFrame::PixelFormat qt_evr_pixelFormatFromD3DFormat(DWORD format)
+QVideoSurfaceFormat::PixelFormat qt_evr_pixelFormatFromD3DFormat(DWORD format)
{
switch (format) {
case D3DFMT_R8G8B8:
- return QVideoFrame::Format_RGB24;
+ return QVideoSurfaceFormat::Format_RGB24;
case D3DFMT_A8R8G8B8:
- return QVideoFrame::Format_ARGB32;
+ return QVideoSurfaceFormat::Format_ARGB32;
case D3DFMT_X8R8G8B8:
- return QVideoFrame::Format_RGB32;
+ return QVideoSurfaceFormat::Format_RGB32;
case D3DFMT_R5G6B5:
- return QVideoFrame::Format_RGB565;
+ return QVideoSurfaceFormat::Format_RGB565;
case D3DFMT_X1R5G5B5:
- return QVideoFrame::Format_RGB555;
+ return QVideoSurfaceFormat::Format_RGB555;
case D3DFMT_A8:
- return QVideoFrame::Format_Y8;
+ return QVideoSurfaceFormat::Format_Y8;
case D3DFMT_A8B8G8R8:
- return QVideoFrame::Format_BGRA32;
+ return QVideoSurfaceFormat::Format_BGRA32;
case D3DFMT_X8B8G8R8:
- return QVideoFrame::Format_BGR32;
+ return QVideoSurfaceFormat::Format_BGR32;
case D3DFMT_UYVY:
- return QVideoFrame::Format_UYVY;
+ return QVideoSurfaceFormat::Format_UYVY;
case D3DFMT_YUY2:
- return QVideoFrame::Format_YUYV;
+ return QVideoSurfaceFormat::Format_YUYV;
case D3DFMT_NV12:
- return QVideoFrame::Format_NV12;
+ return QVideoSurfaceFormat::Format_NV12;
case D3DFMT_YV12:
- return QVideoFrame::Format_YV12;
+ return QVideoSurfaceFormat::Format_YV12;
case D3DFMT_UNKNOWN:
default:
- return QVideoFrame::Format_Invalid;
+ return QVideoSurfaceFormat::Format_Invalid;
}
}
-D3DFORMAT qt_evr_D3DFormatFromPixelFormat(QVideoFrame::PixelFormat format)
+D3DFORMAT qt_evr_D3DFormatFromPixelFormat(QVideoSurfaceFormat::PixelFormat format)
{
switch (format) {
- case QVideoFrame::Format_RGB24:
+ case QVideoSurfaceFormat::Format_RGB24:
return D3DFMT_R8G8B8;
- case QVideoFrame::Format_ARGB32:
+ case QVideoSurfaceFormat::Format_ARGB32:
return D3DFMT_A8R8G8B8;
- case QVideoFrame::Format_RGB32:
+ case QVideoSurfaceFormat::Format_RGB32:
return D3DFMT_X8R8G8B8;
- case QVideoFrame::Format_RGB565:
+ case QVideoSurfaceFormat::Format_RGB565:
return D3DFMT_R5G6B5;
- case QVideoFrame::Format_RGB555:
+ case QVideoSurfaceFormat::Format_RGB555:
return D3DFMT_X1R5G5B5;
- case QVideoFrame::Format_Y8:
+ case QVideoSurfaceFormat::Format_Y8:
return D3DFMT_A8;
- case QVideoFrame::Format_BGRA32:
+ case QVideoSurfaceFormat::Format_BGRA32:
return D3DFMT_A8B8G8R8;
- case QVideoFrame::Format_BGR32:
+ case QVideoSurfaceFormat::Format_BGR32:
return D3DFMT_X8B8G8R8;
- case QVideoFrame::Format_UYVY:
+ case QVideoSurfaceFormat::Format_UYVY:
return D3DFMT_UYVY;
- case QVideoFrame::Format_YUYV:
+ case QVideoSurfaceFormat::Format_YUYV:
return D3DFMT_YUY2;
- case QVideoFrame::Format_NV12:
+ case QVideoSurfaceFormat::Format_NV12:
return D3DFMT_NV12;
- case QVideoFrame::Format_YV12:
+ case QVideoSurfaceFormat::Format_YV12:
return D3DFMT_YV12;
- case QVideoFrame::Format_Invalid:
+ case QVideoSurfaceFormat::Format_Invalid:
default:
return D3DFMT_UNKNOWN;
}
diff --git a/src/multimedia/platform/windows/evr/evrhelpers_p.h b/src/multimedia/platform/windows/evr/evrhelpers_p.h
index 89bff6288..34e992624 100644
--- a/src/multimedia/platform/windows/evr/evrhelpers_p.h
+++ b/src/multimedia/platform/windows/evr/evrhelpers_p.h
@@ -103,8 +103,8 @@ inline HRESULT qt_evr_getFrameRate(IMFMediaType *pType, MFRatio *pRatio)
reinterpret_cast<UINT32*>(&pRatio->Denominator));
}
-QVideoFrame::PixelFormat qt_evr_pixelFormatFromD3DFormat(DWORD format);
-D3DFORMAT qt_evr_D3DFormatFromPixelFormat(QVideoFrame::PixelFormat format);
+QVideoSurfaceFormat::PixelFormat qt_evr_pixelFormatFromD3DFormat(DWORD format);
+D3DFORMAT qt_evr_D3DFormatFromPixelFormat(QVideoSurfaceFormat::PixelFormat format);
QT_END_NAMESPACE
diff --git a/src/multimedia/platform/windows/player/mftvideo.cpp b/src/multimedia/platform/windows/player/mftvideo.cpp
index 8d318a6f6..c97c479d6 100644
--- a/src/multimedia/platform/windows/player/mftvideo.cpp
+++ b/src/multimedia/platform/windows/player/mftvideo.cpp
@@ -603,30 +603,30 @@ HRESULT MFTransform::OnFlush()
return S_OK;
}
-QVideoFrame::PixelFormat MFTransform::formatFromSubtype(const GUID& subtype)
+QVideoSurfaceFormat::PixelFormat MFTransform::formatFromSubtype(const GUID& subtype)
{
if (subtype == MFVideoFormat_ARGB32)
- return QVideoFrame::Format_ARGB32;
+ return QVideoSurfaceFormat::Format_ARGB32;
else if (subtype == MFVideoFormat_RGB32)
- return QVideoFrame::Format_RGB32;
+ return QVideoSurfaceFormat::Format_RGB32;
else if (subtype == MFVideoFormat_RGB24)
- return QVideoFrame::Format_RGB24;
+ return QVideoSurfaceFormat::Format_RGB24;
else if (subtype == MFVideoFormat_RGB565)
- return QVideoFrame::Format_RGB565;
+ return QVideoSurfaceFormat::Format_RGB565;
else if (subtype == MFVideoFormat_RGB555)
- return QVideoFrame::Format_RGB555;
+ return QVideoSurfaceFormat::Format_RGB555;
else if (subtype == MFVideoFormat_AYUV)
- return QVideoFrame::Format_AYUV444;
+ return QVideoSurfaceFormat::Format_AYUV444;
else if (subtype == MFVideoFormat_I420)
- return QVideoFrame::Format_YUV420P;
+ return QVideoSurfaceFormat::Format_YUV420P;
else if (subtype == MFVideoFormat_UYVY)
- return QVideoFrame::Format_UYVY;
+ return QVideoSurfaceFormat::Format_UYVY;
else if (subtype == MFVideoFormat_YV12)
- return QVideoFrame::Format_YV12;
+ return QVideoSurfaceFormat::Format_YV12;
else if (subtype == MFVideoFormat_NV12)
- return QVideoFrame::Format_NV12;
+ return QVideoSurfaceFormat::Format_NV12;
- return QVideoFrame::Format_Invalid;
+ return QVideoSurfaceFormat::Format_Invalid;
}
QVideoSurfaceFormat MFTransform::videoFormatForMFMediaType(IMFMediaType *mediaType, int *bytesPerLine)
@@ -651,7 +651,7 @@ QVideoSurfaceFormat MFTransform::videoFormatForMFMediaType(IMFMediaType *mediaTy
if (FAILED(mediaType->GetGUID(MF_MT_SUBTYPE, &subtype)))
return QVideoSurfaceFormat();
- QVideoFrame::PixelFormat pixelFormat = formatFromSubtype(subtype);
+ QVideoSurfaceFormat::PixelFormat pixelFormat = formatFromSubtype(subtype);
QVideoSurfaceFormat format(size, pixelFormat);
if (SUCCEEDED(MFGetAttributeRatio(mediaType, MF_MT_FRAME_RATE, &num, &den))) {
diff --git a/src/multimedia/platform/windows/player/mftvideo_p.h b/src/multimedia/platform/windows/player/mftvideo_p.h
index 63cba906f..26f12b889 100644
--- a/src/multimedia/platform/windows/player/mftvideo_p.h
+++ b/src/multimedia/platform/windows/player/mftvideo_p.h
@@ -72,7 +72,7 @@ public:
void setVideoSink(IUnknown *videoSink);
- static QVideoFrame::PixelFormat formatFromSubtype(const GUID& subtype);
+ static QVideoSurfaceFormat::PixelFormat formatFromSubtype(const GUID& subtype);
// IUnknown methods
STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
diff --git a/src/multimedia/platform/windows/player/mfvideorenderercontrol.cpp b/src/multimedia/platform/windows/player/mfvideorenderercontrol.cpp
index 2ef77a9e4..5a7ff6d2b 100644
--- a/src/multimedia/platform/windows/player/mfvideorenderercontrol.cpp
+++ b/src/multimedia/platform/windows/player/mfvideorenderercontrol.cpp
@@ -791,8 +791,8 @@ namespace
clearMediaTypes();
if (!m_surface)
return;
- const QList<QVideoFrame::PixelFormat> formats = m_surface->supportedPixelFormats();
- for (QVideoFrame::PixelFormat format : formats) {
+ const QList<QVideoSurfaceFormat::PixelFormat> formats = m_surface->supportedPixelFormats();
+ for (QVideoSurfaceFormat::PixelFormat format : formats) {
IMFMediaType *mediaType;
if (FAILED(MFCreateMediaType(&mediaType))) {
qWarning("Failed to create mf media type!");
@@ -803,36 +803,36 @@ namespace
mediaType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
mediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
switch (format) {
- case QVideoFrame::Format_ARGB32:
- case QVideoFrame::Format_ARGB32_Premultiplied:
+ case QVideoSurfaceFormat::Format_ARGB32:
+ case QVideoSurfaceFormat::Format_ARGB32_Premultiplied:
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_ARGB32);
break;
- case QVideoFrame::Format_RGB32:
+ case QVideoSurfaceFormat::Format_RGB32:
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32);
break;
- case QVideoFrame::Format_BGR24: // MFVideoFormat_RGB24 has a BGR layout
+ case QVideoSurfaceFormat::Format_BGR24: // MFVideoFormat_RGB24 has a BGR layout
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24);
break;
- case QVideoFrame::Format_RGB565:
+ case QVideoSurfaceFormat::Format_RGB565:
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB565);
break;
- case QVideoFrame::Format_RGB555:
+ case QVideoSurfaceFormat::Format_RGB555:
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB555);
break;
- case QVideoFrame::Format_AYUV444:
- case QVideoFrame::Format_AYUV444_Premultiplied:
+ case QVideoSurfaceFormat::Format_AYUV444:
+ case QVideoSurfaceFormat::Format_AYUV444_Premultiplied:
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_AYUV);
break;
- case QVideoFrame::Format_YUV420P:
+ case QVideoSurfaceFormat::Format_YUV420P:
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_I420);
break;
- case QVideoFrame::Format_UYVY:
+ case QVideoSurfaceFormat::Format_UYVY:
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_UYVY);
break;
- case QVideoFrame::Format_YV12:
+ case QVideoSurfaceFormat::Format_YV12:
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_YV12);
break;
- case QVideoFrame::Format_NV12:
+ case QVideoSurfaceFormat::Format_NV12:
mediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12);
break;
default:
@@ -1038,7 +1038,7 @@ namespace
bool m_shutdown;
QList<IMFMediaType*> m_mediaTypes;
- QList<QVideoFrame::PixelFormat> m_pixelFormats;
+ QList<QVideoSurfaceFormat::PixelFormat> m_pixelFormats;
int m_currentFormatIndex;
int m_bytesPerLine;
QVideoSurfaceFormat m_surfaceFormat;
@@ -1077,27 +1077,27 @@ namespace
{
switch (format.pixelFormat()) {
// 32 bpp packed formats.
- case QVideoFrame::Format_RGB32:
- case QVideoFrame::Format_AYUV444:
+ case QVideoSurfaceFormat::Format_RGB32:
+ case QVideoSurfaceFormat::Format_AYUV444:
return format.frameWidth() * 4;
// 24 bpp packed formats.
- case QVideoFrame::Format_RGB24:
- case QVideoFrame::Format_BGR24:
+ case QVideoSurfaceFormat::Format_RGB24:
+ case QVideoSurfaceFormat::Format_BGR24:
return PAD_TO_DWORD(format.frameWidth() * 3);
// 16 bpp packed formats.
- case QVideoFrame::Format_RGB565:
- case QVideoFrame::Format_RGB555:
- case QVideoFrame::Format_YUYV:
- case QVideoFrame::Format_UYVY:
+ case QVideoSurfaceFormat::Format_RGB565:
+ case QVideoSurfaceFormat::Format_RGB555:
+ case QVideoSurfaceFormat::Format_YUYV:
+ case QVideoSurfaceFormat::Format_UYVY:
return PAD_TO_DWORD(format.frameWidth() * 2);
// Planar formats.
- case QVideoFrame::Format_IMC1:
- case QVideoFrame::Format_IMC2:
- case QVideoFrame::Format_IMC3:
- case QVideoFrame::Format_IMC4:
- case QVideoFrame::Format_YV12:
- case QVideoFrame::Format_NV12:
- case QVideoFrame::Format_YUV420P:
+ case QVideoSurfaceFormat::Format_IMC1:
+ case QVideoSurfaceFormat::Format_IMC2:
+ case QVideoSurfaceFormat::Format_IMC3:
+ case QVideoSurfaceFormat::Format_IMC4:
+ case QVideoSurfaceFormat::Format_YV12:
+ case QVideoSurfaceFormat::Format_NV12:
+ case QVideoSurfaceFormat::Format_YUV420P:
return PAD_TO_DWORD(format.frameWidth());
default:
return 0;
diff --git a/src/multimedia/platform/windows/qwindowsdevicemanager.cpp b/src/multimedia/platform/windows/qwindowsdevicemanager.cpp
index 0aa85f34e..6567bb665 100644
--- a/src/multimedia/platform/windows/qwindowsdevicemanager.cpp
+++ b/src/multimedia/platform/windows/qwindowsdevicemanager.cpp
@@ -227,7 +227,7 @@ QList<QCameraInfo> QWindowsDeviceManager::videoInputs() const
if (mediaFormatResult == MF_E_NO_MORE_TYPES)
break;
else if (SUCCEEDED(mediaFormatResult)) {
- QVideoFrame::PixelFormat pixelFormat = QVideoFrame::Format_Invalid;
+ QVideoSurfaceFormat::PixelFormat pixelFormat = QVideoSurfaceFormat::Format_Invalid;
QSize resolution;
float minFr = .0;
float maxFr = .0;
diff --git a/src/multimedia/video/qabstractvideosurface.h b/src/multimedia/video/qabstractvideosurface.h
index f47b7ac69..a437d7565 100644
--- a/src/multimedia/video/qabstractvideosurface.h
+++ b/src/multimedia/video/qabstractvideosurface.h
@@ -67,7 +67,7 @@ public:
explicit QAbstractVideoSurface(QObject *parent = nullptr);
~QAbstractVideoSurface();
- virtual QList<QVideoFrame::PixelFormat> supportedPixelFormats(
+ virtual QList<QVideoSurfaceFormat::PixelFormat> supportedPixelFormats(
QVideoFrame::HandleType type = QVideoFrame::NoHandle) const = 0;
bool isFormatSupported(const QVideoSurfaceFormat &format) const;
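
For illustration only (not part of this patch), a video surface implementation now returns the relocated enum from supportedPixelFormats(); a minimal C++ sketch, assuming the Qt headers at this commit and a hypothetical ArgbSurface sink:

#include <QAbstractVideoSurface>
#include <QVideoFrame>
#include <QVideoSurfaceFormat>

// Hypothetical sink, sketched only to show the updated override signature.
class ArgbSurface : public QAbstractVideoSurface
{
public:
    // The element type changes from QVideoFrame::PixelFormat to QVideoSurfaceFormat::PixelFormat.
    QList<QVideoSurfaceFormat::PixelFormat> supportedPixelFormats(
            QVideoFrame::HandleType type) const override
    {
        if (type != QVideoFrame::NoHandle)
            return {};
        return { QVideoSurfaceFormat::Format_ARGB32,
                 QVideoSurfaceFormat::Format_ARGB32_Premultiplied };
    }

    bool present(const QVideoFrame &frame) override
    {
        return frame.isValid();   // a real sink would map() and consume the frame here
    }
};

Only the list element type of supportedPixelFormats() changes; present() and the handle type keep their QVideoFrame-based signatures.
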
diff --git a/src/multimedia/video/qvideoframe.cpp b/src/multimedia/video/qvideoframe.cpp
index 8ca606b4a..f9b15e0df 100644
--- a/src/multimedia/video/qvideoframe.cpp
+++ b/src/multimedia/video/qvideoframe.cpp
@@ -53,7 +53,7 @@
#include <QDebug>
QT_BEGIN_NAMESPACE
-static bool pixelFormatHasAlpha[QVideoFrame::NPixelFormats] =
+static bool pixelFormatHasAlpha[QVideoSurfaceFormat::NPixelFormats] =
{
false, //Format_Invalid,
true, //Format_ARGB32,
@@ -159,7 +159,7 @@ private:
*/
/*!
- \enum QVideoFrame::PixelFormat
+ \enum QVideoSurfaceFormat::PixelFormat
Enumerates video data types.
@@ -341,7 +341,8 @@ QVideoFrame::QVideoFrame(int bytes, int bytesPerLine, const QVideoSurfaceFormat
\sa pixelFormatFromImageFormat()
*/
QVideoFrame::QVideoFrame(const QImage &image)
- : d(new QVideoFramePrivate(QVideoSurfaceFormat(image.size(), pixelFormatFromImageFormat(image.format()))))
+ : d(new QVideoFramePrivate(QVideoSurfaceFormat(image.size(),
+ QVideoSurfaceFormat::pixelFormatFromImageFormat(image.format()))))
{
d->buffer = new QImageVideoBuffer(image);
}
@@ -416,7 +417,7 @@ bool QVideoFrame::isValid() const
/*!
Returns the pixel format of this video frame.
*/
-QVideoFrame::PixelFormat QVideoFrame::pixelFormat() const
+QVideoSurfaceFormat::PixelFormat QVideoFrame::pixelFormat() const
{
return d->format.pixelFormat();
}
@@ -588,35 +589,35 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
auto pixelFmt = d->format.pixelFormat();
// If the plane count is 1 derive the additional planes for planar formats.
switch (pixelFmt) {
- case Format_Invalid:
- case Format_ARGB32:
- case Format_ARGB32_Premultiplied:
- case Format_RGB32:
- case Format_RGB24:
- case Format_RGB565:
- case Format_RGB555:
- case Format_ARGB8565_Premultiplied:
- case Format_BGRA32:
- case Format_BGRA32_Premultiplied:
- case Format_ABGR32:
- case Format_BGR32:
- case Format_BGR24:
- case Format_BGR565:
- case Format_BGR555:
- case Format_BGRA5658_Premultiplied:
- case Format_AYUV444:
- case Format_AYUV444_Premultiplied:
- case Format_YUV444:
- case Format_UYVY:
- case Format_YUYV:
- case Format_Y8:
- case Format_Y16:
- case Format_Jpeg:
+ case QVideoSurfaceFormat::Format_Invalid:
+ case QVideoSurfaceFormat::Format_ARGB32:
+ case QVideoSurfaceFormat::Format_ARGB32_Premultiplied:
+ case QVideoSurfaceFormat::Format_RGB32:
+ case QVideoSurfaceFormat::Format_RGB24:
+ case QVideoSurfaceFormat::Format_RGB565:
+ case QVideoSurfaceFormat::Format_RGB555:
+ case QVideoSurfaceFormat::Format_ARGB8565_Premultiplied:
+ case QVideoSurfaceFormat::Format_BGRA32:
+ case QVideoSurfaceFormat::Format_BGRA32_Premultiplied:
+ case QVideoSurfaceFormat::Format_ABGR32:
+ case QVideoSurfaceFormat::Format_BGR32:
+ case QVideoSurfaceFormat::Format_BGR24:
+ case QVideoSurfaceFormat::Format_BGR565:
+ case QVideoSurfaceFormat::Format_BGR555:
+ case QVideoSurfaceFormat::Format_BGRA5658_Premultiplied:
+ case QVideoSurfaceFormat::Format_AYUV444:
+ case QVideoSurfaceFormat::Format_AYUV444_Premultiplied:
+ case QVideoSurfaceFormat::Format_YUV444:
+ case QVideoSurfaceFormat::Format_UYVY:
+ case QVideoSurfaceFormat::Format_YUYV:
+ case QVideoSurfaceFormat::Format_Y8:
+ case QVideoSurfaceFormat::Format_Y16:
+ case QVideoSurfaceFormat::Format_Jpeg:
// Single plane or opaque format.
break;
- case Format_YUV420P:
- case Format_YUV422P:
- case Format_YV12: {
+ case QVideoSurfaceFormat::Format_YUV420P:
+ case QVideoSurfaceFormat::Format_YUV422P:
+ case QVideoSurfaceFormat::Format_YV12: {
// The UV stride is usually half the Y stride and is 32-bit aligned.
// However it's not always the case, at least on Windows where the
// UV planes are sometimes not aligned.
@@ -624,7 +625,7 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
// have a correct stride.
const int height = this->height();
const int yStride = d->mapData.bytesPerLine[0];
- const int uvHeight = pixelFmt == Format_YUV422P ? height : height / 2;
+ const int uvHeight = pixelFmt == QVideoSurfaceFormat::Format_YUV422P ? height : height / 2;
const int uvStride = (d->mapData.nBytes - (yStride * height)) / uvHeight / 2;
// Three planes, the second and third vertically (and horizontally for other than Format_YUV422P formats) subsampled.
@@ -634,18 +635,18 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
d->mapData.data[2] = d->mapData.data[1] + (uvStride * uvHeight);
break;
}
- case Format_NV12:
- case Format_NV21:
- case Format_IMC2:
- case Format_IMC4: {
+ case QVideoSurfaceFormat::Format_NV12:
+ case QVideoSurfaceFormat::Format_NV21:
+ case QVideoSurfaceFormat::Format_IMC2:
+ case QVideoSurfaceFormat::Format_IMC4: {
// Semi planar, Full resolution Y plane with interleaved subsampled U and V planes.
d->mapData.nPlanes = 2;
d->mapData.bytesPerLine[1] = d->mapData.bytesPerLine[0];
d->mapData.data[1] = d->mapData.data[0] + (d->mapData.bytesPerLine[0] * height());
break;
}
- case Format_IMC1:
- case Format_IMC3: {
+ case QVideoSurfaceFormat::Format_IMC1:
+ case QVideoSurfaceFormat::Format_IMC3: {
// Three planes, the second and third vertically and horizontally subsampled,
// but with lines padded to the width of the first plane.
d->mapData.nPlanes = 3;
@@ -697,7 +698,7 @@ void QVideoFrame::unmap()
Returns the number of bytes in a scan line.
\note For planar formats this is the bytes per line of the first plane only. The bytes per line of subsequent
- planes should be calculated as per the frame \l{QVideoFrame::PixelFormat}{pixel format}.
+ planes should be calculated as per the frame \l{QVideoSurfaceFormat::PixelFormat}{pixel format}.
This value is only valid while the frame data is \l {map()}{mapped}.
@@ -868,103 +869,6 @@ void QVideoFrame::setEndTime(qint64 time)
d->endTime = time;
}
-/*!
- Returns a video pixel format equivalent to an image \a format. If there is no equivalent
- format QVideoFrame::InvalidType is returned instead.
-
- \note In general \l QImage does not handle YUV formats.
-
-*/
-QVideoFrame::PixelFormat QVideoFrame::pixelFormatFromImageFormat(QImage::Format format)
-{
- switch (format) {
- case QImage::Format_RGB32:
- case QImage::Format_RGBX8888:
- return Format_RGB32;
- case QImage::Format_ARGB32:
- case QImage::Format_RGBA8888:
- return Format_ARGB32;
- case QImage::Format_ARGB32_Premultiplied:
- case QImage::Format_RGBA8888_Premultiplied:
- return Format_ARGB32_Premultiplied;
- case QImage::Format_RGB16:
- return Format_RGB565;
- case QImage::Format_ARGB8565_Premultiplied:
- return Format_ARGB8565_Premultiplied;
- case QImage::Format_RGB555:
- return Format_RGB555;
- case QImage::Format_RGB888:
- return Format_RGB24;
- case QImage::Format_Grayscale8:
- return Format_Y8;
- case QImage::Format_Grayscale16:
- return Format_Y16;
- default:
- return Format_Invalid;
- }
-}
-
-/*!
- Returns an image format equivalent to a video frame pixel \a format. If there is no equivalent
- format QImage::Format_Invalid is returned instead.
-
- \note In general \l QImage does not handle YUV formats.
-
-*/
-QImage::Format QVideoFrame::imageFormatFromPixelFormat(PixelFormat format)
-{
- switch (format) {
- case Format_ARGB32:
- return QImage::Format_ARGB32;
- case Format_ARGB32_Premultiplied:
- return QImage::Format_ARGB32_Premultiplied;
- case Format_RGB32:
- return QImage::Format_RGB32;
- case Format_RGB24:
- return QImage::Format_RGB888;
- case Format_RGB565:
- return QImage::Format_RGB16;
- case Format_RGB555:
- return QImage::Format_RGB555;
- case Format_ARGB8565_Premultiplied:
- return QImage::Format_ARGB8565_Premultiplied;
- case Format_Y8:
- return QImage::Format_Grayscale8;
- case Format_Y16:
- return QImage::Format_Grayscale16;
- case Format_ABGR32:
- case Format_BGRA32:
- case Format_BGRA32_Premultiplied:
- case Format_BGR32:
- case Format_BGR24:
- case Format_BGR565:
- case Format_BGR555:
- case Format_BGRA5658_Premultiplied:
- case Format_AYUV444:
- case Format_AYUV444_Premultiplied:
- case Format_YUV444:
- case Format_YUV420P:
- case Format_YUV422P:
- case Format_YV12:
- case Format_UYVY:
- case Format_YUYV:
- case Format_NV12:
- case Format_NV21:
- case Format_IMC1:
- case Format_IMC2:
- case Format_IMC3:
- case Format_IMC4:
- case Format_P010LE:
- case Format_P010BE:
- case Format_P016LE:
- case Format_P016BE:
- case Format_Jpeg:
- case Format_Invalid:
- return QImage::Format_Invalid;
- }
- return QImage::Format_Invalid;
-}
-
/*!
Based on the pixel format converts current video frame to image.
@@ -979,13 +883,13 @@ QImage QVideoFrame::image() const
return result;
// Formats supported by QImage don't need conversion
- QImage::Format imageFormat = QVideoFrame::imageFormatFromPixelFormat(frame.pixelFormat());
+ QImage::Format imageFormat = QVideoSurfaceFormat::imageFormatFromPixelFormat(frame.pixelFormat());
if (imageFormat != QImage::Format_Invalid) {
result = QImage(frame.bits(), frame.width(), frame.height(), frame.bytesPerLine(), imageFormat).copy();
}
// Load from JPG
- else if (frame.pixelFormat() == QVideoFrame::Format_Jpeg) {
+ else if (frame.pixelFormat() == QVideoSurfaceFormat::Format_Jpeg) {
result.loadFromData(frame.bits(), frame.mappedBytes(), "JPG");
}
@@ -1007,91 +911,6 @@ QImage QVideoFrame::image() const
}
#ifndef QT_NO_DEBUG_STREAM
-QDebug operator<<(QDebug dbg, QVideoFrame::PixelFormat pf)
-{
- QDebugStateSaver saver(dbg);
- dbg.nospace();
- switch (pf) {
- case QVideoFrame::Format_Invalid:
- return dbg << "Format_Invalid";
- case QVideoFrame::Format_ARGB32:
- return dbg << "Format_ARGB32";
- case QVideoFrame::Format_ARGB32_Premultiplied:
- return dbg << "Format_ARGB32_Premultiplied";
- case QVideoFrame::Format_RGB32:
- return dbg << "Format_RGB32";
- case QVideoFrame::Format_RGB24:
- return dbg << "Format_RGB24";
- case QVideoFrame::Format_RGB565:
- return dbg << "Format_RGB565";
- case QVideoFrame::Format_RGB555:
- return dbg << "Format_RGB555";
- case QVideoFrame::Format_ARGB8565_Premultiplied:
- return dbg << "Format_ARGB8565_Premultiplied";
- case QVideoFrame::Format_BGRA32:
- return dbg << "Format_BGRA32";
- case QVideoFrame::Format_BGRA32_Premultiplied:
- return dbg << "Format_BGRA32_Premultiplied";
- case QVideoFrame::Format_ABGR32:
- return dbg << "Format_ABGR32";
- case QVideoFrame::Format_BGR32:
- return dbg << "Format_BGR32";
- case QVideoFrame::Format_BGR24:
- return dbg << "Format_BGR24";
- case QVideoFrame::Format_BGR565:
- return dbg << "Format_BGR565";
- case QVideoFrame::Format_BGR555:
- return dbg << "Format_BGR555";
- case QVideoFrame::Format_BGRA5658_Premultiplied:
- return dbg << "Format_BGRA5658_Premultiplied";
- case QVideoFrame::Format_AYUV444:
- return dbg << "Format_AYUV444";
- case QVideoFrame::Format_AYUV444_Premultiplied:
- return dbg << "Format_AYUV444_Premultiplied";
- case QVideoFrame::Format_YUV444:
- return dbg << "Format_YUV444";
- case QVideoFrame::Format_YUV420P:
- return dbg << "Format_YUV420P";
- case QVideoFrame::Format_YUV422P:
- return dbg << "Format_YUV422P";
- case QVideoFrame::Format_YV12:
- return dbg << "Format_YV12";
- case QVideoFrame::Format_UYVY:
- return dbg << "Format_UYVY";
- case QVideoFrame::Format_YUYV:
- return dbg << "Format_YUYV";
- case QVideoFrame::Format_NV12:
- return dbg << "Format_NV12";
- case QVideoFrame::Format_NV21:
- return dbg << "Format_NV21";
- case QVideoFrame::Format_IMC1:
- return dbg << "Format_IMC1";
- case QVideoFrame::Format_IMC2:
- return dbg << "Format_IMC2";
- case QVideoFrame::Format_IMC3:
- return dbg << "Format_IMC3";
- case QVideoFrame::Format_IMC4:
- return dbg << "Format_IMC4";
- case QVideoFrame::Format_Y8:
- return dbg << "Format_Y8";
- case QVideoFrame::Format_Y16:
- return dbg << "Format_Y16";
- case QVideoFrame::Format_P010LE:
- return dbg << "Format_P010LE";
- case QVideoFrame::Format_P010BE:
- return dbg << "Format_P010BE";
- case QVideoFrame::Format_P016LE:
- return dbg << "Format_P016LE";
- case QVideoFrame::Format_P016BE:
- return dbg << "Format_P016BE";
- case QVideoFrame::Format_Jpeg:
- return dbg << "Format_Jpeg";
-
- default:
- return dbg << QString(QLatin1String("UserType(%1)" )).arg(int(pf)).toLatin1().constData();
- }
-}
-
static QString qFormatTimeStamps(qint64 start, qint64 end)
{
// Early out for invalid.
diff --git a/src/multimedia/video/qvideoframe.h b/src/multimedia/video/qvideoframe.h
index bd69095da..028286426 100644
--- a/src/multimedia/video/qvideoframe.h
+++ b/src/multimedia/video/qvideoframe.h
@@ -41,6 +41,7 @@
#define QVIDEOFRAME_H
#include <QtMultimedia/qtmultimediaglobal.h>
+#include <QtMultimedia/qvideosurfaceformat.h>
#include <QtCore/qmetatype.h>
#include <QtCore/qshareddata.h>
@@ -52,57 +53,10 @@ QT_BEGIN_NAMESPACE
class QSize;
class QVideoFramePrivate;
class QAbstractVideoBuffer;
-class QVideoSurfaceFormat;
class Q_MULTIMEDIA_EXPORT QVideoFrame
{
public:
- enum PixelFormat
- {
- Format_Invalid,
- Format_ARGB32,
- Format_ARGB32_Premultiplied,
- Format_RGB32,
- Format_RGB24,
- Format_RGB565,
- Format_RGB555,
- Format_ARGB8565_Premultiplied,
- Format_BGRA32,
- Format_BGRA32_Premultiplied,
- Format_ABGR32,
- Format_BGR32,
- Format_BGR24,
- Format_BGR565,
- Format_BGR555,
- Format_BGRA5658_Premultiplied,
-
- Format_AYUV444,
- Format_AYUV444_Premultiplied,
- Format_YUV444,
- Format_YUV420P,
- Format_YUV422P,
- Format_YV12,
- Format_UYVY,
- Format_YUYV,
- Format_NV12,
- Format_NV21,
- Format_IMC1,
- Format_IMC2,
- Format_IMC3,
- Format_IMC4,
- Format_Y8,
- Format_Y16,
-
- Format_P010LE,
- Format_P010BE,
- Format_P016LE,
- Format_P016BE,
-
- Format_Jpeg,
- };
-#ifndef Q_QDOC
- static constexpr int NPixelFormats = Format_Jpeg + 1;
-#endif
enum HandleType
{
@@ -133,7 +87,7 @@ public:
QAbstractVideoBuffer *buffer() const;
bool isValid() const;
- PixelFormat pixelFormat() const;
+ QVideoSurfaceFormat::PixelFormat pixelFormat() const;
QVideoSurfaceFormat surfaceFormat() const;
QVideoFrame::HandleType handleType() const;
@@ -171,9 +125,6 @@ public:
QImage image() const;
- static PixelFormat pixelFormatFromImageFormat(QImage::Format format);
- static QImage::Format imageFormatFromPixelFormat(PixelFormat format);
-
private:
QExplicitlySharedDataPointer<QVideoFramePrivate> d;
};
@@ -183,7 +134,6 @@ Q_DECLARE_METATYPE(QVideoFrame);
#ifndef QT_NO_DEBUG_STREAM
Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, const QVideoFrame&);
Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, QVideoFrame::HandleType);
-Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, QVideoFrame::PixelFormat);
#endif
QT_END_NAMESPACE
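
As a caller-side illustration (a sketch, not part of the change; frameFromImage is a hypothetical helper), the image-to-pixel-format conversion that QVideoFrame's constructor now routes through QVideoSurfaceFormat looks like this when done by hand:

#include <QImage>
#include <QVideoFrame>
#include <QVideoSurfaceFormat>

// Sketch: the conversion helper and the enum are now reached through QVideoSurfaceFormat.
static QVideoFrame frameFromImage(const QImage &image)
{
    const QVideoSurfaceFormat::PixelFormat pf =
            QVideoSurfaceFormat::pixelFormatFromImageFormat(image.format());

    if (pf == QVideoSurfaceFormat::Format_Invalid)
        return QVideoFrame();    // no video-pixel equivalent for this QImage format

    return QVideoFrame(image);   // QVideoFrame::pixelFormat() now returns the relocated enum
}

This mirrors the updated QVideoFrame(const QImage &) constructor in qvideoframe.cpp above.
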
diff --git a/src/multimedia/video/qvideoframeconversionhelper.cpp b/src/multimedia/video/qvideoframeconversionhelper.cpp
index b2d724703..7612c88c3 100644
--- a/src/multimedia/video/qvideoframeconversionhelper.cpp
+++ b/src/multimedia/video/qvideoframeconversionhelper.cpp
@@ -624,7 +624,7 @@ static void QT_FASTCALL qt_convert_P016BE_to_ARGB32(const QVideoFrame &frame, uc
}
-static VideoFrameConvertFunc qConvertFuncs[QVideoFrame::NPixelFormats] = {
+static VideoFrameConvertFunc qConvertFuncs[QVideoSurfaceFormat::NPixelFormats] = {
/* Format_Invalid */ nullptr, // Not needed
/* Format_ARGB32 */ nullptr, // Not needed
/* Format_ARGB32_Premultiplied */ nullptr, // Not needed
@@ -669,30 +669,30 @@ static void qInitConvertFuncsAsm()
#ifdef QT_COMPILER_SUPPORTS_SSE2
extern void QT_FASTCALL qt_convert_BGRA32_to_ARGB32_sse2(const QVideoFrame&, uchar*);
if (qCpuHasFeature(SSE2)){
- qConvertFuncs[QVideoFrame::Format_BGRA32] = qt_convert_BGRA32_to_ARGB32_sse2;
- qConvertFuncs[QVideoFrame::Format_BGRA32_Premultiplied] = qt_convert_BGRA32_to_ARGB32_sse2;
- qConvertFuncs[QVideoFrame::Format_BGR32] = qt_convert_BGRA32_to_ARGB32_sse2;
+ qConvertFuncs[QVideoSurfaceFormat::Format_BGRA32] = qt_convert_BGRA32_to_ARGB32_sse2;
+ qConvertFuncs[QVideoSurfaceFormat::Format_BGRA32_Premultiplied] = qt_convert_BGRA32_to_ARGB32_sse2;
+ qConvertFuncs[QVideoSurfaceFormat::Format_BGR32] = qt_convert_BGRA32_to_ARGB32_sse2;
}
#endif
#ifdef QT_COMPILER_SUPPORTS_SSSE3
extern void QT_FASTCALL qt_convert_BGRA32_to_ARGB32_ssse3(const QVideoFrame&, uchar*);
if (qCpuHasFeature(SSSE3)){
- qConvertFuncs[QVideoFrame::Format_BGRA32] = qt_convert_BGRA32_to_ARGB32_ssse3;
- qConvertFuncs[QVideoFrame::Format_BGRA32_Premultiplied] = qt_convert_BGRA32_to_ARGB32_ssse3;
- qConvertFuncs[QVideoFrame::Format_BGR32] = qt_convert_BGRA32_to_ARGB32_ssse3;
+ qConvertFuncs[QVideoSurfaceFormat::Format_BGRA32] = qt_convert_BGRA32_to_ARGB32_ssse3;
+ qConvertFuncs[QVideoSurfaceFormat::Format_BGRA32_Premultiplied] = qt_convert_BGRA32_to_ARGB32_ssse3;
+ qConvertFuncs[QVideoSurfaceFormat::Format_BGR32] = qt_convert_BGRA32_to_ARGB32_ssse3;
}
#endif
#ifdef QT_COMPILER_SUPPORTS_AVX2
extern void QT_FASTCALL qt_convert_BGRA32_to_ARGB32_avx2(const QVideoFrame&, uchar*);
if (qCpuHasFeature(AVX2)){
- qConvertFuncs[QVideoFrame::Format_BGRA32] = qt_convert_BGRA32_to_ARGB32_avx2;
- qConvertFuncs[QVideoFrame::Format_BGRA32_Premultiplied] = qt_convert_BGRA32_to_ARGB32_avx2;
- qConvertFuncs[QVideoFrame::Format_BGR32] = qt_convert_BGRA32_to_ARGB32_avx2;
+ qConvertFuncs[QVideoSurfaceFormat::Format_BGRA32] = qt_convert_BGRA32_to_ARGB32_avx2;
+ qConvertFuncs[QVideoSurfaceFormat::Format_BGRA32_Premultiplied] = qt_convert_BGRA32_to_ARGB32_avx2;
+ qConvertFuncs[QVideoSurfaceFormat::Format_BGR32] = qt_convert_BGRA32_to_ARGB32_avx2;
}
#endif
}
-VideoFrameConvertFunc qConverterForFormat(QVideoFrame::PixelFormat format)
+VideoFrameConvertFunc qConverterForFormat(QVideoSurfaceFormat::PixelFormat format)
{
static bool initAsmFuncsDone = false;
if (!initAsmFuncsDone) {
diff --git a/src/multimedia/video/qvideoframeconversionhelper_p.h b/src/multimedia/video/qvideoframeconversionhelper_p.h
index 0fdcbf8a3..58edd48f2 100644
--- a/src/multimedia/video/qvideoframeconversionhelper_p.h
+++ b/src/multimedia/video/qvideoframeconversionhelper_p.h
@@ -57,7 +57,7 @@
// Converts to RGB32 or ARGB32_Premultiplied
typedef void (QT_FASTCALL *VideoFrameConvertFunc)(const QVideoFrame &frame, uchar *output);
-VideoFrameConvertFunc qConverterForFormat(QVideoFrame::PixelFormat format);
+VideoFrameConvertFunc qConverterForFormat(QVideoSurfaceFormat::PixelFormat format);
inline quint32 qConvertBGRA32ToARGB32(quint32 bgra)
{
diff --git a/src/multimedia/video/qvideosink.cpp b/src/multimedia/video/qvideosink.cpp
index 8b6b088d2..fa9177e21 100644
--- a/src/multimedia/video/qvideosink.cpp
+++ b/src/multimedia/video/qvideosink.cpp
@@ -233,7 +233,7 @@ void QVideoSink::paint(QPainter *painter, const QVideoFrame &f)
return;
}
- auto imageFormat = QVideoFrame::imageFormatFromPixelFormat(frame.pixelFormat());
+ auto imageFormat = QVideoSurfaceFormat::imageFormatFromPixelFormat(frame.pixelFormat());
// Do not render into ARGB32 images using QPainter.
// Using QImage::Format_ARGB32_Premultiplied is significantly faster.
if (imageFormat == QImage::Format_ARGB32)
diff --git a/src/multimedia/video/qvideosurfaceformat.cpp b/src/multimedia/video/qvideosurfaceformat.cpp
index 986734a65..3ba143079 100644
--- a/src/multimedia/video/qvideosurfaceformat.cpp
+++ b/src/multimedia/video/qvideosurfaceformat.cpp
@@ -54,7 +54,7 @@ public:
QVideoSurfaceFormatPrivate(
const QSize &size,
- QVideoFrame::PixelFormat format)
+ QVideoSurfaceFormat::PixelFormat format)
: pixelFormat(format)
, frameSize(size)
, viewport(QPoint(0, 0), size)
@@ -80,7 +80,7 @@ public:
return qAbs(r1 - r2) <= 0.00001 * qMin(qAbs(r1), qAbs(r2));
}
- QVideoFrame::PixelFormat pixelFormat = QVideoFrame::Format_Invalid;
+ QVideoSurfaceFormat::PixelFormat pixelFormat = QVideoSurfaceFormat::Format_Invalid;
QVideoSurfaceFormat::Direction scanLineDirection = QVideoSurfaceFormat::TopToBottom;
QSize frameSize;
QVideoSurfaceFormat::YCbCrColorSpace ycbcrColorSpace = QVideoSurfaceFormat::YCbCr_Undefined;
@@ -169,7 +169,7 @@ QVideoSurfaceFormat::QVideoSurfaceFormat()
\a size and pixel \a format.
*/
QVideoSurfaceFormat::QVideoSurfaceFormat(
- const QSize& size, QVideoFrame::PixelFormat format)
+ const QSize& size, QVideoSurfaceFormat::PixelFormat format)
: d(new QVideoSurfaceFormatPrivate(size, format))
{
}
@@ -196,7 +196,7 @@ QVideoSurfaceFormat::~QVideoSurfaceFormat() = default;
*/
bool QVideoSurfaceFormat::isValid() const
{
- return d->pixelFormat != QVideoFrame::Format_Invalid && d->frameSize.isValid();
+ return d->pixelFormat != Format_Invalid && d->frameSize.isValid();
}
/*!
@@ -218,7 +218,7 @@ bool QVideoSurfaceFormat::operator !=(const QVideoSurfaceFormat &other) const
/*!
Returns the pixel format of frames in a video stream.
*/
-QVideoFrame::PixelFormat QVideoSurfaceFormat::pixelFormat() const
+QVideoSurfaceFormat::PixelFormat QVideoSurfaceFormat::pixelFormat() const
{
return d->pixelFormat;
}
@@ -383,6 +383,104 @@ QSize QVideoSurfaceFormat::sizeHint() const
return d->viewport.size();
}
+
+/*!
+ Returns a video pixel format equivalent to an image \a format. If there is no equivalent
+ format QVideoSurfaceFormat::Format_Invalid is returned instead.
+
+ \note In general \l QImage does not handle YUV formats.
+
+*/
+QVideoSurfaceFormat::PixelFormat QVideoSurfaceFormat::pixelFormatFromImageFormat(QImage::Format format)
+{
+ switch (format) {
+ case QImage::Format_RGB32:
+ case QImage::Format_RGBX8888:
+ return QVideoSurfaceFormat::Format_RGB32;
+ case QImage::Format_ARGB32:
+ case QImage::Format_RGBA8888:
+ return QVideoSurfaceFormat::Format_ARGB32;
+ case QImage::Format_ARGB32_Premultiplied:
+ case QImage::Format_RGBA8888_Premultiplied:
+ return QVideoSurfaceFormat::Format_ARGB32_Premultiplied;
+ case QImage::Format_RGB16:
+ return QVideoSurfaceFormat::Format_RGB565;
+ case QImage::Format_ARGB8565_Premultiplied:
+ return QVideoSurfaceFormat::Format_ARGB8565_Premultiplied;
+ case QImage::Format_RGB555:
+ return QVideoSurfaceFormat::Format_RGB555;
+ case QImage::Format_RGB888:
+ return QVideoSurfaceFormat::Format_RGB24;
+ case QImage::Format_Grayscale8:
+ return QVideoSurfaceFormat::Format_Y8;
+ case QImage::Format_Grayscale16:
+ return QVideoSurfaceFormat::Format_Y16;
+ default:
+ return QVideoSurfaceFormat::Format_Invalid;
+ }
+}
+
+/*!
+ Returns an image format equivalent to a video frame pixel \a format. If there is no equivalent
+ format QImage::Format_Invalid is returned instead.
+
+ \note In general \l QImage does not handle YUV formats.
+
+*/
+QImage::Format QVideoSurfaceFormat::imageFormatFromPixelFormat(QVideoSurfaceFormat::PixelFormat format)
+{
+ switch (format) {
+ case QVideoSurfaceFormat::Format_ARGB32:
+ return QImage::Format_ARGB32;
+ case QVideoSurfaceFormat::Format_ARGB32_Premultiplied:
+ return QImage::Format_ARGB32_Premultiplied;
+ case QVideoSurfaceFormat::Format_RGB32:
+ return QImage::Format_RGB32;
+ case QVideoSurfaceFormat::Format_RGB24:
+ return QImage::Format_RGB888;
+ case QVideoSurfaceFormat::Format_RGB565:
+ return QImage::Format_RGB16;
+ case QVideoSurfaceFormat::Format_RGB555:
+ return QImage::Format_RGB555;
+ case QVideoSurfaceFormat::Format_ARGB8565_Premultiplied:
+ return QImage::Format_ARGB8565_Premultiplied;
+ case QVideoSurfaceFormat::Format_Y8:
+ return QImage::Format_Grayscale8;
+ case QVideoSurfaceFormat::Format_Y16:
+ return QImage::Format_Grayscale16;
+ case QVideoSurfaceFormat::Format_ABGR32:
+ case QVideoSurfaceFormat::Format_BGRA32:
+ case QVideoSurfaceFormat::Format_BGRA32_Premultiplied:
+ case QVideoSurfaceFormat::Format_BGR32:
+ case QVideoSurfaceFormat::Format_BGR24:
+ case QVideoSurfaceFormat::Format_BGR565:
+ case QVideoSurfaceFormat::Format_BGR555:
+ case QVideoSurfaceFormat::Format_BGRA5658_Premultiplied:
+ case QVideoSurfaceFormat::Format_AYUV444:
+ case QVideoSurfaceFormat::Format_AYUV444_Premultiplied:
+ case QVideoSurfaceFormat::Format_YUV444:
+ case QVideoSurfaceFormat::Format_YUV420P:
+ case QVideoSurfaceFormat::Format_YUV422P:
+ case QVideoSurfaceFormat::Format_YV12:
+ case QVideoSurfaceFormat::Format_UYVY:
+ case QVideoSurfaceFormat::Format_YUYV:
+ case QVideoSurfaceFormat::Format_NV12:
+ case QVideoSurfaceFormat::Format_NV21:
+ case QVideoSurfaceFormat::Format_IMC1:
+ case QVideoSurfaceFormat::Format_IMC2:
+ case QVideoSurfaceFormat::Format_IMC3:
+ case QVideoSurfaceFormat::Format_IMC4:
+ case QVideoSurfaceFormat::Format_P010LE:
+ case QVideoSurfaceFormat::Format_P010BE:
+ case QVideoSurfaceFormat::Format_P016LE:
+ case QVideoSurfaceFormat::Format_P016BE:
+ case QVideoSurfaceFormat::Format_Jpeg:
+ case QVideoSurfaceFormat::Format_Invalid:
+ return QImage::Format_Invalid;
+ }
+ return QImage::Format_Invalid;
+}
+
#ifndef QT_NO_DEBUG_STREAM
QDebug operator<<(QDebug dbg, QVideoSurfaceFormat::YCbCrColorSpace cs)
{
@@ -443,6 +541,91 @@ QDebug operator<<(QDebug dbg, const QVideoSurfaceFormat &f)
return dbg;
}
+
+QDebug operator<<(QDebug dbg, QVideoSurfaceFormat::PixelFormat pf)
+{
+ QDebugStateSaver saver(dbg);
+ dbg.nospace();
+ switch (pf) {
+ case QVideoSurfaceFormat::Format_Invalid:
+ return dbg << "Format_Invalid";
+ case QVideoSurfaceFormat::Format_ARGB32:
+ return dbg << "Format_ARGB32";
+ case QVideoSurfaceFormat::Format_ARGB32_Premultiplied:
+ return dbg << "Format_ARGB32_Premultiplied";
+ case QVideoSurfaceFormat::Format_RGB32:
+ return dbg << "Format_RGB32";
+ case QVideoSurfaceFormat::Format_RGB24:
+ return dbg << "Format_RGB24";
+ case QVideoSurfaceFormat::Format_RGB565:
+ return dbg << "Format_RGB565";
+ case QVideoSurfaceFormat::Format_RGB555:
+ return dbg << "Format_RGB555";
+ case QVideoSurfaceFormat::Format_ARGB8565_Premultiplied:
+ return dbg << "Format_ARGB8565_Premultiplied";
+ case QVideoSurfaceFormat::Format_BGRA32:
+ return dbg << "Format_BGRA32";
+ case QVideoSurfaceFormat::Format_BGRA32_Premultiplied:
+ return dbg << "Format_BGRA32_Premultiplied";
+ case QVideoSurfaceFormat::Format_ABGR32:
+ return dbg << "Format_ABGR32";
+ case QVideoSurfaceFormat::Format_BGR32:
+ return dbg << "Format_BGR32";
+ case QVideoSurfaceFormat::Format_BGR24:
+ return dbg << "Format_BGR24";
+ case QVideoSurfaceFormat::Format_BGR565:
+ return dbg << "Format_BGR565";
+ case QVideoSurfaceFormat::Format_BGR555:
+ return dbg << "Format_BGR555";
+ case QVideoSurfaceFormat::Format_BGRA5658_Premultiplied:
+ return dbg << "Format_BGRA5658_Premultiplied";
+ case QVideoSurfaceFormat::Format_AYUV444:
+ return dbg << "Format_AYUV444";
+ case QVideoSurfaceFormat::Format_AYUV444_Premultiplied:
+ return dbg << "Format_AYUV444_Premultiplied";
+ case QVideoSurfaceFormat::Format_YUV444:
+ return dbg << "Format_YUV444";
+ case QVideoSurfaceFormat::Format_YUV420P:
+ return dbg << "Format_YUV420P";
+ case QVideoSurfaceFormat::Format_YUV422P:
+ return dbg << "Format_YUV422P";
+ case QVideoSurfaceFormat::Format_YV12:
+ return dbg << "Format_YV12";
+ case QVideoSurfaceFormat::Format_UYVY:
+ return dbg << "Format_UYVY";
+ case QVideoSurfaceFormat::Format_YUYV:
+ return dbg << "Format_YUYV";
+ case QVideoSurfaceFormat::Format_NV12:
+ return dbg << "Format_NV12";
+ case QVideoSurfaceFormat::Format_NV21:
+ return dbg << "Format_NV21";
+ case QVideoSurfaceFormat::Format_IMC1:
+ return dbg << "Format_IMC1";
+ case QVideoSurfaceFormat::Format_IMC2:
+ return dbg << "Format_IMC2";
+ case QVideoSurfaceFormat::Format_IMC3:
+ return dbg << "Format_IMC3";
+ case QVideoSurfaceFormat::Format_IMC4:
+ return dbg << "Format_IMC4";
+ case QVideoSurfaceFormat::Format_Y8:
+ return dbg << "Format_Y8";
+ case QVideoSurfaceFormat::Format_Y16:
+ return dbg << "Format_Y16";
+ case QVideoSurfaceFormat::Format_P010LE:
+ return dbg << "Format_P010LE";
+ case QVideoSurfaceFormat::Format_P010BE:
+ return dbg << "Format_P010BE";
+ case QVideoSurfaceFormat::Format_P016LE:
+ return dbg << "Format_P016LE";
+ case QVideoSurfaceFormat::Format_P016BE:
+ return dbg << "Format_P016BE";
+ case QVideoSurfaceFormat::Format_Jpeg:
+ return dbg << "Format_Jpeg";
+
+ default:
+ return dbg << QString(QLatin1String("UserType(%1)" )).arg(int(pf)).toLatin1().constData();
+ }
+}
#endif
QT_END_NAMESPACE
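
The relocated helpers and the PixelFormat stream operator added above can be exercised as follows; a minimal sketch, with dumpFormat as a hypothetical function:

#include <QDebug>
#include <QImage>
#include <QVideoSurfaceFormat>

// Sketch: the new operator<<(QDebug, QVideoSurfaceFormat::PixelFormat) prints names such as "Format_NV12".
static void dumpFormat(const QVideoSurfaceFormat &format)
{
    qDebug() << "pixel format:" << format.pixelFormat();

    // Round-trip to QImage::Format where an equivalent exists; YUV formats yield QImage::Format_Invalid.
    const QImage::Format img =
            QVideoSurfaceFormat::imageFormatFromPixelFormat(format.pixelFormat());
    qDebug() << "QImage equivalent:" << int(img);
}
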
diff --git a/src/multimedia/video/qvideosurfaceformat.h b/src/multimedia/video/qvideosurfaceformat.h
index 694dd9a79..5b9c9fcb8 100644
--- a/src/multimedia/video/qvideosurfaceformat.h
+++ b/src/multimedia/video/qvideosurfaceformat.h
@@ -40,12 +40,13 @@
#ifndef QVIDEOSURFACEFORMAT_H
#define QVIDEOSURFACEFORMAT_H
+#include <QtMultimedia/qtmultimediaglobal.h>
+
#include <QtCore/qlist.h>
#include <QtCore/qpair.h>
#include <QtCore/qshareddata.h>
#include <QtCore/qsize.h>
#include <QtGui/qimage.h>
-#include <QtMultimedia/qvideoframe.h>
QT_BEGIN_NAMESPACE
@@ -57,6 +58,53 @@ class QVideoSurfaceFormatPrivate;
class Q_MULTIMEDIA_EXPORT QVideoSurfaceFormat
{
public:
+ enum PixelFormat
+ {
+ Format_Invalid,
+ Format_ARGB32,
+ Format_ARGB32_Premultiplied,
+ Format_RGB32,
+ Format_RGB24,
+ Format_RGB565,
+ Format_RGB555,
+ Format_ARGB8565_Premultiplied,
+ Format_BGRA32,
+ Format_BGRA32_Premultiplied,
+ Format_ABGR32,
+ Format_BGR32,
+ Format_BGR24,
+ Format_BGR565,
+ Format_BGR555,
+ Format_BGRA5658_Premultiplied,
+
+ Format_AYUV444,
+ Format_AYUV444_Premultiplied,
+ Format_YUV444,
+ Format_YUV420P,
+ Format_YUV422P,
+ Format_YV12,
+ Format_UYVY,
+ Format_YUYV,
+ Format_NV12,
+ Format_NV21,
+ Format_IMC1,
+ Format_IMC2,
+ Format_IMC3,
+ Format_IMC4,
+ Format_Y8,
+ Format_Y16,
+
+ Format_P010LE,
+ Format_P010BE,
+ Format_P016LE,
+ Format_P016BE,
+
+ Format_Jpeg,
+ };
+#ifndef Q_QDOC
+ static constexpr int NPixelFormats = Format_Jpeg + 1;
+#endif
+
enum Direction
{
TopToBottom,
@@ -76,7 +124,7 @@ public:
QVideoSurfaceFormat();
QVideoSurfaceFormat(
const QSize &size,
- QVideoFrame::PixelFormat pixelFormat);
+ QVideoSurfaceFormat::PixelFormat pixelFormat);
QVideoSurfaceFormat(const QVideoSurfaceFormat &format);
~QVideoSurfaceFormat();
@@ -87,7 +135,7 @@ public:
bool isValid() const;
- QVideoFrame::PixelFormat pixelFormat() const;
+ QVideoSurfaceFormat::PixelFormat pixelFormat() const;
QSize frameSize() const;
void setFrameSize(const QSize &size);
@@ -113,6 +161,9 @@ public:
QSize sizeHint() const;
+ static PixelFormat pixelFormatFromImageFormat(QImage::Format format);
+ static QImage::Format imageFormatFromPixelFormat(PixelFormat format);
+
private:
QSharedDataPointer<QVideoSurfaceFormatPrivate> d;
};
@@ -121,6 +172,7 @@ private:
Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, const QVideoSurfaceFormat &);
Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, QVideoSurfaceFormat::Direction);
Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, QVideoSurfaceFormat::YCbCrColorSpace);
+Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, QVideoSurfaceFormat::PixelFormat);
#endif
QT_END_NAMESPACE
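
Since NPixelFormats moves along with the enum, per-format lookup tables are now sized against QVideoSurfaceFormat, as the arrays in qvideoframe.cpp and qvideoframeconversionhelper.cpp above already show; a minimal sketch with a hypothetical bytesPerPixelHint table:

#include <QVideoSurfaceFormat>

// Sketch: a hypothetical per-format table indexed by the relocated enum.
static int bytesPerPixelHint[QVideoSurfaceFormat::NPixelFormats] = {};

static void initBytesPerPixelHint()
{
    bytesPerPixelHint[QVideoSurfaceFormat::Format_ARGB32] = 4;
    bytesPerPixelHint[QVideoSurfaceFormat::Format_RGB24]  = 3;
    bytesPerPixelHint[QVideoSurfaceFormat::Format_RGB565] = 2;
}
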
diff --git a/src/multimedia/video/qvideosurfaces.cpp b/src/multimedia/video/qvideosurfaces.cpp
index 1870e6ebf..b3e07fb26 100644
--- a/src/multimedia/video/qvideosurfaces.cpp
+++ b/src/multimedia/video/qvideosurfaces.cpp
@@ -57,10 +57,10 @@ QVideoSurfaces::QVideoSurfaces(const QList<QAbstractVideoSurface *> &s, QObject
QVideoSurfaces::~QVideoSurfaces() = default;
-QList<QVideoFrame::PixelFormat> QVideoSurfaces::supportedPixelFormats(QVideoFrame::HandleType type) const
+QList<QVideoSurfaceFormat::PixelFormat> QVideoSurfaces::supportedPixelFormats(QVideoFrame::HandleType type) const
{
- QList<QVideoFrame::PixelFormat> result;
- QMap<QVideoFrame::PixelFormat, int> formats;
+ QList<QVideoSurfaceFormat::PixelFormat> result;
+ QMap<QVideoSurfaceFormat::PixelFormat, int> formats;
for (auto &s : m_surfaces) {
for (auto &p : s->supportedPixelFormats(type)) {
if (++formats[p] == m_surfaces.size())
diff --git a/src/multimedia/video/qvideosurfaces_p.h b/src/multimedia/video/qvideosurfaces_p.h
index f72bd0565..4d9a8a9df 100644
--- a/src/multimedia/video/qvideosurfaces_p.h
+++ b/src/multimedia/video/qvideosurfaces_p.h
@@ -62,7 +62,7 @@ public:
QVideoSurfaces(const QList<QAbstractVideoSurface *> &surfaces, QObject *parent = nullptr);
~QVideoSurfaces();
- QList<QVideoFrame::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType type) const override;
+ QList<QVideoSurfaceFormat::PixelFormat> supportedPixelFormats(QVideoFrame::HandleType type) const override;
bool start(const QVideoSurfaceFormat &format) override;
void stop() override;
bool present(const QVideoFrame &frame) override;