author      Lars Knoll <lars.knoll@qt.io>   2021-04-12 09:36:05 +0200
committer   Lars Knoll <lars.knoll@qt.io>   2021-04-14 14:01:10 +0000
commit      2656b64825f702e25b564d42e27b40d060c65bb0 (patch)
tree        520ac81b60cd27e191cb593ed6ea831c8e98da33 /src/multimedia/video
parent      50309974fd82f52877f96ee3cfffafa24600d397 (diff)
Rename QVideoSurfaceFormat to QVideoFrameFormat
The class is used exclusively together with video frames to describe their
format, so the name should reflect that.

Change-Id: I10bec7a0556b22c69ac790a99282e1376ce4af97
Reviewed-by: Doris Verria <doris.verria@qt.io>
Reviewed-by: Lars Knoll <lars.knoll@qt.io>
Diffstat (limited to 'src/multimedia/video')
-rw-r--r--   src/multimedia/video/qvideoframe.cpp                    82
-rw-r--r--   src/multimedia/video/qvideoframe.h                      10
-rw-r--r--   src/multimedia/video/qvideoframeconversionhelper.cpp    22
-rw-r--r--   src/multimedia/video/qvideoframeconversionhelper_p.h     2
-rw-r--r--   src/multimedia/video/qvideoframeformat.cpp (renamed from src/multimedia/video/qvideosurfaceformat.cpp)   234
-rw-r--r--   src/multimedia/video/qvideoframeformat.h (renamed from src/multimedia/video/qvideosurfaceformat.h)        32
-rw-r--r--   src/multimedia/video/qvideosink.cpp                     10
-rw-r--r--   src/multimedia/video/qvideosink.h                        2
-rw-r--r--   src/multimedia/video/qvideotexturehelper.cpp           126
-rw-r--r--   src/multimedia/video/qvideotexturehelper_p.h            10
-rw-r--r--   src/multimedia/video/video.pri                           4
11 files changed, 267 insertions, 267 deletions
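
The patch is a mechanical rename; the public API keeps the same shape. The following is a minimal sketch of client code after this change — it is not part of the patch, and the include paths and constructor signatures are taken from the headers changed below:

    // Sketch only, not part of this commit: the renamed class in use.
    #include <QtMultimedia/qvideoframe.h>
    #include <QtMultimedia/qvideoframeformat.h>   // formerly <QtMultimedia/qvideosurfaceformat.h>
    #include <QtCore/qsize.h>

    int main()
    {
        // Formerly QVideoSurfaceFormat(size, QVideoSurfaceFormat::Format_ARGB32).
        QVideoFrameFormat format(QSize(640, 480), QVideoFrameFormat::Format_ARGB32);

        // The QVideoFrame constructors now take a QVideoFrameFormat.
        QVideoFrame frame(640 * 480 * 4 /*bytes*/, 640 * 4 /*bytesPerLine*/, format);

        return frame.isValid() ? 0 : 1;
    }
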
diff --git a/src/multimedia/video/qvideoframe.cpp b/src/multimedia/video/qvideoframe.cpp
index 28516c60a..9995b9708 100644
--- a/src/multimedia/video/qvideoframe.cpp
+++ b/src/multimedia/video/qvideoframe.cpp
@@ -42,7 +42,7 @@
#include "qimagevideobuffer_p.h"
#include "qmemoryvideobuffer_p.h"
#include "qvideoframeconversionhelper_p.h"
-#include "qvideosurfaceformat.h"
+#include "qvideoframeformat.h"
#include <qimage.h>
#include <qmutex.h>
@@ -54,7 +54,7 @@
#include <QDebug>
QT_BEGIN_NAMESPACE
-static bool pixelFormatHasAlpha[QVideoSurfaceFormat::NPixelFormats] =
+static bool pixelFormatHasAlpha[QVideoFrameFormat::NPixelFormats] =
{
false, //Format_Invalid,
true, //Format_ARGB32,
@@ -93,7 +93,7 @@ class QVideoFramePrivate : public QSharedData
{
public:
QVideoFramePrivate() = default;
- QVideoFramePrivate(const QVideoSurfaceFormat &format)
+ QVideoFramePrivate(const QVideoFrameFormat &format)
: format(format)
{
}
@@ -106,7 +106,7 @@ public:
qint64 startTime = -1;
qint64 endTime = -1;
QAbstractVideoBuffer::MapData mapData;
- QVideoSurfaceFormat format;
+ QVideoFrameFormat format;
QAbstractVideoBuffer *buffer = nullptr;
int mappedCount = 0;
QMutex mapMutex;
@@ -149,7 +149,7 @@ private:
*/
/*!
- \enum QVideoSurfaceFormat::PixelFormat
+ \enum QVideoFrameFormat::PixelFormat
Enumerates video data types.
@@ -267,7 +267,7 @@ QVideoFrame::QVideoFrame()
\note This doesn't increment the reference count of the video buffer.
*/
-QVideoFrame::QVideoFrame(QAbstractVideoBuffer *buffer, const QVideoSurfaceFormat &format)
+QVideoFrame::QVideoFrame(QAbstractVideoBuffer *buffer, const QVideoFrameFormat &format)
: d(new QVideoFramePrivate(format))
{
d->buffer = buffer;
@@ -279,7 +279,7 @@ QVideoFrame::QVideoFrame(QAbstractVideoBuffer *buffer, const QVideoSurfaceFormat
The \a bytesPerLine (stride) is the length of each scan line in bytes, and \a bytes is the total
number of bytes that must be allocated for the frame.
*/
-QVideoFrame::QVideoFrame(int bytes, int bytesPerLine, const QVideoSurfaceFormat &format)
+QVideoFrame::QVideoFrame(int bytes, int bytesPerLine, const QVideoFrameFormat &format)
: d(new QVideoFramePrivate(format))
{
if (bytes > 0) {
@@ -301,8 +301,8 @@ QVideoFrame::QVideoFrame(int bytes, int bytesPerLine, const QVideoSurfaceFormat
\sa pixelFormatFromImageFormat()
*/
QVideoFrame::QVideoFrame(const QImage &image)
- : d(new QVideoFramePrivate(QVideoSurfaceFormat(image.size(),
- QVideoSurfaceFormat::pixelFormatFromImageFormat(image.format()))))
+ : d(new QVideoFramePrivate(QVideoFrameFormat(image.size(),
+ QVideoFrameFormat::pixelFormatFromImageFormat(image.format()))))
{
d->buffer = new QImageVideoBuffer(image);
}
@@ -372,7 +372,7 @@ bool QVideoFrame::isValid() const
/*!
Returns the pixel format of this video frame.
*/
-QVideoSurfaceFormat::PixelFormat QVideoFrame::pixelFormat() const
+QVideoFrameFormat::PixelFormat QVideoFrame::pixelFormat() const
{
return d->format.pixelFormat();
}
@@ -380,7 +380,7 @@ QVideoSurfaceFormat::PixelFormat QVideoFrame::pixelFormat() const
/*!
Returns the surface format of this video frame.
*/
-QVideoSurfaceFormat QVideoFrame::surfaceFormat() const
+QVideoFrameFormat QVideoFrame::surfaceFormat() const
{
return d->format;
}
@@ -544,26 +544,26 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
auto pixelFmt = d->format.pixelFormat();
// If the plane count is 1 derive the additional planes for planar formats.
switch (pixelFmt) {
- case QVideoSurfaceFormat::Format_Invalid:
- case QVideoSurfaceFormat::Format_ARGB32:
- case QVideoSurfaceFormat::Format_ARGB32_Premultiplied:
- case QVideoSurfaceFormat::Format_RGB32:
- case QVideoSurfaceFormat::Format_BGRA32:
- case QVideoSurfaceFormat::Format_BGRA32_Premultiplied:
- case QVideoSurfaceFormat::Format_ABGR32:
- case QVideoSurfaceFormat::Format_BGR32:
- case QVideoSurfaceFormat::Format_AYUV444:
- case QVideoSurfaceFormat::Format_AYUV444_Premultiplied:
- case QVideoSurfaceFormat::Format_UYVY:
- case QVideoSurfaceFormat::Format_YUYV:
- case QVideoSurfaceFormat::Format_Y8:
- case QVideoSurfaceFormat::Format_Y16:
- case QVideoSurfaceFormat::Format_Jpeg:
+ case QVideoFrameFormat::Format_Invalid:
+ case QVideoFrameFormat::Format_ARGB32:
+ case QVideoFrameFormat::Format_ARGB32_Premultiplied:
+ case QVideoFrameFormat::Format_RGB32:
+ case QVideoFrameFormat::Format_BGRA32:
+ case QVideoFrameFormat::Format_BGRA32_Premultiplied:
+ case QVideoFrameFormat::Format_ABGR32:
+ case QVideoFrameFormat::Format_BGR32:
+ case QVideoFrameFormat::Format_AYUV444:
+ case QVideoFrameFormat::Format_AYUV444_Premultiplied:
+ case QVideoFrameFormat::Format_UYVY:
+ case QVideoFrameFormat::Format_YUYV:
+ case QVideoFrameFormat::Format_Y8:
+ case QVideoFrameFormat::Format_Y16:
+ case QVideoFrameFormat::Format_Jpeg:
// Single plane or opaque format.
break;
- case QVideoSurfaceFormat::Format_YUV420P:
- case QVideoSurfaceFormat::Format_YUV422P:
- case QVideoSurfaceFormat::Format_YV12: {
+ case QVideoFrameFormat::Format_YUV420P:
+ case QVideoFrameFormat::Format_YUV422P:
+ case QVideoFrameFormat::Format_YV12: {
// The UV stride is usually half the Y stride and is 32-bit aligned.
// However it's not always the case, at least on Windows where the
// UV planes are sometimes not aligned.
@@ -571,7 +571,7 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
// have a correct stride.
const int height = this->height();
const int yStride = d->mapData.bytesPerLine[0];
- const int uvHeight = pixelFmt == QVideoSurfaceFormat::Format_YUV422P ? height : height / 2;
+ const int uvHeight = pixelFmt == QVideoFrameFormat::Format_YUV422P ? height : height / 2;
const int uvStride = (d->mapData.nBytes - (yStride * height)) / uvHeight / 2;
// Three planes, the second and third vertically (and horizontally for other than Format_YUV422P formats) subsampled.
@@ -581,20 +581,20 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
d->mapData.data[2] = d->mapData.data[1] + (uvStride * uvHeight);
break;
}
- case QVideoSurfaceFormat::Format_NV12:
- case QVideoSurfaceFormat::Format_NV21:
- case QVideoSurfaceFormat::Format_IMC2:
- case QVideoSurfaceFormat::Format_IMC4:
- case QVideoSurfaceFormat::Format_P010:
- case QVideoSurfaceFormat::Format_P016: {
+ case QVideoFrameFormat::Format_NV12:
+ case QVideoFrameFormat::Format_NV21:
+ case QVideoFrameFormat::Format_IMC2:
+ case QVideoFrameFormat::Format_IMC4:
+ case QVideoFrameFormat::Format_P010:
+ case QVideoFrameFormat::Format_P016: {
// Semi planar, Full resolution Y plane with interleaved subsampled U and V planes.
d->mapData.nPlanes = 2;
d->mapData.bytesPerLine[1] = d->mapData.bytesPerLine[0];
d->mapData.data[1] = d->mapData.data[0] + (d->mapData.bytesPerLine[0] * height());
break;
}
- case QVideoSurfaceFormat::Format_IMC1:
- case QVideoSurfaceFormat::Format_IMC3: {
+ case QVideoFrameFormat::Format_IMC1:
+ case QVideoFrameFormat::Format_IMC3: {
// Three planes, the second and third vertically and horizontally subsampled,
// but with lines padded to the width of the first plane.
d->mapData.nPlanes = 3;
@@ -644,7 +644,7 @@ void QVideoFrame::unmap()
Returns the number of bytes in a scan line.
\note For planar formats this is the bytes per line of the first plane only. The bytes per line of subsequent
- planes should be calculated as per the frame \l{QVideoSurfaceFormat::PixelFormat}{pixel format}.
+ planes should be calculated as per the frame \l{QVideoFrameFormat::PixelFormat}{pixel format}.
This value is only valid while the frame data is \l {map()}{mapped}.
@@ -824,13 +824,13 @@ QImage QVideoFrame::toImage() const
return result;
// Formats supported by QImage don't need conversion
- QImage::Format imageFormat = QVideoSurfaceFormat::imageFormatFromPixelFormat(frame.pixelFormat());
+ QImage::Format imageFormat = QVideoFrameFormat::imageFormatFromPixelFormat(frame.pixelFormat());
if (imageFormat != QImage::Format_Invalid) {
result = QImage(frame.bits(), frame.width(), frame.height(), frame.bytesPerLine(), imageFormat).copy();
}
// Load from JPG
- else if (frame.pixelFormat() == QVideoSurfaceFormat::Format_Jpeg) {
+ else if (frame.pixelFormat() == QVideoFrameFormat::Format_Jpeg) {
result.loadFromData(frame.bits(), frame.mappedBytes(), "JPG");
}
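
For reference, a minimal sketch (not part of the patch, and assuming a valid frame obtained elsewhere) of how the mapping API above is typically driven after the rename; the QVideoFrame::ReadOnly map mode is assumed from Qt's public MapMode enum, which is not shown in this excerpt:

    #include <QtMultimedia/qvideoframe.h>
    #include <QDebug>
    #include <QImage>

    void inspect(QVideoFrame frame)
    {
        if (!frame.map(QVideoFrame::ReadOnly))   // MapMode value assumed from Qt's public API
            return;

        // pixelFormat() now returns QVideoFrameFormat::PixelFormat.
        qDebug() << frame.pixelFormat()
                 << "first-plane stride:" << frame.bytesPerLine()
                 << "mapped bytes:" << frame.mappedBytes();

        // For planar YUV formats, map() derives the extra plane pointers and
        // strides as in the switch above; toImage() converts to RGB when needed.
        QImage image = frame.toImage();
        qDebug() << "converted image size:" << image.size();

        frame.unmap();
    }
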
diff --git a/src/multimedia/video/qvideoframe.h b/src/multimedia/video/qvideoframe.h
index d2c371c3f..2d5e2d57e 100644
--- a/src/multimedia/video/qvideoframe.h
+++ b/src/multimedia/video/qvideoframe.h
@@ -41,7 +41,7 @@
#define QVIDEOFRAME_H
#include <QtMultimedia/qtmultimediaglobal.h>
-#include <QtMultimedia/qvideosurfaceformat.h>
+#include <QtMultimedia/qvideoframeformat.h>
#include <QtCore/qmetatype.h>
#include <QtCore/qshareddata.h>
@@ -76,8 +76,8 @@ public:
};
QVideoFrame();
- QVideoFrame(QAbstractVideoBuffer *buffer, const QVideoSurfaceFormat &format);
- QVideoFrame(int bytes, int bytesPerLine, const QVideoSurfaceFormat &format);
+ QVideoFrame(QAbstractVideoBuffer *buffer, const QVideoFrameFormat &format);
+ QVideoFrame(int bytes, int bytesPerLine, const QVideoFrameFormat &format);
QVideoFrame(const QImage &image);
QVideoFrame(const QVideoFrame &other);
~QVideoFrame();
@@ -88,9 +88,9 @@ public:
bool isValid() const;
- QVideoSurfaceFormat::PixelFormat pixelFormat() const;
+ QVideoFrameFormat::PixelFormat pixelFormat() const;
- QVideoSurfaceFormat surfaceFormat() const;
+ QVideoFrameFormat surfaceFormat() const;
QVideoFrame::HandleType handleType() const;
QSize size() const;
diff --git a/src/multimedia/video/qvideoframeconversionhelper.cpp b/src/multimedia/video/qvideoframeconversionhelper.cpp
index 346880a29..68cfbccec 100644
--- a/src/multimedia/video/qvideoframeconversionhelper.cpp
+++ b/src/multimedia/video/qvideoframeconversionhelper.cpp
@@ -471,7 +471,7 @@ static void QT_FASTCALL qt_convert_P016_to_ARGB32(const QVideoFrame &frame, ucha
}
-static VideoFrameConvertFunc qConvertFuncs[QVideoSurfaceFormat::NPixelFormats] = {
+static VideoFrameConvertFunc qConvertFuncs[QVideoFrameFormat::NPixelFormats] = {
/* Format_Invalid */ nullptr, // Not needed
/* Format_ARGB32 */ nullptr, // Not needed
/* Format_ARGB32_Premultiplied */ nullptr, // Not needed
@@ -505,30 +505,30 @@ static void qInitConvertFuncsAsm()
#ifdef QT_COMPILER_SUPPORTS_SSE2
extern void QT_FASTCALL qt_convert_BGRA32_to_ARGB32_sse2(const QVideoFrame&, uchar*);
if (qCpuHasFeature(SSE2)){
- qConvertFuncs[QVideoSurfaceFormat::Format_BGRA32] = qt_convert_BGRA32_to_ARGB32_sse2;
- qConvertFuncs[QVideoSurfaceFormat::Format_BGRA32_Premultiplied] = qt_convert_BGRA32_to_ARGB32_sse2;
- qConvertFuncs[QVideoSurfaceFormat::Format_BGR32] = qt_convert_BGRA32_to_ARGB32_sse2;
+ qConvertFuncs[QVideoFrameFormat::Format_BGRA32] = qt_convert_BGRA32_to_ARGB32_sse2;
+ qConvertFuncs[QVideoFrameFormat::Format_BGRA32_Premultiplied] = qt_convert_BGRA32_to_ARGB32_sse2;
+ qConvertFuncs[QVideoFrameFormat::Format_BGR32] = qt_convert_BGRA32_to_ARGB32_sse2;
}
#endif
#ifdef QT_COMPILER_SUPPORTS_SSSE3
extern void QT_FASTCALL qt_convert_BGRA32_to_ARGB32_ssse3(const QVideoFrame&, uchar*);
if (qCpuHasFeature(SSSE3)){
- qConvertFuncs[QVideoSurfaceFormat::Format_BGRA32] = qt_convert_BGRA32_to_ARGB32_ssse3;
- qConvertFuncs[QVideoSurfaceFormat::Format_BGRA32_Premultiplied] = qt_convert_BGRA32_to_ARGB32_ssse3;
- qConvertFuncs[QVideoSurfaceFormat::Format_BGR32] = qt_convert_BGRA32_to_ARGB32_ssse3;
+ qConvertFuncs[QVideoFrameFormat::Format_BGRA32] = qt_convert_BGRA32_to_ARGB32_ssse3;
+ qConvertFuncs[QVideoFrameFormat::Format_BGRA32_Premultiplied] = qt_convert_BGRA32_to_ARGB32_ssse3;
+ qConvertFuncs[QVideoFrameFormat::Format_BGR32] = qt_convert_BGRA32_to_ARGB32_ssse3;
}
#endif
#ifdef QT_COMPILER_SUPPORTS_AVX2
extern void QT_FASTCALL qt_convert_BGRA32_to_ARGB32_avx2(const QVideoFrame&, uchar*);
if (qCpuHasFeature(AVX2)){
- qConvertFuncs[QVideoSurfaceFormat::Format_BGRA32] = qt_convert_BGRA32_to_ARGB32_avx2;
- qConvertFuncs[QVideoSurfaceFormat::Format_BGRA32_Premultiplied] = qt_convert_BGRA32_to_ARGB32_avx2;
- qConvertFuncs[QVideoSurfaceFormat::Format_BGR32] = qt_convert_BGRA32_to_ARGB32_avx2;
+ qConvertFuncs[QVideoFrameFormat::Format_BGRA32] = qt_convert_BGRA32_to_ARGB32_avx2;
+ qConvertFuncs[QVideoFrameFormat::Format_BGRA32_Premultiplied] = qt_convert_BGRA32_to_ARGB32_avx2;
+ qConvertFuncs[QVideoFrameFormat::Format_BGR32] = qt_convert_BGRA32_to_ARGB32_avx2;
}
#endif
}
-VideoFrameConvertFunc qConverterForFormat(QVideoSurfaceFormat::PixelFormat format)
+VideoFrameConvertFunc qConverterForFormat(QVideoFrameFormat::PixelFormat format)
{
static bool initAsmFuncsDone = false;
if (!initAsmFuncsDone) {
diff --git a/src/multimedia/video/qvideoframeconversionhelper_p.h b/src/multimedia/video/qvideoframeconversionhelper_p.h
index 5ec919d9f..06e0d9a6d 100644
--- a/src/multimedia/video/qvideoframeconversionhelper_p.h
+++ b/src/multimedia/video/qvideoframeconversionhelper_p.h
@@ -57,7 +57,7 @@
// Converts to RGB32 or ARGB32_Premultiplied
typedef void (QT_FASTCALL *VideoFrameConvertFunc)(const QVideoFrame &frame, uchar *output);
-VideoFrameConvertFunc qConverterForFormat(QVideoSurfaceFormat::PixelFormat format);
+VideoFrameConvertFunc qConverterForFormat(QVideoFrameFormat::PixelFormat format);
inline quint32 qConvertBGRA32ToARGB32(quint32 bgra)
{
diff --git a/src/multimedia/video/qvideosurfaceformat.cpp b/src/multimedia/video/qvideoframeformat.cpp
index 6177ba0a6..02a679a09 100644
--- a/src/multimedia/video/qvideosurfaceformat.cpp
+++ b/src/multimedia/video/qvideoframeformat.cpp
@@ -37,7 +37,7 @@
**
****************************************************************************/
-#include "qvideosurfaceformat.h"
+#include "qvideoframeformat.h"
#include "qvideotexturehelper_p.h"
#include <qdebug.h>
@@ -53,21 +53,21 @@ static void initResource() {
Q_INIT_RESOURCE(shaders);
}
-class QVideoSurfaceFormatPrivate : public QSharedData
+class QVideoFrameFormatPrivate : public QSharedData
{
public:
- QVideoSurfaceFormatPrivate() = default;
+ QVideoFrameFormatPrivate() = default;
- QVideoSurfaceFormatPrivate(
+ QVideoFrameFormatPrivate(
const QSize &size,
- QVideoSurfaceFormat::PixelFormat format)
+ QVideoFrameFormat::PixelFormat format)
: pixelFormat(format)
, frameSize(size)
, viewport(QPoint(0, 0), size)
{
}
- bool operator ==(const QVideoSurfaceFormatPrivate &other) const
+ bool operator ==(const QVideoFrameFormatPrivate &other) const
{
if (pixelFormat == other.pixelFormat
&& scanLineDirection == other.scanLineDirection
@@ -86,18 +86,18 @@ public:
return qAbs(r1 - r2) <= 0.00001 * qMin(qAbs(r1), qAbs(r2));
}
- QVideoSurfaceFormat::PixelFormat pixelFormat = QVideoSurfaceFormat::Format_Invalid;
- QVideoSurfaceFormat::Direction scanLineDirection = QVideoSurfaceFormat::TopToBottom;
+ QVideoFrameFormat::PixelFormat pixelFormat = QVideoFrameFormat::Format_Invalid;
+ QVideoFrameFormat::Direction scanLineDirection = QVideoFrameFormat::TopToBottom;
QSize frameSize;
- QVideoSurfaceFormat::YCbCrColorSpace ycbcrColorSpace = QVideoSurfaceFormat::YCbCr_Undefined;
+ QVideoFrameFormat::YCbCrColorSpace ycbcrColorSpace = QVideoFrameFormat::YCbCr_Undefined;
QRect viewport;
qreal frameRate = 0.0;
bool mirrored = false;
};
/*!
- \class QVideoSurfaceFormat
- \brief The QVideoSurfaceFormat class specifies the stream format of a video presentation
+ \class QVideoFrameFormat
+ \brief The QVideoFrameFormat class specifies the stream format of a video presentation
surface.
\inmodule QtMultimedia
@@ -125,7 +125,7 @@ public:
*/
/*!
- \enum QVideoSurfaceFormat::Direction
+ \enum QVideoFrameFormat::Direction
Enumerates the layout direction of video scan lines.
@@ -134,7 +134,7 @@ public:
*/
/*!
- \enum QVideoSurfaceFormat::YCbCrColorSpace
+ \enum QVideoFrameFormat::YCbCrColorSpace
Enumerates the Y'CbCr color space of video frames.
@@ -165,8 +165,8 @@ public:
/*!
Constructs a null video stream format.
*/
-QVideoSurfaceFormat::QVideoSurfaceFormat()
- : d(new QVideoSurfaceFormatPrivate)
+QVideoFrameFormat::QVideoFrameFormat()
+ : d(new QVideoFrameFormatPrivate)
{
initResource();
}
@@ -175,33 +175,33 @@ QVideoSurfaceFormat::QVideoSurfaceFormat()
Constructs a description of a stream which receives a stream of \a type buffers with given frame
\a size and pixel \a format.
*/
-QVideoSurfaceFormat::QVideoSurfaceFormat(
- const QSize& size, QVideoSurfaceFormat::PixelFormat format)
- : d(new QVideoSurfaceFormatPrivate(size, format))
+QVideoFrameFormat::QVideoFrameFormat(
+ const QSize& size, QVideoFrameFormat::PixelFormat format)
+ : d(new QVideoFrameFormatPrivate(size, format))
{
}
/*!
Constructs a copy of \a other.
*/
-QVideoSurfaceFormat::QVideoSurfaceFormat(const QVideoSurfaceFormat &other) = default;
+QVideoFrameFormat::QVideoFrameFormat(const QVideoFrameFormat &other) = default;
/*!
Assigns the values of \a other to this object.
*/
-QVideoSurfaceFormat &QVideoSurfaceFormat::operator =(const QVideoSurfaceFormat &other) = default;
+QVideoFrameFormat &QVideoFrameFormat::operator =(const QVideoFrameFormat &other) = default;
/*!
Destroys a video stream description.
*/
-QVideoSurfaceFormat::~QVideoSurfaceFormat() = default;
+QVideoFrameFormat::~QVideoFrameFormat() = default;
/*!
Identifies if a video surface format has a valid pixel format and frame size.
Returns true if the format is valid, and false otherwise.
*/
-bool QVideoSurfaceFormat::isValid() const
+bool QVideoFrameFormat::isValid() const
{
return d->pixelFormat != Format_Invalid && d->frameSize.isValid();
}
@@ -209,7 +209,7 @@ bool QVideoSurfaceFormat::isValid() const
/*!
Returns true if \a other is the same as this video format, and false if they are different.
*/
-bool QVideoSurfaceFormat::operator ==(const QVideoSurfaceFormat &other) const
+bool QVideoFrameFormat::operator ==(const QVideoFrameFormat &other) const
{
return d == other.d || *d == *other.d;
}
@@ -217,7 +217,7 @@ bool QVideoSurfaceFormat::operator ==(const QVideoSurfaceFormat &other) const
/*!
Returns true if \a other is different to this video format, and false if they are the same.
*/
-bool QVideoSurfaceFormat::operator !=(const QVideoSurfaceFormat &other) const
+bool QVideoFrameFormat::operator !=(const QVideoFrameFormat &other) const
{
return d != other.d && !(*d == *other.d);
}
@@ -225,7 +225,7 @@ bool QVideoSurfaceFormat::operator !=(const QVideoSurfaceFormat &other) const
/*!
Returns the pixel format of frames in a video stream.
*/
-QVideoSurfaceFormat::PixelFormat QVideoSurfaceFormat::pixelFormat() const
+QVideoFrameFormat::PixelFormat QVideoFrameFormat::pixelFormat() const
{
return d->pixelFormat;
}
@@ -235,7 +235,7 @@ QVideoSurfaceFormat::PixelFormat QVideoSurfaceFormat::pixelFormat() const
\sa frameWidth(), frameHeight()
*/
-QSize QVideoSurfaceFormat::frameSize() const
+QSize QVideoFrameFormat::frameSize() const
{
return d->frameSize;
}
@@ -245,7 +245,7 @@ QSize QVideoSurfaceFormat::frameSize() const
\sa frameSize(), frameHeight()
*/
-int QVideoSurfaceFormat::frameWidth() const
+int QVideoFrameFormat::frameWidth() const
{
return d->frameSize.width();
}
@@ -253,12 +253,12 @@ int QVideoSurfaceFormat::frameWidth() const
/*!
Returns the height of frame in a video stream.
*/
-int QVideoSurfaceFormat::frameHeight() const
+int QVideoFrameFormat::frameHeight() const
{
return d->frameSize.height();
}
-int QVideoSurfaceFormat::nPlanes() const
+int QVideoFrameFormat::nPlanes() const
{
return QVideoTextureHelper::textureDescription(d->pixelFormat)->nplanes;
}
@@ -268,7 +268,7 @@ int QVideoSurfaceFormat::nPlanes() const
This will reset the viewport() to fill the entire frame.
*/
-void QVideoSurfaceFormat::setFrameSize(const QSize &size)
+void QVideoFrameFormat::setFrameSize(const QSize &size)
{
d->frameSize = size;
d->viewport = QRect(QPoint(0, 0), size);
@@ -281,7 +281,7 @@ void QVideoSurfaceFormat::setFrameSize(const QSize &size)
This will reset the viewport() to fill the entire frame.
*/
-void QVideoSurfaceFormat::setFrameSize(int width, int height)
+void QVideoFrameFormat::setFrameSize(int width, int height)
{
d->frameSize = QSize(width, height);
d->viewport = QRect(0, 0, width, height);
@@ -294,7 +294,7 @@ void QVideoSurfaceFormat::setFrameSize(int width, int height)
By default the viewport covers an entire frame.
*/
-QRect QVideoSurfaceFormat::viewport() const
+QRect QVideoFrameFormat::viewport() const
{
return d->viewport;
}
@@ -302,7 +302,7 @@ QRect QVideoSurfaceFormat::viewport() const
/*!
Sets the viewport of a video stream to \a viewport.
*/
-void QVideoSurfaceFormat::setViewport(const QRect &viewport)
+void QVideoFrameFormat::setViewport(const QRect &viewport)
{
d->viewport = viewport;
}
@@ -310,7 +310,7 @@ void QVideoSurfaceFormat::setViewport(const QRect &viewport)
/*!
Returns the direction of scan lines.
*/
-QVideoSurfaceFormat::Direction QVideoSurfaceFormat::scanLineDirection() const
+QVideoFrameFormat::Direction QVideoFrameFormat::scanLineDirection() const
{
return d->scanLineDirection;
}
@@ -318,7 +318,7 @@ QVideoSurfaceFormat::Direction QVideoSurfaceFormat::scanLineDirection() const
/*!
Sets the \a direction of scan lines.
*/
-void QVideoSurfaceFormat::setScanLineDirection(Direction direction)
+void QVideoFrameFormat::setScanLineDirection(Direction direction)
{
d->scanLineDirection = direction;
}
@@ -326,7 +326,7 @@ void QVideoSurfaceFormat::setScanLineDirection(Direction direction)
/*!
Returns the frame rate of a video stream in frames per second.
*/
-qreal QVideoSurfaceFormat::frameRate() const
+qreal QVideoFrameFormat::frameRate() const
{
return d->frameRate;
}
@@ -334,7 +334,7 @@ qreal QVideoSurfaceFormat::frameRate() const
/*!
Sets the frame \a rate of a video stream in frames per second.
*/
-void QVideoSurfaceFormat::setFrameRate(qreal rate)
+void QVideoFrameFormat::setFrameRate(qreal rate)
{
d->frameRate = rate;
}
@@ -342,7 +342,7 @@ void QVideoSurfaceFormat::setFrameRate(qreal rate)
/*!
Returns the Y'CbCr color space of a video stream.
*/
-QVideoSurfaceFormat::YCbCrColorSpace QVideoSurfaceFormat::yCbCrColorSpace() const
+QVideoFrameFormat::YCbCrColorSpace QVideoFrameFormat::yCbCrColorSpace() const
{
return d->ycbcrColorSpace;
}
@@ -351,7 +351,7 @@ QVideoSurfaceFormat::YCbCrColorSpace QVideoSurfaceFormat::yCbCrColorSpace() cons
Sets the Y'CbCr color \a space of a video stream.
It is only used with raw YUV frame types.
*/
-void QVideoSurfaceFormat::setYCbCrColorSpace(QVideoSurfaceFormat::YCbCrColorSpace space)
+void QVideoFrameFormat::setYCbCrColorSpace(QVideoFrameFormat::YCbCrColorSpace space)
{
d->ycbcrColorSpace = space;
}
@@ -365,7 +365,7 @@ void QVideoSurfaceFormat::setYCbCrColorSpace(QVideoSurfaceFormat::YCbCrColorSpac
\since 5.11
*/
-bool QVideoSurfaceFormat::isMirrored() const
+bool QVideoFrameFormat::isMirrored() const
{
return d->mirrored;
}
@@ -380,7 +380,7 @@ bool QVideoSurfaceFormat::isMirrored() const
\since 5.11
*/
-void QVideoSurfaceFormat::setMirrored(bool mirrored)
+void QVideoFrameFormat::setMirrored(bool mirrored)
{
d->mirrored = mirrored;
}
@@ -390,22 +390,22 @@ void QVideoSurfaceFormat::setMirrored(bool mirrored)
This is the same as the size of the viewport.
*/
-QSize QVideoSurfaceFormat::sizeHint() const
+QSize QVideoFrameFormat::sizeHint() const
{
return d->viewport.size();
}
-QString QVideoSurfaceFormat::vertexShaderFileName() const
+QString QVideoFrameFormat::vertexShaderFileName() const
{
return QVideoTextureHelper::vertexShaderFileName(d->pixelFormat);
}
-QString QVideoSurfaceFormat::fragmentShaderFileName() const
+QString QVideoFrameFormat::fragmentShaderFileName() const
{
return QVideoTextureHelper::fragmentShaderFileName(d->pixelFormat);
}
-QByteArray QVideoSurfaceFormat::uniformData(const QMatrix4x4 &transform, float opacity) const
+QByteArray QVideoFrameFormat::uniformData(const QMatrix4x4 &transform, float opacity) const
{
return QVideoTextureHelper::uniformData(*this, transform, opacity);
}
@@ -418,24 +418,24 @@ QByteArray QVideoSurfaceFormat::uniformData(const QMatrix4x4 &transform, float o
\note In general \l QImage does not handle YUV formats.
*/
-QVideoSurfaceFormat::PixelFormat QVideoSurfaceFormat::pixelFormatFromImageFormat(QImage::Format format)
+QVideoFrameFormat::PixelFormat QVideoFrameFormat::pixelFormatFromImageFormat(QImage::Format format)
{
switch (format) {
case QImage::Format_RGB32:
case QImage::Format_RGBX8888:
- return QVideoSurfaceFormat::Format_RGB32;
+ return QVideoFrameFormat::Format_RGB32;
case QImage::Format_ARGB32:
case QImage::Format_RGBA8888:
- return QVideoSurfaceFormat::Format_ARGB32;
+ return QVideoFrameFormat::Format_ARGB32;
case QImage::Format_ARGB32_Premultiplied:
case QImage::Format_RGBA8888_Premultiplied:
- return QVideoSurfaceFormat::Format_ARGB32_Premultiplied;
+ return QVideoFrameFormat::Format_ARGB32_Premultiplied;
case QImage::Format_Grayscale8:
- return QVideoSurfaceFormat::Format_Y8;
+ return QVideoFrameFormat::Format_Y8;
case QImage::Format_Grayscale16:
- return QVideoSurfaceFormat::Format_Y16;
+ return QVideoFrameFormat::Format_Y16;
default:
- return QVideoSurfaceFormat::Format_Invalid;
+ return QVideoFrameFormat::Format_Invalid;
}
}
@@ -446,64 +446,64 @@ QVideoSurfaceFormat::PixelFormat QVideoSurfaceFormat::pixelFormatFromImageFormat
\note In general \l QImage does not handle YUV formats.
*/
-QImage::Format QVideoSurfaceFormat::imageFormatFromPixelFormat(QVideoSurfaceFormat::PixelFormat format)
+QImage::Format QVideoFrameFormat::imageFormatFromPixelFormat(QVideoFrameFormat::PixelFormat format)
{
switch (format) {
- case QVideoSurfaceFormat::Format_ARGB32:
+ case QVideoFrameFormat::Format_ARGB32:
return QImage::Format_ARGB32;
- case QVideoSurfaceFormat::Format_ARGB32_Premultiplied:
+ case QVideoFrameFormat::Format_ARGB32_Premultiplied:
return QImage::Format_ARGB32_Premultiplied;
- case QVideoSurfaceFormat::Format_RGB32:
+ case QVideoFrameFormat::Format_RGB32:
return QImage::Format_RGB32;
- case QVideoSurfaceFormat::Format_Y8:
+ case QVideoFrameFormat::Format_Y8:
return QImage::Format_Grayscale8;
- case QVideoSurfaceFormat::Format_Y16:
+ case QVideoFrameFormat::Format_Y16:
return QImage::Format_Grayscale16;
- case QVideoSurfaceFormat::Format_ABGR32:
- case QVideoSurfaceFormat::Format_BGRA32:
- case QVideoSurfaceFormat::Format_BGRA32_Premultiplied:
- case QVideoSurfaceFormat::Format_BGR32:
- case QVideoSurfaceFormat::Format_AYUV444:
- case QVideoSurfaceFormat::Format_AYUV444_Premultiplied:
- case QVideoSurfaceFormat::Format_YUV420P:
- case QVideoSurfaceFormat::Format_YUV422P:
- case QVideoSurfaceFormat::Format_YV12:
- case QVideoSurfaceFormat::Format_UYVY:
- case QVideoSurfaceFormat::Format_YUYV:
- case QVideoSurfaceFormat::Format_NV12:
- case QVideoSurfaceFormat::Format_NV21:
- case QVideoSurfaceFormat::Format_IMC1:
- case QVideoSurfaceFormat::Format_IMC2:
- case QVideoSurfaceFormat::Format_IMC3:
- case QVideoSurfaceFormat::Format_IMC4:
- case QVideoSurfaceFormat::Format_P010:
- case QVideoSurfaceFormat::Format_P016:
- case QVideoSurfaceFormat::Format_Jpeg:
- case QVideoSurfaceFormat::Format_Invalid:
+ case QVideoFrameFormat::Format_ABGR32:
+ case QVideoFrameFormat::Format_BGRA32:
+ case QVideoFrameFormat::Format_BGRA32_Premultiplied:
+ case QVideoFrameFormat::Format_BGR32:
+ case QVideoFrameFormat::Format_AYUV444:
+ case QVideoFrameFormat::Format_AYUV444_Premultiplied:
+ case QVideoFrameFormat::Format_YUV420P:
+ case QVideoFrameFormat::Format_YUV422P:
+ case QVideoFrameFormat::Format_YV12:
+ case QVideoFrameFormat::Format_UYVY:
+ case QVideoFrameFormat::Format_YUYV:
+ case QVideoFrameFormat::Format_NV12:
+ case QVideoFrameFormat::Format_NV21:
+ case QVideoFrameFormat::Format_IMC1:
+ case QVideoFrameFormat::Format_IMC2:
+ case QVideoFrameFormat::Format_IMC3:
+ case QVideoFrameFormat::Format_IMC4:
+ case QVideoFrameFormat::Format_P010:
+ case QVideoFrameFormat::Format_P016:
+ case QVideoFrameFormat::Format_Jpeg:
+ case QVideoFrameFormat::Format_Invalid:
return QImage::Format_Invalid;
}
return QImage::Format_Invalid;
}
#ifndef QT_NO_DEBUG_STREAM
-QDebug operator<<(QDebug dbg, QVideoSurfaceFormat::YCbCrColorSpace cs)
+QDebug operator<<(QDebug dbg, QVideoFrameFormat::YCbCrColorSpace cs)
{
QDebugStateSaver saver(dbg);
dbg.nospace();
switch (cs) {
- case QVideoSurfaceFormat::YCbCr_BT601:
+ case QVideoFrameFormat::YCbCr_BT601:
dbg << "YCbCr_BT601";
break;
- case QVideoSurfaceFormat::YCbCr_BT709:
+ case QVideoFrameFormat::YCbCr_BT709:
dbg << "YCbCr_BT709";
break;
- case QVideoSurfaceFormat::YCbCr_JPEG:
+ case QVideoFrameFormat::YCbCr_JPEG:
dbg << "YCbCr_JPEG";
break;
- case QVideoSurfaceFormat::YCbCr_xvYCC601:
+ case QVideoFrameFormat::YCbCr_xvYCC601:
dbg << "YCbCr_xvYCC601";
break;
- case QVideoSurfaceFormat::YCbCr_xvYCC709:
+ case QVideoFrameFormat::YCbCr_xvYCC709:
dbg << "YCbCr_xvYCC709";
break;
default:
@@ -513,26 +513,26 @@ QDebug operator<<(QDebug dbg, QVideoSurfaceFormat::YCbCrColorSpace cs)
return dbg;
}
-QDebug operator<<(QDebug dbg, QVideoSurfaceFormat::Direction dir)
+QDebug operator<<(QDebug dbg, QVideoFrameFormat::Direction dir)
{
QDebugStateSaver saver(dbg);
dbg.nospace();
switch (dir) {
- case QVideoSurfaceFormat::BottomToTop:
+ case QVideoFrameFormat::BottomToTop:
dbg << "BottomToTop";
break;
- case QVideoSurfaceFormat::TopToBottom:
+ case QVideoFrameFormat::TopToBottom:
dbg << "TopToBottom";
break;
}
return dbg;
}
-QDebug operator<<(QDebug dbg, const QVideoSurfaceFormat &f)
+QDebug operator<<(QDebug dbg, const QVideoFrameFormat &f)
{
QDebugStateSaver saver(dbg);
dbg.nospace();
- dbg << "QVideoSurfaceFormat(" << f.pixelFormat() << ", " << f.frameSize()
+ dbg << "QVideoFrameFormat(" << f.pixelFormat() << ", " << f.frameSize()
<< ", viewport=" << f.viewport()
<< ", yCbCrColorSpace=" << f.yCbCrColorSpace()
<< ')'
@@ -546,62 +546,62 @@ QDebug operator<<(QDebug dbg, const QVideoSurfaceFormat &f)
return dbg;
}
-QDebug operator<<(QDebug dbg, QVideoSurfaceFormat::PixelFormat pf)
+QDebug operator<<(QDebug dbg, QVideoFrameFormat::PixelFormat pf)
{
QDebugStateSaver saver(dbg);
dbg.nospace();
switch (pf) {
- case QVideoSurfaceFormat::Format_Invalid:
+ case QVideoFrameFormat::Format_Invalid:
return dbg << "Format_Invalid";
- case QVideoSurfaceFormat::Format_ARGB32:
+ case QVideoFrameFormat::Format_ARGB32:
return dbg << "Format_ARGB32";
- case QVideoSurfaceFormat::Format_ARGB32_Premultiplied:
+ case QVideoFrameFormat::Format_ARGB32_Premultiplied:
return dbg << "Format_ARGB32_Premultiplied";
- case QVideoSurfaceFormat::Format_RGB32:
+ case QVideoFrameFormat::Format_RGB32:
return dbg << "Format_RGB32";
- case QVideoSurfaceFormat::Format_BGRA32:
+ case QVideoFrameFormat::Format_BGRA32:
return dbg << "Format_BGRA32";
- case QVideoSurfaceFormat::Format_BGRA32_Premultiplied:
+ case QVideoFrameFormat::Format_BGRA32_Premultiplied:
return dbg << "Format_BGRA32_Premultiplied";
- case QVideoSurfaceFormat::Format_ABGR32:
+ case QVideoFrameFormat::Format_ABGR32:
return dbg << "Format_ABGR32";
- case QVideoSurfaceFormat::Format_BGR32:
+ case QVideoFrameFormat::Format_BGR32:
return dbg << "Format_BGR32";
- case QVideoSurfaceFormat::Format_AYUV444:
+ case QVideoFrameFormat::Format_AYUV444:
return dbg << "Format_AYUV444";
- case QVideoSurfaceFormat::Format_AYUV444_Premultiplied:
+ case QVideoFrameFormat::Format_AYUV444_Premultiplied:
return dbg << "Format_AYUV444_Premultiplied";
- case QVideoSurfaceFormat::Format_YUV420P:
+ case QVideoFrameFormat::Format_YUV420P:
return dbg << "Format_YUV420P";
- case QVideoSurfaceFormat::Format_YUV422P:
+ case QVideoFrameFormat::Format_YUV422P:
return dbg << "Format_YUV422P";
- case QVideoSurfaceFormat::Format_YV12:
+ case QVideoFrameFormat::Format_YV12:
return dbg << "Format_YV12";
- case QVideoSurfaceFormat::Format_UYVY:
+ case QVideoFrameFormat::Format_UYVY:
return dbg << "Format_UYVY";
- case QVideoSurfaceFormat::Format_YUYV:
+ case QVideoFrameFormat::Format_YUYV:
return dbg << "Format_YUYV";
- case QVideoSurfaceFormat::Format_NV12:
+ case QVideoFrameFormat::Format_NV12:
return dbg << "Format_NV12";
- case QVideoSurfaceFormat::Format_NV21:
+ case QVideoFrameFormat::Format_NV21:
return dbg << "Format_NV21";
- case QVideoSurfaceFormat::Format_IMC1:
+ case QVideoFrameFormat::Format_IMC1:
return dbg << "Format_IMC1";
- case QVideoSurfaceFormat::Format_IMC2:
+ case QVideoFrameFormat::Format_IMC2:
return dbg << "Format_IMC2";
- case QVideoSurfaceFormat::Format_IMC3:
+ case QVideoFrameFormat::Format_IMC3:
return dbg << "Format_IMC3";
- case QVideoSurfaceFormat::Format_IMC4:
+ case QVideoFrameFormat::Format_IMC4:
return dbg << "Format_IMC4";
- case QVideoSurfaceFormat::Format_Y8:
+ case QVideoFrameFormat::Format_Y8:
return dbg << "Format_Y8";
- case QVideoSurfaceFormat::Format_Y16:
+ case QVideoFrameFormat::Format_Y16:
return dbg << "Format_Y16";
- case QVideoSurfaceFormat::Format_P010:
+ case QVideoFrameFormat::Format_P010:
return dbg << "Format_P010";
- case QVideoSurfaceFormat::Format_P016:
+ case QVideoFrameFormat::Format_P016:
return dbg << "Format_P016";
- case QVideoSurfaceFormat::Format_Jpeg:
+ case QVideoFrameFormat::Format_Jpeg:
return dbg << "Format_Jpeg";
default:
diff --git a/src/multimedia/video/qvideosurfaceformat.h b/src/multimedia/video/qvideoframeformat.h
index e3551501a..e9be55ca1 100644
--- a/src/multimedia/video/qvideosurfaceformat.h
+++ b/src/multimedia/video/qvideoframeformat.h
@@ -53,10 +53,10 @@ QT_BEGIN_NAMESPACE
class QDebug;
-class QVideoSurfaceFormatPrivate;
+class QVideoFrameFormatPrivate;
class QMatrix4x4;
-class Q_MULTIMEDIA_EXPORT QVideoSurfaceFormat
+class Q_MULTIMEDIA_EXPORT QVideoFrameFormat
{
public:
enum PixelFormat
@@ -111,21 +111,21 @@ public:
YCbCr_JPEG,
};
- QVideoSurfaceFormat();
- QVideoSurfaceFormat(
+ QVideoFrameFormat();
+ QVideoFrameFormat(
const QSize &size,
- QVideoSurfaceFormat::PixelFormat pixelFormat);
- QVideoSurfaceFormat(const QVideoSurfaceFormat &format);
- ~QVideoSurfaceFormat();
+ QVideoFrameFormat::PixelFormat pixelFormat);
+ QVideoFrameFormat(const QVideoFrameFormat &format);
+ ~QVideoFrameFormat();
- QVideoSurfaceFormat &operator =(const QVideoSurfaceFormat &format);
+ QVideoFrameFormat &operator =(const QVideoFrameFormat &format);
- bool operator ==(const QVideoSurfaceFormat &format) const;
- bool operator !=(const QVideoSurfaceFormat &format) const;
+ bool operator ==(const QVideoFrameFormat &format) const;
+ bool operator !=(const QVideoFrameFormat &format) const;
bool isValid() const;
- QVideoSurfaceFormat::PixelFormat pixelFormat() const;
+ QVideoFrameFormat::PixelFormat pixelFormat() const;
QSize frameSize() const;
void setFrameSize(const QSize &size);
@@ -161,14 +161,14 @@ public:
static QImage::Format imageFormatFromPixelFormat(PixelFormat format);
private:
- QSharedDataPointer<QVideoSurfaceFormatPrivate> d;
+ QSharedDataPointer<QVideoFrameFormatPrivate> d;
};
#ifndef QT_NO_DEBUG_STREAM
-Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, const QVideoSurfaceFormat &);
-Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, QVideoSurfaceFormat::Direction);
-Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, QVideoSurfaceFormat::YCbCrColorSpace);
-Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, QVideoSurfaceFormat::PixelFormat);
+Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, const QVideoFrameFormat &);
+Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, QVideoFrameFormat::Direction);
+Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, QVideoFrameFormat::YCbCrColorSpace);
+Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, QVideoFrameFormat::PixelFormat);
#endif
QT_END_NAMESPACE
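
As a quick illustration of the renamed static helpers declared above and defined in qvideoframeformat.cpp — a sketch, not part of the patch:

    #include <QtMultimedia/qvideoframeformat.h>
    #include <QImage>

    void formatMappingExamples()
    {
        // QImage-compatible formats map directly.
        QImage::Format img =
                QVideoFrameFormat::imageFormatFromPixelFormat(QVideoFrameFormat::Format_RGB32);
        // img == QImage::Format_RGB32

        QVideoFrameFormat::PixelFormat pf =
                QVideoFrameFormat::pixelFormatFromImageFormat(QImage::Format_Grayscale8);
        // pf == QVideoFrameFormat::Format_Y8

        // Formats without a direct video equivalent fall into the default branch.
        QVideoFrameFormat::PixelFormat invalid =
                QVideoFrameFormat::pixelFormatFromImageFormat(QImage::Format_RGB16);
        // invalid == QVideoFrameFormat::Format_Invalid

        Q_UNUSED(img); Q_UNUSED(pf); Q_UNUSED(invalid);
    }
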
diff --git a/src/multimedia/video/qvideosink.cpp b/src/multimedia/video/qvideosink.cpp
index 541d9058a..93308112e 100644
--- a/src/multimedia/video/qvideosink.cpp
+++ b/src/multimedia/video/qvideosink.cpp
@@ -39,7 +39,7 @@
#include "qvideosink.h"
-#include "qvideosurfaceformat.h"
+#include "qvideoframeformat.h"
#include "qvideoframe.h"
#include <qvariant.h>
@@ -65,7 +65,7 @@ public:
QVideoSink *q_ptr = nullptr;
QPlatformVideoSink *videoSink = nullptr;
QVideoSink::GraphicsType type = QVideoSink::Memory;
- QVideoSurfaceFormat surfaceFormat;
+ QVideoFrameFormat surfaceFormat;
QSize nativeResolution;
bool active = false;
WId window = 0;
@@ -283,13 +283,13 @@ void QVideoSink::paint(QPainter *painter, const QVideoFrame &f)
return;
}
- auto imageFormat = QVideoSurfaceFormat::imageFormatFromPixelFormat(frame.pixelFormat());
+ auto imageFormat = QVideoFrameFormat::imageFormatFromPixelFormat(frame.pixelFormat());
// Do not render into ARGB32 images using QPainter.
// Using QImage::Format_ARGB32_Premultiplied is significantly faster.
if (imageFormat == QImage::Format_ARGB32)
imageFormat = QImage::Format_ARGB32_Premultiplied;
- QVideoSurfaceFormat::Direction scanLineDirection = QVideoSurfaceFormat::TopToBottom;//format.scanLineDirection();
+ QVideoFrameFormat::Direction scanLineDirection = QVideoFrameFormat::TopToBottom;//format.scanLineDirection();
bool mirrored = false;//format.isMirrored();
QSizeF size = frame.size();
@@ -332,7 +332,7 @@ void QVideoSink::paint(QPainter *painter, const QVideoFrame &f)
const QTransform oldTransform = painter->transform();
QTransform transform = oldTransform;
- if (scanLineDirection == QVideoSurfaceFormat::BottomToTop) {
+ if (scanLineDirection == QVideoFrameFormat::BottomToTop) {
transform.scale(1, -1);
transform.translate(0, -targetRect.bottom());
targetRect = QRectF(targetRect.x(), 0, targetRect.width(), targetRect.height());
diff --git a/src/multimedia/video/qvideosink.h b/src/multimedia/video/qvideosink.h
index 524a2af14..e88a288c5 100644
--- a/src/multimedia/video/qvideosink.h
+++ b/src/multimedia/video/qvideosink.h
@@ -47,7 +47,7 @@
QT_BEGIN_NAMESPACE
class QRectF;
-class QVideoSurfaceFormat;
+class QVideoFrameFormat;
class QVideoFrame;
class QVideoSinkPrivate;
diff --git a/src/multimedia/video/qvideotexturehelper.cpp b/src/multimedia/video/qvideotexturehelper.cpp
index f0797b039..706844022 100644
--- a/src/multimedia/video/qvideotexturehelper.cpp
+++ b/src/multimedia/video/qvideotexturehelper.cpp
@@ -43,7 +43,7 @@
namespace QVideoTextureHelper
{
-static const TextureDescription descriptions[QVideoSurfaceFormat::NPixelFormats] = {
+static const TextureDescription descriptions[QVideoFrameFormat::NPixelFormats] = {
// Format_Invalid
{ 0,
{ QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat, QRhiTexture::UnknownFormat},
@@ -180,76 +180,76 @@ static const TextureDescription descriptions[QVideoSurfaceFormat::NPixelFormats]
};
-const TextureDescription *textureDescription(QVideoSurfaceFormat::PixelFormat format)
+const TextureDescription *textureDescription(QVideoFrameFormat::PixelFormat format)
{
return descriptions + format;
}
-QString vertexShaderFileName(QVideoSurfaceFormat::PixelFormat /*format*/)
+QString vertexShaderFileName(QVideoFrameFormat::PixelFormat /*format*/)
{
return QStringLiteral(":/qt-project.org/multimedia/shaders/vertex.vert.qsb");
}
-QString fragmentShaderFileName(QVideoSurfaceFormat::PixelFormat format)
+QString fragmentShaderFileName(QVideoFrameFormat::PixelFormat format)
{
switch (format) {
- case QVideoSurfaceFormat::Format_Invalid:
- case QVideoSurfaceFormat::Format_Jpeg:
+ case QVideoFrameFormat::Format_Invalid:
+ case QVideoFrameFormat::Format_Jpeg:
return QString();
- case QVideoSurfaceFormat::Format_Y8:
- case QVideoSurfaceFormat::Format_Y16:
+ case QVideoFrameFormat::Format_Y8:
+ case QVideoFrameFormat::Format_Y16:
return QStringLiteral(":/qt-project.org/multimedia/shaders/y.frag.qsb");
- case QVideoSurfaceFormat::Format_AYUV444:
- case QVideoSurfaceFormat::Format_AYUV444_Premultiplied:
+ case QVideoFrameFormat::Format_AYUV444:
+ case QVideoFrameFormat::Format_AYUV444_Premultiplied:
return QStringLiteral(":/qt-project.org/multimedia/shaders/ayuv.frag.qsb");
- case QVideoSurfaceFormat::Format_ARGB32:
- case QVideoSurfaceFormat::Format_ARGB32_Premultiplied:
- case QVideoSurfaceFormat::Format_RGB32:
+ case QVideoFrameFormat::Format_ARGB32:
+ case QVideoFrameFormat::Format_ARGB32_Premultiplied:
+ case QVideoFrameFormat::Format_RGB32:
return QStringLiteral(":/qt-project.org/multimedia/shaders/argb.frag.qsb");
- case QVideoSurfaceFormat::Format_BGRA32:
- case QVideoSurfaceFormat::Format_BGRA32_Premultiplied:
+ case QVideoFrameFormat::Format_BGRA32:
+ case QVideoFrameFormat::Format_BGRA32_Premultiplied:
return QStringLiteral(":/qt-project.org/multimedia/shaders/bgra.frag.qsb");
- case QVideoSurfaceFormat::Format_ABGR32:
- case QVideoSurfaceFormat::Format_BGR32:
+ case QVideoFrameFormat::Format_ABGR32:
+ case QVideoFrameFormat::Format_BGR32:
return QStringLiteral(":/qt-project.org/multimedia/shaders/abgr.frag.qsb");
- case QVideoSurfaceFormat::Format_YUV420P:
- case QVideoSurfaceFormat::Format_YUV422P:
- case QVideoSurfaceFormat::Format_IMC3:
+ case QVideoFrameFormat::Format_YUV420P:
+ case QVideoFrameFormat::Format_YUV422P:
+ case QVideoFrameFormat::Format_IMC3:
return QStringLiteral(":/qt-project.org/multimedia/shaders/yuv_triplanar.frag.qsb");
- case QVideoSurfaceFormat::Format_YV12:
- case QVideoSurfaceFormat::Format_IMC1:
+ case QVideoFrameFormat::Format_YV12:
+ case QVideoFrameFormat::Format_IMC1:
return QStringLiteral(":/qt-project.org/multimedia/shaders/yvu_triplanar.frag.qsb");
- case QVideoSurfaceFormat::Format_IMC2:
+ case QVideoFrameFormat::Format_IMC2:
return QStringLiteral(":/qt-project.org/multimedia/shaders/imc2.frag.qsb");
- case QVideoSurfaceFormat::Format_IMC4:
+ case QVideoFrameFormat::Format_IMC4:
return QStringLiteral(":/qt-project.org/multimedia/shaders/imc4.frag.qsb");
- case QVideoSurfaceFormat::Format_UYVY:
+ case QVideoFrameFormat::Format_UYVY:
return QStringLiteral(":/qt-project.org/multimedia/shaders/uyvy.frag.qsb");
- case QVideoSurfaceFormat::Format_YUYV:
+ case QVideoFrameFormat::Format_YUYV:
return QStringLiteral(":/qt-project.org/multimedia/shaders/yuyv.frag.qsb");
- case QVideoSurfaceFormat::Format_NV12:
- case QVideoSurfaceFormat::Format_P010:
- case QVideoSurfaceFormat::Format_P016:
+ case QVideoFrameFormat::Format_NV12:
+ case QVideoFrameFormat::Format_P010:
+ case QVideoFrameFormat::Format_P016:
// P010/P016 have the same layout as NV12, just 16 instead of 8 bits per pixel
return QStringLiteral(":/qt-project.org/multimedia/shaders/nv12.frag.qsb");
- case QVideoSurfaceFormat::Format_NV21:
+ case QVideoFrameFormat::Format_NV21:
return QStringLiteral(":/qt-project.org/multimedia/shaders/nv21.frag.qsb");
}
}
-static QMatrix4x4 colorMatrix(QVideoSurfaceFormat::YCbCrColorSpace colorSpace)
+static QMatrix4x4 colorMatrix(QVideoFrameFormat::YCbCrColorSpace colorSpace)
{
switch (colorSpace) {
- case QVideoSurfaceFormat::YCbCr_JPEG:
+ case QVideoFrameFormat::YCbCr_JPEG:
return QMatrix4x4(
1.0f, 0.000f, 1.402f, -0.701f,
1.0f, -0.344f, -0.714f, 0.529f,
1.0f, 1.772f, 0.000f, -0.886f,
0.0f, 0.000f, 0.000f, 1.0000f);
- case QVideoSurfaceFormat::YCbCr_BT709:
- case QVideoSurfaceFormat::YCbCr_xvYCC709:
+ case QVideoFrameFormat::YCbCr_BT709:
+ case QVideoFrameFormat::YCbCr_xvYCC709:
return QMatrix4x4(
1.164f, 0.000f, 1.793f, -0.5727f,
1.164f, -0.534f, -0.213f, 0.3007f,
@@ -298,40 +298,40 @@ static QMatrix4x4 yuvColorCorrectionMatrix(float brightness, float contrast, flo
}
#endif
-QByteArray uniformData(const QVideoSurfaceFormat &format, const QMatrix4x4 &transform, float opacity)
+QByteArray uniformData(const QVideoFrameFormat &format, const QMatrix4x4 &transform, float opacity)
{
QMatrix4x4 cmat;
switch (format.pixelFormat()) {
- case QVideoSurfaceFormat::Format_Invalid:
- case QVideoSurfaceFormat::Format_Jpeg:
+ case QVideoFrameFormat::Format_Invalid:
+ case QVideoFrameFormat::Format_Jpeg:
return QByteArray();
- case QVideoSurfaceFormat::Format_ARGB32:
- case QVideoSurfaceFormat::Format_ARGB32_Premultiplied:
- case QVideoSurfaceFormat::Format_RGB32:
- case QVideoSurfaceFormat::Format_BGRA32:
- case QVideoSurfaceFormat::Format_BGRA32_Premultiplied:
- case QVideoSurfaceFormat::Format_ABGR32:
- case QVideoSurfaceFormat::Format_BGR32:
+ case QVideoFrameFormat::Format_ARGB32:
+ case QVideoFrameFormat::Format_ARGB32_Premultiplied:
+ case QVideoFrameFormat::Format_RGB32:
+ case QVideoFrameFormat::Format_BGRA32:
+ case QVideoFrameFormat::Format_BGRA32_Premultiplied:
+ case QVideoFrameFormat::Format_ABGR32:
+ case QVideoFrameFormat::Format_BGR32:
- case QVideoSurfaceFormat::Format_Y8:
- case QVideoSurfaceFormat::Format_Y16:
+ case QVideoFrameFormat::Format_Y8:
+ case QVideoFrameFormat::Format_Y16:
break;
- case QVideoSurfaceFormat::Format_IMC1:
- case QVideoSurfaceFormat::Format_IMC2:
- case QVideoSurfaceFormat::Format_IMC3:
- case QVideoSurfaceFormat::Format_IMC4:
- case QVideoSurfaceFormat::Format_AYUV444:
- case QVideoSurfaceFormat::Format_AYUV444_Premultiplied:
- case QVideoSurfaceFormat::Format_YUV420P:
- case QVideoSurfaceFormat::Format_YUV422P:
- case QVideoSurfaceFormat::Format_YV12:
- case QVideoSurfaceFormat::Format_UYVY:
- case QVideoSurfaceFormat::Format_YUYV:
- case QVideoSurfaceFormat::Format_NV12:
- case QVideoSurfaceFormat::Format_NV21:
- case QVideoSurfaceFormat::Format_P010:
- case QVideoSurfaceFormat::Format_P016:
+ case QVideoFrameFormat::Format_IMC1:
+ case QVideoFrameFormat::Format_IMC2:
+ case QVideoFrameFormat::Format_IMC3:
+ case QVideoFrameFormat::Format_IMC4:
+ case QVideoFrameFormat::Format_AYUV444:
+ case QVideoFrameFormat::Format_AYUV444_Premultiplied:
+ case QVideoFrameFormat::Format_YUV420P:
+ case QVideoFrameFormat::Format_YUV422P:
+ case QVideoFrameFormat::Format_YV12:
+ case QVideoFrameFormat::Format_UYVY:
+ case QVideoFrameFormat::Format_YUYV:
+ case QVideoFrameFormat::Format_NV12:
+ case QVideoFrameFormat::Format_NV21:
+ case QVideoFrameFormat::Format_P010:
+ case QVideoFrameFormat::Format_P016:
cmat = colorMatrix(format.yCbCrColorSpace());
break;
}
@@ -348,8 +348,8 @@ QByteArray uniformData(const QVideoSurfaceFormat &format, const QMatrix4x4 &tran
int updateRhiTextures(QVideoFrame frame, QRhi *rhi, QRhiResourceUpdateBatch *resourceUpdates, QRhiTexture **textures)
{
- QVideoSurfaceFormat fmt = frame.surfaceFormat();
- QVideoSurfaceFormat::PixelFormat pixelFormat = fmt.pixelFormat();
+ QVideoFrameFormat fmt = frame.surfaceFormat();
+ QVideoFrameFormat::PixelFormat pixelFormat = fmt.pixelFormat();
QSize size = fmt.frameSize();
const TextureDescription *description = descriptions + pixelFormat;
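
The color matrices above are the standard YCbCr-to-RGB conversions with the chroma offset folded into the last column. A small standalone check for the full-range YCbCr_JPEG matrix — a sketch, not part of the patch:

    #include <QMatrix4x4>
    #include <QVector4D>

    int main()
    {
        // Same values as the YCbCr_JPEG case in colorMatrix() above.
        const QMatrix4x4 jpeg(
            1.0f,  0.000f,  1.402f, -0.701f,
            1.0f, -0.344f, -0.714f,  0.529f,
            1.0f,  1.772f,  0.000f, -0.886f,
            0.0f,  0.000f,  0.000f,  1.000f);

        // With neutral chroma (Cb = Cr = 0.5) the offsets cancel and RGB == (Y, Y, Y),
        // e.g. R = Y + 1.402 * (Cr - 0.5) = Y + 1.402 * 0.5 - 0.701 = Y.
        const QVector4D rgb = jpeg * QVector4D(0.5f, 0.5f, 0.5f, 1.0f);
        return (qFuzzyCompare(rgb.x(), 0.5f) && qFuzzyCompare(rgb.y(), 0.5f)
                && qFuzzyCompare(rgb.z(), 0.5f)) ? 0 : 1;
    }
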
diff --git a/src/multimedia/video/qvideotexturehelper_p.h b/src/multimedia/video/qvideotexturehelper_p.h
index 4b369594a..153752010 100644
--- a/src/multimedia/video/qvideotexturehelper_p.h
+++ b/src/multimedia/video/qvideotexturehelper_p.h
@@ -40,7 +40,7 @@
#ifndef QVIDEOTEXTUREHELPER_H
#define QVIDEOTEXTUREHELPER_H
-#include <qvideosurfaceformat.h>
+#include <qvideoframeformat.h>
#include <private/qrhi_p.h>
QT_BEGIN_NAMESPACE
@@ -63,11 +63,11 @@ struct TextureDescription
SizeScale sizeScale[maxPlanes];
};
-Q_MULTIMEDIA_EXPORT const TextureDescription *textureDescription(QVideoSurfaceFormat::PixelFormat format);
+Q_MULTIMEDIA_EXPORT const TextureDescription *textureDescription(QVideoFrameFormat::PixelFormat format);
-Q_MULTIMEDIA_EXPORT QString vertexShaderFileName(QVideoSurfaceFormat::PixelFormat format);
-Q_MULTIMEDIA_EXPORT QString fragmentShaderFileName(QVideoSurfaceFormat::PixelFormat format);
-Q_MULTIMEDIA_EXPORT QByteArray uniformData(const QVideoSurfaceFormat &format, const QMatrix4x4 &transform, float opacity);
+Q_MULTIMEDIA_EXPORT QString vertexShaderFileName(QVideoFrameFormat::PixelFormat format);
+Q_MULTIMEDIA_EXPORT QString fragmentShaderFileName(QVideoFrameFormat::PixelFormat format);
+Q_MULTIMEDIA_EXPORT QByteArray uniformData(const QVideoFrameFormat &format, const QMatrix4x4 &transform, float opacity);
Q_MULTIMEDIA_EXPORT int updateRhiTextures(QVideoFrame frame, QRhi *rhi,
QRhiResourceUpdateBatch *resourceUpdates, QRhiTexture **textures);
diff --git a/src/multimedia/video/video.pri b/src/multimedia/video/video.pri
index 50d1a260b..c6ba59c2b 100644
--- a/src/multimedia/video/video.pri
+++ b/src/multimedia/video/video.pri
@@ -4,7 +4,7 @@ INCLUDEPATH += video
PUBLIC_HEADERS += \
video/qabstractvideosurface.h \
video/qvideoframe.h \
- video/qvideosurfaceformat.h \
+ video/qvideoframeformat.h \
PRIVATE_HEADERS += \
video/qabstractvideobuffer_p.h \
@@ -21,7 +21,7 @@ SOURCES += \
video/qmemoryvideobuffer.cpp \
video/qvideoframe.cpp \
video/qvideooutputorientationhandler.cpp \
- video/qvideosurfaceformat.cpp \
+ video/qvideoframeformat.cpp \
video/qvideoframeconversionhelper.cpp \
video/qvideosurfaces.cpp