Diffstat (limited to 'src/multimedia/video/qvideotexturehelper.cpp')
-rw-r--r--  src/multimedia/video/qvideotexturehelper.cpp | 300
1 file changed, 159 insertions(+), 141 deletions(-)
diff --git a/src/multimedia/video/qvideotexturehelper.cpp b/src/multimedia/video/qvideotexturehelper.cpp
index 48d328b51..e9c26044c 100644
--- a/src/multimedia/video/qvideotexturehelper.cpp
+++ b/src/multimedia/video/qvideotexturehelper.cpp
@@ -1,53 +1,15 @@
-/****************************************************************************
-**
-** Copyright (C) 2021 The Qt Company Ltd.
-** Contact: https://www.qt.io/licensing/
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see https://www.qt.io/terms-conditions. For further
-** information use the contact form at https://www.qt.io/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 3 as published by the Free Software
-** Foundation and appearing in the file LICENSE.LGPL3 included in the
-** packaging of this file. Please review the following information to
-** ensure the GNU Lesser General Public License version 3 requirements
-** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
-**
-** GNU General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU
-** General Public License version 2.0 or (at your option) the GNU General
-** Public license version 3 or any later version approved by the KDE Free
-** Qt Foundation. The licenses are as published by the Free Software
-** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
-** included in the packaging of this file. Please review the following
-** information to ensure the GNU General Public License requirements will
-** be met: https://www.gnu.org/licenses/gpl-2.0.html and
-** https://www.gnu.org/licenses/gpl-3.0.html.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
#include "qvideotexturehelper_p.h"
-#include "qvideoframe.h"
#include "qabstractvideobuffer_p.h"
+#include "qvideoframeconverter_p.h"
#include <qpainter.h>
#include <qloggingcategory.h>
QT_BEGIN_NAMESPACE
-Q_LOGGING_CATEGORY(qLcVideoTextureHelper, "qt.multimedia.video.texturehelper")
-
namespace QVideoTextureHelper
{
@@ -138,7 +100,7 @@ static const TextureDescription descriptions[QVideoFrameFormat::NPixelFormats] =
},
// Format_YUV422P
{ 3, 1,
- [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
+ [](int stride, int height) { return stride * height * 2; },
{ QRhiTexture::R8, QRhiTexture::R8, QRhiTexture::R8 },
{ { 1, 1 }, { 2, 1 }, { 2, 1 } }
},
@@ -251,7 +213,7 @@ static const TextureDescription descriptions[QVideoFrameFormat::NPixelFormats] =
{ { 1, 1 }, { 1, 1 }, { 1, 1 } }
},
// Format_YUV420P10
- { 3, 1,
+ { 3, 2,
[](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
{ QRhiTexture::R16, QRhiTexture::R16, QRhiTexture::R16 },
{ { 1, 1 }, { 2, 2 }, { 2, 2 } }
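Editorial note, not part of the patch: the two hunks above adjust entries in the TextureDescription table. For Format_YUV422P the byte requirement simplifies to two bytes per luma sample position (a full-size Y plane plus two half-width chroma planes), and for Format_YUV420P10 the second field changes from 1 to 2, which, assuming that field is the per-sample stride factor (a detail not visible in this diff), accounts for 10-bit samples being stored in 16-bit words and uploaded as R16 textures. A minimal sketch of the resulting size calculation:

#include <cassert>

// Mirrors the Format_YUV420P10 bytesRequired lambda shown above.
static int yuv420p10Bytes(int stride, int height)
{
    return stride * ((height * 3 / 2 + 1) & ~1);
}

int main()
{
    // 1920x1080 frame, 10-bit samples stored in 16-bit words, hence stride = 1920 * 2.
    const int stride = 1920 * 2;
    assert(yuv420p10Bytes(stride, 1080) == 3840 * 1620); // 6220800 bytes for all three planes
    return 0;
}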
@@ -408,6 +370,8 @@ QString fragmentShaderFileName(const QVideoFrameFormat &format, QRhiSwapChain::F
// d = 1.42
// e = 1.772
//
+
+// clang-format off
static QMatrix4x4 colorMatrix(const QVideoFrameFormat &format)
{
auto colorSpace = format.colorSpace();
@@ -421,86 +385,60 @@ static QMatrix4x4 colorMatrix(const QVideoFrameFormat &format)
}
switch (colorSpace) {
case QVideoFrameFormat::ColorSpace_AdobeRgb:
- return QMatrix4x4(
+ return {
1.0f, 0.000f, 1.402f, -0.701f,
1.0f, -0.344f, -0.714f, 0.529f,
1.0f, 1.772f, 0.000f, -0.886f,
- 0.0f, 0.000f, 0.000f, 1.0000f);
+ 0.0f, 0.000f, 0.000f, 1.000f
+ };
default:
case QVideoFrameFormat::ColorSpace_BT709:
if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
- return QMatrix4x4(
- 1.f, 0.000f, 1.5748f, -0.8774f,
- 1.f, -0.187324f, -0.468124f, 0.327724f,
- 1.f, 1.8556f, 0.000f, -0.9278f,
- 0.0f, 0.000f, 0.000f, 1.0000f);
- return QMatrix4x4(
- 1.1644f, 0.000f, 1.7928f, -0.9731f,
- 1.1644f, -0.5329f, -0.2132f, 0.3015f,
- 1.1644f, 2.1124f, 0.000f, -1.1335f,
- 0.0f, 0.000f, 0.000f, 1.0000f);
+ return {
+ 1.0f, 0.0f, 1.5748f, -0.790488f,
+ 1.0f, -0.187324f, -0.468124f, 0.329010f,
+ 1.0f, 1.855600f, 0.0f, -0.931439f,
+ 0.0f, 0.0f, 0.0f, 1.0f
+ };
+ return {
+ 1.1644f, 0.0000f, 1.7927f, -0.9729f,
+ 1.1644f, -0.2132f, -0.5329f, 0.3015f,
+ 1.1644f, 2.1124f, 0.0000f, -1.1334f,
+ 0.0000f, 0.0000f, 0.0000f, 1.0000f
+ };
case QVideoFrameFormat::ColorSpace_BT2020:
if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
- return QMatrix4x4(
- 1.f, 0.000f, 1.4746f, -0.7373f,
- 1.f, -0.2801f, -0.91666f, 0.5984f,
- 1.f, 1.8814f, 0.000f, -0.9407f,
- 0.0f, 0.000f, 0.000f, 1.0000f);
- return QMatrix4x4(
- 1.1644f, 0.000f, 1.6787f, -0.9158f,
- 1.1644f, -0.1874f, -0.6511f, 0.3478f,
- 1.1644f, 2.1418f, 0.000f, -1.1483f,
- 0.0f, 0.000f, 0.000f, 1.0000f);
+ return {
+ 1.f, 0.0000f, 1.4746f, -0.7402f,
+ 1.f, -0.1646f, -0.5714f, 0.3694f,
+ 1.f, 1.8814f, 0.000f, -0.9445f,
+ 0.0f, 0.0000f, 0.000f, 1.0000f
+ };
+ return {
+ 1.1644f, 0.000f, 1.6787f, -0.9157f,
+ 1.1644f, -0.1874f, -0.6504f, 0.3475f,
+ 1.1644f, 2.1418f, 0.0000f, -1.1483f,
+ 0.0000f, 0.0000f, 0.0000f, 1.0000f
+ };
case QVideoFrameFormat::ColorSpace_BT601:
// Corresponds to the primaries used by NTSC BT601. For PAL BT601, we use the BT709 conversion
// as those are very close.
if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
- return QMatrix4x4(
- 1.f, 0.000f, 1.772f, -0.886f,
+ return {
+ 1.f, 0.000f, 1.772f, -0.886f,
1.f, -0.1646f, -0.57135f, 0.36795f,
- 1.f, 1.42f, 0.000f, -0.71f,
- 0.0f, 0.000f, 0.000f, 1.0000f);
- return QMatrix4x4(
+ 1.f, 1.42f, 0.000f, -0.71f,
+ 0.0f, 0.000f, 0.000f, 1.0000f
+ };
+ return {
1.164f, 0.000f, 1.596f, -0.8708f,
1.164f, -0.392f, -0.813f, 0.5296f,
- 1.164f, 2.017f, 0.000f, -1.081f,
- 0.0f, 0.000f, 0.000f, 1.0000f);
+ 1.164f, 2.017f, 0.000f, -1.0810f,
+ 0.000f, 0.000f, 0.000f, 1.0000f
+ };
}
}
-
-#if 0
-static QMatrix4x4 yuvColorCorrectionMatrix(float brightness, float contrast, float hue, float saturation)
-{
- // Color correction in YUV space is done as follows:
-
- // The formulas assumes values in range 0-255, and a blackpoint of Y=16, whitepoint of Y=235
- //
- // Bightness: b
- // Contrast: c
- // Hue: h
- // Saturation: s
- //
- // Y' = (Y - 16)*c + b + 16
- // U' = ((U - 128)*cos(h) + (V - 128)*sin(h))*c*s + 128
- // V' = ((V - 128)*cos(h) - (U - 128)*sin(h))*c*s + 128
- //
- // For normalized YUV values (0-1 range) as we have them in the pixel shader, this translates to:
- //
- // Y' = (Y - .0625)*c + b + .0625
- // U' = ((U - .5)*cos(h) + (V - .5)*sin(h))*c*s + .5
- // V' = ((V - .5)*cos(h) - (U - .5)*sin(h))*c*s + .5
- //
- // The values need to be clamped to 0-1 after the correction and before converting to RGB
- // The transformation can be encoded in a 4x4 matrix assuming we have an A component of 1
-
- float chcs = cos(hue)*contrast*saturation;
- float shcs = sin(hue)*contrast*saturation;
- return QMatrix4x4(contrast, 0, 0, .0625*(1 - contrast) + brightness,
- 0, chcs, shcs, .5*(1 - chcs - shcs),
- 0, -shcs, chcs, .5*(1 + shcs - chcs),
- 0, 0, 0, 1);
-}
-#endif
+// clang-format on
// PQ transfer function, see also https://en.wikipedia.org/wiki/Perceptual_quantizer
// or https://ieeexplore.ieee.org/document/7291452
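Editorial note, not part of the patch: the corrected video-range BT.709 matrix in the hunk above (the Cb and Cr coefficients of the green row were previously swapped) can be sanity-checked by mapping broadcast black (Y = 16/255, Cb = Cr = 128/255) and reference white (Y = 235/255) through it; they should land on RGB 0 and RGB 1 respectively, up to rounding. A minimal sketch using Qt types:

#include <QMatrix4x4>
#include <QVector4D>
#include <QtDebug>

int main()
{
    // Video-range BT.709 matrix as written in the patch above (row-major).
    const QMatrix4x4 bt709Video(
        1.1644f,  0.0000f,  1.7927f, -0.9729f,
        1.1644f, -0.2132f, -0.5329f,  0.3015f,
        1.1644f,  2.1124f,  0.0000f, -1.1334f,
        0.0000f,  0.0000f,  0.0000f,  1.0000f);

    const QVector4D black(16.f / 255.f, 128.f / 255.f, 128.f / 255.f, 1.f);
    const QVector4D white(235.f / 255.f, 128.f / 255.f, 128.f / 255.f, 1.f);

    qDebug() << bt709Video * black; // approximately (0, 0, 0, 1)
    qDebug() << bt709Video * white; // approximately (1, 1, 1, 1)
    return 0;
}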
@@ -623,12 +561,17 @@ void updateUniformData(QByteArray *dst, const QVideoFrameFormat &format, const Q
ud->maxLum = fromLinear(float(maxNits)/100.f);
}
-static bool updateTextureWithMap(QVideoFrame frame, QRhi *rhi, QRhiResourceUpdateBatch *rub, int plane, std::unique_ptr<QRhiTexture> &tex)
+enum class UpdateTextureWithMapResult : uint8_t {
+ Failed,
+ UpdatedWithDataCopy,
+ UpdatedWithDataReference
+};
+
+static UpdateTextureWithMapResult updateTextureWithMap(const QVideoFrame &frame, QRhi *rhi,
+ QRhiResourceUpdateBatch *rub, int plane,
+ std::unique_ptr<QRhiTexture> &tex)
{
- if (!frame.map(QVideoFrame::ReadOnly)) {
- qWarning() << "could not map data of QVideoFrame for upload";
- return false;
- }
+ Q_ASSERT(frame.isMapped());
QVideoFrameFormat fmt = frame.surfaceFormat();
QVideoFrameFormat::PixelFormat pixelFormat = fmt.pixelFormat();
@@ -642,7 +585,7 @@ static bool updateTextureWithMap(QVideoFrame frame, QRhi *rhi, QRhiResourceUpdat
tex.reset(rhi->newTexture(texDesc.textureFormat[plane], planeSize, 1, {}));
if (!tex) {
qWarning("Failed to create new texture (size %dx%d)", planeSize.width(), planeSize.height());
- return false;
+ return UpdateTextureWithMapResult::Failed;
}
}
@@ -651,30 +594,40 @@ static bool updateTextureWithMap(QVideoFrame frame, QRhi *rhi, QRhiResourceUpdat
tex->setPixelSize(planeSize);
if (!tex->create()) {
qWarning("Failed to create texture (size %dx%d)", planeSize.width(), planeSize.height());
- return false;
+ return UpdateTextureWithMapResult::Failed;
}
}
+ auto result = UpdateTextureWithMapResult::UpdatedWithDataCopy;
+
QRhiTextureSubresourceUploadDescription subresDesc;
- QImage image;
+
if (pixelFormat == QVideoFrameFormat::Format_Jpeg) {
+ Q_ASSERT(plane == 0);
+
+ QImage image;
+
+        // Calling QVideoFrame::toImage here is not accurate; to be fixed.
image = frame.toImage();
image.convertTo(QImage::Format_ARGB32);
- subresDesc.setData(QByteArray((const char *)image.bits(), image.bytesPerLine()*image.height()));
- subresDesc.setDataStride(image.bytesPerLine());
+ subresDesc.setImage(image);
+
} else {
- subresDesc.setData(QByteArray::fromRawData((const char *)frame.bits(plane), frame.mappedBytes(plane)));
+        // Note: QByteArray::fromRawData creates the QByteArray as a view without copying the data
+ subresDesc.setData(QByteArray::fromRawData(
+ reinterpret_cast<const char *>(frame.bits(plane)), frame.mappedBytes(plane)));
subresDesc.setDataStride(frame.bytesPerLine(plane));
+ result = UpdateTextureWithMapResult::UpdatedWithDataReference;
}
QRhiTextureUploadEntry entry(0, 0, subresDesc);
QRhiTextureUploadDescription desc({ entry });
rub->uploadTexture(tex.get(), desc);
- return true;
+ return result;
}
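Editorial note, not part of the patch: updateTextureWithMap now reports UpdatedWithDataReference when it uploads via QByteArray::fromRawData, because that call wraps the mapped plane without copying it; the upload data therefore stays valid only while the frame remains mapped, which is what QVideoFrameTexturesArray further down keeps track of. A minimal, self-contained sketch of the aliasing behaviour (the helper name is illustrative):

#include <QByteArray>
#include <cstring>

// Wraps an externally owned buffer (standing in for a mapped QVideoFrame plane)
// without copying it; the view is only valid while the buffer stays alive.
static QByteArray wrapMappedPlane(const char *mapped, qsizetype size)
{
    return QByteArray::fromRawData(mapped, size);
}

int main()
{
    char plane[16];
    std::memset(plane, 0x80, sizeof(plane));

    const QByteArray view = wrapMappedPlane(plane, sizeof(plane));
    // 'view' aliases 'plane': no copy was made, so 'plane' must outlive 'view',
    // just as the mapped frame must outlive the texture upload that references it.
    return view.size() == qsizetype(sizeof(plane)) ? 0 : 1;
}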
-static bool updateTextureWithHandle(QVideoFrame frame, QRhi *rhi, int plane, std::unique_ptr<QRhiTexture> &tex)
+static std::unique_ptr<QRhiTexture> createTextureFromHandle(const QVideoFrame &frame, QRhi *rhi, int plane)
{
QVideoFrameFormat fmt = frame.surfaceFormat();
QVideoFrameFormat::PixelFormat pixelFormat = fmt.pixelFormat();
@@ -697,38 +650,103 @@ static bool updateTextureWithHandle(QVideoFrame frame, QRhi *rhi, int plane, std
#endif
}
- if (quint64 handle = frame.textureHandle(plane); handle) {
- tex.reset(rhi->newTexture(texDesc.textureFormat[plane], planeSize, 1, textureFlags));
- if (!tex->createFrom({handle, 0})) {
- qWarning("Failed to initialize QRhiTexture wrapper for native texture object %llu",handle);
- return false;
- }
- } else {
- qCDebug(qLcVideoTextureHelper) << "Incorrect texture handle from QVideoFrame, trying to map and upload texture";
- return false;
+ if (quint64 handle = frame.videoBuffer()->textureHandle(rhi, plane); handle) {
+ std::unique_ptr<QRhiTexture> tex(rhi->newTexture(texDesc.textureFormat[plane], planeSize, 1, textureFlags));
+ if (tex->createFrom({handle, 0}))
+ return tex;
+
+        qWarning("Failed to initialize QRhiTexture wrapper for native texture object %llu", handle);
}
- return true;
+ return {};
}
-void updateRhiTexture(QVideoFrame frame, QRhi *rhi, QRhiResourceUpdateBatch *rub, int plane, std::unique_ptr<QRhiTexture> &tex)
+class QVideoFrameTexturesArray : public QVideoFrameTextures
+{
+public:
+ using TextureArray = std::array<std::unique_ptr<QRhiTexture>, TextureDescription::maxPlanes>;
+ QVideoFrameTexturesArray(TextureArray &&textures, QVideoFrame mappedFrame = {})
+ : m_textures(std::move(textures)), m_mappedFrame(std::move(mappedFrame))
+ {
+ Q_ASSERT(!m_mappedFrame.isValid() || m_mappedFrame.isReadable());
+ }
+
+    // We keep the source frame mapped for the lifetime of the target textures.
+    // Alternatively, we could set a custom image on QRhiTextureSubresourceUploadDescription
+    // using videoFramePlaneAsImage; however, the OpenGL rendering pipeline in QRhi
+    // may keep the QImage, and consequently the mapped QVideoFrame, alive
+    // even after the target texture is deleted: QTBUG-123174.
+ ~QVideoFrameTexturesArray() { m_mappedFrame.unmap(); }
+
+ QRhiTexture *texture(uint plane) const override
+ {
+ return plane < std::size(m_textures) ? m_textures[plane].get() : nullptr;
+ }
+
+ TextureArray takeTextures() { return std::move(m_textures); }
+
+private:
+ TextureArray m_textures;
+ QVideoFrame m_mappedFrame;
+};
+
+static std::unique_ptr<QVideoFrameTextures> createTexturesFromHandles(const QVideoFrame &frame, QRhi *rhi)
{
const TextureDescription &texDesc = descriptions[frame.surfaceFormat().pixelFormat()];
- if (plane >= texDesc.nplanes) {
- tex.reset();
- return;
+ bool ok = true;
+ QVideoFrameTexturesArray::TextureArray textures;
+ for (quint8 plane = 0; plane < texDesc.nplanes; ++plane) {
+ textures[plane] = QVideoTextureHelper::createTextureFromHandle(frame, rhi, plane);
+ ok &= bool(textures[plane]);
}
+ if (ok)
+ return std::make_unique<QVideoFrameTexturesArray>(std::move(textures));
+ else
+ return {};
+}
- if (frame.handleType() == QVideoFrame::RhiTextureHandle) {
- if (std::unique_ptr<QRhiTexture> ftex = frame.rhiTexture(plane); ftex) {
- tex = std::move(ftex);
- return;
- }
+static std::unique_ptr<QVideoFrameTextures> createTexturesFromMemory(QVideoFrame frame, QRhi *rhi, QRhiResourceUpdateBatch *rub, QVideoFrameTextures *old)
+{
+ const TextureDescription &texDesc = descriptions[frame.surfaceFormat().pixelFormat()];
+ QVideoFrameTexturesArray::TextureArray textures;
+ auto oldArray = dynamic_cast<QVideoFrameTexturesArray *>(old);
+ if (oldArray)
+ textures = oldArray->takeTextures();
+
+ if (!frame.map(QVideoFrame::ReadOnly)) {
+ qWarning() << "Cannot map a video frame in ReadOnly mode!";
+ return {};
+ }
+
+ auto unmapFrameGuard = qScopeGuard([&frame] { frame.unmap(); });
+
+ bool shouldKeepMapping = false;
+ for (quint8 plane = 0; plane < texDesc.nplanes; ++plane) {
+ const auto result = updateTextureWithMap(frame, rhi, rub, plane, textures[plane]);
+ if (result == UpdateTextureWithMapResult::Failed)
+ return {};
- if (QVideoTextureHelper::updateTextureWithHandle(frame, rhi, plane, tex))
- return;
+ if (result == UpdateTextureWithMapResult::UpdatedWithDataReference)
+ shouldKeepMapping = true;
}
- QVideoTextureHelper::updateTextureWithMap(frame, rhi, rub, plane, tex);
+    // QVideoFrame::unmap is a no-op on a null frame, so the scope guard above stays harmless
+    // whether we move the mapped frame into the result or pass a null frame instead.
+ return std::make_unique<QVideoFrameTexturesArray>(
+ std::move(textures), shouldKeepMapping ? std::move(frame) : QVideoFrame());
+}
+
+std::unique_ptr<QVideoFrameTextures> createTextures(QVideoFrame &frame, QRhi *rhi, QRhiResourceUpdateBatch *rub, std::unique_ptr<QVideoFrameTextures> &&oldTextures)
+{
+ QAbstractVideoBuffer *vf = frame.videoBuffer();
+ if (!vf)
+ return {};
+
+ if (auto vft = vf->mapTextures(rhi))
+ return vft;
+
+ if (auto vft = createTexturesFromHandles(frame, rhi))
+ return vft;
+
+ return createTexturesFromMemory(frame, rhi, rub, oldTextures.get());
}
bool SubtitleLayout::update(const QSize &frameSize, QString text)
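Editorial note, not part of the patch: the per-plane updateRhiTexture entry point is replaced by createTextures, which tries, in order, the video buffer's own mapTextures, wrapping native texture handles via createTexturesFromHandles, and finally a CPU upload through createTexturesFromMemory. A hypothetical caller (class and member names are illustrative, not from Qt) that recycles the previous texture set so the upload path can reuse its QRhiTexture allocations might look roughly like this:

#include <memory>
#include <private/qvideotexturehelper_p.h>

class FrameRenderer
{
public:
    // Called once per frame before recording the render pass.
    void prepareFrame(QVideoFrame frame, QRhi *rhi, QRhiResourceUpdateBatch *rub)
    {
        // Handing back the previous textures lets updateTextureWithMap reuse the
        // existing QRhiTexture objects instead of recreating them every frame.
        m_textures = QVideoTextureHelper::createTextures(frame, rhi, rub,
                                                         std::move(m_textures));
    }

    QRhiTexture *planeTexture(uint plane) const
    {
        return m_textures ? m_textures->texture(plane) : nullptr;
    }

private:
    std::unique_ptr<QVideoFrameTextures> m_textures;
};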