Diffstat (limited to 'src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp')
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp | 245
1 file changed, 110 insertions(+), 135 deletions(-)
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp b/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp
index 144281896..1f2f362b4 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp
@@ -1,45 +1,11 @@
-/****************************************************************************
-**
-** Copyright (C) 2021 The Qt Company Ltd.
-** Contact: https://www.qt.io/licensing/
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see https://www.qt.io/terms-conditions. For further
-** information use the contact form at https://www.qt.io/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 3 as published by the Free Software
-** Foundation and appearing in the file LICENSE.LGPL3 included in the
-** packaging of this file. Please review the following information to
-** ensure the GNU Lesser General Public License version 3 requirements
-** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
-**
-** GNU General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU
-** General Public License version 2.0 or (at your option) the GNU General
-** Public license version 3 or any later version approved by the KDE Free
-** Qt Foundation. The licenses are as published by the Free Software
-** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
-** included in the packaging of this file. Please review the following
-** information to ensure the GNU General Public License requirements will
-** be met: https://www.gnu.org/licenses/gpl-2.0.html and
-** https://www.gnu.org/licenses/gpl-3.0.html.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
#include "qffmpegvideobuffer_p.h"
#include "private/qvideotexturehelper_p.h"
+#include "private/qmultimediautils_p.h"
#include "qffmpeghwaccel_p.h"
+#include "qloggingcategory.h"
extern "C" {
#include <libavutil/pixdesc.h>
@@ -49,68 +15,76 @@ extern "C" {
QT_BEGIN_NAMESPACE
-QFFmpegVideoBuffer::QFFmpegVideoBuffer(AVFrame *frame)
- : QAbstractVideoBuffer(QVideoFrame::NoHandle)
- , frame(frame)
+static bool isFrameFlipped(const AVFrame& frame) {
+ for (int i = 0; i < AV_NUM_DATA_POINTERS && frame.data[i]; ++i) {
+ if (frame.linesize[i] < 0)
+ return true;
+ }
+
+ return false;
+}
+
+static Q_LOGGING_CATEGORY(qLcFFmpegVideoBuffer, "qt.multimedia.ffmpeg.videobuffer");
+
+QFFmpegVideoBuffer::QFFmpegVideoBuffer(AVFrameUPtr frame, AVRational pixelAspectRatio)
+ : QAbstractVideoBuffer(QVideoFrame::NoHandle),
+ m_frame(frame.get()),
+ m_size(qCalculateFrameSize({ frame->width, frame->height },
+ { pixelAspectRatio.num, pixelAspectRatio.den }))
{
if (frame->hw_frames_ctx) {
- hwFrame = frame;
- m_pixelFormat = toQtPixelFormat(QFFmpeg::HWAccel::format(frame));
+ m_hwFrame = std::move(frame);
+ m_pixelFormat = toQtPixelFormat(QFFmpeg::HWAccel::format(m_hwFrame.get()));
return;
}
- swFrame = frame;
- m_pixelFormat = toQtPixelFormat(AVPixelFormat(swFrame->format));
+ m_swFrame = std::move(frame);
+ m_pixelFormat = toQtPixelFormat(AVPixelFormat(m_swFrame->format));
convertSWFrame();
}
-QFFmpegVideoBuffer::~QFFmpegVideoBuffer()
-{
- delete textures;
- if (swFrame)
- av_frame_free(&swFrame);
- if (hwFrame)
- av_frame_free(&hwFrame);
-}
+QFFmpegVideoBuffer::~QFFmpegVideoBuffer() = default;
void QFFmpegVideoBuffer::convertSWFrame()
{
- Q_ASSERT(swFrame);
- bool needsConversion = false;
- auto pixelFormat = toQtPixelFormat(AVPixelFormat(swFrame->format), &needsConversion);
-// qDebug() << "SW frame format:" << pixelFormat << swFrame->format << needsConversion;
+ Q_ASSERT(m_swFrame);
+
+ const auto actualAVPixelFormat = AVPixelFormat(m_swFrame->format);
+ const auto targetAVPixelFormat = toAVPixelFormat(m_pixelFormat);
- if (pixelFormat != m_pixelFormat) {
- AVPixelFormat newFormat = toAVPixelFormat(m_pixelFormat);
+ if (actualAVPixelFormat != targetAVPixelFormat || isFrameFlipped(*m_swFrame)
+ || m_size != QSize(m_swFrame->width, m_swFrame->height)) {
+ Q_ASSERT(toQtPixelFormat(targetAVPixelFormat) == m_pixelFormat);
// convert the format into something we can handle
- SwsContext *c = sws_getContext(swFrame->width, swFrame->height, AVPixelFormat(swFrame->format),
- swFrame->width, swFrame->height, newFormat,
+ SwsContext *c = sws_getContext(m_swFrame->width, m_swFrame->height, actualAVPixelFormat,
+ m_size.width(), m_size.height(), targetAVPixelFormat,
SWS_BICUBIC, nullptr, nullptr, nullptr);
- AVFrame *newFrame = av_frame_alloc();
- newFrame->width = swFrame->width;
- newFrame->height = swFrame->height;
- newFrame->format = newFormat;
- av_frame_get_buffer(newFrame, 0);
+ auto newFrame = QFFmpeg::makeAVFrame();
+ newFrame->width = m_size.width();
+ newFrame->height = m_size.height();
+ newFrame->format = targetAVPixelFormat;
+ av_frame_get_buffer(newFrame.get(), 0);
- sws_scale(c, swFrame->data, swFrame->linesize, 0, swFrame->height, newFrame->data, newFrame->linesize);
- av_frame_free(&swFrame);
- swFrame = newFrame;
+ sws_scale(c, m_swFrame->data, m_swFrame->linesize, 0, m_swFrame->height, newFrame->data, newFrame->linesize);
+ if (m_frame == m_swFrame.get())
+ m_frame = newFrame.get();
+ m_swFrame = std::move(newFrame);
sws_freeContext(c);
}
}
void QFFmpegVideoBuffer::setTextureConverter(const QFFmpeg::TextureConverter &converter)
{
- textureConverter = converter;
- textureConverter.init(hwFrame);
+ m_textureConverter = converter;
+ m_textureConverter.init(m_hwFrame.get());
m_type = converter.isNull() ? QVideoFrame::NoHandle : QVideoFrame::RhiTextureHandle;
}
QVideoFrameFormat::ColorSpace QFFmpegVideoBuffer::colorSpace() const
{
- switch (frame->colorspace) {
+ switch (m_frame->colorspace) {
default:
case AVCOL_SPC_UNSPECIFIED:
case AVCOL_SPC_RESERVED:
@@ -137,38 +111,12 @@ QVideoFrameFormat::ColorSpace QFFmpegVideoBuffer::colorSpace() const
QVideoFrameFormat::ColorTransfer QFFmpegVideoBuffer::colorTransfer() const
{
- switch (frame->color_trc) {
- case AVCOL_TRC_BT709:
- // The following three cases have transfer characteristics identical to BT709
- case AVCOL_TRC_BT1361_ECG:
- case AVCOL_TRC_BT2020_10:
- case AVCOL_TRC_BT2020_12:
- case AVCOL_TRC_SMPTE240M: // almost identical to bt709
- return QVideoFrameFormat::ColorTransfer_BT709;
- case AVCOL_TRC_GAMMA22:
- case AVCOL_TRC_SMPTE428 : // No idea, let's hope for the best...
- case AVCOL_TRC_IEC61966_2_1: // sRGB, close enough to 2.2...
- case AVCOL_TRC_IEC61966_2_4: // not quite, but probably close enough
- return QVideoFrameFormat::ColorTransfer_Gamma22;
- case AVCOL_TRC_GAMMA28:
- return QVideoFrameFormat::ColorTransfer_Gamma28;
- case AVCOL_TRC_SMPTE170M:
- return QVideoFrameFormat::ColorTransfer_BT601;
- case AVCOL_TRC_LINEAR:
- return QVideoFrameFormat::ColorTransfer_Linear;
- case AVCOL_TRC_SMPTE2084:
- return QVideoFrameFormat::ColorTransfer_ST2084;
- case AVCOL_TRC_ARIB_STD_B67:
- return QVideoFrameFormat::ColorTransfer_STD_B67;
- default:
- break;
- }
- return QVideoFrameFormat::ColorTransfer_Unknown;
+ return QFFmpeg::fromAvColorTransfer(m_frame->color_trc);
}
QVideoFrameFormat::ColorRange QFFmpegVideoBuffer::colorRange() const
{
- switch (frame->color_range) {
+ switch (m_frame->color_range) {
case AVCOL_RANGE_MPEG:
return QVideoFrameFormat::ColorRange_Video;
case AVCOL_RANGE_JPEG:
@@ -181,31 +129,28 @@ QVideoFrameFormat::ColorRange QFFmpegVideoBuffer::colorRange() const
float QFFmpegVideoBuffer::maxNits()
{
float maxNits = -1;
- for (int i = 0; i <frame->nb_side_data; ++i) {
- AVFrameSideData *sd = frame->side_data[i];
+ for (int i = 0; i < m_frame->nb_side_data; ++i) {
+ AVFrameSideData *sd = m_frame->side_data[i];
// TODO: Longer term we might want to also support HDR10+ dynamic metadata
if (sd->type == AV_FRAME_DATA_MASTERING_DISPLAY_METADATA) {
auto *data = reinterpret_cast<AVMasteringDisplayMetadata *>(sd->data);
- maxNits = float(data->max_luminance.num)/float(data->max_luminance.den)*10000.;
+ auto maybeLum = QFFmpeg::mul(10'000., data->max_luminance);
+ if (maybeLum)
+ maxNits = float(maybeLum.value());
}
}
return maxNits;
}
-QVideoFrame::MapMode QFFmpegVideoBuffer::mapMode() const
-{
- return m_mode;
-}
-
QAbstractVideoBuffer::MapData QFFmpegVideoBuffer::map(QVideoFrame::MapMode mode)
{
- if (!swFrame) {
- Q_ASSERT(hwFrame && hwFrame->hw_frames_ctx);
- swFrame = av_frame_alloc();
+ if (!m_swFrame) {
+ Q_ASSERT(m_hwFrame && m_hwFrame->hw_frames_ctx);
+ m_swFrame = QFFmpeg::makeAVFrame();
/* retrieve data from GPU to CPU */
- int ret = av_hwframe_transfer_data(swFrame, hwFrame, 0);
+ int ret = av_hwframe_transfer_data(m_swFrame.get(), m_hwFrame.get(), 0);
if (ret < 0) {
- qWarning() << "Error transferring the data to system memory\n";
+ qWarning() << "Error transferring the data to system memory:" << ret;
return {};
}
convertSWFrame();
@@ -213,42 +158,62 @@ QAbstractVideoBuffer::MapData QFFmpegVideoBuffer::map(QVideoFrame::MapMode mode)
m_mode = mode;
-// qDebug() << "MAP:";
MapData mapData;
auto *desc = QVideoTextureHelper::textureDescription(pixelFormat());
mapData.nPlanes = desc->nplanes;
for (int i = 0; i < mapData.nPlanes; ++i) {
- mapData.data[i] = swFrame->data[i];
- mapData.bytesPerLine[i] = swFrame->linesize[i];
- mapData.size[i] = mapData.bytesPerLine[i]*desc->heightForPlane(swFrame->height, i);
-// qDebug() << " " << i << mapData.data[i] << mapData.size[i];
+ Q_ASSERT(m_swFrame->linesize[i] >= 0);
+
+ mapData.data[i] = m_swFrame->data[i];
+ mapData.bytesPerLine[i] = m_swFrame->linesize[i];
+ mapData.size[i] = mapData.bytesPerLine[i]*desc->heightForPlane(m_swFrame->height, i);
}
+
+ if ((mode & QVideoFrame::WriteOnly) != 0 && m_hwFrame) {
+ m_type = QVideoFrame::NoHandle;
+ m_hwFrame.reset();
+ if (m_textures) {
+ qCDebug(qLcFFmpegVideoBuffer)
+ << "Mapping of FFmpeg video buffer with write mode when "
+ "textures have been created. Visual artifacts might "
+ "happen if the frame is still in the rendering pipeline";
+ m_textures.reset();
+ }
+ }
+
return mapData;
}
void QFFmpegVideoBuffer::unmap()
{
- // nothing to do here for SW buffers
+ // nothing to do here for SW buffers.
+    // Set NotMapped mode to ensure map/unmap/mapMode consistency.
+ m_mode = QVideoFrame::NotMapped;
}
-void QFFmpegVideoBuffer::mapTextures()
+std::unique_ptr<QVideoFrameTextures> QFFmpegVideoBuffer::mapTextures(QRhi *)
{
- if (textures || !hwFrame)
- return;
-// qDebug() << ">>>>> mapTextures";
- textures = textureConverter.getTextures(hwFrame);
- if (!textures)
- qWarning() << " failed to get textures for frame" << textureConverter.isNull();
-}
+ if (m_textures)
+ return {};
+ if (!m_hwFrame)
+ return {};
+ if (m_textureConverter.isNull()) {
+ m_textures = nullptr;
+ return {};
+ }
-quint64 QFFmpegVideoBuffer::textureHandle(int plane) const
-{
- return textures ? textures->textureHandle(plane) : 0;
+ m_textures.reset(m_textureConverter.getTextures(m_hwFrame.get()));
+ if (!m_textures) {
+ static thread_local int lastFormat = 0;
+ if (std::exchange(lastFormat, m_hwFrame->format) != m_hwFrame->format) // prevent logging spam
+ qWarning() << " failed to get textures for frame; format:" << m_hwFrame->format;
+ }
+ return {};
}
-std::unique_ptr<QRhiTexture> QFFmpegVideoBuffer::texture(int plane) const
+quint64 QFFmpegVideoBuffer::textureHandle(QRhi *rhi, int plane) const
{
- return textures ? textures->texture(plane) : std::unique_ptr<QRhiTexture>();
+ return m_textures ? m_textures->textureHandle(rhi, plane) : 0;
}
QVideoFrameFormat::PixelFormat QFFmpegVideoBuffer::pixelFormat() const
@@ -258,7 +223,7 @@ QVideoFrameFormat::PixelFormat QFFmpegVideoBuffer::pixelFormat() const
QSize QFFmpegVideoBuffer::size() const
{
- return QSize(frame->width, frame->height);
+ return m_size;
}
QVideoFrameFormat::PixelFormat QFFmpegVideoBuffer::toQtPixelFormat(AVPixelFormat avPixelFormat, bool *needsConversion)
@@ -269,6 +234,9 @@ QVideoFrameFormat::PixelFormat QFFmpegVideoBuffer::toQtPixelFormat(AVPixelFormat
switch (avPixelFormat) {
default:
break;
+ case AV_PIX_FMT_NONE:
+ Q_ASSERT(!"Invalid avPixelFormat!");
+ return QVideoFrameFormat::Format_Invalid;
case AV_PIX_FMT_ARGB:
return QVideoFrameFormat::Format_ARGB8888;
case AV_PIX_FMT_0RGB:
@@ -309,6 +277,8 @@ QVideoFrameFormat::PixelFormat QFFmpegVideoBuffer::toQtPixelFormat(AVPixelFormat
return QVideoFrameFormat::Format_P010;
case AV_PIX_FMT_P016:
return QVideoFrameFormat::Format_P016;
+ case AV_PIX_FMT_MEDIACODEC:
+ return QVideoFrameFormat::Format_SamplerExternalOES;
}
if (needsConversion)
@@ -341,13 +311,13 @@ AVPixelFormat QFFmpegVideoBuffer::toAVPixelFormat(QVideoFrameFormat::PixelFormat
// We're using the data from the converted QImage here, which is in BGRA.
return AV_PIX_FMT_BGRA;
case QVideoFrameFormat::Format_ARGB8888:
- case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
return AV_PIX_FMT_ARGB;
+ case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
case QVideoFrameFormat::Format_XRGB8888:
return AV_PIX_FMT_0RGB;
case QVideoFrameFormat::Format_BGRA8888:
- case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
return AV_PIX_FMT_BGRA;
+ case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
case QVideoFrameFormat::Format_BGRX8888:
return AV_PIX_FMT_BGR0;
case QVideoFrameFormat::Format_ABGR8888:
@@ -356,6 +326,8 @@ AVPixelFormat QFFmpegVideoBuffer::toAVPixelFormat(QVideoFrameFormat::PixelFormat
return AV_PIX_FMT_0BGR;
case QVideoFrameFormat::Format_RGBA8888:
return AV_PIX_FMT_RGBA;
+ // to be added in 6.8:
+ // case QVideoFrameFormat::Format_RGBA8888_Premultiplied:
case QVideoFrameFormat::Format_RGBX8888:
return AV_PIX_FMT_RGB0;
@@ -382,6 +354,9 @@ AVPixelFormat QFFmpegVideoBuffer::toAVPixelFormat(QVideoFrameFormat::PixelFormat
return AV_PIX_FMT_P010;
case QVideoFrameFormat::Format_P016:
return AV_PIX_FMT_P016;
+
+ case QVideoFrameFormat::Format_SamplerExternalOES:
+ return AV_PIX_FMT_MEDIACODEC;
}
}