Diffstat (limited to 'src/multimedia')
src/multimedia/CMakeLists.txt | 24
src/multimedia/alsa/qalsaaudiodevice.cpp | 52
src/multimedia/alsa/qalsaaudiodevice_p.h | 6
src/multimedia/alsa/qalsaaudiosink.cpp | 5
src/multimedia/alsa/qalsaaudiosink_p.h | 1
src/multimedia/alsa/qalsaaudiosource.cpp | 20
src/multimedia/alsa/qalsamediadevices.cpp | 98
src/multimedia/android/qandroidmediadevices.cpp | 8
src/multimedia/audio/qaudiobufferinput.cpp | 162
src/multimedia/audio/qaudiobufferinput.h | 44
src/multimedia/audio/qaudiobufferoutput.cpp | 78
src/multimedia/audio/qaudiobufferoutput.h | 37
src/multimedia/audio/qaudiobufferoutput_p.h | 42
src/multimedia/audio/qsamplecache_p.cpp | 11
src/multimedia/audio/qsoundeffect.cpp | 3
src/multimedia/audio/qwavedecoder.cpp | 8
src/multimedia/camera/qcamera.cpp | 25
src/multimedia/camera/qcamera.h | 1
src/multimedia/camera/qcamera_p.h | 6
src/multimedia/camera/qcameradevice.cpp | 10
src/multimedia/camera/qimagecapture.cpp | 25
src/multimedia/doc/src/qtmultimedia-index.qdoc | 26
src/multimedia/platform/qgstreamer_platformspecificinterface.cpp | 27
src/multimedia/platform/qgstreamer_platformspecificinterface_p.h | 46
src/multimedia/platform/qplatformaudiobufferinput.cpp | 10
src/multimedia/platform/qplatformaudiobufferinput_p.h | 56
src/multimedia/platform/qplatformcamera.cpp | 9
src/multimedia/platform/qplatformcamera_p.h | 10
src/multimedia/platform/qplatformmediacapture.cpp | 11
src/multimedia/platform/qplatformmediacapture_p.h | 13
src/multimedia/platform/qplatformmediaintegration.cpp | 6
src/multimedia/platform/qplatformmediaintegration_p.h | 12
src/multimedia/platform/qplatformmediaplayer.cpp | 16
src/multimedia/platform/qplatformmediaplayer_p.h | 33
src/multimedia/platform/qplatformmediarecorder.cpp | 6
src/multimedia/platform/qplatformmediarecorder_p.h | 6
src/multimedia/platform/qplatformsurfacecapture_p.h | 3
src/multimedia/platform/qplatformvideoframeinput.cpp | 10
src/multimedia/platform/qplatformvideoframeinput_p.h | 55
src/multimedia/platform/qplatformvideosource_p.h | 5
src/multimedia/playback/qmediaplayer.cpp | 52
src/multimedia/playback/qmediaplayer.h | 5
src/multimedia/playback/qmediaplayer_p.h | 7
src/multimedia/pulseaudio/qaudioengine_pulse.cpp | 2
src/multimedia/qmediaframeinput.cpp | 43
src/multimedia/qmediaframeinput_p.h | 74
src/multimedia/qmediainputencoderinterface_p.h | 31
src/multimedia/qmediametadata.cpp | 50
src/multimedia/qmediametadata.h | 10
src/multimedia/qsymbolsresolveutils.cpp | 79
src/multimedia/qsymbolsresolveutils_p.h | 178
src/multimedia/recording/qmediacapturesession.cpp | 249
src/multimedia/recording/qmediacapturesession.h | 18
src/multimedia/recording/qmediacapturesession_p.h | 15
src/multimedia/recording/qmediarecorder.cpp | 43
src/multimedia/recording/qmediarecorder.h | 5
src/multimedia/recording/qmediarecorder_p.h | 1
src/multimedia/recording/qscreencapture-limitations.qdocinc | 15
src/multimedia/recording/qvideoframeinput.cpp | 156
src/multimedia/recording/qvideoframeinput.h | 44
src/multimedia/video/qabstractvideobuffer.cpp | 213
src/multimedia/video/qabstractvideobuffer.h | 32
src/multimedia/video/qabstractvideobuffer_p.h | 82
src/multimedia/video/qhwvideobuffer.cpp | 17
src/multimedia/video/qhwvideobuffer_p.h | 58
src/multimedia/video/qimagevideobuffer.cpp | 30
src/multimedia/video/qimagevideobuffer_p.h | 9
src/multimedia/video/qmemoryvideobuffer.cpp | 41
src/multimedia/video/qmemoryvideobuffer_p.h | 11
src/multimedia/video/qtvideo.cpp | 41
src/multimedia/video/qtvideo.h | 34
src/multimedia/video/qvideoframe.cpp | 292
src/multimedia/video/qvideoframe.h | 24
src/multimedia/video/qvideoframe_p.h | 44
src/multimedia/video/qvideoframeconversionhelper.cpp | 70
src/multimedia/video/qvideoframeconverter.cpp | 18
src/multimedia/video/qvideoframeconverter_p.h | 4
src/multimedia/video/qvideoframeformat.cpp | 43
src/multimedia/video/qvideoframeformat.h | 11
src/multimedia/video/qvideooutputorientationhandler.cpp | 4
src/multimedia/video/qvideotexturehelper.cpp | 29
src/multimedia/video/qvideowindow.cpp | 7
82 files changed, 2484 insertions(+), 733 deletions(-)
diff --git a/src/multimedia/CMakeLists.txt b/src/multimedia/CMakeLists.txt
index 8c58545b5..8ccf81c0c 100644
--- a/src/multimedia/CMakeLists.txt
+++ b/src/multimedia/CMakeLists.txt
@@ -23,6 +23,8 @@ qt_internal_add_module(Multimedia
audio/qaudiodecoder.cpp audio/qaudiodecoder.h audio/qaudiodecoder_p.h
audio/qaudiodevice.cpp audio/qaudiodevice.h audio/qaudiodevice_p.h
audio/qaudioinput.cpp audio/qaudioinput.h
+ audio/qaudiobufferinput.cpp audio/qaudiobufferinput.h
+ audio/qaudiobufferoutput.cpp audio/qaudiobufferoutput.h audio/qaudiobufferoutput_p.h
audio/qaudiooutput.cpp audio/qaudiooutput.h
audio/qaudioformat.cpp audio/qaudioformat.h
audio/qaudiohelpers.cpp audio/qaudiohelpers_p.h
@@ -38,25 +40,28 @@ qt_internal_add_module(Multimedia
camera/qcameradevice.cpp camera/qcameradevice.h camera/qcameradevice_p.h
camera/qimagecapture.cpp camera/qimagecapture.h
compat/removed_api.cpp
+ platform/qgstreamer_platformspecificinterface.cpp platform/qgstreamer_platformspecificinterface_p.h
platform/qplatformaudiodecoder.cpp platform/qplatformaudiodecoder_p.h
platform/qplatformaudioinput_p.h
platform/qplatformaudiooutput_p.h
platform/qplatformaudioresampler_p.h
platform/qplatformcamera.cpp platform/qplatformcamera_p.h
- platform/qplatformvideosource.cpp platform/qplatformvideosource_p.h
- platform/qplatformsurfacecapture.cpp platform/qplatformsurfacecapture_p.h
+ platform/qplatformcapturablewindows_p.h
platform/qplatformimagecapture.cpp platform/qplatformimagecapture_p.h
platform/qplatformmediacapture.cpp platform/qplatformmediacapture_p.h
platform/qplatformmediadevices.cpp platform/qplatformmediadevices_p.h
- platform/qplatformmediarecorder.cpp platform/qplatformmediarecorder_p.h
platform/qplatformmediaformatinfo.cpp platform/qplatformmediaformatinfo_p.h
platform/qplatformmediaintegration.cpp platform/qplatformmediaintegration_p.h
platform/qplatformmediaplayer.cpp platform/qplatformmediaplayer_p.h
platform/qplatformmediaplugin.cpp platform/qplatformmediaplugin_p.h
+ platform/qplatformmediarecorder.cpp platform/qplatformmediarecorder_p.h
+ platform/qplatformsurfacecapture.cpp platform/qplatformsurfacecapture_p.h
platform/qplatformvideodevices.cpp platform/qplatformvideodevices_p.h
platform/qplatformvideosink.cpp platform/qplatformvideosink_p.h
+ platform/qplatformvideosource.cpp platform/qplatformvideosource_p.h
+ platform/qplatformvideoframeinput.cpp platform/qplatformvideoframeinput_p.h
+ platform/qplatformaudiobufferinput.cpp platform/qplatformaudiobufferinput_p.h
playback/qmediaplayer.cpp playback/qmediaplayer.h playback/qmediaplayer_p.h
- platform/qplatformcapturablewindows_p.h
qmediadevices.cpp qmediadevices.h
qmediaenumdebug.h
qmediaformat.cpp qmediaformat.h
@@ -64,15 +69,19 @@ qt_internal_add_module(Multimedia
qmediastoragelocation.cpp qmediastoragelocation_p.h
qmediatimerange.cpp qmediatimerange.h
qmultimediautils.cpp qmultimediautils_p.h
+ qmediaframeinput.cpp qmediaframeinput_p.h
qmaybe_p.h
qtmultimediaglobal.h qtmultimediaglobal_p.h
qerrorinfo_p.h
+ qmediainputencoderinterface_p.h
recording/qmediacapturesession.cpp recording/qmediacapturesession.h recording/qmediacapturesession_p.h
recording/qmediarecorder.cpp recording/qmediarecorder.h recording/qmediarecorder_p.h
recording/qscreencapture.cpp recording/qscreencapture.h
recording/qwindowcapture.cpp recording/qwindowcapture.h
recording/qcapturablewindow.cpp recording/qcapturablewindow.h recording/qcapturablewindow_p.h
- video/qabstractvideobuffer.cpp video/qabstractvideobuffer_p.h
+ recording/qvideoframeinput.cpp recording/qvideoframeinput.h
+ video/qabstractvideobuffer.cpp video/qabstractvideobuffer.h
+ video/qhwvideobuffer.cpp video/qhwvideobuffer_p.h
video/qmemoryvideobuffer.cpp video/qmemoryvideobuffer_p.h
video/qimagevideobuffer.cpp video/qimagevideobuffer_p.h
video/qvideoframe.cpp video/qvideoframe.h video/qvideoframe_p.h
@@ -104,9 +113,12 @@ qt_internal_add_module(Multimedia
Qt::GuiPrivate
NO_PCH_SOURCES
compat/removed_api.cpp
- GENERATE_CPP_EXPORTS
)
+qt_internal_extend_target(Multimedia
+ CONDITION LINUX OR ANDROID
+ SOURCES qsymbolsresolveutils.cpp qsymbolsresolveutils_p.h)
+
qt_internal_add_simd_part(Multimedia SIMD sse2
SOURCES
video/qvideoframeconversionhelper_sse2.cpp
diff --git a/src/multimedia/alsa/qalsaaudiodevice.cpp b/src/multimedia/alsa/qalsaaudiodevice.cpp
index f5d4a2209..893375270 100644
--- a/src/multimedia/alsa/qalsaaudiodevice.cpp
+++ b/src/multimedia/alsa/qalsaaudiodevice.cpp
@@ -37,55 +37,35 @@ QAlsaAudioDeviceInfo::QAlsaAudioDeviceInfo(const QByteArray &dev, const QString
minimumSampleRate = 8000;
maximumSampleRate = 48000;
- supportedSampleFormats << QAudioFormat::UInt8 << QAudioFormat::Int16 << QAudioFormat::Int32 << QAudioFormat::Float;
+ supportedSampleFormats = {
+ QAudioFormat::UInt8,
+ QAudioFormat::Int16,
+ QAudioFormat::Int32,
+ QAudioFormat::Float,
+ };
preferredFormat.setChannelCount(mode == QAudioDevice::Input ? 1 : 2);
preferredFormat.setSampleFormat(QAudioFormat::Float);
preferredFormat.setSampleRate(48000);
}
-QAlsaAudioDeviceInfo::~QAlsaAudioDeviceInfo()
-{
-}
+QAlsaAudioDeviceInfo::~QAlsaAudioDeviceInfo() = default;
void QAlsaAudioDeviceInfo::checkSurround()
{
+ if (mode != QAudioDevice::Output)
+ return;
+
surround40 = false;
surround51 = false;
surround71 = false;
- void **hints, **n;
- char *name, *descr, *io;
-
- if(snd_device_name_hint(-1, "pcm", &hints) < 0)
- return;
-
- n = hints;
-
- while (*n != NULL) {
- name = snd_device_name_get_hint(*n, "NAME");
- descr = snd_device_name_get_hint(*n, "DESC");
- io = snd_device_name_get_hint(*n, "IOID");
- if((name != NULL) && (descr != NULL)) {
- QString deviceName = QLatin1String(name);
- if (mode == QAudioDevice::Output) {
- if(deviceName.contains(QLatin1String("surround40")))
- surround40 = true;
- if(deviceName.contains(QLatin1String("surround51")))
- surround51 = true;
- if(deviceName.contains(QLatin1String("surround71")))
- surround71 = true;
- }
- }
- if(name != NULL)
- free(name);
- if(descr != NULL)
- free(descr);
- if(io != NULL)
- free(io);
- ++n;
- }
- snd_device_name_free_hint(hints);
+ if (id.startsWith(QLatin1String("surround40")))
+ surround40 = true;
+ if (id.startsWith(QLatin1String("surround51")))
+ surround51 = true;
+ if (id.startsWith(QLatin1String("surround71")))
+ surround71 = true;
}
QT_END_NAMESPACE
diff --git a/src/multimedia/alsa/qalsaaudiodevice_p.h b/src/multimedia/alsa/qalsaaudiodevice_p.h
index f82ea4f5a..dcbc9e692 100644
--- a/src/multimedia/alsa/qalsaaudiodevice_p.h
+++ b/src/multimedia/alsa/qalsaaudiodevice_p.h
@@ -38,9 +38,9 @@ public:
private:
void checkSurround();
- bool surround40;
- bool surround51;
- bool surround71;
+ bool surround40{};
+ bool surround51{};
+ bool surround71{};
};
QT_END_NAMESPACE
diff --git a/src/multimedia/alsa/qalsaaudiosink.cpp b/src/multimedia/alsa/qalsaaudiosink.cpp
index 98a68861f..e515219a2 100644
--- a/src/multimedia/alsa/qalsaaudiosink.cpp
+++ b/src/multimedia/alsa/qalsaaudiosink.cpp
@@ -30,13 +30,13 @@ QAlsaAudioSink::QAlsaAudioSink(const QByteArray &device, QObject *parent)
m_device = device;
timer = new QTimer(this);
- connect(timer, SIGNAL(timeout()), this, SLOT(userFeed()));
+ connect(timer, &QTimer::timeout, this, &QAlsaAudioSink::userFeed);
}
QAlsaAudioSink::~QAlsaAudioSink()
{
close();
- disconnect(timer, SIGNAL(timeout()));
+ disconnect(timer, &QTimer::timeout, this, &QAlsaAudioSink::userFeed);
QCoreApplication::processEvents();
delete timer;
}
@@ -130,6 +130,7 @@ int QAlsaAudioSink::setFormat()
pcmformat = SND_PCM_FORMAT_FLOAT_BE;
else
pcmformat = SND_PCM_FORMAT_FLOAT_LE;
+ break;
default:
break;
}
diff --git a/src/multimedia/alsa/qalsaaudiosink_p.h b/src/multimedia/alsa/qalsaaudiosink_p.h
index 7e8836f96..0f5a5aa5a 100644
--- a/src/multimedia/alsa/qalsaaudiosink_p.h
+++ b/src/multimedia/alsa/qalsaaudiosink_p.h
@@ -96,7 +96,6 @@ private:
char* audioBuffer = nullptr;
snd_pcm_t* handle = nullptr;
snd_pcm_access_t access = SND_PCM_ACCESS_RW_INTERLEAVED;
- snd_pcm_format_t pcmformat = SND_PCM_FORMAT_S16;
snd_pcm_hw_params_t *hwparams = nullptr;
qreal m_volume = 1.0f;
};
diff --git a/src/multimedia/alsa/qalsaaudiosource.cpp b/src/multimedia/alsa/qalsaaudiosource.cpp
index ce099463d..ebf6e24e2 100644
--- a/src/multimedia/alsa/qalsaaudiosource.cpp
+++ b/src/multimedia/alsa/qalsaaudiosource.cpp
@@ -16,7 +16,6 @@
#include <QtCore/qvarlengtharray.h>
#include <QtMultimedia/private/qaudiohelpers_p.h>
#include "qalsaaudiosource_p.h"
-#include "qalsaaudiodevice_p.h"
QT_BEGIN_NAMESPACE
@@ -45,13 +44,13 @@ QAlsaAudioSource::QAlsaAudioSource(const QByteArray &device, QObject *parent)
m_device = device;
timer = new QTimer(this);
- connect(timer, SIGNAL(timeout()), this, SLOT(userFeed()));
+ connect(timer, &QTimer::timeout, this, &QAlsaAudioSource::userFeed);
}
QAlsaAudioSource::~QAlsaAudioSource()
{
close();
- disconnect(timer, SIGNAL(timeout()));
+ disconnect(timer, &QTimer::timeout, this, &QAlsaAudioSource::userFeed);
QCoreApplication::processEvents();
delete timer;
}
@@ -143,21 +142,22 @@ int QAlsaAudioSource::setFormat()
break;
case QAudioFormat::Int16:
if constexpr (QSysInfo::ByteOrder == QSysInfo::BigEndian)
- pcmformat = SND_PCM_FORMAT_S16_LE;
- else
pcmformat = SND_PCM_FORMAT_S16_BE;
+ else
+ pcmformat = SND_PCM_FORMAT_S16_LE;
break;
case QAudioFormat::Int32:
if constexpr (QSysInfo::ByteOrder == QSysInfo::BigEndian)
- pcmformat = SND_PCM_FORMAT_S32_LE;
- else
pcmformat = SND_PCM_FORMAT_S32_BE;
+ else
+ pcmformat = SND_PCM_FORMAT_S32_LE;
break;
case QAudioFormat::Float:
if constexpr (QSysInfo::ByteOrder == QSysInfo::BigEndian)
- pcmformat = SND_PCM_FORMAT_FLOAT_LE;
- else
pcmformat = SND_PCM_FORMAT_FLOAT_BE;
+ else
+ pcmformat = SND_PCM_FORMAT_FLOAT_LE;
+ break;
default:
break;
}
@@ -370,7 +370,7 @@ bool QAlsaAudioSource::open()
bytesAvailable = checkBytesReady();
if(pullMode)
- connect(audioSource,SIGNAL(readyRead()),this,SLOT(userFeed()));
+ connect(audioSource, &QIODevice::readyRead, this, &QAlsaAudioSource::userFeed);
// Step 6: Start audio processing
chunks = buffer_size/period_size;
diff --git a/src/multimedia/alsa/qalsamediadevices.cpp b/src/multimedia/alsa/qalsamediadevices.cpp
index 5a133e9d1..9466fa0cd 100644
--- a/src/multimedia/alsa/qalsamediadevices.cpp
+++ b/src/multimedia/alsa/qalsamediadevices.cpp
@@ -13,6 +13,26 @@
QT_BEGIN_NAMESPACE
+namespace {
+
+struct free_char
+{
+ void operator()(char *c) const { ::free(c); }
+};
+
+using unique_str = std::unique_ptr<char, free_char>;
+
+bool operator==(const unique_str &str, std::string_view sv)
+{
+ return std::string_view{ str.get() } == sv;
+}
+bool operator!=(const unique_str &str, std::string_view sv)
+{
+ return !(str == sv);
+}
+
+} // namespace
+
QAlsaMediaDevices::QAlsaMediaDevices()
: QPlatformMediaDevices()
{
@@ -22,52 +42,50 @@ static QList<QAudioDevice> availableDevices(QAudioDevice::Mode mode)
{
QList<QAudioDevice> devices;
- QByteArray filter;
-
// Create a list of all current audio devices that support mode
- void **hints, **n;
- char *name, *descr, *io;
- bool hasDefault = false;
-
- if(snd_device_name_hint(-1, "pcm", &hints) < 0) {
+ void **hints;
+ if (snd_device_name_hint(-1, "pcm", &hints) < 0) {
qWarning() << "no alsa devices available";
return devices;
}
- n = hints;
- if(mode == QAudioDevice::Input) {
- filter = "Input";
- } else {
- filter = "Output";
- }
+ std::string_view filter = (mode == QAudioDevice::Input) ? "Input" : "Output";
- QAlsaAudioDeviceInfo* sysdefault = nullptr;
+ QAlsaAudioDeviceInfo *sysdefault = nullptr;
- while (*n != NULL) {
- name = snd_device_name_get_hint(*n, "NAME");
- if (name != 0 && qstrcmp(name, "null") != 0) {
- descr = snd_device_name_get_hint(*n, "DESC");
- io = snd_device_name_get_hint(*n, "IOID");
-
- if ((descr != NULL) && ((io == NULL) || (io == filter))) {
- auto *infop = new QAlsaAudioDeviceInfo(name, QString::fromUtf8(descr), mode);
- devices.append(infop->create());
- if (!hasDefault && strcmp(name, "default") == 0) {
- infop->isDefault = true;
- hasDefault = true;
- }
- else if (!sysdefault && !hasDefault && strcmp(name, "sysdefault") == 0) {
- sysdefault = infop;
- }
+ auto makeDeviceInfo = [&filter, mode](void *entry) -> QAlsaAudioDeviceInfo * {
+ unique_str name{ snd_device_name_get_hint(entry, "NAME") };
+ if (name && name != "null") {
+ unique_str descr{ snd_device_name_get_hint(entry, "DESC") };
+ unique_str io{ snd_device_name_get_hint(entry, "IOID") };
+
+ if (descr && (!io || (io == filter))) {
+ auto *infop = new QAlsaAudioDeviceInfo{
+ name.get(),
+ QString::fromUtf8(descr.get()),
+ mode,
+ };
+ return infop;
}
+ }
+ return nullptr;
+ };
+
+ bool hasDefault = false;
+ void **n = hints;
+ while (*n != NULL) {
+ QAlsaAudioDeviceInfo *infop = makeDeviceInfo(*n++);
- free(descr);
- free(io);
+ if (infop) {
+ devices.append(infop->create());
+ if (!hasDefault && infop->id.startsWith("default")) {
+ infop->isDefault = true;
+ hasDefault = true;
+ }
+ if (!sysdefault && infop->id.startsWith("sysdefault"))
+ sysdefault = infop;
}
- free(name);
- ++n;
}
- snd_device_name_free_hint(hints);
if (!hasDefault && sysdefault) {
// Make "sysdefault" the default device if there is no "default" device exists
@@ -75,11 +93,15 @@ static QList<QAudioDevice> availableDevices(QAudioDevice::Mode mode)
hasDefault = true;
}
if (!hasDefault && devices.size() > 0) {
- auto infop = new QAlsaAudioDeviceInfo("default", QString(), QAudioDevice::Output);
- infop->isDefault = true;
- devices.prepend(infop->create());
+ // forcefully declare the first device as "default"
+ QAlsaAudioDeviceInfo *infop = makeDeviceInfo(hints[0]);
+ if (infop) {
+ infop->isDefault = true;
+ devices.prepend(infop->create());
+ }
}
+ snd_device_name_free_hint(hints);
return devices;
}
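
Usage sketch (not part of the patch): the refactor above wraps the malloc'd strings returned by snd_device_name_get_hint() in a std::unique_ptr with a custom free() deleter. A minimal standalone illustration of the same pattern, using POSIX strdup() as a stand-in for the ALSA hint API:

    #include <cstdlib>
    #include <cstring>
    #include <memory>
    #include <string_view>

    // Deleter that releases C strings allocated with malloc()/strdup().
    struct free_char
    {
        void operator()(char *c) const { ::free(c); }
    };

    using unique_str = std::unique_ptr<char, free_char>;

    int main()
    {
        // strdup() stands in for snd_device_name_get_hint(); both return malloc'd strings.
        unique_str name{ ::strdup("default") };

        // Compare without copying; the string is freed automatically on scope exit.
        if (name && std::string_view{ name.get() } == "default")
            return 0;
        return 1;
    }
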
diff --git a/src/multimedia/android/qandroidmediadevices.cpp b/src/multimedia/android/qandroidmediadevices.cpp
index 55533621c..7688da079 100644
--- a/src/multimedia/android/qandroidmediadevices.cpp
+++ b/src/multimedia/android/qandroidmediadevices.cpp
@@ -23,9 +23,7 @@ Q_DECLARE_JNI_CLASS(QtAudioDeviceManager,
QAndroidMediaDevices::QAndroidMediaDevices() : QPlatformMediaDevices()
{
- QtJniTypes::QtAudioDeviceManager::callStaticMethod<void>(
- "registerAudioHeadsetStateReceiver",
- QNativeInterface::QAndroidApplication::context());
+ QtJniTypes::QtAudioDeviceManager::callStaticMethod<void>("registerAudioHeadsetStateReceiver");
}
QAndroidMediaDevices::~QAndroidMediaDevices()
@@ -33,9 +31,7 @@ QAndroidMediaDevices::~QAndroidMediaDevices()
// Object of QAndroidMediaDevices type is static. Unregistering will happen only when closing
// the application. In such a case it is probably not needed, but let's leave it for
// compatibility with Android documentation
- QtJniTypes::QtAudioDeviceManager::callStaticMethod<void>(
- "unregisterAudioHeadsetStateReceiver",
- QNativeInterface::QAndroidApplication::context());
+ QtJniTypes::QtAudioDeviceManager::callStaticMethod<void>("unregisterAudioHeadsetStateReceiver");
}
QList<QAudioDevice> QAndroidMediaDevices::audioInputs() const
diff --git a/src/multimedia/audio/qaudiobufferinput.cpp b/src/multimedia/audio/qaudiobufferinput.cpp
new file mode 100644
index 000000000..69d8f319b
--- /dev/null
+++ b/src/multimedia/audio/qaudiobufferinput.cpp
@@ -0,0 +1,162 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qaudiobufferinput.h"
+#include "qplatformaudiobufferinput_p.h"
+#include "qmediainputencoderinterface_p.h"
+#include "qmediaframeinput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+class QAudioBufferInputPrivate : public QMediaFrameInputPrivate
+{
+public:
+ QAudioBufferInputPrivate(QAudioBufferInput *q) : q(q) { }
+
+ bool sendAudioBuffer(const QAudioBuffer &audioBuffer)
+ {
+ return sendMediaFrame(
+ [&]() { emit m_platfromAudioBufferInput->newAudioBuffer(audioBuffer); });
+ }
+
+ void initialize()
+ {
+ m_platfromAudioBufferInput = std::make_unique<QPlatformAudioBufferInput>();
+ addUpdateSignal(m_platfromAudioBufferInput.get(),
+ &QPlatformAudioBufferInput::encoderUpdated);
+ }
+
+ void uninitialize()
+ {
+ m_platfromAudioBufferInput.reset();
+
+ if (captureSession())
+ captureSession()->setAudioBufferInput(nullptr);
+ }
+
+ QMediaCaptureSession *session() const { return m_captureSession; }
+
+ QPlatformAudioBufferInput *platfromAudioBufferInput() const
+ {
+ return m_platfromAudioBufferInput.get();
+ }
+
+private:
+ void updateCaptureSessionConnections(QMediaCaptureSession *prevSession,
+ QMediaCaptureSession *newSession) override
+ {
+ if (prevSession)
+ removeUpdateSignal(prevSession, &QMediaCaptureSession::audioOutputChanged);
+
+ if (newSession)
+ addUpdateSignal(newSession, &QMediaCaptureSession::audioOutputChanged);
+ }
+
+ bool checkIfCanSendMediaFrame() const override
+ {
+ if (auto encoderInterface = m_platfromAudioBufferInput->encoderInterface())
+ return encoderInterface->canPushFrame();
+
+ // Not implemented yet
+ // return captureSession()->audioOutput() != nullptr;
+ return false;
+ }
+
+ void emitReadyToSendMediaFrame() override { emit q->readyToSendAudioBuffer(); }
+
+private:
+ QAudioBufferInput *q = nullptr;
+ QMediaCaptureSession *m_captureSession = nullptr;
+ std::unique_ptr<QPlatformAudioBufferInput> m_platfromAudioBufferInput;
+};
+
+/*!
+ \class QAudioBufferInput
+ \inmodule QtMultimedia
+ \ingroup multimedia
+ \ingroup multimedia_audio
+ \since 6.8
+
+ \brief The QAudioBufferInput class is used for providing custom audio buffers
+ to \l QMediaRecorder through \l QMediaCaptureSession.
+
+ \sa QMediaRecorder, QMediaCaptureSession
+*/
+
+/*!
+ Constructs a new QAudioBufferInput object with \a parent.
+*/
+QAudioBufferInput::QAudioBufferInput(QObject *parent)
+ : QObject(*new QAudioBufferInputPrivate(this), parent)
+{
+ Q_D(QAudioBufferInput);
+ d->initialize();
+}
+
+/*!
+ Destroys the object.
+ */
+QAudioBufferInput::~QAudioBufferInput()
+{
+ Q_D(QAudioBufferInput);
+ d->uninitialize();
+}
+
+/*!
+ Sends \l QAudioBuffer to \l QMediaRecorder through \l QMediaCaptureSession.
+
+ Returns \c true if the specified \a audioBuffer has been sent successfully
+ to the destination. Returns \c false if the buffer hasn't been sent,
+ which can happen if the instance is not assigned to
+ \l QMediaCaptureSession, the session doesn't have a media recorder,
+ the media recorder is not started or its queue is full.
+ The \l readyToSendAudioBuffer() signal will be emitted as soon as
+ the destination is able to handle a new audio buffer.
+
+ Sending an empty audio buffer is treated by \l QMediaRecorder
+ as the end of the input stream. QMediaRecorder stops the recording
+ automatically if \l QMediaRecorder::autoStop is \c true and
+ all the inputs have reported the end of the stream.
+*/
+bool QAudioBufferInput::sendAudioBuffer(const QAudioBuffer &audioBuffer)
+{
+ Q_D(QAudioBufferInput);
+ return d->sendAudioBuffer(audioBuffer);
+}
+
+/*!
+ Returns the capture session this audio buffer input is connected to, or
+ a \c nullptr if the audio buffer input is not connected to a capture session.
+
+ Use QMediaCaptureSession::setAudioBufferInput() to connect
+ the audio buffer input to a session.
+*/
+QMediaCaptureSession *QAudioBufferInput::captureSession() const
+{
+ Q_D(const QAudioBufferInput);
+ return d->captureSession();
+}
+
+void QAudioBufferInput::setCaptureSession(QMediaCaptureSession *captureSession)
+{
+ Q_D(QAudioBufferInput);
+ d->setCaptureSession(captureSession);
+}
+
+QPlatformAudioBufferInput *QAudioBufferInput::platformAudioBufferInput() const
+{
+ Q_D(const QAudioBufferInput);
+ return d->platfromAudioBufferInput();
+}
+
+/*!
+ \fn void QAudioBufferInput::readyToSendAudioBuffer()
+
+ Signals that a new audio buffer can be sent to the audio buffer input.
+ After receiving the signal, if you have audio data to be sent, invoke \l sendAudioBuffer
+ once or in a loop until it returns \c false.
+
+ \sa sendAudioBuffer()
+*/
+
+QT_END_NAMESPACE
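
Usage sketch (not part of the patch): pushing application-generated buffers into a recording session through the new QAudioBufferInput, using readyToSendAudioBuffer() for back-pressure. makeNextBuffer() is a hypothetical helper; the Qt classes and signals are those declared above.

    #include <QtMultimedia/qaudiobufferinput.h>
    #include <QtMultimedia/qmediacapturesession.h>
    #include <QtMultimedia/qmediarecorder.h>
    #include <QtCore/qobject.h>

    // Placeholder for application code that produces audio; an empty
    // QAudioBuffer tells QMediaRecorder that the stream has ended.
    QAudioBuffer makeNextBuffer();

    void setUpRecording(QObject *parent)
    {
        auto *session = new QMediaCaptureSession(parent);
        auto *recorder = new QMediaRecorder(parent);
        auto *input = new QAudioBufferInput(parent);

        session->setRecorder(recorder);
        session->setAudioBufferInput(input);

        // Push buffers until the destination applies back-pressure, then wait
        // for readyToSendAudioBuffer() before continuing.
        auto pushBuffers = [input]() {
            while (input->sendAudioBuffer(makeNextBuffer())) { }
        };
        QObject::connect(input, &QAudioBufferInput::readyToSendAudioBuffer, input, pushBuffers);

        recorder->record();
        pushBuffers();
    }
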
diff --git a/src/multimedia/audio/qaudiobufferinput.h b/src/multimedia/audio/qaudiobufferinput.h
new file mode 100644
index 000000000..92bb8b71a
--- /dev/null
+++ b/src/multimedia/audio/qaudiobufferinput.h
@@ -0,0 +1,44 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QAUDIOBUFFERINPUT_H
+#define QAUDIOBUFFERINPUT_H
+
+#include <QtMultimedia/qtmultimediaexports.h>
+#include <QtMultimedia/qaudiobuffer.h>
+#include <QtCore/qobject.h>
+
+QT_BEGIN_NAMESPACE
+
+class QPlatformAudioBufferInput;
+class QAudioBufferInputPrivate;
+class QMediaCaptureSession;
+
+class Q_MULTIMEDIA_EXPORT QAudioBufferInput : public QObject
+{
+ Q_OBJECT
+public:
+ explicit QAudioBufferInput(QObject *parent = nullptr);
+
+ ~QAudioBufferInput() override;
+
+ bool sendAudioBuffer(const QAudioBuffer &audioBuffer);
+
+ QMediaCaptureSession *captureSession() const;
+
+Q_SIGNALS:
+ void readyToSendAudioBuffer();
+
+private:
+ void setCaptureSession(QMediaCaptureSession *captureSession);
+
+ QPlatformAudioBufferInput *platformAudioBufferInput() const;
+
+ friend class QMediaCaptureSession;
+ Q_DISABLE_COPY(QAudioBufferInput)
+ Q_DECLARE_PRIVATE(QAudioBufferInput)
+};
+
+QT_END_NAMESPACE
+
+#endif // QAUDIOBUFFERINPUT_H
diff --git a/src/multimedia/audio/qaudiobufferoutput.cpp b/src/multimedia/audio/qaudiobufferoutput.cpp
new file mode 100644
index 000000000..50389c49a
--- /dev/null
+++ b/src/multimedia/audio/qaudiobufferoutput.cpp
@@ -0,0 +1,78 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qaudiobufferoutput_p.h"
+#include "qmediaplayer.h"
+
+QT_BEGIN_NAMESPACE
+
+/*!
+ \class QAudioBufferOutput
+ \inmodule QtMultimedia
+ \ingroup multimedia
+ \ingroup multimedia_audio
+ \since 6.8
+
+ \brief The QAudioBufferOutput class is used for capturing audio data provided by \l QMediaPlayer.
+
+ QAudioBufferOutput can be set to QMediaPlayer in order to receive audio buffers
+ decoded by the media player. The received audio data can be used for any
+ processing or visualization.
+
+ \sa QMediaPlayer, QMediaPlayer::setAudioBufferOutput, QAudioBuffer
+*/
+
+/*!
+ Constructs a new QAudioBufferOutput object with \a parent.
+
+ The audio format of output audio buffers will depend on
+ the source media file and the inner audio decoder in \l QMediaPlayer.
+*/
+QAudioBufferOutput::QAudioBufferOutput(QObject *parent)
+ : QObject(*new QAudioBufferOutputPrivate, parent)
+{
+}
+
+/*!
+ Constructs a new QAudioBufferOutput object with audio \a format and \a parent.
+
+ If the specified \a format is valid, it will be the format of output
+ audio buffers. Otherwise, the format of output audio buffers
+ will depend on the source media file and the inner audio decoder in \l QMediaPlayer.
+*/
+QAudioBufferOutput::QAudioBufferOutput(const QAudioFormat &format, QObject *parent)
+ : QObject(*new QAudioBufferOutputPrivate(format), parent)
+{
+}
+
+/*!
+ Destroys the audio buffer output object.
+*/
+QAudioBufferOutput::~QAudioBufferOutput()
+{
+ Q_D(QAudioBufferOutput);
+
+ if (d->mediaPlayer)
+ d->mediaPlayer->setAudioBufferOutput(nullptr);
+}
+
+/*!
+ Gets the audio format specified in the constructor.
+
+ If the format is valid, it specifies the format of output audio buffers.
+*/
+QAudioFormat QAudioBufferOutput::format() const
+{
+ Q_D(const QAudioBufferOutput);
+ return d->format;
+}
+
+/*!
+ \fn void QAudioBufferOutput::audioBufferReceived(const QAudioBuffer &buffer)
+
+ Signals that a new audio \a buffer has been received from \l QMediaPlayer.
+*/
+
+QT_END_NAMESPACE
+
+#include "moc_qaudiobufferoutput.cpp"
diff --git a/src/multimedia/audio/qaudiobufferoutput.h b/src/multimedia/audio/qaudiobufferoutput.h
new file mode 100644
index 000000000..2e4fab1a4
--- /dev/null
+++ b/src/multimedia/audio/qaudiobufferoutput.h
@@ -0,0 +1,37 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QAUDIOBUFFEROUTPUT_H
+#define QAUDIOBUFFEROUTPUT_H
+
+#include <QtMultimedia/qtmultimediaexports.h>
+#include <QtMultimedia/qaudiobuffer.h>
+#include <QtCore/qobject.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAudioBufferOutputPrivate;
+
+class Q_MULTIMEDIA_EXPORT QAudioBufferOutput : public QObject
+{
+ Q_OBJECT
+public:
+ explicit QAudioBufferOutput(QObject *parent = nullptr);
+
+ explicit QAudioBufferOutput(const QAudioFormat &format, QObject *parent = nullptr);
+
+ ~QAudioBufferOutput() override;
+
+ QAudioFormat format() const;
+
+Q_SIGNALS:
+ void audioBufferReceived(const QAudioBuffer &buffer);
+
+private:
+ Q_DISABLE_COPY(QAudioBufferOutput)
+ Q_DECLARE_PRIVATE(QAudioBufferOutput)
+};
+
+QT_END_NAMESPACE
+
+#endif // QAUDIOBUFFEROUTPUT_H
diff --git a/src/multimedia/audio/qaudiobufferoutput_p.h b/src/multimedia/audio/qaudiobufferoutput_p.h
new file mode 100644
index 000000000..2f9c11bd1
--- /dev/null
+++ b/src/multimedia/audio/qaudiobufferoutput_p.h
@@ -0,0 +1,42 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QAUDIOBUFFEROUTPUT_P_H
+#define QAUDIOBUFFEROUTPUT_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/private/qobject_p.h>
+#include "qaudiobufferoutput.h"
+
+QT_BEGIN_NAMESPACE
+
+class QMediaPlayer;
+
+class QAudioBufferOutputPrivate : public QObjectPrivate
+{
+public:
+ QAudioBufferOutputPrivate(const QAudioFormat &format = {}) : format(std::move(format)) { }
+
+ static QMediaPlayer *exchangeMediaPlayer(QAudioBufferOutput &output, QMediaPlayer *player)
+ {
+ auto outputPrivate = static_cast<QAudioBufferOutputPrivate *>(output.d_func());
+ return std::exchange(outputPrivate->mediaPlayer, player);
+ }
+
+ QAudioFormat format;
+ QMediaPlayer *mediaPlayer = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif // QAUDIOBUFFEROUTPUT_P_H
diff --git a/src/multimedia/audio/qsamplecache_p.cpp b/src/multimedia/audio/qsamplecache_p.cpp
index 825c79685..b4be09f72 100644
--- a/src/multimedia/audio/qsamplecache_p.cpp
+++ b/src/multimedia/audio/qsamplecache_p.cpp
@@ -357,12 +357,13 @@ void QSample::load()
Q_ASSERT(QThread::currentThread()->objectName() == QLatin1String("QSampleCache::LoadingThread"));
#endif
qCDebug(qLcSampleCache) << "QSample: load [" << m_url << "]";
- m_stream = m_parent->networkAccessManager().get(QNetworkRequest(m_url));
- connect(m_stream, SIGNAL(errorOccurred(QNetworkReply::NetworkError)), SLOT(loadingError(QNetworkReply::NetworkError)));
+ QNetworkReply *reply = m_parent->networkAccessManager().get(QNetworkRequest(m_url));
+ m_stream = reply;
+ connect(reply, &QNetworkReply::errorOccurred, this, &QSample::loadingError);
m_waveDecoder = new QWaveDecoder(m_stream);
- connect(m_waveDecoder, SIGNAL(formatKnown()), SLOT(decoderReady()));
- connect(m_waveDecoder, SIGNAL(parsingError()), SLOT(decoderError()));
- connect(m_waveDecoder, SIGNAL(readyRead()), SLOT(readSample()));
+ connect(m_waveDecoder, &QWaveDecoder::formatKnown, this, &QSample::decoderReady);
+ connect(m_waveDecoder, &QWaveDecoder::parsingError, this, &QSample::decoderError);
+ connect(m_waveDecoder, &QIODevice::readyRead, this, &QSample::readSample);
m_waveDecoder->open(QIODevice::ReadOnly);
}
diff --git a/src/multimedia/audio/qsoundeffect.cpp b/src/multimedia/audio/qsoundeffect.cpp
index c12114672..a3b502662 100644
--- a/src/multimedia/audio/qsoundeffect.cpp
+++ b/src/multimedia/audio/qsoundeffect.cpp
@@ -421,8 +421,7 @@ void QSoundEffect::setSource(const QUrl &url)
disconnect(d->m_sample.get(), &QSample::error, d, &QSoundEffectPrivate::decoderError);
disconnect(d->m_sample.get(), &QSample::ready, d, &QSoundEffectPrivate::sampleReady);
}
- d->m_sample->release();
- d->m_sample = nullptr;
+ d->m_sample.reset();
}
if (d->m_audioSink) {
diff --git a/src/multimedia/audio/qwavedecoder.cpp b/src/multimedia/audio/qwavedecoder.cpp
index 36ac3c779..452363ddc 100644
--- a/src/multimedia/audio/qwavedecoder.cpp
+++ b/src/multimedia/audio/qwavedecoder.cpp
@@ -56,7 +56,7 @@ bool QWaveDecoder::open(QIODevice::OpenMode mode)
if (canOpen && enoughDataAvailable())
handleData();
else
- connect(device, SIGNAL(readyRead()), SLOT(handleData()));
+ connect(device, &QIODevice::readyRead, this, &QWaveDecoder::handleData);
return canOpen;
}
@@ -274,7 +274,7 @@ bool QWaveDecoder::writeDataLength()
void QWaveDecoder::parsingFailed()
{
Q_ASSERT(device);
- device->disconnect(SIGNAL(readyRead()), this, SLOT(handleData()));
+ disconnect(device, &QIODevice::readyRead, this, &QWaveDecoder::handleData);
emit parsingError();
}
@@ -386,7 +386,7 @@ void QWaveDecoder::handleData()
if (state == QWaveDecoder::WaitingForDataState) {
if (findChunk("data")) {
- device->disconnect(SIGNAL(readyRead()), this, SLOT(handleData()));
+ disconnect(device, &QIODevice::readyRead, this, &QWaveDecoder::handleData);
chunk descriptor;
device->read(reinterpret_cast<char *>(&descriptor), sizeof(chunk));
@@ -400,7 +400,7 @@ void QWaveDecoder::handleData()
dataSize = device->size() - headerLength();
haveFormat = true;
- connect(device, SIGNAL(readyRead()), SIGNAL(readyRead()));
+ connect(device, &QIODevice::readyRead, this, &QIODevice::readyRead);
emit formatKnown();
return;
diff --git a/src/multimedia/camera/qcamera.cpp b/src/multimedia/camera/qcamera.cpp
index 527b14c25..9cfbcc01d 100644
--- a/src/multimedia/camera/qcamera.cpp
+++ b/src/multimedia/camera/qcamera.cpp
@@ -152,14 +152,6 @@ QT_BEGIN_NAMESPACE
See the \l{Camera Overview}{camera overview} for more information.
*/
-
-void QCameraPrivate::_q_error(int error, const QString &errorString)
-{
- Q_Q(QCamera);
-
- this->error.setAndNotify(QCamera::Error(error), errorString, *q);
-}
-
void QCameraPrivate::init(const QCameraDevice &device)
{
Q_Q(QCamera);
@@ -167,16 +159,16 @@ void QCameraPrivate::init(const QCameraDevice &device)
auto maybeControl = QPlatformMediaIntegration::instance()->createCamera(q);
if (!maybeControl) {
qWarning() << "Failed to initialize QCamera" << maybeControl.error();
- error = { QCamera::CameraError, maybeControl.error() };
return;
}
control = maybeControl.value();
cameraDevice = !device.isNull() ? device : QMediaDevices::defaultVideoInput();
if (cameraDevice.isNull())
- _q_error(QCamera::CameraError, QStringLiteral("No camera detected"));
+ control->updateError(QCamera::CameraError, QStringLiteral("No camera detected"));
control->setCamera(cameraDevice);
- q->connect(control, SIGNAL(activeChanged(bool)), q, SIGNAL(activeChanged(bool)));
- q->connect(control, SIGNAL(error(int,QString)), q, SLOT(_q_error(int,QString)));
+ q->connect(control, &QPlatformVideoSource::activeChanged, q, &QCamera::activeChanged);
+ q->connect(control, &QPlatformCamera::errorChanged, q, &QCamera::errorChanged);
+ q->connect(control, &QPlatformCamera::errorOccurred, q, &QCamera::errorOccurred);
}
/*!
@@ -296,7 +288,9 @@ void QCamera::setActive(bool active)
QCamera::Error QCamera::error() const
{
- return d_func()->error.code();
+ Q_D(const QCamera);
+
+ return d->control ? d->control->error() : QCamera::CameraError;
}
/*!
@@ -312,7 +306,10 @@ QCamera::Error QCamera::error() const
*/
QString QCamera::errorString() const
{
- return d_func()->error.description();
+ Q_D(const QCamera);
+
+ return d->control ? d->control->errorString()
+ : QStringLiteral("Camera is not supported on the platform");
}
/*! \enum QCamera::Feature
diff --git a/src/multimedia/camera/qcamera.h b/src/multimedia/camera/qcamera.h
index 09d9521ff..82d89f5a9 100644
--- a/src/multimedia/camera/qcamera.h
+++ b/src/multimedia/camera/qcamera.h
@@ -261,7 +261,6 @@ private:
friend class QMediaCaptureSession;
Q_DISABLE_COPY(QCamera)
Q_DECLARE_PRIVATE(QCamera)
- Q_PRIVATE_SLOT(d_func(), void _q_error(int, const QString &))
friend class QCameraDevice;
};
diff --git a/src/multimedia/camera/qcamera_p.h b/src/multimedia/camera/qcamera_p.h
index c0477c242..ae1299435 100644
--- a/src/multimedia/camera/qcamera_p.h
+++ b/src/multimedia/camera/qcamera_p.h
@@ -16,7 +16,6 @@
//
#include "private/qobject_p.h"
-#include "private/qerrorinfo_p.h"
#include "qcamera.h"
#include "qcameradevice.h"
@@ -34,13 +33,8 @@ public:
QMediaCaptureSession *captureSession = nullptr;
QPlatformCamera *control = nullptr;
- QErrorInfo<QCamera::Error> error;
-
QCameraDevice cameraDevice;
QCameraFormat cameraFormat;
-
- void _q_error(int error, const QString &errorString);
- void unsetError() { error = {}; }
};
QT_END_NAMESPACE
diff --git a/src/multimedia/camera/qcameradevice.cpp b/src/multimedia/camera/qcameradevice.cpp
index 50727d49c..63e7fb4c0 100644
--- a/src/multimedia/camera/qcameradevice.cpp
+++ b/src/multimedia/camera/qcameradevice.cpp
@@ -455,10 +455,12 @@ QCameraDevice& QCameraDevice::operator=(const QCameraDevice& other) = default;
#ifndef QT_NO_DEBUG_STREAM
QDebug operator<<(QDebug d, const QCameraDevice &camera)
{
- d.maybeSpace() << QStringLiteral("QCameraDevice(name=%1, position=%2, orientation=%3)")
- .arg(camera.description())
- .arg(QString::fromLatin1(QCamera::staticMetaObject.enumerator(QCamera::staticMetaObject.indexOfEnumerator("Position"))
- .valueToKey(camera.position())));
+ d.maybeSpace() << QStringLiteral("QCameraDevice(name=%1, id=%2, position=%3)")
+ .arg(camera.description())
+ .arg(QLatin1StringView(camera.id()))
+ .arg(QLatin1StringView(
+ QMetaEnum::fromType<QCameraDevice::Position>().valueToKey(
+ camera.position())));
return d.space();
}
#endif
diff --git a/src/multimedia/camera/qimagecapture.cpp b/src/multimedia/camera/qimagecapture.cpp
index 9b92ce743..ecf39935c 100644
--- a/src/multimedia/camera/qimagecapture.cpp
+++ b/src/multimedia/camera/qimagecapture.cpp
@@ -92,18 +92,15 @@ QImageCapture::QImageCapture(QObject *parent)
}
d->control = maybeControl.value();
- connect(d->control, SIGNAL(imageExposed(int)),
- this, SIGNAL(imageExposed(int)));
- connect(d->control, SIGNAL(imageCaptured(int,QImage)),
- this, SIGNAL(imageCaptured(int,QImage)));
- connect(d->control, SIGNAL(imageMetadataAvailable(int,QMediaMetaData)),
- this, SIGNAL(imageMetadataAvailable(int,QMediaMetaData)));
- connect(d->control, SIGNAL(imageAvailable(int,QVideoFrame)),
- this, SIGNAL(imageAvailable(int,QVideoFrame)));
- connect(d->control, SIGNAL(imageSaved(int,QString)),
- this, SIGNAL(imageSaved(int,QString)));
- connect(d->control, SIGNAL(readyForCaptureChanged(bool)),
- this, SIGNAL(readyForCaptureChanged(bool)));
+ connect(d->control, &QPlatformImageCapture::imageExposed, this, &QImageCapture::imageExposed);
+ connect(d->control, &QPlatformImageCapture::imageCaptured, this, &QImageCapture::imageCaptured);
+ connect(d->control, &QPlatformImageCapture::imageMetadataAvailable, this,
+ &QImageCapture::imageMetadataAvailable);
+ connect(d->control, &QPlatformImageCapture::imageAvailable, this,
+ &QImageCapture::imageAvailable);
+ connect(d->control, &QPlatformImageCapture::imageSaved, this, &QImageCapture::imageSaved);
+ connect(d->control, &QPlatformImageCapture::readyForCaptureChanged, this,
+ &QImageCapture::readyForCaptureChanged);
connect(d->control, SIGNAL(error(int,int,QString)),
this, SLOT(_q_error(int,int,QString)));
}
@@ -214,8 +211,8 @@ void QImageCapture::addMetaData(const QMediaMetaData &metaData)
{
Q_D(QImageCapture);
auto data = d->metaData;
- for (auto k : metaData.keys())
- data.insert(k, metaData.value(k));
+ for (auto &&[key, value] : metaData.asKeyValueRange())
+ data.insert(key, value);
setMetaData(data);
}
diff --git a/src/multimedia/doc/src/qtmultimedia-index.qdoc b/src/multimedia/doc/src/qtmultimedia-index.qdoc
index 67b6688be..74646b84c 100644
--- a/src/multimedia/doc/src/qtmultimedia-index.qdoc
+++ b/src/multimedia/doc/src/qtmultimedia-index.qdoc
@@ -193,18 +193,18 @@
The version shipped with Qt binary packages is \b{FFmpeg 6.1.1} and is tested
by the maintainers.
- \note On the Windows platform, Qt's FFmpeg media backend uses
- dynamic linking to the FFmpeg libraries. Windows applications must
- therefore bundle FFmpeg binaries in their installer, and make them
- visible to the application according to Windows dll loading rules.
- We recommend to store the FFmpeg dlls in the same directory as the
- application's executable file, because this guarantees that the
- correct build of FFmpeg is being used if multiple versions are
- available on the system. All necessary FFmpeg dlls are shipped with
- the Qt Online Installer and are automatically deployed if the
- windeployqt tool is used to create the deployment. Applications can
- also deploy their own build of FFmpeg, as long as the FFmpeg major
- version matches the version used by Qt.
+ \note On the Windows and macOS platforms, Qt's FFmpeg media backend
+ uses dynamic linking to the FFmpeg libraries. Windows and macOS
+ applications must therefore bundle FFmpeg binaries in their
+ installer, and make them visible to the application at runtime. On
+ Windows, we recommend storing the FFmpeg dlls in the same directory
+ as the application's executable file, because this guarantees that
+ the correct build of FFmpeg is being used if multiple versions are
+ available on the system. All necessary FFmpeg libraries are shipped
+ with the Qt Online Installer and are automatically deployed if the
+ windeployqt or macdeployqt tools are used to create the deployment.
+ Applications can also deploy their own build of FFmpeg, as long as
+ the FFmpeg major version matches the version used by Qt.
\note See \l{Licenses and Attributions} regarding what components are removed
in the package shipped by Qt.
@@ -220,6 +220,8 @@
\note These are still available but with \b limited support. The gstreamer
backend is only available on Linux.
+ \note MediaCodec on Android is deprecated as of Qt 6.8 and will be removed
+ in Qt 7.0.
\section2 Backend support
Maintainers will strive to fix critical issues with the native backends but
diff --git a/src/multimedia/platform/qgstreamer_platformspecificinterface.cpp b/src/multimedia/platform/qgstreamer_platformspecificinterface.cpp
new file mode 100644
index 000000000..06ce46e3c
--- /dev/null
+++ b/src/multimedia/platform/qgstreamer_platformspecificinterface.cpp
@@ -0,0 +1,27 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/private/qgstreamer_platformspecificinterface_p.h>
+
+QT_BEGIN_NAMESPACE
+
+QGStreamerPlatformSpecificInterface::~QGStreamerPlatformSpecificInterface() = default;
+
+QGStreamerPlatformSpecificInterface *QGStreamerPlatformSpecificInterface::instance()
+{
+ return dynamic_cast<QGStreamerPlatformSpecificInterface *>(
+ QPlatformMediaIntegration::instance()->platformSpecificInterface());
+}
+
+QT_END_NAMESPACE
diff --git a/src/multimedia/platform/qgstreamer_platformspecificinterface_p.h b/src/multimedia/platform/qgstreamer_platformspecificinterface_p.h
new file mode 100644
index 000000000..1a086f5a4
--- /dev/null
+++ b/src/multimedia/platform/qgstreamer_platformspecificinterface_p.h
@@ -0,0 +1,46 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef GSTREAMER_PLATFORMSPECIFICINTERFACE_P_H
+#define GSTREAMER_PLATFORMSPECIFICINTERFACE_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/private/qplatformmediaintegration_p.h>
+
+typedef struct _GstPipeline GstPipeline; // NOLINT (bugprone-reserved-identifier)
+typedef struct _GstElement GstElement; // NOLINT (bugprone-reserved-identifier)
+
+QT_BEGIN_NAMESPACE
+
+class Q_MULTIMEDIA_EXPORT QGStreamerPlatformSpecificInterface
+ : public QAbstractPlatformSpecificInterface
+{
+public:
+ ~QGStreamerPlatformSpecificInterface() override;
+
+ static QGStreamerPlatformSpecificInterface *instance();
+
+ virtual QAudioDevice makeCustomGStreamerAudioInput(const QByteArray &gstreamerPipeline) = 0;
+ virtual QAudioDevice makeCustomGStreamerAudioOutput(const QByteArray &gstreamerPipeline) = 0;
+ virtual QCamera *makeCustomGStreamerCamera(const QByteArray &gstreamerPipeline,
+ QObject *parent) = 0;
+
+ // Note: ownership of GstElement is not transferred
+ virtual QCamera *makeCustomGStreamerCamera(GstElement *, QObject *parent) = 0;
+
+ virtual GstPipeline *gstPipeline(QMediaPlayer *) = 0;
+ virtual GstPipeline *gstPipeline(QMediaCaptureSession *) = 0;
+};
+
+QT_END_NAMESPACE
+
+#endif // GSTREAMER_PLATFORMSPECIFICINTERFACE_P_H
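
Usage sketch (not part of the patch): reaching the new interface from backend-aware code. This is private API (note the "We mean it" warning), so the lookup is guarded; the pipeline description is illustrative only.

    #include <QtMultimedia/private/qgstreamer_platformspecificinterface_p.h>
    #include <QtMultimedia/qcamera.h>

    QCamera *tryMakePipelineCamera(QObject *parent)
    {
        // instance() returns nullptr unless the active backend is the GStreamer one.
        auto *iface = QGStreamerPlatformSpecificInterface::instance();
        if (!iface)
            return nullptr;

        // The returned QCamera follows the usual QObject parent ownership rules.
        return iface->makeCustomGStreamerCamera("videotestsrc ! videoconvert", parent);
    }
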
diff --git a/src/multimedia/platform/qplatformaudiobufferinput.cpp b/src/multimedia/platform/qplatformaudiobufferinput.cpp
new file mode 100644
index 000000000..883b11fc0
--- /dev/null
+++ b/src/multimedia/platform/qplatformaudiobufferinput.cpp
@@ -0,0 +1,10 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qplatformaudiobufferinput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+QT_END_NAMESPACE
+
+#include "moc_qplatformaudiobufferinput_p.cpp"
diff --git a/src/multimedia/platform/qplatformaudiobufferinput_p.h b/src/multimedia/platform/qplatformaudiobufferinput_p.h
new file mode 100644
index 000000000..a05a98100
--- /dev/null
+++ b/src/multimedia/platform/qplatformaudiobufferinput_p.h
@@ -0,0 +1,56 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QPLATFORMAUDIOBUFFERINPUT_P_H
+#define QPLATFORMAUDIOBUFFERINPUT_P_H
+
+#include "qaudioformat.h"
+#include "qaudiobuffer.h"
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+QT_BEGIN_NAMESPACE
+
+class QMediaInputEncoderInterface;
+
+class Q_MULTIMEDIA_EXPORT QPlatformAudioBufferInputBase : public QObject
+{
+ Q_OBJECT
+Q_SIGNALS:
+ void newAudioBuffer(const QAudioBuffer &buffer);
+};
+
+class Q_MULTIMEDIA_EXPORT QPlatformAudioBufferInput : public QPlatformAudioBufferInputBase
+{
+ Q_OBJECT
+public:
+ QPlatformAudioBufferInput(QAudioFormat format = {}) : m_format(std::move(format)) { }
+
+ const QAudioFormat &audioFormat() const { return m_format; }
+
+ QMediaInputEncoderInterface *encoderInterface() const { return m_encoderInterface; }
+ void setEncoderInterface(QMediaInputEncoderInterface *interface)
+ {
+ m_encoderInterface = interface;
+ }
+
+Q_SIGNALS:
+ void encoderUpdated();
+
+private:
+ QMediaInputEncoderInterface *m_encoderInterface = nullptr;
+ QAudioFormat m_format;
+};
+
+QT_END_NAMESPACE
+
+#endif // QPLATFORMAUDIOBUFFERINPUT_P_H
diff --git a/src/multimedia/platform/qplatformcamera.cpp b/src/multimedia/platform/qplatformcamera.cpp
index 0d3975550..d03c19d67 100644
--- a/src/multimedia/platform/qplatformcamera.cpp
+++ b/src/multimedia/platform/qplatformcamera.cpp
@@ -50,7 +50,7 @@ QVideoFrameFormat QPlatformCamera::frameFormat() const
m_framePixelFormat == QVideoFrameFormat::Format_Invalid
? m_cameraFormat.pixelFormat()
: m_framePixelFormat);
- result.setFrameRate(m_cameraFormat.maxFrameRate());
+ result.setStreamFrameRate(m_cameraFormat.maxFrameRate());
return result;
}
@@ -221,6 +221,13 @@ int QPlatformCamera::colorTemperatureForWhiteBalance(QCamera::WhiteBalanceMode m
return 0;
}
+void QPlatformCamera::updateError(QCamera::Error error, const QString &errorString)
+{
+ QMetaObject::invokeMethod(this, [this, error, errorString]() {
+ m_error.setAndNotify(error, errorString, *this);
+ });
+}
+
QT_END_NAMESPACE
#include "moc_qplatformcamera_p.cpp"
diff --git a/src/multimedia/platform/qplatformcamera_p.h b/src/multimedia/platform/qplatformcamera_p.h
index 85624c0ce..341bf9121 100644
--- a/src/multimedia/platform/qplatformcamera_p.h
+++ b/src/multimedia/platform/qplatformcamera_p.h
@@ -16,7 +16,7 @@
//
#include "qplatformvideosource_p.h"
-
+#include "private/qerrorinfo_p.h"
#include <QtMultimedia/qcamera.h>
QT_BEGIN_NAMESPACE
@@ -110,8 +110,13 @@ public:
static int colorTemperatureForWhiteBalance(QCamera::WhiteBalanceMode mode);
+ QCamera::Error error() const { return m_error.code(); }
+ QString errorString() const final { return m_error.description(); }
+
+ void updateError(QCamera::Error error, const QString &errorString);
+
Q_SIGNALS:
- void error(int error, const QString &errorString);
+ void errorOccurred(QCamera::Error error, const QString &errorString);
protected:
explicit QPlatformCamera(QCamera *parent);
@@ -150,6 +155,7 @@ private:
float m_maxExposureTime = -1.;
QCamera::WhiteBalanceMode m_whiteBalance = QCamera::WhiteBalanceAuto;
int m_colorTemperature = 0;
+ QErrorInfo<QCamera::Error> m_error;
};
QT_END_NAMESPACE
diff --git a/src/multimedia/platform/qplatformmediacapture.cpp b/src/multimedia/platform/qplatformmediacapture.cpp
index c8aded824..13bcbd63b 100644
--- a/src/multimedia/platform/qplatformmediacapture.cpp
+++ b/src/multimedia/platform/qplatformmediacapture.cpp
@@ -8,6 +8,7 @@
#include <QtMultimedia/private/qplatformmediacapture_p.h>
#include <QtMultimedia/private/qmediacapturesession_p.h>
#include <QtMultimedia/private/qplatformsurfacecapture_p.h>
+#include <QtMultimedia/private/qplatformvideoframeinput_p.h>
#include <QtMultimedia/private/qtmultimediaglobal_p.h>
QT_BEGIN_NAMESPACE
@@ -23,6 +24,7 @@ std::vector<QPlatformVideoSource *> QPlatformMediaCaptureSession::activeVideoSou
result.push_back(source);
};
+ checkSource(videoFrameInput());
checkSource(camera());
checkSource(screenCapture());
checkSource(windowCapture());
@@ -30,15 +32,6 @@ std::vector<QPlatformVideoSource *> QPlatformMediaCaptureSession::activeVideoSou
return result;
}
-void *QPlatformMediaCaptureSession::nativePipeline(QMediaCaptureSession *session)
-{
- auto sessionPrivate = session->d_func();
- if (!sessionPrivate || !sessionPrivate->captureSession)
- return nullptr;
-
- return sessionPrivate->captureSession->nativePipeline();
-}
-
QT_END_NAMESPACE
#include "moc_qplatformmediacapture_p.cpp"
diff --git a/src/multimedia/platform/qplatformmediacapture_p.h b/src/multimedia/platform/qplatformmediacapture_p.h
index 981cf199b..8d6afc90e 100644
--- a/src/multimedia/platform/qplatformmediacapture_p.h
+++ b/src/multimedia/platform/qplatformmediacapture_p.h
@@ -29,6 +29,8 @@ class QPlatformAudioOutput;
class QMediaCaptureSession;
class QPlatformSurfaceCapture;
class QPlatformVideoSource;
+class QPlatformAudioBufferInput;
+class QPlatformVideoFrameInput;
class Q_MULTIMEDIA_EXPORT QPlatformMediaCaptureSession : public QObject
{
@@ -49,6 +51,9 @@ public:
virtual QPlatformSurfaceCapture *windowCapture() { return nullptr; }
virtual void setWindowCapture(QPlatformSurfaceCapture *) { }
+ virtual QPlatformVideoFrameInput *videoFrameInput() { return nullptr; }
+ virtual void setVideoFrameInput(QPlatformVideoFrameInput *) { }
+
virtual QPlatformImageCapture *imageCapture() = 0;
virtual void setImageCapture(QPlatformImageCapture *) {}
@@ -57,6 +62,8 @@ public:
virtual void setAudioInput(QPlatformAudioInput *input) = 0;
+ virtual void setAudioBufferInput(QPlatformAudioBufferInput *) { }
+
virtual void setVideoPreview(QVideoSink * /*sink*/) {}
virtual void setAudioOutput(QPlatformAudioOutput *) {}
@@ -64,15 +71,11 @@ public:
// TBD: implement ordering of the sources basing on the order of adding
std::vector<QPlatformVideoSource *> activeVideoSources();
- virtual void *nativePipeline() { return nullptr; }
-
- // private API, the purpose is getting GstPipeline
- static void *nativePipeline(QMediaCaptureSession *);
-
Q_SIGNALS:
void cameraChanged();
void screenCaptureChanged();
void windowCaptureChanged();
+ void videoFrameInputChanged();
void imageCaptureChanged();
void encoderChanged();
diff --git a/src/multimedia/platform/qplatformmediaintegration.cpp b/src/multimedia/platform/qplatformmediaintegration.cpp
index dda00de61..4bacc488f 100644
--- a/src/multimedia/platform/qplatformmediaintegration.cpp
+++ b/src/multimedia/platform/qplatformmediaintegration.cpp
@@ -207,6 +207,12 @@ QLatin1String QPlatformMediaIntegration::name()
return m_backendName;
}
+QVideoFrame QPlatformMediaIntegration::convertVideoFrame(QVideoFrame &,
+ const QVideoFrameFormat &)
+{
+ return {};
+}
+
QPlatformMediaIntegration::QPlatformMediaIntegration(QLatin1String name) : m_backendName(name) { }
QPlatformMediaIntegration::~QPlatformMediaIntegration() = default;
diff --git a/src/multimedia/platform/qplatformmediaintegration_p.h b/src/multimedia/platform/qplatformmediaintegration_p.h
index 19fa40baf..d03d0c794 100644
--- a/src/multimedia/platform/qplatformmediaintegration_p.h
+++ b/src/multimedia/platform/qplatformmediaintegration_p.h
@@ -53,6 +53,13 @@ class QPlatformAudioOutput;
class QPlatformVideoDevices;
class QCapturableWindow;
class QPlatformCapturableWindows;
+class QVideoFrame;
+
+class Q_MULTIMEDIA_EXPORT QAbstractPlatformSpecificInterface
+{
+public:
+ virtual ~QAbstractPlatformSpecificInterface() = default;
+};
class Q_MULTIMEDIA_EXPORT QPlatformMediaIntegration : public QObject
{
@@ -96,6 +103,11 @@ public:
static QStringList availableBackends();
QLatin1String name(); // for unit tests
+ // Convert a QVideoFrame to the destination format
+ virtual QVideoFrame convertVideoFrame(QVideoFrame &, const QVideoFrameFormat &);
+
+ virtual QAbstractPlatformSpecificInterface *platformSpecificInterface() { return nullptr; }
+
protected:
virtual QPlatformMediaFormatInfo *createFormatInfo();
diff --git a/src/multimedia/platform/qplatformmediaplayer.cpp b/src/multimedia/platform/qplatformmediaplayer.cpp
index ea22f94df..00840f074 100644
--- a/src/multimedia/platform/qplatformmediaplayer.cpp
+++ b/src/multimedia/platform/qplatformmediaplayer.cpp
@@ -14,9 +14,7 @@ QPlatformMediaPlayer::QPlatformMediaPlayer(QMediaPlayer *parent) : player(parent
QPlatformMediaIntegration::instance()->mediaDevices()->prepareAudio();
}
-QPlatformMediaPlayer::~QPlatformMediaPlayer()
-{
-}
+QPlatformMediaPlayer::~QPlatformMediaPlayer() = default;
void QPlatformMediaPlayer::stateChanged(QMediaPlayer::PlaybackState newState)
{
@@ -39,16 +37,4 @@ void QPlatformMediaPlayer::error(int error, const QString &errorString)
player->d_func()->setError(QMediaPlayer::Error(error), errorString);
}
-void *QPlatformMediaPlayer::nativePipeline(QMediaPlayer *player)
-{
- if (!player)
- return nullptr;
-
- auto playerPrivate = player->d_func();
- if (!playerPrivate || !playerPrivate->control)
- return nullptr;
-
- return playerPrivate->control->nativePipeline();
-}
-
QT_END_NAMESPACE
diff --git a/src/multimedia/platform/qplatformmediaplayer_p.h b/src/multimedia/platform/qplatformmediaplayer_p.h
index 6e3590763..f8815958b 100644
--- a/src/multimedia/platform/qplatformmediaplayer_p.h
+++ b/src/multimedia/platform/qplatformmediaplayer_p.h
@@ -22,6 +22,7 @@
#include <QtCore/qpair.h>
#include <QtCore/private/qglobal_p.h>
+#include <QtCore/qobject.h>
QT_BEGIN_NAMESPACE
@@ -64,19 +65,23 @@ public:
virtual void setAudioOutput(QPlatformAudioOutput *) {}
+ virtual void setAudioBufferOutput(QAudioBufferOutput *) { }
+
virtual QMediaMetaData metaData() const { return {}; }
virtual void setVideoSink(QVideoSink * /*sink*/) = 0;
// media streams
- enum TrackType { VideoStream, AudioStream, SubtitleStream, NTrackTypes };
+ enum TrackType : uint8_t { VideoStream, AudioStream, SubtitleStream, NTrackTypes };
virtual int trackCount(TrackType) { return 0; };
virtual QMediaMetaData trackMetaData(TrackType /*type*/, int /*streamNumber*/) { return QMediaMetaData(); }
virtual int activeTrack(TrackType) { return -1; }
virtual void setActiveTrack(TrackType, int /*streamNumber*/) {}
+ void durationChanged(std::chrono::milliseconds ms) { durationChanged(ms.count()); }
void durationChanged(qint64 duration) { emit player->durationChanged(duration); }
+ void positionChanged(std::chrono::milliseconds ms) { positionChanged(ms.count()); }
void positionChanged(qint64 position) {
if (m_position == position)
return;
@@ -115,7 +120,7 @@ public:
bool doLoop() {
return isSeekable() && (m_loops < 0 || ++m_currentLoop < m_loops);
}
- int loops() { return m_loops; }
+ int loops() const { return m_loops; }
virtual void setLoops(int loops)
{
if (m_loops == loops)
@@ -124,11 +129,6 @@ public:
Q_EMIT player->loopsChanged();
}
- virtual void *nativePipeline() { return nullptr; }
-
- // private API, the purpose is getting GstPipeline
- static void *nativePipeline(QMediaPlayer *player);
-
protected:
explicit QPlatformMediaPlayer(QMediaPlayer *parent = nullptr);
@@ -144,6 +144,25 @@ private:
qint64 m_position = 0;
};
+#ifndef QT_NO_DEBUG_STREAM
+inline QDebug operator<<(QDebug dbg, QPlatformMediaPlayer::TrackType type)
+{
+ QDebugStateSaver save(dbg);
+ dbg.nospace();
+
+ switch (type) {
+ case QPlatformMediaPlayer::TrackType::AudioStream:
+ return dbg << "AudioStream";
+ case QPlatformMediaPlayer::TrackType::VideoStream:
+ return dbg << "VideoStream";
+ case QPlatformMediaPlayer::TrackType::SubtitleStream:
+ return dbg << "SubtitleStream";
+ default:
+ Q_UNREACHABLE_RETURN(dbg);
+ }
+}
+#endif
+
QT_END_NAMESPACE
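A one-line sketch of what the streaming operator above enables (illustration only, not part of the patch):

    qDebug() << QPlatformMediaPlayer::TrackType::SubtitleStream; // prints "SubtitleStream"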
diff --git a/src/multimedia/platform/qplatformmediarecorder.cpp b/src/multimedia/platform/qplatformmediarecorder.cpp
index ba9ea0165..30dba0a45 100644
--- a/src/multimedia/platform/qplatformmediarecorder.cpp
+++ b/src/multimedia/platform/qplatformmediarecorder.cpp
@@ -15,12 +15,12 @@ QPlatformMediaRecorder::QPlatformMediaRecorder(QMediaRecorder *parent)
void QPlatformMediaRecorder::pause()
{
- error(QMediaRecorder::FormatError, QMediaRecorder::tr("Pause not supported"));
+ updateError(QMediaRecorder::FormatError, QMediaRecorder::tr("Pause not supported"));
}
void QPlatformMediaRecorder::resume()
{
- error(QMediaRecorder::FormatError, QMediaRecorder::tr("Resume not supported"));
+ updateError(QMediaRecorder::FormatError, QMediaRecorder::tr("Resume not supported"));
}
void QPlatformMediaRecorder::stateChanged(QMediaRecorder::RecorderState state)
@@ -47,7 +47,7 @@ void QPlatformMediaRecorder::actualLocationChanged(const QUrl &location)
emit q->actualLocationChanged(location);
}
-void QPlatformMediaRecorder::error(QMediaRecorder::Error error, const QString &errorString)
+void QPlatformMediaRecorder::updateError(QMediaRecorder::Error error, const QString &errorString)
{
m_error.setAndNotify(error, errorString, *q);
}
diff --git a/src/multimedia/platform/qplatformmediarecorder_p.h b/src/multimedia/platform/qplatformmediarecorder_p.h
index 6e88dc187..ab6af759d 100644
--- a/src/multimedia/platform/qplatformmediarecorder_p.h
+++ b/src/multimedia/platform/qplatformmediarecorder_p.h
@@ -125,18 +125,20 @@ public:
virtual void setOutputLocation(const QUrl &location) { m_outputLocation = location; }
QUrl actualLocation() const { return m_actualLocation; }
void clearActualLocation() { m_actualLocation.clear(); }
- void clearError() { error(QMediaRecorder::NoError, QString()); }
+ void clearError() { updateError(QMediaRecorder::NoError, QString()); }
QIODevice *outputDevice() const { return m_outputDevice; }
void setOutputDevice(QIODevice *device) { m_outputDevice = device; }
+ virtual void updateAutoStop() { }
+
protected:
explicit QPlatformMediaRecorder(QMediaRecorder *parent);
void stateChanged(QMediaRecorder::RecorderState state);
void durationChanged(qint64 position);
void actualLocationChanged(const QUrl &location);
- void error(QMediaRecorder::Error error, const QString &errorString);
+ void updateError(QMediaRecorder::Error error, const QString &errorString);
void metaDataChanged();
QMediaRecorder *mediaRecorder() { return q; }
diff --git a/src/multimedia/platform/qplatformsurfacecapture_p.h b/src/multimedia/platform/qplatformsurfacecapture_p.h
index 42fbda474..e4c59c6f4 100644
--- a/src/multimedia/platform/qplatformsurfacecapture_p.h
+++ b/src/multimedia/platform/qplatformsurfacecapture_p.h
@@ -61,7 +61,7 @@ public:
Source source() const { return m_source; }
Error error() const;
- QString errorString() const;
+ QString errorString() const final;
protected:
virtual bool setActiveInternal(bool) = 0;
@@ -74,7 +74,6 @@ public Q_SLOTS:
Q_SIGNALS:
void sourceChanged(WindowSource);
void sourceChanged(ScreenSource);
- void errorChanged();
void errorOccurred(Error error, QString errorString);
private:
diff --git a/src/multimedia/platform/qplatformvideoframeinput.cpp b/src/multimedia/platform/qplatformvideoframeinput.cpp
new file mode 100644
index 000000000..d90306345
--- /dev/null
+++ b/src/multimedia/platform/qplatformvideoframeinput.cpp
@@ -0,0 +1,10 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qplatformvideoframeinput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+QT_END_NAMESPACE
+
+#include "moc_qplatformvideoframeinput_p.cpp"
diff --git a/src/multimedia/platform/qplatformvideoframeinput_p.h b/src/multimedia/platform/qplatformvideoframeinput_p.h
new file mode 100644
index 000000000..45714492c
--- /dev/null
+++ b/src/multimedia/platform/qplatformvideoframeinput_p.h
@@ -0,0 +1,55 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QPLATFORMVIDEOFRAMEINPUT_P_H
+#define QPLATFORMVIDEOFRAMEINPUT_P_H
+
+#include "qplatformvideosource_p.h"
+#include "qmetaobject.h"
+#include "qpointer.h"
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+QT_BEGIN_NAMESPACE
+
+class QMediaInputEncoderInterface;
+
+class Q_MULTIMEDIA_EXPORT QPlatformVideoFrameInput : public QPlatformVideoSource
+{
+ Q_OBJECT
+public:
+ QPlatformVideoFrameInput(QVideoFrameFormat format = {}) : m_format(std::move(format)) { }
+
+ void setActive(bool) final { }
+ bool isActive() const final { return true; }
+
+ QVideoFrameFormat frameFormat() const final { return m_format; }
+
+ QString errorString() const final { return {}; }
+
+ QMediaInputEncoderInterface *encoderInterface() const { return m_encoderInterface; }
+ void setEncoderInterface(QMediaInputEncoderInterface *interface)
+ {
+ m_encoderInterface = interface;
+ }
+
+Q_SIGNALS:
+ void encoderUpdated();
+
+private:
+ QMediaInputEncoderInterface *m_encoderInterface = nullptr;
+ QVideoFrameFormat m_format;
+};
+
+QT_END_NAMESPACE
+
+#endif // QPLATFORMVIDEOFRAMEINPUT_P_H
diff --git a/src/multimedia/platform/qplatformvideosource_p.h b/src/multimedia/platform/qplatformvideosource_p.h
index 3ed76d3e2..b11524226 100644
--- a/src/multimedia/platform/qplatformvideosource_p.h
+++ b/src/multimedia/platform/qplatformvideosource_p.h
@@ -43,9 +43,14 @@ public:
virtual void setCaptureSession(QPlatformMediaCaptureSession *) { }
+ virtual QString errorString() const = 0;
+
+ bool hasError() const { return !errorString().isEmpty(); }
+
Q_SIGNALS:
void newVideoFrame(const QVideoFrame &);
void activeChanged(bool);
+ void errorChanged();
};
QT_END_NAMESPACE
diff --git a/src/multimedia/playback/qmediaplayer.cpp b/src/multimedia/playback/qmediaplayer.cpp
index dc8e3dab8..644c2d094 100644
--- a/src/multimedia/playback/qmediaplayer.cpp
+++ b/src/multimedia/playback/qmediaplayer.cpp
@@ -5,6 +5,7 @@
#include <private/qmultimediautils_p.h>
#include <private/qplatformmediaintegration_p.h>
+#include <private/qaudiobufferoutput_p.h>
#include <qvideosink.h>
#include <qaudiooutput.h>
@@ -596,6 +597,12 @@ void QMediaPlayer::setPlaybackRate(qreal rate)
It does not wait for the media to finish loading and does not check for errors. Listen for
the mediaStatusChanged() and error() signals to be notified when the media is loaded and
when an error occurs during loading.
+
+ \note FFmpeg, used by the FFmpeg media backend, restricts use of nested protocols for
+ security reasons. In controlled environments where all inputs are trusted, the list of
+ approved protocols can be overridden using the QT_FFMPEG_PROTOCOL_WHITELIST environment
+ variable. This environment variable is Qt's private API and can change between patch
+ releases without notice.
*/
void QMediaPlayer::setSource(const QUrl &source)
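An illustrative sketch of overriding the whitelist in a fully trusted environment; the protocol list and URL below are assumptions, not taken from this patch:

    // Private API: may change between patch releases without notice.
    qputenv("QT_FFMPEG_PROTOCOL_WHITELIST", "file,crypto,data,http,https,tcp,tls");

    QMediaPlayer player;
    player.setSource(QUrl(QStringLiteral("https://example.com/stream/playlist.m3u8")));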
@@ -643,6 +650,51 @@ void QMediaPlayer::setSourceDevice(QIODevice *device, const QUrl &sourceUrl)
}
/*!
+ Sets an audio buffer \a output on the media player.
+
+ If a \l QAudioBufferOutput is specified and the media source
+ contains an audio stream, the media player emits
+ the signal \l{QAudioBufferOutput::audioBufferReceived} with
+ audio buffers containing decoded audio data. At the end of
+ the audio stream, \c QMediaPlayer emits an empty \l QAudioBuffer.
+
+ \c QMediaPlayer emits audio buffers at the same time as it
+ pushes the matching data to the audio output, if one is set.
+ However, the sound may be played with a small delay due to
+ audio buffering.
+*/
+void QMediaPlayer::setAudioBufferOutput(QAudioBufferOutput *output)
+{
+ Q_D(QMediaPlayer);
+
+ QAudioBufferOutput *oldOutput = d->audioBufferOutput;
+ if (oldOutput == output)
+ return;
+
+ d->audioBufferOutput = output;
+
+ if (output) {
+ auto oldPlayer = QAudioBufferOutputPrivate::exchangeMediaPlayer(*oldOutput, this);
+ if (oldPlayer)
+ oldPlayer->setAudioBufferOutput(nullptr);
+ }
+
+ if (d->control)
+ d->control->setAudioBufferOutput(output);
+
+ emit audioBufferOutputChanged();
+}
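A minimal usage sketch (not part of the patch; the local file name and the buffer-consuming lambda are assumptions):

    QMediaPlayer player;
    QAudioBufferOutput bufferOutput;
    player.setAudioBufferOutput(&bufferOutput);

    QObject::connect(&bufferOutput, &QAudioBufferOutput::audioBufferReceived,
                     &player, [](const QAudioBuffer &buffer) {
        if (!buffer.isValid())
            return; // empty buffer marks the end of the audio stream
        // inspect buffer.format(), buffer.frameCount(), buffer.constData<T>() ...
    });

    player.setSource(QUrl::fromLocalFile(QStringLiteral("music.mp3")));
    player.play();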
+
+/*!
+ Returns the \l QAudioBufferOutput that has been set on the media player.
+*/
+QAudioBufferOutput *QMediaPlayer::audioBufferOutput() const
+{
+ Q_D(const QMediaPlayer);
+ return d->audioBufferOutput;
+}
+
+/*!
\qmlproperty AudioOutput QtMultimedia::MediaPlayer::audioOutput
This property holds the target audio output.
diff --git a/src/multimedia/playback/qmediaplayer.h b/src/multimedia/playback/qmediaplayer.h
index 015a30f05..e0d1fec75 100644
--- a/src/multimedia/playback/qmediaplayer.h
+++ b/src/multimedia/playback/qmediaplayer.h
@@ -17,6 +17,7 @@ class QAudioOutput;
class QAudioDevice;
class QMediaMetaData;
class QMediaTimeRange;
+class QAudioBufferOutput;
class QMediaPlayerPrivate;
class Q_MULTIMEDIA_EXPORT QMediaPlayer : public QObject
@@ -106,6 +107,9 @@ public:
void setActiveVideoTrack(int index);
void setActiveSubtitleTrack(int index);
+ void setAudioBufferOutput(QAudioBufferOutput *output);
+ QAudioBufferOutput *audioBufferOutput() const;
+
void setAudioOutput(QAudioOutput *output);
QAudioOutput *audioOutput() const;
@@ -177,6 +181,7 @@ Q_SIGNALS:
void metaDataChanged();
void videoOutputChanged();
void audioOutputChanged();
+ void audioBufferOutputChanged();
void tracksChanged();
void activeTracksChanged();
diff --git a/src/multimedia/playback/qmediaplayer_p.h b/src/multimedia/playback/qmediaplayer_p.h
index ece086d06..3d32d4e68 100644
--- a/src/multimedia/playback/qmediaplayer_p.h
+++ b/src/multimedia/playback/qmediaplayer_p.h
@@ -19,6 +19,7 @@
#include "qmediametadata.h"
#include "qvideosink.h"
#include "qaudiooutput.h"
+#include "qaudiobufferoutput.h"
#include <private/qplatformmediaplayer_p.h>
#include <private/qerrorinfo_p.h>
@@ -40,9 +41,15 @@ class QMediaPlayerPrivate : public QObjectPrivate
Q_DECLARE_PUBLIC(QMediaPlayer)
public:
+ static QMediaPlayerPrivate *get(QMediaPlayer *session)
+ {
+ return reinterpret_cast<QMediaPlayerPrivate *>(QObjectPrivate::get(session));
+ }
+
QMediaPlayerPrivate() = default;
QPlatformMediaPlayer *control = nullptr;
+ QPointer<QAudioBufferOutput> audioBufferOutput;
QPointer<QAudioOutput> audioOutput;
QPointer<QVideoSink> videoSink;
QPointer<QObject> videoOutput;
diff --git a/src/multimedia/pulseaudio/qaudioengine_pulse.cpp b/src/multimedia/pulseaudio/qaudioengine_pulse.cpp
index e54356404..5fac7234a 100644
--- a/src/multimedia/pulseaudio/qaudioengine_pulse.cpp
+++ b/src/multimedia/pulseaudio/qaudioengine_pulse.cpp
@@ -477,7 +477,7 @@ void QPulseAudioEngine::onContextFailed()
release();
// Try to reconnect later
- QTimer::singleShot(3000, this, SLOT(prepare()));
+ QTimer::singleShot(3000, this, &QPulseAudioEngine::prepare);
}
QPulseAudioEngine *QPulseAudioEngine::instance()
diff --git a/src/multimedia/qmediaframeinput.cpp b/src/multimedia/qmediaframeinput.cpp
new file mode 100644
index 000000000..4bb90d3ee
--- /dev/null
+++ b/src/multimedia/qmediaframeinput.cpp
@@ -0,0 +1,43 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qmediaframeinput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+void QMediaFrameInputPrivate::setCaptureSession(QMediaCaptureSession *session)
+{
+ if (session == m_captureSession)
+ return;
+
+ auto prevSession = std::exchange(m_captureSession, session);
+ updateCaptureSessionConnections(prevSession, session);
+ updateCanSendMediaFrame();
+}
+
+void QMediaFrameInputPrivate::updateCanSendMediaFrame()
+{
+ const bool canSendMediaFrame = m_captureSession && checkIfCanSendMediaFrame();
+ if (m_canSendMediaFrame != canSendMediaFrame) {
+ m_canSendMediaFrame = canSendMediaFrame;
+ if (m_canSendMediaFrame)
+ emitReadyToSendMediaFrame();
+ }
+}
+
+void QMediaFrameInputPrivate::postponeCheckReadyToSend()
+{
+ if (m_canSendMediaFrame && !m_postponeReadyToSendCheckRun) {
+ m_postponeReadyToSendCheckRun = true;
+ QMetaObject::invokeMethod(
+ q_ptr,
+ [this]() {
+ m_postponeReadyToSendCheckRun = false;
+ if (m_canSendMediaFrame)
+ emitReadyToSendMediaFrame();
+ },
+ Qt::QueuedConnection);
+ }
+}
+
+QT_END_NAMESPACE
diff --git a/src/multimedia/qmediaframeinput_p.h b/src/multimedia/qmediaframeinput_p.h
new file mode 100644
index 000000000..22277865d
--- /dev/null
+++ b/src/multimedia/qmediaframeinput_p.h
@@ -0,0 +1,74 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QMEDIAFRAMEINPUT_P_H
+#define QMEDIAFRAMEINPUT_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qmediacapturesession.h"
+#include <QtCore/private/qobject_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QMediaFrameInputPrivate : public QObjectPrivate
+{
+public:
+ void setCaptureSession(QMediaCaptureSession *session);
+
+ QMediaCaptureSession *captureSession() const { return m_captureSession; }
+
+protected:
+ template <typename Sender>
+ bool sendMediaFrame(Sender &&sender)
+ {
+ if (!m_canSendMediaFrame)
+ return false;
+
+ sender();
+ postponeCheckReadyToSend();
+ return true;
+ }
+
+ template <typename Sender, typename Signal>
+ void addUpdateSignal(Sender sender, Signal signal)
+ {
+ connect(sender, signal, this, &QMediaFrameInputPrivate::updateCanSendMediaFrame);
+ }
+
+ template <typename Sender, typename Signal>
+ void removeUpdateSignal(Sender sender, Signal signal)
+ {
+ disconnect(sender, signal, this, &QMediaFrameInputPrivate::updateCanSendMediaFrame);
+ }
+
+ void updateCanSendMediaFrame();
+
+private:
+ void postponeCheckReadyToSend();
+
+ virtual bool checkIfCanSendMediaFrame() const = 0;
+
+ virtual void emitReadyToSendMediaFrame() = 0;
+
+ virtual void updateCaptureSessionConnections(QMediaCaptureSession *prevSession,
+ QMediaCaptureSession *currentSession) = 0;
+
+private:
+ QMediaCaptureSession *m_captureSession = nullptr;
+ bool m_canSendMediaFrame = false;
+ bool m_postponeReadyToSendCheckRun = false;
+};
+
+QT_END_NAMESPACE
+
+#endif // QMEDIAFRAMEINPUT_P_H
diff --git a/src/multimedia/qmediainputencoderinterface_p.h b/src/multimedia/qmediainputencoderinterface_p.h
new file mode 100644
index 000000000..c199e59b4
--- /dev/null
+++ b/src/multimedia/qmediainputencoderinterface_p.h
@@ -0,0 +1,31 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QMEDIAINPUTENCODERINTERFACE_P_H
+#define QMEDIAINPUTENCODERINTERFACE_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/qtmultimediaglobal.h>
+
+QT_BEGIN_NAMESPACE
+
+class QMediaInputEncoderInterface
+{
+public:
+ virtual ~QMediaInputEncoderInterface() = default;
+ virtual bool canPushFrame() const = 0;
+};
+
+QT_END_NAMESPACE
+
+#endif // QMEDIAINPUTENCODERINTERFACE_P_H
diff --git a/src/multimedia/qmediametadata.cpp b/src/multimedia/qmediametadata.cpp
index dc238721f..5ff462865 100644
--- a/src/multimedia/qmediametadata.cpp
+++ b/src/multimedia/qmediametadata.cpp
@@ -2,14 +2,15 @@
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
#include "qmediametadata.h"
+
#include <QtCore/qcoreapplication.h>
-#include <qvariant.h>
-#include <qobject.h>
-#include <qdatetime.h>
-#include <qmediaformat.h>
-#include <qsize.h>
-#include <qurl.h>
-#include <qimage.h>
+#include <QtCore/qdatetime.h>
+#include <QtCore/qobject.h>
+#include <QtCore/qsize.h>
+#include <QtCore/qurl.h>
+#include <QtCore/qvariant.h>
+#include <QtGui/qimage.h>
+#include <QtMultimedia/qmediaformat.h>
QT_BEGIN_NAMESPACE
@@ -41,7 +42,7 @@ QT_BEGIN_NAMESPACE
Media attributes
\row \li MediaType \li The type of the media (audio, video, etc). \li QString
\row \li FileFormat \li The file format of the media. \li QMediaFormat::FileFormat
- \row \li Duration \li The duration in millseconds of the media. \li qint64
+ \row \li Duration \li The duration in milliseconds of the media. \li qint64
\header \li {3,1}
Audio attributes
@@ -53,6 +54,7 @@ QT_BEGIN_NAMESPACE
\row \li VideoFrameRate \li The frame rate of the media's video stream. \li qreal
\row \li VideoBitRate \li The bit rate of the media's video stream in bits per second. \li int
\row \li VideoCodec \li The codec of the media's video stream. \li QMediaFormat::VideoCodec
+ \row \li HasHdrContent \li True if video is intended for HDR display (FFmpeg media backend only). \li bool
\header \li {3,1}
Music attributes
@@ -129,6 +131,10 @@ QMetaType QMediaMetaData::keyType(Key key)
case Resolution:
return QMetaType::fromType<QSize>();
+
+ case HasHdrContent:
+ return QMetaType::fromType<bool>();
+
default:
return QMetaType::fromType<void>();
}
@@ -276,6 +282,7 @@ QMetaType QMediaMetaData::keyType(Key key)
\value CoverArtImage Media cover art
\value Orientation
\value Resolution
+ \value [since 6.8] HasHdrContent Video may have HDR content (read only, FFmpeg media backend only)
*/
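For illustration, a client could query the new key roughly like this (assumes an existing QMediaPlayer named player and the FFmpeg backend):

    const QMediaMetaData md = player.metaData();
    if (md.value(QMediaMetaData::HasHdrContent).toBool())
        qDebug() << "source reports HDR content";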
/*!
@@ -385,6 +392,7 @@ QString QMediaMetaData::stringValue(QMediaMetaData::Key key) const
case Composer:
case Orientation:
case LeadPerformer:
+ case HasHdrContent:
return value.toString();
case Language: {
auto l = value.value<QLocale::Language>();
@@ -479,10 +487,31 @@ QString QMediaMetaData::metaDataKeyToString(QMediaMetaData::Key key)
return (QCoreApplication::translate("QMediaMetaData", "Resolution"));
case QMediaMetaData::LeadPerformer:
return (QCoreApplication::translate("QMediaMetaData", "Lead performer"));
+ case QMediaMetaData::HasHdrContent:
+ return (QCoreApplication::translate("QMediaMetaData", "Has HDR content"));
}
return QString();
}
+QDebug operator<<(QDebug dbg, const QMediaMetaData &metaData)
+{
+ QDebugStateSaver sv(dbg);
+ dbg.nospace();
+
+ dbg << "QMediaMetaData{";
+ auto range = metaData.asKeyValueRange();
+ auto begin = std::begin(range);
+
+ for (auto it = begin; it != std::end(range); ++it) {
+ if (it != begin)
+ dbg << ", ";
+ dbg << it->first << ": " << it->second;
+ }
+
+ dbg << "}";
+ return dbg;
+}
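Illustrative only; with the operator above, dumping metadata becomes a one-liner:

    QMediaMetaData md;
    md.insert(QMediaMetaData::Title, QStringLiteral("Clip"));
    md.insert(QMediaMetaData::Duration, 90000);
    qDebug() << md; // e.g. QMediaMetaData{Title: "Clip", Duration: 90000}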
+
// operator documentation
/*!
\fn QVariant &QMediaMetaData ::operator[](QMediaMetaData::Key k)
@@ -511,6 +540,11 @@ QString QMediaMetaData::metaDataKeyToString(QMediaMetaData::Key key)
\note this is a \c protected member of its class.
*/
+/*!
+ \fn auto QMediaMetaData::asKeyValueRange() const
+ \internal
+*/
+
QT_END_NAMESPACE
#include "moc_qmediametadata.cpp"
diff --git a/src/multimedia/qmediametadata.h b/src/multimedia/qmediametadata.h
index d6f4477d3..e21594a02 100644
--- a/src/multimedia/qmediametadata.h
+++ b/src/multimedia/qmediametadata.h
@@ -57,11 +57,13 @@ public:
CoverArtImage,
Orientation,
- Resolution
+ Resolution,
+
+ HasHdrContent
};
Q_ENUM(Key)
- static constexpr int NumMetaData = Resolution + 1;
+ static constexpr int NumMetaData = HasHdrContent + 1;
// QMetaType typeForKey(Key k);
Q_INVOKABLE QVariant value(Key k) const { return data.value(k); }
@@ -77,7 +79,11 @@ public:
Q_INVOKABLE static QString metaDataKeyToString(Key k);
+ QT_TECH_PREVIEW_API auto asKeyValueRange() const { return data.asKeyValueRange(); }
+
protected:
+ Q_MULTIMEDIA_EXPORT friend QDebug operator<<(QDebug, const QMediaMetaData &);
+
friend bool operator==(const QMediaMetaData &a, const QMediaMetaData &b)
{ return a.data == b.data; }
friend bool operator!=(const QMediaMetaData &a, const QMediaMetaData &b)
diff --git a/src/multimedia/qsymbolsresolveutils.cpp b/src/multimedia/qsymbolsresolveutils.cpp
new file mode 100644
index 000000000..8441ac243
--- /dev/null
+++ b/src/multimedia/qsymbolsresolveutils.cpp
@@ -0,0 +1,79 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qsymbolsresolveutils_p.h"
+
+#include <qdebug.h>
+#include <algorithm>
+#include <qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcSymbolsResolver, "qt.multimedia.symbolsresolver");
+
+bool SymbolsResolver::isLazyLoadEnabled()
+{
+ static const bool lazyLoad =
+ !static_cast<bool>(qEnvironmentVariableIntValue("QT_INSTANT_LOAD_FFMPEG_STUBS"));
+ return lazyLoad;
+}
+
+SymbolsResolver::SymbolsResolver(const char *libLoggingName, LibraryLoader loader)
+ : m_libLoggingName(libLoggingName)
+{
+ Q_ASSERT(libLoggingName);
+ Q_ASSERT(loader);
+
+ auto library = loader();
+ if (library && library->isLoaded())
+ m_library = std::move(library);
+ else
+ qCWarning(qLcSymbolsResolver) << "Couldn't load" << m_libLoggingName << "library";
+}
+
+SymbolsResolver::SymbolsResolver(const char *libName, const char *version,
+ const char *libLoggingName)
+ : m_libLoggingName(libLoggingName ? libLoggingName : libName)
+{
+ Q_ASSERT(libName);
+ Q_ASSERT(version);
+
+ auto library = std::make_unique<QLibrary>(QString::fromLocal8Bit(libName),
+ QString::fromLocal8Bit(version));
+ if (library->load())
+ m_library = std::move(library);
+ else
+ qCWarning(qLcSymbolsResolver) << "Couldn't load" << m_libLoggingName << "library";
+}
+
+SymbolsResolver::~SymbolsResolver()
+{
+ if (m_library)
+ m_library->unload();
+}
+
+QFunctionPointer SymbolsResolver::initFunction(const char *funcName)
+{
+ if (!m_library)
+ return nullptr;
+ if (auto func = m_library->resolve(funcName))
+ return func;
+
+ qCWarning(qLcSymbolsResolver) << "Couldn't resolve" << m_libLoggingName << "symbol" << funcName;
+ m_library->unload();
+ m_library.reset();
+ return nullptr;
+}
+
+void SymbolsResolver::checkLibrariesLoaded(SymbolsMarker *begin, SymbolsMarker *end)
+{
+ if (m_library) {
+ qCDebug(qLcSymbolsResolver) << m_libLoggingName << "symbols resolved";
+ } else {
+ const auto size = reinterpret_cast<char *>(end) - reinterpret_cast<char *>(begin);
+ memset(begin, 0, size);
+ qCWarning(qLcSymbolsResolver) << "Couldn't resolve" << m_libLoggingName << "symbols";
+ }
+}
+
+QT_END_NAMESPACE
diff --git a/src/multimedia/qsymbolsresolveutils_p.h b/src/multimedia/qsymbolsresolveutils_p.h
new file mode 100644
index 000000000..98a552170
--- /dev/null
+++ b/src/multimedia/qsymbolsresolveutils_p.h
@@ -0,0 +1,178 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef Q_SYMBOLSRESOLVEUTILS
+#define Q_SYMBOLSRESOLVEUTILS
+
+#include <QtCore/qlibrary.h>
+#include <QtMultimedia/qtmultimediaexports.h>
+#include <tuple>
+#include <memory>
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+QT_BEGIN_NAMESPACE
+
+constexpr bool areVersionsEqual(const char lhs[], const char rhs[])
+{
+ int i = 0;
+ for (; lhs[i] && rhs[i]; ++i)
+ if (lhs[i] != rhs[i])
+ return false;
+ return lhs[i] == rhs[i];
+}
+
+constexpr bool areVersionsEqual(const char lhs[], int rhsInt)
+{
+ int lhsInt = 0;
+ for (int i = 0; lhs[i]; ++i) {
+ if (lhs[i] < '0' || lhs[i] > '9')
+ return false;
+
+ lhsInt *= 10;
+ lhsInt += lhs[i] - '0';
+ }
+
+ return lhsInt == rhsInt;
+}
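Both overloads are usable in constant expressions; a quick compile-time sketch:

    static_assert(areVersionsEqual("58", "58"));
    static_assert(areVersionsEqual("58", 58));
    static_assert(!areVersionsEqual("58", 59));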
+
+
+template <typename T>
+struct DefaultReturn
+{
+ template <typename... Arg>
+ T operator()(Arg &&...) { return val; }
+ T val;
+};
+
+template <>
+struct DefaultReturn<void>
+{
+ template <typename... Arg>
+ void operator()(Arg &&...) { }
+};
+
+template <typename...>
+struct FuncInfo;
+
+template <typename R, typename... A>
+struct FuncInfo<R(A...)>
+{
+ using Return = R;
+ using Args = std::tuple<A...>;
+};
+
+class Q_MULTIMEDIA_EXPORT SymbolsResolver
+{
+public:
+ using LibraryLoader = std::unique_ptr<QLibrary> (*)();
+ static bool isLazyLoadEnabled();
+
+ ~SymbolsResolver();
+protected:
+ SymbolsResolver(const char *libLoggingName, LibraryLoader loader);
+
+ SymbolsResolver(const char *libName, const char *version = "",
+ const char *libLoggingName = nullptr);
+
+ QFunctionPointer initFunction(const char *name);
+
+ struct SymbolsMarker {};
+ void checkLibrariesLoaded(SymbolsMarker *begin, SymbolsMarker *end);
+
+private:
+ const char *m_libLoggingName;
+ std::unique_ptr<QLibrary> m_library;
+};
+
+
+QT_END_NAMESPACE
+
+// clang-format off
+
+#define CHECK_VERSIONS(Name, NeededSoversion, DetectedVersion) \
+ static_assert(areVersionsEqual(NeededSoversion, DetectedVersion), \
+ "Configuartion error: misleading " Name " versions!")
+
+#define BEGIN_INIT_FUNCS(...) \
+ QT_USE_NAMESPACE \
+ namespace { \
+ class SymbolsResolverImpl : SymbolsResolver { \
+ public: \
+ SymbolsResolverImpl() : SymbolsResolver(__VA_ARGS__) \
+ { checkLibrariesLoaded(&symbolsBegin, &symbolsEnd); } \
+ static const SymbolsResolverImpl& instance() \
+ { static const SymbolsResolverImpl instance; return instance; } \
+ SymbolsMarker symbolsBegin;
+
+#define INIT_FUNC(F) QFunctionPointer F = initFunction(#F);
+
+#define END_INIT_FUNCS() \
+ SymbolsMarker symbolsEnd; \
+ }; \
+ [[maybe_unused]] static const auto *instantResolver = \
+ SymbolsResolver::isLazyLoadEnabled() ? &SymbolsResolverImpl::instance() : nullptr; \
+ }
+
+
+#ifdef Q_EXPORT_STUB_SYMBOLS
+#define EXPORT_FUNC Q_MULTIMEDIA_EXPORT
+#else
+#define EXPORT_FUNC
+#endif
+
+#define DEFINE_FUNC_IMPL(F, Vars, TypesWithVars, ReturnFunc) \
+ using F##_ReturnType = FuncInfo<decltype(F)>::Return; \
+ extern "C" EXPORT_FUNC [[maybe_unused]] F##_ReturnType F(TypesWithVars(F)) { \
+ using F##_Type = F##_ReturnType (*)(TypesWithVars(F)); \
+ const auto f = SymbolsResolverImpl::instance().F; \
+ return f ? (reinterpret_cast<F##_Type>(f))(Vars()) : ReturnFunc(); \
+ }
+
+
+#define VAR(I) a##I
+#define VARS0()
+#define VARS1() VAR(0)
+#define VARS2() VARS1(), VAR(1)
+#define VARS3() VARS2(), VAR(2)
+#define VARS4() VARS3(), VAR(3)
+#define VARS5() VARS4(), VAR(4)
+#define VARS6() VARS5(), VAR(5)
+#define VARS7() VARS6(), VAR(6)
+#define VARS8() VARS7(), VAR(7)
+#define VARS9() VARS8(), VAR(8)
+#define VARS10() VARS9(), VAR(9)
+#define VARS11() VARS10(), VAR(10)
+
+#define TYPE_WITH_VAR(F, I) std::tuple_element_t<I, FuncInfo<decltype(F)>::Args> VAR(I)
+#define TYPES_WITH_VARS0(F)
+#define TYPES_WITH_VARS1(F) TYPE_WITH_VAR(F, 0)
+#define TYPES_WITH_VARS2(F) TYPES_WITH_VARS1(F), TYPE_WITH_VAR(F, 1)
+#define TYPES_WITH_VARS3(F) TYPES_WITH_VARS2(F), TYPE_WITH_VAR(F, 2)
+#define TYPES_WITH_VARS4(F) TYPES_WITH_VARS3(F), TYPE_WITH_VAR(F, 3)
+#define TYPES_WITH_VARS5(F) TYPES_WITH_VARS4(F), TYPE_WITH_VAR(F, 4)
+#define TYPES_WITH_VARS6(F) TYPES_WITH_VARS5(F), TYPE_WITH_VAR(F, 5)
+#define TYPES_WITH_VARS7(F) TYPES_WITH_VARS6(F), TYPE_WITH_VAR(F, 6)
+#define TYPES_WITH_VARS8(F) TYPES_WITH_VARS7(F), TYPE_WITH_VAR(F, 7)
+#define TYPES_WITH_VARS9(F) TYPES_WITH_VARS8(F), TYPE_WITH_VAR(F, 8)
+#define TYPES_WITH_VARS10(F) TYPES_WITH_VARS9(F), TYPE_WITH_VAR(F, 9)
+#define TYPES_WITH_VARS11(F) TYPES_WITH_VARS10(F), TYPE_WITH_VAR(F, 10)
+
+
+#define RET(F, ...) DefaultReturn<FuncInfo<decltype(F)>::Return>{__VA_ARGS__}
+
+#define DEFINE_FUNC(F, ArgsCount, /*Return value*/...) \
+ DEFINE_FUNC_IMPL(F, VARS##ArgsCount, TYPES_WITH_VARS##ArgsCount, RET(F, __VA_ARGS__));
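To show how these macros compose, here is a hypothetical stub for one function of an imaginary library; the names "foo", "foo_do_work" and the version are assumptions, not from this patch:

    extern "C" int foo_do_work(int level);   // declaration matching the real library

    BEGIN_INIT_FUNCS("foo", "1")              // tries to load the 'foo' library, major version 1
    INIT_FUNC(foo_do_work)                    // resolves the symbol into a QFunctionPointer
    END_INIT_FUNCS()

    // Generates an extern "C" foo_do_work(int) that forwards to the resolved
    // pointer, or returns -1 if the library or symbol could not be loaded.
    DEFINE_FUNC(foo_do_work, 1, -1);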
+
+// clang-format on
+
+#endif // Q_SYMBOLSRESOLVEUTILS
diff --git a/src/multimedia/recording/qmediacapturesession.cpp b/src/multimedia/recording/qmediacapturesession.cpp
index f175cd98e..9df09acef 100644
--- a/src/multimedia/recording/qmediacapturesession.cpp
+++ b/src/multimedia/recording/qmediacapturesession.cpp
@@ -10,16 +10,20 @@
#include "qvideosink.h"
#include "qscreencapture.h"
#include "qwindowcapture.h"
+#include "qvideoframeinput.h"
#include "qplatformmediaintegration_p.h"
#include "qplatformmediacapture_p.h"
#include "qaudioinput.h"
+#include "qaudiobufferinput.h"
#include "qaudiooutput.h"
QT_BEGIN_NAMESPACE
void QMediaCaptureSessionPrivate::setVideoSink(QVideoSink *sink)
{
+ Q_Q(QMediaCaptureSession);
+
if (sink == videoSink)
return;
if (videoSink)
@@ -41,18 +45,23 @@ void QMediaCaptureSessionPrivate::setVideoSink(QVideoSink *sink)
\ingroup multimedia_video
\ingroup multimedia_audio
- The QMediaCaptureSession is the central class that manages capturing of media on the local device.
+ The QMediaCaptureSession is the central class that manages capturing of media on the local
+ device.
- You can connect a video input to QMediaCaptureSession using setCamera(), setScreenCapture() or setWindowCapture().
- A preview of the captured media can be seen by setting a QVideoWidget or QGraphicsVideoItem using setVideoOutput().
+ You can connect a video input to QMediaCaptureSession using setCamera(),
+ setScreenCapture(), setWindowCapture() or setVideoFrameInput().
+ A preview of the captured media can be seen by setting a QVideoWidget or QGraphicsVideoItem
+ using setVideoOutput().
- You can connect a microphone to QMediaCaptureSession using setAudioInput().
+ You can connect a microphone to QMediaCaptureSession using setAudioInput(), or set your
+ custom audio input using setAudioBufferInput().
The captured sound can be heard by routing the audio to an output device using setAudioOutput().
- You can capture still images from a camera by setting a QImageCapture object on the capture session,
- and record audio/video using a QMediaRecorder.
+ You can capture still images from a camera by setting a QImageCapture object on the capture
+ session, and record audio/video using a QMediaRecorder.
- \sa QCamera, QAudioDevice, QMediaRecorder, QImageCapture, QScreenCapture, QWindowCapture, QMediaRecorder, QGraphicsVideoItem
+ \sa QCamera, QAudioDevice, QMediaRecorder, QImageCapture, QScreenCapture, QWindowCapture,
+ QVideoFrameInput, QMediaRecorder, QGraphicsVideoItem
*/
/*!
@@ -112,14 +121,16 @@ void QMediaCaptureSessionPrivate::setVideoSink(QVideoSink *sink)
Creates a session for media capture from the \a parent object.
*/
QMediaCaptureSession::QMediaCaptureSession(QObject *parent)
- : QObject(parent),
- d_ptr(new QMediaCaptureSessionPrivate)
+ : QObject{ *new QMediaCaptureSessionPrivate, parent }
{
- d_ptr->q = this;
+ QT6_ONLY(Q_UNUSED(unused))
+
+ Q_D(QMediaCaptureSession);
+
auto maybeCaptureSession = QPlatformMediaIntegration::instance()->createCaptureSession();
if (maybeCaptureSession) {
- d_ptr->captureSession = maybeCaptureSession.value();
- d_ptr->captureSession->setCaptureSession(this);
+ d->captureSession.reset(maybeCaptureSession.value());
+ d->captureSession->setCaptureSession(this);
} else {
qWarning() << "Failed to initialize QMediaCaptureSession" << maybeCaptureSession.error();
}
@@ -130,16 +141,19 @@ QMediaCaptureSession::QMediaCaptureSession(QObject *parent)
*/
QMediaCaptureSession::~QMediaCaptureSession()
{
+ Q_D(QMediaCaptureSession);
+
setCamera(nullptr);
setRecorder(nullptr);
setImageCapture(nullptr);
setScreenCapture(nullptr);
setWindowCapture(nullptr);
+ setVideoFrameInput(nullptr);
+ setAudioBufferInput(nullptr);
setAudioInput(nullptr);
setAudioOutput(nullptr);
- d_ptr->setVideoSink(nullptr);
- delete d_ptr->captureSession;
- delete d_ptr;
+ d->setVideoSink(nullptr);
+ d->captureSession.reset();
}
/*!
\qmlproperty AudioInput QtMultimedia::CaptureSession::audioInput
@@ -154,7 +168,8 @@ QMediaCaptureSession::~QMediaCaptureSession()
*/
QAudioInput *QMediaCaptureSession::audioInput() const
{
- return d_ptr->audioInput;
+ Q_D(const QMediaCaptureSession);
+ return d->audioInput;
}
/*!
@@ -164,28 +179,69 @@ QAudioInput *QMediaCaptureSession::audioInput() const
*/
void QMediaCaptureSession::setAudioInput(QAudioInput *input)
{
- QAudioInput *oldInput = d_ptr->audioInput;
+ Q_D(QMediaCaptureSession);
+
+ QAudioInput *oldInput = d->audioInput;
if (oldInput == input)
return;
// To avoid double emit of audioInputChanged
// from recursive setAudioInput(nullptr) call.
- d_ptr->audioInput = nullptr;
+ d->audioInput = nullptr;
- if (d_ptr->captureSession)
- d_ptr->captureSession->setAudioInput(nullptr);
+ if (d->captureSession)
+ d->captureSession->setAudioInput(nullptr);
if (oldInput)
oldInput->setDisconnectFunction({});
if (input) {
input->setDisconnectFunction([this](){ setAudioInput(nullptr); });
- if (d_ptr->captureSession)
- d_ptr->captureSession->setAudioInput(input->handle());
+ if (d->captureSession)
+ d->captureSession->setAudioInput(input->handle());
}
- d_ptr->audioInput = input;
+ d->audioInput = input;
emit audioInputChanged();
}
/*!
+ \property QMediaCaptureSession::audioBufferInput
+ \since 6.8
+
+ \brief The object used to send custom audio buffers to \l QMediaRecorder.
+*/
+QAudioBufferInput *QMediaCaptureSession::audioBufferInput() const
+{
+ Q_D(const QMediaCaptureSession);
+
+ return d->audioBufferInput;
+}
+
+void QMediaCaptureSession::setAudioBufferInput(QAudioBufferInput *input)
+{
+ Q_D(QMediaCaptureSession);
+
+ // TODO: come up with a unification of the captures setup
+ QAudioBufferInput *oldInput = d->audioBufferInput;
+ if (oldInput == input)
+ return;
+ d->audioBufferInput = input;
+ if (d->captureSession)
+ d->captureSession->setAudioBufferInput(nullptr);
+ if (oldInput) {
+ if (oldInput->captureSession() && oldInput->captureSession() != this)
+ oldInput->captureSession()->setAudioBufferInput(nullptr);
+ oldInput->setCaptureSession(nullptr);
+ }
+ if (input) {
+ if (input->captureSession())
+ input->captureSession()->setAudioBufferInput(nullptr);
+ if (d->captureSession)
+ d->captureSession->setAudioBufferInput(input->platformAudioBufferInput());
+ input->setCaptureSession(this);
+ }
+ emit audioBufferInputChanged();
+}
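A hypothetical capture-side sketch; it assumes QAudioBufferInput mirrors the video-frame API with sendAudioBuffer() and readyToSendAudioBuffer(), and nextBuffer() stands in for any producer of QAudioBuffer data:

    QMediaCaptureSession session;
    QAudioBufferInput audioInput;
    QMediaRecorder recorder;

    session.setAudioBufferInput(&audioInput);
    session.setRecorder(&recorder);
    recorder.record();

    QObject::connect(&audioInput, &QAudioBufferInput::readyToSendAudioBuffer,
                     [&] { audioInput.sendAudioBuffer(nextBuffer()); }); // nextBuffer(): hypothetical producer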
+
+/*!
\qmlproperty Camera QtMultimedia::CaptureSession::camera
\brief The camera used to capture video.
@@ -204,18 +260,22 @@ void QMediaCaptureSession::setAudioInput(QAudioInput *input)
*/
QCamera *QMediaCaptureSession::camera() const
{
- return d_ptr->camera;
+ Q_D(const QMediaCaptureSession);
+
+ return d->camera;
}
void QMediaCaptureSession::setCamera(QCamera *camera)
{
+ Q_D(QMediaCaptureSession);
+
// TODO: come up with a unification of the captures setup
- QCamera *oldCamera = d_ptr->camera;
+ QCamera *oldCamera = d->camera;
if (oldCamera == camera)
return;
- d_ptr->camera = camera;
- if (d_ptr->captureSession)
- d_ptr->captureSession->setCamera(nullptr);
+ d->camera = camera;
+ if (d->captureSession)
+ d->captureSession->setCamera(nullptr);
if (oldCamera) {
if (oldCamera->captureSession() && oldCamera->captureSession() != this)
oldCamera->captureSession()->setCamera(nullptr);
@@ -224,8 +284,8 @@ void QMediaCaptureSession::setCamera(QCamera *camera)
if (camera) {
if (camera->captureSession())
camera->captureSession()->setCamera(nullptr);
- if (d_ptr->captureSession)
- d_ptr->captureSession->setCamera(camera->platformCamera());
+ if (d->captureSession)
+ d->captureSession->setCamera(camera->platformCamera());
camera->setCaptureSession(this);
}
emit cameraChanged();
@@ -252,18 +312,22 @@ void QMediaCaptureSession::setCamera(QCamera *camera)
*/
QScreenCapture *QMediaCaptureSession::screenCapture()
{
- return d_ptr ? d_ptr->screenCapture : nullptr;
+ Q_D(QMediaCaptureSession);
+
+ return d->screenCapture;
}
void QMediaCaptureSession::setScreenCapture(QScreenCapture *screenCapture)
{
+ Q_D(QMediaCaptureSession);
+
// TODO: come up with a unification of the captures setup
- QScreenCapture *oldScreenCapture = d_ptr->screenCapture;
+ QScreenCapture *oldScreenCapture = d->screenCapture;
if (oldScreenCapture == screenCapture)
return;
- d_ptr->screenCapture = screenCapture;
- if (d_ptr->captureSession)
- d_ptr->captureSession->setScreenCapture(nullptr);
+ d->screenCapture = screenCapture;
+ if (d->captureSession)
+ d->captureSession->setScreenCapture(nullptr);
if (oldScreenCapture) {
if (oldScreenCapture->captureSession() && oldScreenCapture->captureSession() != this)
oldScreenCapture->captureSession()->setScreenCapture(nullptr);
@@ -272,8 +336,8 @@ void QMediaCaptureSession::setScreenCapture(QScreenCapture *screenCapture)
if (screenCapture) {
if (screenCapture->captureSession())
screenCapture->captureSession()->setScreenCapture(nullptr);
- if (d_ptr->captureSession)
- d_ptr->captureSession->setScreenCapture(screenCapture->platformScreenCapture());
+ if (d->captureSession)
+ d->captureSession->setScreenCapture(screenCapture->platformScreenCapture());
screenCapture->setCaptureSession(this);
}
emit screenCaptureChanged();
@@ -298,19 +362,23 @@ void QMediaCaptureSession::setScreenCapture(QScreenCapture *screenCapture)
Record a window by adding a window capture object
to the capture session using this property.
*/
-QWindowCapture *QMediaCaptureSession::windowCapture() {
- return d_ptr ? d_ptr->windowCapture : nullptr;
+QWindowCapture *QMediaCaptureSession::windowCapture()
+{
+ Q_D(QMediaCaptureSession);
+ return d->windowCapture;
}
void QMediaCaptureSession::setWindowCapture(QWindowCapture *windowCapture)
{
+ Q_D(QMediaCaptureSession);
+
// TODO: come up with a unification of the captures setup
- QWindowCapture *oldCapture = d_ptr->windowCapture;
+ QWindowCapture *oldCapture = d->windowCapture;
if (oldCapture == windowCapture)
return;
- d_ptr->windowCapture = windowCapture;
- if (d_ptr->captureSession)
- d_ptr->captureSession->setWindowCapture(nullptr);
+ d->windowCapture = windowCapture;
+ if (d->captureSession)
+ d->captureSession->setWindowCapture(nullptr);
if (oldCapture) {
if (oldCapture->captureSession() && oldCapture->captureSession() != this)
oldCapture->captureSession()->setWindowCapture(nullptr);
@@ -319,14 +387,52 @@ void QMediaCaptureSession::setWindowCapture(QWindowCapture *windowCapture)
if (windowCapture) {
if (windowCapture->captureSession())
windowCapture->captureSession()->setWindowCapture(nullptr);
- if (d_ptr->captureSession)
- d_ptr->captureSession->setWindowCapture(windowCapture->platformWindowCapture());
+ if (d->captureSession)
+ d->captureSession->setWindowCapture(windowCapture->platformWindowCapture());
windowCapture->setCaptureSession(this);
}
emit windowCaptureChanged();
}
/*!
+ \property QMediaCaptureSession::videoFrameInput
+ \since 6.8
+
+ \brief The object used to send custom video frames to
+ \l QMediaRecorder or a video output.
+*/
+QVideoFrameInput *QMediaCaptureSession::videoFrameInput() const
+{
+ Q_D(const QMediaCaptureSession);
+ return d->videoFrameInput;
+}
+
+void QMediaCaptureSession::setVideoFrameInput(QVideoFrameInput *input)
+{
+ Q_D(QMediaCaptureSession);
+ // TODO: come up with a unification of the captures setup
+ QVideoFrameInput *oldInput = d->videoFrameInput;
+ if (oldInput == input)
+ return;
+ d->videoFrameInput = input;
+ if (d->captureSession)
+ d->captureSession->setVideoFrameInput(nullptr);
+ if (oldInput) {
+ if (oldInput->captureSession() && oldInput->captureSession() != this)
+ oldInput->captureSession()->setVideoFrameInput(nullptr);
+ oldInput->setCaptureSession(nullptr);
+ }
+ if (input) {
+ if (input->captureSession())
+ input->captureSession()->setVideoFrameInput(nullptr);
+ if (d->captureSession)
+ d->captureSession->setVideoFrameInput(input->platformVideoFrameInput());
+ input->setCaptureSession(this);
+ }
+ emit videoFrameInputChanged();
+}
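An illustrative wiring sketch (not part of the patch): custom frames pushed through the input can be previewed on a sink or recorded, depending on what else is attached to the session:

    QMediaCaptureSession session;
    QVideoFrameInput frameInput;
    QVideoSink previewSink;               // or a QVideoWidget via setVideoOutput()

    session.setVideoFrameInput(&frameInput);
    session.setVideoSink(&previewSink);   // frames sent through frameInput appear here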
+
+/*!
\qmlproperty ImageCapture QtMultimedia::CaptureSession::imageCapture
\brief The object used to capture still images.
@@ -344,18 +450,22 @@ void QMediaCaptureSession::setWindowCapture(QWindowCapture *windowCapture)
*/
QImageCapture *QMediaCaptureSession::imageCapture()
{
- return d_ptr->imageCapture;
+ Q_D(QMediaCaptureSession);
+
+ return d->imageCapture;
}
void QMediaCaptureSession::setImageCapture(QImageCapture *imageCapture)
{
+ Q_D(QMediaCaptureSession);
+
// TODO: come up with a unification of the captures setup
- QImageCapture *oldImageCapture = d_ptr->imageCapture;
+ QImageCapture *oldImageCapture = d->imageCapture;
if (oldImageCapture == imageCapture)
return;
- d_ptr->imageCapture = imageCapture;
- if (d_ptr->captureSession)
- d_ptr->captureSession->setImageCapture(nullptr);
+ d->imageCapture = imageCapture;
+ if (d->captureSession)
+ d->captureSession->setImageCapture(nullptr);
if (oldImageCapture) {
if (oldImageCapture->captureSession() && oldImageCapture->captureSession() != this)
oldImageCapture->captureSession()->setImageCapture(nullptr);
@@ -364,8 +474,8 @@ void QMediaCaptureSession::setImageCapture(QImageCapture *imageCapture)
if (imageCapture) {
if (imageCapture->captureSession())
imageCapture->captureSession()->setImageCapture(nullptr);
- if (d_ptr->captureSession)
- d_ptr->captureSession->setImageCapture(imageCapture->platformImageCapture());
+ if (d->captureSession)
+ d->captureSession->setImageCapture(imageCapture->platformImageCapture());
imageCapture->setCaptureSession(this);
}
emit imageCaptureChanged();
@@ -389,17 +499,19 @@ void QMediaCaptureSession::setImageCapture(QImageCapture *imageCapture)
QMediaRecorder *QMediaCaptureSession::recorder()
{
- return d_ptr->recorder;
+ Q_D(QMediaCaptureSession);
+ return d->recorder;
}
void QMediaCaptureSession::setRecorder(QMediaRecorder *recorder)
{
- QMediaRecorder *oldRecorder = d_ptr->recorder;
+ Q_D(QMediaCaptureSession);
+ QMediaRecorder *oldRecorder = d->recorder;
if (oldRecorder == recorder)
return;
- d_ptr->recorder = recorder;
- if (d_ptr->captureSession)
- d_ptr->captureSession->setMediaRecorder(nullptr);
+ d->recorder = recorder;
+ if (d->captureSession)
+ d->captureSession->setMediaRecorder(nullptr);
if (oldRecorder) {
if (oldRecorder->captureSession() && oldRecorder->captureSession() != this)
oldRecorder->captureSession()->setRecorder(nullptr);
@@ -408,8 +520,8 @@ void QMediaCaptureSession::setRecorder(QMediaRecorder *recorder)
if (recorder) {
if (recorder->captureSession())
recorder->captureSession()->setRecorder(nullptr);
- if (d_ptr->captureSession)
- d_ptr->captureSession->setMediaRecorder(recorder->platformRecoder());
+ if (d->captureSession)
+ d->captureSession->setMediaRecorder(recorder->platformRecoder());
recorder->setCaptureSession(this);
}
emit recorderChanged();
@@ -487,25 +599,27 @@ QVideoSink *QMediaCaptureSession::videoSink() const
*/
void QMediaCaptureSession::setAudioOutput(QAudioOutput *output)
{
- QAudioOutput *oldOutput = d_ptr->audioOutput;
+ Q_D(QMediaCaptureSession);
+
+ QAudioOutput *oldOutput = d->audioOutput;
if (oldOutput == output)
return;
// We don't want to end up with signal emitted
// twice (from recursive call setAudioInput(nullptr)
// from oldOutput->setDisconnectFunction():
- d_ptr->audioOutput = nullptr;
+ d->audioOutput = nullptr;
- if (d_ptr->captureSession)
- d_ptr->captureSession->setAudioOutput(nullptr);
+ if (d->captureSession)
+ d->captureSession->setAudioOutput(nullptr);
if (oldOutput)
oldOutput->setDisconnectFunction({});
if (output) {
output->setDisconnectFunction([this](){ setAudioOutput(nullptr); });
- if (d_ptr->captureSession)
- d_ptr->captureSession->setAudioOutput(output->handle());
+ if (d->captureSession)
+ d->captureSession->setAudioOutput(output->handle());
}
- d_ptr->audioOutput = output;
+ d->audioOutput = output;
emit audioOutputChanged();
}
/*!
@@ -531,7 +645,8 @@ QAudioOutput *QMediaCaptureSession::audioOutput() const
*/
QPlatformMediaCaptureSession *QMediaCaptureSession::platformSession() const
{
- return d_ptr->captureSession;
+ Q_D(const QMediaCaptureSession);
+ return d->captureSession.get();
}
/*!
\qmlsignal QtMultimedia::CaptureSession::audioInputChanged()
diff --git a/src/multimedia/recording/qmediacapturesession.h b/src/multimedia/recording/qmediacapturesession.h
index 1333af7eb..219c382d1 100644
--- a/src/multimedia/recording/qmediacapturesession.h
+++ b/src/multimedia/recording/qmediacapturesession.h
@@ -11,6 +11,7 @@ QT_BEGIN_NAMESPACE
class QCamera;
class QAudioInput;
+class QAudioBufferInput;
class QAudioOutput;
class QCameraDevice;
class QImageCapture;
@@ -19,18 +20,23 @@ class QPlatformMediaCaptureSession;
class QVideoSink;
class QScreenCapture;
class QWindowCapture;
+class QVideoFrameInput;
class QMediaCaptureSessionPrivate;
class Q_MULTIMEDIA_EXPORT QMediaCaptureSession : public QObject
{
Q_OBJECT
Q_PROPERTY(QAudioInput *audioInput READ audioInput WRITE setAudioInput NOTIFY audioInputChanged)
+ Q_PROPERTY(QAudioBufferInput *audioBufferInput READ audioBufferInput WRITE setAudioBufferInput
+ NOTIFY audioBufferInputChanged)
Q_PROPERTY(QAudioOutput *audioOutput READ audioOutput WRITE setAudioOutput NOTIFY audioOutputChanged)
Q_PROPERTY(QCamera *camera READ camera WRITE setCamera NOTIFY cameraChanged)
Q_PROPERTY(
QScreenCapture *screenCapture READ screenCapture WRITE setScreenCapture NOTIFY screenCaptureChanged)
Q_PROPERTY(
QWindowCapture *windowCapture READ windowCapture WRITE setWindowCapture NOTIFY windowCaptureChanged)
+ Q_PROPERTY(QVideoFrameInput *videoFrameInput READ videoFrameInput WRITE setVideoFrameInput
+ NOTIFY videoFrameInputChanged)
Q_PROPERTY(QImageCapture *imageCapture READ imageCapture WRITE setImageCapture NOTIFY imageCaptureChanged)
Q_PROPERTY(QMediaRecorder *recorder READ recorder WRITE setRecorder NOTIFY recorderChanged)
Q_PROPERTY(QObject *videoOutput READ videoOutput WRITE setVideoOutput NOTIFY videoOutputChanged)
@@ -41,6 +47,9 @@ public:
QAudioInput *audioInput() const;
void setAudioInput(QAudioInput *input);
+ QAudioBufferInput *audioBufferInput() const;
+ void setAudioBufferInput(QAudioBufferInput *input);
+
QCamera *camera() const;
void setCamera(QCamera *camera);
@@ -53,6 +62,9 @@ public:
QWindowCapture *windowCapture();
void setWindowCapture(QWindowCapture *windowCapture);
+ QVideoFrameInput *videoFrameInput() const;
+ void setVideoFrameInput(QVideoFrameInput *input);
+
QMediaRecorder *recorder();
void setRecorder(QMediaRecorder *recorder);
@@ -69,9 +81,11 @@ public:
Q_SIGNALS:
void audioInputChanged();
+ void audioBufferInputChanged();
void cameraChanged();
void screenCaptureChanged();
void windowCaptureChanged();
+ void videoFrameInputChanged();
void imageCaptureChanged();
void recorderChanged();
void videoOutputChanged();
@@ -80,7 +94,9 @@ Q_SIGNALS:
private:
friend class QPlatformMediaCaptureSession;
- QMediaCaptureSessionPrivate *d_ptr;
+ // ### Qt7: remove unused member
+ QT6_ONLY(void *unused = nullptr;) // for ABI compatibility
+
Q_DISABLE_COPY(QMediaCaptureSession)
Q_DECLARE_PRIVATE(QMediaCaptureSession)
};
diff --git a/src/multimedia/recording/qmediacapturesession_p.h b/src/multimedia/recording/qmediacapturesession_p.h
index 8702c8d2b..cba222993 100644
--- a/src/multimedia/recording/qmediacapturesession_p.h
+++ b/src/multimedia/recording/qmediacapturesession_p.h
@@ -18,19 +18,28 @@
#include <QtMultimedia/qmediacapturesession.h>
#include <QtCore/qpointer.h>
+#include <QtCore/private/qobject_p.h>
QT_BEGIN_NAMESPACE
-class QMediaCaptureSessionPrivate
+class QMediaCaptureSessionPrivate : public QObjectPrivate
{
public:
- QMediaCaptureSession *q = nullptr;
- QPlatformMediaCaptureSession *captureSession = nullptr;
+ static QMediaCaptureSessionPrivate *get(QMediaCaptureSession *session)
+ {
+ return reinterpret_cast<QMediaCaptureSessionPrivate *>(QObjectPrivate::get(session));
+ }
+
+ Q_DECLARE_PUBLIC(QMediaCaptureSession)
+
+ std::unique_ptr<QPlatformMediaCaptureSession> captureSession;
QAudioInput *audioInput = nullptr;
+ QPointer<QAudioBufferInput> audioBufferInput;
QAudioOutput *audioOutput = nullptr;
QPointer<QCamera> camera;
QPointer<QScreenCapture> screenCapture;
QPointer<QWindowCapture> windowCapture;
+ QPointer<QVideoFrameInput> videoFrameInput;
QPointer<QImageCapture> imageCapture;
QPointer<QMediaRecorder> recorder;
QPointer<QVideoSink> videoSink;
diff --git a/src/multimedia/recording/qmediarecorder.cpp b/src/multimedia/recording/qmediarecorder.cpp
index a7f5a31b8..ea38b231a 100644
--- a/src/multimedia/recording/qmediarecorder.cpp
+++ b/src/multimedia/recording/qmediarecorder.cpp
@@ -227,7 +227,7 @@ void QMediaRecorder::setOutputLocation(const QUrl &location)
/*!
Set the output IO device for media content.
- The \a device must have been opened in the \l{QIODevice::Write}{Write} or
+ The \a device must have been opened in the \l{QIODevice::WriteOnly}{WriteOnly} or
\l{QIODevice::ReadWrite}{ReadWrite} modes before the recording starts.
The media recorder doesn't take ownership of the specified \a device.
@@ -573,10 +573,47 @@ void QMediaRecorder::addMetaData(const QMediaMetaData &metaData)
{
auto data = this->metaData();
// merge data
- for (const auto &k : metaData.keys())
- data.insert(k, metaData.value(k));
+ for (auto &&[key, value] : metaData.asKeyValueRange())
+ data.insert(key, value);
setMetaData(data);
}
+
+/*!
+ \property QMediaRecorder::autoStop
+
+ This property controls whether the media recorder stops automatically when
+ all media inputs have reported the end of the stream or have been deactivated.
+
+ The end of the stream is reported by sending an empty media frame,
+ which you can send explicitly via \l QVideoFrameInput or \l QAudioBufferInput.
+
+ Video inputs, specifically \l QCamera, \l QScreenCapture and \l QWindowCapture,
+ can be deactivated via the function \c setActive.
+
+ Defaults to \c false.
+
+ \sa QCamera, QScreenCapture, QWindowCapture
+*/
+
+bool QMediaRecorder::autoStop() const
+{
+ Q_D(const QMediaRecorder);
+
+ return d->autoStop;
+}
+
+void QMediaRecorder::setAutoStop(bool autoStop)
+{
+ Q_D(QMediaRecorder);
+
+ if (d->autoStop == autoStop)
+ return;
+
+ d->autoStop = autoStop;
+ d->control->updateAutoStop();
+ emit autoStopChanged();
+}
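A sketch of how a recording could be ended under autoStop; it assumes a session already wired with a video frame input and an audio buffer input, and that QAudioBufferInput offers sendAudioBuffer():

    recorder.setAutoStop(true);
    recorder.record();

    // ... push frames and buffers while recording ...

    // Report end-of-stream on every active input; the recorder then stops by itself.
    videoFrameInput.sendVideoFrame(QVideoFrame());
    audioBufferInput.sendAudioBuffer(QAudioBuffer());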
+
/*!
\qmlsignal QtMultimedia::MediaRecorder::metaDataChanged()
diff --git a/src/multimedia/recording/qmediarecorder.h b/src/multimedia/recording/qmediarecorder.h
index fed276baf..a73d9f8af 100644
--- a/src/multimedia/recording/qmediarecorder.h
+++ b/src/multimedia/recording/qmediarecorder.h
@@ -44,6 +44,7 @@ class Q_MULTIMEDIA_EXPORT QMediaRecorder : public QObject
Q_PROPERTY(int audioBitRate READ audioBitRate WRITE setAudioBitRate NOTIFY audioBitRateChanged)
Q_PROPERTY(int audioChannelCount READ audioChannelCount WRITE setAudioChannelCount NOTIFY audioChannelCountChanged)
Q_PROPERTY(int audioSampleRate READ audioSampleRate WRITE setAudioSampleRate NOTIFY audioSampleRateChanged)
+ Q_PROPERTY(bool autoStop READ autoStop WRITE setAutoStop NOTIFY autoStopChanged)
public:
enum Quality
{
@@ -134,6 +135,9 @@ public:
void setMetaData(const QMediaMetaData &metaData);
void addMetaData(const QMediaMetaData &metaData);
+ bool autoStop() const;
+ void setAutoStop(bool autoStop);
+
QMediaCaptureSession *captureSession() const;
QPlatformMediaRecorder *platformRecoder() const;
@@ -162,6 +166,7 @@ Q_SIGNALS:
void audioBitRateChanged();
void audioChannelCountChanged();
void audioSampleRateChanged();
+ void autoStopChanged();
private:
QMediaRecorderPrivate *d_ptr;
diff --git a/src/multimedia/recording/qmediarecorder_p.h b/src/multimedia/recording/qmediarecorder_p.h
index 193aa5f00..896f6c368 100644
--- a/src/multimedia/recording/qmediarecorder_p.h
+++ b/src/multimedia/recording/qmediarecorder_p.h
@@ -38,6 +38,7 @@ public:
QMediaCaptureSession *captureSession = nullptr;
QPlatformMediaRecorder *control = nullptr;
QString initErrorMessage;
+ bool autoStop = false;
bool settingsChanged = false;
diff --git a/src/multimedia/recording/qscreencapture-limitations.qdocinc b/src/multimedia/recording/qscreencapture-limitations.qdocinc
index cac51df02..240a1a389 100644
--- a/src/multimedia/recording/qscreencapture-limitations.qdocinc
+++ b/src/multimedia/recording/qscreencapture-limitations.qdocinc
@@ -1,22 +1,25 @@
-// Copyright (C) 2023 The Qt Company Ltd.
+// Copyright (C) 2024 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GFDL-1.3-no-invariants-only
/*!
//! [content]
\section1 Screen Capture Limitations
- On Qt 6.5.2 and 6.5.3, the following limitations apply to using \1ScreenCapture:
+ On Qt 6.5.2 and above, the following limitations apply to using \1ScreenCapture:
\list
\li It is only supported with the FFmpeg backend.
- \li It is supported on all desktop platforms, except Linux with Wayland
- compositor, due to Wayland protocol restrictions and limitations.
+ \li It is unsupported on Linux with a Wayland compositor, due to Wayland
+ protocol restrictions and limitations.
\li It is not supported on mobile operating systems, except on Android.
There, you might run into performance issues as the class is currently
implemented via QScreen::grabWindow, which is not optimal for the use case.
- \li On Linux, it works with X11, but it has not been tested on embedded.
+ \li On embedded platforms with EGLFS, it has limited functionality. For Qt Quick
+ applications, the class is currently implemented via
+ QQuickWindow::grabWindow, which can cause performance issues.
\li In most cases, we set a screen capture frame rate that equals the screen
refresh rate, except on Windows, where the rate might be flexible.
Such a frame rate (75/120 FPS) might cause performance issues on weak
- CPUs if the captured screen is of 4K resolution.
+ CPUs if the captured screen is of 4K resolution. On EGLFS, the capture
+ frame rate is currently locked to 30 FPS.
\endlist
//! [content]
*/
diff --git a/src/multimedia/recording/qvideoframeinput.cpp b/src/multimedia/recording/qvideoframeinput.cpp
new file mode 100644
index 000000000..c3016e785
--- /dev/null
+++ b/src/multimedia/recording/qvideoframeinput.cpp
@@ -0,0 +1,156 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qvideoframeinput.h"
+#include "qmediaframeinput_p.h"
+#include "qmediainputencoderinterface_p.h"
+#include "qplatformvideoframeinput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+class QVideoFrameInputPrivate : public QMediaFrameInputPrivate
+{
+public:
+ QVideoFrameInputPrivate(QVideoFrameInput *q) : q(q) { }
+
+ bool sendVideoFrame(const QVideoFrame &frame)
+ {
+ return sendMediaFrame([&]() { emit m_platformVideoFrameInput->newVideoFrame(frame); });
+ }
+
+ void initialize()
+ {
+ m_platformVideoFrameInput = std::make_unique<QPlatformVideoFrameInput>();
+ addUpdateSignal(m_platformVideoFrameInput.get(), &QPlatformVideoFrameInput::encoderUpdated);
+ }
+
+ void uninitialize()
+ {
+ m_platformVideoFrameInput.reset();
+
+ if (captureSession())
+ captureSession()->setVideoFrameInput(nullptr);
+ }
+
+ QPlatformVideoFrameInput *platformVideoFrameInput() const
+ {
+ return m_platformVideoFrameInput.get();
+ }
+
+protected:
+ void updateCaptureSessionConnections(QMediaCaptureSession *prevSession,
+ QMediaCaptureSession *newSession) override
+ {
+ if (prevSession)
+ removeUpdateSignal(prevSession, &QMediaCaptureSession::videoOutputChanged);
+
+ if (newSession)
+ addUpdateSignal(newSession, &QMediaCaptureSession::videoOutputChanged);
+ }
+
+ bool checkIfCanSendMediaFrame() const override
+ {
+ if (auto encoderInterface = m_platformVideoFrameInput->encoderInterface())
+ return encoderInterface->canPushFrame();
+
+ return captureSession()->videoOutput() || captureSession()->videoSink();
+ }
+
+ void emitReadyToSendMediaFrame() override { emit q->readyToSendVideoFrame(); }
+
+private:
+ QVideoFrameInput *q = nullptr;
+ std::unique_ptr<QPlatformVideoFrameInput> m_platformVideoFrameInput;
+};
+
+/*!
+ \class QVideoFrameInput
+ \inmodule QtMultimedia
+ \ingroup multimedia
+ \ingroup multimedia_video
+ \since 6.8
+
+ \brief The QVideoFrameInput class is used for providing custom video frames
+ to \l QMediaRecorder or a video output through \l QMediaCaptureSession.
+
+ \sa QMediaRecorder, QMediaCaptureSession, QVideoSink
+*/
+
+/*!
+ Constructs a new QVideoFrameInput object with \a parent.
+*/
+QVideoFrameInput::QVideoFrameInput(QObject *parent)
+ : QObject(*new QVideoFrameInputPrivate(this), parent)
+{
+ Q_D(QVideoFrameInput);
+ d->initialize();
+}
+
+/*!
+ Destroys the object.
+ */
+QVideoFrameInput::~QVideoFrameInput()
+{
+ Q_D(QVideoFrameInput);
+ d->uninitialize();
+}
+
+/*!
+ Sends \l QVideoFrame to \l QMediaRecorder or a video output
+ through \l QMediaCaptureSession.
+
+ Returns \c true if the specified \a frame has been sent successfully
+ to the destination. Returns \c false if the frame hasn't been sent,
+ which can happen if the instance is not assigned to
+ a \l QMediaCaptureSession, the session has no video outputs or
+ media recorder, the media recorder is not started, or its queue is full.
+ The signal \l readyToSendVideoFrame is emitted as soon as
+ the destination is able to handle a new frame.
+
+ Sending an empty video frame is treated by \l QMediaRecorder
+ as the end of the input stream. QMediaRecorder stops the recording
+ automatically if \l QMediaRecorder::autoStop is \c true and
+ all the inputs have reported the end of the stream.
+*/
+bool QVideoFrameInput::sendVideoFrame(const QVideoFrame &frame)
+{
+ Q_D(QVideoFrameInput);
+ return d->sendVideoFrame(frame);
+}
+
+/*!
+ Returns the capture session this video frame input is connected to, or
+ \c nullptr if the video frame input is not connected to a capture session.
+
+ Use QMediaCaptureSession::setVideoFrameInput() to connect
+ the video frame input to a session.
+*/
+QMediaCaptureSession *QVideoFrameInput::captureSession() const
+{
+ Q_D(const QVideoFrameInput);
+ return d->captureSession();
+}
+
+void QVideoFrameInput::setCaptureSession(QMediaCaptureSession *captureSession)
+{
+ Q_D(QVideoFrameInput);
+ d->setCaptureSession(captureSession);
+}
+
+QPlatformVideoFrameInput *QVideoFrameInput::platformVideoFrameInput() const
+{
+ Q_D(const QVideoFrameInput);
+    return d->platformVideoFrameInput();
+}
+
+/*!
+ \fn void QVideoFrameInput::readyToSendVideoFrame()
+
+ Signals that a new frame can be sent to the video frame input.
+ After receiving the signal, if you have frames to be sent, invoke \l sendVideoFrame
+ once or in a loop until it returns \c false.
+
+ \sa sendVideoFrame()
+*/
+
+QT_END_NAMESPACE
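
The usage pattern documented above for sendVideoFrame() and readyToSendVideoFrame() boils down to connecting to the signal and pushing frames until sendVideoFrame() returns false. A minimal sketch, not part of the patch; nextFrame() is a hypothetical source of ready-made QVideoFrames, and the recorder wiring is only illustrative:

    #include <QtMultimedia/qmediacapturesession.h>
    #include <QtMultimedia/qmediarecorder.h>
    #include <QtMultimedia/qvideoframeinput.h>
    #include <QtCore/qobject.h>
    #include <QtCore/qurl.h>

    QVideoFrame nextFrame(); // hypothetical frame source; an empty frame ends the stream

    void startPushRecording(QObject *context)
    {
        auto session = new QMediaCaptureSession(context);
        auto recorder = new QMediaRecorder(session);
        auto input = new QVideoFrameInput(session);

        session->setRecorder(recorder);
        session->setVideoFrameInput(input);
        recorder->setOutputLocation(QUrl::fromLocalFile("pushed.mp4"));

        // Push frames while the encoder queue accepts them; resume on the signal.
        // A real source would keep the frame rejected by the last call for the next round.
        QObject::connect(input, &QVideoFrameInput::readyToSendVideoFrame, context, [input] {
            while (input->sendVideoFrame(nextFrame())) { }
        });

        recorder->record();
    }
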
diff --git a/src/multimedia/recording/qvideoframeinput.h b/src/multimedia/recording/qvideoframeinput.h
new file mode 100644
index 000000000..6617b051f
--- /dev/null
+++ b/src/multimedia/recording/qvideoframeinput.h
@@ -0,0 +1,44 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QVIDEOFRAMEINPUT_H
+#define QVIDEOFRAMEINPUT_H
+
+#include <QtMultimedia/qtmultimediaexports.h>
+#include <QtMultimedia/qvideoframe.h>
+#include <QtCore/qobject.h>
+
+QT_BEGIN_NAMESPACE
+
+class QPlatformVideoFrameInput;
+class QVideoFrameInputPrivate;
+class QMediaCaptureSession;
+
+class Q_MULTIMEDIA_EXPORT QVideoFrameInput : public QObject
+{
+ Q_OBJECT
+public:
+ explicit QVideoFrameInput(QObject *parent = nullptr);
+
+ ~QVideoFrameInput() override;
+
+ bool sendVideoFrame(const QVideoFrame &frame);
+
+ QMediaCaptureSession *captureSession() const;
+
+Q_SIGNALS:
+ void readyToSendVideoFrame();
+
+private:
+ void setCaptureSession(QMediaCaptureSession *captureSession);
+
+ QPlatformVideoFrameInput *platformVideoFrameInput() const;
+
+ friend class QMediaCaptureSession;
+ Q_DISABLE_COPY(QVideoFrameInput)
+ Q_DECLARE_PRIVATE(QVideoFrameInput)
+};
+
+QT_END_NAMESPACE
+
+#endif // QVIDEOFRAMEINPUT_H
diff --git a/src/multimedia/video/qabstractvideobuffer.cpp b/src/multimedia/video/qabstractvideobuffer.cpp
index d65438855..7368082b1 100644
--- a/src/multimedia/video/qabstractvideobuffer.cpp
+++ b/src/multimedia/video/qabstractvideobuffer.cpp
@@ -1,139 +1,115 @@
-// Copyright (C) 2016 The Qt Company Ltd.
+// Copyright (C) 2024 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-#include "qabstractvideobuffer_p.h"
-
-#include <qvariant.h>
-#include <rhi/qrhi.h>
-
-#include <QDebug>
-
+#include "qabstractvideobuffer.h"
QT_BEGIN_NAMESPACE
/*!
\class QAbstractVideoBuffer
- \internal
+ \since 6.8
\brief The QAbstractVideoBuffer class is an abstraction for video data.
\inmodule QtMultimedia
\ingroup multimedia
\ingroup multimedia_video
- The QVideoFrame class makes use of a QAbstractVideoBuffer internally to reference a buffer of
- video data. Quite often video data buffers may reside in video memory rather than system
- memory, and this class provides an abstraction of the location.
-
- In addition, creating a subclass of QAbstractVideoBuffer will allow you to construct video
- frames from preallocated or static buffers. This caters for cases where the QVideoFrame constructors
- taking a QByteArray or a QImage do not suffice. This may be necessary when implementing
- a new hardware accelerated video system, for example.
+ The \l QVideoFrame class makes use of a QAbstractVideoBuffer internally to reference a buffer of
+ video data. Creating a subclass of QAbstractVideoBuffer allows you to construct video
+ frames from preallocated or static buffers. The subclass can contain a hardware buffer,
+ and implement access to the data by mapping the buffer to CPU memory.
The contents of a buffer can be accessed by mapping the buffer to memory using the map()
- function, which returns a pointer to memory containing the contents of the video buffer.
- The memory returned by map() is released by calling the unmap() function.
+ function, which returns a structure containing information about the plane layout of the current
+ video data.
- The handle() of a buffer may also be used to manipulate its contents using type specific APIs.
- The type of a buffer's handle is given by the handleType() function.
-
- \sa QVideoFrame
+ \sa QVideoFrame, QVideoFrameFormat, QtVideo::MapMode
*/
/*!
- \enum QVideoFrame::HandleType
+ \class QAbstractVideoBuffer::MapData
+ \brief The QAbstractVideoBuffer::MapData structure describes the mapped plane layout.
+ \inmodule QtMultimedia
+ \ingroup multimedia
+ \ingroup multimedia_video
- Identifies the type of a video buffers handle.
+ The structure contains the number of mapped planes and, for each plane,
+ the number of bytes per line, a data pointer, and a data size.
+ The structure doesn't hold any ownership of the data it refers to.
- \value NoHandle
- The buffer has no handle, its data can only be accessed by mapping the buffer.
- \value RhiTextureHandle
- The handle of the buffer is defined by The Qt Rendering Hardware Interface
- (RHI). RHI is Qt's internal graphics abstraction for 3D APIs, such as
- OpenGL, Vulkan, Metal, and Direct 3D.
+ A default-constructed structure means that no data has been mapped.
- \sa handleType()
+ All the values in the structure default to zeros.
+
+ \sa QAbstractVideoBuffer::map
*/
/*!
- \enum QVideoFrame::MapMode
-
- Enumerates how a video buffer's data is mapped to system memory.
-
- \value NotMapped
- The video buffer is not mapped to memory.
- \value ReadOnly
- The mapped memory is populated with data from the video buffer when mapped,
- but the content of the mapped memory may be discarded when unmapped.
- \value WriteOnly
- The mapped memory is uninitialized when mapped, but the possibly modified
- content will be used to populate the video buffer when unmapped.
- \value ReadWrite
- The mapped memory is populated with data from the video
- buffer, and the video buffer is repopulated with the content of the mapped
- memory when it is unmapped.
-
- \sa mapMode(), map()
+ \variable QAbstractVideoBuffer::MapData::planeCount
+
+ The number of planes of the mapped video data. If the format of the data
+ is multiplanar and the value is \c 1, the actual plane layout will
+ be calculated when \l QVideoFrame::map is invoked, from the frame height,
+ \c{bytesPerLine[0]}, and \c{dataSize[0]}.
+
+ Defaults to \c 0.
*/
/*!
- Constructs an abstract video buffer of the given \a type.
+ \variable QAbstractVideoBuffer::MapData::bytesPerLine
+
+ The array of numbers of bytes per line for each
+ plane from \c 0 to \c{planeCount - 1}.
+
+ The values of the array default to \c 0.
*/
-QAbstractVideoBuffer::QAbstractVideoBuffer(QVideoFrame::HandleType type, QRhi *rhi)
- : m_type(type),
- m_rhi(rhi)
-{
-}
/*!
- Destroys an abstract video buffer.
+ \variable QAbstractVideoBuffer::MapData::data
+
+ The array of pointers to the mapped video pixel data
+ for each plane from \c 0 to \c{planeCount - 1}.
+ The implementation of QAbstractVideoBuffer must hold ownership of the data
+ at least until \l QAbstractVideoBuffer::unmap is called.
+
+ The values of the array default to \c nullptr.
*/
-QAbstractVideoBuffer::~QAbstractVideoBuffer()
-{
-}
/*!
- Returns the type of a video buffer's handle.
+ \variable QAbstractVideoBuffer::MapData::dataSize
+
+ The array of sizes in bytes of the mapped video pixel data
+ for each plane from \c 0 to \c{planeCount - 1}.
- \sa handle()
+ The values of the array default to \c 0.
*/
-QVideoFrame::HandleType QAbstractVideoBuffer::handleType() const
-{
- return m_type;
-}
+// must be out-of-line to ensure that dynamic_cast works correctly when QHwVideoBuffer is created in tests
/*!
- Returns the QRhi instance.
+ Destroys a video buffer.
*/
-QRhi *QAbstractVideoBuffer::rhi() const
-{
- return m_rhi;
-}
+QAbstractVideoBuffer::~QAbstractVideoBuffer() = default;
-/*! \fn uchar *QAbstractVideoBuffer::map(MapMode mode, int *numBytes, int *bytesPerLine)
+/*! \fn QAbstractVideoBuffer::MapData QAbstractVideoBuffer::map(QtVideo::MapMode mode)
- Independently maps the planes of a video buffer to memory.
+ Maps the planes of a video buffer to memory.
- The map \a mode indicates whether the contents of the mapped memory should be read from and/or
- written to the buffer. If the map mode includes the \c QVideoFrame::ReadOnly flag the
- mapped memory will be populated with the content of the buffer when initially mapped. If the map
- mode includes the \c QVideoFrame::WriteOnly flag the content of the possibly modified
- mapped memory will be written back to the buffer when unmapped.
-
- When access to the data is no longer needed be sure to call the unmap() function to release the
- mapped memory and possibly update the buffer contents.
+ Returns a \l MapData structure that contains information about the plane layout of
+ the currently mapped video data. If the mapping fails, the method returns the default structure.
+ For CPU memory buffers, the data is considered already mapped, so the function
+ just returns the plane layout of the preallocated underlying data.
- Returns the number of planes in the mapped video data. For each plane the line stride of that
- plane will be returned in \a bytesPerLine, and a pointer to the plane data will be returned in
- \a data. The accumulative size of the mapped data is returned in \a numBytes.
-
- Not all buffer implementations will map more than the first plane, if this returns a single
- plane for a planar format the additional planes will have to be calculated from the line stride
- of the first plane and the frame height. Mapping a buffer with QVideoFrame will do this for
- you.
+ The map \a mode indicates whether the contents of the mapped memory should be read from and/or
+ written to the buffer. If the map mode includes the \c QtVideo::MapMode::WriteOnly flag,
+ the content of the possibly modified mapped memory is expected to be written back
+ to the buffer when unmapped.
- To implement this function create a derivative of QAbstractPlanarVideoBuffer and implement
- its map function instance instead.
+ When access to the data is no longer needed, the \l unmap function is called
+ to release the mapped memory and possibly update the buffer contents.
- \since 5.4
+ If the format of the video data is multiplanar, the method may map the whole pixel data
+ as a single plane. In this case, mapping a buffer with \l QVideoFrame
+ will calculate additional planes from the specified line stride of the first plane,
+ the frame height, and the data size.
*/
/*!
@@ -141,56 +117,23 @@ QRhi *QAbstractVideoBuffer::rhi() const
Releases the memory mapped by the map() function.
- If the \l {QVideoFrame::MapMode}{MapMode} included the \c QVideoFrame::WriteOnly
+ If the \l {QtVideo::MapMode}{MapMode} included the \c QtVideo::MapMode::WriteOnly
flag this will write the current content of the mapped memory back to the video frame.
- \sa map()
-*/
-
-/*! \fn quint64 QAbstractVideoBuffer::textureHandle(QRhi *rhi, int plane) const
-
- Returns a texture handle to the data buffer.
+ For CPU video buffers, the function does not need to be overridden;
+ the default implementation of \c unmap does nothing.
- \sa handleType()
+ \sa map()
*/
-/*
- \fn int QAbstractPlanarVideoBuffer::map(MapMode mode, int *numBytes, int bytesPerLine[4], uchar *data[4])
-
- Maps the contents of a video buffer to memory.
-
- The map \a mode indicates whether the contents of the mapped memory should be read from and/or
- written to the buffer. If the map mode includes the \c QVideoFrame::ReadOnly flag the
- mapped memory will be populated with the content of the buffer when initially mapped. If the map
- mode includes the \c QVideoFrame::WriteOnly flag the content of the possibly modified
- mapped memory will be written back to the buffer when unmapped.
-
- When access to the data is no longer needed be sure to call the unmap() function to release the
- mapped memory and possibly update the buffer contents.
+/*!
+ \fn QAbstractVideoBuffer::format() const
- Returns the number of planes in the mapped video data. For each plane the line stride of that
- plane will be returned in \a bytesPerLine, and a pointer to the plane data will be returned in
- \a data. The accumulative size of the mapped data is returned in \a numBytes.
+ Returns the \l QVideoFrameFormat of the underlying video buffer.
- \sa QAbstractVideoBuffer::map(), QAbstractVideoBuffer::unmap(), QVideoFrame::mapMode()
+ The format must be available upon construction of \l QVideoFrame.
+ QVideoFrame will contain its own instance of the given format, which
+ can be detached and modified.
*/
-#ifndef QT_NO_DEBUG_STREAM
-QDebug operator<<(QDebug dbg, QVideoFrame::MapMode mode)
-{
- QDebugStateSaver saver(dbg);
- dbg.nospace();
- switch (mode) {
- case QVideoFrame::ReadOnly:
- return dbg << "ReadOnly";
- case QVideoFrame::ReadWrite:
- return dbg << "ReadWrite";
- case QVideoFrame::WriteOnly:
- return dbg << "WriteOnly";
- default:
- return dbg << "NotMapped";
- }
-}
-#endif
-
QT_END_NAMESPACE
diff --git a/src/multimedia/video/qabstractvideobuffer.h b/src/multimedia/video/qabstractvideobuffer.h
new file mode 100644
index 000000000..3e046f3b4
--- /dev/null
+++ b/src/multimedia/video/qabstractvideobuffer.h
@@ -0,0 +1,32 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QABSTRACTVIDEOBUFFER_H
+#define QABSTRACTVIDEOBUFFER_H
+
+#include <QtMultimedia/qtmultimediaexports.h>
+#include <QtMultimedia/qvideoframeformat.h>
+#include <QtMultimedia/qtvideo.h>
+
+QT_BEGIN_NAMESPACE
+
+class Q_MULTIMEDIA_EXPORT QAbstractVideoBuffer
+{
+public:
+ struct MapData
+ {
+ int planeCount = 0;
+ int bytesPerLine[4] = {};
+ uchar *data[4] = {};
+ int dataSize[4] = {};
+ };
+
+ virtual ~QAbstractVideoBuffer();
+ virtual MapData map(QtVideo::MapMode mode) = 0;
+ virtual void unmap() { }
+ virtual QVideoFrameFormat format() const = 0;
+};
+
+QT_END_NAMESPACE
+
+#endif
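
The pure virtuals above (map() and format(), with unmap() defaulting to a no-op) are all a CPU-backed subclass has to provide. A rough sketch under that assumption; ByteArrayVideoBuffer is illustrative and not part of the patch, and the caller is responsible for a plane layout that matches the format:

    #include <QtMultimedia/qabstractvideobuffer.h>
    #include <QtMultimedia/qvideoframeformat.h>
    #include <QtCore/qbytearray.h>
    #include <utility>

    class ByteArrayVideoBuffer : public QAbstractVideoBuffer
    {
    public:
        ByteArrayVideoBuffer(QByteArray data, QVideoFrameFormat format, int bytesPerLine)
            : m_data(std::move(data)), m_format(std::move(format)), m_bytesPerLine(bytesPerLine)
        {
        }

        MapData map(QtVideo::MapMode /*mode*/) override
        {
            // CPU memory is already accessible, so just describe the single plane.
            MapData mapData;
            if (!m_data.isEmpty()) {
                mapData.planeCount = 1;
                mapData.bytesPerLine[0] = m_bytesPerLine;
                mapData.data[0] = reinterpret_cast<uchar *>(m_data.data());
                mapData.dataSize[0] = int(m_data.size());
            }
            return mapData;
        }

        // unmap() is intentionally not overridden: the default no-op is enough here.

        QVideoFrameFormat format() const override { return m_format; }

    private:
        QByteArray m_data;
        QVideoFrameFormat m_format;
        int m_bytesPerLine = 0;
    };

Such a buffer is then handed to the QVideoFrame(std::unique_ptr<QAbstractVideoBuffer>) constructor added further down in qvideoframe.cpp.
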
diff --git a/src/multimedia/video/qabstractvideobuffer_p.h b/src/multimedia/video/qabstractvideobuffer_p.h
deleted file mode 100644
index 2004e25f7..000000000
--- a/src/multimedia/video/qabstractvideobuffer_p.h
+++ /dev/null
@@ -1,82 +0,0 @@
-// Copyright (C) 2022 The Qt Company Ltd.
-// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-
-#ifndef QABSTRACTVIDEOBUFFER_H
-#define QABSTRACTVIDEOBUFFER_H
-
-//
-// W A R N I N G
-// -------------
-//
-// This file is not part of the Qt API. It exists purely as an
-// implementation detail. This header file may change from version to
-// version without notice, or even be removed.
-//
-// We mean it.
-//
-
-#include <QtMultimedia/qtmultimediaglobal.h>
-#include <QtMultimedia/qvideoframe.h>
-
-#include <QtCore/qmetatype.h>
-#include <QtGui/qmatrix4x4.h>
-#include <QtCore/private/qglobal_p.h>
-
-#include <memory>
-
-QT_BEGIN_NAMESPACE
-
-
-class QVariant;
-class QRhi;
-class QRhiTexture;
-
-class Q_MULTIMEDIA_EXPORT QVideoFrameTextures
-{
-public:
- virtual ~QVideoFrameTextures() {}
- virtual QRhiTexture *texture(uint plane) const = 0;
-};
-
-class Q_MULTIMEDIA_EXPORT QAbstractVideoBuffer
-{
-public:
- QAbstractVideoBuffer(QVideoFrame::HandleType type, QRhi *rhi = nullptr);
- virtual ~QAbstractVideoBuffer();
-
- QVideoFrame::HandleType handleType() const;
- QRhi *rhi() const;
-
- struct MapData
- {
- int nPlanes = 0;
- int bytesPerLine[4] = {};
- uchar *data[4] = {};
- int size[4] = {};
- };
-
- virtual QVideoFrame::MapMode mapMode() const = 0;
- virtual MapData map(QVideoFrame::MapMode mode) = 0;
- virtual void unmap() = 0;
-
- virtual std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *) { return {}; }
- virtual quint64 textureHandle(QRhi *, int /*plane*/) const { return 0; }
-
- virtual QMatrix4x4 externalTextureMatrix() const { return {}; }
-
- virtual QByteArray underlyingByteArray(int /*plane*/) const { return {}; }
-protected:
- QVideoFrame::HandleType m_type;
- QRhi *m_rhi = nullptr;
-
-private:
- Q_DISABLE_COPY(QAbstractVideoBuffer)
-};
-
-#ifndef QT_NO_DEBUG_STREAM
-Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, QVideoFrame::MapMode);
-#endif
-
-QT_END_NAMESPACE
-
-#endif
diff --git a/src/multimedia/video/qhwvideobuffer.cpp b/src/multimedia/video/qhwvideobuffer.cpp
new file mode 100644
index 000000000..ecd3435d0
--- /dev/null
+++ b/src/multimedia/video/qhwvideobuffer.cpp
@@ -0,0 +1,17 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qhwvideobuffer_p.h"
+
+QT_BEGIN_NAMESPACE
+
+QVideoFrameTextures::~QVideoFrameTextures() = default;
+
+QHwVideoBuffer::QHwVideoBuffer(QVideoFrame::HandleType type, QRhi *rhi) : m_type(type), m_rhi(rhi)
+{
+}
+
+// must be out-of-line to ensure that dynamic_cast works correctly when QHwVideoBuffer is created in tests
+QHwVideoBuffer::~QHwVideoBuffer() = default;
+
+QT_END_NAMESPACE
diff --git a/src/multimedia/video/qhwvideobuffer_p.h b/src/multimedia/video/qhwvideobuffer_p.h
new file mode 100644
index 000000000..fabf82dce
--- /dev/null
+++ b/src/multimedia/video/qhwvideobuffer_p.h
@@ -0,0 +1,58 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QHWVIDEOBUFFER_P_H
+#define QHWVIDEOBUFFER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qabstractvideobuffer.h"
+#include "qvideoframe.h"
+
+#include <QtGui/qmatrix4x4.h>
+
+QT_BEGIN_NAMESPACE
+
+class QRhi;
+class QRhiTexture;
+
+class Q_MULTIMEDIA_EXPORT QVideoFrameTextures
+{
+public:
+ virtual ~QVideoFrameTextures();
+ virtual QRhiTexture *texture(uint plane) const = 0;
+};
+
+class Q_MULTIMEDIA_EXPORT QHwVideoBuffer : public QAbstractVideoBuffer
+{
+public:
+ QHwVideoBuffer(QVideoFrame::HandleType type, QRhi *rhi = nullptr);
+
+ ~QHwVideoBuffer() override;
+
+ QVideoFrame::HandleType handleType() const { return m_type; }
+ QRhi *rhi() const { return m_rhi; }
+
+ QVideoFrameFormat format() const override { return {}; }
+
+ virtual std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *) { return {}; }
+ virtual quint64 textureHandle(QRhi *, int /*plane*/) const { return 0; }
+ virtual QMatrix4x4 externalTextureMatrix() const { return {}; }
+
+protected:
+ QVideoFrame::HandleType m_type;
+ QRhi *m_rhi = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif // QHWVIDEOBUFFER_P_H
diff --git a/src/multimedia/video/qimagevideobuffer.cpp b/src/multimedia/video/qimagevideobuffer.cpp
index bc825004e..400b89319 100644
--- a/src/multimedia/video/qimagevideobuffer.cpp
+++ b/src/multimedia/video/qimagevideobuffer.cpp
@@ -51,37 +51,25 @@ QImage fixImage(QImage image)
} // namespace
-QImageVideoBuffer::QImageVideoBuffer(QImage image)
- : QAbstractVideoBuffer(QVideoFrame::NoHandle), m_image(fixImage(std::move(image)))
-{
-}
-
-QVideoFrame::MapMode QImageVideoBuffer::mapMode() const
-{
- return m_mapMode;
-}
+QImageVideoBuffer::QImageVideoBuffer(QImage image) : m_image(fixImage(std::move(image))) { }
-QAbstractVideoBuffer::MapData QImageVideoBuffer::map(QVideoFrame::MapMode mode)
+QAbstractVideoBuffer::MapData QImageVideoBuffer::map(QtVideo::MapMode mode)
{
MapData mapData;
- if (m_mapMode == QVideoFrame::NotMapped && !m_image.isNull()
- && mode != QVideoFrame::NotMapped) {
- m_mapMode = mode;
- mapData.nPlanes = 1;
+ if (!m_image.isNull()) {
+ mapData.planeCount = 1;
mapData.bytesPerLine[0] = m_image.bytesPerLine();
- mapData.data[0] = m_image.bits();
- mapData.size[0] = m_image.sizeInBytes();
+ if (mode == QtVideo::MapMode::ReadOnly)
+ mapData.data[0] = const_cast<uint8_t *>(m_image.constBits());
+ else
+ mapData.data[0] = m_image.bits();
+ mapData.dataSize[0] = m_image.sizeInBytes();
}
return mapData;
}
-void QImageVideoBuffer::unmap()
-{
- m_mapMode = QVideoFrame::NotMapped;
-}
-
QImage QImageVideoBuffer::underlyingImage() const
{
return m_image;
diff --git a/src/multimedia/video/qimagevideobuffer_p.h b/src/multimedia/video/qimagevideobuffer_p.h
index e5467563a..4ea894ba8 100644
--- a/src/multimedia/video/qimagevideobuffer_p.h
+++ b/src/multimedia/video/qimagevideobuffer_p.h
@@ -4,7 +4,7 @@
#ifndef QIMAGEVIDEOBUFFER_P_H
#define QIMAGEVIDEOBUFFER_P_H
-#include <private/qabstractvideobuffer_p.h>
+#include <qabstractvideobuffer.h>
#include <qimage.h>
//
@@ -25,16 +25,13 @@ class Q_MULTIMEDIA_EXPORT QImageVideoBuffer : public QAbstractVideoBuffer
public:
QImageVideoBuffer(QImage image);
- QVideoFrame::MapMode mapMode() const override;
+ MapData map(QtVideo::MapMode mode) override;
- MapData map(QVideoFrame::MapMode mode) override;
-
- void unmap() override;
+ QVideoFrameFormat format() const override { return {}; }
QImage underlyingImage() const;
private:
- QVideoFrame::MapMode m_mapMode = QVideoFrame::NotMapped;
QImage m_image;
};
diff --git a/src/multimedia/video/qmemoryvideobuffer.cpp b/src/multimedia/video/qmemoryvideobuffer.cpp
index bcbbe7e59..0940d2ca4 100644
--- a/src/multimedia/video/qmemoryvideobuffer.cpp
+++ b/src/multimedia/video/qmemoryvideobuffer.cpp
@@ -18,9 +18,7 @@ QT_BEGIN_NAMESPACE
Constructs a video buffer with an image stride of \a bytesPerLine from a byte \a array.
*/
QMemoryVideoBuffer::QMemoryVideoBuffer(QByteArray data, int bytesPerLine)
- : QAbstractVideoBuffer(QVideoFrame::NoHandle),
- m_bytesPerLine(bytesPerLine),
- m_data(std::move(data))
+ : m_bytesPerLine(bytesPerLine), m_data(std::move(data))
{
}
@@ -32,48 +30,23 @@ QMemoryVideoBuffer::~QMemoryVideoBuffer() = default;
/*!
\reimp
*/
-QVideoFrame::MapMode QMemoryVideoBuffer::mapMode() const
-{
- return m_mapMode;
-}
-
-/*!
- \reimp
-*/
-QAbstractVideoBuffer::MapData QMemoryVideoBuffer::map(QVideoFrame::MapMode mode)
+QAbstractVideoBuffer::MapData QMemoryVideoBuffer::map(QtVideo::MapMode mode)
{
MapData mapData;
- if (m_mapMode == QVideoFrame::NotMapped && m_data.size() && mode != QVideoFrame::NotMapped) {
- m_mapMode = mode;
- mapData.nPlanes = 1;
+ if (!m_data.isEmpty()) {
+ mapData.planeCount = 1;
mapData.bytesPerLine[0] = m_bytesPerLine;
// avoid detaching and extra copying in case the underlyingByteArray is
// being held by textures or anything else.
- if (mode == QVideoFrame::ReadOnly)
- mapData.data[0] = reinterpret_cast<uchar *>(const_cast<char*>(m_data.constData()));
+ if (mode == QtVideo::MapMode::ReadOnly)
+ mapData.data[0] = reinterpret_cast<uchar *>(const_cast<char *>(m_data.constData()));
else
mapData.data[0] = reinterpret_cast<uchar *>(m_data.data());
- mapData.size[0] = m_data.size();
+ mapData.dataSize[0] = m_data.size();
}
return mapData;
}
-/*!
- \reimp
-*/
-void QMemoryVideoBuffer::unmap()
-{
- m_mapMode = QVideoFrame::NotMapped;
-}
-
-/*!
- \reimp
-*/
-QByteArray QMemoryVideoBuffer::underlyingByteArray(int plane) const
-{
- return plane == 0 ? m_data : QByteArray{};
-}
-
QT_END_NAMESPACE
diff --git a/src/multimedia/video/qmemoryvideobuffer_p.h b/src/multimedia/video/qmemoryvideobuffer_p.h
index ec97abd4f..1bd5d6be2 100644
--- a/src/multimedia/video/qmemoryvideobuffer_p.h
+++ b/src/multimedia/video/qmemoryvideobuffer_p.h
@@ -4,7 +4,7 @@
#ifndef QMEMORYVIDEOBUFFER_P_H
#define QMEMORYVIDEOBUFFER_P_H
-#include <private/qabstractvideobuffer_p.h>
+#include "qabstractvideobuffer.h"
//
// W A R N I N G
@@ -23,17 +23,14 @@ class Q_MULTIMEDIA_EXPORT QMemoryVideoBuffer : public QAbstractVideoBuffer
{
public:
QMemoryVideoBuffer(QByteArray data, int bytesPerLine);
- ~QMemoryVideoBuffer();
+ ~QMemoryVideoBuffer() override;
- QVideoFrame::MapMode mapMode() const override;
+ MapData map(QtVideo::MapMode mode) override;
- MapData map(QVideoFrame::MapMode mode) override;
- void unmap() override;
+ QVideoFrameFormat format() const override { return {}; }
- QByteArray underlyingByteArray(int plane) const override;
private:
int m_bytesPerLine = 0;
- QVideoFrame::MapMode m_mapMode = QVideoFrame::NotMapped;
QByteArray m_data;
};
diff --git a/src/multimedia/video/qtvideo.cpp b/src/multimedia/video/qtvideo.cpp
index 29747b776..5bb28e5b5 100644
--- a/src/multimedia/video/qtvideo.cpp
+++ b/src/multimedia/video/qtvideo.cpp
@@ -3,6 +3,8 @@
#include "qtvideo.h"
+#include <QtCore/qdebug.h>
+
QT_BEGIN_NAMESPACE
/*!
@@ -25,6 +27,45 @@ QT_BEGIN_NAMESPACE
\value Clockwise270 The frame should be rotated clockwise by 270 degrees
*/
+/*!
+ \enum QtVideo::MapMode
+
+ Enumerates how a video buffer's data is mapped to system memory.
+
+ \value NotMapped
+ The video buffer is not mapped to memory.
+ \value ReadOnly
+ The mapped memory is populated with data from the video buffer when mapped,
+ but the content of the mapped memory may be discarded when unmapped.
+ \value WriteOnly
+ The mapped memory is uninitialized when mapped, but the possibly modified
+ content will be used to populate the video buffer when unmapped.
+ \value ReadWrite
+ The mapped memory is populated with data from the video
+ buffer, and the video buffer is repopulated with the content of the mapped
+ memory when it is unmapped.
+
+ \sa QVideoFrame::mapMode(), QVideoFrame::map()
+*/
+
+#ifndef QT_NO_DEBUG_STREAM
+QDebug operator<<(QDebug dbg, QtVideo::MapMode mode)
+{
+ QDebugStateSaver saver(dbg);
+ dbg.nospace();
+ switch (mode) {
+ case QtVideo::MapMode::ReadOnly:
+ return dbg << "ReadOnly";
+ case QtVideo::MapMode::ReadWrite:
+ return dbg << "ReadWrite";
+ case QtVideo::MapMode::WriteOnly:
+ return dbg << "WriteOnly";
+ default:
+ return dbg << "NotMapped";
+ }
+}
+#endif
+
QT_END_NAMESPACE
#include "moc_qtvideo.cpp"
diff --git a/src/multimedia/video/qtvideo.h b/src/multimedia/video/qtvideo.h
index a5f22ea2c..fdcb26419 100644
--- a/src/multimedia/video/qtvideo.h
+++ b/src/multimedia/video/qtvideo.h
@@ -16,8 +16,42 @@ Q_NAMESPACE_EXPORT(Q_MULTIMEDIA_EXPORT)
enum class Rotation { None = 0, Clockwise90 = 90, Clockwise180 = 180, Clockwise270 = 270 };
Q_ENUM_NS(Rotation)
+
+enum class MapMode
+{
+ NotMapped = 0x00,
+ ReadOnly = 0x01,
+ WriteOnly = 0x02,
+ ReadWrite = ReadOnly | WriteOnly
+};
+Q_ENUM_NS(MapMode)
+
+inline constexpr MapMode operator & (MapMode lhs, MapMode rhs)
+{
+ return MapMode(qToUnderlying(lhs) & qToUnderlying(rhs));
+}
+
+inline constexpr MapMode operator | (MapMode lhs, MapMode rhs)
+{
+ return MapMode(qToUnderlying(lhs) | qToUnderlying(rhs));
+}
+
+inline constexpr MapMode &operator &= (MapMode &lhs, MapMode rhs)
+{
+ return (lhs = lhs & rhs);
+}
+
+inline constexpr MapMode &operator |= (MapMode &lhs, MapMode rhs)
+{
+ return (lhs = lhs | rhs);
+}
+
}
+#ifndef QT_NO_DEBUG_STREAM
+Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, QtVideo::MapMode);
+#endif
+
QT_END_NAMESPACE
#endif // QTVIDEO_H
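
Since the operators above return a MapMode rather than a bool, call sites test flags by comparing against NotMapped, as the patch itself does in qvideoframe.cpp. A small sketch, assuming only the public API shown in this diff:

    #include <QtMultimedia/qtvideo.h>
    #include <QtCore/qdebug.h>

    void mapModeFlags()
    {
        QtVideo::MapMode mode = QtVideo::MapMode::ReadOnly | QtVideo::MapMode::WriteOnly;
        Q_ASSERT(mode == QtVideo::MapMode::ReadWrite);

        const bool writable = (mode & QtVideo::MapMode::WriteOnly) != QtVideo::MapMode::NotMapped;
        qDebug() << mode << "writable:" << writable; // uses the operator<< added in qtvideo.cpp
    }
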
diff --git a/src/multimedia/video/qvideoframe.cpp b/src/multimedia/video/qvideoframe.cpp
index de981f423..9da4ea3b8 100644
--- a/src/multimedia/video/qvideoframe.cpp
+++ b/src/multimedia/video/qvideoframe.cpp
@@ -58,6 +58,23 @@ QT_DEFINE_QESDP_SPECIALIZATION_DTOR(QVideoFramePrivate);
\note Since video frames can be expensive to copy, QVideoFrame is explicitly shared, so any
change made to a video frame will also apply to any copies.
+
+ \sa QAbstractVideoBuffer, QVideoFrameFormat, QtVideo::MapMode
+*/
+
+/*!
+ \enum QVideoFrame::HandleType
+
+ Identifies the type of a video buffers handle.
+
+ \value NoHandle
+ The buffer has no handle, its data can only be accessed by mapping the buffer.
+ \value RhiTextureHandle
+ The handle of the buffer is defined by The Qt Rendering Hardware Interface
+ (RHI). RHI is Qt's internal graphics abstraction for 3D APIs, such as
+ OpenGL, Vulkan, Metal, and Direct 3D.
+
+ \sa handleType()
*/
@@ -68,6 +85,8 @@ QVideoFrame::QVideoFrame()
{
}
+#if QT_DEPRECATED_SINCE(6, 8)
+
/*!
\internal
Constructs a video frame from a \a buffer with the given pixel \a format and \a size in pixels.
@@ -75,9 +94,8 @@ QVideoFrame::QVideoFrame()
\note This doesn't increment the reference count of the video buffer.
*/
QVideoFrame::QVideoFrame(QAbstractVideoBuffer *buffer, const QVideoFrameFormat &format)
- : d(new QVideoFramePrivate(format))
+ : d(new QVideoFramePrivate(format, std::unique_ptr<QAbstractVideoBuffer>(buffer)))
{
- d->buffer.reset(buffer);
}
/*!
@@ -85,9 +103,11 @@ QVideoFrame::QVideoFrame(QAbstractVideoBuffer *buffer, const QVideoFrameFormat &
*/
QAbstractVideoBuffer *QVideoFrame::videoBuffer() const
{
- return d ? d->buffer.get() : nullptr;
+ return d ? d->videoBuffer.get() : nullptr;
}
+#endif
+
/*!
Constructs a video frame of the given pixel \a format.
@@ -103,22 +123,25 @@ QVideoFrame::QVideoFrame(const QVideoFrameFormat &format)
// Check the memory was successfully allocated.
if (!data.isEmpty())
- d->buffer = std::make_unique<QMemoryVideoBuffer>(data, textureDescription->strideForWidth(format.frameWidth()));
+ d->videoBuffer = std::make_unique<QMemoryVideoBuffer>(
+ data, textureDescription->strideForWidth(format.frameWidth()));
}
}
/*!
- Constructs a QVideoFrame from a QImage. The QImage pixels are copied
- into the QVideoFrame's memory buffer. The resulting frame has the
- same size as the QImage, but the number of bytes per line may
- differ.
+ Constructs a QVideoFrame from a QImage.
\since 6.8
If the QImage::Format matches one of the formats in
- QVideoFrameFormat::PixelFormat, the QVideoFrame will use that format
- without any pixel format conversion. Otherwise, the image is first
- converted to a supported (A)RGB format using QImage::convertedTo()
- with the Qt::AutoColor flag. This may incur a performance penalty.
+ QVideoFrameFormat::PixelFormat, the QVideoFrame will hold an instance of
+ the \a image and use that format without any pixel format conversion.
+ In this case, pixel data will be copied only if you call \l{QVideoFrame::map}
+ with the \c WriteOnly flag while still holding the original image.
+
+ Otherwise, if the QImage::Format matches none of the video pixel formats,
+ the image is first converted to a supported (A)RGB format using
+ QImage::convertedTo() with the Qt::AutoColor flag.
+ This may incur a performance penalty.
If QImage::isNull() evaluates to true for the input QImage, the
QVideoFrame will be invalid and QVideoFrameFormat::isValid() will
@@ -151,6 +174,47 @@ QVideoFrame::QVideoFrame(const QImage &image)
}
/*!
+ Constructs a QVideoFrame from a \l QAbstractVideoBuffer.
+
+ \since 6.8
+
+ The specified \a videoBuffer refers to an instance of a reimplemented
+ \l QAbstractVideoBuffer. The instance is expected to contain a preallocated custom
+ video buffer and must implement \l QAbstractVideoBuffer::format,
+ \l QAbstractVideoBuffer::map, and, for GPU content, \l QAbstractVideoBuffer::unmap.
+
+ If \a videoBuffer is null or provides an invalid \l QVideoFrameFormat,
+ the constructor creates an invalid video frame.
+
+ The created frame will hold ownership of the specified video buffer for its lifetime.
+ Considering that QVideoFrame is implemented via a shared private object,
+ the specified video buffer will be destroyed upon destruction of the last copy
+ of the created video frame.
+
+ Note that if a video frame has been passed to \l QMediaRecorder or a rendering pipeline,
+ the lifetime of the frame is undefined, and the media recorder can destroy it
+ in a different thread.
+
+ QVideoFrame will contain its own instance of QVideoFrameFormat.
+ Upon invoking \l setStreamFrameRate, \l setMirrored, or \l setRotation,
+ the inner format can be modified, and \l surfaceFormat will return
+ a detached instance.
+
+ \sa QAbstractVideoBuffer, QVideoFrameFormat
+*/
+QVideoFrame::QVideoFrame(std::unique_ptr<QAbstractVideoBuffer> videoBuffer)
+{
+ if (!videoBuffer)
+ return;
+
+ QVideoFrameFormat format = videoBuffer->format();
+ if (!format.isValid())
+ return;
+
+ d = new QVideoFramePrivate{ std::move(format), std::move(videoBuffer) };
+}
+
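
A brief usage sketch for this constructor; ByteArrayVideoBuffer is the illustrative subclass sketched after qabstractvideobuffer.h above, and the 640x480 RGBA values are arbitrary:

    #include <QtMultimedia/qvideoframe.h>
    #include <QtCore/qsize.h>
    #include <memory>

    void buildFrameFromCustomBuffer()
    {
        const QVideoFrameFormat format(QSize(640, 480), QVideoFrameFormat::Format_RGBA8888);
        QByteArray pixels(640 * 4 * 480, '\0');

        QVideoFrame frame(std::make_unique<ByteArrayVideoBuffer>(pixels, format, 640 * 4));
        Q_ASSERT(frame.isValid()); // a null buffer or an invalid format yields an invalid frame

        if (frame.map(QtVideo::MapMode::ReadOnly)) {
            const uchar *plane0 = frame.bits(0);      // provided by ByteArrayVideoBuffer::map()
            const int stride = frame.bytesPerLine(0); // 2560 in this example
            Q_UNUSED(plane0);
            Q_UNUSED(stride);
            frame.unmap();
        }
    }
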
+/*!
Constructs a shallow copy of \a other. Since QVideoFrame is
explicitly shared, these two instances will reflect the same frame.
@@ -213,7 +277,7 @@ QVideoFrame::~QVideoFrame() = default;
*/
bool QVideoFrame::isValid() const
{
- return (d && d->buffer) && d->format.pixelFormat() != QVideoFrameFormat::Format_Invalid;
+ return d && d->videoBuffer && d->format.pixelFormat() != QVideoFrameFormat::Format_Invalid;
}
/*!
@@ -240,7 +304,7 @@ QVideoFrameFormat QVideoFrame::surfaceFormat() const
*/
QVideoFrame::HandleType QVideoFrame::handleType() const
{
- return (d && d->buffer) ? d->buffer->handleType() : QVideoFrame::NoHandle;
+ return (d && d->hwVideoBuffer) ? d->hwVideoBuffer->handleType() : QVideoFrame::NoHandle;
}
/*!
@@ -270,25 +334,25 @@ int QVideoFrame::height() const
/*!
Identifies if a video frame's contents are currently mapped to system memory.
- This is a convenience function which checks that the \l {QVideoFrame::MapMode}{MapMode}
- of the frame is not equal to QVideoFrame::NotMapped.
+ This is a convenience function which checks that the \l {QtVideo::MapMode}{MapMode}
+ of the frame is not equal to QtVideo::MapMode::NotMapped.
Returns true if the contents of the video frame are mapped to system memory, and false
otherwise.
- \sa mapMode(), QVideoFrame::MapMode
+ \sa mapMode(), QtVideo::MapMode
*/
bool QVideoFrame::isMapped() const
{
- return d && d->buffer && d->buffer->mapMode() != QVideoFrame::NotMapped;
+ return d && d->mapMode != QtVideo::MapMode::NotMapped;
}
/*!
Identifies if the mapped contents of a video frame will be persisted when the frame is unmapped.
- This is a convenience function which checks if the \l {QVideoFrame::MapMode}{MapMode}
- contains the QVideoFrame::WriteOnly flag.
+ This is a convenience function which checks if the \l {QtVideo::MapMode}{MapMode}
+ contains the QtVideo::MapMode::WriteOnly flag.
Returns true if the video frame will be updated when unmapped, and false otherwise.
@@ -296,37 +360,37 @@ bool QVideoFrame::isMapped() const
Depending on the buffer implementation the changes may be persisted, or worse alter a shared
buffer.
- \sa mapMode(), QVideoFrame::MapMode
+ \sa mapMode(), QtVideo::MapMode
*/
bool QVideoFrame::isWritable() const
{
- return d && d->buffer && (d->buffer->mapMode() & QVideoFrame::WriteOnly);
+ return d && (d->mapMode & QtVideo::MapMode::WriteOnly) != QtVideo::MapMode::NotMapped;
}
/*!
Identifies if the mapped contents of a video frame were read from the frame when it was mapped.
- This is a convenience function which checks if the \l {QVideoFrame::MapMode}{MapMode}
- contains the QVideoFrame::WriteOnly flag.
+ This is a convenience function which checks if the \l {QtVideo::MapMode}{MapMode}
+    contains the QtVideo::MapMode::ReadOnly flag.
Returns true if the contents of the mapped memory were read from the video frame, and false
otherwise.
- \sa mapMode(), QVideoFrame::MapMode
+ \sa mapMode(), QtVideo::MapMode
*/
bool QVideoFrame::isReadable() const
{
- return d && d->buffer && (d->buffer->mapMode() & QVideoFrame::ReadOnly);
+ return d && (d->mapMode & QtVideo::MapMode::ReadOnly) != QtVideo::MapMode::NotMapped;
}
/*!
Returns the mode a video frame was mapped to system memory in.
- \sa map(), QVideoFrame::MapMode
+ \sa map(), QtVideo::MapMode
*/
QVideoFrame::MapMode QVideoFrame::mapMode() const
{
- return (d && d->buffer) ? d->buffer->mapMode() : QVideoFrame::NotMapped;
+ return static_cast<QVideoFrame::MapMode>(d ? d->mapMode : QtVideo::MapMode::NotMapped);
}
/*!
@@ -337,9 +401,9 @@ QVideoFrame::MapMode QVideoFrame::mapMode() const
copying the contents around, so avoid mapping and unmapping unless required.
The map \a mode indicates whether the contents of the mapped memory should be read from and/or
- written to the frame. If the map mode includes the \c QVideoFrame::ReadOnly flag the
+ written to the frame. If the map mode includes the \c QtVideo::MapMode::ReadOnly flag the
mapped memory will be populated with the content of the video frame when initially mapped. If the map
- mode includes the \c QVideoFrame::WriteOnly flag the content of the possibly modified
+ mode includes the \c QtVideo::MapMode::WriteOnly flag the content of the possibly modified
mapped memory will be written back to the frame when unmapped.
While mapped the contents of a video frame can be accessed directly through the pointer returned
@@ -359,20 +423,18 @@ QVideoFrame::MapMode QVideoFrame::mapMode() const
\sa unmap(), mapMode(), bits()
*/
-bool QVideoFrame::map(QVideoFrame::MapMode mode)
+bool QVideoFrame::map(QtVideo::MapMode mode)
{
-
- if (!d || !d->buffer)
+ if (!d || !d->videoBuffer)
return false;
QMutexLocker lock(&d->mapMutex);
- if (mode == QVideoFrame::NotMapped)
+ if (mode == QtVideo::MapMode::NotMapped)
return false;
if (d->mappedCount > 0) {
//it's allowed to map the video frame multiple times in read only mode
- if (d->buffer->mapMode() == QVideoFrame::ReadOnly
- && mode == QVideoFrame::ReadOnly) {
+ if (d->mapMode == QtVideo::MapMode::ReadOnly && mode == QtVideo::MapMode::ReadOnly) {
d->mappedCount++;
return true;
}
@@ -382,14 +444,16 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
Q_ASSERT(d->mapData.data[0] == nullptr);
Q_ASSERT(d->mapData.bytesPerLine[0] == 0);
- Q_ASSERT(d->mapData.nPlanes == 0);
- Q_ASSERT(d->mapData.size[0] == 0);
+ Q_ASSERT(d->mapData.planeCount == 0);
+ Q_ASSERT(d->mapData.dataSize[0] == 0);
- d->mapData = d->buffer->map(mode);
- if (d->mapData.nPlanes == 0)
+ d->mapData = d->videoBuffer->map(mode);
+ if (d->mapData.planeCount == 0)
return false;
- if (d->mapData.nPlanes == 1) {
+ d->mapMode = mode;
+
+ if (d->mapData.planeCount == 1) {
auto pixelFmt = d->format.pixelFormat();
// If the plane count is 1 derive the additional planes for planar formats.
switch (pixelFmt) {
@@ -427,16 +491,16 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
const int height = this->height();
const int yStride = d->mapData.bytesPerLine[0];
const int uvHeight = pixelFmt == QVideoFrameFormat::Format_YUV422P ? height : height / 2;
- const int uvStride = (d->mapData.size[0] - (yStride * height)) / uvHeight / 2;
+ const int uvStride = (d->mapData.dataSize[0] - (yStride * height)) / uvHeight / 2;
// Three planes, the second and third vertically (and horizontally for other than Format_YUV422P formats) subsampled.
- d->mapData.nPlanes = 3;
+ d->mapData.planeCount = 3;
d->mapData.bytesPerLine[2] = d->mapData.bytesPerLine[1] = uvStride;
- d->mapData.size[0] = yStride * height;
- d->mapData.size[1] = uvStride * uvHeight;
- d->mapData.size[2] = uvStride * uvHeight;
- d->mapData.data[1] = d->mapData.data[0] + d->mapData.size[0];
- d->mapData.data[2] = d->mapData.data[1] + d->mapData.size[1];
+ d->mapData.dataSize[0] = yStride * height;
+ d->mapData.dataSize[1] = uvStride * uvHeight;
+ d->mapData.dataSize[2] = uvStride * uvHeight;
+ d->mapData.data[1] = d->mapData.data[0] + d->mapData.dataSize[0];
+ d->mapData.data[2] = d->mapData.data[1] + d->mapData.dataSize[1];
break;
}
case QVideoFrameFormat::Format_NV12:
@@ -446,25 +510,25 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
case QVideoFrameFormat::Format_P010:
case QVideoFrameFormat::Format_P016: {
// Semi planar, Full resolution Y plane with interleaved subsampled U and V planes.
- d->mapData.nPlanes = 2;
+ d->mapData.planeCount = 2;
d->mapData.bytesPerLine[1] = d->mapData.bytesPerLine[0];
- int size = d->mapData.size[0];
- d->mapData.size[0] = (d->mapData.bytesPerLine[0] * height());
- d->mapData.size[1] = size - d->mapData.size[0];
- d->mapData.data[1] = d->mapData.data[0] + d->mapData.size[0];
+ int size = d->mapData.dataSize[0];
+ d->mapData.dataSize[0] = (d->mapData.bytesPerLine[0] * height());
+ d->mapData.dataSize[1] = size - d->mapData.dataSize[0];
+ d->mapData.data[1] = d->mapData.data[0] + d->mapData.dataSize[0];
break;
}
case QVideoFrameFormat::Format_IMC1:
case QVideoFrameFormat::Format_IMC3: {
// Three planes, the second and third vertically and horizontally subsumpled,
// but with lines padded to the width of the first plane.
- d->mapData.nPlanes = 3;
+ d->mapData.planeCount = 3;
d->mapData.bytesPerLine[2] = d->mapData.bytesPerLine[1] = d->mapData.bytesPerLine[0];
- d->mapData.size[0] = (d->mapData.bytesPerLine[0] * height());
- d->mapData.size[1] = (d->mapData.bytesPerLine[0] * height() / 2);
- d->mapData.size[2] = (d->mapData.bytesPerLine[0] * height() / 2);
- d->mapData.data[1] = d->mapData.data[0] + d->mapData.size[0];
- d->mapData.data[2] = d->mapData.data[1] + d->mapData.size[1];
+ d->mapData.dataSize[0] = (d->mapData.bytesPerLine[0] * height());
+ d->mapData.dataSize[1] = (d->mapData.bytesPerLine[0] * height() / 2);
+ d->mapData.dataSize[2] = (d->mapData.bytesPerLine[0] * height() / 2);
+ d->mapData.data[1] = d->mapData.data[0] + d->mapData.dataSize[0];
+ d->mapData.data[2] = d->mapData.data[1] + d->mapData.dataSize[1];
break;
}
}
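
As a sanity check of the single-plane fallback above, assume a tightly packed 640x480 Format_YUV420P buffer whose map() reported planeCount == 1 (illustrative numbers only):

    yStride     = bytesPerLine[0]              = 640
    dataSize[0] = 640*480 + 2*(320*240)        = 460800   (whole contiguous buffer)
    uvHeight    = 480 / 2                      = 240
    uvStride    = (460800 - 640*480) / 240 / 2 = 320

    => dataSize becomes { 307200, 76800, 76800 }, with data[1] starting right
       after the Y plane and data[2] right after the U plane.
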
@@ -475,7 +539,7 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
// unlock mapMutex to avoid potential deadlock imageMutex <--> mapMutex
lock.unlock();
- if ((mode & QVideoFrame::WriteOnly) != 0) {
+ if ((mode & QtVideo::MapMode::WriteOnly) != QtVideo::MapMode::NotMapped) {
QMutexLocker lock(&d->imageMutex);
d->image = {};
}
@@ -483,10 +547,73 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
return true;
}
+#if QT_DEPRECATED_SINCE(6, 8)
+
+/*!
+ \deprecated [6.8] Use \c QtVideo::MapMode instead. The values of this enum
+    are consistent with the values of \c QtVideo::MapMode.
+ \enum QVideoFrame::MapMode
+
+ Enumerates how a video buffer's data is mapped to system memory.
+
+ \value NotMapped
+ The video buffer is not mapped to memory.
+ \value ReadOnly
+ The mapped memory is populated with data from the video buffer when mapped,
+ but the content of the mapped memory may be discarded when unmapped.
+ \value WriteOnly
+ The mapped memory is uninitialized when mapped, but the possibly modified
+ content will be used to populate the video buffer when unmapped.
+ \value ReadWrite
+ The mapped memory is populated with data from the video
+ buffer, and the video buffer is repopulated with the content of the mapped
+ memory when it is unmapped.
+
+ \sa mapMode(), map()
+*/
+
+/*!
+    \deprecated [6.8] Use \c QVideoFrame::map(QtVideo::MapMode) instead.
+ Maps the contents of a video frame to system (CPU addressable) memory.
+
+ In some cases the video frame data might be stored in video memory or otherwise inaccessible
+ memory, so it is necessary to map a frame before accessing the pixel data. This may involve
+ copying the contents around, so avoid mapping and unmapping unless required.
+
+ The map \a mode indicates whether the contents of the mapped memory should be read from and/or
+ written to the frame. If the map mode includes the \c QVideoFrame::ReadOnly flag the
+ mapped memory will be populated with the content of the video frame when initially mapped. If the map
+ mode includes the \c QVideoFrame::WriteOnly flag the content of the possibly modified
+ mapped memory will be written back to the frame when unmapped.
+
+ While mapped the contents of a video frame can be accessed directly through the pointer returned
+ by the bits() function.
+
+ When access to the data is no longer needed, be sure to call the unmap() function to release the
+ mapped memory and possibly update the video frame contents.
+
+ If the video frame has been mapped in read only mode, it is permissible to map it
+ multiple times in read only mode (and unmap it a corresponding number of times). In all
+ other cases it is necessary to unmap the frame first before mapping a second time.
+
+ \note Writing to memory that is mapped as read-only is undefined, and may result in changes
+ to shared data or crashes.
+
+ Returns true if the frame was mapped to memory in the given \a mode and false otherwise.
+
+ \sa unmap(), mapMode(), bits()
+*/
+bool QVideoFrame::map(QVideoFrame::MapMode mode)
+{
+ return map(static_cast<QtVideo::MapMode>(mode));
+}
+
+#endif
+
/*!
Releases the memory mapped by the map() function.
- If the \l {QVideoFrame::MapMode}{MapMode} included the QVideoFrame::WriteOnly
+ If the \l {QtVideo::MapMode}{MapMode} included the QtVideo::MapMode::WriteOnly
flag this will persist the current content of the mapped memory to the video frame.
unmap() should not be called if map() function failed.
@@ -495,7 +622,7 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
*/
void QVideoFrame::unmap()
{
- if (!d || !d->buffer)
+ if (!d || !d->videoBuffer)
return;
QMutexLocker lock(&d->mapMutex);
@@ -509,7 +636,8 @@ void QVideoFrame::unmap()
if (d->mappedCount == 0) {
d->mapData = {};
- d->buffer->unmap();
+ d->mapMode = QtVideo::MapMode::NotMapped;
+ d->videoBuffer->unmap();
}
}
@@ -526,7 +654,7 @@ int QVideoFrame::bytesPerLine(int plane) const
{
if (!d)
return 0;
- return plane >= 0 && plane < d->mapData.nPlanes ? d->mapData.bytesPerLine[plane] : 0;
+ return plane >= 0 && plane < d->mapData.planeCount ? d->mapData.bytesPerLine[plane] : 0;
}
/*!
@@ -545,7 +673,7 @@ uchar *QVideoFrame::bits(int plane)
{
if (!d)
return nullptr;
- return plane >= 0 && plane < d->mapData.nPlanes ? d->mapData.data[plane] : nullptr;
+ return plane >= 0 && plane < d->mapData.planeCount ? d->mapData.data[plane] : nullptr;
}
/*!
@@ -563,7 +691,7 @@ const uchar *QVideoFrame::bits(int plane) const
{
if (!d)
return nullptr;
- return plane >= 0 && plane < d->mapData.nPlanes ? d->mapData.data[plane] : nullptr;
+ return plane >= 0 && plane < d->mapData.planeCount ? d->mapData.data[plane] : nullptr;
}
/*!
@@ -577,7 +705,7 @@ int QVideoFrame::mappedBytes(int plane) const
{
if (!d)
return 0;
- return plane >= 0 && plane < d->mapData.nPlanes ? d->mapData.size[plane] : 0;
+ return plane >= 0 && plane < d->mapData.planeCount ? d->mapData.dataSize[plane] : 0;
}
/*!
@@ -683,7 +811,7 @@ void QVideoFrame::setEndTime(qint64 time)
void QVideoFrame::setRotation(QtVideo::Rotation angle)
{
if (d)
- d->rotation = angle;
+ d->format.setRotation(angle);
}
/*!
@@ -691,16 +819,17 @@ void QVideoFrame::setRotation(QtVideo::Rotation angle)
*/
QtVideo::Rotation QVideoFrame::rotation() const
{
- return QtVideo::Rotation(d ? d->rotation : QtVideo::Rotation::None);
+ return d ? d->format.rotation() : QtVideo::Rotation::None;
}
/*!
- Sets the \a mirrored flag for the frame.
+ Sets the \a mirrored flag for the frame and
+    sets the flag on the underlying \l surfaceFormat.
*/
void QVideoFrame::setMirrored(bool mirrored)
{
if (d)
- d->mirrored = mirrored;
+ d->format.setMirrored(mirrored);
}
/*!
@@ -708,7 +837,24 @@ void QVideoFrame::setMirrored(bool mirrored)
*/
bool QVideoFrame::mirrored() const
{
- return d && d->mirrored;
+ return d && d->format.isMirrored();
+}
+
+/*!
+ Sets the frame \a rate of a video stream in frames per second.
+*/
+void QVideoFrame::setStreamFrameRate(qreal rate)
+{
+ if (d)
+ d->format.setStreamFrameRate(rate);
+}
+
+/*!
+ Returns the frame rate of a video stream in frames per second.
+*/
+qreal QVideoFrame::streamFrameRate() const
+{
+ return d ? d->format.streamFrameRate() : 0.;
}
/*!
@@ -792,7 +938,7 @@ void QVideoFrame::paint(QPainter *painter, const QRectF &rect, const PaintOption
}
}
- if (map(QVideoFrame::ReadOnly)) {
+ if (map(QtVideo::MapMode::ReadOnly)) {
const QTransform oldTransform = painter->transform();
QTransform transform = oldTransform;
transform.translate(targetRect.center().x() - size.width()/2,
diff --git a/src/multimedia/video/qvideoframe.h b/src/multimedia/video/qvideoframe.h
index a306162e8..c026972f8 100644
--- a/src/multimedia/video/qvideoframe.h
+++ b/src/multimedia/video/qvideoframe.h
@@ -35,10 +35,14 @@ public:
enum MapMode
{
- NotMapped = 0x00,
- ReadOnly = 0x01,
- WriteOnly = 0x02,
- ReadWrite = ReadOnly | WriteOnly
+ NotMapped Q_DECL_ENUMERATOR_DEPRECATED_X("Use QtVideo::MapMode::NotMapped instead")
+ = static_cast<int>(QtVideo::MapMode::NotMapped),
+ ReadOnly Q_DECL_ENUMERATOR_DEPRECATED_X("Use QtVideo::MapMode::ReadOnly instead")
+ = static_cast<int>(QtVideo::MapMode::ReadOnly),
+ WriteOnly Q_DECL_ENUMERATOR_DEPRECATED_X("Use QtVideo::MapMode::WriteOnly instead")
+ = static_cast<int>(QtVideo::MapMode::WriteOnly),
+ ReadWrite Q_DECL_ENUMERATOR_DEPRECATED_X("Use QtVideo::MapMode::ReadWrite instead")
+ = static_cast<int>(QtVideo::MapMode::ReadWrite)
};
#if QT_DEPRECATED_SINCE(6, 7)
@@ -54,6 +58,7 @@ public:
QVideoFrame();
QVideoFrame(const QVideoFrameFormat &format);
explicit QVideoFrame(const QImage &image);
+ explicit QVideoFrame(std::unique_ptr<QAbstractVideoBuffer> videoBuffer);
QVideoFrame(const QVideoFrame &other);
~QVideoFrame();
@@ -84,7 +89,11 @@ public:
QVideoFrame::MapMode mapMode() const;
+ bool map(QtVideo::MapMode mode);
+#if QT_DEPRECATED_SINCE(6, 8)
+ QT_DEPRECATED_VERSION_X_6_7("Use QVideoFrame::map(QtVideo::MapMode) instead")
bool map(QVideoFrame::MapMode mode);
+#endif
void unmap();
int bytesPerLine(int plane) const;
@@ -114,6 +123,9 @@ public:
void setMirrored(bool);
bool mirrored() const;
+ void setStreamFrameRate(qreal rate);
+ qreal streamFrameRate() const;
+
QImage toImage() const;
struct PaintOptions {
@@ -131,9 +143,13 @@ public:
void paint(QPainter *painter, const QRectF &rect, const PaintOptions &options);
+#if QT_DEPRECATED_SINCE(6, 8)
+ QT_DEPRECATED_VERSION_X_6_8("The constructor is internal and deprecated")
QVideoFrame(QAbstractVideoBuffer *buffer, const QVideoFrameFormat &format);
+ QT_DEPRECATED_VERSION_X_6_8("The method is internal and deprecated")
QAbstractVideoBuffer *videoBuffer() const;
+#endif
private:
friend class QVideoFramePrivate;
QExplicitlySharedDataPointer<QVideoFramePrivate> d;
diff --git a/src/multimedia/video/qvideoframe_p.h b/src/multimedia/video/qvideoframe_p.h
index 23457e55c..2ca798fbe 100644
--- a/src/multimedia/video/qvideoframe_p.h
+++ b/src/multimedia/video/qvideoframe_p.h
@@ -16,7 +16,7 @@
//
#include "qvideoframe.h"
-#include "qabstractvideobuffer_p.h"
+#include "qhwvideobuffer_p.h"
#include <qmutex.h>
@@ -26,14 +26,44 @@ class QVideoFramePrivate : public QSharedData
{
public:
QVideoFramePrivate() = default;
- QVideoFramePrivate(const QVideoFrameFormat &format) : format(format) { }
- QVideoFramePrivate(QVideoFrameFormat format, std::unique_ptr<QAbstractVideoBuffer> buffer)
- : format{ std::move(format) }, buffer{ std::move(buffer) }
+
+ ~QVideoFramePrivate()
+ {
+ if (videoBuffer && mapMode != QtVideo::MapMode::NotMapped)
+ videoBuffer->unmap();
+ }
+
+ template <typename Buffer>
+ static QVideoFrame createFrame(std::unique_ptr<Buffer> buffer, QVideoFrameFormat format)
{
+ QVideoFrame result;
+ result.d.reset(new QVideoFramePrivate(std::move(format), std::move(buffer)));
+ return result;
+ }
+
+ template <typename Buffer = QAbstractVideoBuffer>
+ QVideoFramePrivate(QVideoFrameFormat format, std::unique_ptr<Buffer> buffer = nullptr)
+ : format{ std::move(format) }, videoBuffer{ std::move(buffer) }
+ {
+ if constexpr (std::is_base_of_v<QHwVideoBuffer, Buffer>)
+ hwVideoBuffer = static_cast<QHwVideoBuffer *>(videoBuffer.get());
+ else if constexpr (std::is_same_v<QAbstractVideoBuffer, Buffer>)
+ hwVideoBuffer = dynamic_cast<QHwVideoBuffer *>(videoBuffer.get());
+ // else hwVideoBuffer == nullptr
}
static QVideoFramePrivate *handle(QVideoFrame &frame) { return frame.d.get(); };
+ static QHwVideoBuffer *hwBuffer(const QVideoFrame &frame)
+ {
+ return frame.d ? frame.d->hwVideoBuffer : nullptr;
+ };
+
+ static QAbstractVideoBuffer *buffer(const QVideoFrame &frame)
+ {
+ return frame.d ? frame.d->videoBuffer.get() : nullptr;
+ };
+
QVideoFrame adoptThisByVideoFrame()
{
QVideoFrame frame;
@@ -44,13 +74,13 @@ public:
qint64 startTime = -1;
qint64 endTime = -1;
QAbstractVideoBuffer::MapData mapData;
+ QtVideo::MapMode mapMode = QtVideo::MapMode::NotMapped;
QVideoFrameFormat format;
- std::unique_ptr<QAbstractVideoBuffer> buffer;
+ std::unique_ptr<QAbstractVideoBuffer> videoBuffer;
+ QHwVideoBuffer *hwVideoBuffer = nullptr;
int mappedCount = 0;
QMutex mapMutex;
QString subtitleText;
- QtVideo::Rotation rotation = QtVideo::Rotation::None;
- bool mirrored = false;
QImage image;
QMutex imageMutex;
diff --git a/src/multimedia/video/qvideoframeconversionhelper.cpp b/src/multimedia/video/qvideoframeconversionhelper.cpp
index 1b570b74f..d3f2b0403 100644
--- a/src/multimedia/video/qvideoframeconversionhelper.cpp
+++ b/src/multimedia/video/qvideoframeconversionhelper.cpp
@@ -34,31 +34,30 @@ static inline void planarYUV420_to_ARGB32(const uchar *y, int yStride,
int width, int height)
{
height &= ~1;
- quint32 *rgb0 = rgb;
- quint32 *rgb1 = rgb + width;
- for (int j = 0; j < height; j += 2) {
+ for (int j = 0; j + 1 < height; j += 2) {
const uchar *lineY0 = y;
const uchar *lineY1 = y + yStride;
const uchar *lineU = u;
const uchar *lineV = v;
- for (int i = 0; i < width; i += 2) {
+ quint32 *rgb0 = rgb;
+ quint32 *rgb1 = rgb + width;
+ for (int i = 0; i + 1 < width; i += 2) {
EXPAND_UV(*lineU, *lineV);
lineU += uvPixelStride;
lineV += uvPixelStride;
- *rgb0++ = qYUVToARGB32(*lineY0++, rv, guv, bu);
- *rgb0++ = qYUVToARGB32(*lineY0++, rv, guv, bu);
- *rgb1++ = qYUVToARGB32(*lineY1++, rv, guv, bu);
- *rgb1++ = qYUVToARGB32(*lineY1++, rv, guv, bu);
+ rgb0[i] = qYUVToARGB32(*lineY0++, rv, guv, bu);
+ rgb0[i + 1] = qYUVToARGB32(*lineY0++, rv, guv, bu);
+ rgb1[i] = qYUVToARGB32(*lineY1++, rv, guv, bu);
+ rgb1[i + 1] = qYUVToARGB32(*lineY1++, rv, guv, bu);
}
y += yStride << 1; // stride * 2
u += uStride;
v += vStride;
- rgb0 += width;
- rgb1 += width;
+ rgb += width << 1; // width * 2
}
}
@@ -69,31 +68,27 @@ static inline void planarYUV422_to_ARGB32(const uchar *y, int yStride,
quint32 *rgb,
int width, int height)
{
- quint32 *rgb0 = rgb;
-
for (int j = 0; j < height; ++j) {
const uchar *lineY0 = y;
const uchar *lineU = u;
const uchar *lineV = v;
- for (int i = 0; i < width; i += 2) {
+ for (int i = 0; i + 1 < width; i += 2) {
EXPAND_UV(*lineU, *lineV);
lineU += uvPixelStride;
lineV += uvPixelStride;
- *rgb0++ = qYUVToARGB32(*lineY0++, rv, guv, bu);
- *rgb0++ = qYUVToARGB32(*lineY0++, rv, guv, bu);
+ rgb[i] = qYUVToARGB32(*lineY0++, rv, guv, bu);
+ rgb[i+1] = qYUVToARGB32(*lineY0++, rv, guv, bu);
}
y += yStride; // stride * 2
u += uStride;
v += vStride;
- rgb0 += width;
+ rgb += width;
}
}
-
-
static void QT_FASTCALL qt_convert_YUV420P_to_ARGB32(const QVideoFrame &frame, uchar *output)
{
FETCH_INFO_TRIPLANAR(frame)
@@ -187,8 +182,7 @@ static void QT_FASTCALL qt_convert_UYVY_to_ARGB32(const QVideoFrame &frame, ucha
for (int i = 0; i < height; ++i) {
const uchar *lineSrc = src;
-
- for (int j = 0; j < width; j += 2) {
+ for (int j = 0; j + 1 < width; j += 2) {
int u = *lineSrc++;
int y0 = *lineSrc++;
int v = *lineSrc++;
@@ -196,11 +190,12 @@ static void QT_FASTCALL qt_convert_UYVY_to_ARGB32(const QVideoFrame &frame, ucha
EXPAND_UV(u, v);
- *rgb++ = qYUVToARGB32(y0, rv, guv, bu);
- *rgb++ = qYUVToARGB32(y1, rv, guv, bu);
+ rgb[j] = qYUVToARGB32(y0, rv, guv, bu);
+ rgb[j+1] = qYUVToARGB32(y1, rv, guv, bu);
}
src += stride;
+ rgb += width;
}
}
@@ -213,8 +208,7 @@ static void QT_FASTCALL qt_convert_YUYV_to_ARGB32(const QVideoFrame &frame, ucha
for (int i = 0; i < height; ++i) {
const uchar *lineSrc = src;
-
- for (int j = 0; j < width; j += 2) {
+ for (int j = 0; j + 1 < width; j += 2) {
int y0 = *lineSrc++;
int u = *lineSrc++;
int y1 = *lineSrc++;
@@ -222,11 +216,12 @@ static void QT_FASTCALL qt_convert_YUYV_to_ARGB32(const QVideoFrame &frame, ucha
EXPAND_UV(u, v);
- *rgb++ = qYUVToARGB32(y0, rv, guv, bu);
- *rgb++ = qYUVToARGB32(y1, rv, guv, bu);
+ rgb[j] = qYUVToARGB32(y0, rv, guv, bu);
+ rgb[j+1] = qYUVToARGB32(y1, rv, guv, bu);
}
src += stride;
+ rgb += width;
}
}
@@ -376,23 +371,24 @@ static void QT_FASTCALL qt_convert_premultiplied_to_ARGB32(const QVideoFrame &fr
}
static inline void planarYUV420_16bit_to_ARGB32(const uchar *y, int yStride,
- const uchar *u, int uStride,
- const uchar *v, int vStride,
- int uvPixelStride,
- quint32 *rgb,
- int width, int height)
+ const uchar *u, int uStride,
+ const uchar *v, int vStride,
+ int uvPixelStride,
+ quint32 *rgb,
+ int width, int height)
{
height &= ~1;
- quint32 *rgb0 = rgb;
- quint32 *rgb1 = rgb + width;
- for (int j = 0; j < height; j += 2) {
+ for (int j = 0; j + 1 < height; j += 2) {
const uchar *lineY0 = y;
const uchar *lineY1 = y + yStride;
const uchar *lineU = u;
const uchar *lineV = v;
- for (int i = 0; i < width; i += 2) {
+ quint32 *rgb0 = rgb;
+ quint32 *rgb1 = rgb + width;
+
+ for (int i = 0; i + 1 < width; i += 2) {
EXPAND_UV(*lineU, *lineV);
lineU += uvPixelStride;
lineV += uvPixelStride;
@@ -410,11 +406,11 @@ static inline void planarYUV420_16bit_to_ARGB32(const uchar *y, int yStride,
y += yStride << 1; // stride * 2
u += uStride;
v += vStride;
- rgb0 += width;
- rgb1 += width;
+ rgb += width * 2;
}
}
+
static void QT_FASTCALL qt_convert_P016_to_ARGB32(const QVideoFrame &frame, uchar *output)
{
FETCH_INFO_BIPLANAR(frame)
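
The conversion-helper changes above replace post-incremented destination pointers with per-row indexed writes and tighten the loop bounds to i + 1 < width / j + 1 < height, so frames with odd dimensions can no longer write past the end of a destination row. A self-contained sketch of the resulting planar 4:2:0 pattern follows; yuvToArgb() and its integer BT.601-style coefficients are illustrative stand-ins for Qt's EXPAND_UV / qYUVToARGB32 macros, not the actual implementation.

#include <cstdint>
#include <algorithm>

static inline uint32_t yuvToArgb(int y, int u, int v)
{
    // Illustrative integer BT.601 approximation (assumption, not Qt's exact math).
    const int c = y - 16, d = u - 128, e = v - 128;
    auto clamp = [](int x) { return static_cast<uint32_t>(std::clamp(x, 0, 255)); };
    const uint32_t r = clamp((298 * c + 409 * e + 128) >> 8);
    const uint32_t g = clamp((298 * c - 100 * d - 208 * e + 128) >> 8);
    const uint32_t b = clamp((298 * c + 516 * d + 128) >> 8);
    return 0xff000000u | (r << 16) | (g << 8) | b;
}

// Two output rows are addressed by index i instead of post-incremented pointers,
// so a loop that stops at "i + 1 < width" cannot overrun the row when width is odd;
// rgb advances by two full rows per iteration of the outer loop.
void yuv420ToArgb32(const uint8_t *y, int yStride,
                    const uint8_t *u, int uStride,
                    const uint8_t *v, int vStride,
                    uint32_t *rgb, int width, int height)
{
    height &= ~1;
    for (int j = 0; j + 1 < height; j += 2) {
        const uint8_t *lineY0 = y;
        const uint8_t *lineY1 = y + yStride;
        const uint8_t *lineU = u;
        const uint8_t *lineV = v;
        uint32_t *rgb0 = rgb;
        uint32_t *rgb1 = rgb + width;
        for (int i = 0; i + 1 < width; i += 2) {
            const int cu = *lineU++, cv = *lineV++; // one chroma sample per 2x2 block
            rgb0[i]     = yuvToArgb(*lineY0++, cu, cv);
            rgb0[i + 1] = yuvToArgb(*lineY0++, cu, cv);
            rgb1[i]     = yuvToArgb(*lineY1++, cu, cv);
            rgb1[i + 1] = yuvToArgb(*lineY1++, cu, cv);
        }
        y += yStride * 2;
        u += uStride;
        v += vStride;
        rgb += width * 2; // skip past both rows written this iteration
    }
}

The packed UYVY/YUYV hunks apply the same idea with a single output row, advancing rgb by one row per scanline.
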
diff --git a/src/multimedia/video/qvideoframeconverter.cpp b/src/multimedia/video/qvideoframeconverter.cpp
index 82e0a0af5..d406aa342 100644
--- a/src/multimedia/video/qvideoframeconverter.cpp
+++ b/src/multimedia/video/qvideoframeconverter.cpp
@@ -6,6 +6,7 @@
#include "qvideoframeformat.h"
#include "qvideoframe_p.h"
#include "qmultimediautils_p.h"
+#include "qabstractvideobuffer.h"
#include <QtCore/qcoreapplication.h>
#include <QtCore/qsize.h>
@@ -16,7 +17,6 @@
#include <QtGui/qoffscreensurface.h>
#include <qpa/qplatformintegration.h>
#include <private/qvideotexturehelper_p.h>
-#include <private/qabstractvideobuffer_p.h>
#include <private/qguiapplication_p.h>
#include <rhi/qrhi.h>
@@ -254,7 +254,7 @@ static bool updateTextures(QRhi *rhi,
static QImage convertJPEG(const QVideoFrame &frame, QtVideo::Rotation rotation, bool mirrorX, bool mirrorY)
{
QVideoFrame varFrame = frame;
- if (!varFrame.map(QVideoFrame::ReadOnly)) {
+ if (!varFrame.map(QtVideo::MapMode::ReadOnly)) {
qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": frame mapping failed";
return {};
}
@@ -273,7 +273,7 @@ static QImage convertCPU(const QVideoFrame &frame, QtVideo::Rotation rotation, b
return {};
} else {
QVideoFrame varFrame = frame;
- if (!varFrame.map(QVideoFrame::ReadOnly)) {
+ if (!varFrame.map(QtVideo::MapMode::ReadOnly)) {
qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": frame mapping failed";
return {};
}
@@ -286,7 +286,8 @@ static QImage convertCPU(const QVideoFrame &frame, QtVideo::Rotation rotation, b
}
}
-QImage qImageFromVideoFrame(const QVideoFrame &frame, QtVideo::Rotation rotation, bool mirrorX, bool mirrorY)
+QImage qImageFromVideoFrame(const QVideoFrame &frame, QtVideo::Rotation rotation, bool mirrorX,
+ bool mirrorY, bool forceCpu)
{
#ifdef Q_OS_DARWIN
QMacAutoReleasePool releasePool;
@@ -310,10 +311,13 @@ QImage qImageFromVideoFrame(const QVideoFrame &frame, QtVideo::Rotation rotation
if (frame.pixelFormat() == QVideoFrameFormat::Format_Jpeg)
return convertJPEG(frame, rotation, mirrorX, mirrorY);
+ if (forceCpu) // For test purposes
+ return convertCPU(frame, rotation, mirrorX, mirrorY);
+
QRhi *rhi = nullptr;
- if (frame.videoBuffer())
- rhi = frame.videoBuffer()->rhi();
+ if (QHwVideoBuffer *buffer = QVideoFramePrivate::hwBuffer(frame))
+ rhi = buffer->rhi();
if (!rhi || rhi->thread() != QThread::currentThread())
rhi = initializeRHI(rhi);
@@ -428,7 +432,7 @@ QImage videoFramePlaneAsImage(QVideoFrame &frame, int plane, QImage::Format targ
if (plane >= frame.planeCount())
return {};
- if (!frame.map(QVideoFrame::ReadOnly)) {
+ if (!frame.map(QtVideo::MapMode::ReadOnly)) {
qWarning() << "Cannot map a video frame in ReadOnly mode!";
return {};
}
diff --git a/src/multimedia/video/qvideoframeconverter_p.h b/src/multimedia/video/qvideoframeconverter_p.h
index d22491f66..ad6cea9e4 100644
--- a/src/multimedia/video/qvideoframeconverter_p.h
+++ b/src/multimedia/video/qvideoframeconverter_p.h
@@ -19,7 +19,9 @@
QT_BEGIN_NAMESPACE
-Q_MULTIMEDIA_EXPORT QImage qImageFromVideoFrame(const QVideoFrame &frame, QtVideo::Rotation rotation = QtVideo::Rotation::None, bool mirrorX = false, bool mirrorY = false);
+Q_MULTIMEDIA_EXPORT QImage
+qImageFromVideoFrame(const QVideoFrame &frame, QtVideo::Rotation rotation = QtVideo::Rotation::None,
+ bool mirrorX = false, bool mirrorY = false, bool forceCpu = false);
/**
* @brief Maps the video frame and returns an image having a shared ownership for the video frame
diff --git a/src/multimedia/video/qvideoframeformat.cpp b/src/multimedia/video/qvideoframeformat.cpp
index b2c9dc5f1..b3177234f 100644
--- a/src/multimedia/video/qvideoframeformat.cpp
+++ b/src/multimedia/video/qvideoframeformat.cpp
@@ -39,7 +39,8 @@ public:
&& viewport == other.viewport
&& frameRatesEqual(frameRate, other.frameRate)
&& colorSpace == other.colorSpace
- && mirrored == other.mirrored)
+ && mirrored == other.mirrored
+ && rotation == other.rotation)
return true;
return false;
@@ -60,6 +61,7 @@ public:
float frameRate = 0.0;
float maxLuminance = -1.;
bool mirrored = false;
+ QtVideo::Rotation rotation = QtVideo::Rotation::None;
};
QT_DEFINE_QESDP_SPECIALIZATION_DTOR(QVideoFrameFormatPrivate);
@@ -538,12 +540,13 @@ void QVideoFrameFormat::setScanLineDirection(Direction direction)
d->scanLineDirection = direction;
}
+#if QT_DEPRECATED_SINCE(6, 8)
/*!
Returns the frame rate of a video stream in frames per second.
*/
qreal QVideoFrameFormat::frameRate() const
{
- return d->frameRate;
+ return streamFrameRate();
}
/*!
@@ -551,6 +554,23 @@ qreal QVideoFrameFormat::frameRate() const
*/
void QVideoFrameFormat::setFrameRate(qreal rate)
{
+ setStreamFrameRate(rate);
+}
+#endif
+
+/*!
+ Returns the frame rate of a video stream in frames per second.
+*/
+qreal QVideoFrameFormat::streamFrameRate() const
+{
+ return d->frameRate;
+}
+
+/*!
+ Sets the frame \a rate of a video stream in frames per second.
+*/
+void QVideoFrameFormat::setStreamFrameRate(qreal rate)
+{
detach();
d->frameRate = rate;
}
@@ -665,6 +685,23 @@ void QVideoFrameFormat::setMirrored(bool mirrored)
}
/*!
+    Returns the angle by which the matching video frame should be rotated clockwise before it is displayed.
+ */
+QtVideo::Rotation QVideoFrameFormat::rotation() const
+{
+ return d->rotation;
+}
+
+/*!
+    Sets the \a rotation angle by which the matching video frame should be rotated clockwise before it is displayed.
+ */
+void QVideoFrameFormat::setRotation(QtVideo::Rotation rotation)
+{
+ detach();
+ d->rotation = rotation;
+}
+
+/*!
\internal
*/
QString QVideoFrameFormat::vertexShaderFileName() const
@@ -984,7 +1021,7 @@ QDebug operator<<(QDebug dbg, const QVideoFrameFormat &f)
<< "\n frame size=" << f.frameSize()
<< "\n viewport=" << f.viewport()
<< "\n colorSpace=" << f.colorSpace()
- << "\n frameRate=" << f.frameRate()
+ << "\n frameRate=" << f.streamFrameRate()
<< "\n mirrored=" << f.isMirrored();
return dbg;
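
QVideoFrameFormat gains a per-stream rotation property, and frameRate()/setFrameRate() are renamed to streamFrameRate()/setStreamFrameRate(), with the old names kept behind a QT_DEPRECATED_SINCE(6, 8) guard. A short usage sketch of the new accessors, assuming the API exactly as added in this diff:

#include <QVideoFrameFormat>
#include <QtMultimedia/qtvideo.h>

QVideoFrameFormat makePortraitFormat()
{
    QVideoFrameFormat fmt(QSize(1920, 1080), QVideoFrameFormat::Format_NV12);
    fmt.setStreamFrameRate(30.0);                    // replaces the deprecated setFrameRate()
    fmt.setRotation(QtVideo::Rotation::Clockwise90); // sinks should rotate frames 90 degrees clockwise
    fmt.setMirrored(false);
    return fmt;
}

Note that the QDebug streaming operator prints the renamed streamFrameRate() but, as of this hunk, does not yet print the new rotation value.
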
diff --git a/src/multimedia/video/qvideoframeformat.h b/src/multimedia/video/qvideoframeformat.h
index 5fb6b3701..18dc9952d 100644
--- a/src/multimedia/video/qvideoframeformat.h
+++ b/src/multimedia/video/qvideoframeformat.h
@@ -5,6 +5,7 @@
#define QVIDEOSURFACEFORMAT_H
#include <QtMultimedia/qtmultimediaglobal.h>
+#include <QtMultimedia/qtvideo.h>
#include <QtCore/qlist.h>
#include <QtCore/qmetatype.h>
@@ -153,8 +154,15 @@ public:
Direction scanLineDirection() const;
void setScanLineDirection(Direction direction);
+#if QT_DEPRECATED_SINCE(6, 8)
+ QT_DEPRECATED_VERSION_X_6_8("Use streamFrameRate()")
qreal frameRate() const;
+ QT_DEPRECATED_VERSION_X_6_8("Use setStreamFrameRate()")
void setFrameRate(qreal rate);
+#endif
+
+ qreal streamFrameRate() const;
+ void setStreamFrameRate(qreal rate);
#if QT_DEPRECATED_SINCE(6, 4)
QT_DEPRECATED_VERSION_X_6_4("Use colorSpace()")
@@ -175,6 +183,9 @@ public:
bool isMirrored() const;
void setMirrored(bool mirrored);
+ QtVideo::Rotation rotation() const;
+ void setRotation(QtVideo::Rotation rotation);
+
QString vertexShaderFileName() const;
QString fragmentShaderFileName() const;
void updateUniformData(QByteArray *dst, const QVideoFrame &frame, const QMatrix4x4 &transform, float opacity) const;
diff --git a/src/multimedia/video/qvideooutputorientationhandler.cpp b/src/multimedia/video/qvideooutputorientationhandler.cpp
index c34e9e92a..ff91bd7fb 100644
--- a/src/multimedia/video/qvideooutputorientationhandler.cpp
+++ b/src/multimedia/video/qvideooutputorientationhandler.cpp
@@ -18,8 +18,8 @@ QVideoOutputOrientationHandler::QVideoOutputOrientationHandler(QObject *parent)
if (!screen)
return;
- connect(screen, SIGNAL(orientationChanged(Qt::ScreenOrientation)),
- this, SLOT(screenOrientationChanged(Qt::ScreenOrientation)));
+ connect(screen, &QScreen::orientationChanged, this,
+ &QVideoOutputOrientationHandler::screenOrientationChanged);
screenOrientationChanged(screen->orientation());
}
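
The orientation handler switches from the string-based SIGNAL()/SLOT() macros to the pointer-to-member connect overload, which is checked at compile time instead of failing with a runtime warning on a typo. A minimal sketch of the same pattern with a lambda receiver; watchPrimaryScreen() is a hypothetical free function, not part of Qt:

#include <QGuiApplication>
#include <QScreen>
#include <QDebug>

void watchPrimaryScreen()
{
    QScreen *screen = QGuiApplication::primaryScreen();
    if (!screen)
        return;
    // Pointer-to-member connect: signal/slot signatures are verified by the compiler.
    QObject::connect(screen, &QScreen::orientationChanged, screen,
                     [](Qt::ScreenOrientation orientation) {
                         qDebug() << "screen orientation changed to" << orientation;
                     });
}
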
diff --git a/src/multimedia/video/qvideotexturehelper.cpp b/src/multimedia/video/qvideotexturehelper.cpp
index 937ff33cb..093989654 100644
--- a/src/multimedia/video/qvideotexturehelper.cpp
+++ b/src/multimedia/video/qvideotexturehelper.cpp
@@ -1,9 +1,11 @@
// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qabstractvideobuffer.h"
+
#include "qvideotexturehelper_p.h"
-#include "qabstractvideobuffer_p.h"
#include "qvideoframeconverter_p.h"
+#include "qvideoframe_p.h"
#include <qpainter.h>
#include <qloggingcategory.h>
@@ -213,7 +215,7 @@ static const TextureDescription descriptions[QVideoFrameFormat::NPixelFormats] =
{ { 1, 1 }, { 1, 1 }, { 1, 1 } }
},
// Format_YUV420P10
- { 3, 1,
+ { 3, 2,
[](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
{ QRhiTexture::R16, QRhiTexture::R16, QRhiTexture::R16 },
{ { 1, 1 }, { 2, 2 }, { 2, 2 } }
@@ -520,7 +522,8 @@ void updateUniformData(QByteArray *dst, const QVideoFrameFormat &format, const Q
break;
case QVideoFrameFormat::Format_SamplerExternalOES:
// get Android specific transform for the externalsampler texture
- cmat = frame.videoBuffer()->externalTextureMatrix();
+ if (auto hwBuffer = QVideoFramePrivate::hwBuffer(frame))
+ cmat = hwBuffer->externalTextureMatrix();
break;
case QVideoFrameFormat::Format_SamplerRect:
{
@@ -629,6 +632,9 @@ static UpdateTextureWithMapResult updateTextureWithMap(const QVideoFrame &frame,
static std::unique_ptr<QRhiTexture> createTextureFromHandle(const QVideoFrame &frame, QRhi *rhi, int plane)
{
+ QHwVideoBuffer *hwBuffer = QVideoFramePrivate::hwBuffer(frame);
+ Q_ASSERT(hwBuffer);
+
QVideoFrameFormat fmt = frame.surfaceFormat();
QVideoFrameFormat::PixelFormat pixelFormat = fmt.pixelFormat();
QSize size = fmt.frameSize();
@@ -650,7 +656,7 @@ static std::unique_ptr<QRhiTexture> createTextureFromHandle(const QVideoFrame &f
#endif
}
- if (quint64 handle = frame.videoBuffer()->textureHandle(rhi, plane); handle) {
+ if (quint64 handle = hwBuffer->textureHandle(rhi, plane); handle) {
std::unique_ptr<QRhiTexture> tex(rhi->newTexture(texDesc.textureFormat[plane], planeSize, 1, textureFlags));
if (tex->createFrom({handle, 0}))
return tex;
@@ -712,7 +718,7 @@ static std::unique_ptr<QVideoFrameTextures> createTexturesFromMemory(QVideoFrame
if (oldArray)
textures = oldArray->takeTextures();
- if (!frame.map(QVideoFrame::ReadOnly)) {
+ if (!frame.map(QtVideo::MapMode::ReadOnly)) {
qWarning() << "Cannot map a video frame in ReadOnly mode!";
return {};
}
@@ -736,15 +742,16 @@ static std::unique_ptr<QVideoFrameTextures> createTexturesFromMemory(QVideoFrame
std::unique_ptr<QVideoFrameTextures> createTextures(QVideoFrame &frame, QRhi *rhi, QRhiResourceUpdateBatch *rub, std::unique_ptr<QVideoFrameTextures> &&oldTextures)
{
- QAbstractVideoBuffer *vf = frame.videoBuffer();
- if (!vf)
+ if (!frame.isValid())
return {};
- if (auto vft = vf->mapTextures(rhi))
- return vft;
+ if (QHwVideoBuffer *hwBuffer = QVideoFramePrivate::hwBuffer(frame)) {
+ if (auto textures = hwBuffer->mapTextures(rhi))
+ return textures;
- if (auto vft = createTexturesFromHandles(frame, rhi))
- return vft;
+ if (auto textures = createTexturesFromHandles(frame, rhi))
+ return textures;
+ }
return createTexturesFromMemory(frame, rhi, rub, oldTextures.get());
}
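
createTextures() now consults the hardware buffer only when the frame actually has one, trying zero-copy texture mapping first, then imported native texture handles, and finally the CPU upload path. The structural sketch below uses stand-in types (Textures, HwBuffer) and stubbed methods purely to illustrate the fallback chain; the real QHwVideoBuffer and QVideoFramePrivate are private Qt classes and are not reproduced here.

#include <memory>

struct Textures {};                                                    // stand-in for QVideoFrameTextures

struct HwBuffer {                                                      // stand-in for the private QHwVideoBuffer
    std::unique_ptr<Textures> mapTextures() { return nullptr; }        // zero-copy import; may fail
    std::unique_ptr<Textures> wrapNativeHandles() { return nullptr; }  // wrap exported handles; may fail
};

std::unique_ptr<Textures> uploadFromMappedMemory()                     // CPU fallback; always available
{
    return std::make_unique<Textures>();
}

std::unique_ptr<Textures> createTextures(bool frameIsValid, HwBuffer *hw)
{
    if (!frameIsValid)
        return nullptr;
    if (hw) {                                  // hardware paths are tried only when a HW buffer exists
        if (auto t = hw->mapTextures())        // 1. direct zero-copy mapping
            return t;
        if (auto t = hw->wrapNativeHandles())  // 2. wrap exported native handles
            return t;
    }
    return uploadFromMappedMemory();           // 3. map the frame to CPU memory and upload
}
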
diff --git a/src/multimedia/video/qvideowindow.cpp b/src/multimedia/video/qvideowindow.cpp
index 9cab23f5f..9b88a86df 100644
--- a/src/multimedia/video/qvideowindow.cpp
+++ b/src/multimedia/video/qvideowindow.cpp
@@ -7,7 +7,9 @@
#include <qpainter.h>
#include <private/qguiapplication_p.h>
#include <private/qmemoryvideobuffer_p.h>
+#include <private/qhwvideobuffer_p.h>
#include <private/qmultimediautils_p.h>
+#include <private/qvideoframe_p.h>
#include <qpa/qplatformintegration.h>
QT_BEGIN_NAMESPACE
@@ -209,8 +211,9 @@ void QVideoWindowPrivate::updateTextures(QRhiResourceUpdateBatch *rub)
// We render a 1x1 black pixel when we don't have a video
if (!m_currentFrame.isValid())
- m_currentFrame = QVideoFrame(new QMemoryVideoBuffer(QByteArray{4, 0}, 4),
- QVideoFrameFormat(QSize(1,1), QVideoFrameFormat::Format_RGBA8888));
+ m_currentFrame = QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(QByteArray{ 4, 0 }, 4),
+ QVideoFrameFormat(QSize(1, 1), QVideoFrameFormat::Format_RGBA8888));
m_frameTextures = QVideoTextureHelper::createTextures(m_currentFrame, m_rhi.get(), rub, std::move(m_frameTextures));
if (!m_frameTextures)