summaryrefslogtreecommitdiffstats
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/3rdparty/pffft/pffft.c46
-rw-r--r--src/3rdparty/pffft/qt_attribution.json4
-rw-r--r--src/CMakeLists.txt2
-rw-r--r--src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java79
-rw-r--r--src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java24
-rw-r--r--src/android/jar/src/org/qtproject/qt/android/multimedia/QtVideoDeviceManager.java26
-rw-r--r--src/multimedia/CMakeLists.txt29
-rw-r--r--src/multimedia/alsa/qalsaaudiodevice.cpp52
-rw-r--r--src/multimedia/alsa/qalsaaudiodevice_p.h6
-rw-r--r--src/multimedia/alsa/qalsaaudiosink.cpp5
-rw-r--r--src/multimedia/alsa/qalsaaudiosink_p.h1
-rw-r--r--src/multimedia/alsa/qalsaaudiosource.cpp20
-rw-r--r--src/multimedia/alsa/qalsamediadevices.cpp98
-rw-r--r--src/multimedia/android/qandroidmediadevices.cpp8
-rw-r--r--src/multimedia/audio/qaudiobufferinput.cpp162
-rw-r--r--src/multimedia/audio/qaudiobufferinput.h44
-rw-r--r--src/multimedia/audio/qaudiobufferoutput.cpp78
-rw-r--r--src/multimedia/audio/qaudiobufferoutput.h37
-rw-r--r--src/multimedia/audio/qaudiobufferoutput_p.h42
-rw-r--r--src/multimedia/audio/qsamplecache_p.cpp11
-rw-r--r--src/multimedia/audio/qwavedecoder.cpp12
-rw-r--r--src/multimedia/camera/qcamera.cpp25
-rw-r--r--src/multimedia/camera/qcamera.h3
-rw-r--r--src/multimedia/camera/qcamera_p.h6
-rw-r--r--src/multimedia/camera/qcameradevice.cpp10
-rw-r--r--src/multimedia/camera/qimagecapture.cpp21
-rw-r--r--src/multimedia/configure.cmake6
-rw-r--r--src/multimedia/doc/qtmultimedia.qdocconf1
-rw-r--r--src/multimedia/doc/src/qtmultimedia-building-from-source.qdoc94
-rw-r--r--src/multimedia/doc/src/qtmultimedia-index.qdoc29
-rw-r--r--src/multimedia/platform/qgstreamer_platformspecificinterface.cpp27
-rw-r--r--src/multimedia/platform/qgstreamer_platformspecificinterface_p.h46
-rw-r--r--src/multimedia/platform/qplatformaudiobufferinput.cpp10
-rw-r--r--src/multimedia/platform/qplatformaudiobufferinput_p.h56
-rw-r--r--src/multimedia/platform/qplatformcamera.cpp9
-rw-r--r--src/multimedia/platform/qplatformcamera_p.h10
-rw-r--r--src/multimedia/platform/qplatformmediacapture.cpp16
-rw-r--r--src/multimedia/platform/qplatformmediacapture_p.h8
-rw-r--r--src/multimedia/platform/qplatformmediaintegration.cpp47
-rw-r--r--src/multimedia/platform/qplatformmediaintegration_p.h12
-rw-r--r--src/multimedia/platform/qplatformmediaplayer.cpp16
-rw-r--r--src/multimedia/platform/qplatformmediaplayer_p.h29
-rw-r--r--src/multimedia/platform/qplatformmediarecorder.cpp6
-rw-r--r--src/multimedia/platform/qplatformmediarecorder_p.h4
-rw-r--r--src/multimedia/platform/qplatformsurfacecapture_p.h3
-rw-r--r--src/multimedia/platform/qplatformvideoframeinput.cpp10
-rw-r--r--src/multimedia/platform/qplatformvideoframeinput_p.h55
-rw-r--r--src/multimedia/platform/qplatformvideosource_p.h5
-rw-r--r--src/multimedia/playback/qmediaplayer.cpp52
-rw-r--r--src/multimedia/playback/qmediaplayer.h5
-rw-r--r--src/multimedia/playback/qmediaplayer_p.h7
-rw-r--r--src/multimedia/pulseaudio/qaudioengine_pulse.cpp2
-rw-r--r--src/multimedia/qmediaframeinput.cpp43
-rw-r--r--src/multimedia/qmediaframeinput_p.h74
-rw-r--r--src/multimedia/qmediainputencoderinterface_p.h31
-rw-r--r--src/multimedia/qmediametadata.cpp26
-rw-r--r--src/multimedia/qmediametadata.h6
-rw-r--r--src/multimedia/qsymbolsresolveutils.cpp79
-rw-r--r--src/multimedia/qsymbolsresolveutils_p.h178
-rw-r--r--src/multimedia/recording/qmediacapturesession.cpp292
-rw-r--r--src/multimedia/recording/qmediacapturesession.h20
-rw-r--r--src/multimedia/recording/qmediacapturesession_p.h53
-rw-r--r--src/multimedia/recording/qmediarecorder.cpp38
-rw-r--r--src/multimedia/recording/qmediarecorder.h5
-rw-r--r--src/multimedia/recording/qmediarecorder_p.h1
-rw-r--r--src/multimedia/recording/qscreencapture-limitations.qdocinc15
-rw-r--r--src/multimedia/recording/qvideoframeinput.cpp156
-rw-r--r--src/multimedia/recording/qvideoframeinput.h44
-rw-r--r--src/multimedia/video/qabstractvideobuffer.cpp213
-rw-r--r--src/multimedia/video/qabstractvideobuffer.h32
-rw-r--r--src/multimedia/video/qabstractvideobuffer_p.h82
-rw-r--r--src/multimedia/video/qhwvideobuffer.cpp17
-rw-r--r--src/multimedia/video/qhwvideobuffer_p.h58
-rw-r--r--src/multimedia/video/qimagevideobuffer.cpp27
-rw-r--r--src/multimedia/video/qimagevideobuffer_p.h10
-rw-r--r--src/multimedia/video/qmemoryvideobuffer.cpp32
-rw-r--r--src/multimedia/video/qmemoryvideobuffer_p.h13
-rw-r--r--src/multimedia/video/qtvideo.cpp41
-rw-r--r--src/multimedia/video/qtvideo.h34
-rw-r--r--src/multimedia/video/qvideoframe.cpp293
-rw-r--r--src/multimedia/video/qvideoframe.h24
-rw-r--r--src/multimedia/video/qvideoframe_p.h38
-rw-r--r--src/multimedia/video/qvideoframeconversionhelper.cpp70
-rw-r--r--src/multimedia/video/qvideoframeconverter.cpp18
-rw-r--r--src/multimedia/video/qvideoframeconverter_p.h4
-rw-r--r--src/multimedia/video/qvideoframeformat.cpp43
-rw-r--r--src/multimedia/video/qvideoframeformat.h11
-rw-r--r--src/multimedia/video/qvideooutputorientationhandler.cpp4
-rw-r--r--src/multimedia/video/qvideotexturehelper.cpp29
-rw-r--r--src/multimedia/video/qvideowindow.cpp7
-rw-r--r--src/multimediaquick/qquickimagecapture.cpp2
-rw-r--r--src/multimediaquick/qsgvideonode_p.cpp2
-rw-r--r--src/plugins/multimedia/CMakeLists.txt28
-rw-r--r--src/plugins/multimedia/android/common/qandroidvideooutput.cpp52
-rw-r--r--src/plugins/multimedia/android/common/qandroidvideooutput_p.h2
-rw-r--r--src/plugins/multimedia/android/mediacapture/qandroidcamerasession.cpp5
-rw-r--r--src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp18
-rw-r--r--src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h4
-rw-r--r--src/plugins/multimedia/android/wrappers/jni/androidcamera.cpp20
-rw-r--r--src/plugins/multimedia/darwin/avfvideobuffer.mm29
-rw-r--r--src/plugins/multimedia/darwin/avfvideobuffer_p.h10
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcamerarenderer.mm10
-rw-r--r--src/plugins/multimedia/darwin/camera/avfimagecapture.mm7
-rw-r--r--src/plugins/multimedia/darwin/camera/avfmediaencoder.mm17
-rw-r--r--src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer.mm40
-rw-r--r--src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol.mm8
-rw-r--r--src/plugins/multimedia/ffmpeg/CMakeLists.txt99
-rw-r--r--src/plugins/multimedia/ffmpeg/cmake/QtAddFFmpegStubs.cmake199
-rw-r--r--src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer.cpp128
-rw-r--r--src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer_p.h22
-rw-r--r--src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec.cpp7
-rw-r--r--src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec_p.h1
-rw-r--r--src/plugins/multimedia/ffmpeg/playbackengine/qffmpegmediadataholder.cpp25
-rw-r--r--src/plugins/multimedia/ffmpeg/playbackengine/qffmpegvideorenderer.cpp5
-rw-r--r--src/plugins/multimedia/ffmpeg/qandroidcamera.cpp60
-rw-r--r--src/plugins/multimedia/ffmpeg/qandroidcameraframe.cpp34
-rw-r--r--src/plugins/multimedia/ffmpeg/qavfsamplebufferdelegate.mm44
-rw-r--r--src/plugins/multimedia/ffmpeg/qcgwindowcapture.mm27
-rw-r--r--src/plugins/multimedia/ffmpeg/qeglfsscreencapture.cpp8
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpeg.cpp82
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpeg_p.h33
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegaudioinput_p.h7
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegconverter.cpp272
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegconverter_p.h (renamed from src/plugins/multimedia/ffmpeg/qffmpegsymbolsresolve_p.h)27
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegdefs_p.h2
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp21
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpeghwaccel_p.h3
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi.cpp14
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession.cpp36
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession_p.h18
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp10
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegmediaintegration_p.h3
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegmediaplayer.cpp28
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegmediaplayer_p.h3
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegmediarecorder.cpp62
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegmediarecorder_p.h5
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegopensslsymbols.cpp185
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegplaybackengine.cpp23
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegplaybackengine_p.h7
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegresampler.cpp7
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegresampler_p.h4
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegscreencapture_dxgi.cpp34
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegsymbolsresolveutils.cpp103
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegsymbolsresolveutils_p.h142
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegthread.cpp19
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegthread_p.h15
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp47
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegvideobuffer_p.h11
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegvideosink.cpp3
-rw-r--r--src/plugins/multimedia/ffmpeg/qffmpegwindowcapture_uwp.cpp35
-rw-r--r--src/plugins/multimedia/ffmpeg/qgdiwindowcapture.cpp6
-rw-r--r--src/plugins/multimedia/ffmpeg/qgrabwindowsurfacecapture.cpp5
-rw-r--r--src/plugins/multimedia/ffmpeg/qopenglvideobuffer.cpp9
-rw-r--r--src/plugins/multimedia/ffmpeg/qopenglvideobuffer_p.h7
-rw-r--r--src/plugins/multimedia/ffmpeg/qv4l2camera.cpp11
-rw-r--r--src/plugins/multimedia/ffmpeg/qwindowscamera.cpp8
-rw-r--r--src/plugins/multimedia/ffmpeg/qx11surfacecapture.cpp8
-rw-r--r--src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder.cpp236
-rw-r--r--src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder_p.h30
-rw-r--r--src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread.cpp19
-rw-r--r--src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread_p.h47
-rw-r--r--src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer.cpp165
-rw-r--r--src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer_p.h77
-rw-r--r--src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer.cpp12
-rw-r--r--src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer_p.h1
-rw-r--r--src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine.cpp272
-rw-r--r--src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine_p.h53
-rw-r--r--src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils.cpp63
-rw-r--r--src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils_p.h81
-rw-r--r--src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder.cpp182
-rw-r--r--src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder_p.h27
-rw-r--r--src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoderutils.cpp5
-rw-r--r--src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder.cpp74
-rw-r--r--src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder_p.h9
-rw-r--r--src/plugins/multimedia/ffmpeg/symbolstubs/openssl3.ver7
-rw-r--r--src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-crypto.cpp6
-rw-r--r--src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-ssl.cpp300
-rw-r--r--src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-drm.cpp14
-rw-r--r--src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-x11.cpp14
-rw-r--r--src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va.cpp (renamed from src/plugins/multimedia/ffmpeg/qffmpegvaapisymbols.cpp)131
-rw-r--r--src/plugins/multimedia/ffmpeg/symbolstubs/va.ver7
-rw-r--r--src/plugins/multimedia/gstreamer/CMakeLists.txt7
-rw-r--r--src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder.cpp387
-rw-r--r--src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder_p.h22
-rw-r--r--src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice.cpp25
-rw-r--r--src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice_p.h18
-rw-r--r--src/plugins/multimedia/gstreamer/audio/qgstreameraudiosink.cpp381
-rw-r--r--src/plugins/multimedia/gstreamer/audio/qgstreameraudiosink_p.h124
-rw-r--r--src/plugins/multimedia/gstreamer/audio/qgstreameraudiosource.cpp369
-rw-r--r--src/plugins/multimedia/gstreamer/audio/qgstreameraudiosource_p.h120
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgst.cpp261
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgst_debug.cpp272
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgst_debug_p.h17
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgst_handle_types_p.h21
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgst_p.h119
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstappsource.cpp2
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstpipeline.cpp165
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstpipeline_p.h23
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstreameraudioinput.cpp94
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstreameraudioinput_p.h4
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput.cpp117
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput_p.h5
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstreamerbufferprobe.cpp14
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer.cpp567
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer_p.h60
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstreamermessage_p.h2
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstreamermetadata.cpp611
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstreamermetadata_p.h17
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstreamervideooutput.cpp50
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstreamervideooutput_p.h6
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstreamervideosink.cpp79
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstreamervideosink_p.h18
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstutils.cpp4
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstutils_p.h2
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstvideobuffer.cpp84
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstvideobuffer_p.h17
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstvideorenderersink.cpp349
-rw-r--r--src/plugins/multimedia/gstreamer/common/qgstvideorenderersink_p.h76
-rw-r--r--src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera.cpp129
-rw-r--r--src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera_p.h35
-rw-r--r--src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture.cpp379
-rw-r--r--src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture_p.h33
-rw-r--r--src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture.cpp126
-rw-r--r--src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture_p.h8
-rw-r--r--src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder.cpp63
-rw-r--r--src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder_p.h4
-rw-r--r--src/plugins/multimedia/gstreamer/qgstreamerformatinfo.cpp133
-rw-r--r--src/plugins/multimedia/gstreamer/qgstreamerformatinfo_p.h8
-rw-r--r--src/plugins/multimedia/gstreamer/qgstreamerintegration.cpp165
-rw-r--r--src/plugins/multimedia/gstreamer/qgstreamerintegration_p.h24
-rw-r--r--src/plugins/multimedia/gstreamer/qgstreamervideodevices.cpp10
-rw-r--r--src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer.cpp39
-rw-r--r--src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer_p.h7
-rw-r--r--src/plugins/multimedia/qnx/camera/qqnxplatformcamera.cpp13
-rw-r--r--src/plugins/multimedia/qnx/mediaplayer/qqnxmediaplayer.cpp45
-rw-r--r--src/plugins/multimedia/wasm/common/qwasmvideooutput.cpp22
-rw-r--r--src/plugins/multimedia/wasm/common/qwasmvideooutput_p.h1
-rw-r--r--src/plugins/multimedia/wasm/mediacapture/qwasmcamera.cpp5
-rw-r--r--src/plugins/multimedia/wasm/mediacapture/qwasmmediarecorder.cpp13
-rw-r--r--src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp50
-rw-r--r--src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader.cpp7
-rw-r--r--src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder.cpp18
-rw-r--r--src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp5
-rw-r--r--src/resonance-audio/CMakeLists.txt2
244 files changed, 8617 insertions, 4873 deletions
diff --git a/src/3rdparty/pffft/pffft.c b/src/3rdparty/pffft/pffft.c
index 7fe8c4c15..9271a9ad9 100644
--- a/src/3rdparty/pffft/pffft.c
+++ b/src/3rdparty/pffft/pffft.c
@@ -1233,27 +1233,19 @@ PFFFT_Setup *pffft_new_setup(int N, pffft_transform_t transform) {
s->e = (float*)s->data;
s->twiddle = (float*)(s->data + (2*s->Ncvec*(SIMD_SZ-1))/SIMD_SZ);
- if (transform == PFFFT_REAL) {
- for (k=0; k < s->Ncvec; ++k) {
- int i = k/SIMD_SZ;
- int j = k%SIMD_SZ;
- for (m=0; m < SIMD_SZ-1; ++m) {
- float A = -2*M_PI*(m+1)*k / N;
- s->e[(2*(i*3 + m) + 0) * SIMD_SZ + j] = cos(A);
- s->e[(2*(i*3 + m) + 1) * SIMD_SZ + j] = sin(A);
- }
+ for (k=0; k < s->Ncvec; ++k) {
+ int i = k/SIMD_SZ;
+ int j = k%SIMD_SZ;
+ for (m=0; m < SIMD_SZ-1; ++m) {
+ float A = -2*M_PI*(m+1)*k / N;
+ s->e[(2*(i*3 + m) + 0) * SIMD_SZ + j] = cos(A);
+ s->e[(2*(i*3 + m) + 1) * SIMD_SZ + j] = sin(A);
}
+ }
+
+ if (transform == PFFFT_REAL) {
rffti1_ps(N/SIMD_SZ, s->twiddle, s->ifac);
} else {
- for (k=0; k < s->Ncvec; ++k) {
- int i = k/SIMD_SZ;
- int j = k%SIMD_SZ;
- for (m=0; m < SIMD_SZ-1; ++m) {
- float A = -2*M_PI*(m+1)*k / N;
- s->e[(2*(i*3 + m) + 0)*SIMD_SZ + j] = cos(A);
- s->e[(2*(i*3 + m) + 1)*SIMD_SZ + j] = sin(A);
- }
- }
cffti1_ps(N/SIMD_SZ, s->twiddle, s->ifac);
}
@@ -1708,19 +1700,19 @@ void pffft_zconvolve_accumulate(PFFFT_Setup *s, const float *a, const float *b,
# endif
#endif
- float ar, ai, br, bi, abr, abi;
+ float ar0, ai0, br0, bi0, abr0, abi0;
#ifndef ZCONVOLVE_USING_INLINE_ASM
v4sf vscal = LD_PS1(scaling);
int i;
#endif
assert(VALIGNED(a) && VALIGNED(b) && VALIGNED(ab));
- ar = ((v4sf_union*)va)[0].f[0];
- ai = ((v4sf_union*)va)[1].f[0];
- br = ((v4sf_union*)vb)[0].f[0];
- bi = ((v4sf_union*)vb)[1].f[0];
- abr = ((v4sf_union*)vab)[0].f[0];
- abi = ((v4sf_union*)vab)[1].f[0];
+ ar0 = ((v4sf_union*)va)[0].f[0];
+ ai0 = ((v4sf_union*)va)[1].f[0];
+ br0 = ((v4sf_union*)vb)[0].f[0];
+ bi0 = ((v4sf_union*)vb)[1].f[0];
+ abr0 = ((v4sf_union*)vab)[0].f[0];
+ abi0 = ((v4sf_union*)vab)[1].f[0];
#ifdef ZCONVOLVE_USING_INLINE_ASM // inline asm version, unfortunately miscompiled by clang 3.2, at least on ubuntu.. so this will be restricted to gcc
const float *a_ = a, *b_ = b; float *ab_ = ab;
@@ -1774,8 +1766,8 @@ void pffft_zconvolve_accumulate(PFFFT_Setup *s, const float *a, const float *b,
}
#endif
if (s->transform == PFFFT_REAL) {
- ((v4sf_union*)vab)[0].f[0] = abr + ar*br*scaling;
- ((v4sf_union*)vab)[1].f[0] = abi + ai*bi*scaling;
+ ((v4sf_union*)vab)[0].f[0] = abr0 + ar0*br0*scaling;
+ ((v4sf_union*)vab)[1].f[0] = abi0 + ai0*bi0*scaling;
}
}
diff --git a/src/3rdparty/pffft/qt_attribution.json b/src/3rdparty/pffft/qt_attribution.json
index 19c6a25d2..2cf2c7264 100644
--- a/src/3rdparty/pffft/qt_attribution.json
+++ b/src/3rdparty/pffft/qt_attribution.json
@@ -7,8 +7,8 @@
"SecurityCritical": true,
"Homepage": "https://bitbucket.org/jpommier/pffft.git",
- "Version": "7641e67977cf937c22849e2ef19fffa70269e562",
- "DownloadLocation": "https://bitbucket.org/jpommier/pffft/get/38946c766c1a.zip",
+ "Version": "fbc4058602803f40dc554b8a5d2bcc694c005f2f",
+ "DownloadLocation": "https://bitbucket.org/jpommier/pffft/get/fbc405860280.zip",
"CopyrightFile": "COPYRIGHTS",
"License": "BSD 3-Clause \"New\" or \"Revised\" License",
"LicenseId": "BSD-3-Clause",
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index d344ae211..7d5f3b8e8 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -8,7 +8,7 @@ add_subdirectory(multimedia)
if(ANDROID)
add_subdirectory(android)
endif()
-if (QT_FEATURE_spatialaudio)
+if(QT_FEATURE_spatialaudio)
add_subdirectory(spatialaudio)
endif()
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
index 83d704838..2b6fcc2dc 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
@@ -4,20 +4,16 @@
package org.qtproject.qt.android.multimedia;
import java.util.ArrayList;
-import android.bluetooth.BluetoothA2dp;
-import android.bluetooth.BluetoothAdapter;
-import android.bluetooth.BluetoothDevice;
-import android.bluetooth.BluetoothHeadset;
-import android.content.BroadcastReceiver;
import android.content.Context;
-import android.content.Intent;
-import android.content.IntentFilter;
+import android.media.AudioDeviceCallback;
import android.media.AudioDeviceInfo;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.media.MediaRecorder;
+import android.os.Handler;
+import android.os.Looper;
import android.util.Log;
public class QtAudioDeviceManager
@@ -25,10 +21,12 @@ public class QtAudioDeviceManager
private static final String TAG = "QtAudioDeviceManager";
static private AudioManager m_audioManager = null;
static private final AudioDevicesReceiver m_audioDevicesReceiver = new AudioDevicesReceiver();
+ static private Handler handler = new Handler(Looper.getMainLooper());
static private AudioRecord m_recorder = null;
static private AudioTrack m_streamPlayer = null;
static private Thread m_streamingThread = null;
static private boolean m_isStreaming = false;
+ static private boolean m_useSpeaker = false;
static private final int m_sampleRate = 8000;
static private final int m_channels = AudioFormat.CHANNEL_CONFIGURATION_MONO;
static private final int m_audioFormat = AudioFormat.ENCODING_PCM_16BIT;
@@ -37,36 +35,37 @@ public class QtAudioDeviceManager
public static native void onAudioInputDevicesUpdated();
public static native void onAudioOutputDevicesUpdated();
- static private class AudioDevicesReceiver extends BroadcastReceiver
- {
+ static private void updateDeviceList() {
+ onAudioInputDevicesUpdated();
+ onAudioOutputDevicesUpdated();
+ if (m_useSpeaker) {
+ final AudioDeviceInfo[] audioDevices =
+ m_audioManager.getDevices(AudioManager.GET_DEVICES_OUTPUTS);
+ setAudioOutput(getModeForSpeaker(audioDevices), false, true);
+ }
+ }
+
+ private static class AudioDevicesReceiver extends AudioDeviceCallback {
+ @Override
+ public void onAudioDevicesAdded(AudioDeviceInfo[] addedDevices) {
+ updateDeviceList();
+ }
+
@Override
- public void onReceive(Context context, Intent intent) {
- onAudioInputDevicesUpdated();
- onAudioOutputDevicesUpdated();
+ public void onAudioDevicesRemoved(AudioDeviceInfo[] removedDevices) {
+ updateDeviceList();
}
}
- public static void registerAudioHeadsetStateReceiver(Context context)
+
+ public static void registerAudioHeadsetStateReceiver()
{
- IntentFilter audioDevicesFilter = new IntentFilter();
- audioDevicesFilter.addAction(AudioManager.ACTION_HEADSET_PLUG);
- audioDevicesFilter.addAction(AudioManager.ACTION_HDMI_AUDIO_PLUG);
- audioDevicesFilter.addAction(BluetoothDevice.ACTION_ACL_CONNECTED);
- audioDevicesFilter.addAction(BluetoothDevice.ACTION_ACL_DISCONNECTED);
- audioDevicesFilter.addAction(BluetoothDevice.ACTION_ACL_DISCONNECT_REQUESTED);
- audioDevicesFilter.addAction(BluetoothAdapter.ACTION_STATE_CHANGED);
- audioDevicesFilter.addAction(BluetoothDevice.ACTION_BOND_STATE_CHANGED);
- audioDevicesFilter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED);
- audioDevicesFilter.addAction(AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED);
- audioDevicesFilter.addAction(BluetoothA2dp.ACTION_CONNECTION_STATE_CHANGED);
- audioDevicesFilter.addAction(BluetoothA2dp.ACTION_PLAYING_STATE_CHANGED);
-
- context.registerReceiver(m_audioDevicesReceiver, audioDevicesFilter);
+ m_audioManager.registerAudioDeviceCallback(m_audioDevicesReceiver, handler);
}
- public static void unregisterAudioHeadsetStateReceiver(Context context)
+ public static void unregisterAudioHeadsetStateReceiver()
{
- context.unregisterReceiver(m_audioDevicesReceiver);
+ m_audioManager.unregisterAudioDeviceCallback(m_audioDevicesReceiver);
}
static public void setContext(Context context)
@@ -226,8 +225,27 @@ public class QtAudioDeviceManager
return ret;
}
+ private static int getModeForSpeaker(AudioDeviceInfo[] audioDevices)
+ {
+        // If we want to force the device to use the speaker when a Bluetooth or wired headset is
+        // connected, we need to use MODE_IN_COMMUNICATION. Otherwise MODE_NORMAL can be used.
+ for (AudioDeviceInfo deviceInfo : audioDevices) {
+ switch (deviceInfo.getType()) {
+ case AudioDeviceInfo.TYPE_BLUETOOTH_A2DP:
+ case AudioDeviceInfo.TYPE_BLUETOOTH_SCO:
+ case AudioDeviceInfo.TYPE_WIRED_HEADSET:
+ case AudioDeviceInfo.TYPE_WIRED_HEADPHONES:
+ return AudioManager.MODE_IN_COMMUNICATION;
+ default: break;
+ }
+ }
+ return AudioManager.MODE_NORMAL;
+ }
+
+
private static boolean setAudioOutput(int id)
{
+ m_useSpeaker = false;
final AudioDeviceInfo[] audioDevices =
m_audioManager.getDevices(AudioManager.GET_DEVICES_OUTPUTS);
for (AudioDeviceInfo deviceInfo : audioDevices) {
@@ -239,7 +257,8 @@ public class QtAudioDeviceManager
setAudioOutput(AudioManager.MODE_IN_COMMUNICATION, true, false);
return true;
case AudioDeviceInfo.TYPE_BUILTIN_SPEAKER:
- setAudioOutput(AudioManager.STREAM_MUSIC, false, true);
+ m_useSpeaker = true;
+ setAudioOutput(getModeForSpeaker(audioDevices), false, true);
return true;
case AudioDeviceInfo.TYPE_WIRED_HEADSET:
case AudioDeviceInfo.TYPE_WIRED_HEADPHONES:
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java
index 39feff6c7..ac8140197 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java
@@ -24,6 +24,7 @@ import android.graphics.ImageFormat;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
+import android.util.Range;
import android.view.Surface;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
@@ -57,11 +58,12 @@ public class QtCamera2 {
private int mState = STATE_PREVIEW;
private Object mStartMutex = new Object();
private boolean mIsStarted = false;
- private static int MaxNumberFrames = 10;
+ private static int MaxNumberFrames = 12;
private int mFlashMode = CaptureRequest.CONTROL_AE_MODE_ON;
private int mTorchMode = CameraMetadata.FLASH_MODE_OFF;
private int mAFMode = CaptureRequest.CONTROL_AF_MODE_OFF;
private float mZoomFactor = 1.0f;
+ private Range<Integer> mFpsRange = null;
private QtExifDataHandler mExifDataHandler = null;
native void onCameraOpened(String cameraId);
@@ -261,7 +263,14 @@ public class QtCamera2 {
}
};
- public boolean addImageReader(int width, int height, int format) {
+
+ public void prepareCamera(int width, int height, int format, int minFps, int maxFps) {
+
+ addImageReader(width, height, format);
+ setFrameRate(minFps, maxFps);
+ }
+
+ private void addImageReader(int width, int height, int format) {
if (mImageReader != null)
removeSurface(mImageReader.getSurface());
@@ -276,8 +285,14 @@ public class QtCamera2 {
mCapturedPhotoReader = ImageReader.newInstance(width, height, format, MaxNumberFrames);
mCapturedPhotoReader.setOnImageAvailableListener(mOnPhotoAvailableListener, mBackgroundHandler);
addSurface(mCapturedPhotoReader.getSurface());
+ }
+
+ private void setFrameRate(int minFrameRate, int maxFrameRate) {
- return true;
+ if (minFrameRate <= 0 || maxFrameRate <= 0)
+ mFpsRange = null;
+ else
+ mFpsRange = new Range<>(minFrameRate, maxFrameRate);
}
public boolean addSurface(Surface surface) {
@@ -335,7 +350,8 @@ public class QtCamera2 {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT, CameraMetadata.CONTROL_CAPTURE_INTENT_VIDEO_RECORD);
if (mZoomFactor != 1.0f)
mPreviewRequestBuilder.set(CaptureRequest.SCALER_CROP_REGION, getScalerCropRegion());
-
+ if (mFpsRange != null)
+ mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, mFpsRange);
mPreviewRequest = mPreviewRequestBuilder.build();
mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mBackgroundHandler);
mIsStarted = true;
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtVideoDeviceManager.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtVideoDeviceManager.java
index b3ba8f3dc..2e11e62a2 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtVideoDeviceManager.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtVideoDeviceManager.java
@@ -13,6 +13,7 @@ import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaCodecList;
import android.media.MediaCodecInfo;
+import android.os.Build;
import android.util.Range;
import android.util.Size;
import android.util.Log;
@@ -137,6 +138,7 @@ public class QtVideoDeviceManager {
return activeArraySize;
}
+ static final int maxResolution = 3840*2160; // 4k resolution
public String[] getStreamConfigurationsSizes(String cameraId, int imageFormat) {
CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
@@ -148,13 +150,14 @@ public class QtVideoDeviceManager {
if (sizes == null)
return new String[0];
- String[] stream = new String[sizes.length];
+ ArrayList<String> stream = new ArrayList<>();
for (int index = 0; index < sizes.length; index++) {
- stream[index] = sizes[index].toString();
+ if (sizes[index].getWidth() * sizes[index].getHeight() <= maxResolution)
+ stream.add(sizes[index].toString());
}
- return stream;
+ return stream.toArray(new String[0]);
}
public int stringToControlAEMode(String mode) {
@@ -217,6 +220,23 @@ public class QtVideoDeviceManager {
return supportedFlashModesList.toArray(ret);
}
+ static public boolean isEmulator()
+ {
+ return ((Build.BRAND.startsWith("generic") && Build.DEVICE.startsWith("generic"))
+ || Build.FINGERPRINT.startsWith("generic")
+ || Build.FINGERPRINT.startsWith("unknown")
+ || Build.HARDWARE.contains("goldfish")
+ || Build.HARDWARE.contains("ranchu")
+ || Build.MODEL.contains("google_sdk")
+ || Build.MODEL.contains("Emulator")
+ || Build.MODEL.contains("Android SDK built for x86")
+ || Build.MANUFACTURER.contains("Genymotion")
+ || Build.PRODUCT.contains("sdk")
+ || Build.PRODUCT.contains("vbox86p")
+ || Build.PRODUCT.contains("emulator")
+ || Build.PRODUCT.contains("simulator"));
+ }
+
public boolean isTorchModeSupported(String cameraId) {
boolean ret = false;
final CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
diff --git a/src/multimedia/CMakeLists.txt b/src/multimedia/CMakeLists.txt
index 1cca4c2de..a17b1bfc7 100644
--- a/src/multimedia/CMakeLists.txt
+++ b/src/multimedia/CMakeLists.txt
@@ -23,6 +23,8 @@ qt_internal_add_module(Multimedia
audio/qaudiodecoder.cpp audio/qaudiodecoder.h audio/qaudiodecoder_p.h
audio/qaudiodevice.cpp audio/qaudiodevice.h audio/qaudiodevice_p.h
audio/qaudioinput.cpp audio/qaudioinput.h
+ audio/qaudiobufferinput.cpp audio/qaudiobufferinput.h
+ audio/qaudiobufferoutput.cpp audio/qaudiobufferoutput.h audio/qaudiobufferoutput_p.h
audio/qaudiooutput.cpp audio/qaudiooutput.h
audio/qaudioformat.cpp audio/qaudioformat.h
audio/qaudiohelpers.cpp audio/qaudiohelpers_p.h
@@ -38,25 +40,28 @@ qt_internal_add_module(Multimedia
camera/qcameradevice.cpp camera/qcameradevice.h camera/qcameradevice_p.h
camera/qimagecapture.cpp camera/qimagecapture.h
compat/removed_api.cpp
+ platform/qgstreamer_platformspecificinterface.cpp platform/qgstreamer_platformspecificinterface_p.h
platform/qplatformaudiodecoder.cpp platform/qplatformaudiodecoder_p.h
platform/qplatformaudioinput_p.h
platform/qplatformaudiooutput_p.h
platform/qplatformaudioresampler_p.h
platform/qplatformcamera.cpp platform/qplatformcamera_p.h
- platform/qplatformvideosource.cpp platform/qplatformvideosource_p.h
- platform/qplatformsurfacecapture.cpp platform/qplatformsurfacecapture_p.h
+ platform/qplatformcapturablewindows_p.h
platform/qplatformimagecapture.cpp platform/qplatformimagecapture_p.h
platform/qplatformmediacapture.cpp platform/qplatformmediacapture_p.h
platform/qplatformmediadevices.cpp platform/qplatformmediadevices_p.h
- platform/qplatformmediarecorder.cpp platform/qplatformmediarecorder_p.h
platform/qplatformmediaformatinfo.cpp platform/qplatformmediaformatinfo_p.h
platform/qplatformmediaintegration.cpp platform/qplatformmediaintegration_p.h
platform/qplatformmediaplayer.cpp platform/qplatformmediaplayer_p.h
platform/qplatformmediaplugin.cpp platform/qplatformmediaplugin_p.h
+ platform/qplatformmediarecorder.cpp platform/qplatformmediarecorder_p.h
+ platform/qplatformsurfacecapture.cpp platform/qplatformsurfacecapture_p.h
platform/qplatformvideodevices.cpp platform/qplatformvideodevices_p.h
platform/qplatformvideosink.cpp platform/qplatformvideosink_p.h
+ platform/qplatformvideosource.cpp platform/qplatformvideosource_p.h
+ platform/qplatformvideoframeinput.cpp platform/qplatformvideoframeinput_p.h
+ platform/qplatformaudiobufferinput.cpp platform/qplatformaudiobufferinput_p.h
playback/qmediaplayer.cpp playback/qmediaplayer.h playback/qmediaplayer_p.h
- platform/qplatformcapturablewindows_p.h
qmediadevices.cpp qmediadevices.h
qmediaenumdebug.h
qmediaformat.cpp qmediaformat.h
@@ -64,15 +69,19 @@ qt_internal_add_module(Multimedia
qmediastoragelocation.cpp qmediastoragelocation_p.h
qmediatimerange.cpp qmediatimerange.h
qmultimediautils.cpp qmultimediautils_p.h
+ qmediaframeinput.cpp qmediaframeinput_p.h
qmaybe_p.h
qtmultimediaglobal.h qtmultimediaglobal_p.h
qerrorinfo_p.h
- recording/qmediacapturesession.cpp recording/qmediacapturesession.h
+ qmediainputencoderinterface_p.h
+ recording/qmediacapturesession.cpp recording/qmediacapturesession.h recording/qmediacapturesession_p.h
recording/qmediarecorder.cpp recording/qmediarecorder.h recording/qmediarecorder_p.h
recording/qscreencapture.cpp recording/qscreencapture.h
recording/qwindowcapture.cpp recording/qwindowcapture.h
recording/qcapturablewindow.cpp recording/qcapturablewindow.h recording/qcapturablewindow_p.h
- video/qabstractvideobuffer.cpp video/qabstractvideobuffer_p.h
+ recording/qvideoframeinput.cpp recording/qvideoframeinput.h
+ video/qabstractvideobuffer.cpp video/qabstractvideobuffer.h
+ video/qhwvideobuffer.cpp video/qhwvideobuffer_p.h
video/qmemoryvideobuffer.cpp video/qmemoryvideobuffer_p.h
video/qimagevideobuffer.cpp video/qimagevideobuffer_p.h
video/qvideoframe.cpp video/qvideoframe.h video/qvideoframe_p.h
@@ -107,6 +116,10 @@ qt_internal_add_module(Multimedia
GENERATE_CPP_EXPORTS
)
+qt_internal_extend_target(Multimedia
+ CONDITION LINUX OR ANDROID
+ SOURCES qsymbolsresolveutils.cpp qsymbolsresolveutils_p.h)
+
qt_internal_add_simd_part(Multimedia SIMD sse2
SOURCES
video/qvideoframeconversionhelper_sse2.cpp
@@ -167,7 +180,7 @@ qt_internal_extend_target(Multimedia CONDITION ANDROID
OpenSLES
)
-if (ANDROID)
+if(ANDROID)
set_property(TARGET Multimedia APPEND PROPERTY QT_ANDROID_BUNDLED_JAR_DEPENDENCIES
jar/Qt${QtMultimedia_VERSION_MAJOR}AndroidMultimedia.jar:org.qtproject.qt.android.multimedia.QtAudioDeviceManager
)
@@ -355,7 +368,7 @@ qt_internal_add_shaders(Multimedia "qtmultimedia_shaders_gl_macos_linear"
"shaders/rectsampler_bgra_linear.frag.qsb"
)
-if (DEFINED QT_DEFAULT_MEDIA_BACKEND)
+if(DEFINED QT_DEFAULT_MEDIA_BACKEND)
target_compile_definitions(Multimedia
PRIVATE QT_DEFAULT_MEDIA_BACKEND="${QT_DEFAULT_MEDIA_BACKEND}")
endif()
diff --git a/src/multimedia/alsa/qalsaaudiodevice.cpp b/src/multimedia/alsa/qalsaaudiodevice.cpp
index f5d4a2209..893375270 100644
--- a/src/multimedia/alsa/qalsaaudiodevice.cpp
+++ b/src/multimedia/alsa/qalsaaudiodevice.cpp
@@ -37,55 +37,35 @@ QAlsaAudioDeviceInfo::QAlsaAudioDeviceInfo(const QByteArray &dev, const QString
minimumSampleRate = 8000;
maximumSampleRate = 48000;
- supportedSampleFormats << QAudioFormat::UInt8 << QAudioFormat::Int16 << QAudioFormat::Int32 << QAudioFormat::Float;
+ supportedSampleFormats = {
+ QAudioFormat::UInt8,
+ QAudioFormat::Int16,
+ QAudioFormat::Int32,
+ QAudioFormat::Float,
+ };
preferredFormat.setChannelCount(mode == QAudioDevice::Input ? 1 : 2);
preferredFormat.setSampleFormat(QAudioFormat::Float);
preferredFormat.setSampleRate(48000);
}
-QAlsaAudioDeviceInfo::~QAlsaAudioDeviceInfo()
-{
-}
+QAlsaAudioDeviceInfo::~QAlsaAudioDeviceInfo() = default;
void QAlsaAudioDeviceInfo::checkSurround()
{
+ if (mode != QAudioDevice::Output)
+ return;
+
surround40 = false;
surround51 = false;
surround71 = false;
- void **hints, **n;
- char *name, *descr, *io;
-
- if(snd_device_name_hint(-1, "pcm", &hints) < 0)
- return;
-
- n = hints;
-
- while (*n != NULL) {
- name = snd_device_name_get_hint(*n, "NAME");
- descr = snd_device_name_get_hint(*n, "DESC");
- io = snd_device_name_get_hint(*n, "IOID");
- if((name != NULL) && (descr != NULL)) {
- QString deviceName = QLatin1String(name);
- if (mode == QAudioDevice::Output) {
- if(deviceName.contains(QLatin1String("surround40")))
- surround40 = true;
- if(deviceName.contains(QLatin1String("surround51")))
- surround51 = true;
- if(deviceName.contains(QLatin1String("surround71")))
- surround71 = true;
- }
- }
- if(name != NULL)
- free(name);
- if(descr != NULL)
- free(descr);
- if(io != NULL)
- free(io);
- ++n;
- }
- snd_device_name_free_hint(hints);
+ if (id.startsWith(QLatin1String("surround40")))
+ surround40 = true;
+ if (id.startsWith(QLatin1String("surround51")))
+ surround51 = true;
+ if (id.startsWith(QLatin1String("surround71")))
+ surround71 = true;
}
QT_END_NAMESPACE
diff --git a/src/multimedia/alsa/qalsaaudiodevice_p.h b/src/multimedia/alsa/qalsaaudiodevice_p.h
index f82ea4f5a..dcbc9e692 100644
--- a/src/multimedia/alsa/qalsaaudiodevice_p.h
+++ b/src/multimedia/alsa/qalsaaudiodevice_p.h
@@ -38,9 +38,9 @@ public:
private:
void checkSurround();
- bool surround40;
- bool surround51;
- bool surround71;
+ bool surround40{};
+ bool surround51{};
+ bool surround71{};
};
QT_END_NAMESPACE
diff --git a/src/multimedia/alsa/qalsaaudiosink.cpp b/src/multimedia/alsa/qalsaaudiosink.cpp
index 98a68861f..e515219a2 100644
--- a/src/multimedia/alsa/qalsaaudiosink.cpp
+++ b/src/multimedia/alsa/qalsaaudiosink.cpp
@@ -30,13 +30,13 @@ QAlsaAudioSink::QAlsaAudioSink(const QByteArray &device, QObject *parent)
m_device = device;
timer = new QTimer(this);
- connect(timer, SIGNAL(timeout()), this, SLOT(userFeed()));
+ connect(timer, &QTimer::timeout, this, &QAlsaAudioSink::userFeed);
}
QAlsaAudioSink::~QAlsaAudioSink()
{
close();
- disconnect(timer, SIGNAL(timeout()));
+ disconnect(timer, &QTimer::timeout, this, &QAlsaAudioSink::userFeed);
QCoreApplication::processEvents();
delete timer;
}
@@ -130,6 +130,7 @@ int QAlsaAudioSink::setFormat()
pcmformat = SND_PCM_FORMAT_FLOAT_BE;
else
pcmformat = SND_PCM_FORMAT_FLOAT_LE;
+ break;
default:
break;
}
diff --git a/src/multimedia/alsa/qalsaaudiosink_p.h b/src/multimedia/alsa/qalsaaudiosink_p.h
index 7e8836f96..0f5a5aa5a 100644
--- a/src/multimedia/alsa/qalsaaudiosink_p.h
+++ b/src/multimedia/alsa/qalsaaudiosink_p.h
@@ -96,7 +96,6 @@ private:
char* audioBuffer = nullptr;
snd_pcm_t* handle = nullptr;
snd_pcm_access_t access = SND_PCM_ACCESS_RW_INTERLEAVED;
- snd_pcm_format_t pcmformat = SND_PCM_FORMAT_S16;
snd_pcm_hw_params_t *hwparams = nullptr;
qreal m_volume = 1.0f;
};
diff --git a/src/multimedia/alsa/qalsaaudiosource.cpp b/src/multimedia/alsa/qalsaaudiosource.cpp
index ce099463d..ebf6e24e2 100644
--- a/src/multimedia/alsa/qalsaaudiosource.cpp
+++ b/src/multimedia/alsa/qalsaaudiosource.cpp
@@ -16,7 +16,6 @@
#include <QtCore/qvarlengtharray.h>
#include <QtMultimedia/private/qaudiohelpers_p.h>
#include "qalsaaudiosource_p.h"
-#include "qalsaaudiodevice_p.h"
QT_BEGIN_NAMESPACE
@@ -45,13 +44,13 @@ QAlsaAudioSource::QAlsaAudioSource(const QByteArray &device, QObject *parent)
m_device = device;
timer = new QTimer(this);
- connect(timer, SIGNAL(timeout()), this, SLOT(userFeed()));
+ connect(timer, &QTimer::timeout, this, &QAlsaAudioSource::userFeed);
}
QAlsaAudioSource::~QAlsaAudioSource()
{
close();
- disconnect(timer, SIGNAL(timeout()));
+ disconnect(timer, &QTimer::timeout, this, &QAlsaAudioSource::userFeed);
QCoreApplication::processEvents();
delete timer;
}
@@ -143,21 +142,22 @@ int QAlsaAudioSource::setFormat()
break;
case QAudioFormat::Int16:
if constexpr (QSysInfo::ByteOrder == QSysInfo::BigEndian)
- pcmformat = SND_PCM_FORMAT_S16_LE;
- else
pcmformat = SND_PCM_FORMAT_S16_BE;
+ else
+ pcmformat = SND_PCM_FORMAT_S16_LE;
break;
case QAudioFormat::Int32:
if constexpr (QSysInfo::ByteOrder == QSysInfo::BigEndian)
- pcmformat = SND_PCM_FORMAT_S32_LE;
- else
pcmformat = SND_PCM_FORMAT_S32_BE;
+ else
+ pcmformat = SND_PCM_FORMAT_S32_LE;
break;
case QAudioFormat::Float:
if constexpr (QSysInfo::ByteOrder == QSysInfo::BigEndian)
- pcmformat = SND_PCM_FORMAT_FLOAT_LE;
- else
pcmformat = SND_PCM_FORMAT_FLOAT_BE;
+ else
+ pcmformat = SND_PCM_FORMAT_FLOAT_LE;
+ break;
default:
break;
}
@@ -370,7 +370,7 @@ bool QAlsaAudioSource::open()
bytesAvailable = checkBytesReady();
if(pullMode)
- connect(audioSource,SIGNAL(readyRead()),this,SLOT(userFeed()));
+ connect(audioSource, &QIODevice::readyRead, this, &QAlsaAudioSource::userFeed);
// Step 6: Start audio processing
chunks = buffer_size/period_size;
diff --git a/src/multimedia/alsa/qalsamediadevices.cpp b/src/multimedia/alsa/qalsamediadevices.cpp
index 5a133e9d1..9466fa0cd 100644
--- a/src/multimedia/alsa/qalsamediadevices.cpp
+++ b/src/multimedia/alsa/qalsamediadevices.cpp
@@ -13,6 +13,26 @@
QT_BEGIN_NAMESPACE
+namespace {
+
+struct free_char
+{
+ void operator()(char *c) const { ::free(c); }
+};
+
+using unique_str = std::unique_ptr<char, free_char>;
+
+bool operator==(const unique_str &str, std::string_view sv)
+{
+ return std::string_view{ str.get() } == sv;
+}
+bool operator!=(const unique_str &str, std::string_view sv)
+{
+ return !(str == sv);
+}
+
+} // namespace
+
QAlsaMediaDevices::QAlsaMediaDevices()
: QPlatformMediaDevices()
{
@@ -22,52 +42,50 @@ static QList<QAudioDevice> availableDevices(QAudioDevice::Mode mode)
{
QList<QAudioDevice> devices;
- QByteArray filter;
-
// Create a list of all current audio devices that support mode
- void **hints, **n;
- char *name, *descr, *io;
- bool hasDefault = false;
-
- if(snd_device_name_hint(-1, "pcm", &hints) < 0) {
+ void **hints;
+ if (snd_device_name_hint(-1, "pcm", &hints) < 0) {
qWarning() << "no alsa devices available";
return devices;
}
- n = hints;
- if(mode == QAudioDevice::Input) {
- filter = "Input";
- } else {
- filter = "Output";
- }
+ std::string_view filter = (mode == QAudioDevice::Input) ? "Input" : "Output";
- QAlsaAudioDeviceInfo* sysdefault = nullptr;
+ QAlsaAudioDeviceInfo *sysdefault = nullptr;
- while (*n != NULL) {
- name = snd_device_name_get_hint(*n, "NAME");
- if (name != 0 && qstrcmp(name, "null") != 0) {
- descr = snd_device_name_get_hint(*n, "DESC");
- io = snd_device_name_get_hint(*n, "IOID");
-
- if ((descr != NULL) && ((io == NULL) || (io == filter))) {
- auto *infop = new QAlsaAudioDeviceInfo(name, QString::fromUtf8(descr), mode);
- devices.append(infop->create());
- if (!hasDefault && strcmp(name, "default") == 0) {
- infop->isDefault = true;
- hasDefault = true;
- }
- else if (!sysdefault && !hasDefault && strcmp(name, "sysdefault") == 0) {
- sysdefault = infop;
- }
+ auto makeDeviceInfo = [&filter, mode](void *entry) -> QAlsaAudioDeviceInfo * {
+ unique_str name{ snd_device_name_get_hint(entry, "NAME") };
+ if (name && name != "null") {
+ unique_str descr{ snd_device_name_get_hint(entry, "DESC") };
+ unique_str io{ snd_device_name_get_hint(entry, "IOID") };
+
+ if (descr && (!io || (io == filter))) {
+ auto *infop = new QAlsaAudioDeviceInfo{
+ name.get(),
+ QString::fromUtf8(descr.get()),
+ mode,
+ };
+ return infop;
}
+ }
+ return nullptr;
+ };
+
+ bool hasDefault = false;
+ void **n = hints;
+ while (*n != NULL) {
+ QAlsaAudioDeviceInfo *infop = makeDeviceInfo(*n++);
- free(descr);
- free(io);
+ if (infop) {
+ devices.append(infop->create());
+ if (!hasDefault && infop->id.startsWith("default")) {
+ infop->isDefault = true;
+ hasDefault = true;
+ }
+ if (!sysdefault && infop->id.startsWith("sysdefault"))
+ sysdefault = infop;
}
- free(name);
- ++n;
}
- snd_device_name_free_hint(hints);
if (!hasDefault && sysdefault) {
// Make "sysdefault" the default device if there is no "default" device exists
@@ -75,11 +93,15 @@ static QList<QAudioDevice> availableDevices(QAudioDevice::Mode mode)
hasDefault = true;
}
if (!hasDefault && devices.size() > 0) {
- auto infop = new QAlsaAudioDeviceInfo("default", QString(), QAudioDevice::Output);
- infop->isDefault = true;
- devices.prepend(infop->create());
+ // forcefully declare the first device as "default"
+ QAlsaAudioDeviceInfo *infop = makeDeviceInfo(hints[0]);
+ if (infop) {
+ infop->isDefault = true;
+ devices.prepend(infop->create());
+ }
}
+ snd_device_name_free_hint(hints);
return devices;
}
diff --git a/src/multimedia/android/qandroidmediadevices.cpp b/src/multimedia/android/qandroidmediadevices.cpp
index 55533621c..7688da079 100644
--- a/src/multimedia/android/qandroidmediadevices.cpp
+++ b/src/multimedia/android/qandroidmediadevices.cpp
@@ -23,9 +23,7 @@ Q_DECLARE_JNI_CLASS(QtAudioDeviceManager,
QAndroidMediaDevices::QAndroidMediaDevices() : QPlatformMediaDevices()
{
- QtJniTypes::QtAudioDeviceManager::callStaticMethod<void>(
- "registerAudioHeadsetStateReceiver",
- QNativeInterface::QAndroidApplication::context());
+ QtJniTypes::QtAudioDeviceManager::callStaticMethod<void>("registerAudioHeadsetStateReceiver");
}
QAndroidMediaDevices::~QAndroidMediaDevices()
@@ -33,9 +31,7 @@ QAndroidMediaDevices::~QAndroidMediaDevices()
// Object of QAndroidMediaDevices type is static. Unregistering will happend only when closing
// the application. In such case it is probably not needed, but let's leave it for
// compatibility with Android documentation
- QtJniTypes::QtAudioDeviceManager::callStaticMethod<void>(
- "unregisterAudioHeadsetStateReceiver",
- QNativeInterface::QAndroidApplication::context());
+ QtJniTypes::QtAudioDeviceManager::callStaticMethod<void>("unregisterAudioHeadsetStateReceiver");
}
QList<QAudioDevice> QAndroidMediaDevices::audioInputs() const
diff --git a/src/multimedia/audio/qaudiobufferinput.cpp b/src/multimedia/audio/qaudiobufferinput.cpp
new file mode 100644
index 000000000..0ac72c764
--- /dev/null
+++ b/src/multimedia/audio/qaudiobufferinput.cpp
@@ -0,0 +1,162 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qaudiobufferinput.h"
+#include "qplatformaudiobufferinput_p.h"
+#include "qmediainputencoderinterface_p.h"
+#include "qmediaframeinput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+class QAudioBufferInputPrivate : public QMediaFrameInputPrivate
+{
+public:
+ QAudioBufferInputPrivate(QAudioBufferInput *q) : q(q) { }
+
+ bool sendAudioBuffer(const QAudioBuffer &audioBuffer)
+ {
+ return sendMediaFrame(
+ [&]() { emit m_platfromAudioBufferInput->newAudioBuffer(audioBuffer); });
+ }
+
+ void initialize()
+ {
+ m_platfromAudioBufferInput = std::make_unique<QPlatformAudioBufferInput>();
+ addUpdateSignal(m_platfromAudioBufferInput.get(),
+ &QPlatformAudioBufferInput::encoderUpdated);
+ }
+
+ void uninitialize()
+ {
+ m_platfromAudioBufferInput.reset();
+
+ if (captureSession())
+ captureSession()->setAudioBufferInput(nullptr);
+ }
+
+ QMediaCaptureSession *session() const { return m_captureSession; }
+
+ QPlatformAudioBufferInput *platfromAudioBufferInput() const
+ {
+ return m_platfromAudioBufferInput.get();
+ }
+
+private:
+ void updateCaptureSessionConnections(QMediaCaptureSession *prevSession,
+ QMediaCaptureSession *newSession) override
+ {
+ if (prevSession)
+ removeUpdateSignal(prevSession, &QMediaCaptureSession::audioOutputChanged);
+
+ if (newSession)
+ addUpdateSignal(newSession, &QMediaCaptureSession::audioOutputChanged);
+ }
+
+ bool checkIfCanSendMediaFrame() const override
+ {
+ if (auto encoderInterface = m_platfromAudioBufferInput->encoderInterface())
+ return encoderInterface->canPushFrame();
+
+ // Not implemented yet
+ // return captureSession()->audioOutput() != nullptr;
+ return false;
+ }
+
+ void emitReadyToSendMediaFrame() override { emit q->readyToSendAudioBuffer(); }
+
+private:
+ QAudioBufferInput *q = nullptr;
+ QMediaCaptureSession *m_captureSession = nullptr;
+ std::unique_ptr<QPlatformAudioBufferInput> m_platfromAudioBufferInput;
+};
+
+/*!
+ \class QAudioBufferInput
+ \inmodule QtMultimedia
+ \ingroup multimedia
+ \ingroup multimedia_audio
+ \since 6.8
+
+ \brief The QAudioBufferInput class is used for providing custom audio buffers
+ to \l QMediaRecorder through \l QMediaCaptureSession.
+
+ \sa QMediaRecorder, QMediaCaptureSession
+*/
+
+/*!
+ Constructs a new QAudioBufferInput object with \a parent.
+*/
+QAudioBufferInput::QAudioBufferInput(QObject *parent)
+ : QObject(*new QAudioBufferInputPrivate(this), parent)
+{
+ Q_D(QAudioBufferInput);
+ d->initialize();
+}
+
+/*!
+ Destroys the object.
+ */
+QAudioBufferInput::~QAudioBufferInput()
+{
+ Q_D(QAudioBufferInput);
+ d->uninitialize();
+}
+
+/*!
+ Sends \l QAudioBuffer to \l QMediaRecorder through \l QMediaCaptureSession.
+
+ Returns \c true if the specified \a audioBuffer has been sent successfully
+ to the destination. Returns \c false, if the buffer hasn't been sent,
+ which can happen if the instance is not assigned to
+ \l QMediaCaptureSession, the session doesn't have a media recorder,
+ the media recorder is not started or its queue is full.
+ The signal \l readyToSendAudioBuffer will be sent as soon as
+ the destination is able to handle a new audio buffer.
+
+ Sending of an empty audio buffer is treated by \l QMediaRecorder
+ as an end of the input stream. QMediaRecorder stops the recording
+ automatically if \l QMediaRecorder::autoStop is \c true and
+ all the inputs have reported the end of the stream.
+*/
+bool QAudioBufferInput::sendAudioBuffer(const QAudioBuffer &audioBuffer)
+{
+ Q_D(QAudioBufferInput);
+ return d->sendAudioBuffer(audioBuffer);
+}
+
+/*!
+ Returns the capture session this audio buffer input is connected to, or
+ a \c nullptr if the audio buffer input is not connected to a capture session.
+
+ Use QMediaCaptureSession::setAudioBufferInput() to connect
+ the audio buffer input to a session.
+*/
+QMediaCaptureSession *QAudioBufferInput::captureSession() const
+{
+ Q_D(const QAudioBufferInput);
+ return d->captureSession();
+}
+
+void QAudioBufferInput::setCaptureSession(QMediaCaptureSession *captureSession)
+{
+ Q_D(QAudioBufferInput);
+ d->setCaptureSession(captureSession);
+}
+
+QPlatformAudioBufferInput *QAudioBufferInput::platformAudioBufferInput() const
+{
+ Q_D(const QAudioBufferInput);
+ return d->platfromAudioBufferInput();
+}
+
+/*!
+ \fn void QAudioBufferInput::readyToSendAudioBuffer()
+
+ Signals that a new audio buffer can be sent to the audio buffer input.
+ After receiving the signal, if you have audio data to be sent, invoke \l sendAudioBuffer
+ once or in a loop until it returns \c false.
+
+ \sa sendAudioBuffer()
+*/
+
+QT_END_NAMESPACE
diff --git a/src/multimedia/audio/qaudiobufferinput.h b/src/multimedia/audio/qaudiobufferinput.h
new file mode 100644
index 000000000..92bb8b71a
--- /dev/null
+++ b/src/multimedia/audio/qaudiobufferinput.h
@@ -0,0 +1,44 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QAUDIOBUFFERINPUT_H
+#define QAUDIOBUFFERINPUT_H
+
+#include <QtMultimedia/qtmultimediaexports.h>
+#include <QtMultimedia/qaudiobuffer.h>
+#include <QtCore/qobject.h>
+
+QT_BEGIN_NAMESPACE
+
+class QPlatformAudioBufferInput;
+class QAudioBufferInputPrivate;
+class QMediaCaptureSession;
+
+class Q_MULTIMEDIA_EXPORT QAudioBufferInput : public QObject
+{
+ Q_OBJECT
+public:
+ explicit QAudioBufferInput(QObject *parent = nullptr);
+
+ ~QAudioBufferInput() override;
+
+ bool sendAudioBuffer(const QAudioBuffer &audioBuffer);
+
+ QMediaCaptureSession *captureSession() const;
+
+Q_SIGNALS:
+ void readyToSendAudioBuffer();
+
+private:
+ void setCaptureSession(QMediaCaptureSession *captureSession);
+
+ QPlatformAudioBufferInput *platformAudioBufferInput() const;
+
+ friend class QMediaCaptureSession;
+ Q_DISABLE_COPY(QAudioBufferInput)
+ Q_DECLARE_PRIVATE(QAudioBufferInput)
+};
+
+QT_END_NAMESPACE
+
+#endif // QAUDIOBUFFERINPUT_H
diff --git a/src/multimedia/audio/qaudiobufferoutput.cpp b/src/multimedia/audio/qaudiobufferoutput.cpp
new file mode 100644
index 000000000..50389c49a
--- /dev/null
+++ b/src/multimedia/audio/qaudiobufferoutput.cpp
@@ -0,0 +1,78 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qaudiobufferoutput_p.h"
+#include "qmediaplayer.h"
+
+QT_BEGIN_NAMESPACE
+
+/*!
+ \class QAudioBufferOutput
+ \inmodule QtMultimedia
+ \ingroup multimedia
+ \ingroup multimedia_audio
+ \since 6.8
+
+ \brief The QAudioBufferOutput class is used for capturing audio data provided by \l QMediaPlayer.
+
+ QAudioBufferOutput can be set to QMediaPlayer in order to receive audio buffers
+ decoded by the media player. The received audio data can be used for any
+ processing or visualization.
+
+ \sa QMediaPlayer, QMediaPlayer::setAudioBufferOutput, QAudioBuffer
+*/
+
+/*!
+ Constructs a new QAudioBufferOutput object with \a parent.
+
+ The audio format of output audio buffers will depend on
+ the source media file and the inner audio decoder in \l QMediaPlayer.
+*/
+QAudioBufferOutput::QAudioBufferOutput(QObject *parent)
+ : QObject(*new QAudioBufferOutputPrivate, parent)
+{
+}
+
+/*!
+ Constructs a new QAudioBufferOutput object with audio \a format and \a parent.
+
+ If the specified \a format is valid, it will be the format of output
+ audio buffers. Otherwise, the format of output audio buffers
+ will depend on the source media file and the inner audio decoder in \l QMediaPlayer.
+*/
+QAudioBufferOutput::QAudioBufferOutput(const QAudioFormat &format, QObject *parent)
+ : QObject(*new QAudioBufferOutputPrivate(format), parent)
+{
+}
+
+/*!
+ Destroys the audio buffer output object.
+*/
+QAudioBufferOutput::~QAudioBufferOutput()
+{
+ Q_D(QAudioBufferOutput);
+
+ if (d->mediaPlayer)
+ d->mediaPlayer->setAudioBufferOutput(nullptr);
+}
+
+/*!
+ Gets the audio format specified in the constructor.
+
+ If the format is valid, it specifies the format of output audio buffers.
+*/
+QAudioFormat QAudioBufferOutput::format() const
+{
+ Q_D(const QAudioBufferOutput);
+ return d->format;
+}
+
+/*!
+ \fn void QAudioBufferOutput::audioBufferReceived(const QAudioBuffer &buffer)
+
+ Signals that a new audio \a buffer has been received from \l QMediaPlayer.
+*/
+
+QT_END_NAMESPACE
+
+#include "moc_qaudiobufferoutput.cpp"
diff --git a/src/multimedia/audio/qaudiobufferoutput.h b/src/multimedia/audio/qaudiobufferoutput.h
new file mode 100644
index 000000000..2e4fab1a4
--- /dev/null
+++ b/src/multimedia/audio/qaudiobufferoutput.h
@@ -0,0 +1,37 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QAUDIOBUFFEROUTPUT_H
+#define QAUDIOBUFFEROUTPUT_H
+
+#include <QtMultimedia/qtmultimediaexports.h>
+#include <QtMultimedia/qaudiobuffer.h>
+#include <QtCore/qobject.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAudioBufferOutputPrivate;
+
+class Q_MULTIMEDIA_EXPORT QAudioBufferOutput : public QObject
+{
+ Q_OBJECT
+public:
+ explicit QAudioBufferOutput(QObject *parent = nullptr);
+
+ explicit QAudioBufferOutput(const QAudioFormat &format, QObject *parent = nullptr);
+
+ ~QAudioBufferOutput() override;
+
+ QAudioFormat format() const;
+
+Q_SIGNALS:
+ void audioBufferReceived(const QAudioBuffer &buffer);
+
+private:
+ Q_DISABLE_COPY(QAudioBufferOutput)
+ Q_DECLARE_PRIVATE(QAudioBufferOutput)
+};
+
+QT_END_NAMESPACE
+
+#endif // QAUDIOBUFFEROUTPUT_H
diff --git a/src/multimedia/audio/qaudiobufferoutput_p.h b/src/multimedia/audio/qaudiobufferoutput_p.h
new file mode 100644
index 000000000..2f9c11bd1
--- /dev/null
+++ b/src/multimedia/audio/qaudiobufferoutput_p.h
@@ -0,0 +1,42 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QAUDIOBUFFEROUTPUT_P_H
+#define QAUDIOBUFFEROUTPUT_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/private/qobject_p.h>
+#include "qaudiobufferoutput.h"
+
+QT_BEGIN_NAMESPACE
+
+class QMediaPlayer;
+
+class QAudioBufferOutputPrivate : public QObjectPrivate
+{
+public:
+ QAudioBufferOutputPrivate(const QAudioFormat &format = {}) : format(std::move(format)) { }
+
+ static QMediaPlayer *exchangeMediaPlayer(QAudioBufferOutput &output, QMediaPlayer *player)
+ {
+ auto outputPrivate = static_cast<QAudioBufferOutputPrivate *>(output.d_func());
+ return std::exchange(outputPrivate->mediaPlayer, player);
+ }
+
+ QAudioFormat format;
+ QMediaPlayer *mediaPlayer = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif // QAUDIOBUFFEROUTPUT_P_H
diff --git a/src/multimedia/audio/qsamplecache_p.cpp b/src/multimedia/audio/qsamplecache_p.cpp
index 825c79685..b4be09f72 100644
--- a/src/multimedia/audio/qsamplecache_p.cpp
+++ b/src/multimedia/audio/qsamplecache_p.cpp
@@ -357,12 +357,13 @@ void QSample::load()
Q_ASSERT(QThread::currentThread()->objectName() == QLatin1String("QSampleCache::LoadingThread"));
#endif
qCDebug(qLcSampleCache) << "QSample: load [" << m_url << "]";
- m_stream = m_parent->networkAccessManager().get(QNetworkRequest(m_url));
- connect(m_stream, SIGNAL(errorOccurred(QNetworkReply::NetworkError)), SLOT(loadingError(QNetworkReply::NetworkError)));
+ QNetworkReply *reply = m_parent->networkAccessManager().get(QNetworkRequest(m_url));
+ m_stream = reply;
+ connect(reply, &QNetworkReply::errorOccurred, this, &QSample::loadingError);
m_waveDecoder = new QWaveDecoder(m_stream);
- connect(m_waveDecoder, SIGNAL(formatKnown()), SLOT(decoderReady()));
- connect(m_waveDecoder, SIGNAL(parsingError()), SLOT(decoderError()));
- connect(m_waveDecoder, SIGNAL(readyRead()), SLOT(readSample()));
+ connect(m_waveDecoder, &QWaveDecoder::formatKnown, this, &QSample::decoderReady);
+ connect(m_waveDecoder, &QWaveDecoder::parsingError, this, &QSample::decoderError);
+ connect(m_waveDecoder, &QIODevice::readyRead, this, &QSample::readSample);
m_waveDecoder->open(QIODevice::ReadOnly);
}
diff --git a/src/multimedia/audio/qwavedecoder.cpp b/src/multimedia/audio/qwavedecoder.cpp
index 0df50bcbf..452363ddc 100644
--- a/src/multimedia/audio/qwavedecoder.cpp
+++ b/src/multimedia/audio/qwavedecoder.cpp
@@ -56,7 +56,7 @@ bool QWaveDecoder::open(QIODevice::OpenMode mode)
if (canOpen && enoughDataAvailable())
handleData();
else
- connect(device, SIGNAL(readyRead()), SLOT(handleData()));
+ connect(device, &QIODevice::readyRead, this, &QWaveDecoder::handleData);
return canOpen;
}
@@ -91,6 +91,10 @@ qint64 QWaveDecoder::pos() const
return device->pos();
}
+void QWaveDecoder::setIODevice(QIODevice * /* device */)
+{
+}
+
QAudioFormat QWaveDecoder::audioFormat() const
{
return format;
@@ -270,7 +274,7 @@ bool QWaveDecoder::writeDataLength()
void QWaveDecoder::parsingFailed()
{
Q_ASSERT(device);
- device->disconnect(SIGNAL(readyRead()), this, SLOT(handleData()));
+ disconnect(device, &QIODevice::readyRead, this, &QWaveDecoder::handleData);
emit parsingError();
}
@@ -382,7 +386,7 @@ void QWaveDecoder::handleData()
if (state == QWaveDecoder::WaitingForDataState) {
if (findChunk("data")) {
- device->disconnect(SIGNAL(readyRead()), this, SLOT(handleData()));
+ disconnect(device, &QIODevice::readyRead, this, &QWaveDecoder::handleData);
chunk descriptor;
device->read(reinterpret_cast<char *>(&descriptor), sizeof(chunk));
@@ -396,7 +400,7 @@ void QWaveDecoder::handleData()
dataSize = device->size() - headerLength();
haveFormat = true;
- connect(device, SIGNAL(readyRead()), SIGNAL(readyRead()));
+ connect(device, &QIODevice::readyRead, this, &QIODevice::readyRead);
emit formatKnown();
return;
diff --git a/src/multimedia/camera/qcamera.cpp b/src/multimedia/camera/qcamera.cpp
index 527b14c25..9cfbcc01d 100644
--- a/src/multimedia/camera/qcamera.cpp
+++ b/src/multimedia/camera/qcamera.cpp
@@ -152,14 +152,6 @@ QT_BEGIN_NAMESPACE
See the \l{Camera Overview}{camera overview} for more information.
*/
-
-void QCameraPrivate::_q_error(int error, const QString &errorString)
-{
- Q_Q(QCamera);
-
- this->error.setAndNotify(QCamera::Error(error), errorString, *q);
-}
-
void QCameraPrivate::init(const QCameraDevice &device)
{
Q_Q(QCamera);
@@ -167,16 +159,16 @@ void QCameraPrivate::init(const QCameraDevice &device)
auto maybeControl = QPlatformMediaIntegration::instance()->createCamera(q);
if (!maybeControl) {
qWarning() << "Failed to initialize QCamera" << maybeControl.error();
- error = { QCamera::CameraError, maybeControl.error() };
return;
}
control = maybeControl.value();
cameraDevice = !device.isNull() ? device : QMediaDevices::defaultVideoInput();
if (cameraDevice.isNull())
- _q_error(QCamera::CameraError, QStringLiteral("No camera detected"));
+ control->updateError(QCamera::CameraError, QStringLiteral("No camera detected"));
control->setCamera(cameraDevice);
- q->connect(control, SIGNAL(activeChanged(bool)), q, SIGNAL(activeChanged(bool)));
- q->connect(control, SIGNAL(error(int,QString)), q, SLOT(_q_error(int,QString)));
+ q->connect(control, &QPlatformVideoSource::activeChanged, q, &QCamera::activeChanged);
+ q->connect(control, &QPlatformCamera::errorChanged, q, &QCamera::errorChanged);
+ q->connect(control, &QPlatformCamera::errorOccurred, q, &QCamera::errorOccurred);
}
/*!
@@ -296,7 +288,9 @@ void QCamera::setActive(bool active)
QCamera::Error QCamera::error() const
{
- return d_func()->error.code();
+ Q_D(const QCamera);
+
+ return d->control ? d->control->error() : QCamera::CameraError;
}
/*!
@@ -312,7 +306,10 @@ QCamera::Error QCamera::error() const
*/
QString QCamera::errorString() const
{
- return d_func()->error.description();
+ Q_D(const QCamera);
+
+ return d->control ? d->control->errorString()
+ : QStringLiteral("Camera is not supported on the platform");
}
/*! \enum QCamera::Feature
diff --git a/src/multimedia/camera/qcamera.h b/src/multimedia/camera/qcamera.h
index c5790e339..82d89f5a9 100644
--- a/src/multimedia/camera/qcamera.h
+++ b/src/multimedia/camera/qcamera.h
@@ -34,7 +34,7 @@ class Q_MULTIMEDIA_EXPORT QCamera : public QObject
Q_PROPERTY(QString errorString READ errorString NOTIFY errorChanged)
Q_PROPERTY(QCameraFormat cameraFormat READ cameraFormat WRITE setCameraFormat NOTIFY cameraFormatChanged)
- Q_PROPERTY(FocusMode focusMode READ focusMode WRITE setFocusMode)
+ Q_PROPERTY(FocusMode focusMode READ focusMode WRITE setFocusMode NOTIFY focusModeChanged)
Q_PROPERTY(QPointF focusPoint READ focusPoint NOTIFY focusPointChanged)
Q_PROPERTY(QPointF customFocusPoint READ customFocusPoint WRITE setCustomFocusPoint NOTIFY customFocusPointChanged)
Q_PROPERTY(float focusDistance READ focusDistance WRITE setFocusDistance NOTIFY focusDistanceChanged)
@@ -261,7 +261,6 @@ private:
friend class QMediaCaptureSession;
Q_DISABLE_COPY(QCamera)
Q_DECLARE_PRIVATE(QCamera)
- Q_PRIVATE_SLOT(d_func(), void _q_error(int, const QString &))
friend class QCameraDevice;
};
diff --git a/src/multimedia/camera/qcamera_p.h b/src/multimedia/camera/qcamera_p.h
index c0477c242..ae1299435 100644
--- a/src/multimedia/camera/qcamera_p.h
+++ b/src/multimedia/camera/qcamera_p.h
@@ -16,7 +16,6 @@
//
#include "private/qobject_p.h"
-#include "private/qerrorinfo_p.h"
#include "qcamera.h"
#include "qcameradevice.h"
@@ -34,13 +33,8 @@ public:
QMediaCaptureSession *captureSession = nullptr;
QPlatformCamera *control = nullptr;
- QErrorInfo<QCamera::Error> error;
-
QCameraDevice cameraDevice;
QCameraFormat cameraFormat;
-
- void _q_error(int error, const QString &errorString);
- void unsetError() { error = {}; }
};
QT_END_NAMESPACE
diff --git a/src/multimedia/camera/qcameradevice.cpp b/src/multimedia/camera/qcameradevice.cpp
index 50727d49c..63e7fb4c0 100644
--- a/src/multimedia/camera/qcameradevice.cpp
+++ b/src/multimedia/camera/qcameradevice.cpp
@@ -455,10 +455,12 @@ QCameraDevice& QCameraDevice::operator=(const QCameraDevice& other) = default;
#ifndef QT_NO_DEBUG_STREAM
QDebug operator<<(QDebug d, const QCameraDevice &camera)
{
- d.maybeSpace() << QStringLiteral("QCameraDevice(name=%1, position=%2, orientation=%3)")
- .arg(camera.description())
- .arg(QString::fromLatin1(QCamera::staticMetaObject.enumerator(QCamera::staticMetaObject.indexOfEnumerator("Position"))
- .valueToKey(camera.position())));
+ d.maybeSpace() << QStringLiteral("QCameraDevice(name=%1, id=%2, position=%3)")
+ .arg(camera.description())
+ .arg(QLatin1StringView(camera.id()))
+ .arg(QLatin1StringView(
+ QMetaEnum::fromType<QCameraDevice::Position>().valueToKey(
+ camera.position())));
return d.space();
}
#endif
diff --git a/src/multimedia/camera/qimagecapture.cpp b/src/multimedia/camera/qimagecapture.cpp
index 9b92ce743..df3ddae3f 100644
--- a/src/multimedia/camera/qimagecapture.cpp
+++ b/src/multimedia/camera/qimagecapture.cpp
@@ -92,18 +92,15 @@ QImageCapture::QImageCapture(QObject *parent)
}
d->control = maybeControl.value();
- connect(d->control, SIGNAL(imageExposed(int)),
- this, SIGNAL(imageExposed(int)));
- connect(d->control, SIGNAL(imageCaptured(int,QImage)),
- this, SIGNAL(imageCaptured(int,QImage)));
- connect(d->control, SIGNAL(imageMetadataAvailable(int,QMediaMetaData)),
- this, SIGNAL(imageMetadataAvailable(int,QMediaMetaData)));
- connect(d->control, SIGNAL(imageAvailable(int,QVideoFrame)),
- this, SIGNAL(imageAvailable(int,QVideoFrame)));
- connect(d->control, SIGNAL(imageSaved(int,QString)),
- this, SIGNAL(imageSaved(int,QString)));
- connect(d->control, SIGNAL(readyForCaptureChanged(bool)),
- this, SIGNAL(readyForCaptureChanged(bool)));
+ connect(d->control, &QPlatformImageCapture::imageExposed, this, &QImageCapture::imageExposed);
+ connect(d->control, &QPlatformImageCapture::imageCaptured, this, &QImageCapture::imageCaptured);
+ connect(d->control, &QPlatformImageCapture::imageMetadataAvailable, this,
+ &QImageCapture::imageMetadataAvailable);
+ connect(d->control, &QPlatformImageCapture::imageAvailable, this,
+ &QImageCapture::imageAvailable);
+ connect(d->control, &QPlatformImageCapture::imageSaved, this, &QImageCapture::imageSaved);
+ connect(d->control, &QPlatformImageCapture::readyForCaptureChanged, this,
+ &QImageCapture::readyForCaptureChanged);
connect(d->control, SIGNAL(error(int,int,QString)),
this, SLOT(_q_error(int,int,QString)));
}
diff --git a/src/multimedia/configure.cmake b/src/multimedia/configure.cmake
index 7c3092a47..5fe25f172 100644
--- a/src/multimedia/configure.cmake
+++ b/src/multimedia/configure.cmake
@@ -22,7 +22,11 @@ qt_find_package(MMRendererCore PROVIDED_TARGETS MMRendererCore::MMRendererCore M
qt_find_package(MMRenderer PROVIDED_TARGETS MMRenderer::MMRenderer MODULE_NAME multimedia QMAKE_LIB mmrndclient)
qt_find_package(WrapPulseAudio PROVIDED_TARGETS WrapPulseAudio::WrapPulseAudio MODULE_NAME multimedia QMAKE_LIB pulseaudio)
qt_find_package(WMF PROVIDED_TARGETS WMF::WMF MODULE_NAME multimedia QMAKE_LIB wmf)
-qt_find_package(EGL)
+if(TARGET EGL::EGL)
+ qt_internal_disable_find_package_global_promotion(EGL::EGL)
+endif()
+qt_find_package(EGL PROVIDED_TARGETS EGL::EGL)
+
qt_find_package(FFmpeg OPTIONAL_COMPONENTS AVCODEC AVFORMAT AVUTIL SWRESAMPLE SWSCALE PROVIDED_TARGETS FFmpeg::avcodec FFmpeg::avformat FFmpeg::avutil FFmpeg::swresample FFmpeg::swscale MODULE_NAME multimedia QMAKE_LIB ffmpeg)
qt_find_package(VAAPI COMPONENTS VA DRM PROVIDED_TARGETS VAAPI::VAAPI MODULE_NAME multimedia QMAKE_LIB vaapi)
diff --git a/src/multimedia/doc/qtmultimedia.qdocconf b/src/multimedia/doc/qtmultimedia.qdocconf
index 5a16bddec..97e6dc696 100644
--- a/src/multimedia/doc/qtmultimedia.qdocconf
+++ b/src/multimedia/doc/qtmultimedia.qdocconf
@@ -1,6 +1,5 @@
include($QT_INSTALL_DOCS/global/qt-module-defaults.qdocconf)
include($QT_INSTALL_DOCS/config/exampleurl-qtmultimedia.qdocconf)
-include(../../../examples/multimedia/video/mediaplayer/doc/qmlmediaplayer.qdocconf)
project = QtMultimedia
description = Qt Multimedia Documentation
diff --git a/src/multimedia/doc/src/qtmultimedia-building-from-source.qdoc b/src/multimedia/doc/src/qtmultimedia-building-from-source.qdoc
new file mode 100644
index 000000000..df434c699
--- /dev/null
+++ b/src/multimedia/doc/src/qtmultimedia-building-from-source.qdoc
@@ -0,0 +1,94 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GFDL-1.3-no-invariants-only
+
+/*!
+\page qtmultimedia-building-from-source.html
+\title Building Qt Multimedia from sources
+\brief This document describes how to build Qt Multimedia with full
+feature support from source code.
+
+This page describes the process of configuring and building \l{Qt
+Multimedia}. This description assumes familiarity with \l{Building Qt
+Sources} which specifies build requirements for your platform, as well
+as an overview of \l{Qt Configure Options}. For platform-specific
+considerations related to the Qt Multimedia module, see \l{Platform
+Notes} below.
+
+\section1 Building from source
+
+Building Qt Multimedia with full feature support depends on \l
+{https://ffmpeg.org/}{FFmpeg} headers and libraries on most platforms.
+It is possible to build Qt Multimedia without the Qt Multimedia FFmpeg
+media backend, but this is only recommended when building for platforms
+where the FFmpeg backend is not supported.
+
+FFmpeg developer libraries required to build Qt Multimedia can be built
+from sources or downloaded as binary packages. Qt Multimedia can use
+either static linking or dynamic linking to FFmpeg libraries. We
+recommend using the same major version of FFmpeg that is listed in
+\l{FFmpeg as the default backend}.
+
+To build Qt Multimedia with FFmpeg support, specify the \c{-DFFMPEG_DIR}
+CMake variable on the configure command line when building Qt. Note the
+\c{--} separator which separates ordinary configure arguments from CMake
+parameters.
+
+\badcode
+qt-source/configure -- -DFFMPEG_DIR=<FFMPEG_DIR>
+\endcode
+
+Here, \c{<FFMPEG_DIR>} is the directory containing the FFmpeg include,
+lib, and bin directories. To build Qt Multimedia without FFmpeg, omit
+the \c{FFMPEG_DIR} variable or specify the \c{-no-feature-ffmpeg}
+configure option.
+
+If you prefer not to build all of Qt's submodules, you can reduce
+configure and build times using the \c{-submodules} configure option.
+This configures a build that includes only Qt Multimedia and its
+dependencies.
+
+\badcode
+qt-source/configure -submodules qtmultimedia -- -DFFMPEG_DIR=<FFMPEG_DIR>
+\endcode
+
+If you configure Qt Multimedia against FFmpeg built with shared
+libraries (dynamic linking), the FFmpeg shared libraries must be in the
+module loader's search path to run tests or use examples.
+
+\note Qt Multimedia requires the FFmpeg avformat, avcodec, swresample,
+swscale, and avutil libraries at runtime to be able to use the
+FFmpeg media backend. If one or more of these dynamic libraries are not
+found during application startup, the FFmpeg media backend will fail to
+load, and Qt will attempt to load the native backend instead. Qt
+Multimedia doesn't support as many features on native backends.
+
+If these libraries are not already in the module loader's search path,
+specify the \c{-DQT_DEPLOY_FFMPEG=ON} configure option. With this
+option enabled, the necessary FFmpeg binaries will be copied to Qt's
+install directory during the build and install steps:
+
+\badcode
+qt-source/configure -submodules qtmultimedia -- -DFFMPEG_DIR=<FFMPEG_DIR> -DQT_DEPLOY_FFMPEG=ON
+\endcode
+
+After configuring Qt Multimedia, carefully review the configure summary
+(found in the config.summary file). You can verify that FFmpeg is found
+under the "Plugin" section. Then follow the regular build and install
+steps described in \l{Building Qt Sources}.
+
+\section1 Platform Notes
+
+\section2 Linux
+
+\list
+ \li When configuring Qt Multimedia with FFmpeg enabled, the
+ pulseaudio development package is required. Without this
+ package, FFmpeg will not be recognized.
+ \li When using a version of FFmpeg that is built with VAAPI support,
+ we recommend building Qt Multimedia with VAAPI support as well
+ to make hardware texture conversion possible. To configure Qt
+ Multimedia with VAAPI support, VAAPI developer libraries must be
+ installed on your system. Review the config.summary file to
+ verify that VAAPI support is enabled under the "Hardware
+ acceleration and features" section.
+\endlist
+*/
diff --git a/src/multimedia/doc/src/qtmultimedia-index.qdoc b/src/multimedia/doc/src/qtmultimedia-index.qdoc
index 7cde47add..74646b84c 100644
--- a/src/multimedia/doc/src/qtmultimedia-index.qdoc
+++ b/src/multimedia/doc/src/qtmultimedia-index.qdoc
@@ -48,6 +48,9 @@
target_link_libraries(my_project PRIVATE Qt6::Multimedia)
\endcode
+ See \l {Building Qt Multimedia from sources} for guidance on how to
+ configure and build Qt Multimedia from source code.
+
\section1 Overviews and important topics
\list
@@ -190,18 +193,18 @@
The version shipped with Qt binary packages is \b{FFmpeg 6.1.1} and is tested
by the maintainers.
- \note On the Windows platform, Qt's FFmpeg media backend uses
- dynamic linking to the FFmpeg libraries. Windows applications must
- therefore bundle FFmpeg binaries in their installer, and make them
- visible to the application according to Windows dll loading rules.
- We recommend to store the FFmpeg dlls in the same directory as the
- application's executable file, because this guarantees that the
- correct build of FFmpeg is being used if multiple versions are
- available on the system. All necessary FFmpeg dlls are shipped with
- the Qt Online Installer and are automatically deployed if the
- windeployqt tool is used to create the deployment. Applications can
- also deploy their own build of FFmpeg, as long as the FFmpeg major
- version matches the version used by Qt.
+ \note On the Windows and macOS platforms, Qt's FFmpeg media backend
+ uses dynamic linking to the FFmpeg libraries. Windows and macOS
+ applications must therefore bundle FFmpeg binaries in their
+ installer, and make them visible to the application at runtime. On
+ Windows, we recommend to store the FFmpeg dlls in the same directory
+ as the application's executable file, because this guarantees that
+ the correct build of FFmpeg is being used if multiple versions are
+ available on the system. All necessary FFmpeg libraries are shipped
+ with the Qt Online Installer and are automatically deployed if the
+ windeployqt or macdeployqt tools are used to create the deployment.
+ Applications can also deploy their own build of FFmpeg, as long as
+ the FFmpeg major version matches the version used by Qt.
\note See \l{Licenses and Attributions} regarding what components are removed
in the package shipped by Qt.
@@ -217,6 +220,8 @@
\note These are still available but with \b limited support. The gstreamer
backend is only available on Linux.
+ \note MediaCodec on Android is deprecated as of Qt 6.8 and will be removed
+ in Qt 7.0.
\section2 Backend support
Maintainers will strive to fix critical issues with the native backends but
diff --git a/src/multimedia/platform/qgstreamer_platformspecificinterface.cpp b/src/multimedia/platform/qgstreamer_platformspecificinterface.cpp
new file mode 100644
index 000000000..06ce46e3c
--- /dev/null
+++ b/src/multimedia/platform/qgstreamer_platformspecificinterface.cpp
@@ -0,0 +1,27 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/private/qgstreamer_platformspecificinterface_p.h>
+
+QT_BEGIN_NAMESPACE
+
+QGStreamerPlatformSpecificInterface::~QGStreamerPlatformSpecificInterface() = default;
+
+QGStreamerPlatformSpecificInterface *QGStreamerPlatformSpecificInterface::instance()
+{
+ return dynamic_cast<QGStreamerPlatformSpecificInterface *>(
+ QPlatformMediaIntegration::instance()->platformSpecificInterface());
+}
+
+QT_END_NAMESPACE
diff --git a/src/multimedia/platform/qgstreamer_platformspecificinterface_p.h b/src/multimedia/platform/qgstreamer_platformspecificinterface_p.h
new file mode 100644
index 000000000..1a086f5a4
--- /dev/null
+++ b/src/multimedia/platform/qgstreamer_platformspecificinterface_p.h
@@ -0,0 +1,46 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef GSTREAMER_PLATFORMSPECIFICINTERFACE_P_H
+#define GSTREAMER_PLATFORMSPECIFICINTERFACE_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/private/qplatformmediaintegration_p.h>
+
+typedef struct _GstPipeline GstPipeline; // NOLINT (bugprone-reserved-identifier)
+typedef struct _GstElement GstElement; // NOLINT (bugprone-reserved-identifier)
+
+QT_BEGIN_NAMESPACE
+
+class Q_MULTIMEDIA_EXPORT QGStreamerPlatformSpecificInterface
+ : public QAbstractPlatformSpecificInterface
+{
+public:
+ ~QGStreamerPlatformSpecificInterface() override;
+
+ static QGStreamerPlatformSpecificInterface *instance();
+
+ virtual QAudioDevice makeCustomGStreamerAudioInput(const QByteArray &gstreamerPipeline) = 0;
+ virtual QAudioDevice makeCustomGStreamerAudioOutput(const QByteArray &gstreamerPipeline) = 0;
+ virtual QCamera *makeCustomGStreamerCamera(const QByteArray &gstreamerPipeline,
+ QObject *parent) = 0;
+
+ // Note: ownership of GstElement is not transferred
+ virtual QCamera *makeCustomGStreamerCamera(GstElement *, QObject *parent) = 0;
+
+ virtual GstPipeline *gstPipeline(QMediaPlayer *) = 0;
+ virtual GstPipeline *gstPipeline(QMediaCaptureSession *) = 0;
+};
+
+QT_END_NAMESPACE
+
+#endif // GSTREAMER_PLATFORMSPECIFICINTERFACE_P_H
diff --git a/src/multimedia/platform/qplatformaudiobufferinput.cpp b/src/multimedia/platform/qplatformaudiobufferinput.cpp
new file mode 100644
index 000000000..883b11fc0
--- /dev/null
+++ b/src/multimedia/platform/qplatformaudiobufferinput.cpp
@@ -0,0 +1,10 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qplatformaudiobufferinput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+QT_END_NAMESPACE
+
+#include "moc_qplatformaudiobufferinput_p.cpp"
diff --git a/src/multimedia/platform/qplatformaudiobufferinput_p.h b/src/multimedia/platform/qplatformaudiobufferinput_p.h
new file mode 100644
index 000000000..a05a98100
--- /dev/null
+++ b/src/multimedia/platform/qplatformaudiobufferinput_p.h
@@ -0,0 +1,56 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QPLATFORMAUDIOBUFFERINPUT_P_H
+#define QPLATFORMAUDIOBUFFERINPUT_P_H
+
+#include "qaudioformat.h"
+#include "qaudiobuffer.h"
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+QT_BEGIN_NAMESPACE
+
+class QMediaInputEncoderInterface;
+
+class Q_MULTIMEDIA_EXPORT QPlatformAudioBufferInputBase : public QObject
+{
+ Q_OBJECT
+Q_SIGNALS:
+ void newAudioBuffer(const QAudioBuffer &buffer);
+};
+
+class Q_MULTIMEDIA_EXPORT QPlatformAudioBufferInput : public QPlatformAudioBufferInputBase
+{
+ Q_OBJECT
+public:
+ QPlatformAudioBufferInput(QAudioFormat format = {}) : m_format(std::move(format)) { }
+
+ const QAudioFormat &audioFormat() const { return m_format; }
+
+ QMediaInputEncoderInterface *encoderInterface() const { return m_encoderInterface; }
+ void setEncoderInterface(QMediaInputEncoderInterface *interface)
+ {
+ m_encoderInterface = interface;
+ }
+
+Q_SIGNALS:
+ void encoderUpdated();
+
+private:
+ QMediaInputEncoderInterface *m_encoderInterface = nullptr;
+ QAudioFormat m_format;
+};
+
+QT_END_NAMESPACE
+
+#endif // QPLATFORMAUDIOBUFFERINPUT_P_H
diff --git a/src/multimedia/platform/qplatformcamera.cpp b/src/multimedia/platform/qplatformcamera.cpp
index 0d3975550..d03c19d67 100644
--- a/src/multimedia/platform/qplatformcamera.cpp
+++ b/src/multimedia/platform/qplatformcamera.cpp
@@ -50,7 +50,7 @@ QVideoFrameFormat QPlatformCamera::frameFormat() const
m_framePixelFormat == QVideoFrameFormat::Format_Invalid
? m_cameraFormat.pixelFormat()
: m_framePixelFormat);
- result.setFrameRate(m_cameraFormat.maxFrameRate());
+ result.setStreamFrameRate(m_cameraFormat.maxFrameRate());
return result;
}
@@ -221,6 +221,13 @@ int QPlatformCamera::colorTemperatureForWhiteBalance(QCamera::WhiteBalanceMode m
return 0;
}
+void QPlatformCamera::updateError(QCamera::Error error, const QString &errorString)
+{
+ QMetaObject::invokeMethod(this, [this, error, errorString]() {
+ m_error.setAndNotify(error, errorString, *this);
+ });
+}
+
QT_END_NAMESPACE
#include "moc_qplatformcamera_p.cpp"
diff --git a/src/multimedia/platform/qplatformcamera_p.h b/src/multimedia/platform/qplatformcamera_p.h
index 85624c0ce..341bf9121 100644
--- a/src/multimedia/platform/qplatformcamera_p.h
+++ b/src/multimedia/platform/qplatformcamera_p.h
@@ -16,7 +16,7 @@
//
#include "qplatformvideosource_p.h"
-
+#include "private/qerrorinfo_p.h"
#include <QtMultimedia/qcamera.h>
QT_BEGIN_NAMESPACE
@@ -110,8 +110,13 @@ public:
static int colorTemperatureForWhiteBalance(QCamera::WhiteBalanceMode mode);
+ QCamera::Error error() const { return m_error.code(); }
+ QString errorString() const final { return m_error.description(); }
+
+ void updateError(QCamera::Error error, const QString &errorString);
+
Q_SIGNALS:
- void error(int error, const QString &errorString);
+ void errorOccurred(QCamera::Error error, const QString &errorString);
protected:
explicit QPlatformCamera(QCamera *parent);
@@ -150,6 +155,7 @@ private:
float m_maxExposureTime = -1.;
QCamera::WhiteBalanceMode m_whiteBalance = QCamera::WhiteBalanceAuto;
int m_colorTemperature = 0;
+ QErrorInfo<QCamera::Error> m_error;
};
QT_END_NAMESPACE
diff --git a/src/multimedia/platform/qplatformmediacapture.cpp b/src/multimedia/platform/qplatformmediacapture.cpp
index 826228764..13bcbd63b 100644
--- a/src/multimedia/platform/qplatformmediacapture.cpp
+++ b/src/multimedia/platform/qplatformmediacapture.cpp
@@ -1,12 +1,15 @@
// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-#include <qtmultimediaglobal_p.h>
-#include "qplatformmediacapture_p.h"
-#include "qaudiodevice.h"
-#include "qaudioinput.h"
-#include "qplatformcamera_p.h"
-#include "qplatformsurfacecapture_p.h"
+#include <QtMultimedia/qaudiodevice.h>
+#include <QtMultimedia/qaudioinput.h>
+#include <QtMultimedia/qmediacapturesession.h>
+#include <QtMultimedia/private/qplatformcamera_p.h>
+#include <QtMultimedia/private/qplatformmediacapture_p.h>
+#include <QtMultimedia/private/qmediacapturesession_p.h>
+#include <QtMultimedia/private/qplatformsurfacecapture_p.h>
+#include <QtMultimedia/private/qplatformvideoframeinput_p.h>
+#include <QtMultimedia/private/qtmultimediaglobal_p.h>
QT_BEGIN_NAMESPACE
@@ -21,6 +24,7 @@ std::vector<QPlatformVideoSource *> QPlatformMediaCaptureSession::activeVideoSou
result.push_back(source);
};
+ checkSource(videoFrameInput());
checkSource(camera());
checkSource(screenCapture());
checkSource(windowCapture());
diff --git a/src/multimedia/platform/qplatformmediacapture_p.h b/src/multimedia/platform/qplatformmediacapture_p.h
index 814fa160c..8d6afc90e 100644
--- a/src/multimedia/platform/qplatformmediacapture_p.h
+++ b/src/multimedia/platform/qplatformmediacapture_p.h
@@ -29,6 +29,8 @@ class QPlatformAudioOutput;
class QMediaCaptureSession;
class QPlatformSurfaceCapture;
class QPlatformVideoSource;
+class QPlatformAudioBufferInput;
+class QPlatformVideoFrameInput;
class Q_MULTIMEDIA_EXPORT QPlatformMediaCaptureSession : public QObject
{
@@ -49,6 +51,9 @@ public:
virtual QPlatformSurfaceCapture *windowCapture() { return nullptr; }
virtual void setWindowCapture(QPlatformSurfaceCapture *) { }
+ virtual QPlatformVideoFrameInput *videoFrameInput() { return nullptr; }
+ virtual void setVideoFrameInput(QPlatformVideoFrameInput *) { }
+
virtual QPlatformImageCapture *imageCapture() = 0;
virtual void setImageCapture(QPlatformImageCapture *) {}
@@ -57,6 +62,8 @@ public:
virtual void setAudioInput(QPlatformAudioInput *input) = 0;
+ virtual void setAudioBufferInput(QPlatformAudioBufferInput *) { }
+
virtual void setVideoPreview(QVideoSink * /*sink*/) {}
virtual void setAudioOutput(QPlatformAudioOutput *) {}
@@ -68,6 +75,7 @@ Q_SIGNALS:
void cameraChanged();
void screenCaptureChanged();
void windowCaptureChanged();
+ void videoFrameInputChanged();
void imageCaptureChanged();
void encoderChanged();
diff --git a/src/multimedia/platform/qplatformmediaintegration.cpp b/src/multimedia/platform/qplatformmediaintegration.cpp
index c8f662963..4bacc488f 100644
--- a/src/multimedia/platform/qplatformmediaintegration.cpp
+++ b/src/multimedia/platform/qplatformmediaintegration.cpp
@@ -91,48 +91,10 @@ struct InstanceHolder
qCDebug(qLcMediaPlugin) << "Released media backend";
}
- // Play nice with QtGlobalStatic::ApplicationHolder
- using QAS_Type = InstanceHolder;
- static void innerFunction(void *pointer)
- {
- new (pointer) InstanceHolder();
- }
-
std::unique_ptr<QPlatformMediaIntegration> instance;
};
-// Specialized implementation of Q_APPLICATION_STATIC which behaves as
-// an application static if a Qt application is present, otherwise as a Q_GLOBAL_STATIC.
-// By doing this, and we have a Qt application, all system resources allocated by the
-// backend is released when application lifetime ends. This is important on Windows,
-// where Windows Media Foundation instances should not be released during static destruction.
-//
-// If we don't have a Qt application available when instantiating the instance holder,
-// it will be created once, and not destroyed until static destruction. This can cause
-// abrupt termination of Windows applications during static destruction. This is not a
-// supported use case, but we keep this as a fallback to keep old applications functional.
-// See also QTBUG-120198
-struct ApplicationHolder : QtGlobalStatic::ApplicationHolder<InstanceHolder>
-{
- // Replace QtGlobalStatic::ApplicationHolder::pointer to prevent crash if
- // no application is present
- static InstanceHolder* pointer()
- {
- if (guard.loadAcquire() == QtGlobalStatic::Initialized)
- return realPointer();
-
- QMutexLocker locker(&mutex);
- if (guard.loadRelaxed() == QtGlobalStatic::Uninitialized) {
- InstanceHolder::innerFunction(&storage);
-
- if (const QCoreApplication *app = QCoreApplication::instance())
- QObject::connect(app, &QObject::destroyed, app, reset, Qt::DirectConnection);
-
- guard.storeRelease(QtGlobalStatic::Initialized);
- }
- return realPointer();
- }
-};
+Q_APPLICATION_STATIC(InstanceHolder, s_instanceHolder);
} // namespace
@@ -140,7 +102,6 @@ QT_BEGIN_NAMESPACE
QPlatformMediaIntegration *QPlatformMediaIntegration::instance()
{
- static QGlobalStatic<ApplicationHolder> s_instanceHolder;
return s_instanceHolder->instance.get();
}
@@ -246,6 +207,12 @@ QLatin1String QPlatformMediaIntegration::name()
return m_backendName;
}
+QVideoFrame QPlatformMediaIntegration::convertVideoFrame(QVideoFrame &,
+ const QVideoFrameFormat &)
+{
+ return {};
+}
+
QPlatformMediaIntegration::QPlatformMediaIntegration(QLatin1String name) : m_backendName(name) { }
QPlatformMediaIntegration::~QPlatformMediaIntegration() = default;
diff --git a/src/multimedia/platform/qplatformmediaintegration_p.h b/src/multimedia/platform/qplatformmediaintegration_p.h
index 19fa40baf..d03d0c794 100644
--- a/src/multimedia/platform/qplatformmediaintegration_p.h
+++ b/src/multimedia/platform/qplatformmediaintegration_p.h
@@ -53,6 +53,13 @@ class QPlatformAudioOutput;
class QPlatformVideoDevices;
class QCapturableWindow;
class QPlatformCapturableWindows;
+class QVideoFrame;
+
+class Q_MULTIMEDIA_EXPORT QAbstractPlatformSpecificInterface
+{
+public:
+ virtual ~QAbstractPlatformSpecificInterface() = default;
+};
class Q_MULTIMEDIA_EXPORT QPlatformMediaIntegration : public QObject
{
@@ -96,6 +103,11 @@ public:
static QStringList availableBackends();
QLatin1String name(); // for unit tests
+ // Convert a QVideoFrame to the destination format
+ virtual QVideoFrame convertVideoFrame(QVideoFrame &, const QVideoFrameFormat &);
+
+ virtual QAbstractPlatformSpecificInterface *platformSpecificInterface() { return nullptr; }
+
protected:
virtual QPlatformMediaFormatInfo *createFormatInfo();
diff --git a/src/multimedia/platform/qplatformmediaplayer.cpp b/src/multimedia/platform/qplatformmediaplayer.cpp
index ea22f94df..00840f074 100644
--- a/src/multimedia/platform/qplatformmediaplayer.cpp
+++ b/src/multimedia/platform/qplatformmediaplayer.cpp
@@ -14,9 +14,7 @@ QPlatformMediaPlayer::QPlatformMediaPlayer(QMediaPlayer *parent) : player(parent
QPlatformMediaIntegration::instance()->mediaDevices()->prepareAudio();
}
-QPlatformMediaPlayer::~QPlatformMediaPlayer()
-{
-}
+QPlatformMediaPlayer::~QPlatformMediaPlayer() = default;
void QPlatformMediaPlayer::stateChanged(QMediaPlayer::PlaybackState newState)
{
@@ -39,16 +37,4 @@ void QPlatformMediaPlayer::error(int error, const QString &errorString)
player->d_func()->setError(QMediaPlayer::Error(error), errorString);
}
-void *QPlatformMediaPlayer::nativePipeline(QMediaPlayer *player)
-{
- if (!player)
- return nullptr;
-
- auto playerPrivate = player->d_func();
- if (!playerPrivate || !playerPrivate->control)
- return nullptr;
-
- return playerPrivate->control->nativePipeline();
-}
-
QT_END_NAMESPACE
diff --git a/src/multimedia/platform/qplatformmediaplayer_p.h b/src/multimedia/platform/qplatformmediaplayer_p.h
index 6e3590763..3b839fdd5 100644
--- a/src/multimedia/platform/qplatformmediaplayer_p.h
+++ b/src/multimedia/platform/qplatformmediaplayer_p.h
@@ -22,6 +22,7 @@
#include <QtCore/qpair.h>
#include <QtCore/private/qglobal_p.h>
+#include <QtCore/qobject.h>
QT_BEGIN_NAMESPACE
@@ -64,6 +65,8 @@ public:
virtual void setAudioOutput(QPlatformAudioOutput *) {}
+ virtual void setAudioBufferOutput(QAudioBufferOutput *) { }
+
virtual QMediaMetaData metaData() const { return {}; }
virtual void setVideoSink(QVideoSink * /*sink*/) = 0;
@@ -76,7 +79,9 @@ public:
virtual int activeTrack(TrackType) { return -1; }
virtual void setActiveTrack(TrackType, int /*streamNumber*/) {}
+ void durationChanged(std::chrono::milliseconds ms) { durationChanged(ms.count()); }
void durationChanged(qint64 duration) { emit player->durationChanged(duration); }
+ void positionChanged(std::chrono::milliseconds ms) { positionChanged(ms.count()); }
void positionChanged(qint64 position) {
if (m_position == position)
return;
@@ -124,11 +129,6 @@ public:
Q_EMIT player->loopsChanged();
}
- virtual void *nativePipeline() { return nullptr; }
-
- // private API, the purpose is getting GstPipeline
- static void *nativePipeline(QMediaPlayer *player);
-
protected:
explicit QPlatformMediaPlayer(QMediaPlayer *parent = nullptr);
@@ -144,6 +144,25 @@ private:
qint64 m_position = 0;
};
+#ifndef QT_NO_DEBUG_STREAM
+inline QDebug operator<<(QDebug dbg, QPlatformMediaPlayer::TrackType type)
+{
+ QDebugStateSaver save(dbg);
+ dbg.nospace();
+
+ switch (type) {
+ case QPlatformMediaPlayer::TrackType::AudioStream:
+ return dbg << "AudioStream";
+ case QPlatformMediaPlayer::TrackType::VideoStream:
+ return dbg << "VideoStream";
+ case QPlatformMediaPlayer::TrackType::SubtitleStream:
+ return dbg << "SubtitleStream";
+ default:
+ Q_UNREACHABLE_RETURN(dbg);
+ }
+}
+#endif
+
QT_END_NAMESPACE
diff --git a/src/multimedia/platform/qplatformmediarecorder.cpp b/src/multimedia/platform/qplatformmediarecorder.cpp
index ba9ea0165..30dba0a45 100644
--- a/src/multimedia/platform/qplatformmediarecorder.cpp
+++ b/src/multimedia/platform/qplatformmediarecorder.cpp
@@ -15,12 +15,12 @@ QPlatformMediaRecorder::QPlatformMediaRecorder(QMediaRecorder *parent)
void QPlatformMediaRecorder::pause()
{
- error(QMediaRecorder::FormatError, QMediaRecorder::tr("Pause not supported"));
+ updateError(QMediaRecorder::FormatError, QMediaRecorder::tr("Pause not supported"));
}
void QPlatformMediaRecorder::resume()
{
- error(QMediaRecorder::FormatError, QMediaRecorder::tr("Resume not supported"));
+ updateError(QMediaRecorder::FormatError, QMediaRecorder::tr("Resume not supported"));
}
void QPlatformMediaRecorder::stateChanged(QMediaRecorder::RecorderState state)
@@ -47,7 +47,7 @@ void QPlatformMediaRecorder::actualLocationChanged(const QUrl &location)
emit q->actualLocationChanged(location);
}
-void QPlatformMediaRecorder::error(QMediaRecorder::Error error, const QString &errorString)
+void QPlatformMediaRecorder::updateError(QMediaRecorder::Error error, const QString &errorString)
{
m_error.setAndNotify(error, errorString, *q);
}
diff --git a/src/multimedia/platform/qplatformmediarecorder_p.h b/src/multimedia/platform/qplatformmediarecorder_p.h
index 6e88dc187..dea45ac70 100644
--- a/src/multimedia/platform/qplatformmediarecorder_p.h
+++ b/src/multimedia/platform/qplatformmediarecorder_p.h
@@ -125,7 +125,7 @@ public:
virtual void setOutputLocation(const QUrl &location) { m_outputLocation = location; }
QUrl actualLocation() const { return m_actualLocation; }
void clearActualLocation() { m_actualLocation.clear(); }
- void clearError() { error(QMediaRecorder::NoError, QString()); }
+ void clearError() { updateError(QMediaRecorder::NoError, QString()); }
QIODevice *outputDevice() const { return m_outputDevice; }
void setOutputDevice(QIODevice *device) { m_outputDevice = device; }
@@ -136,7 +136,7 @@ protected:
void stateChanged(QMediaRecorder::RecorderState state);
void durationChanged(qint64 position);
void actualLocationChanged(const QUrl &location);
- void error(QMediaRecorder::Error error, const QString &errorString);
+ void updateError(QMediaRecorder::Error error, const QString &errorString);
void metaDataChanged();
QMediaRecorder *mediaRecorder() { return q; }
diff --git a/src/multimedia/platform/qplatformsurfacecapture_p.h b/src/multimedia/platform/qplatformsurfacecapture_p.h
index 42fbda474..e4c59c6f4 100644
--- a/src/multimedia/platform/qplatformsurfacecapture_p.h
+++ b/src/multimedia/platform/qplatformsurfacecapture_p.h
@@ -61,7 +61,7 @@ public:
Source source() const { return m_source; }
Error error() const;
- QString errorString() const;
+ QString errorString() const final;
protected:
virtual bool setActiveInternal(bool) = 0;
@@ -74,7 +74,6 @@ public Q_SLOTS:
Q_SIGNALS:
void sourceChanged(WindowSource);
void sourceChanged(ScreenSource);
- void errorChanged();
void errorOccurred(Error error, QString errorString);
private:
diff --git a/src/multimedia/platform/qplatformvideoframeinput.cpp b/src/multimedia/platform/qplatformvideoframeinput.cpp
new file mode 100644
index 000000000..d90306345
--- /dev/null
+++ b/src/multimedia/platform/qplatformvideoframeinput.cpp
@@ -0,0 +1,10 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qplatformvideoframeinput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+QT_END_NAMESPACE
+
+#include "moc_qplatformvideoframeinput_p.cpp"
diff --git a/src/multimedia/platform/qplatformvideoframeinput_p.h b/src/multimedia/platform/qplatformvideoframeinput_p.h
new file mode 100644
index 000000000..45714492c
--- /dev/null
+++ b/src/multimedia/platform/qplatformvideoframeinput_p.h
@@ -0,0 +1,55 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QPLATFORMVIDEOFRAMEINPUT_P_H
+#define QPLATFORMVIDEOFRAMEINPUT_P_H
+
+#include "qplatformvideosource_p.h"
+#include "qmetaobject.h"
+#include "qpointer.h"
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+QT_BEGIN_NAMESPACE
+
+class QMediaInputEncoderInterface;
+
+class Q_MULTIMEDIA_EXPORT QPlatformVideoFrameInput : public QPlatformVideoSource
+{
+ Q_OBJECT
+public:
+ QPlatformVideoFrameInput(QVideoFrameFormat format = {}) : m_format(std::move(format)) { }
+
+ void setActive(bool) final { }
+ bool isActive() const final { return true; }
+
+ QVideoFrameFormat frameFormat() const final { return m_format; }
+
+ QString errorString() const final { return {}; }
+
+ QMediaInputEncoderInterface *encoderInterface() const { return m_encoderInterface; }
+ void setEncoderInterface(QMediaInputEncoderInterface *interface)
+ {
+ m_encoderInterface = interface;
+ }
+
+Q_SIGNALS:
+ void encoderUpdated();
+
+private:
+ QMediaInputEncoderInterface *m_encoderInterface = nullptr;
+ QVideoFrameFormat m_format;
+};
+
+QT_END_NAMESPACE
+
+#endif // QPLATFORMVIDEOFRAMEINPUT_P_H
diff --git a/src/multimedia/platform/qplatformvideosource_p.h b/src/multimedia/platform/qplatformvideosource_p.h
index 3ed76d3e2..b11524226 100644
--- a/src/multimedia/platform/qplatformvideosource_p.h
+++ b/src/multimedia/platform/qplatformvideosource_p.h
@@ -43,9 +43,14 @@ public:
virtual void setCaptureSession(QPlatformMediaCaptureSession *) { }
+ virtual QString errorString() const = 0;
+
+ bool hasError() const { return !errorString().isEmpty(); }
+
Q_SIGNALS:
void newVideoFrame(const QVideoFrame &);
void activeChanged(bool);
+ void errorChanged();
};
QT_END_NAMESPACE
diff --git a/src/multimedia/playback/qmediaplayer.cpp b/src/multimedia/playback/qmediaplayer.cpp
index dc8e3dab8..644c2d094 100644
--- a/src/multimedia/playback/qmediaplayer.cpp
+++ b/src/multimedia/playback/qmediaplayer.cpp
@@ -5,6 +5,7 @@
#include <private/qmultimediautils_p.h>
#include <private/qplatformmediaintegration_p.h>
+#include <private/qaudiobufferoutput_p.h>
#include <qvideosink.h>
#include <qaudiooutput.h>
@@ -596,6 +597,12 @@ void QMediaPlayer::setPlaybackRate(qreal rate)
It does not wait for the media to finish loading and does not check for errors. Listen for
the mediaStatusChanged() and error() signals to be notified when the media is loaded and
when an error occurs during loading.
+
+ \note FFmpeg, used by the FFmpeg media backend, restricts use of nested protocols for
+ security reasons. In controlled environments where all inputs are trusted, the list of
+ approved protocols can be overridden using the QT_FFMPEG_PROTOCOL_WHITELIST environment
+ variable. This environment variable is Qt's private API and can change between patch
+ releases without notice.
*/
void QMediaPlayer::setSource(const QUrl &source)
@@ -643,6 +650,51 @@ void QMediaPlayer::setSourceDevice(QIODevice *device, const QUrl &sourceUrl)
}
/*!
+ Sets an audio buffer \a output to the media player.
+
+ If \l QAudioBufferOutput is specified and the media source
+ contains an audio stream, the media player will emit
+ the signal \l{QAudioBufferOutput::audioBufferReceived} with
+ audio buffers containing decoded audio data. At the end of
+ the audio stream, \c QMediaPlayer emits an empty \l QAudioBuffer.
+
+ \c QMediaPlayer emits output audio buffers at the same time as it
+ pushes the matching data to the audio output if one is specified.
+ However, the sound can be played with a small delay due to
+ audio buffering.
+*/
+void QMediaPlayer::setAudioBufferOutput(QAudioBufferOutput *output)
+{
+ Q_D(QMediaPlayer);
+
+ QAudioBufferOutput *oldOutput = d->audioBufferOutput;
+ if (oldOutput == output)
+ return;
+
+ d->audioBufferOutput = output;
+
+ if (output) {
+ auto oldPlayer = QAudioBufferOutputPrivate::exchangeMediaPlayer(*oldOutput, this);
+ if (oldPlayer)
+ oldPlayer->setAudioBufferOutput(nullptr);
+ }
+
+ if (d->control)
+ d->control->setAudioBufferOutput(output);
+
+ emit audioBufferOutputChanged();
+}
+
+/*!
+ Returns the \l QAudioBufferOutput that has been set on the media player.
+*/
+QAudioBufferOutput *QMediaPlayer::audioBufferOutput() const
+{
+ Q_D(const QMediaPlayer);
+ return d->audioBufferOutput;
+}
+
+/*!
\qmlproperty AudioOutput QtMultimedia::MediaPlayer::audioOutput
This property holds the target audio output.
diff --git a/src/multimedia/playback/qmediaplayer.h b/src/multimedia/playback/qmediaplayer.h
index 015a30f05..e0d1fec75 100644
--- a/src/multimedia/playback/qmediaplayer.h
+++ b/src/multimedia/playback/qmediaplayer.h
@@ -17,6 +17,7 @@ class QAudioOutput;
class QAudioDevice;
class QMediaMetaData;
class QMediaTimeRange;
+class QAudioBufferOutput;
class QMediaPlayerPrivate;
class Q_MULTIMEDIA_EXPORT QMediaPlayer : public QObject
@@ -106,6 +107,9 @@ public:
void setActiveVideoTrack(int index);
void setActiveSubtitleTrack(int index);
+ void setAudioBufferOutput(QAudioBufferOutput *output);
+ QAudioBufferOutput *audioBufferOutput() const;
+
void setAudioOutput(QAudioOutput *output);
QAudioOutput *audioOutput() const;
@@ -177,6 +181,7 @@ Q_SIGNALS:
void metaDataChanged();
void videoOutputChanged();
void audioOutputChanged();
+ void audioBufferOutputChanged();
void tracksChanged();
void activeTracksChanged();
diff --git a/src/multimedia/playback/qmediaplayer_p.h b/src/multimedia/playback/qmediaplayer_p.h
index ece086d06..3d32d4e68 100644
--- a/src/multimedia/playback/qmediaplayer_p.h
+++ b/src/multimedia/playback/qmediaplayer_p.h
@@ -19,6 +19,7 @@
#include "qmediametadata.h"
#include "qvideosink.h"
#include "qaudiooutput.h"
+#include "qaudiobufferoutput.h"
#include <private/qplatformmediaplayer_p.h>
#include <private/qerrorinfo_p.h>
@@ -40,9 +41,15 @@ class QMediaPlayerPrivate : public QObjectPrivate
Q_DECLARE_PUBLIC(QMediaPlayer)
public:
+ static QMediaPlayerPrivate *get(QMediaPlayer *session)
+ {
+ return reinterpret_cast<QMediaPlayerPrivate *>(QObjectPrivate::get(session));
+ }
+
QMediaPlayerPrivate() = default;
QPlatformMediaPlayer *control = nullptr;
+ QPointer<QAudioBufferOutput> audioBufferOutput;
QPointer<QAudioOutput> audioOutput;
QPointer<QVideoSink> videoSink;
QPointer<QObject> videoOutput;
diff --git a/src/multimedia/pulseaudio/qaudioengine_pulse.cpp b/src/multimedia/pulseaudio/qaudioengine_pulse.cpp
index e54356404..5fac7234a 100644
--- a/src/multimedia/pulseaudio/qaudioengine_pulse.cpp
+++ b/src/multimedia/pulseaudio/qaudioengine_pulse.cpp
@@ -477,7 +477,7 @@ void QPulseAudioEngine::onContextFailed()
release();
// Try to reconnect later
- QTimer::singleShot(3000, this, SLOT(prepare()));
+ QTimer::singleShot(3000, this, &QPulseAudioEngine::prepare);
}
QPulseAudioEngine *QPulseAudioEngine::instance()
diff --git a/src/multimedia/qmediaframeinput.cpp b/src/multimedia/qmediaframeinput.cpp
new file mode 100644
index 000000000..4bb90d3ee
--- /dev/null
+++ b/src/multimedia/qmediaframeinput.cpp
@@ -0,0 +1,43 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qmediaframeinput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+void QMediaFrameInputPrivate::setCaptureSession(QMediaCaptureSession *session)
+{
+ if (session == m_captureSession)
+ return;
+
+ auto prevSession = std::exchange(m_captureSession, session);
+ updateCaptureSessionConnections(prevSession, session);
+ updateCanSendMediaFrame();
+}
+
+void QMediaFrameInputPrivate::updateCanSendMediaFrame()
+{
+ const bool canSendMediaFrame = m_captureSession && checkIfCanSendMediaFrame();
+ if (m_canSendMediaFrame != canSendMediaFrame) {
+ m_canSendMediaFrame = canSendMediaFrame;
+ if (m_canSendMediaFrame)
+ emitReadyToSendMediaFrame();
+ }
+}
+
+void QMediaFrameInputPrivate::postponeCheckReadyToSend()
+{
+ if (m_canSendMediaFrame && !m_postponeReadyToSendCheckRun) {
+ m_postponeReadyToSendCheckRun = true;
+ QMetaObject::invokeMethod(
+ q_ptr,
+ [this]() {
+ m_postponeReadyToSendCheckRun = false;
+ if (m_canSendMediaFrame)
+ emitReadyToSendMediaFrame();
+ },
+ Qt::QueuedConnection);
+ }
+}
+
+QT_END_NAMESPACE
diff --git a/src/multimedia/qmediaframeinput_p.h b/src/multimedia/qmediaframeinput_p.h
new file mode 100644
index 000000000..22277865d
--- /dev/null
+++ b/src/multimedia/qmediaframeinput_p.h
@@ -0,0 +1,74 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QMEDIAFRAMEINPUT_P_H
+#define QMEDIAFRAMEINPUT_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qmediacapturesession.h"
+#include <QtCore/private/qobject_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QMediaFrameInputPrivate : public QObjectPrivate
+{
+public:
+ void setCaptureSession(QMediaCaptureSession *session);
+
+ QMediaCaptureSession *captureSession() const { return m_captureSession; }
+
+protected:
+ template <typename Sender>
+ bool sendMediaFrame(Sender &&sender)
+ {
+ if (!m_canSendMediaFrame)
+ return false;
+
+ sender();
+ postponeCheckReadyToSend();
+ return true;
+ }
+
+ template <typename Sender, typename Signal>
+ void addUpdateSignal(Sender sender, Signal signal)
+ {
+ connect(sender, signal, this, &QMediaFrameInputPrivate::updateCanSendMediaFrame);
+ }
+
+ template <typename Sender, typename Signal>
+ void removeUpdateSignal(Sender sender, Signal signal)
+ {
+ disconnect(sender, signal, this, &QMediaFrameInputPrivate::updateCanSendMediaFrame);
+ }
+
+ void updateCanSendMediaFrame();
+
+private:
+ void postponeCheckReadyToSend();
+
+ virtual bool checkIfCanSendMediaFrame() const = 0;
+
+ virtual void emitReadyToSendMediaFrame() = 0;
+
+ virtual void updateCaptureSessionConnections(QMediaCaptureSession *prevSession,
+ QMediaCaptureSession *currentSession) = 0;
+
+private:
+ QMediaCaptureSession *m_captureSession = nullptr;
+ bool m_canSendMediaFrame = false;
+ bool m_postponeReadyToSendCheckRun = false;
+};
+
+QT_END_NAMESPACE
+
+#endif // QMEDIAFRAMEINPUT_P_H
diff --git a/src/multimedia/qmediainputencoderinterface_p.h b/src/multimedia/qmediainputencoderinterface_p.h
new file mode 100644
index 000000000..c199e59b4
--- /dev/null
+++ b/src/multimedia/qmediainputencoderinterface_p.h
@@ -0,0 +1,31 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QMEDIAINPUTENCODERINTERFACE_P_H
+#define QMEDIAINPUTENCODERINTERFACE_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/qtmultimediaglobal.h>
+
+QT_BEGIN_NAMESPACE
+
+class QMediaInputEncoderInterface
+{
+public:
+ virtual ~QMediaInputEncoderInterface() = default;
+ virtual bool canPushFrame() const = 0;
+};
+
+QT_END_NAMESPACE
+
+#endif // QMEDIAINPUTENCODERINTERFACE_P_H
diff --git a/src/multimedia/qmediametadata.cpp b/src/multimedia/qmediametadata.cpp
index dc238721f..afbaea5b7 100644
--- a/src/multimedia/qmediametadata.cpp
+++ b/src/multimedia/qmediametadata.cpp
@@ -2,14 +2,15 @@
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
#include "qmediametadata.h"
+
#include <QtCore/qcoreapplication.h>
-#include <qvariant.h>
-#include <qobject.h>
-#include <qdatetime.h>
-#include <qmediaformat.h>
-#include <qsize.h>
-#include <qurl.h>
-#include <qimage.h>
+#include <QtCore/qdatetime.h>
+#include <QtCore/qobject.h>
+#include <QtCore/qsize.h>
+#include <QtCore/qurl.h>
+#include <QtCore/qvariant.h>
+#include <QtGui/qimage.h>
+#include <QtMultimedia/qmediaformat.h>
QT_BEGIN_NAMESPACE
@@ -41,7 +42,7 @@ QT_BEGIN_NAMESPACE
Media attributes
\row \li MediaType \li The type of the media (audio, video, etc). \li QString
\row \li FileFormat \li The file format of the media. \li QMediaFormat::FileFormat
- \row \li Duration \li The duration in millseconds of the media. \li qint64
+ \row \li Duration \li The duration in milliseconds of the media. \li qint64
\header \li {3,1}
Audio attributes
@@ -53,6 +54,7 @@ QT_BEGIN_NAMESPACE
\row \li VideoFrameRate \li The frame rate of the media's video stream. \li qreal
\row \li VideoBitRate \li The bit rate of the media's video stream in bits per second. \li int
\row \li VideoCodec \li The codec of the media's video stream. \li QMediaFormat::VideoCodec
+ \row \li HasHdrContent \li True if video is intended for HDR display (FFmpeg media backend only). \li bool
\header \li {3,1}
Music attributes
@@ -129,6 +131,10 @@ QMetaType QMediaMetaData::keyType(Key key)
case Resolution:
return QMetaType::fromType<QSize>();
+
+ case HasHdrContent:
+ return QMetaType::fromType<bool>();
+
default:
return QMetaType::fromType<void>();
}
@@ -276,6 +282,7 @@ QMetaType QMediaMetaData::keyType(Key key)
\value CoverArtImage Media cover art
\value Orientation
\value Resolution
+ \value [since 6.8] HasHdrContent Video may have HDR content (read only, FFmpeg media backend only)
*/
/*!
@@ -385,6 +392,7 @@ QString QMediaMetaData::stringValue(QMediaMetaData::Key key) const
case Composer:
case Orientation:
case LeadPerformer:
+ case HasHdrContent:
return value.toString();
case Language: {
auto l = value.value<QLocale::Language>();
@@ -479,6 +487,8 @@ QString QMediaMetaData::metaDataKeyToString(QMediaMetaData::Key key)
return (QCoreApplication::translate("QMediaMetaData", "Resolution"));
case QMediaMetaData::LeadPerformer:
return (QCoreApplication::translate("QMediaMetaData", "Lead performer"));
+ case QMediaMetaData::HasHdrContent:
+ return (QCoreApplication::translate("QMediaMetaData", "Has HDR content"));
}
return QString();
}
diff --git a/src/multimedia/qmediametadata.h b/src/multimedia/qmediametadata.h
index d6f4477d3..0ff03dda2 100644
--- a/src/multimedia/qmediametadata.h
+++ b/src/multimedia/qmediametadata.h
@@ -57,11 +57,13 @@ public:
CoverArtImage,
Orientation,
- Resolution
+ Resolution,
+
+ HasHdrContent
};
Q_ENUM(Key)
- static constexpr int NumMetaData = Resolution + 1;
+ static constexpr int NumMetaData = HasHdrContent + 1;
// QMetaType typeForKey(Key k);
Q_INVOKABLE QVariant value(Key k) const { return data.value(k); }
diff --git a/src/multimedia/qsymbolsresolveutils.cpp b/src/multimedia/qsymbolsresolveutils.cpp
new file mode 100644
index 000000000..8441ac243
--- /dev/null
+++ b/src/multimedia/qsymbolsresolveutils.cpp
@@ -0,0 +1,79 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qsymbolsresolveutils_p.h"
+
+#include <qdebug.h>
+#include <algorithm>
+#include <qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcSymbolsResolver, "qt.multimedia.symbolsresolver");
+
+bool SymbolsResolver::isLazyLoadEnabled()
+{
+ static const bool lazyLoad =
+ !static_cast<bool>(qEnvironmentVariableIntValue("QT_INSTANT_LOAD_FFMPEG_STUBS"));
+ return lazyLoad;
+}
+
+SymbolsResolver::SymbolsResolver(const char *libLoggingName, LibraryLoader loader)
+ : m_libLoggingName(libLoggingName)
+{
+ Q_ASSERT(libLoggingName);
+ Q_ASSERT(loader);
+
+ auto library = loader();
+ if (library && library->isLoaded())
+ m_library = std::move(library);
+ else
+ qCWarning(qLcSymbolsResolver) << "Couldn't load" << m_libLoggingName << "library";
+}
+
+SymbolsResolver::SymbolsResolver(const char *libName, const char *version,
+ const char *libLoggingName)
+ : m_libLoggingName(libLoggingName ? libLoggingName : libName)
+{
+ Q_ASSERT(libName);
+ Q_ASSERT(version);
+
+ auto library = std::make_unique<QLibrary>(QString::fromLocal8Bit(libName),
+ QString::fromLocal8Bit(version));
+ if (library->load())
+ m_library = std::move(library);
+ else
+ qCWarning(qLcSymbolsResolver) << "Couldn't load" << m_libLoggingName << "library";
+}
+
+SymbolsResolver::~SymbolsResolver()
+{
+ if (m_library)
+ m_library->unload();
+}
+
+QFunctionPointer SymbolsResolver::initFunction(const char *funcName)
+{
+ if (!m_library)
+ return nullptr;
+ if (auto func = m_library->resolve(funcName))
+ return func;
+
+ qCWarning(qLcSymbolsResolver) << "Couldn't resolve" << m_libLoggingName << "symbol" << funcName;
+ m_library->unload();
+ m_library.reset();
+ return nullptr;
+}
+
+void SymbolsResolver::checkLibrariesLoaded(SymbolsMarker *begin, SymbolsMarker *end)
+{
+ if (m_library) {
+ qCDebug(qLcSymbolsResolver) << m_libLoggingName << "symbols resolved";
+ } else {
+ const auto size = reinterpret_cast<char *>(end) - reinterpret_cast<char *>(begin);
+ memset(begin, 0, size);
+ qCWarning(qLcSymbolsResolver) << "Couldn't resolve" << m_libLoggingName << "symbols";
+ }
+}
+
+QT_END_NAMESPACE
diff --git a/src/multimedia/qsymbolsresolveutils_p.h b/src/multimedia/qsymbolsresolveutils_p.h
new file mode 100644
index 000000000..98a552170
--- /dev/null
+++ b/src/multimedia/qsymbolsresolveutils_p.h
@@ -0,0 +1,178 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef Q_SYMBOLSRESOLVEUTILS
+#define Q_SYMBOLSRESOLVEUTILS
+
+#include <QtCore/qlibrary.h>
+#include <QtMultimedia/qtmultimediaexports.h>
+#include <tuple>
+#include <memory>
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+QT_BEGIN_NAMESPACE
+
+constexpr bool areVersionsEqual(const char lhs[], const char rhs[])
+{
+ int i = 0;
+ for (; lhs[i] && rhs[i]; ++i)
+ if (lhs[i] != rhs[i])
+ return false;
+ return lhs[i] == rhs[i];
+}
+
+constexpr bool areVersionsEqual(const char lhs[], int rhsInt)
+{
+ int lhsInt = 0;
+ for (int i = 0; lhs[i]; ++i) {
+ if (lhs[i] < '0' || lhs[i] > '9')
+ return false;
+
+ lhsInt *= 10;
+ lhsInt += lhs[i] - '0';
+ }
+
+ return lhsInt == rhsInt;
+}
+
+
+template <typename T>
+struct DefaultReturn
+{
+ template <typename... Arg>
+ T operator()(Arg &&...) { return val; }
+ T val;
+};
+
+template <>
+struct DefaultReturn<void>
+{
+ template <typename... Arg>
+ void operator()(Arg &&...) { }
+};
+
+template <typename...>
+struct FuncInfo;
+
+template <typename R, typename... A>
+struct FuncInfo<R(A...)>
+{
+ using Return = R;
+ using Args = std::tuple<A...>;
+};
+
+class Q_MULTIMEDIA_EXPORT SymbolsResolver
+{
+public:
+ using LibraryLoader = std::unique_ptr<QLibrary> (*)();
+ static bool isLazyLoadEnabled();
+
+ ~SymbolsResolver();
+protected:
+ SymbolsResolver(const char *libLoggingName, LibraryLoader loader);
+
+ SymbolsResolver(const char *libName, const char *version = "",
+ const char *libLoggingName = nullptr);
+
+ QFunctionPointer initFunction(const char *name);
+
+ struct SymbolsMarker {};
+ void checkLibrariesLoaded(SymbolsMarker *begin, SymbolsMarker *end);
+
+private:
+ const char *m_libLoggingName;
+ std::unique_ptr<QLibrary> m_library;
+};
+
+
+QT_END_NAMESPACE
+
+// clang-format off
+
+#define CHECK_VERSIONS(Name, NeededSoversion, DetectedVersion) \
+ static_assert(areVersionsEqual(NeededSoversion, DetectedVersion), \
+ "Configuration error: misleading " Name " versions!")
+
+#define BEGIN_INIT_FUNCS(...) \
+ QT_USE_NAMESPACE \
+ namespace { \
+ class SymbolsResolverImpl : SymbolsResolver { \
+ public: \
+ SymbolsResolverImpl() : SymbolsResolver(__VA_ARGS__) \
+ { checkLibrariesLoaded(&symbolsBegin, &symbolsEnd); } \
+ static const SymbolsResolverImpl& instance() \
+ { static const SymbolsResolverImpl instance; return instance; } \
+ SymbolsMarker symbolsBegin;
+
+#define INIT_FUNC(F) QFunctionPointer F = initFunction(#F);
+
+#define END_INIT_FUNCS() \
+ SymbolsMarker symbolsEnd; \
+ }; \
+ [[maybe_unused]] static const auto *instantResolver = \
+ SymbolsResolver::isLazyLoadEnabled() ? &SymbolsResolverImpl::instance() : nullptr; \
+ }
+
+
+#ifdef Q_EXPORT_STUB_SYMBOLS
+#define EXPORT_FUNC Q_MULTIMEDIA_EXPORT
+#else
+#define EXPORT_FUNC
+#endif
+
+#define DEFINE_FUNC_IMPL(F, Vars, TypesWithVars, ReturnFunc) \
+ using F##_ReturnType = FuncInfo<decltype(F)>::Return; \
+ extern "C" EXPORT_FUNC [[maybe_unused]] F##_ReturnType F(TypesWithVars(F)) { \
+ using F##_Type = F##_ReturnType (*)(TypesWithVars(F)); \
+ const auto f = SymbolsResolverImpl::instance().F; \
+ return f ? (reinterpret_cast<F##_Type>(f))(Vars()) : ReturnFunc(); \
+ }
+
+
+#define VAR(I) a##I
+#define VARS0()
+#define VARS1() VAR(0)
+#define VARS2() VARS1(), VAR(1)
+#define VARS3() VARS2(), VAR(2)
+#define VARS4() VARS3(), VAR(3)
+#define VARS5() VARS4(), VAR(4)
+#define VARS6() VARS5(), VAR(5)
+#define VARS7() VARS6(), VAR(6)
+#define VARS8() VARS7(), VAR(7)
+#define VARS9() VARS8(), VAR(8)
+#define VARS10() VARS9(), VAR(9)
+#define VARS11() VARS10(), VAR(10)
+
+#define TYPE_WITH_VAR(F, I) std::tuple_element_t<I, FuncInfo<decltype(F)>::Args> VAR(I)
+#define TYPES_WITH_VARS0(F)
+#define TYPES_WITH_VARS1(F) TYPE_WITH_VAR(F, 0)
+#define TYPES_WITH_VARS2(F) TYPES_WITH_VARS1(F), TYPE_WITH_VAR(F, 1)
+#define TYPES_WITH_VARS3(F) TYPES_WITH_VARS2(F), TYPE_WITH_VAR(F, 2)
+#define TYPES_WITH_VARS4(F) TYPES_WITH_VARS3(F), TYPE_WITH_VAR(F, 3)
+#define TYPES_WITH_VARS5(F) TYPES_WITH_VARS4(F), TYPE_WITH_VAR(F, 4)
+#define TYPES_WITH_VARS6(F) TYPES_WITH_VARS5(F), TYPE_WITH_VAR(F, 5)
+#define TYPES_WITH_VARS7(F) TYPES_WITH_VARS6(F), TYPE_WITH_VAR(F, 6)
+#define TYPES_WITH_VARS8(F) TYPES_WITH_VARS7(F), TYPE_WITH_VAR(F, 7)
+#define TYPES_WITH_VARS9(F) TYPES_WITH_VARS8(F), TYPE_WITH_VAR(F, 8)
+#define TYPES_WITH_VARS10(F) TYPES_WITH_VARS9(F), TYPE_WITH_VAR(F, 9)
+#define TYPES_WITH_VARS11(F) TYPES_WITH_VARS10(F), TYPE_WITH_VAR(F, 10)
+
+
+#define RET(F, ...) DefaultReturn<FuncInfo<decltype(F)>::Return>{__VA_ARGS__}
+
+#define DEFINE_FUNC(F, ArgsCount, /*Return value*/...) \
+ DEFINE_FUNC_IMPL(F, VARS##ArgsCount, TYPES_WITH_VARS##ArgsCount, RET(F, __VA_ARGS__));
+
+// clang-format on
+
+#endif // Q_SYMBOLSRESOLVEUTILS
diff --git a/src/multimedia/recording/qmediacapturesession.cpp b/src/multimedia/recording/qmediacapturesession.cpp
index 0ff804bf4..9df09acef 100644
--- a/src/multimedia/recording/qmediacapturesession.cpp
+++ b/src/multimedia/recording/qmediacapturesession.cpp
@@ -2,6 +2,7 @@
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
#include "qmediacapturesession.h"
+#include "qmediacapturesession_p.h"
#include "qaudiodevice.h"
#include "qcamera.h"
#include "qmediarecorder.h"
@@ -9,45 +10,31 @@
#include "qvideosink.h"
#include "qscreencapture.h"
#include "qwindowcapture.h"
-
-#include <qpointer.h>
+#include "qvideoframeinput.h"
#include "qplatformmediaintegration_p.h"
#include "qplatformmediacapture_p.h"
#include "qaudioinput.h"
+#include "qaudiobufferinput.h"
#include "qaudiooutput.h"
QT_BEGIN_NAMESPACE
-class QMediaCaptureSessionPrivate
+void QMediaCaptureSessionPrivate::setVideoSink(QVideoSink *sink)
{
-public:
- QMediaCaptureSession *q = nullptr;
- QPlatformMediaCaptureSession *captureSession = nullptr;
- QAudioInput *audioInput = nullptr;
- QAudioOutput *audioOutput = nullptr;
- QPointer<QCamera> camera;
- QPointer<QScreenCapture> screenCapture;
- QPointer<QWindowCapture> windowCapture;
- QPointer<QImageCapture> imageCapture;
- QPointer<QMediaRecorder> recorder;
- QPointer<QVideoSink> videoSink;
- QPointer<QObject> videoOutput;
-
- void setVideoSink(QVideoSink *sink)
- {
- if (sink == videoSink)
- return;
- if (videoSink)
- videoSink->setSource(nullptr);
- videoSink = sink;
- if (sink)
- sink->setSource(q);
- if (captureSession)
- captureSession->setVideoPreview(sink);
- emit q->videoOutputChanged();
- }
-};
+ Q_Q(QMediaCaptureSession);
+
+ if (sink == videoSink)
+ return;
+ if (videoSink)
+ videoSink->setSource(nullptr);
+ videoSink = sink;
+ if (sink)
+ sink->setSource(q);
+ if (captureSession)
+ captureSession->setVideoPreview(sink);
+ emit q->videoOutputChanged();
+}
/*!
\class QMediaCaptureSession
@@ -58,18 +45,23 @@ public:
\ingroup multimedia_video
\ingroup multimedia_audio
- The QMediaCaptureSession is the central class that manages capturing of media on the local device.
+ The QMediaCaptureSession is the central class that manages capturing of media on the local
+ device.
- You can connect a video input to QMediaCaptureSession using setCamera(), setScreenCapture() or setWindowCapture().
- A preview of the captured media can be seen by setting a QVideoWidget or QGraphicsVideoItem using setVideoOutput().
+ You can connect a video input to QMediaCaptureSession using setCamera(),
+ setScreenCapture(), setWindowCapture() or setVideoFrameInput().
+ A preview of the captured media can be seen by setting a QVideoWidget or QGraphicsVideoItem
+ using setVideoOutput().
- You can connect a microphone to QMediaCaptureSession using setAudioInput().
+ You can connect a microphone to QMediaCaptureSession using setAudioInput(), or set your
+ custom audio input using setAudioBufferInput().
The captured sound can be heard by routing the audio to an output device using setAudioOutput().
- You can capture still images from a camera by setting a QImageCapture object on the capture session,
- and record audio/video using a QMediaRecorder.
+ You can capture still images from a camera by setting a QImageCapture object on the capture
+ session, and record audio/video using a QMediaRecorder.
- \sa QCamera, QAudioDevice, QMediaRecorder, QImageCapture, QScreenCapture, QWindowCapture, QMediaRecorder, QGraphicsVideoItem
+ \sa QCamera, QAudioDevice, QMediaRecorder, QImageCapture, QScreenCapture, QWindowCapture,
+ QVideoFrameInput, QMediaRecorder, QGraphicsVideoItem
*/
/*!
@@ -129,14 +121,16 @@ public:
Creates a session for media capture from the \a parent object.
*/
QMediaCaptureSession::QMediaCaptureSession(QObject *parent)
- : QObject(parent),
- d_ptr(new QMediaCaptureSessionPrivate)
+ : QObject{ *new QMediaCaptureSessionPrivate, parent }
{
- d_ptr->q = this;
+ QT6_ONLY(Q_UNUSED(unused))
+
+ Q_D(QMediaCaptureSession);
+
auto maybeCaptureSession = QPlatformMediaIntegration::instance()->createCaptureSession();
if (maybeCaptureSession) {
- d_ptr->captureSession = maybeCaptureSession.value();
- d_ptr->captureSession->setCaptureSession(this);
+ d->captureSession.reset(maybeCaptureSession.value());
+ d->captureSession->setCaptureSession(this);
} else {
qWarning() << "Failed to initialize QMediaCaptureSession" << maybeCaptureSession.error();
}
@@ -147,16 +141,19 @@ QMediaCaptureSession::QMediaCaptureSession(QObject *parent)
*/
QMediaCaptureSession::~QMediaCaptureSession()
{
+ Q_D(QMediaCaptureSession);
+
setCamera(nullptr);
setRecorder(nullptr);
setImageCapture(nullptr);
setScreenCapture(nullptr);
setWindowCapture(nullptr);
+ setVideoFrameInput(nullptr);
+ setAudioBufferInput(nullptr);
setAudioInput(nullptr);
setAudioOutput(nullptr);
- d_ptr->setVideoSink(nullptr);
- delete d_ptr->captureSession;
- delete d_ptr;
+ d->setVideoSink(nullptr);
+ d->captureSession.reset();
}
/*!
\qmlproperty AudioInput QtMultimedia::CaptureSession::audioInput
@@ -171,7 +168,8 @@ QMediaCaptureSession::~QMediaCaptureSession()
*/
QAudioInput *QMediaCaptureSession::audioInput() const
{
- return d_ptr->audioInput;
+ Q_D(const QMediaCaptureSession);
+ return d->audioInput;
}
/*!
@@ -181,28 +179,69 @@ QAudioInput *QMediaCaptureSession::audioInput() const
*/
void QMediaCaptureSession::setAudioInput(QAudioInput *input)
{
- QAudioInput *oldInput = d_ptr->audioInput;
+ Q_D(QMediaCaptureSession);
+
+ QAudioInput *oldInput = d->audioInput;
if (oldInput == input)
return;
// To avoid double emit of audioInputChanged
// from recursive setAudioInput(nullptr) call.
- d_ptr->audioInput = nullptr;
+ d->audioInput = nullptr;
- if (d_ptr->captureSession)
- d_ptr->captureSession->setAudioInput(nullptr);
+ if (d->captureSession)
+ d->captureSession->setAudioInput(nullptr);
if (oldInput)
oldInput->setDisconnectFunction({});
if (input) {
input->setDisconnectFunction([this](){ setAudioInput(nullptr); });
- if (d_ptr->captureSession)
- d_ptr->captureSession->setAudioInput(input->handle());
+ if (d->captureSession)
+ d->captureSession->setAudioInput(input->handle());
}
- d_ptr->audioInput = input;
+ d->audioInput = input;
emit audioInputChanged();
}
/*!
+ \property QMediaCaptureSession::audioBufferInput
+ \since 6.8
+
+ \brief The object used to send custom audio buffers to \l QMediaRecorder.
+*/
+QAudioBufferInput *QMediaCaptureSession::audioBufferInput() const
+{
+ Q_D(const QMediaCaptureSession);
+
+ return d->audioBufferInput;
+}
+
+void QMediaCaptureSession::setAudioBufferInput(QAudioBufferInput *input)
+{
+ Q_D(QMediaCaptureSession);
+
+ // TODO: come up with an unification of the captures setup
+ QAudioBufferInput *oldInput = d->audioBufferInput;
+ if (oldInput == input)
+ return;
+ d->audioBufferInput = input;
+ if (d->captureSession)
+ d->captureSession->setAudioBufferInput(nullptr);
+ if (oldInput) {
+ if (oldInput->captureSession() && oldInput->captureSession() != this)
+ oldInput->captureSession()->setAudioBufferInput(nullptr);
+ oldInput->setCaptureSession(nullptr);
+ }
+ if (input) {
+ if (input->captureSession())
+ input->captureSession()->setAudioBufferInput(nullptr);
+ if (d->captureSession)
+ d->captureSession->setAudioBufferInput(input->platformAudioBufferInput());
+ input->setCaptureSession(this);
+ }
+ emit audioBufferInputChanged();
+}
+
+/*!
\qmlproperty Camera QtMultimedia::CaptureSession::camera
\brief The camera used to capture video.
@@ -221,18 +260,22 @@ void QMediaCaptureSession::setAudioInput(QAudioInput *input)
*/
QCamera *QMediaCaptureSession::camera() const
{
- return d_ptr->camera;
+ Q_D(const QMediaCaptureSession);
+
+ return d->camera;
}
void QMediaCaptureSession::setCamera(QCamera *camera)
{
+ Q_D(QMediaCaptureSession);
+
// TODO: come up with an unification of the captures setup
- QCamera *oldCamera = d_ptr->camera;
+ QCamera *oldCamera = d->camera;
if (oldCamera == camera)
return;
- d_ptr->camera = camera;
- if (d_ptr->captureSession)
- d_ptr->captureSession->setCamera(nullptr);
+ d->camera = camera;
+ if (d->captureSession)
+ d->captureSession->setCamera(nullptr);
if (oldCamera) {
if (oldCamera->captureSession() && oldCamera->captureSession() != this)
oldCamera->captureSession()->setCamera(nullptr);
@@ -241,8 +284,8 @@ void QMediaCaptureSession::setCamera(QCamera *camera)
if (camera) {
if (camera->captureSession())
camera->captureSession()->setCamera(nullptr);
- if (d_ptr->captureSession)
- d_ptr->captureSession->setCamera(camera->platformCamera());
+ if (d->captureSession)
+ d->captureSession->setCamera(camera->platformCamera());
camera->setCaptureSession(this);
}
emit cameraChanged();
@@ -269,18 +312,22 @@ void QMediaCaptureSession::setCamera(QCamera *camera)
*/
QScreenCapture *QMediaCaptureSession::screenCapture()
{
- return d_ptr ? d_ptr->screenCapture : nullptr;
+ Q_D(QMediaCaptureSession);
+
+ return d->screenCapture;
}
void QMediaCaptureSession::setScreenCapture(QScreenCapture *screenCapture)
{
+ Q_D(QMediaCaptureSession);
+
// TODO: come up with an unification of the captures setup
- QScreenCapture *oldScreenCapture = d_ptr->screenCapture;
+ QScreenCapture *oldScreenCapture = d->screenCapture;
if (oldScreenCapture == screenCapture)
return;
- d_ptr->screenCapture = screenCapture;
- if (d_ptr->captureSession)
- d_ptr->captureSession->setScreenCapture(nullptr);
+ d->screenCapture = screenCapture;
+ if (d->captureSession)
+ d->captureSession->setScreenCapture(nullptr);
if (oldScreenCapture) {
if (oldScreenCapture->captureSession() && oldScreenCapture->captureSession() != this)
oldScreenCapture->captureSession()->setScreenCapture(nullptr);
@@ -289,8 +336,8 @@ void QMediaCaptureSession::setScreenCapture(QScreenCapture *screenCapture)
if (screenCapture) {
if (screenCapture->captureSession())
screenCapture->captureSession()->setScreenCapture(nullptr);
- if (d_ptr->captureSession)
- d_ptr->captureSession->setScreenCapture(screenCapture->platformScreenCapture());
+ if (d->captureSession)
+ d->captureSession->setScreenCapture(screenCapture->platformScreenCapture());
screenCapture->setCaptureSession(this);
}
emit screenCaptureChanged();
@@ -315,19 +362,23 @@ void QMediaCaptureSession::setScreenCapture(QScreenCapture *screenCapture)
    Record a window by adding a window capture object
to the capture session using this property.
*/
-QWindowCapture *QMediaCaptureSession::windowCapture() {
- return d_ptr ? d_ptr->windowCapture : nullptr;
+QWindowCapture *QMediaCaptureSession::windowCapture()
+{
+ Q_D(QMediaCaptureSession);
+ return d->windowCapture;
}
void QMediaCaptureSession::setWindowCapture(QWindowCapture *windowCapture)
{
+ Q_D(QMediaCaptureSession);
+
// TODO: come up with an unification of the captures setup
- QWindowCapture *oldCapture = d_ptr->windowCapture;
+ QWindowCapture *oldCapture = d->windowCapture;
if (oldCapture == windowCapture)
return;
- d_ptr->windowCapture = windowCapture;
- if (d_ptr->captureSession)
- d_ptr->captureSession->setWindowCapture(nullptr);
+ d->windowCapture = windowCapture;
+ if (d->captureSession)
+ d->captureSession->setWindowCapture(nullptr);
if (oldCapture) {
if (oldCapture->captureSession() && oldCapture->captureSession() != this)
oldCapture->captureSession()->setWindowCapture(nullptr);
@@ -336,14 +387,52 @@ void QMediaCaptureSession::setWindowCapture(QWindowCapture *windowCapture)
if (windowCapture) {
if (windowCapture->captureSession())
windowCapture->captureSession()->setWindowCapture(nullptr);
- if (d_ptr->captureSession)
- d_ptr->captureSession->setWindowCapture(windowCapture->platformWindowCapture());
+ if (d->captureSession)
+ d->captureSession->setWindowCapture(windowCapture->platformWindowCapture());
windowCapture->setCaptureSession(this);
}
emit windowCaptureChanged();
}
/*!
+ \property QMediaCaptureSession::videoFrameInput
+ \since 6.8
+
+ \brief The object used to send custom video frames to
+ \l QMediaRecorder or a video output.
+*/
+QVideoFrameInput *QMediaCaptureSession::videoFrameInput() const
+{
+ Q_D(const QMediaCaptureSession);
+ return d->videoFrameInput;
+}
+
+void QMediaCaptureSession::setVideoFrameInput(QVideoFrameInput *input)
+{
+ Q_D(QMediaCaptureSession);
+ // TODO: come up with an unification of the captures setup
+ QVideoFrameInput *oldInput = d->videoFrameInput;
+ if (oldInput == input)
+ return;
+ d->videoFrameInput = input;
+ if (d->captureSession)
+ d->captureSession->setVideoFrameInput(nullptr);
+ if (oldInput) {
+ if (oldInput->captureSession() && oldInput->captureSession() != this)
+ oldInput->captureSession()->setVideoFrameInput(nullptr);
+ oldInput->setCaptureSession(nullptr);
+ }
+ if (input) {
+ if (input->captureSession())
+ input->captureSession()->setVideoFrameInput(nullptr);
+ if (d->captureSession)
+ d->captureSession->setVideoFrameInput(input->platformVideoFrameInput());
+ input->setCaptureSession(this);
+ }
+ emit videoFrameInputChanged();
+}
+
+/*!
\qmlproperty ImageCapture QtMultimedia::CaptureSession::imageCapture
\brief The object used to capture still images.
@@ -361,18 +450,22 @@ void QMediaCaptureSession::setWindowCapture(QWindowCapture *windowCapture)
*/
QImageCapture *QMediaCaptureSession::imageCapture()
{
- return d_ptr->imageCapture;
+ Q_D(QMediaCaptureSession);
+
+ return d->imageCapture;
}
void QMediaCaptureSession::setImageCapture(QImageCapture *imageCapture)
{
+ Q_D(QMediaCaptureSession);
+
// TODO: come up with an unification of the captures setup
- QImageCapture *oldImageCapture = d_ptr->imageCapture;
+ QImageCapture *oldImageCapture = d->imageCapture;
if (oldImageCapture == imageCapture)
return;
- d_ptr->imageCapture = imageCapture;
- if (d_ptr->captureSession)
- d_ptr->captureSession->setImageCapture(nullptr);
+ d->imageCapture = imageCapture;
+ if (d->captureSession)
+ d->captureSession->setImageCapture(nullptr);
if (oldImageCapture) {
if (oldImageCapture->captureSession() && oldImageCapture->captureSession() != this)
oldImageCapture->captureSession()->setImageCapture(nullptr);
@@ -381,8 +474,8 @@ void QMediaCaptureSession::setImageCapture(QImageCapture *imageCapture)
if (imageCapture) {
if (imageCapture->captureSession())
imageCapture->captureSession()->setImageCapture(nullptr);
- if (d_ptr->captureSession)
- d_ptr->captureSession->setImageCapture(imageCapture->platformImageCapture());
+ if (d->captureSession)
+ d->captureSession->setImageCapture(imageCapture->platformImageCapture());
imageCapture->setCaptureSession(this);
}
emit imageCaptureChanged();
@@ -406,17 +499,19 @@ void QMediaCaptureSession::setImageCapture(QImageCapture *imageCapture)
QMediaRecorder *QMediaCaptureSession::recorder()
{
- return d_ptr->recorder;
+ Q_D(QMediaCaptureSession);
+ return d->recorder;
}
void QMediaCaptureSession::setRecorder(QMediaRecorder *recorder)
{
- QMediaRecorder *oldRecorder = d_ptr->recorder;
+ Q_D(QMediaCaptureSession);
+ QMediaRecorder *oldRecorder = d->recorder;
if (oldRecorder == recorder)
return;
- d_ptr->recorder = recorder;
- if (d_ptr->captureSession)
- d_ptr->captureSession->setMediaRecorder(nullptr);
+ d->recorder = recorder;
+ if (d->captureSession)
+ d->captureSession->setMediaRecorder(nullptr);
if (oldRecorder) {
if (oldRecorder->captureSession() && oldRecorder->captureSession() != this)
oldRecorder->captureSession()->setRecorder(nullptr);
@@ -425,8 +520,8 @@ void QMediaCaptureSession::setRecorder(QMediaRecorder *recorder)
if (recorder) {
if (recorder->captureSession())
recorder->captureSession()->setRecorder(nullptr);
- if (d_ptr->captureSession)
- d_ptr->captureSession->setMediaRecorder(recorder->platformRecoder());
+ if (d->captureSession)
+ d->captureSession->setMediaRecorder(recorder->platformRecoder());
recorder->setCaptureSession(this);
}
emit recorderChanged();
@@ -504,25 +599,27 @@ QVideoSink *QMediaCaptureSession::videoSink() const
*/
void QMediaCaptureSession::setAudioOutput(QAudioOutput *output)
{
- QAudioOutput *oldOutput = d_ptr->audioOutput;
+ Q_D(QMediaCaptureSession);
+
+ QAudioOutput *oldOutput = d->audioOutput;
if (oldOutput == output)
return;
// We don't want to end up with signal emitted
    // twice (from recursive call setAudioOutput(nullptr)
// from oldOutput->setDisconnectFunction():
- d_ptr->audioOutput = nullptr;
+ d->audioOutput = nullptr;
- if (d_ptr->captureSession)
- d_ptr->captureSession->setAudioOutput(nullptr);
+ if (d->captureSession)
+ d->captureSession->setAudioOutput(nullptr);
if (oldOutput)
oldOutput->setDisconnectFunction({});
if (output) {
output->setDisconnectFunction([this](){ setAudioOutput(nullptr); });
- if (d_ptr->captureSession)
- d_ptr->captureSession->setAudioOutput(output->handle());
+ if (d->captureSession)
+ d->captureSession->setAudioOutput(output->handle());
}
- d_ptr->audioOutput = output;
+ d->audioOutput = output;
emit audioOutputChanged();
}
/*!
@@ -548,7 +645,8 @@ QAudioOutput *QMediaCaptureSession::audioOutput() const
*/
QPlatformMediaCaptureSession *QMediaCaptureSession::platformSession() const
{
- return d_ptr->captureSession;
+ Q_D(const QMediaCaptureSession);
+ return d->captureSession.get();
}
/*!
\qmlsignal QtMultimedia::CaptureSession::audioInputChanged()
diff --git a/src/multimedia/recording/qmediacapturesession.h b/src/multimedia/recording/qmediacapturesession.h
index c613c3615..219c382d1 100644
--- a/src/multimedia/recording/qmediacapturesession.h
+++ b/src/multimedia/recording/qmediacapturesession.h
@@ -11,6 +11,7 @@ QT_BEGIN_NAMESPACE
class QCamera;
class QAudioInput;
+class QAudioBufferInput;
class QAudioOutput;
class QCameraDevice;
class QImageCapture;
@@ -19,18 +20,23 @@ class QPlatformMediaCaptureSession;
class QVideoSink;
class QScreenCapture;
class QWindowCapture;
+class QVideoFrameInput;
class QMediaCaptureSessionPrivate;
class Q_MULTIMEDIA_EXPORT QMediaCaptureSession : public QObject
{
Q_OBJECT
Q_PROPERTY(QAudioInput *audioInput READ audioInput WRITE setAudioInput NOTIFY audioInputChanged)
+ Q_PROPERTY(QAudioBufferInput *audioBufferInput READ audioBufferInput WRITE setAudioBufferInput
+ NOTIFY audioBufferInputChanged)
Q_PROPERTY(QAudioOutput *audioOutput READ audioOutput WRITE setAudioOutput NOTIFY audioOutputChanged)
Q_PROPERTY(QCamera *camera READ camera WRITE setCamera NOTIFY cameraChanged)
Q_PROPERTY(
QScreenCapture *screenCapture READ screenCapture WRITE setScreenCapture NOTIFY screenCaptureChanged)
Q_PROPERTY(
QWindowCapture *windowCapture READ windowCapture WRITE setWindowCapture NOTIFY windowCaptureChanged)
+ Q_PROPERTY(QVideoFrameInput *videoFrameInput READ videoFrameInput WRITE setVideoFrameInput
+ NOTIFY videoFrameInputChanged)
Q_PROPERTY(QImageCapture *imageCapture READ imageCapture WRITE setImageCapture NOTIFY imageCaptureChanged)
Q_PROPERTY(QMediaRecorder *recorder READ recorder WRITE setRecorder NOTIFY recorderChanged)
Q_PROPERTY(QObject *videoOutput READ videoOutput WRITE setVideoOutput NOTIFY videoOutputChanged)
@@ -41,6 +47,9 @@ public:
QAudioInput *audioInput() const;
void setAudioInput(QAudioInput *input);
+ QAudioBufferInput *audioBufferInput() const;
+ void setAudioBufferInput(QAudioBufferInput *input);
+
QCamera *camera() const;
void setCamera(QCamera *camera);
@@ -53,6 +62,9 @@ public:
QWindowCapture *windowCapture();
void setWindowCapture(QWindowCapture *windowCapture);
+ QVideoFrameInput *videoFrameInput() const;
+ void setVideoFrameInput(QVideoFrameInput *input);
+
QMediaRecorder *recorder();
void setRecorder(QMediaRecorder *recorder);
@@ -69,16 +81,22 @@ public:
Q_SIGNALS:
void audioInputChanged();
+ void audioBufferInputChanged();
void cameraChanged();
void screenCaptureChanged();
void windowCaptureChanged();
+ void videoFrameInputChanged();
void imageCaptureChanged();
void recorderChanged();
void videoOutputChanged();
void audioOutputChanged();
private:
- QMediaCaptureSessionPrivate *d_ptr;
+ friend class QPlatformMediaCaptureSession;
+
+ // ### Qt7: remove unused member
+ QT6_ONLY(void *unused = nullptr;) // for ABI compatibility
+
Q_DISABLE_COPY(QMediaCaptureSession)
Q_DECLARE_PRIVATE(QMediaCaptureSession)
};
diff --git a/src/multimedia/recording/qmediacapturesession_p.h b/src/multimedia/recording/qmediacapturesession_p.h
new file mode 100644
index 000000000..cba222993
--- /dev/null
+++ b/src/multimedia/recording/qmediacapturesession_p.h
@@ -0,0 +1,53 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QMEDIACAPTURESESSION_P_H
+#define QMEDIACAPTURESESSION_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/qmediacapturesession.h>
+
+#include <QtCore/qpointer.h>
+#include <QtCore/private/qobject_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QMediaCaptureSessionPrivate : public QObjectPrivate
+{
+public:
+ static QMediaCaptureSessionPrivate *get(QMediaCaptureSession *session)
+ {
+ return reinterpret_cast<QMediaCaptureSessionPrivate *>(QObjectPrivate::get(session));
+ }
+
+ Q_DECLARE_PUBLIC(QMediaCaptureSession)
+
+ std::unique_ptr<QPlatformMediaCaptureSession> captureSession;
+ QAudioInput *audioInput = nullptr;
+ QPointer<QAudioBufferInput> audioBufferInput;
+ QAudioOutput *audioOutput = nullptr;
+ QPointer<QCamera> camera;
+ QPointer<QScreenCapture> screenCapture;
+ QPointer<QWindowCapture> windowCapture;
+ QPointer<QVideoFrameInput> videoFrameInput;
+ QPointer<QImageCapture> imageCapture;
+ QPointer<QMediaRecorder> recorder;
+ QPointer<QVideoSink> videoSink;
+ QPointer<QObject> videoOutput;
+
+ void setVideoSink(QVideoSink *sink);
+};
+
+QT_END_NAMESPACE
+
+#endif // QMEDIACAPTURESESSION_P_H
diff --git a/src/multimedia/recording/qmediarecorder.cpp b/src/multimedia/recording/qmediarecorder.cpp
index a7f5a31b8..9fe1e4501 100644
--- a/src/multimedia/recording/qmediarecorder.cpp
+++ b/src/multimedia/recording/qmediarecorder.cpp
@@ -227,7 +227,7 @@ void QMediaRecorder::setOutputLocation(const QUrl &location)
/*!
Set the output IO device for media content.
- The \a device must have been opened in the \l{QIODevice::Write}{Write} or
+ The \a device must have been opened in the \l{QIODevice::WriteOnly}{WriteOnly} or
\l{QIODevice::ReadWrite}{ReadWrite} modes before the recording starts.
The media recorder doesn't take ownership of the specified \a device.
@@ -577,6 +577,42 @@ void QMediaRecorder::addMetaData(const QMediaMetaData &metaData)
data.insert(k, metaData.value(k));
setMetaData(data);
}
+
+/*!
+ \property QMediaRecorder::autoStop
+
+ This property controls whether the media recorder stops automatically when
+ all media inputs have reported the end of the stream or have been deactivated.
+
+ The end of the stream is reported by sending an empty media frame,
+ which you can send explicitly via \l QVideoFrameInput or \l QAudioBufferInput.
+
+ Video inputs, specifically, \l QCamera, \l QScreenCapture and \l QWindowCapture,
+ can be deactivated via the function \c setActive.
+
+ Defaults to \c false.
+
+ \sa QCamera, QScreenCapture, QWindowCapture
+*/
+
+bool QMediaRecorder::autoStop() const
+{
+ Q_D(const QMediaRecorder);
+
+ return d->autoStop;
+}
+
+void QMediaRecorder::setAutoStop(bool autoStop)
+{
+ Q_D(QMediaRecorder);
+
+ if (d->autoStop == autoStop)
+ return;
+
+ d->autoStop = autoStop;
+ emit autoStopChanged();
+}
+
/*!
\qmlsignal QtMultimedia::MediaRecorder::metaDataChanged()
diff --git a/src/multimedia/recording/qmediarecorder.h b/src/multimedia/recording/qmediarecorder.h
index fed276baf..a73d9f8af 100644
--- a/src/multimedia/recording/qmediarecorder.h
+++ b/src/multimedia/recording/qmediarecorder.h
@@ -44,6 +44,7 @@ class Q_MULTIMEDIA_EXPORT QMediaRecorder : public QObject
Q_PROPERTY(int audioBitRate READ audioBitRate WRITE setAudioBitRate NOTIFY audioBitRateChanged)
Q_PROPERTY(int audioChannelCount READ audioChannelCount WRITE setAudioChannelCount NOTIFY audioChannelCountChanged)
Q_PROPERTY(int audioSampleRate READ audioSampleRate WRITE setAudioSampleRate NOTIFY audioSampleRateChanged)
+ Q_PROPERTY(bool autoStop READ autoStop WRITE setAutoStop NOTIFY autoStopChanged)
public:
enum Quality
{
@@ -134,6 +135,9 @@ public:
void setMetaData(const QMediaMetaData &metaData);
void addMetaData(const QMediaMetaData &metaData);
+ bool autoStop() const;
+ void setAutoStop(bool autoStop);
+
QMediaCaptureSession *captureSession() const;
QPlatformMediaRecorder *platformRecoder() const;
@@ -162,6 +166,7 @@ Q_SIGNALS:
void audioBitRateChanged();
void audioChannelCountChanged();
void audioSampleRateChanged();
+ void autoStopChanged();
private:
QMediaRecorderPrivate *d_ptr;
diff --git a/src/multimedia/recording/qmediarecorder_p.h b/src/multimedia/recording/qmediarecorder_p.h
index 193aa5f00..896f6c368 100644
--- a/src/multimedia/recording/qmediarecorder_p.h
+++ b/src/multimedia/recording/qmediarecorder_p.h
@@ -38,6 +38,7 @@ public:
QMediaCaptureSession *captureSession = nullptr;
QPlatformMediaRecorder *control = nullptr;
QString initErrorMessage;
+ bool autoStop = false;
bool settingsChanged = false;
diff --git a/src/multimedia/recording/qscreencapture-limitations.qdocinc b/src/multimedia/recording/qscreencapture-limitations.qdocinc
index cac51df02..240a1a389 100644
--- a/src/multimedia/recording/qscreencapture-limitations.qdocinc
+++ b/src/multimedia/recording/qscreencapture-limitations.qdocinc
@@ -1,22 +1,25 @@
-// Copyright (C) 2023 The Qt Company Ltd.
+// Copyright (C) 2024 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GFDL-1.3-no-invariants-only
/*!
//! [content]
\section1 Screen Capture Limitations
- On Qt 6.5.2 and 6.5.3, the following limitations apply to using \1ScreenCapture:
+ On Qt 6.5.2 and above, the following limitations apply to using \1ScreenCapture:
\list
\li It is only supported with the FFmpeg backend.
- \li It is supported on all desktop platforms, except Linux with Wayland
- compositor, due to Wayland protocol restrictions and limitations.
+ \li It is unsupported on Linux with Wayland compositor, due to Wayland
+ protocol restrictions and limitations.
\li It is not supported on mobile operating systems, except on Android.
There, you might run into performance issues as the class is currently
implemented via QScreen::grabWindow, which is not optimal for the use case.
- \li On Linux, it works with X11, but it has not been tested on embedded.
+ \li On embedded with EGLFS, it has limited functionality. For Qt Quick
+ applications, the class is currently implemented via
+ QQuickWindow::grabWindow, which can cause performance issues.
\li In most cases, we set a screen capture frame rate that equals the screen
refresh rate, except on Windows, where the rate might be flexible.
Such a frame rate (75/120 FPS) might cause performance issues on weak
- CPUs if the captured screen is of 4K resolution.
+ CPUs if the captured screen is of 4K resolution. On EGLFS, the capture
+ frame rate is currently locked to 30 FPS.
\endlist
//! [content]
*/
diff --git a/src/multimedia/recording/qvideoframeinput.cpp b/src/multimedia/recording/qvideoframeinput.cpp
new file mode 100644
index 000000000..cfeb911cc
--- /dev/null
+++ b/src/multimedia/recording/qvideoframeinput.cpp
@@ -0,0 +1,156 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qvideoframeinput.h"
+#include "qmediaframeinput_p.h"
+#include "qmediainputencoderinterface_p.h"
+#include "qplatformvideoframeinput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+class QVideoFrameInputPrivate : public QMediaFrameInputPrivate
+{
+public:
+ QVideoFrameInputPrivate(QVideoFrameInput *q) : q(q) { }
+
+ bool sendVideoFrame(const QVideoFrame &frame)
+ {
+ return sendMediaFrame([&]() { emit m_platfromVideoFrameInput->newVideoFrame(frame); });
+ }
+
+ void initialize()
+ {
+ m_platfromVideoFrameInput = std::make_unique<QPlatformVideoFrameInput>();
+ addUpdateSignal(m_platfromVideoFrameInput.get(), &QPlatformVideoFrameInput::encoderUpdated);
+ }
+
+ void uninitialize()
+ {
+ m_platfromVideoFrameInput.reset();
+
+ if (captureSession())
+ captureSession()->setVideoFrameInput(nullptr);
+ }
+
+ QPlatformVideoFrameInput *platfromVideoFrameInput() const
+ {
+ return m_platfromVideoFrameInput.get();
+ }
+
+protected:
+ void updateCaptureSessionConnections(QMediaCaptureSession *prevSession,
+ QMediaCaptureSession *newSession) override
+ {
+ if (prevSession)
+ removeUpdateSignal(prevSession, &QMediaCaptureSession::videoOutputChanged);
+
+ if (newSession)
+ addUpdateSignal(newSession, &QMediaCaptureSession::videoOutputChanged);
+ }
+
+ bool checkIfCanSendMediaFrame() const override
+ {
+ if (auto encoderInterface = m_platfromVideoFrameInput->encoderInterface())
+ return encoderInterface->canPushFrame();
+
+ return captureSession()->videoOutput() || captureSession()->videoSink();
+ }
+
+ void emitReadyToSendMediaFrame() override { emit q->readyToSendVideoFrame(); }
+
+private:
+ QVideoFrameInput *q = nullptr;
+ std::unique_ptr<QPlatformVideoFrameInput> m_platfromVideoFrameInput;
+};
+
+/*!
+ \class QVideoFrameInput
+ \inmodule QtMultimedia
+ \ingroup multimedia
+ \ingroup multimedia_video
+ \since 6.8
+
+ \brief The QVideoFrameInput class is used for providing custom video frames
+ to \l QMediaRecorder or a video output through \l QMediaCaptureSession.
+
+ \sa QMediaRecorder, QMediaCaptureSession, QVideoSink, QVideoOutput
+*/
+
+/*!
+ Constructs a new QVideoFrameInput object with \a parent.
+*/
+QVideoFrameInput::QVideoFrameInput(QObject *parent)
+ : QObject(*new QVideoFrameInputPrivate(this), parent)
+{
+ Q_D(QVideoFrameInput);
+ d->initialize();
+}
+
+/*!
+ Destroys the object.
+ */
+QVideoFrameInput::~QVideoFrameInput()
+{
+ Q_D(QVideoFrameInput);
+ d->uninitialize();
+}
+
+/*!
+ Sends \l QVideoFrame to \l QMediaRecorder or a video output
+ through \l QMediaCaptureSession.
+
+ Returns \c true if the specified \a frame has been sent successfully
+ to the destination. Returns \c false, if the frame hasn't been sent,
+ which can happen if the instance is not assigned to
+ \l QMediaCaptureSession, the session doesn't have video outputs or
+ a media recorder, the media recorder is not started or its queue is full.
+ The signal \l readyToSendVideoFrame will be sent as soon as
+ the destination is able to handle a new frame.
+
+ Sending of an empty video frame is treated by \l QMediaRecorder
+ as an end of the input stream. QMediaRecorder stops the recording
+ automatically if \l QMediaRecorder::autoStop is \c true and
+ all the inputs have reported the end of the stream.
+*/
+bool QVideoFrameInput::sendVideoFrame(const QVideoFrame &frame)
+{
+ Q_D(QVideoFrameInput);
+ return d->sendVideoFrame(frame);
+}
+
+/*!
+ Returns the capture session this video frame input is connected to, or
+ a \c nullptr if the video frame input is not connected to a capture session.
+
+ Use QMediaCaptureSession::setVideoFrameInput() to connect
+ the video frame input to a session.
+*/
+QMediaCaptureSession *QVideoFrameInput::captureSession() const
+{
+ Q_D(const QVideoFrameInput);
+ return d->captureSession();
+}
+
+void QVideoFrameInput::setCaptureSession(QMediaCaptureSession *captureSession)
+{
+ Q_D(QVideoFrameInput);
+ d->setCaptureSession(captureSession);
+}
+
+QPlatformVideoFrameInput *QVideoFrameInput::platformVideoFrameInput() const
+{
+ Q_D(const QVideoFrameInput);
+ return d->platfromVideoFrameInput();
+}
+
+/*!
+ \fn void QVideoFrameInput::readyToSendVideoFrame()
+
+ Signals that a new frame can be sent to the video frame input.
+ After receiving the signal, if you have frames to be sent, invoke \l sendVideoFrame
+ once or in a loop until it returns \c false.
+
+ \sa sendVideoFrame()
+*/
+
+QT_END_NAMESPACE
diff --git a/src/multimedia/recording/qvideoframeinput.h b/src/multimedia/recording/qvideoframeinput.h
new file mode 100644
index 000000000..6617b051f
--- /dev/null
+++ b/src/multimedia/recording/qvideoframeinput.h
@@ -0,0 +1,44 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QVIDEOFRAMEINPUT_H
+#define QVIDEOFRAMEINPUT_H
+
+#include <QtMultimedia/qtmultimediaexports.h>
+#include <QtMultimedia/qvideoframe.h>
+#include <QtCore/qobject.h>
+
+QT_BEGIN_NAMESPACE
+
+class QPlatformVideoFrameInput;
+class QVideoFrameInputPrivate;
+class QMediaCaptureSession;
+
+class Q_MULTIMEDIA_EXPORT QVideoFrameInput : public QObject
+{
+ Q_OBJECT
+public:
+ explicit QVideoFrameInput(QObject *parent = nullptr);
+
+ ~QVideoFrameInput() override;
+
+ bool sendVideoFrame(const QVideoFrame &frame);
+
+ QMediaCaptureSession *captureSession() const;
+
+Q_SIGNALS:
+ void readyToSendVideoFrame();
+
+private:
+ void setCaptureSession(QMediaCaptureSession *captureSession);
+
+ QPlatformVideoFrameInput *platformVideoFrameInput() const;
+
+ friend class QMediaCaptureSession;
+ Q_DISABLE_COPY(QVideoFrameInput)
+ Q_DECLARE_PRIVATE(QVideoFrameInput)
+};
+
+QT_END_NAMESPACE
+
+#endif // QVIDEOFRAMEINPUT_H
diff --git a/src/multimedia/video/qabstractvideobuffer.cpp b/src/multimedia/video/qabstractvideobuffer.cpp
index d65438855..7368082b1 100644
--- a/src/multimedia/video/qabstractvideobuffer.cpp
+++ b/src/multimedia/video/qabstractvideobuffer.cpp
@@ -1,139 +1,115 @@
-// Copyright (C) 2016 The Qt Company Ltd.
+// Copyright (C) 2024 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-#include "qabstractvideobuffer_p.h"
-
-#include <qvariant.h>
-#include <rhi/qrhi.h>
-
-#include <QDebug>
-
+#include "qabstractvideobuffer.h"
QT_BEGIN_NAMESPACE
/*!
\class QAbstractVideoBuffer
- \internal
+ \since 6.8
\brief The QAbstractVideoBuffer class is an abstraction for video data.
\inmodule QtMultimedia
\ingroup multimedia
\ingroup multimedia_video
- The QVideoFrame class makes use of a QAbstractVideoBuffer internally to reference a buffer of
- video data. Quite often video data buffers may reside in video memory rather than system
- memory, and this class provides an abstraction of the location.
-
- In addition, creating a subclass of QAbstractVideoBuffer will allow you to construct video
- frames from preallocated or static buffers. This caters for cases where the QVideoFrame constructors
- taking a QByteArray or a QImage do not suffice. This may be necessary when implementing
- a new hardware accelerated video system, for example.
+ The \l QVideoFrame class makes use of a QAbstractVideoBuffer internally to reference a buffer of
+ video data. Creating a subclass of QAbstractVideoBuffer allows you to construct video
+ frames from preallocated or static buffers. The subclass can contain a hardware buffer,
+ and implement access to the data by mapping the buffer to CPU memory.
The contents of a buffer can be accessed by mapping the buffer to memory using the map()
- function, which returns a pointer to memory containing the contents of the video buffer.
- The memory returned by map() is released by calling the unmap() function.
+ function, which returns a structure containing information about plane layout of the current
+ video data.
- The handle() of a buffer may also be used to manipulate its contents using type specific APIs.
- The type of a buffer's handle is given by the handleType() function.
-
- \sa QVideoFrame
+ \sa QVideoFrame, QVideoFrameFormat, QtVideo::MapMode
*/
/*!
- \enum QVideoFrame::HandleType
+ \class QAbstractVideoBuffer::MapData
+ \brief The QAbstractVideoBuffer::MapData structure describes the mapped plane layout.
+ \inmodule QtMultimedia
+ \ingroup multimedia
+ \ingroup multimedia_video
- Identifies the type of a video buffers handle.
+ The structure contains a number of mapped planes, and plane data for each plane,
+ specifically, a number of bytes per line, a data pointer, and a data size.
+ The structure doesn't hold any ownership of the data it refers to.
- \value NoHandle
- The buffer has no handle, its data can only be accessed by mapping the buffer.
- \value RhiTextureHandle
- The handle of the buffer is defined by The Qt Rendering Hardware Interface
- (RHI). RHI is Qt's internal graphics abstraction for 3D APIs, such as
- OpenGL, Vulkan, Metal, and Direct 3D.
+ A default-constructed structure means that no data has been mapped.
- \sa handleType()
+ All the values in the structure default to zeros.
+
+ \sa QAbstractVideoBuffer::map
*/
/*!
- \enum QVideoFrame::MapMode
-
- Enumerates how a video buffer's data is mapped to system memory.
-
- \value NotMapped
- The video buffer is not mapped to memory.
- \value ReadOnly
- The mapped memory is populated with data from the video buffer when mapped,
- but the content of the mapped memory may be discarded when unmapped.
- \value WriteOnly
- The mapped memory is uninitialized when mapped, but the possibly modified
- content will be used to populate the video buffer when unmapped.
- \value ReadWrite
- The mapped memory is populated with data from the video
- buffer, and the video buffer is repopulated with the content of the mapped
- memory when it is unmapped.
-
- \sa mapMode(), map()
+ \variable QAbstractVideoBuffer::MapData::planeCount
+
+ The number of planes of the mapped video data. If the format of the data
+ is multiplanar, and the value is \c 1, the actual plane layout will
+ be calculated upon invocation of \l QVideoFrame::map from the frame height,
+ \c{bytesPerLine[0]}, and \c{dataSize[0]}.
+
+ Defaults to \c 0.
*/
/*!
- Constructs an abstract video buffer of the given \a type.
+ \variable QAbstractVideoBuffer::MapData::bytesPerLine
+
+ The array of numbers of bytes per line for each
+ plane from \c 0 to \c{planeCount - 1}.
+
+ The values of the array default to \c 0.
*/
-QAbstractVideoBuffer::QAbstractVideoBuffer(QVideoFrame::HandleType type, QRhi *rhi)
- : m_type(type),
- m_rhi(rhi)
-{
-}
/*!
- Destroys an abstract video buffer.
+ \variable QAbstractVideoBuffer::MapData::data
+
+ The array of pointers to the mapped video pixel data
+ for each plane from \c 0 to \c{planeCount - 1}.
+ The implementation of QAbstractVideoBuffer must hold ownership of the data
+ at least until \l QAbstractVideoBuffer::unmap is called.
+
+ The values of the array default to \c nullptr.
*/
-QAbstractVideoBuffer::~QAbstractVideoBuffer()
-{
-}
/*!
- Returns the type of a video buffer's handle.
+ \variable QAbstractVideoBuffer::MapData::dataSize
+
+ The array of sizes in bytes of the mapped video pixel data
+ for each plane from \c 0 to \c{planeCount - 1}.
- \sa handle()
+ The values of the array default to \c 0.
*/
-QVideoFrame::HandleType QAbstractVideoBuffer::handleType() const
-{
- return m_type;
-}
+// must be out-of-line to ensure correct working of dynamic_cast when QHwVideoBuffer is created in tests
/*!
- Returns the QRhi instance.
+ Destroys a video buffer.
*/
-QRhi *QAbstractVideoBuffer::rhi() const
-{
- return m_rhi;
-}
+QAbstractVideoBuffer::~QAbstractVideoBuffer() = default;
-/*! \fn uchar *QAbstractVideoBuffer::map(MapMode mode, int *numBytes, int *bytesPerLine)
+/*! \fn QAbstractVideoBuffer::MapData QAbstractVideoBuffer::map(QtVideo::MapMode mode)
- Independently maps the planes of a video buffer to memory.
+ Maps the planes of a video buffer to memory.
- The map \a mode indicates whether the contents of the mapped memory should be read from and/or
- written to the buffer. If the map mode includes the \c QVideoFrame::ReadOnly flag the
- mapped memory will be populated with the content of the buffer when initially mapped. If the map
- mode includes the \c QVideoFrame::WriteOnly flag the content of the possibly modified
- mapped memory will be written back to the buffer when unmapped.
-
- When access to the data is no longer needed be sure to call the unmap() function to release the
- mapped memory and possibly update the buffer contents.
+ Returns a \l MapData structure that contains information about the plane layout of
+ the mapped current video data. If the mapping fails, the method returns the default structure.
+ For CPU memory buffers, the data is considered as already mapped, so the function
+ just returns the plane layout of the preallocated underlying data.
- Returns the number of planes in the mapped video data. For each plane the line stride of that
- plane will be returned in \a bytesPerLine, and a pointer to the plane data will be returned in
- \a data. The accumulative size of the mapped data is returned in \a numBytes.
-
- Not all buffer implementations will map more than the first plane, if this returns a single
- plane for a planar format the additional planes will have to be calculated from the line stride
- of the first plane and the frame height. Mapping a buffer with QVideoFrame will do this for
- you.
+ The map \a mode indicates whether the contents of the mapped memory should be read from and/or
+ written to the buffer. If the map mode includes the \c QtVideo::MapMode::WriteOnly flag,
+ the content of the possibly modified mapped memory is expected to be written back
+ to the buffer when unmapped.
- To implement this function create a derivative of QAbstractPlanarVideoBuffer and implement
- its map function instance instead.
+ When access to the data is no longer needed, the \l unmap function is called
+ to release the mapped memory and possibly update the buffer contents.
- \since 5.4
+ If the format of the video data is multiplanar, the method may map the whole pixel data
+ as a single plane. In this case, mapping a buffer with \l QVideoFrame
+ will calculate additional planes from the specified line stride of the first plane,
+ the frame height, and the data size.
*/
/*!
@@ -141,56 +117,23 @@ QRhi *QAbstractVideoBuffer::rhi() const
Releases the memory mapped by the map() function.
- If the \l {QVideoFrame::MapMode}{MapMode} included the \c QVideoFrame::WriteOnly
+ If the \l {QtVideo::MapMode}{MapMode} included the \c QtVideo::MapMode::WriteOnly
flag this will write the current content of the mapped memory back to the video frame.
- \sa map()
-*/
-
-/*! \fn quint64 QAbstractVideoBuffer::textureHandle(QRhi *rhi, int plane) const
-
- Returns a texture handle to the data buffer.
+ For CPU video buffers, the function does not need to be overridden.
+ The default implementation of \c unmap does nothing.
- \sa handleType()
+ \sa map()
*/
-/*
- \fn int QAbstractPlanarVideoBuffer::map(MapMode mode, int *numBytes, int bytesPerLine[4], uchar *data[4])
-
- Maps the contents of a video buffer to memory.
-
- The map \a mode indicates whether the contents of the mapped memory should be read from and/or
- written to the buffer. If the map mode includes the \c QVideoFrame::ReadOnly flag the
- mapped memory will be populated with the content of the buffer when initially mapped. If the map
- mode includes the \c QVideoFrame::WriteOnly flag the content of the possibly modified
- mapped memory will be written back to the buffer when unmapped.
-
- When access to the data is no longer needed be sure to call the unmap() function to release the
- mapped memory and possibly update the buffer contents.
+/*!
+ \fn QAbstractVideoBuffer::format() const
- Returns the number of planes in the mapped video data. For each plane the line stride of that
- plane will be returned in \a bytesPerLine, and a pointer to the plane data will be returned in
- \a data. The accumulative size of the mapped data is returned in \a numBytes.
+ Gets \l QVideoFrameFormat of the underlying video buffer.
- \sa QAbstractVideoBuffer::map(), QAbstractVideoBuffer::unmap(), QVideoFrame::mapMode()
+ The format must be available upon construction of \l QVideoFrame.
+ QVideoFrame will contain its own instance of the given format, which
+ can be detached and modified.
*/
-#ifndef QT_NO_DEBUG_STREAM
-QDebug operator<<(QDebug dbg, QVideoFrame::MapMode mode)
-{
- QDebugStateSaver saver(dbg);
- dbg.nospace();
- switch (mode) {
- case QVideoFrame::ReadOnly:
- return dbg << "ReadOnly";
- case QVideoFrame::ReadWrite:
- return dbg << "ReadWrite";
- case QVideoFrame::WriteOnly:
- return dbg << "WriteOnly";
- default:
- return dbg << "NotMapped";
- }
-}
-#endif
-
QT_END_NAMESPACE
diff --git a/src/multimedia/video/qabstractvideobuffer.h b/src/multimedia/video/qabstractvideobuffer.h
new file mode 100644
index 000000000..3e046f3b4
--- /dev/null
+++ b/src/multimedia/video/qabstractvideobuffer.h
@@ -0,0 +1,32 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QABSTRACTVIDEOBUFFER_H
+#define QABSTRACTVIDEOBUFFER_H
+
+#include <QtMultimedia/qtmultimediaexports.h>
+#include <QtMultimedia/qvideoframeformat.h>
+#include <QtMultimedia/qtvideo.h>
+
+QT_BEGIN_NAMESPACE
+
+class Q_MULTIMEDIA_EXPORT QAbstractVideoBuffer
+{
+public:
+ struct MapData
+ {
+ int planeCount = 0;
+ int bytesPerLine[4] = {};
+ uchar *data[4] = {};
+ int dataSize[4] = {};
+ };
+
+ virtual ~QAbstractVideoBuffer();
+ virtual MapData map(QtVideo::MapMode mode) = 0;
+ virtual void unmap() { }
+ virtual QVideoFrameFormat format() const = 0;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/multimedia/video/qabstractvideobuffer_p.h b/src/multimedia/video/qabstractvideobuffer_p.h
deleted file mode 100644
index 2004e25f7..000000000
--- a/src/multimedia/video/qabstractvideobuffer_p.h
+++ /dev/null
@@ -1,82 +0,0 @@
-// Copyright (C) 2022 The Qt Company Ltd.
-// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-
-#ifndef QABSTRACTVIDEOBUFFER_H
-#define QABSTRACTVIDEOBUFFER_H
-
-//
-// W A R N I N G
-// -------------
-//
-// This file is not part of the Qt API. It exists purely as an
-// implementation detail. This header file may change from version to
-// version without notice, or even be removed.
-//
-// We mean it.
-//
-
-#include <QtMultimedia/qtmultimediaglobal.h>
-#include <QtMultimedia/qvideoframe.h>
-
-#include <QtCore/qmetatype.h>
-#include <QtGui/qmatrix4x4.h>
-#include <QtCore/private/qglobal_p.h>
-
-#include <memory>
-
-QT_BEGIN_NAMESPACE
-
-
-class QVariant;
-class QRhi;
-class QRhiTexture;
-
-class Q_MULTIMEDIA_EXPORT QVideoFrameTextures
-{
-public:
- virtual ~QVideoFrameTextures() {}
- virtual QRhiTexture *texture(uint plane) const = 0;
-};
-
-class Q_MULTIMEDIA_EXPORT QAbstractVideoBuffer
-{
-public:
- QAbstractVideoBuffer(QVideoFrame::HandleType type, QRhi *rhi = nullptr);
- virtual ~QAbstractVideoBuffer();
-
- QVideoFrame::HandleType handleType() const;
- QRhi *rhi() const;
-
- struct MapData
- {
- int nPlanes = 0;
- int bytesPerLine[4] = {};
- uchar *data[4] = {};
- int size[4] = {};
- };
-
- virtual QVideoFrame::MapMode mapMode() const = 0;
- virtual MapData map(QVideoFrame::MapMode mode) = 0;
- virtual void unmap() = 0;
-
- virtual std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *) { return {}; }
- virtual quint64 textureHandle(QRhi *, int /*plane*/) const { return 0; }
-
- virtual QMatrix4x4 externalTextureMatrix() const { return {}; }
-
- virtual QByteArray underlyingByteArray(int /*plane*/) const { return {}; }
-protected:
- QVideoFrame::HandleType m_type;
- QRhi *m_rhi = nullptr;
-
-private:
- Q_DISABLE_COPY(QAbstractVideoBuffer)
-};
-
-#ifndef QT_NO_DEBUG_STREAM
-Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, QVideoFrame::MapMode);
-#endif
-
-QT_END_NAMESPACE
-
-#endif
diff --git a/src/multimedia/video/qhwvideobuffer.cpp b/src/multimedia/video/qhwvideobuffer.cpp
new file mode 100644
index 000000000..ecd3435d0
--- /dev/null
+++ b/src/multimedia/video/qhwvideobuffer.cpp
@@ -0,0 +1,17 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qhwvideobuffer_p.h"
+
+QT_BEGIN_NAMESPACE
+
+QVideoFrameTextures::~QVideoFrameTextures() = default;
+
+QHwVideoBuffer::QHwVideoBuffer(QVideoFrame::HandleType type, QRhi *rhi) : m_type(type), m_rhi(rhi)
+{
+}
+
+// must be out-of-line to ensure correct working of dynamic_cast when QHwVideoBuffer is created in tests
+QHwVideoBuffer::~QHwVideoBuffer() = default;
+
+QT_END_NAMESPACE
diff --git a/src/multimedia/video/qhwvideobuffer_p.h b/src/multimedia/video/qhwvideobuffer_p.h
new file mode 100644
index 000000000..fabf82dce
--- /dev/null
+++ b/src/multimedia/video/qhwvideobuffer_p.h
@@ -0,0 +1,58 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QHWVIDEOBUFFER_P_H
+#define QHWVIDEOBUFFER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qabstractvideobuffer.h"
+#include "qvideoframe.h"
+
+#include <QtGui/qmatrix4x4.h>
+
+QT_BEGIN_NAMESPACE
+
+class QRhi;
+class QRhiTexture;
+
+class Q_MULTIMEDIA_EXPORT QVideoFrameTextures
+{
+public:
+ virtual ~QVideoFrameTextures();
+ virtual QRhiTexture *texture(uint plane) const = 0;
+};
+
+class Q_MULTIMEDIA_EXPORT QHwVideoBuffer : public QAbstractVideoBuffer
+{
+public:
+ QHwVideoBuffer(QVideoFrame::HandleType type, QRhi *rhi = nullptr);
+
+ ~QHwVideoBuffer() override;
+
+ QVideoFrame::HandleType handleType() const { return m_type; }
+ QRhi *rhi() const { return m_rhi; }
+
+ QVideoFrameFormat format() const override { return {}; }
+
+ virtual std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *) { return {}; }
+ virtual quint64 textureHandle(QRhi *, int /*plane*/) const { return 0; }
+ virtual QMatrix4x4 externalTextureMatrix() const { return {}; }
+
+protected:
+ QVideoFrame::HandleType m_type;
+ QRhi *m_rhi = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif // QHWVIDEOBUFFER_P_H
diff --git a/src/multimedia/video/qimagevideobuffer.cpp b/src/multimedia/video/qimagevideobuffer.cpp
index bc825004e..6f35f0e53 100644
--- a/src/multimedia/video/qimagevideobuffer.cpp
+++ b/src/multimedia/video/qimagevideobuffer.cpp
@@ -51,27 +51,22 @@ QImage fixImage(QImage image)
} // namespace
-QImageVideoBuffer::QImageVideoBuffer(QImage image)
- : QAbstractVideoBuffer(QVideoFrame::NoHandle), m_image(fixImage(std::move(image)))
-{
-}
-
-QVideoFrame::MapMode QImageVideoBuffer::mapMode() const
-{
- return m_mapMode;
-}
+QImageVideoBuffer::QImageVideoBuffer(QImage image) : m_image(fixImage(std::move(image))) { }
-QAbstractVideoBuffer::MapData QImageVideoBuffer::map(QVideoFrame::MapMode mode)
+QAbstractVideoBuffer::MapData QImageVideoBuffer::map(QtVideo::MapMode mode)
{
MapData mapData;
- if (m_mapMode == QVideoFrame::NotMapped && !m_image.isNull()
- && mode != QVideoFrame::NotMapped) {
+ if (m_mapMode == QtVideo::MapMode::NotMapped && !m_image.isNull()
+ && mode != QtVideo::MapMode::NotMapped) {
m_mapMode = mode;
- mapData.nPlanes = 1;
+ mapData.planeCount = 1;
mapData.bytesPerLine[0] = m_image.bytesPerLine();
- mapData.data[0] = m_image.bits();
- mapData.size[0] = m_image.sizeInBytes();
+ if (mode == QtVideo::MapMode::ReadOnly)
+ mapData.data[0] = const_cast<uint8_t *>(m_image.constBits());
+ else
+ mapData.data[0] = m_image.bits();
+ mapData.dataSize[0] = m_image.sizeInBytes();
}
return mapData;
@@ -79,7 +74,7 @@ QAbstractVideoBuffer::MapData QImageVideoBuffer::map(QVideoFrame::MapMode mode)
void QImageVideoBuffer::unmap()
{
- m_mapMode = QVideoFrame::NotMapped;
+ m_mapMode = QtVideo::MapMode::NotMapped;
}
QImage QImageVideoBuffer::underlyingImage() const
diff --git a/src/multimedia/video/qimagevideobuffer_p.h b/src/multimedia/video/qimagevideobuffer_p.h
index e5467563a..be2bcd800 100644
--- a/src/multimedia/video/qimagevideobuffer_p.h
+++ b/src/multimedia/video/qimagevideobuffer_p.h
@@ -4,7 +4,7 @@
#ifndef QIMAGEVIDEOBUFFER_P_H
#define QIMAGEVIDEOBUFFER_P_H
-#include <private/qabstractvideobuffer_p.h>
+#include <qabstractvideobuffer.h>
#include <qimage.h>
//
@@ -25,16 +25,16 @@ class Q_MULTIMEDIA_EXPORT QImageVideoBuffer : public QAbstractVideoBuffer
public:
QImageVideoBuffer(QImage image);
- QVideoFrame::MapMode mapMode() const override;
-
- MapData map(QVideoFrame::MapMode mode) override;
+ MapData map(QtVideo::MapMode mode) override;
void unmap() override;
+ QVideoFrameFormat format() const override { return {}; }
+
QImage underlyingImage() const;
private:
- QVideoFrame::MapMode m_mapMode = QVideoFrame::NotMapped;
+ QtVideo::MapMode m_mapMode = QtVideo::MapMode::NotMapped;
QImage m_image;
};
diff --git a/src/multimedia/video/qmemoryvideobuffer.cpp b/src/multimedia/video/qmemoryvideobuffer.cpp
index bcbbe7e59..dc3aada56 100644
--- a/src/multimedia/video/qmemoryvideobuffer.cpp
+++ b/src/multimedia/video/qmemoryvideobuffer.cpp
@@ -18,9 +18,7 @@ QT_BEGIN_NAMESPACE
Constructs a video buffer with an image stride of \a bytesPerLine from a byte \a array.
*/
QMemoryVideoBuffer::QMemoryVideoBuffer(QByteArray data, int bytesPerLine)
- : QAbstractVideoBuffer(QVideoFrame::NoHandle),
- m_bytesPerLine(bytesPerLine),
- m_data(std::move(data))
+ : m_bytesPerLine(bytesPerLine), m_data(std::move(data))
{
}
@@ -32,29 +30,21 @@ QMemoryVideoBuffer::~QMemoryVideoBuffer() = default;
/*!
\reimp
*/
-QVideoFrame::MapMode QMemoryVideoBuffer::mapMode() const
-{
- return m_mapMode;
-}
-
-/*!
- \reimp
-*/
-QAbstractVideoBuffer::MapData QMemoryVideoBuffer::map(QVideoFrame::MapMode mode)
+QAbstractVideoBuffer::MapData QMemoryVideoBuffer::map(QtVideo::MapMode mode)
{
MapData mapData;
- if (m_mapMode == QVideoFrame::NotMapped && m_data.size() && mode != QVideoFrame::NotMapped) {
+ if (m_mapMode == QtVideo::MapMode::NotMapped && m_data.size() && mode != QtVideo::MapMode::NotMapped) {
m_mapMode = mode;
- mapData.nPlanes = 1;
+ mapData.planeCount = 1;
mapData.bytesPerLine[0] = m_bytesPerLine;
// avoid detaching and extra copying in case the underlyingByteArray is
// being held by textures or anything else.
- if (mode == QVideoFrame::ReadOnly)
+ if (mode == QtVideo::MapMode::ReadOnly)
mapData.data[0] = reinterpret_cast<uchar *>(const_cast<char*>(m_data.constData()));
else
mapData.data[0] = reinterpret_cast<uchar *>(m_data.data());
- mapData.size[0] = m_data.size();
+ mapData.dataSize[0] = m_data.size();
}
return mapData;
@@ -65,15 +55,7 @@ QAbstractVideoBuffer::MapData QMemoryVideoBuffer::map(QVideoFrame::MapMode mode)
*/
void QMemoryVideoBuffer::unmap()
{
- m_mapMode = QVideoFrame::NotMapped;
-}
-
-/*!
- \reimp
-*/
-QByteArray QMemoryVideoBuffer::underlyingByteArray(int plane) const
-{
- return plane == 0 ? m_data : QByteArray{};
+ m_mapMode = QtVideo::MapMode::NotMapped;
}
QT_END_NAMESPACE
diff --git a/src/multimedia/video/qmemoryvideobuffer_p.h b/src/multimedia/video/qmemoryvideobuffer_p.h
index ec97abd4f..b861ad479 100644
--- a/src/multimedia/video/qmemoryvideobuffer_p.h
+++ b/src/multimedia/video/qmemoryvideobuffer_p.h
@@ -4,7 +4,7 @@
#ifndef QMEMORYVIDEOBUFFER_P_H
#define QMEMORYVIDEOBUFFER_P_H
-#include <private/qabstractvideobuffer_p.h>
+#include "qabstractvideobuffer.h"
//
// W A R N I N G
@@ -23,17 +23,16 @@ class Q_MULTIMEDIA_EXPORT QMemoryVideoBuffer : public QAbstractVideoBuffer
{
public:
QMemoryVideoBuffer(QByteArray data, int bytesPerLine);
- ~QMemoryVideoBuffer();
+ ~QMemoryVideoBuffer() override;
- QVideoFrame::MapMode mapMode() const override;
-
- MapData map(QVideoFrame::MapMode mode) override;
+ MapData map(QtVideo::MapMode mode) override;
void unmap() override;
- QByteArray underlyingByteArray(int plane) const override;
+ QVideoFrameFormat format() const override { return {}; }
+
private:
int m_bytesPerLine = 0;
- QVideoFrame::MapMode m_mapMode = QVideoFrame::NotMapped;
+ QtVideo::MapMode m_mapMode = QtVideo::MapMode::NotMapped;
QByteArray m_data;
};
diff --git a/src/multimedia/video/qtvideo.cpp b/src/multimedia/video/qtvideo.cpp
index 29747b776..46f1e3101 100644
--- a/src/multimedia/video/qtvideo.cpp
+++ b/src/multimedia/video/qtvideo.cpp
@@ -3,6 +3,8 @@
#include "qtvideo.h"
+#include <QtCore/qdebug.h>
+
QT_BEGIN_NAMESPACE
/*!
@@ -25,6 +27,45 @@ QT_BEGIN_NAMESPACE
\value Clockwise270 The frame should be rotated clockwise by 270 degrees
*/
+/*!
+ \enum QtVideo::MapMode
+
+ Enumerates how a video buffer's data is mapped to system memory.
+
+ \value NotMapped
+ The video buffer is not mapped to memory.
+ \value ReadOnly
+ The mapped memory is populated with data from the video buffer when mapped,
+ but the content of the mapped memory may be discarded when unmapped.
+ \value WriteOnly
+ The mapped memory is uninitialized when mapped, but the possibly modified
+ content will be used to populate the video buffer when unmapped.
+ \value ReadWrite
+ The mapped memory is populated with data from the video
+ buffer, and the video buffer is repopulated with the content of the mapped
+ memory when it is unmapped.
+
+ \sa QVideoFrame::mapMode(), QVideoFrame::map()
+*/
+
+#ifndef QT_NO_DEBUG_STREAM
+QDebug operator<<(QDebug dbg, QtVideo::MapMode mode)
+{
+ QDebugStateSaver saver(dbg);
+ dbg.nospace();
+ switch (mode) {
+ case QtVideo::MapMode::ReadOnly:
+ return dbg << "ReadOnly";
+ case QtVideo::MapMode::ReadWrite:
+ return dbg << "ReadWrite";
+ case QtVideo::MapMode::WriteOnly:
+ return dbg << "WriteOnly";
+ default:
+ return dbg << "NotMapped";
+ }
+}
+#endif
+
QT_END_NAMESPACE
#include "moc_qtvideo.cpp"
diff --git a/src/multimedia/video/qtvideo.h b/src/multimedia/video/qtvideo.h
index a5f22ea2c..fdcb26419 100644
--- a/src/multimedia/video/qtvideo.h
+++ b/src/multimedia/video/qtvideo.h
@@ -16,8 +16,42 @@ Q_NAMESPACE_EXPORT(Q_MULTIMEDIA_EXPORT)
enum class Rotation { None = 0, Clockwise90 = 90, Clockwise180 = 180, Clockwise270 = 270 };
Q_ENUM_NS(Rotation)
+
+enum class MapMode
+{
+ NotMapped = 0x00,
+ ReadOnly = 0x01,
+ WriteOnly = 0x02,
+ ReadWrite = ReadOnly | WriteOnly
+};
+Q_ENUM_NS(MapMode)
+
+inline constexpr MapMode operator & (MapMode lhs, MapMode rhs)
+{
+ return MapMode(qToUnderlying(lhs) & qToUnderlying(rhs));
+}
+
+inline constexpr MapMode operator | (MapMode lhs, MapMode rhs)
+{
+ return MapMode(qToUnderlying(lhs) | qToUnderlying(rhs));
+}
+
+inline constexpr MapMode &operator &= (MapMode &lhs, MapMode rhs)
+{
+ return (lhs = lhs & rhs);
+}
+
+inline constexpr MapMode &operator |= (MapMode &lhs, MapMode rhs)
+{
+ return (lhs = lhs | rhs);
+}
+
}
+#ifndef QT_NO_DEBUG_STREAM
+Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, QtVideo::MapMode);
+#endif
+
QT_END_NAMESPACE
#endif // QTVIDEO_H
diff --git a/src/multimedia/video/qvideoframe.cpp b/src/multimedia/video/qvideoframe.cpp
index 90560f506..9da4ea3b8 100644
--- a/src/multimedia/video/qvideoframe.cpp
+++ b/src/multimedia/video/qvideoframe.cpp
@@ -58,6 +58,23 @@ QT_DEFINE_QESDP_SPECIALIZATION_DTOR(QVideoFramePrivate);
\note Since video frames can be expensive to copy, QVideoFrame is explicitly shared, so any
change made to a video frame will also apply to any copies.
+
+ \sa QAbstractVideoBuffer, QVideoFrameFormat, QtVideo::MapMode
+*/
+
+/*!
+ \enum QVideoFrame::HandleType
+
+ Identifies the type of a video buffer's handle.
+
+ \value NoHandle
+ The buffer has no handle, its data can only be accessed by mapping the buffer.
+ \value RhiTextureHandle
+ The handle of the buffer is defined by The Qt Rendering Hardware Interface
+ (RHI). RHI is Qt's internal graphics abstraction for 3D APIs, such as
+ OpenGL, Vulkan, Metal, and Direct 3D.
+
+ \sa handleType()
*/
@@ -68,6 +85,8 @@ QVideoFrame::QVideoFrame()
{
}
+#if QT_DEPRECATED_SINCE(6, 8)
+
/*!
\internal
Constructs a video frame from a \a buffer with the given pixel \a format and \a size in pixels.
@@ -75,9 +94,8 @@ QVideoFrame::QVideoFrame()
\note This doesn't increment the reference count of the video buffer.
*/
QVideoFrame::QVideoFrame(QAbstractVideoBuffer *buffer, const QVideoFrameFormat &format)
- : d(new QVideoFramePrivate(format))
+ : d(new QVideoFramePrivate(format, std::unique_ptr<QAbstractVideoBuffer>(buffer)))
{
- d->buffer.reset(buffer);
}
/*!
@@ -85,9 +103,11 @@ QVideoFrame::QVideoFrame(QAbstractVideoBuffer *buffer, const QVideoFrameFormat &
*/
QAbstractVideoBuffer *QVideoFrame::videoBuffer() const
{
- return d ? d->buffer.get() : nullptr;
+ return d ? d->videoBuffer.get() : nullptr;
}
+#endif
+
/*!
Constructs a video frame of the given pixel \a format.
@@ -103,21 +123,25 @@ QVideoFrame::QVideoFrame(const QVideoFrameFormat &format)
// Check the memory was successfully allocated.
if (!data.isEmpty())
- d->buffer = std::make_unique<QMemoryVideoBuffer>(data, textureDescription->strideForWidth(format.frameWidth()));
+ d->videoBuffer = std::make_unique<QMemoryVideoBuffer>(
+ data, textureDescription->strideForWidth(format.frameWidth()));
}
}
/*!
- Constructs a QVideoFrame from a QImage. The QImage pixels are copied
- into the QVideoFrame's memory buffer. The resulting frame has the
- same size as the QImage, but the number of bytes per line may
- differ.
+ Constructs a QVideoFrame from a QImage.
+ \since 6.8
If the QImage::Format matches one of the formats in
- QVideoFrameFormat::PixelFormat, the QVideoFrame will use that format
- without any pixel format conversion. Otherwise, the image is first
- converted to a supported (A)RGB format using QImage::convertedTo()
- with the Qt::AutoColor flag. This may incur a performance penalty.
+ QVideoFrameFormat::PixelFormat, the QVideoFrame will hold an instance of
+ the \a image and use that format without any pixel format conversion.
+ In this case, pixel data will be copied only if you call \l{QVideoFrame::map}
+ with \c WriteOnly flag while keeping the original image.
+
+ Otherwise, if the QImage::Format matches none of video formats,
+ the image is first converted to a supported (A)RGB format using
+ QImage::convertedTo() with the Qt::AutoColor flag.
+ This may incur a performance penalty.
If QImage::isNull() evaluates to true for the input QImage, the
QVideoFrame will be invalid and QVideoFrameFormat::isValid() will
@@ -150,6 +174,47 @@ QVideoFrame::QVideoFrame(const QImage &image)
}
/*!
+ Constructs a QVideoFrame from a \l QAbstractVideoBuffer.
+
+ \since 6.8
+
+ The specified \a videoBuffer refers to an instance of a reimplemented
+ \l QAbstractVideoBuffer. The instance is expected to contain a preallocated custom
+ video buffer and must implement \l QAbstractVideoBuffer::format,
+ \l QAbstractVideoBuffer::map, and \l QAbstractVideoBuffer::unmap for GPU content.
+
+ If \a videoBuffer is null or its \l QVideoFrameFormat is invalid,
+ the constructor creates an invalid video frame.
+
+ The created frame will hold ownership of the specified video buffer for its lifetime.
+ Considering that QVideoFrame is implemented via a shared private object,
+ the specified video buffer will be destroyed upon destruction of the last copy
+ of the created video frame.
+
+ Note, if a video frame has been passed to \l QMediaRecorder or a rendering pipeline,
+ the lifetime of the frame is undefined, and the media recorder can destroy it
+ in a different thread.
+
+ QVideoFrame will contain its own instance of QVideoFrameFormat.
+ Upon invoking \l setStreamFrameRate, \l setMirrored, or \l setRotation,
+ the inner format can be modified, and \l surfaceFormat will return
+ a detached instance.
+
+ \sa QAbstractVideoBuffer, QVideoFrameFormat
+*/
+QVideoFrame::QVideoFrame(std::unique_ptr<QAbstractVideoBuffer> videoBuffer)
+{
+ if (!videoBuffer)
+ return;
+
+ QVideoFrameFormat format = videoBuffer->format();
+ if (!format.isValid())
+ return;
+
+ d = new QVideoFramePrivate{ std::move(format), std::move(videoBuffer) };
+}
+
+/*!
Constructs a shallow copy of \a other. Since QVideoFrame is
explicitly shared, these two instances will reflect the same frame.
@@ -212,7 +277,7 @@ QVideoFrame::~QVideoFrame() = default;
*/
bool QVideoFrame::isValid() const
{
- return (d && d->buffer) && d->format.pixelFormat() != QVideoFrameFormat::Format_Invalid;
+ return d && d->videoBuffer && d->format.pixelFormat() != QVideoFrameFormat::Format_Invalid;
}
/*!
@@ -239,7 +304,7 @@ QVideoFrameFormat QVideoFrame::surfaceFormat() const
*/
QVideoFrame::HandleType QVideoFrame::handleType() const
{
- return (d && d->buffer) ? d->buffer->handleType() : QVideoFrame::NoHandle;
+ return (d && d->hwVideoBuffer) ? d->hwVideoBuffer->handleType() : QVideoFrame::NoHandle;
}
/*!
@@ -269,25 +334,25 @@ int QVideoFrame::height() const
/*!
Identifies if a video frame's contents are currently mapped to system memory.
- This is a convenience function which checks that the \l {QVideoFrame::MapMode}{MapMode}
- of the frame is not equal to QVideoFrame::NotMapped.
+ This is a convenience function which checks that the \l {QtVideo::MapMode}{MapMode}
+ of the frame is not equal to QtVideo::MapMode::NotMapped.
Returns true if the contents of the video frame are mapped to system memory, and false
otherwise.
- \sa mapMode(), QVideoFrame::MapMode
+ \sa mapMode(), QtVideo::MapMode
*/
bool QVideoFrame::isMapped() const
{
- return d && d->buffer && d->buffer->mapMode() != QVideoFrame::NotMapped;
+ return d && d->mapMode != QtVideo::MapMode::NotMapped;
}
/*!
Identifies if the mapped contents of a video frame will be persisted when the frame is unmapped.
- This is a convenience function which checks if the \l {QVideoFrame::MapMode}{MapMode}
- contains the QVideoFrame::WriteOnly flag.
+ This is a convenience function which checks if the \l {QtVideo::MapMode}{MapMode}
+ contains the QtVideo::MapMode::WriteOnly flag.
Returns true if the video frame will be updated when unmapped, and false otherwise.
@@ -295,37 +360,37 @@ bool QVideoFrame::isMapped() const
Depending on the buffer implementation the changes may be persisted, or worse alter a shared
buffer.
- \sa mapMode(), QVideoFrame::MapMode
+ \sa mapMode(), QtVideo::MapMode
*/
bool QVideoFrame::isWritable() const
{
- return d && d->buffer && (d->buffer->mapMode() & QVideoFrame::WriteOnly);
+ return d && (d->mapMode & QtVideo::MapMode::WriteOnly) != QtVideo::MapMode::NotMapped;
}
/*!
Identifies if the mapped contents of a video frame were read from the frame when it was mapped.
- This is a convenience function which checks if the \l {QVideoFrame::MapMode}{MapMode}
- contains the QVideoFrame::WriteOnly flag.
- This is a convenience function which checks if the \l {QVideoFrame::MapMode}{MapMode}
+ This is a convenience function which checks if the \l {QtVideo::MapMode}{MapMode}
+ contains the QtVideo::MapMode::ReadOnly flag.
Returns true if the contents of the mapped memory were read from the video frame, and false
otherwise.
- \sa mapMode(), QVideoFrame::MapMode
+ \sa mapMode(), QtVideo::MapMode
*/
bool QVideoFrame::isReadable() const
{
- return d && d->buffer && (d->buffer->mapMode() & QVideoFrame::ReadOnly);
+ return d && (d->mapMode & QtVideo::MapMode::ReadOnly) != QtVideo::MapMode::NotMapped;
}
/*!
Returns the mode a video frame was mapped to system memory in.
- \sa map(), QVideoFrame::MapMode
+ \sa map(), QtVideo::MapMode
*/
QVideoFrame::MapMode QVideoFrame::mapMode() const
{
- return (d && d->buffer) ? d->buffer->mapMode() : QVideoFrame::NotMapped;
+ return static_cast<QVideoFrame::MapMode>(d ? d->mapMode : QtVideo::MapMode::NotMapped);
}
/*!
@@ -336,9 +401,9 @@ QVideoFrame::MapMode QVideoFrame::mapMode() const
copying the contents around, so avoid mapping and unmapping unless required.
The map \a mode indicates whether the contents of the mapped memory should be read from and/or
- written to the frame. If the map mode includes the \c QVideoFrame::ReadOnly flag the
+ written to the frame. If the map mode includes the \c QtVideo::MapMode::ReadOnly flag the
mapped memory will be populated with the content of the video frame when initially mapped. If the map
- mode includes the \c QVideoFrame::WriteOnly flag the content of the possibly modified
+ mode includes the \c QtVideo::MapMode::WriteOnly flag the content of the possibly modified
mapped memory will be written back to the frame when unmapped.
While mapped the contents of a video frame can be accessed directly through the pointer returned
@@ -358,20 +423,18 @@ QVideoFrame::MapMode QVideoFrame::mapMode() const
\sa unmap(), mapMode(), bits()
*/
-bool QVideoFrame::map(QVideoFrame::MapMode mode)
+bool QVideoFrame::map(QtVideo::MapMode mode)
{
-
- if (!d || !d->buffer)
+ if (!d || !d->videoBuffer)
return false;
QMutexLocker lock(&d->mapMutex);
- if (mode == QVideoFrame::NotMapped)
+ if (mode == QtVideo::MapMode::NotMapped)
return false;
if (d->mappedCount > 0) {
//it's allowed to map the video frame multiple times in read only mode
- if (d->buffer->mapMode() == QVideoFrame::ReadOnly
- && mode == QVideoFrame::ReadOnly) {
+ if (d->mapMode == QtVideo::MapMode::ReadOnly && mode == QtVideo::MapMode::ReadOnly) {
d->mappedCount++;
return true;
}
@@ -381,14 +444,16 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
Q_ASSERT(d->mapData.data[0] == nullptr);
Q_ASSERT(d->mapData.bytesPerLine[0] == 0);
- Q_ASSERT(d->mapData.nPlanes == 0);
- Q_ASSERT(d->mapData.size[0] == 0);
+ Q_ASSERT(d->mapData.planeCount == 0);
+ Q_ASSERT(d->mapData.dataSize[0] == 0);
- d->mapData = d->buffer->map(mode);
- if (d->mapData.nPlanes == 0)
+ d->mapData = d->videoBuffer->map(mode);
+ if (d->mapData.planeCount == 0)
return false;
- if (d->mapData.nPlanes == 1) {
+ d->mapMode = mode;
+
+ if (d->mapData.planeCount == 1) {
auto pixelFmt = d->format.pixelFormat();
// If the plane count is 1 derive the additional planes for planar formats.
switch (pixelFmt) {
@@ -426,16 +491,16 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
const int height = this->height();
const int yStride = d->mapData.bytesPerLine[0];
const int uvHeight = pixelFmt == QVideoFrameFormat::Format_YUV422P ? height : height / 2;
- const int uvStride = (d->mapData.size[0] - (yStride * height)) / uvHeight / 2;
+ const int uvStride = (d->mapData.dataSize[0] - (yStride * height)) / uvHeight / 2;
// Three planes, the second and third vertically (and horizontally for other than Format_YUV422P formats) subsampled.
- d->mapData.nPlanes = 3;
+ d->mapData.planeCount = 3;
d->mapData.bytesPerLine[2] = d->mapData.bytesPerLine[1] = uvStride;
- d->mapData.size[0] = yStride * height;
- d->mapData.size[1] = uvStride * uvHeight;
- d->mapData.size[2] = uvStride * uvHeight;
- d->mapData.data[1] = d->mapData.data[0] + d->mapData.size[0];
- d->mapData.data[2] = d->mapData.data[1] + d->mapData.size[1];
+ d->mapData.dataSize[0] = yStride * height;
+ d->mapData.dataSize[1] = uvStride * uvHeight;
+ d->mapData.dataSize[2] = uvStride * uvHeight;
+ d->mapData.data[1] = d->mapData.data[0] + d->mapData.dataSize[0];
+ d->mapData.data[2] = d->mapData.data[1] + d->mapData.dataSize[1];
break;
}
case QVideoFrameFormat::Format_NV12:
@@ -445,25 +510,25 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
case QVideoFrameFormat::Format_P010:
case QVideoFrameFormat::Format_P016: {
// Semi planar, Full resolution Y plane with interleaved subsampled U and V planes.
- d->mapData.nPlanes = 2;
+ d->mapData.planeCount = 2;
d->mapData.bytesPerLine[1] = d->mapData.bytesPerLine[0];
- int size = d->mapData.size[0];
- d->mapData.size[0] = (d->mapData.bytesPerLine[0] * height());
- d->mapData.size[1] = size - d->mapData.size[0];
- d->mapData.data[1] = d->mapData.data[0] + d->mapData.size[0];
+ int size = d->mapData.dataSize[0];
+ d->mapData.dataSize[0] = (d->mapData.bytesPerLine[0] * height());
+ d->mapData.dataSize[1] = size - d->mapData.dataSize[0];
+ d->mapData.data[1] = d->mapData.data[0] + d->mapData.dataSize[0];
break;
}
case QVideoFrameFormat::Format_IMC1:
case QVideoFrameFormat::Format_IMC3: {
// Three planes, the second and third vertically and horizontally subsumpled,
// but with lines padded to the width of the first plane.
- d->mapData.nPlanes = 3;
+ d->mapData.planeCount = 3;
d->mapData.bytesPerLine[2] = d->mapData.bytesPerLine[1] = d->mapData.bytesPerLine[0];
- d->mapData.size[0] = (d->mapData.bytesPerLine[0] * height());
- d->mapData.size[1] = (d->mapData.bytesPerLine[0] * height() / 2);
- d->mapData.size[2] = (d->mapData.bytesPerLine[0] * height() / 2);
- d->mapData.data[1] = d->mapData.data[0] + d->mapData.size[0];
- d->mapData.data[2] = d->mapData.data[1] + d->mapData.size[1];
+ d->mapData.dataSize[0] = (d->mapData.bytesPerLine[0] * height());
+ d->mapData.dataSize[1] = (d->mapData.bytesPerLine[0] * height() / 2);
+ d->mapData.dataSize[2] = (d->mapData.bytesPerLine[0] * height() / 2);
+ d->mapData.data[1] = d->mapData.data[0] + d->mapData.dataSize[0];
+ d->mapData.data[2] = d->mapData.data[1] + d->mapData.dataSize[1];
break;
}
}
@@ -474,7 +539,7 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
// unlock mapMutex to avoid potential deadlock imageMutex <--> mapMutex
lock.unlock();
- if ((mode & QVideoFrame::WriteOnly) != 0) {
+ if ((mode & QtVideo::MapMode::WriteOnly) != QtVideo::MapMode::NotMapped) {
QMutexLocker lock(&d->imageMutex);
d->image = {};
}
@@ -482,10 +547,73 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
return true;
}
+#if QT_DEPRECATED_SINCE(6, 8)
+
+/*!
+ \deprecated [6.8] Use \c QtVideo::MapMode instead. The values of this enum
+ are consistent with values of \c QtVideo::MapMode.
+ \enum QVideoFrame::MapMode
+
+ Enumerates how a video buffer's data is mapped to system memory.
+
+ \value NotMapped
+ The video buffer is not mapped to memory.
+ \value ReadOnly
+ The mapped memory is populated with data from the video buffer when mapped,
+ but the content of the mapped memory may be discarded when unmapped.
+ \value WriteOnly
+ The mapped memory is uninitialized when mapped, but the possibly modified
+ content will be used to populate the video buffer when unmapped.
+ \value ReadWrite
+ The mapped memory is populated with data from the video
+ buffer, and the video buffer is repopulated with the content of the mapped
+ memory when it is unmapped.
+
+ \sa mapMode(), map()
+*/
+
+/*!
+ \deprecated [6.8] Use \c QVideoFrame::map(QtVideo::MapMode) instead.
+ Maps the contents of a video frame to system (CPU addressable) memory.
+
+ In some cases the video frame data might be stored in video memory or otherwise inaccessible
+ memory, so it is necessary to map a frame before accessing the pixel data. This may involve
+ copying the contents around, so avoid mapping and unmapping unless required.
+
+ The map \a mode indicates whether the contents of the mapped memory should be read from and/or
+ written to the frame. If the map mode includes the \c QVideoFrame::ReadOnly flag the
+ mapped memory will be populated with the content of the video frame when initially mapped. If the map
+ mode includes the \c QVideoFrame::WriteOnly flag the content of the possibly modified
+ mapped memory will be written back to the frame when unmapped.
+
+ While mapped the contents of a video frame can be accessed directly through the pointer returned
+ by the bits() function.
+
+ When access to the data is no longer needed, be sure to call the unmap() function to release the
+ mapped memory and possibly update the video frame contents.
+
+ If the video frame has been mapped in read only mode, it is permissible to map it
+ multiple times in read only mode (and unmap it a corresponding number of times). In all
+ other cases it is necessary to unmap the frame first before mapping a second time.
+
+ \note Writing to memory that is mapped as read-only is undefined, and may result in changes
+ to shared data or crashes.
+
+ Returns true if the frame was mapped to memory in the given \a mode and false otherwise.
+
+ \sa unmap(), mapMode(), bits()
+*/
+bool QVideoFrame::map(QVideoFrame::MapMode mode)
+{
+ return map(static_cast<QtVideo::MapMode>(mode));
+}
+
+#endif
+
/*!
Releases the memory mapped by the map() function.
- If the \l {QVideoFrame::MapMode}{MapMode} included the QVideoFrame::WriteOnly
+ If the \l {QtVideo::MapMode}{MapMode} included the QtVideo::MapMode::WriteOnly
flag this will persist the current content of the mapped memory to the video frame.
unmap() should not be called if map() function failed.
@@ -494,7 +622,7 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
*/
void QVideoFrame::unmap()
{
- if (!d || !d->buffer)
+ if (!d || !d->videoBuffer)
return;
QMutexLocker lock(&d->mapMutex);
@@ -508,7 +636,8 @@ void QVideoFrame::unmap()
if (d->mappedCount == 0) {
d->mapData = {};
- d->buffer->unmap();
+ d->mapMode = QtVideo::MapMode::NotMapped;
+ d->videoBuffer->unmap();
}
}
@@ -525,7 +654,7 @@ int QVideoFrame::bytesPerLine(int plane) const
{
if (!d)
return 0;
- return plane >= 0 && plane < d->mapData.nPlanes ? d->mapData.bytesPerLine[plane] : 0;
+ return plane >= 0 && plane < d->mapData.planeCount ? d->mapData.bytesPerLine[plane] : 0;
}
/*!
@@ -544,7 +673,7 @@ uchar *QVideoFrame::bits(int plane)
{
if (!d)
return nullptr;
- return plane >= 0 && plane < d->mapData.nPlanes ? d->mapData.data[plane] : nullptr;
+ return plane >= 0 && plane < d->mapData.planeCount ? d->mapData.data[plane] : nullptr;
}
/*!
@@ -562,7 +691,7 @@ const uchar *QVideoFrame::bits(int plane) const
{
if (!d)
return nullptr;
- return plane >= 0 && plane < d->mapData.nPlanes ? d->mapData.data[plane] : nullptr;
+ return plane >= 0 && plane < d->mapData.planeCount ? d->mapData.data[plane] : nullptr;
}
/*!
@@ -576,7 +705,7 @@ int QVideoFrame::mappedBytes(int plane) const
{
if (!d)
return 0;
- return plane >= 0 && plane < d->mapData.nPlanes ? d->mapData.size[plane] : 0;
+ return plane >= 0 && plane < d->mapData.planeCount ? d->mapData.dataSize[plane] : 0;
}
/*!
@@ -682,7 +811,7 @@ void QVideoFrame::setEndTime(qint64 time)
void QVideoFrame::setRotation(QtVideo::Rotation angle)
{
if (d)
- d->rotation = angle;
+ d->format.setRotation(angle);
}
/*!
@@ -690,16 +819,17 @@ void QVideoFrame::setRotation(QtVideo::Rotation angle)
*/
QtVideo::Rotation QVideoFrame::rotation() const
{
- return QtVideo::Rotation(d ? d->rotation : QtVideo::Rotation::None);
+ return d ? d->format.rotation() : QtVideo::Rotation::None;
}
/*!
- Sets the \a mirrored flag for the frame.
+ Sets the \a mirrored flag for the frame and
+ sets the flag to the underlying \l surfaceFormat.
*/
void QVideoFrame::setMirrored(bool mirrored)
{
if (d)
- d->mirrored = mirrored;
+ d->format.setMirrored(mirrored);
}
/*!
@@ -707,7 +837,24 @@ void QVideoFrame::setMirrored(bool mirrored)
*/
bool QVideoFrame::mirrored() const
{
- return d && d->mirrored;
+ return d && d->format.isMirrored();
+}
+
+/*!
+ Sets the frame \a rate of a video stream in frames per second.
+*/
+void QVideoFrame::setStreamFrameRate(qreal rate)
+{
+ if (d)
+ d->format.setStreamFrameRate(rate);
+}
+
+/*!
+ Returns the frame rate of a video stream in frames per second.
+*/
+qreal QVideoFrame::streamFrameRate() const
+{
+ return d ? d->format.streamFrameRate() : 0.;
}
/*!
@@ -791,7 +938,7 @@ void QVideoFrame::paint(QPainter *painter, const QRectF &rect, const PaintOption
}
}
- if (map(QVideoFrame::ReadOnly)) {
+ if (map(QtVideo::MapMode::ReadOnly)) {
const QTransform oldTransform = painter->transform();
QTransform transform = oldTransform;
transform.translate(targetRect.center().x() - size.width()/2,
diff --git a/src/multimedia/video/qvideoframe.h b/src/multimedia/video/qvideoframe.h
index a306162e8..c026972f8 100644
--- a/src/multimedia/video/qvideoframe.h
+++ b/src/multimedia/video/qvideoframe.h
@@ -35,10 +35,14 @@ public:
enum MapMode
{
- NotMapped = 0x00,
- ReadOnly = 0x01,
- WriteOnly = 0x02,
- ReadWrite = ReadOnly | WriteOnly
+ NotMapped Q_DECL_ENUMERATOR_DEPRECATED_X("Use QtVideo::MapMode::NotMapped instead")
+ = static_cast<int>(QtVideo::MapMode::NotMapped),
+ ReadOnly Q_DECL_ENUMERATOR_DEPRECATED_X("Use QtVideo::MapMode::ReadOnly instead")
+ = static_cast<int>(QtVideo::MapMode::ReadOnly),
+ WriteOnly Q_DECL_ENUMERATOR_DEPRECATED_X("Use QtVideo::MapMode::WriteOnly instead")
+ = static_cast<int>(QtVideo::MapMode::WriteOnly),
+ ReadWrite Q_DECL_ENUMERATOR_DEPRECATED_X("Use QtVideo::MapMode::ReadWrite instead")
+ = static_cast<int>(QtVideo::MapMode::ReadWrite)
};
#if QT_DEPRECATED_SINCE(6, 7)
@@ -54,6 +58,7 @@ public:
QVideoFrame();
QVideoFrame(const QVideoFrameFormat &format);
explicit QVideoFrame(const QImage &image);
+ explicit QVideoFrame(std::unique_ptr<QAbstractVideoBuffer> videoBuffer);
QVideoFrame(const QVideoFrame &other);
~QVideoFrame();
@@ -84,7 +89,11 @@ public:
QVideoFrame::MapMode mapMode() const;
+ bool map(QtVideo::MapMode mode);
+#if QT_DEPRECATED_SINCE(6, 8)
+ QT_DEPRECATED_VERSION_X_6_8("Use QVideoFrame::map(QtVideo::MapMode) instead")
bool map(QVideoFrame::MapMode mode);
+#endif
void unmap();
int bytesPerLine(int plane) const;
@@ -114,6 +123,9 @@ public:
void setMirrored(bool);
bool mirrored() const;
+ void setStreamFrameRate(qreal rate);
+ qreal streamFrameRate() const;
+
QImage toImage() const;
struct PaintOptions {
@@ -131,9 +143,13 @@ public:
void paint(QPainter *painter, const QRectF &rect, const PaintOptions &options);
+#if QT_DEPRECATED_SINCE(6, 8)
+ QT_DEPRECATED_VERSION_X_6_8("The constructor is internal and deprecated")
QVideoFrame(QAbstractVideoBuffer *buffer, const QVideoFrameFormat &format);
+ QT_DEPRECATED_VERSION_X_6_8("The method is internal and deprecated")
QAbstractVideoBuffer *videoBuffer() const;
+#endif
private:
friend class QVideoFramePrivate;
QExplicitlySharedDataPointer<QVideoFramePrivate> d;
diff --git a/src/multimedia/video/qvideoframe_p.h b/src/multimedia/video/qvideoframe_p.h
index 23457e55c..865e26463 100644
--- a/src/multimedia/video/qvideoframe_p.h
+++ b/src/multimedia/video/qvideoframe_p.h
@@ -16,7 +16,7 @@
//
#include "qvideoframe.h"
-#include "qabstractvideobuffer_p.h"
+#include "qhwvideobuffer_p.h"
#include <qmutex.h>
@@ -26,14 +26,38 @@ class QVideoFramePrivate : public QSharedData
{
public:
QVideoFramePrivate() = default;
- QVideoFramePrivate(const QVideoFrameFormat &format) : format(format) { }
- QVideoFramePrivate(QVideoFrameFormat format, std::unique_ptr<QAbstractVideoBuffer> buffer)
- : format{ std::move(format) }, buffer{ std::move(buffer) }
+
+ template <typename Buffer>
+ static QVideoFrame createFrame(std::unique_ptr<Buffer> buffer, QVideoFrameFormat format)
+ {
+ QVideoFrame result;
+ result.d.reset(new QVideoFramePrivate(std::move(format), std::move(buffer)));
+ return result;
+ }
+
+ template <typename Buffer = QAbstractVideoBuffer>
+ QVideoFramePrivate(QVideoFrameFormat format, std::unique_ptr<Buffer> buffer = nullptr)
+ : format{ std::move(format) }, videoBuffer{ std::move(buffer) }
{
+ if constexpr (std::is_base_of_v<QHwVideoBuffer, Buffer>)
+ hwVideoBuffer = static_cast<QHwVideoBuffer *>(videoBuffer.get());
+ else if constexpr (std::is_same_v<QAbstractVideoBuffer, Buffer>)
+ hwVideoBuffer = dynamic_cast<QHwVideoBuffer *>(videoBuffer.get());
+ // else hwVideoBuffer == nullptr
}
static QVideoFramePrivate *handle(QVideoFrame &frame) { return frame.d.get(); };
+ static QHwVideoBuffer *hwBuffer(const QVideoFrame &frame)
+ {
+ return frame.d ? frame.d->hwVideoBuffer : nullptr;
+ };
+
+ static QAbstractVideoBuffer *buffer(const QVideoFrame &frame)
+ {
+ return frame.d ? frame.d->videoBuffer.get() : nullptr;
+ };
+
QVideoFrame adoptThisByVideoFrame()
{
QVideoFrame frame;
@@ -44,13 +68,13 @@ public:
qint64 startTime = -1;
qint64 endTime = -1;
QAbstractVideoBuffer::MapData mapData;
+ QtVideo::MapMode mapMode = QtVideo::MapMode::NotMapped;
QVideoFrameFormat format;
- std::unique_ptr<QAbstractVideoBuffer> buffer;
+ std::unique_ptr<QAbstractVideoBuffer> videoBuffer;
+ QHwVideoBuffer *hwVideoBuffer = nullptr;
int mappedCount = 0;
QMutex mapMutex;
QString subtitleText;
- QtVideo::Rotation rotation = QtVideo::Rotation::None;
- bool mirrored = false;
QImage image;
QMutex imageMutex;
diff --git a/src/multimedia/video/qvideoframeconversionhelper.cpp b/src/multimedia/video/qvideoframeconversionhelper.cpp
index 1b570b74f..d3f2b0403 100644
--- a/src/multimedia/video/qvideoframeconversionhelper.cpp
+++ b/src/multimedia/video/qvideoframeconversionhelper.cpp
@@ -34,31 +34,30 @@ static inline void planarYUV420_to_ARGB32(const uchar *y, int yStride,
int width, int height)
{
height &= ~1;
- quint32 *rgb0 = rgb;
- quint32 *rgb1 = rgb + width;
- for (int j = 0; j < height; j += 2) {
+ for (int j = 0; j + 1 < height; j += 2) {
const uchar *lineY0 = y;
const uchar *lineY1 = y + yStride;
const uchar *lineU = u;
const uchar *lineV = v;
- for (int i = 0; i < width; i += 2) {
+ quint32 *rgb0 = rgb;
+ quint32 *rgb1 = rgb + width;
+ for (int i = 0; i + 1 < width; i += 2) {
EXPAND_UV(*lineU, *lineV);
lineU += uvPixelStride;
lineV += uvPixelStride;
- *rgb0++ = qYUVToARGB32(*lineY0++, rv, guv, bu);
- *rgb0++ = qYUVToARGB32(*lineY0++, rv, guv, bu);
- *rgb1++ = qYUVToARGB32(*lineY1++, rv, guv, bu);
- *rgb1++ = qYUVToARGB32(*lineY1++, rv, guv, bu);
+ rgb0[i] = qYUVToARGB32(*lineY0++, rv, guv, bu);
+ rgb0[i + 1] = qYUVToARGB32(*lineY0++, rv, guv, bu);
+ rgb1[i] = qYUVToARGB32(*lineY1++, rv, guv, bu);
+ rgb1[i + 1] = qYUVToARGB32(*lineY1++, rv, guv, bu);
}
y += yStride << 1; // stride * 2
u += uStride;
v += vStride;
- rgb0 += width;
- rgb1 += width;
+ rgb += width << 1; // width * 2
}
}
@@ -69,31 +68,27 @@ static inline void planarYUV422_to_ARGB32(const uchar *y, int yStride,
quint32 *rgb,
int width, int height)
{
- quint32 *rgb0 = rgb;
-
for (int j = 0; j < height; ++j) {
const uchar *lineY0 = y;
const uchar *lineU = u;
const uchar *lineV = v;
- for (int i = 0; i < width; i += 2) {
+ for (int i = 0; i + 1 < width; i += 2) {
EXPAND_UV(*lineU, *lineV);
lineU += uvPixelStride;
lineV += uvPixelStride;
- *rgb0++ = qYUVToARGB32(*lineY0++, rv, guv, bu);
- *rgb0++ = qYUVToARGB32(*lineY0++, rv, guv, bu);
+ rgb[i] = qYUVToARGB32(*lineY0++, rv, guv, bu);
+ rgb[i+1] = qYUVToARGB32(*lineY0++, rv, guv, bu);
}
y += yStride; // stride * 2
u += uStride;
v += vStride;
- rgb0 += width;
+ rgb += width;
}
}
-
-
static void QT_FASTCALL qt_convert_YUV420P_to_ARGB32(const QVideoFrame &frame, uchar *output)
{
FETCH_INFO_TRIPLANAR(frame)
@@ -187,8 +182,7 @@ static void QT_FASTCALL qt_convert_UYVY_to_ARGB32(const QVideoFrame &frame, ucha
for (int i = 0; i < height; ++i) {
const uchar *lineSrc = src;
-
- for (int j = 0; j < width; j += 2) {
+ for (int j = 0; j + 1 < width; j += 2) {
int u = *lineSrc++;
int y0 = *lineSrc++;
int v = *lineSrc++;
@@ -196,11 +190,12 @@ static void QT_FASTCALL qt_convert_UYVY_to_ARGB32(const QVideoFrame &frame, ucha
EXPAND_UV(u, v);
- *rgb++ = qYUVToARGB32(y0, rv, guv, bu);
- *rgb++ = qYUVToARGB32(y1, rv, guv, bu);
+ rgb[j] = qYUVToARGB32(y0, rv, guv, bu);
+ rgb[j+1] = qYUVToARGB32(y1, rv, guv, bu);
}
src += stride;
+ rgb += width;
}
}
@@ -213,8 +208,7 @@ static void QT_FASTCALL qt_convert_YUYV_to_ARGB32(const QVideoFrame &frame, ucha
for (int i = 0; i < height; ++i) {
const uchar *lineSrc = src;
-
- for (int j = 0; j < width; j += 2) {
+ for (int j = 0; j + 1 < width; j += 2) {
int y0 = *lineSrc++;
int u = *lineSrc++;
int y1 = *lineSrc++;
@@ -222,11 +216,12 @@ static void QT_FASTCALL qt_convert_YUYV_to_ARGB32(const QVideoFrame &frame, ucha
EXPAND_UV(u, v);
- *rgb++ = qYUVToARGB32(y0, rv, guv, bu);
- *rgb++ = qYUVToARGB32(y1, rv, guv, bu);
+ rgb[j] = qYUVToARGB32(y0, rv, guv, bu);
+ rgb[j+1] = qYUVToARGB32(y1, rv, guv, bu);
}
src += stride;
+ rgb += width;
}
}
@@ -376,23 +371,24 @@ static void QT_FASTCALL qt_convert_premultiplied_to_ARGB32(const QVideoFrame &fr
}
static inline void planarYUV420_16bit_to_ARGB32(const uchar *y, int yStride,
- const uchar *u, int uStride,
- const uchar *v, int vStride,
- int uvPixelStride,
- quint32 *rgb,
- int width, int height)
+ const uchar *u, int uStride,
+ const uchar *v, int vStride,
+ int uvPixelStride,
+ quint32 *rgb,
+ int width, int height)
{
height &= ~1;
- quint32 *rgb0 = rgb;
- quint32 *rgb1 = rgb + width;
- for (int j = 0; j < height; j += 2) {
+ for (int j = 0; j + 1 < height; j += 2) {
const uchar *lineY0 = y;
const uchar *lineY1 = y + yStride;
const uchar *lineU = u;
const uchar *lineV = v;
- for (int i = 0; i < width; i += 2) {
+ quint32 *rgb0 = rgb;
+ quint32 *rgb1 = rgb + width;
+
+ for (int i = 0; i + 1 < width; i += 2) {
EXPAND_UV(*lineU, *lineV);
lineU += uvPixelStride;
lineV += uvPixelStride;
@@ -410,11 +406,11 @@ static inline void planarYUV420_16bit_to_ARGB32(const uchar *y, int yStride,
y += yStride << 1; // stride * 2
u += uStride;
v += vStride;
- rgb0 += width;
- rgb1 += width;
+ rgb += width * 2;
}
}
+
static void QT_FASTCALL qt_convert_P016_to_ARGB32(const QVideoFrame &frame, uchar *output)
{
FETCH_INFO_BIPLANAR(frame)
diff --git a/src/multimedia/video/qvideoframeconverter.cpp b/src/multimedia/video/qvideoframeconverter.cpp
index 82e0a0af5..d406aa342 100644
--- a/src/multimedia/video/qvideoframeconverter.cpp
+++ b/src/multimedia/video/qvideoframeconverter.cpp
@@ -6,6 +6,7 @@
#include "qvideoframeformat.h"
#include "qvideoframe_p.h"
#include "qmultimediautils_p.h"
+#include "qabstractvideobuffer.h"
#include <QtCore/qcoreapplication.h>
#include <QtCore/qsize.h>
@@ -16,7 +17,6 @@
#include <QtGui/qoffscreensurface.h>
#include <qpa/qplatformintegration.h>
#include <private/qvideotexturehelper_p.h>
-#include <private/qabstractvideobuffer_p.h>
#include <private/qguiapplication_p.h>
#include <rhi/qrhi.h>
@@ -254,7 +254,7 @@ static bool updateTextures(QRhi *rhi,
static QImage convertJPEG(const QVideoFrame &frame, QtVideo::Rotation rotation, bool mirrorX, bool mirrorY)
{
QVideoFrame varFrame = frame;
- if (!varFrame.map(QVideoFrame::ReadOnly)) {
+ if (!varFrame.map(QtVideo::MapMode::ReadOnly)) {
qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": frame mapping failed";
return {};
}
@@ -273,7 +273,7 @@ static QImage convertCPU(const QVideoFrame &frame, QtVideo::Rotation rotation, b
return {};
} else {
QVideoFrame varFrame = frame;
- if (!varFrame.map(QVideoFrame::ReadOnly)) {
+ if (!varFrame.map(QtVideo::MapMode::ReadOnly)) {
qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": frame mapping failed";
return {};
}
@@ -286,7 +286,8 @@ static QImage convertCPU(const QVideoFrame &frame, QtVideo::Rotation rotation, b
}
}
-QImage qImageFromVideoFrame(const QVideoFrame &frame, QtVideo::Rotation rotation, bool mirrorX, bool mirrorY)
+QImage qImageFromVideoFrame(const QVideoFrame &frame, QtVideo::Rotation rotation, bool mirrorX,
+ bool mirrorY, bool forceCpu)
{
#ifdef Q_OS_DARWIN
QMacAutoReleasePool releasePool;
@@ -310,10 +311,13 @@ QImage qImageFromVideoFrame(const QVideoFrame &frame, QtVideo::Rotation rotation
if (frame.pixelFormat() == QVideoFrameFormat::Format_Jpeg)
return convertJPEG(frame, rotation, mirrorX, mirrorY);
+ if (forceCpu) // For test purposes
+ return convertCPU(frame, rotation, mirrorX, mirrorY);
+
QRhi *rhi = nullptr;
- if (frame.videoBuffer())
- rhi = frame.videoBuffer()->rhi();
+ if (QHwVideoBuffer *buffer = QVideoFramePrivate::hwBuffer(frame))
+ rhi = buffer->rhi();
if (!rhi || rhi->thread() != QThread::currentThread())
rhi = initializeRHI(rhi);
@@ -428,7 +432,7 @@ QImage videoFramePlaneAsImage(QVideoFrame &frame, int plane, QImage::Format targ
if (plane >= frame.planeCount())
return {};
- if (!frame.map(QVideoFrame::ReadOnly)) {
+ if (!frame.map(QtVideo::MapMode::ReadOnly)) {
qWarning() << "Cannot map a video frame in ReadOnly mode!";
return {};
}
diff --git a/src/multimedia/video/qvideoframeconverter_p.h b/src/multimedia/video/qvideoframeconverter_p.h
index d22491f66..ad6cea9e4 100644
--- a/src/multimedia/video/qvideoframeconverter_p.h
+++ b/src/multimedia/video/qvideoframeconverter_p.h
@@ -19,7 +19,9 @@
QT_BEGIN_NAMESPACE
-Q_MULTIMEDIA_EXPORT QImage qImageFromVideoFrame(const QVideoFrame &frame, QtVideo::Rotation rotation = QtVideo::Rotation::None, bool mirrorX = false, bool mirrorY = false);
+Q_MULTIMEDIA_EXPORT QImage
+qImageFromVideoFrame(const QVideoFrame &frame, QtVideo::Rotation rotation = QtVideo::Rotation::None,
+ bool mirrorX = false, bool mirrorY = false, bool forceCpu = false);
/**
* @brief Maps the video frame and returns an image having a shared ownership for the video frame
diff --git a/src/multimedia/video/qvideoframeformat.cpp b/src/multimedia/video/qvideoframeformat.cpp
index b2c9dc5f1..b3177234f 100644
--- a/src/multimedia/video/qvideoframeformat.cpp
+++ b/src/multimedia/video/qvideoframeformat.cpp
@@ -39,7 +39,8 @@ public:
&& viewport == other.viewport
&& frameRatesEqual(frameRate, other.frameRate)
&& colorSpace == other.colorSpace
- && mirrored == other.mirrored)
+ && mirrored == other.mirrored
+ && rotation == other.rotation)
return true;
return false;
@@ -60,6 +61,7 @@ public:
float frameRate = 0.0;
float maxLuminance = -1.;
bool mirrored = false;
+ QtVideo::Rotation rotation = QtVideo::Rotation::None;
};
QT_DEFINE_QESDP_SPECIALIZATION_DTOR(QVideoFrameFormatPrivate);
@@ -538,12 +540,13 @@ void QVideoFrameFormat::setScanLineDirection(Direction direction)
d->scanLineDirection = direction;
}
+#if QT_DEPRECATED_SINCE(6, 8)
/*!
Returns the frame rate of a video stream in frames per second.
*/
qreal QVideoFrameFormat::frameRate() const
{
- return d->frameRate;
+ return streamFrameRate();
}
/*!
@@ -551,6 +554,23 @@ qreal QVideoFrameFormat::frameRate() const
*/
void QVideoFrameFormat::setFrameRate(qreal rate)
{
+ setStreamFrameRate(rate);
+}
+#endif
+
+/*!
+ Returns the frame rate of a video stream in frames per second.
+*/
+qreal QVideoFrameFormat::streamFrameRate() const
+{
+ return d->frameRate;
+}
+
+/*!
+ Sets the frame \a rate of a video stream in frames per second.
+*/
+void QVideoFrameFormat::setStreamFrameRate(qreal rate)
+{
detach();
d->frameRate = rate;
}
@@ -665,6 +685,23 @@ void QVideoFrameFormat::setMirrored(bool mirrored)
}
/*!
+ Returns the rotation angle the matching video frame should be rotated clockwise before displaying.
+ */
+QtVideo::Rotation QVideoFrameFormat::rotation() const
+{
+ return d->rotation;
+}
+
+/*!
+ Sets the \a rotation angle the matching video frame should be rotated clockwise before displaying.
+ */
+void QVideoFrameFormat::setRotation(QtVideo::Rotation rotation)
+{
+ detach();
+ d->rotation = rotation;
+}
+
+/*!
\internal
*/
QString QVideoFrameFormat::vertexShaderFileName() const
@@ -984,7 +1021,7 @@ QDebug operator<<(QDebug dbg, const QVideoFrameFormat &f)
<< "\n frame size=" << f.frameSize()
<< "\n viewport=" << f.viewport()
<< "\n colorSpace=" << f.colorSpace()
- << "\n frameRate=" << f.frameRate()
+ << "\n frameRate=" << f.streamFrameRate()
<< "\n mirrored=" << f.isMirrored();
return dbg;
diff --git a/src/multimedia/video/qvideoframeformat.h b/src/multimedia/video/qvideoframeformat.h
index 5fb6b3701..18dc9952d 100644
--- a/src/multimedia/video/qvideoframeformat.h
+++ b/src/multimedia/video/qvideoframeformat.h
@@ -5,6 +5,7 @@
#define QVIDEOSURFACEFORMAT_H
#include <QtMultimedia/qtmultimediaglobal.h>
+#include <QtMultimedia/qtvideo.h>
#include <QtCore/qlist.h>
#include <QtCore/qmetatype.h>
@@ -153,8 +154,15 @@ public:
Direction scanLineDirection() const;
void setScanLineDirection(Direction direction);
+#if QT_DEPRECATED_SINCE(6, 8)
+ QT_DEPRECATED_VERSION_X_6_8("Use streamFrameRate()")
qreal frameRate() const;
+ QT_DEPRECATED_VERSION_X_6_8("Use setStreamFrameRate()")
void setFrameRate(qreal rate);
+#endif
+
+ qreal streamFrameRate() const;
+ void setStreamFrameRate(qreal rate);
#if QT_DEPRECATED_SINCE(6, 4)
QT_DEPRECATED_VERSION_X_6_4("Use colorSpace()")
@@ -175,6 +183,9 @@ public:
bool isMirrored() const;
void setMirrored(bool mirrored);
+ QtVideo::Rotation rotation() const;
+ void setRotation(QtVideo::Rotation rotation);
+
QString vertexShaderFileName() const;
QString fragmentShaderFileName() const;
void updateUniformData(QByteArray *dst, const QVideoFrame &frame, const QMatrix4x4 &transform, float opacity) const;
diff --git a/src/multimedia/video/qvideooutputorientationhandler.cpp b/src/multimedia/video/qvideooutputorientationhandler.cpp
index c34e9e92a..ff91bd7fb 100644
--- a/src/multimedia/video/qvideooutputorientationhandler.cpp
+++ b/src/multimedia/video/qvideooutputorientationhandler.cpp
@@ -18,8 +18,8 @@ QVideoOutputOrientationHandler::QVideoOutputOrientationHandler(QObject *parent)
if (!screen)
return;
- connect(screen, SIGNAL(orientationChanged(Qt::ScreenOrientation)),
- this, SLOT(screenOrientationChanged(Qt::ScreenOrientation)));
+ connect(screen, &QScreen::orientationChanged, this,
+ &QVideoOutputOrientationHandler::screenOrientationChanged);
screenOrientationChanged(screen->orientation());
}
diff --git a/src/multimedia/video/qvideotexturehelper.cpp b/src/multimedia/video/qvideotexturehelper.cpp
index 937ff33cb..093989654 100644
--- a/src/multimedia/video/qvideotexturehelper.cpp
+++ b/src/multimedia/video/qvideotexturehelper.cpp
@@ -1,9 +1,11 @@
// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qabstractvideobuffer.h"
+
#include "qvideotexturehelper_p.h"
-#include "qabstractvideobuffer_p.h"
#include "qvideoframeconverter_p.h"
+#include "qvideoframe_p.h"
#include <qpainter.h>
#include <qloggingcategory.h>
@@ -213,7 +215,7 @@ static const TextureDescription descriptions[QVideoFrameFormat::NPixelFormats] =
{ { 1, 1 }, { 1, 1 }, { 1, 1 } }
},
// Format_YUV420P10
- { 3, 1,
+ { 3, 2,
[](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
{ QRhiTexture::R16, QRhiTexture::R16, QRhiTexture::R16 },
{ { 1, 1 }, { 2, 2 }, { 2, 2 } }
@@ -520,7 +522,8 @@ void updateUniformData(QByteArray *dst, const QVideoFrameFormat &format, const Q
break;
case QVideoFrameFormat::Format_SamplerExternalOES:
// get Android specific transform for the externalsampler texture
- cmat = frame.videoBuffer()->externalTextureMatrix();
+ if (auto hwBuffer = QVideoFramePrivate::hwBuffer(frame))
+ cmat = hwBuffer->externalTextureMatrix();
break;
case QVideoFrameFormat::Format_SamplerRect:
{
@@ -629,6 +632,9 @@ static UpdateTextureWithMapResult updateTextureWithMap(const QVideoFrame &frame,
static std::unique_ptr<QRhiTexture> createTextureFromHandle(const QVideoFrame &frame, QRhi *rhi, int plane)
{
+ QHwVideoBuffer *hwBuffer = QVideoFramePrivate::hwBuffer(frame);
+ Q_ASSERT(hwBuffer);
+
QVideoFrameFormat fmt = frame.surfaceFormat();
QVideoFrameFormat::PixelFormat pixelFormat = fmt.pixelFormat();
QSize size = fmt.frameSize();
@@ -650,7 +656,7 @@ static std::unique_ptr<QRhiTexture> createTextureFromHandle(const QVideoFrame &f
#endif
}
- if (quint64 handle = frame.videoBuffer()->textureHandle(rhi, plane); handle) {
+ if (quint64 handle = hwBuffer->textureHandle(rhi, plane); handle) {
std::unique_ptr<QRhiTexture> tex(rhi->newTexture(texDesc.textureFormat[plane], planeSize, 1, textureFlags));
if (tex->createFrom({handle, 0}))
return tex;
@@ -712,7 +718,7 @@ static std::unique_ptr<QVideoFrameTextures> createTexturesFromMemory(QVideoFrame
if (oldArray)
textures = oldArray->takeTextures();
- if (!frame.map(QVideoFrame::ReadOnly)) {
+ if (!frame.map(QtVideo::MapMode::ReadOnly)) {
qWarning() << "Cannot map a video frame in ReadOnly mode!";
return {};
}
@@ -736,15 +742,16 @@ static std::unique_ptr<QVideoFrameTextures> createTexturesFromMemory(QVideoFrame
std::unique_ptr<QVideoFrameTextures> createTextures(QVideoFrame &frame, QRhi *rhi, QRhiResourceUpdateBatch *rub, std::unique_ptr<QVideoFrameTextures> &&oldTextures)
{
- QAbstractVideoBuffer *vf = frame.videoBuffer();
- if (!vf)
+ if (!frame.isValid())
return {};
- if (auto vft = vf->mapTextures(rhi))
- return vft;
+ if (QHwVideoBuffer *hwBuffer = QVideoFramePrivate::hwBuffer(frame)) {
+ if (auto textures = hwBuffer->mapTextures(rhi))
+ return textures;
- if (auto vft = createTexturesFromHandles(frame, rhi))
- return vft;
+ if (auto textures = createTexturesFromHandles(frame, rhi))
+ return textures;
+ }
return createTexturesFromMemory(frame, rhi, rub, oldTextures.get());
}
diff --git a/src/multimedia/video/qvideowindow.cpp b/src/multimedia/video/qvideowindow.cpp
index 9cab23f5f..9b88a86df 100644
--- a/src/multimedia/video/qvideowindow.cpp
+++ b/src/multimedia/video/qvideowindow.cpp
@@ -7,7 +7,9 @@
#include <qpainter.h>
#include <private/qguiapplication_p.h>
#include <private/qmemoryvideobuffer_p.h>
+#include <private/qhwvideobuffer_p.h>
#include <private/qmultimediautils_p.h>
+#include <private/qvideoframe_p.h>
#include <qpa/qplatformintegration.h>
QT_BEGIN_NAMESPACE
@@ -209,8 +211,9 @@ void QVideoWindowPrivate::updateTextures(QRhiResourceUpdateBatch *rub)
// We render a 1x1 black pixel when we don't have a video
if (!m_currentFrame.isValid())
- m_currentFrame = QVideoFrame(new QMemoryVideoBuffer(QByteArray{4, 0}, 4),
- QVideoFrameFormat(QSize(1,1), QVideoFrameFormat::Format_RGBA8888));
+ m_currentFrame = QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(QByteArray{ 4, 0 }, 4),
+ QVideoFrameFormat(QSize(1, 1), QVideoFrameFormat::Format_RGBA8888));
m_frameTextures = QVideoTextureHelper::createTextures(m_currentFrame, m_rhi.get(), rub, std::move(m_frameTextures));
if (!m_frameTextures)
diff --git a/src/multimediaquick/qquickimagecapture.cpp b/src/multimediaquick/qquickimagecapture.cpp
index 72dfb78a8..b7e56d18d 100644
--- a/src/multimediaquick/qquickimagecapture.cpp
+++ b/src/multimediaquick/qquickimagecapture.cpp
@@ -56,7 +56,7 @@ QT_BEGIN_NAMESPACE
QQuickImageCapture::QQuickImageCapture(QObject *parent)
: QImageCapture(parent)
{
- connect(this, SIGNAL(imageCaptured(int,QImage)), this, SLOT(_q_imageCaptured(int,QImage)));
+ connect(this, &QImageCapture::imageCaptured, this, &QQuickImageCapture::_q_imageCaptured);
}
QQuickImageCapture::~QQuickImageCapture() = default;
diff --git a/src/multimediaquick/qsgvideonode_p.cpp b/src/multimediaquick/qsgvideonode_p.cpp
index 5dc107337..405744507 100644
--- a/src/multimediaquick/qsgvideonode_p.cpp
+++ b/src/multimediaquick/qsgvideonode_p.cpp
@@ -9,7 +9,7 @@
#include <private/qsginternaltextnode_p.h>
#include <private/qquickitem_p.h>
#include <private/qquickvideooutput_p.h>
-#include <private/qabstractvideobuffer_p.h>
+#include <private/qhwvideobuffer_p.h>
QT_BEGIN_NAMESPACE
diff --git a/src/plugins/multimedia/CMakeLists.txt b/src/plugins/multimedia/CMakeLists.txt
index 978710112..5bc39c1f8 100644
--- a/src/plugins/multimedia/CMakeLists.txt
+++ b/src/plugins/multimedia/CMakeLists.txt
@@ -1,24 +1,24 @@
# Copyright (C) 2022 The Qt Company Ltd.
# SPDX-License-Identifier: BSD-3-Clause
-if (QT_FEATURE_ffmpeg)
+if(QT_FEATURE_ffmpeg)
add_subdirectory(ffmpeg)
-endif ()
-if (QT_FEATURE_gstreamer)
+endif()
+if(QT_FEATURE_gstreamer)
add_subdirectory(gstreamer)
-endif ()
-if (ANDROID)
+endif()
+if(ANDROID)
add_subdirectory(android)
-endif ()
-if (WASM)
+endif()
+if(WASM)
add_subdirectory(wasm)
-endif ()
-if (APPLE AND NOT WATCHOS)
+endif()
+if(APPLE AND NOT WATCHOS)
add_subdirectory(darwin)
-endif ()
-if (QT_FEATURE_wmf)
+endif()
+if(QT_FEATURE_wmf)
add_subdirectory(windows)
-endif ()
-if (QT_FEATURE_mmrenderer)
+endif()
+if(QT_FEATURE_mmrenderer)
add_subdirectory(qnx)
-endif ()
+endif()
diff --git a/src/plugins/multimedia/android/common/qandroidvideooutput.cpp b/src/plugins/multimedia/android/common/qandroidvideooutput.cpp
index 5a4eebf51..0724a8359 100644
--- a/src/plugins/multimedia/android/common/qandroidvideooutput.cpp
+++ b/src/plugins/multimedia/android/common/qandroidvideooutput.cpp
@@ -6,9 +6,10 @@
#include <rhi/qrhi.h>
#include <QtGui/private/qopenglextensions_p.h>
-#include <private/qabstractvideobuffer_p.h>
+#include <private/qhwvideobuffer_p.h>
#include <private/qvideoframeconverter_p.h>
#include <private/qplatformvideosink_p.h>
+#include <private/qvideoframe_p.h>
#include <qvideosink.h>
#include <qopenglcontext.h>
#include <qopenglfunctions.h>
@@ -49,27 +50,24 @@ private:
std::shared_ptr<AndroidTextureThread> m_thread;
};
-
-class AndroidTextureVideoBuffer : public QRhiWithThreadGuard, public QAbstractVideoBuffer
+class AndroidTextureVideoBuffer : public QRhiWithThreadGuard, public QHwVideoBuffer
{
public:
- AndroidTextureVideoBuffer(
- std::shared_ptr<QRhi> rhi, std::shared_ptr<AndroidTextureThread> thread,
- std::unique_ptr<QRhiTexture> tex, const QSize &size)
- : QRhiWithThreadGuard(std::move(rhi), std::move(thread))
- , QAbstractVideoBuffer(QVideoFrame::RhiTextureHandle, m_guardRhi.get())
- , m_size(size)
- , m_tex(std::move(tex))
+ AndroidTextureVideoBuffer(std::shared_ptr<QRhi> rhi,
+ std::shared_ptr<AndroidTextureThread> thread,
+ std::unique_ptr<QRhiTexture> tex, const QSize &size)
+ : QRhiWithThreadGuard(std::move(rhi), std::move(thread)),
+ QHwVideoBuffer(QVideoFrame::RhiTextureHandle, m_guardRhi.get()),
+ m_size(size),
+ m_tex(std::move(tex))
{}
- QVideoFrame::MapMode mapMode() const override { return m_mapMode; }
-
- MapData map(QVideoFrame::MapMode mode) override;
+ MapData map(QtVideo::MapMode mode) override;
void unmap() override
{
m_image = {};
- m_mapMode = QVideoFrame::NotMapped;
+ m_mapMode = QtVideo::MapMode::NotMapped;
}
std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *rhi) override
@@ -81,39 +79,39 @@ private:
QSize m_size;
std::unique_ptr<QRhiTexture> m_tex;
QImage m_image;
- QVideoFrame::MapMode m_mapMode = QVideoFrame::NotMapped;
+ QtVideo::MapMode m_mapMode = QtVideo::MapMode::NotMapped;
};
-class ImageFromVideoFrameHelper : public QAbstractVideoBuffer
+class ImageFromVideoFrameHelper : public QHwVideoBuffer
{
public:
ImageFromVideoFrameHelper(AndroidTextureVideoBuffer &atvb)
- : QAbstractVideoBuffer(QVideoFrame::RhiTextureHandle, atvb.rhi())
- , m_atvb(atvb)
+ : QHwVideoBuffer(QVideoFrame::RhiTextureHandle, atvb.rhi()), m_atvb(atvb)
{}
std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *rhi) override
{
return m_atvb.mapTextures(rhi);
}
- QVideoFrame::MapMode mapMode() const override { return QVideoFrame::NotMapped; }
- MapData map(QVideoFrame::MapMode) override { return {}; }
+
+ MapData map(QtVideo::MapMode) override { return {}; }
void unmap() override {}
private:
AndroidTextureVideoBuffer &m_atvb;
};
-QAbstractVideoBuffer::MapData AndroidTextureVideoBuffer::map(QVideoFrame::MapMode mode)
+QAbstractVideoBuffer::MapData AndroidTextureVideoBuffer::map(QtVideo::MapMode mode)
{
QAbstractVideoBuffer::MapData mapData;
- if (m_mapMode == QVideoFrame::NotMapped && mode == QVideoFrame::ReadOnly) {
- m_mapMode = QVideoFrame::ReadOnly;
- m_image = qImageFromVideoFrame(QVideoFrame(new ImageFromVideoFrameHelper(*this),
- QVideoFrameFormat(m_size, QVideoFrameFormat::Format_RGBA8888)));
- mapData.nPlanes = 1;
+ if (m_mapMode == QtVideo::MapMode::NotMapped && mode == QtVideo::MapMode::ReadOnly) {
+ m_mapMode = QtVideo::MapMode::ReadOnly;
+ m_image = qImageFromVideoFrame(QVideoFramePrivate::createFrame(
+ std::make_unique<ImageFromVideoFrameHelper>(*this),
+ QVideoFrameFormat(m_size, QVideoFrameFormat::Format_RGBA8888)));
+ mapData.planeCount = 1;
mapData.bytesPerLine[0] = m_image.bytesPerLine();
- mapData.size[0] = static_cast<int>(m_image.sizeInBytes());
+ mapData.dataSize[0] = static_cast<int>(m_image.sizeInBytes());
mapData.data[0] = m_image.bits();
}
diff --git a/src/plugins/multimedia/android/common/qandroidvideooutput_p.h b/src/plugins/multimedia/android/common/qandroidvideooutput_p.h
index c59a1b76c..7c9be5aee 100644
--- a/src/plugins/multimedia/android/common/qandroidvideooutput_p.h
+++ b/src/plugins/multimedia/android/common/qandroidvideooutput_p.h
@@ -18,7 +18,7 @@
#include <qsize.h>
#include <qmutex.h>
#include <qreadwritelock.h>
-#include <private/qabstractvideobuffer_p.h>
+#include <qabstractvideobuffer.h>
#include <qmatrix4x4.h>
#include <qoffscreensurface.h>
#include <rhi/qrhi.h>
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidcamerasession.cpp b/src/plugins/multimedia/android/mediacapture/qandroidcamerasession.cpp
index f7dd1c653..7eda1175f 100644
--- a/src/plugins/multimedia/android/mediacapture/qandroidcamerasession.cpp
+++ b/src/plugins/multimedia/android/mediacapture/qandroidcamerasession.cpp
@@ -21,6 +21,7 @@
#include <private/qmemoryvideobuffer_p.h>
#include <private/qcameradevice_p.h>
#include <private/qmediastoragelocation_p.h>
+#include <private/qvideoframe_p.h>
#include <QImageWriter>
QT_BEGIN_NAMESPACE
@@ -734,7 +735,9 @@ void QAndroidCameraSession::processCapturedImage(int id, const QByteArray &bytes
void QAndroidCameraSession::processCapturedImageToBuffer(int id, const QByteArray &bytes,
QVideoFrameFormat::PixelFormat format, QSize size, int bytesPerLine)
{
- QVideoFrame frame(new QMemoryVideoBuffer(bytes, bytesPerLine), QVideoFrameFormat(size, format));
+ QVideoFrame frame = QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(bytes, bytesPerLine),
+ QVideoFrameFormat(size, format));
emit imageAvailable(id, frame);
}
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp b/src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp
index ee5af5dfd..3b005e4a5 100644
--- a/src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp
+++ b/src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp
@@ -133,7 +133,7 @@ void QAndroidCaptureSession::start(QMediaEncoderSettings &settings, const QUrl &
return;
if (!m_cameraSession && !m_audioInput) {
- emit error(QMediaRecorder::ResourceError, QLatin1String("No devices are set"));
+ updateError(QMediaRecorder::ResourceError, QLatin1String("No devices are set"));
return;
}
@@ -142,13 +142,13 @@ void QAndroidCaptureSession::start(QMediaEncoderSettings &settings, const QUrl &
const bool validCameraSession = m_cameraSession && m_cameraSession->camera();
if (validCameraSession && !qt_androidCheckCameraPermission()) {
- emit error(QMediaRecorder::ResourceError, QLatin1String("Camera permission denied."));
+ updateError(QMediaRecorder::ResourceError, QLatin1String("Camera permission denied."));
setKeepAlive(false);
return;
}
if (m_audioInput && !qt_androidCheckMicrophonePermission()) {
- emit error(QMediaRecorder::ResourceError, QLatin1String("Microphone permission denied."));
+ updateError(QMediaRecorder::ResourceError, QLatin1String("Microphone permission denied."));
setKeepAlive(false);
return;
}
@@ -221,15 +221,15 @@ void QAndroidCaptureSession::start(QMediaEncoderSettings &settings, const QUrl &
}
if (!m_mediaRecorder->prepare()) {
- emit error(QMediaRecorder::FormatError, QLatin1String("Unable to prepare the media recorder."));
+ updateError(QMediaRecorder::FormatError,
+ QLatin1String("Unable to prepare the media recorder."));
restartViewfinder();
return;
}
if (!m_mediaRecorder->start()) {
- emit error(QMediaRecorder::FormatError,
- QMediaRecorderPrivate::msgFailedStartRecording());
+ updateError(QMediaRecorder::FormatError, QMediaRecorderPrivate::msgFailedStartRecording());
restartViewfinder();
return;
@@ -451,7 +451,7 @@ void QAndroidCaptureSession::onError(int what, int extra)
Q_UNUSED(what);
Q_UNUSED(extra);
stop(true);
- emit error(QMediaRecorder::ResourceError, QLatin1String("Unknown error."));
+ updateError(QMediaRecorder::ResourceError, QLatin1String("Unknown error."));
}
void QAndroidCaptureSession::onInfo(int what, int extra)
@@ -460,11 +460,11 @@ void QAndroidCaptureSession::onInfo(int what, int extra)
if (what == 800) {
// MEDIA_RECORDER_INFO_MAX_DURATION_REACHED
stop();
- emit error(QMediaRecorder::OutOfSpaceError, QLatin1String("Maximum duration reached."));
+ updateError(QMediaRecorder::OutOfSpaceError, QLatin1String("Maximum duration reached."));
} else if (what == 801) {
// MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED
stop();
- emit error(QMediaRecorder::OutOfSpaceError, QLatin1String("Maximum file size reached."));
+ updateError(QMediaRecorder::OutOfSpaceError, QLatin1String("Maximum file size reached."));
}
}
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h b/src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h
index ab91fc3ef..161d47994 100644
--- a/src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h
+++ b/src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h
@@ -67,10 +67,10 @@ public:
if (m_mediaEncoder)
m_mediaEncoder->actualLocationChanged(location);
}
- void error(int error, const QString &errorString)
+ void updateError(int error, const QString &errorString)
{
if (m_mediaEncoder)
- m_mediaEncoder->error(QMediaRecorder::Error(error), errorString);
+ m_mediaEncoder->updateError(QMediaRecorder::Error(error), errorString);
}
private Q_SLOTS:
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidcamera.cpp b/src/plugins/multimedia/android/wrappers/jni/androidcamera.cpp
index 268434217..cef36d7ad 100644
--- a/src/plugins/multimedia/android/wrappers/jni/androidcamera.cpp
+++ b/src/plugins/multimedia/android/wrappers/jni/androidcamera.cpp
@@ -8,6 +8,8 @@
#include "qandroidmultimediautils_p.h"
#include "qandroidglobal_p.h"
+#include <private/qvideoframe_p.h>
+
#include <qhash.h>
#include <qstringlist.h>
#include <qdebug.h>
@@ -145,9 +147,12 @@ static void notifyNewPreviewFrame(JNIEnv *env, jobject, int id, jbyteArray data,
QByteArray bytes(arrayLength, Qt::Uninitialized);
env->GetByteArrayRegion(data, 0, arrayLength, (jbyte*)bytes.data());
- QVideoFrame frame(new QMemoryVideoBuffer(bytes, bpl),
- QVideoFrameFormat(QSize(width, height),
- qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat(format))));
+ QVideoFrameFormat frameFormat(
+ QSize(width, height),
+ qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat(format)));
+
+ QVideoFrame frame = QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(std::move(bytes), bpl), std::move(frameFormat));
Q_EMIT (*it)->newPreviewFrame(frame);
}
@@ -1730,9 +1735,12 @@ void AndroidCameraPrivate::fetchLastPreviewFrame()
const int format = m_cameraListener.callMethod<jint>("previewFormat");
const int bpl = m_cameraListener.callMethod<jint>("previewBytesPerLine");
- QVideoFrame frame(new QMemoryVideoBuffer(bytes, bpl),
- QVideoFrameFormat(QSize(width, height),
- qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat(format))));
+ QVideoFrameFormat frameFormat(
+ QSize(width, height),
+ qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat(format)));
+
+ QVideoFrame frame = QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(std::move(bytes), bpl), std::move(frameFormat));
emit lastPreviewFrameFetched(frame);
}
diff --git a/src/plugins/multimedia/darwin/avfvideobuffer.mm b/src/plugins/multimedia/darwin/avfvideobuffer.mm
index 434080fc1..57ec89ae7 100644
--- a/src/plugins/multimedia/darwin/avfvideobuffer.mm
+++ b/src/plugins/multimedia/darwin/avfvideobuffer.mm
@@ -16,7 +16,8 @@
QT_USE_NAMESPACE
AVFVideoBuffer::AVFVideoBuffer(AVFVideoSinkInterface *sink, CVImageBufferRef buffer)
- : QAbstractVideoBuffer(sink->rhi() ? QVideoFrame::RhiTextureHandle : QVideoFrame::NoHandle, sink->rhi()),
+ : QHwVideoBuffer(sink->rhi() ? QVideoFrame::RhiTextureHandle : QVideoFrame::NoHandle,
+ sink->rhi()),
sink(sink),
m_buffer(buffer)
{
@@ -43,33 +44,33 @@ AVFVideoBuffer::~AVFVideoBuffer()
CVPixelBufferRelease(m_buffer);
}
-AVFVideoBuffer::MapData AVFVideoBuffer::map(QVideoFrame::MapMode mode)
+AVFVideoBuffer::MapData AVFVideoBuffer::map(QtVideo::MapMode mode)
{
MapData mapData;
- if (m_mode == QVideoFrame::NotMapped) {
- CVPixelBufferLockBaseAddress(m_buffer, mode == QVideoFrame::ReadOnly
+ if (m_mode == QtVideo::MapMode::NotMapped) {
+ CVPixelBufferLockBaseAddress(m_buffer, mode == QtVideo::MapMode::ReadOnly
? kCVPixelBufferLock_ReadOnly
: 0);
m_mode = mode;
}
- mapData.nPlanes = CVPixelBufferGetPlaneCount(m_buffer);
- Q_ASSERT(mapData.nPlanes <= 3);
+ mapData.planeCount = CVPixelBufferGetPlaneCount(m_buffer);
+ Q_ASSERT(mapData.planeCount <= 3);
- if (!mapData.nPlanes) {
+ if (!mapData.planeCount) {
// single plane
mapData.bytesPerLine[0] = CVPixelBufferGetBytesPerRow(m_buffer);
mapData.data[0] = static_cast<uchar*>(CVPixelBufferGetBaseAddress(m_buffer));
- mapData.size[0] = CVPixelBufferGetDataSize(m_buffer);
- mapData.nPlanes = mapData.data[0] ? 1 : 0;
+ mapData.dataSize[0] = CVPixelBufferGetDataSize(m_buffer);
+ mapData.planeCount = mapData.data[0] ? 1 : 0;
return mapData;
}
// For a bi-planar or tri-planar format we have to set the parameters correctly:
- for (int i = 0; i < mapData.nPlanes; ++i) {
+ for (int i = 0; i < mapData.planeCount; ++i) {
mapData.bytesPerLine[i] = CVPixelBufferGetBytesPerRowOfPlane(m_buffer, i);
- mapData.size[i] = mapData.bytesPerLine[i]*CVPixelBufferGetHeightOfPlane(m_buffer, i);
+ mapData.dataSize[i] = mapData.bytesPerLine[i]*CVPixelBufferGetHeightOfPlane(m_buffer, i);
mapData.data[i] = static_cast<uchar*>(CVPixelBufferGetBaseAddressOfPlane(m_buffer, i));
}
@@ -78,11 +79,11 @@ AVFVideoBuffer::MapData AVFVideoBuffer::map(QVideoFrame::MapMode mode)
void AVFVideoBuffer::unmap()
{
- if (m_mode != QVideoFrame::NotMapped) {
- CVPixelBufferUnlockBaseAddress(m_buffer, m_mode == QVideoFrame::ReadOnly
+ if (m_mode != QtVideo::MapMode::NotMapped) {
+ CVPixelBufferUnlockBaseAddress(m_buffer, m_mode == QtVideo::MapMode::ReadOnly
? kCVPixelBufferLock_ReadOnly
: 0);
- m_mode = QVideoFrame::NotMapped;
+ m_mode = QtVideo::MapMode::NotMapped;
}
}
diff --git a/src/plugins/multimedia/darwin/avfvideobuffer_p.h b/src/plugins/multimedia/darwin/avfvideobuffer_p.h
index 69d7b7f45..f70961c15 100644
--- a/src/plugins/multimedia/darwin/avfvideobuffer_p.h
+++ b/src/plugins/multimedia/darwin/avfvideobuffer_p.h
@@ -15,8 +15,7 @@
// We mean it.
//
-#include <QtMultimedia/qvideoframe.h>
-#include <private/qabstractvideobuffer_p.h>
+#include <private/qhwvideobuffer_p.h>
#include <private/qcore_mac_p.h>
#include <QtCore/qobject.h>
@@ -31,14 +30,13 @@
QT_BEGIN_NAMESPACE
struct AVFMetalTexture;
-class AVFVideoBuffer : public QAbstractVideoBuffer
+class AVFVideoBuffer : public QHwVideoBuffer
{
public:
AVFVideoBuffer(AVFVideoSinkInterface *sink, CVImageBufferRef buffer);
~AVFVideoBuffer();
- QVideoFrame::MapMode mapMode() const { return m_mode; }
- MapData map(QVideoFrame::MapMode mode);
+ MapData map(QtVideo::MapMode mode);
void unmap();
virtual quint64 textureHandle(QRhi *, int plane) const;
@@ -57,7 +55,7 @@ private:
#endif
CVImageBufferRef m_buffer = nullptr;
- QVideoFrame::MapMode m_mode = QVideoFrame::NotMapped;
+ QtVideo::MapMode m_mode = QtVideo::MapMode::NotMapped;
QVideoFrameFormat m_format;
};
diff --git a/src/plugins/multimedia/darwin/camera/avfcamerarenderer.mm b/src/plugins/multimedia/darwin/camera/avfcamerarenderer.mm
index f2da5c6ed..63e5f9056 100644
--- a/src/plugins/multimedia/darwin/camera/avfcamerarenderer.mm
+++ b/src/plugins/multimedia/darwin/camera/avfcamerarenderer.mm
@@ -1,8 +1,9 @@
// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-#include "private/qabstractvideobuffer_p.h"
+#include "qabstractvideobuffer.h"
#include "private/qcameradevice_p.h"
+#include "private/qvideoframe_p.h"
#include "avfcamerarenderer_p.h"
#include "avfcamerasession_p.h"
#include "avfcameraservice_p.h"
@@ -21,8 +22,6 @@
#include <QtGui/qopengl.h>
#endif
-#include <private/qabstractvideobuffer_p.h>
-
#include <QtMultimedia/qvideoframeformat.h>
QT_USE_NAMESPACE
@@ -63,14 +62,13 @@ QT_USE_NAMESPACE
// avfmediaassetwriter).
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
- AVFVideoBuffer *buffer = new AVFVideoBuffer(m_renderer, imageBuffer);
+ auto buffer = std::make_unique<AVFVideoBuffer>(m_renderer, imageBuffer);
auto format = buffer->videoFormat();
if (!format.isValid()) {
- delete buffer;
return;
}
- QVideoFrame frame(buffer, format);
+ QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(buffer), format);
m_renderer->syncHandleViewfinderFrame(frame);
}
diff --git a/src/plugins/multimedia/darwin/camera/avfimagecapture.mm b/src/plugins/multimedia/darwin/camera/avfimagecapture.mm
index 07988f3e2..2ee7b0597 100644
--- a/src/plugins/multimedia/darwin/camera/avfimagecapture.mm
+++ b/src/plugins/multimedia/darwin/camera/avfimagecapture.mm
@@ -11,6 +11,7 @@
#include "private/qmediastoragelocation_p.h"
#include <private/qplatformimagecapture_p.h>
#include <private/qmemoryvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
#include <QtCore/qurl.h>
#include <QtCore/qfile.h>
@@ -118,8 +119,10 @@ int AVFImageCapture::doCapture(const QString &actualFileName)
QBuffer data(&jpgData);
QImageReader reader(&data, "JPEG");
QSize size = reader.size();
- QVideoFrame frame(new QMemoryVideoBuffer(QByteArray(jpgData.constData(), jpgData.size()), -1),
- QVideoFrameFormat(size, QVideoFrameFormat::Format_Jpeg));
+ auto buffer = std::make_unique<QMemoryVideoBuffer>(
+ QByteArray(jpgData.constData(), jpgData.size()), -1);
+ QVideoFrame frame = QVideoFramePrivate::createFrame(
+ std::move(buffer), QVideoFrameFormat(size, QVideoFrameFormat::Format_Jpeg));
QMetaObject::invokeMethod(this, "imageAvailable", Qt::QueuedConnection,
Q_ARG(int, request.captureId),
Q_ARG(QVideoFrame, frame));
diff --git a/src/plugins/multimedia/darwin/camera/avfmediaencoder.mm b/src/plugins/multimedia/darwin/camera/avfmediaencoder.mm
index 4a138d4e9..3fbc57995 100644
--- a/src/plugins/multimedia/darwin/camera/avfmediaencoder.mm
+++ b/src/plugins/multimedia/darwin/camera/avfmediaencoder.mm
@@ -479,7 +479,7 @@ void AVFMediaEncoder::record(QMediaEncoderSettings &settings)
if (!cameraControl && !audioInput) {
qWarning() << Q_FUNC_INFO << "Cannot record without any inputs";
- Q_EMIT error(QMediaRecorder::ResourceError, tr("No inputs specified"));
+ updateError(QMediaRecorder::ResourceError, tr("No inputs specified"));
return;
}
@@ -491,8 +491,8 @@ void AVFMediaEncoder::record(QMediaEncoderSettings &settings)
if (!audioOnly) {
if (!cameraControl || !cameraControl->isActive()) {
qCDebug(qLcCamera) << Q_FUNC_INFO << "can not start record while camera is not active";
- Q_EMIT error(QMediaRecorder::ResourceError,
- QMediaRecorderPrivate::msgFailedStartRecording());
+ updateError(QMediaRecorder::ResourceError,
+ QMediaRecorderPrivate::msgFailedStartRecording());
return;
}
}
@@ -506,13 +506,13 @@ void AVFMediaEncoder::record(QMediaEncoderSettings &settings)
NSURL *nsFileURL = fileURL.toNSURL();
if (!nsFileURL) {
qWarning() << Q_FUNC_INFO << "invalid output URL:" << fileURL;
- Q_EMIT error(QMediaRecorder::ResourceError, tr("Invalid output file URL"));
+ updateError(QMediaRecorder::ResourceError, tr("Invalid output file URL"));
return;
}
if (!qt_is_writable_file_URL(nsFileURL)) {
qWarning() << Q_FUNC_INFO << "invalid output URL:" << fileURL
<< "(the location is not writable)";
- Q_EMIT error(QMediaRecorder::ResourceError, tr("Non-writeable file location"));
+ updateError(QMediaRecorder::ResourceError, tr("Non-writeable file location"));
return;
}
if (qt_file_exists(nsFileURL)) {
@@ -520,7 +520,7 @@ void AVFMediaEncoder::record(QMediaEncoderSettings &settings)
// Objective-C exception, which is not good at all.
qWarning() << Q_FUNC_INFO << "invalid output URL:" << fileURL
<< "(file already exists)";
- Q_EMIT error(QMediaRecorder::ResourceError, tr("File already exists"));
+ updateError(QMediaRecorder::ResourceError, tr("File already exists"));
return;
}
@@ -555,8 +555,7 @@ void AVFMediaEncoder::record(QMediaEncoderSettings &settings)
[m_writer start];
} else {
[session startRunning];
- Q_EMIT error(QMediaRecorder::FormatError,
- QMediaRecorderPrivate::msgFailedStartRecording());
+ updateError(QMediaRecorder::FormatError, QMediaRecorderPrivate::msgFailedStartRecording());
}
}
@@ -632,7 +631,7 @@ void AVFMediaEncoder::assetWriterFinished()
void AVFMediaEncoder::assetWriterError(QString err)
{
- Q_EMIT error(QMediaRecorder::FormatError, err);
+ updateError(QMediaRecorder::FormatError, err);
if (m_state != QMediaRecorder::StoppedState)
stopWriter();
}
diff --git a/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer.mm b/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer.mm
index b6d9622ac..964964a8e 100644
--- a/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer.mm
+++ b/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer.mm
@@ -12,6 +12,7 @@
#include <qpointer.h>
#include <QFileInfo>
#include <QtCore/qmath.h>
+#include <QtCore/qmutex.h>
#import <AVFoundation/AVFoundation.h>
@@ -59,6 +60,12 @@ static void *AVFMediaPlayerObserverCurrentItemDurationObservationContext = &AVFM
- (BOOL) resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
@end
+#ifdef Q_OS_IOS
+// Alas, no such thing as 'class variable', hence globals:
+static unsigned sessionActivationCount;
+static QMutex sessionMutex;
+#endif // Q_OS_IOS
+
@implementation AVFMediaPlayerObserver
{
@private
@@ -70,10 +77,39 @@ static void *AVFMediaPlayerObserverCurrentItemDurationObservationContext = &AVFM
BOOL m_bufferIsLikelyToKeepUp;
NSData *m_data;
NSString *m_mimeType;
+#ifdef Q_OS_IOS
+ BOOL m_activated;
+#endif
}
@synthesize m_player, m_playerItem, m_playerLayer, m_session;
+#ifdef Q_OS_IOS
+- (void)setSessionActive:(BOOL)active
+{
+ const QMutexLocker lock(&sessionMutex);
+ if (active) {
+ // Don't count the same player twice if already activated,
+ // unless it tried to deactivate first:
+ if (m_activated)
+ return;
+ if (!sessionActivationCount)
+ [AVAudioSession.sharedInstance setActive:YES error:nil];
+ ++sessionActivationCount;
+ m_activated = YES;
+ } else {
+ if (!sessionActivationCount || !m_activated) {
+ qWarning("Unbalanced audio session deactivation, ignoring.");
+ return;
+ }
+ --sessionActivationCount;
+ m_activated = NO;
+ if (!sessionActivationCount)
+ [AVAudioSession.sharedInstance setActive:NO error:nil];
+ }
+}
+#endif // Q_OS_IOS
+
- (AVFMediaPlayerObserver *) initWithMediaPlayerSession:(AVFMediaPlayer *)session
{
if (!(self = [super init]))
@@ -159,7 +195,7 @@ static void *AVFMediaPlayerObserverCurrentItemDurationObservationContext = &AVFM
if (m_playerLayer)
m_playerLayer.player = nil;
#if defined(Q_OS_IOS)
- [[AVAudioSession sharedInstance] setActive:NO error:nil];
+ [self setSessionActive:NO];
#endif
}
@@ -279,7 +315,7 @@ static void *AVFMediaPlayerObserverCurrentItemDurationObservationContext = &AVFM
context:AVFMediaPlayerObserverCurrentItemDurationObservationContext];
#if defined(Q_OS_IOS)
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback withOptions:AVAudioSessionCategoryOptionMixWithOthers error:nil];
- [[AVAudioSession sharedInstance] setActive:YES error:nil];
+ [self setSessionActive:YES];
#endif
}
diff --git a/src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol.mm b/src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol.mm
index 002d688eb..66687c931 100644
--- a/src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol.mm
+++ b/src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol.mm
@@ -5,6 +5,7 @@
#include "avfdisplaylink_p.h"
#include <avfvideobuffer_p.h>
#include "qavfhelpers_p.h"
+#include "private/qvideoframe_p.h"
#include <QtMultimedia/qvideoframeformat.h>
@@ -148,11 +149,12 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(width, height);
if (!pixelBuffer)
return;
- AVFVideoBuffer *buffer = new AVFVideoBuffer(this, pixelBuffer);
-// qDebug() << "Got pixelbuffer with format" << fmt << Qt::hex << CVPixelBufferGetPixelFormatType(pixelBuffer);
+ auto buffer = std::make_unique<AVFVideoBuffer>(this, pixelBuffer);
+ // qDebug() << "Got pixelbuffer with format" << fmt << Qt::hex <<
+ // CVPixelBufferGetPixelFormatType(pixelBuffer);
CVPixelBufferRelease(pixelBuffer);
- frame = QVideoFrame(buffer, buffer->videoFormat());
+ frame = QVideoFramePrivate::createFrame(std::move(buffer), buffer->videoFormat());
frame.setRotation(m_rotation);
frame.setMirrored(m_mirrored);
m_sink->setVideoFrame(frame);
diff --git a/src/plugins/multimedia/ffmpeg/CMakeLists.txt b/src/plugins/multimedia/ffmpeg/CMakeLists.txt
index 68a09b20f..c6ab93273 100644
--- a/src/plugins/multimedia/ffmpeg/CMakeLists.txt
+++ b/src/plugins/multimedia/ffmpeg/CMakeLists.txt
@@ -21,6 +21,7 @@ qt_internal_add_plugin(QFFmpegMediaPlugin
qffmpegavaudioformat.cpp qffmpegavaudioformat_p.h
qffmpegaudiodecoder.cpp qffmpegaudiodecoder_p.h
qffmpegaudioinput.cpp qffmpegaudioinput_p.h
+ qffmpegconverter.cpp qffmpegconverter_p.h
qffmpeghwaccel.cpp qffmpeghwaccel_p.h
qffmpegmediametadata.cpp qffmpegmediametadata_p.h
qffmpegmediaplayer.cpp qffmpegmediaplayer_p.h
@@ -36,7 +37,6 @@ qt_internal_add_plugin(QFFmpegMediaPlugin
qffmpegencodingformatcontext.cpp qffmpegencodingformatcontext_p.h
qgrabwindowsurfacecapture.cpp qgrabwindowsurfacecapture_p.h
qffmpegsurfacecapturegrabber.cpp qffmpegsurfacecapturegrabber_p.h
- qffmpegsymbolsresolve_p.h
qffmpegplaybackengine.cpp qffmpegplaybackengine_p.h
playbackengine/qffmpegplaybackenginedefs_p.h
@@ -66,6 +66,10 @@ qt_internal_add_plugin(QFFmpegMediaPlugin
recordingengine/qffmpegmuxer.cpp
recordingengine/qffmpegrecordingengine_p.h
recordingengine/qffmpegrecordingengine.cpp
+ recordingengine/qffmpegencodinginitializer_p.h
+ recordingengine/qffmpegencodinginitializer.cpp
+ recordingengine/qffmpegrecordingengineutils_p.h
+ recordingengine/qffmpegrecordingengineutils.cpp
recordingengine/qffmpegvideoencoder_p.h
recordingengine/qffmpegvideoencoder.cpp
recordingengine/qffmpegvideoencoderutils_p.h
@@ -80,75 +84,39 @@ qt_internal_add_plugin(QFFmpegMediaPlugin
Qt::CorePrivate
)
-if(DYNAMIC_RESOLVE_OPENSSL_SYMBOLS)
- if(NOT OPENSSL_INCLUDE_DIR AND OPENSSL_ROOT_DIR)
- set(OPENSSL_INCLUDE_DIR "${OPENSSL_ROOT_DIR}/include")
- endif()
-endif()
-
-qt_internal_extend_target(QFFmpegMediaPlugin CONDITION DYNAMIC_RESOLVE_OPENSSL_SYMBOLS
- SOURCES
- qffmpegopensslsymbols.cpp
- INCLUDE_DIRECTORIES
- ${OPENSSL_INCLUDE_DIR}
-)
+if (LINUX OR ANDROID)
+ # We have 2 options: link shared stubs to QFFmpegMediaPlugin vs
+ # static compilation of the needed stubs to the FFmpeg plugin.
+ # Currently, we chose the second option so that user could trivially
+ # remove the FFmpeg libs we ship.
+ # Set QT_LINK_STUBS_TO_FFMPEG_PLUGIN = TRUE to change the behavior.
-if (ENABLE_DYNAMIC_RESOLVE_VAAPI_SYMBOLS)
- if (QT_FEATURE_vaapi AND NOT DYNAMIC_RESOLVE_VAAPI_SYMBOLS)
- if (NOT FFMPEG_SHARED_LIBRARIES)
- message(WARNING
- "QT_FEATURE_vaapi is found but statically built FFmpeg doesn't include vaapi,"
- "however dynamic symbols resolve is possible.")
- endif()
+ # set(QT_LINK_STUBS_TO_FFMPEG_PLUGIN TRUE)
- set(DYNAMIC_RESOLVE_VAAPI_SYMBOLS TRUE CACHE INTERNAL "")
- elseif (NOT QT_FEATURE_vaapi AND DYNAMIC_RESOLVE_VAAPI_SYMBOLS)
-
- message(FATAL_ERROR
- "QT_FEATURE_vaapi is not found "
- "but FFmpeg includes VAAPI and dynamic symbols resolve is enabled.")
- endif()
+ include("${CMAKE_CURRENT_SOURCE_DIR}/cmake/QtAddFFmpegStubs.cmake")
+ qt_internal_multimedia_add_ffmpeg_stubs()
endif()
-qt_internal_extend_target(QFFmpegMediaPlugin
- CONDITION
- DYNAMIC_RESOLVE_OPENSSL_SYMBOLS OR DYNAMIC_RESOLVE_VAAPI_SYMBOLS
- SOURCES
- qffmpegsymbolsresolveutils.cpp qffmpegsymbolsresolveutils_p.h
-)
-
-function (__propagate_to_compile_definitions VAR)
- if (${VAR})
- target_compile_definitions(QFFmpegMediaPlugin PRIVATE ${VAR})
- endif()
-endfunction()
-__propagate_to_compile_definitions(DYNAMIC_RESOLVE_OPENSSL_SYMBOLS)
-__propagate_to_compile_definitions(DYNAMIC_RESOLVE_VAAPI_SYMBOLS)
-__propagate_to_compile_definitions(DYNAMIC_RESOLVE_VA_DRM_SYMBOLS)
-__propagate_to_compile_definitions(DYNAMIC_RESOLVE_VA_X11_SYMBOLS)
-
-qt_internal_extend_target(QFFmpegMediaPlugin CONDITION DYNAMIC_RESOLVE_VAAPI_SYMBOLS
- SOURCES
- qffmpegvaapisymbols.cpp
- INCLUDE_DIRECTORIES
- "$<TARGET_PROPERTY:VAAPI::VAAPI,INTERFACE_INCLUDE_DIRECTORIES>"
-)
+if (QT_FEATURE_vaapi)
+ qt_internal_extend_target(QFFmpegMediaPlugin
+ SOURCES
+ qffmpeghwaccel_vaapi.cpp qffmpeghwaccel_vaapi_p.h
+ NO_UNITY_BUILD_SOURCES
+ # Conflicts with macros defined in X11.h, and Xlib.h
+ qffmpeghwaccel_vaapi.cpp
+ LIBRARIES
+ EGL::EGL
+ )
-qt_internal_extend_target(QFFmpegMediaPlugin
- CONDITION NOT DYNAMIC_RESOLVE_VAAPI_SYMBOLS AND QT_FEATURE_vaapi
- LIBRARIES VAAPI::VAAPI
-)
+ list(FIND FFMPEG_STUBS "va" va_stub_index)
+ if (NOT QT_LINK_STUBS_TO_FFMPEG_PLUGIN AND (FFMPEG_SHARED_LIBRARIES OR ${va_stub_index} EQUAL -1))
+ target_compile_definitions(QFFmpegMediaPlugin PRIVATE Q_FFMPEG_PLUGIN_STUBS_ONLY)
+ qt_internal_multimedia_find_vaapi_soversion()
+ qt_internal_multimedia_add_private_stub_to_plugin("va")
+ endif()
+endif()
-qt_internal_extend_target(QFFmpegMediaPlugin CONDITION QT_FEATURE_vaapi
- SOURCES
- qffmpeghwaccel_vaapi.cpp qffmpeghwaccel_vaapi_p.h
- NO_UNITY_BUILD_SOURCES
- # Conflicts with macros defined in X11.h, and Xlib.h
- qffmpeghwaccel_vaapi.cpp
- LIBRARIES
- EGL::EGL
-)
qt_internal_extend_target(QFFmpegMediaPlugin CONDITION APPLE
SOURCES
@@ -279,13 +247,14 @@ endif()
# TODO: get libs from FindFFmpeg.cmake
set(ffmpeg_libs FFmpeg::avformat FFmpeg::avcodec FFmpeg::swresample FFmpeg::swscale FFmpeg::avutil)
-if (QT_DEPLOY_FFMPEG AND NOT BUILD_SHARED_LIBS)
+if (QT_DEPLOY_FFMPEG AND NOT BUILD_SHARED_LIBS AND NOT UIKIT)
message(FATAL_ERROR "QT_DEPLOY_FFMPEG is not implemented yet for static builds")
endif()
-if (QT_DEPLOY_FFMPEG AND FFMPEG_SHARED_LIBRARIES AND BUILD_SHARED_LIBS)
+if (QT_DEPLOY_FFMPEG AND FFMPEG_SHARED_LIBRARIES AND (BUILD_SHARED_LIBS OR UIKIT))
include("${CMAKE_CURRENT_SOURCE_DIR}/cmake/QtDeployFFmpeg.cmake")
qt_internal_multimedia_copy_or_install_ffmpeg()
endif()
qt_internal_extend_target(QFFmpegMediaPlugin LIBRARIES ${ffmpeg_libs})
+
diff --git a/src/plugins/multimedia/ffmpeg/cmake/QtAddFFmpegStubs.cmake b/src/plugins/multimedia/ffmpeg/cmake/QtAddFFmpegStubs.cmake
new file mode 100644
index 000000000..5778ae4d2
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/cmake/QtAddFFmpegStubs.cmake
@@ -0,0 +1,199 @@
+# Copyright (C) 2024 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+# Utilities
+
+function(qt_internal_multimedia_find_ffmpeg_stubs)
+ foreach (stub ${FFMPEG_STUBS})
+ if (${stub} MATCHES ${vaapi_regex})
+ set(ffmpeg_has_vaapi TRUE PARENT_SCOPE)
+ elseif (${stub} MATCHES ${openssl_regex})
+ set(ffmpeg_has_openssl TRUE PARENT_SCOPE)
+ else()
+ set(unknown_ffmpeg_stubs
+ ${unknown_ffmpeg_stubs} ${stub} PARENT_SCOPE)
+ endif()
+ endforeach()
+endfunction()
+
+function(qt_internal_multimedia_check_ffmpeg_stubs_configuration)
+ if (NOT LINUX AND NOT ANDROID)
+ message(FATAL_ERROR "Currently, stubs are supported on Linux and Android")
+ endif()
+
+ if (unknown_ffmpeg_stubs)
+ message(FATAL_ERROR "Unknown ffmpeg stubs: ${unknown_ffmpeg_stubs}")
+ endif()
+
+ if (BUILD_SHARED_LIBS AND FFMPEG_SHARED_LIBRARIES AND FFMPEG_STUBS AND NOT QT_DEPLOY_FFMPEG)
+ message(FATAL_ERROR
+ "FFmpeg stubs have been found but QT_DEPLOY_FFMPEG is not specified. "
+ "Set -DQT_DEPLOY_FFMPEG=TRUE to continue.")
+ endif()
+
+ if (ffmpeg_has_vaapi AND NOT QT_FEATURE_vaapi)
+ message(FATAL_ERROR
+ "QT_FEATURE_vaapi is OFF but FFmpeg includes VAAPI.")
+ elseif (NOT ffmpeg_has_vaapi AND QT_FEATURE_vaapi)
+ message(WARNING
+ "QT_FEATURE_vaapi is ON "
+ "but FFmpeg includes VAAPI and dynamic symbols resolve is enabled.")
+ elseif(ffmpeg_has_vaapi AND NOT VAAPI_SUFFIX)
+ message(FATAL_ERROR "Cannot find VAAPI_SUFFIX, fix FindVAAPI.cmake")
+ elseif (ffmpeg_has_vaapi AND "${VAAPI_SUFFIX}" MATCHES "^1\\.32.*")
+ # drop the ancient vaapi version to avoid ABI problems
+ message(FATAL_ERROR "VAAPI ${VAAPI_SUFFIX} is not supported")
+ endif()
+
+ if (ffmpeg_has_openssl AND NOT QT_FEATURE_openssl)
+ message(FATAL_ERROR
+ "QT_FEATURE_openssl is OFF but FFmpeg includes OpenSSL.")
+ endif()
+endfunction()
+
+macro(qt_internal_multimedia_find_vaapi_soversion)
+ string(REGEX MATCH "^[0-9]+" va_soversion "${VAAPI_SUFFIX}")
+
+ set(va-drm_soversion "${va_soversion}")
+ set(va-x11_soversion "${va_soversion}")
+endmacro()
+
+macro(qt_internal_multimedia_find_openssl_soversion)
+ # Update OpenSSL variables since OPENSSL_SSL_LIBRARY is not propagated to this place in some cases.
+ qt_find_package(OpenSSL)
+
+ if (NOT OPENSSL_INCLUDE_DIR AND OPENSSL_ROOT_DIR)
+ set(OPENSSL_INCLUDE_DIR "${OPENSSL_ROOT_DIR}/include")
+ endif()
+
+ if (LINUX)
+ if (NOT OPENSSL_SSL_LIBRARY)
+ message(FATAL_ERROR "OPENSSL_SSL_LIBRARY is not found")
+ endif()
+
+ get_filename_component(ssl_lib_realpath "${OPENSSL_SSL_LIBRARY}" REALPATH)
+ string(REGEX MATCH "[0-9]+(\\.[0-9]+)*$" ssl_soversion "${ssl_lib_realpath}")
+ string(REGEX REPLACE "^3(\\..*|$)" "3" ssl_soversion "${ssl_soversion}")
+ endif()
+
+ # TODO: enhance OpenSSL version detection and throw an error if the version is not found.
+
+ set(crypto_soversion "${ssl_soversion}")
+endmacro()
+
+function(qt_internal_multimedia_set_stub_version_script stub stub_target)
+ if ("${stub}" MATCHES "${openssl_regex}")
+ if ("${ssl_soversion}" STREQUAL "3" OR
+ (NOT ssl_soversion AND "${OPENSSL_VERSION}" MATCHES "^3\\..*"))
+ # Use the version script only for OpenSSL 3; symbols in OpenSSL 1.* are not versioned.
+ set(file_name "openssl3.ver")
+ endif()
+ elseif("${stub}" STREQUAL "va")
+ set(file_name "va.ver")
+ endif()
+
+ if (file_name)
+ set(version_script "${CMAKE_CURRENT_SOURCE_DIR}/symbolstubs/${file_name}")
+ set_property(TARGET ${stub_target} APPEND_STRING
+ PROPERTY LINK_FLAGS " -Wl,--version-script=${version_script}")
+ set_target_properties(${stub_target} PROPERTIES LINK_DEPENDS ${version_script})
+ source_group("Stubs Version Scripts" FILES ${version_script})
+ endif()
+endfunction()
+
+function(qt_internal_multimedia_set_stub_output stub stub_target)
+ set(output_dir "${QT_BUILD_DIR}/${INSTALL_LIBDIR}")
+
+ set_target_properties(${stub_target} PROPERTIES
+ RUNTIME_OUTPUT_DIRECTORY "${output_dir}"
+ LIBRARY_OUTPUT_DIRECTORY "${output_dir}"
+ )
+
+ if (${stub}_soversion)
+ set_target_properties(${stub_target} PROPERTIES
+ VERSION "${${stub}_soversion}"
+ SOVERSION "${${stub}_soversion}")
+ endif()
+
+ qt_apply_rpaths(TARGET ${stub_target} INSTALL_PATH "${INSTALL_LIBDIR}" RELATIVE_RPATH)
+endfunction()
+
+function(qt_internal_multimedia_set_stub_include_directories stub target)
+ qt_internal_extend_target(${target}
+ CONDITION ${stub} MATCHES "${openssl_regex}"
+ INCLUDE_DIRECTORIES "${OPENSSL_INCLUDE_DIR}")
+
+ qt_internal_extend_target(${target}
+ CONDITION ${stub} MATCHES "${vaapi_regex}"
+ INCLUDE_DIRECTORIES "${VAAPI_INCLUDE_DIR}")
+endfunction()
+
+function(qt_internal_multimedia_set_stub_symbols_visibility stub stub_target)
+ set_target_properties(${stub_target} PROPERTIES
+ C_VISIBILITY_PRESET hidden
+ CXX_VISIBILITY_PRESET hidden)
+ target_compile_definitions(${stub_target} PRIVATE Q_EXPORT_STUB_SYMBOLS)
+endfunction()
+
+function(qt_internal_multimedia_set_stub_libraries stub stub_target)
+ qt_internal_extend_target(${stub_target} LIBRARIES Qt::Core Qt::MultimediaPrivate)
+
+ if (LINK_STUBS_TO_FFMPEG_PLUGIN AND ${stub} STREQUAL "va")
+ qt_internal_extend_target(QFFmpegMediaPlugin LIBRARIES ${stub_target})
+ endif()
+endfunction()
+
+function(qt_internal_multimedia_define_stub_needed_version stub target)
+ string(TOUPPER ${stub} prefix)
+ string(REPLACE "-" "_" prefix ${prefix})
+
+ target_compile_definitions(${target} PRIVATE
+ "${prefix}_NEEDED_SOVERSION=\"${${stub}_soversion}\"")
+endfunction()
+
+function(qt_internal_multimedia_add_shared_stub stub)
+ set(stub_target "Qt${PROJECT_VERSION_MAJOR}FFmpegStub-${stub}")
+
+ qt_add_library(${stub_target} SHARED "symbolstubs/qffmpegsymbols-${stub}.cpp")
+
+ qt_internal_multimedia_set_stub_include_directories(${stub} ${stub_target})
+ qt_internal_multimedia_set_stub_output(${stub} ${stub_target})
+ qt_internal_multimedia_set_stub_symbols_visibility(${stub} ${stub_target})
+ qt_internal_multimedia_set_stub_version_script(${stub} ${stub_target})
+ qt_internal_multimedia_define_stub_needed_version(${stub} ${stub_target})
+ qt_internal_multimedia_set_stub_libraries(${stub} ${stub_target})
+
+ qt_install(TARGETS ${stub_target} LIBRARY NAMELINK_SKIP)
+endfunction()
+
+function(qt_internal_multimedia_add_private_stub_to_plugin stub)
+ qt_internal_multimedia_set_stub_include_directories(${stub} QFFmpegMediaPlugin)
+ qt_internal_multimedia_define_stub_needed_version(${stub} QFFmpegMediaPlugin)
+ qt_internal_extend_target(QFFmpegMediaPlugin SOURCES "symbolstubs/qffmpegsymbols-${stub}.cpp")
+endfunction()
+
+# Main function
+
+set(vaapi_regex "^(va|va-drm|va-x11)$")
+set(openssl_regex "^(ssl|crypto)$")
+
+function(qt_internal_multimedia_add_ffmpeg_stubs)
+ qt_internal_multimedia_find_ffmpeg_stubs()
+ qt_internal_multimedia_check_ffmpeg_stubs_configuration()
+
+ if (ffmpeg_has_vaapi)
+ qt_internal_multimedia_find_vaapi_soversion()
+ endif()
+
+ if (ffmpeg_has_openssl)
+ qt_internal_multimedia_find_openssl_soversion()
+ endif()
+
+ foreach (stub ${FFMPEG_STUBS})
+ if (FFMPEG_SHARED_LIBRARIES)
+ qt_internal_multimedia_add_shared_stub("${stub}")
+ else()
+ qt_internal_multimedia_add_private_stub_to_plugin("${stub}")
+ endif()
+ endforeach()
+endfunction()
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer.cpp b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer.cpp
index 88caac941..64bd82dc0 100644
--- a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer.cpp
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer.cpp
@@ -4,6 +4,7 @@
#include "playbackengine/qffmpegaudiorenderer_p.h"
#include "qaudiosink.h"
#include "qaudiooutput.h"
+#include "qaudiobufferoutput.h"
#include "private/qplatformaudiooutput_p.h"
#include <QtCore/qloggingcategory.h>
@@ -56,10 +57,24 @@ qreal sampleRateFactor() {
return result;
}
+
+QAudioFormat audioFormatFromFrame(const Frame &frame)
+{
+ return QFFmpegMediaFormatInfo::audioFormatFromCodecParameters(
+ frame.codec()->stream()->codecpar);
+}
+
+std::unique_ptr<QFFmpegResampler> createResampler(const Frame &frame,
+ const QAudioFormat &outputFormat)
+{
+ return std::make_unique<QFFmpegResampler>(frame.codec(), outputFormat, frame.pts());
+}
+
} // namespace
-AudioRenderer::AudioRenderer(const TimeController &tc, QAudioOutput *output)
- : Renderer(tc), m_output(output)
+AudioRenderer::AudioRenderer(const TimeController &tc, QAudioOutput *output,
+ QAudioBufferOutput *bufferOutput)
+ : Renderer(tc), m_output(output), m_bufferOutput(bufferOutput)
{
if (output) {
// TODO: implement the signals in QPlatformAudioOutput and connect to them, QTBUG-112294
@@ -74,6 +89,12 @@ void AudioRenderer::setOutput(QAudioOutput *output)
setOutputInternal(m_output, output, [this](QAudioOutput *) { onDeviceChanged(); });
}
+void AudioRenderer::setOutput(QAudioBufferOutput *bufferOutput)
+{
+ setOutputInternal(m_bufferOutput, bufferOutput,
+ [this](QAudioBufferOutput *) { m_bufferOutputChanged = true; });
+}
+
AudioRenderer::~AudioRenderer()
{
freeOutput();
@@ -93,14 +114,29 @@ void AudioRenderer::onDeviceChanged()
Renderer::RenderingResult AudioRenderer::renderInternal(Frame frame)
{
if (frame.isValid())
- updateOutput(frame.codec());
+ updateOutputs(frame);
+
+ // push to sink first in order not to waste time on resampling
+ // for QAudioBufferOutput
+ const RenderingResult result = pushFrameToOutput(frame);
+
+ if (m_lastFramePushDone)
+ pushFrameToBufferOutput(frame);
+ // otherwise skip it: the same data has already been pushed to QAudioBufferOutput
+
+ m_lastFramePushDone = result.done;
+
+ return result;
+}
+AudioRenderer::RenderingResult AudioRenderer::pushFrameToOutput(const Frame &frame)
+{
if (!m_ioDevice || !m_resampler)
return {};
Q_ASSERT(m_sink);
- auto firstFrameFlagGuard = qScopeGuard([&]() { m_firstFrame = false; });
+ auto firstFrameFlagGuard = qScopeGuard([&]() { m_firstFrameToSink = false; });
const SynchronizationStamp syncStamp{ m_sink->state(), m_sink->bytesFree(),
m_bufferedData.offset, Clock::now() };
@@ -143,6 +179,22 @@ Renderer::RenderingResult AudioRenderer::renderInternal(Frame frame)
return {};
}
+void AudioRenderer::pushFrameToBufferOutput(const Frame &frame)
+{
+ if (!m_bufferOutput)
+ return;
+
+ Q_ASSERT(m_bufferOutputResampler);
+
+ if (frame.isValid()) {
+ // TODO: get buffer from m_bufferedData if resample formats are equal
+ QAudioBuffer buffer = m_resampler->resample(frame.avFrame());
+ emit m_bufferOutput->audioBufferReceived(buffer);
+ } else {
+ emit m_bufferOutput->audioBufferReceived({});
+ }
+}
+
void AudioRenderer::onPlaybackRateChanged()
{
m_resampler.reset();
@@ -154,7 +206,7 @@ int AudioRenderer::timerInterval() const
const auto interval = Renderer::timerInterval();
- if (m_firstFrame || !m_sink || m_sink->state() != QAudio::IdleState
+ if (m_firstFrameToSink || !m_sink || m_sink->state() != QAudio::IdleState
|| interval > MaxFixableInterval)
return interval;
@@ -163,30 +215,18 @@ int AudioRenderer::timerInterval() const
void AudioRenderer::onPauseChanged()
{
- m_firstFrame = true;
+ m_firstFrameToSink = true;
Renderer::onPauseChanged();
}
-void AudioRenderer::initResempler(const Codec *codec)
+void AudioRenderer::initResempler(const Frame &frame)
{
// We recreate resampler whenever format is changed
- /* AVSampleFormat requiredFormat =
- QFFmpegMediaFormatInfo::avSampleFormat(m_format.sampleFormat());
-
- #if QT_FFMPEG_OLD_CHANNEL_LAYOUT
- qCDebug(qLcAudioRenderer) << "init resampler" << requiredFormat
- << codec->stream()->codecpar->channels;
- #else
- qCDebug(qLcAudioRenderer) << "init resampler" << requiredFormat
- << codec->stream()->codecpar->ch_layout.nb_channels;
- #endif
- */
-
- auto resamplerFormat = m_format;
+ auto resamplerFormat = m_sinkFormat;
resamplerFormat.setSampleRate(
- qRound(m_format.sampleRate() / playbackRate() * sampleRateFactor()));
- m_resampler = std::make_unique<QFFmpegResampler>(codec, resamplerFormat);
+ qRound(m_sinkFormat.sampleRate() / playbackRate() * sampleRateFactor()));
+ m_resampler = createResampler(frame, resamplerFormat);
}
void AudioRenderer::freeOutput()
@@ -203,34 +243,47 @@ void AudioRenderer::freeOutput()
m_bufferedData = {};
m_deviceChanged = false;
+ m_sinkFormat = {};
m_timings = {};
m_bufferLoadingInfo = {};
}
-void AudioRenderer::updateOutput(const Codec *codec)
+void AudioRenderer::updateOutputs(const Frame &frame)
{
if (m_deviceChanged) {
freeOutput();
- m_format = {};
m_resampler.reset();
}
+ if (m_bufferOutput) {
+ if (m_bufferOutputChanged) {
+ m_bufferOutputChanged = false;
+ m_bufferOutputResampler.reset();
+ }
+
+ if (!m_bufferOutputResampler) {
+ QAudioFormat outputFormat = m_bufferOutput->format();
+ if (!outputFormat.isValid())
+ outputFormat = audioFormatFromFrame(frame);
+ m_bufferOutputResampler = createResampler(frame, outputFormat);
+ }
+ }
+
if (!m_output)
return;
- if (!m_format.isValid()) {
- m_format =
- QFFmpegMediaFormatInfo::audioFormatFromCodecParameters(codec->stream()->codecpar);
- m_format.setChannelConfig(m_output->device().channelConfiguration());
+ if (!m_sinkFormat.isValid()) {
+ m_sinkFormat = audioFormatFromFrame(frame);
+ m_sinkFormat.setChannelConfig(m_output->device().channelConfiguration());
}
if (!m_sink) {
// Insert a delay here to test time offset synchronization, e.g. QThread::sleep(1)
- m_sink = std::make_unique<QAudioSink>(m_output->device(), m_format);
+ m_sink = std::make_unique<QAudioSink>(m_output->device(), m_sinkFormat);
updateVolume();
- m_sink->setBufferSize(m_format.bytesForDuration(DesiredBufferTime.count()));
+ m_sink->setBufferSize(m_sinkFormat.bytesForDuration(DesiredBufferTime.count()));
m_ioDevice = m_sink->start();
- m_firstFrame = true;
+ m_firstFrameToSink = true;
connect(m_sink.get(), &QAudioSink::stateChanged, this,
&AudioRenderer::onAudioSinkStateChanged);
@@ -244,9 +297,8 @@ void AudioRenderer::updateOutput(const Codec *codec)
&& m_timings.maxSoundDelay < m_timings.actualBufferDuration);
}
- if (!m_resampler) {
- initResempler(codec);
- }
+ if (!m_resampler)
+ initResempler(frame);
}
void AudioRenderer::updateSynchronization(const SynchronizationStamp &stamp, const Frame &frame)
@@ -269,7 +321,7 @@ void AudioRenderer::updateSynchronization(const SynchronizationStamp &stamp, con
// clang-format off
qCDebug(qLcAudioRenderer)
<< "Change rendering time:"
- << "\n First frame:" << m_firstFrame
+ << "\n First frame:" << m_firstFrameToSink
<< "\n Delay (frame+buffer-written):" << currentFrameDelay << "+"
<< bufferLoadingTime << "-"
<< writtenTime << "="
@@ -306,7 +358,7 @@ void AudioRenderer::updateSynchronization(const SynchronizationStamp &stamp, con
: qMax(soundDelay, fixedDelay);
if (stamp.timePoint - m_bufferLoadingInfo.timePoint > BufferLoadingMeasureTime
- || (m_firstFrame && isHigh) || shouldHandleIdle) {
+ || (m_firstFrameToSink && isHigh) || shouldHandleIdle) {
const auto targetDelay = isHigh
? (m_timings.maxSoundDelay + m_timings.minSoundDelay) / 2
: m_timings.minSoundDelay + DurationBias;
@@ -339,13 +391,13 @@ microseconds AudioRenderer::bufferLoadingTime(const SynchronizationStamp &syncSt
void AudioRenderer::onAudioSinkStateChanged(QAudio::State state)
{
- if (state == QAudio::IdleState && !m_firstFrame)
+ if (state == QAudio::IdleState && !m_firstFrameToSink)
scheduleNextStep();
}
microseconds AudioRenderer::durationForBytes(qsizetype bytes) const
{
- return microseconds(m_format.durationForBytes(static_cast<qint32>(bytes)));
+ return microseconds(m_sinkFormat.durationForBytes(static_cast<qint32>(bytes)));
}
} // namespace QFFmpeg
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer_p.h b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer_p.h
index 196cd4fd0..9a22a8a48 100644
--- a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer_p.h
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer_p.h
@@ -21,6 +21,7 @@
QT_BEGIN_NAMESPACE
class QAudioOutput;
+class QAudioBufferOutput;
class QAudioSink;
class QFFmpegResampler;
@@ -30,10 +31,12 @@ class AudioRenderer : public Renderer
{
Q_OBJECT
public:
- AudioRenderer(const TimeController &tc, QAudioOutput *output);
+ AudioRenderer(const TimeController &tc, QAudioOutput *output, QAudioBufferOutput *bufferOutput);
void setOutput(QAudioOutput *output);
+ void setOutput(QAudioBufferOutput *bufferOutput);
+
~AudioRenderer() override;
protected:
@@ -73,6 +76,10 @@ protected:
RenderingResult renderInternal(Frame frame) override;
+ RenderingResult pushFrameToOutput(const Frame &frame);
+
+ void pushFrameToBufferOutput(const Frame &frame);
+
void onPlaybackRateChanged() override;
int timerInterval() const override;
@@ -81,9 +88,9 @@ protected:
void freeOutput();
- void updateOutput(const Codec *codec);
+ void updateOutputs(const Frame &frame);
- void initResempler(const Codec *codec);
+ void initResempler(const Frame &frame);
void onDeviceChanged();
@@ -99,18 +106,23 @@ protected:
private:
QPointer<QAudioOutput> m_output;
+ QPointer<QAudioBufferOutput> m_bufferOutput;
std::unique_ptr<QAudioSink> m_sink;
AudioTimings m_timings;
BufferLoadingInfo m_bufferLoadingInfo;
std::unique_ptr<QFFmpegResampler> m_resampler;
- QAudioFormat m_format;
+ std::unique_ptr<QFFmpegResampler> m_bufferOutputResampler;
+ QAudioFormat m_sinkFormat;
BufferedDataWithOffset m_bufferedData;
QIODevice *m_ioDevice = nullptr;
+ bool m_lastFramePushDone = true;
+
bool m_deviceChanged = false;
+ bool m_bufferOutputChanged = false;
bool m_drained = false;
- bool m_firstFrame = true;
+ bool m_firstFrameToSink = true;
};
} // namespace QFFmpeg
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec.cpp b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec.cpp
index a99432c29..457b3603d 100644
--- a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec.cpp
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec.cpp
@@ -18,13 +18,6 @@ Codec::Data::Data(AVCodecContextUPtr context, AVStream *stream, AVFormatContext
pixelAspectRatio = av_guess_sample_aspect_ratio(formatContext, stream, nullptr);
}
-Codec::Data::~Data()
-{
- // TODO: investigate if we can remove avcodec_close
- // FFmpeg doc says that avcodec_free_context is enough
- avcodec_close(context.get());
-}
-
QMaybe<Codec> Codec::create(AVStream *stream, AVFormatContext *formatContext)
{
if (!stream)
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec_p.h b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec_p.h
index 5510e0e84..449fb1f65 100644
--- a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec_p.h
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec_p.h
@@ -31,7 +31,6 @@ class Codec
{
Data(AVCodecContextUPtr context, AVStream *stream, AVFormatContext *formatContext,
std::unique_ptr<QFFmpeg::HWAccel> hwAccel);
- ~Data();
QAtomicInt ref;
AVCodecContextUPtr context;
AVStream *stream = nullptr;
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegmediadataholder.cpp b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegmediadataholder.cpp
index fbb75dd44..f92f93ddb 100644
--- a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegmediadataholder.cpp
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegmediadataholder.cpp
@@ -65,6 +65,25 @@ static int streamOrientation(const AVStream *stream)
return rotation < 0 ? -rotation % 360 : -rotation % 360 + 360;
}
+
+static bool colorTransferSupportsHdr(const AVStream *stream)
+{
+ if (!stream)
+ return false;
+
+ const AVCodecParameters *codecPar = stream->codecpar;
+ if (!codecPar)
+ return false;
+
+ const QVideoFrameFormat::ColorTransfer colorTransfer = fromAvColorTransfer(codecPar->color_trc);
+
+ // Assume that content is using HDR if the color transfer supports high
+ // dynamic range. The video may still not utilize the extended range,
+ // but we can't determine the actual range without decoding frames.
+ return colorTransfer == QVideoFrameFormat::ColorTransfer_ST2084
+ || colorTransfer == QVideoFrameFormat::ColorTransfer_STD_B67;
+}
+
QtVideo::Rotation MediaDataHolder::rotation() const
{
int orientation = m_metaData.value(QMediaMetaData::Orientation).toInt();
@@ -97,6 +116,7 @@ static void insertMediaData(QMediaMetaData &metaData, QPlatformMediaPlayer::Trac
metaData.insert(QMediaMetaData::VideoFrameRate,
qreal(stream->avg_frame_rate.num) / qreal(stream->avg_frame_rate.den));
metaData.insert(QMediaMetaData::Orientation, QVariant::fromValue(streamOrientation(stream)));
+ metaData.insert(QMediaMetaData::HasHdrContent, colorTransferSupportsHdr(stream));
break;
case QPlatformMediaPlayer::AudioStream:
metaData.insert(QMediaMetaData::AudioBitRate, (int)codecPar->bit_rate);
@@ -151,6 +171,10 @@ loadMedia(const QUrl &mediaUrl, QIODevice *stream, const std::shared_ptr<ICancel
constexpr auto NetworkTimeoutUs = "5000000";
av_dict_set(dict, "timeout", NetworkTimeoutUs, 0);
+ const QByteArray protocolWhitelist = qgetenv("QT_FFMPEG_PROTOCOL_WHITELIST");
+ if (!protocolWhitelist.isNull())
+ av_dict_set(dict, "protocol_whitelist", protocolWhitelist.data(), 0);
+
context->interrupt_callback.opaque = cancelToken.get();
context->interrupt_callback.callback = [](void *opaque) {
const auto *cancelToken = static_cast<const ICancelToken *>(opaque);
@@ -189,6 +213,7 @@ loadMedia(const QUrl &mediaUrl, QIODevice *stream, const std::shared_ptr<ICancel
#endif
return context;
}
+
} // namespace
MediaDataHolder::Maybe MediaDataHolder::create(const QUrl &url, QIODevice *stream,
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegvideorenderer.cpp b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegvideorenderer.cpp
index 7c8e90552..dceb00f83 100644
--- a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegvideorenderer.cpp
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegvideorenderer.cpp
@@ -4,6 +4,7 @@
#include "playbackengine/qffmpegvideorenderer_p.h"
#include "qffmpegvideobuffer_p.h"
#include "qvideosink.h"
+#include "private/qvideoframe_p.h"
QT_BEGIN_NAMESPACE
@@ -62,10 +63,10 @@ VideoRenderer::RenderingResult VideoRenderer::renderInternal(Frame frame)
format.setColorTransfer(buffer->colorTransfer());
format.setColorRange(buffer->colorRange());
format.setMaxLuminance(buffer->maxNits());
- QVideoFrame videoFrame(buffer.release(), format);
+ format.setRotation(m_rotation);
+ QVideoFrame videoFrame = QVideoFramePrivate::createFrame(std::move(buffer), format);
videoFrame.setStartTime(frame.pts());
videoFrame.setEndTime(frame.end());
- videoFrame.setRotation(m_rotation);
m_sink->setVideoFrame(videoFrame);
return {};
diff --git a/src/plugins/multimedia/ffmpeg/qandroidcamera.cpp b/src/plugins/multimedia/ffmpeg/qandroidcamera.cpp
index df583ce0e..56725b2bb 100644
--- a/src/plugins/multimedia/ffmpeg/qandroidcamera.cpp
+++ b/src/plugins/multimedia/ffmpeg/qandroidcamera.cpp
@@ -55,8 +55,8 @@ QCameraFormat getDefaultCameraFormat()
QCameraFormatPrivate *defaultFormat = new QCameraFormatPrivate{
.pixelFormat = QVideoFrameFormat::Format_YUV420P,
.resolution = { 1920, 1080 },
- .minFrameRate = 30,
- .maxFrameRate = 60,
+ .minFrameRate = 12,
+ .maxFrameRate = 30,
};
return defaultFormat->create();
}
@@ -252,7 +252,7 @@ void QAndroidCamera::setActive(bool active)
return;
if (!m_jniCamera.isValid()) {
- emit error(QCamera::CameraError, "No connection to Android Camera2 API");
+ updateError(QCamera::CameraError, QStringLiteral("No connection to Android Camera2 API"));
return;
}
@@ -273,24 +273,24 @@ void QAndroidCamera::setActive(bool active)
setState(State::WaitingOpen);
g_qcameras->insert(m_cameraDevice.id(), this);
+ // this should use the camera format.
+ // but there are only 2 fully supported formats on Android - JPG and YUV420P
+ // and JPEG is not supported for encoding in FFmpeg, so it's locked for YUV for now.
+ const static int imageFormat =
+ QJniObject::getStaticField<QtJniTypes::AndroidImageFormat, jint>("YUV_420_888");
+ m_jniCamera.callMethod<void>("prepareCamera", jint(width), jint(height),
+ jint(imageFormat), jint(m_cameraFormat.minFrameRate()),
+ jint(m_cameraFormat.maxFrameRate()));
+
bool canOpen = m_jniCamera.callMethod<jboolean>(
"open", QJniObject::fromString(m_cameraDevice.id()).object<jstring>());
if (!canOpen) {
g_qcameras->remove(m_cameraDevice.id());
setState(State::Closed);
- emit error(QCamera::CameraError,
- QString("Failed to start camera: ").append(m_cameraDevice.description()));
+ updateError(QCamera::CameraError,
+ QString("Failed to start camera: ").append(m_cameraDevice.description()));
}
-
- // this should use the camera format.
- // but there is only 2 fully supported formats on android - JPG and YUV420P
- // and JPEG is not supported for encoding in FFmpeg, so it's locked for YUV for now.
- const static int imageFormat =
- QJniObject::getStaticField<QtJniTypes::AndroidImageFormat, jint>("YUV_420_888");
- m_jniCamera.callMethod<jboolean>("addImageReader", jint(width), jint(height),
- jint(imageFormat));
-
} else {
m_jniCamera.callMethod<void>("stopAndClose");
m_jniCamera.callMethod<void>("clearSurfaces");
@@ -316,8 +316,8 @@ void QAndroidCamera::setState(QAndroidCamera::State newState)
m_state = State::Closed;
- emit error(QCamera::CameraError,
- QString("Failed to start Camera %1").arg(m_cameraDevice.description()));
+ updateError(QCamera::CameraError,
+ QString("Failed to start Camera %1").arg(m_cameraDevice.description()));
}
if (m_state == State::Closed && newState == State::WaitingOpen)
@@ -332,10 +332,18 @@ void QAndroidCamera::setState(QAndroidCamera::State newState)
bool QAndroidCamera::setCameraFormat(const QCameraFormat &format)
{
- if (!format.isNull() && !m_cameraDevice.videoFormats().contains(format))
+ const auto chosenFormat = format.isNull() ? getDefaultCameraFormat() : format;
+
+ if (chosenFormat == m_cameraFormat || !m_cameraDevice.videoFormats().contains(chosenFormat))
return false;
- m_cameraFormat = format.isNull() ? getDefaultCameraFormat() : format;
+ m_cameraFormat = chosenFormat;
+
+ if (isActive()) {
+ // Restart the camera to set new camera format
+ setActive(false);
+ setActive(true);
+ }
return true;
}
@@ -513,10 +521,10 @@ void QAndroidCamera::onCameraDisconnect()
void QAndroidCamera::onCameraError(int reason)
{
- emit error(QCamera::CameraError,
- QString("Capture error with Camera %1. Camera2 Api error code: %2")
- .arg(m_cameraDevice.description())
- .arg(reason));
+ updateError(QCamera::CameraError,
+ QString("Capture error with Camera %1. Camera2 Api error code: %2")
+ .arg(m_cameraDevice.description())
+ .arg(reason));
}
void QAndroidCamera::onSessionActive()
@@ -544,10 +552,10 @@ void QAndroidCamera::onCaptureSessionFailed(int reason, long frameNumber)
{
Q_UNUSED(frameNumber);
- emit error(QCamera::CameraError,
- QString("Capture session failure with Camera %1. Camera2 Api error code: %2")
- .arg(m_cameraDevice.description())
- .arg(reason));
+ updateError(QCamera::CameraError,
+ QStringLiteral("Capture session failure with Camera %1. Camera2 Api error code: %2")
+ .arg(m_cameraDevice.description())
+ .arg(reason));
}
// JNI logic
diff --git a/src/plugins/multimedia/ffmpeg/qandroidcameraframe.cpp b/src/plugins/multimedia/ffmpeg/qandroidcameraframe.cpp
index ef088e6d7..28d02b20e 100644
--- a/src/plugins/multimedia/ffmpeg/qandroidcameraframe.cpp
+++ b/src/plugins/multimedia/ffmpeg/qandroidcameraframe.cpp
@@ -12,10 +12,20 @@ Q_DECLARE_JNI_CLASS(AndroidImageFormat, "android/graphics/ImageFormat");
Q_DECLARE_JNI_CLASS(AndroidImage, "android/media/Image")
Q_DECLARE_JNI_TYPE(AndroidImagePlaneArray, "[Landroid/media/Image$Plane;")
Q_DECLARE_JNI_CLASS(JavaByteBuffer, "java/nio/ByteBuffer")
+Q_DECLARE_JNI_CLASS(QtVideoDeviceManager,
+ "org/qtproject/qt/android/multimedia/QtVideoDeviceManager");
QT_BEGIN_NAMESPACE
static Q_LOGGING_CATEGORY(qLCAndroidCameraFrame, "qt.multimedia.ffmpeg.android.camera.frame");
+namespace {
+bool isWorkaroundForEmulatorNeeded() {
+ const static bool workaroundForEmulator
+ = QtJniTypes::QtVideoDeviceManager::callStaticMethod<jboolean>("isEmulator");
+ return workaroundForEmulator;
+}
+}
+
bool QAndroidCameraFrame::parse(const QJniObject &frame)
{
QJniEnvironment jniEnv;
@@ -130,12 +140,25 @@ bool QAndroidCameraFrame::parse(const QJniObject &frame)
m_planes[mapIndex].data = buffer[arrayIndex];
};
+ int width = frame.callMethod<jint>("getWidth");
+ int height = frame.callMethod<jint>("getHeight");
+ m_size = QSize(width, height);
+
switch (calculedPixelFormat) {
case QVideoFrameFormat::Format_YUV420P:
m_numberPlanes = 3;
copyPlane(0, 0);
copyPlane(1, 1);
copyPlane(2, 2);
+
+ if (isWorkaroundForEmulatorNeeded()) {
+ for (int i = 0; i < 3; ++i) {
+ const int dataSize = (i == 0) ? width * height : width * height / 4;
+ m_planes[i].data = new uint8_t[dataSize];
+ memcpy(m_planes[i].data, buffer[i], dataSize);
+ }
+ }
+
m_pixelFormat = QVideoFrameFormat::Format_YUV420P;
break;
case QVideoFrameFormat::Format_NV12:
@@ -161,10 +184,6 @@ bool QAndroidCameraFrame::parse(const QJniObject &frame)
long timestamp = frame.callMethod<jlong>("getTimestamp");
m_timestamp = timestamp / 1000;
- int width = frame.callMethod<jint>("getWidth");
- int height = frame.callMethod<jint>("getHeight");
- m_size = QSize(width, height);
-
return true;
}
@@ -193,6 +212,13 @@ QAndroidCameraFrame::~QAndroidCameraFrame()
QJniEnvironment jniEnv;
if (m_frame)
jniEnv->DeleteGlobalRef(m_frame);
+
+ if (isWorkaroundForEmulatorNeeded()) {
+ if (m_pixelFormat == QVideoFrameFormat::Format_YUV420P) {
+ for (int i = 0; i < 3; ++i)
+ delete[] m_planes[i].data;
+ }
+ }
}
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qavfsamplebufferdelegate.mm b/src/plugins/multimedia/ffmpeg/qavfsamplebufferdelegate.mm
index 9acca8566..8a4c77a9e 100644
--- a/src/plugins/multimedia/ffmpeg/qavfsamplebufferdelegate.mm
+++ b/src/plugins/multimedia/ffmpeg/qavfsamplebufferdelegate.mm
@@ -8,6 +8,7 @@
#include "qffmpeghwaccel_p.h"
#include "qavfhelpers_p.h"
#include "qffmpegvideobuffer_p.h"
+#include "private/qvideoframe_p.h"
#undef AVMediaType
@@ -25,8 +26,7 @@ namespace {
class CVImageVideoBuffer : public QAbstractVideoBuffer
{
public:
- CVImageVideoBuffer(CVImageBufferRef imageBuffer)
- : QAbstractVideoBuffer(QVideoFrame::NoHandle), m_buffer(imageBuffer)
+ CVImageVideoBuffer(CVImageBufferRef imageBuffer) : m_buffer(imageBuffer)
{
CVPixelBufferRetain(imageBuffer);
}
@@ -37,52 +37,52 @@ public:
CVPixelBufferRelease(m_buffer);
}
- CVImageVideoBuffer::MapData map(QVideoFrame::MapMode mode) override
+ CVImageVideoBuffer::MapData map(QtVideo::MapMode mode) override
{
MapData mapData;
- if (m_mode == QVideoFrame::NotMapped) {
+ if (m_mode == QtVideo::MapMode::NotMapped) {
CVPixelBufferLockBaseAddress(
- m_buffer, mode == QVideoFrame::ReadOnly ? kCVPixelBufferLock_ReadOnly : 0);
+ m_buffer, mode == QtVideo::MapMode::ReadOnly ? kCVPixelBufferLock_ReadOnly : 0);
m_mode = mode;
}
- mapData.nPlanes = CVPixelBufferGetPlaneCount(m_buffer);
- Q_ASSERT(mapData.nPlanes <= 3);
+ mapData.planeCount = CVPixelBufferGetPlaneCount(m_buffer);
+ Q_ASSERT(mapData.planeCount <= 3);
- if (!mapData.nPlanes) {
+ if (!mapData.planeCount) {
// single plane
mapData.bytesPerLine[0] = CVPixelBufferGetBytesPerRow(m_buffer);
mapData.data[0] = static_cast<uchar *>(CVPixelBufferGetBaseAddress(m_buffer));
- mapData.size[0] = CVPixelBufferGetDataSize(m_buffer);
- mapData.nPlanes = mapData.data[0] ? 1 : 0;
+ mapData.dataSize[0] = CVPixelBufferGetDataSize(m_buffer);
+ mapData.planeCount = mapData.data[0] ? 1 : 0;
return mapData;
}
// For a bi-planar or tri-planar format we have to set the parameters correctly:
- for (int i = 0; i < mapData.nPlanes; ++i) {
+ for (int i = 0; i < mapData.planeCount; ++i) {
mapData.bytesPerLine[i] = CVPixelBufferGetBytesPerRowOfPlane(m_buffer, i);
- mapData.size[i] = mapData.bytesPerLine[i] * CVPixelBufferGetHeightOfPlane(m_buffer, i);
+ mapData.dataSize[i] = mapData.bytesPerLine[i] * CVPixelBufferGetHeightOfPlane(m_buffer, i);
mapData.data[i] = static_cast<uchar *>(CVPixelBufferGetBaseAddressOfPlane(m_buffer, i));
}
return mapData;
}
- QVideoFrame::MapMode mapMode() const override { return m_mode; }
-
void unmap() override
{
- if (m_mode != QVideoFrame::NotMapped) {
+ if (m_mode != QtVideo::MapMode::NotMapped) {
CVPixelBufferUnlockBaseAddress(
- m_buffer, m_mode == QVideoFrame::ReadOnly ? kCVPixelBufferLock_ReadOnly : 0);
- m_mode = QVideoFrame::NotMapped;
+ m_buffer, m_mode == QtVideo::MapMode::ReadOnly ? kCVPixelBufferLock_ReadOnly : 0);
+ m_mode = QtVideo::MapMode::NotMapped;
}
}
+ QVideoFrameFormat format() const override { return {}; }
+
private:
CVImageBufferRef m_buffer;
- QVideoFrame::MapMode m_mode = QVideoFrame::NotMapped;
+ QtVideo::MapMode m_mode = QtVideo::MapMode::NotMapped;
};
}
@@ -147,7 +147,8 @@ static QVideoFrame createHwVideoFrame(QAVFSampleBufferDelegate &delegate,
avFrame->pts = delegate.startTime - *delegate.baseTime;
- return QVideoFrame(new QFFmpegVideoBuffer(std::move(avFrame)), format);
+ return QVideoFramePrivate::createFrame(std::make_unique<QFFmpegVideoBuffer>(std::move(avFrame)),
+ format);
}
- (instancetype)initWithFrameHandler:(std::function<void(const QVideoFrame &)>)handler
@@ -196,11 +197,12 @@ static QVideoFrame createHwVideoFrame(QAVFSampleBufferDelegate &delegate,
return;
}
- format.setFrameRate(frameRate);
+ format.setStreamFrameRate(frameRate);
auto frame = createHwVideoFrame(*self, imageBuffer, format);
if (!frame.isValid())
- frame = QVideoFrame(new CVImageVideoBuffer(imageBuffer), format);
+ frame = QVideoFramePrivate::createFrame(std::make_unique<CVImageVideoBuffer>(imageBuffer),
+ std::move(format));
frame.setStartTime(startTime - *baseTime);
frame.setEndTime(frameTime - *baseTime);
diff --git a/src/plugins/multimedia/ffmpeg/qcgwindowcapture.mm b/src/plugins/multimedia/ffmpeg/qcgwindowcapture.mm
index 93b079bac..6fa2f620f 100644
--- a/src/plugins/multimedia/ffmpeg/qcgwindowcapture.mm
+++ b/src/plugins/multimedia/ffmpeg/qcgwindowcapture.mm
@@ -1,10 +1,12 @@
// Copyright (C) 2022 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qabstractvideobuffer.h"
+
#include "qcgwindowcapture_p.h"
#include "private/qcapturablewindow_p.h"
#include "qffmpegsurfacecapturegrabber_p.h"
-#include "private/qabstractvideobuffer_p.h"
+#include "private/qvideoframe_p.h"
#include "qscreen.h"
#include "qguiapplication.h"
@@ -39,7 +41,7 @@ QT_BEGIN_NAMESPACE
class QCGImageVideoBuffer : public QAbstractVideoBuffer
{
public:
- QCGImageVideoBuffer(CGImageRef image) : QAbstractVideoBuffer(QVideoFrame::NoHandle)
+ QCGImageVideoBuffer(CGImageRef image)
{
auto provider = CGImageGetDataProvider(image);
m_data = CGDataProviderCopyData(provider);
@@ -48,27 +50,27 @@ public:
~QCGImageVideoBuffer() override { CFRelease(m_data); }
- QVideoFrame::MapMode mapMode() const override { return m_mapMode; }
-
- MapData map(QVideoFrame::MapMode mode) override
+ MapData map(QtVideo::MapMode mode) override
{
MapData mapData;
- if (m_mapMode == QVideoFrame::NotMapped) {
+ if (m_mapMode == QtVideo::MapMode::NotMapped) {
m_mapMode = mode;
- mapData.nPlanes = 1;
+ mapData.planeCount = 1;
mapData.bytesPerLine[0] = static_cast<int>(m_bytesPerLine);
mapData.data[0] = (uchar *)CFDataGetBytePtr(m_data);
- mapData.size[0] = static_cast<int>(CFDataGetLength(m_data));
+ mapData.dataSize[0] = static_cast<int>(CFDataGetLength(m_data));
}
return mapData;
}
- void unmap() override { m_mapMode = QVideoFrame::NotMapped; }
+ void unmap() override { m_mapMode = QtVideo::MapMode::NotMapped; }
+
+ QVideoFrameFormat format() const override { return {}; }
private:
- QVideoFrame::MapMode m_mapMode = QVideoFrame::NotMapped;
+ QtVideo::MapMode m_mapMode = QtVideo::MapMode::NotMapped;
CFDataRef m_data;
size_t m_bytesPerLine = 0;
};
@@ -129,9 +131,10 @@ protected:
QVideoFrameFormat format(QSize(CGImageGetWidth(imageRef), CGImageGetHeight(imageRef)),
QVideoFrameFormat::Format_BGRA8888);
- format.setFrameRate(frameRate());
+ format.setStreamFrameRate(frameRate());
- return QVideoFrame(new QCGImageVideoBuffer(imageRef), format);
+ return QVideoFramePrivate::createFrame(std::make_unique<QCGImageVideoBuffer>(imageRef),
+ std::move(format));
}
void onNewFrame(QVideoFrame frame)
diff --git a/src/plugins/multimedia/ffmpeg/qeglfsscreencapture.cpp b/src/plugins/multimedia/ffmpeg/qeglfsscreencapture.cpp
index 09fd633bc..b316a5704 100644
--- a/src/plugins/multimedia/ffmpeg/qeglfsscreencapture.cpp
+++ b/src/plugins/multimedia/ffmpeg/qeglfsscreencapture.cpp
@@ -7,6 +7,7 @@
#include "qguiapplication.h"
#include "qopenglvideobuffer_p.h"
#include "private/qimagevideobuffer_p.h"
+#include "private/qvideoframe_p.h"
#include <QtOpenGL/private/qopenglcompositor_p.h>
#include <QtOpenGL/private/qopenglframebufferobject_p.h>
@@ -54,7 +55,7 @@ protected:
if (!m_format.isValid()) {
auto image = videoBuffer->ensureImageBuffer().underlyingImage();
m_format = { image.size(), QVideoFrameFormat::pixelFormatFromImageFormat(image.format()) };
- m_format.setFrameRate(frameRate());
+ m_format.setStreamFrameRate(frameRate());
}
return QVideoFrame(videoBuffer.release(), m_format);
@@ -90,10 +91,11 @@ protected:
if (!m_format.isValid()) {
m_format = { image.size(),
QVideoFrameFormat::pixelFormatFromImageFormat(image.format()) };
- m_format.setFrameRate(frameRate());
+ m_format.setStreamFrameRate(frameRate());
}
- return QVideoFrame(new QImageVideoBuffer(std::move(image)), m_format);
+ return QVideoFramePrivate::createFrame(
+ std::make_unique<QImageVideoBuffer>(std::move(image)), m_format);
}
private:
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeg.cpp b/src/plugins/multimedia/ffmpeg/qffmpeg.cpp
index f769ac4d4..ce7dfc682 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpeg.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpeg.cpp
@@ -190,9 +190,7 @@ bool isCodecValid(const AVCodec *codec, const std::vector<AVHWDeviceType> &avail
if (codec->type != AVMEDIA_TYPE_VIDEO)
return true;
- const auto pixFmts = codec->pix_fmts;
-
- if (!pixFmts) {
+ if (!codec->pix_fmts) {
#if defined(Q_OS_LINUX) || defined(Q_OS_ANDROID)
// Disable V4L2 M2M codecs for encoding for now,
// TODO: Investigate on how to get them working
@@ -211,14 +209,14 @@ bool isCodecValid(const AVCodec *codec, const std::vector<AVHWDeviceType> &avail
// and with v4l2m2m codecs, that is suspicious.
}
- if (findAVFormat(pixFmts, &isHwPixelFormat) == AV_PIX_FMT_NONE)
+ if (findAVPixelFormat(codec, &isHwPixelFormat) == AV_PIX_FMT_NONE)
return true;
if ((codec->capabilities & AV_CODEC_CAP_HARDWARE) == 0)
return true;
- auto checkDeviceType = [pixFmts](AVHWDeviceType type) {
- return hasAVFormat(pixFmts, pixelFormatForHwDevice(type));
+ auto checkDeviceType = [codec](AVHWDeviceType type) {
+ return isAVFormatSupported(codec, pixelFormatForHwDevice(type));
};
if (codecAvailableOnDevice && codecAvailableOnDevice->count(codec->id) == 0)
@@ -338,6 +336,9 @@ const char *preferredHwCodecNameSuffix(bool isEncoder, AVHWDeviceType deviceType
return "_videotoolbox";
case AV_HWDEVICE_TYPE_D3D11VA:
case AV_HWDEVICE_TYPE_DXVA2:
+#if QT_FFMPEG_HAS_D3D12VA
+ case AV_HWDEVICE_TYPE_D3D12VA:
+#endif
return "_mf";
case AV_HWDEVICE_TYPE_CUDA:
case AV_HWDEVICE_TYPE_VDPAU:
@@ -386,6 +387,8 @@ const AVCodec *findAVCodec(CodecStorageType codecsType, AVCodecID codecId,
const std::optional<AVHWDeviceType> &deviceType,
const std::optional<PixelOrSampleFormat> &format)
{
+ // TODO: remove deviceType and use only isAVFormatSupported to check the format
+
return findAVCodec(codecsType, codecId, [&](const AVCodec *codec) {
if (format && !isAVFormatSupported(codec, *format))
return NotSuitableAVScore;
@@ -411,6 +414,7 @@ const AVCodec *findAVCodec(CodecStorageType codecsType, AVCodecID codecId,
// The situation happens mostly with encoders
// Probably, it's ffmpeg bug: avcodec_get_hw_config returns null even though
// hw acceleration is supported
+ // To be removed: only isAVFormatSupported should be used.
if (hasAVFormat(codec->pix_fmts, pixelFormatForHwDevice(*deviceType)))
return hwCodecNameScores(codec, *deviceType);
}
@@ -441,8 +445,10 @@ const AVCodec *findAVEncoder(AVCodecID codecId,
bool isAVFormatSupported(const AVCodec *codec, PixelOrSampleFormat format)
{
- if (codec->type == AVMEDIA_TYPE_VIDEO)
- return hasAVFormat(codec->pix_fmts, AVPixelFormat(format));
+ if (codec->type == AVMEDIA_TYPE_VIDEO) {
+ auto checkFormat = [format](AVPixelFormat f) { return f == format; };
+ return findAVPixelFormat(codec, checkFormat) != AV_PIX_FMT_NONE;
+ }
if (codec->type == AVMEDIA_TYPE_AUDIO)
return hasAVFormat(codec->sample_fmts, AVSampleFormat(format));
@@ -489,6 +495,10 @@ AVPixelFormat pixelFormatForHwDevice(AVHWDeviceType deviceType)
return AV_PIX_FMT_QSV;
case AV_HWDEVICE_TYPE_D3D11VA:
return AV_PIX_FMT_D3D11;
+#if QT_FFMPEG_HAS_D3D12VA
+ case AV_HWDEVICE_TYPE_D3D12VA:
+ return AV_PIX_FMT_D3D12;
+#endif
case AV_HWDEVICE_TYPE_DXVA2:
return AV_PIX_FMT_DXVA2_VLD;
case AV_HWDEVICE_TYPE_DRM:
@@ -502,6 +512,32 @@ AVPixelFormat pixelFormatForHwDevice(AVHWDeviceType deviceType)
}
}
+AVPacketSideData *addStreamSideData(AVStream *stream, AVPacketSideData sideData)
+{
+ QScopeGuard freeData([&sideData]() { av_free(sideData.data); });
+#if QT_FFMPEG_STREAM_SIDE_DATA_DEPRECATED
+ AVPacketSideData *result = av_packet_side_data_add(
+ &stream->codecpar->coded_side_data,
+ &stream->codecpar->nb_coded_side_data,
+ sideData.type,
+ sideData.data,
+ sideData.size,
+ 0);
+ if (result) {
+ // If the result is not null, the ownership is taken by AVStream,
+ // otherwise the data must be deleted.
+ freeData.dismiss();
+ return result;
+ }
+#else
+ Q_UNUSED(stream);
+ // TODO: implement for older FFmpeg versions
+ qWarning() << "Adding stream side data is not supported for FFmpeg < 6.1";
+#endif
+
+ return nullptr;
+}
+
const AVPacketSideData *streamSideData(const AVStream *stream, AVPacketSideDataType type)
{
Q_ASSERT(stream);
@@ -554,6 +590,36 @@ SwrContextUPtr createResampleContext(const AVAudioFormat &inputFormat,
return SwrContextUPtr(resampler);
}
+QVideoFrameFormat::ColorTransfer fromAvColorTransfer(AVColorTransferCharacteristic colorTrc) {
+ switch (colorTrc) {
+ case AVCOL_TRC_BT709:
+ // The following three cases have transfer characteristics identical to BT709
+ case AVCOL_TRC_BT1361_ECG:
+ case AVCOL_TRC_BT2020_10:
+ case AVCOL_TRC_BT2020_12:
+ case AVCOL_TRC_SMPTE240M: // almost identical to bt709
+ return QVideoFrameFormat::ColorTransfer_BT709;
+ case AVCOL_TRC_GAMMA22:
+ case AVCOL_TRC_SMPTE428: // No idea, let's hope for the best...
+ case AVCOL_TRC_IEC61966_2_1: // sRGB, close enough to 2.2...
+ case AVCOL_TRC_IEC61966_2_4: // not quite, but probably close enough
+ return QVideoFrameFormat::ColorTransfer_Gamma22;
+ case AVCOL_TRC_GAMMA28:
+ return QVideoFrameFormat::ColorTransfer_Gamma28;
+ case AVCOL_TRC_SMPTE170M:
+ return QVideoFrameFormat::ColorTransfer_BT601;
+ case AVCOL_TRC_LINEAR:
+ return QVideoFrameFormat::ColorTransfer_Linear;
+ case AVCOL_TRC_SMPTE2084:
+ return QVideoFrameFormat::ColorTransfer_ST2084;
+ case AVCOL_TRC_ARIB_STD_B67:
+ return QVideoFrameFormat::ColorTransfer_STD_B67;
+ default:
+ break;
+ }
+ return QVideoFrameFormat::ColorTransfer_Unknown;
+}
+
#ifdef Q_OS_DARWIN
bool isCVFormatSupported(uint32_t cvFormat)
{
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeg_p.h b/src/plugins/multimedia/ffmpeg/qffmpeg_p.h
index 601b44ccb..09bf7e4f4 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpeg_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpeg_p.h
@@ -16,6 +16,7 @@
#include "qffmpegdefs_p.h"
#include "qffmpegavaudioformat_p.h"
+#include <QtMultimedia/qvideoframeformat.h>
#include <qstring.h>
#include <optional>
@@ -188,6 +189,34 @@ Format findAVFormat(const Format *fmts, const Predicate &predicate)
return findBestAVFormat(fmts, scoresGetter).first;
}
+template <typename Predicate>
+const AVCodecHWConfig *findHwConfig(const AVCodec *codec, const Predicate &predicate)
+{
+ for (int i = 0; const auto hwConfig = avcodec_get_hw_config(codec, i); ++i) {
+ if (predicate(hwConfig))
+ return hwConfig;
+ }
+
+ return nullptr;
+}
+
+template <typename Predicate>
+AVPixelFormat findAVPixelFormat(const AVCodec *codec, const Predicate &predicate)
+{
+ const AVPixelFormat format = findAVFormat(codec->pix_fmts, predicate);
+ if (format != AV_PIX_FMT_NONE)
+ return format;
+
+ auto checkHwConfig = [&predicate](const AVCodecHWConfig *config) {
+ return config->pix_fmt != AV_PIX_FMT_NONE && predicate(config->pix_fmt);
+ };
+
+ if (auto hwConfig = findHwConfig(codec, checkHwConfig))
+ return hwConfig->pix_fmt;
+
+ return AV_PIX_FMT_NONE;
+}
+
template <typename Value, typename CalculateScore>
auto findBestAVValue(const Value *values, const CalculateScore &calculateScore,
Value invalidValue = {})
@@ -227,11 +256,15 @@ void applyExperimentalCodecOptions(const AVCodec *codec, AVDictionary** opts);
AVPixelFormat pixelFormatForHwDevice(AVHWDeviceType deviceType);
+AVPacketSideData *addStreamSideData(AVStream *stream, AVPacketSideData sideData);
+
const AVPacketSideData *streamSideData(const AVStream *stream, AVPacketSideDataType type);
SwrContextUPtr createResampleContext(const AVAudioFormat &inputFormat,
const AVAudioFormat &outputFormat);
+QVideoFrameFormat::ColorTransfer fromAvColorTransfer(AVColorTransferCharacteristic colorTrc);
+
#ifdef Q_OS_DARWIN
bool isCVFormatSupported(uint32_t format);
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegaudioinput_p.h b/src/plugins/multimedia/ffmpeg/qffmpegaudioinput_p.h
index a232978f6..288b3f432 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegaudioinput_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpegaudioinput_p.h
@@ -15,6 +15,7 @@
//
#include <private/qplatformaudioinput_p.h>
+#include <private/qplatformaudiobufferinput_p.h>
#include "qffmpegthread_p.h"
#include <qaudioinput.h>
@@ -28,8 +29,9 @@ class AudioSourceIO;
constexpr int DefaultAudioInputBufferSize = 4096;
-class QFFmpegAudioInput : public QObject, public QPlatformAudioInput
+class QFFmpegAudioInput : public QPlatformAudioBufferInputBase, public QPlatformAudioInput
{
+ // for qobject_cast
Q_OBJECT
public:
QFFmpegAudioInput(QAudioInput *qq);
@@ -44,9 +46,6 @@ public:
int bufferSize() const;
-Q_SIGNALS:
- void newAudioBuffer(const QAudioBuffer &buffer);
-
private:
QFFmpeg::AudioSourceIO *audioIO = nullptr;
std::unique_ptr<QThread> inputThread;
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegconverter.cpp b/src/plugins/multimedia/ffmpeg/qffmpegconverter.cpp
new file mode 100644
index 000000000..ba87ce3ed
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegconverter.cpp
@@ -0,0 +1,272 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegconverter_p.h"
+#include <QtMultimedia/qvideoframeformat.h>
+#include <QtMultimedia/qvideoframe.h>
+#include <QtCore/qloggingcategory.h>
+#include <private/qvideotexturehelper_p.h>
+
+extern "C" {
+#include <libswscale/swscale.h>
+}
+
+QT_BEGIN_NAMESPACE
+
+namespace {
+
+Q_LOGGING_CATEGORY(lc, "qt.multimedia.ffmpeg.converter");
+
+
+// Converts to FFmpeg pixel format. This function differs from
+// QFFmpegVideoBuffer::toAVPixelFormat which only covers the subset
+// of pixel formats required for encoding. Here we need to cover more
+// pixel formats to be able to generate test images for decoding/display
+AVPixelFormat toAVPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat)
+{
+ switch (pixelFormat) {
+ default:
+ case QVideoFrameFormat::Format_Invalid:
+ return AV_PIX_FMT_NONE;
+ case QVideoFrameFormat::Format_AYUV:
+ case QVideoFrameFormat::Format_AYUV_Premultiplied:
+ return AV_PIX_FMT_NONE; // TODO: Fixme (No corresponding FFmpeg format available)
+ case QVideoFrameFormat::Format_YV12:
+ case QVideoFrameFormat::Format_IMC1:
+ case QVideoFrameFormat::Format_IMC3:
+ case QVideoFrameFormat::Format_IMC2:
+ case QVideoFrameFormat::Format_IMC4:
+ return AV_PIX_FMT_YUV420P;
+ case QVideoFrameFormat::Format_Jpeg:
+ return AV_PIX_FMT_BGRA;
+ case QVideoFrameFormat::Format_ARGB8888:
+ return AV_PIX_FMT_ARGB;
+ case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
+ case QVideoFrameFormat::Format_XRGB8888:
+ return AV_PIX_FMT_0RGB;
+ case QVideoFrameFormat::Format_BGRA8888:
+ return AV_PIX_FMT_BGRA;
+ case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
+ case QVideoFrameFormat::Format_BGRX8888:
+ return AV_PIX_FMT_BGR0;
+ case QVideoFrameFormat::Format_ABGR8888:
+ return AV_PIX_FMT_ABGR;
+ case QVideoFrameFormat::Format_XBGR8888:
+ return AV_PIX_FMT_0BGR;
+ case QVideoFrameFormat::Format_RGBA8888:
+ return AV_PIX_FMT_RGBA;
+ case QVideoFrameFormat::Format_RGBX8888:
+ return AV_PIX_FMT_RGB0;
+ case QVideoFrameFormat::Format_YUV422P:
+ return AV_PIX_FMT_YUV422P;
+ case QVideoFrameFormat::Format_YUV420P:
+ return AV_PIX_FMT_YUV420P;
+ case QVideoFrameFormat::Format_YUV420P10:
+ return AV_PIX_FMT_YUV420P10;
+ case QVideoFrameFormat::Format_UYVY:
+ return AV_PIX_FMT_UYVY422;
+ case QVideoFrameFormat::Format_YUYV:
+ return AV_PIX_FMT_YUYV422;
+ case QVideoFrameFormat::Format_NV12:
+ return AV_PIX_FMT_NV12;
+ case QVideoFrameFormat::Format_NV21:
+ return AV_PIX_FMT_NV21;
+ case QVideoFrameFormat::Format_Y8:
+ return AV_PIX_FMT_GRAY8;
+ case QVideoFrameFormat::Format_Y16:
+ return AV_PIX_FMT_GRAY16;
+ case QVideoFrameFormat::Format_P010:
+ return AV_PIX_FMT_P010;
+ case QVideoFrameFormat::Format_P016:
+ return AV_PIX_FMT_P016;
+ case QVideoFrameFormat::Format_SamplerExternalOES:
+ return AV_PIX_FMT_MEDIACODEC;
+ }
+}
+
+struct SwsFrameData
+{
+ static constexpr int arraySize = 4; // Array size required by sws_scale
+ std::array<uchar *, arraySize> bits;
+ std::array<int, arraySize> stride;
+};
+
+SwsFrameData getSwsData(QVideoFrame &dst)
+{
+ switch (dst.pixelFormat()) {
+ case QVideoFrameFormat::Format_YV12:
+ case QVideoFrameFormat::Format_IMC1:
+ return { { dst.bits(0), dst.bits(2), dst.bits(1), nullptr },
+ { dst.bytesPerLine(0), dst.bytesPerLine(2), dst.bytesPerLine(1), 0 } };
+
+ case QVideoFrameFormat::Format_IMC2:
+ return { { dst.bits(0), dst.bits(1) + dst.bytesPerLine(1) / 2, dst.bits(1), nullptr },
+ { dst.bytesPerLine(0), dst.bytesPerLine(1), dst.bytesPerLine(1), 0 } };
+
+ case QVideoFrameFormat::Format_IMC4:
+ return { { dst.bits(0), dst.bits(1), dst.bits(1) + dst.bytesPerLine(1) / 2, nullptr },
+ { dst.bytesPerLine(0), dst.bytesPerLine(1), dst.bytesPerLine(1), 0 } };
+ default:
+ return { { dst.bits(0), dst.bits(1), dst.bits(2), nullptr },
+ { dst.bytesPerLine(0), dst.bytesPerLine(1), dst.bytesPerLine(2), 0 } };
+ }
+}
+
+struct SwsColorSpace
+{
+ int colorSpace;
+ int colorRange; // 0 - mpeg/video, 1 - jpeg/full
+};
+
+// Qt heuristics for determining color space requires checking
+// both frame color space and range. This function mimics logic
+// used elsewhere in Qt Multimedia.
+SwsColorSpace toSwsColorSpace(QVideoFrameFormat::ColorRange colorRange,
+ QVideoFrameFormat::ColorSpace colorSpace)
+{
+ const int avRange = colorRange == QVideoFrameFormat::ColorRange_Video ? 0 : 1;
+
+ switch (colorSpace) {
+ case QVideoFrameFormat::ColorSpace_BT601:
+ if (colorRange == QVideoFrameFormat::ColorRange_Full)
+ return { SWS_CS_ITU709, 1 }; // TODO: FIXME - Not exact match
+ return { SWS_CS_ITU601, 0 };
+ case QVideoFrameFormat::ColorSpace_BT709:
+ return { SWS_CS_ITU709, avRange };
+ case QVideoFrameFormat::ColorSpace_AdobeRgb:
+ return { SWS_CS_ITU601, 1 }; // TODO: Why do ITU601 and Adobe RGB match well?
+ case QVideoFrameFormat::ColorSpace_BT2020:
+ return { SWS_CS_BT2020, avRange };
+ case QVideoFrameFormat::ColorSpace_Undefined:
+ default:
+ return { SWS_CS_DEFAULT, avRange };
+ }
+}
+
+using SwsContextUPtr = std::unique_ptr<SwsContext, decltype(&sws_freeContext)>;
+using PixelFormat = QVideoFrameFormat::PixelFormat;
+
+// clang-format off
+
+SwsContextUPtr createConverter(const QSize &srcSize, PixelFormat srcPixFmt,
+ const QSize &dstSize, PixelFormat dstPixFmt)
+{
+ SwsContext* context = sws_getContext(
+ srcSize.width(), srcSize.height(), toAVPixelFormat(srcPixFmt),
+ dstSize.width(), dstSize.height(), toAVPixelFormat(dstPixFmt),
+ SWS_BILINEAR, nullptr, nullptr, nullptr);
+
+ return { context, &sws_freeContext };
+}
+
+bool setColorSpaceDetails(SwsContext *context,
+ const QVideoFrameFormat &srcFormat,
+ const QVideoFrameFormat &dstFormat)
+{
+ const SwsColorSpace src = toSwsColorSpace(srcFormat.colorRange(), srcFormat.colorSpace());
+ const SwsColorSpace dst = toSwsColorSpace(dstFormat.colorRange(), dstFormat.colorSpace());
+
+ constexpr int brightness = 0;
+ constexpr int contrast = 0;
+ constexpr int saturation = 0;
+ const int status = sws_setColorspaceDetails(context,
+ sws_getCoefficients(src.colorSpace), src.colorRange,
+ sws_getCoefficients(dst.colorSpace), dst.colorRange,
+ brightness, contrast, saturation);
+
+ return status == 0;
+}
+
+bool convert(SwsContext *context, QVideoFrame &src, int srcHeight, QVideoFrame &dst)
+{
+ if (!src.map(QtVideo::MapMode::ReadOnly))
+ return false;
+
+ QScopeGuard unmapSrc{[&] {
+ src.unmap();
+ }};
+
+ if (!dst.map(QtVideo::MapMode::WriteOnly))
+ return false;
+
+ QScopeGuard unmapDst{[&] {
+ dst.unmap();
+ }};
+
+ const SwsFrameData srcData = getSwsData(src);
+ const SwsFrameData dstData = getSwsData(dst);
+
+ constexpr int firstSrcSliceRow = 0;
+ const int scaledHeight = sws_scale(context,
+ srcData.bits.data(), srcData.stride.data(),
+ firstSrcSliceRow, srcHeight,
+ dstData.bits.data(), dstData.stride.data());
+
+ if (scaledHeight != srcHeight)
+ return false;
+
+ return true;
+}
+
+// Ensure even size if using planar format with chroma subsampling
+QSize adjustSize(const QSize& size, PixelFormat srcFmt, PixelFormat dstFmt)
+{
+ const auto* srcDesc = QVideoTextureHelper::textureDescription(srcFmt);
+ const auto* dstDesc = QVideoTextureHelper::textureDescription(dstFmt);
+
+ QSize output = size;
+ for (const auto desc : { srcDesc, dstDesc }) {
+ for (int i = 0; i < desc->nplanes; ++i) {
+ // TODO: Assumes that max subsampling is 2
+ if (desc->sizeScale[i].x != 1)
+ output.setWidth(output.width() & ~1); // Make even
+
+ if (desc->sizeScale[i].y != 1)
+ output.setHeight(output.height() & ~1); // Make even
+ }
+ }
+
+ return output;
+}
+
+} // namespace
+
+// Converts a video frame to the dstFormat video frame format.
+QVideoFrame convertFrame(QVideoFrame &src, const QVideoFrameFormat &dstFormat)
+{
+ if (src.size() != dstFormat.frameSize()) {
+ qCCritical(lc) << "Resizing is not supported";
+ return {};
+ }
+
+ // Adjust size to even width/height if we have chroma subsampling
+ const QSize size = adjustSize(src.size(), src.pixelFormat(), dstFormat.pixelFormat());
+ if (size != src.size())
+ qCWarning(lc) << "Input truncated to even width/height";
+
+ const SwsContextUPtr conv = createConverter(
+ size, src.pixelFormat(), size, dstFormat.pixelFormat());
+
+ if (!conv) {
+ qCCritical(lc) << "Failed to create SW converter";
+ return {};
+ }
+
+ if (!setColorSpaceDetails(conv.get(), src.surfaceFormat(), dstFormat)) {
+ qCCritical(lc) << "Failed to set color space details";
+ return {};
+ }
+
+ QVideoFrame dst{ dstFormat };
+
+ if (!convert(conv.get(), src, size.height(), dst)) {
+ qCCritical(lc) << "Frame conversion failed";
+ return {};
+ }
+
+ return dst;
+}
+
+// clang-format on
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegsymbolsresolve_p.h b/src/plugins/multimedia/ffmpeg/qffmpegconverter_p.h
index 8064b8d85..57ee3135f 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegsymbolsresolve_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpegconverter_p.h
@@ -1,10 +1,8 @@
-// Copyright (C) 2023 The Qt Company Ltd.
+// Copyright (C) 2024 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-#ifndef QFFMPEGSYMBOLSRESOLVE_P_H
-#define QFFMPEGSYMBOLSRESOLVE_P_H
-
-#include "qnamespace.h"
+#ifndef QFFMPEGCONVERTER_P_H
+#define QFFMPEGCONVERTER_P_H
//
// W A R N I N G
@@ -17,21 +15,16 @@
// We mean it.
//
+#include <QtCore/qtconfigmacros.h>
+#include <private/qtmultimediaglobal_p.h>
+
QT_BEGIN_NAMESPACE
-inline void resolveSymbols()
-{
-#ifdef DYNAMIC_RESOLVE_OPENSSL_SYMBOLS
- extern bool resolveOpenSsl();
- resolveOpenSsl();
-#endif
+class QVideoFrameFormat;
+class QVideoFrame;
-#ifdef DYNAMIC_RESOLVE_VAAPI_SYMBOLS
- extern bool resolveVAAPI();
- resolveVAAPI();
-#endif
-}
+QVideoFrame convertFrame(QVideoFrame &src, const QVideoFrameFormat &dstFormat);
QT_END_NAMESPACE
-#endif // QFFMPEGSYMBOLSRESOLVE_P_H
+#endif
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegdefs_p.h b/src/plugins/multimedia/ffmpeg/qffmpegdefs_p.h
index f3860377e..239d8ff0c 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegdefs_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpegdefs_p.h
@@ -32,6 +32,8 @@ extern "C" {
(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(60, 3, 100)) // since ffmpeg n6.0
#define QT_FFMPEG_STREAM_SIDE_DATA_DEPRECATED \
(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(60, 15, 100)) // since ffmpeg n6.1
+#define QT_FFMPEG_HAS_D3D12VA \
+ (LIBAVUTIL_VERSION_INT >= AV_VERSION_INT(59, 8, 100)) // since ffmpeg n7.0
#define QT_FFMPEG_SWR_CONST_CH_LAYOUT (LIBSWRESAMPLE_VERSION_INT >= AV_VERSION_INT(4, 9, 100))
#define QT_FFMPEG_AVIO_WRITE_CONST \
(LIBAVFORMAT_VERSION_MAJOR >= 61)
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp
index 78e265b4b..5b140f0ca 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp
@@ -93,6 +93,11 @@ static bool precheckDriver(AVHWDeviceType type)
if (type == AV_HWDEVICE_TYPE_D3D11VA)
return QSystemLibrary(QLatin1String("d3d11.dll")).load();
+#if QT_FFMPEG_HAS_D3D12VA
+ if (type == AV_HWDEVICE_TYPE_D3D12VA)
+ return QSystemLibrary(QLatin1String("d3d12.dll")).load();
+#endif
+
if (type == AV_HWDEVICE_TYPE_DXVA2)
return QSystemLibrary(QLatin1String("d3d9.dll")).load();
@@ -122,6 +127,9 @@ static bool checkHwType(AVHWDeviceType type)
if (type == AV_HWDEVICE_TYPE_MEDIACODEC ||
type == AV_HWDEVICE_TYPE_VIDEOTOOLBOX ||
type == AV_HWDEVICE_TYPE_D3D11VA ||
+#if QT_FFMPEG_HAS_D3D12VA
+ type == AV_HWDEVICE_TYPE_D3D12VA ||
+#endif
type == AV_HWDEVICE_TYPE_DXVA2)
return true; // Don't waste time; it's expected to work fine of the precheck is OK
@@ -143,10 +151,11 @@ static const std::vector<AVHWDeviceType> &deviceTypes()
std::unordered_set<AVPixelFormat> hwPixFormats;
void *opaque = nullptr;
while (auto codec = av_codec_iterate(&opaque)) {
- if (auto pixFmt = codec->pix_fmts)
- for (; *pixFmt != AV_PIX_FMT_NONE; ++pixFmt)
- if (isHwPixelFormat(*pixFmt))
- hwPixFormats.insert(*pixFmt);
+ findAVPixelFormat(codec, [&](AVPixelFormat format) {
+ if (isHwPixelFormat(format))
+ hwPixFormats.insert(format);
+ return false;
+ });
}
// create a device types list
@@ -292,7 +301,9 @@ AVPixelFormat getFormat(AVCodecContext *codecContext, const AVPixelFormat *sugge
const bool shouldCheckCodecFormats = config->pix_fmt == AV_PIX_FMT_NONE;
auto scoresGettor = [&](AVPixelFormat format) {
- if (shouldCheckCodecFormats && !isAVFormatSupported(codecContext->codec, format))
+ // check in supported codec->pix_fmts;
+ // no reason to use findAVPixelFormat as we're already in the hw_config loop
+ if (shouldCheckCodecFormats && !hasAVFormat(codecContext->codec->pix_fmts, format))
return NotSuitableAVScore;
if (!shouldCheckCodecFormats && config->pix_fmt != format)
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_p.h b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_p.h
index eee535343..bc6547f12 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_p.h
@@ -16,7 +16,8 @@
#include "qffmpeg_p.h"
#include "qvideoframeformat.h"
-#include <private/qabstractvideobuffer_p.h>
+#include "qabstractvideobuffer.h"
+
#include <qshareddata.h>
#include <memory>
#include <functional>
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi.cpp b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi.cpp
index 81eef89ef..7e46e3537 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi.cpp
@@ -184,7 +184,7 @@ VAAPITextureConverter::VAAPITextureConverter(QRhi *rhi)
}
const QString platform = QGuiApplication::platformName();
QPlatformNativeInterface *pni = QGuiApplication::platformNativeInterface();
- eglDisplay = pni->nativeResourceForIntegration("egldisplay");
+ eglDisplay = pni->nativeResourceForIntegration(QByteArrayLiteral("egldisplay"));
qCDebug(qLHWAccelVAAPI) << " platform is" << platform << eglDisplay;
if (!eglDisplay) {
@@ -231,7 +231,7 @@ TextureSet *VAAPITextureConverter::getTextures(AVFrame *frame)
VASurfaceID vaSurface = (uintptr_t)frame->data[3];
- VADRMPRIMESurfaceDescriptor prime;
+ VADRMPRIMESurfaceDescriptor prime = {};
if (vaExportSurfaceHandle(vaDisplay, vaSurface,
VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME_2,
VA_EXPORT_SURFACE_READ_ONLY |
@@ -245,6 +245,13 @@ TextureSet *VAAPITextureConverter::getTextures(AVFrame *frame)
qWarning() << "vaExportSurfaceHandle failed";
return nullptr;
}
+
+ // Make sure all fd's in 'prime' are closed when we return from this function
+ QScopeGuard closeObjectsGuard([&prime]() {
+ for (uint32_t i = 0; i < prime.num_objects; ++i)
+ close(prime.objects[i].fd);
+ });
+
// ### Check that prime.fourcc is what we expect
vaSyncSurface(vaDisplay, vaSurface);
@@ -325,9 +332,6 @@ TextureSet *VAAPITextureConverter::getTextures(AVFrame *frame)
qWarning() << "eglImageTargetTexture2D failed with error code" << error;
}
- for (int i = 0; i < (int)prime.num_objects; ++i)
- close(prime.objects[i].fd);
-
for (int i = 0; i < nPlanes; ++i) {
functions.glActiveTexture(GL_TEXTURE0 + i);
functions.glBindTexture(GL_TEXTURE_2D, 0);
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession.cpp
index f02593d16..1b6db5813 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession.cpp
@@ -6,9 +6,12 @@
#include "private/qplatformaudioinput_p.h"
#include "private/qplatformaudiooutput_p.h"
#include "private/qplatformsurfacecapture_p.h"
+#include "private/qplatformaudiobufferinput_p.h"
+#include "private/qplatformvideoframeinput_p.h"
+#include "private/qplatformcamera_p.h"
+
#include "qffmpegimagecapture_p.h"
#include "qffmpegmediarecorder_p.h"
-#include "private/qplatformcamera_p.h"
#include "qvideosink.h"
#include "qffmpegaudioinput_p.h"
#include "qaudiosink.h"
@@ -72,6 +75,17 @@ void QFFmpegMediaCaptureSession::setWindowCapture(QPlatformSurfaceCapture *windo
emit windowCaptureChanged();
}
+QPlatformVideoFrameInput *QFFmpegMediaCaptureSession::videoFrameInput()
+{
+ return m_videoFrameInput;
+}
+
+void QFFmpegMediaCaptureSession::setVideoFrameInput(QPlatformVideoFrameInput *input)
+{
+ if (setVideoSource(m_videoFrameInput, input))
+ emit videoFrameInputChanged();
+}
+
QPlatformImageCapture *QFFmpegMediaCaptureSession::imageCapture()
{
return m_imageCapture;
@@ -136,6 +150,12 @@ void QFFmpegMediaCaptureSession::setAudioInput(QPlatformAudioInput *input)
updateAudioSink();
}
+void QFFmpegMediaCaptureSession::setAudioBufferInput(QPlatformAudioBufferInput *input)
+{
+ // TODO: implement binding to audio sink like setAudioInput does
+ m_audioBufferInput = input;
+}
+
void QFFmpegMediaCaptureSession::updateAudioSink()
{
if (m_audioSink) {
@@ -191,7 +211,7 @@ void QFFmpegMediaCaptureSession::updateVolume()
m_audioSink->setVolume(m_audioOutput->muted ? 0.f : m_audioOutput->volume);
}
-QPlatformAudioInput *QFFmpegMediaCaptureSession::audioInput()
+QPlatformAudioInput *QFFmpegMediaCaptureSession::audioInput() const
{
return m_audioInput;
}
@@ -281,6 +301,18 @@ QPlatformVideoSource *QFFmpegMediaCaptureSession::primaryActiveVideoSource()
return m_primaryActiveVideoSource;
}
+std::vector<QPlatformAudioBufferInputBase *> QFFmpegMediaCaptureSession::activeAudioInputs() const
+{
+ std::vector<QPlatformAudioBufferInputBase *> result;
+ if (m_audioInput)
+ result.push_back(m_audioInput);
+
+ if (m_audioBufferInput)
+ result.push_back(m_audioBufferInput);
+
+ return result;
+}
+
QT_END_NAMESPACE
#include "moc_qffmpegmediacapturesession_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession_p.h b/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession_p.h
index 6c80d0b09..25340dad5 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession_p.h
@@ -29,6 +29,8 @@ class QAudioSink;
class QFFmpegAudioInput;
class QAudioBuffer;
class QPlatformVideoSource;
+class QPlatformAudioBufferInput;
+class QPlatformAudioBufferInputBase;
class QFFmpegMediaCaptureSession : public QPlatformMediaCaptureSession
{
@@ -49,6 +51,9 @@ public:
QPlatformSurfaceCapture *windowCapture() override;
void setWindowCapture(QPlatformSurfaceCapture *) override;
+ QPlatformVideoFrameInput *videoFrameInput() override;
+ void setVideoFrameInput(QPlatformVideoFrameInput *) override;
+
QPlatformImageCapture *imageCapture() override;
void setImageCapture(QPlatformImageCapture *imageCapture) override;
@@ -56,13 +61,19 @@ public:
void setMediaRecorder(QPlatformMediaRecorder *recorder) override;
void setAudioInput(QPlatformAudioInput *input) override;
- QPlatformAudioInput *audioInput();
+ QPlatformAudioInput *audioInput() const;
+
+ void setAudioBufferInput(QPlatformAudioBufferInput *input) override;
void setVideoPreview(QVideoSink *sink) override;
void setAudioOutput(QPlatformAudioOutput *output) override;
QPlatformVideoSource *primaryActiveVideoSource();
+ // This could be moved to the base class, but that would require QPlatformAudioInput
+ // to derive from QPlatformAudioBufferInputBase, which might not make sense
+ std::vector<QPlatformAudioBufferInputBase *> activeAudioInputs() const;
+
private Q_SLOTS:
void updateAudioSink();
void updateVolume();
@@ -79,9 +90,12 @@ private:
QPointer<QPlatformCamera> m_camera;
QPointer<QPlatformSurfaceCapture> m_screenCapture;
QPointer<QPlatformSurfaceCapture> m_windowCapture;
+ QPointer<QPlatformVideoFrameInput> m_videoFrameInput;
QPointer<QPlatformVideoSource> m_primaryActiveVideoSource;
- QFFmpegAudioInput *m_audioInput = nullptr;
+ QPointer<QFFmpegAudioInput> m_audioInput;
+ QPointer<QPlatformAudioBufferInput> m_audioBufferInput;
+
QFFmpegImageCapture *m_imageCapture = nullptr;
QFFmpegMediaRecorder *m_mediaRecorder = nullptr;
QPlatformAudioOutput *m_audioOutput = nullptr;
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp
index 291ca1111..ba1fff3b3 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp
@@ -13,8 +13,8 @@
#include "qffmpegaudioinput_p.h"
#include "qffmpegaudiodecoder_p.h"
#include "qffmpegresampler_p.h"
-#include "qffmpegsymbolsresolve_p.h"
#include "qgrabwindowsurfacecapture_p.h"
+#include "qffmpegconverter_p.h"
#ifdef Q_OS_MACOS
#include <VideoToolbox/VideoToolbox.h>
@@ -171,8 +171,6 @@ static QPlatformSurfaceCapture *createWindowCaptureByBackend(QString backend)
QFFmpegMediaIntegration::QFFmpegMediaIntegration()
: QPlatformMediaIntegration(QLatin1String("ffmpeg"))
{
- resolveSymbols();
-
setupFFmpegLogger();
#ifndef QT_NO_DEBUG
@@ -308,6 +306,12 @@ QMaybe<QPlatformAudioInput *> QFFmpegMediaIntegration::createAudioInput(QAudioIn
return new QFFmpegAudioInput(input);
}
+QVideoFrame QFFmpegMediaIntegration::convertVideoFrame(QVideoFrame &srcFrame,
+ const QVideoFrameFormat &destFormat)
+{
+ return convertFrame(srcFrame, destFormat);
+}
+
QPlatformMediaFormatInfo *QFFmpegMediaIntegration::createFormatInfo()
{
return new QFFmpegMediaFormatInfo;
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration_p.h b/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration_p.h
index b8787bcc4..473a5f044 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration_p.h
@@ -41,6 +41,9 @@ public:
QMaybe<QPlatformAudioInput *> createAudioInput(QAudioInput *input) override;
// QPlatformAudioOutput *createAudioOutput(QAudioOutput *) override;
+ QVideoFrame convertVideoFrame(QVideoFrame &srcFrame,
+ const QVideoFrameFormat &destFormat) override;
+
protected:
QPlatformMediaFormatInfo *createFormatInfo() override;
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer.cpp
index 6a950a6ad..951144692 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer.cpp
@@ -5,6 +5,7 @@
#include "private/qplatformaudiooutput_p.h"
#include "qvideosink.h"
#include "qaudiooutput.h"
+#include "qaudiobufferoutput.h"
#include "qffmpegplaybackengine_p.h"
#include <qiodevice.h>
@@ -75,12 +76,17 @@ void QFFmpegMediaPlayer::updatePosition()
void QFFmpegMediaPlayer::endOfStream()
{
- // start update timer and report end position anyway
+ // stop update timer and report end position anyway
m_positionUpdateTimer.stop();
+ QPointer currentPlaybackEngine(m_playbackEngine.get());
positionChanged(duration());
- stateChanged(QMediaPlayer::StoppedState);
- mediaStatusChanged(QMediaPlayer::EndOfMedia);
+ // skip changing state and mediaStatus if the playbackEngine has been recreated,
+ // e.g. if new media has been loaded in response to the positionChanged signal
+ if (currentPlaybackEngine)
+ stateChanged(QMediaPlayer::StoppedState);
+ if (currentPlaybackEngine)
+ mediaStatusChanged(QMediaPlayer::EndOfMedia);
}
void QFFmpegMediaPlayer::onLoopChanged()
@@ -146,7 +152,7 @@ void QFFmpegMediaPlayer::setPlaybackRate(qreal rate)
if (m_playbackEngine)
m_playbackEngine->setPlaybackRate(effectiveRate);
- emit playbackRateChanged(effectiveRate);
+ playbackRateChanged(effectiveRate);
}
QUrl QFFmpegMediaPlayer::media() const
@@ -241,8 +247,10 @@ void QFFmpegMediaPlayer::setMediaAsync(QFFmpeg::MediaDataHolder::Maybe mediaData
m_playbackEngine->setMedia(std::move(*mediaDataHolder.value()));
+ m_playbackEngine->setAudioBufferOutput(m_audioBufferOutput);
m_playbackEngine->setAudioSink(m_audioOutput);
m_playbackEngine->setVideoSink(m_videoSink);
+
m_playbackEngine->setLoops(loops());
m_playbackEngine->setPlaybackRate(m_playbackRate);
@@ -336,14 +344,17 @@ void QFFmpegMediaPlayer::stop()
void QFFmpegMediaPlayer::setAudioOutput(QPlatformAudioOutput *output)
{
- if (m_audioOutput == output)
- return;
-
m_audioOutput = output;
if (m_playbackEngine)
m_playbackEngine->setAudioSink(output);
}
+void QFFmpegMediaPlayer::setAudioBufferOutput(QAudioBufferOutput *output) {
+ m_audioBufferOutput = output;
+ if (m_playbackEngine)
+ m_playbackEngine->setAudioBufferOutput(output);
+}
+
QMediaMetaData QFFmpegMediaPlayer::metaData() const
{
return m_playbackEngine ? m_playbackEngine->metaData() : QMediaMetaData{};
@@ -351,9 +362,6 @@ QMediaMetaData QFFmpegMediaPlayer::metaData() const
void QFFmpegMediaPlayer::setVideoSink(QVideoSink *sink)
{
- if (m_videoSink == sink)
- return;
-
m_videoSink = sink;
if (m_playbackEngine)
m_playbackEngine->setVideoSink(sink);
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer_p.h b/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer_p.h
index 4e700787e..4ab5701da 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer_p.h
@@ -61,6 +61,8 @@ public:
void setAudioOutput(QPlatformAudioOutput *) override;
+ void setAudioBufferOutput(QAudioBufferOutput *) override;
+
QMediaMetaData metaData() const override;
void setVideoSink(QVideoSink *sink) override;
@@ -98,6 +100,7 @@ private:
std::unique_ptr<PlaybackEngine> m_playbackEngine;
QPlatformAudioOutput *m_audioOutput = nullptr;
+ QPointer<QAudioBufferOutput> m_audioBufferOutput;
QPointer<QVideoSink> m_videoSink;
QUrl m_url;
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder.cpp
index 9b1a70742..0315a0061 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder.cpp
@@ -32,21 +32,28 @@ bool QFFmpegMediaRecorder::isLocationWritable(const QUrl &) const
void QFFmpegMediaRecorder::handleSessionError(QMediaRecorder::Error code, const QString &description)
{
- error(code, description);
+ updateError(code, description);
stop();
}
+void QFFmpegMediaRecorder::handleEndOfSourceStream()
+{
+ if (mediaRecorder()->autoStop())
+ stop();
+}
+
void QFFmpegMediaRecorder::record(QMediaEncoderSettings &settings)
{
if (!m_session || state() != QMediaRecorder::StoppedState)
return;
auto videoSources = m_session->activeVideoSources();
+ auto audioInputs = m_session->activeAudioInputs();
const auto hasVideo = !videoSources.empty();
- const auto hasAudio = m_session->audioInput() != nullptr;
+ const auto hasAudio = !audioInputs.empty();
if (!hasVideo && !hasAudio) {
- error(QMediaRecorder::ResourceError, QMediaRecorder::tr("No video or audio input"));
+ updateError(QMediaRecorder::ResourceError, QMediaRecorder::tr("No video or audio input"));
return;
}
@@ -72,36 +79,36 @@ void QFFmpegMediaRecorder::record(QMediaEncoderSettings &settings)
<< settings.audioCodec();
if (!formatContext->isAVIOOpen()) {
- error(QMediaRecorder::LocationNotWritable,
- QMediaRecorder::tr("Cannot open the output location for writing"));
+ updateError(QMediaRecorder::LocationNotWritable,
+ QMediaRecorder::tr("Cannot open the output location for writing"));
return;
}
- m_encoder.reset(new RecordingEngine(settings, std::move(formatContext)));
- m_encoder->setMetaData(m_metaData);
- connect(m_encoder.get(), &QFFmpeg::RecordingEngine::durationChanged, this,
+ m_recordingEngine.reset(new RecordingEngine(settings, std::move(formatContext)));
+ m_recordingEngine->setMetaData(m_metaData);
+ connect(m_recordingEngine.get(), &QFFmpeg::RecordingEngine::durationChanged, this,
&QFFmpegMediaRecorder::newDuration);
- connect(m_encoder.get(), &QFFmpeg::RecordingEngine::finalizationDone, this,
+ connect(m_recordingEngine.get(), &QFFmpeg::RecordingEngine::finalizationDone, this,
&QFFmpegMediaRecorder::finalizationDone);
- connect(m_encoder.get(), &QFFmpeg::RecordingEngine::error, this,
+ connect(m_recordingEngine.get(), &QFFmpeg::RecordingEngine::sessionError, this,
&QFFmpegMediaRecorder::handleSessionError);
+ connect(m_recordingEngine.get(), &QFFmpeg::RecordingEngine::endOfSourceStreams, this,
+ &QFFmpegMediaRecorder::handleEndOfSourceStream);
- auto *audioInput = m_session->audioInput();
- if (audioInput) {
- if (audioInput->device.isNull())
- qWarning() << "Audio input device is null; cannot encode audio";
- else
- m_encoder->addAudioInput(static_cast<QFFmpegAudioInput *>(audioInput));
- }
+ auto handleStreamInitializationError = [this](QMediaRecorder::Error code,
+ const QString &description) {
+ qCWarning(qLcMediaEncoder) << "Stream initialization error:" << description;
+ updateError(code, description);
+ };
- for (auto source : videoSources)
- m_encoder->addVideoSource(source);
+ connect(m_recordingEngine.get(), &QFFmpeg::RecordingEngine::streamInitializationError, this,
+ handleStreamInitializationError);
durationChanged(0);
stateChanged(QMediaRecorder::RecordingState);
actualLocationChanged(QUrl::fromLocalFile(actualLocation));
- m_encoder->start();
+ m_recordingEngine->initialize(audioInputs, videoSources);
}
void QFFmpegMediaRecorder::pause()
@@ -109,8 +116,8 @@ void QFFmpegMediaRecorder::pause()
if (!m_session || state() != QMediaRecorder::RecordingState)
return;
- Q_ASSERT(m_encoder);
- m_encoder->setPaused(true);
+ Q_ASSERT(m_recordingEngine);
+ m_recordingEngine->setPaused(true);
stateChanged(QMediaRecorder::PausedState);
}
@@ -120,8 +127,8 @@ void QFFmpegMediaRecorder::resume()
if (!m_session || state() != QMediaRecorder::PausedState)
return;
- Q_ASSERT(m_encoder);
- m_encoder->setPaused(false);
+ Q_ASSERT(m_recordingEngine);
+ m_recordingEngine->setPaused(false);
stateChanged(QMediaRecorder::RecordingState);
}
@@ -135,7 +142,7 @@ void QFFmpegMediaRecorder::stop()
static_cast<QFFmpegAudioInput *>(input)->setRunning(false);
qCDebug(qLcMediaEncoder) << "stop";
- m_encoder.reset();
+ m_recordingEngine.reset();
}
void QFFmpegMediaRecorder::finalizationDone()
@@ -169,11 +176,12 @@ void QFFmpegMediaRecorder::setCaptureSession(QFFmpegMediaCaptureSession *session
return;
}
-void QFFmpegMediaRecorder::EncoderDeleter::operator()(RecordingEngine *encoder) const
+void QFFmpegMediaRecorder::RecordingEngineDeleter::operator()(
+ RecordingEngine *recordingEngine) const
{
// ### all of the below should be done asynchronous. finalize() should do it's work in a thread
// to avoid blocking the UI in case of slow codecs
- encoder->finalize();
+ recordingEngine->finalize();
}
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder_p.h b/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder_p.h
index 3a3bbcf5c..1bca093c2 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder_p.h
@@ -52,10 +52,11 @@ private Q_SLOTS:
void newDuration(qint64 d) { durationChanged(d); }
void finalizationDone();
void handleSessionError(QMediaRecorder::Error code, const QString &description);
+ void handleEndOfSourceStream();
private:
using RecordingEngine = QFFmpeg::RecordingEngine;
- struct EncoderDeleter
+ struct RecordingEngineDeleter
{
void operator()(RecordingEngine *) const;
};
@@ -63,7 +64,7 @@ private:
QFFmpegMediaCaptureSession *m_session = nullptr;
QMediaMetaData m_metaData;
- std::unique_ptr<RecordingEngine, EncoderDeleter> m_encoder;
+ std::unique_ptr<RecordingEngine, RecordingEngineDeleter> m_recordingEngine;
};
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegopensslsymbols.cpp b/src/plugins/multimedia/ffmpeg/qffmpegopensslsymbols.cpp
deleted file mode 100644
index e0e5de137..000000000
--- a/src/plugins/multimedia/ffmpeg/qffmpegopensslsymbols.cpp
+++ /dev/null
@@ -1,185 +0,0 @@
-// Copyright (C) 2023 The Qt Company Ltd.
-// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-
-#include <QtCore/qlibrary.h>
-
-#include "qffmpegsymbolsresolveutils_p.h"
-
-#include <QtCore/qglobal.h>
-#include <qstringliteral.h>
-
-#include <openssl/bio.h>
-#include <openssl/ssl.h>
-#include <openssl/bn.h>
-#include <openssl/err.h>
-#include <openssl/rand.h>
-
-QT_BEGIN_NAMESPACE
-
-using namespace Qt::StringLiterals;
-
-static Libs loadLibs()
-{
- Libs libs(2);
- libs[0] = std::make_unique<QLibrary>();
- libs[1] = std::make_unique<QLibrary>();
-
- const auto majorVersion = OPENSSL_VERSION_NUMBER >> 28;
-
- auto tryLoad = [&](QString sslName, QString cryptoName, auto version) {
- libs[0]->setFileNameAndVersion(sslName, version);
- libs[1]->setFileNameAndVersion(cryptoName, version);
- return LibSymbolsResolver::tryLoad(libs);
- };
-
-// Due to binary compatibility issues between 1.x.x openssl version, let's try taking exact version
-#if defined(SHLIB_VERSION_NUMBER)
- if (majorVersion <= 1 && tryLoad("ssl"_L1, "crypto"_L1, SHLIB_VERSION_NUMBER ""_L1))
- return libs;
-#endif
-
-// openssl on Android has specific suffixes
-#if defined(Q_OS_ANDROID)
- {
- auto suffix = qEnvironmentVariable("ANDROID_OPENSSL_SUFFIX");
- if (suffix.isEmpty())
- suffix = QString("_"_L1) + QString::number(majorVersion);
-
- if (tryLoad("ssl"_L1 + suffix, "crypto"_L1 + suffix, -1))
- return libs;
- }
-#endif
-
- if (tryLoad("ssl"_L1, "crypto"_L1, majorVersion))
- return libs;
-
- return {};
-};
-
-Q_GLOBAL_STATIC(LibSymbolsResolver, resolver, "OpenSsl", 75, loadLibs);
-
-void resolveOpenSsl()
-{
- resolver()->resolve();
-}
-
-QT_END_NAMESPACE
-
-QT_USE_NAMESPACE
-
-// BN functions
-
-DEFINE_FUNC(BN_value_one, 0);
-DEFINE_FUNC(BN_mod_word, 2);
-
-DEFINE_FUNC(BN_div_word, 2)
-DEFINE_FUNC(BN_mul_word, 2)
-DEFINE_FUNC(BN_add_word, 2)
-DEFINE_FUNC(BN_sub_word, 2)
-DEFINE_FUNC(BN_set_word, 2)
-DEFINE_FUNC(BN_new, 0)
-DEFINE_FUNC(BN_cmp, 2)
-
-DEFINE_FUNC(BN_free, 1);
-
-DEFINE_FUNC(BN_copy, 2);
-
-DEFINE_FUNC(BN_CTX_new, 0);
-
-DEFINE_FUNC(BN_CTX_free, 1);
-DEFINE_FUNC(BN_CTX_start, 1);
-
-DEFINE_FUNC(BN_CTX_get, 1);
-DEFINE_FUNC(BN_CTX_end, 1);
-
-DEFINE_FUNC(BN_rand, 4);
-DEFINE_FUNC(BN_mod_exp, 5);
-
-DEFINE_FUNC(BN_num_bits, 1);
-DEFINE_FUNC(BN_num_bits_word, 1);
-
-DEFINE_FUNC(BN_bn2hex, 1);
-DEFINE_FUNC(BN_bn2dec, 1);
-
-DEFINE_FUNC(BN_hex2bn, 2);
-DEFINE_FUNC(BN_dec2bn, 2);
-DEFINE_FUNC(BN_asc2bn, 2);
-
-DEFINE_FUNC(BN_bn2bin, 2);
-DEFINE_FUNC(BN_bin2bn, 3);
-
-// BIO-related functions
-
-DEFINE_FUNC(BIO_new, 1);
-DEFINE_FUNC(BIO_free, 1);
-
-DEFINE_FUNC(BIO_read, 3, -1);
-DEFINE_FUNC(BIO_write, 3, -1);
-DEFINE_FUNC(BIO_s_mem, 0);
-
-DEFINE_FUNC(BIO_set_data, 2);
-
-DEFINE_FUNC(BIO_get_data, 1);
-DEFINE_FUNC(BIO_set_init, 2);
-
-DEFINE_FUNC(BIO_set_flags, 2);
-DEFINE_FUNC(BIO_test_flags, 2);
-DEFINE_FUNC(BIO_clear_flags, 2);
-
-DEFINE_FUNC(BIO_meth_new, 2);
-DEFINE_FUNC(BIO_meth_free, 1);
-
-DEFINE_FUNC(BIO_meth_set_write, 2);
-DEFINE_FUNC(BIO_meth_set_read, 2);
-DEFINE_FUNC(BIO_meth_set_puts, 2);
-DEFINE_FUNC(BIO_meth_set_gets, 2);
-DEFINE_FUNC(BIO_meth_set_ctrl, 2);
-DEFINE_FUNC(BIO_meth_set_create, 2);
-DEFINE_FUNC(BIO_meth_set_destroy, 2);
-DEFINE_FUNC(BIO_meth_set_callback_ctrl, 2);
-
-// SSL functions
-
-DEFINE_FUNC(SSL_CTX_new, 1);
-DEFINE_FUNC(SSL_CTX_up_ref, 1);
-DEFINE_FUNC(SSL_CTX_free, 1);
-
-DEFINE_FUNC(SSL_new, 1);
-DEFINE_FUNC(SSL_up_ref, 1);
-DEFINE_FUNC(SSL_free, 1);
-
-DEFINE_FUNC(SSL_accept, 1);
-DEFINE_FUNC(SSL_stateless, 1);
-DEFINE_FUNC(SSL_connect, 1);
-DEFINE_FUNC(SSL_read, 3, -1);
-DEFINE_FUNC(SSL_peek, 3);
-DEFINE_FUNC(SSL_write, 3, -1);
-DEFINE_FUNC(SSL_ctrl, 4);
-DEFINE_FUNC(SSL_shutdown, 1);
-DEFINE_FUNC(SSL_set_bio, 3);
-
-// options are unsigned long in openssl 1.1.1, and uint64 in 3.x.x
-DEFINE_FUNC(SSL_CTX_set_options, 2);
-
-DEFINE_FUNC(SSL_get_error, 2);
-DEFINE_FUNC(SSL_CTX_load_verify_locations, 3, -1);
-
-DEFINE_FUNC(SSL_CTX_set_verify, 3);
-DEFINE_FUNC(SSL_CTX_use_PrivateKey, 2);
-
-DEFINE_FUNC(SSL_CTX_use_PrivateKey_file, 3);
-DEFINE_FUNC(SSL_CTX_use_certificate_chain_file, 2);
-
-DEFINE_FUNC(ERR_get_error, 0);
-
-static char ErrorString[] = "Ssl not found";
-DEFINE_FUNC(ERR_error_string, 2, ErrorString);
-
-// TLS functions
-
-DEFINE_FUNC(TLS_client_method, 0);
-DEFINE_FUNC(TLS_server_method, 0);
-
-// RAND functions
-
-DEFINE_FUNC(RAND_bytes, 2);
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine.cpp b/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine.cpp
index 811feb0d5..11dccb149 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine.cpp
@@ -6,6 +6,7 @@
#include "qaudiooutput.h"
#include "private/qplatformaudiooutput_p.h"
#include "private/qplatformvideosink_p.h"
+#include "private/qaudiobufferoutput_p.h"
#include "qiodevice.h"
#include "playbackengine/qffmpegdemuxer_p.h"
#include "playbackengine/qffmpegstreamdecoder_p.h"
@@ -200,8 +201,8 @@ PlaybackEngine::createRenderer(QPlatformMediaPlayer::TrackType trackType)
? createPlaybackEngineObject<VideoRenderer>(m_timeController, m_videoSink, m_media.rotation())
: RendererPtr{ {}, {} };
case QPlatformMediaPlayer::AudioStream:
- return m_audioOutput
- ? createPlaybackEngineObject<AudioRenderer>(m_timeController, m_audioOutput)
+ return m_audioOutput || m_audioBufferOutput
+ ? createPlaybackEngineObject<AudioRenderer>(m_timeController, m_audioOutput, m_audioBufferOutput)
: RendererPtr{ {}, {} };
case QPlatformMediaPlayer::SubtitleStream:
return m_videoSink
@@ -486,7 +487,7 @@ void PlaybackEngine::setAudioSink(QPlatformAudioOutput *output) {
void PlaybackEngine::setAudioSink(QAudioOutput *output)
{
- auto prev = std::exchange(m_audioOutput, output);
+ QAudioOutput *prev = std::exchange(m_audioOutput, output);
if (prev == output)
return;
@@ -498,6 +499,14 @@ void PlaybackEngine::setAudioSink(QAudioOutput *output)
}
}
+void PlaybackEngine::setAudioBufferOutput(QAudioBufferOutput *output)
+{
+ QAudioBufferOutput *prev = std::exchange(m_audioBufferOutput, output);
+ if (prev == output)
+ return;
+ updateActiveAudioOutput(output);
+}
+
qint64 PlaybackEngine::currentPosition(bool topPos) const {
std::optional<qint64> pos;
@@ -572,7 +581,10 @@ void PlaybackEngine::finilizeTime(qint64 pos)
void PlaybackEngine::finalizeOutputs()
{
- updateActiveAudioOutput(nullptr);
+ if (m_audioBufferOutput)
+ updateActiveAudioOutput(static_cast<QAudioBufferOutput *>(nullptr));
+ if (m_audioOutput)
+ updateActiveAudioOutput(static_cast<QAudioOutput *>(nullptr));
updateActiveVideoOutput(nullptr, true);
}
@@ -582,7 +594,8 @@ bool PlaybackEngine::hasRenderer(quint64 id) const
[id](auto &renderer) { return renderer && renderer->id() == id; });
}
-void PlaybackEngine::updateActiveAudioOutput(QAudioOutput *output)
+template <typename AudioOutput>
+void PlaybackEngine::updateActiveAudioOutput(AudioOutput *output)
{
if (auto renderer =
qobject_cast<AudioRenderer *>(m_renderers[QPlatformMediaPlayer::AudioStream].get()))
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine_p.h b/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine_p.h
index f36828771..50c94c955 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine_p.h
@@ -60,6 +60,7 @@ QT_BEGIN_NAMESPACE
class QAudioSink;
class QVideoSink;
class QAudioOutput;
+class QAudioBufferOutput;
class QFFmpegMediaPlayer;
namespace QFFmpeg
@@ -81,6 +82,8 @@ public:
void setAudioSink(QPlatformAudioOutput *output);
+ void setAudioBufferOutput(QAudioBufferOutput *output);
+
void setState(QMediaPlayer::PlaybackState state);
void play() {
@@ -141,7 +144,8 @@ protected: // objects managing
virtual RendererPtr createRenderer(QPlatformMediaPlayer::TrackType trackType);
- void updateActiveAudioOutput(QAudioOutput *output);
+ template <typename AudioOutput>
+ void updateActiveAudioOutput(AudioOutput *output);
void updateActiveVideoOutput(QVideoSink *sink, bool cleanOutput = false);
@@ -203,6 +207,7 @@ private:
QPointer<QVideoSink> m_videoSink;
QPointer<QAudioOutput> m_audioOutput;
+ QPointer<QAudioBufferOutput> m_audioBufferOutput;
QMediaPlayer::PlaybackState m_state = QMediaPlayer::StoppedState;
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegresampler.cpp b/src/plugins/multimedia/ffmpeg/qffmpegresampler.cpp
index e5e9ca3bb..141a6ade2 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegresampler.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegresampler.cpp
@@ -21,8 +21,9 @@ QFFmpegResampler::QFFmpegResampler(const QAudioFormat &inputFormat, const QAudio
createResampleContext(AVAudioFormat(m_inputFormat), AVAudioFormat(m_outputFormat));
}
-QFFmpegResampler::QFFmpegResampler(const Codec* codec, const QAudioFormat &outputFormat)
- : m_outputFormat(outputFormat)
+QFFmpegResampler::QFFmpegResampler(const Codec *codec, const QAudioFormat &outputFormat,
+ qint64 startTime)
+ : m_outputFormat(outputFormat), m_startTime(startTime)
{
Q_ASSERT(codec);
@@ -64,7 +65,7 @@ QAudioBuffer QFFmpegResampler::resample(const uint8_t **inputData, int inputSamp
samples.resize(m_outputFormat.bytesForFrames(outSamples));
- qint64 startTime = m_outputFormat.durationForFrames(m_samplesProcessed);
+ const qint64 startTime = m_outputFormat.durationForFrames(m_samplesProcessed) + m_startTime;
m_samplesProcessed += outSamples;
qCDebug(qLcResampler) << " new frame" << startTime << "in_samples" << inputSamplesCount
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegresampler_p.h b/src/plugins/multimedia/ffmpeg/qffmpegresampler_p.h
index 5109ecf11..530f40aa2 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegresampler_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpegresampler_p.h
@@ -29,7 +29,8 @@ class QFFmpegResampler : public QPlatformAudioResampler
{
public:
QFFmpegResampler(const QAudioFormat &inputFormat, const QAudioFormat &outputFormat);
- QFFmpegResampler(const QFFmpeg::Codec* codec, const QAudioFormat &outputFormat);
+ QFFmpegResampler(const QFFmpeg::Codec *codec, const QAudioFormat &outputFormat,
+ qint64 startTime = 0);
~QFFmpegResampler() override;
@@ -49,6 +50,7 @@ private:
private:
QAudioFormat m_inputFormat;
QAudioFormat m_outputFormat;
+ qint64 m_startTime = 0;
QFFmpeg::SwrContextUPtr m_resampler;
qint64 m_samplesProcessed = 0;
qint64 m_endCompensationSample = std::numeric_limits<qint64>::min();
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegscreencapture_dxgi.cpp b/src/plugins/multimedia/ffmpeg/qffmpegscreencapture_dxgi.cpp
index 69d2d0643..feab39697 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegscreencapture_dxgi.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegscreencapture_dxgi.cpp
@@ -3,9 +3,10 @@
#include "qffmpegscreencapture_dxgi_p.h"
#include "qffmpegsurfacecapturegrabber_p.h"
-#include <private/qabstractvideobuffer_p.h>
+#include "qabstractvideobuffer.h"
#include <private/qmultimediautils_p.h>
#include <private/qwindowsmultimediautils_p.h>
+#include <private/qvideoframe_p.h>
#include <qtgui/qscreen_platform.h>
#include "qvideoframe.h"
@@ -71,11 +72,7 @@ class QD3D11TextureVideoBuffer : public QAbstractVideoBuffer
public:
QD3D11TextureVideoBuffer(const ComPtr<ID3D11Device> &device, std::shared_ptr<QMutex> &mutex,
const ComPtr<ID3D11Texture2D> &texture, QSize size)
- : QAbstractVideoBuffer(QVideoFrame::NoHandle)
- , m_device(device)
- , m_texture(texture)
- , m_ctxMutex(mutex)
- , m_size(size)
+ : m_device(device), m_texture(texture), m_ctxMutex(mutex), m_size(size)
{}
~QD3D11TextureVideoBuffer()
@@ -83,15 +80,10 @@ public:
QD3D11TextureVideoBuffer::unmap();
}
- QVideoFrame::MapMode mapMode() const override
- {
- return m_mapMode;
- }
-
- MapData map(QVideoFrame::MapMode mode) override
+ MapData map(QtVideo::MapMode mode) override
{
MapData mapData;
- if (!m_ctx && mode == QVideoFrame::ReadOnly) {
+ if (!m_ctx && mode == QtVideo::MapMode::ReadOnly) {
D3D11_TEXTURE2D_DESC texDesc = {};
m_texture->GetDesc(&texDesc);
texDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
@@ -121,10 +113,10 @@ public:
}
m_mapMode = mode;
- mapData.nPlanes = 1;
+ mapData.planeCount = 1;
mapData.bytesPerLine[0] = int(resource.RowPitch);
mapData.data[0] = reinterpret_cast<uchar*>(resource.pData);
- mapData.size[0] = m_size.height() * int(resource.RowPitch);
+ mapData.dataSize[0] = m_size.height() * int(resource.RowPitch);
}
return mapData;
@@ -132,7 +124,7 @@ public:
void unmap() override
{
- if (m_mapMode == QVideoFrame::NotMapped)
+ if (m_mapMode == QtVideo::MapMode::NotMapped)
return;
if (m_ctx) {
m_ctxMutex->lock();
@@ -141,9 +133,11 @@ public:
m_ctx.Reset();
}
m_cpuTexture.Reset();
- m_mapMode = QVideoFrame::NotMapped;
+ m_mapMode = QtVideo::MapMode::NotMapped;
}
+ QVideoFrameFormat format() const override { return {}; }
+
QSize getSize() const
{
if (!m_texture)
@@ -162,7 +156,7 @@ private:
ComPtr<ID3D11DeviceContext> m_ctx;
std::shared_ptr<QMutex> m_ctxMutex;
QSize m_size;
- QVideoFrame::MapMode m_mapMode = QVideoFrame::NotMapped;
+ QtVideo::MapMode m_mapMode = QtVideo::MapMode::NotMapped;
};
namespace {
@@ -344,7 +338,7 @@ QVideoFrameFormat getFrameFormat(QScreen* screen)
const QSize screenSize = getPhysicalSizePixels(screen);
QVideoFrameFormat format = { screenSize, QVideoFrameFormat::Format_BGRA8888 };
- format.setFrameRate(static_cast<int>(screen->refreshRate()));
+ format.setStreamFrameRate(static_cast<int>(screen->refreshRate()));
return format;
}
@@ -408,7 +402,7 @@ public:
if (bufSize != m_format.frameSize())
m_format.setFrameSize(bufSize);
- frame = { buffer.release(), format() };
+ frame = QVideoFramePrivate::createFrame(std::move(buffer), format());
}
return frame;
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegsymbolsresolveutils.cpp b/src/plugins/multimedia/ffmpeg/qffmpegsymbolsresolveutils.cpp
deleted file mode 100644
index c4a4d9666..000000000
--- a/src/plugins/multimedia/ffmpeg/qffmpegsymbolsresolveutils.cpp
+++ /dev/null
@@ -1,103 +0,0 @@
-// Copyright (C) 2023 The Qt Company Ltd.
-// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-
-#include "qffmpegsymbolsresolveutils_p.h"
-
-#include <qdebug.h>
-#include <algorithm>
-#include <qloggingcategory.h>
-
-QT_BEGIN_NAMESPACE
-
-static Q_LOGGING_CATEGORY(qLcLibSymbolsRelolver, "qt.multimedia.ffmpeg.libsymbolsresolver");
-
-LibSymbolsResolver::LibSymbolsResolver(const char *libName, size_t symbolsCount,
- LibsLoader libsLoader)
- : m_libName(libName), m_libsLoader(libsLoader)
-{
- Q_ASSERT(m_libName);
- Q_ASSERT(m_libsLoader);
- m_symbols.reserve(symbolsCount);
-}
-
-bool LibSymbolsResolver::resolve()
-{
- if (m_state.testAndSetRelaxed(Initial, Requested)
- || !m_state.testAndSetAcquire(Ready, Finished))
- return false;
-
- qCDebug(qLcLibSymbolsRelolver)
- << "Start" << m_libName << "symbols resolving:" << m_symbols.size() << "symbols";
-
- Q_ASSERT(m_symbols.size() == m_symbols.capacity());
-
- auto cleanup = qScopeGuard([this]() { m_symbols = {}; });
-
- auto libs = m_libsLoader();
- if (libs.empty()) {
- qCWarning(qLcLibSymbolsRelolver) << "Couldn't load" << m_libName << "library";
- return false;
- }
-
- std::vector<QFunctionPointer> functions(m_symbols.size());
-
- auto resolveElement = [&libs](const SymbolElement &element) {
- return resolve(libs, element.name);
- };
-
- std::transform(m_symbols.begin(), m_symbols.end(), functions.begin(), resolveElement);
-
- if (std::find(functions.begin(), functions.end(), nullptr) != functions.end()) {
- unload(libs);
- qCWarning(qLcLibSymbolsRelolver) << "Couldn't resolve" << m_libName << "symbols";
- return false;
- }
-
- for (size_t i = 0; i < functions.size(); ++i)
- m_symbols[i].setter(functions[i]);
-
- qCDebug(qLcLibSymbolsRelolver) << m_libName << "symbols resolved";
- return true;
-}
-
-void LibSymbolsResolver::registerSymbol(const char *name, FunctionSetter setter)
-{
- Q_ASSERT(setter);
- Q_ASSERT(m_symbols.size() < m_symbols.capacity());
-
- m_symbols.push_back({ name, setter });
-
- // handle the corner case: a user has initialized QtMM with global vars construction
- // and it happened before the symbols initializing
- if (m_symbols.size() == m_symbols.capacity() && !m_state.testAndSetRelease(Initial, Ready)
- && m_state.testAndSetRelease(Requested, Ready))
- resolve();
-}
-
-void LibSymbolsResolver::unload(const Libs &libs)
-{
- for (auto &lib : libs)
- lib->unload();
-}
-
-bool LibSymbolsResolver::tryLoad(const Libs &libs)
-{
- auto load = [](auto &lib) { return lib->load(); };
- if (std::all_of(libs.begin(), libs.end(), load))
- return true;
-
- unload(libs);
- return false;
-}
-
-QFunctionPointer LibSymbolsResolver::resolve(const Libs &libs, const char *symbolName)
-{
- for (auto &lib : libs)
- if (auto pointer = lib->resolve(symbolName))
- return pointer;
-
- qWarning() << "Cannot resolve symbol" << symbolName;
- return nullptr;
-}
-
-QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegsymbolsresolveutils_p.h b/src/plugins/multimedia/ffmpeg/qffmpegsymbolsresolveutils_p.h
deleted file mode 100644
index f7a2169d3..000000000
--- a/src/plugins/multimedia/ffmpeg/qffmpegsymbolsresolveutils_p.h
+++ /dev/null
@@ -1,142 +0,0 @@
-// Copyright (C) 2023 The Qt Company Ltd.
-// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-
-#ifndef QFFMPEGSYMBOLSRESOLVEUTILS_P_H
-#define QFFMPEGSYMBOLSRESOLVEUTILS_P_H
-
-//
-// W A R N I N G
-// -------------
-//
-// This file is not part of the Qt API. It exists purely as an
-// implementation detail. This header file may change from version to
-// version without notice, or even be removed.
-//
-// We mean it.
-//
-
-#include <QtCore/qlibrary.h>
-
-#include <qatomic.h>
-
-#include <vector>
-#include <memory>
-#include <tuple>
-
-QT_BEGIN_NAMESPACE
-
-using Libs = std::vector<std::unique_ptr<QLibrary>>;
-
-class LibSymbolsResolver
-{
-public:
- using FunctionSetter = void (*)(QFunctionPointer);
- using LibsLoader = Libs (*)();
-
- LibSymbolsResolver(const char *libName, size_t symbolsCount, LibsLoader libsLoader);
-
- bool resolve();
-
- void registerSymbol(const char *name, FunctionSetter setter);
-
- static void unload(const Libs &libs);
-
- static bool tryLoad(const Libs &libs);
-
-private:
- static QFunctionPointer resolve(const Libs &libs, const char *symbolName);
-
-private:
- const char *const m_libName;
- LibsLoader m_libsLoader;
-
- struct SymbolElement
- {
- const char *name;
- FunctionSetter setter;
- };
-
- std::vector<SymbolElement> m_symbols;
-
- enum State { Initial, Requested, Ready, Finished };
-
- QAtomicInteger<int> m_state = Initial;
-};
-
-QT_END_NAMESPACE
-
-template <typename T>
-struct DefaultReturn
-{
- template <typename... Arg>
- T operator()(Arg &&...) { return val; }
- T val;
-};
-
-template <>
-struct DefaultReturn<void>
-{
- DefaultReturn(int = 0){};
- template <typename... Arg>
- void operator()(Arg &&...) { }
-};
-
-template <typename...>
-struct FuncInfo;
-
-template <typename R, typename... A>
-struct FuncInfo<R(A...)>
-{
- using Return = R;
- using Args = std::tuple<A...>;
-};
-
-// clang-format off
-
-#define DEFINE_FUNC_IMPL(F, Vars, TypesWithVars, ReturnFunc) \
- using F##_ReturnType = FuncInfo<decltype(F)>::Return; \
- using q_##F##_Type = F##_ReturnType (*)(TypesWithVars(F)); \
- static q_##F##_Type q_##F = []() { \
- auto setter = [](QFunctionPointer ptr) { q_##F = (q_##F##_Type)ptr; }; \
- resolver()->registerSymbol(#F, setter); \
- return [](TypesWithVars(F)) { return ReturnFunc(Vars()); }; \
- }(); \
- extern "C" [[maybe_unused]] F##_ReturnType F(TypesWithVars(F)) { return q_##F(Vars()); }
-
-#define VAR(I) a##I
-#define VARS0()
-#define VARS1() VAR(0)
-#define VARS2() VARS1(), VAR(1)
-#define VARS3() VARS2(), VAR(2)
-#define VARS4() VARS3(), VAR(3)
-#define VARS5() VARS4(), VAR(4)
-#define VARS6() VARS5(), VAR(5)
-#define VARS7() VARS6(), VAR(6)
-#define VARS8() VARS7(), VAR(7)
-#define VARS9() VARS8(), VAR(8)
-#define VARS10() VARS9(), VAR(9)
-#define VARS11() VARS10(), VAR(10)
-
-#define TYPE_WITH_VAR(F, I) std::tuple_element_t<I, FuncInfo<decltype(F)>::Args> VAR(I)
-#define TYPES_WITH_VARS0(F)
-#define TYPES_WITH_VARS1(F) TYPE_WITH_VAR(F, 0)
-#define TYPES_WITH_VARS2(F) TYPES_WITH_VARS1(F), TYPE_WITH_VAR(F, 1)
-#define TYPES_WITH_VARS3(F) TYPES_WITH_VARS2(F), TYPE_WITH_VAR(F, 2)
-#define TYPES_WITH_VARS4(F) TYPES_WITH_VARS3(F), TYPE_WITH_VAR(F, 3)
-#define TYPES_WITH_VARS5(F) TYPES_WITH_VARS4(F), TYPE_WITH_VAR(F, 4)
-#define TYPES_WITH_VARS6(F) TYPES_WITH_VARS5(F), TYPE_WITH_VAR(F, 5)
-#define TYPES_WITH_VARS7(F) TYPES_WITH_VARS6(F), TYPE_WITH_VAR(F, 6)
-#define TYPES_WITH_VARS8(F) TYPES_WITH_VARS7(F), TYPE_WITH_VAR(F, 7)
-#define TYPES_WITH_VARS9(F) TYPES_WITH_VARS8(F), TYPE_WITH_VAR(F, 8)
-#define TYPES_WITH_VARS10(F) TYPES_WITH_VARS9(F), TYPE_WITH_VAR(F, 9)
-#define TYPES_WITH_VARS11(F) TYPES_WITH_VARS10(F), TYPE_WITH_VAR(F, 10)
-
-
-#define RET(F, ...) DefaultReturn<FuncInfo<decltype(F)>::Return>{__VA_ARGS__}
-
-#define DEFINE_FUNC(F, ArgsCount, /*Return value*/...) \
- DEFINE_FUNC_IMPL(F, VARS##ArgsCount, TYPES_WITH_VARS##ArgsCount, RET(F, __VA_ARGS__));
-
-// clang-format on
-
-#endif // QFFMPEGSYMBOLSRESOLVEUTILS_P_H
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegthread.cpp b/src/plugins/multimedia/ffmpeg/qffmpegthread.cpp
index c098bb68c..fb14ced54 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegthread.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegthread.cpp
@@ -11,8 +11,8 @@ using namespace QFFmpeg;
void ConsumerThread::stopAndDelete()
{
{
- QMutexLocker locker(&exitMutex);
- exit = true;
+ QMutexLocker locker(&m_loopDataMutex);
+ m_exit = true;
}
dataReady();
wait();
@@ -21,7 +21,7 @@ void ConsumerThread::stopAndDelete()
void ConsumerThread::dataReady()
{
- condition.wakeAll();
+ m_condition.wakeAll();
}
void ConsumerThread::run()
@@ -31,11 +31,11 @@ void ConsumerThread::run()
while (true) {
{
- QMutexLocker locker(&exitMutex);
- while (!hasData() && !exit)
- condition.wait(&exitMutex);
+ QMutexLocker locker(&m_loopDataMutex);
+ while (!hasData() && !m_exit)
+ m_condition.wait(&m_loopDataMutex);
- if (exit)
+ if (m_exit)
break;
}
@@ -45,4 +45,9 @@ void ConsumerThread::run()
cleanup();
}
+QMutexLocker<QMutex> ConsumerThread::lockLoopData() const
+{
+ return QMutexLocker(&m_loopDataMutex);
+}
+
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegthread_p.h b/src/plugins/multimedia/ffmpeg/qffmpegthread_p.h
index a9f80c36f..a7c5b0927 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegthread_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpegthread_p.h
@@ -65,7 +65,8 @@ protected:
/*!
Wake thread from sleep and process data until
- hasData() returns false.
+ hasData() returns false. The method is supposed to be invoked
+ right after the scope of QMutexLocker that lockLoopData returns.
*/
void dataReady();
@@ -74,12 +75,18 @@ protected:
*/
virtual bool hasData() const = 0;
+ /*!
+ Locks the loop data mutex. It must be used to protect loop data
+ like a queue of video frames.
+ */
+ QMutexLocker<QMutex> lockLoopData() const;
+
private:
void run() final;
- QMutex exitMutex; // Protects exit flag.
- QWaitCondition condition;
- bool exit = false;
+ mutable QMutex m_loopDataMutex;
+ QWaitCondition m_condition;
+ bool m_exit = false;
};
}
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp b/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp
index 5b79af5b3..0edf355d8 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp
@@ -27,7 +27,7 @@ static bool isFrameFlipped(const AVFrame& frame) {
static Q_LOGGING_CATEGORY(qLcFFmpegVideoBuffer, "qt.multimedia.ffmpeg.videobuffer");
QFFmpegVideoBuffer::QFFmpegVideoBuffer(AVFrameUPtr frame, AVRational pixelAspectRatio)
- : QAbstractVideoBuffer(QVideoFrame::NoHandle),
+ : QHwVideoBuffer(QVideoFrame::NoHandle),
m_frame(frame.get()),
m_size(qCalculateFrameSize({ frame->width, frame->height },
{ pixelAspectRatio.num, pixelAspectRatio.den }))
@@ -111,33 +111,7 @@ QVideoFrameFormat::ColorSpace QFFmpegVideoBuffer::colorSpace() const
QVideoFrameFormat::ColorTransfer QFFmpegVideoBuffer::colorTransfer() const
{
- switch (m_frame->color_trc) {
- case AVCOL_TRC_BT709:
- // The following three cases have transfer characteristics identical to BT709
- case AVCOL_TRC_BT1361_ECG:
- case AVCOL_TRC_BT2020_10:
- case AVCOL_TRC_BT2020_12:
- case AVCOL_TRC_SMPTE240M: // almost identical to bt709
- return QVideoFrameFormat::ColorTransfer_BT709;
- case AVCOL_TRC_GAMMA22:
- case AVCOL_TRC_SMPTE428 : // No idea, let's hope for the best...
- case AVCOL_TRC_IEC61966_2_1: // sRGB, close enough to 2.2...
- case AVCOL_TRC_IEC61966_2_4: // not quite, but probably close enough
- return QVideoFrameFormat::ColorTransfer_Gamma22;
- case AVCOL_TRC_GAMMA28:
- return QVideoFrameFormat::ColorTransfer_Gamma28;
- case AVCOL_TRC_SMPTE170M:
- return QVideoFrameFormat::ColorTransfer_BT601;
- case AVCOL_TRC_LINEAR:
- return QVideoFrameFormat::ColorTransfer_Linear;
- case AVCOL_TRC_SMPTE2084:
- return QVideoFrameFormat::ColorTransfer_ST2084;
- case AVCOL_TRC_ARIB_STD_B67:
- return QVideoFrameFormat::ColorTransfer_STD_B67;
- default:
- break;
- }
- return QVideoFrameFormat::ColorTransfer_Unknown;
+ return QFFmpeg::fromAvColorTransfer(m_frame->color_trc);
}
QVideoFrameFormat::ColorRange QFFmpegVideoBuffer::colorRange() const
@@ -168,12 +142,7 @@ float QFFmpegVideoBuffer::maxNits()
return maxNits;
}
-QVideoFrame::MapMode QFFmpegVideoBuffer::mapMode() const
-{
- return m_mode;
-}
-
-QAbstractVideoBuffer::MapData QFFmpegVideoBuffer::map(QVideoFrame::MapMode mode)
+QAbstractVideoBuffer::MapData QFFmpegVideoBuffer::map(QtVideo::MapMode mode)
{
if (!m_swFrame) {
Q_ASSERT(m_hwFrame && m_hwFrame->hw_frames_ctx);
@@ -191,16 +160,16 @@ QAbstractVideoBuffer::MapData QFFmpegVideoBuffer::map(QVideoFrame::MapMode mode)
MapData mapData;
auto *desc = QVideoTextureHelper::textureDescription(pixelFormat());
- mapData.nPlanes = desc->nplanes;
- for (int i = 0; i < mapData.nPlanes; ++i) {
+ mapData.planeCount = desc->nplanes;
+ for (int i = 0; i < mapData.planeCount; ++i) {
Q_ASSERT(m_swFrame->linesize[i] >= 0);
mapData.data[i] = m_swFrame->data[i];
mapData.bytesPerLine[i] = m_swFrame->linesize[i];
- mapData.size[i] = mapData.bytesPerLine[i]*desc->heightForPlane(m_swFrame->height, i);
+ mapData.dataSize[i] = mapData.bytesPerLine[i]*desc->heightForPlane(m_swFrame->height, i);
}
- if ((mode & QVideoFrame::WriteOnly) != 0 && m_hwFrame) {
+ if ((mode & QtVideo::MapMode::WriteOnly) != QtVideo::MapMode::NotMapped && m_hwFrame) {
m_type = QVideoFrame::NoHandle;
m_hwFrame.reset();
if (m_textures) {
@@ -219,7 +188,7 @@ void QFFmpegVideoBuffer::unmap()
{
// nothing to do here for SW buffers.
// Set NotMapped mode to ensure map/unmap/mapMode consisteny.
- m_mode = QVideoFrame::NotMapped;
+ m_mode = QtVideo::MapMode::NotMapped;
}
std::unique_ptr<QVideoFrameTextures> QFFmpegVideoBuffer::mapTextures(QRhi *)
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer_p.h b/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer_p.h
index 18a580528..c61c3f5ff 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer_p.h
@@ -15,9 +15,7 @@
// We mean it.
//
-#include <private/qtmultimediaglobal_p.h>
-#include <private/qabstractvideobuffer_p.h>
-#include <qvideoframe.h>
+#include <private/qhwvideobuffer_p.h>
#include <QtCore/qvariant.h>
#include "qffmpeg_p.h"
@@ -25,7 +23,7 @@
QT_BEGIN_NAMESPACE
-class QFFmpegVideoBuffer : public QAbstractVideoBuffer
+class QFFmpegVideoBuffer : public QHwVideoBuffer
{
public:
using AVFrameUPtr = QFFmpeg::AVFrameUPtr;
@@ -33,8 +31,7 @@ public:
QFFmpegVideoBuffer(AVFrameUPtr frame, AVRational pixelAspectRatio = { 1, 1 });
~QFFmpegVideoBuffer() override;
- QVideoFrame::MapMode mapMode() const override;
- MapData map(QVideoFrame::MapMode mode) override;
+ MapData map(QtVideo::MapMode mode) override;
void unmap() override;
virtual std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *) override;
@@ -65,7 +62,7 @@ private:
AVFrameUPtr m_swFrame;
QSize m_size;
QFFmpeg::TextureConverter m_textureConverter;
- QVideoFrame::MapMode m_mode = QVideoFrame::NotMapped;
+ QtVideo::MapMode m_mode = QtVideo::MapMode::NotMapped;
std::unique_ptr<QFFmpeg::TextureSet> m_textures;
};
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegvideosink.cpp b/src/plugins/multimedia/ffmpeg/qffmpegvideosink.cpp
index ec99d8dd2..2f02f09c1 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegvideosink.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegvideosink.cpp
@@ -2,6 +2,7 @@
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
#include <qffmpegvideosink_p.h>
#include <qffmpegvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
QT_BEGIN_NAMESPACE
@@ -21,7 +22,7 @@ void QFFmpegVideoSink::setRhi(QRhi *rhi)
void QFFmpegVideoSink::setVideoFrame(const QVideoFrame &frame)
{
- auto *buffer = dynamic_cast<QFFmpegVideoBuffer *>(frame.videoBuffer());
+ auto *buffer = dynamic_cast<QFFmpegVideoBuffer *>(QVideoFramePrivate::hwBuffer(frame));
if (buffer)
buffer->setTextureConverter(textureConverter);
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegwindowcapture_uwp.cpp b/src/plugins/multimedia/ffmpeg/qffmpegwindowcapture_uwp.cpp
index c8543b593..b36279cc3 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegwindowcapture_uwp.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegwindowcapture_uwp.cpp
@@ -3,7 +3,8 @@
#include "qffmpegwindowcapture_uwp_p.h"
#include "qffmpegsurfacecapturegrabber_p.h"
-#include <private/qabstractvideobuffer_p.h>
+#include "qabstractvideobuffer.h"
+#include <private/qvideoframe_p.h>
#include <unknwn.h>
#include <winrt/base.h>
@@ -82,21 +83,16 @@ struct MultithreadedApartment
class QUwpTextureVideoBuffer : public QAbstractVideoBuffer
{
public:
- QUwpTextureVideoBuffer(com_ptr<IDXGISurface> &&surface)
- : QAbstractVideoBuffer(QVideoFrame::NoHandle), m_surface(surface)
- {
- }
+ QUwpTextureVideoBuffer(com_ptr<IDXGISurface> &&surface) : m_surface(surface) { }
~QUwpTextureVideoBuffer() override { QUwpTextureVideoBuffer::unmap(); }
- QVideoFrame::MapMode mapMode() const override { return m_mapMode; }
-
- MapData map(QVideoFrame::MapMode mode) override
+ MapData map(QtVideo::MapMode mode) override
{
- if (m_mapMode != QVideoFrame::NotMapped)
+ if (m_mapMode != QtVideo::MapMode::NotMapped)
return {};
- if (mode == QVideoFrame::ReadOnly) {
+ if (mode == QtVideo::MapMode::ReadOnly) {
DXGI_MAPPED_RECT rect = {};
HRESULT hr = m_surface->Map(&rect, DXGI_MAP_READ);
if (SUCCEEDED(hr)) {
@@ -104,12 +100,12 @@ public:
hr = m_surface->GetDesc(&desc);
MapData md = {};
- md.nPlanes = 1;
+ md.planeCount = 1;
md.bytesPerLine[0] = rect.Pitch;
md.data[0] = rect.pBits;
- md.size[0] = rect.Pitch * desc.Height;
+ md.dataSize[0] = rect.Pitch * desc.Height;
- m_mapMode = QVideoFrame::ReadOnly;
+ m_mapMode = QtVideo::MapMode::ReadOnly;
return md;
} else {
@@ -123,18 +119,20 @@ public:
void unmap() override
{
- if (m_mapMode == QVideoFrame::NotMapped)
+ if (m_mapMode == QtVideo::MapMode::NotMapped)
return;
const HRESULT hr = m_surface->Unmap();
if (FAILED(hr))
qCDebug(qLcWindowCaptureUwp) << "Failed to unmap surface" << errorString(hr);
- m_mapMode = QVideoFrame::NotMapped;
+ m_mapMode = QtVideo::MapMode::NotMapped;
}
+ QVideoFrameFormat format() const override { return {}; }
+
private:
- QVideoFrame::MapMode m_mapMode = QVideoFrame::NotMapped;
+ QtVideo::MapMode m_mapMode = QtVideo::MapMode::NotMapped;
com_ptr<IDXGISurface> m_surface;
};
@@ -306,7 +304,7 @@ public:
const qreal refreshRate = getMonitorRefreshRateHz(monitor);
- m_format.setFrameRate(refreshRate);
+ m_format.setStreamFrameRate(refreshRate);
setFrameRate(refreshRate);
addFrameCallback(capture, &QFFmpegWindowCaptureUwp::newVideoFrame);
@@ -354,7 +352,8 @@ protected:
m_format.setFrameSize(size);
- return QVideoFrame(new QUwpTextureVideoBuffer(std::move(texture)), m_format);
+ return QVideoFramePrivate::createFrame(
+ std::make_unique<QUwpTextureVideoBuffer>(std::move(texture)), m_format);
} catch (const winrt::hresult_error &err) {
diff --git a/src/plugins/multimedia/ffmpeg/qgdiwindowcapture.cpp b/src/plugins/multimedia/ffmpeg/qgdiwindowcapture.cpp
index 4188ed4c2..97742043c 100644
--- a/src/plugins/multimedia/ffmpeg/qgdiwindowcapture.cpp
+++ b/src/plugins/multimedia/ffmpeg/qgdiwindowcapture.cpp
@@ -7,6 +7,7 @@
#include "qffmpegsurfacecapturegrabber_p.h"
#include "private/qcapturablewindow_p.h"
#include "private/qmemoryvideobuffer_p.h"
+#include "private/qvideoframe_p.h"
#include <qt_windows.h>
#include <QtCore/qloggingcategory.h>
@@ -104,7 +105,7 @@ private:
}
QVideoFrameFormat format(size, QVideoFrameFormat::Format_BGRX8888);
- format.setFrameRate(frameRate());
+ format.setStreamFrameRate(frameRate());
m_format = format;
return true;
}
@@ -155,7 +156,8 @@ private:
return {};
}
- return QVideoFrame(new QMemoryVideoBuffer(array, bytesPerLine), m_format);
+ return QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(std::move(array), bytesPerLine), m_format);
}
private:
diff --git a/src/plugins/multimedia/ffmpeg/qgrabwindowsurfacecapture.cpp b/src/plugins/multimedia/ffmpeg/qgrabwindowsurfacecapture.cpp
index df16a8f56..4bd1f6a65 100644
--- a/src/plugins/multimedia/ffmpeg/qgrabwindowsurfacecapture.cpp
+++ b/src/plugins/multimedia/ffmpeg/qgrabwindowsurfacecapture.cpp
@@ -8,6 +8,7 @@
#include "private/qimagevideobuffer_p.h"
#include "private/qcapturablewindow_p.h"
+#include "private/qvideoframe_p.h"
#include "qscreen.h"
#include "qmutex.h"
@@ -136,7 +137,7 @@ private:
QVideoFrameFormat format(img.size(),
QVideoFrameFormat::pixelFormatFromImageFormat(img.format()));
- format.setFrameRate(screen->refreshRate());
+ format.setStreamFrameRate(screen->refreshRate());
updateFormat(format);
if (!format.isValid()) {
@@ -145,7 +146,7 @@ private:
return {};
}
- return QVideoFrame(buffer.release(), format);
+ return QVideoFramePrivate::createFrame(std::move(buffer), std::move(format));
}
private:
diff --git a/src/plugins/multimedia/ffmpeg/qopenglvideobuffer.cpp b/src/plugins/multimedia/ffmpeg/qopenglvideobuffer.cpp
index c3e739ffd..4ac08fd24 100644
--- a/src/plugins/multimedia/ffmpeg/qopenglvideobuffer.cpp
+++ b/src/plugins/multimedia/ffmpeg/qopenglvideobuffer.cpp
@@ -55,19 +55,14 @@ static bool setCurrentOpenGLContext()
}
QOpenGLVideoBuffer::QOpenGLVideoBuffer(std::unique_ptr<QOpenGLFramebufferObject> fbo)
- : QAbstractVideoBuffer(QVideoFrame::RhiTextureHandle), m_fbo(std::move(fbo))
+ : QHwVideoBuffer(QVideoFrame::RhiTextureHandle), m_fbo(std::move(fbo))
{
Q_ASSERT(m_fbo);
}
QOpenGLVideoBuffer::~QOpenGLVideoBuffer() { }
-QVideoFrame::MapMode QOpenGLVideoBuffer::mapMode() const
-{
- return m_imageBuffer ? m_imageBuffer->mapMode() : QVideoFrame::NotMapped;
-}
-
-QAbstractVideoBuffer::MapData QOpenGLVideoBuffer::map(QVideoFrame::MapMode mode)
+QAbstractVideoBuffer::MapData QOpenGLVideoBuffer::map(QtVideo::MapMode mode)
{
return ensureImageBuffer().map(mode);
}
diff --git a/src/plugins/multimedia/ffmpeg/qopenglvideobuffer_p.h b/src/plugins/multimedia/ffmpeg/qopenglvideobuffer_p.h
index bbbb2f2c7..6e62625d0 100644
--- a/src/plugins/multimedia/ffmpeg/qopenglvideobuffer_p.h
+++ b/src/plugins/multimedia/ffmpeg/qopenglvideobuffer_p.h
@@ -15,21 +15,20 @@
// We mean it.
//
-#include <private/qabstractvideobuffer_p.h>
+#include <private/qhwvideobuffer_p.h>
QT_BEGIN_NAMESPACE
class QImageVideoBuffer;
class QOpenGLFramebufferObject;
-class QOpenGLVideoBuffer : public QAbstractVideoBuffer
+class QOpenGLVideoBuffer : public QHwVideoBuffer
{
public:
QOpenGLVideoBuffer(std::unique_ptr<QOpenGLFramebufferObject> fbo);
~QOpenGLVideoBuffer();
- QVideoFrame::MapMode mapMode() const override;
- MapData map(QVideoFrame::MapMode mode) override;
+ MapData map(QtVideo::MapMode mode) override;
void unmap() override;
quint64 textureHandle(QRhi *, int plane) const override;
diff --git a/src/plugins/multimedia/ffmpeg/qv4l2camera.cpp b/src/plugins/multimedia/ffmpeg/qv4l2camera.cpp
index 2086af10d..800460f14 100644
--- a/src/plugins/multimedia/ffmpeg/qv4l2camera.cpp
+++ b/src/plugins/multimedia/ffmpeg/qv4l2camera.cpp
@@ -8,6 +8,7 @@
#include <private/qcameradevice_p.h>
#include <private/qmultimediautils_p.h>
#include <private/qmemoryvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
#include <private/qcore_unix_p.h>
#include <qsocketnotifier.h>
@@ -373,8 +374,8 @@ void QV4L2Camera::readFrame()
return;
}
- auto videoBuffer = new QMemoryVideoBuffer(buffer->data, m_bytesPerLine);
- QVideoFrame frame(videoBuffer, frameFormat());
+ auto videoBuffer = std::make_unique<QMemoryVideoBuffer>(buffer->data, m_bytesPerLine);
+ QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(videoBuffer), frameFormat());
auto &v4l2Buffer = buffer->v4l2Buffer;
@@ -394,7 +395,7 @@ void QV4L2Camera::readFrame()
void QV4L2Camera::setCameraBusy()
{
m_cameraBusy = true;
- emit error(QCamera::CameraError, QLatin1String("Camera is in use"));
+ updateError(QCamera::CameraError, QLatin1String("Camera is in use"));
}
void QV4L2Camera::initV4L2Controls()
@@ -412,7 +413,7 @@ void QV4L2Camera::initV4L2Controls()
qCWarning(qLcV4L2Camera) << "Unable to open the camera" << deviceName
<< "for read to query the parameter info:"
<< qt_error_string(errno);
- emit error(QCamera::CameraError, QLatin1String("Cannot open camera"));
+ updateError(QCamera::CameraError, QLatin1String("Cannot open camera"));
return;
}
@@ -651,7 +652,7 @@ void QV4L2Camera::initV4L2MemoryTransfer()
if (!m_memoryTransfer) {
qCWarning(qLcV4L2Camera) << "Cannot init v4l2 memory transfer," << qt_error_string(errno);
- emit error(QCamera::CameraError, QLatin1String("Cannot init V4L2 memory transfer"));
+ updateError(QCamera::CameraError, QLatin1String("Cannot init V4L2 memory transfer"));
}
}
diff --git a/src/plugins/multimedia/ffmpeg/qwindowscamera.cpp b/src/plugins/multimedia/ffmpeg/qwindowscamera.cpp
index 39aac3527..61a4ebe52 100644
--- a/src/plugins/multimedia/ffmpeg/qwindowscamera.cpp
+++ b/src/plugins/multimedia/ffmpeg/qwindowscamera.cpp
@@ -8,6 +8,7 @@
#include <private/qmemoryvideobuffer_p.h>
#include <private/qwindowsmfdefs_p.h>
#include <private/qwindowsmultimediautils_p.h>
+#include <private/qvideoframe_p.h>
#include <private/qcomobject_p.h>
#include <mfapi.h>
@@ -186,7 +187,7 @@ public:
{
if (FAILED(status)) {
const std::string msg{ std::system_category().message(status) };
- emit m_windowsCamera.error(QCamera::CameraError, QString::fromStdString(msg));
+ m_windowsCamera.updateError(QCamera::CameraError, QString::fromStdString(msg));
return;
}
@@ -198,7 +199,10 @@ public:
BYTE *buffer = nullptr;
if (SUCCEEDED(mediaBuffer->Lock(&buffer, nullptr, &bufLen))) {
QByteArray bytes(reinterpret_cast<char*>(buffer), qsizetype(bufLen));
- QVideoFrame frame(new QMemoryVideoBuffer(bytes, m_videoFrameStride), m_frameFormat);
+ auto buffer = std::make_unique<QMemoryVideoBuffer>(std::move(bytes),
+ m_videoFrameStride);
+ QVideoFrame frame =
+ QVideoFramePrivate::createFrame(std::move(buffer), m_frameFormat);
// WMF uses 100-nanosecond units, Qt uses microseconds
frame.setStartTime(timestamp / 10);
diff --git a/src/plugins/multimedia/ffmpeg/qx11surfacecapture.cpp b/src/plugins/multimedia/ffmpeg/qx11surfacecapture.cpp
index 1f04703f1..d9343cdfe 100644
--- a/src/plugins/multimedia/ffmpeg/qx11surfacecapture.cpp
+++ b/src/plugins/multimedia/ffmpeg/qx11surfacecapture.cpp
@@ -11,10 +11,10 @@
#include <qguiapplication.h>
#include <qloggingcategory.h>
-#include "private/qabstractvideobuffer_p.h"
#include "private/qcapturablewindow_p.h"
#include "private/qmemoryvideobuffer_p.h"
#include "private/qvideoframeconversionhelper_p.h"
+#include "private/qvideoframe_p.h"
#include <X11/Xlib.h>
#include <sys/shm.h>
@@ -245,7 +245,7 @@ private:
}
QVideoFrameFormat format(QSize(m_xImage->width, m_xImage->height), pixelFormat);
- format.setFrameRate(frameRate());
+ format.setStreamFrameRate(frameRate());
m_format = format;
}
@@ -276,8 +276,8 @@ protected:
qCopyPixelsWithAlphaMask(pixelDst, pixelSrc, pixelCount, m_format.pixelFormat(),
xImageAlphaVaries);
- auto buffer = new QMemoryVideoBuffer(data, m_xImage->bytes_per_line);
- return QVideoFrame(buffer, m_format);
+ auto buffer = std::make_unique<QMemoryVideoBuffer>(data, m_xImage->bytes_per_line);
+ return QVideoFramePrivate::createFrame(std::move(buffer), m_format);
}
private:
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder.cpp
index 57b798fed..9c8be91d1 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder.cpp
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder.cpp
@@ -1,6 +1,7 @@
// Copyright (C) 2024 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
#include "qffmpegaudioencoder_p.h"
+#include "qffmpegrecordingengineutils_p.h"
#include "qffmpegaudioencoderutils_p.h"
#include "qffmpegaudioinput_p.h"
#include "qffmpegencoderoptions_p.h"
@@ -15,16 +16,15 @@ namespace QFFmpeg {
static Q_LOGGING_CATEGORY(qLcFFmpegAudioEncoder, "qt.multimedia.ffmpeg.audioencoder");
-AudioEncoder::AudioEncoder(RecordingEngine *encoder, QFFmpegAudioInput *input,
+AudioEncoder::AudioEncoder(RecordingEngine &recordingEngine, const QAudioFormat &sourceFormat,
const QMediaEncoderSettings &settings)
- : EncoderThread(encoder), m_input(input), m_settings(settings)
+ : EncoderThread(recordingEngine), m_format(sourceFormat), m_settings(settings)
{
setObjectName(QLatin1String("AudioEncoder"));
qCDebug(qLcFFmpegAudioEncoder) << "AudioEncoder" << settings.audioCodec();
- m_format = input->device.preferredFormat();
auto codecID = QFFmpegMediaFormatInfo::codecIdForAudioCodec(settings.audioCodec());
- Q_ASSERT(avformat_query_codec(encoder->avFormatContext()->oformat, codecID,
+ Q_ASSERT(avformat_query_codec(recordingEngine.avFormatContext()->oformat, codecID,
FF_COMPLIANCE_NORMAL));
const AVAudioFormat requestedAudioFormat(m_format);
@@ -38,8 +38,8 @@ AudioEncoder::AudioEncoder(RecordingEngine *encoder, QFFmpegAudioInput *input,
Q_ASSERT(m_avCodec);
- m_stream = avformat_new_stream(encoder->avFormatContext(), nullptr);
- m_stream->id = encoder->avFormatContext()->nb_streams - 1;
+ m_stream = avformat_new_stream(recordingEngine.avFormatContext(), nullptr);
+ m_stream->id = recordingEngine.avFormatContext()->nb_streams - 1;
m_stream->codecpar->codec_type = AVMEDIA_TYPE_AUDIO;
m_stream->codecpar->codec_id = codecID;
#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
@@ -82,40 +82,56 @@ void AudioEncoder::open()
applyAudioEncoderOptions(m_settings, m_avCodec->name, m_codecContext.get(), opts);
applyExperimentalCodecOptions(m_avCodec, opts);
- int res = avcodec_open2(m_codecContext.get(), m_avCodec, opts);
+ const int res = avcodec_open2(m_codecContext.get(), m_avCodec, opts);
+
qCDebug(qLcFFmpegAudioEncoder) << "audio codec opened" << res;
qCDebug(qLcFFmpegAudioEncoder) << "audio codec params: fmt=" << m_codecContext->sample_fmt
<< "rate=" << m_codecContext->sample_rate;
- const AVAudioFormat requestedAudioFormat(m_format);
- const AVAudioFormat codecAudioFormat(m_codecContext.get());
-
- if (requestedAudioFormat != codecAudioFormat)
- m_resampler = createResampleContext(requestedAudioFormat, codecAudioFormat);
+ updateResampler();
}
void AudioEncoder::addBuffer(const QAudioBuffer &buffer)
{
- QMutexLocker locker(&m_queueMutex);
- if (!m_paused.loadRelaxed()) {
+ if (!buffer.isValid()) {
+ setEndOfSourceStream(true);
+ return;
+ }
+
+ setEndOfSourceStream(false);
+
+ {
+ const std::chrono::microseconds bufferDuration(buffer.duration());
+ auto guard = lockLoopData();
+
+ if (m_paused)
+ return;
+
+ // TODO: apply logic with canPushFrame
+
m_audioBufferQueue.push(buffer);
- locker.unlock();
- dataReady();
+ m_queueDuration += bufferDuration;
}
+
+ dataReady();
}
QAudioBuffer AudioEncoder::takeBuffer()
{
- QMutexLocker locker(&m_queueMutex);
- return dequeueIfPossible(m_audioBufferQueue);
+ auto locker = lockLoopData();
+ QAudioBuffer result = dequeueIfPossible(m_audioBufferQueue);
+ m_queueDuration -= std::chrono::microseconds(result.duration());
+ return result;
}
void AudioEncoder::init()
{
open();
- if (m_input) {
- m_input->setFrameSize(m_codecContext->frame_size);
- }
+
+ // TODO: try to address this dependency here.
+ if (auto input = qobject_cast<QFFmpegAudioInput *>(source()))
+ input->setFrameSize(m_codecContext->frame_size);
+
qCDebug(qLcFFmpegAudioEncoder) << "AudioEncoder::init started audio device thread.";
}
@@ -123,6 +139,14 @@ void AudioEncoder::cleanup()
{
while (!m_audioBufferQueue.empty())
processOne();
+
+ if (m_avFrameSamplesOffset) {
+ // the size of the last frame can be less than m_codecContext->frame_size
+
+ retrievePackets();
+ sendPendingFrameToAVCodec();
+ }
+
while (avcodec_send_frame(m_codecContext.get(), nullptr) == AVERROR(EAGAIN))
retrievePackets();
retrievePackets();
@@ -130,7 +154,6 @@ void AudioEncoder::cleanup()
bool AudioEncoder::hasData() const
{
- QMutexLocker locker(&m_queueMutex);
return !m_audioBufferQueue.empty();
}
@@ -153,68 +176,167 @@ void AudioEncoder::retrievePackets()
// qCDebug(qLcFFmpegEncoder) << "writing audio packet" << packet->size << packet->pts <<
// packet->dts;
packet->stream_index = m_stream->id;
- m_encoder->getMuxer()->addPacket(std::move(packet));
+ m_recordingEngine.getMuxer()->addPacket(std::move(packet));
}
}
void AudioEncoder::processOne()
{
QAudioBuffer buffer = takeBuffer();
- if (!buffer.isValid())
- return;
+ Q_ASSERT(buffer.isValid());
+
+ // qCDebug(qLcFFmpegEncoder) << "new audio buffer" << buffer.byteCount() << buffer.format()
+ // << buffer.frameCount() << codec->frame_size;
if (buffer.format() != m_format) {
- // should we recreate recreate resampler here?
- qWarning() << "Get invalid audio format:" << buffer.format() << ", expected:" << m_format;
- return;
+ m_format = buffer.format();
+ updateResampler();
}
- // qCDebug(qLcFFmpegEncoder) << "new audio buffer" << buffer.byteCount() << buffer.format()
- // << buffer.frameCount() << codec->frame_size;
- retrievePackets();
+ int samplesOffset = 0;
+ const int bufferSamplesCount = static_cast<int>(buffer.frameCount());
+
+ while (samplesOffset < bufferSamplesCount)
+ handleAudioData(buffer.constData<uint8_t>(), samplesOffset, bufferSamplesCount);
+
+ Q_ASSERT(samplesOffset == bufferSamplesCount);
+}
+
+bool AudioEncoder::checkIfCanPushFrame() const
+{
+ if (isRunning())
+ return m_audioBufferQueue.size() <= 1 || m_queueDuration < m_maxQueueDuration;
+ if (!isFinished())
+ return m_audioBufferQueue.empty();
+
+ return false;
+}
+
+void AudioEncoder::updateResampler()
+{
+ m_resampler.reset();
+
+ const AVAudioFormat requestedAudioFormat(m_format);
+ const AVAudioFormat codecAudioFormat(m_codecContext.get());
+
+ if (requestedAudioFormat != codecAudioFormat)
+ m_resampler = createResampleContext(requestedAudioFormat, codecAudioFormat);
+
+ qCDebug(qLcFFmpegAudioEncoder)
+ << "Resampler updated. Input format:" << m_format << "Resampler:" << m_resampler.get();
+}
+
+void AudioEncoder::ensurePendingFrame(int availableSamplesCount)
+{
+ Q_ASSERT(availableSamplesCount >= 0);
- auto frame = makeAVFrame();
- frame->format = m_codecContext->sample_fmt;
+ if (m_avFrame)
+ return;
+
+ m_avFrame = makeAVFrame();
+
+ m_avFrame->format = m_codecContext->sample_fmt;
#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
- frame->channel_layout = m_codecContext->channel_layout;
- frame->channels = m_codecContext->channels;
+ m_avFrame->channel_layout = m_codecContext->channel_layout;
+ m_avFrame->channels = m_codecContext->channels;
#else
- frame->ch_layout = m_codecContext->ch_layout;
+ m_avFrame->ch_layout = m_codecContext->ch_layout;
#endif
- frame->sample_rate = m_codecContext->sample_rate;
- frame->nb_samples = buffer.frameCount();
- if (frame->nb_samples)
- av_frame_get_buffer(frame.get(), 0);
+ m_avFrame->sample_rate = m_codecContext->sample_rate;
- if (m_resampler) {
- const uint8_t *data = buffer.constData<uint8_t>();
- swr_convert(m_resampler.get(), frame->extended_data, frame->nb_samples, &data,
- frame->nb_samples);
- } else {
- memcpy(frame->buf[0]->data, buffer.constData<uint8_t>(), buffer.byteCount());
- }
+ const bool isFixedFrameSize = !(m_avCodec->capabilities & AV_CODEC_CAP_VARIABLE_FRAME_SIZE)
+ && m_codecContext->frame_size;
+ m_avFrame->nb_samples = isFixedFrameSize ? m_codecContext->frame_size : availableSamplesCount;
+ if (m_avFrame->nb_samples)
+ av_frame_get_buffer(m_avFrame.get(), 0);
const auto &timeBase = m_stream->time_base;
const auto pts = timeBase.den && timeBase.num
? timeBase.den * m_samplesWritten / (m_codecContext->sample_rate * timeBase.num)
: m_samplesWritten;
- setAVFrameTime(*frame, pts, timeBase);
- m_samplesWritten += buffer.frameCount();
+ setAVFrameTime(*m_avFrame, pts, timeBase);
+}
+
+void AudioEncoder::writeDataToPendingFrame(const uchar *data, int &samplesOffset, int samplesCount)
+{
+ Q_ASSERT(m_avFrame);
+ Q_ASSERT(m_avFrameSamplesOffset <= m_avFrame->nb_samples);
+
+ const int bytesPerSample = av_get_bytes_per_sample(m_codecContext->sample_fmt);
+ const bool isPlanar = av_sample_fmt_is_planar(m_codecContext->sample_fmt);
+
+#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
+ const int channelsCount = m_codecContext->channels;
+#else
+ const int channelsCount = m_codecContext->ch_layout.nb_channels;
+#endif
+
+ const int audioDataOffset = isPlanar ? bytesPerSample * m_avFrameSamplesOffset
+ : bytesPerSample * m_avFrameSamplesOffset * channelsCount;
+
+ const int planesCount = isPlanar ? channelsCount : 1;
+ m_avFramePlanesData.resize(planesCount);
+ for (int plane = 0; plane < planesCount; ++plane)
+ m_avFramePlanesData[plane] = m_avFrame->extended_data[plane] + audioDataOffset;
- qint64 time = m_format.durationForFrames(m_samplesWritten);
- m_encoder->newTimeStamp(time / 1000);
+ const int samplesToRead =
+ std::min(m_avFrame->nb_samples - m_avFrameSamplesOffset, samplesCount - samplesOffset);
- // qCDebug(qLcFFmpegEncoder) << "sending audio frame" << buffer.byteCount() << frame->pts <<
- // ((double)buffer.frameCount()/frame->sample_rate);
+ data += m_format.bytesForFrames(samplesOffset);
- int ret = avcodec_send_frame(m_codecContext.get(), frame.get());
+ if (m_resampler) {
+ m_avFrameSamplesOffset += swr_convert(m_resampler.get(), m_avFramePlanesData.data(),
+ samplesToRead, &data, samplesToRead);
+ } else {
+ Q_ASSERT(planesCount == 1);
+ m_avFrameSamplesOffset += samplesToRead;
+ memcpy(m_avFramePlanesData[0], data, m_format.bytesForFrames(samplesToRead));
+ }
+
+ samplesOffset += samplesToRead;
+}
+
+void AudioEncoder::sendPendingFrameToAVCodec()
+{
+ Q_ASSERT(m_avFrame);
+ Q_ASSERT(m_avFrameSamplesOffset <= m_avFrame->nb_samples);
+
+ m_avFrame->nb_samples = m_avFrameSamplesOffset;
+
+ m_samplesWritten += m_avFrameSamplesOffset;
+
+ const qint64 time = m_format.durationForFrames(m_samplesWritten);
+ m_recordingEngine.newTimeStamp(time / 1000);
+
+ // qCDebug(qLcFFmpegEncoder) << "sending audio frame" << buffer.byteCount() << frame->pts <<
+ // ((double)buffer.frameCount()/frame->sample_rate);
+
+ int ret = avcodec_send_frame(m_codecContext.get(), m_avFrame.get());
if (ret < 0) {
- char errStr[1024];
- av_strerror(ret, errStr, 1024);
- // qCDebug(qLcFFmpegEncoder) << "error sending frame" << ret << errStr;
+ char errStr[AV_ERROR_MAX_STRING_SIZE];
+ av_strerror(ret, errStr, AV_ERROR_MAX_STRING_SIZE);
+ qCDebug(qLcFFmpegAudioEncoder) << "error sending frame" << ret << errStr;
}
+
+ m_avFrame = nullptr;
+ m_avFrameSamplesOffset = 0;
+ std::fill(m_avFramePlanesData.begin(), m_avFramePlanesData.end(), nullptr);
}
+void AudioEncoder::handleAudioData(const uchar *data, int &samplesOffset, int samplesCount)
+{
+ ensurePendingFrame(samplesCount - samplesOffset);
+
+ writeDataToPendingFrame(data, samplesOffset, samplesCount);
+
+ // The frame is not ready yet
+ if (m_avFrameSamplesOffset < m_avFrame->nb_samples)
+ return;
+
+ retrievePackets();
+
+ sendPendingFrameToAVCodec();
+}
} // namespace QFFmpeg
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder_p.h
index c4dc20eac..4408ff54f 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder_p.h
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder_p.h
@@ -8,47 +8,65 @@
#include "private/qplatformmediarecorder_p.h"
#include <qaudiobuffer.h>
#include <queue>
+#include <chrono>
QT_BEGIN_NAMESPACE
class QMediaEncoderSettings;
-class QFFmpegAudioInput;
namespace QFFmpeg {
class AudioEncoder : public EncoderThread
{
public:
- AudioEncoder(RecordingEngine *encoder, QFFmpegAudioInput *input,
+ AudioEncoder(RecordingEngine &recordingEngine, const QAudioFormat &sourceFormat,
const QMediaEncoderSettings &settings);
- void open();
void addBuffer(const QAudioBuffer &buffer);
- QFFmpegAudioInput *audioInput() const { return m_input; }
+protected:
+ bool checkIfCanPushFrame() const override;
private:
+ void open();
+
QAudioBuffer takeBuffer();
void retrievePackets();
+ void updateResampler();
void init() override;
void cleanup() override;
bool hasData() const override;
void processOne() override;
+ void handleAudioData(const uchar *data, int &samplesOffset, int samplesCount);
+
+ void ensurePendingFrame(int availableSamplesCount);
+
+ void writeDataToPendingFrame(const uchar *data, int &samplesOffset, int samplesCount);
+
+ void sendPendingFrameToAVCodec();
+
private:
- mutable QMutex m_queueMutex;
std::queue<QAudioBuffer> m_audioBufferQueue;
+ // Arbitrarily chosen to limit audio queue duration
+ const std::chrono::microseconds m_maxQueueDuration = std::chrono::seconds(5);
+
+ std::chrono::microseconds m_queueDuration{ 0 };
+
AVStream *m_stream = nullptr;
AVCodecContextUPtr m_codecContext;
- QFFmpegAudioInput *m_input = nullptr;
QAudioFormat m_format;
SwrContextUPtr m_resampler;
qint64 m_samplesWritten = 0;
const AVCodec *m_avCodec = nullptr;
QMediaEncoderSettings m_settings;
+
+ AVFrameUPtr m_avFrame;
+ int m_avFrameSamplesOffset = 0;
+ std::vector<uint8_t *> m_avFramePlanesData;
};
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread.cpp
index 97c8fb7a9..27dcc23af 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread.cpp
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread.cpp
@@ -1,18 +1,31 @@
// Copyright (C) 2024 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
#include "qffmpegencoderthread_p.h"
+#include "qmetaobject.h"
QT_BEGIN_NAMESPACE
namespace QFFmpeg {
-EncoderThread::EncoderThread(RecordingEngine *encoder) : m_encoder(encoder) { }
+EncoderThread::EncoderThread(RecordingEngine &recordingEngine) : m_recordingEngine(recordingEngine)
+{
+}
+
+void EncoderThread::setPaused(bool paused)
+{
+ auto guard = lockLoopData();
+ m_paused = paused;
+}
-void EncoderThread::setPaused(bool b)
+void EncoderThread::setEndOfSourceStream(bool isEnd)
{
- m_paused.storeRelease(b);
+ m_endOfSourceStream = isEnd;
+ if (isEnd)
+ emit endOfSourceStream();
}
} // namespace QFFmpeg
QT_END_NAMESPACE
+
+#include "moc_qffmpegencoderthread_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread_p.h
index 6ef5e97f6..8cba7bf69 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread_p.h
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread_p.h
@@ -4,6 +4,9 @@
#define QFFMPEGENCODERTHREAD_P_H
#include "qffmpegthread_p.h"
+#include "qpointer.h"
+
+#include "private/qmediainputencoderinterface_p.h"
QT_BEGIN_NAMESPACE
@@ -11,15 +14,49 @@ namespace QFFmpeg {
class RecordingEngine;
-class EncoderThread : public ConsumerThread
+class EncoderThread : public ConsumerThread, public QMediaInputEncoderInterface
{
+ Q_OBJECT
public:
- EncoderThread(RecordingEngine *encoder);
- virtual void setPaused(bool b);
+ EncoderThread(RecordingEngine &recordingEngine);
+
+ void setPaused(bool paused);
+
+ void setSource(QObject *source) { m_source = source; }
+
+ QObject *source() const { return m_source; }
+
+ bool canPushFrame() const override { return m_canPushFrame.load(std::memory_order_relaxed); }
+
+ void setEndOfSourceStream(bool isEnd);
+
+ bool isEndOfSourceStream() const { return m_endOfSourceStream; }
+
+protected:
+ void updateCanPushFrame();
+
+ virtual bool checkIfCanPushFrame() const = 0;
+
+ auto lockLoopData()
+ {
+ return QScopeGuard([this, locker = ConsumerThread::lockLoopData()]() mutable {
+ const bool canPush = !m_paused && checkIfCanPushFrame();
+ locker.unlock();
+ if (m_canPushFrame.exchange(canPush, std::memory_order_relaxed) != canPush)
+ emit canPushFrameChanged();
+ });
+ }
+
+Q_SIGNALS:
+ void canPushFrameChanged();
+ void endOfSourceStream();
protected:
- QAtomicInteger<bool> m_paused = false;
- RecordingEngine *m_encoder = nullptr;
+ bool m_paused = false;
+ bool m_endOfSourceStream = false;
+ std::atomic_bool m_canPushFrame = false;
+ RecordingEngine &m_recordingEngine;
+ QPointer<QObject> m_source;
};
} // namespace QFFmpeg
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer.cpp
new file mode 100644
index 000000000..4f8c21bd5
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer.cpp
@@ -0,0 +1,165 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegencodinginitializer_p.h"
+#include "qffmpegrecordingengineutils_p.h"
+#include "qffmpegrecordingengine_p.h"
+#include "qffmpegaudioinput_p.h"
+#include "qvideoframe.h"
+
+#include "private/qplatformvideoframeinput_p.h"
+#include "private/qplatformaudiobufferinput_p.h"
+#include "private/qplatformaudiobufferinput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+EncodingInitializer::EncodingInitializer(RecordingEngine &engine) : m_recordingEngine(engine) { }
+
+EncodingInitializer::~EncodingInitializer()
+{
+ for (QObject *source : m_pendingSources)
+ setEncoderInterface(source, nullptr);
+}
+
+void EncodingInitializer::start(const std::vector<QPlatformAudioBufferInputBase *> &audioSources,
+ const std::vector<QPlatformVideoSource *> &videoSources)
+{
+ for (auto source : audioSources) {
+ if (auto audioInput = qobject_cast<QFFmpegAudioInput *>(source))
+ m_recordingEngine.addAudioInput(audioInput);
+ else if (auto audioBufferInput = qobject_cast<QPlatformAudioBufferInput *>(source))
+ addAudioBufferInput(audioBufferInput);
+ else
+ Q_ASSERT(!"Undefined source type");
+ }
+
+ for (auto source : videoSources)
+ addVideoSource(source);
+
+ tryStartRecordingEngine();
+}
+
+void EncodingInitializer::addAudioBufferInput(QPlatformAudioBufferInput *input)
+{
+ Q_ASSERT(input);
+
+ if (input->audioFormat().isValid())
+ m_recordingEngine.addAudioBufferInput(input, {});
+ else
+ addPendingAudioBufferInput(input);
+}
+
+void EncodingInitializer::addPendingAudioBufferInput(QPlatformAudioBufferInput *input)
+{
+ addPendingSource(input);
+
+ connect(input, &QPlatformAudioBufferInput::destroyed, this, [this, input]() {
+ erasePendingSource(input, QStringLiteral("Audio source deleted"), true);
+ });
+
+ connect(input, &QPlatformAudioBufferInput::newAudioBuffer, this,
+ [this, input](const QAudioBuffer &buffer) {
+ if (buffer.isValid())
+ erasePendingSource(
+ input, [&]() { m_recordingEngine.addAudioBufferInput(input, buffer); });
+ else
+ erasePendingSource(input,
+ QStringLiteral("Audio source has sent the end frame"));
+ });
+}
+
+void EncodingInitializer::addVideoSource(QPlatformVideoSource *source)
+{
+ Q_ASSERT(source);
+ Q_ASSERT(source->isActive());
+
+ if (source->frameFormat().isValid())
+ m_recordingEngine.addVideoSource(source, {});
+ else if (source->hasError())
+ emitStreamInitializationError(QStringLiteral("Video source error: ")
+ + source->errorString());
+ else
+ addPendingVideoSource(source);
+}
+
+void EncodingInitializer::addPendingVideoSource(QPlatformVideoSource *source)
+{
+ addPendingSource(source);
+
+ connect(source, &QPlatformVideoSource::errorChanged, this, [this, source]() {
+ if (source->hasError())
+ erasePendingSource(source,
+ QStringLiteral("Videio source error: ") + source->errorString());
+ });
+
+ connect(source, &QPlatformVideoSource::destroyed, this, [this, source]() {
+ erasePendingSource(source, QStringLiteral("Source deleted"), true);
+ });
+
+ connect(source, &QPlatformVideoSource::activeChanged, this, [this, source]() {
+ if (!source->isActive())
+ erasePendingSource(source, QStringLiteral("Video source deactivated"));
+ });
+
+ connect(source, &QPlatformVideoSource::newVideoFrame, this,
+ [this, source](const QVideoFrame &frame) {
+ if (frame.isValid())
+ erasePendingSource(source,
+ [&]() { m_recordingEngine.addVideoSource(source, frame); });
+ else
+ erasePendingSource(source,
+ QStringLiteral("Video source has sent the end frame"));
+ });
+}
+
+void EncodingInitializer::tryStartRecordingEngine()
+{
+ if (m_pendingSources.empty())
+ m_recordingEngine.start();
+}
+
+void EncodingInitializer::emitStreamInitializationError(QString error)
+{
+ emit m_recordingEngine.streamInitializationError(
+ QMediaRecorder::ResourceError,
+ QStringLiteral("Video steam initialization error. ") + error);
+}
+
+void EncodingInitializer::addPendingSource(QObject *source)
+{
+ Q_ASSERT(m_pendingSources.count(source) == 0);
+
+ setEncoderInterface(source, this);
+ m_pendingSources.emplace(source);
+}
+
+template <typename F>
+void EncodingInitializer::erasePendingSource(QObject *source, F &&functionOrError, bool destroyed)
+{
+ const auto erasedCount = m_pendingSources.erase(source);
+ if (erasedCount == 0)
+ return; // got a queued event, just ignore it.
+
+ if (!destroyed) {
+ setEncoderInterface(source, nullptr);
+ disconnect(source, nullptr, this, nullptr);
+ }
+
+ if constexpr (std::is_invocable_v<F>)
+ functionOrError();
+ else
+ emitStreamInitializationError(functionOrError);
+
+ tryStartRecordingEngine();
+}
+
+bool EncodingInitializer::canPushFrame() const
+{
+ return true;
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer_p.h
new file mode 100644
index 000000000..e3bcb3428
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer_p.h
@@ -0,0 +1,77 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QENCODINGINITIALIZER_P_H
+#define QENCODINGINITIALIZER_P_H
+
+#include "qobject.h"
+#include "private/qmediainputencoderinterface_p.h"
+#include <unordered_set>
+#include <vector>
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+QT_BEGIN_NAMESPACE
+
+class QFFmpegAudioInput;
+class QPlatformVideoSource;
+class QPlatformAudioBufferInput;
+class QPlatformAudioBufferInputBase;
+class QMediaInputEncoderInterface;
+
+namespace QFFmpeg {
+
+class RecordingEngine;
+
+// Initializes RecordingEngine with audio and video sources, potentially lazily
+// upon first frame arrival if video frame format is not pre-determined.
+class EncodingInitializer : public QObject, private QMediaInputEncoderInterface
+{
+public:
+ EncodingInitializer(RecordingEngine &engine);
+
+ ~EncodingInitializer() override;
+
+ void start(const std::vector<QPlatformAudioBufferInputBase *> &audioSources,
+ const std::vector<QPlatformVideoSource *> &videoSources);
+
+private:
+ void addAudioBufferInput(QPlatformAudioBufferInput *input);
+
+ void addPendingAudioBufferInput(QPlatformAudioBufferInput *input);
+
+ void addVideoSource(QPlatformVideoSource *source);
+
+ void addPendingVideoSource(QPlatformVideoSource *source);
+
+ void addPendingSource(QObject *source);
+
+ void tryStartRecordingEngine();
+
+private:
+ void emitStreamInitializationError(QString error);
+
+ template <typename F>
+ void erasePendingSource(QObject *source, F &&functionOrError, bool destroyed = false);
+
+ bool canPushFrame() const override;
+
+private:
+ RecordingEngine &m_recordingEngine;
+ std::unordered_set<QObject *> m_pendingSources;
+};
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QENCODINGINITIALIZER_P_H
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer.cpp
index 6367dde3b..6a33e79dd 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer.cpp
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer.cpp
@@ -2,6 +2,7 @@
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
#include "qffmpegmuxer_p.h"
#include "qffmpegrecordingengine_p.h"
+#include "qffmpegrecordingengineutils_p.h"
#include <QtCore/qloggingcategory.h>
QT_BEGIN_NAMESPACE
@@ -18,7 +19,7 @@ Muxer::Muxer(RecordingEngine *encoder) : m_encoder(encoder)
void Muxer::addPacket(AVPacketUPtr packet)
{
{
- QMutexLocker locker(&m_queueMutex);
+ QMutexLocker locker = lockLoopData();
m_packetQueue.push(std::move(packet));
}
@@ -28,7 +29,7 @@ void Muxer::addPacket(AVPacketUPtr packet)
AVPacketUPtr Muxer::takePacket()
{
- QMutexLocker locker(&m_queueMutex);
+ QMutexLocker locker = lockLoopData();
return dequeueIfPossible(m_packetQueue);
}
@@ -37,11 +38,14 @@ void Muxer::init()
qCDebug(qLcFFmpegMuxer) << "Muxer::init started thread.";
}
-void Muxer::cleanup() { }
+void Muxer::cleanup()
+{
+ while (!m_packetQueue.empty())
+ processOne();
+}
bool QFFmpeg::Muxer::hasData() const
{
- QMutexLocker locker(&m_queueMutex);
return !m_packetQueue.empty();
}
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer_p.h
index 8cdf73c6f..4f8f4d27a 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer_p.h
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer_p.h
@@ -29,7 +29,6 @@ private:
void processOne() override;
private:
- mutable QMutex m_queueMutex;
std::queue<AVPacketUPtr> m_packetQueue;
RecordingEngine *m_encoder;
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine.cpp
index 20d38ace1..61b3b0cf6 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine.cpp
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine.cpp
@@ -1,19 +1,21 @@
// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
#include "qffmpegrecordingengine_p.h"
-#include "qffmpegmediaformatinfo_p.h"
-#include "qffmpegvideoframeencoder_p.h"
-#include "private/qmultimediautils_p.h"
-
-#include <qdebug.h>
+#include "qffmpegencodinginitializer_p.h"
#include "qffmpegaudioencoder_p.h"
#include "qffmpegaudioinput_p.h"
-#include <private/qplatformcamera_p.h>
-#include "qffmpegvideobuffer_p.h"
+#include "qffmpegrecordingengineutils_p.h"
+
+#include "private/qmultimediautils_p.h"
+#include "private/qplatformaudiobufferinput_p.h"
+#include "private/qplatformvideosource_p.h"
+#include "private/qplatformvideoframeinput_p.h"
+
+#include "qdebug.h"
#include "qffmpegvideoencoder_p.h"
#include "qffmpegmediametadata_p.h"
#include "qffmpegmuxer_p.h"
-#include <qloggingcategory.h>
+#include "qloggingcategory.h"
QT_BEGIN_NAMESPACE
@@ -36,21 +38,69 @@ RecordingEngine::~RecordingEngine()
void RecordingEngine::addAudioInput(QFFmpegAudioInput *input)
{
- m_audioEncoder = new AudioEncoder(this, input, m_settings);
- addMediaFrameHandler(input, &QFFmpegAudioInput::newAudioBuffer, m_audioEncoder,
- &AudioEncoder::addBuffer);
+ Q_ASSERT(input);
+
+ if (input->device.isNull()) {
+ emit streamInitializationError(QMediaRecorder::ResourceError,
+ QLatin1StringView("Audio device is null"));
+ return;
+ }
+
+ const QAudioFormat format = input->device.preferredFormat();
+
+ if (!format.isValid()) {
+ emit streamInitializationError(
+ QMediaRecorder::FormatError,
+ QLatin1StringView("Audio device has invalid preferred format"));
+ return;
+ }
+
+ AudioEncoder *audioEncoder = createAudioEncoder(format);
+ connectEncoderToSource(audioEncoder, input);
+
input->setRunning(true);
}
-void RecordingEngine::addVideoSource(QPlatformVideoSource * source)
+void RecordingEngine::addAudioBufferInput(QPlatformAudioBufferInput *input,
+ const QAudioBuffer &firstBuffer)
{
- auto frameFormat = source->frameFormat();
+ Q_ASSERT(input);
+ const QAudioFormat format = firstBuffer.isValid() ? firstBuffer.format() : input->audioFormat();
- if (!frameFormat.isValid()) {
- qCWarning(qLcFFmpegEncoder) << "Cannot add source; invalid vide frame format";
- emit error(QMediaRecorder::ResourceError,
- QLatin1StringView("Cannot get video source format"));
- return;
+ AudioEncoder *audioEncoder = createAudioEncoder(format);
+
+ // set the buffer before connecting to avoid potential races
+ if (firstBuffer.isValid())
+ audioEncoder->addBuffer(firstBuffer);
+
+ connectEncoderToSource(audioEncoder, input);
+}
+
+AudioEncoder *RecordingEngine::createAudioEncoder(const QAudioFormat &format)
+{
+ Q_ASSERT(format.isValid());
+
+ auto audioEncoder = new AudioEncoder(*this, format, m_settings);
+ m_audioEncoders.push_back(audioEncoder);
+ connect(audioEncoder, &EncoderThread::endOfSourceStream, this,
+ &RecordingEngine::handleSourceEndOfStream);
+
+ return audioEncoder;
+}
+
+void RecordingEngine::addVideoSource(QPlatformVideoSource *source, const QVideoFrame &firstFrame)
+{
+ QVideoFrameFormat frameFormat =
+ firstFrame.isValid() ? firstFrame.surfaceFormat() : source->frameFormat();
+
+ Q_ASSERT(frameFormat.isValid());
+
+ if (firstFrame.isValid() && frameFormat.streamFrameRate() <= 0.f) {
+ const qint64 startTime = firstFrame.startTime();
+ const qint64 endTime = firstFrame.endTime();
+ if (startTime != -1 && endTime > startTime)
+ frameFormat.setStreamFrameRate(static_cast<qreal>(VideoFrameTimeBase)
+ / (endTime - startTime));
}
std::optional<AVPixelFormat> hwPixelFormat = source->ffmpegHWPixelFormat()
@@ -60,22 +110,40 @@ void RecordingEngine::addVideoSource(QPlatformVideoSource * source)
qCDebug(qLcFFmpegEncoder) << "adding video source" << source->metaObject()->className() << ":"
<< "pixelFormat=" << frameFormat.pixelFormat()
<< "frameSize=" << frameFormat.frameSize()
- << "frameRate=" << frameFormat.frameRate() << "ffmpegHWPixelFormat="
- << (hwPixelFormat ? *hwPixelFormat : AV_PIX_FMT_NONE);
+ << "frameRate=" << frameFormat.streamFrameRate()
+ << "ffmpegHWPixelFormat=" << (hwPixelFormat ? *hwPixelFormat : AV_PIX_FMT_NONE);
- auto veUPtr = std::make_unique<VideoEncoder>(this, m_settings, frameFormat, hwPixelFormat);
+ auto veUPtr = std::make_unique<VideoEncoder>(*this, m_settings, frameFormat, hwPixelFormat);
if (!veUPtr->isValid()) {
- emit error(QMediaRecorder::FormatError, QLatin1StringView("Cannot initialize encoder"));
+ emit streamInitializationError(QMediaRecorder::FormatError,
+ QLatin1StringView("Cannot initialize encoder"));
return;
}
- auto ve = veUPtr.release();
- addMediaFrameHandler(source, &QPlatformVideoSource::newVideoFrame, ve, &VideoEncoder::addFrame);
- m_videoEncoders.append(ve);
+ auto videoEncoder = veUPtr.release();
+ m_videoEncoders.append(videoEncoder);
+
+ connect(videoEncoder, &EncoderThread::endOfSourceStream, this,
+ &RecordingEngine::handleSourceEndOfStream);
+
+ // set the frame before connecting to avoid potential races
+ if (firstFrame.isValid())
+ videoEncoder->addFrame(firstFrame);
+
+ connectEncoderToSource(videoEncoder, source);
}
void RecordingEngine::start()
{
+ Q_ASSERT(m_initializer);
+ m_initializer.reset();
+
+ if (m_audioEncoders.empty() && m_videoEncoders.empty()) {
+ emit sessionError(QMediaRecorder::ResourceError,
+ QLatin1StringView("No valid stream found for encoding"));
+ return;
+ }
+
qCDebug(qLcFFmpegEncoder) << "RecordingEngine::start!";
avFormatContext()->metadata = QFFmpegMetaData::toAVMetaData(m_metaData);
@@ -85,7 +153,8 @@ void RecordingEngine::start()
int res = avformat_write_header(avFormatContext(), nullptr);
if (res < 0) {
qWarning() << "could not write header, error:" << res << err2str(res);
- emit error(QMediaRecorder::ResourceError, "Cannot start writing the stream");
+ emit sessionError(QMediaRecorder::ResourceError,
+ QLatin1StringView("Cannot start writing the stream"));
return;
}
@@ -94,62 +163,66 @@ void RecordingEngine::start()
qCDebug(qLcFFmpegEncoder) << "stream header is successfully written";
m_muxer->start();
- if (m_audioEncoder)
- m_audioEncoder->start();
- for (auto *videoEncoder : m_videoEncoders)
- if (videoEncoder->isValid())
- videoEncoder->start();
+
+ forEachEncoder([](QThread *thread) { thread->start(); });
+}
+
+void RecordingEngine::initialize(const std::vector<QPlatformAudioBufferInputBase *> &audioSources,
+ const std::vector<QPlatformVideoSource *> &videoSources)
+{
+ qCDebug(qLcFFmpegEncoder) << ">>>>>>>>>>>>>>> initialize";
+
+ m_initializer = std::make_unique<EncodingInitializer>(*this);
+ m_initializer->start(audioSources, videoSources);
}
-RecordingEngine::EncodingFinalizer::EncodingFinalizer(RecordingEngine *e) : m_encoder(e)
+RecordingEngine::EncodingFinalizer::EncodingFinalizer(RecordingEngine &recordingEngine)
+ : m_recordingEngine(recordingEngine)
{
connect(this, &QThread::finished, this, &QObject::deleteLater);
}
void RecordingEngine::EncodingFinalizer::run()
{
- if (m_encoder->m_audioEncoder)
- m_encoder->m_audioEncoder->stopAndDelete();
- for (auto &videoEncoder : m_encoder->m_videoEncoders)
- videoEncoder->stopAndDelete();
- m_encoder->m_muxer->stopAndDelete();
-
- if (m_encoder->m_isHeaderWritten) {
- const int res = av_write_trailer(m_encoder->avFormatContext());
+ m_recordingEngine.forEachEncoder(&EncoderThread::stopAndDelete);
+ m_recordingEngine.m_muxer->stopAndDelete();
+
+ if (m_recordingEngine.m_isHeaderWritten) {
+ const int res = av_write_trailer(m_recordingEngine.avFormatContext());
if (res < 0) {
const auto errorDescription = err2str(res);
qCWarning(qLcFFmpegEncoder) << "could not write trailer" << res << errorDescription;
- emit m_encoder->error(QMediaRecorder::FormatError,
- QLatin1String("Cannot write trailer: ") + errorDescription);
+ emit m_recordingEngine.sessionError(QMediaRecorder::FormatError,
+ QLatin1String("Cannot write trailer: ")
+ + errorDescription);
}
}
// else ffmpeg might crash
// close AVIO before emitting finalizationDone.
- m_encoder->m_formatContext->closeAVIO();
+ m_recordingEngine.m_formatContext->closeAVIO();
qCDebug(qLcFFmpegEncoder) << " done finalizing.";
- emit m_encoder->finalizationDone();
- delete m_encoder;
+ emit m_recordingEngine.finalizationDone();
+ auto recordingEnginePtr = &m_recordingEngine;
+ delete recordingEnginePtr;
}
void RecordingEngine::finalize()
{
qCDebug(qLcFFmpegEncoder) << ">>>>>>>>>>>>>>> finalize";
- for (auto &conn : m_connections)
- disconnect(conn);
+ m_initializer.reset();
+
+ forEachEncoder(&disconnectEncoderFromSource);
- auto *finalizer = new EncodingFinalizer(this);
+ auto *finalizer = new EncodingFinalizer(*this);
finalizer->start();
}
-void RecordingEngine::setPaused(bool p)
+void RecordingEngine::setPaused(bool paused)
{
- if (m_audioEncoder)
- m_audioEncoder->setPaused(p);
- for (auto &videoEncoder : m_videoEncoders)
- videoEncoder->setPaused(p);
+ forEachEncoder(&EncoderThread::setPaused, paused);
}
void RecordingEngine::setMetaData(const QMediaMetaData &metaData)
@@ -166,94 +239,27 @@ void RecordingEngine::newTimeStamp(qint64 time)
}
}
-template<typename... Args>
-void RecordingEngine::addMediaFrameHandler(Args &&...args)
+bool RecordingEngine::isEndOfSourceStreams() const
{
- auto connection = connect(std::forward<Args>(args)..., Qt::DirectConnection);
- m_connections.append(connection);
+ auto isAtEnd = [](EncoderThread *encoder) { return encoder->isEndOfSourceStream(); };
+ return std::all_of(m_videoEncoders.cbegin(), m_videoEncoders.cend(), isAtEnd)
+ && std::all_of(m_audioEncoders.cbegin(), m_audioEncoders.cend(), isAtEnd);
}
-struct QVideoFrameHolder
+void RecordingEngine::handleSourceEndOfStream()
{
- QVideoFrame f;
- QImage i;
-};
-
-static void freeQVideoFrame(void *opaque, uint8_t *)
-{
- delete reinterpret_cast<QVideoFrameHolder *>(opaque);
+ if (isEndOfSourceStreams())
+ emit endOfSourceStreams();
}
-void VideoEncoder::processOne()
+template <typename F, typename... Args>
+void RecordingEngine::forEachEncoder(F &&f, Args &&...args)
{
- retrievePackets();
-
- auto frame = takeFrame();
- if (!frame.isValid())
- return;
-
- if (!isValid())
- return;
-
-// qCDebug(qLcFFmpegEncoder) << "new video buffer" << frame.startTime();
-
- AVFrameUPtr avFrame;
-
- auto *videoBuffer = dynamic_cast<QFFmpegVideoBuffer *>(frame.videoBuffer());
- if (videoBuffer) {
- // ffmpeg video buffer, let's use the native AVFrame stored in there
- auto *hwFrame = videoBuffer->getHWFrame();
- if (hwFrame && hwFrame->format == m_frameEncoder->sourceFormat())
- avFrame.reset(av_frame_clone(hwFrame));
- }
-
- if (!avFrame) {
- frame.map(QVideoFrame::ReadOnly);
- auto size = frame.size();
- avFrame = makeAVFrame();
- avFrame->format = m_frameEncoder->sourceFormat();
- avFrame->width = size.width();
- avFrame->height = size.height();
-
- for (int i = 0; i < 4; ++i) {
- avFrame->data[i] = const_cast<uint8_t *>(frame.bits(i));
- avFrame->linesize[i] = frame.bytesPerLine(i);
- }
-
- QImage img;
- if (frame.pixelFormat() == QVideoFrameFormat::Format_Jpeg) {
- // the QImage is cached inside the video frame, so we can take the pointer to the image data here
- img = frame.toImage();
- avFrame->data[0] = (uint8_t *)img.bits();
- avFrame->linesize[0] = img.bytesPerLine();
- }
-
- Q_ASSERT(avFrame->data[0]);
- // ensure the video frame and it's data is alive as long as it's being used in the encoder
- avFrame->opaque_ref = av_buffer_create(nullptr, 0, freeQVideoFrame, new QVideoFrameHolder{frame, img}, 0);
- }
-
- if (m_baseTime.loadAcquire() == std::numeric_limits<qint64>::min()) {
- m_baseTime.storeRelease(frame.startTime() - m_lastFrameTime);
- qCDebug(qLcFFmpegEncoder) << ">>>> adjusting base time to" << m_baseTime.loadAcquire()
- << frame.startTime() << m_lastFrameTime;
- }
-
- qint64 time = frame.startTime() - m_baseTime.loadAcquire();
- m_lastFrameTime = frame.endTime() - m_baseTime.loadAcquire();
-
- setAVFrameTime(*avFrame, m_frameEncoder->getPts(time), m_frameEncoder->getTimeBase());
-
- m_encoder->newTimeStamp(time / 1000);
-
- qCDebug(qLcFFmpegEncoder) << ">>> sending frame" << avFrame->pts << time << m_lastFrameTime;
- int ret = m_frameEncoder->sendFrame(std::move(avFrame));
- if (ret < 0) {
- qCDebug(qLcFFmpegEncoder) << "error sending frame" << ret << err2str(ret);
- emit m_encoder->error(QMediaRecorder::ResourceError, err2str(ret));
- }
+ for (AudioEncoder *audioEncoder : m_audioEncoders)
+ std::invoke(f, audioEncoder, args...);
+ for (VideoEncoder *videoEncoder : m_videoEncoders)
+ std::invoke(f, videoEncoder, args...);
}
-
}
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine_p.h
index 10174f9a4..125bdc56a 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine_p.h
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine_p.h
@@ -20,12 +20,13 @@
#include <private/qplatformmediarecorder_p.h>
#include <qmediarecorder.h>
-#include <queue>
-
QT_BEGIN_NAMESPACE
class QFFmpegAudioInput;
+class QPlatformAudioBufferInput;
+class QPlatformAudioBufferInputBase;
class QVideoFrame;
+class QAudioBuffer;
class QPlatformVideoSource;
namespace QFFmpeg
@@ -36,17 +37,7 @@ class Muxer;
class AudioEncoder;
class VideoEncoder;
class VideoFrameEncoder;
-
-template <typename T>
-T dequeueIfPossible(std::queue<T> &queue)
-{
- if (queue.empty())
- return T{};
-
- auto result = std::move(queue.front());
- queue.pop();
- return result;
-}
+class EncodingInitializer;
class RecordingEngine : public QObject
{
@@ -55,10 +46,8 @@ public:
RecordingEngine(const QMediaEncoderSettings &settings, std::unique_ptr<EncodingFormatContext> context);
~RecordingEngine();
- void addAudioInput(QFFmpegAudioInput *input);
- void addVideoSource(QPlatformVideoSource *source);
-
- void start();
+ void initialize(const std::vector<QPlatformAudioBufferInputBase *> &audioSources,
+ const std::vector<QPlatformVideoSource *> &videoSources);
void finalize();
void setPaused(bool p);
@@ -67,38 +56,52 @@ public:
AVFormatContext *avFormatContext() { return m_formatContext->avFormatContext(); }
Muxer *getMuxer() { return m_muxer; }
+ bool isEndOfSourceStreams() const;
+
public Q_SLOTS:
void newTimeStamp(qint64 time);
Q_SIGNALS:
void durationChanged(qint64 duration);
- void error(QMediaRecorder::Error code, const QString &description);
+ void sessionError(QMediaRecorder::Error code, const QString &description);
+ void streamInitializationError(QMediaRecorder::Error code, const QString &description);
void finalizationDone();
+ void endOfSourceStreams();
private:
- template<typename... Args>
- void addMediaFrameHandler(Args &&...args);
-
class EncodingFinalizer : public QThread
{
public:
- EncodingFinalizer(RecordingEngine *e);
+ EncodingFinalizer(RecordingEngine &recordingEngine);
void run() override;
private:
- RecordingEngine *m_encoder = nullptr;
+ RecordingEngine &m_recordingEngine;
};
+ friend class EncodingInitializer;
+ void addAudioInput(QFFmpegAudioInput *input);
+ void addAudioBufferInput(QPlatformAudioBufferInput *input, const QAudioBuffer &firstBuffer);
+ AudioEncoder *createAudioEncoder(const QAudioFormat &format);
+
+ void addVideoSource(QPlatformVideoSource *source, const QVideoFrame &firstFrame);
+ void handleSourceEndOfStream();
+
+ void start();
+
+ template <typename F, typename... Args>
+ void forEachEncoder(F &&f, Args &&...args);
+
private:
QMediaEncoderSettings m_settings;
QMediaMetaData m_metaData;
std::unique_ptr<EncodingFormatContext> m_formatContext;
Muxer *m_muxer = nullptr;
- AudioEncoder *m_audioEncoder = nullptr;
+ QList<AudioEncoder *> m_audioEncoders;
QList<VideoEncoder *> m_videoEncoders;
- QList<QMetaObject::Connection> m_connections;
+ std::unique_ptr<EncodingInitializer> m_initializer;
QMutex m_timeMutex;
qint64 m_timeRecorded = 0;
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils.cpp
new file mode 100644
index 000000000..6c2ba8b15
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils.cpp
@@ -0,0 +1,63 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "recordingengine/qffmpegrecordingengineutils_p.h"
+#include "recordingengine/qffmpegencoderthread_p.h"
+#include "private/qplatformaudiobufferinput_p.h"
+#include "private/qplatformvideoframeinput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+template <typename F>
+void doWithMediaFrameInput(QObject *source, F &&f)
+{
+ if (auto videoFrameInput = qobject_cast<QPlatformVideoFrameInput *>(source))
+ f(videoFrameInput);
+ else if (auto audioBufferInput = qobject_cast<QPlatformAudioBufferInput *>(source))
+ f(audioBufferInput);
+}
+
+void setEncoderInterface(QObject *source, QMediaInputEncoderInterface *interface)
+{
+ doWithMediaFrameInput(source, [&](auto source) {
+ using Source = std::remove_pointer_t<decltype(source)>;
+
+ source->setEncoderInterface(interface);
+ if (interface)
+ // Postpone emit 'encoderUpdated' as the encoding pipeline may be not
+ // completely ready at the moment. The case is calling QMediaRecorder::stop
+ // upon handling 'readyToSendFrame'
+ QMetaObject::invokeMethod(source, &Source::encoderUpdated, Qt::QueuedConnection);
+ else
+ emit source->encoderUpdated();
+ });
+}
+
+void setEncoderUpdateConnection(QObject *source, EncoderThread *encoder)
+{
+ doWithMediaFrameInput(source, [&](auto source) {
+ using Source = std::remove_pointer_t<decltype(source)>;
+ QObject::connect(encoder, &EncoderThread::canPushFrameChanged, source,
+ &Source::encoderUpdated);
+ });
+}
+
+void disconnectEncoderFromSource(EncoderThread *encoder)
+{
+ QObject *source = encoder->source();
+ if (!source)
+ return;
+
+ // We should address the dependency AudioEncoder from QFFmpegAudioInput to
+ // set null source here.
+ // encoder->setSource(nullptr);
+
+ QObject::disconnect(source, nullptr, encoder, nullptr);
+ setEncoderInterface(source, nullptr);
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils_p.h
new file mode 100644
index 000000000..3ef56435a
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils_p.h
@@ -0,0 +1,81 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGRECORDINGENGINEUTILS_P_H
+#define QFFMPEGRECORDINGENGINEUTILS_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qobject.h"
+#include <queue>
+
+QT_BEGIN_NAMESPACE
+
+class QMediaInputEncoderInterface;
+class QPlatformVideoSource;
+
+namespace QFFmpeg {
+
+constexpr qint64 VideoFrameTimeBase = 1000000; // us in sec
+
+class EncoderThread;
+
+template <typename T>
+T dequeueIfPossible(std::queue<T> &queue)
+{
+ if (queue.empty())
+ return T{};
+
+ auto result = std::move(queue.front());
+ queue.pop();
+ return result;
+}
+
+void setEncoderInterface(QObject *source, QMediaInputEncoderInterface *interface);
+
+void setEncoderUpdateConnection(QObject *source, EncoderThread *encoder);
+
+template <typename Encoder, typename Source>
+void connectEncoderToSource(Encoder *encoder, Source *source)
+{
+ Q_ASSERT(!encoder->source());
+ encoder->setSource(source);
+
+ if constexpr (std::is_same_v<Source, QPlatformVideoSource>) {
+ QObject::connect(source, &Source::newVideoFrame, encoder, &Encoder::addFrame,
+ Qt::DirectConnection);
+
+ QObject::connect(source, &Source::activeChanged, encoder, [=]() {
+ if (!source->isActive())
+ encoder->setEndOfSourceStream(true);
+ });
+ } else {
+ QObject::connect(source, &Source::newAudioBuffer, encoder, &Encoder::addBuffer,
+ Qt::DirectConnection);
+ }
+
+ // TODO:
+ // QObject::connect(source, &Source::disconnectedFromSession, encoder, [=]() {
+ // encoder->setSourceEndOfStream(true);
+ // });
+
+ setEncoderUpdateConnection(source, encoder);
+ setEncoderInterface(source, encoder);
+}
+
+void disconnectEncoderFromSource(EncoderThread *encoder);
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGRECORDINGENGINEUTILS_P_H
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder.cpp
index 04ed5a728..5ae3aa496 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder.cpp
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder.cpp
@@ -5,6 +5,8 @@
#include "qffmpegvideobuffer_p.h"
#include "qffmpegrecordingengine_p.h"
#include "qffmpegvideoframeencoder_p.h"
+#include "qffmpegrecordingengineutils_p.h"
+#include "private/qvideoframe_p.h"
#include <QtCore/qloggingcategory.h>
QT_BEGIN_NAMESPACE
@@ -13,17 +15,16 @@ namespace QFFmpeg {
static Q_LOGGING_CATEGORY(qLcFFmpegVideoEncoder, "qt.multimedia.ffmpeg.videoencoder");
-
-VideoEncoder::VideoEncoder(RecordingEngine *encoder, const QMediaEncoderSettings &settings,
+VideoEncoder::VideoEncoder(RecordingEngine &recordingEngine, const QMediaEncoderSettings &settings,
const QVideoFrameFormat &format, std::optional<AVPixelFormat> hwFormat)
- : EncoderThread(encoder)
+ : EncoderThread(recordingEngine)
{
setObjectName(QLatin1String("VideoEncoder"));
AVPixelFormat swFormat = QFFmpegVideoBuffer::toAVPixelFormat(format.pixelFormat());
AVPixelFormat ffmpegPixelFormat =
hwFormat && *hwFormat != AV_PIX_FMT_NONE ? *hwFormat : swFormat;
- auto frameRate = format.frameRate();
+ auto frameRate = format.streamFrameRate();
if (frameRate <= 0.) {
qWarning() << "Invalid frameRate" << frameRate << "; Using the default instead";
@@ -31,9 +32,13 @@ VideoEncoder::VideoEncoder(RecordingEngine *encoder, const QMediaEncoderSettings
frameRate = 30.;
}
- m_frameEncoder =
- VideoFrameEncoder::create(settings, format.frameSize(), frameRate, ffmpegPixelFormat,
- swFormat, encoder->avFormatContext());
+ m_frameEncoder = VideoFrameEncoder::create(settings,
+ format.frameSize(),
+ format.rotation(),
+ frameRate,
+ ffmpegPixelFormat,
+ swFormat,
+ recordingEngine.avFormatContext());
}
VideoEncoder::~VideoEncoder() = default;
@@ -45,25 +50,40 @@ bool VideoEncoder::isValid() const
void VideoEncoder::addFrame(const QVideoFrame &frame)
{
- QMutexLocker locker(&m_queueMutex);
+ if (!frame.isValid()) {
+ setEndOfSourceStream(true);
+ return;
+ }
- // Drop frames if encoder can not keep up with the video source data rate
- const bool queueFull = m_videoFrameQueue.size() >= m_maxQueueSize;
+ setEndOfSourceStream(false);
- if (queueFull) {
- qCDebug(qLcFFmpegVideoEncoder) << "RecordingEngine frame queue full. Frame lost.";
- } else if (!m_paused.loadRelaxed()) {
- m_videoFrameQueue.push(frame);
+ {
+ auto guard = lockLoopData();
- locker.unlock(); // Avoid context switch on wake wake-up
+ if (m_paused) {
+ m_shouldAdjustTimeBaseForNextFrame = true;
+ return;
+ }
- dataReady();
+ // Drop frames if encoder can not keep up with the video source data rate;
+ // canPushFrame might be used instead
+ const bool queueFull = m_videoFrameQueue.size() >= m_maxQueueSize;
+
+ if (queueFull) {
+ qCDebug(qLcFFmpegVideoEncoder) << "RecordingEngine frame queue full. Frame lost.";
+ return;
+ }
+
+ m_videoFrameQueue.push({ frame, m_shouldAdjustTimeBaseForNextFrame });
+ m_shouldAdjustTimeBaseForNextFrame = false;
}
+
+ dataReady();
}
-QVideoFrame VideoEncoder::takeFrame()
+VideoEncoder::FrameInfo VideoEncoder::takeFrame()
{
- QMutexLocker locker(&m_queueMutex);
+ auto guard = lockLoopData();
return dequeueIfPossible(m_videoFrameQueue);
}
@@ -72,15 +92,18 @@ void VideoEncoder::retrievePackets()
if (!m_frameEncoder)
return;
while (auto packet = m_frameEncoder->retrievePacket())
- m_encoder->getMuxer()->addPacket(std::move(packet));
+ m_recordingEngine.getMuxer()->addPacket(std::move(packet));
}
void VideoEncoder::init()
{
+ Q_ASSERT(isValid());
+
qCDebug(qLcFFmpegVideoEncoder) << "VideoEncoder::init started video device thread.";
bool ok = m_frameEncoder->open();
if (!ok)
- emit m_encoder->error(QMediaRecorder::ResourceError, "Could not initialize encoder");
+ emit m_recordingEngine.sessionError(QMediaRecorder::ResourceError,
+ "Could not initialize encoder");
}
void VideoEncoder::cleanup()
@@ -96,10 +119,127 @@ void VideoEncoder::cleanup()
bool VideoEncoder::hasData() const
{
- QMutexLocker locker(&m_queueMutex);
return !m_videoFrameQueue.empty();
}
+struct QVideoFrameHolder
+{
+ QVideoFrame f;
+ QImage i;
+};
+
+static void freeQVideoFrame(void *opaque, uint8_t *)
+{
+ delete reinterpret_cast<QVideoFrameHolder *>(opaque);
+}
+
+void VideoEncoder::processOne()
+{
+ retrievePackets();
+
+ FrameInfo frameInfo = takeFrame();
+ QVideoFrame &frame = frameInfo.frame;
+ Q_ASSERT(frame.isValid());
+
+ if (!isValid())
+ return;
+
+ // qCDebug(qLcFFmpegEncoder) << "new video buffer" << frame.startTime();
+
+ AVFrameUPtr avFrame;
+
+ auto *videoBuffer = dynamic_cast<QFFmpegVideoBuffer *>(QVideoFramePrivate::hwBuffer(frame));
+ if (videoBuffer) {
+ // ffmpeg video buffer, let's use the native AVFrame stored in there
+ auto *hwFrame = videoBuffer->getHWFrame();
+ if (hwFrame && hwFrame->format == m_frameEncoder->sourceFormat())
+ avFrame.reset(av_frame_clone(hwFrame));
+ }
+
+ if (!avFrame) {
+ frame.map(QtVideo::MapMode::ReadOnly);
+ auto size = frame.size();
+ avFrame = makeAVFrame();
+ avFrame->format = m_frameEncoder->sourceFormat();
+ avFrame->width = size.width();
+ avFrame->height = size.height();
+
+ for (int i = 0; i < 4; ++i) {
+ avFrame->data[i] = const_cast<uint8_t *>(frame.bits(i));
+ avFrame->linesize[i] = frame.bytesPerLine(i);
+ }
+
+ QImage img;
+ if (frame.pixelFormat() == QVideoFrameFormat::Format_Jpeg) {
+ // the QImage is cached inside the video frame, so we can take the pointer to the image
+ // data here
+ img = frame.toImage();
+ avFrame->data[0] = (uint8_t *)img.bits();
+ avFrame->linesize[0] = img.bytesPerLine();
+ }
+
+ Q_ASSERT(avFrame->data[0]);
+ // ensure the video frame and it's data is alive as long as it's being used in the encoder
+ avFrame->opaque_ref = av_buffer_create(nullptr, 0, freeQVideoFrame,
+ new QVideoFrameHolder{ frame, img }, 0);
+ }
+
+ const auto [startTime, endTime] = frameTimeStamps(frame);
+
+ if (frameInfo.shouldAdjustTimeBase) {
+ m_baseTime += startTime - m_lastFrameTime;
+ qCDebug(qLcFFmpegVideoEncoder)
+ << ">>>> adjusting base time to" << m_baseTime << startTime << m_lastFrameTime;
+ }
+
+ const qint64 time = startTime - m_baseTime;
+ m_lastFrameTime = endTime;
+
+ setAVFrameTime(*avFrame, m_frameEncoder->getPts(time), m_frameEncoder->getTimeBase());
+
+ m_recordingEngine.newTimeStamp(time / 1000);
+
+ qCDebug(qLcFFmpegVideoEncoder)
+ << ">>> sending frame" << avFrame->pts << time << m_lastFrameTime;
+ int ret = m_frameEncoder->sendFrame(std::move(avFrame));
+ if (ret < 0) {
+ qCDebug(qLcFFmpegVideoEncoder) << "error sending frame" << ret << err2str(ret);
+ emit m_recordingEngine.sessionError(QMediaRecorder::ResourceError, err2str(ret));
+ }
+}
+
+bool VideoEncoder::checkIfCanPushFrame() const
+{
+ if (isRunning())
+ return m_videoFrameQueue.size() < m_maxQueueSize;
+ if (!isFinished())
+ return m_videoFrameQueue.empty();
+
+ return false;
+}
+
+std::pair<qint64, qint64> VideoEncoder::frameTimeStamps(const QVideoFrame &frame) const
+{
+ qint64 startTime = frame.startTime();
+ qint64 endTime = frame.endTime();
+
+ if (startTime == -1) {
+ startTime = m_lastFrameTime;
+ endTime = -1;
+ }
+
+ if (endTime == -1) {
+ qreal frameRate = frame.streamFrameRate();
+ if (frameRate <= 0.)
+ frameRate = m_frameEncoder->settings().videoFrameRate();
+
+ Q_ASSERT(frameRate > 0.f);
+ endTime = startTime + static_cast<qint64>(std::round(VideoFrameTimeBase / frameRate));
+ }
+
+ return { startTime, endTime };
+}
+
} // namespace QFFmpeg
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder_p.h
index f07f146e2..ff6a74fc8 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder_p.h
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder_p.h
@@ -16,11 +16,10 @@ class QMediaEncoderSettings;
namespace QFFmpeg {
class VideoFrameEncoder;
-
class VideoEncoder : public EncoderThread
{
public:
- VideoEncoder(RecordingEngine *encoder, const QMediaEncoderSettings &settings,
+ VideoEncoder(RecordingEngine &recordingEngine, const QMediaEncoderSettings &settings,
const QVideoFrameFormat &format, std::optional<AVPixelFormat> hwFormat);
~VideoEncoder() override;
@@ -28,15 +27,17 @@ public:
void addFrame(const QVideoFrame &frame);
- void setPaused(bool b) override
- {
- EncoderThread::setPaused(b);
- if (b)
- m_baseTime.storeRelease(-1);
- }
+protected:
+ bool checkIfCanPushFrame() const override;
private:
- QVideoFrame takeFrame();
+ struct FrameInfo
+ {
+ QVideoFrame frame;
+ bool shouldAdjustTimeBase = false;
+ };
+
+ FrameInfo takeFrame();
void retrievePackets();
void init() override;
@@ -44,13 +45,15 @@ private:
bool hasData() const override;
void processOne() override;
+ std::pair<qint64, qint64> frameTimeStamps(const QVideoFrame &frame) const;
+
private:
- mutable QMutex m_queueMutex;
- std::queue<QVideoFrame> m_videoFrameQueue;
+ std::queue<FrameInfo> m_videoFrameQueue;
const size_t m_maxQueueSize = 10; // Arbitrarily chosen to limit memory usage (332 MB @ 4K)
std::unique_ptr<VideoFrameEncoder> m_frameEncoder;
- QAtomicInteger<qint64> m_baseTime = std::numeric_limits<qint64>::min();
+ qint64 m_baseTime = 0;
+ bool m_shouldAdjustTimeBaseForNextFrame = true;
qint64 m_lastFrameTime = 0;
};
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoderutils.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoderutils.cpp
index 83b9575b4..eef2a64bf 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoderutils.cpp
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoderutils.cpp
@@ -108,8 +108,9 @@ AVPixelFormat findTargetFormat(AVPixelFormat sourceFormat, AVPixelFormat sourceS
if (constraints && hasAVFormat(constraints->valid_hw_formats, hwFormat))
return hwFormat;
- // Some codecs, don't expose constraints, let's find the format in codec->pix_fmts
- if (hasAVFormat(codec->pix_fmts, hwFormat))
+ // Some codecs, don't expose constraints,
+ // let's find the format in codec->pix_fmts and hw_config
+ if (isAVFormatSupported(codec, hwFormat))
return hwFormat;
}
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder.cpp
index f5c63f3ae..6c52f1e87 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder.cpp
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder.cpp
@@ -7,6 +7,10 @@
#include "qffmpegvideoencoderutils_p.h"
#include <qloggingcategory.h>
+extern "C" {
+#include "libavutil/display.h"
+}
+
QT_BEGIN_NAMESPACE
static Q_LOGGING_CATEGORY(qLcVideoFrameEncoder, "qt.multimedia.ffmpeg.videoencoder");
@@ -14,9 +18,13 @@ static Q_LOGGING_CATEGORY(qLcVideoFrameEncoder, "qt.multimedia.ffmpeg.videoencod
namespace QFFmpeg {
std::unique_ptr<VideoFrameEncoder>
-VideoFrameEncoder::create(const QMediaEncoderSettings &encoderSettings, const QSize &sourceSize,
- qreal sourceFrameRate, AVPixelFormat sourceFormat,
- AVPixelFormat sourceSWFormat, AVFormatContext *formatContext)
+VideoFrameEncoder::create(const QMediaEncoderSettings &encoderSettings,
+ const QSize &sourceSize,
+ QtVideo::Rotation sourceRotation,
+ qreal sourceFrameRate,
+ AVPixelFormat sourceFormat,
+ AVPixelFormat sourceSWFormat,
+ AVFormatContext *formatContext)
{
Q_ASSERT(isSwPixelFormat(sourceSWFormat));
Q_ASSERT(isHwPixelFormat(sourceFormat) || sourceSWFormat == sourceFormat);
@@ -26,6 +34,7 @@ VideoFrameEncoder::create(const QMediaEncoderSettings &encoderSettings, const QS
result->m_settings = encoderSettings;
result->m_sourceSize = sourceSize;
result->m_sourceFormat = sourceFormat;
+ result->m_sourceRotation = sourceRotation;
// Temporary: check isSwPixelFormat because of android issue (QTBUG-116836)
result->m_sourceSWFormat = isSwPixelFormat(sourceFormat) ? sourceFormat : sourceSWFormat;
@@ -154,6 +163,15 @@ bool QFFmpeg::VideoFrameEncoder::initCodecContext(AVFormatContext *formatContext
m_stream->codecpar->height = resolution.height();
m_stream->codecpar->sample_aspect_ratio = AVRational{ 1, 1 };
+ if (m_sourceRotation != QtVideo::Rotation::None) {
+ constexpr auto displayMatrixSize = sizeof(int32_t) * 9;
+ AVPacketSideData sideData = { reinterpret_cast<uint8_t *>(av_malloc(displayMatrixSize)),
+ displayMatrixSize, AV_PKT_DATA_DISPLAYMATRIX };
+ av_display_rotation_set(reinterpret_cast<int32_t *>(sideData.data),
+ static_cast<double>(m_sourceRotation));
+ addStreamSideData(m_stream, sideData);
+ }
+
Q_ASSERT(m_codec);
m_stream->time_base = adjustFrameTimeBase(m_codec->supported_framerates, m_codecFrameRate);
@@ -226,18 +244,8 @@ int VideoFrameEncoder::sendFrame(AVFrameUPtr frame)
if (!frame)
return avcodec_send_frame(m_codecContext.get(), frame.get());
- if (frame->format != m_sourceFormat) {
- qWarning() << "Frame format has changed:" << m_sourceFormat << "->" << frame->format;
+ if (!updateSourceFormatAndSize(frame.get()))
return AVERROR(EINVAL);
- }
-
- const QSize frameSize(frame->width, frame->height);
- if (frameSize != m_sourceSize) {
- qCDebug(qLcVideoFrameEncoder) << "Update conversions on the fly. Source size"
- << m_sourceSize << "->" << frameSize;
- m_sourceSize = frameSize;
- updateConversions();
- }
int64_t pts = 0;
AVRational timeBase = {};
@@ -359,6 +367,44 @@ AVPacketUPtr VideoFrameEncoder::retrievePacket()
return nullptr;
}
+bool VideoFrameEncoder::updateSourceFormatAndSize(const AVFrame *frame)
+{
+ Q_ASSERT(frame);
+
+ const QSize frameSize(frame->width, frame->height);
+ const AVPixelFormat frameFormat = static_cast<AVPixelFormat>(frame->format);
+
+ if (frameSize == m_sourceSize && frameFormat == m_sourceFormat)
+ return true;
+
+ auto applySourceFormatAndSize = [&](AVPixelFormat swFormat) {
+ m_sourceSize = frameSize;
+ m_sourceFormat = frameFormat;
+ m_sourceSWFormat = swFormat;
+ updateConversions();
+ return true;
+ };
+
+ if (frameFormat == m_sourceFormat)
+ return applySourceFormatAndSize(m_sourceSWFormat);
+
+ if (frameFormat == AV_PIX_FMT_NONE) {
+ qWarning() << "Got a frame with invalid pixel format";
+ return false;
+ }
+
+ if (isSwPixelFormat(frameFormat))
+ return applySourceFormatAndSize(frameFormat);
+
+ auto framesCtx = reinterpret_cast<const AVHWFramesContext *>(frame->hw_frames_ctx->data);
+ if (!framesCtx || framesCtx->sw_format == AV_PIX_FMT_NONE) {
+ qWarning() << "Cannot update conversions as hw frame has invalid framesCtx" << framesCtx;
+ return false;
+ }
+
+ return applySourceFormatAndSize(framesCtx->sw_format);
+}
+
void VideoFrameEncoder::updateConversions()
{
const bool needToScale = m_sourceSize != m_settings.videoResolution();
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder_p.h
index b44e9cbf7..af57730f2 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder_p.h
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder_p.h
@@ -27,7 +27,9 @@ class VideoFrameEncoder
{
public:
static std::unique_ptr<VideoFrameEncoder> create(const QMediaEncoderSettings &encoderSettings,
- const QSize &sourceSize, qreal sourceFrameRate,
+ const QSize &sourceSize,
+ QtVideo::Rotation sourceRotation,
+ qreal sourceFrameRate,
AVPixelFormat sourceFormat,
AVPixelFormat sourceSWFormat,
AVFormatContext *formatContext);
@@ -46,9 +48,13 @@ public:
int sendFrame(AVFrameUPtr frame);
AVPacketUPtr retrievePacket();
+ const QMediaEncoderSettings &settings() { return m_settings; }
+
private:
VideoFrameEncoder() = default;
+ bool updateSourceFormatAndSize(const AVFrame *frame);
+
void updateConversions();
bool initCodec();
@@ -60,6 +66,7 @@ private:
private:
QMediaEncoderSettings m_settings;
QSize m_sourceSize;
+ QtVideo::Rotation m_sourceRotation = QtVideo::Rotation::None;
std::unique_ptr<HWAccel> m_accel;
const AVCodec *m_codec = nullptr;
diff --git a/src/plugins/multimedia/ffmpeg/symbolstubs/openssl3.ver b/src/plugins/multimedia/ffmpeg/symbolstubs/openssl3.ver
new file mode 100644
index 000000000..88235a94c
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/symbolstubs/openssl3.ver
@@ -0,0 +1,7 @@
+# Copyright (C) 2024 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+OPENSSL_3.0.0 {
+ global:
+ *;
+};
diff --git a/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-crypto.cpp b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-crypto.cpp
new file mode 100644
index 000000000..fbf3b783c
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-crypto.cpp
@@ -0,0 +1,6 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QtMultimedia/private/qsymbolsresolveutils_p.h>
+
+// No stub functions are needed for ffmpeg
diff --git a/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-ssl.cpp b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-ssl.cpp
new file mode 100644
index 000000000..3e38e398c
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-ssl.cpp
@@ -0,0 +1,300 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QtMultimedia/private/qsymbolsresolveutils_p.h>
+
+#include <qstringliteral.h>
+
+#include <openssl/bio.h>
+#include <openssl/ssl.h>
+#include <openssl/bn.h>
+#include <openssl/err.h>
+#include <openssl/rand.h>
+
+using namespace Qt::StringLiterals;
+
+[[maybe_unused]] static constexpr auto SHLIB_VERSION =
+#if defined(OPENSSL_SHLIB_VERSION)
+ OPENSSL_SHLIB_VERSION;
+#elif defined(SHLIB_VERSION_NUMBER)
+ SHLIB_VERSION_NUMBER;
+#endif
+
+
+#if !defined(Q_OS_ANDROID)
+CHECK_VERSIONS("ssl", SSL_NEEDED_SOVERSION, SHLIB_VERSION);
+#endif
+
+static std::unique_ptr<QLibrary> loadLib()
+{
+ auto lib = std::make_unique<QLibrary>();
+
+ auto tryLoad = [&](QString sslName, auto version) {
+ lib->setFileNameAndVersion(sslName, version);
+ return lib->load();
+ };
+
+// openssl on Android has specific suffixes
+#if defined(Q_OS_ANDROID)
+ {
+ auto suffix = qEnvironmentVariable("ANDROID_OPENSSL_SUFFIX");
+ if (suffix.isEmpty()) {
+#if (OPENSSL_VERSION_NUMBER >> 28) < 3 // major version < 3
+ suffix = "_1_1"_L1;
+#elif OPENSSL_VERSION_MAJOR == 3
+ suffix = "_3"_L1;
+#else
+ static_assert(false, "Unexpected openssl version");
+#endif
+ }
+
+ if (tryLoad("ssl"_L1 + suffix, -1))
+ return lib;
+ }
+#endif
+
+ if (tryLoad("ssl"_L1, SSL_NEEDED_SOVERSION ""_L1))
+ return lib;
+
+ return {};
+};
+
+
+BEGIN_INIT_FUNCS("ssl", loadLib)
+
+// BN functions
+
+INIT_FUNC(BN_value_one);
+INIT_FUNC(BN_mod_word);
+
+INIT_FUNC(BN_div_word)
+INIT_FUNC(BN_mul_word)
+INIT_FUNC(BN_add_word)
+INIT_FUNC(BN_sub_word)
+INIT_FUNC(BN_set_word)
+INIT_FUNC(BN_new)
+INIT_FUNC(BN_cmp)
+
+INIT_FUNC(BN_free);
+
+INIT_FUNC(BN_copy);
+
+INIT_FUNC(BN_CTX_new);
+
+INIT_FUNC(BN_CTX_free);
+INIT_FUNC(BN_CTX_start);
+
+INIT_FUNC(BN_CTX_get);
+INIT_FUNC(BN_CTX_end);
+
+INIT_FUNC(BN_rand);
+INIT_FUNC(BN_mod_exp);
+
+INIT_FUNC(BN_num_bits);
+INIT_FUNC(BN_num_bits_word);
+
+INIT_FUNC(BN_bn2hex);
+INIT_FUNC(BN_bn2dec);
+
+INIT_FUNC(BN_hex2bn);
+INIT_FUNC(BN_dec2bn);
+INIT_FUNC(BN_asc2bn);
+
+INIT_FUNC(BN_bn2bin);
+INIT_FUNC(BN_bin2bn);
+
+// BIO-related functions
+
+INIT_FUNC(BIO_new);
+INIT_FUNC(BIO_free);
+
+INIT_FUNC(BIO_read);
+INIT_FUNC(BIO_write);
+INIT_FUNC(BIO_s_mem);
+
+INIT_FUNC(BIO_set_data);
+
+INIT_FUNC(BIO_get_data);
+INIT_FUNC(BIO_set_init);
+
+INIT_FUNC(BIO_set_flags);
+INIT_FUNC(BIO_test_flags);
+INIT_FUNC(BIO_clear_flags);
+
+INIT_FUNC(BIO_meth_new);
+INIT_FUNC(BIO_meth_free);
+
+INIT_FUNC(BIO_meth_set_write);
+INIT_FUNC(BIO_meth_set_read);
+INIT_FUNC(BIO_meth_set_puts);
+INIT_FUNC(BIO_meth_set_gets);
+INIT_FUNC(BIO_meth_set_ctrl);
+INIT_FUNC(BIO_meth_set_create);
+INIT_FUNC(BIO_meth_set_destroy);
+INIT_FUNC(BIO_meth_set_callback_ctrl);
+
+// SSL functions
+
+INIT_FUNC(SSL_CTX_new);
+INIT_FUNC(SSL_CTX_up_ref);
+INIT_FUNC(SSL_CTX_free);
+
+INIT_FUNC(SSL_new);
+INIT_FUNC(SSL_up_ref);
+INIT_FUNC(SSL_free);
+
+INIT_FUNC(SSL_accept);
+INIT_FUNC(SSL_stateless);
+INIT_FUNC(SSL_connect);
+INIT_FUNC(SSL_read);
+INIT_FUNC(SSL_peek);
+INIT_FUNC(SSL_write);
+INIT_FUNC(SSL_ctrl);
+INIT_FUNC(SSL_shutdown);
+INIT_FUNC(SSL_set_bio);
+
+// options are unsigned long in openssl 1.1.1, and uint64 in 3.x.x
+INIT_FUNC(SSL_CTX_set_options);
+
+INIT_FUNC(SSL_get_error);
+INIT_FUNC(SSL_CTX_load_verify_locations);
+
+INIT_FUNC(SSL_CTX_set_verify);
+INIT_FUNC(SSL_CTX_use_PrivateKey);
+
+INIT_FUNC(SSL_CTX_use_PrivateKey_file);
+INIT_FUNC(SSL_CTX_use_certificate_chain_file);
+
+INIT_FUNC(ERR_get_error);
+
+INIT_FUNC(ERR_error_string);
+
+// TLS functions
+
+INIT_FUNC(TLS_client_method);
+INIT_FUNC(TLS_server_method);
+
+// RAND functions
+
+INIT_FUNC(RAND_bytes);
+
+END_INIT_FUNCS()
+
+//////////// Define
+
+// BN functions
+
+DEFINE_FUNC(BN_value_one, 0);
+DEFINE_FUNC(BN_mod_word, 2);
+
+DEFINE_FUNC(BN_div_word, 2)
+DEFINE_FUNC(BN_mul_word, 2)
+DEFINE_FUNC(BN_add_word, 2)
+DEFINE_FUNC(BN_sub_word, 2)
+DEFINE_FUNC(BN_set_word, 2)
+DEFINE_FUNC(BN_new, 0)
+DEFINE_FUNC(BN_cmp, 2)
+
+DEFINE_FUNC(BN_free, 1);
+
+DEFINE_FUNC(BN_copy, 2);
+
+DEFINE_FUNC(BN_CTX_new, 0);
+
+DEFINE_FUNC(BN_CTX_free, 1);
+DEFINE_FUNC(BN_CTX_start, 1);
+
+DEFINE_FUNC(BN_CTX_get, 1);
+DEFINE_FUNC(BN_CTX_end, 1);
+
+DEFINE_FUNC(BN_rand, 4);
+DEFINE_FUNC(BN_mod_exp, 5);
+
+DEFINE_FUNC(BN_num_bits, 1);
+DEFINE_FUNC(BN_num_bits_word, 1);
+
+DEFINE_FUNC(BN_bn2hex, 1);
+DEFINE_FUNC(BN_bn2dec, 1);
+
+DEFINE_FUNC(BN_hex2bn, 2);
+DEFINE_FUNC(BN_dec2bn, 2);
+DEFINE_FUNC(BN_asc2bn, 2);
+
+DEFINE_FUNC(BN_bn2bin, 2);
+DEFINE_FUNC(BN_bin2bn, 3);
+
+// BIO-related functions
+
+DEFINE_FUNC(BIO_new, 1);
+DEFINE_FUNC(BIO_free, 1);
+
+DEFINE_FUNC(BIO_read, 3, -1);
+DEFINE_FUNC(BIO_write, 3, -1);
+DEFINE_FUNC(BIO_s_mem, 0);
+
+DEFINE_FUNC(BIO_set_data, 2);
+
+DEFINE_FUNC(BIO_get_data, 1);
+DEFINE_FUNC(BIO_set_init, 2);
+
+DEFINE_FUNC(BIO_set_flags, 2);
+DEFINE_FUNC(BIO_test_flags, 2);
+DEFINE_FUNC(BIO_clear_flags, 2);
+
+DEFINE_FUNC(BIO_meth_new, 2);
+DEFINE_FUNC(BIO_meth_free, 1);
+
+DEFINE_FUNC(BIO_meth_set_write, 2);
+DEFINE_FUNC(BIO_meth_set_read, 2);
+DEFINE_FUNC(BIO_meth_set_puts, 2);
+DEFINE_FUNC(BIO_meth_set_gets, 2);
+DEFINE_FUNC(BIO_meth_set_ctrl, 2);
+DEFINE_FUNC(BIO_meth_set_create, 2);
+DEFINE_FUNC(BIO_meth_set_destroy, 2);
+DEFINE_FUNC(BIO_meth_set_callback_ctrl, 2);
+
+// SSL functions
+
+DEFINE_FUNC(SSL_CTX_new, 1);
+DEFINE_FUNC(SSL_CTX_up_ref, 1);
+DEFINE_FUNC(SSL_CTX_free, 1);
+
+DEFINE_FUNC(SSL_new, 1);
+DEFINE_FUNC(SSL_up_ref, 1);
+DEFINE_FUNC(SSL_free, 1);
+
+DEFINE_FUNC(SSL_accept, 1);
+DEFINE_FUNC(SSL_stateless, 1);
+DEFINE_FUNC(SSL_connect, 1);
+DEFINE_FUNC(SSL_read, 3, -1);
+DEFINE_FUNC(SSL_peek, 3);
+DEFINE_FUNC(SSL_write, 3, -1);
+DEFINE_FUNC(SSL_ctrl, 4);
+DEFINE_FUNC(SSL_shutdown, 1);
+DEFINE_FUNC(SSL_set_bio, 3);
+
+// options are unsigned long in openssl 1.1.1, and uint64 in 3.x.x
+DEFINE_FUNC(SSL_CTX_set_options, 2);
+
+DEFINE_FUNC(SSL_get_error, 2);
+DEFINE_FUNC(SSL_CTX_load_verify_locations, 3, -1);
+
+DEFINE_FUNC(SSL_CTX_set_verify, 3);
+DEFINE_FUNC(SSL_CTX_use_PrivateKey, 2);
+
+DEFINE_FUNC(SSL_CTX_use_PrivateKey_file, 3);
+DEFINE_FUNC(SSL_CTX_use_certificate_chain_file, 2);
+
+DEFINE_FUNC(ERR_get_error, 0);
+
+static char ErrorString[] = "Ssl not found";
+DEFINE_FUNC(ERR_error_string, 2, ErrorString);
+
+// TLS functions
+
+DEFINE_FUNC(TLS_client_method, 0);
+DEFINE_FUNC(TLS_server_method, 0);
+
+// RAND functions
+
+DEFINE_FUNC(RAND_bytes, 2);
diff --git a/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-drm.cpp b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-drm.cpp
new file mode 100644
index 000000000..655a6b2b6
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-drm.cpp
@@ -0,0 +1,14 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QtMultimedia/private/qsymbolsresolveutils_p.h>
+
+#include <va/va_drm.h>
+
+CHECK_VERSIONS("va-drm", VA_DRM_NEEDED_SOVERSION, VA_MAJOR_VERSION + 1);
+
+BEGIN_INIT_FUNCS("va-drm", VA_DRM_NEEDED_SOVERSION)
+INIT_FUNC(vaGetDisplayDRM)
+END_INIT_FUNCS()
+
+DEFINE_FUNC(vaGetDisplayDRM, 1);
diff --git a/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-x11.cpp b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-x11.cpp
new file mode 100644
index 000000000..3bada9e69
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-x11.cpp
@@ -0,0 +1,14 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QtMultimedia/private/qsymbolsresolveutils_p.h>
+
+#include <va/va_x11.h>
+
+CHECK_VERSIONS("va-x11", VA_X11_NEEDED_SOVERSION, VA_MAJOR_VERSION + 1);
+
+BEGIN_INIT_FUNCS("va-x11", VA_X11_NEEDED_SOVERSION)
+INIT_FUNC(vaGetDisplay)
+END_INIT_FUNCS()
+
+DEFINE_FUNC(vaGetDisplay, 1);
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegvaapisymbols.cpp b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va.cpp
index 58bf4dce7..cfd2e5686 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegvaapisymbols.cpp
+++ b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va.cpp
@@ -1,64 +1,96 @@
-// Copyright (C) 2023 The Qt Company Ltd.
+// Copyright (C) 2024 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-#include <QtCore/qlibrary.h>
-
-#include "qffmpegsymbolsresolveutils_p.h"
-
-#include <QtCore/qglobal.h>
-#include <qstringliteral.h>
+#include <QtMultimedia/private/qsymbolsresolveutils_p.h>
#include <va/va.h>
-#ifdef DYNAMIC_RESOLVE_VA_DRM_SYMBOLS
-#include <va/va_drm.h>
-#endif
-#ifdef DYNAMIC_RESOLVE_VA_X11_SYMBOLS
-#include <va/va_x11.h>
-#endif
#include <va/va_str.h>
-QT_BEGIN_NAMESPACE
-
-static Libs loadLibs()
-{
- Libs libs;
- libs.push_back(std::make_unique<QLibrary>("va"));
-#ifdef DYNAMIC_RESOLVE_VA_DRM_SYMBOLS
- libs.push_back(std::make_unique<QLibrary>("va-drm"));
+// VAAPI generates the actual *.so name according to the rule:
+// https://github.com/intel/libva/blob/master/configure.ac
+//
+// The library name is generated as libva.<x>.<y>.0, where
+// <x> = VA-API major version + 1
+// <y> = 100 * VA-API minor version + VA-API micro version
+CHECK_VERSIONS("va", VA_NEEDED_SOVERSION, VA_MAJOR_VERSION + 1);
+
+#ifdef Q_FFMPEG_PLUGIN_STUBS_ONLY
+constexpr const char *loggingName = "va(in plugin)";
+#else
+constexpr const char *loggingName = nullptr;
#endif
-#ifdef DYNAMIC_RESOLVE_VA_X11_SYMBOLS
- libs.push_back(std::make_unique<QLibrary>("va-x11"));
-#endif
+BEGIN_INIT_FUNCS("va", VA_NEEDED_SOVERSION, loggingName)
+
+
+INIT_FUNC(vaExportSurfaceHandle);
+INIT_FUNC(vaSyncSurface);
+INIT_FUNC(vaQueryVendorString);
- if (LibSymbolsResolver::tryLoad(libs))
- return libs;
+#ifndef Q_FFMPEG_PLUGIN_STUBS_ONLY
- return {};
-}
+INIT_FUNC(vaInitialize);
+INIT_FUNC(vaTerminate);
+INIT_FUNC(vaErrorStr);
+INIT_FUNC(vaSetErrorCallback);
+INIT_FUNC(vaSetInfoCallback);
-constexpr size_t symbolsCount = 38
+INIT_FUNC(vaCreateImage);
+INIT_FUNC(vaGetImage);
+INIT_FUNC(vaPutImage);
+INIT_FUNC(vaDeriveImage);
+INIT_FUNC(vaDestroyImage);
+INIT_FUNC(vaQueryImageFormats);
+
+INIT_FUNC(vaBeginPicture);
+INIT_FUNC(vaRenderPicture);
+INIT_FUNC(vaEndPicture);
+
+INIT_FUNC(vaCreateBuffer);
+INIT_FUNC(vaMapBuffer);
+INIT_FUNC(vaUnmapBuffer);
#if VA_CHECK_VERSION(1, 9, 0)
- + 1
+INIT_FUNC(vaSyncBuffer);
#endif
-#ifdef DYNAMIC_RESOLVE_VA_DRM_SYMBOLS
- + 1
-#endif
-#ifdef DYNAMIC_RESOLVE_VA_X11_SYMBOLS
- + 1
+INIT_FUNC(vaDestroyBuffer);
+
+INIT_FUNC(vaCreateSurfaces);
+INIT_FUNC(vaDestroySurfaces);
+
+INIT_FUNC(vaCreateConfig);
+INIT_FUNC(vaGetConfigAttributes);
+INIT_FUNC(vaMaxNumProfiles);
+INIT_FUNC(vaMaxNumImageFormats);
+INIT_FUNC(vaMaxNumEntrypoints);
+INIT_FUNC(vaQueryConfigProfiles);
+INIT_FUNC(vaQueryConfigEntrypoints);
+INIT_FUNC(vaQuerySurfaceAttributes);
+INIT_FUNC(vaDestroyConfig);
+
+INIT_FUNC(vaCreateContext);
+INIT_FUNC(vaDestroyContext);
+
+INIT_FUNC(vaProfileStr);
+INIT_FUNC(vaEntrypointStr);
+
+INIT_FUNC(vaGetDisplayAttributes);
+
+INIT_FUNC(vaSetDriverName);
+
+INIT_FUNC(vaAcquireBufferHandle);
+INIT_FUNC(vaReleaseBufferHandle);
+
#endif
- ;
-Q_GLOBAL_STATIC(LibSymbolsResolver, resolver, "VAAPI", symbolsCount, loadLibs);
+END_INIT_FUNCS()
-void resolveVAAPI()
-{
- resolver()->resolve();
-}
+constexpr auto emptyString = "";
-QT_END_NAMESPACE
+DEFINE_FUNC(vaExportSurfaceHandle, 5, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaSyncSurface, 2, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaQueryVendorString, 1, emptyString);
-QT_USE_NAMESPACE
+#ifndef Q_FFMPEG_PLUGIN_STUBS_ONLY
DEFINE_FUNC(vaInitialize, 3, VA_STATUS_ERROR_OPERATION_FAILED);
DEFINE_FUNC(vaTerminate, 1, VA_STATUS_ERROR_OPERATION_FAILED);
@@ -88,8 +120,6 @@ DEFINE_FUNC(vaSyncBuffer, 3, VA_STATUS_ERROR_OPERATION_FAILED);
DEFINE_FUNC(vaDestroyBuffer, 2, VA_STATUS_ERROR_OPERATION_FAILED);
DEFINE_FUNC(vaCreateSurfaces, 8, VA_STATUS_ERROR_OPERATION_FAILED);
-DEFINE_FUNC(vaSyncSurface, 2, VA_STATUS_ERROR_OPERATION_FAILED);
-DEFINE_FUNC(vaExportSurfaceHandle, 5, VA_STATUS_ERROR_OPERATION_FAILED);
DEFINE_FUNC(vaDestroySurfaces, 3, VA_STATUS_ERROR_OPERATION_FAILED);
DEFINE_FUNC(vaCreateConfig, 6, VA_STATUS_ERROR_OPERATION_FAILED);
@@ -105,8 +135,7 @@ DEFINE_FUNC(vaDestroyConfig, 2, VA_STATUS_ERROR_OPERATION_FAILED);
DEFINE_FUNC(vaCreateContext, 8);
DEFINE_FUNC(vaDestroyContext, 2);
-constexpr auto emptyString = "";
-DEFINE_FUNC(vaQueryVendorString, 1, emptyString);
+
DEFINE_FUNC(vaProfileStr, 1, emptyString);
DEFINE_FUNC(vaEntrypointStr, 1, emptyString);
@@ -114,10 +143,8 @@ DEFINE_FUNC(vaGetDisplayAttributes, 3, VA_STATUS_ERROR_OPERATION_FAILED);
DEFINE_FUNC(vaSetDriverName, 2, VA_STATUS_ERROR_OPERATION_FAILED);
-#ifdef DYNAMIC_RESOLVE_VA_DRM_SYMBOLS
-DEFINE_FUNC(vaGetDisplayDRM, 1); // va-drm
-#endif
+DEFINE_FUNC(vaAcquireBufferHandle, 3, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaReleaseBufferHandle, 2, VA_STATUS_ERROR_OPERATION_FAILED);
-#ifdef DYNAMIC_RESOLVE_VA_X11_SYMBOLS
-DEFINE_FUNC(vaGetDisplay, 1); // va-x11
#endif
+
diff --git a/src/plugins/multimedia/ffmpeg/symbolstubs/va.ver b/src/plugins/multimedia/ffmpeg/symbolstubs/va.ver
new file mode 100644
index 000000000..80c9a6dc0
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/symbolstubs/va.ver
@@ -0,0 +1,7 @@
+# Copyright (C) 2024 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+VA_API_0.33.0 {
+ global:
+ vaCreateSurfaces;
+};
diff --git a/src/plugins/multimedia/gstreamer/CMakeLists.txt b/src/plugins/multimedia/gstreamer/CMakeLists.txt
index 91fca82a3..1ef1f9a36 100644
--- a/src/plugins/multimedia/gstreamer/CMakeLists.txt
+++ b/src/plugins/multimedia/gstreamer/CMakeLists.txt
@@ -8,8 +8,6 @@ qt_internal_add_module(QGstreamerMediaPluginPrivate
INTERNAL_MODULE
SOURCES
audio/qgstreameraudiodevice.cpp audio/qgstreameraudiodevice_p.h
- audio/qgstreameraudiosource.cpp audio/qgstreameraudiosource_p.h
- audio/qgstreameraudiosink.cpp audio/qgstreameraudiosink_p.h
audio/qgstreameraudiodecoder.cpp audio/qgstreameraudiodecoder_p.h
common/qglist_helper_p.h
common/qgst.cpp common/qgst_p.h
@@ -52,13 +50,14 @@ qt_internal_add_module(QGstreamerMediaPluginPrivate
)
qt_internal_extend_target(QGstreamerMediaPluginPrivate CONDITION QT_FEATURE_gstreamer_photography
- LIBRARIES
+ PUBLIC_LIBRARIES
GStreamer::Photography
)
qt_internal_extend_target(QGstreamerMediaPluginPrivate CONDITION QT_FEATURE_gstreamer_gl
- LIBRARIES
+ PUBLIC_LIBRARIES
GStreamer::Gl
+ LIBRARIES
EGL::EGL
)
diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder.cpp b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder.cpp
index 240c69b5b..513ab8dae 100644
--- a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder.cpp
+++ b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder.cpp
@@ -21,8 +21,6 @@
#include <QtCore/qurl.h>
#include <QtCore/qloggingcategory.h>
-#define MAX_BUFFERS_IN_QUEUE 4
-
QT_BEGIN_NAMESPACE
static Q_LOGGING_CATEGORY(qLcGstreamerAudioDecoder, "qt.multimedia.gstreameraudiodecoder");
@@ -42,23 +40,22 @@ typedef enum {
QMaybe<QPlatformAudioDecoder *> QGstreamerAudioDecoder::create(QAudioDecoder *parent)
{
- QGstElement audioconvert = QGstElement::createFromFactory("audioconvert", "audioconvert");
- if (!audioconvert)
- return errorMessageCannotFindElement("audioconvert");
-
- QGstPipeline playbin = QGstPipeline::adopt(
- GST_PIPELINE_CAST(QGstElement::createFromFactory("playbin", "playbin").element()));
- if (!playbin)
- return errorMessageCannotFindElement("playbin");
+ static const auto error = qGstErrorMessageIfElementsNotAvailable("audioconvert", "playbin");
+ if (error)
+ return *error;
- return new QGstreamerAudioDecoder(playbin, audioconvert, parent);
+ return new QGstreamerAudioDecoder(parent);
}
-QGstreamerAudioDecoder::QGstreamerAudioDecoder(QGstPipeline playbin, QGstElement audioconvert,
- QAudioDecoder *parent)
+QGstreamerAudioDecoder::QGstreamerAudioDecoder(QAudioDecoder *parent)
: QPlatformAudioDecoder(parent),
- m_playbin(std::move(playbin)),
- m_audioConvert(std::move(audioconvert))
+ m_playbin{
+ QGstPipeline::adopt(GST_PIPELINE_CAST(
+ QGstElement::createFromFactory("playbin", "playbin").element())),
+ },
+ m_audioConvert{
+ QGstElement::createFromFactory("audioconvert", "audioconvert"),
+ }
{
// Sort out messages
m_playbin.installMessageFilter(this);
@@ -73,8 +70,9 @@ QGstreamerAudioDecoder::QGstreamerAudioDecoder(QGstPipeline playbin, QGstElement
m_outputBin.addGhostPad(m_audioConvert, "sink");
g_object_set(m_playbin.object(), "audio-sink", m_outputBin.element(), NULL);
- g_signal_connect(m_playbin.object(), "deep-notify::source",
- (GCallback)&QGstreamerAudioDecoder::configureAppSrcElement, (gpointer)this);
+
+ m_deepNotifySourceConnection = m_playbin.connect(
+ "deep-notify::source", (GCallback)&configureAppSrcElement, (gpointer)this);
// Set volume to 100%
gdouble volume = 1.0;
@@ -115,63 +113,24 @@ void QGstreamerAudioDecoder::configureAppSrcElement([[maybe_unused]] GObject *ob
bool QGstreamerAudioDecoder::processBusMessage(const QGstreamerMessage &message)
{
- if (message.isNull())
- return false;
-
- constexpr bool extendedMessageTracing = false;
-
qCDebug(qLcGstreamerAudioDecoder) << "received bus message:" << message;
GstMessage *gm = message.message();
- if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_DURATION) {
+ switch (message.type()) {
+ case GST_MESSAGE_DURATION: {
updateDuration();
- } else if (GST_MESSAGE_SRC(gm) == m_playbin.object()) {
- switch (GST_MESSAGE_TYPE(gm)) {
- case GST_MESSAGE_STATE_CHANGED: {
- GstState oldState;
- GstState newState;
- GstState pending;
-
- gst_message_parse_state_changed(gm, &oldState, &newState, &pending);
-
- if constexpr (extendedMessageTracing)
- qCDebug(qLcGstreamerAudioDecoder) << " state changed message from" << oldState
- << "to" << newState << pending;
-
- bool isDecoding = false;
- switch (newState) {
- case GST_STATE_VOID_PENDING:
- case GST_STATE_NULL:
- case GST_STATE_READY:
- break;
- case GST_STATE_PLAYING:
- isDecoding = true;
- break;
- case GST_STATE_PAUSED:
- isDecoding = true;
-
- // gstreamer doesn't give a reliable indication the duration
- // information is ready, GST_MESSAGE_DURATION is not sent by most elements
- // the duration is queried up to 5 times with increasing delay
- m_durationQueries = 5;
- updateDuration();
- break;
- }
+ return false;
+ }
- setIsDecoding(isDecoding);
- break;
- };
+ case GST_MESSAGE_ERROR: {
+ qCDebug(qLcGstreamerAudioDecoder) << " error" << QCompactGstMessageAdaptor(message);
- case GST_MESSAGE_EOS:
- m_playbin.setState(GST_STATE_NULL);
- finished();
- break;
+ QUniqueGErrorHandle err;
+ QGString debug;
+ gst_message_parse_error(gm, &err, &debug);
- case GST_MESSAGE_ERROR: {
- QUniqueGErrorHandle err;
- QGString debug;
- gst_message_parse_error(gm, &err, &debug);
+ if (message.source() == m_playbin) {
if (err.get()->domain == GST_STREAM_ERROR
&& err.get()->code == GST_STREAM_ERROR_CODEC_NOT_FOUND)
processInvalidMedia(QAudioDecoder::FormatError,
@@ -179,63 +138,103 @@ bool QGstreamerAudioDecoder::processBusMessage(const QGstreamerMessage &message)
else
processInvalidMedia(QAudioDecoder::ResourceError,
QString::fromUtf8(err.get()->message));
- qCWarning(qLcGstreamerAudioDecoder) << "Error:" << err;
- break;
+ } else {
+ QAudioDecoder::Error qerror = QAudioDecoder::ResourceError;
+ if (err.get()->domain == GST_STREAM_ERROR) {
+ switch (err.get()->code) {
+ case GST_STREAM_ERROR_DECRYPT:
+ case GST_STREAM_ERROR_DECRYPT_NOKEY:
+ qerror = QAudioDecoder::AccessDeniedError;
+ break;
+ case GST_STREAM_ERROR_FORMAT:
+ case GST_STREAM_ERROR_DEMUX:
+ case GST_STREAM_ERROR_DECODE:
+ case GST_STREAM_ERROR_WRONG_TYPE:
+ case GST_STREAM_ERROR_TYPE_NOT_FOUND:
+ case GST_STREAM_ERROR_CODEC_NOT_FOUND:
+ qerror = QAudioDecoder::FormatError;
+ break;
+ default:
+ break;
+ }
+ } else if (err.get()->domain == GST_CORE_ERROR) {
+ switch (err.get()->code) {
+ case GST_CORE_ERROR_MISSING_PLUGIN:
+ qerror = QAudioDecoder::FormatError;
+ break;
+ default:
+ break;
+ }
+ }
+
+ processInvalidMedia(qerror, QString::fromUtf8(err.get()->message));
}
- case GST_MESSAGE_WARNING: {
- QUniqueGErrorHandle err;
- QGString debug;
- gst_message_parse_warning(gm, &err, &debug);
- qCWarning(qLcGstreamerAudioDecoder) << "Warning:" << err;
+ break;
+ }
+
+ default:
+ if (message.source() == m_playbin)
+ return handlePlaybinMessage(message);
+ }
+
+ return false;
+}
+
+bool QGstreamerAudioDecoder::handlePlaybinMessage(const QGstreamerMessage &message)
+{
+ GstMessage *gm = message.message();
+
+ switch (GST_MESSAGE_TYPE(gm)) {
+ case GST_MESSAGE_STATE_CHANGED: {
+ GstState oldState;
+ GstState newState;
+ GstState pending;
+
+ gst_message_parse_state_changed(gm, &oldState, &newState, &pending);
+
+ bool isDecoding = false;
+ switch (newState) {
+ case GST_STATE_VOID_PENDING:
+ case GST_STATE_NULL:
+ case GST_STATE_READY:
break;
- }
- case GST_MESSAGE_INFO: {
- if (qLcGstreamerAudioDecoder().isDebugEnabled()) {
- QUniqueGErrorHandle err;
- QGString debug;
- gst_message_parse_info(gm, &err, &debug);
- qDebug() << "Info:" << err;
- }
+ case GST_STATE_PLAYING:
+ isDecoding = true;
break;
- }
- default:
+ case GST_STATE_PAUSED:
+ isDecoding = true;
+
+ // gstreamer doesn't give a reliable indication that the duration
+ // information is ready; GST_MESSAGE_DURATION is not sent by most elements,
+ // so the duration is queried up to 5 times with increasing delay
+ m_durationQueries = 5;
+ updateDuration();
break;
}
- } else if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ERROR) {
- QUniqueGErrorHandle err;
- QGString debug;
- gst_message_parse_error(gm, &err, &debug);
- qCDebug(qLcGstreamerAudioDecoder) << " error" << err << debug;
-
- QAudioDecoder::Error qerror = QAudioDecoder::ResourceError;
- if (err.get()->domain == GST_STREAM_ERROR) {
- switch (err.get()->code) {
- case GST_STREAM_ERROR_DECRYPT:
- case GST_STREAM_ERROR_DECRYPT_NOKEY:
- qerror = QAudioDecoder::AccessDeniedError;
- break;
- case GST_STREAM_ERROR_FORMAT:
- case GST_STREAM_ERROR_DEMUX:
- case GST_STREAM_ERROR_DECODE:
- case GST_STREAM_ERROR_WRONG_TYPE:
- case GST_STREAM_ERROR_TYPE_NOT_FOUND:
- case GST_STREAM_ERROR_CODEC_NOT_FOUND:
- qerror = QAudioDecoder::FormatError;
- break;
- default:
- break;
- }
- } else if (err.get()->domain == GST_CORE_ERROR) {
- switch (err.get()->code) {
- case GST_CORE_ERROR_MISSING_PLUGIN:
- qerror = QAudioDecoder::FormatError;
- break;
- default:
- break;
- }
- }
- processInvalidMedia(qerror, QString::fromUtf8(err.get()->message));
+ setIsDecoding(isDecoding);
+ break;
+ };
+
+ case GST_MESSAGE_EOS:
+ m_playbin.setState(GST_STATE_NULL);
+ finished();
+ break;
+
+ case GST_MESSAGE_ERROR:
+ Q_UNREACHABLE_RETURN(false); // handled in processBusMessage
+
+ case GST_MESSAGE_WARNING:
+ qCWarning(qLcGstreamerAudioDecoder) << "Warning:" << QCompactGstMessageAdaptor(message);
+ break;
+
+ case GST_MESSAGE_INFO: {
+ if (qLcGstreamerAudioDecoder().isDebugEnabled())
+ qCWarning(qLcGstreamerAudioDecoder) << "Info:" << QCompactGstMessageAdaptor(message);
+ break;
+ }
+ default:
+ break;
}
return false;
@@ -325,6 +324,7 @@ void QGstreamerAudioDecoder::start()
void QGstreamerAudioDecoder::stop()
{
m_playbin.setState(GST_STATE_NULL);
+ m_currentSessionId += 1;
removeAppSink();
// GStreamer thread is stopped. Can safely access m_buffersAvailable
@@ -333,14 +333,14 @@ void QGstreamerAudioDecoder::stop()
bufferAvailableChanged(false);
}
- if (m_position != -1) {
- m_position = -1;
- positionChanged(m_position);
+ if (m_position != invalidPosition) {
+ m_position = invalidPosition;
+ positionChanged(m_position.count());
}
- if (m_duration != -1) {
- m_duration = -1;
- durationChanged(m_duration);
+ if (m_duration != invalidDuration) {
+ m_duration = invalidDuration;
+ durationChanged(m_duration.count());
}
setIsDecoding(false);
@@ -361,65 +361,54 @@ void QGstreamerAudioDecoder::setAudioFormat(const QAudioFormat &format)
QAudioBuffer QGstreamerAudioDecoder::read()
{
+ using namespace std::chrono;
+
QAudioBuffer audioBuffer;
- int buffersAvailable;
- {
- QMutexLocker locker(&m_buffersMutex);
- buffersAvailable = m_buffersAvailable;
+ if (m_buffersAvailable == 0)
+ return audioBuffer;
- // need to decrement before pulling a buffer
- // to make sure assert in QGstreamerAudioDecoderControl::new_buffer works
- m_buffersAvailable--;
- }
+ m_buffersAvailable -= 1;
+ if (m_buffersAvailable == 0)
+ bufferAvailableChanged(false);
- if (buffersAvailable) {
- if (buffersAvailable == 1)
- bufferAvailableChanged(false);
-
- const char* bufferData = nullptr;
- int bufferSize = 0;
-
- QGstSampleHandle sample = m_appSink.pullSample();
- GstBuffer *buffer = gst_sample_get_buffer(sample.get());
- GstMapInfo mapInfo;
- gst_buffer_map(buffer, &mapInfo, GST_MAP_READ);
- bufferData = (const char*)mapInfo.data;
- bufferSize = mapInfo.size;
- QAudioFormat format = QGstUtils::audioFormatForSample(sample.get());
-
- if (format.isValid()) {
- // XXX At the moment we have to copy data from GstBuffer into QAudioBuffer.
- // We could improve performance by implementing QAbstractAudioBuffer for GstBuffer.
- qint64 position = getPositionFromBuffer(buffer);
- audioBuffer = QAudioBuffer(QByteArray((const char*)bufferData, bufferSize), format, position);
- position /= 1000; // convert to milliseconds
- if (position != m_position) {
- m_position = position;
- positionChanged(m_position);
- }
+ QGstSampleHandle sample = m_appSink.pullSample();
+ GstBuffer *buffer = gst_sample_get_buffer(sample.get());
+ GstMapInfo mapInfo;
+ gst_buffer_map(buffer, &mapInfo, GST_MAP_READ);
+ const char *bufferData = (const char *)mapInfo.data;
+ int bufferSize = mapInfo.size;
+ QAudioFormat format = QGstUtils::audioFormatForSample(sample.get());
+
+ if (format.isValid()) {
+ // XXX At the moment we have to copy data from GstBuffer into QAudioBuffer.
+ // We could improve performance by implementing QAbstractAudioBuffer for GstBuffer.
+ nanoseconds position = getPositionFromBuffer(buffer);
+ audioBuffer = QAudioBuffer{
+ QByteArray(bufferData, bufferSize),
+ format,
+ round<microseconds>(position).count(),
+ };
+ milliseconds positionInMs = round<milliseconds>(position);
+ if (position != m_position) {
+ m_position = positionInMs;
+ positionChanged(m_position.count());
}
- gst_buffer_unmap(buffer, &mapInfo);
}
+ gst_buffer_unmap(buffer, &mapInfo);
return audioBuffer;
}
-bool QGstreamerAudioDecoder::bufferAvailable() const
-{
- QMutexLocker locker(&m_buffersMutex);
- return m_buffersAvailable > 0;
-}
-
qint64 QGstreamerAudioDecoder::position() const
{
- return m_position;
+ return m_position.count();
}
qint64 QGstreamerAudioDecoder::duration() const
{
- return m_duration;
+ return m_duration.count();
}
void QGstreamerAudioDecoder::processInvalidMedia(QAudioDecoder::Error errorCode, const QString& errorString)
@@ -428,30 +417,30 @@ void QGstreamerAudioDecoder::processInvalidMedia(QAudioDecoder::Error errorCode,
error(int(errorCode), errorString);
}
-GstFlowReturn QGstreamerAudioDecoder::new_sample(GstAppSink *, gpointer user_data)
+GstFlowReturn QGstreamerAudioDecoder::newSample(GstAppSink *)
{
- qCDebug(qLcGstreamerAudioDecoder) << "QGstreamerAudioDecoder::new_sample";
-
// "Note that the preroll buffer will also be returned as the first buffer when calling
// gst_app_sink_pull_buffer()."
- QGstreamerAudioDecoder *decoder = reinterpret_cast<QGstreamerAudioDecoder*>(user_data);
-
- int buffersAvailable;
- {
- QMutexLocker locker(&decoder->m_buffersMutex);
- buffersAvailable = decoder->m_buffersAvailable;
- decoder->m_buffersAvailable++;
- Q_ASSERT(decoder->m_buffersAvailable <= MAX_BUFFERS_IN_QUEUE);
- }
- qCDebug(qLcGstreamerAudioDecoder) << "QGstreamerAudioDecoder::new_sample" << buffersAvailable;
+ QMetaObject::invokeMethod(this, [this, sessionId = m_currentSessionId] {
+ if (sessionId != m_currentSessionId)
+ return; // stop()ed before message is executed
+
+ m_buffersAvailable += 1;
+ bufferAvailableChanged(true);
+ bufferReady();
+ });
- if (!buffersAvailable)
- decoder->bufferAvailableChanged(true);
- decoder->bufferReady();
return GST_FLOW_OK;
}
+GstFlowReturn QGstreamerAudioDecoder::new_sample(GstAppSink *sink, gpointer user_data)
+{
+ QGstreamerAudioDecoder *decoder = reinterpret_cast<QGstreamerAudioDecoder *>(user_data);
+ qCDebug(qLcGstreamerAudioDecoder) << "QGstreamerAudioDecoder::new_sample";
+ return decoder->newSample(sink);
+}
+
void QGstreamerAudioDecoder::setAudioFlags(bool wantNativeAudio)
{
int flags = m_playbin.getInt("flags");
@@ -466,6 +455,8 @@ void QGstreamerAudioDecoder::setAudioFlags(bool wantNativeAudio)
void QGstreamerAudioDecoder::addAppSink()
{
+ using namespace std::chrono_literals;
+
if (m_appSink)
return;
@@ -474,8 +465,17 @@ void QGstreamerAudioDecoder::addAppSink()
GstAppSinkCallbacks callbacks{};
callbacks.new_sample = new_sample;
m_appSink.setCallbacks(callbacks, this, nullptr);
- gst_app_sink_set_max_buffers(m_appSink.appSink(), MAX_BUFFERS_IN_QUEUE);
- gst_base_sink_set_sync(m_appSink.baseSink(), FALSE);
+
+#if GST_CHECK_VERSION(1, 24, 0)
+ static constexpr auto maxBufferTime = 500ms;
+ m_appSink.setMaxBufferTime(maxBufferTime);
+#else
+ static constexpr int maxBuffers = 16;
+ m_appSink.setMaxBuffers(maxBuffers);
+#endif
+
+ static constexpr bool sync = false;
+ m_appSink.setSync(sync);
QGstPipeline::modifyPipelineWhileNotRunning(m_playbin.getPipeline(), [&] {
m_outputBin.add(m_appSink);
@@ -499,32 +499,33 @@ void QGstreamerAudioDecoder::removeAppSink()
void QGstreamerAudioDecoder::updateDuration()
{
- int duration = m_playbin.duration() / 1000000;
+ std::chrono::milliseconds duration = m_playbin.durationInMs();
if (m_duration != duration) {
m_duration = duration;
- durationChanged(m_duration);
+ durationChanged(m_duration.count());
}
- if (m_duration > 0)
+ if (m_duration.count() > 0)
m_durationQueries = 0;
if (m_durationQueries > 0) {
//increase delay between duration requests
int delay = 25 << (5 - m_durationQueries);
- QTimer::singleShot(delay, this, SLOT(updateDuration()));
+ QTimer::singleShot(delay, this, &QGstreamerAudioDecoder::updateDuration);
m_durationQueries--;
}
}
-qint64 QGstreamerAudioDecoder::getPositionFromBuffer(GstBuffer* buffer)
+std::chrono::nanoseconds QGstreamerAudioDecoder::getPositionFromBuffer(GstBuffer *buffer)
{
- qint64 position = GST_BUFFER_TIMESTAMP(buffer);
- if (position >= 0)
- position = position / G_GINT64_CONSTANT(1000); // microseconds
+ using namespace std::chrono;
+ using namespace std::chrono_literals;
+ nanoseconds position{ GST_BUFFER_TIMESTAMP(buffer) };
+ if (position >= 0ns)
+ return position;
else
- position = -1;
- return position;
+ return invalidPosition;
}
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder_p.h b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder_p.h
index c45e9f309..a5e192a38 100644
--- a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder_p.h
+++ b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder_p.h
@@ -57,7 +57,6 @@ public:
void setAudioFormat(const QAudioFormat &format) override;
QAudioBuffer read() override;
- bool bufferAvailable() const override;
qint64 position() const override;
qint64 duration() const override;
@@ -69,10 +68,12 @@ private slots:
void updateDuration();
private:
- QGstreamerAudioDecoder(QGstPipeline playbin, QGstElement audioconvert, QAudioDecoder *parent);
+ explicit QGstreamerAudioDecoder(QAudioDecoder *parent);
#if QT_CONFIG(gstreamer_app)
static GstFlowReturn new_sample(GstAppSink *sink, gpointer user_data);
+ GstFlowReturn newSample(GstAppSink *sink);
+
static void configureAppSrcElement(GObject *, GObject *, GParamSpec *,
QGstreamerAudioDecoder *_this);
#endif
@@ -81,8 +82,10 @@ private:
void addAppSink();
void removeAppSink();
- void processInvalidMedia(QAudioDecoder::Error errorCode, const QString& errorString);
- static qint64 getPositionFromBuffer(GstBuffer* buffer);
+ bool handlePlaybinMessage(const QGstreamerMessage &);
+
+ void processInvalidMedia(QAudioDecoder::Error errorCode, const QString &errorString);
+ static std::chrono::nanoseconds getPositionFromBuffer(GstBuffer *buffer);
QGstPipeline m_playbin;
QGstBin m_outputBin;
@@ -94,13 +97,18 @@ private:
QIODevice *mDevice = nullptr;
QAudioFormat mFormat;
- mutable QMutex m_buffersMutex;
int m_buffersAvailable = 0;
- qint64 m_position = -1;
- qint64 m_duration = -1;
+ static constexpr auto invalidDuration = std::chrono::milliseconds{ -1 };
+ static constexpr auto invalidPosition = std::chrono::milliseconds{ -1 };
+ std::chrono::milliseconds m_position{ invalidPosition };
+ std::chrono::milliseconds m_duration{ invalidDuration };
int m_durationQueries = 0;
+
+ qint32 m_currentSessionId{};
+
+ QGObjectHandlerScopedConnection m_deepNotifySourceConnection;
};
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice.cpp b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice.cpp
index 2c6b57e55..dc6975030 100644
--- a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice.cpp
+++ b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice.cpp
@@ -49,6 +49,29 @@ QGStreamerAudioDeviceInfo::QGStreamerAudioDeviceInfo(GstDevice *d, const QByteAr
preferredFormat.setSampleFormat(f);
}
-QGStreamerAudioDeviceInfo::~QGStreamerAudioDeviceInfo() = default;
+QGStreamerCustomAudioDeviceInfo::QGStreamerCustomAudioDeviceInfo(
+ const QByteArray &gstreamerPipeline, QAudioDevice::Mode mode)
+ : QAudioDevicePrivate{
+ gstreamerPipeline,
+ mode,
+ }
+{
+}
+
+QAudioDevice qMakeCustomGStreamerAudioInput(const QByteArray &gstreamerPipeline)
+{
+ auto deviceInfo = std::make_unique<QGStreamerCustomAudioDeviceInfo>(gstreamerPipeline,
+ QAudioDevice::Mode::Input);
+
+ return deviceInfo.release()->create();
+}
+
+QAudioDevice qMakeCustomGStreamerAudioOutput(const QByteArray &gstreamerPipeline)
+{
+ auto deviceInfo = std::make_unique<QGStreamerCustomAudioDeviceInfo>(gstreamerPipeline,
+ QAudioDevice::Mode::Output);
+
+ return deviceInfo.release()->create();
+}
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice_p.h b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice_p.h
index dee0c40bc..34d25bceb 100644
--- a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice_p.h
+++ b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice_p.h
@@ -19,11 +19,11 @@
#include <QtCore/qstringlist.h>
#include <QtCore/qlist.h>
-#include "qaudio.h"
-#include "qaudiodevice.h"
-#include <private/qaudiodevice_p.h>
+#include <QtMultimedia/qaudio.h>
+#include <QtMultimedia/qaudiodevice.h>
+#include <QtMultimedia/private/qaudiodevice_p.h>
-#include <common/qgst_handle_types_p.h>
+#include <QtQGstreamerMediaPlugin/private/qgst_handle_types_p.h>
#include <gst/gst.h>
@@ -33,11 +33,19 @@ class QGStreamerAudioDeviceInfo : public QAudioDevicePrivate
{
public:
QGStreamerAudioDeviceInfo(GstDevice *gstDevice, const QByteArray &device, QAudioDevice::Mode mode);
- ~QGStreamerAudioDeviceInfo();
QGstDeviceHandle gstDevice;
};
+class QGStreamerCustomAudioDeviceInfo : public QAudioDevicePrivate
+{
+public:
+ QGStreamerCustomAudioDeviceInfo(const QByteArray &gstreamerPipeline, QAudioDevice::Mode mode);
+};
+
+QAudioDevice qMakeCustomGStreamerAudioInput(const QByteArray &gstreamerPipeline);
+QAudioDevice qMakeCustomGStreamerAudioOutput(const QByteArray &gstreamerPipeline);
+
QT_END_NAMESPACE
#endif
diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosink.cpp b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosink.cpp
deleted file mode 100644
index 5399a3e64..000000000
--- a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosink.cpp
+++ /dev/null
@@ -1,381 +0,0 @@
-// Copyright (C) 2016 The Qt Company Ltd.
-// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-
-#include <QtCore/qcoreapplication.h>
-#include <QtCore/qdebug.h>
-#include <QtCore/qmath.h>
-#include <QtMultimedia/private/qaudiohelpers_p.h>
-
-#include <sys/types.h>
-#include <unistd.h>
-
-#include <audio/qgstreameraudiosink_p.h>
-#include <audio/qgstreameraudiodevice_p.h>
-#include <common/qgst_debug_p.h>
-#include <common/qgstappsource_p.h>
-#include <common/qgstpipeline_p.h>
-#include <common/qgstreamermessage_p.h>
-#include <common/qgstutils_p.h>
-
-#include <utility>
-
-QT_BEGIN_NAMESPACE
-
-QMaybe<QPlatformAudioSink *> QGStreamerAudioSink::create(const QAudioDevice &device, QObject *parent)
-{
- auto maybeAppSrc = QGstAppSource::create();
- if (!maybeAppSrc)
- return maybeAppSrc.error();
-
- QGstElement audioconvert = QGstElement::createFromFactory("audioconvert", "conv");
- if (!audioconvert)
- return errorMessageCannotFindElement("audioconvert");
-
- QGstElement volume = QGstElement::createFromFactory("volume", "volume");
- if (!volume)
- return errorMessageCannotFindElement("volume");
-
- return new QGStreamerAudioSink(device, maybeAppSrc.value(), audioconvert, volume, parent);
-}
-
-QGStreamerAudioSink::QGStreamerAudioSink(const QAudioDevice &device, QGstAppSource *appsrc,
- QGstElement audioconvert, QGstElement volume,
- QObject *parent)
- : QPlatformAudioSink(parent),
- m_device(device.id()),
- gstPipeline(QGstPipeline::create("audioSinkPipeline")),
- gstVolume(std::move(volume)),
- m_appSrc(appsrc)
-{
- gstPipeline.installMessageFilter(this);
-
- connect(m_appSrc, &QGstAppSource::bytesProcessed, this, &QGStreamerAudioSink::bytesProcessedByAppSrc);
- connect(m_appSrc, &QGstAppSource::noMoreData, this, &QGStreamerAudioSink::needData);
- gstAppSrc = m_appSrc->element();
-
- QGstElement queue = QGstElement::createFromFactory("queue", "audioSinkQueue");
-
- if (m_volume != 1.)
- gstVolume.set("volume", m_volume);
-
- // link decodeBin to audioconvert in a callback once we get a pad from the decoder
- // g_signal_connect (gstDecodeBin, "pad-added", (GCallback) padAdded, conv);
-
- const auto *audioInfo = static_cast<const QGStreamerAudioDeviceInfo *>(device.handle());
- gstOutput = QGstElement::createFromDevice(audioInfo->gstDevice, nullptr);
-
- gstPipeline.add(gstAppSrc, queue, /*gstDecodeBin, */ audioconvert, gstVolume, gstOutput);
- qLinkGstElements(gstAppSrc, queue, audioconvert, gstVolume, gstOutput);
-}
-
-QGStreamerAudioSink::~QGStreamerAudioSink()
-{
- close();
- gstPipeline.removeMessageFilter(this);
-
- gstPipeline = {};
- gstVolume = {};
- gstAppSrc = {};
- delete m_appSrc;
- m_appSrc = nullptr;
-}
-
-void QGStreamerAudioSink::setError(QAudio::Error error)
-{
- if (m_errorState == error)
- return;
-
- m_errorState = error;
- emit errorChanged(error);
-}
-
-QAudio::Error QGStreamerAudioSink::error() const
-{
- return m_errorState;
-}
-
-void QGStreamerAudioSink::setState(QAudio::State state)
-{
- if (m_deviceState == state)
- return;
-
- m_deviceState = state;
- emit stateChanged(state);
-}
-
-QAudio::State QGStreamerAudioSink::state() const
-{
- return m_deviceState;
-}
-
-void QGStreamerAudioSink::start(QIODevice *device)
-{
- setState(QAudio::StoppedState);
- setError(QAudio::NoError);
-
- close();
-
- if (!m_format.isValid()) {
- setError(QAudio::OpenError);
- return;
- }
-
- m_pullMode = true;
- m_audioSource = device;
-
- if (!open()) {
- m_audioSource = nullptr;
- setError(QAudio::OpenError);
- return;
- }
-
- setState(QAudio::ActiveState);
-}
-
-QIODevice *QGStreamerAudioSink::start()
-{
- setState(QAudio::StoppedState);
- setError(QAudio::NoError);
-
- close();
-
- if (!m_format.isValid()) {
- setError(QAudio::OpenError);
- return nullptr;
- }
-
- m_pullMode = false;
-
- if (!open())
- return nullptr;
-
- m_audioSource = new GStreamerOutputPrivate(this);
- m_audioSource->open(QIODevice::WriteOnly|QIODevice::Unbuffered);
-
- setState(QAudio::IdleState);
-
- return m_audioSource;
-}
-
-#if 0
-static void padAdded(GstElement *element, GstPad *pad, gpointer data)
-{
- GstElement *other = static_cast<GstElement *>(data);
-
- QGString name { gst_pad_get_name(pad)};
- qDebug("A new pad %s was created for %s\n", name, gst_element_get_name(element));
-
- qDebug("element %s will be linked to %s\n",
- gst_element_get_name(element),
- gst_element_get_name(other));
- gst_element_link(element, other);
-}
-#endif
-
-bool QGStreamerAudioSink::processBusMessage(const QGstreamerMessage &message)
-{
- auto *msg = message.message();
- switch (GST_MESSAGE_TYPE (msg)) {
- case GST_MESSAGE_EOS:
- setState(QAudio::IdleState);
- break;
- case GST_MESSAGE_ERROR: {
- setError(QAudio::IOError);
- QUniqueGErrorHandle error;
- QGString debug;
-
- gst_message_parse_error(msg, &error, &debug);
- qDebug() << "Error:" << error;
-
- break;
- }
- default:
- break;
- }
-
- return true;
-}
-
-bool QGStreamerAudioSink::open()
-{
- if (m_opened)
- return true;
-
- if (gstOutput.isNull()) {
- setError(QAudio::OpenError);
- setState(QAudio::StoppedState);
- return false;
- }
-
-// qDebug() << "GST caps:" << gst_caps_to_string(caps);
- m_appSrc->setup(m_audioSource, m_audioSource ? m_audioSource->pos() : 0);
- m_appSrc->setAudioFormat(m_format);
-
- /* run */
- gstPipeline.setState(GST_STATE_PLAYING);
-
- m_opened = true;
-
- m_timeStamp.restart();
- m_bytesProcessed = 0;
-
- return true;
-}
-
-void QGStreamerAudioSink::close()
-{
- if (!m_opened)
- return;
-
- if (!gstPipeline.setStateSync(GST_STATE_NULL))
- qWarning() << "failed to close the audio output stream";
-
- if (!m_pullMode && m_audioSource)
- delete m_audioSource;
- m_audioSource = nullptr;
- m_opened = false;
-}
-
-qint64 QGStreamerAudioSink::write(const char *data, qint64 len)
-{
- if (!len)
- return 0;
- if (m_errorState == QAudio::UnderrunError)
- m_errorState = QAudio::NoError;
-
- m_appSrc->write(data, len);
- return len;
-}
-
-void QGStreamerAudioSink::stop()
-{
- if (m_deviceState == QAudio::StoppedState)
- return;
-
- close();
-
- setError(QAudio::NoError);
- setState(QAudio::StoppedState);
-}
-
-qsizetype QGStreamerAudioSink::bytesFree() const
-{
- if (m_deviceState != QAudio::ActiveState && m_deviceState != QAudio::IdleState)
- return 0;
-
- return m_appSrc->canAcceptMoreData() ? 4096*4 : 0;
-}
-
-void QGStreamerAudioSink::setBufferSize(qsizetype value)
-{
- m_bufferSize = value;
- if (!gstAppSrc.isNull())
- gst_app_src_set_max_bytes(GST_APP_SRC(gstAppSrc.element()), value);
-}
-
-qsizetype QGStreamerAudioSink::bufferSize() const
-{
- return m_bufferSize;
-}
-
-qint64 QGStreamerAudioSink::processedUSecs() const
-{
- qint64 result = qint64(1000000) * m_bytesProcessed /
- m_format.bytesPerFrame() /
- m_format.sampleRate();
-
- return result;
-}
-
-void QGStreamerAudioSink::resume()
-{
- if (m_deviceState == QAudio::SuspendedState) {
- m_appSrc->resume();
- gstPipeline.setState(GST_STATE_PLAYING);
-
- setState(m_suspendedInState);
- setError(QAudio::NoError);
- }
-}
-
-void QGStreamerAudioSink::setFormat(const QAudioFormat &format)
-{
- m_format = format;
-}
-
-QAudioFormat QGStreamerAudioSink::format() const
-{
- return m_format;
-}
-
-void QGStreamerAudioSink::suspend()
-{
- if (m_deviceState == QAudio::ActiveState || m_deviceState == QAudio::IdleState) {
- m_suspendedInState = m_deviceState;
- setError(QAudio::NoError);
- setState(QAudio::SuspendedState);
-
- gstPipeline.setState(GST_STATE_PAUSED);
- m_appSrc->suspend();
- // ### elapsed time
- }
-}
-
-void QGStreamerAudioSink::reset()
-{
- stop();
-}
-
-GStreamerOutputPrivate::GStreamerOutputPrivate(QGStreamerAudioSink *audio)
-{
- m_audioDevice = audio;
-}
-
-qint64 GStreamerOutputPrivate::readData(char *data, qint64 len)
-{
- Q_UNUSED(data);
- Q_UNUSED(len);
-
- return 0;
-}
-
-qint64 GStreamerOutputPrivate::writeData(const char *data, qint64 len)
-{
- if (m_audioDevice->state() == QAudio::IdleState)
- m_audioDevice->setState(QAudio::ActiveState);
- return m_audioDevice->write(data, len);
-}
-
-void QGStreamerAudioSink::setVolume(qreal vol)
-{
- if (m_volume == vol)
- return;
-
- m_volume = vol;
- if (!gstVolume.isNull())
- gstVolume.set("volume", vol);
-}
-
-qreal QGStreamerAudioSink::volume() const
-{
- return m_volume;
-}
-
-void QGStreamerAudioSink::bytesProcessedByAppSrc(int bytes)
-{
- m_bytesProcessed += bytes;
- setState(QAudio::ActiveState);
- setError(QAudio::NoError);
-}
-
-void QGStreamerAudioSink::needData()
-{
- if (state() != QAudio::StoppedState && state() != QAudio::IdleState) {
- setState(QAudio::IdleState);
- setError(m_audioSource && m_audioSource->atEnd() ? QAudio::NoError : QAudio::UnderrunError);
- }
-}
-
-QT_END_NAMESPACE
-
-#include "moc_qgstreameraudiosink_p.cpp"
diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosink_p.h b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosink_p.h
deleted file mode 100644
index 1aadb2290..000000000
--- a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosink_p.h
+++ /dev/null
@@ -1,124 +0,0 @@
-// Copyright (C) 2016 The Qt Company Ltd.
-// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-
-#ifndef QAUDIOOUTPUTGSTREAMER_H
-#define QAUDIOOUTPUTGSTREAMER_H
-
-//
-// W A R N I N G
-// -------------
-//
-// This file is not part of the Qt API. It exists purely as an
-// implementation detail. This header file may change from version to
-// version without notice, or even be removed.
-//
-// We mean it.
-//
-
-#include <QtCore/qfile.h>
-#include <QtCore/qtimer.h>
-#include <QtCore/qstring.h>
-#include <QtCore/qstringlist.h>
-#include <QtCore/qelapsedtimer.h>
-#include <QtCore/qiodevice.h>
-#include <QtCore/private/qringbuffer_p.h>
-
-#include "qaudio.h"
-#include "qaudiodevice.h"
-#include <private/qaudiosystem_p.h>
-#include <private/qmultimediautils_p.h>
-
-#include <common/qgst_p.h>
-#include <common/qgstpipeline_p.h>
-
-QT_BEGIN_NAMESPACE
-
-class QGstAppSource;
-
-class QGStreamerAudioSink
- : public QPlatformAudioSink,
- public QGstreamerBusMessageFilter
-{
- friend class GStreamerOutputPrivate;
- Q_OBJECT
-
-public:
- static QMaybe<QPlatformAudioSink *> create(const QAudioDevice &device, QObject *parent);
- ~QGStreamerAudioSink();
-
- void start(QIODevice *device) override;
- QIODevice *start() override;
- void stop() override;
- void reset() override;
- void suspend() override;
- void resume() override;
- qsizetype bytesFree() const override;
- void setBufferSize(qsizetype value) override;
- qsizetype bufferSize() const override;
- qint64 processedUSecs() const override;
- QAudio::Error error() const override;
- QAudio::State state() const override;
- void setFormat(const QAudioFormat &format) override;
- QAudioFormat format() const override;
-
- void setVolume(qreal volume) override;
- qreal volume() const override;
-
-private Q_SLOTS:
- void bytesProcessedByAppSrc(int bytes);
- void needData();
-
-private:
- QGStreamerAudioSink(const QAudioDevice &device, QGstAppSource *appsrc, QGstElement audioconvert,
- QGstElement volume, QObject *parent);
-
- void setState(QAudio::State state);
- void setError(QAudio::Error error);
-
- bool processBusMessage(const QGstreamerMessage &message) override;
-
- bool open();
- void close();
- qint64 write(const char *data, qint64 len);
-
-private:
- QByteArray m_device;
- QAudioFormat m_format;
- QAudio::Error m_errorState = QAudio::NoError;
- QAudio::State m_deviceState = QAudio::StoppedState;
- QAudio::State m_suspendedInState = QAudio::SuspendedState;
- bool m_pullMode = true;
- bool m_opened = false;
- QIODevice *m_audioSource = nullptr;
- int m_bufferSize = 0;
- qint64 m_bytesProcessed = 0;
- QElapsedTimer m_timeStamp;
- qreal m_volume = 1.;
- QByteArray pushData;
-
- QGstPipeline gstPipeline;
- QGstElement gstOutput;
- QGstElement gstVolume;
- QGstElement gstAppSrc;
- QGstAppSource *m_appSrc = nullptr;
-};
-
-class GStreamerOutputPrivate : public QIODevice
-{
- friend class QGStreamerAudioSink;
- Q_OBJECT
-
-public:
- explicit GStreamerOutputPrivate(QGStreamerAudioSink *audio);
-
-protected:
- qint64 readData(char *data, qint64 len) override;
- qint64 writeData(const char *data, qint64 len) override;
-
-private:
- QGStreamerAudioSink *m_audioDevice;
-};
-
-QT_END_NAMESPACE
-
-#endif
diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosource.cpp b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosource.cpp
deleted file mode 100644
index ba9823d98..000000000
--- a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosource.cpp
+++ /dev/null
@@ -1,369 +0,0 @@
-// Copyright (C) 2016 The Qt Company Ltd.
-// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-
-#include <QtCore/qcoreapplication.h>
-#include <QtCore/qdebug.h>
-#include <QtCore/qmath.h>
-#include <private/qaudiohelpers_p.h>
-
-#include "qgstreameraudiosource_p.h"
-#include "qgstreameraudiodevice_p.h"
-#include <sys/types.h>
-#include <unistd.h>
-
-#include <gst/gst.h>
-Q_DECLARE_OPAQUE_POINTER(GstSample *);
-Q_DECLARE_METATYPE(GstSample *);
-
-QT_BEGIN_NAMESPACE
-
-QGStreamerAudioSource::QGStreamerAudioSource(const QAudioDevice &device, QObject *parent)
- : QPlatformAudioSource(parent),
- m_info(device),
- m_device(device.id())
-{
- qRegisterMetaType<GstSample *>();
-}
-
-QGStreamerAudioSource::~QGStreamerAudioSource()
-{
- close();
-}
-
-void QGStreamerAudioSource::setError(QAudio::Error error)
-{
- if (m_errorState == error)
- return;
-
- m_errorState = error;
- emit errorChanged(error);
-}
-
-QAudio::Error QGStreamerAudioSource::error() const
-{
- return m_errorState;
-}
-
-void QGStreamerAudioSource::setState(QAudio::State state)
-{
- if (m_deviceState == state)
- return;
-
- m_deviceState = state;
- emit stateChanged(state);
-}
-
-QAudio::State QGStreamerAudioSource::state() const
-{
- return m_deviceState;
-}
-
-void QGStreamerAudioSource::setFormat(const QAudioFormat &format)
-{
- if (m_deviceState == QAudio::StoppedState)
- m_format = format;
-}
-
-QAudioFormat QGStreamerAudioSource::format() const
-{
- return m_format;
-}
-
-void QGStreamerAudioSource::start(QIODevice *device)
-{
- setState(QAudio::StoppedState);
- setError(QAudio::NoError);
-
- close();
-
- if (!open())
- return;
-
- m_pullMode = true;
- m_audioSink = device;
-
- setState(QAudio::ActiveState);
-}
-
-QIODevice *QGStreamerAudioSource::start()
-{
- setState(QAudio::StoppedState);
- setError(QAudio::NoError);
-
- close();
-
- if (!open())
- return nullptr;
-
- m_pullMode = false;
- m_audioSink = new GStreamerInputPrivate(this);
- m_audioSink->open(QIODevice::ReadOnly | QIODevice::Unbuffered);
-
- setState(QAudio::IdleState);
-
- return m_audioSink;
-}
-
-void QGStreamerAudioSource::stop()
-{
- if (m_deviceState == QAudio::StoppedState)
- return;
-
- close();
-
- setError(QAudio::NoError);
- setState(QAudio::StoppedState);
-}
-
-bool QGStreamerAudioSource::open()
-{
- if (m_opened)
- return true;
-
- const auto *deviceInfo = static_cast<const QGStreamerAudioDeviceInfo *>(m_info.handle());
- if (!deviceInfo->gstDevice) {
- setError(QAudio::OpenError);
- setState(QAudio::StoppedState);
- return false;
- }
-
- gstInput = QGstElement::createFromDevice(deviceInfo->gstDevice);
- if (gstInput.isNull()) {
- setError(QAudio::OpenError);
- setState(QAudio::StoppedState);
- return false;
- }
-
- auto gstCaps = QGstUtils::capsForAudioFormat(m_format);
-
- if (gstCaps.isNull()) {
- setError(QAudio::OpenError);
- setState(QAudio::StoppedState);
- return false;
- }
-
-
-#ifdef DEBUG_AUDIO
- qDebug() << "Opening input" << QTime::currentTime();
- qDebug() << "Caps: " << gst_caps_to_string(gstCaps);
-#endif
-
- gstPipeline = QGstPipeline::create("audioSourcePipeline");
-
- auto *gstBus = gst_pipeline_get_bus(gstPipeline.pipeline());
- gst_bus_add_watch(gstBus, &QGStreamerAudioSource::busMessage, this);
- gst_object_unref (gstBus);
-
- gstAppSink = createAppSink();
- gstAppSink.set("caps", gstCaps);
-
- QGstElement conv = QGstElement::createFromFactory("audioconvert", "conv");
- gstVolume = QGstElement::createFromFactory("volume", "volume");
- Q_ASSERT(gstVolume);
- if (m_volume != 1.)
- gstVolume.set("volume", m_volume);
-
- gstPipeline.add(gstInput, gstVolume, conv, gstAppSink);
- qLinkGstElements(gstInput, gstVolume, conv, gstAppSink);
-
- gstPipeline.setState(GST_STATE_PLAYING);
-
- m_opened = true;
-
- m_timeStamp.restart();
- m_elapsedTimeOffset = 0;
- m_bytesWritten = 0;
-
- return true;
-}
-
-void QGStreamerAudioSource::close()
-{
- if (!m_opened)
- return;
-
- gstPipeline.setState(GST_STATE_NULL);
- gstPipeline = {};
- gstVolume = {};
- gstAppSink = {};
- gstInput = {};
-
- if (!m_pullMode && m_audioSink) {
- delete m_audioSink;
- }
- m_audioSink = nullptr;
- m_opened = false;
-}
-
-gboolean QGStreamerAudioSource::busMessage(GstBus *, GstMessage *msg, gpointer user_data)
-{
- QGStreamerAudioSource *input = static_cast<QGStreamerAudioSource *>(user_data);
- switch (GST_MESSAGE_TYPE (msg)) {
- case GST_MESSAGE_EOS:
- input->stop();
- break;
- case GST_MESSAGE_ERROR: {
- input->setError(QAudio::IOError);
- QUniqueGErrorHandle error;
- QGString debug;
-
- gst_message_parse_error (msg, &error, &debug);
- qDebug() << "Error:" << error.get();
-
- break;
- }
- default:
- break;
- }
- return false;
-}
-
-qsizetype QGStreamerAudioSource::bytesReady() const
-{
- return m_buffer.size();
-}
-
-void QGStreamerAudioSource::resume()
-{
- if (m_deviceState == QAudio::SuspendedState || m_deviceState == QAudio::IdleState) {
- gstPipeline.setState(GST_STATE_PLAYING);
- setState(QAudio::ActiveState);
- setError(QAudio::NoError);
- }
-}
-
-void QGStreamerAudioSource::setVolume(qreal vol)
-{
- if (m_volume == vol)
- return;
-
- m_volume = vol;
- if (!gstVolume.isNull())
- gstVolume.set("volume", vol);
-}
-
-qreal QGStreamerAudioSource::volume() const
-{
- return m_volume;
-}
-
-void QGStreamerAudioSource::setBufferSize(qsizetype value)
-{
- m_bufferSize = value;
-}
-
-qsizetype QGStreamerAudioSource::bufferSize() const
-{
- return m_bufferSize;
-}
-
-qint64 QGStreamerAudioSource::processedUSecs() const
-{
- return m_format.durationForBytes(m_bytesWritten);
-}
-
-void QGStreamerAudioSource::suspend()
-{
- if (m_deviceState == QAudio::ActiveState) {
- setError(QAudio::NoError);
- setState(QAudio::SuspendedState);
-
- gstPipeline.setState(GST_STATE_PAUSED);
- }
-}
-
-void QGStreamerAudioSource::reset()
-{
- stop();
- m_buffer.clear();
-}
-
-//#define MAX_BUFFERS_IN_QUEUE 4
-
-QGstAppSink QGStreamerAudioSource::createAppSink()
-{
- QGstAppSink sink = QGstAppSink::create("appsink");
-
- GstAppSinkCallbacks callbacks{};
- callbacks.eos = eos;
- callbacks.new_sample = new_sample;
- sink.setCallbacks(callbacks, this, nullptr);
- // gst_app_sink_set_max_buffers(sink.appSink(), MAX_BUFFERS_IN_QUEUE);
- gst_base_sink_set_sync(sink.baseSink(), FALSE);
-
- return sink;
-}
-
-void QGStreamerAudioSource::newDataAvailable(QGstSampleHandle sample)
-{
- if (m_audioSink) {
- GstBuffer *buffer = gst_sample_get_buffer(sample.get());
- GstMapInfo mapInfo;
- gst_buffer_map(buffer, &mapInfo, GST_MAP_READ);
- const char *bufferData = (const char*)mapInfo.data;
- gsize bufferSize = mapInfo.size;
-
- if (!m_pullMode) {
- // need to store that data in the QBuffer
- m_buffer.append(bufferData, bufferSize);
- m_audioSink->readyRead();
- } else {
- m_bytesWritten += bufferSize;
- m_audioSink->write(bufferData, bufferSize);
- }
-
- gst_buffer_unmap(buffer, &mapInfo);
- }
-}
-
-GstFlowReturn QGStreamerAudioSource::new_sample(GstAppSink *sink, gpointer user_data)
-{
- // "Note that the preroll buffer will also be returned as the first buffer when calling gst_app_sink_pull_buffer()."
- QGStreamerAudioSource *control = static_cast<QGStreamerAudioSource*>(user_data);
-
- QGstSampleHandle sample{
- gst_app_sink_pull_sample(sink),
- QGstSampleHandle::HasRef,
- };
-
- QMetaObject::invokeMethod(control, [control, sample = std::move(sample)]() mutable {
- control->newDataAvailable(std::move(sample));
- });
-
- return GST_FLOW_OK;
-}
-
-void QGStreamerAudioSource::eos(GstAppSink *, gpointer user_data)
-{
- QGStreamerAudioSource *control = static_cast<QGStreamerAudioSource*>(user_data);
- control->setState(QAudio::StoppedState);
-}
-
-GStreamerInputPrivate::GStreamerInputPrivate(QGStreamerAudioSource *audio)
-{
- m_audioDevice = audio;
-}
-
-qint64 GStreamerInputPrivate::readData(char *data, qint64 len)
-{
- if (m_audioDevice->state() == QAudio::IdleState)
- m_audioDevice->setState(QAudio::ActiveState);
- qint64 bytes = m_audioDevice->m_buffer.read(data, len);
- m_audioDevice->m_bytesWritten += bytes;
- return bytes;
-}
-
-qint64 GStreamerInputPrivate::writeData(const char *data, qint64 len)
-{
- Q_UNUSED(data);
- Q_UNUSED(len);
- return 0;
-}
-
-qint64 GStreamerInputPrivate::bytesAvailable() const
-{
- return m_audioDevice->m_buffer.size();
-}
-
-
-QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosource_p.h b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosource_p.h
deleted file mode 100644
index 9021f1ddd..000000000
--- a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosource_p.h
+++ /dev/null
@@ -1,120 +0,0 @@
-// Copyright (C) 2016 The Qt Company Ltd.
-// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-
-//
-// W A R N I N G
-// -------------
-//
-// This file is not part of the Qt API. It exists for the convenience
-// of other Qt classes. This header file may change from version to
-// version without notice, or even be removed.
-//
-// We mean it.
-//
-
-#ifndef QAUDIOINPUTGSTREAMER_H
-#define QAUDIOINPUTGSTREAMER_H
-
-#include <QtCore/qfile.h>
-#include <QtCore/qtimer.h>
-#include <QtCore/qstring.h>
-#include <QtCore/qstringlist.h>
-#include <QtCore/qelapsedtimer.h>
-#include <QtCore/qiodevice.h>
-#include <QtCore/qmutex.h>
-#include <QtCore/qatomic.h>
-#include <QtCore/private/qringbuffer_p.h>
-
-#include "qaudio.h"
-#include "qaudiodevice.h"
-#include <private/qaudiosystem_p.h>
-
-#include <common/qgstutils_p.h>
-#include <common/qgstpipeline_p.h>
-
-#include <gst/app/gstappsink.h>
-
-QT_BEGIN_NAMESPACE
-
-class GStreamerInputPrivate;
-
-class QGStreamerAudioSource
- : public QPlatformAudioSource
-{
- friend class GStreamerInputPrivate;
-public:
- QGStreamerAudioSource(const QAudioDevice &device, QObject *parent);
- ~QGStreamerAudioSource();
-
- void start(QIODevice *device) override;
- QIODevice *start() override;
- void stop() override;
- void reset() override;
- void suspend() override;
- void resume() override;
- qsizetype bytesReady() const override;
- void setBufferSize(qsizetype value) override;
- qsizetype bufferSize() const override;
- qint64 processedUSecs() const override;
- QAudio::Error error() const override;
- QAudio::State state() const override;
- void setFormat(const QAudioFormat &format) override;
- QAudioFormat format() const override;
-
- void setVolume(qreal volume) override;
- qreal volume() const override;
-
-private:
- void setState(QAudio::State state);
- void setError(QAudio::Error error);
-
- QGstAppSink createAppSink();
- static GstFlowReturn new_sample(GstAppSink *, gpointer user_data);
- static void eos(GstAppSink *, gpointer user_data);
-
- bool open();
- void close();
-
- static gboolean busMessage(GstBus *bus, GstMessage *msg, gpointer user_data);
-
- void newDataAvailable(QGstSampleHandle sample);
-
- QAudioDevice m_info;
- qint64 m_bytesWritten = 0;
- QIODevice *m_audioSink = nullptr;
- QAudioFormat m_format;
- QAudio::Error m_errorState = QAudio::NoError;
- QAudio::State m_deviceState = QAudio::StoppedState;
- qreal m_volume = 1.;
-
- QRingBuffer m_buffer;
- QAtomicInteger<bool> m_pullMode = true;
- bool m_opened = false;
- int m_bufferSize = 0;
- qint64 m_elapsedTimeOffset = 0;
- QElapsedTimer m_timeStamp;
- QByteArray m_device;
- QByteArray m_tempBuffer;
-
- QGstElement gstInput;
- QGstPipeline gstPipeline;
- QGstElement gstVolume;
- QGstAppSink gstAppSink;
-};
-
-class GStreamerInputPrivate : public QIODevice
-{
-public:
- explicit GStreamerInputPrivate(QGStreamerAudioSource *audio);
-
- qint64 readData(char *data, qint64 len) override;
- qint64 writeData(const char *data, qint64 len) override;
- qint64 bytesAvailable() const override;
- bool isSequential() const override { return true; }
-private:
- QGStreamerAudioSource *m_audioDevice;
-};
-
-QT_END_NAMESPACE
-
-#endif
diff --git a/src/plugins/multimedia/gstreamer/common/qgst.cpp b/src/plugins/multimedia/gstreamer/common/qgst.cpp
index 83d95a9e2..4050ea06f 100644
--- a/src/plugins/multimedia/gstreamer/common/qgst.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgst.cpp
@@ -127,11 +127,11 @@ std::optional<QGRange<int>> QGValue::toIntRange() const
return QGRange<int>{ gst_value_get_int_range_min(value), gst_value_get_int_range_max(value) };
}
-QGstStructure QGValue::toStructure() const
+QGstStructureView QGValue::toStructure() const
{
if (!value || !GST_VALUE_HOLDS_STRUCTURE(value))
- return QGstStructure();
- return QGstStructure(gst_value_get_structure(value));
+ return QGstStructureView(nullptr);
+ return QGstStructureView(gst_value_get_structure(value));
}
QGstCaps QGValue::toCaps() const
@@ -156,38 +156,52 @@ QGValue QGValue::at(int index) const
return QGValue{ gst_value_list_get_value(value, index) };
}
-// QGstStructure
+// QGstStructureView
-QGstStructure::QGstStructure(const GstStructure *s) : structure(s) { }
+QGstStructureView::QGstStructureView(const GstStructure *s) : structure(s) { }
-void QGstStructure::free()
+QGstStructureView::QGstStructureView(const QUniqueGstStructureHandle &handle)
+ : QGstStructureView{ handle.get() }
{
- if (structure)
- gst_structure_free(const_cast<GstStructure *>(structure));
- structure = nullptr;
}
-bool QGstStructure::isNull() const
+QUniqueGstStructureHandle QGstStructureView::clone() const
+{
+ return QUniqueGstStructureHandle{ gst_structure_copy(structure) };
+}
+
+bool QGstStructureView::isNull() const
{
return !structure;
}
-QByteArrayView QGstStructure::name() const
+QByteArrayView QGstStructureView::name() const
{
return gst_structure_get_name(structure);
}
-QGValue QGstStructure::operator[](const char *name) const
+QGValue QGstStructureView::operator[](const char *fieldname) const
{
- return QGValue{ gst_structure_get_value(structure, name) };
+ return QGValue{ gst_structure_get_value(structure, fieldname) };
}
-QGstStructure QGstStructure::copy() const
+QGstCaps QGstStructureView::caps() const
{
- return gst_structure_copy(structure);
+ return operator[]("caps").toCaps();
}
-QSize QGstStructure::resolution() const
+QGstTagListHandle QGstStructureView::tags() const
+{
+ QGValue tags = operator[]("tags");
+ if (tags.isNull())
+ return {};
+
+ QGstTagListHandle tagList;
+ gst_structure_get(structure, "tags", GST_TYPE_TAG_LIST, &tagList, nullptr);
+ return tagList;
+}
+
+QSize QGstStructureView::resolution() const
{
QSize size;
@@ -201,7 +215,7 @@ QSize QGstStructure::resolution() const
return size;
}
-QVideoFrameFormat::PixelFormat QGstStructure::pixelFormat() const
+QVideoFrameFormat::PixelFormat QGstStructureView::pixelFormat() const
{
QVideoFrameFormat::PixelFormat pixelFormat = QVideoFrameFormat::Format_Invalid;
@@ -224,7 +238,7 @@ QVideoFrameFormat::PixelFormat QGstStructure::pixelFormat() const
return pixelFormat;
}
-QGRange<float> QGstStructure::frameRateRange() const
+QGRange<float> QGstStructureView::frameRateRange() const
{
float minRate = 0.;
float maxRate = 0.;
@@ -276,14 +290,14 @@ QGRange<float> QGstStructure::frameRateRange() const
return { minRate, maxRate };
}
-QGstreamerMessage QGstStructure::getMessage()
+QGstreamerMessage QGstStructureView::getMessage()
{
GstMessage *message = nullptr;
gst_structure_get(structure, "message", GST_TYPE_MESSAGE, &message, nullptr);
return QGstreamerMessage(message, QGstreamerMessage::HasRef);
}
-std::optional<Fraction> QGstStructure::pixelAspectRatio() const
+std::optional<Fraction> QGstStructureView::pixelAspectRatio() const
{
gint numerator;
gint denominator;
@@ -297,7 +311,20 @@ std::optional<Fraction> QGstStructure::pixelAspectRatio() const
return std::nullopt;
}
-QSize QGstStructure::nativeSize() const
+// QTBUG-125249: gstreamer tries "to keep the input height (because of interlacing)". Can we align
+// the behavior between gstreamer and ffmpeg?
+static QSize qCalculateFrameSizeGStreamer(QSize resolution, Fraction par)
+{
+ if (par.numerator == par.denominator || par.numerator < 1 || par.denominator < 1)
+ return resolution;
+
+ return QSize{
+ resolution.width() * par.numerator / par.denominator,
+ resolution.height(),
+ };
+}
+
+QSize QGstStructureView::nativeSize() const
{
QSize size = resolution();
if (!size.isValid()) {
@@ -307,7 +334,7 @@ QSize QGstStructure::nativeSize() const
std::optional<Fraction> par = pixelAspectRatio();
if (par)
- size = qCalculateFrameSize(size, *par);
+ size = qCalculateFrameSizeGStreamer(size, *par);
return size;
}
@@ -329,7 +356,7 @@ std::optional<std::pair<QVideoFrameFormat, GstVideoInfo>> QGstCaps::formatAndVid
qt_videoFormatLookup[index].pixelFormat);
if (vidInfo.fps_d > 0)
- format.setFrameRate(qreal(vidInfo.fps_n) / vidInfo.fps_d);
+ format.setStreamFrameRate(qreal(vidInfo.fps_n) / vidInfo.fps_d);
QVideoFrameFormat::ColorRange range = QVideoFrameFormat::ColorRange_Unknown;
switch (vidInfo.colorimetry.range) {
@@ -482,6 +509,14 @@ QGstCaps QGstCaps::fromCameraFormat(const QCameraFormat &format)
return caps;
}
+QGstCaps QGstCaps::copy() const
+{
+ return QGstCaps{
+ gst_caps_copy(caps()),
+ QGstCaps::HasRef,
+ };
+}
+
QGstCaps::MemoryFormat QGstCaps::memoryFormat() const
{
auto *features = gst_caps_get_features(get(), 0);
@@ -497,9 +532,11 @@ int QGstCaps::size() const
return int(gst_caps_get_size(get()));
}
-QGstStructure QGstCaps::at(int index) const
+QGstStructureView QGstCaps::at(int index) const
{
- return gst_caps_get_structure(get(), index);
+ return QGstStructureView{
+ gst_caps_get_structure(get(), index),
+ };
}
GstCaps *QGstCaps::caps() const
@@ -566,11 +603,11 @@ QGString QGstObject::getString(const char *property) const
return QGString(s);
}
-QGstStructure QGstObject::getStructure(const char *property) const
+QGstStructureView QGstObject::getStructure(const char *property) const
{
GstStructure *s = nullptr;
g_object_get(get(), property, &s, nullptr);
- return QGstStructure(s);
+ return QGstStructureView(s);
}
bool QGstObject::getBool(const char *property) const
@@ -648,14 +685,23 @@ GType QGstObject::type() const
return G_OBJECT_TYPE(get());
}
+QLatin1StringView QGstObject::typeName() const
+{
+ return QLatin1StringView{
+ g_type_name(type()),
+ };
+}
+
GstObject *QGstObject::object() const
{
return get();
}
-const char *QGstObject::name() const
+QLatin1StringView QGstObject::name() const
{
- return get() ? GST_OBJECT_NAME(get()) : "(null)";
+ using namespace Qt::StringLiterals;
+
+ return get() ? QLatin1StringView{ GST_OBJECT_NAME(get()) } : "(null)"_L1;
}
// QGObjectHandlerConnection
@@ -723,6 +769,28 @@ QGstCaps QGstPad::queryCaps() const
return QGstCaps(gst_pad_query_caps(pad(), nullptr), QGstCaps::HasRef);
}
+QGstTagListHandle QGstPad::tags() const
+{
+ QGstTagListHandle tagList;
+ g_object_get(object(), "tags", &tagList, nullptr);
+ return tagList;
+}
+
+std::optional<QPlatformMediaPlayer::TrackType> QGstPad::inferTrackTypeFromName() const
+{
+ using namespace Qt::Literals;
+ QLatin1StringView padName = name();
+
+ if (padName.startsWith("video_"_L1))
+ return QPlatformMediaPlayer::TrackType::VideoStream;
+ if (padName.startsWith("audio_"_L1))
+ return QPlatformMediaPlayer::TrackType::AudioStream;
+ if (padName.startsWith("text_"_L1))
+ return QPlatformMediaPlayer::TrackType::SubtitleStream;
+
+ return std::nullopt;
+}
+
bool QGstPad::isLinked() const
{
return gst_pad_is_linked(pad());
@@ -850,6 +918,38 @@ QGstElement QGstElement::createFromDevice(GstDevice *device, const char *name)
};
}
+QGstElement QGstElement::createFromPipelineDescription(const char *str)
+{
+ QUniqueGErrorHandle error;
+ QGstElement element{
+ gst_parse_launch(str, &error),
+ QGstElement::NeedsRef,
+ };
+
+ if (error) // error does not mean that the element could not be constructed
+ qWarning() << "gst_parse_launch error:" << error;
+
+ return element;
+}
+
+QGstElement QGstElement::createFromPipelineDescription(const QByteArray &str)
+{
+ return createFromPipelineDescription(str.constData());
+}
+
+QGstElementFactoryHandle QGstElement::findFactory(const char *name)
+{
+ return QGstElementFactoryHandle{
+ gst_element_factory_find(name),
+ QGstElementFactoryHandle::HasRef,
+ };
+}
+
+QGstElementFactoryHandle QGstElement::findFactory(const QByteArray &name)
+{
+ return findFactory(name.constData());
+}
+
QGstPad QGstElement::staticPad(const char *name) const
{
return QGstPad(gst_element_get_static_pad(element(), name), HasRef);
@@ -901,14 +1001,23 @@ GstStateChangeReturn QGstElement::setState(GstState state)
bool QGstElement::setStateSync(GstState state, std::chrono::nanoseconds timeout)
{
+ if (state == GST_STATE_NULL) {
+ // QTBUG-125251: when changing pipeline state too quickly between NULL->PAUSED->NULL there
+ // may be a pending task to activate pads while we try to switch to NULL. This can cause an
+ // assertion failure in gstreamer. we therefore finish the state change when called on a bin
+ // or pipeline.
+ if (qIsGstObjectOfType<GstBin>(element()))
+ finishStateChange();
+ }
+
GstStateChangeReturn change = gst_element_set_state(element(), state);
- if (change == GST_STATE_CHANGE_ASYNC) {
+ if (change == GST_STATE_CHANGE_ASYNC)
change = gst_element_get_state(element(), nullptr, &state, timeout.count());
- }
-#ifndef QT_NO_DEBUG
- if (change != GST_STATE_CHANGE_SUCCESS && change != GST_STATE_CHANGE_NO_PREROLL)
+
+ if (change != GST_STATE_CHANGE_SUCCESS && change != GST_STATE_CHANGE_NO_PREROLL) {
qWarning() << "Could not change state of" << name() << "to" << state << change;
-#endif
+ dumpPipelineGraph("setStatSyncFailure");
+ }
return change == GST_STATE_CHANGE_SUCCESS;
}
@@ -924,10 +1033,10 @@ bool QGstElement::finishStateChange(std::chrono::nanoseconds timeout)
GstStateChangeReturn change =
gst_element_get_state(element(), &state, &pending, timeout.count());
-#ifndef QT_NO_DEBUG
- if (change != GST_STATE_CHANGE_SUCCESS && change != GST_STATE_CHANGE_NO_PREROLL)
+ if (change != GST_STATE_CHANGE_SUCCESS && change != GST_STATE_CHANGE_NO_PREROLL) {
qWarning() << "Could not finish change state of" << name() << change << state << pending;
-#endif
+ dumpPipelineGraph("finishStateChangeFailure");
+ }
return change == GST_STATE_CHANGE_SUCCESS;
}
@@ -991,6 +1100,16 @@ QGstPipeline QGstElement::getPipeline() const
}
}
+void QGstElement::dumpPipelineGraph(const char *filename) const
+{
+ static const bool dumpEnabled = qEnvironmentVariableIsSet("GST_DEBUG_DUMP_DOT_DIR");
+ if (dumpEnabled) {
+ QGstPipeline pipeline = getPipeline();
+ if (pipeline)
+ pipeline.dumpGraph(filename);
+ }
+}
+
// QGstBin
QGstBin QGstBin::create(const char *name)
@@ -1008,6 +1127,36 @@ QGstBin QGstBin::createFromFactory(const char *factory, const char *name)
};
}
+QGstBin QGstBin::createFromPipelineDescription(const QByteArray &pipelineDescription,
+ const char *name, bool ghostUnlinkedPads)
+{
+ return createFromPipelineDescription(pipelineDescription.constData(), name, ghostUnlinkedPads);
+}
+
+QGstBin QGstBin::createFromPipelineDescription(const char *pipelineDescription, const char *name,
+ bool ghostUnlinkedPads)
+{
+ QUniqueGErrorHandle error;
+
+ GstElement *element =
+ gst_parse_bin_from_description_full(pipelineDescription, ghostUnlinkedPads,
+ /*context=*/nullptr, GST_PARSE_FLAG_NONE, &error);
+
+ if (!element) {
+ qWarning() << "Failed to make element from pipeline description" << pipelineDescription
+ << error;
+ return QGstBin{};
+ }
+
+ if (name)
+ gst_element_set_name(element, name);
+
+ return QGstBin{
+ element,
+ NeedsRef,
+ };
+}
+
QGstBin::QGstBin(GstBin *bin, RefMode mode)
: QGstElement{
qGstCheckedCast<GstElement>(bin),
@@ -1041,12 +1190,15 @@ void QGstBin::dumpGraph(const char *fileNamePrefix)
if (isNull())
return;
- GST_DEBUG_BIN_TO_DOT_FILE(bin(),
- GstDebugGraphDetails(GST_DEBUG_GRAPH_SHOW_ALL
- | GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE
- | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS
- | GST_DEBUG_GRAPH_SHOW_STATES),
- fileNamePrefix);
+ GST_DEBUG_BIN_TO_DOT_FILE(bin(), GST_DEBUG_GRAPH_SHOW_VERBOSE, fileNamePrefix);
+}
+
+QGstElement QGstBin::findByName(const char *name)
+{
+ return QGstElement{
+ gst_bin_get_by_name(bin(), name),
+ QGstElement::NeedsRef,
+ };
}
// QGstBaseSink
@@ -1059,6 +1211,11 @@ QGstBaseSink::QGstBaseSink(GstBaseSink *element, RefMode mode)
{
}
+void QGstBaseSink::setSync(bool arg)
+{
+ gst_base_sink_set_sync(baseSink(), arg ? TRUE : FALSE);
+}
+
GstBaseSink *QGstBaseSink::baseSink() const
{
return qGstCheckedCast<GstBaseSink>(element());
@@ -1105,6 +1262,18 @@ GstAppSink *QGstAppSink::appSink() const
return qGstCheckedCast<GstAppSink>(element());
}
+# if GST_CHECK_VERSION(1, 24, 0)
+void QGstAppSink::setMaxBufferTime(std::chrono::nanoseconds ns)
+{
+ gst_app_sink_set_max_time(appSink(), qGstClockTimeFromChrono(ns));
+}
+# endif
+
+void QGstAppSink::setMaxBuffers(int n)
+{
+ gst_app_sink_set_max_buffers(appSink(), n);
+}
+
void QGstAppSink::setCaps(const QGstCaps &caps)
{
gst_app_sink_set_caps(appSink(), caps.caps());
@@ -1161,4 +1330,10 @@ GstFlowReturn QGstAppSrc::pushBuffer(GstBuffer *buffer)
#endif
+QString qGstErrorMessageCannotFindElement(std::string_view element)
+{
+ return QStringLiteral("Could not find the %1 GStreamer element")
+ .arg(QLatin1StringView(element));
+}
+
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgst_debug.cpp b/src/plugins/multimedia/gstreamer/common/qgst_debug.cpp
index ee28a5c45..413b02f44 100644
--- a/src/plugins/multimedia/gstreamer/common/qgst_debug.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgst_debug.cpp
@@ -4,6 +4,8 @@
#include "qgst_debug_p.h"
#include "qgstreamermessage_p.h"
+#include <gst/gstclock.h>
+
QT_BEGIN_NAMESPACE
// NOLINTBEGIN(performance-unnecessary-value-param)
@@ -18,7 +20,7 @@ QDebug operator<<(QDebug dbg, const QGstCaps &caps)
return dbg << caps.caps();
}
-QDebug operator<<(QDebug dbg, const QGstStructure &structure)
+QDebug operator<<(QDebug dbg, const QGstStructureView &structure)
{
return dbg << structure.structure;
}
@@ -43,6 +45,21 @@ QDebug operator<<(QDebug dbg, const QUniqueGStringHandle &handle)
return dbg << handle.get();
}
+QDebug operator<<(QDebug dbg, const QGstStreamCollectionHandle &handle)
+{
+ return dbg << handle.get();
+}
+
+QDebug operator<<(QDebug dbg, const QGstStreamHandle &handle)
+{
+ return dbg << handle.get();
+}
+
+QDebug operator<<(QDebug dbg, const QGstTagListHandle &handle)
+{
+ return dbg << handle.get();
+}
+
QDebug operator<<(QDebug dbg, const QGstElement &element)
{
return dbg << element.element();
@@ -155,27 +172,126 @@ QDebug operator<<(QDebug dbg, const GstDevice *device)
return dbg;
}
+namespace {
+
+struct Timepoint
+{
+ explicit Timepoint(guint64 us) : ts{ us } { }
+ guint64 ts;
+};
+
+QDebug operator<<(QDebug dbg, Timepoint ts)
+{
+ char buffer[128];
+ snprintf(buffer, sizeof(buffer), "%" GST_TIME_FORMAT, GST_TIME_ARGS(ts.ts));
+ dbg << buffer;
+ return dbg;
+}
+
+} // namespace
+
QDebug operator<<(QDebug dbg, const GstMessage *msg)
{
QDebugStateSaver saver(dbg);
dbg.nospace();
+ dbg << GST_MESSAGE_TYPE_NAME(msg) << ", Source: " << GST_MESSAGE_SRC_NAME(msg);
+ if (GST_MESSAGE_TIMESTAMP(msg) != 0xFFFFFFFFFFFFFFFF)
+ dbg << ", Timestamp: " << GST_MESSAGE_TIMESTAMP(msg);
+
switch (msg->type) {
case GST_MESSAGE_ERROR: {
QUniqueGErrorHandle err;
QGString debug;
gst_message_parse_error(const_cast<GstMessage *>(msg), &err, &debug);
- dbg << GST_MESSAGE_TYPE_NAME(msg) << ", Source: " << GST_MESSAGE_SRC_NAME(msg)
- << ", Timestamp: " << GST_MESSAGE_TIMESTAMP(msg) << ", Error: " << err << " (" << debug
- << ")";
+ dbg << ", Error: " << err << " (" << debug << ")";
break;
}
- default: {
- dbg << GST_MESSAGE_TYPE_NAME(msg) << ", Source: " << GST_MESSAGE_SRC_NAME(msg)
- << ", Timestamp: " << GST_MESSAGE_TIMESTAMP(msg);
+ case GST_MESSAGE_WARNING: {
+ QUniqueGErrorHandle err;
+ QGString debug;
+ gst_message_parse_warning(const_cast<GstMessage *>(msg), &err, &debug);
+
+ dbg << ", Warning: " << err << " (" << debug << ")";
+ break;
+ }
+
+ case GST_MESSAGE_INFO: {
+ QUniqueGErrorHandle err;
+ QGString debug;
+ gst_message_parse_info(const_cast<GstMessage *>(msg), &err, &debug);
+
+ dbg << ", Info: " << err << " (" << debug << ")";
+ break;
+ }
+
+ case GST_MESSAGE_TAG: {
+ QGstTagListHandle tagList;
+ gst_message_parse_tag(const_cast<GstMessage *>(msg), &tagList);
+
+ dbg << ", Tags: " << tagList;
+ break;
+ }
+
+ case GST_MESSAGE_QOS: {
+ gboolean live;
+ guint64 running_time;
+ guint64 stream_time;
+ guint64 timestamp;
+ guint64 duration;
+
+ gst_message_parse_qos(const_cast<GstMessage *>(msg), &live, &running_time, &stream_time,
+ &timestamp, &duration);
+
+ dbg << ", Live: " << bool(live) << ", Running time: " << Timepoint{ running_time }
+ << ", Stream time: " << Timepoint{ stream_time }
+ << ", Timestamp: " << Timepoint{ timestamp } << ", Duration: " << Timepoint{ duration };
+ break;
+ }
+
+ case GST_MESSAGE_STATE_CHANGED: {
+ GstState oldState;
+ GstState newState;
+ GstState pending;
+
+ gst_message_parse_state_changed(const_cast<GstMessage *>(msg), &oldState, &newState,
+ &pending);
+
+ dbg << ", Transition: " << oldState << "->" << newState;
+
+ if (pending != GST_STATE_VOID_PENDING)
+ dbg << ", Pending State: " << pending;
+ break;
}
+
+ case GST_MESSAGE_STREAM_COLLECTION: {
+ QGstStreamCollectionHandle collection;
+ gst_message_parse_stream_collection(const_cast<GstMessage *>(msg), &collection);
+
+ dbg << ", " << collection;
+ break;
+ }
+
+ case GST_MESSAGE_STREAMS_SELECTED: {
+ QGstStreamCollectionHandle collection;
+ gst_message_parse_streams_selected(const_cast<GstMessage *>(msg), &collection);
+
+ dbg << ", " << collection;
+ break;
+ }
+
+ case GST_MESSAGE_STREAM_STATUS: {
+ GstStreamStatusType streamStatus;
+ gst_message_parse_stream_status(const_cast<GstMessage *>(msg), &streamStatus, nullptr);
+
+ dbg << ", Stream Status: " << streamStatus;
+ break;
+ }
+
+ default:
+ break;
}
return dbg;
}
@@ -208,6 +324,50 @@ QDebug operator<<(QDebug dbg, const GstPadTemplate *padTemplate)
return dbg;
}
+QDebug operator<<(QDebug dbg, const GstStreamCollection *streamCollection)
+{
+ GstStreamCollection *collection = const_cast<GstStreamCollection *>(streamCollection);
+ guint size = gst_stream_collection_get_size(collection);
+
+ dbg << "Stream Collection: {";
+ for (guint index = 0; index != size; ++index) {
+ dbg << gst_stream_collection_get_stream(collection, index);
+ if (index + 1 != size)
+ dbg << ", ";
+ }
+
+ dbg << "}";
+ return dbg;
+}
+
+QDebug operator<<(QDebug dbg, const GstStream *cstream)
+{
+ GstStream *stream = const_cast<GstStream *>(cstream);
+
+ dbg << "GstStream { ";
+ dbg << "Type: " << gst_stream_type_get_name(gst_stream_get_stream_type(stream));
+
+ QGstTagListHandle tagList{
+ gst_stream_get_tags(stream),
+ QGstTagListHandle::HasRef,
+ };
+
+ if (tagList)
+ dbg << ", Tags: " << tagList;
+
+ QGstCaps caps{
+ gst_stream_get_caps(stream),
+ QGstCaps::HasRef,
+ };
+
+ if (caps)
+ dbg << ", Caps: " << caps;
+
+ dbg << "}";
+
+ return dbg;
+}
+
QDebug operator<<(QDebug dbg, GstState state)
{
return dbg << gst_element_state_get_name(state);
@@ -228,19 +388,40 @@ QDebug operator<<(QDebug dbg, GstMessageType type)
return dbg << gst_message_type_get_name(type);
}
+#define ADD_ENUM_SWITCH(value) \
+ case value: \
+ return dbg << #value; \
+ static_assert(true, "enforce semicolon")
+
QDebug operator<<(QDebug dbg, GstPadDirection direction)
{
switch (direction) {
- case GST_PAD_UNKNOWN:
- return dbg << "GST_PAD_UNKNOWN";
- case GST_PAD_SRC:
- return dbg << "GST_PAD_SRC";
- case GST_PAD_SINK:
- return dbg << "GST_PAD_SINK";
+ ADD_ENUM_SWITCH(GST_PAD_UNKNOWN);
+ ADD_ENUM_SWITCH(GST_PAD_SRC);
+ ADD_ENUM_SWITCH(GST_PAD_SINK);
+ default:
+ Q_UNREACHABLE_RETURN(dbg);
+ }
+}
+
+QDebug operator<<(QDebug dbg, GstStreamStatusType type)
+{
+ switch (type) {
+ ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_CREATE);
+ ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_ENTER);
+ ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_LEAVE);
+ ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_DESTROY);
+ ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_START);
+ ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_PAUSE);
+ ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_STOP);
+ default:
+ Q_UNREACHABLE_RETURN(dbg);
}
return dbg;
}
+#undef ADD_ENUM_SWITCH
+
QDebug operator<<(QDebug dbg, const GValue *value)
{
switch (G_VALUE_TYPE(value)) {
@@ -316,4 +497,69 @@ QDebug operator<<(QDebug dbg, const GError *error)
return dbg << error->message;
}
+QCompactGstMessageAdaptor::QCompactGstMessageAdaptor(const QGstreamerMessage &m)
+ : QCompactGstMessageAdaptor{
+ m.message(),
+ }
+{
+}
+
+QCompactGstMessageAdaptor::QCompactGstMessageAdaptor(GstMessage *m)
+ : msg{
+ m,
+ }
+{
+}
+
+QDebug operator<<(QDebug dbg, const QCompactGstMessageAdaptor &m)
+{
+ std::optional<QDebugStateSaver> saver(dbg);
+ dbg.nospace();
+
+ switch (GST_MESSAGE_TYPE(m.msg)) {
+ case GST_MESSAGE_ERROR: {
+ QUniqueGErrorHandle err;
+ QGString debug;
+ gst_message_parse_error(m.msg, &err, &debug);
+ dbg << err << " (" << debug << ")";
+ return dbg;
+ }
+
+ case GST_MESSAGE_WARNING: {
+ QUniqueGErrorHandle err;
+ QGString debug;
+ gst_message_parse_warning(m.msg, &err, &debug);
+ dbg << err << " (" << debug << ")";
+ return dbg;
+ }
+
+ case GST_MESSAGE_INFO: {
+ QUniqueGErrorHandle err;
+ QGString debug;
+ gst_message_parse_info(m.msg, &err, &debug);
+
+ dbg << err << " (" << debug << ")";
+ return dbg;
+ }
+
+ case GST_MESSAGE_STATE_CHANGED: {
+ GstState oldState;
+ GstState newState;
+ GstState pending;
+
+ gst_message_parse_state_changed(m.msg, &oldState, &newState, &pending);
+
+ dbg << oldState << " -> " << newState;
+ if (pending != GST_STATE_VOID_PENDING)
+ dbg << " (pending: " << pending << ")";
+ return dbg;
+ }
+
+ default: {
+ saver.reset();
+ return dbg << m.msg;
+ }
+ }
+}
+
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgst_debug_p.h b/src/plugins/multimedia/gstreamer/common/qgst_debug_p.h
index d64c240c6..df13c6c13 100644
--- a/src/plugins/multimedia/gstreamer/common/qgst_debug_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgst_debug_p.h
@@ -23,7 +23,7 @@ QT_BEGIN_NAMESPACE
class QGstreamerMessage;
QDebug operator<<(QDebug, const QGstCaps &);
-QDebug operator<<(QDebug, const QGstStructure &);
+QDebug operator<<(QDebug, const QGstStructureView &);
QDebug operator<<(QDebug, const QGstElement &);
QDebug operator<<(QDebug, const QGstPad &);
QDebug operator<<(QDebug, const QGString &);
@@ -31,6 +31,9 @@ QDebug operator<<(QDebug, const QGValue &);
QDebug operator<<(QDebug, const QGstreamerMessage &);
QDebug operator<<(QDebug, const QUniqueGErrorHandle &);
QDebug operator<<(QDebug, const QUniqueGStringHandle &);
+QDebug operator<<(QDebug, const QGstStreamCollectionHandle &);
+QDebug operator<<(QDebug, const QGstStreamHandle &);
+QDebug operator<<(QDebug, const QGstTagListHandle &);
QDebug operator<<(QDebug, const GstCaps *);
QDebug operator<<(QDebug, const GstVideoInfo *);
@@ -44,16 +47,28 @@ QDebug operator<<(QDebug, const GstTagList *);
QDebug operator<<(QDebug, const GstQuery *);
QDebug operator<<(QDebug, const GstEvent *);
QDebug operator<<(QDebug, const GstPadTemplate *);
+QDebug operator<<(QDebug, const GstStreamCollection *);
+QDebug operator<<(QDebug, const GstStream *);
QDebug operator<<(QDebug, GstState);
QDebug operator<<(QDebug, GstStateChange);
QDebug operator<<(QDebug, GstStateChangeReturn);
QDebug operator<<(QDebug, GstMessageType);
QDebug operator<<(QDebug, GstPadDirection);
+QDebug operator<<(QDebug, GstStreamStatusType);
QDebug operator<<(QDebug, const GValue *);
QDebug operator<<(QDebug, const GError *);
+struct QCompactGstMessageAdaptor
+{
+ explicit QCompactGstMessageAdaptor(const QGstreamerMessage &m);
+ explicit QCompactGstMessageAdaptor(GstMessage *m);
+ GstMessage *msg;
+};
+
+QDebug operator<<(QDebug, const QCompactGstMessageAdaptor &);
+
QT_END_NAMESPACE
#endif
diff --git a/src/plugins/multimedia/gstreamer/common/qgst_handle_types_p.h b/src/plugins/multimedia/gstreamer/common/qgst_handle_types_p.h
index b72e92db1..9c4f5683a 100644
--- a/src/plugins/multimedia/gstreamer/common/qgst_handle_types_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgst_handle_types_p.h
@@ -46,7 +46,7 @@ struct QSharedHandle : private QUniqueHandle<HandleTraits>
}
QSharedHandle(const QSharedHandle &o)
- : QSharedHandle{
+ : BaseClass{
HandleTraits::ref(o.get()),
}
{
@@ -163,6 +163,18 @@ struct QUniqueGErrorHandleTraits
}
};
+
+struct QUniqueGstDateTimeHandleTraits
+{
+ using Type = GstDateTime *;
+ static constexpr Type invalidValue() noexcept { return nullptr; }
+ static bool close(Type handle) noexcept
+ {
+ gst_date_time_unref(handle);
+ return true;
+ }
+};
+
struct QFileDescriptorHandleTraits
{
using Type = int;
@@ -226,10 +238,12 @@ struct QGstMiniObjectHandleHelper
using QGstClockHandle = QGstImpl::QGstHandleHelper<GstClock>::UniqueHandle;
using QGstElementHandle = QGstImpl::QGstHandleHelper<GstElement>::UniqueHandle;
-using QGstElementFactoryHandle = QGstImpl::QGstHandleHelper<GstElementFactory>::UniqueHandle;
+using QGstElementFactoryHandle = QGstImpl::QGstHandleHelper<GstElementFactory>::SharedHandle;
using QGstDeviceHandle = QGstImpl::QGstHandleHelper<GstDevice>::SharedHandle;
using QGstDeviceMonitorHandle = QGstImpl::QGstHandleHelper<GstDeviceMonitor>::UniqueHandle;
using QGstBusHandle = QGstImpl::QGstHandleHelper<GstBus>::UniqueHandle;
+using QGstStreamCollectionHandle = QGstImpl::QGstHandleHelper<GstStreamCollection>::SharedHandle;
+using QGstStreamHandle = QGstImpl::QGstHandleHelper<GstStream>::SharedHandle;
using QGstTagListHandle = QGstImpl::QSharedHandle<QGstImpl::QGstTagListHandleTraits>;
using QGstSampleHandle = QGstImpl::QSharedHandle<QGstImpl::QGstSampleHandleTraits>;
@@ -237,10 +251,13 @@ using QGstSampleHandle = QGstImpl::QSharedHandle<QGstImpl::QGstSampleHandleTrait
using QUniqueGstStructureHandle = QUniqueHandle<QGstImpl::QUniqueGstStructureHandleTraits>;
using QUniqueGStringHandle = QUniqueHandle<QGstImpl::QUniqueGStringHandleTraits>;
using QUniqueGErrorHandle = QUniqueHandle<QGstImpl::QUniqueGErrorHandleTraits>;
+using QUniqueGstDateTimeHandle = QUniqueHandle<QGstImpl::QUniqueGstDateTimeHandleTraits>;
using QFileDescriptorHandle = QUniqueHandle<QGstImpl::QFileDescriptorHandleTraits>;
+using QGstBufferHandle = QGstImpl::QGstMiniObjectHandleHelper<GstBuffer>::SharedHandle;
using QGstContextHandle = QGstImpl::QGstMiniObjectHandleHelper<GstContext>::UniqueHandle;
using QGstGstDateTimeHandle = QGstImpl::QGstMiniObjectHandleHelper<GstDateTime>::SharedHandle;
using QGstPluginFeatureHandle = QGstImpl::QGstHandleHelper<GstPluginFeature>::SharedHandle;
+using QGstQueryHandle = QGstImpl::QGstMiniObjectHandleHelper<GstQuery>::SharedHandle;
#if QT_CONFIG(gstreamer_gl)
using QGstGLContextHandle = QGstImpl::QGstHandleHelper<GstGLContext>::UniqueHandle;
diff --git a/src/plugins/multimedia/gstreamer/common/qgst_p.h b/src/plugins/multimedia/gstreamer/common/qgst_p.h
index ab264f552..68412258e 100644
--- a/src/plugins/multimedia/gstreamer/common/qgst_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgst_p.h
@@ -15,21 +15,21 @@
// We mean it.
//
-#include <common/qgst_handle_types_p.h>
-
-#include <private/qtmultimediaglobal_p.h>
-#include <private/qmultimediautils_p.h>
-
#include <QtCore/qdebug.h>
#include <QtCore/qlist.h>
#include <QtCore/qsemaphore.h>
#include <QtMultimedia/qaudioformat.h>
#include <QtMultimedia/qvideoframe.h>
+#include <QtMultimedia/private/qtmultimediaglobal_p.h>
+#include <QtMultimedia/private/qmultimediautils_p.h>
+#include <QtMultimedia/private/qplatformmediaplayer_p.h>
#include <gst/gst.h>
#include <gst/video/video-info.h>
+#include "qgst_handle_types_p.h"
+
#include <type_traits>
#if QT_CONFIG(gstreamer_photography)
@@ -80,6 +80,29 @@ struct GstObjectTraits
}; \
static_assert(true, "ensure semicolon")
+#define QGST_DEFINE_CAST_TRAITS_FOR_INTERFACE(ClassName, MACRO_LABEL) \
+ template <> \
+ struct GstObjectTraits<ClassName> \
+ { \
+ using Type = ClassName; \
+ template <typename U> \
+ static bool isObjectOfType(U *arg) \
+ { \
+ return GST_IS_##MACRO_LABEL(arg); \
+ } \
+ template <typename U> \
+ static Type *cast(U *arg) \
+ { \
+ return checked_cast(arg); \
+ } \
+ template <typename U> \
+ static Type *checked_cast(U *arg) \
+ { \
+ return GST_##MACRO_LABEL(arg); \
+ } \
+ }; \
+ static_assert(true, "ensure semicolon")
+
QGST_DEFINE_CAST_TRAITS(GstBin, BIN);
QGST_DEFINE_CAST_TRAITS(GstClock, CLOCK);
QGST_DEFINE_CAST_TRAITS(GstElement, ELEMENT);
@@ -89,6 +112,8 @@ QGST_DEFINE_CAST_TRAITS(GstPipeline, PIPELINE);
QGST_DEFINE_CAST_TRAITS(GstBaseSink, BASE_SINK);
QGST_DEFINE_CAST_TRAITS(GstBaseSrc, BASE_SRC);
+QGST_DEFINE_CAST_TRAITS_FOR_INTERFACE(GstTagSetter, TAG_SETTER);
+
#if QT_CONFIG(gstreamer_app)
QGST_DEFINE_CAST_TRAITS(GstAppSink, APP_SINK);
QGST_DEFINE_CAST_TRAITS(GstAppSrc, APP_SRC);
@@ -116,10 +141,18 @@ struct GstObjectTraits<GObject>
};
#undef QGST_DEFINE_CAST_TRAITS
+#undef QGST_DEFINE_CAST_TRAITS_FOR_INTERFACE
} // namespace QGstImpl
template <typename DestinationType, typename SourceType>
+bool qIsGstObjectOfType(SourceType *arg)
+{
+ using Traits = QGstImpl::GstObjectTraits<DestinationType>;
+ return arg && Traits::isObjectOfType(arg);
+}
+
+template <typename DestinationType, typename SourceType>
DestinationType *qGstSafeCast(SourceType *arg)
{
using Traits = QGstImpl::GstObjectTraits<DestinationType>;
@@ -138,7 +171,7 @@ DestinationType *qGstCheckedCast(SourceType *arg)
}
class QSize;
-class QGstStructure;
+class QGstStructureView;
class QGstCaps;
class QGstPipelinePrivate;
class QCameraFormat;
@@ -179,7 +212,7 @@ public:
std::optional<QGRange<float>> getFractionRange() const;
std::optional<QGRange<int>> toIntRange() const;
- QGstStructure toStructure() const;
+ QGstStructureView toStructure() const;
QGstCaps toCaps() const;
bool isList() const;
@@ -277,18 +310,21 @@ protected:
class QGstreamerMessage;
-class QGstStructure
+class QGstStructureView
{
public:
const GstStructure *structure = nullptr;
- QGstStructure() = default;
- QGstStructure(const GstStructure *s);
- void free();
+ explicit QGstStructureView(const GstStructure *);
+ explicit QGstStructureView(const QUniqueGstStructureHandle &);
- bool isNull() const;
+ QUniqueGstStructureHandle clone() const;
+ bool isNull() const;
QByteArrayView name() const;
- QGValue operator[](const char *name) const;
+ QGValue operator[](const char *fieldname) const;
+
+ QGstCaps caps() const;
+ QGstTagListHandle tags() const;
QSize resolution() const;
QVideoFrameFormat::PixelFormat pixelFormat() const;
@@ -296,8 +332,6 @@ public:
QGstreamerMessage getMessage();
std::optional<Fraction> pixelAspectRatio() const;
QSize nativeSize() const;
-
- QGstStructure copy() const;
};
template <>
@@ -321,7 +355,7 @@ public:
enum MemoryFormat { CpuMemory, GLTexture, DMABuf };
int size() const;
- QGstStructure at(int index) const;
+ QGstStructureView at(int index) const;
GstCaps *caps() const;
MemoryFormat memoryFormat() const;
@@ -333,6 +367,8 @@ public:
static QGstCaps create();
static QGstCaps fromCameraFormat(const QCameraFormat &format);
+
+ QGstCaps copy() const;
};
template <>
@@ -367,7 +403,7 @@ public:
void set(const char *property, const QGstCaps &c);
QGString getString(const char *property) const;
- QGstStructure getStructure(const char *property) const;
+ QGstStructureView getStructure(const char *property) const;
bool getBool(const char *property) const;
uint getUInt(const char *property) const;
int getInt(const char *property) const;
@@ -381,8 +417,9 @@ public:
void disconnect(gulong handlerId);
GType type() const;
+ QLatin1StringView typeName() const;
GstObject *object() const;
- const char *name() const;
+ QLatin1StringView name() const;
};
class QGObjectHandlerConnection
@@ -443,6 +480,11 @@ public:
QGstCaps currentCaps() const;
QGstCaps queryCaps() const;
+ QGstTagListHandle tags() const;
+
+ std::optional<QPlatformMediaPlayer::TrackType>
+ inferTrackTypeFromName() const; // for decodebin3 etc
+
bool isLinked() const;
bool link(const QGstPad &sink) const;
bool unlink(const QGstPad &sink) const;
@@ -531,6 +573,11 @@ public:
const char *name = nullptr);
static QGstElement createFromDevice(const QGstDeviceHandle &, const char *name = nullptr);
static QGstElement createFromDevice(GstDevice *, const char *name = nullptr);
+ static QGstElement createFromPipelineDescription(const char *);
+ static QGstElement createFromPipelineDescription(const QByteArray &);
+
+ static QGstElementFactoryHandle findFactory(const char *);
+ static QGstElementFactoryHandle findFactory(const QByteArray &name);
QGstPad staticPad(const char *name) const;
QGstPad src() const;
@@ -599,6 +646,7 @@ public:
QGstElement getParent() const;
QGstPipeline getPipeline() const;
+ void dumpPipelineGraph(const char *filename) const;
};
template <typename... Ts>
@@ -642,6 +690,12 @@ public:
explicit QGstBin(GstBin *bin, RefMode mode = NeedsRef);
static QGstBin create(const char *name);
static QGstBin createFromFactory(const char *factory, const char *name);
+ static QGstBin createFromPipelineDescription(const QByteArray &pipelineDescription,
+ const char *name = nullptr,
+ bool ghostUnlinkedPads = false);
+ static QGstBin createFromPipelineDescription(const char *pipelineDescription,
+ const char *name = nullptr,
+ bool ghostUnlinkedPads = false);
template <typename... Ts>
std::enable_if_t<(std::is_base_of_v<QGstElement, Ts> && ...), void> add(const Ts &...ts)
@@ -678,6 +732,8 @@ public:
bool syncChildrenState();
void dumpGraph(const char *fileNamePrefix);
+
+ QGstElement findByName(const char *);
};
class QGstBaseSink : public QGstElement
@@ -692,6 +748,8 @@ public:
QGstBaseSink &operator=(const QGstBaseSink &) = default;
QGstBaseSink &operator=(QGstBaseSink &&) noexcept = default;
+ void setSync(bool);
+
GstBaseSink *baseSink() const;
};
@@ -727,6 +785,11 @@ public:
GstAppSink *appSink() const;
+ void setMaxBuffers(int);
+# if GST_CHECK_VERSION(1, 24, 0)
+ void setMaxBufferTime(std::chrono::nanoseconds);
+# endif
+
void setCaps(const QGstCaps &caps);
void setCallbacks(GstAppSinkCallbacks &callbacks, gpointer user_data, GDestroyNotify notify);
@@ -756,10 +819,24 @@ public:
#endif
-inline QString errorMessageCannotFindElement(std::string_view element)
+inline GstClockTime qGstClockTimeFromChrono(std::chrono::nanoseconds ns)
{
- return QStringLiteral("Could not find the %1 GStreamer element")
- .arg(QLatin1StringView(element));
+ return ns.count();
+}
+
+QString qGstErrorMessageCannotFindElement(std::string_view element);
+
+template <typename Arg, typename... Args>
+std::optional<QString> qGstErrorMessageIfElementsNotAvailable(const Arg &arg, Args... args)
+{
+ QGstElementFactoryHandle factory = QGstElement::findFactory(arg);
+ if (!factory)
+ return qGstErrorMessageCannotFindElement(arg);
+
+ if constexpr (sizeof...(args) != 0)
+ return qGstErrorMessageIfElementsNotAvailable(args...);
+ else
+ return std::nullopt;
}
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstappsource.cpp b/src/plugins/multimedia/gstreamer/common/qgstappsource.cpp
index 99af8443c..3c345de82 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstappsource.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstappsource.cpp
@@ -16,7 +16,7 @@ QMaybe<QGstAppSource *> QGstAppSource::create(QObject *parent)
{
QGstAppSrc appsrc = QGstAppSrc::create("appsrc");
if (!appsrc)
- return errorMessageCannotFindElement("appsrc");
+ return qGstErrorMessageCannotFindElement("appsrc");
return new QGstAppSource(appsrc, parent);
}
diff --git a/src/plugins/multimedia/gstreamer/common/qgstpipeline.cpp b/src/plugins/multimedia/gstreamer/common/qgstpipeline.cpp
index 392898245..16b0e28a9 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstpipeline.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstpipeline.cpp
@@ -14,11 +14,16 @@
QT_BEGIN_NAMESPACE
+static constexpr GstSeekFlags rateChangeSeekFlags =
+#if GST_CHECK_VERSION(1, 18, 0)
+ GST_SEEK_FLAG_INSTANT_RATE_CHANGE;
+#else
+ GST_SEEK_FLAG_FLUSH;
+#endif
+
class QGstPipelinePrivate : public QObject
{
- Q_OBJECT
public:
-
int m_ref = 0;
guint m_tag = 0;
GstBus *m_bus = nullptr;
@@ -27,7 +32,7 @@ public:
QList<QGstreamerSyncMessageFilter*> syncFilters;
QList<QGstreamerBusMessageFilter*> busFilters;
bool inStoppedState = true;
- mutable qint64 m_position = 0;
+ mutable std::chrono::nanoseconds m_position{};
double m_rate = 1.;
bool m_flushOnConfigChanges = false;
bool m_pendingFlush = false;
@@ -46,8 +51,21 @@ public:
void installMessageFilter(QGstreamerBusMessageFilter *filter);
void removeMessageFilter(QGstreamerBusMessageFilter *filter);
- static GstBusSyncReply syncGstBusFilter(GstBus* bus, GstMessage* message, QGstPipelinePrivate *d)
+ void processMessage(const QGstreamerMessage &msg)
{
+ for (QGstreamerBusMessageFilter *filter : std::as_const(busFilters)) {
+ if (filter->processBusMessage(msg))
+ break;
+ }
+ }
+
+private:
+ static GstBusSyncReply syncGstBusFilter(GstBus *bus, GstMessage *message,
+ QGstPipelinePrivate *d)
+ {
+ if (!message)
+ return GST_BUS_PASS;
+
Q_UNUSED(bus);
QMutexLocker lock(&d->filterMutex);
@@ -62,31 +80,17 @@ public:
return GST_BUS_PASS;
}
-private Q_SLOTS:
- void interval()
- {
- GstMessage* message;
- while ((message = gst_bus_poll(m_bus, GST_MESSAGE_ANY, 0)) != nullptr) {
- processMessage(message);
- gst_message_unref(message);
- }
- }
- void doProcessMessage(const QGstreamerMessage& msg)
- {
- for (QGstreamerBusMessageFilter *filter : std::as_const(busFilters)) {
- if (filter->processBusMessage(msg))
- break;
- }
- }
-
-private:
void processMessage(GstMessage *message)
{
+ if (!message)
+ return;
+
QGstreamerMessage msg{
message,
QGstreamerMessage::NeedsRef,
};
- doProcessMessage(msg);
+
+ processMessage(msg);
}
static gboolean busCallback(GstBus *, GstMessage *message, gpointer data)
@@ -106,7 +110,13 @@ QGstPipelinePrivate::QGstPipelinePrivate(GstBus* bus, QObject* parent)
if (!hasGlib) {
m_intervalTimer = new QTimer(this);
m_intervalTimer->setInterval(250);
- connect(m_intervalTimer, SIGNAL(timeout()), SLOT(interval()));
+ QObject::connect(m_intervalTimer, &QTimer::timeout, this, [this] {
+ GstMessage *message;
+ while ((message = gst_bus_poll(m_bus, GST_MESSAGE_ANY, 0)) != nullptr) {
+ processMessage(message);
+ gst_message_unref(message);
+ }
+ });
m_intervalTimer->start();
} else {
m_tag = gst_bus_add_watch_full(bus, G_PRIORITY_DEFAULT, busCallback, this, nullptr);
@@ -235,6 +245,16 @@ GstStateChangeReturn QGstPipeline::setState(GstState state)
return retval;
}
+void QGstPipeline::processMessages(GstMessageType types)
+{
+ QGstPipelinePrivate *d = getPrivate();
+ QGstreamerMessage message{
+ gst_bus_pop_filtered(d->m_bus, types),
+ QGstreamerMessage::HasRef,
+ };
+ d->processMessage(message);
+}
+
void QGstPipeline::dumpGraph(const char *fileName)
{
if (isNull())
@@ -268,8 +288,8 @@ void QGstPipeline::beginConfig()
break;
}
case GST_STATE_CHANGE_FAILURE: {
- // should not happen
- qCritical() << "QGstPipeline::beginConfig: state change failure";
+ qDebug() << "QGstPipeline::beginConfig: state change failure";
+ dumpGraph("beginConfigFailure");
break;
}
@@ -301,48 +321,45 @@ void QGstPipeline::endConfig()
void QGstPipeline::flush()
{
- QGstPipelinePrivate *d = getPrivate();
- seek(position(), d->m_rate);
+ seek(position());
}
-bool QGstPipeline::seek(qint64 pos, double rate)
+void QGstPipeline::seek(std::chrono::nanoseconds pos, double rate)
{
+ using namespace std::chrono_literals;
+
QGstPipelinePrivate *d = getPrivate();
- // always adjust the rate, so it can be set before playback starts
+ // always adjust the rate, so it can be set before playback starts
// setting position needs a loaded media file that's seekable
- d->m_rate = rate;
- qint64 from = rate > 0 ? pos : 0;
- qint64 to = rate > 0 ? duration() : pos;
- bool success = gst_element_seek(element(), rate, GST_FORMAT_TIME,
- GstSeekFlags(GST_SEEK_FLAG_FLUSH),
- GST_SEEK_TYPE_SET, from,
- GST_SEEK_TYPE_SET, to);
- if (!success)
- return false;
+
+ bool success = (rate > 0)
+ ? gst_element_seek(element(), d->m_rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
+ GST_SEEK_TYPE_SET, pos.count(), GST_SEEK_TYPE_END, 0)
+ : gst_element_seek(element(), d->m_rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
+ GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET, pos.count());
+
+ if (!success) {
+ qDebug() << "seek: gst_element_seek failed" << pos;
+ return;
+ }
d->m_position = pos;
- return true;
}
-bool QGstPipeline::setPlaybackRate(double rate)
+void QGstPipeline::seek(std::chrono::nanoseconds pos)
+{
+ seek(pos, getPrivate()->m_rate);
+}
+
+void QGstPipeline::setPlaybackRate(double rate)
{
QGstPipelinePrivate *d = getPrivate();
if (rate == d->m_rate)
- return false;
-
- constexpr GstSeekFlags seekFlags =
-#if GST_CHECK_VERSION(1, 18, 0)
- GST_SEEK_FLAG_INSTANT_RATE_CHANGE;
-#else
- GST_SEEK_FLAG_FLUSH;
-#endif
+ return;
- bool success = gst_element_seek(element(), rate, GST_FORMAT_TIME, seekFlags, GST_SEEK_TYPE_NONE,
- GST_CLOCK_TIME_NONE, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
- if (success)
- d->m_rate = rate;
+ d->m_rate = rate;
- return success;
+ applyPlaybackRate(/*instantRateChange =*/true);
}
double QGstPipeline::playbackRate() const
@@ -351,27 +368,50 @@ double QGstPipeline::playbackRate() const
return d->m_rate;
}
-bool QGstPipeline::setPosition(qint64 pos)
+void QGstPipeline::applyPlaybackRate(bool instantRateChange)
{
QGstPipelinePrivate *d = getPrivate();
- return seek(pos, d->m_rate);
+
+ bool success = gst_element_seek(element(), d->m_rate, GST_FORMAT_UNDEFINED,
+ instantRateChange ? rateChangeSeekFlags : GST_SEEK_FLAG_FLUSH,
+ GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE, GST_SEEK_TYPE_NONE,
+ GST_CLOCK_TIME_NONE);
+ if (!success)
+ qDebug() << "setPlaybackRate: gst_element_seek failed";
}
-qint64 QGstPipeline::position() const
+void QGstPipeline::setPosition(std::chrono::nanoseconds pos)
+{
+ seek(pos);
+}
+
+std::chrono::nanoseconds QGstPipeline::position() const
{
gint64 pos;
QGstPipelinePrivate *d = getPrivate();
if (gst_element_query_position(element(), GST_FORMAT_TIME, &pos))
- d->m_position = pos;
+ d->m_position = std::chrono::nanoseconds{ pos };
return d->m_position;
}
-qint64 QGstPipeline::duration() const
+std::chrono::milliseconds QGstPipeline::positionInMs() const
+{
+ using namespace std::chrono;
+ return round<milliseconds>(position());
+}
+
+std::chrono::nanoseconds QGstPipeline::duration() const
{
gint64 d;
if (!gst_element_query_duration(element(), GST_FORMAT_TIME, &d))
- return 0.;
- return d;
+ return {};
+ return std::chrono::nanoseconds{ d };
+}
+
+std::chrono::milliseconds QGstPipeline::durationInMs() const
+{
+ using namespace std::chrono;
+ return round<milliseconds>(duration());
}
QGstPipelinePrivate *QGstPipeline::getPrivate() const
@@ -383,6 +423,3 @@ QGstPipelinePrivate *QGstPipeline::getPrivate() const
}
QT_END_NAMESPACE
-
-#include "qgstpipeline.moc"
-
diff --git a/src/plugins/multimedia/gstreamer/common/qgstpipeline_p.h b/src/plugins/multimedia/gstreamer/common/qgstpipeline_p.h
index 23610dd00..41275587e 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstpipeline_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstpipeline_p.h
@@ -15,10 +15,10 @@
// We mean it.
//
-#include <private/qtmultimediaglobal_p.h>
-#include <QObject>
+#include <QtMultimedia/private/qtmultimediaglobal_p.h>
+#include <QtCore/qobject.h>
-#include <common/qgst_p.h>
+#include "qgst_p.h"
QT_BEGIN_NAMESPACE
@@ -73,6 +73,8 @@ public:
GstPipeline *pipeline() const { return GST_PIPELINE_CAST(get()); }
+ void processMessages(GstMessageType = GST_MESSAGE_ANY);
+
void dumpGraph(const char *fileName);
template <typename Functor>
@@ -94,16 +96,21 @@ public:
void flush();
- bool seek(qint64 pos, double rate);
- bool setPlaybackRate(double rate);
+ void setPlaybackRate(double rate);
double playbackRate() const;
+ void applyPlaybackRate(bool instantRateChange);
- bool setPosition(qint64 pos);
- qint64 position() const;
+ void setPosition(std::chrono::nanoseconds pos);
+ std::chrono::nanoseconds position() const;
+ std::chrono::milliseconds positionInMs() const;
- qint64 duration() const;
+ std::chrono::nanoseconds duration() const;
+ std::chrono::milliseconds durationInMs() const;
private:
+ void seek(std::chrono::nanoseconds pos, double rate);
+ void seek(std::chrono::nanoseconds pos);
+
QGstPipelinePrivate *getPrivate() const;
void beginConfig();
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput.cpp b/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput.cpp
index 0381b921e..7c620da39 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput.cpp
@@ -21,24 +21,23 @@ QT_BEGIN_NAMESPACE
QMaybe<QPlatformAudioInput *> QGstreamerAudioInput::create(QAudioInput *parent)
{
- QGstElement autoaudiosrc = QGstElement::createFromFactory("autoaudiosrc", "autoaudiosrc");
- if (!autoaudiosrc)
- return errorMessageCannotFindElement("autoaudiosrc");
+ static const auto error = qGstErrorMessageIfElementsNotAvailable("autoaudiosrc", "volume");
+ if (error)
+ return *error;
- QGstElement volume = QGstElement::createFromFactory("volume", "volume");
- if (!volume)
- return errorMessageCannotFindElement("volume");
-
- return new QGstreamerAudioInput(autoaudiosrc, volume, parent);
+ return new QGstreamerAudioInput(parent);
}
-QGstreamerAudioInput::QGstreamerAudioInput(QGstElement autoaudiosrc, QGstElement volume,
- QAudioInput *parent)
+QGstreamerAudioInput::QGstreamerAudioInput(QAudioInput *parent)
: QObject(parent),
QPlatformAudioInput(parent),
gstAudioInput(QGstBin::create("audioInput")),
- audioSrc(std::move(autoaudiosrc)),
- audioVolume(std::move(volume))
+ audioSrc{
+ QGstElement::createFromFactory("autoaudiosrc", "autoaudiosrc"),
+ },
+ audioVolume{
+ QGstElement::createFromFactory("volume", "volume"),
+ }
{
gstAudioInput.add(audioSrc, audioVolume);
qLinkGstElements(audioSrc, audioVolume);
@@ -46,6 +45,56 @@ QGstreamerAudioInput::QGstreamerAudioInput(QGstElement autoaudiosrc, QGstElement
gstAudioInput.addGhostPad(audioVolume, "src");
}
+QGstElement QGstreamerAudioInput::createGstElement()
+{
+ const auto *customDeviceInfo =
+ dynamic_cast<const QGStreamerCustomAudioDeviceInfo *>(m_audioDevice.handle());
+
+ if (customDeviceInfo) {
+ qCDebug(qLcMediaAudioInput)
+ << "requesting custom audio src element: " << customDeviceInfo->id;
+
+ QGstElement element = QGstBin::createFromPipelineDescription(customDeviceInfo->id,
+ /*name=*/nullptr,
+ /*ghostUnlinkedPads=*/true);
+ if (element)
+ return element;
+
+ qCWarning(qLcMediaAudioInput)
+ << "Cannot create audio source element:" << customDeviceInfo->id;
+ }
+
+ const QByteArray &id = m_audioDevice.id();
+ if constexpr (QT_CONFIG(pulseaudio)) {
+ QGstElement newSrc = QGstElement::createFromFactory("pulsesrc", "audiosrc");
+ if (newSrc) {
+ newSrc.set("device", id.constData());
+ return newSrc;
+ } else {
+ qWarning() << "Cannot create pulsesrc";
+ }
+ } else if constexpr (QT_CONFIG(alsa)) {
+ QGstElement newSrc = QGstElement::createFromFactory("alsasrc", "audiosrc");
+ if (newSrc) {
+ newSrc.set("device", id.constData());
+ return newSrc;
+ } else {
+ qWarning() << "Cannot create alsasrc";
+ }
+ } else {
+ auto *deviceInfo = dynamic_cast<const QGStreamerAudioDeviceInfo *>(m_audioDevice.handle());
+ if (deviceInfo && deviceInfo->gstDevice) {
+ QGstElement element = QGstElement::createFromDevice(deviceInfo->gstDevice, "audiosrc");
+ if (element)
+ return element;
+ }
+ }
+ qCWarning(qLcMediaAudioInput) << "Invalid audio device";
+ qCWarning(qLcMediaAudioInput)
+ << "Failed to create a gst element for the audio device, using a default audio source";
+ return QGstElement::createFromFactory("autoaudiosrc", "audiosrc");
+}
+
QGstreamerAudioInput::~QGstreamerAudioInput()
{
gstAudioInput.setStateSync(GST_STATE_NULL);
@@ -68,26 +117,7 @@ void QGstreamerAudioInput::setAudioDevice(const QAudioDevice &device)
qCDebug(qLcMediaAudioInput) << "setAudioInput" << device.description() << device.isNull();
m_audioDevice = device;
- QGstElement newSrc;
- if constexpr (QT_CONFIG(pulseaudio)) {
- auto id = m_audioDevice.id();
- newSrc = QGstElement::createFromFactory("pulsesrc", "audiosrc");
- if (!newSrc.isNull())
- newSrc.set("device", id.constData());
- else
- qCWarning(qLcMediaAudioInput) << "Invalid audio device";
- } else {
- auto *deviceInfo = static_cast<const QGStreamerAudioDeviceInfo *>(m_audioDevice.handle());
- if (deviceInfo && deviceInfo->gstDevice)
- newSrc = QGstElement::createFromDevice(deviceInfo->gstDevice, "audiosrc");
- else
- qCWarning(qLcMediaAudioInput) << "Invalid audio device";
- }
-
- if (newSrc.isNull()) {
- qCWarning(qLcMediaAudioInput) << "Failed to create a gst element for the audio device, using a default audio source";
- newSrc = QGstElement::createFromFactory("autoaudiosrc", "audiosrc");
- }
+ QGstElement newSrc = createGstElement();
QGstPipeline::modifyPipelineWhileNotRunning(gstAudioInput.getPipeline(), [&] {
qUnlinkGstElements(audioSrc, audioVolume);
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput_p.h b/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput_p.h
index 69500ecab..5ca0e1a49 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput_p.h
@@ -44,7 +44,9 @@ public:
QGstElement gstElement() const { return gstAudioInput; }
private:
- QGstreamerAudioInput(QGstElement autoaudiosrc, QGstElement volume, QAudioInput *parent);
+ explicit QGstreamerAudioInput(QAudioInput *parent);
+
+ QGstElement createGstElement();
QAudioDevice m_audioDevice;
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput.cpp b/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput.cpp
index f45c371e9..9cea7fb62 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput.cpp
@@ -16,43 +16,90 @@ QT_BEGIN_NAMESPACE
QMaybe<QPlatformAudioOutput *> QGstreamerAudioOutput::create(QAudioOutput *parent)
{
- QGstElement audioconvert = QGstElement::createFromFactory("audioconvert", "audioConvert");
- if (!audioconvert)
- return errorMessageCannotFindElement("audioconvert");
+ static const auto error = qGstErrorMessageIfElementsNotAvailable(
+ "audioconvert", "audioresample", "volume", "autoaudiosink");
+ if (error)
+ return *error;
- QGstElement audioresample = QGstElement::createFromFactory("audioresample", "audioResample");
- if (!audioresample)
- return errorMessageCannotFindElement("audioresample");
-
- QGstElement volume = QGstElement::createFromFactory("volume", "volume");
- if (!volume)
- return errorMessageCannotFindElement("volume");
-
- QGstElement autoaudiosink = QGstElement::createFromFactory("autoaudiosink", "autoAudioSink");
- if (!autoaudiosink)
- return errorMessageCannotFindElement("autoaudiosink");
-
- return new QGstreamerAudioOutput(audioconvert, audioresample, volume, autoaudiosink, parent);
+ return new QGstreamerAudioOutput(parent);
}
-QGstreamerAudioOutput::QGstreamerAudioOutput(QGstElement audioconvert, QGstElement audioresample,
- QGstElement volume, QGstElement autoaudiosink,
- QAudioOutput *parent)
+QGstreamerAudioOutput::QGstreamerAudioOutput(QAudioOutput *parent)
: QObject(parent),
QPlatformAudioOutput(parent),
gstAudioOutput(QGstBin::create("audioOutput")),
- audioConvert(std::move(audioconvert)),
- audioResample(std::move(audioresample)),
- audioVolume(std::move(volume)),
- audioSink(std::move(autoaudiosink))
+ audioQueue{
+ QGstElement::createFromFactory("queue", "audioQueue"),
+ },
+ audioConvert{
+ QGstElement::createFromFactory("audioconvert", "audioConvert"),
+ },
+ audioResample{
+ QGstElement::createFromFactory("audioresample", "audioResample"),
+ },
+ audioVolume{
+ QGstElement::createFromFactory("volume", "volume"),
+ },
+ audioSink{
+ QGstElement::createFromFactory("autoaudiosink", "autoAudioSink"),
+ }
{
- audioQueue = QGstElement::createFromFactory("queue", "audioQueue");
gstAudioOutput.add(audioQueue, audioConvert, audioResample, audioVolume, audioSink);
qLinkGstElements(audioQueue, audioConvert, audioResample, audioVolume, audioSink);
gstAudioOutput.addGhostPad(audioQueue, "sink");
}
+QGstElement QGstreamerAudioOutput::createGstElement()
+{
+ const auto *customDeviceInfo =
+ dynamic_cast<const QGStreamerCustomAudioDeviceInfo *>(m_audioOutput.handle());
+
+ if (customDeviceInfo) {
+ qCDebug(qLcMediaAudioOutput)
+ << "requesting custom audio sink element: " << customDeviceInfo->id;
+
+ QGstElement element =
+ QGstBin::createFromPipelineDescription(customDeviceInfo->id, /*name=*/nullptr,
+ /*ghostUnlinkedPads=*/true);
+ if (element)
+ return element;
+
+ qCWarning(qLcMediaAudioOutput)
+ << "Cannot create audio sink element:" << customDeviceInfo->id;
+ }
+
+ const QByteArray &id = m_audioOutput.id();
+ if constexpr (QT_CONFIG(pulseaudio)) {
+ QGstElement newSink = QGstElement::createFromFactory("pulsesink", "audiosink");
+ if (newSink) {
+ newSink.set("device", id.constData());
+ return newSink;
+ } else {
+ qWarning() << "Cannot create pulsesink";
+ }
+ } else if constexpr (QT_CONFIG(alsa)) {
+ QGstElement newSink = QGstElement::createFromFactory("alsasink", "audiosink");
+ if (newSink) {
+ newSink.set("device", id.constData());
+ return newSink;
+ } else {
+ qWarning() << "Cannot create alsasink";
+ }
+ } else {
+ auto *deviceInfo = dynamic_cast<const QGStreamerAudioDeviceInfo *>(m_audioOutput.handle());
+ if (deviceInfo && deviceInfo->gstDevice) {
+ QGstElement element = QGstElement::createFromDevice(deviceInfo->gstDevice, "audiosink");
+ if (element)
+ return element;
+ }
+ }
+ qCWarning(qLcMediaAudioOutput) << "Invalid audio device:" << m_audioOutput.id();
+ qCWarning(qLcMediaAudioOutput)
+ << "Failed to create a gst element for the audio device, using a default audio sink";
+ return QGstElement::createFromFactory("autoaudiosink", "audiosink");
+}
+
QGstreamerAudioOutput::~QGstreamerAudioOutput()
{
gstAudioOutput.setStateSync(GST_STATE_NULL);
@@ -73,28 +120,10 @@ void QGstreamerAudioOutput::setAudioDevice(const QAudioDevice &info)
if (info == m_audioOutput)
return;
qCDebug(qLcMediaAudioOutput) << "setAudioOutput" << info.description() << info.isNull();
- m_audioOutput = info;
- QGstElement newSink;
- if constexpr (QT_CONFIG(pulseaudio)) {
- auto id = m_audioOutput.id();
- newSink = QGstElement::createFromFactory("pulsesink", "audiosink");
- if (!newSink.isNull())
- newSink.set("device", id.constData());
- else
- qCWarning(qLcMediaAudioOutput) << "Invalid audio device";
- } else {
- auto *deviceInfo = static_cast<const QGStreamerAudioDeviceInfo *>(m_audioOutput.handle());
- if (deviceInfo && deviceInfo->gstDevice)
- newSink = QGstElement::createFromDevice(deviceInfo->gstDevice, "audiosink");
- else
- qCWarning(qLcMediaAudioOutput) << "Invalid audio device";
- }
+ m_audioOutput = info;
- if (newSink.isNull()) {
- qCWarning(qLcMediaAudioOutput) << "Failed to create a gst element for the audio device, using a default audio sink";
- newSink = QGstElement::createFromFactory("autoaudiosink", "audiosink");
- }
+ QGstElement newSink = createGstElement();
QGstPipeline::modifyPipelineWhileNotRunning(gstAudioOutput.getPipeline(), [&] {
qUnlinkGstElements(audioVolume, audioSink);
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput_p.h b/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput_p.h
index 4b528d9ee..dea53e5c4 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput_p.h
@@ -41,8 +41,9 @@ public:
QGstElement gstElement() const { return gstAudioOutput; }
private:
- QGstreamerAudioOutput(QGstElement audioconvert, QGstElement audioresample, QGstElement volume,
- QGstElement autoaudiosink, QAudioOutput *parent);
+ explicit QGstreamerAudioOutput(QAudioOutput *parent);
+
+ QGstElement createGstElement();
QAudioDevice m_audioOutput;
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamerbufferprobe.cpp b/src/plugins/multimedia/gstreamer/common/qgstreamerbufferprobe.cpp
index 341cb69b3..9cba810db 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreamerbufferprobe.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamerbufferprobe.cpp
@@ -3,6 +3,8 @@
#include <common/qgstreamerbufferprobe_p.h>
+#include <common/qgst_p.h>
+
QT_BEGIN_NAMESPACE
QGstreamerBufferProbe::QGstreamerBufferProbe(Flags flags)
@@ -14,10 +16,14 @@ QGstreamerBufferProbe::~QGstreamerBufferProbe() = default;
void QGstreamerBufferProbe::addProbeToPad(GstPad *pad, bool downstream)
{
- if (GstCaps *caps = gst_pad_get_current_caps(pad)) {
- probeCaps(caps);
- gst_caps_unref(caps);
- }
+ QGstCaps caps{
+ gst_pad_get_current_caps(pad),
+ QGstCaps::HasRef,
+ };
+
+ if (caps)
+ probeCaps(caps.caps());
+
if (m_flags & ProbeCaps) {
m_capsProbeId = gst_pad_add_probe(
pad,
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer.cpp b/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer.cpp
index 8388ce8d2..1d7299da2 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer.cpp
@@ -26,6 +26,10 @@
#include <sys/stat.h>
#include <fcntl.h>
+#if QT_CONFIG(gstreamer_gl)
+# include <gst/gl/gl.h>
+#endif
+
static Q_LOGGING_CATEGORY(qLcMediaPlayer, "qt.multimedia.player")
QT_BEGIN_NAMESPACE
@@ -74,13 +78,25 @@ QGstreamerMediaPlayer::TrackSelector &QGstreamerMediaPlayer::trackSelector(Track
return ts;
}
+void QGstreamerMediaPlayer::updateBufferProgress(float newProgress)
+{
+ if (qFuzzyIsNull(newProgress - m_bufferProgress))
+ return;
+
+ m_bufferProgress = newProgress;
+ bufferProgressChanged(m_bufferProgress);
+}
+
void QGstreamerMediaPlayer::disconnectDecoderHandlers()
{
auto handlers = std::initializer_list<QGObjectHandlerScopedConnection *>{
- &padAdded, &padRemoved, &sourceSetup, &elementAdded, &unknownType,
+ &padAdded, &padRemoved, &sourceSetup, &uridecodebinElementAdded,
+ &unknownType, &elementAdded, &elementRemoved,
};
for (QGObjectHandlerScopedConnection *handler : handlers)
handler->disconnect();
+
+ decodeBinQueues = 0;
}
QMaybe<QPlatformMediaPlayer *> QGstreamerMediaPlayer::create(QMediaPlayer *parent)
@@ -89,35 +105,26 @@ QMaybe<QPlatformMediaPlayer *> QGstreamerMediaPlayer::create(QMediaPlayer *paren
if (!videoOutput)
return videoOutput.error();
- QGstElement videoInputSelector =
- QGstElement::createFromFactory("input-selector", "videoInputSelector");
- if (!videoInputSelector)
- return errorMessageCannotFindElement("input-selector");
+ static const auto error =
+ qGstErrorMessageIfElementsNotAvailable("input-selector", "decodebin", "uridecodebin");
+ if (error)
+ return *error;
- QGstElement audioInputSelector =
- QGstElement::createFromFactory("input-selector", "audioInputSelector");
- if (!audioInputSelector)
- return errorMessageCannotFindElement("input-selector");
-
- QGstElement subTitleInputSelector =
- QGstElement::createFromFactory("input-selector", "subTitleInputSelector");
- if (!subTitleInputSelector)
- return errorMessageCannotFindElement("input-selector");
-
- return new QGstreamerMediaPlayer(videoOutput.value(), videoInputSelector, audioInputSelector,
- subTitleInputSelector, parent);
+ return new QGstreamerMediaPlayer(videoOutput.value(), parent);
}
QGstreamerMediaPlayer::QGstreamerMediaPlayer(QGstreamerVideoOutput *videoOutput,
- QGstElement videoInputSelector,
- QGstElement audioInputSelector,
- QGstElement subTitleInputSelector,
QMediaPlayer *parent)
: QObject(parent),
QPlatformMediaPlayer(parent),
- trackSelectors{ { { VideoStream, videoInputSelector },
- { AudioStream, audioInputSelector },
- { SubtitleStream, subTitleInputSelector } } },
+ trackSelectors{ {
+ { VideoStream,
+ QGstElement::createFromFactory("input-selector", "videoInputSelector") },
+ { AudioStream,
+ QGstElement::createFromFactory("input-selector", "audioInputSelector") },
+ { SubtitleStream,
+ QGstElement::createFromFactory("input-selector", "subTitleInputSelector") },
+ } },
playerPipeline(QGstPipeline::create("playerPipeline")),
gstVideoOutput(videoOutput)
{
@@ -129,7 +136,6 @@ QGstreamerMediaPlayer::QGstreamerMediaPlayer(QGstreamerVideoOutput *videoOutput,
for (auto &ts : trackSelectors)
playerPipeline.add(ts.selector);
- playerPipeline.setState(GST_STATE_NULL);
playerPipeline.installMessageFilter(static_cast<QGstreamerBusMessageFilter *>(this));
playerPipeline.installMessageFilter(static_cast<QGstreamerSyncMessageFilter *>(this));
@@ -139,7 +145,9 @@ QGstreamerMediaPlayer::QGstreamerMediaPlayer(QGstreamerVideoOutput *videoOutput,
gst_pipeline_use_clock(playerPipeline.pipeline(), systemClock.get());
- connect(&positionUpdateTimer, &QTimer::timeout, this, &QGstreamerMediaPlayer::updatePosition);
+ connect(&positionUpdateTimer, &QTimer::timeout, this, [this] {
+ updatePositionFromPipeline();
+ });
}
QGstreamerMediaPlayer::~QGstreamerMediaPlayer()
@@ -147,25 +155,42 @@ QGstreamerMediaPlayer::~QGstreamerMediaPlayer()
playerPipeline.removeMessageFilter(static_cast<QGstreamerBusMessageFilter *>(this));
playerPipeline.removeMessageFilter(static_cast<QGstreamerSyncMessageFilter *>(this));
playerPipeline.setStateSync(GST_STATE_NULL);
- topology.free();
}
-qint64 QGstreamerMediaPlayer::position() const
+std::chrono::nanoseconds QGstreamerMediaPlayer::pipelinePosition() const
+{
+ if (m_url.isEmpty())
+ return {};
+
+ Q_ASSERT(playerPipeline);
+ return playerPipeline.position();
+}
+
+void QGstreamerMediaPlayer::updatePositionFromPipeline()
{
- if (playerPipeline.isNull() || m_url.isEmpty())
- return 0;
+ using namespace std::chrono;
- return playerPipeline.position()/1e6;
+ positionChanged(round<milliseconds>(pipelinePosition()));
+}
+
+void QGstreamerMediaPlayer::updateDurationFromPipeline()
+{
+ std::chrono::milliseconds d = playerPipeline.durationInMs();
+ qCDebug(qLcMediaPlayer) << "updateDurationFromPipeline" << d;
+ if (d != m_duration) {
+ m_duration = d;
+ durationChanged(m_duration);
+ }
}
qint64 QGstreamerMediaPlayer::duration() const
{
- return m_duration;
+ return m_duration.count();
}
float QGstreamerMediaPlayer::bufferProgress() const
{
- return m_bufferProgress/100.;
+ return m_bufferProgress;
}
QMediaTimeRange QGstreamerMediaPlayer::availablePlaybackRanges() const
@@ -180,18 +205,28 @@ qreal QGstreamerMediaPlayer::playbackRate() const
void QGstreamerMediaPlayer::setPlaybackRate(qreal rate)
{
- if (playerPipeline.setPlaybackRate(rate))
- playbackRateChanged(rate);
+ if (rate == m_rate)
+ return;
+
+ m_rate = rate;
+
+ playerPipeline.setPlaybackRate(rate);
+ playbackRateChanged(rate);
}
void QGstreamerMediaPlayer::setPosition(qint64 pos)
{
- qint64 currentPos = playerPipeline.position()/1e6;
- if (pos == currentPos)
+ std::chrono::milliseconds posInMs{ pos };
+ setPosition(posInMs);
+}
+
+void QGstreamerMediaPlayer::setPosition(std::chrono::milliseconds pos)
+{
+ if (pos == playerPipeline.position())
return;
playerPipeline.finishStateChange();
- playerPipeline.setPosition(pos*1e6);
- qCDebug(qLcMediaPlayer) << Q_FUNC_INFO << pos << playerPipeline.position()/1e6;
+ playerPipeline.setPosition(pos);
+ qCDebug(qLcMediaPlayer) << Q_FUNC_INFO << pos << playerPipeline.positionInMs();
if (mediaStatus() == QMediaPlayer::EndOfMedia)
mediaStatusChanged(QMediaPlayer::LoadedMedia);
positionChanged(pos);
@@ -201,12 +236,14 @@ void QGstreamerMediaPlayer::play()
{
if (state() == QMediaPlayer::PlayingState || m_url.isEmpty())
return;
- resetCurrentLoop();
+
+ if (state() != QMediaPlayer::PausedState)
+ resetCurrentLoop();
playerPipeline.setInStoppedState(false);
if (mediaStatus() == QMediaPlayer::EndOfMedia) {
- playerPipeline.setPosition(0);
- updatePosition();
+ playerPipeline.setPosition({});
+ positionChanged(0);
}
qCDebug(qLcMediaPlayer) << "play().";
@@ -216,13 +253,17 @@ void QGstreamerMediaPlayer::play()
// immediately, when they happen while paused.
playerPipeline.flush();
m_requiresSeekOnPlay = false;
+ } else {
+ // we get an assertion failure during instant playback rate changes
+ // https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/3545
+ constexpr bool performInstantRateChange = false;
+ playerPipeline.applyPlaybackRate(/*instantRateChange=*/performInstantRateChange);
}
if (ret == GST_STATE_CHANGE_FAILURE)
qCDebug(qLcMediaPlayer) << "Unable to set the pipeline to the playing state.";
- if (mediaStatus() == QMediaPlayer::LoadedMedia)
- mediaStatusChanged(QMediaPlayer::BufferedMedia);
- emit stateChanged(QMediaPlayer::PlayingState);
+
positionUpdateTimer.start(100);
+ stateChanged(QMediaPlayer::PlayingState);
}
void QGstreamerMediaPlayer::pause()
@@ -236,53 +277,83 @@ void QGstreamerMediaPlayer::pause()
playerPipeline.setInStoppedState(false);
playerPipeline.flush();
}
- int ret = playerPipeline.setState(GST_STATE_PAUSED);
+ int ret = playerPipeline.setStateSync(GST_STATE_PAUSED);
if (ret == GST_STATE_CHANGE_FAILURE)
qCDebug(qLcMediaPlayer) << "Unable to set the pipeline to the paused state.";
if (mediaStatus() == QMediaPlayer::EndOfMedia) {
- playerPipeline.setPosition(0);
- mediaStatusChanged(QMediaPlayer::BufferedMedia);
+ playerPipeline.setPosition({});
+ positionChanged(0);
+ } else {
+ updatePositionFromPipeline();
}
- updatePosition();
- emit stateChanged(QMediaPlayer::PausedState);
+ stateChanged(QMediaPlayer::PausedState);
+
+ if (m_bufferProgress > 0 || !canTrackProgress())
+ mediaStatusChanged(QMediaPlayer::BufferedMedia);
+ else
+ mediaStatusChanged(QMediaPlayer::BufferingMedia);
}
void QGstreamerMediaPlayer::stop()
{
+ using namespace std::chrono_literals;
if (state() == QMediaPlayer::StoppedState) {
if (position() != 0) {
- playerPipeline.setPosition(0);
- positionChanged(0);
+ playerPipeline.setPosition({});
+ positionChanged(0ms);
+ mediaStatusChanged(QMediaPlayer::LoadedMedia);
}
return;
}
stopOrEOS(false);
}
-void *QGstreamerMediaPlayer::nativePipeline()
+const QGstPipeline &QGstreamerMediaPlayer::pipeline() const
{
- return playerPipeline.pipeline();
+ return playerPipeline;
}
void QGstreamerMediaPlayer::stopOrEOS(bool eos)
{
+ using namespace std::chrono_literals;
+
positionUpdateTimer.stop();
playerPipeline.setInStoppedState(true);
bool ret = playerPipeline.setStateSync(GST_STATE_PAUSED);
if (!ret)
qCDebug(qLcMediaPlayer) << "Unable to set the pipeline to the stopped state.";
- if (!eos)
- playerPipeline.setPosition(0);
- updatePosition();
- emit stateChanged(QMediaPlayer::StoppedState);
- mediaStatusChanged(eos ? QMediaPlayer::EndOfMedia : QMediaPlayer::LoadedMedia);
+ if (!eos) {
+ playerPipeline.setPosition(0ms);
+ positionChanged(0ms);
+ }
+ stateChanged(QMediaPlayer::StoppedState);
+ if (eos)
+ mediaStatusChanged(QMediaPlayer::EndOfMedia);
+ else
+ mediaStatusChanged(QMediaPlayer::LoadedMedia);
+ m_initialBufferProgressSent = false;
+ bufferProgressChanged(0.f);
}
-bool QGstreamerMediaPlayer::processBusMessage(const QGstreamerMessage &message)
+void QGstreamerMediaPlayer::detectPipelineIsSeekable()
{
- if (message.isNull())
- return false;
+ qCDebug(qLcMediaPlayer) << "detectPipelineIsSeekable";
+ QGstQueryHandle query{
+ gst_query_new_seeking(GST_FORMAT_TIME),
+ QGstQueryHandle::HasRef,
+ };
+ gboolean canSeek = false;
+ if (gst_element_query(playerPipeline.element(), query.get())) {
+ gst_query_parse_seeking(query.get(), nullptr, &canSeek, nullptr, nullptr);
+ qCDebug(qLcMediaPlayer) << " pipeline is seekable:" << canSeek;
+ } else {
+ qCWarning(qLcMediaPlayer) << " query for seekable failed.";
+ }
+ seekableChanged(canSeek);
+}
+bool QGstreamerMediaPlayer::processBusMessage(const QGstreamerMessage &message)
+{
qCDebug(qLcMediaPlayer) << "received bus message:" << message;
GstMessage* gm = message.message();
@@ -293,34 +364,55 @@ bool QGstreamerMediaPlayer::processBusMessage(const QGstreamerMessage &message)
gst_message_parse_tag(gm, &tagList);
qCDebug(qLcMediaPlayer) << " Got tags: " << tagList.get();
- auto metaData = QGstreamerMetaData::fromGstTagList(tagList.get());
- for (auto k : metaData.keys())
- m_metaData.insert(k, metaData.value(k));
+
+ QMediaMetaData originalMetaData = m_metaData;
+ extendMetaDataFromTagList(m_metaData, tagList);
+ if (originalMetaData != m_metaData)
+ metaDataChanged();
+
+ if (gstVideoOutput) {
+ QVariant rotation = m_metaData.value(QMediaMetaData::Orientation);
+ gstVideoOutput->setRotation(rotation.value<QtVideo::Rotation>());
+ }
break;
}
case GST_MESSAGE_DURATION_CHANGED: {
- qint64 d = playerPipeline.duration()/1e6;
- qCDebug(qLcMediaPlayer) << " duration changed message" << d;
- if (d != m_duration) {
- m_duration = d;
- emit durationChanged(duration());
- }
+ if (!prerolling)
+ updateDurationFromPipeline();
+
return false;
}
- case GST_MESSAGE_EOS:
+ case GST_MESSAGE_EOS: {
+ positionChanged(m_duration);
if (doLoop()) {
setPosition(0);
break;
}
stopOrEOS(true);
break;
+ }
case GST_MESSAGE_BUFFERING: {
- qCDebug(qLcMediaPlayer) << " buffering message";
int progress = 0;
gst_message_parse_buffering(gm, &progress);
- m_bufferProgress = progress;
- mediaStatusChanged(m_bufferProgress == 100 ? QMediaPlayer::BufferedMedia : QMediaPlayer::BufferingMedia);
- emit bufferProgressChanged(m_bufferProgress/100.);
+
+ qCDebug(qLcMediaPlayer) << " buffering message: " << progress;
+
+ if (state() != QMediaPlayer::StoppedState && !prerolling) {
+ if (!m_initialBufferProgressSent) {
+ mediaStatusChanged(QMediaPlayer::BufferingMedia);
+ m_initialBufferProgressSent = true;
+ }
+
+ if (m_bufferProgress > 0 && progress == 0)
+ mediaStatusChanged(QMediaPlayer::StalledMedia);
+ else if (progress >= 50)
+ // QTBUG-124517: rethink buffering
+ mediaStatusChanged(QMediaPlayer::BufferedMedia);
+ else
+ mediaStatusChanged(QMediaPlayer::BufferingMedia);
+ }
+
+ updateBufferProgress(progress * 0.01);
break;
}
case GST_MESSAGE_STATE_CHANGED: {
@@ -332,78 +424,81 @@ bool QGstreamerMediaPlayer::processBusMessage(const QGstreamerMessage &message)
GstState pending;
gst_message_parse_state_changed(gm, &oldState, &newState, &pending);
- qCDebug(qLcMediaPlayer) << " state changed message from" << oldState << "to" << newState
- << pending;
+ qCDebug(qLcMediaPlayer) << " state changed message from"
+ << QCompactGstMessageAdaptor(message);
switch (newState) {
case GST_STATE_VOID_PENDING:
case GST_STATE_NULL:
case GST_STATE_READY:
break;
- case GST_STATE_PAUSED:
- {
+ case GST_STATE_PAUSED: {
if (prerolling) {
qCDebug(qLcMediaPlayer) << "Preroll done, setting status to Loaded";
- prerolling = false;
- GST_DEBUG_BIN_TO_DOT_FILE(playerPipeline.bin(), GST_DEBUG_GRAPH_SHOW_ALL, "playerPipeline");
+ playerPipeline.dumpGraph("playerPipelinePrerollDone");
- qint64 d = playerPipeline.duration()/1e6;
- if (d != m_duration) {
- m_duration = d;
- qCDebug(qLcMediaPlayer) << " duration changed" << d;
- emit durationChanged(duration());
- }
+ prerolling = false;
+ updateDurationFromPipeline();
+ m_metaData.insert(QMediaMetaData::Duration, duration());
+ m_metaData.insert(QMediaMetaData::Url, m_url);
parseStreamsAndMetadata();
+ metaDataChanged();
- emit tracksChanged();
+ tracksChanged();
mediaStatusChanged(QMediaPlayer::LoadedMedia);
- GstQuery *query = gst_query_new_seeking(GST_FORMAT_TIME);
- gboolean canSeek = false;
- if (gst_element_query(playerPipeline.element(), query)) {
- gst_query_parse_seeking(query, nullptr, &canSeek, nullptr, nullptr);
- qCDebug(qLcMediaPlayer) << " pipeline is seekable:" << canSeek;
- } else {
- qCDebug(qLcMediaPlayer) << " query for seekable failed.";
+ if (!playerPipeline.inStoppedState()) {
+ Q_ASSERT(!m_initialBufferProgressSent);
+
+ bool immediatelySendBuffered = !canTrackProgress() || m_bufferProgress > 0;
+ mediaStatusChanged(QMediaPlayer::BufferingMedia);
+ m_initialBufferProgressSent = true;
+ if (immediatelySendBuffered)
+ mediaStatusChanged(QMediaPlayer::BufferedMedia);
}
- gst_query_unref(query);
- seekableChanged(canSeek);
}
break;
}
- case GST_STATE_PLAYING:
- mediaStatusChanged(QMediaPlayer::BufferedMedia);
+ case GST_STATE_PLAYING: {
+ if (!m_initialBufferProgressSent) {
+ bool immediatelySendBuffered = !canTrackProgress() || m_bufferProgress > 0;
+ mediaStatusChanged(QMediaPlayer::BufferingMedia);
+ m_initialBufferProgressSent = true;
+ if (immediatelySendBuffered)
+ mediaStatusChanged(QMediaPlayer::BufferedMedia);
+ }
break;
}
+ }
break;
}
case GST_MESSAGE_ERROR: {
+ qCDebug(qLcMediaPlayer) << " error" << QCompactGstMessageAdaptor(message);
+
QUniqueGErrorHandle err;
QUniqueGStringHandle debug;
gst_message_parse_error(gm, &err, &debug);
- qCDebug(qLcMediaPlayer) << " error" << err << debug;
-
GQuark errorDomain = err.get()->domain;
gint errorCode = err.get()->code;
if (errorDomain == GST_STREAM_ERROR) {
if (errorCode == GST_STREAM_ERROR_CODEC_NOT_FOUND)
- emit error(QMediaPlayer::FormatError, tr("Cannot play stream of type: <unknown>"));
+ error(QMediaPlayer::FormatError, tr("Cannot play stream of type: <unknown>"));
else {
- emit error(QMediaPlayer::FormatError, QString::fromUtf8(err.get()->message));
+ error(QMediaPlayer::FormatError, QString::fromUtf8(err.get()->message));
}
} else if (errorDomain == GST_RESOURCE_ERROR) {
if (errorCode == GST_RESOURCE_ERROR_NOT_FOUND) {
if (m_resourceErrorState != ResourceErrorState::ErrorReported) {
// gstreamer seems to deliver multiple GST_RESOURCE_ERROR_NOT_FOUND events
- emit error(QMediaPlayer::ResourceError, QString::fromUtf8(err.get()->message));
+ error(QMediaPlayer::ResourceError, QString::fromUtf8(err.get()->message));
m_resourceErrorState = ResourceErrorState::ErrorReported;
m_url.clear();
}
} else {
- emit error(QMediaPlayer::ResourceError, QString::fromUtf8(err.get()->message));
+ error(QMediaPlayer::ResourceError, QString::fromUtf8(err.get()->message));
}
} else {
playerPipeline.dumpGraph("error");
@@ -411,40 +506,40 @@ bool QGstreamerMediaPlayer::processBusMessage(const QGstreamerMessage &message)
mediaStatusChanged(QMediaPlayer::InvalidMedia);
break;
}
- case GST_MESSAGE_WARNING: {
- QUniqueGErrorHandle err;
- QUniqueGStringHandle debug;
- gst_message_parse_warning (gm, &err, &debug);
- qCWarning(qLcMediaPlayer) << "Warning:" << err;
+
+ case GST_MESSAGE_WARNING:
+ qCWarning(qLcMediaPlayer) << "Warning:" << QCompactGstMessageAdaptor(message);
playerPipeline.dumpGraph("warning");
break;
- }
- case GST_MESSAGE_INFO: {
- if (qLcMediaPlayer().isDebugEnabled()) {
- QUniqueGErrorHandle err;
- QUniqueGStringHandle debug;
- gst_message_parse_info (gm, &err, &debug);
- qCDebug(qLcMediaPlayer) << "Info:" << err;
- }
+
+ case GST_MESSAGE_INFO:
+ if (qLcMediaPlayer().isDebugEnabled())
+ qCDebug(qLcMediaPlayer) << "Info:" << QCompactGstMessageAdaptor(message);
break;
- }
+
case GST_MESSAGE_SEGMENT_START: {
qCDebug(qLcMediaPlayer) << " segment start message, updating position";
- QGstStructure structure(gst_message_get_structure(gm));
+ QGstStructureView structure(gst_message_get_structure(gm));
auto p = structure["position"].toInt64();
if (p) {
- qint64 position = (*p)/1000000;
- emit positionChanged(position);
+ std::chrono::milliseconds position{
+ (*p) / 1000000,
+ };
+ positionChanged(position);
}
break;
}
case GST_MESSAGE_ELEMENT: {
- QGstStructure structure(gst_message_get_structure(gm));
+ QGstStructureView structure(gst_message_get_structure(gm));
auto type = structure.name();
- if (type == "stream-topology") {
- topology.free();
- topology = structure.copy();
- }
+ if (type == "stream-topology")
+ topology = structure.clone();
+
+ break;
+ }
+
+ case GST_MESSAGE_ASYNC_DONE: {
+ detectPipelineIsSeekable();
break;
}
@@ -524,19 +619,19 @@ void QGstreamerMediaPlayer::decoderPadAdded(const QGstElement &src, const QGstPa
if (streamType == VideoStream) {
connectOutput(ts);
ts.setActiveInputPad(sinkPad);
- emit videoAvailableChanged(true);
+ videoAvailableChanged(true);
}
else if (streamType == AudioStream) {
connectOutput(ts);
ts.setActiveInputPad(sinkPad);
- emit audioAvailableChanged(true);
+ audioAvailableChanged(true);
}
}
if (!prerolling)
- emit tracksChanged();
+ tracksChanged();
- decoderOutputMap.insert(pad.name(), sinkPad);
+ decoderOutputMap.emplace(pad, sinkPad);
}
void QGstreamerMediaPlayer::decoderPadRemoved(const QGstElement &src, const QGstPad &pad)
@@ -545,9 +640,11 @@ void QGstreamerMediaPlayer::decoderPadRemoved(const QGstElement &src, const QGst
return;
qCDebug(qLcMediaPlayer) << "Removed pad" << pad.name() << "from" << src.name();
- auto track = decoderOutputMap.value(pad.name());
- if (track.isNull())
+
+ auto it = decoderOutputMap.find(pad);
+ if (it == decoderOutputMap.end())
return;
+ QGstPad track = it->second;
auto ts = std::find_if(std::begin(trackSelectors), std::end(trackSelectors),
[&](TrackSelector &ts){ return ts.selector == track.parent(); });
@@ -604,7 +701,7 @@ void QGstreamerMediaPlayer::connectOutput(TrackSelector &ts)
qCDebug(qLcMediaPlayer) << "connecting output for track type" << ts.type;
playerPipeline.add(e);
qLinkGstElements(ts.selector, e);
- e.setState(GST_STATE_PAUSED);
+ e.syncStateWithParent();
}
ts.isConnected = true;
@@ -639,6 +736,18 @@ void QGstreamerMediaPlayer::removeOutput(TrackSelector &ts)
ts.isConnected = false;
}
+void QGstreamerMediaPlayer::removeDynamicPipelineElements()
+{
+ for (QGstElement *element : { &src, &decoder }) {
+ if (element->isNull())
+ continue;
+
+ element->setStateSync(GstState::GST_STATE_NULL);
+ playerPipeline.remove(*element);
+ *element = QGstElement{};
+ }
+}
+
void QGstreamerMediaPlayer::uridecodebinElementAddedCallback(GstElement * /*uridecodebin*/,
GstElement *child,
QGstreamerMediaPlayer *)
@@ -647,9 +756,7 @@ void QGstreamerMediaPlayer::uridecodebinElementAddedCallback(GstElement * /*urid
qCDebug(qLcMediaPlayer) << "New element added to uridecodebin:" << c.name();
static const GType decodeBinType = [] {
- QGstElementFactoryHandle factory = QGstElementFactoryHandle{
- gst_element_factory_find("decodebin"),
- };
+ QGstElementFactoryHandle factory = QGstElement::findFactory("decodebin");
return gst_element_factory_get_element_type(factory.get());
}();
@@ -705,8 +812,43 @@ void QGstreamerMediaPlayer::unknownTypeCallback(GstElement *decodebin, GstPad *p
});
}
+static bool isQueue(const QGstElement &element)
+{
+ static const GType queueType = [] {
+ QGstElementFactoryHandle factory = QGstElement::findFactory("queue");
+ return gst_element_factory_get_element_type(factory.get());
+ }();
+
+ static const GType multiQueueType = [] {
+ QGstElementFactoryHandle factory = QGstElement::findFactory("multiqueue");
+ return gst_element_factory_get_element_type(factory.get());
+ }();
+
+ return element.type() == queueType || element.type() == multiQueueType;
+}
+
+void QGstreamerMediaPlayer::decodebinElementAddedCallback(GstBin * /*decodebin*/,
+ GstBin * /*sub_bin*/, GstElement *child,
+ QGstreamerMediaPlayer *self)
+{
+ QGstElement c(child, QGstElement::NeedsRef);
+ if (isQueue(c))
+ self->decodeBinQueues += 1;
+}
+
+void QGstreamerMediaPlayer::decodebinElementRemovedCallback(GstBin * /*decodebin*/,
+ GstBin * /*sub_bin*/, GstElement *child,
+ QGstreamerMediaPlayer *self)
+{
+ QGstElement c(child, QGstElement::NeedsRef);
+ if (isQueue(c))
+ self->decodeBinQueues -= 1;
+}
+
void QGstreamerMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
{
+ using namespace std::chrono_literals;
+
qCDebug(qLcMediaPlayer) << Q_FUNC_INFO << "setting location to" << content;
prerolling = true;
@@ -719,30 +861,27 @@ void QGstreamerMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
m_url = content;
m_stream = stream;
- if (!src.isNull())
- playerPipeline.remove(src);
- if (!decoder.isNull())
- playerPipeline.remove(decoder);
- src = QGstElement();
+ removeDynamicPipelineElements();
disconnectDecoderHandlers();
- decoder = QGstElement();
removeAllOutputs();
seekableChanged(false);
Q_ASSERT(playerPipeline.inStoppedState());
- if (m_duration != 0) {
- m_duration = 0;
- durationChanged(0);
+ if (m_duration != 0ms) {
+ m_duration = 0ms;
+ durationChanged(0ms);
}
stateChanged(QMediaPlayer::StoppedState);
if (position() != 0)
- positionChanged(0);
- mediaStatusChanged(QMediaPlayer::NoMedia);
+ positionChanged(0ms);
if (!m_metaData.isEmpty()) {
m_metaData.clear();
metaDataChanged();
}
+ if (content.isEmpty() && !stream)
+ mediaStatusChanged(QMediaPlayer::NoMedia);
+
if (content.isEmpty())
return;
@@ -752,20 +891,23 @@ void QGstreamerMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
if (maybeAppSrc) {
m_appSrc = maybeAppSrc.value();
} else {
- emit error(QMediaPlayer::ResourceError, maybeAppSrc.error());
+ error(QMediaPlayer::ResourceError, maybeAppSrc.error());
return;
}
}
src = m_appSrc->element();
decoder = QGstElement::createFromFactory("decodebin", "decoder");
if (!decoder) {
- emit error(QMediaPlayer::ResourceError, errorMessageCannotFindElement("decodebin"));
+ error(QMediaPlayer::ResourceError, qGstErrorMessageCannotFindElement("decodebin"));
return;
}
decoder.set("post-stream-topology", true);
decoder.set("use-buffering", true);
- unknownType = decoder.connect("unknown-type",
- GCallback(QGstreamerMediaPlayer::unknownTypeCallback), this);
+ unknownType = decoder.connect("unknown-type", GCallback(unknownTypeCallback), this);
+ elementAdded = decoder.connect("deep-element-added",
+ GCallback(decodebinElementAddedCallback), this);
+ elementRemoved = decoder.connect("deep-element-removed",
+ GCallback(decodebinElementAddedCallback), this);
playerPipeline.add(src, decoder);
qLinkGstElements(src, decoder);
@@ -776,7 +918,7 @@ void QGstreamerMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
// use uridecodebin
decoder = QGstElement::createFromFactory("uridecodebin", "decoder");
if (!decoder) {
- emit error(QMediaPlayer::ResourceError, errorMessageCannotFindElement("uridecodebin"));
+ error(QMediaPlayer::ResourceError, qGstErrorMessageCannotFindElement("uridecodebin"));
return;
}
playerPipeline.add(decoder);
@@ -787,34 +929,38 @@ void QGstreamerMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
} else {
// can't set post-stream-topology to true, as uridecodebin doesn't have the property.
// Use a hack
- elementAdded = decoder.connect(
- "element-added",
- GCallback(QGstreamerMediaPlayer::uridecodebinElementAddedCallback), this);
+ uridecodebinElementAdded = decoder.connect(
+ "element-added", GCallback(uridecodebinElementAddedCallback), this);
}
- sourceSetup = decoder.connect("source-setup",
- GCallback(QGstreamerMediaPlayer::sourceSetupCallback), this);
-
- unknownType = decoder.connect("unknown-type",
- GCallback(QGstreamerMediaPlayer::unknownTypeCallback), this);
+ sourceSetup = decoder.connect("source-setup", GCallback(sourceSetupCallback), this);
+ unknownType = decoder.connect("unknown-type", GCallback(unknownTypeCallback), this);
decoder.set("uri", content.toEncoded().constData());
decoder.set("use-buffering", true);
- if (m_bufferProgress != 0) {
- m_bufferProgress = 0;
- emit bufferProgressChanged(0.);
- }
+
+ constexpr int mb = 1024 * 1024;
+ decoder.set("ring-buffer-max-size", 2 * mb);
+
+ updateBufferProgress(0.f);
+
+ elementAdded = decoder.connect("deep-element-added",
+ GCallback(decodebinElementAddedCallback), this);
+ elementRemoved = decoder.connect("deep-element-removed",
+ GCallback(decodebinElementAddedCallback), this);
}
padAdded = decoder.onPadAdded<&QGstreamerMediaPlayer::decoderPadAdded>(this);
padRemoved = decoder.onPadRemoved<&QGstreamerMediaPlayer::decoderPadRemoved>(this);
mediaStatusChanged(QMediaPlayer::LoadingMedia);
-
- if (!playerPipeline.setState(GST_STATE_PAUSED))
+ if (!playerPipeline.setStateSync(GST_STATE_PAUSED)) {
qCWarning(qLcMediaPlayer) << "Unable to set the pipeline to the paused state.";
+ // Note: no further error handling: errors will be delivered via a GstMessage
+ return;
+ }
- playerPipeline.setPosition(0);
- positionChanged(0);
+ playerPipeline.setPosition(0ms);
+ positionChanged(0ms);
}
void QGstreamerMediaPlayer::setAudioOutput(QPlatformAudioOutput *output)
@@ -844,9 +990,9 @@ void QGstreamerMediaPlayer::setVideoSink(QVideoSink *sink)
gstVideoOutput->setVideoSink(sink);
}
-static QGstStructure endOfChain(const QGstStructure &s)
+static QGstStructureView endOfChain(const QGstStructureView &s)
{
- QGstStructure e = s;
+ QGstStructureView e = s;
while (1) {
auto next = e["next"].toStructure();
if (!next.isNull())
@@ -860,32 +1006,26 @@ static QGstStructure endOfChain(const QGstStructure &s)
void QGstreamerMediaPlayer::parseStreamsAndMetadata()
{
qCDebug(qLcMediaPlayer) << "============== parse topology ============";
- if (topology.isNull()) {
+
+ if (!topology) {
qCDebug(qLcMediaPlayer) << " null topology";
return;
}
- auto caps = topology["caps"].toCaps();
- auto structure = caps.at(0);
- auto fileFormat = QGstreamerFormatInfo::fileFormatForCaps(structure);
- qCDebug(qLcMediaPlayer) << caps << fileFormat;
- m_metaData.insert(QMediaMetaData::FileFormat, QVariant::fromValue(fileFormat));
- m_metaData.insert(QMediaMetaData::Duration, duration());
- m_metaData.insert(QMediaMetaData::Url, m_url);
- QGValue tags = topology["tags"];
- if (!tags.isNull()) {
- QGstTagListHandle tagList;
- gst_structure_get(topology.structure, "tags", GST_TYPE_TAG_LIST, &tagList, nullptr);
- const auto metaData = QGstreamerMetaData::fromGstTagList(tagList.get());
- for (auto k : metaData.keys())
- m_metaData.insert(k, metaData.value(k));
- }
+ QGstStructureView topologyView{ topology };
- auto demux = endOfChain(topology);
- auto next = demux["next"];
+ QGstCaps caps = topologyView.caps();
+ extendMetaDataFromCaps(m_metaData, caps);
+
+ QGstTagListHandle tagList = QGstStructureView{ topology }.tags();
+ if (tagList)
+ extendMetaDataFromTagList(m_metaData, tagList);
+
+ QGstStructureView demux = endOfChain(topologyView);
+ QGValue next = demux["next"];
if (!next.isList()) {
qCDebug(qLcMediaPlayer) << " no additional streams";
- emit metaDataChanged();
+ metaDataChanged();
return;
}
@@ -893,43 +1033,28 @@ void QGstreamerMediaPlayer::parseStreamsAndMetadata()
int size = next.listSize();
for (int i = 0; i < size; ++i) {
auto val = next.at(i);
- caps = val.toStructure()["caps"].toCaps();
- structure = caps.at(0);
- if (structure.name().startsWith("audio/")) {
- auto codec = QGstreamerFormatInfo::audioCodecForCaps(structure);
- m_metaData.insert(QMediaMetaData::AudioCodec, QVariant::fromValue(codec));
- qCDebug(qLcMediaPlayer) << " audio" << caps << (int)codec;
- } else if (structure.name().startsWith("video/")) {
- auto codec = QGstreamerFormatInfo::videoCodecForCaps(structure);
- m_metaData.insert(QMediaMetaData::VideoCodec, QVariant::fromValue(codec));
- qCDebug(qLcMediaPlayer) << " video" << caps << (int)codec;
- auto framerate = structure["framerate"].getFraction();
- if (framerate)
- m_metaData.insert(QMediaMetaData::VideoFrameRate, *framerate);
-
- QSize resolution = structure.resolution();
- if (resolution.isValid())
- m_metaData.insert(QMediaMetaData::Resolution, resolution);
+ caps = val.toStructure().caps();
+
+ extendMetaDataFromCaps(m_metaData, caps);
+
+ QGstStructureView structure = caps.at(0);
+
+ if (structure.name().startsWith("video/")) {
+ QSize nativeSize = structure.nativeSize();
+ gstVideoOutput->setNativeSize(nativeSize);
}
}
auto sinkPad = trackSelector(VideoStream).activeInputPad();
- if (!sinkPad.isNull()) {
- QGstTagListHandle tagList;
-
- g_object_get(sinkPad.object(), "tags", &tagList, nullptr);
+ if (sinkPad) {
+ QGstTagListHandle tagList = sinkPad.tags();
if (tagList)
qCDebug(qLcMediaPlayer) << " tags=" << tagList.get();
else
qCDebug(qLcMediaPlayer) << " tags=(null)";
-
- QSize nativeSize = structure.nativeSize();
- gstVideoOutput->setNativeSize(nativeSize);
}
-
qCDebug(qLcMediaPlayer) << "============== end parse topology ============";
- emit metaDataChanged();
playerPipeline.dumpGraph("playback");
}
@@ -941,13 +1066,11 @@ int QGstreamerMediaPlayer::trackCount(QPlatformMediaPlayer::TrackType type)
QMediaMetaData QGstreamerMediaPlayer::trackMetaData(QPlatformMediaPlayer::TrackType type, int index)
{
auto track = trackSelector(type).inputPad(index);
- if (track.isNull())
+ if (!track)
return {};
- QGstTagListHandle tagList;
- g_object_get(track.object(), "tags", &tagList, nullptr);
-
- return tagList ? QGstreamerMetaData::fromGstTagList(tagList.get()) : QMediaMetaData{};
+ QGstTagListHandle tagList = track.tags();
+ return taglistToMetaData(tagList);
}
int QGstreamerMediaPlayer::activeTrack(TrackType type)
@@ -986,5 +1109,3 @@ void QGstreamerMediaPlayer::setActiveTrack(TrackType type, int index)
}
QT_END_NAMESPACE
-
-#include "moc_qgstreamermediaplayer_p.cpp"
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer_p.h b/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer_p.h
index 3b2129296..28e7a0c31 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer_p.h
@@ -40,13 +40,10 @@ class QGstreamerMediaPlayer : public QObject,
public QGstreamerBusMessageFilter,
public QGstreamerSyncMessageFilter
{
- Q_OBJECT
-
public:
static QMaybe<QPlatformMediaPlayer *> create(QMediaPlayer *parent = nullptr);
~QGstreamerMediaPlayer();
- qint64 position() const override;
qint64 duration() const override;
float bufferProgress() const override;
@@ -58,7 +55,7 @@ public:
QUrl media() const override;
const QIODevice *mediaStream() const override;
- void setMedia(const QUrl&, QIODevice *) override;
+ void setMedia(const QUrl &, QIODevice *) override;
bool streamPlaybackSupported() const override { return true; }
@@ -74,24 +71,22 @@ public:
void setActiveTrack(TrackType, int /*streamNumber*/) override;
void setPosition(qint64 pos) override;
+ void setPosition(std::chrono::milliseconds pos);
void play() override;
void pause() override;
void stop() override;
- void *nativePipeline() override;
+ const QGstPipeline &pipeline() const;
bool processBusMessage(const QGstreamerMessage& message) override;
bool processSyncMessage(const QGstreamerMessage& message) override;
-public Q_SLOTS:
- void updatePosition() { positionChanged(position()); }
private:
- QGstreamerMediaPlayer(QGstreamerVideoOutput *videoOutput, QGstElement videoInputSelector,
- QGstElement audioInputSelector, QGstElement subTitleInputSelector,
- QMediaPlayer *parent);
+ QGstreamerMediaPlayer(QGstreamerVideoOutput *videoOutput, QMediaPlayer *parent);
- struct TrackSelector {
+ struct TrackSelector
+ {
TrackSelector(TrackType, QGstElement selector);
QGstPad createInputPad();
void removeInputPad(QGstPad pad);
@@ -115,22 +110,36 @@ private:
void decoderPadAdded(const QGstElement &src, const QGstPad &pad);
void decoderPadRemoved(const QGstElement &src, const QGstPad &pad);
void disconnectDecoderHandlers();
- static void uridecodebinElementAddedCallback(GstElement *uridecodebin, GstElement *child, QGstreamerMediaPlayer *that);
- static void sourceSetupCallback(GstElement *uridecodebin, GstElement *source, QGstreamerMediaPlayer *that);
+ static void uridecodebinElementAddedCallback(GstElement *uridecodebin, GstElement *child,
+ QGstreamerMediaPlayer *that);
+ static void sourceSetupCallback(GstElement *uridecodebin, GstElement *source,
+ QGstreamerMediaPlayer *that);
static void unknownTypeCallback(GstElement *decodebin, GstPad *pad, GstCaps *caps,
QGstreamerMediaPlayer *self);
+ static void decodebinElementAddedCallback(GstBin *decodebin, GstBin *sub_bin,
+ GstElement *element, QGstreamerMediaPlayer *self);
+ static void decodebinElementRemovedCallback(GstBin *decodebin, GstBin *sub_bin,
+ GstElement *element, QGstreamerMediaPlayer *self);
+
void parseStreamsAndMetadata();
void connectOutput(TrackSelector &ts);
void removeOutput(TrackSelector &ts);
+ void removeDynamicPipelineElements();
void removeAllOutputs();
void stopOrEOS(bool eos);
+ bool canTrackProgress() const { return decodeBinQueues > 0; }
+ void detectPipelineIsSeekable();
+
+ std::chrono::nanoseconds pipelinePosition() const;
+ void updatePositionFromPipeline();
+ void updateDurationFromPipeline();
+ void updateBufferProgress(float);
std::array<TrackSelector, NTrackTypes> trackSelectors;
TrackSelector &trackSelector(TrackType type);
QMediaMetaData m_metaData;
- int m_bufferProgress = 0;
QUrl m_url;
QIODevice *m_stream = nullptr;
@@ -142,13 +151,16 @@ private:
bool prerolling = false;
bool m_requiresSeekOnPlay = false;
+ bool m_initialBufferProgressSent = false;
ResourceErrorState m_resourceErrorState = ResourceErrorState::NoError;
- qint64 m_duration = 0;
+ float m_rate = 1.f;
+ float m_bufferProgress = 0.f;
+ std::chrono::milliseconds m_duration{};
QTimer positionUpdateTimer;
QGstAppSource *m_appSrc = nullptr;
- QGstStructure topology;
+ QUniqueGstStructureHandle topology;
// Gst elements
QGstPipeline playerPipeline;
@@ -160,14 +172,26 @@ private:
// QGstElement streamSynchronizer;
- QHash<QByteArray, QGstPad> decoderOutputMap;
+ struct QGstPadLess
+ {
+ bool operator()(const QGstPad &lhs, const QGstPad &rhs) const
+ {
+ return lhs.pad() < rhs.pad();
+ }
+ };
+
+ std::map<QGstPad, QGstPad, QGstPadLess> decoderOutputMap;
// decoder connections
QGObjectHandlerScopedConnection padAdded;
QGObjectHandlerScopedConnection padRemoved;
QGObjectHandlerScopedConnection sourceSetup;
- QGObjectHandlerScopedConnection elementAdded;
+ QGObjectHandlerScopedConnection uridecodebinElementAdded;
QGObjectHandlerScopedConnection unknownType;
+ QGObjectHandlerScopedConnection elementAdded;
+ QGObjectHandlerScopedConnection elementRemoved;
+
+ int decodeBinQueues = 0;
};
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamermessage_p.h b/src/plugins/multimedia/gstreamer/common/qgstreamermessage_p.h
index 01fe68acb..9836bd0cb 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreamermessage_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamermessage_p.h
@@ -43,7 +43,7 @@ public:
GstMessageType type() const { return GST_MESSAGE_TYPE(get()); }
QGstObject source() const { return QGstObject(GST_MESSAGE_SRC(get()), QGstObject::NeedsRef); }
- QGstStructure structure() const { return QGstStructure(gst_message_get_structure(get())); }
+ QGstStructureView structure() const { return QGstStructureView(gst_message_get_structure(get())); }
GstMessage *message() const { return get(); }
};
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamermetadata.cpp b/src/plugins/multimedia/gstreamer/common/qgstreamermetadata.cpp
index 953acb56a..4ac679873 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreamermetadata.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamermetadata.cpp
@@ -3,6 +3,7 @@
#include "qgstreamermetadata_p.h"
#include <QtMultimedia/qmediametadata.h>
+#include <QtMultimedia/qtvideo.h>
#include <QtCore/qdebug.h>
#include <QtCore/qdatetime.h>
#include <QtCore/qlocale.h>
@@ -12,267 +13,477 @@
#include <gst/gstversion.h>
#include <common/qgst_handle_types_p.h>
#include <common/qgstutils_p.h>
+#include <qgstreamerformatinfo_p.h>
QT_BEGIN_NAMESPACE
-struct {
+namespace {
+
+namespace MetadataLookupImpl {
+
+#ifdef __cpp_lib_constexpr_algorithms
+# define constexpr_lookup constexpr
+#else
+# define constexpr_lookup /*constexpr*/
+#endif
+
+struct MetadataKeyValuePair
+{
const char *tag;
QMediaMetaData::Key key;
-} gstTagToMetaDataKey[] = {
- { GST_TAG_TITLE, QMediaMetaData::Title },
- { GST_TAG_COMMENT, QMediaMetaData::Comment },
- { GST_TAG_DESCRIPTION, QMediaMetaData::Description },
- { GST_TAG_GENRE, QMediaMetaData::Genre },
- { GST_TAG_DATE_TIME, QMediaMetaData::Date },
- { GST_TAG_DATE, QMediaMetaData::Date },
+};
+
+constexpr const char *toTag(const char *t)
+{
+ return t;
+}
+constexpr const char *toTag(const MetadataKeyValuePair &kv)
+{
+ return kv.tag;
+}
+
+constexpr QMediaMetaData::Key toKey(QMediaMetaData::Key k)
+{
+ return k;
+}
+constexpr QMediaMetaData::Key toKey(const MetadataKeyValuePair &kv)
+{
+ return kv.key;
+}
+
+constexpr auto compareByKey = [](const auto &lhs, const auto &rhs) {
+ return toKey(lhs) < toKey(rhs);
+};
- { GST_TAG_LANGUAGE_CODE, QMediaMetaData::Language },
+constexpr auto compareByTag = [](const auto &lhs, const auto &rhs) {
+ return std::strcmp(toTag(lhs), toTag(rhs)) < 0;
+};
+
+constexpr_lookup auto makeLookupTable()
+{
+ std::array<MetadataKeyValuePair, 22> lookupTable{ {
+ { GST_TAG_TITLE, QMediaMetaData::Title },
+ { GST_TAG_COMMENT, QMediaMetaData::Comment },
+ { GST_TAG_DESCRIPTION, QMediaMetaData::Description },
+ { GST_TAG_GENRE, QMediaMetaData::Genre },
+ { GST_TAG_DATE_TIME, QMediaMetaData::Date },
+ { GST_TAG_DATE, QMediaMetaData::Date },
+
+ { GST_TAG_LANGUAGE_CODE, QMediaMetaData::Language },
+
+ { GST_TAG_ORGANIZATION, QMediaMetaData::Publisher },
+ { GST_TAG_COPYRIGHT, QMediaMetaData::Copyright },
+
+ // Media
+ { GST_TAG_DURATION, QMediaMetaData::Duration },
+
+ // Audio
+ { GST_TAG_BITRATE, QMediaMetaData::AudioBitRate },
+ { GST_TAG_AUDIO_CODEC, QMediaMetaData::AudioCodec },
+
+ // Music
+ { GST_TAG_ALBUM, QMediaMetaData::AlbumTitle },
+ { GST_TAG_ALBUM_ARTIST, QMediaMetaData::AlbumArtist },
+ { GST_TAG_ARTIST, QMediaMetaData::ContributingArtist },
+ { GST_TAG_TRACK_NUMBER, QMediaMetaData::TrackNumber },
+
+ { GST_TAG_PREVIEW_IMAGE, QMediaMetaData::ThumbnailImage },
+ { GST_TAG_IMAGE, QMediaMetaData::CoverArtImage },
+
+ // Image/Video
+ { "resolution", QMediaMetaData::Resolution },
+ { GST_TAG_IMAGE_ORIENTATION, QMediaMetaData::Orientation },
+
+ // Video
+ { GST_TAG_VIDEO_CODEC, QMediaMetaData::VideoCodec },
+
+ // Movie
+ { GST_TAG_PERFORMER, QMediaMetaData::LeadPerformer },
+ } };
+
+ std::sort(lookupTable.begin(), lookupTable.end(),
+ [](const MetadataKeyValuePair &lhs, const MetadataKeyValuePair &rhs) {
+ return std::string_view(lhs.tag) < std::string_view(rhs.tag);
+ });
+ return lookupTable;
+}
- { GST_TAG_ORGANIZATION, QMediaMetaData::Publisher },
- { GST_TAG_COPYRIGHT, QMediaMetaData::Copyright },
+constexpr_lookup auto gstTagToMetaDataKey = makeLookupTable();
+constexpr_lookup auto metaDataKeyToGstTag = [] {
+ auto array = gstTagToMetaDataKey;
+ std::sort(array.begin(), array.end(), compareByKey);
+ return array;
+}();
- // Media
- { GST_TAG_DURATION, QMediaMetaData::Duration },
+} // namespace MetadataLookupImpl
- // Audio
- { GST_TAG_BITRATE, QMediaMetaData::AudioBitRate },
- { GST_TAG_AUDIO_CODEC, QMediaMetaData::AudioCodec },
+QMediaMetaData::Key tagToKey(const char *tag)
+{
+ if (tag == nullptr)
+ return QMediaMetaData::Key(-1);
- // Music
- { GST_TAG_ALBUM, QMediaMetaData::AlbumTitle },
- { GST_TAG_ALBUM_ARTIST, QMediaMetaData::AlbumArtist },
- { GST_TAG_ARTIST, QMediaMetaData::ContributingArtist },
- { GST_TAG_TRACK_NUMBER, QMediaMetaData::TrackNumber },
+ using namespace MetadataLookupImpl;
+ auto foundIterator = std::lower_bound(gstTagToMetaDataKey.begin(), gstTagToMetaDataKey.end(),
+ tag, compareByTag);
+ if (std::strcmp(foundIterator->tag, tag) == 0)
+ return foundIterator->key;
- { GST_TAG_PREVIEW_IMAGE, QMediaMetaData::ThumbnailImage },
- { GST_TAG_IMAGE, QMediaMetaData::CoverArtImage },
+ return QMediaMetaData::Key(-1);
+}
- // Image/Video
- { "resolution", QMediaMetaData::Resolution },
- { GST_TAG_IMAGE_ORIENTATION, QMediaMetaData::Orientation },
+const char *keyToTag(QMediaMetaData::Key key)
+{
+ using namespace MetadataLookupImpl;
+ auto foundIterator = std::lower_bound(metaDataKeyToGstTag.begin(), metaDataKeyToGstTag.end(),
+ key, compareByKey);
+ if (foundIterator->key == key)
+ return foundIterator->tag;
- // Video
- { GST_TAG_VIDEO_CODEC, QMediaMetaData::VideoCodec },
+ return nullptr;
+}
- // Movie
- { GST_TAG_PERFORMER, QMediaMetaData::LeadPerformer },
+#undef constexpr_lookup
- { nullptr, QMediaMetaData::Title }
-};
+QtVideo::Rotation parseRotationTag(const char *string)
+{
+ using namespace std::string_view_literals;
+
+ if (string == "rotate-90"sv)
+ return QtVideo::Rotation::Clockwise90;
+ if (string == "rotate-180"sv)
+ return QtVideo::Rotation::Clockwise180;
+ if (string == "rotate-270"sv)
+ return QtVideo::Rotation::Clockwise270;
+ if (string == "rotate-0"sv)
+ return QtVideo::Rotation::None;
+
+ qCritical() << "cannot parse orientation: {}" << string;
+ return QtVideo::Rotation::None;
+}
+
+QDateTime parseDate(const GValue &val)
+{
+ Q_ASSERT(G_VALUE_TYPE(&val) == G_TYPE_DATE);
+
+ const GDate *date = (const GDate *)g_value_get_boxed(&val);
+ if (!g_date_valid(date))
+ return {};
+
+ int year = g_date_get_year(date);
+ int month = g_date_get_month(date);
+ int day = g_date_get_day(date);
+ return QDateTime(QDate(year, month, day), QTime());
+}
-static QMediaMetaData::Key tagToKey(const char *tag)
+QDateTime parseDateTime(const GValue &val)
{
- auto *map = gstTagToMetaDataKey;
- while (map->tag) {
- if (!strcmp(map->tag, tag))
- return map->key;
- ++map;
+ Q_ASSERT(G_VALUE_TYPE(&val) == GST_TYPE_DATE_TIME);
+
+ const GstDateTime *dateTime = (const GstDateTime *)g_value_get_boxed(&val);
+ int year = gst_date_time_has_year(dateTime) ? gst_date_time_get_year(dateTime) : 0;
+ int month = gst_date_time_has_month(dateTime) ? gst_date_time_get_month(dateTime) : 0;
+ int day = gst_date_time_has_day(dateTime) ? gst_date_time_get_day(dateTime) : 0;
+ int hour = 0;
+ int minute = 0;
+ int second = 0;
+ float tz = 0;
+ if (gst_date_time_has_time(dateTime)) {
+ hour = gst_date_time_get_hour(dateTime);
+ minute = gst_date_time_get_minute(dateTime);
+ second = gst_date_time_get_second(dateTime);
+ tz = gst_date_time_get_time_zone_offset(dateTime);
}
- return QMediaMetaData::Key(-1);
+ return QDateTime{
+ QDate(year, month, day),
+ QTime(hour, minute, second),
+ QTimeZone(tz * 60 * 60),
+ };
}
-static const char *keyToTag(QMediaMetaData::Key key)
+QImage parseImage(const GValue &val)
{
- auto *map = gstTagToMetaDataKey;
- while (map->tag) {
- if (map->key == key)
- return map->tag;
- ++map;
+ Q_ASSERT(G_VALUE_TYPE(&val) == GST_TYPE_SAMPLE);
+
+ GstSample *sample = (GstSample *)g_value_get_boxed(&val);
+ GstCaps *caps = gst_sample_get_caps(sample);
+ if (caps && !gst_caps_is_empty(caps)) {
+ GstStructure *structure = gst_caps_get_structure(caps, 0);
+ const gchar *name = gst_structure_get_name(structure);
+ if (QByteArray(name).startsWith("image/")) {
+ GstBuffer *buffer = gst_sample_get_buffer(sample);
+ if (buffer) {
+ GstMapInfo info;
+ gst_buffer_map(buffer, &info, GST_MAP_READ);
+ QImage image = QImage::fromData(info.data, info.size, name);
+ gst_buffer_unmap(buffer, &info);
+ return image;
+ }
+ }
+ }
+
+ return {};
+}
+
+std::optional<double> parseFractionAsDouble(const GValue &val)
+{
+ Q_ASSERT(G_VALUE_TYPE(&val) == GST_TYPE_FRACTION);
+
+ int nom = gst_value_get_fraction_numerator(&val);
+ int denom = gst_value_get_fraction_denominator(&val);
+ if (denom == 0)
+ return std::nullopt;
+ return double(nom) / double(denom);
+}
+
+constexpr std::string_view extendedComment{ GST_TAG_EXTENDED_COMMENT };
+
+void addTagsFromExtendedComment(const GstTagList *list, const gchar *tag, QMediaMetaData &metadata)
+{
+ using namespace Qt::Literals;
+ assert(tag == extendedComment);
+
+ int entryCount = gst_tag_list_get_tag_size(list, tag);
+ for (int i = 0; i != entryCount; ++i) {
+ const GValue *value = gst_tag_list_get_value_index(list, tag, i);
+
+ const QLatin1StringView strValue{ g_value_get_string(value) };
+
+ auto equalIndex = strValue.indexOf(QLatin1StringView("="));
+ if (equalIndex == -1) {
+ qDebug() << "Cannot parse GST_TAG_EXTENDED_COMMENT entry: " << value;
+ continue;
+ }
+
+ const QLatin1StringView key = strValue.first(equalIndex);
+ const QLatin1StringView valueString = strValue.last(strValue.size() - equalIndex - 1);
+
+ if (key == "DURATION"_L1) {
+ QUniqueGstDateTimeHandle duration{
+ gst_date_time_new_from_iso8601_string(valueString.data()),
+ };
+
+ if (duration) {
+ using namespace std::chrono;
+
+ auto chronoDuration = hours(gst_date_time_get_hour(duration.get()))
+ + minutes(gst_date_time_get_minute(duration.get()))
+ + seconds(gst_date_time_get_second(duration.get()))
+ + microseconds(gst_date_time_get_microsecond(duration.get()));
+
+ metadata.insert(QMediaMetaData::Duration,
+ QVariant::fromValue(round<milliseconds>(chronoDuration).count()));
+ }
+ }
}
- return nullptr;
}
-//internal
-static void addTagToMap(const GstTagList *list,
- const gchar *tag,
- gpointer user_data)
+void addTagToMetaData(const GstTagList *list, const gchar *tag, void *userdata)
{
+ QMediaMetaData &metadata = *reinterpret_cast<QMediaMetaData *>(userdata);
+
QMediaMetaData::Key key = tagToKey(tag);
- if (key == QMediaMetaData::Key(-1))
- return;
+ if (key == QMediaMetaData::Key(-1)) {
+ if (tag == extendedComment)
+ addTagsFromExtendedComment(list, tag, metadata);
- auto *map = reinterpret_cast<QHash<QMediaMetaData::Key, QVariant>* >(user_data);
+ return;
+ }
- GValue val;
- val.g_type = 0;
+ GValue val{};
gst_tag_list_copy_value(&val, list, tag);
- switch (G_VALUE_TYPE(&val)) {
- case G_TYPE_STRING: {
+ GType type = G_VALUE_TYPE(&val);
+
+ if (auto entryCount = gst_tag_list_get_tag_size(list, tag) != 0; entryCount != 1)
+ qWarning() << "addTagToMetaData: invaled entry count for" << tag << "-" << entryCount;
+
+ if (type == G_TYPE_STRING) {
const gchar *str_value = g_value_get_string(&val);
- if (key == QMediaMetaData::Language) {
- map->insert(key,
- QVariant::fromValue(QLocale::codeToLanguage(QString::fromUtf8(str_value),
- QLocale::ISO639Part2)));
+
+ switch (key) {
+ case QMediaMetaData::Language: {
+ metadata.insert(key,
+ QVariant::fromValue(QLocale::codeToLanguage(
+ QString::fromUtf8(str_value), QLocale::ISO639Part2)));
break;
}
- map->insert(key, QString::fromUtf8(str_value));
- break;
- }
- case G_TYPE_INT:
- map->insert(key, g_value_get_int(&val));
- break;
- case G_TYPE_UINT:
- map->insert(key, g_value_get_uint(&val));
- break;
- case G_TYPE_LONG:
- map->insert(key, qint64(g_value_get_long(&val)));
- break;
- case G_TYPE_BOOLEAN:
- map->insert(key, g_value_get_boolean(&val));
- break;
- case G_TYPE_CHAR:
- map->insert(key, g_value_get_schar(&val));
- break;
- case G_TYPE_DOUBLE:
- map->insert(key, g_value_get_double(&val));
- break;
- default:
- // GST_TYPE_DATE is a function, not a constant, so pull it out of the switch
- if (G_VALUE_TYPE(&val) == G_TYPE_DATE) {
- const GDate *date = (const GDate *)g_value_get_boxed(&val);
- if (g_date_valid(date)) {
- int year = g_date_get_year(date);
- int month = g_date_get_month(date);
- int day = g_date_get_day(date);
- // don't insert if we already have a datetime.
- if (!map->contains(key))
- map->insert(key, QDateTime(QDate(year, month, day), QTime()));
- }
- } else if (G_VALUE_TYPE(&val) == GST_TYPE_DATE_TIME) {
- const GstDateTime *dateTime = (const GstDateTime *)g_value_get_boxed(&val);
- int year = gst_date_time_has_year(dateTime) ? gst_date_time_get_year(dateTime) : 0;
- int month = gst_date_time_has_month(dateTime) ? gst_date_time_get_month(dateTime) : 0;
- int day = gst_date_time_has_day(dateTime) ? gst_date_time_get_day(dateTime) : 0;
- int hour = 0;
- int minute = 0;
- int second = 0;
- float tz = 0;
- if (gst_date_time_has_time(dateTime)) {
- hour = gst_date_time_get_hour(dateTime);
- minute = gst_date_time_get_minute(dateTime);
- second = gst_date_time_get_second(dateTime);
- tz = gst_date_time_get_time_zone_offset(dateTime);
- }
- QDateTime qDateTime(QDate(year, month, day), QTime(hour, minute, second),
- QTimeZone(tz * 60 * 60));
- map->insert(key, qDateTime);
- } else if (G_VALUE_TYPE(&val) == GST_TYPE_SAMPLE) {
- GstSample *sample = (GstSample *)g_value_get_boxed(&val);
- GstCaps *caps = gst_sample_get_caps(sample);
- if (caps && !gst_caps_is_empty(caps)) {
- GstStructure *structure = gst_caps_get_structure(caps, 0);
- const gchar *name = gst_structure_get_name(structure);
- if (QByteArray(name).startsWith("image/")) {
- GstBuffer *buffer = gst_sample_get_buffer(sample);
- if (buffer) {
- GstMapInfo info;
- gst_buffer_map(buffer, &info, GST_MAP_READ);
- map->insert(key, QImage::fromData(info.data, info.size, name));
- gst_buffer_unmap(buffer, &info);
- }
- }
- }
- } else if (G_VALUE_TYPE(&val) == GST_TYPE_FRACTION) {
- int nom = gst_value_get_fraction_numerator(&val);
- int denom = gst_value_get_fraction_denominator(&val);
-
- if (denom > 0) {
- map->insert(key, double(nom) / denom);
- }
+ case QMediaMetaData::Orientation: {
+ metadata.insert(key, QVariant::fromValue(parseRotationTag(str_value)));
+ break;
+ }
+ default:
+ metadata.insert(key, QString::fromUtf8(str_value));
+ break;
+ };
+ } else if (type == G_TYPE_INT) {
+ metadata.insert(key, g_value_get_int(&val));
+ } else if (type == G_TYPE_UINT) {
+ metadata.insert(key, g_value_get_uint(&val));
+ } else if (type == G_TYPE_LONG) {
+ metadata.insert(key, qint64(g_value_get_long(&val)));
+ } else if (type == G_TYPE_BOOLEAN) {
+ metadata.insert(key, g_value_get_boolean(&val));
+ } else if (type == G_TYPE_CHAR) {
+ metadata.insert(key, g_value_get_schar(&val));
+ } else if (type == G_TYPE_DOUBLE) {
+ metadata.insert(key, g_value_get_double(&val));
+ } else if (type == G_TYPE_DATE) {
+ if (!metadata.keys().contains(key)) {
+ QDateTime date = parseDate(val);
+ if (date.isValid())
+ metadata.insert(key, date);
}
- break;
+ } else if (type == GST_TYPE_DATE_TIME) {
+ metadata.insert(key, parseDateTime(val));
+ } else if (type == GST_TYPE_SAMPLE) {
+ QImage image = parseImage(val);
+ if (!image.isNull())
+ metadata.insert(key, image);
+ } else if (type == GST_TYPE_FRACTION) {
+ std::optional<double> fraction = parseFractionAsDouble(val);
+
+ if (fraction)
+ metadata.insert(key, *fraction);
}
g_value_unset(&val);
}
+} // namespace
-QGstreamerMetaData QGstreamerMetaData::fromGstTagList(const GstTagList *tags)
+QMediaMetaData taglistToMetaData(const QGstTagListHandle &handle)
{
- QGstreamerMetaData m;
- gst_tag_list_foreach(tags, addTagToMap, &m.data);
+ QMediaMetaData m;
+ extendMetaDataFromTagList(m, handle);
return m;
}
-
-void QGstreamerMetaData::setMetaData(GstElement *element) const
+void extendMetaDataFromTagList(QMediaMetaData &metadata, const QGstTagListHandle &handle)
{
- if (!GST_IS_TAG_SETTER(element))
- return;
+ if (handle)
+ gst_tag_list_foreach(handle.get(), reinterpret_cast<GstTagForeachFunc>(&addTagToMetaData),
+ &metadata);
+}
- gst_tag_setter_reset_tags(GST_TAG_SETTER(element));
+static void applyMetaDataToTagSetter(const QMediaMetaData &metadata, GstTagSetter *element)
+{
+ gst_tag_setter_reset_tags(element);
- for (auto it = data.cbegin(), end = data.cend(); it != end; ++it) {
- const char *tagName = keyToTag(it.key());
+ for (QMediaMetaData::Key key : metadata.keys()) {
+ const char *tagName = keyToTag(key);
if (!tagName)
continue;
- const QVariant &tagValue = it.value();
+ const QVariant &tagValue = metadata.value(key);
+
+ auto setTag = [&](const auto &value) {
+ gst_tag_setter_add_tags(element, GST_TAG_MERGE_REPLACE, tagName, value, nullptr);
+ };
switch (tagValue.typeId()) {
- case QMetaType::QString:
- gst_tag_setter_add_tags(GST_TAG_SETTER(element),
- GST_TAG_MERGE_REPLACE,
- tagName,
- tagValue.toString().toUtf8().constData(),
- nullptr);
- break;
- case QMetaType::Int:
- case QMetaType::LongLong:
- gst_tag_setter_add_tags(GST_TAG_SETTER(element),
- GST_TAG_MERGE_REPLACE,
- tagName,
- tagValue.toInt(),
- nullptr);
- break;
- case QMetaType::Double:
- gst_tag_setter_add_tags(GST_TAG_SETTER(element),
- GST_TAG_MERGE_REPLACE,
- tagName,
- tagValue.toDouble(),
- nullptr);
- break;
- case QMetaType::QDate:
- case QMetaType::QDateTime: {
- QDateTime date = tagValue.toDateTime();
-
- QGstGstDateTimeHandle dateTime{
- gst_date_time_new(date.offsetFromUtc() / 60. / 60., date.date().year(),
- date.date().month(), date.date().day(), date.time().hour(),
- date.time().minute(), date.time().second()),
- QGstGstDateTimeHandle::HasRef,
- };
-
- gst_tag_setter_add_tags(GST_TAG_SETTER(element), GST_TAG_MERGE_REPLACE, tagName,
- dateTime.get(), nullptr);
- break;
- }
- default: {
- if (tagValue.typeId() == qMetaTypeId<QLocale::Language>()) {
- QByteArray language = QLocale::languageToCode(tagValue.value<QLocale::Language>(), QLocale::ISO639Part2).toUtf8();
- gst_tag_setter_add_tags(GST_TAG_SETTER(element),
- GST_TAG_MERGE_REPLACE,
- tagName,
- language.constData(),
- nullptr);
- }
-
- break;
+ case QMetaType::QString:
+ setTag(tagValue.toString().toUtf8().constData());
+ break;
+ case QMetaType::Int:
+ case QMetaType::LongLong:
+ setTag(tagValue.toInt());
+ break;
+ case QMetaType::Double:
+ setTag(tagValue.toDouble());
+ break;
+ case QMetaType::QDate:
+ case QMetaType::QDateTime: {
+ QDateTime date = tagValue.toDateTime();
+
+ QGstGstDateTimeHandle dateTime{
+ gst_date_time_new(date.offsetFromUtc() / 60. / 60., date.date().year(),
+ date.date().month(), date.date().day(), date.time().hour(),
+ date.time().minute(), date.time().second()),
+ QGstGstDateTimeHandle::HasRef,
+ };
+
+ setTag(dateTime.get());
+ break;
+ }
+ default: {
+ if (tagValue.typeId() == qMetaTypeId<QLocale::Language>()) {
+ QByteArray language = QLocale::languageToCode(tagValue.value<QLocale::Language>(),
+ QLocale::ISO639Part2)
+ .toUtf8();
+ setTag(language.constData());
}
+
+ break;
+ }
}
}
}
-void QGstreamerMetaData::setMetaData(GstBin *bin) const
+void applyMetaDataToTagSetter(const QMediaMetaData &metadata, const QGstElement &element)
{
- GstIterator *elements = gst_bin_iterate_all_by_interface(bin, GST_TYPE_TAG_SETTER);
- GValue item = G_VALUE_INIT;
+ GstTagSetter *tagSetter = qGstSafeCast<GstTagSetter>(element.element());
+ if (tagSetter)
+ applyMetaDataToTagSetter(metadata, tagSetter);
+ else
+ qWarning() << "applyMetaDataToTagSetter failed: element not a GstTagSetter"
+ << element.name();
+}
+
+void applyMetaDataToTagSetter(const QMediaMetaData &metadata, const QGstBin &bin)
+{
+ GstIterator *elements = gst_bin_iterate_all_by_interface(bin.bin(), GST_TYPE_TAG_SETTER);
+ GValue item = {};
+
while (gst_iterator_next(elements, &item) == GST_ITERATOR_OK) {
- GstElement * const element = GST_ELEMENT(g_value_get_object(&item));
- setMetaData(element);
+ GstElement *element = static_cast<GstElement *>(g_value_get_object(&item));
+ if (!element)
+ continue;
+
+ GstTagSetter *tagSetter = qGstSafeCast<GstTagSetter>(element);
+
+ if (tagSetter)
+ applyMetaDataToTagSetter(metadata, tagSetter);
}
+
gst_iterator_free(elements);
}
+void extendMetaDataFromCaps(QMediaMetaData &metadata, const QGstCaps &caps)
+{
+ QGstStructureView structure = caps.at(0);
+
+ QMediaFormat::FileFormat fileFormat = QGstreamerFormatInfo::fileFormatForCaps(structure);
+ if (fileFormat != QMediaFormat::FileFormat::UnspecifiedFormat) {
+ // Container caps
+ metadata.insert(QMediaMetaData::FileFormat, fileFormat);
+ return;
+ }
+
+ QMediaFormat::AudioCodec audioCodec = QGstreamerFormatInfo::audioCodecForCaps(structure);
+ if (audioCodec != QMediaFormat::AudioCodec::Unspecified) {
+ // Audio stream caps
+ metadata.insert(QMediaMetaData::AudioCodec, QVariant::fromValue(audioCodec));
+ return;
+ }
+
+ QMediaFormat::VideoCodec videoCodec = QGstreamerFormatInfo::videoCodecForCaps(structure);
+ if (videoCodec != QMediaFormat::VideoCodec::Unspecified) {
+ // Video stream caps
+ metadata.insert(QMediaMetaData::VideoCodec, QVariant::fromValue(videoCodec));
+ std::optional<float> framerate = structure["framerate"].getFraction();
+ if (framerate)
+ metadata.insert(QMediaMetaData::VideoFrameRate, *framerate);
+
+ QSize resolution = structure.resolution();
+ if (resolution.isValid())
+ metadata.insert(QMediaMetaData::Resolution, resolution);
+ }
+}
+
+QMediaMetaData capsToMetaData(const QGstCaps &caps)
+{
+ QMediaMetaData metadata;
+ extendMetaDataFromCaps(metadata, caps);
+ return metadata;
+}
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamermetadata_p.h b/src/plugins/multimedia/gstreamer/common/qgstreamermetadata_p.h
index 7ff5552b2..f04a9aba9 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreamermetadata_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamermetadata_p.h
@@ -16,20 +16,19 @@
//
#include <qmediametadata.h>
-#include <qvariant.h>
-#include <gst/gst.h>
+#include "qgst_p.h"
QT_BEGIN_NAMESPACE
-class QGstreamerMetaData : public QMediaMetaData
-{
-public:
- static QGstreamerMetaData fromGstTagList(const GstTagList *tags);
+QMediaMetaData taglistToMetaData(const QGstTagListHandle &);
+void extendMetaDataFromTagList(QMediaMetaData &, const QGstTagListHandle &);
- void setMetaData(GstBin *bin) const;
- void setMetaData(GstElement *element) const;
-};
+QMediaMetaData capsToMetaData(const QGstCaps &);
+void extendMetaDataFromCaps(QMediaMetaData &, const QGstCaps &);
+
+void applyMetaDataToTagSetter(const QMediaMetaData &metadata, const QGstBin &);
+void applyMetaDataToTagSetter(const QMediaMetaData &metadata, const QGstElement &);
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput.cpp b/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput.cpp
index 053dd973b..2f3197e92 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput.cpp
@@ -19,40 +19,39 @@ QMaybe<QGstreamerVideoOutput *> QGstreamerVideoOutput::create(QObject *parent)
QGstElement videoConvert;
QGstElement videoScale;
- QGstElementFactoryHandle factory = QGstElementFactoryHandle{
- gst_element_factory_find("videoconvertscale"),
- };
+ QGstElementFactoryHandle factory = QGstElement::findFactory("videoconvertscale");
if (factory) { // videoconvertscale is only available in gstreamer 1.20
videoConvert = QGstElement::createFromFactory(factory, "videoConvertScale");
} else {
videoConvert = QGstElement::createFromFactory("videoconvert", "videoConvert");
if (!videoConvert)
- return errorMessageCannotFindElement("videoconvert");
+ return qGstErrorMessageCannotFindElement("videoconvert");
videoScale = QGstElement::createFromFactory("videoscale", "videoScale");
if (!videoScale)
- return errorMessageCannotFindElement("videoscale");
+ return qGstErrorMessageCannotFindElement("videoscale");
}
- QGstElement videoSink = QGstElement::createFromFactory("fakesink", "fakeVideoSink");
- if (!videoSink)
- return errorMessageCannotFindElement("fakesink");
- videoSink.set("sync", true);
+ if (!QGstElement::findFactory("fakesink"))
+ return qGstErrorMessageCannotFindElement("fakesink");
- return new QGstreamerVideoOutput(videoConvert, videoScale, videoSink, parent);
+ return new QGstreamerVideoOutput(videoConvert, videoScale, parent);
}
QGstreamerVideoOutput::QGstreamerVideoOutput(QGstElement convert, QGstElement scale,
- QGstElement sink, QObject *parent)
+ QObject *parent)
: QObject(parent),
gstVideoOutput(QGstBin::create("videoOutput")),
+ videoQueue{
+ QGstElement::createFromFactory("queue", "videoQueue"),
+ },
videoConvert(std::move(convert)),
videoScale(std::move(scale)),
- videoSink(std::move(sink))
+ videoSink{
+ QGstElement::createFromFactory("fakesink", "fakeVideoSink"),
+ }
{
- videoQueue = QGstElement::createFromFactory("queue", "videoQueue");
-
videoSink.set("sync", true);
videoSink.set("async", false); // no asynchronous state changes
@@ -121,11 +120,7 @@ void QGstreamerVideoOutput::setVideoSink(QVideoSink *sink)
qCDebug(qLcMediaVideoOutput) << "sinkChanged" << gstSink.name();
- GST_DEBUG_BIN_TO_DOT_FILE(gstPipeline.bin(),
- GstDebugGraphDetails(/*GST_DEBUG_GRAPH_SHOW_ALL |*/ GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE |
- GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES),
- videoSink.name());
-
+ gstPipeline.dumpGraph(videoSink.name().constData());
}
void QGstreamerVideoOutput::setPipeline(const QGstPipeline &pipeline)
@@ -179,6 +174,14 @@ void QGstreamerVideoOutput::doLinkSubtitleStream()
qLinkGstElements(subtitleSrc, subtitleSink);
}
+void QGstreamerVideoOutput::updateNativeSize()
+{
+ if (!m_videoSink)
+ return;
+
+ m_videoSink->setNativeSize(qRotatedFrameSize(nativeSize, rotation));
+}
+
void QGstreamerVideoOutput::setIsPreview()
{
// configures the queue to be fast and lightweight for camera preview
@@ -204,8 +207,13 @@ void QGstreamerVideoOutput::flushSubtitles()
void QGstreamerVideoOutput::setNativeSize(QSize sz)
{
nativeSize = sz;
- if (m_videoSink)
- m_videoSink->setNativeSize(nativeSize);
+ updateNativeSize();
+}
+
+void QGstreamerVideoOutput::setRotation(QtVideo::Rotation rot)
+{
+ rotation = rot;
+ updateNativeSize();
}
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput_p.h b/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput_p.h
index 62bd4b219..883946dda 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput_p.h
@@ -50,12 +50,13 @@ public:
void flushSubtitles();
void setNativeSize(QSize);
+ void setRotation(QtVideo::Rotation);
private:
- QGstreamerVideoOutput(QGstElement videoConvert, QGstElement videoScale, QGstElement videoSink,
- QObject *parent);
+ QGstreamerVideoOutput(QGstElement videoConvert, QGstElement videoScale, QObject *parent);
void doLinkSubtitleStream();
+ void updateNativeSize();
QPointer<QGstreamerVideoSink> m_videoSink;
@@ -72,6 +73,7 @@ private:
QGstElement subtitleSink;
QSize nativeSize;
+ QtVideo::Rotation rotation{};
};
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamervideosink.cpp b/src/plugins/multimedia/gstreamer/common/qgstreamervideosink.cpp
index 2ed2acb36..bf3ca4239 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreamervideosink.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamervideosink.cpp
@@ -37,10 +37,13 @@ QT_BEGIN_NAMESPACE
static Q_LOGGING_CATEGORY(qLcGstVideoSink, "qt.multimedia.gstvideosink");
QGstreamerVideoSink::QGstreamerVideoSink(QVideoSink *parent)
- : QPlatformVideoSink(parent)
+ : QPlatformVideoSink{
+ parent,
+ },
+ sinkBin{
+ QGstBin::create("videoSinkBin"),
+ }
{
- sinkBin = QGstBin::create("videoSinkBin");
-
// This is a hack for some iMX and NVidia platforms. These require the use of a special video
// conversion element in the pipeline before the video sink, as they unfortunately
// output some proprietary format from the decoder even though it's sometimes marked as
@@ -48,38 +51,44 @@ QGstreamerVideoSink::QGstreamerVideoSink(QVideoSink *parent)
//
// To fix this, simply insert the element into the pipeline if it's available. Otherwise
// we simply use an identity element.
- gstQueue = QGstElement::createFromFactory("queue", "videoSinkQueue");
-
QGstElementFactoryHandle factory;
- // QT_MULTIMEDIA_GSTREAMER_OVERRIDE_VIDEO_CONVERSION_ELEMENT allows users to override the
+ // QT_GSTREAMER_OVERRIDE_VIDEO_CONVERSION_ELEMENT allows users to override the
// conversion element. Ideally we construct the element programatically, though.
- QByteArray preprocessOverride =
- qgetenv("QT_MULTIMEDIA_GSTREAMER_OVERRIDE_VIDEO_CONVERSION_ELEMENT");
+ QByteArray preprocessOverride = qgetenv("QT_GSTREAMER_OVERRIDE_VIDEO_CONVERSION_ELEMENT");
if (!preprocessOverride.isEmpty()) {
- qCDebug(qLcGstVideoSink) << "requesting conversion element from environment: "
+ qCDebug(qLcGstVideoSink) << "requesting conversion element from environment:"
<< preprocessOverride;
- factory = QGstElementFactoryHandle{
- gst_element_factory_find(preprocessOverride.constData()),
- };
+
+ gstPreprocess = QGstBin::createFromPipelineDescription(preprocessOverride, nullptr,
+ /*ghostUnlinkedPads=*/true);
+ if (!gstPreprocess)
+ qCWarning(qLcGstVideoSink) << "Cannot create conversion element:" << preprocessOverride;
}
- if (!factory)
- factory = QGstElementFactoryHandle{
- gst_element_factory_find("imxvideoconvert_g2d"),
+ if (!gstPreprocess) {
+ // This is a hack for some iMX and NVidia platforms. These require the use of a special
+ // video conversion element in the pipeline before the video sink, as they unfortunately
+ // output some proprietary format from the decoder even though it's sometimes marked as
+ // a regular supported video/x-raw format.
+ static constexpr auto decodersToTest = {
+ "imxvideoconvert_g2d",
+ "nvvidconv",
};
- if (!factory)
- factory = QGstElementFactoryHandle{
- gst_element_factory_find("nvvidconv"),
- };
+ for (const char *decoder : decodersToTest) {
+ factory = QGstElement::findFactory(decoder);
+ if (factory)
+ break;
+ }
- if (factory) {
- qCDebug(qLcGstVideoSink) << "instantiating conversion element: "
- << g_type_name(
- gst_element_factory_get_element_type(factory.get()));
+ if (factory) {
+ qCDebug(qLcGstVideoSink)
+ << "instantiating conversion element:"
+ << g_type_name(gst_element_factory_get_element_type(factory.get()));
- gstPreprocess = QGstElement::createFromFactory(factory, "preprocess");
+ gstPreprocess = QGstElement::createFromFactory(factory, "preprocess");
+ }
}
bool disablePixelAspectRatio =
@@ -103,13 +112,13 @@ QGstreamerVideoSink::QGstreamerVideoSink(QVideoSink *parent)
}
if (gstPreprocess) {
- sinkBin.add(gstQueue, gstPreprocess, gstCapsFilter);
- qLinkGstElements(gstQueue, gstPreprocess, gstCapsFilter);
+ sinkBin.add(gstPreprocess, gstCapsFilter);
+ qLinkGstElements(gstPreprocess, gstCapsFilter);
+ sinkBin.addGhostPad(gstPreprocess, "sink");
} else {
- sinkBin.add(gstQueue, gstCapsFilter);
- qLinkGstElements(gstQueue, gstCapsFilter);
+ sinkBin.add(gstCapsFilter);
+ sinkBin.addGhostPad(gstCapsFilter, "sink");
}
- sinkBin.addGhostPad(gstQueue, "sink");
gstSubtitleSink =
QGstElement(GST_ELEMENT(QGstSubtitleSink::createSink(this)), QGstElement::NeedsRef);
@@ -117,6 +126,8 @@ QGstreamerVideoSink::QGstreamerVideoSink(QVideoSink *parent)
QGstreamerVideoSink::~QGstreamerVideoSink()
{
+ emit aboutToBeDestroyed();
+
unrefGstContexts();
setPipeline(QGstPipeline());
@@ -200,6 +211,8 @@ void QGstreamerVideoSink::unrefGstContexts()
void QGstreamerVideoSink::updateGstContexts()
{
+ using namespace Qt::Literals;
+
unrefGstContexts();
#if QT_CONFIG(gstreamer_gl)
@@ -212,12 +225,12 @@ void QGstreamerVideoSink::updateGstContexts()
const QString platform = QGuiApplication::platformName();
QPlatformNativeInterface *pni = QGuiApplication::platformNativeInterface();
- m_eglDisplay = pni->nativeResourceForIntegration("egldisplay");
+ m_eglDisplay = pni->nativeResourceForIntegration("egldisplay"_ba);
// qDebug() << "platform is" << platform << m_eglDisplay;
QGstGLDisplayHandle gstGlDisplay;
- const char *contextName = "eglcontext";
+ QByteArray contextName = "eglcontext"_ba;
GstGLPlatform glPlatform = GST_GL_PLATFORM_EGL;
// use the egl display if we have one
if (m_eglDisplay) {
@@ -227,12 +240,12 @@ void QGstreamerVideoSink::updateGstContexts()
m_eglImageTargetTexture2D = eglGetProcAddress("glEGLImageTargetTexture2DOES");
#endif
} else {
- auto display = pni->nativeResourceForIntegration("display");
+ auto display = pni->nativeResourceForIntegration("display"_ba);
if (display) {
#if GST_GL_HAVE_WINDOW_X11 && __has_include("X11/Xlib-xcb.h")
if (platform == QLatin1String("xcb")) {
- contextName = "glxcontext";
+ contextName = "glxcontext"_ba;
glPlatform = GST_GL_PLATFORM_GLX;
gstGlDisplay.reset(GST_GL_DISPLAY_CAST(
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamervideosink_p.h b/src/plugins/multimedia/gstreamer/common/qgstreamervideosink_p.h
index 132eab557..da00903fb 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreamervideosink_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamervideosink_p.h
@@ -15,25 +15,17 @@
// We mean it.
//
-#include <private/qtmultimediaglobal_p.h>
-#include <private/qplatformvideosink_p.h>
+#include <QtMultimedia/qvideosink.h>
+#include <QtMultimedia/private/qplatformvideosink_p.h>
#include <common/qgstpipeline_p.h>
-#include <common/qgstreamervideooverlay_p.h>
-#include <QtGui/qcolor.h>
-#include <qvideosink.h>
-
-#if QT_CONFIG(gstreamer_gl)
-#include <gst/gl/gl.h>
-#endif
QT_BEGIN_NAMESPACE
-class QGstreamerVideoRenderer;
-class QVideoWindow;
class QGstreamerVideoSink : public QPlatformVideoSink
{
Q_OBJECT
+
public:
explicit QGstreamerVideoSink(QVideoSink *parent = nullptr);
~QGstreamerVideoSink();
@@ -52,6 +44,9 @@ public:
Qt::HANDLE eglDisplay() const { return m_eglDisplay; }
QFunctionPointer eglImageTargetTexture2D() const { return m_eglImageTargetTexture2D; }
+Q_SIGNALS:
+ void aboutToBeDestroyed();
+
private:
void createQtSink();
void updateSinkElement();
@@ -61,7 +56,6 @@ private:
QGstPipeline gstPipeline;
QGstBin sinkBin;
- QGstElement gstQueue;
QGstElement gstPreprocess;
QGstElement gstCapsFilter;
QGstElement gstVideoSink;
diff --git a/src/plugins/multimedia/gstreamer/common/qgstutils.cpp b/src/plugins/multimedia/gstreamer/common/qgstutils.cpp
index d3d5f6124..8ec2bde3c 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstutils.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstutils.cpp
@@ -56,7 +56,7 @@ QAudioFormat QGstUtils::audioFormatForSample(GstSample *sample)
QAudioFormat QGstUtils::audioFormatForCaps(const QGstCaps &caps)
{
QAudioFormat format;
- QGstStructure s = caps.at(0);
+ QGstStructureView s = caps.at(0);
if (s.name() != "audio/x-raw")
return format;
@@ -115,7 +115,7 @@ QList<QAudioFormat::SampleFormat> QGValue::getSampleFormats() const
return formats;
}
-void QGstUtils::setFrameTimeStamps(QVideoFrame *frame, GstBuffer *buffer)
+void QGstUtils::setFrameTimeStampsFromBuffer(QVideoFrame *frame, GstBuffer *buffer)
{
using namespace std::chrono;
using namespace std::chrono_literals;
diff --git a/src/plugins/multimedia/gstreamer/common/qgstutils_p.h b/src/plugins/multimedia/gstreamer/common/qgstutils_p.h
index 4141ae0bf..c65fcf090 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstutils_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstutils_p.h
@@ -31,7 +31,7 @@ QAudioFormat audioFormatForSample(GstSample *sample);
QAudioFormat audioFormatForCaps(const QGstCaps &caps);
QGstCaps capsForAudioFormat(const QAudioFormat &format);
-void setFrameTimeStamps(QVideoFrame *frame, GstBuffer *buffer);
+void setFrameTimeStampsFromBuffer(QVideoFrame *frame, GstBuffer *buffer);
} // namespace QGstUtils
GList *qt_gst_video_sinks();
diff --git a/src/plugins/multimedia/gstreamer/common/qgstvideobuffer.cpp b/src/plugins/multimedia/gstreamer/common/qgstvideobuffer.cpp
index 101d56af6..be6342ea8 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstvideobuffer.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstvideobuffer.cpp
@@ -51,18 +51,19 @@ QT_BEGIN_NAMESPACE
#define DRM_FORMAT_GR1616 fourcc_code('G', 'R', '3', '2') /* [31:0] G:R 16:16 little endian */
#define DRM_FORMAT_BGRA1010102 fourcc_code('B', 'A', '3', '0') /* [31:0] B:G:R:A 10:10:10:2 little endian */
-QGstVideoBuffer::QGstVideoBuffer(GstBuffer *buffer, const GstVideoInfo &info, QGstreamerVideoSink *sink,
- const QVideoFrameFormat &frameFormat,
+QGstVideoBuffer::QGstVideoBuffer(QGstBufferHandle buffer, const GstVideoInfo &info,
+ QGstreamerVideoSink *sink, const QVideoFrameFormat &frameFormat,
QGstCaps::MemoryFormat format)
- : QAbstractVideoBuffer((sink && sink->rhi() && format != QGstCaps::CpuMemory) ?
- QVideoFrame::RhiTextureHandle : QVideoFrame::NoHandle, sink ? sink->rhi() : nullptr)
- , memoryFormat(format)
- , m_frameFormat(frameFormat)
- , m_rhi(sink ? sink->rhi() : nullptr)
- , m_videoInfo(info)
- , m_buffer(buffer)
+ : QHwVideoBuffer((sink && sink->rhi() && format != QGstCaps::CpuMemory)
+ ? QVideoFrame::RhiTextureHandle
+ : QVideoFrame::NoHandle,
+ sink ? sink->rhi() : nullptr),
+ memoryFormat(format),
+ m_frameFormat(frameFormat),
+ m_rhi(sink ? sink->rhi() : nullptr),
+ m_videoInfo(info),
+ m_buffer(std::move(buffer))
{
- gst_buffer_ref(m_buffer);
if (sink) {
eglDisplay = sink->eglDisplay();
eglImageTargetTexture2D = sink->eglImageTargetTexture2D();
@@ -76,41 +77,34 @@ QGstVideoBuffer::QGstVideoBuffer(GstBuffer *buffer, const GstVideoInfo &info, QG
QGstVideoBuffer::~QGstVideoBuffer()
{
unmap();
-
- gst_buffer_unref(m_buffer);
}
-
-QVideoFrame::MapMode QGstVideoBuffer::mapMode() const
+QAbstractVideoBuffer::MapData QGstVideoBuffer::map(QtVideo::MapMode mode)
{
- return m_mode;
-}
-
-QAbstractVideoBuffer::MapData QGstVideoBuffer::map(QVideoFrame::MapMode mode)
-{
- const GstMapFlags flags = GstMapFlags(((mode & QVideoFrame::ReadOnly) ? GST_MAP_READ : 0)
- | ((mode & QVideoFrame::WriteOnly) ? GST_MAP_WRITE : 0));
+ const GstMapFlags flags = GstMapFlags(
+ ((mode & QtVideo::MapMode::ReadOnly ) == QtVideo::MapMode::NotMapped ? 0 : GST_MAP_READ)
+ | ((mode & QtVideo::MapMode::WriteOnly) == QtVideo::MapMode::NotMapped ? 0 : GST_MAP_WRITE));
MapData mapData;
- if (mode == QVideoFrame::NotMapped || m_mode != QVideoFrame::NotMapped)
+ if (mode == QtVideo::MapMode::NotMapped || m_mode != QtVideo::MapMode::NotMapped)
return mapData;
if (m_videoInfo.finfo->n_planes == 0) { // Encoded
- if (gst_buffer_map(m_buffer, &m_frame.map[0], flags)) {
- mapData.nPlanes = 1;
+ if (gst_buffer_map(m_buffer.get(), &m_frame.map[0], flags)) {
+ mapData.planeCount = 1;
mapData.bytesPerLine[0] = -1;
- mapData.size[0] = m_frame.map[0].size;
+ mapData.dataSize[0] = m_frame.map[0].size;
mapData.data[0] = static_cast<uchar *>(m_frame.map[0].data);
m_mode = mode;
}
- } else if (gst_video_frame_map(&m_frame, &m_videoInfo, m_buffer, flags)) {
- mapData.nPlanes = GST_VIDEO_FRAME_N_PLANES(&m_frame);
+ } else if (gst_video_frame_map(&m_frame, &m_videoInfo, m_buffer.get(), flags)) {
+ mapData.planeCount = GST_VIDEO_FRAME_N_PLANES(&m_frame);
for (guint i = 0; i < GST_VIDEO_FRAME_N_PLANES(&m_frame); ++i) {
mapData.bytesPerLine[i] = GST_VIDEO_FRAME_PLANE_STRIDE(&m_frame, i);
mapData.data[i] = static_cast<uchar *>(GST_VIDEO_FRAME_PLANE_DATA(&m_frame, i));
- mapData.size[i] = mapData.bytesPerLine[i]*GST_VIDEO_FRAME_COMP_HEIGHT(&m_frame, i);
+ mapData.dataSize[i] = mapData.bytesPerLine[i]*GST_VIDEO_FRAME_COMP_HEIGHT(&m_frame, i);
}
m_mode = mode;
@@ -120,13 +114,13 @@ QAbstractVideoBuffer::MapData QGstVideoBuffer::map(QVideoFrame::MapMode mode)
void QGstVideoBuffer::unmap()
{
- if (m_mode != QVideoFrame::NotMapped) {
+ if (m_mode != QtVideo::MapMode::NotMapped) {
if (m_videoInfo.finfo->n_planes == 0)
- gst_buffer_unmap(m_buffer, &m_frame.map[0]);
+ gst_buffer_unmap(m_buffer.get(), &m_frame.map[0]);
else
gst_video_frame_unmap(&m_frame);
}
- m_mode = QVideoFrame::NotMapped;
+ m_mode = QtVideo::MapMode::NotMapped;
}
#if QT_CONFIG(gstreamer_gl) && QT_CONFIG(linux_dmabuf)
@@ -258,9 +252,10 @@ private:
std::unique_ptr<QRhiTexture> m_textures[QVideoTextureHelper::TextureDescription::maxPlanes];
};
-
-static GlTextures mapFromGlTexture(GstBuffer *buffer, GstVideoFrame &frame, GstVideoInfo &videoInfo)
+static GlTextures mapFromGlTexture(const QGstBufferHandle &bufferHandle, GstVideoFrame &frame,
+ GstVideoInfo &videoInfo)
{
+ GstBuffer *buffer = bufferHandle.get();
auto *mem = GST_GL_BASE_MEMORY_CAST(gst_buffer_peek_memory(buffer, 0));
if (!mem)
return {};
@@ -293,10 +288,12 @@ static GlTextures mapFromGlTexture(GstBuffer *buffer, GstVideoFrame &frame, GstV
}
#if GST_GL_HAVE_PLATFORM_EGL && QT_CONFIG(linux_dmabuf)
-static GlTextures mapFromDmaBuffer(QRhi *rhi, GstBuffer *buffer, GstVideoFrame &frame,
- GstVideoInfo &videoInfo, Qt::HANDLE eglDisplay,
- QFunctionPointer eglImageTargetTexture2D)
+static GlTextures mapFromDmaBuffer(QRhi *rhi, const QGstBufferHandle &bufferHandle,
+ GstVideoFrame &frame, GstVideoInfo &videoInfo,
+ Qt::HANDLE eglDisplay, QFunctionPointer eglImageTargetTexture2D)
{
+ GstBuffer *buffer = bufferHandle.get();
+
Q_ASSERT(gst_is_dmabuf_memory(gst_buffer_peek_memory(buffer, 0)));
Q_ASSERT(eglDisplay);
Q_ASSERT(eglImageTargetTexture2D);
@@ -377,14 +374,15 @@ std::unique_ptr<QVideoFrameTextures> QGstVideoBuffer::mapTextures(QRhi *rhi)
#if QT_CONFIG(gstreamer_gl)
GlTextures textures = {};
- if (memoryFormat == QGstCaps::GLTexture) {
+ if (memoryFormat == QGstCaps::GLTexture)
textures = mapFromGlTexture(m_buffer, m_frame, m_videoInfo);
- }
-#if GST_GL_HAVE_PLATFORM_EGL && QT_CONFIG(linux_dmabuf)
- else if (memoryFormat == QGstCaps::DMABuf) {
- textures = mapFromDmaBuffer(m_rhi, m_buffer, m_frame, m_videoInfo, eglDisplay, eglImageTargetTexture2D);
- }
-#endif
+
+# if GST_GL_HAVE_PLATFORM_EGL && QT_CONFIG(linux_dmabuf)
+ else if (memoryFormat == QGstCaps::DMABuf)
+ textures = mapFromDmaBuffer(m_rhi, m_buffer, m_frame, m_videoInfo, eglDisplay,
+ eglImageTargetTexture2D);
+
+# endif
if (textures.count > 0)
return std::make_unique<QGstQVideoFrameTextures>(rhi, QSize{m_videoInfo.width, m_videoInfo.height},
m_frameFormat.pixelFormat(), textures);
diff --git a/src/plugins/multimedia/gstreamer/common/qgstvideobuffer_p.h b/src/plugins/multimedia/gstreamer/common/qgstvideobuffer_p.h
index 27567c9f7..573a4662c 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstvideobuffer_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstvideobuffer_p.h
@@ -15,8 +15,7 @@
// We mean it.
//
-#include <private/qtmultimediaglobal_p.h>
-#include <private/qabstractvideobuffer_p.h>
+#include <private/qhwvideobuffer_p.h>
#include <QtCore/qvariant.h>
#include <common/qgst_p.h>
@@ -27,18 +26,14 @@ class QVideoFrameFormat;
class QGstreamerVideoSink;
class QOpenGLContext;
-class QGstVideoBuffer final : public QAbstractVideoBuffer
+class QGstVideoBuffer final : public QHwVideoBuffer
{
public:
-
- QGstVideoBuffer(GstBuffer *buffer, const GstVideoInfo &info, QGstreamerVideoSink *sink,
+ QGstVideoBuffer(QGstBufferHandle buffer, const GstVideoInfo &info, QGstreamerVideoSink *sink,
const QVideoFrameFormat &frameFormat, QGstCaps::MemoryFormat format);
~QGstVideoBuffer();
- GstBuffer *buffer() const { return m_buffer; }
- QVideoFrame::MapMode mapMode() const override;
-
- MapData map(QVideoFrame::MapMode mode) override;
+ MapData map(QtVideo::MapMode mode) override;
void unmap() override;
std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *) override;
@@ -49,8 +44,8 @@ private:
QRhi *m_rhi = nullptr;
mutable GstVideoInfo m_videoInfo;
mutable GstVideoFrame m_frame{};
- GstBuffer *m_buffer = nullptr;
- QVideoFrame::MapMode m_mode = QVideoFrame::NotMapped;
+ const QGstBufferHandle m_buffer;
+ QtVideo::MapMode m_mode = QtVideo::MapMode::NotMapped;
Qt::HANDLE eglDisplay = nullptr;
QFunctionPointer eglImageTargetTexture2D = nullptr;
};
diff --git a/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink.cpp b/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink.cpp
index 0a2de1228..f9c936ea6 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink.cpp
@@ -20,6 +20,8 @@
#include <common/qgst_debug_p.h>
#include <common/qgstutils_p.h>
+#include <private/qvideoframe_p.h>
+
#include <gst/video/video.h>
#include <gst/video/gstvideometa.h>
@@ -41,6 +43,13 @@ QT_BEGIN_NAMESPACE
QGstVideoRenderer::QGstVideoRenderer(QGstreamerVideoSink *sink)
: m_sink(sink), m_surfaceCaps(createSurfaceCaps(sink))
{
+ QObject::connect(
+ sink, &QGstreamerVideoSink::aboutToBeDestroyed, this,
+ [this] {
+ QMutexLocker locker(&m_sinkMutex);
+ m_sink = nullptr;
+ },
+ Qt::DirectConnection);
}
QGstVideoRenderer::~QGstVideoRenderer() = default;
@@ -111,91 +120,104 @@ const QGstCaps &QGstVideoRenderer::caps()
bool QGstVideoRenderer::start(const QGstCaps& caps)
{
qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::start" << caps;
- QMutexLocker locker(&m_mutex);
-
- m_frameMirrored = false;
- m_frameRotationAngle = QtVideo::Rotation::None;
-
- if (m_active) {
- m_flush = true;
- m_stop = true;
- }
-
- m_startCaps = caps;
-
- /*
- Waiting for start() to be invoked in the main thread may block
- if gstreamer blocks the main thread until this call is finished.
- This situation is rare and usually caused by setState(Null)
- while pipeline is being prerolled.
- The proper solution to this involves controlling gstreamer pipeline from
- other thread than video surface.
-
- Currently start() fails if wait() timed out.
- */
- if (!waitForAsyncEvent(&locker, &m_setupCondition, 1000) && !m_startCaps.isNull()) {
- qWarning() << "Failed to start video surface due to main thread blocked.";
- m_startCaps = {};
+ {
+ m_frameRotationAngle = QtVideo::Rotation::None;
+ auto optionalFormatAndVideoInfo = caps.formatAndVideoInfo();
+ if (optionalFormatAndVideoInfo) {
+ std::tie(m_format, m_videoInfo) = std::move(*optionalFormatAndVideoInfo);
+ } else {
+ m_format = {};
+ m_videoInfo = {};
+ }
+ m_memoryFormat = caps.memoryFormat();
}
- return m_active;
+ return true;
}
void QGstVideoRenderer::stop()
{
- QMutexLocker locker(&m_mutex);
+ qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::stop";
- if (!m_active)
+ QMetaObject::invokeMethod(this, [this] {
+ m_currentState.buffer = {};
+ m_sink->setVideoFrame(QVideoFrame{});
return;
-
- m_flush = true;
- m_stop = true;
-
- m_startCaps = {};
-
- waitForAsyncEvent(&locker, &m_setupCondition, 500);
+ });
}
void QGstVideoRenderer::unlock()
{
- QMutexLocker locker(&m_mutex);
-
- m_setupCondition.wakeAll();
- m_renderCondition.wakeAll();
+ qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::unlock";
}
-bool QGstVideoRenderer::proposeAllocation(GstQuery *query)
+bool QGstVideoRenderer::proposeAllocation(GstQuery *)
{
- Q_UNUSED(query);
- QMutexLocker locker(&m_mutex);
- return m_active;
+ qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::proposeAllocation";
+ return true;
}
-void QGstVideoRenderer::flush()
+GstFlowReturn QGstVideoRenderer::render(GstBuffer *buffer)
{
- QMutexLocker locker(&m_mutex);
+ qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::render";
- m_flush = true;
- m_renderBuffer = nullptr;
- m_renderCondition.wakeAll();
+ GstVideoCropMeta *meta = gst_buffer_get_video_crop_meta(buffer);
+ if (meta) {
+ QRect vp(meta->x, meta->y, meta->width, meta->height);
+ if (m_format.viewport() != vp) {
+ qCDebug(qLcGstVideoRenderer)
+ << Q_FUNC_INFO << " Update viewport on Metadata: [" << meta->height << "x"
+ << meta->width << " | " << meta->x << "x" << meta->y << "]";
+ // Update viewport if data is not the same
+ m_format.setViewport(vp);
+ }
+ }
- notify();
-}
+ RenderBufferState state{
+ .buffer = QGstBufferHandle{ buffer, QGstBufferHandle::NeedsRef },
+ .format = m_format,
+ .memoryFormat = m_memoryFormat,
+ .mirrored = m_frameMirrored,
+ .rotationAngle = m_frameRotationAngle,
+ };
-GstFlowReturn QGstVideoRenderer::render(GstBuffer *buffer)
-{
- QMutexLocker locker(&m_mutex);
- qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::render";
+ qCDebug(qLcGstVideoRenderer) << " sending video frame";
+
+ QMetaObject::invokeMethod(this, [this, state = std::move(state)]() mutable {
+ if (state == m_currentState) {
+ // same buffer received twice
+ if (!m_sink || !m_sink->inStoppedState())
+ return;
- m_renderReturn = GST_FLOW_OK;
- m_renderBuffer = buffer;
+ qCDebug(qLcGstVideoRenderer) << " showing empty video frame";
+ m_currentVideoFrame = {};
+ m_sink->setVideoFrame(m_currentVideoFrame);
+ m_currentState = {};
+ return;
+ }
- waitForAsyncEvent(&locker, &m_renderCondition, 300);
+ auto videoBuffer = std::make_unique<QGstVideoBuffer>(state.buffer, m_videoInfo, m_sink,
+ state.format, state.memoryFormat);
+ QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(videoBuffer), state.format);
+ QGstUtils::setFrameTimeStampsFromBuffer(&frame, state.buffer.get());
+ frame.setMirrored(state.mirrored);
+ frame.setRotation(state.rotationAngle);
+ m_currentVideoFrame = std::move(frame);
+ m_currentState = std::move(state);
+
+ if (!m_sink)
+ return;
+
+ if (m_sink->inStoppedState()) {
+ qCDebug(qLcGstVideoRenderer) << " showing empty video frame";
+ m_currentVideoFrame = {};
+ }
- m_renderBuffer = nullptr;
+ m_sink->setVideoFrame(m_currentVideoFrame);
+ });
- return m_renderReturn;
+ return GST_FLOW_OK;
}
bool QGstVideoRenderer::query(GstQuery *query)
@@ -208,6 +230,10 @@ bool QGstVideoRenderer::query(GstQuery *query)
if (strcmp(type, "gst.gl.local_context") != 0)
return false;
+ QMutexLocker locker(&m_sinkMutex);
+ if (!m_sink)
+ return false;
+
auto *gstGlContext = m_sink->gstGlLocalContext();
if (!gstGlContext)
return false;
@@ -224,9 +250,22 @@ bool QGstVideoRenderer::query(GstQuery *query)
void QGstVideoRenderer::gstEvent(GstEvent *event)
{
- if (GST_EVENT_TYPE(event) != GST_EVENT_TAG)
+ switch (GST_EVENT_TYPE(event)) {
+ case GST_EVENT_TAG:
+ qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::gstEvent: Tag";
+ return gstEventHandleTag(event);
+ case GST_EVENT_EOS:
+ qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::gstEvent: EOS";
+ return gstEventHandleEOS(event);
+
+ default:
+ qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::gstEvent: unhandled event - " << event;
return;
+ }
+}
+void QGstVideoRenderer::gstEventHandleTag(GstEvent *event)
+{
GstTagList *taglist = nullptr;
gst_event_parse_tag(event, &taglist);
if (!taglist)
@@ -253,152 +292,28 @@ void QGstVideoRenderer::gstEvent(GstEvent *event)
rotationAngle = (180 + atoi(value.get() + flipRotateLen)) % 360;
}
- QMutexLocker locker(&m_mutex);
m_frameMirrored = mirrored;
switch (rotationAngle) {
- case 0: m_frameRotationAngle = QtVideo::Rotation::None; break;
- case 90: m_frameRotationAngle = QtVideo::Rotation::Clockwise90; break;
- case 180: m_frameRotationAngle = QtVideo::Rotation::Clockwise180; break;
- case 270: m_frameRotationAngle = QtVideo::Rotation::Clockwise270; break;
- default: m_frameRotationAngle = QtVideo::Rotation::None;
+ case 0:
+ m_frameRotationAngle = QtVideo::Rotation::None;
+ break;
+ case 90:
+ m_frameRotationAngle = QtVideo::Rotation::Clockwise90;
+ break;
+ case 180:
+ m_frameRotationAngle = QtVideo::Rotation::Clockwise180;
+ break;
+ case 270:
+ m_frameRotationAngle = QtVideo::Rotation::Clockwise270;
+ break;
+ default:
+ m_frameRotationAngle = QtVideo::Rotation::None;
}
}
-bool QGstVideoRenderer::event(QEvent *event)
+void QGstVideoRenderer::gstEventHandleEOS(GstEvent *)
{
- if (event->type() == QEvent::UpdateRequest) {
- QMutexLocker locker(&m_mutex);
-
- if (m_notified) {
- while (handleEvent(&locker)) {}
- m_notified = false;
- }
- return true;
- }
-
- return QObject::event(event);
-}
-
-bool QGstVideoRenderer::handleEvent(QMutexLocker<QMutex> *locker)
-{
- if (m_flush) {
- m_flush = false;
- if (m_active) {
- locker->unlock();
-
- if (m_sink && !m_flushed)
- m_sink->setVideoFrame(QVideoFrame());
- m_flushed = true;
- locker->relock();
- }
- } else if (m_stop) {
- m_stop = false;
-
- if (m_active) {
- m_active = false;
- m_flushed = true;
- }
- } else if (!m_startCaps.isNull()) {
- Q_ASSERT(!m_active);
-
- auto startCaps = m_startCaps;
- m_startCaps = {};
-
- if (m_sink) {
- locker->unlock();
-
- m_flushed = true;
- auto optionalFormatAndVideoInfo = startCaps.formatAndVideoInfo();
- if (optionalFormatAndVideoInfo) {
- std::tie(m_format, m_videoInfo) = std::move(*optionalFormatAndVideoInfo);
- } else {
- m_format = {};
- m_videoInfo = {};
- }
-
- memoryFormat = startCaps.memoryFormat();
-
- locker->relock();
- m_active = m_format.isValid();
- } else if (m_active) {
- m_active = false;
- m_flushed = true;
- }
-
- } else if (m_renderBuffer) {
- GstBuffer *buffer = m_renderBuffer;
- m_renderBuffer = nullptr;
- m_renderReturn = GST_FLOW_ERROR;
-
- qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::handleEvent(renderBuffer)" << m_active << m_sink;
- if (m_active && m_sink) {
- gst_buffer_ref(buffer);
-
- locker->unlock();
-
- m_flushed = false;
-
- GstVideoCropMeta *meta = gst_buffer_get_video_crop_meta(buffer);
- if (meta) {
- QRect vp(meta->x, meta->y, meta->width, meta->height);
- if (m_format.viewport() != vp) {
- qCDebug(qLcGstVideoRenderer) << Q_FUNC_INFO << " Update viewport on Metadata: [" << meta->height << "x" << meta->width << " | " << meta->x << "x" << meta->y << "]";
- // Update viewport if data is not the same
- m_format.setViewport(vp);
- }
- }
-
- if (m_sink->inStoppedState()) {
- qCDebug(qLcGstVideoRenderer) << " sending empty video frame";
- m_sink->setVideoFrame(QVideoFrame());
- } else {
- QGstVideoBuffer *videoBuffer = new QGstVideoBuffer(buffer, m_videoInfo, m_sink, m_format, memoryFormat);
- QVideoFrame frame(videoBuffer, m_format);
- QGstUtils::setFrameTimeStamps(&frame, buffer);
- frame.setMirrored(m_frameMirrored);
- frame.setRotation(m_frameRotationAngle);
-
- qCDebug(qLcGstVideoRenderer) << " sending video frame";
- m_sink->setVideoFrame(frame);
- }
-
- gst_buffer_unref(buffer);
-
- locker->relock();
-
- m_renderReturn = GST_FLOW_OK;
- }
-
- m_renderCondition.wakeAll();
- } else {
- m_setupCondition.wakeAll();
-
- return false;
- }
- return true;
-}
-
-void QGstVideoRenderer::notify()
-{
- if (!m_notified) {
- m_notified = true;
- QCoreApplication::postEvent(this, new QEvent(QEvent::UpdateRequest));
- }
-}
-
-bool QGstVideoRenderer::waitForAsyncEvent(
- QMutexLocker<QMutex> *locker, QWaitCondition *condition, unsigned long time)
-{
- if (QThread::currentThread() == thread()) {
- while (handleEvent(locker)) {}
- m_notified = false;
-
- return true;
- }
-
- notify();
-
- return condition->wait(&m_mutex, time);
+ stop();
}
static GstVideoSinkClass *gvrs_sink_parent_class;
@@ -412,8 +327,6 @@ QGstVideoRendererSink *QGstVideoRendererSink::createSink(QGstreamerVideoSink *si
QGstVideoRendererSink *gstSink = reinterpret_cast<QGstVideoRendererSink *>(
g_object_new(QGstVideoRendererSink::get_type(), nullptr));
- g_signal_connect(G_OBJECT(gstSink), "notify::show-preroll-frame", G_CALLBACK(handleShowPrerollChange), gstSink);
-
return gstSink;
}
@@ -509,41 +422,9 @@ void QGstVideoRendererSink::finalize(GObject *object)
G_OBJECT_CLASS(gvrs_sink_parent_class)->finalize(object);
}
-void QGstVideoRendererSink::handleShowPrerollChange(GObject *o, GParamSpec *p, gpointer d)
-{
- Q_UNUSED(o);
- Q_UNUSED(p);
- QGstVideoRendererSink *sink = reinterpret_cast<QGstVideoRendererSink *>(d);
-
- gboolean showPrerollFrame = true; // "show-preroll-frame" property is true by default
- g_object_get(G_OBJECT(sink), "show-preroll-frame", &showPrerollFrame, nullptr);
-
- if (!showPrerollFrame) {
- GstState state = GST_STATE_VOID_PENDING;
- GstClockTime timeout = 10000000; // 10 ms
- gst_element_get_state(GST_ELEMENT(sink), &state, nullptr, timeout);
- // show-preroll-frame being set to 'false' while in GST_STATE_PAUSED means
- // the QMediaPlayer was stopped from the paused state.
- // We need to flush the current frame.
- if (state == GST_STATE_PAUSED)
- sink->renderer->flush();
- }
-}
-
GstStateChangeReturn QGstVideoRendererSink::change_state(
GstElement *element, GstStateChange transition)
{
- QGstVideoRendererSink *sink = reinterpret_cast<QGstVideoRendererSink *>(element);
-
- gboolean showPrerollFrame = true; // "show-preroll-frame" property is true by default
- g_object_get(G_OBJECT(element), "show-preroll-frame", &showPrerollFrame, nullptr);
-
- // If show-preroll-frame is 'false' when transitioning from GST_STATE_PLAYING to
- // GST_STATE_PAUSED, it means the QMediaPlayer was stopped.
- // We need to flush the current frame.
- if (transition == GST_STATE_CHANGE_PLAYING_TO_PAUSED && !showPrerollFrame)
- sink->renderer->flush();
-
return GST_ELEMENT_CLASS(gvrs_sink_parent_class)->change_state(element, transition);
}
diff --git a/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink_p.h b/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink_p.h
index 6a923ed32..d9e3db462 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink_p.h
@@ -15,7 +15,11 @@
// We mean it.
//
+#include <QtMultimedia/qvideoframeformat.h>
+#include <QtMultimedia/qvideoframe.h>
#include <QtMultimedia/private/qtmultimediaglobal_p.h>
+#include <QtCore/qmutex.h>
+
#include <gst/video/gstvideosink.h>
#include <gst/video/video.h>
@@ -30,63 +34,61 @@
#include <common/qgst_p.h>
QT_BEGIN_NAMESPACE
-class QVideoSink;
class QGstVideoRenderer : public QObject
{
public:
- explicit QGstVideoRenderer(QGstreamerVideoSink *sink);
+ explicit QGstVideoRenderer(QGstreamerVideoSink *);
~QGstVideoRenderer();
const QGstCaps &caps();
- bool start(const QGstCaps& caps);
+ bool start(const QGstCaps &);
void stop();
void unlock();
- bool proposeAllocation(GstQuery *query);
-
- void flush();
-
- GstFlowReturn render(GstBuffer *buffer);
-
- bool event(QEvent *event) override;
- bool query(GstQuery *query);
- void gstEvent(GstEvent *event);
-
-private slots:
- bool handleEvent(QMutexLocker<QMutex> *locker);
+ bool proposeAllocation(GstQuery *);
+ GstFlowReturn render(GstBuffer *);
+ bool query(GstQuery *);
+ void gstEvent(GstEvent *);
private:
void notify();
- bool waitForAsyncEvent(QMutexLocker<QMutex> *locker, QWaitCondition *condition, unsigned long time);
static QGstCaps createSurfaceCaps(QGstreamerVideoSink *);
- QPointer<QGstreamerVideoSink> m_sink;
-
- QMutex m_mutex;
- QWaitCondition m_setupCondition;
- QWaitCondition m_renderCondition;
+ void gstEventHandleTag(GstEvent *);
+ void gstEventHandleEOS(GstEvent *);
- // --- accessed from multiple threads, need to hold mutex to access
- GstFlowReturn m_renderReturn = GST_FLOW_OK;
- bool m_active = false;
+ QMutex m_sinkMutex;
+ QGstreamerVideoSink *m_sink = nullptr; // written only from qt thread. so only readers on
+ // worker threads need to acquire the lock
+ // --- only accessed from gstreamer thread
const QGstCaps m_surfaceCaps;
-
- QGstCaps m_startCaps;
- GstBuffer *m_renderBuffer = nullptr;
-
- bool m_notified = false;
- bool m_stop = false;
- bool m_flush = false;
+ QVideoFrameFormat m_format;
+ GstVideoInfo m_videoInfo{};
+ QGstCaps::MemoryFormat m_memoryFormat = QGstCaps::CpuMemory;
bool m_frameMirrored = false;
QtVideo::Rotation m_frameRotationAngle = QtVideo::Rotation::None;
- // --- only accessed from one thread
- QVideoFrameFormat m_format;
- GstVideoInfo m_videoInfo{};
- bool m_flushed = true;
- QGstCaps::MemoryFormat memoryFormat = QGstCaps::CpuMemory;
+ // --- only accessed from qt thread
+ QVideoFrame m_currentVideoFrame;
+
+ struct RenderBufferState
+ {
+ QGstBufferHandle buffer;
+ QVideoFrameFormat format;
+ QGstCaps::MemoryFormat memoryFormat;
+ bool mirrored;
+ QtVideo::Rotation rotationAngle;
+
+ bool operator==(const RenderBufferState &rhs) const
+ {
+ return std::tie(buffer, format, memoryFormat, mirrored, rotationAngle)
+ == std::tie(rhs.buffer, rhs.format, rhs.memoryFormat, rhs.mirrored,
+ rhs.rotationAngle);
+ }
+ };
+ RenderBufferState m_currentState;
};
class QGstVideoRendererSink
@@ -105,8 +107,6 @@ private:
static void finalize(GObject *object);
- static void handleShowPrerollChange(GObject *o, GParamSpec *p, gpointer d);
-
static GstStateChangeReturn change_state(GstElement *element, GstStateChange transition);
static GstCaps *get_caps(GstBaseSink *sink, GstCaps *filter);
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera.cpp b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera.cpp
index 8d3cd6baf..c54e8b74b 100644
--- a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera.cpp
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera.cpp
@@ -5,6 +5,7 @@
#include <QtMultimedia/qcameradevice.h>
#include <QtMultimedia/qmediacapturesession.h>
+#include <QtMultimedia/private/qcameradevice_p.h>
#include <QtCore/qdebug.h>
#include <common/qgst_debug_p.h>
@@ -21,36 +22,35 @@ QT_BEGIN_NAMESPACE
QMaybe<QPlatformCamera *> QGstreamerCamera::create(QCamera *camera)
{
- QGstElement videotestsrc = QGstElement::createFromFactory("videotestsrc");
- if (!videotestsrc)
- return errorMessageCannotFindElement("videotestsrc");
+ static const auto error = qGstErrorMessageIfElementsNotAvailable(
+ "videotestsrc", "capsfilter", "videoconvert", "videoscale", "identity");
+ if (error)
+ return *error;
- QGstElement capsFilter = QGstElement::createFromFactory("capsfilter", "videoCapsFilter");
- if (!capsFilter)
- return errorMessageCannotFindElement("capsfilter");
-
- QGstElement videoconvert = QGstElement::createFromFactory("videoconvert", "videoConvert");
- if (!videoconvert)
- return errorMessageCannotFindElement("videoconvert");
-
- QGstElement videoscale = QGstElement::createFromFactory("videoscale", "videoScale");
- if (!videoscale)
- return errorMessageCannotFindElement("videoscale");
-
- return new QGstreamerCamera(videotestsrc, capsFilter, videoconvert, videoscale, camera);
+ return new QGstreamerCamera(camera);
}
-QGstreamerCamera::QGstreamerCamera(QGstElement videotestsrc, QGstElement capsFilter,
- QGstElement videoconvert, QGstElement videoscale,
- QCamera *camera)
- : QPlatformCamera(camera),
- gstCamera(std::move(videotestsrc)),
- gstCapsFilter(std::move(capsFilter)),
- gstVideoConvert(std::move(videoconvert)),
- gstVideoScale(std::move(videoscale))
+QGstreamerCamera::QGstreamerCamera(QCamera *camera)
+ : QGstreamerCameraBase(camera),
+ gstCameraBin{
+ QGstBin::create("camerabin"),
+ },
+ gstCamera{
+ QGstElement::createFromFactory("videotestsrc"),
+ },
+ gstCapsFilter{
+ QGstElement::createFromFactory("capsfilter", "videoCapsFilter"),
+ },
+ gstDecode{
+ QGstElement::createFromFactory("identity"),
+ },
+ gstVideoConvert{
+ QGstElement::createFromFactory("videoconvert", "videoConvert"),
+ },
+ gstVideoScale{
+ QGstElement::createFromFactory("videoscale", "videoScale"),
+ }
{
- gstDecode = QGstElement::createFromFactory("identity");
- gstCameraBin = QGstBin::create("camerabin");
gstCameraBin.add(gstCamera, gstCapsFilter, gstDecode, gstVideoConvert, gstVideoScale);
qLinkGstElements(gstCamera, gstCapsFilter, gstDecode, gstVideoConvert, gstVideoScale);
gstCameraBin.addGhostPad(gstVideoScale, "src");
@@ -80,6 +80,8 @@ void QGstreamerCamera::setActive(bool active)
void QGstreamerCamera::setCamera(const QCameraDevice &camera)
{
+ using namespace Qt::Literals;
+
if (m_cameraDevice == camera)
return;
@@ -90,12 +92,24 @@ void QGstreamerCamera::setCamera(const QCameraDevice &camera)
gstNewCamera = QGstElement::createFromFactory("videotestsrc");
} else {
auto *integration = static_cast<QGstreamerIntegration *>(QGstreamerIntegration::instance());
- auto *device = integration->videoDevice(camera.id());
+ GstDevice *device = integration->videoDevice(camera.id());
+
+ if (!device) {
+ updateError(QCamera::Error::CameraError,
+ u"Failed to create GstDevice for camera: "_s
+ + QString::fromUtf8(camera.id()));
+ return;
+ }
+
gstNewCamera = QGstElement::createFromDevice(device, "camerasrc");
- if (QGstStructure properties = gst_device_get_properties(device); !properties.isNull()) {
- if (properties.name() == "v4l2deviceprovider")
- m_v4l2DevicePath = QString::fromUtf8(properties["device.path"].toString());
- properties.free();
+ QUniqueGstStructureHandle properties{
+ gst_device_get_properties(device),
+ };
+
+ if (properties) {
+ QGstStructureView propertiesView{ properties };
+ if (propertiesView.name() == "v4l2deviceprovider")
+ m_v4l2DevicePath = QString::fromUtf8(propertiesView["device.path"].toString());
}
}
@@ -105,6 +119,8 @@ void QGstreamerCamera::setCamera(const QCameraDevice &camera)
f.pixelFormat() == QVideoFrameFormat::Format_Jpeg ? "jpegdec" : "identity");
QGstPipeline::modifyPipelineWhileNotRunning(gstCamera.getPipeline(), [&] {
+ gstCamera.setStateSync(GST_STATE_READY); // stop camera, as it may have active tasks
+
qUnlinkGstElements(gstCamera, gstCapsFilter, gstDecode, gstVideoConvert);
gstCameraBin.stopAndRemoveElements(gstCamera, gstDecode);
@@ -137,7 +153,7 @@ bool QGstreamerCamera::setCameraFormat(const QCameraFormat &format)
f.pixelFormat() == QVideoFrameFormat::Format_Jpeg ? "jpegdec" : "identity");
QGstPipeline::modifyPipelineWhileNotRunning(gstCamera.getPipeline(), [&] {
- newGstDecode.syncStateWithParent();
+ gstCamera.setStateSync(GST_STATE_READY); // stop camera, as it may have active tasks
qUnlinkGstElements(gstCamera, gstCapsFilter, gstDecode, gstVideoConvert);
gstCameraBin.stopAndRemoveElements(gstDecode);
@@ -703,8 +719,53 @@ int QGstreamerCamera::getV4L2Parameter(quint32 id) const
});
}
+QGstreamerCustomCamera::QGstreamerCustomCamera(QCamera *camera)
+ : QGstreamerCameraBase{
+ camera,
+ },
+ m_userProvidedGstElement{
+ false,
+ }
+{
+}
+
+QGstreamerCustomCamera::QGstreamerCustomCamera(QCamera *camera, QGstElement element)
+ : QGstreamerCameraBase{
+ camera,
+ },
+ gstCamera{
+ std::move(element),
+ },
+ m_userProvidedGstElement{
+ true,
+ }
+{
+}
+
+void QGstreamerCustomCamera::setCamera(const QCameraDevice &device)
+{
+ if (m_userProvidedGstElement)
+ return;
+
+ gstCamera = QGstBin::createFromPipelineDescription(device.id(), /*name=*/nullptr,
+ /* ghostUnlinkedPads=*/true);
+}
+
+bool QGstreamerCustomCamera::isActive() const
+{
+ return m_active;
+}
+
+void QGstreamerCustomCamera::setActive(bool active)
+{
+ if (m_active == active)
+ return;
+
+ m_active = active;
+
+ emit activeChanged(active);
+}
+
#endif
QT_END_NAMESPACE
-
-#include "moc_qgstreamercamera_p.cpp"
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera_p.h b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera_p.h
index 74f12f918..f43c01f34 100644
--- a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera_p.h
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera_p.h
@@ -24,9 +24,16 @@
QT_BEGIN_NAMESPACE
-class QGstreamerCamera : public QPlatformCamera
+class QGstreamerCameraBase : public QPlatformCamera
+{
+public:
+ using QPlatformCamera::QPlatformCamera;
+
+ virtual QGstElement gstElement() const = 0;
+};
+
+class QGstreamerCamera : public QGstreamerCameraBase
{
- Q_OBJECT
public:
static QMaybe<QPlatformCamera *> create(QCamera *camera);
@@ -38,7 +45,7 @@ public:
void setCamera(const QCameraDevice &camera) override;
bool setCameraFormat(const QCameraFormat &format) override;
- QGstElement gstElement() const { return gstCameraBin; }
+ QGstElement gstElement() const override { return gstCameraBin; }
#if QT_CONFIG(gstreamer_photography)
GstPhotography *photography() const;
#endif
@@ -63,8 +70,7 @@ public:
void setColorTemperature(int temperature) override;
private:
- QGstreamerCamera(QGstElement videotestsrc, QGstElement capsFilter, QGstElement videoconvert,
- QGstElement videoscale, QCamera *camera);
+ QGstreamerCamera(QCamera *camera);
void updateCameraProperties();
@@ -118,12 +124,29 @@ private:
QGstElement gstDecode;
QGstElement gstVideoConvert;
QGstElement gstVideoScale;
- QGstPipeline gstPipeline;
bool m_active = false;
QString m_v4l2DevicePath;
};
+class QGstreamerCustomCamera : public QGstreamerCameraBase
+{
+public:
+ explicit QGstreamerCustomCamera(QCamera *);
+ explicit QGstreamerCustomCamera(QCamera *, QGstElement element);
+
+ QGstElement gstElement() const override { return gstCamera; }
+ void setCamera(const QCameraDevice &) override;
+
+ bool isActive() const override;
+ void setActive(bool) override;
+
+private:
+ QGstElement gstCamera;
+ bool m_active{};
+ const bool m_userProvidedGstElement;
+};
+
QT_END_NAMESPACE
#endif // QGSTREAMERCAMERACONTROL_H
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture.cpp b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture.cpp
index 3eee3c800..9c21dc083 100644
--- a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture.cpp
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture.cpp
@@ -3,13 +3,15 @@
#include "qgstreamerimagecapture_p.h"
-#include <QtMultimedia/private/qplatformcamera_p.h>
-#include <QtMultimedia/private/qplatformimagecapture_p.h>
#include <QtMultimedia/qvideoframeformat.h>
#include <QtMultimedia/private/qmediastoragelocation_p.h>
+#include <QtMultimedia/private/qplatformcamera_p.h>
+#include <QtMultimedia/private/qplatformimagecapture_p.h>
+#include <QtMultimedia/private/qvideoframe_p.h>
#include <QtCore/qdebug.h>
#include <QtCore/qdir.h>
#include <QtCore/qstandardpaths.h>
+#include <QtCore/qcoreapplication.h>
#include <QtCore/qloggingcategory.h>
#include <common/qgstreamermetadata_p.h>
@@ -20,37 +22,96 @@
QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLcImageCaptureGst, "qt.multimedia.imageCapture")
+namespace {
+Q_LOGGING_CATEGORY(qLcImageCaptureGst, "qt.multimedia.imageCapture")
-QMaybe<QPlatformImageCapture *> QGstreamerImageCapture::create(QImageCapture *parent)
+struct ThreadPoolSingleton
{
- QGstElement videoconvert =
- QGstElement::createFromFactory("videoconvert", "imageCaptureConvert");
- if (!videoconvert)
- return errorMessageCannotFindElement("videoconvert");
+ QObject m_context;
+ QMutex m_poolMutex;
+ QThreadPool *m_instance{};
+ bool m_appUnderDestruction = false;
+
+ QThreadPool *get(const QMutexLocker<QMutex> &)
+ {
+ if (m_instance)
+ return m_instance;
+ if (m_appUnderDestruction || !qApp)
+ return nullptr;
+
+ using namespace std::chrono;
+
+ m_instance = new QThreadPool(qApp);
+ m_instance->setMaxThreadCount(1); // 1 thread;
+ static constexpr auto expiryTimeout = minutes(5);
+ m_instance->setExpiryTimeout(round<milliseconds>(expiryTimeout).count());
+
+ QObject::connect(qApp, &QCoreApplication::aboutToQuit, &m_context, [&] {
+ // we need to make sure that thread-local QRhi is destroyed before the application to
+ // prevent QTBUG-124189
+ QMutexLocker guard(&m_poolMutex);
+ delete m_instance;
+ m_instance = {};
+ m_appUnderDestruction = true;
+ });
+
+ QObject::connect(qApp, &QCoreApplication::destroyed, &m_context, [&] {
+ m_appUnderDestruction = false;
+ });
+ return m_instance;
+ }
+
+ template <typename Functor>
+ QFuture<void> run(Functor &&f)
+ {
+ QMutexLocker guard(&m_poolMutex);
+ QThreadPool *pool = get(guard);
+ if (!pool)
+ return QFuture<void>{};
+
+ return QtConcurrent::run(pool, std::forward<Functor>(f));
+ }
+};
+
+ThreadPoolSingleton s_threadPoolSingleton;
- QGstElement jpegenc = QGstElement::createFromFactory("jpegenc", "jpegEncoder");
- if (!jpegenc)
- return errorMessageCannotFindElement("jpegenc");
+}; // namespace
- QGstElement jifmux = QGstElement::createFromFactory("jifmux", "jpegMuxer");
- if (!jifmux)
- return errorMessageCannotFindElement("jifmux");
+QMaybe<QPlatformImageCapture *> QGstreamerImageCapture::create(QImageCapture *parent)
+{
+ static const auto error = qGstErrorMessageIfElementsNotAvailable(
+ "queue", "capsfilter", "videoconvert", "jpegenc", "jifmux", "fakesink");
+ if (error)
+ return *error;
- return new QGstreamerImageCapture(videoconvert, jpegenc, jifmux, parent);
+ return new QGstreamerImageCapture(parent);
}
-QGstreamerImageCapture::QGstreamerImageCapture(QGstElement videoconvert, QGstElement jpegenc,
- QGstElement jifmux, QImageCapture *parent)
+QGstreamerImageCapture::QGstreamerImageCapture(QImageCapture *parent)
: QPlatformImageCapture(parent),
QGstreamerBufferProbe(ProbeBuffers),
- videoConvert(std::move(videoconvert)),
- encoder(std::move(jpegenc)),
- muxer(std::move(jifmux))
+ bin{
+ QGstBin::create("imageCaptureBin"),
+ },
+ queue{
+ QGstElement::createFromFactory("queue", "imageCaptureQueue"),
+ },
+ filter{
+ QGstElement::createFromFactory("capsfilter", "filter"),
+ },
+ videoConvert{
+ QGstElement::createFromFactory("videoconvert", "imageCaptureConvert"),
+ },
+ encoder{
+ QGstElement::createFromFactory("jpegenc", "jpegEncoder"),
+ },
+ muxer{
+ QGstElement::createFromFactory("jifmux", "jpegMuxer"),
+ },
+ sink{
+ QGstElement::createFromFactory("fakesink", "imageCaptureSink"),
+ }
{
- bin = QGstBin::create("imageCaptureBin");
-
- queue = QGstElement::createFromFactory("queue", "imageCaptureQueue");
// configures the queue to be fast, lightweight and non blocking
queue.set("leaky", 2 /*downstream*/);
queue.set("silent", true);
@@ -58,8 +119,6 @@ QGstreamerImageCapture::QGstreamerImageCapture(QGstElement videoconvert, QGstEle
queue.set("max-size-bytes", uint(0));
queue.set("max-size-time", quint64(0));
- sink = QGstElement::createFromFactory("fakesink", "imageCaptureSink");
- filter = QGstElement::createFromFactory("capsfilter", "filter");
// imageCaptureSink do not wait for a preroll buffer when going READY -> PAUSED
// as no buffer will arrive until capture() is called
sink.set("async", false);
@@ -71,22 +130,34 @@ QGstreamerImageCapture::QGstreamerImageCapture(QGstElement videoconvert, QGstEle
addProbeToPad(queue.staticPad("src").pad(), false);
sink.set("signal-handoffs", true);
- g_signal_connect(sink.object(), "handoff", G_CALLBACK(&QGstreamerImageCapture::saveImageFilter), this);
+ m_handoffConnection = sink.connect("handoff", G_CALLBACK(&saveImageFilter), this);
}
QGstreamerImageCapture::~QGstreamerImageCapture()
{
bin.setStateSync(GST_STATE_NULL);
+
+ // wait for pending futures
+ auto pendingFutures = [&] {
+ QMutexLocker guard(&m_mutex);
+ return std::move(m_pendingFutures);
+ }();
+
+ for (QFuture<void> &pendingImage : pendingFutures)
+ pendingImage.waitForFinished();
}
bool QGstreamerImageCapture::isReadyForCapture() const
{
+ QMutexLocker guard(&m_mutex);
return m_session && !passImage && cameraActive;
}
int QGstreamerImageCapture::capture(const QString &fileName)
{
- QString path = QMediaStorageLocation::generateFileName(fileName, QStandardPaths::PicturesLocation, QLatin1String("jpg"));
+ using namespace Qt::Literals;
+ QString path = QMediaStorageLocation::generateFileName(
+ fileName, QStandardPaths::PicturesLocation, u"jpg"_s);
return doCapture(path);
}
@@ -98,44 +169,41 @@ int QGstreamerImageCapture::captureToBuffer()
int QGstreamerImageCapture::doCapture(const QString &fileName)
{
qCDebug(qLcImageCaptureGst) << "do capture";
- if (!m_session) {
- //emit error in the next event loop,
- //so application can associate it with returned request id.
- QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
- Q_ARG(int, -1),
- Q_ARG(int, QImageCapture::ResourceError),
- Q_ARG(QString, QPlatformImageCapture::msgImageCaptureNotSet()));
-
- qCDebug(qLcImageCaptureGst) << "error 1";
- return -1;
- }
- if (!m_session->camera()) {
- //emit error in the next event loop,
- //so application can associate it with returned request id.
- QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
- Q_ARG(int, -1),
- Q_ARG(int, QImageCapture::ResourceError),
- Q_ARG(QString,tr("No camera available.")));
-
- qCDebug(qLcImageCaptureGst) << "error 2";
- return -1;
- }
- if (passImage) {
- //emit error in the next event loop,
- //so application can associate it with returned request id.
- QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
- Q_ARG(int, -1),
- Q_ARG(int, QImageCapture::NotReadyError),
- Q_ARG(QString, QPlatformImageCapture::msgCameraNotReady()));
-
- qCDebug(qLcImageCaptureGst) << "error 3";
- return -1;
- }
- m_lastId++;
- pendingImages.enqueue({m_lastId, fileName, QMediaMetaData{}});
- // let one image pass the pipeline
- passImage = true;
+ {
+ QMutexLocker guard(&m_mutex);
+ if (!m_session) {
+ invokeDeferred([this] {
+ emit error(-1, QImageCapture::ResourceError,
+ QPlatformImageCapture::msgImageCaptureNotSet());
+ });
+
+ qCDebug(qLcImageCaptureGst) << "error 1";
+ return -1;
+ }
+ if (!m_session->camera()) {
+ invokeDeferred([this] {
+ emit error(-1, QImageCapture::ResourceError, tr("No camera available."));
+ });
+
+ qCDebug(qLcImageCaptureGst) << "error 2";
+ return -1;
+ }
+ if (passImage) {
+ invokeDeferred([this] {
+ emit error(-1, QImageCapture::NotReadyError,
+ QPlatformImageCapture::msgCameraNotReady());
+ });
+
+ qCDebug(qLcImageCaptureGst) << "error 3";
+ return -1;
+ }
+ m_lastId++;
+
+ pendingImages.enqueue({ m_lastId, fileName, QMediaMetaData{} });
+ // let one image pass the pipeline
+ passImage = true;
+ }
emit readyForCaptureChanged(false);
return m_lastId;
@@ -143,32 +211,48 @@ int QGstreamerImageCapture::doCapture(const QString &fileName)
void QGstreamerImageCapture::setResolution(const QSize &resolution)
{
- auto padCaps = QGstCaps(gst_pad_get_current_caps(bin.staticPad("sink").pad()), QGstCaps::HasRef);
+ QGstCaps padCaps = bin.staticPad("sink").currentCaps();
if (padCaps.isNull()) {
qDebug() << "Camera not ready";
return;
}
- auto caps = QGstCaps(gst_caps_copy(padCaps.caps()), QGstCaps::HasRef);
- if (caps.isNull()) {
+ QGstCaps caps = padCaps.copy();
+ if (caps.isNull())
return;
- }
+
gst_caps_set_simple(caps.caps(), "width", G_TYPE_INT, resolution.width(), "height", G_TYPE_INT,
resolution.height(), nullptr);
filter.set("caps", caps);
}
+// HACK: gcc-10 and earlier reject [=,this] when building with c++17
+#if __cplusplus >= 202002L
+# define EQ_THIS_CAPTURE =, this
+#else
+# define EQ_THIS_CAPTURE =
+#endif
+
bool QGstreamerImageCapture::probeBuffer(GstBuffer *buffer)
{
+ QMutexLocker guard(&m_mutex);
+
if (!passImage)
return false;
qCDebug(qLcImageCaptureGst) << "probe buffer";
- passImage = false;
+ QGstBufferHandle bufferHandle{
+ buffer,
+ QGstBufferHandle::NeedsRef,
+ };
- emit readyForCaptureChanged(isReadyForCapture());
+ passImage = false;
- auto caps = QGstCaps(gst_pad_get_current_caps(bin.staticPad("sink").pad()), QGstCaps::HasRef);
+ bool ready = isReadyForCapture();
+ invokeDeferred([this, ready] {
+ emit readyForCaptureChanged(ready);
+ });
+ QGstCaps caps = bin.staticPad("sink").currentCaps();
auto memoryFormat = caps.memoryFormat();
GstVideoInfo previewInfo;
@@ -177,40 +261,60 @@ bool QGstreamerImageCapture::probeBuffer(GstBuffer *buffer)
if (optionalFormatAndVideoInfo)
std::tie(fmt, previewInfo) = std::move(*optionalFormatAndVideoInfo);
- auto *sink = m_session->gstreamerVideoSink();
- auto *gstBuffer = new QGstVideoBuffer(buffer, previewInfo, sink, fmt, memoryFormat);
- QVideoFrame frame(gstBuffer, fmt);
- QImage img = frame.toImage();
- if (img.isNull()) {
- qDebug() << "received a null image";
- return true;
- }
-
- auto &imageData = pendingImages.head();
+ int futureId = futureIDAllocator += 1;
- emit imageExposed(imageData.id);
+ // ensure QVideoFrame::toImage is executed on a worker thread that is joined before the
+ // qApplication is destroyed
+ QFuture<void> future = s_threadPoolSingleton.run([EQ_THIS_CAPTURE]() mutable {
+ QMutexLocker guard(&m_mutex);
+ auto scopeExit = qScopeGuard([&] {
+ m_pendingFutures.remove(futureId);
+ });
- qCDebug(qLcImageCaptureGst) << "Image available!";
- emit imageAvailable(imageData.id, frame);
+ if (!m_session) {
+ qDebug() << "QGstreamerImageCapture::probeBuffer: no session";
+ return;
+ }
- emit imageCaptured(imageData.id, img);
+ auto *sink = m_session->gstreamerVideoSink();
+ auto gstBuffer = std::make_unique<QGstVideoBuffer>(std::move(bufferHandle), previewInfo,
+ sink, fmt, memoryFormat);
- QMediaMetaData metaData = this->metaData();
- metaData.insert(QMediaMetaData::Date, QDateTime::currentDateTime());
- metaData.insert(QMediaMetaData::Resolution, frame.size());
- imageData.metaData = metaData;
+ QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(gstBuffer), fmt);
+ QImage img = frame.toImage();
+ if (img.isNull()) {
+ qDebug() << "received a null image";
+ return;
+ }
- // ensure taginject injects this metaData
- const auto &md = static_cast<const QGstreamerMetaData &>(metaData);
- md.setMetaData(muxer.element());
+ QMediaMetaData imageMetaData = metaData();
+ imageMetaData.insert(QMediaMetaData::Resolution, frame.size());
+ pendingImages.head().metaData = std::move(imageMetaData);
+ PendingImage pendingImage = pendingImages.head();
+
+ invokeDeferred([this, pendingImage = std::move(pendingImage), frame = std::move(frame),
+ img = std::move(img)]() mutable {
+ emit imageExposed(pendingImage.id);
+ qCDebug(qLcImageCaptureGst) << "Image available!";
+ emit imageAvailable(pendingImage.id, frame);
+ emit imageCaptured(pendingImage.id, img);
+ emit imageMetadataAvailable(pendingImage.id, pendingImage.metaData);
+ });
+ });
+
+ if (!future.isValid()) // during qApplication shutdown the threadpool becomes unusable
+ return true;
- emit imageMetadataAvailable(imageData.id, metaData);
+ m_pendingFutures.insert(futureId, future);
return true;
}
+#undef EQ_THIS_CAPTURE
+
void QGstreamerImageCapture::setCaptureSession(QPlatformMediaCaptureSession *session)
{
+ QMutexLocker guard(&m_mutex);
QGstreamerMediaCapture *captureSession = static_cast<QGstreamerMediaCapture *>(session);
if (m_session == captureSession)
return;
@@ -231,10 +335,22 @@ void QGstreamerImageCapture::setCaptureSession(QPlatformMediaCaptureSession *ses
return;
}
- connect(m_session, &QPlatformMediaCaptureSession::cameraChanged, this, &QGstreamerImageCapture::onCameraChanged);
+ connect(m_session, &QPlatformMediaCaptureSession::cameraChanged, this,
+ &QGstreamerImageCapture::onCameraChanged);
onCameraChanged();
}
+void QGstreamerImageCapture::setMetaData(const QMediaMetaData &m)
+{
+ {
+ QMutexLocker guard(&m_mutex);
+ QPlatformImageCapture::setMetaData(m);
+ }
+
+ // ensure taginject injects this metaData
+ applyMetaDataToTagSetter(m, muxer);
+}
+
void QGstreamerImageCapture::cameraActiveChanged(bool active)
{
qCDebug(qLcImageCaptureGst) << "cameraActiveChanged" << cameraActive << active;
@@ -247,55 +363,70 @@ void QGstreamerImageCapture::cameraActiveChanged(bool active)
void QGstreamerImageCapture::onCameraChanged()
{
+ QMutexLocker guard(&m_mutex);
if (m_session->camera()) {
cameraActiveChanged(m_session->camera()->isActive());
- connect(m_session->camera(), &QPlatformCamera::activeChanged, this, &QGstreamerImageCapture::cameraActiveChanged);
+ connect(m_session->camera(), &QPlatformCamera::activeChanged, this,
+ &QGstreamerImageCapture::cameraActiveChanged);
} else {
cameraActiveChanged(false);
}
}
-gboolean QGstreamerImageCapture::saveImageFilter(GstElement *element,
- GstBuffer *buffer,
- GstPad *pad,
- void *appdata)
+gboolean QGstreamerImageCapture::saveImageFilter(GstElement *, GstBuffer *buffer, GstPad *,
+ QGstreamerImageCapture *capture)
{
- Q_UNUSED(element);
- Q_UNUSED(pad);
- QGstreamerImageCapture *capture = static_cast<QGstreamerImageCapture *>(appdata);
+ capture->saveBufferToImage(buffer);
+ return true;
+}
- capture->passImage = false;
+void QGstreamerImageCapture::saveBufferToImage(GstBuffer *buffer)
+{
+ QMutexLocker guard(&m_mutex);
+ passImage = false;
- if (capture->pendingImages.isEmpty()) {
- return true;
- }
+ if (pendingImages.isEmpty())
+ return;
- auto imageData = capture->pendingImages.dequeue();
- if (imageData.filename.isEmpty()) {
- return true;
- }
+ PendingImage imageData = pendingImages.dequeue();
+ if (imageData.filename.isEmpty())
+ return;
- qCDebug(qLcImageCaptureGst) << "saving image as" << imageData.filename;
+ int id = futureIDAllocator++;
+ QGstBufferHandle bufferHandle{
+ buffer,
+ QGstBufferHandle::NeedsRef,
+ };
+
+ QFuture<void> saveImageFuture = QtConcurrent::run([this, imageData, bufferHandle,
+ id]() mutable {
+ auto cleanup = qScopeGuard([&] {
+ QMutexLocker guard(&m_mutex);
+ m_pendingFutures.remove(id);
+ });
+
+ qCDebug(qLcImageCaptureGst) << "saving image as" << imageData.filename;
+
+ QFile f(imageData.filename);
+ if (!f.open(QFile::WriteOnly)) {
+ qCDebug(qLcImageCaptureGst) << " could not open image file for writing";
+ return;
+ }
- QFile f(imageData.filename);
- if (f.open(QFile::WriteOnly)) {
GstMapInfo info;
+ GstBuffer *buffer = bufferHandle.get();
if (gst_buffer_map(buffer, &info, GST_MAP_READ)) {
f.write(reinterpret_cast<const char *>(info.data), info.size);
gst_buffer_unmap(buffer, &info);
}
f.close();
- static QMetaMethod savedSignal = QMetaMethod::fromSignal(&QGstreamerImageCapture::imageSaved);
- savedSignal.invoke(capture,
- Qt::QueuedConnection,
- Q_ARG(int, imageData.id),
- Q_ARG(QString, imageData.filename));
- } else {
- qCDebug(qLcImageCaptureGst) << " could not open image file for writing";
- }
+ QMetaObject::invokeMethod(this, [this, imageData = std::move(imageData)]() mutable {
+ emit imageSaved(imageData.id, imageData.filename);
+ });
+ });
- return TRUE;
+ m_pendingFutures.insert(id, saveImageFuture);
}
QImageEncoderSettings QGstreamerImageCapture::imageSettings() const
@@ -307,9 +438,9 @@ void QGstreamerImageCapture::setImageSettings(const QImageEncoderSettings &setti
{
if (m_settings != settings) {
QSize resolution = settings.resolution();
- if (m_settings.resolution() != resolution && !resolution.isEmpty()) {
+ if (m_settings.resolution() != resolution && !resolution.isEmpty())
setResolution(resolution);
- }
+
m_settings = settings;
}
}
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture_p.h b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture_p.h
index 3d8636cbe..04a7c00b4 100644
--- a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture_p.h
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture_p.h
@@ -15,10 +15,12 @@
// We mean it.
//
-#include <private/qplatformimagecapture_p.h>
-#include <private/qmultimediautils_p.h>
+#include <QtMultimedia/private/qplatformimagecapture_p.h>
+#include <QtMultimedia/private/qmultimediautils_p.h>
-#include <qqueue.h>
+#include <QtCore/qmutex.h>
+#include <QtCore/qqueue.h>
+#include <QtConcurrent/QtConcurrentRun>
#include <common/qgst_p.h>
#include <common/qgstreamerbufferprobe_p.h>
@@ -28,9 +30,9 @@
QT_BEGIN_NAMESPACE
class QGstreamerImageCapture : public QPlatformImageCapture, private QGstreamerBufferProbe
-
{
Q_OBJECT
+
public:
static QMaybe<QPlatformImageCapture *> create(QImageCapture *parent);
virtual ~QGstreamerImageCapture();
@@ -48,18 +50,24 @@ public:
QGstElement gstElement() const { return bin; }
+ void setMetaData(const QMediaMetaData &m) override;
+
public Q_SLOTS:
void cameraActiveChanged(bool active);
void onCameraChanged();
private:
- QGstreamerImageCapture(QGstElement videoconvert, QGstElement jpegenc, QGstElement jifmux,
- QImageCapture *parent);
+ QGstreamerImageCapture(QImageCapture *parent);
void setResolution(const QSize &resolution);
int doCapture(const QString &fileName);
- static gboolean saveImageFilter(GstElement *element, GstBuffer *buffer, GstPad *pad, void *appdata);
+ static gboolean saveImageFilter(GstElement *element, GstBuffer *buffer, GstPad *pad,
+ QGstreamerImageCapture *capture);
+
+ void saveBufferToImage(GstBuffer *buffer);
+ mutable QRecursiveMutex
+ m_mutex; // guard all elements accessed from probeBuffer/saveBufferToImage
QGstreamerMediaCapture *m_session = nullptr;
int m_lastId = 0;
QImageEncoderSettings m_settings;
@@ -83,6 +91,17 @@ private:
bool passImage = false;
bool cameraActive = false;
+
+ QGObjectHandlerScopedConnection m_handoffConnection;
+
+ QMap<int, QFuture<void>> m_pendingFutures;
+ int futureIDAllocator = 0;
+
+ template <typename Functor>
+ void invokeDeferred(Functor &&fn)
+ {
+ QMetaObject::invokeMethod(this, std::forward<decltype(fn)>(fn), Qt::QueuedConnection);
+ };
};
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture.cpp b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture.cpp
index 839187a9d..7ecbb07d7 100644
--- a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture.cpp
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture.cpp
@@ -24,32 +24,25 @@ static void linkTeeToPad(QGstElement tee, QGstPad sink)
source.link(sink);
}
-static void unlinkTeeFromPad(QGstElement tee, QGstPad sink)
-{
- if (tee.isNull() || sink.isNull())
- return;
-
- auto source = sink.peer();
- source.unlink(sink);
-
- tee.releaseRequestPad(source);
-}
-
QMaybe<QPlatformMediaCaptureSession *> QGstreamerMediaCapture::create()
{
auto videoOutput = QGstreamerVideoOutput::create();
if (!videoOutput)
return videoOutput.error();
+ static const auto error = qGstErrorMessageIfElementsNotAvailable("tee", "capsfilter");
+ if (error)
+ return *error;
+
return new QGstreamerMediaCapture(videoOutput.value());
}
QGstreamerMediaCapture::QGstreamerMediaCapture(QGstreamerVideoOutput *videoOutput)
- : gstPipeline(QGstPipeline::create("mediaCapturePipeline")), gstVideoOutput(videoOutput)
+ : capturePipeline(QGstPipeline::create("mediaCapturePipeline")), gstVideoOutput(videoOutput)
{
gstVideoOutput->setParent(this);
gstVideoOutput->setIsPreview();
- gstVideoOutput->setPipeline(gstPipeline);
+ gstVideoOutput->setPipeline(capturePipeline);
// Use system clock to drive all elements in the pipeline. Otherwise,
// the clock is sourced from the elements (e.g. from an audio source).
@@ -59,14 +52,14 @@ QGstreamerMediaCapture::QGstreamerMediaCapture(QGstreamerVideoOutput *videoOutpu
QGstClockHandle systemClock{
gst_system_clock_obtain(),
};
- gst_pipeline_use_clock(gstPipeline.pipeline(), systemClock.get());
+ gst_pipeline_use_clock(capturePipeline.pipeline(), systemClock.get());
// This is the recording pipeline with only live sources, thus the pipeline
// will be always in the playing state.
- gstPipeline.setState(GST_STATE_PLAYING);
- gstPipeline.setInStoppedState(false);
+ capturePipeline.setState(GST_STATE_PLAYING);
+ capturePipeline.setInStoppedState(false);
- gstPipeline.dumpGraph("initial");
+ capturePipeline.dumpGraph("initial");
}
QGstreamerMediaCapture::~QGstreamerMediaCapture()
@@ -74,7 +67,7 @@ QGstreamerMediaCapture::~QGstreamerMediaCapture()
setMediaRecorder(nullptr);
setImageCapture(nullptr);
setCamera(nullptr);
- gstPipeline.setStateSync(GST_STATE_NULL);
+ capturePipeline.setStateSync(GST_STATE_NULL);
}
QPlatformCamera *QGstreamerMediaCapture::camera()
@@ -84,7 +77,7 @@ QPlatformCamera *QGstreamerMediaCapture::camera()
void QGstreamerMediaCapture::setCamera(QPlatformCamera *platformCamera)
{
- QGstreamerCamera *camera = static_cast<QGstreamerCamera *>(platformCamera);
+ auto *camera = static_cast<QGstreamerCameraBase *>(platformCamera);
if (gstCamera == camera)
return;
@@ -97,7 +90,7 @@ void QGstreamerMediaCapture::setCamera(QPlatformCamera *platformCamera)
gstCamera = camera;
if (gstCamera) {
- gstCameraActiveConnection = QObject::connect(camera, &QGstreamerCamera::activeChanged, this,
+ gstCameraActiveConnection = QObject::connect(camera, &QPlatformCamera::activeChanged, this,
&QGstreamerMediaCapture::setCameraActive);
if (gstCamera->isActive())
setCameraActive(true);
@@ -108,13 +101,13 @@ void QGstreamerMediaCapture::setCamera(QPlatformCamera *platformCamera)
void QGstreamerMediaCapture::setCameraActive(bool activate)
{
- gstPipeline.modifyPipelineWhileNotRunning([&] {
+ capturePipeline.modifyPipelineWhileNotRunning([&] {
if (activate) {
QGstElement cameraElement = gstCamera->gstElement();
gstVideoTee = QGstElement::createFromFactory("tee", "videotee");
gstVideoTee.set("allow-not-linked", true);
- gstPipeline.add(gstVideoOutput->gstElement(), cameraElement, gstVideoTee);
+ capturePipeline.add(gstVideoOutput->gstElement(), cameraElement, gstVideoTee);
linkTeeToPad(gstVideoTee, encoderVideoSink);
linkTeeToPad(gstVideoTee, gstVideoOutput->gstElement().staticPad("sink"));
@@ -122,21 +115,24 @@ void QGstreamerMediaCapture::setCameraActive(bool activate)
qLinkGstElements(cameraElement, gstVideoTee);
- gstPipeline.syncChildrenState();
+ capturePipeline.syncChildrenState();
} else {
- unlinkTeeFromPad(gstVideoTee, encoderVideoSink);
- unlinkTeeFromPad(gstVideoTee, imageCaptureSink);
+ if (encoderVideoCapsFilter)
+ qUnlinkGstElements(gstVideoTee, encoderVideoCapsFilter);
+ if (m_imageCapture)
+ qUnlinkGstElements(gstVideoTee, m_imageCapture->gstElement());
auto camera = gstCamera->gstElement();
- gstPipeline.stopAndRemoveElements(camera, gstVideoTee, gstVideoOutput->gstElement());
+ capturePipeline.stopAndRemoveElements(camera, gstVideoTee,
+ gstVideoOutput->gstElement());
gstVideoTee = {};
gstCamera->setCaptureSession(nullptr);
}
});
- gstPipeline.dumpGraph("camera");
+ capturePipeline.dumpGraph("camera");
}
QPlatformImageCapture *QGstreamerMediaCapture::imageCapture()
@@ -150,10 +146,10 @@ void QGstreamerMediaCapture::setImageCapture(QPlatformImageCapture *imageCapture
if (m_imageCapture == control)
return;
- gstPipeline.modifyPipelineWhileNotRunning([&] {
+ capturePipeline.modifyPipelineWhileNotRunning([&] {
if (m_imageCapture) {
- unlinkTeeFromPad(gstVideoTee, imageCaptureSink);
- gstPipeline.stopAndRemoveElements(m_imageCapture->gstElement());
+ qUnlinkGstElements(gstVideoTee, m_imageCapture->gstElement());
+ capturePipeline.stopAndRemoveElements(m_imageCapture->gstElement());
imageCaptureSink = {};
m_imageCapture->setCaptureSession(nullptr);
}
@@ -161,14 +157,14 @@ void QGstreamerMediaCapture::setImageCapture(QPlatformImageCapture *imageCapture
m_imageCapture = control;
if (m_imageCapture) {
imageCaptureSink = m_imageCapture->gstElement().staticPad("sink");
- gstPipeline.add(m_imageCapture->gstElement());
+ capturePipeline.add(m_imageCapture->gstElement());
m_imageCapture->gstElement().syncStateWithParent();
linkTeeToPad(gstVideoTee, imageCaptureSink);
m_imageCapture->setCaptureSession(this);
}
});
- gstPipeline.dumpGraph("imageCapture");
+ capturePipeline.dumpGraph("imageCapture");
emit imageCaptureChanged();
}
@@ -186,7 +182,7 @@ void QGstreamerMediaCapture::setMediaRecorder(QPlatformMediaRecorder *recorder)
m_mediaEncoder->setCaptureSession(this);
emit encoderChanged();
- gstPipeline.dumpGraph("encoder");
+ capturePipeline.dumpGraph("encoder");
}
QPlatformMediaRecorder *QGstreamerMediaCapture::mediaRecorder()
@@ -196,16 +192,16 @@ QPlatformMediaRecorder *QGstreamerMediaCapture::mediaRecorder()
void QGstreamerMediaCapture::linkEncoder(QGstPad audioSink, QGstPad videoSink)
{
- gstPipeline.modifyPipelineWhileNotRunning([&] {
+ capturePipeline.modifyPipelineWhileNotRunning([&] {
if (!gstVideoTee.isNull() && !videoSink.isNull()) {
- auto caps = gst_pad_get_current_caps(gstVideoTee.sink().pad());
+ QGstCaps caps = gstVideoTee.sink().currentCaps();
encoderVideoCapsFilter =
QGstElement::createFromFactory("capsfilter", "encoderVideoCapsFilter");
Q_ASSERT(encoderVideoCapsFilter);
- encoderVideoCapsFilter.set("caps", QGstCaps(caps, QGstCaps::HasRef));
+ encoderVideoCapsFilter.set("caps", caps);
- gstPipeline.add(encoderVideoCapsFilter);
+ capturePipeline.add(encoderVideoCapsFilter);
encoderVideoCapsFilter.src().link(videoSink);
linkTeeToPad(gstVideoTee, encoderVideoCapsFilter.sink());
@@ -213,14 +209,14 @@ void QGstreamerMediaCapture::linkEncoder(QGstPad audioSink, QGstPad videoSink)
}
if (!gstAudioTee.isNull() && !audioSink.isNull()) {
- auto caps = gst_pad_get_current_caps(gstAudioTee.sink().pad());
+ QGstCaps caps = gstAudioTee.sink().currentCaps();
encoderAudioCapsFilter =
QGstElement::createFromFactory("capsfilter", "encoderAudioCapsFilter");
Q_ASSERT(encoderAudioCapsFilter);
- encoderAudioCapsFilter.set("caps", QGstCaps(caps, QGstCaps::HasRef));
+ encoderAudioCapsFilter.set("caps", caps);
- gstPipeline.add(encoderAudioCapsFilter);
+ capturePipeline.add(encoderAudioCapsFilter);
encoderAudioCapsFilter.src().link(audioSink);
linkTeeToPad(gstAudioTee, encoderAudioCapsFilter.sink());
@@ -231,18 +227,16 @@ void QGstreamerMediaCapture::linkEncoder(QGstPad audioSink, QGstPad videoSink)
void QGstreamerMediaCapture::unlinkEncoder()
{
- gstPipeline.modifyPipelineWhileNotRunning([&] {
- if (!encoderVideoCapsFilter.isNull()) {
- encoderVideoCapsFilter.src().unlinkPeer();
- unlinkTeeFromPad(gstVideoTee, encoderVideoCapsFilter.sink());
- gstPipeline.stopAndRemoveElements(encoderVideoCapsFilter);
+ capturePipeline.modifyPipelineWhileNotRunning([&] {
+ if (encoderVideoCapsFilter) {
+ qUnlinkGstElements(gstVideoTee, encoderVideoCapsFilter);
+ capturePipeline.stopAndRemoveElements(encoderVideoCapsFilter);
encoderVideoCapsFilter = {};
}
- if (!encoderAudioCapsFilter.isNull()) {
- encoderAudioCapsFilter.src().unlinkPeer();
- unlinkTeeFromPad(gstAudioTee, encoderAudioCapsFilter.sink());
- gstPipeline.stopAndRemoveElements(encoderAudioCapsFilter);
+ if (encoderAudioCapsFilter) {
+ qUnlinkGstElements(gstAudioTee, encoderAudioCapsFilter);
+ capturePipeline.stopAndRemoveElements(encoderAudioCapsFilter);
encoderAudioCapsFilter = {};
}
@@ -251,22 +245,27 @@ void QGstreamerMediaCapture::unlinkEncoder()
});
}
+const QGstPipeline &QGstreamerMediaCapture::pipeline() const
+{
+ return capturePipeline;
+}
+
void QGstreamerMediaCapture::setAudioInput(QPlatformAudioInput *input)
{
if (gstAudioInput == input)
return;
- gstPipeline.modifyPipelineWhileNotRunning([&] {
+ capturePipeline.modifyPipelineWhileNotRunning([&] {
if (gstAudioInput) {
- unlinkTeeFromPad(gstAudioTee, encoderAudioSink);
+ if (encoderAudioCapsFilter)
+ qUnlinkGstElements(gstAudioTee, encoderAudioCapsFilter);
if (gstAudioOutput) {
- unlinkTeeFromPad(gstAudioTee, gstAudioOutput->gstElement().staticPad("sink"));
- gstPipeline.remove(gstAudioOutput->gstElement());
- gstAudioOutput->gstElement().setStateSync(GST_STATE_NULL);
+ qUnlinkGstElements(gstAudioTee, gstAudioOutput->gstElement());
+ capturePipeline.stopAndRemoveElements(gstAudioOutput->gstElement());
}
- gstPipeline.stopAndRemoveElements(gstAudioInput->gstElement(), gstAudioTee);
+ capturePipeline.stopAndRemoveElements(gstAudioInput->gstElement(), gstAudioTee);
gstAudioTee = {};
}
@@ -275,16 +274,16 @@ void QGstreamerMediaCapture::setAudioInput(QPlatformAudioInput *input)
Q_ASSERT(gstAudioTee.isNull());
gstAudioTee = QGstElement::createFromFactory("tee", "audiotee");
gstAudioTee.set("allow-not-linked", true);
- gstPipeline.add(gstAudioInput->gstElement(), gstAudioTee);
+ capturePipeline.add(gstAudioInput->gstElement(), gstAudioTee);
qLinkGstElements(gstAudioInput->gstElement(), gstAudioTee);
if (gstAudioOutput) {
- gstPipeline.add(gstAudioOutput->gstElement());
+ capturePipeline.add(gstAudioOutput->gstElement());
gstAudioOutput->gstElement().setState(GST_STATE_PLAYING);
linkTeeToPad(gstAudioTee, gstAudioOutput->gstElement().staticPad("sink"));
}
- gstPipeline.syncChildrenState();
+ capturePipeline.syncChildrenState();
linkTeeToPad(gstAudioTee, encoderAudioSink);
}
@@ -301,17 +300,17 @@ void QGstreamerMediaCapture::setAudioOutput(QPlatformAudioOutput *output)
if (gstAudioOutput == output)
return;
- gstPipeline.modifyPipelineWhileNotRunning([&] {
+ capturePipeline.modifyPipelineWhileNotRunning([&] {
if (gstAudioOutput && gstAudioInput) {
// If audio input is set, the output is in the pipeline
- unlinkTeeFromPad(gstAudioTee, gstAudioOutput->gstElement().staticPad("sink"));
- gstPipeline.stopAndRemoveElements(gstAudioOutput->gstElement());
+ qUnlinkGstElements(gstAudioTee, gstAudioOutput->gstElement());
+ capturePipeline.stopAndRemoveElements(gstAudioOutput->gstElement());
}
gstAudioOutput = static_cast<QGstreamerAudioOutput *>(output);
if (gstAudioOutput && gstAudioInput) {
- gstPipeline.add(gstAudioOutput->gstElement());
- gstPipeline.syncChildrenState();
+ capturePipeline.add(gstAudioOutput->gstElement());
+ capturePipeline.syncChildrenState();
linkTeeToPad(gstAudioTee, gstAudioOutput->gstElement().staticPad("sink"));
}
});
@@ -322,7 +321,6 @@ QGstreamerVideoSink *QGstreamerMediaCapture::gstreamerVideoSink() const
return gstVideoOutput ? gstVideoOutput->gstreamerVideoSink() : nullptr;
}
-
QT_END_NAMESPACE
#include "moc_qgstreamermediacapture_p.cpp"
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture_p.h b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture_p.h
index 6e93e8564..c44e31f0e 100644
--- a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture_p.h
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture_p.h
@@ -25,7 +25,7 @@
QT_BEGIN_NAMESPACE
-class QGstreamerCamera;
+class QGstreamerCameraBase;
class QGstreamerImageCapture;
class QGstreamerMediaEncoder;
class QGstreamerAudioInput;
@@ -59,7 +59,7 @@ public:
void linkEncoder(QGstPad audioSink, QGstPad videoSink);
void unlinkEncoder();
- QGstPipeline pipeline() const { return gstPipeline; }
+ const QGstPipeline &pipeline() const;
QGstreamerVideoSink *gstreamerVideoSink() const;
@@ -70,10 +70,10 @@ private:
friend QGstreamerMediaEncoder;
// Gst elements
- QGstPipeline gstPipeline;
+ QGstPipeline capturePipeline;
QGstreamerAudioInput *gstAudioInput = nullptr;
- QGstreamerCamera *gstCamera = nullptr;
+ QGstreamerCameraBase *gstCamera = nullptr;
QMetaObject::Connection gstCameraActiveConnection;
QGstElement gstAudioTee;
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder.cpp b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder.cpp
index 218026ede..4ec10ca84 100644
--- a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder.cpp
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder.cpp
@@ -40,10 +40,10 @@ QGstreamerMediaEncoder::QGstreamerMediaEncoder(QMediaRecorder *parent)
QGstreamerMediaEncoder::~QGstreamerMediaEncoder()
{
- if (!gstPipeline.isNull()) {
+ if (!capturePipeline.isNull()) {
finalize();
- gstPipeline.removeMessageFilter(this);
- gstPipeline.setStateSync(GST_STATE_NULL);
+ capturePipeline.removeMessageFilter(this);
+ capturePipeline.setStateSync(GST_STATE_NULL);
}
}
@@ -54,7 +54,7 @@ bool QGstreamerMediaEncoder::isLocationWritable(const QUrl &) const
void QGstreamerMediaEncoder::handleSessionError(QMediaRecorder::Error code, const QString &description)
{
- error(code, description);
+ updateError(code, description);
stop();
}
@@ -63,15 +63,12 @@ bool QGstreamerMediaEncoder::processBusMessage(const QGstreamerMessage &msg)
constexpr bool traceStateChange = false;
constexpr bool traceAllEvents = false;
- if (msg.isNull())
- return false;
-
if constexpr (traceAllEvents)
qCDebug(qLcMediaEncoderGst) << "received event:" << msg;
switch (msg.type()) {
case GST_MESSAGE_ELEMENT: {
- QGstStructure s = msg.structure();
+ QGstStructureView s = msg.structure();
if (s.name() == "GstBinForwarded")
return processBusMessage(s.getMessage());
@@ -87,11 +84,13 @@ bool QGstreamerMediaEncoder::processBusMessage(const QGstreamerMessage &msg)
}
case GST_MESSAGE_ERROR: {
+ qCDebug(qLcMediaEncoderGst)
+ << "received error:" << msg.source().name() << QCompactGstMessageAdaptor(msg);
+
QUniqueGErrorHandle err;
QGString debug;
gst_message_parse_error(msg.message(), &err, &debug);
- qCDebug(qLcMediaEncoderGst) << "received error:" << msg.source().name() << err << debug;
- error(QMediaRecorder::ResourceError, QString::fromUtf8(err.get()->message));
+ updateError(QMediaRecorder::ResourceError, QString::fromUtf8(err.get()->message));
if (!m_finalizing)
stop();
finalize();
@@ -99,14 +98,9 @@ bool QGstreamerMediaEncoder::processBusMessage(const QGstreamerMessage &msg)
}
case GST_MESSAGE_STATE_CHANGED: {
- if constexpr (traceStateChange) {
- GstState oldState;
- GstState newState;
- GstState pending;
- gst_message_parse_state_changed(msg.message(), &oldState, &newState, &pending);
- qCDebug(qLcMediaEncoderGst) << "received state change from" << msg.source().name()
- << oldState << newState << pending;
- }
+ if constexpr (traceStateChange)
+ qCDebug(qLcMediaEncoderGst)
+ << "received state change" << QCompactGstMessageAdaptor(msg);
return false;
}
@@ -268,7 +262,7 @@ void QGstreamerMediaEncoder::record(QMediaEncoderSettings &settings)
const auto hasAudio = m_session->audioInput() != nullptr;
if (!hasVideo && !hasAudio) {
- error(QMediaRecorder::ResourceError, QMediaRecorder::tr("No camera or audio input"));
+ updateError(QMediaRecorder::ResourceError, QMediaRecorder::tr("No camera or audio input"));
return;
}
@@ -316,10 +310,10 @@ void QGstreamerMediaEncoder::record(QMediaEncoderSettings &settings)
videoPauseControl.installOn(videoSink);
}
- gstPipeline.modifyPipelineWhileNotRunning([&] {
- gstPipeline.add(gstEncoder, gstFileSink);
+ capturePipeline.modifyPipelineWhileNotRunning([&] {
+ capturePipeline.add(gstEncoder, gstFileSink);
qLinkGstElements(gstEncoder, gstFileSink);
- m_metaData.setMetaData(gstEncoder.bin());
+ applyMetaDataToTagSetter(m_metaData, gstEncoder);
m_session->linkEncoder(audioSink, videoSink);
@@ -328,7 +322,7 @@ void QGstreamerMediaEncoder::record(QMediaEncoderSettings &settings)
});
signalDurationChangedTimer.start();
- gstPipeline.dumpGraph("recording");
+ capturePipeline.dumpGraph("recording");
durationChanged(0);
stateChanged(QMediaRecorder::RecordingState);
@@ -340,13 +334,14 @@ void QGstreamerMediaEncoder::pause()
if (!m_session || m_finalizing || state() != QMediaRecorder::RecordingState)
return;
signalDurationChangedTimer.stop();
- gstPipeline.dumpGraph("before-pause");
+ durationChanged(duration());
+ capturePipeline.dumpGraph("before-pause");
stateChanged(QMediaRecorder::PausedState);
}
void QGstreamerMediaEncoder::resume()
{
- gstPipeline.dumpGraph("before-resume");
+ capturePipeline.dumpGraph("before-resume");
if (!m_session || m_finalizing || state() != QMediaRecorder::PausedState)
return;
signalDurationChangedTimer.start();
@@ -357,6 +352,7 @@ void QGstreamerMediaEncoder::stop()
{
if (!m_session || m_finalizing || state() == QMediaRecorder::StoppedState)
return;
+ durationChanged(duration());
qCDebug(qLcMediaEncoderGst) << "stop";
m_finalizing = true;
m_session->unlinkEncoder();
@@ -373,7 +369,7 @@ void QGstreamerMediaEncoder::finalize()
qCDebug(qLcMediaEncoderGst) << "finalize";
- gstPipeline.stopAndRemoveElements(gstEncoder, gstFileSink);
+ capturePipeline.stopAndRemoveElements(gstEncoder, gstFileSink);
gstFileSink = {};
gstEncoder = {};
m_finalizing = false;
@@ -384,7 +380,7 @@ void QGstreamerMediaEncoder::setMetaData(const QMediaMetaData &metaData)
{
if (!m_session)
return;
- m_metaData = static_cast<const QGstreamerMetaData &>(metaData);
+ m_metaData = metaData;
}
QMediaMetaData QGstreamerMediaEncoder::metaData() const
@@ -402,21 +398,22 @@ void QGstreamerMediaEncoder::setCaptureSession(QPlatformMediaCaptureSession *ses
stop();
if (m_finalizing) {
QEventLoop loop;
- loop.connect(mediaRecorder(), SIGNAL(recorderStateChanged(RecorderState)), SLOT(quit()));
+ QObject::connect(mediaRecorder(), &QMediaRecorder::recorderStateChanged, &loop,
+ &QEventLoop::quit);
loop.exec();
}
- gstPipeline.removeMessageFilter(this);
- gstPipeline = {};
+ capturePipeline.removeMessageFilter(this);
+ capturePipeline = {};
}
m_session = captureSession;
if (!m_session)
return;
- gstPipeline = captureSession->gstPipeline;
- gstPipeline.set("message-forward", true);
- gstPipeline.installMessageFilter(this);
+ capturePipeline = captureSession->capturePipeline;
+ capturePipeline.set("message-forward", true);
+ capturePipeline.installMessageFilter(this);
}
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder_p.h b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder_p.h
index f570f069e..56e8c193b 100644
--- a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder_p.h
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder_p.h
@@ -76,10 +76,10 @@ private:
void finalize();
QGstreamerMediaCapture *m_session = nullptr;
- QGstreamerMetaData m_metaData;
+ QMediaMetaData m_metaData;
QTimer signalDurationChangedTimer;
- QGstPipeline gstPipeline;
+ QGstPipeline capturePipeline;
QGstBin gstEncoder;
QGstElement gstFileSink;
diff --git a/src/plugins/multimedia/gstreamer/qgstreamerformatinfo.cpp b/src/plugins/multimedia/gstreamer/qgstreamerformatinfo.cpp
index 86d59a9a8..a657fc52f 100644
--- a/src/plugins/multimedia/gstreamer/qgstreamerformatinfo.cpp
+++ b/src/plugins/multimedia/gstreamer/qgstreamerformatinfo.cpp
@@ -8,14 +8,15 @@
QT_BEGIN_NAMESPACE
-QMediaFormat::AudioCodec QGstreamerFormatInfo::audioCodecForCaps(QGstStructure structure)
+QMediaFormat::AudioCodec QGstreamerFormatInfo::audioCodecForCaps(QGstStructureView structure)
{
+ using namespace std::string_view_literals;
const char *name = structure.name().data();
- if (!name || strncmp(name, "audio/", 6))
+ if (!name || (strncmp(name, "audio/", 6) != 0))
return QMediaFormat::AudioCodec::Unspecified;
name += 6;
- if (!strcmp(name, "mpeg")) {
+ if (name == "mpeg"sv) {
auto version = structure["mpegversion"].toInt();
if (version == 1) {
auto layer = structure["layer"];
@@ -24,91 +25,120 @@ QMediaFormat::AudioCodec QGstreamerFormatInfo::audioCodecForCaps(QGstStructure s
}
if (version == 4)
return QMediaFormat::AudioCodec::AAC;
- } else if (!strcmp(name, "x-ac3")) {
+ return QMediaFormat::AudioCodec::Unspecified;
+ }
+ if (name == "x-ac3"sv)
return QMediaFormat::AudioCodec::AC3;
- } else if (!strcmp(name, "x-eac3")) {
+
+ if (name == "x-eac3"sv)
return QMediaFormat::AudioCodec::EAC3;
- } else if (!strcmp(name, "x-flac")) {
+
+ if (name == "x-flac"sv)
return QMediaFormat::AudioCodec::FLAC;
- } else if (!strcmp(name, "x-alac")) {
+
+ if (name == "x-alac"sv)
return QMediaFormat::AudioCodec::ALAC;
- } else if (!strcmp(name, "x-true-hd")) {
+
+ if (name == "x-true-hd"sv)
return QMediaFormat::AudioCodec::DolbyTrueHD;
- } else if (!strcmp(name, "x-vorbis")) {
+
+ if (name == "x-vorbis"sv)
return QMediaFormat::AudioCodec::Vorbis;
- } else if (!strcmp(name, "x-opus")) {
+
+ if (name == "x-opus"sv)
return QMediaFormat::AudioCodec::Opus;
- } else if (!strcmp(name, "x-wav")) {
+
+ if (name == "x-wav"sv)
return QMediaFormat::AudioCodec::Wave;
- } else if (!strcmp(name, "x-wma")) {
+
+ if (name == "x-wma"sv)
return QMediaFormat::AudioCodec::WMA;
- }
+
return QMediaFormat::AudioCodec::Unspecified;
}
-QMediaFormat::VideoCodec QGstreamerFormatInfo::videoCodecForCaps(QGstStructure structure)
+QMediaFormat::VideoCodec QGstreamerFormatInfo::videoCodecForCaps(QGstStructureView structure)
{
+ using namespace std::string_view_literals;
const char *name = structure.name().data();
- if (!name || strncmp(name, "video/", 6))
+ if (!name || (strncmp(name, "video/", 6) != 0))
return QMediaFormat::VideoCodec::Unspecified;
name += 6;
- if (!strcmp(name, "mpeg")) {
+ if (name == "mpeg"sv) {
auto version = structure["mpegversion"].toInt();
if (version == 1)
return QMediaFormat::VideoCodec::MPEG1;
- else if (version == 2)
+ if (version == 2)
return QMediaFormat::VideoCodec::MPEG2;
- else if (version == 4)
+ if (version == 4)
return QMediaFormat::VideoCodec::MPEG4;
- } else if (!strcmp(name, "x-h264")) {
+ return QMediaFormat::VideoCodec::Unspecified;
+ }
+ if (name == "x-h264"sv)
return QMediaFormat::VideoCodec::H264;
+
#if GST_CHECK_VERSION(1, 17, 0) // x265enc seems to be broken on 1.16 at least
- } else if (!strcmp(name, "x-h265")) {
+ if (name == "x-h265"sv)
return QMediaFormat::VideoCodec::H265;
#endif
- } else if (!strcmp(name, "x-vp8")) {
+
+ if (name == "x-vp8"sv)
return QMediaFormat::VideoCodec::VP8;
- } else if (!strcmp(name, "x-vp9")) {
+
+ if (name == "x-vp9"sv)
return QMediaFormat::VideoCodec::VP9;
- } else if (!strcmp(name, "x-av1")) {
+
+ if (name == "x-av1"sv)
return QMediaFormat::VideoCodec::AV1;
- } else if (!strcmp(name, "x-theora")) {
+
+ if (name == "x-theora"sv)
return QMediaFormat::VideoCodec::Theora;
- } else if (!strcmp(name, "x-jpeg")) {
+
+ if (name == "x-jpeg"sv)
return QMediaFormat::VideoCodec::MotionJPEG;
- } else if (!strcmp(name, "x-wmv")) {
+
+ if (name == "x-wmv"sv)
return QMediaFormat::VideoCodec::WMV;
- }
+
return QMediaFormat::VideoCodec::Unspecified;
}
-QMediaFormat::FileFormat QGstreamerFormatInfo::fileFormatForCaps(QGstStructure structure)
+QMediaFormat::FileFormat QGstreamerFormatInfo::fileFormatForCaps(QGstStructureView structure)
{
+ using namespace std::string_view_literals;
const char *name = structure.name().data();
- if (!strcmp(name, "video/x-ms-asf")) {
+ if (name == "video/x-ms-asf"sv)
return QMediaFormat::FileFormat::WMV;
- } else if (!strcmp(name, "video/x-msvideo")) {
+
+ if (name == "video/x-msvideo"sv)
return QMediaFormat::FileFormat::AVI;
- } else if (!strcmp(name, "video/x-matroska")) {
+
+ if (name == "video/x-matroska"sv)
return QMediaFormat::FileFormat::Matroska;
- } else if (!strcmp(name, "video/quicktime")) {
- auto variant = structure["variant"].toString();
+
+ if (name == "video/quicktime"sv) {
+ const char *variant = structure["variant"].toString();
if (!variant)
return QMediaFormat::FileFormat::QuickTime;
- else if (!strcmp(variant, "iso"))
+ if (variant == "iso"sv)
return QMediaFormat::FileFormat::MPEG4;
- } else if (!strcmp(name, "video/ogg")) {
+ }
+ if (name == "video/ogg"sv)
return QMediaFormat::FileFormat::Ogg;
- } else if (!strcmp(name, "video/webm")) {
+
+ if (name == "video/webm"sv)
return QMediaFormat::FileFormat::WebM;
- } else if (!strcmp(name, "audio/x-m4a")) {
+
+ if (name == "audio/x-m4a"sv)
return QMediaFormat::FileFormat::Mpeg4Audio;
- } else if (!strcmp(name, "audio/x-wav")) {
+
+ if (name == "audio/x-wav"sv)
return QMediaFormat::FileFormat::Wave;
- } else if (!strcmp(name, "audio/mpeg")) {
+
+ if (name == "audio/mpeg"sv) {
auto mpegversion = structure["mpegversion"].toInt();
if (mpegversion == 1) {
auto layer = structure["layer"];
@@ -116,23 +146,28 @@ QMediaFormat::FileFormat QGstreamerFormatInfo::fileFormatForCaps(QGstStructure s
return QMediaFormat::FileFormat::MP3;
}
}
+
return QMediaFormat::UnspecifiedFormat;
}
-QImageCapture::FileFormat QGstreamerFormatInfo::imageFormatForCaps(QGstStructure structure)
+QImageCapture::FileFormat QGstreamerFormatInfo::imageFormatForCaps(QGstStructureView structure)
{
+ using namespace std::string_view_literals;
const char *name = structure.name().data();
- if (!strcmp(name, "image/jpeg")) {
+ if (name == "image/jpeg"sv)
return QImageCapture::JPEG;
- } else if (!strcmp(name, "image/png")) {
+
+ if (name == "image/png"sv)
return QImageCapture::PNG;
- } else if (!strcmp(name, "image/webp")) {
+
+ if (name == "image/webp"sv)
return QImageCapture::WebP;
- } else if (!strcmp(name, "image/tiff")) {
+
+ if (name == "image/tiff"sv)
return QImageCapture::Tiff;
- }
+
return QImageCapture::UnspecifiedFormat;
}
@@ -155,7 +190,7 @@ static QPair<QList<QMediaFormat::AudioCodec>, QList<QMediaFormat::VideoCodec>> g
auto caps = QGstCaps(gst_static_caps_get(&padTemplate->static_caps), QGstCaps::HasRef);
for (int i = 0; i < caps.size(); i++) {
- QGstStructure structure = caps.at(i);
+ QGstStructureView structure = caps.at(i);
auto a = QGstreamerFormatInfo::audioCodecForCaps(structure);
if (a != QMediaFormat::AudioCodec::Unspecified && !audio.contains(a))
audio.append(a);
@@ -195,7 +230,7 @@ QList<QGstreamerFormatInfo::CodecMap> QGstreamerFormatInfo::getMuxerList(bool de
auto caps = QGstCaps(gst_static_caps_get(&padTemplate->static_caps), QGstCaps::HasRef);
for (int i = 0; i < caps.size(); i++) {
- QGstStructure structure = caps.at(i);
+ QGstStructureView structure = caps.at(i);
auto fmt = fileFormatForCaps(structure);
if (fmt != QMediaFormat::UnspecifiedFormat)
fileFormats.append(fmt);
@@ -218,7 +253,7 @@ QList<QGstreamerFormatInfo::CodecMap> QGstreamerFormatInfo::getMuxerList(bool de
bool acceptsRawAudio = false;
for (int i = 0; i < caps.size(); i++) {
- QGstStructure structure = caps.at(i);
+ QGstStructureView structure = caps.at(i);
if (structure.name() == "audio/x-raw")
acceptsRawAudio = true;
auto audio = audioCodecForCaps(structure);
@@ -280,7 +315,7 @@ static QList<QImageCapture::FileFormat> getImageFormatList()
QGstCaps caps = QGstCaps(gst_static_caps_get(&padTemplate->static_caps), QGstCaps::HasRef);
for (int i = 0; i < caps.size(); i++) {
- QGstStructure structure = caps.at(i);
+ QGstStructureView structure = caps.at(i);
auto f = QGstreamerFormatInfo::imageFormatForCaps(structure);
if (f != QImageCapture::UnspecifiedFormat) {
// qDebug() << structure.toString() << f;
diff --git a/src/plugins/multimedia/gstreamer/qgstreamerformatinfo_p.h b/src/plugins/multimedia/gstreamer/qgstreamerformatinfo_p.h
index def42b7ea..bba10edb9 100644
--- a/src/plugins/multimedia/gstreamer/qgstreamerformatinfo_p.h
+++ b/src/plugins/multimedia/gstreamer/qgstreamerformatinfo_p.h
@@ -31,10 +31,10 @@ public:
QGstCaps audioCaps(const QMediaFormat &f) const;
QGstCaps videoCaps(const QMediaFormat &f) const;
- static QMediaFormat::AudioCodec audioCodecForCaps(QGstStructure structure);
- static QMediaFormat::VideoCodec videoCodecForCaps(QGstStructure structure);
- static QMediaFormat::FileFormat fileFormatForCaps(QGstStructure structure);
- static QImageCapture::FileFormat imageFormatForCaps(QGstStructure structure);
+ static QMediaFormat::AudioCodec audioCodecForCaps(QGstStructureView structure);
+ static QMediaFormat::VideoCodec videoCodecForCaps(QGstStructureView structure);
+ static QMediaFormat::FileFormat fileFormatForCaps(QGstStructureView structure);
+ static QImageCapture::FileFormat imageFormatForCaps(QGstStructureView structure);
QList<CodecMap> getMuxerList(bool demuxer, QList<QMediaFormat::AudioCodec> audioCodecs, QList<QMediaFormat::VideoCodec> videoCodecs);
};
diff --git a/src/plugins/multimedia/gstreamer/qgstreamerintegration.cpp b/src/plugins/multimedia/gstreamer/qgstreamerintegration.cpp
index 4ee5b36e8..87c514f2e 100644
--- a/src/plugins/multimedia/gstreamer/qgstreamerintegration.cpp
+++ b/src/plugins/multimedia/gstreamer/qgstreamerintegration.cpp
@@ -4,6 +4,7 @@
#include <qgstreamerintegration_p.h>
#include <qgstreamerformatinfo_p.h>
#include <qgstreamervideodevices_p.h>
+#include <audio/qgstreameraudiodevice_p.h>
#include <audio/qgstreameraudiodecoder_p.h>
#include <common/qgstreameraudioinput_p.h>
#include <common/qgstreameraudiooutput_p.h>
@@ -15,42 +16,149 @@
#include <mediacapture/qgstreamermediaencoder_p.h>
#include <QtCore/qloggingcategory.h>
+#include <QtMultimedia/private/qmediaplayer_p.h>
+#include <QtMultimedia/private/qmediacapturesession_p.h>
+#include <QtMultimedia/private/qcameradevice_p.h>
QT_BEGIN_NAMESPACE
+static thread_local bool inCustomCameraConstruction = false;
+static thread_local QGstElement pendingCameraElement{};
+
+QGStreamerPlatformSpecificInterfaceImplementation::
+ ~QGStreamerPlatformSpecificInterfaceImplementation() = default;
+
+QAudioDevice QGStreamerPlatformSpecificInterfaceImplementation::makeCustomGStreamerAudioInput(
+ const QByteArray &gstreamerPipeline)
+{
+ return qMakeCustomGStreamerAudioInput(gstreamerPipeline);
+}
+
+QAudioDevice QGStreamerPlatformSpecificInterfaceImplementation::makeCustomGStreamerAudioOutput(
+ const QByteArray &gstreamerPipeline)
+{
+ return qMakeCustomGStreamerAudioOutput(gstreamerPipeline);
+}
+
+QCamera *QGStreamerPlatformSpecificInterfaceImplementation::makeCustomGStreamerCamera(
+ const QByteArray &gstreamerPipeline, QObject *parent)
+{
+ QCameraDevicePrivate *info = new QCameraDevicePrivate;
+ info->id = gstreamerPipeline;
+ QCameraDevice device = info->create();
+
+ inCustomCameraConstruction = true;
+ auto guard = qScopeGuard([] {
+ inCustomCameraConstruction = false;
+ });
+
+ return new QCamera(device, parent);
+}
+
+QCamera *
+QGStreamerPlatformSpecificInterfaceImplementation::makeCustomGStreamerCamera(GstElement *element,
+ QObject *parent)
+{
+ QCameraDevicePrivate *info = new QCameraDevicePrivate;
+ info->id = "Custom Camera from GstElement";
+ QCameraDevice device = info->create();
+
+ pendingCameraElement = QGstElement{
+ element,
+ QGstElement::NeedsRef,
+ };
+
+ inCustomCameraConstruction = true;
+ auto guard = qScopeGuard([] {
+ inCustomCameraConstruction = false;
+ Q_ASSERT(!pendingCameraElement);
+ });
+
+ return new QCamera(device, parent);
+}
+
+GstPipeline *QGStreamerPlatformSpecificInterfaceImplementation::gstPipeline(QMediaPlayer *player)
+{
+ auto *priv = reinterpret_cast<QMediaPlayerPrivate *>(QMediaPlayerPrivate::get(player));
+ if (!priv)
+ return nullptr;
+
+ QGstreamerMediaPlayer *gstreamerPlayer = dynamic_cast<QGstreamerMediaPlayer *>(priv->control);
+ return gstreamerPlayer ? gstreamerPlayer->pipeline().pipeline() : nullptr;
+}
+
+GstPipeline *
+QGStreamerPlatformSpecificInterfaceImplementation::gstPipeline(QMediaCaptureSession *session)
+{
+ auto *priv = QMediaCaptureSessionPrivate::get(session);
+ if (!priv)
+ return nullptr;
+
+ QGstreamerMediaCapture *gstreamerCapture =
+ dynamic_cast<QGstreamerMediaCapture *>(priv->captureSession.get());
+ return gstreamerCapture ? gstreamerCapture->pipeline().pipeline() : nullptr;
+}
+
Q_LOGGING_CATEGORY(lcGstreamer, "qt.multimedia.gstreamer")
+namespace {
+
+void rankDownPlugin(GstRegistry *reg, const char *name)
+{
+ QGstPluginFeatureHandle pluginFeature{
+ gst_registry_lookup_feature(reg, name),
+ QGstPluginFeatureHandle::HasRef,
+ };
+ if (pluginFeature)
+ gst_plugin_feature_set_rank(pluginFeature.get(), GST_RANK_PRIMARY - 1);
+}
+
+// https://gstreamer.freedesktop.org/documentation/vaapi/index.html
+constexpr auto vaapiPluginNames = {
+ "vaapidecodebin", "vaapih264dec", "vaapih264enc", "vaapih265dec",
+ "vaapijpegdec", "vaapijpegenc", "vaapimpeg2dec", "vaapipostproc",
+ "vaapisink", "vaapivp8dec", "vaapivp9dec",
+};
+
+// https://gstreamer.freedesktop.org/documentation/va/index.html
+constexpr auto vaPluginNames = {
+ "vaav1dec", "vacompositor", "vadeinterlace", "vah264dec", "vah264enc", "vah265dec",
+ "vajpegdec", "vampeg2dec", "vapostproc", "vavp8dec", "vavp9dec",
+};
+
+// https://gstreamer.freedesktop.org/documentation/nvcodec/index.html
+constexpr auto nvcodecPluginNames = {
+ "cudaconvert", "cudaconvertscale", "cudadownload", "cudaipcsink", "cudaipcsrc",
+ "cudascale", "cudaupload", "nvautogpuh264enc", "nvautogpuh265enc", "nvav1dec",
+ "nvcudah264enc", "nvcudah265enc", "nvd3d11h264enc", "nvd3d11h265enc", "nvh264dec",
+ "nvh264enc", "nvh265dec", "nvh265enc", "nvjpegdec", "nvjpegenc",
+ "nvmpeg2videodec", "nvmpeg4videodec", "nvmpegvideodec", "nvvp8dec", "nvvp9dec",
+};
+
+} // namespace
+
QGstreamerIntegration::QGstreamerIntegration()
: QPlatformMediaIntegration(QLatin1String("gstreamer"))
{
gst_init(nullptr, nullptr);
qCDebug(lcGstreamer) << "Using gstreamer version: " << gst_version_string();
+ GstRegistry *reg = gst_registry_get();
+
if constexpr (!GST_CHECK_VERSION(1, 22, 0)) {
GstRegistry* reg = gst_registry_get();
- const auto pluginNames = {
- "vaapidecodebin",
- "vaapih264dec",
- "vaapih264enc",
- "vaapih265dec",
- "vaapijpegdec",
- "vaapijpegenc",
- "vaapimpeg2dec",
- "vaapipostproc",
- "vaapisink",
- "vaapivp8dec",
- "vaapivp9dec"
- };
-
- for (auto name : pluginNames) {
- QGstPluginFeatureHandle pluginFeature {
- gst_registry_lookup_feature(reg, name),
- QGstPluginFeatureHandle::HasRef,
- };
- if (pluginFeature) {
- gst_plugin_feature_set_rank(pluginFeature.get(), GST_RANK_PRIMARY - 1);
- }
- }
+ for (const char *name : vaapiPluginNames)
+ rankDownPlugin(reg, name);
+ }
+
+ if (qEnvironmentVariableIsSet("QT_GSTREAMER_DISABLE_VA")) {
+ for (const char *name : vaPluginNames)
+ rankDownPlugin(reg, name);
+ }
+
+ if (qEnvironmentVariableIsSet("QT_GSTREAMER_DISABLE_NVCODEC")) {
+ for (const char *name : nvcodecPluginNames)
+ rankDownPlugin(reg, name);
}
}
@@ -86,6 +194,12 @@ QMaybe<QPlatformMediaPlayer *> QGstreamerIntegration::createPlayer(QMediaPlayer
QMaybe<QPlatformCamera *> QGstreamerIntegration::createCamera(QCamera *camera)
{
+ if (inCustomCameraConstruction) {
+ QGstElement element = std::exchange(pendingCameraElement, {});
+ return element ? new QGstreamerCustomCamera{ camera, std::move(element) }
+ : new QGstreamerCustomCamera{ camera };
+ }
+
return QGstreamerCamera::create(camera);
}
@@ -120,4 +234,9 @@ GstDevice *QGstreamerIntegration::videoDevice(const QByteArray &id)
return devices ? static_cast<QGstreamerVideoDevices *>(devices)->videoDevice(id) : nullptr;
}
+QAbstractPlatformSpecificInterface *QGstreamerIntegration::platformSpecificInterface()
+{
+ return &m_platformSpecificImplementation;
+}
+
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/qgstreamerintegration_p.h b/src/plugins/multimedia/gstreamer/qgstreamerintegration_p.h
index 9cb84c57b..229bbd48e 100644
--- a/src/plugins/multimedia/gstreamer/qgstreamerintegration_p.h
+++ b/src/plugins/multimedia/gstreamer/qgstreamerintegration_p.h
@@ -15,13 +15,31 @@
// We mean it.
//
-#include <private/qplatformmediaintegration_p.h>
+#include <QtMultimedia/private/qplatformmediaintegration_p.h>
+#include <QtMultimedia/private/qgstreamer_platformspecificinterface_p.h>
+
#include <gst/gst.h>
QT_BEGIN_NAMESPACE
class QGstreamerFormatInfo;
+class QGStreamerPlatformSpecificInterfaceImplementation : public QGStreamerPlatformSpecificInterface
+{
+public:
+ ~QGStreamerPlatformSpecificInterfaceImplementation() override;
+
+ QAudioDevice makeCustomGStreamerAudioInput(const QByteArray &gstreamerPipeline) override;
+ QAudioDevice makeCustomGStreamerAudioOutput(const QByteArray &gstreamerPipeline) override;
+ QCamera *makeCustomGStreamerCamera(const QByteArray &gstreamerPipeline,
+ QObject *parent) override;
+
+ QCamera *makeCustomGStreamerCamera(GstElement *, QObject *parent) override;
+
+ GstPipeline *gstPipeline(QMediaPlayer *) override;
+ GstPipeline *gstPipeline(QMediaCaptureSession *) override;
+};
+
class QGstreamerIntegration : public QPlatformMediaIntegration
{
public:
@@ -47,9 +65,13 @@ public:
const QGstreamerFormatInfo *gstFormatsInfo();
GstDevice *videoDevice(const QByteArray &id);
+ QAbstractPlatformSpecificInterface *platformSpecificInterface() override;
+
protected:
QPlatformMediaFormatInfo *createFormatInfo() override;
QPlatformVideoDevices *createVideoDevices() override;
+
+ QGStreamerPlatformSpecificInterfaceImplementation m_platformSpecificImplementation;
};
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/qgstreamervideodevices.cpp b/src/plugins/multimedia/gstreamer/qgstreamervideodevices.cpp
index cceaca621..78ac16eb4 100644
--- a/src/plugins/multimedia/gstreamer/qgstreamervideodevices.cpp
+++ b/src/plugins/multimedia/gstreamer/qgstreamervideodevices.cpp
@@ -76,11 +76,13 @@ QList<QCameraDevice> QGstreamerVideoDevices::videoDevices() const
info->description = desc.toQString();
info->id = device.id;
- if (QGstStructure properties = gst_device_get_properties(device.gstDevice.get());
- !properties.isNull()) {
- auto def = properties["is-default"].toBool();
+ QUniqueGstStructureHandle properties{
+ gst_device_get_properties(device.gstDevice.get()),
+ };
+ if (properties) {
+ QGstStructureView view{ properties };
+ auto def = view["is-default"].toBool();
info->isDefault = def && *def;
- properties.free();
}
if (info->isDefault)
diff --git a/src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer.cpp b/src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer.cpp
index 8ad0894f7..6595c5d42 100644
--- a/src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer.cpp
+++ b/src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer.cpp
@@ -105,7 +105,7 @@ static QAbstractVideoBuffer::MapData mapData(const camera_frame_nv12_t &frame,
{
return {
- .nPlanes = 2,
+ .planeCount = 2,
.bytesPerLine = {
toInt(frame.stride),
toInt(frame.uv_stride)
@@ -114,7 +114,7 @@ static QAbstractVideoBuffer::MapData mapData(const camera_frame_nv12_t &frame,
baseAddress,
baseAddress + frame.uv_offset
},
- .size = {
+ .dataSize = {
toInt(frame.stride * frame.height),
toInt(frame.uv_stride * frame.height / 2)
}
@@ -125,14 +125,14 @@ static QAbstractVideoBuffer::MapData mapData(const camera_frame_rgb8888_t &frame
unsigned char *baseAddress)
{
return {
- .nPlanes = 1,
+ .planeCount = 1,
.bytesPerLine = {
toInt(frame.stride)
},
.data = {
baseAddress
},
- .size = {
+ .dataSize = {
toInt(frame.stride * frame.height),
}
};
@@ -142,14 +142,14 @@ static QAbstractVideoBuffer::MapData mapData(const camera_frame_gray8_t &frame,
unsigned char *baseAddress)
{
return {
- .nPlanes = 1,
+ .planeCount = 1,
.bytesPerLine = {
toInt(frame.stride)
},
.data = {
baseAddress
},
- .size = {
+ .dataSize = {
toInt(frame.stride * frame.height)
}
};
@@ -159,14 +159,14 @@ static QAbstractVideoBuffer::MapData mapData(const camera_frame_cbycry_t &frame,
unsigned char *baseAddress)
{
return {
- .nPlanes = 1,
+ .planeCount = 1,
.bytesPerLine = {
toInt(frame.stride)
},
.data = {
baseAddress
},
- .size = {
+ .dataSize = {
toInt(frame.bufsize),
}
};
@@ -176,7 +176,7 @@ static QAbstractVideoBuffer::MapData mapData(const camera_frame_ycbcr420p_t &fra
unsigned char *baseAddress)
{
return {
- .nPlanes = 3,
+ .planeCount = 3,
.bytesPerLine = {
toInt(frame.y_stride),
frame.cb_stride,
@@ -187,7 +187,7 @@ static QAbstractVideoBuffer::MapData mapData(const camera_frame_ycbcr420p_t &fra
baseAddress + frame.cb_offset,
baseAddress + frame.cr_offset,
},
- .size = {
+ .dataSize = {
toInt(frame.y_stride * frame.height),
toInt(frame.cb_stride * frame.height / 2),
toInt(frame.cr_stride * frame.height / 2)
@@ -199,14 +199,14 @@ static QAbstractVideoBuffer::MapData mapData(const camera_frame_ycbycr_t &frame,
unsigned char *baseAddress)
{
return {
- .nPlanes = 1,
+ .planeCount = 1,
.bytesPerLine = {
toInt(frame.stride)
},
.data = {
baseAddress
},
- .size = {
+ .dataSize = {
toInt(frame.stride * frame.height)
}
};
@@ -260,10 +260,10 @@ static constexpr QSize frameSize(const camera_buffer_t *buffer)
QT_BEGIN_NAMESPACE
QQnxCameraFrameBuffer::QQnxCameraFrameBuffer(const camera_buffer_t *buffer, QRhi *rhi)
- : QAbstractVideoBuffer(rhi ? QVideoFrame::RhiTextureHandle : QVideoFrame::NoHandle, rhi)
- , m_rhi(rhi)
- , m_pixelFormat(::frameTypeToPixelFormat(buffer->frametype))
- , m_dataSize(::bufferDataSize(buffer))
+ : QHwVideoBuffer(rhi ? QVideoFrame::RhiTextureHandle : QVideoFrame::NoHandle, rhi),
+ m_rhi(rhi),
+ m_pixelFormat(::frameTypeToPixelFormat(buffer->frametype)),
+ m_dataSize(::bufferDataSize(buffer))
{
if (m_dataSize <= 0)
return;
@@ -277,12 +277,7 @@ QQnxCameraFrameBuffer::QQnxCameraFrameBuffer(const camera_buffer_t *buffer, QRhi
m_frameSize = ::frameSize(buffer);
}
-QVideoFrame::MapMode QQnxCameraFrameBuffer::mapMode() const
-{
- return QVideoFrame::ReadOnly;
-}
-
-QAbstractVideoBuffer::MapData QQnxCameraFrameBuffer::map(QVideoFrame::MapMode)
+QAbstractVideoBuffer::MapData QQnxCameraFrameBuffer::map(QtVideo::MapMode)
{
return m_mapData;
}
diff --git a/src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer_p.h b/src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer_p.h
index 9fed113a6..20f724552 100644
--- a/src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer_p.h
+++ b/src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer_p.h
@@ -14,7 +14,7 @@
// We mean it.
//
-#include <private/qabstractvideobuffer_p.h>
+#include <private/qhwvideobuffer_p.h>
#include <QtCore/qsize.h>
@@ -26,7 +26,7 @@ QT_BEGIN_NAMESPACE
class QRhi;
-class QQnxCameraFrameBuffer : public QAbstractVideoBuffer
+class QQnxCameraFrameBuffer : public QHwVideoBuffer
{
public:
explicit QQnxCameraFrameBuffer(const camera_buffer_t *buffer, QRhi *rhi = nullptr);
@@ -34,8 +34,7 @@ public:
QQnxCameraFrameBuffer(const QQnxCameraFrameBuffer&) = delete;
QQnxCameraFrameBuffer& operator=(const QQnxCameraFrameBuffer&) = delete;
- QVideoFrame::MapMode mapMode() const override;
- MapData map(QVideoFrame::MapMode mode) override;
+ MapData map(QtVideo::MapMode mode) override;
void unmap() override;
QVideoFrameFormat::PixelFormat pixelFormat() const;
diff --git a/src/plugins/multimedia/qnx/camera/qqnxplatformcamera.cpp b/src/plugins/multimedia/qnx/camera/qqnxplatformcamera.cpp
index fdf69618e..b604f4561 100644
--- a/src/plugins/multimedia/qnx/camera/qqnxplatformcamera.cpp
+++ b/src/plugins/multimedia/qnx/camera/qqnxplatformcamera.cpp
@@ -12,6 +12,7 @@
#include <qmediadevices.h>
#include <private/qmediastoragelocation_p.h>
+#include <private/qvideoframe_p.h>
#include <camera/camera_api.h>
#include <camera/camera_3a.h>
@@ -402,16 +403,14 @@ void QQnxPlatformCamera::onFrameAvailable()
if (!m_videoSink)
return;
- std::unique_ptr<QQnxCameraFrameBuffer> currentFrame = m_qnxCamera->takeCurrentFrame();
+ std::unique_ptr<QQnxCameraFrameBuffer> currentFrameBuffer = m_qnxCamera->takeCurrentFrame();
- if (!currentFrame)
+ if (!currentFrameBuffer)
return;
- const QVideoFrame actualFrame(currentFrame.get(),
- QVideoFrameFormat(currentFrame->size(), currentFrame->pixelFormat()));
-
- currentFrame.release(); // QVideoFrame has taken ownership of the internal
- // buffer
+ QVideoFrameFormat format(currentFrameBuffer->size(), currentFrameBuffer->pixelFormat());
+ const QVideoFrame actualFrame =
+ QVideoFramePrivate::createFrame(std::move(currentFrameBuffer), std::move(format));
m_videoSink->setVideoFrame(actualFrame);
diff --git a/src/plugins/multimedia/qnx/mediaplayer/qqnxmediaplayer.cpp b/src/plugins/multimedia/qnx/mediaplayer/qqnxmediaplayer.cpp
index 1c28c3b14..14b190836 100644
--- a/src/plugins/multimedia/qnx/mediaplayer/qqnxmediaplayer.cpp
+++ b/src/plugins/multimedia/qnx/mediaplayer/qqnxmediaplayer.cpp
@@ -7,7 +7,8 @@
#include "qqnxmediaeventthread_p.h"
#include "qqnxwindowgrabber_p.h"
-#include <private/qabstractvideobuffer_p.h>
+#include <private/qhwvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
#include <QtCore/qabstracteventdispatcher.h>
#include <QtCore/qcoreapplication.h>
@@ -62,24 +63,19 @@ static std::tuple<int, int, bool> parseBufferLevel(const QString &value)
return { level, capacity, true };
}
-class QnxTextureBuffer : public QAbstractVideoBuffer
+class QnxTextureBuffer : public QHwVideoBuffer
{
public:
QnxTextureBuffer(QQnxWindowGrabber *QQnxWindowGrabber)
- : QAbstractVideoBuffer(QVideoFrame::RhiTextureHandle)
+ : QHwVideoBuffer(QVideoFrame::RhiTextureHandle)
{
m_windowGrabber = QQnxWindowGrabber;
m_handle = 0;
}
- QVideoFrame::MapMode mapMode() const override
- {
- return QVideoFrame::ReadWrite;
- }
-
void unmap() override {}
- MapData map(QVideoFrame::MapMode /*mode*/) override
+ MapData map(QtVideo::MapMode /*mode*/) override
{
return {};
}
@@ -102,19 +98,13 @@ private:
class QnxRasterBuffer : public QAbstractVideoBuffer
{
public:
- QnxRasterBuffer(QQnxWindowGrabber *windowGrabber)
- : QAbstractVideoBuffer(QVideoFrame::NoHandle)
- {
- m_windowGrabber = windowGrabber;
- }
+ QnxRasterBuffer(QQnxWindowGrabber *windowGrabber) { m_windowGrabber = windowGrabber; }
- QVideoFrame::MapMode mapMode() const override
+ MapData map(QtVideo::MapMode mode) override
{
- return QVideoFrame::ReadOnly;
- }
+ if (mode != QtVideo::MapMode::ReadOnly)
+ return {};
- MapData map(QVideoFrame::MapMode /*mode*/) override
- {
if (buffer.data) {
qWarning("QnxRasterBuffer: need to unmap before mapping");
return {};
@@ -123,10 +113,10 @@ public:
buffer = m_windowGrabber->getNextBuffer();
return {
- .nPlanes = 1,
+ .planeCount = 1,
.bytesPerLine = { buffer.stride },
.data = { buffer.data },
- .size = { buffer.width * buffer.height * buffer.pixelSize }
+ .dataSize = { buffer.width * buffer.height * buffer.pixelSize }
};
}
@@ -135,6 +125,8 @@ public:
buffer = {};
}
+ QVideoFrameFormat format() const override { return {}; }
+
private:
QQnxWindowGrabber *m_windowGrabber;
QQnxWindowGrabber::BufferView buffer;
@@ -517,12 +509,13 @@ void QQnxMediaPlayer::updateScene(const QSize &size)
if (!m_platformVideoSink)
return;
- auto *buffer = m_windowGrabber->isEglImageSupported()
- ? static_cast<QAbstractVideoBuffer*>(new QnxTextureBuffer(m_windowGrabber))
- : static_cast<QAbstractVideoBuffer*>(new QnxRasterBuffer(m_windowGrabber));
+ QVideoFrameFormat format(size, QVideoFrameFormat::Format_BGRX8888);
- const QVideoFrame actualFrame(buffer,
- QVideoFrameFormat(size, QVideoFrameFormat::Format_BGRX8888));
+ const QVideoFrame actualFrame = m_windowGrabber->isEglImageSupported()
+ ? QVideoFramePrivate::createFrame(std::make_unique<QnxTextureBuffer>(m_windowGrabber),
+ std::move(format))
+ : QVideoFramePrivate::createFrame(std::make_unique<QnxRasterBuffer>(m_windowGrabber),
+ std::move(format));
m_platformVideoSink->setVideoFrame(actualFrame);
}
diff --git a/src/plugins/multimedia/wasm/common/qwasmvideooutput.cpp b/src/plugins/multimedia/wasm/common/qwasmvideooutput.cpp
index 74c14959c..84d325635 100644
--- a/src/plugins/multimedia/wasm/common/qwasmvideooutput.cpp
+++ b/src/plugins/multimedia/wasm/common/qwasmvideooutput.cpp
@@ -13,10 +13,10 @@
#include "qwasmvideooutput_p.h"
#include <qvideosink.h>
-#include <private/qabstractvideobuffer_p.h>
#include <private/qplatformvideosink_p.h>
#include <private/qmemoryvideobuffer_p.h>
#include <private/qvideotexturehelper_p.h>
+#include <private/qvideoframe_p.h>
#include <private/qstdweb_p.h>
#include <QTimer>
@@ -58,10 +58,9 @@ static bool checkForVideoFrame()
return (!videoFrame.isNull() && !videoFrame.isUndefined());
}
-Q_GLOBAL_STATIC_WITH_ARGS(bool, m_hasVideoFrame, (checkForVideoFrame()))
-
QWasmVideoOutput::QWasmVideoOutput(QObject *parent) : QObject{ parent }
{
+ m_hasVideoFrame = checkForVideoFrame();
}
void QWasmVideoOutput::setVideoSize(const QSize &newSize)
@@ -862,9 +861,10 @@ void QWasmVideoOutput::videoComputeFrame(void *context)
auto *textureDescription = QVideoTextureHelper::textureDescription(frameFormat.pixelFormat());
- QVideoFrame vFrame(
- new QMemoryVideoBuffer(frameBytes,
- textureDescription->strideForWidth(frameFormat.frameWidth())),
+ QVideoFrame vFrame = QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(
+ std::move(frameBytes),
+ textureDescription->strideForWidth(frameFormat.frameWidth())),
frameFormat);
QWasmVideoOutput *wasmVideoOutput = reinterpret_cast<QWasmVideoOutput *>(context);
@@ -924,10 +924,12 @@ void QWasmVideoOutput::videoFrameCallback(emscripten::val now, emscripten::val m
auto *textureDescription = QVideoTextureHelper::textureDescription(frameFormat.pixelFormat());
- QVideoFrame vFrame(
- new QMemoryVideoBuffer(frameBytes,
- textureDescription->strideForWidth(frameFormat.frameWidth())),
- frameFormat);
+ auto buffer = std::make_unique<QMemoryVideoBuffer>(
+ std::move(frameBytes),
+ textureDescription->strideForWidth(frameFormat.frameWidth()));
+
+ QVideoFrame vFrame =
+ QVideoFramePrivate::createFrame(std::move(buffer), std::move(frameFormat));
if (!wasmVideoOutput) {
qCDebug(qWasmMediaVideoOutput) << "ERROR:"
diff --git a/src/plugins/multimedia/wasm/common/qwasmvideooutput_p.h b/src/plugins/multimedia/wasm/common/qwasmvideooutput_p.h
index dc4a762bf..f078ffb44 100644
--- a/src/plugins/multimedia/wasm/common/qwasmvideooutput_p.h
+++ b/src/plugins/multimedia/wasm/common/qwasmvideooutput_p.h
@@ -81,6 +81,7 @@ public:
emscripten::val getDeviceCapabilities();
bool setDeviceSetting(const std::string &key, emscripten::val value);
bool isCameraReady() { return m_cameraIsReady; }
+ bool m_hasVideoFrame = false;
static void videoFrameCallback(emscripten::val now, emscripten::val metadata);
void videoFrameTimerCallback();
diff --git a/src/plugins/multimedia/wasm/mediacapture/qwasmcamera.cpp b/src/plugins/multimedia/wasm/mediacapture/qwasmcamera.cpp
index 9bd63b081..fbc5cf262 100644
--- a/src/plugins/multimedia/wasm/mediacapture/qwasmcamera.cpp
+++ b/src/plugins/multimedia/wasm/mediacapture/qwasmcamera.cpp
@@ -4,7 +4,6 @@
#include "qwasmcamera_p.h"
#include "qmediadevices.h"
#include <qcameradevice.h>
-#include "private/qabstractvideobuffer_p.h"
#include "private/qplatformvideosink_p.h"
#include <private/qmemoryvideobuffer_p.h>
#include <private/qvideotexturehelper_p.h>
@@ -64,7 +63,7 @@ void QWasmCamera::setActive(bool active)
{
if (!m_CaptureSession) {
- emit error(QCamera::CameraError, QStringLiteral("video surface error"));
+ updateError(QCamera::CameraError, QStringLiteral("video surface error"));
m_shouldBeActive = true;
return;
}
@@ -120,7 +119,7 @@ void QWasmCamera::setCamera(const QCameraDevice &camera)
createCamera(m_cameraDev);
emit cameraIsReady();
} else {
- emit error(QCamera::CameraError, QStringLiteral("Failed to find a camera"));
+ updateError(QCamera::CameraError, QStringLiteral("Failed to find a camera"));
}
}
diff --git a/src/plugins/multimedia/wasm/mediacapture/qwasmmediarecorder.cpp b/src/plugins/multimedia/wasm/mediacapture/qwasmmediarecorder.cpp
index 98d0d860b..98f04616a 100644
--- a/src/plugins/multimedia/wasm/mediacapture/qwasmmediarecorder.cpp
+++ b/src/plugins/multimedia/wasm/mediacapture/qwasmmediarecorder.cpp
@@ -285,8 +285,8 @@ void QWasmMediaRecorder::setStream(emscripten::val stream)
theError["target"]["data-mediarecordercontext"].as<quintptr>());
if (recorder) {
- recorder->error(QMediaRecorder::ResourceError,
- QString::fromStdString(theError["message"].as<std::string>()));
+ recorder->updateError(QMediaRecorder::ResourceError,
+ QString::fromStdString(theError["message"].as<std::string>()));
emit recorder->stateChanged(recorder->state());
}
};
@@ -381,12 +381,12 @@ void QWasmMediaRecorder::audioDataAvailable(emscripten::val blob, double timeCod
auto fileReader = std::make_shared<qstdweb::FileReader>();
fileReader->onError([=](emscripten::val theError) {
- error(QMediaRecorder::ResourceError,
- QString::fromStdString(theError["message"].as<std::string>()));
+ updateError(QMediaRecorder::ResourceError,
+ QString::fromStdString(theError["message"].as<std::string>()));
});
fileReader->onAbort([=](emscripten::val) {
- error(QMediaRecorder::ResourceError, QStringLiteral("File read aborted"));
+ updateError(QMediaRecorder::ResourceError, QStringLiteral("File read aborted"));
});
fileReader->onLoad([=](emscripten::val) {
@@ -473,7 +473,8 @@ void QWasmMediaRecorder::setTrackContraints(QMediaEncoderSettings &settings, ems
qCDebug(qWasmMediaRecorder)
<< theError["code"].as<int>()
<< QString::fromStdString(theError["message"].as<std::string>());
- error(QMediaRecorder::ResourceError, QString::fromStdString(theError["message"].as<std::string>()));
+ updateError(QMediaRecorder::ResourceError,
+ QString::fromStdString(theError["message"].as<std::string>()));
} },
constraints);
}
diff --git a/src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp b/src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp
index cc14cd419..517f1d969 100644
--- a/src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp
+++ b/src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp
@@ -5,7 +5,8 @@
#include "evrhelpers_p.h"
-#include <private/qabstractvideobuffer_p.h>
+#include <private/qhwvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
#include <qvideoframe.h>
#include <QDebug>
#include <qthread.h>
@@ -24,31 +25,29 @@
QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLcEvrD3DPresentEngine, "qt.multimedia.evrd3dpresentengine")
+static Q_LOGGING_CATEGORY(qLcEvrD3DPresentEngine, "qt.multimedia.evrd3dpresentengine");
-class IMFSampleVideoBuffer: public QAbstractVideoBuffer
+class IMFSampleVideoBuffer : public QHwVideoBuffer
{
public:
- IMFSampleVideoBuffer(ComPtr<IDirect3DDevice9Ex> device,
- const ComPtr<IMFSample> &sample, QRhi *rhi, QVideoFrame::HandleType type = QVideoFrame::NoHandle)
- : QAbstractVideoBuffer(type, rhi)
- , m_device(device)
- , m_sample(sample)
- , m_mapMode(QVideoFrame::NotMapped)
+ IMFSampleVideoBuffer(ComPtr<IDirect3DDevice9Ex> device, const ComPtr<IMFSample> &sample,
+ QRhi *rhi, QVideoFrame::HandleType type = QVideoFrame::NoHandle)
+ : QHwVideoBuffer(type, rhi),
+ m_device(device),
+ m_sample(sample),
+ m_mapMode(QtVideo::MapMode::NotMapped)
{
}
~IMFSampleVideoBuffer() override
{
- if (m_memSurface && m_mapMode != QVideoFrame::NotMapped)
+ if (m_memSurface && m_mapMode != QtVideo::MapMode::NotMapped)
m_memSurface->UnlockRect();
}
- QVideoFrame::MapMode mapMode() const override { return m_mapMode; }
-
- MapData map(QVideoFrame::MapMode mode) override
+ MapData map(QtVideo::MapMode mode) override
{
- if (!m_sample || m_mapMode != QVideoFrame::NotMapped || mode != QVideoFrame::ReadOnly)
+ if (!m_sample || m_mapMode != QtVideo::MapMode::NotMapped || mode != QtVideo::MapMode::ReadOnly)
return {};
D3DSURFACE_DESC desc;
@@ -80,25 +79,25 @@ public:
}
D3DLOCKED_RECT rect;
- if (FAILED(m_memSurface->LockRect(&rect, NULL, mode == QVideoFrame::ReadOnly ? D3DLOCK_READONLY : 0)))
+ if (FAILED(m_memSurface->LockRect(&rect, NULL, mode == QtVideo::MapMode::ReadOnly ? D3DLOCK_READONLY : 0)))
return {};
m_mapMode = mode;
MapData mapData;
- mapData.nPlanes = 1;
+ mapData.planeCount = 1;
mapData.bytesPerLine[0] = (int)rect.Pitch;
mapData.data[0] = reinterpret_cast<uchar *>(rect.pBits);
- mapData.size[0] = (int)(rect.Pitch * desc.Height);
+ mapData.dataSize[0] = (int)(rect.Pitch * desc.Height);
return mapData;
}
void unmap() override
{
- if (m_mapMode == QVideoFrame::NotMapped)
+ if (m_mapMode == QtVideo::MapMode::NotMapped)
return;
- m_mapMode = QVideoFrame::NotMapped;
+ m_mapMode = QtVideo::MapMode::NotMapped;
if (m_memSurface)
m_memSurface->UnlockRect();
}
@@ -109,7 +108,7 @@ protected:
private:
ComPtr<IDirect3DSurface9> m_memSurface;
- QVideoFrame::MapMode m_mapMode;
+ QtVideo::MapMode m_mapMode;
};
class QVideoFrameD3D11Textures: public QVideoFrameTextures
@@ -665,22 +664,23 @@ QVideoFrame D3DPresentEngine::makeVideoFrame(const ComPtr<IMFSample> &sample)
if (p.first == sample.Get())
sharedHandle = p.second;
- QAbstractVideoBuffer *vb = nullptr;
+ std::unique_ptr<IMFSampleVideoBuffer> vb;
QRhi *rhi = m_sink ? m_sink->rhi() : nullptr;
if (m_useTextureRendering && sharedHandle && rhi) {
if (rhi->backend() == QRhi::D3D11) {
- vb = new D3D11TextureVideoBuffer(m_device, sample, sharedHandle, rhi);
+ vb = std::make_unique<D3D11TextureVideoBuffer>(m_device, sample, sharedHandle, rhi);
#if QT_CONFIG(opengl)
} else if (rhi->backend() == QRhi::OpenGLES2) {
- vb = new OpenGlVideoBuffer(m_device, sample, m_wglNvDxInterop, sharedHandle, rhi);
+ vb = std::make_unique<OpenGlVideoBuffer>(m_device, sample, m_wglNvDxInterop,
+ sharedHandle, rhi);
#endif
}
}
if (!vb)
- vb = new IMFSampleVideoBuffer(m_device, sample, rhi);
+ vb = std::make_unique<IMFSampleVideoBuffer>(m_device, sample, rhi);
- QVideoFrame frame(vb, m_surfaceFormat);
+ QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(vb), m_surfaceFormat);
// WMF uses 100-nanosecond units, Qt uses microseconds
LONGLONG startTime = 0;
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader.cpp b/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader.cpp
index 2bdc4ea7d..e99b95ad2 100644
--- a/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader.cpp
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader.cpp
@@ -8,6 +8,7 @@
#include <qmediadevices.h>
#include <qaudiodevice.h>
#include <private/qmemoryvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
#include <private/qwindowsmfdefs_p.h>
#include <private/qcomptr_p.h>
#include <QtCore/qdebug.h>
@@ -955,9 +956,11 @@ STDMETHODIMP QWindowsMediaDeviceReader::OnReadSample(HRESULT hrStatus, DWORD dwS
if (SUCCEEDED(mediaBuffer->Lock(&buffer, nullptr, &bufLen))) {
auto bytes = QByteArray(reinterpret_cast<char*>(buffer), bufLen);
+ QVideoFrameFormat format(QSize(m_frameWidth, m_frameHeight), m_pixelFormat);
- QVideoFrame frame(new QMemoryVideoBuffer(bytes, m_stride),
- QVideoFrameFormat(QSize(m_frameWidth, m_frameHeight), m_pixelFormat));
+ QVideoFrame frame = QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(std::move(bytes), m_stride),
+ std::move(format));
// WMF uses 100-nanosecond units, Qt uses microseconds
frame.setStartTime(llTimestamp * 0.1);
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder.cpp b/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder.cpp
index 1d901c036..512110af6 100644
--- a/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder.cpp
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder.cpp
@@ -52,8 +52,8 @@ void QWindowsMediaEncoder::record(QMediaEncoderSettings &settings)
m_mediaDeviceSession->setActive(true);
if (!m_mediaDeviceSession->isActivating()) {
- error(QMediaRecorder::ResourceError,
- QMediaRecorderPrivate::msgFailedStartRecording());
+ updateError(QMediaRecorder::ResourceError,
+ QMediaRecorderPrivate::msgFailedStartRecording());
return;
}
}
@@ -72,7 +72,7 @@ void QWindowsMediaEncoder::record(QMediaEncoderSettings &settings)
stateChanged(m_state);
} else {
- error(ec, QMediaRecorderPrivate::msgFailedStartRecording());
+ updateError(ec, QMediaRecorderPrivate::msgFailedStartRecording());
}
}
@@ -85,7 +85,7 @@ void QWindowsMediaEncoder::pause()
m_state = QMediaRecorder::PausedState;
stateChanged(m_state);
} else {
- error(QMediaRecorder::FormatError, tr("Failed to pause recording"));
+ updateError(QMediaRecorder::FormatError, tr("Failed to pause recording"));
}
}
@@ -98,7 +98,7 @@ void QWindowsMediaEncoder::resume()
m_state = QMediaRecorder::RecordingState;
stateChanged(m_state);
} else {
- error(QMediaRecorder::FormatError, tr("Failed to resume recording"));
+ updateError(QMediaRecorder::FormatError, tr("Failed to resume recording"));
}
}
@@ -178,11 +178,11 @@ void QWindowsMediaEncoder::onDurationChanged(qint64 duration)
void QWindowsMediaEncoder::onStreamingError(int errorCode)
{
if (errorCode == MF_E_VIDEO_RECORDING_DEVICE_INVALIDATED)
- error(QMediaRecorder::ResourceError, tr("Camera is no longer present"));
+ updateError(QMediaRecorder::ResourceError, tr("Camera is no longer present"));
else if (errorCode == MF_E_AUDIO_RECORDING_DEVICE_INVALIDATED)
- error(QMediaRecorder::ResourceError, tr("Audio input is no longer present"));
+ updateError(QMediaRecorder::ResourceError, tr("Audio input is no longer present"));
else
- error(QMediaRecorder::ResourceError, tr("Streaming error"));
+ updateError(QMediaRecorder::ResourceError, tr("Streaming error"));
if (m_state != QMediaRecorder::StoppedState) {
m_mediaDeviceSession->stopRecording();
@@ -194,7 +194,7 @@ void QWindowsMediaEncoder::onStreamingError(int errorCode)
void QWindowsMediaEncoder::onRecordingError(int errorCode)
{
Q_UNUSED(errorCode);
- error(QMediaRecorder::ResourceError, tr("Recording error"));
+ updateError(QMediaRecorder::ResourceError, tr("Recording error"));
auto lastState = m_state;
m_state = QMediaRecorder::StoppedState;
diff --git a/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp b/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp
index 7ef983120..599dbb9e2 100644
--- a/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp
+++ b/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp
@@ -8,6 +8,7 @@
#include "qsgvivantevideomaterialshader.h"
#include "qsgvivantevideonode.h"
#include "private/qsgvideotexture_p.h"
+#include "private/qvideoframe_p.h"
#include <QOpenGLContext>
#include <QThread>
@@ -142,7 +143,7 @@ GLuint QSGVivanteVideoMaterial::vivanteMapping(QVideoFrame vF)
clearTextures();
}
- if (vF.map(QVideoFrame::ReadOnly)) {
+ if (vF.map(QtVideo::MapMode::ReadOnly)) {
if (mMappable) {
if (!mBitsToTextureMap.contains(vF.bits())) {
@@ -191,7 +192,7 @@ GLuint QSGVivanteVideoMaterial::vivanteMapping(QVideoFrame vF)
GLuint physical = ~0U;
#if GST_CHECK_VERSION(1,14,0)
- auto buffer = reinterpret_cast<QGstVideoBuffer *>(vF.buffer());
+ auto buffer = reinterpret_cast<QGstVideoBuffer *>(QVideoFramePrivate::buffer(vF));
auto mem = gst_buffer_peek_memory(buffer->buffer(), 0);
auto phys_addr = gst_is_phys_memory(mem) ? gst_phys_memory_get_phys_addr(mem) : 0;
if (phys_addr)
diff --git a/src/resonance-audio/CMakeLists.txt b/src/resonance-audio/CMakeLists.txt
index 6b82e9ac5..1e967a117 100644
--- a/src/resonance-audio/CMakeLists.txt
+++ b/src/resonance-audio/CMakeLists.txt
@@ -1,7 +1,7 @@
# Copyright (C) 2022 The Qt Company Ltd.
# SPDX-License-Identifier: BSD-3-Clause
-if (MINGW AND CMAKE_SIZEOF_VOID_P EQUAL 4)
+if(MINGW AND CMAKE_SIZEOF_VOID_P EQUAL 4)
set(NO_SIMD_DEFINES PFFFT_SIMD_DISABLE DISABLE_SIMD)
endif()