Diffstat
-rw-r--r-- .cmake.conf | 2
-rw-r--r-- cmake/FindFFmpeg.cmake | 67
-rw-r--r-- cmake/FindVAAPI.cmake | 34
-rw-r--r-- coin/instructions/run_ffmpeg_backend_tests.yaml | 16
-rw-r--r-- coin/module_config.yaml | 15
-rw-r--r-- dependencies.yaml | 8
-rw-r--r-- examples/multimedia/audiodevices/audiodevices.cpp | 7
-rw-r--r-- examples/multimedia/camera/android/AndroidManifest.xml | 32
-rw-r--r-- examples/multimedia/camera/camera.cpp | 19
-rw-r--r-- examples/multimedia/camera/metadatadialog.cpp | 2
-rw-r--r-- examples/multimedia/player/player.cpp | 48
-rw-r--r-- examples/multimedia/player/player.h | 6
-rw-r--r-- examples/multimedia/player/playercontrols.cpp | 47
-rw-r--r-- examples/multimedia/player/playercontrols.h | 4
-rw-r--r-- examples/multimedia/video/mediaplayer/Main.qml | 2
-rw-r--r-- examples/multimedia/video/mediaplayer/controls/PlaybackControl.qml | 1
-rw-r--r-- src/android/jar/src/org/qtproject/qt/android/multimedia/QtAndroidMediaPlayer.java | 83
-rw-r--r-- src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java | 88
-rw-r--r-- src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java | 38
-rw-r--r-- src/android/jar/src/org/qtproject/qt/android/multimedia/QtCameraListener.java | 22
-rw-r--r-- src/android/jar/src/org/qtproject/qt/android/multimedia/QtExifDataHandler.java | 6
-rw-r--r-- src/android/jar/src/org/qtproject/qt/android/multimedia/QtMediaRecorderListener.java | 4
-rw-r--r-- src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java | 51
-rw-r--r-- src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceHolderCallback.java | 4
-rw-r--r-- src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceTextureHolder.java | 4
-rw-r--r-- src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceTextureListener.java | 4
-rw-r--r-- src/android/jar/src/org/qtproject/qt/android/multimedia/QtVideoDeviceManager.java | 52
-rw-r--r-- src/multimedia/CMakeLists.txt | 24
-rw-r--r-- src/multimedia/alsa/qalsaaudiodevice.cpp | 52
-rw-r--r-- src/multimedia/alsa/qalsaaudiodevice_p.h | 6
-rw-r--r-- src/multimedia/alsa/qalsaaudiosink.cpp | 5
-rw-r--r-- src/multimedia/alsa/qalsaaudiosink_p.h | 1
-rw-r--r-- src/multimedia/alsa/qalsaaudiosource.cpp | 20
-rw-r--r-- src/multimedia/alsa/qalsamediadevices.cpp | 98
-rw-r--r-- src/multimedia/android/qandroidmediadevices.cpp | 8
-rw-r--r-- src/multimedia/audio/qaudiobufferinput.cpp | 184
-rw-r--r-- src/multimedia/audio/qaudiobufferinput.h | 48
-rw-r--r-- src/multimedia/audio/qaudiobufferoutput.cpp | 78
-rw-r--r-- src/multimedia/audio/qaudiobufferoutput.h | 37
-rw-r--r-- src/multimedia/audio/qaudiobufferoutput_p.h | 42
-rw-r--r-- src/multimedia/audio/qsamplecache_p.cpp | 13
-rw-r--r-- src/multimedia/audio/qsoundeffect.cpp | 5
-rw-r--r-- src/multimedia/audio/qwavedecoder.cpp | 8
-rw-r--r-- src/multimedia/camera/qcamera.cpp | 25
-rw-r--r-- src/multimedia/camera/qcamera.h | 2
-rw-r--r-- src/multimedia/camera/qcamera_p.h | 6
-rw-r--r-- src/multimedia/camera/qcameradevice.cpp | 10
-rw-r--r-- src/multimedia/camera/qimagecapture.cpp | 25
-rw-r--r-- src/multimedia/configure.cmake | 23
-rw-r--r-- src/multimedia/doc/src/qtmultimedia-index.qdoc | 26
-rw-r--r-- src/multimedia/platform/qgstreamer_platformspecificinterface.cpp | 27
-rw-r--r-- src/multimedia/platform/qgstreamer_platformspecificinterface_p.h | 46
-rw-r--r-- src/multimedia/platform/qplatformaudiobufferinput.cpp | 10
-rw-r--r-- src/multimedia/platform/qplatformaudiobufferinput_p.h | 56
-rw-r--r-- src/multimedia/platform/qplatformcamera.cpp | 9
-rw-r--r-- src/multimedia/platform/qplatformcamera_p.h | 10
-rw-r--r-- src/multimedia/platform/qplatformmediacapture.cpp | 11
-rw-r--r-- src/multimedia/platform/qplatformmediacapture_p.h | 13
-rw-r--r-- src/multimedia/platform/qplatformmediaintegration.cpp | 8
-rw-r--r-- src/multimedia/platform/qplatformmediaintegration_p.h | 12
-rw-r--r-- src/multimedia/platform/qplatformmediaplayer.cpp | 16
-rw-r--r-- src/multimedia/platform/qplatformmediaplayer_p.h | 33
-rw-r--r-- src/multimedia/platform/qplatformmediarecorder.cpp | 6
-rw-r--r-- src/multimedia/platform/qplatformmediarecorder_p.h | 6
-rw-r--r-- src/multimedia/platform/qplatformsurfacecapture_p.h | 3
-rw-r--r-- src/multimedia/platform/qplatformvideoframeinput.cpp | 10
-rw-r--r-- src/multimedia/platform/qplatformvideoframeinput_p.h | 55
-rw-r--r-- src/multimedia/platform/qplatformvideosource_p.h | 5
-rw-r--r-- src/multimedia/playback/qmediaplayer.cpp | 52
-rw-r--r-- src/multimedia/playback/qmediaplayer.h | 5
-rw-r--r-- src/multimedia/playback/qmediaplayer_p.h | 7
-rw-r--r-- src/multimedia/pulseaudio/qaudioengine_pulse.cpp | 2
-rw-r--r-- src/multimedia/qmediaframeinput.cpp | 43
-rw-r--r-- src/multimedia/qmediaframeinput_p.h | 74
-rw-r--r-- src/multimedia/qmediainputencoderinterface_p.h | 31
-rw-r--r-- src/multimedia/qmediametadata.cpp | 50
-rw-r--r-- src/multimedia/qmediametadata.h | 10
-rw-r--r-- src/multimedia/qsymbolsresolveutils.cpp | 79
-rw-r--r-- src/multimedia/qsymbolsresolveutils_p.h | 178
-rw-r--r-- src/multimedia/recording/qmediacapturesession.cpp | 249
-rw-r--r-- src/multimedia/recording/qmediacapturesession.h | 18
-rw-r--r-- src/multimedia/recording/qmediacapturesession_p.h | 15
-rw-r--r-- src/multimedia/recording/qmediarecorder.cpp | 43
-rw-r--r-- src/multimedia/recording/qmediarecorder.h | 5
-rw-r--r-- src/multimedia/recording/qmediarecorder_p.h | 1
-rw-r--r-- src/multimedia/recording/qscreencapture-limitations.qdocinc | 15
-rw-r--r-- src/multimedia/recording/qvideoframeinput.cpp | 181
-rw-r--r-- src/multimedia/recording/qvideoframeinput.h | 48
-rw-r--r-- src/multimedia/video/qabstractvideobuffer.cpp | 213
-rw-r--r-- src/multimedia/video/qabstractvideobuffer.h | 32
-rw-r--r-- src/multimedia/video/qabstractvideobuffer_p.h | 82
-rw-r--r-- src/multimedia/video/qhwvideobuffer.cpp | 17
-rw-r--r-- src/multimedia/video/qhwvideobuffer_p.h | 58
-rw-r--r-- src/multimedia/video/qimagevideobuffer.cpp | 30
-rw-r--r-- src/multimedia/video/qimagevideobuffer_p.h | 9
-rw-r--r-- src/multimedia/video/qmemoryvideobuffer.cpp | 41
-rw-r--r-- src/multimedia/video/qmemoryvideobuffer_p.h | 11
-rw-r--r-- src/multimedia/video/qtvideo.cpp | 21
-rw-r--r-- src/multimedia/video/qtvideo.h | 36
-rw-r--r-- src/multimedia/video/qvideoframe.cpp | 292
-rw-r--r-- src/multimedia/video/qvideoframe.h | 25
-rw-r--r-- src/multimedia/video/qvideoframe_p.h | 44
-rw-r--r-- src/multimedia/video/qvideoframeconversionhelper.cpp | 70
-rw-r--r-- src/multimedia/video/qvideoframeconverter.cpp | 20
-rw-r--r-- src/multimedia/video/qvideoframeconverter_p.h | 4
-rw-r--r-- src/multimedia/video/qvideoframeformat.cpp | 43
-rw-r--r-- src/multimedia/video/qvideoframeformat.h | 13
-rw-r--r-- src/multimedia/video/qvideooutputorientationhandler.cpp | 4
-rw-r--r-- src/multimedia/video/qvideotexturehelper.cpp | 29
-rw-r--r-- src/multimedia/video/qvideowindow.cpp | 7
-rw-r--r-- src/multimediaquick/CMakeLists.txt | 1
-rw-r--r-- src/multimediaquick/qquickimagecapture.cpp | 2
-rw-r--r-- src/multimediaquick/qquickvideooutput.cpp | 2
-rw-r--r-- src/multimediaquick/qsgvideonode_p.cpp | 2
-rw-r--r-- src/multimediawidgets/CMakeLists.txt | 1
-rw-r--r-- src/plugins/multimedia/android/common/qandroidvideooutput.cpp | 52
-rw-r--r-- src/plugins/multimedia/android/common/qandroidvideooutput_p.h | 2
-rw-r--r-- src/plugins/multimedia/android/mediacapture/qandroidcamerasession.cpp | 5
-rw-r--r-- src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp | 18
-rw-r--r-- src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h | 4
-rw-r--r-- src/plugins/multimedia/android/wrappers/jni/androidcamera.cpp | 20
-rw-r--r-- src/plugins/multimedia/darwin/avfvideobuffer.mm | 31
-rw-r--r-- src/plugins/multimedia/darwin/avfvideobuffer_p.h | 10
-rw-r--r-- src/plugins/multimedia/darwin/camera/avfcamerarenderer.mm | 16
-rw-r--r-- src/plugins/multimedia/darwin/camera/avfcamerautility.mm | 5
-rw-r--r-- src/plugins/multimedia/darwin/camera/avfimagecapture.mm | 7
-rw-r--r-- src/plugins/multimedia/darwin/camera/avfmediaencoder.mm | 17
-rw-r--r-- src/plugins/multimedia/darwin/camera/qavfcamerabase.mm | 8
-rw-r--r-- src/plugins/multimedia/darwin/common/avfmetadata.mm | 25
-rw-r--r-- src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer.mm | 49
-rw-r--r-- src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer_p.h | 2
-rw-r--r-- src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol.mm | 8
-rw-r--r-- src/plugins/multimedia/darwin/qavfhelpers.mm | 23
-rw-r--r-- src/plugins/multimedia/ffmpeg/CMakeLists.txt | 99
-rw-r--r-- src/plugins/multimedia/ffmpeg/cmake/QtAddFFmpegStubs.cmake | 199
-rw-r--r-- src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer.cpp | 130
-rw-r--r-- src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer_p.h | 22
-rw-r--r-- src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec.cpp | 51
-rw-r--r-- src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec_p.h | 7
-rw-r--r-- src/plugins/multimedia/ffmpeg/playbackengine/qffmpegdemuxer.cpp | 2
-rw-r--r-- src/plugins/multimedia/ffmpeg/playbackengine/qffmpegmediadataholder.cpp | 27
-rw-r--r-- src/plugins/multimedia/ffmpeg/playbackengine/qffmpegrenderer.cpp | 2
-rw-r--r-- src/plugins/multimedia/ffmpeg/playbackengine/qffmpegstreamdecoder.cpp | 7
-rw-r--r-- src/plugins/multimedia/ffmpeg/playbackengine/qffmpegvideorenderer.cpp | 5
-rw-r--r-- src/plugins/multimedia/ffmpeg/qandroidcamera.cpp | 59
-rw-r--r-- src/plugins/multimedia/ffmpeg/qandroidcameraframe.cpp | 63
-rw-r--r-- src/plugins/multimedia/ffmpeg/qandroidcameraframe_p.h | 21
-rw-r--r-- src/plugins/multimedia/ffmpeg/qandroidvideodevices.cpp | 8
-rw-r--r-- src/plugins/multimedia/ffmpeg/qavfcamera.mm | 4
-rw-r--r-- src/plugins/multimedia/ffmpeg/qavfsamplebufferdelegate.mm | 46
-rw-r--r-- src/plugins/multimedia/ffmpeg/qcgwindowcapture.mm | 29
-rw-r--r-- src/plugins/multimedia/ffmpeg/qeglfsscreencapture.cpp | 10
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpeg.cpp | 201
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpeg_p.h | 73
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegaudiodecoder.cpp | 2
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegaudioinput_p.h | 7
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegconverter.cpp | 272
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegconverter_p.h (renamed from src/plugins/multimedia/ffmpeg/qffmpegsymbolsresolve_p.h) | 27
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegdefs_p.h | 2
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegencodingformatcontext.cpp | 2
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp | 34
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11.cpp | 113
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11_p.h | 2
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpeghwaccel_p.h | 5
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi.cpp | 5
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegimagecapture.cpp | 2
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession.cpp | 38
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession_p.h | 18
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegmediaformatinfo.cpp | 2
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp | 10
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegmediaintegration_p.h | 3
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegmediametadata.cpp | 5
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegmediaplayer.cpp | 28
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegmediaplayer_p.h | 3
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegmediarecorder.cpp | 52
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegmediarecorder_p.h | 2
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegopensslsymbols.cpp | 185
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegplaybackengine.cpp | 25
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegplaybackengine_p.h | 7
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegresampler.cpp | 9
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegresampler_p.h | 4
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegscreencapture_dxgi.cpp | 39
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegsurfacecapturegrabber.cpp | 2
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegsymbolsresolveutils.cpp | 103
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegsymbolsresolveutils_p.h | 142
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp | 82
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegvideobuffer_p.h | 11
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegvideosink.cpp | 3
-rw-r--r-- src/plugins/multimedia/ffmpeg/qffmpegwindowcapture_uwp.cpp | 37
-rw-r--r-- src/plugins/multimedia/ffmpeg/qgdiwindowcapture.cpp | 6
-rw-r--r-- src/plugins/multimedia/ffmpeg/qgrabwindowsurfacecapture.cpp | 5
-rw-r--r-- src/plugins/multimedia/ffmpeg/qopenglvideobuffer.cpp | 9
-rw-r--r-- src/plugins/multimedia/ffmpeg/qopenglvideobuffer_p.h | 7
-rw-r--r-- src/plugins/multimedia/ffmpeg/qv4l2camera.cpp | 13
-rw-r--r-- src/plugins/multimedia/ffmpeg/qv4l2cameradevices.cpp | 2
-rw-r--r-- src/plugins/multimedia/ffmpeg/qv4l2memorytransfer.cpp | 2
-rw-r--r-- src/plugins/multimedia/ffmpeg/qwindowscamera.cpp | 8
-rw-r--r-- src/plugins/multimedia/ffmpeg/qx11surfacecapture.cpp | 10
-rw-r--r-- src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder.cpp | 227
-rw-r--r-- src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder_p.h | 29
-rw-r--r-- src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoderutils.cpp | 2
-rw-r--r-- src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread.cpp | 24
-rw-r--r-- src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread_p.h | 49
-rw-r--r-- src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer.cpp | 165
-rw-r--r-- src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer_p.h | 77
-rw-r--r-- src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer.cpp | 14
-rw-r--r-- src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer_p.h | 1
-rw-r--r-- src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine.cpp | 194
-rw-r--r-- src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine_p.h | 54
-rw-r--r-- src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils.cpp | 63
-rw-r--r-- src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils_p.h | 81
-rw-r--r-- src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder.cpp | 134
-rw-r--r-- src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder_p.h | 23
-rw-r--r-- src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoderutils.cpp | 13
-rw-r--r-- src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder.cpp | 274
-rw-r--r-- src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder_p.h | 28
-rw-r--r-- src/plugins/multimedia/ffmpeg/symbolstubs/openssl3.ver | 7
-rw-r--r-- src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-crypto.cpp | 6
-rw-r--r-- src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-ssl.cpp | 300
-rw-r--r-- src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-drm.cpp | 14
-rw-r--r-- src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-x11.cpp | 14
-rw-r--r-- src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va.cpp (renamed from src/plugins/multimedia/ffmpeg/qffmpegvaapisymbols.cpp) | 128
-rw-r--r-- src/plugins/multimedia/ffmpeg/symbolstubs/va.ver | 7
-rw-r--r-- src/plugins/multimedia/gstreamer/CMakeLists.txt | 2
-rw-r--r-- src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder.cpp | 101
-rw-r--r-- src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder_p.h | 20
-rw-r--r-- src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice.cpp | 35
-rw-r--r-- src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice_p.h | 21
-rw-r--r-- src/plugins/multimedia/gstreamer/audio/qgstreameraudiosink.cpp | 376
-rw-r--r-- src/plugins/multimedia/gstreamer/audio/qgstreameraudiosink_p.h | 124
-rw-r--r-- src/plugins/multimedia/gstreamer/audio/qgstreameraudiosource.cpp | 366
-rw-r--r-- src/plugins/multimedia/gstreamer/audio/qgstreameraudiosource_p.h | 120
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgst.cpp | 318
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgst_debug.cpp | 201
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgst_debug_p.h | 8
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgst_handle_types_p.h | 21
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgst_p.h | 105
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstappsource.cpp | 135
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstappsource_p.h | 31
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstpipeline.cpp | 201
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstpipeline_p.h | 18
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstreameraudioinput.cpp | 167
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstreameraudioinput_p.h | 19
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput.cpp | 187
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput_p.h | 28
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer.cpp | 499
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer_p.h | 43
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstreamermessage_p.h | 2
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstreamermetadata.cpp | 304
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstreamermetadata_p.h | 5
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstreamervideooutput.cpp | 234
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstreamervideooutput_p.h | 41
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstreamervideosink.cpp | 166
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstreamervideosink_p.h | 32
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstsubtitlesink.cpp | 78
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstsubtitlesink_p.h | 24
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstutils.cpp | 2
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstvideobuffer.cpp | 37
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstvideobuffer_p.h | 11
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstvideorenderersink.cpp | 315
-rw-r--r-- src/plugins/multimedia/gstreamer/common/qgstvideorenderersink_p.h | 75
-rw-r--r-- src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera.cpp | 129
-rw-r--r-- src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera_p.h | 35
-rw-r--r-- src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture.cpp | 334
-rw-r--r-- src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture_p.h | 20
-rw-r--r-- src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture.cpp | 122
-rw-r--r-- src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture_p.h | 10
-rw-r--r-- src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder.cpp | 36
-rw-r--r-- src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder_p.h | 2
-rw-r--r-- src/plugins/multimedia/gstreamer/qgstreamerformatinfo.cpp | 133
-rw-r--r-- src/plugins/multimedia/gstreamer/qgstreamerformatinfo_p.h | 8
-rw-r--r-- src/plugins/multimedia/gstreamer/qgstreamerintegration.cpp | 165
-rw-r--r-- src/plugins/multimedia/gstreamer/qgstreamerintegration_p.h | 24
-rw-r--r-- src/plugins/multimedia/gstreamer/qgstreamervideodevices.cpp | 10
-rw-r--r-- src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer.cpp | 39
-rw-r--r-- src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer_p.h | 7
-rw-r--r-- src/plugins/multimedia/qnx/camera/qqnxplatformcamera.cpp | 13
-rw-r--r-- src/plugins/multimedia/qnx/mediaplayer/qqnxmediaplayer.cpp | 45
-rw-r--r-- src/plugins/multimedia/wasm/common/qwasmvideooutput.cpp | 22
-rw-r--r-- src/plugins/multimedia/wasm/common/qwasmvideooutput_p.h | 1
-rw-r--r-- src/plugins/multimedia/wasm/mediacapture/qwasmcamera.cpp | 5
-rw-r--r-- src/plugins/multimedia/wasm/mediacapture/qwasmmediarecorder.cpp | 13
-rw-r--r-- src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp | 50
-rw-r--r-- src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader.cpp | 7
-rw-r--r-- src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder.cpp | 18
-rw-r--r-- src/plugins/multimedia/windows/player/mfplayersession.cpp | 6
-rw-r--r-- src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp | 5
-rw-r--r-- src/spatialaudio/CMakeLists.txt | 1
-rw-r--r-- src/spatialaudioquick3d/CMakeLists.txt | 1
-rw-r--r-- tests/auto/integration/CMakeLists.txt | 2
-rw-r--r-- tests/auto/integration/qaudiodecoderbackend/CMakeLists.txt | 8
-rw-r--r-- tests/auto/integration/qaudiodecoderbackend/tst_qaudiodecoderbackend.cpp | 144
-rw-r--r-- tests/auto/integration/qaudiosink/tst_qaudiosink.cpp | 18
-rw-r--r-- tests/auto/integration/qaudiosource/tst_qaudiosource.cpp | 18
-rw-r--r-- tests/auto/integration/qcamerabackend/tst_qcamerabackend.cpp | 78
-rw-r--r-- tests/auto/integration/qmediacapturesession/tst_qmediacapturesession.cpp | 231
-rw-r--r-- tests/auto/integration/qmediaframeinputsbackend/CMakeLists.txt | 22
-rw-r--r-- tests/auto/integration/qmediaframeinputsbackend/capturesessionfixture.cpp | 88
-rw-r--r-- tests/auto/integration/qmediaframeinputsbackend/capturesessionfixture.h | 49
-rw-r--r-- tests/auto/integration/qmediaframeinputsbackend/framegenerator.cpp | 148
-rw-r--r-- tests/auto/integration/qmediaframeinputsbackend/framegenerator.h | 82
-rw-r--r-- tests/auto/integration/qmediaframeinputsbackend/mediainfo.h | 96
-rw-r--r-- tests/auto/integration/qmediaframeinputsbackend/tst_qmediaframeinputsbackend.cpp | 411
-rw-r--r-- tests/auto/integration/qmediaframeinputsbackend/tst_qmediaframeinputsbackend.h | 47
-rw-r--r-- tests/auto/integration/qmediaplayerbackend/CMakeLists.txt | 21
-rw-r--r-- tests/auto/integration/qmediaplayerbackend/fixture.h | 16
-rw-r--r-- tests/auto/integration/qmediaplayerbackend/testdata/15s.mkv | bin 0 -> 61283 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerbackend/testdata/h264_avc1_yuv420p10le_tv_bt2020.mov | bin 0 -> 20164 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerbackend/testdata/multitrack-subtitle-start-at-zero.mkv | bin 0 -> 153449 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerbackend/testdata/multitrack.mkv | bin 0 -> 153452 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerbackend/testdata/subtitletest.mkv | bin 0 -> 17398 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerbackend/tst_qmediaplayerbackend.cpp | 1428
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/CMakeLists.txt | 30
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/README.md | 35
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/containers/supported/container.avi | bin 0 -> 11284 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/containers/supported/container.mkv | bin 0 -> 3019 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/containers/supported/container.mp4 | bin 0 -> 3280 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/containers/supported/container.mpeg | bin 0 -> 34816 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/containers/supported/container.wmv | bin 0 -> 29587 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/containers/unsupported/container.webp | bin 0 -> 2676 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/flipable.gif | bin 0 -> 131710 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_bgr0.mp4 | bin 0 -> 12335 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_bgr24.mp4 | bin 0 -> 12335 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_gray.mp4 | bin 0 -> 6289 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_gray10le.mp4 | bin 0 -> 6326 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_nv12.mp4 | bin 0 -> 7734 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_nv16.mp4 | bin 0 -> 8646 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_nv21.mp4 | bin 0 -> 7734 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_rgb24.mp4 | bin 0 -> 12335 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv420p.mp4 | bin 0 -> 7747 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv420p10.mp4 | bin 0 -> 7699 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv420p10le.mp4 | bin 0 -> 7699 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv422p.mp4 | bin 0 -> 8646 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv422p10.mp4 | bin 0 -> 8675 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv422p10le.mp4 | bin 0 -> 8675 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv444p.mp4 | bin 0 -> 7575 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv444p10.mp4 | bin 0 -> 7549 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuvj420p.mp4 | bin 0 -> 8081 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuvj422p.mp4 | bin 0 -> 9059 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuvj444p.mp4 | bin 0 -> 7939 bytes
-rw-r--r-- tests/auto/integration/qmediaplayerformatsupport/tst_qmediaplayerformatsupport.cpp | 124
-rw-r--r-- tests/auto/integration/qquickvideooutput/tst_qquickvideooutput.cpp | 2
-rw-r--r-- tests/auto/integration/qsoundeffect/CMakeLists.txt | 31
-rw-r--r-- tests/auto/integration/qsoundeffect/tst_qsoundeffect.cpp | 18
-rw-r--r-- tests/auto/integration/qvideoframebackend/CMakeLists.txt | 8
-rw-r--r-- tests/auto/integration/qvideoframebackend/tst_qvideoframebackend.cpp | 36
-rw-r--r-- tests/auto/integration/shared/mediabackendutils.h | 16
-rw-r--r-- tests/auto/integration/shared/mediafileselector.h | 17
-rw-r--r-- tests/auto/integration/shared/testvideosink.h | 7
-rw-r--r-- tests/auto/shared/qscopedenvironmentvariable.h | 29
-rw-r--r-- tests/auto/unit/mockbackend/qmockaudiodecoder.cpp | 4
-rw-r--r-- tests/auto/unit/mockbackend/qmockcamera.cpp | 6
-rw-r--r-- tests/auto/unit/mockbackend/qmockcamera.h | 3
-rw-r--r-- tests/auto/unit/mockbackend/qmockimagecapture.cpp | 2
-rw-r--r-- tests/auto/unit/mockbackend/qmockmediacapturesession.h | 8
-rw-r--r-- tests/auto/unit/mockbackend/qmockmediaencoder.h | 20
-rw-r--r-- tests/auto/unit/mockbackend/qmockmediaplayer.h | 33
-rw-r--r-- tests/auto/unit/mockbackend/qmocksurfacecapture.h | 10
-rw-r--r-- tests/auto/unit/mockbackend/qmockvideobuffer.h | 22
-rw-r--r-- tests/auto/unit/multimedia/CMakeLists.txt | 5
-rw-r--r-- tests/auto/unit/multimedia/gstreamer_backend/tst_gstreamer_backend.cpp | 240
-rw-r--r-- tests/auto/unit/multimedia/gstreamer_backend/tst_gstreamer_backend.h | 20
-rw-r--r-- tests/auto/unit/multimedia/qabstractvideobuffer/tst_qabstractvideobuffer.cpp | 43
-rw-r--r-- tests/auto/unit/multimedia/qaudiodecoder/tst_qaudiodecoder.cpp | 22
-rw-r--r-- tests/auto/unit/multimedia/qcamera/tst_qcamera.cpp | 56
-rw-r--r-- tests/auto/unit/multimedia/qcameradevice/tst_qcameradevice.cpp | 57
-rw-r--r-- tests/auto/unit/multimedia/qimagecapture/tst_qimagecapture.cpp | 10
-rw-r--r-- tests/auto/unit/multimedia/qmediacapture_gstreamer/CMakeLists.txt | 4
-rw-r--r-- tests/auto/unit/multimedia/qmediacapture_gstreamer/tst_qmediacapture_gstreamer.cpp | 143
-rw-r--r-- tests/auto/unit/multimedia/qmediametadata/CMakeLists.txt | 13
-rw-r--r-- tests/auto/unit/multimedia/qmediametadata/tst_qmediametadata.cpp | 96
-rw-r--r-- tests/auto/unit/multimedia/qmediaplayer/tst_qmediaplayer.cpp | 114
-rw-r--r-- tests/auto/unit/multimedia/qmediaplayer_gstreamer/CMakeLists.txt | 18
-rw-r--r-- tests/auto/unit/multimedia/qmediaplayer_gstreamer/testdata/color_matrix.mp4 | bin 0 -> 21412 bytes
-rw-r--r-- tests/auto/unit/multimedia/qmediaplayer_gstreamer/tst_qmediaplayer_gstreamer.cpp | 135
-rw-r--r-- tests/auto/unit/multimedia/qmediaplayer_gstreamer/tst_qmediaplayer_gstreamer.h | 46
-rw-r--r-- tests/auto/unit/multimedia/qmediarecorder/tst_qmediarecorder.cpp | 14
-rw-r--r-- tests/auto/unit/multimedia/qvideobuffers/tst_qvideobuffers.cpp | 148
-rw-r--r-- tests/auto/unit/multimedia/qvideoframe/tst_qvideoframe.cpp | 391
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/CMakeLists.txt | 9
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg | bin 16610 -> 12786 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_AdobeRgb_Full.png | bin 54758 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_AdobeRgb_Video.png | bin 54758 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT2020_Full.png | bin 54892 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT2020_Video.png | bin 54334 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT601_Full.png | bin 54742 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT601_Video.png | bin 54118 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT709_Full.png | bin 54957 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT709_Video.png | bin 54287 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_AdobeRgb_Full.png | bin 55061 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_AdobeRgb_Video.png | bin 55061 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT2020_Full.png | bin 55125 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT2020_Video.png | bin 54440 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT601_Full.png | bin 54971 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT601_Video.png | bin 54241 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT709_Full.png | bin 55235 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT709_Video.png | bin 54445 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_adobergb_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_adobergb_full_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_adobergb_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_adobergb_video_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt2020_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt2020_full_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt2020_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt2020_video_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt601_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt601_full_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt601_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt601_video_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt709_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt709_full_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt709_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt709_video_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_adobergb_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_adobergb_full_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_adobergb_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_adobergb_video_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt2020_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt2020_full_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt2020_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt2020_video_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt601_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt601_full_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt601_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt601_video_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt709_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt709_full_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt709_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt709_video_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_adobergb_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_adobergb_full_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_adobergb_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_adobergb_video_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt2020_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt2020_full_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt2020_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt2020_video_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt601_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt601_full_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt601_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt601_video_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt709_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt709_full_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt709_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt709_video_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_adobergb_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_adobergb_full_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_adobergb_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_adobergb_video_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt2020_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt2020_full_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt2020_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt2020_video_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt601_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt601_full_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt601_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt601_video_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt709_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt709_full_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt709_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt709_video_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_adobergb_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_adobergb_full_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_adobergb_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_adobergb_video_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt2020_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt2020_full_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt2020_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt2020_video_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt601_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt601_full_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt601_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt601_video_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt709_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt709_full_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt709_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt709_video_cpu.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_adobergb_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_adobergb_full_cpu.png | bin 0 -> 36154 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_adobergb_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_adobergb_video_cpu.png | bin 0 -> 36154 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt2020_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt2020_full_cpu.png | bin 0 -> 36154 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt2020_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt2020_video_cpu.png | bin 0 -> 36154 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt601_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt601_full_cpu.png | bin 0 -> 36154 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt601_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt601_video_cpu.png | bin 0 -> 36154 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt709_full.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt709_full_cpu.png | bin 0 -> 36154 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt709_video.png | bin 0 -> 41243 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt709_video_cpu.png | bin 0 -> 36154 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_AdobeRgb_Full.png | bin 54758 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_AdobeRgb_Video.png | bin 54758 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT2020_Full.png | bin 54892 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT2020_Video.png | bin 54334 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT601_Full.png | bin 54742 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT601_Video.png | bin 54118 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT709_Full.png | bin 54957 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT709_Video.png | bin 54287 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_adobergb_full.png | bin 0 -> 40898 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_adobergb_full_cpu.png | bin 0 -> 35874 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_adobergb_video.png | bin 0 -> 40898 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_adobergb_video_cpu.png | bin 0 -> 35874 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt2020_full.png | bin 0 -> 40908 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt2020_full_cpu.png | bin 0 -> 35882 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt2020_video.png | bin 0 -> 40978 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt2020_video_cpu.png | bin 0 -> 35946 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt601_full.png | bin 0 -> 40854 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt601_full_cpu.png | bin 0 -> 35877 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt601_video.png | bin 0 -> 40904 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt601_video_cpu.png | bin 0 -> 35900 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt709_full.png | bin 0 -> 40945 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt709_full_cpu.png | bin 0 -> 35877 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt709_video.png | bin 0 -> 40959 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt709_video_cpu.png | bin 0 -> 35898 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_AdobeRgb_Full.png | bin 54779 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_AdobeRgb_Video.png | bin 54779 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT2020_Full.png | bin 54922 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT2020_Video.png | bin 54361 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT601_Full.png | bin 54766 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT601_Video.png | bin 54140 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT709_Full.png | bin 54992 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT709_Video.png | bin 54323 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_adobergb_full.png | bin 0 -> 40934 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_adobergb_full_cpu.png | bin 0 -> 35874 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_adobergb_video.png | bin 0 -> 40934 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_adobergb_video_cpu.png | bin 0 -> 35874 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt2020_full.png | bin 0 -> 40912 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt2020_full_cpu.png | bin 0 -> 35882 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt2020_video.png | bin 0 -> 41004 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt2020_video_cpu.png | bin 0 -> 35946 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt601_full.png | bin 0 -> 40870 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt601_full_cpu.png | bin 0 -> 35877 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt601_video.png | bin 0 -> 40924 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt601_video_cpu.png | bin 0 -> 35900 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt709_full.png | bin 0 -> 40966 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt709_full_cpu.png | bin 0 -> 35877 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt709_video.png | bin 0 -> 40975 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt709_video_cpu.png | bin 0 -> 35898 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_AdobeRgb_Full.png | bin 54758 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_AdobeRgb_Video.png | bin 54758 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT2020_Full.png | bin 54892 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT2020_Video.png | bin 54334 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT601_Full.png | bin 54742 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT601_Video.png | bin 54118 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT709_Full.png | bin 54957 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT709_Video.png | bin 54287 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_adobergb_full.png | bin 0 -> 40898 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_adobergb_full_cpu.png | bin 0 -> 35874 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_adobergb_video.png | bin 0 -> 40898 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_adobergb_video_cpu.png | bin 0 -> 35874 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt2020_full.png | bin 0 -> 40908 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt2020_full_cpu.png | bin 0 -> 35882 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt2020_video.png | bin 0 -> 40978 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt2020_video_cpu.png | bin 0 -> 35946 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt601_full.png | bin 0 -> 40854 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt601_full_cpu.png | bin 0 -> 35877 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt601_video.png | bin 0 -> 40904 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt601_video_cpu.png | bin 0 -> 35900 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt709_full.png | bin 0 -> 40945 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt709_full_cpu.png | bin 0 -> 35877 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt709_video.png | bin 0 -> 40959 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt709_video_cpu.png | bin 0 -> 35898 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_AdobeRgb_Full.png | bin 54783 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_AdobeRgb_Video.png | bin 54783 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT2020_Full.png | bin 54913 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT2020_Video.png | bin 54358 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT601_Full.png | bin 54751 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT601_Video.png | bin 54140 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT709_Full.png | bin 54991 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT709_Video.png | bin 54314 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_adobergb_full.png | bin 0 -> 40908 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_adobergb_full_cpu.png | bin 0 -> 35874 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_adobergb_video.png | bin 0 -> 40908 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_adobergb_video_cpu.png | bin 0 -> 35874 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt2020_full.png | bin 0 -> 40912 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt2020_full_cpu.png | bin 0 -> 35882 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt2020_video.png | bin 0 -> 40987 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt2020_video_cpu.png | bin 0 -> 35946 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt601_full.png | bin 0 -> 40877 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt601_full_cpu.png | bin 0 -> 35877 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt601_video.png | bin 0 -> 40918 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt601_video_cpu.png | bin 0 -> 35900 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt709_full.png | bin 0 -> 40967 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt709_full_cpu.png | bin 0 -> 35877 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt709_video.png | bin 0 -> 40961 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt709_video_cpu.png | bin 0 -> 35898 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_AdobeRgb_Full.png | bin 54758 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_AdobeRgb_Video.png | bin 54758 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT2020_Full.png | bin 54892 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT2020_Video.png | bin 54334 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT601_Full.png | bin 54742 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT601_Video.png | bin 54118 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT709_Full.png | bin 54957 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT709_Video.png | bin 54287 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_adobergb_full.png | bin 0 -> 40898 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_adobergb_full_cpu.png | bin 0 -> 35874 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_adobergb_video.png | bin 0 -> 40898 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_adobergb_video_cpu.png | bin 0 -> 35874 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt2020_full.png | bin 0 -> 40908 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt2020_full_cpu.png | bin 0 -> 35882 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt2020_video.png | bin 0 -> 40978 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt2020_video_cpu.png | bin 0 -> 35946 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt601_full.png | bin 0 -> 40854 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt601_full_cpu.png | bin 0 -> 35877 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt601_video.png | bin 0 -> 40904 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt601_video_cpu.png | bin 0 -> 35900 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt709_full.png | bin 0 -> 40945 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt709_full_cpu.png | bin 0 -> 35877 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt709_video.png | bin 0 -> 40959 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt709_video_cpu.png | bin 0 -> 35898 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_AdobeRgb_Full.png | bin 54758 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_AdobeRgb_Video.png | bin 54758 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT2020_Full.png | bin 54892 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT2020_Video.png | bin 54334 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT601_Full.png | bin 54742 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT601_Video.png | bin 54118 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT709_Full.png | bin 54957 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT709_Video.png | bin 54287 -> 0 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_adobergb_full.png | bin 0 -> 40898 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_adobergb_full_cpu.png | bin 0 -> 35874 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_adobergb_video.png | bin 0 -> 40898 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_adobergb_video_cpu.png | bin 0 -> 35874 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt2020_full.png | bin 0 -> 40908 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt2020_full_cpu.png | bin 0 -> 35882 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt2020_video.png | bin 0 -> 40978 bytes
-rw-r--r-- tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt2020_video_cpu.png | bin 0 -> 35946 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt601_full.pngbin0 -> 40854 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt601_full_cpu.pngbin0 -> 35877 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt601_video.pngbin0 -> 40904 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt601_video_cpu.pngbin0 -> 35900 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt709_full.pngbin0 -> 40945 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt709_full_cpu.pngbin0 -> 35877 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt709_video.pngbin0 -> 40959 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt709_video_cpu.pngbin0 -> 35898 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_adobergb_full.pngbin0 -> 40927 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_adobergb_full_cpu.pngbin0 -> 35927 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_adobergb_video.pngbin0 -> 40927 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_adobergb_video_cpu.pngbin0 -> 35927 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt2020_full.pngbin0 -> 40923 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt2020_full_cpu.pngbin0 -> 35936 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt2020_video.pngbin0 -> 40964 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt2020_video_cpu.pngbin0 -> 35927 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt601_full.pngbin0 -> 40897 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt601_full_cpu.pngbin0 -> 35887 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt601_video.pngbin0 -> 40914 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt601_video_cpu.pngbin0 -> 35881 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt709_full.pngbin0 -> 40936 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt709_full_cpu.pngbin0 -> 35887 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt709_video.pngbin0 -> 40991 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt709_video_cpu.pngbin0 -> 35933 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_adobergb_full.pngbin0 -> 40913 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_adobergb_full_cpu.pngbin0 -> 35941 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_adobergb_video.pngbin0 -> 40913 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_adobergb_video_cpu.pngbin0 -> 35941 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt2020_full.pngbin0 -> 40969 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt2020_full_cpu.pngbin0 -> 35925 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt2020_video.pngbin0 -> 40951 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt2020_video_cpu.pngbin0 -> 35968 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt601_full.pngbin0 -> 40860 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt601_full_cpu.pngbin0 -> 35857 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt601_video.pngbin0 -> 40946 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt601_video_cpu.pngbin0 -> 35908 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt709_full.pngbin0 -> 40942 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt709_full_cpu.pngbin0 -> 35857 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt709_video.pngbin0 -> 40966 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt709_video_cpu.pngbin0 -> 35945 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_adobergb_full.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_adobergb_full_cpu.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_adobergb_video.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_adobergb_video_cpu.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt2020_full.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt2020_full_cpu.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt2020_video.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt2020_video_cpu.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt601_full.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt601_full_cpu.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt601_video.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt601_video_cpu.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt709_full.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt709_full_cpu.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt709_video.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt709_video_cpu.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_adobergb_full.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_adobergb_full_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_adobergb_video.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_adobergb_video_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt2020_full.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt2020_full_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt2020_video.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt2020_video_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt601_full.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt601_full_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt601_video.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt601_video_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt709_full.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt709_full_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt709_video.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt709_video_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_adobergb_full.pngbin0 -> 39859 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_adobergb_full_cpu.pngbin0 -> 36094 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_adobergb_video.pngbin0 -> 39859 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_adobergb_video_cpu.pngbin0 -> 36094 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt2020_full.pngbin0 -> 39864 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt2020_full_cpu.pngbin0 -> 36087 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt2020_video.pngbin0 -> 39919 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt2020_video_cpu.pngbin0 -> 36215 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt601_full.pngbin0 -> 39789 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt601_full_cpu.pngbin0 -> 36021 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt601_video.pngbin0 -> 39890 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt601_video_cpu.pngbin0 -> 36258 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt709_full.pngbin0 -> 39879 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt709_full_cpu.pngbin0 -> 36021 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt709_video.pngbin0 -> 39951 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt709_video_cpu.pngbin0 -> 36189 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_adobergb_full.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_adobergb_full_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_adobergb_video.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_adobergb_video_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt2020_full.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt2020_full_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt2020_video.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt2020_video_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt601_full.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt601_full_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt601_video.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt601_video_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt709_full.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt709_full_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt709_video.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt709_video_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_adobergb_full.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_adobergb_full_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_adobergb_video.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_adobergb_video_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt2020_full.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt2020_full_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt2020_video.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt2020_video_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt601_full.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt601_full_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt601_video.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt601_video_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt709_full.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt709_full_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt709_video.pngbin0 -> 41243 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt709_video_cpu.pngbin0 -> 36154 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_adobergb_full.pngbin0 -> 25240 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_adobergb_full_cpu.pngbin0 -> 23434 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_adobergb_video.pngbin0 -> 25240 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_adobergb_video_cpu.pngbin0 -> 23434 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt2020_full.pngbin0 -> 25544 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt2020_full_cpu.pngbin0 -> 23596 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt2020_video.pngbin0 -> 24030 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt2020_video_cpu.pngbin0 -> 22241 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt601_full.pngbin0 -> 25557 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt601_full_cpu.pngbin0 -> 23545 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt601_video.pngbin0 -> 23884 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt601_video_cpu.pngbin0 -> 22208 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt709_full.pngbin0 -> 25557 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt709_full_cpu.pngbin0 -> 23545 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt709_video.pngbin0 -> 24021 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt709_video_cpu.pngbin0 -> 22392 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_adobergb_full.pngbin0 -> 25364 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_adobergb_full_cpu.pngbin0 -> 23481 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_adobergb_video.pngbin0 -> 25364 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_adobergb_video_cpu.pngbin0 -> 23481 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt2020_full.pngbin0 -> 25430 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt2020_full_cpu.pngbin0 -> 23425 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt2020_video.pngbin0 -> 23981 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt2020_video_cpu.pngbin0 -> 22350 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt601_full.pngbin0 -> 25497 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt601_full_cpu.pngbin0 -> 23568 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt601_video.pngbin0 -> 23904 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt601_video_cpu.pngbin0 -> 22203 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt709_full.pngbin0 -> 25497 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt709_full_cpu.pngbin0 -> 23568 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt709_video.pngbin0 -> 23979 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt709_video_cpu.pngbin0 -> 22331 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_adobergb_full.pngbin0 -> 40919 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_adobergb_video.pngbin0 -> 40919 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt2020_full.pngbin0 -> 40952 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt2020_video.pngbin0 -> 40967 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt601_full.pngbin0 -> 40881 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt601_video.pngbin0 -> 40962 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt709_full.pngbin0 -> 40954 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt709_video.pngbin0 -> 40987 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_adobergb_full.pngbin0 -> 40898 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_adobergb_full_cpu.pngbin0 -> 35874 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_adobergb_video.pngbin0 -> 40898 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_adobergb_video_cpu.pngbin0 -> 35874 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt2020_full.pngbin0 -> 40908 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt2020_full_cpu.pngbin0 -> 35882 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt2020_video.pngbin0 -> 40978 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt2020_video_cpu.pngbin0 -> 35946 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt601_full.pngbin0 -> 40854 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt601_full_cpu.pngbin0 -> 35877 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt601_video.pngbin0 -> 40904 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt601_video_cpu.pngbin0 -> 35900 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt709_full.pngbin0 -> 40945 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt709_full_cpu.pngbin0 -> 35877 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt709_video.pngbin0 -> 40959 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt709_video_cpu.pngbin0 -> 35898 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_adobergb_full.pngbin0 -> 41163 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_adobergb_full_cpu.pngbin0 -> 36094 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_adobergb_video.pngbin0 -> 41163 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_adobergb_video_cpu.pngbin0 -> 36094 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt2020_full.pngbin0 -> 41160 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt2020_full_cpu.pngbin0 -> 36087 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt2020_video.pngbin0 -> 41199 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt2020_video_cpu.pngbin0 -> 36215 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt601_full.pngbin0 -> 41000 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt601_full_cpu.pngbin0 -> 36021 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt601_video.pngbin0 -> 41177 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt601_video_cpu.pngbin0 -> 36258 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt709_full.pngbin0 -> 41183 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt709_full_cpu.pngbin0 -> 36021 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt709_video.pngbin0 -> 41209 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt709_video_cpu.pngbin0 -> 36189 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_adobergb_full.pngbin0 -> 39859 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_adobergb_full_cpu.pngbin0 -> 36094 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_adobergb_video.pngbin0 -> 39859 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_adobergb_video_cpu.pngbin0 -> 36094 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt2020_full.pngbin0 -> 39864 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt2020_full_cpu.pngbin0 -> 36087 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt2020_video.pngbin0 -> 39919 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt2020_video_cpu.pngbin0 -> 36215 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt601_full.pngbin0 -> 39789 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt601_full_cpu.pngbin0 -> 36021 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt601_video.pngbin0 -> 39890 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt601_video_cpu.pngbin0 -> 36258 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt709_full.pngbin0 -> 39879 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt709_full_cpu.pngbin0 -> 36021 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt709_video.pngbin0 -> 39951 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt709_video_cpu.pngbin0 -> 36189 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_adobergb_full.pngbin0 -> 40898 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_adobergb_full_cpu.pngbin0 -> 35874 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_adobergb_video.pngbin0 -> 40898 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_adobergb_video_cpu.pngbin0 -> 35874 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt2020_full.pngbin0 -> 40908 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt2020_full_cpu.pngbin0 -> 35882 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt2020_video.pngbin0 -> 40978 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt2020_video_cpu.pngbin0 -> 35946 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt601_full.pngbin0 -> 40854 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt601_full_cpu.pngbin0 -> 35877 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt601_video.pngbin0 -> 40904 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt601_video_cpu.pngbin0 -> 35900 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt709_full.pngbin0 -> 40945 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt709_full_cpu.pngbin0 -> 35877 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt709_video.pngbin0 -> 40959 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt709_video_cpu.pngbin0 -> 35898 bytes
-rw-r--r--tests/auto/unit/multimedia/qvideoframecolormanagement/tst_qvideoframecolormanagement.cpp334
-rw-r--r--tests/auto/unit/multimedia/qvideoframeformat/tst_qvideoframeformat.cpp22
-rw-r--r--tests/auto/unit/multimedia/qwavedecoder/tst_qwavedecoder.cpp8
-rw-r--r--tests/auto/unit/multimediawidgets/qcamerawidgets/tst_qcamerawidgets.cpp2
-rw-r--r--tests/auto/unit/multimediawidgets/qgraphicsvideoitem/tst_qgraphicsvideoitem.cpp4
-rw-r--r--tests/auto/unit/multimediawidgets/qvideowidget/tst_qvideowidget.cpp4
-rw-r--r--tests/manual/CMakeLists.txt6
-rw-r--r--tests/manual/gstreamer-custom-camera-rtp/CMakeLists.txt39
-rw-r--r--tests/manual/gstreamer-custom-camera-rtp/Info.plist.in46
-rw-r--r--tests/manual/gstreamer-custom-camera-rtp/gstreamer-custom-camera-rtp.cpp57
-rw-r--r--tests/manual/gstreamer-custom-camera/CMakeLists.txt37
-rw-r--r--tests/manual/gstreamer-custom-camera/Info.plist.in46
-rw-r--r--tests/manual/gstreamer-custom-camera/gstreamer-custom-camera.cpp50
-rw-r--r--tests/manual/mediaformats/CMakeLists.txt37
-rw-r--r--tests/manual/mediaformats/main.cpp87
-rw-r--r--tests/manual/minimal-player/minimal-player.cpp13
869 files changed, 13688 insertions, 6392 deletions
diff --git a/.cmake.conf b/.cmake.conf
index dc1d7a924..6792234a2 100644
--- a/.cmake.conf
+++ b/.cmake.conf
@@ -1,4 +1,4 @@
-set(QT_REPO_MODULE_VERSION "6.8.0")
+set(QT_REPO_MODULE_VERSION "6.9.0")
set(QT_REPO_MODULE_PRERELEASE_VERSION_SEGMENT "alpha1")
set(QT_EXTRA_INTERNAL_TARGET_DEFINES "QT_NO_AS_CONST=1")
list(APPEND QT_EXTRA_INTERNAL_TARGET_DEFINES "QT_NO_FOREACH=1")
diff --git a/cmake/FindFFmpeg.cmake b/cmake/FindFFmpeg.cmake
index 86be24dd7..ecf9b07cb 100644
--- a/cmake/FindFFmpeg.cmake
+++ b/cmake/FindFFmpeg.cmake
@@ -201,6 +201,7 @@ endmacro()
# Clear the previously cached variables, because they are recomputed every time
# the Find script is included.
unset(FFMPEG_SHARED_LIBRARIES CACHE)
+unset(FFMPEG_STUBS CACHE)
# Check for components.
foreach (_component ${FFmpeg_FIND_COMPONENTS})
@@ -225,33 +226,34 @@ foreach (_component ${FFmpeg_FIND_COMPONENTS})
endif()
endforeach()
-if (NOT FFMPEG_SHARED_COMPONENTS AND (ANDROID OR LINUX))
- set(ENABLE_DYNAMIC_RESOLVE_OPENSSL_SYMBOLS TRUE CACHE INTERNAL "")
-endif()
-set(ENABLE_DYNAMIC_RESOLVE_VAAPI_SYMBOLS ${LINUX} CACHE INTERNAL "")
+function(qt_internal_multimedia_try_add_dynamic_resolve_dependency _component dep)
+ set(dynamic_resolve_added FALSE PARENT_SCOPE)
-function(__try_add_dynamic_resolve_dependency dep added)
- set(added TRUE PARENT_SCOPE)
+ if (NOT ANDROID AND NOT LINUX)
+ return()
+ endif()
- if(ENABLE_DYNAMIC_RESOLVE_OPENSSL_SYMBOLS AND
- (${dep} STREQUAL "ssl" OR ${dep} STREQUAL "crypto"))
- set(DYNAMIC_RESOLVE_OPENSSL_SYMBOLS TRUE CACHE INTERNAL "")
- elseif(ENABLE_DYNAMIC_RESOLVE_VAAPI_SYMBOLS AND ${dep} STREQUAL "va")
- set(DYNAMIC_RESOLVE_VAAPI_SYMBOLS TRUE CACHE INTERNAL "")
- elseif(ENABLE_DYNAMIC_RESOLVE_VAAPI_SYMBOLS AND ${dep} STREQUAL "va-drm")
- set(DYNAMIC_RESOLVE_VA_DRM_SYMBOLS TRUE CACHE INTERNAL "")
- elseif(ENABLE_DYNAMIC_RESOLVE_VAAPI_SYMBOLS AND ${dep} STREQUAL "va-x11")
- set(DYNAMIC_RESOLVE_VA_X11_SYMBOLS TRUE CACHE INTERNAL "")
- else()
- set(added FALSE PARENT_SCOPE)
+ set(supported_stubs "ssl|crypto|va|va-drm|va-x11")
+ if(${_component}_SHARED_LIBRARIES)
+ set(stub_prefix "Qt${PROJECT_VERSION_MAJOR}FFmpegStub-")
+ if (${dep} MATCHES "^${stub_prefix}(${supported_stubs})$")
+ string(REPLACE "${stub_prefix}" "" dep "${dep}")
+ set(FFMPEG_STUBS ${FFMPEG_STUBS} ${dep} CACHE INTERNAL "")
+
+ set(dynamic_resolve_added TRUE PARENT_SCOPE)
+ endif()
+ elseif (${dep} MATCHES "^(${supported_stubs})$")
+ set(FFMPEG_STUBS ${FFMPEG_STUBS} ${dep} CACHE INTERNAL "")
+ set(dynamic_resolve_added TRUE PARENT_SCOPE)
endif()
endfunction()
# Function parses package config file to find the static library dependencies
# and adds them to the target library.
-function(__ffmpeg_internal_set_dependencies lib)
- set(PC_FILE ${FFMPEG_DIR}/lib/pkgconfig/lib${lib}.pc)
+function(__ffmpeg_internal_set_dependencies _component)
+ string(TOLOWER ${_component} lib)
+ set(PC_FILE ${${_component}_LIBRARY_DIR}/pkgconfig/lib${lib}.pc)
if(EXISTS ${PC_FILE})
file(READ ${PC_FILE} pcfile)
@@ -270,18 +272,22 @@ function(__ffmpeg_internal_set_dependencies lib)
foreach(dependency ${deps_no_suffix})
string(REGEX REPLACE ${prefix_l} "" dependency ${dependency})
if(NOT ${lib} STREQUAL ${dependency})
- __try_add_dynamic_resolve_dependency(${dependency} added)
- if(NOT added)
+ qt_internal_multimedia_try_add_dynamic_resolve_dependency(${_component} ${dependency})
+ if(NOT dynamic_resolve_added AND NOT ${_component}_SHARED_LIBRARIES)
target_link_libraries(FFmpeg::${lib} INTERFACE ${dependency})
endif()
endif()
endforeach()
- list(APPEND deps_lib_suffix ${libs_dependency_lib} ${libs_private_dependency_lib})
- foreach(dependency ${deps_lib_suffix})
- string(REGEX REPLACE ${suffix_lib} "" dependency ${dependency})
- target_link_libraries(FFmpeg::${lib} INTERFACE ${dependency})
- endforeach()
+ if(NOT ${_component}_SHARED_LIBRARIES)
+ list(APPEND deps_lib_suffix ${libs_dependency_lib} ${libs_private_dependency_lib})
+ foreach(dependency ${deps_lib_suffix})
+ string(REGEX REPLACE ${suffix_lib} "" dependency ${dependency})
+ target_link_libraries(FFmpeg::${lib} INTERFACE ${dependency})
+ endforeach()
+ endif()
+ else()
+ message(WARNING "FFmpeg pc file ${PC_FILE} is not found")
endif()
endfunction()
@@ -302,9 +308,8 @@ endfunction()
INTERFACE_LINK_LIBRARIES "${${_component}_LIBRARY_NAME}"
INTERFACE_LINK_DIRECTORIES "${${_component}_LIBRARY_DIR}"
)
- if(NOT ${_component}_SHARED_LIBRARIES)
- __ffmpeg_internal_set_dependencies(${_lowerComponent})
- endif()
+
+ __ffmpeg_internal_set_dependencies(${_component})
target_link_libraries(FFmpeg::${_lowerComponent} INTERFACE "${${_component}_LIBRARY_NAME}")
if (UNIX AND NOT APPLE)
target_link_options(FFmpeg::${_lowerComponent} INTERFACE "-Wl,--exclude-libs=lib${_lowerComponent}")
@@ -317,13 +322,17 @@ endfunction()
list(REMOVE_DUPLICATES FFMPEG_INCLUDE_DIRS)
list(REMOVE_DUPLICATES FFMPEG_LIBRARY_DIRS)
list(REMOVE_DUPLICATES FFMPEG_SHARED_LIBRARIES)
+ list(REMOVE_DUPLICATES FFMPEG_STUBS)
message(STATUS "FFmpeg shared libs: ${FFMPEG_SHARED_LIBRARIES}")
+ message(STATUS "FFmpeg stubs: ${FFMPEG_STUBS}")
# cache the vars.
set(FFMPEG_SHARED_LIBRARIES ${FFMPEG_SHARED_LIBRARIES} CACHE STRING "The FFmpeg dynamic libraries." FORCE)
+ set(FFMPEG_STUBS ${FFMPEG_STUBS} CACHE STRING "The FFmpeg stubs." FORCE)
mark_as_advanced(FFMPEG_SHARED_LIBRARIES)
+ mark_as_advanced(FFMPEG_STUBS)
# endif ()
list(LENGTH FFMPEG_LIBRARY_DIRS DIRS_COUNT)
diff --git a/cmake/FindVAAPI.cmake b/cmake/FindVAAPI.cmake
index a3ea6cd58..b1170dc8e 100644
--- a/cmake/FindVAAPI.cmake
+++ b/cmake/FindVAAPI.cmake
@@ -4,13 +4,40 @@
find_package(PkgConfig QUIET)
+function(qt_internal_multimedia_set_va_outputs component include_dir lib_path)
+ if ("${component}" STREQUAL "VA")
+ set(VAAPI_INCLUDE_DIR "${include_dir}" CACHE INTERNAL "")
+ get_filename_component(lib_realpath "${lib_path}" REALPATH)
+
+ string(REGEX MATCH "[0-9]+(\\.[0-9]+)*$" VAAPI_SUFFIX "${lib_realpath}")
+ set(VAAPI_SUFFIX "${VAAPI_SUFFIX}" CACHE INTERNAL "")
+
+ mark_as_advanced(VAAPI_SUFFIX VAAPI_INCLUDE_DIR)
+ endif()
+endfunction()
+
function(find_component component prefix header library)
if(NOT TARGET VAAPI::${component})
string(TOUPPER ${component} upper)
- pkg_check_modules(PC_VAAPI_${upper} ${prefix} IMPORTED_TARGET)
+ pkg_search_module(PC_VAAPI_${upper} ${prefix} IMPORTED_TARGET)
if(TARGET PkgConfig::PC_VAAPI_${upper})
add_library(VAAPI::${component} INTERFACE IMPORTED)
target_link_libraries(VAAPI::${component} INTERFACE PkgConfig::PC_VAAPI_${upper})
+
+ if (NOT PC_VAAPI_${upper}_LINK_LIBRARIES)
+ get_target_property(PC_VAAPI_${upper}_LINK_LIBRARIES PkgConfig::PC_VAAPI_${upper} INTERFACE_LINK_LIBRARIES)
+ message(STATUS "PC_VAAPI_${upper}_LINK_LIBRARIES is not defined by PkgConfig; "
+ "Get the value from target properties: ${PC_VAAPI_${upper}_LINK_LIBRARIES}")
+ endif()
+
+ foreach (lib_path ${PC_VAAPI_${upper}_LINK_LIBRARIES})
+ get_filename_component(lib_name "${lib_path}" NAME_WLE)
+ if (${lib_name} STREQUAL ${prefix})
+ qt_internal_multimedia_set_va_outputs(${component}
+ "${PC_VAAPI_${upper}_INCLUDEDIR}" "${lib_path}")
+ break()
+ endif()
+ endforeach()
else()
find_path(VAAPI_${component}_INCLUDE_DIR
NAMES ${header}
@@ -25,6 +52,9 @@ function(find_component component prefix header library)
target_link_libraries(VAAPI::${component} INTERFACE ${VAAPI_${component}_LIBRARY})
endif()
mark_as_advanced(VAAPI_${component}_INCLUDE_DIR VAAPI_${component}_LIBRARY)
+
+ qt_internal_multimedia_set_va_outputs(${component}
+ "${VAAPI_${component}_INCLUDE_DIR}" "${VAAPI_${component}_LIBRARY}")
endif()
endif()
@@ -48,6 +78,8 @@ find_package_handle_standard_args(VAAPI
REQUIRED_VARS
VAAPI_VA_FOUND
VAAPI_DRM_FOUND
+ VAAPI_INCLUDE_DIR
+ VAAPI_SUFFIX
HANDLE_COMPONENTS
)
diff --git a/coin/instructions/run_ffmpeg_backend_tests.yaml b/coin/instructions/run_ffmpeg_backend_tests.yaml
index 92096bd33..7d2bf2ba2 100644
--- a/coin/instructions/run_ffmpeg_backend_tests.yaml
+++ b/coin/instructions/run_ffmpeg_backend_tests.yaml
@@ -18,21 +18,11 @@ enable_if:
env_var: TARGET_OSVERSION_COIN
contains_value: android_any
- condition: runtime
+ env_var: TARGET_OSVERSION_COIN
+ contains_value: windows
+ - condition: runtime
env_var: NON_QTBASE_CMAKE_ARGS
contains_value: "-DFFMPEG_DIR=/"
- - condition: and
- conditions:
- - condition: runtime
- env_var: TARGET_OS_COIN
- equals_value: windows
- - condition: or
- conditions:
- - condition: property
- property: target.arch
- not_equals_value: arm64
- - condition: runtime
- env_var: NON_QTBASE_CMAKE_ARGS
- contains_value: "-DFFMPEG_DIR=C:"
instructions:
- type: EnvironmentVariable
variableName: QT_MEDIA_BACKEND
diff --git a/coin/module_config.yaml b/coin/module_config.yaml
index baafc4ca4..101c18b3c 100644
--- a/coin/module_config.yaml
+++ b/coin/module_config.yaml
@@ -9,5 +9,20 @@ instructions:
- !include "{{qt/qtbase}}/coin_module_build_template_v2.yaml"
Test:
+ - type: Group
+ enable_if:
+ condition: or
+ conditions:
+ - condition: property
+ property: id
+ contains_value: macos-12-arm
+ - condition: property
+ property: id
+ contains_value: macos-13-arm
+ instructions:
+ - type: EnvironmentVariable
+ variableName: COIN_CTEST_IGNORE_EXIT_CODE
+ variableValue: "1"
+
- !include "{{qt/qtmultimedia}}/run_ffmpeg_backend_tests.yaml"
- !include "{{qt/qtbase}}/coin_module_test_docs.yaml"
diff --git a/dependencies.yaml b/dependencies.yaml
index ec2753928..86d7cfee4 100644
--- a/dependencies.yaml
+++ b/dependencies.yaml
@@ -1,13 +1,13 @@
dependencies:
../qtbase:
- ref: f0633e823796775d2c019363ca4f1cb008851402
+ ref: 08c6de0c5d646a1c99834408ccd257f60f7624db
required: true
../qtdeclarative:
- ref: 1635ca51f018bbb8d1ea5069a7f2ed8503be8cb9
+ ref: 53b3a0d41545106fdd82cedc42393ff38d70f1fd
required: false
../qtquick3d:
- ref: f2e9ce22f6c265f3b7aea34d2c1e6aad9c010779
+ ref: 287389639f69e02eb264212d767d229b7e90519c
required: false
../qtshadertools:
- ref: e512987ff675fee0b7aadf80a548e167a1f7d5d2
+ ref: 0416d2d40954211dd4519a54899322d66f231078
required: true
diff --git a/examples/multimedia/audiodevices/audiodevices.cpp b/examples/multimedia/audiodevices/audiodevices.cpp
index 30f52e0c9..d211189fd 100644
--- a/examples/multimedia/audiodevices/audiodevices.cpp
+++ b/examples/multimedia/audiodevices/audiodevices.cpp
@@ -70,7 +70,6 @@ void AudioTest::init()
}
#endif
m_devices->videoInputs();
- qDebug() << "<<<<<<<<<<<<<<<<<<";
QMediaFormat().supportedFileFormats(QMediaFormat::Encode);
connect(testButton, &QPushButton::clicked, this, &AudioTest::test);
connect(modeBox, QOverload<int>::of(&QComboBox::activated), this, &AudioTest::modeChanged);
@@ -172,19 +171,19 @@ void AudioTest::populateTable()
allFormatsTable->setRowCount(row + 1);
QTableWidgetItem *sampleTypeItem = new QTableWidgetItem(toString(sampleFormat));
- allFormatsTable->setItem(row, 2, sampleTypeItem);
+ allFormatsTable->setItem(row, 0, sampleTypeItem);
QTableWidgetItem *sampleRateItem =
new QTableWidgetItem(QStringLiteral("%1 - %2")
.arg(m_deviceInfo.minimumSampleRate())
.arg(m_deviceInfo.maximumSampleRate()));
- allFormatsTable->setItem(row, 0, sampleRateItem);
+ allFormatsTable->setItem(row, 1, sampleRateItem);
QTableWidgetItem *channelsItem =
new QTableWidgetItem(QStringLiteral("%1 - %2")
.arg(m_deviceInfo.minimumChannelCount())
.arg(m_deviceInfo.maximumChannelCount()));
- allFormatsTable->setItem(row, 1, channelsItem);
+ allFormatsTable->setItem(row, 2, channelsItem);
++row;
}
diff --git a/examples/multimedia/camera/android/AndroidManifest.xml b/examples/multimedia/camera/android/AndroidManifest.xml
index 4af2fe92f..9f77579b4 100644
--- a/examples/multimedia/camera/android/AndroidManifest.xml
+++ b/examples/multimedia/camera/android/AndroidManifest.xml
@@ -4,12 +4,7 @@
android:installLocation="auto"
android:versionCode="-- %%INSERT_VERSION_CODE%% --"
android:versionName="-- %%INSERT_VERSION_NAME%% --">
- <!-- The comment below will be replaced with dependencies permissions upon deployment.
- Remove the comment if you do not require these default permissions. -->
<!-- %%INSERT_PERMISSIONS -->
-
- <!-- The comment below will be replaced with dependencies permissions upon deployment.
- Remove the comment if you do not require these default features. -->
<!-- %%INSERT_FEATURES -->
<supports-screens
@@ -28,7 +23,6 @@
<activity
android:name="org.qtproject.qt.android.bindings.QtActivity"
android:configChanges="orientation|uiMode|screenLayout|screenSize|smallestScreenSize|layoutDirection|locale|fontScale|keyboard|keyboardHidden|navigation|mcc|mnc|density"
- android:label="-- %%INSERT_APP_NAME%% --"
android:launchMode="singleTop"
android:screenOrientation="portrait"
android:exported="true">
@@ -36,36 +30,10 @@
<action android:name="android.intent.action.MAIN"/>
<category android:name="android.intent.category.LAUNCHER"/>
</intent-filter>
- <!-- Application arguments -->
-
- <meta-data
- android:name="android.app.arguments"
- android:value="-- %%INSERT_APP_ARGUMENTS%% --" />
- <!-- Application arguments -->
<meta-data
android:name="android.app.lib_name"
android:value="-- %%INSERT_APP_LIB_NAME%% --" />
- <!-- Background running -->
- <!-- Warning: changing this value to true may cause unexpected crashes if the
- application still try to draw after
- "applicationStateChanged(Qt::ApplicationSuspended)" signal is sent! -->
- <meta-data
- android:name="android.app.background_running"
- android:value="false" />
- <!-- Background running -->
-
- <!-- extract android style -->
- <!-- available android:values :
- * default - In most cases this will be the same as "full", but it can also be
- * something else if needed, e.g., for compatibility reasons
- * full - useful QWidget & Quick Controls 1 apps
- * minimal - useful for Quick Controls 2 apps, it is much faster than "full"
- * none - useful for apps that don't use any of the above Qt modules -->
- <meta-data
- android:name="android.app.extract_android_style"
- android:value="minimal" />
- <!-- extract android style -->
</activity>
</application>
</manifest>
diff --git a/examples/multimedia/camera/camera.cpp b/examples/multimedia/camera/camera.cpp
index bc24b1b89..c5aa19e2d 100644
--- a/examples/multimedia/camera/camera.cpp
+++ b/examples/multimedia/camera/camera.cpp
@@ -391,17 +391,26 @@ void Camera::saveMetaData()
for (int i = 0; i < QMediaMetaData::NumMetaData; i++) {
QString val = m_metaDataDialog->m_metaDataFields[i]->text();
if (!val.isEmpty()) {
- auto key = static_cast<QMediaMetaData::Key>(i);
- if (i == QMediaMetaData::CoverArtImage) {
+ const auto key = static_cast<QMediaMetaData::Key>(i);
+ switch (key) {
+ case QMediaMetaData::CoverArtImage: {
QImage coverArt(val);
data.insert(key, coverArt);
- } else if (i == QMediaMetaData::ThumbnailImage) {
+ break;
+ }
+ case QMediaMetaData::ThumbnailImage: {
QImage thumbnail(val);
data.insert(key, thumbnail);
- } else if (i == QMediaMetaData::Date) {
+ break;
+ }
+ case QMediaMetaData::Date: {
QDateTime date = QDateTime::fromString(val);
data.insert(key, date);
- } else {
+ break;
+ }
+ case QMediaMetaData::HasHdrContent:
+ break;
+ default:
data.insert(key, val);
}
}
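For readers unfamiliar with the metadata API the camera example now switches over, a minimal sketch of writing and reading QMediaMetaData entries; the title string and placeholder image below are illustrative only, not taken from the example:

#include <QGuiApplication>
#include <QImage>
#include <QMediaMetaData>
#include <QDebug>

int main(int argc, char *argv[])
{
    QGuiApplication app(argc, argv);

    QMediaMetaData data;
    data.insert(QMediaMetaData::Title, QStringLiteral("Holiday"));
    data.insert(QMediaMetaData::CoverArtImage, QImage(64, 64, QImage::Format_RGB32));

    for (QMediaMetaData::Key key : data.keys()) {
        switch (key) {
        case QMediaMetaData::CoverArtImage:
        case QMediaMetaData::ThumbnailImage:
            // Image-valued keys are stored as QImage, not as text.
            qDebug() << QMediaMetaData::metaDataKeyToString(key)
                     << data.value(key).value<QImage>().size();
            break;
        default:
            qDebug() << QMediaMetaData::metaDataKeyToString(key) << data.stringValue(key);
            break;
        }
    }
    return 0;
}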
diff --git a/examples/multimedia/camera/metadatadialog.cpp b/examples/multimedia/camera/metadatadialog.cpp
index b2147d868..8213baa6f 100644
--- a/examples/multimedia/camera/metadatadialog.cpp
+++ b/examples/multimedia/camera/metadatadialog.cpp
@@ -65,6 +65,8 @@ MetaDataDialog::MetaDataDialog(QWidget *parent) : QDialog(parent)
metaDataLayout->addRow(label, layout);
}
break;
+ case QMediaMetaData::HasHdrContent:
+ break; // Read-only
default:
metaDataLayout->addRow(label, lineEdit);
break;
diff --git a/examples/multimedia/player/player.cpp b/examples/multimedia/player/player.cpp
index c674554e8..3f2b9c98f 100644
--- a/examples/multimedia/player/player.cpp
+++ b/examples/multimedia/player/player.cpp
@@ -108,7 +108,10 @@ Player::Player(QWidget *parent) : QWidget(parent)
connect(controls, &PlayerControls::changeRate, m_player, &QMediaPlayer::setPlaybackRate);
connect(controls, &PlayerControls::stop, m_videoWidget, QOverload<>::of(&QVideoWidget::update));
- connect(m_player, &QMediaPlayer::playbackStateChanged, controls, &PlayerControls::setState);
+ connect(m_player, &QMediaPlayer::playbackStateChanged, controls,
+ [controls](QMediaPlayer::PlaybackState arg) {
+ controls->setState(arg);
+ });
connect(m_audioOutput, &QAudioOutput::volumeChanged, controls, &PlayerControls::setVolume);
connect(m_audioOutput, &QAudioOutput::mutedChanged, controls, &PlayerControls::setMuted);
@@ -121,12 +124,16 @@ Player::Player(QWidget *parent) : QWidget(parent)
#if !defined(Q_OS_ANDROID) && !defined(Q_OS_IOS)
m_audioOutputCombo = new QComboBox(this);
- m_audioOutputCombo->addItem(QStringLiteral("Default"), QVariant::fromValue(QAudioDevice()));
- for (auto &deviceInfo : QMediaDevices::audioOutputs())
- m_audioOutputCombo->addItem(deviceInfo.description(), QVariant::fromValue(deviceInfo));
+ controlLayout->addWidget(m_audioOutputCombo);
+
+ updateAudioDevices();
+
connect(m_audioOutputCombo, QOverload<int>::of(&QComboBox::activated), this,
&Player::audioOutputChanged);
- controlLayout->addWidget(m_audioOutputCombo);
+
+ QObject::connect(&m_mediaDevices, &QMediaDevices::audioOutputsChanged, this, [this] {
+ updateAudioDevices();
+ });
#endif
layout->addLayout(controlLayout);
@@ -280,18 +287,16 @@ void Player::metaDataChanged()
m_metaDataLabels[i]->setDisabled(true);
}
- for (auto &key : metaData.keys()) {
+ for (auto &&[key, value] : metaData.asKeyValueRange()) {
int i = int(key);
if (key == QMediaMetaData::CoverArtImage) {
- QVariant v = metaData.value(key);
if (QLabel *cover = qobject_cast<QLabel *>(m_metaDataFields[key])) {
- QImage coverImage = v.value<QImage>();
+ QImage coverImage = value.value<QImage>();
cover->setPixmap(QPixmap::fromImage(coverImage));
}
} else if (key == QMediaMetaData::ThumbnailImage) {
- QVariant v = metaData.value(key);
if (QLabel *thumbnail = qobject_cast<QLabel *>(m_metaDataFields[key])) {
- QImage thumbnailImage = v.value<QImage>();
+ QImage thumbnailImage = value.value<QImage>();
thumbnail->setPixmap(QPixmap::fromImage(thumbnailImage));
}
} else if (QLineEdit *field = qobject_cast<QLineEdit *>(m_metaDataFields[key])) {
@@ -301,6 +306,20 @@ void Player::metaDataChanged()
m_metaDataFields[i]->setDisabled(false);
m_metaDataLabels[i]->setDisabled(false);
}
+
+ const QList<QMediaMetaData> tracks = m_player->videoTracks();
+ const int currentVideoTrack = m_player->activeVideoTrack();
+ if (currentVideoTrack >= 0 && currentVideoTrack < tracks.size()) {
+ const QMediaMetaData track = tracks.value(currentVideoTrack);
+ for (const QMediaMetaData::Key &key : track.keys()) {
+ if (QLineEdit *field = qobject_cast<QLineEdit *>(m_metaDataFields[key])) {
+ QString stringValue = track.stringValue(key);
+ field->setText(stringValue);
+ }
+ m_metaDataFields[key]->setDisabled(true);
+ m_metaDataLabels[key]->setDisabled(true);
+ }
+ }
#endif
}
@@ -518,6 +537,15 @@ void Player::updateDurationInfo(qint64 currentInfo)
m_labelDuration->setText(tStr);
}
+void Player::updateAudioDevices()
+{
+ m_audioOutputCombo->clear();
+
+ m_audioOutputCombo->addItem(QStringLiteral("Default"), QVariant::fromValue(QAudioDevice()));
+ for (auto &deviceInfo : QMediaDevices::audioOutputs())
+ m_audioOutputCombo->addItem(deviceInfo.description(), QVariant::fromValue(deviceInfo));
+}
+
void Player::audioOutputChanged(int index)
{
auto device = m_audioOutputCombo->itemData(index).value<QAudioDevice>();
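The player change above relies on QMediaDevices emitting audioOutputsChanged whenever an output device appears or disappears; keeping a QMediaDevices instance alive and repopulating the selector from that signal is the whole pattern. A self-contained sketch of it, assuming nothing from the example beyond the Qt Multimedia API:

#include <QApplication>
#include <QAudioDevice>
#include <QComboBox>
#include <QMediaDevices>
#include <QVariant>

int main(int argc, char *argv[])
{
    QApplication app(argc, argv);

    QComboBox combo;
    QMediaDevices devices; // must stay alive, otherwise audioOutputsChanged never fires

    auto repopulate = [&combo] {
        combo.clear();
        combo.addItem(QStringLiteral("Default"), QVariant::fromValue(QAudioDevice()));
        for (const QAudioDevice &device : QMediaDevices::audioOutputs())
            combo.addItem(device.description(), QVariant::fromValue(device));
    };

    repopulate(); // initial fill, mirroring updateAudioDevices()
    QObject::connect(&devices, &QMediaDevices::audioOutputsChanged, &combo, repopulate);

    combo.show();
    return app.exec();
}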
diff --git a/examples/multimedia/player/player.h b/examples/multimedia/player/player.h
index 66b1b8fab..f20995fcf 100644
--- a/examples/multimedia/player/player.h
+++ b/examples/multimedia/player/player.h
@@ -8,6 +8,7 @@
#include <QMediaMetaData>
#include <QMediaPlayer>
+#include <QMediaDevices>
#include <QWidget>
QT_BEGIN_NAMESPACE
@@ -69,6 +70,9 @@ private:
void setStatusInfo(const QString &info);
void handleCursor(QMediaPlayer::MediaStatus status);
void updateDurationInfo(qint64 currentInfo);
+
+ void updateAudioDevices();
+
QString trackName(const QMediaMetaData &metaData, int index);
QMediaPlayer *m_player = nullptr;
@@ -92,6 +96,8 @@ private:
QString m_statusInfo;
qint64 m_duration;
+ QMediaDevices m_mediaDevices;
+
QWidget *m_metaDataFields[QMediaMetaData::NumMetaData] = {};
QLabel *m_metaDataLabels[QMediaMetaData::NumMetaData] = {};
};
diff --git a/examples/multimedia/player/playercontrols.cpp b/examples/multimedia/player/playercontrols.cpp
index 4933bf8cf..9a6c81dd2 100644
--- a/examples/multimedia/player/playercontrols.cpp
+++ b/examples/multimedia/player/playercontrols.cpp
@@ -17,10 +17,12 @@ PlayerControls::PlayerControls(QWidget *parent) : QWidget(parent)
connect(m_playButton, &QAbstractButton::clicked, this, &PlayerControls::playClicked);
+ m_pauseButton = new QToolButton(this);
+ m_pauseButton->setIcon(style()->standardIcon(QStyle::SP_MediaPause));
+ connect(m_pauseButton, &QAbstractButton::clicked, this, &PlayerControls::pauseClicked);
+
m_stopButton = new QToolButton(this);
m_stopButton->setIcon(style()->standardIcon(QStyle::SP_MediaStop));
- m_stopButton->setEnabled(false);
-
connect(m_stopButton, &QAbstractButton::clicked, this, &PlayerControls::stop);
m_nextButton = new QToolButton(this);
@@ -56,10 +58,13 @@ PlayerControls::PlayerControls(QWidget *parent) : QWidget(parent)
connect(m_rateBox, QOverload<int>::of(&QComboBox::activated), this,
&PlayerControls::updateRate);
+ setState(QMediaPlayer::StoppedState, /*force=*/true);
+
QBoxLayout *layout = new QHBoxLayout;
layout->setContentsMargins(0, 0, 0, 0);
layout->addWidget(m_stopButton);
layout->addWidget(m_previousButton);
+ layout->addWidget(m_pauseButton);
layout->addWidget(m_playButton);
layout->addWidget(m_nextButton);
layout->addWidget(m_muteButton);
@@ -73,23 +78,30 @@ QMediaPlayer::PlaybackState PlayerControls::state() const
return m_playerState;
}
-void PlayerControls::setState(QMediaPlayer::PlaybackState state)
+void PlayerControls::setState(QMediaPlayer::PlaybackState state, bool force)
{
- if (state != m_playerState) {
+ if (state != m_playerState || force) {
m_playerState = state;
+ QColor baseColor = palette().color(QPalette::Base);
+ QString inactiveStyleSheet = QStringLiteral("background-color: %1").arg(baseColor.name());
+ QString defaultStyleSheet = QStringLiteral("");
+
switch (state) {
case QMediaPlayer::StoppedState:
- m_stopButton->setEnabled(false);
- m_playButton->setIcon(style()->standardIcon(QStyle::SP_MediaPlay));
+ m_stopButton->setStyleSheet(inactiveStyleSheet);
+ m_playButton->setStyleSheet(defaultStyleSheet);
+ m_pauseButton->setStyleSheet(defaultStyleSheet);
break;
case QMediaPlayer::PlayingState:
- m_stopButton->setEnabled(true);
- m_playButton->setIcon(style()->standardIcon(QStyle::SP_MediaPause));
+ m_stopButton->setStyleSheet(defaultStyleSheet);
+ m_playButton->setStyleSheet(inactiveStyleSheet);
+ m_pauseButton->setStyleSheet(defaultStyleSheet);
break;
case QMediaPlayer::PausedState:
- m_stopButton->setEnabled(true);
- m_playButton->setIcon(style()->standardIcon(QStyle::SP_MediaPlay));
+ m_stopButton->setStyleSheet(defaultStyleSheet);
+ m_playButton->setStyleSheet(defaultStyleSheet);
+ m_pauseButton->setStyleSheet(inactiveStyleSheet);
break;
}
}
@@ -129,15 +141,12 @@ void PlayerControls::setMuted(bool muted)
void PlayerControls::playClicked()
{
- switch (m_playerState) {
- case QMediaPlayer::StoppedState:
- case QMediaPlayer::PausedState:
- emit play();
- break;
- case QMediaPlayer::PlayingState:
- emit pause();
- break;
- }
+ emit play();
+}
+
+void PlayerControls::pauseClicked()
+{
+ emit pause();
}
void PlayerControls::muteClicked()
diff --git a/examples/multimedia/player/playercontrols.h b/examples/multimedia/player/playercontrols.h
index 72dddd68f..322b4ca24 100644
--- a/examples/multimedia/player/playercontrols.h
+++ b/examples/multimedia/player/playercontrols.h
@@ -26,7 +26,7 @@ public:
qreal playbackRate() const;
public slots:
- void setState(QMediaPlayer::PlaybackState state);
+ void setState(QMediaPlayer::PlaybackState state, bool force = false);
void setVolume(float volume);
void setMuted(bool muted);
void setPlaybackRate(float rate);
@@ -43,6 +43,7 @@ signals:
private slots:
void playClicked();
+ void pauseClicked();
void muteClicked();
void updateRate();
void onVolumeSliderValueChanged();
@@ -51,6 +52,7 @@ private:
QMediaPlayer::PlaybackState m_playerState = QMediaPlayer::StoppedState;
bool m_playerMuted = false;
QAbstractButton *m_playButton = nullptr;
+ QAbstractButton *m_pauseButton = nullptr;
QAbstractButton *m_stopButton = nullptr;
QAbstractButton *m_nextButton = nullptr;
QAbstractButton *m_previousButton = nullptr;
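As a usage note (a sketch only, not taken from player.cpp): with dedicated play and pause buttons, the owning widget can forward the PlayerControls signals straight to QMediaPlayer and mirror state changes back, for example:

    // Assumed wiring in the Player widget; 'controls' and 'm_player' follow the names
    // used elsewhere in this example.
    connect(controls, &PlayerControls::play, m_player, &QMediaPlayer::play);
    connect(controls, &PlayerControls::pause, m_player, &QMediaPlayer::pause);
    connect(controls, &PlayerControls::stop, m_player, &QMediaPlayer::stop);
    // setState() now takes an extra 'force' flag; default arguments are not applied by
    // pointer-to-member connects, so adapt with a lambda:
    connect(m_player, &QMediaPlayer::playbackStateChanged, controls,
            [controls](QMediaPlayer::PlaybackState state) { controls->setState(state); });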
diff --git a/examples/multimedia/video/mediaplayer/Main.qml b/examples/multimedia/video/mediaplayer/Main.qml
index 9ee9cf6c3..bc924dfb8 100644
--- a/examples/multimedia/video/mediaplayer/Main.qml
+++ b/examples/multimedia/video/mediaplayer/Main.qml
@@ -41,7 +41,7 @@ ApplicationWindow {
Timer {
id: timer
- interval: 3000 // milliseconds
+ interval: 1500 // milliseconds
onTriggered: activityListener.inactiveMouse = true
}
diff --git a/examples/multimedia/video/mediaplayer/controls/PlaybackControl.qml b/examples/multimedia/video/mediaplayer/controls/PlaybackControl.qml
index 056cf50d0..c61656103 100644
--- a/examples/multimedia/video/mediaplayer/controls/PlaybackControl.qml
+++ b/examples/multimedia/video/mediaplayer/controls/PlaybackControl.qml
@@ -27,7 +27,6 @@ Item {
|| settingsPopup.visible
|| audioControl.busy
|| playbackSeekControl.busy
- || !playbackController.mediaPlayer.playing
implicitHeight: landscapePlaybackControls ? 168 : 208
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAndroidMediaPlayer.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAndroidMediaPlayer.java
index 6ddc64dc2..9bfad0aa4 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAndroidMediaPlayer.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAndroidMediaPlayer.java
@@ -27,19 +27,19 @@ import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
-public class QtAndroidMediaPlayer
+class QtAndroidMediaPlayer
{
// Native callback functions for MediaPlayer
- native public void onErrorNative(int what, int extra, long id);
- native public void onBufferingUpdateNative(int percent, long id);
- native public void onProgressUpdateNative(int progress, long id);
- native public void onDurationChangedNative(int duration, long id);
- native public void onInfoNative(int what, int extra, long id);
- native public void onVideoSizeChangedNative(int width, int height, long id);
- native public void onStateChangedNative(int state, long id);
+ native void onErrorNative(int what, int extra, long id);
+ native void onBufferingUpdateNative(int percent, long id);
+ native void onProgressUpdateNative(int progress, long id);
+ native void onDurationChangedNative(int duration, long id);
+ native void onInfoNative(int what, int extra, long id);
+ native void onVideoSizeChangedNative(int width, int height, long id);
+ native void onStateChangedNative(int state, long id);
- native public void onTrackInfoChangedNative(long id);
- native public void onTimedTextChangedNative(String text, int time, long id);
+ native void onTrackInfoChangedNative(long id);
+ native void onTimedTextChangedNative(String text, int time, long id);
private MediaPlayer mMediaPlayer = null;
private AudioAttributes mAudioAttributes = null;
@@ -67,7 +67,7 @@ public class QtAndroidMediaPlayer
final static int Error = 0x200;
}
- public class TrackInfo
+ class TrackInfo
{
private int type;
private String mime, language;
@@ -206,19 +206,20 @@ public class QtAndroidMediaPlayer
private class MediaPlayerTimedTextListener implements MediaPlayer.OnTimedTextListener
{
- @Override public void onTimedText(MediaPlayer mp, TimedText text)
+ @Override
+ public void onTimedText(MediaPlayer mp, TimedText text)
{
onTimedTextChangedNative(text.getText(), mp.getCurrentPosition(), mID);
}
}
- public QtAndroidMediaPlayer(final Context context, final long id)
+ QtAndroidMediaPlayer(final Context context, final long id)
{
mID = id;
mContext = context;
}
- public MediaPlayer getMediaPlayerHandle()
+ MediaPlayer getMediaPlayerHandle()
{
return mMediaPlayer;
}
@@ -256,7 +257,7 @@ public class QtAndroidMediaPlayer
mProgressScheduler = Executors.newScheduledThreadPool(1);
}
- public void startProgressWatcher()
+ void startProgressWatcher()
{
// if it was shutdown, request new thread
if (mProgressScheduler.isTerminated() || mProgressScheduler == null)
@@ -271,13 +272,13 @@ public class QtAndroidMediaPlayer
}, 10, 100, TimeUnit.MILLISECONDS);
}
- public void cancelProgressWatcher()
+ void cancelProgressWatcher()
{
if (mProgressScheduler != null)
mProgressScheduler.shutdown();
}
- public void start()
+ void start()
{
if ((mState & (State.Prepared
| State.Started
@@ -295,7 +296,7 @@ public class QtAndroidMediaPlayer
}
}
- public void pause()
+ void pause()
{
if ((mState & (State.Started | State.Paused | State.PlaybackCompleted)) == 0)
return;
@@ -309,7 +310,7 @@ public class QtAndroidMediaPlayer
}
- public void stop()
+ void stop()
{
if ((mState & (State.Prepared
| State.Started
@@ -329,7 +330,7 @@ public class QtAndroidMediaPlayer
}
- public void seekTo(final int msec)
+ void seekTo(final int msec)
{
if ((mState & (State.Prepared
| State.Started
@@ -351,7 +352,7 @@ public class QtAndroidMediaPlayer
}
}
- public boolean isPlaying()
+ boolean isPlaying()
{
boolean playing = false;
if ((mState & (State.Idle
@@ -373,7 +374,7 @@ public class QtAndroidMediaPlayer
return playing;
}
- public void prepareAsync()
+ void prepareAsync()
{
if ((mState & (State.Initialized | State.Stopped)) == 0)
return;
@@ -386,17 +387,17 @@ public class QtAndroidMediaPlayer
}
}
- public void initHeaders()
+ void initHeaders()
{
mHeaders = new HashMap<String, String>();
}
- public void setHeader(final String header, final String value)
+ void setHeader(final String header, final String value)
{
mHeaders.put(header, value);
}
- public void setDataSource(final String path)
+ void setDataSource(final String path)
{
if (mState == State.Uninitialized)
init();
@@ -462,7 +463,7 @@ public class QtAndroidMediaPlayer
return ((mState & preparedState) != 0);
}
- public TrackInfo[] getAllTrackInfo()
+ TrackInfo[] getAllTrackInfo()
{
if (!isMediaPlayerPrepared()) {
Log.w(TAG, "Trying to get track info of a media player that is not prepared!");
@@ -511,7 +512,7 @@ public class QtAndroidMediaPlayer
return mimeType;
}
- public void selectTrack(int index)
+ void selectTrack(int index)
{
if (!isMediaPlayerPrepared()) {
Log.d(TAG, "Trying to select a track of a media player that is not prepared!");
@@ -525,7 +526,7 @@ public class QtAndroidMediaPlayer
}
}
- public void deselectTrack(int index)
+ void deselectTrack(int index)
{
if (!isMediaPlayerPrepared()) {
Log.d(TAG, "Trying to deselect track of a media player that is not prepared!");
@@ -540,7 +541,7 @@ public class QtAndroidMediaPlayer
}
}
- public int getSelectedTrack(int type)
+ int getSelectedTrack(int type)
{
int InvalidTrack = -1;
@@ -571,7 +572,7 @@ public class QtAndroidMediaPlayer
return InvalidTrack;
}
- public int getCurrentPosition()
+ int getCurrentPosition()
{
int currentPosition = 0;
if ((mState & (State.Idle
@@ -594,7 +595,7 @@ public class QtAndroidMediaPlayer
}
- public int getDuration()
+ int getDuration()
{
int duration = 0;
if ((mState & (State.Prepared
@@ -614,7 +615,7 @@ public class QtAndroidMediaPlayer
return duration;
}
- public void setVolume(int volume)
+ void setVolume(int volume)
{
if (volume < 0)
volume = 0;
@@ -648,12 +649,12 @@ public class QtAndroidMediaPlayer
}
}
- public SurfaceHolder display()
+ SurfaceHolder display()
{
return mSurfaceHolder;
}
- public void setDisplay(SurfaceHolder sh)
+ void setDisplay(SurfaceHolder sh)
{
mSurfaceHolder = sh;
@@ -664,23 +665,23 @@ public class QtAndroidMediaPlayer
}
- public int getVolume()
+ int getVolume()
{
return mVolume;
}
- public void mute(final boolean mute)
+ void mute(final boolean mute)
{
mMuted = mute;
setVolumeHelper(mute ? 0 : mVolume);
}
- public boolean isMuted()
+ boolean isMuted()
{
return mMuted;
}
- public void reset()
+ void reset()
{
if (mState == State.Uninitialized) {
return;
@@ -691,7 +692,7 @@ public class QtAndroidMediaPlayer
cancelProgressWatcher();
}
- public void release()
+ void release()
{
if (mMediaPlayer != null) {
mMediaPlayer.reset();
@@ -703,7 +704,7 @@ public class QtAndroidMediaPlayer
cancelProgressWatcher();
}
- public void setAudioAttributes(int type, int usage)
+ void setAudioAttributes(int type, int usage)
{
mAudioAttributes = new AudioAttributes.Builder()
.setUsage(usage)
@@ -725,7 +726,7 @@ public class QtAndroidMediaPlayer
}
}
- public boolean setPlaybackRate(float rate)
+ boolean setPlaybackRate(float rate)
{
PlaybackParams playbackParams = mMediaPlayer.getPlaybackParams();
playbackParams.setSpeed(rate);
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
index 83d704838..c79e9da31 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
@@ -4,72 +4,71 @@
package org.qtproject.qt.android.multimedia;
import java.util.ArrayList;
-import android.bluetooth.BluetoothA2dp;
-import android.bluetooth.BluetoothAdapter;
-import android.bluetooth.BluetoothDevice;
-import android.bluetooth.BluetoothHeadset;
-import android.content.BroadcastReceiver;
import android.content.Context;
-import android.content.Intent;
-import android.content.IntentFilter;
+import android.media.AudioDeviceCallback;
import android.media.AudioDeviceInfo;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.media.MediaRecorder;
+import android.os.Handler;
+import android.os.Looper;
import android.util.Log;
-public class QtAudioDeviceManager
+class QtAudioDeviceManager
{
private static final String TAG = "QtAudioDeviceManager";
static private AudioManager m_audioManager = null;
static private final AudioDevicesReceiver m_audioDevicesReceiver = new AudioDevicesReceiver();
+ static private Handler handler = new Handler(Looper.getMainLooper());
static private AudioRecord m_recorder = null;
static private AudioTrack m_streamPlayer = null;
static private Thread m_streamingThread = null;
static private boolean m_isStreaming = false;
+ static private boolean m_useSpeaker = false;
static private final int m_sampleRate = 8000;
static private final int m_channels = AudioFormat.CHANNEL_CONFIGURATION_MONO;
static private final int m_audioFormat = AudioFormat.ENCODING_PCM_16BIT;
static private final int m_bufferSize = AudioRecord.getMinBufferSize(m_sampleRate, m_channels, m_audioFormat);
- public static native void onAudioInputDevicesUpdated();
- public static native void onAudioOutputDevicesUpdated();
+ static native void onAudioInputDevicesUpdated();
+ static native void onAudioOutputDevicesUpdated();
+
+ static private void updateDeviceList() {
+ onAudioInputDevicesUpdated();
+ onAudioOutputDevicesUpdated();
+ if (m_useSpeaker) {
+ final AudioDeviceInfo[] audioDevices =
+ m_audioManager.getDevices(AudioManager.GET_DEVICES_OUTPUTS);
+ setAudioOutput(getModeForSpeaker(audioDevices), false, true);
+ }
+ }
+
+ private static class AudioDevicesReceiver extends AudioDeviceCallback {
+ @Override
+ public void onAudioDevicesAdded(AudioDeviceInfo[] addedDevices) {
+ updateDeviceList();
+ }
- static private class AudioDevicesReceiver extends BroadcastReceiver
- {
@Override
- public void onReceive(Context context, Intent intent) {
- onAudioInputDevicesUpdated();
- onAudioOutputDevicesUpdated();
+ public void onAudioDevicesRemoved(AudioDeviceInfo[] removedDevices) {
+ updateDeviceList();
}
}
- public static void registerAudioHeadsetStateReceiver(Context context)
+
+ static void registerAudioHeadsetStateReceiver()
{
- IntentFilter audioDevicesFilter = new IntentFilter();
- audioDevicesFilter.addAction(AudioManager.ACTION_HEADSET_PLUG);
- audioDevicesFilter.addAction(AudioManager.ACTION_HDMI_AUDIO_PLUG);
- audioDevicesFilter.addAction(BluetoothDevice.ACTION_ACL_CONNECTED);
- audioDevicesFilter.addAction(BluetoothDevice.ACTION_ACL_DISCONNECTED);
- audioDevicesFilter.addAction(BluetoothDevice.ACTION_ACL_DISCONNECT_REQUESTED);
- audioDevicesFilter.addAction(BluetoothAdapter.ACTION_STATE_CHANGED);
- audioDevicesFilter.addAction(BluetoothDevice.ACTION_BOND_STATE_CHANGED);
- audioDevicesFilter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED);
- audioDevicesFilter.addAction(AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED);
- audioDevicesFilter.addAction(BluetoothA2dp.ACTION_CONNECTION_STATE_CHANGED);
- audioDevicesFilter.addAction(BluetoothA2dp.ACTION_PLAYING_STATE_CHANGED);
-
- context.registerReceiver(m_audioDevicesReceiver, audioDevicesFilter);
+ m_audioManager.registerAudioDeviceCallback(m_audioDevicesReceiver, handler);
}
- public static void unregisterAudioHeadsetStateReceiver(Context context)
+ static void unregisterAudioHeadsetStateReceiver()
{
- context.unregisterReceiver(m_audioDevicesReceiver);
+ m_audioManager.unregisterAudioDeviceCallback(m_audioDevicesReceiver);
}
- static public void setContext(Context context)
+ static void setContext(Context context)
{
m_audioManager = (AudioManager)context.getSystemService(Context.AUDIO_SERVICE);
}
@@ -226,8 +225,27 @@ public class QtAudioDeviceManager
return ret;
}
+ private static int getModeForSpeaker(AudioDeviceInfo[] audioDevices)
+ {
+ // If we want to force the device to use the speaker when a Bluetooth or wired headset is
+ // connected, we need to use MODE_IN_COMMUNICATION. Otherwise, MODE_NORMAL can be used.
+ for (AudioDeviceInfo deviceInfo : audioDevices) {
+ switch (deviceInfo.getType()) {
+ case AudioDeviceInfo.TYPE_BLUETOOTH_A2DP:
+ case AudioDeviceInfo.TYPE_BLUETOOTH_SCO:
+ case AudioDeviceInfo.TYPE_WIRED_HEADSET:
+ case AudioDeviceInfo.TYPE_WIRED_HEADPHONES:
+ return AudioManager.MODE_IN_COMMUNICATION;
+ default: break;
+ }
+ }
+ return AudioManager.MODE_NORMAL;
+ }
+
+
private static boolean setAudioOutput(int id)
{
+ m_useSpeaker = false;
final AudioDeviceInfo[] audioDevices =
m_audioManager.getDevices(AudioManager.GET_DEVICES_OUTPUTS);
for (AudioDeviceInfo deviceInfo : audioDevices) {
@@ -239,7 +257,8 @@ public class QtAudioDeviceManager
setAudioOutput(AudioManager.MODE_IN_COMMUNICATION, true, false);
return true;
case AudioDeviceInfo.TYPE_BUILTIN_SPEAKER:
- setAudioOutput(AudioManager.STREAM_MUSIC, false, true);
+ m_useSpeaker = true;
+ setAudioOutput(getModeForSpeaker(audioDevices), false, true);
return true;
case AudioDeviceInfo.TYPE_WIRED_HEADSET:
case AudioDeviceInfo.TYPE_WIRED_HEADPHONES:
@@ -313,6 +332,7 @@ public class QtAudioDeviceManager
m_isStreaming = true;
m_streamingThread = new Thread(new Runnable() {
+ @Override
public void run() {
streamSound();
}
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java
index ac8140197..f74fc1e6e 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java
@@ -2,9 +2,6 @@
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
package org.qtproject.qt.android.multimedia;
-import org.qtproject.qt.android.multimedia.QtVideoDeviceManager;
-import org.qtproject.qt.android.multimedia.QtExifDataHandler;
-
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.Context;
@@ -20,21 +17,17 @@ import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.media.Image;
import android.media.ImageReader;
-import android.graphics.ImageFormat;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Range;
import android.view.Surface;
-import android.media.MediaCodec;
-import android.media.MediaCodecInfo;
-import android.media.MediaFormat;
import java.lang.Thread;
import java.util.ArrayList;
import java.util.List;
@TargetApi(23)
-public class QtCamera2 {
+class QtCamera2 {
CameraDevice mCameraDevice = null;
QtVideoDeviceManager mVideoDeviceManager = null;
@@ -124,6 +117,7 @@ public class QtCamera2 {
native void onSessionClosed(String cameraId);
native void onCaptureSessionFailed(String cameraId, int reason, long frameNumber);
CameraCaptureSession.CaptureCallback mCaptureCallback = new CameraCaptureSession.CaptureCallback() {
+ @Override
public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
super.onCaptureFailed(session, request, failure);
onCaptureSessionFailed(mCameraId, failure.getReason(), failure.getFrameNumber());
@@ -189,7 +183,7 @@ public class QtCamera2 {
}
};
- public QtCamera2(Context context) {
+ QtCamera2(Context context) {
mCameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
mVideoDeviceManager = new QtVideoDeviceManager(context);
startBackgroundThread();
@@ -213,7 +207,7 @@ public class QtCamera2 {
}
@SuppressLint("MissingPermission")
- public boolean open(String cameraId) {
+ boolean open(String cameraId) {
try {
mCameraId = cameraId;
mCameraManager.openCamera(cameraId,mStateCallback,mBackgroundHandler);
@@ -264,7 +258,7 @@ public class QtCamera2 {
};
- public void prepareCamera(int width, int height, int format, int minFps, int maxFps) {
+ void prepareCamera(int width, int height, int format, int minFps, int maxFps) {
addImageReader(width, height, format);
setFrameRate(minFps, maxFps);
@@ -295,22 +289,22 @@ public class QtCamera2 {
mFpsRange = new Range<>(minFrameRate, maxFrameRate);
}
- public boolean addSurface(Surface surface) {
+ boolean addSurface(Surface surface) {
if (mTargetSurfaces.contains(surface))
return true;
return mTargetSurfaces.add(surface);
}
- public boolean removeSurface(Surface surface) {
+ boolean removeSurface(Surface surface) {
return mTargetSurfaces.remove(surface);
}
- public void clearSurfaces() {
+ void clearSurfaces() {
mTargetSurfaces.clear();
}
- public boolean createSession() {
+ boolean createSession() {
if (mCameraDevice == null)
return false;
@@ -323,7 +317,7 @@ public class QtCamera2 {
return false;
}
- public boolean start(int template) {
+ boolean start(int template) {
if (mCameraDevice == null)
return false;
@@ -364,7 +358,7 @@ public class QtCamera2 {
}
}
- public void stopAndClose() {
+ void stopAndClose() {
synchronized (mStartMutex) {
try {
if (null != mCaptureSession) {
@@ -422,7 +416,7 @@ public class QtCamera2 {
}
}
- public void takePhoto() {
+ void takePhoto() {
try {
if (mAFMode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE) {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
@@ -436,7 +430,7 @@ public class QtCamera2 {
}
}
- public void saveExifToFile(String path)
+ void saveExifToFile(String path)
{
if (mExifDataHandler != null)
mExifDataHandler.save(path);
@@ -456,7 +450,7 @@ public class QtCamera2 {
activePixels.height() - croppedHeight/2);
}
- public void zoomTo(float factor)
+ void zoomTo(float factor)
{
synchronized (mStartMutex) {
mZoomFactor = factor;
@@ -476,7 +470,7 @@ public class QtCamera2 {
}
}
}
- public void setFlashMode(String flashMode)
+ void setFlashMode(String flashMode)
{
synchronized (mStartMutex) {
@@ -506,7 +500,7 @@ public class QtCamera2 {
return mode ? CameraMetadata.FLASH_MODE_TORCH : CameraMetadata.FLASH_MODE_OFF;
}
- public void setTorchMode(boolean torchMode)
+ void setTorchMode(boolean torchMode)
{
synchronized (mStartMutex) {
mTorchMode = getTorchModeValue(torchMode);
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCameraListener.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCameraListener.java
index 3e35eb416..23e9a3580 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCameraListener.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCameraListener.java
@@ -15,7 +15,7 @@ import android.util.Log;
import java.lang.Math;
import java.io.ByteArrayOutputStream;
-public class QtCameraListener implements Camera.ShutterCallback,
+class QtCameraListener implements Camera.ShutterCallback,
Camera.PictureCallback,
Camera.AutoFocusCallback,
Camera.PreviewCallback
@@ -40,22 +40,22 @@ public class QtCameraListener implements Camera.ShutterCallback,
m_cameraId = id;
}
- public void notifyNewFrames(boolean notify)
+ void notifyNewFrames(boolean notify)
{
m_notifyNewFrames = notify;
}
- public void notifyWhenFrameAvailable(boolean notify)
+ void notifyWhenFrameAvailable(boolean notify)
{
m_notifyWhenFrameAvailable = notify;
}
- public byte[] lastPreviewBuffer()
+ byte[] lastPreviewBuffer()
{
return m_lastPreviewBuffer;
}
- public int previewWidth()
+ int previewWidth()
{
if (m_previewSize == null)
return -1;
@@ -63,7 +63,7 @@ public class QtCameraListener implements Camera.ShutterCallback,
return m_previewSize.width;
}
- public int previewHeight()
+ int previewHeight()
{
if (m_previewSize == null)
return -1;
@@ -71,27 +71,27 @@ public class QtCameraListener implements Camera.ShutterCallback,
return m_previewSize.height;
}
- public int previewFormat()
+ int previewFormat()
{
return m_previewFormat;
}
- public int previewBytesPerLine()
+ int previewBytesPerLine()
{
return m_previewBytesPerLine;
}
- public void clearPreviewCallback(Camera camera)
+ void clearPreviewCallback(Camera camera)
{
camera.setPreviewCallbackWithBuffer(null);
}
- public void setPhotoRotation(int rotation)
+ void setPhotoRotation(int rotation)
{
m_rotation = rotation;
}
- public void setupPreviewCallback(Camera camera)
+ void setupPreviewCallback(Camera camera)
{
// Clear previous callback (also clears added buffers)
clearPreviewCallback(camera);
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtExifDataHandler.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtExifDataHandler.java
index c2699eb1d..a27d98967 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtExifDataHandler.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtExifDataHandler.java
@@ -9,14 +9,14 @@ import android.util.Log;
import java.io.IOException;
-public class QtExifDataHandler {
+class QtExifDataHandler {
private int mFlashFired = 0;
private long mExposureTime = 0L;
private float mFocalLength = 0;
private static String mModel = Build.MANUFACTURER + " " + Build.MODEL;
- public QtExifDataHandler(CaptureResult r)
+ QtExifDataHandler(CaptureResult r)
{
Integer flash = r.get(CaptureResult.FLASH_STATE);
if (flash != null && flash == CaptureResult.FLASH_STATE_FIRED)
@@ -28,7 +28,7 @@ public class QtExifDataHandler {
mFocalLength = r.get(CaptureResult.LENS_FOCAL_LENGTH);
}
- public void save(String path)
+ void save(String path)
{
ExifInterface exif;
try {
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMediaRecorderListener.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMediaRecorderListener.java
index 97d317119..3cf77c323 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMediaRecorderListener.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMediaRecorderListener.java
@@ -5,11 +5,11 @@ package org.qtproject.qt.android.multimedia;
import android.media.MediaRecorder;
-public class QtMediaRecorderListener implements MediaRecorder.OnErrorListener, MediaRecorder.OnInfoListener
+class QtMediaRecorderListener implements MediaRecorder.OnErrorListener, MediaRecorder.OnInfoListener
{
private long m_id = -1;
- public QtMediaRecorderListener(long id)
+ QtMediaRecorderListener(long id)
{
m_id = id;
}
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java
index bfac6670f..21a3989a6 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java
@@ -17,13 +17,13 @@ import java.lang.String;
import java.io.File;
import android.util.Log;
-public class QtMultimediaUtils
+class QtMultimediaUtils
{
static private class OrientationListener extends OrientationEventListener
{
- static public int deviceOrientation = 0;
+ static int deviceOrientation = 0;
- public OrientationListener(Context context)
+ OrientationListener(Context context)
{
super(context);
}
@@ -42,17 +42,17 @@ public class QtMultimediaUtils
static private OrientationListener m_orientationListener = null;
private static final String QtTAG = "Qt QtMultimediaUtils";
- static public void setActivity(Activity qtMainActivity, Object qtActivityDelegate)
+ static void setActivity(Activity qtMainActivity, Object qtActivityDelegate)
{
}
- static public void setContext(Context context)
+ static void setContext(Context context)
{
m_context = context;
m_orientationListener = new OrientationListener(context);
}
- public QtMultimediaUtils()
+ QtMultimediaUtils()
{
}
@@ -66,7 +66,7 @@ public class QtMultimediaUtils
static int getDeviceOrientation()
{
- return m_orientationListener.deviceOrientation;
+ return OrientationListener.deviceOrientation;
}
static String getDefaultMediaDirectory(int type)
@@ -125,24 +125,25 @@ public class QtMultimediaUtils
return codecs;
}
-public static String getMimeType(Context context, String url)
-{
- Uri parsedUri = Uri.parse(url);
- String type = null;
-
- try {
- String scheme = parsedUri.getScheme();
- if (scheme != null && scheme.contains("content")) {
- ContentResolver cR = context.getContentResolver();
- type = cR.getType(parsedUri);
- } else {
- String extension = MimeTypeMap.getFileExtensionFromUrl(url);
- if (extension != null)
+ static String getMimeType(Context context, String url)
+ {
+ Uri parsedUri = Uri.parse(url);
+ String type = null;
+
+ try {
+ String scheme = parsedUri.getScheme();
+ if (scheme != null && scheme.contains("content")) {
+ ContentResolver cR = context.getContentResolver();
+ type = cR.getType(parsedUri);
+ } else {
+ String extension = MimeTypeMap.getFileExtensionFromUrl(url);
+ if (extension != null)
type = MimeTypeMap.getSingleton().getMimeTypeFromExtension(extension);
- }
- } catch (Exception e) {
- Log.e(QtTAG, "getMimeType(): " + e.toString());
+ }
+ } catch (Exception e) {
+ Log.e(QtTAG, "getMimeType(): " + e.toString());
+ }
+ return type;
}
- return type;
-}
}
+
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceHolderCallback.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceHolderCallback.java
index 30dad68d5..bbaa0d5b9 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceHolderCallback.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceHolderCallback.java
@@ -5,11 +5,11 @@ package org.qtproject.qt.android.multimedia;
import android.view.SurfaceHolder;
-public class QtSurfaceHolderCallback implements SurfaceHolder.Callback
+class QtSurfaceHolderCallback implements SurfaceHolder.Callback
{
private long m_id = -1;
- public QtSurfaceHolderCallback(long id)
+ QtSurfaceHolderCallback(long id)
{
m_id = id;
}
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceTextureHolder.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceTextureHolder.java
index 59406ca59..345c313e2 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceTextureHolder.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceTextureHolder.java
@@ -8,11 +8,11 @@ import android.view.Surface;
import android.graphics.Rect;
import android.graphics.Canvas;
-public class QtSurfaceTextureHolder implements SurfaceHolder
+class QtSurfaceTextureHolder implements SurfaceHolder
{
private Surface surfaceTexture;
- public QtSurfaceTextureHolder(Surface surface)
+ QtSurfaceTextureHolder(Surface surface)
{
surfaceTexture = surface;
}
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceTextureListener.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceTextureListener.java
index 4974f9301..9b4180b5d 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceTextureListener.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceTextureListener.java
@@ -5,11 +5,11 @@ package org.qtproject.qt.android.multimedia;
import android.graphics.SurfaceTexture;
-public class QtSurfaceTextureListener implements SurfaceTexture.OnFrameAvailableListener
+class QtSurfaceTextureListener implements SurfaceTexture.OnFrameAvailableListener
{
private final long m_id;
- public QtSurfaceTextureListener(long id)
+ QtSurfaceTextureListener(long id)
{
m_id = id;
}
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtVideoDeviceManager.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtVideoDeviceManager.java
index 3339bddc9..6ec2073d8 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtVideoDeviceManager.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtVideoDeviceManager.java
@@ -13,6 +13,7 @@ import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaCodecList;
import android.media.MediaCodecInfo;
+import android.os.Build;
import android.util.Range;
import android.util.Size;
import android.util.Log;
@@ -24,17 +25,17 @@ import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
-public class QtVideoDeviceManager {
+class QtVideoDeviceManager {
CameraManager mCameraManager;
Map<String, CameraCharacteristics> cache;
- public QtVideoDeviceManager(Context context) {
+ QtVideoDeviceManager(Context context) {
mCameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
cache = new WeakHashMap<String, CameraCharacteristics>();
}
- public CameraCharacteristics getCameraCharacteristics(String cameraId) {
+ CameraCharacteristics getCameraCharacteristics(String cameraId) {
if (cache.containsKey(cameraId))
return cache.get(cameraId);
@@ -77,10 +78,10 @@ public class QtVideoDeviceManager {
return codecs.toArray(new String[codecs.size()]);
}
- static public String[] getHWVideoDecoders() { return getHWVideoCodecs(CODEC.DECODER); }
- static public String[] getHWVideoEncoders() { return getHWVideoCodecs(CODEC.ENCODER); }
+ static String[] getHWVideoDecoders() { return getHWVideoCodecs(CODEC.DECODER); }
+ static String[] getHWVideoEncoders() { return getHWVideoCodecs(CODEC.ENCODER); }
- public String[] getCameraIdList() {
+ String[] getCameraIdList() {
try {
return mCameraManager.getCameraIdList();
} catch (Exception e) {
@@ -89,21 +90,21 @@ public class QtVideoDeviceManager {
return null;
}
- public int getSensorOrientation(String cameraId) {
+ int getSensorOrientation(String cameraId) {
CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
if (characteristics == null)
return 0;
return characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
}
- public int getLensFacing(String cameraId) {
+ int getLensFacing(String cameraId) {
CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
if (characteristics == null)
return 0;
return characteristics.get(CameraCharacteristics.LENS_FACING);
}
- public String[] getFpsRange(String cameraId) {
+ String[] getFpsRange(String cameraId) {
CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
if (characteristics == null)
@@ -120,7 +121,7 @@ public class QtVideoDeviceManager {
return fps;
}
- public float getMaxZoom(String cameraId) {
+ float getMaxZoom(String cameraId) {
float maxZoom = 1.0f;
final CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
@@ -129,7 +130,7 @@ public class QtVideoDeviceManager {
return maxZoom;
}
- public Rect getActiveArraySize(String cameraId) {
+ Rect getActiveArraySize(String cameraId) {
Rect activeArraySize = new Rect();
final CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
if (characteristics != null)
@@ -138,7 +139,7 @@ public class QtVideoDeviceManager {
}
static final int maxResolution = 3840*2160; // 4k resolution
- public String[] getStreamConfigurationsSizes(String cameraId, int imageFormat) {
+ String[] getStreamConfigurationsSizes(String cameraId, int imageFormat) {
CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
if (characteristics == null)
@@ -159,7 +160,7 @@ public class QtVideoDeviceManager {
return stream.toArray(new String[0]);
}
- public int stringToControlAEMode(String mode) {
+ int stringToControlAEMode(String mode) {
switch (mode) {
case "off":
return CaptureRequest.CONTROL_AE_MODE_ON;
@@ -176,7 +177,7 @@ public class QtVideoDeviceManager {
}
}
- public String controlAEModeToString(int mode) {
+ String controlAEModeToString(int mode) {
switch (mode) {
case CaptureRequest.CONTROL_AE_MODE_ON:
return "off";
@@ -194,7 +195,7 @@ public class QtVideoDeviceManager {
}
}
- public int[] getSupportedAfModes(String cameraId) {
+ int[] getSupportedAfModes(String cameraId) {
CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
if (characteristics == null)
@@ -203,7 +204,7 @@ public class QtVideoDeviceManager {
return characteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
}
- public String[] getSupportedFlashModes(String cameraId) {
+ String[] getSupportedFlashModes(String cameraId) {
CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
if (characteristics == null)
@@ -219,7 +220,24 @@ public class QtVideoDeviceManager {
return supportedFlashModesList.toArray(ret);
}
- public boolean isTorchModeSupported(String cameraId) {
+ static boolean isEmulator()
+ {
+ return ((Build.BRAND.startsWith("generic") && Build.DEVICE.startsWith("generic"))
+ || Build.FINGERPRINT.startsWith("generic")
+ || Build.FINGERPRINT.startsWith("unknown")
+ || Build.HARDWARE.contains("goldfish")
+ || Build.HARDWARE.contains("ranchu")
+ || Build.MODEL.contains("google_sdk")
+ || Build.MODEL.contains("Emulator")
+ || Build.MODEL.contains("Android SDK built for x86")
+ || Build.MANUFACTURER.contains("Genymotion")
+ || Build.PRODUCT.contains("sdk")
+ || Build.PRODUCT.contains("vbox86p")
+ || Build.PRODUCT.contains("emulator")
+ || Build.PRODUCT.contains("simulator"));
+ }
+
+ boolean isTorchModeSupported(String cameraId) {
boolean ret = false;
final CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
if (characteristics != null)
diff --git a/src/multimedia/CMakeLists.txt b/src/multimedia/CMakeLists.txt
index 8c58545b5..8ccf81c0c 100644
--- a/src/multimedia/CMakeLists.txt
+++ b/src/multimedia/CMakeLists.txt
@@ -23,6 +23,8 @@ qt_internal_add_module(Multimedia
audio/qaudiodecoder.cpp audio/qaudiodecoder.h audio/qaudiodecoder_p.h
audio/qaudiodevice.cpp audio/qaudiodevice.h audio/qaudiodevice_p.h
audio/qaudioinput.cpp audio/qaudioinput.h
+ audio/qaudiobufferinput.cpp audio/qaudiobufferinput.h
+ audio/qaudiobufferoutput.cpp audio/qaudiobufferoutput.h audio/qaudiobufferoutput_p.h
audio/qaudiooutput.cpp audio/qaudiooutput.h
audio/qaudioformat.cpp audio/qaudioformat.h
audio/qaudiohelpers.cpp audio/qaudiohelpers_p.h
@@ -38,25 +40,28 @@ qt_internal_add_module(Multimedia
camera/qcameradevice.cpp camera/qcameradevice.h camera/qcameradevice_p.h
camera/qimagecapture.cpp camera/qimagecapture.h
compat/removed_api.cpp
+ platform/qgstreamer_platformspecificinterface.cpp platform/qgstreamer_platformspecificinterface_p.h
platform/qplatformaudiodecoder.cpp platform/qplatformaudiodecoder_p.h
platform/qplatformaudioinput_p.h
platform/qplatformaudiooutput_p.h
platform/qplatformaudioresampler_p.h
platform/qplatformcamera.cpp platform/qplatformcamera_p.h
- platform/qplatformvideosource.cpp platform/qplatformvideosource_p.h
- platform/qplatformsurfacecapture.cpp platform/qplatformsurfacecapture_p.h
+ platform/qplatformcapturablewindows_p.h
platform/qplatformimagecapture.cpp platform/qplatformimagecapture_p.h
platform/qplatformmediacapture.cpp platform/qplatformmediacapture_p.h
platform/qplatformmediadevices.cpp platform/qplatformmediadevices_p.h
- platform/qplatformmediarecorder.cpp platform/qplatformmediarecorder_p.h
platform/qplatformmediaformatinfo.cpp platform/qplatformmediaformatinfo_p.h
platform/qplatformmediaintegration.cpp platform/qplatformmediaintegration_p.h
platform/qplatformmediaplayer.cpp platform/qplatformmediaplayer_p.h
platform/qplatformmediaplugin.cpp platform/qplatformmediaplugin_p.h
+ platform/qplatformmediarecorder.cpp platform/qplatformmediarecorder_p.h
+ platform/qplatformsurfacecapture.cpp platform/qplatformsurfacecapture_p.h
platform/qplatformvideodevices.cpp platform/qplatformvideodevices_p.h
platform/qplatformvideosink.cpp platform/qplatformvideosink_p.h
+ platform/qplatformvideosource.cpp platform/qplatformvideosource_p.h
+ platform/qplatformvideoframeinput.cpp platform/qplatformvideoframeinput_p.h
+ platform/qplatformaudiobufferinput.cpp platform/qplatformaudiobufferinput_p.h
playback/qmediaplayer.cpp playback/qmediaplayer.h playback/qmediaplayer_p.h
- platform/qplatformcapturablewindows_p.h
qmediadevices.cpp qmediadevices.h
qmediaenumdebug.h
qmediaformat.cpp qmediaformat.h
@@ -64,15 +69,19 @@ qt_internal_add_module(Multimedia
qmediastoragelocation.cpp qmediastoragelocation_p.h
qmediatimerange.cpp qmediatimerange.h
qmultimediautils.cpp qmultimediautils_p.h
+ qmediaframeinput.cpp qmediaframeinput_p.h
qmaybe_p.h
qtmultimediaglobal.h qtmultimediaglobal_p.h
qerrorinfo_p.h
+ qmediainputencoderinterface_p.h
recording/qmediacapturesession.cpp recording/qmediacapturesession.h recording/qmediacapturesession_p.h
recording/qmediarecorder.cpp recording/qmediarecorder.h recording/qmediarecorder_p.h
recording/qscreencapture.cpp recording/qscreencapture.h
recording/qwindowcapture.cpp recording/qwindowcapture.h
recording/qcapturablewindow.cpp recording/qcapturablewindow.h recording/qcapturablewindow_p.h
- video/qabstractvideobuffer.cpp video/qabstractvideobuffer_p.h
+ recording/qvideoframeinput.cpp recording/qvideoframeinput.h
+ video/qabstractvideobuffer.cpp video/qabstractvideobuffer.h
+ video/qhwvideobuffer.cpp video/qhwvideobuffer_p.h
video/qmemoryvideobuffer.cpp video/qmemoryvideobuffer_p.h
video/qimagevideobuffer.cpp video/qimagevideobuffer_p.h
video/qvideoframe.cpp video/qvideoframe.h video/qvideoframe_p.h
@@ -104,9 +113,12 @@ qt_internal_add_module(Multimedia
Qt::GuiPrivate
NO_PCH_SOURCES
compat/removed_api.cpp
- GENERATE_CPP_EXPORTS
)
+qt_internal_extend_target(Multimedia
+ CONDITION LINUX OR ANDROID
+ SOURCES qsymbolsresolveutils.cpp qsymbolsresolveutils_p.h)
+
qt_internal_add_simd_part(Multimedia SIMD sse2
SOURCES
video/qvideoframeconversionhelper_sse2.cpp
diff --git a/src/multimedia/alsa/qalsaaudiodevice.cpp b/src/multimedia/alsa/qalsaaudiodevice.cpp
index f5d4a2209..893375270 100644
--- a/src/multimedia/alsa/qalsaaudiodevice.cpp
+++ b/src/multimedia/alsa/qalsaaudiodevice.cpp
@@ -37,55 +37,35 @@ QAlsaAudioDeviceInfo::QAlsaAudioDeviceInfo(const QByteArray &dev, const QString
minimumSampleRate = 8000;
maximumSampleRate = 48000;
- supportedSampleFormats << QAudioFormat::UInt8 << QAudioFormat::Int16 << QAudioFormat::Int32 << QAudioFormat::Float;
+ supportedSampleFormats = {
+ QAudioFormat::UInt8,
+ QAudioFormat::Int16,
+ QAudioFormat::Int32,
+ QAudioFormat::Float,
+ };
preferredFormat.setChannelCount(mode == QAudioDevice::Input ? 1 : 2);
preferredFormat.setSampleFormat(QAudioFormat::Float);
preferredFormat.setSampleRate(48000);
}
-QAlsaAudioDeviceInfo::~QAlsaAudioDeviceInfo()
-{
-}
+QAlsaAudioDeviceInfo::~QAlsaAudioDeviceInfo() = default;
void QAlsaAudioDeviceInfo::checkSurround()
{
+ if (mode != QAudioDevice::Output)
+ return;
+
surround40 = false;
surround51 = false;
surround71 = false;
- void **hints, **n;
- char *name, *descr, *io;
-
- if(snd_device_name_hint(-1, "pcm", &hints) < 0)
- return;
-
- n = hints;
-
- while (*n != NULL) {
- name = snd_device_name_get_hint(*n, "NAME");
- descr = snd_device_name_get_hint(*n, "DESC");
- io = snd_device_name_get_hint(*n, "IOID");
- if((name != NULL) && (descr != NULL)) {
- QString deviceName = QLatin1String(name);
- if (mode == QAudioDevice::Output) {
- if(deviceName.contains(QLatin1String("surround40")))
- surround40 = true;
- if(deviceName.contains(QLatin1String("surround51")))
- surround51 = true;
- if(deviceName.contains(QLatin1String("surround71")))
- surround71 = true;
- }
- }
- if(name != NULL)
- free(name);
- if(descr != NULL)
- free(descr);
- if(io != NULL)
- free(io);
- ++n;
- }
- snd_device_name_free_hint(hints);
+ if (id.startsWith(QLatin1String("surround40")))
+ surround40 = true;
+ if (id.startsWith(QLatin1String("surround51")))
+ surround51 = true;
+ if (id.startsWith(QLatin1String("surround71")))
+ surround71 = true;
}
QT_END_NAMESPACE
diff --git a/src/multimedia/alsa/qalsaaudiodevice_p.h b/src/multimedia/alsa/qalsaaudiodevice_p.h
index f82ea4f5a..dcbc9e692 100644
--- a/src/multimedia/alsa/qalsaaudiodevice_p.h
+++ b/src/multimedia/alsa/qalsaaudiodevice_p.h
@@ -38,9 +38,9 @@ public:
private:
void checkSurround();
- bool surround40;
- bool surround51;
- bool surround71;
+ bool surround40{};
+ bool surround51{};
+ bool surround71{};
};
QT_END_NAMESPACE
diff --git a/src/multimedia/alsa/qalsaaudiosink.cpp b/src/multimedia/alsa/qalsaaudiosink.cpp
index 98a68861f..e515219a2 100644
--- a/src/multimedia/alsa/qalsaaudiosink.cpp
+++ b/src/multimedia/alsa/qalsaaudiosink.cpp
@@ -30,13 +30,13 @@ QAlsaAudioSink::QAlsaAudioSink(const QByteArray &device, QObject *parent)
m_device = device;
timer = new QTimer(this);
- connect(timer, SIGNAL(timeout()), this, SLOT(userFeed()));
+ connect(timer, &QTimer::timeout, this, &QAlsaAudioSink::userFeed);
}
QAlsaAudioSink::~QAlsaAudioSink()
{
close();
- disconnect(timer, SIGNAL(timeout()));
+ disconnect(timer, &QTimer::timeout, this, &QAlsaAudioSink::userFeed);
QCoreApplication::processEvents();
delete timer;
}
@@ -130,6 +130,7 @@ int QAlsaAudioSink::setFormat()
pcmformat = SND_PCM_FORMAT_FLOAT_BE;
else
pcmformat = SND_PCM_FORMAT_FLOAT_LE;
+ break;
default:
break;
}
diff --git a/src/multimedia/alsa/qalsaaudiosink_p.h b/src/multimedia/alsa/qalsaaudiosink_p.h
index 7e8836f96..0f5a5aa5a 100644
--- a/src/multimedia/alsa/qalsaaudiosink_p.h
+++ b/src/multimedia/alsa/qalsaaudiosink_p.h
@@ -96,7 +96,6 @@ private:
char* audioBuffer = nullptr;
snd_pcm_t* handle = nullptr;
snd_pcm_access_t access = SND_PCM_ACCESS_RW_INTERLEAVED;
- snd_pcm_format_t pcmformat = SND_PCM_FORMAT_S16;
snd_pcm_hw_params_t *hwparams = nullptr;
qreal m_volume = 1.0f;
};
diff --git a/src/multimedia/alsa/qalsaaudiosource.cpp b/src/multimedia/alsa/qalsaaudiosource.cpp
index ce099463d..ebf6e24e2 100644
--- a/src/multimedia/alsa/qalsaaudiosource.cpp
+++ b/src/multimedia/alsa/qalsaaudiosource.cpp
@@ -16,7 +16,6 @@
#include <QtCore/qvarlengtharray.h>
#include <QtMultimedia/private/qaudiohelpers_p.h>
#include "qalsaaudiosource_p.h"
-#include "qalsaaudiodevice_p.h"
QT_BEGIN_NAMESPACE
@@ -45,13 +44,13 @@ QAlsaAudioSource::QAlsaAudioSource(const QByteArray &device, QObject *parent)
m_device = device;
timer = new QTimer(this);
- connect(timer, SIGNAL(timeout()), this, SLOT(userFeed()));
+ connect(timer, &QTimer::timeout, this, &QAlsaAudioSource::userFeed);
}
QAlsaAudioSource::~QAlsaAudioSource()
{
close();
- disconnect(timer, SIGNAL(timeout()));
+ disconnect(timer, &QTimer::timeout, this, &QAlsaAudioSource::userFeed);
QCoreApplication::processEvents();
delete timer;
}
@@ -143,21 +142,22 @@ int QAlsaAudioSource::setFormat()
break;
case QAudioFormat::Int16:
if constexpr (QSysInfo::ByteOrder == QSysInfo::BigEndian)
- pcmformat = SND_PCM_FORMAT_S16_LE;
- else
pcmformat = SND_PCM_FORMAT_S16_BE;
+ else
+ pcmformat = SND_PCM_FORMAT_S16_LE;
break;
case QAudioFormat::Int32:
if constexpr (QSysInfo::ByteOrder == QSysInfo::BigEndian)
- pcmformat = SND_PCM_FORMAT_S32_LE;
- else
pcmformat = SND_PCM_FORMAT_S32_BE;
+ else
+ pcmformat = SND_PCM_FORMAT_S32_LE;
break;
case QAudioFormat::Float:
if constexpr (QSysInfo::ByteOrder == QSysInfo::BigEndian)
- pcmformat = SND_PCM_FORMAT_FLOAT_LE;
- else
pcmformat = SND_PCM_FORMAT_FLOAT_BE;
+ else
+ pcmformat = SND_PCM_FORMAT_FLOAT_LE;
+ break;
default:
break;
}
@@ -370,7 +370,7 @@ bool QAlsaAudioSource::open()
bytesAvailable = checkBytesReady();
if(pullMode)
- connect(audioSource,SIGNAL(readyRead()),this,SLOT(userFeed()));
+ connect(audioSource, &QIODevice::readyRead, this, &QAlsaAudioSource::userFeed);
// Step 6: Start audio processing
chunks = buffer_size/period_size;
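The hunks above fix an inverted byte-order mapping: native Int16/Int32/Float samples must be paired with the *_BE ALSA formats on a big-endian host and the *_LE formats otherwise. A compact restatement of the corrected mapping (illustrative only):

    #include <alsa/asoundlib.h>
    #include <QtCore/QSysInfo>
    #include <QtMultimedia/QAudioFormat>

    // Picks the ALSA PCM format matching the host byte order for each native sample format.
    static snd_pcm_format_t alsaFormatFor(QAudioFormat::SampleFormat fmt)
    {
        const bool bigEndian = QSysInfo::ByteOrder == QSysInfo::BigEndian;
        switch (fmt) {
        case QAudioFormat::UInt8: return SND_PCM_FORMAT_U8;
        case QAudioFormat::Int16: return bigEndian ? SND_PCM_FORMAT_S16_BE : SND_PCM_FORMAT_S16_LE;
        case QAudioFormat::Int32: return bigEndian ? SND_PCM_FORMAT_S32_BE : SND_PCM_FORMAT_S32_LE;
        case QAudioFormat::Float: return bigEndian ? SND_PCM_FORMAT_FLOAT_BE : SND_PCM_FORMAT_FLOAT_LE;
        default:                  return SND_PCM_FORMAT_UNKNOWN;
        }
    }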
diff --git a/src/multimedia/alsa/qalsamediadevices.cpp b/src/multimedia/alsa/qalsamediadevices.cpp
index 5a133e9d1..9466fa0cd 100644
--- a/src/multimedia/alsa/qalsamediadevices.cpp
+++ b/src/multimedia/alsa/qalsamediadevices.cpp
@@ -13,6 +13,26 @@
QT_BEGIN_NAMESPACE
+namespace {
+
+struct free_char
+{
+ void operator()(char *c) const { ::free(c); }
+};
+
+using unique_str = std::unique_ptr<char, free_char>;
+
+bool operator==(const unique_str &str, std::string_view sv)
+{
+ return std::string_view{ str.get() } == sv;
+}
+bool operator!=(const unique_str &str, std::string_view sv)
+{
+ return !(str == sv);
+}
+
+} // namespace
+
QAlsaMediaDevices::QAlsaMediaDevices()
: QPlatformMediaDevices()
{
@@ -22,52 +42,50 @@ static QList<QAudioDevice> availableDevices(QAudioDevice::Mode mode)
{
QList<QAudioDevice> devices;
- QByteArray filter;
-
// Create a list of all current audio devices that support mode
- void **hints, **n;
- char *name, *descr, *io;
- bool hasDefault = false;
-
- if(snd_device_name_hint(-1, "pcm", &hints) < 0) {
+ void **hints;
+ if (snd_device_name_hint(-1, "pcm", &hints) < 0) {
qWarning() << "no alsa devices available";
return devices;
}
- n = hints;
- if(mode == QAudioDevice::Input) {
- filter = "Input";
- } else {
- filter = "Output";
- }
+ std::string_view filter = (mode == QAudioDevice::Input) ? "Input" : "Output";
- QAlsaAudioDeviceInfo* sysdefault = nullptr;
+ QAlsaAudioDeviceInfo *sysdefault = nullptr;
- while (*n != NULL) {
- name = snd_device_name_get_hint(*n, "NAME");
- if (name != 0 && qstrcmp(name, "null") != 0) {
- descr = snd_device_name_get_hint(*n, "DESC");
- io = snd_device_name_get_hint(*n, "IOID");
-
- if ((descr != NULL) && ((io == NULL) || (io == filter))) {
- auto *infop = new QAlsaAudioDeviceInfo(name, QString::fromUtf8(descr), mode);
- devices.append(infop->create());
- if (!hasDefault && strcmp(name, "default") == 0) {
- infop->isDefault = true;
- hasDefault = true;
- }
- else if (!sysdefault && !hasDefault && strcmp(name, "sysdefault") == 0) {
- sysdefault = infop;
- }
+ auto makeDeviceInfo = [&filter, mode](void *entry) -> QAlsaAudioDeviceInfo * {
+ unique_str name{ snd_device_name_get_hint(entry, "NAME") };
+ if (name && name != "null") {
+ unique_str descr{ snd_device_name_get_hint(entry, "DESC") };
+ unique_str io{ snd_device_name_get_hint(entry, "IOID") };
+
+ if (descr && (!io || (io == filter))) {
+ auto *infop = new QAlsaAudioDeviceInfo{
+ name.get(),
+ QString::fromUtf8(descr.get()),
+ mode,
+ };
+ return infop;
}
+ }
+ return nullptr;
+ };
+
+ bool hasDefault = false;
+ void **n = hints;
+ while (*n != NULL) {
+ QAlsaAudioDeviceInfo *infop = makeDeviceInfo(*n++);
- free(descr);
- free(io);
+ if (infop) {
+ devices.append(infop->create());
+ if (!hasDefault && infop->id.startsWith("default")) {
+ infop->isDefault = true;
+ hasDefault = true;
+ }
+ if (!sysdefault && infop->id.startsWith("sysdefault"))
+ sysdefault = infop;
}
- free(name);
- ++n;
}
- snd_device_name_free_hint(hints);
if (!hasDefault && sysdefault) {
// Make "sysdefault" the default device if there is no "default" device exists
@@ -75,11 +93,15 @@ static QList<QAudioDevice> availableDevices(QAudioDevice::Mode mode)
hasDefault = true;
}
if (!hasDefault && devices.size() > 0) {
- auto infop = new QAlsaAudioDeviceInfo("default", QString(), QAudioDevice::Output);
- infop->isDefault = true;
- devices.prepend(infop->create());
+ // forcefully declare the first device as "default"
+ QAlsaAudioDeviceInfo *infop = makeDeviceInfo(hints[0]);
+ if (infop) {
+ infop->isDefault = true;
+ devices.prepend(infop->create());
+ }
}
+ snd_device_name_free_hint(hints);
return devices;
}
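The rewrite above wraps the malloc()-allocated strings returned by snd_device_name_get_hint() in std::unique_ptr with a free()-based deleter, so every exit path releases them. The same pattern in isolation (a generic sketch, not ALSA-specific):

    #include <cstdlib>
    #include <cstring>
    #include <memory>

    // Own a C string allocated with malloc() and release it with free() automatically.
    struct free_deleter
    {
        void operator()(void *p) const { std::free(p); }
    };
    using unique_cstr = std::unique_ptr<char, free_deleter>;

    int main()
    {
        char *raw = static_cast<char *>(std::malloc(16));
        std::strcpy(raw, "surround51");
        unique_cstr name{ raw };                                         // takes ownership
        const bool surround = std::strncmp(name.get(), "surround", 8) == 0;
        return surround ? 0 : 1;                                         // freed on scope exit
    }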
diff --git a/src/multimedia/android/qandroidmediadevices.cpp b/src/multimedia/android/qandroidmediadevices.cpp
index 55533621c..7688da079 100644
--- a/src/multimedia/android/qandroidmediadevices.cpp
+++ b/src/multimedia/android/qandroidmediadevices.cpp
@@ -23,9 +23,7 @@ Q_DECLARE_JNI_CLASS(QtAudioDeviceManager,
QAndroidMediaDevices::QAndroidMediaDevices() : QPlatformMediaDevices()
{
- QtJniTypes::QtAudioDeviceManager::callStaticMethod<void>(
- "registerAudioHeadsetStateReceiver",
- QNativeInterface::QAndroidApplication::context());
+ QtJniTypes::QtAudioDeviceManager::callStaticMethod<void>("registerAudioHeadsetStateReceiver");
}
QAndroidMediaDevices::~QAndroidMediaDevices()
@@ -33,9 +31,7 @@ QAndroidMediaDevices::~QAndroidMediaDevices()
    // Object of QAndroidMediaDevices type is static. Unregistering will happen only when closing
    // the application. In such a case it is probably not needed, but let's leave it for
    // compatibility with the Android documentation
- QtJniTypes::QtAudioDeviceManager::callStaticMethod<void>(
- "unregisterAudioHeadsetStateReceiver",
- QNativeInterface::QAndroidApplication::context());
+ QtJniTypes::QtAudioDeviceManager::callStaticMethod<void>("unregisterAudioHeadsetStateReceiver");
}
QList<QAudioDevice> QAndroidMediaDevices::audioInputs() const
diff --git a/src/multimedia/audio/qaudiobufferinput.cpp b/src/multimedia/audio/qaudiobufferinput.cpp
new file mode 100644
index 000000000..e43066f10
--- /dev/null
+++ b/src/multimedia/audio/qaudiobufferinput.cpp
@@ -0,0 +1,184 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qaudiobufferinput.h"
+#include "qplatformaudiobufferinput_p.h"
+#include "qmediainputencoderinterface_p.h"
+#include "qmediaframeinput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+class QAudioBufferInputPrivate : public QMediaFrameInputPrivate
+{
+public:
+ QAudioBufferInputPrivate(QAudioBufferInput *q) : q(q) { }
+
+ bool sendAudioBuffer(const QAudioBuffer &audioBuffer)
+ {
+ return sendMediaFrame(
+ [&]() { emit m_platfromAudioBufferInput->newAudioBuffer(audioBuffer); });
+ }
+
+ void initialize(const QAudioFormat &format = {})
+ {
+ m_platfromAudioBufferInput = std::make_unique<QPlatformAudioBufferInput>(format);
+ addUpdateSignal(m_platfromAudioBufferInput.get(),
+ &QPlatformAudioBufferInput::encoderUpdated);
+ }
+
+ void uninitialize()
+ {
+ m_platfromAudioBufferInput.reset();
+
+ if (captureSession())
+ captureSession()->setAudioBufferInput(nullptr);
+ }
+
+ QMediaCaptureSession *session() const { return m_captureSession; }
+
+ QPlatformAudioBufferInput *platfromAudioBufferInput() const
+ {
+ return m_platfromAudioBufferInput.get();
+ }
+
+private:
+ void updateCaptureSessionConnections(QMediaCaptureSession *prevSession,
+ QMediaCaptureSession *newSession) override
+ {
+ if (prevSession)
+ removeUpdateSignal(prevSession, &QMediaCaptureSession::audioOutputChanged);
+
+ if (newSession)
+ addUpdateSignal(newSession, &QMediaCaptureSession::audioOutputChanged);
+ }
+
+ bool checkIfCanSendMediaFrame() const override
+ {
+ if (auto encoderInterface = m_platfromAudioBufferInput->encoderInterface())
+ return encoderInterface->canPushFrame();
+
+ // Not implemented yet
+ // return captureSession()->audioOutput() != nullptr;
+ return false;
+ }
+
+ void emitReadyToSendMediaFrame() override { emit q->readyToSendAudioBuffer(); }
+
+private:
+ QAudioBufferInput *q = nullptr;
+ QMediaCaptureSession *m_captureSession = nullptr;
+ std::unique_ptr<QPlatformAudioBufferInput> m_platfromAudioBufferInput;
+};
+
+/*!
+ \class QAudioBufferInput
+ \inmodule QtMultimedia
+ \ingroup multimedia
+ \ingroup multimedia_audio
+ \since 6.8
+
+ \brief The QAudioBufferInput class is used for providing custom audio buffers
+ to \l QMediaRecorder through \l QMediaCaptureSession.
+
+ \sa QMediaRecorder, QMediaCaptureSession
+*/
+
+/*!
+ Constructs a new QAudioBufferInput object with \a parent.
+*/
+QAudioBufferInput::QAudioBufferInput(QObject *parent) : QAudioBufferInput({}, parent) { }
+
+/*!
+ Constructs a new QAudioBufferInput object with audio \a format and \a parent.
+
+ The specified \a format will work as a hint for the initialization of the matching
+ audio encoder upon invoking \l QMediaRecorder::record().
+ If the format is not specified or not valid, the audio encoder will be initialized
+ upon sending the first audio buffer.
+
+ We recommend specifying the format if you know in advance what kind of audio buffers
+ you're going to send.
+*/
+QAudioBufferInput::QAudioBufferInput(const QAudioFormat &format, QObject *parent)
+ : QObject(*new QAudioBufferInputPrivate(this), parent)
+{
+ Q_D(QAudioBufferInput);
+ d->initialize(format);
+}
+
+/*!
+ Destroys the object.
+ */
+QAudioBufferInput::~QAudioBufferInput()
+{
+ Q_D(QAudioBufferInput);
+ d->uninitialize();
+}
+
+/*!
+ Sends \l QAudioBuffer to \l QMediaRecorder through \l QMediaCaptureSession.
+
+ Returns \c true if the specified \a audioBuffer has been sent successfully
+ to the destination. Returns \c false if the buffer hasn't been sent,
+ which can happen if the instance is not assigned to
+ a \l QMediaCaptureSession, the session doesn't have a media recorder,
+ the media recorder has not been started, or its queue is full.
+ The \l readyToSendAudioBuffer() signal will be emitted as soon as
+ the destination is able to handle a new audio buffer.
+
+ Sending an empty audio buffer is treated by \l QMediaRecorder
+ as the end of the input stream. QMediaRecorder stops the recording
+ automatically if \l QMediaRecorder::autoStop is \c true and
+ all the inputs have reported the end of the stream.
+*/
+bool QAudioBufferInput::sendAudioBuffer(const QAudioBuffer &audioBuffer)
+{
+ Q_D(QAudioBufferInput);
+ return d->sendAudioBuffer(audioBuffer);
+}
+
+/*!
+ Returns the audio format that was specified upon construction of the audio buffer input.
+*/
+QAudioFormat QAudioBufferInput::format() const
+{
+ Q_D(const QAudioBufferInput);
+ return d->platfromAudioBufferInput()->audioFormat();
+}
+
+/*!
+ Returns the capture session this audio buffer input is connected to, or
+ a \c nullptr if the audio buffer input is not connected to a capture session.
+
+ Use QMediaCaptureSession::setAudioBufferInput() to connect
+ the audio buffer input to a session.
+*/
+QMediaCaptureSession *QAudioBufferInput::captureSession() const
+{
+ Q_D(const QAudioBufferInput);
+ return d->captureSession();
+}
+
+void QAudioBufferInput::setCaptureSession(QMediaCaptureSession *captureSession)
+{
+ Q_D(QAudioBufferInput);
+ d->setCaptureSession(captureSession);
+}
+
+QPlatformAudioBufferInput *QAudioBufferInput::platformAudioBufferInput() const
+{
+ Q_D(const QAudioBufferInput);
+ return d->platfromAudioBufferInput();
+}
+
+/*!
+ \fn void QAudioBufferInput::readyToSendAudioBuffer()
+
+ Signals that a new audio buffer can be sent to the audio buffer input.
+ After receiving the signal, if you have audio data to be sent, invoke \l sendAudioBuffer
+ once or in a loop until it returns \c false.
+
+ \sa sendAudioBuffer()
+*/
+
+QT_END_NAMESPACE
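A minimal usage sketch for the new class (illustrative; makeNextBuffer() is a hypothetical generator of QAudioBuffer objects and is not part of Qt):

    #include <QAudioBufferInput>
    #include <QMediaCaptureSession>
    #include <QMediaRecorder>

    QAudioBuffer makeNextBuffer(const QAudioFormat &format); // hypothetical audio source

    void startGeneratedRecording(QMediaCaptureSession &session, QMediaRecorder &recorder,
                                 QAudioBufferInput &input, const QAudioFormat &format)
    {
        session.setAudioBufferInput(&input);
        session.setRecorder(&recorder);

        // Push buffers until the destination reports a full queue; the signal fires
        // again once it can accept more data.
        QObject::connect(&input, &QAudioBufferInput::readyToSendAudioBuffer, &input,
                         [&input, format] {
                             while (input.sendAudioBuffer(makeNextBuffer(format))) { }
                         });

        recorder.record();
    }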
diff --git a/src/multimedia/audio/qaudiobufferinput.h b/src/multimedia/audio/qaudiobufferinput.h
new file mode 100644
index 000000000..f48db186a
--- /dev/null
+++ b/src/multimedia/audio/qaudiobufferinput.h
@@ -0,0 +1,48 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QAUDIOBUFFERINPUT_H
+#define QAUDIOBUFFERINPUT_H
+
+#include <QtMultimedia/qtmultimediaexports.h>
+#include <QtMultimedia/qaudiobuffer.h>
+#include <QtCore/qobject.h>
+
+QT_BEGIN_NAMESPACE
+
+class QPlatformAudioBufferInput;
+class QAudioBufferInputPrivate;
+class QMediaCaptureSession;
+
+class Q_MULTIMEDIA_EXPORT QAudioBufferInput : public QObject
+{
+ Q_OBJECT
+public:
+ explicit QAudioBufferInput(QObject *parent = nullptr);
+
+ explicit QAudioBufferInput(const QAudioFormat &format, QObject *parent = nullptr);
+
+ ~QAudioBufferInput() override;
+
+ bool sendAudioBuffer(const QAudioBuffer &audioBuffer);
+
+ QAudioFormat format() const;
+
+ QMediaCaptureSession *captureSession() const;
+
+Q_SIGNALS:
+ void readyToSendAudioBuffer();
+
+private:
+ void setCaptureSession(QMediaCaptureSession *captureSession);
+
+ QPlatformAudioBufferInput *platformAudioBufferInput() const;
+
+ friend class QMediaCaptureSession;
+ Q_DISABLE_COPY(QAudioBufferInput)
+ Q_DECLARE_PRIVATE(QAudioBufferInput)
+};
+
+QT_END_NAMESPACE
+
+#endif // QAUDIOBUFFERINPUT_H
diff --git a/src/multimedia/audio/qaudiobufferoutput.cpp b/src/multimedia/audio/qaudiobufferoutput.cpp
new file mode 100644
index 000000000..50389c49a
--- /dev/null
+++ b/src/multimedia/audio/qaudiobufferoutput.cpp
@@ -0,0 +1,78 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qaudiobufferoutput_p.h"
+#include "qmediaplayer.h"
+
+QT_BEGIN_NAMESPACE
+
+/*!
+ \class QAudioBufferOutput
+ \inmodule QtMultimedia
+ \ingroup multimedia
+ \ingroup multimedia_audio
+ \since 6.8
+
+ \brief The QAudioBufferOutput class is used for capturing audio data provided by \l QMediaPlayer.
+
+ QAudioBufferOutput can be set on a QMediaPlayer in order to receive audio buffers
+ decoded by the media player. The received audio data can be used for any
+ processing or visualization.
+
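+ A minimal sketch of receiving decoded buffers; the file name and the
+ lambda body are illustrative:
+
+ \code
+ QMediaPlayer player;
+ QAudioBufferOutput bufferOutput;
+ player.setAudioBufferOutput(&bufferOutput);
+
+ QObject::connect(&bufferOutput, &QAudioBufferOutput::audioBufferReceived,
+                  &player, [](const QAudioBuffer &buffer) {
+     // process or visualize the decoded samples here
+ });
+
+ player.setSource(QUrl::fromLocalFile("music.mp3"));
+ player.play();
+ \endcode
+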
+ \sa QMediaPlayer, QMediaPlayer::setAudioBufferOutput, QAudioBuffer
+*/
+
+/*!
+ Constructs a new QAudioBufferOutput object with \a parent.
+
+ The audio format of output audio buffers will depend on
+ the source media file and the inner audio decoder in \l QMediaPlayer.
+*/
+QAudioBufferOutput::QAudioBufferOutput(QObject *parent)
+ : QObject(*new QAudioBufferOutputPrivate, parent)
+{
+}
+
+/*!
+ Constructs a new QAudioBufferOutput object with audio \a format and \a parent.
+
+ If the specified \a format is valid, it will be the format of output
+ audio buffers. Otherwise, the format of output audio buffers
+ will depend on the source media file and the inner audio decoder in \l QMediaPlayer.
+*/
+QAudioBufferOutput::QAudioBufferOutput(const QAudioFormat &format, QObject *parent)
+ : QObject(*new QAudioBufferOutputPrivate(format), parent)
+{
+}
+
+/*!
+ Destroys the audio buffer output object.
+*/
+QAudioBufferOutput::~QAudioBufferOutput()
+{
+ Q_D(QAudioBufferOutput);
+
+ if (d->mediaPlayer)
+ d->mediaPlayer->setAudioBufferOutput(nullptr);
+}
+
+/*!
+ Returns the audio format specified in the constructor.
+
+ If the format is valid, it specifies the format of output audio buffers.
+*/
+QAudioFormat QAudioBufferOutput::format() const
+{
+ Q_D(const QAudioBufferOutput);
+ return d->format;
+}
+
+/*!
+ \fn void QAudioBufferOutput::audioBufferReceived(const QAudioBuffer &buffer)
+
+ Signals that a new audio \a buffer has been received from \l QMediaPlayer.
+*/
+
+QT_END_NAMESPACE
+
+#include "moc_qaudiobufferoutput.cpp"
diff --git a/src/multimedia/audio/qaudiobufferoutput.h b/src/multimedia/audio/qaudiobufferoutput.h
new file mode 100644
index 000000000..2e4fab1a4
--- /dev/null
+++ b/src/multimedia/audio/qaudiobufferoutput.h
@@ -0,0 +1,37 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QAUDIOBUFFEROUTPUT_H
+#define QAUDIOBUFFEROUTPUT_H
+
+#include <QtMultimedia/qtmultimediaexports.h>
+#include <QtMultimedia/qaudiobuffer.h>
+#include <QtCore/qobject.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAudioBufferOutputPrivate;
+
+class Q_MULTIMEDIA_EXPORT QAudioBufferOutput : public QObject
+{
+ Q_OBJECT
+public:
+ explicit QAudioBufferOutput(QObject *parent = nullptr);
+
+ explicit QAudioBufferOutput(const QAudioFormat &format, QObject *parent = nullptr);
+
+ ~QAudioBufferOutput() override;
+
+ QAudioFormat format() const;
+
+Q_SIGNALS:
+ void audioBufferReceived(const QAudioBuffer &buffer);
+
+private:
+ Q_DISABLE_COPY(QAudioBufferOutput)
+ Q_DECLARE_PRIVATE(QAudioBufferOutput)
+};
+
+QT_END_NAMESPACE
+
+#endif // QAUDIOBUFFEROUTPUT_H
diff --git a/src/multimedia/audio/qaudiobufferoutput_p.h b/src/multimedia/audio/qaudiobufferoutput_p.h
new file mode 100644
index 000000000..2f9c11bd1
--- /dev/null
+++ b/src/multimedia/audio/qaudiobufferoutput_p.h
@@ -0,0 +1,42 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QAUDIOBUFFEROUTPUT_P_H
+#define QAUDIOBUFFEROUTPUT_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/private/qobject_p.h>
+#include "qaudiobufferoutput.h"
+
+QT_BEGIN_NAMESPACE
+
+class QMediaPlayer;
+
+class QAudioBufferOutputPrivate : public QObjectPrivate
+{
+public:
+ QAudioBufferOutputPrivate(const QAudioFormat &format = {}) : format(std::move(format)) { }
+
+ static QMediaPlayer *exchangeMediaPlayer(QAudioBufferOutput &output, QMediaPlayer *player)
+ {
+ auto outputPrivate = static_cast<QAudioBufferOutputPrivate *>(output.d_func());
+ return std::exchange(outputPrivate->mediaPlayer, player);
+ }
+
+ QAudioFormat format;
+ QMediaPlayer *mediaPlayer = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif // QAUDIOBUFFEROUTPUT_P_H
diff --git a/src/multimedia/audio/qsamplecache_p.cpp b/src/multimedia/audio/qsamplecache_p.cpp
index 825c79685..805ab534e 100644
--- a/src/multimedia/audio/qsamplecache_p.cpp
+++ b/src/multimedia/audio/qsamplecache_p.cpp
@@ -11,7 +11,7 @@
#include <QtCore/QDebug>
#include <QtCore/qloggingcategory.h>
-static Q_LOGGING_CATEGORY(qLcSampleCache, "qt.multimedia.samplecache")
+Q_STATIC_LOGGING_CATEGORY(qLcSampleCache, "qt.multimedia.samplecache")
#include <mutex>
@@ -357,12 +357,13 @@ void QSample::load()
Q_ASSERT(QThread::currentThread()->objectName() == QLatin1String("QSampleCache::LoadingThread"));
#endif
qCDebug(qLcSampleCache) << "QSample: load [" << m_url << "]";
- m_stream = m_parent->networkAccessManager().get(QNetworkRequest(m_url));
- connect(m_stream, SIGNAL(errorOccurred(QNetworkReply::NetworkError)), SLOT(loadingError(QNetworkReply::NetworkError)));
+ QNetworkReply *reply = m_parent->networkAccessManager().get(QNetworkRequest(m_url));
+ m_stream = reply;
+ connect(reply, &QNetworkReply::errorOccurred, this, &QSample::loadingError);
m_waveDecoder = new QWaveDecoder(m_stream);
- connect(m_waveDecoder, SIGNAL(formatKnown()), SLOT(decoderReady()));
- connect(m_waveDecoder, SIGNAL(parsingError()), SLOT(decoderError()));
- connect(m_waveDecoder, SIGNAL(readyRead()), SLOT(readSample()));
+ connect(m_waveDecoder, &QWaveDecoder::formatKnown, this, &QSample::decoderReady);
+ connect(m_waveDecoder, &QWaveDecoder::parsingError, this, &QSample::decoderError);
+ connect(m_waveDecoder, &QIODevice::readyRead, this, &QSample::readSample);
m_waveDecoder->open(QIODevice::ReadOnly);
}
diff --git a/src/multimedia/audio/qsoundeffect.cpp b/src/multimedia/audio/qsoundeffect.cpp
index c12114672..c403648f9 100644
--- a/src/multimedia/audio/qsoundeffect.cpp
+++ b/src/multimedia/audio/qsoundeffect.cpp
@@ -13,7 +13,7 @@
#include <private/qplatformmediaintegration_p.h>
#include <private/qplatformaudioresampler_p.h>
-static Q_LOGGING_CATEGORY(qLcSoundEffect, "qt.multimedia.soundeffect")
+Q_STATIC_LOGGING_CATEGORY(qLcSoundEffect, "qt.multimedia.soundeffect")
QT_BEGIN_NAMESPACE
@@ -421,8 +421,7 @@ void QSoundEffect::setSource(const QUrl &url)
disconnect(d->m_sample.get(), &QSample::error, d, &QSoundEffectPrivate::decoderError);
disconnect(d->m_sample.get(), &QSample::ready, d, &QSoundEffectPrivate::sampleReady);
}
- d->m_sample->release();
- d->m_sample = nullptr;
+ d->m_sample.reset();
}
if (d->m_audioSink) {
diff --git a/src/multimedia/audio/qwavedecoder.cpp b/src/multimedia/audio/qwavedecoder.cpp
index 36ac3c779..452363ddc 100644
--- a/src/multimedia/audio/qwavedecoder.cpp
+++ b/src/multimedia/audio/qwavedecoder.cpp
@@ -56,7 +56,7 @@ bool QWaveDecoder::open(QIODevice::OpenMode mode)
if (canOpen && enoughDataAvailable())
handleData();
else
- connect(device, SIGNAL(readyRead()), SLOT(handleData()));
+ connect(device, &QIODevice::readyRead, this, &QWaveDecoder::handleData);
return canOpen;
}
@@ -274,7 +274,7 @@ bool QWaveDecoder::writeDataLength()
void QWaveDecoder::parsingFailed()
{
Q_ASSERT(device);
- device->disconnect(SIGNAL(readyRead()), this, SLOT(handleData()));
+ disconnect(device, &QIODevice::readyRead, this, &QWaveDecoder::handleData);
emit parsingError();
}
@@ -386,7 +386,7 @@ void QWaveDecoder::handleData()
if (state == QWaveDecoder::WaitingForDataState) {
if (findChunk("data")) {
- device->disconnect(SIGNAL(readyRead()), this, SLOT(handleData()));
+ disconnect(device, &QIODevice::readyRead, this, &QWaveDecoder::handleData);
chunk descriptor;
device->read(reinterpret_cast<char *>(&descriptor), sizeof(chunk));
@@ -400,7 +400,7 @@ void QWaveDecoder::handleData()
dataSize = device->size() - headerLength();
haveFormat = true;
- connect(device, SIGNAL(readyRead()), SIGNAL(readyRead()));
+ connect(device, &QIODevice::readyRead, this, &QIODevice::readyRead);
emit formatKnown();
return;
diff --git a/src/multimedia/camera/qcamera.cpp b/src/multimedia/camera/qcamera.cpp
index 527b14c25..9cfbcc01d 100644
--- a/src/multimedia/camera/qcamera.cpp
+++ b/src/multimedia/camera/qcamera.cpp
@@ -152,14 +152,6 @@ QT_BEGIN_NAMESPACE
See the \l{Camera Overview}{camera overview} for more information.
*/
-
-void QCameraPrivate::_q_error(int error, const QString &errorString)
-{
- Q_Q(QCamera);
-
- this->error.setAndNotify(QCamera::Error(error), errorString, *q);
-}
-
void QCameraPrivate::init(const QCameraDevice &device)
{
Q_Q(QCamera);
@@ -167,16 +159,16 @@ void QCameraPrivate::init(const QCameraDevice &device)
auto maybeControl = QPlatformMediaIntegration::instance()->createCamera(q);
if (!maybeControl) {
qWarning() << "Failed to initialize QCamera" << maybeControl.error();
- error = { QCamera::CameraError, maybeControl.error() };
return;
}
control = maybeControl.value();
cameraDevice = !device.isNull() ? device : QMediaDevices::defaultVideoInput();
if (cameraDevice.isNull())
- _q_error(QCamera::CameraError, QStringLiteral("No camera detected"));
+ control->updateError(QCamera::CameraError, QStringLiteral("No camera detected"));
control->setCamera(cameraDevice);
- q->connect(control, SIGNAL(activeChanged(bool)), q, SIGNAL(activeChanged(bool)));
- q->connect(control, SIGNAL(error(int,QString)), q, SLOT(_q_error(int,QString)));
+ q->connect(control, &QPlatformVideoSource::activeChanged, q, &QCamera::activeChanged);
+ q->connect(control, &QPlatformCamera::errorChanged, q, &QCamera::errorChanged);
+ q->connect(control, &QPlatformCamera::errorOccurred, q, &QCamera::errorOccurred);
}
/*!
@@ -296,7 +288,9 @@ void QCamera::setActive(bool active)
QCamera::Error QCamera::error() const
{
- return d_func()->error.code();
+ Q_D(const QCamera);
+
+ return d->control ? d->control->error() : QCamera::CameraError;
}
/*!
@@ -312,7 +306,10 @@ QCamera::Error QCamera::error() const
*/
QString QCamera::errorString() const
{
- return d_func()->error.description();
+ Q_D(const QCamera);
+
+ return d->control ? d->control->errorString()
+ : QStringLiteral("Camera is not supported on the platform");
}
/*! \enum QCamera::Feature
diff --git a/src/multimedia/camera/qcamera.h b/src/multimedia/camera/qcamera.h
index 09d9521ff..ce7e83427 100644
--- a/src/multimedia/camera/qcamera.h
+++ b/src/multimedia/camera/qcamera.h
@@ -131,6 +131,7 @@ public:
FocusDistance = 0x20
};
Q_DECLARE_FLAGS(Features, Feature)
+ Q_FLAG(Features)
explicit QCamera(QObject *parent = nullptr);
explicit QCamera(const QCameraDevice& cameraDevice, QObject *parent = nullptr);
@@ -261,7 +262,6 @@ private:
friend class QMediaCaptureSession;
Q_DISABLE_COPY(QCamera)
Q_DECLARE_PRIVATE(QCamera)
- Q_PRIVATE_SLOT(d_func(), void _q_error(int, const QString &))
friend class QCameraDevice;
};
diff --git a/src/multimedia/camera/qcamera_p.h b/src/multimedia/camera/qcamera_p.h
index c0477c242..ae1299435 100644
--- a/src/multimedia/camera/qcamera_p.h
+++ b/src/multimedia/camera/qcamera_p.h
@@ -16,7 +16,6 @@
//
#include "private/qobject_p.h"
-#include "private/qerrorinfo_p.h"
#include "qcamera.h"
#include "qcameradevice.h"
@@ -34,13 +33,8 @@ public:
QMediaCaptureSession *captureSession = nullptr;
QPlatformCamera *control = nullptr;
- QErrorInfo<QCamera::Error> error;
-
QCameraDevice cameraDevice;
QCameraFormat cameraFormat;
-
- void _q_error(int error, const QString &errorString);
- void unsetError() { error = {}; }
};
QT_END_NAMESPACE
diff --git a/src/multimedia/camera/qcameradevice.cpp b/src/multimedia/camera/qcameradevice.cpp
index 50727d49c..63e7fb4c0 100644
--- a/src/multimedia/camera/qcameradevice.cpp
+++ b/src/multimedia/camera/qcameradevice.cpp
@@ -455,10 +455,12 @@ QCameraDevice& QCameraDevice::operator=(const QCameraDevice& other) = default;
#ifndef QT_NO_DEBUG_STREAM
QDebug operator<<(QDebug d, const QCameraDevice &camera)
{
- d.maybeSpace() << QStringLiteral("QCameraDevice(name=%1, position=%2, orientation=%3)")
- .arg(camera.description())
- .arg(QString::fromLatin1(QCamera::staticMetaObject.enumerator(QCamera::staticMetaObject.indexOfEnumerator("Position"))
- .valueToKey(camera.position())));
+ d.maybeSpace() << QStringLiteral("QCameraDevice(name=%1, id=%2, position=%3)")
+ .arg(camera.description())
+ .arg(QLatin1StringView(camera.id()))
+ .arg(QLatin1StringView(
+ QMetaEnum::fromType<QCameraDevice::Position>().valueToKey(
+ camera.position())));
return d.space();
}
#endif
diff --git a/src/multimedia/camera/qimagecapture.cpp b/src/multimedia/camera/qimagecapture.cpp
index 9b92ce743..ecf39935c 100644
--- a/src/multimedia/camera/qimagecapture.cpp
+++ b/src/multimedia/camera/qimagecapture.cpp
@@ -92,18 +92,15 @@ QImageCapture::QImageCapture(QObject *parent)
}
d->control = maybeControl.value();
- connect(d->control, SIGNAL(imageExposed(int)),
- this, SIGNAL(imageExposed(int)));
- connect(d->control, SIGNAL(imageCaptured(int,QImage)),
- this, SIGNAL(imageCaptured(int,QImage)));
- connect(d->control, SIGNAL(imageMetadataAvailable(int,QMediaMetaData)),
- this, SIGNAL(imageMetadataAvailable(int,QMediaMetaData)));
- connect(d->control, SIGNAL(imageAvailable(int,QVideoFrame)),
- this, SIGNAL(imageAvailable(int,QVideoFrame)));
- connect(d->control, SIGNAL(imageSaved(int,QString)),
- this, SIGNAL(imageSaved(int,QString)));
- connect(d->control, SIGNAL(readyForCaptureChanged(bool)),
- this, SIGNAL(readyForCaptureChanged(bool)));
+ connect(d->control, &QPlatformImageCapture::imageExposed, this, &QImageCapture::imageExposed);
+ connect(d->control, &QPlatformImageCapture::imageCaptured, this, &QImageCapture::imageCaptured);
+ connect(d->control, &QPlatformImageCapture::imageMetadataAvailable, this,
+ &QImageCapture::imageMetadataAvailable);
+ connect(d->control, &QPlatformImageCapture::imageAvailable, this,
+ &QImageCapture::imageAvailable);
+ connect(d->control, &QPlatformImageCapture::imageSaved, this, &QImageCapture::imageSaved);
+ connect(d->control, &QPlatformImageCapture::readyForCaptureChanged, this,
+ &QImageCapture::readyForCaptureChanged);
connect(d->control, SIGNAL(error(int,int,QString)),
this, SLOT(_q_error(int,int,QString)));
}
@@ -214,8 +211,8 @@ void QImageCapture::addMetaData(const QMediaMetaData &metaData)
{
Q_D(QImageCapture);
auto data = d->metaData;
- for (auto k : metaData.keys())
- data.insert(k, metaData.value(k));
+ for (auto &&[key, value] : metaData.asKeyValueRange())
+ data.insert(key, value);
setMetaData(data);
}
diff --git a/src/multimedia/configure.cmake b/src/multimedia/configure.cmake
index 5fe25f172..4bc66e038 100644
--- a/src/multimedia/configure.cmake
+++ b/src/multimedia/configure.cmake
@@ -103,29 +103,20 @@ qt_feature("evr" PUBLIC PRIVATE
LABEL "evr.h"
CONDITION WIN32 AND TEST_evr
)
-qt_feature("gstreamer_1_0" PRIVATE
- LABEL "GStreamer 1.0"
- CONDITION GStreamer_FOUND
-)
-qt_feature("gstreamer_app" PRIVATE
- LABEL "GStreamer App"
- CONDITION ( QT_FEATURE_gstreamer_1_0 AND GStreamer_App_FOUND )
+qt_feature("gstreamer" PRIVATE
+ LABEL "QtMM GStreamer plugin"
+ CONDITION GStreamer_FOUND AND GStreamer_App_FOUND
+ ENABLE INPUT_gstreamer STREQUAL 'yes'
+ DISABLE INPUT_gstreamer STREQUAL 'no'
)
qt_feature("gstreamer_photography" PRIVATE
LABEL "GStreamer Photography"
- CONDITION ( QT_FEATURE_gstreamer_1_0 AND GStreamer_Photography_FOUND )
+ CONDITION QT_FEATURE_gstreamer AND GStreamer_Photography_FOUND
)
qt_feature("gstreamer_gl" PRIVATE
LABEL "GStreamer OpenGL"
- CONDITION QT_FEATURE_opengl AND QT_FEATURE_gstreamer_1_0 AND GStreamer_Gl_FOUND AND EGL_FOUND
+ CONDITION QT_FEATURE_opengl AND QT_FEATURE_gstreamer AND GStreamer_Gl_FOUND AND EGL_FOUND
)
-qt_feature("gstreamer" PRIVATE
- LABEL "QtMM GStreamer plugin"
- CONDITION (QT_FEATURE_gstreamer_1_0 AND QT_FEATURE_gstreamer_app)
- ENABLE INPUT_gstreamer STREQUAL 'yes'
- DISABLE INPUT_gstreamer STREQUAL 'no'
-)
-
qt_feature("gpu_vivante" PRIVATE
LABEL "Vivante GPU"
CONDITION QT_FEATURE_gui AND QT_FEATURE_opengles2 AND TEST_gpu_vivante
diff --git a/src/multimedia/doc/src/qtmultimedia-index.qdoc b/src/multimedia/doc/src/qtmultimedia-index.qdoc
index 67b6688be..74646b84c 100644
--- a/src/multimedia/doc/src/qtmultimedia-index.qdoc
+++ b/src/multimedia/doc/src/qtmultimedia-index.qdoc
@@ -193,18 +193,18 @@
The version shipped with Qt binary packages is \b{FFmpeg 6.1.1} and is tested
by the maintainers.
- \note On the Windows platform, Qt's FFmpeg media backend uses
- dynamic linking to the FFmpeg libraries. Windows applications must
- therefore bundle FFmpeg binaries in their installer, and make them
- visible to the application according to Windows dll loading rules.
- We recommend to store the FFmpeg dlls in the same directory as the
- application's executable file, because this guarantees that the
- correct build of FFmpeg is being used if multiple versions are
- available on the system. All necessary FFmpeg dlls are shipped with
- the Qt Online Installer and are automatically deployed if the
- windeployqt tool is used to create the deployment. Applications can
- also deploy their own build of FFmpeg, as long as the FFmpeg major
- version matches the version used by Qt.
+ \note On the Windows and macOS platforms, Qt's FFmpeg media backend
+ uses dynamic linking to the FFmpeg libraries. Windows and macOS
+ applications must therefore bundle FFmpeg binaries in their
+ installer, and make them visible to the application at runtime. On
+ Windows, we recommend storing the FFmpeg DLLs in the same directory
+ as the application's executable file, because this guarantees that
+ the correct build of FFmpeg is being used if multiple versions are
+ available on the system. All necessary FFmpeg libraries are shipped
+ with the Qt Online Installer and are automatically deployed if the
+ windeployqt or macdeployqt tools are used to create the deployment.
+ Applications can also deploy their own build of FFmpeg, as long as
+ the FFmpeg major version matches the version used by Qt.
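+
+ As an illustration only (application names are hypothetical), a typical
+ deployment step could look like this:
+
+ \badcode
+ windeployqt MyApp.exe
+ macdeployqt MyApp.app
+ \endcode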
\note See \l{Licenses and Attributions} regarding what components are removed
in the package shipped by Qt.
@@ -220,6 +220,8 @@
\note These are still available but with \b limited support. The gstreamer
backend is only available on Linux.
+ \note MediaCodec on Android is deprecated as of Qt 6.8 and will be removed
+ in Qt 7.0.
\section2 Backend support
Maintainers will strive to fix critical issues with the native backends but
diff --git a/src/multimedia/platform/qgstreamer_platformspecificinterface.cpp b/src/multimedia/platform/qgstreamer_platformspecificinterface.cpp
new file mode 100644
index 000000000..06ce46e3c
--- /dev/null
+++ b/src/multimedia/platform/qgstreamer_platformspecificinterface.cpp
@@ -0,0 +1,27 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/private/qgstreamer_platformspecificinterface_p.h>
+
+QT_BEGIN_NAMESPACE
+
+QGStreamerPlatformSpecificInterface::~QGStreamerPlatformSpecificInterface() = default;
+
+QGStreamerPlatformSpecificInterface *QGStreamerPlatformSpecificInterface::instance()
+{
+ return dynamic_cast<QGStreamerPlatformSpecificInterface *>(
+ QPlatformMediaIntegration::instance()->platformSpecificInterface());
+}
+
+QT_END_NAMESPACE
diff --git a/src/multimedia/platform/qgstreamer_platformspecificinterface_p.h b/src/multimedia/platform/qgstreamer_platformspecificinterface_p.h
new file mode 100644
index 000000000..1a086f5a4
--- /dev/null
+++ b/src/multimedia/platform/qgstreamer_platformspecificinterface_p.h
@@ -0,0 +1,46 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef GSTREAMER_PLATFORMSPECIFICINTERFACE_P_H
+#define GSTREAMER_PLATFORMSPECIFICINTERFACE_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/private/qplatformmediaintegration_p.h>
+
+typedef struct _GstPipeline GstPipeline; // NOLINT (bugprone-reserved-identifier)
+typedef struct _GstElement GstElement; // NOLINT (bugprone-reserved-identifier)
+
+QT_BEGIN_NAMESPACE
+
+class Q_MULTIMEDIA_EXPORT QGStreamerPlatformSpecificInterface
+ : public QAbstractPlatformSpecificInterface
+{
+public:
+ ~QGStreamerPlatformSpecificInterface() override;
+
+ static QGStreamerPlatformSpecificInterface *instance();
+
+ virtual QAudioDevice makeCustomGStreamerAudioInput(const QByteArray &gstreamerPipeline) = 0;
+ virtual QAudioDevice makeCustomGStreamerAudioOutput(const QByteArray &gstreamerPipeline) = 0;
+ virtual QCamera *makeCustomGStreamerCamera(const QByteArray &gstreamerPipeline,
+ QObject *parent) = 0;
+
+ // Note: ownership of GstElement is not transferred
+ virtual QCamera *makeCustomGStreamerCamera(GstElement *, QObject *parent) = 0;
+
+ virtual GstPipeline *gstPipeline(QMediaPlayer *) = 0;
+ virtual GstPipeline *gstPipeline(QMediaCaptureSession *) = 0;
+};
+
+QT_END_NAMESPACE
+
+#endif // GSTREAMER_PLATFORMSPECIFICINTERFACE_P_H
diff --git a/src/multimedia/platform/qplatformaudiobufferinput.cpp b/src/multimedia/platform/qplatformaudiobufferinput.cpp
new file mode 100644
index 000000000..883b11fc0
--- /dev/null
+++ b/src/multimedia/platform/qplatformaudiobufferinput.cpp
@@ -0,0 +1,10 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qplatformaudiobufferinput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+QT_END_NAMESPACE
+
+#include "moc_qplatformaudiobufferinput_p.cpp"
diff --git a/src/multimedia/platform/qplatformaudiobufferinput_p.h b/src/multimedia/platform/qplatformaudiobufferinput_p.h
new file mode 100644
index 000000000..55636ce06
--- /dev/null
+++ b/src/multimedia/platform/qplatformaudiobufferinput_p.h
@@ -0,0 +1,56 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QPLATFORMAUDIOBUFFERINPUT_P_H
+#define QPLATFORMAUDIOBUFFERINPUT_P_H
+
+#include "qaudioformat.h"
+#include "qaudiobuffer.h"
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+QT_BEGIN_NAMESPACE
+
+class QMediaInputEncoderInterface;
+
+class Q_MULTIMEDIA_EXPORT QPlatformAudioBufferInputBase : public QObject
+{
+ Q_OBJECT
+Q_SIGNALS:
+ void newAudioBuffer(const QAudioBuffer &buffer);
+};
+
+class Q_MULTIMEDIA_EXPORT QPlatformAudioBufferInput : public QPlatformAudioBufferInputBase
+{
+ Q_OBJECT
+public:
+ QPlatformAudioBufferInput(const QAudioFormat &format = {}) : m_format(format) { }
+
+ const QAudioFormat &audioFormat() const { return m_format; }
+
+ QMediaInputEncoderInterface *encoderInterface() const { return m_encoderInterface; }
+ void setEncoderInterface(QMediaInputEncoderInterface *interface)
+ {
+ m_encoderInterface = interface;
+ }
+
+Q_SIGNALS:
+ void encoderUpdated();
+
+private:
+ QMediaInputEncoderInterface *m_encoderInterface = nullptr;
+ QAudioFormat m_format;
+};
+
+QT_END_NAMESPACE
+
+#endif // QPLATFORMAUDIOBUFFERINPUT_P_H
diff --git a/src/multimedia/platform/qplatformcamera.cpp b/src/multimedia/platform/qplatformcamera.cpp
index 0d3975550..d03c19d67 100644
--- a/src/multimedia/platform/qplatformcamera.cpp
+++ b/src/multimedia/platform/qplatformcamera.cpp
@@ -50,7 +50,7 @@ QVideoFrameFormat QPlatformCamera::frameFormat() const
m_framePixelFormat == QVideoFrameFormat::Format_Invalid
? m_cameraFormat.pixelFormat()
: m_framePixelFormat);
- result.setFrameRate(m_cameraFormat.maxFrameRate());
+ result.setStreamFrameRate(m_cameraFormat.maxFrameRate());
return result;
}
@@ -221,6 +221,13 @@ int QPlatformCamera::colorTemperatureForWhiteBalance(QCamera::WhiteBalanceMode m
return 0;
}
+void QPlatformCamera::updateError(QCamera::Error error, const QString &errorString)
+{
+ QMetaObject::invokeMethod(this, [this, error, errorString]() {
+ m_error.setAndNotify(error, errorString, *this);
+ });
+}
+
QT_END_NAMESPACE
#include "moc_qplatformcamera_p.cpp"
diff --git a/src/multimedia/platform/qplatformcamera_p.h b/src/multimedia/platform/qplatformcamera_p.h
index 85624c0ce..341bf9121 100644
--- a/src/multimedia/platform/qplatformcamera_p.h
+++ b/src/multimedia/platform/qplatformcamera_p.h
@@ -16,7 +16,7 @@
//
#include "qplatformvideosource_p.h"
-
+#include "private/qerrorinfo_p.h"
#include <QtMultimedia/qcamera.h>
QT_BEGIN_NAMESPACE
@@ -110,8 +110,13 @@ public:
static int colorTemperatureForWhiteBalance(QCamera::WhiteBalanceMode mode);
+ QCamera::Error error() const { return m_error.code(); }
+ QString errorString() const final { return m_error.description(); }
+
+ void updateError(QCamera::Error error, const QString &errorString);
+
Q_SIGNALS:
- void error(int error, const QString &errorString);
+ void errorOccurred(QCamera::Error error, const QString &errorString);
protected:
explicit QPlatformCamera(QCamera *parent);
@@ -150,6 +155,7 @@ private:
float m_maxExposureTime = -1.;
QCamera::WhiteBalanceMode m_whiteBalance = QCamera::WhiteBalanceAuto;
int m_colorTemperature = 0;
+ QErrorInfo<QCamera::Error> m_error;
};
QT_END_NAMESPACE
diff --git a/src/multimedia/platform/qplatformmediacapture.cpp b/src/multimedia/platform/qplatformmediacapture.cpp
index c8aded824..13bcbd63b 100644
--- a/src/multimedia/platform/qplatformmediacapture.cpp
+++ b/src/multimedia/platform/qplatformmediacapture.cpp
@@ -8,6 +8,7 @@
#include <QtMultimedia/private/qplatformmediacapture_p.h>
#include <QtMultimedia/private/qmediacapturesession_p.h>
#include <QtMultimedia/private/qplatformsurfacecapture_p.h>
+#include <QtMultimedia/private/qplatformvideoframeinput_p.h>
#include <QtMultimedia/private/qtmultimediaglobal_p.h>
QT_BEGIN_NAMESPACE
@@ -23,6 +24,7 @@ std::vector<QPlatformVideoSource *> QPlatformMediaCaptureSession::activeVideoSou
result.push_back(source);
};
+ checkSource(videoFrameInput());
checkSource(camera());
checkSource(screenCapture());
checkSource(windowCapture());
@@ -30,15 +32,6 @@ std::vector<QPlatformVideoSource *> QPlatformMediaCaptureSession::activeVideoSou
return result;
}
-void *QPlatformMediaCaptureSession::nativePipeline(QMediaCaptureSession *session)
-{
- auto sessionPrivate = session->d_func();
- if (!sessionPrivate || !sessionPrivate->captureSession)
- return nullptr;
-
- return sessionPrivate->captureSession->nativePipeline();
-}
-
QT_END_NAMESPACE
#include "moc_qplatformmediacapture_p.cpp"
diff --git a/src/multimedia/platform/qplatformmediacapture_p.h b/src/multimedia/platform/qplatformmediacapture_p.h
index 981cf199b..8d6afc90e 100644
--- a/src/multimedia/platform/qplatformmediacapture_p.h
+++ b/src/multimedia/platform/qplatformmediacapture_p.h
@@ -29,6 +29,8 @@ class QPlatformAudioOutput;
class QMediaCaptureSession;
class QPlatformSurfaceCapture;
class QPlatformVideoSource;
+class QPlatformAudioBufferInput;
+class QPlatformVideoFrameInput;
class Q_MULTIMEDIA_EXPORT QPlatformMediaCaptureSession : public QObject
{
@@ -49,6 +51,9 @@ public:
virtual QPlatformSurfaceCapture *windowCapture() { return nullptr; }
virtual void setWindowCapture(QPlatformSurfaceCapture *) { }
+ virtual QPlatformVideoFrameInput *videoFrameInput() { return nullptr; }
+ virtual void setVideoFrameInput(QPlatformVideoFrameInput *) { }
+
virtual QPlatformImageCapture *imageCapture() = 0;
virtual void setImageCapture(QPlatformImageCapture *) {}
@@ -57,6 +62,8 @@ public:
virtual void setAudioInput(QPlatformAudioInput *input) = 0;
+ virtual void setAudioBufferInput(QPlatformAudioBufferInput *) { }
+
virtual void setVideoPreview(QVideoSink * /*sink*/) {}
virtual void setAudioOutput(QPlatformAudioOutput *) {}
@@ -64,15 +71,11 @@ public:
// TBD: implement ordering of the sources basing on the order of adding
std::vector<QPlatformVideoSource *> activeVideoSources();
- virtual void *nativePipeline() { return nullptr; }
-
- // private API, the purpose is getting GstPipeline
- static void *nativePipeline(QMediaCaptureSession *);
-
Q_SIGNALS:
void cameraChanged();
void screenCaptureChanged();
void windowCaptureChanged();
+ void videoFrameInputChanged();
void imageCaptureChanged();
void encoderChanged();
diff --git a/src/multimedia/platform/qplatformmediaintegration.cpp b/src/multimedia/platform/qplatformmediaintegration.cpp
index dda00de61..b9aa1e258 100644
--- a/src/multimedia/platform/qplatformmediaintegration.cpp
+++ b/src/multimedia/platform/qplatformmediaintegration.cpp
@@ -33,7 +33,7 @@ public:
}
};
-static Q_LOGGING_CATEGORY(qLcMediaPlugin, "qt.multimedia.plugin")
+Q_STATIC_LOGGING_CATEGORY(qLcMediaPlugin, "qt.multimedia.plugin")
Q_GLOBAL_STATIC_WITH_ARGS(QFactoryLoader, loader,
(QPlatformMediaPlugin_iid,
@@ -207,6 +207,12 @@ QLatin1String QPlatformMediaIntegration::name()
return m_backendName;
}
+QVideoFrame QPlatformMediaIntegration::convertVideoFrame(QVideoFrame &,
+ const QVideoFrameFormat &)
+{
+ return {};
+}
+
QPlatformMediaIntegration::QPlatformMediaIntegration(QLatin1String name) : m_backendName(name) { }
QPlatformMediaIntegration::~QPlatformMediaIntegration() = default;
diff --git a/src/multimedia/platform/qplatformmediaintegration_p.h b/src/multimedia/platform/qplatformmediaintegration_p.h
index 19fa40baf..d03d0c794 100644
--- a/src/multimedia/platform/qplatformmediaintegration_p.h
+++ b/src/multimedia/platform/qplatformmediaintegration_p.h
@@ -53,6 +53,13 @@ class QPlatformAudioOutput;
class QPlatformVideoDevices;
class QCapturableWindow;
class QPlatformCapturableWindows;
+class QVideoFrame;
+
+class Q_MULTIMEDIA_EXPORT QAbstractPlatformSpecificInterface
+{
+public:
+ virtual ~QAbstractPlatformSpecificInterface() = default;
+};
class Q_MULTIMEDIA_EXPORT QPlatformMediaIntegration : public QObject
{
@@ -96,6 +103,11 @@ public:
static QStringList availableBackends();
QLatin1String name(); // for unit tests
+ // Convert a QVideoFrame to the destination format
+ virtual QVideoFrame convertVideoFrame(QVideoFrame &, const QVideoFrameFormat &);
+
+ virtual QAbstractPlatformSpecificInterface *platformSpecificInterface() { return nullptr; }
+
protected:
virtual QPlatformMediaFormatInfo *createFormatInfo();
diff --git a/src/multimedia/platform/qplatformmediaplayer.cpp b/src/multimedia/platform/qplatformmediaplayer.cpp
index ea22f94df..00840f074 100644
--- a/src/multimedia/platform/qplatformmediaplayer.cpp
+++ b/src/multimedia/platform/qplatformmediaplayer.cpp
@@ -14,9 +14,7 @@ QPlatformMediaPlayer::QPlatformMediaPlayer(QMediaPlayer *parent) : player(parent
QPlatformMediaIntegration::instance()->mediaDevices()->prepareAudio();
}
-QPlatformMediaPlayer::~QPlatformMediaPlayer()
-{
-}
+QPlatformMediaPlayer::~QPlatformMediaPlayer() = default;
void QPlatformMediaPlayer::stateChanged(QMediaPlayer::PlaybackState newState)
{
@@ -39,16 +37,4 @@ void QPlatformMediaPlayer::error(int error, const QString &errorString)
player->d_func()->setError(QMediaPlayer::Error(error), errorString);
}
-void *QPlatformMediaPlayer::nativePipeline(QMediaPlayer *player)
-{
- if (!player)
- return nullptr;
-
- auto playerPrivate = player->d_func();
- if (!playerPrivate || !playerPrivate->control)
- return nullptr;
-
- return playerPrivate->control->nativePipeline();
-}
-
QT_END_NAMESPACE
diff --git a/src/multimedia/platform/qplatformmediaplayer_p.h b/src/multimedia/platform/qplatformmediaplayer_p.h
index 6e3590763..f8815958b 100644
--- a/src/multimedia/platform/qplatformmediaplayer_p.h
+++ b/src/multimedia/platform/qplatformmediaplayer_p.h
@@ -22,6 +22,7 @@
#include <QtCore/qpair.h>
#include <QtCore/private/qglobal_p.h>
+#include <QtCore/qobject.h>
QT_BEGIN_NAMESPACE
@@ -64,19 +65,23 @@ public:
virtual void setAudioOutput(QPlatformAudioOutput *) {}
+ virtual void setAudioBufferOutput(QAudioBufferOutput *) { }
+
virtual QMediaMetaData metaData() const { return {}; }
virtual void setVideoSink(QVideoSink * /*sink*/) = 0;
// media streams
- enum TrackType { VideoStream, AudioStream, SubtitleStream, NTrackTypes };
+ enum TrackType : uint8_t { VideoStream, AudioStream, SubtitleStream, NTrackTypes };
virtual int trackCount(TrackType) { return 0; };
virtual QMediaMetaData trackMetaData(TrackType /*type*/, int /*streamNumber*/) { return QMediaMetaData(); }
virtual int activeTrack(TrackType) { return -1; }
virtual void setActiveTrack(TrackType, int /*streamNumber*/) {}
+ void durationChanged(std::chrono::milliseconds ms) { durationChanged(ms.count()); }
void durationChanged(qint64 duration) { emit player->durationChanged(duration); }
+ void positionChanged(std::chrono::milliseconds ms) { positionChanged(ms.count()); }
void positionChanged(qint64 position) {
if (m_position == position)
return;
@@ -115,7 +120,7 @@ public:
bool doLoop() {
return isSeekable() && (m_loops < 0 || ++m_currentLoop < m_loops);
}
- int loops() { return m_loops; }
+ int loops() const { return m_loops; }
virtual void setLoops(int loops)
{
if (m_loops == loops)
@@ -124,11 +129,6 @@ public:
Q_EMIT player->loopsChanged();
}
- virtual void *nativePipeline() { return nullptr; }
-
- // private API, the purpose is getting GstPipeline
- static void *nativePipeline(QMediaPlayer *player);
-
protected:
explicit QPlatformMediaPlayer(QMediaPlayer *parent = nullptr);
@@ -144,6 +144,25 @@ private:
qint64 m_position = 0;
};
+#ifndef QT_NO_DEBUG_STREAM
+inline QDebug operator<<(QDebug dbg, QPlatformMediaPlayer::TrackType type)
+{
+ QDebugStateSaver save(dbg);
+ dbg.nospace();
+
+ switch (type) {
+ case QPlatformMediaPlayer::TrackType::AudioStream:
+ return dbg << "AudioStream";
+ case QPlatformMediaPlayer::TrackType::VideoStream:
+ return dbg << "VideoStream";
+ case QPlatformMediaPlayer::TrackType::SubtitleStream:
+ return dbg << "SubtitleStream";
+ default:
+ Q_UNREACHABLE_RETURN(dbg);
+ }
+}
+#endif
+
QT_END_NAMESPACE
diff --git a/src/multimedia/platform/qplatformmediarecorder.cpp b/src/multimedia/platform/qplatformmediarecorder.cpp
index ba9ea0165..30dba0a45 100644
--- a/src/multimedia/platform/qplatformmediarecorder.cpp
+++ b/src/multimedia/platform/qplatformmediarecorder.cpp
@@ -15,12 +15,12 @@ QPlatformMediaRecorder::QPlatformMediaRecorder(QMediaRecorder *parent)
void QPlatformMediaRecorder::pause()
{
- error(QMediaRecorder::FormatError, QMediaRecorder::tr("Pause not supported"));
+ updateError(QMediaRecorder::FormatError, QMediaRecorder::tr("Pause not supported"));
}
void QPlatformMediaRecorder::resume()
{
- error(QMediaRecorder::FormatError, QMediaRecorder::tr("Resume not supported"));
+ updateError(QMediaRecorder::FormatError, QMediaRecorder::tr("Resume not supported"));
}
void QPlatformMediaRecorder::stateChanged(QMediaRecorder::RecorderState state)
@@ -47,7 +47,7 @@ void QPlatformMediaRecorder::actualLocationChanged(const QUrl &location)
emit q->actualLocationChanged(location);
}
-void QPlatformMediaRecorder::error(QMediaRecorder::Error error, const QString &errorString)
+void QPlatformMediaRecorder::updateError(QMediaRecorder::Error error, const QString &errorString)
{
m_error.setAndNotify(error, errorString, *q);
}
diff --git a/src/multimedia/platform/qplatformmediarecorder_p.h b/src/multimedia/platform/qplatformmediarecorder_p.h
index 6e88dc187..ab6af759d 100644
--- a/src/multimedia/platform/qplatformmediarecorder_p.h
+++ b/src/multimedia/platform/qplatformmediarecorder_p.h
@@ -125,18 +125,20 @@ public:
virtual void setOutputLocation(const QUrl &location) { m_outputLocation = location; }
QUrl actualLocation() const { return m_actualLocation; }
void clearActualLocation() { m_actualLocation.clear(); }
- void clearError() { error(QMediaRecorder::NoError, QString()); }
+ void clearError() { updateError(QMediaRecorder::NoError, QString()); }
QIODevice *outputDevice() const { return m_outputDevice; }
void setOutputDevice(QIODevice *device) { m_outputDevice = device; }
+ virtual void updateAutoStop() { }
+
protected:
explicit QPlatformMediaRecorder(QMediaRecorder *parent);
void stateChanged(QMediaRecorder::RecorderState state);
void durationChanged(qint64 position);
void actualLocationChanged(const QUrl &location);
- void error(QMediaRecorder::Error error, const QString &errorString);
+ void updateError(QMediaRecorder::Error error, const QString &errorString);
void metaDataChanged();
QMediaRecorder *mediaRecorder() { return q; }
diff --git a/src/multimedia/platform/qplatformsurfacecapture_p.h b/src/multimedia/platform/qplatformsurfacecapture_p.h
index 42fbda474..e4c59c6f4 100644
--- a/src/multimedia/platform/qplatformsurfacecapture_p.h
+++ b/src/multimedia/platform/qplatformsurfacecapture_p.h
@@ -61,7 +61,7 @@ public:
Source source() const { return m_source; }
Error error() const;
- QString errorString() const;
+ QString errorString() const final;
protected:
virtual bool setActiveInternal(bool) = 0;
@@ -74,7 +74,6 @@ public Q_SLOTS:
Q_SIGNALS:
void sourceChanged(WindowSource);
void sourceChanged(ScreenSource);
- void errorChanged();
void errorOccurred(Error error, QString errorString);
private:
diff --git a/src/multimedia/platform/qplatformvideoframeinput.cpp b/src/multimedia/platform/qplatformvideoframeinput.cpp
new file mode 100644
index 000000000..d90306345
--- /dev/null
+++ b/src/multimedia/platform/qplatformvideoframeinput.cpp
@@ -0,0 +1,10 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qplatformvideoframeinput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+QT_END_NAMESPACE
+
+#include "moc_qplatformvideoframeinput_p.cpp"
diff --git a/src/multimedia/platform/qplatformvideoframeinput_p.h b/src/multimedia/platform/qplatformvideoframeinput_p.h
new file mode 100644
index 000000000..45714492c
--- /dev/null
+++ b/src/multimedia/platform/qplatformvideoframeinput_p.h
@@ -0,0 +1,55 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QPLATFORMVIDEOFRAMEINPUT_P_H
+#define QPLATFORMVIDEOFRAMEINPUT_P_H
+
+#include "qplatformvideosource_p.h"
+#include "qmetaobject.h"
+#include "qpointer.h"
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+QT_BEGIN_NAMESPACE
+
+class QMediaInputEncoderInterface;
+
+class Q_MULTIMEDIA_EXPORT QPlatformVideoFrameInput : public QPlatformVideoSource
+{
+ Q_OBJECT
+public:
+ QPlatformVideoFrameInput(QVideoFrameFormat format = {}) : m_format(std::move(format)) { }
+
+ void setActive(bool) final { }
+ bool isActive() const final { return true; }
+
+ QVideoFrameFormat frameFormat() const final { return m_format; }
+
+ QString errorString() const final { return {}; }
+
+ QMediaInputEncoderInterface *encoderInterface() const { return m_encoderInterface; }
+ void setEncoderInterface(QMediaInputEncoderInterface *interface)
+ {
+ m_encoderInterface = interface;
+ }
+
+Q_SIGNALS:
+ void encoderUpdated();
+
+private:
+ QMediaInputEncoderInterface *m_encoderInterface = nullptr;
+ QVideoFrameFormat m_format;
+};
+
+QT_END_NAMESPACE
+
+#endif // QPLATFORMVIDEOFRAMEINPUT_P_H
diff --git a/src/multimedia/platform/qplatformvideosource_p.h b/src/multimedia/platform/qplatformvideosource_p.h
index 3ed76d3e2..b11524226 100644
--- a/src/multimedia/platform/qplatformvideosource_p.h
+++ b/src/multimedia/platform/qplatformvideosource_p.h
@@ -43,9 +43,14 @@ public:
virtual void setCaptureSession(QPlatformMediaCaptureSession *) { }
+ virtual QString errorString() const = 0;
+
+ bool hasError() const { return !errorString().isEmpty(); }
+
Q_SIGNALS:
void newVideoFrame(const QVideoFrame &);
void activeChanged(bool);
+ void errorChanged();
};
QT_END_NAMESPACE
diff --git a/src/multimedia/playback/qmediaplayer.cpp b/src/multimedia/playback/qmediaplayer.cpp
index dc8e3dab8..644c2d094 100644
--- a/src/multimedia/playback/qmediaplayer.cpp
+++ b/src/multimedia/playback/qmediaplayer.cpp
@@ -5,6 +5,7 @@
#include <private/qmultimediautils_p.h>
#include <private/qplatformmediaintegration_p.h>
+#include <private/qaudiobufferoutput_p.h>
#include <qvideosink.h>
#include <qaudiooutput.h>
@@ -596,6 +597,12 @@ void QMediaPlayer::setPlaybackRate(qreal rate)
It does not wait for the media to finish loading and does not check for errors. Listen for
the mediaStatusChanged() and error() signals to be notified when the media is loaded and
when an error occurs during loading.
+
+ \note FFmpeg, used by the FFmpeg media backend, restricts use of nested protocols for
+ security reasons. In controlled environments where all inputs are trusted, the list of
+ approved protocols can be overridden using the QT_FFMPEG_PROTOCOL_WHITELIST environment
+ variable. This environment variable is Qt's private API and can change between patch
+ releases without notice.
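+
+ For illustration only; the protocol list shown is an example, not a
+ recommendation:
+
+ \code
+ // comma-separated FFmpeg protocol names, adjusted to the trusted set
+ qputenv("QT_FFMPEG_PROTOCOL_WHITELIST", "file,crypto,data,http,https,tcp,tls");
+ \endcode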
*/
void QMediaPlayer::setSource(const QUrl &source)
@@ -643,6 +650,51 @@ void QMediaPlayer::setSourceDevice(QIODevice *device, const QUrl &sourceUrl)
}
/*!
+ Sets an audio buffer \a output to the media player.
+
+ If \l QAudioBufferOutput is specified and the media source
+ contains an audio stream, the media player will emit
+ the signal \l{QAudioBufferOutput::audioBufferReceived} with
+ audio buffers containing decoded audio data. At the end of
+ the audio stream, \c QMediaPlayer emits an empty \l QAudioBuffer.
+
+ \c QMediaPlayer emits audio buffers at the same time as it
+ pushes the matching data to the audio output, if one is specified.
+ However, the sound may be played with a small delay because
+ of audio buffering.
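+
+ A sketch of detecting the end of the stream; the lambda body is
+ illustrative:
+
+ \code
+ QObject::connect(&bufferOutput, &QAudioBufferOutput::audioBufferReceived,
+                  &player, [](const QAudioBuffer &buffer) {
+     if (!buffer.isValid()) {
+         // an empty buffer marks the end of the audio stream
+         return;
+     }
+     // handle the decoded audio data
+ });
+ \endcode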
+*/
+void QMediaPlayer::setAudioBufferOutput(QAudioBufferOutput *output)
+{
+ Q_D(QMediaPlayer);
+
+ QAudioBufferOutput *oldOutput = d->audioBufferOutput;
+ if (oldOutput == output)
+ return;
+
+ d->audioBufferOutput = output;
+
+ if (output) {
+ auto oldPlayer = QAudioBufferOutputPrivate::exchangeMediaPlayer(*oldOutput, this);
+ if (oldPlayer)
+ oldPlayer->setAudioBufferOutput(nullptr);
+ }
+
+ if (d->control)
+ d->control->setAudioBufferOutput(output);
+
+ emit audioBufferOutputChanged();
+}
+
+/*!
+ Returns the \l QAudioBufferOutput that has been set on the media player.
+*/
+QAudioBufferOutput *QMediaPlayer::audioBufferOutput() const
+{
+ Q_D(const QMediaPlayer);
+ return d->audioBufferOutput;
+}
+
+/*!
\qmlproperty AudioOutput QtMultimedia::MediaPlayer::audioOutput
This property holds the target audio output.
diff --git a/src/multimedia/playback/qmediaplayer.h b/src/multimedia/playback/qmediaplayer.h
index 015a30f05..e0d1fec75 100644
--- a/src/multimedia/playback/qmediaplayer.h
+++ b/src/multimedia/playback/qmediaplayer.h
@@ -17,6 +17,7 @@ class QAudioOutput;
class QAudioDevice;
class QMediaMetaData;
class QMediaTimeRange;
+class QAudioBufferOutput;
class QMediaPlayerPrivate;
class Q_MULTIMEDIA_EXPORT QMediaPlayer : public QObject
@@ -106,6 +107,9 @@ public:
void setActiveVideoTrack(int index);
void setActiveSubtitleTrack(int index);
+ void setAudioBufferOutput(QAudioBufferOutput *output);
+ QAudioBufferOutput *audioBufferOutput() const;
+
void setAudioOutput(QAudioOutput *output);
QAudioOutput *audioOutput() const;
@@ -177,6 +181,7 @@ Q_SIGNALS:
void metaDataChanged();
void videoOutputChanged();
void audioOutputChanged();
+ void audioBufferOutputChanged();
void tracksChanged();
void activeTracksChanged();
diff --git a/src/multimedia/playback/qmediaplayer_p.h b/src/multimedia/playback/qmediaplayer_p.h
index ece086d06..3d32d4e68 100644
--- a/src/multimedia/playback/qmediaplayer_p.h
+++ b/src/multimedia/playback/qmediaplayer_p.h
@@ -19,6 +19,7 @@
#include "qmediametadata.h"
#include "qvideosink.h"
#include "qaudiooutput.h"
+#include "qaudiobufferoutput.h"
#include <private/qplatformmediaplayer_p.h>
#include <private/qerrorinfo_p.h>
@@ -40,9 +41,15 @@ class QMediaPlayerPrivate : public QObjectPrivate
Q_DECLARE_PUBLIC(QMediaPlayer)
public:
+ static QMediaPlayerPrivate *get(QMediaPlayer *session)
+ {
+ return reinterpret_cast<QMediaPlayerPrivate *>(QObjectPrivate::get(session));
+ }
+
QMediaPlayerPrivate() = default;
QPlatformMediaPlayer *control = nullptr;
+ QPointer<QAudioBufferOutput> audioBufferOutput;
QPointer<QAudioOutput> audioOutput;
QPointer<QVideoSink> videoSink;
QPointer<QObject> videoOutput;
diff --git a/src/multimedia/pulseaudio/qaudioengine_pulse.cpp b/src/multimedia/pulseaudio/qaudioengine_pulse.cpp
index e54356404..5fac7234a 100644
--- a/src/multimedia/pulseaudio/qaudioengine_pulse.cpp
+++ b/src/multimedia/pulseaudio/qaudioengine_pulse.cpp
@@ -477,7 +477,7 @@ void QPulseAudioEngine::onContextFailed()
release();
// Try to reconnect later
- QTimer::singleShot(3000, this, SLOT(prepare()));
+ QTimer::singleShot(3000, this, &QPulseAudioEngine::prepare);
}
QPulseAudioEngine *QPulseAudioEngine::instance()
diff --git a/src/multimedia/qmediaframeinput.cpp b/src/multimedia/qmediaframeinput.cpp
new file mode 100644
index 000000000..4bb90d3ee
--- /dev/null
+++ b/src/multimedia/qmediaframeinput.cpp
@@ -0,0 +1,43 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qmediaframeinput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+void QMediaFrameInputPrivate::setCaptureSession(QMediaCaptureSession *session)
+{
+ if (session == m_captureSession)
+ return;
+
+ auto prevSession = std::exchange(m_captureSession, session);
+ updateCaptureSessionConnections(prevSession, session);
+ updateCanSendMediaFrame();
+}
+
+void QMediaFrameInputPrivate::updateCanSendMediaFrame()
+{
+ const bool canSendMediaFrame = m_captureSession && checkIfCanSendMediaFrame();
+ if (m_canSendMediaFrame != canSendMediaFrame) {
+ m_canSendMediaFrame = canSendMediaFrame;
+ if (m_canSendMediaFrame)
+ emitReadyToSendMediaFrame();
+ }
+}
+
+void QMediaFrameInputPrivate::postponeCheckReadyToSend()
+{
+ if (m_canSendMediaFrame && !m_postponeReadyToSendCheckRun) {
+ m_postponeReadyToSendCheckRun = true;
+ QMetaObject::invokeMethod(
+ q_ptr,
+ [this]() {
+ m_postponeReadyToSendCheckRun = false;
+ if (m_canSendMediaFrame)
+ emitReadyToSendMediaFrame();
+ },
+ Qt::QueuedConnection);
+ }
+}
+
+QT_END_NAMESPACE
diff --git a/src/multimedia/qmediaframeinput_p.h b/src/multimedia/qmediaframeinput_p.h
new file mode 100644
index 000000000..22277865d
--- /dev/null
+++ b/src/multimedia/qmediaframeinput_p.h
@@ -0,0 +1,74 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QMEDIAFRAMEINPUT_P_H
+#define QMEDIAFRAMEINPUT_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qmediacapturesession.h"
+#include <QtCore/private/qobject_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QMediaFrameInputPrivate : public QObjectPrivate
+{
+public:
+ void setCaptureSession(QMediaCaptureSession *session);
+
+ QMediaCaptureSession *captureSession() const { return m_captureSession; }
+
+protected:
+ template <typename Sender>
+ bool sendMediaFrame(Sender &&sender)
+ {
+ if (!m_canSendMediaFrame)
+ return false;
+
+ sender();
+ postponeCheckReadyToSend();
+ return true;
+ }
+
+ template <typename Sender, typename Signal>
+ void addUpdateSignal(Sender sender, Signal signal)
+ {
+ connect(sender, signal, this, &QMediaFrameInputPrivate::updateCanSendMediaFrame);
+ }
+
+ template <typename Sender, typename Signal>
+ void removeUpdateSignal(Sender sender, Signal signal)
+ {
+ disconnect(sender, signal, this, &QMediaFrameInputPrivate::updateCanSendMediaFrame);
+ }
+
+ void updateCanSendMediaFrame();
+
+private:
+ void postponeCheckReadyToSend();
+
+ virtual bool checkIfCanSendMediaFrame() const = 0;
+
+ virtual void emitReadyToSendMediaFrame() = 0;
+
+ virtual void updateCaptureSessionConnections(QMediaCaptureSession *prevSession,
+ QMediaCaptureSession *currentSession) = 0;
+
+private:
+ QMediaCaptureSession *m_captureSession = nullptr;
+ bool m_canSendMediaFrame = false;
+ bool m_postponeReadyToSendCheckRun = false;
+};
+
+QT_END_NAMESPACE
+
+#endif // QMEDIAFRAMEINPUT_P_H
diff --git a/src/multimedia/qmediainputencoderinterface_p.h b/src/multimedia/qmediainputencoderinterface_p.h
new file mode 100644
index 000000000..c199e59b4
--- /dev/null
+++ b/src/multimedia/qmediainputencoderinterface_p.h
@@ -0,0 +1,31 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QMEDIAINPUTENCODERINTERFACE_P_H
+#define QMEDIAINPUTENCODERINTERFACE_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/qtmultimediaglobal.h>
+
+QT_BEGIN_NAMESPACE
+
+class QMediaInputEncoderInterface
+{
+public:
+ virtual ~QMediaInputEncoderInterface() = default;
+ virtual bool canPushFrame() const = 0;
+};
+
+QT_END_NAMESPACE
+
+#endif // QMEDIAINPUTENCODERINTERFACE_P_H
diff --git a/src/multimedia/qmediametadata.cpp b/src/multimedia/qmediametadata.cpp
index dc238721f..29b2a0a8d 100644
--- a/src/multimedia/qmediametadata.cpp
+++ b/src/multimedia/qmediametadata.cpp
@@ -2,14 +2,15 @@
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
#include "qmediametadata.h"
+
#include <QtCore/qcoreapplication.h>
-#include <qvariant.h>
-#include <qobject.h>
-#include <qdatetime.h>
-#include <qmediaformat.h>
-#include <qsize.h>
-#include <qurl.h>
-#include <qimage.h>
+#include <QtCore/qdatetime.h>
+#include <QtCore/qobject.h>
+#include <QtCore/qsize.h>
+#include <QtCore/qurl.h>
+#include <QtCore/qvariant.h>
+#include <QtGui/qimage.h>
+#include <QtMultimedia/qmediaformat.h>
QT_BEGIN_NAMESPACE
@@ -41,7 +42,7 @@ QT_BEGIN_NAMESPACE
Media attributes
\row \li MediaType \li The type of the media (audio, video, etc). \li QString
\row \li FileFormat \li The file format of the media. \li QMediaFormat::FileFormat
- \row \li Duration \li The duration in millseconds of the media. \li qint64
+ \row \li Duration \li The duration in milliseconds of the media. \li qint64
\header \li {3,1}
Audio attributes
@@ -53,6 +54,7 @@ QT_BEGIN_NAMESPACE
\row \li VideoFrameRate \li The frame rate of the media's video stream. \li qreal
\row \li VideoBitRate \li The bit rate of the media's video stream in bits per second. \li int
\row \li VideoCodec \li The codec of the media's video stream. \li QMediaFormat::VideoCodec
+ \row \li HasHdrContent \li True if video is intended for HDR display (FFmpeg and Darwin media backends only). \li bool
\header \li {3,1}
Music attributes
@@ -129,6 +131,10 @@ QMetaType QMediaMetaData::keyType(Key key)
case Resolution:
return QMetaType::fromType<QSize>();
+
+ case HasHdrContent:
+ return QMetaType::fromType<bool>();
+
default:
return QMetaType::fromType<void>();
}
@@ -276,6 +282,7 @@ QMetaType QMediaMetaData::keyType(Key key)
\value CoverArtImage Media cover art
\value Orientation
\value Resolution
+ \value [since 6.8] HasHdrContent Video may have HDR content (read only, FFmpeg and Darwin media backends only)
*/
/*!
@@ -385,6 +392,7 @@ QString QMediaMetaData::stringValue(QMediaMetaData::Key key) const
case Composer:
case Orientation:
case LeadPerformer:
+ case HasHdrContent:
return value.toString();
case Language: {
auto l = value.value<QLocale::Language>();
@@ -479,10 +487,31 @@ QString QMediaMetaData::metaDataKeyToString(QMediaMetaData::Key key)
return (QCoreApplication::translate("QMediaMetaData", "Resolution"));
case QMediaMetaData::LeadPerformer:
return (QCoreApplication::translate("QMediaMetaData", "Lead performer"));
+ case QMediaMetaData::HasHdrContent:
+ return (QCoreApplication::translate("QMediaMetaData", "Has HDR content"));
}
return QString();
}
+QDebug operator<<(QDebug dbg, const QMediaMetaData &metaData)
+{
+ QDebugStateSaver sv(dbg);
+ dbg.nospace();
+
+ dbg << "QMediaMetaData{";
+ auto range = metaData.asKeyValueRange();
+ auto begin = std::begin(range);
+
+ for (auto it = begin; it != std::end(range); ++it) {
+ if (it != begin)
+ dbg << ", ";
+ dbg << it->first << ": " << it->second;
+ }
+
+ dbg << "}";
+ return dbg;
+}
+
// operator documentation
/*!
\fn QVariant &QMediaMetaData ::operator[](QMediaMetaData::Key k)
@@ -511,6 +540,11 @@ QString QMediaMetaData::metaDataKeyToString(QMediaMetaData::Key key)
\note this is a \c protected member of its class.
*/
+/*!
+ \fn auto QMediaMetaData::asKeyValueRange() const
+ \internal
+*/
+
QT_END_NAMESPACE
#include "moc_qmediametadata.cpp"
diff --git a/src/multimedia/qmediametadata.h b/src/multimedia/qmediametadata.h
index d6f4477d3..e21594a02 100644
--- a/src/multimedia/qmediametadata.h
+++ b/src/multimedia/qmediametadata.h
@@ -57,11 +57,13 @@ public:
CoverArtImage,
Orientation,
- Resolution
+ Resolution,
+
+ HasHdrContent
};
Q_ENUM(Key)
- static constexpr int NumMetaData = Resolution + 1;
+ static constexpr int NumMetaData = HasHdrContent + 1;
// QMetaType typeForKey(Key k);
Q_INVOKABLE QVariant value(Key k) const { return data.value(k); }
@@ -77,7 +79,11 @@ public:
Q_INVOKABLE static QString metaDataKeyToString(Key k);
+ QT_TECH_PREVIEW_API auto asKeyValueRange() const { return data.asKeyValueRange(); }
+
protected:
+ Q_MULTIMEDIA_EXPORT friend QDebug operator<<(QDebug, const QMediaMetaData &);
+
friend bool operator==(const QMediaMetaData &a, const QMediaMetaData &b)
{ return a.data == b.data; }
friend bool operator!=(const QMediaMetaData &a, const QMediaMetaData &b)
diff --git a/src/multimedia/qsymbolsresolveutils.cpp b/src/multimedia/qsymbolsresolveutils.cpp
new file mode 100644
index 000000000..81c7410d2
--- /dev/null
+++ b/src/multimedia/qsymbolsresolveutils.cpp
@@ -0,0 +1,79 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qsymbolsresolveutils_p.h"
+
+#include <qdebug.h>
+#include <algorithm>
+#include <qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+Q_STATIC_LOGGING_CATEGORY(qLcSymbolsResolver, "qt.multimedia.symbolsresolver");
+
+bool SymbolsResolver::isLazyLoadEnabled()
+{
+ static const bool lazyLoad =
+ !static_cast<bool>(qEnvironmentVariableIntValue("QT_INSTANT_LOAD_FFMPEG_STUBS"));
+ return lazyLoad;
+}
+
+SymbolsResolver::SymbolsResolver(const char *libLoggingName, LibraryLoader loader)
+ : m_libLoggingName(libLoggingName)
+{
+ Q_ASSERT(libLoggingName);
+ Q_ASSERT(loader);
+
+ auto library = loader();
+ if (library && library->isLoaded())
+ m_library = std::move(library);
+ else
+ qCWarning(qLcSymbolsResolver) << "Couldn't load" << m_libLoggingName << "library";
+}
+
+SymbolsResolver::SymbolsResolver(const char *libName, const char *version,
+ const char *libLoggingName)
+ : m_libLoggingName(libLoggingName ? libLoggingName : libName)
+{
+ Q_ASSERT(libName);
+ Q_ASSERT(version);
+
+ auto library = std::make_unique<QLibrary>(QString::fromLocal8Bit(libName),
+ QString::fromLocal8Bit(version));
+ if (library->load())
+ m_library = std::move(library);
+ else
+ qCWarning(qLcSymbolsResolver) << "Couldn't load" << m_libLoggingName << "library";
+}
+
+SymbolsResolver::~SymbolsResolver()
+{
+ if (m_library)
+ m_library->unload();
+}
+
+QFunctionPointer SymbolsResolver::initFunction(const char *funcName)
+{
+ if (!m_library)
+ return nullptr;
+ if (auto func = m_library->resolve(funcName))
+ return func;
+
+ qCWarning(qLcSymbolsResolver) << "Couldn't resolve" << m_libLoggingName << "symbol" << funcName;
+ m_library->unload();
+ m_library.reset();
+ return nullptr;
+}
+
+void SymbolsResolver::checkLibrariesLoaded(SymbolsMarker *begin, SymbolsMarker *end)
+{
+ if (m_library) {
+ qCDebug(qLcSymbolsResolver) << m_libLoggingName << "symbols resolved";
+ } else {
+ const auto size = reinterpret_cast<char *>(end) - reinterpret_cast<char *>(begin);
+ memset(begin, 0, size);
+ qCWarning(qLcSymbolsResolver) << "Couldn't resolve" << m_libLoggingName << "symbols";
+ }
+}
+
+QT_END_NAMESPACE
diff --git a/src/multimedia/qsymbolsresolveutils_p.h b/src/multimedia/qsymbolsresolveutils_p.h
new file mode 100644
index 000000000..98a552170
--- /dev/null
+++ b/src/multimedia/qsymbolsresolveutils_p.h
@@ -0,0 +1,178 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef Q_SYMBOLSRESOLVEUTILS
+#define Q_SYMBOLSRESOLVEUTILS
+
+#include <QtCore/qlibrary.h>
+#include <QtMultimedia/qtmultimediaexports.h>
+#include <tuple>
+#include <memory>
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+QT_BEGIN_NAMESPACE
+
+constexpr bool areVersionsEqual(const char lhs[], const char rhs[])
+{
+ int i = 0;
+ for (; lhs[i] && rhs[i]; ++i)
+ if (lhs[i] != rhs[i])
+ return false;
+ return lhs[i] == rhs[i];
+}
+
+constexpr bool areVersionsEqual(const char lhs[], int rhsInt)
+{
+ int lhsInt = 0;
+ for (int i = 0; lhs[i]; ++i) {
+ if (lhs[i] < '0' || lhs[i] > '9')
+ return false;
+
+ lhsInt *= 10;
+ lhsInt += lhs[i] - '0';
+ }
+
+ return lhsInt == rhsInt;
+}
+
+
+template <typename T>
+struct DefaultReturn
+{
+ template <typename... Arg>
+ T operator()(Arg &&...) { return val; }
+ T val;
+};
+
+template <>
+struct DefaultReturn<void>
+{
+ template <typename... Arg>
+ void operator()(Arg &&...) { }
+};
+
+template <typename...>
+struct FuncInfo;
+
+template <typename R, typename... A>
+struct FuncInfo<R(A...)>
+{
+ using Return = R;
+ using Args = std::tuple<A...>;
+};
+
+class Q_MULTIMEDIA_EXPORT SymbolsResolver
+{
+public:
+ using LibraryLoader = std::unique_ptr<QLibrary> (*)();
+ static bool isLazyLoadEnabled();
+
+ ~SymbolsResolver();
+protected:
+ SymbolsResolver(const char *libLoggingName, LibraryLoader loader);
+
+ SymbolsResolver(const char *libName, const char *version = "",
+ const char *libLoggingName = nullptr);
+
+ QFunctionPointer initFunction(const char *name);
+
+ struct SymbolsMarker {};
+ void checkLibrariesLoaded(SymbolsMarker *begin, SymbolsMarker *end);
+
+private:
+ const char *m_libLoggingName;
+ std::unique_ptr<QLibrary> m_library;
+};
+
+
+QT_END_NAMESPACE
+
+// clang-format off
+
+#define CHECK_VERSIONS(Name, NeededSoversion, DetectedVersion) \
+ static_assert(areVersionsEqual(NeededSoversion, DetectedVersion), \
+ "Configuartion error: misleading " Name " versions!")
+
+#define BEGIN_INIT_FUNCS(...) \
+ QT_USE_NAMESPACE \
+ namespace { \
+ class SymbolsResolverImpl : SymbolsResolver { \
+ public: \
+ SymbolsResolverImpl() : SymbolsResolver(__VA_ARGS__) \
+ { checkLibrariesLoaded(&symbolsBegin, &symbolsEnd); } \
+ static const SymbolsResolverImpl& instance() \
+ { static const SymbolsResolverImpl instance; return instance; } \
+ SymbolsMarker symbolsBegin;
+
+#define INIT_FUNC(F) QFunctionPointer F = initFunction(#F);
+
+#define END_INIT_FUNCS() \
+ SymbolsMarker symbolsEnd; \
+ }; \
+ [[maybe_unused]] static const auto *instantResolver = \
+ SymbolsResolver::isLazyLoadEnabled() ? &SymbolsResolverImpl::instance() : nullptr; \
+ }
+
+
+#ifdef Q_EXPORT_STUB_SYMBOLS
+#define EXPORT_FUNC Q_MULTIMEDIA_EXPORT
+#else
+#define EXPORT_FUNC
+#endif
+
+#define DEFINE_FUNC_IMPL(F, Vars, TypesWithVars, ReturnFunc) \
+ using F##_ReturnType = FuncInfo<decltype(F)>::Return; \
+ extern "C" EXPORT_FUNC [[maybe_unused]] F##_ReturnType F(TypesWithVars(F)) { \
+ using F##_Type = F##_ReturnType (*)(TypesWithVars(F)); \
+ const auto f = SymbolsResolverImpl::instance().F; \
+ return f ? (reinterpret_cast<F##_Type>(f))(Vars()) : ReturnFunc(); \
+ }
+
+
+#define VAR(I) a##I
+#define VARS0()
+#define VARS1() VAR(0)
+#define VARS2() VARS1(), VAR(1)
+#define VARS3() VARS2(), VAR(2)
+#define VARS4() VARS3(), VAR(3)
+#define VARS5() VARS4(), VAR(4)
+#define VARS6() VARS5(), VAR(5)
+#define VARS7() VARS6(), VAR(6)
+#define VARS8() VARS7(), VAR(7)
+#define VARS9() VARS8(), VAR(8)
+#define VARS10() VARS9(), VAR(9)
+#define VARS11() VARS10(), VAR(10)
+
+#define TYPE_WITH_VAR(F, I) std::tuple_element_t<I, FuncInfo<decltype(F)>::Args> VAR(I)
+#define TYPES_WITH_VARS0(F)
+#define TYPES_WITH_VARS1(F) TYPE_WITH_VAR(F, 0)
+#define TYPES_WITH_VARS2(F) TYPES_WITH_VARS1(F), TYPE_WITH_VAR(F, 1)
+#define TYPES_WITH_VARS3(F) TYPES_WITH_VARS2(F), TYPE_WITH_VAR(F, 2)
+#define TYPES_WITH_VARS4(F) TYPES_WITH_VARS3(F), TYPE_WITH_VAR(F, 3)
+#define TYPES_WITH_VARS5(F) TYPES_WITH_VARS4(F), TYPE_WITH_VAR(F, 4)
+#define TYPES_WITH_VARS6(F) TYPES_WITH_VARS5(F), TYPE_WITH_VAR(F, 5)
+#define TYPES_WITH_VARS7(F) TYPES_WITH_VARS6(F), TYPE_WITH_VAR(F, 6)
+#define TYPES_WITH_VARS8(F) TYPES_WITH_VARS7(F), TYPE_WITH_VAR(F, 7)
+#define TYPES_WITH_VARS9(F) TYPES_WITH_VARS8(F), TYPE_WITH_VAR(F, 8)
+#define TYPES_WITH_VARS10(F) TYPES_WITH_VARS9(F), TYPE_WITH_VAR(F, 9)
+#define TYPES_WITH_VARS11(F) TYPES_WITH_VARS10(F), TYPE_WITH_VAR(F, 10)
+
+
+#define RET(F, ...) DefaultReturn<FuncInfo<decltype(F)>::Return>{__VA_ARGS__}
+
+#define DEFINE_FUNC(F, ArgsCount, /*Return value*/...) \
+ DEFINE_FUNC_IMPL(F, VARS##ArgsCount, TYPES_WITH_VARS##ArgsCount, RET(F, __VA_ARGS__));
+
+// clang-format on
+
+#endif // Q_SYMBOLSRESOLVEUTILS
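To illustrate how the macros above are meant to be combined, here is a hypothetical stub file for a fictional libfoo with soversion 1; the library name, soversion, and the functions foo_init/foo_shutdown are invented for this sketch and do not correspond to any real dependency:

    // Declarations normally provided by the library's own header.
    extern "C" {
    int  foo_init(int flags);
    void foo_shutdown(void);
    }

    CHECK_VERSIONS("foo", "1", "1");   // compile-time soversion consistency check

    BEGIN_INIT_FUNCS("foo", "1")       // loads libfoo with soversion 1 via QLibrary
    INIT_FUNC(foo_init)
    INIT_FUNC(foo_shutdown)
    END_INIT_FUNCS()

    DEFINE_FUNC(foo_init, 1, -1);      // one argument; the stub returns -1 if unresolved
    DEFINE_FUNC(foo_shutdown, 0);      // no arguments; void return

The resolver is a process-wide singleton; each DEFINE_FUNC stub falls back to the given default value when its symbol could not be resolved.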
diff --git a/src/multimedia/recording/qmediacapturesession.cpp b/src/multimedia/recording/qmediacapturesession.cpp
index f175cd98e..9df09acef 100644
--- a/src/multimedia/recording/qmediacapturesession.cpp
+++ b/src/multimedia/recording/qmediacapturesession.cpp
@@ -10,16 +10,20 @@
#include "qvideosink.h"
#include "qscreencapture.h"
#include "qwindowcapture.h"
+#include "qvideoframeinput.h"
#include "qplatformmediaintegration_p.h"
#include "qplatformmediacapture_p.h"
#include "qaudioinput.h"
+#include "qaudiobufferinput.h"
#include "qaudiooutput.h"
QT_BEGIN_NAMESPACE
void QMediaCaptureSessionPrivate::setVideoSink(QVideoSink *sink)
{
+ Q_Q(QMediaCaptureSession);
+
if (sink == videoSink)
return;
if (videoSink)
@@ -41,18 +45,23 @@ void QMediaCaptureSessionPrivate::setVideoSink(QVideoSink *sink)
\ingroup multimedia_video
\ingroup multimedia_audio
- The QMediaCaptureSession is the central class that manages capturing of media on the local device.
+ The QMediaCaptureSession is the central class that manages capturing of media on the local
+ device.
- You can connect a video input to QMediaCaptureSession using setCamera(), setScreenCapture() or setWindowCapture().
- A preview of the captured media can be seen by setting a QVideoWidget or QGraphicsVideoItem using setVideoOutput().
+ You can connect a video input to QMediaCaptureSession using setCamera(),
+ setScreenCapture(), setWindowCapture() or setVideoFrameInput().
+ A preview of the captured media can be seen by setting a QVideoWidget or QGraphicsVideoItem
+ using setVideoOutput().
- You can connect a microphone to QMediaCaptureSession using setAudioInput().
+ You can connect a microphone to QMediaCaptureSession using setAudioInput(), or set your
+ custom audio input using setAudioBufferInput().
The captured sound can be heard by routing the audio to an output device using setAudioOutput().
- You can capture still images from a camera by setting a QImageCapture object on the capture session,
- and record audio/video using a QMediaRecorder.
+ You can capture still images from a camera by setting a QImageCapture object on the capture
+ session, and record audio/video using a QMediaRecorder.
- \sa QCamera, QAudioDevice, QMediaRecorder, QImageCapture, QScreenCapture, QWindowCapture, QMediaRecorder, QGraphicsVideoItem
+ \sa QCamera, QAudioDevice, QMediaRecorder, QImageCapture, QScreenCapture, QWindowCapture,
+ QVideoFrameInput, QMediaRecorder, QGraphicsVideoItem
*/
/*!
@@ -112,14 +121,16 @@ void QMediaCaptureSessionPrivate::setVideoSink(QVideoSink *sink)
Creates a session for media capture from the \a parent object.
*/
QMediaCaptureSession::QMediaCaptureSession(QObject *parent)
- : QObject(parent),
- d_ptr(new QMediaCaptureSessionPrivate)
+ : QObject{ *new QMediaCaptureSessionPrivate, parent }
{
- d_ptr->q = this;
+ QT6_ONLY(Q_UNUSED(unused))
+
+ Q_D(QMediaCaptureSession);
+
auto maybeCaptureSession = QPlatformMediaIntegration::instance()->createCaptureSession();
if (maybeCaptureSession) {
- d_ptr->captureSession = maybeCaptureSession.value();
- d_ptr->captureSession->setCaptureSession(this);
+ d->captureSession.reset(maybeCaptureSession.value());
+ d->captureSession->setCaptureSession(this);
} else {
qWarning() << "Failed to initialize QMediaCaptureSession" << maybeCaptureSession.error();
}
@@ -130,16 +141,19 @@ QMediaCaptureSession::QMediaCaptureSession(QObject *parent)
*/
QMediaCaptureSession::~QMediaCaptureSession()
{
+ Q_D(QMediaCaptureSession);
+
setCamera(nullptr);
setRecorder(nullptr);
setImageCapture(nullptr);
setScreenCapture(nullptr);
setWindowCapture(nullptr);
+ setVideoFrameInput(nullptr);
+ setAudioBufferInput(nullptr);
setAudioInput(nullptr);
setAudioOutput(nullptr);
- d_ptr->setVideoSink(nullptr);
- delete d_ptr->captureSession;
- delete d_ptr;
+ d->setVideoSink(nullptr);
+ d->captureSession.reset();
}
/*!
\qmlproperty AudioInput QtMultimedia::CaptureSession::audioInput
@@ -154,7 +168,8 @@ QMediaCaptureSession::~QMediaCaptureSession()
*/
QAudioInput *QMediaCaptureSession::audioInput() const
{
- return d_ptr->audioInput;
+ Q_D(const QMediaCaptureSession);
+ return d->audioInput;
}
/*!
@@ -164,28 +179,69 @@ QAudioInput *QMediaCaptureSession::audioInput() const
*/
void QMediaCaptureSession::setAudioInput(QAudioInput *input)
{
- QAudioInput *oldInput = d_ptr->audioInput;
+ Q_D(QMediaCaptureSession);
+
+ QAudioInput *oldInput = d->audioInput;
if (oldInput == input)
return;
// To avoid double emit of audioInputChanged
// from recursive setAudioInput(nullptr) call.
- d_ptr->audioInput = nullptr;
+ d->audioInput = nullptr;
- if (d_ptr->captureSession)
- d_ptr->captureSession->setAudioInput(nullptr);
+ if (d->captureSession)
+ d->captureSession->setAudioInput(nullptr);
if (oldInput)
oldInput->setDisconnectFunction({});
if (input) {
input->setDisconnectFunction([this](){ setAudioInput(nullptr); });
- if (d_ptr->captureSession)
- d_ptr->captureSession->setAudioInput(input->handle());
+ if (d->captureSession)
+ d->captureSession->setAudioInput(input->handle());
}
- d_ptr->audioInput = input;
+ d->audioInput = input;
emit audioInputChanged();
}
/*!
+ \property QMediaCaptureSession::audioBufferInput
+ \since 6.8
+
+ \brief The object used to send custom audio buffers to \l QMediaRecorder.
+*/
+QAudioBufferInput *QMediaCaptureSession::audioBufferInput() const
+{
+ Q_D(const QMediaCaptureSession);
+
+ return d->audioBufferInput;
+}
+
+void QMediaCaptureSession::setAudioBufferInput(QAudioBufferInput *input)
+{
+ Q_D(QMediaCaptureSession);
+
+ // TODO: come up with an unification of the captures setup
+ QAudioBufferInput *oldInput = d->audioBufferInput;
+ if (oldInput == input)
+ return;
+ d->audioBufferInput = input;
+ if (d->captureSession)
+ d->captureSession->setAudioBufferInput(nullptr);
+ if (oldInput) {
+ if (oldInput->captureSession() && oldInput->captureSession() != this)
+ oldInput->captureSession()->setAudioBufferInput(nullptr);
+ oldInput->setCaptureSession(nullptr);
+ }
+ if (input) {
+ if (input->captureSession())
+ input->captureSession()->setAudioBufferInput(nullptr);
+ if (d->captureSession)
+ d->captureSession->setAudioBufferInput(input->platformAudioBufferInput());
+ input->setCaptureSession(this);
+ }
+ emit audioBufferInputChanged();
+}
+
+/*!
\qmlproperty Camera QtMultimedia::CaptureSession::camera
\brief The camera used to capture video.
@@ -204,18 +260,22 @@ void QMediaCaptureSession::setAudioInput(QAudioInput *input)
*/
QCamera *QMediaCaptureSession::camera() const
{
- return d_ptr->camera;
+ Q_D(const QMediaCaptureSession);
+
+ return d->camera;
}
void QMediaCaptureSession::setCamera(QCamera *camera)
{
+ Q_D(QMediaCaptureSession);
+
// TODO: come up with an unification of the captures setup
- QCamera *oldCamera = d_ptr->camera;
+ QCamera *oldCamera = d->camera;
if (oldCamera == camera)
return;
- d_ptr->camera = camera;
- if (d_ptr->captureSession)
- d_ptr->captureSession->setCamera(nullptr);
+ d->camera = camera;
+ if (d->captureSession)
+ d->captureSession->setCamera(nullptr);
if (oldCamera) {
if (oldCamera->captureSession() && oldCamera->captureSession() != this)
oldCamera->captureSession()->setCamera(nullptr);
@@ -224,8 +284,8 @@ void QMediaCaptureSession::setCamera(QCamera *camera)
if (camera) {
if (camera->captureSession())
camera->captureSession()->setCamera(nullptr);
- if (d_ptr->captureSession)
- d_ptr->captureSession->setCamera(camera->platformCamera());
+ if (d->captureSession)
+ d->captureSession->setCamera(camera->platformCamera());
camera->setCaptureSession(this);
}
emit cameraChanged();
@@ -252,18 +312,22 @@ void QMediaCaptureSession::setCamera(QCamera *camera)
*/
QScreenCapture *QMediaCaptureSession::screenCapture()
{
- return d_ptr ? d_ptr->screenCapture : nullptr;
+ Q_D(QMediaCaptureSession);
+
+ return d->screenCapture;
}
void QMediaCaptureSession::setScreenCapture(QScreenCapture *screenCapture)
{
+ Q_D(QMediaCaptureSession);
+
// TODO: come up with an unification of the captures setup
- QScreenCapture *oldScreenCapture = d_ptr->screenCapture;
+ QScreenCapture *oldScreenCapture = d->screenCapture;
if (oldScreenCapture == screenCapture)
return;
- d_ptr->screenCapture = screenCapture;
- if (d_ptr->captureSession)
- d_ptr->captureSession->setScreenCapture(nullptr);
+ d->screenCapture = screenCapture;
+ if (d->captureSession)
+ d->captureSession->setScreenCapture(nullptr);
if (oldScreenCapture) {
if (oldScreenCapture->captureSession() && oldScreenCapture->captureSession() != this)
oldScreenCapture->captureSession()->setScreenCapture(nullptr);
@@ -272,8 +336,8 @@ void QMediaCaptureSession::setScreenCapture(QScreenCapture *screenCapture)
if (screenCapture) {
if (screenCapture->captureSession())
screenCapture->captureSession()->setScreenCapture(nullptr);
- if (d_ptr->captureSession)
- d_ptr->captureSession->setScreenCapture(screenCapture->platformScreenCapture());
+ if (d->captureSession)
+ d->captureSession->setScreenCapture(screenCapture->platformScreenCapture());
screenCapture->setCaptureSession(this);
}
emit screenCaptureChanged();
@@ -298,19 +362,23 @@ void QMediaCaptureSession::setScreenCapture(QScreenCapture *screenCapture)
Record a window by adding a window capture object
to the capture session using this property.
*/
-QWindowCapture *QMediaCaptureSession::windowCapture() {
- return d_ptr ? d_ptr->windowCapture : nullptr;
+QWindowCapture *QMediaCaptureSession::windowCapture()
+{
+ Q_D(QMediaCaptureSession);
+ return d->windowCapture;
}
void QMediaCaptureSession::setWindowCapture(QWindowCapture *windowCapture)
{
+ Q_D(QMediaCaptureSession);
+
// TODO: come up with an unification of the captures setup
- QWindowCapture *oldCapture = d_ptr->windowCapture;
+ QWindowCapture *oldCapture = d->windowCapture;
if (oldCapture == windowCapture)
return;
- d_ptr->windowCapture = windowCapture;
- if (d_ptr->captureSession)
- d_ptr->captureSession->setWindowCapture(nullptr);
+ d->windowCapture = windowCapture;
+ if (d->captureSession)
+ d->captureSession->setWindowCapture(nullptr);
if (oldCapture) {
if (oldCapture->captureSession() && oldCapture->captureSession() != this)
oldCapture->captureSession()->setWindowCapture(nullptr);
@@ -319,14 +387,52 @@ void QMediaCaptureSession::setWindowCapture(QWindowCapture *windowCapture)
if (windowCapture) {
if (windowCapture->captureSession())
windowCapture->captureSession()->setWindowCapture(nullptr);
- if (d_ptr->captureSession)
- d_ptr->captureSession->setWindowCapture(windowCapture->platformWindowCapture());
+ if (d->captureSession)
+ d->captureSession->setWindowCapture(windowCapture->platformWindowCapture());
windowCapture->setCaptureSession(this);
}
emit windowCaptureChanged();
}
/*!
+ \property QMediaCaptureSession::videoFrameInput
+ \since 6.8
+
+ \brief The object used to send custom video frames to
+ \l QMediaRecorder or a video output.
+*/
+QVideoFrameInput *QMediaCaptureSession::videoFrameInput() const
+{
+ Q_D(const QMediaCaptureSession);
+ return d->videoFrameInput;
+}
+
+void QMediaCaptureSession::setVideoFrameInput(QVideoFrameInput *input)
+{
+ Q_D(QMediaCaptureSession);
+ // TODO: come up with an unification of the captures setup
+ QVideoFrameInput *oldInput = d->videoFrameInput;
+ if (oldInput == input)
+ return;
+ d->videoFrameInput = input;
+ if (d->captureSession)
+ d->captureSession->setVideoFrameInput(nullptr);
+ if (oldInput) {
+ if (oldInput->captureSession() && oldInput->captureSession() != this)
+ oldInput->captureSession()->setVideoFrameInput(nullptr);
+ oldInput->setCaptureSession(nullptr);
+ }
+ if (input) {
+ if (input->captureSession())
+ input->captureSession()->setVideoFrameInput(nullptr);
+ if (d->captureSession)
+ d->captureSession->setVideoFrameInput(input->platformVideoFrameInput());
+ input->setCaptureSession(this);
+ }
+ emit videoFrameInputChanged();
+}
+
+/*!
\qmlproperty ImageCapture QtMultimedia::CaptureSession::imageCapture
\brief The object used to capture still images.
@@ -344,18 +450,22 @@ void QMediaCaptureSession::setWindowCapture(QWindowCapture *windowCapture)
*/
QImageCapture *QMediaCaptureSession::imageCapture()
{
- return d_ptr->imageCapture;
+ Q_D(QMediaCaptureSession);
+
+ return d->imageCapture;
}
void QMediaCaptureSession::setImageCapture(QImageCapture *imageCapture)
{
+ Q_D(QMediaCaptureSession);
+
// TODO: come up with an unification of the captures setup
- QImageCapture *oldImageCapture = d_ptr->imageCapture;
+ QImageCapture *oldImageCapture = d->imageCapture;
if (oldImageCapture == imageCapture)
return;
- d_ptr->imageCapture = imageCapture;
- if (d_ptr->captureSession)
- d_ptr->captureSession->setImageCapture(nullptr);
+ d->imageCapture = imageCapture;
+ if (d->captureSession)
+ d->captureSession->setImageCapture(nullptr);
if (oldImageCapture) {
if (oldImageCapture->captureSession() && oldImageCapture->captureSession() != this)
oldImageCapture->captureSession()->setImageCapture(nullptr);
@@ -364,8 +474,8 @@ void QMediaCaptureSession::setImageCapture(QImageCapture *imageCapture)
if (imageCapture) {
if (imageCapture->captureSession())
imageCapture->captureSession()->setImageCapture(nullptr);
- if (d_ptr->captureSession)
- d_ptr->captureSession->setImageCapture(imageCapture->platformImageCapture());
+ if (d->captureSession)
+ d->captureSession->setImageCapture(imageCapture->platformImageCapture());
imageCapture->setCaptureSession(this);
}
emit imageCaptureChanged();
@@ -389,17 +499,19 @@ void QMediaCaptureSession::setImageCapture(QImageCapture *imageCapture)
QMediaRecorder *QMediaCaptureSession::recorder()
{
- return d_ptr->recorder;
+ Q_D(QMediaCaptureSession);
+ return d->recorder;
}
void QMediaCaptureSession::setRecorder(QMediaRecorder *recorder)
{
- QMediaRecorder *oldRecorder = d_ptr->recorder;
+ Q_D(QMediaCaptureSession);
+ QMediaRecorder *oldRecorder = d->recorder;
if (oldRecorder == recorder)
return;
- d_ptr->recorder = recorder;
- if (d_ptr->captureSession)
- d_ptr->captureSession->setMediaRecorder(nullptr);
+ d->recorder = recorder;
+ if (d->captureSession)
+ d->captureSession->setMediaRecorder(nullptr);
if (oldRecorder) {
if (oldRecorder->captureSession() && oldRecorder->captureSession() != this)
oldRecorder->captureSession()->setRecorder(nullptr);
@@ -408,8 +520,8 @@ void QMediaCaptureSession::setRecorder(QMediaRecorder *recorder)
if (recorder) {
if (recorder->captureSession())
recorder->captureSession()->setRecorder(nullptr);
- if (d_ptr->captureSession)
- d_ptr->captureSession->setMediaRecorder(recorder->platformRecoder());
+ if (d->captureSession)
+ d->captureSession->setMediaRecorder(recorder->platformRecoder());
recorder->setCaptureSession(this);
}
emit recorderChanged();
@@ -487,25 +599,27 @@ QVideoSink *QMediaCaptureSession::videoSink() const
*/
void QMediaCaptureSession::setAudioOutput(QAudioOutput *output)
{
- QAudioOutput *oldOutput = d_ptr->audioOutput;
+ Q_D(QMediaCaptureSession);
+
+ QAudioOutput *oldOutput = d->audioOutput;
if (oldOutput == output)
return;
// We don't want to end up with signal emitted
// twice (from recursive call setAudioInput(nullptr)
// from oldOutput->setDisconnectFunction():
- d_ptr->audioOutput = nullptr;
+ d->audioOutput = nullptr;
- if (d_ptr->captureSession)
- d_ptr->captureSession->setAudioOutput(nullptr);
+ if (d->captureSession)
+ d->captureSession->setAudioOutput(nullptr);
if (oldOutput)
oldOutput->setDisconnectFunction({});
if (output) {
output->setDisconnectFunction([this](){ setAudioOutput(nullptr); });
- if (d_ptr->captureSession)
- d_ptr->captureSession->setAudioOutput(output->handle());
+ if (d->captureSession)
+ d->captureSession->setAudioOutput(output->handle());
}
- d_ptr->audioOutput = output;
+ d->audioOutput = output;
emit audioOutputChanged();
}
/*!
@@ -531,7 +645,8 @@ QAudioOutput *QMediaCaptureSession::audioOutput() const
*/
QPlatformMediaCaptureSession *QMediaCaptureSession::platformSession() const
{
- return d_ptr->captureSession;
+ Q_D(const QMediaCaptureSession);
+ return d->captureSession.get();
}
/*!
\qmlsignal QtMultimedia::CaptureSession::audioInputChanged()
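As a usage sketch of the new capture-session inputs introduced above (hypothetical application code, not part of the patch):

    #include <QAudioBufferInput>
    #include <QVideoFrameInput>
    #include <QMediaCaptureSession>
    #include <QMediaRecorder>

    void setUpCustomRecording(QMediaCaptureSession &session, QMediaRecorder &recorder,
                              QAudioBufferInput &audioInput, QVideoFrameInput &videoInput)
    {
        session.setAudioBufferInput(&audioInput); // audio buffers pushed by the application
        session.setVideoFrameInput(&videoInput);  // video frames pushed by the application
        session.setRecorder(&recorder);
        recorder.setAutoStop(true);               // stop once all inputs report end of stream
        recorder.record();
    }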
diff --git a/src/multimedia/recording/qmediacapturesession.h b/src/multimedia/recording/qmediacapturesession.h
index 1333af7eb..219c382d1 100644
--- a/src/multimedia/recording/qmediacapturesession.h
+++ b/src/multimedia/recording/qmediacapturesession.h
@@ -11,6 +11,7 @@ QT_BEGIN_NAMESPACE
class QCamera;
class QAudioInput;
+class QAudioBufferInput;
class QAudioOutput;
class QCameraDevice;
class QImageCapture;
@@ -19,18 +20,23 @@ class QPlatformMediaCaptureSession;
class QVideoSink;
class QScreenCapture;
class QWindowCapture;
+class QVideoFrameInput;
class QMediaCaptureSessionPrivate;
class Q_MULTIMEDIA_EXPORT QMediaCaptureSession : public QObject
{
Q_OBJECT
Q_PROPERTY(QAudioInput *audioInput READ audioInput WRITE setAudioInput NOTIFY audioInputChanged)
+ Q_PROPERTY(QAudioBufferInput *audioBufferInput READ audioBufferInput WRITE setAudioBufferInput
+ NOTIFY audioBufferInputChanged)
Q_PROPERTY(QAudioOutput *audioOutput READ audioOutput WRITE setAudioOutput NOTIFY audioOutputChanged)
Q_PROPERTY(QCamera *camera READ camera WRITE setCamera NOTIFY cameraChanged)
Q_PROPERTY(
QScreenCapture *screenCapture READ screenCapture WRITE setScreenCapture NOTIFY screenCaptureChanged)
Q_PROPERTY(
QWindowCapture *windowCapture READ windowCapture WRITE setWindowCapture NOTIFY windowCaptureChanged)
+ Q_PROPERTY(QVideoFrameInput *videoFrameInput READ videoFrameInput WRITE setVideoFrameInput
+ NOTIFY videoFrameInputChanged)
Q_PROPERTY(QImageCapture *imageCapture READ imageCapture WRITE setImageCapture NOTIFY imageCaptureChanged)
Q_PROPERTY(QMediaRecorder *recorder READ recorder WRITE setRecorder NOTIFY recorderChanged)
Q_PROPERTY(QObject *videoOutput READ videoOutput WRITE setVideoOutput NOTIFY videoOutputChanged)
@@ -41,6 +47,9 @@ public:
QAudioInput *audioInput() const;
void setAudioInput(QAudioInput *input);
+ QAudioBufferInput *audioBufferInput() const;
+ void setAudioBufferInput(QAudioBufferInput *input);
+
QCamera *camera() const;
void setCamera(QCamera *camera);
@@ -53,6 +62,9 @@ public:
QWindowCapture *windowCapture();
void setWindowCapture(QWindowCapture *windowCapture);
+ QVideoFrameInput *videoFrameInput() const;
+ void setVideoFrameInput(QVideoFrameInput *input);
+
QMediaRecorder *recorder();
void setRecorder(QMediaRecorder *recorder);
@@ -69,9 +81,11 @@ public:
Q_SIGNALS:
void audioInputChanged();
+ void audioBufferInputChanged();
void cameraChanged();
void screenCaptureChanged();
void windowCaptureChanged();
+ void videoFrameInputChanged();
void imageCaptureChanged();
void recorderChanged();
void videoOutputChanged();
@@ -80,7 +94,9 @@ Q_SIGNALS:
private:
friend class QPlatformMediaCaptureSession;
- QMediaCaptureSessionPrivate *d_ptr;
+ // ### Qt7: remove unused member
+ QT6_ONLY(void *unused = nullptr;) // for ABI compatibility
+
Q_DISABLE_COPY(QMediaCaptureSession)
Q_DECLARE_PRIVATE(QMediaCaptureSession)
};
diff --git a/src/multimedia/recording/qmediacapturesession_p.h b/src/multimedia/recording/qmediacapturesession_p.h
index 8702c8d2b..cba222993 100644
--- a/src/multimedia/recording/qmediacapturesession_p.h
+++ b/src/multimedia/recording/qmediacapturesession_p.h
@@ -18,19 +18,28 @@
#include <QtMultimedia/qmediacapturesession.h>
#include <QtCore/qpointer.h>
+#include <QtCore/private/qobject_p.h>
QT_BEGIN_NAMESPACE
-class QMediaCaptureSessionPrivate
+class QMediaCaptureSessionPrivate : public QObjectPrivate
{
public:
- QMediaCaptureSession *q = nullptr;
- QPlatformMediaCaptureSession *captureSession = nullptr;
+ static QMediaCaptureSessionPrivate *get(QMediaCaptureSession *session)
+ {
+ return reinterpret_cast<QMediaCaptureSessionPrivate *>(QObjectPrivate::get(session));
+ }
+
+ Q_DECLARE_PUBLIC(QMediaCaptureSession)
+
+ std::unique_ptr<QPlatformMediaCaptureSession> captureSession;
QAudioInput *audioInput = nullptr;
+ QPointer<QAudioBufferInput> audioBufferInput;
QAudioOutput *audioOutput = nullptr;
QPointer<QCamera> camera;
QPointer<QScreenCapture> screenCapture;
QPointer<QWindowCapture> windowCapture;
+ QPointer<QVideoFrameInput> videoFrameInput;
QPointer<QImageCapture> imageCapture;
QPointer<QMediaRecorder> recorder;
QPointer<QVideoSink> videoSink;
diff --git a/src/multimedia/recording/qmediarecorder.cpp b/src/multimedia/recording/qmediarecorder.cpp
index a7f5a31b8..ea38b231a 100644
--- a/src/multimedia/recording/qmediarecorder.cpp
+++ b/src/multimedia/recording/qmediarecorder.cpp
@@ -227,7 +227,7 @@ void QMediaRecorder::setOutputLocation(const QUrl &location)
/*!
Set the output IO device for media content.
- The \a device must have been opened in the \l{QIODevice::Write}{Write} or
+ The \a device must have been opened in the \l{QIODevice::WriteOnly}{WriteOnly} or
\l{QIODevice::ReadWrite}{ReadWrite} modes before the recording starts.
The media recorder doesn't take ownership of the specified \a device.
@@ -573,10 +573,47 @@ void QMediaRecorder::addMetaData(const QMediaMetaData &metaData)
{
auto data = this->metaData();
// merge data
- for (const auto &k : metaData.keys())
- data.insert(k, metaData.value(k));
+ for (auto &&[key, value] : metaData.asKeyValueRange())
+ data.insert(key, value);
setMetaData(data);
}
+
+/*!
+ \property QMediaRecorder::autoStop
+
+ This property controls whether the media recorder stops automatically when
+ all media inputs have reported the end of the stream or have been deactivated.
+
+ The end of the stream is reported by sending an empty media frame,
+ which you can send explicitly via \l QVideoFrameInput or \l QAudioBufferInput.
+
+ Video inputs, specifically \l QCamera, \l QScreenCapture and \l QWindowCapture,
+ can be deactivated via the function \c setActive.
+
+ Defaults to \c false.
+
+ \sa QCamera, QScreenCapture, QWindowCapture
+*/
+
+bool QMediaRecorder::autoStop() const
+{
+ Q_D(const QMediaRecorder);
+
+ return d->autoStop;
+}
+
+void QMediaRecorder::setAutoStop(bool autoStop)
+{
+ Q_D(QMediaRecorder);
+
+ if (d->autoStop == autoStop)
+ return;
+
+ d->autoStop = autoStop;
+ d->control->updateAutoStop();
+ emit autoStopChanged();
+}
+
/*!
\qmlsignal QtMultimedia::MediaRecorder::metaDataChanged()
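A minimal sketch of how autoStop interacts with the end-of-stream convention described above; it assumes QAudioBufferInput exposes a sendAudioBuffer() counterpart to QVideoFrameInput::sendVideoFrame(), which is added elsewhere in this change set:

    void finishRecording(QVideoFrameInput &videoInput, QAudioBufferInput &audioInput)
    {
        // An empty frame/buffer marks the end of the corresponding input stream.
        videoInput.sendVideoFrame(QVideoFrame{});
        audioInput.sendAudioBuffer(QAudioBuffer{}); // assumed API, see note above
        // With QMediaRecorder::autoStop set to true, the recorder stops by itself
        // once every active input has reported the end of its stream.
    }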
diff --git a/src/multimedia/recording/qmediarecorder.h b/src/multimedia/recording/qmediarecorder.h
index fed276baf..a73d9f8af 100644
--- a/src/multimedia/recording/qmediarecorder.h
+++ b/src/multimedia/recording/qmediarecorder.h
@@ -44,6 +44,7 @@ class Q_MULTIMEDIA_EXPORT QMediaRecorder : public QObject
Q_PROPERTY(int audioBitRate READ audioBitRate WRITE setAudioBitRate NOTIFY audioBitRateChanged)
Q_PROPERTY(int audioChannelCount READ audioChannelCount WRITE setAudioChannelCount NOTIFY audioChannelCountChanged)
Q_PROPERTY(int audioSampleRate READ audioSampleRate WRITE setAudioSampleRate NOTIFY audioSampleRateChanged)
+ Q_PROPERTY(bool autoStop READ autoStop WRITE setAutoStop NOTIFY autoStopChanged)
public:
enum Quality
{
@@ -134,6 +135,9 @@ public:
void setMetaData(const QMediaMetaData &metaData);
void addMetaData(const QMediaMetaData &metaData);
+ bool autoStop() const;
+ void setAutoStop(bool autoStop);
+
QMediaCaptureSession *captureSession() const;
QPlatformMediaRecorder *platformRecoder() const;
@@ -162,6 +166,7 @@ Q_SIGNALS:
void audioBitRateChanged();
void audioChannelCountChanged();
void audioSampleRateChanged();
+ void autoStopChanged();
private:
QMediaRecorderPrivate *d_ptr;
diff --git a/src/multimedia/recording/qmediarecorder_p.h b/src/multimedia/recording/qmediarecorder_p.h
index 193aa5f00..896f6c368 100644
--- a/src/multimedia/recording/qmediarecorder_p.h
+++ b/src/multimedia/recording/qmediarecorder_p.h
@@ -38,6 +38,7 @@ public:
QMediaCaptureSession *captureSession = nullptr;
QPlatformMediaRecorder *control = nullptr;
QString initErrorMessage;
+ bool autoStop = false;
bool settingsChanged = false;
diff --git a/src/multimedia/recording/qscreencapture-limitations.qdocinc b/src/multimedia/recording/qscreencapture-limitations.qdocinc
index cac51df02..240a1a389 100644
--- a/src/multimedia/recording/qscreencapture-limitations.qdocinc
+++ b/src/multimedia/recording/qscreencapture-limitations.qdocinc
@@ -1,22 +1,25 @@
-// Copyright (C) 2023 The Qt Company Ltd.
+// Copyright (C) 2024 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GFDL-1.3-no-invariants-only
/*!
//! [content]
\section1 Screen Capture Limitations
- On Qt 6.5.2 and 6.5.3, the following limitations apply to using \1ScreenCapture:
+ On Qt 6.5.2 and above, the following limitations apply to using \1ScreenCapture:
\list
\li It is only supported with the FFmpeg backend.
- \li It is supported on all desktop platforms, except Linux with Wayland
- compositor, due to Wayland protocol restrictions and limitations.
+ \li It is unsupported on Linux with Wayland compositor, due to Wayland
+ protocol restrictions and limitations.
\li It is not supported on mobile operating systems, except on Android.
There, you might run into performance issues as the class is currently
implemented via QScreen::grabWindow, which is not optimal for the use case.
- \li On Linux, it works with X11, but it has not been tested on embedded.
+ \li On embedded with EGLFS, it has limited functionality. For Qt Quick
+ applications, the class is currently implemented via
+ QQuickWindow::grabWindow, which can cause performance issues.
\li In most cases, we set a screen capture frame rate that equals the screen
refresh rate, except on Windows, where the rate might be flexible.
Such a frame rate (75/120 FPS) might cause performance issues on weak
- CPUs if the captured screen is of 4K resolution.
+ CPUs if the captured screen is of 4K resolution. On EGLFS, the capture
+ frame rate is currently locked to 30 FPS.
\endlist
//! [content]
*/
diff --git a/src/multimedia/recording/qvideoframeinput.cpp b/src/multimedia/recording/qvideoframeinput.cpp
new file mode 100644
index 000000000..99500bb65
--- /dev/null
+++ b/src/multimedia/recording/qvideoframeinput.cpp
@@ -0,0 +1,181 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qvideoframeinput.h"
+#include "qmediaframeinput_p.h"
+#include "qmediainputencoderinterface_p.h"
+#include "qplatformvideoframeinput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+class QVideoFrameInputPrivate : public QMediaFrameInputPrivate
+{
+public:
+ QVideoFrameInputPrivate(QVideoFrameInput *q) : q(q) { }
+
+ bool sendVideoFrame(const QVideoFrame &frame)
+ {
+ return sendMediaFrame([&]() { emit m_platfromVideoFrameInput->newVideoFrame(frame); });
+ }
+
+ void initialize(QVideoFrameFormat format = {})
+ {
+ m_platfromVideoFrameInput = std::make_unique<QPlatformVideoFrameInput>(std::move(format));
+ addUpdateSignal(m_platfromVideoFrameInput.get(), &QPlatformVideoFrameInput::encoderUpdated);
+ }
+
+ void uninitialize()
+ {
+ m_platfromVideoFrameInput.reset();
+
+ if (captureSession())
+ captureSession()->setVideoFrameInput(nullptr);
+ }
+
+ QPlatformVideoFrameInput *platfromVideoFrameInput() const
+ {
+ return m_platfromVideoFrameInput.get();
+ }
+
+protected:
+ void updateCaptureSessionConnections(QMediaCaptureSession *prevSession,
+ QMediaCaptureSession *newSession) override
+ {
+ if (prevSession)
+ removeUpdateSignal(prevSession, &QMediaCaptureSession::videoOutputChanged);
+
+ if (newSession)
+ addUpdateSignal(newSession, &QMediaCaptureSession::videoOutputChanged);
+ }
+
+ bool checkIfCanSendMediaFrame() const override
+ {
+ if (auto encoderInterface = m_platfromVideoFrameInput->encoderInterface())
+ return encoderInterface->canPushFrame();
+
+ return captureSession()->videoOutput() || captureSession()->videoSink();
+ }
+
+ void emitReadyToSendMediaFrame() override { emit q->readyToSendVideoFrame(); }
+
+private:
+ QVideoFrameInput *q = nullptr;
+ std::unique_ptr<QPlatformVideoFrameInput> m_platfromVideoFrameInput;
+};
+
+/*!
+ \class QVideoFrameInput
+ \inmodule QtMultimedia
+ \ingroup multimedia
+ \ingroup multimedia_video
+ \since 6.8
+
+ \brief The QVideoFrameInput class is used for providing custom video frames
+ to \l QMediaRecorder or a video output through \l QMediaCaptureSession.
+
+ \sa QMediaRecorder, QMediaCaptureSession, QVideoSink
+*/
+
+/*!
+ Constructs a new QVideoFrameInput object with \a parent.
+*/
+QVideoFrameInput::QVideoFrameInput(QObject *parent) : QVideoFrameInput({}, parent) { }
+
+/*!
+ Constructs a new QVideoFrameInput object with video frame \a format and \a parent.
+
+ The specified \a format will work as a hint for the initialization of the matching
+ video encoder upon invoking \l QMediaRecorder::record().
+ If the format is not specified or not valid, the video encoder will be initialized
+ upon sending the first frame.
+ Sending video frames with another pixel format and size after initialization
+ of the matching video encoder might cause a performance penalty during recording.
+
+ We recommend specifying the format if you know in advance what kind of frames you're
+ going to send.
+*/
+QVideoFrameInput::QVideoFrameInput(const QVideoFrameFormat &format, QObject *parent)
+ : QObject(*new QVideoFrameInputPrivate(this), parent)
+{
+ Q_D(QVideoFrameInput);
+ d->initialize(format);
+}
+
+/*!
+ Destroys the object.
+ */
+QVideoFrameInput::~QVideoFrameInput()
+{
+ Q_D(QVideoFrameInput);
+ d->uninitialize();
+}
+
+/*!
+ Sends \l QVideoFrame to \l QMediaRecorder or a video output
+ through \l QMediaCaptureSession.
+
+ Returns \c true if the specified \a frame has been sent successfully
+ to the destination. Returns \c false if the frame hasn't been sent,
+ which can happen if the instance is not assigned to a
+ \l QMediaCaptureSession, the session has no video outputs or
+ media recorder, the media recorder has not been started, or its queue is full.
+ The signal \l readyToSendVideoFrame will be sent as soon as
+ the destination is able to handle a new frame.
+
+ Sending an empty video frame is treated by \l QMediaRecorder
+ as an end of the input stream. QMediaRecorder stops the recording
+ automatically if \l QMediaRecorder::autoStop is \c true and
+ all the inputs have reported the end of the stream.
+*/
+bool QVideoFrameInput::sendVideoFrame(const QVideoFrame &frame)
+{
+ Q_D(QVideoFrameInput);
+ return d->sendVideoFrame(frame);
+}
+
+/*!
+ Returns the video frame format that was specified
+ upon construction of the video frame input.
+*/
+QVideoFrameFormat QVideoFrameInput::format() const
+{
+ Q_D(const QVideoFrameInput);
+ return d->platfromVideoFrameInput()->frameFormat();
+}
+
+/*!
+ Returns the capture session this video frame input is connected to, or
+ a \c nullptr if the video frame input is not connected to a capture session.
+
+ Use QMediaCaptureSession::setVideoFrameInput() to connect
+ the video frame input to a session.
+*/
+QMediaCaptureSession *QVideoFrameInput::captureSession() const
+{
+ Q_D(const QVideoFrameInput);
+ return d->captureSession();
+}
+
+void QVideoFrameInput::setCaptureSession(QMediaCaptureSession *captureSession)
+{
+ Q_D(QVideoFrameInput);
+ d->setCaptureSession(captureSession);
+}
+
+QPlatformVideoFrameInput *QVideoFrameInput::platformVideoFrameInput() const
+{
+ Q_D(const QVideoFrameInput);
+ return d->platfromVideoFrameInput();
+}
+
+/*!
+ \fn void QVideoFrameInput::readyToSendVideoFrame()
+
+ Signals that a new frame can be sent to the video frame input.
+ After receiving the signal, if you have frames to be sent, invoke \l sendVideoFrame
+ once or in a loop until it returns \c false.
+
+ \sa sendVideoFrame()
+*/
+
+QT_END_NAMESPACE
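A producer loop following the readyToSendVideoFrame() contract documented above; generateFrame() is a hypothetical application function that returns the next frame, or an empty frame when the stream ends:

    #include <QVideoFrameInput>

    QVideoFrame generateFrame(); // hypothetical application-side frame source

    void startProducing(QVideoFrameInput &videoInput)
    {
        QObject::connect(&videoInput, &QVideoFrameInput::readyToSendVideoFrame,
                         &videoInput, [&videoInput] {
            // Push frames until the destination can no longer accept them.
            while (videoInput.sendVideoFrame(generateFrame())) { }
        });
    }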
diff --git a/src/multimedia/recording/qvideoframeinput.h b/src/multimedia/recording/qvideoframeinput.h
new file mode 100644
index 000000000..fbe56b7db
--- /dev/null
+++ b/src/multimedia/recording/qvideoframeinput.h
@@ -0,0 +1,48 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QVIDEOFRAMEINPUT_H
+#define QVIDEOFRAMEINPUT_H
+
+#include <QtMultimedia/qtmultimediaexports.h>
+#include <QtMultimedia/qvideoframe.h>
+#include <QtCore/qobject.h>
+
+QT_BEGIN_NAMESPACE
+
+class QPlatformVideoFrameInput;
+class QVideoFrameInputPrivate;
+class QMediaCaptureSession;
+
+class Q_MULTIMEDIA_EXPORT QVideoFrameInput : public QObject
+{
+ Q_OBJECT
+public:
+ explicit QVideoFrameInput(QObject *parent = nullptr);
+
+ explicit QVideoFrameInput(const QVideoFrameFormat &format, QObject *parent = nullptr);
+
+ ~QVideoFrameInput() override;
+
+ bool sendVideoFrame(const QVideoFrame &frame);
+
+ QVideoFrameFormat format() const;
+
+ QMediaCaptureSession *captureSession() const;
+
+Q_SIGNALS:
+ void readyToSendVideoFrame();
+
+private:
+ void setCaptureSession(QMediaCaptureSession *captureSession);
+
+ QPlatformVideoFrameInput *platformVideoFrameInput() const;
+
+ friend class QMediaCaptureSession;
+ Q_DISABLE_COPY(QVideoFrameInput)
+ Q_DECLARE_PRIVATE(QVideoFrameInput)
+};
+
+QT_END_NAMESPACE
+
+#endif // QVIDEOFRAMEINPUT_H
diff --git a/src/multimedia/video/qabstractvideobuffer.cpp b/src/multimedia/video/qabstractvideobuffer.cpp
index d65438855..7368082b1 100644
--- a/src/multimedia/video/qabstractvideobuffer.cpp
+++ b/src/multimedia/video/qabstractvideobuffer.cpp
@@ -1,139 +1,115 @@
-// Copyright (C) 2016 The Qt Company Ltd.
+// Copyright (C) 2024 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-#include "qabstractvideobuffer_p.h"
-
-#include <qvariant.h>
-#include <rhi/qrhi.h>
-
-#include <QDebug>
-
+#include "qabstractvideobuffer.h"
QT_BEGIN_NAMESPACE
/*!
\class QAbstractVideoBuffer
- \internal
+ \since 6.8
\brief The QAbstractVideoBuffer class is an abstraction for video data.
\inmodule QtMultimedia
\ingroup multimedia
\ingroup multimedia_video
- The QVideoFrame class makes use of a QAbstractVideoBuffer internally to reference a buffer of
- video data. Quite often video data buffers may reside in video memory rather than system
- memory, and this class provides an abstraction of the location.
-
- In addition, creating a subclass of QAbstractVideoBuffer will allow you to construct video
- frames from preallocated or static buffers. This caters for cases where the QVideoFrame constructors
- taking a QByteArray or a QImage do not suffice. This may be necessary when implementing
- a new hardware accelerated video system, for example.
+ The \l QVideoFrame class makes use of a QAbstractVideoBuffer internally to reference a buffer of
+ video data. Creating a subclass of QAbstractVideoBuffer allows you to construct video
+ frames from preallocated or static buffers. The subclass can contain a hardware buffer,
+ and implement access to the data by mapping the buffer to CPU memory.
The contents of a buffer can be accessed by mapping the buffer to memory using the map()
- function, which returns a pointer to memory containing the contents of the video buffer.
- The memory returned by map() is released by calling the unmap() function.
+ function, which returns a structure containing information about plane layout of the current
+ video data.
- The handle() of a buffer may also be used to manipulate its contents using type specific APIs.
- The type of a buffer's handle is given by the handleType() function.
-
- \sa QVideoFrame
+ \sa QVideoFrame, QVideoFrameFormat, QtVideo::MapMode
*/
/*!
- \enum QVideoFrame::HandleType
+ \class QAbstractVideoBuffer::MapData
+ \brief The QAbstractVideoBuffer::MapData structure describes the mapped plane layout.
+ \inmodule QtMultimedia
+ \ingroup multimedia
+ \ingroup multimedia_video
- Identifies the type of a video buffers handle.
+ The structure contains a number of mapped planes, and plane data for each plane,
+ specifically a number of bytes per line, a data pointer, and a data size.
+ The structure doesn't hold any ownership of the data it refers to.
- \value NoHandle
- The buffer has no handle, its data can only be accessed by mapping the buffer.
- \value RhiTextureHandle
- The handle of the buffer is defined by The Qt Rendering Hardware Interface
- (RHI). RHI is Qt's internal graphics abstraction for 3D APIs, such as
- OpenGL, Vulkan, Metal, and Direct 3D.
+ A default-constructed structure means that no data has been mapped.
- \sa handleType()
+ All the values in the structure default to zeros.
+
+ \sa QAbstractVideoBuffer::map
*/
/*!
- \enum QVideoFrame::MapMode
-
- Enumerates how a video buffer's data is mapped to system memory.
-
- \value NotMapped
- The video buffer is not mapped to memory.
- \value ReadOnly
- The mapped memory is populated with data from the video buffer when mapped,
- but the content of the mapped memory may be discarded when unmapped.
- \value WriteOnly
- The mapped memory is uninitialized when mapped, but the possibly modified
- content will be used to populate the video buffer when unmapped.
- \value ReadWrite
- The mapped memory is populated with data from the video
- buffer, and the video buffer is repopulated with the content of the mapped
- memory when it is unmapped.
-
- \sa mapMode(), map()
+ \variable QAbstractVideoBuffer::MapData::planeCount
+
+ The number of planes of the mapped video data. If the format of the data
+ is multiplanar, and the value is \c 1, the actual plane layout will
+ be calculated upon invoking \l QVideoFrame::map from the frame height,
+ \c{bytesPerLine[0]}, and \c{dataSize[0]}.
+
+ Defaults to \c 0.
*/
/*!
- Constructs an abstract video buffer of the given \a type.
+ \variable QAbstractVideoBuffer::MapData::bytesPerLine
+
+ The array of numbers of bytes per line for each
+ plane from \c 0 to \c{planeCount - 1}.
+
+ The values of the array default to \c 0.
*/
-QAbstractVideoBuffer::QAbstractVideoBuffer(QVideoFrame::HandleType type, QRhi *rhi)
- : m_type(type),
- m_rhi(rhi)
-{
-}
/*!
- Destroys an abstract video buffer.
+ \variable QAbstractVideoBuffer::MapData::data
+
+ The array of pointers to the mapped video pixel data
+ for each plane from \c 0 to \c{planeCount - 1}.
+ The implementation of QAbstractVideoBuffer must hold ownership of the data
+ at least until \l QAbstractVideoBuffer::unmap is called.
+
+ The values of the array default to \c nullptr.
*/
-QAbstractVideoBuffer::~QAbstractVideoBuffer()
-{
-}
/*!
- Returns the type of a video buffer's handle.
+ \variable QAbstractVideoBuffer::MapData::dataSize
+
+ The array of sizes in bytes of the mapped video pixel data
+ for each plane from \c 0 to \c{planeCount - 1}.
- \sa handle()
+ The values of the array default to \c 0.
*/
-QVideoFrame::HandleType QAbstractVideoBuffer::handleType() const
-{
- return m_type;
-}
+// must be out-of-line to ensure correct working of dynamic_cast when QHwVideoBuffer is created in tests
/*!
- Returns the QRhi instance.
+ Destroys a video buffer.
*/
-QRhi *QAbstractVideoBuffer::rhi() const
-{
- return m_rhi;
-}
+QAbstractVideoBuffer::~QAbstractVideoBuffer() = default;
-/*! \fn uchar *QAbstractVideoBuffer::map(MapMode mode, int *numBytes, int *bytesPerLine)
+/*! \fn QAbstractVideoBuffer::MapData QAbstractVideoBuffer::map(QtVideo::MapMode mode)
- Independently maps the planes of a video buffer to memory.
+ Maps the planes of a video buffer to memory.
- The map \a mode indicates whether the contents of the mapped memory should be read from and/or
- written to the buffer. If the map mode includes the \c QVideoFrame::ReadOnly flag the
- mapped memory will be populated with the content of the buffer when initially mapped. If the map
- mode includes the \c QVideoFrame::WriteOnly flag the content of the possibly modified
- mapped memory will be written back to the buffer when unmapped.
-
- When access to the data is no longer needed be sure to call the unmap() function to release the
- mapped memory and possibly update the buffer contents.
+ Returns a \l MapData structure that contains information about the plane layout of
+ the mapped video data. If the mapping fails, the method returns a default-constructed structure.
+ For CPU memory buffers, the data is considered already mapped, so the function
+ just returns the plane layout of the preallocated underlying data.
- Returns the number of planes in the mapped video data. For each plane the line stride of that
- plane will be returned in \a bytesPerLine, and a pointer to the plane data will be returned in
- \a data. The accumulative size of the mapped data is returned in \a numBytes.
-
- Not all buffer implementations will map more than the first plane, if this returns a single
- plane for a planar format the additional planes will have to be calculated from the line stride
- of the first plane and the frame height. Mapping a buffer with QVideoFrame will do this for
- you.
+ The map \a mode indicates whether the contents of the mapped memory should be read from and/or
+ written to the buffer. If the map mode includes the \c QtVideo::MapMode::WriteOnly flag,
+ the content of the possibly modified mapped memory is expected to be written back
+ to the buffer when unmapped.
- To implement this function create a derivative of QAbstractPlanarVideoBuffer and implement
- its map function instance instead.
+ When access to the data is no longer needed, the \l unmap function is called
+ to release the mapped memory and possibly update the buffer contents.
- \since 5.4
+ If the format of the video data is multiplanar, the method may map the whole pixel data
+ as a single plane. In this case, mapping a buffer with \l QVideoFrame
+ will calculate additional planes from the specified line stride of the first plane,
+ the frame height, and the data size.
*/
/*!
@@ -141,56 +117,23 @@ QRhi *QAbstractVideoBuffer::rhi() const
Releases the memory mapped by the map() function.
- If the \l {QVideoFrame::MapMode}{MapMode} included the \c QVideoFrame::WriteOnly
+ If the \l {QtVideo::MapMode}{MapMode} included the \c QtVideo::MapMode::WriteOnly
flag this will write the current content of the mapped memory back to the video frame.
- \sa map()
-*/
-
-/*! \fn quint64 QAbstractVideoBuffer::textureHandle(QRhi *rhi, int plane) const
-
- Returns a texture handle to the data buffer.
+ For CPU video buffers, the function does not need to be overridden.
+ The default implementation of \c unmap does nothing.
- \sa handleType()
+ \sa map()
*/
-/*
- \fn int QAbstractPlanarVideoBuffer::map(MapMode mode, int *numBytes, int bytesPerLine[4], uchar *data[4])
-
- Maps the contents of a video buffer to memory.
-
- The map \a mode indicates whether the contents of the mapped memory should be read from and/or
- written to the buffer. If the map mode includes the \c QVideoFrame::ReadOnly flag the
- mapped memory will be populated with the content of the buffer when initially mapped. If the map
- mode includes the \c QVideoFrame::WriteOnly flag the content of the possibly modified
- mapped memory will be written back to the buffer when unmapped.
-
- When access to the data is no longer needed be sure to call the unmap() function to release the
- mapped memory and possibly update the buffer contents.
+/*!
+ \fn QAbstractVideoBuffer::format() const
- Returns the number of planes in the mapped video data. For each plane the line stride of that
- plane will be returned in \a bytesPerLine, and a pointer to the plane data will be returned in
- \a data. The accumulative size of the mapped data is returned in \a numBytes.
+ Returns the \l QVideoFrameFormat of the underlying video buffer.
- \sa QAbstractVideoBuffer::map(), QAbstractVideoBuffer::unmap(), QVideoFrame::mapMode()
+ The format must be available upon construction of \l QVideoFrame.
+ QVideoFrame will contain its own instance of the given format, which
+ can be detached and modified.
*/
-#ifndef QT_NO_DEBUG_STREAM
-QDebug operator<<(QDebug dbg, QVideoFrame::MapMode mode)
-{
- QDebugStateSaver saver(dbg);
- dbg.nospace();
- switch (mode) {
- case QVideoFrame::ReadOnly:
- return dbg << "ReadOnly";
- case QVideoFrame::ReadWrite:
- return dbg << "ReadWrite";
- case QVideoFrame::WriteOnly:
- return dbg << "WriteOnly";
- default:
- return dbg << "NotMapped";
- }
-}
-#endif
-
QT_END_NAMESPACE
diff --git a/src/multimedia/video/qabstractvideobuffer.h b/src/multimedia/video/qabstractvideobuffer.h
new file mode 100644
index 000000000..3e046f3b4
--- /dev/null
+++ b/src/multimedia/video/qabstractvideobuffer.h
@@ -0,0 +1,32 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QABSTRACTVIDEOBUFFER_H
+#define QABSTRACTVIDEOBUFFER_H
+
+#include <QtMultimedia/qtmultimediaexports.h>
+#include <QtMultimedia/qvideoframeformat.h>
+#include <QtMultimedia/qtvideo.h>
+
+QT_BEGIN_NAMESPACE
+
+class Q_MULTIMEDIA_EXPORT QAbstractVideoBuffer
+{
+public:
+ struct MapData
+ {
+ int planeCount = 0;
+ int bytesPerLine[4] = {};
+ uchar *data[4] = {};
+ int dataSize[4] = {};
+ };
+
+ virtual ~QAbstractVideoBuffer();
+ virtual MapData map(QtVideo::MapMode mode) = 0;
+ virtual void unmap() { }
+ virtual QVideoFrameFormat format() const = 0;
+};
+
+QT_END_NAMESPACE
+
+#endif
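For illustration, a minimal subclass of the new public QAbstractVideoBuffer wrapping an externally owned, single-plane grayscale buffer (hypothetical; ownership and lifetime of the wrapped memory are the caller's responsibility):

    #include <QtMultimedia/qabstractvideobuffer.h>
    #include <QtMultimedia/qvideoframeformat.h>

    class ExternalGreyBuffer : public QAbstractVideoBuffer
    {
    public:
        ExternalGreyBuffer(uchar *bits, int width, int height, int bytesPerLine)
            : m_bits(bits), m_bytesPerLine(bytesPerLine),
              m_format(QSize(width, height), QVideoFrameFormat::Format_Y8)
        {
        }

        MapData map(QtVideo::MapMode /*mode*/) override
        {
            MapData data;
            data.planeCount = 1;
            data.bytesPerLine[0] = m_bytesPerLine;
            data.data[0] = m_bits;
            data.dataSize[0] = m_bytesPerLine * m_format.frameHeight();
            return data; // CPU memory is considered already mapped
        }

        // unmap() keeps the default no-op implementation, suitable for CPU memory.

        QVideoFrameFormat format() const override { return m_format; }

    private:
        uchar *m_bits; // not owned; must stay valid while the frame is in use
        int m_bytesPerLine;
        QVideoFrameFormat m_format;
    };

Such a buffer is presumably handed to a QVideoFrame through the constructor taking a QAbstractVideoBuffer, which this change set adds elsewhere.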
diff --git a/src/multimedia/video/qabstractvideobuffer_p.h b/src/multimedia/video/qabstractvideobuffer_p.h
deleted file mode 100644
index 2004e25f7..000000000
--- a/src/multimedia/video/qabstractvideobuffer_p.h
+++ /dev/null
@@ -1,82 +0,0 @@
-// Copyright (C) 2022 The Qt Company Ltd.
-// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-
-#ifndef QABSTRACTVIDEOBUFFER_H
-#define QABSTRACTVIDEOBUFFER_H
-
-//
-// W A R N I N G
-// -------------
-//
-// This file is not part of the Qt API. It exists purely as an
-// implementation detail. This header file may change from version to
-// version without notice, or even be removed.
-//
-// We mean it.
-//
-
-#include <QtMultimedia/qtmultimediaglobal.h>
-#include <QtMultimedia/qvideoframe.h>
-
-#include <QtCore/qmetatype.h>
-#include <QtGui/qmatrix4x4.h>
-#include <QtCore/private/qglobal_p.h>
-
-#include <memory>
-
-QT_BEGIN_NAMESPACE
-
-
-class QVariant;
-class QRhi;
-class QRhiTexture;
-
-class Q_MULTIMEDIA_EXPORT QVideoFrameTextures
-{
-public:
- virtual ~QVideoFrameTextures() {}
- virtual QRhiTexture *texture(uint plane) const = 0;
-};
-
-class Q_MULTIMEDIA_EXPORT QAbstractVideoBuffer
-{
-public:
- QAbstractVideoBuffer(QVideoFrame::HandleType type, QRhi *rhi = nullptr);
- virtual ~QAbstractVideoBuffer();
-
- QVideoFrame::HandleType handleType() const;
- QRhi *rhi() const;
-
- struct MapData
- {
- int nPlanes = 0;
- int bytesPerLine[4] = {};
- uchar *data[4] = {};
- int size[4] = {};
- };
-
- virtual QVideoFrame::MapMode mapMode() const = 0;
- virtual MapData map(QVideoFrame::MapMode mode) = 0;
- virtual void unmap() = 0;
-
- virtual std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *) { return {}; }
- virtual quint64 textureHandle(QRhi *, int /*plane*/) const { return 0; }
-
- virtual QMatrix4x4 externalTextureMatrix() const { return {}; }
-
- virtual QByteArray underlyingByteArray(int /*plane*/) const { return {}; }
-protected:
- QVideoFrame::HandleType m_type;
- QRhi *m_rhi = nullptr;
-
-private:
- Q_DISABLE_COPY(QAbstractVideoBuffer)
-};
-
-#ifndef QT_NO_DEBUG_STREAM
-Q_MULTIMEDIA_EXPORT QDebug operator<<(QDebug, QVideoFrame::MapMode);
-#endif
-
-QT_END_NAMESPACE
-
-#endif
diff --git a/src/multimedia/video/qhwvideobuffer.cpp b/src/multimedia/video/qhwvideobuffer.cpp
new file mode 100644
index 000000000..ecd3435d0
--- /dev/null
+++ b/src/multimedia/video/qhwvideobuffer.cpp
@@ -0,0 +1,17 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qhwvideobuffer_p.h"
+
+QT_BEGIN_NAMESPACE
+
+QVideoFrameTextures::~QVideoFrameTextures() = default;
+
+QHwVideoBuffer::QHwVideoBuffer(QVideoFrame::HandleType type, QRhi *rhi) : m_type(type), m_rhi(rhi)
+{
+}
+
+// must be out-of-line to ensure correct working of dynamic_cast when QHwVideoBuffer is created in tests
+QHwVideoBuffer::~QHwVideoBuffer() = default;
+
+QT_END_NAMESPACE
diff --git a/src/multimedia/video/qhwvideobuffer_p.h b/src/multimedia/video/qhwvideobuffer_p.h
new file mode 100644
index 000000000..fabf82dce
--- /dev/null
+++ b/src/multimedia/video/qhwvideobuffer_p.h
@@ -0,0 +1,58 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QHWVIDEOBUFFER_P_H
+#define QHWVIDEOBUFFER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qabstractvideobuffer.h"
+#include "qvideoframe.h"
+
+#include <QtGui/qmatrix4x4.h>
+
+QT_BEGIN_NAMESPACE
+
+class QRhi;
+class QRhiTexture;
+
+class Q_MULTIMEDIA_EXPORT QVideoFrameTextures
+{
+public:
+ virtual ~QVideoFrameTextures();
+ virtual QRhiTexture *texture(uint plane) const = 0;
+};
+
+class Q_MULTIMEDIA_EXPORT QHwVideoBuffer : public QAbstractVideoBuffer
+{
+public:
+ QHwVideoBuffer(QVideoFrame::HandleType type, QRhi *rhi = nullptr);
+
+ ~QHwVideoBuffer() override;
+
+ QVideoFrame::HandleType handleType() const { return m_type; }
+ QRhi *rhi() const { return m_rhi; }
+
+ QVideoFrameFormat format() const override { return {}; }
+
+ virtual std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *) { return {}; }
+ virtual quint64 textureHandle(QRhi *, int /*plane*/) const { return 0; }
+ virtual QMatrix4x4 externalTextureMatrix() const { return {}; }
+
+protected:
+ QVideoFrame::HandleType m_type;
+ QRhi *m_rhi = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif // QHWVIDEOBUFFER_P_H
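For backend code, a hypothetical QHwVideoBuffer subclass exposing a single pre-existing QRhiTexture could look as follows; texture creation and ownership are out of scope for this sketch:

    #include "qhwvideobuffer_p.h"
    #include <rhi/qrhi.h>
    #include <memory>

    class SingleTextureSet : public QVideoFrameTextures
    {
    public:
        explicit SingleTextureSet(QRhiTexture *t) : m_texture(t) { }
        QRhiTexture *texture(uint plane) const override { return plane == 0 ? m_texture : nullptr; }
    private:
        QRhiTexture *m_texture;
    };

    class RhiTextureBuffer : public QHwVideoBuffer
    {
    public:
        RhiTextureBuffer(QRhi *rhi, QRhiTexture *texture)
            : QHwVideoBuffer(QVideoFrame::RhiTextureHandle, rhi), m_texture(texture) { }

        MapData map(QtVideo::MapMode) override { return {}; } // no CPU mapping provided

        std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *) override
        {
            return std::make_unique<SingleTextureSet>(m_texture);
        }

    private:
        QRhiTexture *m_texture; // not owned by this sketch
    };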
diff --git a/src/multimedia/video/qimagevideobuffer.cpp b/src/multimedia/video/qimagevideobuffer.cpp
index bc825004e..400b89319 100644
--- a/src/multimedia/video/qimagevideobuffer.cpp
+++ b/src/multimedia/video/qimagevideobuffer.cpp
@@ -51,37 +51,25 @@ QImage fixImage(QImage image)
} // namespace
-QImageVideoBuffer::QImageVideoBuffer(QImage image)
- : QAbstractVideoBuffer(QVideoFrame::NoHandle), m_image(fixImage(std::move(image)))
-{
-}
-
-QVideoFrame::MapMode QImageVideoBuffer::mapMode() const
-{
- return m_mapMode;
-}
+QImageVideoBuffer::QImageVideoBuffer(QImage image) : m_image(fixImage(std::move(image))) { }
-QAbstractVideoBuffer::MapData QImageVideoBuffer::map(QVideoFrame::MapMode mode)
+QAbstractVideoBuffer::MapData QImageVideoBuffer::map(QtVideo::MapMode mode)
{
MapData mapData;
- if (m_mapMode == QVideoFrame::NotMapped && !m_image.isNull()
- && mode != QVideoFrame::NotMapped) {
- m_mapMode = mode;
- mapData.nPlanes = 1;
+ if (!m_image.isNull()) {
+ mapData.planeCount = 1;
mapData.bytesPerLine[0] = m_image.bytesPerLine();
- mapData.data[0] = m_image.bits();
- mapData.size[0] = m_image.sizeInBytes();
+ if (mode == QtVideo::MapMode::ReadOnly)
+ mapData.data[0] = const_cast<uint8_t *>(m_image.constBits());
+ else
+ mapData.data[0] = m_image.bits();
+ mapData.dataSize[0] = m_image.sizeInBytes();
}
return mapData;
}
-void QImageVideoBuffer::unmap()
-{
- m_mapMode = QVideoFrame::NotMapped;
-}
-
QImage QImageVideoBuffer::underlyingImage() const
{
return m_image;
diff --git a/src/multimedia/video/qimagevideobuffer_p.h b/src/multimedia/video/qimagevideobuffer_p.h
index e5467563a..4ea894ba8 100644
--- a/src/multimedia/video/qimagevideobuffer_p.h
+++ b/src/multimedia/video/qimagevideobuffer_p.h
@@ -4,7 +4,7 @@
#ifndef QIMAGEVIDEOBUFFER_P_H
#define QIMAGEVIDEOBUFFER_P_H
-#include <private/qabstractvideobuffer_p.h>
+#include <qabstractvideobuffer.h>
#include <qimage.h>
//
@@ -25,16 +25,13 @@ class Q_MULTIMEDIA_EXPORT QImageVideoBuffer : public QAbstractVideoBuffer
public:
QImageVideoBuffer(QImage image);
- QVideoFrame::MapMode mapMode() const override;
+ MapData map(QtVideo::MapMode mode) override;
- MapData map(QVideoFrame::MapMode mode) override;
-
- void unmap() override;
+ QVideoFrameFormat format() const override { return {}; }
QImage underlyingImage() const;
private:
- QVideoFrame::MapMode m_mapMode = QVideoFrame::NotMapped;
QImage m_image;
};
diff --git a/src/multimedia/video/qmemoryvideobuffer.cpp b/src/multimedia/video/qmemoryvideobuffer.cpp
index bcbbe7e59..0940d2ca4 100644
--- a/src/multimedia/video/qmemoryvideobuffer.cpp
+++ b/src/multimedia/video/qmemoryvideobuffer.cpp
@@ -18,9 +18,7 @@ QT_BEGIN_NAMESPACE
Constructs a video buffer with an image stride of \a bytesPerLine from a byte \a array.
*/
QMemoryVideoBuffer::QMemoryVideoBuffer(QByteArray data, int bytesPerLine)
- : QAbstractVideoBuffer(QVideoFrame::NoHandle),
- m_bytesPerLine(bytesPerLine),
- m_data(std::move(data))
+ : m_bytesPerLine(bytesPerLine), m_data(std::move(data))
{
}
@@ -32,48 +30,23 @@ QMemoryVideoBuffer::~QMemoryVideoBuffer() = default;
/*!
\reimp
*/
-QVideoFrame::MapMode QMemoryVideoBuffer::mapMode() const
-{
- return m_mapMode;
-}
-
-/*!
- \reimp
-*/
-QAbstractVideoBuffer::MapData QMemoryVideoBuffer::map(QVideoFrame::MapMode mode)
+QAbstractVideoBuffer::MapData QMemoryVideoBuffer::map(QtVideo::MapMode mode)
{
MapData mapData;
- if (m_mapMode == QVideoFrame::NotMapped && m_data.size() && mode != QVideoFrame::NotMapped) {
- m_mapMode = mode;
- mapData.nPlanes = 1;
+ if (!m_data.isEmpty()) {
+ mapData.planeCount = 1;
mapData.bytesPerLine[0] = m_bytesPerLine;
// avoid detaching and extra copying in case the underlyingByteArray is
// being held by textures or anything else.
- if (mode == QVideoFrame::ReadOnly)
- mapData.data[0] = reinterpret_cast<uchar *>(const_cast<char*>(m_data.constData()));
+ if (mode == QtVideo::MapMode::ReadOnly)
+ mapData.data[0] = reinterpret_cast<uchar *>(const_cast<char *>(m_data.constData()));
else
mapData.data[0] = reinterpret_cast<uchar *>(m_data.data());
- mapData.size[0] = m_data.size();
+ mapData.dataSize[0] = m_data.size();
}
return mapData;
}
-/*!
- \reimp
-*/
-void QMemoryVideoBuffer::unmap()
-{
- m_mapMode = QVideoFrame::NotMapped;
-}
-
-/*!
- \reimp
-*/
-QByteArray QMemoryVideoBuffer::underlyingByteArray(int plane) const
-{
- return plane == 0 ? m_data : QByteArray{};
-}
-
QT_END_NAMESPACE
diff --git a/src/multimedia/video/qmemoryvideobuffer_p.h b/src/multimedia/video/qmemoryvideobuffer_p.h
index ec97abd4f..1bd5d6be2 100644
--- a/src/multimedia/video/qmemoryvideobuffer_p.h
+++ b/src/multimedia/video/qmemoryvideobuffer_p.h
@@ -4,7 +4,7 @@
#ifndef QMEMORYVIDEOBUFFER_P_H
#define QMEMORYVIDEOBUFFER_P_H
-#include <private/qabstractvideobuffer_p.h>
+#include "qabstractvideobuffer.h"
//
// W A R N I N G
@@ -23,17 +23,14 @@ class Q_MULTIMEDIA_EXPORT QMemoryVideoBuffer : public QAbstractVideoBuffer
{
public:
QMemoryVideoBuffer(QByteArray data, int bytesPerLine);
- ~QMemoryVideoBuffer();
+ ~QMemoryVideoBuffer() override;
- QVideoFrame::MapMode mapMode() const override;
+ MapData map(QtVideo::MapMode mode) override;
- MapData map(QVideoFrame::MapMode mode) override;
- void unmap() override;
+ QVideoFrameFormat format() const override { return {}; }
- QByteArray underlyingByteArray(int plane) const override;
private:
int m_bytesPerLine = 0;
- QVideoFrame::MapMode m_mapMode = QVideoFrame::NotMapped;
QByteArray m_data;
};
diff --git a/src/multimedia/video/qtvideo.cpp b/src/multimedia/video/qtvideo.cpp
index 29747b776..b971afbce 100644
--- a/src/multimedia/video/qtvideo.cpp
+++ b/src/multimedia/video/qtvideo.cpp
@@ -25,6 +25,27 @@ QT_BEGIN_NAMESPACE
\value Clockwise270 The frame should be rotated clockwise by 270 degrees
*/
+/*!
+ \enum QtVideo::MapMode
+
+ Enumerates how a video buffer's data is mapped to system memory.
+
+ \value NotMapped
+ The video buffer is not mapped to memory.
+ \value ReadOnly
+ The mapped memory is populated with data from the video buffer when mapped,
+ but the content of the mapped memory may be discarded when unmapped.
+ \value WriteOnly
+ The mapped memory is uninitialized when mapped, but the possibly modified
+ content will be used to populate the video buffer when unmapped.
+ \value ReadWrite
+ The mapped memory is populated with data from the video
+ buffer, and the video buffer is repopulated with the content of the mapped
+ memory when it is unmapped.
+
+ \sa QVideoFrame::mapMode(), QVideoFrame::map()
+*/
+
QT_END_NAMESPACE
#include "moc_qtvideo.cpp"
diff --git a/src/multimedia/video/qtvideo.h b/src/multimedia/video/qtvideo.h
index a5f22ea2c..4106f568a 100644
--- a/src/multimedia/video/qtvideo.h
+++ b/src/multimedia/video/qtvideo.h
@@ -14,10 +14,44 @@ namespace QtVideo
{
Q_NAMESPACE_EXPORT(Q_MULTIMEDIA_EXPORT)
-enum class Rotation { None = 0, Clockwise90 = 90, Clockwise180 = 180, Clockwise270 = 270 };
+enum class Rotation {
+ None = 0,
+ Clockwise90 = 90,
+ Clockwise180 = 180,
+ Clockwise270 = 270,
+};
Q_ENUM_NS(Rotation)
+
+enum class MapMode {
+ NotMapped = 0x00,
+ ReadOnly = 0x01,
+ WriteOnly = 0x02,
+ ReadWrite = ReadOnly | WriteOnly,
+};
+Q_ENUM_NS(MapMode)
+
+constexpr MapMode operator&(MapMode lhs, MapMode rhs)
+{
+ return MapMode(qToUnderlying(lhs) & qToUnderlying(rhs));
}
+constexpr MapMode operator|(MapMode lhs, MapMode rhs)
+{
+ return MapMode(qToUnderlying(lhs) | qToUnderlying(rhs));
+}
+
+constexpr MapMode &operator&=(MapMode &lhs, MapMode rhs)
+{
+ return (lhs = lhs & rhs);
+}
+
+constexpr MapMode &operator|=(MapMode &lhs, MapMode rhs)
+{
+ return (lhs = lhs | rhs);
+}
+
+} // namespace QtVideo
+
QT_END_NAMESPACE
#endif // QTVIDEO_H
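The new flag operators make QtVideo::MapMode composable at compile time; a small sketch (not part of the patch):

    constexpr QtVideo::MapMode mode = QtVideo::MapMode::ReadOnly | QtVideo::MapMode::WriteOnly;
    static_assert(mode == QtVideo::MapMode::ReadWrite);
    static_assert((mode & QtVideo::MapMode::ReadOnly) != QtVideo::MapMode::NotMapped);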
diff --git a/src/multimedia/video/qvideoframe.cpp b/src/multimedia/video/qvideoframe.cpp
index de981f423..9da4ea3b8 100644
--- a/src/multimedia/video/qvideoframe.cpp
+++ b/src/multimedia/video/qvideoframe.cpp
@@ -58,6 +58,23 @@ QT_DEFINE_QESDP_SPECIALIZATION_DTOR(QVideoFramePrivate);
\note Since video frames can be expensive to copy, QVideoFrame is explicitly shared, so any
change made to a video frame will also apply to any copies.
+
+ \sa QAbstractVideoBuffer, QVideoFrameFormat, QtVideo::MapMode
+*/
+
+/*!
+ \enum QVideoFrame::HandleType
+
+ Identifies the type of a video buffer's handle.
+
+ \value NoHandle
+ The buffer has no handle; its data can only be accessed by mapping the buffer.
+ \value RhiTextureHandle
+ The handle of the buffer is defined by the Qt Rendering Hardware Interface
+ (RHI). RHI is Qt's internal graphics abstraction for 3D APIs, such as
+ OpenGL, Vulkan, Metal, and Direct 3D.
+
+ \sa handleType()
*/
@@ -68,6 +85,8 @@ QVideoFrame::QVideoFrame()
{
}
+#if QT_DEPRECATED_SINCE(6, 8)
+
/*!
\internal
Constructs a video frame from a \a buffer with the given pixel \a format and \a size in pixels.
@@ -75,9 +94,8 @@ QVideoFrame::QVideoFrame()
\note This doesn't increment the reference count of the video buffer.
*/
QVideoFrame::QVideoFrame(QAbstractVideoBuffer *buffer, const QVideoFrameFormat &format)
- : d(new QVideoFramePrivate(format))
+ : d(new QVideoFramePrivate(format, std::unique_ptr<QAbstractVideoBuffer>(buffer)))
{
- d->buffer.reset(buffer);
}
/*!
@@ -85,9 +103,11 @@ QVideoFrame::QVideoFrame(QAbstractVideoBuffer *buffer, const QVideoFrameFormat &
*/
QAbstractVideoBuffer *QVideoFrame::videoBuffer() const
{
- return d ? d->buffer.get() : nullptr;
+ return d ? d->videoBuffer.get() : nullptr;
}
+#endif
+
/*!
Constructs a video frame of the given pixel \a format.
@@ -103,22 +123,25 @@ QVideoFrame::QVideoFrame(const QVideoFrameFormat &format)
// Check the memory was successfully allocated.
if (!data.isEmpty())
- d->buffer = std::make_unique<QMemoryVideoBuffer>(data, textureDescription->strideForWidth(format.frameWidth()));
+ d->videoBuffer = std::make_unique<QMemoryVideoBuffer>(
+ data, textureDescription->strideForWidth(format.frameWidth()));
}
}
/*!
- Constructs a QVideoFrame from a QImage. The QImage pixels are copied
- into the QVideoFrame's memory buffer. The resulting frame has the
- same size as the QImage, but the number of bytes per line may
- differ.
+ Constructs a QVideoFrame from a QImage.
\since 6.8
If the QImage::Format matches one of the formats in
- QVideoFrameFormat::PixelFormat, the QVideoFrame will use that format
- without any pixel format conversion. Otherwise, the image is first
- converted to a supported (A)RGB format using QImage::convertedTo()
- with the Qt::AutoColor flag. This may incur a performance penalty.
+ QVideoFrameFormat::PixelFormat, the QVideoFrame will hold an instance of
+ the \a image and use that format without any pixel format conversion.
+ In this case, pixel data will be copied only if you call \l{QVideoFrame::map}
+ with the \c WriteOnly flag while keeping the original image.
+
+ Otherwise, if the QImage::Format matches none of the video formats,
+ the image is first converted to a supported (A)RGB format using
+ QImage::convertedTo() with the Qt::AutoColor flag.
+ This may incur a performance penalty.
If QImage::isNull() evaluates to true for the input QImage, the
QVideoFrame will be invalid and QVideoFrameFormat::isValid() will
@@ -151,6 +174,47 @@ QVideoFrame::QVideoFrame(const QImage &image)
}
/*!
+ Constructs a QVideoFrame from a \l QAbstractVideoBuffer.
+
+ \since 6.8
+
+ The specified \a videoBuffer refers to an instance of a reimplemented
+ \l QAbstractVideoBuffer. The instance is expected to contain a preallocated custom
+ video buffer and must implement \l QAbstractVideoBuffer::format,
+ \l QAbstractVideoBuffer::map, and \l QAbstractVideoBuffer::unmap for GPU content.
+
+ If \a videoBuffer is null or provides an invalid \l QVideoFrameFormat,
+ the constructor creates an invalid video frame.
+
+ The created frame will hold ownership of the specified video buffer for its lifetime.
+ Considering that QVideoFrame is implemented via a shared private object,
+ the specified video buffer will be destroyed upon destruction of the last copy
+ of the created video frame.
+
+ Note that if a video frame has been passed to \l QMediaRecorder or a rendering pipeline,
+ the lifetime of the frame is undefined, and the media recorder can destroy it
+ in a different thread.
+
+ QVideoFrame will contain its own instance of QVideoFrameFormat.
+ Upon invoking \l setStreamFrameRate, \l setMirrored, or \l setRotation,
+ the inner format can be modified, and \l surfaceFormat will return
+ a detached instance.
+
+ \sa QAbstractVideoBuffer, QVideoFrameFormat
+*/
+QVideoFrame::QVideoFrame(std::unique_ptr<QAbstractVideoBuffer> videoBuffer)
+{
+ if (!videoBuffer)
+ return;
+
+ QVideoFrameFormat format = videoBuffer->format();
+ if (!format.isValid())
+ return;
+
+ d = new QVideoFramePrivate{ std::move(format), std::move(videoBuffer) };
+}
+
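A compact end-to-end sketch of the public QAbstractVideoBuffer path described above (hypothetical names, not part of the patch): a CPU-backed buffer implementing format() and map(), wrapped into a frame by the constructor above.

    class RgbaBuffer : public QAbstractVideoBuffer
    {
    public:
        RgbaBuffer(QByteArray pixels, QSize size) : m_pixels(std::move(pixels)), m_size(size) { }

        QVideoFrameFormat format() const override
        { return QVideoFrameFormat(m_size, QVideoFrameFormat::Format_RGBA8888); }

        MapData map(QtVideo::MapMode) override
        {
            // A production implementation could avoid detaching on ReadOnly,
            // as QMemoryVideoBuffer does; kept simple here.
            MapData md;
            md.planeCount = 1;
            md.bytesPerLine[0] = m_size.width() * 4;
            md.data[0] = reinterpret_cast<uchar *>(m_pixels.data());
            md.dataSize[0] = m_pixels.size();
            return md;
        }
        void unmap() override { }

    private:
        QByteArray m_pixels;
        QSize m_size;
    };

    QByteArray pixels(640 * 480 * 4, '\0');
    QVideoFrame frame(std::make_unique<RgbaBuffer>(pixels, QSize(640, 480)));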
+/*!
Constructs a shallow copy of \a other. Since QVideoFrame is
explicitly shared, these two instances will reflect the same frame.
@@ -213,7 +277,7 @@ QVideoFrame::~QVideoFrame() = default;
*/
bool QVideoFrame::isValid() const
{
- return (d && d->buffer) && d->format.pixelFormat() != QVideoFrameFormat::Format_Invalid;
+ return d && d->videoBuffer && d->format.pixelFormat() != QVideoFrameFormat::Format_Invalid;
}
/*!
@@ -240,7 +304,7 @@ QVideoFrameFormat QVideoFrame::surfaceFormat() const
*/
QVideoFrame::HandleType QVideoFrame::handleType() const
{
- return (d && d->buffer) ? d->buffer->handleType() : QVideoFrame::NoHandle;
+ return (d && d->hwVideoBuffer) ? d->hwVideoBuffer->handleType() : QVideoFrame::NoHandle;
}
/*!
@@ -270,25 +334,25 @@ int QVideoFrame::height() const
/*!
Identifies if a video frame's contents are currently mapped to system memory.
- This is a convenience function which checks that the \l {QVideoFrame::MapMode}{MapMode}
- of the frame is not equal to QVideoFrame::NotMapped.
+ This is a convenience function which checks that the \l {QtVideo::MapMode}{MapMode}
+ of the frame is not equal to QtVideo::MapMode::NotMapped.
Returns true if the contents of the video frame are mapped to system memory, and false
otherwise.
- \sa mapMode(), QVideoFrame::MapMode
+ \sa mapMode(), QtVideo::MapMode
*/
bool QVideoFrame::isMapped() const
{
- return d && d->buffer && d->buffer->mapMode() != QVideoFrame::NotMapped;
+ return d && d->mapMode != QtVideo::MapMode::NotMapped;
}
/*!
Identifies if the mapped contents of a video frame will be persisted when the frame is unmapped.
- This is a convenience function which checks if the \l {QVideoFrame::MapMode}{MapMode}
- contains the QVideoFrame::WriteOnly flag.
+ This is a convenience function which checks if the \l {QtVideo::MapMode}{MapMode}
+ contains the QtVideo::MapMode::WriteOnly flag.
Returns true if the video frame will be updated when unmapped, and false otherwise.
@@ -296,37 +360,37 @@ bool QVideoFrame::isMapped() const
Depending on the buffer implementation the changes may be persisted, or worse alter a shared
buffer.
- \sa mapMode(), QVideoFrame::MapMode
+ \sa mapMode(), QtVideo::MapMode
*/
bool QVideoFrame::isWritable() const
{
- return d && d->buffer && (d->buffer->mapMode() & QVideoFrame::WriteOnly);
+ return d && (d->mapMode & QtVideo::MapMode::WriteOnly) != QtVideo::MapMode::NotMapped;
}
/*!
Identifies if the mapped contents of a video frame were read from the frame when it was mapped.
- This is a convenience function which checks if the \l {QVideoFrame::MapMode}{MapMode}
- contains the QVideoFrame::WriteOnly flag.
+ This is a convenience function which checks if the \l {QtVideo::MapMode}{MapMode}
+ contains the QtVideo::MapMode::ReadOnly flag.
Returns true if the contents of the mapped memory were read from the video frame, and false
otherwise.
- \sa mapMode(), QVideoFrame::MapMode
+ \sa mapMode(), QtVideo::MapMode
*/
bool QVideoFrame::isReadable() const
{
- return d && d->buffer && (d->buffer->mapMode() & QVideoFrame::ReadOnly);
+ return d && (d->mapMode & QtVideo::MapMode::ReadOnly) != QtVideo::MapMode::NotMapped;
}
/*!
Returns the mode a video frame was mapped to system memory in.
- \sa map(), QVideoFrame::MapMode
+ \sa map(), QtVideo::MapMode
*/
QVideoFrame::MapMode QVideoFrame::mapMode() const
{
- return (d && d->buffer) ? d->buffer->mapMode() : QVideoFrame::NotMapped;
+ return static_cast<QVideoFrame::MapMode>(d ? d->mapMode : QtVideo::MapMode::NotMapped);
}
/*!
@@ -337,9 +401,9 @@ QVideoFrame::MapMode QVideoFrame::mapMode() const
copying the contents around, so avoid mapping and unmapping unless required.
The map \a mode indicates whether the contents of the mapped memory should be read from and/or
- written to the frame. If the map mode includes the \c QVideoFrame::ReadOnly flag the
+ written to the frame. If the map mode includes the \c QtVideo::MapMode::ReadOnly flag the
mapped memory will be populated with the content of the video frame when initially mapped. If the map
- mode includes the \c QVideoFrame::WriteOnly flag the content of the possibly modified
+ mode includes the \c QtVideo::MapMode::WriteOnly flag the content of the possibly modified
mapped memory will be written back to the frame when unmapped.
While mapped the contents of a video frame can be accessed directly through the pointer returned
@@ -359,20 +423,18 @@ QVideoFrame::MapMode QVideoFrame::mapMode() const
\sa unmap(), mapMode(), bits()
*/
-bool QVideoFrame::map(QVideoFrame::MapMode mode)
+bool QVideoFrame::map(QtVideo::MapMode mode)
{
-
- if (!d || !d->buffer)
+ if (!d || !d->videoBuffer)
return false;
QMutexLocker lock(&d->mapMutex);
- if (mode == QVideoFrame::NotMapped)
+ if (mode == QtVideo::MapMode::NotMapped)
return false;
if (d->mappedCount > 0) {
//it's allowed to map the video frame multiple times in read only mode
- if (d->buffer->mapMode() == QVideoFrame::ReadOnly
- && mode == QVideoFrame::ReadOnly) {
+ if (d->mapMode == QtVideo::MapMode::ReadOnly && mode == QtVideo::MapMode::ReadOnly) {
d->mappedCount++;
return true;
}
@@ -382,14 +444,16 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
Q_ASSERT(d->mapData.data[0] == nullptr);
Q_ASSERT(d->mapData.bytesPerLine[0] == 0);
- Q_ASSERT(d->mapData.nPlanes == 0);
- Q_ASSERT(d->mapData.size[0] == 0);
+ Q_ASSERT(d->mapData.planeCount == 0);
+ Q_ASSERT(d->mapData.dataSize[0] == 0);
- d->mapData = d->buffer->map(mode);
- if (d->mapData.nPlanes == 0)
+ d->mapData = d->videoBuffer->map(mode);
+ if (d->mapData.planeCount == 0)
return false;
- if (d->mapData.nPlanes == 1) {
+ d->mapMode = mode;
+
+ if (d->mapData.planeCount == 1) {
auto pixelFmt = d->format.pixelFormat();
// If the plane count is 1 derive the additional planes for planar formats.
switch (pixelFmt) {
@@ -427,16 +491,16 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
const int height = this->height();
const int yStride = d->mapData.bytesPerLine[0];
const int uvHeight = pixelFmt == QVideoFrameFormat::Format_YUV422P ? height : height / 2;
- const int uvStride = (d->mapData.size[0] - (yStride * height)) / uvHeight / 2;
+ const int uvStride = (d->mapData.dataSize[0] - (yStride * height)) / uvHeight / 2;
// Three planes, the second and third vertically (and horizontally for other than Format_YUV422P formats) subsampled.
- d->mapData.nPlanes = 3;
+ d->mapData.planeCount = 3;
d->mapData.bytesPerLine[2] = d->mapData.bytesPerLine[1] = uvStride;
- d->mapData.size[0] = yStride * height;
- d->mapData.size[1] = uvStride * uvHeight;
- d->mapData.size[2] = uvStride * uvHeight;
- d->mapData.data[1] = d->mapData.data[0] + d->mapData.size[0];
- d->mapData.data[2] = d->mapData.data[1] + d->mapData.size[1];
+ d->mapData.dataSize[0] = yStride * height;
+ d->mapData.dataSize[1] = uvStride * uvHeight;
+ d->mapData.dataSize[2] = uvStride * uvHeight;
+ d->mapData.data[1] = d->mapData.data[0] + d->mapData.dataSize[0];
+ d->mapData.data[2] = d->mapData.data[1] + d->mapData.dataSize[1];
break;
}
case QVideoFrameFormat::Format_NV12:
@@ -446,25 +510,25 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
case QVideoFrameFormat::Format_P010:
case QVideoFrameFormat::Format_P016: {
// Semi planar, Full resolution Y plane with interleaved subsampled U and V planes.
- d->mapData.nPlanes = 2;
+ d->mapData.planeCount = 2;
d->mapData.bytesPerLine[1] = d->mapData.bytesPerLine[0];
- int size = d->mapData.size[0];
- d->mapData.size[0] = (d->mapData.bytesPerLine[0] * height());
- d->mapData.size[1] = size - d->mapData.size[0];
- d->mapData.data[1] = d->mapData.data[0] + d->mapData.size[0];
+ int size = d->mapData.dataSize[0];
+ d->mapData.dataSize[0] = (d->mapData.bytesPerLine[0] * height());
+ d->mapData.dataSize[1] = size - d->mapData.dataSize[0];
+ d->mapData.data[1] = d->mapData.data[0] + d->mapData.dataSize[0];
break;
}
case QVideoFrameFormat::Format_IMC1:
case QVideoFrameFormat::Format_IMC3: {
// Three planes, the second and third vertically and horizontally subsumpled,
// but with lines padded to the width of the first plane.
- d->mapData.nPlanes = 3;
+ d->mapData.planeCount = 3;
d->mapData.bytesPerLine[2] = d->mapData.bytesPerLine[1] = d->mapData.bytesPerLine[0];
- d->mapData.size[0] = (d->mapData.bytesPerLine[0] * height());
- d->mapData.size[1] = (d->mapData.bytesPerLine[0] * height() / 2);
- d->mapData.size[2] = (d->mapData.bytesPerLine[0] * height() / 2);
- d->mapData.data[1] = d->mapData.data[0] + d->mapData.size[0];
- d->mapData.data[2] = d->mapData.data[1] + d->mapData.size[1];
+ d->mapData.dataSize[0] = (d->mapData.bytesPerLine[0] * height());
+ d->mapData.dataSize[1] = (d->mapData.bytesPerLine[0] * height() / 2);
+ d->mapData.dataSize[2] = (d->mapData.bytesPerLine[0] * height() / 2);
+ d->mapData.data[1] = d->mapData.data[0] + d->mapData.dataSize[0];
+ d->mapData.data[2] = d->mapData.data[1] + d->mapData.dataSize[1];
break;
}
}
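A worked cross-check of the plane derivation above (not part of the patch): for a 640x480 Format_YUV420P frame mapped as one contiguous plane with bytesPerLine[0] = 640, the buffer reports dataSize[0] = 640 * 480 * 3 / 2 = 460800, and the code splits it as

    dataSize[0] = 640 * 480 = 307200                         (Y plane)
    uvHeight    = 480 / 2 = 240
    uvStride    = (460800 - 307200) / 240 / 2 = 320
    dataSize[1] = dataSize[2] = 320 * 240 = 76800            (U and V planes)
    data[1] = data[0] + 307200,   data[2] = data[1] + 76800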
@@ -475,7 +539,7 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
// unlock mapMutex to avoid potential deadlock imageMutex <--> mapMutex
lock.unlock();
- if ((mode & QVideoFrame::WriteOnly) != 0) {
+ if ((mode & QtVideo::MapMode::WriteOnly) != QtVideo::MapMode::NotMapped) {
QMutexLocker lock(&d->imageMutex);
d->image = {};
}
@@ -483,10 +547,73 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
return true;
}
+#if QT_DEPRECATED_SINCE(6, 8)
+
+/*!
+ \enum QVideoFrame::MapMode
+ \deprecated [6.8] Use \c QtVideo::MapMode instead. The values of this enum
+ are consistent with the values of \c QtVideo::MapMode.
+
+ Enumerates how a video buffer's data is mapped to system memory.
+
+ \value NotMapped
+ The video buffer is not mapped to memory.
+ \value ReadOnly
+ The mapped memory is populated with data from the video buffer when mapped,
+ but the content of the mapped memory may be discarded when unmapped.
+ \value WriteOnly
+ The mapped memory is uninitialized when mapped, but the possibly modified
+ content will be used to populate the video buffer when unmapped.
+ \value ReadWrite
+ The mapped memory is populated with data from the video
+ buffer, and the video buffer is repopulated with the content of the mapped
+ memory when it is unmapped.
+
+ \sa mapMode(), map()
+*/
+
+/*!
+ \deprecated [6.8] Use \c QVideoFrame::map(QtVideo::MapMode) instead.
+ Maps the contents of a video frame to system (CPU addressable) memory.
+
+ In some cases the video frame data might be stored in video memory or otherwise inaccessible
+ memory, so it is necessary to map a frame before accessing the pixel data. This may involve
+ copying the contents around, so avoid mapping and unmapping unless required.
+
+ The map \a mode indicates whether the contents of the mapped memory should be read from and/or
+ written to the frame. If the map mode includes the \c QVideoFrame::ReadOnly flag the
+ mapped memory will be populated with the content of the video frame when initially mapped. If the map
+ mode includes the \c QVideoFrame::WriteOnly flag the content of the possibly modified
+ mapped memory will be written back to the frame when unmapped.
+
+ While mapped the contents of a video frame can be accessed directly through the pointer returned
+ by the bits() function.
+
+ When access to the data is no longer needed, be sure to call the unmap() function to release the
+ mapped memory and possibly update the video frame contents.
+
+ If the video frame has been mapped in read only mode, it is permissible to map it
+ multiple times in read only mode (and unmap it a corresponding number of times). In all
+ other cases it is necessary to unmap the frame first before mapping a second time.
+
+ \note Writing to memory that is mapped as read-only is undefined, and may result in changes
+ to shared data or crashes.
+
+ Returns true if the frame was mapped to memory in the given \a mode and false otherwise.
+
+ \sa unmap(), mapMode(), bits()
+*/
+bool QVideoFrame::map(QVideoFrame::MapMode mode)
+{
+ return map(static_cast<QtVideo::MapMode>(mode));
+}
+
+#endif
+
/*!
Releases the memory mapped by the map() function.
- If the \l {QVideoFrame::MapMode}{MapMode} included the QVideoFrame::WriteOnly
+ If the \l {QtVideo::MapMode}{MapMode} included the QtVideo::MapMode::WriteOnly
flag this will persist the current content of the mapped memory to the video frame.
unmap() should not be called if map() function failed.
@@ -495,7 +622,7 @@ bool QVideoFrame::map(QVideoFrame::MapMode mode)
*/
void QVideoFrame::unmap()
{
- if (!d || !d->buffer)
+ if (!d || !d->videoBuffer)
return;
QMutexLocker lock(&d->mapMutex);
@@ -509,7 +636,8 @@ void QVideoFrame::unmap()
if (d->mappedCount == 0) {
d->mapData = {};
- d->buffer->unmap();
+ d->mapMode = QtVideo::MapMode::NotMapped;
+ d->videoBuffer->unmap();
}
}
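Read-path sketch tying map(), the per-plane accessors, and unmap() together (illustrative only, not part of the patch; assumes frame is a valid QVideoFrame and <QDebug> is included):

    if (frame.map(QtVideo::MapMode::ReadOnly)) {
        for (int plane = 0; plane < frame.planeCount(); ++plane)
            qDebug() << "plane" << plane
                     << "stride" << frame.bytesPerLine(plane)
                     << "bytes" << frame.mappedBytes(plane);
        frame.unmap(); // balance every successful map()
    }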
@@ -526,7 +654,7 @@ int QVideoFrame::bytesPerLine(int plane) const
{
if (!d)
return 0;
- return plane >= 0 && plane < d->mapData.nPlanes ? d->mapData.bytesPerLine[plane] : 0;
+ return plane >= 0 && plane < d->mapData.planeCount ? d->mapData.bytesPerLine[plane] : 0;
}
/*!
@@ -545,7 +673,7 @@ uchar *QVideoFrame::bits(int plane)
{
if (!d)
return nullptr;
- return plane >= 0 && plane < d->mapData.nPlanes ? d->mapData.data[plane] : nullptr;
+ return plane >= 0 && plane < d->mapData.planeCount ? d->mapData.data[plane] : nullptr;
}
/*!
@@ -563,7 +691,7 @@ const uchar *QVideoFrame::bits(int plane) const
{
if (!d)
return nullptr;
- return plane >= 0 && plane < d->mapData.nPlanes ? d->mapData.data[plane] : nullptr;
+ return plane >= 0 && plane < d->mapData.planeCount ? d->mapData.data[plane] : nullptr;
}
/*!
@@ -577,7 +705,7 @@ int QVideoFrame::mappedBytes(int plane) const
{
if (!d)
return 0;
- return plane >= 0 && plane < d->mapData.nPlanes ? d->mapData.size[plane] : 0;
+ return plane >= 0 && plane < d->mapData.planeCount ? d->mapData.dataSize[plane] : 0;
}
/*!
@@ -683,7 +811,7 @@ void QVideoFrame::setEndTime(qint64 time)
void QVideoFrame::setRotation(QtVideo::Rotation angle)
{
if (d)
- d->rotation = angle;
+ d->format.setRotation(angle);
}
/*!
@@ -691,16 +819,17 @@ void QVideoFrame::setRotation(QtVideo::Rotation angle)
*/
QtVideo::Rotation QVideoFrame::rotation() const
{
- return QtVideo::Rotation(d ? d->rotation : QtVideo::Rotation::None);
+ return d ? d->format.rotation() : QtVideo::Rotation::None;
}
/*!
- Sets the \a mirrored flag for the frame.
+ Sets the \a mirrored flag for the frame and
+ sets the flag on the underlying \l surfaceFormat.
*/
void QVideoFrame::setMirrored(bool mirrored)
{
if (d)
- d->mirrored = mirrored;
+ d->format.setMirrored(mirrored);
}
/*!
@@ -708,7 +837,24 @@ void QVideoFrame::setMirrored(bool mirrored)
*/
bool QVideoFrame::mirrored() const
{
- return d && d->mirrored;
+ return d && d->format.isMirrored();
+}
+
+/*!
+ Sets the frame \a rate of a video stream in frames per second.
+*/
+void QVideoFrame::setStreamFrameRate(qreal rate)
+{
+ if (d)
+ d->format.setStreamFrameRate(rate);
+}
+
+/*!
+ Returns the frame rate of a video stream in frames per second.
+*/
+qreal QVideoFrame::streamFrameRate() const
+{
+ return d ? d->format.streamFrameRate() : 0.;
}
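Sketch of the new per-frame metadata setters, which now delegate to the frame's QVideoFrameFormat (illustrative, not part of the patch; assumes frame is a valid QVideoFrame):

    frame.setRotation(QtVideo::Rotation::Clockwise90);
    frame.setMirrored(true);
    frame.setStreamFrameRate(30.0);
    // the values read back through the (detached) surface format
    Q_ASSERT(frame.surfaceFormat().rotation() == QtVideo::Rotation::Clockwise90);
    Q_ASSERT(frame.surfaceFormat().isMirrored());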
/*!
@@ -792,7 +938,7 @@ void QVideoFrame::paint(QPainter *painter, const QRectF &rect, const PaintOption
}
}
- if (map(QVideoFrame::ReadOnly)) {
+ if (map(QtVideo::MapMode::ReadOnly)) {
const QTransform oldTransform = painter->transform();
QTransform transform = oldTransform;
transform.translate(targetRect.center().x() - size.width()/2,
diff --git a/src/multimedia/video/qvideoframe.h b/src/multimedia/video/qvideoframe.h
index a306162e8..146547830 100644
--- a/src/multimedia/video/qvideoframe.h
+++ b/src/multimedia/video/qvideoframe.h
@@ -25,6 +25,7 @@ QT_DECLARE_QESDP_SPECIALIZATION_DTOR_WITH_EXPORT(QVideoFramePrivate, Q_MULTIMEDI
class Q_MULTIMEDIA_EXPORT QVideoFrame
{
+ Q_GADGET
public:
enum HandleType
@@ -35,10 +36,14 @@ public:
enum MapMode
{
- NotMapped = 0x00,
- ReadOnly = 0x01,
- WriteOnly = 0x02,
- ReadWrite = ReadOnly | WriteOnly
+ NotMapped Q_DECL_ENUMERATOR_DEPRECATED_X("Use QtVideo::MapMode::NotMapped instead")
+ = static_cast<int>(QtVideo::MapMode::NotMapped),
+ ReadOnly Q_DECL_ENUMERATOR_DEPRECATED_X("Use QtVideo::MapMode::ReadOnly instead")
+ = static_cast<int>(QtVideo::MapMode::ReadOnly),
+ WriteOnly Q_DECL_ENUMERATOR_DEPRECATED_X("Use QtVideo::MapMode::WriteOnly instead")
+ = static_cast<int>(QtVideo::MapMode::WriteOnly),
+ ReadWrite Q_DECL_ENUMERATOR_DEPRECATED_X("Use QtVideo::MapMode::ReadWrite instead")
+ = static_cast<int>(QtVideo::MapMode::ReadWrite)
};
#if QT_DEPRECATED_SINCE(6, 7)
@@ -54,6 +59,7 @@ public:
QVideoFrame();
QVideoFrame(const QVideoFrameFormat &format);
explicit QVideoFrame(const QImage &image);
+ explicit QVideoFrame(std::unique_ptr<QAbstractVideoBuffer> videoBuffer);
QVideoFrame(const QVideoFrame &other);
~QVideoFrame();
@@ -84,7 +90,11 @@ public:
QVideoFrame::MapMode mapMode() const;
+ bool map(QtVideo::MapMode mode);
+#if QT_DEPRECATED_SINCE(6, 8)
+ QT_DEPRECATED_VERSION_X_6_7("Use QVideoFrame::map(QtVideo::MapMode) instead")
bool map(QVideoFrame::MapMode mode);
+#endif
void unmap();
int bytesPerLine(int plane) const;
@@ -114,6 +124,9 @@ public:
void setMirrored(bool);
bool mirrored() const;
+ void setStreamFrameRate(qreal rate);
+ qreal streamFrameRate() const;
+
QImage toImage() const;
struct PaintOptions {
@@ -131,9 +144,13 @@ public:
void paint(QPainter *painter, const QRectF &rect, const PaintOptions &options);
+#if QT_DEPRECATED_SINCE(6, 8)
+ QT_DEPRECATED_VERSION_X_6_8("The constructor is internal and deprecated")
QVideoFrame(QAbstractVideoBuffer *buffer, const QVideoFrameFormat &format);
+ QT_DEPRECATED_VERSION_X_6_8("The method is internal and deprecated")
QAbstractVideoBuffer *videoBuffer() const;
+#endif
private:
friend class QVideoFramePrivate;
QExplicitlySharedDataPointer<QVideoFramePrivate> d;
diff --git a/src/multimedia/video/qvideoframe_p.h b/src/multimedia/video/qvideoframe_p.h
index 23457e55c..2ca798fbe 100644
--- a/src/multimedia/video/qvideoframe_p.h
+++ b/src/multimedia/video/qvideoframe_p.h
@@ -16,7 +16,7 @@
//
#include "qvideoframe.h"
-#include "qabstractvideobuffer_p.h"
+#include "qhwvideobuffer_p.h"
#include <qmutex.h>
@@ -26,14 +26,44 @@ class QVideoFramePrivate : public QSharedData
{
public:
QVideoFramePrivate() = default;
- QVideoFramePrivate(const QVideoFrameFormat &format) : format(format) { }
- QVideoFramePrivate(QVideoFrameFormat format, std::unique_ptr<QAbstractVideoBuffer> buffer)
- : format{ std::move(format) }, buffer{ std::move(buffer) }
+
+ ~QVideoFramePrivate()
+ {
+ if (videoBuffer && mapMode != QtVideo::MapMode::NotMapped)
+ videoBuffer->unmap();
+ }
+
+ template <typename Buffer>
+ static QVideoFrame createFrame(std::unique_ptr<Buffer> buffer, QVideoFrameFormat format)
{
+ QVideoFrame result;
+ result.d.reset(new QVideoFramePrivate(std::move(format), std::move(buffer)));
+ return result;
+ }
+
+ template <typename Buffer = QAbstractVideoBuffer>
+ QVideoFramePrivate(QVideoFrameFormat format, std::unique_ptr<Buffer> buffer = nullptr)
+ : format{ std::move(format) }, videoBuffer{ std::move(buffer) }
+ {
+ if constexpr (std::is_base_of_v<QHwVideoBuffer, Buffer>)
+ hwVideoBuffer = static_cast<QHwVideoBuffer *>(videoBuffer.get());
+ else if constexpr (std::is_same_v<QAbstractVideoBuffer, Buffer>)
+ hwVideoBuffer = dynamic_cast<QHwVideoBuffer *>(videoBuffer.get());
+ // else hwVideoBuffer == nullptr
}
static QVideoFramePrivate *handle(QVideoFrame &frame) { return frame.d.get(); };
+ static QHwVideoBuffer *hwBuffer(const QVideoFrame &frame)
+ {
+ return frame.d ? frame.d->hwVideoBuffer : nullptr;
+ };
+
+ static QAbstractVideoBuffer *buffer(const QVideoFrame &frame)
+ {
+ return frame.d ? frame.d->videoBuffer.get() : nullptr;
+ };
+
QVideoFrame adoptThisByVideoFrame()
{
QVideoFrame frame;
@@ -44,13 +74,13 @@ public:
qint64 startTime = -1;
qint64 endTime = -1;
QAbstractVideoBuffer::MapData mapData;
+ QtVideo::MapMode mapMode = QtVideo::MapMode::NotMapped;
QVideoFrameFormat format;
- std::unique_ptr<QAbstractVideoBuffer> buffer;
+ std::unique_ptr<QAbstractVideoBuffer> videoBuffer;
+ QHwVideoBuffer *hwVideoBuffer = nullptr;
int mappedCount = 0;
QMutex mapMutex;
QString subtitleText;
- QtVideo::Rotation rotation = QtVideo::Rotation::None;
- bool mirrored = false;
QImage image;
QMutex imageMutex;
diff --git a/src/multimedia/video/qvideoframeconversionhelper.cpp b/src/multimedia/video/qvideoframeconversionhelper.cpp
index 1b570b74f..d3f2b0403 100644
--- a/src/multimedia/video/qvideoframeconversionhelper.cpp
+++ b/src/multimedia/video/qvideoframeconversionhelper.cpp
@@ -34,31 +34,30 @@ static inline void planarYUV420_to_ARGB32(const uchar *y, int yStride,
int width, int height)
{
height &= ~1;
- quint32 *rgb0 = rgb;
- quint32 *rgb1 = rgb + width;
- for (int j = 0; j < height; j += 2) {
+ for (int j = 0; j + 1 < height; j += 2) {
const uchar *lineY0 = y;
const uchar *lineY1 = y + yStride;
const uchar *lineU = u;
const uchar *lineV = v;
- for (int i = 0; i < width; i += 2) {
+ quint32 *rgb0 = rgb;
+ quint32 *rgb1 = rgb + width;
+ for (int i = 0; i + 1 < width; i += 2) {
EXPAND_UV(*lineU, *lineV);
lineU += uvPixelStride;
lineV += uvPixelStride;
- *rgb0++ = qYUVToARGB32(*lineY0++, rv, guv, bu);
- *rgb0++ = qYUVToARGB32(*lineY0++, rv, guv, bu);
- *rgb1++ = qYUVToARGB32(*lineY1++, rv, guv, bu);
- *rgb1++ = qYUVToARGB32(*lineY1++, rv, guv, bu);
+ rgb0[i] = qYUVToARGB32(*lineY0++, rv, guv, bu);
+ rgb0[i + 1] = qYUVToARGB32(*lineY0++, rv, guv, bu);
+ rgb1[i] = qYUVToARGB32(*lineY1++, rv, guv, bu);
+ rgb1[i + 1] = qYUVToARGB32(*lineY1++, rv, guv, bu);
}
y += yStride << 1; // stride * 2
u += uStride;
v += vStride;
- rgb0 += width;
- rgb1 += width;
+ rgb += width << 1; // width * 2
}
}
@@ -69,31 +68,27 @@ static inline void planarYUV422_to_ARGB32(const uchar *y, int yStride,
quint32 *rgb,
int width, int height)
{
- quint32 *rgb0 = rgb;
-
for (int j = 0; j < height; ++j) {
const uchar *lineY0 = y;
const uchar *lineU = u;
const uchar *lineV = v;
- for (int i = 0; i < width; i += 2) {
+ for (int i = 0; i + 1 < width; i += 2) {
EXPAND_UV(*lineU, *lineV);
lineU += uvPixelStride;
lineV += uvPixelStride;
- *rgb0++ = qYUVToARGB32(*lineY0++, rv, guv, bu);
- *rgb0++ = qYUVToARGB32(*lineY0++, rv, guv, bu);
+ rgb[i] = qYUVToARGB32(*lineY0++, rv, guv, bu);
+ rgb[i+1] = qYUVToARGB32(*lineY0++, rv, guv, bu);
}
y += yStride; // stride * 2
u += uStride;
v += vStride;
- rgb0 += width;
+ rgb += width;
}
}
-
-
static void QT_FASTCALL qt_convert_YUV420P_to_ARGB32(const QVideoFrame &frame, uchar *output)
{
FETCH_INFO_TRIPLANAR(frame)
@@ -187,8 +182,7 @@ static void QT_FASTCALL qt_convert_UYVY_to_ARGB32(const QVideoFrame &frame, ucha
for (int i = 0; i < height; ++i) {
const uchar *lineSrc = src;
-
- for (int j = 0; j < width; j += 2) {
+ for (int j = 0; j + 1 < width; j += 2) {
int u = *lineSrc++;
int y0 = *lineSrc++;
int v = *lineSrc++;
@@ -196,11 +190,12 @@ static void QT_FASTCALL qt_convert_UYVY_to_ARGB32(const QVideoFrame &frame, ucha
EXPAND_UV(u, v);
- *rgb++ = qYUVToARGB32(y0, rv, guv, bu);
- *rgb++ = qYUVToARGB32(y1, rv, guv, bu);
+ rgb[j] = qYUVToARGB32(y0, rv, guv, bu);
+ rgb[j+1] = qYUVToARGB32(y1, rv, guv, bu);
}
src += stride;
+ rgb += width;
}
}
@@ -213,8 +208,7 @@ static void QT_FASTCALL qt_convert_YUYV_to_ARGB32(const QVideoFrame &frame, ucha
for (int i = 0; i < height; ++i) {
const uchar *lineSrc = src;
-
- for (int j = 0; j < width; j += 2) {
+ for (int j = 0; j + 1 < width; j += 2) {
int y0 = *lineSrc++;
int u = *lineSrc++;
int y1 = *lineSrc++;
@@ -222,11 +216,12 @@ static void QT_FASTCALL qt_convert_YUYV_to_ARGB32(const QVideoFrame &frame, ucha
EXPAND_UV(u, v);
- *rgb++ = qYUVToARGB32(y0, rv, guv, bu);
- *rgb++ = qYUVToARGB32(y1, rv, guv, bu);
+ rgb[j] = qYUVToARGB32(y0, rv, guv, bu);
+ rgb[j+1] = qYUVToARGB32(y1, rv, guv, bu);
}
src += stride;
+ rgb += width;
}
}
@@ -376,23 +371,24 @@ static void QT_FASTCALL qt_convert_premultiplied_to_ARGB32(const QVideoFrame &fr
}
static inline void planarYUV420_16bit_to_ARGB32(const uchar *y, int yStride,
- const uchar *u, int uStride,
- const uchar *v, int vStride,
- int uvPixelStride,
- quint32 *rgb,
- int width, int height)
+ const uchar *u, int uStride,
+ const uchar *v, int vStride,
+ int uvPixelStride,
+ quint32 *rgb,
+ int width, int height)
{
height &= ~1;
- quint32 *rgb0 = rgb;
- quint32 *rgb1 = rgb + width;
- for (int j = 0; j < height; j += 2) {
+ for (int j = 0; j + 1 < height; j += 2) {
const uchar *lineY0 = y;
const uchar *lineY1 = y + yStride;
const uchar *lineU = u;
const uchar *lineV = v;
- for (int i = 0; i < width; i += 2) {
+ quint32 *rgb0 = rgb;
+ quint32 *rgb1 = rgb + width;
+
+ for (int i = 0; i + 1 < width; i += 2) {
EXPAND_UV(*lineU, *lineV);
lineU += uvPixelStride;
lineV += uvPixelStride;
@@ -410,11 +406,11 @@ static inline void planarYUV420_16bit_to_ARGB32(const uchar *y, int yStride,
y += yStride << 1; // stride * 2
u += uStride;
v += vStride;
- rgb0 += width;
- rgb1 += width;
+ rgb += width * 2;
}
}
+
static void QT_FASTCALL qt_convert_P016_to_ARGB32(const QVideoFrame &frame, uchar *output)
{
FETCH_INFO_BIPLANAR(frame)
diff --git a/src/multimedia/video/qvideoframeconverter.cpp b/src/multimedia/video/qvideoframeconverter.cpp
index 82e0a0af5..7883f91a5 100644
--- a/src/multimedia/video/qvideoframeconverter.cpp
+++ b/src/multimedia/video/qvideoframeconverter.cpp
@@ -6,6 +6,7 @@
#include "qvideoframeformat.h"
#include "qvideoframe_p.h"
#include "qmultimediautils_p.h"
+#include "qabstractvideobuffer.h"
#include <QtCore/qcoreapplication.h>
#include <QtCore/qsize.h>
@@ -16,7 +17,6 @@
#include <QtGui/qoffscreensurface.h>
#include <qpa/qplatformintegration.h>
#include <private/qvideotexturehelper_p.h>
-#include <private/qabstractvideobuffer_p.h>
#include <private/qguiapplication_p.h>
#include <rhi/qrhi.h>
@@ -26,7 +26,7 @@
QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLcVideoFrameConverter, "qt.multimedia.video.frameconverter")
+Q_STATIC_LOGGING_CATEGORY(qLcVideoFrameConverter, "qt.multimedia.video.frameconverter")
namespace {
@@ -254,7 +254,7 @@ static bool updateTextures(QRhi *rhi,
static QImage convertJPEG(const QVideoFrame &frame, QtVideo::Rotation rotation, bool mirrorX, bool mirrorY)
{
QVideoFrame varFrame = frame;
- if (!varFrame.map(QVideoFrame::ReadOnly)) {
+ if (!varFrame.map(QtVideo::MapMode::ReadOnly)) {
qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": frame mapping failed";
return {};
}
@@ -273,7 +273,7 @@ static QImage convertCPU(const QVideoFrame &frame, QtVideo::Rotation rotation, b
return {};
} else {
QVideoFrame varFrame = frame;
- if (!varFrame.map(QVideoFrame::ReadOnly)) {
+ if (!varFrame.map(QtVideo::MapMode::ReadOnly)) {
qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": frame mapping failed";
return {};
}
@@ -286,7 +286,8 @@ static QImage convertCPU(const QVideoFrame &frame, QtVideo::Rotation rotation, b
}
}
-QImage qImageFromVideoFrame(const QVideoFrame &frame, QtVideo::Rotation rotation, bool mirrorX, bool mirrorY)
+QImage qImageFromVideoFrame(const QVideoFrame &frame, QtVideo::Rotation rotation, bool mirrorX,
+ bool mirrorY, bool forceCpu)
{
#ifdef Q_OS_DARWIN
QMacAutoReleasePool releasePool;
@@ -310,10 +311,13 @@ QImage qImageFromVideoFrame(const QVideoFrame &frame, QtVideo::Rotation rotation
if (frame.pixelFormat() == QVideoFrameFormat::Format_Jpeg)
return convertJPEG(frame, rotation, mirrorX, mirrorY);
+ if (forceCpu) // For test purposes
+ return convertCPU(frame, rotation, mirrorX, mirrorY);
+
QRhi *rhi = nullptr;
- if (frame.videoBuffer())
- rhi = frame.videoBuffer()->rhi();
+ if (QHwVideoBuffer *buffer = QVideoFramePrivate::hwBuffer(frame))
+ rhi = buffer->rhi();
if (!rhi || rhi->thread() != QThread::currentThread())
rhi = initializeRHI(rhi);
@@ -428,7 +432,7 @@ QImage videoFramePlaneAsImage(QVideoFrame &frame, int plane, QImage::Format targ
if (plane >= frame.planeCount())
return {};
- if (!frame.map(QVideoFrame::ReadOnly)) {
+ if (!frame.map(QtVideo::MapMode::ReadOnly)) {
qWarning() << "Cannot map a video frame in ReadOnly mode!";
return {};
}
diff --git a/src/multimedia/video/qvideoframeconverter_p.h b/src/multimedia/video/qvideoframeconverter_p.h
index d22491f66..ad6cea9e4 100644
--- a/src/multimedia/video/qvideoframeconverter_p.h
+++ b/src/multimedia/video/qvideoframeconverter_p.h
@@ -19,7 +19,9 @@
QT_BEGIN_NAMESPACE
-Q_MULTIMEDIA_EXPORT QImage qImageFromVideoFrame(const QVideoFrame &frame, QtVideo::Rotation rotation = QtVideo::Rotation::None, bool mirrorX = false, bool mirrorY = false);
+Q_MULTIMEDIA_EXPORT QImage
+qImageFromVideoFrame(const QVideoFrame &frame, QtVideo::Rotation rotation = QtVideo::Rotation::None,
+ bool mirrorX = false, bool mirrorY = false, bool forceCpu = false);
/**
* @brief Maps the video frame and returns an image having a shared ownership for the video frame
diff --git a/src/multimedia/video/qvideoframeformat.cpp b/src/multimedia/video/qvideoframeformat.cpp
index b2c9dc5f1..b3177234f 100644
--- a/src/multimedia/video/qvideoframeformat.cpp
+++ b/src/multimedia/video/qvideoframeformat.cpp
@@ -39,7 +39,8 @@ public:
&& viewport == other.viewport
&& frameRatesEqual(frameRate, other.frameRate)
&& colorSpace == other.colorSpace
- && mirrored == other.mirrored)
+ && mirrored == other.mirrored
+ && rotation == other.rotation)
return true;
return false;
@@ -60,6 +61,7 @@ public:
float frameRate = 0.0;
float maxLuminance = -1.;
bool mirrored = false;
+ QtVideo::Rotation rotation = QtVideo::Rotation::None;
};
QT_DEFINE_QESDP_SPECIALIZATION_DTOR(QVideoFrameFormatPrivate);
@@ -538,12 +540,13 @@ void QVideoFrameFormat::setScanLineDirection(Direction direction)
d->scanLineDirection = direction;
}
+#if QT_DEPRECATED_SINCE(6, 8)
/*!
Returns the frame rate of a video stream in frames per second.
*/
qreal QVideoFrameFormat::frameRate() const
{
- return d->frameRate;
+ return streamFrameRate();
}
/*!
@@ -551,6 +554,23 @@ qreal QVideoFrameFormat::frameRate() const
*/
void QVideoFrameFormat::setFrameRate(qreal rate)
{
+ setStreamFrameRate(rate);
+}
+#endif
+
+/*!
+ Returns the frame rate of a video stream in frames per second.
+*/
+qreal QVideoFrameFormat::streamFrameRate() const
+{
+ return d->frameRate;
+}
+
+/*!
+ Sets the frame \a rate of a video stream in frames per second.
+*/
+void QVideoFrameFormat::setStreamFrameRate(qreal rate)
+{
detach();
d->frameRate = rate;
}
@@ -665,6 +685,23 @@ void QVideoFrameFormat::setMirrored(bool mirrored)
}
/*!
+ Returns the angle by which the matching video frame should be rotated clockwise before displaying.
+ */
+QtVideo::Rotation QVideoFrameFormat::rotation() const
+{
+ return d->rotation;
+}
+
+/*!
+ Sets the \a rotation angle by which the matching video frame should be rotated clockwise before displaying.
+ */
+void QVideoFrameFormat::setRotation(QtVideo::Rotation rotation)
+{
+ detach();
+ d->rotation = rotation;
+}
+
+/*!
\internal
*/
QString QVideoFrameFormat::vertexShaderFileName() const
@@ -984,7 +1021,7 @@ QDebug operator<<(QDebug dbg, const QVideoFrameFormat &f)
<< "\n frame size=" << f.frameSize()
<< "\n viewport=" << f.viewport()
<< "\n colorSpace=" << f.colorSpace()
- << "\n frameRate=" << f.frameRate()
+ << "\n frameRate=" << f.streamFrameRate()
<< "\n mirrored=" << f.isMirrored();
return dbg;
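Format-level sketch (hypothetical, not part of the patch): a backend can now describe rotation and frame rate entirely through QVideoFrameFormat, and rotation participates in equality.

    QVideoFrameFormat fmt(QSize(1280, 720), QVideoFrameFormat::Format_NV12);
    fmt.setStreamFrameRate(30.0);
    fmt.setRotation(QtVideo::Rotation::Clockwise270);

    QVideoFrameFormat other = fmt;
    other.setRotation(QtVideo::Rotation::None);
    Q_ASSERT(fmt != other); // rotation is now part of operator==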
diff --git a/src/multimedia/video/qvideoframeformat.h b/src/multimedia/video/qvideoframeformat.h
index 5fb6b3701..9b3879108 100644
--- a/src/multimedia/video/qvideoframeformat.h
+++ b/src/multimedia/video/qvideoframeformat.h
@@ -5,6 +5,7 @@
#define QVIDEOSURFACEFORMAT_H
#include <QtMultimedia/qtmultimediaglobal.h>
+#include <QtMultimedia/qtvideo.h>
#include <QtCore/qlist.h>
#include <QtCore/qmetatype.h>
@@ -25,6 +26,7 @@ QT_DECLARE_QESDP_SPECIALIZATION_DTOR_WITH_EXPORT(QVideoFrameFormatPrivate, Q_MUL
class Q_MULTIMEDIA_EXPORT QVideoFrameFormat
{
+ Q_GADGET
public:
enum PixelFormat
{
@@ -65,6 +67,7 @@ public:
Format_YUV420P10
};
+ Q_ENUM(PixelFormat)
#ifndef Q_QDOC
static constexpr int NPixelFormats = Format_YUV420P10 + 1;
#endif
@@ -153,8 +156,15 @@ public:
Direction scanLineDirection() const;
void setScanLineDirection(Direction direction);
+#if QT_DEPRECATED_SINCE(6, 8)
+ QT_DEPRECATED_VERSION_X_6_8("Use streamFrameRate()")
qreal frameRate() const;
+ QT_DEPRECATED_VERSION_X_6_8("Use setStreamFrameRate()")
void setFrameRate(qreal rate);
+#endif
+
+ qreal streamFrameRate() const;
+ void setStreamFrameRate(qreal rate);
#if QT_DEPRECATED_SINCE(6, 4)
QT_DEPRECATED_VERSION_X_6_4("Use colorSpace()")
@@ -175,6 +185,9 @@ public:
bool isMirrored() const;
void setMirrored(bool mirrored);
+ QtVideo::Rotation rotation() const;
+ void setRotation(QtVideo::Rotation rotation);
+
QString vertexShaderFileName() const;
QString fragmentShaderFileName() const;
void updateUniformData(QByteArray *dst, const QVideoFrame &frame, const QMatrix4x4 &transform, float opacity) const;
diff --git a/src/multimedia/video/qvideooutputorientationhandler.cpp b/src/multimedia/video/qvideooutputorientationhandler.cpp
index c34e9e92a..ff91bd7fb 100644
--- a/src/multimedia/video/qvideooutputorientationhandler.cpp
+++ b/src/multimedia/video/qvideooutputorientationhandler.cpp
@@ -18,8 +18,8 @@ QVideoOutputOrientationHandler::QVideoOutputOrientationHandler(QObject *parent)
if (!screen)
return;
- connect(screen, SIGNAL(orientationChanged(Qt::ScreenOrientation)),
- this, SLOT(screenOrientationChanged(Qt::ScreenOrientation)));
+ connect(screen, &QScreen::orientationChanged, this,
+ &QVideoOutputOrientationHandler::screenOrientationChanged);
screenOrientationChanged(screen->orientation());
}
diff --git a/src/multimedia/video/qvideotexturehelper.cpp b/src/multimedia/video/qvideotexturehelper.cpp
index 937ff33cb..093989654 100644
--- a/src/multimedia/video/qvideotexturehelper.cpp
+++ b/src/multimedia/video/qvideotexturehelper.cpp
@@ -1,9 +1,11 @@
// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qabstractvideobuffer.h"
+
#include "qvideotexturehelper_p.h"
-#include "qabstractvideobuffer_p.h"
#include "qvideoframeconverter_p.h"
+#include "qvideoframe_p.h"
#include <qpainter.h>
#include <qloggingcategory.h>
@@ -213,7 +215,7 @@ static const TextureDescription descriptions[QVideoFrameFormat::NPixelFormats] =
{ { 1, 1 }, { 1, 1 }, { 1, 1 } }
},
// Format_YUV420P10
- { 3, 1,
+ { 3, 2,
[](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
{ QRhiTexture::R16, QRhiTexture::R16, QRhiTexture::R16 },
{ { 1, 1 }, { 2, 2 }, { 2, 2 } }
@@ -520,7 +522,8 @@ void updateUniformData(QByteArray *dst, const QVideoFrameFormat &format, const Q
break;
case QVideoFrameFormat::Format_SamplerExternalOES:
// get Android specific transform for the externalsampler texture
- cmat = frame.videoBuffer()->externalTextureMatrix();
+ if (auto hwBuffer = QVideoFramePrivate::hwBuffer(frame))
+ cmat = hwBuffer->externalTextureMatrix();
break;
case QVideoFrameFormat::Format_SamplerRect:
{
@@ -629,6 +632,9 @@ static UpdateTextureWithMapResult updateTextureWithMap(const QVideoFrame &frame,
static std::unique_ptr<QRhiTexture> createTextureFromHandle(const QVideoFrame &frame, QRhi *rhi, int plane)
{
+ QHwVideoBuffer *hwBuffer = QVideoFramePrivate::hwBuffer(frame);
+ Q_ASSERT(hwBuffer);
+
QVideoFrameFormat fmt = frame.surfaceFormat();
QVideoFrameFormat::PixelFormat pixelFormat = fmt.pixelFormat();
QSize size = fmt.frameSize();
@@ -650,7 +656,7 @@ static std::unique_ptr<QRhiTexture> createTextureFromHandle(const QVideoFrame &f
#endif
}
- if (quint64 handle = frame.videoBuffer()->textureHandle(rhi, plane); handle) {
+ if (quint64 handle = hwBuffer->textureHandle(rhi, plane); handle) {
std::unique_ptr<QRhiTexture> tex(rhi->newTexture(texDesc.textureFormat[plane], planeSize, 1, textureFlags));
if (tex->createFrom({handle, 0}))
return tex;
@@ -712,7 +718,7 @@ static std::unique_ptr<QVideoFrameTextures> createTexturesFromMemory(QVideoFrame
if (oldArray)
textures = oldArray->takeTextures();
- if (!frame.map(QVideoFrame::ReadOnly)) {
+ if (!frame.map(QtVideo::MapMode::ReadOnly)) {
qWarning() << "Cannot map a video frame in ReadOnly mode!";
return {};
}
@@ -736,15 +742,16 @@ static std::unique_ptr<QVideoFrameTextures> createTexturesFromMemory(QVideoFrame
std::unique_ptr<QVideoFrameTextures> createTextures(QVideoFrame &frame, QRhi *rhi, QRhiResourceUpdateBatch *rub, std::unique_ptr<QVideoFrameTextures> &&oldTextures)
{
- QAbstractVideoBuffer *vf = frame.videoBuffer();
- if (!vf)
+ if (!frame.isValid())
return {};
- if (auto vft = vf->mapTextures(rhi))
- return vft;
+ if (QHwVideoBuffer *hwBuffer = QVideoFramePrivate::hwBuffer(frame)) {
+ if (auto textures = hwBuffer->mapTextures(rhi))
+ return textures;
- if (auto vft = createTexturesFromHandles(frame, rhi))
- return vft;
+ if (auto textures = createTexturesFromHandles(frame, rhi))
+ return textures;
+ }
return createTexturesFromMemory(frame, rhi, rub, oldTextures.get());
}
diff --git a/src/multimedia/video/qvideowindow.cpp b/src/multimedia/video/qvideowindow.cpp
index 9cab23f5f..9b88a86df 100644
--- a/src/multimedia/video/qvideowindow.cpp
+++ b/src/multimedia/video/qvideowindow.cpp
@@ -7,7 +7,9 @@
#include <qpainter.h>
#include <private/qguiapplication_p.h>
#include <private/qmemoryvideobuffer_p.h>
+#include <private/qhwvideobuffer_p.h>
#include <private/qmultimediautils_p.h>
+#include <private/qvideoframe_p.h>
#include <qpa/qplatformintegration.h>
QT_BEGIN_NAMESPACE
@@ -209,8 +211,9 @@ void QVideoWindowPrivate::updateTextures(QRhiResourceUpdateBatch *rub)
// We render a 1x1 black pixel when we don't have a video
if (!m_currentFrame.isValid())
- m_currentFrame = QVideoFrame(new QMemoryVideoBuffer(QByteArray{4, 0}, 4),
- QVideoFrameFormat(QSize(1,1), QVideoFrameFormat::Format_RGBA8888));
+ m_currentFrame = QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(QByteArray{ 4, 0 }, 4),
+ QVideoFrameFormat(QSize(1, 1), QVideoFrameFormat::Format_RGBA8888));
m_frameTextures = QVideoTextureHelper::createTextures(m_currentFrame, m_rhi.get(), rub, std::move(m_frameTextures));
if (!m_frameTextures)
diff --git a/src/multimediaquick/CMakeLists.txt b/src/multimediaquick/CMakeLists.txt
index eceef4e06..1376f9274 100644
--- a/src/multimediaquick/CMakeLists.txt
+++ b/src/multimediaquick/CMakeLists.txt
@@ -39,7 +39,6 @@ qt_internal_add_qml_module(MultimediaQuickPrivate
Qt::MultimediaPrivate
Qt::Quick
Qt::QuickPrivate
- GENERATE_CPP_EXPORTS
)
target_sources(quickmultimedia PRIVATE multimedia_plugin.cpp)
diff --git a/src/multimediaquick/qquickimagecapture.cpp b/src/multimediaquick/qquickimagecapture.cpp
index 72dfb78a8..b7e56d18d 100644
--- a/src/multimediaquick/qquickimagecapture.cpp
+++ b/src/multimediaquick/qquickimagecapture.cpp
@@ -56,7 +56,7 @@ QT_BEGIN_NAMESPACE
QQuickImageCapture::QQuickImageCapture(QObject *parent)
: QImageCapture(parent)
{
- connect(this, SIGNAL(imageCaptured(int,QImage)), this, SLOT(_q_imageCaptured(int,QImage)));
+ connect(this, &QImageCapture::imageCaptured, this, &QQuickImageCapture::_q_imageCaptured);
}
QQuickImageCapture::~QQuickImageCapture() = default;
diff --git a/src/multimediaquick/qquickvideooutput.cpp b/src/multimediaquick/qquickvideooutput.cpp
index 8af974759..50b344846 100644
--- a/src/multimediaquick/qquickvideooutput.cpp
+++ b/src/multimediaquick/qquickvideooutput.cpp
@@ -17,7 +17,7 @@
QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLcVideo, "qt.multimedia.video")
+Q_STATIC_LOGGING_CATEGORY(qLcVideo, "qt.multimedia.video")
namespace {
diff --git a/src/multimediaquick/qsgvideonode_p.cpp b/src/multimediaquick/qsgvideonode_p.cpp
index 5dc107337..405744507 100644
--- a/src/multimediaquick/qsgvideonode_p.cpp
+++ b/src/multimediaquick/qsgvideonode_p.cpp
@@ -9,7 +9,7 @@
#include <private/qsginternaltextnode_p.h>
#include <private/qquickitem_p.h>
#include <private/qquickvideooutput_p.h>
-#include <private/qabstractvideobuffer_p.h>
+#include <private/qhwvideobuffer_p.h>
QT_BEGIN_NAMESPACE
diff --git a/src/multimediawidgets/CMakeLists.txt b/src/multimediawidgets/CMakeLists.txt
index 8425d3bac..2f18cadfc 100644
--- a/src/multimediawidgets/CMakeLists.txt
+++ b/src/multimediawidgets/CMakeLists.txt
@@ -22,7 +22,6 @@ qt_internal_add_module(MultimediaWidgets
PRIVATE_MODULE_INTERFACE
Qt::MultimediaPrivate
Qt::WidgetsPrivate
- GENERATE_CPP_EXPORTS
)
## Scopes:
diff --git a/src/plugins/multimedia/android/common/qandroidvideooutput.cpp b/src/plugins/multimedia/android/common/qandroidvideooutput.cpp
index 5a4eebf51..0724a8359 100644
--- a/src/plugins/multimedia/android/common/qandroidvideooutput.cpp
+++ b/src/plugins/multimedia/android/common/qandroidvideooutput.cpp
@@ -6,9 +6,10 @@
#include <rhi/qrhi.h>
#include <QtGui/private/qopenglextensions_p.h>
-#include <private/qabstractvideobuffer_p.h>
+#include <private/qhwvideobuffer_p.h>
#include <private/qvideoframeconverter_p.h>
#include <private/qplatformvideosink_p.h>
+#include <private/qvideoframe_p.h>
#include <qvideosink.h>
#include <qopenglcontext.h>
#include <qopenglfunctions.h>
@@ -49,27 +50,24 @@ private:
std::shared_ptr<AndroidTextureThread> m_thread;
};
-
-class AndroidTextureVideoBuffer : public QRhiWithThreadGuard, public QAbstractVideoBuffer
+class AndroidTextureVideoBuffer : public QRhiWithThreadGuard, public QHwVideoBuffer
{
public:
- AndroidTextureVideoBuffer(
- std::shared_ptr<QRhi> rhi, std::shared_ptr<AndroidTextureThread> thread,
- std::unique_ptr<QRhiTexture> tex, const QSize &size)
- : QRhiWithThreadGuard(std::move(rhi), std::move(thread))
- , QAbstractVideoBuffer(QVideoFrame::RhiTextureHandle, m_guardRhi.get())
- , m_size(size)
- , m_tex(std::move(tex))
+ AndroidTextureVideoBuffer(std::shared_ptr<QRhi> rhi,
+ std::shared_ptr<AndroidTextureThread> thread,
+ std::unique_ptr<QRhiTexture> tex, const QSize &size)
+ : QRhiWithThreadGuard(std::move(rhi), std::move(thread)),
+ QHwVideoBuffer(QVideoFrame::RhiTextureHandle, m_guardRhi.get()),
+ m_size(size),
+ m_tex(std::move(tex))
{}
- QVideoFrame::MapMode mapMode() const override { return m_mapMode; }
-
- MapData map(QVideoFrame::MapMode mode) override;
+ MapData map(QtVideo::MapMode mode) override;
void unmap() override
{
m_image = {};
- m_mapMode = QVideoFrame::NotMapped;
+ m_mapMode = QtVideo::MapMode::NotMapped;
}
std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *rhi) override
@@ -81,39 +79,39 @@ private:
QSize m_size;
std::unique_ptr<QRhiTexture> m_tex;
QImage m_image;
- QVideoFrame::MapMode m_mapMode = QVideoFrame::NotMapped;
+ QtVideo::MapMode m_mapMode = QtVideo::MapMode::NotMapped;
};
-class ImageFromVideoFrameHelper : public QAbstractVideoBuffer
+class ImageFromVideoFrameHelper : public QHwVideoBuffer
{
public:
ImageFromVideoFrameHelper(AndroidTextureVideoBuffer &atvb)
- : QAbstractVideoBuffer(QVideoFrame::RhiTextureHandle, atvb.rhi())
- , m_atvb(atvb)
+ : QHwVideoBuffer(QVideoFrame::RhiTextureHandle, atvb.rhi()), m_atvb(atvb)
{}
std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *rhi) override
{
return m_atvb.mapTextures(rhi);
}
- QVideoFrame::MapMode mapMode() const override { return QVideoFrame::NotMapped; }
- MapData map(QVideoFrame::MapMode) override { return {}; }
+
+ MapData map(QtVideo::MapMode) override { return {}; }
void unmap() override {}
private:
AndroidTextureVideoBuffer &m_atvb;
};
-QAbstractVideoBuffer::MapData AndroidTextureVideoBuffer::map(QVideoFrame::MapMode mode)
+QAbstractVideoBuffer::MapData AndroidTextureVideoBuffer::map(QtVideo::MapMode mode)
{
QAbstractVideoBuffer::MapData mapData;
- if (m_mapMode == QVideoFrame::NotMapped && mode == QVideoFrame::ReadOnly) {
- m_mapMode = QVideoFrame::ReadOnly;
- m_image = qImageFromVideoFrame(QVideoFrame(new ImageFromVideoFrameHelper(*this),
- QVideoFrameFormat(m_size, QVideoFrameFormat::Format_RGBA8888)));
- mapData.nPlanes = 1;
+ if (m_mapMode == QtVideo::MapMode::NotMapped && mode == QtVideo::MapMode::ReadOnly) {
+ m_mapMode = QtVideo::MapMode::ReadOnly;
+ m_image = qImageFromVideoFrame(QVideoFramePrivate::createFrame(
+ std::make_unique<ImageFromVideoFrameHelper>(*this),
+ QVideoFrameFormat(m_size, QVideoFrameFormat::Format_RGBA8888)));
+ mapData.planeCount = 1;
mapData.bytesPerLine[0] = m_image.bytesPerLine();
- mapData.size[0] = static_cast<int>(m_image.sizeInBytes());
+ mapData.dataSize[0] = static_cast<int>(m_image.sizeInBytes());
mapData.data[0] = m_image.bits();
}
diff --git a/src/plugins/multimedia/android/common/qandroidvideooutput_p.h b/src/plugins/multimedia/android/common/qandroidvideooutput_p.h
index c59a1b76c..7c9be5aee 100644
--- a/src/plugins/multimedia/android/common/qandroidvideooutput_p.h
+++ b/src/plugins/multimedia/android/common/qandroidvideooutput_p.h
@@ -18,7 +18,7 @@
#include <qsize.h>
#include <qmutex.h>
#include <qreadwritelock.h>
-#include <private/qabstractvideobuffer_p.h>
+#include <qabstractvideobuffer.h>
#include <qmatrix4x4.h>
#include <qoffscreensurface.h>
#include <rhi/qrhi.h>
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidcamerasession.cpp b/src/plugins/multimedia/android/mediacapture/qandroidcamerasession.cpp
index f7dd1c653..7eda1175f 100644
--- a/src/plugins/multimedia/android/mediacapture/qandroidcamerasession.cpp
+++ b/src/plugins/multimedia/android/mediacapture/qandroidcamerasession.cpp
@@ -21,6 +21,7 @@
#include <private/qmemoryvideobuffer_p.h>
#include <private/qcameradevice_p.h>
#include <private/qmediastoragelocation_p.h>
+#include <private/qvideoframe_p.h>
#include <QImageWriter>
QT_BEGIN_NAMESPACE
@@ -734,7 +735,9 @@ void QAndroidCameraSession::processCapturedImage(int id, const QByteArray &bytes
void QAndroidCameraSession::processCapturedImageToBuffer(int id, const QByteArray &bytes,
QVideoFrameFormat::PixelFormat format, QSize size, int bytesPerLine)
{
- QVideoFrame frame(new QMemoryVideoBuffer(bytes, bytesPerLine), QVideoFrameFormat(size, format));
+ QVideoFrame frame = QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(bytes, bytesPerLine),
+ QVideoFrameFormat(size, format));
emit imageAvailable(id, frame);
}
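The construction pattern above recurs throughout this change: QVideoFrame is no longer built from a raw, owning QAbstractVideoBuffer pointer; a std::unique_ptr is handed to the private factory instead. A minimal sketch of the new path, assuming the same private headers the patch includes:

    #include <private/qvideoframe_p.h>
    #include <private/qmemoryvideobuffer_p.h>
    #include <memory>

    static QVideoFrame makeCpuFrame(QByteArray bytes, int bytesPerLine, QSize size,
                                    QVideoFrameFormat::PixelFormat pixelFormat)
    {
        // Ownership moves into the frame; no manual delete on error paths.
        auto buffer = std::make_unique<QMemoryVideoBuffer>(std::move(bytes), bytesPerLine);
        return QVideoFramePrivate::createFrame(std::move(buffer),
                                               QVideoFrameFormat(size, pixelFormat));
    }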
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp b/src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp
index ee5af5dfd..3b005e4a5 100644
--- a/src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp
+++ b/src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp
@@ -133,7 +133,7 @@ void QAndroidCaptureSession::start(QMediaEncoderSettings &settings, const QUrl &
return;
if (!m_cameraSession && !m_audioInput) {
- emit error(QMediaRecorder::ResourceError, QLatin1String("No devices are set"));
+ updateError(QMediaRecorder::ResourceError, QLatin1String("No devices are set"));
return;
}
@@ -142,13 +142,13 @@ void QAndroidCaptureSession::start(QMediaEncoderSettings &settings, const QUrl &
const bool validCameraSession = m_cameraSession && m_cameraSession->camera();
if (validCameraSession && !qt_androidCheckCameraPermission()) {
- emit error(QMediaRecorder::ResourceError, QLatin1String("Camera permission denied."));
+ updateError(QMediaRecorder::ResourceError, QLatin1String("Camera permission denied."));
setKeepAlive(false);
return;
}
if (m_audioInput && !qt_androidCheckMicrophonePermission()) {
- emit error(QMediaRecorder::ResourceError, QLatin1String("Microphone permission denied."));
+ updateError(QMediaRecorder::ResourceError, QLatin1String("Microphone permission denied."));
setKeepAlive(false);
return;
}
@@ -221,15 +221,15 @@ void QAndroidCaptureSession::start(QMediaEncoderSettings &settings, const QUrl &
}
if (!m_mediaRecorder->prepare()) {
- emit error(QMediaRecorder::FormatError, QLatin1String("Unable to prepare the media recorder."));
+ updateError(QMediaRecorder::FormatError,
+ QLatin1String("Unable to prepare the media recorder."));
restartViewfinder();
return;
}
if (!m_mediaRecorder->start()) {
- emit error(QMediaRecorder::FormatError,
- QMediaRecorderPrivate::msgFailedStartRecording());
+ updateError(QMediaRecorder::FormatError, QMediaRecorderPrivate::msgFailedStartRecording());
restartViewfinder();
return;
@@ -451,7 +451,7 @@ void QAndroidCaptureSession::onError(int what, int extra)
Q_UNUSED(what);
Q_UNUSED(extra);
stop(true);
- emit error(QMediaRecorder::ResourceError, QLatin1String("Unknown error."));
+ updateError(QMediaRecorder::ResourceError, QLatin1String("Unknown error."));
}
void QAndroidCaptureSession::onInfo(int what, int extra)
@@ -460,11 +460,11 @@ void QAndroidCaptureSession::onInfo(int what, int extra)
if (what == 800) {
// MEDIA_RECORDER_INFO_MAX_DURATION_REACHED
stop();
- emit error(QMediaRecorder::OutOfSpaceError, QLatin1String("Maximum duration reached."));
+ updateError(QMediaRecorder::OutOfSpaceError, QLatin1String("Maximum duration reached."));
} else if (what == 801) {
// MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED
stop();
- emit error(QMediaRecorder::OutOfSpaceError, QLatin1String("Maximum file size reached."));
+ updateError(QMediaRecorder::OutOfSpaceError, QLatin1String("Maximum file size reached."));
}
}
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h b/src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h
index ab91fc3ef..161d47994 100644
--- a/src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h
+++ b/src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h
@@ -67,10 +67,10 @@ public:
if (m_mediaEncoder)
m_mediaEncoder->actualLocationChanged(location);
}
- void error(int error, const QString &errorString)
+ void updateError(int error, const QString &errorString)
{
if (m_mediaEncoder)
- m_mediaEncoder->error(QMediaRecorder::Error(error), errorString);
+ m_mediaEncoder->updateError(QMediaRecorder::Error(error), errorString);
}
private Q_SLOTS:
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidcamera.cpp b/src/plugins/multimedia/android/wrappers/jni/androidcamera.cpp
index 268434217..cef36d7ad 100644
--- a/src/plugins/multimedia/android/wrappers/jni/androidcamera.cpp
+++ b/src/plugins/multimedia/android/wrappers/jni/androidcamera.cpp
@@ -8,6 +8,8 @@
#include "qandroidmultimediautils_p.h"
#include "qandroidglobal_p.h"
+#include <private/qvideoframe_p.h>
+
#include <qhash.h>
#include <qstringlist.h>
#include <qdebug.h>
@@ -145,9 +147,12 @@ static void notifyNewPreviewFrame(JNIEnv *env, jobject, int id, jbyteArray data,
QByteArray bytes(arrayLength, Qt::Uninitialized);
env->GetByteArrayRegion(data, 0, arrayLength, (jbyte*)bytes.data());
- QVideoFrame frame(new QMemoryVideoBuffer(bytes, bpl),
- QVideoFrameFormat(QSize(width, height),
- qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat(format))));
+ QVideoFrameFormat frameFormat(
+ QSize(width, height),
+ qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat(format)));
+
+ QVideoFrame frame = QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(std::move(bytes), bpl), std::move(frameFormat));
Q_EMIT (*it)->newPreviewFrame(frame);
}
@@ -1730,9 +1735,12 @@ void AndroidCameraPrivate::fetchLastPreviewFrame()
const int format = m_cameraListener.callMethod<jint>("previewFormat");
const int bpl = m_cameraListener.callMethod<jint>("previewBytesPerLine");
- QVideoFrame frame(new QMemoryVideoBuffer(bytes, bpl),
- QVideoFrameFormat(QSize(width, height),
- qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat(format))));
+ QVideoFrameFormat frameFormat(
+ QSize(width, height),
+ qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat(format)));
+
+ QVideoFrame frame = QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(std::move(bytes), bpl), std::move(frameFormat));
emit lastPreviewFrameFetched(frame);
}
diff --git a/src/plugins/multimedia/darwin/avfvideobuffer.mm b/src/plugins/multimedia/darwin/avfvideobuffer.mm
index 434080fc1..0f5e3bcb3 100644
--- a/src/plugins/multimedia/darwin/avfvideobuffer.mm
+++ b/src/plugins/multimedia/darwin/avfvideobuffer.mm
@@ -16,7 +16,8 @@
QT_USE_NAMESPACE
AVFVideoBuffer::AVFVideoBuffer(AVFVideoSinkInterface *sink, CVImageBufferRef buffer)
- : QAbstractVideoBuffer(sink->rhi() ? QVideoFrame::RhiTextureHandle : QVideoFrame::NoHandle, sink->rhi()),
+ : QHwVideoBuffer(sink->rhi() ? QVideoFrame::RhiTextureHandle : QVideoFrame::NoHandle,
+ sink->rhi()),
sink(sink),
m_buffer(buffer)
{
@@ -29,7 +30,7 @@ AVFVideoBuffer::AVFVideoBuffer(AVFVideoSinkInterface *sink, CVImageBufferRef buf
AVFVideoBuffer::~AVFVideoBuffer()
{
- AVFVideoBuffer::unmap();
+ Q_ASSERT(m_mode == QtVideo::MapMode::NotMapped);
for (int i = 0; i < 3; ++i)
if (cvMetalTexture[i])
CFRelease(cvMetalTexture[i]);
@@ -43,33 +44,33 @@ AVFVideoBuffer::~AVFVideoBuffer()
CVPixelBufferRelease(m_buffer);
}
-AVFVideoBuffer::MapData AVFVideoBuffer::map(QVideoFrame::MapMode mode)
+AVFVideoBuffer::MapData AVFVideoBuffer::map(QtVideo::MapMode mode)
{
MapData mapData;
- if (m_mode == QVideoFrame::NotMapped) {
- CVPixelBufferLockBaseAddress(m_buffer, mode == QVideoFrame::ReadOnly
+ if (m_mode == QtVideo::MapMode::NotMapped) {
+ CVPixelBufferLockBaseAddress(m_buffer, mode == QtVideo::MapMode::ReadOnly
? kCVPixelBufferLock_ReadOnly
: 0);
m_mode = mode;
}
- mapData.nPlanes = CVPixelBufferGetPlaneCount(m_buffer);
- Q_ASSERT(mapData.nPlanes <= 3);
+ mapData.planeCount = CVPixelBufferGetPlaneCount(m_buffer);
+ Q_ASSERT(mapData.planeCount <= 3);
- if (!mapData.nPlanes) {
+ if (!mapData.planeCount) {
// single plane
mapData.bytesPerLine[0] = CVPixelBufferGetBytesPerRow(m_buffer);
mapData.data[0] = static_cast<uchar*>(CVPixelBufferGetBaseAddress(m_buffer));
- mapData.size[0] = CVPixelBufferGetDataSize(m_buffer);
- mapData.nPlanes = mapData.data[0] ? 1 : 0;
+ mapData.dataSize[0] = CVPixelBufferGetDataSize(m_buffer);
+ mapData.planeCount = mapData.data[0] ? 1 : 0;
return mapData;
}
// For a bi-planar or tri-planar format we have to set the parameters correctly:
- for (int i = 0; i < mapData.nPlanes; ++i) {
+ for (int i = 0; i < mapData.planeCount; ++i) {
mapData.bytesPerLine[i] = CVPixelBufferGetBytesPerRowOfPlane(m_buffer, i);
- mapData.size[i] = mapData.bytesPerLine[i]*CVPixelBufferGetHeightOfPlane(m_buffer, i);
+ mapData.dataSize[i] = mapData.bytesPerLine[i]*CVPixelBufferGetHeightOfPlane(m_buffer, i);
mapData.data[i] = static_cast<uchar*>(CVPixelBufferGetBaseAddressOfPlane(m_buffer, i));
}
@@ -78,11 +79,11 @@ AVFVideoBuffer::MapData AVFVideoBuffer::map(QVideoFrame::MapMode mode)
void AVFVideoBuffer::unmap()
{
- if (m_mode != QVideoFrame::NotMapped) {
- CVPixelBufferUnlockBaseAddress(m_buffer, m_mode == QVideoFrame::ReadOnly
+ if (m_mode != QtVideo::MapMode::NotMapped) {
+ CVPixelBufferUnlockBaseAddress(m_buffer, m_mode == QtVideo::MapMode::ReadOnly
? kCVPixelBufferLock_ReadOnly
: 0);
- m_mode = QVideoFrame::NotMapped;
+ m_mode = QtVideo::MapMode::NotMapped;
}
}
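The MapMode handling above reduces to a single CoreVideo lock flag; a small helper it could share (a sketch, not part of the patch):

    #include <CoreVideo/CVPixelBuffer.h>
    #include <QtMultimedia/qtvideo.h>

    static CVOptionFlags cvLockFlags(QtVideo::MapMode mode)
    {
        // Read-only maps take the cheaper read lock; writable maps pass no flag.
        return mode == QtVideo::MapMode::ReadOnly ? kCVPixelBufferLock_ReadOnly
                                                  : CVOptionFlags(0);
    }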
diff --git a/src/plugins/multimedia/darwin/avfvideobuffer_p.h b/src/plugins/multimedia/darwin/avfvideobuffer_p.h
index 69d7b7f45..f70961c15 100644
--- a/src/plugins/multimedia/darwin/avfvideobuffer_p.h
+++ b/src/plugins/multimedia/darwin/avfvideobuffer_p.h
@@ -15,8 +15,7 @@
// We mean it.
//
-#include <QtMultimedia/qvideoframe.h>
-#include <private/qabstractvideobuffer_p.h>
+#include <private/qhwvideobuffer_p.h>
#include <private/qcore_mac_p.h>
#include <QtCore/qobject.h>
@@ -31,14 +30,13 @@
QT_BEGIN_NAMESPACE
struct AVFMetalTexture;
-class AVFVideoBuffer : public QAbstractVideoBuffer
+class AVFVideoBuffer : public QHwVideoBuffer
{
public:
AVFVideoBuffer(AVFVideoSinkInterface *sink, CVImageBufferRef buffer);
~AVFVideoBuffer();
- QVideoFrame::MapMode mapMode() const { return m_mode; }
- MapData map(QVideoFrame::MapMode mode);
+ MapData map(QtVideo::MapMode mode);
void unmap();
virtual quint64 textureHandle(QRhi *, int plane) const;
@@ -57,7 +55,7 @@ private:
#endif
CVImageBufferRef m_buffer = nullptr;
- QVideoFrame::MapMode m_mode = QVideoFrame::NotMapped;
+ QtVideo::MapMode m_mode = QtVideo::MapMode::NotMapped;
QVideoFrameFormat m_format;
};
diff --git a/src/plugins/multimedia/darwin/camera/avfcamerarenderer.mm b/src/plugins/multimedia/darwin/camera/avfcamerarenderer.mm
index f2da5c6ed..0c9ab3f2c 100644
--- a/src/plugins/multimedia/darwin/camera/avfcamerarenderer.mm
+++ b/src/plugins/multimedia/darwin/camera/avfcamerarenderer.mm
@@ -1,8 +1,9 @@
// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-#include "private/qabstractvideobuffer_p.h"
+#include "qabstractvideobuffer.h"
#include "private/qcameradevice_p.h"
+#include "private/qvideoframe_p.h"
#include "avfcamerarenderer_p.h"
#include "avfcamerasession_p.h"
#include "avfcameraservice_p.h"
@@ -21,8 +22,6 @@
#include <QtGui/qopengl.h>
#endif
-#include <private/qabstractvideobuffer_p.h>
-
#include <QtMultimedia/qvideoframeformat.h>
QT_USE_NAMESPACE
@@ -63,14 +62,13 @@ QT_USE_NAMESPACE
// avfmediaassetwriter).
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
- AVFVideoBuffer *buffer = new AVFVideoBuffer(m_renderer, imageBuffer);
+ auto buffer = std::make_unique<AVFVideoBuffer>(m_renderer, imageBuffer);
auto format = buffer->videoFormat();
if (!format.isValid()) {
- delete buffer;
return;
}
- QVideoFrame frame(buffer, format);
+ QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(buffer), format);
m_renderer->syncHandleViewfinderFrame(frame);
}
@@ -240,11 +238,7 @@ void AVFCameraRenderer::handleViewfinderFrame()
}
if (m_sink && frame.isValid()) {
- // ### pass format to surface
- QVideoFrameFormat format = frame.surfaceFormat();
- if (m_needsHorizontalMirroring)
- format.setMirrored(true);
-
+ // frame.setMirrored(m_needsHorizontalMirroring) ?
m_sink->setVideoFrame(frame);
}
}
diff --git a/src/plugins/multimedia/darwin/camera/avfcamerautility.mm b/src/plugins/multimedia/darwin/camera/avfcamerautility.mm
index 0306a31ef..1864eb0e8 100644
--- a/src/plugins/multimedia/darwin/camera/avfcamerautility.mm
+++ b/src/plugins/multimedia/darwin/camera/avfcamerautility.mm
@@ -128,9 +128,10 @@ qt_convert_to_capture_device_format(AVCaptureDevice *captureDevice,
if (cvFormatValidator && !cvFormatValidator(cvPixFormat))
continue;
+ const float epsilon = 0.001f;
for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) {
- if (frameRateRange.minFrameRate >= cameraFormatPrivate->minFrameRate
- && frameRateRange.maxFrameRate <= cameraFormatPrivate->maxFrameRate
+ if (frameRateRange.minFrameRate >= cameraFormatPrivate->minFrameRate - epsilon
+ && frameRateRange.maxFrameRate <= cameraFormatPrivate->maxFrameRate + epsilon
&& newFormatMaxFrameRate < frameRateRange.maxFrameRate) {
newFormat = format;
newFormatMaxFrameRate = frameRateRange.maxFrameRate;
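The added epsilon makes the range check tolerant of rounding in AVFoundation's reported frame rates (for example 29.97 versus 29.969999). As a standalone predicate, the comparison introduced above is roughly:

    static bool fitsRequestedRange(float rangeMin, float rangeMax,
                                   float requestedMin, float requestedMax)
    {
        const float epsilon = 0.001f; // same tolerance as in the hunk above
        return rangeMin >= requestedMin - epsilon && rangeMax <= requestedMax + epsilon;
    }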
diff --git a/src/plugins/multimedia/darwin/camera/avfimagecapture.mm b/src/plugins/multimedia/darwin/camera/avfimagecapture.mm
index 07988f3e2..2ee7b0597 100644
--- a/src/plugins/multimedia/darwin/camera/avfimagecapture.mm
+++ b/src/plugins/multimedia/darwin/camera/avfimagecapture.mm
@@ -11,6 +11,7 @@
#include "private/qmediastoragelocation_p.h"
#include <private/qplatformimagecapture_p.h>
#include <private/qmemoryvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
#include <QtCore/qurl.h>
#include <QtCore/qfile.h>
@@ -118,8 +119,10 @@ int AVFImageCapture::doCapture(const QString &actualFileName)
QBuffer data(&jpgData);
QImageReader reader(&data, "JPEG");
QSize size = reader.size();
- QVideoFrame frame(new QMemoryVideoBuffer(QByteArray(jpgData.constData(), jpgData.size()), -1),
- QVideoFrameFormat(size, QVideoFrameFormat::Format_Jpeg));
+ auto buffer = std::make_unique<QMemoryVideoBuffer>(
+ QByteArray(jpgData.constData(), jpgData.size()), -1);
+ QVideoFrame frame = QVideoFramePrivate::createFrame(
+ std::move(buffer), QVideoFrameFormat(size, QVideoFrameFormat::Format_Jpeg));
QMetaObject::invokeMethod(this, "imageAvailable", Qt::QueuedConnection,
Q_ARG(int, request.captureId),
Q_ARG(QVideoFrame, frame));
diff --git a/src/plugins/multimedia/darwin/camera/avfmediaencoder.mm b/src/plugins/multimedia/darwin/camera/avfmediaencoder.mm
index 4a138d4e9..3fbc57995 100644
--- a/src/plugins/multimedia/darwin/camera/avfmediaencoder.mm
+++ b/src/plugins/multimedia/darwin/camera/avfmediaencoder.mm
@@ -479,7 +479,7 @@ void AVFMediaEncoder::record(QMediaEncoderSettings &settings)
if (!cameraControl && !audioInput) {
qWarning() << Q_FUNC_INFO << "Cannot record without any inputs";
- Q_EMIT error(QMediaRecorder::ResourceError, tr("No inputs specified"));
+ updateError(QMediaRecorder::ResourceError, tr("No inputs specified"));
return;
}
@@ -491,8 +491,8 @@ void AVFMediaEncoder::record(QMediaEncoderSettings &settings)
if (!audioOnly) {
if (!cameraControl || !cameraControl->isActive()) {
qCDebug(qLcCamera) << Q_FUNC_INFO << "can not start record while camera is not active";
- Q_EMIT error(QMediaRecorder::ResourceError,
- QMediaRecorderPrivate::msgFailedStartRecording());
+ updateError(QMediaRecorder::ResourceError,
+ QMediaRecorderPrivate::msgFailedStartRecording());
return;
}
}
@@ -506,13 +506,13 @@ void AVFMediaEncoder::record(QMediaEncoderSettings &settings)
NSURL *nsFileURL = fileURL.toNSURL();
if (!nsFileURL) {
qWarning() << Q_FUNC_INFO << "invalid output URL:" << fileURL;
- Q_EMIT error(QMediaRecorder::ResourceError, tr("Invalid output file URL"));
+ updateError(QMediaRecorder::ResourceError, tr("Invalid output file URL"));
return;
}
if (!qt_is_writable_file_URL(nsFileURL)) {
qWarning() << Q_FUNC_INFO << "invalid output URL:" << fileURL
<< "(the location is not writable)";
- Q_EMIT error(QMediaRecorder::ResourceError, tr("Non-writeable file location"));
+ updateError(QMediaRecorder::ResourceError, tr("Non-writeable file location"));
return;
}
if (qt_file_exists(nsFileURL)) {
@@ -520,7 +520,7 @@ void AVFMediaEncoder::record(QMediaEncoderSettings &settings)
// Objective-C exception, which is not good at all.
qWarning() << Q_FUNC_INFO << "invalid output URL:" << fileURL
<< "(file already exists)";
- Q_EMIT error(QMediaRecorder::ResourceError, tr("File already exists"));
+ updateError(QMediaRecorder::ResourceError, tr("File already exists"));
return;
}
@@ -555,8 +555,7 @@ void AVFMediaEncoder::record(QMediaEncoderSettings &settings)
[m_writer start];
} else {
[session startRunning];
- Q_EMIT error(QMediaRecorder::FormatError,
- QMediaRecorderPrivate::msgFailedStartRecording());
+ updateError(QMediaRecorder::FormatError, QMediaRecorderPrivate::msgFailedStartRecording());
}
}
@@ -632,7 +631,7 @@ void AVFMediaEncoder::assetWriterFinished()
void AVFMediaEncoder::assetWriterError(QString err)
{
- Q_EMIT error(QMediaRecorder::FormatError, err);
+ updateError(QMediaRecorder::FormatError, err);
if (m_state != QMediaRecorder::StoppedState)
stopWriter();
}
diff --git a/src/plugins/multimedia/darwin/camera/qavfcamerabase.mm b/src/plugins/multimedia/darwin/camera/qavfcamerabase.mm
index 9d99de0b9..11dfa99a8 100644
--- a/src/plugins/multimedia/darwin/camera/qavfcamerabase.mm
+++ b/src/plugins/multimedia/darwin/camera/qavfcamerabase.mm
@@ -758,12 +758,8 @@ bool QAVFCameraBase::isExposureModeSupported(QCamera::ExposureMode mode) const
if (mode != QCamera::ExposureManual)
return false;
- if (@available(macOS 10.15, *)) {
- AVCaptureDevice *captureDevice = device();
- return captureDevice && [captureDevice isExposureModeSupported:AVCaptureExposureModeCustom];
- }
-
- return false;
+ AVCaptureDevice *captureDevice = device();
+ return captureDevice && [captureDevice isExposureModeSupported:AVCaptureExposureModeCustom];
}
void QAVFCameraBase::applyFlashSettings()
diff --git a/src/plugins/multimedia/darwin/common/avfmetadata.mm b/src/plugins/multimedia/darwin/common/avfmetadata.mm
index da07f69c6..994ef9e42 100644
--- a/src/plugins/multimedia/darwin/common/avfmetadata.mm
+++ b/src/plugins/multimedia/darwin/common/avfmetadata.mm
@@ -110,12 +110,14 @@ const AVMetadataIDs keyToAVMetaDataID[] = {
// Orientation
{ nil, nil, AVMetadataIdentifierQuickTimeMetadataVideoOrientation, nil, nil, nil },
// Resolution
+ { nil, nil, nil, nil, nil, nil },
+ // HasHdrContent
{ nil, nil, nil, nil, nil, nil }
};
static AVMetadataIdentifier toIdentifier(QMediaMetaData::Key key, AVMetadataKeySpace keySpace)
{
- static_assert(sizeof(keyToAVMetaDataID)/sizeof(AVMetadataIDs) == QMediaMetaData::Key::Resolution + 1);
+ static_assert(sizeof(keyToAVMetaDataID) / sizeof(AVMetadataIDs) == QMediaMetaData::NumMetaData);
AVMetadataIdentifier identifier = nil;
if ([keySpace isEqualToString:AVMetadataKeySpaceiTunes]) {
@@ -132,7 +134,7 @@ static AVMetadataIdentifier toIdentifier(QMediaMetaData::Key key, AVMetadataKeyS
static std::optional<QMediaMetaData::Key> toKey(AVMetadataItem *item)
{
- static_assert(sizeof(keyToAVMetaDataID)/sizeof(AVMetadataIDs) == QMediaMetaData::Key::Resolution + 1);
+ static_assert(sizeof(keyToAVMetaDataID) / sizeof(AVMetadataIDs) == QMediaMetaData::NumMetaData);
// The item identifier may be different than the ones we support,
// so check by common key first, as it will get the metadata
@@ -181,7 +183,7 @@ static std::optional<QMediaMetaData::Key> toKey(AVMetadataItem *item)
itemKeySpace = ID3;
}
- for (int key = 0; key < QMediaMetaData::Resolution + 1; key++) {
+ for (int key = 0; key < QMediaMetaData::NumMetaData; key++) {
AVMetadataIdentifier idForKey = nil;
switch (itemKeySpace) {
case iTunes:
@@ -267,6 +269,23 @@ QMediaMetaData AVFMetaData::fromAssetTrack(AVAssetTrack *asset)
Q_UNUSED(mirrored);
metadata.insert(QMediaMetaData::Orientation, int(angle));
}
+
+ // add HDR content
+ if (metadata.value(QMediaMetaData::HasHdrContent).isNull()) {
+ auto hasHdrContent = false;
+
+ NSArray *formatDescriptions = [asset formatDescriptions];
+ for (id formatDescription in formatDescriptions) {
+ NSDictionary *extensions = (__bridge NSDictionary *)CMFormatDescriptionGetExtensions((CMFormatDescriptionRef)formatDescription);
+ NSString *transferFunction = extensions[(__bridge NSString *)kCMFormatDescriptionExtension_TransferFunction];
+ if ([transferFunction isEqualToString:(__bridge NSString *)kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ]) {
+ hasHdrContent = true;
+ break;
+ }
+ }
+
+ metadata.insert(QMediaMetaData::HasHdrContent, hasHdrContent);
+ }
}
return metadata;
}
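On the consumer side the new entry reads back like any other metadata key; a short sketch (QMediaMetaData::HasHdrContent is the key added by this change set):

    #include <QMediaMetaData>
    #include <QMediaPlayer>
    #include <QVariant>

    static bool sourceReportsHdr(const QMediaPlayer &player)
    {
        const QVariant hdr = player.metaData().value(QMediaMetaData::HasHdrContent);
        return hdr.isValid() && hdr.toBool();
    }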
diff --git a/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer.mm b/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer.mm
index b6d9622ac..694cc0e3d 100644
--- a/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer.mm
+++ b/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer.mm
@@ -12,6 +12,7 @@
#include <qpointer.h>
#include <QFileInfo>
#include <QtCore/qmath.h>
+#include <QtCore/qmutex.h>
#import <AVFoundation/AVFoundation.h>
@@ -59,6 +60,12 @@ static void *AVFMediaPlayerObserverCurrentItemDurationObservationContext = &AVFM
- (BOOL) resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
@end
+#ifdef Q_OS_IOS
+// Alas, no such thing as 'class variable', hence globals:
+static unsigned sessionActivationCount;
+static QMutex sessionMutex;
+#endif // Q_OS_IOS
+
@implementation AVFMediaPlayerObserver
{
@private
@@ -70,10 +77,39 @@ static void *AVFMediaPlayerObserverCurrentItemDurationObservationContext = &AVFM
BOOL m_bufferIsLikelyToKeepUp;
NSData *m_data;
NSString *m_mimeType;
+#ifdef Q_OS_IOS
+ BOOL m_activated;
+#endif
}
@synthesize m_player, m_playerItem, m_playerLayer, m_session;
+#ifdef Q_OS_IOS
+- (void)setSessionActive:(BOOL)active
+{
+ const QMutexLocker lock(&sessionMutex);
+ if (active) {
+ // Don't count the same player twice if already activated,
+ // unless it tried to deactivate first:
+ if (m_activated)
+ return;
+ if (!sessionActivationCount)
+ [AVAudioSession.sharedInstance setActive:YES error:nil];
+ ++sessionActivationCount;
+ m_activated = YES;
+ } else {
+ if (!sessionActivationCount || !m_activated) {
+ qWarning("Unbalanced audio session deactivation, ignoring.");
+ return;
+ }
+ --sessionActivationCount;
+ m_activated = NO;
+ if (!sessionActivationCount)
+ [AVAudioSession.sharedInstance setActive:NO error:nil];
+ }
+}
+#endif // Q_OS_IOS
+
- (AVFMediaPlayerObserver *) initWithMediaPlayerSession:(AVFMediaPlayer *)session
{
if (!(self = [super init]))
@@ -159,7 +195,7 @@ static void *AVFMediaPlayerObserverCurrentItemDurationObservationContext = &AVFM
if (m_playerLayer)
m_playerLayer.player = nil;
#if defined(Q_OS_IOS)
- [[AVAudioSession sharedInstance] setActive:NO error:nil];
+ [self setSessionActive:NO];
#endif
}
@@ -248,11 +284,12 @@ static void *AVFMediaPlayerObserverCurrentItemDurationObservationContext = &AVFM
m_player = [AVPlayer playerWithPlayerItem:m_playerItem];
[m_player retain];
- //Set the initial volume on new player object
+ //Set the initial audio output settings on new player object
if (self.session) {
auto *audioOutput = m_session->m_audioOutput;
m_player.volume = (audioOutput ? audioOutput->volume : 1.);
m_player.muted = (audioOutput ? audioOutput->muted : true);
+ m_session->updateAudioOutputDevice();
}
//Assign the output layer to the new player
@@ -279,7 +316,7 @@ static void *AVFMediaPlayerObserverCurrentItemDurationObservationContext = &AVFM
context:AVFMediaPlayerObserverCurrentItemDurationObservationContext];
#if defined(Q_OS_IOS)
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback withOptions:AVAudioSessionCategoryOptionMixWithOthers error:nil];
- [[AVAudioSession sharedInstance] setActive:YES error:nil];
+ [self setSessionActive:YES];
#endif
}
@@ -714,12 +751,12 @@ void AVFMediaPlayer::setAudioOutput(QPlatformAudioOutput *output)
m_audioOutput->q->disconnect(this);
m_audioOutput = output;
if (m_audioOutput) {
- connect(m_audioOutput->q, &QAudioOutput::deviceChanged, this, &AVFMediaPlayer::audioOutputChanged);
+ connect(m_audioOutput->q, &QAudioOutput::deviceChanged, this, &AVFMediaPlayer::updateAudioOutputDevice);
connect(m_audioOutput->q, &QAudioOutput::volumeChanged, this, &AVFMediaPlayer::setVolume);
connect(m_audioOutput->q, &QAudioOutput::mutedChanged, this, &AVFMediaPlayer::setMuted);
//connect(m_audioOutput->q, &QAudioOutput::audioRoleChanged, this, &AVFMediaPlayer::setAudioRole);
}
- audioOutputChanged();
+ updateAudioOutputDevice();
setMuted(m_audioOutput ? m_audioOutput->muted : true);
setVolume(m_audioOutput ? m_audioOutput->volume : 1.);
}
@@ -900,7 +937,7 @@ void AVFMediaPlayer::setMuted(bool muted)
player.muted = muted;
}
-void AVFMediaPlayer::audioOutputChanged()
+void AVFMediaPlayer::updateAudioOutputDevice()
{
#ifdef Q_OS_MACOS
AVPlayer *player = [static_cast<AVFMediaPlayerObserver*>(m_observer) player];
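The iOS-only activation counting above keeps the shared AVAudioSession active while at least one player needs it. Stripped of Objective-C, the bookkeeping is a mutex-guarded refcount; the per-observer m_activated flag that prevents double counting is omitted from this sketch:

    #include <mutex>

    class AudioSessionRefCount
    {
    public:
        void activate()
        {
            const std::lock_guard<std::mutex> lock(m_mutex);
            if (m_count++ == 0)
                setSharedSessionActive(true);   // first user activates the session
        }
        void deactivate()
        {
            const std::lock_guard<std::mutex> lock(m_mutex);
            if (m_count == 0)
                return;                         // unbalanced call, ignored (see the qWarning above)
            if (--m_count == 0)
                setSharedSessionActive(false);  // last user deactivates the session
        }
    private:
        void setSharedSessionActive(bool active); // placeholder for the AVAudioSession call
        std::mutex m_mutex;
        unsigned m_count = 0;
    };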
diff --git a/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer_p.h b/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer_p.h
index d04ab0818..6ac3aef46 100644
--- a/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer_p.h
+++ b/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer_p.h
@@ -89,7 +89,7 @@ public Q_SLOTS:
void setVolume(float volume);
void setMuted(bool muted);
- void audioOutputChanged();
+ void updateAudioOutputDevice();
void processEOS();
void processLoadStateChange(QMediaPlayer::PlaybackState newState);
diff --git a/src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol.mm b/src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol.mm
index 002d688eb..66687c931 100644
--- a/src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol.mm
+++ b/src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol.mm
@@ -5,6 +5,7 @@
#include "avfdisplaylink_p.h"
#include <avfvideobuffer_p.h>
#include "qavfhelpers_p.h"
+#include "private/qvideoframe_p.h"
#include <QtMultimedia/qvideoframeformat.h>
@@ -148,11 +149,12 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(width, height);
if (!pixelBuffer)
return;
- AVFVideoBuffer *buffer = new AVFVideoBuffer(this, pixelBuffer);
-// qDebug() << "Got pixelbuffer with format" << fmt << Qt::hex << CVPixelBufferGetPixelFormatType(pixelBuffer);
+ auto buffer = std::make_unique<AVFVideoBuffer>(this, pixelBuffer);
+ // qDebug() << "Got pixelbuffer with format" << fmt << Qt::hex <<
+ // CVPixelBufferGetPixelFormatType(pixelBuffer);
CVPixelBufferRelease(pixelBuffer);
- frame = QVideoFrame(buffer, buffer->videoFormat());
+ frame = QVideoFramePrivate::createFrame(std::move(buffer), buffer->videoFormat());
frame.setRotation(m_rotation);
frame.setMirrored(m_mirrored);
m_sink->setVideoFrame(frame);
diff --git a/src/plugins/multimedia/darwin/qavfhelpers.mm b/src/plugins/multimedia/darwin/qavfhelpers.mm
index 51ae9eedc..6921309ed 100644
--- a/src/plugins/multimedia/darwin/qavfhelpers.mm
+++ b/src/plugins/multimedia/darwin/qavfhelpers.mm
@@ -90,10 +90,8 @@ QVideoFrameFormat QAVFHelpers::videoFormatForImageBuffer(CVImageBufferRef buffer
} else if (CFEqual(cSpace, kCVImageBufferYCbCrMatrix_ITU_R_601_4)
|| CFEqual(cSpace, kCVImageBufferYCbCrMatrix_SMPTE_240M_1995)) {
colorSpace = QVideoFrameFormat::ColorSpace_BT601;
- } else if (@available(macOS 10.11, iOS 9.0, *)) {
- if (CFEqual(cSpace, kCVImageBufferYCbCrMatrix_ITU_R_2020)) {
- colorSpace = QVideoFrameFormat::ColorSpace_BT2020;
- }
+ } else if (CFEqual(cSpace, kCVImageBufferYCbCrMatrix_ITU_R_2020)) {
+ colorSpace = QVideoFrameFormat::ColorSpace_BT2020;
}
}
@@ -122,17 +120,12 @@ QVideoFrameFormat QAVFHelpers::videoFormatForImageBuffer(CVImageBufferRef buffer
colorTransfer = QVideoFrameFormat::ColorTransfer_Gamma22;
else if (g < 3.2)
colorTransfer = QVideoFrameFormat::ColorTransfer_Gamma28;
- }
- if (@available(macOS 10.12, iOS 11.0, *)) {
- if (CFEqual(cTransfer, kCVImageBufferTransferFunction_ITU_R_2020))
- colorTransfer = QVideoFrameFormat::ColorTransfer_BT709;
- }
- if (@available(macOS 10.12, iOS 11.0, *)) {
- if (CFEqual(cTransfer, kCVImageBufferTransferFunction_ITU_R_2100_HLG)) {
- colorTransfer = QVideoFrameFormat::ColorTransfer_STD_B67;
- } else if (CFEqual(cTransfer, kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ)) {
- colorTransfer = QVideoFrameFormat::ColorTransfer_ST2084;
- }
+ } else if (CFEqual(cTransfer, kCVImageBufferTransferFunction_ITU_R_2020)) {
+ colorTransfer = QVideoFrameFormat::ColorTransfer_BT709;
+ } else if (CFEqual(cTransfer, kCVImageBufferTransferFunction_ITU_R_2100_HLG)) {
+ colorTransfer = QVideoFrameFormat::ColorTransfer_STD_B67;
+ } else if (CFEqual(cTransfer, kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ)) {
+ colorTransfer = QVideoFrameFormat::ColorTransfer_ST2084;
}
}
diff --git a/src/plugins/multimedia/ffmpeg/CMakeLists.txt b/src/plugins/multimedia/ffmpeg/CMakeLists.txt
index 68a09b20f..c6ab93273 100644
--- a/src/plugins/multimedia/ffmpeg/CMakeLists.txt
+++ b/src/plugins/multimedia/ffmpeg/CMakeLists.txt
@@ -21,6 +21,7 @@ qt_internal_add_plugin(QFFmpegMediaPlugin
qffmpegavaudioformat.cpp qffmpegavaudioformat_p.h
qffmpegaudiodecoder.cpp qffmpegaudiodecoder_p.h
qffmpegaudioinput.cpp qffmpegaudioinput_p.h
+ qffmpegconverter.cpp qffmpegconverter_p.h
qffmpeghwaccel.cpp qffmpeghwaccel_p.h
qffmpegmediametadata.cpp qffmpegmediametadata_p.h
qffmpegmediaplayer.cpp qffmpegmediaplayer_p.h
@@ -36,7 +37,6 @@ qt_internal_add_plugin(QFFmpegMediaPlugin
qffmpegencodingformatcontext.cpp qffmpegencodingformatcontext_p.h
qgrabwindowsurfacecapture.cpp qgrabwindowsurfacecapture_p.h
qffmpegsurfacecapturegrabber.cpp qffmpegsurfacecapturegrabber_p.h
- qffmpegsymbolsresolve_p.h
qffmpegplaybackengine.cpp qffmpegplaybackengine_p.h
playbackengine/qffmpegplaybackenginedefs_p.h
@@ -66,6 +66,10 @@ qt_internal_add_plugin(QFFmpegMediaPlugin
recordingengine/qffmpegmuxer.cpp
recordingengine/qffmpegrecordingengine_p.h
recordingengine/qffmpegrecordingengine.cpp
+ recordingengine/qffmpegencodinginitializer_p.h
+ recordingengine/qffmpegencodinginitializer.cpp
+ recordingengine/qffmpegrecordingengineutils_p.h
+ recordingengine/qffmpegrecordingengineutils.cpp
recordingengine/qffmpegvideoencoder_p.h
recordingengine/qffmpegvideoencoder.cpp
recordingengine/qffmpegvideoencoderutils_p.h
@@ -80,75 +84,39 @@ qt_internal_add_plugin(QFFmpegMediaPlugin
Qt::CorePrivate
)
-if(DYNAMIC_RESOLVE_OPENSSL_SYMBOLS)
- if(NOT OPENSSL_INCLUDE_DIR AND OPENSSL_ROOT_DIR)
- set(OPENSSL_INCLUDE_DIR "${OPENSSL_ROOT_DIR}/include")
- endif()
-endif()
-
-qt_internal_extend_target(QFFmpegMediaPlugin CONDITION DYNAMIC_RESOLVE_OPENSSL_SYMBOLS
- SOURCES
- qffmpegopensslsymbols.cpp
- INCLUDE_DIRECTORIES
- ${OPENSSL_INCLUDE_DIR}
-)
+if (LINUX OR ANDROID)
+ # We have two options: link shared stubs to QFFmpegMediaPlugin, or
+ # compile the needed stubs statically into the FFmpeg plugin.
+ # Currently, we choose the second option so that the user can trivially
+ # remove the FFmpeg libs we ship.
+ # Set QT_LINK_STUBS_TO_FFMPEG_PLUGIN = TRUE to change the behavior.
-if (ENABLE_DYNAMIC_RESOLVE_VAAPI_SYMBOLS)
- if (QT_FEATURE_vaapi AND NOT DYNAMIC_RESOLVE_VAAPI_SYMBOLS)
- if (NOT FFMPEG_SHARED_LIBRARIES)
- message(WARNING
- "QT_FEATURE_vaapi is found but statically built FFmpeg doesn't include vaapi,"
- "however dynamic symbols resolve is possible.")
- endif()
+ # set(QT_LINK_STUBS_TO_FFMPEG_PLUGIN TRUE)
- set(DYNAMIC_RESOLVE_VAAPI_SYMBOLS TRUE CACHE INTERNAL "")
- elseif (NOT QT_FEATURE_vaapi AND DYNAMIC_RESOLVE_VAAPI_SYMBOLS)
-
- message(FATAL_ERROR
- "QT_FEATURE_vaapi is not found "
- "but FFmpeg includes VAAPI and dynamic symbols resolve is enabled.")
- endif()
+ include("${CMAKE_CURRENT_SOURCE_DIR}/cmake/QtAddFFmpegStubs.cmake")
+ qt_internal_multimedia_add_ffmpeg_stubs()
endif()
-qt_internal_extend_target(QFFmpegMediaPlugin
- CONDITION
- DYNAMIC_RESOLVE_OPENSSL_SYMBOLS OR DYNAMIC_RESOLVE_VAAPI_SYMBOLS
- SOURCES
- qffmpegsymbolsresolveutils.cpp qffmpegsymbolsresolveutils_p.h
-)
-
-function (__propagate_to_compile_definitions VAR)
- if (${VAR})
- target_compile_definitions(QFFmpegMediaPlugin PRIVATE ${VAR})
- endif()
-endfunction()
-__propagate_to_compile_definitions(DYNAMIC_RESOLVE_OPENSSL_SYMBOLS)
-__propagate_to_compile_definitions(DYNAMIC_RESOLVE_VAAPI_SYMBOLS)
-__propagate_to_compile_definitions(DYNAMIC_RESOLVE_VA_DRM_SYMBOLS)
-__propagate_to_compile_definitions(DYNAMIC_RESOLVE_VA_X11_SYMBOLS)
-
-qt_internal_extend_target(QFFmpegMediaPlugin CONDITION DYNAMIC_RESOLVE_VAAPI_SYMBOLS
- SOURCES
- qffmpegvaapisymbols.cpp
- INCLUDE_DIRECTORIES
- "$<TARGET_PROPERTY:VAAPI::VAAPI,INTERFACE_INCLUDE_DIRECTORIES>"
-)
+if (QT_FEATURE_vaapi)
+ qt_internal_extend_target(QFFmpegMediaPlugin
+ SOURCES
+ qffmpeghwaccel_vaapi.cpp qffmpeghwaccel_vaapi_p.h
+ NO_UNITY_BUILD_SOURCES
+ # Conflicts with macros defined in X11.h, and Xlib.h
+ qffmpeghwaccel_vaapi.cpp
+ LIBRARIES
+ EGL::EGL
+ )
-qt_internal_extend_target(QFFmpegMediaPlugin
- CONDITION NOT DYNAMIC_RESOLVE_VAAPI_SYMBOLS AND QT_FEATURE_vaapi
- LIBRARIES VAAPI::VAAPI
-)
+ list(FIND FFMPEG_STUBS "va" va_stub_index)
+ if (NOT QT_LINK_STUBS_TO_FFMPEG_PLUGIN AND (FFMPEG_SHARED_LIBRARIES OR ${va_stub_index} EQUAL -1))
+ target_compile_definitions(QFFmpegMediaPlugin PRIVATE Q_FFMPEG_PLUGIN_STUBS_ONLY)
+ qt_internal_multimedia_find_vaapi_soversion()
+ qt_internal_multimedia_add_private_stub_to_plugin("va")
+ endif()
+endif()
-qt_internal_extend_target(QFFmpegMediaPlugin CONDITION QT_FEATURE_vaapi
- SOURCES
- qffmpeghwaccel_vaapi.cpp qffmpeghwaccel_vaapi_p.h
- NO_UNITY_BUILD_SOURCES
- # Conflicts with macros defined in X11.h, and Xlib.h
- qffmpeghwaccel_vaapi.cpp
- LIBRARIES
- EGL::EGL
-)
qt_internal_extend_target(QFFmpegMediaPlugin CONDITION APPLE
SOURCES
@@ -279,13 +247,14 @@ endif()
# TODO: get libs from FindFFmpeg.cmake
set(ffmpeg_libs FFmpeg::avformat FFmpeg::avcodec FFmpeg::swresample FFmpeg::swscale FFmpeg::avutil)
-if (QT_DEPLOY_FFMPEG AND NOT BUILD_SHARED_LIBS)
+if (QT_DEPLOY_FFMPEG AND NOT BUILD_SHARED_LIBS AND NOT UIKIT)
message(FATAL_ERROR "QT_DEPLOY_FFMPEG is not implemented yet for static builds")
endif()
-if (QT_DEPLOY_FFMPEG AND FFMPEG_SHARED_LIBRARIES AND BUILD_SHARED_LIBS)
+if (QT_DEPLOY_FFMPEG AND FFMPEG_SHARED_LIBRARIES AND (BUILD_SHARED_LIBS OR UIKIT))
include("${CMAKE_CURRENT_SOURCE_DIR}/cmake/QtDeployFFmpeg.cmake")
qt_internal_multimedia_copy_or_install_ffmpeg()
endif()
qt_internal_extend_target(QFFmpegMediaPlugin LIBRARIES ${ffmpeg_libs})
+
diff --git a/src/plugins/multimedia/ffmpeg/cmake/QtAddFFmpegStubs.cmake b/src/plugins/multimedia/ffmpeg/cmake/QtAddFFmpegStubs.cmake
new file mode 100644
index 000000000..5778ae4d2
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/cmake/QtAddFFmpegStubs.cmake
@@ -0,0 +1,199 @@
+# Copyright (C) 2024 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+# Utilities
+
+function(qt_internal_multimedia_find_ffmpeg_stubs)
+ foreach (stub ${FFMPEG_STUBS})
+ if (${stub} MATCHES ${vaapi_regex})
+ set(ffmpeg_has_vaapi TRUE PARENT_SCOPE)
+ elseif (${stub} MATCHES ${openssl_regex})
+ set(ffmpeg_has_openssl TRUE PARENT_SCOPE)
+ else()
+ set(unknown_ffmpeg_stubs
+ ${unknown_ffmpeg_stubs} ${stub} PARENT_SCOPE)
+ endif()
+ endforeach()
+endfunction()
+
+function(qt_internal_multimedia_check_ffmpeg_stubs_configuration)
+ if (NOT LINUX AND NOT ANDROID)
+ message(FATAL_ERROR "Currently, stubs are supported on Linux and Android")
+ endif()
+
+ if (unknown_ffmpeg_stubs)
+ message(FATAL_ERROR "Unknown ffmpeg stubs: ${unknown_ffmpeg_stubs}")
+ endif()
+
+ if (BUILD_SHARED_LIBS AND FFMPEG_SHARED_LIBRARIES AND FFMPEG_STUBS AND NOT QT_DEPLOY_FFMPEG)
+ message(FATAL_ERROR
+ "FFmpeg stubs have been found but QT_DEPLOY_FFMPEG is not specified. "
+ "Set -DQT_DEPLOY_FFMPEG=TRUE to continue.")
+ endif()
+
+ if (ffmpeg_has_vaapi AND NOT QT_FEATURE_vaapi)
+ message(FATAL_ERROR
+ "QT_FEATURE_vaapi is OFF but FFmpeg includes VAAPI.")
+ elseif (NOT ffmpeg_has_vaapi AND QT_FEATURE_vaapi)
+ message(WARNING
+ "QT_FEATURE_vaapi is ON "
+ "but FFmpeg includes VAAPI and dynamic symbols resolve is enabled.")
+ elseif(ffmpeg_has_vaapi AND NOT VAAPI_SUFFIX)
+ message(FATAL_ERROR "Cannot find VAAPI_SUFFIX, fix FindVAAPI.cmake")
+ elseif (ffmpeg_has_vaapi AND "${VAAPI_SUFFIX}" MATCHES "^1\\.32.*")
+ # drop the ancient vaapi version to avoid ABI problems
+ message(FATAL_ERROR "VAAPI ${VAAPI_SUFFIX} is not supported")
+ endif()
+
+ if (ffmpeg_has_openssl AND NOT QT_FEATURE_openssl)
+ message(FATAL_ERROR
+ "QT_FEATURE_openssl is OFF but FFmpeg includes OpenSSL.")
+ endif()
+endfunction()
+
+macro(qt_internal_multimedia_find_vaapi_soversion)
+ string(REGEX MATCH "^[0-9]+" va_soversion "${VAAPI_SUFFIX}")
+
+ set(va-drm_soversion "${va_soversion}")
+ set(va-x11_soversion "${va_soversion}")
+endmacro()
+
+macro(qt_internal_multimedia_find_openssl_soversion)
+ # Update OpenSSL variables since OPENSSL_SSL_LIBRARY is not propagated to this place in some cases.
+ qt_find_package(OpenSSL)
+
+ if (NOT OPENSSL_INCLUDE_DIR AND OPENSSL_ROOT_DIR)
+ set(OPENSSL_INCLUDE_DIR "${OPENSSL_ROOT_DIR}/include")
+ endif()
+
+ if (LINUX)
+ if (NOT OPENSSL_SSL_LIBRARY)
+ message(FATAL_ERROR "OPENSSL_SSL_LIBRARY is not found")
+ endif()
+
+ get_filename_component(ssl_lib_realpath "${OPENSSL_SSL_LIBRARY}" REALPATH)
+ string(REGEX MATCH "[0-9]+(\\.[0-9]+)*$" ssl_soversion "${ssl_lib_realpath}")
+ string(REGEX REPLACE "^3(\\..*|$)" "3" ssl_soversion "${ssl_soversion}")
+ endif()
+
+ #TODO: enhance finding openssl version and throw an error if it's not found.
+
+ set(crypto_soversion "${ssl_soversion}")
+endmacro()
+
+function(qt_internal_multimedia_set_stub_version_script stub stub_target)
+ if ("${stub}" MATCHES "${openssl_regex}")
+ if ("${ssl_soversion}" STREQUAL "3" OR
+ (NOT ssl_soversion AND "${OPENSSL_VERSION}" MATCHES "^3\\..*"))
+ # Symbols in OpenSSL 1.* are not versioned.
+ set(file_name "openssl3.ver")
+ endif()
+ elseif("${stub}" STREQUAL "va")
+ set(file_name "va.ver")
+ endif()
+
+ if (file_name)
+ set(version_script "${CMAKE_CURRENT_SOURCE_DIR}/symbolstubs/${file_name}")
+ set_property(TARGET ${stub_target} APPEND_STRING
+ PROPERTY LINK_FLAGS " -Wl,--version-script=${version_script}")
+ set_target_properties(${stub_target} PROPERTIES LINK_DEPENDS ${version_script})
+ source_group("Stubs Version Scripts" FILES ${version_script})
+ endif()
+endfunction()
+
+function(qt_internal_multimedia_set_stub_output stub stub_target)
+ set(output_dir "${QT_BUILD_DIR}/${INSTALL_LIBDIR}")
+
+ set_target_properties(${stub_target} PROPERTIES
+ RUNTIME_OUTPUT_DIRECTORY "${output_dir}"
+ LIBRARY_OUTPUT_DIRECTORY "${output_dir}"
+ )
+
+ if (${stub}_soversion)
+ set_target_properties(${stub_target} PROPERTIES
+ VERSION "${${stub}_soversion}"
+ SOVERSION "${${stub}_soversion}")
+ endif()
+
+ qt_apply_rpaths(TARGET ${stub_target} INSTALL_PATH "${INSTALL_LIBDIR}" RELATIVE_RPATH)
+endfunction()
+
+function(qt_internal_multimedia_set_stub_include_directories stub target)
+ qt_internal_extend_target(${target}
+ CONDITION ${stub} MATCHES "${openssl_regex}"
+ INCLUDE_DIRECTORIES "${OPENSSL_INCLUDE_DIR}")
+
+ qt_internal_extend_target(${target}
+ CONDITION ${stub} MATCHES "${vaapi_regex}"
+ INCLUDE_DIRECTORIES "${VAAPI_INCLUDE_DIR}")
+endfunction()
+
+function(qt_internal_multimedia_set_stub_symbols_visibility stub stub_target)
+ set_target_properties(${stub_target} PROPERTIES
+ C_VISIBILITY_PRESET hidden
+ CXX_VISIBILITY_PRESET hidden)
+ target_compile_definitions(${stub_target} PRIVATE Q_EXPORT_STUB_SYMBOLS)
+endfunction()
+
+function(qt_internal_multimedia_set_stub_libraries stub stub_target)
+ qt_internal_extend_target(${stub_target} LIBRARIES Qt::Core Qt::MultimediaPrivate)
+
+ if (QT_LINK_STUBS_TO_FFMPEG_PLUGIN AND ${stub} STREQUAL "va")
+ qt_internal_extend_target(QFFmpegMediaPlugin LIBRARIES ${stub_target})
+ endif()
+endfunction()
+
+function(qt_internal_multimedia_define_stub_needed_version stub target)
+ string(TOUPPER ${stub} prefix)
+ string(REPLACE "-" "_" prefix ${prefix})
+
+ target_compile_definitions(${target} PRIVATE
+ "${prefix}_NEEDED_SOVERSION=\"${${stub}_soversion}\"")
+endfunction()
+
+function(qt_internal_multimedia_add_shared_stub stub)
+ set(stub_target "Qt${PROJECT_VERSION_MAJOR}FFmpegStub-${stub}")
+
+ qt_add_library(${stub_target} SHARED "symbolstubs/qffmpegsymbols-${stub}.cpp")
+
+ qt_internal_multimedia_set_stub_include_directories(${stub} ${stub_target})
+ qt_internal_multimedia_set_stub_output(${stub} ${stub_target})
+ qt_internal_multimedia_set_stub_symbols_visibility(${stub} ${stub_target})
+ qt_internal_multimedia_set_stub_version_script(${stub} ${stub_target})
+ qt_internal_multimedia_define_stub_needed_version(${stub} ${stub_target})
+ qt_internal_multimedia_set_stub_libraries(${stub} ${stub_target})
+
+ qt_install(TARGETS ${stub_target} LIBRARY NAMELINK_SKIP)
+endfunction()
+
+function(qt_internal_multimedia_add_private_stub_to_plugin stub)
+ qt_internal_multimedia_set_stub_include_directories(${stub} QFFmpegMediaPlugin)
+ qt_internal_multimedia_define_stub_needed_version(${stub} QFFmpegMediaPlugin)
+ qt_internal_extend_target(QFFmpegMediaPlugin SOURCES "symbolstubs/qffmpegsymbols-${stub}.cpp")
+endfunction()
+
+# Main function
+
+set(vaapi_regex "^(va|va-drm|va-x11)$")
+set(openssl_regex "^(ssl|crypto)$")
+
+function(qt_internal_multimedia_add_ffmpeg_stubs)
+ qt_internal_multimedia_find_ffmpeg_stubs()
+ qt_internal_multimedia_check_ffmpeg_stubs_configuration()
+
+ if (ffmpeg_has_vaapi)
+ qt_internal_multimedia_find_vaapi_soversion()
+ endif()
+
+ if (ffmpeg_has_openssl)
+ qt_internal_multimedia_find_openssl_soversion()
+ endif()
+
+ foreach (stub ${FFMPEG_STUBS})
+ if (FFMPEG_SHARED_LIBRARIES)
+ qt_internal_multimedia_add_shared_stub("${stub}")
+ else()
+ qt_internal_multimedia_add_private_stub_to_plugin("${stub}")
+ endif()
+ endforeach()
+endfunction()
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer.cpp b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer.cpp
index 88caac941..773e573e5 100644
--- a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer.cpp
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer.cpp
@@ -4,6 +4,7 @@
#include "playbackengine/qffmpegaudiorenderer_p.h"
#include "qaudiosink.h"
#include "qaudiooutput.h"
+#include "qaudiobufferoutput.h"
#include "private/qplatformaudiooutput_p.h"
#include <QtCore/qloggingcategory.h>
@@ -12,7 +13,7 @@
QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLcAudioRenderer, "qt.multimedia.ffmpeg.audiorenderer");
+Q_STATIC_LOGGING_CATEGORY(qLcAudioRenderer, "qt.multimedia.ffmpeg.audiorenderer");
namespace QFFmpeg {
@@ -56,10 +57,24 @@ qreal sampleRateFactor() {
return result;
}
+
+QAudioFormat audioFormatFromFrame(const Frame &frame)
+{
+ return QFFmpegMediaFormatInfo::audioFormatFromCodecParameters(
+ frame.codec()->stream()->codecpar);
+}
+
+std::unique_ptr<QFFmpegResampler> createResampler(const Frame &frame,
+ const QAudioFormat &outputFormat)
+{
+ return std::make_unique<QFFmpegResampler>(frame.codec(), outputFormat, frame.pts());
+}
+
} // namespace
-AudioRenderer::AudioRenderer(const TimeController &tc, QAudioOutput *output)
- : Renderer(tc), m_output(output)
+AudioRenderer::AudioRenderer(const TimeController &tc, QAudioOutput *output,
+ QAudioBufferOutput *bufferOutput)
+ : Renderer(tc), m_output(output), m_bufferOutput(bufferOutput)
{
if (output) {
// TODO: implement the signals in QPlatformAudioOutput and connect to them, QTBUG-112294
@@ -74,6 +89,12 @@ void AudioRenderer::setOutput(QAudioOutput *output)
setOutputInternal(m_output, output, [this](QAudioOutput *) { onDeviceChanged(); });
}
+void AudioRenderer::setOutput(QAudioBufferOutput *bufferOutput)
+{
+ setOutputInternal(m_bufferOutput, bufferOutput,
+ [this](QAudioBufferOutput *) { m_bufferOutputChanged = true; });
+}
+
AudioRenderer::~AudioRenderer()
{
freeOutput();
@@ -93,14 +114,29 @@ void AudioRenderer::onDeviceChanged()
Renderer::RenderingResult AudioRenderer::renderInternal(Frame frame)
{
if (frame.isValid())
- updateOutput(frame.codec());
+ updateOutputs(frame);
+
+ // push to sink first in order not to waste time on resampling
+ // for QAudioBufferOutput
+ const RenderingResult result = pushFrameToOutput(frame);
+
+ if (m_lastFramePushDone)
+ pushFrameToBufferOutput(frame);
+ // else // skip pushing the same data to QAudioBufferOutput
+
+ m_lastFramePushDone = result.done;
+
+ return result;
+}
+AudioRenderer::RenderingResult AudioRenderer::pushFrameToOutput(const Frame &frame)
+{
if (!m_ioDevice || !m_resampler)
return {};
Q_ASSERT(m_sink);
- auto firstFrameFlagGuard = qScopeGuard([&]() { m_firstFrame = false; });
+ auto firstFrameFlagGuard = qScopeGuard([&]() { m_firstFrameToSink = false; });
const SynchronizationStamp syncStamp{ m_sink->state(), m_sink->bytesFree(),
m_bufferedData.offset, Clock::now() };
@@ -143,6 +179,22 @@ Renderer::RenderingResult AudioRenderer::renderInternal(Frame frame)
return {};
}
+void AudioRenderer::pushFrameToBufferOutput(const Frame &frame)
+{
+ if (!m_bufferOutput)
+ return;
+
+ Q_ASSERT(m_bufferOutputResampler);
+
+ if (frame.isValid()) {
+ // TODO: get buffer from m_bufferedData if resample formats are equal
+ QAudioBuffer buffer = m_bufferOutputResampler->resample(frame.avFrame());
+ emit m_bufferOutput->audioBufferReceived(buffer);
+ } else {
+ emit m_bufferOutput->audioBufferReceived({});
+ }
+}
+
void AudioRenderer::onPlaybackRateChanged()
{
m_resampler.reset();
@@ -154,7 +206,7 @@ int AudioRenderer::timerInterval() const
const auto interval = Renderer::timerInterval();
- if (m_firstFrame || !m_sink || m_sink->state() != QAudio::IdleState
+ if (m_firstFrameToSink || !m_sink || m_sink->state() != QAudio::IdleState
|| interval > MaxFixableInterval)
return interval;
@@ -163,30 +215,18 @@ int AudioRenderer::timerInterval() const
void AudioRenderer::onPauseChanged()
{
- m_firstFrame = true;
+ m_firstFrameToSink = true;
Renderer::onPauseChanged();
}
-void AudioRenderer::initResempler(const Codec *codec)
+void AudioRenderer::initResempler(const Frame &frame)
{
// We recreate resampler whenever format is changed
- /* AVSampleFormat requiredFormat =
- QFFmpegMediaFormatInfo::avSampleFormat(m_format.sampleFormat());
-
- #if QT_FFMPEG_OLD_CHANNEL_LAYOUT
- qCDebug(qLcAudioRenderer) << "init resampler" << requiredFormat
- << codec->stream()->codecpar->channels;
- #else
- qCDebug(qLcAudioRenderer) << "init resampler" << requiredFormat
- << codec->stream()->codecpar->ch_layout.nb_channels;
- #endif
- */
-
- auto resamplerFormat = m_format;
+ auto resamplerFormat = m_sinkFormat;
resamplerFormat.setSampleRate(
- qRound(m_format.sampleRate() / playbackRate() * sampleRateFactor()));
- m_resampler = std::make_unique<QFFmpegResampler>(codec, resamplerFormat);
+ qRound(m_sinkFormat.sampleRate() / playbackRate() * sampleRateFactor()));
+ m_resampler = createResampler(frame, resamplerFormat);
}
void AudioRenderer::freeOutput()
@@ -203,34 +243,47 @@ void AudioRenderer::freeOutput()
m_bufferedData = {};
m_deviceChanged = false;
+ m_sinkFormat = {};
m_timings = {};
m_bufferLoadingInfo = {};
}
-void AudioRenderer::updateOutput(const Codec *codec)
+void AudioRenderer::updateOutputs(const Frame &frame)
{
if (m_deviceChanged) {
freeOutput();
- m_format = {};
m_resampler.reset();
}
+ if (m_bufferOutput) {
+ if (m_bufferOutputChanged) {
+ m_bufferOutputChanged = false;
+ m_bufferOutputResampler.reset();
+ }
+
+ if (!m_bufferOutputResampler) {
+ QAudioFormat outputFormat = m_bufferOutput->format();
+ if (!outputFormat.isValid())
+ outputFormat = audioFormatFromFrame(frame);
+ m_bufferOutputResampler = createResampler(frame, outputFormat);
+ }
+ }
+
if (!m_output)
return;
- if (!m_format.isValid()) {
- m_format =
- QFFmpegMediaFormatInfo::audioFormatFromCodecParameters(codec->stream()->codecpar);
- m_format.setChannelConfig(m_output->device().channelConfiguration());
+ if (!m_sinkFormat.isValid()) {
+ m_sinkFormat = audioFormatFromFrame(frame);
+ m_sinkFormat.setChannelConfig(m_output->device().channelConfiguration());
}
if (!m_sink) {
// Insert a delay here to test time offset synchronization, e.g. QThread::sleep(1)
- m_sink = std::make_unique<QAudioSink>(m_output->device(), m_format);
+ m_sink = std::make_unique<QAudioSink>(m_output->device(), m_sinkFormat);
updateVolume();
- m_sink->setBufferSize(m_format.bytesForDuration(DesiredBufferTime.count()));
+ m_sink->setBufferSize(m_sinkFormat.bytesForDuration(DesiredBufferTime.count()));
m_ioDevice = m_sink->start();
- m_firstFrame = true;
+ m_firstFrameToSink = true;
connect(m_sink.get(), &QAudioSink::stateChanged, this,
&AudioRenderer::onAudioSinkStateChanged);
@@ -244,9 +297,8 @@ void AudioRenderer::updateOutput(const Codec *codec)
&& m_timings.maxSoundDelay < m_timings.actualBufferDuration);
}
- if (!m_resampler) {
- initResempler(codec);
- }
+ if (!m_resampler)
+ initResempler(frame);
}
void AudioRenderer::updateSynchronization(const SynchronizationStamp &stamp, const Frame &frame)
@@ -269,7 +321,7 @@ void AudioRenderer::updateSynchronization(const SynchronizationStamp &stamp, con
// clang-format off
qCDebug(qLcAudioRenderer)
<< "Change rendering time:"
- << "\n First frame:" << m_firstFrame
+ << "\n First frame:" << m_firstFrameToSink
<< "\n Delay (frame+buffer-written):" << currentFrameDelay << "+"
<< bufferLoadingTime << "-"
<< writtenTime << "="
@@ -306,7 +358,7 @@ void AudioRenderer::updateSynchronization(const SynchronizationStamp &stamp, con
: qMax(soundDelay, fixedDelay);
if (stamp.timePoint - m_bufferLoadingInfo.timePoint > BufferLoadingMeasureTime
- || (m_firstFrame && isHigh) || shouldHandleIdle) {
+ || (m_firstFrameToSink && isHigh) || shouldHandleIdle) {
const auto targetDelay = isHigh
? (m_timings.maxSoundDelay + m_timings.minSoundDelay) / 2
: m_timings.minSoundDelay + DurationBias;
@@ -339,13 +391,13 @@ microseconds AudioRenderer::bufferLoadingTime(const SynchronizationStamp &syncSt
void AudioRenderer::onAudioSinkStateChanged(QAudio::State state)
{
- if (state == QAudio::IdleState && !m_firstFrame)
+ if (state == QAudio::IdleState && !m_firstFrameToSink)
scheduleNextStep();
}
microseconds AudioRenderer::durationForBytes(qsizetype bytes) const
{
- return microseconds(m_format.durationForBytes(static_cast<qint32>(bytes)));
+ return microseconds(m_sinkFormat.durationForBytes(static_cast<qint32>(bytes)));
}
} // namespace QFFmpeg
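On the application side, the buffers the renderer now pushes arrive through QAudioBufferOutput's audioBufferReceived signal; an empty buffer marks end of stream (see pushFrameToBufferOutput above). A hedged usage sketch; the QMediaPlayer setter name is an assumption modelled on the existing output setters:

    #include <QAudioBuffer>
    #include <QAudioBufferOutput>
    #include <QMediaPlayer>
    #include <QObject>

    static void tapAudio(QMediaPlayer &player, QAudioBufferOutput &output)
    {
        player.setAudioBufferOutput(&output); // assumption: mirrors setAudioOutput()
        QObject::connect(&output, &QAudioBufferOutput::audioBufferReceived,
                         &output, [](const QAudioBuffer &buffer) {
                             if (!buffer.isValid())
                                 return; // end of stream
                             // consume PCM here, e.g. buffer.constData<float>()
                         });
    }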
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer_p.h b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer_p.h
index 196cd4fd0..9a22a8a48 100644
--- a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer_p.h
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer_p.h
@@ -21,6 +21,7 @@
QT_BEGIN_NAMESPACE
class QAudioOutput;
+class QAudioBufferOutput;
class QAudioSink;
class QFFmpegResampler;
@@ -30,10 +31,12 @@ class AudioRenderer : public Renderer
{
Q_OBJECT
public:
- AudioRenderer(const TimeController &tc, QAudioOutput *output);
+ AudioRenderer(const TimeController &tc, QAudioOutput *output, QAudioBufferOutput *bufferOutput);
void setOutput(QAudioOutput *output);
+ void setOutput(QAudioBufferOutput *bufferOutput);
+
~AudioRenderer() override;
protected:
@@ -73,6 +76,10 @@ protected:
RenderingResult renderInternal(Frame frame) override;
+ RenderingResult pushFrameToOutput(const Frame &frame);
+
+ void pushFrameToBufferOutput(const Frame &frame);
+
void onPlaybackRateChanged() override;
int timerInterval() const override;
@@ -81,9 +88,9 @@ protected:
void freeOutput();
- void updateOutput(const Codec *codec);
+ void updateOutputs(const Frame &frame);
- void initResempler(const Codec *codec);
+ void initResempler(const Frame &frame);
void onDeviceChanged();
@@ -99,18 +106,23 @@ protected:
private:
QPointer<QAudioOutput> m_output;
+ QPointer<QAudioBufferOutput> m_bufferOutput;
std::unique_ptr<QAudioSink> m_sink;
AudioTimings m_timings;
BufferLoadingInfo m_bufferLoadingInfo;
std::unique_ptr<QFFmpegResampler> m_resampler;
- QAudioFormat m_format;
+ std::unique_ptr<QFFmpegResampler> m_bufferOutputResampler;
+ QAudioFormat m_sinkFormat;
BufferedDataWithOffset m_bufferedData;
QIODevice *m_ioDevice = nullptr;
+ bool m_lastFramePushDone = true;
+
bool m_deviceChanged = false;
+ bool m_bufferOutputChanged = false;
bool m_drained = false;
- bool m_firstFrame = true;
+ bool m_firstFrameToSink = true;
};
} // namespace QFFmpeg
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec.cpp b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec.cpp
index 457b3603d..96cfc1bfe 100644
--- a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec.cpp
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec.cpp
@@ -6,7 +6,7 @@
QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLcPlaybackEngineCodec, "qt.multimedia.playbackengine.codec");
+Q_STATIC_LOGGING_CATEGORY(qLcPlaybackEngineCodec, "qt.multimedia.playbackengine.codec");
namespace QFFmpeg {
@@ -23,17 +23,46 @@ QMaybe<Codec> Codec::create(AVStream *stream, AVFormatContext *formatContext)
if (!stream)
return { "Invalid stream" };
+ if (stream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
+ auto hwCodec = create(stream, formatContext, Hw);
+ if (hwCodec)
+ return hwCodec;
+
+ qCInfo(qLcPlaybackEngineCodec) << hwCodec.error();
+ }
+
+ auto codec = create(stream, formatContext, Sw);
+ if (!codec)
+ qCWarning(qLcPlaybackEngineCodec) << codec.error();
+
+ return codec;
+}
+
+AVRational Codec::pixelAspectRatio(AVFrame *frame) const
+{
+ // does the same as av_guess_sample_aspect_ratio, but more efficient
+ return d->pixelAspectRatio.num && d->pixelAspectRatio.den ? d->pixelAspectRatio
+ : frame->sample_aspect_ratio;
+}
+
+QMaybe<Codec> Codec::create(AVStream *stream, AVFormatContext *formatContext,
+ VideoCodecCreationPolicy videoCodecPolicy)
+{
+ Q_ASSERT(stream);
+
+ if (videoCodecPolicy == Hw && stream->codecpar->codec_type != AVMEDIA_TYPE_VIDEO)
+ Q_ASSERT(!"Codec::create has been called with Hw policy on a non-video stream");
+
const AVCodec *decoder = nullptr;
std::unique_ptr<QFFmpeg::HWAccel> hwAccel;
- if (stream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
+ if (videoCodecPolicy == Hw)
std::tie(decoder, hwAccel) = HWAccel::findDecoderWithHwAccel(stream->codecpar->codec_id);
-
- if (!decoder)
+ else
decoder = QFFmpeg::findAVDecoder(stream->codecpar->codec_id);
if (!decoder)
- return { "Failed to find a valid FFmpeg decoder" };
+ return { QString("No %1 decoder found").arg(videoCodecPolicy == Hw ? "HW" : "SW") };
qCDebug(qLcPlaybackEngineCodec) << "found decoder" << decoder->name << "for id" << decoder->id;
@@ -51,7 +80,7 @@ QMaybe<Codec> Codec::create(AVStream *stream, AVFormatContext *formatContext)
int ret = avcodec_parameters_to_context(context.get(), stream->codecpar);
if (ret < 0)
- return { "Failed to set FFmpeg codec parameters" };
+ return QStringLiteral("Failed to set FFmpeg codec parameters: %1").arg(err2str(ret));
// ### This still gives errors about wrong HW formats (as we accept all of them)
// But it would be good to get so we can filter out pixel format we don't support natively
@@ -64,19 +93,13 @@ QMaybe<Codec> Codec::create(AVStream *stream, AVFormatContext *formatContext)
applyExperimentalCodecOptions(decoder, opts);
ret = avcodec_open2(context.get(), decoder, opts);
+
if (ret < 0)
- return QString("Failed to open FFmpeg codec context " + err2str(ret));
+ return QStringLiteral("Failed to open FFmpeg codec context: %1").arg(err2str(ret));
return Codec(new Data(std::move(context), stream, formatContext, std::move(hwAccel)));
}
-AVRational Codec::pixelAspectRatio(AVFrame *frame) const
-{
- // does the same as av_guess_sample_aspect_ratio, but more efficient
- return d->pixelAspectRatio.num && d->pixelAspectRatio.den ? d->pixelAspectRatio
- : frame->sample_aspect_ratio;
-}
-
QT_END_NAMESPACE
} // namespace QFFmpeg
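Codec::create now tries a hardware decoder first for video streams, logs a failure as informational, and falls back to software. Simplified to std::optional in place of QMaybe, the control flow is:

    #include <optional>

    struct Decoder { /* wraps an opened codec context */ };

    // Placeholder creation paths; the real ones take the stream and format context.
    static std::optional<Decoder> tryCreateHwDecoder() { return std::nullopt; }
    static std::optional<Decoder> tryCreateSwDecoder() { return Decoder{}; }

    static std::optional<Decoder> createDecoder(bool isVideoStream)
    {
        if (isVideoStream) {
            if (auto hw = tryCreateHwDecoder())
                return hw;  // prefer hardware decoding when a HW decoder opens
            // a HW failure is only logged; fall through to the software path
        }
        return tryCreateSwDecoder();
    }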
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec_p.h b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec_p.h
index 449fb1f65..b6866ed6b 100644
--- a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec_p.h
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec_p.h
@@ -51,6 +51,13 @@ public:
qint64 toUs(qint64 ts) const { return timeStampUs(ts, d->stream->time_base).value_or(0); }
private:
+ enum VideoCodecCreationPolicy {
+ Hw,
+ Sw,
+ };
+
+ static QMaybe<Codec> create(AVStream *stream, AVFormatContext *formatContext,
+ VideoCodecCreationPolicy videoCodecPolicy);
Codec(Data *data) : d(data) { }
QExplicitlySharedDataPointer<Data> d;
};
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegdemuxer.cpp b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegdemuxer.cpp
index 8cced835c..f11d3e811 100644
--- a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegdemuxer.cpp
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegdemuxer.cpp
@@ -14,7 +14,7 @@ static constexpr qint64 MaxBufferedSize = 32 * 1024 * 1024;
namespace QFFmpeg {
-static Q_LOGGING_CATEGORY(qLcDemuxer, "qt.multimedia.ffmpeg.demuxer");
+Q_STATIC_LOGGING_CATEGORY(qLcDemuxer, "qt.multimedia.ffmpeg.demuxer");
static qint64 streamTimeToUs(const AVStream *stream, qint64 time)
{
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegmediadataholder.cpp b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegmediadataholder.cpp
index fbb75dd44..3bb9aad16 100644
--- a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegmediadataholder.cpp
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegmediadataholder.cpp
@@ -19,7 +19,7 @@ extern "C" {
QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLcMediaDataHolder, "qt.multimedia.ffmpeg.mediadataholder")
+Q_STATIC_LOGGING_CATEGORY(qLcMediaDataHolder, "qt.multimedia.ffmpeg.mediadataholder")
namespace QFFmpeg {
@@ -65,6 +65,25 @@ static int streamOrientation(const AVStream *stream)
return rotation < 0 ? -rotation % 360 : -rotation % 360 + 360;
}
+
+static bool colorTransferSupportsHdr(const AVStream *stream)
+{
+ if (!stream)
+ return false;
+
+ const AVCodecParameters *codecPar = stream->codecpar;
+ if (!codecPar)
+ return false;
+
+ const QVideoFrameFormat::ColorTransfer colorTransfer = fromAvColorTransfer(codecPar->color_trc);
+
+ // Assume that content is using HDR if the color transfer supports high
+ // dynamic range. The video may still not utilize the extended range,
+ // but we can't determine the actual range without decoding frames.
+ return colorTransfer == QVideoFrameFormat::ColorTransfer_ST2084
+ || colorTransfer == QVideoFrameFormat::ColorTransfer_STD_B67;
+}
+
QtVideo::Rotation MediaDataHolder::rotation() const
{
int orientation = m_metaData.value(QMediaMetaData::Orientation).toInt();
@@ -97,6 +116,7 @@ static void insertMediaData(QMediaMetaData &metaData, QPlatformMediaPlayer::Trac
metaData.insert(QMediaMetaData::VideoFrameRate,
qreal(stream->avg_frame_rate.num) / qreal(stream->avg_frame_rate.den));
metaData.insert(QMediaMetaData::Orientation, QVariant::fromValue(streamOrientation(stream)));
+ metaData.insert(QMediaMetaData::HasHdrContent, colorTransferSupportsHdr(stream));
break;
case QPlatformMediaPlayer::AudioStream:
metaData.insert(QMediaMetaData::AudioBitRate, (int)codecPar->bit_rate);
@@ -151,6 +171,10 @@ loadMedia(const QUrl &mediaUrl, QIODevice *stream, const std::shared_ptr<ICancel
constexpr auto NetworkTimeoutUs = "5000000";
av_dict_set(dict, "timeout", NetworkTimeoutUs, 0);
+ const QByteArray protocolWhitelist = qgetenv("QT_FFMPEG_PROTOCOL_WHITELIST");
+ if (!protocolWhitelist.isNull())
+ av_dict_set(dict, "protocol_whitelist", protocolWhitelist.data(), 0);
+
context->interrupt_callback.opaque = cancelToken.get();
context->interrupt_callback.callback = [](void *opaque) {
const auto *cancelToken = static_cast<const ICancelToken *>(opaque);
@@ -189,6 +213,7 @@ loadMedia(const QUrl &mediaUrl, QIODevice *stream, const std::shared_ptr<ICancel
#endif
return context;
}
+
} // namespace
MediaDataHolder::Maybe MediaDataHolder::create(const QUrl &url, QIODevice *stream,
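Note: a minimal sketch of how an application could use the new QT_FFMPEG_PROTOCOL_WHITELIST environment variable; the protocol list shown is only an illustration (valid names come from FFmpeg), and the variable must be set before the backend opens the source.

    #include <QtCore/QCoreApplication>
    #include <QtCore/QUrl>
    #include <QtMultimedia/QMediaPlayer>

    int main(int argc, char *argv[])
    {
        // Illustrative whitelist; not a recommendation from this patch.
        qputenv("QT_FFMPEG_PROTOCOL_WHITELIST", "file,crypto,http,https,tcp,tls");

        QCoreApplication app(argc, argv);
        QMediaPlayer player;
        player.setSource(QUrl(QStringLiteral("https://example.com/clip.mp4")));
        return app.exec();
    }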
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegrenderer.cpp b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegrenderer.cpp
index e763c786b..5382ff023 100644
--- a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegrenderer.cpp
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegrenderer.cpp
@@ -8,7 +8,7 @@ QT_BEGIN_NAMESPACE
namespace QFFmpeg {
-static Q_LOGGING_CATEGORY(qLcRenderer, "qt.multimedia.ffmpeg.renderer");
+Q_STATIC_LOGGING_CATEGORY(qLcRenderer, "qt.multimedia.ffmpeg.renderer");
Renderer::Renderer(const TimeController &tc, const std::chrono::microseconds &seekPosTimeOffset)
: m_timeController(tc),
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegstreamdecoder.cpp b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegstreamdecoder.cpp
index 2f40c53aa..2d9d63b90 100644
--- a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegstreamdecoder.cpp
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegstreamdecoder.cpp
@@ -7,7 +7,7 @@
QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLcStreamDecoder, "qt.multimedia.ffmpeg.streamdecoder");
+Q_STATIC_LOGGING_CATEGORY(qLcStreamDecoder, "qt.multimedia.ffmpeg.streamdecoder");
namespace QFFmpeg {
@@ -163,6 +163,11 @@ void StreamDecoder::receiveAVFrames()
break;
}
+
+ // Avoid starvation on FFmpeg decoders with a fixed-size frame pool
+ if (m_trackType == QPlatformMediaPlayer::VideoStream)
+ avFrame = copyFromHwPool(std::move(avFrame));
+
onFrameFound({ m_offset, std::move(avFrame), m_codec, 0, id() });
}
}
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegvideorenderer.cpp b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegvideorenderer.cpp
index 7c8e90552..dceb00f83 100644
--- a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegvideorenderer.cpp
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegvideorenderer.cpp
@@ -4,6 +4,7 @@
#include "playbackengine/qffmpegvideorenderer_p.h"
#include "qffmpegvideobuffer_p.h"
#include "qvideosink.h"
+#include "private/qvideoframe_p.h"
QT_BEGIN_NAMESPACE
@@ -62,10 +63,10 @@ VideoRenderer::RenderingResult VideoRenderer::renderInternal(Frame frame)
format.setColorTransfer(buffer->colorTransfer());
format.setColorRange(buffer->colorRange());
format.setMaxLuminance(buffer->maxNits());
- QVideoFrame videoFrame(buffer.release(), format);
+ format.setRotation(m_rotation);
+ QVideoFrame videoFrame = QVideoFramePrivate::createFrame(std::move(buffer), format);
videoFrame.setStartTime(frame.pts());
videoFrame.setEndTime(frame.end());
- videoFrame.setRotation(m_rotation);
m_sink->setVideoFrame(videoFrame);
return {};
diff --git a/src/plugins/multimedia/ffmpeg/qandroidcamera.cpp b/src/plugins/multimedia/ffmpeg/qandroidcamera.cpp
index bf01a4e30..42a0a12df 100644
--- a/src/plugins/multimedia/ffmpeg/qandroidcamera.cpp
+++ b/src/plugins/multimedia/ffmpeg/qandroidcamera.cpp
@@ -26,20 +26,8 @@
extern "C" {
#include "libavutil/hwcontext.h"
-#include "libavutil/pixfmt.h"
}
-Q_DECLARE_JNI_CLASS(QtCamera2, "org/qtproject/qt/android/multimedia/QtCamera2");
-Q_DECLARE_JNI_CLASS(QtVideoDeviceManager,
- "org/qtproject/qt/android/multimedia/QtVideoDeviceManager");
-
-Q_DECLARE_JNI_CLASS(AndroidImageFormat, "android/graphics/ImageFormat");
-
-Q_DECLARE_JNI_CLASS(AndroidImage, "android/media/Image")
-Q_DECLARE_JNI_TYPE(AndroidImagePlaneArray, "[Landroid/media/Image$Plane;")
-Q_DECLARE_JNI_CLASS(JavaByteBuffer, "java/nio/ByteBuffer")
-Q_DECLARE_JNI_TYPE(StringArray, "[Ljava/lang/String;")
-
QT_BEGIN_NAMESPACE
static Q_LOGGING_CATEGORY(qLCAndroidCamera, "qt.multimedia.ffmpeg.androidCamera");
@@ -49,7 +37,7 @@ Q_GLOBAL_STATIC(QReadWriteLock, rwLock)
namespace {
-QCameraFormat getDefaultCameraFormat()
+QCameraFormat getDefaultCameraFormat(const QCameraDevice & cameraDevice)
{
// default settings
QCameraFormatPrivate *defaultFormat = new QCameraFormatPrivate{
@@ -58,7 +46,12 @@ QCameraFormat getDefaultCameraFormat()
.minFrameRate = 12,
.maxFrameRate = 30,
};
- return defaultFormat->create();
+ QCameraFormat format = defaultFormat->create();
+
+ if (!cameraDevice.videoFormats().empty() && !cameraDevice.videoFormats().contains(format))
+ return cameraDevice.videoFormats().first();
+
+ return format;
}
bool checkCameraPermission()
@@ -98,7 +91,7 @@ QAndroidCamera::QAndroidCamera(QCamera *camera) : QPlatformCamera(camera)
if (camera) {
m_cameraDevice = camera->cameraDevice();
m_cameraFormat = !camera->cameraFormat().isNull() ? camera->cameraFormat()
- : getDefaultCameraFormat();
+ : getDefaultCameraFormat(m_cameraDevice);
updateCameraCharacteristics();
}
@@ -129,7 +122,7 @@ void QAndroidCamera::setCamera(const QCameraDevice &camera)
m_cameraDevice = camera;
updateCameraCharacteristics();
- m_cameraFormat = getDefaultCameraFormat();
+ m_cameraFormat = getDefaultCameraFormat(camera);
if (active)
setActive(true);
@@ -252,7 +245,7 @@ void QAndroidCamera::setActive(bool active)
return;
if (!m_jniCamera.isValid()) {
- emit error(QCamera::CameraError, "No connection to Android Camera2 API");
+ updateError(QCamera::CameraError, QStringLiteral("No connection to Android Camera2 API"));
return;
}
@@ -262,7 +255,7 @@ void QAndroidCamera::setActive(bool active)
int height = m_cameraFormat.resolution().height();
if (width < 0 || height < 0) {
- m_cameraFormat = getDefaultCameraFormat();
+ m_cameraFormat = getDefaultCameraFormat(m_cameraDevice);
width = m_cameraFormat.resolution().width();
height = m_cameraFormat.resolution().height();
}
@@ -288,8 +281,8 @@ void QAndroidCamera::setActive(bool active)
if (!canOpen) {
g_qcameras->remove(m_cameraDevice.id());
setState(State::Closed);
- emit error(QCamera::CameraError,
- QString("Failed to start camera: ").append(m_cameraDevice.description()));
+ updateError(QCamera::CameraError,
+ QString("Failed to start camera: ").append(m_cameraDevice.description()));
}
} else {
m_jniCamera.callMethod<void>("stopAndClose");
@@ -316,8 +309,8 @@ void QAndroidCamera::setState(QAndroidCamera::State newState)
m_state = State::Closed;
- emit error(QCamera::CameraError,
- QString("Failed to start Camera %1").arg(m_cameraDevice.description()));
+ updateError(QCamera::CameraError,
+ QString("Failed to start Camera %1").arg(m_cameraDevice.description()));
}
if (m_state == State::Closed && newState == State::WaitingOpen)
@@ -332,9 +325,11 @@ void QAndroidCamera::setState(QAndroidCamera::State newState)
bool QAndroidCamera::setCameraFormat(const QCameraFormat &format)
{
- const auto chosenFormat = format.isNull() ? getDefaultCameraFormat() : format;
+ const auto chosenFormat = format.isNull() ? getDefaultCameraFormat(m_cameraDevice) : format;
- if (chosenFormat == m_cameraFormat || !m_cameraDevice.videoFormats().contains(chosenFormat))
+ if (chosenFormat == m_cameraFormat)
+ return true;
+ if (!m_cameraDevice.videoFormats().contains(chosenFormat))
return false;
m_cameraFormat = chosenFormat;
@@ -521,10 +516,10 @@ void QAndroidCamera::onCameraDisconnect()
void QAndroidCamera::onCameraError(int reason)
{
- emit error(QCamera::CameraError,
- QString("Capture error with Camera %1. Camera2 Api error code: %2")
- .arg(m_cameraDevice.description())
- .arg(reason));
+ updateError(QCamera::CameraError,
+ QString("Capture error with Camera %1. Camera2 Api error code: %2")
+ .arg(m_cameraDevice.description())
+ .arg(reason));
}
void QAndroidCamera::onSessionActive()
@@ -552,10 +547,10 @@ void QAndroidCamera::onCaptureSessionFailed(int reason, long frameNumber)
{
Q_UNUSED(frameNumber);
- emit error(QCamera::CameraError,
- QString("Capture session failure with Camera %1. Camera2 Api error code: %2")
- .arg(m_cameraDevice.description())
- .arg(reason));
+ updateError(QCamera::CameraError,
+ QStringLiteral("Capture session failure with Camera %1. Camera2 Api error code: %2")
+ .arg(m_cameraDevice.description())
+ .arg(reason));
}
// JNI logic
diff --git a/src/plugins/multimedia/ffmpeg/qandroidcameraframe.cpp b/src/plugins/multimedia/ffmpeg/qandroidcameraframe.cpp
index ef088e6d7..0bdf3e07f 100644
--- a/src/plugins/multimedia/ffmpeg/qandroidcameraframe.cpp
+++ b/src/plugins/multimedia/ffmpeg/qandroidcameraframe.cpp
@@ -7,15 +7,17 @@
#include <QtCore/qjnitypes.h>
#include <QtCore/QLoggingCategory>
-Q_DECLARE_JNI_CLASS(AndroidImageFormat, "android/graphics/ImageFormat");
-
-Q_DECLARE_JNI_CLASS(AndroidImage, "android/media/Image")
-Q_DECLARE_JNI_TYPE(AndroidImagePlaneArray, "[Landroid/media/Image$Plane;")
-Q_DECLARE_JNI_CLASS(JavaByteBuffer, "java/nio/ByteBuffer")
-
QT_BEGIN_NAMESPACE
static Q_LOGGING_CATEGORY(qLCAndroidCameraFrame, "qt.multimedia.ffmpeg.android.camera.frame");
+namespace {
+bool isWorkaroundForEmulatorNeeded() {
+ const static bool workaroundForEmulator
+ = QtJniTypes::QtVideoDeviceManager::callStaticMethod<jboolean>("isEmulator");
+ return workaroundForEmulator;
+}
+}
+
bool QAndroidCameraFrame::parse(const QJniObject &frame)
{
QJniEnvironment jniEnv;
@@ -86,9 +88,12 @@ bool QAndroidCameraFrame::parse(const QJniObject &frame)
}
if (pixelStrides[1] == 1)
calculedPixelFormat = QVideoFrameFormat::Format_YUV420P;
- else if (pixelStrides[1] == 2 && abs(buffer[1] - buffer[2]) == 1)
- // this can be NV21, but it will converted below
- calculedPixelFormat = QVideoFrameFormat::Format_NV12;
+ else if (pixelStrides[1] == 2) {
+ if (buffer[1] - buffer[2] == -1) // Interleaved UVUV -> NV12
+ calculedPixelFormat = QVideoFrameFormat::Format_NV12;
+ else if (buffer[1] - buffer[2] == 1) // Interleaved VUVU -> NV21
+ calculedPixelFormat = QVideoFrameFormat::Format_NV21;
+ }
break;
case AndroidImageFormat::HEIC:
// QImage cannot parse HEIC
@@ -130,19 +135,44 @@ bool QAndroidCameraFrame::parse(const QJniObject &frame)
m_planes[mapIndex].data = buffer[arrayIndex];
};
+ int width = frame.callMethod<jint>("getWidth");
+ int height = frame.callMethod<jint>("getHeight");
+ m_size = QSize(width, height);
+
switch (calculedPixelFormat) {
case QVideoFrameFormat::Format_YUV420P:
m_numberPlanes = 3;
copyPlane(0, 0);
copyPlane(1, 1);
copyPlane(2, 2);
+
+ if (isWorkaroundForEmulatorNeeded()) {
+ for (int i = 0; i < 3; ++i) {
+ const int dataSize = (i == 0) ? width * height : width * height / 4;
+ m_planes[i].data = new uint8_t[dataSize];
+ memcpy(m_planes[i].data, buffer[i], dataSize);
+ }
+ }
+
m_pixelFormat = QVideoFrameFormat::Format_YUV420P;
break;
case QVideoFrameFormat::Format_NV12:
+ case QVideoFrameFormat::Format_NV21:
+ // Y-plane and combined interleaved UV-plane
m_numberPlanes = 2;
copyPlane(0, 0);
- copyPlane(1, 1);
- m_pixelFormat = QVideoFrameFormat::Format_NV12;
+
+ // Android reports U and V planes as planes[1] and planes[2] respectively, regardless of the
+ // order of interleaved samples. We point to whichever is first in memory.
+ copyPlane(1, calculedPixelFormat == QVideoFrameFormat::Format_NV21 ? 2 : 1);
+
+ // With interleaved UV plane, Android reports the size of each plane as the smallest size
+ // that includes all samples of that plane. For example, if the UV plane is [u, v, u, v],
+ // the size of the U-plane is 3, not 4. With FFmpeg we need to count the total number of
+ // bytes in the UV-plane, which is 1 more than what Android reports.
+ m_planes[1].size++;
+
+ m_pixelFormat = calculedPixelFormat;
break;
case QVideoFrameFormat::Format_Jpeg:
qCWarning(qLCAndroidCameraFrame)
@@ -161,10 +191,6 @@ bool QAndroidCameraFrame::parse(const QJniObject &frame)
long timestamp = frame.callMethod<jlong>("getTimestamp");
m_timestamp = timestamp / 1000;
- int width = frame.callMethod<jint>("getWidth");
- int height = frame.callMethod<jint>("getHeight");
- m_size = QSize(width, height);
-
return true;
}
@@ -193,6 +219,13 @@ QAndroidCameraFrame::~QAndroidCameraFrame()
QJniEnvironment jniEnv;
if (m_frame)
jniEnv->DeleteGlobalRef(m_frame);
+
+ if (isWorkaroundForEmulatorNeeded()) {
+ if (m_pixelFormat == QVideoFrameFormat::Format_YUV420P) {
+ for (int i = 0; i < 3; ++i)
+ delete[] m_planes[i].data;
+ }
+ }
}
QT_END_NAMESPACE
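Note: a worked example of the interleaved UV-plane size adjustment above, assuming an 8x8 NV21 frame (purely illustrative). The interleaved VU plane holds 8*8/2 = 32 bytes (16 V and 16 U samples). Android reports the U plane (planes[1], starting at byte 1) and the V plane (planes[2], starting at byte 0) each with size 31, measured from the plane's first to its last sample. FFmpeg's second plane must cover the whole interleaved buffer, so the code copies whichever plane starts first (V for NV21, U for NV12) and bumps the reported size by one: 31 + 1 = 32.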
diff --git a/src/plugins/multimedia/ffmpeg/qandroidcameraframe_p.h b/src/plugins/multimedia/ffmpeg/qandroidcameraframe_p.h
index 23a737f7d..f55c066ae 100644
--- a/src/plugins/multimedia/ffmpeg/qandroidcameraframe_p.h
+++ b/src/plugins/multimedia/ffmpeg/qandroidcameraframe_p.h
@@ -17,6 +17,25 @@
#include <QVideoFrameFormat>
#include <QJniObject>
+#include <QtCore/qjnitypes.h>
+
+Q_DECLARE_JNI_CLASS(QtCamera2, "org/qtproject/qt/android/multimedia/QtCamera2");
+Q_DECLARE_JNI_CLASS(QtVideoDeviceManager,
+ "org/qtproject/qt/android/multimedia/QtVideoDeviceManager");
+
+Q_DECLARE_JNI_CLASS(AndroidImage, "android/media/Image");
+Q_DECLARE_JNI_CLASS(AndroidImageFormat, "android/graphics/ImageFormat");
+Q_DECLARE_JNI_CLASS(AndroidImagePlane, "android/media/Image$Plane");
+Q_DECLARE_JNI_CLASS(JavaByteBuffer, "java/nio/ByteBuffer");
+
+#ifndef QT_DECLARE_JNI_CLASS_STANDARD_TYPES
+Q_DECLARE_JNI_CLASS(String, "java/lang/String");
+#endif
+
+namespace QtJniTypes {
+using AndroidImagePlaneArray = QJniArray<AndroidImagePlane>;
+using StringArray = QJniArray<String>;
+}
class QAndroidCameraFrame
{
@@ -36,7 +55,7 @@ public:
int numberPlanes() const { return m_numberPlanes; }
Plane plane(int index) const
{
- if (index < 0 || index > numberPlanes())
+ if (index < 0 || index >= numberPlanes())
return {};
return m_planes[index];
diff --git a/src/plugins/multimedia/ffmpeg/qandroidvideodevices.cpp b/src/plugins/multimedia/ffmpeg/qandroidvideodevices.cpp
index fd4221d55..0116171d0 100644
--- a/src/plugins/multimedia/ffmpeg/qandroidvideodevices.cpp
+++ b/src/plugins/multimedia/ffmpeg/qandroidvideodevices.cpp
@@ -2,6 +2,7 @@
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
#include "qandroidvideodevices_p.h"
+#include "qandroidcameraframe_p.h"
#include <private/qcameradevice_p.h>
@@ -9,16 +10,11 @@
#include <QtCore/qcoreapplication.h>
#include <QtCore/private/qandroidextras_p.h>
#include <QtCore/qcoreapplication_platform.h>
-#include <QJniEnvironment>
+#include <QtCore/qjnienvironment.h>
#include <jni.h>
static Q_LOGGING_CATEGORY(qLCAndroidVideoDevices, "qt.multimedia.ffmpeg.android.videoDevices")
-Q_DECLARE_JNI_CLASS(QtVideoDeviceManager,
- "org/qtproject/qt/android/multimedia/QtVideoDeviceManager");
-Q_DECLARE_JNI_TYPE(StringArray, "[Ljava/lang/String;")
-Q_DECLARE_JNI_CLASS(AndroidImageFormat, "android/graphics/ImageFormat");
-
QCameraFormat createCameraFormat(int width, int height, int fpsMin, int fpsMax)
{
QCameraFormatPrivate *format = new QCameraFormatPrivate();
diff --git a/src/plugins/multimedia/ffmpeg/qavfcamera.mm b/src/plugins/multimedia/ffmpeg/qavfcamera.mm
index 38b743e65..891c4b376 100644
--- a/src/plugins/multimedia/ffmpeg/qavfcamera.mm
+++ b/src/plugins/multimedia/ffmpeg/qavfcamera.mm
@@ -306,6 +306,9 @@ uint32_t QAVFCamera::setPixelFormat(QVideoFrameFormat::PixelFormat cameraPixelFo
QSize QAVFCamera::adjustedResolution() const
{
+#ifdef Q_OS_MACOS
+ return m_cameraFormat.resolution();
+#else
// Check that we have matching dimensions.
QSize resolution = m_cameraFormat.resolution();
AVCaptureConnection *connection = [m_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
@@ -321,6 +324,7 @@ QSize QAVFCamera::adjustedResolution() const
resolution.transpose();
return resolution;
+#endif // Q_OS_MACOS
}
void QAVFCamera::syncHandleFrame(const QVideoFrame &frame)
diff --git a/src/plugins/multimedia/ffmpeg/qavfsamplebufferdelegate.mm b/src/plugins/multimedia/ffmpeg/qavfsamplebufferdelegate.mm
index 9acca8566..ecdce8266 100644
--- a/src/plugins/multimedia/ffmpeg/qavfsamplebufferdelegate.mm
+++ b/src/plugins/multimedia/ffmpeg/qavfsamplebufferdelegate.mm
@@ -8,6 +8,7 @@
#include "qffmpeghwaccel_p.h"
#include "qavfhelpers_p.h"
#include "qffmpegvideobuffer_p.h"
+#include "private/qvideoframe_p.h"
#undef AVMediaType
@@ -25,64 +26,63 @@ namespace {
class CVImageVideoBuffer : public QAbstractVideoBuffer
{
public:
- CVImageVideoBuffer(CVImageBufferRef imageBuffer)
- : QAbstractVideoBuffer(QVideoFrame::NoHandle), m_buffer(imageBuffer)
+ CVImageVideoBuffer(CVImageBufferRef imageBuffer) : m_buffer(imageBuffer)
{
CVPixelBufferRetain(imageBuffer);
}
~CVImageVideoBuffer()
{
- CVImageVideoBuffer::unmap();
+ Q_ASSERT(m_mode == QtVideo::MapMode::NotMapped);
CVPixelBufferRelease(m_buffer);
}
- CVImageVideoBuffer::MapData map(QVideoFrame::MapMode mode) override
+ CVImageVideoBuffer::MapData map(QtVideo::MapMode mode) override
{
MapData mapData;
- if (m_mode == QVideoFrame::NotMapped) {
+ if (m_mode == QtVideo::MapMode::NotMapped) {
CVPixelBufferLockBaseAddress(
- m_buffer, mode == QVideoFrame::ReadOnly ? kCVPixelBufferLock_ReadOnly : 0);
+ m_buffer, mode == QtVideo::MapMode::ReadOnly ? kCVPixelBufferLock_ReadOnly : 0);
m_mode = mode;
}
- mapData.nPlanes = CVPixelBufferGetPlaneCount(m_buffer);
- Q_ASSERT(mapData.nPlanes <= 3);
+ mapData.planeCount = CVPixelBufferGetPlaneCount(m_buffer);
+ Q_ASSERT(mapData.planeCount <= 3);
- if (!mapData.nPlanes) {
+ if (!mapData.planeCount) {
// single plane
mapData.bytesPerLine[0] = CVPixelBufferGetBytesPerRow(m_buffer);
mapData.data[0] = static_cast<uchar *>(CVPixelBufferGetBaseAddress(m_buffer));
- mapData.size[0] = CVPixelBufferGetDataSize(m_buffer);
- mapData.nPlanes = mapData.data[0] ? 1 : 0;
+ mapData.dataSize[0] = CVPixelBufferGetDataSize(m_buffer);
+ mapData.planeCount = mapData.data[0] ? 1 : 0;
return mapData;
}
// For a bi-planar or tri-planar format we have to set the parameters correctly:
- for (int i = 0; i < mapData.nPlanes; ++i) {
+ for (int i = 0; i < mapData.planeCount; ++i) {
mapData.bytesPerLine[i] = CVPixelBufferGetBytesPerRowOfPlane(m_buffer, i);
- mapData.size[i] = mapData.bytesPerLine[i] * CVPixelBufferGetHeightOfPlane(m_buffer, i);
+ mapData.dataSize[i] = mapData.bytesPerLine[i] * CVPixelBufferGetHeightOfPlane(m_buffer, i);
mapData.data[i] = static_cast<uchar *>(CVPixelBufferGetBaseAddressOfPlane(m_buffer, i));
}
return mapData;
}
- QVideoFrame::MapMode mapMode() const override { return m_mode; }
-
void unmap() override
{
- if (m_mode != QVideoFrame::NotMapped) {
+ if (m_mode != QtVideo::MapMode::NotMapped) {
CVPixelBufferUnlockBaseAddress(
- m_buffer, m_mode == QVideoFrame::ReadOnly ? kCVPixelBufferLock_ReadOnly : 0);
- m_mode = QVideoFrame::NotMapped;
+ m_buffer, m_mode == QtVideo::MapMode::ReadOnly ? kCVPixelBufferLock_ReadOnly : 0);
+ m_mode = QtVideo::MapMode::NotMapped;
}
}
+ QVideoFrameFormat format() const override { return {}; }
+
private:
CVImageBufferRef m_buffer;
- QVideoFrame::MapMode m_mode = QVideoFrame::NotMapped;
+ QtVideo::MapMode m_mode = QtVideo::MapMode::NotMapped;
};
}
@@ -147,7 +147,8 @@ static QVideoFrame createHwVideoFrame(QAVFSampleBufferDelegate &delegate,
avFrame->pts = delegate.startTime - *delegate.baseTime;
- return QVideoFrame(new QFFmpegVideoBuffer(std::move(avFrame)), format);
+ return QVideoFramePrivate::createFrame(std::make_unique<QFFmpegVideoBuffer>(std::move(avFrame)),
+ format);
}
- (instancetype)initWithFrameHandler:(std::function<void(const QVideoFrame &)>)handler
@@ -196,11 +197,12 @@ static QVideoFrame createHwVideoFrame(QAVFSampleBufferDelegate &delegate,
return;
}
- format.setFrameRate(frameRate);
+ format.setStreamFrameRate(frameRate);
auto frame = createHwVideoFrame(*self, imageBuffer, format);
if (!frame.isValid())
- frame = QVideoFrame(new CVImageVideoBuffer(imageBuffer), format);
+ frame = QVideoFramePrivate::createFrame(std::make_unique<CVImageVideoBuffer>(imageBuffer),
+ std::move(format));
frame.setStartTime(startTime - *baseTime);
frame.setEndTime(frameTime - *baseTime);
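Note: the buffer rework above follows the updated QAbstractVideoBuffer interface (QtVideo::MapMode, MapData::planeCount/dataSize, a mandatory format() override, no mapMode()). A minimal, self-contained sketch of a buffer written against that interface, under the assumptions visible in this diff:

    #include "qabstractvideobuffer.h"
    #include <QtCore/QByteArray>

    // Sketch: single-plane CPU buffer using the reworked QAbstractVideoBuffer API.
    class SinglePlaneBuffer : public QAbstractVideoBuffer
    {
    public:
        SinglePlaneBuffer(QByteArray data, int bytesPerLine)
            : m_data(std::move(data)), m_bytesPerLine(bytesPerLine) { }

        MapData map(QtVideo::MapMode /*mode*/) override
        {
            MapData mapData;
            mapData.planeCount = 1;
            mapData.bytesPerLine[0] = m_bytesPerLine;
            mapData.data[0] = reinterpret_cast<uchar *>(m_data.data());
            mapData.dataSize[0] = static_cast<int>(m_data.size());
            return mapData;
        }

        void unmap() override { }
        QVideoFrameFormat format() const override { return {}; }

    private:
        QByteArray m_data;
        int m_bytesPerLine = 0;
    };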
diff --git a/src/plugins/multimedia/ffmpeg/qcgwindowcapture.mm b/src/plugins/multimedia/ffmpeg/qcgwindowcapture.mm
index 93b079bac..a671fcdd6 100644
--- a/src/plugins/multimedia/ffmpeg/qcgwindowcapture.mm
+++ b/src/plugins/multimedia/ffmpeg/qcgwindowcapture.mm
@@ -1,10 +1,12 @@
// Copyright (C) 2022 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qabstractvideobuffer.h"
+
#include "qcgwindowcapture_p.h"
#include "private/qcapturablewindow_p.h"
#include "qffmpegsurfacecapturegrabber_p.h"
-#include "private/qabstractvideobuffer_p.h"
+#include "private/qvideoframe_p.h"
#include "qscreen.h"
#include "qguiapplication.h"
@@ -39,7 +41,7 @@ QT_BEGIN_NAMESPACE
class QCGImageVideoBuffer : public QAbstractVideoBuffer
{
public:
- QCGImageVideoBuffer(CGImageRef image) : QAbstractVideoBuffer(QVideoFrame::NoHandle)
+ QCGImageVideoBuffer(CGImageRef image)
{
auto provider = CGImageGetDataProvider(image);
m_data = CGDataProviderCopyData(provider);
@@ -48,27 +50,21 @@ public:
~QCGImageVideoBuffer() override { CFRelease(m_data); }
- QVideoFrame::MapMode mapMode() const override { return m_mapMode; }
-
- MapData map(QVideoFrame::MapMode mode) override
+ MapData map(QtVideo::MapMode /*mode*/) override
{
MapData mapData;
- if (m_mapMode == QVideoFrame::NotMapped) {
- m_mapMode = mode;
- mapData.nPlanes = 1;
- mapData.bytesPerLine[0] = static_cast<int>(m_bytesPerLine);
- mapData.data[0] = (uchar *)CFDataGetBytePtr(m_data);
- mapData.size[0] = static_cast<int>(CFDataGetLength(m_data));
- }
+ mapData.planeCount = 1;
+ mapData.bytesPerLine[0] = static_cast<int>(m_bytesPerLine);
+ mapData.data[0] = (uchar *)CFDataGetBytePtr(m_data);
+ mapData.dataSize[0] = static_cast<int>(CFDataGetLength(m_data));
return mapData;
}
- void unmap() override { m_mapMode = QVideoFrame::NotMapped; }
+ QVideoFrameFormat format() const override { return {}; }
private:
- QVideoFrame::MapMode m_mapMode = QVideoFrame::NotMapped;
CFDataRef m_data;
size_t m_bytesPerLine = 0;
};
@@ -129,9 +125,10 @@ protected:
QVideoFrameFormat format(QSize(CGImageGetWidth(imageRef), CGImageGetHeight(imageRef)),
QVideoFrameFormat::Format_BGRA8888);
- format.setFrameRate(frameRate());
+ format.setStreamFrameRate(frameRate());
- return QVideoFrame(new QCGImageVideoBuffer(imageRef), format);
+ return QVideoFramePrivate::createFrame(std::make_unique<QCGImageVideoBuffer>(imageRef),
+ std::move(format));
}
void onNewFrame(QVideoFrame frame)
diff --git a/src/plugins/multimedia/ffmpeg/qeglfsscreencapture.cpp b/src/plugins/multimedia/ffmpeg/qeglfsscreencapture.cpp
index 09fd633bc..871cafd4f 100644
--- a/src/plugins/multimedia/ffmpeg/qeglfsscreencapture.cpp
+++ b/src/plugins/multimedia/ffmpeg/qeglfsscreencapture.cpp
@@ -7,6 +7,7 @@
#include "qguiapplication.h"
#include "qopenglvideobuffer_p.h"
#include "private/qimagevideobuffer_p.h"
+#include "private/qvideoframe_p.h"
#include <QtOpenGL/private/qopenglcompositor_p.h>
#include <QtOpenGL/private/qopenglframebufferobject_p.h>
@@ -54,10 +55,10 @@ protected:
if (!m_format.isValid()) {
auto image = videoBuffer->ensureImageBuffer().underlyingImage();
m_format = { image.size(), QVideoFrameFormat::pixelFormatFromImageFormat(image.format()) };
- m_format.setFrameRate(frameRate());
+ m_format.setStreamFrameRate(frameRate());
}
- return QVideoFrame(videoBuffer.release(), m_format);
+ return QVideoFramePrivate::createFrame(std::move(videoBuffer), m_format);
}
QVideoFrameFormat m_format;
@@ -90,10 +91,11 @@ protected:
if (!m_format.isValid()) {
m_format = { image.size(),
QVideoFrameFormat::pixelFormatFromImageFormat(image.format()) };
- m_format.setFrameRate(frameRate());
+ m_format.setStreamFrameRate(frameRate());
}
- return QVideoFrame(new QImageVideoBuffer(std::move(image)), m_format);
+ return QVideoFramePrivate::createFrame(
+ std::make_unique<QImageVideoBuffer>(std::move(image)), m_format);
}
private:
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeg.cpp b/src/plugins/multimedia/ffmpeg/qffmpeg.cpp
index f769ac4d4..8969c6132 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpeg.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpeg.cpp
@@ -33,10 +33,13 @@ QT_BEGIN_NAMESPACE
#ifdef Q_OS_ANDROID
Q_DECLARE_JNI_CLASS(QtVideoDeviceManager,
"org/qtproject/qt/android/multimedia/QtVideoDeviceManager");
+
+# ifndef QT_DECLARE_JNI_CLASS_STANDARD_TYPES
Q_DECLARE_JNI_CLASS(String, "java/lang/String");
-#endif
+# endif
+#endif // Q_OS_ANDROID
-static Q_LOGGING_CATEGORY(qLcFFmpegUtils, "qt.multimedia.ffmpeg.utils");
+Q_STATIC_LOGGING_CATEGORY(qLcFFmpegUtils, "qt.multimedia.ffmpeg.utils");
namespace QFFmpeg {
@@ -190,9 +193,7 @@ bool isCodecValid(const AVCodec *codec, const std::vector<AVHWDeviceType> &avail
if (codec->type != AVMEDIA_TYPE_VIDEO)
return true;
- const auto pixFmts = codec->pix_fmts;
-
- if (!pixFmts) {
+ if (!codec->pix_fmts) {
#if defined(Q_OS_LINUX) || defined(Q_OS_ANDROID)
// Disable V4L2 M2M codecs for encoding for now,
// TODO: Investigate on how to get them working
@@ -211,14 +212,14 @@ bool isCodecValid(const AVCodec *codec, const std::vector<AVHWDeviceType> &avail
// and with v4l2m2m codecs, that is suspicious.
}
- if (findAVFormat(pixFmts, &isHwPixelFormat) == AV_PIX_FMT_NONE)
+ if (findAVPixelFormat(codec, &isHwPixelFormat) == AV_PIX_FMT_NONE)
return true;
if ((codec->capabilities & AV_CODEC_CAP_HARDWARE) == 0)
return true;
- auto checkDeviceType = [pixFmts](AVHWDeviceType type) {
- return hasAVFormat(pixFmts, pixelFormatForHwDevice(type));
+ auto checkDeviceType = [codec](AVHWDeviceType type) {
+ return isAVFormatSupported(codec, pixelFormatForHwDevice(type));
};
if (codecAvailableOnDevice && codecAvailableOnDevice->count(codec->id) == 0)
@@ -232,6 +233,7 @@ std::optional<std::unordered_set<AVCodecID>> availableHWCodecs(const CodecStorag
{
#ifdef Q_OS_ANDROID
using namespace Qt::StringLiterals;
+ using namespace QtJniTypes;
std::unordered_set<AVCodecID> availabeCodecs;
auto getCodecId = [] (const QString& codecName) {
@@ -244,14 +246,11 @@ std::optional<std::unordered_set<AVCodecID>> availableHWCodecs(const CodecStorag
return AV_CODEC_ID_NONE;
};
- const QJniObject jniCodecs =
- QtJniTypes::QtVideoDeviceManager::callStaticMethod<QtJniTypes::String[]>(
+ const QJniArray jniCodecs = QtVideoDeviceManager::callStaticMethod<String[]>(
type == ENCODERS ? "getHWVideoEncoders" : "getHWVideoDecoders");
- QJniArray<QtJniTypes::String> arrCodecs(jniCodecs.object<jobjectArray>());
- for (int i = 0; i < arrCodecs.size(); ++i) {
- availabeCodecs.insert(getCodecId(arrCodecs.at(i).toString()));
- }
+ for (const auto &codec : jniCodecs)
+ availabeCodecs.insert(getCodecId(codec.toString()));
return availabeCodecs;
#else
Q_UNUSED(type);
@@ -338,6 +337,9 @@ const char *preferredHwCodecNameSuffix(bool isEncoder, AVHWDeviceType deviceType
return "_videotoolbox";
case AV_HWDEVICE_TYPE_D3D11VA:
case AV_HWDEVICE_TYPE_DXVA2:
+#if QT_FFMPEG_HAS_D3D12VA
+ case AV_HWDEVICE_TYPE_D3D12VA:
+#endif
return "_mf";
case AV_HWDEVICE_TYPE_CUDA:
case AV_HWDEVICE_TYPE_VDPAU:
@@ -386,6 +388,8 @@ const AVCodec *findAVCodec(CodecStorageType codecsType, AVCodecID codecId,
const std::optional<AVHWDeviceType> &deviceType,
const std::optional<PixelOrSampleFormat> &format)
{
+ // TODO: remove deviceType and use only isAVFormatSupported to check the format
+
return findAVCodec(codecsType, codecId, [&](const AVCodec *codec) {
if (format && !isAVFormatSupported(codec, *format))
return NotSuitableAVScore;
@@ -411,6 +415,7 @@ const AVCodec *findAVCodec(CodecStorageType codecsType, AVCodecID codecId,
// The situation happens mostly with encoders
// Probably, it's ffmpeg bug: avcodec_get_hw_config returns null even though
// hw acceleration is supported
+ // To be removed: only isAVFormatSupported should be used.
if (hasAVFormat(codec->pix_fmts, pixelFormatForHwDevice(*deviceType)))
return hwCodecNameScores(codec, *deviceType);
}
@@ -441,8 +446,10 @@ const AVCodec *findAVEncoder(AVCodecID codecId,
bool isAVFormatSupported(const AVCodec *codec, PixelOrSampleFormat format)
{
- if (codec->type == AVMEDIA_TYPE_VIDEO)
- return hasAVFormat(codec->pix_fmts, AVPixelFormat(format));
+ if (codec->type == AVMEDIA_TYPE_VIDEO) {
+ auto checkFormat = [format](AVPixelFormat f) { return f == format; };
+ return findAVPixelFormat(codec, checkFormat) != AV_PIX_FMT_NONE;
+ }
if (codec->type == AVMEDIA_TYPE_AUDIO)
return hasAVFormat(codec->sample_fmts, AVSampleFormat(format));
@@ -489,6 +496,10 @@ AVPixelFormat pixelFormatForHwDevice(AVHWDeviceType deviceType)
return AV_PIX_FMT_QSV;
case AV_HWDEVICE_TYPE_D3D11VA:
return AV_PIX_FMT_D3D11;
+#if QT_FFMPEG_HAS_D3D12VA
+ case AV_HWDEVICE_TYPE_D3D12VA:
+ return AV_PIX_FMT_D3D12;
+#endif
case AV_HWDEVICE_TYPE_DXVA2:
return AV_PIX_FMT_DXVA2_VLD;
case AV_HWDEVICE_TYPE_DRM:
@@ -502,6 +513,32 @@ AVPixelFormat pixelFormatForHwDevice(AVHWDeviceType deviceType)
}
}
+AVPacketSideData *addStreamSideData(AVStream *stream, AVPacketSideData sideData)
+{
+ QScopeGuard freeData([&sideData]() { av_free(sideData.data); });
+#if QT_FFMPEG_STREAM_SIDE_DATA_DEPRECATED
+ AVPacketSideData *result = av_packet_side_data_add(
+ &stream->codecpar->coded_side_data,
+ &stream->codecpar->nb_coded_side_data,
+ sideData.type,
+ sideData.data,
+ sideData.size,
+ 0);
+ if (result) {
+ // If the result is not null, the ownership is taken by AVStream,
+ // otherwise the data must be deleted.
+ freeData.dismiss();
+ return result;
+ }
+#else
+ Q_UNUSED(stream);
+ // TODO: implement for older FFmpeg versions
+ qWarning() << "Adding stream side data is not supported for FFmpeg < 6.1";
+#endif
+
+ return nullptr;
+}
+
const AVPacketSideData *streamSideData(const AVStream *stream, AVPacketSideDataType type)
{
Q_ASSERT(stream);
@@ -554,6 +591,138 @@ SwrContextUPtr createResampleContext(const AVAudioFormat &inputFormat,
return SwrContextUPtr(resampler);
}
+QVideoFrameFormat::ColorTransfer fromAvColorTransfer(AVColorTransferCharacteristic colorTrc) {
+ switch (colorTrc) {
+ case AVCOL_TRC_BT709:
+ // The following three cases have transfer characteristics identical to BT709
+ case AVCOL_TRC_BT1361_ECG:
+ case AVCOL_TRC_BT2020_10:
+ case AVCOL_TRC_BT2020_12:
+ case AVCOL_TRC_SMPTE240M: // almost identical to bt709
+ return QVideoFrameFormat::ColorTransfer_BT709;
+ case AVCOL_TRC_GAMMA22:
+ case AVCOL_TRC_SMPTE428: // No idea, let's hope for the best...
+ case AVCOL_TRC_IEC61966_2_1: // sRGB, close enough to 2.2...
+ case AVCOL_TRC_IEC61966_2_4: // not quite, but probably close enough
+ return QVideoFrameFormat::ColorTransfer_Gamma22;
+ case AVCOL_TRC_GAMMA28:
+ return QVideoFrameFormat::ColorTransfer_Gamma28;
+ case AVCOL_TRC_SMPTE170M:
+ return QVideoFrameFormat::ColorTransfer_BT601;
+ case AVCOL_TRC_LINEAR:
+ return QVideoFrameFormat::ColorTransfer_Linear;
+ case AVCOL_TRC_SMPTE2084:
+ return QVideoFrameFormat::ColorTransfer_ST2084;
+ case AVCOL_TRC_ARIB_STD_B67:
+ return QVideoFrameFormat::ColorTransfer_STD_B67;
+ default:
+ break;
+ }
+ return QVideoFrameFormat::ColorTransfer_Unknown;
+}
+
+AVColorTransferCharacteristic toAvColorTransfer(QVideoFrameFormat::ColorTransfer colorTrc)
+{
+ switch (colorTrc) {
+ case QVideoFrameFormat::ColorTransfer_BT709:
+ return AVCOL_TRC_BT709;
+ case QVideoFrameFormat::ColorTransfer_BT601:
+ return AVCOL_TRC_BT709; // which one is the best?
+ case QVideoFrameFormat::ColorTransfer_Linear:
+ return AVCOL_TRC_SMPTE2084;
+ case QVideoFrameFormat::ColorTransfer_Gamma22:
+ return AVCOL_TRC_GAMMA22;
+ case QVideoFrameFormat::ColorTransfer_Gamma28:
+ return AVCOL_TRC_GAMMA28;
+ case QVideoFrameFormat::ColorTransfer_ST2084:
+ return AVCOL_TRC_SMPTE2084;
+ case QVideoFrameFormat::ColorTransfer_STD_B67:
+ return AVCOL_TRC_ARIB_STD_B67;
+ default:
+ return AVCOL_TRC_UNSPECIFIED;
+ }
+}
+
+QVideoFrameFormat::ColorSpace fromAvColorSpace(AVColorSpace colorSpace)
+{
+ switch (colorSpace) {
+ default:
+ case AVCOL_SPC_UNSPECIFIED:
+ case AVCOL_SPC_RESERVED:
+ case AVCOL_SPC_FCC:
+ case AVCOL_SPC_SMPTE240M:
+ case AVCOL_SPC_YCGCO:
+ case AVCOL_SPC_SMPTE2085:
+ case AVCOL_SPC_CHROMA_DERIVED_NCL:
+ case AVCOL_SPC_CHROMA_DERIVED_CL:
+ case AVCOL_SPC_ICTCP: // BT.2100 ICtCp
+ return QVideoFrameFormat::ColorSpace_Undefined;
+ case AVCOL_SPC_RGB:
+ return QVideoFrameFormat::ColorSpace_AdobeRgb;
+ case AVCOL_SPC_BT709:
+ return QVideoFrameFormat::ColorSpace_BT709;
+ case AVCOL_SPC_BT470BG: // BT601
+ case AVCOL_SPC_SMPTE170M: // Also BT601
+ return QVideoFrameFormat::ColorSpace_BT601;
+ case AVCOL_SPC_BT2020_NCL: // Non-constant luminance
+ case AVCOL_SPC_BT2020_CL: // Constant luminance
+ return QVideoFrameFormat::ColorSpace_BT2020;
+ }
+}
+
+AVColorSpace toAvColorSpace(QVideoFrameFormat::ColorSpace colorSpace)
+{
+ switch (colorSpace) {
+ case QVideoFrameFormat::ColorSpace_BT601:
+ return AVCOL_SPC_BT470BG;
+ case QVideoFrameFormat::ColorSpace_BT709:
+ return AVCOL_SPC_BT709;
+ case QVideoFrameFormat::ColorSpace_AdobeRgb:
+ return AVCOL_SPC_RGB;
+ case QVideoFrameFormat::ColorSpace_BT2020:
+ return AVCOL_SPC_BT2020_CL;
+ default:
+ return AVCOL_SPC_UNSPECIFIED;
+ }
+}
+
+QVideoFrameFormat::ColorRange fromAvColorRange(AVColorRange colorRange)
+{
+ switch (colorRange) {
+ case AVCOL_RANGE_MPEG:
+ return QVideoFrameFormat::ColorRange_Video;
+ case AVCOL_RANGE_JPEG:
+ return QVideoFrameFormat::ColorRange_Full;
+ default:
+ return QVideoFrameFormat::ColorRange_Unknown;
+ }
+}
+
+AVColorRange toAvColorRange(QVideoFrameFormat::ColorRange colorRange)
+{
+ switch (colorRange) {
+ case QVideoFrameFormat::ColorRange_Video:
+ return AVCOL_RANGE_MPEG;
+ case QVideoFrameFormat::ColorRange_Full:
+ return AVCOL_RANGE_JPEG;
+ default:
+ return AVCOL_RANGE_UNSPECIFIED;
+ }
+}
+
+AVHWDeviceContext* avFrameDeviceContext(const AVFrame* frame) {
+ if (!frame)
+ return {};
+ if (!frame->hw_frames_ctx)
+ return {};
+
+ const auto *frameCtx = reinterpret_cast<AVHWFramesContext *>(frame->hw_frames_ctx->data);
+ if (!frameCtx)
+ return {};
+
+ return frameCtx->device_ctx;
+}
+
#ifdef Q_OS_DARWIN
bool isCVFormatSupported(uint32_t cvFormat)
{
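Note: a brief sketch of how the new color conversion helpers might be used to carry decoder color metadata into a QVideoFrameFormat; the surrounding decoder setup and the function name are assumptions, not part of this patch.

    // Sketch: map FFmpeg color metadata from an opened AVCodecContext onto a video format.
    QVideoFrameFormat describeOutput(const AVCodecContext *codecContext, const QSize &frameSize,
                                     QVideoFrameFormat::PixelFormat pixelFormat)
    {
        QVideoFrameFormat format(frameSize, pixelFormat);
        format.setColorTransfer(QFFmpeg::fromAvColorTransfer(codecContext->color_trc));
        format.setColorSpace(QFFmpeg::fromAvColorSpace(codecContext->colorspace));
        format.setColorRange(QFFmpeg::fromAvColorRange(codecContext->color_range));
        return format;
    }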
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeg_p.h b/src/plugins/multimedia/ffmpeg/qffmpeg_p.h
index 601b44ccb..a0b87ff5d 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpeg_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpeg_p.h
@@ -16,6 +16,7 @@
#include "qffmpegdefs_p.h"
#include "qffmpegavaudioformat_p.h"
+#include <QtMultimedia/qvideoframeformat.h>
#include <qstring.h>
#include <optional>
@@ -160,6 +161,15 @@ constexpr AVScore DefaultAVScore = 0;
constexpr AVScore NotSuitableAVScore = std::numeric_limits<AVScore>::min();
constexpr AVScore MinAVScore = NotSuitableAVScore + 1;
+template <typename T>
+inline constexpr auto InvalidAvValue = T{};
+
+template<>
+inline constexpr auto InvalidAvValue<AVSampleFormat> = AV_SAMPLE_FMT_NONE;
+
+template<>
+inline constexpr auto InvalidAvValue<AVPixelFormat> = AV_PIX_FMT_NONE;
+
const AVCodec *findAVDecoder(AVCodecID codecId,
const std::optional<AVHWDeviceType> &deviceType = {},
const std::optional<PixelOrSampleFormat> &format = {});
@@ -176,7 +186,7 @@ bool isAVFormatSupported(const AVCodec *codec, PixelOrSampleFormat format);
template<typename Format>
bool hasAVFormat(const Format *fmts, Format format)
{
- return findAVFormat(fmts, [format](Format f) { return f == format; }) != Format(-1);
+ return findAVFormat(fmts, [format](Format f) { return f == format; }) != InvalidAvValue<Format>;
}
template<typename Format, typename Predicate>
@@ -185,16 +195,46 @@ Format findAVFormat(const Format *fmts, const Predicate &predicate)
auto scoresGetter = [&predicate](Format fmt) {
return predicate(fmt) ? BestAVScore : NotSuitableAVScore;
};
- return findBestAVFormat(fmts, scoresGetter).first;
+ return findBestAVValue(fmts, scoresGetter).first;
+}
+
+template <typename Predicate>
+const AVCodecHWConfig *findHwConfig(const AVCodec *codec, const Predicate &predicate)
+{
+ for (int i = 0; const auto hwConfig = avcodec_get_hw_config(codec, i); ++i) {
+ if (predicate(hwConfig))
+ return hwConfig;
+ }
+
+ return nullptr;
+}
+
+template <typename Predicate>
+AVPixelFormat findAVPixelFormat(const AVCodec *codec, const Predicate &predicate)
+{
+ const AVPixelFormat format = findAVFormat(codec->pix_fmts, predicate);
+ if (format != AV_PIX_FMT_NONE)
+ return format;
+
+ auto checkHwConfig = [&predicate](const AVCodecHWConfig *config) {
+ return config->pix_fmt != AV_PIX_FMT_NONE && predicate(config->pix_fmt);
+ };
+
+ if (auto hwConfig = findHwConfig(codec, checkHwConfig))
+ return hwConfig->pix_fmt;
+
+ return AV_PIX_FMT_NONE;
}
template <typename Value, typename CalculateScore>
-auto findBestAVValue(const Value *values, const CalculateScore &calculateScore,
- Value invalidValue = {})
+auto findBestAVValue(const Value *values, const CalculateScore &calculateScore)
{
using Limits = std::numeric_limits<decltype(calculateScore(*values))>;
+
+ const Value invalidValue = InvalidAvValue<Value>;
std::pair result(invalidValue, Limits::min());
if (values) {
+
for (; *values != invalidValue && result.second != Limits::max(); ++values) {
const auto score = calculateScore(*values);
if (score > result.second)
@@ -205,15 +245,6 @@ auto findBestAVValue(const Value *values, const CalculateScore &calculateScore,
return result;
}
-template <typename Format, typename CalculateScore>
-std::pair<Format, AVScore> findBestAVFormat(const Format *fmts,
- const CalculateScore &calculateScore)
-{
- static_assert(std::is_same_v<Format, AVSampleFormat> || std::is_same_v<Format, AVPixelFormat>,
- "The input value is not AV format, use findBestAVValue instead.");
- return findBestAVValue(fmts, calculateScore, Format(-1));
-}
-
bool isHwPixelFormat(AVPixelFormat format);
inline bool isSwPixelFormat(AVPixelFormat format)
@@ -227,11 +258,27 @@ void applyExperimentalCodecOptions(const AVCodec *codec, AVDictionary** opts);
AVPixelFormat pixelFormatForHwDevice(AVHWDeviceType deviceType);
+AVPacketSideData *addStreamSideData(AVStream *stream, AVPacketSideData sideData);
+
const AVPacketSideData *streamSideData(const AVStream *stream, AVPacketSideDataType type);
SwrContextUPtr createResampleContext(const AVAudioFormat &inputFormat,
const AVAudioFormat &outputFormat);
+QVideoFrameFormat::ColorTransfer fromAvColorTransfer(AVColorTransferCharacteristic colorTrc);
+
+AVColorTransferCharacteristic toAvColorTransfer(QVideoFrameFormat::ColorTransfer colorTrc);
+
+QVideoFrameFormat::ColorSpace fromAvColorSpace(AVColorSpace colorSpace);
+
+AVColorSpace toAvColorSpace(QVideoFrameFormat::ColorSpace colorSpace);
+
+QVideoFrameFormat::ColorRange fromAvColorRange(AVColorRange colorRange);
+
+AVColorRange toAvColorRange(QVideoFrameFormat::ColorRange colorRange);
+
+AVHWDeviceContext *avFrameDeviceContext(const AVFrame *frame);
+
#ifdef Q_OS_DARWIN
bool isCVFormatSupported(uint32_t format);
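Note: unlike hasAVFormat, the new findAVPixelFormat helper also walks the codec's AVCodecHWConfig entries, so it can answer format questions even for codecs that leave pix_fmts empty. A small usage sketch (the wrapper function is illustrative and assumes qffmpeg_p.h is included):

    // Sketch: does this codec expose any hardware pixel format,
    // either via codec->pix_fmts or via one of its hw configs?
    bool codecHasHwPixelFormat(const AVCodec *codec)
    {
        using namespace QFFmpeg;
        return findAVPixelFormat(codec, &isHwPixelFormat) != AV_PIX_FMT_NONE;
    }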
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegaudiodecoder.cpp b/src/plugins/multimedia/ffmpeg/qffmpegaudiodecoder.cpp
index 69820cc79..20c27982c 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegaudiodecoder.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegaudiodecoder.cpp
@@ -9,7 +9,7 @@
#include <qloggingcategory.h>
-static Q_LOGGING_CATEGORY(qLcAudioDecoder, "qt.multimedia.ffmpeg.audioDecoder")
+Q_STATIC_LOGGING_CATEGORY(qLcAudioDecoder, "qt.multimedia.ffmpeg.audioDecoder")
QT_BEGIN_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegaudioinput_p.h b/src/plugins/multimedia/ffmpeg/qffmpegaudioinput_p.h
index a232978f6..288b3f432 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegaudioinput_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpegaudioinput_p.h
@@ -15,6 +15,7 @@
//
#include <private/qplatformaudioinput_p.h>
+#include <private/qplatformaudiobufferinput_p.h>
#include "qffmpegthread_p.h"
#include <qaudioinput.h>
@@ -28,8 +29,9 @@ class AudioSourceIO;
constexpr int DefaultAudioInputBufferSize = 4096;
-class QFFmpegAudioInput : public QObject, public QPlatformAudioInput
+class QFFmpegAudioInput : public QPlatformAudioBufferInputBase, public QPlatformAudioInput
{
+ // for qobject_cast
Q_OBJECT
public:
QFFmpegAudioInput(QAudioInput *qq);
@@ -44,9 +46,6 @@ public:
int bufferSize() const;
-Q_SIGNALS:
- void newAudioBuffer(const QAudioBuffer &buffer);
-
private:
QFFmpeg::AudioSourceIO *audioIO = nullptr;
std::unique_ptr<QThread> inputThread;
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegconverter.cpp b/src/plugins/multimedia/ffmpeg/qffmpegconverter.cpp
new file mode 100644
index 000000000..ba87ce3ed
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegconverter.cpp
@@ -0,0 +1,272 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegconverter_p.h"
+#include <QtMultimedia/qvideoframeformat.h>
+#include <QtMultimedia/qvideoframe.h>
+#include <QtCore/qloggingcategory.h>
+#include <private/qvideotexturehelper_p.h>
+
+extern "C" {
+#include <libswscale/swscale.h>
+}
+
+QT_BEGIN_NAMESPACE
+
+namespace {
+
+Q_LOGGING_CATEGORY(lc, "qt.multimedia.ffmpeg.converter");
+
+
+// Converts to FFmpeg pixel format. This function differs from
+// QFFmpegVideoBuffer::toAVPixelFormat, which only covers the subset
+// of pixel formats required for encoding. Here we need to cover more
+// pixel formats to be able to generate test images for decoding/display.
+AVPixelFormat toAVPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat)
+{
+ switch (pixelFormat) {
+ default:
+ case QVideoFrameFormat::Format_Invalid:
+ return AV_PIX_FMT_NONE;
+ case QVideoFrameFormat::Format_AYUV:
+ case QVideoFrameFormat::Format_AYUV_Premultiplied:
+ return AV_PIX_FMT_NONE; // TODO: Fixme (No corresponding FFmpeg format available)
+ case QVideoFrameFormat::Format_YV12:
+ case QVideoFrameFormat::Format_IMC1:
+ case QVideoFrameFormat::Format_IMC3:
+ case QVideoFrameFormat::Format_IMC2:
+ case QVideoFrameFormat::Format_IMC4:
+ return AV_PIX_FMT_YUV420P;
+ case QVideoFrameFormat::Format_Jpeg:
+ return AV_PIX_FMT_BGRA;
+ case QVideoFrameFormat::Format_ARGB8888:
+ return AV_PIX_FMT_ARGB;
+ case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
+ case QVideoFrameFormat::Format_XRGB8888:
+ return AV_PIX_FMT_0RGB;
+ case QVideoFrameFormat::Format_BGRA8888:
+ return AV_PIX_FMT_BGRA;
+ case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
+ case QVideoFrameFormat::Format_BGRX8888:
+ return AV_PIX_FMT_BGR0;
+ case QVideoFrameFormat::Format_ABGR8888:
+ return AV_PIX_FMT_ABGR;
+ case QVideoFrameFormat::Format_XBGR8888:
+ return AV_PIX_FMT_0BGR;
+ case QVideoFrameFormat::Format_RGBA8888:
+ return AV_PIX_FMT_RGBA;
+ case QVideoFrameFormat::Format_RGBX8888:
+ return AV_PIX_FMT_RGB0;
+ case QVideoFrameFormat::Format_YUV422P:
+ return AV_PIX_FMT_YUV422P;
+ case QVideoFrameFormat::Format_YUV420P:
+ return AV_PIX_FMT_YUV420P;
+ case QVideoFrameFormat::Format_YUV420P10:
+ return AV_PIX_FMT_YUV420P10;
+ case QVideoFrameFormat::Format_UYVY:
+ return AV_PIX_FMT_UYVY422;
+ case QVideoFrameFormat::Format_YUYV:
+ return AV_PIX_FMT_YUYV422;
+ case QVideoFrameFormat::Format_NV12:
+ return AV_PIX_FMT_NV12;
+ case QVideoFrameFormat::Format_NV21:
+ return AV_PIX_FMT_NV21;
+ case QVideoFrameFormat::Format_Y8:
+ return AV_PIX_FMT_GRAY8;
+ case QVideoFrameFormat::Format_Y16:
+ return AV_PIX_FMT_GRAY16;
+ case QVideoFrameFormat::Format_P010:
+ return AV_PIX_FMT_P010;
+ case QVideoFrameFormat::Format_P016:
+ return AV_PIX_FMT_P016;
+ case QVideoFrameFormat::Format_SamplerExternalOES:
+ return AV_PIX_FMT_MEDIACODEC;
+ }
+}
+
+struct SwsFrameData
+{
+ static constexpr int arraySize = 4; // Array size required by sws_scale
+ std::array<uchar *, arraySize> bits;
+ std::array<int, arraySize> stride;
+};
+
+SwsFrameData getSwsData(QVideoFrame &dst)
+{
+ switch (dst.pixelFormat()) {
+ case QVideoFrameFormat::Format_YV12:
+ case QVideoFrameFormat::Format_IMC1:
+ return { { dst.bits(0), dst.bits(2), dst.bits(1), nullptr },
+ { dst.bytesPerLine(0), dst.bytesPerLine(2), dst.bytesPerLine(1), 0 } };
+
+ case QVideoFrameFormat::Format_IMC2:
+ return { { dst.bits(0), dst.bits(1) + dst.bytesPerLine(1) / 2, dst.bits(1), nullptr },
+ { dst.bytesPerLine(0), dst.bytesPerLine(1), dst.bytesPerLine(1), 0 } };
+
+ case QVideoFrameFormat::Format_IMC4:
+ return { { dst.bits(0), dst.bits(1), dst.bits(1) + dst.bytesPerLine(1) / 2, nullptr },
+ { dst.bytesPerLine(0), dst.bytesPerLine(1), dst.bytesPerLine(1), 0 } };
+ default:
+ return { { dst.bits(0), dst.bits(1), dst.bits(2), nullptr },
+ { dst.bytesPerLine(0), dst.bytesPerLine(1), dst.bytesPerLine(2), 0 } };
+ }
+}
+
+struct SwsColorSpace
+{
+ int colorSpace;
+ int colorRange; // 0 - mpeg/video, 1 - jpeg/full
+};
+
+// Qt heuristics for determining color space require checking
+// both frame color space and range. This function mimics logic
+// used elsewhere in Qt Multimedia.
+SwsColorSpace toSwsColorSpace(QVideoFrameFormat::ColorRange colorRange,
+ QVideoFrameFormat::ColorSpace colorSpace)
+{
+ const int avRange = colorRange == QVideoFrameFormat::ColorRange_Video ? 0 : 1;
+
+ switch (colorSpace) {
+ case QVideoFrameFormat::ColorSpace_BT601:
+ if (colorRange == QVideoFrameFormat::ColorRange_Full)
+ return { SWS_CS_ITU709, 1 }; // TODO: FIXME - Not exact match
+ return { SWS_CS_ITU601, 0 };
+ case QVideoFrameFormat::ColorSpace_BT709:
+ return { SWS_CS_ITU709, avRange };
+ case QVideoFrameFormat::ColorSpace_AdobeRgb:
+ return { SWS_CS_ITU601, 1 }; // TODO: Why do ITU601 and Adobe RGB match well?
+ case QVideoFrameFormat::ColorSpace_BT2020:
+ return { SWS_CS_BT2020, avRange };
+ case QVideoFrameFormat::ColorSpace_Undefined:
+ default:
+ return { SWS_CS_DEFAULT, avRange };
+ }
+}
+
+using SwsContextUPtr = std::unique_ptr<SwsContext, decltype(&sws_freeContext)>;
+using PixelFormat = QVideoFrameFormat::PixelFormat;
+
+// clang-format off
+
+SwsContextUPtr createConverter(const QSize &srcSize, PixelFormat srcPixFmt,
+ const QSize &dstSize, PixelFormat dstPixFmt)
+{
+ SwsContext* context = sws_getContext(
+ srcSize.width(), srcSize.height(), toAVPixelFormat(srcPixFmt),
+ dstSize.width(), dstSize.height(), toAVPixelFormat(dstPixFmt),
+ SWS_BILINEAR, nullptr, nullptr, nullptr);
+
+ return { context, &sws_freeContext };
+}
+
+bool setColorSpaceDetails(SwsContext *context,
+ const QVideoFrameFormat &srcFormat,
+ const QVideoFrameFormat &dstFormat)
+{
+ const SwsColorSpace src = toSwsColorSpace(srcFormat.colorRange(), srcFormat.colorSpace());
+ const SwsColorSpace dst = toSwsColorSpace(dstFormat.colorRange(), dstFormat.colorSpace());
+
+ constexpr int brightness = 0;
+ constexpr int contrast = 0;
+ constexpr int saturation = 0;
+ const int status = sws_setColorspaceDetails(context,
+ sws_getCoefficients(src.colorSpace), src.colorRange,
+ sws_getCoefficients(dst.colorSpace), dst.colorRange,
+ brightness, contrast, saturation);
+
+ return status == 0;
+}
+
+bool convert(SwsContext *context, QVideoFrame &src, int srcHeight, QVideoFrame &dst)
+{
+ if (!src.map(QtVideo::MapMode::ReadOnly))
+ return false;
+
+ QScopeGuard unmapSrc{[&] {
+ src.unmap();
+ }};
+
+ if (!dst.map(QtVideo::MapMode::WriteOnly))
+ return false;
+
+ QScopeGuard unmapDst{[&] {
+ dst.unmap();
+ }};
+
+ const SwsFrameData srcData = getSwsData(src);
+ const SwsFrameData dstData = getSwsData(dst);
+
+ constexpr int firstSrcSliceRow = 0;
+ const int scaledHeight = sws_scale(context,
+ srcData.bits.data(), srcData.stride.data(),
+ firstSrcSliceRow, srcHeight,
+ dstData.bits.data(), dstData.stride.data());
+
+ if (scaledHeight != srcHeight)
+ return false;
+
+ return true;
+}
+
+// Ensure even size if using planar format with chroma subsampling
+QSize adjustSize(const QSize& size, PixelFormat srcFmt, PixelFormat dstFmt)
+{
+ const auto* srcDesc = QVideoTextureHelper::textureDescription(srcFmt);
+ const auto* dstDesc = QVideoTextureHelper::textureDescription(dstFmt);
+
+ QSize output = size;
+ for (const auto desc : { srcDesc, dstDesc }) {
+ for (int i = 0; i < desc->nplanes; ++i) {
+ // TODO: Assumes that max subsampling is 2
+ if (desc->sizeScale[i].x != 1)
+ output.setWidth(output.width() & ~1); // Make even
+
+ if (desc->sizeScale[i].y != 1)
+ output.setHeight(output.height() & ~1); // Make even
+ }
+ }
+
+ return output;
+}
+
+} // namespace
+
+// Converts a video frame to the dstFormat video frame format.
+QVideoFrame convertFrame(QVideoFrame &src, const QVideoFrameFormat &dstFormat)
+{
+ if (src.size() != dstFormat.frameSize()) {
+ qCCritical(lc) << "Resizing is not supported";
+ return {};
+ }
+
+ // Adjust size to even width/height if we have chroma subsampling
+ const QSize size = adjustSize(src.size(), src.pixelFormat(), dstFormat.pixelFormat());
+ if (size != src.size())
+ qCWarning(lc) << "Input truncated to even width/height";
+
+ const SwsContextUPtr conv = createConverter(
+ size, src.pixelFormat(), size, dstFormat.pixelFormat());
+
+ if (!conv) {
+ qCCritical(lc) << "Failed to create SW converter";
+ return {};
+ }
+
+ if (!setColorSpaceDetails(conv.get(), src.surfaceFormat(), dstFormat)) {
+ qCCritical(lc) << "Failed to set color space details";
+ return {};
+ }
+
+ QVideoFrame dst{ dstFormat };
+
+ if (!convert(conv.get(), src, size.height(), dst)) {
+ qCCritical(lc) << "Frame conversion failed";
+ return {};
+ }
+
+ return dst;
+}
+
+// clang-format on
+
+QT_END_NAMESPACE
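Note: a short usage sketch for the new convertFrame helper. Source and destination sizes must match, since resizing is rejected; for chroma-subsampled formats odd dimensions are truncated to even ones before conversion (e.g. 1279x719 becomes 1278x718).

    // Sketch: convert a CPU-mapped frame to YUV420P without resizing.
    QVideoFrame toYuv420p(QVideoFrame &src)
    {
        const QVideoFrameFormat dstFormat(src.size(), QVideoFrameFormat::Format_YUV420P);
        return convertFrame(src, dstFormat); // returns an invalid frame on failure
    }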
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegsymbolsresolve_p.h b/src/plugins/multimedia/ffmpeg/qffmpegconverter_p.h
index 8064b8d85..57ee3135f 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegsymbolsresolve_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpegconverter_p.h
@@ -1,10 +1,8 @@
-// Copyright (C) 2023 The Qt Company Ltd.
+// Copyright (C) 2024 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-#ifndef QFFMPEGSYMBOLSRESOLVE_P_H
-#define QFFMPEGSYMBOLSRESOLVE_P_H
-
-#include "qnamespace.h"
+#ifndef QFFMPEGCONVERTER_P_H
+#define QFFMPEGCONVERTER_P_H
//
// W A R N I N G
@@ -17,21 +15,16 @@
// We mean it.
//
+#include <QtCore/qtconfigmacros.h>
+#include <private/qtmultimediaglobal_p.h>
+
QT_BEGIN_NAMESPACE
-inline void resolveSymbols()
-{
-#ifdef DYNAMIC_RESOLVE_OPENSSL_SYMBOLS
- extern bool resolveOpenSsl();
- resolveOpenSsl();
-#endif
+class QVideoFrameFormat;
+class QVideoFrame;
-#ifdef DYNAMIC_RESOLVE_VAAPI_SYMBOLS
- extern bool resolveVAAPI();
- resolveVAAPI();
-#endif
-}
+QVideoFrame convertFrame(QVideoFrame &src, const QVideoFrameFormat &dstFormat);
QT_END_NAMESPACE
-#endif // QFFMPEGSYMBOLSRESOLVE_P_H
+#endif
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegdefs_p.h b/src/plugins/multimedia/ffmpeg/qffmpegdefs_p.h
index f3860377e..239d8ff0c 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegdefs_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpegdefs_p.h
@@ -32,6 +32,8 @@ extern "C" {
(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(60, 3, 100)) // since ffmpeg n6.0
#define QT_FFMPEG_STREAM_SIDE_DATA_DEPRECATED \
(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(60, 15, 100)) // since ffmpeg n6.1
+#define QT_FFMPEG_HAS_D3D12VA \
+ (LIBAVUTIL_VERSION_INT >= AV_VERSION_INT(59, 8, 100)) // since ffmpeg n7.0
#define QT_FFMPEG_SWR_CONST_CH_LAYOUT (LIBSWRESAMPLE_VERSION_INT >= AV_VERSION_INT(4, 9, 100))
#define QT_FFMPEG_AVIO_WRITE_CONST \
(LIBAVFORMAT_VERSION_MAJOR >= 61)
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegencodingformatcontext.cpp b/src/plugins/multimedia/ffmpeg/qffmpegencodingformatcontext.cpp
index 8b367a822..7117d0c02 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegencodingformatcontext.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegencodingformatcontext.cpp
@@ -11,7 +11,7 @@ QT_BEGIN_NAMESPACE
namespace QFFmpeg {
-static Q_LOGGING_CATEGORY(qLcEncodingFormatContext, "qt.multimedia.ffmpeg.encodingformatcontext");
+Q_STATIC_LOGGING_CATEGORY(qLcEncodingFormatContext, "qt.multimedia.ffmpeg.encodingformatcontext");
namespace {
// In the example https://ffmpeg.org/doxygen/trunk/avio_read_callback_8c-example.html,
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp
index 78e265b4b..24e5614ce 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp
@@ -34,7 +34,7 @@
QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLHWAccel, "qt.multimedia.ffmpeg.hwaccel");
+Q_STATIC_LOGGING_CATEGORY(qLHWAccel, "qt.multimedia.ffmpeg.hwaccel");
extern bool thread_local FFmpegLogsEnabledInThread;
namespace QFFmpeg {
@@ -93,6 +93,11 @@ static bool precheckDriver(AVHWDeviceType type)
if (type == AV_HWDEVICE_TYPE_D3D11VA)
return QSystemLibrary(QLatin1String("d3d11.dll")).load();
+#if QT_FFMPEG_HAS_D3D12VA
+ if (type == AV_HWDEVICE_TYPE_D3D12VA)
+ return QSystemLibrary(QLatin1String("d3d12.dll")).load();
+#endif
+
if (type == AV_HWDEVICE_TYPE_DXVA2)
return QSystemLibrary(QLatin1String("d3d9.dll")).load();
@@ -122,6 +127,9 @@ static bool checkHwType(AVHWDeviceType type)
if (type == AV_HWDEVICE_TYPE_MEDIACODEC ||
type == AV_HWDEVICE_TYPE_VIDEOTOOLBOX ||
type == AV_HWDEVICE_TYPE_D3D11VA ||
+#if QT_FFMPEG_HAS_D3D12VA
+ type == AV_HWDEVICE_TYPE_D3D12VA ||
+#endif
type == AV_HWDEVICE_TYPE_DXVA2)
return true; // Don't waste time; it's expected to work fine of the precheck is OK
@@ -143,10 +151,11 @@ static const std::vector<AVHWDeviceType> &deviceTypes()
std::unordered_set<AVPixelFormat> hwPixFormats;
void *opaque = nullptr;
while (auto codec = av_codec_iterate(&opaque)) {
- if (auto pixFmt = codec->pix_fmts)
- for (; *pixFmt != AV_PIX_FMT_NONE; ++pixFmt)
- if (isHwPixelFormat(*pixFmt))
- hwPixFormats.insert(*pixFmt);
+ findAVPixelFormat(codec, [&](AVPixelFormat format) {
+ if (isHwPixelFormat(format))
+ hwPixFormats.insert(format);
+ return false;
+ });
}
// create a device types list
@@ -292,7 +301,9 @@ AVPixelFormat getFormat(AVCodecContext *codecContext, const AVPixelFormat *sugge
const bool shouldCheckCodecFormats = config->pix_fmt == AV_PIX_FMT_NONE;
auto scoresGettor = [&](AVPixelFormat format) {
- if (shouldCheckCodecFormats && !isAVFormatSupported(codecContext->codec, format))
+ // check only the codec's supported pix_fmts;
+ // no reason to use findAVPixelFormat as we're already in the hw_config loop
+ if (shouldCheckCodecFormats && !hasAVFormat(codecContext->codec->pix_fmts, format))
return NotSuitableAVScore;
if (!shouldCheckCodecFormats && config->pix_fmt != format)
@@ -308,7 +319,7 @@ AVPixelFormat getFormat(AVCodecContext *codecContext, const AVPixelFormat *sugge
return result;
};
- const auto found = findBestAVFormat(suggestedFormats, scoresGettor);
+ const auto found = findBestAVValue(suggestedFormats, scoresGettor);
if (found.second > formatAndScore.second)
formatAndScore = found;
@@ -488,6 +499,15 @@ void TextureConverter::updateBackend(AVPixelFormat fmt)
d->format = fmt;
}
+AVFrameUPtr copyFromHwPool(AVFrameUPtr frame)
+{
+#if QT_CONFIG(wmf)
+ return copyFromHwPoolD3D11(std::move(frame));
+#else
+ return frame;
+#endif
+}
+
} // namespace QFFmpeg
QT_END_NAMESPACE
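
For reference, a self-contained sketch of the callback-style enumeration that replaces the raw pix_fmts walk above. findAVPixelFormat is a Qt-internal helper, so the stand-in below only mirrors the shape implied by the call site:

```cpp
extern "C" {
#include <libavcodec/avcodec.h>
}

#include <unordered_set>

// Hypothetical stand-in for the internal findAVPixelFormat helper: walk the
// codec's pixel format list and stop when the predicate returns true.
template <typename Predicate>
AVPixelFormat findPixelFormatSketch(const AVCodec *codec, Predicate predicate)
{
    if (const AVPixelFormat *fmt = codec->pix_fmts)
        for (; *fmt != AV_PIX_FMT_NONE; ++fmt)
            if (predicate(*fmt))
                return *fmt;
    return AV_PIX_FMT_NONE;
}

// Usage mirroring the hunk: collect every format by always returning false,
// so the walk never stops early.
inline std::unordered_set<AVPixelFormat> collectFormats(const AVCodec *codec)
{
    std::unordered_set<AVPixelFormat> formats;
    findPixelFormatSketch(codec, [&](AVPixelFormat format) {
        formats.insert(format);
        return false; // keep iterating
    });
    return formats;
}
```
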
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11.cpp b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11.cpp
index a2533a132..a44ceef7f 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11.cpp
@@ -42,6 +42,62 @@ ComPtr<ID3D11Device1> GetD3DDevice(QRhi *rhi)
return dev1;
}
+ComPtr<ID3D11Texture2D> getAvFrameTexture(const AVFrame *frame)
+{
+ return reinterpret_cast<ID3D11Texture2D *>(frame->data[0]);
+}
+
+int getAvFramePoolIndex(const AVFrame *frame)
+{
+ return static_cast<int>(reinterpret_cast<intptr_t>(frame->data[1]));
+}
+
+const AVD3D11VADeviceContext *getHwDeviceContext(const AVHWDeviceContext *ctx)
+{
+ return static_cast<AVD3D11VADeviceContext *>(ctx->hwctx);
+}
+
+void freeTextureAndData(void *opaque, uint8_t *data)
+{
+ static_cast<ID3D11Texture2D *>(opaque)->Release();
+ av_free(data);
+}
+
+AVBufferRef *wrapTextureAsBuffer(const ComPtr<ID3D11Texture2D> &tex)
+{
+ AVD3D11FrameDescriptor *avFrameDesc =
+ static_cast<AVD3D11FrameDescriptor *>(av_mallocz(sizeof(AVD3D11FrameDescriptor)));
+ avFrameDesc->index = 0;
+ avFrameDesc->texture = tex.Get();
+
+ return av_buffer_create(reinterpret_cast<uint8_t *>(avFrameDesc),
+ sizeof(AVD3D11FrameDescriptor *), freeTextureAndData, tex.Get(), 0);
+}
+
+ComPtr<ID3D11Texture2D> copyTexture(const AVD3D11VADeviceContext *hwDevCtx, const AVFrame *src)
+{
+ const int poolIndex = getAvFramePoolIndex(src);
+ const ComPtr<ID3D11Texture2D> poolTex = getAvFrameTexture(src);
+
+ D3D11_TEXTURE2D_DESC texDesc{};
+ poolTex->GetDesc(&texDesc);
+
+ texDesc.ArraySize = 1;
+ texDesc.MiscFlags = 0;
+ texDesc.BindFlags = 0;
+
+ ComPtr<ID3D11Texture2D> destTex;
+ if (hwDevCtx->device->CreateTexture2D(&texDesc, nullptr, &destTex) != S_OK) {
+ qCCritical(qLcMediaFFmpegHWAccel) << "Unable to copy frame from decoder pool";
+ return {};
+ }
+
+ hwDevCtx->device_context->CopySubresourceRegion(destTex.Get(), 0, 0, 0, 0, poolTex.Get(),
+ poolIndex, nullptr);
+
+ return destTex;
+}
+
} // namespace
namespace QFFmpeg {
@@ -228,18 +284,17 @@ TextureSet *D3D11TextureConverter::getTextures(AVFrame *frame)
if (!frame || !frame->hw_frames_ctx || frame->format != AV_PIX_FMT_D3D11)
return nullptr;
- const auto *fCtx = reinterpret_cast<AVHWFramesContext *>(frame->hw_frames_ctx->data);
- const auto *ctx = fCtx->device_ctx;
+ const auto *ctx = avFrameDeviceContext(frame);
if (!ctx || ctx->type != AV_HWDEVICE_TYPE_D3D11VA)
return nullptr;
- const ComPtr<ID3D11Texture2D> ffmpegTex = reinterpret_cast<ID3D11Texture2D *>(frame->data[0]);
- const int index = static_cast<int>(reinterpret_cast<intptr_t>(frame->data[1]));
+ const ComPtr<ID3D11Texture2D> ffmpegTex = getAvFrameTexture(frame);
+ const int index = getAvFramePoolIndex(frame);
if (rhi->backend() == QRhi::D3D11) {
{
- const auto *avDeviceCtx = static_cast<AVD3D11VADeviceContext *>(ctx->hwctx);
+ const auto *avDeviceCtx = getHwDeviceContext(ctx);
if (!avDeviceCtx)
return nullptr;
@@ -273,19 +328,6 @@ TextureSet *D3D11TextureConverter::getTextures(AVFrame *frame)
void D3D11TextureConverter::SetupDecoderTextures(AVCodecContext *s)
{
- // We are holding pool frames alive for quite long, which may cause
- // codecs to run out of frames because FFmpeg has a fixed size
- // decoder frame pool. We must therefore add extra frames to the pool
- // to account for the frames we keep alive. First, we need to account
- // for the maximum number of queued frames during rendering. In
- // addition, we add one frame for the RHI rendering pipeline, and one
- // additional frame because we may hold one in the Qt event loop.
-
- const qint32 maxRenderQueueSize = StreamDecoder::maxQueueSize(QPlatformMediaPlayer::VideoStream);
- constexpr qint32 framesHeldByRhi = 1;
- constexpr qint32 framesHeldByQtEventLoop = 1;
- s->extra_hw_frames = maxRenderQueueSize + framesHeldByRhi + framesHeldByQtEventLoop;
-
int ret = avcodec_get_hw_frames_parameters(s, s->hw_device_ctx, AV_PIX_FMT_D3D11,
&s->hw_frames_ctx);
if (ret < 0) {
@@ -304,6 +346,41 @@ void D3D11TextureConverter::SetupDecoderTextures(AVCodecContext *s)
}
}
+AVFrameUPtr copyFromHwPoolD3D11(AVFrameUPtr src)
+{
+ if (!src || !src->hw_frames_ctx || src->format != AV_PIX_FMT_D3D11)
+ return src;
+
+ const AVHWDeviceContext *avDevCtx = avFrameDeviceContext(src.get());
+ if (!avDevCtx || avDevCtx->type != AV_HWDEVICE_TYPE_D3D11VA)
+ return src;
+
+ AVFrameUPtr dest = makeAVFrame();
+ if (av_frame_copy_props(dest.get(), src.get()) != 0) {
+ qCCritical(qLcMediaFFmpegHWAccel) << "Unable to copy frame from decoder pool";
+ return src;
+ }
+
+ const AVD3D11VADeviceContext *hwDevCtx = getHwDeviceContext(avDevCtx);
+ ComPtr<ID3D11Texture2D> destTex;
+ {
+ hwDevCtx->lock(hwDevCtx->lock_ctx);
+ destTex = copyTexture(hwDevCtx, src.get());
+ hwDevCtx->unlock(hwDevCtx->lock_ctx);
+ }
+
+ dest->buf[0] = wrapTextureAsBuffer(destTex);
+ dest->data[0] = reinterpret_cast<uint8_t *>(destTex.Detach());
+ dest->data[1] = reinterpret_cast<uint8_t *>(0); // This texture is not a texture array
+
+ dest->width = src->width;
+ dest->height = src->height;
+ dest->format = src->format;
+ dest->hw_frames_ctx = av_buffer_ref(src->hw_frames_ctx);
+
+ return dest;
+}
+
} // namespace QFFmpeg
QT_END_NAMESPACE
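
A sketch of how a decoder-side caller might use the new copyFromHwPool helper to detach a frame from FFmpeg's fixed-size D3D11 decoder pool. copyFromHwPool, makeAVFrame and AVFrameUPtr are internal helpers used elsewhere in this patch; the include path and the function name takeDecodedFrame are assumptions for illustration:

```cpp
#include "qffmpeghwaccel_p.h" // assumed location of AVFrameUPtr / copyFromHwPool

extern "C" {
#include <libavcodec/avcodec.h>
}

namespace QFFmpeg {

// Illustrative helper, not part of the patch.
AVFrameUPtr takeDecodedFrame(AVCodecContext *codecContext)
{
    AVFrameUPtr frame = makeAVFrame();
    if (avcodec_receive_frame(codecContext, frame.get()) < 0)
        return nullptr;

    // For AV_PIX_FMT_D3D11 frames this copies the texture out of the decoder
    // pool, so frames held in Qt queues no longer starve the decoder; other
    // formats pass through unchanged.
    return copyFromHwPool(std::move(frame));
}

} // namespace QFFmpeg
```
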
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11_p.h b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11_p.h
index bfcc1f10c..395016c69 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11_p.h
@@ -95,6 +95,8 @@ private:
TextureBridge m_bridge;
};
+AVFrameUPtr copyFromHwPoolD3D11(AVFrameUPtr src);
+
} // namespace QFFmpeg
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_p.h b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_p.h
index eee535343..5207e76c7 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_p.h
@@ -16,7 +16,8 @@
#include "qffmpeg_p.h"
#include "qvideoframeformat.h"
-#include <private/qabstractvideobuffer_p.h>
+#include "qabstractvideobuffer.h"
+
#include <qshareddata.h>
#include <memory>
#include <functional>
@@ -122,6 +123,8 @@ private:
HWAccel(AVBufferUPtr hwDeviceContext) : m_hwDeviceContext(std::move(hwDeviceContext)) { }
};
+AVFrameUPtr copyFromHwPool(AVFrameUPtr frame);
+
}
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi.cpp b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi.cpp
index 09ffaaf71..069a04f52 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi.cpp
@@ -184,7 +184,7 @@ VAAPITextureConverter::VAAPITextureConverter(QRhi *rhi)
}
const QString platform = QGuiApplication::platformName();
QPlatformNativeInterface *pni = QGuiApplication::platformNativeInterface();
- eglDisplay = pni->nativeResourceForIntegration("egldisplay");
+ eglDisplay = pni->nativeResourceForIntegration(QByteArrayLiteral("egldisplay"));
qCDebug(qLHWAccelVAAPI) << " platform is" << platform << eglDisplay;
if (!eglDisplay) {
@@ -217,8 +217,7 @@ TextureSet *VAAPITextureConverter::getTextures(AVFrame *frame)
if (!frame->hw_frames_ctx)
return nullptr;
- auto *fCtx = (AVHWFramesContext *)frame->hw_frames_ctx->data;
- auto *ctx = fCtx->device_ctx;
+ auto *ctx = avFrameDeviceContext(frame);
if (!ctx)
return nullptr;
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegimagecapture.cpp b/src/plugins/multimedia/ffmpeg/qffmpegimagecapture.cpp
index 2fb878784..a9fc8f7af 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegimagecapture.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegimagecapture.cpp
@@ -20,7 +20,7 @@ QT_BEGIN_NAMESPACE
// Probably, might be increased. To be investigated and tested on Android implementation
static constexpr int MaxPendingImagesCount = 1;
-static Q_LOGGING_CATEGORY(qLcImageCapture, "qt.multimedia.imageCapture")
+Q_STATIC_LOGGING_CATEGORY(qLcImageCapture, "qt.multimedia.imageCapture")
QFFmpegImageCapture::QFFmpegImageCapture(QImageCapture *parent)
: QPlatformImageCapture(parent)
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession.cpp
index f02593d16..545464ce8 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession.cpp
@@ -6,9 +6,12 @@
#include "private/qplatformaudioinput_p.h"
#include "private/qplatformaudiooutput_p.h"
#include "private/qplatformsurfacecapture_p.h"
+#include "private/qplatformaudiobufferinput_p.h"
+#include "private/qplatformvideoframeinput_p.h"
+#include "private/qplatformcamera_p.h"
+
#include "qffmpegimagecapture_p.h"
#include "qffmpegmediarecorder_p.h"
-#include "private/qplatformcamera_p.h"
#include "qvideosink.h"
#include "qffmpegaudioinput_p.h"
#include "qaudiosink.h"
@@ -19,7 +22,7 @@
QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLcFFmpegMediaCaptureSession, "qt.multimedia.ffmpeg.mediacapturesession")
+Q_STATIC_LOGGING_CATEGORY(qLcFFmpegMediaCaptureSession, "qt.multimedia.ffmpeg.mediacapturesession")
static int preferredAudioSinkBufferSize(const QFFmpegAudioInput &input)
{
@@ -72,6 +75,17 @@ void QFFmpegMediaCaptureSession::setWindowCapture(QPlatformSurfaceCapture *windo
emit windowCaptureChanged();
}
+QPlatformVideoFrameInput *QFFmpegMediaCaptureSession::videoFrameInput()
+{
+ return m_videoFrameInput;
+}
+
+void QFFmpegMediaCaptureSession::setVideoFrameInput(QPlatformVideoFrameInput *input)
+{
+ if (setVideoSource(m_videoFrameInput, input))
+ emit videoFrameInputChanged();
+}
+
QPlatformImageCapture *QFFmpegMediaCaptureSession::imageCapture()
{
return m_imageCapture;
@@ -136,6 +150,12 @@ void QFFmpegMediaCaptureSession::setAudioInput(QPlatformAudioInput *input)
updateAudioSink();
}
+void QFFmpegMediaCaptureSession::setAudioBufferInput(QPlatformAudioBufferInput *input)
+{
+ // TODO: implement binding to audio sink like setAudioInput does
+ m_audioBufferInput = input;
+}
+
void QFFmpegMediaCaptureSession::updateAudioSink()
{
if (m_audioSink) {
@@ -191,7 +211,7 @@ void QFFmpegMediaCaptureSession::updateVolume()
m_audioSink->setVolume(m_audioOutput->muted ? 0.f : m_audioOutput->volume);
}
-QPlatformAudioInput *QFFmpegMediaCaptureSession::audioInput()
+QPlatformAudioInput *QFFmpegMediaCaptureSession::audioInput() const
{
return m_audioInput;
}
@@ -281,6 +301,18 @@ QPlatformVideoSource *QFFmpegMediaCaptureSession::primaryActiveVideoSource()
return m_primaryActiveVideoSource;
}
+std::vector<QPlatformAudioBufferInputBase *> QFFmpegMediaCaptureSession::activeAudioInputs() const
+{
+ std::vector<QPlatformAudioBufferInputBase *> result;
+ if (m_audioInput)
+ result.push_back(m_audioInput);
+
+ if (m_audioBufferInput)
+ result.push_back(m_audioBufferInput);
+
+ return result;
+}
+
QT_END_NAMESPACE
#include "moc_qffmpegmediacapturesession_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession_p.h b/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession_p.h
index 6c80d0b09..25340dad5 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession_p.h
@@ -29,6 +29,8 @@ class QAudioSink;
class QFFmpegAudioInput;
class QAudioBuffer;
class QPlatformVideoSource;
+class QPlatformAudioBufferInput;
+class QPlatformAudioBufferInputBase;
class QFFmpegMediaCaptureSession : public QPlatformMediaCaptureSession
{
@@ -49,6 +51,9 @@ public:
QPlatformSurfaceCapture *windowCapture() override;
void setWindowCapture(QPlatformSurfaceCapture *) override;
+ QPlatformVideoFrameInput *videoFrameInput() override;
+ void setVideoFrameInput(QPlatformVideoFrameInput *) override;
+
QPlatformImageCapture *imageCapture() override;
void setImageCapture(QPlatformImageCapture *imageCapture) override;
@@ -56,13 +61,19 @@ public:
void setMediaRecorder(QPlatformMediaRecorder *recorder) override;
void setAudioInput(QPlatformAudioInput *input) override;
- QPlatformAudioInput *audioInput();
+ QPlatformAudioInput *audioInput() const;
+
+ void setAudioBufferInput(QPlatformAudioBufferInput *input) override;
void setVideoPreview(QVideoSink *sink) override;
void setAudioOutput(QPlatformAudioOutput *output) override;
QPlatformVideoSource *primaryActiveVideoSource();
+ // this might be moved to the base class, but that would require QPlatformAudioInput
+ // to be a QPlatformAudioBufferInputBase, which might not make sense
+ std::vector<QPlatformAudioBufferInputBase *> activeAudioInputs() const;
+
private Q_SLOTS:
void updateAudioSink();
void updateVolume();
@@ -79,9 +90,12 @@ private:
QPointer<QPlatformCamera> m_camera;
QPointer<QPlatformSurfaceCapture> m_screenCapture;
QPointer<QPlatformSurfaceCapture> m_windowCapture;
+ QPointer<QPlatformVideoFrameInput> m_videoFrameInput;
QPointer<QPlatformVideoSource> m_primaryActiveVideoSource;
- QFFmpegAudioInput *m_audioInput = nullptr;
+ QPointer<QFFmpegAudioInput> m_audioInput;
+ QPointer<QPlatformAudioBufferInput> m_audioBufferInput;
+
QFFmpegImageCapture *m_imageCapture = nullptr;
QFFmpegMediaRecorder *m_mediaRecorder = nullptr;
QPlatformAudioOutput *m_audioOutput = nullptr;
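
Several members here switch from raw pointers to QPointer. A minimal sketch of the behavior that motivates the change: QPointer resets itself when the tracked QObject is destroyed, so the session never dereferences a dangling source.

```cpp
#include <QtCore/qobject.h>
#include <QtCore/qpointer.h>

int main()
{
    QPointer<QObject> guard;
    {
        QObject source;        // stands in for an audio input / video frame input
        guard = &source;
        Q_ASSERT(!guard.isNull());
    }                          // source destroyed here
    Q_ASSERT(guard.isNull());  // QPointer cleared itself; no dangling access
    return 0;
}
```
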
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediaformatinfo.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediaformatinfo.cpp
index 6389b4eed..b57ffb4b3 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegmediaformatinfo.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediaformatinfo.cpp
@@ -9,7 +9,7 @@
QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLcMediaFormatInfo, "qt.multimedia.ffmpeg.mediaformatinfo")
+Q_STATIC_LOGGING_CATEGORY(qLcMediaFormatInfo, "qt.multimedia.ffmpeg.mediaformatinfo")
static struct {
AVCodecID id;
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp
index 291ca1111..ba1fff3b3 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp
@@ -13,8 +13,8 @@
#include "qffmpegaudioinput_p.h"
#include "qffmpegaudiodecoder_p.h"
#include "qffmpegresampler_p.h"
-#include "qffmpegsymbolsresolve_p.h"
#include "qgrabwindowsurfacecapture_p.h"
+#include "qffmpegconverter_p.h"
#ifdef Q_OS_MACOS
#include <VideoToolbox/VideoToolbox.h>
@@ -171,8 +171,6 @@ static QPlatformSurfaceCapture *createWindowCaptureByBackend(QString backend)
QFFmpegMediaIntegration::QFFmpegMediaIntegration()
: QPlatformMediaIntegration(QLatin1String("ffmpeg"))
{
- resolveSymbols();
-
setupFFmpegLogger();
#ifndef QT_NO_DEBUG
@@ -308,6 +306,12 @@ QMaybe<QPlatformAudioInput *> QFFmpegMediaIntegration::createAudioInput(QAudioIn
return new QFFmpegAudioInput(input);
}
+QVideoFrame QFFmpegMediaIntegration::convertVideoFrame(QVideoFrame &srcFrame,
+ const QVideoFrameFormat &destFormat)
+{
+ return convertFrame(srcFrame, destFormat);
+}
+
QPlatformMediaFormatInfo *QFFmpegMediaIntegration::createFormatInfo()
{
return new QFFmpegMediaFormatInfo;
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration_p.h b/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration_p.h
index b8787bcc4..473a5f044 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration_p.h
@@ -41,6 +41,9 @@ public:
QMaybe<QPlatformAudioInput *> createAudioInput(QAudioInput *input) override;
// QPlatformAudioOutput *createAudioOutput(QAudioOutput *) override;
+ QVideoFrame convertVideoFrame(QVideoFrame &srcFrame,
+ const QVideoFrameFormat &destFormat) override;
+
protected:
QPlatformMediaFormatInfo *createFormatInfo() override;
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediametadata.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediametadata.cpp
index 465e380db..db3878c09 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegmediametadata.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediametadata.cpp
@@ -12,7 +12,7 @@
QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLcMetaData, "qt.multimedia.ffmpeg.metadata")
+Q_STATIC_LOGGING_CATEGORY(qLcMetaData, "qt.multimedia.ffmpeg.metadata")
namespace {
@@ -163,9 +163,8 @@ QByteArray QFFmpegMetaData::value(const QMediaMetaData &metaData, QMediaMetaData
AVDictionary *QFFmpegMetaData::toAVMetaData(const QMediaMetaData &metaData)
{
- const QList<Key> keys = metaData.keys();
AVDictionary *dict = nullptr;
- for (const auto &k : keys) {
+ for (const auto &&[k, v] : metaData.asKeyValueRange()) {
const char *key = ::keyToTag(k);
if (!key)
continue;
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer.cpp
index 6a950a6ad..951144692 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer.cpp
@@ -5,6 +5,7 @@
#include "private/qplatformaudiooutput_p.h"
#include "qvideosink.h"
#include "qaudiooutput.h"
+#include "qaudiobufferoutput.h"
#include "qffmpegplaybackengine_p.h"
#include <qiodevice.h>
@@ -75,12 +76,17 @@ void QFFmpegMediaPlayer::updatePosition()
void QFFmpegMediaPlayer::endOfStream()
{
- // start update timer and report end position anyway
+ // stop update timer and report end position anyway
m_positionUpdateTimer.stop();
+ QPointer currentPlaybackEngine(m_playbackEngine.get());
positionChanged(duration());
- stateChanged(QMediaPlayer::StoppedState);
- mediaStatusChanged(QMediaPlayer::EndOfMedia);
+ // skip changing state and mediaStatus if the playbackEngine has been recreated,
+ // e.g. if new media has been loaded in response to the positionChanged signal
+ if (currentPlaybackEngine)
+ stateChanged(QMediaPlayer::StoppedState);
+ if (currentPlaybackEngine)
+ mediaStatusChanged(QMediaPlayer::EndOfMedia);
}
void QFFmpegMediaPlayer::onLoopChanged()
@@ -146,7 +152,7 @@ void QFFmpegMediaPlayer::setPlaybackRate(qreal rate)
if (m_playbackEngine)
m_playbackEngine->setPlaybackRate(effectiveRate);
- emit playbackRateChanged(effectiveRate);
+ playbackRateChanged(effectiveRate);
}
QUrl QFFmpegMediaPlayer::media() const
@@ -241,8 +247,10 @@ void QFFmpegMediaPlayer::setMediaAsync(QFFmpeg::MediaDataHolder::Maybe mediaData
m_playbackEngine->setMedia(std::move(*mediaDataHolder.value()));
+ m_playbackEngine->setAudioBufferOutput(m_audioBufferOutput);
m_playbackEngine->setAudioSink(m_audioOutput);
m_playbackEngine->setVideoSink(m_videoSink);
+
m_playbackEngine->setLoops(loops());
m_playbackEngine->setPlaybackRate(m_playbackRate);
@@ -336,14 +344,17 @@ void QFFmpegMediaPlayer::stop()
void QFFmpegMediaPlayer::setAudioOutput(QPlatformAudioOutput *output)
{
- if (m_audioOutput == output)
- return;
-
m_audioOutput = output;
if (m_playbackEngine)
m_playbackEngine->setAudioSink(output);
}
+void QFFmpegMediaPlayer::setAudioBufferOutput(QAudioBufferOutput *output) {
+ m_audioBufferOutput = output;
+ if (m_playbackEngine)
+ m_playbackEngine->setAudioBufferOutput(output);
+}
+
QMediaMetaData QFFmpegMediaPlayer::metaData() const
{
return m_playbackEngine ? m_playbackEngine->metaData() : QMediaMetaData{};
@@ -351,9 +362,6 @@ QMediaMetaData QFFmpegMediaPlayer::metaData() const
void QFFmpegMediaPlayer::setVideoSink(QVideoSink *sink)
{
- if (m_videoSink == sink)
- return;
-
m_videoSink = sink;
if (m_playbackEngine)
m_playbackEngine->setVideoSink(sink);
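
The endOfStream change guards against the playback engine being replaced from inside a signal handler. A compact, self-contained sketch of that guard pattern, with illustrative names:

```cpp
#include <QtCore/qobject.h>
#include <QtCore/qpointer.h>
#include <memory>

// Illustrative names; the real code guards m_playbackEngine across positionChanged().
class EngineSketch : public QObject { };

class PlayerSketch
{
public:
    void endOfStream()
    {
        // Remember which engine was current when the handler started.
        const QPointer<EngineSketch> current(m_engine.get());

        notifyPosition(); // listeners may load new media here, replacing m_engine

        // Only report the stopped / end-of-media transition if the original
        // engine is still alive; otherwise the new load owns the state changes.
        if (current)
            reportStopped();
    }

    void loadNewMedia() { m_engine = std::make_unique<EngineSketch>(); }

private:
    void notifyPosition() { }
    void reportStopped() { }

    std::unique_ptr<EngineSketch> m_engine = std::make_unique<EngineSketch>();
};
```
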
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer_p.h b/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer_p.h
index 4e700787e..4ab5701da 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer_p.h
@@ -61,6 +61,8 @@ public:
void setAudioOutput(QPlatformAudioOutput *) override;
+ void setAudioBufferOutput(QAudioBufferOutput *) override;
+
QMediaMetaData metaData() const override;
void setVideoSink(QVideoSink *sink) override;
@@ -98,6 +100,7 @@ private:
std::unique_ptr<PlaybackEngine> m_playbackEngine;
QPlatformAudioOutput *m_audioOutput = nullptr;
+ QPointer<QAudioBufferOutput> m_audioBufferOutput;
QPointer<QVideoSink> m_videoSink;
QUrl m_url;
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder.cpp
index 97b4570d5..f30350f83 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder.cpp
@@ -15,7 +15,7 @@
#include <qdebug.h>
#include <qloggingcategory.h>
-static Q_LOGGING_CATEGORY(qLcMediaEncoder, "qt.multimedia.ffmpeg.encoder");
+Q_STATIC_LOGGING_CATEGORY(qLcMediaEncoder, "qt.multimedia.ffmpeg.encoder");
QT_BEGIN_NAMESPACE
@@ -32,7 +32,7 @@ bool QFFmpegMediaRecorder::isLocationWritable(const QUrl &) const
void QFFmpegMediaRecorder::handleSessionError(QMediaRecorder::Error code, const QString &description)
{
- error(code, description);
+ updateError(code, description);
stop();
}
@@ -42,11 +42,12 @@ void QFFmpegMediaRecorder::record(QMediaEncoderSettings &settings)
return;
auto videoSources = m_session->activeVideoSources();
+ auto audioInputs = m_session->activeAudioInputs();
const auto hasVideo = !videoSources.empty();
- const auto hasAudio = m_session->audioInput() != nullptr;
+ const auto hasAudio = !audioInputs.empty();
if (!hasVideo && !hasAudio) {
- error(QMediaRecorder::ResourceError, QMediaRecorder::tr("No video or audio input"));
+ updateError(QMediaRecorder::ResourceError, QMediaRecorder::tr("No video or audio input"));
return;
}
@@ -72,36 +73,37 @@ void QFFmpegMediaRecorder::record(QMediaEncoderSettings &settings)
<< settings.audioCodec();
if (!formatContext->isAVIOOpen()) {
- error(QMediaRecorder::LocationNotWritable,
- QMediaRecorder::tr("Cannot open the output location for writing"));
+ updateError(QMediaRecorder::LocationNotWritable,
+ QMediaRecorder::tr("Cannot open the output location for writing"));
return;
}
m_recordingEngine.reset(new RecordingEngine(settings, std::move(formatContext)));
m_recordingEngine->setMetaData(m_metaData);
+
connect(m_recordingEngine.get(), &QFFmpeg::RecordingEngine::durationChanged, this,
&QFFmpegMediaRecorder::newDuration);
connect(m_recordingEngine.get(), &QFFmpeg::RecordingEngine::finalizationDone, this,
&QFFmpegMediaRecorder::finalizationDone);
- connect(m_recordingEngine.get(), &QFFmpeg::RecordingEngine::error, this,
+ connect(m_recordingEngine.get(), &QFFmpeg::RecordingEngine::sessionError, this,
&QFFmpegMediaRecorder::handleSessionError);
- auto *audioInput = m_session->audioInput();
- if (audioInput) {
- if (audioInput->device.isNull())
- qWarning() << "Audio input device is null; cannot encode audio";
- else
- m_recordingEngine->addAudioInput(static_cast<QFFmpegAudioInput *>(audioInput));
- }
+ updateAutoStop();
+
+ auto handleStreamInitializationError = [this](QMediaRecorder::Error code,
+ const QString &description) {
+ qCWarning(qLcMediaEncoder) << "Stream initialization error:" << description;
+ updateError(code, description);
+ };
- for (auto source : videoSources)
- m_recordingEngine->addVideoSource(source);
+ connect(m_recordingEngine.get(), &QFFmpeg::RecordingEngine::streamInitializationError, this,
+ handleStreamInitializationError);
durationChanged(0);
stateChanged(QMediaRecorder::RecordingState);
actualLocationChanged(QUrl::fromLocalFile(actualLocation));
- m_recordingEngine->start();
+ m_recordingEngine->initialize(audioInputs, videoSources);
}
void QFFmpegMediaRecorder::pause()
@@ -169,6 +171,22 @@ void QFFmpegMediaRecorder::setCaptureSession(QFFmpegMediaCaptureSession *session
return;
}
+void QFFmpegMediaRecorder::updateAutoStop()
+{
+ const bool autoStop = mediaRecorder()->autoStop();
+ if (!m_recordingEngine || m_recordingEngine->autoStop() == autoStop)
+ return;
+
+ if (autoStop)
+ connect(m_recordingEngine.get(), &QFFmpeg::RecordingEngine::autoStopped, this,
+ &QFFmpegMediaRecorder::stop);
+ else
+ disconnect(m_recordingEngine.get(), &QFFmpeg::RecordingEngine::autoStopped, this,
+ &QFFmpegMediaRecorder::stop);
+
+ m_recordingEngine->setAutoStop(autoStop);
+}
+
void QFFmpegMediaRecorder::RecordingEngineDeleter::operator()(
RecordingEngine *recordingEngine) const
{
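
updateAutoStop keeps a signal connection in sync with the recorder's autoStop flag. A self-contained sketch of the same toggle idea, using a connection handle and a stand-in signal (the real code connects RecordingEngine::autoStopped to stop()):

```cpp
#include <QtCore/qdebug.h>
#include <QtCore/qobject.h>

// Stand-in toggle: QObject::objectNameChanged is used only so the sketch
// compiles on its own; the connected slot is a placeholder lambda.
class AutoStopToggleSketch
{
public:
    void setAutoStop(QObject *engine, bool autoStop)
    {
        if (autoStop == m_autoStop)
            return; // already in the desired state

        if (autoStop)
            m_connection = QObject::connect(engine, &QObject::objectNameChanged,
                                            [] { qDebug() << "auto-stop triggered"; });
        else
            QObject::disconnect(m_connection);

        m_autoStop = autoStop;
    }

private:
    bool m_autoStop = false;
    QMetaObject::Connection m_connection;
};
```
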
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder_p.h b/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder_p.h
index 8b73ad76d..af3ee1509 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder_p.h
@@ -48,6 +48,8 @@ public:
void setCaptureSession(QFFmpegMediaCaptureSession *session);
+ void updateAutoStop() override;
+
private Q_SLOTS:
void newDuration(qint64 d) { durationChanged(d); }
void finalizationDone();
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegopensslsymbols.cpp b/src/plugins/multimedia/ffmpeg/qffmpegopensslsymbols.cpp
deleted file mode 100644
index e0e5de137..000000000
--- a/src/plugins/multimedia/ffmpeg/qffmpegopensslsymbols.cpp
+++ /dev/null
@@ -1,185 +0,0 @@
-// Copyright (C) 2023 The Qt Company Ltd.
-// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-
-#include <QtCore/qlibrary.h>
-
-#include "qffmpegsymbolsresolveutils_p.h"
-
-#include <QtCore/qglobal.h>
-#include <qstringliteral.h>
-
-#include <openssl/bio.h>
-#include <openssl/ssl.h>
-#include <openssl/bn.h>
-#include <openssl/err.h>
-#include <openssl/rand.h>
-
-QT_BEGIN_NAMESPACE
-
-using namespace Qt::StringLiterals;
-
-static Libs loadLibs()
-{
- Libs libs(2);
- libs[0] = std::make_unique<QLibrary>();
- libs[1] = std::make_unique<QLibrary>();
-
- const auto majorVersion = OPENSSL_VERSION_NUMBER >> 28;
-
- auto tryLoad = [&](QString sslName, QString cryptoName, auto version) {
- libs[0]->setFileNameAndVersion(sslName, version);
- libs[1]->setFileNameAndVersion(cryptoName, version);
- return LibSymbolsResolver::tryLoad(libs);
- };
-
-// Due to binary compatibility issues between 1.x.x openssl version, let's try taking exact version
-#if defined(SHLIB_VERSION_NUMBER)
- if (majorVersion <= 1 && tryLoad("ssl"_L1, "crypto"_L1, SHLIB_VERSION_NUMBER ""_L1))
- return libs;
-#endif
-
-// openssl on Android has specific suffixes
-#if defined(Q_OS_ANDROID)
- {
- auto suffix = qEnvironmentVariable("ANDROID_OPENSSL_SUFFIX");
- if (suffix.isEmpty())
- suffix = QString("_"_L1) + QString::number(majorVersion);
-
- if (tryLoad("ssl"_L1 + suffix, "crypto"_L1 + suffix, -1))
- return libs;
- }
-#endif
-
- if (tryLoad("ssl"_L1, "crypto"_L1, majorVersion))
- return libs;
-
- return {};
-};
-
-Q_GLOBAL_STATIC(LibSymbolsResolver, resolver, "OpenSsl", 75, loadLibs);
-
-void resolveOpenSsl()
-{
- resolver()->resolve();
-}
-
-QT_END_NAMESPACE
-
-QT_USE_NAMESPACE
-
-// BN functions
-
-DEFINE_FUNC(BN_value_one, 0);
-DEFINE_FUNC(BN_mod_word, 2);
-
-DEFINE_FUNC(BN_div_word, 2)
-DEFINE_FUNC(BN_mul_word, 2)
-DEFINE_FUNC(BN_add_word, 2)
-DEFINE_FUNC(BN_sub_word, 2)
-DEFINE_FUNC(BN_set_word, 2)
-DEFINE_FUNC(BN_new, 0)
-DEFINE_FUNC(BN_cmp, 2)
-
-DEFINE_FUNC(BN_free, 1);
-
-DEFINE_FUNC(BN_copy, 2);
-
-DEFINE_FUNC(BN_CTX_new, 0);
-
-DEFINE_FUNC(BN_CTX_free, 1);
-DEFINE_FUNC(BN_CTX_start, 1);
-
-DEFINE_FUNC(BN_CTX_get, 1);
-DEFINE_FUNC(BN_CTX_end, 1);
-
-DEFINE_FUNC(BN_rand, 4);
-DEFINE_FUNC(BN_mod_exp, 5);
-
-DEFINE_FUNC(BN_num_bits, 1);
-DEFINE_FUNC(BN_num_bits_word, 1);
-
-DEFINE_FUNC(BN_bn2hex, 1);
-DEFINE_FUNC(BN_bn2dec, 1);
-
-DEFINE_FUNC(BN_hex2bn, 2);
-DEFINE_FUNC(BN_dec2bn, 2);
-DEFINE_FUNC(BN_asc2bn, 2);
-
-DEFINE_FUNC(BN_bn2bin, 2);
-DEFINE_FUNC(BN_bin2bn, 3);
-
-// BIO-related functions
-
-DEFINE_FUNC(BIO_new, 1);
-DEFINE_FUNC(BIO_free, 1);
-
-DEFINE_FUNC(BIO_read, 3, -1);
-DEFINE_FUNC(BIO_write, 3, -1);
-DEFINE_FUNC(BIO_s_mem, 0);
-
-DEFINE_FUNC(BIO_set_data, 2);
-
-DEFINE_FUNC(BIO_get_data, 1);
-DEFINE_FUNC(BIO_set_init, 2);
-
-DEFINE_FUNC(BIO_set_flags, 2);
-DEFINE_FUNC(BIO_test_flags, 2);
-DEFINE_FUNC(BIO_clear_flags, 2);
-
-DEFINE_FUNC(BIO_meth_new, 2);
-DEFINE_FUNC(BIO_meth_free, 1);
-
-DEFINE_FUNC(BIO_meth_set_write, 2);
-DEFINE_FUNC(BIO_meth_set_read, 2);
-DEFINE_FUNC(BIO_meth_set_puts, 2);
-DEFINE_FUNC(BIO_meth_set_gets, 2);
-DEFINE_FUNC(BIO_meth_set_ctrl, 2);
-DEFINE_FUNC(BIO_meth_set_create, 2);
-DEFINE_FUNC(BIO_meth_set_destroy, 2);
-DEFINE_FUNC(BIO_meth_set_callback_ctrl, 2);
-
-// SSL functions
-
-DEFINE_FUNC(SSL_CTX_new, 1);
-DEFINE_FUNC(SSL_CTX_up_ref, 1);
-DEFINE_FUNC(SSL_CTX_free, 1);
-
-DEFINE_FUNC(SSL_new, 1);
-DEFINE_FUNC(SSL_up_ref, 1);
-DEFINE_FUNC(SSL_free, 1);
-
-DEFINE_FUNC(SSL_accept, 1);
-DEFINE_FUNC(SSL_stateless, 1);
-DEFINE_FUNC(SSL_connect, 1);
-DEFINE_FUNC(SSL_read, 3, -1);
-DEFINE_FUNC(SSL_peek, 3);
-DEFINE_FUNC(SSL_write, 3, -1);
-DEFINE_FUNC(SSL_ctrl, 4);
-DEFINE_FUNC(SSL_shutdown, 1);
-DEFINE_FUNC(SSL_set_bio, 3);
-
-// options are unsigned long in openssl 1.1.1, and uint64 in 3.x.x
-DEFINE_FUNC(SSL_CTX_set_options, 2);
-
-DEFINE_FUNC(SSL_get_error, 2);
-DEFINE_FUNC(SSL_CTX_load_verify_locations, 3, -1);
-
-DEFINE_FUNC(SSL_CTX_set_verify, 3);
-DEFINE_FUNC(SSL_CTX_use_PrivateKey, 2);
-
-DEFINE_FUNC(SSL_CTX_use_PrivateKey_file, 3);
-DEFINE_FUNC(SSL_CTX_use_certificate_chain_file, 2);
-
-DEFINE_FUNC(ERR_get_error, 0);
-
-static char ErrorString[] = "Ssl not found";
-DEFINE_FUNC(ERR_error_string, 2, ErrorString);
-
-// TLS functions
-
-DEFINE_FUNC(TLS_client_method, 0);
-DEFINE_FUNC(TLS_server_method, 0);
-
-// RAND functions
-
-DEFINE_FUNC(RAND_bytes, 2);
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine.cpp b/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine.cpp
index 811feb0d5..74e58203a 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine.cpp
@@ -6,6 +6,7 @@
#include "qaudiooutput.h"
#include "private/qplatformaudiooutput_p.h"
#include "private/qplatformvideosink_p.h"
+#include "private/qaudiobufferoutput_p.h"
#include "qiodevice.h"
#include "playbackengine/qffmpegdemuxer_p.h"
#include "playbackengine/qffmpegstreamdecoder_p.h"
@@ -19,7 +20,7 @@ QT_BEGIN_NAMESPACE
namespace QFFmpeg {
-static Q_LOGGING_CATEGORY(qLcPlaybackEngine, "qt.multimedia.ffmpeg.playbackengine");
+Q_STATIC_LOGGING_CATEGORY(qLcPlaybackEngine, "qt.multimedia.ffmpeg.playbackengine");
// The helper is needed since on some compilers std::unique_ptr
// doesn't have a default constructor in the case of sizeof(CustomDeleter) > 0
@@ -200,8 +201,8 @@ PlaybackEngine::createRenderer(QPlatformMediaPlayer::TrackType trackType)
? createPlaybackEngineObject<VideoRenderer>(m_timeController, m_videoSink, m_media.rotation())
: RendererPtr{ {}, {} };
case QPlatformMediaPlayer::AudioStream:
- return m_audioOutput
- ? createPlaybackEngineObject<AudioRenderer>(m_timeController, m_audioOutput)
+ return m_audioOutput || m_audioBufferOutput
+ ? createPlaybackEngineObject<AudioRenderer>(m_timeController, m_audioOutput, m_audioBufferOutput)
: RendererPtr{ {}, {} };
case QPlatformMediaPlayer::SubtitleStream:
return m_videoSink
@@ -486,7 +487,7 @@ void PlaybackEngine::setAudioSink(QPlatformAudioOutput *output) {
void PlaybackEngine::setAudioSink(QAudioOutput *output)
{
- auto prev = std::exchange(m_audioOutput, output);
+ QAudioOutput *prev = std::exchange(m_audioOutput, output);
if (prev == output)
return;
@@ -498,6 +499,14 @@ void PlaybackEngine::setAudioSink(QAudioOutput *output)
}
}
+void PlaybackEngine::setAudioBufferOutput(QAudioBufferOutput *output)
+{
+ QAudioBufferOutput *prev = std::exchange(m_audioBufferOutput, output);
+ if (prev == output)
+ return;
+ updateActiveAudioOutput(output);
+}
+
qint64 PlaybackEngine::currentPosition(bool topPos) const {
std::optional<qint64> pos;
@@ -572,7 +581,10 @@ void PlaybackEngine::finilizeTime(qint64 pos)
void PlaybackEngine::finalizeOutputs()
{
- updateActiveAudioOutput(nullptr);
+ if (m_audioBufferOutput)
+ updateActiveAudioOutput(static_cast<QAudioBufferOutput *>(nullptr));
+ if (m_audioOutput)
+ updateActiveAudioOutput(static_cast<QAudioOutput *>(nullptr));
updateActiveVideoOutput(nullptr, true);
}
@@ -582,7 +594,8 @@ bool PlaybackEngine::hasRenderer(quint64 id) const
[id](auto &renderer) { return renderer && renderer->id() == id; });
}
-void PlaybackEngine::updateActiveAudioOutput(QAudioOutput *output)
+template <typename AudioOutput>
+void PlaybackEngine::updateActiveAudioOutput(AudioOutput *output)
{
if (auto renderer =
qobject_cast<AudioRenderer *>(m_renderers[QPlatformMediaPlayer::AudioStream].get()))
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine_p.h b/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine_p.h
index f36828771..50c94c955 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine_p.h
@@ -60,6 +60,7 @@ QT_BEGIN_NAMESPACE
class QAudioSink;
class QVideoSink;
class QAudioOutput;
+class QAudioBufferOutput;
class QFFmpegMediaPlayer;
namespace QFFmpeg
@@ -81,6 +82,8 @@ public:
void setAudioSink(QPlatformAudioOutput *output);
+ void setAudioBufferOutput(QAudioBufferOutput *output);
+
void setState(QMediaPlayer::PlaybackState state);
void play() {
@@ -141,7 +144,8 @@ protected: // objects managing
virtual RendererPtr createRenderer(QPlatformMediaPlayer::TrackType trackType);
- void updateActiveAudioOutput(QAudioOutput *output);
+ template <typename AudioOutput>
+ void updateActiveAudioOutput(AudioOutput *output);
void updateActiveVideoOutput(QVideoSink *sink, bool cleanOutput = false);
@@ -203,6 +207,7 @@ private:
QPointer<QVideoSink> m_videoSink;
QPointer<QAudioOutput> m_audioOutput;
+ QPointer<QAudioBufferOutput> m_audioBufferOutput;
QMediaPlayer::PlaybackState m_state = QMediaPlayer::StoppedState;
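
The header replaces two near-identical audio-output setters with one member template. A sketch of that shape with stand-in types; the names below are illustrative, not the real classes:

```cpp
#include <QtCore/qdebug.h>
#include <type_traits>

struct SinkOutputSketch { };    // stands in for QAudioOutput
struct BufferOutputSketch { };  // stands in for QAudioBufferOutput

class AudioRendererSketch
{
public:
    // One member template instead of two near-identical setters.
    template <typename AudioOutput>
    void updateOutput(AudioOutput *output)
    {
        if constexpr (std::is_same_v<AudioOutput, SinkOutputSketch>)
            qDebug() << "route decoded audio to the device sink" << static_cast<void *>(output);
        else
            qDebug() << "deliver decoded buffers to the application" << static_cast<void *>(output);
    }
};

// Both call sites instantiate the same template.
inline void useSketch(AudioRendererSketch &r, SinkOutputSketch *sink, BufferOutputSketch *buffers)
{
    r.updateOutput(sink);
    r.updateOutput(buffers);
}
```
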
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegresampler.cpp b/src/plugins/multimedia/ffmpeg/qffmpegresampler.cpp
index e5e9ca3bb..8a685b9fd 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegresampler.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegresampler.cpp
@@ -5,7 +5,7 @@
#include "qffmpegmediaformatinfo_p.h"
#include <qloggingcategory.h>
-static Q_LOGGING_CATEGORY(qLcResampler, "qt.multimedia.ffmpeg.resampler")
+Q_STATIC_LOGGING_CATEGORY(qLcResampler, "qt.multimedia.ffmpeg.resampler")
QT_BEGIN_NAMESPACE
@@ -21,8 +21,9 @@ QFFmpegResampler::QFFmpegResampler(const QAudioFormat &inputFormat, const QAudio
createResampleContext(AVAudioFormat(m_inputFormat), AVAudioFormat(m_outputFormat));
}
-QFFmpegResampler::QFFmpegResampler(const Codec* codec, const QAudioFormat &outputFormat)
- : m_outputFormat(outputFormat)
+QFFmpegResampler::QFFmpegResampler(const Codec *codec, const QAudioFormat &outputFormat,
+ qint64 startTime)
+ : m_outputFormat(outputFormat), m_startTime(startTime)
{
Q_ASSERT(codec);
@@ -64,7 +65,7 @@ QAudioBuffer QFFmpegResampler::resample(const uint8_t **inputData, int inputSamp
samples.resize(m_outputFormat.bytesForFrames(outSamples));
- qint64 startTime = m_outputFormat.durationForFrames(m_samplesProcessed);
+ const qint64 startTime = m_outputFormat.durationForFrames(m_samplesProcessed) + m_startTime;
m_samplesProcessed += outSamples;
qCDebug(qLcResampler) << " new frame" << startTime << "in_samples" << inputSamplesCount
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegresampler_p.h b/src/plugins/multimedia/ffmpeg/qffmpegresampler_p.h
index 5109ecf11..530f40aa2 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegresampler_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpegresampler_p.h
@@ -29,7 +29,8 @@ class QFFmpegResampler : public QPlatformAudioResampler
{
public:
QFFmpegResampler(const QAudioFormat &inputFormat, const QAudioFormat &outputFormat);
- QFFmpegResampler(const QFFmpeg::Codec* codec, const QAudioFormat &outputFormat);
+ QFFmpegResampler(const QFFmpeg::Codec *codec, const QAudioFormat &outputFormat,
+ qint64 startTime = 0);
~QFFmpegResampler() override;
@@ -49,6 +50,7 @@ private:
private:
QAudioFormat m_inputFormat;
QAudioFormat m_outputFormat;
+ qint64 m_startTime = 0;
QFFmpeg::SwrContextUPtr m_resampler;
qint64 m_samplesProcessed = 0;
qint64 m_endCompensationSample = std::numeric_limits<qint64>::min();
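
The resampler now offsets output timestamps by a start time supplied at construction. A small sketch of the timestamp arithmetic, using QAudioFormat::durationForFrames (which returns microseconds); the helper name is illustrative:

```cpp
#include <QtMultimedia/qaudioformat.h>
#include <QtCore/qdebug.h>

// Presentation time of a resampled chunk: duration of the frames already
// produced, shifted by the stream's start time.
qint64 outputStartTimeUs(const QAudioFormat &outputFormat, qint32 framesProcessed,
                         qint64 streamStartTimeUs)
{
    return outputFormat.durationForFrames(framesProcessed) + streamStartTimeUs;
}

inline void demo()
{
    QAudioFormat format;
    format.setSampleRate(48000);
    format.setChannelCount(2);
    format.setSampleFormat(QAudioFormat::Int16);

    // 48000 frames at 48 kHz is one second; with a 500 ms start offset the
    // chunk is stamped at 1.5 s.
    qDebug() << outputStartTimeUs(format, 48000, 500000); // 1500000
}
```
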
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegscreencapture_dxgi.cpp b/src/plugins/multimedia/ffmpeg/qffmpegscreencapture_dxgi.cpp
index 69d2d0643..eb5bcfdf8 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegscreencapture_dxgi.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegscreencapture_dxgi.cpp
@@ -3,9 +3,10 @@
#include "qffmpegscreencapture_dxgi_p.h"
#include "qffmpegsurfacecapturegrabber_p.h"
-#include <private/qabstractvideobuffer_p.h>
+#include "qabstractvideobuffer.h"
#include <private/qmultimediautils_p.h>
#include <private/qwindowsmultimediautils_p.h>
+#include <private/qvideoframe_p.h>
#include <qtgui/qscreen_platform.h>
#include "qvideoframe.h"
@@ -71,27 +72,15 @@ class QD3D11TextureVideoBuffer : public QAbstractVideoBuffer
public:
QD3D11TextureVideoBuffer(const ComPtr<ID3D11Device> &device, std::shared_ptr<QMutex> &mutex,
const ComPtr<ID3D11Texture2D> &texture, QSize size)
- : QAbstractVideoBuffer(QVideoFrame::NoHandle)
- , m_device(device)
- , m_texture(texture)
- , m_ctxMutex(mutex)
- , m_size(size)
+ : m_device(device), m_texture(texture), m_ctxMutex(mutex), m_size(size)
{}
- ~QD3D11TextureVideoBuffer()
- {
- QD3D11TextureVideoBuffer::unmap();
- }
-
- QVideoFrame::MapMode mapMode() const override
- {
- return m_mapMode;
- }
+ ~QD3D11TextureVideoBuffer() { Q_ASSERT(m_mapMode == QtVideo::MapMode::NotMapped); }
- MapData map(QVideoFrame::MapMode mode) override
+ MapData map(QtVideo::MapMode mode) override
{
MapData mapData;
- if (!m_ctx && mode == QVideoFrame::ReadOnly) {
+ if (!m_ctx && mode == QtVideo::MapMode::ReadOnly) {
D3D11_TEXTURE2D_DESC texDesc = {};
m_texture->GetDesc(&texDesc);
texDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
@@ -121,10 +110,10 @@ public:
}
m_mapMode = mode;
- mapData.nPlanes = 1;
+ mapData.planeCount = 1;
mapData.bytesPerLine[0] = int(resource.RowPitch);
mapData.data[0] = reinterpret_cast<uchar*>(resource.pData);
- mapData.size[0] = m_size.height() * int(resource.RowPitch);
+ mapData.dataSize[0] = m_size.height() * int(resource.RowPitch);
}
return mapData;
@@ -132,7 +121,7 @@ public:
void unmap() override
{
- if (m_mapMode == QVideoFrame::NotMapped)
+ if (m_mapMode == QtVideo::MapMode::NotMapped)
return;
if (m_ctx) {
m_ctxMutex->lock();
@@ -141,9 +130,11 @@ public:
m_ctx.Reset();
}
m_cpuTexture.Reset();
- m_mapMode = QVideoFrame::NotMapped;
+ m_mapMode = QtVideo::MapMode::NotMapped;
}
+ QVideoFrameFormat format() const override { return {}; }
+
QSize getSize() const
{
if (!m_texture)
@@ -162,7 +153,7 @@ private:
ComPtr<ID3D11DeviceContext> m_ctx;
std::shared_ptr<QMutex> m_ctxMutex;
QSize m_size;
- QVideoFrame::MapMode m_mapMode = QVideoFrame::NotMapped;
+ QtVideo::MapMode m_mapMode = QtVideo::MapMode::NotMapped;
};
namespace {
@@ -344,7 +335,7 @@ QVideoFrameFormat getFrameFormat(QScreen* screen)
const QSize screenSize = getPhysicalSizePixels(screen);
QVideoFrameFormat format = { screenSize, QVideoFrameFormat::Format_BGRA8888 };
- format.setFrameRate(static_cast<int>(screen->refreshRate()));
+ format.setStreamFrameRate(static_cast<int>(screen->refreshRate()));
return format;
}
@@ -408,7 +399,7 @@ public:
if (bufSize != m_format.frameSize())
m_format.setFrameSize(bufSize);
- frame = { buffer.release(), format() };
+ frame = QVideoFramePrivate::createFrame(std::move(buffer), format());
}
return frame;
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegsurfacecapturegrabber.cpp b/src/plugins/multimedia/ffmpeg/qffmpegsurfacecapturegrabber.cpp
index f708f5021..38b48938f 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegsurfacecapturegrabber.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegsurfacecapturegrabber.cpp
@@ -10,7 +10,7 @@
QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLcScreenCaptureGrabber, "qt.multimedia.ffmpeg.surfacecapturegrabber");
+Q_STATIC_LOGGING_CATEGORY(qLcScreenCaptureGrabber, "qt.multimedia.ffmpeg.surfacecapturegrabber");
namespace {
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegsymbolsresolveutils.cpp b/src/plugins/multimedia/ffmpeg/qffmpegsymbolsresolveutils.cpp
deleted file mode 100644
index c4a4d9666..000000000
--- a/src/plugins/multimedia/ffmpeg/qffmpegsymbolsresolveutils.cpp
+++ /dev/null
@@ -1,103 +0,0 @@
-// Copyright (C) 2023 The Qt Company Ltd.
-// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-
-#include "qffmpegsymbolsresolveutils_p.h"
-
-#include <qdebug.h>
-#include <algorithm>
-#include <qloggingcategory.h>
-
-QT_BEGIN_NAMESPACE
-
-static Q_LOGGING_CATEGORY(qLcLibSymbolsRelolver, "qt.multimedia.ffmpeg.libsymbolsresolver");
-
-LibSymbolsResolver::LibSymbolsResolver(const char *libName, size_t symbolsCount,
- LibsLoader libsLoader)
- : m_libName(libName), m_libsLoader(libsLoader)
-{
- Q_ASSERT(m_libName);
- Q_ASSERT(m_libsLoader);
- m_symbols.reserve(symbolsCount);
-}
-
-bool LibSymbolsResolver::resolve()
-{
- if (m_state.testAndSetRelaxed(Initial, Requested)
- || !m_state.testAndSetAcquire(Ready, Finished))
- return false;
-
- qCDebug(qLcLibSymbolsRelolver)
- << "Start" << m_libName << "symbols resolving:" << m_symbols.size() << "symbols";
-
- Q_ASSERT(m_symbols.size() == m_symbols.capacity());
-
- auto cleanup = qScopeGuard([this]() { m_symbols = {}; });
-
- auto libs = m_libsLoader();
- if (libs.empty()) {
- qCWarning(qLcLibSymbolsRelolver) << "Couldn't load" << m_libName << "library";
- return false;
- }
-
- std::vector<QFunctionPointer> functions(m_symbols.size());
-
- auto resolveElement = [&libs](const SymbolElement &element) {
- return resolve(libs, element.name);
- };
-
- std::transform(m_symbols.begin(), m_symbols.end(), functions.begin(), resolveElement);
-
- if (std::find(functions.begin(), functions.end(), nullptr) != functions.end()) {
- unload(libs);
- qCWarning(qLcLibSymbolsRelolver) << "Couldn't resolve" << m_libName << "symbols";
- return false;
- }
-
- for (size_t i = 0; i < functions.size(); ++i)
- m_symbols[i].setter(functions[i]);
-
- qCDebug(qLcLibSymbolsRelolver) << m_libName << "symbols resolved";
- return true;
-}
-
-void LibSymbolsResolver::registerSymbol(const char *name, FunctionSetter setter)
-{
- Q_ASSERT(setter);
- Q_ASSERT(m_symbols.size() < m_symbols.capacity());
-
- m_symbols.push_back({ name, setter });
-
- // handle the corner case: a user has initialized QtMM with global vars construction
- // and it happened before the symbols initializing
- if (m_symbols.size() == m_symbols.capacity() && !m_state.testAndSetRelease(Initial, Ready)
- && m_state.testAndSetRelease(Requested, Ready))
- resolve();
-}
-
-void LibSymbolsResolver::unload(const Libs &libs)
-{
- for (auto &lib : libs)
- lib->unload();
-}
-
-bool LibSymbolsResolver::tryLoad(const Libs &libs)
-{
- auto load = [](auto &lib) { return lib->load(); };
- if (std::all_of(libs.begin(), libs.end(), load))
- return true;
-
- unload(libs);
- return false;
-}
-
-QFunctionPointer LibSymbolsResolver::resolve(const Libs &libs, const char *symbolName)
-{
- for (auto &lib : libs)
- if (auto pointer = lib->resolve(symbolName))
- return pointer;
-
- qWarning() << "Cannot resolve symbol" << symbolName;
- return nullptr;
-}
-
-QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegsymbolsresolveutils_p.h b/src/plugins/multimedia/ffmpeg/qffmpegsymbolsresolveutils_p.h
deleted file mode 100644
index f7a2169d3..000000000
--- a/src/plugins/multimedia/ffmpeg/qffmpegsymbolsresolveutils_p.h
+++ /dev/null
@@ -1,142 +0,0 @@
-// Copyright (C) 2023 The Qt Company Ltd.
-// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-
-#ifndef QFFMPEGSYMBOLSRESOLVEUTILS_P_H
-#define QFFMPEGSYMBOLSRESOLVEUTILS_P_H
-
-//
-// W A R N I N G
-// -------------
-//
-// This file is not part of the Qt API. It exists purely as an
-// implementation detail. This header file may change from version to
-// version without notice, or even be removed.
-//
-// We mean it.
-//
-
-#include <QtCore/qlibrary.h>
-
-#include <qatomic.h>
-
-#include <vector>
-#include <memory>
-#include <tuple>
-
-QT_BEGIN_NAMESPACE
-
-using Libs = std::vector<std::unique_ptr<QLibrary>>;
-
-class LibSymbolsResolver
-{
-public:
- using FunctionSetter = void (*)(QFunctionPointer);
- using LibsLoader = Libs (*)();
-
- LibSymbolsResolver(const char *libName, size_t symbolsCount, LibsLoader libsLoader);
-
- bool resolve();
-
- void registerSymbol(const char *name, FunctionSetter setter);
-
- static void unload(const Libs &libs);
-
- static bool tryLoad(const Libs &libs);
-
-private:
- static QFunctionPointer resolve(const Libs &libs, const char *symbolName);
-
-private:
- const char *const m_libName;
- LibsLoader m_libsLoader;
-
- struct SymbolElement
- {
- const char *name;
- FunctionSetter setter;
- };
-
- std::vector<SymbolElement> m_symbols;
-
- enum State { Initial, Requested, Ready, Finished };
-
- QAtomicInteger<int> m_state = Initial;
-};
-
-QT_END_NAMESPACE
-
-template <typename T>
-struct DefaultReturn
-{
- template <typename... Arg>
- T operator()(Arg &&...) { return val; }
- T val;
-};
-
-template <>
-struct DefaultReturn<void>
-{
- DefaultReturn(int = 0){};
- template <typename... Arg>
- void operator()(Arg &&...) { }
-};
-
-template <typename...>
-struct FuncInfo;
-
-template <typename R, typename... A>
-struct FuncInfo<R(A...)>
-{
- using Return = R;
- using Args = std::tuple<A...>;
-};
-
-// clang-format off
-
-#define DEFINE_FUNC_IMPL(F, Vars, TypesWithVars, ReturnFunc) \
- using F##_ReturnType = FuncInfo<decltype(F)>::Return; \
- using q_##F##_Type = F##_ReturnType (*)(TypesWithVars(F)); \
- static q_##F##_Type q_##F = []() { \
- auto setter = [](QFunctionPointer ptr) { q_##F = (q_##F##_Type)ptr; }; \
- resolver()->registerSymbol(#F, setter); \
- return [](TypesWithVars(F)) { return ReturnFunc(Vars()); }; \
- }(); \
- extern "C" [[maybe_unused]] F##_ReturnType F(TypesWithVars(F)) { return q_##F(Vars()); }
-
-#define VAR(I) a##I
-#define VARS0()
-#define VARS1() VAR(0)
-#define VARS2() VARS1(), VAR(1)
-#define VARS3() VARS2(), VAR(2)
-#define VARS4() VARS3(), VAR(3)
-#define VARS5() VARS4(), VAR(4)
-#define VARS6() VARS5(), VAR(5)
-#define VARS7() VARS6(), VAR(6)
-#define VARS8() VARS7(), VAR(7)
-#define VARS9() VARS8(), VAR(8)
-#define VARS10() VARS9(), VAR(9)
-#define VARS11() VARS10(), VAR(10)
-
-#define TYPE_WITH_VAR(F, I) std::tuple_element_t<I, FuncInfo<decltype(F)>::Args> VAR(I)
-#define TYPES_WITH_VARS0(F)
-#define TYPES_WITH_VARS1(F) TYPE_WITH_VAR(F, 0)
-#define TYPES_WITH_VARS2(F) TYPES_WITH_VARS1(F), TYPE_WITH_VAR(F, 1)
-#define TYPES_WITH_VARS3(F) TYPES_WITH_VARS2(F), TYPE_WITH_VAR(F, 2)
-#define TYPES_WITH_VARS4(F) TYPES_WITH_VARS3(F), TYPE_WITH_VAR(F, 3)
-#define TYPES_WITH_VARS5(F) TYPES_WITH_VARS4(F), TYPE_WITH_VAR(F, 4)
-#define TYPES_WITH_VARS6(F) TYPES_WITH_VARS5(F), TYPE_WITH_VAR(F, 5)
-#define TYPES_WITH_VARS7(F) TYPES_WITH_VARS6(F), TYPE_WITH_VAR(F, 6)
-#define TYPES_WITH_VARS8(F) TYPES_WITH_VARS7(F), TYPE_WITH_VAR(F, 7)
-#define TYPES_WITH_VARS9(F) TYPES_WITH_VARS8(F), TYPE_WITH_VAR(F, 8)
-#define TYPES_WITH_VARS10(F) TYPES_WITH_VARS9(F), TYPE_WITH_VAR(F, 9)
-#define TYPES_WITH_VARS11(F) TYPES_WITH_VARS10(F), TYPE_WITH_VAR(F, 10)
-
-
-#define RET(F, ...) DefaultReturn<FuncInfo<decltype(F)>::Return>{__VA_ARGS__}
-
-#define DEFINE_FUNC(F, ArgsCount, /*Return value*/...) \
- DEFINE_FUNC_IMPL(F, VARS##ArgsCount, TYPES_WITH_VARS##ArgsCount, RET(F, __VA_ARGS__));
-
-// clang-format on
-
-#endif // QFFMPEGSYMBOLSRESOLVEUTILS_P_H
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp b/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp
index 5b79af5b3..3bc5a8c8a 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp
@@ -24,10 +24,10 @@ static bool isFrameFlipped(const AVFrame& frame) {
return false;
}
-static Q_LOGGING_CATEGORY(qLcFFmpegVideoBuffer, "qt.multimedia.ffmpeg.videobuffer");
+Q_STATIC_LOGGING_CATEGORY(qLcFFmpegVideoBuffer, "qt.multimedia.ffmpeg.videobuffer");
QFFmpegVideoBuffer::QFFmpegVideoBuffer(AVFrameUPtr frame, AVRational pixelAspectRatio)
- : QAbstractVideoBuffer(QVideoFrame::NoHandle),
+ : QHwVideoBuffer(QVideoFrame::NoHandle),
m_frame(frame.get()),
m_size(qCalculateFrameSize({ frame->width, frame->height },
{ pixelAspectRatio.num, pixelAspectRatio.den }))
@@ -84,72 +84,17 @@ void QFFmpegVideoBuffer::setTextureConverter(const QFFmpeg::TextureConverter &co
QVideoFrameFormat::ColorSpace QFFmpegVideoBuffer::colorSpace() const
{
- switch (m_frame->colorspace) {
- default:
- case AVCOL_SPC_UNSPECIFIED:
- case AVCOL_SPC_RESERVED:
- case AVCOL_SPC_FCC:
- case AVCOL_SPC_SMPTE240M:
- case AVCOL_SPC_YCGCO:
- case AVCOL_SPC_SMPTE2085:
- case AVCOL_SPC_CHROMA_DERIVED_NCL:
- case AVCOL_SPC_CHROMA_DERIVED_CL:
- case AVCOL_SPC_ICTCP: // BT.2100 ICtCp
- return QVideoFrameFormat::ColorSpace_Undefined;
- case AVCOL_SPC_RGB:
- return QVideoFrameFormat::ColorSpace_AdobeRgb;
- case AVCOL_SPC_BT709:
- return QVideoFrameFormat::ColorSpace_BT709;
- case AVCOL_SPC_BT470BG: // BT601
- case AVCOL_SPC_SMPTE170M: // Also BT601
- return QVideoFrameFormat::ColorSpace_BT601;
- case AVCOL_SPC_BT2020_NCL: // Non constant luminence
- case AVCOL_SPC_BT2020_CL: // Constant luminence
- return QVideoFrameFormat::ColorSpace_BT2020;
- }
+ return QFFmpeg::fromAvColorSpace(m_frame->colorspace);
}
QVideoFrameFormat::ColorTransfer QFFmpegVideoBuffer::colorTransfer() const
{
- switch (m_frame->color_trc) {
- case AVCOL_TRC_BT709:
- // The following three cases have transfer characteristics identical to BT709
- case AVCOL_TRC_BT1361_ECG:
- case AVCOL_TRC_BT2020_10:
- case AVCOL_TRC_BT2020_12:
- case AVCOL_TRC_SMPTE240M: // almost identical to bt709
- return QVideoFrameFormat::ColorTransfer_BT709;
- case AVCOL_TRC_GAMMA22:
- case AVCOL_TRC_SMPTE428 : // No idea, let's hope for the best...
- case AVCOL_TRC_IEC61966_2_1: // sRGB, close enough to 2.2...
- case AVCOL_TRC_IEC61966_2_4: // not quite, but probably close enough
- return QVideoFrameFormat::ColorTransfer_Gamma22;
- case AVCOL_TRC_GAMMA28:
- return QVideoFrameFormat::ColorTransfer_Gamma28;
- case AVCOL_TRC_SMPTE170M:
- return QVideoFrameFormat::ColorTransfer_BT601;
- case AVCOL_TRC_LINEAR:
- return QVideoFrameFormat::ColorTransfer_Linear;
- case AVCOL_TRC_SMPTE2084:
- return QVideoFrameFormat::ColorTransfer_ST2084;
- case AVCOL_TRC_ARIB_STD_B67:
- return QVideoFrameFormat::ColorTransfer_STD_B67;
- default:
- break;
- }
- return QVideoFrameFormat::ColorTransfer_Unknown;
+ return QFFmpeg::fromAvColorTransfer(m_frame->color_trc);
}
QVideoFrameFormat::ColorRange QFFmpegVideoBuffer::colorRange() const
{
- switch (m_frame->color_range) {
- case AVCOL_RANGE_MPEG:
- return QVideoFrameFormat::ColorRange_Video;
- case AVCOL_RANGE_JPEG:
- return QVideoFrameFormat::ColorRange_Full;
- default:
- return QVideoFrameFormat::ColorRange_Unknown;
- }
+ return QFFmpeg::fromAvColorRange(m_frame->color_range);
}
float QFFmpegVideoBuffer::maxNits()
@@ -168,12 +113,7 @@ float QFFmpegVideoBuffer::maxNits()
return maxNits;
}
-QVideoFrame::MapMode QFFmpegVideoBuffer::mapMode() const
-{
- return m_mode;
-}
-
-QAbstractVideoBuffer::MapData QFFmpegVideoBuffer::map(QVideoFrame::MapMode mode)
+QAbstractVideoBuffer::MapData QFFmpegVideoBuffer::map(QtVideo::MapMode mode)
{
if (!m_swFrame) {
Q_ASSERT(m_hwFrame && m_hwFrame->hw_frames_ctx);
@@ -191,16 +131,16 @@ QAbstractVideoBuffer::MapData QFFmpegVideoBuffer::map(QVideoFrame::MapMode mode)
MapData mapData;
auto *desc = QVideoTextureHelper::textureDescription(pixelFormat());
- mapData.nPlanes = desc->nplanes;
- for (int i = 0; i < mapData.nPlanes; ++i) {
+ mapData.planeCount = desc->nplanes;
+ for (int i = 0; i < mapData.planeCount; ++i) {
Q_ASSERT(m_swFrame->linesize[i] >= 0);
mapData.data[i] = m_swFrame->data[i];
mapData.bytesPerLine[i] = m_swFrame->linesize[i];
- mapData.size[i] = mapData.bytesPerLine[i]*desc->heightForPlane(m_swFrame->height, i);
+ mapData.dataSize[i] = mapData.bytesPerLine[i]*desc->heightForPlane(m_swFrame->height, i);
}
- if ((mode & QVideoFrame::WriteOnly) != 0 && m_hwFrame) {
+ if ((mode & QtVideo::MapMode::WriteOnly) != QtVideo::MapMode::NotMapped && m_hwFrame) {
m_type = QVideoFrame::NoHandle;
m_hwFrame.reset();
if (m_textures) {
@@ -219,7 +159,7 @@ void QFFmpegVideoBuffer::unmap()
{
// nothing to do here for SW buffers.
// Set NotMapped mode to ensure map/unmap/mapMode consistency.
- m_mode = QVideoFrame::NotMapped;
+ m_mode = QtVideo::MapMode::NotMapped;
}
std::unique_ptr<QVideoFrameTextures> QFFmpegVideoBuffer::mapTextures(QRhi *)
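
The map-mode checks above move from integer comparisons to enum-class flag tests. A self-contained sketch of that pattern using a local enum so it compiles on its own; QtVideo::MapMode has the same shape (NotMapped = 0, ReadOnly and WriteOnly bits):

```cpp
#include <cassert>
#include <cstdint>

enum class MapMode : std::uint8_t {
    NotMapped = 0x00,
    ReadOnly  = 0x01,
    WriteOnly = 0x02,
    ReadWrite = ReadOnly | WriteOnly
};

constexpr MapMode operator&(MapMode a, MapMode b)
{
    return MapMode(std::uint8_t(a) & std::uint8_t(b));
}

int main()
{
    const MapMode mode = MapMode::ReadWrite;
    // Enum classes don't convert to bool, so comparing against NotMapped
    // replaces the old `(mode & WriteOnly) != 0` integer test.
    assert((mode & MapMode::WriteOnly) != MapMode::NotMapped);
    return 0;
}
```
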
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer_p.h b/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer_p.h
index 18a580528..c61c3f5ff 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer_p.h
+++ b/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer_p.h
@@ -15,9 +15,7 @@
// We mean it.
//
-#include <private/qtmultimediaglobal_p.h>
-#include <private/qabstractvideobuffer_p.h>
-#include <qvideoframe.h>
+#include <private/qhwvideobuffer_p.h>
#include <QtCore/qvariant.h>
#include "qffmpeg_p.h"
@@ -25,7 +23,7 @@
QT_BEGIN_NAMESPACE
-class QFFmpegVideoBuffer : public QAbstractVideoBuffer
+class QFFmpegVideoBuffer : public QHwVideoBuffer
{
public:
using AVFrameUPtr = QFFmpeg::AVFrameUPtr;
@@ -33,8 +31,7 @@ public:
QFFmpegVideoBuffer(AVFrameUPtr frame, AVRational pixelAspectRatio = { 1, 1 });
~QFFmpegVideoBuffer() override;
- QVideoFrame::MapMode mapMode() const override;
- MapData map(QVideoFrame::MapMode mode) override;
+ MapData map(QtVideo::MapMode mode) override;
void unmap() override;
virtual std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *) override;
@@ -65,7 +62,7 @@ private:
AVFrameUPtr m_swFrame;
QSize m_size;
QFFmpeg::TextureConverter m_textureConverter;
- QVideoFrame::MapMode m_mode = QVideoFrame::NotMapped;
+ QtVideo::MapMode m_mode = QtVideo::MapMode::NotMapped;
std::unique_ptr<QFFmpeg::TextureSet> m_textures;
};
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegvideosink.cpp b/src/plugins/multimedia/ffmpeg/qffmpegvideosink.cpp
index ec99d8dd2..2f02f09c1 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegvideosink.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegvideosink.cpp
@@ -2,6 +2,7 @@
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
#include <qffmpegvideosink_p.h>
#include <qffmpegvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
QT_BEGIN_NAMESPACE
@@ -21,7 +22,7 @@ void QFFmpegVideoSink::setRhi(QRhi *rhi)
void QFFmpegVideoSink::setVideoFrame(const QVideoFrame &frame)
{
- auto *buffer = dynamic_cast<QFFmpegVideoBuffer *>(frame.videoBuffer());
+ auto *buffer = dynamic_cast<QFFmpegVideoBuffer *>(QVideoFramePrivate::hwBuffer(frame));
if (buffer)
buffer->setTextureConverter(textureConverter);
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegwindowcapture_uwp.cpp b/src/plugins/multimedia/ffmpeg/qffmpegwindowcapture_uwp.cpp
index c8543b593..c139b942e 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegwindowcapture_uwp.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegwindowcapture_uwp.cpp
@@ -3,7 +3,8 @@
#include "qffmpegwindowcapture_uwp_p.h"
#include "qffmpegsurfacecapturegrabber_p.h"
-#include <private/qabstractvideobuffer_p.h>
+#include "qabstractvideobuffer.h"
+#include <private/qvideoframe_p.h>
#include <unknwn.h>
#include <winrt/base.h>
@@ -82,21 +83,16 @@ struct MultithreadedApartment
class QUwpTextureVideoBuffer : public QAbstractVideoBuffer
{
public:
- QUwpTextureVideoBuffer(com_ptr<IDXGISurface> &&surface)
- : QAbstractVideoBuffer(QVideoFrame::NoHandle), m_surface(surface)
- {
- }
+ QUwpTextureVideoBuffer(com_ptr<IDXGISurface> &&surface) : m_surface(surface) { }
- ~QUwpTextureVideoBuffer() override { QUwpTextureVideoBuffer::unmap(); }
+ ~QUwpTextureVideoBuffer() override { Q_ASSERT(m_mapMode == QtVideo::MapMode::NotMapped); }
- QVideoFrame::MapMode mapMode() const override { return m_mapMode; }
-
- MapData map(QVideoFrame::MapMode mode) override
+ MapData map(QtVideo::MapMode mode) override
{
- if (m_mapMode != QVideoFrame::NotMapped)
+ if (m_mapMode != QtVideo::MapMode::NotMapped)
return {};
- if (mode == QVideoFrame::ReadOnly) {
+ if (mode == QtVideo::MapMode::ReadOnly) {
DXGI_MAPPED_RECT rect = {};
HRESULT hr = m_surface->Map(&rect, DXGI_MAP_READ);
if (SUCCEEDED(hr)) {
@@ -104,12 +100,12 @@ public:
hr = m_surface->GetDesc(&desc);
MapData md = {};
- md.nPlanes = 1;
+ md.planeCount = 1;
md.bytesPerLine[0] = rect.Pitch;
md.data[0] = rect.pBits;
- md.size[0] = rect.Pitch * desc.Height;
+ md.dataSize[0] = rect.Pitch * desc.Height;
- m_mapMode = QVideoFrame::ReadOnly;
+ m_mapMode = QtVideo::MapMode::ReadOnly;
return md;
} else {
@@ -123,18 +119,20 @@ public:
void unmap() override
{
- if (m_mapMode == QVideoFrame::NotMapped)
+ if (m_mapMode == QtVideo::MapMode::NotMapped)
return;
const HRESULT hr = m_surface->Unmap();
if (FAILED(hr))
qCDebug(qLcWindowCaptureUwp) << "Failed to unmap surface" << errorString(hr);
- m_mapMode = QVideoFrame::NotMapped;
+ m_mapMode = QtVideo::MapMode::NotMapped;
}
+ QVideoFrameFormat format() const override { return {}; }
+
private:
- QVideoFrame::MapMode m_mapMode = QVideoFrame::NotMapped;
+ QtVideo::MapMode m_mapMode = QtVideo::MapMode::NotMapped;
com_ptr<IDXGISurface> m_surface;
};
@@ -306,7 +304,7 @@ public:
const qreal refreshRate = getMonitorRefreshRateHz(monitor);
- m_format.setFrameRate(refreshRate);
+ m_format.setStreamFrameRate(refreshRate);
setFrameRate(refreshRate);
addFrameCallback(capture, &QFFmpegWindowCaptureUwp::newVideoFrame);
@@ -354,7 +352,8 @@ protected:
m_format.setFrameSize(size);
- return QVideoFrame(new QUwpTextureVideoBuffer(std::move(texture)), m_format);
+ return QVideoFramePrivate::createFrame(
+ std::make_unique<QUwpTextureVideoBuffer>(std::move(texture)), m_format);
} catch (const winrt::hresult_error &err) {
diff --git a/src/plugins/multimedia/ffmpeg/qgdiwindowcapture.cpp b/src/plugins/multimedia/ffmpeg/qgdiwindowcapture.cpp
index 4188ed4c2..97742043c 100644
--- a/src/plugins/multimedia/ffmpeg/qgdiwindowcapture.cpp
+++ b/src/plugins/multimedia/ffmpeg/qgdiwindowcapture.cpp
@@ -7,6 +7,7 @@
#include "qffmpegsurfacecapturegrabber_p.h"
#include "private/qcapturablewindow_p.h"
#include "private/qmemoryvideobuffer_p.h"
+#include "private/qvideoframe_p.h"
#include <qt_windows.h>
#include <QtCore/qloggingcategory.h>
@@ -104,7 +105,7 @@ private:
}
QVideoFrameFormat format(size, QVideoFrameFormat::Format_BGRX8888);
- format.setFrameRate(frameRate());
+ format.setStreamFrameRate(frameRate());
m_format = format;
return true;
}
@@ -155,7 +156,8 @@ private:
return {};
}
- return QVideoFrame(new QMemoryVideoBuffer(array, bytesPerLine), m_format);
+ return QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(std::move(array), bytesPerLine), m_format);
}
private:
diff --git a/src/plugins/multimedia/ffmpeg/qgrabwindowsurfacecapture.cpp b/src/plugins/multimedia/ffmpeg/qgrabwindowsurfacecapture.cpp
index df16a8f56..4bd1f6a65 100644
--- a/src/plugins/multimedia/ffmpeg/qgrabwindowsurfacecapture.cpp
+++ b/src/plugins/multimedia/ffmpeg/qgrabwindowsurfacecapture.cpp
@@ -8,6 +8,7 @@
#include "private/qimagevideobuffer_p.h"
#include "private/qcapturablewindow_p.h"
+#include "private/qvideoframe_p.h"
#include "qscreen.h"
#include "qmutex.h"
@@ -136,7 +137,7 @@ private:
QVideoFrameFormat format(img.size(),
QVideoFrameFormat::pixelFormatFromImageFormat(img.format()));
- format.setFrameRate(screen->refreshRate());
+ format.setStreamFrameRate(screen->refreshRate());
updateFormat(format);
if (!format.isValid()) {
@@ -145,7 +146,7 @@ private:
return {};
}
- return QVideoFrame(buffer.release(), format);
+ return QVideoFramePrivate::createFrame(std::move(buffer), std::move(format));
}
private:
diff --git a/src/plugins/multimedia/ffmpeg/qopenglvideobuffer.cpp b/src/plugins/multimedia/ffmpeg/qopenglvideobuffer.cpp
index c3e739ffd..4ac08fd24 100644
--- a/src/plugins/multimedia/ffmpeg/qopenglvideobuffer.cpp
+++ b/src/plugins/multimedia/ffmpeg/qopenglvideobuffer.cpp
@@ -55,19 +55,14 @@ static bool setCurrentOpenGLContext()
}
QOpenGLVideoBuffer::QOpenGLVideoBuffer(std::unique_ptr<QOpenGLFramebufferObject> fbo)
- : QAbstractVideoBuffer(QVideoFrame::RhiTextureHandle), m_fbo(std::move(fbo))
+ : QHwVideoBuffer(QVideoFrame::RhiTextureHandle), m_fbo(std::move(fbo))
{
Q_ASSERT(m_fbo);
}
QOpenGLVideoBuffer::~QOpenGLVideoBuffer() { }
-QVideoFrame::MapMode QOpenGLVideoBuffer::mapMode() const
-{
- return m_imageBuffer ? m_imageBuffer->mapMode() : QVideoFrame::NotMapped;
-}
-
-QAbstractVideoBuffer::MapData QOpenGLVideoBuffer::map(QVideoFrame::MapMode mode)
+QAbstractVideoBuffer::MapData QOpenGLVideoBuffer::map(QtVideo::MapMode mode)
{
return ensureImageBuffer().map(mode);
}
diff --git a/src/plugins/multimedia/ffmpeg/qopenglvideobuffer_p.h b/src/plugins/multimedia/ffmpeg/qopenglvideobuffer_p.h
index bbbb2f2c7..6e62625d0 100644
--- a/src/plugins/multimedia/ffmpeg/qopenglvideobuffer_p.h
+++ b/src/plugins/multimedia/ffmpeg/qopenglvideobuffer_p.h
@@ -15,21 +15,20 @@
// We mean it.
//
-#include <private/qabstractvideobuffer_p.h>
+#include <private/qhwvideobuffer_p.h>
QT_BEGIN_NAMESPACE
class QImageVideoBuffer;
class QOpenGLFramebufferObject;
-class QOpenGLVideoBuffer : public QAbstractVideoBuffer
+class QOpenGLVideoBuffer : public QHwVideoBuffer
{
public:
QOpenGLVideoBuffer(std::unique_ptr<QOpenGLFramebufferObject> fbo);
~QOpenGLVideoBuffer();
- QVideoFrame::MapMode mapMode() const override;
- MapData map(QVideoFrame::MapMode mode) override;
+ MapData map(QtVideo::MapMode mode) override;
void unmap() override;
quint64 textureHandle(QRhi *, int plane) const override;
diff --git a/src/plugins/multimedia/ffmpeg/qv4l2camera.cpp b/src/plugins/multimedia/ffmpeg/qv4l2camera.cpp
index 2086af10d..f9f18296b 100644
--- a/src/plugins/multimedia/ffmpeg/qv4l2camera.cpp
+++ b/src/plugins/multimedia/ffmpeg/qv4l2camera.cpp
@@ -8,6 +8,7 @@
#include <private/qcameradevice_p.h>
#include <private/qmultimediautils_p.h>
#include <private/qmemoryvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
#include <private/qcore_unix_p.h>
#include <qsocketnotifier.h>
@@ -15,7 +16,7 @@
QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLcV4L2Camera, "qt.multimedia.ffmpeg.v4l2camera");
+Q_STATIC_LOGGING_CATEGORY(qLcV4L2Camera, "qt.multimedia.ffmpeg.v4l2camera");
static const struct {
QVideoFrameFormat::PixelFormat fmt;
@@ -373,8 +374,8 @@ void QV4L2Camera::readFrame()
return;
}
- auto videoBuffer = new QMemoryVideoBuffer(buffer->data, m_bytesPerLine);
- QVideoFrame frame(videoBuffer, frameFormat());
+ auto videoBuffer = std::make_unique<QMemoryVideoBuffer>(buffer->data, m_bytesPerLine);
+ QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(videoBuffer), frameFormat());
auto &v4l2Buffer = buffer->v4l2Buffer;
@@ -394,7 +395,7 @@ void QV4L2Camera::readFrame()
void QV4L2Camera::setCameraBusy()
{
m_cameraBusy = true;
- emit error(QCamera::CameraError, QLatin1String("Camera is in use"));
+ updateError(QCamera::CameraError, QLatin1String("Camera is in use"));
}
void QV4L2Camera::initV4L2Controls()
@@ -412,7 +413,7 @@ void QV4L2Camera::initV4L2Controls()
qCWarning(qLcV4L2Camera) << "Unable to open the camera" << deviceName
<< "for read to query the parameter info:"
<< qt_error_string(errno);
- emit error(QCamera::CameraError, QLatin1String("Cannot open camera"));
+ updateError(QCamera::CameraError, QLatin1String("Cannot open camera"));
return;
}
@@ -651,7 +652,7 @@ void QV4L2Camera::initV4L2MemoryTransfer()
if (!m_memoryTransfer) {
qCWarning(qLcV4L2Camera) << "Cannot init v4l2 memory transfer," << qt_error_string(errno);
- emit error(QCamera::CameraError, QLatin1String("Cannot init V4L2 memory transfer"));
+ updateError(QCamera::CameraError, QLatin1String("Cannot init V4L2 memory transfer"));
}
}
diff --git a/src/plugins/multimedia/ffmpeg/qv4l2cameradevices.cpp b/src/plugins/multimedia/ffmpeg/qv4l2cameradevices.cpp
index e450cf7bc..82a9658c7 100644
--- a/src/plugins/multimedia/ffmpeg/qv4l2cameradevices.cpp
+++ b/src/plugins/multimedia/ffmpeg/qv4l2cameradevices.cpp
@@ -17,7 +17,7 @@
QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLcV4L2CameraDevices, "qt.multimedia.ffmpeg.v4l2cameradevices");
+Q_STATIC_LOGGING_CATEGORY(qLcV4L2CameraDevices, "qt.multimedia.ffmpeg.v4l2cameradevices");
static bool areCamerasEqual(QList<QCameraDevice> a, QList<QCameraDevice> b)
{
diff --git a/src/plugins/multimedia/ffmpeg/qv4l2memorytransfer.cpp b/src/plugins/multimedia/ffmpeg/qv4l2memorytransfer.cpp
index 32ee4f8f8..a2873235a 100644
--- a/src/plugins/multimedia/ffmpeg/qv4l2memorytransfer.cpp
+++ b/src/plugins/multimedia/ffmpeg/qv4l2memorytransfer.cpp
@@ -11,7 +11,7 @@
QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLcV4L2MemoryTransfer, "qt.multimedia.ffmpeg.v4l2camera.memorytransfer");
+Q_STATIC_LOGGING_CATEGORY(qLcV4L2MemoryTransfer, "qt.multimedia.ffmpeg.v4l2camera.memorytransfer");
namespace {
diff --git a/src/plugins/multimedia/ffmpeg/qwindowscamera.cpp b/src/plugins/multimedia/ffmpeg/qwindowscamera.cpp
index 39aac3527..61a4ebe52 100644
--- a/src/plugins/multimedia/ffmpeg/qwindowscamera.cpp
+++ b/src/plugins/multimedia/ffmpeg/qwindowscamera.cpp
@@ -8,6 +8,7 @@
#include <private/qmemoryvideobuffer_p.h>
#include <private/qwindowsmfdefs_p.h>
#include <private/qwindowsmultimediautils_p.h>
+#include <private/qvideoframe_p.h>
#include <private/qcomobject_p.h>
#include <mfapi.h>
@@ -186,7 +187,7 @@ public:
{
if (FAILED(status)) {
const std::string msg{ std::system_category().message(status) };
- emit m_windowsCamera.error(QCamera::CameraError, QString::fromStdString(msg));
+ m_windowsCamera.updateError(QCamera::CameraError, QString::fromStdString(msg));
return;
}
@@ -198,7 +199,10 @@ public:
BYTE *buffer = nullptr;
if (SUCCEEDED(mediaBuffer->Lock(&buffer, nullptr, &bufLen))) {
QByteArray bytes(reinterpret_cast<char*>(buffer), qsizetype(bufLen));
- QVideoFrame frame(new QMemoryVideoBuffer(bytes, m_videoFrameStride), m_frameFormat);
+                auto videoBuffer = std::make_unique<QMemoryVideoBuffer>(std::move(bytes),
+                                                                        m_videoFrameStride);
+                QVideoFrame frame =
+                        QVideoFramePrivate::createFrame(std::move(videoBuffer), m_frameFormat);
// WMF uses 100-nanosecond units, Qt uses microseconds
frame.setStartTime(timestamp / 10);
diff --git a/src/plugins/multimedia/ffmpeg/qx11surfacecapture.cpp b/src/plugins/multimedia/ffmpeg/qx11surfacecapture.cpp
index 1f04703f1..e1b236283 100644
--- a/src/plugins/multimedia/ffmpeg/qx11surfacecapture.cpp
+++ b/src/plugins/multimedia/ffmpeg/qx11surfacecapture.cpp
@@ -11,10 +11,10 @@
#include <qguiapplication.h>
#include <qloggingcategory.h>
-#include "private/qabstractvideobuffer_p.h"
#include "private/qcapturablewindow_p.h"
#include "private/qmemoryvideobuffer_p.h"
#include "private/qvideoframeconversionhelper_p.h"
+#include "private/qvideoframe_p.h"
#include <X11/Xlib.h>
#include <sys/shm.h>
@@ -26,7 +26,7 @@
QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLcX11SurfaceCapture, "qt.multimedia.ffmpeg.qx11surfacecapture");
+Q_STATIC_LOGGING_CATEGORY(qLcX11SurfaceCapture, "qt.multimedia.ffmpeg.qx11surfacecapture");
namespace {
@@ -245,7 +245,7 @@ private:
}
QVideoFrameFormat format(QSize(m_xImage->width, m_xImage->height), pixelFormat);
- format.setFrameRate(frameRate());
+ format.setStreamFrameRate(frameRate());
m_format = format;
}
@@ -276,8 +276,8 @@ protected:
qCopyPixelsWithAlphaMask(pixelDst, pixelSrc, pixelCount, m_format.pixelFormat(),
xImageAlphaVaries);
- auto buffer = new QMemoryVideoBuffer(data, m_xImage->bytes_per_line);
- return QVideoFrame(buffer, m_format);
+ auto buffer = std::make_unique<QMemoryVideoBuffer>(data, m_xImage->bytes_per_line);
+ return QVideoFramePrivate::createFrame(std::move(buffer), m_format);
}
private:
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder.cpp
index 9948952e8..d8eaae58b 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder.cpp
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder.cpp
@@ -1,6 +1,7 @@
// Copyright (C) 2024 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
#include "qffmpegaudioencoder_p.h"
+#include "qffmpegrecordingengineutils_p.h"
#include "qffmpegaudioencoderutils_p.h"
#include "qffmpegaudioinput_p.h"
#include "qffmpegencoderoptions_p.h"
@@ -13,16 +14,15 @@ QT_BEGIN_NAMESPACE
namespace QFFmpeg {
-static Q_LOGGING_CATEGORY(qLcFFmpegAudioEncoder, "qt.multimedia.ffmpeg.audioencoder");
+Q_STATIC_LOGGING_CATEGORY(qLcFFmpegAudioEncoder, "qt.multimedia.ffmpeg.audioencoder");
-AudioEncoder::AudioEncoder(RecordingEngine &recordingEngine, QFFmpegAudioInput *input,
+AudioEncoder::AudioEncoder(RecordingEngine &recordingEngine, const QAudioFormat &sourceFormat,
const QMediaEncoderSettings &settings)
- : EncoderThread(recordingEngine), m_input(input), m_settings(settings)
+ : EncoderThread(recordingEngine), m_format(sourceFormat), m_settings(settings)
{
setObjectName(QLatin1String("AudioEncoder"));
qCDebug(qLcFFmpegAudioEncoder) << "AudioEncoder" << settings.audioCodec();
- m_format = input->device.preferredFormat();
auto codecID = QFFmpegMediaFormatInfo::codecIdForAudioCodec(settings.audioCodec());
Q_ASSERT(avformat_query_codec(recordingEngine.avFormatContext()->oformat, codecID,
FF_COMPLIANCE_NORMAL));
@@ -82,40 +82,56 @@ void AudioEncoder::open()
applyAudioEncoderOptions(m_settings, m_avCodec->name, m_codecContext.get(), opts);
applyExperimentalCodecOptions(m_avCodec, opts);
- int res = avcodec_open2(m_codecContext.get(), m_avCodec, opts);
+ const int res = avcodec_open2(m_codecContext.get(), m_avCodec, opts);
+
qCDebug(qLcFFmpegAudioEncoder) << "audio codec opened" << res;
qCDebug(qLcFFmpegAudioEncoder) << "audio codec params: fmt=" << m_codecContext->sample_fmt
<< "rate=" << m_codecContext->sample_rate;
- const AVAudioFormat requestedAudioFormat(m_format);
- const AVAudioFormat codecAudioFormat(m_codecContext.get());
-
- if (requestedAudioFormat != codecAudioFormat)
- m_resampler = createResampleContext(requestedAudioFormat, codecAudioFormat);
+ updateResampler();
}
void AudioEncoder::addBuffer(const QAudioBuffer &buffer)
{
- QMutexLocker locker = lockLoopData();
- if (!m_paused.loadRelaxed()) {
+ if (!buffer.isValid()) {
+ setEndOfSourceStream();
+ return;
+ }
+
+ {
+ const std::chrono::microseconds bufferDuration(buffer.duration());
+ auto guard = lockLoopData();
+
+ resetEndOfSourceStream();
+
+ if (m_paused)
+ return;
+
+ // TODO: apply logic with canPushFrame
+
m_audioBufferQueue.push(buffer);
- locker.unlock();
- dataReady();
+ m_queueDuration += bufferDuration;
}
+
+ dataReady();
}
QAudioBuffer AudioEncoder::takeBuffer()
{
- QMutexLocker locker = lockLoopData();
- return dequeueIfPossible(m_audioBufferQueue);
+ auto locker = lockLoopData();
+ QAudioBuffer result = dequeueIfPossible(m_audioBufferQueue);
+ m_queueDuration -= std::chrono::microseconds(result.duration());
+ return result;
}
void AudioEncoder::init()
{
open();
- if (m_input) {
- m_input->setFrameSize(m_codecContext->frame_size);
- }
+
+ // TODO: try to address this dependency here.
+ if (auto input = qobject_cast<QFFmpegAudioInput *>(source()))
+ input->setFrameSize(m_codecContext->frame_size);
+
qCDebug(qLcFFmpegAudioEncoder) << "AudioEncoder::init started audio device thread.";
}
@@ -123,6 +139,14 @@ void AudioEncoder::cleanup()
{
while (!m_audioBufferQueue.empty())
processOne();
+
+ if (m_avFrameSamplesOffset) {
+ // the size of the last frame can be less than m_codecContext->frame_size
+
+ retrievePackets();
+ sendPendingFrameToAVCodec();
+ }
+
while (avcodec_send_frame(m_codecContext.get(), nullptr) == AVERROR(EAGAIN))
retrievePackets();
retrievePackets();
@@ -159,61 +183,160 @@ void AudioEncoder::retrievePackets()
void AudioEncoder::processOne()
{
QAudioBuffer buffer = takeBuffer();
- if (!buffer.isValid())
- return;
+ Q_ASSERT(buffer.isValid());
+
+ // qCDebug(qLcFFmpegEncoder) << "new audio buffer" << buffer.byteCount() << buffer.format()
+ // << buffer.frameCount() << codec->frame_size;
if (buffer.format() != m_format) {
- // should we recreate recreate resampler here?
- qWarning() << "Get invalid audio format:" << buffer.format() << ", expected:" << m_format;
- return;
+ m_format = buffer.format();
+ updateResampler();
}
- // qCDebug(qLcFFmpegEncoder) << "new audio buffer" << buffer.byteCount() << buffer.format()
- // << buffer.frameCount() << codec->frame_size;
- retrievePackets();
+ int samplesOffset = 0;
+ const int bufferSamplesCount = static_cast<int>(buffer.frameCount());
+
+ while (samplesOffset < bufferSamplesCount)
+ handleAudioData(buffer.constData<uint8_t>(), samplesOffset, bufferSamplesCount);
+
+ Q_ASSERT(samplesOffset == bufferSamplesCount);
+}
+
+bool AudioEncoder::checkIfCanPushFrame() const
+{
+ if (isRunning())
+ return m_audioBufferQueue.size() <= 1 || m_queueDuration < m_maxQueueDuration;
+ if (!isFinished())
+ return m_audioBufferQueue.empty();
+
+ return false;
+}
+
+void AudioEncoder::updateResampler()
+{
+ m_resampler.reset();
+
+ const AVAudioFormat requestedAudioFormat(m_format);
+ const AVAudioFormat codecAudioFormat(m_codecContext.get());
+
+ if (requestedAudioFormat != codecAudioFormat)
+ m_resampler = createResampleContext(requestedAudioFormat, codecAudioFormat);
+
+ qCDebug(qLcFFmpegAudioEncoder)
+ << "Resampler updated. Input format:" << m_format << "Resampler:" << m_resampler.get();
+}
+
+void AudioEncoder::ensurePendingFrame(int availableSamplesCount)
+{
+ Q_ASSERT(availableSamplesCount >= 0);
- auto frame = makeAVFrame();
- frame->format = m_codecContext->sample_fmt;
+ if (m_avFrame)
+ return;
+
+ m_avFrame = makeAVFrame();
+
+ m_avFrame->format = m_codecContext->sample_fmt;
#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
- frame->channel_layout = m_codecContext->channel_layout;
- frame->channels = m_codecContext->channels;
+ m_avFrame->channel_layout = m_codecContext->channel_layout;
+ m_avFrame->channels = m_codecContext->channels;
#else
- frame->ch_layout = m_codecContext->ch_layout;
+ m_avFrame->ch_layout = m_codecContext->ch_layout;
#endif
- frame->sample_rate = m_codecContext->sample_rate;
- frame->nb_samples = buffer.frameCount();
- if (frame->nb_samples)
- av_frame_get_buffer(frame.get(), 0);
+ m_avFrame->sample_rate = m_codecContext->sample_rate;
- if (m_resampler) {
- const uint8_t *data = buffer.constData<uint8_t>();
- swr_convert(m_resampler.get(), frame->extended_data, frame->nb_samples, &data,
- frame->nb_samples);
- } else {
- memcpy(frame->buf[0]->data, buffer.constData<uint8_t>(), buffer.byteCount());
- }
+ const bool isFixedFrameSize = !(m_avCodec->capabilities & AV_CODEC_CAP_VARIABLE_FRAME_SIZE)
+ && m_codecContext->frame_size;
+ m_avFrame->nb_samples = isFixedFrameSize ? m_codecContext->frame_size : availableSamplesCount;
+ if (m_avFrame->nb_samples)
+ av_frame_get_buffer(m_avFrame.get(), 0);
const auto &timeBase = m_stream->time_base;
const auto pts = timeBase.den && timeBase.num
? timeBase.den * m_samplesWritten / (m_codecContext->sample_rate * timeBase.num)
: m_samplesWritten;
- setAVFrameTime(*frame, pts, timeBase);
- m_samplesWritten += buffer.frameCount();
+ setAVFrameTime(*m_avFrame, pts, timeBase);
+}
+
+void AudioEncoder::writeDataToPendingFrame(const uchar *data, int &samplesOffset, int samplesCount)
+{
+ Q_ASSERT(m_avFrame);
+ Q_ASSERT(m_avFrameSamplesOffset <= m_avFrame->nb_samples);
+
+ const int bytesPerSample = av_get_bytes_per_sample(m_codecContext->sample_fmt);
+ const bool isPlanar = av_sample_fmt_is_planar(m_codecContext->sample_fmt);
+
+#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
+ const int channelsCount = m_codecContext->channels;
+#else
+ const int channelsCount = m_codecContext->ch_layout.nb_channels;
+#endif
+
+ const int audioDataOffset = isPlanar ? bytesPerSample * m_avFrameSamplesOffset
+ : bytesPerSample * m_avFrameSamplesOffset * channelsCount;
+
+ const int planesCount = isPlanar ? channelsCount : 1;
+ m_avFramePlanesData.resize(planesCount);
+ for (int plane = 0; plane < planesCount; ++plane)
+ m_avFramePlanesData[plane] = m_avFrame->extended_data[plane] + audioDataOffset;
- qint64 time = m_format.durationForFrames(m_samplesWritten);
+ const int samplesToRead =
+ std::min(m_avFrame->nb_samples - m_avFrameSamplesOffset, samplesCount - samplesOffset);
+
+ data += m_format.bytesForFrames(samplesOffset);
+
+ if (m_resampler) {
+ m_avFrameSamplesOffset += swr_convert(m_resampler.get(), m_avFramePlanesData.data(),
+ samplesToRead, &data, samplesToRead);
+ } else {
+ Q_ASSERT(planesCount == 1);
+ m_avFrameSamplesOffset += samplesToRead;
+ memcpy(m_avFramePlanesData[0], data, m_format.bytesForFrames(samplesToRead));
+ }
+
+ samplesOffset += samplesToRead;
+}
+
+void AudioEncoder::sendPendingFrameToAVCodec()
+{
+ Q_ASSERT(m_avFrame);
+ Q_ASSERT(m_avFrameSamplesOffset <= m_avFrame->nb_samples);
+
+ m_avFrame->nb_samples = m_avFrameSamplesOffset;
+
+ m_samplesWritten += m_avFrameSamplesOffset;
+
+ const qint64 time = m_format.durationForFrames(m_samplesWritten);
m_recordingEngine.newTimeStamp(time / 1000);
- // qCDebug(qLcFFmpegEncoder) << "sending audio frame" << buffer.byteCount() << frame->pts <<
- // ((double)buffer.frameCount()/frame->sample_rate);
+ // qCDebug(qLcFFmpegEncoder) << "sending audio frame" << buffer.byteCount() << frame->pts <<
+ // ((double)buffer.frameCount()/frame->sample_rate);
- int ret = avcodec_send_frame(m_codecContext.get(), frame.get());
+ int ret = avcodec_send_frame(m_codecContext.get(), m_avFrame.get());
if (ret < 0) {
- char errStr[1024];
- av_strerror(ret, errStr, 1024);
- // qCDebug(qLcFFmpegEncoder) << "error sending frame" << ret << errStr;
+ char errStr[AV_ERROR_MAX_STRING_SIZE];
+ av_strerror(ret, errStr, AV_ERROR_MAX_STRING_SIZE);
+ qCDebug(qLcFFmpegAudioEncoder) << "error sending frame" << ret << errStr;
}
+
+ m_avFrame = nullptr;
+ m_avFrameSamplesOffset = 0;
+ std::fill(m_avFramePlanesData.begin(), m_avFramePlanesData.end(), nullptr);
}
+void AudioEncoder::handleAudioData(const uchar *data, int &samplesOffset, int samplesCount)
+{
+ ensurePendingFrame(samplesCount - samplesOffset);
+
+ writeDataToPendingFrame(data, samplesOffset, samplesCount);
+
+ // The frame is not ready yet
+ if (m_avFrameSamplesOffset < m_avFrame->nb_samples)
+ return;
+
+ retrievePackets();
+
+ sendPendingFrameToAVCodec();
+}
} // namespace QFFmpeg
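The reworked AudioEncoder above no longer assumes that one QAudioBuffer maps onto one AVFrame: handleAudioData() accumulates samples into a pending frame of the codec's fixed frame_size, and cleanup() flushes the shorter trailing frame. Below is a minimal standalone sketch of that accumulation pattern only; the class name and the mono float format are illustrative and not part of the patch.

    #include <algorithm>
    #include <utility>
    #include <vector>

    // Cuts arbitrarily sized input buffers into fixed-size frames, keeping a partial
    // frame pending until it fills up or is explicitly flushed at end of stream.
    class FrameAccumulator
    {
    public:
        explicit FrameAccumulator(int frameSize) : m_frameSize(frameSize) { }

        std::vector<std::vector<float>> addBuffer(const float *samples, int count)
        {
            std::vector<std::vector<float>> complete;
            int offset = 0;
            while (offset < count) {
                const int toCopy =
                        std::min(m_frameSize - int(m_pending.size()), count - offset);
                m_pending.insert(m_pending.end(), samples + offset, samples + offset + toCopy);
                offset += toCopy;
                if (int(m_pending.size()) == m_frameSize) {
                    complete.push_back(std::move(m_pending));
                    m_pending.clear();
                }
            }
            return complete;
        }

        // Mirrors the cleanup() path: the last frame may be shorter than frameSize.
        std::vector<float> flush() { return std::exchange(m_pending, {}); }

    private:
        int m_frameSize;
        std::vector<float> m_pending;
    };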
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder_p.h
index 16d8d81a1..4408ff54f 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder_p.h
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder_p.h
@@ -8,46 +8,65 @@
#include "private/qplatformmediarecorder_p.h"
#include <qaudiobuffer.h>
#include <queue>
+#include <chrono>
QT_BEGIN_NAMESPACE
class QMediaEncoderSettings;
-class QFFmpegAudioInput;
namespace QFFmpeg {
class AudioEncoder : public EncoderThread
{
public:
- AudioEncoder(RecordingEngine &recordingEngine, QFFmpegAudioInput *input,
+ AudioEncoder(RecordingEngine &recordingEngine, const QAudioFormat &sourceFormat,
const QMediaEncoderSettings &settings);
- void open();
void addBuffer(const QAudioBuffer &buffer);
- QFFmpegAudioInput *audioInput() const { return m_input; }
+protected:
+ bool checkIfCanPushFrame() const override;
private:
+ void open();
+
QAudioBuffer takeBuffer();
void retrievePackets();
+ void updateResampler();
void init() override;
void cleanup() override;
bool hasData() const override;
void processOne() override;
+ void handleAudioData(const uchar *data, int &samplesOffset, int samplesCount);
+
+ void ensurePendingFrame(int availableSamplesCount);
+
+ void writeDataToPendingFrame(const uchar *data, int &samplesOffset, int samplesCount);
+
+ void sendPendingFrameToAVCodec();
+
private:
std::queue<QAudioBuffer> m_audioBufferQueue;
+ // Arbitrarily chosen to limit audio queue duration
+ const std::chrono::microseconds m_maxQueueDuration = std::chrono::seconds(5);
+
+ std::chrono::microseconds m_queueDuration{ 0 };
+
AVStream *m_stream = nullptr;
AVCodecContextUPtr m_codecContext;
- QFFmpegAudioInput *m_input = nullptr;
QAudioFormat m_format;
SwrContextUPtr m_resampler;
qint64 m_samplesWritten = 0;
const AVCodec *m_avCodec = nullptr;
QMediaEncoderSettings m_settings;
+
+ AVFrameUPtr m_avFrame;
+ int m_avFrameSamplesOffset = 0;
+ std::vector<uint8_t *> m_avFramePlanesData;
};
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoderutils.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoderutils.cpp
index ea36a8138..4d4dc69d2 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoderutils.cpp
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoderutils.cpp
@@ -16,7 +16,7 @@ AVSampleFormat adjustSampleFormat(const AVSampleFormat *supportedFormats, AVSamp
: 0;
};
- const auto result = findBestAVFormat(supportedFormats, calcScore).first;
+ const auto result = findBestAVValue(supportedFormats, calcScore).first;
return result == AV_SAMPLE_FMT_NONE ? requested : result;
}
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread.cpp
index b673af450..61fe954c8 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread.cpp
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread.cpp
@@ -1,6 +1,7 @@
// Copyright (C) 2024 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
#include "qffmpegencoderthread_p.h"
+#include "qmetaobject.h"
QT_BEGIN_NAMESPACE
@@ -10,11 +11,30 @@ EncoderThread::EncoderThread(RecordingEngine &recordingEngine) : m_recordingEngi
{
}
-void EncoderThread::setPaused(bool b)
+void EncoderThread::setPaused(bool paused)
{
- m_paused.storeRelease(b);
+ auto guard = lockLoopData();
+ m_paused = paused;
+}
+
+void EncoderThread::setAutoStop(bool autoStop)
+{
+ auto guard = lockLoopData();
+ m_autoStop = autoStop;
+}
+
+void EncoderThread::setEndOfSourceStream()
+{
+ {
+ auto guard = lockLoopData();
+ m_endOfSourceStream = true;
+ }
+
+ emit endOfSourceStream();
}
} // namespace QFFmpeg
QT_END_NAMESPACE
+
+#include "moc_qffmpegencoderthread_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread_p.h
index 1fe35303b..f1f6b610a 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread_p.h
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread_p.h
@@ -4,6 +4,9 @@
#define QFFMPEGENCODERTHREAD_P_H
#include "qffmpegthread_p.h"
+#include "qpointer.h"
+
+#include "private/qmediainputencoderinterface_p.h"
QT_BEGIN_NAMESPACE
@@ -11,15 +14,55 @@ namespace QFFmpeg {
class RecordingEngine;
-class EncoderThread : public ConsumerThread
+class EncoderThread : public ConsumerThread, public QMediaInputEncoderInterface
{
+ Q_OBJECT
public:
EncoderThread(RecordingEngine &recordingEngine);
- virtual void setPaused(bool b);
+
+ void setPaused(bool paused);
+
+ void setAutoStop(bool autoStop);
+
+ void setSource(QObject *source) { m_source = source; }
+
+ QObject *source() const { return m_source; }
+
+ bool canPushFrame() const override { return m_canPushFrame.load(std::memory_order_relaxed); }
+
+ void setEndOfSourceStream();
+
+ bool isEndOfSourceStream() const { return m_endOfSourceStream; }
+
+protected:
+ void updateCanPushFrame();
+
+ virtual bool checkIfCanPushFrame() const = 0;
+
+ void resetEndOfSourceStream() { m_endOfSourceStream = false; }
+
+ auto lockLoopData()
+ {
+ return QScopeGuard([this, locker = ConsumerThread::lockLoopData()]() mutable {
+ const bool autoStopActivated = m_endOfSourceStream && m_autoStop;
+ const bool canPush = !autoStopActivated && !m_paused && checkIfCanPushFrame();
+ locker.unlock();
+ if (m_canPushFrame.exchange(canPush, std::memory_order_relaxed) != canPush)
+ emit canPushFrameChanged();
+ });
+ }
+
+Q_SIGNALS:
+ void canPushFrameChanged();
+ void endOfSourceStream();
protected:
- QAtomicInteger<bool> m_paused = false;
+ bool m_paused = false;
+ bool m_endOfSourceStream = false;
+ bool m_autoStop = false;
+ std::atomic_bool m_canPushFrame = false;
RecordingEngine &m_recordingEngine;
+ QPointer<QObject> m_source;
};
} // namespace QFFmpeg
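EncoderThread::lockLoopData() above wraps the base-class mutex locker in a QScopeGuard so that every unlock recomputes the "can push frame" flag and emits canPushFrameChanged() only when the value actually changes. A stripped-down sketch of just that locking pattern, assuming Qt's QMutexLocker/QScopeGuard and a std::atomic flag (the names and the queue-size check are illustrative):

    #include <QtCore/QMutex>
    #include <QtCore/QScopeGuard>
    #include <atomic>
    #include <functional>

    class Worker
    {
    public:
        std::function<void()> onAcceptingChanged; // stands in for emit canPushFrameChanged()

        // Take the lock; when the returned guard is destroyed, derive the flag while
        // still locked, drop the lock, then publish and notify only on a change.
        auto lockState()
        {
            return QScopeGuard([this, locker = QMutexLocker(&m_mutex)]() mutable {
                const bool accepting = !m_paused && m_queueSize < 4;
                locker.unlock();
                if (m_accepting.exchange(accepting, std::memory_order_relaxed) != accepting
                    && onAcceptingChanged)
                    onAcceptingChanged();
            });
        }

        void setPaused(bool paused)
        {
            auto guard = lockState();
            m_paused = paused;
        }

        bool accepting() const { return m_accepting.load(std::memory_order_relaxed); }

    private:
        QMutex m_mutex;
        bool m_paused = false;
        int m_queueSize = 0;
        std::atomic_bool m_accepting = false;
    };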
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer.cpp
new file mode 100644
index 000000000..4f8c21bd5
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer.cpp
@@ -0,0 +1,165 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegencodinginitializer_p.h"
+#include "qffmpegrecordingengineutils_p.h"
+#include "qffmpegrecordingengine_p.h"
+#include "qffmpegaudioinput_p.h"
+#include "qvideoframe.h"
+
+#include "private/qplatformvideoframeinput_p.h"
+#include "private/qplatformaudiobufferinput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+EncodingInitializer::EncodingInitializer(RecordingEngine &engine) : m_recordingEngine(engine) { }
+
+EncodingInitializer::~EncodingInitializer()
+{
+ for (QObject *source : m_pendingSources)
+ setEncoderInterface(source, nullptr);
+}
+
+void EncodingInitializer::start(const std::vector<QPlatformAudioBufferInputBase *> &audioSources,
+ const std::vector<QPlatformVideoSource *> &videoSources)
+{
+ for (auto source : audioSources) {
+ if (auto audioInput = qobject_cast<QFFmpegAudioInput *>(source))
+ m_recordingEngine.addAudioInput(audioInput);
+ else if (auto audioBufferInput = qobject_cast<QPlatformAudioBufferInput *>(source))
+ addAudioBufferInput(audioBufferInput);
+ else
+ Q_ASSERT(!"Undefined source type");
+ }
+
+ for (auto source : videoSources)
+ addVideoSource(source);
+
+ tryStartRecordingEngine();
+}
+
+void EncodingInitializer::addAudioBufferInput(QPlatformAudioBufferInput *input)
+{
+ Q_ASSERT(input);
+
+ if (input->audioFormat().isValid())
+ m_recordingEngine.addAudioBufferInput(input, {});
+ else
+ addPendingAudioBufferInput(input);
+}
+
+void EncodingInitializer::addPendingAudioBufferInput(QPlatformAudioBufferInput *input)
+{
+ addPendingSource(input);
+
+ connect(input, &QPlatformAudioBufferInput::destroyed, this, [this, input]() {
+ erasePendingSource(input, QStringLiteral("Audio source deleted"), true);
+ });
+
+ connect(input, &QPlatformAudioBufferInput::newAudioBuffer, this,
+ [this, input](const QAudioBuffer &buffer) {
+ if (buffer.isValid())
+ erasePendingSource(
+ input, [&]() { m_recordingEngine.addAudioBufferInput(input, buffer); });
+ else
+ erasePendingSource(input,
+ QStringLiteral("Audio source has sent the end frame"));
+ });
+}
+
+void EncodingInitializer::addVideoSource(QPlatformVideoSource *source)
+{
+ Q_ASSERT(source);
+ Q_ASSERT(source->isActive());
+
+ if (source->frameFormat().isValid())
+ m_recordingEngine.addVideoSource(source, {});
+ else if (source->hasError())
+ emitStreamInitializationError(QStringLiteral("Video source error: ")
+ + source->errorString());
+ else
+ addPendingVideoSource(source);
+}
+
+void EncodingInitializer::addPendingVideoSource(QPlatformVideoSource *source)
+{
+ addPendingSource(source);
+
+ connect(source, &QPlatformVideoSource::errorChanged, this, [this, source]() {
+ if (source->hasError())
+ erasePendingSource(source,
+                               QStringLiteral("Video source error: ") + source->errorString());
+ });
+
+ connect(source, &QPlatformVideoSource::destroyed, this, [this, source]() {
+ erasePendingSource(source, QStringLiteral("Source deleted"), true);
+ });
+
+ connect(source, &QPlatformVideoSource::activeChanged, this, [this, source]() {
+ if (!source->isActive())
+ erasePendingSource(source, QStringLiteral("Video source deactivated"));
+ });
+
+ connect(source, &QPlatformVideoSource::newVideoFrame, this,
+ [this, source](const QVideoFrame &frame) {
+ if (frame.isValid())
+ erasePendingSource(source,
+ [&]() { m_recordingEngine.addVideoSource(source, frame); });
+ else
+ erasePendingSource(source,
+ QStringLiteral("Video source has sent the end frame"));
+ });
+}
+
+void EncodingInitializer::tryStartRecordingEngine()
+{
+ if (m_pendingSources.empty())
+ m_recordingEngine.start();
+}
+
+void EncodingInitializer::emitStreamInitializationError(QString error)
+{
+ emit m_recordingEngine.streamInitializationError(
+ QMediaRecorder::ResourceError,
+            QStringLiteral("Video stream initialization error. ") + error);
+}
+
+void EncodingInitializer::addPendingSource(QObject *source)
+{
+ Q_ASSERT(m_pendingSources.count(source) == 0);
+
+ setEncoderInterface(source, this);
+ m_pendingSources.emplace(source);
+}
+
+template <typename F>
+void EncodingInitializer::erasePendingSource(QObject *source, F &&functionOrError, bool destroyed)
+{
+ const auto erasedCount = m_pendingSources.erase(source);
+ if (erasedCount == 0)
+ return; // got a queued event, just ignore it.
+
+ if (!destroyed) {
+ setEncoderInterface(source, nullptr);
+ disconnect(source, nullptr, this, nullptr);
+ }
+
+ if constexpr (std::is_invocable_v<F>)
+ functionOrError();
+ else
+ emitStreamInitializationError(functionOrError);
+
+ tryStartRecordingEngine();
+}
+
+bool EncodingInitializer::canPushFrame() const
+{
+ return true;
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
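EncodingInitializer implements a deferred start: a source whose format is not yet known is parked in m_pendingSources, the first valid frame (or an error / deactivation) moves it into the RecordingEngine, and once the pending set drains the engine starts. A minimal non-Qt sketch of that bookkeeping, with illustrative names that are not part of the patch:

    #include <functional>
    #include <string>
    #include <unordered_set>

    struct DeferredStarter
    {
        std::function<void(void *, std::string)> registerSource; // e.g. addVideoSource(source, frame)
        std::function<void(std::string)> reportError;            // e.g. emitStreamInitializationError
        std::function<void()> startEngine;                       // e.g. RecordingEngine::start

        std::unordered_set<void *> pending;

        void addPending(void *source) { pending.insert(source); }

        // Invoked from the source's first-frame, error, or deactivation notification.
        void resolve(void *source, bool ok, std::string payloadOrError)
        {
            if (pending.erase(source) == 0)
                return; // stale queued notification, ignore it
            if (ok)
                registerSource(source, std::move(payloadOrError));
            else
                reportError(std::move(payloadOrError));
            if (pending.empty())
                startEngine();
        }
    };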
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer_p.h
new file mode 100644
index 000000000..e3bcb3428
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer_p.h
@@ -0,0 +1,77 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QENCODINGINITIALIZER_P_H
+#define QENCODINGINITIALIZER_P_H
+
+#include "qobject.h"
+#include "private/qmediainputencoderinterface_p.h"
+#include <unordered_set>
+#include <vector>
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+QT_BEGIN_NAMESPACE
+
+class QFFmpegAudioInput;
+class QPlatformVideoSource;
+class QPlatformAudioBufferInput;
+class QPlatformAudioBufferInputBase;
+class QMediaInputEncoderInterface;
+
+namespace QFFmpeg {
+
+class RecordingEngine;
+
+// Initializes RecordingEngine with audio and video sources, potentially lazily
+// upon first frame arrival if video frame format is not pre-determined.
+class EncodingInitializer : public QObject, private QMediaInputEncoderInterface
+{
+public:
+ EncodingInitializer(RecordingEngine &engine);
+
+ ~EncodingInitializer() override;
+
+ void start(const std::vector<QPlatformAudioBufferInputBase *> &audioSources,
+ const std::vector<QPlatformVideoSource *> &videoSources);
+
+private:
+ void addAudioBufferInput(QPlatformAudioBufferInput *input);
+
+ void addPendingAudioBufferInput(QPlatformAudioBufferInput *input);
+
+ void addVideoSource(QPlatformVideoSource *source);
+
+ void addPendingVideoSource(QPlatformVideoSource *source);
+
+ void addPendingSource(QObject *source);
+
+ void tryStartRecordingEngine();
+
+private:
+ void emitStreamInitializationError(QString error);
+
+ template <typename F>
+ void erasePendingSource(QObject *source, F &&functionOrError, bool destroyed = false);
+
+ bool canPushFrame() const override;
+
+private:
+ RecordingEngine &m_recordingEngine;
+ std::unordered_set<QObject *> m_pendingSources;
+};
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QENCODINGINITIALIZER_P_H
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer.cpp
index 6367dde3b..dbb96d00c 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer.cpp
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer.cpp
@@ -2,13 +2,14 @@
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
#include "qffmpegmuxer_p.h"
#include "qffmpegrecordingengine_p.h"
+#include "qffmpegrecordingengineutils_p.h"
#include <QtCore/qloggingcategory.h>
QT_BEGIN_NAMESPACE
namespace QFFmpeg {
-static Q_LOGGING_CATEGORY(qLcFFmpegMuxer, "qt.multimedia.ffmpeg.muxer");
+Q_STATIC_LOGGING_CATEGORY(qLcFFmpegMuxer, "qt.multimedia.ffmpeg.muxer");
Muxer::Muxer(RecordingEngine *encoder) : m_encoder(encoder)
{
@@ -18,7 +19,7 @@ Muxer::Muxer(RecordingEngine *encoder) : m_encoder(encoder)
void Muxer::addPacket(AVPacketUPtr packet)
{
{
- QMutexLocker locker(&m_queueMutex);
+ QMutexLocker locker = lockLoopData();
m_packetQueue.push(std::move(packet));
}
@@ -28,7 +29,7 @@ void Muxer::addPacket(AVPacketUPtr packet)
AVPacketUPtr Muxer::takePacket()
{
- QMutexLocker locker(&m_queueMutex);
+ QMutexLocker locker = lockLoopData();
return dequeueIfPossible(m_packetQueue);
}
@@ -37,11 +38,14 @@ void Muxer::init()
qCDebug(qLcFFmpegMuxer) << "Muxer::init started thread.";
}
-void Muxer::cleanup() { }
+void Muxer::cleanup()
+{
+ while (!m_packetQueue.empty())
+ processOne();
+}
bool QFFmpeg::Muxer::hasData() const
{
- QMutexLocker locker(&m_queueMutex);
return !m_packetQueue.empty();
}
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer_p.h
index 8cdf73c6f..4f8f4d27a 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer_p.h
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer_p.h
@@ -29,7 +29,6 @@ private:
void processOne() override;
private:
- mutable QMutex m_queueMutex;
std::queue<AVPacketUPtr> m_packetQueue;
RecordingEngine *m_encoder;
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine.cpp
index 2b32af502..469cd1c48 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine.cpp
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine.cpp
@@ -1,23 +1,25 @@
// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
#include "qffmpegrecordingengine_p.h"
-#include "qffmpegmediaformatinfo_p.h"
-#include "qffmpegvideoframeencoder_p.h"
-#include "private/qmultimediautils_p.h"
-
-#include <qdebug.h>
+#include "qffmpegencodinginitializer_p.h"
#include "qffmpegaudioencoder_p.h"
#include "qffmpegaudioinput_p.h"
-#include <private/qplatformcamera_p.h>
-#include "qffmpegvideobuffer_p.h"
+#include "qffmpegrecordingengineutils_p.h"
+
+#include "private/qmultimediautils_p.h"
+#include "private/qplatformaudiobufferinput_p.h"
+#include "private/qplatformvideosource_p.h"
+#include "private/qplatformvideoframeinput_p.h"
+
+#include "qdebug.h"
#include "qffmpegvideoencoder_p.h"
#include "qffmpegmediametadata_p.h"
#include "qffmpegmuxer_p.h"
-#include <qloggingcategory.h>
+#include "qloggingcategory.h"
QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLcFFmpegEncoder, "qt.multimedia.ffmpeg.encoder");
+Q_STATIC_LOGGING_CATEGORY(qLcFFmpegEncoder, "qt.multimedia.ffmpeg.encoder");
namespace QFFmpeg
{
@@ -36,21 +38,71 @@ RecordingEngine::~RecordingEngine()
void RecordingEngine::addAudioInput(QFFmpegAudioInput *input)
{
- m_audioEncoder = new AudioEncoder(*this, input, m_settings);
- addMediaFrameHandler(input, &QFFmpegAudioInput::newAudioBuffer, m_audioEncoder,
- &AudioEncoder::addBuffer);
+ Q_ASSERT(input);
+
+ if (input->device.isNull()) {
+ emit streamInitializationError(QMediaRecorder::ResourceError,
+ QLatin1StringView("Audio device is null"));
+ return;
+ }
+
+ const QAudioFormat format = input->device.preferredFormat();
+
+ if (!format.isValid()) {
+ emit streamInitializationError(
+ QMediaRecorder::FormatError,
+ QLatin1StringView("Audio device has invalid preferred format"));
+ return;
+ }
+
+ AudioEncoder *audioEncoder = createAudioEncoder(format);
+ connectEncoderToSource(audioEncoder, input);
+
input->setRunning(true);
}
-void RecordingEngine::addVideoSource(QPlatformVideoSource * source)
+void RecordingEngine::addAudioBufferInput(QPlatformAudioBufferInput *input,
+ const QAudioBuffer &firstBuffer)
{
- auto frameFormat = source->frameFormat();
+ Q_ASSERT(input);
+ const QAudioFormat format = firstBuffer.isValid() ? firstBuffer.format() : input->audioFormat();
- if (!frameFormat.isValid()) {
- qCWarning(qLcFFmpegEncoder) << "Cannot add source; invalid vide frame format";
- emit error(QMediaRecorder::ResourceError,
- QLatin1StringView("Cannot get video source format"));
- return;
+ AudioEncoder *audioEncoder = createAudioEncoder(format);
+
+ // set the buffer before connecting to avoid potential races
+ if (firstBuffer.isValid())
+ audioEncoder->addBuffer(firstBuffer);
+
+ connectEncoderToSource(audioEncoder, input);
+}
+
+AudioEncoder *RecordingEngine::createAudioEncoder(const QAudioFormat &format)
+{
+ Q_ASSERT(format.isValid());
+
+ auto audioEncoder = new AudioEncoder(*this, format, m_settings);
+ m_audioEncoders.push_back(audioEncoder);
+ connect(audioEncoder, &EncoderThread::endOfSourceStream, this,
+ &RecordingEngine::handleSourceEndOfStream);
+ if (m_autoStop)
+ audioEncoder->setAutoStop(true);
+
+ return audioEncoder;
+}
+
+void RecordingEngine::addVideoSource(QPlatformVideoSource *source, const QVideoFrame &firstFrame)
+{
+ QVideoFrameFormat frameFormat =
+ firstFrame.isValid() ? firstFrame.surfaceFormat() : source->frameFormat();
+
+ Q_ASSERT(frameFormat.isValid());
+
+ if (firstFrame.isValid() && frameFormat.streamFrameRate() <= 0.f) {
+ const qint64 startTime = firstFrame.startTime();
+ const qint64 endTime = firstFrame.endTime();
+ if (startTime != -1 && endTime > startTime)
+ frameFormat.setStreamFrameRate(static_cast<qreal>(VideoFrameTimeBase)
+ / (endTime - startTime));
}
std::optional<AVPixelFormat> hwPixelFormat = source->ffmpegHWPixelFormat()
@@ -60,22 +112,42 @@ void RecordingEngine::addVideoSource(QPlatformVideoSource * source)
qCDebug(qLcFFmpegEncoder) << "adding video source" << source->metaObject()->className() << ":"
<< "pixelFormat=" << frameFormat.pixelFormat()
<< "frameSize=" << frameFormat.frameSize()
- << "frameRate=" << frameFormat.frameRate() << "ffmpegHWPixelFormat="
- << (hwPixelFormat ? *hwPixelFormat : AV_PIX_FMT_NONE);
+ << "frameRate=" << frameFormat.streamFrameRate()
+ << "ffmpegHWPixelFormat=" << (hwPixelFormat ? *hwPixelFormat : AV_PIX_FMT_NONE);
auto veUPtr = std::make_unique<VideoEncoder>(*this, m_settings, frameFormat, hwPixelFormat);
if (!veUPtr->isValid()) {
- emit error(QMediaRecorder::FormatError, QLatin1StringView("Cannot initialize encoder"));
+ emit streamInitializationError(QMediaRecorder::FormatError,
+ QLatin1StringView("Cannot initialize encoder"));
return;
}
- auto ve = veUPtr.release();
- addMediaFrameHandler(source, &QPlatformVideoSource::newVideoFrame, ve, &VideoEncoder::addFrame);
- m_videoEncoders.append(ve);
+ auto videoEncoder = veUPtr.release();
+ m_videoEncoders.append(videoEncoder);
+ if (m_autoStop)
+ videoEncoder->setAutoStop(true);
+
+ connect(videoEncoder, &EncoderThread::endOfSourceStream, this,
+ &RecordingEngine::handleSourceEndOfStream);
+
+ // set the frame before connecting to avoid potential races
+ if (firstFrame.isValid())
+ videoEncoder->addFrame(firstFrame);
+
+ connectEncoderToSource(videoEncoder, source);
}
void RecordingEngine::start()
{
+ Q_ASSERT(m_initializer);
+ m_initializer.reset();
+
+ if (m_audioEncoders.empty() && m_videoEncoders.empty()) {
+ emit sessionError(QMediaRecorder::ResourceError,
+ QLatin1StringView("No valid stream found for encoding"));
+ return;
+ }
+
qCDebug(qLcFFmpegEncoder) << "RecordingEngine::start!";
avFormatContext()->metadata = QFFmpegMetaData::toAVMetaData(m_metaData);
@@ -85,7 +157,8 @@ void RecordingEngine::start()
int res = avformat_write_header(avFormatContext(), nullptr);
if (res < 0) {
qWarning() << "could not write header, error:" << res << err2str(res);
- emit error(QMediaRecorder::ResourceError, "Cannot start writing the stream");
+ emit sessionError(QMediaRecorder::ResourceError,
+ QLatin1StringView("Cannot start writing the stream"));
return;
}
@@ -94,11 +167,17 @@ void RecordingEngine::start()
qCDebug(qLcFFmpegEncoder) << "stream header is successfully written";
m_muxer->start();
- if (m_audioEncoder)
- m_audioEncoder->start();
- for (auto *videoEncoder : m_videoEncoders)
- if (videoEncoder->isValid())
- videoEncoder->start();
+
+ forEachEncoder([](QThread *thread) { thread->start(); });
+}
+
+void RecordingEngine::initialize(const std::vector<QPlatformAudioBufferInputBase *> &audioSources,
+ const std::vector<QPlatformVideoSource *> &videoSources)
+{
+ qCDebug(qLcFFmpegEncoder) << ">>>>>>>>>>>>>>> initialize";
+
+ m_initializer = std::make_unique<EncodingInitializer>(*this);
+ m_initializer->start(audioSources, videoSources);
}
RecordingEngine::EncodingFinalizer::EncodingFinalizer(RecordingEngine &recordingEngine)
@@ -109,10 +188,7 @@ RecordingEngine::EncodingFinalizer::EncodingFinalizer(RecordingEngine &recording
void RecordingEngine::EncodingFinalizer::run()
{
- if (m_recordingEngine.m_audioEncoder)
- m_recordingEngine.m_audioEncoder->stopAndDelete();
- for (auto &videoEncoder : m_recordingEngine.m_videoEncoders)
- videoEncoder->stopAndDelete();
+ m_recordingEngine.forEachEncoder(&EncoderThread::stopAndDelete);
m_recordingEngine.m_muxer->stopAndDelete();
if (m_recordingEngine.m_isHeaderWritten) {
@@ -120,9 +196,9 @@ void RecordingEngine::EncodingFinalizer::run()
if (res < 0) {
const auto errorDescription = err2str(res);
qCWarning(qLcFFmpegEncoder) << "could not write trailer" << res << errorDescription;
- emit m_recordingEngine.error(QMediaRecorder::FormatError,
- QLatin1String("Cannot write trailer: ")
- + errorDescription);
+ emit m_recordingEngine.sessionError(QMediaRecorder::FormatError,
+ QLatin1String("Cannot write trailer: ")
+ + errorDescription);
}
}
// else ffmpeg might crash
@@ -140,19 +216,24 @@ void RecordingEngine::finalize()
{
qCDebug(qLcFFmpegEncoder) << ">>>>>>>>>>>>>>> finalize";
- for (auto &conn : m_connections)
- disconnect(conn);
+ m_initializer.reset();
+
+ forEachEncoder(&disconnectEncoderFromSource);
auto *finalizer = new EncodingFinalizer(*this);
finalizer->start();
}
-void RecordingEngine::setPaused(bool p)
+void RecordingEngine::setPaused(bool paused)
+{
+ forEachEncoder(&EncoderThread::setPaused, paused);
+}
+
+void RecordingEngine::setAutoStop(bool autoStop)
{
- if (m_audioEncoder)
- m_audioEncoder->setPaused(p);
- for (auto &videoEncoder : m_videoEncoders)
- videoEncoder->setPaused(p);
+ m_autoStop = autoStop;
+ forEachEncoder(&EncoderThread::setAutoStop, autoStop);
+ handleSourceEndOfStream();
}
void RecordingEngine::setMetaData(const QMediaMetaData &metaData)
@@ -169,11 +250,26 @@ void RecordingEngine::newTimeStamp(qint64 time)
}
}
-template<typename... Args>
-void RecordingEngine::addMediaFrameHandler(Args &&...args)
+bool RecordingEngine::isEndOfSourceStreams() const
+{
+ auto isAtEnd = [](EncoderThread *encoder) { return encoder->isEndOfSourceStream(); };
+ return std::all_of(m_videoEncoders.cbegin(), m_videoEncoders.cend(), isAtEnd)
+ && std::all_of(m_audioEncoders.cbegin(), m_audioEncoders.cend(), isAtEnd);
+}
+
+void RecordingEngine::handleSourceEndOfStream()
+{
+ if (m_autoStop && isEndOfSourceStreams())
+ emit autoStopped();
+}
+
+template <typename F, typename... Args>
+void RecordingEngine::forEachEncoder(F &&f, Args &&...args)
{
- auto connection = connect(std::forward<Args>(args)..., Qt::DirectConnection);
- m_connections.append(connection);
+ for (AudioEncoder *audioEncoder : m_audioEncoders)
+ std::invoke(f, audioEncoder, args...);
+ for (VideoEncoder *videoEncoder : m_videoEncoders)
+ std::invoke(f, videoEncoder, args...);
}
}
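Because forEachEncoder() dispatches through std::invoke, the same helper accepts both a pointer to member function (forEachEncoder(&EncoderThread::setPaused, paused)) and a plain callable (forEachEncoder([](QThread *thread) { thread->start(); })). A tiny self-contained illustration of that dispatch, using stand-in types rather than the classes from this patch:

    #include <functional>
    #include <iostream>
    #include <vector>

    struct Encoder
    {
        void setPaused(bool paused) { std::cout << "paused=" << paused << '\n'; }
        void start() { std::cout << "started\n"; }
    };

    template <typename F, typename... Args>
    void forEach(const std::vector<Encoder *> &encoders, F &&f, Args &&...args)
    {
        for (Encoder *encoder : encoders)
            std::invoke(f, encoder, args...); // works for member pointers and lambdas alike
    }

    int main()
    {
        Encoder a, b;
        const std::vector<Encoder *> encoders{ &a, &b };

        forEach(encoders, &Encoder::setPaused, true);      // pointer to member function
        forEach(encoders, [](Encoder *e) { e->start(); }); // callable
    }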
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine_p.h
index b74fbba9f..ce3aaa6bb 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine_p.h
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine_p.h
@@ -20,12 +20,13 @@
#include <private/qplatformmediarecorder_p.h>
#include <qmediarecorder.h>
-#include <queue>
-
QT_BEGIN_NAMESPACE
class QFFmpegAudioInput;
+class QPlatformAudioBufferInput;
+class QPlatformAudioBufferInputBase;
class QVideoFrame;
+class QAudioBuffer;
class QPlatformVideoSource;
namespace QFFmpeg
@@ -36,17 +37,7 @@ class Muxer;
class AudioEncoder;
class VideoEncoder;
class VideoFrameEncoder;
-
-template <typename T>
-T dequeueIfPossible(std::queue<T> &queue)
-{
- if (queue.empty())
- return T{};
-
- auto result = std::move(queue.front());
- queue.pop();
- return result;
-}
+class EncodingInitializer;
class RecordingEngine : public QObject
{
@@ -55,30 +46,33 @@ public:
RecordingEngine(const QMediaEncoderSettings &settings, std::unique_ptr<EncodingFormatContext> context);
~RecordingEngine();
- void addAudioInput(QFFmpegAudioInput *input);
- void addVideoSource(QPlatformVideoSource *source);
-
- void start();
+ void initialize(const std::vector<QPlatformAudioBufferInputBase *> &audioSources,
+ const std::vector<QPlatformVideoSource *> &videoSources);
void finalize();
void setPaused(bool p);
+ void setAutoStop(bool autoStop);
+
+ bool autoStop() const { return m_autoStop; }
+
void setMetaData(const QMediaMetaData &metaData);
AVFormatContext *avFormatContext() { return m_formatContext->avFormatContext(); }
Muxer *getMuxer() { return m_muxer; }
+ bool isEndOfSourceStreams() const;
+
public Q_SLOTS:
void newTimeStamp(qint64 time);
Q_SIGNALS:
void durationChanged(qint64 duration);
- void error(QMediaRecorder::Error code, const QString &description);
+ void sessionError(QMediaRecorder::Error code, const QString &description);
+ void streamInitializationError(QMediaRecorder::Error code, const QString &description);
void finalizationDone();
+ void autoStopped();
private:
- template<typename... Args>
- void addMediaFrameHandler(Args &&...args);
-
class EncodingFinalizer : public QThread
{
public:
@@ -90,20 +84,34 @@ private:
RecordingEngine &m_recordingEngine;
};
+ friend class EncodingInitializer;
+ void addAudioInput(QFFmpegAudioInput *input);
+ void addAudioBufferInput(QPlatformAudioBufferInput *input, const QAudioBuffer &firstBuffer);
+ AudioEncoder *createAudioEncoder(const QAudioFormat &format);
+
+ void addVideoSource(QPlatformVideoSource *source, const QVideoFrame &firstFrame);
+ void handleSourceEndOfStream();
+
+ void start();
+
+ template <typename F, typename... Args>
+ void forEachEncoder(F &&f, Args &&...args);
+
private:
QMediaEncoderSettings m_settings;
QMediaMetaData m_metaData;
std::unique_ptr<EncodingFormatContext> m_formatContext;
Muxer *m_muxer = nullptr;
- AudioEncoder *m_audioEncoder = nullptr;
+ QList<AudioEncoder *> m_audioEncoders;
QList<VideoEncoder *> m_videoEncoders;
- QList<QMetaObject::Connection> m_connections;
+ std::unique_ptr<EncodingInitializer> m_initializer;
QMutex m_timeMutex;
qint64 m_timeRecorded = 0;
bool m_isHeaderWritten = false;
+ bool m_autoStop = false;
};
}
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils.cpp
new file mode 100644
index 000000000..6c2ba8b15
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils.cpp
@@ -0,0 +1,63 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "recordingengine/qffmpegrecordingengineutils_p.h"
+#include "recordingengine/qffmpegencoderthread_p.h"
+#include "private/qplatformaudiobufferinput_p.h"
+#include "private/qplatformvideoframeinput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+template <typename F>
+void doWithMediaFrameInput(QObject *source, F &&f)
+{
+ if (auto videoFrameInput = qobject_cast<QPlatformVideoFrameInput *>(source))
+ f(videoFrameInput);
+ else if (auto audioBufferInput = qobject_cast<QPlatformAudioBufferInput *>(source))
+ f(audioBufferInput);
+}
+
+void setEncoderInterface(QObject *source, QMediaInputEncoderInterface *interface)
+{
+ doWithMediaFrameInput(source, [&](auto source) {
+ using Source = std::remove_pointer_t<decltype(source)>;
+
+ source->setEncoderInterface(interface);
+ if (interface)
+            // Postpone emitting 'encoderUpdated', as the encoding pipeline may not be
+            // completely ready at this point. One such case is calling QMediaRecorder::stop
+            // while handling 'readyToSendFrame'.
+ QMetaObject::invokeMethod(source, &Source::encoderUpdated, Qt::QueuedConnection);
+ else
+ emit source->encoderUpdated();
+ });
+}
+
+void setEncoderUpdateConnection(QObject *source, EncoderThread *encoder)
+{
+ doWithMediaFrameInput(source, [&](auto source) {
+ using Source = std::remove_pointer_t<decltype(source)>;
+ QObject::connect(encoder, &EncoderThread::canPushFrameChanged, source,
+ &Source::encoderUpdated);
+ });
+}
+
+void disconnectEncoderFromSource(EncoderThread *encoder)
+{
+ QObject *source = encoder->source();
+ if (!source)
+ return;
+
+    // We should remove AudioEncoder's dependency on QFFmpegAudioInput so that the
+    // source can be reset to null here.
+ // encoder->setSource(nullptr);
+
+ QObject::disconnect(source, nullptr, encoder, nullptr);
+ setEncoderInterface(source, nullptr);
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils_p.h
new file mode 100644
index 000000000..a60f81696
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils_p.h
@@ -0,0 +1,81 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGRECORDINGENGINEUTILS_P_H
+#define QFFMPEGRECORDINGENGINEUTILS_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qobject.h"
+#include <queue>
+
+QT_BEGIN_NAMESPACE
+
+class QMediaInputEncoderInterface;
+class QPlatformVideoSource;
+
+namespace QFFmpeg {
+
+constexpr qint64 VideoFrameTimeBase = 1000000; // microseconds per second
+
+class EncoderThread;
+
+template <typename T>
+T dequeueIfPossible(std::queue<T> &queue)
+{
+ if (queue.empty())
+ return T{};
+
+ auto result = std::move(queue.front());
+ queue.pop();
+ return result;
+}
+
+void setEncoderInterface(QObject *source, QMediaInputEncoderInterface *interface);
+
+void setEncoderUpdateConnection(QObject *source, EncoderThread *encoder);
+
+template <typename Encoder, typename Source>
+void connectEncoderToSource(Encoder *encoder, Source *source)
+{
+ Q_ASSERT(!encoder->source());
+ encoder->setSource(source);
+
+ if constexpr (std::is_same_v<Source, QPlatformVideoSource>) {
+ QObject::connect(source, &Source::newVideoFrame, encoder, &Encoder::addFrame,
+ Qt::DirectConnection);
+
+ QObject::connect(source, &Source::activeChanged, encoder, [=]() {
+ if (!source->isActive())
+ encoder->setEndOfSourceStream();
+ });
+ } else {
+ QObject::connect(source, &Source::newAudioBuffer, encoder, &Encoder::addBuffer,
+ Qt::DirectConnection);
+ }
+
+ // TODO:
+ // QObject::connect(source, &Source::disconnectedFromSession, encoder, [=]() {
+ // encoder->setSourceEndOfStream();
+ // });
+
+ setEncoderUpdateConnection(source, encoder);
+ setEncoderInterface(source, encoder);
+}
+
+void disconnectEncoderFromSource(EncoderThread *encoder);
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGRECORDINGENGINEUTILS_P_H
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder.cpp
index a47968096..27706580b 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder.cpp
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder.cpp
@@ -5,13 +5,15 @@
#include "qffmpegvideobuffer_p.h"
#include "qffmpegrecordingengine_p.h"
#include "qffmpegvideoframeencoder_p.h"
+#include "qffmpegrecordingengineutils_p.h"
+#include "private/qvideoframe_p.h"
#include <QtCore/qloggingcategory.h>
QT_BEGIN_NAMESPACE
namespace QFFmpeg {
-static Q_LOGGING_CATEGORY(qLcFFmpegVideoEncoder, "qt.multimedia.ffmpeg.videoencoder");
+Q_STATIC_LOGGING_CATEGORY(qLcFFmpegVideoEncoder, "qt.multimedia.ffmpeg.videoencoder");
VideoEncoder::VideoEncoder(RecordingEngine &recordingEngine, const QMediaEncoderSettings &settings,
const QVideoFrameFormat &format, std::optional<AVPixelFormat> hwFormat)
@@ -19,10 +21,8 @@ VideoEncoder::VideoEncoder(RecordingEngine &recordingEngine, const QMediaEncoder
{
setObjectName(QLatin1String("VideoEncoder"));
- AVPixelFormat swFormat = QFFmpegVideoBuffer::toAVPixelFormat(format.pixelFormat());
- AVPixelFormat ffmpegPixelFormat =
- hwFormat && *hwFormat != AV_PIX_FMT_NONE ? *hwFormat : swFormat;
- auto frameRate = format.frameRate();
+ const AVPixelFormat swFormat = QFFmpegVideoBuffer::toAVPixelFormat(format.pixelFormat());
+ qreal frameRate = format.streamFrameRate();
if (frameRate <= 0.) {
qWarning() << "Invalid frameRate" << frameRate << "; Using the default instead";
@@ -30,9 +30,20 @@ VideoEncoder::VideoEncoder(RecordingEngine &recordingEngine, const QMediaEncoder
frameRate = 30.;
}
+ VideoFrameEncoder::SourceParams sourceParams;
+ sourceParams.size = format.frameSize();
+ sourceParams.format = hwFormat && *hwFormat != AV_PIX_FMT_NONE ? *hwFormat : swFormat;
+ sourceParams.swFormat = swFormat;
+ sourceParams.rotation = format.rotation();
+ sourceParams.xMirrored = format.isMirrored();
+ sourceParams.yMirrored = format.scanLineDirection() == QVideoFrameFormat::BottomToTop;
+ sourceParams.frameRate = frameRate;
+ sourceParams.colorTransfer = QFFmpeg::toAvColorTransfer(format.colorTransfer());
+ sourceParams.colorSpace = QFFmpeg::toAvColorSpace(format.colorSpace());
+ sourceParams.colorRange = QFFmpeg::toAvColorRange(format.colorRange());
+
m_frameEncoder =
- VideoFrameEncoder::create(settings, format.frameSize(), frameRate, ffmpegPixelFormat,
- swFormat, recordingEngine.avFormatContext());
+ VideoFrameEncoder::create(settings, sourceParams, recordingEngine.avFormatContext());
}
VideoEncoder::~VideoEncoder() = default;
@@ -44,25 +55,40 @@ bool VideoEncoder::isValid() const
void VideoEncoder::addFrame(const QVideoFrame &frame)
{
- QMutexLocker locker = lockLoopData();
+ if (!frame.isValid()) {
+ setEndOfSourceStream();
+ return;
+ }
- // Drop frames if encoder can not keep up with the video source data rate
- const bool queueFull = m_videoFrameQueue.size() >= m_maxQueueSize;
+ {
+ auto guard = lockLoopData();
- if (queueFull) {
- qCDebug(qLcFFmpegVideoEncoder) << "RecordingEngine frame queue full. Frame lost.";
- } else if (!m_paused.loadRelaxed()) {
- m_videoFrameQueue.push(frame);
+ resetEndOfSourceStream();
- locker.unlock(); // Avoid context switch on wake wake-up
+ if (m_paused) {
+ m_shouldAdjustTimeBaseForNextFrame = true;
+ return;
+ }
+
+ // Drop frames if the encoder cannot keep up with the video source data rate;
+ // canPushFrame could be used instead
+ const bool queueFull = m_videoFrameQueue.size() >= m_maxQueueSize;
+
+ if (queueFull) {
+ qCDebug(qLcFFmpegVideoEncoder) << "RecordingEngine frame queue full. Frame lost.";
+ return;
+ }
- dataReady();
+ m_videoFrameQueue.push({ frame, m_shouldAdjustTimeBaseForNextFrame });
+ m_shouldAdjustTimeBaseForNextFrame = false;
}
+
+ dataReady();
}
-QVideoFrame VideoEncoder::takeFrame()
+VideoEncoder::FrameInfo VideoEncoder::takeFrame()
{
- QMutexLocker locker = lockLoopData();
+ auto guard = lockLoopData();
return dequeueIfPossible(m_videoFrameQueue);
}
@@ -76,10 +102,13 @@ void VideoEncoder::retrievePackets()
void VideoEncoder::init()
{
+ Q_ASSERT(isValid());
+
qCDebug(qLcFFmpegVideoEncoder) << "VideoEncoder::init started video device thread.";
bool ok = m_frameEncoder->open();
if (!ok)
- emit m_recordingEngine.error(QMediaRecorder::ResourceError, "Could not initialize encoder");
+ emit m_recordingEngine.sessionError(QMediaRecorder::ResourceError,
+ "Could not initialize encoder");
}
void VideoEncoder::cleanup()
@@ -113,9 +142,9 @@ void VideoEncoder::processOne()
{
retrievePackets();
- auto frame = takeFrame();
- if (!frame.isValid())
- return;
+ FrameInfo frameInfo = takeFrame();
+ QVideoFrame &frame = frameInfo.frame;
+ Q_ASSERT(frame.isValid());
if (!isValid())
return;
@@ -124,7 +153,7 @@ void VideoEncoder::processOne()
AVFrameUPtr avFrame;
- auto *videoBuffer = dynamic_cast<QFFmpegVideoBuffer *>(frame.videoBuffer());
+ auto *videoBuffer = dynamic_cast<QFFmpegVideoBuffer *>(QVideoFramePrivate::hwBuffer(frame));
if (videoBuffer) {
// ffmpeg video buffer, let's use the native AVFrame stored in there
auto *hwFrame = videoBuffer->getHWFrame();
@@ -133,7 +162,7 @@ void VideoEncoder::processOne()
}
if (!avFrame) {
- frame.map(QVideoFrame::ReadOnly);
+ frame.map(QtVideo::MapMode::ReadOnly);
auto size = frame.size();
avFrame = makeAVFrame();
avFrame->format = m_frameEncoder->sourceFormat();
@@ -145,6 +174,15 @@ void VideoEncoder::processOne()
avFrame->linesize[i] = frame.bytesPerLine(i);
}
+ // TODO: investigate if we need to set color params to AVFrame.
+ // Setting only codec parameters might be sufficient.
+ // What happens if frame color params are set but don't match the codec params?
+ //
+ // QVideoFrameFormat format = frame.surfaceFormat();
+ // avFrame->color_trc = QFFmpeg::toAvColorTransfer(format.colorTransfer());
+ // avFrame->colorspace = QFFmpeg::toAvColorSpace(format.colorSpace());
+ // avFrame->color_range = QFFmpeg::toAvColorRange(format.colorRange());
+
QImage img;
if (frame.pixelFormat() == QVideoFrameFormat::Format_Jpeg) {
// the QImage is cached inside the video frame, so we can take the pointer to the image
@@ -160,14 +198,16 @@ void VideoEncoder::processOne()
new QVideoFrameHolder{ frame, img }, 0);
}
- if (m_baseTime.loadAcquire() == std::numeric_limits<qint64>::min()) {
- m_baseTime.storeRelease(frame.startTime() - m_lastFrameTime);
- qCDebug(qLcFFmpegVideoEncoder) << ">>>> adjusting base time to" << m_baseTime.loadAcquire()
- << frame.startTime() << m_lastFrameTime;
+ const auto [startTime, endTime] = frameTimeStamps(frame);
+
+ if (frameInfo.shouldAdjustTimeBase) {
+ m_baseTime += startTime - m_lastFrameTime;
+ qCDebug(qLcFFmpegVideoEncoder)
+ << ">>>> adjusting base time to" << m_baseTime << startTime << m_lastFrameTime;
}
- qint64 time = frame.startTime() - m_baseTime.loadAcquire();
- m_lastFrameTime = frame.endTime() - m_baseTime.loadAcquire();
+ const qint64 time = startTime - m_baseTime;
+ m_lastFrameTime = endTime;
setAVFrameTime(*avFrame, m_frameEncoder->getPts(time), m_frameEncoder->getTimeBase());
@@ -178,10 +218,42 @@ void VideoEncoder::processOne()
int ret = m_frameEncoder->sendFrame(std::move(avFrame));
if (ret < 0) {
qCDebug(qLcFFmpegVideoEncoder) << "error sending frame" << ret << err2str(ret);
- emit m_recordingEngine.error(QMediaRecorder::ResourceError, err2str(ret));
+ emit m_recordingEngine.sessionError(QMediaRecorder::ResourceError, err2str(ret));
}
}
+bool VideoEncoder::checkIfCanPushFrame() const
+{
+ if (isRunning())
+ return m_videoFrameQueue.size() < m_maxQueueSize;
+ if (!isFinished())
+ return m_videoFrameQueue.empty();
+
+ return false;
+}
+
+std::pair<qint64, qint64> VideoEncoder::frameTimeStamps(const QVideoFrame &frame) const
+{
+ qint64 startTime = frame.startTime();
+ qint64 endTime = frame.endTime();
+
+ if (startTime == -1) {
+ startTime = m_lastFrameTime;
+ endTime = -1;
+ }
+
+ if (endTime == -1) {
+ qreal frameRate = frame.streamFrameRate();
+ if (frameRate <= 0.)
+ frameRate = m_frameEncoder->settings().videoFrameRate();
+
+ Q_ASSERT(frameRate > 0.f);
+ endTime = startTime + static_cast<qint64>(std::round(VideoFrameTimeBase / frameRate));
+ }
+
+ return { startTime, endTime };
+}
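+
+// Illustrative example (not part of the change): since VideoFrameTimeBase is
+// microseconds per second, a 25 fps frame with no explicit end time gets
+// endTime = startTime + round(1000000 / 25) = startTime + 40000 us.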
+
} // namespace QFFmpeg
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder_p.h
index 8f9a943de..ff6a74fc8 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder_p.h
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder_p.h
@@ -27,15 +27,17 @@ public:
void addFrame(const QVideoFrame &frame);
- void setPaused(bool b) override
- {
- EncoderThread::setPaused(b);
- if (b)
- m_baseTime.storeRelease(-1);
- }
+protected:
+ bool checkIfCanPushFrame() const override;
private:
- QVideoFrame takeFrame();
+ struct FrameInfo
+ {
+ QVideoFrame frame;
+ bool shouldAdjustTimeBase = false;
+ };
+
+ FrameInfo takeFrame();
void retrievePackets();
void init() override;
@@ -43,12 +45,15 @@ private:
bool hasData() const override;
void processOne() override;
+ std::pair<qint64, qint64> frameTimeStamps(const QVideoFrame &frame) const;
+
private:
- std::queue<QVideoFrame> m_videoFrameQueue;
+ std::queue<FrameInfo> m_videoFrameQueue;
const size_t m_maxQueueSize = 10; // Arbitrarily chosen to limit memory usage (332 MB @ 4K)
std::unique_ptr<VideoFrameEncoder> m_frameEncoder;
- QAtomicInteger<qint64> m_baseTime = std::numeric_limits<qint64>::min();
+ qint64 m_baseTime = 0;
+ bool m_shouldAdjustTimeBaseForNextFrame = true;
qint64 m_lastFrameTime = 0;
};
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoderutils.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoderutils.cpp
index 83b9575b4..69073688b 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoderutils.cpp
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoderutils.cpp
@@ -82,12 +82,12 @@ AVPixelFormat findTargetSWFormat(AVPixelFormat sourceSWFormat, const AVCodec *co
const auto constraints = accel.constraints();
if (constraints && constraints->valid_sw_formats)
- return findBestAVFormat(constraints->valid_sw_formats, scoreCalculator).first;
+ return findBestAVValue(constraints->valid_sw_formats, scoreCalculator).first;
// Some codecs, e.g. mediacodec, don't expose constraints, let's find the format in
// codec->pix_fmts
if (codec->pix_fmts)
- return findBestAVFormat(codec->pix_fmts, scoreCalculator).first;
+ return findBestAVValue(codec->pix_fmts, scoreCalculator).first;
return AV_PIX_FMT_NONE;
}
@@ -108,8 +108,9 @@ AVPixelFormat findTargetFormat(AVPixelFormat sourceFormat, AVPixelFormat sourceS
if (constraints && hasAVFormat(constraints->valid_hw_formats, hwFormat))
return hwFormat;
- // Some codecs, don't expose constraints, let's find the format in codec->pix_fmts
- if (hasAVFormat(codec->pix_fmts, hwFormat))
+ // Some codecs don't expose constraints;
+ // let's find the format in codec->pix_fmts and hw_config
+ if (isAVFormatSupported(codec, hwFormat))
return hwFormat;
}
@@ -120,7 +121,7 @@ AVPixelFormat findTargetFormat(AVPixelFormat sourceFormat, AVPixelFormat sourceS
}
auto swScoreCalculator = targetSwFormatScoreCalculator(sourceSWFormat);
- return findBestAVFormat(codec->pix_fmts, swScoreCalculator).first;
+ return findBestAVValue(codec->pix_fmts, swScoreCalculator).first;
}
std::pair<const AVCodec *, std::unique_ptr<HWAccel>> findHwEncoder(AVCodecID codecID,
@@ -153,7 +154,7 @@ const AVCodec *findSwEncoder(AVCodecID codecID, AVPixelFormat sourceSWFormat)
// codecs without pix_fmts are suspicious
return MinAVScore;
- return findBestAVFormat(codec->pix_fmts, formatScoreCalculator).second;
+ return findBestAVValue(codec->pix_fmts, formatScoreCalculator).second;
});
}
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder.cpp
index f5c63f3ae..ce2a1af28 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder.cpp
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder.cpp
@@ -6,38 +6,43 @@
#include "qffmpegencoderoptions_p.h"
#include "qffmpegvideoencoderutils_p.h"
#include <qloggingcategory.h>
+#include <QtMultimedia/private/qmaybe_p.h>
+
+extern "C" {
+#include "libavutil/display.h"
+}
QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLcVideoFrameEncoder, "qt.multimedia.ffmpeg.videoencoder");
+Q_STATIC_LOGGING_CATEGORY(qLcVideoFrameEncoder, "qt.multimedia.ffmpeg.videoencoder");
namespace QFFmpeg {
std::unique_ptr<VideoFrameEncoder>
-VideoFrameEncoder::create(const QMediaEncoderSettings &encoderSettings, const QSize &sourceSize,
- qreal sourceFrameRate, AVPixelFormat sourceFormat,
- AVPixelFormat sourceSWFormat, AVFormatContext *formatContext)
+VideoFrameEncoder::create(const QMediaEncoderSettings &encoderSettings,
+ const SourceParams &sourceParams, AVFormatContext *formatContext)
{
- Q_ASSERT(isSwPixelFormat(sourceSWFormat));
- Q_ASSERT(isHwPixelFormat(sourceFormat) || sourceSWFormat == sourceFormat);
+ Q_ASSERT(isSwPixelFormat(sourceParams.swFormat));
+ Q_ASSERT(isHwPixelFormat(sourceParams.format) || sourceParams.swFormat == sourceParams.format);
std::unique_ptr<VideoFrameEncoder> result(new VideoFrameEncoder);
result->m_settings = encoderSettings;
- result->m_sourceSize = sourceSize;
- result->m_sourceFormat = sourceFormat;
+ result->m_sourceSize = sourceParams.size;
+ result->m_sourceFormat = sourceParams.format;
// Temporary: check isSwPixelFormat because of android issue (QTBUG-116836)
- result->m_sourceSWFormat = isSwPixelFormat(sourceFormat) ? sourceFormat : sourceSWFormat;
+ result->m_sourceSWFormat =
+ isSwPixelFormat(sourceParams.format) ? sourceParams.format : sourceParams.swFormat;
if (!result->m_settings.videoResolution().isValid())
- result->m_settings.setVideoResolution(sourceSize);
+ result->m_settings.setVideoResolution(sourceParams.size);
if (result->m_settings.videoFrameRate() <= 0.)
- result->m_settings.setVideoFrameRate(sourceFrameRate);
+ result->m_settings.setVideoFrameRate(sourceParams.frameRate);
if (!result->initCodec() || !result->initTargetFormats()
- || !result->initCodecContext(formatContext)) {
+ || !result->initCodecContext(sourceParams, formatContext)) {
return nullptr;
}
@@ -134,7 +139,8 @@ bool VideoFrameEncoder::initTargetFormats()
VideoFrameEncoder::~VideoFrameEncoder() = default;
-bool QFFmpeg::VideoFrameEncoder::initCodecContext(AVFormatContext *formatContext)
+bool VideoFrameEncoder::initCodecContext(const SourceParams &sourceParams,
+ AVFormatContext *formatContext)
{
m_stream = avformat_new_stream(formatContext, nullptr);
m_stream->id = formatContext->nb_streams - 1;
@@ -153,6 +159,21 @@ bool QFFmpeg::VideoFrameEncoder::initCodecContext(AVFormatContext *formatContext
m_stream->codecpar->width = resolution.width();
m_stream->codecpar->height = resolution.height();
m_stream->codecpar->sample_aspect_ratio = AVRational{ 1, 1 };
+ m_stream->codecpar->color_trc = sourceParams.colorTransfer;
+ m_stream->codecpar->color_space = sourceParams.colorSpace;
+ m_stream->codecpar->color_range = sourceParams.colorRange;
+
+ if (sourceParams.rotation != QtVideo::Rotation::None || sourceParams.xMirrored
+ || sourceParams.yMirrored) {
+ constexpr auto displayMatrixSize = sizeof(int32_t) * 9;
+ AVPacketSideData sideData = { reinterpret_cast<uint8_t *>(av_malloc(displayMatrixSize)),
+ displayMatrixSize, AV_PKT_DATA_DISPLAYMATRIX };
+ int32_t *matrix = reinterpret_cast<int32_t *>(sideData.data);
+ av_display_rotation_set(matrix, static_cast<double>(sourceParams.rotation));
+ av_display_matrix_flip(matrix, sourceParams.xMirrored, sourceParams.yMirrored);
+
+ addStreamSideData(m_stream, sideData);
+ }
Q_ASSERT(m_codec);
@@ -216,92 +237,167 @@ const AVRational &VideoFrameEncoder::getTimeBase() const
return m_stream->time_base;
}
-int VideoFrameEncoder::sendFrame(AVFrameUPtr frame)
+namespace {
+struct FrameConverter
{
- if (!m_codecContext) {
- qWarning() << "codec context is not initialized!";
- return AVERROR(EINVAL);
- }
+ FrameConverter(AVFrameUPtr inputFrame) : m_inputFrame{ std::move(inputFrame) } { }
- if (!frame)
- return avcodec_send_frame(m_codecContext.get(), frame.get());
+ int downloadFromHw()
+ {
+ AVFrameUPtr cpuFrame = makeAVFrame();
- if (frame->format != m_sourceFormat) {
- qWarning() << "Frame format has changed:" << m_sourceFormat << "->" << frame->format;
- return AVERROR(EINVAL);
- }
-
- const QSize frameSize(frame->width, frame->height);
- if (frameSize != m_sourceSize) {
- qCDebug(qLcVideoFrameEncoder) << "Update conversions on the fly. Source size"
- << m_sourceSize << "->" << frameSize;
- m_sourceSize = frameSize;
- updateConversions();
- }
-
- int64_t pts = 0;
- AVRational timeBase = {};
- getAVFrameTime(*frame, pts, timeBase);
-
- if (m_downloadFromHW) {
- auto f = makeAVFrame();
-
- int err = av_hwframe_transfer_data(f.get(), frame.get(), 0);
+ int err = av_hwframe_transfer_data(cpuFrame.get(), currentFrame(), 0);
if (err < 0) {
- qCDebug(qLcVideoFrameEncoder) << "Error transferring frame data to surface." << err2str(err);
+ qCDebug(qLcVideoFrameEncoder)
+ << "Error transferring frame data to surface." << err2str(err);
return err;
}
- frame = std::move(f);
+ setFrame(std::move(cpuFrame));
+ return 0;
}
- if (m_converter) {
- auto f = makeAVFrame();
+ void convert(SwsContext *converter, AVPixelFormat format, const QSize &size)
+ {
+ AVFrameUPtr scaledFrame = makeAVFrame();
- f->format = m_targetSWFormat;
- f->width = m_settings.videoResolution().width();
- f->height = m_settings.videoResolution().height();
+ scaledFrame->format = format;
+ scaledFrame->width = size.width();
+ scaledFrame->height = size.height();
- av_frame_get_buffer(f.get(), 0);
- const auto scaledHeight = sws_scale(m_converter.get(), frame->data, frame->linesize, 0,
- frame->height, f->data, f->linesize);
+ av_frame_get_buffer(scaledFrame.get(), 0);
+ const auto scaledHeight =
+ sws_scale(converter, currentFrame()->data, currentFrame()->linesize, 0, currentFrame()->height,
+ scaledFrame->data, scaledFrame->linesize);
- if (scaledHeight != f->height)
- qCWarning(qLcVideoFrameEncoder) << "Scaled height" << scaledHeight << "!=" << f->height;
+ if (scaledHeight != scaledFrame->height)
+ qCWarning(qLcVideoFrameEncoder)
+ << "Scaled height" << scaledHeight << "!=" << scaledFrame->height;
- frame = std::move(f);
+ setFrame(std::move(scaledFrame));
}
- if (m_uploadToHW) {
- auto *hwFramesContext = m_accel->hwFramesContextAsBuffer();
+ int uploadToHw(HWAccel *accel)
+ {
+ auto *hwFramesContext = accel->hwFramesContextAsBuffer();
Q_ASSERT(hwFramesContext);
- auto f = makeAVFrame();
-
- if (!f)
+ AVFrameUPtr hwFrame = makeAVFrame();
+ if (!hwFrame)
return AVERROR(ENOMEM);
- int err = av_hwframe_get_buffer(hwFramesContext, f.get(), 0);
+
+ int err = av_hwframe_get_buffer(hwFramesContext, hwFrame.get(), 0);
if (err < 0) {
qCDebug(qLcVideoFrameEncoder) << "Error getting HW buffer" << err2str(err);
return err;
} else {
qCDebug(qLcVideoFrameEncoder) << "got HW buffer";
}
- if (!f->hw_frames_ctx) {
+ if (!hwFrame->hw_frames_ctx) {
qCDebug(qLcVideoFrameEncoder) << "no hw frames context";
return AVERROR(ENOMEM);
}
- err = av_hwframe_transfer_data(f.get(), frame.get(), 0);
+ err = av_hwframe_transfer_data(hwFrame.get(), currentFrame(), 0);
if (err < 0) {
- qCDebug(qLcVideoFrameEncoder) << "Error transferring frame data to surface." << err2str(err);
+ qCDebug(qLcVideoFrameEncoder)
+ << "Error transferring frame data to surface." << err2str(err);
return err;
}
- frame = std::move(f);
+
+ setFrame(std::move(hwFrame));
+
+ return 0;
+ }
+
+ QMaybe<AVFrameUPtr, int> takeResultFrame()
+ {
+ // Ensure that the object is reset to an empty state
+ AVFrameUPtr converted = std::move(m_convertedFrame);
+ AVFrameUPtr input = std::move(m_inputFrame);
+
+ if (!converted)
+ return input;
+
+ // Copy metadata except size and format from input frame
+ const int status = av_frame_copy_props(converted.get(), input.get());
+ if (status != 0)
+ return status;
+
+ return converted;
+ }
+
+private:
+ void setFrame(AVFrameUPtr frame) { m_convertedFrame = std::move(frame); }
+
+ AVFrame *currentFrame() const
+ {
+ if (m_convertedFrame)
+ return m_convertedFrame.get();
+ return m_inputFrame.get();
+ }
+
+ AVFrameUPtr m_inputFrame;
+ AVFrameUPtr m_convertedFrame;
+};
+}
+
+int VideoFrameEncoder::sendFrame(AVFrameUPtr inputFrame)
+{
+ if (!m_codecContext) {
+ qWarning() << "codec context is not initialized!";
+ return AVERROR(EINVAL);
+ }
+
+ if (!inputFrame)
+ return avcodec_send_frame(m_codecContext.get(), nullptr); // Flush
+
+ if (!updateSourceFormatAndSize(inputFrame.get()))
+ return AVERROR(EINVAL);
+
+ FrameConverter converter{ std::move(inputFrame) };
+
+ if (m_downloadFromHW) {
+ const int status = converter.downloadFromHw();
+ if (status != 0)
+ return status;
}
+ if (m_converter)
+ converter.convert(m_converter.get(), m_targetSWFormat, m_settings.videoResolution());
+
+ if (m_uploadToHW) {
+ const int status = converter.uploadToHw(m_accel.get());
+ if (status != 0)
+ return status;
+ }
+
+ const QMaybe<AVFrameUPtr, int> resultFrame = converter.takeResultFrame();
+ if (!resultFrame)
+ return resultFrame.error();
+
+ AVRational timeBase{};
+ int64_t pts{};
+ getAVFrameTime(*resultFrame.value(), pts, timeBase);
qCDebug(qLcVideoFrameEncoder) << "sending frame" << pts << "*" << timeBase;
- setAVFrameTime(*frame, pts, timeBase);
- return avcodec_send_frame(m_codecContext.get(), frame.get());
+ return avcodec_send_frame(m_codecContext.get(), resultFrame.value().get());
+}
+
+qint64 VideoFrameEncoder::estimateDuration(const AVPacket &packet, bool isFirstPacket)
+{
+ qint64 duration = 0; // In stream units, multiply by time_base to get seconds
+
+ if (isFirstPacket) {
+ // First packet - Estimate duration from frame rate. Duration must
+ // be set for single-frame videos, otherwise they won't open in
+ // the media player.
+ const AVRational frameDuration = av_inv_q(m_codecContext->framerate);
+ duration = av_rescale_q(1, frameDuration, m_stream->time_base);
+ } else {
+ // Duration is calculated from actual packet times. TODO: Handle discontinuities
+ duration = packet.pts - m_lastPacketTime;
+ }
+
+ return duration;
}
AVPacketUPtr VideoFrameEncoder::retrievePacket()
@@ -323,6 +419,14 @@ AVPacketUPtr VideoFrameEncoder::retrievePacket()
<< "got a packet" << packet->pts << packet->dts << (ts ? *ts : 0);
packet->stream_index = m_stream->id;
+
+ if (packet->duration == 0) {
+ const bool firstFrame = m_lastPacketTime == AV_NOPTS_VALUE;
+ packet->duration = estimateDuration(*packet, firstFrame);
+ }
+
+ m_lastPacketTime = packet->pts;
+
return packet;
};
@@ -359,6 +463,44 @@ AVPacketUPtr VideoFrameEncoder::retrievePacket()
return nullptr;
}
+bool VideoFrameEncoder::updateSourceFormatAndSize(const AVFrame *frame)
+{
+ Q_ASSERT(frame);
+
+ const QSize frameSize(frame->width, frame->height);
+ const AVPixelFormat frameFormat = static_cast<AVPixelFormat>(frame->format);
+
+ if (frameSize == m_sourceSize && frameFormat == m_sourceFormat)
+ return true;
+
+ auto applySourceFormatAndSize = [&](AVPixelFormat swFormat) {
+ m_sourceSize = frameSize;
+ m_sourceFormat = frameFormat;
+ m_sourceSWFormat = swFormat;
+ updateConversions();
+ return true;
+ };
+
+ if (frameFormat == m_sourceFormat)
+ return applySourceFormatAndSize(m_sourceSWFormat);
+
+ if (frameFormat == AV_PIX_FMT_NONE) {
+ qWarning() << "Got a frame with invalid pixel format";
+ return false;
+ }
+
+ if (isSwPixelFormat(frameFormat))
+ return applySourceFormatAndSize(frameFormat);
+
+ auto framesCtx = reinterpret_cast<const AVHWFramesContext *>(frame->hw_frames_ctx->data);
+ if (!framesCtx || framesCtx->sw_format == AV_PIX_FMT_NONE) {
+ qWarning() << "Cannot update conversions as hw frame has invalid framesCtx" << framesCtx;
+ return false;
+ }
+
+ return applySourceFormatAndSize(framesCtx->sw_format);
+}
+
void VideoFrameEncoder::updateConversions()
{
const bool needToScale = m_sourceSize != m_settings.videoResolution();
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder_p.h
index b44e9cbf7..731789926 100644
--- a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder_p.h
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder_p.h
@@ -26,10 +26,21 @@ namespace QFFmpeg {
class VideoFrameEncoder
{
public:
+ struct SourceParams
+ {
+ QSize size;
+ AVPixelFormat format = AV_PIX_FMT_NONE;
+ AVPixelFormat swFormat = AV_PIX_FMT_NONE;
+ QtVideo::Rotation rotation = QtVideo::Rotation::None;
+ bool xMirrored = false;
+ bool yMirrored = false;
+ qreal frameRate = 0.;
+ AVColorTransferCharacteristic colorTransfer = AVCOL_TRC_UNSPECIFIED;
+ AVColorSpace colorSpace = AVCOL_SPC_UNSPECIFIED;
+ AVColorRange colorRange = AVCOL_RANGE_UNSPECIFIED;
+ };
static std::unique_ptr<VideoFrameEncoder> create(const QMediaEncoderSettings &encoderSettings,
- const QSize &sourceSize, qreal sourceFrameRate,
- AVPixelFormat sourceFormat,
- AVPixelFormat sourceSWFormat,
+ const SourceParams &sourceParams,
AVFormatContext *formatContext);
~VideoFrameEncoder();
@@ -43,19 +54,25 @@ public:
const AVRational &getTimeBase() const;
- int sendFrame(AVFrameUPtr frame);
+ int sendFrame(AVFrameUPtr inputFrame);
AVPacketUPtr retrievePacket();
+ const QMediaEncoderSettings &settings() { return m_settings; }
+
private:
VideoFrameEncoder() = default;
+ bool updateSourceFormatAndSize(const AVFrame *frame);
+
void updateConversions();
bool initCodec();
bool initTargetFormats();
- bool initCodecContext(AVFormatContext *formatContext);
+ bool initCodecContext(const SourceParams &sourceParams, AVFormatContext *formatContext);
+
+ qint64 estimateDuration(const AVPacket &packet, bool isFirstPacket);
private:
QMediaEncoderSettings m_settings;
@@ -64,6 +81,7 @@ private:
std::unique_ptr<HWAccel> m_accel;
const AVCodec *m_codec = nullptr;
AVStream *m_stream = nullptr;
+ qint64 m_lastPacketTime = AV_NOPTS_VALUE;
AVCodecContextUPtr m_codecContext;
std::unique_ptr<SwsContext, decltype(&sws_freeContext)> m_converter = { nullptr,
&sws_freeContext };
diff --git a/src/plugins/multimedia/ffmpeg/symbolstubs/openssl3.ver b/src/plugins/multimedia/ffmpeg/symbolstubs/openssl3.ver
new file mode 100644
index 000000000..88235a94c
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/symbolstubs/openssl3.ver
@@ -0,0 +1,7 @@
+# Copyright (C) 2024 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+OPENSSL_3.0.0 {
+ global:
+ *;
+};
diff --git a/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-crypto.cpp b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-crypto.cpp
new file mode 100644
index 000000000..fbf3b783c
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-crypto.cpp
@@ -0,0 +1,6 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QtMultimedia/private/qsymbolsresolveutils_p.h>
+
+// No stub functions are needed for ffmpeg
diff --git a/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-ssl.cpp b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-ssl.cpp
new file mode 100644
index 000000000..3e38e398c
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-ssl.cpp
@@ -0,0 +1,300 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QtMultimedia/private/qsymbolsresolveutils_p.h>
+
+#include <qstringliteral.h>
+
+#include <openssl/bio.h>
+#include <openssl/ssl.h>
+#include <openssl/bn.h>
+#include <openssl/err.h>
+#include <openssl/rand.h>
+
+using namespace Qt::StringLiterals;
+
+[[maybe_unused]] static constexpr auto SHLIB_VERSION =
+#if defined(OPENSSL_SHLIB_VERSION)
+ OPENSSL_SHLIB_VERSION;
+#elif defined(SHLIB_VERSION_NUMBER)
+ SHLIB_VERSION_NUMBER;
+#endif
+
+
+#if !defined(Q_OS_ANDROID)
+CHECK_VERSIONS("ssl", SSL_NEEDED_SOVERSION, SHLIB_VERSION);
+#endif
+
+static std::unique_ptr<QLibrary> loadLib()
+{
+ auto lib = std::make_unique<QLibrary>();
+
+ auto tryLoad = [&](QString sslName, auto version) {
+ lib->setFileNameAndVersion(sslName, version);
+ return lib->load();
+ };
+
+// OpenSSL on Android uses specific library name suffixes
+#if defined(Q_OS_ANDROID)
+ {
+ auto suffix = qEnvironmentVariable("ANDROID_OPENSSL_SUFFIX");
+ if (suffix.isEmpty()) {
+#if (OPENSSL_VERSION_NUMBER >> 28) < 3 // major version < 3
+ suffix = "_1_1"_L1;
+#elif OPENSSL_VERSION_MAJOR == 3
+ suffix = "_3"_L1;
+#else
+ static_assert(false, "Unexpected openssl version");
+#endif
+ }
+
+ if (tryLoad("ssl"_L1 + suffix, -1))
+ return lib;
+ }
+#endif
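+
+// For example (illustrative, not part of the original change): with OpenSSL 3
+// the suffix defaults to "_3", so the lookup above asks QLibrary for "ssl_3",
+// i.e. libssl_3.so on Android.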
+
+ if (tryLoad("ssl"_L1, SSL_NEEDED_SOVERSION ""_L1))
+ return lib;
+
+ return {};
+};
+
+
+BEGIN_INIT_FUNCS("ssl", loadLib)
+
+// BN functions
+
+INIT_FUNC(BN_value_one);
+INIT_FUNC(BN_mod_word);
+
+INIT_FUNC(BN_div_word)
+INIT_FUNC(BN_mul_word)
+INIT_FUNC(BN_add_word)
+INIT_FUNC(BN_sub_word)
+INIT_FUNC(BN_set_word)
+INIT_FUNC(BN_new)
+INIT_FUNC(BN_cmp)
+
+INIT_FUNC(BN_free);
+
+INIT_FUNC(BN_copy);
+
+INIT_FUNC(BN_CTX_new);
+
+INIT_FUNC(BN_CTX_free);
+INIT_FUNC(BN_CTX_start);
+
+INIT_FUNC(BN_CTX_get);
+INIT_FUNC(BN_CTX_end);
+
+INIT_FUNC(BN_rand);
+INIT_FUNC(BN_mod_exp);
+
+INIT_FUNC(BN_num_bits);
+INIT_FUNC(BN_num_bits_word);
+
+INIT_FUNC(BN_bn2hex);
+INIT_FUNC(BN_bn2dec);
+
+INIT_FUNC(BN_hex2bn);
+INIT_FUNC(BN_dec2bn);
+INIT_FUNC(BN_asc2bn);
+
+INIT_FUNC(BN_bn2bin);
+INIT_FUNC(BN_bin2bn);
+
+// BIO-related functions
+
+INIT_FUNC(BIO_new);
+INIT_FUNC(BIO_free);
+
+INIT_FUNC(BIO_read);
+INIT_FUNC(BIO_write);
+INIT_FUNC(BIO_s_mem);
+
+INIT_FUNC(BIO_set_data);
+
+INIT_FUNC(BIO_get_data);
+INIT_FUNC(BIO_set_init);
+
+INIT_FUNC(BIO_set_flags);
+INIT_FUNC(BIO_test_flags);
+INIT_FUNC(BIO_clear_flags);
+
+INIT_FUNC(BIO_meth_new);
+INIT_FUNC(BIO_meth_free);
+
+INIT_FUNC(BIO_meth_set_write);
+INIT_FUNC(BIO_meth_set_read);
+INIT_FUNC(BIO_meth_set_puts);
+INIT_FUNC(BIO_meth_set_gets);
+INIT_FUNC(BIO_meth_set_ctrl);
+INIT_FUNC(BIO_meth_set_create);
+INIT_FUNC(BIO_meth_set_destroy);
+INIT_FUNC(BIO_meth_set_callback_ctrl);
+
+// SSL functions
+
+INIT_FUNC(SSL_CTX_new);
+INIT_FUNC(SSL_CTX_up_ref);
+INIT_FUNC(SSL_CTX_free);
+
+INIT_FUNC(SSL_new);
+INIT_FUNC(SSL_up_ref);
+INIT_FUNC(SSL_free);
+
+INIT_FUNC(SSL_accept);
+INIT_FUNC(SSL_stateless);
+INIT_FUNC(SSL_connect);
+INIT_FUNC(SSL_read);
+INIT_FUNC(SSL_peek);
+INIT_FUNC(SSL_write);
+INIT_FUNC(SSL_ctrl);
+INIT_FUNC(SSL_shutdown);
+INIT_FUNC(SSL_set_bio);
+
+// options are unsigned long in openssl 1.1.1, and uint64 in 3.x.x
+INIT_FUNC(SSL_CTX_set_options);
+
+INIT_FUNC(SSL_get_error);
+INIT_FUNC(SSL_CTX_load_verify_locations);
+
+INIT_FUNC(SSL_CTX_set_verify);
+INIT_FUNC(SSL_CTX_use_PrivateKey);
+
+INIT_FUNC(SSL_CTX_use_PrivateKey_file);
+INIT_FUNC(SSL_CTX_use_certificate_chain_file);
+
+INIT_FUNC(ERR_get_error);
+
+INIT_FUNC(ERR_error_string);
+
+// TLS functions
+
+INIT_FUNC(TLS_client_method);
+INIT_FUNC(TLS_server_method);
+
+// RAND functions
+
+INIT_FUNC(RAND_bytes);
+
+END_INIT_FUNCS()
+
+//////////// Define
+
+// BN functions
+
+DEFINE_FUNC(BN_value_one, 0);
+DEFINE_FUNC(BN_mod_word, 2);
+
+DEFINE_FUNC(BN_div_word, 2)
+DEFINE_FUNC(BN_mul_word, 2)
+DEFINE_FUNC(BN_add_word, 2)
+DEFINE_FUNC(BN_sub_word, 2)
+DEFINE_FUNC(BN_set_word, 2)
+DEFINE_FUNC(BN_new, 0)
+DEFINE_FUNC(BN_cmp, 2)
+
+DEFINE_FUNC(BN_free, 1);
+
+DEFINE_FUNC(BN_copy, 2);
+
+DEFINE_FUNC(BN_CTX_new, 0);
+
+DEFINE_FUNC(BN_CTX_free, 1);
+DEFINE_FUNC(BN_CTX_start, 1);
+
+DEFINE_FUNC(BN_CTX_get, 1);
+DEFINE_FUNC(BN_CTX_end, 1);
+
+DEFINE_FUNC(BN_rand, 4);
+DEFINE_FUNC(BN_mod_exp, 5);
+
+DEFINE_FUNC(BN_num_bits, 1);
+DEFINE_FUNC(BN_num_bits_word, 1);
+
+DEFINE_FUNC(BN_bn2hex, 1);
+DEFINE_FUNC(BN_bn2dec, 1);
+
+DEFINE_FUNC(BN_hex2bn, 2);
+DEFINE_FUNC(BN_dec2bn, 2);
+DEFINE_FUNC(BN_asc2bn, 2);
+
+DEFINE_FUNC(BN_bn2bin, 2);
+DEFINE_FUNC(BN_bin2bn, 3);
+
+// BIO-related functions
+
+DEFINE_FUNC(BIO_new, 1);
+DEFINE_FUNC(BIO_free, 1);
+
+DEFINE_FUNC(BIO_read, 3, -1);
+DEFINE_FUNC(BIO_write, 3, -1);
+DEFINE_FUNC(BIO_s_mem, 0);
+
+DEFINE_FUNC(BIO_set_data, 2);
+
+DEFINE_FUNC(BIO_get_data, 1);
+DEFINE_FUNC(BIO_set_init, 2);
+
+DEFINE_FUNC(BIO_set_flags, 2);
+DEFINE_FUNC(BIO_test_flags, 2);
+DEFINE_FUNC(BIO_clear_flags, 2);
+
+DEFINE_FUNC(BIO_meth_new, 2);
+DEFINE_FUNC(BIO_meth_free, 1);
+
+DEFINE_FUNC(BIO_meth_set_write, 2);
+DEFINE_FUNC(BIO_meth_set_read, 2);
+DEFINE_FUNC(BIO_meth_set_puts, 2);
+DEFINE_FUNC(BIO_meth_set_gets, 2);
+DEFINE_FUNC(BIO_meth_set_ctrl, 2);
+DEFINE_FUNC(BIO_meth_set_create, 2);
+DEFINE_FUNC(BIO_meth_set_destroy, 2);
+DEFINE_FUNC(BIO_meth_set_callback_ctrl, 2);
+
+// SSL functions
+
+DEFINE_FUNC(SSL_CTX_new, 1);
+DEFINE_FUNC(SSL_CTX_up_ref, 1);
+DEFINE_FUNC(SSL_CTX_free, 1);
+
+DEFINE_FUNC(SSL_new, 1);
+DEFINE_FUNC(SSL_up_ref, 1);
+DEFINE_FUNC(SSL_free, 1);
+
+DEFINE_FUNC(SSL_accept, 1);
+DEFINE_FUNC(SSL_stateless, 1);
+DEFINE_FUNC(SSL_connect, 1);
+DEFINE_FUNC(SSL_read, 3, -1);
+DEFINE_FUNC(SSL_peek, 3);
+DEFINE_FUNC(SSL_write, 3, -1);
+DEFINE_FUNC(SSL_ctrl, 4);
+DEFINE_FUNC(SSL_shutdown, 1);
+DEFINE_FUNC(SSL_set_bio, 3);
+
+// options are unsigned long in openssl 1.1.1, and uint64 in 3.x.x
+DEFINE_FUNC(SSL_CTX_set_options, 2);
+
+DEFINE_FUNC(SSL_get_error, 2);
+DEFINE_FUNC(SSL_CTX_load_verify_locations, 3, -1);
+
+DEFINE_FUNC(SSL_CTX_set_verify, 3);
+DEFINE_FUNC(SSL_CTX_use_PrivateKey, 2);
+
+DEFINE_FUNC(SSL_CTX_use_PrivateKey_file, 3);
+DEFINE_FUNC(SSL_CTX_use_certificate_chain_file, 2);
+
+DEFINE_FUNC(ERR_get_error, 0);
+
+static char ErrorString[] = "Ssl not found";
+DEFINE_FUNC(ERR_error_string, 2, ErrorString);
+
+// TLS functions
+
+DEFINE_FUNC(TLS_client_method, 0);
+DEFINE_FUNC(TLS_server_method, 0);
+
+// RAND functions
+
+DEFINE_FUNC(RAND_bytes, 2);
diff --git a/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-drm.cpp b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-drm.cpp
new file mode 100644
index 000000000..655a6b2b6
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-drm.cpp
@@ -0,0 +1,14 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QtMultimedia/private/qsymbolsresolveutils_p.h>
+
+#include <va/va_drm.h>
+
+CHECK_VERSIONS("va-drm", VA_DRM_NEEDED_SOVERSION, VA_MAJOR_VERSION + 1);
+
+BEGIN_INIT_FUNCS("va-drm", VA_DRM_NEEDED_SOVERSION)
+INIT_FUNC(vaGetDisplayDRM)
+END_INIT_FUNCS()
+
+DEFINE_FUNC(vaGetDisplayDRM, 1);
diff --git a/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-x11.cpp b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-x11.cpp
new file mode 100644
index 000000000..3bada9e69
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-x11.cpp
@@ -0,0 +1,14 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QtMultimedia/private/qsymbolsresolveutils_p.h>
+
+#include <va/va_x11.h>
+
+CHECK_VERSIONS("va-x11", VA_X11_NEEDED_SOVERSION, VA_MAJOR_VERSION + 1);
+
+BEGIN_INIT_FUNCS("va-x11", VA_X11_NEEDED_SOVERSION)
+INIT_FUNC(vaGetDisplay)
+END_INIT_FUNCS()
+
+DEFINE_FUNC(vaGetDisplay, 1);
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegvaapisymbols.cpp b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va.cpp
index ed2532e73..cfd2e5686 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegvaapisymbols.cpp
+++ b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va.cpp
@@ -1,64 +1,96 @@
-// Copyright (C) 2023 The Qt Company Ltd.
+// Copyright (C) 2024 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-#include <QtCore/qlibrary.h>
-
-#include "qffmpegsymbolsresolveutils_p.h"
-
-#include <QtCore/qglobal.h>
-#include <qstringliteral.h>
+#include <QtMultimedia/private/qsymbolsresolveutils_p.h>
#include <va/va.h>
-#ifdef DYNAMIC_RESOLVE_VA_DRM_SYMBOLS
-#include <va/va_drm.h>
-#endif
-#ifdef DYNAMIC_RESOLVE_VA_X11_SYMBOLS
-#include <va/va_x11.h>
-#endif
#include <va/va_str.h>
-QT_BEGIN_NAMESPACE
-
-static Libs loadLibs()
-{
- Libs libs;
- libs.push_back(std::make_unique<QLibrary>("va"));
-#ifdef DYNAMIC_RESOLVE_VA_DRM_SYMBOLS
- libs.push_back(std::make_unique<QLibrary>("va-drm"));
+// VAAPI generates the actual *.so name according to the rule in:
+// https://github.com/intel/libva/blob/master/configure.ac
+//
+// The library name is generated as libva.<x>.<y>.0, where
+// <x> = VA-API major version + 1
+// <y> = 100 * VA-API minor version + VA-API micro version
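+//
+// For example (illustrative): VA-API 1.20.0 gives <x> = 1 + 1 = 2 and
+// <y> = 100 * 20 + 0 = 2000, i.e. a library named along the lines of
+// libva.so.2.2000.0.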
+CHECK_VERSIONS("va", VA_NEEDED_SOVERSION, VA_MAJOR_VERSION + 1);
+
+#ifdef Q_FFMPEG_PLUGIN_STUBS_ONLY
+constexpr const char *loggingName = "va(in plugin)";
+#else
+constexpr const char *loggingName = nullptr;
#endif
-#ifdef DYNAMIC_RESOLVE_VA_X11_SYMBOLS
- libs.push_back(std::make_unique<QLibrary>("va-x11"));
-#endif
+BEGIN_INIT_FUNCS("va", VA_NEEDED_SOVERSION, loggingName)
+
+
+INIT_FUNC(vaExportSurfaceHandle);
+INIT_FUNC(vaSyncSurface);
+INIT_FUNC(vaQueryVendorString);
- if (LibSymbolsResolver::tryLoad(libs))
- return libs;
+#ifndef Q_FFMPEG_PLUGIN_STUBS_ONLY
- return {};
-}
+INIT_FUNC(vaInitialize);
+INIT_FUNC(vaTerminate);
+INIT_FUNC(vaErrorStr);
+INIT_FUNC(vaSetErrorCallback);
+INIT_FUNC(vaSetInfoCallback);
-constexpr size_t symbolsCount = 40
+INIT_FUNC(vaCreateImage);
+INIT_FUNC(vaGetImage);
+INIT_FUNC(vaPutImage);
+INIT_FUNC(vaDeriveImage);
+INIT_FUNC(vaDestroyImage);
+INIT_FUNC(vaQueryImageFormats);
+
+INIT_FUNC(vaBeginPicture);
+INIT_FUNC(vaRenderPicture);
+INIT_FUNC(vaEndPicture);
+
+INIT_FUNC(vaCreateBuffer);
+INIT_FUNC(vaMapBuffer);
+INIT_FUNC(vaUnmapBuffer);
#if VA_CHECK_VERSION(1, 9, 0)
- + 1
+INIT_FUNC(vaSyncBuffer);
#endif
-#ifdef DYNAMIC_RESOLVE_VA_DRM_SYMBOLS
- + 1
-#endif
-#ifdef DYNAMIC_RESOLVE_VA_X11_SYMBOLS
- + 1
+INIT_FUNC(vaDestroyBuffer);
+
+INIT_FUNC(vaCreateSurfaces);
+INIT_FUNC(vaDestroySurfaces);
+
+INIT_FUNC(vaCreateConfig);
+INIT_FUNC(vaGetConfigAttributes);
+INIT_FUNC(vaMaxNumProfiles);
+INIT_FUNC(vaMaxNumImageFormats);
+INIT_FUNC(vaMaxNumEntrypoints);
+INIT_FUNC(vaQueryConfigProfiles);
+INIT_FUNC(vaQueryConfigEntrypoints);
+INIT_FUNC(vaQuerySurfaceAttributes);
+INIT_FUNC(vaDestroyConfig);
+
+INIT_FUNC(vaCreateContext);
+INIT_FUNC(vaDestroyContext);
+
+INIT_FUNC(vaProfileStr);
+INIT_FUNC(vaEntrypointStr);
+
+INIT_FUNC(vaGetDisplayAttributes);
+
+INIT_FUNC(vaSetDriverName);
+
+INIT_FUNC(vaAcquireBufferHandle);
+INIT_FUNC(vaReleaseBufferHandle);
+
#endif
- ;
-Q_GLOBAL_STATIC(LibSymbolsResolver, resolver, "VAAPI", symbolsCount, loadLibs);
+END_INIT_FUNCS()
-void resolveVAAPI()
-{
- resolver()->resolve();
-}
+constexpr auto emptyString = "";
-QT_END_NAMESPACE
+DEFINE_FUNC(vaExportSurfaceHandle, 5, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaSyncSurface, 2, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaQueryVendorString, 1, emptyString);
-QT_USE_NAMESPACE
+#ifndef Q_FFMPEG_PLUGIN_STUBS_ONLY
DEFINE_FUNC(vaInitialize, 3, VA_STATUS_ERROR_OPERATION_FAILED);
DEFINE_FUNC(vaTerminate, 1, VA_STATUS_ERROR_OPERATION_FAILED);
@@ -88,8 +120,6 @@ DEFINE_FUNC(vaSyncBuffer, 3, VA_STATUS_ERROR_OPERATION_FAILED);
DEFINE_FUNC(vaDestroyBuffer, 2, VA_STATUS_ERROR_OPERATION_FAILED);
DEFINE_FUNC(vaCreateSurfaces, 8, VA_STATUS_ERROR_OPERATION_FAILED);
-DEFINE_FUNC(vaSyncSurface, 2, VA_STATUS_ERROR_OPERATION_FAILED);
-DEFINE_FUNC(vaExportSurfaceHandle, 5, VA_STATUS_ERROR_OPERATION_FAILED);
DEFINE_FUNC(vaDestroySurfaces, 3, VA_STATUS_ERROR_OPERATION_FAILED);
DEFINE_FUNC(vaCreateConfig, 6, VA_STATUS_ERROR_OPERATION_FAILED);
@@ -105,8 +135,7 @@ DEFINE_FUNC(vaDestroyConfig, 2, VA_STATUS_ERROR_OPERATION_FAILED);
DEFINE_FUNC(vaCreateContext, 8);
DEFINE_FUNC(vaDestroyContext, 2);
-constexpr auto emptyString = "";
-DEFINE_FUNC(vaQueryVendorString, 1, emptyString);
+
DEFINE_FUNC(vaProfileStr, 1, emptyString);
DEFINE_FUNC(vaEntrypointStr, 1, emptyString);
@@ -117,10 +146,5 @@ DEFINE_FUNC(vaSetDriverName, 2, VA_STATUS_ERROR_OPERATION_FAILED);
DEFINE_FUNC(vaAcquireBufferHandle, 3, VA_STATUS_ERROR_OPERATION_FAILED);
DEFINE_FUNC(vaReleaseBufferHandle, 2, VA_STATUS_ERROR_OPERATION_FAILED);
-#ifdef DYNAMIC_RESOLVE_VA_DRM_SYMBOLS
-DEFINE_FUNC(vaGetDisplayDRM, 1); // va-drm
#endif
-#ifdef DYNAMIC_RESOLVE_VA_X11_SYMBOLS
-DEFINE_FUNC(vaGetDisplay, 1); // va-x11
-#endif
diff --git a/src/plugins/multimedia/ffmpeg/symbolstubs/va.ver b/src/plugins/multimedia/ffmpeg/symbolstubs/va.ver
new file mode 100644
index 000000000..80c9a6dc0
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/symbolstubs/va.ver
@@ -0,0 +1,7 @@
+# Copyright (C) 2024 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+VA_API_0.33.0 {
+ global:
+ vaCreateSurfaces;
+};
diff --git a/src/plugins/multimedia/gstreamer/CMakeLists.txt b/src/plugins/multimedia/gstreamer/CMakeLists.txt
index 80fc1fce0..1ef1f9a36 100644
--- a/src/plugins/multimedia/gstreamer/CMakeLists.txt
+++ b/src/plugins/multimedia/gstreamer/CMakeLists.txt
@@ -8,8 +8,6 @@ qt_internal_add_module(QGstreamerMediaPluginPrivate
INTERNAL_MODULE
SOURCES
audio/qgstreameraudiodevice.cpp audio/qgstreameraudiodevice_p.h
- audio/qgstreameraudiosource.cpp audio/qgstreameraudiosource_p.h
- audio/qgstreameraudiosink.cpp audio/qgstreameraudiosink_p.h
audio/qgstreameraudiodecoder.cpp audio/qgstreameraudiodecoder_p.h
common/qglist_helper_p.h
common/qgst.cpp common/qgst_p.h
diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder.cpp b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder.cpp
index 0cfa28169..280b43cdb 100644
--- a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder.cpp
+++ b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder.cpp
@@ -21,8 +21,6 @@
#include <QtCore/qurl.h>
#include <QtCore/qloggingcategory.h>
-#define MAX_BUFFERS_IN_QUEUE 4
-
QT_BEGIN_NAMESPACE
static Q_LOGGING_CATEGORY(qLcGstreamerAudioDecoder, "qt.multimedia.gstreameraudiodecoder");
@@ -42,23 +40,22 @@ typedef enum {
QMaybe<QPlatformAudioDecoder *> QGstreamerAudioDecoder::create(QAudioDecoder *parent)
{
- QGstElement audioconvert = QGstElement::createFromFactory("audioconvert", "audioconvert");
- if (!audioconvert)
- return errorMessageCannotFindElement("audioconvert");
-
- QGstPipeline playbin = QGstPipeline::adopt(
- GST_PIPELINE_CAST(QGstElement::createFromFactory("playbin", "playbin").element()));
- if (!playbin)
- return errorMessageCannotFindElement("playbin");
+ static const auto error = qGstErrorMessageIfElementsNotAvailable("audioconvert", "playbin");
+ if (error)
+ return *error;
- return new QGstreamerAudioDecoder(playbin, audioconvert, parent);
+ return new QGstreamerAudioDecoder(parent);
}
-QGstreamerAudioDecoder::QGstreamerAudioDecoder(QGstPipeline playbin, QGstElement audioconvert,
- QAudioDecoder *parent)
+QGstreamerAudioDecoder::QGstreamerAudioDecoder(QAudioDecoder *parent)
: QPlatformAudioDecoder(parent),
- m_playbin(std::move(playbin)),
- m_audioConvert(std::move(audioconvert))
+ m_playbin{
+ QGstPipeline::adopt(GST_PIPELINE_CAST(
+ QGstElement::createFromFactory("playbin", "playbin").element())),
+ },
+ m_audioConvert{
+ QGstElement::createFromFactory("audioconvert", "audioconvert"),
+ }
{
// Sort out messages
m_playbin.installMessageFilter(this);
@@ -88,12 +85,9 @@ QGstreamerAudioDecoder::~QGstreamerAudioDecoder()
m_playbin.removeMessageFilter(this);
-#if QT_CONFIG(gstreamer_app)
delete m_appSrc;
-#endif
}
-#if QT_CONFIG(gstreamer_app)
void QGstreamerAudioDecoder::configureAppSrcElement([[maybe_unused]] GObject *object, GObject *orig,
[[maybe_unused]] GParamSpec *pspec,
QGstreamerAudioDecoder *self)
@@ -112,7 +106,6 @@ void QGstreamerAudioDecoder::configureAppSrcElement([[maybe_unused]] GObject *ob
});
qAppSrc->setup(self->mDevice);
}
-#endif
bool QGstreamerAudioDecoder::processBusMessage(const QGstreamerMessage &message)
{
@@ -336,14 +329,14 @@ void QGstreamerAudioDecoder::stop()
bufferAvailableChanged(false);
}
- if (m_position != -1) {
- m_position = -1;
- positionChanged(m_position);
+ if (m_position != invalidPosition) {
+ m_position = invalidPosition;
+ positionChanged(m_position.count());
}
- if (m_duration != -1) {
- m_duration = -1;
- durationChanged(m_duration);
+ if (m_duration != invalidDuration) {
+ m_duration = invalidDuration;
+ durationChanged(m_duration.count());
}
setIsDecoding(false);
@@ -364,6 +357,8 @@ void QGstreamerAudioDecoder::setAudioFormat(const QAudioFormat &format)
QAudioBuffer QGstreamerAudioDecoder::read()
{
+ using namespace std::chrono;
+
QAudioBuffer audioBuffer;
if (m_buffersAvailable == 0)
@@ -385,12 +380,16 @@ QAudioBuffer QGstreamerAudioDecoder::read()
if (format.isValid()) {
// XXX At the moment we have to copy data from GstBuffer into QAudioBuffer.
// We could improve performance by implementing QAbstractAudioBuffer for GstBuffer.
- qint64 position = getPositionFromBuffer(buffer);
- audioBuffer = QAudioBuffer(QByteArray(bufferData, bufferSize), format, position);
- position /= 1000; // convert to milliseconds
+ nanoseconds position = getPositionFromBuffer(buffer);
+ audioBuffer = QAudioBuffer{
+ QByteArray(bufferData, bufferSize),
+ format,
+ round<microseconds>(position).count(),
+ };
+ milliseconds positionInMs = round<milliseconds>(position);
if (position != m_position) {
- m_position = position;
- positionChanged(m_position);
+ m_position = positionInMs;
+ positionChanged(m_position.count());
}
}
gst_buffer_unmap(buffer, &mapInfo);
@@ -400,12 +399,12 @@ QAudioBuffer QGstreamerAudioDecoder::read()
qint64 QGstreamerAudioDecoder::position() const
{
- return m_position;
+ return m_position.count();
}
qint64 QGstreamerAudioDecoder::duration() const
{
- return m_duration;
+ return m_duration.count();
}
void QGstreamerAudioDecoder::processInvalidMedia(QAudioDecoder::Error errorCode, const QString& errorString)
@@ -452,6 +451,8 @@ void QGstreamerAudioDecoder::setAudioFlags(bool wantNativeAudio)
void QGstreamerAudioDecoder::addAppSink()
{
+ using namespace std::chrono_literals;
+
if (m_appSink)
return;
@@ -460,8 +461,17 @@ void QGstreamerAudioDecoder::addAppSink()
GstAppSinkCallbacks callbacks{};
callbacks.new_sample = new_sample;
m_appSink.setCallbacks(callbacks, this, nullptr);
- gst_app_sink_set_max_buffers(m_appSink.appSink(), MAX_BUFFERS_IN_QUEUE);
- gst_base_sink_set_sync(m_appSink.baseSink(), FALSE);
+
+#if GST_CHECK_VERSION(1, 24, 0)
+ static constexpr auto maxBufferTime = 500ms;
+ m_appSink.setMaxBufferTime(maxBufferTime);
+#else
+ static constexpr int maxBuffers = 16;
+ m_appSink.setMaxBuffers(maxBuffers);
+#endif
+
+ static constexpr bool sync = false;
+ m_appSink.setSync(sync);
QGstPipeline::modifyPipelineWhileNotRunning(m_playbin.getPipeline(), [&] {
m_outputBin.add(m_appSink);
@@ -485,14 +495,16 @@ void QGstreamerAudioDecoder::removeAppSink()
void QGstreamerAudioDecoder::updateDuration()
{
- int duration = m_playbin.duration() / 1000000;
+ std::optional<std::chrono::milliseconds> duration = m_playbin.durationInMs();
+ if (!duration)
+ duration = invalidDuration;
if (m_duration != duration) {
- m_duration = duration;
- durationChanged(m_duration);
+ m_duration = *duration;
+ durationChanged(m_duration.count());
}
- if (m_duration > 0)
+ if (m_duration.count() > 0)
m_durationQueries = 0;
if (m_durationQueries > 0) {
@@ -503,14 +515,15 @@ void QGstreamerAudioDecoder::updateDuration()
}
}
-qint64 QGstreamerAudioDecoder::getPositionFromBuffer(GstBuffer* buffer)
+std::chrono::nanoseconds QGstreamerAudioDecoder::getPositionFromBuffer(GstBuffer *buffer)
{
- qint64 position = GST_BUFFER_TIMESTAMP(buffer);
- if (position >= 0)
- position = position / G_GINT64_CONSTANT(1000); // microseconds
+ using namespace std::chrono;
+ using namespace std::chrono_literals;
+ nanoseconds position{ GST_BUFFER_TIMESTAMP(buffer) };
+ if (position >= 0ns)
+ return position;
else
- position = -1;
- return position;
+ return invalidPosition;
}
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder_p.h b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder_p.h
index eba1025fa..d2d259dde 100644
--- a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder_p.h
+++ b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder_p.h
@@ -23,12 +23,9 @@
#include <QtCore/qmutex.h>
#include <QtCore/qurl.h>
-#include <common/qgstpipeline_p.h>
#include <common/qgst_p.h>
-
-#if QT_CONFIG(gstreamer_app)
-# include <common/qgstappsource_p.h>
-#endif
+#include <common/qgstappsource_p.h>
+#include <common/qgstpipeline_p.h>
#include <gst/app/gstappsink.h>
@@ -68,15 +65,13 @@ private slots:
void updateDuration();
private:
- QGstreamerAudioDecoder(QGstPipeline playbin, QGstElement audioconvert, QAudioDecoder *parent);
+ explicit QGstreamerAudioDecoder(QAudioDecoder *parent);
-#if QT_CONFIG(gstreamer_app)
static GstFlowReturn new_sample(GstAppSink *sink, gpointer user_data);
GstFlowReturn newSample(GstAppSink *sink);
static void configureAppSrcElement(GObject *, GObject *, GParamSpec *,
QGstreamerAudioDecoder *_this);
-#endif
void setAudioFlags(bool wantNativeAudio);
void addAppSink();
@@ -85,7 +80,7 @@ private:
bool handlePlaybinMessage(const QGstreamerMessage &);
void processInvalidMedia(QAudioDecoder::Error errorCode, const QString &errorString);
- static qint64 getPositionFromBuffer(GstBuffer* buffer);
+ static std::chrono::nanoseconds getPositionFromBuffer(GstBuffer *buffer);
QGstPipeline m_playbin;
QGstBin m_outputBin;
@@ -98,8 +93,11 @@ private:
QAudioFormat mFormat;
int m_buffersAvailable = 0;
- qint64 m_position = -1;
- qint64 m_duration = -1;
+
+ static constexpr auto invalidDuration = std::chrono::milliseconds{ -1 };
+ static constexpr auto invalidPosition = std::chrono::milliseconds{ -1 };
+ std::chrono::milliseconds m_position{ invalidPosition };
+ std::chrono::milliseconds m_duration{ invalidDuration };
int m_durationQueries = 0;
diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice.cpp b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice.cpp
index 2c6b57e55..b22e40118 100644
--- a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice.cpp
+++ b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice.cpp
@@ -49,6 +49,39 @@ QGStreamerAudioDeviceInfo::QGStreamerAudioDeviceInfo(GstDevice *d, const QByteAr
preferredFormat.setSampleFormat(f);
}
-QGStreamerAudioDeviceInfo::~QGStreamerAudioDeviceInfo() = default;
+QGStreamerCustomAudioDeviceInfo::QGStreamerCustomAudioDeviceInfo(
+ const QByteArray &gstreamerPipeline, QAudioDevice::Mode mode)
+ : QAudioDevicePrivate{
+ gstreamerPipeline,
+ mode,
+ }
+{
+}
+
+QAudioDevice qMakeCustomGStreamerAudioInput(const QByteArray &gstreamerPipeline)
+{
+ auto deviceInfo = std::make_unique<QGStreamerCustomAudioDeviceInfo>(gstreamerPipeline,
+ QAudioDevice::Mode::Input);
+
+ return deviceInfo.release()->create();
+}
+
+QAudioDevice qMakeCustomGStreamerAudioOutput(const QByteArray &gstreamerPipeline)
+{
+ auto deviceInfo = std::make_unique<QGStreamerCustomAudioDeviceInfo>(gstreamerPipeline,
+ QAudioDevice::Mode::Output);
+
+ return deviceInfo.release()->create();
+}
+
+bool isCustomAudioDevice(const QAudioDevicePrivate *device)
+{
+ return dynamic_cast<const QGStreamerCustomAudioDeviceInfo *>(device);
+}
+
+bool isCustomAudioDevice(const QAudioDevice &device)
+{
+ return isCustomAudioDevice(device.handle());
+}
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice_p.h b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice_p.h
index dee0c40bc..403fd5e74 100644
--- a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice_p.h
+++ b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice_p.h
@@ -19,11 +19,11 @@
#include <QtCore/qstringlist.h>
#include <QtCore/qlist.h>
-#include "qaudio.h"
-#include "qaudiodevice.h"
-#include <private/qaudiodevice_p.h>
+#include <QtMultimedia/qaudio.h>
+#include <QtMultimedia/qaudiodevice.h>
+#include <QtMultimedia/private/qaudiodevice_p.h>
-#include <common/qgst_handle_types_p.h>
+#include <QtQGstreamerMediaPlugin/private/qgst_handle_types_p.h>
#include <gst/gst.h>
@@ -33,11 +33,22 @@ class QGStreamerAudioDeviceInfo : public QAudioDevicePrivate
{
public:
QGStreamerAudioDeviceInfo(GstDevice *gstDevice, const QByteArray &device, QAudioDevice::Mode mode);
- ~QGStreamerAudioDeviceInfo();
QGstDeviceHandle gstDevice;
};
+class QGStreamerCustomAudioDeviceInfo : public QAudioDevicePrivate
+{
+public:
+ QGStreamerCustomAudioDeviceInfo(const QByteArray &gstreamerPipeline, QAudioDevice::Mode mode);
+};
+
+bool isCustomAudioDevice(const QAudioDevicePrivate *device);
+bool isCustomAudioDevice(const QAudioDevice &device);
+
+QAudioDevice qMakeCustomGStreamerAudioInput(const QByteArray &gstreamerPipeline);
+QAudioDevice qMakeCustomGStreamerAudioOutput(const QByteArray &gstreamerPipeline);
+
QT_END_NAMESPACE
#endif
diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosink.cpp b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosink.cpp
deleted file mode 100644
index 6fd972524..000000000
--- a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosink.cpp
+++ /dev/null
@@ -1,376 +0,0 @@
-// Copyright (C) 2016 The Qt Company Ltd.
-// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-
-#include <QtCore/qcoreapplication.h>
-#include <QtCore/qdebug.h>
-#include <QtCore/qmath.h>
-#include <QtMultimedia/private/qaudiohelpers_p.h>
-
-#include <sys/types.h>
-#include <unistd.h>
-
-#include <audio/qgstreameraudiosink_p.h>
-#include <audio/qgstreameraudiodevice_p.h>
-#include <common/qgst_debug_p.h>
-#include <common/qgstappsource_p.h>
-#include <common/qgstpipeline_p.h>
-#include <common/qgstreamermessage_p.h>
-#include <common/qgstutils_p.h>
-
-#include <utility>
-
-QT_BEGIN_NAMESPACE
-
-QMaybe<QPlatformAudioSink *> QGStreamerAudioSink::create(const QAudioDevice &device, QObject *parent)
-{
- auto maybeAppSrc = QGstAppSource::create();
- if (!maybeAppSrc)
- return maybeAppSrc.error();
-
- QGstElement audioconvert = QGstElement::createFromFactory("audioconvert", "conv");
- if (!audioconvert)
- return errorMessageCannotFindElement("audioconvert");
-
- QGstElement volume = QGstElement::createFromFactory("volume", "volume");
- if (!volume)
- return errorMessageCannotFindElement("volume");
-
- return new QGStreamerAudioSink(device, maybeAppSrc.value(), audioconvert, volume, parent);
-}
-
-QGStreamerAudioSink::QGStreamerAudioSink(const QAudioDevice &device, QGstAppSource *appsrc,
- QGstElement audioconvert, QGstElement volume,
- QObject *parent)
- : QPlatformAudioSink(parent),
- m_device(device.id()),
- gstPipeline(QGstPipeline::create("audioSinkPipeline")),
- gstVolume(std::move(volume)),
- m_appSrc(appsrc)
-{
- gstPipeline.installMessageFilter(this);
-
- connect(m_appSrc, &QGstAppSource::bytesProcessed, this, &QGStreamerAudioSink::bytesProcessedByAppSrc);
- connect(m_appSrc, &QGstAppSource::noMoreData, this, &QGStreamerAudioSink::needData);
- gstAppSrc = m_appSrc->element();
-
- QGstElement queue = QGstElement::createFromFactory("queue", "audioSinkQueue");
-
- if (m_volume != 1.)
- gstVolume.set("volume", m_volume);
-
- // link decodeBin to audioconvert in a callback once we get a pad from the decoder
- // g_signal_connect (gstDecodeBin, "pad-added", (GCallback) padAdded, conv);
-
- const auto *audioInfo = static_cast<const QGStreamerAudioDeviceInfo *>(device.handle());
- gstOutput = QGstElement::createFromDevice(audioInfo->gstDevice, nullptr);
-
- gstPipeline.add(gstAppSrc, queue, /*gstDecodeBin, */ audioconvert, gstVolume, gstOutput);
- qLinkGstElements(gstAppSrc, queue, audioconvert, gstVolume, gstOutput);
-}
-
-QGStreamerAudioSink::~QGStreamerAudioSink()
-{
- close();
- gstPipeline.removeMessageFilter(this);
-
- gstPipeline = {};
- gstVolume = {};
- gstAppSrc = {};
- delete m_appSrc;
- m_appSrc = nullptr;
-}
-
-void QGStreamerAudioSink::setError(QAudio::Error error)
-{
- if (m_errorState == error)
- return;
-
- m_errorState = error;
- emit errorChanged(error);
-}
-
-QAudio::Error QGStreamerAudioSink::error() const
-{
- return m_errorState;
-}
-
-void QGStreamerAudioSink::setState(QAudio::State state)
-{
- if (m_deviceState == state)
- return;
-
- m_deviceState = state;
- emit stateChanged(state);
-}
-
-QAudio::State QGStreamerAudioSink::state() const
-{
- return m_deviceState;
-}
-
-void QGStreamerAudioSink::start(QIODevice *device)
-{
- setState(QAudio::StoppedState);
- setError(QAudio::NoError);
-
- close();
-
- if (!m_format.isValid()) {
- setError(QAudio::OpenError);
- return;
- }
-
- m_pullMode = true;
- m_audioSource = device;
-
- if (!open()) {
- m_audioSource = nullptr;
- setError(QAudio::OpenError);
- return;
- }
-
- setState(QAudio::ActiveState);
-}
-
-QIODevice *QGStreamerAudioSink::start()
-{
- setState(QAudio::StoppedState);
- setError(QAudio::NoError);
-
- close();
-
- if (!m_format.isValid()) {
- setError(QAudio::OpenError);
- return nullptr;
- }
-
- m_pullMode = false;
-
- if (!open())
- return nullptr;
-
- m_audioSource = new GStreamerOutputPrivate(this);
- m_audioSource->open(QIODevice::WriteOnly|QIODevice::Unbuffered);
-
- setState(QAudio::IdleState);
-
- return m_audioSource;
-}
-
-#if 0
-static void padAdded(GstElement *element, GstPad *pad, gpointer data)
-{
- GstElement *other = static_cast<GstElement *>(data);
-
- QGString name { gst_pad_get_name(pad)};
- qDebug("A new pad %s was created for %s\n", name, gst_element_get_name(element));
-
- qDebug("element %s will be linked to %s\n",
- gst_element_get_name(element),
- gst_element_get_name(other));
- gst_element_link(element, other);
-}
-#endif
-
-bool QGStreamerAudioSink::processBusMessage(const QGstreamerMessage &message)
-{
- auto *msg = message.message();
- switch (GST_MESSAGE_TYPE (msg)) {
- case GST_MESSAGE_EOS:
- setState(QAudio::IdleState);
- break;
- case GST_MESSAGE_ERROR: {
- setError(QAudio::IOError);
- qDebug() << "Error:" << QCompactGstMessageAdaptor(message);
- break;
- }
- default:
- break;
- }
-
- return true;
-}
-
-bool QGStreamerAudioSink::open()
-{
- if (m_opened)
- return true;
-
- if (gstOutput.isNull()) {
- setError(QAudio::OpenError);
- setState(QAudio::StoppedState);
- return false;
- }
-
-// qDebug() << "GST caps:" << gst_caps_to_string(caps);
- m_appSrc->setup(m_audioSource, m_audioSource ? m_audioSource->pos() : 0);
- m_appSrc->setAudioFormat(m_format);
-
- /* run */
- gstPipeline.setState(GST_STATE_PLAYING);
-
- m_opened = true;
-
- m_timeStamp.restart();
- m_bytesProcessed = 0;
-
- return true;
-}
-
-void QGStreamerAudioSink::close()
-{
- if (!m_opened)
- return;
-
- if (!gstPipeline.setStateSync(GST_STATE_NULL))
- qWarning() << "failed to close the audio output stream";
-
- if (!m_pullMode && m_audioSource)
- delete m_audioSource;
- m_audioSource = nullptr;
- m_opened = false;
-}
-
-qint64 QGStreamerAudioSink::write(const char *data, qint64 len)
-{
- if (!len)
- return 0;
- if (m_errorState == QAudio::UnderrunError)
- m_errorState = QAudio::NoError;
-
- m_appSrc->write(data, len);
- return len;
-}
-
-void QGStreamerAudioSink::stop()
-{
- if (m_deviceState == QAudio::StoppedState)
- return;
-
- close();
-
- setError(QAudio::NoError);
- setState(QAudio::StoppedState);
-}
-
-qsizetype QGStreamerAudioSink::bytesFree() const
-{
- if (m_deviceState != QAudio::ActiveState && m_deviceState != QAudio::IdleState)
- return 0;
-
- return m_appSrc->canAcceptMoreData() ? 4096*4 : 0;
-}
-
-void QGStreamerAudioSink::setBufferSize(qsizetype value)
-{
- m_bufferSize = value;
- if (!gstAppSrc.isNull())
- gst_app_src_set_max_bytes(GST_APP_SRC(gstAppSrc.element()), value);
-}
-
-qsizetype QGStreamerAudioSink::bufferSize() const
-{
- return m_bufferSize;
-}
-
-qint64 QGStreamerAudioSink::processedUSecs() const
-{
- qint64 result = qint64(1000000) * m_bytesProcessed /
- m_format.bytesPerFrame() /
- m_format.sampleRate();
-
- return result;
-}
-
-void QGStreamerAudioSink::resume()
-{
- if (m_deviceState == QAudio::SuspendedState) {
- m_appSrc->resume();
- gstPipeline.setState(GST_STATE_PLAYING);
-
- setState(m_suspendedInState);
- setError(QAudio::NoError);
- }
-}
-
-void QGStreamerAudioSink::setFormat(const QAudioFormat &format)
-{
- m_format = format;
-}
-
-QAudioFormat QGStreamerAudioSink::format() const
-{
- return m_format;
-}
-
-void QGStreamerAudioSink::suspend()
-{
- if (m_deviceState == QAudio::ActiveState || m_deviceState == QAudio::IdleState) {
- m_suspendedInState = m_deviceState;
- setError(QAudio::NoError);
- setState(QAudio::SuspendedState);
-
- gstPipeline.setState(GST_STATE_PAUSED);
- m_appSrc->suspend();
- // ### elapsed time
- }
-}
-
-void QGStreamerAudioSink::reset()
-{
- stop();
-}
-
-GStreamerOutputPrivate::GStreamerOutputPrivate(QGStreamerAudioSink *audio)
-{
- m_audioDevice = audio;
-}
-
-qint64 GStreamerOutputPrivate::readData(char *data, qint64 len)
-{
- Q_UNUSED(data);
- Q_UNUSED(len);
-
- return 0;
-}
-
-qint64 GStreamerOutputPrivate::writeData(const char *data, qint64 len)
-{
- if (m_audioDevice->state() == QAudio::IdleState)
- m_audioDevice->setState(QAudio::ActiveState);
- return m_audioDevice->write(data, len);
-}
-
-void QGStreamerAudioSink::setVolume(qreal vol)
-{
- if (m_volume == vol)
- return;
-
- m_volume = vol;
- if (!gstVolume.isNull())
- gstVolume.set("volume", vol);
-}
-
-qreal QGStreamerAudioSink::volume() const
-{
- return m_volume;
-}
-
-void QGStreamerAudioSink::bytesProcessedByAppSrc(int bytes)
-{
- m_bytesProcessed += bytes;
- setState(QAudio::ActiveState);
- setError(QAudio::NoError);
-}
-
-void QGStreamerAudioSink::needData()
-{
- if (state() != QAudio::StoppedState && state() != QAudio::IdleState) {
- setState(QAudio::IdleState);
- setError(m_audioSource && m_audioSource->atEnd() ? QAudio::NoError : QAudio::UnderrunError);
- }
-}
-
-QT_END_NAMESPACE
-
-#include "moc_qgstreameraudiosink_p.cpp"
diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosink_p.h b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosink_p.h
deleted file mode 100644
index 1aadb2290..000000000
--- a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosink_p.h
+++ /dev/null
@@ -1,124 +0,0 @@
-// Copyright (C) 2016 The Qt Company Ltd.
-// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-
-#ifndef QAUDIOOUTPUTGSTREAMER_H
-#define QAUDIOOUTPUTGSTREAMER_H
-
-//
-// W A R N I N G
-// -------------
-//
-// This file is not part of the Qt API. It exists purely as an
-// implementation detail. This header file may change from version to
-// version without notice, or even be removed.
-//
-// We mean it.
-//
-
-#include <QtCore/qfile.h>
-#include <QtCore/qtimer.h>
-#include <QtCore/qstring.h>
-#include <QtCore/qstringlist.h>
-#include <QtCore/qelapsedtimer.h>
-#include <QtCore/qiodevice.h>
-#include <QtCore/private/qringbuffer_p.h>
-
-#include "qaudio.h"
-#include "qaudiodevice.h"
-#include <private/qaudiosystem_p.h>
-#include <private/qmultimediautils_p.h>
-
-#include <common/qgst_p.h>
-#include <common/qgstpipeline_p.h>
-
-QT_BEGIN_NAMESPACE
-
-class QGstAppSource;
-
-class QGStreamerAudioSink
- : public QPlatformAudioSink,
- public QGstreamerBusMessageFilter
-{
- friend class GStreamerOutputPrivate;
- Q_OBJECT
-
-public:
- static QMaybe<QPlatformAudioSink *> create(const QAudioDevice &device, QObject *parent);
- ~QGStreamerAudioSink();
-
- void start(QIODevice *device) override;
- QIODevice *start() override;
- void stop() override;
- void reset() override;
- void suspend() override;
- void resume() override;
- qsizetype bytesFree() const override;
- void setBufferSize(qsizetype value) override;
- qsizetype bufferSize() const override;
- qint64 processedUSecs() const override;
- QAudio::Error error() const override;
- QAudio::State state() const override;
- void setFormat(const QAudioFormat &format) override;
- QAudioFormat format() const override;
-
- void setVolume(qreal volume) override;
- qreal volume() const override;
-
-private Q_SLOTS:
- void bytesProcessedByAppSrc(int bytes);
- void needData();
-
-private:
- QGStreamerAudioSink(const QAudioDevice &device, QGstAppSource *appsrc, QGstElement audioconvert,
- QGstElement volume, QObject *parent);
-
- void setState(QAudio::State state);
- void setError(QAudio::Error error);
-
- bool processBusMessage(const QGstreamerMessage &message) override;
-
- bool open();
- void close();
- qint64 write(const char *data, qint64 len);
-
-private:
- QByteArray m_device;
- QAudioFormat m_format;
- QAudio::Error m_errorState = QAudio::NoError;
- QAudio::State m_deviceState = QAudio::StoppedState;
- QAudio::State m_suspendedInState = QAudio::SuspendedState;
- bool m_pullMode = true;
- bool m_opened = false;
- QIODevice *m_audioSource = nullptr;
- int m_bufferSize = 0;
- qint64 m_bytesProcessed = 0;
- QElapsedTimer m_timeStamp;
- qreal m_volume = 1.;
- QByteArray pushData;
-
- QGstPipeline gstPipeline;
- QGstElement gstOutput;
- QGstElement gstVolume;
- QGstElement gstAppSrc;
- QGstAppSource *m_appSrc = nullptr;
-};
-
-class GStreamerOutputPrivate : public QIODevice
-{
- friend class QGStreamerAudioSink;
- Q_OBJECT
-
-public:
- explicit GStreamerOutputPrivate(QGStreamerAudioSink *audio);
-
-protected:
- qint64 readData(char *data, qint64 len) override;
- qint64 writeData(const char *data, qint64 len) override;
-
-private:
- QGStreamerAudioSink *m_audioDevice;
-};
-
-QT_END_NAMESPACE
-
-#endif
diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosource.cpp b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosource.cpp
deleted file mode 100644
index 829d116a2..000000000
--- a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosource.cpp
+++ /dev/null
@@ -1,366 +0,0 @@
-// Copyright (C) 2016 The Qt Company Ltd.
-// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-
-#include <QtCore/qcoreapplication.h>
-#include <QtCore/qdebug.h>
-#include <QtCore/qmath.h>
-#include <QtMultimedia/private/qaudiohelpers_p.h>
-
-#include "qgstreameraudiosource_p.h"
-#include "qgstreameraudiodevice_p.h"
-#include "common/qgst_p.h"
-#include "common/qgst_debug_p.h"
-
-#include <sys/types.h>
-#include <unistd.h>
-
-Q_DECLARE_OPAQUE_POINTER(GstSample *);
-Q_DECLARE_METATYPE(GstSample *);
-
-QT_BEGIN_NAMESPACE
-
-QGStreamerAudioSource::QGStreamerAudioSource(const QAudioDevice &device, QObject *parent)
- : QPlatformAudioSource(parent),
- m_info(device),
- m_device(device.id())
-{
- qRegisterMetaType<GstSample *>();
-}
-
-QGStreamerAudioSource::~QGStreamerAudioSource()
-{
- close();
-}
-
-void QGStreamerAudioSource::setError(QAudio::Error error)
-{
- if (m_errorState == error)
- return;
-
- m_errorState = error;
- emit errorChanged(error);
-}
-
-QAudio::Error QGStreamerAudioSource::error() const
-{
- return m_errorState;
-}
-
-void QGStreamerAudioSource::setState(QAudio::State state)
-{
- if (m_deviceState == state)
- return;
-
- m_deviceState = state;
- emit stateChanged(state);
-}
-
-QAudio::State QGStreamerAudioSource::state() const
-{
- return m_deviceState;
-}
-
-void QGStreamerAudioSource::setFormat(const QAudioFormat &format)
-{
- if (m_deviceState == QAudio::StoppedState)
- m_format = format;
-}
-
-QAudioFormat QGStreamerAudioSource::format() const
-{
- return m_format;
-}
-
-void QGStreamerAudioSource::start(QIODevice *device)
-{
- setState(QAudio::StoppedState);
- setError(QAudio::NoError);
-
- close();
-
- if (!open())
- return;
-
- m_pullMode = true;
- m_audioSink = device;
-
- setState(QAudio::ActiveState);
-}
-
-QIODevice *QGStreamerAudioSource::start()
-{
- setState(QAudio::StoppedState);
- setError(QAudio::NoError);
-
- close();
-
- if (!open())
- return nullptr;
-
- m_pullMode = false;
- m_audioSink = new GStreamerInputPrivate(this);
- m_audioSink->open(QIODevice::ReadOnly | QIODevice::Unbuffered);
-
- setState(QAudio::IdleState);
-
- return m_audioSink;
-}
-
-void QGStreamerAudioSource::stop()
-{
- if (m_deviceState == QAudio::StoppedState)
- return;
-
- close();
-
- setError(QAudio::NoError);
- setState(QAudio::StoppedState);
-}
-
-bool QGStreamerAudioSource::open()
-{
- if (m_opened)
- return true;
-
- const auto *deviceInfo = static_cast<const QGStreamerAudioDeviceInfo *>(m_info.handle());
- if (!deviceInfo->gstDevice) {
- setError(QAudio::OpenError);
- setState(QAudio::StoppedState);
- return false;
- }
-
- gstInput = QGstElement::createFromDevice(deviceInfo->gstDevice);
- if (gstInput.isNull()) {
- setError(QAudio::OpenError);
- setState(QAudio::StoppedState);
- return false;
- }
-
- auto gstCaps = QGstUtils::capsForAudioFormat(m_format);
-
- if (gstCaps.isNull()) {
- setError(QAudio::OpenError);
- setState(QAudio::StoppedState);
- return false;
- }
-
-
-#ifdef DEBUG_AUDIO
- qDebug() << "Opening input" << QTime::currentTime();
- qDebug() << "Caps: " << gst_caps_to_string(gstCaps);
-#endif
-
- gstPipeline = QGstPipeline::create("audioSourcePipeline");
-
- auto *gstBus = gst_pipeline_get_bus(gstPipeline.pipeline());
- gst_bus_add_watch(gstBus, &QGStreamerAudioSource::busMessage, this);
- gst_object_unref (gstBus);
-
- gstAppSink = createAppSink();
- gstAppSink.set("caps", gstCaps);
-
- QGstElement conv = QGstElement::createFromFactory("audioconvert", "conv");
- gstVolume = QGstElement::createFromFactory("volume", "volume");
- Q_ASSERT(gstVolume);
- if (m_volume != 1.)
- gstVolume.set("volume", m_volume);
-
- gstPipeline.add(gstInput, gstVolume, conv, gstAppSink);
- qLinkGstElements(gstInput, gstVolume, conv, gstAppSink);
-
- gstPipeline.setState(GST_STATE_PLAYING);
-
- m_opened = true;
-
- m_timeStamp.restart();
- m_elapsedTimeOffset = 0;
- m_bytesWritten = 0;
-
- return true;
-}
-
-void QGStreamerAudioSource::close()
-{
- if (!m_opened)
- return;
-
- gstPipeline.setState(GST_STATE_NULL);
- gstPipeline = {};
- gstVolume = {};
- gstAppSink = {};
- gstInput = {};
-
- if (!m_pullMode && m_audioSink) {
- delete m_audioSink;
- }
- m_audioSink = nullptr;
- m_opened = false;
-}
-
-gboolean QGStreamerAudioSource::busMessage(GstBus *, GstMessage *msg, gpointer user_data)
-{
- QGStreamerAudioSource *input = static_cast<QGStreamerAudioSource *>(user_data);
- switch (GST_MESSAGE_TYPE (msg)) {
- case GST_MESSAGE_EOS:
- input->stop();
- break;
- case GST_MESSAGE_ERROR: {
- input->setError(QAudio::IOError);
- qDebug() << "Error:" << QCompactGstMessageAdaptor(msg);
- break;
- }
- default:
- break;
- }
- return false;
-}
-
-qsizetype QGStreamerAudioSource::bytesReady() const
-{
- return m_buffer.size();
-}
-
-void QGStreamerAudioSource::resume()
-{
- if (m_deviceState == QAudio::SuspendedState || m_deviceState == QAudio::IdleState) {
- gstPipeline.setState(GST_STATE_PLAYING);
- setState(QAudio::ActiveState);
- setError(QAudio::NoError);
- }
-}
-
-void QGStreamerAudioSource::setVolume(qreal vol)
-{
- if (m_volume == vol)
- return;
-
- m_volume = vol;
- if (!gstVolume.isNull())
- gstVolume.set("volume", vol);
-}
-
-qreal QGStreamerAudioSource::volume() const
-{
- return m_volume;
-}
-
-void QGStreamerAudioSource::setBufferSize(qsizetype value)
-{
- m_bufferSize = value;
-}
-
-qsizetype QGStreamerAudioSource::bufferSize() const
-{
- return m_bufferSize;
-}
-
-qint64 QGStreamerAudioSource::processedUSecs() const
-{
- return m_format.durationForBytes(m_bytesWritten);
-}
-
-void QGStreamerAudioSource::suspend()
-{
- if (m_deviceState == QAudio::ActiveState) {
- setError(QAudio::NoError);
- setState(QAudio::SuspendedState);
-
- gstPipeline.setState(GST_STATE_PAUSED);
- }
-}
-
-void QGStreamerAudioSource::reset()
-{
- stop();
- m_buffer.clear();
-}
-
-//#define MAX_BUFFERS_IN_QUEUE 4
-
-QGstAppSink QGStreamerAudioSource::createAppSink()
-{
- QGstAppSink sink = QGstAppSink::create("appsink");
-
- GstAppSinkCallbacks callbacks{};
- callbacks.eos = eos;
- callbacks.new_sample = new_sample;
- sink.setCallbacks(callbacks, this, nullptr);
- // gst_app_sink_set_max_buffers(sink.appSink(), MAX_BUFFERS_IN_QUEUE);
- gst_base_sink_set_sync(sink.baseSink(), FALSE);
-
- return sink;
-}
-
-void QGStreamerAudioSource::newDataAvailable(QGstSampleHandle sample)
-{
- if (m_audioSink) {
- GstBuffer *buffer = gst_sample_get_buffer(sample.get());
- GstMapInfo mapInfo;
- gst_buffer_map(buffer, &mapInfo, GST_MAP_READ);
- const char *bufferData = (const char*)mapInfo.data;
- gsize bufferSize = mapInfo.size;
-
- if (!m_pullMode) {
- // need to store that data in the QBuffer
- m_buffer.append(bufferData, bufferSize);
- m_audioSink->readyRead();
- } else {
- m_bytesWritten += bufferSize;
- m_audioSink->write(bufferData, bufferSize);
- }
-
- gst_buffer_unmap(buffer, &mapInfo);
- }
-}
-
-GstFlowReturn QGStreamerAudioSource::new_sample(GstAppSink *sink, gpointer user_data)
-{
- // "Note that the preroll buffer will also be returned as the first buffer when calling gst_app_sink_pull_buffer()."
- QGStreamerAudioSource *control = static_cast<QGStreamerAudioSource*>(user_data);
-
- QGstSampleHandle sample{
- gst_app_sink_pull_sample(sink),
- QGstSampleHandle::HasRef,
- };
-
- QMetaObject::invokeMethod(control, [control, sample = std::move(sample)]() mutable {
- control->newDataAvailable(std::move(sample));
- });
-
- return GST_FLOW_OK;
-}
-
-void QGStreamerAudioSource::eos(GstAppSink *, gpointer user_data)
-{
- QGStreamerAudioSource *control = static_cast<QGStreamerAudioSource*>(user_data);
- control->setState(QAudio::StoppedState);
-}
-
-GStreamerInputPrivate::GStreamerInputPrivate(QGStreamerAudioSource *audio)
-{
- m_audioDevice = audio;
-}
-
-qint64 GStreamerInputPrivate::readData(char *data, qint64 len)
-{
- if (m_audioDevice->state() == QAudio::IdleState)
- m_audioDevice->setState(QAudio::ActiveState);
- qint64 bytes = m_audioDevice->m_buffer.read(data, len);
- m_audioDevice->m_bytesWritten += bytes;
- return bytes;
-}
-
-qint64 GStreamerInputPrivate::writeData(const char *data, qint64 len)
-{
- Q_UNUSED(data);
- Q_UNUSED(len);
- return 0;
-}
-
-qint64 GStreamerInputPrivate::bytesAvailable() const
-{
- return m_audioDevice->m_buffer.size();
-}
-
-
-QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosource_p.h b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosource_p.h
deleted file mode 100644
index 9021f1ddd..000000000
--- a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiosource_p.h
+++ /dev/null
@@ -1,120 +0,0 @@
-// Copyright (C) 2016 The Qt Company Ltd.
-// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-
-//
-// W A R N I N G
-// -------------
-//
-// This file is not part of the Qt API. It exists for the convenience
-// of other Qt classes. This header file may change from version to
-// version without notice, or even be removed.
-//
-// We mean it.
-//
-
-#ifndef QAUDIOINPUTGSTREAMER_H
-#define QAUDIOINPUTGSTREAMER_H
-
-#include <QtCore/qfile.h>
-#include <QtCore/qtimer.h>
-#include <QtCore/qstring.h>
-#include <QtCore/qstringlist.h>
-#include <QtCore/qelapsedtimer.h>
-#include <QtCore/qiodevice.h>
-#include <QtCore/qmutex.h>
-#include <QtCore/qatomic.h>
-#include <QtCore/private/qringbuffer_p.h>
-
-#include "qaudio.h"
-#include "qaudiodevice.h"
-#include <private/qaudiosystem_p.h>
-
-#include <common/qgstutils_p.h>
-#include <common/qgstpipeline_p.h>
-
-#include <gst/app/gstappsink.h>
-
-QT_BEGIN_NAMESPACE
-
-class GStreamerInputPrivate;
-
-class QGStreamerAudioSource
- : public QPlatformAudioSource
-{
- friend class GStreamerInputPrivate;
-public:
- QGStreamerAudioSource(const QAudioDevice &device, QObject *parent);
- ~QGStreamerAudioSource();
-
- void start(QIODevice *device) override;
- QIODevice *start() override;
- void stop() override;
- void reset() override;
- void suspend() override;
- void resume() override;
- qsizetype bytesReady() const override;
- void setBufferSize(qsizetype value) override;
- qsizetype bufferSize() const override;
- qint64 processedUSecs() const override;
- QAudio::Error error() const override;
- QAudio::State state() const override;
- void setFormat(const QAudioFormat &format) override;
- QAudioFormat format() const override;
-
- void setVolume(qreal volume) override;
- qreal volume() const override;
-
-private:
- void setState(QAudio::State state);
- void setError(QAudio::Error error);
-
- QGstAppSink createAppSink();
- static GstFlowReturn new_sample(GstAppSink *, gpointer user_data);
- static void eos(GstAppSink *, gpointer user_data);
-
- bool open();
- void close();
-
- static gboolean busMessage(GstBus *bus, GstMessage *msg, gpointer user_data);
-
- void newDataAvailable(QGstSampleHandle sample);
-
- QAudioDevice m_info;
- qint64 m_bytesWritten = 0;
- QIODevice *m_audioSink = nullptr;
- QAudioFormat m_format;
- QAudio::Error m_errorState = QAudio::NoError;
- QAudio::State m_deviceState = QAudio::StoppedState;
- qreal m_volume = 1.;
-
- QRingBuffer m_buffer;
- QAtomicInteger<bool> m_pullMode = true;
- bool m_opened = false;
- int m_bufferSize = 0;
- qint64 m_elapsedTimeOffset = 0;
- QElapsedTimer m_timeStamp;
- QByteArray m_device;
- QByteArray m_tempBuffer;
-
- QGstElement gstInput;
- QGstPipeline gstPipeline;
- QGstElement gstVolume;
- QGstAppSink gstAppSink;
-};
-
-class GStreamerInputPrivate : public QIODevice
-{
-public:
- explicit GStreamerInputPrivate(QGStreamerAudioSource *audio);
-
- qint64 readData(char *data, qint64 len) override;
- qint64 writeData(const char *data, qint64 len) override;
- qint64 bytesAvailable() const override;
- bool isSequential() const override { return true; }
-private:
- QGStreamerAudioSource *m_audioDevice;
-};
-
-QT_END_NAMESPACE
-
-#endif
diff --git a/src/plugins/multimedia/gstreamer/common/qgst.cpp b/src/plugins/multimedia/gstreamer/common/qgst.cpp
index 8a77533a6..cb1f38495 100644
--- a/src/plugins/multimedia/gstreamer/common/qgst.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgst.cpp
@@ -127,11 +127,11 @@ std::optional<QGRange<int>> QGValue::toIntRange() const
return QGRange<int>{ gst_value_get_int_range_min(value), gst_value_get_int_range_max(value) };
}
-QGstStructure QGValue::toStructure() const
+QGstStructureView QGValue::toStructure() const
{
if (!value || !GST_VALUE_HOLDS_STRUCTURE(value))
- return QGstStructure();
- return QGstStructure(gst_value_get_structure(value));
+ return QGstStructureView(nullptr);
+ return QGstStructureView(gst_value_get_structure(value));
}
QGstCaps QGValue::toCaps() const
@@ -156,38 +156,52 @@ QGValue QGValue::at(int index) const
return QGValue{ gst_value_list_get_value(value, index) };
}
-// QGstStructure
+// QGstStructureView
-QGstStructure::QGstStructure(const GstStructure *s) : structure(s) { }
+QGstStructureView::QGstStructureView(const GstStructure *s) : structure(s) { }
-void QGstStructure::free()
+QGstStructureView::QGstStructureView(const QUniqueGstStructureHandle &handle)
+ : QGstStructureView{ handle.get() }
{
- if (structure)
- gst_structure_free(const_cast<GstStructure *>(structure));
- structure = nullptr;
}
-bool QGstStructure::isNull() const
+QUniqueGstStructureHandle QGstStructureView::clone() const
+{
+ return QUniqueGstStructureHandle{ gst_structure_copy(structure) };
+}
+
+bool QGstStructureView::isNull() const
{
return !structure;
}
-QByteArrayView QGstStructure::name() const
+QByteArrayView QGstStructureView::name() const
{
return gst_structure_get_name(structure);
}
-QGValue QGstStructure::operator[](const char *name) const
+QGValue QGstStructureView::operator[](const char *fieldname) const
+{
+ return QGValue{ gst_structure_get_value(structure, fieldname) };
+}
+
+QGstCaps QGstStructureView::caps() const
{
- return QGValue{ gst_structure_get_value(structure, name) };
+ return operator[]("caps").toCaps();
}
-QGstStructure QGstStructure::copy() const
+QGstTagListHandle QGstStructureView::tags() const
{
- return gst_structure_copy(structure);
+ QGValue tags = operator[]("tags");
+ if (tags.isNull())
+ return {};
+
+ QGstTagListHandle tagList;
+ gst_structure_get(structure, "tags", GST_TYPE_TAG_LIST, &tagList, nullptr);
+ return tagList;
}
-QSize QGstStructure::resolution() const
+QSize QGstStructureView::resolution() const
{
QSize size;
@@ -201,7 +215,7 @@ QSize QGstStructure::resolution() const
return size;
}
-QVideoFrameFormat::PixelFormat QGstStructure::pixelFormat() const
+QVideoFrameFormat::PixelFormat QGstStructureView::pixelFormat() const
{
QVideoFrameFormat::PixelFormat pixelFormat = QVideoFrameFormat::Format_Invalid;
@@ -224,7 +238,7 @@ QVideoFrameFormat::PixelFormat QGstStructure::pixelFormat() const
return pixelFormat;
}
-QGRange<float> QGstStructure::frameRateRange() const
+QGRange<float> QGstStructureView::frameRateRange() const
{
float minRate = 0.;
float maxRate = 0.;
@@ -276,14 +290,14 @@ QGRange<float> QGstStructure::frameRateRange() const
return { minRate, maxRate };
}
-QGstreamerMessage QGstStructure::getMessage()
+QGstreamerMessage QGstStructureView::getMessage()
{
GstMessage *message = nullptr;
gst_structure_get(structure, "message", GST_TYPE_MESSAGE, &message, nullptr);
return QGstreamerMessage(message, QGstreamerMessage::HasRef);
}
-std::optional<Fraction> QGstStructure::pixelAspectRatio() const
+std::optional<Fraction> QGstStructureView::pixelAspectRatio() const
{
gint numerator;
gint denominator;
@@ -297,7 +311,20 @@ std::optional<Fraction> QGstStructure::pixelAspectRatio() const
return std::nullopt;
}
-QSize QGstStructure::nativeSize() const
+// QTBUG-125249: GStreamer tries "to keep the input height (because of interlacing)". Can we align
+// the behavior between GStreamer and FFmpeg?
+static QSize qCalculateFrameSizeGStreamer(QSize resolution, Fraction par)
+{
+ if (par.numerator == par.denominator || par.numerator < 1 || par.denominator < 1)
+ return resolution;
+
+ return QSize{
+ resolution.width() * par.numerator / par.denominator,
+ resolution.height(),
+ };
+}
+
+QSize QGstStructureView::nativeSize() const
{
QSize size = resolution();
if (!size.isValid()) {
@@ -307,7 +334,7 @@ QSize QGstStructure::nativeSize() const
std::optional<Fraction> par = pixelAspectRatio();
if (par)
- size = qCalculateFrameSize(size, *par);
+ size = qCalculateFrameSizeGStreamer(size, *par);
return size;
}
@@ -329,7 +356,7 @@ std::optional<std::pair<QVideoFrameFormat, GstVideoInfo>> QGstCaps::formatAndVid
qt_videoFormatLookup[index].pixelFormat);
if (vidInfo.fps_d > 0)
- format.setFrameRate(qreal(vidInfo.fps_n) / vidInfo.fps_d);
+ format.setStreamFrameRate(qreal(vidInfo.fps_n) / vidInfo.fps_d);
QVideoFrameFormat::ColorRange range = QVideoFrameFormat::ColorRange_Unknown;
switch (vidInfo.colorimetry.range) {
@@ -505,9 +532,11 @@ int QGstCaps::size() const
return int(gst_caps_get_size(get()));
}
-QGstStructure QGstCaps::at(int index) const
+QGstStructureView QGstCaps::at(int index) const
{
- return gst_caps_get_structure(get(), index);
+ return QGstStructureView{
+ gst_caps_get_structure(get(), index),
+ };
}
GstCaps *QGstCaps::caps() const
@@ -574,11 +603,11 @@ QGString QGstObject::getString(const char *property) const
return QGString(s);
}
-QGstStructure QGstObject::getStructure(const char *property) const
+QGstStructureView QGstObject::getStructure(const char *property) const
{
GstStructure *s = nullptr;
g_object_get(get(), property, &s, nullptr);
- return QGstStructure(s);
+ return QGstStructureView(s);
}
bool QGstObject::getBool(const char *property) const
@@ -656,14 +685,23 @@ GType QGstObject::type() const
return G_OBJECT_TYPE(get());
}
+QLatin1StringView QGstObject::typeName() const
+{
+ return QLatin1StringView{
+ g_type_name(type()),
+ };
+}
+
GstObject *QGstObject::object() const
{
return get();
}
-const char *QGstObject::name() const
+QLatin1StringView QGstObject::name() const
{
- return get() ? GST_OBJECT_NAME(get()) : "(null)";
+ using namespace Qt::StringLiterals;
+
+ return get() ? QLatin1StringView{ GST_OBJECT_NAME(get()) } : "(null)"_L1;
}
// QGObjectHandlerConnection
@@ -731,6 +769,28 @@ QGstCaps QGstPad::queryCaps() const
return QGstCaps(gst_pad_query_caps(pad(), nullptr), QGstCaps::HasRef);
}
+QGstTagListHandle QGstPad::tags() const
+{
+ QGstTagListHandle tagList;
+ g_object_get(object(), "tags", &tagList, nullptr);
+ return tagList;
+}
+
+std::optional<QPlatformMediaPlayer::TrackType> QGstPad::inferTrackTypeFromName() const
+{
+ using namespace Qt::Literals;
+ QLatin1StringView padName = name();
+
+ if (padName.startsWith("video_"_L1))
+ return QPlatformMediaPlayer::TrackType::VideoStream;
+ if (padName.startsWith("audio_"_L1))
+ return QPlatformMediaPlayer::TrackType::AudioStream;
+ if (padName.startsWith("text_"_L1))
+ return QPlatformMediaPlayer::TrackType::SubtitleStream;
+
+ return std::nullopt;
+}
+
bool QGstPad::isLinked() const
{
return gst_pad_is_linked(pad());
@@ -858,6 +918,38 @@ QGstElement QGstElement::createFromDevice(GstDevice *device, const char *name)
};
}
+QGstElement QGstElement::createFromPipelineDescription(const char *str)
+{
+ QUniqueGErrorHandle error;
+ QGstElement element{
+ gst_parse_launch(str, &error),
+ QGstElement::NeedsRef,
+ };
+
+ if (error) // error does not mean that the element could not be constructed
+ qWarning() << "gst_parse_launch error:" << error;
+
+ return element;
+}
+
+QGstElement QGstElement::createFromPipelineDescription(const QByteArray &str)
+{
+ return createFromPipelineDescription(str.constData());
+}
+
+QGstElementFactoryHandle QGstElement::findFactory(const char *name)
+{
+ return QGstElementFactoryHandle{
+ gst_element_factory_find(name),
+ QGstElementFactoryHandle::HasRef,
+ };
+}
+
+QGstElementFactoryHandle QGstElement::findFactory(const QByteArray &name)
+{
+ return findFactory(name.constData());
+}
+
QGstPad QGstElement::staticPad(const char *name) const
{
return QGstPad(gst_element_get_static_pad(element(), name), HasRef);
@@ -909,14 +1001,23 @@ GstStateChangeReturn QGstElement::setState(GstState state)
bool QGstElement::setStateSync(GstState state, std::chrono::nanoseconds timeout)
{
+ if (state == GST_STATE_NULL) {
+ // QTBUG-125251: when changing the pipeline state too quickly between NULL->PAUSED->NULL, there
+ // may be a pending task to activate pads while we try to switch to NULL. This can cause an
+ // assertion failure in GStreamer. We therefore finish the state change when called on a bin
+ // or pipeline.
+ if (qIsGstObjectOfType<GstBin>(element()))
+ finishStateChange();
+ }
+
GstStateChangeReturn change = gst_element_set_state(element(), state);
- if (change == GST_STATE_CHANGE_ASYNC) {
+ if (change == GST_STATE_CHANGE_ASYNC)
change = gst_element_get_state(element(), nullptr, &state, timeout.count());
- }
-#ifndef QT_NO_DEBUG
- if (change != GST_STATE_CHANGE_SUCCESS && change != GST_STATE_CHANGE_NO_PREROLL)
+
+ if (change != GST_STATE_CHANGE_SUCCESS && change != GST_STATE_CHANGE_NO_PREROLL) {
qWarning() << "Could not change state of" << name() << "to" << state << change;
-#endif
+ dumpPipelineGraph("setStateSyncFailure");
+ }
return change == GST_STATE_CHANGE_SUCCESS;
}
@@ -932,10 +1033,10 @@ bool QGstElement::finishStateChange(std::chrono::nanoseconds timeout)
GstStateChangeReturn change =
gst_element_get_state(element(), &state, &pending, timeout.count());
-#ifndef QT_NO_DEBUG
- if (change != GST_STATE_CHANGE_SUCCESS && change != GST_STATE_CHANGE_NO_PREROLL)
+ if (change != GST_STATE_CHANGE_SUCCESS && change != GST_STATE_CHANGE_NO_PREROLL) {
qWarning() << "Could not finish change state of" << name() << change << state << pending;
-#endif
+ dumpPipelineGraph("finishStateChangeFailure");
+ }
return change == GST_STATE_CHANGE_SUCCESS;
}
@@ -959,6 +1060,64 @@ void QGstElement::sendEos() const
sendEvent(gst_event_new_eos());
}
+std::optional<std::chrono::nanoseconds> QGstElement::duration() const
+{
+ gint64 d;
+ if (!gst_element_query_duration(element(), GST_FORMAT_TIME, &d)) {
+ qDebug() << "QGstElement: failed to query duration";
+ return std::nullopt;
+ }
+ return std::chrono::nanoseconds{ d };
+}
+
+std::optional<std::chrono::milliseconds> QGstElement::durationInMs() const
+{
+ using namespace std::chrono;
+ auto dur = duration();
+ if (dur)
+ return round<milliseconds>(*dur);
+ return std::nullopt;
+}
+
+std::optional<std::chrono::nanoseconds> QGstElement::position() const
+{
+ QGstQueryHandle &query = positionQuery();
+
+ gint64 pos;
+ if (gst_element_query(element(), query.get())) {
+ gst_query_parse_position(query.get(), nullptr, &pos);
+ return std::chrono::nanoseconds{ pos };
+ }
+
+ qDebug() << "QGstElement: failed to query position";
+ return std::nullopt;
+}
+
+std::optional<std::chrono::milliseconds> QGstElement::positionInMs() const
+{
+ using namespace std::chrono;
+ auto pos = position();
+ if (pos)
+ return round<milliseconds>(*pos);
+ return std::nullopt;
+}
+
+std::optional<bool> QGstElement::canSeek() const
+{
+ QGstQueryHandle query{
+ gst_query_new_seeking(GST_FORMAT_TIME),
+ QGstQueryHandle::HasRef,
+ };
+ gboolean canSeek = false;
+ gst_query_parse_seeking(query.get(), nullptr, &canSeek, nullptr, nullptr);
+
+ if (gst_element_query(element(), query.get())) {
+ gst_query_parse_seeking(query.get(), nullptr, &canSeek, nullptr, nullptr);
+ return canSeek;
+ }
+ return std::nullopt;
+}
+
GstClockTime QGstElement::baseTime() const
{
return gst_element_get_base_time(element());
@@ -999,6 +1158,27 @@ QGstPipeline QGstElement::getPipeline() const
}
}
+void QGstElement::dumpPipelineGraph(const char *filename) const
+{
+ static const bool dumpEnabled = qEnvironmentVariableIsSet("GST_DEBUG_DUMP_DOT_DIR");
+ if (dumpEnabled) {
+ QGstPipeline pipeline = getPipeline();
+ if (pipeline)
+ pipeline.dumpGraph(filename);
+ }
+}
+
+QGstQueryHandle &QGstElement::positionQuery() const
+{
+ if (Q_UNLIKELY(!m_positionQuery))
+ m_positionQuery = QGstQueryHandle{
+ gst_query_new_position(GST_FORMAT_TIME),
+ QGstQueryHandle::HasRef,
+ };
+
+ return m_positionQuery;
+}
+
// QGstBin
QGstBin QGstBin::create(const char *name)
@@ -1016,6 +1196,36 @@ QGstBin QGstBin::createFromFactory(const char *factory, const char *name)
};
}
+QGstBin QGstBin::createFromPipelineDescription(const QByteArray &pipelineDescription,
+ const char *name, bool ghostUnlinkedPads)
+{
+ return createFromPipelineDescription(pipelineDescription.constData(), name, ghostUnlinkedPads);
+}
+
+QGstBin QGstBin::createFromPipelineDescription(const char *pipelineDescription, const char *name,
+ bool ghostUnlinkedPads)
+{
+ QUniqueGErrorHandle error;
+
+ GstElement *element =
+ gst_parse_bin_from_description_full(pipelineDescription, ghostUnlinkedPads,
+ /*context=*/nullptr, GST_PARSE_FLAG_NONE, &error);
+
+ if (!element) {
+ qWarning() << "Failed to make element from pipeline description" << pipelineDescription
+ << error;
+ return QGstBin{};
+ }
+
+ if (name)
+ gst_element_set_name(element, name);
+
+ return QGstBin{
+ element,
+ NeedsRef,
+ };
+}
+
QGstBin::QGstBin(GstBin *bin, RefMode mode)
: QGstElement{
qGstCheckedCast<GstElement>(bin),
@@ -1044,17 +1254,12 @@ bool QGstBin::syncChildrenState()
return gst_bin_sync_children_states(bin());
}
-void QGstBin::dumpGraph(const char *fileNamePrefix)
+void QGstBin::dumpGraph(const char *fileNamePrefix) const
{
if (isNull())
return;
- GST_DEBUG_BIN_TO_DOT_FILE(bin(),
- GstDebugGraphDetails(GST_DEBUG_GRAPH_SHOW_ALL
- | GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE
- | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS
- | GST_DEBUG_GRAPH_SHOW_STATES),
- fileNamePrefix);
+ GST_DEBUG_BIN_TO_DOT_FILE(bin(), GST_DEBUG_GRAPH_SHOW_VERBOSE, fileNamePrefix);
}
QGstElement QGstBin::findByName(const char *name)
@@ -1075,6 +1280,11 @@ QGstBaseSink::QGstBaseSink(GstBaseSink *element, RefMode mode)
{
}
+void QGstBaseSink::setSync(bool arg)
+{
+ gst_base_sink_set_sync(baseSink(), arg ? TRUE : FALSE);
+}
+
GstBaseSink *QGstBaseSink::baseSink() const
{
return qGstCheckedCast<GstBaseSink>(element());
@@ -1095,8 +1305,6 @@ GstBaseSrc *QGstBaseSrc::baseSrc() const
return qGstCheckedCast<GstBaseSrc>(element());
}
-#if QT_CONFIG(gstreamer_app)
-
// QGstAppSink
QGstAppSink::QGstAppSink(GstAppSink *element, RefMode mode)
@@ -1121,6 +1329,18 @@ GstAppSink *QGstAppSink::appSink() const
return qGstCheckedCast<GstAppSink>(element());
}
+# if GST_CHECK_VERSION(1, 24, 0)
+void QGstAppSink::setMaxBufferTime(std::chrono::nanoseconds ns)
+{
+ gst_app_sink_set_max_time(appSink(), qGstClockTimeFromChrono(ns));
+}
+# endif
+
+void QGstAppSink::setMaxBuffers(int n)
+{
+ gst_app_sink_set_max_buffers(appSink(), n);
+}
+
void QGstAppSink::setCaps(const QGstCaps &caps)
{
gst_app_sink_set_caps(appSink(), caps.caps());
@@ -1175,6 +1395,10 @@ GstFlowReturn QGstAppSrc::pushBuffer(GstBuffer *buffer)
return gst_app_src_push_buffer(appSrc(), buffer);
}
-#endif
+QString qGstErrorMessageCannotFindElement(std::string_view element)
+{
+ return QStringLiteral("Could not find the %1 GStreamer element")
+ .arg(QLatin1StringView(element));
+}
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgst_debug.cpp b/src/plugins/multimedia/gstreamer/common/qgst_debug.cpp
index ea749c817..e47515d2d 100644
--- a/src/plugins/multimedia/gstreamer/common/qgst_debug.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgst_debug.cpp
@@ -4,6 +4,8 @@
#include "qgst_debug_p.h"
#include "qgstreamermessage_p.h"
+#include <gst/gstclock.h>
+
QT_BEGIN_NAMESPACE
// NOLINTBEGIN(performance-unnecessary-value-param)
@@ -18,7 +20,7 @@ QDebug operator<<(QDebug dbg, const QGstCaps &caps)
return dbg << caps.caps();
}
-QDebug operator<<(QDebug dbg, const QGstStructure &structure)
+QDebug operator<<(QDebug dbg, const QGstStructureView &structure)
{
return dbg << structure.structure;
}
@@ -43,6 +45,21 @@ QDebug operator<<(QDebug dbg, const QUniqueGStringHandle &handle)
return dbg << handle.get();
}
+QDebug operator<<(QDebug dbg, const QGstStreamCollectionHandle &handle)
+{
+ return dbg << handle.get();
+}
+
+QDebug operator<<(QDebug dbg, const QGstStreamHandle &handle)
+{
+ return dbg << handle.get();
+}
+
+QDebug operator<<(QDebug dbg, const QGstTagListHandle &handle)
+{
+ return dbg << handle.get();
+}
+
QDebug operator<<(QDebug dbg, const QGstElement &element)
{
return dbg << element.element();
@@ -155,20 +172,40 @@ QDebug operator<<(QDebug dbg, const GstDevice *device)
return dbg;
}
+namespace {
+
+struct Timepoint
+{
+ explicit Timepoint(guint64 us) : ts{ us } { }
+ guint64 ts;
+};
+
+QDebug operator<<(QDebug dbg, Timepoint ts)
+{
+ char buffer[128];
+ snprintf(buffer, sizeof(buffer), "%" GST_TIME_FORMAT, GST_TIME_ARGS(ts.ts));
+ dbg << buffer;
+ return dbg;
+}
+
+} // namespace
+
QDebug operator<<(QDebug dbg, const GstMessage *msg)
{
QDebugStateSaver saver(dbg);
dbg.nospace();
+ dbg << GST_MESSAGE_TYPE_NAME(msg) << ", Source: " << GST_MESSAGE_SRC_NAME(msg);
+ if (GST_MESSAGE_TIMESTAMP(msg) != 0xFFFFFFFFFFFFFFFF)
+ dbg << ", Timestamp: " << GST_MESSAGE_TIMESTAMP(msg);
+
switch (msg->type) {
case GST_MESSAGE_ERROR: {
QUniqueGErrorHandle err;
QGString debug;
gst_message_parse_error(const_cast<GstMessage *>(msg), &err, &debug);
- dbg << GST_MESSAGE_TYPE_NAME(msg) << ", Source: " << GST_MESSAGE_SRC_NAME(msg)
- << ", Timestamp: " << GST_MESSAGE_TIMESTAMP(msg) << ", Error: " << err << " (" << debug
- << ")";
+ dbg << ", Error: " << err << " (" << debug << ")";
break;
}
@@ -177,9 +214,7 @@ QDebug operator<<(QDebug dbg, const GstMessage *msg)
QGString debug;
gst_message_parse_warning(const_cast<GstMessage *>(msg), &err, &debug);
- dbg << GST_MESSAGE_TYPE_NAME(msg) << ", Source: " << GST_MESSAGE_SRC_NAME(msg)
- << ", Timestamp: " << GST_MESSAGE_TIMESTAMP(msg) << ", Warning: " << err << " ("
- << debug << ")";
+ dbg << ", Warning: " << err << " (" << debug << ")";
break;
}
@@ -188,9 +223,31 @@ QDebug operator<<(QDebug dbg, const GstMessage *msg)
QGString debug;
gst_message_parse_info(const_cast<GstMessage *>(msg), &err, &debug);
- dbg << GST_MESSAGE_TYPE_NAME(msg) << ", Source: " << GST_MESSAGE_SRC_NAME(msg)
- << ", Timestamp: " << GST_MESSAGE_TIMESTAMP(msg) << ", Info: " << err << " (" << debug
- << ")";
+ dbg << ", Info: " << err << " (" << debug << ")";
+ break;
+ }
+
+ case GST_MESSAGE_TAG: {
+ QGstTagListHandle tagList;
+ gst_message_parse_tag(const_cast<GstMessage *>(msg), &tagList);
+
+ dbg << ", Tags: " << tagList;
+ break;
+ }
+
+ case GST_MESSAGE_QOS: {
+ gboolean live;
+ guint64 running_time;
+ guint64 stream_time;
+ guint64 timestamp;
+ guint64 duration;
+
+ gst_message_parse_qos(const_cast<GstMessage *>(msg), &live, &running_time, &stream_time,
+ &timestamp, &duration);
+
+ dbg << ", Live: " << bool(live) << ", Running time: " << Timepoint{ running_time }
+ << ", Stream time: " << Timepoint{ stream_time }
+ << ", Timestamp: " << Timepoint{ timestamp } << ", Duration: " << Timepoint{ duration };
break;
}
@@ -202,16 +259,47 @@ QDebug operator<<(QDebug dbg, const GstMessage *msg)
gst_message_parse_state_changed(const_cast<GstMessage *>(msg), &oldState, &newState,
&pending);
- dbg << GST_MESSAGE_TYPE_NAME(msg) << ", Source: " << GST_MESSAGE_SRC_NAME(msg)
- << ", Timestamp: " << GST_MESSAGE_TIMESTAMP(msg) << ", OldState: " << oldState
- << ", NewState: " << newState << "Pending State: " << pending;
+ dbg << ", Transition: " << oldState << "->" << newState;
+
+ if (pending != GST_STATE_VOID_PENDING)
+ dbg << ", Pending State: " << pending;
break;
}
- default: {
- dbg << GST_MESSAGE_TYPE_NAME(msg) << ", Source: " << GST_MESSAGE_SRC_NAME(msg)
- << ", Timestamp: " << GST_MESSAGE_TIMESTAMP(msg);
+ case GST_MESSAGE_STREAM_COLLECTION: {
+ QGstStreamCollectionHandle collection;
+ gst_message_parse_stream_collection(const_cast<GstMessage *>(msg), &collection);
+
+ dbg << ", " << collection;
+ break;
+ }
+
+ case GST_MESSAGE_STREAMS_SELECTED: {
+ QGstStreamCollectionHandle collection;
+ gst_message_parse_streams_selected(const_cast<GstMessage *>(msg), &collection);
+
+ dbg << ", " << collection;
+ break;
}
+
+ case GST_MESSAGE_STREAM_STATUS: {
+ GstStreamStatusType streamStatus;
+ gst_message_parse_stream_status(const_cast<GstMessage *>(msg), &streamStatus, nullptr);
+
+ dbg << ", Stream Status: " << streamStatus;
+ break;
+ }
+
+ case GST_MESSAGE_BUFFERING: {
+ int progress = 0;
+ gst_message_parse_buffering(const_cast<GstMessage *>(msg), &progress);
+
+ dbg << ", Buffering: " << progress << "%";
+ break;
+ }
+
+ default:
+ break;
}
return dbg;
}
@@ -244,6 +332,50 @@ QDebug operator<<(QDebug dbg, const GstPadTemplate *padTemplate)
return dbg;
}
+QDebug operator<<(QDebug dbg, const GstStreamCollection *streamCollection)
+{
+ GstStreamCollection *collection = const_cast<GstStreamCollection *>(streamCollection);
+ guint size = gst_stream_collection_get_size(collection);
+
+ dbg << "Stream Collection: {";
+ for (guint index = 0; index != size; ++index) {
+ dbg << gst_stream_collection_get_stream(collection, index);
+ if (index + 1 != size)
+ dbg << ", ";
+ }
+
+ dbg << "}";
+ return dbg;
+}
+
+QDebug operator<<(QDebug dbg, const GstStream *cstream)
+{
+ GstStream *stream = const_cast<GstStream *>(cstream);
+
+ dbg << "GstStream { ";
+ dbg << "Type: " << gst_stream_type_get_name(gst_stream_get_stream_type(stream));
+
+ QGstTagListHandle tagList{
+ gst_stream_get_tags(stream),
+ QGstTagListHandle::HasRef,
+ };
+
+ if (tagList)
+ dbg << ", Tags: " << tagList;
+
+ QGstCaps caps{
+ gst_stream_get_caps(stream),
+ QGstCaps::HasRef,
+ };
+
+ if (caps)
+ dbg << ", Caps: " << caps;
+
+ dbg << "}";
+
+ return dbg;
+}
+
QDebug operator<<(QDebug dbg, GstState state)
{
return dbg << gst_element_state_get_name(state);
@@ -264,19 +396,40 @@ QDebug operator<<(QDebug dbg, GstMessageType type)
return dbg << gst_message_type_get_name(type);
}
+#define ADD_ENUM_SWITCH(value) \
+ case value: \
+ return dbg << #value; \
+ static_assert(true, "enforce semicolon")
+
QDebug operator<<(QDebug dbg, GstPadDirection direction)
{
switch (direction) {
- case GST_PAD_UNKNOWN:
- return dbg << "GST_PAD_UNKNOWN";
- case GST_PAD_SRC:
- return dbg << "GST_PAD_SRC";
- case GST_PAD_SINK:
- return dbg << "GST_PAD_SINK";
+ ADD_ENUM_SWITCH(GST_PAD_UNKNOWN);
+ ADD_ENUM_SWITCH(GST_PAD_SRC);
+ ADD_ENUM_SWITCH(GST_PAD_SINK);
+ default:
+ Q_UNREACHABLE_RETURN(dbg);
+ }
+}
+
+QDebug operator<<(QDebug dbg, GstStreamStatusType type)
+{
+ switch (type) {
+ ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_CREATE);
+ ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_ENTER);
+ ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_LEAVE);
+ ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_DESTROY);
+ ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_START);
+ ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_PAUSE);
+ ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_STOP);
+ default:
+ Q_UNREACHABLE_RETURN(dbg);
}
return dbg;
}
+#undef ADD_ENUM_SWITCH
+
QDebug operator<<(QDebug dbg, const GValue *value)
{
switch (G_VALUE_TYPE(value)) {
@@ -404,7 +557,9 @@ QDebug operator<<(QDebug dbg, const QCompactGstMessageAdaptor &m)
gst_message_parse_state_changed(m.msg, &oldState, &newState, &pending);
- dbg << oldState << "->" << newState << "(pending: " << pending << ")";
+ dbg << oldState << " -> " << newState;
+ if (pending != GST_STATE_VOID_PENDING)
+ dbg << " (pending: " << pending << ")";
return dbg;
}
diff --git a/src/plugins/multimedia/gstreamer/common/qgst_debug_p.h b/src/plugins/multimedia/gstreamer/common/qgst_debug_p.h
index 31c722a90..df13c6c13 100644
--- a/src/plugins/multimedia/gstreamer/common/qgst_debug_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgst_debug_p.h
@@ -23,7 +23,7 @@ QT_BEGIN_NAMESPACE
class QGstreamerMessage;
QDebug operator<<(QDebug, const QGstCaps &);
-QDebug operator<<(QDebug, const QGstStructure &);
+QDebug operator<<(QDebug, const QGstStructureView &);
QDebug operator<<(QDebug, const QGstElement &);
QDebug operator<<(QDebug, const QGstPad &);
QDebug operator<<(QDebug, const QGString &);
@@ -31,6 +31,9 @@ QDebug operator<<(QDebug, const QGValue &);
QDebug operator<<(QDebug, const QGstreamerMessage &);
QDebug operator<<(QDebug, const QUniqueGErrorHandle &);
QDebug operator<<(QDebug, const QUniqueGStringHandle &);
+QDebug operator<<(QDebug, const QGstStreamCollectionHandle &);
+QDebug operator<<(QDebug, const QGstStreamHandle &);
+QDebug operator<<(QDebug, const QGstTagListHandle &);
QDebug operator<<(QDebug, const GstCaps *);
QDebug operator<<(QDebug, const GstVideoInfo *);
@@ -44,12 +47,15 @@ QDebug operator<<(QDebug, const GstTagList *);
QDebug operator<<(QDebug, const GstQuery *);
QDebug operator<<(QDebug, const GstEvent *);
QDebug operator<<(QDebug, const GstPadTemplate *);
+QDebug operator<<(QDebug, const GstStreamCollection *);
+QDebug operator<<(QDebug, const GstStream *);
QDebug operator<<(QDebug, GstState);
QDebug operator<<(QDebug, GstStateChange);
QDebug operator<<(QDebug, GstStateChangeReturn);
QDebug operator<<(QDebug, GstMessageType);
QDebug operator<<(QDebug, GstPadDirection);
+QDebug operator<<(QDebug, GstStreamStatusType);
QDebug operator<<(QDebug, const GValue *);
QDebug operator<<(QDebug, const GError *);
diff --git a/src/plugins/multimedia/gstreamer/common/qgst_handle_types_p.h b/src/plugins/multimedia/gstreamer/common/qgst_handle_types_p.h
index c37ac5971..e813f4181 100644
--- a/src/plugins/multimedia/gstreamer/common/qgst_handle_types_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgst_handle_types_p.h
@@ -163,6 +163,18 @@ struct QUniqueGErrorHandleTraits
}
};
+
+struct QUniqueGstDateTimeHandleTraits
+{
+ using Type = GstDateTime *;
+ static constexpr Type invalidValue() noexcept { return nullptr; }
+ static bool close(Type handle) noexcept
+ {
+ gst_date_time_unref(handle);
+ return true;
+ }
+};
+
struct QFileDescriptorHandleTraits
{
using Type = int;
@@ -213,7 +225,8 @@ struct QGstMiniObjectHandleHelper
static Type ref(Type handle) noexcept
{
- gst_mini_object_ref(GST_MINI_OBJECT_CAST(handle));
+ if (GST_MINI_OBJECT_CAST(handle))
+ gst_mini_object_ref(GST_MINI_OBJECT_CAST(handle));
return handle;
}
};
@@ -226,10 +239,12 @@ struct QGstMiniObjectHandleHelper
using QGstClockHandle = QGstImpl::QGstHandleHelper<GstClock>::UniqueHandle;
using QGstElementHandle = QGstImpl::QGstHandleHelper<GstElement>::UniqueHandle;
-using QGstElementFactoryHandle = QGstImpl::QGstHandleHelper<GstElementFactory>::UniqueHandle;
+using QGstElementFactoryHandle = QGstImpl::QGstHandleHelper<GstElementFactory>::SharedHandle;
using QGstDeviceHandle = QGstImpl::QGstHandleHelper<GstDevice>::SharedHandle;
using QGstDeviceMonitorHandle = QGstImpl::QGstHandleHelper<GstDeviceMonitor>::UniqueHandle;
using QGstBusHandle = QGstImpl::QGstHandleHelper<GstBus>::UniqueHandle;
+using QGstStreamCollectionHandle = QGstImpl::QGstHandleHelper<GstStreamCollection>::SharedHandle;
+using QGstStreamHandle = QGstImpl::QGstHandleHelper<GstStream>::SharedHandle;
using QGstTagListHandle = QGstImpl::QSharedHandle<QGstImpl::QGstTagListHandleTraits>;
using QGstSampleHandle = QGstImpl::QSharedHandle<QGstImpl::QGstSampleHandleTraits>;
@@ -237,11 +252,13 @@ using QGstSampleHandle = QGstImpl::QSharedHandle<QGstImpl::QGstSampleHandleTrait
using QUniqueGstStructureHandle = QUniqueHandle<QGstImpl::QUniqueGstStructureHandleTraits>;
using QUniqueGStringHandle = QUniqueHandle<QGstImpl::QUniqueGStringHandleTraits>;
using QUniqueGErrorHandle = QUniqueHandle<QGstImpl::QUniqueGErrorHandleTraits>;
+using QUniqueGstDateTimeHandle = QUniqueHandle<QGstImpl::QUniqueGstDateTimeHandleTraits>;
using QFileDescriptorHandle = QUniqueHandle<QGstImpl::QFileDescriptorHandleTraits>;
using QGstBufferHandle = QGstImpl::QGstMiniObjectHandleHelper<GstBuffer>::SharedHandle;
using QGstContextHandle = QGstImpl::QGstMiniObjectHandleHelper<GstContext>::UniqueHandle;
using QGstGstDateTimeHandle = QGstImpl::QGstMiniObjectHandleHelper<GstDateTime>::SharedHandle;
using QGstPluginFeatureHandle = QGstImpl::QGstHandleHelper<GstPluginFeature>::SharedHandle;
+using QGstQueryHandle = QGstImpl::QGstMiniObjectHandleHelper<GstQuery>::SharedHandle;
#if QT_CONFIG(gstreamer_gl)
using QGstGLContextHandle = QGstImpl::QGstHandleHelper<GstGLContext>::UniqueHandle;
diff --git a/src/plugins/multimedia/gstreamer/common/qgst_p.h b/src/plugins/multimedia/gstreamer/common/qgst_p.h
index ec41b508d..bf5290d5d 100644
--- a/src/plugins/multimedia/gstreamer/common/qgst_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgst_p.h
@@ -23,8 +23,11 @@
#include <QtMultimedia/qvideoframe.h>
#include <QtMultimedia/private/qtmultimediaglobal_p.h>
#include <QtMultimedia/private/qmultimediautils_p.h>
+#include <QtMultimedia/private/qplatformmediaplayer_p.h>
#include <gst/gst.h>
+#include <gst/app/gstappsink.h>
+#include <gst/app/gstappsrc.h>
#include <gst/video/video-info.h>
#include "qgst_handle_types_p.h"
@@ -37,10 +40,6 @@
# undef GST_USE_UNSTABLE_API
#endif
-#if QT_CONFIG(gstreamer_app)
-# include <gst/app/gstappsink.h>
-# include <gst/app/gstappsrc.h>
-#endif
QT_BEGIN_NAMESPACE
@@ -110,13 +109,11 @@ QGST_DEFINE_CAST_TRAITS(GstPad, PAD);
QGST_DEFINE_CAST_TRAITS(GstPipeline, PIPELINE);
QGST_DEFINE_CAST_TRAITS(GstBaseSink, BASE_SINK);
QGST_DEFINE_CAST_TRAITS(GstBaseSrc, BASE_SRC);
+QGST_DEFINE_CAST_TRAITS(GstAppSink, APP_SINK);
+QGST_DEFINE_CAST_TRAITS(GstAppSrc, APP_SRC);
QGST_DEFINE_CAST_TRAITS_FOR_INTERFACE(GstTagSetter, TAG_SETTER);
-#if QT_CONFIG(gstreamer_app)
-QGST_DEFINE_CAST_TRAITS(GstAppSink, APP_SINK);
-QGST_DEFINE_CAST_TRAITS(GstAppSrc, APP_SRC);
-#endif
template <>
struct GstObjectTraits<GObject>
@@ -145,6 +142,13 @@ struct GstObjectTraits<GObject>
} // namespace QGstImpl
template <typename DestinationType, typename SourceType>
+bool qIsGstObjectOfType(SourceType *arg)
+{
+ using Traits = QGstImpl::GstObjectTraits<DestinationType>;
+ return arg && Traits::isObjectOfType(arg);
+}
+
+template <typename DestinationType, typename SourceType>
DestinationType *qGstSafeCast(SourceType *arg)
{
using Traits = QGstImpl::GstObjectTraits<DestinationType>;
@@ -163,7 +167,7 @@ DestinationType *qGstCheckedCast(SourceType *arg)
}
class QSize;
-class QGstStructure;
+class QGstStructureView;
class QGstCaps;
class QGstPipelinePrivate;
class QCameraFormat;
@@ -204,7 +208,7 @@ public:
std::optional<QGRange<float>> getFractionRange() const;
std::optional<QGRange<int>> toIntRange() const;
- QGstStructure toStructure() const;
+ QGstStructureView toStructure() const;
QGstCaps toCaps() const;
bool isList() const;
@@ -302,18 +306,21 @@ protected:
class QGstreamerMessage;
-class QGstStructure
+class QGstStructureView
{
public:
const GstStructure *structure = nullptr;
- QGstStructure() = default;
- QGstStructure(const GstStructure *s);
- void free();
+ explicit QGstStructureView(const GstStructure *);
+ explicit QGstStructureView(const QUniqueGstStructureHandle &);
- bool isNull() const;
+ QUniqueGstStructureHandle clone() const;
+ bool isNull() const;
QByteArrayView name() const;
- QGValue operator[](const char *name) const;
+ QGValue operator[](const char *fieldname) const;
+
+ QGstCaps caps() const;
+ QGstTagListHandle tags() const;
QSize resolution() const;
QVideoFrameFormat::PixelFormat pixelFormat() const;
@@ -321,8 +328,6 @@ public:
QGstreamerMessage getMessage();
std::optional<Fraction> pixelAspectRatio() const;
QSize nativeSize() const;
-
- QGstStructure copy() const;
};
template <>
@@ -346,7 +351,7 @@ public:
enum MemoryFormat { CpuMemory, GLTexture, DMABuf };
int size() const;
- QGstStructure at(int index) const;
+ QGstStructureView at(int index) const;
GstCaps *caps() const;
MemoryFormat memoryFormat() const;
@@ -394,7 +399,7 @@ public:
void set(const char *property, const QGstCaps &c);
QGString getString(const char *property) const;
- QGstStructure getStructure(const char *property) const;
+ QGstStructureView getStructure(const char *property) const;
bool getBool(const char *property) const;
uint getUInt(const char *property) const;
int getInt(const char *property) const;
@@ -408,8 +413,9 @@ public:
void disconnect(gulong handlerId);
GType type() const;
+ QLatin1StringView typeName() const;
GstObject *object() const;
- const char *name() const;
+ QLatin1StringView name() const;
};
class QGObjectHandlerConnection
@@ -470,6 +476,11 @@ public:
QGstCaps currentCaps() const;
QGstCaps queryCaps() const;
+ QGstTagListHandle tags() const;
+
+ std::optional<QPlatformMediaPlayer::TrackType>
+ inferTrackTypeFromName() const; // for decodebin3 etc
+
bool isLinked() const;
bool link(const QGstPad &sink) const;
bool unlink(const QGstPad &sink) const;
@@ -558,6 +569,11 @@ public:
const char *name = nullptr);
static QGstElement createFromDevice(const QGstDeviceHandle &, const char *name = nullptr);
static QGstElement createFromDevice(GstDevice *, const char *name = nullptr);
+ static QGstElement createFromPipelineDescription(const char *);
+ static QGstElement createFromPipelineDescription(const QByteArray &);
+
+ static QGstElementFactoryHandle findFactory(const char *);
+ static QGstElementFactoryHandle findFactory(const QByteArray &name);
QGstPad staticPad(const char *name) const;
QGstPad src() const;
@@ -577,6 +593,12 @@ public:
void sendEvent(GstEvent *event) const;
void sendEos() const;
+ std::optional<std::chrono::nanoseconds> duration() const;
+ std::optional<std::chrono::milliseconds> durationInMs() const;
+ std::optional<std::chrono::nanoseconds> position() const;
+ std::optional<std::chrono::milliseconds> positionInMs() const;
+ std::optional<bool> canSeek() const;
+
template <auto Member, typename T>
QGObjectHandlerConnection onPadAdded(T *instance)
{
@@ -626,6 +648,11 @@ public:
QGstElement getParent() const;
QGstPipeline getPipeline() const;
+ void dumpPipelineGraph(const char *filename) const;
+
+private:
+ QGstQueryHandle &positionQuery() const;
+ mutable QGstQueryHandle m_positionQuery;
};
template <typename... Ts>
@@ -669,6 +696,12 @@ public:
explicit QGstBin(GstBin *bin, RefMode mode = NeedsRef);
static QGstBin create(const char *name);
static QGstBin createFromFactory(const char *factory, const char *name);
+ static QGstBin createFromPipelineDescription(const QByteArray &pipelineDescription,
+ const char *name = nullptr,
+ bool ghostUnlinkedPads = false);
+ static QGstBin createFromPipelineDescription(const char *pipelineDescription,
+ const char *name = nullptr,
+ bool ghostUnlinkedPads = false);
template <typename... Ts>
std::enable_if_t<(std::is_base_of_v<QGstElement, Ts> && ...), void> add(const Ts &...ts)
@@ -704,7 +737,7 @@ public:
bool syncChildrenState();
- void dumpGraph(const char *fileNamePrefix);
+ void dumpGraph(const char *fileNamePrefix) const;
QGstElement findByName(const char *);
};
@@ -721,6 +754,8 @@ public:
QGstBaseSink &operator=(const QGstBaseSink &) = default;
QGstBaseSink &operator=(QGstBaseSink &&) noexcept = default;
+ void setSync(bool);
+
GstBaseSink *baseSink() const;
};
@@ -739,7 +774,6 @@ public:
GstBaseSrc *baseSrc() const;
};
-#if QT_CONFIG(gstreamer_app)
class QGstAppSink : public QGstBaseSink
{
public:
@@ -756,6 +790,11 @@ public:
GstAppSink *appSink() const;
+ void setMaxBuffers(int);
+# if GST_CHECK_VERSION(1, 24, 0)
+ void setMaxBufferTime(std::chrono::nanoseconds);
+# endif
+
void setCaps(const QGstCaps &caps);
void setCallbacks(GstAppSinkCallbacks &callbacks, gpointer user_data, GDestroyNotify notify);
@@ -783,12 +822,24 @@ public:
GstFlowReturn pushBuffer(GstBuffer *); // take ownership
};
-#endif
+inline GstClockTime qGstClockTimeFromChrono(std::chrono::nanoseconds ns)
+{
+ return ns.count();
+}
-inline QString errorMessageCannotFindElement(std::string_view element)
+QString qGstErrorMessageCannotFindElement(std::string_view element);
+
+template <typename Arg, typename... Args>
+std::optional<QString> qGstErrorMessageIfElementsNotAvailable(const Arg &arg, Args... args)
{
- return QStringLiteral("Could not find the %1 GStreamer element")
- .arg(QLatin1StringView(element));
+ QGstElementFactoryHandle factory = QGstElement::findFactory(arg);
+ if (!factory)
+ return qGstErrorMessageCannotFindElement(arg);
+
+ if constexpr (sizeof...(args) != 0)
+ return qGstErrorMessageIfElementsNotAvailable(args...);
+ else
+ return std::nullopt;
}
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstappsource.cpp b/src/plugins/multimedia/gstreamer/common/qgstappsource.cpp
index 99af8443c..5779ba8b1 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstappsource.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstappsource.cpp
@@ -1,12 +1,12 @@
// Copyright (C) 2016 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-#include <QDebug>
-
#include "qgstappsource_p.h"
+
+#include <QtCore/qdebug.h>
+#include <QtCore/qloggingcategory.h>
+
#include <common/qgstutils_p.h>
-#include "qnetworkreply.h"
-#include "qloggingcategory.h"
static Q_LOGGING_CATEGORY(qLcAppSrc, "qt.multimedia.appsrc")
@@ -16,7 +16,7 @@ QMaybe<QGstAppSource *> QGstAppSource::create(QObject *parent)
{
QGstAppSrc appsrc = QGstAppSrc::create("appsrc");
if (!appsrc)
- return errorMessageCannotFindElement("appsrc");
+ return qGstErrorMessageCannotFindElement("appsrc");
return new QGstAppSource(appsrc, parent);
}
@@ -53,7 +53,6 @@ bool QGstAppSource::setup(QIODevice *stream, qint64 offset)
GstAppSrc *appSrc = m_appSrc.appSrc();
m_maxBytes = gst_app_src_get_max_bytes(appSrc);
- m_suspended = false;
if (m_sequential)
m_streamType = GST_APP_STREAM_TYPE_STREAM;
@@ -62,25 +61,9 @@ bool QGstAppSource::setup(QIODevice *stream, qint64 offset)
gst_app_src_set_stream_type(appSrc, m_streamType);
gst_app_src_set_size(appSrc, m_sequential ? -1 : m_stream->size() - m_offset);
- m_noMoreData = true;
-
return true;
}
-void QGstAppSource::setAudioFormat(const QAudioFormat &f)
-{
- QMutexLocker locker(&m_mutex);
-
- m_format = f;
- if (!m_format.isValid())
- return;
-
- auto caps = QGstUtils::capsForAudioFormat(m_format);
- Q_ASSERT(!caps.isNull());
- m_appSrc.set("caps", caps);
- m_appSrc.set("format", GST_FORMAT_TIME);
-}
-
void QGstAppSource::setExternalAppSrc(QGstAppSrc appsrc)
{
QMutexLocker locker(&m_mutex);
@@ -98,7 +81,6 @@ bool QGstAppSource::setStream(QIODevice *stream, qint64 offset)
m_dataRequestSize = 0;
m_sequential = true;
m_maxBytes = 0;
- streamedSamples = 0;
if (stream) {
if (!stream->isOpen() && !stream->open(QIODevice::ReadOnly))
@@ -122,38 +104,6 @@ QGstElement QGstAppSource::element() const
return m_appSrc;
}
-void QGstAppSource::write(const char *data, qsizetype size)
-{
- QMutexLocker locker(&m_mutex);
-
- qCDebug(qLcAppSrc) << "write" << size << m_noMoreData << m_dataRequestSize;
- if (!size)
- return;
- Q_ASSERT(!m_stream);
- m_buffer.append(data, size);
- m_noMoreData = false;
- pushData();
-}
-
-bool QGstAppSource::canAcceptMoreData() const
-{
- QMutexLocker locker(&m_mutex);
- return m_noMoreData || m_dataRequestSize != 0;
-}
-
-void QGstAppSource::suspend()
-{
- QMutexLocker locker(&m_mutex);
- m_suspended = true;
-}
-
-void QGstAppSource::resume()
-{
- QMutexLocker locker(&m_mutex);
- m_suspended = false;
- m_noMoreData = true;
-}
-
void QGstAppSource::onDataReady()
{
qCDebug(qLcAppSrc) << "onDataReady" << m_stream->bytesAvailable() << m_stream->size();
@@ -165,29 +115,27 @@ void QGstAppSource::streamDestroyed()
qCDebug(qLcAppSrc) << "stream destroyed";
m_stream = nullptr;
m_dataRequestSize = 0;
- streamedSamples = 0;
sendEOS();
}
void QGstAppSource::pushData()
{
- if (m_appSrc.isNull() || !m_dataRequestSize || m_suspended) {
- qCDebug(qLcAppSrc) << "push data: return immediately" << m_appSrc.isNull() << m_dataRequestSize << m_suspended;
+ if (m_appSrc.isNull() || !m_dataRequestSize) {
+ qCDebug(qLcAppSrc) << "push data: return immediately" << m_appSrc.isNull()
+ << m_dataRequestSize;
return;
}
- qCDebug(qLcAppSrc) << "pushData" << (m_stream ? m_stream : nullptr) << m_buffer.size();
+ Q_ASSERT(m_stream);
+
+ qCDebug(qLcAppSrc) << "pushData" << m_stream;
if ((m_stream && m_stream->atEnd())) {
- eosOrIdle();
- qCDebug(qLcAppSrc) << "end pushData" << (m_stream ? m_stream : nullptr) << m_buffer.size();
+ sendEOS();
+ qCDebug(qLcAppSrc) << "end pushData" << m_stream;
return;
}
- qint64 size;
- if (m_stream)
- size = m_stream->bytesAvailable();
- else
- size = m_buffer.size();
+ qint64 size = m_stream->bytesAvailable();
if (!m_dataRequestSize)
m_dataRequestSize = m_maxBytes;
@@ -196,51 +144,41 @@ void QGstAppSource::pushData()
GstBuffer* buffer = gst_buffer_new_and_alloc(size);
- if (m_sequential || !m_stream)
+ if (m_sequential)
buffer->offset = bytesReadSoFar;
else
buffer->offset = m_stream->pos();
- if (m_format.isValid()) {
- // timestamp raw audio data
- uint nSamples = size/m_format.bytesPerFrame();
-
- GST_BUFFER_TIMESTAMP(buffer) = gst_util_uint64_scale(streamedSamples, GST_SECOND, m_format.sampleRate());
- GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale(nSamples, GST_SECOND, m_format.sampleRate());
- streamedSamples += nSamples;
- }
-
GstMapInfo mapInfo;
gst_buffer_map(buffer, &mapInfo, GST_MAP_WRITE);
void* bufferData = mapInfo.data;
qint64 bytesRead;
- if (m_stream)
- bytesRead = m_stream->read((char*)bufferData, size);
- else
- bytesRead = m_buffer.read((char*)bufferData, size);
- buffer->offset_end = buffer->offset + bytesRead - 1;
+ bytesRead = m_stream->read((char *)bufferData, size);
+
+ buffer->offset_end = buffer->offset + bytesRead - 1;
bytesReadSoFar += bytesRead;
gst_buffer_unmap(buffer, &mapInfo);
qCDebug(qLcAppSrc) << "pushing bytes into gstreamer" << buffer->offset << bytesRead;
if (bytesRead == 0) {
gst_buffer_unref(buffer);
- eosOrIdle();
- qCDebug(qLcAppSrc) << "end pushData" << (m_stream ? m_stream : nullptr) << m_buffer.size();
+ sendEOS();
+ qCDebug(qLcAppSrc) << "end pushData" << m_stream;
return;
}
- m_noMoreData = false;
- emit bytesProcessed(bytesRead);
GstFlowReturn ret = m_appSrc.pushBuffer(buffer);
- if (ret == GST_FLOW_ERROR) {
- qWarning() << "QGstAppSrc: push buffer error";
- } else if (ret == GST_FLOW_FLUSHING) {
- qWarning() << "QGstAppSrc: push buffer wrong state";
+ switch (ret) {
+ case GST_FLOW_OK:
+ break;
+
+ default:
+ qWarning() << "QGstAppSrc: push buffer error -" << gst_flow_get_name(ret);
+ break;
}
- qCDebug(qLcAppSrc) << "end pushData" << (m_stream ? m_stream : nullptr) << m_buffer.size();
+ qCDebug(qLcAppSrc) << "end pushData" << m_stream;
}
bool QGstAppSource::doSeek(qint64 value)
@@ -297,23 +235,6 @@ void QGstAppSource::sendEOS()
gst_app_src_end_of_stream(GST_APP_SRC(m_appSrc.element()));
}
-void QGstAppSource::eosOrIdle()
-{
- qCDebug(qLcAppSrc) << "eosOrIdle";
- if (m_appSrc.isNull())
- return;
-
- if (!m_sequential) {
- sendEOS();
- return;
- }
- if (m_noMoreData)
- return;
- qCDebug(qLcAppSrc) << " idle!";
- m_noMoreData = true;
- emit noMoreData();
-}
-
QT_END_NAMESPACE
#include "moc_qgstappsource_p.cpp"
diff --git a/src/plugins/multimedia/gstreamer/common/qgstappsource_p.h b/src/plugins/multimedia/gstreamer/common/qgstappsource_p.h
index 59ced00dc..b181212d2 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstappsource_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstappsource_p.h
@@ -15,16 +15,14 @@
// We mean it.
//
-#include <private/qtmultimediaglobal_p.h>
-#include <private/qmultimediautils_p.h>
-#include <qaudioformat.h>
#include <QtCore/qobject.h>
#include <QtCore/qiodevice.h>
-#include <QtCore/private/qringbuffer_p.h>
#include <QtCore/qatomic.h>
#include <QtCore/qmutex.h>
+#include <QtMultimedia/private/qtmultimediaglobal_p.h>
+
#include <common/qgst_p.h>
#include <gst/app/gstappsrc.h>
@@ -38,29 +36,18 @@ public:
~QGstAppSource();
bool setup(QIODevice *stream = nullptr, qint64 offset = 0);
- void setAudioFormat(const QAudioFormat &f);
void setExternalAppSrc(QGstAppSrc);
QGstElement element() const;
- void write(const char *data, qsizetype size);
-
- bool canAcceptMoreData() const;
-
- void suspend();
- void resume();
-
-Q_SIGNALS:
- void bytesProcessed(int bytes);
- void noMoreData();
-
private Q_SLOTS:
- void pushData();
- bool doSeek(qint64);
void onDataReady();
-
void streamDestroyed();
+
private:
+ bool doSeek(qint64);
+ void pushData();
+
QGstAppSource(QGstAppSrc appsrc, QObject *parent);
bool setStream(QIODevice *, qint64 offset);
@@ -71,24 +58,18 @@ private:
static void on_need_data(GstAppSrc *element, uint arg0, gpointer userdata);
void sendEOS();
- void eosOrIdle();
mutable QMutex m_mutex;
QIODevice *m_stream = nullptr;
- QRingBuffer m_buffer;
- QAudioFormat m_format;
QGstAppSrc m_appSrc;
bool m_sequential = true;
- bool m_suspended = false;
- bool m_noMoreData = false;
GstAppStreamType m_streamType = GST_APP_STREAM_TYPE_RANDOM_ACCESS;
qint64 m_offset = 0;
qint64 m_maxBytes = 0;
qint64 bytesReadSoFar = 0;
QAtomicInteger<unsigned int> m_dataRequestSize = 0;
- int streamedSamples = 0;
};
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstpipeline.cpp b/src/plugins/multimedia/gstreamer/common/qgstpipeline.cpp
index 7d507f076..8898d84a9 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstpipeline.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstpipeline.cpp
@@ -1,51 +1,64 @@
// Copyright (C) 2016 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-#include <QtCore/qmap.h>
-#include <QtCore/qtimer.h>
-#include <QtCore/qmutex.h>
-#include <QtCore/qlist.h>
#include <QtCore/qabstracteventdispatcher.h>
#include <QtCore/qcoreapplication.h>
+#include <QtCore/qlist.h>
+#include <QtCore/qloggingcategory.h>
+#include <QtCore/qmap.h>
+#include <QtCore/qmutex.h>
#include <QtCore/qproperty.h>
+#include <QtCore/qtimer.h>
#include "qgstpipeline_p.h"
#include "qgstreamermessage_p.h"
QT_BEGIN_NAMESPACE
-class QGstPipelinePrivate : public QObject
+static Q_LOGGING_CATEGORY(qLcGstPipeline, "qt.multimedia.gstpipeline");
+
+static constexpr GstSeekFlags rateChangeSeekFlags =
+#if GST_CHECK_VERSION(1, 18, 0)
+ GST_SEEK_FLAG_INSTANT_RATE_CHANGE;
+#else
+ GST_SEEK_FLAG_FLUSH;
+#endif
+
+class QGstPipelinePrivate
{
public:
- int m_ref = 0;
- guint m_tag = 0;
+ guint m_eventSourceID = 0;
GstBus *m_bus = nullptr;
- QTimer *m_intervalTimer = nullptr;
+ std::unique_ptr<QTimer> m_intervalTimer;
QMutex filterMutex;
QList<QGstreamerSyncMessageFilter*> syncFilters;
QList<QGstreamerBusMessageFilter*> busFilters;
bool inStoppedState = true;
- mutable qint64 m_position = 0;
+ mutable std::chrono::nanoseconds m_position{};
double m_rate = 1.;
- bool m_flushOnConfigChanges = false;
- bool m_pendingFlush = false;
int m_configCounter = 0;
GstState m_savedState = GST_STATE_NULL;
- explicit QGstPipelinePrivate(GstBus *bus, QObject *parent = nullptr);
+ explicit QGstPipelinePrivate(GstBus *bus);
~QGstPipelinePrivate();
- void ref() { ++ m_ref; }
- void deref() { if (!--m_ref) delete this; }
-
void installMessageFilter(QGstreamerSyncMessageFilter *filter);
void removeMessageFilter(QGstreamerSyncMessageFilter *filter);
void installMessageFilter(QGstreamerBusMessageFilter *filter);
void removeMessageFilter(QGstreamerBusMessageFilter *filter);
+ void processMessage(const QGstreamerMessage &msg)
+ {
+ for (QGstreamerBusMessageFilter *filter : std::as_const(busFilters)) {
+ if (filter->processBusMessage(msg))
+ break;
+ }
+ }
+
private:
- static GstBusSyncReply syncGstBusFilter(GstBus* bus, GstMessage* message, QGstPipelinePrivate *d)
+ static GstBusSyncReply syncGstBusFilter(GstBus *bus, GstMessage *message,
+ QGstPipelinePrivate *d)
{
if (!message)
return GST_BUS_PASS;
@@ -74,10 +87,7 @@ private:
QGstreamerMessage::NeedsRef,
};
- for (QGstreamerBusMessageFilter *filter : std::as_const(busFilters)) {
- if (filter->processBusMessage(msg))
- break;
- }
+ processMessage(msg);
}
static gboolean busCallback(GstBus *, GstMessage *message, gpointer data)
@@ -87,17 +97,15 @@ private:
}
};
-QGstPipelinePrivate::QGstPipelinePrivate(GstBus* bus, QObject* parent)
- : QObject(parent),
- m_bus(bus)
+QGstPipelinePrivate::QGstPipelinePrivate(GstBus *bus) : m_bus(bus)
{
// glib event loop can be disabled either by env variable or QT_NO_GLIB define, so check the dispatcher
QAbstractEventDispatcher *dispatcher = QCoreApplication::eventDispatcher();
const bool hasGlib = dispatcher && dispatcher->inherits("QEventDispatcherGlib");
if (!hasGlib) {
- m_intervalTimer = new QTimer(this);
+ m_intervalTimer = std::make_unique<QTimer>();
m_intervalTimer->setInterval(250);
- QObject::connect(m_intervalTimer, &QTimer::timeout, this, [this] {
+ QObject::connect(m_intervalTimer.get(), &QTimer::timeout, m_intervalTimer.get(), [this] {
GstMessage *message;
while ((message = gst_bus_poll(m_bus, GST_MESSAGE_ANY, 0)) != nullptr) {
processMessage(message);
@@ -106,7 +114,8 @@ QGstPipelinePrivate::QGstPipelinePrivate(GstBus* bus, QObject* parent)
});
m_intervalTimer->start();
} else {
- m_tag = gst_bus_add_watch_full(bus, G_PRIORITY_DEFAULT, busCallback, this, nullptr);
+ m_eventSourceID =
+ gst_bus_add_watch_full(bus, G_PRIORITY_DEFAULT, busCallback, this, nullptr);
}
gst_bus_set_sync_handler(bus, (GstBusSyncHandler)syncGstBusFilter, this, nullptr);
@@ -114,9 +123,9 @@ QGstPipelinePrivate::QGstPipelinePrivate(GstBus* bus, QObject* parent)
QGstPipelinePrivate::~QGstPipelinePrivate()
{
- delete m_intervalTimer;
+ m_intervalTimer.reset();
- if (m_tag)
+ if (m_eventSourceID)
gst_bus_remove_watch(m_bus);
gst_bus_set_sync_handler(m_bus, nullptr, nullptr, nullptr);
@@ -191,12 +200,6 @@ void QGstPipeline::setInStoppedState(bool stopped)
d->inStoppedState = stopped;
}
-void QGstPipeline::setFlushOnConfigChanges(bool flush)
-{
- QGstPipelinePrivate *d = getPrivate();
- d->m_flushOnConfigChanges = flush;
-}
-
void QGstPipeline::installMessageFilter(QGstreamerSyncMessageFilter *filter)
{
QGstPipelinePrivate *d = getPrivate();
@@ -223,21 +226,17 @@ void QGstPipeline::removeMessageFilter(QGstreamerBusMessageFilter *filter)
GstStateChangeReturn QGstPipeline::setState(GstState state)
{
- QGstPipelinePrivate *d = getPrivate();
- auto retval = gst_element_set_state(element(), state);
- if (d->m_pendingFlush) {
- d->m_pendingFlush = false;
- flush();
- }
- return retval;
+ return gst_element_set_state(element(), state);
}
-void QGstPipeline::dumpGraph(const char *fileName)
+void QGstPipeline::processMessages(GstMessageType types)
{
- if (isNull())
- return;
-
- QGstBin{ bin(), QGstBin::NeedsRef }.dumpGraph(fileName);
+ QGstPipelinePrivate *d = getPrivate();
+ QGstreamerMessage message{
+ gst_bus_pop_filtered(d->m_bus, types),
+ QGstreamerMessage::HasRef,
+ };
+ d->processMessage(message);
}
void QGstPipeline::beginConfig()
@@ -265,8 +264,8 @@ void QGstPipeline::beginConfig()
break;
}
case GST_STATE_CHANGE_FAILURE: {
- // should not happen
- qCritical() << "QGstPipeline::beginConfig: state change failure";
+ qDebug() << "QGstPipeline::beginConfig: state change failure";
+ dumpGraph("beginConfigFailure");
break;
}
@@ -289,8 +288,6 @@ void QGstPipeline::endConfig()
if (d->m_configCounter)
return;
- if (d->m_flushOnConfigChanges)
- d->m_pendingFlush = true;
if (d->m_savedState == GST_STATE_PLAYING)
setState(GST_STATE_PLAYING);
d->m_savedState = GST_STATE_NULL;
@@ -298,53 +295,50 @@ void QGstPipeline::endConfig()
void QGstPipeline::flush()
{
- QGstPipelinePrivate *d = getPrivate();
- seek(position(), d->m_rate);
+ seek(position());
}
-bool QGstPipeline::seek(qint64 pos, double rate)
+void QGstPipeline::seek(std::chrono::nanoseconds pos, double rate)
{
+ using namespace std::chrono_literals;
+
QGstPipelinePrivate *d = getPrivate();
- // always adjust the rate, so it can be set before playback starts
+ // always adjust the rate, so it can be set before playback starts
// setting position needs a loaded media file that's seekable
- d->m_rate = rate;
- qint64 from = rate > 0 ? pos : 0;
- qint64 to = rate > 0 ? duration() : pos;
- bool success = gst_element_seek(element(), rate, GST_FORMAT_TIME,
- GstSeekFlags(GST_SEEK_FLAG_FLUSH),
- GST_SEEK_TYPE_SET, from,
- GST_SEEK_TYPE_SET, to);
- if (!success)
- return false;
+
+ qCDebug(qLcGstPipeline) << "QGstPipeline::seek to" << pos << "rate:" << rate;
+
+ bool success = (rate > 0)
+ ? gst_element_seek(element(), rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
+ GST_SEEK_TYPE_SET, pos.count(), GST_SEEK_TYPE_END, 0)
+ : gst_element_seek(element(), rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
+ GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET, pos.count());
+
+ if (!success) {
+ qDebug() << "seek: gst_element_seek failed" << pos;
+ return;
+ }
d->m_position = pos;
- return true;
}
-bool QGstPipeline::setPlaybackRate(double rate, bool applyToPipeline)
+void QGstPipeline::seek(std::chrono::nanoseconds pos)
+{
+ qCDebug(qLcGstPipeline) << "QGstPipeline::seek to" << pos;
+ seek(pos, getPrivate()->m_rate);
+}
+
+void QGstPipeline::setPlaybackRate(double rate)
{
QGstPipelinePrivate *d = getPrivate();
if (rate == d->m_rate)
- return false;
-
- if (!applyToPipeline) {
- d->m_rate = rate;
- return true;
- }
+ return;
- constexpr GstSeekFlags seekFlags =
-#if GST_CHECK_VERSION(1, 18, 0)
- GST_SEEK_FLAG_INSTANT_RATE_CHANGE;
-#else
- GST_SEEK_FLAG_FLUSH;
-#endif
+ d->m_rate = rate;
- bool success = gst_element_seek(element(), rate, GST_FORMAT_TIME, seekFlags, GST_SEEK_TYPE_NONE,
- GST_CLOCK_TIME_NONE, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
- if (success)
- d->m_rate = rate;
+ qCDebug(qLcGstPipeline) << "QGstPipeline::setPlaybackRate to" << rate;
- return success;
+ applyPlaybackRate(/*instantRateChange =*/true);
}
double QGstPipeline::playbackRate() const
@@ -353,27 +347,48 @@ double QGstPipeline::playbackRate() const
return d->m_rate;
}
-bool QGstPipeline::setPosition(qint64 pos)
+void QGstPipeline::applyPlaybackRate(bool instantRateChange)
{
QGstPipelinePrivate *d = getPrivate();
- return seek(pos, d->m_rate);
+
+ // do not use GST_SEEK_FLAG_FLUSH together with GST_SEEK_TYPE_NONE
+ // https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/3604
+ if (instantRateChange && GST_CHECK_VERSION(1, 18, 0)) {
+ qCDebug(qLcGstPipeline) << "QGstPipeline::applyPlaybackRate instantly";
+ bool success = gst_element_seek(
+ element(), d->m_rate, GST_FORMAT_UNDEFINED, rateChangeSeekFlags, GST_SEEK_TYPE_NONE,
+ GST_CLOCK_TIME_NONE, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
+ if (!success)
+ qDebug() << "setPlaybackRate: gst_element_seek failed";
+ } else {
+ seek(position(), d->m_rate);
+ }
}
-qint64 QGstPipeline::position() const
+void QGstPipeline::setPosition(std::chrono::nanoseconds pos)
+{
+ seek(pos);
+}
+
+std::chrono::nanoseconds QGstPipeline::position() const
{
- gint64 pos;
QGstPipelinePrivate *d = getPrivate();
- if (gst_element_query_position(element(), GST_FORMAT_TIME, &pos))
- d->m_position = pos;
+ std::optional<std::chrono::nanoseconds> pos = QGstElement::position();
+ if (pos) {
+ d->m_position = *pos;
+ qCDebug(qLcGstPipeline) << "QGstPipeline::position:"
+ << std::chrono::round<std::chrono::milliseconds>(*pos);
+ } else {
+ qDebug() << "QGstPipeline: failed to query position, using previous position";
+ }
+
return d->m_position;
}
-qint64 QGstPipeline::duration() const
+std::chrono::milliseconds QGstPipeline::positionInMs() const
{
- gint64 d;
- if (!gst_element_query_duration(element(), GST_FORMAT_TIME, &d))
- return 0.;
- return d;
+ using namespace std::chrono;
+ return round<milliseconds>(position());
}
QGstPipelinePrivate *QGstPipeline::getPrivate() const
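The rewritten seek above is direction-aware: with a positive rate the segment runs from pos to the end of the stream, with a negative rate from the start of the stream down to pos. A condensed sketch of the two gst_element_seek() call shapes involved; element and pos are placeholders, not names from the patch:

#include <gst/gst.h>

// forward playback (rate > 0): segment [pos, end of stream]
gst_element_seek(element, rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
                 GST_SEEK_TYPE_SET, pos.count(), GST_SEEK_TYPE_END, 0);

// reverse playback (rate < 0): segment [0, pos]
gst_element_seek(element, rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
                 GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET, pos.count());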
diff --git a/src/plugins/multimedia/gstreamer/common/qgstpipeline_p.h b/src/plugins/multimedia/gstreamer/common/qgstpipeline_p.h
index 6914993de..559e7b382 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstpipeline_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstpipeline_p.h
@@ -62,8 +62,6 @@ public:
bool inStoppedState() const;
void setInStoppedState(bool stopped);
- void setFlushOnConfigChanges(bool flush);
-
void installMessageFilter(QGstreamerSyncMessageFilter *filter);
void removeMessageFilter(QGstreamerSyncMessageFilter *filter);
void installMessageFilter(QGstreamerBusMessageFilter *filter);
@@ -73,7 +71,7 @@ public:
GstPipeline *pipeline() const { return GST_PIPELINE_CAST(get()); }
- void dumpGraph(const char *fileName);
+ void processMessages(GstMessageType = GST_MESSAGE_ANY);
template <typename Functor>
void modifyPipelineWhileNotRunning(Functor &&fn)
@@ -94,16 +92,18 @@ public:
void flush();
- bool seek(qint64 pos, double rate);
- bool setPlaybackRate(double rate, bool applyToPipeline = true);
+ void setPlaybackRate(double rate);
double playbackRate() const;
+ void applyPlaybackRate(bool instantRateChange);
- bool setPosition(qint64 pos);
- qint64 position() const;
-
- qint64 duration() const;
+ void setPosition(std::chrono::nanoseconds pos);
+ std::chrono::nanoseconds position() const;
+ std::chrono::milliseconds positionInMs() const;
private:
+ void seek(std::chrono::nanoseconds pos, double rate);
+ void seek(std::chrono::nanoseconds pos);
+
QGstPipelinePrivate *getPrivate() const;
void beginConfig();
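With the header above, pipeline positions are expressed as std::chrono durations end to end: callers pass milliseconds (or any coarser duration) into setPosition() and read back nanoseconds or rounded milliseconds. A short usage sketch, assuming a QGstPipeline instance named pipeline:

using namespace std::chrono_literals;

pipeline.setPosition(1500ms);                            // milliseconds convert to nanoseconds losslessly
std::chrono::nanoseconds ns = pipeline.position();       // falls back to the last known value if the query fails
std::chrono::milliseconds ms = pipeline.positionInMs();  // round<milliseconds>(position())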
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput.cpp b/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput.cpp
index 0381b921e..a2f60eaa1 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput.cpp
@@ -1,107 +1,156 @@
// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-#include <QtMultimedia/qaudiodevice.h>
-#include <QtMultimedia/qaudioinput.h>
+#include <common/qgstreameraudioinput_p.h>
#include <QtCore/qloggingcategory.h>
+#include <QtMultimedia/qaudiodevice.h>
+#include <QtMultimedia/qaudioinput.h>
#include <audio/qgstreameraudiodevice_p.h>
-#include <common/qgstreameraudioinput_p.h>
+#include <common/qgstpipeline_p.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <fcntl.h>
-#include <utility>
+QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLcMediaAudioInput, "qt.multimedia.audioInput")
+namespace {
-QT_BEGIN_NAMESPACE
+Q_LOGGING_CATEGORY(qLcMediaAudioInput, "qt.multimedia.audioinput")
-QMaybe<QPlatformAudioInput *> QGstreamerAudioInput::create(QAudioInput *parent)
+constexpr QLatin1String defaultSrcName = [] {
+ using namespace Qt::Literals;
+
+ if constexpr (QT_CONFIG(pulseaudio))
+ return "pulsesrc"_L1;
+ else if constexpr (QT_CONFIG(alsa))
+ return "alsasrc"_L1;
+ else
+ return "autoaudiosrc"_L1;
+}();
+
+bool hasDeviceProperty(const QGstElement &element)
{
- QGstElement autoaudiosrc = QGstElement::createFromFactory("autoaudiosrc", "autoaudiosrc");
- if (!autoaudiosrc)
- return errorMessageCannotFindElement("autoaudiosrc");
+ using namespace Qt::Literals;
+ QLatin1String elementType = element.typeName();
+
+ if constexpr (QT_CONFIG(pulseaudio))
+ return elementType == "GstPulseSrc"_L1;
- QGstElement volume = QGstElement::createFromFactory("volume", "volume");
- if (!volume)
- return errorMessageCannotFindElement("volume");
+ if constexpr (0 && QT_CONFIG(alsa)) // alsasrc has a "device" property, but it cannot be changed
+ // during playback
+ return elementType == "GstAlsaSrc"_L1;
- return new QGstreamerAudioInput(autoaudiosrc, volume, parent);
+ return false;
}
-QGstreamerAudioInput::QGstreamerAudioInput(QGstElement autoaudiosrc, QGstElement volume,
- QAudioInput *parent)
+} // namespace
+
+QMaybe<QPlatformAudioInput *> QGstreamerAudioInput::create(QAudioInput *parent)
+{
+ static const auto error = qGstErrorMessageIfElementsNotAvailable("autoaudiosrc", "volume");
+ if (error)
+ return *error;
+
+ return new QGstreamerAudioInput(parent);
+}
+
+QGstreamerAudioInput::QGstreamerAudioInput(QAudioInput *parent)
: QObject(parent),
QPlatformAudioInput(parent),
- gstAudioInput(QGstBin::create("audioInput")),
- audioSrc(std::move(autoaudiosrc)),
- audioVolume(std::move(volume))
+ m_audioInputBin(QGstBin::create("audioInput")),
+ m_audioSrc{
+ QGstElement::createFromFactory(defaultSrcName.constData(), "autoaudiosrc"),
+ },
+ m_audioVolume{
+ QGstElement::createFromFactory("volume", "volume"),
+ }
+{
+ m_audioInputBin.add(m_audioSrc, m_audioVolume);
+ qLinkGstElements(m_audioSrc, m_audioVolume);
+
+ m_audioInputBin.addGhostPad(m_audioVolume, "src");
+}
+
+QGstElement QGstreamerAudioInput::createGstElement()
{
- gstAudioInput.add(audioSrc, audioVolume);
- qLinkGstElements(audioSrc, audioVolume);
+ const auto *customDeviceInfo =
+ dynamic_cast<const QGStreamerCustomAudioDeviceInfo *>(m_audioDevice.handle());
+
+ if (customDeviceInfo) {
+ qCDebug(qLcMediaAudioInput)
+ << "requesting custom audio src element: " << customDeviceInfo->id;
+
+ QGstElement element = QGstBin::createFromPipelineDescription(customDeviceInfo->id,
+ /*name=*/nullptr,
+ /*ghostUnlinkedPads=*/true);
+ if (element)
+ return element;
+
+ qCWarning(qLcMediaAudioInput)
+ << "Cannot create audio source element:" << customDeviceInfo->id;
+ }
+
+ const QByteArray &id = m_audioDevice.id();
+ if constexpr (QT_CONFIG(pulseaudio) || QT_CONFIG(alsa)) {
+ QGstElement newSrc = QGstElement::createFromFactory(defaultSrcName.constData(), "audiosrc");
+ if (newSrc) {
+ newSrc.set("device", id.constData());
+ return newSrc;
+ }
+
+ qWarning() << "Cannot create" << defaultSrcName;
- gstAudioInput.addGhostPad(audioVolume, "src");
+ } else {
+ auto *deviceInfo = dynamic_cast<const QGStreamerAudioDeviceInfo *>(m_audioDevice.handle());
+ if (deviceInfo && deviceInfo->gstDevice) {
+ QGstElement element = QGstElement::createFromDevice(deviceInfo->gstDevice, "audiosrc");
+ if (element)
+ return element;
+ }
+ }
+ qCWarning(qLcMediaAudioInput) << "Invalid audio device";
+ qCWarning(qLcMediaAudioInput)
+ << "Failed to create a gst element for the audio device, using a default audio source";
+ return QGstElement::createFromFactory("autoaudiosrc", "audiosrc");
}
QGstreamerAudioInput::~QGstreamerAudioInput()
{
- gstAudioInput.setStateSync(GST_STATE_NULL);
+ m_audioInputBin.setStateSync(GST_STATE_NULL);
}
void QGstreamerAudioInput::setVolume(float volume)
{
- audioVolume.set("volume", volume);
+ m_audioVolume.set("volume", volume);
}
void QGstreamerAudioInput::setMuted(bool muted)
{
- audioVolume.set("mute", muted);
+ m_audioVolume.set("mute", muted);
}
void QGstreamerAudioInput::setAudioDevice(const QAudioDevice &device)
{
if (device == m_audioDevice)
return;
- qCDebug(qLcMediaAudioInput) << "setAudioInput" << device.description() << device.isNull();
+ qCDebug(qLcMediaAudioInput) << "setAudioDevice" << device.description() << device.isNull();
m_audioDevice = device;
- QGstElement newSrc;
- if constexpr (QT_CONFIG(pulseaudio)) {
- auto id = m_audioDevice.id();
- newSrc = QGstElement::createFromFactory("pulsesrc", "audiosrc");
- if (!newSrc.isNull())
- newSrc.set("device", id.constData());
- else
- qCWarning(qLcMediaAudioInput) << "Invalid audio device";
- } else {
- auto *deviceInfo = static_cast<const QGStreamerAudioDeviceInfo *>(m_audioDevice.handle());
- if (deviceInfo && deviceInfo->gstDevice)
- newSrc = QGstElement::createFromDevice(deviceInfo->gstDevice, "audiosrc");
- else
- qCWarning(qLcMediaAudioInput) << "Invalid audio device";
+ if (hasDeviceProperty(m_audioSrc) && !isCustomAudioDevice(m_audioDevice)) {
+ m_audioSrc.set("device", m_audioDevice.id().constData());
+ return;
}
- if (newSrc.isNull()) {
- qCWarning(qLcMediaAudioInput) << "Failed to create a gst element for the audio device, using a default audio source";
- newSrc = QGstElement::createFromFactory("autoaudiosrc", "audiosrc");
- }
+ QGstElement newSrc = createGstElement();
- QGstPipeline::modifyPipelineWhileNotRunning(gstAudioInput.getPipeline(), [&] {
- qUnlinkGstElements(audioSrc, audioVolume);
- gstAudioInput.stopAndRemoveElements(audioSrc);
- audioSrc = std::move(newSrc);
- gstAudioInput.add(audioSrc);
- qLinkGstElements(audioSrc, audioVolume);
- audioSrc.syncStateWithParent();
+ QGstPipeline::modifyPipelineWhileNotRunning(m_audioInputBin.getPipeline(), [&] {
+ qUnlinkGstElements(m_audioSrc, m_audioVolume);
+ m_audioInputBin.stopAndRemoveElements(m_audioSrc);
+ m_audioSrc = std::move(newSrc);
+ m_audioInputBin.add(m_audioSrc);
+ qLinkGstElements(m_audioSrc, m_audioVolume);
+ m_audioSrc.syncStateWithParent();
});
}
-QAudioDevice QGstreamerAudioInput::audioInput() const
-{
- return m_audioDevice;
-}
-
QT_END_NAMESPACE
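For custom audio devices, the code above builds the source from a gst-launch style pipeline description rather than a factory name, ghosting any unlinked pads so the resulting bin can be linked like a regular element. A rough illustration of that call; the description string is only an example, not something the patch ships:

// a custom device id is a gst-launch fragment; ghost pads are added for
// unlinked pads so the bin behaves like a single source element
QGstElement src = QGstBin::createFromPipelineDescription(
        "audiotestsrc wave=sine freq=440", /*name=*/nullptr,
        /*ghostUnlinkedPads=*/true);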
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput_p.h b/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput_p.h
index 69500ecab..4b01b53a6 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput_p.h
@@ -16,13 +16,9 @@
//
#include <QtCore/qobject.h>
-#include <QtMultimedia/private/qmultimediautils_p.h>
#include <QtMultimedia/private/qplatformaudioinput_p.h>
-#include <QtMultimedia/private/qtmultimediaglobal_p.h>
-#include <QtMultimedia/qaudiodevice.h>
#include <common/qgst_p.h>
-#include <common/qgstpipeline_p.h>
QT_BEGIN_NAMESPACE
@@ -34,25 +30,24 @@ public:
static QMaybe<QPlatformAudioInput *> create(QAudioInput *parent);
~QGstreamerAudioInput();
- bool setAudioInput(const QAudioDevice &);
- QAudioDevice audioInput() const;
-
void setAudioDevice(const QAudioDevice &) override;
void setVolume(float) override;
void setMuted(bool) override;
- QGstElement gstElement() const { return gstAudioInput; }
+ QGstElement gstElement() const { return m_audioInputBin; }
private:
- QGstreamerAudioInput(QGstElement autoaudiosrc, QGstElement volume, QAudioInput *parent);
+ explicit QGstreamerAudioInput(QAudioInput *parent);
+
+ QGstElement createGstElement();
QAudioDevice m_audioDevice;
// Gst elements
- QGstBin gstAudioInput;
+ QGstBin m_audioInputBin;
- QGstElement audioSrc;
- QGstElement audioVolume;
+ QGstElement m_audioSrc;
+ QGstElement m_audioVolume;
};
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput.cpp b/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput.cpp
index f45c371e9..1a8c6976c 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput.cpp
@@ -2,108 +2,169 @@
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
#include <common/qgstreameraudiooutput_p.h>
-#include <audio/qgstreameraudiodevice_p.h>
+#include <QtCore/qloggingcategory.h>
#include <QtMultimedia/qaudiodevice.h>
#include <QtMultimedia/qaudiooutput.h>
-#include <QtCore/qloggingcategory.h>
-#include <utility>
+#include <common/qgstpipeline_p.h>
+#include <audio/qgstreameraudiodevice_p.h>
-static Q_LOGGING_CATEGORY(qLcMediaAudioOutput, "qt.multimedia.audiooutput")
QT_BEGIN_NAMESPACE
-QMaybe<QPlatformAudioOutput *> QGstreamerAudioOutput::create(QAudioOutput *parent)
+namespace {
+
+Q_LOGGING_CATEGORY(qLcMediaAudioOutput, "qt.multimedia.audiooutput")
+
+constexpr QLatin1String defaultSinkName = [] {
+ using namespace Qt::Literals;
+
+ if constexpr (QT_CONFIG(pulseaudio))
+ return "pulsesink"_L1;
+ else if constexpr (QT_CONFIG(alsa))
+ return "alsasink"_L1;
+ else
+ return "autoaudiosink"_L1;
+}();
+
+bool hasDeviceProperty(const QGstElement &element)
{
- QGstElement audioconvert = QGstElement::createFromFactory("audioconvert", "audioConvert");
- if (!audioconvert)
- return errorMessageCannotFindElement("audioconvert");
+ using namespace Qt::Literals;
+ QLatin1String elementType = element.typeName();
+
+ if constexpr (QT_CONFIG(pulseaudio))
+ return elementType == "GstPulseSink"_L1;
+ if constexpr (0 && QT_CONFIG(alsa)) // alsasink has a "device" property, but it cannot be changed
+ // during playback
+ return elementType == "GstAlsaSink"_L1;
- QGstElement audioresample = QGstElement::createFromFactory("audioresample", "audioResample");
- if (!audioresample)
- return errorMessageCannotFindElement("audioresample");
+ return false;
+}
- QGstElement volume = QGstElement::createFromFactory("volume", "volume");
- if (!volume)
- return errorMessageCannotFindElement("volume");
+} // namespace
- QGstElement autoaudiosink = QGstElement::createFromFactory("autoaudiosink", "autoAudioSink");
- if (!autoaudiosink)
- return errorMessageCannotFindElement("autoaudiosink");
+QMaybe<QPlatformAudioOutput *> QGstreamerAudioOutput::create(QAudioOutput *parent)
+{
+ static const auto error = qGstErrorMessageIfElementsNotAvailable(
+ "audioconvert", "audioresample", "volume", "autoaudiosink");
+ if (error)
+ return *error;
- return new QGstreamerAudioOutput(audioconvert, audioresample, volume, autoaudiosink, parent);
+ return new QGstreamerAudioOutput(parent);
}
-QGstreamerAudioOutput::QGstreamerAudioOutput(QGstElement audioconvert, QGstElement audioresample,
- QGstElement volume, QGstElement autoaudiosink,
- QAudioOutput *parent)
+QGstreamerAudioOutput::QGstreamerAudioOutput(QAudioOutput *parent)
: QObject(parent),
QPlatformAudioOutput(parent),
- gstAudioOutput(QGstBin::create("audioOutput")),
- audioConvert(std::move(audioconvert)),
- audioResample(std::move(audioresample)),
- audioVolume(std::move(volume)),
- audioSink(std::move(autoaudiosink))
+ m_audioOutputBin(QGstBin::create("audioOutput")),
+ m_audioQueue{
+ QGstElement::createFromFactory("queue", "audioQueue"),
+ },
+ m_audioConvert{
+ QGstElement::createFromFactory("audioconvert", "audioConvert"),
+ },
+ m_audioResample{
+ QGstElement::createFromFactory("audioresample", "audioResample"),
+ },
+ m_audioVolume{
+ QGstElement::createFromFactory("volume", "volume"),
+ },
+ m_audioSink{
+ QGstElement::createFromFactory(defaultSinkName.constData(), "audiosink"),
+ }
+{
+ m_audioOutputBin.add(m_audioQueue, m_audioConvert, m_audioResample, m_audioVolume, m_audioSink);
+ qLinkGstElements(m_audioQueue, m_audioConvert, m_audioResample, m_audioVolume, m_audioSink);
+
+ m_audioOutputBin.addGhostPad(m_audioQueue, "sink");
+}
+
+QGstElement QGstreamerAudioOutput::createGstElement()
{
- audioQueue = QGstElement::createFromFactory("queue", "audioQueue");
- gstAudioOutput.add(audioQueue, audioConvert, audioResample, audioVolume, audioSink);
- qLinkGstElements(audioQueue, audioConvert, audioResample, audioVolume, audioSink);
+ const auto *customDeviceInfo =
+ dynamic_cast<const QGStreamerCustomAudioDeviceInfo *>(m_audioDevice.handle());
+
+ if (customDeviceInfo) {
+ qCDebug(qLcMediaAudioOutput)
+ << "requesting custom audio sink element: " << customDeviceInfo->id;
+
+ QGstElement element =
+ QGstBin::createFromPipelineDescription(customDeviceInfo->id, /*name=*/nullptr,
+ /*ghostUnlinkedPads=*/true);
+ if (element)
+ return element;
+
+ qCWarning(qLcMediaAudioOutput)
+ << "Cannot create audio sink element:" << customDeviceInfo->id;
+ }
+
+ const QByteArray &id = m_audioDevice.id();
+ if constexpr (QT_CONFIG(pulseaudio) || QT_CONFIG(alsa)) {
+ QGstElement newSink =
+ QGstElement::createFromFactory(defaultSinkName.constData(), "audiosink");
+ if (newSink) {
+ newSink.set("device", id.constData());
+ return newSink;
+ }
- gstAudioOutput.addGhostPad(audioQueue, "sink");
+ qWarning() << "Cannot create" << defaultSinkName;
+ } else {
+ auto *deviceInfo = dynamic_cast<const QGStreamerAudioDeviceInfo *>(m_audioDevice.handle());
+ if (deviceInfo && deviceInfo->gstDevice) {
+ QGstElement element = QGstElement::createFromDevice(deviceInfo->gstDevice, "audiosink");
+ if (element)
+ return element;
+ }
+ }
+ qCWarning(qLcMediaAudioOutput) << "Invalid audio device:" << m_audioDevice.id();
+ qCWarning(qLcMediaAudioOutput)
+ << "Failed to create a gst element for the audio device, using a default audio sink";
+ return QGstElement::createFromFactory("autoaudiosink", "audiosink");
}
QGstreamerAudioOutput::~QGstreamerAudioOutput()
{
- gstAudioOutput.setStateSync(GST_STATE_NULL);
+ m_audioOutputBin.setStateSync(GST_STATE_NULL);
}
void QGstreamerAudioOutput::setVolume(float volume)
{
- audioVolume.set("volume", volume);
+ m_audioVolume.set("volume", volume);
}
void QGstreamerAudioOutput::setMuted(bool muted)
{
- audioVolume.set("mute", muted);
+ m_audioVolume.set("mute", muted);
}
-void QGstreamerAudioOutput::setAudioDevice(const QAudioDevice &info)
+void QGstreamerAudioOutput::setAudioDevice(const QAudioDevice &device)
{
- if (info == m_audioOutput)
+ if (device == m_audioDevice)
return;
- qCDebug(qLcMediaAudioOutput) << "setAudioOutput" << info.description() << info.isNull();
- m_audioOutput = info;
-
- QGstElement newSink;
- if constexpr (QT_CONFIG(pulseaudio)) {
- auto id = m_audioOutput.id();
- newSink = QGstElement::createFromFactory("pulsesink", "audiosink");
- if (!newSink.isNull())
- newSink.set("device", id.constData());
- else
- qCWarning(qLcMediaAudioOutput) << "Invalid audio device";
- } else {
- auto *deviceInfo = static_cast<const QGStreamerAudioDeviceInfo *>(m_audioOutput.handle());
- if (deviceInfo && deviceInfo->gstDevice)
- newSink = QGstElement::createFromDevice(deviceInfo->gstDevice, "audiosink");
- else
- qCWarning(qLcMediaAudioOutput) << "Invalid audio device";
- }
+ qCDebug(qLcMediaAudioOutput) << "setAudioDevice" << device.description() << device.isNull();
- if (newSink.isNull()) {
- qCWarning(qLcMediaAudioOutput) << "Failed to create a gst element for the audio device, using a default audio sink";
- newSink = QGstElement::createFromFactory("autoaudiosink", "audiosink");
+ m_audioDevice = device;
+
+ if (hasDeviceProperty(m_audioSink) && !isCustomAudioDevice(m_audioDevice)) {
+ m_audioSink.set("device", m_audioDevice.id().constData());
+ return;
}
- QGstPipeline::modifyPipelineWhileNotRunning(gstAudioOutput.getPipeline(), [&] {
- qUnlinkGstElements(audioVolume, audioSink);
- gstAudioOutput.stopAndRemoveElements(audioSink);
- audioSink = std::move(newSink);
- gstAudioOutput.add(audioSink);
- audioSink.syncStateWithParent();
- qLinkGstElements(audioVolume, audioSink);
+ QGstElement newSink = createGstElement();
+
+ QGstPipeline::modifyPipelineWhileNotRunning(m_audioOutputBin.getPipeline(), [&] {
+ qUnlinkGstElements(m_audioVolume, m_audioSink);
+ m_audioOutputBin.stopAndRemoveElements(m_audioSink);
+ m_audioSink = std::move(newSink);
+ m_audioOutputBin.add(m_audioSink);
+ m_audioSink.syncStateWithParent();
+ qLinkGstElements(m_audioVolume, m_audioSink);
});
+
+ // we need to flush the pipeline; otherwise the new sink doesn't always reach the new state
+ if (m_audioOutputBin.getPipeline())
+ m_audioOutputBin.getPipeline().flush();
}
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput_p.h b/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput_p.h
index 4b528d9ee..da11c39d2 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput_p.h
@@ -16,13 +16,9 @@
//
#include <QtCore/qobject.h>
-#include <QtMultimedia/private/qmultimediautils_p.h>
#include <QtMultimedia/private/qplatformaudiooutput_p.h>
-#include <QtMultimedia/private/qtmultimediaglobal_p.h>
-#include <QtMultimedia/qaudiodevice.h>
#include <common/qgst_p.h>
-#include <common/qgstpipeline_p.h>
QT_BEGIN_NAMESPACE
@@ -38,23 +34,23 @@ public:
void setVolume(float) override;
void setMuted(bool) override;
- QGstElement gstElement() const { return gstAudioOutput; }
+ QGstElement gstElement() const { return m_audioOutputBin; }
private:
- QGstreamerAudioOutput(QGstElement audioconvert, QGstElement audioresample, QGstElement volume,
- QGstElement autoaudiosink, QAudioOutput *parent);
+ explicit QGstreamerAudioOutput(QAudioOutput *parent);
- QAudioDevice m_audioOutput;
+ QGstElement createGstElement();
+
+ QAudioDevice m_audioDevice;
// Gst elements
- QGstPipeline gstPipeline;
- QGstBin gstAudioOutput;
-
- QGstElement audioQueue;
- QGstElement audioConvert;
- QGstElement audioResample;
- QGstElement audioVolume;
- QGstElement audioSink;
+ QGstBin m_audioOutputBin;
+
+ QGstElement m_audioQueue;
+ QGstElement m_audioConvert;
+ QGstElement m_audioResample;
+ QGstElement m_audioVolume;
+ QGstElement m_audioSink;
};
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer.cpp b/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer.cpp
index 9a5b9e46a..014bbe77d 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer.cpp
@@ -26,6 +26,10 @@
#include <sys/stat.h>
#include <fcntl.h>
+#if QT_CONFIG(gstreamer_gl)
+# include <gst/gl/gl.h>
+#endif
+
static Q_LOGGING_CATEGORY(qLcMediaPlayer, "qt.multimedia.player")
QT_BEGIN_NAMESPACE
@@ -74,6 +78,23 @@ QGstreamerMediaPlayer::TrackSelector &QGstreamerMediaPlayer::trackSelector(Track
return ts;
}
+void QGstreamerMediaPlayer::mediaStatusChanged(QMediaPlayer::MediaStatus status)
+{
+ if (status != QMediaPlayer::StalledMedia)
+ m_stalledMediaNotifier.stop();
+
+ QPlatformMediaPlayer::mediaStatusChanged(status);
+}
+
+void QGstreamerMediaPlayer::updateBufferProgress(float newProgress)
+{
+ if (qFuzzyIsNull(newProgress - m_bufferProgress))
+ return;
+
+ m_bufferProgress = newProgress;
+ bufferProgressChanged(m_bufferProgress);
+}
+
void QGstreamerMediaPlayer::disconnectDecoderHandlers()
{
auto handlers = std::initializer_list<QGObjectHandlerScopedConnection *>{
@@ -92,47 +113,35 @@ QMaybe<QPlatformMediaPlayer *> QGstreamerMediaPlayer::create(QMediaPlayer *paren
if (!videoOutput)
return videoOutput.error();
- QGstElement videoInputSelector =
- QGstElement::createFromFactory("input-selector", "videoInputSelector");
- if (!videoInputSelector)
- return errorMessageCannotFindElement("input-selector");
+ static const auto error =
+ qGstErrorMessageIfElementsNotAvailable("input-selector", "decodebin", "uridecodebin");
+ if (error)
+ return *error;
- QGstElement audioInputSelector =
- QGstElement::createFromFactory("input-selector", "audioInputSelector");
- if (!audioInputSelector)
- return errorMessageCannotFindElement("input-selector");
-
- QGstElement subTitleInputSelector =
- QGstElement::createFromFactory("input-selector", "subTitleInputSelector");
- if (!subTitleInputSelector)
- return errorMessageCannotFindElement("input-selector");
-
- return new QGstreamerMediaPlayer(videoOutput.value(), videoInputSelector, audioInputSelector,
- subTitleInputSelector, parent);
+ return new QGstreamerMediaPlayer(videoOutput.value(), parent);
}
QGstreamerMediaPlayer::QGstreamerMediaPlayer(QGstreamerVideoOutput *videoOutput,
- QGstElement videoInputSelector,
- QGstElement audioInputSelector,
- QGstElement subTitleInputSelector,
QMediaPlayer *parent)
: QObject(parent),
QPlatformMediaPlayer(parent),
- trackSelectors{ { { VideoStream, videoInputSelector },
- { AudioStream, audioInputSelector },
- { SubtitleStream, subTitleInputSelector } } },
+ trackSelectors{ {
+ { VideoStream,
+ QGstElement::createFromFactory("input-selector", "videoInputSelector") },
+ { AudioStream,
+ QGstElement::createFromFactory("input-selector", "audioInputSelector") },
+ { SubtitleStream,
+ QGstElement::createFromFactory("input-selector", "subTitleInputSelector") },
+ } },
playerPipeline(QGstPipeline::create("playerPipeline")),
gstVideoOutput(videoOutput)
{
- playerPipeline.setFlushOnConfigChanges(true);
-
gstVideoOutput->setParent(this);
gstVideoOutput->setPipeline(playerPipeline);
for (auto &ts : trackSelectors)
playerPipeline.add(ts.selector);
- playerPipeline.setState(GST_STATE_NULL);
playerPipeline.installMessageFilter(static_cast<QGstreamerBusMessageFilter *>(this));
playerPipeline.installMessageFilter(static_cast<QGstreamerSyncMessageFilter *>(this));
@@ -143,7 +152,12 @@ QGstreamerMediaPlayer::QGstreamerMediaPlayer(QGstreamerVideoOutput *videoOutput,
gst_pipeline_use_clock(playerPipeline.pipeline(), systemClock.get());
connect(&positionUpdateTimer, &QTimer::timeout, this, [this] {
- updatePosition();
+ updatePositionFromPipeline();
+ });
+
+ m_stalledMediaNotifier.setSingleShot(true);
+ connect(&m_stalledMediaNotifier, &QTimer::timeout, this, [this] {
+ mediaStatusChanged(QMediaPlayer::StalledMedia);
});
}
@@ -152,25 +166,45 @@ QGstreamerMediaPlayer::~QGstreamerMediaPlayer()
playerPipeline.removeMessageFilter(static_cast<QGstreamerBusMessageFilter *>(this));
playerPipeline.removeMessageFilter(static_cast<QGstreamerSyncMessageFilter *>(this));
playerPipeline.setStateSync(GST_STATE_NULL);
- topology.free();
}
-qint64 QGstreamerMediaPlayer::position() const
+std::chrono::nanoseconds QGstreamerMediaPlayer::pipelinePosition() const
+{
+ if (!hasMedia())
+ return {};
+
+ Q_ASSERT(playerPipeline);
+ return playerPipeline.position();
+}
+
+void QGstreamerMediaPlayer::updatePositionFromPipeline()
{
- if (playerPipeline.isNull() || m_url.isEmpty())
- return 0;
+ using namespace std::chrono;
- return playerPipeline.position()/1e6;
+ positionChanged(round<milliseconds>(pipelinePosition()));
+}
+
+void QGstreamerMediaPlayer::updateDurationFromPipeline()
+{
+ std::optional<std::chrono::milliseconds> duration = playerPipeline.durationInMs();
+ if (!duration)
+ duration = std::chrono::milliseconds{ -1 };
+
+ if (duration != m_duration) {
+ qCDebug(qLcMediaPlayer) << "updateDurationFromPipeline" << *duration;
+ m_duration = *duration;
+ durationChanged(m_duration);
+ }
}
qint64 QGstreamerMediaPlayer::duration() const
{
- return m_duration;
+ return m_duration.count();
}
float QGstreamerMediaPlayer::bufferProgress() const
{
- return m_bufferProgress/100.;
+ return m_bufferProgress;
}
QMediaTimeRange QGstreamerMediaPlayer::availablePlaybackRanges() const
@@ -185,19 +219,28 @@ qreal QGstreamerMediaPlayer::playbackRate() const
void QGstreamerMediaPlayer::setPlaybackRate(qreal rate)
{
- bool applyRateToPipeline = state() != QMediaPlayer::StoppedState;
- if (playerPipeline.setPlaybackRate(rate, applyRateToPipeline))
- playbackRateChanged(rate);
+ if (rate == m_rate)
+ return;
+
+ m_rate = rate;
+
+ playerPipeline.setPlaybackRate(rate);
+ playbackRateChanged(rate);
}
void QGstreamerMediaPlayer::setPosition(qint64 pos)
{
- qint64 currentPos = playerPipeline.position()/1e6;
- if (pos == currentPos)
+ std::chrono::milliseconds posInMs{ pos };
+ setPosition(posInMs);
+}
+
+void QGstreamerMediaPlayer::setPosition(std::chrono::milliseconds pos)
+{
+ if (pos == playerPipeline.position())
return;
playerPipeline.finishStateChange();
- playerPipeline.setPosition(pos*1e6);
- qCDebug(qLcMediaPlayer) << Q_FUNC_INFO << pos << playerPipeline.position()/1e6;
+ playerPipeline.setPosition(pos);
+ qCDebug(qLcMediaPlayer) << Q_FUNC_INFO << pos << playerPipeline.positionInMs();
if (mediaStatus() == QMediaPlayer::EndOfMedia)
mediaStatusChanged(QMediaPlayer::LoadedMedia);
positionChanged(pos);
@@ -205,34 +248,44 @@ void QGstreamerMediaPlayer::setPosition(qint64 pos)
void QGstreamerMediaPlayer::play()
{
- if (state() == QMediaPlayer::PlayingState || m_url.isEmpty())
+ QMediaPlayer::PlaybackState currentState = state();
+ if (currentState == QMediaPlayer::PlayingState || !hasMedia())
return;
- resetCurrentLoop();
+
+ if (currentState != QMediaPlayer::PausedState)
+ resetCurrentLoop();
playerPipeline.setInStoppedState(false);
if (mediaStatus() == QMediaPlayer::EndOfMedia) {
- playerPipeline.setPosition(0);
- updatePosition();
+ playerPipeline.setPosition({});
+ positionChanged(0);
}
qCDebug(qLcMediaPlayer) << "play().";
int ret = playerPipeline.setState(GST_STATE_PLAYING);
if (m_requiresSeekOnPlay) {
- // Flushing the pipeline is required to get track changes
- // immediately, when they happen while paused.
+ // Flushing the pipeline is required to get track changes immediately when they happen
+ // while paused.
playerPipeline.flush();
m_requiresSeekOnPlay = false;
+ } else {
+ if (currentState == QMediaPlayer::StoppedState) {
+ // we get an assertion failure during instant playback rate changes
+ // https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/3545
+ constexpr bool performInstantRateChange = false;
+ playerPipeline.applyPlaybackRate(/*instantRateChange=*/performInstantRateChange);
+ }
}
if (ret == GST_STATE_CHANGE_FAILURE)
qCDebug(qLcMediaPlayer) << "Unable to set the pipeline to the playing state.";
positionUpdateTimer.start(100);
- emit stateChanged(QMediaPlayer::PlayingState);
+ stateChanged(QMediaPlayer::PlayingState);
}
void QGstreamerMediaPlayer::pause()
{
- if (state() == QMediaPlayer::PausedState || m_url.isEmpty()
+ if (state() == QMediaPlayer::PausedState || !hasMedia()
|| m_resourceErrorState != ResourceErrorState::NoError)
return;
@@ -241,29 +294,30 @@ void QGstreamerMediaPlayer::pause()
playerPipeline.setInStoppedState(false);
playerPipeline.flush();
}
- int ret = playerPipeline.setState(GST_STATE_PAUSED);
+ int ret = playerPipeline.setStateSync(GST_STATE_PAUSED);
if (ret == GST_STATE_CHANGE_FAILURE)
qCDebug(qLcMediaPlayer) << "Unable to set the pipeline to the paused state.";
if (mediaStatus() == QMediaPlayer::EndOfMedia) {
- playerPipeline.setPosition(0);
+ playerPipeline.setPosition({});
+ positionChanged(0);
+ } else {
+ updatePositionFromPipeline();
}
- updatePosition();
- emit stateChanged(QMediaPlayer::PausedState);
+ stateChanged(QMediaPlayer::PausedState);
if (m_bufferProgress > 0 || !canTrackProgress())
mediaStatusChanged(QMediaPlayer::BufferedMedia);
else
mediaStatusChanged(QMediaPlayer::BufferingMedia);
-
- emit bufferProgressChanged(m_bufferProgress / 100.);
}
void QGstreamerMediaPlayer::stop()
{
+ using namespace std::chrono_literals;
if (state() == QMediaPlayer::StoppedState) {
if (position() != 0) {
- playerPipeline.setPosition(0);
- positionChanged(0);
+ playerPipeline.setPosition({});
+ positionChanged(0ms);
mediaStatusChanged(QMediaPlayer::LoadedMedia);
}
return;
@@ -271,27 +325,63 @@ void QGstreamerMediaPlayer::stop()
stopOrEOS(false);
}
-void *QGstreamerMediaPlayer::nativePipeline()
+const QGstPipeline &QGstreamerMediaPlayer::pipeline() const
{
- return playerPipeline.pipeline();
+ return playerPipeline;
}
void QGstreamerMediaPlayer::stopOrEOS(bool eos)
{
+ using namespace std::chrono_literals;
+
positionUpdateTimer.stop();
playerPipeline.setInStoppedState(true);
bool ret = playerPipeline.setStateSync(GST_STATE_PAUSED);
if (!ret)
qCDebug(qLcMediaPlayer) << "Unable to set the pipeline to the stopped state.";
- if (!eos)
- playerPipeline.setPosition(0);
- updatePosition();
- emit stateChanged(QMediaPlayer::StoppedState);
+ if (!eos) {
+ playerPipeline.setPosition(0ms);
+ positionChanged(0ms);
+ }
+ stateChanged(QMediaPlayer::StoppedState);
if (eos)
mediaStatusChanged(QMediaPlayer::EndOfMedia);
else
mediaStatusChanged(QMediaPlayer::LoadedMedia);
m_initialBufferProgressSent = false;
+ bufferProgressChanged(0.f);
+}
+
+void QGstreamerMediaPlayer::detectPipelineIsSeekable()
+{
+ std::optional<bool> canSeek = playerPipeline.canSeek();
+ if (canSeek) {
+ qCDebug(qLcMediaPlayer) << "detectPipelineIsSeekable: pipeline is seekable:" << *canSeek;
+ seekableChanged(*canSeek);
+ } else {
+ qCWarning(qLcMediaPlayer) << "detectPipelineIsSeekable: query for seekable failed.";
+ seekableChanged(false);
+ }
+}
+
+QGstElement QGstreamerMediaPlayer::getSinkElementForTrackType(TrackType trackType)
+{
+ switch (trackType) {
+ case AudioStream:
+ return gstAudioOutput ? gstAudioOutput->gstElement() : QGstElement{};
+ case VideoStream:
+ return gstVideoOutput ? gstVideoOutput->gstElement() : QGstElement{};
+ case SubtitleStream:
+ return gstVideoOutput ? gstVideoOutput->gstSubtitleElement() : QGstElement{};
+ break;
+ default:
+ Q_UNREACHABLE_RETURN(QGstElement{});
+ }
+}
+
+bool QGstreamerMediaPlayer::hasMedia() const
+{
+ return !m_url.isEmpty() || m_stream;
}
bool QGstreamerMediaPlayer::processBusMessage(const QGstreamerMessage &message)
@@ -306,9 +396,11 @@ bool QGstreamerMediaPlayer::processBusMessage(const QGstreamerMessage &message)
gst_message_parse_tag(gm, &tagList);
qCDebug(qLcMediaPlayer) << " Got tags: " << tagList.get();
- auto metaData = taglistToMetaData(tagList);
- for (auto k : metaData.keys())
- m_metaData.insert(k, metaData.value(k));
+
+ QMediaMetaData originalMetaData = m_metaData;
+ extendMetaDataFromTagList(m_metaData, tagList);
+ if (originalMetaData != m_metaData)
+ metaDataChanged();
if (gstVideoOutput) {
QVariant rotation = m_metaData.value(QMediaMetaData::Orientation);
@@ -317,45 +409,40 @@ bool QGstreamerMediaPlayer::processBusMessage(const QGstreamerMessage &message)
break;
}
case GST_MESSAGE_DURATION_CHANGED: {
- qint64 d = playerPipeline.duration()/1e6;
- qCDebug(qLcMediaPlayer) << " duration changed message" << d;
- if (d != m_duration) {
- m_duration = d;
- emit durationChanged(duration());
- }
+ if (!prerolling)
+ updateDurationFromPipeline();
+
return false;
}
- case GST_MESSAGE_EOS:
+ case GST_MESSAGE_EOS: {
+ positionChanged(m_duration);
if (doLoop()) {
setPosition(0);
break;
}
stopOrEOS(true);
break;
+ }
case GST_MESSAGE_BUFFERING: {
int progress = 0;
gst_message_parse_buffering(gm, &progress);
- qCDebug(qLcMediaPlayer) << " buffering message: " << progress;
-
if (state() != QMediaPlayer::StoppedState && !prerolling) {
if (!m_initialBufferProgressSent) {
mediaStatusChanged(QMediaPlayer::BufferingMedia);
m_initialBufferProgressSent = true;
}
- if (m_bufferProgress > 0 && progress == 0)
- mediaStatusChanged(QMediaPlayer::StalledMedia);
- else if (progress >= 50)
+ if (m_bufferProgress > 0 && progress == 0) {
+ m_stalledMediaNotifier.start(stalledMediaDebouncePeriod);
+ } else if (progress >= 50)
// QTBUG-124517: rethink buffering
mediaStatusChanged(QMediaPlayer::BufferedMedia);
else
mediaStatusChanged(QMediaPlayer::BufferingMedia);
}
- m_bufferProgress = progress;
-
- emit bufferProgressChanged(m_bufferProgress / 100.);
+ updateBufferProgress(progress * 0.01);
break;
}
case GST_MESSAGE_STATE_CHANGED: {
@@ -378,33 +465,21 @@ bool QGstreamerMediaPlayer::processBusMessage(const QGstreamerMessage &message)
case GST_STATE_PAUSED: {
if (prerolling) {
qCDebug(qLcMediaPlayer) << "Preroll done, setting status to Loaded";
+ playerPipeline.dumpGraph("playerPipelinePrerollDone");
+
prerolling = false;
- GST_DEBUG_BIN_TO_DOT_FILE(playerPipeline.bin(), GST_DEBUG_GRAPH_SHOW_ALL,
- "playerPipeline");
-
- qint64 d = playerPipeline.duration() / 1e6;
- if (d != m_duration) {
- m_duration = d;
- qCDebug(qLcMediaPlayer) << " duration changed" << d;
- emit durationChanged(duration());
- }
+ updateDurationFromPipeline();
+
+ m_metaData.insert(QMediaMetaData::Duration, duration());
+ if (!m_url.isEmpty())
+ m_metaData.insert(QMediaMetaData::Url, m_url);
parseStreamsAndMetadata();
+ metaDataChanged();
- emit tracksChanged();
+ tracksChanged();
mediaStatusChanged(QMediaPlayer::LoadedMedia);
- GstQuery *query = gst_query_new_seeking(GST_FORMAT_TIME);
- gboolean canSeek = false;
- if (gst_element_query(playerPipeline.element(), query)) {
- gst_query_parse_seeking(query, nullptr, &canSeek, nullptr, nullptr);
- qCDebug(qLcMediaPlayer) << " pipeline is seekable:" << canSeek;
- } else {
- qCDebug(qLcMediaPlayer) << " query for seekable failed.";
- }
- gst_query_unref(query);
- seekableChanged(canSeek);
-
if (!playerPipeline.inStoppedState()) {
Q_ASSERT(!m_initialBufferProgressSent);
@@ -453,6 +528,7 @@ bool QGstreamerMediaPlayer::processBusMessage(const QGstreamerMessage &message)
error(QMediaPlayer::ResourceError, QString::fromUtf8(err.get()->message));
m_resourceErrorState = ResourceErrorState::ErrorReported;
m_url.clear();
+ m_stream = nullptr;
}
} else {
error(QMediaPlayer::ResourceError, QString::fromUtf8(err.get()->message));
@@ -476,21 +552,28 @@ bool QGstreamerMediaPlayer::processBusMessage(const QGstreamerMessage &message)
case GST_MESSAGE_SEGMENT_START: {
qCDebug(qLcMediaPlayer) << " segment start message, updating position";
- QGstStructure structure(gst_message_get_structure(gm));
+ QGstStructureView structure(gst_message_get_structure(gm));
auto p = structure["position"].toInt64();
if (p) {
- qint64 position = (*p)/1000000;
- emit positionChanged(position);
+ std::chrono::milliseconds position{
+ (*p) / 1000000,
+ };
+ positionChanged(position);
}
break;
}
case GST_MESSAGE_ELEMENT: {
- QGstStructure structure(gst_message_get_structure(gm));
+ QGstStructureView structure(gst_message_get_structure(gm));
auto type = structure.name();
- if (type == "stream-topology") {
- topology.free();
- topology = structure.copy();
- }
+ if (type == "stream-topology")
+ topology = structure.clone();
+
+ break;
+ }
+
+ case GST_MESSAGE_ASYNC_DONE: {
+ if (playerPipeline.state() >= GST_STATE_PAUSED)
+ detectPipelineIsSeekable();
break;
}
@@ -570,19 +653,19 @@ void QGstreamerMediaPlayer::decoderPadAdded(const QGstElement &src, const QGstPa
if (streamType == VideoStream) {
connectOutput(ts);
ts.setActiveInputPad(sinkPad);
- emit videoAvailableChanged(true);
+ videoAvailableChanged(true);
}
else if (streamType == AudioStream) {
connectOutput(ts);
ts.setActiveInputPad(sinkPad);
- emit audioAvailableChanged(true);
+ audioAvailableChanged(true);
}
}
if (!prerolling)
- emit tracksChanged();
+ tracksChanged();
- decoderOutputMap.insert(pad.name(), sinkPad);
+ decoderOutputMap.emplace(pad, sinkPad);
}
void QGstreamerMediaPlayer::decoderPadRemoved(const QGstElement &src, const QGstPad &pad)
@@ -591,9 +674,11 @@ void QGstreamerMediaPlayer::decoderPadRemoved(const QGstElement &src, const QGst
return;
qCDebug(qLcMediaPlayer) << "Removed pad" << pad.name() << "from" << src.name();
- auto track = decoderOutputMap.value(pad.name());
- if (track.isNull())
+
+ auto it = decoderOutputMap.find(pad);
+ if (it == decoderOutputMap.end())
return;
+ QGstPad track = it->second;
auto ts = std::find_if(std::begin(trackSelectors), std::end(trackSelectors),
[&](TrackSelector &ts){ return ts.selector == track.parent(); });
@@ -630,27 +715,12 @@ void QGstreamerMediaPlayer::connectOutput(TrackSelector &ts)
if (ts.isConnected)
return;
- QGstElement e;
- switch (ts.type) {
- case AudioStream:
- e = gstAudioOutput ? gstAudioOutput->gstElement() : QGstElement{};
- break;
- case VideoStream:
- e = gstVideoOutput ? gstVideoOutput->gstElement() : QGstElement{};
- break;
- case SubtitleStream:
- if (gstVideoOutput)
- gstVideoOutput->linkSubtitleStream(ts.selector);
- break;
- default:
- return;
- }
-
- if (!e.isNull()) {
+ QGstElement e = getSinkElementForTrackType(ts.type);
+ if (e) {
qCDebug(qLcMediaPlayer) << "connecting output for track type" << ts.type;
playerPipeline.add(e);
qLinkGstElements(ts.selector, e);
- e.setState(GST_STATE_PAUSED);
+ e.syncStateWithParent();
}
ts.isConnected = true;
@@ -661,23 +731,8 @@ void QGstreamerMediaPlayer::removeOutput(TrackSelector &ts)
if (!ts.isConnected)
return;
- QGstElement e;
- switch (ts.type) {
- case AudioStream:
- e = gstAudioOutput ? gstAudioOutput->gstElement() : QGstElement{};
- break;
- case VideoStream:
- e = gstVideoOutput ? gstVideoOutput->gstElement() : QGstElement{};
- break;
- case SubtitleStream:
- if (gstVideoOutput)
- gstVideoOutput->unlinkSubtitleStream();
- break;
- default:
- break;
- }
-
- if (!e.isNull()) {
+ QGstElement e = getSinkElementForTrackType(ts.type);
+ if (e) {
qCDebug(qLcMediaPlayer) << "removing output for track type" << ts.type;
playerPipeline.stopAndRemoveElements(e);
}
@@ -685,6 +740,18 @@ void QGstreamerMediaPlayer::removeOutput(TrackSelector &ts)
ts.isConnected = false;
}
+void QGstreamerMediaPlayer::removeDynamicPipelineElements()
+{
+ for (QGstElement *element : { &src, &decoder }) {
+ if (element->isNull())
+ continue;
+
+ element->setStateSync(GstState::GST_STATE_NULL);
+ playerPipeline.remove(*element);
+ *element = QGstElement{};
+ }
+}
+
void QGstreamerMediaPlayer::uridecodebinElementAddedCallback(GstElement * /*uridecodebin*/,
GstElement *child,
QGstreamerMediaPlayer *)
@@ -693,9 +760,7 @@ void QGstreamerMediaPlayer::uridecodebinElementAddedCallback(GstElement * /*urid
qCDebug(qLcMediaPlayer) << "New element added to uridecodebin:" << c.name();
static const GType decodeBinType = [] {
- QGstElementFactoryHandle factory = QGstElementFactoryHandle{
- gst_element_factory_find("decodebin"),
- };
+ QGstElementFactoryHandle factory = QGstElement::findFactory("decodebin");
return gst_element_factory_get_element_type(factory.get());
}();
@@ -754,16 +819,12 @@ void QGstreamerMediaPlayer::unknownTypeCallback(GstElement *decodebin, GstPad *p
static bool isQueue(const QGstElement &element)
{
static const GType queueType = [] {
- QGstElementFactoryHandle factory = QGstElementFactoryHandle{
- gst_element_factory_find("queue"),
- };
+ QGstElementFactoryHandle factory = QGstElement::findFactory("queue");
return gst_element_factory_get_element_type(factory.get());
}();
static const GType multiQueueType = [] {
- QGstElementFactoryHandle factory = QGstElementFactoryHandle{
- gst_element_factory_find("multiqueue"),
- };
+ QGstElementFactoryHandle factory = QGstElement::findFactory("multiqueue");
return gst_element_factory_get_element_type(factory.get());
}();
@@ -790,9 +851,12 @@ void QGstreamerMediaPlayer::decodebinElementRemovedCallback(GstBin * /*decodebin
void QGstreamerMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
{
+ using namespace std::chrono_literals;
+
qCDebug(qLcMediaPlayer) << Q_FUNC_INFO << "setting location to" << content;
prerolling = true;
+ m_requiresSeekOnPlay = true;
m_resourceErrorState = ResourceErrorState::NoError;
bool ret = playerPipeline.setStateSync(GST_STATE_NULL);
@@ -802,34 +866,28 @@ void QGstreamerMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
m_url = content;
m_stream = stream;
- if (!src.isNull())
- playerPipeline.remove(src);
- if (!decoder.isNull())
- playerPipeline.remove(decoder);
- src = QGstElement();
+ removeDynamicPipelineElements();
disconnectDecoderHandlers();
- decoder = QGstElement();
removeAllOutputs();
seekableChanged(false);
Q_ASSERT(playerPipeline.inStoppedState());
- if (m_duration != 0) {
- m_duration = 0;
- durationChanged(0);
+ if (m_duration != 0ms) {
+ m_duration = 0ms;
+ durationChanged(0ms);
}
stateChanged(QMediaPlayer::StoppedState);
if (position() != 0)
- positionChanged(0);
+ positionChanged(0ms);
if (!m_metaData.isEmpty()) {
m_metaData.clear();
metaDataChanged();
}
- if (content.isEmpty() && !stream)
+ if (content.isEmpty() && !stream) {
mediaStatusChanged(QMediaPlayer::NoMedia);
-
- if (content.isEmpty())
return;
+ }
if (m_stream) {
if (!m_appSrc) {
@@ -844,7 +902,7 @@ void QGstreamerMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
src = m_appSrc->element();
decoder = QGstElement::createFromFactory("decodebin", "decoder");
if (!decoder) {
- error(QMediaPlayer::ResourceError, errorMessageCannotFindElement("decodebin"));
+ error(QMediaPlayer::ResourceError, qGstErrorMessageCannotFindElement("decodebin"));
return;
}
decoder.set("post-stream-topology", true);
@@ -864,7 +922,7 @@ void QGstreamerMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
// use uridecodebin
decoder = QGstElement::createFromFactory("uridecodebin", "decoder");
if (!decoder) {
- error(QMediaPlayer::ResourceError, errorMessageCannotFindElement("uridecodebin"));
+ error(QMediaPlayer::ResourceError, qGstErrorMessageCannotFindElement("uridecodebin"));
return;
}
playerPipeline.add(decoder);
@@ -884,11 +942,11 @@ void QGstreamerMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
decoder.set("uri", content.toEncoded().constData());
decoder.set("use-buffering", true);
- decoder.set("ring-buffer-max-size", 128 /*kb*/);
- if (m_bufferProgress != 0) {
- m_bufferProgress = 0;
- emit bufferProgressChanged(0.);
- }
+
+ constexpr int mb = 1024 * 1024;
+ decoder.set("ring-buffer-max-size", 2 * mb);
+
+ updateBufferProgress(0.f);
elementAdded = decoder.connect("deep-element-added",
GCallback(decodebinElementAddedCallback), this);
@@ -899,12 +957,14 @@ void QGstreamerMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
padRemoved = decoder.onPadRemoved<&QGstreamerMediaPlayer::decoderPadRemoved>(this);
mediaStatusChanged(QMediaPlayer::LoadingMedia);
-
- if (!playerPipeline.setState(GST_STATE_PAUSED))
+ if (!playerPipeline.setStateSync(GST_STATE_PAUSED)) {
qCWarning(qLcMediaPlayer) << "Unable to set the pipeline to the paused state.";
+ // Note: no further error handling: errors will be delivered via a GstMessage
+ return;
+ }
- playerPipeline.setPosition(0);
- positionChanged(0);
+ playerPipeline.setPosition(0ms);
+ positionChanged(0ms);
}
void QGstreamerMediaPlayer::setAudioOutput(QPlatformAudioOutput *output)
@@ -934,9 +994,9 @@ void QGstreamerMediaPlayer::setVideoSink(QVideoSink *sink)
gstVideoOutput->setVideoSink(sink);
}
-static QGstStructure endOfChain(const QGstStructure &s)
+static QGstStructureView endOfChain(const QGstStructureView &s)
{
- QGstStructure e = s;
+ QGstStructureView e = s;
while (1) {
auto next = e["next"].toStructure();
if (!next.isNull())
@@ -950,32 +1010,26 @@ static QGstStructure endOfChain(const QGstStructure &s)
void QGstreamerMediaPlayer::parseStreamsAndMetadata()
{
qCDebug(qLcMediaPlayer) << "============== parse topology ============";
- if (topology.isNull()) {
+
+ if (!topology) {
qCDebug(qLcMediaPlayer) << " null topology";
return;
}
- auto caps = topology["caps"].toCaps();
- auto structure = caps.at(0);
- auto fileFormat = QGstreamerFormatInfo::fileFormatForCaps(structure);
- qCDebug(qLcMediaPlayer) << caps << fileFormat;
- m_metaData.insert(QMediaMetaData::FileFormat, QVariant::fromValue(fileFormat));
- m_metaData.insert(QMediaMetaData::Duration, duration());
- m_metaData.insert(QMediaMetaData::Url, m_url);
- QGValue tags = topology["tags"];
- if (!tags.isNull()) {
- QGstTagListHandle tagList;
- gst_structure_get(topology.structure, "tags", GST_TYPE_TAG_LIST, &tagList, nullptr);
- const auto metaData = taglistToMetaData(tagList);
- for (auto k : metaData.keys())
- m_metaData.insert(k, metaData.value(k));
- }
+ QGstStructureView topologyView{ topology };
+
+ QGstCaps caps = topologyView.caps();
+ extendMetaDataFromCaps(m_metaData, caps);
+
+ QGstTagListHandle tagList = QGstStructureView{ topology }.tags();
+ if (tagList)
+ extendMetaDataFromTagList(m_metaData, tagList);
- auto demux = endOfChain(topology);
- auto next = demux["next"];
+ QGstStructureView demux = endOfChain(topologyView);
+ QGValue next = demux["next"];
if (!next.isList()) {
qCDebug(qLcMediaPlayer) << " no additional streams";
- emit metaDataChanged();
+ metaDataChanged();
return;
}
@@ -983,43 +1037,28 @@ void QGstreamerMediaPlayer::parseStreamsAndMetadata()
int size = next.listSize();
for (int i = 0; i < size; ++i) {
auto val = next.at(i);
- caps = val.toStructure()["caps"].toCaps();
- structure = caps.at(0);
- if (structure.name().startsWith("audio/")) {
- auto codec = QGstreamerFormatInfo::audioCodecForCaps(structure);
- m_metaData.insert(QMediaMetaData::AudioCodec, QVariant::fromValue(codec));
- qCDebug(qLcMediaPlayer) << " audio" << caps << (int)codec;
- } else if (structure.name().startsWith("video/")) {
- auto codec = QGstreamerFormatInfo::videoCodecForCaps(structure);
- m_metaData.insert(QMediaMetaData::VideoCodec, QVariant::fromValue(codec));
- qCDebug(qLcMediaPlayer) << " video" << caps << (int)codec;
- auto framerate = structure["framerate"].getFraction();
- if (framerate)
- m_metaData.insert(QMediaMetaData::VideoFrameRate, *framerate);
-
- QSize resolution = structure.resolution();
- if (resolution.isValid())
- m_metaData.insert(QMediaMetaData::Resolution, resolution);
+ caps = val.toStructure().caps();
+
+ extendMetaDataFromCaps(m_metaData, caps);
+ QGstStructureView structure = caps.at(0);
+
+ if (structure.name().startsWith("video/")) {
QSize nativeSize = structure.nativeSize();
gstVideoOutput->setNativeSize(nativeSize);
}
}
auto sinkPad = trackSelector(VideoStream).activeInputPad();
- if (!sinkPad.isNull()) {
- QGstTagListHandle tagList;
-
- g_object_get(sinkPad.object(), "tags", &tagList, nullptr);
+ if (sinkPad) {
+ QGstTagListHandle tagList = sinkPad.tags();
if (tagList)
qCDebug(qLcMediaPlayer) << " tags=" << tagList.get();
else
qCDebug(qLcMediaPlayer) << " tags=(null)";
}
-
qCDebug(qLcMediaPlayer) << "============== end parse topology ============";
- emit metaDataChanged();
playerPipeline.dumpGraph("playback");
}
@@ -1031,12 +1070,10 @@ int QGstreamerMediaPlayer::trackCount(QPlatformMediaPlayer::TrackType type)
QMediaMetaData QGstreamerMediaPlayer::trackMetaData(QPlatformMediaPlayer::TrackType type, int index)
{
auto track = trackSelector(type).inputPad(index);
- if (track.isNull())
+ if (!track)
return {};
- QGstTagListHandle tagList;
- g_object_get(track.object(), "tags", &tagList, nullptr);
-
+ QGstTagListHandle tagList = track.tags();
return taglistToMetaData(tagList);
}
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer_p.h b/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer_p.h
index 5c33d531f..f634d32a1 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer_p.h
@@ -44,7 +44,6 @@ public:
static QMaybe<QPlatformMediaPlayer *> create(QMediaPlayer *parent = nullptr);
~QGstreamerMediaPlayer();
- qint64 position() const override;
qint64 duration() const override;
float bufferProgress() const override;
@@ -72,22 +71,19 @@ public:
void setActiveTrack(TrackType, int /*streamNumber*/) override;
void setPosition(qint64 pos) override;
+ void setPosition(std::chrono::milliseconds pos);
void play() override;
void pause() override;
void stop() override;
- void *nativePipeline() override;
+ const QGstPipeline &pipeline() const;
bool processBusMessage(const QGstreamerMessage& message) override;
bool processSyncMessage(const QGstreamerMessage& message) override;
- void updatePosition() { positionChanged(position()); }
-
private:
- QGstreamerMediaPlayer(QGstreamerVideoOutput *videoOutput, QGstElement videoInputSelector,
- QGstElement audioInputSelector, QGstElement subTitleInputSelector,
- QMediaPlayer *parent);
+ QGstreamerMediaPlayer(QGstreamerVideoOutput *videoOutput, QMediaPlayer *parent);
struct TrackSelector
{
@@ -128,16 +124,25 @@ private:
void parseStreamsAndMetadata();
void connectOutput(TrackSelector &ts);
void removeOutput(TrackSelector &ts);
+ void removeDynamicPipelineElements();
void removeAllOutputs();
void stopOrEOS(bool eos);
bool canTrackProgress() const { return decodeBinQueues > 0; }
+ void detectPipelineIsSeekable();
+ bool hasMedia() const;
+
+ std::chrono::nanoseconds pipelinePosition() const;
+ void updatePositionFromPipeline();
+ void updateDurationFromPipeline();
+ void updateBufferProgress(float);
+
+ QGstElement getSinkElementForTrackType(TrackType);
std::array<TrackSelector, NTrackTypes> trackSelectors;
TrackSelector &trackSelector(TrackType type);
QMediaMetaData m_metaData;
- int m_bufferProgress = 0;
QUrl m_url;
QIODevice *m_stream = nullptr;
@@ -148,15 +153,17 @@ private:
};
bool prerolling = false;
- bool m_requiresSeekOnPlay = false;
+ bool m_requiresSeekOnPlay = true;
bool m_initialBufferProgressSent = false;
ResourceErrorState m_resourceErrorState = ResourceErrorState::NoError;
- qint64 m_duration = 0;
+ float m_rate = 1.f;
+ float m_bufferProgress = 0.f;
+ std::chrono::milliseconds m_duration{};
QTimer positionUpdateTimer;
QGstAppSource *m_appSrc = nullptr;
- QGstStructure topology;
+ QUniqueGstStructureHandle topology;
// Gst elements
QGstPipeline playerPipeline;
@@ -168,7 +175,15 @@ private:
// QGstElement streamSynchronizer;
- QHash<QByteArray, QGstPad> decoderOutputMap;
+ struct QGstPadLess
+ {
+ bool operator()(const QGstPad &lhs, const QGstPad &rhs) const
+ {
+ return lhs.pad() < rhs.pad();
+ }
+ };
+
+ std::map<QGstPad, QGstPad, QGstPadLess> decoderOutputMap;
// decoder connections
QGObjectHandlerScopedConnection padAdded;
@@ -180,6 +195,10 @@ private:
QGObjectHandlerScopedConnection elementRemoved;
int decodeBinQueues = 0;
+
+ void mediaStatusChanged(QMediaPlayer::MediaStatus status);
+ static constexpr auto stalledMediaDebouncePeriod = std::chrono::milliseconds{ 500 };
+ QTimer m_stalledMediaNotifier;
};
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamermessage_p.h b/src/plugins/multimedia/gstreamer/common/qgstreamermessage_p.h
index 01fe68acb..9836bd0cb 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreamermessage_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamermessage_p.h
@@ -43,7 +43,7 @@ public:
GstMessageType type() const { return GST_MESSAGE_TYPE(get()); }
QGstObject source() const { return QGstObject(GST_MESSAGE_SRC(get()), QGstObject::NeedsRef); }
- QGstStructure structure() const { return QGstStructure(gst_message_get_structure(get())); }
+ QGstStructureView structure() const { return QGstStructureView(gst_message_get_structure(get())); }
GstMessage *message() const { return get(); }
};
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamermetadata.cpp b/src/plugins/multimedia/gstreamer/common/qgstreamermetadata.cpp
index bc28509a1..9aa9406b9 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreamermetadata.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamermetadata.cpp
@@ -13,6 +13,7 @@
#include <gst/gstversion.h>
#include <common/qgst_handle_types_p.h>
#include <common/qgstutils_p.h>
+#include <qgstreamerformatinfo_p.h>
QT_BEGIN_NAMESPACE
@@ -143,121 +144,206 @@ const char *keyToTag(QMediaMetaData::Key key)
#undef constexpr_lookup
-//internal
+QtVideo::Rotation parseRotationTag(const char *string)
+{
+ using namespace std::string_view_literals;
+
+ if (string == "rotate-90"sv)
+ return QtVideo::Rotation::Clockwise90;
+ if (string == "rotate-180"sv)
+ return QtVideo::Rotation::Clockwise180;
+ if (string == "rotate-270"sv)
+ return QtVideo::Rotation::Clockwise270;
+ if (string == "rotate-0"sv)
+ return QtVideo::Rotation::None;
+
+ qCritical() << "cannot parse orientation:" << string;
+ return QtVideo::Rotation::None;
+}
+
+QDateTime parseDate(const GValue &val)
+{
+ Q_ASSERT(G_VALUE_TYPE(&val) == G_TYPE_DATE);
+
+ const GDate *date = (const GDate *)g_value_get_boxed(&val);
+ if (!g_date_valid(date))
+ return {};
+
+ int year = g_date_get_year(date);
+ int month = g_date_get_month(date);
+ int day = g_date_get_day(date);
+ return QDateTime(QDate(year, month, day), QTime());
+}
+
+QDateTime parseDateTime(const GValue &val)
+{
+ Q_ASSERT(G_VALUE_TYPE(&val) == GST_TYPE_DATE_TIME);
+
+ const GstDateTime *dateTime = (const GstDateTime *)g_value_get_boxed(&val);
+ int year = gst_date_time_has_year(dateTime) ? gst_date_time_get_year(dateTime) : 0;
+ int month = gst_date_time_has_month(dateTime) ? gst_date_time_get_month(dateTime) : 0;
+ int day = gst_date_time_has_day(dateTime) ? gst_date_time_get_day(dateTime) : 0;
+ int hour = 0;
+ int minute = 0;
+ int second = 0;
+ float tz = 0;
+ if (gst_date_time_has_time(dateTime)) {
+ hour = gst_date_time_get_hour(dateTime);
+ minute = gst_date_time_get_minute(dateTime);
+ second = gst_date_time_get_second(dateTime);
+ tz = gst_date_time_get_time_zone_offset(dateTime);
+ }
+ return QDateTime{
+ QDate(year, month, day),
+ QTime(hour, minute, second),
+ QTimeZone(tz * 60 * 60),
+ };
+}
+
+QImage parseImage(const GValue &val)
+{
+ Q_ASSERT(G_VALUE_TYPE(&val) == GST_TYPE_SAMPLE);
+
+ GstSample *sample = (GstSample *)g_value_get_boxed(&val);
+ GstCaps *caps = gst_sample_get_caps(sample);
+ if (caps && !gst_caps_is_empty(caps)) {
+ GstStructure *structure = gst_caps_get_structure(caps, 0);
+ const gchar *name = gst_structure_get_name(structure);
+ if (QByteArray(name).startsWith("image/")) {
+ GstBuffer *buffer = gst_sample_get_buffer(sample);
+ if (buffer) {
+ GstMapInfo info;
+ gst_buffer_map(buffer, &info, GST_MAP_READ);
+ QImage image = QImage::fromData(info.data, info.size, name);
+ gst_buffer_unmap(buffer, &info);
+ return image;
+ }
+ }
+ }
+
+ return {};
+}
+
+std::optional<double> parseFractionAsDouble(const GValue &val)
+{
+ Q_ASSERT(G_VALUE_TYPE(&val) == GST_TYPE_FRACTION);
+
+ int nom = gst_value_get_fraction_numerator(&val);
+ int denom = gst_value_get_fraction_denominator(&val);
+ if (denom == 0)
+ return std::nullopt;
+ return double(nom) / double(denom);
+}
+
+constexpr std::string_view extendedComment{ GST_TAG_EXTENDED_COMMENT };
+
+void addTagsFromExtendedComment(const GstTagList *list, const gchar *tag, QMediaMetaData &metadata)
+{
+ using namespace Qt::Literals;
+ assert(tag == extendedComment);
+
+ int entryCount = gst_tag_list_get_tag_size(list, tag);
+ for (int i = 0; i != entryCount; ++i) {
+ const GValue *value = gst_tag_list_get_value_index(list, tag, i);
+
+ const QLatin1StringView strValue{ g_value_get_string(value) };
+
+ auto equalIndex = strValue.indexOf(QLatin1StringView("="));
+ if (equalIndex == -1) {
+ qDebug() << "Cannot parse GST_TAG_EXTENDED_COMMENT entry: " << value;
+ continue;
+ }
+
+ const QLatin1StringView key = strValue.first(equalIndex);
+ const QLatin1StringView valueString = strValue.last(strValue.size() - equalIndex - 1);
+
+ if (key == "DURATION"_L1) {
+ QUniqueGstDateTimeHandle duration{
+ gst_date_time_new_from_iso8601_string(valueString.data()),
+ };
+
+ if (duration) {
+ using namespace std::chrono;
+
+ auto chronoDuration = hours(gst_date_time_get_hour(duration.get()))
+ + minutes(gst_date_time_get_minute(duration.get()))
+ + seconds(gst_date_time_get_second(duration.get()))
+ + microseconds(gst_date_time_get_microsecond(duration.get()));
+
+ metadata.insert(QMediaMetaData::Duration,
+ QVariant::fromValue(round<milliseconds>(chronoDuration).count()));
+ }
+ }
+ }
+}
+
void addTagToMetaData(const GstTagList *list, const gchar *tag, void *userdata)
{
QMediaMetaData &metadata = *reinterpret_cast<QMediaMetaData *>(userdata);
- using namespace std::string_view_literals;
-
QMediaMetaData::Key key = tagToKey(tag);
- if (key == QMediaMetaData::Key(-1))
+ if (key == QMediaMetaData::Key(-1)) {
+ if (tag == extendedComment)
+ addTagsFromExtendedComment(list, tag, metadata);
+
return;
+ }
- GValue val;
- val.g_type = 0;
+ GValue val{};
gst_tag_list_copy_value(&val, list, tag);
- switch (G_VALUE_TYPE(&val)) {
- case G_TYPE_STRING: {
+ GType type = G_VALUE_TYPE(&val);
+
+ if (auto entryCount = gst_tag_list_get_tag_size(list, tag); entryCount != 1)
+ qWarning() << "addTagToMetaData: invalid entry count for" << tag << "-" << entryCount;
+
+ if (type == G_TYPE_STRING) {
const gchar *str_value = g_value_get_string(&val);
switch (key) {
case QMediaMetaData::Language: {
metadata.insert(key,
QVariant::fromValue(QLocale::codeToLanguage(
- QString::fromUtf8(str_value), QLocale::ISO639Part2)));
+ QString::fromUtf8(str_value), QLocale::AnyLanguageCode)));
break;
}
case QMediaMetaData::Orientation: {
- if (str_value == "rotate-90"sv)
- metadata.insert(key, QVariant::fromValue(QtVideo::Rotation::Clockwise90));
- else if (str_value == "rotate-180"sv)
- metadata.insert(key, QVariant::fromValue(QtVideo::Rotation::Clockwise180));
- else if (str_value == "rotate-270"sv)
- metadata.insert(key, QVariant::fromValue(QtVideo::Rotation::Clockwise270));
- else if (str_value == "rotate-0"sv)
- metadata.insert(key, QVariant::fromValue(QtVideo::Rotation::None));
+ metadata.insert(key, QVariant::fromValue(parseRotationTag(str_value)));
break;
}
default:
metadata.insert(key, QString::fromUtf8(str_value));
break;
};
- break;
- }
- case G_TYPE_INT:
+ } else if (type == G_TYPE_INT) {
metadata.insert(key, g_value_get_int(&val));
- break;
- case G_TYPE_UINT:
+ } else if (type == G_TYPE_UINT) {
metadata.insert(key, g_value_get_uint(&val));
- break;
- case G_TYPE_LONG:
+ } else if (type == G_TYPE_LONG) {
metadata.insert(key, qint64(g_value_get_long(&val)));
- break;
- case G_TYPE_BOOLEAN:
+ } else if (type == G_TYPE_BOOLEAN) {
metadata.insert(key, g_value_get_boolean(&val));
- break;
- case G_TYPE_CHAR:
+ } else if (type == G_TYPE_CHAR) {
metadata.insert(key, g_value_get_schar(&val));
- break;
- case G_TYPE_DOUBLE:
+ } else if (type == G_TYPE_DOUBLE) {
metadata.insert(key, g_value_get_double(&val));
- break;
- default:
- // GST_TYPE_DATE is a function, not a constant, so pull it out of the switch
- if (G_VALUE_TYPE(&val) == G_TYPE_DATE) {
- const GDate *date = (const GDate *)g_value_get_boxed(&val);
- if (g_date_valid(date)) {
- int year = g_date_get_year(date);
- int month = g_date_get_month(date);
- int day = g_date_get_day(date);
- // don't insert if we already have a datetime.
- if (!metadata.keys().contains(key))
- metadata.insert(key, QDateTime(QDate(year, month, day), QTime()));
- }
- } else if (G_VALUE_TYPE(&val) == GST_TYPE_DATE_TIME) {
- const GstDateTime *dateTime = (const GstDateTime *)g_value_get_boxed(&val);
- int year = gst_date_time_has_year(dateTime) ? gst_date_time_get_year(dateTime) : 0;
- int month = gst_date_time_has_month(dateTime) ? gst_date_time_get_month(dateTime) : 0;
- int day = gst_date_time_has_day(dateTime) ? gst_date_time_get_day(dateTime) : 0;
- int hour = 0;
- int minute = 0;
- int second = 0;
- float tz = 0;
- if (gst_date_time_has_time(dateTime)) {
- hour = gst_date_time_get_hour(dateTime);
- minute = gst_date_time_get_minute(dateTime);
- second = gst_date_time_get_second(dateTime);
- tz = gst_date_time_get_time_zone_offset(dateTime);
- }
- QDateTime qDateTime(QDate(year, month, day), QTime(hour, minute, second),
- QTimeZone(tz * 60 * 60));
- metadata.insert(key, qDateTime);
- } else if (G_VALUE_TYPE(&val) == GST_TYPE_SAMPLE) {
- GstSample *sample = (GstSample *)g_value_get_boxed(&val);
- GstCaps *caps = gst_sample_get_caps(sample);
- if (caps && !gst_caps_is_empty(caps)) {
- GstStructure *structure = gst_caps_get_structure(caps, 0);
- const gchar *name = gst_structure_get_name(structure);
- if (QByteArray(name).startsWith("image/")) {
- GstBuffer *buffer = gst_sample_get_buffer(sample);
- if (buffer) {
- GstMapInfo info;
- gst_buffer_map(buffer, &info, GST_MAP_READ);
- metadata.insert(key, QImage::fromData(info.data, info.size, name));
- gst_buffer_unmap(buffer, &info);
- }
- }
- }
- } else if (G_VALUE_TYPE(&val) == GST_TYPE_FRACTION) {
- int nom = gst_value_get_fraction_numerator(&val);
- int denom = gst_value_get_fraction_denominator(&val);
-
- if (denom > 0)
- metadata.insert(key, double(nom) / denom);
+ } else if (type == G_TYPE_DATE) {
+ if (!metadata.keys().contains(key)) {
+ QDateTime date = parseDate(val);
+ if (date.isValid())
+ metadata.insert(key, date);
}
- break;
+ } else if (type == GST_TYPE_DATE_TIME) {
+ metadata.insert(key, parseDateTime(val));
+ } else if (type == GST_TYPE_SAMPLE) {
+ QImage image = parseImage(val);
+ if (!image.isNull())
+ metadata.insert(key, image);
+ } else if (type == GST_TYPE_FRACTION) {
+ std::optional<double> fraction = parseFractionAsDouble(val);
+
+ if (fraction)
+ metadata.insert(key, *fraction);
}
g_value_unset(&val);
@@ -265,17 +351,18 @@ void addTagToMetaData(const GstTagList *list, const gchar *tag, void *userdata)
} // namespace
-QMediaMetaData taglistToMetaData(const GstTagList *tagList)
+QMediaMetaData taglistToMetaData(const QGstTagListHandle &handle)
{
QMediaMetaData m;
- if (tagList)
- gst_tag_list_foreach(tagList, reinterpret_cast<GstTagForeachFunc>(&addTagToMetaData), &m);
+ extendMetaDataFromTagList(m, handle);
return m;
}
-QMediaMetaData taglistToMetaData(const QGstTagListHandle &handle)
+void extendMetaDataFromTagList(QMediaMetaData &metadata, const QGstTagListHandle &handle)
{
- return taglistToMetaData(handle.get());
+ if (handle)
+ gst_tag_list_foreach(handle.get(), reinterpret_cast<GstTagForeachFunc>(&addTagToMetaData),
+ &metadata);
}
static void applyMetaDataToTagSetter(const QMediaMetaData &metadata, GstTagSetter *element)
@@ -360,4 +447,43 @@ void applyMetaDataToTagSetter(const QMediaMetaData &metadata, const QGstBin &bin
gst_iterator_free(elements);
}
+void extendMetaDataFromCaps(QMediaMetaData &metadata, const QGstCaps &caps)
+{
+ QGstStructureView structure = caps.at(0);
+
+ QMediaFormat::FileFormat fileFormat = QGstreamerFormatInfo::fileFormatForCaps(structure);
+ if (fileFormat != QMediaFormat::FileFormat::UnspecifiedFormat) {
+ // Container caps
+ metadata.insert(QMediaMetaData::FileFormat, fileFormat);
+ return;
+ }
+
+ QMediaFormat::AudioCodec audioCodec = QGstreamerFormatInfo::audioCodecForCaps(structure);
+ if (audioCodec != QMediaFormat::AudioCodec::Unspecified) {
+ // Audio stream caps
+ metadata.insert(QMediaMetaData::AudioCodec, QVariant::fromValue(audioCodec));
+ return;
+ }
+
+ QMediaFormat::VideoCodec videoCodec = QGstreamerFormatInfo::videoCodecForCaps(structure);
+ if (videoCodec != QMediaFormat::VideoCodec::Unspecified) {
+ // Video stream caps
+ metadata.insert(QMediaMetaData::VideoCodec, QVariant::fromValue(videoCodec));
+ std::optional<float> framerate = structure["framerate"].getFraction();
+ if (framerate)
+ metadata.insert(QMediaMetaData::VideoFrameRate, *framerate);
+
+ QSize resolution = structure.resolution();
+ if (resolution.isValid())
+ metadata.insert(QMediaMetaData::Resolution, resolution);
+ }
+}
+
+QMediaMetaData capsToMetaData(const QGstCaps &caps)
+{
+ QMediaMetaData metadata;
+ extendMetaDataFromCaps(metadata, caps);
+ return metadata;
+}
+
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamermetadata_p.h b/src/plugins/multimedia/gstreamer/common/qgstreamermetadata_p.h
index d0d7620db..f04a9aba9 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreamermetadata_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamermetadata_p.h
@@ -21,8 +21,11 @@
QT_BEGIN_NAMESPACE
-QMediaMetaData taglistToMetaData(const GstTagList *);
QMediaMetaData taglistToMetaData(const QGstTagListHandle &);
+void extendMetaDataFromTagList(QMediaMetaData &, const QGstTagListHandle &);
+
+QMediaMetaData capsToMetaData(const QGstCaps &);
+void extendMetaDataFromCaps(QMediaMetaData &, const QGstCaps &);
void applyMetaDataToTagSetter(const QMediaMetaData &metadata, const QGstBin &);
void applyMetaDataToTagSetter(const QMediaMetaData &metadata, const QGstElement &);
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput.cpp b/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput.cpp
index 6bc65693a..3d20a4b87 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput.cpp
@@ -14,194 +14,156 @@ static Q_LOGGING_CATEGORY(qLcMediaVideoOutput, "qt.multimedia.videooutput")
QT_BEGIN_NAMESPACE
+static QGstElement makeVideoConvertScale(const char *name)
+{
+ QGstElementFactoryHandle factory = QGstElement::findFactory("videoconvertscale");
+ if (factory) // videoconvertscale is only available in gstreamer 1.20
+ return QGstElement::createFromFactory(factory, name);
+
+ return QGstBin::createFromPipelineDescription("videoconvert ! videoscale", name,
+ /*ghostUnlinkedPads=*/true);
+}
+
QMaybe<QGstreamerVideoOutput *> QGstreamerVideoOutput::create(QObject *parent)
{
- QGstElement videoConvert;
- QGstElement videoScale;
+ QGstElementFactoryHandle factory = QGstElement::findFactory("videoconvertscale");
- QGstElementFactoryHandle factory = QGstElementFactoryHandle{
- gst_element_factory_find("videoconvertscale"),
- };
+ static std::optional<QString> elementCheck = []() -> std::optional<QString> {
+ std::optional<QString> error = qGstErrorMessageIfElementsNotAvailable("fakesink", "queue");
+ if (error)
+ return error;
- if (factory) { // videoconvertscale is only available in gstreamer 1.20
- videoConvert = QGstElement::createFromFactory(factory, "videoConvertScale");
- } else {
- videoConvert = QGstElement::createFromFactory("videoconvert", "videoConvert");
- if (!videoConvert)
- return errorMessageCannotFindElement("videoconvert");
+ QGstElementFactoryHandle factory = QGstElement::findFactory("videoconvertscale");
+ if (factory)
+ return std::nullopt;
- videoScale = QGstElement::createFromFactory("videoscale", "videoScale");
- if (!videoScale)
- return errorMessageCannotFindElement("videoscale");
- }
+ return qGstErrorMessageIfElementsNotAvailable("videoconvert", "videoscale");
+ }();
- QGstElement videoSink = QGstElement::createFromFactory("fakesink", "fakeVideoSink");
- if (!videoSink)
- return errorMessageCannotFindElement("fakesink");
- videoSink.set("sync", true);
+ if (elementCheck)
+ return *elementCheck;
- return new QGstreamerVideoOutput(videoConvert, videoScale, videoSink, parent);
+ return new QGstreamerVideoOutput(parent);
}
-QGstreamerVideoOutput::QGstreamerVideoOutput(QGstElement convert, QGstElement scale,
- QGstElement sink, QObject *parent)
+QGstreamerVideoOutput::QGstreamerVideoOutput(QObject *parent)
: QObject(parent),
- gstVideoOutput(QGstBin::create("videoOutput")),
- videoConvert(std::move(convert)),
- videoScale(std::move(scale)),
- videoSink(std::move(sink))
+ m_outputBin{
+ QGstBin::create("videoOutput"),
+ },
+ m_videoQueue{
+ QGstElement::createFromFactory("queue", "videoQueue"),
+ },
+ m_videoConvertScale{
+ makeVideoConvertScale("videoConvertScale"),
+ },
+ m_videoSink{
+ QGstElement::createFromFactory("fakesink", "fakeVideoSink"),
+ }
{
- videoQueue = QGstElement::createFromFactory("queue", "videoQueue");
+ m_videoSink.set("sync", true);
+ m_videoSink.set("async", false); // no asynchronous state changes
- videoSink.set("sync", true);
- videoSink.set("async", false); // no asynchronous state changes
+ m_outputBin.add(m_videoQueue, m_videoConvertScale, m_videoSink);
+ qLinkGstElements(m_videoQueue, m_videoConvertScale, m_videoSink);
- if (videoScale) {
- gstVideoOutput.add(videoQueue, videoConvert, videoScale, videoSink);
- qLinkGstElements(videoQueue, videoConvert, videoScale, videoSink);
- } else {
- gstVideoOutput.add(videoQueue, videoConvert, videoSink);
- qLinkGstElements(videoQueue, videoConvert, videoSink);
- }
+ m_subtitleSink = QGstSubtitleSink::createSink(this);
- gstVideoOutput.addGhostPad(videoQueue, "sink");
+ m_outputBin.addGhostPad(m_videoQueue, "sink");
}
QGstreamerVideoOutput::~QGstreamerVideoOutput()
{
- gstVideoOutput.setStateSync(GST_STATE_NULL);
+ QObject::disconnect(m_subtitleConnection);
+ m_outputBin.setStateSync(GST_STATE_NULL);
}
void QGstreamerVideoOutput::setVideoSink(QVideoSink *sink)
{
auto *gstVideoSink = sink ? static_cast<QGstreamerVideoSink *>(sink->platformVideoSink()) : nullptr;
- if (gstVideoSink == m_videoSink)
+ if (gstVideoSink == m_platformVideoSink)
return;
- if (m_videoSink)
- m_videoSink->setPipeline({});
+ if (m_platformVideoSink)
+ m_platformVideoSink->setPipeline({});
- m_videoSink = gstVideoSink;
- if (m_videoSink) {
- m_videoSink->setPipeline(gstPipeline);
- if (nativeSize.isValid())
- m_videoSink->setNativeSize(nativeSize);
+ m_platformVideoSink = gstVideoSink;
+ if (m_platformVideoSink) {
+ m_platformVideoSink->setPipeline(m_pipeline);
+ if (m_nativeSize.isValid())
+ m_platformVideoSink->setNativeSize(m_nativeSize);
}
- QGstElement gstSink;
- if (m_videoSink) {
- gstSink = m_videoSink->gstSink();
+ QGstElement videoSink;
+ if (m_platformVideoSink) {
+ videoSink = m_platformVideoSink->gstSink();
} else {
- gstSink = QGstElement::createFromFactory("fakesink", "fakevideosink");
- Q_ASSERT(gstSink);
- gstSink.set("sync", true);
- gstSink.set("async", false); // no asynchronous state changes
+ videoSink = QGstElement::createFromFactory("fakesink", "fakevideosink");
+ Q_ASSERT(videoSink);
+ videoSink.set("sync", true);
+ videoSink.set("async", false); // no asynchronous state changes
+ }
+
+ QObject::disconnect(m_subtitleConnection);
+ if (sink) {
+ m_subtitleConnection = QObject::connect(this, &QGstreamerVideoOutput::subtitleChanged, sink,
+ [sink](const QString &subtitle) {
+ sink->setSubtitleText(subtitle);
+ });
+ sink->setSubtitleText(m_lastSubtitleString);
}
- if (videoSink == gstSink)
+ if (m_videoSink == videoSink)
return;
- gstPipeline.modifyPipelineWhileNotRunning([&] {
- if (!videoSink.isNull())
- gstVideoOutput.stopAndRemoveElements(videoSink);
+ m_pipeline.modifyPipelineWhileNotRunning([&] {
+ if (!m_videoSink.isNull())
+ m_outputBin.stopAndRemoveElements(m_videoSink);
- videoSink = gstSink;
- gstVideoOutput.add(videoSink);
+ m_videoSink = videoSink;
+ m_outputBin.add(m_videoSink);
- if (videoScale)
- qLinkGstElements(videoScale, videoSink);
- else
- qLinkGstElements(videoConvert, videoSink);
+ qLinkGstElements(m_videoConvertScale, m_videoSink);
GstEvent *event = gst_event_new_reconfigure();
- gst_element_send_event(videoSink.element(), event);
- videoSink.syncStateWithParent();
-
- doLinkSubtitleStream();
+ gst_element_send_event(m_videoSink.element(), event);
+ m_videoSink.syncStateWithParent();
});
- qCDebug(qLcMediaVideoOutput) << "sinkChanged" << gstSink.name();
-
- GST_DEBUG_BIN_TO_DOT_FILE(gstPipeline.bin(),
- GstDebugGraphDetails(/*GST_DEBUG_GRAPH_SHOW_ALL |*/ GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE |
- GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES),
- videoSink.name());
+ qCDebug(qLcMediaVideoOutput) << "sinkChanged" << videoSink.name();
+ m_pipeline.dumpGraph(m_videoSink.name().constData());
}
void QGstreamerVideoOutput::setPipeline(const QGstPipeline &pipeline)
{
- gstPipeline = pipeline;
- if (m_videoSink)
- m_videoSink->setPipeline(gstPipeline);
-}
-
-void QGstreamerVideoOutput::linkSubtitleStream(QGstElement src)
-{
- qCDebug(qLcMediaVideoOutput) << "link subtitle stream" << src.isNull();
- if (src == subtitleSrc)
- return;
-
- gstPipeline.modifyPipelineWhileNotRunning([&] {
- subtitleSrc = src;
- doLinkSubtitleStream();
- });
-}
-
-void QGstreamerVideoOutput::unlinkSubtitleStream()
-{
- if (subtitleSrc.isNull())
- return;
- qCDebug(qLcMediaVideoOutput) << "unlink subtitle stream";
- subtitleSrc = {};
- if (!subtitleSink.isNull()) {
- gstPipeline.modifyPipelineWhileNotRunning([&] {
- gstPipeline.stopAndRemoveElements(subtitleSink);
- return;
- });
- subtitleSink = {};
- }
- if (m_videoSink)
- m_videoSink->setSubtitleText({});
-}
-
-void QGstreamerVideoOutput::doLinkSubtitleStream()
-{
- if (!subtitleSink.isNull()) {
- gstPipeline.stopAndRemoveElements(subtitleSink);
- subtitleSink = {};
- }
- if (!m_videoSink || subtitleSrc.isNull())
- return;
- if (subtitleSink.isNull()) {
- subtitleSink = m_videoSink->subtitleSink();
- gstPipeline.add(subtitleSink);
- }
- qLinkGstElements(subtitleSrc, subtitleSink);
+ m_pipeline = pipeline;
+ if (m_platformVideoSink)
+ m_platformVideoSink->setPipeline(m_pipeline);
}
void QGstreamerVideoOutput::updateNativeSize()
{
- if (!m_videoSink)
+ if (!m_platformVideoSink)
return;
- m_videoSink->setNativeSize(qRotatedFrameSize(nativeSize, rotation));
+ m_platformVideoSink->setNativeSize(qRotatedFrameSize(m_nativeSize, m_rotation));
}
void QGstreamerVideoOutput::setIsPreview()
{
// configures the queue to be fast and lightweight for camera preview
// also avoids blocking the queue in case we have an encodebin attached to the tee as well
- videoQueue.set("leaky", 2 /*downstream*/);
- videoQueue.set("silent", true);
- videoQueue.set("max-size-buffers", uint(1));
- videoQueue.set("max-size-bytes", uint(0));
- videoQueue.set("max-size-time", quint64(0));
+ m_videoQueue.set("leaky", 2 /*downstream*/);
+ m_videoQueue.set("silent", true);
+ m_videoQueue.set("max-size-buffers", uint(1));
+ m_videoQueue.set("max-size-bytes", uint(0));
+ m_videoQueue.set("max-size-time", quint64(0));
}
void QGstreamerVideoOutput::flushSubtitles()
{
- if (!subtitleSink.isNull()) {
- auto pad = subtitleSink.staticPad("sink");
+ if (!m_subtitleSink.isNull()) {
+ auto pad = m_subtitleSink.staticPad("sink");
auto *event = gst_event_new_flush_start();
pad.sendEvent(event);
event = gst_event_new_flush_stop(false);
@@ -211,16 +173,26 @@ void QGstreamerVideoOutput::flushSubtitles()
void QGstreamerVideoOutput::setNativeSize(QSize sz)
{
- nativeSize = sz;
+ m_nativeSize = sz;
updateNativeSize();
}
void QGstreamerVideoOutput::setRotation(QtVideo::Rotation rot)
{
- rotation = rot;
+ m_rotation = rot;
updateNativeSize();
}
+void QGstreamerVideoOutput::updateSubtitle(QString string)
+{
+ // GStreamer thread
+
+ QMetaObject::invokeMethod(this, [this, string = std::move(string)]() mutable {
+ m_lastSubtitleString = string;
+ Q_EMIT subtitleChanged(std::move(string));
+ });
+}
+
QT_END_NAMESPACE
#include "moc_qgstreamervideooutput_p.cpp"
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput_p.h b/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput_p.h
index 42acb18cc..a74f058f0 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput_p.h
@@ -21,6 +21,7 @@
#include <common/qgst_p.h>
#include <common/qgstpipeline_p.h>
#include <common/qgstreamervideosink_p.h>
+#include <common/qgstsubtitlesink_p.h>
#include <qwaitcondition.h>
#include <qmutex.h>
#include <qpointer.h>
@@ -29,7 +30,7 @@ QT_BEGIN_NAMESPACE
class QVideoSink;
-class QGstreamerVideoOutput : public QObject
+class QGstreamerVideoOutput : public QObject, QAbstractSubtitleObserver
{
Q_OBJECT
@@ -38,13 +39,12 @@ public:
~QGstreamerVideoOutput();
void setVideoSink(QVideoSink *sink);
- QGstreamerVideoSink *gstreamerVideoSink() const { return m_videoSink; }
+ QGstreamerVideoSink *gstreamerVideoSink() const { return m_platformVideoSink; }
void setPipeline(const QGstPipeline &pipeline);
- QGstElement gstElement() const { return gstVideoOutput; }
- void linkSubtitleStream(QGstElement subtitleSrc);
- void unlinkSubtitleStream();
+ QGstElement gstElement() const { return m_outputBin; }
+ QGstElement gstSubtitleElement() const { return m_subtitleSink; }
void setIsPreview();
void flushSubtitles();
@@ -52,29 +52,32 @@ public:
void setNativeSize(QSize);
void setRotation(QtVideo::Rotation);
+ void updateSubtitle(QString) override;
+
+signals:
+ void subtitleChanged(QString);
+
private:
- QGstreamerVideoOutput(QGstElement videoConvert, QGstElement videoScale, QGstElement videoSink,
- QObject *parent);
+ explicit QGstreamerVideoOutput(QObject *parent);
- void doLinkSubtitleStream();
void updateNativeSize();
- QPointer<QGstreamerVideoSink> m_videoSink;
+ QPointer<QGstreamerVideoSink> m_platformVideoSink;
// Gst elements
- QGstPipeline gstPipeline;
+ QGstPipeline m_pipeline;
- QGstBin gstVideoOutput;
- QGstElement videoQueue;
- QGstElement videoConvert;
- QGstElement videoScale;
- QGstElement videoSink;
+ QGstBin m_outputBin;
+ QGstElement m_videoQueue;
+ QGstElement m_videoConvertScale;
+ QGstElement m_videoSink;
- QGstElement subtitleSrc;
- QGstElement subtitleSink;
+ QGstElement m_subtitleSink;
+ QMetaObject::Connection m_subtitleConnection;
+ QString m_lastSubtitleString;
- QSize nativeSize;
- QtVideo::Rotation rotation{};
+ QSize m_nativeSize;
+ QtVideo::Rotation m_rotation{};
};
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamervideosink.cpp b/src/plugins/multimedia/gstreamer/common/qgstreamervideosink.cpp
index 2ed2acb36..456febe2a 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreamervideosink.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamervideosink.cpp
@@ -3,44 +3,45 @@
#include <common/qgstreamervideosink_p.h>
#include <common/qgstvideorenderersink_p.h>
-#include <common/qgstsubtitlesink_p.h>
#include <common/qgst_debug_p.h>
#include <common/qgstutils_p.h>
#include <rhi/qrhi.h>
+#include <QtCore/qdebug.h>
+#include <QtCore/qloggingcategory.h>
+
#if QT_CONFIG(gstreamer_gl)
-#include <QGuiApplication>
-#include <QtGui/qopenglcontext.h>
-#include <QWindow>
-#include <qpa/qplatformnativeinterface.h>
-#include <gst/gl/gstglconfig.h>
+# include <QtGui/QGuiApplication>
+# include <QtGui/qopenglcontext.h>
+# include <QtGui/QWindow>
+# include <QtGui/qpa/qplatformnativeinterface.h>
+# include <gst/gl/gstglconfig.h>
-#if GST_GL_HAVE_WINDOW_X11 && __has_include("X11/Xlib-xcb.h")
+# if GST_GL_HAVE_WINDOW_X11 && __has_include("X11/Xlib-xcb.h")
# include <gst/gl/x11/gstgldisplay_x11.h>
-#endif
-#if GST_GL_HAVE_PLATFORM_EGL
+# endif
+# if GST_GL_HAVE_PLATFORM_EGL
# include <gst/gl/egl/gstgldisplay_egl.h>
# include <EGL/egl.h>
# include <EGL/eglext.h>
-#endif
-#if GST_GL_HAVE_WINDOW_WAYLAND && __has_include("wayland-client.h")
+# endif
+# if GST_GL_HAVE_WINDOW_WAYLAND && __has_include("wayland-client.h")
# include <gst/gl/wayland/gstgldisplay_wayland.h>
-#endif
+# endif
#endif // #if QT_CONFIG(gstreamer_gl)
-#include <QtCore/qdebug.h>
-
-#include <QtCore/qloggingcategory.h>
-
QT_BEGIN_NAMESPACE
static Q_LOGGING_CATEGORY(qLcGstVideoSink, "qt.multimedia.gstvideosink");
QGstreamerVideoSink::QGstreamerVideoSink(QVideoSink *parent)
- : QPlatformVideoSink(parent)
+ : QPlatformVideoSink{
+ parent,
+ },
+ m_sinkBin{
+ QGstBin::create("videoSinkBin"),
+ }
{
- sinkBin = QGstBin::create("videoSinkBin");
-
// This is a hack for some iMX and NVidia platforms. These require the use of a special video
// conversion element in the pipeline before the video sink, as they unfortunately
// output some proprietary format from the decoder even though it's sometimes marked as
@@ -48,38 +49,44 @@ QGstreamerVideoSink::QGstreamerVideoSink(QVideoSink *parent)
//
// To fix this, simply insert the element into the pipeline if it's available. Otherwise
// we simply use an identity element.
- gstQueue = QGstElement::createFromFactory("queue", "videoSinkQueue");
-
QGstElementFactoryHandle factory;
- // QT_MULTIMEDIA_GSTREAMER_OVERRIDE_VIDEO_CONVERSION_ELEMENT allows users to override the
+ // QT_GSTREAMER_OVERRIDE_VIDEO_CONVERSION_ELEMENT allows users to override the
// conversion element. Ideally we construct the element programmatically, though.
- QByteArray preprocessOverride =
- qgetenv("QT_MULTIMEDIA_GSTREAMER_OVERRIDE_VIDEO_CONVERSION_ELEMENT");
+ QByteArray preprocessOverride = qgetenv("QT_GSTREAMER_OVERRIDE_VIDEO_CONVERSION_ELEMENT");
if (!preprocessOverride.isEmpty()) {
- qCDebug(qLcGstVideoSink) << "requesting conversion element from environment: "
+ qCDebug(qLcGstVideoSink) << "requesting conversion element from environment:"
<< preprocessOverride;
- factory = QGstElementFactoryHandle{
- gst_element_factory_find(preprocessOverride.constData()),
- };
+
+ m_gstPreprocess = QGstBin::createFromPipelineDescription(preprocessOverride, nullptr,
+ /*ghostUnlinkedPads=*/true);
+ if (!m_gstPreprocess)
+ qCWarning(qLcGstVideoSink) << "Cannot create conversion element:" << preprocessOverride;
}
- if (!factory)
- factory = QGstElementFactoryHandle{
- gst_element_factory_find("imxvideoconvert_g2d"),
+ if (!m_gstPreprocess) {
+ // This is a hack for some iMX and NVidia platforms. These require the use of a special
+ // video conversion element in the pipeline before the video sink, as they unfortunately
+ // output some proprietary format from the decoder even though it's sometimes marked as
+ // a regular supported video/x-raw format.
+ static constexpr auto decodersToTest = {
+ "imxvideoconvert_g2d",
+ "nvvidconv",
};
- if (!factory)
- factory = QGstElementFactoryHandle{
- gst_element_factory_find("nvvidconv"),
- };
+ for (const char *decoder : decodersToTest) {
+ factory = QGstElement::findFactory(decoder);
+ if (factory)
+ break;
+ }
- if (factory) {
- qCDebug(qLcGstVideoSink) << "instantiating conversion element: "
- << g_type_name(
- gst_element_factory_get_element_type(factory.get()));
+ if (factory) {
+ qCDebug(qLcGstVideoSink)
+ << "instantiating conversion element:"
+ << g_type_name(gst_element_factory_get_element_type(factory.get()));
- gstPreprocess = QGstElement::createFromFactory(factory, "preprocess");
+ m_gstPreprocess = QGstElement::createFromFactory(factory, "preprocess");
+ }
}
bool disablePixelAspectRatio =
@@ -91,32 +98,32 @@ QGstreamerVideoSink::QGstreamerVideoSink(QVideoSink *parent)
// pixel-aspect-ratio handling
//
// compare: https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/6242
- gstCapsFilter =
+ m_gstCapsFilter =
QGstElement::createFromFactory("identity", "nullPixelAspectRatioCapsFilter");
} else {
- gstCapsFilter = QGstElement::createFromFactory("capsfilter", "pixelAspectRatioCapsFilter");
+ m_gstCapsFilter =
+ QGstElement::createFromFactory("capsfilter", "pixelAspectRatioCapsFilter");
QGstCaps capsFilterCaps{
gst_caps_new_simple("video/x-raw", "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1, NULL),
QGstCaps::HasRef,
};
- g_object_set(gstCapsFilter.element(), "caps", capsFilterCaps.caps(), NULL);
+ g_object_set(m_gstCapsFilter.element(), "caps", capsFilterCaps.caps(), NULL);
}
- if (gstPreprocess) {
- sinkBin.add(gstQueue, gstPreprocess, gstCapsFilter);
- qLinkGstElements(gstQueue, gstPreprocess, gstCapsFilter);
+ if (m_gstPreprocess) {
+ m_sinkBin.add(m_gstPreprocess, m_gstCapsFilter);
+ qLinkGstElements(m_gstPreprocess, m_gstCapsFilter);
+ m_sinkBin.addGhostPad(m_gstPreprocess, "sink");
} else {
- sinkBin.add(gstQueue, gstCapsFilter);
- qLinkGstElements(gstQueue, gstCapsFilter);
+ m_sinkBin.add(m_gstCapsFilter);
+ m_sinkBin.addGhostPad(m_gstCapsFilter, "sink");
}
- sinkBin.addGhostPad(gstQueue, "sink");
-
- gstSubtitleSink =
- QGstElement(GST_ELEMENT(QGstSubtitleSink::createSink(this)), QGstElement::NeedsRef);
}
QGstreamerVideoSink::~QGstreamerVideoSink()
{
+ emit aboutToBeDestroyed();
+
unrefGstContexts();
setPipeline(QGstPipeline());
@@ -125,19 +132,19 @@ QGstreamerVideoSink::~QGstreamerVideoSink()
QGstElement QGstreamerVideoSink::gstSink()
{
updateSinkElement();
- return sinkBin;
+ return m_sinkBin;
}
void QGstreamerVideoSink::setPipeline(QGstPipeline pipeline)
{
- gstPipeline = std::move(pipeline);
+ m_pipeline = std::move(pipeline);
}
bool QGstreamerVideoSink::inStoppedState() const
{
- if (gstPipeline.isNull())
+ if (m_pipeline.isNull())
return true;
- return gstPipeline.inStoppedState();
+ return m_pipeline.inStoppedState();
}
void QGstreamerVideoSink::setRhi(QRhi *rhi)
@@ -149,7 +156,7 @@ void QGstreamerVideoSink::setRhi(QRhi *rhi)
m_rhi = rhi;
updateGstContexts();
- if (!gstQtSink.isNull()) {
+ if (!m_gstQtSink.isNull()) {
// force creation of a new sink with proper caps
createQtSink();
updateSinkElement();
@@ -158,36 +165,37 @@ void QGstreamerVideoSink::setRhi(QRhi *rhi)
void QGstreamerVideoSink::createQtSink()
{
- if (gstQtSink)
- gstQtSink.setStateSync(GST_STATE_NULL);
+ if (m_gstQtSink)
+ m_gstQtSink.setStateSync(GST_STATE_NULL);
- gstQtSink = QGstElement(reinterpret_cast<GstElement *>(QGstVideoRendererSink::createSink(this)),
- QGstElement::NeedsRef);
+ m_gstQtSink =
+ QGstElement(reinterpret_cast<GstElement *>(QGstVideoRendererSink::createSink(this)),
+ QGstElement::NeedsRef);
}
void QGstreamerVideoSink::updateSinkElement()
{
QGstElement newSink;
- if (gstQtSink.isNull())
+ if (m_gstQtSink.isNull())
createQtSink();
- newSink = gstQtSink;
+ newSink = m_gstQtSink;
- if (newSink == gstVideoSink)
+ if (newSink == m_gstVideoSink)
return;
- gstPipeline.modifyPipelineWhileNotRunning([&] {
- if (!gstVideoSink.isNull())
- sinkBin.stopAndRemoveElements(gstVideoSink);
+ m_pipeline.modifyPipelineWhileNotRunning([&] {
+ if (!m_gstVideoSink.isNull())
+ m_sinkBin.stopAndRemoveElements(m_gstVideoSink);
newSink.set("async", false); // no asynchronous state changes
- gstVideoSink = newSink;
- sinkBin.add(gstVideoSink);
- qLinkGstElements(gstCapsFilter, gstVideoSink);
- gstVideoSink.setState(GST_STATE_PAUSED);
+ m_gstVideoSink = newSink;
+ m_sinkBin.add(m_gstVideoSink);
+ qLinkGstElements(m_gstCapsFilter, m_gstVideoSink);
+ m_gstVideoSink.setState(GST_STATE_PAUSED);
});
- gstPipeline.dumpGraph("updateVideoSink");
+ m_pipeline.dumpGraph("updateVideoSink");
}
void QGstreamerVideoSink::unrefGstContexts()
@@ -200,6 +208,8 @@ void QGstreamerVideoSink::unrefGstContexts()
void QGstreamerVideoSink::updateGstContexts()
{
+ using namespace Qt::Literals;
+
unrefGstContexts();
#if QT_CONFIG(gstreamer_gl)
@@ -212,12 +222,12 @@ void QGstreamerVideoSink::updateGstContexts()
const QString platform = QGuiApplication::platformName();
QPlatformNativeInterface *pni = QGuiApplication::platformNativeInterface();
- m_eglDisplay = pni->nativeResourceForIntegration("egldisplay");
+ m_eglDisplay = pni->nativeResourceForIntegration("egldisplay"_ba);
// qDebug() << "platform is" << platform << m_eglDisplay;
QGstGLDisplayHandle gstGlDisplay;
- const char *contextName = "eglcontext";
+ QByteArray contextName = "eglcontext"_ba;
GstGLPlatform glPlatform = GST_GL_PLATFORM_EGL;
// use the egl display if we have one
if (m_eglDisplay) {
@@ -227,12 +237,12 @@ void QGstreamerVideoSink::updateGstContexts()
m_eglImageTargetTexture2D = eglGetProcAddress("glEGLImageTargetTexture2DOES");
#endif
} else {
- auto display = pni->nativeResourceForIntegration("display");
+ auto display = pni->nativeResourceForIntegration("display"_ba);
if (display) {
#if GST_GL_HAVE_WINDOW_X11 && __has_include("X11/Xlib-xcb.h")
if (platform == QLatin1String("xcb")) {
- contextName = "glxcontext";
+ contextName = "glxcontext"_ba;
glPlatform = GST_GL_PLATFORM_GLX;
gstGlDisplay.reset(GST_GL_DISPLAY_CAST(
@@ -289,8 +299,8 @@ void QGstreamerVideoSink::updateGstContexts()
gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, displayContext.get(), nullptr);
displayContext.close();
- if (!gstPipeline.isNull())
- gst_element_set_context(gstPipeline.element(), m_gstGlLocalContext.get());
+ if (m_pipeline)
+ gst_element_set_context(m_pipeline.element(), m_gstGlLocalContext.get());
#endif // #if QT_CONFIG(gstreamer_gl)
}
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamervideosink_p.h b/src/plugins/multimedia/gstreamer/common/qgstreamervideosink_p.h
index 132eab557..d940485f4 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstreamervideosink_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamervideosink_p.h
@@ -15,25 +15,17 @@
// We mean it.
//
-#include <private/qtmultimediaglobal_p.h>
-#include <private/qplatformvideosink_p.h>
+#include <QtMultimedia/qvideosink.h>
+#include <QtMultimedia/private/qplatformvideosink_p.h>
#include <common/qgstpipeline_p.h>
-#include <common/qgstreamervideooverlay_p.h>
-#include <QtGui/qcolor.h>
-#include <qvideosink.h>
-
-#if QT_CONFIG(gstreamer_gl)
-#include <gst/gl/gl.h>
-#endif
QT_BEGIN_NAMESPACE
-class QGstreamerVideoRenderer;
-class QVideoWindow;
class QGstreamerVideoSink : public QPlatformVideoSink
{
Q_OBJECT
+
public:
explicit QGstreamerVideoSink(QVideoSink *parent = nullptr);
~QGstreamerVideoSink();
@@ -42,7 +34,6 @@ public:
QRhi *rhi() const { return m_rhi; }
QGstElement gstSink();
- QGstElement subtitleSink() const { return gstSubtitleSink; }
void setPipeline(QGstPipeline pipeline);
bool inStoppedState() const;
@@ -52,6 +43,9 @@ public:
Qt::HANDLE eglDisplay() const { return m_eglDisplay; }
QFunctionPointer eglImageTargetTexture2D() const { return m_eglImageTargetTexture2D; }
+Q_SIGNALS:
+ void aboutToBeDestroyed();
+
private:
void createQtSink();
void updateSinkElement();
@@ -59,14 +53,12 @@ private:
void unrefGstContexts();
void updateGstContexts();
- QGstPipeline gstPipeline;
- QGstBin sinkBin;
- QGstElement gstQueue;
- QGstElement gstPreprocess;
- QGstElement gstCapsFilter;
- QGstElement gstVideoSink;
- QGstElement gstQtSink;
- QGstElement gstSubtitleSink;
+ QGstPipeline m_pipeline;
+ QGstBin m_sinkBin;
+ QGstElement m_gstPreprocess;
+ QGstElement m_gstCapsFilter;
+ QGstElement m_gstVideoSink;
+ QGstElement m_gstQtSink;
QRhi *m_rhi = nullptr;
diff --git a/src/plugins/multimedia/gstreamer/common/qgstsubtitlesink.cpp b/src/plugins/multimedia/gstreamer/common/qgstsubtitlesink.cpp
index c6b230d85..58b5c3f53 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstsubtitlesink.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstsubtitlesink.cpp
@@ -1,55 +1,62 @@
// Copyright (C) 2021 The Qt Company
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-#include <QDebug>
-#include <QThread>
-#include <QEvent>
-
-#include "qgstreamervideosink_p.h"
#include "qgstsubtitlesink_p.h"
+#include "qgst_debug_p.h"
+
+#include <QtCore/qdebug.h>
QT_BEGIN_NAMESPACE
-static GstBaseSinkClass *gst_sink_parent_class;
-static thread_local QGstreamerVideoSink *gst_current_sink;
+namespace {
+GstBaseSinkClass *gst_sink_parent_class;
+thread_local QAbstractSubtitleObserver *gst_current_observer;
+
+class QGstSubtitleSinkClass
+{
+public:
+ GstBaseSinkClass parent_class;
+};
+
+} // namespace
#define ST_SINK(s) QGstSubtitleSink *sink(reinterpret_cast<QGstSubtitleSink *>(s))
-QGstSubtitleSink *QGstSubtitleSink::createSink(QGstreamerVideoSink *sink)
+QGstElement QGstSubtitleSink::createSink(QAbstractSubtitleObserver *observer)
{
- gst_current_sink = sink;
+ gst_current_observer = observer;
QGstSubtitleSink *gstSink = reinterpret_cast<QGstSubtitleSink *>(
g_object_new(QGstSubtitleSink::get_type(), nullptr));
g_object_set(gstSink, "async", false, nullptr);
- return gstSink;
+ return QGstElement{
+ qGstCheckedCast<GstElement>(gstSink),
+ QGstElement::NeedsRef,
+ };
}
GType QGstSubtitleSink::get_type()
{
- static const GTypeInfo info =
+ // clang-format off
+ static constexpr GTypeInfo info =
{
- sizeof(QGstSubtitleSinkClass), // class_size
- base_init, // base_init
- nullptr, // base_finalize
- class_init, // class_init
- nullptr, // class_finalize
- nullptr, // class_data
- sizeof(QGstSubtitleSink), // instance_size
- 0, // n_preallocs
- instance_init, // instance_init
- nullptr // value_table
+ sizeof(QGstSubtitleSinkClass), // class_size
+ base_init, // base_init
+ nullptr, // base_finalize
+ class_init, // class_init
+ nullptr, // class_finalize
+ nullptr, // class_data
+ sizeof(QGstSubtitleSink), // instance_size
+ 0, // n_preallocs
+ instance_init, // instance_init
+ nullptr // value_table
};
+ // clang-format on
static const GType type = []() {
const auto result = g_type_register_static(
GST_TYPE_BASE_SINK, "QGstSubtitleSink", &info, GTypeFlags(0));
-
- // Register the sink type to be used in custom piplines.
- // When surface is ready the sink can be used.
- gst_element_register(nullptr, "qtsubtitlesink", GST_RANK_PRIMARY, result);
-
return result;
}();
@@ -83,21 +90,20 @@ void QGstSubtitleSink::class_init(gpointer g_class, gpointer class_data)
void QGstSubtitleSink::base_init(gpointer g_class)
{
- static GstStaticPadTemplate sink_pad_template = GST_STATIC_PAD_TEMPLATE(
- "sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS("ANY"));
+ static GstStaticPadTemplate sink_pad_template =
+ GST_STATIC_PAD_TEMPLATE("sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS("ANY"));
gst_element_class_add_pad_template(
GST_ELEMENT_CLASS(g_class), gst_static_pad_template_get(&sink_pad_template));
}
-void QGstSubtitleSink::instance_init(GTypeInstance *instance, gpointer g_class)
+void QGstSubtitleSink::instance_init(GTypeInstance *instance, gpointer /*g_class*/)
{
- Q_UNUSED(g_class);
ST_SINK(instance);
- Q_ASSERT(gst_current_sink);
- sink->sink = gst_current_sink;
- gst_current_sink = nullptr;
+ Q_ASSERT(gst_current_observer);
+ sink->observer = gst_current_observer;
+ gst_current_observer = nullptr;
}
void QGstSubtitleSink::finalize(GObject *object)
@@ -132,8 +138,8 @@ GstFlowReturn QGstSubtitleSink::wait_event(GstBaseSink *base, GstEvent *event)
GstFlowReturn retval = gst_sink_parent_class->wait_event(base, event);
ST_SINK(base);
if (event->type == GST_EVENT_GAP) {
-// qDebug() << "gap, clearing subtitle";
- sink->sink->setSubtitleText(QString());
+ // qDebug() << "gap, clearing subtitle";
+ sink->observer->updateSubtitle(QString());
}
return retval;
}
@@ -148,7 +154,7 @@ GstFlowReturn QGstSubtitleSink::render(GstBaseSink *base, GstBuffer *buffer)
subtitle = QString::fromUtf8(reinterpret_cast<const char *>(info.data));
gst_memory_unmap(mem, &info);
// qDebug() << "render" << buffer << subtitle;
- sink->sink->setSubtitleText(subtitle);
+ sink->observer->updateSubtitle(subtitle);
return GST_FLOW_OK;
}
diff --git a/src/plugins/multimedia/gstreamer/common/qgstsubtitlesink_p.h b/src/plugins/multimedia/gstreamer/common/qgstsubtitlesink_p.h
index 0f515cb99..1970ac48b 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstsubtitlesink_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstsubtitlesink_p.h
@@ -17,24 +17,25 @@
#include <QtMultimedia/private/qtmultimediaglobal_p.h>
-#include <QtCore/qlist.h>
-#include <QtCore/qmutex.h>
-#include <QtCore/qqueue.h>
-#include <QtCore/qpointer.h>
-#include <QtCore/qwaitcondition.h>
+#include <QtCore/qstring.h>
#include <common/qgst_p.h>
#include <gst/base/gstbasesink.h>
QT_BEGIN_NAMESPACE
-class QGstreamerVideoSink;
+class QAbstractSubtitleObserver
+{
+public:
+ virtual ~QAbstractSubtitleObserver() = default;
+ virtual void updateSubtitle(QString) = 0;
+};
class QGstSubtitleSink
{
public:
GstBaseSink parent{};
- static QGstSubtitleSink *createSink(QGstreamerVideoSink *sink);
+ static QGstElement createSink(QAbstractSubtitleObserver *observer);
private:
static GType get_type();
@@ -55,14 +56,7 @@ private:
static GstFlowReturn render(GstBaseSink *sink, GstBuffer *buffer);
private:
- QGstreamerVideoSink *sink = nullptr;
-};
-
-
-class QGstSubtitleSinkClass
-{
-public:
- GstBaseSinkClass parent_class;
+ QAbstractSubtitleObserver *observer = nullptr;
};
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstutils.cpp b/src/plugins/multimedia/gstreamer/common/qgstutils.cpp
index 40fb4b6f7..8ec2bde3c 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstutils.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstutils.cpp
@@ -56,7 +56,7 @@ QAudioFormat QGstUtils::audioFormatForSample(GstSample *sample)
QAudioFormat QGstUtils::audioFormatForCaps(const QGstCaps &caps)
{
QAudioFormat format;
- QGstStructure s = caps.at(0);
+ QGstStructureView s = caps.at(0);
if (s.name() != "audio/x-raw")
return format;
diff --git a/src/plugins/multimedia/gstreamer/common/qgstvideobuffer.cpp b/src/plugins/multimedia/gstreamer/common/qgstvideobuffer.cpp
index 6552786bb..df3fb3d69 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstvideobuffer.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstvideobuffer.cpp
@@ -54,10 +54,10 @@ QT_BEGIN_NAMESPACE
QGstVideoBuffer::QGstVideoBuffer(QGstBufferHandle buffer, const GstVideoInfo &info,
QGstreamerVideoSink *sink, const QVideoFrameFormat &frameFormat,
QGstCaps::MemoryFormat format)
- : QAbstractVideoBuffer((sink && sink->rhi() && format != QGstCaps::CpuMemory)
- ? QVideoFrame::RhiTextureHandle
- : QVideoFrame::NoHandle,
- sink ? sink->rhi() : nullptr),
+ : QHwVideoBuffer((sink && sink->rhi() && format != QGstCaps::CpuMemory)
+ ? QVideoFrame::RhiTextureHandle
+ : QVideoFrame::NoHandle,
+ sink ? sink->rhi() : nullptr),
memoryFormat(format),
m_frameFormat(frameFormat),
m_rhi(sink ? sink->rhi() : nullptr),
@@ -76,40 +76,35 @@ QGstVideoBuffer::QGstVideoBuffer(QGstBufferHandle buffer, const GstVideoInfo &in
QGstVideoBuffer::~QGstVideoBuffer()
{
- unmap();
+ Q_ASSERT(m_mode == QtVideo::MapMode::NotMapped);
}
-
-QVideoFrame::MapMode QGstVideoBuffer::mapMode() const
-{
- return m_mode;
-}
-
-QAbstractVideoBuffer::MapData QGstVideoBuffer::map(QVideoFrame::MapMode mode)
+QAbstractVideoBuffer::MapData QGstVideoBuffer::map(QtVideo::MapMode mode)
{
- const GstMapFlags flags = GstMapFlags(((mode & QVideoFrame::ReadOnly) ? GST_MAP_READ : 0)
- | ((mode & QVideoFrame::WriteOnly) ? GST_MAP_WRITE : 0));
+ const GstMapFlags flags = GstMapFlags(
+ ((mode & QtVideo::MapMode::ReadOnly ) == QtVideo::MapMode::NotMapped ? 0 : GST_MAP_READ)
+ | ((mode & QtVideo::MapMode::WriteOnly) == QtVideo::MapMode::NotMapped ? 0 : GST_MAP_WRITE));
MapData mapData;
- if (mode == QVideoFrame::NotMapped || m_mode != QVideoFrame::NotMapped)
+ if (mode == QtVideo::MapMode::NotMapped || m_mode != QtVideo::MapMode::NotMapped)
return mapData;
if (m_videoInfo.finfo->n_planes == 0) { // Encoded
if (gst_buffer_map(m_buffer.get(), &m_frame.map[0], flags)) {
- mapData.nPlanes = 1;
+ mapData.planeCount = 1;
mapData.bytesPerLine[0] = -1;
- mapData.size[0] = m_frame.map[0].size;
+ mapData.dataSize[0] = m_frame.map[0].size;
mapData.data[0] = static_cast<uchar *>(m_frame.map[0].data);
m_mode = mode;
}
} else if (gst_video_frame_map(&m_frame, &m_videoInfo, m_buffer.get(), flags)) {
- mapData.nPlanes = GST_VIDEO_FRAME_N_PLANES(&m_frame);
+ mapData.planeCount = GST_VIDEO_FRAME_N_PLANES(&m_frame);
for (guint i = 0; i < GST_VIDEO_FRAME_N_PLANES(&m_frame); ++i) {
mapData.bytesPerLine[i] = GST_VIDEO_FRAME_PLANE_STRIDE(&m_frame, i);
mapData.data[i] = static_cast<uchar *>(GST_VIDEO_FRAME_PLANE_DATA(&m_frame, i));
- mapData.size[i] = mapData.bytesPerLine[i]*GST_VIDEO_FRAME_COMP_HEIGHT(&m_frame, i);
+ mapData.dataSize[i] = mapData.bytesPerLine[i]*GST_VIDEO_FRAME_COMP_HEIGHT(&m_frame, i);
}
m_mode = mode;
@@ -119,13 +114,13 @@ QAbstractVideoBuffer::MapData QGstVideoBuffer::map(QVideoFrame::MapMode mode)
void QGstVideoBuffer::unmap()
{
- if (m_mode != QVideoFrame::NotMapped) {
+ if (m_mode != QtVideo::MapMode::NotMapped) {
if (m_videoInfo.finfo->n_planes == 0)
gst_buffer_unmap(m_buffer.get(), &m_frame.map[0]);
else
gst_video_frame_unmap(&m_frame);
}
- m_mode = QVideoFrame::NotMapped;
+ m_mode = QtVideo::MapMode::NotMapped;
}
#if QT_CONFIG(gstreamer_gl) && QT_CONFIG(linux_dmabuf)
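
Note: map() now takes the scoped QtVideo::MapMode enum, so individual flags can no longer be tested via implicit conversion to bool; each bit is masked and compared against NotMapped, as in the hunk above. A standalone sketch of that translation (the helper name toGstMapFlags is illustrative):

#include <QtMultimedia/qtvideo.h>
#include <gst/gst.h>

// Illustrative helper mirroring the flag translation in map() above.
static GstMapFlags toGstMapFlags(QtVideo::MapMode mode)
{
    int flags = 0;
    if ((mode & QtVideo::MapMode::ReadOnly) != QtVideo::MapMode::NotMapped)
        flags |= GST_MAP_READ;   // caller wants to read the mapped planes
    if ((mode & QtVideo::MapMode::WriteOnly) != QtVideo::MapMode::NotMapped)
        flags |= GST_MAP_WRITE;  // caller wants to write into the buffer
    return GstMapFlags(flags);
}
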
diff --git a/src/plugins/multimedia/gstreamer/common/qgstvideobuffer_p.h b/src/plugins/multimedia/gstreamer/common/qgstvideobuffer_p.h
index 151927d3d..573a4662c 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstvideobuffer_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstvideobuffer_p.h
@@ -15,8 +15,7 @@
// We mean it.
//
-#include <private/qtmultimediaglobal_p.h>
-#include <private/qabstractvideobuffer_p.h>
+#include <private/qhwvideobuffer_p.h>
#include <QtCore/qvariant.h>
#include <common/qgst_p.h>
@@ -27,16 +26,14 @@ class QVideoFrameFormat;
class QGstreamerVideoSink;
class QOpenGLContext;
-class QGstVideoBuffer final : public QAbstractVideoBuffer
+class QGstVideoBuffer final : public QHwVideoBuffer
{
public:
QGstVideoBuffer(QGstBufferHandle buffer, const GstVideoInfo &info, QGstreamerVideoSink *sink,
const QVideoFrameFormat &frameFormat, QGstCaps::MemoryFormat format);
~QGstVideoBuffer();
- QVideoFrame::MapMode mapMode() const override;
-
- MapData map(QVideoFrame::MapMode mode) override;
+ MapData map(QtVideo::MapMode mode) override;
void unmap() override;
std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *) override;
@@ -48,7 +45,7 @@ private:
mutable GstVideoInfo m_videoInfo;
mutable GstVideoFrame m_frame{};
const QGstBufferHandle m_buffer;
- QVideoFrame::MapMode m_mode = QVideoFrame::NotMapped;
+ QtVideo::MapMode m_mode = QtVideo::MapMode::NotMapped;
Qt::HANDLE eglDisplay = nullptr;
QFunctionPointer eglImageTargetTexture2D = nullptr;
};
diff --git a/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink.cpp b/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink.cpp
index 4e9619b11..f9c936ea6 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink.cpp
+++ b/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink.cpp
@@ -20,6 +20,8 @@
#include <common/qgst_debug_p.h>
#include <common/qgstutils_p.h>
+#include <private/qvideoframe_p.h>
+
#include <gst/video/video.h>
#include <gst/video/gstvideometa.h>
@@ -41,6 +43,13 @@ QT_BEGIN_NAMESPACE
QGstVideoRenderer::QGstVideoRenderer(QGstreamerVideoSink *sink)
: m_sink(sink), m_surfaceCaps(createSurfaceCaps(sink))
{
+ QObject::connect(
+ sink, &QGstreamerVideoSink::aboutToBeDestroyed, this,
+ [this] {
+ QMutexLocker locker(&m_sinkMutex);
+ m_sink = nullptr;
+ },
+ Qt::DirectConnection);
}
QGstVideoRenderer::~QGstVideoRenderer() = default;
@@ -111,94 +120,104 @@ const QGstCaps &QGstVideoRenderer::caps()
bool QGstVideoRenderer::start(const QGstCaps& caps)
{
qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::start" << caps;
- QMutexLocker locker(&m_mutex);
-
- m_frameMirrored = false;
- m_frameRotationAngle = QtVideo::Rotation::None;
-
- if (m_active) {
- m_flush = true;
- m_stop = true;
- }
-
- m_startCaps = caps;
-
- /*
- Waiting for start() to be invoked in the main thread may block
- if gstreamer blocks the main thread until this call is finished.
- This situation is rare and usually caused by setState(Null)
- while pipeline is being prerolled.
- The proper solution to this involves controlling gstreamer pipeline from
- other thread than video surface.
-
- Currently start() fails if wait() timed out.
- */
- if (!waitForAsyncEvent(&locker, &m_setupCondition, 1000) && !m_startCaps.isNull()) {
- qWarning() << "Failed to start video surface due to main thread blocked.";
- m_startCaps = {};
+ {
+ m_frameRotationAngle = QtVideo::Rotation::None;
+ auto optionalFormatAndVideoInfo = caps.formatAndVideoInfo();
+ if (optionalFormatAndVideoInfo) {
+ std::tie(m_format, m_videoInfo) = std::move(*optionalFormatAndVideoInfo);
+ } else {
+ m_format = {};
+ m_videoInfo = {};
+ }
+ m_memoryFormat = caps.memoryFormat();
}
- return m_active;
+ return true;
}
void QGstVideoRenderer::stop()
{
- QMutexLocker locker(&m_mutex);
+ qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::stop";
- if (!m_active)
+ QMetaObject::invokeMethod(this, [this] {
+ m_currentState.buffer = {};
+ m_sink->setVideoFrame(QVideoFrame{});
return;
-
- m_flush = true;
- m_stop = true;
-
- m_startCaps = {};
-
- waitForAsyncEvent(&locker, &m_setupCondition, 500);
+ });
}
void QGstVideoRenderer::unlock()
{
- QMutexLocker locker(&m_mutex);
-
- m_setupCondition.wakeAll();
- m_renderCondition.wakeAll();
-}
-
-bool QGstVideoRenderer::proposeAllocation(GstQuery *query)
-{
- Q_UNUSED(query);
- QMutexLocker locker(&m_mutex);
- return m_active;
+ qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::unlock";
}
-void QGstVideoRenderer::flush()
+bool QGstVideoRenderer::proposeAllocation(GstQuery *)
{
- QMutexLocker locker(&m_mutex);
-
- m_flush = true;
- m_renderBuffer = {};
- m_renderCondition.wakeAll();
-
- notify();
+ qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::proposeAllocation";
+ return true;
}
GstFlowReturn QGstVideoRenderer::render(GstBuffer *buffer)
{
- QMutexLocker locker(&m_mutex);
qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::render";
- m_renderReturn = GST_FLOW_OK;
- m_renderBuffer = QGstBufferHandle{
- buffer,
- QGstBufferHandle::NeedsRef,
+ GstVideoCropMeta *meta = gst_buffer_get_video_crop_meta(buffer);
+ if (meta) {
+ QRect vp(meta->x, meta->y, meta->width, meta->height);
+ if (m_format.viewport() != vp) {
+ qCDebug(qLcGstVideoRenderer)
+ << Q_FUNC_INFO << " Update viewport on Metadata: [" << meta->height << "x"
+ << meta->width << " | " << meta->x << "x" << meta->y << "]";
+ // Update viewport if data is not the same
+ m_format.setViewport(vp);
+ }
+ }
+
+ RenderBufferState state{
+ .buffer = QGstBufferHandle{ buffer, QGstBufferHandle::NeedsRef },
+ .format = m_format,
+ .memoryFormat = m_memoryFormat,
+ .mirrored = m_frameMirrored,
+ .rotationAngle = m_frameRotationAngle,
};
- waitForAsyncEvent(&locker, &m_renderCondition, 300);
+ qCDebug(qLcGstVideoRenderer) << " sending video frame";
+
+ QMetaObject::invokeMethod(this, [this, state = std::move(state)]() mutable {
+ if (state == m_currentState) {
+ // same buffer received twice
+ if (!m_sink || !m_sink->inStoppedState())
+ return;
- m_renderBuffer = {};
+ qCDebug(qLcGstVideoRenderer) << " showing empty video frame";
+ m_currentVideoFrame = {};
+ m_sink->setVideoFrame(m_currentVideoFrame);
+ m_currentState = {};
+ return;
+ }
+
+ auto videoBuffer = std::make_unique<QGstVideoBuffer>(state.buffer, m_videoInfo, m_sink,
+ state.format, state.memoryFormat);
+ QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(videoBuffer), state.format);
+ QGstUtils::setFrameTimeStampsFromBuffer(&frame, state.buffer.get());
+ frame.setMirrored(state.mirrored);
+ frame.setRotation(state.rotationAngle);
+ m_currentVideoFrame = std::move(frame);
+ m_currentState = std::move(state);
+
+ if (!m_sink)
+ return;
+
+ if (m_sink->inStoppedState()) {
+ qCDebug(qLcGstVideoRenderer) << " showing empty video frame";
+ m_currentVideoFrame = {};
+ }
- return m_renderReturn;
+ m_sink->setVideoFrame(m_currentVideoFrame);
+ });
+
+ return GST_FLOW_OK;
}
bool QGstVideoRenderer::query(GstQuery *query)
@@ -211,6 +230,10 @@ bool QGstVideoRenderer::query(GstQuery *query)
if (strcmp(type, "gst.gl.local_context") != 0)
return false;
+ QMutexLocker locker(&m_sinkMutex);
+ if (!m_sink)
+ return false;
+
auto *gstGlContext = m_sink->gstGlLocalContext();
if (!gstGlContext)
return false;
@@ -269,7 +292,6 @@ void QGstVideoRenderer::gstEventHandleTag(GstEvent *event)
rotationAngle = (180 + atoi(value.get() + flipRotateLen)) % 360;
}
- QMutexLocker locker(&m_mutex);
m_frameMirrored = mirrored;
switch (rotationAngle) {
case 0:
@@ -294,139 +316,6 @@ void QGstVideoRenderer::gstEventHandleEOS(GstEvent *)
stop();
}
-bool QGstVideoRenderer::event(QEvent *event)
-{
- if (event->type() == QEvent::UpdateRequest) {
- QMutexLocker locker(&m_mutex);
-
- if (m_notified) {
- while (handleEvent(&locker)) {}
- m_notified = false;
- }
- return true;
- }
-
- return QObject::event(event);
-}
-
-bool QGstVideoRenderer::handleEvent(QMutexLocker<QMutex> *locker)
-{
- if (m_flush) {
- m_flush = false;
- if (m_active) {
- locker->unlock();
-
- if (m_sink && !m_flushed)
- m_sink->setVideoFrame(QVideoFrame());
- m_flushed = true;
- locker->relock();
- }
- } else if (m_stop) {
- m_stop = false;
-
- if (m_active) {
- m_active = false;
- m_flushed = true;
- }
- } else if (!m_startCaps.isNull()) {
- Q_ASSERT(!m_active);
-
- auto startCaps = m_startCaps;
- m_startCaps = {};
-
- if (m_sink) {
- locker->unlock();
-
- m_flushed = true;
- auto optionalFormatAndVideoInfo = startCaps.formatAndVideoInfo();
- if (optionalFormatAndVideoInfo) {
- std::tie(m_format, m_videoInfo) = std::move(*optionalFormatAndVideoInfo);
- } else {
- m_format = {};
- m_videoInfo = {};
- }
-
- memoryFormat = startCaps.memoryFormat();
-
- locker->relock();
- m_active = m_format.isValid();
- } else if (m_active) {
- m_active = false;
- m_flushed = true;
- }
-
- } else if (m_renderBuffer) {
- QGstBufferHandle buffer = std::move(m_renderBuffer);
- m_renderReturn = GST_FLOW_ERROR;
-
- qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::handleEvent(renderBuffer)" << m_active << m_sink;
- if (m_active && m_sink) {
-
- locker->unlock();
-
- m_flushed = false;
-
- GstVideoCropMeta *meta = gst_buffer_get_video_crop_meta(buffer.get());
- if (meta) {
- QRect vp(meta->x, meta->y, meta->width, meta->height);
- if (m_format.viewport() != vp) {
- qCDebug(qLcGstVideoRenderer) << Q_FUNC_INFO << " Update viewport on Metadata: [" << meta->height << "x" << meta->width << " | " << meta->x << "x" << meta->y << "]";
- // Update viewport if data is not the same
- m_format.setViewport(vp);
- }
- }
-
- if (m_sink->inStoppedState()) {
- qCDebug(qLcGstVideoRenderer) << " sending empty video frame";
- m_sink->setVideoFrame(QVideoFrame());
- } else {
- QGstVideoBuffer *videoBuffer = new QGstVideoBuffer(buffer, m_videoInfo, m_sink, m_format, memoryFormat);
- QVideoFrame frame(videoBuffer, m_format);
- QGstUtils::setFrameTimeStampsFromBuffer(&frame, buffer.get());
- frame.setMirrored(m_frameMirrored);
- frame.setRotation(m_frameRotationAngle);
-
- qCDebug(qLcGstVideoRenderer) << " sending video frame";
- m_sink->setVideoFrame(frame);
- }
-
- locker->relock();
-
- m_renderReturn = GST_FLOW_OK;
- }
-
- m_renderCondition.wakeAll();
- } else {
- m_setupCondition.wakeAll();
-
- return false;
- }
- return true;
-}
-
-void QGstVideoRenderer::notify()
-{
- if (!m_notified) {
- m_notified = true;
- QCoreApplication::postEvent(this, new QEvent(QEvent::UpdateRequest));
- }
-}
-
-bool QGstVideoRenderer::waitForAsyncEvent(
- QMutexLocker<QMutex> *locker, QWaitCondition *condition, unsigned long time)
-{
- if (QThread::currentThread() == thread()) {
- while (handleEvent(locker)) {}
- m_notified = false;
-
- return true;
- }
-
- notify();
-
- return condition->wait(&m_mutex, time);
-}
-
static GstVideoSinkClass *gvrs_sink_parent_class;
static thread_local QGstreamerVideoSink *gvrs_current_sink;
@@ -438,8 +327,6 @@ QGstVideoRendererSink *QGstVideoRendererSink::createSink(QGstreamerVideoSink *si
QGstVideoRendererSink *gstSink = reinterpret_cast<QGstVideoRendererSink *>(
g_object_new(QGstVideoRendererSink::get_type(), nullptr));
- g_signal_connect(G_OBJECT(gstSink), "notify::show-preroll-frame", G_CALLBACK(handleShowPrerollChange), gstSink);
-
return gstSink;
}
@@ -535,41 +422,9 @@ void QGstVideoRendererSink::finalize(GObject *object)
G_OBJECT_CLASS(gvrs_sink_parent_class)->finalize(object);
}
-void QGstVideoRendererSink::handleShowPrerollChange(GObject *o, GParamSpec *p, gpointer d)
-{
- Q_UNUSED(o);
- Q_UNUSED(p);
- QGstVideoRendererSink *sink = reinterpret_cast<QGstVideoRendererSink *>(d);
-
- gboolean showPrerollFrame = true; // "show-preroll-frame" property is true by default
- g_object_get(G_OBJECT(sink), "show-preroll-frame", &showPrerollFrame, nullptr);
-
- if (!showPrerollFrame) {
- GstState state = GST_STATE_VOID_PENDING;
- GstClockTime timeout = 10000000; // 10 ms
- gst_element_get_state(GST_ELEMENT(sink), &state, nullptr, timeout);
- // show-preroll-frame being set to 'false' while in GST_STATE_PAUSED means
- // the QMediaPlayer was stopped from the paused state.
- // We need to flush the current frame.
- if (state == GST_STATE_PAUSED)
- sink->renderer->flush();
- }
-}
-
GstStateChangeReturn QGstVideoRendererSink::change_state(
GstElement *element, GstStateChange transition)
{
- QGstVideoRendererSink *sink = reinterpret_cast<QGstVideoRendererSink *>(element);
-
- gboolean showPrerollFrame = true; // "show-preroll-frame" property is true by default
- g_object_get(G_OBJECT(element), "show-preroll-frame", &showPrerollFrame, nullptr);
-
- // If show-preroll-frame is 'false' when transitioning from GST_STATE_PLAYING to
- // GST_STATE_PAUSED, it means the QMediaPlayer was stopped.
- // We need to flush the current frame.
- if (transition == GST_STATE_CHANGE_PLAYING_TO_PAUSED && !showPrerollFrame)
- sink->renderer->flush();
-
return GST_ELEMENT_CLASS(gvrs_sink_parent_class)->change_state(element, transition);
}
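
Note: the renderer no longer synchronizes the GStreamer streaming thread with the Qt thread through a mutex/QWaitCondition handshake; render() snapshots everything into a RenderBufferState value and posts it to the Qt thread. A condensed sketch of that hand-off pattern, assuming a QObject that lives on the Qt thread (names and the FrameState payload are illustrative):

#include <QtCore/qobject.h>

struct FrameState
{
    // buffer handle, format, mirroring and rotation -- see RenderBufferState
    // in qgstvideorenderersink_p.h below
};

// Called from the GStreamer streaming thread.
void postFrame(QObject *qtThreadReceiver, FrameState state)
{
    QMetaObject::invokeMethod(qtThreadReceiver, [state = std::move(state)]() mutable {
        // Executes on the receiver's (Qt) thread via a queued call, so the
        // streaming thread can return GST_FLOW_OK immediately instead of
        // blocking on a wait condition.
        Q_UNUSED(state); // a real receiver would turn this into a QVideoFrame here
    });
}
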
diff --git a/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink_p.h b/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink_p.h
index 99d1b0ac8..d9e3db462 100644
--- a/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink_p.h
+++ b/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink_p.h
@@ -15,7 +15,11 @@
// We mean it.
//
+#include <QtMultimedia/qvideoframeformat.h>
+#include <QtMultimedia/qvideoframe.h>
#include <QtMultimedia/private/qtmultimediaglobal_p.h>
+#include <QtCore/qmutex.h>
+
#include <gst/video/gstvideosink.h>
#include <gst/video/video.h>
@@ -30,66 +34,61 @@
#include <common/qgst_p.h>
QT_BEGIN_NAMESPACE
-class QVideoSink;
class QGstVideoRenderer : public QObject
{
public:
- explicit QGstVideoRenderer(QGstreamerVideoSink *sink);
+ explicit QGstVideoRenderer(QGstreamerVideoSink *);
~QGstVideoRenderer();
const QGstCaps &caps();
- bool start(const QGstCaps& caps);
+ bool start(const QGstCaps &);
void stop();
void unlock();
- bool proposeAllocation(GstQuery *query);
-
- void flush();
-
- GstFlowReturn render(GstBuffer *buffer);
-
- bool event(QEvent *event) override;
- bool query(GstQuery *query);
- void gstEvent(GstEvent *event);
-
-private slots:
- bool handleEvent(QMutexLocker<QMutex> *locker);
+ bool proposeAllocation(GstQuery *);
+ GstFlowReturn render(GstBuffer *);
+ bool query(GstQuery *);
+ void gstEvent(GstEvent *);
private:
void notify();
- bool waitForAsyncEvent(QMutexLocker<QMutex> *locker, QWaitCondition *condition, unsigned long time);
static QGstCaps createSurfaceCaps(QGstreamerVideoSink *);
void gstEventHandleTag(GstEvent *);
void gstEventHandleEOS(GstEvent *);
- QPointer<QGstreamerVideoSink> m_sink;
-
- QMutex m_mutex;
- QWaitCondition m_setupCondition;
- QWaitCondition m_renderCondition;
-
- // --- accessed from multiple threads, need to hold mutex to access
- GstFlowReturn m_renderReturn = GST_FLOW_OK;
- bool m_active = false;
+ QMutex m_sinkMutex;
+ QGstreamerVideoSink *m_sink = nullptr; // written only from qt thread. so only readers on
+ // worker threads need to acquire the lock
+ // --- only accessed from gstreamer thread
const QGstCaps m_surfaceCaps;
-
- QGstCaps m_startCaps;
- QGstBufferHandle m_renderBuffer;
-
- bool m_notified = false;
- bool m_stop = false;
- bool m_flush = false;
+ QVideoFrameFormat m_format;
+ GstVideoInfo m_videoInfo{};
+ QGstCaps::MemoryFormat m_memoryFormat = QGstCaps::CpuMemory;
bool m_frameMirrored = false;
QtVideo::Rotation m_frameRotationAngle = QtVideo::Rotation::None;
- // --- only accessed from one thread
- QVideoFrameFormat m_format;
- GstVideoInfo m_videoInfo{};
- bool m_flushed = true;
- QGstCaps::MemoryFormat memoryFormat = QGstCaps::CpuMemory;
+ // --- only accessed from qt thread
+ QVideoFrame m_currentVideoFrame;
+
+ struct RenderBufferState
+ {
+ QGstBufferHandle buffer;
+ QVideoFrameFormat format;
+ QGstCaps::MemoryFormat memoryFormat;
+ bool mirrored;
+ QtVideo::Rotation rotationAngle;
+
+ bool operator==(const RenderBufferState &rhs) const
+ {
+ return std::tie(buffer, format, memoryFormat, mirrored, rotationAngle)
+ == std::tie(rhs.buffer, rhs.format, rhs.memoryFormat, rhs.mirrored,
+ rhs.rotationAngle);
+ }
+ };
+ RenderBufferState m_currentState;
};
class QGstVideoRendererSink
@@ -108,8 +107,6 @@ private:
static void finalize(GObject *object);
- static void handleShowPrerollChange(GObject *o, GParamSpec *p, gpointer d);
-
static GstStateChangeReturn change_state(GstElement *element, GstStateChange transition);
static GstCaps *get_caps(GstBaseSink *sink, GstCaps *filter);
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera.cpp b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera.cpp
index 8d3cd6baf..c54e8b74b 100644
--- a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera.cpp
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera.cpp
@@ -5,6 +5,7 @@
#include <QtMultimedia/qcameradevice.h>
#include <QtMultimedia/qmediacapturesession.h>
+#include <QtMultimedia/private/qcameradevice_p.h>
#include <QtCore/qdebug.h>
#include <common/qgst_debug_p.h>
@@ -21,36 +22,35 @@ QT_BEGIN_NAMESPACE
QMaybe<QPlatformCamera *> QGstreamerCamera::create(QCamera *camera)
{
- QGstElement videotestsrc = QGstElement::createFromFactory("videotestsrc");
- if (!videotestsrc)
- return errorMessageCannotFindElement("videotestsrc");
+ static const auto error = qGstErrorMessageIfElementsNotAvailable(
+ "videotestsrc", "capsfilter", "videoconvert", "videoscale", "identity");
+ if (error)
+ return *error;
- QGstElement capsFilter = QGstElement::createFromFactory("capsfilter", "videoCapsFilter");
- if (!capsFilter)
- return errorMessageCannotFindElement("capsfilter");
-
- QGstElement videoconvert = QGstElement::createFromFactory("videoconvert", "videoConvert");
- if (!videoconvert)
- return errorMessageCannotFindElement("videoconvert");
-
- QGstElement videoscale = QGstElement::createFromFactory("videoscale", "videoScale");
- if (!videoscale)
- return errorMessageCannotFindElement("videoscale");
-
- return new QGstreamerCamera(videotestsrc, capsFilter, videoconvert, videoscale, camera);
+ return new QGstreamerCamera(camera);
}
-QGstreamerCamera::QGstreamerCamera(QGstElement videotestsrc, QGstElement capsFilter,
- QGstElement videoconvert, QGstElement videoscale,
- QCamera *camera)
- : QPlatformCamera(camera),
- gstCamera(std::move(videotestsrc)),
- gstCapsFilter(std::move(capsFilter)),
- gstVideoConvert(std::move(videoconvert)),
- gstVideoScale(std::move(videoscale))
+QGstreamerCamera::QGstreamerCamera(QCamera *camera)
+ : QGstreamerCameraBase(camera),
+ gstCameraBin{
+ QGstBin::create("camerabin"),
+ },
+ gstCamera{
+ QGstElement::createFromFactory("videotestsrc"),
+ },
+ gstCapsFilter{
+ QGstElement::createFromFactory("capsfilter", "videoCapsFilter"),
+ },
+ gstDecode{
+ QGstElement::createFromFactory("identity"),
+ },
+ gstVideoConvert{
+ QGstElement::createFromFactory("videoconvert", "videoConvert"),
+ },
+ gstVideoScale{
+ QGstElement::createFromFactory("videoscale", "videoScale"),
+ }
{
- gstDecode = QGstElement::createFromFactory("identity");
- gstCameraBin = QGstBin::create("camerabin");
gstCameraBin.add(gstCamera, gstCapsFilter, gstDecode, gstVideoConvert, gstVideoScale);
qLinkGstElements(gstCamera, gstCapsFilter, gstDecode, gstVideoConvert, gstVideoScale);
gstCameraBin.addGhostPad(gstVideoScale, "src");
@@ -80,6 +80,8 @@ void QGstreamerCamera::setActive(bool active)
void QGstreamerCamera::setCamera(const QCameraDevice &camera)
{
+ using namespace Qt::Literals;
+
if (m_cameraDevice == camera)
return;
@@ -90,12 +92,24 @@ void QGstreamerCamera::setCamera(const QCameraDevice &camera)
gstNewCamera = QGstElement::createFromFactory("videotestsrc");
} else {
auto *integration = static_cast<QGstreamerIntegration *>(QGstreamerIntegration::instance());
- auto *device = integration->videoDevice(camera.id());
+ GstDevice *device = integration->videoDevice(camera.id());
+
+ if (!device) {
+ updateError(QCamera::Error::CameraError,
+ u"Failed to create GstDevice for camera: "_s
+ + QString::fromUtf8(camera.id()));
+ return;
+ }
+
gstNewCamera = QGstElement::createFromDevice(device, "camerasrc");
- if (QGstStructure properties = gst_device_get_properties(device); !properties.isNull()) {
- if (properties.name() == "v4l2deviceprovider")
- m_v4l2DevicePath = QString::fromUtf8(properties["device.path"].toString());
- properties.free();
+ QUniqueGstStructureHandle properties{
+ gst_device_get_properties(device),
+ };
+
+ if (properties) {
+ QGstStructureView propertiesView{ properties };
+ if (propertiesView.name() == "v4l2deviceprovider")
+ m_v4l2DevicePath = QString::fromUtf8(propertiesView["device.path"].toString());
}
}
@@ -105,6 +119,8 @@ void QGstreamerCamera::setCamera(const QCameraDevice &camera)
f.pixelFormat() == QVideoFrameFormat::Format_Jpeg ? "jpegdec" : "identity");
QGstPipeline::modifyPipelineWhileNotRunning(gstCamera.getPipeline(), [&] {
+ gstCamera.setStateSync(GST_STATE_READY); // stop camera, as it may have active tasks
+
qUnlinkGstElements(gstCamera, gstCapsFilter, gstDecode, gstVideoConvert);
gstCameraBin.stopAndRemoveElements(gstCamera, gstDecode);
@@ -137,7 +153,7 @@ bool QGstreamerCamera::setCameraFormat(const QCameraFormat &format)
f.pixelFormat() == QVideoFrameFormat::Format_Jpeg ? "jpegdec" : "identity");
QGstPipeline::modifyPipelineWhileNotRunning(gstCamera.getPipeline(), [&] {
- newGstDecode.syncStateWithParent();
+ gstCamera.setStateSync(GST_STATE_READY); // stop camera, as it may have active tasks
qUnlinkGstElements(gstCamera, gstCapsFilter, gstDecode, gstVideoConvert);
gstCameraBin.stopAndRemoveElements(gstDecode);
@@ -703,8 +719,53 @@ int QGstreamerCamera::getV4L2Parameter(quint32 id) const
});
}
+QGstreamerCustomCamera::QGstreamerCustomCamera(QCamera *camera)
+ : QGstreamerCameraBase{
+ camera,
+ },
+ m_userProvidedGstElement{
+ false,
+ }
+{
+}
+
+QGstreamerCustomCamera::QGstreamerCustomCamera(QCamera *camera, QGstElement element)
+ : QGstreamerCameraBase{
+ camera,
+ },
+ gstCamera{
+ std::move(element),
+ },
+ m_userProvidedGstElement{
+ true,
+ }
+{
+}
+
+void QGstreamerCustomCamera::setCamera(const QCameraDevice &device)
+{
+ if (m_userProvidedGstElement)
+ return;
+
+ gstCamera = QGstBin::createFromPipelineDescription(device.id(), /*name=*/nullptr,
+ /* ghostUnlinkedPads=*/true);
+}
+
+bool QGstreamerCustomCamera::isActive() const
+{
+ return m_active;
+}
+
+void QGstreamerCustomCamera::setActive(bool active)
+{
+ if (m_active == active)
+ return;
+
+ m_active = active;
+
+ emit activeChanged(active);
+}
+
#endif
QT_END_NAMESPACE
-
-#include "moc_qgstreamercamera_p.cpp"
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera_p.h b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera_p.h
index 74f12f918..f43c01f34 100644
--- a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera_p.h
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera_p.h
@@ -24,9 +24,16 @@
QT_BEGIN_NAMESPACE
-class QGstreamerCamera : public QPlatformCamera
+class QGstreamerCameraBase : public QPlatformCamera
+{
+public:
+ using QPlatformCamera::QPlatformCamera;
+
+ virtual QGstElement gstElement() const = 0;
+};
+
+class QGstreamerCamera : public QGstreamerCameraBase
{
- Q_OBJECT
public:
static QMaybe<QPlatformCamera *> create(QCamera *camera);
@@ -38,7 +45,7 @@ public:
void setCamera(const QCameraDevice &camera) override;
bool setCameraFormat(const QCameraFormat &format) override;
- QGstElement gstElement() const { return gstCameraBin; }
+ QGstElement gstElement() const override { return gstCameraBin; }
#if QT_CONFIG(gstreamer_photography)
GstPhotography *photography() const;
#endif
@@ -63,8 +70,7 @@ public:
void setColorTemperature(int temperature) override;
private:
- QGstreamerCamera(QGstElement videotestsrc, QGstElement capsFilter, QGstElement videoconvert,
- QGstElement videoscale, QCamera *camera);
+ QGstreamerCamera(QCamera *camera);
void updateCameraProperties();
@@ -118,12 +124,29 @@ private:
QGstElement gstDecode;
QGstElement gstVideoConvert;
QGstElement gstVideoScale;
- QGstPipeline gstPipeline;
bool m_active = false;
QString m_v4l2DevicePath;
};
+class QGstreamerCustomCamera : public QGstreamerCameraBase
+{
+public:
+ explicit QGstreamerCustomCamera(QCamera *);
+ explicit QGstreamerCustomCamera(QCamera *, QGstElement element);
+
+ QGstElement gstElement() const override { return gstCamera; }
+ void setCamera(const QCameraDevice &) override;
+
+ bool isActive() const override;
+ void setActive(bool) override;
+
+private:
+ QGstElement gstCamera;
+ bool m_active{};
+ const bool m_userProvidedGstElement;
+};
+
QT_END_NAMESPACE
#endif // QGSTREAMERCAMERACONTROL_H
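
Note: QGstreamerCustomCamera either wraps a caller-supplied element or builds one from a textual pipeline description via QGstBin::createFromPipelineDescription with ghost pads for unlinked pads. With public GStreamer API a comparable bin can be built like this (fragment for illustration only; the description string is an example, not something the patch prescribes):

#include <gst/gst.h>

GError *error = nullptr;
GstElement *bin = gst_parse_bin_from_description(
        "videotestsrc is-live=true ! videoconvert",  // example description
        /* ghost_unlinked_pads = */ TRUE, &error);
if (!bin)
    g_error_free(error); // the description could not be parsed
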
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture.cpp b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture.cpp
index bb4990603..9c21dc083 100644
--- a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture.cpp
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture.cpp
@@ -3,13 +3,15 @@
#include "qgstreamerimagecapture_p.h"
-#include <QtMultimedia/private/qplatformcamera_p.h>
-#include <QtMultimedia/private/qplatformimagecapture_p.h>
#include <QtMultimedia/qvideoframeformat.h>
#include <QtMultimedia/private/qmediastoragelocation_p.h>
+#include <QtMultimedia/private/qplatformcamera_p.h>
+#include <QtMultimedia/private/qplatformimagecapture_p.h>
+#include <QtMultimedia/private/qvideoframe_p.h>
#include <QtCore/qdebug.h>
#include <QtCore/qdir.h>
#include <QtCore/qstandardpaths.h>
+#include <QtCore/qcoreapplication.h>
#include <QtCore/qloggingcategory.h>
#include <common/qgstreamermetadata_p.h>
@@ -20,37 +22,96 @@
QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLcImageCaptureGst, "qt.multimedia.imageCapture")
+namespace {
+Q_LOGGING_CATEGORY(qLcImageCaptureGst, "qt.multimedia.imageCapture")
-QMaybe<QPlatformImageCapture *> QGstreamerImageCapture::create(QImageCapture *parent)
+struct ThreadPoolSingleton
{
- QGstElement videoconvert =
- QGstElement::createFromFactory("videoconvert", "imageCaptureConvert");
- if (!videoconvert)
- return errorMessageCannotFindElement("videoconvert");
+ QObject m_context;
+ QMutex m_poolMutex;
+ QThreadPool *m_instance{};
+ bool m_appUnderDestruction = false;
+
+ QThreadPool *get(const QMutexLocker<QMutex> &)
+ {
+ if (m_instance)
+ return m_instance;
+ if (m_appUnderDestruction || !qApp)
+ return nullptr;
+
+ using namespace std::chrono;
+
+ m_instance = new QThreadPool(qApp);
+ m_instance->setMaxThreadCount(1); // 1 thread;
+ static constexpr auto expiryTimeout = minutes(5);
+ m_instance->setExpiryTimeout(round<milliseconds>(expiryTimeout).count());
+
+ QObject::connect(qApp, &QCoreApplication::aboutToQuit, &m_context, [&] {
+ // we need to make sure that thread-local QRhi is destroyed before the application to
+ // prevent QTBUG-124189
+ QMutexLocker guard(&m_poolMutex);
+ delete m_instance;
+ m_instance = {};
+ m_appUnderDestruction = true;
+ });
+
+ QObject::connect(qApp, &QCoreApplication::destroyed, &m_context, [&] {
+ m_appUnderDestruction = false;
+ });
+ return m_instance;
+ }
+
+ template <typename Functor>
+ QFuture<void> run(Functor &&f)
+ {
+ QMutexLocker guard(&m_poolMutex);
+ QThreadPool *pool = get(guard);
+ if (!pool)
+ return QFuture<void>{};
+
+ return QtConcurrent::run(pool, std::forward<Functor>(f));
+ }
+};
- QGstElement jpegenc = QGstElement::createFromFactory("jpegenc", "jpegEncoder");
- if (!jpegenc)
- return errorMessageCannotFindElement("jpegenc");
+ThreadPoolSingleton s_threadPoolSingleton;
- QGstElement jifmux = QGstElement::createFromFactory("jifmux", "jpegMuxer");
- if (!jifmux)
- return errorMessageCannotFindElement("jifmux");
+}; // namespace
- return new QGstreamerImageCapture(videoconvert, jpegenc, jifmux, parent);
+QMaybe<QPlatformImageCapture *> QGstreamerImageCapture::create(QImageCapture *parent)
+{
+ static const auto error = qGstErrorMessageIfElementsNotAvailable(
+ "queue", "capsfilter", "videoconvert", "jpegenc", "jifmux", "fakesink");
+ if (error)
+ return *error;
+
+ return new QGstreamerImageCapture(parent);
}
-QGstreamerImageCapture::QGstreamerImageCapture(QGstElement videoconvert, QGstElement jpegenc,
- QGstElement jifmux, QImageCapture *parent)
+QGstreamerImageCapture::QGstreamerImageCapture(QImageCapture *parent)
: QPlatformImageCapture(parent),
QGstreamerBufferProbe(ProbeBuffers),
- videoConvert(std::move(videoconvert)),
- encoder(std::move(jpegenc)),
- muxer(std::move(jifmux))
+ bin{
+ QGstBin::create("imageCaptureBin"),
+ },
+ queue{
+ QGstElement::createFromFactory("queue", "imageCaptureQueue"),
+ },
+ filter{
+ QGstElement::createFromFactory("capsfilter", "filter"),
+ },
+ videoConvert{
+ QGstElement::createFromFactory("videoconvert", "imageCaptureConvert"),
+ },
+ encoder{
+ QGstElement::createFromFactory("jpegenc", "jpegEncoder"),
+ },
+ muxer{
+ QGstElement::createFromFactory("jifmux", "jpegMuxer"),
+ },
+ sink{
+ QGstElement::createFromFactory("fakesink", "imageCaptureSink"),
+ }
{
- bin = QGstBin::create("imageCaptureBin");
-
- queue = QGstElement::createFromFactory("queue", "imageCaptureQueue");
// configures the queue to be fast, lightweight and non blocking
queue.set("leaky", 2 /*downstream*/);
queue.set("silent", true);
@@ -58,8 +119,6 @@ QGstreamerImageCapture::QGstreamerImageCapture(QGstElement videoconvert, QGstEle
queue.set("max-size-bytes", uint(0));
queue.set("max-size-time", quint64(0));
- sink = QGstElement::createFromFactory("fakesink", "imageCaptureSink");
- filter = QGstElement::createFromFactory("capsfilter", "filter");
// imageCaptureSink do not wait for a preroll buffer when going READY -> PAUSED
// as no buffer will arrive until capture() is called
sink.set("async", false);
@@ -77,10 +136,20 @@ QGstreamerImageCapture::QGstreamerImageCapture(QGstElement videoconvert, QGstEle
QGstreamerImageCapture::~QGstreamerImageCapture()
{
bin.setStateSync(GST_STATE_NULL);
+
+ // wait for pending futures
+ auto pendingFutures = [&] {
+ QMutexLocker guard(&m_mutex);
+ return std::move(m_pendingFutures);
+ }();
+
+ for (QFuture<void> &pendingImage : pendingFutures)
+ pendingImage.waitForFinished();
}
bool QGstreamerImageCapture::isReadyForCapture() const
{
+ QMutexLocker guard(&m_mutex);
return m_session && !passImage && cameraActive;
}
@@ -101,43 +170,40 @@ int QGstreamerImageCapture::doCapture(const QString &fileName)
{
qCDebug(qLcImageCaptureGst) << "do capture";
- // emit error in the next event loop,
- // so application can associate it with returned request id.
- auto invokeDeferred = [&](auto &&fn) {
- QMetaObject::invokeMethod(this, std::forward<decltype(fn)>(fn), Qt::QueuedConnection);
- };
-
- if (!m_session) {
- invokeDeferred([this] {
- emit error(-1, QImageCapture::ResourceError,
- QPlatformImageCapture::msgImageCaptureNotSet());
- });
-
- qCDebug(qLcImageCaptureGst) << "error 1";
- return -1;
- }
- if (!m_session->camera()) {
- invokeDeferred([this] {
- emit error(-1, QImageCapture::ResourceError, tr("No camera available."));
- });
-
- qCDebug(qLcImageCaptureGst) << "error 2";
- return -1;
- }
- if (passImage) {
- invokeDeferred([this] {
- emit error(-1, QImageCapture::NotReadyError,
- QPlatformImageCapture::msgCameraNotReady());
- });
-
- qCDebug(qLcImageCaptureGst) << "error 3";
- return -1;
+ {
+ QMutexLocker guard(&m_mutex);
+ if (!m_session) {
+ invokeDeferred([this] {
+ emit error(-1, QImageCapture::ResourceError,
+ QPlatformImageCapture::msgImageCaptureNotSet());
+ });
+
+ qCDebug(qLcImageCaptureGst) << "error 1";
+ return -1;
+ }
+ if (!m_session->camera()) {
+ invokeDeferred([this] {
+ emit error(-1, QImageCapture::ResourceError, tr("No camera available."));
+ });
+
+ qCDebug(qLcImageCaptureGst) << "error 2";
+ return -1;
+ }
+ if (passImage) {
+ invokeDeferred([this] {
+ emit error(-1, QImageCapture::NotReadyError,
+ QPlatformImageCapture::msgCameraNotReady());
+ });
+
+ qCDebug(qLcImageCaptureGst) << "error 3";
+ return -1;
+ }
+ m_lastId++;
+
+ pendingImages.enqueue({ m_lastId, fileName, QMediaMetaData{} });
+ // let one image pass the pipeline
+ passImage = true;
}
- m_lastId++;
-
- pendingImages.enqueue({m_lastId, fileName, QMediaMetaData{}});
- // let one image pass the pipeline
- passImage = true;
emit readyForCaptureChanged(false);
return m_lastId;
@@ -159,8 +225,17 @@ void QGstreamerImageCapture::setResolution(const QSize &resolution)
filter.set("caps", caps);
}
+// HACK: gcc-10 and earlier reject [=,this] when building with c++17
+#if __cplusplus >= 202002L
+# define EQ_THIS_CAPTURE =, this
+#else
+# define EQ_THIS_CAPTURE =
+#endif
+
bool QGstreamerImageCapture::probeBuffer(GstBuffer *buffer)
{
+ QMutexLocker guard(&m_mutex);
+
if (!passImage)
return false;
qCDebug(qLcImageCaptureGst) << "probe buffer";
@@ -172,7 +247,10 @@ bool QGstreamerImageCapture::probeBuffer(GstBuffer *buffer)
passImage = false;
- emit readyForCaptureChanged(isReadyForCapture());
+ bool ready = isReadyForCapture();
+ invokeDeferred([this, ready] {
+ emit readyForCaptureChanged(ready);
+ });
QGstCaps caps = bin.staticPad("sink").currentCaps();
auto memoryFormat = caps.memoryFormat();
@@ -183,41 +261,60 @@ bool QGstreamerImageCapture::probeBuffer(GstBuffer *buffer)
if (optionalFormatAndVideoInfo)
std::tie(fmt, previewInfo) = std::move(*optionalFormatAndVideoInfo);
- auto *sink = m_session->gstreamerVideoSink();
- auto *gstBuffer = new QGstVideoBuffer{
- std::move(bufferHandle), previewInfo, sink, fmt, memoryFormat,
- };
- QVideoFrame frame(gstBuffer, fmt);
- QImage img = frame.toImage();
- if (img.isNull()) {
- qDebug() << "received a null image";
- return true;
- }
-
- auto &imageData = pendingImages.head();
-
- emit imageExposed(imageData.id);
-
- qCDebug(qLcImageCaptureGst) << "Image available!";
- emit imageAvailable(imageData.id, frame);
+ int futureId = futureIDAllocator += 1;
- emit imageCaptured(imageData.id, img);
+ // ensure QVideoFrame::toImage is executed on a worker thread that is joined before the
+ // qApplication is destroyed
+ QFuture<void> future = s_threadPoolSingleton.run([EQ_THIS_CAPTURE]() mutable {
+ QMutexLocker guard(&m_mutex);
+ auto scopeExit = qScopeGuard([&] {
+ m_pendingFutures.remove(futureId);
+ });
- QMediaMetaData metaData = this->metaData();
- metaData.insert(QMediaMetaData::Date, QDateTime::currentDateTime());
- metaData.insert(QMediaMetaData::Resolution, frame.size());
- imageData.metaData = metaData;
+ if (!m_session) {
+ qDebug() << "QGstreamerImageCapture::probeBuffer: no session";
+ return;
+ }
+
+ auto *sink = m_session->gstreamerVideoSink();
+ auto gstBuffer = std::make_unique<QGstVideoBuffer>(std::move(bufferHandle), previewInfo,
+ sink, fmt, memoryFormat);
+
+ QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(gstBuffer), fmt);
+ QImage img = frame.toImage();
+ if (img.isNull()) {
+ qDebug() << "received a null image";
+ return;
+ }
+
+ QMediaMetaData imageMetaData = metaData();
+ imageMetaData.insert(QMediaMetaData::Resolution, frame.size());
+ pendingImages.head().metaData = std::move(imageMetaData);
+ PendingImage pendingImage = pendingImages.head();
+
+ invokeDeferred([this, pendingImage = std::move(pendingImage), frame = std::move(frame),
+ img = std::move(img)]() mutable {
+ emit imageExposed(pendingImage.id);
+ qCDebug(qLcImageCaptureGst) << "Image available!";
+ emit imageAvailable(pendingImage.id, frame);
+ emit imageCaptured(pendingImage.id, img);
+ emit imageMetadataAvailable(pendingImage.id, pendingImage.metaData);
+ });
+ });
- // ensure taginject injects this metaData
- applyMetaDataToTagSetter(metaData, muxer);
+ if (!future.isValid()) // during qApplication shutdown the threadpool becomes unusable
+ return true;
- emit imageMetadataAvailable(imageData.id, metaData);
+ m_pendingFutures.insert(futureId, future);
return true;
}
+#undef EQ_THIS_CAPTURE
+
void QGstreamerImageCapture::setCaptureSession(QPlatformMediaCaptureSession *session)
{
+ QMutexLocker guard(&m_mutex);
QGstreamerMediaCapture *captureSession = static_cast<QGstreamerMediaCapture *>(session);
if (m_session == captureSession)
return;
@@ -243,6 +340,17 @@ void QGstreamerImageCapture::setCaptureSession(QPlatformMediaCaptureSession *ses
onCameraChanged();
}
+void QGstreamerImageCapture::setMetaData(const QMediaMetaData &m)
+{
+ {
+ QMutexLocker guard(&m_mutex);
+ QPlatformImageCapture::setMetaData(m);
+ }
+
+ // ensure taginject injects this metaData
+ applyMetaDataToTagSetter(m, muxer);
+}
+
void QGstreamerImageCapture::cameraActiveChanged(bool active)
{
qCDebug(qLcImageCaptureGst) << "cameraActiveChanged" << cameraActive << active;
@@ -255,9 +363,11 @@ void QGstreamerImageCapture::cameraActiveChanged(bool active)
void QGstreamerImageCapture::onCameraChanged()
{
+ QMutexLocker guard(&m_mutex);
if (m_session->camera()) {
cameraActiveChanged(m_session->camera()->isActive());
- connect(m_session->camera(), &QPlatformCamera::activeChanged, this, &QGstreamerImageCapture::cameraActiveChanged);
+ connect(m_session->camera(), &QPlatformCamera::activeChanged, this,
+ &QGstreamerImageCapture::cameraActiveChanged);
} else {
cameraActiveChanged(false);
}
@@ -272,33 +382,51 @@ gboolean QGstreamerImageCapture::saveImageFilter(GstElement *, GstBuffer *buffer
void QGstreamerImageCapture::saveBufferToImage(GstBuffer *buffer)
{
+ QMutexLocker guard(&m_mutex);
passImage = false;
if (pendingImages.isEmpty())
return;
- auto imageData = pendingImages.dequeue();
+ PendingImage imageData = pendingImages.dequeue();
if (imageData.filename.isEmpty())
return;
- qCDebug(qLcImageCaptureGst) << "saving image as" << imageData.filename;
+ int id = futureIDAllocator++;
+ QGstBufferHandle bufferHandle{
+ buffer,
+ QGstBufferHandle::NeedsRef,
+ };
+
+ QFuture<void> saveImageFuture = QtConcurrent::run([this, imageData, bufferHandle,
+ id]() mutable {
+ auto cleanup = qScopeGuard([&] {
+ QMutexLocker guard(&m_mutex);
+ m_pendingFutures.remove(id);
+ });
- QFile f(imageData.filename);
- if (!f.open(QFile::WriteOnly)) {
- qCDebug(qLcImageCaptureGst) << " could not open image file for writing";
- return;
- }
+ qCDebug(qLcImageCaptureGst) << "saving image as" << imageData.filename;
- GstMapInfo info;
- if (gst_buffer_map(buffer, &info, GST_MAP_READ)) {
- f.write(reinterpret_cast<const char *>(info.data), info.size);
- gst_buffer_unmap(buffer, &info);
- }
- f.close();
+ QFile f(imageData.filename);
+ if (!f.open(QFile::WriteOnly)) {
+ qCDebug(qLcImageCaptureGst) << " could not open image file for writing";
+ return;
+ }
+
+ GstMapInfo info;
+ GstBuffer *buffer = bufferHandle.get();
+ if (gst_buffer_map(buffer, &info, GST_MAP_READ)) {
+ f.write(reinterpret_cast<const char *>(info.data), info.size);
+ gst_buffer_unmap(buffer, &info);
+ }
+ f.close();
- QMetaObject::invokeMethod(this, [this, imageData = std::move(imageData)]() mutable {
- imageSaved(imageData.id, imageData.filename);
+ QMetaObject::invokeMethod(this, [this, imageData = std::move(imageData)]() mutable {
+ emit imageSaved(imageData.id, imageData.filename);
+ });
});
+
+ m_pendingFutures.insert(id, saveImageFuture);
}
QImageEncoderSettings QGstreamerImageCapture::imageSettings() const
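
Note: image saving and preview conversion now run on worker threads; each QFuture is registered in m_pendingFutures so the destructor can wait for outstanding work before the object goes away. A compact sketch of that tracking scheme (class and member names are illustrative):

#include <QtCore/qfuture.h>
#include <QtCore/qmap.h>
#include <QtCore/qmutex.h>

// Each asynchronous job is registered under an id, removes itself when done,
// and the owner drains whatever is still pending before destruction.
class PendingJobs
{
public:
    void add(int id, QFuture<void> future)
    {
        QMutexLocker lock(&m_mutex);
        m_pending.insert(id, std::move(future));
    }

    void remove(int id)
    {
        QMutexLocker lock(&m_mutex);
        m_pending.remove(id);
    }

    void waitForAll()
    {
        QMap<int, QFuture<void>> pending;
        {
            QMutexLocker lock(&m_mutex);
            pending = std::move(m_pending);
        }
        for (QFuture<void> &future : pending)
            future.waitForFinished();
    }

private:
    QMutex m_mutex;
    QMap<int, QFuture<void>> m_pending;
};
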
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture_p.h b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture_p.h
index 79c6a02e0..04a7c00b4 100644
--- a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture_p.h
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture_p.h
@@ -18,7 +18,9 @@
#include <QtMultimedia/private/qplatformimagecapture_p.h>
#include <QtMultimedia/private/qmultimediautils_p.h>
+#include <QtCore/qmutex.h>
#include <QtCore/qqueue.h>
+#include <QtConcurrent/QtConcurrentRun>
#include <common/qgst_p.h>
#include <common/qgstreamerbufferprobe_p.h>
@@ -28,9 +30,9 @@
QT_BEGIN_NAMESPACE
class QGstreamerImageCapture : public QPlatformImageCapture, private QGstreamerBufferProbe
-
{
Q_OBJECT
+
public:
static QMaybe<QPlatformImageCapture *> create(QImageCapture *parent);
virtual ~QGstreamerImageCapture();
@@ -48,13 +50,14 @@ public:
QGstElement gstElement() const { return bin; }
+ void setMetaData(const QMediaMetaData &m) override;
+
public Q_SLOTS:
void cameraActiveChanged(bool active);
void onCameraChanged();
private:
- QGstreamerImageCapture(QGstElement videoconvert, QGstElement jpegenc, QGstElement jifmux,
- QImageCapture *parent);
+ QGstreamerImageCapture(QImageCapture *parent);
void setResolution(const QSize &resolution);
int doCapture(const QString &fileName);
@@ -63,6 +66,8 @@ private:
void saveBufferToImage(GstBuffer *buffer);
+ mutable QRecursiveMutex
+ m_mutex; // guard all elements accessed from probeBuffer/saveBufferToImage
QGstreamerMediaCapture *m_session = nullptr;
int m_lastId = 0;
QImageEncoderSettings m_settings;
@@ -88,6 +93,15 @@ private:
bool cameraActive = false;
QGObjectHandlerScopedConnection m_handoffConnection;
+
+ QMap<int, QFuture<void>> m_pendingFutures;
+ int futureIDAllocator = 0;
+
+ template <typename Functor>
+ void invokeDeferred(Functor &&fn)
+ {
+ QMetaObject::invokeMethod(this, std::forward<decltype(fn)>(fn), Qt::QueuedConnection);
+ };
};
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture.cpp b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture.cpp
index 720ff5603..7ecbb07d7 100644
--- a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture.cpp
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture.cpp
@@ -24,32 +24,25 @@ static void linkTeeToPad(QGstElement tee, QGstPad sink)
source.link(sink);
}
-static void unlinkTeeFromPad(QGstElement tee, QGstPad sink)
-{
- if (tee.isNull() || sink.isNull())
- return;
-
- auto source = sink.peer();
- source.unlink(sink);
-
- tee.releaseRequestPad(source);
-}
-
QMaybe<QPlatformMediaCaptureSession *> QGstreamerMediaCapture::create()
{
auto videoOutput = QGstreamerVideoOutput::create();
if (!videoOutput)
return videoOutput.error();
+ static const auto error = qGstErrorMessageIfElementsNotAvailable("tee", "capsfilter");
+ if (error)
+ return *error;
+
return new QGstreamerMediaCapture(videoOutput.value());
}
QGstreamerMediaCapture::QGstreamerMediaCapture(QGstreamerVideoOutput *videoOutput)
- : gstPipeline(QGstPipeline::create("mediaCapturePipeline")), gstVideoOutput(videoOutput)
+ : capturePipeline(QGstPipeline::create("mediaCapturePipeline")), gstVideoOutput(videoOutput)
{
gstVideoOutput->setParent(this);
gstVideoOutput->setIsPreview();
- gstVideoOutput->setPipeline(gstPipeline);
+ gstVideoOutput->setPipeline(capturePipeline);
// Use system clock to drive all elements in the pipeline. Otherwise,
// the clock is sourced from the elements (e.g. from an audio source).
@@ -59,14 +52,14 @@ QGstreamerMediaCapture::QGstreamerMediaCapture(QGstreamerVideoOutput *videoOutpu
QGstClockHandle systemClock{
gst_system_clock_obtain(),
};
- gst_pipeline_use_clock(gstPipeline.pipeline(), systemClock.get());
+ gst_pipeline_use_clock(capturePipeline.pipeline(), systemClock.get());
// This is the recording pipeline with only live sources, thus the pipeline
// will be always in the playing state.
- gstPipeline.setState(GST_STATE_PLAYING);
- gstPipeline.setInStoppedState(false);
+ capturePipeline.setState(GST_STATE_PLAYING);
+ capturePipeline.setInStoppedState(false);
- gstPipeline.dumpGraph("initial");
+ capturePipeline.dumpGraph("initial");
}
QGstreamerMediaCapture::~QGstreamerMediaCapture()
@@ -74,7 +67,7 @@ QGstreamerMediaCapture::~QGstreamerMediaCapture()
setMediaRecorder(nullptr);
setImageCapture(nullptr);
setCamera(nullptr);
- gstPipeline.setStateSync(GST_STATE_NULL);
+ capturePipeline.setStateSync(GST_STATE_NULL);
}
QPlatformCamera *QGstreamerMediaCapture::camera()
@@ -84,7 +77,7 @@ QPlatformCamera *QGstreamerMediaCapture::camera()
void QGstreamerMediaCapture::setCamera(QPlatformCamera *platformCamera)
{
- QGstreamerCamera *camera = static_cast<QGstreamerCamera *>(platformCamera);
+ auto *camera = static_cast<QGstreamerCameraBase *>(platformCamera);
if (gstCamera == camera)
return;
@@ -97,7 +90,7 @@ void QGstreamerMediaCapture::setCamera(QPlatformCamera *platformCamera)
gstCamera = camera;
if (gstCamera) {
- gstCameraActiveConnection = QObject::connect(camera, &QGstreamerCamera::activeChanged, this,
+ gstCameraActiveConnection = QObject::connect(camera, &QPlatformCamera::activeChanged, this,
&QGstreamerMediaCapture::setCameraActive);
if (gstCamera->isActive())
setCameraActive(true);
@@ -108,13 +101,13 @@ void QGstreamerMediaCapture::setCamera(QPlatformCamera *platformCamera)
void QGstreamerMediaCapture::setCameraActive(bool activate)
{
- gstPipeline.modifyPipelineWhileNotRunning([&] {
+ capturePipeline.modifyPipelineWhileNotRunning([&] {
if (activate) {
QGstElement cameraElement = gstCamera->gstElement();
gstVideoTee = QGstElement::createFromFactory("tee", "videotee");
gstVideoTee.set("allow-not-linked", true);
- gstPipeline.add(gstVideoOutput->gstElement(), cameraElement, gstVideoTee);
+ capturePipeline.add(gstVideoOutput->gstElement(), cameraElement, gstVideoTee);
linkTeeToPad(gstVideoTee, encoderVideoSink);
linkTeeToPad(gstVideoTee, gstVideoOutput->gstElement().staticPad("sink"));
@@ -122,21 +115,24 @@ void QGstreamerMediaCapture::setCameraActive(bool activate)
qLinkGstElements(cameraElement, gstVideoTee);
- gstPipeline.syncChildrenState();
+ capturePipeline.syncChildrenState();
} else {
- unlinkTeeFromPad(gstVideoTee, encoderVideoSink);
- unlinkTeeFromPad(gstVideoTee, imageCaptureSink);
+ if (encoderVideoCapsFilter)
+ qUnlinkGstElements(gstVideoTee, encoderVideoCapsFilter);
+ if (m_imageCapture)
+ qUnlinkGstElements(gstVideoTee, m_imageCapture->gstElement());
auto camera = gstCamera->gstElement();
- gstPipeline.stopAndRemoveElements(camera, gstVideoTee, gstVideoOutput->gstElement());
+ capturePipeline.stopAndRemoveElements(camera, gstVideoTee,
+ gstVideoOutput->gstElement());
gstVideoTee = {};
gstCamera->setCaptureSession(nullptr);
}
});
- gstPipeline.dumpGraph("camera");
+ capturePipeline.dumpGraph("camera");
}
QPlatformImageCapture *QGstreamerMediaCapture::imageCapture()
@@ -150,10 +146,10 @@ void QGstreamerMediaCapture::setImageCapture(QPlatformImageCapture *imageCapture
if (m_imageCapture == control)
return;
- gstPipeline.modifyPipelineWhileNotRunning([&] {
+ capturePipeline.modifyPipelineWhileNotRunning([&] {
if (m_imageCapture) {
- unlinkTeeFromPad(gstVideoTee, imageCaptureSink);
- gstPipeline.stopAndRemoveElements(m_imageCapture->gstElement());
+ qUnlinkGstElements(gstVideoTee, m_imageCapture->gstElement());
+ capturePipeline.stopAndRemoveElements(m_imageCapture->gstElement());
imageCaptureSink = {};
m_imageCapture->setCaptureSession(nullptr);
}
@@ -161,14 +157,14 @@ void QGstreamerMediaCapture::setImageCapture(QPlatformImageCapture *imageCapture
m_imageCapture = control;
if (m_imageCapture) {
imageCaptureSink = m_imageCapture->gstElement().staticPad("sink");
- gstPipeline.add(m_imageCapture->gstElement());
+ capturePipeline.add(m_imageCapture->gstElement());
m_imageCapture->gstElement().syncStateWithParent();
linkTeeToPad(gstVideoTee, imageCaptureSink);
m_imageCapture->setCaptureSession(this);
}
});
- gstPipeline.dumpGraph("imageCapture");
+ capturePipeline.dumpGraph("imageCapture");
emit imageCaptureChanged();
}
@@ -186,7 +182,7 @@ void QGstreamerMediaCapture::setMediaRecorder(QPlatformMediaRecorder *recorder)
m_mediaEncoder->setCaptureSession(this);
emit encoderChanged();
- gstPipeline.dumpGraph("encoder");
+ capturePipeline.dumpGraph("encoder");
}
QPlatformMediaRecorder *QGstreamerMediaCapture::mediaRecorder()
@@ -196,7 +192,7 @@ QPlatformMediaRecorder *QGstreamerMediaCapture::mediaRecorder()
void QGstreamerMediaCapture::linkEncoder(QGstPad audioSink, QGstPad videoSink)
{
- gstPipeline.modifyPipelineWhileNotRunning([&] {
+ capturePipeline.modifyPipelineWhileNotRunning([&] {
if (!gstVideoTee.isNull() && !videoSink.isNull()) {
QGstCaps caps = gstVideoTee.sink().currentCaps();
@@ -205,7 +201,7 @@ void QGstreamerMediaCapture::linkEncoder(QGstPad audioSink, QGstPad videoSink)
Q_ASSERT(encoderVideoCapsFilter);
encoderVideoCapsFilter.set("caps", caps);
- gstPipeline.add(encoderVideoCapsFilter);
+ capturePipeline.add(encoderVideoCapsFilter);
encoderVideoCapsFilter.src().link(videoSink);
linkTeeToPad(gstVideoTee, encoderVideoCapsFilter.sink());
@@ -220,7 +216,7 @@ void QGstreamerMediaCapture::linkEncoder(QGstPad audioSink, QGstPad videoSink)
Q_ASSERT(encoderAudioCapsFilter);
encoderAudioCapsFilter.set("caps", caps);
- gstPipeline.add(encoderAudioCapsFilter);
+ capturePipeline.add(encoderAudioCapsFilter);
encoderAudioCapsFilter.src().link(audioSink);
linkTeeToPad(gstAudioTee, encoderAudioCapsFilter.sink());
@@ -231,18 +227,16 @@ void QGstreamerMediaCapture::linkEncoder(QGstPad audioSink, QGstPad videoSink)
void QGstreamerMediaCapture::unlinkEncoder()
{
- gstPipeline.modifyPipelineWhileNotRunning([&] {
- if (!encoderVideoCapsFilter.isNull()) {
- encoderVideoCapsFilter.src().unlinkPeer();
- unlinkTeeFromPad(gstVideoTee, encoderVideoCapsFilter.sink());
- gstPipeline.stopAndRemoveElements(encoderVideoCapsFilter);
+ capturePipeline.modifyPipelineWhileNotRunning([&] {
+ if (encoderVideoCapsFilter) {
+ qUnlinkGstElements(gstVideoTee, encoderVideoCapsFilter);
+ capturePipeline.stopAndRemoveElements(encoderVideoCapsFilter);
encoderVideoCapsFilter = {};
}
- if (!encoderAudioCapsFilter.isNull()) {
- encoderAudioCapsFilter.src().unlinkPeer();
- unlinkTeeFromPad(gstAudioTee, encoderAudioCapsFilter.sink());
- gstPipeline.stopAndRemoveElements(encoderAudioCapsFilter);
+ if (encoderAudioCapsFilter) {
+ qUnlinkGstElements(gstAudioTee, encoderAudioCapsFilter);
+ capturePipeline.stopAndRemoveElements(encoderAudioCapsFilter);
encoderAudioCapsFilter = {};
}
@@ -251,22 +245,27 @@ void QGstreamerMediaCapture::unlinkEncoder()
});
}
+const QGstPipeline &QGstreamerMediaCapture::pipeline() const
+{
+ return capturePipeline;
+}
+
void QGstreamerMediaCapture::setAudioInput(QPlatformAudioInput *input)
{
if (gstAudioInput == input)
return;
- gstPipeline.modifyPipelineWhileNotRunning([&] {
+ capturePipeline.modifyPipelineWhileNotRunning([&] {
if (gstAudioInput) {
- unlinkTeeFromPad(gstAudioTee, encoderAudioSink);
+ if (encoderAudioCapsFilter)
+ qUnlinkGstElements(gstAudioTee, encoderAudioCapsFilter);
if (gstAudioOutput) {
- unlinkTeeFromPad(gstAudioTee, gstAudioOutput->gstElement().staticPad("sink"));
- gstPipeline.remove(gstAudioOutput->gstElement());
- gstAudioOutput->gstElement().setStateSync(GST_STATE_NULL);
+ qUnlinkGstElements(gstAudioTee, gstAudioOutput->gstElement());
+ capturePipeline.stopAndRemoveElements(gstAudioOutput->gstElement());
}
- gstPipeline.stopAndRemoveElements(gstAudioInput->gstElement(), gstAudioTee);
+ capturePipeline.stopAndRemoveElements(gstAudioInput->gstElement(), gstAudioTee);
gstAudioTee = {};
}
@@ -275,16 +274,16 @@ void QGstreamerMediaCapture::setAudioInput(QPlatformAudioInput *input)
Q_ASSERT(gstAudioTee.isNull());
gstAudioTee = QGstElement::createFromFactory("tee", "audiotee");
gstAudioTee.set("allow-not-linked", true);
- gstPipeline.add(gstAudioInput->gstElement(), gstAudioTee);
+ capturePipeline.add(gstAudioInput->gstElement(), gstAudioTee);
qLinkGstElements(gstAudioInput->gstElement(), gstAudioTee);
if (gstAudioOutput) {
- gstPipeline.add(gstAudioOutput->gstElement());
+ capturePipeline.add(gstAudioOutput->gstElement());
gstAudioOutput->gstElement().setState(GST_STATE_PLAYING);
linkTeeToPad(gstAudioTee, gstAudioOutput->gstElement().staticPad("sink"));
}
- gstPipeline.syncChildrenState();
+ capturePipeline.syncChildrenState();
linkTeeToPad(gstAudioTee, encoderAudioSink);
}
@@ -301,17 +300,17 @@ void QGstreamerMediaCapture::setAudioOutput(QPlatformAudioOutput *output)
if (gstAudioOutput == output)
return;
- gstPipeline.modifyPipelineWhileNotRunning([&] {
+ capturePipeline.modifyPipelineWhileNotRunning([&] {
if (gstAudioOutput && gstAudioInput) {
// If audio input is set, the output is in the pipeline
- unlinkTeeFromPad(gstAudioTee, gstAudioOutput->gstElement().staticPad("sink"));
- gstPipeline.stopAndRemoveElements(gstAudioOutput->gstElement());
+ qUnlinkGstElements(gstAudioTee, gstAudioOutput->gstElement());
+ capturePipeline.stopAndRemoveElements(gstAudioOutput->gstElement());
}
gstAudioOutput = static_cast<QGstreamerAudioOutput *>(output);
if (gstAudioOutput && gstAudioInput) {
- gstPipeline.add(gstAudioOutput->gstElement());
- gstPipeline.syncChildrenState();
+ capturePipeline.add(gstAudioOutput->gstElement());
+ capturePipeline.syncChildrenState();
linkTeeToPad(gstAudioTee, gstAudioOutput->gstElement().staticPad("sink"));
}
});
@@ -322,11 +321,6 @@ QGstreamerVideoSink *QGstreamerMediaCapture::gstreamerVideoSink() const
return gstVideoOutput ? gstVideoOutput->gstreamerVideoSink() : nullptr;
}
-void *QGstreamerMediaCapture::nativePipeline()
-{
- return gstPipeline.pipeline();
-}
-
QT_END_NAMESPACE
#include "moc_qgstreamermediacapture_p.cpp"
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture_p.h b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture_p.h
index 219773413..c44e31f0e 100644
--- a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture_p.h
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture_p.h
@@ -25,7 +25,7 @@
QT_BEGIN_NAMESPACE
-class QGstreamerCamera;
+class QGstreamerCameraBase;
class QGstreamerImageCapture;
class QGstreamerMediaEncoder;
class QGstreamerAudioInput;
@@ -59,12 +59,10 @@ public:
void linkEncoder(QGstPad audioSink, QGstPad videoSink);
void unlinkEncoder();
- QGstPipeline pipeline() const { return gstPipeline; }
+ const QGstPipeline &pipeline() const;
QGstreamerVideoSink *gstreamerVideoSink() const;
- void *nativePipeline() override;
-
private:
void setCameraActive(bool activate);
@@ -72,10 +70,10 @@ private:
friend QGstreamerMediaEncoder;
// Gst elements
- QGstPipeline gstPipeline;
+ QGstPipeline capturePipeline;
QGstreamerAudioInput *gstAudioInput = nullptr;
- QGstreamerCamera *gstCamera = nullptr;
+ QGstreamerCameraBase *gstCamera = nullptr;
QMetaObject::Connection gstCameraActiveConnection;
QGstElement gstAudioTee;
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder.cpp b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder.cpp
index 93baa6343..4ec10ca84 100644
--- a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder.cpp
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder.cpp
@@ -40,10 +40,10 @@ QGstreamerMediaEncoder::QGstreamerMediaEncoder(QMediaRecorder *parent)
QGstreamerMediaEncoder::~QGstreamerMediaEncoder()
{
- if (!gstPipeline.isNull()) {
+ if (!capturePipeline.isNull()) {
finalize();
- gstPipeline.removeMessageFilter(this);
- gstPipeline.setStateSync(GST_STATE_NULL);
+ capturePipeline.removeMessageFilter(this);
+ capturePipeline.setStateSync(GST_STATE_NULL);
}
}
@@ -54,7 +54,7 @@ bool QGstreamerMediaEncoder::isLocationWritable(const QUrl &) const
void QGstreamerMediaEncoder::handleSessionError(QMediaRecorder::Error code, const QString &description)
{
- error(code, description);
+ updateError(code, description);
stop();
}
@@ -68,7 +68,7 @@ bool QGstreamerMediaEncoder::processBusMessage(const QGstreamerMessage &msg)
switch (msg.type()) {
case GST_MESSAGE_ELEMENT: {
- QGstStructure s = msg.structure();
+ QGstStructureView s = msg.structure();
if (s.name() == "GstBinForwarded")
return processBusMessage(s.getMessage());
@@ -90,7 +90,7 @@ bool QGstreamerMediaEncoder::processBusMessage(const QGstreamerMessage &msg)
QUniqueGErrorHandle err;
QGString debug;
gst_message_parse_error(msg.message(), &err, &debug);
- error(QMediaRecorder::ResourceError, QString::fromUtf8(err.get()->message));
+ updateError(QMediaRecorder::ResourceError, QString::fromUtf8(err.get()->message));
if (!m_finalizing)
stop();
finalize();
@@ -262,7 +262,7 @@ void QGstreamerMediaEncoder::record(QMediaEncoderSettings &settings)
const auto hasAudio = m_session->audioInput() != nullptr;
if (!hasVideo && !hasAudio) {
- error(QMediaRecorder::ResourceError, QMediaRecorder::tr("No camera or audio input"));
+ updateError(QMediaRecorder::ResourceError, QMediaRecorder::tr("No camera or audio input"));
return;
}
@@ -310,8 +310,8 @@ void QGstreamerMediaEncoder::record(QMediaEncoderSettings &settings)
videoPauseControl.installOn(videoSink);
}
- gstPipeline.modifyPipelineWhileNotRunning([&] {
- gstPipeline.add(gstEncoder, gstFileSink);
+ capturePipeline.modifyPipelineWhileNotRunning([&] {
+ capturePipeline.add(gstEncoder, gstFileSink);
qLinkGstElements(gstEncoder, gstFileSink);
applyMetaDataToTagSetter(m_metaData, gstEncoder);
@@ -322,7 +322,7 @@ void QGstreamerMediaEncoder::record(QMediaEncoderSettings &settings)
});
signalDurationChangedTimer.start();
- gstPipeline.dumpGraph("recording");
+ capturePipeline.dumpGraph("recording");
durationChanged(0);
stateChanged(QMediaRecorder::RecordingState);
@@ -335,13 +335,13 @@ void QGstreamerMediaEncoder::pause()
return;
signalDurationChangedTimer.stop();
durationChanged(duration());
- gstPipeline.dumpGraph("before-pause");
+ capturePipeline.dumpGraph("before-pause");
stateChanged(QMediaRecorder::PausedState);
}
void QGstreamerMediaEncoder::resume()
{
- gstPipeline.dumpGraph("before-resume");
+ capturePipeline.dumpGraph("before-resume");
if (!m_session || m_finalizing || state() != QMediaRecorder::PausedState)
return;
signalDurationChangedTimer.start();
@@ -369,7 +369,7 @@ void QGstreamerMediaEncoder::finalize()
qCDebug(qLcMediaEncoderGst) << "finalize";
- gstPipeline.stopAndRemoveElements(gstEncoder, gstFileSink);
+ capturePipeline.stopAndRemoveElements(gstEncoder, gstFileSink);
gstFileSink = {};
gstEncoder = {};
m_finalizing = false;
@@ -403,17 +403,17 @@ void QGstreamerMediaEncoder::setCaptureSession(QPlatformMediaCaptureSession *ses
loop.exec();
}
- gstPipeline.removeMessageFilter(this);
- gstPipeline = {};
+ capturePipeline.removeMessageFilter(this);
+ capturePipeline = {};
}
m_session = captureSession;
if (!m_session)
return;
- gstPipeline = captureSession->gstPipeline;
- gstPipeline.set("message-forward", true);
- gstPipeline.installMessageFilter(this);
+ capturePipeline = captureSession->capturePipeline;
+ capturePipeline.set("message-forward", true);
+ capturePipeline.installMessageFilter(this);
}
QT_END_NAMESPACE
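The error() to updateError() rename recurs in every recorder backend touched by this patch. Presumably updateError() is the QPlatformMediaRecorder helper that stores the error state before notifying the QMediaRecorder front end; a hedged sketch of a caller, where the class and slot are illustrative and only the call shape comes from the hunks above:

    // Illustrative only: reporting a recording failure after the rename.
    void SketchEncoder::onRecordingError()
    {
        updateError(QMediaRecorder::ResourceError, tr("Recording error"));
        if (state() != QMediaRecorder::StoppedState)
            stateChanged(QMediaRecorder::StoppedState);
    }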
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder_p.h b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder_p.h
index 637fb7264..56e8c193b 100644
--- a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder_p.h
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder_p.h
@@ -79,7 +79,7 @@ private:
QMediaMetaData m_metaData;
QTimer signalDurationChangedTimer;
- QGstPipeline gstPipeline;
+ QGstPipeline capturePipeline;
QGstBin gstEncoder;
QGstElement gstFileSink;
diff --git a/src/plugins/multimedia/gstreamer/qgstreamerformatinfo.cpp b/src/plugins/multimedia/gstreamer/qgstreamerformatinfo.cpp
index 86d59a9a8..a657fc52f 100644
--- a/src/plugins/multimedia/gstreamer/qgstreamerformatinfo.cpp
+++ b/src/plugins/multimedia/gstreamer/qgstreamerformatinfo.cpp
@@ -8,14 +8,15 @@
QT_BEGIN_NAMESPACE
-QMediaFormat::AudioCodec QGstreamerFormatInfo::audioCodecForCaps(QGstStructure structure)
+QMediaFormat::AudioCodec QGstreamerFormatInfo::audioCodecForCaps(QGstStructureView structure)
{
+ using namespace std::string_view_literals;
const char *name = structure.name().data();
- if (!name || strncmp(name, "audio/", 6))
+ if (!name || (strncmp(name, "audio/", 6) != 0))
return QMediaFormat::AudioCodec::Unspecified;
name += 6;
- if (!strcmp(name, "mpeg")) {
+ if (name == "mpeg"sv) {
auto version = structure["mpegversion"].toInt();
if (version == 1) {
auto layer = structure["layer"];
@@ -24,91 +25,120 @@ QMediaFormat::AudioCodec QGstreamerFormatInfo::audioCodecForCaps(QGstStructure s
}
if (version == 4)
return QMediaFormat::AudioCodec::AAC;
- } else if (!strcmp(name, "x-ac3")) {
+ return QMediaFormat::AudioCodec::Unspecified;
+ }
+ if (name == "x-ac3"sv)
return QMediaFormat::AudioCodec::AC3;
- } else if (!strcmp(name, "x-eac3")) {
+
+ if (name == "x-eac3"sv)
return QMediaFormat::AudioCodec::EAC3;
- } else if (!strcmp(name, "x-flac")) {
+
+ if (name == "x-flac"sv)
return QMediaFormat::AudioCodec::FLAC;
- } else if (!strcmp(name, "x-alac")) {
+
+ if (name == "x-alac"sv)
return QMediaFormat::AudioCodec::ALAC;
- } else if (!strcmp(name, "x-true-hd")) {
+
+ if (name == "x-true-hd"sv)
return QMediaFormat::AudioCodec::DolbyTrueHD;
- } else if (!strcmp(name, "x-vorbis")) {
+
+ if (name == "x-vorbis"sv)
return QMediaFormat::AudioCodec::Vorbis;
- } else if (!strcmp(name, "x-opus")) {
+
+ if (name == "x-opus"sv)
return QMediaFormat::AudioCodec::Opus;
- } else if (!strcmp(name, "x-wav")) {
+
+ if (name == "x-wav"sv)
return QMediaFormat::AudioCodec::Wave;
- } else if (!strcmp(name, "x-wma")) {
+
+ if (name == "x-wma"sv)
return QMediaFormat::AudioCodec::WMA;
- }
+
return QMediaFormat::AudioCodec::Unspecified;
}
-QMediaFormat::VideoCodec QGstreamerFormatInfo::videoCodecForCaps(QGstStructure structure)
+QMediaFormat::VideoCodec QGstreamerFormatInfo::videoCodecForCaps(QGstStructureView structure)
{
+ using namespace std::string_view_literals;
const char *name = structure.name().data();
- if (!name || strncmp(name, "video/", 6))
+ if (!name || (strncmp(name, "video/", 6) != 0))
return QMediaFormat::VideoCodec::Unspecified;
name += 6;
- if (!strcmp(name, "mpeg")) {
+ if (name == "mpeg"sv) {
auto version = structure["mpegversion"].toInt();
if (version == 1)
return QMediaFormat::VideoCodec::MPEG1;
- else if (version == 2)
+ if (version == 2)
return QMediaFormat::VideoCodec::MPEG2;
- else if (version == 4)
+ if (version == 4)
return QMediaFormat::VideoCodec::MPEG4;
- } else if (!strcmp(name, "x-h264")) {
+ return QMediaFormat::VideoCodec::Unspecified;
+ }
+ if (name == "x-h264"sv)
return QMediaFormat::VideoCodec::H264;
+
#if GST_CHECK_VERSION(1, 17, 0) // x265enc seems to be broken on 1.16 at least
- } else if (!strcmp(name, "x-h265")) {
+ if (name == "x-h265"sv)
return QMediaFormat::VideoCodec::H265;
#endif
- } else if (!strcmp(name, "x-vp8")) {
+
+ if (name == "x-vp8"sv)
return QMediaFormat::VideoCodec::VP8;
- } else if (!strcmp(name, "x-vp9")) {
+
+ if (name == "x-vp9"sv)
return QMediaFormat::VideoCodec::VP9;
- } else if (!strcmp(name, "x-av1")) {
+
+ if (name == "x-av1"sv)
return QMediaFormat::VideoCodec::AV1;
- } else if (!strcmp(name, "x-theora")) {
+
+ if (name == "x-theora"sv)
return QMediaFormat::VideoCodec::Theora;
- } else if (!strcmp(name, "x-jpeg")) {
+
+ if (name == "x-jpeg"sv)
return QMediaFormat::VideoCodec::MotionJPEG;
- } else if (!strcmp(name, "x-wmv")) {
+
+ if (name == "x-wmv"sv)
return QMediaFormat::VideoCodec::WMV;
- }
+
return QMediaFormat::VideoCodec::Unspecified;
}
-QMediaFormat::FileFormat QGstreamerFormatInfo::fileFormatForCaps(QGstStructure structure)
+QMediaFormat::FileFormat QGstreamerFormatInfo::fileFormatForCaps(QGstStructureView structure)
{
+ using namespace std::string_view_literals;
const char *name = structure.name().data();
- if (!strcmp(name, "video/x-ms-asf")) {
+ if (name == "video/x-ms-asf"sv)
return QMediaFormat::FileFormat::WMV;
- } else if (!strcmp(name, "video/x-msvideo")) {
+
+ if (name == "video/x-msvideo"sv)
return QMediaFormat::FileFormat::AVI;
- } else if (!strcmp(name, "video/x-matroska")) {
+
+ if (name == "video/x-matroska"sv)
return QMediaFormat::FileFormat::Matroska;
- } else if (!strcmp(name, "video/quicktime")) {
- auto variant = structure["variant"].toString();
+
+ if (name == "video/quicktime"sv) {
+ const char *variant = structure["variant"].toString();
if (!variant)
return QMediaFormat::FileFormat::QuickTime;
- else if (!strcmp(variant, "iso"))
+ if (variant == "iso"sv)
return QMediaFormat::FileFormat::MPEG4;
- } else if (!strcmp(name, "video/ogg")) {
+ }
+ if (name == "video/ogg"sv)
return QMediaFormat::FileFormat::Ogg;
- } else if (!strcmp(name, "video/webm")) {
+
+ if (name == "video/webm"sv)
return QMediaFormat::FileFormat::WebM;
- } else if (!strcmp(name, "audio/x-m4a")) {
+
+ if (name == "audio/x-m4a"sv)
return QMediaFormat::FileFormat::Mpeg4Audio;
- } else if (!strcmp(name, "audio/x-wav")) {
+
+ if (name == "audio/x-wav"sv)
return QMediaFormat::FileFormat::Wave;
- } else if (!strcmp(name, "audio/mpeg")) {
+
+ if (name == "audio/mpeg"sv) {
auto mpegversion = structure["mpegversion"].toInt();
if (mpegversion == 1) {
auto layer = structure["layer"];
@@ -116,23 +146,28 @@ QMediaFormat::FileFormat QGstreamerFormatInfo::fileFormatForCaps(QGstStructure s
return QMediaFormat::FileFormat::MP3;
}
}
+
return QMediaFormat::UnspecifiedFormat;
}
-QImageCapture::FileFormat QGstreamerFormatInfo::imageFormatForCaps(QGstStructure structure)
+QImageCapture::FileFormat QGstreamerFormatInfo::imageFormatForCaps(QGstStructureView structure)
{
+ using namespace std::string_view_literals;
const char *name = structure.name().data();
- if (!strcmp(name, "image/jpeg")) {
+ if (name == "image/jpeg"sv)
return QImageCapture::JPEG;
- } else if (!strcmp(name, "image/png")) {
+
+ if (name == "image/png"sv)
return QImageCapture::PNG;
- } else if (!strcmp(name, "image/webp")) {
+
+ if (name == "image/webp"sv)
return QImageCapture::WebP;
- } else if (!strcmp(name, "image/tiff")) {
+
+ if (name == "image/tiff"sv)
return QImageCapture::Tiff;
- }
+
return QImageCapture::UnspecifiedFormat;
}
@@ -155,7 +190,7 @@ static QPair<QList<QMediaFormat::AudioCodec>, QList<QMediaFormat::VideoCodec>> g
auto caps = QGstCaps(gst_static_caps_get(&padTemplate->static_caps), QGstCaps::HasRef);
for (int i = 0; i < caps.size(); i++) {
- QGstStructure structure = caps.at(i);
+ QGstStructureView structure = caps.at(i);
auto a = QGstreamerFormatInfo::audioCodecForCaps(structure);
if (a != QMediaFormat::AudioCodec::Unspecified && !audio.contains(a))
audio.append(a);
@@ -195,7 +230,7 @@ QList<QGstreamerFormatInfo::CodecMap> QGstreamerFormatInfo::getMuxerList(bool de
auto caps = QGstCaps(gst_static_caps_get(&padTemplate->static_caps), QGstCaps::HasRef);
for (int i = 0; i < caps.size(); i++) {
- QGstStructure structure = caps.at(i);
+ QGstStructureView structure = caps.at(i);
auto fmt = fileFormatForCaps(structure);
if (fmt != QMediaFormat::UnspecifiedFormat)
fileFormats.append(fmt);
@@ -218,7 +253,7 @@ QList<QGstreamerFormatInfo::CodecMap> QGstreamerFormatInfo::getMuxerList(bool de
bool acceptsRawAudio = false;
for (int i = 0; i < caps.size(); i++) {
- QGstStructure structure = caps.at(i);
+ QGstStructureView structure = caps.at(i);
if (structure.name() == "audio/x-raw")
acceptsRawAudio = true;
auto audio = audioCodecForCaps(structure);
@@ -280,7 +315,7 @@ static QList<QImageCapture::FileFormat> getImageFormatList()
QGstCaps caps = QGstCaps(gst_static_caps_get(&padTemplate->static_caps), QGstCaps::HasRef);
for (int i = 0; i < caps.size(); i++) {
- QGstStructure structure = caps.at(i);
+ QGstStructureView structure = caps.at(i);
auto f = QGstreamerFormatInfo::imageFormatForCaps(structure);
if (f != QImageCapture::UnspecifiedFormat) {
// qDebug() << structure.toString() << f;
diff --git a/src/plugins/multimedia/gstreamer/qgstreamerformatinfo_p.h b/src/plugins/multimedia/gstreamer/qgstreamerformatinfo_p.h
index def42b7ea..bba10edb9 100644
--- a/src/plugins/multimedia/gstreamer/qgstreamerformatinfo_p.h
+++ b/src/plugins/multimedia/gstreamer/qgstreamerformatinfo_p.h
@@ -31,10 +31,10 @@ public:
QGstCaps audioCaps(const QMediaFormat &f) const;
QGstCaps videoCaps(const QMediaFormat &f) const;
- static QMediaFormat::AudioCodec audioCodecForCaps(QGstStructure structure);
- static QMediaFormat::VideoCodec videoCodecForCaps(QGstStructure structure);
- static QMediaFormat::FileFormat fileFormatForCaps(QGstStructure structure);
- static QImageCapture::FileFormat imageFormatForCaps(QGstStructure structure);
+ static QMediaFormat::AudioCodec audioCodecForCaps(QGstStructureView structure);
+ static QMediaFormat::VideoCodec videoCodecForCaps(QGstStructureView structure);
+ static QMediaFormat::FileFormat fileFormatForCaps(QGstStructureView structure);
+ static QImageCapture::FileFormat imageFormatForCaps(QGstStructureView structure);
QList<CodecMap> getMuxerList(bool demuxer, QList<QMediaFormat::AudioCodec> audioCodecs, QList<QMediaFormat::VideoCodec> videoCodecs);
};
diff --git a/src/plugins/multimedia/gstreamer/qgstreamerintegration.cpp b/src/plugins/multimedia/gstreamer/qgstreamerintegration.cpp
index 4ee5b36e8..87c514f2e 100644
--- a/src/plugins/multimedia/gstreamer/qgstreamerintegration.cpp
+++ b/src/plugins/multimedia/gstreamer/qgstreamerintegration.cpp
@@ -4,6 +4,7 @@
#include <qgstreamerintegration_p.h>
#include <qgstreamerformatinfo_p.h>
#include <qgstreamervideodevices_p.h>
+#include <audio/qgstreameraudiodevice_p.h>
#include <audio/qgstreameraudiodecoder_p.h>
#include <common/qgstreameraudioinput_p.h>
#include <common/qgstreameraudiooutput_p.h>
@@ -15,42 +16,149 @@
#include <mediacapture/qgstreamermediaencoder_p.h>
#include <QtCore/qloggingcategory.h>
+#include <QtMultimedia/private/qmediaplayer_p.h>
+#include <QtMultimedia/private/qmediacapturesession_p.h>
+#include <QtMultimedia/private/qcameradevice_p.h>
QT_BEGIN_NAMESPACE
+static thread_local bool inCustomCameraConstruction = false;
+static thread_local QGstElement pendingCameraElement{};
+
+QGStreamerPlatformSpecificInterfaceImplementation::
+ ~QGStreamerPlatformSpecificInterfaceImplementation() = default;
+
+QAudioDevice QGStreamerPlatformSpecificInterfaceImplementation::makeCustomGStreamerAudioInput(
+ const QByteArray &gstreamerPipeline)
+{
+ return qMakeCustomGStreamerAudioInput(gstreamerPipeline);
+}
+
+QAudioDevice QGStreamerPlatformSpecificInterfaceImplementation::makeCustomGStreamerAudioOutput(
+ const QByteArray &gstreamerPipeline)
+{
+ return qMakeCustomGStreamerAudioOutput(gstreamerPipeline);
+}
+
+QCamera *QGStreamerPlatformSpecificInterfaceImplementation::makeCustomGStreamerCamera(
+ const QByteArray &gstreamerPipeline, QObject *parent)
+{
+ QCameraDevicePrivate *info = new QCameraDevicePrivate;
+ info->id = gstreamerPipeline;
+ QCameraDevice device = info->create();
+
+ inCustomCameraConstruction = true;
+ auto guard = qScopeGuard([] {
+ inCustomCameraConstruction = false;
+ });
+
+ return new QCamera(device, parent);
+}
+
+QCamera *
+QGStreamerPlatformSpecificInterfaceImplementation::makeCustomGStreamerCamera(GstElement *element,
+ QObject *parent)
+{
+ QCameraDevicePrivate *info = new QCameraDevicePrivate;
+ info->id = "Custom Camera from GstElement";
+ QCameraDevice device = info->create();
+
+ pendingCameraElement = QGstElement{
+ element,
+ QGstElement::NeedsRef,
+ };
+
+ inCustomCameraConstruction = true;
+ auto guard = qScopeGuard([] {
+ inCustomCameraConstruction = false;
+ Q_ASSERT(!pendingCameraElement);
+ });
+
+ return new QCamera(device, parent);
+}
+
+GstPipeline *QGStreamerPlatformSpecificInterfaceImplementation::gstPipeline(QMediaPlayer *player)
+{
+ auto *priv = reinterpret_cast<QMediaPlayerPrivate *>(QMediaPlayerPrivate::get(player));
+ if (!priv)
+ return nullptr;
+
+ QGstreamerMediaPlayer *gstreamerPlayer = dynamic_cast<QGstreamerMediaPlayer *>(priv->control);
+ return gstreamerPlayer ? gstreamerPlayer->pipeline().pipeline() : nullptr;
+}
+
+GstPipeline *
+QGStreamerPlatformSpecificInterfaceImplementation::gstPipeline(QMediaCaptureSession *session)
+{
+ auto *priv = QMediaCaptureSessionPrivate::get(session);
+ if (!priv)
+ return nullptr;
+
+ QGstreamerMediaCapture *gstreamerCapture =
+ dynamic_cast<QGstreamerMediaCapture *>(priv->captureSession.get());
+ return gstreamerCapture ? gstreamerCapture->pipeline().pipeline() : nullptr;
+}
+
Q_LOGGING_CATEGORY(lcGstreamer, "qt.multimedia.gstreamer")
+namespace {
+
+void rankDownPlugin(GstRegistry *reg, const char *name)
+{
+ QGstPluginFeatureHandle pluginFeature{
+ gst_registry_lookup_feature(reg, name),
+ QGstPluginFeatureHandle::HasRef,
+ };
+ if (pluginFeature)
+ gst_plugin_feature_set_rank(pluginFeature.get(), GST_RANK_PRIMARY - 1);
+}
+
+// https://gstreamer.freedesktop.org/documentation/vaapi/index.html
+constexpr auto vaapiPluginNames = {
+ "vaapidecodebin", "vaapih264dec", "vaapih264enc", "vaapih265dec",
+ "vaapijpegdec", "vaapijpegenc", "vaapimpeg2dec", "vaapipostproc",
+ "vaapisink", "vaapivp8dec", "vaapivp9dec",
+};
+
+// https://gstreamer.freedesktop.org/documentation/va/index.html
+constexpr auto vaPluginNames = {
+ "vaav1dec", "vacompositor", "vadeinterlace", "vah264dec", "vah264enc", "vah265dec",
+ "vajpegdec", "vampeg2dec", "vapostproc", "vavp8dec", "vavp9dec",
+};
+
+// https://gstreamer.freedesktop.org/documentation/nvcodec/index.html
+constexpr auto nvcodecPluginNames = {
+ "cudaconvert", "cudaconvertscale", "cudadownload", "cudaipcsink", "cudaipcsrc",
+ "cudascale", "cudaupload", "nvautogpuh264enc", "nvautogpuh265enc", "nvav1dec",
+ "nvcudah264enc", "nvcudah265enc", "nvd3d11h264enc", "nvd3d11h265enc", "nvh264dec",
+ "nvh264enc", "nvh265dec", "nvh265enc", "nvjpegdec", "nvjpegenc",
+ "nvmpeg2videodec", "nvmpeg4videodec", "nvmpegvideodec", "nvvp8dec", "nvvp9dec",
+};
+
+} // namespace
+
QGstreamerIntegration::QGstreamerIntegration()
: QPlatformMediaIntegration(QLatin1String("gstreamer"))
{
gst_init(nullptr, nullptr);
qCDebug(lcGstreamer) << "Using gstreamer version: " << gst_version_string();
+ GstRegistry *reg = gst_registry_get();
+
if constexpr (!GST_CHECK_VERSION(1, 22, 0)) {
- GstRegistry* reg = gst_registry_get();
- const auto pluginNames = {
- "vaapidecodebin",
- "vaapih264dec",
- "vaapih264enc",
- "vaapih265dec",
- "vaapijpegdec",
- "vaapijpegenc",
- "vaapimpeg2dec",
- "vaapipostproc",
- "vaapisink",
- "vaapivp8dec",
- "vaapivp9dec"
- };
-
- for (auto name : pluginNames) {
- QGstPluginFeatureHandle pluginFeature {
- gst_registry_lookup_feature(reg, name),
- QGstPluginFeatureHandle::HasRef,
- };
- if (pluginFeature) {
- gst_plugin_feature_set_rank(pluginFeature.get(), GST_RANK_PRIMARY - 1);
- }
- }
+ for (const char *name : vaapiPluginNames)
+ rankDownPlugin(reg, name);
+ }
+
+ if (qEnvironmentVariableIsSet("QT_GSTREAMER_DISABLE_VA")) {
+ for (const char *name : vaPluginNames)
+ rankDownPlugin(reg, name);
+ }
+
+ if (qEnvironmentVariableIsSet("QT_GSTREAMER_DISABLE_NVCODEC")) {
+ for (const char *name : nvcodecPluginNames)
+ rankDownPlugin(reg, name);
}
}
@@ -86,6 +194,12 @@ QMaybe<QPlatformMediaPlayer *> QGstreamerIntegration::createPlayer(QMediaPlayer
QMaybe<QPlatformCamera *> QGstreamerIntegration::createCamera(QCamera *camera)
{
+ if (inCustomCameraConstruction) {
+ QGstElement element = std::exchange(pendingCameraElement, {});
+ return element ? new QGstreamerCustomCamera{ camera, std::move(element) }
+ : new QGstreamerCustomCamera{ camera };
+ }
+
return QGstreamerCamera::create(camera);
}
@@ -120,4 +234,9 @@ GstDevice *QGstreamerIntegration::videoDevice(const QByteArray &id)
return devices ? static_cast<QGstreamerVideoDevices *>(devices)->videoDevice(id) : nullptr;
}
+QAbstractPlatformSpecificInterface *QGstreamerIntegration::platformSpecificInterface()
+{
+ return &m_platformSpecificImplementation;
+}
+
QT_END_NAMESPACE
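Two behavioural additions sit in this file. First, QT_GSTREAMER_DISABLE_VA and QT_GSTREAMER_DISABLE_NVCODEC do not remove the listed plugins; rankDownPlugin() demotes each feature to GST_RANK_PRIMARY - 1, so auto-plugging prefers other elements while explicit pipelines can still name them. Second, makeCustomGStreamerCamera() raises a thread-local flag (and, for the GstElement overload, parks the element in pendingCameraElement) so that createCamera() can return a QGstreamerCustomCamera instead of the default device-backed camera. A hedged usage sketch for the environment variables, which only take effect if set before the GStreamer integration is constructed, typically at the top of main():

    #include <QtCore/QCoreApplication>

    int main(int argc, char *argv[])
    {
        qputenv("QT_GSTREAMER_DISABLE_VA", "1"); // demote VA plugins before Qt Multimedia loads
        QCoreApplication app(argc, argv);
        // ... create QMediaPlayer / QMediaCaptureSession as usual ...
        return app.exec();
    }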
diff --git a/src/plugins/multimedia/gstreamer/qgstreamerintegration_p.h b/src/plugins/multimedia/gstreamer/qgstreamerintegration_p.h
index 9cb84c57b..229bbd48e 100644
--- a/src/plugins/multimedia/gstreamer/qgstreamerintegration_p.h
+++ b/src/plugins/multimedia/gstreamer/qgstreamerintegration_p.h
@@ -15,13 +15,31 @@
// We mean it.
//
-#include <private/qplatformmediaintegration_p.h>
+#include <QtMultimedia/private/qplatformmediaintegration_p.h>
+#include <QtMultimedia/private/qgstreamer_platformspecificinterface_p.h>
+
#include <gst/gst.h>
QT_BEGIN_NAMESPACE
class QGstreamerFormatInfo;
+class QGStreamerPlatformSpecificInterfaceImplementation : public QGStreamerPlatformSpecificInterface
+{
+public:
+ ~QGStreamerPlatformSpecificInterfaceImplementation() override;
+
+ QAudioDevice makeCustomGStreamerAudioInput(const QByteArray &gstreamerPipeline) override;
+ QAudioDevice makeCustomGStreamerAudioOutput(const QByteArray &gstreamerPipeline) override;
+ QCamera *makeCustomGStreamerCamera(const QByteArray &gstreamerPipeline,
+ QObject *parent) override;
+
+ QCamera *makeCustomGStreamerCamera(GstElement *, QObject *parent) override;
+
+ GstPipeline *gstPipeline(QMediaPlayer *) override;
+ GstPipeline *gstPipeline(QMediaCaptureSession *) override;
+};
+
class QGstreamerIntegration : public QPlatformMediaIntegration
{
public:
@@ -47,9 +65,13 @@ public:
const QGstreamerFormatInfo *gstFormatsInfo();
GstDevice *videoDevice(const QByteArray &id);
+ QAbstractPlatformSpecificInterface *platformSpecificInterface() override;
+
protected:
QPlatformMediaFormatInfo *createFormatInfo() override;
QPlatformVideoDevices *createVideoDevices() override;
+
+ QGStreamerPlatformSpecificInterfaceImplementation m_platformSpecificImplementation;
};
QT_END_NAMESPACE
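The new m_platformSpecificImplementation member is what platformSpecificInterface() hands out, exposing the GStreamer-specific entry points (custom audio devices, custom cameras, raw GstPipeline access) without widening the generic QPlatformMediaIntegration API. A hedged sketch of the custom-camera path; the retrieval of the interface pointer is not part of these hunks, so iface is a placeholder for a QGStreamerPlatformSpecificInterface*:

    // Illustrative only: 'iface' is assumed to have been obtained from the backend.
    QCamera *camera = iface->makeCustomGStreamerCamera(QByteArrayLiteral("videotestsrc"),
                                                       /*parent=*/nullptr);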
diff --git a/src/plugins/multimedia/gstreamer/qgstreamervideodevices.cpp b/src/plugins/multimedia/gstreamer/qgstreamervideodevices.cpp
index cceaca621..78ac16eb4 100644
--- a/src/plugins/multimedia/gstreamer/qgstreamervideodevices.cpp
+++ b/src/plugins/multimedia/gstreamer/qgstreamervideodevices.cpp
@@ -76,11 +76,13 @@ QList<QCameraDevice> QGstreamerVideoDevices::videoDevices() const
info->description = desc.toQString();
info->id = device.id;
- if (QGstStructure properties = gst_device_get_properties(device.gstDevice.get());
- !properties.isNull()) {
- auto def = properties["is-default"].toBool();
+ QUniqueGstStructureHandle properties{
+ gst_device_get_properties(device.gstDevice.get()),
+ };
+ if (properties) {
+ QGstStructureView view{ properties };
+ auto def = view["is-default"].toBool();
info->isDefault = def && *def;
- properties.free();
}
if (info->isDefault)
diff --git a/src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer.cpp b/src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer.cpp
index 8ad0894f7..6595c5d42 100644
--- a/src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer.cpp
+++ b/src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer.cpp
@@ -105,7 +105,7 @@ static QAbstractVideoBuffer::MapData mapData(const camera_frame_nv12_t &frame,
{
return {
- .nPlanes = 2,
+ .planeCount = 2,
.bytesPerLine = {
toInt(frame.stride),
toInt(frame.uv_stride)
@@ -114,7 +114,7 @@ static QAbstractVideoBuffer::MapData mapData(const camera_frame_nv12_t &frame,
baseAddress,
baseAddress + frame.uv_offset
},
- .size = {
+ .dataSize = {
toInt(frame.stride * frame.height),
toInt(frame.uv_stride * frame.height / 2)
}
@@ -125,14 +125,14 @@ static QAbstractVideoBuffer::MapData mapData(const camera_frame_rgb8888_t &frame
unsigned char *baseAddress)
{
return {
- .nPlanes = 1,
+ .planeCount = 1,
.bytesPerLine = {
toInt(frame.stride)
},
.data = {
baseAddress
},
- .size = {
+ .dataSize = {
toInt(frame.stride * frame.height),
}
};
@@ -142,14 +142,14 @@ static QAbstractVideoBuffer::MapData mapData(const camera_frame_gray8_t &frame,
unsigned char *baseAddress)
{
return {
- .nPlanes = 1,
+ .planeCount = 1,
.bytesPerLine = {
toInt(frame.stride)
},
.data = {
baseAddress
},
- .size = {
+ .dataSize = {
toInt(frame.stride * frame.height)
}
};
@@ -159,14 +159,14 @@ static QAbstractVideoBuffer::MapData mapData(const camera_frame_cbycry_t &frame,
unsigned char *baseAddress)
{
return {
- .nPlanes = 1,
+ .planeCount = 1,
.bytesPerLine = {
toInt(frame.stride)
},
.data = {
baseAddress
},
- .size = {
+ .dataSize = {
toInt(frame.bufsize),
}
};
@@ -176,7 +176,7 @@ static QAbstractVideoBuffer::MapData mapData(const camera_frame_ycbcr420p_t &fra
unsigned char *baseAddress)
{
return {
- .nPlanes = 3,
+ .planeCount = 3,
.bytesPerLine = {
toInt(frame.y_stride),
frame.cb_stride,
@@ -187,7 +187,7 @@ static QAbstractVideoBuffer::MapData mapData(const camera_frame_ycbcr420p_t &fra
baseAddress + frame.cb_offset,
baseAddress + frame.cr_offset,
},
- .size = {
+ .dataSize = {
toInt(frame.y_stride * frame.height),
toInt(frame.cb_stride * frame.height / 2),
toInt(frame.cr_stride * frame.height / 2)
@@ -199,14 +199,14 @@ static QAbstractVideoBuffer::MapData mapData(const camera_frame_ycbycr_t &frame,
unsigned char *baseAddress)
{
return {
- .nPlanes = 1,
+ .planeCount = 1,
.bytesPerLine = {
toInt(frame.stride)
},
.data = {
baseAddress
},
- .size = {
+ .dataSize = {
toInt(frame.stride * frame.height)
}
};
@@ -260,10 +260,10 @@ static constexpr QSize frameSize(const camera_buffer_t *buffer)
QT_BEGIN_NAMESPACE
QQnxCameraFrameBuffer::QQnxCameraFrameBuffer(const camera_buffer_t *buffer, QRhi *rhi)
- : QAbstractVideoBuffer(rhi ? QVideoFrame::RhiTextureHandle : QVideoFrame::NoHandle, rhi)
- , m_rhi(rhi)
- , m_pixelFormat(::frameTypeToPixelFormat(buffer->frametype))
- , m_dataSize(::bufferDataSize(buffer))
+ : QHwVideoBuffer(rhi ? QVideoFrame::RhiTextureHandle : QVideoFrame::NoHandle, rhi),
+ m_rhi(rhi),
+ m_pixelFormat(::frameTypeToPixelFormat(buffer->frametype)),
+ m_dataSize(::bufferDataSize(buffer))
{
if (m_dataSize <= 0)
return;
@@ -277,12 +277,7 @@ QQnxCameraFrameBuffer::QQnxCameraFrameBuffer(const camera_buffer_t *buffer, QRhi
m_frameSize = ::frameSize(buffer);
}
-QVideoFrame::MapMode QQnxCameraFrameBuffer::mapMode() const
-{
- return QVideoFrame::ReadOnly;
-}
-
-QAbstractVideoBuffer::MapData QQnxCameraFrameBuffer::map(QVideoFrame::MapMode)
+QAbstractVideoBuffer::MapData QQnxCameraFrameBuffer::map(QtVideo::MapMode)
{
return m_mapData;
}
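Two of the cross-cutting renames in this patch meet in these buffer classes: QAbstractVideoBuffer::MapData now uses planeCount and dataSize instead of nPlanes and size, and map() takes the scoped QtVideo::MapMode while the separate mapMode() virtual is gone. A condensed sketch of a single-plane, read-only buffer under the new names; the field names come from the hunks, the class itself is illustrative:

    class SketchBuffer : public QAbstractVideoBuffer
    {
    public:
        MapData map(QtVideo::MapMode mode) override
        {
            if (mode != QtVideo::MapMode::ReadOnly)
                return {};                            // read-only buffer
            MapData mapped;
            mapped.planeCount = 1;                    // was .nPlanes
            mapped.bytesPerLine[0] = m_stride;
            mapped.data[0] = m_bits;
            mapped.dataSize[0] = m_stride * m_height; // was .size
            return mapped;
        }
        void unmap() override { }
        QVideoFrameFormat format() const override { return {}; }

    private:
        uchar *m_bits = nullptr;
        int m_stride = 0;
        int m_height = 0;
    };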
diff --git a/src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer_p.h b/src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer_p.h
index 9fed113a6..20f724552 100644
--- a/src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer_p.h
+++ b/src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer_p.h
@@ -14,7 +14,7 @@
// We mean it.
//
-#include <private/qabstractvideobuffer_p.h>
+#include <private/qhwvideobuffer_p.h>
#include <QtCore/qsize.h>
@@ -26,7 +26,7 @@ QT_BEGIN_NAMESPACE
class QRhi;
-class QQnxCameraFrameBuffer : public QAbstractVideoBuffer
+class QQnxCameraFrameBuffer : public QHwVideoBuffer
{
public:
explicit QQnxCameraFrameBuffer(const camera_buffer_t *buffer, QRhi *rhi = nullptr);
@@ -34,8 +34,7 @@ public:
QQnxCameraFrameBuffer(const QQnxCameraFrameBuffer&) = delete;
QQnxCameraFrameBuffer& operator=(const QQnxCameraFrameBuffer&) = delete;
- QVideoFrame::MapMode mapMode() const override;
- MapData map(QVideoFrame::MapMode mode) override;
+ MapData map(QtVideo::MapMode mode) override;
void unmap() override;
QVideoFrameFormat::PixelFormat pixelFormat() const;
diff --git a/src/plugins/multimedia/qnx/camera/qqnxplatformcamera.cpp b/src/plugins/multimedia/qnx/camera/qqnxplatformcamera.cpp
index fdf69618e..b604f4561 100644
--- a/src/plugins/multimedia/qnx/camera/qqnxplatformcamera.cpp
+++ b/src/plugins/multimedia/qnx/camera/qqnxplatformcamera.cpp
@@ -12,6 +12,7 @@
#include <qmediadevices.h>
#include <private/qmediastoragelocation_p.h>
+#include <private/qvideoframe_p.h>
#include <camera/camera_api.h>
#include <camera/camera_3a.h>
@@ -402,16 +403,14 @@ void QQnxPlatformCamera::onFrameAvailable()
if (!m_videoSink)
return;
- std::unique_ptr<QQnxCameraFrameBuffer> currentFrame = m_qnxCamera->takeCurrentFrame();
+ std::unique_ptr<QQnxCameraFrameBuffer> currentFrameBuffer = m_qnxCamera->takeCurrentFrame();
- if (!currentFrame)
+ if (!currentFrameBuffer)
return;
- const QVideoFrame actualFrame(currentFrame.get(),
- QVideoFrameFormat(currentFrame->size(), currentFrame->pixelFormat()));
-
- currentFrame.release(); // QVideoFrame has taken ownership of the internal
- // buffer
+ QVideoFrameFormat format(currentFrameBuffer->size(), currentFrameBuffer->pixelFormat());
+ const QVideoFrame actualFrame =
+ QVideoFramePrivate::createFrame(std::move(currentFrameBuffer), std::move(format));
m_videoSink->setVideoFrame(actualFrame);
diff --git a/src/plugins/multimedia/qnx/mediaplayer/qqnxmediaplayer.cpp b/src/plugins/multimedia/qnx/mediaplayer/qqnxmediaplayer.cpp
index 1c28c3b14..14b190836 100644
--- a/src/plugins/multimedia/qnx/mediaplayer/qqnxmediaplayer.cpp
+++ b/src/plugins/multimedia/qnx/mediaplayer/qqnxmediaplayer.cpp
@@ -7,7 +7,8 @@
#include "qqnxmediaeventthread_p.h"
#include "qqnxwindowgrabber_p.h"
-#include <private/qabstractvideobuffer_p.h>
+#include <private/qhwvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
#include <QtCore/qabstracteventdispatcher.h>
#include <QtCore/qcoreapplication.h>
@@ -62,24 +63,19 @@ static std::tuple<int, int, bool> parseBufferLevel(const QString &value)
return { level, capacity, true };
}
-class QnxTextureBuffer : public QAbstractVideoBuffer
+class QnxTextureBuffer : public QHwVideoBuffer
{
public:
QnxTextureBuffer(QQnxWindowGrabber *QQnxWindowGrabber)
- : QAbstractVideoBuffer(QVideoFrame::RhiTextureHandle)
+ : QHwVideoBuffer(QVideoFrame::RhiTextureHandle)
{
m_windowGrabber = QQnxWindowGrabber;
m_handle = 0;
}
- QVideoFrame::MapMode mapMode() const override
- {
- return QVideoFrame::ReadWrite;
- }
-
void unmap() override {}
- MapData map(QVideoFrame::MapMode /*mode*/) override
+ MapData map(QtVideo::MapMode /*mode*/) override
{
return {};
}
@@ -102,19 +98,13 @@ private:
class QnxRasterBuffer : public QAbstractVideoBuffer
{
public:
- QnxRasterBuffer(QQnxWindowGrabber *windowGrabber)
- : QAbstractVideoBuffer(QVideoFrame::NoHandle)
- {
- m_windowGrabber = windowGrabber;
- }
+ QnxRasterBuffer(QQnxWindowGrabber *windowGrabber) { m_windowGrabber = windowGrabber; }
- QVideoFrame::MapMode mapMode() const override
+ MapData map(QtVideo::MapMode mode) override
{
- return QVideoFrame::ReadOnly;
- }
+ if (mode != QtVideo::MapMode::ReadOnly)
+ return {};
- MapData map(QVideoFrame::MapMode /*mode*/) override
- {
if (buffer.data) {
qWarning("QnxRasterBuffer: need to unmap before mapping");
return {};
@@ -123,10 +113,10 @@ public:
buffer = m_windowGrabber->getNextBuffer();
return {
- .nPlanes = 1,
+ .planeCount = 1,
.bytesPerLine = { buffer.stride },
.data = { buffer.data },
- .size = { buffer.width * buffer.height * buffer.pixelSize }
+ .dataSize = { buffer.width * buffer.height * buffer.pixelSize }
};
}
@@ -135,6 +125,8 @@ public:
buffer = {};
}
+ QVideoFrameFormat format() const override { return {}; }
+
private:
QQnxWindowGrabber *m_windowGrabber;
QQnxWindowGrabber::BufferView buffer;
@@ -517,12 +509,13 @@ void QQnxMediaPlayer::updateScene(const QSize &size)
if (!m_platformVideoSink)
return;
- auto *buffer = m_windowGrabber->isEglImageSupported()
- ? static_cast<QAbstractVideoBuffer*>(new QnxTextureBuffer(m_windowGrabber))
- : static_cast<QAbstractVideoBuffer*>(new QnxRasterBuffer(m_windowGrabber));
+ QVideoFrameFormat format(size, QVideoFrameFormat::Format_BGRX8888);
- const QVideoFrame actualFrame(buffer,
- QVideoFrameFormat(size, QVideoFrameFormat::Format_BGRX8888));
+ const QVideoFrame actualFrame = m_windowGrabber->isEglImageSupported()
+ ? QVideoFramePrivate::createFrame(std::make_unique<QnxTextureBuffer>(m_windowGrabber),
+ std::move(format))
+ : QVideoFramePrivate::createFrame(std::make_unique<QnxRasterBuffer>(m_windowGrabber),
+ std::move(format));
m_platformVideoSink->setVideoFrame(actualFrame);
}
diff --git a/src/plugins/multimedia/wasm/common/qwasmvideooutput.cpp b/src/plugins/multimedia/wasm/common/qwasmvideooutput.cpp
index 74c14959c..84d325635 100644
--- a/src/plugins/multimedia/wasm/common/qwasmvideooutput.cpp
+++ b/src/plugins/multimedia/wasm/common/qwasmvideooutput.cpp
@@ -13,10 +13,10 @@
#include "qwasmvideooutput_p.h"
#include <qvideosink.h>
-#include <private/qabstractvideobuffer_p.h>
#include <private/qplatformvideosink_p.h>
#include <private/qmemoryvideobuffer_p.h>
#include <private/qvideotexturehelper_p.h>
+#include <private/qvideoframe_p.h>
#include <private/qstdweb_p.h>
#include <QTimer>
@@ -58,10 +58,9 @@ static bool checkForVideoFrame()
return (!videoFrame.isNull() && !videoFrame.isUndefined());
}
-Q_GLOBAL_STATIC_WITH_ARGS(bool, m_hasVideoFrame, (checkForVideoFrame()))
-
QWasmVideoOutput::QWasmVideoOutput(QObject *parent) : QObject{ parent }
{
+ m_hasVideoFrame = checkForVideoFrame();
}
void QWasmVideoOutput::setVideoSize(const QSize &newSize)
@@ -862,9 +861,10 @@ void QWasmVideoOutput::videoComputeFrame(void *context)
auto *textureDescription = QVideoTextureHelper::textureDescription(frameFormat.pixelFormat());
- QVideoFrame vFrame(
- new QMemoryVideoBuffer(frameBytes,
- textureDescription->strideForWidth(frameFormat.frameWidth())),
+ QVideoFrame vFrame = QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(
+ std::move(frameBytes),
+ textureDescription->strideForWidth(frameFormat.frameWidth())),
frameFormat);
QWasmVideoOutput *wasmVideoOutput = reinterpret_cast<QWasmVideoOutput *>(context);
@@ -924,10 +924,12 @@ void QWasmVideoOutput::videoFrameCallback(emscripten::val now, emscripten::val m
auto *textureDescription = QVideoTextureHelper::textureDescription(frameFormat.pixelFormat());
- QVideoFrame vFrame(
- new QMemoryVideoBuffer(frameBytes,
- textureDescription->strideForWidth(frameFormat.frameWidth())),
- frameFormat);
+ auto buffer = std::make_unique<QMemoryVideoBuffer>(
+ std::move(frameBytes),
+ textureDescription->strideForWidth(frameFormat.frameWidth()));
+
+ QVideoFrame vFrame =
+ QVideoFramePrivate::createFrame(std::move(buffer), std::move(frameFormat));
if (!wasmVideoOutput) {
qCDebug(qWasmMediaVideoOutput) << "ERROR:"
diff --git a/src/plugins/multimedia/wasm/common/qwasmvideooutput_p.h b/src/plugins/multimedia/wasm/common/qwasmvideooutput_p.h
index dc4a762bf..f078ffb44 100644
--- a/src/plugins/multimedia/wasm/common/qwasmvideooutput_p.h
+++ b/src/plugins/multimedia/wasm/common/qwasmvideooutput_p.h
@@ -81,6 +81,7 @@ public:
emscripten::val getDeviceCapabilities();
bool setDeviceSetting(const std::string &key, emscripten::val value);
bool isCameraReady() { return m_cameraIsReady; }
+ bool m_hasVideoFrame = false;
static void videoFrameCallback(emscripten::val now, emscripten::val metadata);
void videoFrameTimerCallback();
diff --git a/src/plugins/multimedia/wasm/mediacapture/qwasmcamera.cpp b/src/plugins/multimedia/wasm/mediacapture/qwasmcamera.cpp
index 9bd63b081..fbc5cf262 100644
--- a/src/plugins/multimedia/wasm/mediacapture/qwasmcamera.cpp
+++ b/src/plugins/multimedia/wasm/mediacapture/qwasmcamera.cpp
@@ -4,7 +4,6 @@
#include "qwasmcamera_p.h"
#include "qmediadevices.h"
#include <qcameradevice.h>
-#include "private/qabstractvideobuffer_p.h"
#include "private/qplatformvideosink_p.h"
#include <private/qmemoryvideobuffer_p.h>
#include <private/qvideotexturehelper_p.h>
@@ -64,7 +63,7 @@ void QWasmCamera::setActive(bool active)
{
if (!m_CaptureSession) {
- emit error(QCamera::CameraError, QStringLiteral("video surface error"));
+ updateError(QCamera::CameraError, QStringLiteral("video surface error"));
m_shouldBeActive = true;
return;
}
@@ -120,7 +119,7 @@ void QWasmCamera::setCamera(const QCameraDevice &camera)
createCamera(m_cameraDev);
emit cameraIsReady();
} else {
- emit error(QCamera::CameraError, QStringLiteral("Failed to find a camera"));
+ updateError(QCamera::CameraError, QStringLiteral("Failed to find a camera"));
}
}
diff --git a/src/plugins/multimedia/wasm/mediacapture/qwasmmediarecorder.cpp b/src/plugins/multimedia/wasm/mediacapture/qwasmmediarecorder.cpp
index 98d0d860b..98f04616a 100644
--- a/src/plugins/multimedia/wasm/mediacapture/qwasmmediarecorder.cpp
+++ b/src/plugins/multimedia/wasm/mediacapture/qwasmmediarecorder.cpp
@@ -285,8 +285,8 @@ void QWasmMediaRecorder::setStream(emscripten::val stream)
theError["target"]["data-mediarecordercontext"].as<quintptr>());
if (recorder) {
- recorder->error(QMediaRecorder::ResourceError,
- QString::fromStdString(theError["message"].as<std::string>()));
+ recorder->updateError(QMediaRecorder::ResourceError,
+ QString::fromStdString(theError["message"].as<std::string>()));
emit recorder->stateChanged(recorder->state());
}
};
@@ -381,12 +381,12 @@ void QWasmMediaRecorder::audioDataAvailable(emscripten::val blob, double timeCod
auto fileReader = std::make_shared<qstdweb::FileReader>();
fileReader->onError([=](emscripten::val theError) {
- error(QMediaRecorder::ResourceError,
- QString::fromStdString(theError["message"].as<std::string>()));
+ updateError(QMediaRecorder::ResourceError,
+ QString::fromStdString(theError["message"].as<std::string>()));
});
fileReader->onAbort([=](emscripten::val) {
- error(QMediaRecorder::ResourceError, QStringLiteral("File read aborted"));
+ updateError(QMediaRecorder::ResourceError, QStringLiteral("File read aborted"));
});
fileReader->onLoad([=](emscripten::val) {
@@ -473,7 +473,8 @@ void QWasmMediaRecorder::setTrackContraints(QMediaEncoderSettings &settings, ems
qCDebug(qWasmMediaRecorder)
<< theError["code"].as<int>()
<< QString::fromStdString(theError["message"].as<std::string>());
- error(QMediaRecorder::ResourceError, QString::fromStdString(theError["message"].as<std::string>()));
+ updateError(QMediaRecorder::ResourceError,
+ QString::fromStdString(theError["message"].as<std::string>()));
} },
constraints);
}
diff --git a/src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp b/src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp
index cc14cd419..517f1d969 100644
--- a/src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp
+++ b/src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp
@@ -5,7 +5,8 @@
#include "evrhelpers_p.h"
-#include <private/qabstractvideobuffer_p.h>
+#include <private/qhwvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
#include <qvideoframe.h>
#include <QDebug>
#include <qthread.h>
@@ -24,31 +25,29 @@
QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLcEvrD3DPresentEngine, "qt.multimedia.evrd3dpresentengine")
+static Q_LOGGING_CATEGORY(qLcEvrD3DPresentEngine, "qt.multimedia.evrd3dpresentengine");
-class IMFSampleVideoBuffer: public QAbstractVideoBuffer
+class IMFSampleVideoBuffer : public QHwVideoBuffer
{
public:
- IMFSampleVideoBuffer(ComPtr<IDirect3DDevice9Ex> device,
- const ComPtr<IMFSample> &sample, QRhi *rhi, QVideoFrame::HandleType type = QVideoFrame::NoHandle)
- : QAbstractVideoBuffer(type, rhi)
- , m_device(device)
- , m_sample(sample)
- , m_mapMode(QVideoFrame::NotMapped)
+ IMFSampleVideoBuffer(ComPtr<IDirect3DDevice9Ex> device, const ComPtr<IMFSample> &sample,
+ QRhi *rhi, QVideoFrame::HandleType type = QVideoFrame::NoHandle)
+ : QHwVideoBuffer(type, rhi),
+ m_device(device),
+ m_sample(sample),
+ m_mapMode(QtVideo::MapMode::NotMapped)
{
}
~IMFSampleVideoBuffer() override
{
- if (m_memSurface && m_mapMode != QVideoFrame::NotMapped)
+ if (m_memSurface && m_mapMode != QtVideo::MapMode::NotMapped)
m_memSurface->UnlockRect();
}
- QVideoFrame::MapMode mapMode() const override { return m_mapMode; }
-
- MapData map(QVideoFrame::MapMode mode) override
+ MapData map(QtVideo::MapMode mode) override
{
- if (!m_sample || m_mapMode != QVideoFrame::NotMapped || mode != QVideoFrame::ReadOnly)
+ if (!m_sample || m_mapMode != QtVideo::MapMode::NotMapped || mode != QtVideo::MapMode::ReadOnly)
return {};
D3DSURFACE_DESC desc;
@@ -80,25 +79,25 @@ public:
}
D3DLOCKED_RECT rect;
- if (FAILED(m_memSurface->LockRect(&rect, NULL, mode == QVideoFrame::ReadOnly ? D3DLOCK_READONLY : 0)))
+ if (FAILED(m_memSurface->LockRect(&rect, NULL, mode == QtVideo::MapMode::ReadOnly ? D3DLOCK_READONLY : 0)))
return {};
m_mapMode = mode;
MapData mapData;
- mapData.nPlanes = 1;
+ mapData.planeCount = 1;
mapData.bytesPerLine[0] = (int)rect.Pitch;
mapData.data[0] = reinterpret_cast<uchar *>(rect.pBits);
- mapData.size[0] = (int)(rect.Pitch * desc.Height);
+ mapData.dataSize[0] = (int)(rect.Pitch * desc.Height);
return mapData;
}
void unmap() override
{
- if (m_mapMode == QVideoFrame::NotMapped)
+ if (m_mapMode == QtVideo::MapMode::NotMapped)
return;
- m_mapMode = QVideoFrame::NotMapped;
+ m_mapMode = QtVideo::MapMode::NotMapped;
if (m_memSurface)
m_memSurface->UnlockRect();
}
@@ -109,7 +108,7 @@ protected:
private:
ComPtr<IDirect3DSurface9> m_memSurface;
- QVideoFrame::MapMode m_mapMode;
+ QtVideo::MapMode m_mapMode;
};
class QVideoFrameD3D11Textures: public QVideoFrameTextures
@@ -665,22 +664,23 @@ QVideoFrame D3DPresentEngine::makeVideoFrame(const ComPtr<IMFSample> &sample)
if (p.first == sample.Get())
sharedHandle = p.second;
- QAbstractVideoBuffer *vb = nullptr;
+ std::unique_ptr<IMFSampleVideoBuffer> vb;
QRhi *rhi = m_sink ? m_sink->rhi() : nullptr;
if (m_useTextureRendering && sharedHandle && rhi) {
if (rhi->backend() == QRhi::D3D11) {
- vb = new D3D11TextureVideoBuffer(m_device, sample, sharedHandle, rhi);
+ vb = std::make_unique<D3D11TextureVideoBuffer>(m_device, sample, sharedHandle, rhi);
#if QT_CONFIG(opengl)
} else if (rhi->backend() == QRhi::OpenGLES2) {
- vb = new OpenGlVideoBuffer(m_device, sample, m_wglNvDxInterop, sharedHandle, rhi);
+ vb = std::make_unique<OpenGlVideoBuffer>(m_device, sample, m_wglNvDxInterop,
+ sharedHandle, rhi);
#endif
}
}
if (!vb)
- vb = new IMFSampleVideoBuffer(m_device, sample, rhi);
+ vb = std::make_unique<IMFSampleVideoBuffer>(m_device, sample, rhi);
- QVideoFrame frame(vb, m_surfaceFormat);
+ QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(vb), m_surfaceFormat);
// WMF uses 100-nanosecond units, Qt uses microseconds
LONGLONG startTime = 0;
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader.cpp b/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader.cpp
index 2bdc4ea7d..e99b95ad2 100644
--- a/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader.cpp
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader.cpp
@@ -8,6 +8,7 @@
#include <qmediadevices.h>
#include <qaudiodevice.h>
#include <private/qmemoryvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
#include <private/qwindowsmfdefs_p.h>
#include <private/qcomptr_p.h>
#include <QtCore/qdebug.h>
@@ -955,9 +956,11 @@ STDMETHODIMP QWindowsMediaDeviceReader::OnReadSample(HRESULT hrStatus, DWORD dwS
if (SUCCEEDED(mediaBuffer->Lock(&buffer, nullptr, &bufLen))) {
auto bytes = QByteArray(reinterpret_cast<char*>(buffer), bufLen);
+ QVideoFrameFormat format(QSize(m_frameWidth, m_frameHeight), m_pixelFormat);
- QVideoFrame frame(new QMemoryVideoBuffer(bytes, m_stride),
- QVideoFrameFormat(QSize(m_frameWidth, m_frameHeight), m_pixelFormat));
+ QVideoFrame frame = QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(std::move(bytes), m_stride),
+ std::move(format));
// WMF uses 100-nanosecond units, Qt uses microseconds
frame.setStartTime(llTimestamp * 0.1);
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder.cpp b/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder.cpp
index 1d901c036..512110af6 100644
--- a/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder.cpp
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder.cpp
@@ -52,8 +52,8 @@ void QWindowsMediaEncoder::record(QMediaEncoderSettings &settings)
m_mediaDeviceSession->setActive(true);
if (!m_mediaDeviceSession->isActivating()) {
- error(QMediaRecorder::ResourceError,
- QMediaRecorderPrivate::msgFailedStartRecording());
+ updateError(QMediaRecorder::ResourceError,
+ QMediaRecorderPrivate::msgFailedStartRecording());
return;
}
}
@@ -72,7 +72,7 @@ void QWindowsMediaEncoder::record(QMediaEncoderSettings &settings)
stateChanged(m_state);
} else {
- error(ec, QMediaRecorderPrivate::msgFailedStartRecording());
+ updateError(ec, QMediaRecorderPrivate::msgFailedStartRecording());
}
}
@@ -85,7 +85,7 @@ void QWindowsMediaEncoder::pause()
m_state = QMediaRecorder::PausedState;
stateChanged(m_state);
} else {
- error(QMediaRecorder::FormatError, tr("Failed to pause recording"));
+ updateError(QMediaRecorder::FormatError, tr("Failed to pause recording"));
}
}
@@ -98,7 +98,7 @@ void QWindowsMediaEncoder::resume()
m_state = QMediaRecorder::RecordingState;
stateChanged(m_state);
} else {
- error(QMediaRecorder::FormatError, tr("Failed to resume recording"));
+ updateError(QMediaRecorder::FormatError, tr("Failed to resume recording"));
}
}
@@ -178,11 +178,11 @@ void QWindowsMediaEncoder::onDurationChanged(qint64 duration)
void QWindowsMediaEncoder::onStreamingError(int errorCode)
{
if (errorCode == MF_E_VIDEO_RECORDING_DEVICE_INVALIDATED)
- error(QMediaRecorder::ResourceError, tr("Camera is no longer present"));
+ updateError(QMediaRecorder::ResourceError, tr("Camera is no longer present"));
else if (errorCode == MF_E_AUDIO_RECORDING_DEVICE_INVALIDATED)
- error(QMediaRecorder::ResourceError, tr("Audio input is no longer present"));
+ updateError(QMediaRecorder::ResourceError, tr("Audio input is no longer present"));
else
- error(QMediaRecorder::ResourceError, tr("Streaming error"));
+ updateError(QMediaRecorder::ResourceError, tr("Streaming error"));
if (m_state != QMediaRecorder::StoppedState) {
m_mediaDeviceSession->stopRecording();
@@ -194,7 +194,7 @@ void QWindowsMediaEncoder::onStreamingError(int errorCode)
void QWindowsMediaEncoder::onRecordingError(int errorCode)
{
Q_UNUSED(errorCode);
- error(QMediaRecorder::ResourceError, tr("Recording error"));
+ updateError(QMediaRecorder::ResourceError, tr("Recording error"));
auto lastState = m_state;
m_state = QMediaRecorder::StoppedState;
diff --git a/src/plugins/multimedia/windows/player/mfplayersession.cpp b/src/plugins/multimedia/windows/player/mfplayersession.cpp
index 16cba7266..996ce35d8 100644
--- a/src/plugins/multimedia/windows/player/mfplayersession.cpp
+++ b/src/plugins/multimedia/windows/player/mfplayersession.cpp
@@ -1708,21 +1708,21 @@ void MFPlayerSession::setActiveTrack(QPlatformMediaPlayer::TrackType type, int i
int MFPlayerSession::activeTrack(QPlatformMediaPlayer::TrackType type)
{
- if (type < 0 || type >= QPlatformMediaPlayer::NTrackTypes)
+ if (type >= QPlatformMediaPlayer::NTrackTypes)
return -1;
return m_trackInfo[type].currentIndex;
}
int MFPlayerSession::trackCount(QPlatformMediaPlayer::TrackType type)
{
- if (type < 0 || type >= QPlatformMediaPlayer::NTrackTypes)
+ if (type >= QPlatformMediaPlayer::NTrackTypes)
return -1;
return m_trackInfo[type].metaData.count();
}
QMediaMetaData MFPlayerSession::trackMetaData(QPlatformMediaPlayer::TrackType type, int trackNumber)
{
- if (type < 0 || type >= QPlatformMediaPlayer::NTrackTypes)
+ if (type >= QPlatformMediaPlayer::NTrackTypes)
return {};
if (trackNumber < 0 || trackNumber >= m_trackInfo[type].metaData.count())
diff --git a/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp b/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp
index 7ef983120..599dbb9e2 100644
--- a/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp
+++ b/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp
@@ -8,6 +8,7 @@
#include "qsgvivantevideomaterialshader.h"
#include "qsgvivantevideonode.h"
#include "private/qsgvideotexture_p.h"
+#include "private/qvideoframe_p.h"
#include <QOpenGLContext>
#include <QThread>
@@ -142,7 +143,7 @@ GLuint QSGVivanteVideoMaterial::vivanteMapping(QVideoFrame vF)
clearTextures();
}
- if (vF.map(QVideoFrame::ReadOnly)) {
+ if (vF.map(QtVideo::MapMode::ReadOnly)) {
if (mMappable) {
if (!mBitsToTextureMap.contains(vF.bits())) {
@@ -191,7 +192,7 @@ GLuint QSGVivanteVideoMaterial::vivanteMapping(QVideoFrame vF)
GLuint physical = ~0U;
#if GST_CHECK_VERSION(1,14,0)
- auto buffer = reinterpret_cast<QGstVideoBuffer *>(vF.buffer());
+ auto buffer = reinterpret_cast<QGstVideoBuffer *>(QVideoFramePrivate::buffer(vF));
auto mem = gst_buffer_peek_memory(buffer->buffer(), 0);
auto phys_addr = gst_is_phys_memory(mem) ? gst_phys_memory_get_phys_addr(mem) : 0;
if (phys_addr)
diff --git a/src/spatialaudio/CMakeLists.txt b/src/spatialaudio/CMakeLists.txt
index d0d005e1e..46120c40b 100644
--- a/src/spatialaudio/CMakeLists.txt
+++ b/src/spatialaudio/CMakeLists.txt
@@ -20,7 +20,6 @@ qt_internal_add_module(SpatialAudio
Qt::BundledResonanceAudio
PUBLIC_LIBRARIES
Qt::Multimedia
- GENERATE_CPP_EXPORTS
)
diff --git a/src/spatialaudioquick3d/CMakeLists.txt b/src/spatialaudioquick3d/CMakeLists.txt
index 64b154158..f781ef766 100644
--- a/src/spatialaudioquick3d/CMakeLists.txt
+++ b/src/spatialaudioquick3d/CMakeLists.txt
@@ -27,7 +27,6 @@ qt_internal_add_qml_module(Quick3DSpatialAudioPrivate
PUBLIC_LIBRARIES
Qt::Quick3DPrivate
Qt::SpatialAudio
- GENERATE_CPP_EXPORTS
)
target_sources(quick3dspatialaudio PRIVATE qquick3dspatialaudio_plugin.cpp)
diff --git a/tests/auto/integration/CMakeLists.txt b/tests/auto/integration/CMakeLists.txt
index cbacf0c1a..9be80db63 100644
--- a/tests/auto/integration/CMakeLists.txt
+++ b/tests/auto/integration/CMakeLists.txt
@@ -9,10 +9,12 @@ add_subdirectory(qaudiodevice)
add_subdirectory(qaudiosource)
add_subdirectory(qaudiosink)
add_subdirectory(qmediaplayerbackend)
+add_subdirectory(qmediaplayerformatsupport)
add_subdirectory(qsoundeffect)
add_subdirectory(qvideoframebackend)
add_subdirectory(backends)
add_subdirectory(multiapp)
+add_subdirectory(qmediaframeinputsbackend)
if(TARGET Qt::Widgets)
add_subdirectory(qmediacapturesession)
add_subdirectory(qcamerabackend)
diff --git a/tests/auto/integration/qaudiodecoderbackend/CMakeLists.txt b/tests/auto/integration/qaudiodecoderbackend/CMakeLists.txt
index 0133c7498..d2206182f 100644
--- a/tests/auto/integration/qaudiodecoderbackend/CMakeLists.txt
+++ b/tests/auto/integration/qaudiodecoderbackend/CMakeLists.txt
@@ -27,11 +27,3 @@ qt_internal_add_test(tst_qaudiodecoderbackend
Qt::MultimediaPrivate
TESTDATA ${test_data}
)
-
-## Scopes:
-#####################################################################
-
-qt_internal_extend_target(tst_qaudiodecoderbackend CONDITION boot2qt
- DEFINES
- WAV_SUPPORT_NOT_FORCED
-)
diff --git a/tests/auto/integration/qaudiodecoderbackend/tst_qaudiodecoderbackend.cpp b/tests/auto/integration/qaudiodecoderbackend/tst_qaudiodecoderbackend.cpp
index 33206372e..5a48b4457 100644
--- a/tests/auto/integration/qaudiodecoderbackend/tst_qaudiodecoderbackend.cpp
+++ b/tests/auto/integration/qaudiodecoderbackend/tst_qaudiodecoderbackend.cpp
@@ -14,6 +14,18 @@ constexpr char TEST_CORRUPTED_FILE_NAME[] = "testdata/test-corrupted.wav";
constexpr char TEST_INVALID_SOURCE[] = "invalid";
constexpr char TEST_NO_AUDIO_TRACK[] = "testdata/test-no-audio-track.mp4";
+constexpr int testFileSampleCount = 44094;
+constexpr int testFileSampleRate = 44100;
+
+constexpr std::chrono::microseconds testFileDuration = [] {
+ using namespace std::chrono;
+ using namespace std::chrono_literals;
+ auto duration = nanoseconds(1s) * testFileSampleCount / testFileSampleRate;
+ return round<microseconds>(duration);
+}();
+
+constexpr qint64 testFileDurationUs = qint64(testFileDuration.count());
+
QT_USE_NAMESPACE
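For the 44094-sample, 44.1 kHz test file the constexpr above evaluates to 1e9 ns * 44094 / 44100 = 999863945 ns, which round<microseconds>() turns into 999864 us, just short of one second; that is why the duration assertions below keep a 20 ms tolerance. A quick compile-time check of that value (derived from the constants above, not part of the patch):

    static_assert(testFileDurationUs == 999'864);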
/*
@@ -152,7 +164,7 @@ void tst_QAudioDecoderBackend::directBruteForceReading()
checkNoMoreChanges(decoder);
- QCOMPARE(sampleCount, 44094);
+ QCOMPARE(sampleCount, testFileSampleCount);
}
void tst_QAudioDecoderBackend::indirectReadingByBufferReadySignal()
@@ -167,7 +179,6 @@ void tst_QAudioDecoderBackend::indirectReadingByBufferReadySignal()
connect(&decoder, &QAudioDecoder::bufferReady, this, [&]() {
QVERIFY(decoder.bufferAvailable());
- QVERIFY(decoder.isDecoding());
auto buffer = decoder.read();
QVERIFY(buffer.isValid());
@@ -191,7 +202,7 @@ void tst_QAudioDecoderBackend::indirectReadingByBufferReadySignal()
checkNoMoreChanges(decoder);
- QCOMPARE(sampleCount, 44094);
+ QCOMPARE(sampleCount, testFileSampleCount);
QCOMPARE(finishSpy.size(), 1);
}
@@ -210,8 +221,6 @@ void tst_QAudioDecoderBackend::indirectReadingByBufferAvailableSignal() {
if (!available)
return;
- QVERIFY(decoder.isDecoding());
-
while (decoder.bufferAvailable()) {
auto buffer = decoder.read();
QVERIFY(buffer.isValid());
@@ -235,7 +244,7 @@ void tst_QAudioDecoderBackend::indirectReadingByBufferAvailableSignal() {
checkNoMoreChanges(decoder);
- QCOMPARE(sampleCount, 44094);
+ QCOMPARE(sampleCount, testFileSampleCount);
QCOMPARE(finishSpy.size(), 1);
}
@@ -268,6 +277,8 @@ void tst_QAudioDecoderBackend::stopOnBufferReady()
void tst_QAudioDecoderBackend::restartOnBufferReady()
{
+ QSKIP_GSTREAMER("QTBUG-124005: failures on gstreamer");
+
CHECK_SELECTED_URL(m_wavFile);
QAudioDecoder decoder;
@@ -303,7 +314,7 @@ void tst_QAudioDecoderBackend::restartOnBufferReady()
checkNoMoreChanges(decoder);
- QCOMPARE(sampleCount, 44094);
+ QCOMPARE(sampleCount, testFileSampleCount);
}
void tst_QAudioDecoderBackend::restartOnFinish()
@@ -344,7 +355,7 @@ void tst_QAudioDecoderBackend::restartOnFinish()
QVERIFY(!decoder.isDecoding());
checkNoMoreChanges(decoder);
- QCOMPARE(sampleCount, 44094);
+ QCOMPARE(sampleCount, testFileSampleCount);
}
void tst_QAudioDecoderBackend::fileTest()
@@ -371,13 +382,13 @@ void tst_QAudioDecoderBackend::fileTest()
QVERIFY(!d.bufferAvailable());
QCOMPARE(d.source(), *m_wavFile);
- QSignalSpy readySpy(&d, SIGNAL(bufferReady()));
- QSignalSpy bufferChangedSpy(&d, SIGNAL(bufferAvailableChanged(bool)));
+ QSignalSpy readySpy(&d, &QAudioDecoder::bufferReady);
+ QSignalSpy bufferChangedSpy(&d, &QAudioDecoder::bufferAvailableChanged);
QSignalSpy errorSpy(&d, SIGNAL(error(QAudioDecoder::Error)));
- QSignalSpy isDecodingSpy(&d, SIGNAL(isDecodingChanged(bool)));
- QSignalSpy durationSpy(&d, SIGNAL(durationChanged(qint64)));
- QSignalSpy finishedSpy(&d, SIGNAL(finished()));
- QSignalSpy positionSpy(&d, SIGNAL(positionChanged(qint64)));
+ QSignalSpy isDecodingSpy(&d, &QAudioDecoder::isDecodingChanged);
+ QSignalSpy durationSpy(&d, &QAudioDecoder::durationChanged);
+ QSignalSpy finishedSpy(&d, &QAudioDecoder::finished);
+ QSignalSpy positionSpy(&d, &QAudioDecoder::positionChanged);
d.start();
@@ -398,7 +409,7 @@ void tst_QAudioDecoderBackend::fileTest()
// Test file is 44.1K 16bit mono, 44094 samples
QCOMPARE(buffer.format().channelCount(), 1);
- QCOMPARE(buffer.format().sampleRate(), 44100);
+ QCOMPARE(buffer.format().sampleRate(), testFileSampleRate);
QCOMPARE(buffer.format().sampleFormat(), QAudioFormat::Int16);
QCOMPARE(buffer.byteCount(), buffer.sampleCount() * 2); // 16bit mono
@@ -411,28 +422,35 @@ void tst_QAudioDecoderBackend::fileTest()
byteCount += buffer.byteCount();
// Now drain the decoder
- if (sampleCount < 44094) {
+ if (sampleCount < testFileSampleCount) {
QTRY_COMPARE(d.bufferAvailable(), true);
}
+ auto durationToMs = [](uint64_t dur) {
+ if (isGStreamerPlatform())
+ return std::round(dur / 1000.0);
+ else
+ return dur / 1000.0;
+ };
+
while (d.bufferAvailable()) {
buffer = d.read();
QVERIFY(buffer.isValid());
QTRY_VERIFY(!positionSpy.isEmpty());
- QCOMPARE(positionSpy.takeLast().at(0).toLongLong(), qint64(duration / 1000));
+ QCOMPARE(positionSpy.takeLast().at(0).toLongLong(), qint64(durationToMs(duration)));
duration += buffer.duration();
sampleCount += buffer.sampleCount();
byteCount += buffer.byteCount();
- if (sampleCount < 44094) {
+ if (sampleCount < testFileSampleCount) {
QTRY_COMPARE(d.bufferAvailable(), true);
}
}
// Make sure the duration is roughly correct (+/- 20ms)
- QCOMPARE(sampleCount, 44094);
- QCOMPARE(byteCount, 44094 * 2);
+ QCOMPARE(sampleCount, testFileSampleCount);
+ QCOMPARE(byteCount, testFileSampleCount * 2);
QVERIFY(qAbs(qint64(duration) - 1000000) < 20000);
QVERIFY(qAbs((d.position() + (buffer.duration() / 1000)) - 1000) < 20);
QTRY_COMPARE(finishedSpy.size(), 1);
@@ -498,34 +516,29 @@ void tst_QAudioDecoderBackend::fileTest()
sampleCount += buffer.sampleCount();
byteCount += buffer.byteCount();
- // Now drain the decoder
- if (duration < 996000) {
- QTRY_COMPARE(d.bufferAvailable(), true);
- }
+ while (finishedSpy.isEmpty() || d.bufferAvailable()) {
+ if (!d.bufferAvailable()) {
+ QTest::qWait(std::chrono::milliseconds(10));
+ continue;
+ }
- while (d.bufferAvailable()) {
buffer = d.read();
QVERIFY(buffer.isValid());
QTRY_VERIFY(!positionSpy.isEmpty());
- QCOMPARE(positionSpy.takeLast().at(0).toLongLong(), qlonglong(duration / 1000));
- QCOMPARE_LT(d.position() - (duration / 1000), 20u);
+ QCOMPARE(positionSpy.takeLast().at(0).toLongLong(), qlonglong(durationToMs(duration)));
+ QCOMPARE_LT(d.position() - durationToMs(duration), 20u);
duration += buffer.duration();
sampleCount += buffer.sampleCount();
byteCount += buffer.byteCount();
-
- if (duration < 996000) {
- QTRY_COMPARE(d.bufferAvailable(), true);
- }
}
// Resampling might end up with fewer or more samples
// so be a bit sloppy
QCOMPARE_LT(qAbs(sampleCount - 22047), 100);
QCOMPARE_LT(qAbs(byteCount - 22047), 100);
- QCOMPARE_LT(qAbs(qint64(duration) - 1000000), 20000);
+ QCOMPARE_LT(qAbs(qint64(duration) - testFileDurationUs), 20000);
QCOMPARE_LT(qAbs((d.position() + (buffer.duration() / 1000)) - 1000), 20);
- QTRY_COMPARE(finishedSpy.size(), 1);
QVERIFY(!d.bufferAvailable());
QVERIFY(!d.isDecoding());
@@ -558,13 +571,13 @@ void tst_QAudioDecoderBackend::unsupportedFileTest()
QVERIFY(!d.bufferAvailable());
QCOMPARE(d.source(), url);
- QSignalSpy readySpy(&d, SIGNAL(bufferReady()));
- QSignalSpy bufferChangedSpy(&d, SIGNAL(bufferAvailableChanged(bool)));
+ QSignalSpy readySpy(&d, &QAudioDecoder::bufferReady);
+ QSignalSpy bufferChangedSpy(&d, &QAudioDecoder::bufferAvailableChanged);
QSignalSpy errorSpy(&d, SIGNAL(error(QAudioDecoder::Error)));
- QSignalSpy isDecodingSpy(&d, SIGNAL(isDecodingChanged(bool)));
- QSignalSpy durationSpy(&d, SIGNAL(durationChanged(qint64)));
- QSignalSpy finishedSpy(&d, SIGNAL(finished()));
- QSignalSpy positionSpy(&d, SIGNAL(positionChanged(qint64)));
+ QSignalSpy isDecodingSpy(&d, &QAudioDecoder::isDecodingChanged);
+ QSignalSpy durationSpy(&d, &QAudioDecoder::durationChanged);
+ QSignalSpy finishedSpy(&d, &QAudioDecoder::finished);
+ QSignalSpy positionSpy(&d, &QAudioDecoder::positionChanged);
d.start();
QTRY_VERIFY(!d.isDecoding());
@@ -637,13 +650,13 @@ void tst_QAudioDecoderBackend::corruptedFileTest()
QVERIFY(!d.bufferAvailable());
QCOMPARE(d.source(), url);
- QSignalSpy readySpy(&d, SIGNAL(bufferReady()));
- QSignalSpy bufferChangedSpy(&d, SIGNAL(bufferAvailableChanged(bool)));
+ QSignalSpy readySpy(&d, &QAudioDecoder::bufferReady);
+ QSignalSpy bufferChangedSpy(&d, &QAudioDecoder::bufferAvailableChanged);
QSignalSpy errorSpy(&d, SIGNAL(error(QAudioDecoder::Error)));
- QSignalSpy isDecodingSpy(&d, SIGNAL(isDecodingChanged(bool)));
- QSignalSpy durationSpy(&d, SIGNAL(durationChanged(qint64)));
- QSignalSpy finishedSpy(&d, SIGNAL(finished()));
- QSignalSpy positionSpy(&d, SIGNAL(positionChanged(qint64)));
+ QSignalSpy isDecodingSpy(&d, &QAudioDecoder::isDecodingChanged);
+ QSignalSpy durationSpy(&d, &QAudioDecoder::durationChanged);
+ QSignalSpy finishedSpy(&d, &QAudioDecoder::finished);
+ QSignalSpy positionSpy(&d, &QAudioDecoder::positionChanged);
d.start();
QTRY_VERIFY(!d.isDecoding());
@@ -711,13 +724,13 @@ void tst_QAudioDecoderBackend::invalidSource()
QVERIFY(!d.bufferAvailable());
QCOMPARE(d.source(), url);
- QSignalSpy readySpy(&d, SIGNAL(bufferReady()));
- QSignalSpy bufferChangedSpy(&d, SIGNAL(bufferAvailableChanged(bool)));
+ QSignalSpy readySpy(&d, &QAudioDecoder::bufferReady);
+ QSignalSpy bufferChangedSpy(&d, &QAudioDecoder::bufferAvailableChanged);
QSignalSpy errorSpy(&d, SIGNAL(error(QAudioDecoder::Error)));
- QSignalSpy isDecodingSpy(&d, SIGNAL(isDecodingChanged(bool)));
- QSignalSpy durationSpy(&d, SIGNAL(durationChanged(qint64)));
- QSignalSpy finishedSpy(&d, SIGNAL(finished()));
- QSignalSpy positionSpy(&d, SIGNAL(positionChanged(qint64)));
+ QSignalSpy isDecodingSpy(&d, &QAudioDecoder::isDecodingChanged);
+ QSignalSpy durationSpy(&d, &QAudioDecoder::durationChanged);
+ QSignalSpy finishedSpy(&d, &QAudioDecoder::finished);
+ QSignalSpy positionSpy(&d, &QAudioDecoder::positionChanged);
d.start();
QTRY_VERIFY(!d.isDecoding());
@@ -784,6 +797,7 @@ void tst_QAudioDecoderBackend::invalidSource()
void tst_QAudioDecoderBackend::deviceTest()
{
+ using namespace std::chrono;
CHECK_SELECTED_URL(m_wavFile);
QAudioDecoder d;
@@ -793,13 +807,13 @@ void tst_QAudioDecoderBackend::deviceTest()
quint64 duration = 0;
int sampleCount = 0;
- QSignalSpy readySpy(&d, SIGNAL(bufferReady()));
- QSignalSpy bufferChangedSpy(&d, SIGNAL(bufferAvailableChanged(bool)));
+ QSignalSpy readySpy(&d, &QAudioDecoder::bufferReady);
+ QSignalSpy bufferChangedSpy(&d, &QAudioDecoder::bufferAvailableChanged);
QSignalSpy errorSpy(&d, SIGNAL(error(QAudioDecoder::Error)));
- QSignalSpy isDecodingSpy(&d, SIGNAL(isDecodingChanged(bool)));
- QSignalSpy durationSpy(&d, SIGNAL(durationChanged(qint64)));
- QSignalSpy finishedSpy(&d, SIGNAL(finished()));
- QSignalSpy positionSpy(&d, SIGNAL(positionChanged(qint64)));
+ QSignalSpy isDecodingSpy(&d, &QAudioDecoder::isDecodingChanged);
+ QSignalSpy durationSpy(&d, &QAudioDecoder::durationChanged);
+ QSignalSpy finishedSpy(&d, &QAudioDecoder::finished);
+ QSignalSpy positionSpy(&d, &QAudioDecoder::positionChanged);
QVERIFY(!d.isDecoding());
QVERIFY(d.bufferAvailable() == false);
@@ -832,7 +846,7 @@ void tst_QAudioDecoderBackend::deviceTest()
// Test file is 44.1K 16bit mono
QCOMPARE(buffer.format().channelCount(), 1);
- QCOMPARE(buffer.format().sampleRate(), 44100);
+ QCOMPARE(buffer.format().sampleRate(), testFileSampleRate);
QCOMPARE(buffer.format().sampleFormat(), QAudioFormat::Int16);
QVERIFY(errorSpy.isEmpty());
@@ -841,7 +855,7 @@ void tst_QAudioDecoderBackend::deviceTest()
sampleCount += buffer.sampleCount();
// Now drain the decoder
- if (sampleCount < 44094) {
+ if (sampleCount < testFileSampleCount) {
QTRY_COMPARE(d.bufferAvailable(), true);
}
@@ -849,18 +863,24 @@ void tst_QAudioDecoderBackend::deviceTest()
buffer = d.read();
QVERIFY(buffer.isValid());
QTRY_VERIFY(!positionSpy.isEmpty());
- QVERIFY(positionSpy.takeLast().at(0).toLongLong() == qint64(duration / 1000));
+ if (isGStreamerPlatform())
+ QCOMPARE_EQ(positionSpy.takeLast().at(0).toLongLong(),
+ round<milliseconds>(microseconds{ duration }).count());
+ else
+ QCOMPARE_EQ(positionSpy.takeLast().at(0).toLongLong(),
+ floor<milliseconds>(microseconds{ duration }).count());
+
QVERIFY(d.position() - (duration / 1000) < 20);
duration += buffer.duration();
sampleCount += buffer.sampleCount();
- if (sampleCount < 44094) {
+ if (sampleCount < testFileSampleCount) {
QTRY_COMPARE(d.bufferAvailable(), true);
}
}
// Make sure the duration is roughly correct (+/- 20ms)
- QCOMPARE(sampleCount, 44094);
+ QCOMPARE(sampleCount, testFileSampleCount);
QVERIFY(qAbs(qint64(duration) - 1000000) < 20000);
QVERIFY(qAbs((d.position() + (buffer.duration() / 1000)) - 1000) < 20);
QTRY_COMPARE(finishedSpy.size(), 1);
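Note on the position checks added above: the decoder tests now accept backend-specific position reporting. On the GStreamer backend positions are compared against the nearest millisecond, while the other backends truncate microseconds towards zero. A minimal standalone sketch of the two conversions, using std::chrono the same way the deviceTest() hunk does (the helper name here is ours, not part of the patch):

    #include <QtCore/qglobal.h>
    #include <chrono>

    // Expected millisecond position for a given microsecond duration:
    // nearest value on GStreamer, truncated value on the other backends.
    qint64 expectedPositionMs(qint64 positionUs, bool gstreamerBackend)
    {
        using namespace std::chrono;
        const microseconds us{ positionUs };
        return gstreamerBackend ? round<milliseconds>(us).count()
                                : floor<milliseconds>(us).count();
    }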
diff --git a/tests/auto/integration/qaudiosink/tst_qaudiosink.cpp b/tests/auto/integration/qaudiosink/tst_qaudiosink.cpp
index d86044a3d..6fdfe8221 100644
--- a/tests/auto/integration/qaudiosink/tst_qaudiosink.cpp
+++ b/tests/auto/integration/qaudiosink/tst_qaudiosink.cpp
@@ -398,7 +398,7 @@ void tst_QAudioSink::stopWhileStopped()
QVERIFY2((audioOutput.state() == QAudio::StoppedState), "state() was not set to StoppedState before start()");
QVERIFY2((audioOutput.error() == QAudio::NoError), "error() was not set to QAudio::NoError before start()");
- QSignalSpy stateSignal(&audioOutput, SIGNAL(stateChanged(QAudio::State)));
+ QSignalSpy stateSignal(&audioOutput, &QAudioSink::stateChanged);
audioOutput.stop();
// Check that no state transition occurred
@@ -418,7 +418,7 @@ void tst_QAudioSink::suspendWhileStopped()
QVERIFY2((audioOutput.state() == QAudio::StoppedState), "state() was not set to StoppedState before start()");
QVERIFY2((audioOutput.error() == QAudio::NoError), "error() was not set to QAudio::NoError before start()");
- QSignalSpy stateSignal(&audioOutput, SIGNAL(stateChanged(QAudio::State)));
+ QSignalSpy stateSignal(&audioOutput, &QAudioSink::stateChanged);
audioOutput.suspend();
// Check that no state transition occurred
@@ -438,7 +438,7 @@ void tst_QAudioSink::resumeWhileStopped()
QVERIFY2((audioOutput.state() == QAudio::StoppedState), "state() was not set to StoppedState before start()");
QVERIFY2((audioOutput.error() == QAudio::NoError), "error() was not set to QAudio::NoError before start()");
- QSignalSpy stateSignal(&audioOutput, SIGNAL(stateChanged(QAudio::State)));
+ QSignalSpy stateSignal(&audioOutput, &QAudioSink::stateChanged);
audioOutput.resume();
// Check that no state transition occurred
@@ -455,7 +455,7 @@ void tst_QAudioSink::pull()
audioOutput.setVolume(0.1f);
- QSignalSpy stateSignal(&audioOutput, SIGNAL(stateChanged(QAudio::State)));
+ QSignalSpy stateSignal(&audioOutput, &QAudioSink::stateChanged);
// Check that we are in the default state before calling start
QVERIFY2((audioOutput.state() == QAudio::StoppedState), "state() was not set to StoppedState before start()");
@@ -514,7 +514,7 @@ void tst_QAudioSink::pullSuspendResume()
audioOutput.setVolume(0.1f);
- QSignalSpy stateSignal(&audioOutput, SIGNAL(stateChanged(QAudio::State)));
+ QSignalSpy stateSignal(&audioOutput, &QAudioSink::stateChanged);
// Check that we are in the default state before calling start
QVERIFY2((audioOutput.state() == QAudio::StoppedState), "state() was not set to StoppedState before start()");
@@ -627,7 +627,7 @@ void tst_QAudioSink::pullResumeFromUnderrun()
AudioPullSource audioSource;
QAudioSink audioOutput(format, this);
- QSignalSpy stateSignal(&audioOutput, SIGNAL(stateChanged(QAudio::State)));
+ QSignalSpy stateSignal(&audioOutput, &QAudioSink::stateChanged);
audioSource.open(QIODeviceBase::ReadOnly);
audioSource.available = chunkSize;
@@ -673,7 +673,7 @@ void tst_QAudioSink::push()
audioOutput.setVolume(0.1f);
- QSignalSpy stateSignal(&audioOutput, SIGNAL(stateChanged(QAudio::State)));
+ QSignalSpy stateSignal(&audioOutput, &QAudioSink::stateChanged);
// Check that we are in the default state before calling start
QVERIFY2((audioOutput.state() == QAudio::StoppedState), "state() was not set to StoppedState before start()");
@@ -754,7 +754,7 @@ void tst_QAudioSink::pushSuspendResume()
audioOutput.setVolume(0.1f);
- QSignalSpy stateSignal(&audioOutput, SIGNAL(stateChanged(QAudio::State)));
+ QSignalSpy stateSignal(&audioOutput, &QAudioSink::stateChanged);
// Check that we are in the default state before calling start
QVERIFY2((audioOutput.state() == QAudio::StoppedState), "state() was not set to StoppedState before start()");
@@ -922,7 +922,7 @@ void tst_QAudioSink::pushUnderrun()
audioOutput.setVolume(0.1f);
- QSignalSpy stateSignal(&audioOutput, SIGNAL(stateChanged(QAudio::State)));
+ QSignalSpy stateSignal(&audioOutput, &QAudioSink::stateChanged);
// Check that we are in the default state before calling start
QVERIFY2((audioOutput.state() == QAudio::StoppedState), "state() was not set to StoppedState before start()");
diff --git a/tests/auto/integration/qaudiosource/tst_qaudiosource.cpp b/tests/auto/integration/qaudiosource/tst_qaudiosource.cpp
index 3bf57e78b..ae100a08b 100644
--- a/tests/auto/integration/qaudiosource/tst_qaudiosource.cpp
+++ b/tests/auto/integration/qaudiosource/tst_qaudiosource.cpp
@@ -292,7 +292,7 @@ void tst_QAudioSource::stopWhileStopped()
QVERIFY2((audioInput.state() == QAudio::StoppedState), "state() was not set to StoppedState before start()");
QVERIFY2((audioInput.error() == QAudio::NoError), "error() was not set to QAudio::NoError before start()");
- QSignalSpy stateSignal(&audioInput, SIGNAL(stateChanged(QAudio::State)));
+ QSignalSpy stateSignal(&audioInput, &QAudioSource::stateChanged);
audioInput.stop();
// Check that no state transition occurred
@@ -312,7 +312,7 @@ void tst_QAudioSource::suspendWhileStopped()
QVERIFY2((audioInput.state() == QAudio::StoppedState), "state() was not set to StoppedState before start()");
QVERIFY2((audioInput.error() == QAudio::NoError), "error() was not set to QAudio::NoError before start()");
- QSignalSpy stateSignal(&audioInput, SIGNAL(stateChanged(QAudio::State)));
+ QSignalSpy stateSignal(&audioInput, &QAudioSource::stateChanged);
audioInput.suspend();
// Check that no state transition occurred
@@ -332,7 +332,7 @@ void tst_QAudioSource::resumeWhileStopped()
QVERIFY2((audioInput.state() == QAudio::StoppedState), "state() was not set to StoppedState before start()");
QVERIFY2((audioInput.error() == QAudio::NoError), "error() was not set to QAudio::NoError before start()");
- QSignalSpy stateSignal(&audioInput, SIGNAL(stateChanged(QAudio::State)));
+ QSignalSpy stateSignal(&audioInput, &QAudioSource::stateChanged);
audioInput.resume();
// Check that no state transition occurred
@@ -347,7 +347,7 @@ void tst_QAudioSource::pull()
QAudioSource audioInput(audioFormat, this);
- QSignalSpy stateSignal(&audioInput, SIGNAL(stateChanged(QAudio::State)));
+ QSignalSpy stateSignal(&audioInput, &QAudioSource::stateChanged);
// Check that we are in the default state before calling start
QVERIFY2((audioInput.state() == QAudio::StoppedState), "state() was not set to StoppedState before start()");
@@ -416,7 +416,7 @@ void tst_QAudioSource::pullSuspendResume()
QAudioSource audioInput(audioFormat, this);
- QSignalSpy stateSignal(&audioInput, SIGNAL(stateChanged(QAudio::State)));
+ QSignalSpy stateSignal(&audioInput, &QAudioSource::stateChanged);
// Check that we are in the default state before calling start
QVERIFY2((audioInput.state() == QAudio::StoppedState), "state() was not set to StoppedState before start()");
@@ -518,7 +518,7 @@ void tst_QAudioSource::push()
QAudioSource audioInput(audioFormat, this);
- QSignalSpy stateSignal(&audioInput, SIGNAL(stateChanged(QAudio::State)));
+ QSignalSpy stateSignal(&audioInput, &QAudioSource::stateChanged);
// Check that we are in the default state before calling start
QVERIFY2((audioInput.state() == QAudio::StoppedState), "state() was not set to StoppedState before start()");
@@ -609,7 +609,7 @@ void tst_QAudioSource::pushSuspendResume()
audioInput.setBufferSize(audioFormat.bytesForDuration(100000));
- QSignalSpy stateSignal(&audioInput, SIGNAL(stateChanged(QAudio::State)));
+ QSignalSpy stateSignal(&audioInput, &QAudioSource::stateChanged);
// Check that we are in the default state before calling start
QVERIFY2((audioInput.state() == QAudio::StoppedState), "state() was not set to StoppedState before start()");
@@ -735,7 +735,7 @@ void tst_QAudioSource::reset()
{
QAudioSource audioInput(audioFormat, this);
- QSignalSpy stateSignal(&audioInput, SIGNAL(stateChanged(QAudio::State)));
+ QSignalSpy stateSignal(&audioInput, &QAudioSource::stateChanged);
// Check that we are in the default state before calling start
QVERIFY2((audioInput.state() == QAudio::StoppedState), "state() was not set to StoppedState before start()");
@@ -765,7 +765,7 @@ void tst_QAudioSource::reset()
QBuffer buffer;
buffer.open(QIODevice::WriteOnly);
- QSignalSpy stateSignal(&audioInput, SIGNAL(stateChanged(QAudio::State)));
+ QSignalSpy stateSignal(&audioInput, &QAudioSource::stateChanged);
// Check that we are in the default state before calling start
QVERIFY2((audioInput.state() == QAudio::StoppedState), "state() was not set to StoppedState before start()");
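The qaudiosink and qaudiosource changes above are a pure modernization: string-based SIGNAL() connections in QSignalSpy are replaced with pointer-to-member-function syntax, which the compiler verifies. The overloaded error(QAudioDecoder::Error) spies seen earlier keep the string form, presumably because the signal name clashes with the error() getter and would otherwise need qOverload. A minimal sketch of the two spellings (illustrative only, not part of the patch):

    #include <QtMultimedia/qaudiosink.h>
    #include <QtTest/qsignalspy.h>

    void spyStyles(QAudioSink &sink)
    {
        // Runtime-resolved: a typo in the signature only shows up when the test runs.
        QSignalSpy oldStyle(&sink, SIGNAL(stateChanged(QAudio::State)));

        // Compile-time checked form used throughout this patch.
        QSignalSpy newStyle(&sink, &QAudioSink::stateChanged);

        Q_UNUSED(oldStyle);
        Q_UNUSED(newStyle);
    }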
diff --git a/tests/auto/integration/qcamerabackend/tst_qcamerabackend.cpp b/tests/auto/integration/qcamerabackend/tst_qcamerabackend.cpp
index 43e1a9a31..63790dfd2 100644
--- a/tests/auto/integration/qcamerabackend/tst_qcamerabackend.cpp
+++ b/tests/auto/integration/qcamerabackend/tst_qcamerabackend.cpp
@@ -115,9 +115,6 @@ public Q_SLOTS:
void tst_QCameraBackend::initTestCase()
{
-#ifdef Q_OS_ANDROID
- QSKIP("SKIP initTestCase on CI, because of QTBUG-118571");
-#endif
QCamera camera;
noCamera = !camera.isAvailable();
}
@@ -216,8 +213,8 @@ void tst_QCameraBackend::testCameraActive()
session.setCamera(&camera);
session.setImageCapture(&imageCapture);
- QSignalSpy errorSignal(&camera, SIGNAL(errorOccurred(QCamera::Error, const QString &)));
- QSignalSpy activeChangedSignal(&camera, SIGNAL(activeChanged(bool)));
+ QSignalSpy errorSignal(&camera, &QCamera::errorOccurred);
+ QSignalSpy activeChangedSignal(&camera, &QCamera::activeChanged);
QCOMPARE(camera.isActive(), false);
@@ -280,7 +277,7 @@ void tst_QCameraBackend::testCameraFormat()
if (videoFormats.isEmpty())
QSKIP("No Camera available, skipping test.");
QCameraFormat cameraFormat = videoFormats.first();
- QSignalSpy spy(&camera, SIGNAL(cameraFormatChanged()));
+ QSignalSpy spy(&camera, &QCamera::cameraFormatChanged);
QVERIFY(spy.size() == 0);
QMediaCaptureSession session;
@@ -344,9 +341,9 @@ void tst_QCameraBackend::testCameraCapture()
QVERIFY(!imageCapture.isReadyForCapture());
- QSignalSpy capturedSignal(&imageCapture, SIGNAL(imageCaptured(int,QImage)));
- QSignalSpy savedSignal(&imageCapture, SIGNAL(imageSaved(int,QString)));
- QSignalSpy errorSignal(&imageCapture, SIGNAL(errorOccurred(int,QImageCapture::Error,const QString&)));
+ QSignalSpy capturedSignal(&imageCapture, &QImageCapture::imageCaptured);
+ QSignalSpy savedSignal(&imageCapture, &QImageCapture::imageSaved);
+ QSignalSpy errorSignal(&imageCapture, &QImageCapture::errorOccurred);
imageCapture.captureToFile();
QTRY_COMPARE(errorSignal.size(), 1);
@@ -403,10 +400,10 @@ void tst_QCameraBackend::testCaptureToBuffer()
QTRY_VERIFY(camera.isActive());
- QSignalSpy capturedSignal(&imageCapture, SIGNAL(imageCaptured(int,QImage)));
- QSignalSpy imageAvailableSignal(&imageCapture, SIGNAL(imageAvailable(int,QVideoFrame)));
- QSignalSpy savedSignal(&imageCapture, SIGNAL(imageSaved(int,QString)));
- QSignalSpy errorSignal(&imageCapture, SIGNAL(errorOccurred(int,QImageCapture::Error,const QString&)));
+ QSignalSpy capturedSignal(&imageCapture, &QImageCapture::imageCaptured);
+ QSignalSpy imageAvailableSignal(&imageCapture, &QImageCapture::imageAvailable);
+ QSignalSpy savedSignal(&imageCapture, &QImageCapture::imageSaved);
+ QSignalSpy errorSignal(&imageCapture, &QImageCapture::errorOccurred);
camera.start();
QTRY_VERIFY(imageCapture.isReadyForCapture());
@@ -448,8 +445,14 @@ void tst_QCameraBackend::testCameraCaptureMetadata()
camera.setFlashMode(QCamera::FlashOff);
- QSignalSpy metadataSignal(&imageCapture, SIGNAL(imageMetadataAvailable(int,const QMediaMetaData&)));
- QSignalSpy savedSignal(&imageCapture, SIGNAL(imageSaved(int,QString)));
+ QMediaMetaData referenceMetaData;
+ referenceMetaData.insert(QMediaMetaData::Title, QStringLiteral("Title"));
+ referenceMetaData.insert(QMediaMetaData::Language, QVariant::fromValue(QLocale::German));
+ referenceMetaData.insert(QMediaMetaData::Description, QStringLiteral("Description"));
+ imageCapture.setMetaData(referenceMetaData);
+
+ QSignalSpy metadataSignal(&imageCapture, &QImageCapture::imageMetadataAvailable);
+ QSignalSpy savedSignal(&imageCapture, &QImageCapture::imageSaved);
camera.start();
@@ -460,7 +463,19 @@ void tst_QCameraBackend::testCameraCaptureMetadata()
int id = imageCapture.captureToFile(tmpFile);
QTRY_VERIFY(!savedSignal.isEmpty());
QVERIFY(!metadataSignal.isEmpty());
+
QCOMPARE(metadataSignal.first().first().toInt(), id);
+ QMediaMetaData receivedMetaData = metadataSignal.first()[1].value<QMediaMetaData>();
+
+ if (isGStreamerPlatform()) {
+ for (auto key : {
+ QMediaMetaData::Title,
+ QMediaMetaData::Language,
+ QMediaMetaData::Description,
+ })
+ QCOMPARE(receivedMetaData[key], referenceMetaData[key]);
+ QVERIFY(receivedMetaData[QMediaMetaData::Resolution].isValid());
+ }
}
void tst_QCameraBackend::testExposureCompensation()
@@ -472,9 +487,9 @@ void tst_QCameraBackend::testExposureCompensation()
QCamera camera;
session.setCamera(&camera);
- QSignalSpy exposureCompensationSignal(&camera, SIGNAL(exposureCompensationChanged(float)));
+ QSignalSpy exposureCompensationSignal(&camera, &QCamera::exposureCompensationChanged);
- //it should be possible to set exposure parameters in Unloaded state
+ // it should be possible to set exposure parameters in Unloaded state
QCOMPARE(camera.exposureCompensation(), 0.);
if (!(camera.supportedFeatures() & QCamera::Feature::ExposureCompensation))
return;
@@ -571,10 +586,10 @@ void tst_QCameraBackend::testVideoRecording()
QMediaRecorder recorder;
session.setRecorder(&recorder);
- QSignalSpy errorSignal(camera.data(), SIGNAL(errorOccurred(QCamera::Error, const QString &)));
- QSignalSpy recorderErrorSignal(&recorder, SIGNAL(errorOccurred(QMediaRecorder::Error, const QString &)));
- QSignalSpy recorderStateChanged(&recorder, SIGNAL(recorderStateChanged(RecorderState)));
- QSignalSpy durationChanged(&recorder, SIGNAL(durationChanged(qint64)));
+ QSignalSpy errorSignal(camera.data(), &QCamera::errorOccurred);
+ QSignalSpy recorderErrorSignal(&recorder, &QMediaRecorder::errorOccurred);
+ QSignalSpy recorderStateChanged(&recorder, &QMediaRecorder::recorderStateChanged);
+ QSignalSpy durationChanged(&recorder, &QMediaRecorder::durationChanged);
recorder.setVideoResolution(320, 240);
@@ -643,10 +658,10 @@ void tst_QCameraBackend::testNativeMetadata()
QMediaRecorder recorder;
session.setRecorder(&recorder);
- QSignalSpy errorSignal(&camera, SIGNAL(errorOccurred(QCamera::Error, const QString &)));
- QSignalSpy recorderErrorSignal(&recorder, SIGNAL(errorOccurred(Error, const QString &)));
- QSignalSpy recorderStateChanged(&recorder, SIGNAL(recorderStateChanged(RecorderState)));
- QSignalSpy durationChanged(&recorder, SIGNAL(durationChanged(qint64)));
+ QSignalSpy errorSignal(&camera, &QCamera::errorOccurred);
+ QSignalSpy recorderErrorSignal(&recorder, &QMediaRecorder::errorOccurred);
+ QSignalSpy recorderStateChanged(&recorder, &QMediaRecorder::recorderStateChanged);
+ QSignalSpy durationChanged(&recorder, &QMediaRecorder::durationChanged);
camera.start();
if (device.isNull()) {
@@ -687,8 +702,6 @@ void tst_QCameraBackend::testNativeMetadata()
QVERIFY(!fileName.isEmpty());
QVERIFY(QFileInfo(fileName).size() > 0);
- QSKIP_GSTREAMER("QTBUG-124182: spurious failure while retrieving the metadata");
-
// QMediaRecorder::metaData() can only test that QMediaMetaData is set properly on the recorder.
// Use QMediaPlayer to test that the native metadata is properly set on the track
QAudioOutput output;
@@ -700,15 +713,20 @@ void tst_QCameraBackend::testNativeMetadata()
player.setSource(QUrl::fromLocalFile(fileName));
player.play();
- QTRY_VERIFY(metadataChangedSpy.size() > 0);
+ int metadataChangedRequiredCount = isGStreamerPlatform() ? 2 : 1;
+
+ QTRY_VERIFY(metadataChangedSpy.size() >= metadataChangedRequiredCount);
- QCOMPARE(player.metaData().value(QMediaMetaData::Title).toString(), metaData.value(QMediaMetaData::Title).toString());
+ QCOMPARE(player.metaData().value(QMediaMetaData::Title).toString(),
+ metaData.value(QMediaMetaData::Title).toString());
auto lang = player.metaData().value(QMediaMetaData::Language).value<QLocale::Language>();
if (lang != QLocale::AnyLanguage)
QCOMPARE(lang, metaData.value(QMediaMetaData::Language).value<QLocale::Language>());
QCOMPARE(player.metaData().value(QMediaMetaData::Description).toString(), metaData.value(QMediaMetaData::Description).toString());
+ QVERIFY(player.metaData().value(QMediaMetaData::Resolution).isValid());
- metadataChangedSpy.clear();
+ if (isGStreamerPlatform())
+ QVERIFY(player.metaData().value(QMediaMetaData::Date).isValid());
player.stop();
player.setSource({});
diff --git a/tests/auto/integration/qmediacapturesession/tst_qmediacapturesession.cpp b/tests/auto/integration/qmediacapturesession/tst_qmediacapturesession.cpp
index d22d0a3df..0a42851e5 100644
--- a/tests/auto/integration/qmediacapturesession/tst_qmediacapturesession.cpp
+++ b/tests/auto/integration/qmediacapturesession/tst_qmediacapturesession.cpp
@@ -21,6 +21,10 @@
#include <qaudiodevice.h>
#include <qaudiodecoder.h>
#include <qaudiobuffer.h>
+#include <qscreencapture.h>
+#include <qwindowcapture.h>
+#include <qaudiobufferinput.h>
+#include <qvideoframeinput.h>
#include <qcamera.h>
#include <QMediaFormat>
@@ -43,14 +47,6 @@ class tst_QMediaCaptureSession: public QObject
private slots:
- void initTestCase()
- {
- if (qEnvironmentVariable("QTEST_ENVIRONMENT").toLower() == "ci") {
-#ifdef Q_OS_ANDROID
- QSKIP("SKIP initTestCase on CI, because of QTBUG-118571");
-#endif
- }
- }
void testAudioMute();
void stress_test_setup_and_teardown();
void stress_test_setup_and_teardown_keep_session();
@@ -67,9 +63,16 @@ private slots:
void can_change_AudioInput_during_recording();
void disconnects_deleted_AudioInput();
void can_move_AudioInput_between_sessions();
+
void disconnects_deleted_AudioOutput();
void can_move_AudioOutput_between_sessions_and_player();
+ void disconnects_deleted_AudioBufferInput();
+ void can_move_AudioBufferInput_between_sessions();
+
+ void disconnects_deleted_VideoFrameInput();
+ void can_move_VideoFrameInput_between_sessions();
+
void can_add_and_remove_Camera();
void can_move_Camera_between_sessions();
void can_disconnect_Camera_when_recording();
@@ -104,8 +107,8 @@ void tst_QMediaCaptureSession::recordOk(QMediaCaptureSession &session)
QMediaRecorder recorder;
session.setRecorder(&recorder);
- QSignalSpy recorderErrorSignal(&recorder, SIGNAL(errorOccurred(Error, const QString &)));
- QSignalSpy durationChanged(&recorder, SIGNAL(durationChanged(qint64)));
+ QSignalSpy recorderErrorSignal(&recorder, &QMediaRecorder::errorOccurred);
+ QSignalSpy durationChanged(&recorder, &QMediaRecorder::durationChanged);
recorder.record();
QTRY_VERIFY_WITH_TIMEOUT(recorder.recorderState() == QMediaRecorder::RecordingState, 2000);
@@ -124,7 +127,7 @@ void tst_QMediaCaptureSession::recordOk(QMediaCaptureSession &session)
void tst_QMediaCaptureSession::recordFail(QMediaCaptureSession &session)
{
QMediaRecorder recorder;
- QSignalSpy recorderErrorSignal(&recorder, SIGNAL(errorOccurred(Error, const QString &)));
+ QSignalSpy recorderErrorSignal(&recorder, &QMediaRecorder::errorOccurred);
session.setRecorder(&recorder);
recorder.record();
@@ -292,7 +295,7 @@ void tst_QMediaCaptureSession::record_video_without_preview()
session.setRecorder(&recorder);
- QSignalSpy cameraChanged(&session, SIGNAL(cameraChanged()));
+ QSignalSpy cameraChanged(&session, &QMediaCaptureSession::cameraChanged);
session.setCamera(&camera);
camera.setActive(true);
@@ -317,8 +320,8 @@ void tst_QMediaCaptureSession::can_add_and_remove_AudioInput_with_and_without_Au
QSKIP("No audio input available");
QMediaCaptureSession session;
- QSignalSpy audioInputChanged(&session, SIGNAL(audioInputChanged()));
- QSignalSpy audioOutputChanged(&session, SIGNAL(audioOutputChanged()));
+ QSignalSpy audioInputChanged(&session, &QMediaCaptureSession::audioInputChanged);
+ QSignalSpy audioOutputChanged(&session, &QMediaCaptureSession::audioOutputChanged);
session.setAudioInput(&input);
QTRY_COMPARE(audioInputChanged.size(), 1);
@@ -349,10 +352,10 @@ void tst_QMediaCaptureSession::can_change_AudioDevices_on_attached_AudioInput()
QSKIP("Two audio inputs are not available");
QAudioInput input(audioInputs[0]);
- QSignalSpy deviceChanged(&input, SIGNAL(deviceChanged()));
+ QSignalSpy deviceChanged(&input, &QAudioInput::deviceChanged);
QMediaCaptureSession session;
- QSignalSpy audioInputChanged(&session, SIGNAL(audioInputChanged()));
+ QSignalSpy audioInputChanged(&session, &QMediaCaptureSession::audioInputChanged);
session.setAudioInput(&input);
QTRY_COMPARE(audioInputChanged.size(), 1);
@@ -384,9 +387,9 @@ void tst_QMediaCaptureSession::can_change_AudioInput_during_recording()
session.setRecorder(&recorder);
- QSignalSpy audioInputChanged(&session, SIGNAL(audioInputChanged()));
- QSignalSpy recorderErrorSignal(&recorder, SIGNAL(errorOccurred(Error, const QString &)));
- QSignalSpy durationChanged(&recorder, SIGNAL(durationChanged(qint64)));
+ QSignalSpy audioInputChanged(&session, &QMediaCaptureSession::audioInputChanged);
+ QSignalSpy recorderErrorSignal(&recorder, &QMediaRecorder::errorOccurred);
+ QSignalSpy durationChanged(&recorder, &QMediaRecorder::durationChanged);
session.setAudioInput(&input);
QTRY_COMPARE(audioInputChanged.size(), 1);
@@ -418,7 +421,7 @@ void tst_QMediaCaptureSession::disconnects_deleted_AudioInput()
QSKIP("No audio input available");
QMediaCaptureSession session;
- QSignalSpy audioInputChanged(&session, SIGNAL(audioInputChanged()));
+ QSignalSpy audioInputChanged(&session, &QMediaCaptureSession::audioInputChanged);
{
QAudioInput input;
session.setAudioInput(&input);
@@ -435,13 +438,13 @@ void tst_QMediaCaptureSession::can_move_AudioInput_between_sessions()
QMediaCaptureSession session0;
QMediaCaptureSession session1;
- QSignalSpy audioInputChanged0(&session0, SIGNAL(audioInputChanged()));
- QSignalSpy audioInputChanged1(&session1, SIGNAL(audioInputChanged()));
+ QSignalSpy audioInputChanged0(&session0, &QMediaCaptureSession::audioInputChanged);
+ QSignalSpy audioInputChanged1(&session1, &QMediaCaptureSession::audioInputChanged);
QAudioInput input;
{
QMediaCaptureSession session2;
- QSignalSpy audioInputChanged2(&session2, SIGNAL(audioInputChanged()));
+ QSignalSpy audioInputChanged2(&session2, &QMediaCaptureSession::audioInputChanged);
session2.setAudioInput(&input);
QTRY_COMPARE(audioInputChanged2.size(), 1);
}
@@ -462,7 +465,7 @@ void tst_QMediaCaptureSession::disconnects_deleted_AudioOutput()
QSKIP("No audio output available");
QMediaCaptureSession session;
- QSignalSpy audioOutputChanged(&session, SIGNAL(audioOutputChanged()));
+ QSignalSpy audioOutputChanged(&session, &QMediaCaptureSession::audioOutputChanged);
{
QAudioOutput output;
session.setAudioOutput(&output);
@@ -482,13 +485,13 @@ void tst_QMediaCaptureSession::can_move_AudioOutput_between_sessions_and_player(
QMediaCaptureSession session0;
QMediaCaptureSession session1;
QMediaPlayer player;
- QSignalSpy audioOutputChanged0(&session0, SIGNAL(audioOutputChanged()));
- QSignalSpy audioOutputChanged1(&session1, SIGNAL(audioOutputChanged()));
- QSignalSpy audioOutputChangedPlayer(&player, SIGNAL(audioOutputChanged()));
+ QSignalSpy audioOutputChanged0(&session0, &QMediaCaptureSession::audioOutputChanged);
+ QSignalSpy audioOutputChanged1(&session1, &QMediaCaptureSession::audioOutputChanged);
+ QSignalSpy audioOutputChangedPlayer(&player, &QMediaPlayer::audioOutputChanged);
{
QMediaCaptureSession session2;
- QSignalSpy audioOutputChanged2(&session2, SIGNAL(audioOutputChanged()));
+ QSignalSpy audioOutputChanged2(&session2, &QMediaCaptureSession::audioOutputChanged);
session2.setAudioOutput(&output);
QTRY_COMPARE(audioOutputChanged2.size(), 1);
}
@@ -518,6 +521,89 @@ void tst_QMediaCaptureSession::can_move_AudioOutput_between_sessions_and_player(
QVERIFY(player.audioOutput() == nullptr);
}
+void tst_QMediaCaptureSession::disconnects_deleted_AudioBufferInput()
+{
+ QMediaCaptureSession session;
+ QSignalSpy audioBufferInputChanged(&session, &QMediaCaptureSession::audioBufferInputChanged);
+ {
+ QAudioBufferInput input;
+ session.setAudioBufferInput(&input);
+ QTRY_COMPARE(audioBufferInputChanged.size(), 1);
+ }
+ QCOMPARE(session.audioBufferInput(), nullptr);
+ QCOMPARE(audioBufferInputChanged.size(), 2);
+}
+
+void tst_QMediaCaptureSession::can_move_AudioBufferInput_between_sessions()
+{
+ QMediaCaptureSession session0;
+ QMediaCaptureSession session1;
+ QSignalSpy audioBufferInputChanged0(&session0, &QMediaCaptureSession::audioBufferInputChanged);
+ QSignalSpy audioBufferInputChanged1(&session1, &QMediaCaptureSession::audioBufferInputChanged);
+
+ QAudioBufferInput input;
+ {
+ QMediaCaptureSession session2;
+ QSignalSpy audioBufferInputChanged2(&session2,
+ &QMediaCaptureSession::audioBufferInputChanged);
+ session2.setAudioBufferInput(&input);
+ QCOMPARE(audioBufferInputChanged2.size(), 1);
+ }
+ session0.setAudioBufferInput(&input);
+ QCOMPARE(audioBufferInputChanged0.size(), 1);
+ QCOMPARE(session0.audioBufferInput(), &input);
+ QCOMPARE(input.captureSession(), &session0);
+
+ session1.setAudioBufferInput(&input);
+
+ QCOMPARE(audioBufferInputChanged0.size(), 2);
+ QCOMPARE(session0.audioBufferInput(), nullptr);
+ QCOMPARE(audioBufferInputChanged1.size(), 1);
+ QCOMPARE(session1.audioBufferInput(), &input);
+ QCOMPARE(input.captureSession(), &session1);
+}
+
+void tst_QMediaCaptureSession::disconnects_deleted_VideoFrameInput()
+{
+ QMediaCaptureSession session;
+ QSignalSpy videoFrameInputChanged(&session, &QMediaCaptureSession::videoFrameInputChanged);
+ {
+ QVideoFrameInput input;
+ session.setVideoFrameInput(&input);
+ QTRY_COMPARE(videoFrameInputChanged.size(), 1);
+ }
+ QCOMPARE(session.videoFrameInput(), nullptr);
+ QCOMPARE(videoFrameInputChanged.size(), 2);
+}
+
+void tst_QMediaCaptureSession::can_move_VideoFrameInput_between_sessions()
+{
+ QMediaCaptureSession session0;
+ QMediaCaptureSession session1;
+ QSignalSpy videoFrameInputChanged0(&session0, &QMediaCaptureSession::videoFrameInputChanged);
+ QSignalSpy videoFrameInputChanged1(&session1, &QMediaCaptureSession::videoFrameInputChanged);
+
+ QVideoFrameInput input;
+ {
+ QMediaCaptureSession session2;
+ QSignalSpy videoFrameInputChanged2(&session2,
+ &QMediaCaptureSession::videoFrameInputChanged);
+ session2.setVideoFrameInput(&input);
+ QCOMPARE(videoFrameInputChanged2.size(), 1);
+ }
+ session0.setVideoFrameInput(&input);
+ QCOMPARE(videoFrameInputChanged0.size(), 1);
+ QCOMPARE(session0.videoFrameInput(), &input);
+ QCOMPARE(input.captureSession(), &session0);
+
+ session1.setVideoFrameInput(&input);
+
+ QCOMPARE(videoFrameInputChanged0.size(), 2);
+ QCOMPARE(session0.videoFrameInput(), nullptr);
+ QCOMPARE(videoFrameInputChanged1.size(), 1);
+ QCOMPARE(session1.videoFrameInput(), &input);
+ QCOMPARE(input.captureSession(), &session1);
+}
void tst_QMediaCaptureSession::can_add_and_remove_Camera()
{
@@ -531,7 +617,7 @@ void tst_QMediaCaptureSession::can_add_and_remove_Camera()
session.setRecorder(&recorder);
- QSignalSpy cameraChanged(&session, SIGNAL(cameraChanged()));
+ QSignalSpy cameraChanged(&session, &QMediaCaptureSession::cameraChanged);
session.setCamera(&camera);
camera.setActive(true);
@@ -552,13 +638,13 @@ void tst_QMediaCaptureSession::can_move_Camera_between_sessions()
{
QMediaCaptureSession session0;
QMediaCaptureSession session1;
- QSignalSpy cameraChanged0(&session0, SIGNAL(cameraChanged()));
- QSignalSpy cameraChanged1(&session1, SIGNAL(cameraChanged()));
+ QSignalSpy cameraChanged0(&session0, &QMediaCaptureSession::cameraChanged);
+ QSignalSpy cameraChanged1(&session1, &QMediaCaptureSession::cameraChanged);
{
QCamera camera;
{
QMediaCaptureSession session2;
- QSignalSpy cameraChanged2(&session2, SIGNAL(cameraChanged()));
+ QSignalSpy cameraChanged2(&session2, &QMediaCaptureSession::cameraChanged);
session2.setCamera(&camera);
QTRY_COMPARE(cameraChanged2.size(), 1);
}
@@ -592,9 +678,9 @@ void tst_QMediaCaptureSession::can_disconnect_Camera_when_recording()
session.setRecorder(&recorder);
- QSignalSpy cameraChanged(&session, SIGNAL(cameraChanged()));
- QSignalSpy recorderErrorSignal(&recorder, SIGNAL(errorOccurred(Error, const QString &)));
- QSignalSpy durationChanged(&recorder, SIGNAL(durationChanged(qint64)));
+ QSignalSpy cameraChanged(&session, &QMediaCaptureSession::cameraChanged);
+ QSignalSpy recorderErrorSignal(&recorder, &QMediaRecorder::errorOccurred);
+ QSignalSpy durationChanged(&recorder, &QMediaRecorder::durationChanged);
camera.setActive(true);
session.setCamera(&camera);
@@ -634,7 +720,7 @@ void tst_QMediaCaptureSession::can_add_and_remove_different_Cameras()
session.setRecorder(&recorder);
- QSignalSpy cameraChanged(&session, SIGNAL(cameraChanged()));
+ QSignalSpy cameraChanged(&session, &QMediaCaptureSession::cameraChanged);
camera.setActive(true);
session.setCamera(&camera);
@@ -667,8 +753,8 @@ void tst_QMediaCaptureSession::can_change_CameraDevice_on_attached_Camera()
session.setRecorder(&recorder);
- QSignalSpy cameraDeviceChanged(&camera, SIGNAL(cameraDeviceChanged()));
- QSignalSpy cameraChanged(&session, SIGNAL(cameraChanged()));
+ QSignalSpy cameraDeviceChanged(&camera, &QCamera::cameraDeviceChanged);
+ QSignalSpy cameraChanged(&session, &QMediaCaptureSession::cameraChanged);
session.setCamera(&camera);
QTRY_COMPARE(cameraChanged.size(), 1);
@@ -704,8 +790,8 @@ void tst_QMediaCaptureSession::can_change_VideoOutput_with_and_without_camera()
QMediaCaptureSession session;
- QSignalSpy videoOutputChanged(&session, SIGNAL(videoOutputChanged()));
- QSignalSpy cameraChanged(&session, SIGNAL(cameraChanged()));
+ QSignalSpy videoOutputChanged(&session, &QMediaCaptureSession::videoOutputChanged);
+ QSignalSpy cameraChanged(&session, &QMediaCaptureSession::cameraChanged);
session.setCamera(&camera);
QTRY_COMPARE(cameraChanged.size(), 1);
@@ -740,10 +826,10 @@ void tst_QMediaCaptureSession::can_change_VideoOutput_when_recording()
session.setRecorder(&recorder);
- QSignalSpy cameraChanged(&session, SIGNAL(cameraChanged()));
- QSignalSpy recorderErrorSignal(&recorder, SIGNAL(errorOccurred(Error, const QString &)));
- QSignalSpy durationChanged(&recorder, SIGNAL(durationChanged(qint64)));
- QSignalSpy videoOutputChanged(&session, SIGNAL(videoOutputChanged()));
+ QSignalSpy cameraChanged(&session, &QMediaCaptureSession::cameraChanged);
+ QSignalSpy recorderErrorSignal(&recorder, &QMediaRecorder::errorOccurred);
+ QSignalSpy durationChanged(&recorder, &QMediaRecorder::durationChanged);
+ QSignalSpy videoOutputChanged(&session, &QMediaCaptureSession::videoOutputChanged);
camera.setActive(true);
session.setCamera(&camera);
@@ -783,8 +869,8 @@ void tst_QMediaCaptureSession::can_add_and_remove_recorders()
QMediaRecorder recorder2;
QMediaCaptureSession session;
- QSignalSpy audioInputChanged(&session, SIGNAL(audioInputChanged()));
- QSignalSpy recorderChanged(&session, SIGNAL(recorderChanged()));
+ QSignalSpy audioInputChanged(&session, &QMediaCaptureSession::audioInputChanged);
+ QSignalSpy recorderChanged(&session, &QMediaCaptureSession::recorderChanged);
session.setAudioInput(&input);
QTRY_COMPARE(audioInputChanged.size(), 1);
@@ -806,13 +892,13 @@ void tst_QMediaCaptureSession::can_move_Recorder_between_sessions()
{
QMediaCaptureSession session0;
QMediaCaptureSession session1;
- QSignalSpy recorderChanged0(&session0, SIGNAL(recorderChanged()));
- QSignalSpy recorderChanged1(&session1, SIGNAL(recorderChanged()));
+ QSignalSpy recorderChanged0(&session0, &QMediaCaptureSession::recorderChanged);
+ QSignalSpy recorderChanged1(&session1, &QMediaCaptureSession::recorderChanged);
{
QMediaRecorder recorder;
{
QMediaCaptureSession session2;
- QSignalSpy recorderChanged2(&session2, SIGNAL(recorderChanged()));
+ QSignalSpy recorderChanged2(&session2, &QMediaCaptureSession::recorderChanged);
session2.setRecorder(&recorder);
QTRY_COMPARE(recorderChanged2.size(), 1);
}
@@ -849,7 +935,7 @@ void tst_QMediaCaptureSession::can_record_AudioInput_with_null_AudioDevice()
QAudioInput input(nullDevice);
QMediaCaptureSession session;
- QSignalSpy audioInputChanged(&session, SIGNAL(audioInputChanged()));
+ QSignalSpy audioInputChanged(&session, &QMediaCaptureSession::audioInputChanged);
session.setAudioInput(&input);
QTRY_COMPARE(audioInputChanged.size(), 1);
@@ -867,7 +953,7 @@ void tst_QMediaCaptureSession::can_record_Camera_with_null_CameraDevice()
QCamera camera(nullDevice);
QMediaCaptureSession session;
- QSignalSpy cameraChanged(&session, SIGNAL(cameraChanged()));
+ QSignalSpy cameraChanged(&session, &QMediaCaptureSession::cameraChanged);
session.setCamera(&camera);
QTRY_COMPARE(cameraChanged.size(), 1);
@@ -888,10 +974,10 @@ void tst_QMediaCaptureSession::recording_stops_when_recorder_removed()
QMediaRecorder recorder;
QMediaCaptureSession session;
- QSignalSpy audioInputChanged(&session, SIGNAL(audioInputChanged()));
- QSignalSpy recorderChanged(&session, SIGNAL(recorderChanged()));
- QSignalSpy recorderErrorSignal(&recorder, SIGNAL(errorOccurred(Error, const QString &)));
- QSignalSpy durationChanged(&recorder, SIGNAL(durationChanged(qint64)));
+ QSignalSpy audioInputChanged(&session, &QMediaCaptureSession::audioInputChanged);
+ QSignalSpy recorderChanged(&session, &QMediaCaptureSession::recorderChanged);
+ QSignalSpy recorderErrorSignal(&recorder, &QMediaRecorder::errorOccurred);
+ QSignalSpy durationChanged(&recorder, &QMediaRecorder::durationChanged);
session.setAudioInput(&input);
QTRY_COMPARE(audioInputChanged.size(), 1);
@@ -925,9 +1011,9 @@ void tst_QMediaCaptureSession::can_add_and_remove_ImageCapture()
QImageCapture capture;
QMediaCaptureSession session;
- QSignalSpy cameraChanged(&session, SIGNAL(cameraChanged()));
- QSignalSpy imageCaptureChanged(&session, SIGNAL(imageCaptureChanged()));
- QSignalSpy readyForCaptureChanged(&capture, SIGNAL(readyForCaptureChanged(bool)));
+ QSignalSpy cameraChanged(&session, &QMediaCaptureSession::cameraChanged);
+ QSignalSpy imageCaptureChanged(&session, &QMediaCaptureSession::imageCaptureChanged);
+ QSignalSpy readyForCaptureChanged(&capture, &QImageCapture::readyForCaptureChanged);
QVERIFY(!capture.isAvailable());
QVERIFY(!capture.isReadyForCapture());
@@ -965,15 +1051,17 @@ void tst_QMediaCaptureSession::can_add_and_remove_ImageCapture()
void tst_QMediaCaptureSession::can_move_ImageCapture_between_sessions()
{
+ QSKIP_GSTREAMER("QTBUG-124005: Spurious failure on CI");
+
QMediaCaptureSession session0;
QMediaCaptureSession session1;
- QSignalSpy imageCaptureChanged0(&session0, SIGNAL(imageCaptureChanged()));
- QSignalSpy imageCaptureChanged1(&session1, SIGNAL(imageCaptureChanged()));
+ QSignalSpy imageCaptureChanged0(&session0, &QMediaCaptureSession::imageCaptureChanged);
+ QSignalSpy imageCaptureChanged1(&session1, &QMediaCaptureSession::imageCaptureChanged);
{
QImageCapture imageCapture;
{
QMediaCaptureSession session2;
- QSignalSpy imageCaptureChanged2(&session2, SIGNAL(imageCaptureChanged()));
+ QSignalSpy imageCaptureChanged2(&session2, &QMediaCaptureSession::imageCaptureChanged);
session2.setImageCapture(&imageCapture);
QTRY_COMPARE(imageCaptureChanged2.size(), 1);
}
@@ -995,7 +1083,6 @@ void tst_QMediaCaptureSession::can_move_ImageCapture_between_sessions()
QVERIFY(session1.imageCapture() == nullptr);
}
-
void tst_QMediaCaptureSession::capture_is_not_available_when_Camera_is_null()
{
QCamera camera;
@@ -1006,9 +1093,9 @@ void tst_QMediaCaptureSession::capture_is_not_available_when_Camera_is_null()
QImageCapture capture;
QMediaCaptureSession session;
- QSignalSpy cameraChanged(&session, SIGNAL(cameraChanged()));
- QSignalSpy capturedSignal(&capture, SIGNAL(imageCaptured(int,QImage)));
- QSignalSpy readyForCaptureChanged(&capture, SIGNAL(readyForCaptureChanged(bool)));
+ QSignalSpy cameraChanged(&session, &QMediaCaptureSession::cameraChanged);
+ QSignalSpy capturedSignal(&capture, &QImageCapture::imageCaptured);
+ QSignalSpy readyForCaptureChanged(&capture, &QImageCapture::readyForCaptureChanged);
session.setImageCapture(&capture);
session.setCamera(&camera);
@@ -1043,12 +1130,12 @@ void tst_QMediaCaptureSession::can_add_ImageCapture_and_capture_during_recording
QMediaCaptureSession session;
QMediaRecorder recorder;
- QSignalSpy recorderChanged(&session, SIGNAL(recorderChanged()));
- QSignalSpy recorderErrorSignal(&recorder, SIGNAL(errorOccurred(Error, const QString &)));
- QSignalSpy durationChanged(&recorder, SIGNAL(durationChanged(qint64)));
- QSignalSpy imageCaptureChanged(&session, SIGNAL(imageCaptureChanged()));
- QSignalSpy readyForCaptureChanged(&capture, SIGNAL(readyForCaptureChanged(bool)));
- QSignalSpy capturedSignal(&capture, SIGNAL(imageCaptured(int,QImage)));
+ QSignalSpy recorderChanged(&session, &QMediaCaptureSession::recorderChanged);
+ QSignalSpy recorderErrorSignal(&recorder, &QMediaRecorder::errorOccurred);
+ QSignalSpy durationChanged(&recorder, &QMediaRecorder::durationChanged);
+ QSignalSpy imageCaptureChanged(&session, &QMediaCaptureSession::imageCaptureChanged);
+ QSignalSpy readyForCaptureChanged(&capture, &QImageCapture::readyForCaptureChanged);
+ QSignalSpy capturedSignal(&capture, &QImageCapture::imageCaptured);
session.setCamera(&camera);
camera.setActive(true);
@@ -1100,7 +1187,7 @@ void tst_QMediaCaptureSession::testAudioMute()
recorder.setOutputLocation(QStringLiteral("test"));
QSignalSpy spy(&audioInput, &QAudioInput::mutedChanged);
- QSignalSpy durationChanged(&recorder, SIGNAL(durationChanged(qint64)));
+ QSignalSpy durationChanged(&recorder, &QMediaRecorder::durationChanged);
QMediaFormat format;
format.setAudioCodec(QMediaFormat::AudioCodec::Wave);
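The new disconnects_deleted_* and can_move_* tests above exercise QAudioBufferInput and QVideoFrameInput, the push-style capture inputs introduced alongside this patch. A minimal sketch of how such an input feeds a recording session, assuming the Qt 6.8-style API these tests use (frame contents and output location are placeholders):

    #include <QtMultimedia/qmediacapturesession.h>
    #include <QtMultimedia/qmediarecorder.h>
    #include <QtMultimedia/qvideoframeinput.h>
    #include <QtMultimedia/qvideoframe.h>
    #include <QtCore/qurl.h>

    void recordGeneratedFrames(const QList<QVideoFrame> &frames, const QUrl &output)
    {
        QMediaCaptureSession session;
        QMediaRecorder recorder;
        QVideoFrameInput input;

        session.setRecorder(&recorder);
        // Setting the same input on another session detaches it from this one,
        // which is exactly what can_move_VideoFrameInput_between_sessions verifies.
        session.setVideoFrameInput(&input);
        recorder.setOutputLocation(output);
        recorder.record();

        // sendVideoFrame() returns false when the backend cannot accept data yet;
        // production code reacts to readyToSendVideoFrame() instead of pushing blindly.
        for (const QVideoFrame &frame : frames)
            input.sendVideoFrame(frame);

        recorder.stop();
    }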
diff --git a/tests/auto/integration/qmediaframeinputsbackend/CMakeLists.txt b/tests/auto/integration/qmediaframeinputsbackend/CMakeLists.txt
new file mode 100644
index 000000000..8d35b1de0
--- /dev/null
+++ b/tests/auto/integration/qmediaframeinputsbackend/CMakeLists.txt
@@ -0,0 +1,22 @@
+# Copyright (C) 2024 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+#####################################################################
+## tst_qmediaframeinputsbackend Test:
+#####################################################################
+
+qt_internal_add_test(tst_qmediaframeinputsbackend
+ SOURCES
+ tst_qmediaframeinputsbackend.cpp tst_qmediaframeinputsbackend.h
+ capturesessionfixture.cpp capturesessionfixture.h
+ mediainfo.h
+ framegenerator.cpp framegenerator.h
+ ../shared/testvideosink.h
+ LIBRARIES
+ Qt::Multimedia
+ Qt::MultimediaPrivate
+ Qt::Gui
+ Qt::Widgets
+)
+
+
diff --git a/tests/auto/integration/qmediaframeinputsbackend/capturesessionfixture.cpp b/tests/auto/integration/qmediaframeinputsbackend/capturesessionfixture.cpp
new file mode 100644
index 000000000..aae03df60
--- /dev/null
+++ b/tests/auto/integration/qmediaframeinputsbackend/capturesessionfixture.cpp
@@ -0,0 +1,88 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only
+
+#include "capturesessionfixture.h"
+#include <QtTest/qtest.h>
+
+QT_BEGIN_NAMESPACE
+
+using namespace std::chrono;
+
+CaptureSessionFixture::CaptureSessionFixture(StreamType streamType, AutoStop autoStop)
+ : m_streamType{ streamType }
+{
+ m_recorder.setQuality(QMediaRecorder::VeryHighQuality);
+ m_session.setRecorder(&m_recorder);
+
+ if (hasVideo()) {
+ m_session.setVideoFrameInput(&m_videoInput);
+
+ QObject::connect(&m_videoGenerator, &VideoGenerator::frameCreated, //
+ &m_videoInput, &QVideoFrameInput::sendVideoFrame);
+
+ if (autoStop == AutoStop::EmitEmpty) {
+ m_recorder.setAutoStop(true);
+ m_videoGenerator.emitEmptyFrameOnStop();
+ }
+ }
+
+ if (hasAudio()) {
+ m_session.setAudioBufferInput(&m_audioInput);
+
+ QObject::connect(&m_audioGenerator, &AudioGenerator::audioBufferCreated, //
+ &m_audioInput, &QAudioBufferInput::sendAudioBuffer);
+
+ if (autoStop == AutoStop::EmitEmpty) {
+ m_recorder.setAutoStop(true);
+ m_audioGenerator.emitEmptyBufferOnStop();
+ }
+ }
+
+ m_tempFile.open();
+ m_recorder.setOutputLocation(m_tempFile.fileName());
+}
+
+CaptureSessionFixture::~CaptureSessionFixture()
+{
+ QFile::remove(m_recorder.actualLocation().toLocalFile());
+}
+
+void CaptureSessionFixture::connectPullMode()
+{
+ if (hasVideo())
+ QObject::connect(&m_videoInput, &QVideoFrameInput::readyToSendVideoFrame, //
+ &m_videoGenerator, &VideoGenerator::nextFrame);
+
+ if (hasAudio())
+ QObject::connect(&m_audioInput, &QAudioBufferInput::readyToSendAudioBuffer, //
+ &m_audioGenerator, &AudioGenerator::nextBuffer);
+}
+
+bool CaptureSessionFixture::waitForRecorderStopped(milliseconds duration)
+{
+ // StoppedState is emitted when media is finalized.
+ const bool stopped = QTest::qWaitFor(
+ [&] { //
+ return recorderStateChanged.contains(
+ QList<QVariant>{ QMediaRecorder::RecorderState::StoppedState });
+ },
+ duration);
+
+ if (!stopped)
+ return false;
+
+ return m_recorder.recorderState() == QMediaRecorder::StoppedState
+ && m_recorder.error() == QMediaRecorder::NoError;
+}
+
+bool CaptureSessionFixture::hasAudio() const
+{
+ return m_streamType == StreamType::Audio || m_streamType == StreamType::AudioAndVideo;
+}
+
+bool CaptureSessionFixture::hasVideo() const
+{
+ return m_streamType == StreamType::Video || m_streamType == StreamType::AudioAndVideo;
+}
+
+QT_END_NAMESPACE
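CaptureSessionFixture bundles the recorder, the frame and audio-buffer inputs, and the generators into one object. A hypothetical test body using it could look like the sketch below; the member and method names come from this file and framegenerator.h, while the counts and timeout are arbitrary:

    void exampleRecordingTest()
    {
        CaptureSessionFixture f(StreamType::AudioAndVideo, AutoStop::EmitEmpty);
        f.m_videoGenerator.setFrameCount(30);
        f.m_audioGenerator.setBufferCount(30);

        // Pull mode: the generators emit the next frame/buffer whenever the inputs
        // signal readyToSend*, and the trailing empty frame/buffer stops the recorder.
        f.connectPullMode();
        f.m_recorder.record();

        QVERIFY(f.waitForRecorderStopped(std::chrono::seconds{ 60 }));
    }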
diff --git a/tests/auto/integration/qmediaframeinputsbackend/capturesessionfixture.h b/tests/auto/integration/qmediaframeinputsbackend/capturesessionfixture.h
new file mode 100644
index 000000000..f7aa27a65
--- /dev/null
+++ b/tests/auto/integration/qmediaframeinputsbackend/capturesessionfixture.h
@@ -0,0 +1,49 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only
+
+#ifndef CAPTURESESSIONFIXTURE_H
+#define CAPTURESESSIONFIXTURE_H
+
+#include "framegenerator.h"
+#include <QtMultimedia/qvideoframeinput.h>
+#include <QtMultimedia/qaudioinput.h>
+#include <QtMultimedia/qmediacapturesession.h>
+#include <QtMultimedia/qmediarecorder.h>
+#include <QtMultimedia/qaudiobufferinput.h>
+#include <QtCore/qtemporaryfile.h>
+
+#include <../shared/testvideosink.h>
+#include <QtTest/qsignalspy.h>
+
+QT_BEGIN_NAMESPACE
+
+enum class StreamType { Audio, Video, AudioAndVideo };
+enum class AutoStop { EmitEmpty, No };
+
+struct CaptureSessionFixture
+{
+ explicit CaptureSessionFixture(StreamType streamType, AutoStop autoStop);
+ ~CaptureSessionFixture();
+
+ void connectPullMode();
+ bool waitForRecorderStopped(milliseconds duration);
+ bool hasAudio() const;
+ bool hasVideo() const;
+
+ VideoGenerator m_videoGenerator;
+ AudioGenerator m_audioGenerator;
+ QVideoFrameInput m_videoInput;
+ QAudioBufferInput m_audioInput;
+ QMediaCaptureSession m_session;
+ QMediaRecorder m_recorder;
+ QTemporaryFile m_tempFile;
+ StreamType m_streamType = StreamType::Video;
+
+ QSignalSpy readyToSendVideoFrame{ &m_videoInput, &QVideoFrameInput::readyToSendVideoFrame };
+ QSignalSpy readyToSendAudioBuffer{ &m_audioInput, &QAudioBufferInput::readyToSendAudioBuffer };
+ QSignalSpy recorderStateChanged{ &m_recorder, &QMediaRecorder::recorderStateChanged };
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/tests/auto/integration/qmediaframeinputsbackend/framegenerator.cpp b/tests/auto/integration/qmediaframeinputsbackend/framegenerator.cpp
new file mode 100644
index 000000000..5d844a716
--- /dev/null
+++ b/tests/auto/integration/qmediaframeinputsbackend/framegenerator.cpp
@@ -0,0 +1,148 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only
+
+#include "framegenerator.h"
+#include <QtCore/qdebug.h>
+
+QT_BEGIN_NAMESPACE
+
+void VideoGenerator::setPattern(ImagePattern pattern)
+{
+ m_pattern = pattern;
+}
+
+void VideoGenerator::setFrameCount(int count)
+{
+ m_maxFrameCount = count;
+}
+
+void VideoGenerator::setSize(QSize size)
+{
+ m_size = size;
+}
+
+void VideoGenerator::setFrameRate(double rate)
+{
+ m_frameRate = rate;
+}
+
+void VideoGenerator::setPeriod(milliseconds period)
+{
+ m_period = period;
+}
+
+void VideoGenerator::emitEmptyFrameOnStop()
+{
+ m_emitEmptyFrameOnStop = true;
+}
+
+static void fillColoredSquares(QImage& image)
+{
+ QList<QColor> colors = { Qt::red, Qt::green, Qt::blue, Qt::yellow };
+ const int width = image.width();
+ const int height = image.height();
+
+ for (int j = 0; j < height; ++j) {
+ for (int i = 0; i < width; ++i) {
+ const int colorX = i < width / 2 ? 0 : 1;
+ const int colorY = j < height / 2 ? 0 : 1;
+ const int colorIndex = colorX + 2 * colorY;
+ image.setPixel(i, j, colors[colorIndex].rgb());
+ }
+ }
+}
+
+QVideoFrame VideoGenerator::createFrame()
+{
+ QImage image(m_size, QImage::Format_ARGB32);
+ switch (m_pattern) {
+ case ImagePattern::SingleColor:
+ image.fill(colors[m_frameIndex % colors.size()]);
+ break;
+ case ImagePattern::ColoredSquares:
+ fillColoredSquares(image);
+ break;
+ }
+
+ QVideoFrame frame(image);
+
+ if (m_frameRate)
+ frame.setStreamFrameRate(*m_frameRate);
+
+ if (m_period) {
+ frame.setStartTime(duration_cast<microseconds>(*m_period).count() * m_frameIndex);
+ frame.setEndTime(duration_cast<microseconds>(*m_period).count() * (m_frameIndex + 1));
+ }
+
+ return frame;
+}
+
+void VideoGenerator::nextFrame()
+{
+ if (m_frameIndex == m_maxFrameCount) {
+ emit done();
+ if (m_emitEmptyFrameOnStop)
+ emit frameCreated({});
+ return;
+ }
+
+ const QVideoFrame frame = createFrame();
+ emit frameCreated(frame);
+ ++m_frameIndex;
+}
+
+AudioGenerator::AudioGenerator()
+{
+ m_format.setSampleFormat(QAudioFormat::UInt8);
+ m_format.setSampleRate(8000);
+ m_format.setChannelConfig(QAudioFormat::ChannelConfigMono);
+}
+
+void AudioGenerator::setFormat(const QAudioFormat &format)
+{
+ m_format = format;
+}
+
+void AudioGenerator::setBufferCount(int count)
+{
+ m_maxBufferCount = count;
+}
+
+void AudioGenerator::setDuration(microseconds duration)
+{
+ m_duration = duration;
+}
+
+void AudioGenerator::emitEmptyBufferOnStop()
+{
+ m_emitEmptyBufferOnStop = true;
+}
+
+QAudioBuffer AudioGenerator::createAudioBuffer()
+{
+ const microseconds bufferDuration = m_duration / m_maxBufferCount.value_or(1);
+ const qint32 byteCount = m_format.bytesForDuration(bufferDuration.count());
+ const QByteArray data(byteCount, '\0');
+
+ QAudioBuffer buffer(data, m_format);
+ return buffer;
+}
+
+void AudioGenerator::nextBuffer()
+{
+ if (m_bufferIndex == m_maxBufferCount) {
+ emit done();
+ if (m_emitEmptyBufferOnStop)
+ emit audioBufferCreated({});
+ return;
+ }
+
+ const QAudioBuffer buffer = createAudioBuffer();
+
+ emit audioBufferCreated(buffer);
+ ++m_bufferIndex;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_framegenerator.cpp"
diff --git a/tests/auto/integration/qmediaframeinputsbackend/framegenerator.h b/tests/auto/integration/qmediaframeinputsbackend/framegenerator.h
new file mode 100644
index 000000000..dafb00681
--- /dev/null
+++ b/tests/auto/integration/qmediaframeinputsbackend/framegenerator.h
@@ -0,0 +1,82 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only
+
+#ifndef FRAMEGENERATOR_H
+#define FRAMEGENERATOR_H
+
+#include <QtCore/qobject.h>
+#include <QtCore/qlist.h>
+#include <QtMultimedia/qvideoframe.h>
+#include <QtMultimedia/qaudiobuffer.h>
+#include <functional>
+#include <chrono>
+
+QT_BEGIN_NAMESPACE
+
+using namespace std::chrono;
+
+enum class ImagePattern
+{
+ SingleColor, // Image filled with a single color
+ ColoredSquares // Colored squares, [red, green; blue, yellow]
+};
+
+class VideoGenerator : public QObject
+{
+ Q_OBJECT
+public:
+ void setPattern(ImagePattern pattern);
+ void setFrameCount(int count);
+ void setSize(QSize size);
+ void setFrameRate(double rate);
+ void setPeriod(milliseconds period);
+ void emitEmptyFrameOnStop();
+ QVideoFrame createFrame();
+
+signals:
+ void done();
+ void frameCreated(const QVideoFrame &frame);
+
+public slots:
+ void nextFrame();
+
+private:
+ QList<QColor> colors = { Qt::red, Qt::green, Qt::blue, Qt::black, Qt::white };
+ ImagePattern m_pattern = ImagePattern::SingleColor;
+ QSize m_size{ 640, 480 };
+ std::optional<int> m_maxFrameCount;
+ int m_frameIndex = 0;
+ std::optional<double> m_frameRate;
+ std::optional<milliseconds> m_period;
+ bool m_emitEmptyFrameOnStop = false;
+};
+
+class AudioGenerator : public QObject
+{
+ Q_OBJECT
+public:
+ AudioGenerator();
+ void setFormat(const QAudioFormat &format);
+ void setBufferCount(int count);
+ void setDuration(microseconds duration);
+ void emitEmptyBufferOnStop();
+ QAudioBuffer createAudioBuffer();
+
+signals:
+ void done();
+ void audioBufferCreated(const QAudioBuffer &buffer);
+
+public slots:
+ void nextBuffer();
+
+private:
+ std::optional<int> m_maxBufferCount;
+ microseconds m_duration = 1s;
+ int m_bufferIndex = 0;
+ QAudioFormat m_format;
+ bool m_emitEmptyBufferOnStop = false;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/tests/auto/integration/qmediaframeinputsbackend/mediainfo.h b/tests/auto/integration/qmediaframeinputsbackend/mediainfo.h
new file mode 100644
index 000000000..6c1141c67
--- /dev/null
+++ b/tests/auto/integration/qmediaframeinputsbackend/mediainfo.h
@@ -0,0 +1,96 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only
+
+#ifndef MEDIAINFO_H
+#define MEDIAINFO_H
+
+#include <QtTest/QTest>
+#include <QtMultimedia/qmediaplayer.h>
+#include <QtMultimedia/qmediametadata.h>
+#include <QtMultimedia/qaudiooutput.h>
+#include "../shared/testvideosink.h"
+#include <chrono>
+
+QT_USE_NAMESPACE
+
+using namespace std::chrono;
+
+// Extracts media metadata from an input media file
+struct MediaInfo
+{
+ static std::optional<MediaInfo> create(const QUrl &fileLocation)
+ {
+ QMediaPlayer player;
+ const QSignalSpy mediaStatusChanged{ &player, &QMediaPlayer::mediaStatusChanged };
+
+ QAudioOutput audioOutput;
+ player.setAudioOutput(&audioOutput);
+
+ TestVideoSink sink;
+ player.setVideoSink(&sink);
+
+ std::vector<std::array<QColor, 4>> colors;
+ QObject::connect(
+ &sink, &TestVideoSink::videoFrameChangedSync, &sink,
+ [&](const QVideoFrame &frame) { //
+ if (frame.isValid())
+ colors.push_back(sampleQuadrants(frame.toImage()));
+ });
+
+ player.setSource(fileLocation);
+
+ // Loop through all frames to be able to count them
+ player.setPlaybackRate(50); // let's speed it up
+ player.play();
+
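+ // Wait until playback either finishes or fails (the long timeout allows for slow CI machines)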
+ const bool endReached = QTest::qWaitFor(
+ [&] {
+ return mediaStatusChanged.contains(QList<QVariant>{ QMediaPlayer::EndOfMedia })
+ || mediaStatusChanged.contains(
+ QList<QVariant>{ QMediaPlayer::InvalidMedia });
+ },
+ 10min);
+
+ if (!endReached)
+ return {};
+
+ MediaInfo info{};
+ info.m_frameRate = player.metaData().value(QMediaMetaData::VideoFrameRate).toReal();
+ info.m_size = player.metaData().value(QMediaMetaData::Resolution).toSize();
+
+ info.m_duration = milliseconds{ player.duration() };
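+ // m_totalFrames presumably also counts the trailing invalid end-of-stream frame
+ // (filtered out in the lambda above), hence the -1.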
+ info.m_frameCount = sink.m_totalFrames - 1;
+ info.m_frameTimes = sink.m_frameTimes;
+ info.m_hasVideo = player.hasVideo();
+ info.m_hasAudio = player.hasAudio();
+ info.m_colors = colors;
+ return info;
+ }
+
+
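+ // Samples one pixel from the center of each quadrant; used to verify that the
+ // ColoredSquares test pattern (red, green, blue, yellow) survives encoding.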
+ static std::array<QColor, 4> sampleQuadrants(const QImage &image)
+ {
+ const int width = image.width();
+ const int height = image.height();
+ return {
+ image.pixel(width / 4, height / 4),
+ image.pixel(3 * width / 4, height / 4),
+ image.pixel(width / 4, 3 * height / 4),
+ image.pixel(3 * width / 4, 3 * height / 4),
+ };
+ }
+
+ int m_frameCount = 0;
+ qreal m_frameRate = 0.0;
+ QSize m_size;
+ milliseconds m_duration;
+ bool m_hasVideo = false;
+ bool m_hasAudio = false;
+ std::vector<std::array<QColor, 4>> m_colors; // Colors in upper left, upper right, bottom left, and bottom right
+
+ std::vector<TestVideoSink::TimePoint> m_frameTimes;
+};
+
+#endif
diff --git a/tests/auto/integration/qmediaframeinputsbackend/tst_qmediaframeinputsbackend.cpp b/tests/auto/integration/qmediaframeinputsbackend/tst_qmediaframeinputsbackend.cpp
new file mode 100644
index 000000000..54623c807
--- /dev/null
+++ b/tests/auto/integration/qmediaframeinputsbackend/tst_qmediaframeinputsbackend.cpp
@@ -0,0 +1,411 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only
+
+#include "capturesessionfixture.h"
+#include "tst_qmediaframeinputsbackend.h"
+
+#include "mediainfo.h"
+#include <QtTest/QtTest>
+#include <qvideoframeinput.h>
+#include <qaudiobufferinput.h>
+#include <qsignalspy.h>
+#include <qmediarecorder.h>
+#include <qmediaplayer.h>
+#include <../shared/testvideosink.h>
+#include <../shared/mediabackendutils.h>
+
+QT_BEGIN_NAMESPACE
+
+void tst_QMediaFrameInputsBackend::initTestCase()
+{
+ QSKIP_GSTREAMER("Not implemented in the gstreamer backend");
+}
+
+void tst_QMediaFrameInputsBackend::mediaRecorderWritesAudio_whenAudioFramesInputSends_data()
+{
+ QTest::addColumn<int>("bufferCount");
+ QTest::addColumn<QAudioFormat::SampleFormat>("sampleFormat");
+ QTest::addColumn<QAudioFormat::ChannelConfig>("channelConfig");
+ QTest::addColumn<int>("sampleRate");
+ QTest::addColumn<milliseconds>("duration");
+
+#ifndef Q_OS_WINDOWS // a sample rate of 8000 Hz is not supported on Windows. TODO: investigate.
+ QTest::addRow("bufferCount: 20; sampleFormat: Int16; channelConfig: Mono; sampleRate: 8000; "
+ "duration: 1000")
+ << 20 << QAudioFormat::Int16 << QAudioFormat::ChannelConfigMono << 8000 << 1000ms;
+#endif
+ QTest::addRow("bufferCount: 30; sampleFormat: Int32; channelConfig: Stereo; sampleRate: "
+ "12000; duration: 2000")
+ << 30 << QAudioFormat::Int32 << QAudioFormat::ChannelConfigStereo << 12000 << 2000ms;
+
+ // TODO: investigate failures with these channel configurations
+ // QTest::addRow("bufferCount: 10; sampleFormat: UInt8; channelConfig: 2Dot1; sampleRate:
+ // 40000; duration: 1500")
+ // << 10 << QAudioFormat::UInt8 << QAudioFormat::ChannelConfig2Dot1 << 40000 << 1500;
+ // QTest::addRow("bufferCount: 10; sampleFormat: Float; channelConfig: 3Dot0; sampleRate:
+ // 50000; duration: 2500")
+ // << 40 << QAudioFormat::Float << QAudioFormat::ChannelConfig3Dot0 << 50000 << 2500;
+}
+
+void tst_QMediaFrameInputsBackend::mediaRecorderWritesAudio_whenAudioFramesInputSends()
+{
+ QFETCH(const int, bufferCount);
+ QFETCH(const QAudioFormat::SampleFormat, sampleFormat);
+ QFETCH(const QAudioFormat::ChannelConfig, channelConfig);
+ QFETCH(const int, sampleRate);
+ QFETCH(const milliseconds, duration);
+
+ CaptureSessionFixture f{ StreamType::Audio, AutoStop::EmitEmpty };
+ f.connectPullMode();
+
+ QAudioFormat format;
+ format.setSampleFormat(sampleFormat);
+ format.setSampleRate(sampleRate);
+ format.setChannelConfig(channelConfig);
+
+ f.m_audioGenerator.setFormat(format);
+ f.m_audioGenerator.setBufferCount(bufferCount);
+ f.m_audioGenerator.setDuration(duration);
+
+ f.m_recorder.record();
+
+ QVERIFY(f.waitForRecorderStopped(60s));
+
+ auto info = MediaInfo::create(f.m_recorder.actualLocation());
+
+ QVERIFY(info->m_hasAudio);
+ QCOMPARE_GE(info->m_duration, duration - 50ms);
+ QCOMPARE_LE(info->m_duration, duration + 50ms);
+}
+
+void tst_QMediaFrameInputsBackend::mediaRecorderWritesVideo_whenVideoFramesInputSendsFrames_data()
+{
+ QTest::addColumn<int>("framesNumber");
+ QTest::addColumn<milliseconds>("frameDuration");
+ QTest::addColumn<QSize>("resolution");
+ QTest::addColumn<bool>("setTimeStamp");
+
+ QTest::addRow("framesNumber: 5; frameRate: 2; resolution: 50x80; with time stamps")
+ << 5 << 500ms << QSize(50, 80) << true;
+ QTest::addRow("framesNumber: 20; frameRate: 1; resolution: 200x100; with time stamps")
+ << 20 << 1000ms << QSize(200, 100) << true;
+
+ QTest::addRow("framesNumber: 20; frameRate: 30; resolution: 200x100; with frame rate")
+ << 20 << 250ms << QSize(200, 100) << false;
+ QTest::addRow("framesNumber: 60; frameRate: 4; resolution: 200x100; with frame rate")
+ << 60 << 24ms << QSize(200, 100) << false;
+}
+
+void tst_QMediaFrameInputsBackend::mediaRecorderWritesVideo_whenVideoFramesInputSendsFrames()
+{
+ QFETCH(const int, framesNumber);
+ QFETCH(const milliseconds, frameDuration);
+ QFETCH(const QSize, resolution);
+ QFETCH(const bool, setTimeStamp);
+
+ CaptureSessionFixture f{ StreamType::Video, AutoStop::EmitEmpty };
+ f.connectPullMode();
+ f.m_videoGenerator.setFrameCount(framesNumber);
+ f.m_videoGenerator.setSize(resolution);
+
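+ // Convert the frame period to frames per second, e.g. 500 ms -> 2 fps, 250 ms -> 4 fps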
+ const qreal frameRate = 1e6 / duration_cast<microseconds>(frameDuration).count();
+ if (setTimeStamp)
+ f.m_videoGenerator.setPeriod(frameDuration);
+ else
+ f.m_videoGenerator.setFrameRate(frameRate);
+
+ f.m_recorder.record();
+
+ QVERIFY(f.waitForRecorderStopped(60s));
+
+ auto info = MediaInfo::create(f.m_recorder.actualLocation());
+
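+ // Allow a 0.1 % tolerance on the measured frame rate and duration (container rounding)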
+ QCOMPARE_LT(info->m_frameRate, frameRate * 1.001);
+ QCOMPARE_GT(info->m_frameRate, frameRate * 0.999);
+
+ QCOMPARE_LT(info->m_duration, frameDuration * framesNumber * 1.001);
+ QCOMPARE_GE(info->m_duration, frameDuration * framesNumber * 0.999);
+
+ QCOMPARE(info->m_size, resolution);
+ QCOMPARE_EQ(info->m_frameCount, framesNumber);
+}
+
+struct YUV
+{
+ double Y;
+ double U;
+ double V;
+};
+
+// Poor man's RGB to YUV conversion with BT.709 coefficients
+// from https://en.wikipedia.org/wiki/Y%E2%80%B2UV
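+// Y  =  0.2126 * R + 0.7152 * G + 0.0722 * B
+// U ~= -0.09991 * R - 0.33609 * G + 0.436 * B
+// V ~=  0.615 * R - 0.55861 * G - 0.05639 * B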
+QVector3D RGBToYUV(const QColor &c)
+{
+ const float R = c.redF();
+ const float G = c.greenF();
+ const float B = c.blueF();
+ QVector3D yuv;
+ yuv[0] = 0.2126f * R + 0.7152f * G + 0.0722f * B;
+ yuv[1] = -0.09991f * R - 0.33609f * G + 0.436f * B;
+ yuv[2] = 0.615f * R - 0.55861f * G - 0.05639f * B;
+ return yuv;
+}
+
+// Considers two colors equal if their YUV vectors point in the same direction
+// and have similar luma (Y).
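+// The cross product length is |a||b|*sin(angle), so it is close to zero when both colors
+// point along the same YUV direction; the relative-luma term additionally rejects colors
+// that differ only in brightness.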
+bool fuzzyCompare(const QColor &lhs, const QColor &rhs, float tol = 1e-2)
+{
+ const QVector3D lhsYuv = RGBToYUV(lhs);
+ const QVector3D rhsYuv = RGBToYUV(rhs);
+ const float relativeLumaDiff =
+ 0.5f * std::abs((lhsYuv[0] - rhsYuv[0]) / (lhsYuv[0] + rhsYuv[0]));
+ const float colorDiff = QVector3D::crossProduct(lhsYuv, rhsYuv).length();
+ return colorDiff < tol && relativeLumaDiff < tol;
+}
+
+void tst_QMediaFrameInputsBackend::mediaRecorderWritesVideo_withCorrectColors()
+{
+ CaptureSessionFixture f{ StreamType::Video, AutoStop::EmitEmpty };
+ f.connectPullMode();
+ f.m_videoGenerator.setPattern(ImagePattern::ColoredSquares);
+ f.m_videoGenerator.setFrameCount(3);
+ f.m_recorder.record();
+ QVERIFY(f.waitForRecorderStopped(60s));
+
+ const auto info = MediaInfo::create(f.m_recorder.actualLocation());
+ QCOMPARE_EQ(info->m_colors.size(), 3);
+
+ std::array<QColor, 4> colors = info->m_colors.front();
+ QVERIFY(fuzzyCompare(colors[0], Qt::red));
+ QVERIFY(fuzzyCompare(colors[1], Qt::green));
+ QVERIFY(fuzzyCompare(colors[2], Qt::blue));
+ QVERIFY(fuzzyCompare(colors[3], Qt::yellow));
+}
+
+void tst_QMediaFrameInputsBackend::mediaRecorderWritesVideo_whenInputFrameShrinksOverTime()
+{
+ CaptureSessionFixture f{ StreamType::Video, AutoStop::EmitEmpty };
+ f.m_recorder.record();
+ f.readyToSendVideoFrame.wait();
+
+ constexpr int startSize = 38;
+ int frameCount = 0;
+ for (int i = 0; i < startSize; i += 2) { // TODO crash in sws_scale if subsequent frames are odd-sized QTBUG-126259
+ ++frameCount;
+ const QSize size{ startSize - i, startSize - i };
+ f.m_videoGenerator.setSize(size);
+ f.m_videoInput.sendVideoFrame(f.m_videoGenerator.createFrame());
+ f.readyToSendVideoFrame.wait();
+ }
+
+ f.m_videoInput.sendVideoFrame({});
+
+ QVERIFY(f.waitForRecorderStopped(60s));
+ auto info = MediaInfo::create(f.m_recorder.actualLocation());
+
+ QCOMPARE_EQ(info->m_frameCount, frameCount);
+
+ // All frames should be resized to the size of the first frame
+ QCOMPARE_EQ(info->m_size, QSize(startSize, startSize));
+}
+
+void tst_QMediaFrameInputsBackend::mediaRecorderWritesVideo_whenInputFrameGrowsOverTime()
+{
+ CaptureSessionFixture f{ StreamType::Video, AutoStop::EmitEmpty };
+ f.m_recorder.record();
+ f.readyToSendVideoFrame.wait();
+
+ constexpr int startSize = 38;
+ constexpr int maxSize = 256;
+ int frameCount = 0;
+
+ for (int i = 0; i < maxSize - startSize; i += 2) { // TODO crash in sws_scale if subsequent frames are odd-sized QTBUG-126259
+ ++frameCount;
+ const QSize size{ startSize + i, startSize + i };
+ f.m_videoGenerator.setPattern(ImagePattern::ColoredSquares);
+ f.m_videoGenerator.setSize(size);
+ f.m_videoInput.sendVideoFrame(f.m_videoGenerator.createFrame());
+ f.readyToSendVideoFrame.wait();
+ }
+
+ f.m_videoInput.sendVideoFrame({});
+
+ QVERIFY(f.waitForRecorderStopped(60s));
+ auto info = MediaInfo::create(f.m_recorder.actualLocation());
+
+ QCOMPARE_EQ(info->m_frameCount, frameCount);
+
+ // All frames should be resized to the size of the first frame
+ QCOMPARE_EQ(info->m_size, QSize(startSize, startSize));
+}
+
+void tst_QMediaFrameInputsBackend::mediaRecorderWritesVideo_withSingleFrame()
+{
+ CaptureSessionFixture f{ StreamType::Video, AutoStop::EmitEmpty };
+ f.connectPullMode();
+ f.m_videoGenerator.setFrameCount(1);
+ f.m_videoGenerator.setSize({ 640, 480 });
+ f.m_videoGenerator.setPeriod(1s);
+ f.m_recorder.record();
+ QVERIFY(f.waitForRecorderStopped(60s));
+ auto info = MediaInfo::create(f.m_recorder.actualLocation());
+
+ QCOMPARE_EQ(info->m_frameCount, 1);
+ QCOMPARE_EQ(info->m_duration, 1s);
+}
+
+void tst_QMediaFrameInputsBackend::mediaRecorderStopsRecording_whenInputsReportedEndOfStream_data()
+{
+ QTest::addColumn<bool>("audioStopsFirst");
+
+ QTest::addRow("audio stops first") << true;
+ QTest::addRow("video stops first") << true;
+}
+
+void tst_QMediaFrameInputsBackend::mediaRecorderStopsRecording_whenInputsReportedEndOfStream()
+{
+ QFETCH(const bool, audioStopsFirst);
+
+ CaptureSessionFixture f{ StreamType::AudioAndVideo, AutoStop::No };
+ f.m_recorder.setAutoStop(true);
+ f.connectPullMode();
+
+ f.m_audioGenerator.setBufferCount(30);
+ f.m_videoGenerator.setFrameCount(30);
+
+ QSignalSpy audioDone{ &f.m_audioGenerator, &AudioGenerator::done };
+ QSignalSpy videoDone{ &f.m_videoGenerator, &VideoGenerator::done };
+
+ f.m_recorder.record();
+
+ audioDone.wait();
+ videoDone.wait();
+
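+ // A default-constructed (empty) buffer or frame marks the end of the respective stream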
+ if (audioStopsFirst) {
+ f.m_audioInput.sendAudioBuffer({});
+ QVERIFY(!f.waitForRecorderStopped(300ms)); // Should not stop until both streams stopped
+ f.m_videoInput.sendVideoFrame({});
+ } else {
+ f.m_videoInput.sendVideoFrame({});
+ QVERIFY(!f.waitForRecorderStopped(300ms)); // Should not stop until both streams stopped
+ f.m_audioInput.sendAudioBuffer({});
+ }
+
+ QVERIFY(f.waitForRecorderStopped(60s));
+
+ // check if the file has been written
+
+ const std::optional<MediaInfo> mediaInfo = MediaInfo::create(f.m_recorder.actualLocation());
+
+ QVERIFY(mediaInfo);
+ QVERIFY(mediaInfo->m_hasVideo);
+ QVERIFY(mediaInfo->m_hasAudio);
+}
+
+void tst_QMediaFrameInputsBackend::readyToSend_isEmitted_whenRecordingStarts_data()
+{
+ QTest::addColumn<StreamType>("streamType");
+ QTest::addRow("audio") << StreamType::Audio;
+ QTest::addRow("video") << StreamType::Video;
+ QTest::addRow("audioAndVideo") << StreamType::AudioAndVideo;
+}
+
+void tst_QMediaFrameInputsBackend::readyToSend_isEmitted_whenRecordingStarts()
+{
+ QFETCH(StreamType, streamType);
+
+ CaptureSessionFixture f{ streamType, AutoStop::No };
+
+ f.m_recorder.record();
+
+ if (f.hasAudio())
+ QTRY_COMPARE_EQ(f.readyToSendAudioBuffer.size(), 1);
+
+ if (f.hasVideo())
+ QTRY_COMPARE_EQ(f.readyToSendVideoFrame.size(), 1);
+}
+
+void tst_QMediaFrameInputsBackend::readyToSendVideoFrame_isEmitted_whenSendVideoFrameIsCalled()
+{
+ CaptureSessionFixture f{ StreamType::Video, AutoStop::No };
+
+ f.m_recorder.record();
+ QVERIFY(f.readyToSendVideoFrame.wait());
+
+ f.m_videoInput.sendVideoFrame(f.m_videoGenerator.createFrame());
+ QVERIFY(f.readyToSendVideoFrame.wait());
+
+ f.m_videoInput.sendVideoFrame(f.m_videoGenerator.createFrame());
+ QVERIFY(f.readyToSendVideoFrame.wait());
+}
+
+void tst_QMediaFrameInputsBackend::readyToSendAudioBuffer_isEmitted_whenSendAudioBufferIsCalled()
+{
+ CaptureSessionFixture f{ StreamType::Audio, AutoStop::No };
+
+ f.m_recorder.record();
+ QVERIFY(f.readyToSendAudioBuffer.wait());
+
+ f.m_audioInput.sendAudioBuffer(f.m_audioGenerator.createAudioBuffer());
+ QVERIFY(f.readyToSendAudioBuffer.wait());
+
+ f.m_audioInput.sendAudioBuffer(f.m_audioGenerator.createAudioBuffer());
+ QVERIFY(f.readyToSendAudioBuffer.wait());
+}
+
+void tst_QMediaFrameInputsBackend::readyToSendVideoFrame_isEmittedRepeatedly_whenPullModeIsEnabled()
+{
+ CaptureSessionFixture f{ StreamType::Video, AutoStop::EmitEmpty };
+ f.connectPullMode();
+
+ constexpr int expectedSignalCount = 4;
+ f.m_videoGenerator.setFrameCount(expectedSignalCount - 1);
+
+ f.m_recorder.record();
+
+ f.waitForRecorderStopped(60s);
+
+ QCOMPARE_EQ(f.readyToSendVideoFrame.size(), expectedSignalCount);
+}
+
+void tst_QMediaFrameInputsBackend::
+ readyToSendAudioBuffer_isEmittedRepeatedly_whenPullModeIsEnabled()
+{
+ CaptureSessionFixture f{ StreamType::Audio, AutoStop::EmitEmpty };
+ f.connectPullMode();
+
+ constexpr int expectedSignalCount = 4;
+ f.m_audioGenerator.setBufferCount(expectedSignalCount - 1);
+
+ f.m_recorder.record();
+
+ f.waitForRecorderStopped(60s);
+
+ QCOMPARE_EQ(f.readyToSendAudioBuffer.size(), expectedSignalCount);
+}
+
+void tst_QMediaFrameInputsBackend::
+ readyToSendAudioBufferAndVideoFrame_isEmittedRepeatedly_whenPullModeIsEnabled()
+{
+ CaptureSessionFixture f{ StreamType::AudioAndVideo, AutoStop::EmitEmpty };
+ f.connectPullMode();
+
+ constexpr int expectedSignalCount = 4;
+ f.m_audioGenerator.setBufferCount(expectedSignalCount - 1);
+ f.m_videoGenerator.setFrameCount(expectedSignalCount - 1);
+
+ f.m_recorder.record();
+
+ f.waitForRecorderStopped(60s);
+
+ QCOMPARE_EQ(f.readyToSendAudioBuffer.size(), expectedSignalCount);
+ QCOMPARE_EQ(f.readyToSendVideoFrame.size(), expectedSignalCount);
+}
+
+QT_END_NAMESPACE
+
+QT_USE_NAMESPACE
+
+QTEST_MAIN(tst_QMediaFrameInputsBackend)
+
+#include "moc_tst_qmediaframeinputsbackend.cpp"
diff --git a/tests/auto/integration/qmediaframeinputsbackend/tst_qmediaframeinputsbackend.h b/tests/auto/integration/qmediaframeinputsbackend/tst_qmediaframeinputsbackend.h
new file mode 100644
index 000000000..451e1aceb
--- /dev/null
+++ b/tests/auto/integration/qmediaframeinputsbackend/tst_qmediaframeinputsbackend.h
@@ -0,0 +1,47 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only
+
+#ifndef TST_QMEDIAFRAMEINPUTSBACKEND_H
+#define TST_QMEDIAFRAMEINPUTSBACKEND_H
+
+#include <QObject>
+
+QT_BEGIN_NAMESPACE
+
+class tst_QMediaFrameInputsBackend : public QObject
+{
+ Q_OBJECT
+
+private slots:
+ void initTestCase();
+
+ void mediaRecorderWritesAudio_whenAudioFramesInputSends_data();
+ void mediaRecorderWritesAudio_whenAudioFramesInputSends();
+
+ void mediaRecorderWritesVideo_whenVideoFramesInputSendsFrames_data();
+ void mediaRecorderWritesVideo_whenVideoFramesInputSendsFrames();
+
+ void mediaRecorderWritesVideo_whenInputFrameShrinksOverTime();
+ void mediaRecorderWritesVideo_whenInputFrameGrowsOverTime();
+
+ void mediaRecorderWritesVideo_withSingleFrame();
+
+ void mediaRecorderWritesVideo_withCorrectColors();
+
+ void mediaRecorderStopsRecording_whenInputsReportedEndOfStream_data();
+ void mediaRecorderStopsRecording_whenInputsReportedEndOfStream();
+
+ void readyToSend_isEmitted_whenRecordingStarts_data();
+ void readyToSend_isEmitted_whenRecordingStarts();
+
+ void readyToSendVideoFrame_isEmitted_whenSendVideoFrameIsCalled();
+ void readyToSendAudioBuffer_isEmitted_whenSendAudioBufferIsCalled();
+
+ void readyToSendVideoFrame_isEmittedRepeatedly_whenPullModeIsEnabled();
+ void readyToSendAudioBuffer_isEmittedRepeatedly_whenPullModeIsEnabled();
+ void readyToSendAudioBufferAndVideoFrame_isEmittedRepeatedly_whenPullModeIsEnabled();
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/tests/auto/integration/qmediaplayerbackend/CMakeLists.txt b/tests/auto/integration/qmediaplayerbackend/CMakeLists.txt
index 39684a32d..3a9d25926 100644
--- a/tests/auto/integration/qmediaplayerbackend/CMakeLists.txt
+++ b/tests/auto/integration/qmediaplayerbackend/CMakeLists.txt
@@ -30,23 +30,10 @@ qt_internal_add_test(tst_qmediaplayerbackend
Qt::Qml
Qt::Quick
Qt::QuickPrivate
- TESTDATA ${testdata_resource_files}
- INCLUDE_DIRECTORIES
- ../shared/
-)
-
-qt_internal_add_resource(tst_qmediaplayerbackend "testdata"
- PREFIX
- "/"
- FILES
+ BUILTIN_TESTDATA
+ TESTDATA
${testdata_resource_files}
"LazyLoad.qml"
-)
-
-## Scopes:
-#####################################################################
-
-qt_internal_extend_target(tst_qmediaplayerbackend CONDITION boot2qt
- DEFINES
- SKIP_OGV_TEST
+ INCLUDE_DIRECTORIES
+ ../shared/
)
diff --git a/tests/auto/integration/qmediaplayerbackend/fixture.h b/tests/auto/integration/qmediaplayerbackend/fixture.h
index bf539d7ff..883330513 100644
--- a/tests/auto/integration/qmediaplayerbackend/fixture.h
+++ b/tests/auto/integration/qmediaplayerbackend/fixture.h
@@ -76,4 +76,20 @@ public:
// Helper to create an object that is comparable to a QSignalSpy
using SignalList = QList<QList<QVariant>>;
+struct TestSubtitleSink : QObject
+{
+ Q_OBJECT
+
+public Q_SLOTS:
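+ // Appends the subtitle via QMetaObject::invokeMethod so it is recorded on the sink's
+ // own thread, regardless of which thread delivered the signal.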
+ void addSubtitle(QString string)
+ {
+ QMetaObject::invokeMethod(this, [this, string = std::move(string)]() mutable {
+ subtitles.append(std::move(string));
+ });
+ }
+
+public:
+ QStringList subtitles;
+};
+
#endif // FIXTURE_H
diff --git a/tests/auto/integration/qmediaplayerbackend/testdata/15s.mkv b/tests/auto/integration/qmediaplayerbackend/testdata/15s.mkv
new file mode 100644
index 000000000..80ee0f923
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerbackend/testdata/15s.mkv
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerbackend/testdata/h264_avc1_yuv420p10le_tv_bt2020.mov b/tests/auto/integration/qmediaplayerbackend/testdata/h264_avc1_yuv420p10le_tv_bt2020.mov
new file mode 100644
index 000000000..c5a508a1f
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerbackend/testdata/h264_avc1_yuv420p10le_tv_bt2020.mov
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerbackend/testdata/multitrack-subtitle-start-at-zero.mkv b/tests/auto/integration/qmediaplayerbackend/testdata/multitrack-subtitle-start-at-zero.mkv
new file mode 100644
index 000000000..1962f00c1
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerbackend/testdata/multitrack-subtitle-start-at-zero.mkv
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerbackend/testdata/multitrack.mkv b/tests/auto/integration/qmediaplayerbackend/testdata/multitrack.mkv
new file mode 100644
index 000000000..a3c2e9bb9
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerbackend/testdata/multitrack.mkv
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerbackend/testdata/subtitletest.mkv b/tests/auto/integration/qmediaplayerbackend/testdata/subtitletest.mkv
new file mode 100644
index 000000000..2051e4df5
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerbackend/testdata/subtitletest.mkv
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerbackend/tst_qmediaplayerbackend.cpp b/tests/auto/integration/qmediaplayerbackend/tst_qmediaplayerbackend.cpp
index 5ea590522..b212b4b63 100644
--- a/tests/auto/integration/qmediaplayerbackend/tst_qmediaplayerbackend.cpp
+++ b/tests/auto/integration/qmediaplayerbackend/tst_qmediaplayerbackend.cpp
@@ -10,10 +10,14 @@
#include "server.h"
#include <qmediametadata.h>
#include <qaudiobuffer.h>
+#include <qaudiodevice.h>
#include <qvideosink.h>
#include <qvideoframe.h>
#include <qaudiooutput.h>
+#include <qmediadevices.h>
+#if QT_CONFIG(process)
#include <qprocess.h>
+#endif
#include <private/qglobal_p.h>
#ifdef QT_FEATURE_network
#include <qtcpserver.h>
@@ -92,16 +96,24 @@ private slots:
void setSource_emitsMediaStatusChange_whenCalledWithInvalidFile();
void setSource_doesNotEmitPlaybackStateChange_whenCalledWithInvalidFile();
void setSource_setsSourceMediaStatusAndError_whenCalledWithInvalidFile();
+ void setSource_initializesExpectedDefaultState();
+ void setSource_initializesExpectedDefaultState_data();
void setSource_silentlyCancelsPreviousCall_whenServerDoesNotRespond();
void setSource_changesSourceAndMediaStatus_whenCalledWithValidFile();
void setSource_updatesExpectedAttributes_whenMediaHasLoaded();
void setSource_stopsAndEntersErrorState_whenPlayerWasPlaying();
void setSource_loadsAudioTrack_whenCalledWithValidWavFile();
void setSource_resetsState_whenCalledWithEmptyUrl();
+ void setSource_resetsState_whenCalledWithEmptyUrl_data();
void setSource_loadsNewMedia_whenPreviousMediaWasFullyLoaded();
void setSource_loadsCorrectTracks_whenLoadingMediaInSequence();
void setSource_remainsInStoppedState_whenPlayerWasStopped();
void setSource_entersStoppedState_whenPlayerWasPlaying();
+ void setSource_emitsError_whenSdpFileIsLoaded();
+ void setSource_updatesTrackProperties_data();
+ void setSource_updatesTrackProperties();
+ void setSource_emitsTracksChanged_data();
+ void setSource_emitsTracksChanged();
void setSourceAndPlay_setCorrectVideoSize_whenVideoHasNonStandardPixelAspectRatio_data();
void setSourceAndPlay_setCorrectVideoSize_whenVideoHasNonStandardPixelAspectRatio();
@@ -109,6 +121,10 @@ private slots:
void pause_doesNotChangePlayerState_whenInvalidFileLoaded();
void pause_doesNothing_whenMediaIsNotLoaded();
void pause_entersPauseState_whenPlayerWasPlaying();
+ void pause_initializesExpectedDefaultState();
+ void pause_initializesExpectedDefaultState_data();
+ void pause_doesNotAdvancePosition();
+ void pause_playback_resumesFromPausedPosition();
void play_resetsErrorState_whenCalledWithInvalidFile();
void play_resumesPlaying_whenValidMediaIsProvidedAfterInvalidMedia();
@@ -121,13 +137,19 @@ private slots:
void play_waitsForLastFrameEnd_whenPlayingVideoWithLongFrames();
void play_startsPlayback_withAndWithoutOutputsConnected();
void play_startsPlayback_withAndWithoutOutputsConnected_data();
+ void play_playsRtpStream_whenSdpFileIsLoaded();
+ void play_succeedsFromSourceDevice();
+ void play_succeedsFromSourceDevice_data();
void stop_entersStoppedState_whenPlayerWasPaused();
+ void stop_entersStoppedState_whenPlayerWasPaused_data();
void stop_setsPositionToZero_afterPlayingToEndOfMedia();
void playbackRate_returnsOne_byDefault();
void setPlaybackRate_changesPlaybackRateAndEmitsSignal_data();
void setPlaybackRate_changesPlaybackRateAndEmitsSignal();
+ void setPlaybackRate_changesPlaybackDuration();
+ void setPlaybackRate_changesPlaybackDuration_data();
void setVolume_changesVolume_whenVolumeIsInRange();
void setVolume_clampsToRange_whenVolumeIsOutsideRange();
@@ -149,9 +171,12 @@ private slots:
void metadata();
void metadata_returnsMetadataWithThumbnail_whenMediaHasThumbnail_data();
void metadata_returnsMetadataWithThumbnail_whenMediaHasThumbnail();
+ void metadata_returnsMetadataWithHasHdrContent_whenMediaHasHdrContent_data();
+ void metadata_returnsMetadataWithHasHdrContent_whenMediaHasHdrContent();
void playerStateAtEOS();
void playFromBuffer();
void audioVideoAvailable();
+ void audioVideoAvailable_updatedOnNewMedia();
void isSeekable();
void positionAfterSeek();
void pause_rendersVideoAtCorrectResolution_data();
@@ -178,11 +203,33 @@ private slots:
void play_playsRotatedVideoOutput_whenVideoFileHasOrientationMetadata_data();
void play_playsRotatedVideoOutput_whenVideoFileHasOrientationMetadata();
+ void setVideoOutput_doesNotStopPlayback_data();
+ void setVideoOutput_doesNotStopPlayback();
+ void setAudioOutput_doesNotStopPlayback_data();
+ void setAudioOutput_doesNotStopPlayback();
+ void swapAudioDevice_doesNotStopPlayback_data();
+ void swapAudioDevice_doesNotStopPlayback();
+
+ void play_readsSubtitle();
+ void multiTrack_validateMetadata();
+ void play_readsSubtitle_fromMultiTrack();
+ void play_readsSubtitle_fromMultiTrack_data();
+
+ void setActiveSubtitleTrack_switchesSubtitles();
+ void setActiveSubtitleTrack_switchesSubtitles_data();
+
+ void setActiveVideoTrack_switchesVideoTrack();
+
+ void disablingAllTracks_doesNotStopPlayback();
+ void disablingAllTracks_beforeTracksChanged_doesNotStopPlayback();
+
private:
QUrl selectVideoFile(const QStringList &mediaCandidates);
- bool canCreateRtspStream() const;
- std::unique_ptr<QProcess> createRtspStreamProcess(QString fileName, QString outputUrl);
+ bool canCreateRtpStream() const;
+#if QT_CONFIG(process)
+ std::unique_ptr<QProcess> createRtpStreamProcess(QString fileName, QString sdpUrl);
+#endif
void detectVlcCommand();
// one second local wav file
@@ -204,6 +251,11 @@ private:
MaybeUrl m_colorMatrix90degClockwiseVideo = QUnexpect{};
MaybeUrl m_colorMatrix180degClockwiseVideo = QUnexpect{};
MaybeUrl m_colorMatrix270degClockwiseVideo = QUnexpect{};
+ MaybeUrl m_hdrVideo = QUnexpect{};
+ MaybeUrl m_15sVideo = QUnexpect{};
+ MaybeUrl m_subtitleVideo = QUnexpect{};
+ MaybeUrl m_multitrackVideo = QUnexpect{};
+ MaybeUrl m_multitrackSubtitleStartsAtZeroVideo = QUnexpect{};
MediaFileSelector m_mediaSelector;
@@ -276,17 +328,13 @@ void tst_QMediaPlayerBackend::detectVlcCommand()
m_vlcCommand.clear();
}
-bool tst_QMediaPlayerBackend::canCreateRtspStream() const
+bool tst_QMediaPlayerBackend::canCreateRtpStream() const
{
return !m_vlcCommand.isEmpty();
}
void tst_QMediaPlayerBackend::initTestCase()
{
-#ifdef Q_OS_ANDROID
- QSKIP("SKIP initTestCase on CI, because of QTBUG-118571");
-#endif
-
QMediaPlayer player;
if (!player.isAvailable())
QSKIP("Media player service is not available");
@@ -335,6 +383,13 @@ void tst_QMediaPlayerBackend::initTestCase()
m_colorMatrix270degClockwiseVideo =
m_mediaSelector.select("qrc:/testdata/color_matrix_270_deg_clockwise.mp4");
+ m_hdrVideo = m_mediaSelector.select("qrc:/testdata/h264_avc1_yuv420p10le_tv_bt2020.mov");
+ m_15sVideo = m_mediaSelector.select("qrc:/testdata/15s.mkv");
+ m_subtitleVideo = m_mediaSelector.select("qrc:/testdata/subtitletest.mkv");
+ m_multitrackVideo = m_mediaSelector.select("qrc:/testdata/multitrack.mkv");
+ m_multitrackSubtitleStartsAtZeroVideo =
+ m_mediaSelector.select("qrc:/testdata/multitrack-subtitle-start-at-zero.mkv");
+
detectVlcCommand();
}
@@ -374,7 +429,10 @@ void tst_QMediaPlayerBackend::destructor_emitsOnlyQObjectDestroyedSignal_whenPla
// Arrange
m_fixture->player.setSource(*m_localVideoFile3ColorsWithSound);
m_fixture->player.play();
- QTRY_COMPARE(m_fixture->player.mediaStatus(), QMediaPlayer::BufferedMedia);
+
+ // Wait for started
+ QTRY_VERIFY(m_fixture->player.mediaStatus() == QMediaPlayer::BufferedMedia
+ || m_fixture->player.mediaStatus() == QMediaPlayer::EndOfMedia);
m_fixture->clearSpies();
@@ -490,6 +548,44 @@ void tst_QMediaPlayerBackend::setSource_setsSourceMediaStatusAndError_whenCalled
COMPARE_MEDIA_PLAYER_STATE_EQ(actualState, expectedState);
}
+void tst_QMediaPlayerBackend::setSource_initializesExpectedDefaultState()
+{
+ QFETCH(MaybeUrl, url);
+ CHECK_SELECTED_URL(url);
+
+ QMediaPlayer &player = m_fixture->player;
+ player.setSource(*url);
+
+ MediaPlayerState expectedState = MediaPlayerState::defaultState();
+ expectedState.source = *url;
+ expectedState.mediaStatus = QMediaPlayer::LoadingMedia;
+
+ if (isGStreamerPlatform()) {
+ // gstreamer initializes the tracks
+ expectedState.audioTracks = std::nullopt;
+ expectedState.videoTracks = std::nullopt;
+ expectedState.activeAudioTrack = std::nullopt;
+ expectedState.activeVideoTrack = std::nullopt;
+ expectedState.hasAudio = std::nullopt;
+ expectedState.hasVideo = std::nullopt;
+
+ expectedState.isSeekable = true;
+ }
+
+ const MediaPlayerState actualState{ m_fixture->player };
+ COMPARE_MEDIA_PLAYER_STATE_EQ(actualState, expectedState);
+}
+
+void tst_QMediaPlayerBackend::setSource_initializesExpectedDefaultState_data()
+{
+ QTest::addColumn<MaybeUrl>("url");
+
+ QTest::addRow("with wave file") << m_localWavFile;
+ QTest::addRow("with video file") << m_localVideoFile;
+ QTest::addRow("with av1 file") << m_av1File;
+ QTest::addRow("with compressed sound file") << m_localCompressedSoundFile;
+}
+
void tst_QMediaPlayerBackend::setSource_silentlyCancelsPreviousCall_whenServerDoesNotRespond()
{
#ifdef QT_FEATURE_network
@@ -542,6 +638,7 @@ void tst_QMediaPlayerBackend::setSource_changesSourceAndMediaStatus_whenCalledWi
MediaPlayerState actualState{ m_fixture->player };
+ QSKIP_GSTREAMER("QTBUG-124005: spurious failures");
COMPARE_MEDIA_PLAYER_STATE_EQ(actualState, expectedState);
}
@@ -638,25 +735,31 @@ void tst_QMediaPlayerBackend::setSource_loadsAudioTrack_whenCalledWithValidWavFi
void tst_QMediaPlayerBackend::setSource_resetsState_whenCalledWithEmptyUrl()
{
- CHECK_SELECTED_URL(m_localWavFile);
+ QFETCH(MaybeUrl, url);
+ CHECK_SELECTED_URL(url);
+
+ QMediaPlayer &player = m_fixture->player;
// Load valid media and start playing
- m_fixture->player.setSource(*m_localWavFile);
+ player.setSource(*url);
- QTRY_COMPARE(m_fixture->player.mediaStatus(), QMediaPlayer::LoadedMedia);
+ QTRY_COMPARE(player.mediaStatus(), QMediaPlayer::LoadedMedia);
- QVERIFY(m_fixture->player.position() == 0);
-#ifdef Q_OS_QNX
- // QNX mm-renderer only updates the duration when 'play' is triggered
- QVERIFY(m_fixture->player.duration() == 0);
-#else
- QVERIFY(m_fixture->player.duration() > 0);
-#endif
+ QCOMPARE(player.position(), 0);
- m_fixture->player.play();
+ if (isQNXPlatform())
+ // QNX mm-renderer updates the duration when 'play' is triggered
+ QCOMPARE(player.duration(), 0);
+ else
+ QCOMPARE_GT(player.duration(), 0);
+
+ player.play();
- QTRY_VERIFY(m_fixture->player.position() > 0);
- QVERIFY(m_fixture->player.duration() > 0);
+ QTRY_COMPARE_GT(player.position(), 0);
+ if (isGStreamerPlatform())
+ QTRY_COMPARE_GT(player.duration(), 0); // duration update is asynchronous
+ else
+ QCOMPARE_GT(player.duration(), 0);
// Set empty URL and verify that state is fully reset to default
m_fixture->clearSpies();
@@ -666,12 +769,22 @@ void tst_QMediaPlayerBackend::setSource_resetsState_whenCalledWithEmptyUrl()
QVERIFY(!m_fixture->mediaStatusChanged.isEmpty());
QVERIFY(!m_fixture->sourceChanged.isEmpty());
- const MediaPlayerState expectedState = MediaPlayerState::defaultState();
- const MediaPlayerState actualState{ m_fixture->player };
+ MediaPlayerState expectedState = MediaPlayerState::defaultState();
+ if (isGStreamerPlatform()) // QTBUG-124005: no buffer progress update
+ expectedState.bufferProgress = std::nullopt;
+ const MediaPlayerState actualState{ player };
COMPARE_MEDIA_PLAYER_STATE_EQ(actualState, expectedState);
}
+void tst_QMediaPlayerBackend::setSource_resetsState_whenCalledWithEmptyUrl_data()
+{
+ QTest::addColumn<MaybeUrl>("url");
+
+ QTest::addRow("with wave file") << m_localWavFile;
+ QTest::addRow("with video file") << m_localVideoFile;
+}
+
void tst_QMediaPlayerBackend::setSource_loadsNewMedia_whenPreviousMediaWasFullyLoaded()
{
CHECK_SELECTED_URL(m_localWavFile);
@@ -687,7 +800,8 @@ void tst_QMediaPlayerBackend::setSource_loadsNewMedia_whenPreviousMediaWasFullyL
QCOMPARE(m_fixture->player.mediaStatus(), QMediaPlayer::LoadingMedia);
QTRY_COMPARE(m_fixture->player.mediaStatus(), QMediaPlayer::LoadedMedia);
m_fixture->player.play();
- QTRY_COMPARE(m_fixture->player.mediaStatus(), QMediaPlayer::BufferedMedia);
+ QTRY_VERIFY(m_fixture->player.mediaStatus() == QMediaPlayer::BufferedMedia
+ || m_fixture->player.mediaStatus() == QMediaPlayer::EndOfMedia);
// Load first file again, and wait for it to start loading
m_fixture->player.setSource(*m_localWavFile2);
@@ -776,12 +890,14 @@ void tst_QMediaPlayerBackend::setSource_entersStoppedState_whenPlayerWasPlaying(
// Assert
QTRY_COMPARE(m_fixture->player.mediaStatus(), QMediaPlayer::LoadedMedia);
QTRY_COMPARE(m_fixture->mediaStatusChanged,
- SignalList({ { QMediaPlayer::LoadedMedia },
- { QMediaPlayer::BufferingMedia },
- { QMediaPlayer::BufferedMedia },
- { QMediaPlayer::LoadedMedia },
- { QMediaPlayer::LoadingMedia },
- { QMediaPlayer::LoadedMedia } }));
+ SignalList({
+ { QMediaPlayer::LoadedMedia },
+ { QMediaPlayer::BufferingMedia },
+ { QMediaPlayer::BufferedMedia },
+ { QMediaPlayer::LoadedMedia },
+ { QMediaPlayer::LoadingMedia },
+ { QMediaPlayer::LoadedMedia },
+ }));
QCOMPARE(m_fixture->player.playbackState(), QMediaPlayer::StoppedState);
QTRY_COMPARE(m_fixture->playbackStateChanged,
@@ -793,6 +909,116 @@ void tst_QMediaPlayerBackend::setSource_entersStoppedState_whenPlayerWasPlaying(
QCOMPARE(m_fixture->player.position(), 0);
}
+void tst_QMediaPlayerBackend::setSource_emitsError_whenSdpFileIsLoaded()
+{
+#if !QT_CONFIG(process)
+ QSKIP("This test requires QProcess support");
+#else
+ // NOTE: This test checks that playing RTP streams from a local .sdp file is blocked by
+ // default. For overriding this default, see play_playsRtpStream_whenSdpFileIsLoaded.
+
+ if (!isFFMPEGPlatform())
+ QSKIP("This test is only for FFmpeg backend");
+
+ // Create stream
+ if (!canCreateRtpStream())
+ QSKIP("Rtp stream cannot be created");
+
+ // Make sure the default whitelist is used
+ qunsetenv("QT_FFMPEG_PROTOCOL_WHITELIST");
+
+ auto temporaryFile = copyResourceToTemporaryFile(":/testdata/colors.mp4", "colors.XXXXXX.mp4");
+ QVERIFY(temporaryFile);
+
+ // Pass a "file:" URL to VLC in order to generate an .sdp file
+ const QUrl sdpUrl = QUrl::fromLocalFile(QFileInfo("test.sdp").absoluteFilePath());
+
+ auto process = createRtpStreamProcess(temporaryFile->fileName(), sdpUrl.toString());
+ QVERIFY2(process, "Cannot start rtp process");
+
+ auto processCloser = qScopeGuard([&process, &sdpUrl]() {
+ // End stream
+ process->close();
+
+ // Remove .sdp file created by VLC
+ QFile(sdpUrl.toLocalFile()).remove();
+ });
+
+ m_fixture->player.setSource(sdpUrl);
+ QTRY_COMPARE_EQ(m_fixture->player.error(), QMediaPlayer::ResourceError);
+#endif // QT_CONFIG(process)
+}
+
+void tst_QMediaPlayerBackend::setSource_updatesTrackProperties_data()
+{
+ QTest::addColumn<MaybeUrl>("url");
+ QTest::addColumn<int>("numberOfVideoTracks");
+ QTest::addColumn<int>("numberOfAudioTracks");
+ QTest::addColumn<int>("numberOfSubtitleTracks");
+
+ QTest::addRow("video file with audio") << m_localVideoFile3ColorsWithSound << 1 << 1 << 0;
+ QTest::addRow("video file without audio") << m_colorMatrixVideo << 1 << 0 << 0;
+ QTest::addRow("uncompressed audio file") << m_localWavFile << 0 << 1 << 0;
+ QTest::addRow("compressed audio file") << m_localCompressedSoundFile << 0 << 1 << 0;
+ QTest::addRow("video with subtitle") << m_subtitleVideo << 1 << 1 << 1;
+ QTest::addRow("video with multiple streams") << m_multitrackVideo << 2 << 2 << 2;
+}
+
+void tst_QMediaPlayerBackend::setSource_updatesTrackProperties()
+{
+ QFETCH(MaybeUrl, url);
+ QFETCH(int, numberOfVideoTracks);
+ QFETCH(int, numberOfAudioTracks);
+ QFETCH(int, numberOfSubtitleTracks);
+
+ QMediaPlayer &player = m_fixture->player;
+
+ CHECK_SELECTED_URL(url);
+
+ player.setSource(*url);
+
+ QTRY_COMPARE(player.videoTracks().size(), numberOfVideoTracks);
+ QTRY_COMPARE(player.audioTracks().size(), numberOfAudioTracks);
+ QTRY_COMPARE(player.subtitleTracks().size(), numberOfSubtitleTracks);
+}
+
+void tst_QMediaPlayerBackend::setSource_emitsTracksChanged_data()
+{
+ QTest::addColumn<MaybeUrl>("url");
+ QTest::addColumn<int>("numberOfVideoTracks");
+ QTest::addColumn<int>("numberOfAudioTracks");
+ QTest::addColumn<int>("numberOfSubtitleTracks");
+
+ QTest::addRow("video file with audio") << m_localVideoFile3ColorsWithSound << 1 << 1 << 0;
+ QTest::addRow("video file without audio") << m_colorMatrixVideo << 1 << 0 << 0;
+ QTest::addRow("uncompressed audio file") << m_localWavFile << 0 << 1 << 0;
+ QTest::addRow("compressed audio file") << m_localCompressedSoundFile << 0 << 1 << 0;
+ QTest::addRow("video with subtitle") << m_subtitleVideo << 1 << 1 << 1;
+ QTest::addRow("video with multiple streams") << m_multitrackVideo << 2 << 2 << 2;
+}
+
+void tst_QMediaPlayerBackend::setSource_emitsTracksChanged()
+{
+ QFETCH(MaybeUrl, url);
+ QFETCH(int, numberOfVideoTracks);
+ QFETCH(int, numberOfAudioTracks);
+ QFETCH(int, numberOfSubtitleTracks);
+
+ QMediaPlayer &player = m_fixture->player;
+
+ CHECK_SELECTED_URL(url);
+
+ QSignalSpy tracksChanged(&player, &QMediaPlayer::tracksChanged);
+ player.setSource(*url);
+
+ QVERIFY(tracksChanged.wait());
+
+ QCOMPARE(player.videoTracks().size(), numberOfVideoTracks);
+ QCOMPARE(player.audioTracks().size(), numberOfAudioTracks);
+ QCOMPARE(player.subtitleTracks().size(), numberOfSubtitleTracks);
+}
+
void tst_QMediaPlayerBackend::
setSourceAndPlay_setCorrectVideoSize_whenVideoHasNonStandardPixelAspectRatio_data()
{
@@ -801,13 +1027,25 @@ void tst_QMediaPlayerBackend::
QTest::addRow("Horizontal expanding (par=3/2)")
<< m_192x108_PAR_3_2_Video << QSize(192 * 3 / 2, 108);
- QTest::addRow("Vertical expanding (par=2/3)")
- << m_192x108_PAR_2_3_Video << QSize(192, 108 * 3 / 2);
+
+ if (isGStreamerPlatform())
+ // QTBUG-125249: gstreamer tries "to keep the input height (because of interlacing)"
+ QTest::addRow("Horizontal shrinking (par=2/3)")
+ << m_192x108_PAR_2_3_Video << QSize(192 * 2 / 3, 108);
+ else
+ QTest::addRow("Vertical expanding (par=2/3)")
+ << m_192x108_PAR_2_3_Video << QSize(192, 108 * 3 / 2);
}
void tst_QMediaPlayerBackend::
setSourceAndPlay_setCorrectVideoSize_whenVideoHasNonStandardPixelAspectRatio()
{
+#ifdef Q_OS_ANDROID
+ QSKIP("SKIP initTestCase on CI, because of QTBUG-126428");
+#endif
+ if (isGStreamerPlatform() && isCI())
+ QSKIP("QTBUG-124005: Fails with gstreamer on CI");
+
QFETCH(MaybeUrl, url);
QFETCH(QSize, expectedVideoSize);
@@ -839,13 +1077,13 @@ void tst_QMediaPlayerBackend::
// Video schema:
//
// 192
- // /---------------------\
+ // *---------------------*
// | White | |
// | | |
// |----------/ | 108
// | Red |
// | |
- // \---------------------/
+ // *---------------------*
// clang-format on
@@ -907,7 +1145,7 @@ void tst_QMediaPlayerBackend::pause_entersPauseState_whenPlayerWasPlaying()
// Arrange
m_fixture->player.setSource(*m_localWavFile);
m_fixture->player.play();
- QTRY_VERIFY(m_fixture->player.position() > 100);
+ QTRY_COMPARE_GT(m_fixture->player.position(), 100);
m_fixture->clearSpies();
const qint64 positionBeforePause = m_fixture->player.position();
@@ -919,9 +1157,123 @@ void tst_QMediaPlayerBackend::pause_entersPauseState_whenPlayerWasPlaying()
QCOMPARE_EQ(m_fixture->playbackStateChanged, SignalList({ { QMediaPlayer::PausedState } }));
QTRY_COMPARE(m_fixture->player.mediaStatus(), QMediaPlayer::BufferedMedia);
+ QTRY_COMPARE_LT(qAbs(m_fixture->player.position() - positionBeforePause), 200);
+
QTest::qWait(500);
- QTRY_VERIFY(qAbs(m_fixture->player.position() - positionBeforePause) < 150);
+ QTRY_COMPARE_LT(qAbs(m_fixture->player.position() - positionBeforePause), 200);
+}
+
+void tst_QMediaPlayerBackend::pause_initializesExpectedDefaultState()
+{
+ QFETCH(MaybeUrl, url);
+ QFETCH(bool, hasVideo);
+ QFETCH(bool, hasAudio);
+ CHECK_SELECTED_URL(url);
+
+ if (isFFMPEGPlatform() && url->path().contains("Av1"))
+ QSKIP("QTBUG-119711: ffmpeg's binaries on CI do not support av1");
+
+ QMediaPlayer &player = m_fixture->player;
+ player.setSource(*url);
+ player.pause();
+
+ QTRY_COMPARE(player.playbackState(), QMediaPlayer::PausedState);
+
+ MediaPlayerState expectedState = MediaPlayerState::defaultState();
+ expectedState.source = *url;
+ expectedState.playbackState = QMediaPlayer::PausedState;
+ expectedState.isSeekable = true;
+
+ expectedState.mediaStatus = std::nullopt;
+ expectedState.duration = std::nullopt;
+ expectedState.bufferProgress = std::nullopt;
+
+ expectedState.audioTracks = std::nullopt;
+ expectedState.videoTracks = std::nullopt;
+ expectedState.metaData = std::nullopt;
+
+ if (hasVideo) {
+ expectedState.activeVideoTrack = 0;
+ expectedState.hasVideo = std::nullopt;
+ }
+
+ if (hasAudio) {
+ expectedState.activeAudioTrack = 0;
+ expectedState.hasAudio = std::nullopt;
+ }
+
+ const MediaPlayerState actualState{ player };
+ COMPARE_MEDIA_PLAYER_STATE_EQ(actualState, expectedState);
+
+ QVERIFY(actualState.mediaStatus == QMediaPlayer::BufferingMedia
+ || actualState.mediaStatus == QMediaPlayer::BufferedMedia);
+
+ if (hasVideo)
+ QCOMPARE(actualState.videoTracks->size(), 1);
+ if (hasAudio)
+ QCOMPARE(actualState.audioTracks->size(), 1);
+
+ QEXPECT_FAIL_GSTREAMER("", "GStreamer doesn't update bufferProgress while paused", Continue);
+
+ QTRY_COMPARE_GT(actualState.bufferProgress, 0);
+}
+
+void tst_QMediaPlayerBackend::pause_initializesExpectedDefaultState_data()
+{
+ QTest::addColumn<MaybeUrl>("url");
+ QTest::addColumn<bool>("hasVideo");
+ QTest::addColumn<bool>("hasAudio");
+
+ QTest::addRow("with wave file") << m_localWavFile << false << true;
+ QTest::addRow("with video file") << m_localVideoFile << true << true;
+ QTest::addRow("with av1 file") << m_av1File << true << false;
+ QTest::addRow("with compressed sound file") << m_localCompressedSoundFile << false << true;
+}
+
+void tst_QMediaPlayerBackend::pause_doesNotAdvancePosition()
+{
+ using namespace std::chrono_literals;
+
+ CHECK_SELECTED_URL(m_localVideoFile);
+
+ QMediaPlayer &player = m_fixture->player;
+ player.setSource(*m_localVideoFile);
+
+ player.pause();
+
+ QTest::qWait(1s);
+
+ QTRY_COMPARE_EQ(player.position(), 0);
+}
+
+void tst_QMediaPlayerBackend::pause_playback_resumesFromPausedPosition()
+{
+ using namespace std::chrono_literals;
+
+ CHECK_SELECTED_URL(m_localVideoFile);
+
+ QMediaPlayer &player = m_fixture->player;
+ player.setSource(*m_localVideoFile);
+
+ player.play();
+
+ QTRY_COMPARE_GT(player.position(), 100);
+
+ player.pause();
+
+ qint64 pausePos = player.position();
+ QTest::qWait(1s);
+
+ QCOMPARE_EQ(player.position(), pausePos);
+
+ player.play();
+
+ // Make sure the media player does not make up for the lost time
+ m_fixture->positionChanged.wait();
+ m_fixture->positionChanged.wait();
+
+ QCOMPARE_LT(player.position(), pausePos + 500);
}
void tst_QMediaPlayerBackend::play_resetsErrorState_whenCalledWithInvalidFile()
@@ -959,7 +1311,8 @@ void tst_QMediaPlayerBackend::play_resumesPlaying_whenValidMediaIsProvidedAfterI
// Assert
QTRY_VERIFY(m_fixture->framesCount > 0);
- QTRY_COMPARE(m_fixture->player.mediaStatus(), QMediaPlayer::BufferedMedia);
+ QTRY_VERIFY(m_fixture->player.mediaStatus() == QMediaPlayer::BufferedMedia
+ || m_fixture->player.mediaStatus() == QMediaPlayer::EndOfMedia);
QCOMPARE_EQ(m_fixture->player.playbackState(), QMediaPlayer::PlayingState);
QCOMPARE(m_fixture->player.error(), QMediaPlayer::NoError);
}
@@ -987,14 +1340,19 @@ void tst_QMediaPlayerBackend::play_setsPlaybackStateAndMediaStatus_whenValidFile
m_fixture->player.play();
QTRY_COMPARE_EQ(m_fixture->player.playbackState(), QMediaPlayer::PlayingState);
- QTRY_COMPARE_EQ(m_fixture->player.mediaStatus(), QMediaPlayer::BufferedMedia);
+ QTRY_VERIFY(m_fixture->player.mediaStatus() == QMediaPlayer::BufferedMedia
+ || m_fixture->player.mediaStatus() == QMediaPlayer::EndOfMedia);
QCOMPARE(m_fixture->playbackStateChanged, SignalList({ { QMediaPlayer::PlayingState } }));
- QTRY_COMPARE_EQ(m_fixture->mediaStatusChanged,
- SignalList({ { QMediaPlayer::LoadingMedia },
- { QMediaPlayer::LoadedMedia },
- { QMediaPlayer::BufferingMedia },
- { QMediaPlayer::BufferedMedia } }));
+
+ auto expectedMediaStatus = SignalList{
+ { QMediaPlayer::LoadingMedia },
+ { QMediaPlayer::LoadedMedia },
+ { QMediaPlayer::BufferingMedia },
+ { QMediaPlayer::BufferedMedia },
+ };
+
+ QTRY_COMPARE_EQ(m_fixture->mediaStatusChanged.first(4), expectedMediaStatus);
QTRY_COMPARE_GT(m_fixture->bufferProgressChanged.size(), 0);
QTRY_COMPARE_NE(m_fixture->bufferProgressChanged.front().front(), 0.f);
@@ -1032,15 +1390,26 @@ void tst_QMediaPlayerBackend::play_doesNotEnterMediaLoadingState_whenResumingPla
// Assert
QCOMPARE(m_fixture->player.playbackState(), QMediaPlayer::PlayingState);
- QTRY_COMPARE(m_fixture->player.mediaStatus(), QMediaPlayer::BufferedMedia);
- QCOMPARE_EQ(m_fixture->playbackStateChanged, SignalList({ { QMediaPlayer::PlayingState } }));
+ QTRY_VERIFY(m_fixture->player.mediaStatus() == QMediaPlayer::BufferedMedia
+ || m_fixture->player.mediaStatus() == QMediaPlayer::EndOfMedia);
+ QTRY_VERIFY(m_fixture->playbackStateChanged.contains({ QMediaPlayer::PlayingState }));
// Note: Should not go through Loading again when play -> stop -> play
- QCOMPARE_EQ(m_fixture->mediaStatusChanged,
- SignalList({
- { QMediaPlayer::BufferingMedia },
- { QMediaPlayer::BufferedMedia },
- }));
+ if (!isGStreamerPlatform()) {
+ QCOMPARE_EQ(m_fixture->mediaStatusChanged,
+ SignalList({
+ { QMediaPlayer::BufferingMedia },
+ { QMediaPlayer::BufferedMedia },
+ }));
+ } else {
+ QTRY_COMPARE_EQ(m_fixture->mediaStatusChanged,
+ // gstreamer may see EndOfMedia
+ SignalList({
+ { QMediaPlayer::BufferingMedia },
+ { QMediaPlayer::BufferedMedia },
+ { QMediaPlayer::EndOfMedia },
+ }));
+ }
}
void tst_QMediaPlayerBackend::playAndSetSource_emitsExpectedSignalsAndStopsPlayback_whenSetSourceWasCalledWithEmptyUrl()
@@ -1061,12 +1430,25 @@ void tst_QMediaPlayerBackend::playAndSetSource_emitsExpectedSignalsAndStopsPlayb
const MediaPlayerState actualState{ m_fixture->player };
COMPARE_MEDIA_PLAYER_STATE_EQ(actualState, expectedState);
- QTRY_COMPARE_EQ(m_fixture->mediaStatusChanged,
- SignalList({ { QMediaPlayer::LoadedMedia },
- { QMediaPlayer::BufferingMedia },
- { QMediaPlayer::BufferedMedia },
- { QMediaPlayer::LoadedMedia },
- { QMediaPlayer::NoMedia } }));
+ QList allowedSignalSequences = {
+ SignalList{
+ { QMediaPlayer::LoadedMedia },
+ { QMediaPlayer::BufferingMedia },
+ { QMediaPlayer::BufferedMedia },
+ { QMediaPlayer::LoadedMedia },
+ { QMediaPlayer::NoMedia },
+ },
+ SignalList{
+ { QMediaPlayer::LoadedMedia },
+ { QMediaPlayer::BufferingMedia },
+ { QMediaPlayer::BufferedMedia },
+ { QMediaPlayer::EndOfMedia }, // EndOfMedia can be reached before setSource({})
+ { QMediaPlayer::LoadedMedia },
+ { QMediaPlayer::NoMedia },
+ },
+ };
+
+ QTRY_VERIFY(allowedSignalSequences.contains(m_fixture->mediaStatusChanged));
QTRY_COMPARE_EQ(m_fixture->playbackStateChanged,
SignalList({ { QMediaPlayer::PlayingState }, { QMediaPlayer::StoppedState } }));
@@ -1078,15 +1460,20 @@ void tst_QMediaPlayerBackend::playAndSetSource_emitsExpectedSignalsAndStopsPlayb
void tst_QMediaPlayerBackend::
play_createsFramesWithExpectedContentAndIncreasingFrameTime_whenPlayingRtspMediaStream()
{
- if (!canCreateRtspStream())
+#if !QT_CONFIG(process)
+ QSKIP("This test requires QProcess support");
+#else
+ if (!canCreateRtpStream())
QSKIP("Rtsp stream cannot be created");
+ QSKIP_GSTREAMER("GStreamer tests fail");
+
auto temporaryFile = copyResourceToTemporaryFile(":/testdata/colors.mp4", "colors.XXXXXX.mp4");
QVERIFY(temporaryFile);
const QString streamUrl = "rtsp://localhost:8083/stream";
- auto process = createRtspStreamProcess(temporaryFile->fileName(), streamUrl);
+ auto process = createRtpStreamProcess(temporaryFile->fileName(), streamUrl);
QVERIFY2(process, "Cannot start rtsp process");
auto processCloser = qScopeGuard([&process]() { process->close(); });
@@ -1138,10 +1525,17 @@ void tst_QMediaPlayerBackend::
QCOMPARE(player.playbackState(), QMediaPlayer::StoppedState);
QCOMPARE(errorSpy.size(), 0);
+#endif //QT_CONFIG(process)
}
void tst_QMediaPlayerBackend::play_waitsForLastFrameEnd_whenPlayingVideoWithLongFrames()
{
+#ifdef Q_OS_ANDROID
+ QSKIP("SKIP initTestCase on CI, because of QTBUG-126428");
+#endif
+ if (isCI() && isGStreamerPlatform())
+ QSKIP_GSTREAMER("QTBUG-124005: spurious failures with gstreamer");
+
CHECK_SELECTED_URL(m_oneRedFrameVideo);
m_fixture->surface.setStoreFrames(true);
@@ -1162,7 +1556,7 @@ void tst_QMediaPlayerBackend::play_waitsForLastFrameEnd_whenPlayingVideoWithLong
// QTBUG-124005: GStreamer timing seems to be off
// 1000 is expected
- QCOMPARE_GT(elapsed, 900);
+ QCOMPARE_GT(elapsed, 850);
QCOMPARE_LT(elapsed, 1400);
}
@@ -1173,8 +1567,6 @@ void tst_QMediaPlayerBackend::play_waitsForLastFrameEnd_whenPlayingVideoWithLong
void tst_QMediaPlayerBackend::play_startsPlayback_withAndWithoutOutputsConnected()
{
- QSKIP_GSTREAMER("QTBUG-124501: Fails with gstreamer");
-
QFETCH(const bool, audioConnected);
QFETCH(const bool, videoConnected);
@@ -1219,37 +1611,137 @@ void tst_QMediaPlayerBackend::play_startsPlayback_withAndWithoutOutputsConnected
QTest::addRow("no output connected") << false << false;
}
+void tst_QMediaPlayerBackend::play_playsRtpStream_whenSdpFileIsLoaded()
+{
+#if !QT_CONFIG(process)
+ QSKIP("This test requires QProcess support");
+#else
+ if (!isFFMPEGPlatform())
+ QSKIP("This test is only for FFmpeg backend");
+
+ // Create stream
+ if (!canCreateRtpStream())
+ QSKIP("Rtp stream cannot be created");
+
+ auto temporaryFile = copyResourceToTemporaryFile(":/testdata/colors.mp4", "colors.XXXXXX.mp4");
+ QVERIFY(temporaryFile);
+
+ // Pass a "file:" URL to VLC in order to generate an .sdp file
+ const QUrl sdpUrl = QUrl::fromLocalFile(QFileInfo("test.sdp").absoluteFilePath());
+
+ auto process = createRtpStreamProcess(temporaryFile->fileName(), sdpUrl.toString());
+ QVERIFY2(process, "Cannot start rtp process");
+
+ // Set reasonable protocol whitelist that includes rtp and udp
+ qputenv("QT_FFMPEG_PROTOCOL_WHITELIST", "file,crypto,data,rtp,udp");
+
+ auto processCloser = qScopeGuard([&process, &sdpUrl]() {
+ // End stream
+ process->close();
+
+ // Remove .sdp file created by VLC
+ QFile(sdpUrl.toLocalFile()).remove();
+
+ // Unset environment variable
+ qunsetenv("QT_FFMPEG_PROTOCOL_WHITELIST");
+ });
+
+ m_fixture->player.setSource(sdpUrl);
+
+ // Play
+ m_fixture->player.play();
+ QTRY_COMPARE(m_fixture->player.playbackState(), QMediaPlayer::PlayingState);
+#endif // QT_CONFIG(process)
+}
+
+void tst_QMediaPlayerBackend::play_succeedsFromSourceDevice()
+{
+ QFETCH(const MaybeUrl, mediaUrl);
+ QFETCH(bool, streamOutlivesPlayer);
+
+ CHECK_SELECTED_URL(mediaUrl);
+
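+ // Open the media as a raw QIODevice: prefixing the qrc path with ":" maps the
+ // qrc:/ URL onto QFile's resource syntax.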
+ auto *stream = new QFile(u":"_s + mediaUrl->path());
+
+ QVERIFY(stream->open(QFile::ReadOnly));
+
+ QMediaPlayer &player = m_fixture->player;
+
+ player.setSourceDevice(stream);
+
+ player.play();
+ QTRY_COMPARE_GT(player.position(), 100);
+
+ if (streamOutlivesPlayer)
+ stream->setParent(&player);
+ else
+ delete stream;
+}
+
+void tst_QMediaPlayerBackend::play_succeedsFromSourceDevice_data()
+{
+ QTest::addColumn<MaybeUrl>("mediaUrl");
+ QTest::addColumn<bool>("streamOutlivesPlayer");
+
+ QTest::addRow("audio file") << m_localWavFile << true;
+ QTest::addRow("video file") << m_localVideoFile << true;
+
+ // QMediaPlayer crashes when we delete the stream during playback
+ constexpr bool validateStreamDestructionDuringPlayback = false;
+ if constexpr (validateStreamDestructionDuringPlayback) {
+ QTest::addRow("audio file, stream destroyed during playback") << m_localWavFile << false;
+ QTest::addRow("video file, stream destroyed during playback") << m_localVideoFile << false;
+ }
+}
+
void tst_QMediaPlayerBackend::stop_entersStoppedState_whenPlayerWasPaused()
{
- CHECK_SELECTED_URL(m_localWavFile);
+ QFETCH(const MaybeUrl, mediaUrl);
+
+ CHECK_SELECTED_URL(mediaUrl);
+ QMediaPlayer &player = m_fixture->player;
// Arrange
- m_fixture->player.setSource(*m_localWavFile);
- m_fixture->player.play();
- QTRY_VERIFY(m_fixture->player.position() > 100);
- m_fixture->player.pause();
- QTRY_COMPARE(m_fixture->player.mediaStatus(), QMediaPlayer::BufferedMedia);
+ player.setSource(*mediaUrl);
+ player.play();
+ QTRY_COMPARE_GT(player.position(), 100);
+ player.pause();
+ QTRY_COMPARE(player.mediaStatus(), QMediaPlayer::BufferedMedia);
m_fixture->clearSpies();
+ if (!isGStreamerPlatform()) // Gstreamer may see EOS already
+ QCOMPARE_GT(player.position(), 100);
+
// Act
- m_fixture->player.stop();
+ player.stop();
// Assert
- QCOMPARE(m_fixture->player.playbackState(), QMediaPlayer::StoppedState);
- QTRY_COMPARE(m_fixture->player.mediaStatus(), QMediaPlayer::LoadedMedia);
+ QCOMPARE(player.playbackState(), QMediaPlayer::StoppedState);
+ QTRY_COMPARE(player.mediaStatus(), QMediaPlayer::LoadedMedia);
QCOMPARE(m_fixture->playbackStateChanged, SignalList({ { QMediaPlayer::StoppedState } }));
// it's allowed to emit statusChanged() signal async
QTRY_COMPARE(m_fixture->mediaStatusChanged, SignalList({ { QMediaPlayer::LoadedMedia } }));
- if (!isGStreamerPlatform())
+ if (isGStreamerPlatform() && *mediaUrl == *m_localWavFile) {
// QTBUG-124517: for some media types gstreamer does not emit buffer progress messages
+ } else {
QCOMPARE(m_fixture->bufferProgressChanged, SignalList({ { 0.f } }));
+ }
QTRY_COMPARE(m_fixture->player.position(), qint64(0));
+
QTRY_VERIFY(!m_fixture->positionChanged.empty());
QCOMPARE(m_fixture->positionChanged.last()[0].value<qint64>(), qint64(0));
- QVERIFY(m_fixture->player.duration() > 0);
+ QVERIFY(player.duration() > 0);
+}
+
+void tst_QMediaPlayerBackend::stop_entersStoppedState_whenPlayerWasPaused_data()
+{
+ QTest::addColumn<MaybeUrl>("mediaUrl");
+
+ QTest::addRow("audio file") << m_localWavFile;
+ QTest::addRow("video file") << m_localVideoFile;
}
void tst_QMediaPlayerBackend::stop_setsPositionToZero_afterPlayingToEndOfMedia()
@@ -1269,6 +1761,10 @@ void tst_QMediaPlayerBackend::stop_setsPositionToZero_afterPlayingToEndOfMedia()
QCOMPARE(m_fixture->player.playbackState(), QMediaPlayer::StoppedState);
m_fixture->player.play();
+
+ if (isGStreamerPlatform())
+ QSKIP_GSTREAMER("QTBUG-124005: spurious failures with gstreamer");
+
QVERIFY(m_fixture->surface.waitForFrame().isValid());
}
@@ -1324,6 +1820,75 @@ void tst_QMediaPlayerBackend::setPlaybackRate_changesPlaybackRateAndEmitsSignal(
QCOMPARE_EQ(m_fixture->player.playbackRate(), expectedPlaybackRate);
}
+void tst_QMediaPlayerBackend::setPlaybackRate_changesPlaybackDuration()
+{
+ using namespace std::chrono;
+ using namespace std::chrono_literals;
+
+ CHECK_SELECTED_URL(m_15sVideo);
+
+ // speeding up a 15s file by 3 should result in a duration of 5s
+ // auto minDuration = 3s;
+ // auto maxDuration = 7s;
+ // auto playbackRate = 3.0;
+
+ // speeding up a 15s file by 5 should result in a duration of 3s
+ auto minDuration = 2s;
+ auto maxDuration = 4s;
+ auto playbackRate = 5.0;
+
+ QFETCH(const QLatin1String, testMode);
+
+ QMediaPlayer &player = m_fixture->player;
+
+ if (testMode == "SetRateBeforeSetSource"_L1)
+ player.setPlaybackRate(playbackRate);
+
+ player.setSource(*m_15sVideo);
+
+ QTRY_COMPARE_EQ(player.mediaStatus(), QMediaPlayer::LoadedMedia);
+
+ auto begin = steady_clock::now();
+
+ if (testMode == "SetRateBeforePlay"_L1) {
+ QSKIP_GSTREAMER("FIXME: SetRateBeforeSetSource is currently broken");
+ player.setPlaybackRate(playbackRate);
+ }
+
+ player.play();
+
+ if (testMode == "SetRateAfterPlay"_L1)
+ player.setPlaybackRate(playbackRate);
+
+ if (testMode == "SetRateAfterPlaybackStarted"_L1) {
+ QTRY_COMPARE_GT(player.position(), 50);
+ player.setPlaybackRate(playbackRate);
+ }
+
+ QCOMPARE(player.playbackRate(), playbackRate);
+
+ QTRY_COMPARE_EQ_WITH_TIMEOUT(player.playbackState(), QMediaPlayer::StoppedState, 20s);
+
+ auto end = steady_clock::now();
+ auto duration = end - begin;
+
+ if (false)
+ qDebug() << round<milliseconds>(duration);
+
+ QCOMPARE_LT(duration, maxDuration);
+ QCOMPARE_GT(duration, minDuration);
+}
+
+void tst_QMediaPlayerBackend::setPlaybackRate_changesPlaybackDuration_data()
+{
+ QTest::addColumn<QLatin1String>("testMode");
+
+ QTest::addRow("SetRateBeforeSetSource") << "SetRateBeforeSetSource"_L1;
+ QTest::addRow("SetRateBeforePlay") << "SetRateBeforePlay"_L1;
+ QTest::addRow("SetRateAfterPlay") << "SetRateAfterPlay"_L1;
+ QTest::addRow("SetRateAfterPlaybackStarted") << "SetRateAfterPlaybackStarted"_L1;
+}
+
void tst_QMediaPlayerBackend::setVolume_changesVolume_whenVolumeIsInRange()
{
m_fixture->output.setVolume(0.0f);
@@ -1393,9 +1958,17 @@ void tst_QMediaPlayerBackend::setMuted_doesNotChangeVolume()
void tst_QMediaPlayerBackend::processEOS()
{
- CHECK_SELECTED_URL(m_localWavFile);
+ QSKIP_GSTREAMER("QTBUG-124005: spurious failure with gstreamer");
- m_fixture->player.setSource(*m_localWavFile);
+ if (!isGStreamerPlatform()) {
+ // QTBUG-124517: for some media types, including wav files, gstreamer does not emit buffer
+ // progress messages
+ CHECK_SELECTED_URL(m_localWavFile);
+ m_fixture->player.setSource(*m_localWavFile);
+ } else {
+ CHECK_SELECTED_URL(m_localVideoFile3ColorsWithSound);
+ m_fixture->player.setSource(*m_localVideoFile3ColorsWithSound);
+ }
m_fixture->player.play();
m_fixture->player.setPosition(900);
@@ -1421,12 +1994,13 @@ void tst_QMediaPlayerBackend::processEOS()
m_fixture->player.play();
//position is reset to start
- QTRY_VERIFY(m_fixture->player.position() < 100);
+ QTRY_COMPARE_LT(m_fixture->player.position(), 500);
QTRY_VERIFY(m_fixture->positionChanged.size() > 0);
QCOMPARE(m_fixture->positionChanged.first()[0].value<qint64>(), 0);
QCOMPARE(m_fixture->player.playbackState(), QMediaPlayer::PlayingState);
- QTRY_COMPARE(m_fixture->player.mediaStatus(), QMediaPlayer::BufferedMedia);
+ QTRY_VERIFY(m_fixture->player.mediaStatus() == QMediaPlayer::BufferedMedia
+ || m_fixture->player.mediaStatus() == QMediaPlayer::EndOfMedia);
QCOMPARE(m_fixture->playbackStateChanged.size(), 1);
QCOMPARE(m_fixture->playbackStateChanged.last()[0].value<QMediaPlayer::PlaybackState>(), QMediaPlayer::PlayingState);
@@ -1445,11 +2019,8 @@ void tst_QMediaPlayerBackend::processEOS()
QCOMPARE(m_fixture->playbackStateChanged.size(), 2);
QCOMPARE(m_fixture->playbackStateChanged.last()[0].value<QMediaPlayer::PlaybackState>(), QMediaPlayer::StoppedState);
- if (!isGStreamerPlatform()) {
- // QTBUG-124517: for some media types gstreamer does not emit buffer progress messages
- QCOMPARE_GT(m_fixture->bufferProgressChanged.size(), 1);
- QCOMPARE(m_fixture->bufferProgressChanged.back().front(), 0.f);
- }
+ QCOMPARE_GT(m_fixture->bufferProgressChanged.size(), 1);
+ QCOMPARE(m_fixture->bufferProgressChanged.back().front(), 0.f);
// position stays at the end of file
QCOMPARE(m_fixture->player.position(), m_fixture->player.duration());
@@ -1542,9 +2113,9 @@ void tst_QMediaPlayerBackend::deleteLaterAtEOS()
// QTRY_VERIFY or QTest::qWait. QTest::qWait makes extra effort to process
// DeferredDelete events during the wait, which interferes with this test.
QEventLoop loop;
- QTimer::singleShot(0, &deleter, SLOT(play()));
- QTimer::singleShot(5000, &loop, SLOT(quit()));
- connect(player.data(), SIGNAL(destroyed()), &loop, SLOT(quit()));
+ QTimer::singleShot(0, &deleter, &DeleteLaterAtEos::play);
+ QTimer::singleShot(5000, &loop, &QEventLoop::quit);
+ connect(player.data(), &QObject::destroyed, &loop, &QEventLoop::quit);
loop.exec();
// Verify that the player was destroyed within the event loop.
// This check will fail without the fix for QTBUG-24927.
@@ -1651,7 +2222,7 @@ void tst_QMediaPlayerBackend::seekPauseSeek()
player.setAudioOutput(&output);
- QSignalSpy positionSpy(&player, SIGNAL(positionChanged(qint64)));
+ QSignalSpy positionSpy(&player, &QMediaPlayer::positionChanged);
player.setVideoOutput(&surface);
@@ -1727,8 +2298,8 @@ void tst_QMediaPlayerBackend::seekInStoppedState()
player.setAudioOutput(&output);
player.setVideoOutput(&surface);
- QSignalSpy stateSpy(&player, SIGNAL(playbackStateChanged(QMediaPlayer::PlaybackState)));
- QSignalSpy positionSpy(&player, SIGNAL(positionChanged(qint64)));
+ QSignalSpy stateSpy(&player, &QMediaPlayer::playbackStateChanged);
+ QSignalSpy positionSpy(&player, &QMediaPlayer::positionChanged);
player.setSource(*m_localVideoFile);
QTRY_COMPARE(player.mediaStatus(), QMediaPlayer::LoadedMedia);
@@ -1822,7 +2393,8 @@ void tst_QMediaPlayerBackend::seekInStoppedState()
player.play();
QTRY_COMPARE(player.playbackState(), QMediaPlayer::PlayingState);
- QTRY_COMPARE(player.mediaStatus(), QMediaPlayer::BufferedMedia);
+ QTRY_VERIFY(player.mediaStatus() == QMediaPlayer::BufferedMedia
+ || player.mediaStatus() == QMediaPlayer::EndOfMedia);
positionSpy.clear();
QTRY_VERIFY(player.position() > (position - 200));
@@ -1836,6 +2408,8 @@ void tst_QMediaPlayerBackend::seekInStoppedState()
void tst_QMediaPlayerBackend::subsequentPlayback()
{
+ QSKIP_GSTREAMER("QTBUG-124005: spurious seek failures with gstreamer");
+
CHECK_SELECTED_URL(m_localCompressedSoundFile);
QAudioOutput output;
@@ -1849,7 +2423,7 @@ void tst_QMediaPlayerBackend::subsequentPlayback()
QCOMPARE(player.error(), QMediaPlayer::NoError);
QTRY_COMPARE(player.playbackState(), QMediaPlayer::PlayingState);
- QTRY_COMPARE(player.mediaStatus(), QMediaPlayer::EndOfMedia);
+ QTRY_COMPARE_WITH_TIMEOUT(player.mediaStatus(), QMediaPlayer::EndOfMedia, 10s);
QCOMPARE(player.playbackState(), QMediaPlayer::StoppedState);
// Could differ by up to 1 compressed frame length
QVERIFY(qAbs(player.position() - player.duration()) < 100);
@@ -1894,6 +2468,7 @@ void tst_QMediaPlayerBackend::multipleMediaPlayback()
QCOMPARE(player.error(), QMediaPlayer::NoError);
QCOMPARE(player.playbackState(), QMediaPlayer::PlayingState);
+ QVERIFY(player.isSeekable());
QTRY_VERIFY(player.position() > 0);
QCOMPARE(player.source(), *m_localVideoFile);
@@ -1922,11 +2497,14 @@ void tst_QMediaPlayerBackend::multiplePlaybackRateChangingStressTest()
{
CHECK_SELECTED_URL(m_localVideoFile3ColorsWithSound);
-#ifdef Q_OS_MACOS
- if (qEnvironmentVariable("QTEST_ENVIRONMENT").toLower() == "ci")
- QSKIP("SKIP on macOS CI since multiple fake drawing on macOS CI platform causes UB. To be "
- "investigated.");
-#endif
+ if (isCI()) {
+ if (isDarwinPlatform())
+ QSKIP("SKIP on macOS CI since multiple fake drawing on macOS CI platform causes UB. To "
+ "be investigated.");
+
+ if (isGStreamerPlatform())
+ QSKIP_GSTREAMER("QTBUG-124005: spurious failures with gstreamer");
+ }
TestVideoSink surface(false);
QAudioOutput output;
@@ -1943,24 +2521,35 @@ void tst_QMediaPlayerBackend::multiplePlaybackRateChangingStressTest()
QSignalSpy spy(&player, &QMediaPlayer::playbackStateChanged);
- constexpr qint64 expectedVideoDuration = 3000;
- constexpr int waitingInterval = 200;
- constexpr qint64 maxDuration = expectedVideoDuration + 2000;
- constexpr qint64 minDuration = expectedVideoDuration - 100;
- constexpr qint64 maxFrameDelay = 2000;
+ using namespace std::chrono_literals;
+ using namespace std::chrono;
+
+ constexpr milliseconds expectedVideoDuration = 3000ms;
+ constexpr milliseconds waitingInterval = 200ms;
+ constexpr milliseconds maxDuration = expectedVideoDuration + 2000ms;
+ constexpr milliseconds minDuration = expectedVideoDuration - 100ms;
+ constexpr milliseconds maxFrameDelay = 2000ms;
surface.m_elapsedTimer.start();
- qint64 duration = 0;
+ nanoseconds duration = 0ns;
- for (int i = 0; !spy.wait(waitingInterval); ++i) {
- duration += waitingInterval * player.playbackRate();
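+ // Each wait accumulates the effective playback time: the elapsed wall-clock time,
+ // scaled by the playback rate that was active during the wait.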
+ auto waitForPlaybackStateChange = [&]() {
+ QElapsedTimer timer;
+ timer.start();
+ QScopeGuard addDuration([&]() {
+ duration += duration_cast<nanoseconds>(timer.durationElapsed() * player.playbackRate());
+ });
+ return spy.wait(waitingInterval);
+ };
+
+ for (int i = 0; !waitForPlaybackStateChange(); ++i) {
player.setPlaybackRate(0.5 * (i % 4 + 1));
QCOMPARE_LE(duration, maxDuration);
- QVERIFY2(surface.m_elapsedTimer.elapsed() < maxFrameDelay,
+ QVERIFY2(surface.m_elapsedTimer.durationElapsed() < maxFrameDelay,
"If the delay is more than 2s, we consider the video playing is hanging.");
/* Some debug code for windows. Use the code instead of the check above to debug the bug.
@@ -1975,8 +2564,6 @@ void tst_QMediaPlayerBackend::multiplePlaybackRateChangingStressTest()
}*/
}
- duration += waitingInterval * player.playbackRate();
-
QCOMPARE_GT(duration, minDuration);
QCOMPARE(spy.size(), 1);
@@ -1989,6 +2576,8 @@ void tst_QMediaPlayerBackend::multiplePlaybackRateChangingStressTest()
void tst_QMediaPlayerBackend::multipleSeekStressTest()
{
+ QSKIP_GSTREAMER("QTBUG-124005: spurious test failures with gstreamer");
+
#ifdef Q_OS_ANDROID
QSKIP("frame.toImage will return null image because of QTBUG-108446");
#endif
@@ -2039,7 +2628,7 @@ void tst_QMediaPlayerBackend::multipleSeekStressTest()
};
auto seekAndCheck = [&](qint64 pos) {
- QSignalSpy positionSpy(&player, SIGNAL(positionChanged(qint64)));
+ QSignalSpy positionSpy(&player, &QMediaPlayer::positionChanged);
player.setPosition(pos);
QTRY_VERIFY(positionSpy.size() >= 1);
@@ -2088,7 +2677,7 @@ void tst_QMediaPlayerBackend::setPlaybackRate_changesActualRateAndFramesRenderin
QTest::addColumn<bool>("withAudio");
QTest::addColumn<int>("positionDeviationMs");
- QTest::newRow("Without audio") << false << 150;
+ QTest::newRow("Without audio") << false << 170;
// set greater positionDeviationMs for case with audio due to possible synchronization.
QTest::newRow("With audio") << true << 200;
@@ -2096,6 +2685,8 @@ void tst_QMediaPlayerBackend::setPlaybackRate_changesActualRateAndFramesRenderin
void tst_QMediaPlayerBackend::setPlaybackRate_changesActualRateAndFramesRenderingTime()
{
+ QSKIP_GSTREAMER("QTBUG-124005: timing issues");
+
QFETCH(bool, withAudio);
QFETCH(int, positionDeviationMs);
@@ -2192,6 +2783,8 @@ void tst_QMediaPlayerBackend::setPlaybackRate_changesActualRateAndFramesRenderin
void tst_QMediaPlayerBackend::surfaceTest()
{
+ QSKIP_GSTREAMER("QTBUG-124005: spurious failure, probably asynchronous event delivery");
+
CHECK_SELECTED_URL(m_localVideoFile);
// 25 fps video file
@@ -2208,6 +2801,10 @@ void tst_QMediaPlayerBackend::surfaceTest()
void tst_QMediaPlayerBackend::metadata()
{
+ // QTBUG-124380: gstreamer reports CoverArtImage instead of ThumbnailImage
+ QMediaMetaData::Key thumbnailKey =
+ isGStreamerPlatform() ? QMediaMetaData::CoverArtImage : QMediaMetaData::ThumbnailImage;
+
CHECK_SELECTED_URL(m_localFileWithMetadata);
m_fixture->player.setSource(*m_localFileWithMetadata);
@@ -2219,7 +2816,7 @@ void tst_QMediaPlayerBackend::metadata()
QCOMPARE(metadata.value(QMediaMetaData::ContributingArtist).toString(), QStringLiteral("TestArtist"));
QCOMPARE(metadata.value(QMediaMetaData::AlbumTitle).toString(), QStringLiteral("TestAlbum"));
QCOMPARE(metadata.value(QMediaMetaData::Duration), QVariant(7704));
- QVERIFY(!metadata.value(QMediaMetaData::ThumbnailImage).value<QImage>().isNull());
+ QVERIFY(!metadata.value(thumbnailKey).value<QImage>().isNull());
m_fixture->clearSpies();
m_fixture->player.setSource(QUrl());
@@ -2276,6 +2873,32 @@ void tst_QMediaPlayerBackend::metadata_returnsMetadataWithThumbnail_whenMediaHas
}
}
+void tst_QMediaPlayerBackend::metadata_returnsMetadataWithHasHdrContent_whenMediaHasHdrContent_data()
+{
+ QTest::addColumn<MaybeUrl>("mediaUrl");
+ QTest::addColumn<bool>("hasHdrContent");
+
+ QTest::addRow("SDR Video") << m_localVideoFile << false;
+ QTest::addRow("HDR Video") << m_hdrVideo << true;
+}
+
+void tst_QMediaPlayerBackend::metadata_returnsMetadataWithHasHdrContent_whenMediaHasHdrContent()
+{
+ QFETCH(const MaybeUrl, mediaUrl);
+ QFETCH(const bool, hasHdrContent);
+
+ if (!isFFMPEGPlatform() && !isDarwinPlatform())
+ QSKIP("This test is only for FFmpeg and Darwin backends");
+
+ m_fixture->player.setSource(*mediaUrl);
+ QTRY_VERIFY(!m_fixture->metadataChanged.empty());
+
+ const QMediaMetaData metadata = m_fixture->player.videoTracks().front();
+ const bool hdrContent = metadata.value(QMediaMetaData::HasHdrContent).value<bool>();
+
+ QCOMPARE_EQ(hasHdrContent, hdrContent);
+}
+
void tst_QMediaPlayerBackend::playerStateAtEOS()
{
CHECK_SELECTED_URL(m_localWavFile);
@@ -2302,6 +2925,8 @@ void tst_QMediaPlayerBackend::playerStateAtEOS()
void tst_QMediaPlayerBackend::playFromBuffer()
{
+ QSKIP_GSTREAMER("QTBUG-124005: spurious failure, probably asynchronous event delivery");
+
CHECK_SELECTED_URL(m_localVideoFile);
TestVideoSink surface(false);
@@ -2323,8 +2948,8 @@ void tst_QMediaPlayerBackend::audioVideoAvailable()
TestVideoSink surface(false);
QAudioOutput output;
QMediaPlayer player;
- QSignalSpy hasVideoSpy(&player, SIGNAL(hasVideoChanged(bool)));
- QSignalSpy hasAudioSpy(&player, SIGNAL(hasAudioChanged(bool)));
+ QSignalSpy hasVideoSpy(&player, &QMediaPlayer::hasVideoChanged);
+ QSignalSpy hasAudioSpy(&player, &QMediaPlayer::hasAudioChanged);
player.setVideoOutput(&surface);
player.setAudioOutput(&output);
player.setSource(*m_localVideoFile);
@@ -2339,6 +2964,46 @@ void tst_QMediaPlayerBackend::audioVideoAvailable()
QCOMPARE(hasAudioSpy.size(), 2);
}
+void tst_QMediaPlayerBackend::audioVideoAvailable_updatedOnNewMedia()
+{
+ CHECK_SELECTED_URL(m_localVideoFile);
+ CHECK_SELECTED_URL(m_localWavFile);
+
+ TestVideoSink surface(false);
+ QAudioOutput output;
+ QMediaPlayer player;
+ QSignalSpy hasVideoSpy(&player, &QMediaPlayer::hasVideoChanged);
+ QSignalSpy hasAudioSpy(&player, &QMediaPlayer::hasAudioChanged);
+ player.setVideoOutput(&surface);
+ player.setAudioOutput(&output);
+ player.setSource(*m_localVideoFile);
+ QTRY_VERIFY(player.hasVideo());
+ QTRY_VERIFY(player.hasAudio());
+ QCOMPARE(hasVideoSpy.size(), 1);
+ QCOMPARE(hasAudioSpy.size(), 1);
+
+ hasVideoSpy.clear();
+ hasAudioSpy.clear();
+
+ player.setSource(*m_localWavFile);
+
+ auto expectedHasVideoSignals = SignalList{
+ { false },
+ };
+ QTRY_COMPARE(hasVideoSpy, expectedHasVideoSignals);
+
+ if (isGStreamerPlatform()) {
+ // GStreamer unsets hasAudio/hasVideo on new URIs
+ auto expectedHasAudioSignals = SignalList{
+ { false },
+ { true },
+ };
+ QTRY_COMPARE(hasAudioSpy, expectedHasAudioSignals);
+ } else {
+ QCOMPARE(hasAudioSpy.size(), 0);
+ }
+}
+
void tst_QMediaPlayerBackend::isSeekable()
{
CHECK_SELECTED_URL(m_localVideoFile);
@@ -2386,6 +3051,9 @@ void tst_QMediaPlayerBackend::pause_rendersVideoAtCorrectResolution_data()
void tst_QMediaPlayerBackend::pause_rendersVideoAtCorrectResolution()
{
+#ifdef Q_OS_ANDROID
+ QSKIP("SKIP initTestCase on CI, because of QTBUG-126428");
+#endif
QFETCH(const MaybeUrl, mediaFile);
QFETCH(const int, width);
QFETCH(const int, height);
@@ -2402,8 +3070,9 @@ void tst_QMediaPlayerBackend::pause_rendersVideoAtCorrectResolution()
// Act
player.pause();
- if (qEnvironmentVariable("QTEST_ENVIRONMENT").toLower() == "ci")
- QEXPECT_FAIL("av1", "QTBUG-119711: AV1 decoding requires HW support", Abort);
+ if (isCI() && isFFMPEGPlatform())
+ QEXPECT_FAIL("av1", "QTBUG-119711: AV1 decoding requires HW support in the FFMPEG backend",
+ Abort);
QTRY_COMPARE(surface.m_totalFrames, 1);
@@ -2472,6 +3141,9 @@ void tst_QMediaPlayerBackend::durationDetectionIssues_data()
void tst_QMediaPlayerBackend::durationDetectionIssues()
{
+ if (isGStreamerPlatform() && isCI())
+ QSKIP("QTBUG-124005: Fails with gstreamer on CI");
+
QFETCH(QString, mediaFile);
QFETCH(qint64, expectedDuration);
QFETCH(int, expectedVideoTrackCount);
@@ -2545,6 +3217,8 @@ static std::vector<LoopIteration> loopIterations(const QSignalSpy &positionSpy)
void tst_QMediaPlayerBackend::finiteLoops()
{
+ QSKIP_GSTREAMER("QTBUG-123056(?): spuriously failures of the gstreamer backend");
+
CHECK_SELECTED_URL(m_localVideoFile3ColorsWithSound);
#ifdef Q_OS_MACOS
@@ -2579,13 +3253,15 @@ void tst_QMediaPlayerBackend::finiteLoops()
QCOMPARE(iterations.size(), 3u);
QCOMPARE_GT(iterations[0].startPos, 0);
QCOMPARE(iterations[0].endPos, m_fixture->player.duration());
- QCOMPARE_GT(iterations[0].posCount, 10);
QCOMPARE(iterations[1].startPos, 0);
QCOMPARE(iterations[1].endPos, m_fixture->player.duration());
- QCOMPARE_GT(iterations[1].posCount, 10);
QCOMPARE(iterations[2].startPos, 0);
QCOMPARE(iterations[2].endPos, m_fixture->player.duration());
- QCOMPARE_GT(iterations[2].posCount, 10);
+ if (isFFMPEGPlatform()) {
+ QCOMPARE_GT(iterations[0].posCount, 10);
+ QCOMPARE_GT(iterations[1].posCount, 10);
+ QCOMPARE_GT(iterations[2].posCount, 10);
+ }
QCOMPARE(m_fixture->player.mediaStatus(), QMediaPlayer::EndOfMedia);
@@ -2604,6 +3280,8 @@ void tst_QMediaPlayerBackend::finiteLoops()
void tst_QMediaPlayerBackend::infiniteLoops()
{
+ QSKIP_GSTREAMER("QTBUG-123056(?): spuriously failures of the gstreamer backend");
+
CHECK_SELECTED_URL(m_localVideoFile2);
#ifdef Q_OS_MACOS
@@ -2638,7 +3316,8 @@ void tst_QMediaPlayerBackend::infiniteLoops()
QCOMPARE(iterations.front().endPos, m_fixture->player.duration());
}
- QTRY_COMPARE(m_fixture->player.mediaStatus(), QMediaPlayer::BufferedMedia);
+ QTRY_VERIFY(m_fixture->player.mediaStatus() == QMediaPlayer::BufferedMedia
+ || m_fixture->player.mediaStatus() == QMediaPlayer::EndOfMedia);
m_fixture->player.stop(); // QMediaPlayer::stop stops whether or not looping is infinite
QCOMPARE(m_fixture->player.playbackState(), QMediaPlayer::StoppedState);
@@ -2653,6 +3332,8 @@ void tst_QMediaPlayerBackend::infiniteLoops()
void tst_QMediaPlayerBackend::seekOnLoops()
{
+ QSKIP_GSTREAMER("QTBUG-123056(?): spuriously failures of the gstreamer backend");
+
CHECK_SELECTED_URL(m_localVideoFile3ColorsWithSound);
#ifdef Q_OS_MACOS
@@ -2702,6 +3383,8 @@ void tst_QMediaPlayerBackend::seekOnLoops()
void tst_QMediaPlayerBackend::changeLoopsOnTheFly()
{
+ QSKIP_GSTREAMER("QTBUG-123056(?): spuriously failures of the gstreamer backend");
+
CHECK_SELECTED_URL(m_localVideoFile3ColorsWithSound);
#ifdef Q_OS_MACOS
@@ -2775,6 +3458,8 @@ void tst_QMediaPlayerBackend::seekAfterLoopReset()
void tst_QMediaPlayerBackend::changeVideoOutputNoFramesLost()
{
+ QSKIP_GSTREAMER("QTBUG-124005: gstreamer will lose frames, possibly due to buffering");
+
CHECK_SELECTED_URL(m_localVideoFile3ColorsWithSound);
QVideoSink sinks[4];
@@ -2797,11 +3482,11 @@ void tst_QMediaPlayerBackend::changeVideoOutputNoFramesLost()
player.setVideoOutput(&sinks[1]);
player.play();
- QTRY_VERIFY(framesCount[1] >= framesCount[0] / 4);
+ QTRY_COMPARE_GE(framesCount[1], framesCount[0] / 4);
player.setVideoOutput(&sinks[2]);
const int savedFrameNumber1 = framesCount[1];
- QTRY_VERIFY(framesCount[2] >= (framesCount[0] - savedFrameNumber1) / 2);
+ QTRY_COMPARE_GE(framesCount[2], (framesCount[0] - savedFrameNumber1) / 2);
player.setVideoOutput(&sinks[3]);
const int savedFrameNumber2 = framesCount[2];
@@ -2817,6 +3502,9 @@ void tst_QMediaPlayerBackend::changeVideoOutputNoFramesLost()
void tst_QMediaPlayerBackend::cleanSinkAndNoMoreFramesAfterStop()
{
+ QSKIP_GSTREAMER(
+ "QTBUG-124005: spurious failures on gstreamer, probably due to asynchronous play()");
+
CHECK_SELECTED_URL(m_localVideoFile3ColorsWithSound);
QVideoSink sink;
@@ -2848,6 +3536,9 @@ void tst_QMediaPlayerBackend::cleanSinkAndNoMoreFramesAfterStop()
QTest::qWait(30);
+ if (isGStreamerPlatform())
+ continue; // QTBUG-124005: stop() is asynchronous in gstreamer
+
// check if nothing changed after short waiting
QCOMPARE(framesCount, 0);
}
@@ -2891,6 +3582,9 @@ void tst_QMediaPlayerBackend::lazyLoadVideo()
void tst_QMediaPlayerBackend::videoSinkSignals()
{
+#ifdef Q_OS_ANDROID
+ QSKIP("SKIP initTestCase on CI, because of QTBUG-126428");
+#endif
std::atomic<int> videoFrameCounter = 0;
std::atomic<int> videoSizeCounter = 0;
@@ -2939,7 +3633,8 @@ void tst_QMediaPlayerBackend::nonAsciiFileName()
m_fixture->player.setSource(temporaryFile->fileName());
m_fixture->player.play();
- QTRY_COMPARE(m_fixture->player.mediaStatus(), QMediaPlayer::BufferedMedia);
+ QTRY_VERIFY(m_fixture->player.mediaStatus() == QMediaPlayer::BufferedMedia
+ || m_fixture->player.mediaStatus() == QMediaPlayer::EndOfMedia);
QCOMPARE(m_fixture->errorOccurred.size(), 0);
}
@@ -2971,8 +3666,9 @@ void tst_QMediaPlayerBackend::setMedia_setsVideoSinkSize_beforePlaying()
QCOMPARE(spy2.size(), 1);
}
-std::unique_ptr<QProcess> tst_QMediaPlayerBackend::createRtspStreamProcess(QString fileName,
- QString outputUrl)
+#if QT_CONFIG(process)
+std::unique_ptr<QProcess> tst_QMediaPlayerBackend::createRtpStreamProcess(QString fileName,
+ QString sdpUrl)
{
Q_ASSERT(!m_vlcCommand.isEmpty());
@@ -2981,25 +3677,26 @@ std::unique_ptr<QProcess> tst_QMediaPlayerBackend::createRtspStreamProcess(QStri
fileName.replace('/', '\\');
#endif
- // clang-format off
- QStringList vlcParams =
- {
- "-vvv", fileName,
- "--sout", QLatin1String("#rtp{sdp=%1}").arg(outputUrl),
- "--intf", "dummy"
- };
- // clang-format on
+ QStringList vlcParams = { "-vvv", fileName,
+ "--sout", QStringLiteral("#rtp{dst=localhost,sdp=%1}").arg(sdpUrl),
+ "--intf", "dummy" };
process->start(m_vlcCommand, vlcParams);
if (!process->waitForStarted())
return nullptr;
- // rtsp stream might be with started some delay after the vlc process starts.
- // Ideally, we should wait for open connections, it requires some extra work + QNetwork dependency.
- QTest::qWait(500);
+ // The rtp stream might be started with some delay after the vlc process starts.
+ // Ideally, we should wait for open connections, but that requires some extra work + a
+ // QtNetwork dependency.
+ int timeout = 500;
+#ifdef Q_OS_MACOS
+ timeout = 2000;
+#endif
+ QTest::qWait(timeout);
return process;
}
+#endif //QT_CONFIG(process)
void tst_QMediaPlayerBackend::play_playsRotatedVideoOutput_whenVideoFileHasOrientationMetadata_data()
{
@@ -3033,6 +3730,9 @@ void tst_QMediaPlayerBackend::play_playsRotatedVideoOutput_whenVideoFileHasOrien
void tst_QMediaPlayerBackend::play_playsRotatedVideoOutput_whenVideoFileHasOrientationMetadata()
{
+ if (isGStreamerPlatform() && isCI())
+ QSKIP("QTBUG-124005: Fails with gstreamer on CI");
+
// This test uses 4 video files with a 2x2 color matrix consisting of
// red (upper left), blue (lower left), yellow (lower right) and green (upper right).
// The files are identical, except that three of them contain
@@ -3071,15 +3771,479 @@ void tst_QMediaPlayerBackend::play_playsRotatedVideoOutput_whenVideoFileHasOrien
QVideoFrame videoFrame = m_fixture->surface.waitForFrame();
QVERIFY(videoFrame.isValid());
QCOMPARE(videoFrame.rotation(), expectedRotationAngle);
+#ifdef Q_OS_ANDROID
+ QSKIP("frame.toImage will return null image because of QTBUG-108446");
+#endif
QImage image = videoFrame.toImage();
QVERIFY(!image.isNull());
QRgb upperLeftColor = image.pixel(5, 5);
QCOMPARE_LT(colorDifference(upperLeftColor, expectedColor), 0.004);
+ QSKIP_GSTREAMER("QTBUG-124005: surface.videoSize() not updated with rotation");
+
// Compare videoSize of the output video sink with the expected value after getting a frame
QCOMPARE(m_fixture->surface.videoSize(), videoSize);
}
+void tst_QMediaPlayerBackend::setVideoOutput_doesNotStopPlayback()
+{
+ using namespace std::chrono_literals;
+
+ CHECK_SELECTED_URL(m_15sVideo);
+
+ QFETCH(QMediaPlayer::PlaybackState, playbackState);
+
+ TestVideoSink surface(false);
+ QAudioOutput audioOut;
+
+ QMediaPlayer player;
+ player.setAudioOutput(&audioOut);
+ player.setSource(*m_15sVideo);
+
+ switch (playbackState) {
+ case QMediaPlayer::StoppedState:
+ break;
+ case QMediaPlayer::PausedState:
+ QSKIP_FFMPEG("QTBUG-126014: Test failure with the ffmpeg backend");
+ player.pause();
+ break;
+ case QMediaPlayer::PlayingState:
+ QSKIP_FFMPEG("QTBUG-126014: Test failure with the ffmpeg backend");
+ QSKIP_GSTREAMER("QTBUG-124005: Test failure with the gstreamer backend");
+ player.play();
+ break;
+ }
+
+ // set video output
+ QTest::qWait(1s);
+ player.setVideoOutput(&surface);
+
+ if (playbackState == QMediaPlayer::PlayingState) {
+ QVideoFrame frame = surface.waitForFrame();
+ QCOMPARE(frame.size(), QSize(20, 20));
+ }
+
+ // unset video output
+ QTest::qWait(1s);
+ player.setVideoOutput(nullptr);
+
+ // wait for play until end
+ if (playbackState != QMediaPlayer::PlayingState)
+ player.play();
+
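+ // speed up playback so the 15s clip finishes quickly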
+ player.setPlaybackRate(5);
+ QTRY_COMPARE(player.playbackState(), QMediaPlayer::StoppedState);
+}
+
+void tst_QMediaPlayerBackend::setVideoOutput_doesNotStopPlayback_data()
+{
+ QTest::addColumn<QMediaPlayer::PlaybackState>("playbackState");
+ QTest::newRow("StoppedState") << QMediaPlayer::StoppedState;
+ QTest::newRow("PausedState") << QMediaPlayer::PausedState;
+ QTest::newRow("PlayingState") << QMediaPlayer::PlayingState;
+}
+
+void tst_QMediaPlayerBackend::setAudioOutput_doesNotStopPlayback()
+{
+ QSKIP_FFMPEG("QTBUG-126014: Test failure with the ffmpeg backend");
+
+ using namespace std::chrono_literals;
+
+ CHECK_SELECTED_URL(m_15sVideo);
+ QFETCH(QMediaPlayer::PlaybackState, playbackState);
+
+ TestVideoSink surface(false);
+ QAudioOutput audioOut;
+
+ QMediaPlayer player;
+ player.setVideoOutput(&surface);
+ player.setSource(*m_15sVideo);
+
+ switch (playbackState) {
+ case QMediaPlayer::StoppedState:
+ break;
+ case QMediaPlayer::PausedState:
+ player.pause();
+ break;
+ case QMediaPlayer::PlayingState:
+ player.play();
+ break;
+ }
+
+ // set audio output
+ QTest::qWait(1s);
+ player.setAudioOutput(&audioOut);
+
+ // unset audio output
+ QTest::qWait(1s);
+ player.setAudioOutput(nullptr);
+
+ // wait for play until end
+ if (playbackState != QMediaPlayer::PlayingState)
+ player.play();
+ player.setPlaybackRate(5);
+ QTRY_COMPARE(player.playbackState(), QMediaPlayer::StoppedState);
+}
+
+void tst_QMediaPlayerBackend::setAudioOutput_doesNotStopPlayback_data()
+{
+ QTest::addColumn<QMediaPlayer::PlaybackState>("playbackState");
+ QTest::newRow("StoppedState") << QMediaPlayer::StoppedState;
+ QTest::newRow("PausedState") << QMediaPlayer::PausedState;
+ QTest::newRow("PlayingState") << QMediaPlayer::PlayingState;
+}
+
+void tst_QMediaPlayerBackend::swapAudioDevice_doesNotStopPlayback()
+{
+ using namespace std::chrono_literals;
+
+ const QList<QAudioDevice> outputDevices = QMediaDevices::audioOutputs();
+
+ if (outputDevices.size() < 2)
+ QSKIP("swapAudioDevice_doesNotStopPlayback requires two audio output devices");
+
+ CHECK_SELECTED_URL(m_15sVideo);
+ QFETCH(QMediaPlayer::PlaybackState, playbackState);
+
+ TestVideoSink surface(false);
+ QAudioOutput audioOut;
+
+ QMediaPlayer player;
+ player.setVideoOutput(&surface);
+ player.setAudioOutput(&audioOut);
+ player.setSource(*m_15sVideo);
+ switch (playbackState) {
+ case QMediaPlayer::StoppedState:
+ break;
+ case QMediaPlayer::PausedState:
+ player.pause();
+ break;
+ case QMediaPlayer::PlayingState:
+ player.play();
+ break;
+ }
+
+ // swap output device
+ QTest::qWait(1s);
+ audioOut.setDevice(outputDevices[0]);
+
+ QTest::qWait(1s);
+ audioOut.setDevice(outputDevices[1]);
+
+ QTest::qWait(1s);
+ audioOut.setDevice(outputDevices[0]);
+
+ // wait for play until end
+ if (playbackState != QMediaPlayer::PlayingState)
+ player.play();
+ player.setPlaybackRate(5);
+ QTRY_COMPARE(player.playbackState(), QMediaPlayer::StoppedState);
+}
+
+void tst_QMediaPlayerBackend::swapAudioDevice_doesNotStopPlayback_data()
+{
+ QTest::addColumn<QMediaPlayer::PlaybackState>("playbackState");
+ QTest::newRow("StoppedState") << QMediaPlayer::StoppedState;
+ QTest::newRow("PausedState") << QMediaPlayer::PausedState;
+ QTest::newRow("PlayingState") << QMediaPlayer::PlayingState;
+}
+
+void tst_QMediaPlayerBackend::play_readsSubtitle()
+{
+ using namespace std::chrono_literals;
+ CHECK_SELECTED_URL(m_subtitleVideo);
+
+ QVideoSink &sink = m_fixture->surface;
+ QMediaPlayer &player = m_fixture->player;
+
+ TestSubtitleSink subtitleSink;
+ QObject::connect(&sink, &QVideoSink::subtitleTextChanged, &subtitleSink,
+ &TestSubtitleSink::addSubtitle);
+
+ player.setSource(*m_subtitleVideo);
+ QTRY_COMPARE(player.subtitleTracks().size(), 1);
+ QCOMPARE_EQ(player.subtitleTracks()[0].value(QMediaMetaData::Duration), 3000);
+
+ player.setActiveSubtitleTrack(0);
+
+ if (!isGStreamerPlatform()) // FIXME: spurious deadlocks
+ player.setPlaybackRate(5.f);
+
+ player.play();
+
+ QStringList expectedSubtitleList = {
+ u"Hello"_s,
+ u""_s,
+ u"World"_s,
+ u""_s,
+ };
+
+ QTRY_COMPARE(subtitleSink.subtitles, expectedSubtitleList);
+}
+
+void tst_QMediaPlayerBackend::multiTrack_validateMetadata()
+{
+ CHECK_SELECTED_URL(m_multitrackVideo);
+ QMediaPlayer &player = m_fixture->player;
+
+ player.setSource(*m_multitrackVideo);
+
+ QTRY_COMPARE(player.videoTracks().size(), 2);
+ QTRY_COMPARE(player.audioTracks().size(), 2);
+ QTRY_COMPARE(player.subtitleTracks().size(), 2);
+
+ QSKIP_GSTREAMER("GStreamer does not provide correct track order");
+
+ QCOMPARE(player.videoTracks()[0][QMediaMetaData::Title], u"One"_s);
+ QCOMPARE(player.videoTracks()[1][QMediaMetaData::Title], u"Two"_s);
+
+ QCOMPARE(player.audioTracks()[0][QMediaMetaData::Language], QLocale::Language::English);
+ QCOMPARE(player.audioTracks()[1][QMediaMetaData::Language], QLocale::Language::Spanish);
+ QCOMPARE(player.subtitleTracks()[0][QMediaMetaData::Language], QLocale::Language::English);
+ QCOMPARE(player.subtitleTracks()[1][QMediaMetaData::Language], QLocale::Language::Spanish);
+}
+
+void tst_QMediaPlayerBackend::play_readsSubtitle_fromMultiTrack()
+{
+ using namespace std::chrono_literals;
+ CHECK_SELECTED_URL(m_multitrackVideo);
+
+ QFETCH(int, track);
+ QFETCH(const QStringList, expectedSubtitles);
+
+ QVideoSink &sink = m_fixture->surface;
+ QMediaPlayer &player = m_fixture->player;
+
+ TestSubtitleSink subtitleSink;
+ QObject::connect(&sink, &QVideoSink::subtitleTextChanged, &subtitleSink,
+ &TestSubtitleSink::addSubtitle);
+
+ player.setSource(*m_multitrackVideo);
+
+ QTRY_COMPARE(player.subtitleTracks().size(), 2);
+
+ if (track != -1) {
+ if (isGStreamerPlatform())
+ QCOMPARE(player.subtitleTracks()[0].value(QMediaMetaData::Duration), 4000);
+ if (isFFMPEGPlatform())
+ QCOMPARE(player.subtitleTracks()[0].value(QMediaMetaData::Duration), 15046);
+ }
+
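+ // GStreamer may enumerate the subtitle tracks in either order; remap the requested
+ // track index so it refers to the expected language.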
+ if (isGStreamerPlatform()) {
+ bool swapTracks =
+ player.subtitleTracks()[0][QMediaMetaData::Language] == QLocale::Language::Spanish;
+
+ if (swapTracks && track == 1)
+ track = 0;
+ if (swapTracks && track == 0)
+ track = 1;
+ }
+
+ player.setActiveSubtitleTrack(track);
+ if (!isGStreamerPlatform())
+ player.setPlaybackRate(5.f);
+ player.play();
+
+ if (expectedSubtitles.isEmpty())
+ QTRY_COMPARE_GT(player.position(), 2000);
+
+ QTRY_COMPARE(subtitleSink.subtitles, expectedSubtitles);
+}
+
+void tst_QMediaPlayerBackend::play_readsSubtitle_fromMultiTrack_data()
+{
+ QSKIP_GSTREAMER("GStreamer does not provide consistent track order");
+
+ QTest::addColumn<int>("track");
+ QTest::addColumn<QStringList>("expectedSubtitles");
+
+ QTest::addRow("track 0") << 0
+ << QStringList{
+ u"1s track 1"_s,
+ u""_s,
+ u"3s track 1"_s,
+ u""_s,
+ };
+ QTest::addRow("track 1") << 1
+ << QStringList{
+ u"1s track 2"_s,
+ u""_s,
+ u"3s track 2"_s,
+ u""_s,
+ };
+
+ QTest::addRow("no subtitles") << -1 << QStringList{};
+}
+
+void tst_QMediaPlayerBackend::setActiveSubtitleTrack_switchesSubtitles()
+{
+ QVideoSink &sink = m_fixture->surface;
+ QMediaPlayer &player = m_fixture->player;
+
+ QFETCH(const QUrl, media);
+ QFETCH(const int, positionToSwapTrack);
+ QFETCH(const QLatin1String, testMode);
+ QFETCH(const QStringList, expectedSubtitles);
+
+ TestSubtitleSink subtitleSink;
+ QObject::connect(&sink, &QVideoSink::subtitleTextChanged, &subtitleSink,
+ &TestSubtitleSink::addSubtitle);
+
+ player.setSource(media);
+
+ QTRY_COMPARE(player.subtitleTracks().size(), 2);
+
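+ // Map the logical track indices onto the backend's actual track order; GStreamer may
+ // report the two subtitle tracks swapped.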
+ int track0 = 0;
+ int track1 = 1;
+ if (isGStreamerPlatform()) {
+ bool swapTracks =
+ player.subtitleTracks()[0][QMediaMetaData::Language] == QLocale::Language::Spanish;
+
+ if (swapTracks) {
+ track1 = 0;
+ track0 = 1;
+ }
+ }
+
+ player.setActiveSubtitleTrack(track0);
+
+ player.play();
+ QTRY_COMPARE_GT(player.position(), positionToSwapTrack);
+
+ if (testMode == "setWhilePaused"_L1) {
+ player.pause();
+ player.setActiveSubtitleTrack(track1);
+ player.play();
+ } else if (testMode == "setWhilePlaying"_L1) {
+ player.setActiveSubtitleTrack(track1);
+ } else {
+ QFAIL("should not reach");
+ }
+
+ QTRY_COMPARE(subtitleSink.subtitles, expectedSubtitles);
+}
+
+void tst_QMediaPlayerBackend::setActiveSubtitleTrack_switchesSubtitles_data()
+{
+ QSKIP_GSTREAMER("GStreamer does not provide consistent track order");
+
+ QTest::addColumn<QUrl>("media");
+ QTest::addColumn<QLatin1String>("testMode");
+ QTest::addColumn<int>("positionToSwapTrack");
+ QTest::addColumn<QStringList>("expectedSubtitles");
+
+ QTest::addRow("while paused") << *m_multitrackVideo << "setWhilePaused"_L1 << 2100
+ << QStringList{
+ u"1s track 1"_s,
+ u""_s,
+ u"3s track 2"_s,
+ u""_s,
+ };
+ QTest::addRow("while playing") << *m_multitrackVideo << "setWhilePlaying"_L1 << 2100
+ << QStringList{
+ u"1s track 1"_s,
+ u""_s,
+ u"3s track 2"_s,
+ u""_s,
+ };
+
+ QTest::addRow("while paused, subtitles start at zero")
+ << *m_multitrackSubtitleStartsAtZeroVideo << "setWhilePaused"_L1 << 1100
+ << QStringList{
+ u"0s track 1"_s,
+ u""_s,
+ u"2s track 2"_s,
+ u""_s,
+ };
+ QTest::addRow("while playing, subtitles start at zero")
+ << *m_multitrackSubtitleStartsAtZeroVideo << "setWhilePlaying"_L1 << 1100
+ << QStringList{
+ u"0s track 1"_s,
+ u""_s,
+ u"2s track 2"_s,
+ u""_s,
+ };
+}
+
+void tst_QMediaPlayerBackend::setActiveVideoTrack_switchesVideoTrack()
+{
+ using namespace std::chrono_literals;
+ QSKIP_GSTREAMER("GStreamer does not provide consistent track order");
+
+ TestVideoSink &sink = m_fixture->surface;
+ sink.setStoreFrames();
+ QMediaPlayer &player = m_fixture->player;
+
+ player.setSource(*m_multitrackVideo);
+
+ QTRY_COMPARE(player.videoTracks().size(), 2);
+
+ int track0 = 0;
+ int track1 = 1;
+ if (isGStreamerPlatform()) {
+ bool swapTracks = player.videoTracks()[0][QMediaMetaData::Title] != u"One"_s;
+
+ if (swapTracks) {
+ track0 = 1;
+ track1 = 0;
+ }
+ }
+
+ player.setActiveVideoTrack(track0);
+ player.play();
+
+ sink.waitForFrame();
+
+ QTest::qWait(500ms);
+ sink.waitForFrame();
+ QCOMPARE(sink.m_frameList.back().toImage().pixel(10, 10), QColor(0xff, 0x80, 0x7f).rgb());
+
+ player.setActiveVideoTrack(track1);
+
+ QTest::qWait(500ms);
+ sink.waitForFrame();
+ QCOMPARE(sink.m_frameList.back().toImage().pixel(10, 10), QColor(0x80, 0x80, 0xff).rgb());
+}
+
+void tst_QMediaPlayerBackend::disablingAllTracks_doesNotStopPlayback()
+{
+ QSKIP_GSTREAMER("position does not advance in GStreamer");
+
+ QMediaPlayer &player = m_fixture->player;
+
+ player.setSource(*m_multitrackVideo);
+
+ // CAVEAT: we cannot set active tracks before tracksChanged is emitted
+ QTRY_COMPARE(player.videoTracks().size(), 2);
+
+ player.setActiveVideoTrack(-1);
+ player.setActiveAudioTrack(-1);
+
+ player.play();
+ QTRY_VERIFY(player.position() > 1000);
+
+ QCOMPARE(m_fixture->surface.m_totalFrames, 0);
+}
+
+void tst_QMediaPlayerBackend::disablingAllTracks_beforeTracksChanged_doesNotStopPlayback()
+{
+ QSKIP_GSTREAMER("position does not advance in GStreamer");
+ QSKIP_FFMPEG("setActiveXXXTrack(-1) only works after tracksChanged");
+
+ QMediaPlayer &player = m_fixture->player;
+
+ player.setSource(*m_multitrackVideo);
+
+ player.setActiveVideoTrack(-1);
+ player.setActiveAudioTrack(-1);
+
+ player.play();
+ QTRY_VERIFY(player.position() > 1000);
+
+ QCOMPARE(m_fixture->surface.m_totalFrames, 0);
+}
+
QTEST_MAIN(tst_QMediaPlayerBackend)
+
#include "tst_qmediaplayerbackend.moc"
diff --git a/tests/auto/integration/qmediaplayerformatsupport/CMakeLists.txt b/tests/auto/integration/qmediaplayerformatsupport/CMakeLists.txt
new file mode 100644
index 000000000..b4dd19f75
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/CMakeLists.txt
@@ -0,0 +1,30 @@
+# Copyright (C) 2024 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+file(GLOB_RECURSE test_data_glob
+ RELATIVE
+ ${CMAKE_CURRENT_SOURCE_DIR}
+ testdata/**
+)
+
+list(APPEND testdata_resource_files ${test_data_glob})
+
+qt_internal_add_test(tst_qmediaplayerformatsupport
+ SOURCES
+ tst_qmediaplayerformatsupport.cpp
+ ../shared/mediabackendutils.h
+ INCLUDE_DIRECTORIES
+ ../shared/
+ LIBRARIES
+ Qt::Core
+ Qt::MultimediaPrivate
+ TESTDATA
+ ${testdata_resource_files}
+)
+
+qt_internal_add_resource(tst_qmediaplayerformatsupport "testdata"
+ PREFIX
+ "/"
+ FILES
+ ${testdata_resource_files}
+)
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/README.md b/tests/auto/integration/qmediaplayerformatsupport/testdata/README.md
new file mode 100644
index 000000000..f52108bcc
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/README.md
@@ -0,0 +1,35 @@
+Sample video files are created using FFmpeg, for example:
+
+:: Supported container formats
+ffmpeg -ss 2 -i flipable.gif -t 1 -r 5 containers/supported/container.avi
+ffmpeg -ss 2 -i flipable.gif -t 1 -r 5 containers/supported/container.mkv
+ffmpeg -ss 2 -i flipable.gif -t 1 -r 5 containers/supported/container.mp4
+ffmpeg -ss 2 -i flipable.gif -t 1 -r 25 containers/supported/container.mpeg
+ffmpeg -ss 2 -i flipable.gif -t 1 -r 25 containers/supported/container.wmv
+
+:: Unsupported container formats
+ffmpeg -ss 2 -i flipable.gif -t 1 -r 5 containers/unsupported/container.webm
+
+:: Supported pixel formats h264
+ffmpeg -i flipable.gif -pix_fmt yuv420p -colorspace bt709 -color_primaries bt709 -color_trc bt709 -color_range tv -vcodec libx264 -r 5 pixel_formats/supported/h264_yuv420p.mp4
+ffmpeg -i flipable.gif -pix_fmt yuv420p10 -colorspace bt709 -color_primaries bt709 -color_trc bt709 -color_range tv -vcodec libx264 -r 5 pixel_formats/supported/h264_yuv420p10.mp4
+ffmpeg -i flipable.gif -pix_fmt yuv422p -colorspace bt709 -color_primaries bt709 -color_trc bt709 -color_range tv -vcodec libx264 -r 5 pixel_formats/supported/h264_yuv422p.mp4
+ffmpeg -i flipable.gif -pix_fmt yuv422p10 -colorspace bt709 -color_primaries bt709 -color_trc bt709 -color_range tv -vcodec libx264 -r 5 pixel_formats/supported/h264_yuv422p10.mp4
+ffmpeg -i flipable.gif -pix_fmt yuv444p -colorspace bt709 -color_primaries bt709 -color_trc bt709 -color_range tv -vcodec libx264 -r 5 pixel_formats/supported/h264_yuv444p.mp4
+ffmpeg -i flipable.gif -pix_fmt yuvj420p -colorspace bt709 -color_primaries bt709 -color_trc bt709 -color_range tv -vcodec libx264 -r 5 pixel_formats/supported/h264_yuvj420p.mp4
+ffmpeg -i flipable.gif -pix_fmt yuvj422p -colorspace bt709 -color_primaries bt709 -color_trc bt709 -color_range tv -vcodec libx264 -r 5 pixel_formats/supported/h264_yuvj422p.mp4
+ffmpeg -i flipable.gif -pix_fmt yuvj444p -colorspace bt709 -color_primaries bt709 -color_trc bt709 -color_range tv -vcodec libx264 -r 5 pixel_formats/supported/h264_yuvj444p.mp4
+ffmpeg -i flipable.gif -pix_fmt nv12 -colorspace bt709 -color_primaries bt709 -color_trc bt709 -color_range tv -vcodec libx264 -r 5 pixel_formats/supported/h264_nv12.mp4
+ffmpeg -i flipable.gif -pix_fmt nv16 -colorspace bt709 -color_primaries bt709 -color_trc bt709 -color_range tv -vcodec libx264 -r 5 pixel_formats/supported/h264_nv16.mp4
+ffmpeg -i flipable.gif -pix_fmt nv21 -colorspace bt709 -color_primaries bt709 -color_trc bt709 -color_range tv -vcodec libx264 -r 5 pixel_formats/supported/h264_nv21.mp4
+ffmpeg -i flipable.gif -pix_fmt yuv420p10le -colorspace bt709 -color_primaries bt709 -color_trc bt709 -color_range tv -vcodec libx264 -r 5 pixel_formats/supported/h264_yuv420p10le.mp4
+ffmpeg -i flipable.gif -pix_fmt yuv444p10 -colorspace bt709 -color_primaries bt709 -color_trc bt709 -color_range tv -vcodec libx264 -r 5 pixel_formats/supported/h264_yuv444p10.mp4
+ffmpeg -i flipable.gif -pix_fmt yuv422p10le -colorspace bt709 -color_primaries bt709 -color_trc bt709 -color_range tv -vcodec libx264 -r 5 pixel_formats/supported/h264_yuv422p10le.mp4
+ffmpeg -i flipable.gif -pix_fmt gray -colorspace bt709 -color_primaries bt709 -color_trc bt709 -color_range tv -vcodec libx264 -r 5 pixel_formats/supported/h264_gray.mp4
+ffmpeg -i flipable.gif -pix_fmt gray10le -colorspace bt709 -color_primaries bt709 -color_trc bt709 -color_range tv -vcodec libx264 -r 5 pixel_formats/supported/h264_gray10le.mp4
+
+ffmpeg -i flipable.gif -pix_fmt bgr0 -colorspace bt709 -color_primaries bt709 -color_trc bt709 -color_range tv -vcodec libx264rgb -r 5 pixel_formats/supported/h264_bgr0.mp4
+ffmpeg -i flipable.gif -pix_fmt bgr24 -colorspace bt709 -color_primaries bt709 -color_trc bt709 -color_range tv -vcodec libx264rgb -r 5 pixel_formats/supported/h264_bgr24.mp4
+ffmpeg -i flipable.gif -pix_fmt rgb24 -colorspace bt709 -color_primaries bt709 -color_trc bt709 -color_range tv -vcodec libx264rgb -r 5 pixel_formats/supported/h264_rgb24.mp4
+
+
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/containers/supported/container.avi b/tests/auto/integration/qmediaplayerformatsupport/testdata/containers/supported/container.avi
new file mode 100644
index 000000000..a48028550
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/containers/supported/container.avi
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/containers/supported/container.mkv b/tests/auto/integration/qmediaplayerformatsupport/testdata/containers/supported/container.mkv
new file mode 100644
index 000000000..2e362d7ca
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/containers/supported/container.mkv
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/containers/supported/container.mp4 b/tests/auto/integration/qmediaplayerformatsupport/testdata/containers/supported/container.mp4
new file mode 100644
index 000000000..bff40278d
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/containers/supported/container.mp4
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/containers/supported/container.mpeg b/tests/auto/integration/qmediaplayerformatsupport/testdata/containers/supported/container.mpeg
new file mode 100644
index 000000000..b94a47f0a
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/containers/supported/container.mpeg
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/containers/supported/container.wmv b/tests/auto/integration/qmediaplayerformatsupport/testdata/containers/supported/container.wmv
new file mode 100644
index 000000000..1a7577e3f
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/containers/supported/container.wmv
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/containers/unsupported/container.webp b/tests/auto/integration/qmediaplayerformatsupport/testdata/containers/unsupported/container.webp
new file mode 100644
index 000000000..ed8b6f631
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/containers/unsupported/container.webp
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/flipable.gif b/tests/auto/integration/qmediaplayerformatsupport/testdata/flipable.gif
new file mode 100644
index 000000000..fd187906b
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/flipable.gif
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_bgr0.mp4 b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_bgr0.mp4
new file mode 100644
index 000000000..ff9eacb65
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_bgr0.mp4
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_bgr24.mp4 b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_bgr24.mp4
new file mode 100644
index 000000000..ff9eacb65
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_bgr24.mp4
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_gray.mp4 b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_gray.mp4
new file mode 100644
index 000000000..c69357d2b
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_gray.mp4
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_gray10le.mp4 b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_gray10le.mp4
new file mode 100644
index 000000000..339912757
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_gray10le.mp4
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_nv12.mp4 b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_nv12.mp4
new file mode 100644
index 000000000..20b51609a
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_nv12.mp4
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_nv16.mp4 b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_nv16.mp4
new file mode 100644
index 000000000..165819b22
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_nv16.mp4
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_nv21.mp4 b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_nv21.mp4
new file mode 100644
index 000000000..20b51609a
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_nv21.mp4
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_rgb24.mp4 b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_rgb24.mp4
new file mode 100644
index 000000000..ff9eacb65
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_rgb24.mp4
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv420p.mp4 b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv420p.mp4
new file mode 100644
index 000000000..53ef62b45
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv420p.mp4
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv420p10.mp4 b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv420p10.mp4
new file mode 100644
index 000000000..bb45d344c
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv420p10.mp4
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv420p10le.mp4 b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv420p10le.mp4
new file mode 100644
index 000000000..bb45d344c
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv420p10le.mp4
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv422p.mp4 b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv422p.mp4
new file mode 100644
index 000000000..165819b22
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv422p.mp4
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv422p10.mp4 b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv422p10.mp4
new file mode 100644
index 000000000..563fce6da
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv422p10.mp4
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv422p10le.mp4 b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv422p10le.mp4
new file mode 100644
index 000000000..563fce6da
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv422p10le.mp4
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv444p.mp4 b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv444p.mp4
new file mode 100644
index 000000000..953d014ec
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv444p.mp4
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv444p10.mp4 b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv444p10.mp4
new file mode 100644
index 000000000..c27683ed7
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuv444p10.mp4
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuvj420p.mp4 b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuvj420p.mp4
new file mode 100644
index 000000000..3906325e1
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuvj420p.mp4
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuvj422p.mp4 b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuvj422p.mp4
new file mode 100644
index 000000000..6b2d43fd9
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuvj422p.mp4
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuvj444p.mp4 b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuvj444p.mp4
new file mode 100644
index 000000000..75008f2aa
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/testdata/pixel_formats/supported/h264_yuvj444p.mp4
Binary files differ
diff --git a/tests/auto/integration/qmediaplayerformatsupport/tst_qmediaplayerformatsupport.cpp b/tests/auto/integration/qmediaplayerformatsupport/tst_qmediaplayerformatsupport.cpp
new file mode 100644
index 000000000..8cabb28c8
--- /dev/null
+++ b/tests/auto/integration/qmediaplayerformatsupport/tst_qmediaplayerformatsupport.cpp
@@ -0,0 +1,124 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only
+
+#include "mediabackendutils.h"
+#include <QtTest/QtTest>
+#include <QDebug>
+#include <QtMultimedia/qmediaplayer.h>
+#include <QtMultimedia/QVideoSink>
+
+using namespace Qt::StringLiterals;
+
+QT_USE_NAMESPACE
+
+struct Fixture
+{
+ Fixture() { player.setVideoOutput(&videoOutput); }
+
+ QVideoSink videoOutput;
+ QMediaPlayer player;
+ QSignalSpy errorOccurred{ &player, &QMediaPlayer::errorOccurred };
+ QSignalSpy playbackStateChanged{ &player, &QMediaPlayer::playbackStateChanged };
+
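+ // True once the playbackStateChanged spy has observed a transition to PlayingState.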
+ bool startedPlaying() const
+ {
+ return playbackStateChanged.contains(QList<QVariant>{ QMediaPlayer::PlayingState });
+ }
+};
+
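+// Registers one test data row per file found in the given resource directory.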
+void addTestData(QLatin1StringView dir)
+{
+ QDirIterator it(dir);
+ while (it.hasNext()) {
+ QString v = it.next();
+ QTest::addRow("%s", v.toLatin1().data()) << QUrl{ "qrc" + v };
+ }
+}
+
+class tst_qmediaplayerformatsupport : public QObject
+{
+ Q_OBJECT
+
+public slots:
+ void initTestCase()
+ {
+ if (!isFFMPEGPlatform())
+ QSKIP("Test is only intended for FFmpeg backend");
+ }
+
+private slots:
+ void play_succeeds_withSupportedContainer_data()
+ {
+ QTest::addColumn<QUrl>("url");
+ addTestData(":testdata/containers/supported"_L1);
+ }
+
+ void play_succeeds_withSupportedContainer()
+ {
+ QFETCH(const QUrl, url);
+
+ Fixture f;
+ f.player.setSource(url);
+ f.player.play();
+
+ QTRY_VERIFY(f.startedPlaying());
+
+ // Log to understand failures in CI
+ for (const QList<QVariant> &err : f.errorOccurred)
+ qCritical() << "Unexpected failure detected:" << err[0] << "," << err[1];
+
+#ifdef Q_OS_ANDROID
+ QSKIP("QTBUG-125613 Limited format support on Android 14");
+#endif
+
+ QVERIFY(f.errorOccurred.empty());
+ }
+
+ void play_succeeds_withSupportedPixelFormats_data()
+ {
+ QTest::addColumn<QUrl>("url");
+ addTestData(":testdata/pixel_formats/supported"_L1);
+ }
+
+ void play_succeeds_withSupportedPixelFormats()
+ {
+ QFETCH(const QUrl, url);
+
+ Fixture f;
+ f.player.setSource(url);
+ f.player.play();
+
+ QTRY_VERIFY(f.startedPlaying());
+
+ // Log to understand failures in CI
+ for (const QList<QVariant> &err : f.errorOccurred)
+ qCritical() << "Unexpected failure detected:" << err[0] << "," << err[1];
+
+#ifdef Q_OS_ANDROID
+ QSKIP("QTBUG-125613 Limited format support on Android 14");
+#endif
+
+ QVERIFY(f.errorOccurred.empty());
+ }
+
+ void play_fails_withUnsupportedContainer_data()
+ {
+ QTest::addColumn<QUrl>("url");
+ addTestData(":testdata/containers/unsupported"_L1);
+ }
+
+ void play_fails_withUnsupportedContainer()
+ {
+ QFETCH(const QUrl, url);
+
+ Fixture f;
+ f.player.setSource(url);
+ f.player.play();
+
+ QTRY_COMPARE_NE(f.player.error(), QMediaPlayer::NoError);
+ }
+};
+
+QTEST_MAIN(tst_qmediaplayerformatsupport)
+
+#include "tst_qmediaplayerformatsupport.moc"
diff --git a/tests/auto/integration/qquickvideooutput/tst_qquickvideooutput.cpp b/tests/auto/integration/qquickvideooutput/tst_qquickvideooutput.cpp
index a7b6f286c..85a30bbb7 100644
--- a/tests/auto/integration/qquickvideooutput/tst_qquickvideooutput.cpp
+++ b/tests/auto/integration/qquickvideooutput/tst_qquickvideooutput.cpp
@@ -266,7 +266,7 @@ void tst_QQuickVideoOutput::paintSurface()
videoOutput->setSize(QSize(2, 2));
QVideoFrame frame(QVideoFrameFormat(QSize(4, 4), QVideoFrameFormat::Format_ARGB8888));
- frame.map(QVideoFrame::ReadWrite);
+ frame.map(QtVideo::MapMode::ReadWrite);
QCOMPARE(frame.mappedBytes(0), 64);
memcpy(frame.bits(0), rgb32ImageData, 64);
frame.unmap();
diff --git a/tests/auto/integration/qsoundeffect/CMakeLists.txt b/tests/auto/integration/qsoundeffect/CMakeLists.txt
index d3760e816..d403d9514 100644
--- a/tests/auto/integration/qsoundeffect/CMakeLists.txt
+++ b/tests/auto/integration/qsoundeffect/CMakeLists.txt
@@ -7,37 +7,14 @@
## tst_qsoundeffect Test:
#####################################################################
-# Collect test data
-list(APPEND test_data "test.wav")
-
qt_internal_add_test(tst_qsoundeffect
SOURCES
tst_qsoundeffect.cpp
LIBRARIES
Qt::Gui
Qt::MultimediaPrivate
- TESTDATA ${test_data}
-)
-
-# Resources:
-set(resources_resource_files
- "test.wav"
- "test_corrupted.wav"
- "test_tone.wav"
-)
-
-qt_internal_add_resource(tst_qsoundeffect "resources"
- PREFIX
- "/"
- FILES
- ${resources_resource_files}
-)
-
-
-## Scopes:
-#####################################################################
-
-qt_internal_extend_target(tst_qsoundeffect CONDITION UNIX AND NOT APPLE AND NOT QT_FEATURE_pulseaudio
- DEFINES
- QT_MULTIMEDIA_QMEDIAPLAYER
+ TESTDATA
+ "test.wav"
+ "test_corrupted.wav"
+ "test_tone.wav"
)
diff --git a/tests/auto/integration/qsoundeffect/tst_qsoundeffect.cpp b/tests/auto/integration/qsoundeffect/tst_qsoundeffect.cpp
index 6b2871a38..0d3d9f8b3 100644
--- a/tests/auto/integration/qsoundeffect/tst_qsoundeffect.cpp
+++ b/tests/auto/integration/qsoundeffect/tst_qsoundeffect.cpp
@@ -89,7 +89,7 @@ void tst_QSoundEffect::initTestCase()
void tst_QSoundEffect::testSource()
{
- QSignalSpy readSignal(sound, SIGNAL(sourceChanged()));
+ QSignalSpy readSignal(sound, &QSoundEffect::sourceChanged);
sound->setSource(url);
sound->setVolume(0.1f);
@@ -108,8 +108,8 @@ void tst_QSoundEffect::testLooping()
sound->setSource(url);
QTRY_COMPARE(sound->status(), QSoundEffect::Ready);
- QSignalSpy readSignal_Count(sound, SIGNAL(loopCountChanged()));
- QSignalSpy readSignal_Remaining(sound, SIGNAL(loopsRemainingChanged()));
+ QSignalSpy readSignal_Count(sound, &QSoundEffect::loopCountChanged);
+ QSignalSpy readSignal_Remaining(sound, &QSoundEffect::loopsRemainingChanged);
sound->setLoopCount(3);
sound->setVolume(0.1f);
@@ -195,7 +195,7 @@ void tst_QSoundEffect::testLooping()
void tst_QSoundEffect::testVolume()
{
- QSignalSpy readSignal(sound, SIGNAL(volumeChanged()));
+ QSignalSpy readSignal(sound, &QSoundEffect::volumeChanged);
sound->setVolume(0.5);
QCOMPARE(sound->volume(),0.5);
@@ -205,7 +205,7 @@ void tst_QSoundEffect::testVolume()
void tst_QSoundEffect::testMuting()
{
- QSignalSpy readSignal(sound, SIGNAL(mutedChanged()));
+ QSignalSpy readSignal(sound, &QSoundEffect::mutedChanged);
sound->setMuted(true);
QCOMPARE(sound->isMuted(),true);
@@ -374,8 +374,14 @@ void tst_QSoundEffect::testSupportedMimeTypes()
void tst_QSoundEffect::testCorruptFile()
{
+ using namespace Qt::Literals;
+ auto expectedMessagePattern =
+ QRegularExpression(uR"(^QSoundEffect\(qaudio\): Error decoding source .*$)"_s);
+
for (int i = 0; i < 10; i++) {
- QSignalSpy statusSpy(sound, SIGNAL(statusChanged()));
+ QSignalSpy statusSpy(sound, &QSoundEffect::statusChanged);
+ QTest::ignoreMessage(QtMsgType::QtWarningMsg, expectedMessagePattern);
+
sound->setSource(urlCorrupted);
QVERIFY(!sound->isPlaying());
QVERIFY(sound->status() == QSoundEffect::Loading || sound->status() == QSoundEffect::Error);
diff --git a/tests/auto/integration/qvideoframebackend/CMakeLists.txt b/tests/auto/integration/qvideoframebackend/CMakeLists.txt
index 8c129fa97..e4fa79201 100644
--- a/tests/auto/integration/qvideoframebackend/CMakeLists.txt
+++ b/tests/auto/integration/qvideoframebackend/CMakeLists.txt
@@ -19,14 +19,8 @@ qt_internal_add_test(tst_qvideoframebackend
LIBRARIES
Qt::Gui
Qt::MultimediaPrivate
+ BUILTIN_TESTDATA
TESTDATA ${testdata_resource_files}
INCLUDE_DIRECTORIES
../shared/
)
-
-qt_internal_add_resource(tst_qvideoframebackend "testdata"
- PREFIX
- "/"
- FILES
- ${testdata_resource_files}
-)
diff --git a/tests/auto/integration/qvideoframebackend/tst_qvideoframebackend.cpp b/tests/auto/integration/qvideoframebackend/tst_qvideoframebackend.cpp
index 72751d889..30f96fb50 100644
--- a/tests/auto/integration/qvideoframebackend/tst_qvideoframebackend.cpp
+++ b/tests/auto/integration/qvideoframebackend/tst_qvideoframebackend.cpp
@@ -7,6 +7,7 @@
#include <qdebug.h>
#include "mediafileselector.h"
+#include "mediabackendutils.h"
#include "testvideosink.h"
#include "private/qvideotexturehelper_p.h"
#include "private/qvideowindow_p.h"
@@ -81,6 +82,11 @@ void tst_QVideoFrameBackend::addMediaPlayerFrameTestData(F &&f)
return;
}
+ if (isGStreamerPlatform()) {
+ qWarning() << "createMediaPlayerFrame spuriously fails with gstreamer";
+ return;
+ }
+
f();
}
@@ -107,22 +113,22 @@ void tst_QVideoFrameBackend::testMediaFilesAreSupported()
void tst_QVideoFrameBackend::toImage_retainsThePreviousMappedState_data()
{
QTest::addColumn<FrameCreator>("frameCreator");
- QTest::addColumn<QVideoFrame::MapMode>("initialMapMode");
+ QTest::addColumn<QtVideo::MapMode>("initialMapMode");
// clang-format off
QTest::addRow("defaulFrame.notMapped") << &tst_QVideoFrameBackend::createDefaultFrame
- << QVideoFrame::NotMapped;
+ << QtVideo::MapMode::NotMapped;
QTest::addRow("defaulFrame.readOnly") << &tst_QVideoFrameBackend::createDefaultFrame
- << QVideoFrame::ReadOnly;
+ << QtVideo::MapMode::ReadOnly;
addMediaPlayerFrameTestData([]()
{
QTest::addRow("mediaPlayerFrame.notMapped")
<< &tst_QVideoFrameBackend::createMediaPlayerFrame
- << QVideoFrame::NotMapped;
+ << QtVideo::MapMode::NotMapped;
QTest::addRow("mediaPlayerFrame.readOnly")
<< &tst_QVideoFrameBackend::createMediaPlayerFrame
- << QVideoFrame::ReadOnly;
+ << QtVideo::MapMode::ReadOnly;
});
// clang-format on
@@ -131,41 +137,41 @@ void tst_QVideoFrameBackend::toImage_retainsThePreviousMappedState_data()
void tst_QVideoFrameBackend::toImage_retainsThePreviousMappedState()
{
QFETCH(const FrameCreator, frameCreator);
- QFETCH(const QVideoFrame::MapMode, initialMapMode);
- const bool initiallyMapped = initialMapMode != QVideoFrame::NotMapped;
+ QFETCH(const QtVideo::MapMode, initialMapMode);
+ const bool initiallyMapped = initialMapMode != QtVideo::MapMode::NotMapped;
QVideoFrame frame = std::invoke(frameCreator, this);
QVERIFY(frame.isValid());
frame.map(initialMapMode);
- QCOMPARE(frame.mapMode(), initialMapMode);
+ QCOMPARE(static_cast<QtVideo::MapMode>(frame.mapMode()), initialMapMode);
QImage image = frame.toImage();
QVERIFY(!image.isNull());
- QCOMPARE(frame.mapMode(), initialMapMode);
+ QCOMPARE(static_cast<QtVideo::MapMode>(frame.mapMode()), initialMapMode);
QCOMPARE(frame.isMapped(), initiallyMapped);
}
void tst_QVideoFrameBackend::toImage_rendersUpdatedFrame_afterMappingInWriteModeAndModifying_data()
{
QTest::addColumn<FrameCreator>("frameCreator");
- QTest::addColumn<QVideoFrame::MapMode>("mapMode");
+ QTest::addColumn<QtVideo::MapMode>("mapMode");
// clang-format off
QTest::addRow("defaulFrame.writeOnly") << &tst_QVideoFrameBackend::createDefaultFrame
- << QVideoFrame::WriteOnly;
+ << QtVideo::MapMode::WriteOnly;
QTest::addRow("defaulFrame.readWrite") << &tst_QVideoFrameBackend::createDefaultFrame
- << QVideoFrame::ReadWrite;
+ << QtVideo::MapMode::ReadWrite;
addMediaPlayerFrameTestData([]()
{
QTest::addRow("mediaPlayerFrame.writeOnly")
<< &tst_QVideoFrameBackend::createMediaPlayerFrame
- << QVideoFrame::WriteOnly;
+ << QtVideo::MapMode::WriteOnly;
QTest::addRow("mediaPlayerFrame.readWrite")
<< &tst_QVideoFrameBackend::createMediaPlayerFrame
- << QVideoFrame::ReadWrite;
+ << QtVideo::MapMode::ReadWrite;
});
// clang-format on
}
@@ -173,7 +179,7 @@ void tst_QVideoFrameBackend::toImage_rendersUpdatedFrame_afterMappingInWriteMode
void tst_QVideoFrameBackend::toImage_rendersUpdatedFrame_afterMappingInWriteModeAndModifying()
{
QFETCH(const FrameCreator, frameCreator);
- QFETCH(const QVideoFrame::MapMode, mapMode);
+ QFETCH(const QtVideo::MapMode, mapMode);
// Arrange
diff --git a/tests/auto/integration/shared/mediabackendutils.h b/tests/auto/integration/shared/mediabackendutils.h
index 3279f7044..b1fa30bb2 100644
--- a/tests/auto/integration/shared/mediabackendutils.h
+++ b/tests/auto/integration/shared/mediabackendutils.h
@@ -12,6 +12,11 @@ inline bool isGStreamerPlatform()
return QPlatformMediaIntegration::instance()->name() == "gstreamer";
}
+inline bool isQNXPlatform()
+{
+ return QPlatformMediaIntegration::instance()->name() == "qnx";
+}
+
inline bool isDarwinPlatform()
{
return QPlatformMediaIntegration::instance()->name() == "darwin";
@@ -32,12 +37,23 @@ inline bool isWindowsPlatform()
return QPlatformMediaIntegration::instance()->name() == "windows";
}
+inline bool isCI()
+{
+ return qEnvironmentVariable("QTEST_ENVIRONMENT").toLower() == "ci";
+}
+
#define QSKIP_GSTREAMER(message) \
do { \
if (isGStreamerPlatform()) \
QSKIP(message); \
} while (0)
+#define QSKIP_IF_NOT_FFMPEG() \
+ do { \
+ if (!isFFMPEGPlatform()) \
+ QSKIP("Feature is only supported on FFmpeg"); \
+ } while (0)
+
#define QSKIP_FFMPEG(message) \
do { \
if (isFFMPEGPlatform()) \
diff --git a/tests/auto/integration/shared/mediafileselector.h b/tests/auto/integration/shared/mediafileselector.h
index e36677f34..aa192f3e9 100644
--- a/tests/auto/integration/shared/mediafileselector.h
+++ b/tests/auto/integration/shared/mediafileselector.h
@@ -107,10 +107,19 @@ private:
player.play();
const auto waitingFinished = QTest::qWaitFor([&]() {
- const auto status = player.mediaStatus();
- return status == QMediaPlayer::BufferedMedia || status == QMediaPlayer::EndOfMedia
- || status == QMediaPlayer::InvalidMedia
- || player.error() != QMediaPlayer::NoError;
+ if (player.error() != QMediaPlayer::NoError)
+ return true;
+
+ switch (player.mediaStatus()) {
+ case QMediaPlayer::BufferingMedia:
+ case QMediaPlayer::BufferedMedia:
+ case QMediaPlayer::EndOfMedia:
+ case QMediaPlayer::InvalidMedia:
+ return true;
+
+ default:
+ return false;
+ }
});
auto enumValueToString = [](auto enumValue) {
diff --git a/tests/auto/integration/shared/testvideosink.h b/tests/auto/integration/shared/testvideosink.h
index 8c3f9a5bd..b14c819c5 100644
--- a/tests/auto/integration/shared/testvideosink.h
+++ b/tests/auto/integration/shared/testvideosink.h
@@ -8,6 +8,7 @@
#include <qvideoframe.h>
#include <qelapsedtimer.h>
#include <qsignalspy.h>
+#include <chrono>
QT_BEGIN_NAMESPACE
@@ -42,6 +43,10 @@ private Q_SLOTS:
if (m_storeFrames)
m_frameList.append(frame);
+
+ if (frame.isValid())
+ m_frameTimes.emplace_back(std::chrono::microseconds(frame.startTime()));
+
++m_totalFrames;
}
@@ -52,6 +57,8 @@ public:
QList<QVideoFrame> m_frameList;
int m_totalFrames = 0; // used instead of the list when frames are not stored
QElapsedTimer m_elapsedTimer;
+ using TimePoint = std::chrono::time_point<std::chrono::high_resolution_clock>;
+ std::vector<TimePoint> m_frameTimes;
private:
bool m_storeFrames;
diff --git a/tests/auto/shared/qscopedenvironmentvariable.h b/tests/auto/shared/qscopedenvironmentvariable.h
new file mode 100644
index 000000000..390dfd400
--- /dev/null
+++ b/tests/auto/shared/qscopedenvironmentvariable.h
@@ -0,0 +1,29 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only
+
+#ifndef QSCOPEDENVIRONMENTVARIABLE_H
+#define QSCOPEDENVIRONMENTVARIABLE_H
+
+#include <QtCore/qbytearrayview.h>
+#include <QtCore/qtenvironmentvariables.h>
+
+struct QScopedEnvironmentVariable
+{
+ QScopedEnvironmentVariable(const QScopedEnvironmentVariable &) = delete;
+ QScopedEnvironmentVariable(QScopedEnvironmentVariable &&) = delete;
+ QScopedEnvironmentVariable &operator=(const QScopedEnvironmentVariable &) = delete;
+ QScopedEnvironmentVariable &operator=(QScopedEnvironmentVariable &&) = delete;
+
+ QScopedEnvironmentVariable(const char *envvar, QByteArrayView name) : envvar{ envvar }
+ {
+ Q_ASSERT(envvar);
+ qputenv(envvar, name);
+ }
+
+ ~QScopedEnvironmentVariable() { qunsetenv(envvar); }
+
+private:
+ const char *envvar;
+};
+
+#endif // QSCOPEDENVIRONMENTVARIABLE_H
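The new header is a small RAII guard around qputenv()/qunsetenv(). A usage sketch could look like the following; the variable name is deliberately made up for illustration:

    #include "qscopedenvironmentvariable.h"

    void someTest()
    {
        // "MY_HYPOTHETICAL_VARIABLE" is illustrative only; the guard sets it via
        // qputenv() here and unsets it via qunsetenv() when the scope ends.
        QScopedEnvironmentVariable guard{ "MY_HYPOTHETICAL_VARIABLE", "1" };

        // ... code under test runs with the variable set ...
    }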
diff --git a/tests/auto/unit/mockbackend/qmockaudiodecoder.cpp b/tests/auto/unit/mockbackend/qmockaudiodecoder.cpp
index 700bf4486..3c6b940a9 100644
--- a/tests/auto/unit/mockbackend/qmockaudiodecoder.cpp
+++ b/tests/auto/unit/mockbackend/qmockaudiodecoder.cpp
@@ -60,7 +60,7 @@ void QMockAudioDecoder::start()
setIsDecoding(true);
durationChanged(duration());
- QTimer::singleShot(50, this, SLOT(pretendDecode()));
+ QTimer::singleShot(50, this, &QMockAudioDecoder::pretendDecode);
} else {
error(QAudioDecoder::ResourceError, "No source set");
}
@@ -92,7 +92,7 @@ QAudioBuffer QMockAudioDecoder::read()
if (mBuffers.isEmpty() && mSerial >= MOCK_DECODER_MAX_BUFFERS) {
finished();
} else
- QTimer::singleShot(50, this, SLOT(pretendDecode()));
+ QTimer::singleShot(50, this, &QMockAudioDecoder::pretendDecode);
}
return a;
diff --git a/tests/auto/unit/mockbackend/qmockcamera.cpp b/tests/auto/unit/mockbackend/qmockcamera.cpp
index 9347e8c84..23ecc36ae 100644
--- a/tests/auto/unit/mockbackend/qmockcamera.cpp
+++ b/tests/auto/unit/mockbackend/qmockcamera.cpp
@@ -34,12 +34,6 @@ void QMockCamera::setActive(bool active)
emit activeChanged(active);
}
-/* helper method to emit the signal error */
-void QMockCamera::setError(QCamera::Error err, QString errorString)
-{
- emit error(err, errorString);
-}
-
void QMockCamera::setCamera(const QCameraDevice &camera)
{
m_camera = camera;
diff --git a/tests/auto/unit/mockbackend/qmockcamera.h b/tests/auto/unit/mockbackend/qmockcamera.h
index df8a08874..3d8159e84 100644
--- a/tests/auto/unit/mockbackend/qmockcamera.h
+++ b/tests/auto/unit/mockbackend/qmockcamera.h
@@ -31,9 +31,6 @@ public:
void setActive(bool active) override;
- /* helper method to emit the signal error */
- void setError(QCamera::Error err, QString errorString);
-
void setCamera(const QCameraDevice &camera) override;
bool setCameraFormat(const QCameraFormat &format) override;
diff --git a/tests/auto/unit/mockbackend/qmockimagecapture.cpp b/tests/auto/unit/mockbackend/qmockimagecapture.cpp
index 74ab08e59..96e53b2f4 100644
--- a/tests/auto/unit/mockbackend/qmockimagecapture.cpp
+++ b/tests/auto/unit/mockbackend/qmockimagecapture.cpp
@@ -25,7 +25,7 @@ int QMockImageCapture::capture(const QString &fileName)
m_fileName = fileName;
m_captureRequest++;
emit readyForCaptureChanged(m_ready = false);
- QTimer::singleShot(5, this, SLOT(captured()));
+ QTimer::singleShot(5, this, &QMockImageCapture::captured);
return m_captureRequest;
} else {
emit error(-1, QImageCapture::NotReadyError,
diff --git a/tests/auto/unit/mockbackend/qmockmediacapturesession.h b/tests/auto/unit/mockbackend/qmockmediacapturesession.h
index 24453d795..0a2d3fb60 100644
--- a/tests/auto/unit/mockbackend/qmockmediacapturesession.h
+++ b/tests/auto/unit/mockbackend/qmockmediacapturesession.h
@@ -65,11 +65,19 @@ public:
QPlatformSurfaceCapture *screenCapture() override { return m_screenCapture; }
void setScreenCapture(QPlatformSurfaceCapture *capture) override { m_screenCapture = capture; }
+ QPlatformSurfaceCapture *windowCapture() override { return m_windowCapture; }
+ void setWindowCapture(QPlatformSurfaceCapture *capture) override { m_windowCapture = capture; }
+
+ QPlatformVideoFrameInput *videoFrameInput() override { return m_videoFrameInput; }
+ void setVideoFrameInput(QPlatformVideoFrameInput *input) override { m_videoFrameInput = input; }
+
QMockCamera *mockCameraControl = nullptr;
QPlatformImageCapture *mockImageCapture = nullptr;
QMockMediaEncoder *mockControl = nullptr;
QPlatformAudioInput *m_audioInput = nullptr;
QPlatformSurfaceCapture *m_screenCapture = nullptr;
+ QPlatformSurfaceCapture *m_windowCapture = nullptr;
+ QPlatformVideoFrameInput *m_videoFrameInput = nullptr;
bool hasControls;
};
diff --git a/tests/auto/unit/mockbackend/qmockmediaencoder.h b/tests/auto/unit/mockbackend/qmockmediaencoder.h
index 6d475df62..cf855488b 100644
--- a/tests/auto/unit/mockbackend/qmockmediaencoder.h
+++ b/tests/auto/unit/mockbackend/qmockmediaencoder.h
@@ -37,11 +37,11 @@ public:
virtual void setMetaData(const QMediaMetaData &m) override
{
m_metaData = m;
- emit metaDataChanged();
+ metaDataChanged();
}
virtual QMediaMetaData metaData() const override { return m_metaData; }
- using QPlatformMediaRecorder::error;
+ using QPlatformMediaRecorder::updateError;
public:
void record(QMediaEncoderSettings &settings) override
@@ -49,30 +49,30 @@ public:
m_state = QMediaRecorder::RecordingState;
m_settings = settings;
m_position=1;
- emit stateChanged(m_state);
- emit durationChanged(m_position);
+ stateChanged(m_state);
+ durationChanged(m_position);
QUrl actualLocation = outputLocation().isEmpty() ? QUrl::fromLocalFile("default_name.mp4") : outputLocation();
- emit actualLocationChanged(actualLocation);
+ actualLocationChanged(actualLocation);
}
void pause() override
{
m_state = QMediaRecorder::PausedState;
- emit stateChanged(m_state);
+ stateChanged(m_state);
}
void resume() override
{
m_state = QMediaRecorder::RecordingState;
- emit stateChanged(m_state);
+ stateChanged(m_state);
}
void stop() override
{
m_position=0;
m_state = QMediaRecorder::StoppedState;
- emit stateChanged(m_state);
+ stateChanged(m_state);
}
void reset()
@@ -80,8 +80,8 @@ public:
m_state = QMediaRecorder::StoppedState;
m_settings = QMediaEncoderSettings();
m_position = 0;
- emit stateChanged(m_state);
- emit durationChanged(m_position);
+ stateChanged(m_state);
+ durationChanged(m_position);
clearActualLocation();
}
diff --git a/tests/auto/unit/mockbackend/qmockmediaplayer.h b/tests/auto/unit/mockbackend/qmockmediaplayer.h
index 8db0b3a1a..a3ba76beb 100644
--- a/tests/auto/unit/mockbackend/qmockmediaplayer.h
+++ b/tests/auto/unit/mockbackend/qmockmediaplayer.h
@@ -44,11 +44,15 @@ public:
}
qint64 duration() const override { return _duration; }
- void setDuration(qint64 duration) { emit durationChanged(_duration = duration); }
+ void setDuration(qint64 duration) { durationChanged(_duration = duration); }
qint64 position() const override { return _position; }
- void setPosition(qint64 position) override { if (position != _position) emit positionChanged(_position = position); }
+ void setPosition(qint64 position) override
+ {
+ if (position != _position)
+ positionChanged(_position = position);
+ }
float bufferProgress() const override { return _bufferProgress; }
void setBufferStatus(float status)
@@ -63,13 +67,17 @@ public:
bool isVideoAvailable() const override { return _videoAvailable; }
bool isSeekable() const override { return _isSeekable; }
- void setSeekable(bool seekable) { emit seekableChanged(_isSeekable = seekable); }
+ void setSeekable(bool seekable) { seekableChanged(_isSeekable = seekable); }
QMediaTimeRange availablePlaybackRanges() const override { return QMediaTimeRange(_seekRange.first, _seekRange.second); }
void setSeekRange(qint64 minimum, qint64 maximum) { _seekRange = qMakePair(minimum, maximum); }
qreal playbackRate() const override { return _playbackRate; }
- void setPlaybackRate(qreal rate) override { if (rate != _playbackRate) emit playbackRateChanged(_playbackRate = rate); }
+ void setPlaybackRate(qreal rate) override
+ {
+ if (rate != _playbackRate)
+ playbackRateChanged(_playbackRate = rate);
+ }
QUrl media() const override { return _media; }
void setMedia(const QUrl &content, QIODevice *stream) override
@@ -92,10 +100,7 @@ public:
void setAudioOutput(QPlatformAudioOutput *output) override { m_audioOutput = output; }
- void emitError(QMediaPlayer::Error err, const QString &errorString)
- {
- emit error(err, errorString);
- }
+ void emitError(QMediaPlayer::Error err, const QString &errorString) { error(err, errorString); }
void setState(QMediaPlayer::PlaybackState state)
{
@@ -119,8 +124,16 @@ public:
void setIsValid(bool isValid) { _isValid = isValid; }
void setMedia(QUrl media) { _media = media; }
void setVideoAvailable(bool videoAvailable) { _videoAvailable = videoAvailable; }
- void setError(QMediaPlayer::Error err) { _error = err; emit error(_error, _errorString); }
- void setErrorString(QString errorString) { _errorString = errorString; emit error(_error, _errorString); }
+ void setError(QMediaPlayer::Error err)
+ {
+ _error = err;
+ error(_error, _errorString);
+ }
+ void setErrorString(QString errorString)
+ {
+ _errorString = errorString;
+ error(_error, _errorString);
+ }
void reset()
{
diff --git a/tests/auto/unit/mockbackend/qmocksurfacecapture.h b/tests/auto/unit/mockbackend/qmocksurfacecapture.h
index 0924b7255..00ce80ebb 100644
--- a/tests/auto/unit/mockbackend/qmocksurfacecapture.h
+++ b/tests/auto/unit/mockbackend/qmocksurfacecapture.h
@@ -5,6 +5,7 @@
#define QMOCKSURFACECAPTURE_H
#include "private/qplatformsurfacecapture_p.h"
+#include "private/qvideoframe_p.h"
#include "qmockvideobuffer.h"
#include "qthread.h"
@@ -25,10 +26,11 @@ class QMockSurfaceCapture : public QPlatformSurfaceCapture
image.fill(i % 2 ? Qt::red : Qt::blue);
- QVideoFrame frame(new QMockVideoBuffer(image),
- QVideoFrameFormat(m_capture.m_imageSize,
- QVideoFrameFormat::pixelFormatFromImageFormat(
- m_capture.m_imageFormat)));
+ QVideoFrame frame = QVideoFramePrivate::createFrame(
+ std::make_unique<QMockVideoBuffer>(image),
+ QVideoFrameFormat(m_capture.m_imageSize,
+ QVideoFrameFormat::pixelFormatFromImageFormat(
+ m_capture.m_imageFormat)));
emit m_capture.newVideoFrame(frame);
}
diff --git a/tests/auto/unit/mockbackend/qmockvideobuffer.h b/tests/auto/unit/mockbackend/qmockvideobuffer.h
index f82a09a59..0ee32416c 100644
--- a/tests/auto/unit/mockbackend/qmockvideobuffer.h
+++ b/tests/auto/unit/mockbackend/qmockvideobuffer.h
@@ -5,35 +5,33 @@
#define QMOCKVIDEOBUFFER_H
#include "qimage.h"
-#include "private/qabstractvideobuffer_p.h"
+#include "private/qhwvideobuffer_p.h"
-class QMockVideoBuffer : public QAbstractVideoBuffer
+class QMockVideoBuffer : public QHwVideoBuffer
{
public:
- QMockVideoBuffer(QImage image) : QAbstractVideoBuffer(QVideoFrame::NoHandle), m_image(image) { }
+ QMockVideoBuffer(QImage image) : QHwVideoBuffer(QVideoFrame::NoHandle), m_image(image) { }
- QVideoFrame::MapMode mapMode() const override { return m_mapMode; }
-
- MapData map(QVideoFrame::MapMode mode) override
+ MapData map(QtVideo::MapMode mode) override
{
MapData mapData;
- if (m_mapMode == QVideoFrame::NotMapped && !m_image.isNull()
- && mode != QVideoFrame::NotMapped) {
+ if (m_mapMode == QtVideo::MapMode::NotMapped && !m_image.isNull()
+ && mode != QtVideo::MapMode::NotMapped) {
m_mapMode = mode;
- mapData.nPlanes = 1;
+ mapData.planeCount = 1;
mapData.bytesPerLine[0] = m_image.bytesPerLine();
mapData.data[0] = m_image.bits();
- mapData.size[0] = m_image.sizeInBytes();
+ mapData.dataSize[0] = m_image.sizeInBytes();
}
return mapData;
}
- void unmap() override { m_mapMode = QVideoFrame::NotMapped; }
+ void unmap() override { m_mapMode = QtVideo::MapMode::NotMapped; }
private:
- QVideoFrame::MapMode m_mapMode = QVideoFrame::NotMapped;
+ QtVideo::MapMode m_mapMode = QtVideo::MapMode::NotMapped;
QImage m_image;
};
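With QMockVideoBuffer now deriving from QHwVideoBuffer, frames are assembled through the private factory shown in the qmocksurfacecapture.h hunk above rather than a public QVideoFrame constructor. A minimal sketch, with an arbitrary image size and fill colour:

    QImage image(4, 4, QImage::Format_RGB32);
    image.fill(Qt::red);

    // QVideoFramePrivate::createFrame takes ownership of the buffer.
    QVideoFrame frame = QVideoFramePrivate::createFrame(
            std::make_unique<QMockVideoBuffer>(image),
            QVideoFrameFormat(image.size(),
                              QVideoFrameFormat::pixelFormatFromImageFormat(image.format())));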
diff --git a/tests/auto/unit/multimedia/CMakeLists.txt b/tests/auto/unit/multimedia/CMakeLists.txt
index 598bad5ad..151577d14 100644
--- a/tests/auto/unit/multimedia/CMakeLists.txt
+++ b/tests/auto/unit/multimedia/CMakeLists.txt
@@ -12,6 +12,7 @@ add_subdirectory(qcamera)
add_subdirectory(qcameradevice)
add_subdirectory(qimagecapture)
add_subdirectory(qmediaformat)
+add_subdirectory(qmediametadata)
add_subdirectory(qmediaplayer)
#add_subdirectory(qmediaplaylist)
add_subdirectory(qmediarecorder)
@@ -19,7 +20,9 @@ add_subdirectory(qmediatimerange)
add_subdirectory(qmultimediautils)
add_subdirectory(qvideoframe)
add_subdirectory(qvideoframeformat)
-add_subdirectory(qvideoframecolormanagement)
+if(QT_FEATURE_ffmpeg)
+ add_subdirectory(qvideoframecolormanagement)
+endif()
add_subdirectory(qaudiobuffer)
add_subdirectory(qaudiodecoder)
add_subdirectory(qsamplecache)
diff --git a/tests/auto/unit/multimedia/gstreamer_backend/tst_gstreamer_backend.cpp b/tests/auto/unit/multimedia/gstreamer_backend/tst_gstreamer_backend.cpp
index d51962076..929c46b3e 100644
--- a/tests/auto/unit/multimedia/gstreamer_backend/tst_gstreamer_backend.cpp
+++ b/tests/auto/unit/multimedia/gstreamer_backend/tst_gstreamer_backend.cpp
@@ -1,16 +1,42 @@
// Copyright (C) 2024 The Qt Company Ltd.
-// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only WITH Qt-GPL-exception-1.0
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only
#include "tst_gstreamer_backend.h"
#include <QtTest/QtTest>
-#include <QtQGstreamerMediaPlugin/private/qgstreamermetadata_p.h>
+#include <QtMultimedia/qmediaformat.h>
+
#include <QtQGstreamerMediaPlugin/private/qgst_handle_types_p.h>
+#include <QtQGstreamerMediaPlugin/private/qgst_p.h>
+#include <QtQGstreamerMediaPlugin/private/qgst_debug_p.h>
+#include <QtQGstreamerMediaPlugin/private/qgstpipeline_p.h>
+#include <QtQGstreamerMediaPlugin/private/qgstreamermetadata_p.h>
QT_USE_NAMESPACE
+// NOLINTBEGIN(readability-convert-member-functions-to-static)
+
using namespace Qt::Literals;
+namespace {
+
+template <typename... Pairs>
+QMediaMetaData makeQMediaMetaData(Pairs &&...pairs)
+{
+ QMediaMetaData metadata;
+
+ auto addKeyValuePair = [&](auto &&pair) {
+ metadata.insert(pair.first, pair.second);
+ return;
+ };
+
+ (addKeyValuePair(pairs), ...);
+
+ return metadata;
+}
+
+} // namespace
+
QGstTagListHandle tst_GStreamer::parseTagList(const char *str)
{
QGstTagListHandle tagList{
@@ -25,6 +51,38 @@ QGstTagListHandle tst_GStreamer::parseTagList(const QByteArray &ba)
return parseTagList(ba.constData());
}
+void tst_GStreamer::qGstCasts_withElement()
+{
+ QGstElement element = QGstElement::createFromFactory("identity", "myPipeline");
+ QVERIFY(element);
+
+ QVERIFY(!qIsGstObjectOfType<GstPipeline>(element.element()));
+ QVERIFY(!qIsGstObjectOfType<GstBin>(element.element()));
+}
+
+void tst_GStreamer::qGstCasts_withBin()
+{
+ QGstBin bin = QGstBin::create("bin");
+ QVERIFY(bin);
+
+ QVERIFY(!qIsGstObjectOfType<GstPipeline>(bin.element()));
+ QVERIFY(qIsGstObjectOfType<GstBin>(bin.element()));
+}
+
+void tst_GStreamer::qGstCasts_withPipeline()
+{
+ QGstPipeline pipeline = QGstPipeline::create("myPipeline");
+
+ QGstElement element{
+ qGstSafeCast<GstElement>(pipeline.pipeline()),
+ QGstElement::NeedsRef,
+ };
+
+ QVERIFY(element);
+ QVERIFY(qIsGstObjectOfType<GstPipeline>(element.element()));
+ QVERIFY(qIsGstObjectOfType<GstBin>(element.element()));
+}
+
void tst_GStreamer::metadata_taglistToMetaData()
{
QGstTagListHandle tagList = parseTagList(R"(taglist, title="My Video", comment="yada")");
@@ -68,12 +126,184 @@ void tst_GStreamer::metadata_taglistToMetaData_extractsDuration()
QGstTagListHandle tagList = parseTagList(
R"__(taglist, video-codec=(string)"On2\ VP9", container-specific-track-id=(string)1, extended-comment=(string){ "ALPHA_MODE\=1", "HANDLER_NAME\=Apple\ Video\ Media\ Handler", "VENDOR_ID\=appl", "TIMECODE\=00:00:00:00", "DURATION\=00:00:00.400000000" }, encoder=(string)"Lavc59.37.100\ libvpx-vp9")__");
- QMediaMetaData parsed = taglistToMetaData(tagList.get());
-
- QEXPECT_FAIL("", "duration in extended comment", Continue);
+ QMediaMetaData parsed = taglistToMetaData(tagList);
QCOMPARE(parsed[QMediaMetaData::Duration].value<int>(), 400);
}
+void tst_GStreamer::metadata_taglistToMetaData_extractsLanguage()
+{
+ QFETCH(QByteArray, tagListString);
+ QFETCH(QLocale::Language, language);
+
+ QGstTagListHandle tagList = parseTagList(tagListString);
+ QVERIFY(tagList);
+
+ QMediaMetaData parsed = taglistToMetaData(tagList);
+ QCOMPARE(parsed[QMediaMetaData::Language].value<QLocale::Language>(), language);
+}
+
+void tst_GStreamer::metadata_taglistToMetaData_extractsLanguage_data()
+{
+ QTest::addColumn<QByteArray>("tagListString");
+ QTest::addColumn<QLocale::Language>("language");
+
+ QTest::newRow("english, en")
+ << R"__(taglist, container-format=(string)Matroska, audio-codec=(string)"MPEG-4\ AAC", language-code=(string)en, container-specific-track-id=(string)5, encoder=(string)Lavf60.16.100, extended-comment=(string)"DURATION\=00:00:05.055000000")__"_ba
+ << QLocale::Language::English;
+ QTest::newRow("spanish, es")
+ << R"__(taglist, container-format=(string)Matroska, audio-codec=(string)"MPEG-4\ AAC", language-code=(string)es, container-specific-track-id=(string)5, encoder=(string)Lavf60.16.100, extended-comment=(string)"DURATION\=00:00:05.055000000")__"_ba
+ << QLocale::Language::Spanish;
+ QTest::newRow("english, eng")
+ << R"__(taglist, container-format=(string)Matroska, audio-codec=(string)"MPEG-4\ AAC", language-code=(string)eng, container-specific-track-id=(string)5, encoder=(string)Lavf60.16.100, extended-comment=(string)"DURATION\=00:00:05.055000000")__"_ba
+ << QLocale::Language::English;
+ QTest::newRow("spanish, spa")
+ << R"__(taglist, container-format=(string)Matroska, audio-codec=(string)"MPEG-4\ AAC", language-code=(string)spa, container-specific-track-id=(string)5, encoder=(string)Lavf60.16.100, extended-comment=(string)"DURATION\=00:00:05.055000000")__"_ba
+ << QLocale::Language::Spanish;
+}
+
+void tst_GStreamer::metadata_capsToMetaData()
+{
+ QFETCH(QByteArray, capsString);
+ QFETCH(QMediaMetaData, expectedMetadata);
+
+ QGstCaps caps{
+ gst_caps_from_string(capsString.constData()),
+ QGstCaps::HasRef,
+ };
+
+ QMediaMetaData md = capsToMetaData(caps);
+
+ QCOMPARE(md, expectedMetadata);
+}
+
+void tst_GStreamer::metadata_capsToMetaData_data()
+{
+ using Key = QMediaMetaData::Key;
+ using KVPair = std::pair<QMediaMetaData::Key, QVariant>;
+
+ auto makeKVPair = [](Key key, auto value) {
+ return KVPair{
+ key,
+ QVariant::fromValue(value),
+ };
+ };
+
+ QTest::addColumn<QByteArray>("capsString");
+ QTest::addColumn<QMediaMetaData>("expectedMetadata");
+
+ QTest::newRow("container") << R"(video/quicktime, variant=(string)iso)"_ba
+ << makeQMediaMetaData(makeKVPair(Key::FileFormat,
+ QMediaFormat::FileFormat::MPEG4));
+
+ QTest::newRow("video")
+ << R"(video/x-h264, stream-format=(string)avc, alignment=(string)au, level=(string)3.1, profile=(string)main, codec_data=(buffer)014d401fffe10017674d401fda014016ec0440000003004000000c83c60ca801000468ef3c80, width=(int)1280, height=(int)720, framerate=(fraction)25/1, pixel-aspect-ratio=(fraction)1/1)"_ba
+ << makeQMediaMetaData(makeKVPair(Key::VideoCodec, QMediaFormat::VideoCodec::H264),
+ makeKVPair(Key::VideoFrameRate, 25),
+ makeKVPair(Key::Resolution, QSize(1280, 720)));
+
+ QTest::newRow("audio")
+ << R"(audio/mpeg, mpegversion=(int)4, framed=(boolean)true, stream-format=(string)raw, level=(string)4, base-profile=(string)lc, profile=(string)lc, codec_data=(buffer)11b0, rate=(int)48000, channels=(int)6)"_ba
+ << makeQMediaMetaData(makeKVPair(Key::AudioCodec, QMediaFormat::AudioCodec::AAC));
+}
+
+void tst_GStreamer::QGstBin_createFromPipelineDescription()
+{
+ QGstBin bin = QGstBin::createFromPipelineDescription("identity name=foo ! identity name=bar");
+
+ QVERIFY(bin);
+ QVERIFY(bin.findByName("foo"));
+ QCOMPARE_EQ(bin.findByName("foo").getParent(), bin);
+ QVERIFY(bin.findByName("bar"));
+ QVERIFY(!bin.findByName("baz"));
+ bin.dumpGraph("QGstBin_createFromPipelineDescription");
+}
+
+void tst_GStreamer::QGstElement_createFromPipelineDescription()
+{
+ using namespace std::string_view_literals;
+ QGstElement element = QGstElement::createFromPipelineDescription("identity name=foo");
+ QCOMPARE_EQ(element.name(), "foo"sv);
+ QCOMPARE_EQ(element.typeName(), "GstIdentity"sv);
+}
+
+void tst_GStreamer::QGstElement_createFromPipelineDescription_multipleElementsCreatesBin()
+{
+ using namespace std::string_view_literals;
+ QGstElement element =
+ QGstElement::createFromPipelineDescription("identity name=foo ! identity name=bar");
+
+ QVERIFY(element);
+ QCOMPARE_EQ(element.typeName(), "GstPipeline"sv);
+
+ QGstBin bin{
+ qGstSafeCast<GstBin>(element.element()),
+ QGstBin::NeedsRef,
+ };
+
+ QVERIFY(bin);
+ QVERIFY(bin.findByName("foo"));
+ QCOMPARE_EQ(bin.findByName("foo").getParent(), bin);
+ QVERIFY(bin.findByName("bar"));
+ QVERIFY(!bin.findByName("baz"));
+
+ bin.dumpGraph("QGstElement_createFromPipelineDescription_multipleElements");
+}
+
+void tst_GStreamer::QGstPad_inferTypeFromName()
+{
+ auto makePad = [](const char *name, GstPadDirection direction) {
+ return QGstPad{
+ gst_pad_new(name, direction),
+ QGstPad::NeedsRef,
+ };
+ };
+
+ QVERIFY(makePad("audio_0", GST_PAD_SRC).inferTrackTypeFromName()
+ == QPlatformMediaPlayer::AudioStream);
+ QVERIFY(makePad("video_0", GST_PAD_SRC).inferTrackTypeFromName()
+ == QPlatformMediaPlayer::VideoStream);
+ QVERIFY(makePad("text_0", GST_PAD_SRC).inferTrackTypeFromName()
+ == QPlatformMediaPlayer::SubtitleStream);
+ QVERIFY(makePad("src_0", GST_PAD_SRC).inferTrackTypeFromName() == std::nullopt);
+ QVERIFY(makePad("text", GST_PAD_SRC).inferTrackTypeFromName() == std::nullopt);
+}
+
+void tst_GStreamer::qDebug_GstPadDirection()
+{
+ auto validate = [](GstPadDirection direction, QString expectedString) {
+ QString str;
+ QDebug dbg(&str);
+
+ dbg << direction;
+
+ QCOMPARE_EQ(str, expectedString);
+ };
+
+ validate(GST_PAD_UNKNOWN, u"GST_PAD_UNKNOWN "_s);
+ validate(GST_PAD_SRC, u"GST_PAD_SRC "_s);
+ validate(GST_PAD_SINK, u"GST_PAD_SINK "_s);
+}
+
+void tst_GStreamer::qDebug_GstStreamStatusType()
+{
+ auto validate = [](GstStreamStatusType type, QString expectedString) {
+ QString str;
+ QDebug dbg(&str);
+
+ dbg << type;
+
+ QCOMPARE_EQ(str, expectedString);
+ };
+
+ validate(GST_STREAM_STATUS_TYPE_CREATE, u"GST_STREAM_STATUS_TYPE_CREATE "_s);
+ validate(GST_STREAM_STATUS_TYPE_ENTER, u"GST_STREAM_STATUS_TYPE_ENTER "_s);
+ validate(GST_STREAM_STATUS_TYPE_LEAVE, u"GST_STREAM_STATUS_TYPE_LEAVE "_s);
+ validate(GST_STREAM_STATUS_TYPE_DESTROY, u"GST_STREAM_STATUS_TYPE_DESTROY "_s);
+ validate(GST_STREAM_STATUS_TYPE_START, u"GST_STREAM_STATUS_TYPE_START "_s);
+ validate(GST_STREAM_STATUS_TYPE_PAUSE, u"GST_STREAM_STATUS_TYPE_PAUSE "_s);
+ validate(GST_STREAM_STATUS_TYPE_STOP, u"GST_STREAM_STATUS_TYPE_STOP "_s);
+}
+
QTEST_GUILESS_MAIN(tst_GStreamer)
#include "moc_tst_gstreamer_backend.cpp"
diff --git a/tests/auto/unit/multimedia/gstreamer_backend/tst_gstreamer_backend.h b/tests/auto/unit/multimedia/gstreamer_backend/tst_gstreamer_backend.h
index 7b118703f..7252dffdd 100644
--- a/tests/auto/unit/multimedia/gstreamer_backend/tst_gstreamer_backend.h
+++ b/tests/auto/unit/multimedia/gstreamer_backend/tst_gstreamer_backend.h
@@ -1,5 +1,5 @@
// Copyright (C) 2024 The Qt Company Ltd.
-// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only WITH Qt-GPL-exception-1.0
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only
#ifndef TST_GSTREAMER_BACKEND_H
#define TST_GSTREAMER_BACKEND_H
@@ -19,10 +19,28 @@ class tst_GStreamer : public QObject
QGstTagListHandle parseTagList(const QByteArray &);
private slots:
+ void qGstCasts_withElement();
+ void qGstCasts_withBin();
+ void qGstCasts_withPipeline();
+
void metadata_taglistToMetaData();
void metadata_taglistToMetaData_extractsOrientation();
void metadata_taglistToMetaData_extractsOrientation_data();
void metadata_taglistToMetaData_extractsDuration();
+ void metadata_taglistToMetaData_extractsLanguage();
+ void metadata_taglistToMetaData_extractsLanguage_data();
+
+ void metadata_capsToMetaData();
+ void metadata_capsToMetaData_data();
+
+ void QGstBin_createFromPipelineDescription();
+ void QGstElement_createFromPipelineDescription();
+ void QGstElement_createFromPipelineDescription_multipleElementsCreatesBin();
+
+ void QGstPad_inferTypeFromName();
+
+ void qDebug_GstPadDirection();
+ void qDebug_GstStreamStatusType();
private:
QGstreamerIntegration integration;
diff --git a/tests/auto/unit/multimedia/qabstractvideobuffer/tst_qabstractvideobuffer.cpp b/tests/auto/unit/multimedia/qabstractvideobuffer/tst_qabstractvideobuffer.cpp
index 567b086f2..e75e1395a 100644
--- a/tests/auto/unit/multimedia/qabstractvideobuffer/tst_qabstractvideobuffer.cpp
+++ b/tests/auto/unit/multimedia/qabstractvideobuffer/tst_qabstractvideobuffer.cpp
@@ -3,13 +3,7 @@
#include <QtTest/QtTest>
-#include <private/qabstractvideobuffer_p.h>
-
-// Adds an enum, and the stringized version
-#define ADD_ENUM_TEST(x) \
- QTest::newRow(#x) \
- << QVideoFrame::x \
- << QString(QLatin1String(#x));
+#include <private/qhwvideobuffer_p.h>
class tst_QAbstractVideoBuffer : public QObject
{
@@ -28,19 +22,16 @@ private slots:
void handleType_data();
void handleType();
void handle();
- void mapMode();
void mapModeDebug_data();
void mapModeDebug();
};
-class QtTestVideoBuffer : public QAbstractVideoBuffer
+class QtTestVideoBuffer : public QHwVideoBuffer
{
public:
- QtTestVideoBuffer(QVideoFrame::HandleType type) : QAbstractVideoBuffer(type) {}
-
- [[nodiscard]] QVideoFrame::MapMode mapMode() const override { return QVideoFrame::ReadWrite; }
+ QtTestVideoBuffer(QVideoFrame::HandleType type) : QHwVideoBuffer(type) { }
- MapData map(QVideoFrame::MapMode) override { return {}; }
+ MapData map(QtVideo::MapMode) override { return {}; }
void unmap() override {}
};
@@ -73,8 +64,8 @@ void tst_QAbstractVideoBuffer::handleType_data()
QTest::addColumn<QVideoFrame::HandleType>("type");
QTest::addColumn<QString>("stringized");
- ADD_ENUM_TEST(NoHandle);
- ADD_ENUM_TEST(RhiTextureHandle);
+ QTest::newRow("NoHandle") << QVideoFrame::NoHandle << QStringLiteral("NoHandle");
+ QTest::newRow("RhiTextureHandle") << QVideoFrame::RhiTextureHandle << QStringLiteral("RhiTextureHandle");
}
void tst_QAbstractVideoBuffer::handleType()
@@ -97,26 +88,24 @@ void tst_QAbstractVideoBuffer::handle()
QVERIFY(buffer.textureHandle(nullptr, 0) == 0);
}
-void tst_QAbstractVideoBuffer::mapMode()
-{
- QtTestVideoBuffer maptest(QVideoFrame::NoHandle);
- QVERIFY2(maptest.mapMode() == QVideoFrame::ReadWrite, "ReadWrite Failed");
-}
-
void tst_QAbstractVideoBuffer::mapModeDebug_data()
{
- QTest::addColumn<QVideoFrame::MapMode>("mapMode");
+ QTest::addColumn<QtVideo::MapMode>("mapMode");
QTest::addColumn<QString>("stringized");
- ADD_ENUM_TEST(NotMapped);
- ADD_ENUM_TEST(ReadOnly);
- ADD_ENUM_TEST(WriteOnly);
- ADD_ENUM_TEST(ReadWrite);
+ QTest::newRow("NotMapped") << QtVideo::MapMode::NotMapped
+ << QStringLiteral("QtVideo::MapMode::NotMapped");
+ QTest::newRow("ReadOnly") << QtVideo::MapMode::ReadOnly
+ << QStringLiteral("QtVideo::MapMode::ReadOnly");
+ QTest::newRow("WriteOnly") << QtVideo::MapMode::WriteOnly
+ << QStringLiteral("QtVideo::MapMode::WriteOnly");
+ QTest::newRow("ReadWrite") << QtVideo::MapMode::ReadWrite
+ << QStringLiteral("QtVideo::MapMode::ReadWrite");
}
void tst_QAbstractVideoBuffer::mapModeDebug()
{
- QFETCH(QVideoFrame::MapMode, mapMode);
+ QFETCH(QtVideo::MapMode, mapMode);
QFETCH(QString, stringized);
QTest::ignoreMessage(QtDebugMsg, stringized.toLatin1().constData());
diff --git a/tests/auto/unit/multimedia/qaudiodecoder/tst_qaudiodecoder.cpp b/tests/auto/unit/multimedia/qaudiodecoder/tst_qaudiodecoder.cpp
index 1d2c39e7f..77e161fda 100644
--- a/tests/auto/unit/multimedia/qaudiodecoder/tst_qaudiodecoder.cpp
+++ b/tests/auto/unit/multimedia/qaudiodecoder/tst_qaudiodecoder.cpp
@@ -54,8 +54,8 @@ void tst_QAudioDecoder::read()
QVERIFY(!d.isDecoding());
QVERIFY(d.bufferAvailable() == false);
- QSignalSpy readySpy(&d, SIGNAL(bufferReady()));
- QSignalSpy bufferChangedSpy(&d, SIGNAL(bufferAvailableChanged(bool)));
+ QSignalSpy readySpy(&d, &QAudioDecoder::bufferReady);
+ QSignalSpy bufferChangedSpy(&d, &QAudioDecoder::bufferAvailableChanged);
QSignalSpy errorSpy(&d, SIGNAL(error(QAudioDecoder::Error)));
// Starting with empty source == error
@@ -115,8 +115,8 @@ void tst_QAudioDecoder::stop()
QVERIFY(!d.isDecoding());
QVERIFY(d.bufferAvailable() == false);
- QSignalSpy readySpy(&d, SIGNAL(bufferReady()));
- QSignalSpy bufferChangedSpy(&d, SIGNAL(bufferAvailableChanged(bool)));
+ QSignalSpy readySpy(&d, &QAudioDecoder::bufferReady);
+ QSignalSpy bufferChangedSpy(&d, &QAudioDecoder::bufferAvailableChanged);
QSignalSpy errorSpy(&d, SIGNAL(error(QAudioDecoder::Error)));
// Starting with empty source == error
@@ -167,8 +167,8 @@ void tst_QAudioDecoder::format()
QVERIFY(!d.isDecoding());
QVERIFY(d.bufferAvailable() == false);
- QSignalSpy readySpy(&d, SIGNAL(bufferReady()));
- QSignalSpy bufferChangedSpy(&d, SIGNAL(bufferAvailableChanged(bool)));
+ QSignalSpy readySpy(&d, &QAudioDecoder::bufferReady);
+ QSignalSpy bufferChangedSpy(&d, &QAudioDecoder::bufferAvailableChanged);
QSignalSpy errorSpy(&d, SIGNAL(error(QAudioDecoder::Error)));
// Set the source to something
@@ -255,11 +255,11 @@ void tst_QAudioDecoder::readAll()
d.setSource(QUrl::fromLocalFile("Foo"));
QVERIFY(!d.isDecoding());
- QSignalSpy durationSpy(&d, SIGNAL(durationChanged(qint64)));
- QSignalSpy positionSpy(&d, SIGNAL(positionChanged(qint64)));
- QSignalSpy isDecodingSpy(&d, SIGNAL(isDecodingChanged(bool)));
- QSignalSpy finishedSpy(&d, SIGNAL(finished()));
- QSignalSpy bufferAvailableSpy(&d, SIGNAL(bufferAvailableChanged(bool)));
+ QSignalSpy durationSpy(&d, &QAudioDecoder::durationChanged);
+ QSignalSpy positionSpy(&d, &QAudioDecoder::positionChanged);
+ QSignalSpy isDecodingSpy(&d, &QAudioDecoder::isDecodingChanged);
+ QSignalSpy finishedSpy(&d, &QAudioDecoder::finished);
+ QSignalSpy bufferAvailableSpy(&d, &QAudioDecoder::bufferAvailableChanged);
d.start();
int i = 0;
forever {
diff --git a/tests/auto/unit/multimedia/qcamera/tst_qcamera.cpp b/tests/auto/unit/multimedia/qcamera/tst_qcamera.cpp
index 848cd05b7..bd1972550 100644
--- a/tests/auto/unit/multimedia/qcamera/tst_qcamera.cpp
+++ b/tests/auto/unit/multimedia/qcamera/tst_qcamera.cpp
@@ -197,7 +197,7 @@ void tst_QCamera::testSimpleCameraCapture()
QCOMPARE(imageCapture.error(), QImageCapture::NoError);
QVERIFY(imageCapture.errorString().isEmpty());
- QSignalSpy errorSignal(&imageCapture, SIGNAL(errorOccurred(int,QImageCapture::Error,QString)));
+ QSignalSpy errorSignal(&imageCapture, &QImageCapture::errorOccurred);
imageCapture.captureToFile(QStringLiteral("/dev/null"));
QCOMPARE(errorSignal.size(), 1);
QCOMPARE(imageCapture.error(), QImageCapture::NotReadyError);
@@ -220,8 +220,8 @@ void tst_QCamera::testCameraCapture()
QVERIFY(!imageCapture.isReadyForCapture());
- QSignalSpy capturedSignal(&imageCapture, SIGNAL(imageCaptured(int,QImage)));
- QSignalSpy errorSignal(&imageCapture, SIGNAL(errorOccurred(int,QImageCapture::Error,QString)));
+ QSignalSpy capturedSignal(&imageCapture, &QImageCapture::imageCaptured);
+ QSignalSpy errorSignal(&imageCapture, &QImageCapture::errorOccurred);
imageCapture.captureToFile(QStringLiteral("/dev/null"));
QCOMPARE(capturedSignal.size(), 0);
@@ -249,8 +249,8 @@ void tst_QCamera::testCameraCaptureMetadata()
session.setCamera(&camera);
session.setImageCapture(&imageCapture);
- QSignalSpy metadataSignal(&imageCapture, SIGNAL(imageMetadataAvailable(int,const QMediaMetaData&)));
- QSignalSpy savedSignal(&imageCapture, SIGNAL(imageSaved(int,QString)));
+ QSignalSpy metadataSignal(&imageCapture, &QImageCapture::imageMetadataAvailable);
+ QSignalSpy savedSignal(&imageCapture, &QImageCapture::imageSaved);
camera.start();
int id = imageCapture.captureToFile(QStringLiteral("/dev/null"));
@@ -419,7 +419,7 @@ void tst_QCamera::testCameraEncodingProperyChange()
session.setCamera(&camera);
session.setImageCapture(&imageCapture);
- QSignalSpy activeChangedSignal(&camera, SIGNAL(activeChanged(bool)));
+ QSignalSpy activeChangedSignal(&camera, &QCamera::activeChanged);
camera.start();
QCOMPARE(camera.isActive(), true);
@@ -604,10 +604,10 @@ void tst_QCamera::testErrorSignal()
Q_ASSERT(service);
Q_ASSERT(service->mockCameraControl);
- QSignalSpy spyError(&camera, SIGNAL(errorOccurred(QCamera::Error,const QString&)));
+ QSignalSpy spyError(&camera, &QCamera::errorOccurred);
/* Set the QPlatformCamera error and verify if the signal is emitted correctly in QCamera */
- service->mockCameraControl->setError(QCamera::CameraError,QStringLiteral("Camera Error"));
+ service->mockCameraControl->updateError(QCamera::CameraError, QStringLiteral("Camera Error"));
QVERIFY(spyError.size() == 1);
QCamera::Error err = qvariant_cast<QCamera::Error >(spyError.at(0).at(0));
@@ -616,7 +616,8 @@ void tst_QCamera::testErrorSignal()
spyError.clear();
/* Set the QPlatformCamera error and verify if the signal is emitted correctly in QCamera */
- service->mockCameraControl->setError(QCamera::CameraError,QStringLiteral("InvalidRequestError Error"));
+ service->mockCameraControl->updateError(QCamera::CameraError,
+ QStringLiteral("InvalidRequestError Error"));
QVERIFY(spyError.size() == 1);
err = qvariant_cast<QCamera::Error >(spyError.at(0).at(0));
QVERIFY(err == QCamera::CameraError);
@@ -624,7 +625,8 @@ void tst_QCamera::testErrorSignal()
spyError.clear();
/* Set the QPlatformCamera error and verify if the signal is emitted correctly in QCamera */
- service->mockCameraControl->setError(QCamera::CameraError,QStringLiteral("NotSupportedFeatureError Error"));
+ service->mockCameraControl->updateError(QCamera::CameraError,
+ QStringLiteral("NotSupportedFeatureError Error"));
QVERIFY(spyError.size() == 1);
err = qvariant_cast<QCamera::Error >(spyError.at(0).at(0));
QVERIFY(err == QCamera::CameraError);
@@ -640,15 +642,17 @@ void tst_QCamera::testError()
auto *service = QMockIntegration::instance()->lastCaptureService();
/* Set the QPlatformCamera error and verify if it is set correctly in QCamera */
- service->mockCameraControl->setError(QCamera::CameraError,QStringLiteral("Camera Error"));
+ service->mockCameraControl->updateError(QCamera::CameraError, QStringLiteral("Camera Error"));
QVERIFY(camera.error() == QCamera::CameraError);
/* Set the QPlatformCamera error and verify if it is set correctly in QCamera */
- service->mockCameraControl->setError(QCamera::CameraError,QStringLiteral("InvalidRequestError Error"));
+ service->mockCameraControl->updateError(QCamera::CameraError,
+ QStringLiteral("InvalidRequestError Error"));
QVERIFY(camera.error() == QCamera::CameraError);
/* Set the QPlatformCamera error and verify if it is set correctly in QCamera */
- service->mockCameraControl->setError(QCamera::CameraError,QStringLiteral("CameraError Error"));
+ service->mockCameraControl->updateError(QCamera::CameraError,
+ QStringLiteral("CameraError Error"));
QVERIFY(camera.error() == QCamera::CameraError);
}
@@ -662,15 +666,17 @@ void tst_QCamera::testErrorString()
auto *service = QMockIntegration::instance()->lastCaptureService();
/* Set the QPlatformCamera error and verify if it is set correctly in QCamera */
- service->mockCameraControl->setError(QCamera::CameraError,QStringLiteral("Camera Error"));
+ service->mockCameraControl->updateError(QCamera::CameraError, QStringLiteral("Camera Error"));
QVERIFY(camera.errorString() == QStringLiteral("Camera Error"));
/* Set the QPlatformCamera error and verify if it is set correctly in QCamera */
- service->mockCameraControl->setError(QCamera::CameraError,QStringLiteral("InvalidRequestError Error"));
+ service->mockCameraControl->updateError(QCamera::CameraError,
+ QStringLiteral("InvalidRequestError Error"));
QVERIFY(camera.errorString() == QStringLiteral("InvalidRequestError Error"));
/* Set the QPlatformCamera error and verify if it is set correctly in QCamera */
- service->mockCameraControl->setError(QCamera::CameraError,QStringLiteral("CameraError Error"));
+ service->mockCameraControl->updateError(QCamera::CameraError,
+ QStringLiteral("CameraError Error"));
QVERIFY(camera.errorString() == QStringLiteral("CameraError Error"));
}
@@ -681,7 +687,7 @@ void tst_QCamera::testSetCameraFormat()
auto videoFormats = device.videoFormats();
QVERIFY(videoFormats.size());
QCameraFormat cameraFormat = videoFormats.first();
- QSignalSpy spy(&camera, SIGNAL(cameraFormatChanged()));
+ QSignalSpy spy(&camera, &QCamera::cameraFormatChanged);
QVERIFY(spy.size() == 0);
camera.setCameraFormat(cameraFormat);
QCOMPARE(spy.size(), 1);
@@ -733,7 +739,7 @@ void tst_QCamera::testZoomChanged()
QCamera camera;
session.setCamera(&camera);
- QSignalSpy spy(&camera, SIGNAL(zoomFactorChanged(float)));
+ QSignalSpy spy(&camera, &QCamera::zoomFactorChanged);
QVERIFY(spy.size() == 0);
camera.setZoomFactor(2.0);
QVERIFY(spy.size() == 1);
@@ -751,7 +757,7 @@ void tst_QCamera::testMaxZoomChangedSignal()
QMockCamera *mock = QMockIntegration::instance()->lastCamera();
// ### change max zoom factor on backend, e.g. by changing camera
- QSignalSpy spy(&camera, SIGNAL(maximumZoomFactorChanged(float)));
+ QSignalSpy spy(&camera, &QCamera::maximumZoomFactorChanged);
mock->maximumZoomFactorChanged(55);
QVERIFY(spy.size() == 1);
QCOMPARE(camera.maximumZoomFactor(), 55);
@@ -763,9 +769,9 @@ void tst_QCamera::testSignalExposureCompensationChanged()
QCamera camera;
session.setCamera(&camera);
- QSignalSpy spyExposureCompensationChanged(&camera, SIGNAL(exposureCompensationChanged(float)));
+ QSignalSpy spyExposureCompensationChanged(&camera, &QCamera::exposureCompensationChanged);
- QVERIFY(spyExposureCompensationChanged.size() ==0);
+ QVERIFY(spyExposureCompensationChanged.size() == 0);
QVERIFY(camera.exposureCompensation() != 800);
camera.setExposureCompensation(2.0);
@@ -790,7 +796,7 @@ void tst_QCamera::testSignalIsoSensitivityChanged()
QCamera camera;
session.setCamera(&camera);
- QSignalSpy spyisoSensitivityChanged(&camera, SIGNAL(isoSensitivityChanged(int)));
+ QSignalSpy spyisoSensitivityChanged(&camera, &QCamera::isoSensitivityChanged);
QVERIFY(spyisoSensitivityChanged.size() ==0);
@@ -805,9 +811,9 @@ void tst_QCamera::testSignalShutterSpeedChanged()
QCamera camera;
session.setCamera(&camera);
- QSignalSpy spySignalExposureTimeChanged(&camera, SIGNAL(exposureTimeChanged(float)));
+ QSignalSpy spySignalExposureTimeChanged(&camera, &QCamera::exposureTimeChanged);
- QVERIFY(spySignalExposureTimeChanged.size() ==0);
+ QVERIFY(spySignalExposureTimeChanged.size() == 0);
camera.setManualExposureTime(2.0);//set the ManualShutterSpeed to 2.0
QTest::qWait(100);
@@ -821,7 +827,7 @@ void tst_QCamera::testSignalFlashReady()
QCamera camera;
session.setCamera(&camera);
- QSignalSpy spyflashReady(&camera,SIGNAL(flashReady(bool)));
+ QSignalSpy spyflashReady(&camera, &QCamera::flashReady);
QVERIFY(spyflashReady.size() == 0);
diff --git a/tests/auto/unit/multimedia/qcameradevice/tst_qcameradevice.cpp b/tests/auto/unit/multimedia/qcameradevice/tst_qcameradevice.cpp
index 7ca2b5980..455586243 100644
--- a/tests/auto/unit/multimedia/qcameradevice/tst_qcameradevice.cpp
+++ b/tests/auto/unit/multimedia/qcameradevice/tst_qcameradevice.cpp
@@ -9,40 +9,25 @@
#include <qmediadevices.h>
#include "qmockintegration.h"
-#include "qmockmediacapturesession.h"
QT_USE_NAMESPACE
Q_ENABLE_MOCK_MULTIMEDIA_PLUGIN
+using namespace Qt::Literals;
+
class tst_QCameraDevice: public QObject
{
Q_OBJECT
-public slots:
- void initTestCase();
- void init();
- void cleanup();
-
private slots:
void constructor();
void defaultCamera();
void availableCameras();
void equality_operators();
+ void qDebug_operator();
};
-void tst_QCameraDevice::initTestCase()
-{
-}
-
-void tst_QCameraDevice::init()
-{
-}
-
-void tst_QCameraDevice::cleanup()
-{
-}
-
void tst_QCameraDevice::constructor()
{
{
@@ -50,8 +35,8 @@ void tst_QCameraDevice::constructor()
QCamera camera;
QCameraDevice info(camera.cameraDevice());
QVERIFY(!info.isNull());
- QCOMPARE(info.id(), QStringLiteral("default"));
- QCOMPARE(info.description(), QStringLiteral("defaultCamera"));
+ QCOMPARE(info.id(), u"default"_s);
+ QCOMPARE(info.description(), u"defaultCamera"_s);
QCOMPARE(info.position(), QCameraDevice::UnspecifiedPosition);
}
@@ -66,14 +51,14 @@ void tst_QCameraDevice::constructor()
QCamera camera(info);
QCOMPARE(info, camera.cameraDevice());
QVERIFY(!info.isNull());
- QCOMPARE(info.id(), QStringLiteral("back"));
- QCOMPARE(info.description(), QStringLiteral("backCamera"));
+ QCOMPARE(info.id(), u"back"_s);
+ QCOMPARE(info.description(), u"backCamera"_s);
QCOMPARE(info.position(), QCameraDevice::BackFace);
QCameraDevice info2(info);
QVERIFY(!info2.isNull());
- QCOMPARE(info2.id(), QStringLiteral("back"));
- QCOMPARE(info2.description(), QStringLiteral("backCamera"));
+ QCOMPARE(info2.id(), u"back"_s);
+ QCOMPARE(info2.description(), u"backCamera"_s);
QCOMPARE(info2.position(), QCameraDevice::BackFace);
}
@@ -82,8 +67,8 @@ void tst_QCameraDevice::defaultCamera()
QCameraDevice info = QMediaDevices::defaultVideoInput();
QVERIFY(!info.isNull());
- QCOMPARE(info.id(), QStringLiteral("default"));
- QCOMPARE(info.description(), QStringLiteral("defaultCamera"));
+ QCOMPARE(info.id(), u"default"_s);
+ QCOMPARE(info.description(), u"defaultCamera"_s);
QCOMPARE(info.position(), QCameraDevice::UnspecifiedPosition);
QCamera camera(info);
@@ -97,8 +82,8 @@ void tst_QCameraDevice::availableCameras()
QCameraDevice info = cameras.at(0);
QVERIFY(!info.isNull());
- QCOMPARE(info.id(), QStringLiteral("default"));
- QCOMPARE(info.description(), QStringLiteral("defaultCamera"));
+ QCOMPARE(info.id(), u"default"_s);
+ QCOMPARE(info.description(), u"defaultCamera"_s);
QCOMPARE(info.position(), QCameraDevice::UnspecifiedPosition);
info = cameras.at(1);
@@ -110,8 +95,8 @@ void tst_QCameraDevice::availableCameras()
QCOMPARE(cameras.size(), 3);
info = cameras.at(2);
QVERIFY(!info.isNull());
- QCOMPARE(info.id(), QStringLiteral("back"));
- QCOMPARE(info.description(), QStringLiteral("backCamera"));
+ QCOMPARE(info.id(), u"back"_s);
+ QCOMPARE(info.description(), u"backCamera"_s);
QCOMPARE(info.position(), QCameraDevice::BackFace);
}
@@ -136,6 +121,18 @@ void tst_QCameraDevice::equality_operators()
}
}
+void tst_QCameraDevice::qDebug_operator()
+{
+ QString outputString;
+ QDebug debug(&outputString);
+ debug.nospace();
+
+ QCameraDevice defaultCamera = QMediaDevices::defaultVideoInput();
+ debug << defaultCamera;
+
+ QCOMPARE(outputString,
+ u"\"QCameraDevice(name=defaultCamera, id=default, position=UnspecifiedPosition)\" "_s);
+}
QTEST_MAIN(tst_QCameraDevice)
diff --git a/tests/auto/unit/multimedia/qimagecapture/tst_qimagecapture.cpp b/tests/auto/unit/multimedia/qimagecapture/tst_qimagecapture.cpp
index c56712d14..3267b6f40 100644
--- a/tests/auto/unit/multimedia/qimagecapture/tst_qimagecapture.cpp
+++ b/tests/auto/unit/multimedia/qimagecapture/tst_qimagecapture.cpp
@@ -178,7 +178,7 @@ void tst_QImageCapture::error()
session.setCamera(&camera);
session.setImageCapture(&imageCapture);
- QSignalSpy spy(&imageCapture, SIGNAL(errorOccurred(int,QImageCapture::Error,QString)));
+ QSignalSpy spy(&imageCapture, &QImageCapture::errorOccurred);
imageCapture.captureToFile();
QTest::qWait(30);
QVERIFY(spy.size() == 1);
@@ -196,7 +196,7 @@ void tst_QImageCapture::imageCaptured()
session.setCamera(&camera);
session.setImageCapture(&imageCapture);
- QSignalSpy spy(&imageCapture, SIGNAL(imageCaptured(int,QImage)));
+ QSignalSpy spy(&imageCapture, &QImageCapture::imageCaptured);
QVERIFY(imageCapture.isAvailable() == true);
QVERIFY(imageCapture.isReadyForCapture() == false);
camera.start();
@@ -219,7 +219,7 @@ void tst_QImageCapture::imageExposed()
session.setCamera(&camera);
session.setImageCapture(&imageCapture);
- QSignalSpy spy(&imageCapture, SIGNAL(imageExposed(int)));
+ QSignalSpy spy(&imageCapture, &QImageCapture::imageExposed);
QVERIFY(imageCapture.isAvailable() == true);
QVERIFY(imageCapture.isReadyForCapture() == false);
camera.start();
@@ -240,7 +240,7 @@ void tst_QImageCapture::imageSaved()
session.setCamera(&camera);
session.setImageCapture(&imageCapture);
- QSignalSpy spy(&imageCapture, SIGNAL(imageSaved(int,QString)));
+ QSignalSpy spy(&imageCapture, &QImageCapture::imageSaved);
QVERIFY(imageCapture.isAvailable() == true);
QVERIFY(imageCapture.isReadyForCapture() == false);
camera.start();
@@ -262,7 +262,7 @@ void tst_QImageCapture::readyForCaptureChanged()
session.setCamera(&camera);
session.setImageCapture(&imageCapture);
- QSignalSpy spy(&imageCapture, SIGNAL(readyForCaptureChanged(bool)));
+ QSignalSpy spy(&imageCapture, &QImageCapture::readyForCaptureChanged);
QVERIFY(imageCapture.isReadyForCapture() == false);
imageCapture.captureToFile();
QTest::qWait(100);
diff --git a/tests/auto/unit/multimedia/qmediacapture_gstreamer/CMakeLists.txt b/tests/auto/unit/multimedia/qmediacapture_gstreamer/CMakeLists.txt
index 8a4734434..209be5883 100644
--- a/tests/auto/unit/multimedia/qmediacapture_gstreamer/CMakeLists.txt
+++ b/tests/auto/unit/multimedia/qmediacapture_gstreamer/CMakeLists.txt
@@ -8,7 +8,11 @@
qt_internal_add_test(tst_qmediacapture_gstreamer
SOURCES
tst_qmediacapture_gstreamer.cpp
+ ../../../shared/qscopedenvironmentvariable.h
+ INCLUDE_DIRECTORIES
+ ../../../shared
LIBRARIES
+ Qt::Multimedia
Qt::MultimediaPrivate
Qt::QGstreamerMediaPluginPrivate
)
diff --git a/tests/auto/unit/multimedia/qmediacapture_gstreamer/tst_qmediacapture_gstreamer.cpp b/tests/auto/unit/multimedia/qmediacapture_gstreamer/tst_qmediacapture_gstreamer.cpp
index 21258005c..7fe6a06ac 100644
--- a/tests/auto/unit/multimedia/qmediacapture_gstreamer/tst_qmediacapture_gstreamer.cpp
+++ b/tests/auto/unit/multimedia/qmediacapture_gstreamer/tst_qmediacapture_gstreamer.cpp
@@ -1,15 +1,26 @@
// Copyright (C) 2024 The Qt Company Ltd.
-// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only WITH Qt-GPL-exception-1.0
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only
#include <QtTest/QtTest>
+#include <QtMultimedia/QAudioDevice>
+#include <QtMultimedia/QAudioInput>
+#include <QtMultimedia/QAudioOutput>
+#include <QtMultimedia/QCamera>
#include <QtMultimedia/QMediaCaptureSession>
+#include <QtMultimedia/private/qgstreamer_platformspecificinterface_p.h>
#include <QtMultimedia/private/qplatformmediacapture_p.h>
#include <QtQGstreamerMediaPlugin/private/qgstpipeline_p.h>
+#include <qscopedenvironmentvariable.h>
+
#include <memory>
+// NOLINTBEGIN(readability-convert-member-functions-to-static)
+
QT_USE_NAMESPACE
+using namespace Qt::Literals;
+
class tst_QMediaCaptureGStreamer : public QObject
{
Q_OBJECT
@@ -22,15 +33,35 @@ public slots:
void cleanup();
private slots:
+ void mediaIntegration_hasPlatformSpecificInterface();
void constructor_preparesGstPipeline();
+ void audioInput_makeCustomGStreamerAudioInput_fromPipelineDescription();
+ void audioOutput_makeCustomGStreamerAudioOutput_fromPipelineDescription();
+
+ void makeCustomGStreamerCamera_fromPipelineDescription();
+ void makeCustomGStreamerCamera_fromPipelineDescription_multipleItems();
+ void makeCustomGStreamerCamera_fromPipelineDescription_userProvidedGstElement();
private:
std::unique_ptr<QMediaCaptureSession> session;
+ QGStreamerPlatformSpecificInterface *gstInterface()
+ {
+ return QGStreamerPlatformSpecificInterface::instance();
+ }
+
GstPipeline *getGstPipeline()
{
- return reinterpret_cast<GstPipeline *>(
- QPlatformMediaCaptureSession::nativePipeline(session.get()));
+ auto *iface = QGStreamerPlatformSpecificInterface::instance();
+ return iface ? iface->gstPipeline(session.get()) : nullptr;
+ }
+
+ QGstPipeline getPipeline()
+ {
+ return QGstPipeline{
+ getGstPipeline(),
+ QGstPipeline::NeedsRef,
+ };
}
void dumpGraph(const char *fileNamePrefix)
@@ -56,6 +87,11 @@ void tst_QMediaCaptureGStreamer::cleanup()
session.reset();
}
+void tst_QMediaCaptureGStreamer::mediaIntegration_hasPlatformSpecificInterface()
+{
+ QVERIFY(QGStreamerPlatformSpecificInterface::instance());
+}
+
void tst_QMediaCaptureGStreamer::constructor_preparesGstPipeline()
{
auto *rawPipeline = getGstPipeline();
@@ -70,6 +106,107 @@ void tst_QMediaCaptureGStreamer::constructor_preparesGstPipeline()
dumpGraph("constructor_preparesGstPipeline");
}
+void tst_QMediaCaptureGStreamer::audioInput_makeCustomGStreamerAudioInput_fromPipelineDescription()
+{
+ auto pipelineString =
+ "audiotestsrc wave=2 freq=200 name=myOscillator ! identity name=myConverter"_ba;
+
+ QAudioInput input{
+ gstInterface()->makeCustomGStreamerAudioInput(pipelineString),
+ };
+
+ session->setAudioInput(&input);
+
+ QGstPipeline pipeline = getPipeline();
+ QTEST_ASSERT(pipeline);
+
+ pipeline.finishStateChange();
+
+ QVERIFY(pipeline.findByName("myOscillator"));
+ QVERIFY(pipeline.findByName("myConverter"));
+
+ dumpGraph("audioInput_customAudioDevice");
+}
+
+void tst_QMediaCaptureGStreamer::
+ audioOutput_makeCustomGStreamerAudioOutput_fromPipelineDescription()
+{
+ auto pipelineStringInput =
+ "audiotestsrc wave=2 freq=200 name=myOscillator ! identity name=myConverter"_ba;
+ QAudioInput input{
+ gstInterface()->makeCustomGStreamerAudioInput(pipelineStringInput),
+ };
+ session->setAudioInput(&input);
+
+ auto pipelineStringOutput = "identity name=myConverter ! fakesink name=mySink"_ba;
+ QAudioOutput output{
+ gstInterface()->makeCustomGStreamerAudioOutput(pipelineStringOutput),
+ };
+ session->setAudioOutput(&output);
+
+ QGstPipeline pipeline = getPipeline();
+ QTEST_ASSERT(pipeline);
+
+ pipeline.finishStateChange();
+
+ QVERIFY(pipeline.findByName("mySink"));
+ QVERIFY(pipeline.findByName("myConverter"));
+
+ dumpGraph("audioOutput_customAudioDevice");
+}
+
+void tst_QMediaCaptureGStreamer::makeCustomGStreamerCamera_fromPipelineDescription()
+{
+ auto pipelineString = "videotestsrc name=mySrc"_ba;
+ QCamera *cam =
+ gstInterface()->makeCustomGStreamerCamera(pipelineString, /*parent=*/session.get());
+
+ session->setCamera(cam);
+ cam->start();
+
+ QGstPipeline pipeline = getPipeline();
+ QTEST_ASSERT(pipeline);
+ QVERIFY(pipeline.findByName("mySrc"));
+ dumpGraph("makeCustomGStreamerCamera_fromPipelineDescription");
+}
+
+void tst_QMediaCaptureGStreamer::makeCustomGStreamerCamera_fromPipelineDescription_multipleItems()
+{
+ auto pipelineString = "videotestsrc name=mySrc ! gamma gamma=2.0 name=myFilter"_ba;
+ QCamera *cam =
+ gstInterface()->makeCustomGStreamerCamera(pipelineString, /*parent=*/session.get());
+
+ session->setCamera(cam);
+ cam->start();
+
+ QGstPipeline pipeline = getPipeline();
+ QTEST_ASSERT(pipeline);
+ QVERIFY(pipeline.findByName("mySrc"));
+ QVERIFY(pipeline.findByName("myFilter"));
+ dumpGraph("makeCustomGStreamerCamera_fromPipelineDescription_multipleItems");
+}
+
+void tst_QMediaCaptureGStreamer::
+ makeCustomGStreamerCamera_fromPipelineDescription_userProvidedGstElement()
+{
+ QGstElement element = QGstElement::createFromPipelineDescription("videotestsrc");
+ gst_element_set_name(element.element(), "mySrc");
+
+ QCamera *cam =
+ gstInterface()->makeCustomGStreamerCamera(element.element(), /*parent=*/session.get());
+
+ session->setCamera(cam);
+ cam->start();
+
+ QGstPipeline pipeline = getPipeline();
+ QTEST_ASSERT(pipeline);
+ QCOMPARE(pipeline.findByName("mySrc"), element);
+ dumpGraph("makeCustomGStreamerCamera_fromPipelineDescription_userProvidedGstElement");
+
+ element.set("foreground-color", 0xff0000);
+ dumpGraph("makeCustomGStreamerCamera_fromPipelineDescription_userProvidedGstElement2");
+}
+
QTEST_GUILESS_MAIN(tst_QMediaCaptureGStreamer)
#include "tst_qmediacapture_gstreamer.moc"
diff --git a/tests/auto/unit/multimedia/qmediametadata/CMakeLists.txt b/tests/auto/unit/multimedia/qmediametadata/CMakeLists.txt
new file mode 100644
index 000000000..7b7b8f174
--- /dev/null
+++ b/tests/auto/unit/multimedia/qmediametadata/CMakeLists.txt
@@ -0,0 +1,13 @@
+# Copyright (C) 2024 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+#####################################################################
+## tst_qmediametadata Test:
+#####################################################################
+
+qt_internal_add_test(tst_qmediametadata
+ SOURCES
+ tst_qmediametadata.cpp
+ LIBRARIES
+ Qt::MultimediaPrivate
+)
diff --git a/tests/auto/unit/multimedia/qmediametadata/tst_qmediametadata.cpp b/tests/auto/unit/multimedia/qmediametadata/tst_qmediametadata.cpp
new file mode 100644
index 000000000..a3763399d
--- /dev/null
+++ b/tests/auto/unit/multimedia/qmediametadata/tst_qmediametadata.cpp
@@ -0,0 +1,96 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only
+
+#include <QtTest/QtTest>
+
+#include <QtCore/qdebug.h>
+#include <QtCore/QString>
+
+#include <QtMultimedia/qmediametadata.h>
+
+QT_USE_NAMESPACE
+
+using namespace Qt::Literals;
+
+class tst_QMediaMetaData : public QObject
+{
+ Q_OBJECT
+
+private slots:
+ void insertAndRemove();
+ void qdebug_empty();
+ void qdebug_printContent();
+};
+
+void tst_QMediaMetaData::insertAndRemove()
+{
+ QMediaMetaData dut;
+ QVERIFY(dut.isEmpty());
+
+ // fill
+ dut.insert(QMediaMetaData::Author, "yada");
+ dut.insert(QMediaMetaData::Title, "title");
+ QVERIFY(!dut.isEmpty());
+
+ // validate
+ {
+ auto range = dut.asKeyValueRange();
+ QCOMPARE_EQ(std::distance(range.begin(), range.end()), 2u);
+
+ QSet expectedKeys{
+ QMediaMetaData::Author,
+ QMediaMetaData::Title,
+ };
+
+ QList keyList = dut.keys();
+ QSet<QMediaMetaData::Key> keys{ keyList.begin(), keyList.end() };
+
+ QCOMPARE_EQ(keys, expectedKeys);
+
+ QCOMPARE(dut.value(QMediaMetaData::Author), u"yada"_s);
+ QCOMPARE(dut.stringValue(QMediaMetaData::Author), u"yada"_s);
+ };
+
+ // remove missing key
+ QMediaMetaData reference = dut;
+ dut.remove(QMediaMetaData::AlbumArtist);
+ QCOMPARE_EQ(dut, reference);
+
+ // clear
+ dut.clear();
+ QVERIFY(dut.isEmpty());
+}
+
+void tst_QMediaMetaData::qdebug_empty()
+{
+ QMediaMetaData dut;
+
+ QString str;
+ QDebug dbg(&str);
+ dbg << dut;
+
+ auto expected = u"QMediaMetaData{} ";
+
+ QCOMPARE_EQ(str, expected);
+}
+
+void tst_QMediaMetaData::qdebug_printContent()
+{
+ QMediaMetaData dut;
+ dut.insert(QMediaMetaData::Author, "yada");
+ dut.insert(QMediaMetaData::Title, "title");
+
+ QString str;
+ QDebug dbg(&str);
+ dbg << dut;
+
+ auto expected = u"QMediaMetaData{QMediaMetaData::Title: QVariant(QString, \"title\"), "
+ u"QMediaMetaData::Author: QVariant(QString, \"yada\")} ";
+ auto expected2 = u"QMediaMetaData{QMediaMetaData::Author: QVariant(QString, \"yada\"), "
+ u"QMediaMetaData::Title: QVariant(QString, \"title\")} ";
+
+ QVERIFY(str == expected || str == expected2);
+}
+
+QTEST_GUILESS_MAIN(tst_QMediaMetaData)
+#include "tst_qmediametadata.moc"
diff --git a/tests/auto/unit/multimedia/qmediaplayer/tst_qmediaplayer.cpp b/tests/auto/unit/multimedia/qmediaplayer/tst_qmediaplayer.cpp
index 2deaaf846..3fb77ca2d 100644
--- a/tests/auto/unit/multimedia/qmediaplayer/tst_qmediaplayer.cpp
+++ b/tests/auto/unit/multimedia/qmediaplayer/tst_qmediaplayer.cpp
@@ -294,32 +294,40 @@ void tst_QMediaPlayer::testPosition()
QVERIFY(player->duration() == duration);
if (seekable) {
- { QSignalSpy spy(player, SIGNAL(positionChanged(qint64)));
- player->setPosition(position);
- QCOMPARE(player->position(), position);
- QCOMPARE(spy.size(), 0); }
+ {
+ QSignalSpy spy(player, &QMediaPlayer::positionChanged);
+ player->setPosition(position);
+ QCOMPARE(player->position(), position);
+ QCOMPARE(spy.size(), 0);
+ }
mockPlayer->setPosition(position);
- { QSignalSpy spy(player, SIGNAL(positionChanged(qint64)));
- player->setPosition(0);
- QCOMPARE(player->position(), qint64(0));
- QCOMPARE(spy.size(), position == 0 ? 0 : 1); }
+ {
+ QSignalSpy spy(player, &QMediaPlayer::positionChanged);
+ player->setPosition(0);
+ QCOMPARE(player->position(), qint64(0));
+ QCOMPARE(spy.size(), position == 0 ? 0 : 1);
+ }
mockPlayer->setPosition(position);
- { QSignalSpy spy(player, SIGNAL(positionChanged(qint64)));
- player->setPosition(duration);
- QCOMPARE(player->position(), duration);
- QCOMPARE(spy.size(), position == duration ? 0 : 1); }
+ {
+ QSignalSpy spy(player, &QMediaPlayer::positionChanged);
+ player->setPosition(duration);
+ QCOMPARE(player->position(), duration);
+ QCOMPARE(spy.size(), position == duration ? 0 : 1);
+ }
mockPlayer->setPosition(position);
- { QSignalSpy spy(player, SIGNAL(positionChanged(qint64)));
- player->setPosition(-1);
- QCOMPARE(player->position(), qint64(0));
- QCOMPARE(spy.size(), position == 0 ? 0 : 1); }
+ {
+ QSignalSpy spy(player, &QMediaPlayer::positionChanged);
+ player->setPosition(-1);
+ QCOMPARE(player->position(), qint64(0));
+ QCOMPARE(spy.size(), position == 0 ? 0 : 1);
+ }
}
else {
- QSignalSpy spy(player, SIGNAL(positionChanged(qint64)));
+ QSignalSpy spy(player, &QMediaPlayer::positionChanged);
player->setPosition(position);
QCOMPARE(player->position(), position);
@@ -342,25 +350,33 @@ void tst_QMediaPlayer::testVolume()
QVERIFY(audioOutput->volume() == vol);
if (valid) {
- { QSignalSpy spy(audioOutput, SIGNAL(volumeChanged(float)));
- audioOutput->setVolume(.1f);
- QCOMPARE(audioOutput->volume(), .1f);
- QCOMPARE(spy.size(), 1); }
-
- { QSignalSpy spy(audioOutput, SIGNAL(volumeChanged(float)));
- audioOutput->setVolume(-1000.f);
- QCOMPARE(audioOutput->volume(), 0.f);
- QCOMPARE(spy.size(), 1); }
-
- { QSignalSpy spy(audioOutput, SIGNAL(volumeChanged(float)));
- audioOutput->setVolume(1.f);
- QCOMPARE(audioOutput->volume(), 1.f);
- QCOMPARE(spy.size(), 1); }
-
- { QSignalSpy spy(audioOutput, SIGNAL(volumeChanged(float)));
- audioOutput->setVolume(1000.f);
- QCOMPARE(audioOutput->volume(), 1.f);
- QCOMPARE(spy.size(), 0); }
+ {
+ QSignalSpy spy(audioOutput, &QAudioOutput::volumeChanged);
+ audioOutput->setVolume(.1f);
+ QCOMPARE(audioOutput->volume(), .1f);
+ QCOMPARE(spy.size(), 1);
+ }
+
+ {
+ QSignalSpy spy(audioOutput, &QAudioOutput::volumeChanged);
+ audioOutput->setVolume(-1000.f);
+ QCOMPARE(audioOutput->volume(), 0.f);
+ QCOMPARE(spy.size(), 1);
+ }
+
+ {
+ QSignalSpy spy(audioOutput, &QAudioOutput::volumeChanged);
+ audioOutput->setVolume(1.f);
+ QCOMPARE(audioOutput->volume(), 1.f);
+ QCOMPARE(spy.size(), 1);
+ }
+
+ {
+ QSignalSpy spy(audioOutput, &QAudioOutput::volumeChanged);
+ audioOutput->setVolume(1000.f);
+ QCOMPARE(audioOutput->volume(), 1.f);
+ QCOMPARE(spy.size(), 0);
+ }
}
}
@@ -381,7 +397,7 @@ void tst_QMediaPlayer::testMuted()
audioOutput->setVolume(vol);
QVERIFY(audioOutput->isMuted() == muted);
- QSignalSpy spy(audioOutput, SIGNAL(mutedChanged(bool)));
+ QSignalSpy spy(audioOutput, &QAudioOutput::mutedChanged);
audioOutput->setMuted(!muted);
QCOMPARE(audioOutput->isMuted(), !muted);
QCOMPARE(audioOutput->volume(), vol);
@@ -442,7 +458,7 @@ void tst_QMediaPlayer::testPlaybackRate()
mockPlayer->setPlaybackRate(playbackRate);
QVERIFY(player->playbackRate() == playbackRate);
- QSignalSpy spy(player, SIGNAL(playbackRateChanged(qreal)));
+ QSignalSpy spy(player, &QMediaPlayer::playbackRateChanged);
player->setPlaybackRate(playbackRate + 0.5f);
QCOMPARE(player->playbackRate(), playbackRate + 0.5f);
QCOMPARE(spy.size(), 1);
@@ -512,8 +528,8 @@ void tst_QMediaPlayer::testPlay()
QCOMPARE(player->isPlaying(), state == QMediaPlayer::PlayingState);
QCOMPARE(player->source(), mediaContent);
- QSignalSpy spy(player, SIGNAL(playbackStateChanged(QMediaPlayer::PlaybackState)));
- QSignalSpy playingChanged(player, SIGNAL(playingChanged(bool)));
+ QSignalSpy spy(player, &QMediaPlayer::playbackStateChanged);
+ QSignalSpy playingChanged(player, &QMediaPlayer::playingChanged);
player->play();
@@ -549,8 +565,8 @@ void tst_QMediaPlayer::testPause()
QCOMPARE(player->isPlaying(), state == QMediaPlayer::PlayingState);
QVERIFY(player->source() == mediaContent);
- QSignalSpy spy(player, SIGNAL(playbackStateChanged(QMediaPlayer::PlaybackState)));
- QSignalSpy playingChanged(player, SIGNAL(playingChanged(bool)));
+ QSignalSpy spy(player, &QMediaPlayer::playbackStateChanged);
+ QSignalSpy playingChanged(player, &QMediaPlayer::playingChanged);
player->pause();
@@ -584,8 +600,8 @@ void tst_QMediaPlayer::testStop()
QCOMPARE(player->isPlaying(), state == QMediaPlayer::PlayingState);
QVERIFY(player->source() == mediaContent);
- QSignalSpy spy(player, SIGNAL(playbackStateChanged(QMediaPlayer::PlaybackState)));
- QSignalSpy playingChanged(player, SIGNAL(playingChanged(bool)));
+ QSignalSpy spy(player, &QMediaPlayer::playbackStateChanged);
+ QSignalSpy playingChanged(player, &QMediaPlayer::playingChanged);
player->stop();
@@ -616,8 +632,8 @@ void tst_QMediaPlayer::testMediaStatus()
mockPlayer->setMediaStatus(QMediaPlayer::NoMedia);
mockPlayer->setBufferStatus(bufferProgress);
- QSignalSpy statusSpy(player, SIGNAL(mediaStatusChanged(QMediaPlayer::MediaStatus)));
- QSignalSpy bufferSpy(player, SIGNAL(bufferProgressChanged(float)));
+ QSignalSpy statusSpy(player, &QMediaPlayer::mediaStatusChanged);
+ QSignalSpy bufferSpy(player, &QMediaPlayer::bufferProgressChanged);
QCOMPARE(player->mediaStatus(), QMediaPlayer::NoMedia);
@@ -786,9 +802,9 @@ void tst_QMediaPlayer::testQrc()
mockPlayer->setState(QMediaPlayer::PlayingState, QMediaPlayer::NoMedia);
mockPlayer->setStreamPlaybackSupported(backendHasStream);
- QSignalSpy mediaSpy(player, SIGNAL(sourceChanged(QUrl)));
- QSignalSpy statusSpy(player, SIGNAL(mediaStatusChanged(QMediaPlayer::MediaStatus)));
- QSignalSpy errorSpy(player, SIGNAL(errorOccurred(QMediaPlayer::Error,const QString&)));
+ QSignalSpy mediaSpy(player, &QMediaPlayer::sourceChanged);
+ QSignalSpy statusSpy(player, &QMediaPlayer::mediaStatusChanged);
+ QSignalSpy errorSpy(player, &QMediaPlayer::errorOccurred);
player->setSource(mediaContent);
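The hunks above replace string-based SIGNAL() arguments to QSignalSpy with pointer-to-member-function signals, which are checked at compile time. A minimal side-by-side sketch of the two styles (not part of the patch):

    #include <QtTest/QSignalSpy>
    #include <QtMultimedia/QMediaPlayer>

    void spyStyles(QMediaPlayer *player)
    {
        // Old style: the signature is parsed from a string at run time, so a
        // typo only surfaces as a warning when the test executes.
        QSignalSpy oldStyle(player, SIGNAL(positionChanged(qint64)));

        // New style, as used throughout the patch: a wrong signal name or
        // signature is a compile-time error.
        QSignalSpy newStyle(player, &QMediaPlayer::positionChanged);

        Q_UNUSED(oldStyle);
        Q_UNUSED(newStyle);
    }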
diff --git a/tests/auto/unit/multimedia/qmediaplayer_gstreamer/CMakeLists.txt b/tests/auto/unit/multimedia/qmediaplayer_gstreamer/CMakeLists.txt
index 2437df00c..dd54afd25 100644
--- a/tests/auto/unit/multimedia/qmediaplayer_gstreamer/CMakeLists.txt
+++ b/tests/auto/unit/multimedia/qmediaplayer_gstreamer/CMakeLists.txt
@@ -8,7 +8,25 @@
qt_internal_add_test(tst_qmediaplayer_gstreamer
SOURCES
tst_qmediaplayer_gstreamer.cpp
+ tst_qmediaplayer_gstreamer.h
+ ../../../shared/qscopedenvironmentvariable.h
+ INCLUDE_DIRECTORIES
+ ../../../shared
LIBRARIES
Qt::MultimediaPrivate
Qt::QGstreamerMediaPluginPrivate
)
+
+
+# Resources:
+set(testdata_resource_files
+ "testdata/color_matrix.mp4"
+)
+
+qt_internal_add_resource(tst_qmediaplayer_gstreamer "testdata"
+ PREFIX
+ "/"
+ FILES
+ ${testdata_resource_files}
+)
+
diff --git a/tests/auto/unit/multimedia/qmediaplayer_gstreamer/testdata/color_matrix.mp4 b/tests/auto/unit/multimedia/qmediaplayer_gstreamer/testdata/color_matrix.mp4
new file mode 100644
index 000000000..a3661b9d2
--- /dev/null
+++ b/tests/auto/unit/multimedia/qmediaplayer_gstreamer/testdata/color_matrix.mp4
Binary files differ
diff --git a/tests/auto/unit/multimedia/qmediaplayer_gstreamer/tst_qmediaplayer_gstreamer.cpp b/tests/auto/unit/multimedia/qmediaplayer_gstreamer/tst_qmediaplayer_gstreamer.cpp
index 8b0f3f073..3bb0b626e 100644
--- a/tests/auto/unit/multimedia/qmediaplayer_gstreamer/tst_qmediaplayer_gstreamer.cpp
+++ b/tests/auto/unit/multimedia/qmediaplayer_gstreamer/tst_qmediaplayer_gstreamer.cpp
@@ -1,50 +1,79 @@
// Copyright (C) 2024 The Qt Company Ltd.
-// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only WITH Qt-GPL-exception-1.0
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only
+
+#include "tst_qmediaplayer_gstreamer.h"
#include <QtTest/QtTest>
-#include <QtMultimedia/qmediaplayer.h>
#include <QtMultimedia/private/qmediaplayer_p.h>
-#include <QtQGstreamerMediaPlugin/private/qgstpipeline_p.h>
-#include <memory>
+#include <qscopedenvironmentvariable.h>
QT_USE_NAMESPACE
-class tst_QMediaPlayerGStreamer : public QObject
-{
- Q_OBJECT
-
-public:
- tst_QMediaPlayerGStreamer();
+using namespace Qt::Literals;
-public slots:
- void init();
- void cleanup();
-
-private slots:
- void constructor_preparesGstPipeline();
+QGStreamerPlatformSpecificInterface *tst_QMediaPlayerGStreamer::gstInterface()
+{
+ return dynamic_cast<QGStreamerPlatformSpecificInterface *>(
+ QPlatformMediaIntegration::instance()->platformSpecificInterface());
+}
-private:
- std::unique_ptr<QMediaPlayer> player;
+GstPipeline *tst_QMediaPlayerGStreamer::getGstPipeline()
+{
+ QGStreamerPlatformSpecificInterface *iface = gstInterface();
+ return iface ? iface->gstPipeline(player.get()) : nullptr;
+}
- GstPipeline *getGstPipeline()
- {
- return reinterpret_cast<GstPipeline *>(QPlatformMediaPlayer::nativePipeline(player.get()));
- }
+QGstPipeline tst_QMediaPlayerGStreamer::getPipeline()
+{
+ return QGstPipeline{
+ getGstPipeline(),
+ QGstPipeline::NeedsRef,
+ };
+}
- void dumpGraph(const char *fileNamePrefix)
- {
- GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(getGstPipeline()),
- GstDebugGraphDetails(GST_DEBUG_GRAPH_SHOW_VERBOSE),
- fileNamePrefix);
- }
-};
+void tst_QMediaPlayerGStreamer::dumpGraph(const char *fileNamePrefix)
+{
+ GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(getGstPipeline()),
+ GstDebugGraphDetails(GST_DEBUG_GRAPH_SHOW_VERBOSE), fileNamePrefix);
+}
tst_QMediaPlayerGStreamer::tst_QMediaPlayerGStreamer()
{
qputenv("QT_MEDIA_BACKEND", "gstreamer");
}
+void tst_QMediaPlayerGStreamer::initTestCase()
+{
+ using namespace std::chrono_literals;
+
+ QMediaPlayer player;
+
+ QVideoSink sink;
+ player.setVideoSink(&sink);
+ player.setSource(QUrl("qrc:/testdata/color_matrix.mp4"));
+
+ for (;;) {
+ QMediaPlayer::MediaStatus status = player.mediaStatus();
+ switch (status) {
+ case QMediaPlayer::MediaStatus::InvalidMedia: {
+ mediaSupported = false;
+ return;
+ }
+ case QMediaPlayer::MediaStatus::NoMedia:
+ case QMediaPlayer::MediaStatus::StalledMedia:
+ case QMediaPlayer::MediaStatus::LoadingMedia:
+ QTest::qWait(20ms);
+ continue;
+
+ default: {
+ mediaSupported = true;
+ return;
+ }
+ }
+ }
+}
+
void tst_QMediaPlayerGStreamer::init()
{
player = std::make_unique<QMediaPlayer>();
@@ -73,6 +102,52 @@ void tst_QMediaPlayerGStreamer::constructor_preparesGstPipeline()
dumpGraph("constructor_preparesGstPipeline");
}
+void tst_QMediaPlayerGStreamer::videoSink_constructor_overridesConversionElement()
+{
+ if (!mediaSupported)
+ QSKIP("Media playback not supported");
+
+ QScopedEnvironmentVariable convOverride{
+ "QT_GSTREAMER_OVERRIDE_VIDEO_CONVERSION_ELEMENT",
+ "identity name=myConverter",
+ };
+
+ QVideoSink sink;
+ player->setVideoSink(&sink);
+ player->setSource(QUrl("qrc:/testdata/color_matrix.mp4"));
+
+ QGstPipeline pipeline = getPipeline();
+ QTEST_ASSERT(pipeline);
+
+ QTRY_VERIFY(pipeline.findByName("myConverter"));
+
+ dumpGraph("videoSink_constructor_overridesConversionElement");
+}
+
+void tst_QMediaPlayerGStreamer::
+ videoSink_constructor_overridesConversionElement_withMultipleElements()
+{
+ if (!mediaSupported)
+ QSKIP("Media playback not supported");
+
+ QScopedEnvironmentVariable convOverride{
+ "QT_GSTREAMER_OVERRIDE_VIDEO_CONVERSION_ELEMENT",
+ "identity name=myConverter ! identity name=myConverter2",
+ };
+
+ QVideoSink sink;
+ player->setVideoSink(&sink);
+ player->setSource(QUrl("qrc:/testdata/color_matrix.mp4"));
+
+ QGstPipeline pipeline = getPipeline();
+ QTEST_ASSERT(pipeline);
+
+ QTRY_VERIFY(pipeline.findByName("myConverter"));
+ QTRY_VERIFY(pipeline.findByName("myConverter2"));
+
+ dumpGraph("videoSink_constructer_overridesConversionElement_withMultipleElements");
+}
+
QTEST_GUILESS_MAIN(tst_QMediaPlayerGStreamer)
-#include "tst_qmediaplayer_gstreamer.moc"
+#include "moc_tst_qmediaplayer_gstreamer.cpp"
diff --git a/tests/auto/unit/multimedia/qmediaplayer_gstreamer/tst_qmediaplayer_gstreamer.h b/tests/auto/unit/multimedia/qmediaplayer_gstreamer/tst_qmediaplayer_gstreamer.h
new file mode 100644
index 000000000..08e958404
--- /dev/null
+++ b/tests/auto/unit/multimedia/qmediaplayer_gstreamer/tst_qmediaplayer_gstreamer.h
@@ -0,0 +1,46 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only
+
+#ifndef TST_QMEDIAPLAYER_GSTREAMER_H
+#define TST_QMEDIAPLAYER_GSTREAMER_H
+
+#include <QtCore/qtemporaryfile.h>
+#include <QtCore/qstandardpaths.h>
+#include <QtMultimedia/qmediaplayer.h>
+#include <QtQGstreamerMediaPlugin/private/qgstpipeline_p.h>
+#include <QtMultimedia/private/qgstreamer_platformspecificinterface_p.h>
+
+#include <memory>
+
+QT_USE_NAMESPACE
+
+class tst_QMediaPlayerGStreamer : public QObject
+{
+ Q_OBJECT
+
+public:
+ tst_QMediaPlayerGStreamer();
+
+public slots:
+ void initTestCase();
+ void init();
+ void cleanup();
+
+private slots:
+ void constructor_preparesGstPipeline();
+ void videoSink_constructor_overridesConversionElement();
+ void videoSink_constructor_overridesConversionElement_withMultipleElements();
+
+private:
+ std::unique_ptr<QMediaPlayer> player;
+
+ static QGStreamerPlatformSpecificInterface *gstInterface();
+
+ GstPipeline *getGstPipeline();
+ QGstPipeline getPipeline();
+ void dumpGraph(const char *fileNamePrefix);
+
+ bool mediaSupported = false;
+};
+
+#endif // TST_QMEDIAPLAYER_GSTREAMER_H
diff --git a/tests/auto/unit/multimedia/qmediarecorder/tst_qmediarecorder.cpp b/tests/auto/unit/multimedia/qmediarecorder/tst_qmediarecorder.cpp
index 4873c2407..a11f39207 100644
--- a/tests/auto/unit/multimedia/qmediarecorder/tst_qmediarecorder.cpp
+++ b/tests/auto/unit/multimedia/qmediarecorder/tst_qmediarecorder.cpp
@@ -125,7 +125,7 @@ void tst_QMediaRecorder::testNullControls()
QCOMPARE(recorder.mediaFormat().videoCodec(), QMediaFormat::VideoCodec::VP9);
QCOMPARE(recorder.mediaFormat().fileFormat(), QMediaFormat::MPEG4);
- QSignalSpy spy(&recorder, SIGNAL(recorderStateChanged(RecorderState)));
+ QSignalSpy spy(&recorder, &QMediaRecorder::recorderStateChanged);
recorder.record();
QCOMPARE(recorder.recorderState(), QMediaRecorder::StoppedState);
@@ -190,12 +190,12 @@ void tst_QMediaRecorder::testError()
{
const QString errorString(QLatin1String("format error"));
- QSignalSpy spy(encoder, SIGNAL(errorOccurred(Error, const QString&)));
+ QSignalSpy spy(encoder, &QMediaRecorder::errorOccurred);
QCOMPARE(encoder->error(), QMediaRecorder::NoError);
QCOMPARE(encoder->errorString(), QString());
- mock->error(QMediaRecorder::FormatError, errorString);
+ mock->updateError(QMediaRecorder::FormatError, errorString);
QCOMPARE(encoder->error(), QMediaRecorder::FormatError);
QCOMPARE(encoder->errorString(), errorString);
QCOMPARE(spy.size(), 1);
@@ -230,8 +230,8 @@ void tst_QMediaRecorder::testSink()
void tst_QMediaRecorder::testRecord()
{
- QSignalSpy stateSignal(encoder,SIGNAL(recorderStateChanged(RecorderState)));
- QSignalSpy progressSignal(encoder, SIGNAL(durationChanged(qint64)));
+ QSignalSpy stateSignal(encoder, &QMediaRecorder::recorderStateChanged);
+ QSignalSpy progressSignal(encoder, &QMediaRecorder::durationChanged);
encoder->record();
QCOMPARE(encoder->recorderState(), QMediaRecorder::RecordingState);
QCOMPARE(encoder->error(), QMediaRecorder::NoError);
@@ -413,12 +413,12 @@ void tst_QMediaRecorder::testEnum()
{
const QString errorString(QLatin1String("resource error"));
- QSignalSpy spy(encoder, SIGNAL(errorOccurred(Error, const QString&)));
+ QSignalSpy spy(encoder, &QMediaRecorder::errorOccurred);
QCOMPARE(encoder->error(), QMediaRecorder::NoError);
QCOMPARE(encoder->errorString(), QString());
- emit mock->error(QMediaRecorder::ResourceError, errorString);
+ mock->updateError(QMediaRecorder::ResourceError, errorString);
QCOMPARE(encoder->error(), QMediaRecorder::ResourceError);
QCOMPARE(encoder->errorString(), errorString);
QCOMPARE(spy.size(), 1);
diff --git a/tests/auto/unit/multimedia/qvideobuffers/tst_qvideobuffers.cpp b/tests/auto/unit/multimedia/qvideobuffers/tst_qvideobuffers.cpp
index 97f5e3b62..162620fb6 100644
--- a/tests/auto/unit/multimedia/qvideobuffers/tst_qvideobuffers.cpp
+++ b/tests/auto/unit/multimedia/qvideobuffers/tst_qvideobuffers.cpp
@@ -8,9 +8,9 @@
#include "qvideoframeformat.h"
using BufferPtr = std::shared_ptr<QAbstractVideoBuffer>;
-using MapModes = std::vector<QVideoFrame::MapMode>;
+using MapModes = std::vector<QtVideo::MapMode>;
-static const MapModes validMapModes = { QVideoFrame::ReadOnly, QVideoFrame::WriteOnly, QVideoFrame::ReadWrite };
+static const MapModes validMapModes = { QtVideo::MapMode::ReadOnly, QtVideo::MapMode::WriteOnly, QtVideo::MapMode::ReadWrite };
class tst_QVideoBuffers : public QObject
{
@@ -22,21 +22,14 @@ public slots:
void initTestCase();
private slots:
- void map_changesMappedStateAndReturnsProperMappings_whenBufferIsNotMapped_data();
- void map_changesMappedStateAndReturnsProperMappings_whenBufferIsNotMapped();
+ void map_returnsProperMappings_whenBufferIsNotMapped_data();
+ void map_returnsProperMappings_whenBufferIsNotMapped();
- void mapWithNotMappedMode_doesNothing_data();
- void mapWithNotMappedMode_doesNothing();
+ void map_returnsProperMappings_whenBufferIsMapped_data();
+ void map_returnsProperMappings_whenBufferIsMapped();
- void map_doesNothing_whenBufferIsMapped_data();
- void map_doesNothing_whenBufferIsMapped();
-
- void mapMemoryBufferWithReadOnly_doesntDetachArray();
-
- void mapMemoryBufferWithWriteModes_detachsArray_data();
- void mapMemoryBufferWithWriteModes_detachsArray();
-
- void underlyingByteArray_returnsCorrectValueForPlanes();
+ void mapMemoryOrImageBuffer_detachesDataDependingOnMode_data();
+ void mapMemoryOrImageBuffer_detachesDataDependingOnMode();
void unmap_resetsMappedState_whenBufferIsMapped_data();
void unmap_resetsMappedState_whenBufferIsMapped();
@@ -45,16 +38,16 @@ private slots:
void imageBuffer_fixesInputImage();
private:
- QString mapModeToString(QVideoFrame::MapMode mapMode) const
+ QString mapModeToString(QtVideo::MapMode mapMode) const
{
switch (mapMode) {
- case QVideoFrame::NotMapped:
+ case QtVideo::MapMode::NotMapped:
return QLatin1String("NotMapped");
- case QVideoFrame::ReadOnly:
+ case QtVideo::MapMode::ReadOnly:
return QLatin1String("ReadOnly");
- case QVideoFrame::WriteOnly:
+ case QtVideo::MapMode::WriteOnly:
return QLatin1String("WriteOnly");
- case QVideoFrame::ReadWrite:
+ case QtVideo::MapMode::ReadWrite:
return QLatin1String("ReadWrite");
default:
return QLatin1String("Unknown");
@@ -64,16 +57,25 @@ private:
void generateImageAndMemoryBuffersWithAllModes(const MapModes& modes = validMapModes) const
{
QTest::addColumn<BufferPtr>("buffer");
- QTest::addColumn<QVideoFrame::MapMode>("mapMode");
+ QTest::addColumn<QtVideo::MapMode>("mapMode");
+ QTest::addColumn<const uint8_t *>("sourcePointer");
for (auto mode : modes) {
- QTest::newRow(QStringLiteral("ImageBuffer, %1").arg(mapModeToString(mode)).toUtf8().constData())
- << createImageBuffer() << mode;
- QTest::newRow(QStringLiteral("MemoryBuffer, %1").arg(mapModeToString(mode)).toUtf8().constData())
- << createMemoryBuffer() << mode;
+ QTest::newRow(QStringLiteral("ImageBuffer, %1").arg(mapModeToString(mode)).toLocal8Bit().constData())
+ << createImageBuffer() << mode << m_image.constBits();
+ QTest::newRow(QStringLiteral("MemoryBuffer, %1").arg(mapModeToString(mode)).toLocal8Bit().constData())
+ << createMemoryBuffer() << mode << reinterpret_cast<const uint8_t *>(m_byteArray.constData());
}
}
+ void generateMapModes(const MapModes &modes = validMapModes) const
+ {
+ QTest::addColumn<QtVideo::MapMode>("mapMode");
+
+ for (auto mode : modes)
+ QTest::newRow(mapModeToString(mode).toLocal8Bit().constData()) << mode;
+ }
+
BufferPtr createImageBuffer() const
{
return std::make_shared<QImageVideoBuffer>(m_image);
@@ -99,102 +101,63 @@ void tst_QVideoBuffers::initTestCase()
m_byteArray.assign(m_image.constBits(), m_image.constBits() + m_image.sizeInBytes());
}
-void tst_QVideoBuffers::map_changesMappedStateAndReturnsProperMappings_whenBufferIsNotMapped_data()
+void tst_QVideoBuffers::map_returnsProperMappings_whenBufferIsNotMapped_data()
{
generateImageAndMemoryBuffersWithAllModes();
}
-void tst_QVideoBuffers::map_changesMappedStateAndReturnsProperMappings_whenBufferIsNotMapped()
+void tst_QVideoBuffers::map_returnsProperMappings_whenBufferIsNotMapped()
{
QFETCH(BufferPtr, buffer);
- QFETCH(QVideoFrame::MapMode, mapMode);
+ QFETCH(QtVideo::MapMode, mapMode);
auto mappedData = buffer->map(mapMode);
- QCOMPARE(buffer->mapMode(), mapMode);
-
- QCOMPARE(mappedData.nPlanes, 1);
+ QCOMPARE(mappedData.planeCount, 1);
QVERIFY(mappedData.data[0]);
- QCOMPARE(mappedData.size[0], 80);
+ QCOMPARE(mappedData.dataSize[0], 80);
QCOMPARE(mappedData.bytesPerLine[0], 20);
const auto data = reinterpret_cast<const char*>(mappedData.data[0]);
- QCOMPARE(QByteArray(data, mappedData.size[0]), m_byteArray);
+ QCOMPARE(QByteArray(data, mappedData.dataSize[0]), m_byteArray);
}
-void tst_QVideoBuffers::mapWithNotMappedMode_doesNothing_data()
+void tst_QVideoBuffers::map_returnsProperMappings_whenBufferIsMapped_data()
{
generateImageAndMemoryBuffersWithAllModes();
}
-void tst_QVideoBuffers::mapWithNotMappedMode_doesNothing()
+void tst_QVideoBuffers::map_returnsProperMappings_whenBufferIsMapped()
{
QFETCH(BufferPtr, buffer);
- QFETCH(QVideoFrame::MapMode, mapMode);
+ QFETCH(QtVideo::MapMode, mapMode);
- buffer->map(mapMode);
-
- buffer->map(QVideoFrame::NotMapped);
+ auto mappedData1 = buffer->map(mapMode);
+ auto mappedData2 = buffer->map(mapMode);
- QCOMPARE(buffer->mapMode(), mapMode);
+ QCOMPARE(mappedData1.planeCount, mappedData2.planeCount);
+ QCOMPARE(mappedData1.data[0], mappedData2.data[0]);
+ QCOMPARE(mappedData1.dataSize[0], mappedData2.dataSize[0]);
+ QCOMPARE(mappedData1.bytesPerLine[0], mappedData2.bytesPerLine[0]);
}
-void tst_QVideoBuffers::map_doesNothing_whenBufferIsMapped_data()
+void tst_QVideoBuffers::mapMemoryOrImageBuffer_detachesDataDependingOnMode_data()
{
generateImageAndMemoryBuffersWithAllModes();
}
-void tst_QVideoBuffers::map_doesNothing_whenBufferIsMapped()
+void tst_QVideoBuffers::mapMemoryOrImageBuffer_detachesDataDependingOnMode()
{
QFETCH(BufferPtr, buffer);
- QFETCH(QVideoFrame::MapMode, mapMode);
-
- buffer->map(mapMode);
- auto mappedData = buffer->map(QVideoFrame::ReadOnly);
- QCOMPARE(mappedData.nPlanes, 0);
- QCOMPARE(buffer->mapMode(), mapMode);
-}
-
-void tst_QVideoBuffers::mapMemoryBufferWithReadOnly_doesntDetachArray()
-{
- auto buffer = createMemoryBuffer();
- auto underlyingArray = buffer->underlyingByteArray(0);
-
- auto mappedData = buffer->map(QVideoFrame::ReadOnly);
- QCOMPARE(mappedData.nPlanes, 1);
- QCOMPARE(mappedData.data[0], reinterpret_cast<const uchar *>(underlyingArray.constData()));
- QCOMPARE(mappedData.data[0], reinterpret_cast<const uchar *>(m_byteArray.constData()));
-}
-
-void tst_QVideoBuffers::mapMemoryBufferWithWriteModes_detachsArray_data()
-{
- QTest::addColumn<QVideoFrame::MapMode>("mapMode");
-
- QTest::newRow(mapModeToString(QVideoFrame::WriteOnly).toUtf8().constData()) << QVideoFrame::WriteOnly;
- QTest::newRow(mapModeToString(QVideoFrame::WriteOnly).toUtf8().constData()) << QVideoFrame::WriteOnly;
-}
-
-void tst_QVideoBuffers::mapMemoryBufferWithWriteModes_detachsArray()
-{
- QFETCH(QVideoFrame::MapMode, mapMode);
-
- auto buffer = createMemoryBuffer();
- auto underlyingArray = buffer->underlyingByteArray(0);
+ QFETCH(QtVideo::MapMode, mapMode);
+ QFETCH(const uint8_t *, sourcePointer);
auto mappedData = buffer->map(mapMode);
- QCOMPARE(mappedData.nPlanes, 1);
- QCOMPARE_NE(mappedData.data[0], reinterpret_cast<const uchar *>(underlyingArray.constData()));
-}
+ QCOMPARE(mappedData.planeCount, 1);
-void tst_QVideoBuffers::underlyingByteArray_returnsCorrectValueForPlanes()
-{
- auto buffer = createMemoryBuffer();
-
- QCOMPARE(buffer->underlyingByteArray(0).constData(), m_byteArray.constData());
-
- QVERIFY(buffer->underlyingByteArray(-1).isNull());
- QVERIFY(buffer->underlyingByteArray(1).isNull());
- QVERIFY(buffer->underlyingByteArray(2).isNull());
+ const bool isDetached = mappedData.data[0] != sourcePointer;
+ const bool isWriteMode = (mapMode & QtVideo::MapMode::WriteOnly) == QtVideo::MapMode::WriteOnly;
+ QCOMPARE(isDetached, isWriteMode);
}
void tst_QVideoBuffers::unmap_resetsMappedState_whenBufferIsMapped_data()
@@ -205,21 +168,18 @@ void tst_QVideoBuffers::unmap_resetsMappedState_whenBufferIsMapped_data()
void tst_QVideoBuffers::unmap_resetsMappedState_whenBufferIsMapped()
{
QFETCH(BufferPtr, buffer);
- QFETCH(QVideoFrame::MapMode, mapMode);
+ QFETCH(QtVideo::MapMode, mapMode);
buffer->map(mapMode);
buffer->unmap();
- QCOMPARE(buffer->mapMode(), QVideoFrame::NotMapped);
-
// Check buffer is valid and it's possible to map again
- auto mappedData = buffer->map(QVideoFrame::ReadOnly);
- QCOMPARE(mappedData.nPlanes, 1);
- QCOMPARE(buffer->mapMode(), QVideoFrame::ReadOnly);
+ auto mappedData = buffer->map(QtVideo::MapMode::ReadOnly);
+ QCOMPARE(mappedData.planeCount, 1);
const auto data = reinterpret_cast<const char*>(mappedData.data[0]);
- QCOMPARE(QByteArray(data, mappedData.size[0]), m_byteArray);
+ QCOMPARE(QByteArray(data, mappedData.dataSize[0]), m_byteArray);
}
void tst_QVideoBuffers::imageBuffer_fixesInputImage_data()
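The rewritten buffer test classifies map modes by masking against QtVideo::MapMode::WriteOnly, so both WriteOnly and ReadWrite count as writable. A minimal sketch of that check (not part of the patch; the helper name requestsWriteAccess is illustrative only):

    #include <QtMultimedia/qtvideo.h>

    bool requestsWriteAccess(QtVideo::MapMode mode)
    {
        // QtVideo::MapMode is a flag-like scoped enum: ReadWrite contains the
        // WriteOnly bit, so this mask treats WriteOnly and ReadWrite alike.
        return (mode & QtVideo::MapMode::WriteOnly) == QtVideo::MapMode::WriteOnly;
    }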
diff --git a/tests/auto/unit/multimedia/qvideoframe/tst_qvideoframe.cpp b/tests/auto/unit/multimedia/qvideoframe/tst_qvideoframe.cpp
index 607479412..295484a92 100644
--- a/tests/auto/unit/multimedia/qvideoframe/tst_qvideoframe.cpp
+++ b/tests/auto/unit/multimedia/qvideoframe/tst_qvideoframe.cpp
@@ -5,10 +5,14 @@
#include <qvideoframe.h>
#include <qvideoframeformat.h>
+#include "QtTest/qtestcase.h"
#include "private/qmemoryvideobuffer_p.h"
+#include "private/qhwvideobuffer_p.h"
+#include "private/qvideoframe_p.h"
#include <QtGui/QImage>
#include <QtCore/QPointer>
#include <QtMultimedia/private/qtmultimedia-config_p.h>
+#include "private/qvideoframeconverter_p.h"
// Adds an enum, and the stringized version
#define ADD_ENUM_TEST(x) \
@@ -80,7 +84,7 @@ std::vector<QRgb> swizzle(const std::vector<uint> &pixels, QVideoFrameFormat::Pi
std::optional<std::vector<QRgb>> getPixels(QVideoFrame &frame)
{
- if (!frame.map(QVideoFrame::ReadOnly))
+ if (!frame.map(QtVideo::MapMode::ReadOnly))
return std::nullopt;
const uint *mappedPixels = reinterpret_cast<const uint *>(frame.bits(0));
@@ -124,6 +128,53 @@ bool compareEq(QVideoFrame &frame, const QImage &image)
return true;
}
+QSet s_pixelFormats{ QVideoFrameFormat::Format_ARGB8888,
+ QVideoFrameFormat::Format_ARGB8888_Premultiplied,
+ QVideoFrameFormat::Format_XRGB8888,
+ QVideoFrameFormat::Format_BGRA8888,
+ QVideoFrameFormat::Format_BGRA8888_Premultiplied,
+ QVideoFrameFormat::Format_BGRX8888,
+ QVideoFrameFormat::Format_ABGR8888,
+ QVideoFrameFormat::Format_XBGR8888,
+ QVideoFrameFormat::Format_RGBA8888,
+ QVideoFrameFormat::Format_RGBX8888,
+ QVideoFrameFormat::Format_NV12,
+ QVideoFrameFormat::Format_NV21,
+ QVideoFrameFormat::Format_IMC1,
+ QVideoFrameFormat::Format_IMC2,
+ QVideoFrameFormat::Format_IMC3,
+ QVideoFrameFormat::Format_IMC4,
+ QVideoFrameFormat::Format_AYUV,
+ QVideoFrameFormat::Format_AYUV_Premultiplied,
+ QVideoFrameFormat::Format_YV12,
+ QVideoFrameFormat::Format_YUV420P,
+ QVideoFrameFormat::Format_YUV422P,
+ QVideoFrameFormat::Format_UYVY,
+ QVideoFrameFormat::Format_YUYV,
+ QVideoFrameFormat::Format_Y8,
+ QVideoFrameFormat::Format_Y16,
+ QVideoFrameFormat::Format_P010,
+ QVideoFrameFormat::Format_P016,
+ QVideoFrameFormat::Format_YUV420P10 };
+
+bool isSupportedPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat)
+{
+#ifdef Q_OS_ANDROID
+ // TODO: QTBUG-125238
+ switch (pixelFormat) {
+ case QVideoFrameFormat::Format_Y16:
+ case QVideoFrameFormat::Format_P010:
+ case QVideoFrameFormat::Format_P016:
+ case QVideoFrameFormat::Format_YUV420P10:
+ return false;
+ default:
+ return true;
+ }
+#else
+ return true;
+#endif
+}
+
class tst_QVideoFrame : public QObject
{
Q_OBJECT
@@ -146,7 +197,8 @@ private slots:
void createFromBuffer();
void createFromImage_data();
void createNull();
- void destructor();
+ void destructor_deletesVideoBuffer();
+ void destructorOfPrivateData_unmapsAndDeletesVideoBuffer_whenNoMoreFramesCopies();
void copy_data();
void copy();
void assign_data();
@@ -158,6 +210,9 @@ private slots:
void formatConversion_data();
void formatConversion();
+ void qImageFromVideoFrame_doesNotCrash_whenCalledWithEvenAndOddSizedFrames_data();
+ void qImageFromVideoFrame_doesNotCrash_whenCalledWithEvenAndOddSizedFrames();
+
void isMapped();
void isReadable();
void isWritable();
@@ -167,6 +222,10 @@ private slots:
void emptyData();
+ void mirrored_takesValue_fromVideoFrameFormat();
+ void rotation_takesValue_fromVideoFrameFormat();
+ void streamFrameRate_takesValue_fromVideoFrameFormat();
+
void constructor_createsInvalidFrame_whenCalledWithNullImage();
void constructor_createsInvalidFrame_whenCalledWithEmptyImage();
void constructor_createsInvalidFrame_whenCalledWithInvalidImageFormat();
@@ -176,57 +235,39 @@ private slots:
void constructor_copiesImageData_whenCalledWithRGBFormats();
};
-class QtTestDummyVideoBuffer : public QObject, public QAbstractVideoBuffer
+class QtTestVideoBuffer : public QObject, public QHwVideoBuffer
{
Q_OBJECT
public:
- QtTestDummyVideoBuffer()
- : QAbstractVideoBuffer(QVideoFrame::NoHandle) {}
- explicit QtTestDummyVideoBuffer(QVideoFrame::HandleType type)
- : QAbstractVideoBuffer(type) {}
-
- [[nodiscard]] QVideoFrame::MapMode mapMode() const override { return QVideoFrame::NotMapped; }
-
- MapData map(QVideoFrame::MapMode) override { return {}; }
- void unmap() override {}
-};
-
-class QtTestVideoBuffer : public QAbstractVideoBuffer
-{
-public:
- QtTestVideoBuffer()
- : QAbstractVideoBuffer(QVideoFrame::NoHandle)
- {}
- explicit QtTestVideoBuffer(QVideoFrame::HandleType type)
- : QAbstractVideoBuffer(type)
- {}
-
- [[nodiscard]] QVideoFrame::MapMode mapMode() const override { return m_mapMode; }
+ QtTestVideoBuffer() : QHwVideoBuffer(QVideoFrame::NoHandle) { }
+ explicit QtTestVideoBuffer(QVideoFrame::HandleType type) : QHwVideoBuffer(type) { }
+ ~QtTestVideoBuffer() override { QTEST_ASSERT(!m_mapObject); }
- MapData map(QVideoFrame::MapMode mode) override
+ MapData map(QtVideo::MapMode) override
{
- m_mapMode = mode;
+ m_mapObject = std::make_unique<QObject>();
MapData mapData;
int nBytes = m_numBytes;
- mapData.nPlanes = m_planeCount;
+ mapData.planeCount = m_planeCount;
for (int i = 0; i < m_planeCount; ++i) {
mapData.data[i] = m_data[i];
mapData.bytesPerLine[i] = m_bytesPerLine[i];
if (i) {
- mapData.size[i-1] = m_data[i] - m_data[i-1];
- nBytes -= mapData.size[i-1];
+ mapData.dataSize[i-1] = m_data[i] - m_data[i-1];
+ nBytes -= mapData.dataSize[i-1];
}
- mapData.size[i] = nBytes;
+ mapData.dataSize[i] = nBytes;
}
return mapData;
}
- void unmap() override { m_mapMode = QVideoFrame::NotMapped; }
+
+ void unmap() override { m_mapObject.reset(); }
uchar *m_data[4];
int m_bytesPerLine[4];
- int m_planeCount = 0;
+ int m_planeCount = 1;
int m_numBytes;
- QVideoFrame::MapMode m_mapMode = QVideoFrame::NotMapped;
+ std::unique_ptr<QObject> m_mapObject;
};
tst_QVideoFrame::tst_QVideoFrame()
@@ -257,34 +298,43 @@ void tst_QVideoFrame::create_data()
{
QTest::addColumn<QSize>("size");
QTest::addColumn<QVideoFrameFormat::PixelFormat>("pixelFormat");
- QTest::addColumn<int>("bytes");
QTest::addColumn<int>("bytesPerLine");
QTest::newRow("64x64 ARGB32")
<< QSize(64, 64)
- << QVideoFrameFormat::Format_ARGB8888;
+ << QVideoFrameFormat::Format_ARGB8888
+ << 64*4;
QTest::newRow("32x256 YUV420P")
<< QSize(32, 256)
- << QVideoFrameFormat::Format_YUV420P;
+ << QVideoFrameFormat::Format_YUV420P
+ << 32;
+ QTest::newRow("32x256 UYVY")
+ << QSize(32, 256)
+ << QVideoFrameFormat::Format_UYVY
+ << 32*2;
}
void tst_QVideoFrame::create()
{
QFETCH(QSize, size);
QFETCH(QVideoFrameFormat::PixelFormat, pixelFormat);
+ QFETCH(int, bytesPerLine);
QVideoFrame frame(QVideoFrameFormat(size, pixelFormat));
QVERIFY(frame.isValid());
QCOMPARE(frame.handleType(), QVideoFrame::NoHandle);
- QVERIFY(frame.videoBuffer() != nullptr);
- QCOMPARE(frame.videoBuffer()->textureHandle(nullptr, 0), 0u);
+ QCOMPARE(QVideoFramePrivate::hwBuffer(frame), nullptr);
+ QCOMPARE_NE(QVideoFramePrivate::buffer(frame), nullptr);
QCOMPARE(frame.pixelFormat(), pixelFormat);
QCOMPARE(frame.size(), size);
QCOMPARE(frame.width(), size.width());
QCOMPARE(frame.height(), size.height());
QCOMPARE(frame.startTime(), qint64(-1));
QCOMPARE(frame.endTime(), qint64(-1));
+ frame.map(QtVideo::MapMode::ReadOnly);
+ QCOMPARE(frame.bytesPerLine(0), bytesPerLine);
+ frame.unmap();
}
void tst_QVideoFrame::createInvalid_data()
@@ -309,7 +359,7 @@ void tst_QVideoFrame::createInvalid()
QVERIFY(!frame.isValid());
QCOMPARE(frame.handleType(), QVideoFrame::NoHandle);
- QCOMPARE(frame.videoBuffer(), nullptr);
+ QCOMPARE(QVideoFramePrivate::buffer(frame), nullptr);
QCOMPARE(frame.pixelFormat(), pixelFormat);
QCOMPARE(frame.size(), size);
QCOMPARE(frame.width(), size.width());
@@ -340,7 +390,8 @@ void tst_QVideoFrame::createFromBuffer()
QFETCH(QSize, size);
QFETCH(QVideoFrameFormat::PixelFormat, pixelFormat);
- QVideoFrame frame(new QtTestDummyVideoBuffer(handleType), QVideoFrameFormat(size, pixelFormat));
+ QVideoFrame frame = QVideoFramePrivate::createFrame(
+ std::make_unique<QtTestVideoBuffer>(handleType), QVideoFrameFormat(size, pixelFormat));
QVERIFY(frame.isValid());
QCOMPARE(frame.handleType(), handleType);
@@ -382,10 +433,10 @@ void tst_QVideoFrame::createNull()
QCOMPARE(frame.height(), -1);
QCOMPARE(frame.startTime(), qint64(-1));
QCOMPARE(frame.endTime(), qint64(-1));
- QCOMPARE(frame.mapMode(), QVideoFrame::NotMapped);
- QVERIFY(!frame.map(QVideoFrame::ReadOnly));
- QVERIFY(!frame.map(QVideoFrame::ReadWrite));
- QVERIFY(!frame.map(QVideoFrame::WriteOnly));
+ QCOMPARE(static_cast<QtVideo::MapMode>(frame.mapMode()), QtVideo::MapMode::NotMapped);
+ QVERIFY(!frame.map(QtVideo::MapMode::ReadOnly));
+ QVERIFY(!frame.map(QtVideo::MapMode::ReadWrite));
+ QVERIFY(!frame.map(QtVideo::MapMode::WriteOnly));
QCOMPARE(frame.isMapped(), false);
frame.unmap(); // Shouldn't crash
QCOMPARE(frame.isReadable(), false);
@@ -394,7 +445,9 @@ void tst_QVideoFrame::createNull()
// Null buffer (shouldn't crash)
{
- QVideoFrame frame(nullptr, QVideoFrameFormat(QSize(1024,768), QVideoFrameFormat::Format_ARGB8888));
+ QVideoFrame frame = QVideoFramePrivate::createFrame(
+ std::unique_ptr<QHwVideoBuffer>(),
+ QVideoFrameFormat(QSize(1024, 768), QVideoFrameFormat::Format_ARGB8888));
QVERIFY(!frame.isValid());
QCOMPARE(frame.handleType(), QVideoFrame::NoHandle);
QCOMPARE(frame.pixelFormat(), QVideoFrameFormat::Format_ARGB8888);
@@ -403,10 +456,10 @@ void tst_QVideoFrame::createNull()
QCOMPARE(frame.height(), 768);
QCOMPARE(frame.startTime(), qint64(-1));
QCOMPARE(frame.endTime(), qint64(-1));
- QCOMPARE(frame.mapMode(), QVideoFrame::NotMapped);
- QVERIFY(!frame.map(QVideoFrame::ReadOnly));
- QVERIFY(!frame.map(QVideoFrame::ReadWrite));
- QVERIFY(!frame.map(QVideoFrame::WriteOnly));
+ QCOMPARE(static_cast<QtVideo::MapMode>(frame.mapMode()), QtVideo::MapMode::NotMapped);
+ QVERIFY(!frame.map(QtVideo::MapMode::ReadOnly));
+ QVERIFY(!frame.map(QtVideo::MapMode::ReadWrite));
+ QVERIFY(!frame.map(QtVideo::MapMode::WriteOnly));
QCOMPARE(frame.isMapped(), false);
frame.unmap(); // Shouldn't crash
QCOMPARE(frame.isReadable(), false);
@@ -414,17 +467,55 @@ void tst_QVideoFrame::createNull()
}
}
-void tst_QVideoFrame::destructor()
+void tst_QVideoFrame::destructor_deletesVideoBuffer()
{
- QPointer<QtTestDummyVideoBuffer> buffer = new QtTestDummyVideoBuffer;
+ QPointer buffer(new QtTestVideoBuffer);
{
- QVideoFrame frame(buffer, QVideoFrameFormat(QSize(4, 1), QVideoFrameFormat::Format_ARGB8888));
+ QVideoFrame frame = QVideoFramePrivate::createFrame(
+ std::unique_ptr<QHwVideoBuffer>(buffer),
+ QVideoFrameFormat(QSize(4, 1), QVideoFrameFormat::Format_ARGB8888));
}
QVERIFY(buffer.isNull());
}
+void tst_QVideoFrame::destructorOfPrivateData_unmapsAndDeletesVideoBuffer_whenNoMoreFramesCopies()
+{
+ uchar bufferData[16] = {
+ 0,
+ };
+
+ QPointer buffer(new QtTestVideoBuffer);
+ buffer->m_data[0] = bufferData;
+ buffer->m_bytesPerLine[0] = 16;
+ buffer->m_planeCount = 1;
+ buffer->m_numBytes = sizeof(bufferData);
+
+ QVideoFrame frame1 = QVideoFramePrivate::createFrame(
+ std::unique_ptr<QHwVideoBuffer>(buffer),
+ QVideoFrameFormat(QSize(4, 1), QVideoFrameFormat::Format_ARGB8888));
+
+ frame1.map(QtVideo::MapMode::ReadOnly);
+
+ QVERIFY(buffer);
+ QPointer mapObject(buffer->m_mapObject.get());
+
+ QVideoFrame frame2 = frame1;
+
+ frame1 = {};
+
+ // check if the buffer and the map object are still alive
+ QVERIFY(mapObject);
+ QVERIFY(buffer);
+
+ frame2 = {};
+
+ // check if the buffer and the map object have been deleted
+ QVERIFY(!mapObject);
+ QVERIFY(!buffer);
+}
+
void tst_QVideoFrame::copy_data()
{
QTest::addColumn<QVideoFrame::HandleType>("handleType");
@@ -473,10 +564,11 @@ void tst_QVideoFrame::copy()
QFETCH(qint64, startTime);
QFETCH(qint64, endTime);
- QPointer<QtTestDummyVideoBuffer> buffer = new QtTestDummyVideoBuffer(handleType);
+ QPointer<QtTestVideoBuffer> buffer = new QtTestVideoBuffer(handleType);
{
- QVideoFrame frame(buffer, QVideoFrameFormat(size, pixelFormat));
+ QVideoFrame frame = QVideoFramePrivate::createFrame(std::unique_ptr<QHwVideoBuffer>(buffer),
+ QVideoFrameFormat(size, pixelFormat));
frame.setStartTime(startTime);
frame.setEndTime(endTime);
@@ -562,11 +654,12 @@ void tst_QVideoFrame::assign()
QFETCH(qint64, startTime);
QFETCH(qint64, endTime);
- QPointer<QtTestDummyVideoBuffer> buffer = new QtTestDummyVideoBuffer(handleType);
+ QPointer<QtTestVideoBuffer> buffer = new QtTestVideoBuffer(handleType);
QVideoFrame frame;
{
- QVideoFrame otherFrame(buffer, QVideoFrameFormat(size, pixelFormat));
+ QVideoFrame otherFrame = QVideoFramePrivate::createFrame(
+ std::unique_ptr<QHwVideoBuffer>(buffer), QVideoFrameFormat(size, pixelFormat));
otherFrame.setStartTime(startTime);
otherFrame.setEndTime(endTime);
@@ -626,44 +719,44 @@ void tst_QVideoFrame::map_data()
{
QTest::addColumn<QSize>("size");
QTest::addColumn<QVideoFrameFormat::PixelFormat>("pixelFormat");
- QTest::addColumn<QVideoFrame::MapMode>("mode");
+ QTest::addColumn<QtVideo::MapMode>("mode");
QTest::newRow("read-only")
<< QSize(64, 64)
<< QVideoFrameFormat::Format_ARGB8888
- << QVideoFrame::ReadOnly;
+ << QtVideo::MapMode::ReadOnly;
QTest::newRow("write-only")
<< QSize(64, 64)
<< QVideoFrameFormat::Format_ARGB8888
- << QVideoFrame::WriteOnly;
+ << QtVideo::MapMode::WriteOnly;
QTest::newRow("read-write")
<< QSize(64, 64)
<< QVideoFrameFormat::Format_ARGB8888
- << QVideoFrame::ReadWrite;
+ << QtVideo::MapMode::ReadWrite;
}
void tst_QVideoFrame::map()
{
QFETCH(QSize, size);
QFETCH(QVideoFrameFormat::PixelFormat, pixelFormat);
- QFETCH(QVideoFrame::MapMode, mode);
+ QFETCH(QtVideo::MapMode, mode);
QVideoFrame frame(QVideoFrameFormat(size, pixelFormat));
QVERIFY(!frame.bits(0));
QCOMPARE(frame.mappedBytes(0), 0);
QCOMPARE(frame.bytesPerLine(0), 0);
- QCOMPARE(frame.mapMode(), QVideoFrame::NotMapped);
+ QCOMPARE(static_cast<QtVideo::MapMode>(frame.mapMode()), QtVideo::MapMode::NotMapped);
QVERIFY(frame.map(mode));
// Mapping multiple times is allowed in ReadOnly mode
- if (mode == QVideoFrame::ReadOnly) {
+ if (mode == QtVideo::MapMode::ReadOnly) {
const uchar *bits = frame.bits(0);
- QVERIFY(frame.map(QVideoFrame::ReadOnly));
+ QVERIFY(frame.map(QtVideo::MapMode::ReadOnly));
QVERIFY(frame.isMapped());
QCOMPARE(frame.bits(0), bits);
@@ -673,23 +766,23 @@ void tst_QVideoFrame::map()
QCOMPARE(frame.bits(0), bits);
//re-mapping in Write or ReadWrite modes should fail
- QVERIFY(!frame.map(QVideoFrame::WriteOnly));
- QVERIFY(!frame.map(QVideoFrame::ReadWrite));
+ QVERIFY(!frame.map(QtVideo::MapMode::WriteOnly));
+ QVERIFY(!frame.map(QtVideo::MapMode::ReadWrite));
} else {
// Mapping twice in ReadWrite or WriteOnly modes should fail, but leave it mapped (and the mode is ignored)
QVERIFY(!frame.map(mode));
- QVERIFY(!frame.map(QVideoFrame::ReadOnly));
+ QVERIFY(!frame.map(QtVideo::MapMode::ReadOnly));
}
QVERIFY(frame.bits(0));
- QCOMPARE(frame.mapMode(), mode);
+ QCOMPARE(static_cast<QtVideo::MapMode>(frame.mapMode()), mode);
frame.unmap();
QVERIFY(!frame.bits(0));
QCOMPARE(frame.mappedBytes(0), 0);
QCOMPARE(frame.bytesPerLine(0), 0);
- QCOMPARE(frame.mapMode(), QVideoFrame::NotMapped);
+ QCOMPARE(static_cast<QtVideo::MapMode>(frame.mapMode()), QtVideo::MapMode::NotMapped);
}
void tst_QVideoFrame::mapPlanes_data()
@@ -705,7 +798,7 @@ void tst_QVideoFrame::mapPlanes_data()
static uchar bufferData[1024];
- QtTestVideoBuffer *planarBuffer = new QtTestVideoBuffer;
+ auto planarBuffer = std::make_unique<QtTestVideoBuffer>();
planarBuffer->m_data[0] = bufferData;
planarBuffer->m_data[1] = bufferData + 512;
planarBuffer->m_data[2] = bufferData + 765;
@@ -715,10 +808,10 @@ void tst_QVideoFrame::mapPlanes_data()
planarBuffer->m_planeCount = 3;
planarBuffer->m_numBytes = sizeof(bufferData);
- QTest::newRow("Planar")
- << QVideoFrame(planarBuffer, QVideoFrameFormat(QSize(64, 64), QVideoFrameFormat::Format_YUV420P))
- << (QList<int>() << 64 << 36 << 36)
- << (QList<int>() << 512 << 765);
+ QTest::newRow("Planar") << QVideoFramePrivate::createFrame(
+ std::move(planarBuffer),
+ QVideoFrameFormat(QSize(64, 64), QVideoFrameFormat::Format_YUV420P))
+ << (QList<int>() << 64 << 36 << 36) << (QList<int>() << 512 << 765);
QTest::newRow("Format_YUV420P")
<< QVideoFrame(QVideoFrameFormat(QSize(60, 64), QVideoFrameFormat::Format_YUV420P))
<< (QList<int>() << 64 << 32 << 32)
@@ -769,7 +862,7 @@ void tst_QVideoFrame::mapPlanes()
QCOMPARE(strides.size(), offsets.size() + 1);
- QCOMPARE(frame.map(QVideoFrame::ReadOnly), true);
+ QCOMPARE(frame.map(QtVideo::MapMode::ReadOnly), true);
QCOMPARE(frame.planeCount(), strides.size());
QVERIFY(strides.size() > 0);
@@ -926,13 +1019,76 @@ void tst_QVideoFrame::formatConversion()
QCOMPARE(QVideoFrameFormat::imageFormatFromPixelFormat(pixelFormat), imageFormat);
}
+void tst_QVideoFrame::qImageFromVideoFrame_doesNotCrash_whenCalledWithEvenAndOddSizedFrames_data() {
+ QTest::addColumn<QSize>("size");
+ QTest::addColumn<QVideoFrameFormat::PixelFormat>("pixelFormat");
+ QTest::addColumn<bool>("forceCpuConversion");
+ QTest::addColumn<bool>("supportedOnPlatform");
+
+ const std::vector<QSize> sizes{
+ // Even sized
+ { 2, 2 },
+ { 2, 10 },
+ { 10, 2 },
+ { 640, 480 },
+ { 4096, 2160 },
+ // Odd sized
+ { 0, 0 },
+ { 3, 3 },
+ { 2, 3 },
+ { 3, 2 },
+ { 641, 480 },
+ { 640, 481 },
+ // TODO: Crashes
+ // { 1, 1 } // TODO: Division by zero in QVideoFrame::map (Debug)
+ // { 1, 2 } // TODO: D3D validation error in QRhiD3D11::executeCommandBuffer
+ // { 2, 1 } // TODO: D3D validation error in QRhiD3D11::executeCommandBuffer
+ };
+
+ for (const QSize &size : sizes) {
+ for (const QVideoFrameFormat::PixelFormat pixelFormat : s_pixelFormats) {
+ for (const bool forceCpu : { false, true }) {
+
+ if (pixelFormat == QVideoFrameFormat::Format_YUV420P10 && forceCpu)
+ continue; // TODO: Cpu conversion not implemented
+
+ QString name = QStringLiteral("%1x%2_%3%4")
+ .arg(size.width())
+ .arg(size.height())
+ .arg(QVideoFrameFormat::pixelFormatToString(pixelFormat))
+ .arg(forceCpu ? "_cpu" : "");
+
+ QTest::addRow("%s", name.toLatin1().data())
+ << size << pixelFormat << forceCpu << isSupportedPixelFormat(pixelFormat);
+ }
+ }
+ }
+}
+
+void tst_QVideoFrame::qImageFromVideoFrame_doesNotCrash_whenCalledWithEvenAndOddSizedFrames() {
+ QFETCH(const QSize, size);
+ QFETCH(const QVideoFrameFormat::PixelFormat, pixelFormat);
+ QFETCH(const bool, forceCpuConversion);
+ QFETCH(const bool, supportedOnPlatform);
+
+ const QVideoFrameFormat format{ size, pixelFormat };
+ const QVideoFrame frame{ format };
+ const QImage actual = qImageFromVideoFrame(frame, QtVideo::Rotation::None, false, false,
+ forceCpuConversion);
+
+ if (supportedOnPlatform)
+ QCOMPARE_EQ(actual.isNull(), size.isEmpty());
+ // Otherwise, we don't expect an image to be produced, although one might be.
+ // TODO: Investigate why 16 bit formats fail on some Android flavors.
+}
+
#define TEST_MAPPED(frame, mode) \
do { \
QVERIFY(frame.bits(0)); \
QVERIFY(frame.isMapped()); \
QCOMPARE(frame.mappedBytes(0), 16384); \
QCOMPARE(frame.bytesPerLine(0), 256); \
- QCOMPARE(frame.mapMode(), mode); \
+ QCOMPARE(static_cast<QtVideo::MapMode>(frame.mapMode()), mode); \
} while (0)
#define TEST_UNMAPPED(frame) \
@@ -941,7 +1097,7 @@ do { \
QVERIFY(!frame.isMapped()); \
QCOMPARE(frame.mappedBytes(0), 0); \
QCOMPARE(frame.bytesPerLine(0), 0); \
- QCOMPARE(frame.mapMode(), QVideoFrame::NotMapped); \
+ QCOMPARE(static_cast<QtVideo::MapMode>(frame.mapMode()), QtVideo::MapMode::NotMapped); \
} while (0)
void tst_QVideoFrame::isMapped()
@@ -952,23 +1108,23 @@ void tst_QVideoFrame::isMapped()
TEST_UNMAPPED(frame);
TEST_UNMAPPED(constFrame);
- QVERIFY(frame.map(QVideoFrame::ReadOnly));
- TEST_MAPPED(frame, QVideoFrame::ReadOnly);
- TEST_MAPPED(constFrame, QVideoFrame::ReadOnly);
+ QVERIFY(frame.map(QtVideo::MapMode::ReadOnly));
+ TEST_MAPPED(frame, QtVideo::MapMode::ReadOnly);
+ TEST_MAPPED(constFrame, QtVideo::MapMode::ReadOnly);
frame.unmap();
TEST_UNMAPPED(frame);
TEST_UNMAPPED(constFrame);
- QVERIFY(frame.map(QVideoFrame::WriteOnly));
- TEST_MAPPED(frame, QVideoFrame::WriteOnly);
- TEST_MAPPED(constFrame, QVideoFrame::WriteOnly);
+ QVERIFY(frame.map(QtVideo::MapMode::WriteOnly));
+ TEST_MAPPED(frame, QtVideo::MapMode::WriteOnly);
+ TEST_MAPPED(constFrame, QtVideo::MapMode::WriteOnly);
frame.unmap();
TEST_UNMAPPED(frame);
TEST_UNMAPPED(constFrame);
- QVERIFY(frame.map(QVideoFrame::ReadWrite));
- TEST_MAPPED(frame, QVideoFrame::ReadWrite);
- TEST_MAPPED(constFrame, QVideoFrame::ReadWrite);
+ QVERIFY(frame.map(QtVideo::MapMode::ReadWrite));
+ TEST_MAPPED(frame, QtVideo::MapMode::ReadWrite);
+ TEST_MAPPED(constFrame, QtVideo::MapMode::ReadWrite);
frame.unmap();
TEST_UNMAPPED(frame);
TEST_UNMAPPED(constFrame);
@@ -981,17 +1137,17 @@ void tst_QVideoFrame::isReadable()
QVERIFY(!frame.isMapped());
QVERIFY(!frame.isReadable());
- QVERIFY(frame.map(QVideoFrame::ReadOnly));
+ QVERIFY(frame.map(QtVideo::MapMode::ReadOnly));
QVERIFY(frame.isMapped());
QVERIFY(frame.isReadable());
frame.unmap();
- QVERIFY(frame.map(QVideoFrame::WriteOnly));
+ QVERIFY(frame.map(QtVideo::MapMode::WriteOnly));
QVERIFY(frame.isMapped());
QVERIFY(!frame.isReadable());
frame.unmap();
- QVERIFY(frame.map(QVideoFrame::ReadWrite));
+ QVERIFY(frame.map(QtVideo::MapMode::ReadWrite));
QVERIFY(frame.isMapped());
QVERIFY(frame.isReadable());
frame.unmap();
@@ -1004,17 +1160,17 @@ void tst_QVideoFrame::isWritable()
QVERIFY(!frame.isMapped());
QVERIFY(!frame.isWritable());
- QVERIFY(frame.map(QVideoFrame::ReadOnly));
+ QVERIFY(frame.map(QtVideo::MapMode::ReadOnly));
QVERIFY(frame.isMapped());
QVERIFY(!frame.isWritable());
frame.unmap();
- QVERIFY(frame.map(QVideoFrame::WriteOnly));
+ QVERIFY(frame.map(QtVideo::MapMode::WriteOnly));
QVERIFY(frame.isMapped());
QVERIFY(frame.isWritable());
frame.unmap();
- QVERIFY(frame.map(QVideoFrame::ReadWrite));
+ QVERIFY(frame.map(QtVideo::MapMode::ReadWrite));
QVERIFY(frame.isMapped());
QVERIFY(frame.isWritable());
frame.unmap();
@@ -1093,9 +1249,52 @@ void tst_QVideoFrame::image()
void tst_QVideoFrame::emptyData()
{
QByteArray data(nullptr, 0);
- QVideoFrame f(new QMemoryVideoBuffer(data, 600),
- QVideoFrameFormat(QSize(800, 600), QVideoFrameFormat::Format_ARGB8888));
- QVERIFY(!f.map(QVideoFrame::ReadOnly));
+ QVideoFrame f = QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(data, 600),
+ QVideoFrameFormat(QSize(800, 600), QVideoFrameFormat::Format_ARGB8888));
+ QVERIFY(!f.map(QtVideo::MapMode::ReadOnly));
+}
+
+void tst_QVideoFrame::mirrored_takesValue_fromVideoFrameFormat()
+{
+ QVideoFrameFormat format(QSize(10, 20), QVideoFrameFormat::Format_ARGB8888);
+ format.setMirrored(true);
+
+ QVideoFrame frame(format);
+ QVERIFY(frame.mirrored());
+
+ frame.setMirrored(false);
+ frame.setRotation(QtVideo::Rotation::Clockwise180);
+ QVERIFY(!frame.mirrored());
+ QVERIFY(!frame.surfaceFormat().isMirrored());
+}
+
+void tst_QVideoFrame::rotation_takesValue_fromVideoFrameFormat()
+{
+ QVideoFrameFormat format(QSize(10, 20), QVideoFrameFormat::Format_ARGB8888);
+ format.setRotation(QtVideo::Rotation::Clockwise270);
+
+ QVideoFrame frame(format);
+ QCOMPARE(frame.rotation(), QtVideo::Rotation::Clockwise270);
+
+ frame.setRotation(QtVideo::Rotation::Clockwise180);
+
+ QCOMPARE(frame.rotation(), QtVideo::Rotation::Clockwise180);
+ QCOMPARE(frame.surfaceFormat().rotation(), QtVideo::Rotation::Clockwise180);
+}
+
+void tst_QVideoFrame::streamFrameRate_takesValue_fromVideoFrameFormat()
+{
+ QVideoFrameFormat format(QSize(10, 20), QVideoFrameFormat::Format_ARGB8888);
+ format.setStreamFrameRate(20.);
+
+ QVideoFrame frame(format);
+ QCOMPARE(frame.streamFrameRate(), 20.);
+
+ frame.setStreamFrameRate(25.);
+
+ QCOMPARE(frame.streamFrameRate(), 25.);
+ QCOMPARE(frame.surfaceFormat().streamFrameRate(), 25.);
}
void tst_QVideoFrame::constructor_createsInvalidFrame_whenCalledWithNullImage()
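The *_takesValue_fromVideoFrameFormat tests added above verify that a QVideoFrame inherits rotation, mirroring, and stream frame rate from the QVideoFrameFormat it is constructed with, and that per-frame setters are reflected back through surfaceFormat(). A minimal sketch (not part of the patch; the function name rotatedMirroredFrame is illustrative only):

    #include <QtCore/QSize>
    #include <QtMultimedia/QVideoFrame>
    #include <QtMultimedia/QVideoFrameFormat>

    QVideoFrame rotatedMirroredFrame()
    {
        QVideoFrameFormat format(QSize(10, 20), QVideoFrameFormat::Format_ARGB8888);
        format.setRotation(QtVideo::Rotation::Clockwise270);
        format.setMirrored(true);

        QVideoFrame frame(format); // rotation() and mirrored() follow the format
        frame.setRotation(QtVideo::Rotation::Clockwise180); // also updates frame.surfaceFormat()
        return frame;
    }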
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/CMakeLists.txt b/tests/auto/unit/multimedia/qvideoframecolormanagement/CMakeLists.txt
index ed1aa7062..d2e3086d2 100644
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/CMakeLists.txt
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/CMakeLists.txt
@@ -7,19 +7,12 @@ file(GLOB_RECURSE test_data_glob
testdata/*)
list(APPEND testdata_resource_files ${test_data_glob})
-
qt_internal_add_test(tst_qvideoframecolormanagement
SOURCES
tst_qvideoframecolormanagement.cpp
LIBRARIES
Qt::Gui
Qt::MultimediaPrivate
+ BUILTIN_TESTDATA
TESTDATA ${testdata_resource_files}
)
-
-qt_internal_add_resource(tst_qvideoframecolormanagement "testdata"
- PREFIX
- "/"
- FILES
- ${testdata_resource_files}
-)
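Note on the CMake hunk above: the hand-rolled qt_internal_add_resource() block is replaced by the BUILTIN_TESTDATA option of qt_internal_add_test(), which embeds the globbed TESTDATA files into the test binary as resources. A hedged sketch of reading such embedded data back in the test; the ":/testdata/..." lookup path is an assumption based on the removed resource block's PREFIX "/":

    // Sketch only; assumes the embedded testdata keeps the "/" resource prefix.
    #include <QImage>
    #include <QString>

    QImage loadReference()
    {
        return QImage(QStringLiteral(":/testdata/umbrellas.jpg"));
    }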
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg
index 5ed72dbd8..52b0f620b 100644
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_AdobeRgb_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_AdobeRgb_Full.png
deleted file mode 100644
index 5ee0ffd50..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_AdobeRgb_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_AdobeRgb_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_AdobeRgb_Video.png
deleted file mode 100644
index 5ee0ffd50..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_AdobeRgb_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT2020_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT2020_Full.png
deleted file mode 100644
index 1ffaa49a7..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT2020_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT2020_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT2020_Video.png
deleted file mode 100644
index bbb449843..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT2020_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT601_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT601_Full.png
deleted file mode 100644
index 8d23d5f97..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT601_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT601_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT601_Video.png
deleted file mode 100644
index b93a5bbb5..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT601_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT709_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT709_Full.png
deleted file mode 100644
index fd03d6c1b..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT709_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT709_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT709_Video.png
deleted file mode 100644
index ba25be895..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_420p_BT709_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_AdobeRgb_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_AdobeRgb_Full.png
deleted file mode 100644
index 1f7981cfb..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_AdobeRgb_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_AdobeRgb_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_AdobeRgb_Video.png
deleted file mode 100644
index 1f7981cfb..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_AdobeRgb_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT2020_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT2020_Full.png
deleted file mode 100644
index 15cc4c820..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT2020_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT2020_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT2020_Video.png
deleted file mode 100644
index 2adc8f256..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT2020_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT601_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT601_Full.png
deleted file mode 100644
index 7e4dda498..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT601_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT601_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT601_Video.png
deleted file mode 100644
index 041390cf2..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT601_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT709_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT709_Full.png
deleted file mode 100644
index f8ebfd1c1..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT709_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT709_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT709_Video.png
deleted file mode 100644
index c4f0d8481..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_422p_BT709_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_adobergb_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_adobergb_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_adobergb_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_adobergb_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt2020_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt2020_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt2020_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt2020_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt601_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt601_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt601_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt601_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt709_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt709_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt709_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt709_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_abgr8888_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_adobergb_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_adobergb_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_adobergb_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_adobergb_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt2020_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt2020_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt2020_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt2020_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt601_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt601_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt601_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt601_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt709_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt709_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt709_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt709_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_adobergb_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_adobergb_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_adobergb_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_adobergb_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt2020_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt2020_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt2020_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt2020_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt601_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt601_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt601_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt601_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt709_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt709_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt709_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt709_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_argb8888_premultiplied_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_adobergb_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_adobergb_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_adobergb_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_adobergb_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt2020_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt2020_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt2020_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt2020_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt601_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt601_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt601_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt601_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt709_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt709_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt709_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt709_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_adobergb_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_adobergb_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_adobergb_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_adobergb_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt2020_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt2020_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt2020_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt2020_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt601_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt601_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt601_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt601_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt709_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt709_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt709_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt709_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgra8888_premultiplied_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_adobergb_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_adobergb_full_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_adobergb_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_adobergb_video_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt2020_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt2020_full_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt2020_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt2020_video_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt601_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt601_full_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt601_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt601_video_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt709_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt709_full_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt709_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt709_video_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_bgrx8888_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_AdobeRgb_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_AdobeRgb_Full.png
deleted file mode 100644
index 5ee0ffd50..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_AdobeRgb_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_AdobeRgb_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_AdobeRgb_Video.png
deleted file mode 100644
index 5ee0ffd50..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_AdobeRgb_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT2020_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT2020_Full.png
deleted file mode 100644
index 1ffaa49a7..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT2020_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT2020_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT2020_Video.png
deleted file mode 100644
index bbb449843..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT2020_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT601_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT601_Full.png
deleted file mode 100644
index 8d23d5f97..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT601_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT601_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT601_Video.png
deleted file mode 100644
index b93a5bbb5..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT601_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT709_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT709_Full.png
deleted file mode 100644
index fd03d6c1b..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT709_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT709_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT709_Video.png
deleted file mode 100644
index ba25be895..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_BT709_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_adobergb_full.png
new file mode 100644
index 000000000..2af7cdaa4
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_adobergb_full_cpu.png
new file mode 100644
index 000000000..12685832f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_adobergb_video.png
new file mode 100644
index 000000000..2af7cdaa4
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_adobergb_video_cpu.png
new file mode 100644
index 000000000..12685832f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt2020_full.png
new file mode 100644
index 000000000..d6d461f5d
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt2020_full_cpu.png
new file mode 100644
index 000000000..2138f7b91
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt2020_video.png
new file mode 100644
index 000000000..2a4f7d8a7
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt2020_video_cpu.png
new file mode 100644
index 000000000..0de0ffbc5
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt601_full.png
new file mode 100644
index 000000000..d291f62bb
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt601_full_cpu.png
new file mode 100644
index 000000000..33229f55f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt601_video.png
new file mode 100644
index 000000000..35296fc03
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt601_video_cpu.png
new file mode 100644
index 000000000..7b198e19b
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt709_full.png
new file mode 100644
index 000000000..64e5eb6dc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt709_full_cpu.png
new file mode 100644
index 000000000..33229f55f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt709_video.png
new file mode 100644
index 000000000..9f6bdd1ea
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt709_video_cpu.png
new file mode 100644
index 000000000..74b3efccd
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc1_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_AdobeRgb_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_AdobeRgb_Full.png
deleted file mode 100644
index ae268bb2c..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_AdobeRgb_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_AdobeRgb_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_AdobeRgb_Video.png
deleted file mode 100644
index ae268bb2c..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_AdobeRgb_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT2020_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT2020_Full.png
deleted file mode 100644
index 936c09291..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT2020_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT2020_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT2020_Video.png
deleted file mode 100644
index b82d839ff..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT2020_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT601_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT601_Full.png
deleted file mode 100644
index b31fe973f..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT601_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT601_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT601_Video.png
deleted file mode 100644
index acee1379f..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT601_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT709_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT709_Full.png
deleted file mode 100644
index 29907ab63..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT709_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT709_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT709_Video.png
deleted file mode 100644
index 484bebb18..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_BT709_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_adobergb_full.png
new file mode 100644
index 000000000..90b2b3601
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_adobergb_full_cpu.png
new file mode 100644
index 000000000..12685832f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_adobergb_video.png
new file mode 100644
index 000000000..90b2b3601
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_adobergb_video_cpu.png
new file mode 100644
index 000000000..12685832f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt2020_full.png
new file mode 100644
index 000000000..2e78cfc31
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt2020_full_cpu.png
new file mode 100644
index 000000000..2138f7b91
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt2020_video.png
new file mode 100644
index 000000000..d673b7ce5
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt2020_video_cpu.png
new file mode 100644
index 000000000..0de0ffbc5
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt601_full.png
new file mode 100644
index 000000000..8be30a706
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt601_full_cpu.png
new file mode 100644
index 000000000..33229f55f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt601_video.png
new file mode 100644
index 000000000..1f64ea0f1
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt601_video_cpu.png
new file mode 100644
index 000000000..7b198e19b
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt709_full.png
new file mode 100644
index 000000000..24fb9065e
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt709_full_cpu.png
new file mode 100644
index 000000000..33229f55f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt709_video.png
new file mode 100644
index 000000000..f737d8602
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt709_video_cpu.png
new file mode 100644
index 000000000..74b3efccd
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc2_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_AdobeRgb_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_AdobeRgb_Full.png
deleted file mode 100644
index 5ee0ffd50..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_AdobeRgb_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_AdobeRgb_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_AdobeRgb_Video.png
deleted file mode 100644
index 5ee0ffd50..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_AdobeRgb_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT2020_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT2020_Full.png
deleted file mode 100644
index 1ffaa49a7..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT2020_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT2020_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT2020_Video.png
deleted file mode 100644
index bbb449843..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT2020_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT601_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT601_Full.png
deleted file mode 100644
index 8d23d5f97..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT601_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT601_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT601_Video.png
deleted file mode 100644
index b93a5bbb5..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT601_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT709_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT709_Full.png
deleted file mode 100644
index fd03d6c1b..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT709_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT709_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT709_Video.png
deleted file mode 100644
index ba25be895..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_BT709_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_adobergb_full.png
new file mode 100644
index 000000000..2af7cdaa4
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_adobergb_full_cpu.png
new file mode 100644
index 000000000..12685832f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_adobergb_video.png
new file mode 100644
index 000000000..2af7cdaa4
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_adobergb_video_cpu.png
new file mode 100644
index 000000000..12685832f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt2020_full.png
new file mode 100644
index 000000000..d6d461f5d
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt2020_full_cpu.png
new file mode 100644
index 000000000..2138f7b91
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt2020_video.png
new file mode 100644
index 000000000..2a4f7d8a7
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt2020_video_cpu.png
new file mode 100644
index 000000000..0de0ffbc5
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt601_full.png
new file mode 100644
index 000000000..d291f62bb
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt601_full_cpu.png
new file mode 100644
index 000000000..33229f55f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt601_video.png
new file mode 100644
index 000000000..35296fc03
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt601_video_cpu.png
new file mode 100644
index 000000000..7b198e19b
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt709_full.png
new file mode 100644
index 000000000..64e5eb6dc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt709_full_cpu.png
new file mode 100644
index 000000000..33229f55f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt709_video.png
new file mode 100644
index 000000000..9f6bdd1ea
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt709_video_cpu.png
new file mode 100644
index 000000000..74b3efccd
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc3_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_AdobeRgb_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_AdobeRgb_Full.png
deleted file mode 100644
index 5a9b205d7..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_AdobeRgb_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_AdobeRgb_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_AdobeRgb_Video.png
deleted file mode 100644
index 5a9b205d7..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_AdobeRgb_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT2020_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT2020_Full.png
deleted file mode 100644
index 7b4bf0e80..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT2020_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT2020_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT2020_Video.png
deleted file mode 100644
index c4337e393..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT2020_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT601_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT601_Full.png
deleted file mode 100644
index ffb3cb44d..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT601_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT601_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT601_Video.png
deleted file mode 100644
index b0bb04ff6..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT601_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT709_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT709_Full.png
deleted file mode 100644
index 8d0863068..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT709_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT709_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT709_Video.png
deleted file mode 100644
index 13768c1f8..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_BT709_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_adobergb_full.png
new file mode 100644
index 000000000..6efa73ea2
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_adobergb_full_cpu.png
new file mode 100644
index 000000000..12685832f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_adobergb_video.png
new file mode 100644
index 000000000..6efa73ea2
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_adobergb_video_cpu.png
new file mode 100644
index 000000000..12685832f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt2020_full.png
new file mode 100644
index 000000000..8d6a36a1c
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt2020_full_cpu.png
new file mode 100644
index 000000000..2138f7b91
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt2020_video.png
new file mode 100644
index 000000000..dab23bf0d
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt2020_video_cpu.png
new file mode 100644
index 000000000..0de0ffbc5
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt601_full.png
new file mode 100644
index 000000000..36e787cef
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt601_full_cpu.png
new file mode 100644
index 000000000..33229f55f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt601_video.png
new file mode 100644
index 000000000..01e6ab967
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt601_video_cpu.png
new file mode 100644
index 000000000..7b198e19b
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt709_full.png
new file mode 100644
index 000000000..22beff2e8
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt709_full_cpu.png
new file mode 100644
index 000000000..33229f55f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt709_video.png
new file mode 100644
index 000000000..c2af074b8
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt709_video_cpu.png
new file mode 100644
index 000000000..74b3efccd
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_imc4_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_AdobeRgb_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_AdobeRgb_Full.png
deleted file mode 100644
index 5ee0ffd50..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_AdobeRgb_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_AdobeRgb_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_AdobeRgb_Video.png
deleted file mode 100644
index 5ee0ffd50..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_AdobeRgb_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT2020_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT2020_Full.png
deleted file mode 100644
index 1ffaa49a7..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT2020_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT2020_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT2020_Video.png
deleted file mode 100644
index bbb449843..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT2020_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT601_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT601_Full.png
deleted file mode 100644
index 8d23d5f97..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT601_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT601_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT601_Video.png
deleted file mode 100644
index b93a5bbb5..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT601_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT709_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT709_Full.png
deleted file mode 100644
index fd03d6c1b..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT709_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT709_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT709_Video.png
deleted file mode 100644
index ba25be895..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_BT709_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_adobergb_full.png
new file mode 100644
index 000000000..2af7cdaa4
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_adobergb_full_cpu.png
new file mode 100644
index 000000000..12685832f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_adobergb_video.png
new file mode 100644
index 000000000..2af7cdaa4
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_adobergb_video_cpu.png
new file mode 100644
index 000000000..12685832f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt2020_full.png
new file mode 100644
index 000000000..d6d461f5d
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt2020_full_cpu.png
new file mode 100644
index 000000000..2138f7b91
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt2020_video.png
new file mode 100644
index 000000000..2a4f7d8a7
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt2020_video_cpu.png
new file mode 100644
index 000000000..0de0ffbc5
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt601_full.png
new file mode 100644
index 000000000..d291f62bb
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt601_full_cpu.png
new file mode 100644
index 000000000..33229f55f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt601_video.png
new file mode 100644
index 000000000..35296fc03
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt601_video_cpu.png
new file mode 100644
index 000000000..7b198e19b
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt709_full.png
new file mode 100644
index 000000000..64e5eb6dc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt709_full_cpu.png
new file mode 100644
index 000000000..33229f55f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt709_video.png
new file mode 100644
index 000000000..9f6bdd1ea
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt709_video_cpu.png
new file mode 100644
index 000000000..74b3efccd
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv12_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_AdobeRgb_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_AdobeRgb_Full.png
deleted file mode 100644
index 5ee0ffd50..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_AdobeRgb_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_AdobeRgb_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_AdobeRgb_Video.png
deleted file mode 100644
index 5ee0ffd50..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_AdobeRgb_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT2020_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT2020_Full.png
deleted file mode 100644
index 1ffaa49a7..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT2020_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT2020_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT2020_Video.png
deleted file mode 100644
index bbb449843..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT2020_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT601_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT601_Full.png
deleted file mode 100644
index 8d23d5f97..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT601_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT601_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT601_Video.png
deleted file mode 100644
index b93a5bbb5..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT601_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT709_Full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT709_Full.png
deleted file mode 100644
index fd03d6c1b..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT709_Full.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT709_Video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT709_Video.png
deleted file mode 100644
index ba25be895..000000000
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_BT709_Video.png
+++ /dev/null
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_adobergb_full.png
new file mode 100644
index 000000000..2af7cdaa4
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_adobergb_full_cpu.png
new file mode 100644
index 000000000..12685832f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_adobergb_video.png
new file mode 100644
index 000000000..2af7cdaa4
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_adobergb_video_cpu.png
new file mode 100644
index 000000000..12685832f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt2020_full.png
new file mode 100644
index 000000000..d6d461f5d
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt2020_full_cpu.png
new file mode 100644
index 000000000..2138f7b91
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt2020_video.png
new file mode 100644
index 000000000..2a4f7d8a7
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt2020_video_cpu.png
new file mode 100644
index 000000000..0de0ffbc5
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt601_full.png
new file mode 100644
index 000000000..d291f62bb
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt601_full_cpu.png
new file mode 100644
index 000000000..33229f55f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt601_video.png
new file mode 100644
index 000000000..35296fc03
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt601_video_cpu.png
new file mode 100644
index 000000000..7b198e19b
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt709_full.png
new file mode 100644
index 000000000..64e5eb6dc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt709_full_cpu.png
new file mode 100644
index 000000000..33229f55f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt709_video.png
new file mode 100644
index 000000000..9f6bdd1ea
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt709_video_cpu.png
new file mode 100644
index 000000000..74b3efccd
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_nv21_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_adobergb_full.png
new file mode 100644
index 000000000..71e107b8a
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_adobergb_full_cpu.png
new file mode 100644
index 000000000..1242dd25b
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_adobergb_video.png
new file mode 100644
index 000000000..71e107b8a
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_adobergb_video_cpu.png
new file mode 100644
index 000000000..1242dd25b
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt2020_full.png
new file mode 100644
index 000000000..58a7ebc92
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt2020_full_cpu.png
new file mode 100644
index 000000000..4286840f8
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt2020_video.png
new file mode 100644
index 000000000..d8756caac
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt2020_video_cpu.png
new file mode 100644
index 000000000..fb6d356f8
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt601_full.png
new file mode 100644
index 000000000..905568bf9
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt601_full_cpu.png
new file mode 100644
index 000000000..d819e478c
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt601_video.png
new file mode 100644
index 000000000..f374df207
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt601_video_cpu.png
new file mode 100644
index 000000000..2fbc2225c
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt709_full.png
new file mode 100644
index 000000000..d2ee0f8e2
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt709_full_cpu.png
new file mode 100644
index 000000000..d819e478c
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt709_video.png
new file mode 100644
index 000000000..740de7f79
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt709_video_cpu.png
new file mode 100644
index 000000000..d19223883
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p010_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_adobergb_full.png
new file mode 100644
index 000000000..ad76d393a
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_adobergb_full_cpu.png
new file mode 100644
index 000000000..68509c232
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_adobergb_video.png
new file mode 100644
index 000000000..ad76d393a
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_adobergb_video_cpu.png
new file mode 100644
index 000000000..68509c232
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt2020_full.png
new file mode 100644
index 000000000..a6e47132c
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt2020_full_cpu.png
new file mode 100644
index 000000000..2cb927d35
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt2020_video.png
new file mode 100644
index 000000000..d9760b9c9
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt2020_video_cpu.png
new file mode 100644
index 000000000..3f65f27db
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt601_full.png
new file mode 100644
index 000000000..04ae5e1cd
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt601_full_cpu.png
new file mode 100644
index 000000000..299548d61
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt601_video.png
new file mode 100644
index 000000000..9faa15fad
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt601_video_cpu.png
new file mode 100644
index 000000000..d544f8767
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt709_full.png
new file mode 100644
index 000000000..84b04ff9e
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt709_full_cpu.png
new file mode 100644
index 000000000..299548d61
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt709_video.png
new file mode 100644
index 000000000..505752c10
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt709_video_cpu.png
new file mode 100644
index 000000000..7da761925
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_p016_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_adobergb_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_adobergb_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_adobergb_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_adobergb_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt2020_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt2020_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt2020_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt2020_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt601_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt601_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt601_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt601_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt709_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt709_full_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt709_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt709_video_cpu.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgba8888_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_adobergb_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_adobergb_full_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_adobergb_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_adobergb_video_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt2020_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt2020_full_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt2020_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt2020_video_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt601_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt601_full_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt601_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt601_video_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt709_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt709_full_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt709_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt709_video_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_rgbx8888_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_adobergb_full.png
new file mode 100644
index 000000000..c5243c441
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_adobergb_full_cpu.png
new file mode 100644
index 000000000..6a0cb7dd8
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_adobergb_video.png
new file mode 100644
index 000000000..c5243c441
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_adobergb_video_cpu.png
new file mode 100644
index 000000000..6a0cb7dd8
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt2020_full.png
new file mode 100644
index 000000000..0a9874943
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt2020_full_cpu.png
new file mode 100644
index 000000000..126744377
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt2020_video.png
new file mode 100644
index 000000000..7318c1e99
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt2020_video_cpu.png
new file mode 100644
index 000000000..908b28c2f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt601_full.png
new file mode 100644
index 000000000..68789bef5
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt601_full_cpu.png
new file mode 100644
index 000000000..7ef68b58b
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt601_video.png
new file mode 100644
index 000000000..bfd6396ec
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt601_video_cpu.png
new file mode 100644
index 000000000..03e337184
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt709_full.png
new file mode 100644
index 000000000..704c59cf9
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt709_full_cpu.png
new file mode 100644
index 000000000..7ef68b58b
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt709_video.png
new file mode 100644
index 000000000..d9ad9c239
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt709_video_cpu.png
new file mode 100644
index 000000000..0030ce5bc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_uyvy_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_adobergb_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_adobergb_full_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_adobergb_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_adobergb_video_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt2020_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt2020_full_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt2020_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt2020_video_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt601_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt601_full_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt601_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt601_video_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt709_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt709_full_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt709_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt709_video_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xbgr8888_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_adobergb_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_adobergb_full_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_adobergb_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_adobergb_video_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt2020_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt2020_full_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt2020_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt2020_video_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt601_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt601_full_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt601_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt601_video_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt709_full.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt709_full_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt709_video.png
new file mode 100644
index 000000000..682e999cc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt709_video_cpu.png
new file mode 100644
index 000000000..7d1d73109
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_xrgb8888_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_adobergb_full.png
new file mode 100644
index 000000000..b1dc781f2
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_adobergb_full_cpu.png
new file mode 100644
index 000000000..584ad4c25
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_adobergb_video.png
new file mode 100644
index 000000000..b1dc781f2
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_adobergb_video_cpu.png
new file mode 100644
index 000000000..584ad4c25
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt2020_full.png
new file mode 100644
index 000000000..619ee36a4
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt2020_full_cpu.png
new file mode 100644
index 000000000..16445be0c
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt2020_video.png
new file mode 100644
index 000000000..881f6be33
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt2020_video_cpu.png
new file mode 100644
index 000000000..9c7e87238
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt601_full.png
new file mode 100644
index 000000000..b1d3111df
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt601_full_cpu.png
new file mode 100644
index 000000000..1a3025e2d
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt601_video.png
new file mode 100644
index 000000000..e4d1ce940
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt601_video_cpu.png
new file mode 100644
index 000000000..614b71e3e
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt709_full.png
new file mode 100644
index 000000000..b1d3111df
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt709_full_cpu.png
new file mode 100644
index 000000000..1a3025e2d
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt709_video.png
new file mode 100644
index 000000000..df8df3edd
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt709_video_cpu.png
new file mode 100644
index 000000000..d6bed0482
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y16_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_adobergb_full.png
new file mode 100644
index 000000000..130a3b541
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_adobergb_full_cpu.png
new file mode 100644
index 000000000..61d2c6ca0
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_adobergb_video.png
new file mode 100644
index 000000000..130a3b541
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_adobergb_video_cpu.png
new file mode 100644
index 000000000..61d2c6ca0
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt2020_full.png
new file mode 100644
index 000000000..21ed2218a
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt2020_full_cpu.png
new file mode 100644
index 000000000..188efe1d9
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt2020_video.png
new file mode 100644
index 000000000..f60f53d02
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt2020_video_cpu.png
new file mode 100644
index 000000000..512c467b6
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt601_full.png
new file mode 100644
index 000000000..df59b71e7
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt601_full_cpu.png
new file mode 100644
index 000000000..bfc57d849
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt601_video.png
new file mode 100644
index 000000000..dbca71c70
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt601_video_cpu.png
new file mode 100644
index 000000000..52f4b0223
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt709_full.png
new file mode 100644
index 000000000..df59b71e7
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt709_full_cpu.png
new file mode 100644
index 000000000..bfc57d849
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt709_video.png
new file mode 100644
index 000000000..3479bb890
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt709_video_cpu.png
new file mode 100644
index 000000000..b3a488e2f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_y8_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_adobergb_full.png
new file mode 100644
index 000000000..20b24da65
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_adobergb_video.png
new file mode 100644
index 000000000..20b24da65
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt2020_full.png
new file mode 100644
index 000000000..b96379a0b
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt2020_video.png
new file mode 100644
index 000000000..c77645b59
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt601_full.png
new file mode 100644
index 000000000..a1b8b62da
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt601_video.png
new file mode 100644
index 000000000..7a69f6afa
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt709_full.png
new file mode 100644
index 000000000..644b083fe
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt709_video.png
new file mode 100644
index 000000000..d4e9debd7
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p10_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_adobergb_full.png
new file mode 100644
index 000000000..2af7cdaa4
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_adobergb_full_cpu.png
new file mode 100644
index 000000000..12685832f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_adobergb_video.png
new file mode 100644
index 000000000..2af7cdaa4
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_adobergb_video_cpu.png
new file mode 100644
index 000000000..12685832f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt2020_full.png
new file mode 100644
index 000000000..d6d461f5d
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt2020_full_cpu.png
new file mode 100644
index 000000000..2138f7b91
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt2020_video.png
new file mode 100644
index 000000000..2a4f7d8a7
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt2020_video_cpu.png
new file mode 100644
index 000000000..0de0ffbc5
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt601_full.png
new file mode 100644
index 000000000..d291f62bb
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt601_full_cpu.png
new file mode 100644
index 000000000..33229f55f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt601_video.png
new file mode 100644
index 000000000..35296fc03
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt601_video_cpu.png
new file mode 100644
index 000000000..7b198e19b
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt709_full.png
new file mode 100644
index 000000000..64e5eb6dc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt709_full_cpu.png
new file mode 100644
index 000000000..33229f55f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt709_video.png
new file mode 100644
index 000000000..9f6bdd1ea
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt709_video_cpu.png
new file mode 100644
index 000000000..74b3efccd
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv420p_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_adobergb_full.png
new file mode 100644
index 000000000..3e255af2f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_adobergb_full_cpu.png
new file mode 100644
index 000000000..6a0cb7dd8
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_adobergb_video.png
new file mode 100644
index 000000000..3e255af2f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_adobergb_video_cpu.png
new file mode 100644
index 000000000..6a0cb7dd8
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt2020_full.png
new file mode 100644
index 000000000..74fd12726
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt2020_full_cpu.png
new file mode 100644
index 000000000..126744377
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt2020_video.png
new file mode 100644
index 000000000..e358d16d8
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt2020_video_cpu.png
new file mode 100644
index 000000000..908b28c2f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt601_full.png
new file mode 100644
index 000000000..cb1cbbd34
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt601_full_cpu.png
new file mode 100644
index 000000000..7ef68b58b
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt601_video.png
new file mode 100644
index 000000000..6dd95a078
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt601_video_cpu.png
new file mode 100644
index 000000000..03e337184
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt709_full.png
new file mode 100644
index 000000000..3e92f3695
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt709_full_cpu.png
new file mode 100644
index 000000000..7ef68b58b
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt709_video.png
new file mode 100644
index 000000000..e94891e1c
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt709_video_cpu.png
new file mode 100644
index 000000000..0030ce5bc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuv422p_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_adobergb_full.png
new file mode 100644
index 000000000..c5243c441
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_adobergb_full_cpu.png
new file mode 100644
index 000000000..6a0cb7dd8
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_adobergb_video.png
new file mode 100644
index 000000000..c5243c441
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_adobergb_video_cpu.png
new file mode 100644
index 000000000..6a0cb7dd8
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt2020_full.png
new file mode 100644
index 000000000..0a9874943
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt2020_full_cpu.png
new file mode 100644
index 000000000..126744377
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt2020_video.png
new file mode 100644
index 000000000..7318c1e99
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt2020_video_cpu.png
new file mode 100644
index 000000000..908b28c2f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt601_full.png
new file mode 100644
index 000000000..68789bef5
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt601_full_cpu.png
new file mode 100644
index 000000000..7ef68b58b
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt601_video.png
new file mode 100644
index 000000000..bfd6396ec
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt601_video_cpu.png
new file mode 100644
index 000000000..03e337184
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt709_full.png
new file mode 100644
index 000000000..704c59cf9
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt709_full_cpu.png
new file mode 100644
index 000000000..7ef68b58b
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt709_video.png
new file mode 100644
index 000000000..d9ad9c239
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt709_video_cpu.png
new file mode 100644
index 000000000..0030ce5bc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yuyv_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_adobergb_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_adobergb_full.png
new file mode 100644
index 000000000..2af7cdaa4
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_adobergb_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_adobergb_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_adobergb_full_cpu.png
new file mode 100644
index 000000000..12685832f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_adobergb_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_adobergb_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_adobergb_video.png
new file mode 100644
index 000000000..2af7cdaa4
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_adobergb_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_adobergb_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_adobergb_video_cpu.png
new file mode 100644
index 000000000..12685832f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_adobergb_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt2020_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt2020_full.png
new file mode 100644
index 000000000..d6d461f5d
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt2020_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt2020_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt2020_full_cpu.png
new file mode 100644
index 000000000..2138f7b91
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt2020_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt2020_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt2020_video.png
new file mode 100644
index 000000000..2a4f7d8a7
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt2020_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt2020_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt2020_video_cpu.png
new file mode 100644
index 000000000..0de0ffbc5
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt2020_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt601_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt601_full.png
new file mode 100644
index 000000000..d291f62bb
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt601_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt601_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt601_full_cpu.png
new file mode 100644
index 000000000..33229f55f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt601_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt601_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt601_video.png
new file mode 100644
index 000000000..35296fc03
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt601_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt601_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt601_video_cpu.png
new file mode 100644
index 000000000..7b198e19b
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt601_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt709_full.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt709_full.png
new file mode 100644
index 000000000..64e5eb6dc
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt709_full.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt709_full_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt709_full_cpu.png
new file mode 100644
index 000000000..33229f55f
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt709_full_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt709_video.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt709_video.png
new file mode 100644
index 000000000..9f6bdd1ea
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt709_video.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt709_video_cpu.png b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt709_video_cpu.png
new file mode 100644
index 000000000..74b3efccd
--- /dev/null
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/testdata/umbrellas.jpg_yv12_bt709_video_cpu.png
Binary files differ
diff --git a/tests/auto/unit/multimedia/qvideoframecolormanagement/tst_qvideoframecolormanagement.cpp b/tests/auto/unit/multimedia/qvideoframecolormanagement/tst_qvideoframecolormanagement.cpp
index 31337603e..ad221e54e 100644
--- a/tests/auto/unit/multimedia/qvideoframecolormanagement/tst_qvideoframecolormanagement.cpp
+++ b/tests/auto/unit/multimedia/qvideoframecolormanagement/tst_qvideoframecolormanagement.cpp
@@ -6,10 +6,16 @@
#include <qvideoframe.h>
#include <qvideoframeformat.h>
#include "private/qmemoryvideobuffer_p.h"
+#include "private/qvideoframeconverter_p.h"
+#include "private/qplatformmediaintegration_p.h"
+#include "private/qimagevideobuffer_p.h"
+#include "private/qvideoframe_p.h"
#include <QtGui/QColorSpace>
#include <QtGui/QImage>
#include <QtCore/QPointer>
+#include "../../../integration/shared/mediabackendutils.h"
+
QT_USE_NAMESPACE
namespace {
@@ -20,6 +26,7 @@ struct TestParams
QVideoFrameFormat::PixelFormat pixelFormat;
QVideoFrameFormat::ColorSpace colorSpace;
QVideoFrameFormat::ColorRange colorRange;
+ bool forceCpu;
};
QString toString(QVideoFrameFormat::ColorRange r)
@@ -43,39 +50,75 @@ std::vector<QVideoFrameFormat::ColorRange> colorRanges()
};
}
+const QSet s_formats{ QVideoFrameFormat::Format_ARGB8888,
+ QVideoFrameFormat::Format_ARGB8888_Premultiplied,
+ QVideoFrameFormat::Format_XRGB8888,
+ QVideoFrameFormat::Format_BGRA8888,
+ QVideoFrameFormat::Format_BGRA8888_Premultiplied,
+ QVideoFrameFormat::Format_BGRX8888,
+ QVideoFrameFormat::Format_ABGR8888,
+ QVideoFrameFormat::Format_XBGR8888,
+ QVideoFrameFormat::Format_RGBA8888,
+ QVideoFrameFormat::Format_RGBX8888,
+ QVideoFrameFormat::Format_NV12,
+ QVideoFrameFormat::Format_NV21,
+ QVideoFrameFormat::Format_IMC1,
+ QVideoFrameFormat::Format_IMC2,
+ QVideoFrameFormat::Format_IMC3,
+ QVideoFrameFormat::Format_IMC4,
+ QVideoFrameFormat::Format_AYUV,
+ QVideoFrameFormat::Format_AYUV_Premultiplied,
+ QVideoFrameFormat::Format_YV12,
+ QVideoFrameFormat::Format_YUV420P,
+ QVideoFrameFormat::Format_YUV422P,
+ QVideoFrameFormat::Format_UYVY,
+ QVideoFrameFormat::Format_YUYV,
+ QVideoFrameFormat::Format_Y8,
+ QVideoFrameFormat::Format_Y16,
+ QVideoFrameFormat::Format_P010,
+ QVideoFrameFormat::Format_P016,
+ QVideoFrameFormat::Format_YUV420P10 };
+
+bool hasCorrespondingFFmpegFormat(QVideoFrameFormat::PixelFormat format)
+{
+ return format != QVideoFrameFormat::Format_AYUV
+ && format != QVideoFrameFormat::Format_AYUV_Premultiplied;
+}
+
+bool supportsCpuConversion(QVideoFrameFormat::PixelFormat format)
+{
+ return format != QVideoFrameFormat::Format_YUV420P10;
+}
+
QString toString(QVideoFrameFormat::PixelFormat f)
{
- switch (f) {
- case QVideoFrameFormat::Format_NV12:
- return "nv12";
- case QVideoFrameFormat::Format_NV21:
- return "nv21";
- case QVideoFrameFormat::Format_IMC1:
- return "imc1";
- case QVideoFrameFormat::Format_IMC2:
- return "imc2";
- case QVideoFrameFormat::Format_IMC3:
- return "imc3";
- case QVideoFrameFormat::Format_IMC4:
- return "imc4";
- case QVideoFrameFormat::Format_YUV420P:
- return "420p";
- case QVideoFrameFormat::Format_YUV422P:
- return "422p";
- default:
- Q_ASSERT(false);
- return ""; // Not implemented yet
- }
+ return QVideoFrameFormat::pixelFormatToString(f);
}
-std::vector<QVideoFrameFormat::PixelFormat> pixelFormats()
+QSet<QVideoFrameFormat::PixelFormat> pixelFormats()
{
- return { QVideoFrameFormat::Format_NV12, QVideoFrameFormat::Format_NV21,
- QVideoFrameFormat::Format_IMC1, QVideoFrameFormat::Format_IMC2,
- QVideoFrameFormat::Format_IMC3, QVideoFrameFormat::Format_IMC4,
- QVideoFrameFormat::Format_YUV420P, QVideoFrameFormat::Format_YUV422P };
+ return s_formats;
}
+bool isSupportedPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat)
+{
+#ifdef Q_OS_ANDROID
+ // TODO: QTBUG-125238
+ switch (pixelFormat) {
+ case QVideoFrameFormat::Format_Y16:
+ case QVideoFrameFormat::Format_P010:
+ case QVideoFrameFormat::Format_P016:
+ case QVideoFrameFormat::Format_YUV420P10:
+ return false;
+ default:
+ return true;
+ }
+#else
+ return true;
+#endif
+}
+
+
QString toString(QVideoFrameFormat::ColorSpace s)
{
switch (s) {
@@ -101,11 +144,15 @@ std::vector<QVideoFrameFormat::ColorSpace> colorSpaces()
QString name(const TestParams &p)
{
- return QStringLiteral("%1_%2_%3_%4")
- .arg(p.fileName)
- .arg(toString(p.pixelFormat))
- .arg(toString(p.colorSpace))
- .arg(toString(p.colorRange));
+ QString name = QStringLiteral("%1_%2_%3_%4%5")
+ .arg(p.fileName)
+ .arg(toString(p.pixelFormat))
+ .arg(toString(p.colorSpace))
+ .arg(toString(p.colorRange))
+ .arg(p.forceCpu ? "_cpu" : "")
+ .toLower();
+ name.replace(" ", "_");
+ return name;
}
QString path(const QTemporaryDir &dir, const TestParams &param, const QString &suffix = ".png")
@@ -113,161 +160,6 @@ QString path(const QTemporaryDir &dir, const TestParams &param, const QString &s
return dir.filePath(name(param) + suffix);
}
-// clang-format off
-
-class RgbToYCbCrConverter
-{
-public:
- constexpr RgbToYCbCrConverter(double Wr, double Wg)
- : m_wr{ Wr }, m_wg{ Wg }, m_wb{ 1.0 - Wr - Wg }
- { }
-
- // Calculate Y in range [0..255]
- constexpr double Y(QRgb rgb) const
- {
- return m_wr * qRed(rgb) + m_wg * qGreen(rgb) + m_wb * qBlue(rgb);
- }
-
- // Calculate Cb in range [0..255]
- constexpr double Cb(QRgb rgb) const
- {
- return (qBlue(rgb) - Y(rgb)) / (2 * (1.0 - m_wb)) + 255.0 / 2;
- }
-
- // Calculate Cr in range [0..255]
- constexpr double Cr(QRgb rgb) const
- {
- return (qRed(rgb) - Y(rgb)) / (2 * (1.0 - m_wr)) + 255.0 / 2;
- }
-
-private:
- const double m_wr;
- const double m_wg;
- const double m_wb;
-};
-
-// clang-format on
-
-constexpr RgbToYCbCrConverter rgb2yuv_bt709_full{0.2126, 0.7152};
-
-constexpr uchar double2uchar(double v)
-{
- return static_cast<uchar>(std::clamp(v + 0.5, 0.5, 255.5));
-}
-
-constexpr void rgb2y(const QRgb &rgb, uchar *y)
-{
- const double Y = rgb2yuv_bt709_full.Y(rgb);
- y[0] = double2uchar(Y);
-}
-
-constexpr uchar rgb2u(const QRgb &rgb)
-{
- const double U = rgb2yuv_bt709_full.Cb(rgb);
- return double2uchar(U);
-}
-
-constexpr uchar rgb2v(const QRgb &rgb)
-{
- const double V = rgb2yuv_bt709_full.Cr(rgb);
- return double2uchar(V);
-}
-
-void rgb2y(const QImage &image, QVideoFrame &frame, int yPlane)
-{
- uchar *bits = frame.bits(yPlane);
- for (int row = 0; row < image.height(); ++row) {
- for (int col = 0; col < image.width(); ++col) {
- const QRgb pixel = image.pixel(col, row);
- rgb2y(pixel, bits + col);
- }
- bits += frame.bytesPerLine(yPlane);
- }
-}
-
-void rgb2uv(const QImage &image, QVideoFrame &frame)
-{
- uchar *vBits = nullptr;
- uchar *uBits = nullptr;
- int vStride = 0;
- int uStride = 0;
- int sampleIncrement = 1;
- int verticalScale = 2;
- if (frame.pixelFormat() == QVideoFrameFormat::Format_IMC1) {
- uStride = frame.bytesPerLine(2);
- vStride = frame.bytesPerLine(1);
- uBits = frame.bits(2);
- vBits = frame.bits(1);
- } else if (frame.pixelFormat() == QVideoFrameFormat::Format_IMC2) {
- uStride = frame.bytesPerLine(1);
- vStride = frame.bytesPerLine(1);
- uBits = frame.bits(1) + vStride / 2;
- vBits = frame.bits(1);
- } else if (frame.pixelFormat() == QVideoFrameFormat::Format_IMC3) {
- uStride = frame.bytesPerLine(1);
- vStride = frame.bytesPerLine(2);
- uBits = frame.bits(1);
- vBits = frame.bits(2);
- } else if (frame.pixelFormat() == QVideoFrameFormat::Format_IMC4) {
- uStride = frame.bytesPerLine(1);
- vStride = frame.bytesPerLine(1);
- uBits = frame.bits(1);
- vBits = frame.bits(1) + vStride / 2;
- } else if (frame.pixelFormat() == QVideoFrameFormat::Format_NV12) {
- uStride = frame.bytesPerLine(1);
- vStride = frame.bytesPerLine(1);
- uBits = frame.bits(1);
- vBits = frame.bits(1) + 1;
- sampleIncrement = 2;
- } else if (frame.pixelFormat() == QVideoFrameFormat::Format_NV21) {
- uStride = frame.bytesPerLine(1);
- vStride = frame.bytesPerLine(1);
- uBits = frame.bits(1) + 1;
- vBits = frame.bits(1);
- sampleIncrement = 2;
- } else if (frame.pixelFormat() == QVideoFrameFormat::Format_YUV420P) {
- uStride = frame.bytesPerLine(1);
- vStride = frame.bytesPerLine(2);
- uBits = frame.bits(1);
- vBits = frame.bits(2);
- } else if (frame.pixelFormat() == QVideoFrameFormat::Format_YUV422P) {
- uStride = frame.bytesPerLine(1);
- vStride = frame.bytesPerLine(2);
- uBits = frame.bits(1);
- vBits = frame.bits(2);
- verticalScale = 1;
- }
-
- const QImage downSampled = image.scaled(image.width() / 2, image.height() / verticalScale);
- const int width = downSampled.width();
- const int height = downSampled.height();
- {
- for (int row = 0; row < height; ++row) {
- for (int col = 0; col < width; ++col) {
- const QRgb pixel = downSampled.pixel(col, row);
- uBits[col * sampleIncrement] = rgb2u(pixel);
- vBits[col * sampleIncrement] = rgb2v(pixel);
- }
- vBits += vStride;
- uBits += uStride;
- }
- }
-}
-
-void naive_rgbToYuv(const QImage &image, QVideoFrame &frame)
-{
- Q_ASSERT(image.format() == QImage::Format_RGB32);
- Q_ASSERT(frame.planeCount() > 1);
- Q_ASSERT(image.size() == frame.size());
-
- frame.map(QVideoFrame::WriteOnly);
-
- rgb2y(image, frame, 0);
- rgb2uv(image, frame);
-
- frame.unmap();
-}
-
QVideoFrame createTestFrame(const TestParams &params, const QImage &image)
{
QVideoFrameFormat format(image.size(), params.pixelFormat);
@@ -275,24 +167,13 @@ QVideoFrame createTestFrame(const TestParams &params, const QImage &image)
format.setColorSpace(params.colorSpace);
format.setColorTransfer(QVideoFrameFormat::ColorTransfer_Unknown);
- QVideoFrame frame(format);
-
- if (params.pixelFormat == QVideoFrameFormat::Format_IMC1
- || params.pixelFormat == QVideoFrameFormat::Format_IMC2
- || params.pixelFormat == QVideoFrameFormat::Format_IMC3
- || params.pixelFormat == QVideoFrameFormat::Format_IMC4
- || params.pixelFormat == QVideoFrameFormat::Format_NV12
- || params.pixelFormat == QVideoFrameFormat::Format_NV21
- || params.pixelFormat == QVideoFrameFormat::Format_YUV420P
- || params.pixelFormat == QVideoFrameFormat::Format_YUV422P) {
- naive_rgbToYuv(image, frame);
- } else {
- qDebug() << "Not implemented yet";
- Q_ASSERT(false);
- return {};
- }
+ auto buffer = std::make_unique<QImageVideoBuffer>(image);
+ QVideoFrameFormat imageFormat = {
+ image.size(), QVideoFrameFormat::pixelFormatFromImageFormat(image.format())
+ };
- return frame;
+ QVideoFrame source = QVideoFramePrivate::createFrame(std::move(buffer), imageFormat);
+ return QPlatformMediaIntegration::instance()->convertVideoFrame(source, format);
}
struct ImageDiffReport
@@ -508,8 +389,13 @@ class tst_qvideoframecolormanagement : public QObject
{
Q_OBJECT
private slots:
+ void initTestCase()
+ {
+ if (!isFFMPEGPlatform())
+ QSKIP("This test requires the ffmpeg backend to create test frames");
+ }
- void toImage_savesWithCorrectColors_data()
+ void qImageFromVideoFrame_returnsQImageWithCorrectColors_data()
{
QTest::addColumn<QString>("fileName");
QTest::addColumn<TestParams>("params");
@@ -517,8 +403,22 @@ private slots:
for (const QVideoFrameFormat::PixelFormat pixelFormat : pixelFormats()) {
for (const QVideoFrameFormat::ColorSpace colorSpace : colorSpaces()) {
for (const QVideoFrameFormat::ColorRange colorRange : colorRanges()) {
- TestParams param{ file, pixelFormat, colorSpace, colorRange };
- QTest::addRow("%s", name(param).toLatin1().data()) << file << param;
+ for (const bool forceCpu : { false, true }) {
+
+ if (!isSupportedPixelFormat(pixelFormat))
+ continue;
+
+ if (forceCpu && !supportsCpuConversion(pixelFormat))
+ continue; // TODO: CPU Conversion not implemented
+
+ if (!hasCorrespondingFFmpegFormat(pixelFormat))
+ continue;
+
+ TestParams param{
+ file, pixelFormat, colorSpace, colorRange, forceCpu,
+ };
+ QTest::addRow("%s", name(param).toLatin1().data()) << file << param;
+ }
}
}
}
@@ -526,10 +426,11 @@ private slots:
}
// This test is a regression test for the QMultimedia display pipeline.
- // It compares rendered output (as created by toImage) against reference
- // images stored to file. The reference images were created by the test
- // itself, and does not verify correctness, just changes to render output.
- void toImage_savesWithCorrectColors()
+    // It compares rendered output (as created by qImageFromVideoFrame)
+    // against reference images stored to file. The reference images were
+    // created by the test itself, so the comparison does not verify
+    // correctness; it only detects changes to the rendered output.
+ void qImageFromVideoFrame_returnsQImageWithCorrectColors()
{
QFETCH(const QString, fileName);
QFETCH(const TestParams, params);
@@ -540,7 +441,8 @@ private slots:
const QVideoFrame frame = createTestFrame(params, templateImage);
// Act
- const QImage actual = frame.toImage();
+ const QImage actual =
+ qImageFromVideoFrame(frame, QtVideo::Rotation::None, false, false, params.forceCpu);
// Assert
constexpr int diffThreshold = 4;
@@ -554,10 +456,16 @@ private slots:
// Verify that images are similar
const double ratioAboveThreshold =
static_cast<double>(result->DiffCountAboveThreshold) / result->PixelCount;
- QCOMPARE_LT(ratioAboveThreshold, 0.01);
- QCOMPARE_LT(result->MaxDiff, 5);
+
+        // These thresholds are empirically determined to allow the tests to pass in CI.
+        // If a test fails, review the difference between the reference and actual
+        // output to determine whether it is a platform-dependent inaccuracy before
+        // adjusting the limits.
+ QCOMPARE_LT(ratioAboveThreshold, 0.01); // Fraction of pixels with larger differences
+ QCOMPARE_LT(result->MaxDiff, 6); // Maximum per-channel difference
}
+
private:
ReferenceData m_reference;
};
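Editor's sketch (not part of the patch): the assertions above compare the converted image against a stored reference using ImageDiffReport's PixelCount, DiffCountAboveThreshold and MaxDiff. The helper below is a minimal, hypothetical illustration of that per-channel comparison; the name compareImagesSketch and the exact accumulation are assumptions, not the test's actual implementation.

#include <QtGui/QImage>
#include <algorithm>

struct ImageDiffReportSketch
{
    int PixelCount = 0;
    int DiffCountAboveThreshold = 0;
    int MaxDiff = 0;
};

// Accumulates the largest per-channel difference and counts pixels whose
// difference exceeds the threshold, for two same-sized RGB32 images.
ImageDiffReportSketch compareImagesSketch(const QImage &actual, const QImage &reference,
                                          int threshold)
{
    ImageDiffReportSketch report;
    report.PixelCount = actual.width() * actual.height();
    for (int y = 0; y < actual.height(); ++y) {
        for (int x = 0; x < actual.width(); ++x) {
            const QRgb a = actual.pixel(x, y);
            const QRgb r = reference.pixel(x, y);
            const int diff = std::max({ qAbs(qRed(a) - qRed(r)),
                                        qAbs(qGreen(a) - qGreen(r)),
                                        qAbs(qBlue(a) - qBlue(r)) });
            report.MaxDiff = std::max(report.MaxDiff, diff);
            if (diff > threshold)
                ++report.DiffCountAboveThreshold;
        }
    }
    return report;
}

Under these assumptions, the test's checks correspond to QCOMPARE_LT(double(report.DiffCountAboveThreshold) / report.PixelCount, 0.01) and QCOMPARE_LT(report.MaxDiff, 6).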
diff --git a/tests/auto/unit/multimedia/qvideoframeformat/tst_qvideoframeformat.cpp b/tests/auto/unit/multimedia/qvideoframeformat/tst_qvideoframeformat.cpp
index 2cc062195..41d54de0d 100644
--- a/tests/auto/unit/multimedia/qvideoframeformat/tst_qvideoframeformat.cpp
+++ b/tests/auto/unit/multimedia/qvideoframeformat/tst_qvideoframeformat.cpp
@@ -84,7 +84,7 @@ void tst_QVideoFrameFormat::constructNull()
QCOMPARE(format.frameHeight(), -1);
QCOMPARE(format.viewport(), QRect());
QCOMPARE(format.scanLineDirection(), QVideoFrameFormat::TopToBottom);
- QCOMPARE(format.frameRate(), 0.0);
+ QCOMPARE(format.streamFrameRate(), 0.0);
QCOMPARE(format.colorSpace(), QVideoFrameFormat::ColorSpace_Undefined);
}
@@ -132,7 +132,7 @@ void tst_QVideoFrameFormat::construct()
QCOMPARE(format.isValid(), valid);
QCOMPARE(format.viewport(), viewport);
QCOMPARE(format.scanLineDirection(), QVideoFrameFormat::TopToBottom);
- QCOMPARE(format.frameRate(), 0.0);
+ QCOMPARE(format.streamFrameRate(), 0.0);
QCOMPARE(format.colorSpace(), QVideoFrameFormat::ColorSpace_Undefined);
}
@@ -335,9 +335,9 @@ void tst_QVideoFrameFormat::frameRate()
QVideoFrameFormat format(QSize(64, 64), QVideoFrameFormat::Format_XRGB8888);
- format.setFrameRate(frameRate);
+ format.setStreamFrameRate(frameRate);
- QCOMPARE(format.frameRate(), frameRate);
+ QCOMPARE(format.streamFrameRate(), frameRate);
}
void tst_QVideoFrameFormat::compare()
@@ -395,13 +395,13 @@ void tst_QVideoFrameFormat::compare()
QCOMPARE(format1 == format2, true);
QCOMPARE(format1 != format2, false);
- format1.setFrameRate(7.5);
+ format1.setStreamFrameRate(7.5);
// Not equal frame rate differs.
QCOMPARE(format1 == format2, false);
QCOMPARE(format1 != format2, true);
- format2.setFrameRate(qreal(7.50001));
+ format2.setStreamFrameRate(qreal(7.50001));
// Equal.
QCOMPARE(format1 == format2, true);
@@ -506,7 +506,7 @@ void tst_QVideoFrameFormat::copyAllParameters()
original.setScanLineDirection(QVideoFrameFormat::BottomToTop);
original.setViewport(QRect(0, 0, 1024, 1024));
- original.setFrameRate(qreal(15.0));
+ original.setStreamFrameRate(qreal(15.0));
original.setColorSpace(QVideoFrameFormat::ColorSpace_BT709);
/* Copy the original instance to copy and verify if both the instances
@@ -517,7 +517,7 @@ void tst_QVideoFrameFormat::copyAllParameters()
QCOMPARE(copy.frameSize(), QSize(1024, 768));
QCOMPARE(copy.scanLineDirection(), QVideoFrameFormat::BottomToTop);
QCOMPARE(copy.viewport(), QRect(0, 0, 1024, 1024));
- QCOMPARE(copy.frameRate(), qreal(15.0));
+ QCOMPARE(copy.streamFrameRate(), qreal(15.0));
QCOMPARE(copy.colorSpace(), QVideoFrameFormat::ColorSpace_BT709);
/* Verify if both the instances are eqaul */
@@ -533,7 +533,7 @@ void tst_QVideoFrameFormat::assignAllParameters()
QSize(64, 64), QVideoFrameFormat::Format_AYUV);
copy.setScanLineDirection(QVideoFrameFormat::TopToBottom);
copy.setViewport(QRect(0, 0, 640, 320));
- copy.setFrameRate(qreal(7.5));
+ copy.setStreamFrameRate(qreal(7.5));
copy.setColorSpace(QVideoFrameFormat::ColorSpace_BT601);
/* Create the instance and set all the parameters. */
@@ -541,7 +541,7 @@ void tst_QVideoFrameFormat::assignAllParameters()
QSize(1024, 768), QVideoFrameFormat::Format_ARGB8888);
original.setScanLineDirection(QVideoFrameFormat::BottomToTop);
original.setViewport(QRect(0, 0, 1024, 1024));
- original.setFrameRate(qreal(15.0));
+ original.setStreamFrameRate(qreal(15.0));
original.setColorSpace(QVideoFrameFormat::ColorSpace_BT709);
/* Assign the original instance to copy and verify if both the instancess
@@ -552,7 +552,7 @@ void tst_QVideoFrameFormat::assignAllParameters()
QCOMPARE(copy.frameSize(), QSize(1024, 768));
QCOMPARE(copy.scanLineDirection(), QVideoFrameFormat::BottomToTop);
QCOMPARE(copy.viewport(), QRect(0, 0, 1024, 1024));
- QCOMPARE(copy.frameRate(), qreal(15.0));
+ QCOMPARE(copy.streamFrameRate(), qreal(15.0));
QCOMPARE(copy.colorSpace(), QVideoFrameFormat::ColorSpace_BT709);
/* Verify if both the instances are eqaul */
diff --git a/tests/auto/unit/multimedia/qwavedecoder/tst_qwavedecoder.cpp b/tests/auto/unit/multimedia/qwavedecoder/tst_qwavedecoder.cpp
index c78e9cfb8..079f98075 100644
--- a/tests/auto/unit/multimedia/qwavedecoder/tst_qwavedecoder.cpp
+++ b/tests/auto/unit/multimedia/qwavedecoder/tst_qwavedecoder.cpp
@@ -176,8 +176,8 @@ void tst_QWaveDecoder::http()
QNetworkReply *reply = nam.get(QNetworkRequest(QUrl::fromLocalFile(file)));
QWaveDecoder waveDecoder(reply);
- QSignalSpy validFormatSpy(&waveDecoder, SIGNAL(formatKnown()));
- QSignalSpy parsingErrorSpy(&waveDecoder, SIGNAL(parsingError()));
+ QSignalSpy validFormatSpy(&waveDecoder, &QWaveDecoder::formatKnown);
+ QSignalSpy parsingErrorSpy(&waveDecoder, &QWaveDecoder::parsingError);
QVERIFY(waveDecoder.open(QIODeviceBase::ReadOnly));
@@ -227,7 +227,7 @@ void tst_QWaveDecoder::readAllAtOnce()
QVERIFY(stream.isOpen());
QWaveDecoder waveDecoder(&stream);
- QSignalSpy validFormatSpy(&waveDecoder, SIGNAL(formatKnown()));
+ QSignalSpy validFormatSpy(&waveDecoder, &QWaveDecoder::formatKnown);
QVERIFY(waveDecoder.open(QIODeviceBase::ReadOnly));
@@ -255,7 +255,7 @@ void tst_QWaveDecoder::readPerByte()
QVERIFY(stream.isOpen());
QWaveDecoder waveDecoder(&stream);
- QSignalSpy validFormatSpy(&waveDecoder, SIGNAL(formatKnown()));
+ QSignalSpy validFormatSpy(&waveDecoder, &QWaveDecoder::formatKnown);
QVERIFY(waveDecoder.open(QIODeviceBase::ReadOnly));
diff --git a/tests/auto/unit/multimediawidgets/qcamerawidgets/tst_qcamerawidgets.cpp b/tests/auto/unit/multimediawidgets/qcamerawidgets/tst_qcamerawidgets.cpp
index 743eda276..6c31a4b66 100644
--- a/tests/auto/unit/multimediawidgets/qcamerawidgets/tst_qcamerawidgets.cpp
+++ b/tests/auto/unit/multimediawidgets/qcamerawidgets/tst_qcamerawidgets.cpp
@@ -51,7 +51,7 @@ void tst_QCameraWidgets::testCameraEncodingProperyChange()
session.setCamera(&camera);
session.setImageCapture(&imageCapture);
- QSignalSpy activeChangedSignal(&camera, SIGNAL(activeChanged(bool)));
+ QSignalSpy activeChangedSignal(&camera, &QCamera::activeChanged);
camera.start();
QCOMPARE(camera.isActive(), true);
diff --git a/tests/auto/unit/multimediawidgets/qgraphicsvideoitem/tst_qgraphicsvideoitem.cpp b/tests/auto/unit/multimediawidgets/qgraphicsvideoitem/tst_qgraphicsvideoitem.cpp
index 2fba9daa6..0cda11de1 100644
--- a/tests/auto/unit/multimediawidgets/qgraphicsvideoitem/tst_qgraphicsvideoitem.cpp
+++ b/tests/auto/unit/multimediawidgets/qgraphicsvideoitem/tst_qgraphicsvideoitem.cpp
@@ -214,7 +214,7 @@ void tst_QGraphicsVideoItem::nativeSize()
QCOMPARE(item.nativeSize(), QSizeF());
- QSignalSpy spy(&item, SIGNAL(nativeSizeChanged(QSizeF)));
+ QSignalSpy spy(&item, &QGraphicsVideoItem::nativeSizeChanged);
QVideoFrameFormat format(frameSize, QVideoFrameFormat::Format_ARGB8888);
format.setViewport(viewport);
@@ -375,7 +375,7 @@ void tst_QGraphicsVideoItem::paint()
QVideoFrameFormat format(QSize(2, 2), QVideoFrameFormat::Format_XRGB8888);
QVideoFrame frame(format);
- frame.map(QVideoFrame::WriteOnly);
+ frame.map(QtVideo::MapMode::WriteOnly);
memcpy(frame.bits(0), rgb32ImageData, frame.mappedBytes(0));
frame.unmap();
diff --git a/tests/auto/unit/multimediawidgets/qvideowidget/tst_qvideowidget.cpp b/tests/auto/unit/multimediawidgets/qvideowidget/tst_qvideowidget.cpp
index de1c5a4e3..b999020a4 100644
--- a/tests/auto/unit/multimediawidgets/qvideowidget/tst_qvideowidget.cpp
+++ b/tests/auto/unit/multimediawidgets/qvideowidget/tst_qvideowidget.cpp
@@ -184,7 +184,7 @@ void tst_QVideoWidget::fullScreen()
Qt::WindowFlags windowFlags = widget.windowFlags();
- QSignalSpy spy(&widget, SIGNAL(fullScreenChanged(bool)));
+ QSignalSpy spy(&widget, &QVideoWidget::fullScreenChanged);
// Test showing full screen with setFullScreen(true).
widget.setFullScreen(true);
@@ -253,7 +253,7 @@ void tst_QVideoWidget::paint()
QVideoFrameFormat format(QSize(2, 2), QVideoFrameFormat::Format_XRGB8888);
QVideoFrame frame(format);
- QVERIFY(frame.map(QVideoFrame::ReadWrite));
+ QVERIFY(frame.map(QtVideo::MapMode::ReadWrite));
uchar *data = frame.bits(0);
memcpy(data, rgb32ImageData, sizeof(rgb32ImageData));
frame.unmap();
diff --git a/tests/manual/CMakeLists.txt b/tests/manual/CMakeLists.txt
index 08e38e893..b37b86192 100644
--- a/tests/manual/CMakeLists.txt
+++ b/tests/manual/CMakeLists.txt
@@ -3,9 +3,15 @@
add_subdirectory(audiodecoder)
add_subdirectory(devices)
+add_subdirectory(mediaformats)
add_subdirectory(minimal-player)
add_subdirectory(wasm)
+if(QT_FEATURE_gstreamer)
+ add_subdirectory(gstreamer-custom-camera)
+ add_subdirectory(gstreamer-custom-camera-rtp)
+endif()
+
if(TARGET Qt::Quick)
add_subdirectory(qml-minimal-camera)
add_subdirectory(qml-minimal-player)
diff --git a/tests/manual/gstreamer-custom-camera-rtp/CMakeLists.txt b/tests/manual/gstreamer-custom-camera-rtp/CMakeLists.txt
new file mode 100644
index 000000000..58608db5c
--- /dev/null
+++ b/tests/manual/gstreamer-custom-camera-rtp/CMakeLists.txt
@@ -0,0 +1,39 @@
+# Copyright (C) 2024 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+cmake_minimum_required(VERSION 3.16)
+project(gstreamer-custom-camera-rtp LANGUAGES CXX)
+
+set(CMAKE_AUTOMOC ON)
+
+if(NOT DEFINED INSTALL_EXAMPLESDIR)
+ set(INSTALL_EXAMPLESDIR "examples")
+endif()
+
+set(INSTALL_EXAMPLEDIR "${INSTALL_EXAMPLESDIR}/multimedia/gstreamer-custom-camera-rtp")
+
+find_package(Qt6 REQUIRED COMPONENTS Widgets Multimedia MultimediaWidgets QGstreamerMediaPluginPrivate)
+
+qt_add_executable( gstreamer-custom-camera-rtp WIN32 MACOSX_BUNDLE
+ gstreamer-custom-camera-rtp.cpp
+ Info.plist.in
+)
+
+target_link_libraries( gstreamer-custom-camera-rtp PUBLIC
+ Qt::Widgets
+ Qt::Multimedia
+ Qt::MultimediaPrivate
+ Qt::MultimediaWidgets
+
+ Qt::QGstreamerMediaPluginPrivate
+)
+
+install(TARGETS gstreamer-custom-camera-rtp
+ RUNTIME DESTINATION "${INSTALL_EXAMPLEDIR}"
+ BUNDLE DESTINATION "${INSTALL_EXAMPLEDIR}"
+ LIBRARY DESTINATION "${INSTALL_EXAMPLEDIR}"
+)
+
+set_target_properties( gstreamer-custom-camera-rtp PROPERTIES
+ MACOSX_BUNDLE_INFO_PLIST ${CMAKE_CURRENT_SOURCE_DIR}/Info.plist.in
+)
diff --git a/tests/manual/gstreamer-custom-camera-rtp/Info.plist.in b/tests/manual/gstreamer-custom-camera-rtp/Info.plist.in
new file mode 100644
index 000000000..46a9ecf2d
--- /dev/null
+++ b/tests/manual/gstreamer-custom-camera-rtp/Info.plist.in
@@ -0,0 +1,46 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+
+ <key>CFBundleName</key>
+ <string>${MACOSX_BUNDLE_BUNDLE_NAME}</string>
+ <key>CFBundleIdentifier</key>
+ <string>${MACOSX_BUNDLE_GUI_IDENTIFIER}</string>
+ <key>CFBundleExecutable</key>
+ <string>${MACOSX_BUNDLE_EXECUTABLE_NAME}</string>
+
+ <key>CFBundleVersion</key>
+ <string>${MACOSX_BUNDLE_BUNDLE_VERSION}</string>
+ <key>CFBundleShortVersionString</key>
+ <string>${MACOSX_BUNDLE_SHORT_VERSION_STRING}</string>
+ <key>CFBundleLongVersionString</key>
+ <string>${MACOSX_BUNDLE_LONG_VERSION_STRING}</string>
+
+ <key>LSMinimumSystemVersion</key>
+ <string>${CMAKE_OSX_DEPLOYMENT_TARGET}</string>
+
+ <key>CFBundleGetInfoString</key>
+ <string>${MACOSX_BUNDLE_INFO_STRING}</string>
+ <key>NSHumanReadableCopyright</key>
+ <string>${MACOSX_BUNDLE_COPYRIGHT}</string>
+
+ <key>CFBundleIconFile</key>
+ <string>${MACOSX_BUNDLE_ICON_FILE}</string>
+
+ <key>CFBundleDevelopmentRegion</key>
+ <string>English</string>
+
+ <key>NSCameraUsageDescription</key>
+ <string>Qt Multimedia Example</string>
+ <key>NSMicrophoneUsageDescription</key>
+ <string>Qt Multimedia Example</string>
+
+ <key>NSSupportsAutomaticGraphicsSwitching</key>
+ <true/>
+</dict>
+</plist>
diff --git a/tests/manual/gstreamer-custom-camera-rtp/gstreamer-custom-camera-rtp.cpp b/tests/manual/gstreamer-custom-camera-rtp/gstreamer-custom-camera-rtp.cpp
new file mode 100644
index 000000000..b61df7ce9
--- /dev/null
+++ b/tests/manual/gstreamer-custom-camera-rtp/gstreamer-custom-camera-rtp.cpp
@@ -0,0 +1,57 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only
+
+#include <QtMultimedia/QAudioOutput>
+#include <QtMultimedia/private/qgstreamer_platformspecificinterface_p.h>
+#include <QtMultimediaWidgets/QtMultimediaWidgets>
+#include <QtWidgets/QApplication>
+#include <QtCore/QCommandLineParser>
+
+#include <QtQGstreamerMediaPlugin/private/qgst_p.h>
+#include <QtQGstreamerMediaPlugin/private/qgstpipeline_p.h>
+
+using namespace std::chrono_literals;
+using namespace Qt::Literals;
+
+struct GStreamerRtpStreamSender
+{
+ GStreamerRtpStreamSender()
+ {
+ element = QGstElement::createFromPipelineDescription(
+ "videotestsrc ! jpegenc ! rtpjpegpay ! udpsink host=127.0.0.1 port=50004"_ba);
+
+ pipeline.add(element);
+ pipeline.setStateSync(GstState::GST_STATE_PLAYING);
+ pipeline.dumpGraph("sender");
+ }
+
+ ~GStreamerRtpStreamSender() { pipeline.setStateSync(GstState::GST_STATE_NULL); }
+
+ QGstPipeline pipeline = QGstPipeline::create("UdpSend");
+ QGstElement element;
+};
+
+int main(int argc, char **argv)
+{
+ qputenv("QT_MEDIA_BACKEND", "gstreamer");
+
+ gst_init(&argc, &argv);
+ GStreamerRtpStreamSender sender;
+
+ QApplication app(argc, argv);
+
+ QByteArray pipelineString =
+ R"(udpsrc port=50004 ! application/x-rtp,encoding=JPEG,payload=26 ! rtpjpegdepay ! jpegdec ! videoconvert)"_ba;
+ QVideoWidget wid;
+ wid.show();
+
+ QMediaCaptureSession session;
+ session.setVideoSink(wid.videoSink());
+
+ QCamera *cam = QGStreamerPlatformSpecificInterface::instance()->makeCustomGStreamerCamera(
+ pipelineString, &session);
+ session.setCamera(cam);
+ cam->start();
+
+ return QApplication::exec();
+}
diff --git a/tests/manual/gstreamer-custom-camera/CMakeLists.txt b/tests/manual/gstreamer-custom-camera/CMakeLists.txt
new file mode 100644
index 000000000..f161a0630
--- /dev/null
+++ b/tests/manual/gstreamer-custom-camera/CMakeLists.txt
@@ -0,0 +1,37 @@
+# Copyright (C) 2024 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+cmake_minimum_required(VERSION 3.16)
+project(gstreamer-custom-camera LANGUAGES CXX)
+
+set(CMAKE_AUTOMOC ON)
+
+if(NOT DEFINED INSTALL_EXAMPLESDIR)
+ set(INSTALL_EXAMPLESDIR "examples")
+endif()
+
+set(INSTALL_EXAMPLEDIR "${INSTALL_EXAMPLESDIR}/multimedia/gstreamer-custom-camera")
+
+find_package(Qt6 REQUIRED COMPONENTS Widgets Multimedia MultimediaWidgets)
+
+qt_add_executable( gstreamer-custom-camera WIN32 MACOSX_BUNDLE
+ gstreamer-custom-camera.cpp
+ Info.plist.in
+)
+
+target_link_libraries( gstreamer-custom-camera PUBLIC
+ Qt::Widgets
+ Qt::Multimedia
+ Qt::MultimediaPrivate
+ Qt::MultimediaWidgets
+)
+
+install(TARGETS gstreamer-custom-camera
+ RUNTIME DESTINATION "${INSTALL_EXAMPLEDIR}"
+ BUNDLE DESTINATION "${INSTALL_EXAMPLEDIR}"
+ LIBRARY DESTINATION "${INSTALL_EXAMPLEDIR}"
+)
+
+set_target_properties( gstreamer-custom-camera PROPERTIES
+ MACOSX_BUNDLE_INFO_PLIST ${CMAKE_CURRENT_SOURCE_DIR}/Info.plist.in
+)
diff --git a/tests/manual/gstreamer-custom-camera/Info.plist.in b/tests/manual/gstreamer-custom-camera/Info.plist.in
new file mode 100644
index 000000000..46a9ecf2d
--- /dev/null
+++ b/tests/manual/gstreamer-custom-camera/Info.plist.in
@@ -0,0 +1,46 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+
+ <key>CFBundleName</key>
+ <string>${MACOSX_BUNDLE_BUNDLE_NAME}</string>
+ <key>CFBundleIdentifier</key>
+ <string>${MACOSX_BUNDLE_GUI_IDENTIFIER}</string>
+ <key>CFBundleExecutable</key>
+ <string>${MACOSX_BUNDLE_EXECUTABLE_NAME}</string>
+
+ <key>CFBundleVersion</key>
+ <string>${MACOSX_BUNDLE_BUNDLE_VERSION}</string>
+ <key>CFBundleShortVersionString</key>
+ <string>${MACOSX_BUNDLE_SHORT_VERSION_STRING}</string>
+ <key>CFBundleLongVersionString</key>
+ <string>${MACOSX_BUNDLE_LONG_VERSION_STRING}</string>
+
+ <key>LSMinimumSystemVersion</key>
+ <string>${CMAKE_OSX_DEPLOYMENT_TARGET}</string>
+
+ <key>CFBundleGetInfoString</key>
+ <string>${MACOSX_BUNDLE_INFO_STRING}</string>
+ <key>NSHumanReadableCopyright</key>
+ <string>${MACOSX_BUNDLE_COPYRIGHT}</string>
+
+ <key>CFBundleIconFile</key>
+ <string>${MACOSX_BUNDLE_ICON_FILE}</string>
+
+ <key>CFBundleDevelopmentRegion</key>
+ <string>English</string>
+
+ <key>NSCameraUsageDescription</key>
+ <string>Qt Multimedia Example</string>
+ <key>NSMicrophoneUsageDescription</key>
+ <string>Qt Multimedia Example</string>
+
+ <key>NSSupportsAutomaticGraphicsSwitching</key>
+ <true/>
+</dict>
+</plist>
diff --git a/tests/manual/gstreamer-custom-camera/gstreamer-custom-camera.cpp b/tests/manual/gstreamer-custom-camera/gstreamer-custom-camera.cpp
new file mode 100644
index 000000000..dbd729d15
--- /dev/null
+++ b/tests/manual/gstreamer-custom-camera/gstreamer-custom-camera.cpp
@@ -0,0 +1,50 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only
+
+#include <QtMultimedia/QAudioOutput>
+#include <QtMultimedia/private/qgstreamer_platformspecificinterface_p.h>
+#include <QtMultimediaWidgets/QtMultimediaWidgets>
+#include <QtWidgets/QApplication>
+#include <QtCore/QCommandLineParser>
+
+using namespace std::chrono_literals;
+using namespace Qt::Literals;
+
+int main(int argc, char **argv)
+{
+ qputenv("QT_MEDIA_BACKEND", "gstreamer");
+
+ QApplication app(argc, argv);
+
+ QCommandLineParser parser;
+ parser.setApplicationDescription("GStreamer Custom Camera");
+ parser.addHelpOption();
+ parser.addVersionOption();
+ parser.addPositionalArgument(
+ "pipeline", "Pipeline string, e.g. `videotestsrc pattern=smpte-rp-219 is-live=true`");
+
+ parser.process(app);
+
+ QByteArray pipelineString;
+
+ if (parser.positionalArguments().isEmpty()) {
+ // pipelineString = "videotestsrc pattern=smpte-rp-219 is-live=true";
+ pipelineString = "videotestsrc is-live=true ! gamma gamma=2.0";
+ } else {
+ pipelineString = parser.positionalArguments()[0].toLatin1();
+ }
+
+ QVideoWidget wid;
+
+ QMediaCaptureSession session;
+ session.setVideoSink(wid.videoSink());
+
+ QCamera *cam = QGStreamerPlatformSpecificInterface::instance()->makeCustomGStreamerCamera(
+ pipelineString, &session);
+ session.setCamera(cam);
+ cam->start();
+
+ wid.show();
+
+ return QApplication::exec();
+}
diff --git a/tests/manual/mediaformats/CMakeLists.txt b/tests/manual/mediaformats/CMakeLists.txt
new file mode 100644
index 000000000..ed58e628e
--- /dev/null
+++ b/tests/manual/mediaformats/CMakeLists.txt
@@ -0,0 +1,37 @@
+# Copyright (C) 2024 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+cmake_minimum_required(VERSION 3.16)
+project(mediaformats LANGUAGES CXX)
+
+if(ANDROID OR IOS)
+ message(FATAL_ERROR "This is a commandline tool that is not supported on mobile platforms")
+endif()
+
+if(NOT DEFINED INSTALL_EXAMPLESDIR)
+ set(INSTALL_EXAMPLESDIR "examples")
+endif()
+
+set(INSTALL_EXAMPLEDIR "${INSTALL_EXAMPLESDIR}/multimedia/mediaformats")
+
+find_package(Qt6 REQUIRED COMPONENTS Core Multimedia)
+
+qt_add_executable(mediaformats
+ main.cpp
+)
+
+set_target_properties(mediaformats PROPERTIES
+ WIN32_EXECUTABLE FALSE
+ MACOSX_BUNDLE TRUE
+)
+
+target_link_libraries(mediaformats PUBLIC
+ Qt::Core
+ Qt::Multimedia
+)
+
+install(TARGETS mediaformats
+ RUNTIME DESTINATION "${INSTALL_EXAMPLEDIR}"
+ BUNDLE DESTINATION "${INSTALL_EXAMPLEDIR}"
+ LIBRARY DESTINATION "${INSTALL_EXAMPLEDIR}"
+)
diff --git a/tests/manual/mediaformats/main.cpp b/tests/manual/mediaformats/main.cpp
new file mode 100644
index 000000000..57d0ad831
--- /dev/null
+++ b/tests/manual/mediaformats/main.cpp
@@ -0,0 +1,87 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only
+
+#include <QtCore/QCoreApplication>
+#include <QtCore/QMimeType>
+#include <QtCore/QTextStream>
+#include <QtMultimedia/QMediaFormat>
+
+#include <stdio.h>
+
+namespace {
+
+void printFileFormatEntry(QMediaFormat::FileFormat format, QTextStream &out)
+{
+ out << " " << QMediaFormat::fileFormatName(format) << " - "
+ << QMediaFormat::fileFormatDescription(format);
+
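+ // Print the MIME type name and its known file suffixes when the format maps to a valid MIME type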
+ QMimeType mimeType = QMediaFormat(format).mimeType();
+ if (mimeType.isValid()) {
+ out << " (" << mimeType.name() << ")\n";
+ out << " " << mimeType.suffixes().join(", ") << "\n";
+ }
+}
+
+void printCodecEntry(QMediaFormat::AudioCodec codec, QTextStream &out)
+{
+ out << " " << QMediaFormat::audioCodecName(codec) << " - "
+ << QMediaFormat::audioCodecDescription(codec) << "\n";
+}
+
+void printCodecEntry(QMediaFormat::VideoCodec codec, QTextStream &out)
+{
+ out << " " << QMediaFormat::videoCodecName(codec) << " - "
+ << QMediaFormat::videoCodecDescription(codec) << "\n";
+}
+
+void printFileFormats(QTextStream &out)
+{
+ out << "Supported file formats for decoding: \n";
+ for (QMediaFormat::FileFormat format :
+ QMediaFormat().supportedFileFormats(QMediaFormat::Decode))
+ printFileFormatEntry(format, out);
+
+ out << "\nSupported file formats for encoding: \n";
+ for (QMediaFormat::FileFormat format :
+ QMediaFormat().supportedFileFormats(QMediaFormat::Encode))
+ printFileFormatEntry(format, out);
+}
+
+void printAudioCodecs(QTextStream &out)
+{
+ out << "Supported audio codecs for decoding: \n";
+ for (QMediaFormat::AudioCodec codec : QMediaFormat().supportedAudioCodecs(QMediaFormat::Decode))
+ printCodecEntry(codec, out);
+
+ out << "\nSupported audio codecs for encoding: \n";
+ for (QMediaFormat::AudioCodec codec : QMediaFormat().supportedAudioCodecs(QMediaFormat::Encode))
+ printCodecEntry(codec, out);
+}
+
+void printVideoCodecs(QTextStream &out)
+{
+ out << "Supported video codecs for decoding: \n";
+ for (QMediaFormat::VideoCodec codec : QMediaFormat().supportedVideoCodecs(QMediaFormat::Decode))
+ printCodecEntry(codec, out);
+
+ out << "\nSupported video codecs for encoding: \n";
+ for (QMediaFormat::VideoCodec codec : QMediaFormat().supportedVideoCodecs(QMediaFormat::Encode))
+ printCodecEntry(codec, out);
+}
+
+} // namespace
+
+int main(int argc, char *argv[])
+{
+ QCoreApplication app(argc, argv); // QtMultimedia needs an application singleton
+
+ QTextStream out(stdout);
+
+ printFileFormats(out);
+ out << "\n";
+ printAudioCodecs(out);
+ out << "\n";
+ printVideoCodecs(out);
+
+ return 0;
+}
diff --git a/tests/manual/minimal-player/minimal-player.cpp b/tests/manual/minimal-player/minimal-player.cpp
index 70512dff3..17a11b050 100644
--- a/tests/manual/minimal-player/minimal-player.cpp
+++ b/tests/manual/minimal-player/minimal-player.cpp
@@ -42,7 +42,7 @@ int mainToggleWidgets(QString filename)
return QApplication::exec();
}
-int mainSimple(QString filename)
+int mainSimple(QString filename, bool loop)
{
QMediaPlayer player;
QVideoWidget widget1;
@@ -52,6 +52,10 @@ int mainSimple(QString filename)
player.setSource(filename);
widget1.show();
+
+ if (loop)
+ player.setLoops(QMediaPlayer::Infinite);
+
player.play();
return QApplication::exec();
}
@@ -69,6 +73,9 @@ int main(int argc, char **argv)
QCommandLineOption toggleWidgetsOption{ "toggle-widgets", "Toggle between widgets." };
parser.addOption(toggleWidgetsOption);
+ QCommandLineOption loopOption{ "loop", "Loop playback." };
+ parser.addOption(loopOption);
+
parser.process(app);
if (parser.positionalArguments().isEmpty()) {
@@ -81,5 +88,7 @@ int main(int argc, char **argv)
if (parser.isSet(toggleWidgetsOption))
return mainToggleWidgets(filename);
- return mainSimple(filename);
+ bool loop = parser.isSet(loopOption);
+
+ return mainSimple(filename, loop);
}