Diffstat (limited to 'src/plugins/multimedia')
-rw-r--r--  src/plugins/multimedia/CMakeLists.txt | 24
-rw-r--r--  src/plugins/multimedia/android/CMakeLists.txt | 62
-rw-r--r--  src/plugins/multimedia/android/android.json | 3
-rw-r--r--  src/plugins/multimedia/android/audio/qandroidaudiodecoder.cpp | 437
-rw-r--r--  src/plugins/multimedia/android/audio/qandroidaudiodecoder_p.h | 118
-rw-r--r--  src/plugins/multimedia/android/common/qandroidaudioinput.cpp | 47
-rw-r--r--  src/plugins/multimedia/android/common/qandroidaudioinput_p.h | 47
-rw-r--r--  src/plugins/multimedia/android/common/qandroidaudiooutput_p.h | 30
-rw-r--r--  src/plugins/multimedia/android/common/qandroidglobal_p.h | 28
-rw-r--r--  src/plugins/multimedia/android/common/qandroidmultimediautils.cpp | 125
-rw-r--r--  src/plugins/multimedia/android/common/qandroidmultimediautils_p.h | 40
-rw-r--r--  src/plugins/multimedia/android/common/qandroidvideooutput.cpp | 468
-rw-r--r--  src/plugins/multimedia/android/common/qandroidvideooutput_p.h | 93
-rw-r--r--  src/plugins/multimedia/android/common/qandroidvideosink.cpp | 35
-rw-r--r--  src/plugins/multimedia/android/common/qandroidvideosink_p.h | 41
-rw-r--r--  src/plugins/multimedia/android/mediacapture/qandroidcamera.cpp | 562
-rw-r--r--  src/plugins/multimedia/android/mediacapture/qandroidcamera_p.h | 99
-rw-r--r--  src/plugins/multimedia/android/mediacapture/qandroidcamerasession.cpp | 808
-rw-r--r--  src/plugins/multimedia/android/mediacapture/qandroidcamerasession_p.h | 166
-rw-r--r--  src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp | 473
-rw-r--r--  src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h | 158
-rw-r--r--  src/plugins/multimedia/android/mediacapture/qandroidimagecapture.cpp | 73
-rw-r--r--  src/plugins/multimedia/android/mediacapture/qandroidimagecapture_p.h | 48
-rw-r--r--  src/plugins/multimedia/android/mediacapture/qandroidmediacapturesession.cpp | 115
-rw-r--r--  src/plugins/multimedia/android/mediacapture/qandroidmediacapturesession_p.h | 66
-rw-r--r--  src/plugins/multimedia/android/mediacapture/qandroidmediaencoder.cpp | 72
-rw-r--r--  src/plugins/multimedia/android/mediacapture/qandroidmediaencoder_p.h | 50
-rw-r--r--  src/plugins/multimedia/android/mediaplayer/qandroidmediaplayer.cpp | 999
-rw-r--r--  src/plugins/multimedia/android/mediaplayer/qandroidmediaplayer_p.h | 127
-rw-r--r--  src/plugins/multimedia/android/mediaplayer/qandroidmetadata.cpp | 163
-rw-r--r--  src/plugins/multimedia/android/mediaplayer/qandroidmetadata_p.h | 47
-rw-r--r--  src/plugins/multimedia/android/qandroidformatsinfo.cpp | 160
-rw-r--r--  src/plugins/multimedia/android/qandroidformatsinfo_p.h | 40
-rw-r--r--  src/plugins/multimedia/android/qandroidintegration.cpp | 136
-rw-r--r--  src/plugins/multimedia/android/qandroidintegration_p.h | 48
-rw-r--r--  src/plugins/multimedia/android/wrappers/jni/androidcamera.cpp | 1797
-rw-r--r--  src/plugins/multimedia/android/wrappers/jni/androidcamera_p.h | 208
-rw-r--r--  src/plugins/multimedia/android/wrappers/jni/androidmediametadataretriever.cpp | 136
-rw-r--r--  src/plugins/multimedia/android/wrappers/jni/androidmediametadataretriever_p.h | 66
-rw-r--r--  src/plugins/multimedia/android/wrappers/jni/androidmediaplayer.cpp | 535
-rw-r--r--  src/plugins/multimedia/android/wrappers/jni/androidmediaplayer_p.h | 135
-rw-r--r--  src/plugins/multimedia/android/wrappers/jni/androidmediarecorder.cpp | 337
-rw-r--r--  src/plugins/multimedia/android/wrappers/jni/androidmediarecorder_p.h | 161
-rw-r--r--  src/plugins/multimedia/android/wrappers/jni/androidmultimediautils.cpp | 43
-rw-r--r--  src/plugins/multimedia/android/wrappers/jni/androidmultimediautils_p.h | 40
-rw-r--r--  src/plugins/multimedia/android/wrappers/jni/androidsurfacetexture.cpp | 152
-rw-r--r--  src/plugins/multimedia/android/wrappers/jni/androidsurfacetexture_p.h | 61
-rw-r--r--  src/plugins/multimedia/android/wrappers/jni/androidsurfaceview.cpp | 152
-rw-r--r--  src/plugins/multimedia/android/wrappers/jni/androidsurfaceview_p.h | 78
-rw-r--r--  src/plugins/multimedia/darwin/CMakeLists.txt | 70
-rw-r--r--  src/plugins/multimedia/darwin/avfaudiodecoder.mm | 544
-rw-r--r--  src/plugins/multimedia/darwin/avfaudiodecoder_p.h | 99
-rw-r--r--  src/plugins/multimedia/darwin/avfvideobuffer.mm | 207
-rw-r--r--  src/plugins/multimedia/darwin/avfvideobuffer_p.h | 64
-rw-r--r--  src/plugins/multimedia/darwin/avfvideosink.mm | 228
-rw-r--r--  src/plugins/multimedia/darwin/avfvideosink_p.h | 99
-rw-r--r--  src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate.mm | 98
-rw-r--r--  src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate_p.h | 40
-rw-r--r--  src/plugins/multimedia/darwin/camera/avfcamera.mm | 89
-rw-r--r--  src/plugins/multimedia/darwin/camera/avfcamera_p.h | 48
-rw-r--r--  src/plugins/multimedia/darwin/camera/avfcameradebug_p.h | 26
-rw-r--r--  src/plugins/multimedia/darwin/camera/avfcamerarenderer.mm | 292
-rw-r--r--  src/plugins/multimedia/darwin/camera/avfcamerarenderer_p.h | 95
-rw-r--r--  src/plugins/multimedia/darwin/camera/avfcameraservice.mm | 169
-rw-r--r--  src/plugins/multimedia/darwin/camera/avfcameraservice_p.h | 84
-rw-r--r--  src/plugins/multimedia/darwin/camera/avfcamerasession.mm | 513
-rw-r--r--  src/plugins/multimedia/darwin/camera/avfcamerasession_p.h | 132
-rw-r--r--  src/plugins/multimedia/darwin/camera/avfcamerautility.mm | 730
-rw-r--r--  src/plugins/multimedia/darwin/camera/avfcamerautility_p.h | 165
-rw-r--r--  src/plugins/multimedia/darwin/camera/avfimagecapture.mm | 385
-rw-r--r--  src/plugins/multimedia/darwin/camera/avfimagecapture_p.h | 81
-rw-r--r--  src/plugins/multimedia/darwin/camera/avfmediaassetwriter.mm | 556
-rw-r--r--  src/plugins/multimedia/darwin/camera/avfmediaassetwriter_p.h | 54
-rw-r--r--  src/plugins/multimedia/darwin/camera/avfmediaencoder.mm | 664
-rw-r--r--  src/plugins/multimedia/darwin/camera/avfmediaencoder_p.h | 96
-rw-r--r--  src/plugins/multimedia/darwin/camera/qavfcamerabase.mm | 1084
-rw-r--r--  src/plugins/multimedia/darwin/camera/qavfcamerabase_p.h | 110
-rw-r--r--  src/plugins/multimedia/darwin/common/avfmetadata.mm | 382
-rw-r--r--  src/plugins/multimedia/darwin/common/avfmetadata_p.h | 37
-rw-r--r--  src/plugins/multimedia/darwin/darwin.json | 3
-rw-r--r--  src/plugins/multimedia/darwin/mediaplayer/avfdisplaylink.mm | 207
-rw-r--r--  src/plugins/multimedia/darwin/mediaplayer/avfdisplaylink_p.h | 65
-rw-r--r--  src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer.mm | 1270
-rw-r--r--  src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer_p.h | 151
-rw-r--r--  src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol.mm | 222
-rw-r--r--  src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol_p.h | 72
-rw-r--r--  src/plugins/multimedia/darwin/qavfhelpers.mm | 143
-rw-r--r--  src/plugins/multimedia/darwin/qavfhelpers_p.h | 41
-rw-r--r--  src/plugins/multimedia/darwin/qdarwinformatsinfo.mm | 211
-rw-r--r--  src/plugins/multimedia/darwin/qdarwinformatsinfo_p.h | 38
-rw-r--r--  src/plugins/multimedia/darwin/qdarwinintegration.mm | 93
-rw-r--r--  src/plugins/multimedia/darwin/qdarwinintegration_p.h | 45
-rw-r--r--  src/plugins/multimedia/ffmpeg/CMakeLists.txt | 260
-rw-r--r--  src/plugins/multimedia/ffmpeg/cmake/QtAddFFmpegStubs.cmake | 199
-rw-r--r--  src/plugins/multimedia/ffmpeg/cmake/QtDeployFFmpeg.cmake | 43
-rw-r--r--  src/plugins/multimedia/ffmpeg/ffmpeg.json | 3
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer.cpp | 407
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer_p.h | 132
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec.cpp | 82
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec_p.h | 62
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegdemuxer.cpp | 228
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegdemuxer_p.h | 87
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegframe_p.h | 109
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegmediadataholder.cpp | 390
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegmediadataholder_p.h | 107
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegpacket_p.h | 61
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegplaybackenginedefs_p.h | 46
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegplaybackengineobject.cpp | 109
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegplaybackengineobject_p.h | 84
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegpositionwithoffset_p.h | 40
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegrenderer.cpp | 216
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegrenderer_p.h | 125
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegstreamdecoder.cpp | 240
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegstreamdecoder_p.h | 87
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegsubtitlerenderer.cpp | 44
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegsubtitlerenderer_p.h | 48
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegtimecontroller.cpp | 165
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegtimecontroller_p.h | 94
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegvideorenderer.cpp | 79
-rw-r--r--  src/plugins/multimedia/ffmpeg/playbackengine/qffmpegvideorenderer_p.h | 47
-rw-r--r--  src/plugins/multimedia/ffmpeg/qandroidcamera.cpp | 697
-rw-r--r--  src/plugins/multimedia/ffmpeg/qandroidcamera_p.h | 91
-rw-r--r--  src/plugins/multimedia/ffmpeg/qandroidcameraframe.cpp | 224
-rw-r--r--  src/plugins/multimedia/ffmpeg/qandroidcameraframe_p.h | 75
-rw-r--r--  src/plugins/multimedia/ffmpeg/qandroidimagecapture.cpp | 46
-rw-r--r--  src/plugins/multimedia/ffmpeg/qandroidimagecapture_p.h | 38
-rw-r--r--  src/plugins/multimedia/ffmpeg/qandroidvideodevices.cpp | 150
-rw-r--r--  src/plugins/multimedia/ffmpeg/qandroidvideodevices_p.h | 35
-rw-r--r--  src/plugins/multimedia/ffmpeg/qavfcamera.mm | 349
-rw-r--r--  src/plugins/multimedia/ffmpeg/qavfcamera_p.h | 90
-rw-r--r--  src/plugins/multimedia/ffmpeg/qavfsamplebufferdelegate.mm | 224
-rw-r--r--  src/plugins/multimedia/ffmpeg/qavfsamplebufferdelegate_p.h | 51
-rw-r--r--  src/plugins/multimedia/ffmpeg/qavfscreencapture.mm | 201
-rw-r--r--  src/plugins/multimedia/ffmpeg/qavfscreencapture_p.h | 60
-rw-r--r--  src/plugins/multimedia/ffmpeg/qcgcapturablewindows.mm | 48
-rw-r--r--  src/plugins/multimedia/ffmpeg/qcgcapturablewindows_p.h | 32
-rw-r--r--  src/plugins/multimedia/ffmpeg/qcgwindowcapture.mm | 203
-rw-r--r--  src/plugins/multimedia/ffmpeg/qcgwindowcapture_p.h | 43
-rw-r--r--  src/plugins/multimedia/ffmpeg/qeglfsscreencapture.cpp | 180
-rw-r--r--  src/plugins/multimedia/ffmpeg/qeglfsscreencapture_p.h | 48
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpeg.cpp | 645
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpeg_p.h | 280
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegaudiodecoder.cpp | 247
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegaudiodecoder_p.h | 68
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegaudioinput.cpp | 195
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegaudioinput_p.h | 57
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegavaudioformat.cpp | 78
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegavaudioformat_p.h | 68
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegconverter.cpp | 272
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegconverter_p.h | 30
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegdefs_p.h | 41
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegencodingformatcontext.cpp | 116
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegencodingformatcontext_p.h | 60
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp | 504
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11.cpp | 309
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11_p.h | 104
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpeghwaccel_mediacodec.cpp | 120
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpeghwaccel_mediacodec_p.h | 36
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpeghwaccel_p.h | 130
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi.cpp | 364
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi_p.h | 48
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpeghwaccel_videotoolbox.mm | 288
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpeghwaccel_videotoolbox_p.h | 63
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegimagecapture.cpp | 271
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegimagecapture_p.h | 71
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegioutils.cpp | 55
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegioutils_p.h | 40
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession.cpp | 318
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession_p.h | 112
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegmediaformatinfo.cpp | 517
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegmediaformatinfo_p.h | 52
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp | 375
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegmediaintegration_p.h | 57
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegmediametadata.cpp | 182
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegmediametadata_p.h | 35
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegmediaplayer.cpp | 411
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegmediaplayer_p.h | 119
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegmediarecorder.cpp | 200
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegmediarecorder_p.h | 73
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegplaybackengine.cpp | 649
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegplaybackengine_p.h | 234
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegresampler.cpp | 112
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegresampler_p.h | 62
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegscreencapture_dxgi.cpp | 467
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegscreencapture_dxgi_p.h | 44
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegsurfacecapturegrabber.cpp | 202
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegsurfacecapturegrabber_p.h | 92
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegthread.cpp | 53
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegthread_p.h | 96
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp | 363
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegvideobuffer_p.h | 71
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegvideosink.cpp | 34
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegvideosink_p.h | 44
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegwindowcapture_uwp.cpp | 508
-rw-r--r--  src/plugins/multimedia/ffmpeg/qffmpegwindowcapture_uwp_p.h | 46
-rw-r--r--  src/plugins/multimedia/ffmpeg/qgdiwindowcapture.cpp | 197
-rw-r--r--  src/plugins/multimedia/ffmpeg/qgdiwindowcapture_p.h | 43
-rw-r--r--  src/plugins/multimedia/ffmpeg/qgrabwindowsurfacecapture.cpp | 221
-rw-r--r--  src/plugins/multimedia/ffmpeg/qgrabwindowsurfacecapture_p.h | 48
-rw-r--r--  src/plugins/multimedia/ffmpeg/qopenglvideobuffer.cpp | 96
-rw-r--r--  src/plugins/multimedia/ffmpeg/qopenglvideobuffer_p.h | 44
-rw-r--r--  src/plugins/multimedia/ffmpeg/qv4l2camera.cpp | 708
-rw-r--r--  src/plugins/multimedia/ffmpeg/qv4l2camera_p.h | 133
-rw-r--r--  src/plugins/multimedia/ffmpeg/qv4l2cameradevices.cpp | 182
-rw-r--r--  src/plugins/multimedia/ffmpeg/qv4l2cameradevices_p.h | 46
-rw-r--r--  src/plugins/multimedia/ffmpeg/qv4l2filedescriptor.cpp | 71
-rw-r--r--  src/plugins/multimedia/ffmpeg/qv4l2filedescriptor_p.h | 50
-rw-r--r--  src/plugins/multimedia/ffmpeg/qv4l2memorytransfer.cpp | 223
-rw-r--r--  src/plugins/multimedia/ffmpeg/qv4l2memorytransfer_p.h | 66
-rw-r--r--  src/plugins/multimedia/ffmpeg/qwincapturablewindows.cpp | 74
-rw-r--r--  src/plugins/multimedia/ffmpeg/qwincapturablewindows_p.h | 32
-rw-r--r--  src/plugins/multimedia/ffmpeg/qwindowscamera.cpp | 330
-rw-r--r--  src/plugins/multimedia/ffmpeg/qwindowscamera_p.h | 45
-rw-r--r--  src/plugins/multimedia/ffmpeg/qx11capturablewindows.cpp | 74
-rw-r--r--  src/plugins/multimedia/ffmpeg/qx11capturablewindows_p.h | 45
-rw-r--r--  src/plugins/multimedia/ffmpeg/qx11surfacecapture.cpp | 342
-rw-r--r--  src/plugins/multimedia/ffmpeg/qx11surfacecapture_p.h | 48
-rw-r--r--  src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder.cpp | 343
-rw-r--r--  src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder_p.h | 77
-rw-r--r--  src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoderutils.cpp | 97
-rw-r--r--  src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoderutils_p.h | 28
-rw-r--r--  src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderoptions.cpp | 362
-rw-r--r--  src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderoptions_p.h | 32
-rw-r--r--  src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread.cpp | 40
-rw-r--r--  src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread_p.h | 72
-rw-r--r--  src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer.cpp | 165
-rw-r--r--  src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer_p.h | 77
-rw-r--r--  src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer.cpp | 64
-rw-r--r--  src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer_p.h | 41
-rw-r--r--  src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine.cpp | 278
-rw-r--r--  src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine_p.h | 121
-rw-r--r--  src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils.cpp | 63
-rw-r--r--  src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils_p.h | 81
-rw-r--r--  src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder.cpp | 245
-rw-r--r--  src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder_p.h | 64
-rw-r--r--  src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoderutils.cpp | 214
-rw-r--r--  src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoderutils_p.h | 64
-rw-r--r--  src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder.cpp | 477
-rw-r--r--  src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder_p.h | 96
-rw-r--r--  src/plugins/multimedia/ffmpeg/symbolstubs/openssl3.ver | 7
-rw-r--r--  src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-crypto.cpp | 6
-rw-r--r--  src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-ssl.cpp | 300
-rw-r--r--  src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-drm.cpp | 14
-rw-r--r--  src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-x11.cpp | 14
-rw-r--r--  src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va.cpp | 150
-rw-r--r--  src/plugins/multimedia/ffmpeg/symbolstubs/va.ver | 7
-rw-r--r--  src/plugins/multimedia/gstreamer/CMakeLists.txt | 73
-rw-r--r--  src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder.cpp | 535
-rw-r--r--  src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder_p.h | 116
-rw-r--r--  src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice.cpp | 77
-rw-r--r--  src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice_p.h | 52
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qglist_helper_p.h | 82
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgst.cpp | 1392
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgst_debug.cpp | 565
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgst_debug_p.h | 74
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgst_handle_types_p.h | 270
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgst_p.h | 853
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstappsource.cpp | 319
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstappsource_p.h | 96
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstpipeline.cpp | 414
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstpipeline_p.h | 119
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstreameraudioinput.cpp | 137
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstreameraudioinput_p.h | 62
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput.cpp | 138
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput_p.h | 63
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstreamerbufferprobe.cpp | 88
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstreamerbufferprobe_p.h | 56
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer.cpp | 1114
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer_p.h | 199
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstreamermessage_p.h | 55
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstreamermetadata.cpp | 489
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstreamermetadata_p.h | 35
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstreamervideooutput.cpp | 220
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstreamervideooutput_p.h | 80
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstreamervideooverlay.cpp | 218
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstreamervideooverlay_p.h | 74
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstreamervideosink.cpp | 314
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstreamervideosink_p.h | 76
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstsubtitlesink.cpp | 155
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstsubtitlesink_p.h | 70
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstutils.cpp | 141
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstutils_p.h | 41
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstvideobuffer.cpp | 393
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstvideobuffer_p.h | 55
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstvideorenderersink.cpp | 499
-rw-r--r--  src/plugins/multimedia/gstreamer/common/qgstvideorenderersink_p.h | 138
-rw-r--r--  src/plugins/multimedia/gstreamer/gstreamer.json | 3
-rw-r--r--  src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera.cpp | 771
-rw-r--r--  src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera_p.h | 152
-rw-r--r--  src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture.cpp | 450
-rw-r--r--  src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture_p.h | 109
-rw-r--r--  src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture.cpp | 326
-rw-r--r--  src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture_p.h | 97
-rw-r--r--  src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder.cpp | 419
-rw-r--r--  src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder_p.h | 91
-rw-r--r--  src/plugins/multimedia/gstreamer/qgstreamerformatinfo.cpp | 445
-rw-r--r--  src/plugins/multimedia/gstreamer/qgstreamerformatinfo_p.h | 44
-rw-r--r--  src/plugins/multimedia/gstreamer/qgstreamerintegration.cpp | 242
-rw-r--r--  src/plugins/multimedia/gstreamer/qgstreamerintegration_p.h | 79
-rw-r--r--  src/plugins/multimedia/gstreamer/qgstreamerplugin.cpp | 28
-rw-r--r--  src/plugins/multimedia/gstreamer/qgstreamervideodevices.cpp | 158
-rw-r--r--  src/plugins/multimedia/gstreamer/qgstreamervideodevices_p.h | 54
-rw-r--r--  src/plugins/multimedia/qnx/CMakeLists.txt | 39
-rw-r--r--  src/plugins/multimedia/qnx/camera/qqnxcamera.cpp | 820
-rw-r--r--  src/plugins/multimedia/qnx/camera/qqnxcamera_p.h | 201
-rw-r--r--  src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer.cpp | 299
-rw-r--r--  src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer_p.h | 60
-rw-r--r--  src/plugins/multimedia/qnx/camera/qqnxcamerahandle_p.h | 102
-rw-r--r--  src/plugins/multimedia/qnx/camera/qqnximagecapture.cpp | 257
-rw-r--r--  src/plugins/multimedia/qnx/camera/qqnximagecapture_p.h | 63
-rw-r--r--  src/plugins/multimedia/qnx/camera/qqnxplatformcamera.cpp | 426
-rw-r--r--  src/plugins/multimedia/qnx/camera/qqnxplatformcamera_p.h | 113
-rw-r--r--  src/plugins/multimedia/qnx/capture/qqnxaudiorecorder.cpp | 284
-rw-r--r--  src/plugins/multimedia/qnx/capture/qqnxaudiorecorder_p.h | 103
-rw-r--r--  src/plugins/multimedia/qnx/capture/qqnxmediacapturesession.cpp | 121
-rw-r--r--  src/plugins/multimedia/qnx/capture/qqnxmediacapturesession_p.h | 67
-rw-r--r--  src/plugins/multimedia/qnx/capture/qqnxmediarecorder.cpp | 115
-rw-r--r--  src/plugins/multimedia/qnx/capture/qqnxmediarecorder_p.h | 51
-rw-r--r--  src/plugins/multimedia/qnx/common/mmrenderertypes.h | 95
-rw-r--r--  src/plugins/multimedia/qnx/common/qqnxaudioinput.cpp | 25
-rw-r--r--  src/plugins/multimedia/qnx/common/qqnxaudioinput_p.h | 33
-rw-r--r--  src/plugins/multimedia/qnx/common/qqnxaudiooutput.cpp | 52
-rw-r--r--  src/plugins/multimedia/qnx/common/qqnxaudiooutput_p.h | 39
-rw-r--r--  src/plugins/multimedia/qnx/common/qqnxmediaeventthread.cpp | 98
-rw-r--r--  src/plugins/multimedia/qnx/common/qqnxmediaeventthread_p.h | 55
-rw-r--r--  src/plugins/multimedia/qnx/common/qqnxwindowgrabber.cpp | 435
-rw-r--r--  src/plugins/multimedia/qnx/common/qqnxwindowgrabber_p.h | 114
-rw-r--r--  src/plugins/multimedia/qnx/mediaplayer/qqnxmediametadata.cpp | 262
-rw-r--r--  src/plugins/multimedia/qnx/mediaplayer/qqnxmediametadata_p.h | 74
-rw-r--r--  src/plugins/multimedia/qnx/mediaplayer/qqnxmediaplayer.cpp | 887
-rw-r--r--  src/plugins/multimedia/qnx/mediaplayer/qqnxmediaplayer_p.h | 167
-rw-r--r--  src/plugins/multimedia/qnx/mediaplayer/qqnxmediautil.cpp | 126
-rw-r--r--  src/plugins/multimedia/qnx/mediaplayer/qqnxmediautil_p.h | 32
-rw-r--r--  src/plugins/multimedia/qnx/mediaplayer/qqnxvideosink.cpp | 26
-rw-r--r--  src/plugins/multimedia/qnx/mediaplayer/qqnxvideosink_p.h | 41
-rw-r--r--  src/plugins/multimedia/qnx/qnx.json | 3
-rw-r--r--  src/plugins/multimedia/qnx/qqnxformatinfo.cpp | 36
-rw-r--r--  src/plugins/multimedia/qnx/qqnxformatinfo_p.h | 33
-rw-r--r--  src/plugins/multimedia/qnx/qqnxmediaintegration.cpp | 79
-rw-r--r--  src/plugins/multimedia/qnx/qqnxmediaintegration_p.h | 50
-rw-r--r--  src/plugins/multimedia/qnx/qqnxvideodevices.cpp | 111
-rw-r--r--  src/plugins/multimedia/qnx/qqnxvideodevices_p.h | 32
-rw-r--r--  src/plugins/multimedia/wasm/CMakeLists.txt | 25
-rw-r--r--  src/plugins/multimedia/wasm/common/qwasmaudioinput.cpp | 107
-rw-r--r--  src/plugins/multimedia/wasm/common/qwasmaudioinput_p.h | 57
-rw-r--r--  src/plugins/multimedia/wasm/common/qwasmaudiooutput.cpp | 378
-rw-r--r--  src/plugins/multimedia/wasm/common/qwasmaudiooutput_p.h | 97
-rw-r--r--  src/plugins/multimedia/wasm/common/qwasmvideooutput.cpp | 1071
-rw-r--r--  src/plugins/multimedia/wasm/common/qwasmvideooutput_p.h | 153
-rw-r--r--  src/plugins/multimedia/wasm/mediacapture/qwasmcamera.cpp | 478
-rw-r--r--  src/plugins/multimedia/wasm/mediacapture/qwasmcamera_p.h | 99
-rw-r--r--  src/plugins/multimedia/wasm/mediacapture/qwasmimagecapture.cpp | 130
-rw-r--r--  src/plugins/multimedia/wasm/mediacapture/qwasmimagecapture_p.h | 58
-rw-r--r--  src/plugins/multimedia/wasm/mediacapture/qwasmmediacapturesession.cpp | 111
-rw-r--r--  src/plugins/multimedia/wasm/mediacapture/qwasmmediacapturesession_p.h | 71
-rw-r--r--  src/plugins/multimedia/wasm/mediacapture/qwasmmediarecorder.cpp | 520
-rw-r--r--  src/plugins/multimedia/wasm/mediacapture/qwasmmediarecorder_p.h | 89
-rw-r--r--  src/plugins/multimedia/wasm/mediaplayer/qwasmmediaplayer.cpp | 475
-rw-r--r--  src/plugins/multimedia/wasm/mediaplayer/qwasmmediaplayer_p.h | 124
-rw-r--r--  src/plugins/multimedia/wasm/mediaplayer/qwasmvideosink.cpp | 26
-rw-r--r--  src/plugins/multimedia/wasm/mediaplayer/qwasmvideosink_p.h | 40
-rw-r--r--  src/plugins/multimedia/wasm/qwasmmediaintegration.cpp | 109
-rw-r--r--  src/plugins/multimedia/wasm/qwasmmediaintegration_p.h | 50
-rw-r--r--  src/plugins/multimedia/wasm/wasm.json | 5
-rw-r--r--  src/plugins/multimedia/windows/CMakeLists.txt | 69
-rw-r--r--  src/plugins/multimedia/windows/common/mfmetadata.cpp | 408
-rw-r--r--  src/plugins/multimedia/windows/common/mfmetadata_p.h | 30
-rw-r--r--  src/plugins/multimedia/windows/decoder/mfaudiodecodercontrol.cpp | 225
-rw-r--r--  src/plugins/multimedia/windows/decoder/mfaudiodecodercontrol_p.h | 75
-rw-r--r--  src/plugins/multimedia/windows/decoder/mfdecodersourcereader.cpp | 103
-rw-r--r--  src/plugins/multimedia/windows/decoder/mfdecodersourcereader_p.h | 63
-rw-r--r--  src/plugins/multimedia/windows/evr/evrcustompresenter.cpp | 1849
-rw-r--r--  src/plugins/multimedia/windows/evr/evrcustompresenter_p.h | 357
-rw-r--r--  src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp | 699
-rw-r--r--  src/plugins/multimedia/windows/evr/evrd3dpresentengine_p.h | 153
-rw-r--r--  src/plugins/multimedia/windows/evr/evrhelpers.cpp | 140
-rw-r--r--  src/plugins/multimedia/windows/evr/evrhelpers_p.h | 93
-rw-r--r--  src/plugins/multimedia/windows/evr/evrvideowindowcontrol.cpp | 228
-rw-r--r--  src/plugins/multimedia/windows/evr/evrvideowindowcontrol_p.h | 72
-rw-r--r--  src/plugins/multimedia/windows/mediacapture/qwindowscamera.cpp | 101
-rw-r--r--  src/plugins/multimedia/windows/mediacapture/qwindowscamera_p.h | 55
-rw-r--r--  src/plugins/multimedia/windows/mediacapture/qwindowsimagecapture.cpp | 207
-rw-r--r--  src/plugins/multimedia/windows/mediacapture/qwindowsimagecapture_p.h | 64
-rw-r--r--  src/plugins/multimedia/windows/mediacapture/qwindowsmediacapture.cpp | 109
-rw-r--r--  src/plugins/multimedia/windows/mediacapture/qwindowsmediacapture_p.h | 62
-rw-r--r--  src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader.cpp | 1019
-rw-r--r--  src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader_p.h | 154
-rw-r--r--  src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicesession.cpp | 376
-rw-r--r--  src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicesession_p.h | 100
-rw-r--r--  src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder.cpp | 225
-rw-r--r--  src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder_p.h | 71
-rw-r--r--  src/plugins/multimedia/windows/mfstream.cpp | 326
-rw-r--r--  src/plugins/multimedia/windows/mfstream_p.h | 124
-rw-r--r--  src/plugins/multimedia/windows/player/mfactivate.cpp | 17
-rw-r--r--  src/plugins/multimedia/windows/player/mfactivate_p.h | 202
-rw-r--r--  src/plugins/multimedia/windows/player/mfevrvideowindowcontrol.cpp | 55
-rw-r--r--  src/plugins/multimedia/windows/player/mfevrvideowindowcontrol_p.h | 38
-rw-r--r--  src/plugins/multimedia/windows/player/mfplayercontrol.cpp | 306
-rw-r--r--  src/plugins/multimedia/windows/player/mfplayercontrol_p.h | 103
-rw-r--r--  src/plugins/multimedia/windows/player/mfplayersession.cpp | 1736
-rw-r--r--  src/plugins/multimedia/windows/player/mfplayersession_p.h | 240
-rw-r--r--  src/plugins/multimedia/windows/player/mfvideorenderercontrol.cpp | 152
-rw-r--r--  src/plugins/multimedia/windows/player/mfvideorenderercontrol_p.h | 47
-rw-r--r--  src/plugins/multimedia/windows/qwindowsformatinfo.cpp | 187
-rw-r--r--  src/plugins/multimedia/windows/qwindowsformatinfo_p.h | 31
-rw-r--r--  src/plugins/multimedia/windows/qwindowsintegration.cpp | 96
-rw-r--r--  src/plugins/multimedia/windows/qwindowsintegration_p.h | 51
-rw-r--r--  src/plugins/multimedia/windows/qwindowsvideodevices.cpp | 228
-rw-r--r--  src/plugins/multimedia/windows/qwindowsvideodevices_p.h | 44
-rw-r--r--  src/plugins/multimedia/windows/sourceresolver.cpp | 294
-rw-r--r--  src/plugins/multimedia/windows/sourceresolver_p.h | 83
-rw-r--r--  src/plugins/multimedia/windows/windows.json | 3
412 files changed, 78179 insertions, 0 deletions
diff --git a/src/plugins/multimedia/CMakeLists.txt b/src/plugins/multimedia/CMakeLists.txt
new file mode 100644
index 000000000..5bc39c1f8
--- /dev/null
+++ b/src/plugins/multimedia/CMakeLists.txt
@@ -0,0 +1,24 @@
+# Copyright (C) 2022 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+if(QT_FEATURE_ffmpeg)
+ add_subdirectory(ffmpeg)
+endif()
+if(QT_FEATURE_gstreamer)
+ add_subdirectory(gstreamer)
+endif()
+if(ANDROID)
+ add_subdirectory(android)
+endif()
+if(WASM)
+ add_subdirectory(wasm)
+endif()
+if(APPLE AND NOT WATCHOS)
+ add_subdirectory(darwin)
+endif()
+if(QT_FEATURE_wmf)
+ add_subdirectory(windows)
+endif()
+if(QT_FEATURE_mmrenderer)
+ add_subdirectory(qnx)
+endif()
diff --git a/src/plugins/multimedia/android/CMakeLists.txt b/src/plugins/multimedia/android/CMakeLists.txt
new file mode 100644
index 000000000..31a94ff4f
--- /dev/null
+++ b/src/plugins/multimedia/android/CMakeLists.txt
@@ -0,0 +1,62 @@
+# Copyright (C) 2022 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+qt_internal_add_plugin(QAndroidMediaPlugin
+ OUTPUT_NAME androidmediaplugin
+ PLUGIN_TYPE multimedia
+ SOURCES
+ common/qandroidaudiooutput_p.h
+ common/qandroidaudioinput.cpp common/qandroidaudioinput_p.h
+ audio/qandroidaudiodecoder.cpp audio/qandroidaudiodecoder_p.h
+ common/qandroidglobal_p.h
+ common/qandroidmultimediautils.cpp common/qandroidmultimediautils_p.h
+ common/qandroidvideosink.cpp common/qandroidvideosink_p.h
+ common/qandroidvideooutput.cpp common/qandroidvideooutput_p.h
+ mediacapture/qandroidcamera.cpp mediacapture/qandroidcamera_p.h
+ mediacapture/qandroidimagecapture.cpp mediacapture/qandroidimagecapture_p.h
+ mediacapture/qandroidcamerasession.cpp mediacapture/qandroidcamerasession_p.h
+ mediacapture/qandroidmediacapturesession.cpp mediacapture/qandroidmediacapturesession_p.h
+ mediacapture/qandroidcapturesession.cpp mediacapture/qandroidcapturesession_p.h
+ mediacapture/qandroidmediaencoder.cpp mediacapture/qandroidmediaencoder_p.h
+ mediaplayer/qandroidmediaplayer.cpp mediaplayer/qandroidmediaplayer_p.h
+ mediaplayer/qandroidmetadata.cpp mediaplayer/qandroidmetadata_p.h
+ qandroidformatsinfo.cpp qandroidformatsinfo_p.h
+ qandroidintegration.cpp qandroidintegration_p.h
+ wrappers/jni/androidcamera.cpp wrappers/jni/androidcamera_p.h
+ wrappers/jni/androidmediametadataretriever.cpp wrappers/jni/androidmediametadataretriever_p.h
+ wrappers/jni/androidmediaplayer.cpp wrappers/jni/androidmediaplayer_p.h
+ wrappers/jni/androidmediarecorder.cpp wrappers/jni/androidmediarecorder_p.h
+ wrappers/jni/androidmultimediautils.cpp wrappers/jni/androidmultimediautils_p.h
+ wrappers/jni/androidsurfacetexture.cpp wrappers/jni/androidsurfacetexture_p.h
+ wrappers/jni/androidsurfaceview.cpp wrappers/jni/androidsurfaceview_p.h
+ NO_UNITY_BUILD_SOURCES
+ # Resolves two problems:
+    # - collision of `rwLock` with a symbol in wrappers/jni/androidmediaplayer.cpp
+    # - redefinition of `notifyFrameAvailable` with a different signature
+    #   in wrappers/jni/androidsurfacetexture.cpp
+ wrappers/jni/androidcamera.cpp
+ INCLUDE_DIRECTORIES
+ audio
+ common
+ mediacapture
+ mediaplayer
+ wrappers/jni
+ ../android
+ LIBRARIES
+ Qt::MultimediaPrivate
+ Qt::CorePrivate
+ OpenSLES
+ mediandk
+)
+
+set_property(TARGET QAndroidMediaPlugin APPEND PROPERTY QT_ANDROID_BUNDLED_JAR_DEPENDENCIES
+ jar/Qt${QtMultimedia_VERSION_MAJOR}AndroidMultimedia.jar:org.qtproject.qt.android.multimedia.QtMultimediaUtils
+)
+set_property(TARGET QAndroidMediaPlugin APPEND PROPERTY QT_ANDROID_LIB_DEPENDENCIES
+ ${INSTALL_PLUGINSDIR}/multimedia/libplugins_multimedia_androidmediaplugin.so
+)
+set_property(TARGET QAndroidMediaPlugin APPEND PROPERTY QT_ANDROID_PERMISSIONS
+ android.permission.CAMERA android.permission.RECORD_AUDIO
+ android.permission.BLUETOOTH
+ android.permission.MODIFY_AUDIO_SETTINGS
+)
diff --git a/src/plugins/multimedia/android/android.json b/src/plugins/multimedia/android/android.json
new file mode 100644
index 000000000..6843bd330
--- /dev/null
+++ b/src/plugins/multimedia/android/android.json
@@ -0,0 +1,3 @@
+{
+ "Keys": [ "android" ]
+}
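
The JSON above is the plugin's metadata: the "android" key is what the multimedia plugin loader matches when selecting a backend. A minimal sketch of how such a key file is typically bound to the plugin class follows; the interface name and create() signature mirror Qt's platform-media plugin pattern (compare qgstreamerplugin.cpp in the diffstat) and should be read as illustrative rather than quoted from this patch.

    // Sketch (assumed IID macro and signature): binding android.json to the plugin class.
    #include <private/qplatformmediaplugin_p.h>
    #include <private/qplatformmediaintegration_p.h>

    class QAndroidMediaPlugin : public QPlatformMediaPlugin
    {
        Q_OBJECT
        Q_PLUGIN_METADATA(IID QPlatformMediaPlugin_iid FILE "android.json")
    public:
        QPlatformMediaIntegration *create(const QString &name) override
        {
            if (name == QLatin1String("android"))
                return new QAndroidIntegration; // integration class from qandroidintegration.cpp
            return nullptr;
        }
    };
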
diff --git a/src/plugins/multimedia/android/audio/qandroidaudiodecoder.cpp b/src/plugins/multimedia/android/audio/qandroidaudiodecoder.cpp
new file mode 100644
index 000000000..d200a72b5
--- /dev/null
+++ b/src/plugins/multimedia/android/audio/qandroidaudiodecoder.cpp
@@ -0,0 +1,437 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qandroidaudiodecoder_p.h"
+
+#include <QtCore/qcoreapplication.h>
+#include <QtCore/qjniobject.h>
+#include <QtCore/qjnienvironment.h>
+#include <QtCore/private/qandroidextras_p.h>
+#include <qloggingcategory.h>
+#include <QTimer>
+#include <QFile>
+#include <QDir>
+
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <unistd.h>
+
+QT_BEGIN_NAMESPACE
+
+static const char tempFile[] = "encoded.wav";
+constexpr int dequeueTimeout = 5000; // AMediaCodec dequeue timeout, in microseconds
+static Q_LOGGING_CATEGORY(adLogger, "QAndroidAudioDecoder")
+
+Decoder::Decoder()
+ : m_format(AMediaFormat_new())
+{}
+
+Decoder::~Decoder()
+{
+ if (m_codec) {
+ AMediaCodec_delete(m_codec);
+ m_codec = nullptr;
+ }
+
+ if (m_extractor) {
+ AMediaExtractor_delete(m_extractor);
+ m_extractor = nullptr;
+ }
+
+ if (m_format) {
+ AMediaFormat_delete(m_format);
+ m_format = nullptr;
+ }
+}
+
+void Decoder::stop()
+{
+ if (!m_codec)
+ return;
+
+ const media_status_t err = AMediaCodec_stop(m_codec);
+ if (err != AMEDIA_OK)
+ qCWarning(adLogger) << "stop() error: " << err;
+}
+
+void Decoder::setSource(const QUrl &source)
+{
+    const QJniObject mimeType = QJniObject::callStaticObjectMethod(
+ "org/qtproject/qt/android/multimedia/QtMultimediaUtils",
+ "getMimeType",
+ "(Landroid/content/Context;Ljava/lang/String;)Ljava/lang/String;",
+ QNativeInterface::QAndroidApplication::context().object(),
+ QJniObject::fromString(source.path()).object());
+
+    const QString mime = mimeType.isValid() ? mimeType.toString() : QString();
+
+ if (!mime.isEmpty() && !mime.contains("audio", Qt::CaseInsensitive)) {
+        m_formatError = tr("Cannot set source: invalid MIME type for the provided source.");
+ return;
+ }
+
+ if (!m_extractor)
+ m_extractor = AMediaExtractor_new();
+
+ QFile file(source.path());
+ if (!file.open(QFile::ReadOnly)) {
+ emit error(QAudioDecoder::ResourceError, tr("Cannot open the file"));
+ return;
+ }
+
+ const int fd = file.handle();
+
+ if (fd < 0) {
+        emit error(QAudioDecoder::ResourceError, tr("Invalid file descriptor for source."));
+ return;
+ }
+    const qint64 size = file.size(); // QFile::size() returns qint64; int would truncate
+ media_status_t status = AMediaExtractor_setDataSourceFd(m_extractor, fd, 0,
+ size > 0 ? size : LONG_MAX);
+ close(fd);
+
+ if (status != AMEDIA_OK) {
+ if (m_extractor) {
+ AMediaExtractor_delete(m_extractor);
+ m_extractor = nullptr;
+ }
+ m_formatError = tr("Setting source for Audio Decoder failed.");
+ }
+}
+
+void Decoder::createDecoder()
+{
+ // get encoded format for decoder
+ m_format = AMediaExtractor_getTrackFormat(m_extractor, 0);
+
+ const char *mime;
+ if (!AMediaFormat_getString(m_format, AMEDIAFORMAT_KEY_MIME, &mime)) {
+ if (m_extractor) {
+ AMediaExtractor_delete(m_extractor);
+ m_extractor = nullptr;
+ }
+ emit error(QAudioDecoder::FormatError, tr("Format not supported by Audio Decoder."));
+
+ return;
+ }
+
+ // get audio duration from source
+ int64_t durationUs;
+ AMediaFormat_getInt64(m_format, AMEDIAFORMAT_KEY_DURATION, &durationUs);
+ emit durationChanged(durationUs / 1000);
+
+ // set default output audio format from input file
+ if (!m_outputFormat.isValid()) {
+ int32_t sampleRate;
+ AMediaFormat_getInt32(m_format, AMEDIAFORMAT_KEY_SAMPLE_RATE, &sampleRate);
+ m_outputFormat.setSampleRate(sampleRate);
+ int32_t channelCount;
+ AMediaFormat_getInt32(m_format, AMEDIAFORMAT_KEY_CHANNEL_COUNT, &channelCount);
+ m_outputFormat.setChannelCount(channelCount);
+ m_outputFormat.setSampleFormat(QAudioFormat::Int16);
+ }
+
+ m_codec = AMediaCodec_createDecoderByType(mime);
+}
+
+void Decoder::doDecode()
+{
+ if (!m_formatError.isEmpty()) {
+ emit error(QAudioDecoder::FormatError, m_formatError);
+ return;
+ }
+
+ if (!m_extractor) {
+ emit error(QAudioDecoder::ResourceError, tr("Cannot decode, source not set."));
+ return;
+ }
+
+ createDecoder();
+
+ if (!m_codec) {
+ emit error(QAudioDecoder::ResourceError, tr("Audio Decoder could not be created."));
+ return;
+ }
+
+ media_status_t status = AMediaCodec_configure(m_codec, m_format, nullptr /* surface */,
+ nullptr /* crypto */, 0);
+
+ if (status != AMEDIA_OK) {
+ emit error(QAudioDecoder::ResourceError, tr("Audio Decoder failed configuration."));
+ return;
+ }
+
+ status = AMediaCodec_start(m_codec);
+ if (status != AMEDIA_OK) {
+ emit error(QAudioDecoder::ResourceError, tr("Audio Decoder failed to start."));
+ return;
+ }
+
+ AMediaExtractor_selectTrack(m_extractor, 0);
+
+ emit decodingChanged(true);
+ m_inputEOS = false;
+ while (!m_inputEOS) {
+ // handle input buffer
+ const ssize_t bufferIdx = AMediaCodec_dequeueInputBuffer(m_codec, dequeueTimeout);
+
+ if (bufferIdx >= 0) {
+ size_t bufferSize = {};
+ uint8_t *buffer = AMediaCodec_getInputBuffer(m_codec, bufferIdx, &bufferSize);
+ const int sample = AMediaExtractor_readSampleData(m_extractor, buffer, bufferSize);
+ if (sample < 0) {
+ m_inputEOS = true;
+ break;
+ }
+
+ const int64_t presentationTimeUs = AMediaExtractor_getSampleTime(m_extractor);
+ AMediaCodec_queueInputBuffer(m_codec, bufferIdx, 0, sample, presentationTimeUs,
+ m_inputEOS ? AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM : 0);
+ AMediaExtractor_advance(m_extractor);
+
+ // handle output buffer
+ AMediaCodecBufferInfo info;
+ ssize_t idx = AMediaCodec_dequeueOutputBuffer(m_codec, &info, dequeueTimeout);
+
+ while (idx == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED
+ || idx == AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED) {
+ if (idx == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED)
+ qCWarning(adLogger) << "dequeueOutputBuffer() status: outputFormat changed";
+
+ idx = AMediaCodec_dequeueOutputBuffer(m_codec, &info, dequeueTimeout);
+ }
+
+ if (idx >= 0) {
+ if (info.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM)
+ break;
+
+ if (info.size > 0) {
+ size_t bufferSize;
+ const uint8_t *bufferData = AMediaCodec_getOutputBuffer(m_codec, idx,
+ &bufferSize);
+ const QByteArray data((const char*)(bufferData + info.offset), info.size);
+ auto audioBuffer = QAudioBuffer(data, m_outputFormat, presentationTimeUs);
+ if (presentationTimeUs >= 0)
+ emit positionChanged(std::move(audioBuffer), presentationTimeUs / 1000);
+
+ AMediaCodec_releaseOutputBuffer(m_codec, idx, false);
+ }
+ } else if (idx == AMEDIACODEC_INFO_TRY_AGAIN_LATER) {
+ qCWarning(adLogger) << "dequeueOutputBuffer() status: try again later";
+ break;
+ } else {
+ qCWarning(adLogger) <<
+ "AMediaCodec_dequeueOutputBuffer() status: invalid buffer idx " << idx;
+ }
+ } else {
+ qCWarning(adLogger) << "dequeueInputBuffer() status: invalid buffer idx " << bufferIdx;
+ }
+ }
+ emit finished();
+}
+
+QAndroidAudioDecoder::QAndroidAudioDecoder(QAudioDecoder *parent)
+ : QPlatformAudioDecoder(parent),
+ m_decoder(new Decoder())
+{
+ connect(m_decoder, &Decoder::positionChanged, this, &QAndroidAudioDecoder::positionChanged);
+ connect(m_decoder, &Decoder::durationChanged, this, &QAndroidAudioDecoder::durationChanged);
+ connect(m_decoder, &Decoder::error, this, &QAndroidAudioDecoder::error);
+ connect(m_decoder, &Decoder::finished, this, &QAndroidAudioDecoder::finished);
+ connect(m_decoder, &Decoder::decodingChanged, this, &QPlatformAudioDecoder::setIsDecoding);
+    connect(this, &QAndroidAudioDecoder::setSourceUrl, m_decoder, &Decoder::setSource);
+}
+
+QAndroidAudioDecoder::~QAndroidAudioDecoder()
+{
+    if (m_threadDecoder) {
+        m_threadDecoder->quit();
+        m_threadDecoder->wait();
+    }
+ delete m_threadDecoder;
+ delete m_decoder;
+}
+
+void QAndroidAudioDecoder::setSource(const QUrl &fileName)
+{
+ if (!requestPermissions())
+ return;
+
+ if (isDecoding())
+ return;
+
+ m_device = nullptr;
+ error(QAudioDecoder::NoError, QStringLiteral(""));
+
+ if (m_source != fileName) {
+ m_source = fileName;
+ emit setSourceUrl(m_source);
+ sourceChanged();
+ }
+}
+
+void QAndroidAudioDecoder::setSourceDevice(QIODevice *device)
+{
+ if (isDecoding())
+ return;
+
+ m_source.clear();
+ if (m_device != device) {
+ m_device = device;
+
+ if (!requestPermissions())
+ return;
+
+ sourceChanged();
+ }
+}
+
+void QAndroidAudioDecoder::start()
+{
+ if (isDecoding())
+ return;
+
+ m_position = -1;
+
+ if (m_device && (!m_device->isOpen() || !m_device->isReadable())) {
+ emit error(QAudioDecoder::ResourceError,
+ QString::fromUtf8("Unable to read from the specified device"));
+ return;
+ }
+
+ if (!m_threadDecoder) {
+ m_threadDecoder = new QThread(this);
+ m_decoder->moveToThread(m_threadDecoder);
+ m_threadDecoder->start();
+ }
+
+ decode();
+}
+
+void QAndroidAudioDecoder::stop()
+{
+ if (!isDecoding() && m_position < 0 && m_duration < 0)
+ return;
+
+ m_decoder->stop();
+ m_audioBuffer.clear();
+ m_position = -1;
+ m_duration = -1;
+ setIsDecoding(false);
+
+ emit bufferAvailableChanged(false);
+ emit QPlatformAudioDecoder::positionChanged(m_position);
+}
+
+QAudioBuffer QAndroidAudioDecoder::read()
+{
+ if (!m_audioBuffer.isEmpty()) {
+ QPair<QAudioBuffer, int> buffer = m_audioBuffer.takeFirst();
+ m_position = buffer.second;
+ emit QPlatformAudioDecoder::positionChanged(buffer.second);
+ return buffer.first;
+ }
+
+ // no buffers available
+ return {};
+}
+
+bool QAndroidAudioDecoder::bufferAvailable() const
+{
+ return m_audioBuffer.size() > 0;
+}
+
+qint64 QAndroidAudioDecoder::position() const
+{
+ return m_position;
+}
+
+qint64 QAndroidAudioDecoder::duration() const
+{
+ return m_duration;
+}
+
+void QAndroidAudioDecoder::positionChanged(QAudioBuffer audioBuffer, qint64 position)
+{
+ m_audioBuffer.append(QPair<QAudioBuffer, int>(audioBuffer, position));
+ m_position = position;
+ emit bufferReady();
+}
+
+void QAndroidAudioDecoder::durationChanged(qint64 duration)
+{
+ m_duration = duration;
+ emit QPlatformAudioDecoder::durationChanged(duration);
+}
+
+void QAndroidAudioDecoder::error(const QAudioDecoder::Error err, const QString &errorString)
+{
+ stop();
+ emit QPlatformAudioDecoder::error(err, errorString);
+}
+
+void QAndroidAudioDecoder::finished()
+{
+ emit bufferAvailableChanged(m_audioBuffer.size() > 0);
+
+ if (m_duration != -1)
+ emit durationChanged(m_duration);
+
+ // remove temp file when decoding is finished
+    QFile(QDir::tempPath() + QLatin1Char('/') + QString::fromUtf8(tempFile)).remove();
+ emit QPlatformAudioDecoder::finished();
+}
+
+bool QAndroidAudioDecoder::requestPermissions()
+{
+ const auto writeRes = QtAndroidPrivate::requestPermission(QStringLiteral("android.permission.WRITE_EXTERNAL_STORAGE"));
+ if (writeRes.result() == QtAndroidPrivate::Authorized)
+ return true;
+
+ return false;
+}
+
+void QAndroidAudioDecoder::decode()
+{
+ if (m_device) {
+ connect(m_device, &QIODevice::readyRead, this, &QAndroidAudioDecoder::readDevice);
+ if (m_device->bytesAvailable())
+ readDevice();
+ } else {
+ QTimer::singleShot(0, m_decoder, &Decoder::doDecode);
+ }
+}
+
+bool QAndroidAudioDecoder::createTempFile()
+{
+    QFile file(QDir::tempPath() + QLatin1Char('/') + QString::fromUtf8(tempFile));
+
+    bool success = file.open(QIODevice::ReadWrite);
+ if (!success)
+ emit error(QAudioDecoder::ResourceError, tr("Error opening temporary file: %1").arg(file.errorString()));
+
+ success &= (file.write(m_deviceBuffer) == m_deviceBuffer.size());
+ if (!success)
+ emit error(QAudioDecoder::ResourceError, tr("Error while writing data to temporary file"));
+
+ file.close();
+ m_deviceBuffer.clear();
+ if (success)
+ m_decoder->setSource(file.fileName());
+
+ return success;
+}
+
+void QAndroidAudioDecoder::readDevice()
+{
+ m_deviceBuffer.append(m_device->readAll());
+ if (m_device->atEnd()) {
+ disconnect(m_device, &QIODevice::readyRead, this, &QAndroidAudioDecoder::readDevice);
+ if (!createTempFile()) {
+ m_deviceBuffer.clear();
+ stop();
+ return;
+ }
+ QTimer::singleShot(0, m_decoder, &Decoder::doDecode);
+ }
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qandroidaudiodecoder_p.cpp"
diff --git a/src/plugins/multimedia/android/audio/qandroidaudiodecoder_p.h b/src/plugins/multimedia/android/audio/qandroidaudiodecoder_p.h
new file mode 100644
index 000000000..65a0f1855
--- /dev/null
+++ b/src/plugins/multimedia/android/audio/qandroidaudiodecoder_p.h
@@ -0,0 +1,118 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDAUDIODECODER_P_H
+#define QANDROIDAUDIODECODER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+#include "private/qplatformaudiodecoder_p.h"
+
+#include <QtCore/qurl.h>
+#include <QThread>
+
+#include "media/NdkMediaCodec.h"
+#include "media/NdkMediaExtractor.h"
+#include "media/NdkMediaFormat.h"
+#include "media/NdkMediaError.h"
+
+
+QT_BEGIN_NAMESPACE
+
+class Decoder : public QObject
+{
+ Q_OBJECT
+public:
+ Decoder();
+ ~Decoder();
+
+public slots:
+ void setSource(const QUrl &source);
+ void doDecode();
+ void stop();
+
+signals:
+ void positionChanged(const QAudioBuffer &buffer, qint64 position);
+ void durationChanged(const qint64 duration);
+ void error(const QAudioDecoder::Error error, const QString &errorString);
+ void finished();
+ void decodingChanged(bool decoding);
+
+private:
+ void createDecoder();
+
+ AMediaCodec *m_codec = nullptr;
+ AMediaExtractor *m_extractor = nullptr;
+ AMediaFormat *m_format = nullptr;
+
+ QAudioFormat m_outputFormat;
+ QString m_formatError;
+    bool m_inputEOS = false;
+};
+
+
+class QAndroidAudioDecoder : public QPlatformAudioDecoder
+{
+ Q_OBJECT
+public:
+ QAndroidAudioDecoder(QAudioDecoder *parent);
+ virtual ~QAndroidAudioDecoder();
+
+ QUrl source() const override { return m_source; }
+ void setSource(const QUrl &fileName) override;
+
+ QIODevice *sourceDevice() const override { return m_device; }
+ void setSourceDevice(QIODevice *device) override;
+
+ void start() override;
+ void stop() override;
+
+ QAudioFormat audioFormat() const override { return {}; }
+ void setAudioFormat(const QAudioFormat &/*format*/) override {}
+
+ QAudioBuffer read() override;
+ bool bufferAvailable() const override;
+
+ qint64 position() const override;
+ qint64 duration() const override;
+
+signals:
+ void setSourceUrl(const QUrl &source);
+
+private slots:
+ void positionChanged(QAudioBuffer audioBuffer, qint64 position);
+ void durationChanged(qint64 duration);
+ void error(const QAudioDecoder::Error error, const QString &errorString);
+ void readDevice();
+ void finished();
+
+private:
+ bool requestPermissions();
+ bool createTempFile();
+ void decode();
+
+ QIODevice *m_device = nullptr;
+ Decoder *m_decoder;
+
+ QList<QPair<QAudioBuffer, int>> m_audioBuffer;
+ QUrl m_source;
+
+ qint64 m_position = -1;
+ qint64 m_duration = -1;
+
+ QByteArray m_deviceBuffer;
+
+ QThread *m_threadDecoder = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDAUDIODECODER_P_H
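
The header makes the threading split visible: Decoder has no thread affinity of its own, and QAndroidAudioDecoder moves it onto m_threadDecoder, reaching it only through cross-thread signals (setSourceUrl) and queued invocations (the QTimer::singleShot calls targeting Decoder::doDecode), so the blocking AMediaCodec loop never runs on the GUI thread. The same worker-object pattern in isolation, with generic names:

    // Sketch: the worker-object pattern QAndroidAudioDecoder relies on.
    #include <QMetaObject>
    #include <QObject>
    #include <QThread>

    class Worker : public QObject
    {
    public:
        void doWork() { /* long-running job; executes on the worker thread */ }
    };

    void setup(Worker *worker, QThread *thread)
    {
        worker->moveToThread(thread); // re-targets the worker's thread affinity
        thread->start();
        // Queued invocation: doWork() runs on 'thread', not on the caller's
        // thread, which is what keeps the decode loop off the GUI thread.
        QMetaObject::invokeMethod(worker, &Worker::doWork, Qt::QueuedConnection);
    }
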
diff --git a/src/plugins/multimedia/android/common/qandroidaudioinput.cpp b/src/plugins/multimedia/android/common/qandroidaudioinput.cpp
new file mode 100644
index 000000000..a1eb9258b
--- /dev/null
+++ b/src/plugins/multimedia/android/common/qandroidaudioinput.cpp
@@ -0,0 +1,47 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidaudioinput_p.h"
+
+#include <qaudioinput.h>
+
+#include <QtCore/qjniobject.h>
+
+QT_BEGIN_NAMESPACE
+
+QAndroidAudioInput::QAndroidAudioInput(QAudioInput *parent)
+ : QObject(parent),
+ QPlatformAudioInput(parent)
+{
+ m_muted = isMuted();
+}
+
+QAndroidAudioInput::~QAndroidAudioInput()
+{
+ setMuted(m_muted);
+}
+
+void QAndroidAudioInput::setMuted(bool muted)
+{
+ bool isInputMuted = isMuted();
+ if (muted != isInputMuted) {
+ QJniObject::callStaticMethod<void>(
+ "org/qtproject/qt/android/multimedia/QtAudioDeviceManager",
+ "setInputMuted",
+ "(Z)V",
+ muted);
+ emit mutedChanged(muted);
+ }
+}
+
+bool QAndroidAudioInput::isMuted() const
+{
+ return QJniObject::callStaticMethod<jboolean>(
+ "org/qtproject/qt/android/multimedia/QtAudioDeviceManager",
+ "isMicrophoneMute",
+ "()Z");
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qandroidaudioinput_p.cpp"
diff --git a/src/plugins/multimedia/android/common/qandroidaudioinput_p.h b/src/plugins/multimedia/android/common/qandroidaudioinput_p.h
new file mode 100644
index 000000000..ef59da8ec
--- /dev/null
+++ b/src/plugins/multimedia/android/common/qandroidaudioinput_p.h
@@ -0,0 +1,47 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDAUDIOINPUT_H
+#define QANDROIDAUDIOINPUT_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qobject.h>
+
+#include <private/qtmultimediaglobal_p.h>
+#include <private/qplatformaudioinput_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class Q_MULTIMEDIA_EXPORT QAndroidAudioInput : public QObject, public QPlatformAudioInput
+{
+ Q_OBJECT
+
+public:
+ explicit QAndroidAudioInput(QAudioInput *parent);
+ ~QAndroidAudioInput();
+
+ void setMuted(bool muted) override;
+
+ bool isMuted() const;
+
+Q_SIGNALS:
+ void mutedChanged(bool muted);
+
+private:
+ bool m_muted = false;
+
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/android/common/qandroidaudiooutput_p.h b/src/plugins/multimedia/android/common/qandroidaudiooutput_p.h
new file mode 100644
index 000000000..d5d25b458
--- /dev/null
+++ b/src/plugins/multimedia/android/common/qandroidaudiooutput_p.h
@@ -0,0 +1,30 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QANDROIDAUDIOOUTPUT_H
+#define QANDROIDAUDIOOUTPUT_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformaudiooutput_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class Q_MULTIMEDIA_EXPORT QAndroidAudioOutput : public QPlatformAudioOutput
+{
+public:
+ QAndroidAudioOutput(QAudioOutput *qq) : QPlatformAudioOutput(qq) {}
+};
+
+QT_END_NAMESPACE
+
+
+#endif // QANDROIDAUDIOOUTPUT_H
diff --git a/src/plugins/multimedia/android/common/qandroidglobal_p.h b/src/plugins/multimedia/android/common/qandroidglobal_p.h
new file mode 100644
index 000000000..1022fa061
--- /dev/null
+++ b/src/plugins/multimedia/android/common/qandroidglobal_p.h
@@ -0,0 +1,28 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDGLOBAL_H
+#define QANDROIDGLOBAL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qtmultimediaglobal_p.h>
+#include <QtCore/qglobal.h>
+#include <QtCore/qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+Q_DECLARE_LOGGING_CATEGORY(qtAndroidMediaPlugin)
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDGLOBAL_H
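
Q_DECLARE_LOGGING_CATEGORY only declares the category; exactly one translation unit must define it with Q_LOGGING_CATEGORY before qCDebug(qtAndroidMediaPlugin) calls elsewhere in the plugin can link. A sketch of the matching definition (the category string here is an assumption, not quoted from the patch):

    // Sketch: the one-time definition paired with the declaration above.
    #include "qandroidglobal_p.h"

    Q_LOGGING_CATEGORY(qtAndroidMediaPlugin, "qt.multimedia.android")

    static void logStartup()
    {
        qCDebug(qtAndroidMediaPlugin) << "android media plugin initialized";
    }
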
diff --git a/src/plugins/multimedia/android/common/qandroidmultimediautils.cpp b/src/plugins/multimedia/android/common/qandroidmultimediautils.cpp
new file mode 100644
index 000000000..6e4b95fe9
--- /dev/null
+++ b/src/plugins/multimedia/android/common/qandroidmultimediautils.cpp
@@ -0,0 +1,125 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidmultimediautils_p.h"
+#include "qandroidglobal_p.h"
+
+#include <qlist.h>
+#include <QtCore/qcoreapplication.h>
+#include <QtCore/qpermissions.h>
+#include <QtCore/private/qandroidextras_p.h>
+
+QT_BEGIN_NAMESPACE
+
+int qt_findClosestValue(const QList<int> &list, int value)
+{
+ if (list.size() < 2)
+ return 0;
+
+ int begin = 0;
+ int end = list.size() - 1;
+ int pivot = begin + (end - begin) / 2;
+ int v = list.at(pivot);
+
+ while (end - begin > 1) {
+ if (value == v)
+ return pivot;
+
+ if (value > v)
+ begin = pivot;
+ else
+ end = pivot;
+
+ pivot = begin + (end - begin) / 2;
+ v = list.at(pivot);
+ }
+
+ return value - v >= list.at(pivot + 1) - value ? pivot + 1 : pivot;
+}
+
+bool qt_sizeLessThan(const QSize &s1, const QSize &s2)
+{
+ return s1.width() * s1.height() < s2.width() * s2.height();
+}
+
+QVideoFrameFormat::PixelFormat qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat f)
+{
+ switch (f) {
+ case AndroidCamera::NV21:
+ return QVideoFrameFormat::Format_NV21;
+ case AndroidCamera::YV12:
+ return QVideoFrameFormat::Format_YV12;
+ case AndroidCamera::YUY2:
+ return QVideoFrameFormat::Format_YUYV;
+ case AndroidCamera::JPEG:
+ return QVideoFrameFormat::Format_Jpeg;
+ default:
+ return QVideoFrameFormat::Format_Invalid;
+ }
+}
+
+AndroidCamera::ImageFormat qt_androidImageFormatFromPixelFormat(QVideoFrameFormat::PixelFormat f)
+{
+ switch (f) {
+ case QVideoFrameFormat::Format_NV21:
+ return AndroidCamera::NV21;
+ case QVideoFrameFormat::Format_YV12:
+ return AndroidCamera::YV12;
+ case QVideoFrameFormat::Format_YUYV:
+ return AndroidCamera::YUY2;
+ case QVideoFrameFormat::Format_Jpeg:
+ return AndroidCamera::JPEG;
+ default:
+ return AndroidCamera::UnknownImageFormat;
+ }
+}
+
+static bool androidRequestPermission(const QString &permission)
+{
+ if (QNativeInterface::QAndroidApplication::sdkVersion() < 23)
+ return true;
+
+ // Permission already granted?
+ if (QtAndroidPrivate::checkPermission(permission).result() == QtAndroidPrivate::Authorized)
+ return true;
+
+ if (QtAndroidPrivate::requestPermission(permission).result() != QtAndroidPrivate::Authorized)
+ return false;
+
+ return true;
+}
+
+static bool androidCheckPermission(const QPermission &permission)
+{
+ return qApp->checkPermission(permission) == Qt::PermissionStatus::Granted;
+}
+
+bool qt_androidCheckCameraPermission()
+{
+ const QCameraPermission permission;
+ const auto granted = androidCheckPermission(permission);
+ if (!granted)
+ qCDebug(qtAndroidMediaPlugin, "Camera permission not granted!");
+ return granted;
+}
+
+bool qt_androidCheckMicrophonePermission()
+{
+ const QMicrophonePermission permission;
+ const auto granted = androidCheckPermission(permission);
+ if (!granted)
+ qCDebug(qtAndroidMediaPlugin, "Microphone permission not granted!");
+ return granted;
+}
+
+bool qt_androidRequestWriteStoragePermission()
+{
+ if (!androidRequestPermission(QStringLiteral("android.permission.WRITE_EXTERNAL_STORAGE"))) {
+ qCDebug(qtAndroidMediaPlugin, "Storage permission denied by user!");
+ return false;
+ }
+
+ return true;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/android/common/qandroidmultimediautils_p.h b/src/plugins/multimedia/android/common/qandroidmultimediautils_p.h
new file mode 100644
index 000000000..5fe841e8c
--- /dev/null
+++ b/src/plugins/multimedia/android/common/qandroidmultimediautils_p.h
@@ -0,0 +1,40 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDMULTIMEDIAUTILS_H
+#define QANDROIDMULTIMEDIAUTILS_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qglobal.h>
+#include <qsize.h>
+#include "androidcamera_p.h"
+
+QT_BEGIN_NAMESPACE
+
+// return the index of the closest value to <value> in <list>
+// (binary search)
+int qt_findClosestValue(const QList<int> &list, int value);
+
+bool qt_sizeLessThan(const QSize &s1, const QSize &s2);
+
+QVideoFrameFormat::PixelFormat qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat f);
+AndroidCamera::ImageFormat qt_androidImageFormatFromPixelFormat(QVideoFrameFormat::PixelFormat f);
+
+bool qt_androidRequestWriteStoragePermission();
+
+bool qt_androidCheckCameraPermission();
+bool qt_androidCheckMicrophonePermission();
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDMULTIMEDIAUTILS_H
diff --git a/src/plugins/multimedia/android/common/qandroidvideooutput.cpp b/src/plugins/multimedia/android/common/qandroidvideooutput.cpp
new file mode 100644
index 000000000..0724a8359
--- /dev/null
+++ b/src/plugins/multimedia/android/common/qandroidvideooutput.cpp
@@ -0,0 +1,468 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidvideooutput_p.h"
+#include "androidsurfacetexture_p.h"
+
+#include <rhi/qrhi.h>
+#include <QtGui/private/qopenglextensions_p.h>
+#include <private/qhwvideobuffer_p.h>
+#include <private/qvideoframeconverter_p.h>
+#include <private/qplatformvideosink_p.h>
+#include <private/qvideoframe_p.h>
+#include <qvideosink.h>
+#include <qopenglcontext.h>
+#include <qopenglfunctions.h>
+#include <qvideoframeformat.h>
+#include <qthread.h>
+#include <qfile.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAndroidVideoFrameTextures : public QVideoFrameTextures
+{
+public:
+ QAndroidVideoFrameTextures(QRhi *rhi, QSize size, quint64 handle)
+ {
+ m_tex.reset(rhi->newTexture(QRhiTexture::RGBA8, size, 1));
+ m_tex->createFrom({quint64(handle), 0});
+ }
+
+ QRhiTexture *texture(uint plane) const override
+ {
+ return plane == 0 ? m_tex.get() : nullptr;
+ }
+
+private:
+ std::unique_ptr<QRhiTexture> m_tex;
+};
+
+// QRhiWithThreadGuard keeps the QRhi and the QThread that created it alive, to allow proper cleanup
+class QRhiWithThreadGuard : public QObject {
+ Q_OBJECT
+public:
+ QRhiWithThreadGuard(std::shared_ptr<QRhi> r, std::shared_ptr<AndroidTextureThread> t)
+ : m_guardRhi(std::move(r)), m_thread(std::move(t)) {}
+ ~QRhiWithThreadGuard();
+protected:
+ std::shared_ptr<QRhi> m_guardRhi;
+private:
+ std::shared_ptr<AndroidTextureThread> m_thread;
+};
+
+class AndroidTextureVideoBuffer : public QRhiWithThreadGuard, public QHwVideoBuffer
+{
+public:
+ AndroidTextureVideoBuffer(std::shared_ptr<QRhi> rhi,
+ std::shared_ptr<AndroidTextureThread> thread,
+ std::unique_ptr<QRhiTexture> tex, const QSize &size)
+ : QRhiWithThreadGuard(std::move(rhi), std::move(thread)),
+ QHwVideoBuffer(QVideoFrame::RhiTextureHandle, m_guardRhi.get()),
+ m_size(size),
+ m_tex(std::move(tex))
+ {}
+
+ MapData map(QtVideo::MapMode mode) override;
+
+ void unmap() override
+ {
+ m_image = {};
+ m_mapMode = QtVideo::MapMode::NotMapped;
+ }
+
+ std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *rhi) override
+ {
+ return std::make_unique<QAndroidVideoFrameTextures>(rhi, m_size, m_tex->nativeTexture().object);
+ }
+
+private:
+ QSize m_size;
+ std::unique_ptr<QRhiTexture> m_tex;
+ QImage m_image;
+ QtVideo::MapMode m_mapMode = QtVideo::MapMode::NotMapped;
+};
+
+class ImageFromVideoFrameHelper : public QHwVideoBuffer
+{
+public:
+ ImageFromVideoFrameHelper(AndroidTextureVideoBuffer &atvb)
+ : QHwVideoBuffer(QVideoFrame::RhiTextureHandle, atvb.rhi()), m_atvb(atvb)
+ {}
+ std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *rhi) override
+ {
+ return m_atvb.mapTextures(rhi);
+ }
+
+ MapData map(QtVideo::MapMode) override { return {}; }
+ void unmap() override {}
+
+private:
+ AndroidTextureVideoBuffer &m_atvb;
+};
+
+QAbstractVideoBuffer::MapData AndroidTextureVideoBuffer::map(QtVideo::MapMode mode)
+{
+ QAbstractVideoBuffer::MapData mapData;
+
+ if (m_mapMode == QtVideo::MapMode::NotMapped && mode == QtVideo::MapMode::ReadOnly) {
+ m_mapMode = QtVideo::MapMode::ReadOnly;
+ m_image = qImageFromVideoFrame(QVideoFramePrivate::createFrame(
+ std::make_unique<ImageFromVideoFrameHelper>(*this),
+ QVideoFrameFormat(m_size, QVideoFrameFormat::Format_RGBA8888)));
+ mapData.planeCount = 1;
+ mapData.bytesPerLine[0] = m_image.bytesPerLine();
+ mapData.dataSize[0] = static_cast<int>(m_image.sizeInBytes());
+ mapData.data[0] = m_image.bits();
+ }
+
+ return mapData;
+}
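
map() gives CPU access to what is otherwise a GPU-only texture by rendering the frame into a QImage via qImageFromVideoFrame(). From the consumer side this is the ordinary QVideoFrame mapping sequence; a minimal sketch using the classic QVideoFrame::MapMode overload, assuming a frame delivered by the newFrame() signal defined further down:

    void inspectFrame(QVideoFrame frame)
    {
        if (!frame.map(QVideoFrame::ReadOnly)) // triggers the QImage readback above
            return;
        const uchar *pixels = frame.bits(0);   // RGBA8888 data
        const int stride = frame.bytesPerLine(0);
        Q_UNUSED(pixels); Q_UNUSED(stride);
        frame.unmap();                         // resets m_image and the map mode
    }
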
+
+static const float g_quad[] = {
+ -1.f, -1.f, 0.f, 0.f,
+ -1.f, 1.f, 0.f, 1.f,
+ 1.f, 1.f, 1.f, 1.f,
+ 1.f, -1.f, 1.f, 0.f
+};
+
+class TextureCopy
+{
+ static QShader getShader(const QString &name)
+ {
+ QFile f(name);
+ if (f.open(QIODevice::ReadOnly))
+ return QShader::fromSerialized(f.readAll());
+ return {};
+ }
+
+public:
+ TextureCopy(QRhi *rhi, QRhiTexture *externalTex)
+ : m_rhi(rhi)
+ {
+ m_vertexBuffer.reset(m_rhi->newBuffer(QRhiBuffer::Immutable, QRhiBuffer::VertexBuffer, sizeof(g_quad)));
+ m_vertexBuffer->create();
+
+ m_uniformBuffer.reset(m_rhi->newBuffer(QRhiBuffer::Dynamic, QRhiBuffer::UniformBuffer, 64 + 64 + 4 + 4));
+ m_uniformBuffer->create();
+
+ m_sampler.reset(m_rhi->newSampler(QRhiSampler::Nearest, QRhiSampler::Nearest, QRhiSampler::None,
+ QRhiSampler::ClampToEdge, QRhiSampler::ClampToEdge));
+ m_sampler->create();
+
+ m_srb.reset(m_rhi->newShaderResourceBindings());
+ m_srb->setBindings({
+ QRhiShaderResourceBinding::uniformBuffer(0, QRhiShaderResourceBinding::VertexStage | QRhiShaderResourceBinding::FragmentStage, m_uniformBuffer.get()),
+ QRhiShaderResourceBinding::sampledTexture(1, QRhiShaderResourceBinding::FragmentStage, externalTex, m_sampler.get())
+ });
+ m_srb->create();
+
+ m_vertexShader = getShader(QStringLiteral(":/qt-project.org/multimedia/shaders/externalsampler.vert.qsb"));
+ Q_ASSERT(m_vertexShader.isValid());
+ m_fragmentShader = getShader(QStringLiteral(":/qt-project.org/multimedia/shaders/externalsampler.frag.qsb"));
+ Q_ASSERT(m_fragmentShader.isValid());
+ }
+
+ std::unique_ptr<QRhiTexture> copyExternalTexture(QSize size, const QMatrix4x4 &externalTexMatrix);
+
+private:
+ QRhi *m_rhi = nullptr;
+ std::unique_ptr<QRhiBuffer> m_vertexBuffer;
+ std::unique_ptr<QRhiBuffer> m_uniformBuffer;
+ std::unique_ptr<QRhiSampler> m_sampler;
+ std::unique_ptr<QRhiShaderResourceBindings> m_srb;
+ QShader m_vertexShader;
+ QShader m_fragmentShader;
+};
+
+static std::unique_ptr<QRhiGraphicsPipeline> newGraphicsPipeline(QRhi *rhi,
+ QRhiShaderResourceBindings *shaderResourceBindings,
+ QRhiRenderPassDescriptor *renderPassDescriptor,
+ QShader vertexShader,
+ QShader fragmentShader)
+{
+ std::unique_ptr<QRhiGraphicsPipeline> gp(rhi->newGraphicsPipeline());
+ gp->setTopology(QRhiGraphicsPipeline::TriangleFan);
+ gp->setShaderStages({
+ { QRhiShaderStage::Vertex, vertexShader },
+ { QRhiShaderStage::Fragment, fragmentShader }
+ });
+ QRhiVertexInputLayout inputLayout;
+ inputLayout.setBindings({
+ { 4 * sizeof(float) }
+ });
+ inputLayout.setAttributes({
+ { 0, 0, QRhiVertexInputAttribute::Float2, 0 },
+ { 0, 1, QRhiVertexInputAttribute::Float2, 2 * sizeof(float) }
+ });
+ gp->setVertexInputLayout(inputLayout);
+ gp->setShaderResourceBindings(shaderResourceBindings);
+ gp->setRenderPassDescriptor(renderPassDescriptor);
+ gp->create();
+
+ return gp;
+}
+
+std::unique_ptr<QRhiTexture> TextureCopy::copyExternalTexture(QSize size, const QMatrix4x4 &externalTexMatrix)
+{
+ std::unique_ptr<QRhiTexture> tex(m_rhi->newTexture(QRhiTexture::RGBA8, size, 1, QRhiTexture::RenderTarget));
+ if (!tex->create()) {
+ qWarning("Failed to create frame texture");
+ return {};
+ }
+
+ std::unique_ptr<QRhiTextureRenderTarget> renderTarget(m_rhi->newTextureRenderTarget({ { tex.get() } }));
+ std::unique_ptr<QRhiRenderPassDescriptor> renderPassDescriptor(renderTarget->newCompatibleRenderPassDescriptor());
+ renderTarget->setRenderPassDescriptor(renderPassDescriptor.get());
+ renderTarget->create();
+
+ QRhiResourceUpdateBatch *rub = m_rhi->nextResourceUpdateBatch();
+ rub->uploadStaticBuffer(m_vertexBuffer.get(), g_quad);
+
+ QMatrix4x4 identity;
+ char *p = m_uniformBuffer->beginFullDynamicBufferUpdateForCurrentFrame();
+ memcpy(p, identity.constData(), 64);
+ memcpy(p + 64, externalTexMatrix.constData(), 64);
+ float opacity = 1.0f;
+ memcpy(p + 64 + 64, &opacity, 4);
+ m_uniformBuffer->endFullDynamicBufferUpdateForCurrentFrame();
+
+ auto graphicsPipeline = newGraphicsPipeline(m_rhi, m_srb.get(), renderPassDescriptor.get(),
+ m_vertexShader, m_fragmentShader);
+
+ const QRhiCommandBuffer::VertexInput vbufBinding(m_vertexBuffer.get(), 0);
+
+ QRhiCommandBuffer *cb = nullptr;
+ if (m_rhi->beginOffscreenFrame(&cb) != QRhi::FrameOpSuccess)
+ return {};
+
+ cb->beginPass(renderTarget.get(), Qt::transparent, { 1.0f, 0 }, rub);
+ cb->setGraphicsPipeline(graphicsPipeline.get());
+ cb->setViewport({0, 0, float(size.width()), float(size.height())});
+ cb->setShaderResources(m_srb.get());
+ cb->setVertexInput(0, 1, &vbufBinding);
+ cb->draw(4);
+ cb->endPass();
+ m_rhi->endOffscreenFrame();
+
+ QOpenGLContext *ctx = QOpenGLContext::currentContext();
+ QOpenGLFunctions *f = ctx->functions();
+ static_cast<QOpenGLExtensions *>(f)->flushShared();
+
+ return tex;
+}
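
For orientation, the 136-byte uniform buffer filled by the memcpy calls above has the following layout, shown here as an illustrative struct that does not itself appear in the patch:

    struct ExternalSamplerUniforms {
        float matrix[16];    // offset   0: identity MVP for the full-screen quad
        float texMatrix[16]; // offset  64: the SurfaceTexture transform
        float opacity;       // offset 128: constant 1.0
        float pad;           // offset 132: padding up to the allocated size
    };
    static_assert(sizeof(ExternalSamplerUniforms) == 136, "matches 64 + 64 + 4 + 4");
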
+
+static QMatrix4x4 extTransformMatrix(AndroidSurfaceTexture *surfaceTexture)
+{
+ QMatrix4x4 m = surfaceTexture->getTransformMatrix();
+ // flip it back, see
+ // http://androidxref.com/9.0.0_r3/xref/frameworks/native/libs/gui/GLConsumer.cpp#866
+ // (NB our matrix ctor takes row major)
+ static const QMatrix4x4 flipV(1.0f, 0.0f, 0.0f, 0.0f,
+ 0.0f, -1.0f, 0.0f, 1.0f,
+ 0.0f, 0.0f, 1.0f, 0.0f,
+ 0.0f, 0.0f, 0.0f, 1.0f);
+ m *= flipV;
+ return m;
+}
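
In effect, flipV maps texture coordinates (u, v) to (u, 1 - v), undoing the vertical flip that GLConsumer bakes into its matrix. A minimal check of that reading (illustrative, not part of the patch):

    static void checkFlip()
    {
        // Row-major ctor: row 2 is (0, -1, 0, 1), i.e. y' = 1 - y.
        const QMatrix4x4 flip(1, 0, 0, 0,
                              0, -1, 0, 1,
                              0, 0, 1, 0,
                              0, 0, 0, 1);
        Q_ASSERT(flip.map(QPointF(0.25, 0.0)) == QPointF(0.25, 1.0));
    }
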
+
+class AndroidTextureThread : public QThread
+{
+ Q_OBJECT
+public:
+    AndroidTextureThread(QAndroidTextureVideoOutput *vo)
+ : QThread()
+ , m_videoOutput(vo)
+ {
+ }
+
+ ~AndroidTextureThread() {
+ QMetaObject::invokeMethod(this,
+ &AndroidTextureThread::clearSurfaceTexture, Qt::BlockingQueuedConnection);
+ this->quit();
+ this->wait();
+ }
+
+ void start()
+ {
+ QThread::start();
+ moveToThread(this);
+ }
+
+ void initRhi(QOpenGLContext *context)
+ {
+ QRhiGles2InitParams params;
+ params.shareContext = context;
+ params.fallbackSurface = QRhiGles2InitParams::newFallbackSurface();
+ m_rhi.reset(QRhi::create(QRhi::OpenGLES2, &params));
+ }
+
+public slots:
+ void onFrameAvailable(quint64 index)
+ {
+        // Check that 'm_surfaceTexture' has not been reset and that the current index is the
+        // one this connection was created with, because there can be pending frames in the queue.
+ if (m_surfaceTexture && m_surfaceTexture->index() == index) {
+ m_surfaceTexture->updateTexImage();
+ auto matrix = extTransformMatrix(m_surfaceTexture.get());
+ auto tex = m_textureCopy->copyExternalTexture(m_size, matrix);
+ auto *buffer = new AndroidTextureVideoBuffer(m_rhi, m_videoOutput->getSurfaceThread(), std::move(tex), m_size);
+ QVideoFrame frame(buffer, QVideoFrameFormat(m_size, QVideoFrameFormat::Format_RGBA8888));
+ emit newFrame(frame);
+ }
+ }
+
+ void clearFrame() { emit newFrame({}); }
+
+ void setFrameSize(QSize size) { m_size = size; }
+
+ void clearSurfaceTexture()
+ {
+ m_surfaceTexture.reset();
+ m_texture.reset();
+ m_textureCopy.reset();
+ m_rhi.reset();
+ }
+
+ AndroidSurfaceTexture *createSurfaceTexture(QRhi *rhi)
+ {
+ if (m_surfaceTexture)
+ return m_surfaceTexture.get();
+
+ QOpenGLContext *ctx = rhi
+ ? static_cast<const QRhiGles2NativeHandles *>(rhi->nativeHandles())->context
+ : nullptr;
+ initRhi(ctx);
+
+ m_texture.reset(m_rhi->newTexture(QRhiTexture::RGBA8, m_size, 1, QRhiTexture::ExternalOES));
+ m_texture->create();
+ m_surfaceTexture = std::make_unique<AndroidSurfaceTexture>(m_texture->nativeTexture().object);
+ if (m_surfaceTexture->surfaceTexture()) {
+ const quint64 index = m_surfaceTexture->index();
+ connect(m_surfaceTexture.get(), &AndroidSurfaceTexture::frameAvailable, this,
+ [this, index] () { this->onFrameAvailable(index); });
+
+ m_textureCopy = std::make_unique<TextureCopy>(m_rhi.get(), m_texture.get());
+
+ } else {
+ m_texture.reset();
+ m_surfaceTexture.reset();
+ }
+
+ return m_surfaceTexture.get();
+ }
+
+signals:
+ void newFrame(const QVideoFrame &);
+
+private:
+    QAndroidTextureVideoOutput *m_videoOutput;
+ std::shared_ptr<QRhi> m_rhi;
+ std::unique_ptr<AndroidSurfaceTexture> m_surfaceTexture;
+ std::unique_ptr<QRhiTexture> m_texture;
+ std::unique_ptr<TextureCopy> m_textureCopy;
+ QSize m_size;
+};
+
+QRhiWithThreadGuard::~QRhiWithThreadGuard() {
+    // Resetting the m_rhi shared_ptr may delete it (if this is the last reference),
+    // and a QRhi must be deleted from the thread that created it.
+ QMetaObject::invokeMethod(m_thread.get(), [&]() {m_guardRhi.reset();}, Qt::BlockingQueuedConnection);
+}
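
The destructor above is an instance of a reusable pattern: thread-affine state must be torn down on its owning thread, with the caller blocking until that has happened. Generalised as a sketch (the helper name is hypothetical):

    template <typename T>
    static void resetOnOwningThread(std::shared_ptr<T> &resource, QObject *owner)
    {
        // The lambda runs on owner's thread; the caller blocks until it returns.
        QMetaObject::invokeMethod(owner, [&resource] { resource.reset(); },
                                  Qt::BlockingQueuedConnection);
    }
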
+
+QAndroidTextureVideoOutput::QAndroidTextureVideoOutput(QVideoSink *sink, QObject *parent)
+ : QAndroidVideoOutput(parent)
+ , m_sink(sink)
+{
+ if (!m_sink) {
+ qDebug() << "Cannot create QAndroidTextureVideoOutput without a sink.";
+ m_surfaceThread = nullptr;
+ return;
+ }
+
+ startNewSurfaceThread();
+}
+
+void QAndroidTextureVideoOutput::startNewSurfaceThread()
+{
+ m_surfaceThread = std::make_shared<AndroidTextureThread>(this);
+ connect(m_surfaceThread.get(), &AndroidTextureThread::newFrame,
+ this, &QAndroidTextureVideoOutput::newFrame);
+ m_surfaceThread->start();
+}
+
+QAndroidTextureVideoOutput::~QAndroidTextureVideoOutput()
+{
+    // Make sure that no more QVideoFrames will be created by the surface thread
+ QMetaObject::invokeMethod(m_surfaceThread.get(),
+ &AndroidTextureThread::clearSurfaceTexture, Qt::BlockingQueuedConnection);
+}
+
+void QAndroidTextureVideoOutput::setSubtitle(const QString &subtitle)
+{
+ if (m_sink) {
+ auto *sink = m_sink->platformVideoSink();
+ if (sink)
+ sink->setSubtitleText(subtitle);
+ }
+}
+
+bool QAndroidTextureVideoOutput::shouldTextureBeUpdated() const
+{
+ return m_sink->rhi() && m_surfaceCreatedWithoutRhi;
+}
+
+AndroidSurfaceTexture *QAndroidTextureVideoOutput::surfaceTexture()
+{
+ if (!m_sink)
+ return nullptr;
+
+ AndroidSurfaceTexture *surface = nullptr;
+ QMetaObject::invokeMethod(m_surfaceThread.get(), [&]() {
+ auto rhi = m_sink->rhi();
+ if (!rhi) {
+ m_surfaceCreatedWithoutRhi = true;
+ }
+ else if (m_surfaceCreatedWithoutRhi) {
+ m_surfaceThread->clearSurfaceTexture();
+ m_surfaceCreatedWithoutRhi = false;
+ }
+ surface = m_surfaceThread->createSurfaceTexture(rhi);
+ },
+ Qt::BlockingQueuedConnection);
+ return surface;
+}
+
+void QAndroidTextureVideoOutput::setVideoSize(const QSize &size)
+{
+ if (m_nativeSize == size)
+ return;
+
+ m_nativeSize = size;
+ QMetaObject::invokeMethod(m_surfaceThread.get(),
+ [&](){ m_surfaceThread->setFrameSize(size); },
+ Qt::BlockingQueuedConnection);
+}
+
+void QAndroidTextureVideoOutput::stop()
+{
+ m_nativeSize = {};
+ QMetaObject::invokeMethod(m_surfaceThread.get(), [&](){ m_surfaceThread->clearFrame(); });
+}
+
+void QAndroidTextureVideoOutput::reset()
+{
+ if (m_sink)
+ m_sink->platformVideoSink()->setVideoFrame({});
+ QMetaObject::invokeMethod(m_surfaceThread.get(), &AndroidTextureThread::clearSurfaceTexture);
+}
+
+void QAndroidTextureVideoOutput::newFrame(const QVideoFrame &frame)
+{
+ if (m_sink)
+ m_sink->setVideoFrame(frame);
+}
+
+QT_END_NAMESPACE
+
+#include "qandroidvideooutput.moc"
+#include "moc_qandroidvideooutput_p.cpp"
diff --git a/src/plugins/multimedia/android/common/qandroidvideooutput_p.h b/src/plugins/multimedia/android/common/qandroidvideooutput_p.h
new file mode 100644
index 000000000..7c9be5aee
--- /dev/null
+++ b/src/plugins/multimedia/android/common/qandroidvideooutput_p.h
@@ -0,0 +1,93 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDVIDEOOUTPUT_H
+#define QANDROIDVIDEOOUTPUT_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qsize.h>
+#include <qmutex.h>
+#include <qreadwritelock.h>
+#include <qabstractvideobuffer.h>
+#include <qmatrix4x4.h>
+#include <qoffscreensurface.h>
+#include <rhi/qrhi.h>
+
+QT_BEGIN_NAMESPACE
+
+class AndroidSurfaceTexture;
+class AndroidSurfaceHolder;
+class QVideoSink;
+
+class QAndroidVideoOutput : public QObject
+{
+ Q_OBJECT
+public:
+ virtual ~QAndroidVideoOutput() { }
+
+ virtual AndroidSurfaceTexture *surfaceTexture() { return 0; }
+ virtual AndroidSurfaceHolder *surfaceHolder() { return 0; }
+
+ virtual bool isReady() { return true; }
+
+ virtual void setVideoSize(const QSize &) { }
+ virtual void start() { }
+ virtual void stop() { }
+ virtual void reset() { }
+ virtual QSize getVideoSize() const { return QSize(0, 0); }
+
+Q_SIGNALS:
+ void readyChanged(bool);
+
+protected:
+ QAndroidVideoOutput(QObject *parent) : QObject(parent) { }
+};
+
+class AndroidTextureThread;
+class QAndroidTextureVideoOutput : public QAndroidVideoOutput
+{
+ Q_OBJECT
+public:
+ explicit QAndroidTextureVideoOutput(QVideoSink *sink, QObject *parent = 0);
+ ~QAndroidTextureVideoOutput() override;
+
+ QVideoSink *surface() const { return m_sink; }
+ bool shouldTextureBeUpdated() const;
+
+ AndroidSurfaceTexture *surfaceTexture() override;
+
+ void setVideoSize(const QSize &) override;
+ void stop() override;
+ void reset() override;
+ QSize getVideoSize() const override { return m_nativeSize; }
+
+ void setSubtitle(const QString &subtitle);
+ std::shared_ptr<AndroidTextureThread> getSurfaceThread() { return m_surfaceThread; }
+private Q_SLOTS:
+ void newFrame(const QVideoFrame &);
+
+private:
+ void startNewSurfaceThread();
+ QVideoSink *m_sink = nullptr;
+ QSize m_nativeSize;
+ bool m_surfaceCreatedWithoutRhi = false;
+
+ std::shared_ptr<AndroidTextureThread> m_surfaceThread;
+};
+
+QT_END_NAMESPACE
+
+Q_DECLARE_METATYPE(QList<QRhiResource *>)
+Q_DECLARE_METATYPE(QRhi*)
+
+#endif // QANDROIDVIDEOOUTPUT_H
diff --git a/src/plugins/multimedia/android/common/qandroidvideosink.cpp b/src/plugins/multimedia/android/common/qandroidvideosink.cpp
new file mode 100644
index 000000000..3da5eab31
--- /dev/null
+++ b/src/plugins/multimedia/android/common/qandroidvideosink.cpp
@@ -0,0 +1,35 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidvideosink_p.h"
+#include <rhi/qrhi.h>
+
+#include <QtCore/qdebug.h>
+
+#include <QtCore/qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+QAndroidVideoSink::QAndroidVideoSink(QVideoSink *parent)
+ : QPlatformVideoSink(parent)
+{
+}
+
+QAndroidVideoSink::~QAndroidVideoSink()
+{
+}
+
+void QAndroidVideoSink::setRhi(QRhi *rhi)
+{
+ if (rhi && rhi->backend() != QRhi::OpenGLES2)
+ rhi = nullptr;
+ if (m_rhi == rhi)
+ return;
+
+ m_rhi = rhi;
+ emit rhiChanged(rhi);
+}
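
Only an OpenGL-backed QRhi can sample the Android external texture, so every other backend is deliberately stored as nullptr; illustratively:

    // sink.setRhi(vulkanOrMetalRhi); // coerced to nullptr -> no GPU sampling path
    // sink.setRhi(glesRhi);          // stored, and rhiChanged(glesRhi) is emitted
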
+
+QT_END_NAMESPACE
+
+#include "moc_qandroidvideosink_p.cpp"
diff --git a/src/plugins/multimedia/android/common/qandroidvideosink_p.h b/src/plugins/multimedia/android/common/qandroidvideosink_p.h
new file mode 100644
index 000000000..9afc58f65
--- /dev/null
+++ b/src/plugins/multimedia/android/common/qandroidvideosink_p.h
@@ -0,0 +1,41 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDVIDEOSINK_P_H
+#define QANDROIDVIDEOSINK_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qtmultimediaglobal_p.h>
+#include <private/qplatformvideosink_p.h>
+
+#include <qvideosink.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAndroidVideoSink
+ : public QPlatformVideoSink
+{
+ Q_OBJECT
+public:
+ explicit QAndroidVideoSink(QVideoSink *parent = 0);
+ ~QAndroidVideoSink();
+
+ void setRhi(QRhi *rhi) override;
+
+private:
+ QRhi *m_rhi = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidcamera.cpp b/src/plugins/multimedia/android/mediacapture/qandroidcamera.cpp
new file mode 100644
index 000000000..52d2e00f6
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidcamera.cpp
@@ -0,0 +1,562 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidcamera_p.h"
+#include "qandroidcamerasession_p.h"
+#include "qandroidcapturesession_p.h"
+#include "qandroidmediacapturesession_p.h"
+#include <qmediadevices.h>
+#include <qcameradevice.h>
+#include <qtimer.h>
+#include "qandroidmultimediautils_p.h"
+
+QT_BEGIN_NAMESPACE
+
+QAndroidCamera::QAndroidCamera(QCamera *camera)
+ : QPlatformCamera(camera)
+{
+ Q_ASSERT(camera);
+}
+
+QAndroidCamera::~QAndroidCamera()
+{
+}
+
+void QAndroidCamera::setActive(bool active)
+{
+ if (m_cameraSession) {
+ m_cameraSession->setActive(active);
+ } else {
+ isPendingSetActive = active;
+ }
+}
+
+bool QAndroidCamera::isActive() const
+{
+ return m_cameraSession ? m_cameraSession->isActive() : false;
+}
+
+void QAndroidCamera::setCamera(const QCameraDevice &camera)
+{
+ m_cameraDev = camera;
+
+ if (m_cameraSession) {
+ int id = 0;
+ auto cameras = QMediaDevices::videoInputs();
+ for (int i = 0; i < cameras.size(); ++i) {
+ if (cameras.at(i) == camera) {
+ id = i;
+ break;
+ }
+ }
+ if (id != m_cameraSession->getSelectedCameraId()) {
+ m_cameraSession->setSelectedCameraId(id);
+ reactivateCameraSession();
+ }
+ }
+}
+
+void QAndroidCamera::reactivateCameraSession()
+{
+ if (m_cameraSession->isActive()) {
+ if (m_service->captureSession() &&
+ m_service->captureSession()->state() == QMediaRecorder::RecordingState) {
+ m_service->captureSession()->stop();
+ qWarning() << "Changing camera during recording not supported";
+ }
+ m_cameraSession->setActive(false);
+ m_cameraSession->setActive(true);
+ }
+}
+
+bool QAndroidCamera::setCameraFormat(const QCameraFormat &format)
+{
+ m_cameraFormat = format;
+
+ if (m_cameraSession)
+ m_cameraSession->setCameraFormat(m_cameraFormat);
+
+ return true;
+}
+
+void QAndroidCamera::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+ QAndroidMediaCaptureSession *captureSession = static_cast<QAndroidMediaCaptureSession *>(session);
+ if (m_service == captureSession)
+ return;
+
+ m_service = captureSession;
+ if (!m_service) {
+        disconnect(m_cameraSession, nullptr, this, nullptr);
+ m_cameraSession = nullptr;
+ return;
+ }
+
+ m_cameraSession = m_service->cameraSession();
+ Q_ASSERT(m_cameraSession);
+ if (!m_cameraFormat.isNull())
+ m_cameraSession->setCameraFormat(m_cameraFormat);
+
+ setCamera(m_cameraDev);
+
+ connect(m_cameraSession, &QAndroidCameraSession::activeChanged, this, &QAndroidCamera::activeChanged);
+ connect(m_cameraSession, &QAndroidCameraSession::error, this, &QAndroidCamera::error);
+ connect(m_cameraSession, &QAndroidCameraSession::opened, this, &QAndroidCamera::onCameraOpened);
+
+ if (isPendingSetActive) {
+ setActive(true);
+ isPendingSetActive = false;
+ }
+}
+
+void QAndroidCamera::setFocusMode(QCamera::FocusMode mode)
+{
+ if (!m_cameraSession || !m_cameraSession->camera())
+ return;
+
+ if (isFocusModeSupported(mode)) {
+ QString focusMode;
+
+ switch (mode) {
+ case QCamera::FocusModeHyperfocal:
+ focusMode = QLatin1String("edof");
+ break;
+ case QCamera::FocusModeInfinity: // not 100%, but close
+ focusMode = QLatin1String("infinity");
+ break;
+ case QCamera::FocusModeManual:
+ focusMode = QLatin1String("fixed");
+ break;
+ case QCamera::FocusModeAutoNear:
+ focusMode = QLatin1String("macro");
+ break;
+ case QCamera::FocusModeAuto:
+ case QCamera::FocusModeAutoFar:
+            // ### Always continuous-video for now; continuous-picture would be
+            // the alternative for still-image-only capture sessions.
+            focusMode = QLatin1String("continuous-video");
+ break;
+ }
+
+ m_cameraSession->camera()->setFocusMode(focusMode);
+
+ // reset focus position
+ m_cameraSession->camera()->cancelAutoFocus();
+
+ focusModeChanged(mode);
+ }
+}
+
+bool QAndroidCamera::isFocusModeSupported(QCamera::FocusMode mode) const
+{
+ return (m_cameraSession && m_cameraSession->camera()) ? m_supportedFocusModes.contains(mode) : false;
+}
+
+void QAndroidCamera::onCameraOpened()
+{
+ Q_ASSERT(m_cameraSession);
+ connect(m_cameraSession->camera(), &AndroidCamera::previewSizeChanged, this, &QAndroidCamera::setCameraFocusArea);
+
+ m_supportedFocusModes.clear();
+ m_continuousPictureFocusSupported = false;
+ m_continuousVideoFocusSupported = false;
+ m_focusPointSupported = false;
+
+ QStringList focusModes = m_cameraSession->camera()->getSupportedFocusModes();
+ for (int i = 0; i < focusModes.size(); ++i) {
+ const QString &focusMode = focusModes.at(i);
+ if (focusMode == QLatin1String("continuous-picture")) {
+ m_supportedFocusModes << QCamera::FocusModeAuto;
+ m_continuousPictureFocusSupported = true;
+ } else if (focusMode == QLatin1String("continuous-video")) {
+ m_supportedFocusModes << QCamera::FocusModeAuto;
+ m_continuousVideoFocusSupported = true;
+ } else if (focusMode == QLatin1String("edof")) {
+ m_supportedFocusModes << QCamera::FocusModeHyperfocal;
+ } else if (focusMode == QLatin1String("fixed")) {
+ m_supportedFocusModes << QCamera::FocusModeManual;
+ } else if (focusMode == QLatin1String("infinity")) {
+ m_supportedFocusModes << QCamera::FocusModeInfinity;
+ } else if (focusMode == QLatin1String("macro")) {
+ m_supportedFocusModes << QCamera::FocusModeAutoNear;
+ }
+ }
+
+ if (m_cameraSession->camera()->getMaxNumFocusAreas() > 0)
+ m_focusPointSupported = true;
+
+ auto m = focusMode();
+ if (!m_supportedFocusModes.contains(m))
+ m = QCamera::FocusModeAuto;
+
+ setFocusMode(m);
+ setCustomFocusPoint(focusPoint());
+
+ if (m_cameraSession->camera()->isZoomSupported()) {
+ m_zoomRatios = m_cameraSession->camera()->getZoomRatios();
+ qreal maxZoom = m_zoomRatios.last() / qreal(100);
+ maximumZoomFactorChanged(maxZoom);
+ zoomTo(1, -1);
+ } else {
+ m_zoomRatios.clear();
+ maximumZoomFactorChanged(1.0);
+ }
+
+ m_minExposureCompensationIndex = m_cameraSession->camera()->getMinExposureCompensation();
+ m_maxExposureCompensationIndex = m_cameraSession->camera()->getMaxExposureCompensation();
+ m_exposureCompensationStep = m_cameraSession->camera()->getExposureCompensationStep();
+ exposureCompensationRangeChanged(m_minExposureCompensationIndex*m_exposureCompensationStep,
+ m_maxExposureCompensationIndex*m_exposureCompensationStep);
+
+ m_supportedExposureModes.clear();
+ QStringList sceneModes = m_cameraSession->camera()->getSupportedSceneModes();
+ if (!sceneModes.isEmpty()) {
+ for (int i = 0; i < sceneModes.size(); ++i) {
+ const QString &sceneMode = sceneModes.at(i);
+ if (sceneMode == QLatin1String("auto"))
+ m_supportedExposureModes << QCamera::ExposureAuto;
+ else if (sceneMode == QLatin1String("beach"))
+ m_supportedExposureModes << QCamera::ExposureBeach;
+ else if (sceneMode == QLatin1String("night"))
+ m_supportedExposureModes << QCamera::ExposureNight;
+ else if (sceneMode == QLatin1String("portrait"))
+ m_supportedExposureModes << QCamera::ExposurePortrait;
+ else if (sceneMode == QLatin1String("snow"))
+ m_supportedExposureModes << QCamera::ExposureSnow;
+ else if (sceneMode == QLatin1String("sports"))
+ m_supportedExposureModes << QCamera::ExposureSports;
+ else if (sceneMode == QLatin1String("action"))
+ m_supportedExposureModes << QCamera::ExposureAction;
+ else if (sceneMode == QLatin1String("landscape"))
+ m_supportedExposureModes << QCamera::ExposureLandscape;
+ else if (sceneMode == QLatin1String("night-portrait"))
+ m_supportedExposureModes << QCamera::ExposureNightPortrait;
+ else if (sceneMode == QLatin1String("theatre"))
+ m_supportedExposureModes << QCamera::ExposureTheatre;
+ else if (sceneMode == QLatin1String("sunset"))
+ m_supportedExposureModes << QCamera::ExposureSunset;
+ else if (sceneMode == QLatin1String("steadyphoto"))
+ m_supportedExposureModes << QCamera::ExposureSteadyPhoto;
+ else if (sceneMode == QLatin1String("fireworks"))
+ m_supportedExposureModes << QCamera::ExposureFireworks;
+ else if (sceneMode == QLatin1String("party"))
+ m_supportedExposureModes << QCamera::ExposureParty;
+ else if (sceneMode == QLatin1String("candlelight"))
+ m_supportedExposureModes << QCamera::ExposureCandlelight;
+ else if (sceneMode == QLatin1String("barcode"))
+ m_supportedExposureModes << QCamera::ExposureBarcode;
+ }
+ }
+
+ setExposureCompensation(exposureCompensation());
+ setExposureMode(exposureMode());
+
+ isFlashSupported = false;
+ isFlashAutoSupported = false;
+ isTorchSupported = false;
+
+ QStringList flashModes = m_cameraSession->camera()->getSupportedFlashModes();
+ for (int i = 0; i < flashModes.size(); ++i) {
+ const QString &flashMode = flashModes.at(i);
+ if (flashMode == QLatin1String("auto"))
+ isFlashAutoSupported = true;
+ else if (flashMode == QLatin1String("on"))
+ isFlashSupported = true;
+ else if (flashMode == QLatin1String("torch"))
+ isTorchSupported = true;
+ }
+
+ setFlashMode(flashMode());
+
+ m_supportedWhiteBalanceModes.clear();
+ QStringList whiteBalanceModes = m_cameraSession->camera()->getSupportedWhiteBalance();
+ for (int i = 0; i < whiteBalanceModes.size(); ++i) {
+ const QString &wb = whiteBalanceModes.at(i);
+ if (wb == QLatin1String("auto")) {
+ m_supportedWhiteBalanceModes.insert(QCamera::WhiteBalanceAuto,
+ QStringLiteral("auto"));
+ } else if (wb == QLatin1String("cloudy-daylight")) {
+ m_supportedWhiteBalanceModes.insert(QCamera::WhiteBalanceCloudy,
+ QStringLiteral("cloudy-daylight"));
+ } else if (wb == QLatin1String("daylight")) {
+ m_supportedWhiteBalanceModes.insert(QCamera::WhiteBalanceSunlight,
+ QStringLiteral("daylight"));
+ } else if (wb == QLatin1String("fluorescent")) {
+ m_supportedWhiteBalanceModes.insert(QCamera::WhiteBalanceFluorescent,
+ QStringLiteral("fluorescent"));
+ } else if (wb == QLatin1String("incandescent")) {
+ m_supportedWhiteBalanceModes.insert(QCamera::WhiteBalanceTungsten,
+ QStringLiteral("incandescent"));
+ } else if (wb == QLatin1String("shade")) {
+ m_supportedWhiteBalanceModes.insert(QCamera::WhiteBalanceShade,
+ QStringLiteral("shade"));
+ } else if (wb == QLatin1String("twilight")) {
+ m_supportedWhiteBalanceModes.insert(QCamera::WhiteBalanceSunset,
+ QStringLiteral("twilight"));
+ } else if (wb == QLatin1String("warm-fluorescent")) {
+ m_supportedWhiteBalanceModes.insert(QCamera::WhiteBalanceFlash,
+ QStringLiteral("warm-fluorescent"));
+ }
+ }
+}
+
+//void QAndroidCameraFocusControl::onCameraCaptureModeChanged()
+//{
+// if (m_cameraSession->camera() && m_focusMode == QCamera::FocusModeAudio) {
+// QString focusMode;
+// if ((m_cameraSession->captureMode().testFlag(QCamera::CaptureVideo) && m_continuousVideoFocusSupported)
+// || !m_continuousPictureFocusSupported) {
+// focusMode = QLatin1String("continuous-video");
+// } else {
+// focusMode = QLatin1String("continuous-picture");
+// }
+// m_cameraSession->camera()->setFocusMode(focusMode);
+// m_cameraSession->camera()->cancelAutoFocus();
+// }
+//}
+
+static QRect adjustedArea(const QRectF &area)
+{
+    // Qt maps focus points to the range (0.0, 0.0) -> (1.0, 1.0);
+    // Android maps them to the range (-1000, -1000) -> (1000, 1000).
+    // Convert an area from Qt coordinates to Android coordinates.
+ return QRect(-1000 + qRound(area.x() * 2000),
+ -1000 + qRound(area.y() * 2000),
+ qRound(area.width() * 2000),
+ qRound(area.height() * 2000))
+ .intersected(QRect(-1000, -1000, 2000, 2000));
+}
+
+void QAndroidCamera::setCameraFocusArea()
+{
+ if (!m_cameraSession)
+ return;
+
+ QList<QRect> areas;
+ auto focusPoint = customFocusPoint();
+ if (QRectF(0., 0., 1., 1.).contains(focusPoint)) {
+        // A valid custom focus point is set: build a focus area around it. When no
+        // valid point is set, the list stays empty and the driver picks the focus point.
+ QSize viewportSize = m_cameraSession->camera()->previewSize();
+
+ if (!viewportSize.isValid())
+ return;
+
+ // Set up a 50x50 pixel focus area around the focal point
+ QSizeF focusSize(50.f / viewportSize.width(), 50.f / viewportSize.height());
+ float x = qBound(qreal(0),
+ focusPoint.x() - (focusSize.width() / 2),
+ 1.f - focusSize.width());
+ float y = qBound(qreal(0),
+ focusPoint.y() - (focusSize.height() / 2),
+ 1.f - focusSize.height());
+
+ QRectF area(QPointF(x, y), focusSize);
+
+ areas.append(adjustedArea(area));
+ }
+ m_cameraSession->camera()->setFocusAreas(areas);
+}
+
+void QAndroidCamera::zoomTo(float factor, float rate)
+{
+ Q_UNUSED(rate);
+
+ if (zoomFactor() == factor)
+ return;
+
+ if (!m_cameraSession || !m_cameraSession->camera())
+ return;
+
+ factor = qBound(qreal(1), factor, maxZoomFactor());
+ int validZoomIndex = qt_findClosestValue(m_zoomRatios, qRound(factor * 100));
+ float newZoom = m_zoomRatios.at(validZoomIndex) / qreal(100);
+ m_cameraSession->camera()->setZoom(validZoomIndex);
+ zoomFactorChanged(newZoom);
+}
+
+void QAndroidCamera::setFlashMode(QCamera::FlashMode mode)
+{
+ if (!m_cameraSession || !m_cameraSession->camera())
+ return;
+
+ if (!isFlashModeSupported(mode))
+ return;
+
+ QString flashMode;
+ if (mode == QCamera::FlashAuto)
+ flashMode = QLatin1String("auto");
+ else if (mode == QCamera::FlashOn)
+ flashMode = QLatin1String("on");
+ else // FlashOff
+ flashMode = QLatin1String("off");
+
+ m_cameraSession->camera()->setFlashMode(flashMode);
+ flashModeChanged(mode);
+}
+
+bool QAndroidCamera::isFlashModeSupported(QCamera::FlashMode mode) const
+{
+ if (!m_cameraSession || !m_cameraSession->camera())
+ return false;
+ switch (mode) {
+ case QCamera::FlashOff:
+ return true;
+ case QCamera::FlashOn:
+ return isFlashSupported;
+ case QCamera::FlashAuto:
+ return isFlashAutoSupported;
+ }
+}
+
+bool QAndroidCamera::isFlashReady() const
+{
+ // Android doesn't have an API for that
+ return true;
+}
+
+void QAndroidCamera::setTorchMode(QCamera::TorchMode mode)
+{
+ if (!m_cameraSession)
+ return;
+ auto *camera = m_cameraSession->camera();
+ if (!camera || !isTorchSupported || mode == QCamera::TorchAuto)
+ return;
+
+ if (mode == QCamera::TorchOn) {
+ camera->setFlashMode(QLatin1String("torch"));
+ } else if (mode == QCamera::TorchOff) {
+ // if torch was enabled, it first needs to be turned off before restoring the flash mode
+ camera->setFlashMode(QLatin1String("off"));
+ setFlashMode(flashMode());
+ }
+ torchModeChanged(mode);
+}
+
+bool QAndroidCamera::isTorchModeSupported(QCamera::TorchMode mode) const
+{
+ if (!m_cameraSession || !m_cameraSession->camera())
+ return false;
+ switch (mode) {
+ case QCamera::TorchOff:
+ return true;
+ case QCamera::TorchOn:
+ return isTorchSupported;
+ case QCamera::TorchAuto:
+ return false;
+ }
+}
+
+void QAndroidCamera::setExposureMode(QCamera::ExposureMode mode)
+{
+ if (exposureMode() == mode)
+ return;
+
+ if (!m_cameraSession || !m_cameraSession->camera())
+ return;
+
+ if (!m_supportedExposureModes.contains(mode))
+ return;
+
+ QString sceneMode;
+ switch (mode) {
+ case QCamera::ExposureAuto:
+ sceneMode = QLatin1String("auto");
+ break;
+ case QCamera::ExposureSports:
+ sceneMode = QLatin1String("sports");
+ break;
+ case QCamera::ExposurePortrait:
+ sceneMode = QLatin1String("portrait");
+ break;
+ case QCamera::ExposureBeach:
+ sceneMode = QLatin1String("beach");
+ break;
+ case QCamera::ExposureSnow:
+ sceneMode = QLatin1String("snow");
+ break;
+ case QCamera::ExposureNight:
+ sceneMode = QLatin1String("night");
+ break;
+ case QCamera::ExposureAction:
+ sceneMode = QLatin1String("action");
+ break;
+ case QCamera::ExposureLandscape:
+ sceneMode = QLatin1String("landscape");
+ break;
+ case QCamera::ExposureNightPortrait:
+ sceneMode = QLatin1String("night-portrait");
+ break;
+ case QCamera::ExposureTheatre:
+ sceneMode = QLatin1String("theatre");
+ break;
+ case QCamera::ExposureSunset:
+ sceneMode = QLatin1String("sunset");
+ break;
+ case QCamera::ExposureSteadyPhoto:
+ sceneMode = QLatin1String("steadyphoto");
+ break;
+ case QCamera::ExposureFireworks:
+ sceneMode = QLatin1String("fireworks");
+ break;
+ case QCamera::ExposureParty:
+ sceneMode = QLatin1String("party");
+ break;
+ case QCamera::ExposureCandlelight:
+ sceneMode = QLatin1String("candlelight");
+ break;
+ case QCamera::ExposureBarcode:
+ sceneMode = QLatin1String("barcode");
+ break;
+ default:
+ sceneMode = QLatin1String("auto");
+ mode = QCamera::ExposureAuto;
+ break;
+ }
+
+ m_cameraSession->camera()->setSceneMode(sceneMode);
+ exposureModeChanged(mode);
+}
+
+bool QAndroidCamera::isExposureModeSupported(QCamera::ExposureMode mode) const
+{
+ return m_supportedExposureModes.contains(mode);
+}
+
+void QAndroidCamera::setExposureCompensation(float bias)
+{
+ if (exposureCompensation() == bias || !m_cameraSession || !m_cameraSession->camera())
+ return;
+
+ int biasIndex = qRound(bias / m_exposureCompensationStep);
+ biasIndex = qBound(m_minExposureCompensationIndex, biasIndex, m_maxExposureCompensationIndex);
+ float comp = biasIndex * m_exposureCompensationStep;
+ m_cameraSession->camera()->setExposureCompensation(biasIndex);
+ exposureCompensationChanged(comp);
+}
+
+bool QAndroidCamera::isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const
+{
+ return m_supportedWhiteBalanceModes.contains(mode);
+}
+
+void QAndroidCamera::setWhiteBalanceMode(QCamera::WhiteBalanceMode mode)
+{
+ if (!m_cameraSession)
+ return;
+ auto *camera = m_cameraSession->camera();
+ if (!camera)
+ return;
+ QString wb = m_supportedWhiteBalanceModes.value(mode, QString());
+ if (!wb.isEmpty()) {
+ camera->setWhiteBalance(wb);
+ whiteBalanceModeChanged(mode);
+ }
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qandroidcamera_p.cpp"
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidcamera_p.h b/src/plugins/multimedia/android/mediacapture/qandroidcamera_p.h
new file mode 100644
index 000000000..77bbc3133
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidcamera_p.h
@@ -0,0 +1,99 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDCAMERACONTROL_H
+#define QANDROIDCAMERACONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformcamera_p.h>
+
+#include <qmap.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAndroidCameraSession;
+class QAndroidCameraVideoRendererControl;
+class QAndroidMediaCaptureSession;
+
+class QAndroidCamera : public QPlatformCamera
+{
+ Q_OBJECT
+public:
+ explicit QAndroidCamera(QCamera *camera);
+ virtual ~QAndroidCamera();
+
+ bool isActive() const override;
+ void setActive(bool active) override;
+
+ void setCamera(const QCameraDevice &camera) override;
+ bool setCameraFormat(const QCameraFormat &format) override;
+
+ void setCaptureSession(QPlatformMediaCaptureSession *session) override;
+
+ void setFocusMode(QCamera::FocusMode mode) override;
+ bool isFocusModeSupported(QCamera::FocusMode mode) const override;
+
+ void zoomTo(float factor, float rate) override;
+
+ void setFlashMode(QCamera::FlashMode mode) override;
+ bool isFlashModeSupported(QCamera::FlashMode mode) const override;
+ bool isFlashReady() const override;
+
+ void setTorchMode(QCamera::TorchMode mode) override;
+ bool isTorchModeSupported(QCamera::TorchMode mode) const override;
+
+ void setExposureMode(QCamera::ExposureMode mode) override;
+ bool isExposureModeSupported(QCamera::ExposureMode mode) const override;
+
+ void setExposureCompensation(float bias) override;
+
+ bool isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const override;
+ void setWhiteBalanceMode(QCamera::WhiteBalanceMode mode) override;
+
+private Q_SLOTS:
+ void onCameraOpened();
+ void setCameraFocusArea();
+
+private:
+ void reactivateCameraSession();
+
+ QAndroidCameraSession *m_cameraSession = nullptr;
+ QAndroidMediaCaptureSession *m_service = nullptr;
+
+ QList<QCamera::FocusMode> m_supportedFocusModes;
+ bool m_continuousPictureFocusSupported = false;
+ bool m_continuousVideoFocusSupported = false;
+ bool m_focusPointSupported = false;
+
+ QList<int> m_zoomRatios;
+
+ QList<QCamera::ExposureMode> m_supportedExposureModes;
+ int m_minExposureCompensationIndex;
+ int m_maxExposureCompensationIndex;
+ qreal m_exposureCompensationStep;
+
+ bool isFlashSupported = false;
+ bool isFlashAutoSupported = false;
+ bool isTorchSupported = false;
+ bool isPendingSetActive = false;
+ QCameraDevice m_cameraDev;
+
+ QMap<QCamera::WhiteBalanceMode, QString> m_supportedWhiteBalanceModes;
+ QCameraFormat m_cameraFormat;
+};
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDCAMERACONTROL_H
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidcamerasession.cpp b/src/plugins/multimedia/android/mediacapture/qandroidcamerasession.cpp
new file mode 100644
index 000000000..7eda1175f
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidcamerasession.cpp
@@ -0,0 +1,808 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// Copyright (C) 2016 Ruslan Baratov
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidcamerasession_p.h"
+
+#include "androidcamera_p.h"
+#include "androidmultimediautils_p.h"
+#include "qandroidvideooutput_p.h"
+#include "qandroidmultimediautils_p.h"
+#include "androidmediarecorder_p.h"
+#include <qvideosink.h>
+#include <QtConcurrent/qtconcurrentrun.h>
+#include <qfile.h>
+#include <qguiapplication.h>
+#include <qscreen.h>
+#include <qdebug.h>
+#include <qvideoframe.h>
+#include <private/qplatformimagecapture_p.h>
+#include <private/qplatformvideosink_p.h>
+#include <private/qmemoryvideobuffer_p.h>
+#include <private/qcameradevice_p.h>
+#include <private/qmediastoragelocation_p.h>
+#include <private/qvideoframe_p.h>
+#include <QImageWriter>
+
+QT_BEGIN_NAMESPACE
+
+Q_GLOBAL_STATIC(QList<QCameraDevice>, g_availableCameras)
+
+QAndroidCameraSession::QAndroidCameraSession(QObject *parent)
+ : QObject(parent)
+ , m_selectedCamera(0)
+ , m_camera(0)
+ , m_videoOutput(0)
+ , m_savedState(-1)
+ , m_previewStarted(false)
+ , m_readyForCapture(false)
+ , m_currentImageCaptureId(-1)
+ , m_previewCallback(0)
+ , m_keepActive(false)
+{
+ if (qApp) {
+ connect(qApp, &QGuiApplication::applicationStateChanged,
+ this, &QAndroidCameraSession::onApplicationStateChanged);
+
+ auto screen = qApp->primaryScreen();
+ if (screen) {
+ connect(screen, &QScreen::orientationChanged, this,
+ &QAndroidCameraSession::updateOrientation);
+ enableRotation();
+ }
+ }
+}
+
+QAndroidCameraSession::~QAndroidCameraSession()
+{
+ if (m_sink)
+ disconnect(m_retryPreviewConnection);
+ close();
+}
+
+//void QAndroidCameraSession::setCaptureMode(QCamera::CaptureModes mode)
+//{
+// if (m_captureMode == mode || !isCaptureModeSupported(mode))
+// return;
+
+// m_captureMode = mode;
+// emit captureModeChanged(m_captureMode);
+
+// if (m_previewStarted && m_captureMode.testFlag(QCamera::CaptureStillImage))
+// applyResolution(m_actualImageSettings.resolution());
+//}
+
+void QAndroidCameraSession::setActive(bool active)
+{
+ if (m_active == active)
+ return;
+
+ // If the application is inactive, the camera shouldn't be started. Save the desired state
+ // instead and it will be set when the application becomes active.
+ if (active && qApp->applicationState() == Qt::ApplicationInactive) {
+ m_isStateSaved = true;
+ m_savedState = active;
+ return;
+ }
+
+ m_isStateSaved = false;
+ m_active = active;
+ setActiveHelper(m_active);
+ emit activeChanged(m_active);
+}
+
+void QAndroidCameraSession::setActiveHelper(bool active)
+{
+ if (!active) {
+ stopPreview();
+ close();
+ } else {
+ if (!m_camera && !open()) {
+ emit error(QCamera::CameraError, QStringLiteral("Failed to open camera"));
+ return;
+ }
+ startPreview();
+ }
+}
+
+void QAndroidCameraSession::updateAvailableCameras()
+{
+ g_availableCameras->clear();
+
+ const int numCameras = AndroidCamera::getNumberOfCameras();
+ for (int i = 0; i < numCameras; ++i) {
+ QCameraDevicePrivate *info = new QCameraDevicePrivate;
+ AndroidCamera::getCameraInfo(i, info);
+
+ if (!info->id.isEmpty()) {
+ // Add supported picture and video sizes to the camera info
+ AndroidCamera *camera = AndroidCamera::open(i);
+
+ if (camera) {
+ info->videoFormats = camera->getSupportedFormats();
+ info->photoResolutions = camera->getSupportedPictureSizes();
+ }
+
+ delete camera;
+ g_availableCameras->append(info->create());
+ }
+ }
+}
+
+const QList<QCameraDevice> &QAndroidCameraSession::availableCameras()
+{
+ if (g_availableCameras->isEmpty())
+ updateAvailableCameras();
+
+ return *g_availableCameras;
+}
+
+bool QAndroidCameraSession::open()
+{
+ close();
+
+ m_camera = AndroidCamera::open(m_selectedCamera);
+
+ if (m_camera) {
+ connect(m_camera, &AndroidCamera::pictureExposed,
+ this, &QAndroidCameraSession::onCameraPictureExposed);
+ connect(m_camera, &AndroidCamera::lastPreviewFrameFetched,
+ this, &QAndroidCameraSession::onLastPreviewFrameFetched,
+ Qt::DirectConnection);
+ connect(m_camera, &AndroidCamera::newPreviewFrame,
+ this, &QAndroidCameraSession::onNewPreviewFrame,
+ Qt::DirectConnection);
+ connect(m_camera, &AndroidCamera::pictureCaptured,
+ this, &QAndroidCameraSession::onCameraPictureCaptured);
+ connect(m_camera, &AndroidCamera::previewStarted,
+ this, &QAndroidCameraSession::onCameraPreviewStarted);
+ connect(m_camera, &AndroidCamera::previewStopped,
+ this, &QAndroidCameraSession::onCameraPreviewStopped);
+ connect(m_camera, &AndroidCamera::previewFailedToStart,
+ this, &QAndroidCameraSession::onCameraPreviewFailedToStart);
+ connect(m_camera, &AndroidCamera::takePictureFailed,
+ this, &QAndroidCameraSession::onCameraTakePictureFailed);
+
+ if (m_camera->getPreviewFormat() != AndroidCamera::NV21)
+ m_camera->setPreviewFormat(AndroidCamera::NV21);
+
+ m_camera->notifyNewFrames(m_previewCallback);
+
+ emit opened();
+ setActive(true);
+ }
+
+ return m_camera != 0;
+}
+
+void QAndroidCameraSession::close()
+{
+ if (!m_camera)
+ return;
+
+ stopPreview();
+
+ m_readyForCapture = false;
+ m_currentImageCaptureId = -1;
+ m_currentImageCaptureFileName.clear();
+ m_actualImageSettings = m_requestedImageSettings;
+
+ m_camera->release();
+ delete m_camera;
+ m_camera = 0;
+
+ setActive(false);
+}
+
+void QAndroidCameraSession::setVideoOutput(QAndroidVideoOutput *output)
+{
+ if (m_videoOutput) {
+ m_videoOutput->stop();
+ m_videoOutput->reset();
+ }
+
+ if (output) {
+ m_videoOutput = output;
+ if (m_videoOutput->isReady()) {
+ onVideoOutputReady(true);
+ } else {
+ connect(m_videoOutput, &QAndroidVideoOutput::readyChanged,
+ this, &QAndroidCameraSession::onVideoOutputReady);
+ }
+ } else {
+ m_videoOutput = 0;
+ }
+}
+
+void QAndroidCameraSession::setCameraFormat(const QCameraFormat &format)
+{
+ m_requestedFpsRange.min = format.minFrameRate();
+ m_requestedFpsRange.max = format.maxFrameRate();
+ m_requestedPixelFromat = AndroidCamera::AndroidImageFormatFromQtPixelFormat(format.pixelFormat());
+
+ m_requestedImageSettings.setResolution(format.resolution());
+ m_actualImageSettings.setResolution(format.resolution());
+ if (m_readyForCapture)
+ applyResolution(m_actualImageSettings.resolution());
+}
+
+void QAndroidCameraSession::applyResolution(const QSize &captureSize, bool restartPreview)
+{
+ if (!m_camera)
+ return;
+
+ const QSize currentViewfinderResolution = m_camera->previewSize();
+ const AndroidCamera::ImageFormat currentPreviewFormat = m_camera->getPreviewFormat();
+ const AndroidCamera::FpsRange currentFpsRange = m_camera->getPreviewFpsRange();
+
+ // -- adjust resolution
+ QSize adjustedViewfinderResolution;
+ const QList<QSize> previewSizes = m_camera->getSupportedPreviewSizes();
+
+ const bool validCaptureSize = captureSize.width() > 0 && captureSize.height() > 0;
+ if (validCaptureSize
+ && m_camera->getPreferredPreviewSizeForVideo().isEmpty()) {
+ // According to the Android doc, if getPreferredPreviewSizeForVideo() returns null, it means
+ // the preview size cannot be different from the capture size
+ adjustedViewfinderResolution = captureSize;
+ } else {
+ qreal captureAspectRatio = 0;
+ if (validCaptureSize)
+ captureAspectRatio = qreal(captureSize.width()) / qreal(captureSize.height());
+
+ if (validCaptureSize) {
+ // search for viewfinder resolution with the same aspect ratio
+ qreal minAspectDiff = 1;
+ QSize closestResolution;
+ for (int i = previewSizes.count() - 1; i >= 0; --i) {
+ const QSize &size = previewSizes.at(i);
+ const qreal sizeAspect = qreal(size.width()) / size.height();
+ if (qFuzzyCompare(captureAspectRatio, sizeAspect)) {
+ adjustedViewfinderResolution = size;
+ break;
+ } else if (minAspectDiff > qAbs(sizeAspect - captureAspectRatio)) {
+ closestResolution = size;
+ minAspectDiff = qAbs(sizeAspect - captureAspectRatio);
+ }
+ }
+ if (!adjustedViewfinderResolution.isValid()) {
+ qWarning("Cannot find a viewfinder resolution matching the capture aspect ratio.");
+ if (closestResolution.isValid()) {
+ adjustedViewfinderResolution = closestResolution;
+ qWarning("Using closest viewfinder resolution.");
+ } else {
+ return;
+ }
+ }
+ } else {
+ adjustedViewfinderResolution = previewSizes.last();
+ }
+ }
+
+ // -- adjust pixel format
+
+ AndroidCamera::ImageFormat adjustedPreviewFormat = m_requestedPixelFromat;
+ if (adjustedPreviewFormat == AndroidCamera::UnknownImageFormat)
+ adjustedPreviewFormat = AndroidCamera::NV21;
+
+ // -- adjust FPS
+
+ AndroidCamera::FpsRange adjustedFps = m_requestedFpsRange;
+ if (adjustedFps.min == 0 || adjustedFps.max == 0)
+ adjustedFps = currentFpsRange;
+
+ // -- Set values on camera
+
+ // fix the resolution of output based on the orientation
+ QSize cameraOutputResolution = adjustedViewfinderResolution;
+ QSize videoOutputResolution = adjustedViewfinderResolution;
+ QSize currentVideoOutputResolution = m_videoOutput ? m_videoOutput->getVideoSize() : QSize(0, 0);
+ const int rotation = currentCameraRotation();
+ // only transpose if it's valid for the preview
+ if (rotation == 90 || rotation == 270) {
+ videoOutputResolution.transpose();
+ if (previewSizes.contains(cameraOutputResolution.transposed()))
+ cameraOutputResolution.transpose();
+ }
+
+ if (currentViewfinderResolution != cameraOutputResolution
+ || (m_videoOutput && currentVideoOutputResolution != videoOutputResolution)
+ || currentPreviewFormat != adjustedPreviewFormat || currentFpsRange.min != adjustedFps.min
+ || currentFpsRange.max != adjustedFps.max) {
+ if (m_videoOutput) {
+ m_videoOutput->setVideoSize(videoOutputResolution);
+ }
+
+        // If the preview is running, it has to be stopped before its size can change
+ if (m_previewStarted && restartPreview)
+ m_camera->stopPreview();
+
+ m_camera->setPreviewSize(cameraOutputResolution);
+ m_camera->setPreviewFormat(adjustedPreviewFormat);
+ m_camera->setPreviewFpsRange(adjustedFps);
+
+ // restart preview
+ if (m_previewStarted && restartPreview)
+ m_camera->startPreview();
+ }
+}
+
+QList<QSize> QAndroidCameraSession::getSupportedPreviewSizes() const
+{
+ return m_camera ? m_camera->getSupportedPreviewSizes() : QList<QSize>();
+}
+
+QList<QVideoFrameFormat::PixelFormat> QAndroidCameraSession::getSupportedPixelFormats() const
+{
+ QList<QVideoFrameFormat::PixelFormat> formats;
+
+ if (!m_camera)
+ return formats;
+
+ const QList<AndroidCamera::ImageFormat> nativeFormats = m_camera->getSupportedPreviewFormats();
+
+ formats.reserve(nativeFormats.size());
+
+ for (AndroidCamera::ImageFormat nativeFormat : nativeFormats) {
+ QVideoFrameFormat::PixelFormat format = AndroidCamera::QtPixelFormatFromAndroidImageFormat(nativeFormat);
+ if (format != QVideoFrameFormat::Format_Invalid)
+ formats.append(format);
+ }
+
+ return formats;
+}
+
+QList<AndroidCamera::FpsRange> QAndroidCameraSession::getSupportedPreviewFpsRange() const
+{
+ return m_camera ? m_camera->getSupportedPreviewFpsRange() : QList<AndroidCamera::FpsRange>();
+}
+
+
+bool QAndroidCameraSession::startPreview()
+{
+ if (!m_camera || !m_videoOutput)
+ return false;
+
+ if (m_previewStarted)
+ return true;
+
+ if (!m_videoOutput->isReady())
+ return true; // delay starting until the video output is ready
+
+ Q_ASSERT(m_videoOutput->surfaceTexture() || m_videoOutput->surfaceHolder());
+
+ if ((m_videoOutput->surfaceTexture() && !m_camera->setPreviewTexture(m_videoOutput->surfaceTexture()))
+ || (m_videoOutput->surfaceHolder() && !m_camera->setPreviewDisplay(m_videoOutput->surfaceHolder())))
+ return false;
+
+ applyResolution(m_actualImageSettings.resolution());
+
+ AndroidMultimediaUtils::enableOrientationListener(true);
+
+ updateOrientation();
+ m_camera->startPreview();
+ m_previewStarted = true;
+ m_videoOutput->start();
+
+ return true;
+}
+
+QSize QAndroidCameraSession::getDefaultResolution() const
+{
+ const bool hasHighQualityProfile = AndroidCamcorderProfile::hasProfile(
+ m_camera->cameraId(),
+ AndroidCamcorderProfile::Quality(AndroidCamcorderProfile::QUALITY_HIGH));
+
+ if (hasHighQualityProfile) {
+ const AndroidCamcorderProfile camProfile = AndroidCamcorderProfile::get(
+ m_camera->cameraId(),
+ AndroidCamcorderProfile::Quality(AndroidCamcorderProfile::QUALITY_HIGH));
+
+ return QSize(camProfile.getValue(AndroidCamcorderProfile::videoFrameWidth),
+ camProfile.getValue(AndroidCamcorderProfile::videoFrameHeight));
+ }
+ return QSize();
+}
+
+void QAndroidCameraSession::stopPreview()
+{
+ if (!m_camera || !m_previewStarted)
+ return;
+
+ AndroidMultimediaUtils::enableOrientationListener(false);
+
+ m_camera->stopPreview();
+ m_camera->setPreviewSize(QSize());
+ m_camera->setPreviewTexture(0);
+ m_camera->setPreviewDisplay(0);
+
+ if (m_videoOutput) {
+ m_videoOutput->stop();
+ }
+ m_previewStarted = false;
+}
+
+void QAndroidCameraSession::setImageSettings(const QImageEncoderSettings &settings)
+{
+ if (m_requestedImageSettings == settings)
+ return;
+
+ m_requestedImageSettings = m_actualImageSettings = settings;
+
+ applyImageSettings();
+
+ if (m_readyForCapture)
+ applyResolution(m_actualImageSettings.resolution());
+}
+
+void QAndroidCameraSession::enableRotation()
+{
+ m_rotationEnabled = true;
+}
+
+void QAndroidCameraSession::disableRotation()
+{
+ m_rotationEnabled = false;
+}
+
+void QAndroidCameraSession::updateOrientation()
+{
+ if (!m_camera || !m_rotationEnabled)
+ return;
+
+ m_camera->setDisplayOrientation(currentCameraRotation());
+ applyResolution(m_actualImageSettings.resolution());
+}
+
+
+int QAndroidCameraSession::currentCameraRotation() const
+{
+ if (!m_camera)
+ return 0;
+
+ auto screen = QGuiApplication::primaryScreen();
+ auto screenOrientation = screen->orientation();
+ if (screenOrientation == Qt::PrimaryOrientation)
+ screenOrientation = screen->primaryOrientation();
+
+ int deviceOrientation = 0;
+ switch (screenOrientation) {
+ case Qt::PrimaryOrientation:
+ case Qt::PortraitOrientation:
+ break;
+ case Qt::LandscapeOrientation:
+ deviceOrientation = 90;
+ break;
+ case Qt::InvertedPortraitOrientation:
+ deviceOrientation = 180;
+ break;
+ case Qt::InvertedLandscapeOrientation:
+ deviceOrientation = 270;
+ break;
+ }
+
+ int nativeCameraOrientation = m_camera->getNativeOrientation();
+
+ int rotation;
+ // subtract natural camera orientation and physical device orientation
+ if (m_camera->getFacing() == AndroidCamera::CameraFacingFront) {
+ rotation = (nativeCameraOrientation + deviceOrientation) % 360;
+ rotation = (360 - rotation) % 360; // compensate the mirror
+ } else { // back-facing camera
+ rotation = (nativeCameraOrientation - deviceOrientation + 360) % 360;
+ }
+ return rotation;
+}
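
Two illustrative poses (the mount orientations are assumed for the example):

    // Back camera mounted at 90 deg, device held in landscape (deviceOrientation == 90):
    //   rotation = (90 - 90 + 360) % 360 == 0              -> no extra rotation needed
    // Front camera mounted at 270 deg, same pose:
    //   rotation = (270 + 90) % 360 == 0; (360 - 0) % 360 == 0   (mirror compensation)
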
+
+void QAndroidCameraSession::setPreviewFormat(AndroidCamera::ImageFormat format)
+{
+    if (!m_camera || format == AndroidCamera::UnknownImageFormat)
+        return;
+
+ m_camera->setPreviewFormat(format);
+}
+
+void QAndroidCameraSession::setPreviewCallback(PreviewCallback *callback)
+{
+    QMutexLocker locker(&m_videoFrameCallbackMutex);
+    m_previewCallback = callback;
+    if (m_camera)
+        m_camera->notifyNewFrames(m_previewCallback);
+}
+
+void QAndroidCameraSession::applyImageSettings()
+{
+ if (!m_camera)
+ return;
+
+    // JPEG is the only format supported right now.
+ m_actualImageSettings.setFormat(QImageCapture::JPEG);
+
+ const QSize requestedResolution = m_requestedImageSettings.resolution();
+ const QList<QSize> supportedResolutions = m_camera->getSupportedPictureSizes();
+ if (!requestedResolution.isValid()) {
+ m_actualImageSettings.setResolution(getDefaultResolution());
+ } else if (!supportedResolutions.contains(requestedResolution)) {
+ // if the requested resolution is not supported, find the closest one
+ int reqPixelCount = requestedResolution.width() * requestedResolution.height();
+ QList<int> supportedPixelCounts;
+ for (int i = 0; i < supportedResolutions.size(); ++i) {
+ const QSize &s = supportedResolutions.at(i);
+ supportedPixelCounts.append(s.width() * s.height());
+ }
+ int closestIndex = qt_findClosestValue(supportedPixelCounts, reqPixelCount);
+ m_actualImageSettings.setResolution(supportedResolutions.at(closestIndex));
+ }
+ m_camera->setPictureSize(m_actualImageSettings.resolution());
+
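+    // Map the requested quality level to a JPEG quality factor (20, 40, 60, 80 or 100).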
+ int jpegQuality = 100;
+ switch (m_requestedImageSettings.quality()) {
+ case QImageCapture::VeryLowQuality:
+ jpegQuality = 20;
+ break;
+ case QImageCapture::LowQuality:
+ jpegQuality = 40;
+ break;
+ case QImageCapture::NormalQuality:
+ jpegQuality = 60;
+ break;
+ case QImageCapture::HighQuality:
+ jpegQuality = 80;
+ break;
+ case QImageCapture::VeryHighQuality:
+ jpegQuality = 100;
+ break;
+ }
+ m_camera->setJpegQuality(jpegQuality);
+}
+
+bool QAndroidCameraSession::isReadyForCapture() const
+{
+ return isActive() && m_readyForCapture;
+}
+
+void QAndroidCameraSession::setReadyForCapture(bool ready)
+{
+ if (m_readyForCapture == ready)
+ return;
+
+ m_readyForCapture = ready;
+ emit readyForCaptureChanged(ready);
+}
+
+int QAndroidCameraSession::captureImage()
+{
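+    // Hand out a new capture id even when not ready, so the error can be reported against it.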
+ const int newImageCaptureId = m_currentImageCaptureId + 1;
+
+ if (!isReadyForCapture()) {
+ emit imageCaptureError(newImageCaptureId, QImageCapture::NotReadyError,
+ QPlatformImageCapture::msgCameraNotReady());
+ return newImageCaptureId;
+ }
+
+ setReadyForCapture(false);
+
+ m_currentImageCaptureId = newImageCaptureId;
+
+ applyResolution(m_actualImageSettings.resolution());
+ m_camera->takePicture();
+
+ return m_currentImageCaptureId;
+}
+
+int QAndroidCameraSession::capture(const QString &fileName)
+{
+ m_currentImageCaptureFileName = fileName;
+ m_imageCaptureToBuffer = false;
+ return captureImage();
+}
+
+int QAndroidCameraSession::captureToBuffer()
+{
+ m_currentImageCaptureFileName.clear();
+ m_imageCaptureToBuffer = true;
+ return captureImage();
+}
+
+void QAndroidCameraSession::onCameraTakePictureFailed()
+{
+ emit imageCaptureError(m_currentImageCaptureId, QImageCapture::ResourceError,
+ tr("Failed to capture image"));
+
+    // The preview needs to be restarted and the preview callback must be set up again.
+ m_camera->startPreview();
+}
+
+void QAndroidCameraSession::onCameraPictureExposed()
+{
+ if (!m_camera)
+ return;
+
+ emit imageExposed(m_currentImageCaptureId);
+ m_camera->fetchLastPreviewFrame();
+}
+
+void QAndroidCameraSession::onLastPreviewFrameFetched(const QVideoFrame &frame)
+{
+ if (!m_camera)
+ return;
+
+ updateOrientation();
+
+ (void)QtConcurrent::run(&QAndroidCameraSession::processPreviewImage, this,
+ m_currentImageCaptureId, frame, currentCameraRotation());
+}
+
+void QAndroidCameraSession::processPreviewImage(int id, const QVideoFrame &frame, int rotation)
+{
+ // Preview display of front-facing cameras is flipped horizontally, but the frame data
+ // we get here is not. Flip it ourselves if the camera is front-facing to match what the user
+ // sees on the viewfinder.
+ QTransform transform;
+ transform.rotate(rotation);
+
+ if (m_camera->getFacing() == AndroidCamera::CameraFacingFront)
+ transform.scale(-1, 1);
+
+ emit imageCaptured(id, frame.toImage().transformed(transform));
+}
+
+void QAndroidCameraSession::onNewPreviewFrame(const QVideoFrame &frame)
+{
+ if (!m_camera)
+ return;
+
+    QMutexLocker locker(&m_videoFrameCallbackMutex);
+
+    if (m_previewCallback)
+        m_previewCallback->onFrameAvailable(frame);
+}
+
+void QAndroidCameraSession::onCameraPictureCaptured(const QByteArray &bytes,
+                    QVideoFrameFormat::PixelFormat format, QSize size, int bytesPerLine)
+{
+ if (m_imageCaptureToBuffer) {
+ processCapturedImageToBuffer(m_currentImageCaptureId, bytes, format, size, bytesPerLine);
+ } else {
+ // Loading and saving the captured image can be slow, do it in a separate thread
+ (void)QtConcurrent::run(&QAndroidCameraSession::processCapturedImage, this,
+ m_currentImageCaptureId, bytes, m_currentImageCaptureFileName);
+ }
+
+ // Preview needs to be restarted after taking a picture
+ if (m_camera)
+ m_camera->startPreview();
+}
+
+void QAndroidCameraSession::onCameraPreviewStarted()
+{
+ setReadyForCapture(true);
+}
+
+void QAndroidCameraSession::onCameraPreviewFailedToStart()
+{
+ if (isActive()) {
+ Q_EMIT error(QCamera::CameraError, tr("Camera preview failed to start."));
+
+ AndroidMultimediaUtils::enableOrientationListener(false);
+ m_camera->setPreviewSize(QSize());
+        m_camera->setPreviewTexture(nullptr);
+ if (m_videoOutput) {
+ m_videoOutput->stop();
+ m_videoOutput->reset();
+ }
+ m_previewStarted = false;
+
+ setActive(false);
+ setReadyForCapture(false);
+ }
+}
+
+void QAndroidCameraSession::onCameraPreviewStopped()
+{
+ if (!m_previewStarted)
+ setActive(false);
+ setReadyForCapture(false);
+}
+
+void QAndroidCameraSession::processCapturedImage(int id, const QByteArray &bytes, const QString &fileName)
+{
+ const QString actualFileName = QMediaStorageLocation::generateFileName(
+ fileName, QStandardPaths::PicturesLocation, QLatin1String("jpg"));
+ QFile writer(actualFileName);
+ if (!writer.open(QIODeviceBase::WriteOnly)) {
+ const QString errorMessage = tr("File is not available: %1").arg(writer.errorString());
+ emit imageCaptureError(id, QImageCapture::Error::ResourceError, errorMessage);
+ return;
+ }
+
+ if (writer.write(bytes) < 0) {
+ const QString errorMessage = tr("Could not save to file: %1").arg(writer.errorString());
+ emit imageCaptureError(id, QImageCapture::Error::ResourceError, errorMessage);
+ return;
+ }
+
+ writer.close();
+ if (fileName.isEmpty() || QFileInfo(fileName).isRelative())
+ AndroidMultimediaUtils::registerMediaFile(actualFileName);
+
+ emit imageSaved(id, actualFileName);
+}
+
+void QAndroidCameraSession::processCapturedImageToBuffer(int id, const QByteArray &bytes,
+ QVideoFrameFormat::PixelFormat format, QSize size, int bytesPerLine)
+{
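+    // Wrap the encoded bytes in a memory-backed QVideoFrame and deliver it via imageAvailable().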
+ QVideoFrame frame = QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(bytes, bytesPerLine),
+ QVideoFrameFormat(size, format));
+ emit imageAvailable(id, frame);
+}
+
+void QAndroidCameraSession::onVideoOutputReady(bool ready)
+{
+ if (ready && m_active)
+ startPreview();
+}
+
+void QAndroidCameraSession::onApplicationStateChanged()
+{
+ switch (QGuiApplication::applicationState()) {
+ case Qt::ApplicationInactive:
+ if (!m_keepActive && m_active) {
+ m_savedState = m_active;
+ setActive(false);
+ m_isStateSaved = true;
+ }
+ break;
+ case Qt::ApplicationActive:
+ if (m_isStateSaved) {
+ setActive(m_savedState);
+ m_isStateSaved = false;
+ }
+ break;
+ default:
+ break;
+ }
+}
+
+void QAndroidCameraSession::setKeepAlive(bool keepAlive)
+{
+ m_keepActive = keepAlive;
+}
+
+void QAndroidCameraSession::setVideoSink(QVideoSink *sink)
+{
+ if (m_sink == sink)
+ return;
+
+ if (m_sink)
+ disconnect(m_retryPreviewConnection);
+
+ m_sink = sink;
+
+    if (m_sink) {
+        m_retryPreviewConnection =
+                connect(m_sink->platformVideoSink(), &QPlatformVideoSink::rhiChanged, this,
+                        [&]() {
+                            if (m_active) {
+                                setActive(false);
+                                setActive(true);
+                            }
+                        }, Qt::DirectConnection);
+
+        delete m_textureOutput;
+        m_textureOutput = new QAndroidTextureVideoOutput(m_sink, this);
+    }
+
+ setVideoOutput(m_textureOutput);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qandroidcamerasession_p.cpp"
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidcamerasession_p.h b/src/plugins/multimedia/android/mediacapture/qandroidcamerasession_p.h
new file mode 100644
index 000000000..3b56d9c3b
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidcamerasession_p.h
@@ -0,0 +1,166 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// Copyright (C) 2016 Ruslan Baratov
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDCAMERASESSION_H
+#define QANDROIDCAMERASESSION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qcamera.h>
+#include <QImageCapture>
+#include <QSet>
+#include <QMutex>
+#include <private/qplatformimagecapture_p.h>
+#include "androidcamera_p.h"
+
+QT_BEGIN_NAMESPACE
+
+class QAndroidVideoOutput;
+class QAndroidTextureVideoOutput;
+class QVideoSink;
+
+class QAndroidCameraSession : public QObject
+{
+ Q_OBJECT
+public:
+    explicit QAndroidCameraSession(QObject *parent = nullptr);
+ ~QAndroidCameraSession();
+
+ static const QList<QCameraDevice> &availableCameras();
+
+ void setSelectedCameraId(int cameraId) { m_selectedCamera = cameraId; }
+ int getSelectedCameraId() { return m_selectedCamera; }
+ AndroidCamera *camera() const { return m_camera; }
+
+ bool isActive() const { return m_active; }
+ void setActive(bool active);
+
+ void applyResolution(const QSize &captureSize = QSize(), bool restartPreview = true);
+
+ QAndroidVideoOutput *videoOutput() const { return m_videoOutput; }
+ void setVideoOutput(QAndroidVideoOutput *output);
+
+ void setCameraFormat(const QCameraFormat &format);
+
+ QList<QSize> getSupportedPreviewSizes() const;
+ QList<QVideoFrameFormat::PixelFormat> getSupportedPixelFormats() const;
+ QList<AndroidCamera::FpsRange> getSupportedPreviewFpsRange() const;
+
+ QImageEncoderSettings imageSettings() const { return m_actualImageSettings; }
+ void setImageSettings(const QImageEncoderSettings &settings);
+
+ bool isReadyForCapture() const;
+ void setReadyForCapture(bool ready);
+ int capture(const QString &fileName);
+ int captureToBuffer();
+
+ int currentCameraRotation() const;
+
+ void setPreviewFormat(AndroidCamera::ImageFormat format);
+
+ struct PreviewCallback
+ {
+ virtual void onFrameAvailable(const QVideoFrame &frame) = 0;
+ };
+ void setPreviewCallback(PreviewCallback *callback);
+
+ void setVideoSink(QVideoSink *surface);
+
+ void disableRotation();
+ void enableRotation();
+
+ void setKeepAlive(bool keepAlive);
+
+Q_SIGNALS:
+ void activeChanged(bool);
+ void error(int error, const QString &errorString);
+ void opened();
+
+ void readyForCaptureChanged(bool);
+ void imageExposed(int id);
+ void imageCaptured(int id, const QImage &preview);
+ void imageMetadataAvailable(int id, const QMediaMetaData &key);
+ void imageAvailable(int id, const QVideoFrame &buffer);
+ void imageSaved(int id, const QString &fileName);
+ void imageCaptureError(int id, int error, const QString &errorString);
+
+private Q_SLOTS:
+ void onVideoOutputReady(bool ready);
+ void updateOrientation();
+
+ void onApplicationStateChanged();
+
+ void onCameraTakePictureFailed();
+ void onCameraPictureExposed();
+ void onCameraPictureCaptured(const QByteArray &bytes, QVideoFrameFormat::PixelFormat format, QSize size, int bytesPerLine);
+ void onLastPreviewFrameFetched(const QVideoFrame &frame);
+ void onNewPreviewFrame(const QVideoFrame &frame);
+ void onCameraPreviewStarted();
+ void onCameraPreviewFailedToStart();
+ void onCameraPreviewStopped();
+
+private:
+ static void updateAvailableCameras();
+
+ bool open();
+ void close();
+
+ bool startPreview();
+ void stopPreview();
+
+ void applyImageSettings();
+
+ void processPreviewImage(int id, const QVideoFrame &frame, int rotation);
+ void processCapturedImage(int id, const QByteArray &bytes, const QString &fileName);
+ void processCapturedImageToBuffer(int id, const QByteArray &bytes,
+ QVideoFrameFormat::PixelFormat format, QSize size, int bytesPerLine);
+
+ void setActiveHelper(bool active);
+
+ int captureImage();
+
+ QSize getDefaultResolution() const;
+
+ int m_selectedCamera;
+ AndroidCamera *m_camera;
+ QAndroidVideoOutput *m_videoOutput;
+
+ bool m_active = false;
+ bool m_isStateSaved = false;
+ bool m_savedState = false;
+ bool m_previewStarted;
+
+ bool m_rotationEnabled = false;
+
+ QVideoSink *m_sink = nullptr;
+ QAndroidTextureVideoOutput *m_textureOutput = nullptr;
+
+ QImageEncoderSettings m_requestedImageSettings;
+ QImageEncoderSettings m_actualImageSettings;
+ AndroidCamera::FpsRange m_requestedFpsRange;
+ AndroidCamera::ImageFormat m_requestedPixelFromat = AndroidCamera::ImageFormat::NV21;
+
+ bool m_readyForCapture;
+ int m_currentImageCaptureId;
+ QString m_currentImageCaptureFileName;
+ bool m_imageCaptureToBuffer;
+
+ QMutex m_videoFrameCallbackMutex;
+ PreviewCallback *m_previewCallback;
+ bool m_keepActive;
+ QMetaObject::Connection m_retryPreviewConnection;
+};
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDCAMERASESSION_H
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp b/src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp
new file mode 100644
index 000000000..3b005e4a5
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp
@@ -0,0 +1,473 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidcapturesession_p.h"
+
+#include "androidcamera_p.h"
+#include "qandroidcamerasession_p.h"
+#include "qaudioinput.h"
+#include "qaudiooutput.h"
+#include "androidmediaplayer_p.h"
+#include "androidmultimediautils_p.h"
+#include "qandroidmultimediautils_p.h"
+#include "qandroidvideooutput_p.h"
+#include "qandroidglobal_p.h"
+#include <private/qplatformaudioinput_p.h>
+#include <private/qplatformaudiooutput_p.h>
+#include <private/qmediarecorder_p.h>
+#include <private/qmediastoragelocation_p.h>
+#include <QtCore/qmimetype.h>
+
+#include <algorithm>
+
+QT_BEGIN_NAMESPACE
+
+QAndroidCaptureSession::QAndroidCaptureSession()
+ : QObject()
+    , m_mediaRecorder(nullptr)
+    , m_cameraSession(nullptr)
+ , m_duration(0)
+ , m_state(QMediaRecorder::StoppedState)
+ , m_outputFormat(AndroidMediaRecorder::DefaultOutputFormat)
+ , m_audioEncoder(AndroidMediaRecorder::DefaultAudioEncoder)
+ , m_videoEncoder(AndroidMediaRecorder::DefaultVideoEncoder)
+{
+ m_notifyTimer.setInterval(1000);
+ connect(&m_notifyTimer, &QTimer::timeout, this, &QAndroidCaptureSession::updateDuration);
+}
+
+QAndroidCaptureSession::~QAndroidCaptureSession()
+{
+ stop();
+ m_mediaRecorder = nullptr;
+ if (m_audioInput && m_audioOutput)
+ AndroidMediaPlayer::stopSoundStreaming();
+}
+
+void QAndroidCaptureSession::setCameraSession(QAndroidCameraSession *cameraSession)
+{
+ if (m_cameraSession) {
+ disconnect(m_connOpenCamera);
+ disconnect(m_connActiveChangedCamera);
+ }
+
+ m_cameraSession = cameraSession;
+ if (m_cameraSession) {
+ m_connOpenCamera = connect(cameraSession, &QAndroidCameraSession::opened,
+ this, &QAndroidCaptureSession::onCameraOpened);
+ m_connActiveChangedCamera = connect(cameraSession, &QAndroidCameraSession::activeChanged,
+ this, [this](bool isActive) {
+ if (!isActive)
+ stop();
+ });
+ }
+}
+
+void QAndroidCaptureSession::setAudioInput(QPlatformAudioInput *input)
+{
+ if (m_audioInput == input)
+ return;
+
+ if (m_audioInput) {
+ disconnect(m_audioInputChanged);
+ }
+
+ m_audioInput = input;
+
+ if (m_audioInput) {
+ m_audioInputChanged = connect(m_audioInput->q, &QAudioInput::deviceChanged, this, [this]() {
+ if (m_state == QMediaRecorder::RecordingState)
+ m_mediaRecorder->setAudioInput(m_audioInput->device.id());
+ updateStreamingState();
+ });
+ }
+ updateStreamingState();
+}
+
+void QAndroidCaptureSession::setAudioOutput(QPlatformAudioOutput *output)
+{
+ if (m_audioOutput == output)
+ return;
+
+ if (m_audioOutput)
+ disconnect(m_audioOutputChanged);
+
+ m_audioOutput = output;
+
+ if (m_audioOutput) {
+ m_audioOutputChanged = connect(m_audioOutput->q, &QAudioOutput::deviceChanged, this,
+ [this] () {
+ AndroidMediaPlayer::setAudioOutput(m_audioOutput->device.id());
+ updateStreamingState();
+ });
+ AndroidMediaPlayer::setAudioOutput(m_audioOutput->device.id());
+ }
+ updateStreamingState();
+}
+
+void QAndroidCaptureSession::updateStreamingState()
+{
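+    // Route audio from the input device to the output device while both are set.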
+ if (m_audioInput && m_audioOutput) {
+ AndroidMediaPlayer::startSoundStreaming(m_audioInput->device.id().toInt(),
+ m_audioOutput->device.id().toInt());
+ } else {
+ AndroidMediaPlayer::stopSoundStreaming();
+ }
+}
+
+QMediaRecorder::RecorderState QAndroidCaptureSession::state() const
+{
+ return m_state;
+}
+
+void QAndroidCaptureSession::setKeepAlive(bool keepAlive)
+{
+ if (m_cameraSession)
+ m_cameraSession->setKeepAlive(keepAlive);
+}
+
+void QAndroidCaptureSession::start(QMediaEncoderSettings &settings, const QUrl &outputLocation)
+{
+ if (m_state == QMediaRecorder::RecordingState)
+ return;
+
+ if (!m_cameraSession && !m_audioInput) {
+ updateError(QMediaRecorder::ResourceError, QLatin1String("No devices are set"));
+ return;
+ }
+
+ setKeepAlive(true);
+
+ const bool validCameraSession = m_cameraSession && m_cameraSession->camera();
+
+ if (validCameraSession && !qt_androidCheckCameraPermission()) {
+ updateError(QMediaRecorder::ResourceError, QLatin1String("Camera permission denied."));
+ setKeepAlive(false);
+ return;
+ }
+
+ if (m_audioInput && !qt_androidCheckMicrophonePermission()) {
+ updateError(QMediaRecorder::ResourceError, QLatin1String("Microphone permission denied."));
+ setKeepAlive(false);
+ return;
+ }
+
+ m_mediaRecorder = std::make_shared<AndroidMediaRecorder>();
+ connect(m_mediaRecorder.get(), &AndroidMediaRecorder::error, this,
+ &QAndroidCaptureSession::onError);
+ connect(m_mediaRecorder.get(), &AndroidMediaRecorder::info, this,
+ &QAndroidCaptureSession::onInfo);
+
+ applySettings(settings);
+
+ // Set audio/video sources
+ if (validCameraSession) {
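+        // Stop the preview and unlock the camera so the media recorder can take ownership.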
+ m_cameraSession->camera()->stopPreviewSynchronous();
+ m_cameraSession->camera()->unlock();
+
+ m_mediaRecorder->setCamera(m_cameraSession->camera());
+ m_mediaRecorder->setVideoSource(AndroidMediaRecorder::Camera);
+ }
+
+ if (m_audioInput) {
+ m_mediaRecorder->setAudioInput(m_audioInput->device.id());
+ if (!m_mediaRecorder->isAudioSourceSet())
+ m_mediaRecorder->setAudioSource(AndroidMediaRecorder::DefaultAudioSource);
+ }
+
+ // Set output format
+ m_mediaRecorder->setOutputFormat(m_outputFormat);
+
+ // Set video encoder settings
+ if (validCameraSession) {
+ m_mediaRecorder->setVideoSize(settings.videoResolution());
+ m_mediaRecorder->setVideoFrameRate(qRound(settings.videoFrameRate()));
+ m_mediaRecorder->setVideoEncodingBitRate(settings.videoBitRate());
+ m_mediaRecorder->setVideoEncoder(m_videoEncoder);
+
+        // The media recorder also compensates for the mirroring on the front camera.
+ auto rotation = m_cameraSession->currentCameraRotation();
+ if (m_cameraSession->camera()->getFacing() == AndroidCamera::CameraFacingFront)
+ rotation = (360 - rotation) % 360; // remove mirror compensation
+
+ m_mediaRecorder->setOrientationHint(rotation);
+ }
+
+ // Set audio encoder settings
+ if (m_audioInput) {
+ m_mediaRecorder->setAudioChannels(settings.audioChannelCount());
+ m_mediaRecorder->setAudioEncodingBitRate(settings.audioBitRate());
+ m_mediaRecorder->setAudioSamplingRate(settings.audioSampleRate());
+ m_mediaRecorder->setAudioEncoder(m_audioEncoder);
+ }
+
+ QString extension = settings.mimeType().preferredSuffix();
+ // Set output file
+ auto location = outputLocation.toString(QUrl::PreferLocalFile);
+ QString filePath = location;
+ if (QUrl(filePath).scheme() != QLatin1String("content")) {
+ filePath = QMediaStorageLocation::generateFileName(
+ location, m_cameraSession ? QStandardPaths::MoviesLocation
+ : QStandardPaths::MusicLocation, extension);
+ }
+
+ m_usedOutputLocation = QUrl::fromLocalFile(filePath);
+ m_outputLocationIsStandard = location.isEmpty() || QFileInfo(location).isRelative();
+ m_mediaRecorder->setOutputFile(filePath);
+
+ if (validCameraSession) {
+ m_cameraSession->disableRotation();
+ }
+
+ if (!m_mediaRecorder->prepare()) {
+ updateError(QMediaRecorder::FormatError,
+ QLatin1String("Unable to prepare the media recorder."));
+ restartViewfinder();
+
+ return;
+ }
+
+ if (!m_mediaRecorder->start()) {
+ updateError(QMediaRecorder::FormatError, QMediaRecorderPrivate::msgFailedStartRecording());
+ restartViewfinder();
+
+ return;
+ }
+
+ m_elapsedTime.start();
+ m_notifyTimer.start();
+ updateDuration();
+
+ if (validCameraSession) {
+ m_cameraSession->setReadyForCapture(false);
+
+ // Preview frame callback is cleared when setting up the camera with the media recorder.
+ // We need to reset it.
+ m_cameraSession->camera()->setupPreviewFrameCallback();
+ }
+
+ m_state = QMediaRecorder::RecordingState;
+ emit stateChanged(m_state);
+}
+
+void QAndroidCaptureSession::stop(bool error)
+{
+ if (m_state == QMediaRecorder::StoppedState || m_mediaRecorder == nullptr)
+ return;
+
+ m_mediaRecorder->stop();
+ m_notifyTimer.stop();
+ updateDuration();
+ m_elapsedTime.invalidate();
+
+ m_mediaRecorder = nullptr;
+
+ if (m_cameraSession && m_cameraSession->isActive()) {
+        // The viewfinder needs to be restarted after recording.
+ restartViewfinder();
+ }
+
+ if (!error) {
+ // if the media is saved into the standard media location, register it
+ // with the Android media scanner so it appears immediately in apps
+ // such as the gallery.
+ if (m_outputLocationIsStandard)
+ AndroidMultimediaUtils::registerMediaFile(m_usedOutputLocation.toLocalFile());
+
+ emit actualLocationChanged(m_usedOutputLocation);
+ }
+
+ m_state = QMediaRecorder::StoppedState;
+ emit stateChanged(m_state);
+}
+
+qint64 QAndroidCaptureSession::duration() const
+{
+ return m_duration;
+}
+
+void QAndroidCaptureSession::applySettings(QMediaEncoderSettings &settings)
+{
+ // container settings
+ auto fileFormat = settings.mediaFormat().fileFormat();
+ if (!m_cameraSession && fileFormat == QMediaFormat::AAC) {
+ m_outputFormat = AndroidMediaRecorder::AAC_ADTS;
+ } else if (fileFormat == QMediaFormat::Ogg) {
+ m_outputFormat = AndroidMediaRecorder::OGG;
+ } else if (fileFormat == QMediaFormat::WebM) {
+ m_outputFormat = AndroidMediaRecorder::WEBM;
+// } else if (fileFormat == QLatin1String("3gp")) {
+// m_outputFormat = AndroidMediaRecorder::THREE_GPP;
+ } else {
+ // fallback to MP4
+ m_outputFormat = AndroidMediaRecorder::MPEG_4;
+ }
+
+ // audio settings
+ if (settings.audioChannelCount() <= 0)
+ settings.setAudioChannelCount(m_defaultSettings.audioChannels);
+ if (settings.audioBitRate() <= 0)
+ settings.setAudioBitRate(m_defaultSettings.audioBitRate);
+ if (settings.audioSampleRate() <= 0)
+ settings.setAudioSampleRate(m_defaultSettings.audioSampleRate);
+
+ if (settings.audioCodec() == QMediaFormat::AudioCodec::AAC)
+ m_audioEncoder = AndroidMediaRecorder::AAC;
+ else if (settings.audioCodec() == QMediaFormat::AudioCodec::Opus)
+ m_audioEncoder = AndroidMediaRecorder::OPUS;
+ else if (settings.audioCodec() == QMediaFormat::AudioCodec::Vorbis)
+ m_audioEncoder = AndroidMediaRecorder::VORBIS;
+ else
+ m_audioEncoder = m_defaultSettings.audioEncoder;
+
+ // video settings
+ if (m_cameraSession && m_cameraSession->camera()) {
+ if (settings.videoResolution().isEmpty()) {
+ settings.setVideoResolution(m_defaultSettings.videoResolution);
+ } else if (!m_supportedResolutions.contains(settings.videoResolution())) {
+ // if the requested resolution is not supported, find the closest one
+ QSize reqSize = settings.videoResolution();
+ int reqPixelCount = reqSize.width() * reqSize.height();
+ QList<int> supportedPixelCounts;
+ for (int i = 0; i < m_supportedResolutions.size(); ++i) {
+ const QSize &s = m_supportedResolutions.at(i);
+ supportedPixelCounts.append(s.width() * s.height());
+ }
+ int closestIndex = qt_findClosestValue(supportedPixelCounts, reqPixelCount);
+ settings.setVideoResolution(m_supportedResolutions.at(closestIndex));
+ }
+
+ if (settings.videoFrameRate() <= 0)
+ settings.setVideoFrameRate(m_defaultSettings.videoFrameRate);
+ if (settings.videoBitRate() <= 0)
+ settings.setVideoBitRate(m_defaultSettings.videoBitRate);
+
+ if (settings.videoCodec() == QMediaFormat::VideoCodec::H264)
+ m_videoEncoder = AndroidMediaRecorder::H264;
+ else if (settings.videoCodec() == QMediaFormat::VideoCodec::H265)
+ m_videoEncoder = AndroidMediaRecorder::HEVC;
+ else if (settings.videoCodec() == QMediaFormat::VideoCodec::MPEG4)
+ m_videoEncoder = AndroidMediaRecorder::MPEG_4_SP;
+ else
+ m_videoEncoder = m_defaultSettings.videoEncoder;
+ }
+}
+
+void QAndroidCaptureSession::restartViewfinder()
+{
+ setKeepAlive(false);
+
+ if (!m_cameraSession)
+ return;
+
+    if (m_cameraSession->camera()) {
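+        // Reclaim the camera from the media recorder and bring the preview back up.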
+ m_cameraSession->camera()->reconnect();
+ m_cameraSession->camera()->stopPreviewSynchronous();
+ m_cameraSession->camera()->startPreview();
+ m_cameraSession->setReadyForCapture(true);
+ m_cameraSession->enableRotation();
+ }
+
+ m_mediaRecorder = nullptr;
+}
+
+void QAndroidCaptureSession::updateDuration()
+{
+ if (m_elapsedTime.isValid())
+ m_duration = m_elapsedTime.elapsed();
+
+ emit durationChanged(m_duration);
+}
+
+void QAndroidCaptureSession::onCameraOpened()
+{
+ m_supportedResolutions.clear();
+ m_supportedFramerates.clear();
+
+ // get supported resolutions from predefined profiles
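+    // (CamcorderProfile qualities range from QUALITY_LOW (0) to QUALITY_QVGA (7).)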
+ for (int i = 0; i < 8; ++i) {
+ CaptureProfile profile = getProfile(i);
+ if (!profile.isNull) {
+ if (i == AndroidCamcorderProfile::QUALITY_HIGH)
+ m_defaultSettings = profile;
+
+ if (!m_supportedResolutions.contains(profile.videoResolution))
+ m_supportedResolutions.append(profile.videoResolution);
+ if (!m_supportedFramerates.contains(profile.videoFrameRate))
+ m_supportedFramerates.append(profile.videoFrameRate);
+ }
+ }
+
+ std::sort(m_supportedResolutions.begin(), m_supportedResolutions.end(), qt_sizeLessThan);
+ std::sort(m_supportedFramerates.begin(), m_supportedFramerates.end());
+
+ QMediaEncoderSettings defaultSettings;
+ applySettings(defaultSettings);
+ m_cameraSession->applyResolution(defaultSettings.videoResolution());
+}
+
+QAndroidCaptureSession::CaptureProfile QAndroidCaptureSession::getProfile(int id)
+{
+ CaptureProfile profile;
+ const bool hasProfile = AndroidCamcorderProfile::hasProfile(m_cameraSession->camera()->cameraId(),
+ AndroidCamcorderProfile::Quality(id));
+
+ if (hasProfile) {
+ AndroidCamcorderProfile camProfile = AndroidCamcorderProfile::get(m_cameraSession->camera()->cameraId(),
+ AndroidCamcorderProfile::Quality(id));
+
+ profile.outputFormat = AndroidMediaRecorder::OutputFormat(camProfile.getValue(AndroidCamcorderProfile::fileFormat));
+ profile.audioEncoder = AndroidMediaRecorder::AudioEncoder(camProfile.getValue(AndroidCamcorderProfile::audioCodec));
+ profile.audioBitRate = camProfile.getValue(AndroidCamcorderProfile::audioBitRate);
+ profile.audioChannels = camProfile.getValue(AndroidCamcorderProfile::audioChannels);
+ profile.audioSampleRate = camProfile.getValue(AndroidCamcorderProfile::audioSampleRate);
+ profile.videoEncoder = AndroidMediaRecorder::VideoEncoder(camProfile.getValue(AndroidCamcorderProfile::videoCodec));
+ profile.videoBitRate = camProfile.getValue(AndroidCamcorderProfile::videoBitRate);
+ profile.videoFrameRate = camProfile.getValue(AndroidCamcorderProfile::videoFrameRate);
+ profile.videoResolution = QSize(camProfile.getValue(AndroidCamcorderProfile::videoFrameWidth),
+ camProfile.getValue(AndroidCamcorderProfile::videoFrameHeight));
+
+ if (profile.outputFormat == AndroidMediaRecorder::MPEG_4)
+ profile.outputFileExtension = QStringLiteral("mp4");
+ else if (profile.outputFormat == AndroidMediaRecorder::THREE_GPP)
+ profile.outputFileExtension = QStringLiteral("3gp");
+ else if (profile.outputFormat == AndroidMediaRecorder::AMR_NB_Format)
+ profile.outputFileExtension = QStringLiteral("amr");
+ else if (profile.outputFormat == AndroidMediaRecorder::AMR_WB_Format)
+ profile.outputFileExtension = QStringLiteral("awb");
+
+ profile.isNull = false;
+ }
+
+ return profile;
+}
+
+void QAndroidCaptureSession::onError(int what, int extra)
+{
+ Q_UNUSED(what);
+ Q_UNUSED(extra);
+ stop(true);
+ updateError(QMediaRecorder::ResourceError, QLatin1String("Unknown error."));
+}
+
+void QAndroidCaptureSession::onInfo(int what, int extra)
+{
+ Q_UNUSED(extra);
+ if (what == 800) {
+ // MEDIA_RECORDER_INFO_MAX_DURATION_REACHED
+ stop();
+ updateError(QMediaRecorder::OutOfSpaceError, QLatin1String("Maximum duration reached."));
+ } else if (what == 801) {
+ // MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED
+ stop();
+ updateError(QMediaRecorder::OutOfSpaceError, QLatin1String("Maximum file size reached."));
+ }
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qandroidcapturesession_p.cpp"
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h b/src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h
new file mode 100644
index 000000000..161d47994
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h
@@ -0,0 +1,158 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDCAPTURESESSION_H
+#define QANDROIDCAPTURESESSION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qobject.h>
+#include <qmediarecorder.h>
+#include <qurl.h>
+#include <qelapsedtimer.h>
+#include <qtimer.h>
+#include "androidmediarecorder_p.h"
+#include "qandroidmediaencoder_p.h"
+
+QT_BEGIN_NAMESPACE
+
+class QAudioInput;
+class QAndroidCameraSession;
+
+class QAndroidCaptureSession : public QObject
+{
+ Q_OBJECT
+public:
+ explicit QAndroidCaptureSession();
+ ~QAndroidCaptureSession();
+
+ QList<QSize> supportedResolutions() const { return m_supportedResolutions; }
+ QList<qreal> supportedFrameRates() const { return m_supportedFramerates; }
+
+    void setCameraSession(QAndroidCameraSession *cameraSession = nullptr);
+ void setAudioInput(QPlatformAudioInput *input);
+ void setAudioOutput(QPlatformAudioOutput *output);
+
+ QMediaRecorder::RecorderState state() const;
+
+ void start(QMediaEncoderSettings &settings, const QUrl &outputLocation);
+ void stop(bool error = false);
+
+ qint64 duration() const;
+
+ QMediaEncoderSettings encoderSettings() { return m_encoderSettings; }
+
+ void setMediaEncoder(QAndroidMediaEncoder *encoder) { m_mediaEncoder = encoder; }
+
+ void stateChanged(QMediaRecorder::RecorderState state) {
+ if (m_mediaEncoder)
+ m_mediaEncoder->stateChanged(state);
+ }
+ void durationChanged(qint64 position)
+ {
+ if (m_mediaEncoder)
+ m_mediaEncoder->durationChanged(position);
+ }
+ void actualLocationChanged(const QUrl &location)
+ {
+ if (m_mediaEncoder)
+ m_mediaEncoder->actualLocationChanged(location);
+ }
+ void updateError(int error, const QString &errorString)
+ {
+ if (m_mediaEncoder)
+ m_mediaEncoder->updateError(QMediaRecorder::Error(error), errorString);
+ }
+
+private Q_SLOTS:
+ void updateDuration();
+ void onCameraOpened();
+
+ void onError(int what, int extra);
+ void onInfo(int what, int extra);
+
+private:
+ void applySettings(QMediaEncoderSettings &settings);
+
+ struct CaptureProfile {
+ AndroidMediaRecorder::OutputFormat outputFormat;
+ QString outputFileExtension;
+
+ AndroidMediaRecorder::AudioEncoder audioEncoder;
+ int audioBitRate;
+ int audioChannels;
+ int audioSampleRate;
+
+ AndroidMediaRecorder::VideoEncoder videoEncoder;
+ int videoBitRate;
+ int videoFrameRate;
+ QSize videoResolution;
+
+ bool isNull;
+
+ CaptureProfile()
+ : outputFormat(AndroidMediaRecorder::MPEG_4)
+ , outputFileExtension(QLatin1String("mp4"))
+ , audioEncoder(AndroidMediaRecorder::DefaultAudioEncoder)
+ , audioBitRate(128000)
+ , audioChannels(2)
+ , audioSampleRate(44100)
+ , videoEncoder(AndroidMediaRecorder::DefaultVideoEncoder)
+ , videoBitRate(1)
+ , videoFrameRate(-1)
+ , videoResolution(1280, 720)
+ , isNull(true)
+ { }
+ };
+
+ CaptureProfile getProfile(int id);
+
+ void restartViewfinder();
+ void updateStreamingState();
+
+ QAndroidMediaEncoder *m_mediaEncoder = nullptr;
+ std::shared_ptr<AndroidMediaRecorder> m_mediaRecorder;
+ QAndroidCameraSession *m_cameraSession;
+
+ QPlatformAudioInput *m_audioInput = nullptr;
+ QPlatformAudioOutput *m_audioOutput = nullptr;
+
+ QElapsedTimer m_elapsedTime;
+ QTimer m_notifyTimer;
+ qint64 m_duration;
+
+ QMediaRecorder::RecorderState m_state;
+ QUrl m_usedOutputLocation;
+ bool m_outputLocationIsStandard = false;
+
+ CaptureProfile m_defaultSettings;
+
+ QMediaEncoderSettings m_encoderSettings;
+ AndroidMediaRecorder::OutputFormat m_outputFormat;
+ AndroidMediaRecorder::AudioEncoder m_audioEncoder;
+ AndroidMediaRecorder::VideoEncoder m_videoEncoder;
+
+ QList<QSize> m_supportedResolutions;
+ QList<qreal> m_supportedFramerates;
+
+ QMetaObject::Connection m_audioInputChanged;
+ QMetaObject::Connection m_audioOutputChanged;
+ QMetaObject::Connection m_connOpenCamera;
+ QMetaObject::Connection m_connActiveChangedCamera;
+
+ void setKeepAlive(bool keepAlive);
+};
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDCAPTURESESSION_H
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidimagecapture.cpp b/src/plugins/multimedia/android/mediacapture/qandroidimagecapture.cpp
new file mode 100644
index 000000000..4105851ed
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidimagecapture.cpp
@@ -0,0 +1,73 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidimagecapture_p.h"
+
+#include "qandroidcamerasession_p.h"
+#include "qandroidmediacapturesession_p.h"
+
+QT_BEGIN_NAMESPACE
+
+QAndroidImageCapture::QAndroidImageCapture(QImageCapture *parent)
+ : QPlatformImageCapture(parent)
+{
+}
+
+bool QAndroidImageCapture::isReadyForCapture() const
+{
+ return m_session->isReadyForCapture();
+}
+
+int QAndroidImageCapture::capture(const QString &fileName)
+{
+ return m_session->capture(fileName);
+}
+
+int QAndroidImageCapture::captureToBuffer()
+{
+ return m_session->captureToBuffer();
+}
+
+QImageEncoderSettings QAndroidImageCapture::imageSettings() const
+{
+ return m_session->imageSettings();
+}
+
+void QAndroidImageCapture::setImageSettings(const QImageEncoderSettings &settings)
+{
+ m_session->setImageSettings(settings);
+}
+
+void QAndroidImageCapture::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+ QAndroidMediaCaptureSession *captureSession = static_cast<QAndroidMediaCaptureSession *>(session);
+ if (m_service == captureSession)
+ return;
+
+ m_service = captureSession;
+ if (!m_service) {
+ disconnect(m_session, nullptr, this, nullptr);
+ return;
+ }
+
+ m_session = m_service->cameraSession();
+ Q_ASSERT(m_session);
+
+ connect(m_session, &QAndroidCameraSession::readyForCaptureChanged,
+ this, &QAndroidImageCapture::readyForCaptureChanged);
+ connect(m_session, &QAndroidCameraSession::imageExposed,
+ this, &QAndroidImageCapture::imageExposed);
+ connect(m_session, &QAndroidCameraSession::imageCaptured,
+ this, &QAndroidImageCapture::imageCaptured);
+ connect(m_session, &QAndroidCameraSession::imageMetadataAvailable,
+ this, &QAndroidImageCapture::imageMetadataAvailable);
+ connect(m_session, &QAndroidCameraSession::imageAvailable,
+ this, &QAndroidImageCapture::imageAvailable);
+ connect(m_session, &QAndroidCameraSession::imageSaved,
+ this, &QAndroidImageCapture::imageSaved);
+ connect(m_session, &QAndroidCameraSession::imageCaptureError,
+ this, &QAndroidImageCapture::error);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qandroidimagecapture_p.cpp"
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidimagecapture_p.h b/src/plugins/multimedia/android/mediacapture/qandroidimagecapture_p.h
new file mode 100644
index 000000000..ac273c195
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidimagecapture_p.h
@@ -0,0 +1,48 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDCAMERAIMAGECAPTURECONTROL_H
+#define QANDROIDCAMERAIMAGECAPTURECONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformimagecapture_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAndroidCameraSession;
+class QAndroidMediaCaptureSession;
+
+class QAndroidImageCapture : public QPlatformImageCapture
+{
+ Q_OBJECT
+public:
+ explicit QAndroidImageCapture(QImageCapture *parent = nullptr);
+
+ bool isReadyForCapture() const override;
+
+ int capture(const QString &fileName) override;
+ int captureToBuffer() override;
+
+ QImageEncoderSettings imageSettings() const override;
+ void setImageSettings(const QImageEncoderSettings &settings) override;
+
+ void setCaptureSession(QPlatformMediaCaptureSession *session);
+
+private:
+    QAndroidCameraSession *m_session = nullptr;
+    QAndroidMediaCaptureSession *m_service = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDCAMERAIMAGECAPTURECONTROL_H
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidmediacapturesession.cpp b/src/plugins/multimedia/android/mediacapture/qandroidmediacapturesession.cpp
new file mode 100644
index 000000000..e2b551d35
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidmediacapturesession.cpp
@@ -0,0 +1,115 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// Copyright (C) 2016 Ruslan Baratov
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidmediacapturesession_p.h"
+
+#include "qandroidmediaencoder_p.h"
+#include "qandroidcapturesession_p.h"
+#include "qandroidcamera_p.h"
+#include "qandroidcamerasession_p.h"
+#include "qandroidimagecapture_p.h"
+#include "qmediadevices.h"
+#include "qaudiodevice.h"
+
+QT_BEGIN_NAMESPACE
+
+QAndroidMediaCaptureSession::QAndroidMediaCaptureSession()
+ : m_captureSession(new QAndroidCaptureSession())
+ , m_cameraSession(new QAndroidCameraSession())
+{
+}
+
+QAndroidMediaCaptureSession::~QAndroidMediaCaptureSession()
+{
+ delete m_captureSession;
+ delete m_cameraSession;
+}
+
+QPlatformCamera *QAndroidMediaCaptureSession::camera()
+{
+ return m_cameraControl;
+}
+
+void QAndroidMediaCaptureSession::setCamera(QPlatformCamera *camera)
+{
+ if (camera) {
+ m_captureSession->setCameraSession(m_cameraSession);
+ } else {
+ m_captureSession->setCameraSession(nullptr);
+ }
+
+ QAndroidCamera *control = static_cast<QAndroidCamera *>(camera);
+ if (m_cameraControl == control)
+ return;
+
+ if (m_cameraControl)
+ m_cameraControl->setCaptureSession(nullptr);
+
+ m_cameraControl = control;
+ if (m_cameraControl)
+ m_cameraControl->setCaptureSession(this);
+
+ emit cameraChanged();
+}
+
+QPlatformImageCapture *QAndroidMediaCaptureSession::imageCapture()
+{
+ return m_imageCaptureControl;
+}
+
+void QAndroidMediaCaptureSession::setImageCapture(QPlatformImageCapture *imageCapture)
+{
+ QAndroidImageCapture *control = static_cast<QAndroidImageCapture *>(imageCapture);
+ if (m_imageCaptureControl == control)
+ return;
+
+ if (m_imageCaptureControl)
+ m_imageCaptureControl->setCaptureSession(nullptr);
+
+ m_imageCaptureControl = control;
+ if (m_imageCaptureControl)
+ m_imageCaptureControl->setCaptureSession(this);
+}
+
+QPlatformMediaRecorder *QAndroidMediaCaptureSession::mediaRecorder()
+{
+ return m_encoder;
+}
+
+void QAndroidMediaCaptureSession::setMediaRecorder(QPlatformMediaRecorder *recorder)
+{
+ QAndroidMediaEncoder *control = static_cast<QAndroidMediaEncoder *>(recorder);
+
+ if (m_encoder == control)
+ return;
+
+ if (m_encoder)
+ m_encoder->setCaptureSession(nullptr);
+
+ m_encoder = control;
+ if (m_encoder)
+ m_encoder->setCaptureSession(this);
+
+ emit encoderChanged();
+}
+
+void QAndroidMediaCaptureSession::setAudioInput(QPlatformAudioInput *input)
+{
+ m_captureSession->setAudioInput(input);
+}
+
+void QAndroidMediaCaptureSession::setAudioOutput(QPlatformAudioOutput *output)
+{
+ m_captureSession->setAudioOutput(output);
+}
+
+void QAndroidMediaCaptureSession::setVideoPreview(QVideoSink *sink)
+{
+ m_cameraSession->setVideoSink(sink);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qandroidmediacapturesession_p.cpp"
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidmediacapturesession_p.h b/src/plugins/multimedia/android/mediacapture/qandroidmediacapturesession_p.h
new file mode 100644
index 000000000..90c792c32
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidmediacapturesession_p.h
@@ -0,0 +1,66 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// Copyright (C) 2016 Ruslan Baratov
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDCAPTURESERVICE_H
+#define QANDROIDCAPTURESERVICE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediacapture_p.h>
+#include <private/qplatformmediaintegration_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAndroidMediaEncoder;
+class QAndroidCaptureSession;
+class QAndroidCamera;
+class QAndroidCameraSession;
+class QAndroidImageCapture;
+
+class QAndroidMediaCaptureSession : public QPlatformMediaCaptureSession
+{
+ Q_OBJECT
+
+public:
+ explicit QAndroidMediaCaptureSession();
+ virtual ~QAndroidMediaCaptureSession();
+
+ QPlatformCamera *camera() override;
+ void setCamera(QPlatformCamera *camera) override;
+
+ QPlatformImageCapture *imageCapture() override;
+ void setImageCapture(QPlatformImageCapture *imageCapture) override;
+
+ QPlatformMediaRecorder *mediaRecorder() override;
+ void setMediaRecorder(QPlatformMediaRecorder *recorder) override;
+
+ void setAudioInput(QPlatformAudioInput *input) override;
+
+ void setVideoPreview(QVideoSink *sink) override;
+
+ void setAudioOutput(QPlatformAudioOutput *output) override;
+
+ QAndroidCaptureSession *captureSession() const { return m_captureSession; }
+ QAndroidCameraSession *cameraSession() const { return m_cameraSession; }
+
+private:
+ QAndroidMediaEncoder *m_encoder = nullptr;
+ QAndroidCaptureSession *m_captureSession = nullptr;
+ QAndroidCamera *m_cameraControl = nullptr;
+ QAndroidCameraSession *m_cameraSession = nullptr;
+ QAndroidImageCapture *m_imageCaptureControl = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDCAPTURESERVICE_H
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidmediaencoder.cpp b/src/plugins/multimedia/android/mediacapture/qandroidmediaencoder.cpp
new file mode 100644
index 000000000..d3449312d
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidmediaencoder.cpp
@@ -0,0 +1,72 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidmediaencoder_p.h"
+#include "qandroidmultimediautils_p.h"
+#include "qandroidcapturesession_p.h"
+#include "qandroidmediacapturesession_p.h"
+
+QT_BEGIN_NAMESPACE
+
+QAndroidMediaEncoder::QAndroidMediaEncoder(QMediaRecorder *parent)
+ : QPlatformMediaRecorder(parent)
+{
+}
+
+bool QAndroidMediaEncoder::isLocationWritable(const QUrl &location) const
+{
+ return location.isValid()
+ && (location.isLocalFile() || location.isRelative());
+}
+
+QMediaRecorder::RecorderState QAndroidMediaEncoder::state() const
+{
+ return m_session ? m_session->state() : QMediaRecorder::StoppedState;
+}
+
+qint64 QAndroidMediaEncoder::duration() const
+{
+
+}
+
+void QAndroidMediaEncoder::record(QMediaEncoderSettings &settings)
+{
+ if (m_session)
+ m_session->start(settings, outputLocation());
+}
+
+void QAndroidMediaEncoder::stop()
+{
+ if (m_session)
+ m_session->stop();
+}
+
+void QAndroidMediaEncoder::setOutputLocation(const QUrl &location)
+{
+ if (location.isLocalFile()) {
+ qt_androidRequestWriteStoragePermission();
+ }
+ QPlatformMediaRecorder::setOutputLocation(location);
+}
+
+void QAndroidMediaEncoder::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+ QAndroidMediaCaptureSession *captureSession = static_cast<QAndroidMediaCaptureSession *>(session);
+ if (m_service == captureSession)
+ return;
+
+ if (m_service)
+ stop();
+ if (m_session)
+ m_session->setMediaEncoder(nullptr);
+
+ m_service = captureSession;
+ if (!m_service)
+ return;
+ m_session = m_service->captureSession();
+ Q_ASSERT(m_session);
+ m_session->setMediaEncoder(this);
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidmediaencoder_p.h b/src/plugins/multimedia/android/mediacapture/qandroidmediaencoder_p.h
new file mode 100644
index 000000000..b46268449
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidmediaencoder_p.h
@@ -0,0 +1,50 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDMEDIAENCODER_H
+#define QANDROIDMEDIAENCODER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediarecorder_p.h>
+#include <private/qplatformmediacapture_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAndroidCaptureSession;
+class QAndroidMediaCaptureSession;
+
+class QAndroidMediaEncoder : public QPlatformMediaRecorder
+{
+public:
+ explicit QAndroidMediaEncoder(QMediaRecorder *parent);
+
+ bool isLocationWritable(const QUrl &location) const override;
+ QMediaRecorder::RecorderState state() const override;
+ qint64 duration() const override;
+
+ void setCaptureSession(QPlatformMediaCaptureSession *session);
+
+ void setOutputLocation(const QUrl &location) override;
+ void record(QMediaEncoderSettings &settings) override;
+ void stop() override;
+
+private:
+ friend class QAndroidCaptureSession;
+
+ QAndroidCaptureSession *m_session = nullptr;
+ QAndroidMediaCaptureSession *m_service = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDMEDIAENCODER_H
diff --git a/src/plugins/multimedia/android/mediaplayer/qandroidmediaplayer.cpp b/src/plugins/multimedia/android/mediaplayer/qandroidmediaplayer.cpp
new file mode 100644
index 000000000..b257a8986
--- /dev/null
+++ b/src/plugins/multimedia/android/mediaplayer/qandroidmediaplayer.cpp
@@ -0,0 +1,999 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidmediaplayer_p.h"
+#include "androidmediaplayer_p.h"
+#include "qandroidvideooutput_p.h"
+#include "qandroidmetadata_p.h"
+#include "qandroidaudiooutput_p.h"
+#include "qaudiooutput.h"
+
+#include <private/qplatformvideosink_p.h>
+#include <qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(lcMediaPlayer, "qt.multimedia.mediaplayer.android")
+
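+// RAII helper that coalesces state updates: stateChanged()/mediaStatusChanged() are
+// emitted at most once, when the outermost notifier on the stack is destroyed.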
+class StateChangeNotifier
+{
+public:
+ StateChangeNotifier(QAndroidMediaPlayer *mp)
+ : mControl(mp)
+ , mPreviousState(mp->state())
+ , mPreviousMediaStatus(mp->mediaStatus())
+ {
+ ++mControl->mActiveStateChangeNotifiers;
+ }
+
+ ~StateChangeNotifier()
+ {
+ if (--mControl->mActiveStateChangeNotifiers)
+ return;
+
+ if (mPreviousMediaStatus != mControl->mediaStatus())
+ Q_EMIT mControl->mediaStatusChanged(mControl->mediaStatus());
+
+ if (mPreviousState != mControl->state())
+ Q_EMIT mControl->stateChanged(mControl->state());
+ }
+
+private:
+ QAndroidMediaPlayer *mControl;
+ QMediaPlayer::PlaybackState mPreviousState;
+ QMediaPlayer::MediaStatus mPreviousMediaStatus;
+};
+
+QAndroidMediaPlayer::QAndroidMediaPlayer(QMediaPlayer *parent)
+ : QPlatformMediaPlayer(parent),
+ mMediaPlayer(new AndroidMediaPlayer),
+ mState(AndroidMediaPlayer::Uninitialized)
+{
+ // Set seekable to True by default. It changes if MEDIA_INFO_NOT_SEEKABLE is received
+ seekableChanged(true);
+ connect(mMediaPlayer, &AndroidMediaPlayer::bufferingChanged, this,
+ &QAndroidMediaPlayer::onBufferingChanged);
+ connect(mMediaPlayer, &AndroidMediaPlayer::info, this, &QAndroidMediaPlayer::onInfo);
+ connect(mMediaPlayer, &AndroidMediaPlayer::error, this, &QAndroidMediaPlayer::onError);
+ connect(mMediaPlayer, &AndroidMediaPlayer::stateChanged, this,
+ &QAndroidMediaPlayer::onStateChanged);
+ connect(mMediaPlayer, &AndroidMediaPlayer::videoSizeChanged, this,
+ &QAndroidMediaPlayer::onVideoSizeChanged);
+ connect(mMediaPlayer, &AndroidMediaPlayer::progressChanged, this,
+ &QAndroidMediaPlayer::positionChanged);
+ connect(mMediaPlayer, &AndroidMediaPlayer::durationChanged, this,
+ &QAndroidMediaPlayer::durationChanged);
+ connect(mMediaPlayer, &AndroidMediaPlayer::tracksInfoChanged, this,
+ &QAndroidMediaPlayer::updateTrackInfo);
+}
+
+QAndroidMediaPlayer::~QAndroidMediaPlayer()
+{
+ if (m_videoSink)
+ disconnect(m_videoSink->platformVideoSink(), nullptr, this, nullptr);
+
+ mMediaPlayer->disconnect();
+ mMediaPlayer->release();
+ delete mMediaPlayer;
+}
+
+qint64 QAndroidMediaPlayer::duration() const
+{
+ if (mediaStatus() == QMediaPlayer::NoMedia)
+ return 0;
+
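+    // getDuration() is only meaningful once the Android MediaPlayer has reached
+    // one of the states checked below.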
+ if ((mState & (AndroidMediaPlayer::Prepared
+ | AndroidMediaPlayer::Started
+ | AndroidMediaPlayer::Paused
+ | AndroidMediaPlayer::Stopped
+ | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
+ return 0;
+ }
+
+ return mMediaPlayer->getDuration();
+}
+
+qint64 QAndroidMediaPlayer::position() const
+{
+ if (mediaStatus() == QMediaPlayer::EndOfMedia)
+ return duration();
+
+ if ((mState & (AndroidMediaPlayer::Prepared
+ | AndroidMediaPlayer::Started
+ | AndroidMediaPlayer::Paused
+ | AndroidMediaPlayer::PlaybackCompleted))) {
+ return mMediaPlayer->getCurrentPosition();
+ }
+
+ return (mPendingPosition == -1) ? 0 : mPendingPosition;
+}
+
+void QAndroidMediaPlayer::setPosition(qint64 position)
+{
+ if (!isSeekable())
+ return;
+
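+    // MediaPlayer.seekTo() takes a 32-bit millisecond offset, so clamp the position.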
+ const int seekPosition = (position > INT_MAX) ? INT_MAX : position;
+
+ qint64 currentPosition = mMediaPlayer->getCurrentPosition();
+ if (seekPosition == currentPosition) {
+ // update position - will send a new frame of this position
+ // for consistency with other platforms
+ mMediaPlayer->seekTo(seekPosition);
+ return;
+ }
+ StateChangeNotifier notifier(this);
+
+ if (mediaStatus() == QMediaPlayer::EndOfMedia)
+ setMediaStatus(QMediaPlayer::LoadedMedia);
+
+ if ((mState & (AndroidMediaPlayer::Prepared
+ | AndroidMediaPlayer::Started
+ | AndroidMediaPlayer::Paused
+ | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
+ mPendingPosition = seekPosition;
+ } else {
+ mMediaPlayer->seekTo(seekPosition);
+
+ if (mPendingPosition != -1) {
+ mPendingPosition = -1;
+ }
+ }
+
+ Q_EMIT positionChanged(seekPosition);
+}
+
+void QAndroidMediaPlayer::setVolume(float volume)
+{
+ if ((mState & (AndroidMediaPlayer::Idle
+ | AndroidMediaPlayer::Initialized
+ | AndroidMediaPlayer::Stopped
+ | AndroidMediaPlayer::Prepared
+ | AndroidMediaPlayer::Started
+ | AndroidMediaPlayer::Paused
+ | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
+ mPendingVolume = volume;
+ return;
+ }
+
+ mMediaPlayer->setVolume(qRound(volume*100.));
+ mPendingVolume = -1;
+}
+
+void QAndroidMediaPlayer::setMuted(bool muted)
+{
+ if ((mState & (AndroidMediaPlayer::Idle
+ | AndroidMediaPlayer::Initialized
+ | AndroidMediaPlayer::Stopped
+ | AndroidMediaPlayer::Prepared
+ | AndroidMediaPlayer::Started
+ | AndroidMediaPlayer::Paused
+ | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
+ mPendingMute = muted;
+ return;
+ }
+
+ mMediaPlayer->setMuted(muted);
+ mPendingMute = -1;
+}
+
+QMediaMetaData QAndroidMediaPlayer::metaData() const
+{
+ return QAndroidMetaData::extractMetadata(mMediaContent);
+}
+
+float QAndroidMediaPlayer::bufferProgress() const
+{
+ return mBufferFilled ? 1. : mBufferPercent;
+}
+
+bool QAndroidMediaPlayer::isAudioAvailable() const
+{
+ return mAudioAvailable;
+}
+
+bool QAndroidMediaPlayer::isVideoAvailable() const
+{
+ return mVideoAvailable;
+}
+
+QMediaTimeRange QAndroidMediaPlayer::availablePlaybackRanges() const
+{
+ return mAvailablePlaybackRange;
+}
+
+void QAndroidMediaPlayer::updateAvailablePlaybackRanges()
+{
+ if (mBuffering) {
+ const qint64 pos = position();
+ const qint64 end = (duration() / 100) * mBufferPercent;
+ mAvailablePlaybackRange.addInterval(pos, end);
+ } else if (isSeekable()) {
+ mAvailablePlaybackRange = QMediaTimeRange(0, duration());
+ } else {
+ mAvailablePlaybackRange = QMediaTimeRange();
+ }
+
+// #### Q_EMIT availablePlaybackRangesChanged(mAvailablePlaybackRange);
+}
+
+qreal QAndroidMediaPlayer::playbackRate() const
+{
+ return mCurrentPlaybackRate;
+}
+
+void QAndroidMediaPlayer::setPlaybackRate(qreal rate)
+{
+ if (mState != AndroidMediaPlayer::Started) {
+        // If the video isn't playing yet, changing the playback rate may start it
+        // automatically, so applying the rate has to be postponed.
+ if (mCurrentPlaybackRate != rate) {
+ mCurrentPlaybackRate = rate;
+ mHasPendingPlaybackRate = true;
+ Q_EMIT playbackRateChanged(rate);
+ }
+ return;
+ }
+
+ if (mMediaPlayer->setPlaybackRate(rate)) {
+ mCurrentPlaybackRate = rate;
+ Q_EMIT playbackRateChanged(rate);
+ }
+}
+
+QUrl QAndroidMediaPlayer::media() const
+{
+ return mMediaContent;
+}
+
+const QIODevice *QAndroidMediaPlayer::mediaStream() const
+{
+ return mMediaStream;
+}
+
+void QAndroidMediaPlayer::setMedia(const QUrl &mediaContent,
+ QIODevice *stream)
+{
+ StateChangeNotifier notifier(this);
+
+ mReloadingMedia = (mMediaContent == mediaContent) && !mPendingSetMedia;
+
+ if (!mReloadingMedia) {
+ mMediaContent = mediaContent;
+ mMediaStream = stream;
+ }
+
+ if (mediaContent.isEmpty()) {
+ setMediaStatus(QMediaPlayer::NoMedia);
+ } else {
+ if (mVideoOutput && !mVideoOutput->isReady()) {
+ // if a video output is set but the video texture is not ready, delay loading the media
+ // since it can cause problems on some hardware
+ mPendingSetMedia = true;
+ return;
+ }
+
+ if (mVideoSize.isValid() && mVideoOutput)
+ mVideoOutput->setVideoSize(mVideoSize);
+
+ if (mVideoOutput &&
+ (mMediaPlayer->display() == 0 || mVideoOutput->shouldTextureBeUpdated()))
+ mMediaPlayer->setDisplay(mVideoOutput->surfaceTexture());
+ mMediaPlayer->setDataSource(QNetworkRequest(mediaContent));
+ mMediaPlayer->prepareAsync();
+
+ if (!mReloadingMedia)
+ setMediaStatus(QMediaPlayer::LoadingMedia);
+ }
+
+ resetBufferingProgress();
+
+ mReloadingMedia = false;
+}
+
+void QAndroidMediaPlayer::setVideoSink(QVideoSink *sink)
+{
+ if (m_videoSink == sink)
+ return;
+
+ if (m_videoSink)
+ disconnect(m_videoSink->platformVideoSink(), nullptr, this, nullptr);
+
+ m_videoSink = sink;
+
+ if (!m_videoSink) {
+ return;
+ }
+
+ if (mVideoOutput) {
+ delete mVideoOutput;
+ mVideoOutput = nullptr;
+ mMediaPlayer->setDisplay(nullptr);
+ }
+
+ mVideoOutput = new QAndroidTextureVideoOutput(sink, this);
+ connect(mVideoOutput, &QAndroidTextureVideoOutput::readyChanged, this,
+ &QAndroidMediaPlayer::onVideoOutputReady);
+ connect(mMediaPlayer, &AndroidMediaPlayer::timedTextChanged, mVideoOutput,
+ &QAndroidTextureVideoOutput::setSubtitle);
+
+ if (mVideoOutput->isReady())
+ mMediaPlayer->setDisplay(mVideoOutput->surfaceTexture());
+
+ connect(m_videoSink->platformVideoSink(), &QPlatformVideoSink::rhiChanged, this, [&]()
+ { mMediaPlayer->setDisplay(mVideoOutput->surfaceTexture()); });
+}
+
+void QAndroidMediaPlayer::setAudioOutput(QPlatformAudioOutput *output)
+{
+ if (m_audioOutput == output)
+ return;
+ if (m_audioOutput)
+ m_audioOutput->q->disconnect(this);
+ m_audioOutput = static_cast<QAndroidAudioOutput *>(output);
+ if (m_audioOutput) {
+ connect(m_audioOutput->q, &QAudioOutput::deviceChanged, this, &QAndroidMediaPlayer::updateAudioDevice);
+ connect(m_audioOutput->q, &QAudioOutput::volumeChanged, this, &QAndroidMediaPlayer::setVolume);
+ connect(m_audioOutput->q, &QAudioOutput::mutedChanged, this, &QAndroidMediaPlayer::setMuted);
+ updateAudioDevice();
+ }
+}
+
+void QAndroidMediaPlayer::updateAudioDevice()
+{
+ if (m_audioOutput)
+ mMediaPlayer->setAudioOutput(m_audioOutput->device.id());
+}
+
+void QAndroidMediaPlayer::play()
+{
+ StateChangeNotifier notifier(this);
+
+ resetCurrentLoop();
+
+ // A stopped media player has to be prepared again before it can play.
+ if ((mState & AndroidMediaPlayer::Stopped) && !mMediaContent.isEmpty()) {
+ setMedia(mMediaContent, mMediaStream);
+ }
+
+ if (!mMediaContent.isEmpty())
+ stateChanged(QMediaPlayer::PlayingState);
+
+ if ((mState & (AndroidMediaPlayer::Prepared
+ | AndroidMediaPlayer::Started
+ | AndroidMediaPlayer::Paused
+ | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
+ mPendingState = QMediaPlayer::PlayingState;
+ return;
+ }
+
+ if (mVideoOutput)
+ mVideoOutput->start();
+
+ updateAudioDevice();
+
+ if (mHasPendingPlaybackRate) {
+ mHasPendingPlaybackRate = false;
+ if (mMediaPlayer->setPlaybackRate(mCurrentPlaybackRate))
+ return;
+ mCurrentPlaybackRate = mMediaPlayer->playbackRate();
+ Q_EMIT playbackRateChanged(mCurrentPlaybackRate);
+ }
+
+ mMediaPlayer->play();
+}
+
+void QAndroidMediaPlayer::pause()
+{
+ // cannot pause without media
+ if (mediaStatus() == QMediaPlayer::NoMedia)
+ return;
+
+ StateChangeNotifier notifier(this);
+
+ stateChanged(QMediaPlayer::PausedState);
+
+ if ((mState & (AndroidMediaPlayer::Started
+ | AndroidMediaPlayer::Paused
+ | AndroidMediaPlayer::PlaybackCompleted
+ | AndroidMediaPlayer::Prepared
+ | AndroidMediaPlayer::Stopped)) == 0) {
+ mPendingState = QMediaPlayer::PausedState;
+ return;
+ }
+
+ const qint64 currentPosition = mMediaPlayer->getCurrentPosition();
+ setPosition(currentPosition);
+
+ mMediaPlayer->pause();
+}
+
+void QAndroidMediaPlayer::stop()
+{
+ StateChangeNotifier notifier(this);
+
+ stateChanged(QMediaPlayer::StoppedState);
+
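+ // If the native player is not yet in a state that can be stopped, remember
+ // the request; if it is already idle, uninitialized or in error, there is
+ // nothing to stop.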
+ if ((mState & (AndroidMediaPlayer::Prepared
+ | AndroidMediaPlayer::Started
+ | AndroidMediaPlayer::Stopped
+ | AndroidMediaPlayer::Paused
+ | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
+ if ((mState & (AndroidMediaPlayer::Idle | AndroidMediaPlayer::Uninitialized | AndroidMediaPlayer::Error)) == 0)
+ mPendingState = QMediaPlayer::StoppedState;
+ return;
+ }
+
+ // The playback rate needs to be reapplied once playback is restarted.
+ if (mCurrentPlaybackRate != 1.)
+ mHasPendingPlaybackRate = true;
+
+ if (mVideoOutput)
+ mVideoOutput->stop();
+
+ mMediaPlayer->stop();
+}
+
+void QAndroidMediaPlayer::onInfo(qint32 what, qint32 extra)
+{
+ StateChangeNotifier notifier(this);
+
+ Q_UNUSED(extra);
+ switch (what) {
+ case AndroidMediaPlayer::MEDIA_INFO_UNKNOWN:
+ break;
+ case AndroidMediaPlayer::MEDIA_INFO_VIDEO_TRACK_LAGGING:
+ // IGNORE
+ break;
+ case AndroidMediaPlayer::MEDIA_INFO_VIDEO_RENDERING_START:
+ break;
+ case AndroidMediaPlayer::MEDIA_INFO_BUFFERING_START:
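+ // Playback stalls while the native player rebuffers; remember the current
+ // state so flushPendingStates() can restore it on MEDIA_INFO_BUFFERING_END.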
+ mPendingState = state();
+ stateChanged(QMediaPlayer::PausedState);
+ setMediaStatus(QMediaPlayer::StalledMedia);
+ break;
+ case AndroidMediaPlayer::MEDIA_INFO_BUFFERING_END:
+ if (state() != QMediaPlayer::StoppedState)
+ flushPendingStates();
+ break;
+ case AndroidMediaPlayer::MEDIA_INFO_BAD_INTERLEAVING:
+ break;
+ case AndroidMediaPlayer::MEDIA_INFO_NOT_SEEKABLE:
+ seekableChanged(false);
+ break;
+ case AndroidMediaPlayer::MEDIA_INFO_METADATA_UPDATE:
+ Q_EMIT metaDataChanged();
+ break;
+ }
+}
+
+void QAndroidMediaPlayer::onError(qint32 what, qint32 extra)
+{
+ StateChangeNotifier notifier(this);
+
+ QString errorString;
+ QMediaPlayer::Error error = QMediaPlayer::ResourceError;
+
+ switch (what) {
+ case AndroidMediaPlayer::MEDIA_ERROR_UNKNOWN:
+ errorString = QLatin1String("Error:");
+ break;
+ case AndroidMediaPlayer::MEDIA_ERROR_SERVER_DIED:
+ errorString = QLatin1String("Error: Server died");
+ error = QMediaPlayer::ResourceError;
+ break;
+ case AndroidMediaPlayer::MEDIA_ERROR_INVALID_STATE:
+ errorString = QLatin1String("Error: Invalid state");
+ error = QMediaPlayer::ResourceError;
+ break;
+ }
+
+ switch (extra) {
+ case AndroidMediaPlayer::MEDIA_ERROR_IO: // Network OR file error
+ errorString += QLatin1String(" (I/O operation failed)");
+ error = QMediaPlayer::NetworkError;
+ setMediaStatus(QMediaPlayer::InvalidMedia);
+ break;
+ case AndroidMediaPlayer::MEDIA_ERROR_MALFORMED:
+ errorString += QLatin1String(" (Malformed bitstream)");
+ error = QMediaPlayer::FormatError;
+ setMediaStatus(QMediaPlayer::InvalidMedia);
+ break;
+ case AndroidMediaPlayer::MEDIA_ERROR_UNSUPPORTED:
+ errorString += QLatin1String(" (Unsupported media)");
+ error = QMediaPlayer::FormatError;
+ setMediaStatus(QMediaPlayer::InvalidMedia);
+ break;
+ case AndroidMediaPlayer::MEDIA_ERROR_TIMED_OUT:
+ errorString += QLatin1String(" (Timed out)");
+ break;
+ case AndroidMediaPlayer::MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK:
+ errorString += QLatin1String(" (Unable to start progressive playback')");
+ error = QMediaPlayer::FormatError;
+ setMediaStatus(QMediaPlayer::InvalidMedia);
+ break;
+ case AndroidMediaPlayer::MEDIA_ERROR_BAD_THINGS_ARE_GOING_TO_HAPPEN:
+ errorString += mMediaContent.scheme() == QLatin1String("rtsp")
+ ? QLatin1String(" (Unknown error/Insufficient resources or RTSP may not be supported)")
+ : QLatin1String(" (Unknown error/Insufficient resources)");
+ error = QMediaPlayer::ResourceError;
+ break;
+ }
+
+ Q_EMIT QPlatformMediaPlayer::error(error, errorString);
+}
+
+void QAndroidMediaPlayer::onBufferingChanged(qint32 percent)
+{
+ StateChangeNotifier notifier(this);
+
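+ // Android only reports a buffering percentage; anything below 100% counts
+ // as still buffering.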
+ mBuffering = percent != 100;
+ mBufferPercent = percent;
+
+ updateAvailablePlaybackRanges();
+
+ if (state() != QMediaPlayer::StoppedState)
+ setMediaStatus(mBuffering ? QMediaPlayer::BufferingMedia : QMediaPlayer::BufferedMedia);
+
+ updateBufferStatus();
+}
+
+void QAndroidMediaPlayer::onVideoSizeChanged(qint32 width, qint32 height)
+{
+ QSize newSize(width, height);
+
+ if (width == 0 || height == 0 || newSize == mVideoSize)
+ return;
+
+ setVideoAvailable(true);
+ mVideoSize = newSize;
+
+ if (mVideoOutput)
+ mVideoOutput->setVideoSize(mVideoSize);
+}
+
+void QAndroidMediaPlayer::onStateChanged(qint32 state)
+{
+ // If reloading, don't report state changes unless the new state is Prepared, Error or Uninitialized.
+ if ((mState & AndroidMediaPlayer::Stopped)
+ && (state & (AndroidMediaPlayer::Prepared | AndroidMediaPlayer::Error | AndroidMediaPlayer::Uninitialized)) == 0) {
+ return;
+ }
+
+ StateChangeNotifier notifier(this);
+
+ mState = state;
+ switch (mState) {
+ case AndroidMediaPlayer::Idle:
+ break;
+ case AndroidMediaPlayer::Initialized:
+ break;
+ case AndroidMediaPlayer::Preparing:
+ if (!mReloadingMedia)
+ setMediaStatus(QMediaPlayer::LoadingMedia);
+ break;
+ case AndroidMediaPlayer::Prepared:
+ setMediaStatus(QMediaPlayer::LoadedMedia);
+ if (mBuffering) {
+ setMediaStatus(mBufferPercent == 100 ? QMediaPlayer::BufferedMedia
+ : QMediaPlayer::BufferingMedia);
+ } else {
+ onBufferingChanged(100);
+ }
+ setPosition(0);
+ Q_EMIT metaDataChanged();
+ setAudioAvailable(true);
+ flushPendingStates();
+ break;
+ case AndroidMediaPlayer::Started:
+ stateChanged(QMediaPlayer::PlayingState);
+ if (mBuffering) {
+ setMediaStatus(mBufferPercent == 100 ? QMediaPlayer::BufferedMedia
+ : QMediaPlayer::BufferingMedia);
+ } else {
+ setMediaStatus(QMediaPlayer::BufferedMedia);
+ }
+ Q_EMIT positionChanged(position());
+ break;
+ case AndroidMediaPlayer::Paused:
+ stateChanged(QMediaPlayer::PausedState);
+ if (mediaStatus() == QMediaPlayer::EndOfMedia) {
+ setPosition(0);
+ setMediaStatus(QMediaPlayer::BufferedMedia);
+ } else {
+ Q_EMIT positionChanged(position());
+ }
+ break;
+ case AndroidMediaPlayer::Error:
+ stateChanged(QMediaPlayer::StoppedState);
+ setMediaStatus(QMediaPlayer::InvalidMedia);
+ mMediaPlayer->release();
+ Q_EMIT positionChanged(0);
+ break;
+ case AndroidMediaPlayer::Stopped:
+ stateChanged(QMediaPlayer::StoppedState);
+ setMediaStatus(QMediaPlayer::LoadedMedia);
+ Q_EMIT positionChanged(0);
+ break;
+ case AndroidMediaPlayer::PlaybackCompleted:
+ if (doLoop()) {
+ setPosition(0);
+ mMediaPlayer->play();
+ break;
+ }
+ stateChanged(QMediaPlayer::StoppedState);
+ setMediaStatus(QMediaPlayer::EndOfMedia);
+ break;
+ case AndroidMediaPlayer::Uninitialized:
+ // reset some properties (unless we reload the same media)
+ if (!mReloadingMedia) {
+ resetBufferingProgress();
+ mPendingPosition = -1;
+ mPendingSetMedia = false;
+ mPendingState = -1;
+
+ Q_EMIT durationChanged(0);
+ Q_EMIT positionChanged(0);
+
+ setAudioAvailable(false);
+ setVideoAvailable(false);
+ seekableChanged(true);
+ }
+ break;
+ default:
+ break;
+ }
+
+ if ((mState & (AndroidMediaPlayer::Stopped | AndroidMediaPlayer::Uninitialized)) != 0) {
+ mMediaPlayer->setDisplay(0);
+ if (mVideoOutput) {
+ mVideoOutput->stop();
+ }
+ }
+}
+
+int QAndroidMediaPlayer::trackCount(TrackType trackType)
+{
+ if (!mTracksMetadata.contains(trackType))
+ return -1;
+
+ auto tracks = mTracksMetadata.value(trackType);
+ return tracks.count();
+}
+
+QMediaMetaData QAndroidMediaPlayer::trackMetaData(TrackType trackType, int streamNumber)
+{
+ if (!mTracksMetadata.contains(trackType))
+ return QMediaMetaData();
+
+ auto tracks = mTracksMetadata.value(trackType);
+ if (tracks.count() < streamNumber)
+ return QMediaMetaData();
+
+ QAndroidMetaData trackInfo = tracks.at(streamNumber);
+ return static_cast<QMediaMetaData>(trackInfo);
+}
+
+QPlatformMediaPlayer::TrackType convertTrackType(AndroidMediaPlayer::TrackType type)
+{
+ switch (type) {
+ case AndroidMediaPlayer::TrackType::Video:
+ return QPlatformMediaPlayer::TrackType::VideoStream;
+ case AndroidMediaPlayer::TrackType::Audio:
+ return QPlatformMediaPlayer::TrackType::AudioStream;
+ case AndroidMediaPlayer::TrackType::TimedText:
+ return QPlatformMediaPlayer::TrackType::SubtitleStream;
+ case AndroidMediaPlayer::TrackType::Subtitle:
+ return QPlatformMediaPlayer::TrackType::SubtitleStream;
+ case AndroidMediaPlayer::TrackType::Unknown:
+ case AndroidMediaPlayer::TrackType::Metadata:
+ return QPlatformMediaPlayer::TrackType::NTrackTypes;
+ }
+
+ return QPlatformMediaPlayer::TrackType::NTrackTypes;
+}
+
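+// Map the flat track index used by Android (video tracks first, then audio,
+// then timed-text, then subtitle tracks) to an index relative to the track's
+// own type, mirroring the order in which the counts are subtracted below.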
+int QAndroidMediaPlayer::convertTrackNumber(int androidTrackNumber)
+{
+ int trackNumber = androidTrackNumber;
+
+ int videoTrackCount = trackCount(QPlatformMediaPlayer::TrackType::VideoStream);
+ if (trackNumber <= videoTrackCount)
+ return trackNumber;
+
+ trackNumber = trackNumber - videoTrackCount;
+
+ int audioTrackCount = trackCount(QPlatformMediaPlayer::TrackType::AudioStream);
+ if (trackNumber <= audioTrackCount)
+ return trackNumber;
+
+ trackNumber = trackNumber - audioTrackCount;
+
+ auto subtitleTracks = mTracksMetadata.value(QPlatformMediaPlayer::TrackType::SubtitleStream);
+ int timedTextCount = 0;
+ int subtitleTextCount = 0;
+ for (const auto &track : subtitleTracks) {
+ if (track.androidTrackType() == 3) // 3 == TimedText
+ timedTextCount++;
+
+ if (track.androidTrackType() == 4) // 4 == Subtitle
+ subtitleTextCount++;
+ }
+
+ if (trackNumber <= timedTextCount)
+ return trackNumber;
+
+ trackNumber = trackNumber - timedTextCount;
+
+ if (trackNumber <= subtitleTextCount)
+ return trackNumber;
+
+ return -1;
+}
+
+int QAndroidMediaPlayer::activeTrack(TrackType trackType)
+{
+ int androidTrackNumber = -1;
+
+ switch (trackType) {
+ case QPlatformMediaPlayer::TrackType::VideoStream: {
+ if (!mIsVideoTrackEnabled)
+ return -1;
+ androidTrackNumber = mMediaPlayer->activeTrack(AndroidMediaPlayer::TrackType::Video);
+ break;
+ }
+ case QPlatformMediaPlayer::TrackType::AudioStream: {
+ if (!mIsAudioTrackEnabled)
+ return -1;
+
+ androidTrackNumber = mMediaPlayer->activeTrack(AndroidMediaPlayer::TrackType::Audio);
+ break;
+ }
+ case QPlatformMediaPlayer::TrackType::SubtitleStream: {
+ int timedTextSelectedTrack =
+ mMediaPlayer->activeTrack(AndroidMediaPlayer::TrackType::TimedText);
+
+ if (timedTextSelectedTrack > -1) {
+ androidTrackNumber = timedTextSelectedTrack;
+ break;
+ }
+
+ int subtitleSelectedTrack =
+ mMediaPlayer->activeTrack(AndroidMediaPlayer::TrackType::Subtitle);
+ if (subtitleSelectedTrack > -1) {
+ androidTrackNumber = subtitleSelectedTrack;
+ break;
+ }
+
+ return -1;
+ }
+ case QPlatformMediaPlayer::TrackType::NTrackTypes:
+ return -1;
+ }
+
+ return convertTrackNumber(androidTrackNumber);
+}
+
+void QAndroidMediaPlayer::disableTrack(TrackType trackType)
+{
+ const auto track = activeTrack(trackType);
+
+ switch (trackType) {
+ case VideoStream: {
+ if (track > -1) {
+ mMediaPlayer->setDisplay(nullptr);
+ mIsVideoTrackEnabled = false;
+ }
+ break;
+ }
+ case AudioStream: {
+ if (track > -1) {
+ mMediaPlayer->setMuted(true);
+ mMediaPlayer->blockAudio();
+ mIsAudioTrackEnabled = false;
+ }
+ break;
+ }
+ case SubtitleStream: {
+ // subtitle and timed-text tracks can be selected at the same time, so deselect both
+ int subtitleSelectedTrack =
+ mMediaPlayer->activeTrack(AndroidMediaPlayer::TrackType::Subtitle);
+ if (subtitleSelectedTrack > -1)
+ mMediaPlayer->deselectTrack(subtitleSelectedTrack);
+
+ int timedTextSelectedTrack =
+ mMediaPlayer->activeTrack(AndroidMediaPlayer::TrackType::TimedText);
+ if (timedTextSelectedTrack > -1)
+ mMediaPlayer->deselectTrack(timedTextSelectedTrack);
+
+ break;
+ }
+ case NTrackTypes:
+ break;
+ }
+}
+
+void QAndroidMediaPlayer::setActiveTrack(TrackType trackType, int streamNumber)
+{
+
+ if (!mTracksMetadata.contains(trackType)) {
+ qCWarning(lcMediaPlayer)
+ << "Trying to set a active track which type has no available tracks.";
+ return;
+ }
+
+ const auto &tracks = mTracksMetadata.value(trackType);
+ if (streamNumber > tracks.count()) {
+ qCWarning(lcMediaPlayer) << "Trying to set a active track that does not exist.";
+ return;
+ }
+
+ // in case of < 0 deselect tracktype
+ if (streamNumber < 0) {
+ disableTrack(trackType);
+ return;
+ }
+
+ const auto currentTrack = activeTrack(trackType);
+ if (streamNumber == currentTrack) {
+ return;
+ }
+
+ if (trackType == TrackType::VideoStream && !mIsVideoTrackEnabled) {
+ // enable video stream
+ mMediaPlayer->setDisplay(mVideoOutput->surfaceTexture());
+ mIsVideoTrackEnabled = true;
+ }
+
+ if (trackType == TrackType::AudioStream && !mIsAudioTrackEnabled) {
+ // enable audio stream
+ mMediaPlayer->unblockAudio();
+ mMediaPlayer->setMuted(false);
+ mIsAudioTrackEnabled = true;
+ }
+
+ if (trackType == TrackType::SubtitleStream) {
+ // subtitle and timed-text tracks can be selected at the same time, so
+ // deselect both before selecting a new one
+ disableTrack(TrackType::SubtitleStream);
+ }
+
+ const auto &trackInfo = tracks.at(streamNumber);
+ const auto &trackNumber = trackInfo.androidTrackNumber();
+ mMediaPlayer->selectTrack(trackNumber);
+
+ emit activeTracksChanged();
+}
+
+void QAndroidMediaPlayer::positionChanged(qint64 position)
+{
+ QPlatformMediaPlayer::positionChanged(position);
+}
+
+void QAndroidMediaPlayer::durationChanged(qint64 duration)
+{
+ QPlatformMediaPlayer::durationChanged(duration);
+}
+
+void QAndroidMediaPlayer::onVideoOutputReady(bool ready)
+{
+ if ((mMediaPlayer->display() == 0) && mVideoOutput && ready)
+ mMediaPlayer->setDisplay(mVideoOutput->surfaceTexture());
+
+ flushPendingStates();
+}
+
+void QAndroidMediaPlayer::setMediaStatus(QMediaPlayer::MediaStatus status)
+{
+ mediaStatusChanged(status);
+
+ if (status == QMediaPlayer::NoMedia || status == QMediaPlayer::InvalidMedia) {
+ Q_EMIT durationChanged(0);
+ Q_EMIT metaDataChanged();
+ setAudioAvailable(false);
+ setVideoAvailable(false);
+ }
+
+ if (status == QMediaPlayer::EndOfMedia)
+ Q_EMIT positionChanged(position());
+
+ updateBufferStatus();
+}
+
+void QAndroidMediaPlayer::setAudioAvailable(bool available)
+{
+ if (mAudioAvailable == available)
+ return;
+
+ mAudioAvailable = available;
+ Q_EMIT audioAvailableChanged(mAudioAvailable);
+}
+
+void QAndroidMediaPlayer::setVideoAvailable(bool available)
+{
+ if (mVideoAvailable == available)
+ return;
+
+ if (!available)
+ mVideoSize = QSize();
+
+ mVideoAvailable = available;
+ Q_EMIT videoAvailableChanged(mVideoAvailable);
+}
+
+void QAndroidMediaPlayer::resetBufferingProgress()
+{
+ mBuffering = false;
+ mBufferPercent = 0;
+ mAvailablePlaybackRange = QMediaTimeRange();
+}
+
+void QAndroidMediaPlayer::flushPendingStates()
+{
+ if (mPendingSetMedia) {
+ setMedia(mMediaContent, nullptr);
+ mPendingSetMedia = false;
+ return;
+ }
+
+ const int newState = mPendingState;
+ mPendingState = -1;
+
+ if (mPendingPosition != -1)
+ setPosition(mPendingPosition);
+ if (mPendingVolume >= 0)
+ setVolume(mPendingVolume);
+ if (mPendingMute != -1)
+ setMuted((mPendingMute == 1));
+
+ switch (newState) {
+ case QMediaPlayer::PlayingState:
+ play();
+ break;
+ case QMediaPlayer::PausedState:
+ pause();
+ break;
+ case QMediaPlayer::StoppedState:
+ stop();
+ break;
+ default:
+ break;
+ }
+}
+
+void QAndroidMediaPlayer::updateBufferStatus()
+{
+ const auto &status = mediaStatus();
+ bool bufferFilled = (status == QMediaPlayer::BufferedMedia || status == QMediaPlayer::BufferingMedia);
+
+ mBufferFilled = bufferFilled;
+
+ emit bufferProgressChanged(bufferProgress());
+}
+
+void QAndroidMediaPlayer::updateTrackInfo()
+{
+ const auto &androidTracksInfo = mMediaPlayer->tracksInfo();
+
+ // prepare mTracksMetadata
+ mTracksMetadata[TrackType::VideoStream] = QList<QAndroidMetaData>();
+ mTracksMetadata[TrackType::AudioStream] = QList<QAndroidMetaData>();
+ mTracksMetadata[TrackType::SubtitleStream] = QList<QAndroidMetaData>();
+ mTracksMetadata[TrackType::NTrackTypes] = QList<QAndroidMetaData>();
+
+ for (const auto &androidTrackInfo : androidTracksInfo) {
+
+ const auto &mediaPlayerType = convertTrackType(androidTrackInfo.trackType);
+ auto &tracks = mTracksMetadata[mediaPlayerType];
+
+ const QAndroidMetaData metadata(mediaPlayerType, androidTrackInfo.trackType,
+ androidTrackInfo.trackNumber, androidTrackInfo.mimeType,
+ androidTrackInfo.language);
+ tracks.append(metadata);
+ }
+
+ emit tracksChanged();
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qandroidmediaplayer_p.cpp"
diff --git a/src/plugins/multimedia/android/mediaplayer/qandroidmediaplayer_p.h b/src/plugins/multimedia/android/mediaplayer/qandroidmediaplayer_p.h
new file mode 100644
index 000000000..dd2a3469d
--- /dev/null
+++ b/src/plugins/multimedia/android/mediaplayer/qandroidmediaplayer_p.h
@@ -0,0 +1,127 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDMEDIAPLAYERCONTROL_H
+#define QANDROIDMEDIAPLAYERCONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qglobal.h>
+#include <private/qplatformmediaplayer_p.h>
+#include <qandroidmetadata_p.h>
+#include <qmap.h>
+#include <qsize.h>
+#include <qurl.h>
+
+QT_BEGIN_NAMESPACE
+
+class AndroidMediaPlayer;
+class QAndroidTextureVideoOutput;
+class QAndroidMediaPlayerVideoRendererControl;
+class QAndroidAudioOutput;
+
+class QAndroidMediaPlayer : public QObject, public QPlatformMediaPlayer
+{
+ Q_OBJECT
+
+public:
+ explicit QAndroidMediaPlayer(QMediaPlayer *parent = nullptr);
+ ~QAndroidMediaPlayer() override;
+
+ qint64 duration() const override;
+ qint64 position() const override;
+ float bufferProgress() const override;
+ bool isAudioAvailable() const override;
+ bool isVideoAvailable() const override;
+ QMediaTimeRange availablePlaybackRanges() const override;
+ qreal playbackRate() const override;
+ void setPlaybackRate(qreal rate) override;
+ QUrl media() const override;
+ const QIODevice *mediaStream() const override;
+ void setMedia(const QUrl &mediaContent, QIODevice *stream) override;
+
+ QMediaMetaData metaData() const override;
+
+ void setVideoSink(QVideoSink *surface) override;
+
+ void setAudioOutput(QPlatformAudioOutput *output) override;
+ void updateAudioDevice();
+
+ void setPosition(qint64 position) override;
+ void play() override;
+ void pause() override;
+ void stop() override;
+
+ int trackCount(TrackType trackType) override;
+ QMediaMetaData trackMetaData(TrackType trackType, int streamNumber) override;
+ int activeTrack(TrackType trackType) override;
+ void setActiveTrack(TrackType trackType, int streamNumber) override;
+
+private Q_SLOTS:
+ void setVolume(float volume);
+ void setMuted(bool muted);
+ void onVideoOutputReady(bool ready);
+ void onError(qint32 what, qint32 extra);
+ void onInfo(qint32 what, qint32 extra);
+ void onBufferingChanged(qint32 percent);
+ void onVideoSizeChanged(qint32 width, qint32 height);
+ void onStateChanged(qint32 state);
+ void positionChanged(qint64 position);
+ void durationChanged(qint64 duration);
+
+private:
+ AndroidMediaPlayer *mMediaPlayer = nullptr;
+ QAndroidAudioOutput *m_audioOutput = nullptr;
+ QUrl mMediaContent;
+ QIODevice *mMediaStream = nullptr;
+ QAndroidTextureVideoOutput *mVideoOutput = nullptr;
+ QVideoSink *m_videoSink = nullptr;
+ int mBufferPercent = -1;
+ bool mBufferFilled = false;
+ bool mAudioAvailable = false;
+ bool mVideoAvailable = false;
+ QSize mVideoSize;
+ bool mBuffering = false;
+ QMediaTimeRange mAvailablePlaybackRange;
+ int mState;
+ int mPendingState = -1;
+ qint64 mPendingPosition = -1;
+ bool mPendingSetMedia = false;
+ float mPendingVolume = -1;
+ int mPendingMute = -1;
+ bool mReloadingMedia = false;
+ int mActiveStateChangeNotifiers = 0;
+ qreal mCurrentPlaybackRate = 1.;
+ bool mHasPendingPlaybackRate = false; // we need this because the rate can theoretically be negative
+ QMap<TrackType, QList<QAndroidMetaData>> mTracksMetadata;
+
+ bool mIsVideoTrackEnabled = true;
+ bool mIsAudioTrackEnabled = true;
+
+ void setMediaStatus(QMediaPlayer::MediaStatus status);
+ void setAudioAvailable(bool available);
+ void setVideoAvailable(bool available);
+ void updateAvailablePlaybackRanges();
+ void resetBufferingProgress();
+ void flushPendingStates();
+ void updateBufferStatus();
+ void updateTrackInfo();
+ void setSubtitle(QString subtitle);
+ void disableTrack(TrackType trackType);
+
+ int convertTrackNumber(int androidTrackNumber);
+ friend class StateChangeNotifier;
+};
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDMEDIAPLAYERCONTROL_H
diff --git a/src/plugins/multimedia/android/mediaplayer/qandroidmetadata.cpp b/src/plugins/multimedia/android/mediaplayer/qandroidmetadata.cpp
new file mode 100644
index 000000000..b01845fa7
--- /dev/null
+++ b/src/plugins/multimedia/android/mediaplayer/qandroidmetadata.cpp
@@ -0,0 +1,163 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidmetadata_p.h"
+
+#include "androidmediametadataretriever_p.h"
+#include <QtMultimedia/qmediametadata.h>
+#include <qsize.h>
+#include <QDate>
+#include <QtCore/qlist.h>
+#include <QtConcurrent/qtconcurrentrun.h>
+#include <QLoggingCategory>
+
+QT_BEGIN_NAMESPACE
+
+// Genre names ordered by ID3v1 genre ID
+// see: http://id3.org/id3v2.3.0#Appendix_A_-_Genre_List_from_ID3v1
+static const char* qt_ID3GenreNames[] =
+{
+ "Blues", "Classic Rock", "Country", "Dance", "Disco", "Funk", "Grunge", "Hip-Hop", "Jazz",
+ "Metal", "New Age", "Oldies", "Other", "Pop", "R&B", "Rap", "Reggae", "Rock", "Techno",
+ "Industrial", "Alternative", "Ska", "Death Metal", "Pranks", "Soundtrack", "Euro-Techno",
+ "Ambient", "Trip-Hop", "Vocal", "Jazz+Funk", "Fusion", "Trance", "Classical", "Instrumental",
+ "Acid", "House", "Game", "Sound Clip", "Gospel", "Noise", "AlternRock", "Bass", "Soul", "Punk",
+ "Space", "Meditative", "Instrumental Pop", "Instrumental Rock", "Ethnic", "Gothic", "Darkwave",
+ "Techno-Industrial", "Electronic", "Pop-Folk", "Eurodance", "Dream", "Southern Rock", "Comedy",
+ "Cult", "Gangsta", "Top 40", "Christian Rap", "Pop/Funk", "Jungle", "Native American",
+ "Cabaret", "New Wave", "Psychadelic", "Rave", "Showtunes", "Trailer", "Lo-Fi", "Tribal",
+ "Acid Punk", "Acid Jazz", "Polka", "Retro", "Musical", "Rock & Roll", "Hard Rock", "Folk",
+ "Folk-Rock", "National Folk", "Swing", "Fast Fusion", "Bebob", "Latin", "Revival", "Celtic",
+ "Bluegrass", "Avantgarde", "Gothic Rock", "Progressive Rock", "Psychedelic Rock",
+ "Symphonic Rock", "Slow Rock", "Big Band", "Chorus", "Easy Listening", "Acoustic", "Humour",
+ "Speech", "Chanson", "Opera", "Chamber Music", "Sonata", "Symphony", "Booty Bass", "Primus",
+ "Porn Groove", "Satire", "Slow Jam", "Club", "Tango", "Samba", "Folklore", "Ballad",
+ "Power Ballad", "Rhythmic Soul", "Freestyle", "Duet", "Punk Rock", "Drum Solo", "A capella",
+ "Euro-House", "Dance Hall"
+};
+
+QMediaMetaData QAndroidMetaData::extractMetadata(const QUrl &url)
+{
+ QMediaMetaData metadata;
+
+ if (!url.isEmpty()) {
+ AndroidMediaMetadataRetriever retriever;
+ if (!retriever.setDataSource(url))
+ return metadata;
+
+ QString mimeType = retriever.extractMetadata(AndroidMediaMetadataRetriever::MimeType);
+ if (!mimeType.isNull())
+ metadata.insert(QMediaMetaData::MediaType, mimeType);
+
+ bool isVideo = !retriever.extractMetadata(AndroidMediaMetadataRetriever::HasVideo).isNull()
+ || mimeType.startsWith(QStringLiteral("video"));
+
+ QString string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Album);
+ if (!string.isNull())
+ metadata.insert(QMediaMetaData::AlbumTitle, string);
+
+ string = retriever.extractMetadata(AndroidMediaMetadataRetriever::AlbumArtist);
+ if (!string.isNull())
+ metadata.insert(QMediaMetaData::AlbumArtist, string);
+
+ string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Artist);
+ if (!string.isNull()) {
+ metadata.insert(isVideo ? QMediaMetaData::LeadPerformer
+ : QMediaMetaData::ContributingArtist,
+ string.split(QLatin1Char('/'), Qt::SkipEmptyParts));
+ }
+
+ string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Author);
+ if (!string.isNull())
+ metadata.insert(QMediaMetaData::Author, string.split(QLatin1Char('/'), Qt::SkipEmptyParts));
+
+ string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Bitrate);
+ if (!string.isNull()) {
+ metadata.insert(isVideo ? QMediaMetaData::VideoBitRate
+ : QMediaMetaData::AudioBitRate,
+ string.toInt());
+ }
+
+ string = retriever.extractMetadata(AndroidMediaMetadataRetriever::CDTrackNumber);
+ if (!string.isNull())
+ metadata.insert(QMediaMetaData::TrackNumber, string.toInt());
+
+ string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Composer);
+ if (!string.isNull())
+ metadata.insert(QMediaMetaData::Composer, string.split(QLatin1Char('/'), Qt::SkipEmptyParts));
+
+ string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Date);
+ if (!string.isNull())
+ metadata.insert(QMediaMetaData::Date, QDateTime::fromString(string, QStringLiteral("yyyyMMddTHHmmss.zzzZ")).date());
+
+ string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Duration);
+ if (!string.isNull())
+ metadata.insert(QMediaMetaData::Duration, string.toLongLong());
+
+ string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Genre);
+ if (!string.isNull()) {
+ // The genre can be returned as an ID3v2 id, get the name for it in that case
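+ // e.g. "(21)" maps to "Ska" in qt_ID3GenreNames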
+ if (string.startsWith(QLatin1Char('(')) && string.endsWith(QLatin1Char(')'))) {
+ bool ok = false;
+ const int genreId = QStringView{string}.mid(1, string.length() - 2).toInt(&ok);
+ if (ok && genreId >= 0 && genreId <= 125)
+ string = QLatin1String(qt_ID3GenreNames[genreId]);
+ }
+ metadata.insert(QMediaMetaData::Genre, string);
+ }
+
+ string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Title);
+ if (!string.isNull())
+ metadata.insert(QMediaMetaData::Title, string);
+
+ string = retriever.extractMetadata(AndroidMediaMetadataRetriever::VideoHeight);
+ if (!string.isNull()) {
+ const int height = string.toInt();
+ const int width = retriever.extractMetadata(AndroidMediaMetadataRetriever::VideoWidth).toInt();
+ metadata.insert(QMediaMetaData::Resolution, QSize(width, height));
+ }
+
+// string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Writer);
+// if (!string.isNull())
+// metadata.insert(QMediaMetaData::Writer, string.split('/', Qt::SkipEmptyParts));
+
+ }
+
+ return metadata;
+}
+
+QLocale::Language getLocaleLanguage(const QString &language)
+{
+ // undefined language or uncoded language
+ if (language == QLatin1String("und") || language == QStringLiteral("mis"))
+ return QLocale::AnyLanguage;
+
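+ // Map the ISO 639-2 code reported by Android (e.g. "eng") to a QLocale language.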
+ return QLocale::codeToLanguage(language, QLocale::ISO639Part2);
+}
+
+QAndroidMetaData::QAndroidMetaData(int trackType, int androidTrackType, int androidTrackNumber,
+ const QString &mimeType, const QString &language)
+ : mTrackType(trackType),
+ mAndroidTrackType(androidTrackType),
+ mAndroidTrackNumber(androidTrackNumber)
+{
+ insert(QMediaMetaData::MediaType, mimeType);
+ insert(QMediaMetaData::Language, getLocaleLanguage(language));
+}
+
+int QAndroidMetaData::trackType() const
+{
+ return mTrackType;
+}
+
+int QAndroidMetaData::androidTrackType() const
+{
+ return mAndroidTrackType;
+}
+
+int QAndroidMetaData::androidTrackNumber() const
+{
+ return mAndroidTrackNumber;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/android/mediaplayer/qandroidmetadata_p.h b/src/plugins/multimedia/android/mediaplayer/qandroidmetadata_p.h
new file mode 100644
index 000000000..1bbad92dd
--- /dev/null
+++ b/src/plugins/multimedia/android/mediaplayer/qandroidmetadata_p.h
@@ -0,0 +1,47 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDMETADATA_H
+#define QANDROIDMETADATA_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qmediametadata.h>
+#include <qurl.h>
+#include <QMutex>
+#include <QVariant>
+
+QT_BEGIN_NAMESPACE
+
+class AndroidMediaMetadataRetriever;
+
+class QAndroidMetaData : public QMediaMetaData
+{
+public:
+ static QMediaMetaData extractMetadata(const QUrl &url);
+
+ QAndroidMetaData(int trackType, int androidTrackType, int androidTrackNumber,
+ const QString &mimeType, const QString &language);
+
+ int trackType() const;
+ int androidTrackType() const;
+ int androidTrackNumber() const;
+
+private:
+ int mTrackType;
+ int mAndroidTrackType;
+ int mAndroidTrackNumber;
+};
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDMETADATA_H
diff --git a/src/plugins/multimedia/android/qandroidformatsinfo.cpp b/src/plugins/multimedia/android/qandroidformatsinfo.cpp
new file mode 100644
index 000000000..3b23340ce
--- /dev/null
+++ b/src/plugins/multimedia/android/qandroidformatsinfo.cpp
@@ -0,0 +1,160 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidformatsinfo_p.h"
+
+#include <QtCore/qjnienvironment.h>
+#include <QtCore/qjniobject.h>
+#include <qcoreapplication.h>
+
+static const char encoderFilter[] = ".encoder";
+static const char decoderFilter[] = ".decoder";
+
+QT_BEGIN_NAMESPACE
+
+QAndroidFormatInfo::QAndroidFormatInfo()
+{
+ // Audio/video/image formats and their decoder/encoder support are documented at
+ // https://developer.android.com/guide/topics/media/media-formats
+
+ const QJniObject codecsArrayObject = QJniObject::callStaticObjectMethod(
+ "org/qtproject/qt/android/multimedia/QtMultimediaUtils",
+ "getMediaCodecs",
+ "()[Ljava/lang/String;");
+ QStringList codecs;
+ QJniEnvironment env;
+ const jobjectArray codecsArray = codecsArrayObject.object<jobjectArray>();
+ for (int i = 0; i < env->GetArrayLength(codecsArray); ++i) {
+ const QString codec = QJniObject(env->GetObjectArrayElement(codecsArray, i)).toString();
+ if (codec.contains(QStringLiteral("encoder")))
+ m_supportedEncoders.append(codec);
+ else
+ m_supportedDecoders.append(codec);
+ }
+
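+ // hasEncoder()/hasDecoder() return Unspecified for codecs the device lacks;
+ // drop those entries, then drop any container format left without a single
+ // usable codec.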
+ auto removeUnspecifiedValues = [](QList<CodecMap> &map) {
+ for (CodecMap &codec : map) {
+ codec.audio.removeAll(QMediaFormat::AudioCodec::Unspecified);
+ codec.video.removeAll(QMediaFormat::VideoCodec::Unspecified);
+ }
+ erase_if(map, [](const CodecMap &codec) {
+ return codec.audio.isEmpty() && codec.video.isEmpty();
+ });
+ };
+
+ {
+ const QMediaFormat::AudioCodec aac = hasDecoder(QMediaFormat::AudioCodec::AAC);
+ const QMediaFormat::AudioCodec mp3 = hasDecoder(QMediaFormat::AudioCodec::MP3);
+ const QMediaFormat::AudioCodec flac = hasDecoder(QMediaFormat::AudioCodec::FLAC);
+ const QMediaFormat::AudioCodec opus = hasDecoder(QMediaFormat::AudioCodec::Opus);
+ const QMediaFormat::AudioCodec vorbis = hasDecoder(QMediaFormat::AudioCodec::Vorbis);
+
+ const QMediaFormat::VideoCodec vp8 = hasDecoder(QMediaFormat::VideoCodec::VP8);
+ const QMediaFormat::VideoCodec vp9 = hasDecoder(QMediaFormat::VideoCodec::VP9);
+ const QMediaFormat::VideoCodec h264 = hasDecoder(QMediaFormat::VideoCodec::H264);
+ const QMediaFormat::VideoCodec h265 = hasDecoder(QMediaFormat::VideoCodec::H265);
+ const QMediaFormat::VideoCodec av1 = hasDecoder(QMediaFormat::VideoCodec::AV1);
+
+ decoders = {
+ { QMediaFormat::AAC, {aac}, {} },
+ { QMediaFormat::MP3, {mp3}, {} },
+ { QMediaFormat::FLAC, {flac}, {} },
+ { QMediaFormat::Mpeg4Audio, {mp3, aac, flac, vorbis}, {} },
+ { QMediaFormat::MPEG4, {mp3, aac, flac, vorbis}, {h264, h265, av1} },
+ { QMediaFormat::Ogg, {opus, vorbis, flac}, {} },
+ { QMediaFormat::Matroska, {mp3, opus, vorbis}, {vp8, vp9, h264, h265, av1} },
+ { QMediaFormat::WebM, {opus, vorbis}, {vp8, vp9} }
+ };
+
+ removeUnspecifiedValues(decoders);
+ }
+
+ {
+ const QMediaFormat::AudioCodec aac = hasEncoder(QMediaFormat::AudioCodec::AAC);
+ const QMediaFormat::AudioCodec mp3 = hasEncoder(QMediaFormat::AudioCodec::MP3);
+ const QMediaFormat::AudioCodec opus = hasEncoder(QMediaFormat::AudioCodec::Opus);
+ const QMediaFormat::AudioCodec vorbis = hasEncoder(QMediaFormat::AudioCodec::Vorbis);
+
+ const QMediaFormat::VideoCodec vp8 = hasEncoder(QMediaFormat::VideoCodec::VP8);
+ const QMediaFormat::VideoCodec vp9 = hasEncoder(QMediaFormat::VideoCodec::VP9);
+ const QMediaFormat::VideoCodec h264 = hasEncoder(QMediaFormat::VideoCodec::H264);
+ const QMediaFormat::VideoCodec h265 = hasEncoder(QMediaFormat::VideoCodec::H265);
+ const QMediaFormat::VideoCodec av1 = hasEncoder(QMediaFormat::VideoCodec::AV1);
+
+ // MP3 and Vorbis encoders are not supported by the default Android SDK.
+ // The Opus encoder is only available on Android 10+.
+ encoders = {
+ { QMediaFormat::AAC, {aac}, {} },
+ { QMediaFormat::MP3, {mp3}, {} },
+ // FLAC encoder is not supported by the MediaRecorder used for recording
+ // { QMediaFormat::FLAC, {flac}, {} },
+ { QMediaFormat::Mpeg4Audio, {mp3, aac, vorbis}, {} },
+ { QMediaFormat::MPEG4, {mp3, aac, vorbis}, {h264, h265, av1} },
+ { QMediaFormat::Ogg, {opus, vorbis}, {} },
+ { QMediaFormat::Matroska, {mp3, opus}, {vp8, vp9, h264, h265, av1} },
+ // NOTE: WebM is documented to be supported with the VP8 encoder,
+ // but the Camera API doesn't work with it, so keep it commented out for now.
+ // { QMediaFormat::WebM, {vorbis, opus}, {vp8, vp9} }
+ };
+
+ removeUnspecifiedValues(encoders);
+ }
+
+ imageFormats << QImageCapture::JPEG;
+ // NOTE: Add later if needed; the Camera API doesn't seem to work with these.
+ // imageFormats << QImageCapture::PNG << QImageCapture::WebP;
+}
+
+QAndroidFormatInfo::~QAndroidFormatInfo()
+{
+}
+
+static QString getVideoCodecName(QMediaFormat::VideoCodec codec)
+{
+ QString codecString = QMediaFormat::videoCodecName(codec);
+ if (codec == QMediaFormat::VideoCodec::H265)
+ codecString = QLatin1String("HEVC");
+ return codecString;
+}
+
+QMediaFormat::AudioCodec QAndroidFormatInfo::hasEncoder(QMediaFormat::AudioCodec codec) const
+{
+ const QString codecString = QMediaFormat::audioCodecName(codec);
+ for (const auto &str : m_supportedEncoders) {
+ if (str.contains(codecString + QLatin1String(encoderFilter), Qt::CaseInsensitive))
+ return codec;
+ }
+ return QMediaFormat::AudioCodec::Unspecified;
+}
+
+QMediaFormat::VideoCodec QAndroidFormatInfo::hasEncoder(QMediaFormat::VideoCodec codec) const
+{
+ const QString codecString = getVideoCodecName(codec);
+ for (const auto &str : m_supportedEncoders) {
+ if (str.contains(codecString + QLatin1String(encoderFilter), Qt::CaseInsensitive))
+ return codec;
+ }
+ return QMediaFormat::VideoCodec::Unspecified;
+}
+
+QMediaFormat::AudioCodec QAndroidFormatInfo::hasDecoder(QMediaFormat::AudioCodec codec) const
+{
+ const QString codecString = QMediaFormat::audioCodecName(codec);
+ for (const auto &str : m_supportedDecoders) {
+ if (str.contains(codecString + QLatin1String(decoderFilter), Qt::CaseInsensitive))
+ return codec;
+ }
+ return QMediaFormat::AudioCodec::Unspecified;
+}
+
+QMediaFormat::VideoCodec QAndroidFormatInfo::hasDecoder(QMediaFormat::VideoCodec codec) const
+{
+ const QString codecString = getVideoCodecName(codec);
+ for (const auto &str : m_supportedDecoders) {
+ if (str.contains(codecString + QLatin1String(decoderFilter), Qt::CaseInsensitive))
+ return codec;
+ }
+ return QMediaFormat::VideoCodec::Unspecified;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/android/qandroidformatsinfo_p.h b/src/plugins/multimedia/android/qandroidformatsinfo_p.h
new file mode 100644
index 000000000..2d14ad181
--- /dev/null
+++ b/src/plugins/multimedia/android/qandroidformatsinfo_p.h
@@ -0,0 +1,40 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDFORMATINFO_H
+#define QANDROIDFORMATINFO_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediaformatinfo_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAndroidFormatInfo : public QPlatformMediaFormatInfo
+{
+public:
+ QAndroidFormatInfo();
+ ~QAndroidFormatInfo();
+
+private:
+ QMediaFormat::AudioCodec hasEncoder(QMediaFormat::AudioCodec codec) const;
+ QMediaFormat::VideoCodec hasEncoder(QMediaFormat::VideoCodec codec) const;
+ QMediaFormat::AudioCodec hasDecoder(QMediaFormat::AudioCodec codec) const;
+ QMediaFormat::VideoCodec hasDecoder(QMediaFormat::VideoCodec codec) const;
+
+ QStringList m_supportedDecoders;
+ QStringList m_supportedEncoders;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/android/qandroidintegration.cpp b/src/plugins/multimedia/android/qandroidintegration.cpp
new file mode 100644
index 000000000..c7077e49d
--- /dev/null
+++ b/src/plugins/multimedia/android/qandroidintegration.cpp
@@ -0,0 +1,136 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidintegration_p.h"
+#include "qandroidglobal_p.h"
+#include "qandroidmediacapturesession_p.h"
+#include "androidmediaplayer_p.h"
+#include "qandroidcamerasession_p.h"
+#include "androidsurfacetexture_p.h"
+#include "androidsurfaceview_p.h"
+#include "androidcamera_p.h"
+#include "qandroidcamera_p.h"
+#include "qandroidimagecapture_p.h"
+#include "qandroidmediaencoder_p.h"
+#include "androidmediarecorder_p.h"
+#include "qandroidformatsinfo_p.h"
+#include "qandroidmediaplayer_p.h"
+#include "qandroidaudiooutput_p.h"
+#include "qandroidaudioinput_p.h"
+#include "qandroidvideosink_p.h"
+#include "qandroidaudiodecoder_p.h"
+#include <QtMultimedia/private/qplatformmediaplugin_p.h>
+
+QT_BEGIN_NAMESPACE
+
+Q_LOGGING_CATEGORY(qtAndroidMediaPlugin, "qt.multimedia.android")
+
+class QAndroidMediaPlugin : public QPlatformMediaPlugin
+{
+ Q_OBJECT
+ Q_PLUGIN_METADATA(IID QPlatformMediaPlugin_iid FILE "android.json")
+
+public:
+ QAndroidMediaPlugin()
+ : QPlatformMediaPlugin()
+ {}
+
+ QPlatformMediaIntegration* create(const QString &name) override
+ {
+ if (name == u"android")
+ return new QAndroidIntegration;
+ return nullptr;
+ }
+};
+
+QAndroidIntegration::QAndroidIntegration() : QPlatformMediaIntegration(QLatin1String("android")) { }
+
+QMaybe<QPlatformAudioDecoder *> QAndroidIntegration::createAudioDecoder(QAudioDecoder *decoder)
+{
+ return new QAndroidAudioDecoder(decoder);
+}
+
+QPlatformMediaFormatInfo *QAndroidIntegration::createFormatInfo()
+{
+ return new QAndroidFormatInfo;
+}
+
+QMaybe<QPlatformMediaCaptureSession *> QAndroidIntegration::createCaptureSession()
+{
+ return new QAndroidMediaCaptureSession();
+}
+
+QMaybe<QPlatformMediaPlayer *> QAndroidIntegration::createPlayer(QMediaPlayer *player)
+{
+ return new QAndroidMediaPlayer(player);
+}
+
+QMaybe<QPlatformCamera *> QAndroidIntegration::createCamera(QCamera *camera)
+{
+ return new QAndroidCamera(camera);
+}
+
+QMaybe<QPlatformMediaRecorder *> QAndroidIntegration::createRecorder(QMediaRecorder *recorder)
+{
+ return new QAndroidMediaEncoder(recorder);
+}
+
+QMaybe<QPlatformImageCapture *> QAndroidIntegration::createImageCapture(QImageCapture *imageCapture)
+{
+ return new QAndroidImageCapture(imageCapture);
+}
+
+QMaybe<QPlatformAudioOutput *> QAndroidIntegration::createAudioOutput(QAudioOutput *q)
+{
+ return new QAndroidAudioOutput(q);
+}
+
+QMaybe<QPlatformAudioInput *> QAndroidIntegration::createAudioInput(QAudioInput *audioInput)
+{
+ return new QAndroidAudioInput(audioInput);
+}
+
+QMaybe<QPlatformVideoSink *> QAndroidIntegration::createVideoSink(QVideoSink *sink)
+{
+ return new QAndroidVideoSink(sink);
+}
+
+Q_DECL_EXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void * /*reserved*/)
+{
+ static bool initialized = false;
+ if (initialized)
+ return JNI_VERSION_1_6;
+ initialized = true;
+
+ QT_USE_NAMESPACE
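+ // JavaVM::GetEnv() wants a void **, so go through a union instead of
+ // casting a JNIEnv ** directly.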
+ typedef union {
+ JNIEnv *nativeEnvironment;
+ void *venv;
+ } UnionJNIEnvToVoid;
+
+ UnionJNIEnvToVoid uenv;
+ uenv.venv = NULL;
+
+ if (vm->GetEnv(&uenv.venv, JNI_VERSION_1_6) != JNI_OK)
+ return JNI_ERR;
+
+ if (!AndroidMediaPlayer::registerNativeMethods()
+ || !AndroidCamera::registerNativeMethods()
+ || !AndroidMediaRecorder::registerNativeMethods()
+ || !AndroidSurfaceHolder::registerNativeMethods()) {
+ return JNI_ERR;
+ }
+
+ AndroidSurfaceTexture::registerNativeMethods();
+
+ return JNI_VERSION_1_6;
+}
+
+QList<QCameraDevice> QAndroidIntegration::videoInputs()
+{
+ return QAndroidCameraSession::availableCameras();
+}
+
+QT_END_NAMESPACE
+
+#include "qandroidintegration.moc"
diff --git a/src/plugins/multimedia/android/qandroidintegration_p.h b/src/plugins/multimedia/android/qandroidintegration_p.h
new file mode 100644
index 000000000..9ef5a3267
--- /dev/null
+++ b/src/plugins/multimedia/android/qandroidintegration_p.h
@@ -0,0 +1,48 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDINTEGRATION_H
+#define QANDROIDINTEGRATION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediaintegration_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAndroidMediaDevices;
+
+class QAndroidIntegration : public QPlatformMediaIntegration
+{
+public:
+ QAndroidIntegration();
+
+ QMaybe<QPlatformAudioDecoder *> createAudioDecoder(QAudioDecoder *decoder) override;
+ QMaybe<QPlatformMediaCaptureSession *> createCaptureSession() override;
+ QMaybe<QPlatformMediaPlayer *> createPlayer(QMediaPlayer *player) override;
+ QMaybe<QPlatformCamera *> createCamera(QCamera *camera) override;
+ QMaybe<QPlatformMediaRecorder *> createRecorder(QMediaRecorder *recorder) override;
+ QMaybe<QPlatformImageCapture *> createImageCapture(QImageCapture *imageCapture) override;
+
+ QMaybe<QPlatformAudioOutput *> createAudioOutput(QAudioOutput *q) override;
+ QMaybe<QPlatformAudioInput *> createAudioInput(QAudioInput *audioInput) override;
+
+ QMaybe<QPlatformVideoSink *> createVideoSink(QVideoSink *) override;
+ QList<QCameraDevice> videoInputs() override;
+
+protected:
+ QPlatformMediaFormatInfo *createFormatInfo() override;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidcamera.cpp b/src/plugins/multimedia/android/wrappers/jni/androidcamera.cpp
new file mode 100644
index 000000000..cef36d7ad
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidcamera.cpp
@@ -0,0 +1,1797 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// Copyright (C) 2016 Ruslan Baratov
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "androidcamera_p.h"
+#include "androidsurfacetexture_p.h"
+#include "androidsurfaceview_p.h"
+#include "qandroidmultimediautils_p.h"
+#include "qandroidglobal_p.h"
+
+#include <private/qvideoframe_p.h>
+
+#include <qhash.h>
+#include <qstringlist.h>
+#include <qdebug.h>
+#include <QtCore/qthread.h>
+#include <QtCore/qreadwritelock.h>
+#include <QtCore/qmutex.h>
+#include <QtMultimedia/private/qmemoryvideobuffer_p.h>
+#include <QtCore/qcoreapplication.h>
+
+#include <mutex>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(lcAndroidCamera, "qt.multimedia.android.camera")
+
+static const char QtCameraListenerClassName[] = "org/qtproject/qt/android/multimedia/QtCameraListener";
+
+typedef QHash<int, AndroidCamera *> CameraMap;
+Q_GLOBAL_STATIC(CameraMap, cameras)
+Q_GLOBAL_STATIC(QReadWriteLock, rwLock)
+
+static QRect areaToRect(jobject areaObj)
+{
+ QJniObject area(areaObj);
+ QJniObject rect = area.getObjectField("rect", "Landroid/graphics/Rect;");
+
+ return QRect(rect.getField<jint>("left"),
+ rect.getField<jint>("top"),
+ rect.callMethod<jint>("width"),
+ rect.callMethod<jint>("height"));
+}
+
+static QJniObject rectToArea(const QRect &rect)
+{
+ QJniObject jrect("android/graphics/Rect",
+ "(IIII)V",
+ rect.left(), rect.top(), rect.right(), rect.bottom());
+
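+ // Camera.Area pairs the rectangle with a weight (valid range 1-1000);
+ // 500 gives this area a medium priority.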
+ QJniObject area("android/hardware/Camera$Area",
+ "(Landroid/graphics/Rect;I)V",
+ jrect.object(), 500);
+
+ return area;
+}
+
+// native methods for QtCameraListener.java
+static void notifyAutoFocusComplete(JNIEnv* , jobject, int id, jboolean success)
+{
+ QReadLocker locker(rwLock);
+ const auto it = cameras->constFind(id);
+ if (Q_UNLIKELY(it == cameras->cend()))
+ return;
+
+ Q_EMIT (*it)->autoFocusComplete(success);
+}
+
+static void notifyPictureExposed(JNIEnv* , jobject, int id)
+{
+ QReadLocker locker(rwLock);
+ const auto it = cameras->constFind(id);
+ if (Q_UNLIKELY(it == cameras->cend()))
+ return;
+
+ Q_EMIT (*it)->pictureExposed();
+}
+
+static void notifyPictureCaptured(JNIEnv *env, jobject, int id, jbyteArray data)
+{
+ QReadLocker locker(rwLock);
+ const auto it = cameras->constFind(id);
+ if (Q_UNLIKELY(it == cameras->cend())) {
+ qCWarning(lcAndroidCamera) << "Could not obtain camera!";
+ return;
+ }
+
+ AndroidCamera *camera = (*it);
+
+ const int arrayLength = env->GetArrayLength(data);
+ QByteArray bytes(arrayLength, Qt::Uninitialized);
+ env->GetByteArrayRegion(data, 0, arrayLength, reinterpret_cast<jbyte *>(bytes.data()));
+
+ auto parameters = camera->getParametersObject();
+
+ QJniObject size =
+ parameters.callObjectMethod("getPictureSize", "()Landroid/hardware/Camera$Size;");
+
+ if (!size.isValid()) {
+ qCWarning(lcAndroidCamera) << "Picture Size is not valid!";
+ return;
+ }
+
+ QSize pictureSize(size.getField<jint>("width"), size.getField<jint>("height"));
+
+ auto format = AndroidCamera::ImageFormat(parameters.callMethod<jint>("getPictureFormat"));
+
+ if (format == AndroidCamera::ImageFormat::UnknownImageFormat) {
+ qCWarning(lcAndroidCamera) << "Android Camera Image Format is UnknownImageFormat!";
+ return;
+ }
+
+ int bytesPerLine = 0;
+
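+ // Compute the stride of the first plane: YV12 rows are aligned to 16 bytes,
+ // NV21 is tightly packed, and RGB565/YUY2 use two bytes per pixel.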
+ switch (format) {
+ case AndroidCamera::ImageFormat::YV12:
+ bytesPerLine = (pictureSize.width() + 15) & ~15;
+ break;
+ case AndroidCamera::ImageFormat::NV21:
+ bytesPerLine = pictureSize.width();
+ break;
+ case AndroidCamera::ImageFormat::RGB565:
+ case AndroidCamera::ImageFormat::YUY2:
+ bytesPerLine = pictureSize.width() * 2;
+ break;
+ default:
+ bytesPerLine = -1;
+ }
+
+ auto pictureFormat = qt_pixelFormatFromAndroidImageFormat(format);
+
+ emit camera->pictureCaptured(bytes, pictureFormat, pictureSize, bytesPerLine);
+}
+
+static void notifyNewPreviewFrame(JNIEnv *env, jobject, int id, jbyteArray data,
+ int width, int height, int format, int bpl)
+{
+ QReadLocker locker(rwLock);
+ const auto it = cameras->constFind(id);
+ if (Q_UNLIKELY(it == cameras->cend()))
+ return;
+
+ const int arrayLength = env->GetArrayLength(data);
+ if (arrayLength == 0)
+ return;
+
+ QByteArray bytes(arrayLength, Qt::Uninitialized);
+ env->GetByteArrayRegion(data, 0, arrayLength, (jbyte*)bytes.data());
+
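+ // Wrap the copied pixel data in a QVideoFrame; QMemoryVideoBuffer takes
+ // ownership of the byte array, avoiding a second copy.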
+ QVideoFrameFormat frameFormat(
+ QSize(width, height),
+ qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat(format)));
+
+ QVideoFrame frame = QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(std::move(bytes), bpl), std::move(frameFormat));
+
+ Q_EMIT (*it)->newPreviewFrame(frame);
+}
+
+static void notifyFrameAvailable(JNIEnv *, jobject, int id)
+{
+ QReadLocker locker(rwLock);
+ const auto it = cameras->constFind(id);
+ if (Q_UNLIKELY(it == cameras->cend()))
+ return;
+
+ (*it)->fetchLastPreviewFrame();
+}
+
+class AndroidCameraPrivate : public QObject
+{
+ Q_OBJECT
+public:
+ AndroidCameraPrivate();
+ ~AndroidCameraPrivate();
+
+ Q_INVOKABLE bool init(int cameraId);
+
+ Q_INVOKABLE void release();
+ Q_INVOKABLE bool lock();
+ Q_INVOKABLE bool unlock();
+ Q_INVOKABLE bool reconnect();
+
+ Q_INVOKABLE AndroidCamera::CameraFacing getFacing();
+ Q_INVOKABLE int getNativeOrientation();
+
+ Q_INVOKABLE QSize getPreferredPreviewSizeForVideo();
+ Q_INVOKABLE QList<QSize> getSupportedPreviewSizes();
+ static QList<QSize> getSupportedPreviewSizes(QJniObject &parameters);
+
+ Q_INVOKABLE QList<AndroidCamera::FpsRange> getSupportedPreviewFpsRange();
+
+ Q_INVOKABLE AndroidCamera::FpsRange getPreviewFpsRange();
+ static AndroidCamera::FpsRange getPreviewFpsRange(QJniObject &parameters);
+ Q_INVOKABLE void setPreviewFpsRange(int min, int max);
+
+ Q_INVOKABLE AndroidCamera::ImageFormat getPreviewFormat();
+ Q_INVOKABLE void setPreviewFormat(AndroidCamera::ImageFormat fmt);
+ Q_INVOKABLE QList<AndroidCamera::ImageFormat> getSupportedPreviewFormats();
+ static QList<AndroidCamera::ImageFormat> getSupportedPreviewFormats(QJniObject &parameters);
+
+ Q_INVOKABLE QSize previewSize() const { return m_previewSize; }
+ Q_INVOKABLE QSize getPreviewSize();
+ Q_INVOKABLE void updatePreviewSize();
+ Q_INVOKABLE bool setPreviewTexture(void *surfaceTexture);
+ Q_INVOKABLE bool setPreviewDisplay(void *surfaceHolder);
+ Q_INVOKABLE void setDisplayOrientation(int degrees);
+
+ Q_INVOKABLE bool isZoomSupported();
+ Q_INVOKABLE int getMaxZoom();
+ Q_INVOKABLE QList<int> getZoomRatios();
+ Q_INVOKABLE int getZoom();
+ Q_INVOKABLE void setZoom(int value);
+
+ Q_INVOKABLE QString getFlashMode();
+ Q_INVOKABLE void setFlashMode(const QString &value);
+
+ Q_INVOKABLE QString getFocusMode();
+ Q_INVOKABLE void setFocusMode(const QString &value);
+
+ Q_INVOKABLE int getMaxNumFocusAreas();
+ Q_INVOKABLE QList<QRect> getFocusAreas();
+ Q_INVOKABLE void setFocusAreas(const QList<QRect> &areas);
+
+ Q_INVOKABLE void autoFocus();
+ Q_INVOKABLE void cancelAutoFocus();
+
+ Q_INVOKABLE bool isAutoExposureLockSupported();
+ Q_INVOKABLE bool getAutoExposureLock();
+ Q_INVOKABLE void setAutoExposureLock(bool toggle);
+
+ Q_INVOKABLE bool isAutoWhiteBalanceLockSupported();
+ Q_INVOKABLE bool getAutoWhiteBalanceLock();
+ Q_INVOKABLE void setAutoWhiteBalanceLock(bool toggle);
+
+ Q_INVOKABLE int getExposureCompensation();
+ Q_INVOKABLE void setExposureCompensation(int value);
+ Q_INVOKABLE float getExposureCompensationStep();
+ Q_INVOKABLE int getMinExposureCompensation();
+ Q_INVOKABLE int getMaxExposureCompensation();
+
+ Q_INVOKABLE QString getSceneMode();
+ Q_INVOKABLE void setSceneMode(const QString &value);
+
+ Q_INVOKABLE QString getWhiteBalance();
+ Q_INVOKABLE void setWhiteBalance(const QString &value);
+
+ Q_INVOKABLE void updateRotation();
+
+ Q_INVOKABLE QList<QSize> getSupportedPictureSizes();
+ Q_INVOKABLE QList<QSize> getSupportedVideoSizes();
+ Q_INVOKABLE void setPictureSize(const QSize &size);
+ Q_INVOKABLE void setJpegQuality(int quality);
+
+ Q_INVOKABLE void startPreview();
+ Q_INVOKABLE void stopPreview();
+
+ Q_INVOKABLE void takePicture();
+
+ Q_INVOKABLE void setupPreviewFrameCallback();
+ Q_INVOKABLE void notifyNewFrames(bool notify);
+ Q_INVOKABLE void fetchLastPreviewFrame();
+
+ Q_INVOKABLE void applyParameters();
+
+ Q_INVOKABLE QStringList callParametersStringListMethod(const QByteArray &methodName);
+
+ int m_cameraId;
+ QRecursiveMutex m_parametersMutex;
+ QSize m_previewSize;
+ int m_rotation;
+ QJniObject m_info;
+ QJniObject m_parameters;
+ QJniObject m_camera;
+ QJniObject m_cameraListener;
+
+Q_SIGNALS:
+ void previewSizeChanged();
+ void previewStarted();
+ void previewFailedToStart();
+ void previewStopped();
+
+ void autoFocusStarted();
+
+ void whiteBalanceChanged();
+
+ void takePictureFailed();
+
+ void lastPreviewFrameFetched(const QVideoFrame &frame);
+};
+
+AndroidCamera::AndroidCamera(AndroidCameraPrivate *d, QThread *worker)
+ : QObject(),
+ d_ptr(d),
+ m_worker(worker)
+{
+ connect(d, &AndroidCameraPrivate::previewSizeChanged, this, &AndroidCamera::previewSizeChanged);
+ connect(d, &AndroidCameraPrivate::previewStarted, this, &AndroidCamera::previewStarted);
+ connect(d, &AndroidCameraPrivate::previewFailedToStart, this, &AndroidCamera::previewFailedToStart);
+ connect(d, &AndroidCameraPrivate::previewStopped, this, &AndroidCamera::previewStopped);
+ connect(d, &AndroidCameraPrivate::autoFocusStarted, this, &AndroidCamera::autoFocusStarted);
+ connect(d, &AndroidCameraPrivate::whiteBalanceChanged, this, &AndroidCamera::whiteBalanceChanged);
+ connect(d, &AndroidCameraPrivate::takePictureFailed, this, &AndroidCamera::takePictureFailed);
+ connect(d, &AndroidCameraPrivate::lastPreviewFrameFetched, this, &AndroidCamera::lastPreviewFrameFetched);
+}
+
+AndroidCamera::~AndroidCamera()
+{
+ Q_D(AndroidCamera);
+ if (d->m_camera.isValid()) {
+ release();
+ QWriteLocker locker(rwLock);
+ cameras->remove(cameraId());
+ }
+
+ m_worker->exit();
+ m_worker->wait(5000);
+}
+
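+// Camera calls can block for a long time, so each AndroidCamera drives a
+// private worker object living in its own thread; the public methods marshal
+// into it via QMetaObject::invokeMethod, blocking only when a return value is
+// needed.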
+AndroidCamera *AndroidCamera::open(int cameraId)
+{
+ if (!qt_androidCheckCameraPermission())
+ return nullptr;
+
+ AndroidCameraPrivate *d = new AndroidCameraPrivate();
+ QThread *worker = new QThread;
+ worker->start();
+ d->moveToThread(worker);
+ connect(worker, &QThread::finished, d, &AndroidCameraPrivate::deleteLater);
+ bool ok = true;
+ QMetaObject::invokeMethod(d, "init", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, ok), Q_ARG(int, cameraId));
+ if (!ok) {
+ worker->quit();
+ worker->wait(5000);
+ delete worker;
+ return nullptr;
+ }
+
+ AndroidCamera *q = new AndroidCamera(d, worker);
+ QWriteLocker locker(rwLock);
+ cameras->insert(cameraId, q);
+
+ return q;
+}
+
+int AndroidCamera::cameraId() const
+{
+ Q_D(const AndroidCamera);
+ return d->m_cameraId;
+}
+
+bool AndroidCamera::lock()
+{
+ Q_D(AndroidCamera);
+ bool ok = true;
+ QMetaObject::invokeMethod(d, "lock", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, ok));
+ return ok;
+}
+
+bool AndroidCamera::unlock()
+{
+ Q_D(AndroidCamera);
+ bool ok = true;
+ QMetaObject::invokeMethod(d, "unlock", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, ok));
+ return ok;
+}
+
+bool AndroidCamera::reconnect()
+{
+ Q_D(AndroidCamera);
+ bool ok = true;
+ QMetaObject::invokeMethod(d, "reconnect", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, ok));
+ return ok;
+}
+
+void AndroidCamera::release()
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "release", Qt::BlockingQueuedConnection);
+}
+
+AndroidCamera::CameraFacing AndroidCamera::getFacing()
+{
+ Q_D(AndroidCamera);
+ return d->getFacing();
+}
+
+int AndroidCamera::getNativeOrientation()
+{
+ Q_D(AndroidCamera);
+ return d->getNativeOrientation();
+}
+
+QSize AndroidCamera::getPreferredPreviewSizeForVideo()
+{
+ Q_D(AndroidCamera);
+ return d->getPreferredPreviewSizeForVideo();
+}
+
+QList<QSize> AndroidCamera::getSupportedPreviewSizes()
+{
+ Q_D(AndroidCamera);
+ return d->getSupportedPreviewSizes();
+}
+
+QList<AndroidCamera::FpsRange> AndroidCamera::getSupportedPreviewFpsRange()
+{
+ Q_D(AndroidCamera);
+ return d->getSupportedPreviewFpsRange();
+}
+
+AndroidCamera::FpsRange AndroidCamera::getPreviewFpsRange()
+{
+ Q_D(AndroidCamera);
+ return d->getPreviewFpsRange();
+}
+
+void AndroidCamera::setPreviewFpsRange(FpsRange range)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setPreviewFpsRange", Q_ARG(int, range.min), Q_ARG(int, range.max));
+}
+
+AndroidCamera::ImageFormat AndroidCamera::getPreviewFormat()
+{
+ Q_D(AndroidCamera);
+ return d->getPreviewFormat();
+}
+
+void AndroidCamera::setPreviewFormat(ImageFormat fmt)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setPreviewFormat", Q_ARG(AndroidCamera::ImageFormat, fmt));
+}
+
+QList<AndroidCamera::ImageFormat> AndroidCamera::getSupportedPreviewFormats()
+{
+ Q_D(AndroidCamera);
+ return d->getSupportedPreviewFormats();
+}
+
+QSize AndroidCamera::previewSize() const
+{
+ Q_D(const AndroidCamera);
+ return d->m_previewSize;
+}
+
+QSize AndroidCamera::actualPreviewSize()
+{
+ Q_D(AndroidCamera);
+ return d->getPreviewSize();
+}
+
+void AndroidCamera::setPreviewSize(const QSize &size)
+{
+ Q_D(AndroidCamera);
+ d->m_parametersMutex.lock();
+ bool areParametersValid = d->m_parameters.isValid();
+ d->m_parametersMutex.unlock();
+ if (!areParametersValid)
+ return;
+
+ d->m_previewSize = size;
+ QMetaObject::invokeMethod(d, "updatePreviewSize");
+}
+
+bool AndroidCamera::setPreviewTexture(AndroidSurfaceTexture *surfaceTexture)
+{
+ Q_D(AndroidCamera);
+ bool ok = true;
+ QMetaObject::invokeMethod(d,
+ "setPreviewTexture",
+ Qt::BlockingQueuedConnection,
+ Q_RETURN_ARG(bool, ok),
+ Q_ARG(void *, surfaceTexture ? surfaceTexture->surfaceTexture() : nullptr));
+ return ok;
+}
+
+bool AndroidCamera::setPreviewDisplay(AndroidSurfaceHolder *surfaceHolder)
+{
+ Q_D(AndroidCamera);
+ bool ok = true;
+ QMetaObject::invokeMethod(d,
+ "setPreviewDisplay",
+ Qt::BlockingQueuedConnection,
+ Q_RETURN_ARG(bool, ok),
+ Q_ARG(void *, surfaceHolder ? surfaceHolder->surfaceHolder() : nullptr));
+ return ok;
+}
+
+void AndroidCamera::setDisplayOrientation(int degrees)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setDisplayOrientation", Qt::QueuedConnection, Q_ARG(int, degrees));
+}
+
+bool AndroidCamera::isZoomSupported()
+{
+ Q_D(AndroidCamera);
+ return d->isZoomSupported();
+}
+
+int AndroidCamera::getMaxZoom()
+{
+ Q_D(AndroidCamera);
+ return d->getMaxZoom();
+}
+
+QList<int> AndroidCamera::getZoomRatios()
+{
+ Q_D(AndroidCamera);
+ return d->getZoomRatios();
+}
+
+int AndroidCamera::getZoom()
+{
+ Q_D(AndroidCamera);
+ return d->getZoom();
+}
+
+void AndroidCamera::setZoom(int value)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setZoom", Q_ARG(int, value));
+}
+
+QStringList AndroidCamera::getSupportedFlashModes()
+{
+ Q_D(AndroidCamera);
+ return d->callParametersStringListMethod("getSupportedFlashModes");
+}
+
+QString AndroidCamera::getFlashMode()
+{
+ Q_D(AndroidCamera);
+ return d->getFlashMode();
+}
+
+void AndroidCamera::setFlashMode(const QString &value)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setFlashMode", Q_ARG(QString, value));
+}
+
+QStringList AndroidCamera::getSupportedFocusModes()
+{
+ Q_D(AndroidCamera);
+ return d->callParametersStringListMethod("getSupportedFocusModes");
+}
+
+QString AndroidCamera::getFocusMode()
+{
+ Q_D(AndroidCamera);
+ return d->getFocusMode();
+}
+
+void AndroidCamera::setFocusMode(const QString &value)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setFocusMode", Q_ARG(QString, value));
+}
+
+int AndroidCamera::getMaxNumFocusAreas()
+{
+ Q_D(AndroidCamera);
+ return d->getMaxNumFocusAreas();
+}
+
+QList<QRect> AndroidCamera::getFocusAreas()
+{
+ Q_D(AndroidCamera);
+ return d->getFocusAreas();
+}
+
+void AndroidCamera::setFocusAreas(const QList<QRect> &areas)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setFocusAreas", Q_ARG(QList<QRect>, areas));
+}
+
+void AndroidCamera::autoFocus()
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "autoFocus");
+}
+
+void AndroidCamera::cancelAutoFocus()
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "cancelAutoFocus", Qt::QueuedConnection);
+}
+
+bool AndroidCamera::isAutoExposureLockSupported()
+{
+ Q_D(AndroidCamera);
+ return d->isAutoExposureLockSupported();
+}
+
+bool AndroidCamera::getAutoExposureLock()
+{
+ Q_D(AndroidCamera);
+ return d->getAutoExposureLock();
+}
+
+void AndroidCamera::setAutoExposureLock(bool toggle)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setAutoExposureLock", Q_ARG(bool, toggle));
+}
+
+bool AndroidCamera::isAutoWhiteBalanceLockSupported()
+{
+ Q_D(AndroidCamera);
+ return d->isAutoWhiteBalanceLockSupported();
+}
+
+bool AndroidCamera::getAutoWhiteBalanceLock()
+{
+ Q_D(AndroidCamera);
+ return d->getAutoWhiteBalanceLock();
+}
+
+void AndroidCamera::setAutoWhiteBalanceLock(bool toggle)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setAutoWhiteBalanceLock", Q_ARG(bool, toggle));
+}
+
+int AndroidCamera::getExposureCompensation()
+{
+ Q_D(AndroidCamera);
+ return d->getExposureCompensation();
+}
+
+void AndroidCamera::setExposureCompensation(int value)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setExposureCompensation", Q_ARG(int, value));
+}
+
+float AndroidCamera::getExposureCompensationStep()
+{
+ Q_D(AndroidCamera);
+ return d->getExposureCompensationStep();
+}
+
+int AndroidCamera::getMinExposureCompensation()
+{
+ Q_D(AndroidCamera);
+ return d->getMinExposureCompensation();
+}
+
+int AndroidCamera::getMaxExposureCompensation()
+{
+ Q_D(AndroidCamera);
+ return d->getMaxExposureCompensation();
+}
+
+QStringList AndroidCamera::getSupportedSceneModes()
+{
+ Q_D(AndroidCamera);
+ return d->callParametersStringListMethod("getSupportedSceneModes");
+}
+
+QString AndroidCamera::getSceneMode()
+{
+ Q_D(AndroidCamera);
+ return d->getSceneMode();
+}
+
+void AndroidCamera::setSceneMode(const QString &value)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setSceneMode", Q_ARG(QString, value));
+}
+
+QStringList AndroidCamera::getSupportedWhiteBalance()
+{
+ Q_D(AndroidCamera);
+ return d->callParametersStringListMethod("getSupportedWhiteBalance");
+}
+
+QString AndroidCamera::getWhiteBalance()
+{
+ Q_D(AndroidCamera);
+ return d->getWhiteBalance();
+}
+
+void AndroidCamera::setWhiteBalance(const QString &value)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setWhiteBalance", Q_ARG(QString, value));
+}
+
+void AndroidCamera::setRotation(int rotation)
+{
+ Q_D(AndroidCamera);
+ // We need to do this here rather than in the worker class because we cache the rotation
+ d->m_parametersMutex.lock();
+ bool areParametersValid = d->m_parameters.isValid();
+ d->m_parametersMutex.unlock();
+ if (!areParametersValid)
+ return;
+
+ d->m_rotation = rotation;
+ QMetaObject::invokeMethod(d, "updateRotation");
+}
+
+int AndroidCamera::getRotation() const
+{
+ Q_D(const AndroidCamera);
+ return d->m_rotation;
+}
+
+QList<QSize> AndroidCamera::getSupportedPictureSizes()
+{
+ Q_D(AndroidCamera);
+ return d->getSupportedPictureSizes();
+}
+
+QList<QSize> AndroidCamera::getSupportedVideoSizes()
+{
+ Q_D(AndroidCamera);
+ return d->getSupportedVideoSizes();
+}
+
+void AndroidCamera::setPictureSize(const QSize &size)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setPictureSize", Q_ARG(QSize, size));
+}
+
+void AndroidCamera::setJpegQuality(int quality)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setJpegQuality", Q_ARG(int, quality));
+}
+
+void AndroidCamera::takePicture()
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "takePicture", Qt::BlockingQueuedConnection);
+}
+
+void AndroidCamera::setupPreviewFrameCallback()
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setupPreviewFrameCallback");
+}
+
+void AndroidCamera::notifyNewFrames(bool notify)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "notifyNewFrames", Q_ARG(bool, notify));
+}
+
+void AndroidCamera::fetchLastPreviewFrame()
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "fetchLastPreviewFrame");
+}
+
+QJniObject AndroidCamera::getCameraObject()
+{
+ Q_D(AndroidCamera);
+ return d->m_camera;
+}
+
+int AndroidCamera::getNumberOfCameras()
+{
+ if (!qt_androidCheckCameraPermission())
+ return 0;
+
+ return QJniObject::callStaticMethod<jint>("android/hardware/Camera",
+ "getNumberOfCameras");
+}
+
+void AndroidCamera::getCameraInfo(int id, QCameraDevicePrivate *info)
+{
+ Q_ASSERT(info);
+
+ QJniObject cameraInfo("android/hardware/Camera$CameraInfo");
+ QJniObject::callStaticMethod<void>("android/hardware/Camera",
+ "getCameraInfo",
+ "(ILandroid/hardware/Camera$CameraInfo;)V",
+ id, cameraInfo.object());
+
+ AndroidCamera::CameraFacing facing = AndroidCamera::CameraFacing(cameraInfo.getField<jint>("facing"));
+ // The orientation provided by Android is counter-clockwise; we need it clockwise
+ info->orientation = (360 - cameraInfo.getField<jint>("orientation")) % 360;
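+ // (e.g. a sensor that reports 90 is stored as (360 - 90) % 360 == 270)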
+
+ switch (facing) {
+ case AndroidCamera::CameraFacingBack:
+ info->id = QByteArray("back");
+ info->description = QStringLiteral("Rear-facing camera");
+ info->position = QCameraDevice::BackFace;
+ info->isDefault = true;
+ break;
+ case AndroidCamera::CameraFacingFront:
+ info->id = QByteArray("front");
+ info->description = QStringLiteral("Front-facing camera");
+ info->position = QCameraDevice::FrontFace;
+ break;
+ default:
+ break;
+ }
+ // Append the id so cameras stay distinguishable on systems with two or more
+ // front/back cameras (e.g. the camera with id 2 becomes "back2").
+ if (id > 1) {
+ info->id.append(QByteArray::number(id));
+ info->description.append(QString(" %1").arg(id));
+ }
+}
+
+QVideoFrameFormat::PixelFormat AndroidCamera::QtPixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat format)
+{
+ switch (format) {
+ case AndroidCamera::NV21:
+ return QVideoFrameFormat::Format_NV21;
+ case AndroidCamera::YUY2:
+ return QVideoFrameFormat::Format_YUYV;
+ case AndroidCamera::JPEG:
+ return QVideoFrameFormat::Format_Jpeg;
+ case AndroidCamera::YV12:
+ return QVideoFrameFormat::Format_YV12;
+ default:
+ return QVideoFrameFormat::Format_Invalid;
+ }
+}
+
+AndroidCamera::ImageFormat AndroidCamera::AndroidImageFormatFromQtPixelFormat(QVideoFrameFormat::PixelFormat format)
+{
+ switch (format) {
+ case QVideoFrameFormat::Format_NV21:
+ return AndroidCamera::NV21;
+ case QVideoFrameFormat::Format_YUYV:
+ return AndroidCamera::YUY2;
+ case QVideoFrameFormat::Format_Jpeg:
+ return AndroidCamera::JPEG;
+ case QVideoFrameFormat::Format_YV12:
+ return AndroidCamera::YV12;
+ default:
+ return AndroidCamera::UnknownImageFormat;
+ }
+}
+
+QList<QCameraFormat> AndroidCamera::getSupportedFormats()
+{
+ QList<QCameraFormat> formats;
+ AndroidCamera::FpsRange range = getPreviewFpsRange();
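+ // Combine every supported video size with every supported preview format; the
+ // current preview fps range is applied uniformly to each resulting format.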
+ for (const auto &previewSize : getSupportedVideoSizes()) {
+ for (const auto &previewFormat : getSupportedPreviewFormats()) {
+ QCameraFormatPrivate * format = new QCameraFormatPrivate();
+ format->pixelFormat = QtPixelFormatFromAndroidImageFormat(previewFormat);
+ format->resolution = previewSize;
+ format->minFrameRate = range.min;
+ format->maxFrameRate = range.max;
+ formats.append(format->create());
+ }
+ }
+
+ return formats;
+}
+
+void AndroidCamera::startPreview()
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "startPreview");
+}
+
+void AndroidCamera::stopPreview()
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "stopPreview");
+}
+
+void AndroidCamera::stopPreviewSynchronous()
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "stopPreview", Qt::BlockingQueuedConnection);
+}
+
+QJniObject AndroidCamera::getParametersObject()
+{
+ Q_D(AndroidCamera);
+ return d->m_parameters;
+}
+
+AndroidCameraPrivate::AndroidCameraPrivate()
+ : QObject()
+{
+}
+
+AndroidCameraPrivate::~AndroidCameraPrivate()
+{
+}
+
+static qint32 s_activeCameras = 0;
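+// s_activeCameras is a bitmask: bit N is set while camera id N is open, so a
+// second open() of the same id fails fast in init(). For example, with cameras
+// 0 and 2 open the mask is 0b101, and init(2) bails out because
+// (s_activeCameras & (1 << 2)) is non-zero.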
+
+bool AndroidCameraPrivate::init(int cameraId)
+{
+ m_cameraId = cameraId;
+ QJniEnvironment env;
+
+ const bool opened = s_activeCameras & (1 << cameraId);
+ if (opened)
+ return false;
+
+ m_camera = QJniObject::callStaticObjectMethod("android/hardware/Camera",
+ "open",
+ "(I)Landroid/hardware/Camera;",
+ cameraId);
+ if (!m_camera.isValid())
+ return false;
+
+ m_cameraListener = QJniObject(QtCameraListenerClassName, "(I)V", m_cameraId);
+ m_info = QJniObject("android/hardware/Camera$CameraInfo");
+ QJniObject::callStaticMethod<void>("android/hardware/Camera",
+ "getCameraInfo",
+ "(ILandroid/hardware/Camera$CameraInfo;)V",
+ cameraId,
+ m_info.object());
+
+ QJniObject params = m_camera.callObjectMethod("getParameters",
+ "()Landroid/hardware/Camera$Parameters;");
+ m_parameters = QJniObject(params);
+ s_activeCameras |= 1 << cameraId;
+
+ return true;
+}
+
+void AndroidCameraPrivate::release()
+{
+ m_previewSize = QSize();
+ m_parametersMutex.lock();
+ m_parameters = QJniObject();
+ m_parametersMutex.unlock();
+ if (m_camera.isValid()) {
+ m_camera.callMethod<void>("release");
+ s_activeCameras &= ~(1 << m_cameraId);
+ }
+}
+
+bool AndroidCameraPrivate::lock()
+{
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_camera.objectClass(), "lock", "()V");
+ env->CallVoidMethod(m_camera.object(), methodId);
+
+ if (env.checkAndClearExceptions())
+ return false;
+ return true;
+}
+
+bool AndroidCameraPrivate::unlock()
+{
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_camera.objectClass(), "unlock", "()V");
+ env->CallVoidMethod(m_camera.object(), methodId);
+
+ if (env.checkAndClearExceptions())
+ return false;
+ return true;
+}
+
+bool AndroidCameraPrivate::reconnect()
+{
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_camera.objectClass(), "reconnect", "()V");
+ env->CallVoidMethod(m_camera.object(), methodId);
+
+ if (env.checkAndClearExceptions())
+ return false;
+ return true;
+}
+
+AndroidCamera::CameraFacing AndroidCameraPrivate::getFacing()
+{
+ return AndroidCamera::CameraFacing(m_info.getField<jint>("facing"));
+}
+
+int AndroidCameraPrivate::getNativeOrientation()
+{
+ return m_info.getField<jint>("orientation");
+}
+
+QSize AndroidCameraPrivate::getPreferredPreviewSizeForVideo()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return QSize();
+
+ QJniObject size = m_parameters.callObjectMethod("getPreferredPreviewSizeForVideo",
+ "()Landroid/hardware/Camera$Size;");
+
+ if (!size.isValid())
+ return QSize();
+
+ return QSize(size.getField<jint>("width"), size.getField<jint>("height"));
+}
+
+QList<QSize> AndroidCameraPrivate::getSupportedPreviewSizes()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+ return getSupportedPreviewSizes(m_parameters);
+}
+
+QList<QSize> AndroidCameraPrivate::getSupportedPreviewSizes(QJniObject &parameters)
+{
+ QList<QSize> list;
+
+ if (parameters.isValid()) {
+ QJniObject sizeList = parameters.callObjectMethod("getSupportedPreviewSizes",
+ "()Ljava/util/List;");
+ int count = sizeList.callMethod<jint>("size");
+ for (int i = 0; i < count; ++i) {
+ QJniObject size = sizeList.callObjectMethod("get",
+ "(I)Ljava/lang/Object;",
+ i);
+ list.append(QSize(size.getField<jint>("width"), size.getField<jint>("height")));
+ }
+
+ std::sort(list.begin(), list.end(), qt_sizeLessThan);
+ }
+
+ return list;
+}
+
+QList<AndroidCamera::FpsRange> AndroidCameraPrivate::getSupportedPreviewFpsRange()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ QJniEnvironment env;
+
+ QList<AndroidCamera::FpsRange> rangeList;
+
+ if (m_parameters.isValid()) {
+ QJniObject rangeListNative = m_parameters.callObjectMethod("getSupportedPreviewFpsRange",
+ "()Ljava/util/List;");
+ int count = rangeListNative.callMethod<jint>("size");
+
+ rangeList.reserve(count);
+
+ for (int i = 0; i < count; ++i) {
+ QJniObject range = rangeListNative.callObjectMethod("get",
+ "(I)Ljava/lang/Object;",
+ i);
+
+ jintArray jRange = static_cast<jintArray>(range.object());
+ jint* rangeArray = env->GetIntArrayElements(jRange, 0);
+
+ AndroidCamera::FpsRange fpsRange;
+
+ fpsRange.min = rangeArray[0];
+ fpsRange.max = rangeArray[1];
+
+ env->ReleaseIntArrayElements(jRange, rangeArray, 0);
+
+ rangeList << fpsRange;
+ }
+ }
+
+ return rangeList;
+}
+
+AndroidCamera::FpsRange AndroidCameraPrivate::getPreviewFpsRange()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+ return getPreviewFpsRange(m_parameters);
+}
+
+AndroidCamera::FpsRange AndroidCameraPrivate::getPreviewFpsRange(QJniObject &parameters)
+{
+ QJniEnvironment env;
+
+ AndroidCamera::FpsRange range;
+
+ if (!parameters.isValid())
+ return range;
+
+ jintArray jRangeArray = env->NewIntArray(2);
+ parameters.callMethod<void>("getPreviewFpsRange", "([I)V", jRangeArray);
+
+ jint* jRangeElements = env->GetIntArrayElements(jRangeArray, 0);
+
+ // Android Camera API returns values scaled by 1000, so divide here to report
+ // normal values for Qt
+ range.min = jRangeElements[0] / 1000;
+ range.max = jRangeElements[1] / 1000;
+
+ env->ReleaseIntArrayElements(jRangeArray, jRangeElements, 0);
+ env->DeleteLocalRef(jRangeArray);
+
+ return range;
+}
+
+void AndroidCameraPrivate::setPreviewFpsRange(int min, int max)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ // The Android Camera API expects these values scaled by 1000, so multiply here
+ // to give it the scale it expects
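+ // (e.g. a requested [30, 30] fps range is passed on as [30000, 30000])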
+ m_parameters.callMethod<void>("setPreviewFpsRange", "(II)V", min * 1000, max * 1000);
+}
+
+AndroidCamera::ImageFormat AndroidCameraPrivate::getPreviewFormat()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return AndroidCamera::UnknownImageFormat;
+
+ return AndroidCamera::ImageFormat(m_parameters.callMethod<jint>("getPreviewFormat"));
+}
+
+void AndroidCameraPrivate::setPreviewFormat(AndroidCamera::ImageFormat fmt)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setPreviewFormat", "(I)V", jint(fmt));
+ applyParameters();
+}
+
+QList<AndroidCamera::ImageFormat> AndroidCameraPrivate::getSupportedPreviewFormats()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+ return getSupportedPreviewFormats(m_parameters);
+}
+
+QList<AndroidCamera::ImageFormat> AndroidCameraPrivate::getSupportedPreviewFormats(QJniObject &parameters)
+{
+ QList<AndroidCamera::ImageFormat> list;
+
+ if (parameters.isValid()) {
+ QJniObject formatList = parameters.callObjectMethod("getSupportedPreviewFormats",
+ "()Ljava/util/List;");
+ int count = formatList.callMethod<jint>("size");
+ for (int i = 0; i < count; ++i) {
+ QJniObject format = formatList.callObjectMethod("get",
+ "(I)Ljava/lang/Object;",
+ i);
+ list.append(AndroidCamera::ImageFormat(format.callMethod<jint>("intValue")));
+ }
+ }
+
+ return list;
+}
+
+QSize AndroidCameraPrivate::getPreviewSize()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return QSize();
+
+ QJniObject size = m_parameters.callObjectMethod("getPreviewSize",
+ "()Landroid/hardware/Camera$Size;");
+
+ if (!size.isValid())
+ return QSize();
+
+ return QSize(size.getField<jint>("width"), size.getField<jint>("height"));
+}
+
+void AndroidCameraPrivate::updatePreviewSize()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (m_previewSize.isValid()) {
+ m_parameters.callMethod<void>("setPreviewSize", "(II)V", m_previewSize.width(), m_previewSize.height());
+ applyParameters();
+ }
+
+ emit previewSizeChanged();
+}
+
+bool AndroidCameraPrivate::setPreviewTexture(void *surfaceTexture)
+{
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_camera.objectClass(), "setPreviewTexture",
+ "(Landroid/graphics/SurfaceTexture;)V");
+ env->CallVoidMethod(m_camera.object(), methodId, static_cast<jobject>(surfaceTexture));
+
+ if (env.checkAndClearExceptions())
+ return false;
+ return true;
+}
+
+bool AndroidCameraPrivate::setPreviewDisplay(void *surfaceHolder)
+{
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_camera.objectClass(), "setPreviewDisplay",
+ "(Landroid/view/SurfaceHolder;)V");
+ env->CallVoidMethod(m_camera.object(), methodId, static_cast<jobject>(surfaceHolder));
+
+ if (env.checkAndClearExceptions())
+ return false;
+ return true;
+}
+
+void AndroidCameraPrivate::setDisplayOrientation(int degrees)
+{
+ m_camera.callMethod<void>("setDisplayOrientation", "(I)V", degrees);
+ m_cameraListener.callMethod<void>("setPhotoRotation", "(I)V", degrees);
+}
+
+bool AndroidCameraPrivate::isZoomSupported()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return false;
+
+ return m_parameters.callMethod<jboolean>("isZoomSupported");
+}
+
+int AndroidCameraPrivate::getMaxZoom()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return 0;
+
+ return m_parameters.callMethod<jint>("getMaxZoom");
+}
+
+QList<int> AndroidCameraPrivate::getZoomRatios()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ QList<int> ratios;
+
+ if (m_parameters.isValid()) {
+ QJniObject ratioList = m_parameters.callObjectMethod("getZoomRatios",
+ "()Ljava/util/List;");
+ int count = ratioList.callMethod<jint>("size");
+ for (int i = 0; i < count; ++i) {
+ QJniObject zoomRatio = ratioList.callObjectMethod("get",
+ "(I)Ljava/lang/Object;",
+ i);
+
+ ratios.append(zoomRatio.callMethod<jint>("intValue"));
+ }
+ }
+
+ return ratios;
+}
+
+int AndroidCameraPrivate::getZoom()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return 0;
+
+ return m_parameters.callMethod<jint>("getZoom");
+}
+
+void AndroidCameraPrivate::setZoom(int value)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setZoom", "(I)V", value);
+ applyParameters();
+}
+
+QString AndroidCameraPrivate::getFlashMode()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ QString value;
+
+ if (m_parameters.isValid()) {
+ QJniObject flashMode = m_parameters.callObjectMethod("getFlashMode",
+ "()Ljava/lang/String;");
+ if (flashMode.isValid())
+ value = flashMode.toString();
+ }
+
+ return value;
+}
+
+void AndroidCameraPrivate::setFlashMode(const QString &value)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setFlashMode",
+ "(Ljava/lang/String;)V",
+ QJniObject::fromString(value).object());
+ applyParameters();
+}
+
+QString AndroidCameraPrivate::getFocusMode()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ QString value;
+
+ if (m_parameters.isValid()) {
+ QJniObject focusMode = m_parameters.callObjectMethod("getFocusMode",
+ "()Ljava/lang/String;");
+ if (focusMode.isValid())
+ value = focusMode.toString();
+ }
+
+ return value;
+}
+
+void AndroidCameraPrivate::setFocusMode(const QString &value)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setFocusMode",
+ "(Ljava/lang/String;)V",
+ QJniObject::fromString(value).object());
+ applyParameters();
+}
+
+int AndroidCameraPrivate::getMaxNumFocusAreas()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return 0;
+
+ return m_parameters.callMethod<jint>("getMaxNumFocusAreas");
+}
+
+QList<QRect> AndroidCameraPrivate::getFocusAreas()
+{
+ QList<QRect> areas;
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (m_parameters.isValid()) {
+ QJniObject list = m_parameters.callObjectMethod("getFocusAreas",
+ "()Ljava/util/List;");
+
+ if (list.isValid()) {
+ int count = list.callMethod<jint>("size");
+ for (int i = 0; i < count; ++i) {
+ QJniObject area = list.callObjectMethod("get",
+ "(I)Ljava/lang/Object;",
+ i);
+
+ areas.append(areaToRect(area.object()));
+ }
+ }
+ }
+
+ return areas;
+}
+
+void AndroidCameraPrivate::setFocusAreas(const QList<QRect> &areas)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid() || areas.isEmpty())
+ return;
+
+ QJniObject arrayList("java/util/ArrayList", "(I)V", areas.size());
+ for (int i = 0; i < areas.size(); ++i) {
+ arrayList.callMethod<jboolean>("add",
+ "(Ljava/lang/Object;)Z",
+ rectToArea(areas.at(i)).object());
+ }
+
+ m_parameters.callMethod<void>("setFocusAreas", "(Ljava/util/List;)V", arrayList.object());
+
+ applyParameters();
+}
+
+void AndroidCameraPrivate::autoFocus()
+{
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_camera.objectClass(), "autoFocus",
+ "(Landroid/hardware/Camera$AutoFocusCallback;)V");
+ env->CallVoidMethod(m_camera.object(), methodId, m_cameraListener.object());
+
+ if (!env.checkAndClearExceptions())
+ emit autoFocusStarted();
+}
+
+void AndroidCameraPrivate::cancelAutoFocus()
+{
+ m_camera.callMethod<void>("cancelAutoFocus");
+}
+
+bool AndroidCameraPrivate::isAutoExposureLockSupported()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return false;
+
+ return m_parameters.callMethod<jboolean>("isAutoExposureLockSupported");
+}
+
+bool AndroidCameraPrivate::getAutoExposureLock()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return false;
+
+ return m_parameters.callMethod<jboolean>("getAutoExposureLock");
+}
+
+void AndroidCameraPrivate::setAutoExposureLock(bool toggle)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setAutoExposureLock", "(Z)V", toggle);
+ applyParameters();
+}
+
+bool AndroidCameraPrivate::isAutoWhiteBalanceLockSupported()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return false;
+
+ return m_parameters.callMethod<jboolean>("isAutoWhiteBalanceLockSupported");
+}
+
+bool AndroidCameraPrivate::getAutoWhiteBalanceLock()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return false;
+
+ return m_parameters.callMethod<jboolean>("getAutoWhiteBalanceLock");
+}
+
+void AndroidCameraPrivate::setAutoWhiteBalanceLock(bool toggle)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setAutoWhiteBalanceLock", "(Z)V", toggle);
+ applyParameters();
+}
+
+int AndroidCameraPrivate::getExposureCompensation()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return 0;
+
+ return m_parameters.callMethod<jint>("getExposureCompensation");
+}
+
+void AndroidCameraPrivate::setExposureCompensation(int value)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setExposureCompensation", "(I)V", value);
+ applyParameters();
+}
+
+float AndroidCameraPrivate::getExposureCompensationStep()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return 0;
+
+ return m_parameters.callMethod<jfloat>("getExposureCompensationStep");
+}
+
+int AndroidCameraPrivate::getMinExposureCompensation()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return 0;
+
+ return m_parameters.callMethod<jint>("getMinExposureCompensation");
+}
+
+int AndroidCameraPrivate::getMaxExposureCompensation()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return 0;
+
+ return m_parameters.callMethod<jint>("getMaxExposureCompensation");
+}
+
+QString AndroidCameraPrivate::getSceneMode()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ QString value;
+
+ if (m_parameters.isValid()) {
+ QJniObject sceneMode = m_parameters.callObjectMethod("getSceneMode",
+ "()Ljava/lang/String;");
+ if (sceneMode.isValid())
+ value = sceneMode.toString();
+ }
+
+ return value;
+}
+
+void AndroidCameraPrivate::setSceneMode(const QString &value)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setSceneMode",
+ "(Ljava/lang/String;)V",
+ QJniObject::fromString(value).object());
+ applyParameters();
+}
+
+QString AndroidCameraPrivate::getWhiteBalance()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ QString value;
+
+ if (m_parameters.isValid()) {
+ QJniObject wb = m_parameters.callObjectMethod("getWhiteBalance",
+ "()Ljava/lang/String;");
+ if (wb.isValid())
+ value = wb.toString();
+ }
+
+ return value;
+}
+
+void AndroidCameraPrivate::setWhiteBalance(const QString &value)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setWhiteBalance",
+ "(Ljava/lang/String;)V",
+ QJniObject::fromString(value).object());
+ applyParameters();
+
+ emit whiteBalanceChanged();
+}
+
+void AndroidCameraPrivate::updateRotation()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ m_parameters.callMethod<void>("setRotation", "(I)V", m_rotation);
+ applyParameters();
+}
+
+QList<QSize> AndroidCameraPrivate::getSupportedPictureSizes()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ QList<QSize> list;
+
+ if (m_parameters.isValid()) {
+ QJniObject sizeList = m_parameters.callObjectMethod("getSupportedPictureSizes",
+ "()Ljava/util/List;");
+ int count = sizeList.callMethod<jint>("size");
+ for (int i = 0; i < count; ++i) {
+ QJniObject size = sizeList.callObjectMethod("get",
+ "(I)Ljava/lang/Object;",
+ i);
+ list.append(QSize(size.getField<jint>("width"), size.getField<jint>("height")));
+ }
+
+ std::sort(list.begin(), list.end(), qt_sizeLessThan);
+ }
+
+ return list;
+}
+
+QList<QSize> AndroidCameraPrivate::getSupportedVideoSizes()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+ QList<QSize> list;
+
+ if (m_parameters.isValid()) {
+ QJniObject sizeList = m_parameters.callObjectMethod("getSupportedVideoSizes",
+ "()Ljava/util/List;");
+ if (!sizeList.isValid())
+ return list;
+
+ int count = sizeList.callMethod<jint>("size");
+ for (int i = 0; i < count; ++i) {
+ const QJniObject size = sizeList.callObjectMethod("get", "(I)Ljava/lang/Object;", i);
+ if (size.isValid())
+ list.append(QSize(size.getField<jint>("width"), size.getField<jint>("height")));
+ }
+ std::sort(list.begin(), list.end(), qt_sizeLessThan);
+ }
+
+ return list;
+}
+
+void AndroidCameraPrivate::setPictureSize(const QSize &size)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setPictureSize", "(II)V", size.width(), size.height());
+ applyParameters();
+}
+
+void AndroidCameraPrivate::setJpegQuality(int quality)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setJpegQuality", "(I)V", quality);
+ applyParameters();
+}
+
+void AndroidCameraPrivate::startPreview()
+{
+ setupPreviewFrameCallback();
+
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_camera.objectClass(), "startPreview", "()V");
+ env->CallVoidMethod(m_camera.object(), methodId);
+
+ if (env.checkAndClearExceptions())
+ emit previewFailedToStart();
+ else
+ emit previewStarted();
+}
+
+void AndroidCameraPrivate::stopPreview()
+{
+ // cancel any pending new frame notification
+ m_cameraListener.callMethod<void>("notifyWhenFrameAvailable", "(Z)V", false);
+ m_camera.callMethod<void>("stopPreview");
+ emit previewStopped();
+}
+
+void AndroidCameraPrivate::takePicture()
+{
+ // We must clear the preview callback before calling takePicture(); otherwise the
+ // call blocks and the camera server freezes until the next device restart. That
+ // problem only happens on some devices and on the emulator.
+ m_cameraListener.callMethod<void>("clearPreviewCallback", "(Landroid/hardware/Camera;)V", m_camera.object());
+
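+ // Camera.takePicture(shutter, raw, jpeg): the listener below serves as both the
+ // shutter and the JPEG callback, while the raw picture callback is left null.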
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_camera.objectClass(), "takePicture",
+ "(Landroid/hardware/Camera$ShutterCallback;"
+ "Landroid/hardware/Camera$PictureCallback;"
+ "Landroid/hardware/Camera$PictureCallback;)V");
+ env->CallVoidMethod(m_camera.object(), methodId, m_cameraListener.object(),
+ static_cast<jobject>(nullptr), m_cameraListener.object());
+
+ if (env.checkAndClearExceptions())
+ emit takePictureFailed();
+}
+
+void AndroidCameraPrivate::setupPreviewFrameCallback()
+{
+ m_cameraListener.callMethod<void>("setupPreviewCallback", "(Landroid/hardware/Camera;)V", m_camera.object());
+}
+
+void AndroidCameraPrivate::notifyNewFrames(bool notify)
+{
+ m_cameraListener.callMethod<void>("notifyNewFrames", "(Z)V", notify);
+}
+
+void AndroidCameraPrivate::fetchLastPreviewFrame()
+{
+ QJniEnvironment env;
+ QJniObject data = m_cameraListener.callObjectMethod("lastPreviewBuffer", "()[B");
+
+ if (!data.isValid()) {
+ // If there's no buffer received yet, retry when the next one arrives
+ m_cameraListener.callMethod<void>("notifyWhenFrameAvailable", "(Z)V", true);
+ return;
+ }
+
+ const int arrayLength = env->GetArrayLength(static_cast<jbyteArray>(data.object()));
+ if (arrayLength == 0)
+ return;
+
+ QByteArray bytes(arrayLength, Qt::Uninitialized);
+ env->GetByteArrayRegion(static_cast<jbyteArray>(data.object()),
+ 0,
+ arrayLength,
+ reinterpret_cast<jbyte *>(bytes.data()));
+
+ const int width = m_cameraListener.callMethod<jint>("previewWidth");
+ const int height = m_cameraListener.callMethod<jint>("previewHeight");
+ const int format = m_cameraListener.callMethod<jint>("previewFormat");
+ const int bpl = m_cameraListener.callMethod<jint>("previewBytesPerLine");
+
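+ // Wrap the bytes copied out of the Java buffer in a QMemoryVideoBuffer and tag
+ // the frame with the geometry and pixel format reported by the listener.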
+ QVideoFrameFormat frameFormat(
+ QSize(width, height),
+ qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat(format)));
+
+ QVideoFrame frame = QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(std::move(bytes), bpl), std::move(frameFormat));
+
+ emit lastPreviewFrameFetched(frame);
+}
+
+void AndroidCameraPrivate::applyParameters()
+{
+ m_camera.callMethod<void>("setParameters",
+ "(Landroid/hardware/Camera$Parameters;)V",
+ m_parameters.object());
+}
+
+QStringList AndroidCameraPrivate::callParametersStringListMethod(const QByteArray &methodName)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ QStringList stringList;
+
+ if (m_parameters.isValid()) {
+ QJniObject list = m_parameters.callObjectMethod(methodName.constData(),
+ "()Ljava/util/List;");
+
+ if (list.isValid()) {
+ int count = list.callMethod<jint>("size");
+ for (int i = 0; i < count; ++i) {
+ QJniObject string = list.callObjectMethod("get",
+ "(I)Ljava/lang/Object;",
+ i);
+ stringList.append(string.toString());
+ }
+ }
+ }
+
+ return stringList;
+}
+
+bool AndroidCamera::registerNativeMethods()
+{
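+ // Each entry binds a native method declared on the Java listener class
+ // (QtCameraListenerClassName) to its C++ handler; the middle field is the
+ // standard JNI signature encoding, e.g. "(I[BIIII)V" is
+ // (int, byte[], int, int, int, int) -> void.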
+ static const JNINativeMethod methods[] = {
+ {"notifyAutoFocusComplete", "(IZ)V", (void *)notifyAutoFocusComplete},
+ {"notifyPictureExposed", "(I)V", (void *)notifyPictureExposed},
+ {"notifyPictureCaptured", "(I[B)V", (void *)notifyPictureCaptured},
+ {"notifyNewPreviewFrame", "(I[BIIII)V", (void *)notifyNewPreviewFrame},
+ {"notifyFrameAvailable", "(I)V", (void *)notifyFrameAvailable}
+ };
+
+ const int size = std::size(methods);
+ return QJniEnvironment().registerNativeMethods(QtCameraListenerClassName, methods, size);
+}
+
+QT_END_NAMESPACE
+
+#include "androidcamera.moc"
+#include "moc_androidcamera_p.cpp"
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidcamera_p.h b/src/plugins/multimedia/android/wrappers/jni/androidcamera_p.h
new file mode 100644
index 000000000..8375cf3b1
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidcamera_p.h
@@ -0,0 +1,208 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// Copyright (C) 2016 Ruslan Baratov
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef ANDROIDCAMERA_H
+#define ANDROIDCAMERA_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qobject.h>
+#include <qsize.h>
+#include <qrect.h>
+#include <QtMultimedia/qcamera.h>
+#include <QtCore/qjniobject.h>
+#include <private/qcameradevice_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QThread;
+
+class AndroidCameraPrivate;
+class AndroidSurfaceTexture;
+class AndroidSurfaceHolder;
+
+class AndroidCamera : public QObject
+{
+ Q_OBJECT
+public:
+ enum CameraFacing {
+ CameraFacingBack = 0,
+ CameraFacingFront = 1
+ };
+ Q_ENUM(CameraFacing)
+
+ enum ImageFormat { // same values as in android.graphics.ImageFormat Java class
+ UnknownImageFormat = 0,
+ RGB565 = 4,
+ NV16 = 16,
+ NV21 = 17,
+ YUY2 = 20,
+ JPEG = 256,
+ YV12 = 842094169
+ };
+ Q_ENUM(ImageFormat)
+
+ // http://developer.android.com/reference/android/hardware/Camera.Parameters.html#getSupportedPreviewFpsRange%28%29
+ // "The values are multiplied by 1000 and represented in integers"
+ struct FpsRange {
+ int min;
+ int max;
+
+ FpsRange(): min(0), max(0) {}
+
+ qreal getMinReal() const { return min / 1000.0; }
+ qreal getMaxReal() const { return max / 1000.0; }
+
+ static FpsRange makeFromQReal(qreal min, qreal max)
+ {
+ FpsRange range;
+ range.min = static_cast<int>(min * 1000.0);
+ range.max = static_cast<int>(max * 1000.0);
+ return range;
+ }
+ };
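+ // For example, FpsRange::makeFromQReal(30.0, 30.0) stores {30000, 30000}, and
+ // getMinReal()/getMaxReal() recover 30.0 from those stored values.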
+
+ ~AndroidCamera();
+
+ static AndroidCamera *open(int cameraId);
+
+ int cameraId() const;
+
+ bool lock();
+ bool unlock();
+ bool reconnect();
+ void release();
+
+ CameraFacing getFacing();
+ int getNativeOrientation();
+
+ QSize getPreferredPreviewSizeForVideo();
+ QList<QSize> getSupportedPreviewSizes();
+
+ QList<FpsRange> getSupportedPreviewFpsRange();
+
+ FpsRange getPreviewFpsRange();
+ void setPreviewFpsRange(FpsRange);
+
+ ImageFormat getPreviewFormat();
+ void setPreviewFormat(ImageFormat fmt);
+ QList<ImageFormat> getSupportedPreviewFormats();
+
+ QSize previewSize() const;
+ QSize actualPreviewSize();
+ void setPreviewSize(const QSize &size);
+ bool setPreviewTexture(AndroidSurfaceTexture *surfaceTexture);
+ bool setPreviewDisplay(AndroidSurfaceHolder *surfaceHolder);
+ void setDisplayOrientation(int degrees);
+
+ bool isZoomSupported();
+ int getMaxZoom();
+ QList<int> getZoomRatios();
+ int getZoom();
+ void setZoom(int value);
+
+ QStringList getSupportedFlashModes();
+ QString getFlashMode();
+ void setFlashMode(const QString &value);
+
+ QStringList getSupportedFocusModes();
+ QString getFocusMode();
+ void setFocusMode(const QString &value);
+
+ int getMaxNumFocusAreas();
+ QList<QRect> getFocusAreas();
+ void setFocusAreas(const QList<QRect> &areas);
+
+ void autoFocus();
+ void cancelAutoFocus();
+
+ bool isAutoExposureLockSupported();
+ bool getAutoExposureLock();
+ void setAutoExposureLock(bool toggle);
+
+ bool isAutoWhiteBalanceLockSupported();
+ bool getAutoWhiteBalanceLock();
+ void setAutoWhiteBalanceLock(bool toggle);
+
+ int getExposureCompensation();
+ void setExposureCompensation(int value);
+ float getExposureCompensationStep();
+ int getMinExposureCompensation();
+ int getMaxExposureCompensation();
+
+ QStringList getSupportedSceneModes();
+ QString getSceneMode();
+ void setSceneMode(const QString &value);
+
+ QStringList getSupportedWhiteBalance();
+ QString getWhiteBalance();
+ void setWhiteBalance(const QString &value);
+
+ void setRotation(int rotation);
+ int getRotation() const;
+
+ QList<QCameraFormat> getSupportedFormats();
+ QList<QSize> getSupportedPictureSizes();
+ QList<QSize> getSupportedVideoSizes();
+ void setPictureSize(const QSize &size);
+ void setJpegQuality(int quality);
+
+ void startPreview();
+ void stopPreview();
+ void stopPreviewSynchronous();
+
+ void takePicture();
+
+ void setupPreviewFrameCallback();
+ void notifyNewFrames(bool notify);
+ void fetchLastPreviewFrame();
+ QJniObject getCameraObject();
+ QJniObject getParametersObject();
+
+ static int getNumberOfCameras();
+ static void getCameraInfo(int id, QCameraDevicePrivate *info);
+ static QVideoFrameFormat::PixelFormat QtPixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat);
+ static AndroidCamera::ImageFormat AndroidImageFormatFromQtPixelFormat(QVideoFrameFormat::PixelFormat);
+ static bool requestCameraPermission();
+
+ static bool registerNativeMethods();
+Q_SIGNALS:
+ void previewSizeChanged();
+ void previewStarted();
+ void previewFailedToStart();
+ void previewStopped();
+
+ void autoFocusStarted();
+ void autoFocusComplete(bool success);
+
+ void whiteBalanceChanged();
+
+ void takePictureFailed();
+ void pictureExposed();
+ void pictureCaptured(const QByteArray &frame, QVideoFrameFormat::PixelFormat format, QSize size, int bytesPerLine);
+ void lastPreviewFrameFetched(const QVideoFrame &frame);
+ void newPreviewFrame(const QVideoFrame &frame);
+
+private:
+ AndroidCamera(AndroidCameraPrivate *d, QThread *worker);
+
+ Q_DECLARE_PRIVATE(AndroidCamera)
+ AndroidCameraPrivate *d_ptr;
+ QScopedPointer<QThread> m_worker;
+};
+
+QT_END_NAMESPACE
+
+Q_DECLARE_METATYPE(AndroidCamera::ImageFormat)
+
+#endif // ANDROIDCAMERA_H
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidmediametadataretriever.cpp b/src/plugins/multimedia/android/wrappers/jni/androidmediametadataretriever.cpp
new file mode 100644
index 000000000..25e1efdb0
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidmediametadataretriever.cpp
@@ -0,0 +1,136 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "androidmediametadataretriever_p.h"
+
+#include <QtCore/QUrl>
+#include <qdebug.h>
+#include <QtCore/qcoreapplication.h>
+
+QT_BEGIN_NAMESPACE
+
+AndroidMediaMetadataRetriever::AndroidMediaMetadataRetriever()
+{
+ m_metadataRetriever = QJniObject("android/media/MediaMetadataRetriever");
+}
+
+AndroidMediaMetadataRetriever::~AndroidMediaMetadataRetriever()
+{
+ release();
+}
+
+QString AndroidMediaMetadataRetriever::extractMetadata(MetadataKey key)
+{
+ QString value;
+
+ QJniObject metadata = m_metadataRetriever.callObjectMethod("extractMetadata",
+ "(I)Ljava/lang/String;",
+ jint(key));
+ if (metadata.isValid())
+ value = metadata.toString();
+
+ return value;
+}
+
+void AndroidMediaMetadataRetriever::release()
+{
+ if (!m_metadataRetriever.isValid())
+ return;
+
+ m_metadataRetriever.callMethod<void>("release");
+}
+
+bool AndroidMediaMetadataRetriever::setDataSource(const QUrl &url)
+{
+ if (!m_metadataRetriever.isValid())
+ return false;
+
+ QJniEnvironment env;
+ if (url.isLocalFile()) { // also includes qrc files (copied to a temp file by QMediaPlayer)
+ QJniObject string = QJniObject::fromString(url.path());
+ QJniObject fileInputStream("java/io/FileInputStream",
+ "(Ljava/lang/String;)V",
+ string.object());
+
+ if (!fileInputStream.isValid())
+ return false;
+
+ QJniObject fd = fileInputStream.callObjectMethod("getFD",
+ "()Ljava/io/FileDescriptor;");
+ if (!fd.isValid()) {
+ fileInputStream.callMethod<void>("close");
+ return false;
+ }
+
+ auto methodId = env->GetMethodID(m_metadataRetriever.objectClass(), "setDataSource",
+ "(Ljava/io/FileDescriptor;)V");
+ env->CallVoidMethod(m_metadataRetriever.object(), methodId, fd.object());
+ bool ok = !env.checkAndClearExceptions();
+ fileInputStream.callMethod<void>("close");
+ if (!ok)
+ return false;
+ } else if (url.scheme() == QLatin1String("assets")) {
+ QJniObject string = QJniObject::fromString(url.path().mid(1)); // remove first '/'
+ QJniObject activity(QNativeInterface::QAndroidApplication::context());
+ QJniObject assetManager = activity.callObjectMethod("getAssets",
+ "()Landroid/content/res/AssetManager;");
+ QJniObject assetFd = assetManager.callObjectMethod("openFd",
+ "(Ljava/lang/String;)Landroid/content/res/AssetFileDescriptor;",
+ string.object());
+ if (!assetFd.isValid())
+ return false;
+
+ QJniObject fd = assetFd.callObjectMethod("getFileDescriptor",
+ "()Ljava/io/FileDescriptor;");
+ if (!fd.isValid()) {
+ assetFd.callMethod<void>("close");
+ return false;
+ }
+
+ auto methodId = env->GetMethodID(m_metadataRetriever.objectClass(), "setDataSource",
+ "(Ljava/io/FileDescriptor;JJ)V");
+ env->CallVoidMethod(m_metadataRetriever.object(), methodId,
+ fd.object(),
+ assetFd.callMethod<jlong>("getStartOffset"),
+ assetFd.callMethod<jlong>("getLength"));
+ bool ok = !env.checkAndClearExceptions();
+ assetFd.callMethod<void>("close");
+
+ if (!ok)
+ return false;
+ } else if (url.scheme() != QLatin1String("content")) {
+ // On API levels >= 14, only setDataSource(String, Map<String, String>) accepts remote media
+ QJniObject string = QJniObject::fromString(url.toString(QUrl::FullyEncoded));
+ QJniObject hash("java/util/HashMap");
+
+ auto methodId = env->GetMethodID(m_metadataRetriever.objectClass(), "setDataSource",
+ "(Ljava/lang/String;Ljava/util/Map;)V");
+ env->CallVoidMethod(m_metadataRetriever.object(), methodId,
+ string.object(), hash.object());
+ if (env.checkAndClearExceptions())
+ return false;
+ } else {
+ // On API levels < 14, by contrast, only setDataSource(Context, Uri) is
+ // available and works for remote media.
+ QJniObject string = QJniObject::fromString(url.toString(QUrl::FullyEncoded));
+ QJniObject uri = m_metadataRetriever.callStaticObjectMethod(
+ "android/net/Uri",
+ "parse",
+ "(Ljava/lang/String;)Landroid/net/Uri;",
+ string.object());
+ if (!uri.isValid())
+ return false;
+
+ auto methodId = env->GetMethodID(m_metadataRetriever.objectClass(), "setDataSource",
+ "(Landroid/content/Context;Landroid/net/Uri;)V");
+ env->CallVoidMethod(m_metadataRetriever.object(), methodId,
+ QNativeInterface::QAndroidApplication::context().object(),
+ uri.object());
+ if (env.checkAndClearExceptions())
+ return false;
+ }
+
+ return true;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidmediametadataretriever_p.h b/src/plugins/multimedia/android/wrappers/jni/androidmediametadataretriever_p.h
new file mode 100644
index 000000000..68e346336
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidmediametadataretriever_p.h
@@ -0,0 +1,66 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef ANDROIDMEDIAMETADATARETRIEVER_H
+#define ANDROIDMEDIAMETADATARETRIEVER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/private/qglobal_p.h>
+#include <QtCore/qurl.h>
+#include <QtCore/qjniobject.h>
+
+QT_BEGIN_NAMESPACE
+
+class AndroidMediaMetadataRetriever
+{
+public:
+ enum MetadataKey {
+ Album = 1,
+ AlbumArtist = 13,
+ Artist = 2,
+ Author = 3,
+ Bitrate = 20,
+ CDTrackNumber = 0,
+ Compilation = 15,
+ Composer = 4,
+ Date = 5,
+ DiscNumber = 14,
+ Duration = 9,
+ Genre = 6,
+ HasAudio = 16,
+ HasVideo = 17,
+ Location = 23,
+ MimeType = 12,
+ NumTracks = 10,
+ Title = 7,
+ VideoHeight = 19,
+ VideoWidth = 18,
+ VideoRotation = 24,
+ Writer = 11,
+ Year = 8
+ };
+
+ AndroidMediaMetadataRetriever();
+ ~AndroidMediaMetadataRetriever();
+
+ QString extractMetadata(MetadataKey key);
+ bool setDataSource(const QUrl &url);
+
+private:
+ void release();
+ QJniObject m_metadataRetriever;
+};
+
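+// A minimal usage sketch (hypothetical file path):
+//   AndroidMediaMetadataRetriever retriever;
+//   if (retriever.setDataSource(QUrl::fromLocalFile("/sdcard/clip.mp4")))
+//       qDebug() << retriever.extractMetadata(AndroidMediaMetadataRetriever::Title);
+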
+QT_END_NAMESPACE
+
+#endif // ANDROIDMEDIAMETADATARETRIEVER_H
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidmediaplayer.cpp b/src/plugins/multimedia/android/wrappers/jni/androidmediaplayer.cpp
new file mode 100644
index 000000000..91f489f9e
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidmediaplayer.cpp
@@ -0,0 +1,535 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "androidmediaplayer_p.h"
+#include "androidsurfacetexture_p.h"
+
+#include <QList>
+#include <QReadWriteLock>
+#include <QString>
+#include <QtCore/qcoreapplication.h>
+#include <qloggingcategory.h>
+
+static const char QtAndroidMediaPlayerClassName[] = "org/qtproject/qt/android/multimedia/QtAndroidMediaPlayer";
+typedef QList<AndroidMediaPlayer *> MediaPlayerList;
+Q_GLOBAL_STATIC(MediaPlayerList, mediaPlayers)
+Q_GLOBAL_STATIC(QReadWriteLock, rwLock)
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(lcAudio, "qt.multimedia.audio")
+
+AndroidMediaPlayer::AndroidMediaPlayer()
+ : QObject()
+{
+ QWriteLocker locker(rwLock);
+ auto context = QNativeInterface::QAndroidApplication::context();
+ const jlong id = reinterpret_cast<jlong>(this);
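+ // The Java player stores this pointer-sized id and hands it back to the static
+ // native callbacks below (onErrorNative etc.), which use it to look up the C++
+ // instance in mediaPlayers.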
+ mMediaPlayer = QJniObject(QtAndroidMediaPlayerClassName,
+ "(Landroid/content/Context;J)V",
+ context.object(),
+ id);
+ mediaPlayers->append(this);
+}
+
+AndroidMediaPlayer::~AndroidMediaPlayer()
+{
+ QWriteLocker locker(rwLock);
+ const int i = mediaPlayers->indexOf(this);
+ Q_ASSERT(i != -1);
+ mediaPlayers->remove(i);
+}
+
+void AndroidMediaPlayer::release()
+{
+ mMediaPlayer.callMethod<void>("release");
+}
+
+void AndroidMediaPlayer::reset()
+{
+ mMediaPlayer.callMethod<void>("reset");
+}
+
+int AndroidMediaPlayer::getCurrentPosition()
+{
+ return mMediaPlayer.callMethod<jint>("getCurrentPosition");
+}
+
+int AndroidMediaPlayer::getDuration()
+{
+ return mMediaPlayer.callMethod<jint>("getDuration");
+}
+
+bool AndroidMediaPlayer::isPlaying()
+{
+ return mMediaPlayer.callMethod<jboolean>("isPlaying");
+}
+
+int AndroidMediaPlayer::volume()
+{
+ return mMediaPlayer.callMethod<jint>("getVolume");
+}
+
+bool AndroidMediaPlayer::isMuted()
+{
+ return mMediaPlayer.callMethod<jboolean>("isMuted");
+}
+
+qreal AndroidMediaPlayer::playbackRate()
+{
+ qreal rate(1.0);
+
+ if (QNativeInterface::QAndroidApplication::sdkVersion() < 23)
+ return rate;
+
+ QJniObject player = mMediaPlayer.callObjectMethod("getMediaPlayerHandle",
+ "()Landroid/media/MediaPlayer;");
+ if (player.isValid()) {
+ QJniObject playbackParams = player.callObjectMethod("getPlaybackParams",
+ "()Landroid/media/PlaybackParams;");
+ if (playbackParams.isValid()) {
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(playbackParams.objectClass(), "getSpeed", "()F");
+ const qreal speed = env->CallFloatMethod(playbackParams.object(), methodId);
+ if (!env.checkAndClearExceptions())
+ rate = speed;
+ }
+ }
+
+ return rate;
+}
+
+jobject AndroidMediaPlayer::display()
+{
+ return mMediaPlayer.callObjectMethod("display", "()Landroid/view/SurfaceHolder;").object();
+}
+
+AndroidMediaPlayer::TrackInfo convertTrackInfo(int streamNumber, QJniObject androidTrackInfo)
+{
+ const QLatin1String unknownMimeType("application/octet-stream");
+ const QLatin1String undefinedLanguage("und");
+
+ if (!androidTrackInfo.isValid())
+ return { streamNumber, AndroidMediaPlayer::TrackType::Unknown, undefinedLanguage,
+ unknownMimeType };
+
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(androidTrackInfo.objectClass(), "getType", "()I");
+ const jint type = env->CallIntMethod(androidTrackInfo.object(), methodId);
+ if (env.checkAndClearExceptions())
+ return { streamNumber, AndroidMediaPlayer::TrackType::Unknown, undefinedLanguage,
+ unknownMimeType };
+
+ if (type < 0 || type > 5) {
+ return { streamNumber, AndroidMediaPlayer::TrackType::Unknown, undefinedLanguage,
+ unknownMimeType };
+ }
+
+ AndroidMediaPlayer::TrackType trackType = static_cast<AndroidMediaPlayer::TrackType>(type);
+
+ auto languageObject = androidTrackInfo.callObjectMethod("getLanguage", "()Ljava/lang/String;");
+ QString language = languageObject.isValid() ? languageObject.toString() : undefinedLanguage;
+
+ auto mimeTypeObject = androidTrackInfo.callObjectMethod("getMime", "()Ljava/lang/String;");
+ QString mimeType = mimeTypeObject.isValid() ? mimeTypeObject.toString() : unknownMimeType;
+
+ return { streamNumber, trackType, language, mimeType };
+}
+
+QList<AndroidMediaPlayer::TrackInfo> AndroidMediaPlayer::tracksInfo()
+{
+ auto androidTracksInfoObject = mMediaPlayer.callObjectMethod(
+ "getAllTrackInfo",
+ "()[Lorg/qtproject/qt/android/multimedia/QtAndroidMediaPlayer$TrackInfo;");
+
+ if (!androidTracksInfoObject.isValid())
+ return QList<AndroidMediaPlayer::TrackInfo>();
+
+ auto androidTracksInfo = androidTracksInfoObject.object<jobjectArray>();
+ if (!androidTracksInfo)
+ return QList<AndroidMediaPlayer::TrackInfo>();
+
+ QJniEnvironment environment;
+ const auto numberOfTracks = environment->GetArrayLength(androidTracksInfo);
+
+ QList<AndroidMediaPlayer::TrackInfo> tracksInformation;
+
+ for (int index = 0; index < numberOfTracks; index++) {
+ auto androidTrackInformation = environment->GetObjectArrayElement(androidTracksInfo, index);
+
+ if (environment.checkAndClearExceptions()) {
+ continue;
+ }
+
+ auto trackInfo = convertTrackInfo(index, androidTrackInformation);
+ tracksInformation.insert(index, trackInfo);
+
+ environment->DeleteLocalRef(androidTrackInformation);
+ }
+
+ return tracksInformation;
+}
+
+int AndroidMediaPlayer::activeTrack(TrackType androidTrackType)
+{
+ int type = static_cast<int>(androidTrackType);
+ return mMediaPlayer.callMethod<jint>("getSelectedTrack", "(I)I", type);
+}
+
+void AndroidMediaPlayer::deselectTrack(int trackNumber)
+{
+ mMediaPlayer.callMethod<void>("deselectTrack", "(I)V", trackNumber);
+}
+
+void AndroidMediaPlayer::selectTrack(int trackNumber)
+{
+ mMediaPlayer.callMethod<void>("selectTrack", "(I)V", trackNumber);
+}
+
+void AndroidMediaPlayer::play()
+{
+ mMediaPlayer.callMethod<void>("start");
+}
+
+void AndroidMediaPlayer::pause()
+{
+ mMediaPlayer.callMethod<void>("pause");
+}
+
+void AndroidMediaPlayer::stop()
+{
+ mMediaPlayer.callMethod<void>("stop");
+}
+
+void AndroidMediaPlayer::seekTo(qint32 msec)
+{
+ mMediaPlayer.callMethod<void>("seekTo", "(I)V", jint(msec));
+}
+
+void AndroidMediaPlayer::setMuted(bool mute)
+{
+ if (mAudioBlocked)
+ return;
+
+ mMediaPlayer.callMethod<void>("mute", "(Z)V", jboolean(mute));
+}
+
+void AndroidMediaPlayer::setDataSource(const QNetworkRequest &request)
+{
+ QJniObject string = QJniObject::fromString(request.url().toString(QUrl::FullyEncoded));
+
+ mMediaPlayer.callMethod<void>("initHeaders", "()V");
+ for (auto &header : request.rawHeaderList()) {
+ auto value = request.rawHeader(header);
+ mMediaPlayer.callMethod<void>("setHeader", "(Ljava/lang/String;Ljava/lang/String;)V",
+ QJniObject::fromString(QLatin1String(header)).object(),
+ QJniObject::fromString(QLatin1String(value)).object());
+ }
+
+ mMediaPlayer.callMethod<void>("setDataSource", "(Ljava/lang/String;)V", string.object());
+}
+
+void AndroidMediaPlayer::prepareAsync()
+{
+ mMediaPlayer.callMethod<void>("prepareAsync");
+}
+
+void AndroidMediaPlayer::setVolume(int volume)
+{
+ if (mAudioBlocked)
+ return;
+
+ mMediaPlayer.callMethod<void>("setVolume", "(I)V", jint(volume));
+}
+
+void AndroidMediaPlayer::blockAudio()
+{
+ mAudioBlocked = true;
+}
+
+void AndroidMediaPlayer::unblockAudio()
+{
+ mAudioBlocked = false;
+}
+
+void AndroidMediaPlayer::startSoundStreaming(const int inputId, const int outputId)
+{
+ QJniObject::callStaticMethod<void>("org/qtproject/qt/android/multimedia/QtAudioDeviceManager",
+ "startSoundStreaming",
+ inputId,
+ outputId);
+}
+
+void AndroidMediaPlayer::stopSoundStreaming()
+{
+ QJniObject::callStaticMethod<void>(
+ "org/qtproject/qt/android/multimedia/QtAudioDeviceManager", "stopSoundStreaming");
+}
+
+bool AndroidMediaPlayer::setPlaybackRate(qreal rate)
+{
+ if (QNativeInterface::QAndroidApplication::sdkVersion() < 23) {
+ qWarning() << "Setting the playback rate on a media player requires"
+ << "Android 6.0 (API level 23) or later";
+ return false;
+ }
+
+ return mMediaPlayer.callMethod<jboolean>("setPlaybackRate", jfloat(rate));
+}
+
+void AndroidMediaPlayer::setDisplay(AndroidSurfaceTexture *surfaceTexture)
+{
+ mMediaPlayer.callMethod<void>("setDisplay",
+ "(Landroid/view/SurfaceHolder;)V",
+ surfaceTexture ? surfaceTexture->surfaceHolder() : nullptr);
+}
+
+bool AndroidMediaPlayer::setAudioOutput(const QByteArray &deviceId)
+{
+ const bool ret = QJniObject::callStaticMethod<jboolean>(
+ "org/qtproject/qt/android/multimedia/QtAudioDeviceManager",
+ "setAudioOutput",
+ "(I)Z",
+ deviceId.toInt());
+
+ if (!ret)
+ qCWarning(lcAudio) << "Output device not set";
+
+ return ret;
+}
+
+#if 0
+void AndroidMediaPlayer::setAudioRole(QAudio::Role role)
+{
+ QString r;
+ switch (role) {
+ case QAudio::MusicRole:
+ r = QLatin1String("CONTENT_TYPE_MUSIC");
+ break;
+ case QAudio::VideoRole:
+ r = QLatin1String("CONTENT_TYPE_MOVIE");
+ break;
+ case QAudio::VoiceCommunicationRole:
+ r = QLatin1String("USAGE_VOICE_COMMUNICATION");
+ break;
+ case QAudio::AlarmRole:
+ r = QLatin1String("USAGE_ALARM");
+ break;
+ case QAudio::NotificationRole:
+ r = QLatin1String("USAGE_NOTIFICATION");
+ break;
+ case QAudio::RingtoneRole:
+ r = QLatin1String("USAGE_NOTIFICATION_RINGTONE");
+ break;
+ case QAudio::AccessibilityRole:
+ r = QLatin1String("USAGE_ASSISTANCE_ACCESSIBILITY");
+ break;
+ case QAudio::SonificationRole:
+ r = QLatin1String("CONTENT_TYPE_SONIFICATION");
+ break;
+ case QAudio::GameRole:
+ r = QLatin1String("USAGE_GAME");
+ break;
+ default:
+ return;
+ }
+
+ int type = 0; // CONTENT_TYPE_UNKNOWN
+ int usage = 0; // USAGE_UNKNOWN
+
+ if (r == QLatin1String("CONTENT_TYPE_MOVIE"))
+ type = 3;
+ else if (r == QLatin1String("CONTENT_TYPE_MUSIC"))
+ type = 2;
+ else if (r == QLatin1String("CONTENT_TYPE_SONIFICATION"))
+ type = 4;
+ else if (r == QLatin1String("CONTENT_TYPE_SPEECH"))
+ type = 1;
+ else if (r == QLatin1String("USAGE_ALARM"))
+ usage = 4;
+ else if (r == QLatin1String("USAGE_ASSISTANCE_ACCESSIBILITY"))
+ usage = 11;
+ else if (r == QLatin1String("USAGE_ASSISTANCE_NAVIGATION_GUIDANCE"))
+ usage = 12;
+ else if (r == QLatin1String("USAGE_ASSISTANCE_SONIFICATION"))
+ usage = 13;
+ else if (r == QLatin1String("USAGE_ASSISTANT"))
+ usage = 16;
+ else if (r == QLatin1String("USAGE_GAME"))
+ usage = 14;
+ else if (r == QLatin1String("USAGE_MEDIA"))
+ usage = 1;
+ else if (r == QLatin1String("USAGE_NOTIFICATION"))
+ usage = 5;
+ else if (r == QLatin1String("USAGE_NOTIFICATION_COMMUNICATION_DELAYED"))
+ usage = 9;
+ else if (r == QLatin1String("USAGE_NOTIFICATION_COMMUNICATION_INSTANT"))
+ usage = 8;
+ else if (r == QLatin1String("USAGE_NOTIFICATION_COMMUNICATION_REQUEST"))
+ usage = 7;
+ else if (r == QLatin1String("USAGE_NOTIFICATION_EVENT"))
+ usage = 10;
+ else if (r == QLatin1String("USAGE_NOTIFICATION_RINGTONE"))
+ usage = 6;
+ else if (r == QLatin1String("USAGE_VOICE_COMMUNICATION"))
+ usage = 2;
+ else if (r == QLatin1String("USAGE_VOICE_COMMUNICATION_SIGNALLING"))
+ usage = 3;
+
+ mMediaPlayer.callMethod<void>("setAudioAttributes", "(II)V", jint(type), jint(usage));
+}
+#endif
+
+static void onErrorNative(JNIEnv *env, jobject thiz, jint what, jint extra, jlong id)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(thiz);
+ QReadLocker locker(rwLock);
+ const int i = mediaPlayers->indexOf(reinterpret_cast<AndroidMediaPlayer *>(id));
+ if (Q_UNLIKELY(i == -1))
+ return;
+
+ Q_EMIT (*mediaPlayers)[i]->error(what, extra);
+}
+
+static void onBufferingUpdateNative(JNIEnv *env, jobject thiz, jint percent, jlong id)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(thiz);
+ QReadLocker locker(rwLock);
+ const int i = mediaPlayers->indexOf(reinterpret_cast<AndroidMediaPlayer *>(id));
+ if (Q_UNLIKELY(i == -1))
+ return;
+
+ Q_EMIT (*mediaPlayers)[i]->bufferingChanged(percent);
+}
+
+static void onProgressUpdateNative(JNIEnv *env, jobject thiz, jint progress, jlong id)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(thiz);
+ QReadLocker locker(rwLock);
+ const int i = mediaPlayers->indexOf(reinterpret_cast<AndroidMediaPlayer *>(id));
+ if (Q_UNLIKELY(i == -1))
+ return;
+
+ Q_EMIT (*mediaPlayers)[i]->progressChanged(progress);
+}
+
+static void onDurationChangedNative(JNIEnv *env, jobject thiz, jint duration, jlong id)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(thiz);
+ QReadLocker locker(rwLock);
+ const int i = mediaPlayers->indexOf(reinterpret_cast<AndroidMediaPlayer *>(id));
+ if (Q_UNLIKELY(i == -1))
+ return;
+
+ Q_EMIT (*mediaPlayers)[i]->durationChanged(duration);
+}
+
+static void onInfoNative(JNIEnv *env, jobject thiz, jint what, jint extra, jlong id)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(thiz);
+ QReadLocker locker(rwLock);
+ const int i = mediaPlayers->indexOf(reinterpret_cast<AndroidMediaPlayer *>(id));
+ if (Q_UNLIKELY(i == -1))
+ return;
+
+ Q_EMIT (*mediaPlayers)[i]->info(what, extra);
+}
+
+static void onStateChangedNative(JNIEnv *env, jobject thiz, jint state, jlong id)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(thiz);
+ QReadLocker locker(rwLock);
+ const int i = mediaPlayers->indexOf(reinterpret_cast<AndroidMediaPlayer *>(id));
+ if (Q_UNLIKELY(i == -1))
+ return;
+
+ Q_EMIT (*mediaPlayers)[i]->stateChanged(state);
+}
+
+static void onVideoSizeChangedNative(JNIEnv *env,
+ jobject thiz,
+ jint width,
+ jint height,
+ jlong id)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(thiz);
+ QReadLocker locker(rwLock);
+ const int i = mediaPlayers->indexOf(reinterpret_cast<AndroidMediaPlayer *>(id));
+ if (Q_UNLIKELY(i == -1))
+ return;
+
+ Q_EMIT (*mediaPlayers)[i]->videoSizeChanged(width, height);
+}
+
+static AndroidMediaPlayer *getMediaPlayer(jlong ptr)
+{
+ auto mediaplayer = reinterpret_cast<AndroidMediaPlayer *>(ptr);
+ if (!mediaplayer || !mediaPlayers->contains(mediaplayer))
+ return nullptr;
+
+ return mediaplayer;
+}
+
+static void onTrackInfoChangedNative(JNIEnv *env, jobject thiz, jlong ptr)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(thiz);
+
+ QReadLocker locker(rwLock);
+ auto mediaplayer = getMediaPlayer(ptr);
+ if (!mediaplayer)
+ return;
+
+ emit mediaplayer->tracksInfoChanged();
+}
+
+static void onTimedTextChangedNative(JNIEnv *env, jobject thiz, jstring timedText, jint time,
+ jlong ptr)
+{
+ Q_UNUSED(thiz);
+ Q_UNUSED(time);
+
+ QReadLocker locker(rwLock);
+
+ auto mediaplayer = getMediaPlayer(ptr);
+ if (!mediaplayer)
+ return;
+
+    QString subtitleText;
+    if (timedText != nullptr) {
+        // Copy the Java string and release the UTF chars to avoid leaking them.
+        const char *utf8 = env->GetStringUTFChars(timedText, nullptr);
+        subtitleText = QString::fromUtf8(utf8);
+        env->ReleaseStringUTFChars(timedText, utf8);
+    }
+
+ emit mediaplayer->timedTextChanged(subtitleText);
+}
+
+bool AndroidMediaPlayer::registerNativeMethods()
+{
+ static const JNINativeMethod methods[] = {
+ { "onErrorNative", "(IIJ)V", reinterpret_cast<void *>(onErrorNative) },
+ { "onBufferingUpdateNative", "(IJ)V", reinterpret_cast<void *>(onBufferingUpdateNative) },
+ { "onProgressUpdateNative", "(IJ)V", reinterpret_cast<void *>(onProgressUpdateNative) },
+ { "onDurationChangedNative", "(IJ)V", reinterpret_cast<void *>(onDurationChangedNative) },
+ { "onInfoNative", "(IIJ)V", reinterpret_cast<void *>(onInfoNative) },
+ { "onVideoSizeChangedNative", "(IIJ)V",
+ reinterpret_cast<void *>(onVideoSizeChangedNative) },
+ { "onStateChangedNative", "(IJ)V", reinterpret_cast<void *>(onStateChangedNative) },
+ { "onTrackInfoChangedNative", "(J)V", reinterpret_cast<void *>(onTrackInfoChangedNative) },
+ { "onTimedTextChangedNative", "(Ljava/lang/String;IJ)V",
+ reinterpret_cast<void *>(onTimedTextChangedNative) }
+ };
+
+ const int size = std::size(methods);
+ return QJniEnvironment().registerNativeMethods(QtAndroidMediaPlayerClassName, methods, size);
+}
+
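+// For reference, the Java listener class is expected to declare native methods
+// matching the table above. A sketch of the presumed declarations, derived
+// from the JNI signatures rather than taken from the Java source:
+//
+//     private native void onErrorNative(int what, int extra, long id);
+//     private native void onBufferingUpdateNative(int percent, long id);
+//     private native void onDurationChangedNative(int duration, long id);
+//     private native void onTimedTextChangedNative(String text, int time, long id);
+//
+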
+QT_END_NAMESPACE
+
+#include "moc_androidmediaplayer_p.cpp"
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidmediaplayer_p.h b/src/plugins/multimedia/android/wrappers/jni/androidmediaplayer_p.h
new file mode 100644
index 000000000..66095b114
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidmediaplayer_p.h
@@ -0,0 +1,135 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef ANDROIDMEDIAPLAYER_H
+#define ANDROIDMEDIAPLAYER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QObject>
+#include <QNetworkRequest>
+#include <QtCore/qjniobject.h>
+#include <QAudio>
+
+QT_BEGIN_NAMESPACE
+
+class AndroidSurfaceTexture;
+
+class AndroidMediaPlayer : public QObject
+{
+ Q_OBJECT
+public:
+ AndroidMediaPlayer();
+ ~AndroidMediaPlayer();
+
+ enum MediaError
+ {
+ // What
+ MEDIA_ERROR_UNKNOWN = 1,
+ MEDIA_ERROR_SERVER_DIED = 100,
+ MEDIA_ERROR_INVALID_STATE = -38, // Undocumented
+ // Extra
+ MEDIA_ERROR_IO = -1004,
+ MEDIA_ERROR_MALFORMED = -1007,
+ MEDIA_ERROR_UNSUPPORTED = -1010,
+ MEDIA_ERROR_TIMED_OUT = -110,
+ MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK = 200,
+ MEDIA_ERROR_BAD_THINGS_ARE_GOING_TO_HAPPEN = -2147483648 // Undocumented
+ };
+
+ enum MediaInfo
+ {
+ MEDIA_INFO_UNKNOWN = 1,
+ MEDIA_INFO_VIDEO_TRACK_LAGGING = 700,
+ MEDIA_INFO_VIDEO_RENDERING_START = 3,
+ MEDIA_INFO_BUFFERING_START = 701,
+ MEDIA_INFO_BUFFERING_END = 702,
+ MEDIA_INFO_BAD_INTERLEAVING = 800,
+ MEDIA_INFO_NOT_SEEKABLE = 801,
+ MEDIA_INFO_METADATA_UPDATE = 802
+ };
+
+ enum MediaPlayerState {
+ Uninitialized = 0x1, /* End */
+ Idle = 0x2,
+ Preparing = 0x4,
+ Prepared = 0x8,
+ Initialized = 0x10,
+ Started = 0x20,
+ Stopped = 0x40,
+ Paused = 0x80,
+ PlaybackCompleted = 0x100,
+ Error = 0x200
+ };
+
+ enum TrackType { Unknown = 0, Video, Audio, TimedText, Subtitle, Metadata };
+
+ struct TrackInfo
+ {
+ int trackNumber;
+ TrackType trackType;
+ QString language;
+ QString mimeType;
+ };
+
+ void release();
+ void reset();
+
+ int getCurrentPosition();
+ int getDuration();
+ bool isPlaying();
+ int volume();
+ bool isMuted();
+ qreal playbackRate();
+ jobject display();
+
+ void play();
+ void pause();
+ void stop();
+ void seekTo(qint32 msec);
+ void setMuted(bool mute);
+ void setDataSource(const QNetworkRequest &request);
+ void prepareAsync();
+ void setVolume(int volume);
+ static void startSoundStreaming(const int inputId, const int outputId);
+ static void stopSoundStreaming();
+ bool setPlaybackRate(qreal rate);
+ void setDisplay(AndroidSurfaceTexture *surfaceTexture);
+ static bool setAudioOutput(const QByteArray &deviceId);
+ QList<TrackInfo> tracksInfo();
+ int activeTrack(TrackType trackType);
+ void deselectTrack(int trackNumber);
+ void selectTrack(int trackNumber);
+
+ static bool registerNativeMethods();
+
+ void blockAudio();
+ void unblockAudio();
+Q_SIGNALS:
+ void error(qint32 what, qint32 extra);
+ void bufferingChanged(qint32 percent);
+ void durationChanged(qint64 duration);
+ void progressChanged(qint64 progress);
+ void stateChanged(qint32 state);
+ void info(qint32 what, qint32 extra);
+ void videoSizeChanged(qint32 width, qint32 height);
+ void timedTextChanged(QString text);
+ void tracksInfoChanged();
+
+private:
+ QJniObject mMediaPlayer;
+ bool mAudioBlocked = false;
+};
+
+QT_END_NAMESPACE
+
+#endif // ANDROIDMEDIAPLAYER_H
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidmediarecorder.cpp b/src/plugins/multimedia/android/wrappers/jni/androidmediarecorder.cpp
new file mode 100644
index 000000000..a3c9f4556
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidmediarecorder.cpp
@@ -0,0 +1,337 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "androidmediarecorder_p.h"
+#include "androidcamera_p.h"
+#include "androidsurfacetexture_p.h"
+#include "androidsurfaceview_p.h"
+#include "qandroidglobal_p.h"
+#include "qandroidmultimediautils_p.h"
+
+#include <qmap.h>
+#include <QtCore/qcoreapplication.h>
+#include <QtCore/qlogging.h>
+#include <QtCore/qurl.h>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(lcMediaRecorder, "qt.multimedia.mediarecorder.android")
+
+typedef QMap<QString, QJniObject> CamcorderProfiles;
+Q_GLOBAL_STATIC(CamcorderProfiles, g_camcorderProfiles)
+
+static QString profileKey()
+{
+ return QStringLiteral("%1-%2");
+}
+
+bool AndroidCamcorderProfile::hasProfile(jint cameraId, Quality quality)
+{
+ if (g_camcorderProfiles->contains(profileKey().arg(cameraId).arg(quality)))
+ return true;
+
+ return QJniObject::callStaticMethod<jboolean>("android/media/CamcorderProfile",
+ "hasProfile",
+ "(II)Z",
+ cameraId,
+ quality);
+}
+
+AndroidCamcorderProfile AndroidCamcorderProfile::get(jint cameraId, Quality quality)
+{
+ const QString key = profileKey().arg(cameraId).arg(quality);
+ QMap<QString, QJniObject>::const_iterator it = g_camcorderProfiles->constFind(key);
+
+ if (it != g_camcorderProfiles->constEnd())
+ return AndroidCamcorderProfile(*it);
+
+ QJniObject camProfile = QJniObject::callStaticObjectMethod("android/media/CamcorderProfile",
+ "get",
+ "(II)Landroid/media/CamcorderProfile;",
+ cameraId,
+ quality);
+
+ return AndroidCamcorderProfile((*g_camcorderProfiles)[key] = camProfile);
+}
+
+int AndroidCamcorderProfile::getValue(AndroidCamcorderProfile::Field field) const
+{
+ switch (field) {
+ case audioBitRate:
+ return m_camcorderProfile.getField<jint>("audioBitRate");
+ case audioChannels:
+ return m_camcorderProfile.getField<jint>("audioChannels");
+ case audioCodec:
+ return m_camcorderProfile.getField<jint>("audioCodec");
+ case audioSampleRate:
+ return m_camcorderProfile.getField<jint>("audioSampleRate");
+ case duration:
+ return m_camcorderProfile.getField<jint>("duration");
+ case fileFormat:
+ return m_camcorderProfile.getField<jint>("fileFormat");
+ case quality:
+ return m_camcorderProfile.getField<jint>("quality");
+ case videoBitRate:
+ return m_camcorderProfile.getField<jint>("videoBitRate");
+ case videoCodec:
+ return m_camcorderProfile.getField<jint>("videoCodec");
+ case videoFrameHeight:
+ return m_camcorderProfile.getField<jint>("videoFrameHeight");
+ case videoFrameRate:
+ return m_camcorderProfile.getField<jint>("videoFrameRate");
+ case videoFrameWidth:
+ return m_camcorderProfile.getField<jint>("videoFrameWidth");
+ }
+
+ return 0;
+}
+
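+// Taken together, the profile helpers are typically used as in this sketch
+// (camera id 0 and 720p quality are example values only):
+//
+//     if (AndroidCamcorderProfile::hasProfile(0, AndroidCamcorderProfile::QUALITY_720P)) {
+//         const auto profile = AndroidCamcorderProfile::get(0, AndroidCamcorderProfile::QUALITY_720P);
+//         const int fps = profile.getValue(AndroidCamcorderProfile::videoFrameRate);
+//     }
+//
+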
+AndroidCamcorderProfile::AndroidCamcorderProfile(const QJniObject &camcorderProfile)
+{
+ m_camcorderProfile = camcorderProfile;
+}
+
+static const char QtMediaRecorderListenerClassName[] =
+ "org/qtproject/qt/android/multimedia/QtMediaRecorderListener";
+typedef QMap<jlong, AndroidMediaRecorder*> MediaRecorderMap;
+Q_GLOBAL_STATIC(MediaRecorderMap, mediaRecorders)
+
+static void notifyError(JNIEnv* , jobject, jlong id, jint what, jint extra)
+{
+ AndroidMediaRecorder *obj = mediaRecorders->value(id, 0);
+ if (obj)
+ emit obj->error(what, extra);
+}
+
+static void notifyInfo(JNIEnv* , jobject, jlong id, jint what, jint extra)
+{
+ AndroidMediaRecorder *obj = mediaRecorders->value(id, 0);
+ if (obj)
+ emit obj->info(what, extra);
+}
+
+AndroidMediaRecorder::AndroidMediaRecorder()
+ : QObject()
+ , m_id(reinterpret_cast<jlong>(this))
+{
+ m_mediaRecorder = QJniObject("android/media/MediaRecorder");
+ if (m_mediaRecorder.isValid()) {
+ QJniObject listener(QtMediaRecorderListenerClassName, "(J)V", m_id);
+ m_mediaRecorder.callMethod<void>("setOnErrorListener",
+ "(Landroid/media/MediaRecorder$OnErrorListener;)V",
+ listener.object());
+ m_mediaRecorder.callMethod<void>("setOnInfoListener",
+ "(Landroid/media/MediaRecorder$OnInfoListener;)V",
+ listener.object());
+ mediaRecorders->insert(m_id, this);
+ }
+}
+
+AndroidMediaRecorder::~AndroidMediaRecorder()
+{
+ if (m_isVideoSourceSet || m_isAudioSourceSet)
+ reset();
+
+ release();
+ mediaRecorders->remove(m_id);
+}
+
+void AndroidMediaRecorder::release()
+{
+ m_mediaRecorder.callMethod<void>("release");
+}
+
+bool AndroidMediaRecorder::prepare()
+{
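+    // Call prepare() through the raw JNI interface so that a Java exception
+    // can be detected with checkAndClearExceptions() and turned into a bool
+    // return value instead of being silently swallowed.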
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_mediaRecorder.objectClass(), "prepare", "()V");
+ env->CallVoidMethod(m_mediaRecorder.object(), methodId);
+
+ if (env.checkAndClearExceptions())
+ return false;
+ return true;
+}
+
+void AndroidMediaRecorder::reset()
+{
+ m_mediaRecorder.callMethod<void>("reset");
+ m_isAudioSourceSet = false; // Now setAudioSource can be used again.
+ m_isVideoSourceSet = false;
+}
+
+bool AndroidMediaRecorder::start()
+{
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_mediaRecorder.objectClass(), "start", "()V");
+ env->CallVoidMethod(m_mediaRecorder.object(), methodId);
+
+ if (env.checkAndClearExceptions())
+ return false;
+ return true;
+}
+
+void AndroidMediaRecorder::stop()
+{
+ m_mediaRecorder.callMethod<void>("stop");
+}
+
+void AndroidMediaRecorder::setAudioChannels(int numChannels)
+{
+ m_mediaRecorder.callMethod<void>("setAudioChannels", "(I)V", numChannels);
+}
+
+void AndroidMediaRecorder::setAudioEncoder(AudioEncoder encoder)
+{
+ QJniEnvironment env;
+ m_mediaRecorder.callMethod<void>("setAudioEncoder", "(I)V", int(encoder));
+}
+
+void AndroidMediaRecorder::setAudioEncodingBitRate(int bitRate)
+{
+ m_mediaRecorder.callMethod<void>("setAudioEncodingBitRate", "(I)V", bitRate);
+}
+
+void AndroidMediaRecorder::setAudioSamplingRate(int samplingRate)
+{
+ m_mediaRecorder.callMethod<void>("setAudioSamplingRate", "(I)V", samplingRate);
+}
+
+void AndroidMediaRecorder::setAudioSource(AudioSource source)
+{
+ if (!m_isAudioSourceSet) {
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_mediaRecorder.objectClass(), "setAudioSource", "(I)V");
+ env->CallVoidMethod(m_mediaRecorder.object(), methodId, source);
+ if (!env.checkAndClearExceptions())
+ m_isAudioSourceSet = true;
+ } else {
+ qCWarning(lcMediaRecorder) << "Audio source already set. Not setting a new source.";
+ }
+}
+
+bool AndroidMediaRecorder::isAudioSourceSet() const
+{
+ return m_isAudioSourceSet;
+}
+
+bool AndroidMediaRecorder::setAudioInput(const QByteArray &id)
+{
+ const bool ret = QJniObject::callStaticMethod<jboolean>(
+ "org/qtproject/qt/android/multimedia/QtAudioDeviceManager",
+ "setAudioInput",
+ "(Landroid/media/MediaRecorder;I)Z",
+ m_mediaRecorder.object(),
+ id.toInt());
+ if (!ret)
+ qCWarning(lcMediaRecorder) << "No default input device was set.";
+
+ return ret;
+}
+
+void AndroidMediaRecorder::setCamera(AndroidCamera *camera)
+{
+ QJniObject cam = camera->getCameraObject();
+ m_mediaRecorder.callMethod<void>("setCamera", "(Landroid/hardware/Camera;)V", cam.object());
+}
+
+void AndroidMediaRecorder::setVideoEncoder(VideoEncoder encoder)
+{
+ m_mediaRecorder.callMethod<void>("setVideoEncoder", "(I)V", int(encoder));
+}
+
+void AndroidMediaRecorder::setVideoEncodingBitRate(int bitRate)
+{
+ m_mediaRecorder.callMethod<void>("setVideoEncodingBitRate", "(I)V", bitRate);
+}
+
+void AndroidMediaRecorder::setVideoFrameRate(int rate)
+{
+ m_mediaRecorder.callMethod<void>("setVideoFrameRate", "(I)V", rate);
+}
+
+void AndroidMediaRecorder::setVideoSize(const QSize &size)
+{
+ m_mediaRecorder.callMethod<void>("setVideoSize", "(II)V", size.width(), size.height());
+}
+
+void AndroidMediaRecorder::setVideoSource(VideoSource source)
+{
+ QJniEnvironment env;
+
+ auto methodId = env->GetMethodID(m_mediaRecorder.objectClass(), "setVideoSource", "(I)V");
+ env->CallVoidMethod(m_mediaRecorder.object(), methodId, source);
+
+ if (!env.checkAndClearExceptions())
+ m_isVideoSourceSet = true;
+}
+
+void AndroidMediaRecorder::setOrientationHint(int degrees)
+{
+ m_mediaRecorder.callMethod<void>("setOrientationHint", "(I)V", degrees);
+}
+
+void AndroidMediaRecorder::setOutputFormat(OutputFormat format)
+{
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_mediaRecorder.objectClass(), "setOutputFormat", "(I)V");
+ env->CallVoidMethod(m_mediaRecorder.object(), methodId, format);
+    // setAudioSource() can no longer be called once the output format has been set.
+ if (!env.checkAndClearExceptions())
+ m_isAudioSourceSet = true;
+}
+
+void AndroidMediaRecorder::setOutputFile(const QString &path)
+{
+ if (QUrl(path).scheme() == QLatin1String("content")) {
+ const QJniObject fileDescriptor = QJniObject::callStaticObjectMethod(
+ "org/qtproject/qt/android/QtNative",
+ "openFdObjectForContentUrl",
+ "(Landroid/content/Context;Ljava/lang/String;Ljava/lang/String;)Ljava/io/FileDescriptor;",
+ QNativeInterface::QAndroidApplication::context().object(),
+ QJniObject::fromString(path).object(),
+ QJniObject::fromString(QLatin1String("rw")).object());
+
+ m_mediaRecorder.callMethod<void>("setOutputFile",
+ "(Ljava/io/FileDescriptor;)V",
+ fileDescriptor.object());
+ } else {
+ m_mediaRecorder.callMethod<void>("setOutputFile",
+ "(Ljava/lang/String;)V",
+ QJniObject::fromString(path).object());
+ }
+}
+
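+// Sketch of the two paths above: a plain file path goes straight to
+// MediaRecorder.setOutputFile(String), while a content: URI is resolved to a
+// FileDescriptor first (both values below are hypothetical):
+//
+//     recorder.setOutputFile(QStringLiteral("/sdcard/Movies/clip.mp4"));          // String overload
+//     recorder.setOutputFile(QStringLiteral("content://media/external/video/1")); // FD overload
+//
+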
+void AndroidMediaRecorder::setSurfaceTexture(AndroidSurfaceTexture *texture)
+{
+ m_mediaRecorder.callMethod<void>("setPreviewDisplay",
+ "(Landroid/view/Surface;)V",
+ texture->surface());
+}
+
+void AndroidMediaRecorder::setSurfaceHolder(AndroidSurfaceHolder *holder)
+{
+ QJniObject surfaceHolder(holder->surfaceHolder());
+ QJniObject surface = surfaceHolder.callObjectMethod("getSurface",
+ "()Landroid/view/Surface;");
+ if (!surface.isValid())
+ return;
+
+ m_mediaRecorder.callMethod<void>("setPreviewDisplay",
+ "(Landroid/view/Surface;)V",
+ surface.object());
+}
+
+bool AndroidMediaRecorder::registerNativeMethods()
+{
+ static const JNINativeMethod methods[] = {
+ {"notifyError", "(JII)V", (void *)notifyError},
+ {"notifyInfo", "(JII)V", (void *)notifyInfo}
+ };
+
+ const int size = std::size(methods);
+ return QJniEnvironment().registerNativeMethods(QtMediaRecorderListenerClassName, methods, size);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_androidmediarecorder_p.cpp"
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidmediarecorder_p.h b/src/plugins/multimedia/android/wrappers/jni/androidmediarecorder_p.h
new file mode 100644
index 000000000..ffdbcc149
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidmediarecorder_p.h
@@ -0,0 +1,161 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef ANDROIDMEDIARECORDER_H
+#define ANDROIDMEDIARECORDER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qobject.h>
+#include <QtCore/qjniobject.h>
+#include <qsize.h>
+
+QT_BEGIN_NAMESPACE
+
+class AndroidCamera;
+class AndroidSurfaceTexture;
+class AndroidSurfaceHolder;
+
+class AndroidCamcorderProfile
+{
+public:
+ enum Quality { // Needs to match CamcorderProfile
+ QUALITY_LOW,
+ QUALITY_HIGH,
+ QUALITY_QCIF,
+ QUALITY_CIF,
+ QUALITY_480P,
+ QUALITY_720P,
+ QUALITY_1080P,
+ QUALITY_QVGA
+ };
+
+ enum Field {
+ audioBitRate,
+ audioChannels,
+ audioCodec,
+ audioSampleRate,
+ duration,
+ fileFormat,
+ quality,
+ videoBitRate,
+ videoCodec,
+ videoFrameHeight,
+ videoFrameRate,
+ videoFrameWidth
+ };
+
+ static bool hasProfile(jint cameraId, Quality quality);
+ static AndroidCamcorderProfile get(jint cameraId, Quality quality);
+ int getValue(Field field) const;
+
+private:
+ AndroidCamcorderProfile(const QJniObject &camcorderProfile);
+ QJniObject m_camcorderProfile;
+};
+
+class AndroidMediaRecorder : public QObject
+{
+ Q_OBJECT
+public:
+ enum AudioEncoder {
+ DefaultAudioEncoder = 0,
+ AMR_NB_Encoder = 1,
+ AMR_WB_Encoder = 2,
+ AAC = 3,
+ OPUS = 7,
+ VORBIS = 6
+ };
+
+ enum AudioSource {
+ DefaultAudioSource = 0,
+ Mic = 1,
+ VoiceUplink = 2,
+ VoiceDownlink = 3,
+ VoiceCall = 4,
+ Camcorder = 5,
+ VoiceRecognition = 6
+ };
+
+ enum VideoEncoder {
+ DefaultVideoEncoder = 0,
+ H263 = 1,
+ H264 = 2,
+ MPEG_4_SP = 3,
+ HEVC = 5
+ };
+
+ enum VideoSource {
+ DefaultVideoSource = 0,
+ Camera = 1
+ };
+
+ enum OutputFormat {
+ DefaultOutputFormat = 0,
+ THREE_GPP = 1,
+ MPEG_4 = 2,
+ AMR_NB_Format = 3,
+ AMR_WB_Format = 4,
+ AAC_ADTS = 6,
+ OGG = 11,
+ WEBM = 9
+ };
+
+ AndroidMediaRecorder();
+ ~AndroidMediaRecorder();
+
+ void release();
+ bool prepare();
+ void reset();
+
+ bool start();
+ void stop();
+
+ void setAudioChannels(int numChannels);
+ void setAudioEncoder(AudioEncoder encoder);
+ void setAudioEncodingBitRate(int bitRate);
+ void setAudioSamplingRate(int samplingRate);
+ void setAudioSource(AudioSource source);
+ bool isAudioSourceSet() const;
+ bool setAudioInput(const QByteArray &id);
+
+ void setCamera(AndroidCamera *camera);
+ void setVideoEncoder(VideoEncoder encoder);
+ void setVideoEncodingBitRate(int bitRate);
+ void setVideoFrameRate(int rate);
+ void setVideoSize(const QSize &size);
+ void setVideoSource(VideoSource source);
+
+ void setOrientationHint(int degrees);
+
+ void setOutputFormat(OutputFormat format);
+ void setOutputFile(const QString &path);
+
+ void setSurfaceTexture(AndroidSurfaceTexture *texture);
+ void setSurfaceHolder(AndroidSurfaceHolder *holder);
+
+ static bool registerNativeMethods();
+
+Q_SIGNALS:
+ void error(int what, int extra);
+ void info(int what, int extra);
+
+private:
+ jlong m_id;
+ QJniObject m_mediaRecorder;
+ bool m_isAudioSourceSet = false;
+ bool m_isVideoSourceSet = false;
+};
+
+QT_END_NAMESPACE
+
+#endif // ANDROIDMEDIARECORDER_H
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidmultimediautils.cpp b/src/plugins/multimedia/android/wrappers/jni/androidmultimediautils.cpp
new file mode 100644
index 000000000..9606bd6bb
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidmultimediautils.cpp
@@ -0,0 +1,43 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "androidmultimediautils_p.h"
+
+#include <QtCore/qjniobject.h>
+
+QT_BEGIN_NAMESPACE
+
+
+void AndroidMultimediaUtils::enableOrientationListener(bool enable)
+{
+ QJniObject::callStaticMethod<void>("org/qtproject/qt/android/multimedia/QtMultimediaUtils",
+ "enableOrientationListener",
+ "(Z)V",
+ enable);
+}
+
+int AndroidMultimediaUtils::getDeviceOrientation()
+{
+ return QJniObject::callStaticMethod<jint>("org/qtproject/qt/android/multimedia/QtMultimediaUtils",
+ "getDeviceOrientation");
+}
+
+QString AndroidMultimediaUtils::getDefaultMediaDirectory(MediaType type)
+{
+ QJniObject path = QJniObject::callStaticObjectMethod(
+ "org/qtproject/qt/android/multimedia/QtMultimediaUtils",
+ "getDefaultMediaDirectory",
+ "(I)Ljava/lang/String;",
+ jint(type));
+ return path.toString();
+}
+
+void AndroidMultimediaUtils::registerMediaFile(const QString &file)
+{
+ QJniObject::callStaticMethod<void>("org/qtproject/qt/android/multimedia/QtMultimediaUtils",
+ "registerMediaFile",
+ "(Ljava/lang/String;)V",
+ QJniObject::fromString(file).object());
+}
+
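+// A small usage sketch for these helpers, e.g. after finishing a recording
+// (the file name is hypothetical): save into the default movies directory and
+// register the file so it shows up in the device's media gallery:
+//
+//     const QString dir = AndroidMultimediaUtils::getDefaultMediaDirectory(
+//             AndroidMultimediaUtils::Movies);
+//     AndroidMultimediaUtils::registerMediaFile(dir + QStringLiteral("/clip.mp4"));
+//
+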
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidmultimediautils_p.h b/src/plugins/multimedia/android/wrappers/jni/androidmultimediautils_p.h
new file mode 100644
index 000000000..ee72c3c61
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidmultimediautils_p.h
@@ -0,0 +1,40 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef ANDROIDMULTIMEDIAUTILS_H
+#define ANDROIDMULTIMEDIAUTILS_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qobject.h>
+
+QT_BEGIN_NAMESPACE
+
+class AndroidMultimediaUtils
+{
+public:
+ enum MediaType {
+ Music = 0,
+ Movies = 1,
+ DCIM = 2,
+ Sounds = 3
+ };
+
+ static void enableOrientationListener(bool enable);
+ static int getDeviceOrientation();
+ static QString getDefaultMediaDirectory(MediaType type);
+ static void registerMediaFile(const QString &file);
+};
+
+QT_END_NAMESPACE
+
+#endif // ANDROIDMULTIMEDIAUTILS_H
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidsurfacetexture.cpp b/src/plugins/multimedia/android/wrappers/jni/androidsurfacetexture.cpp
new file mode 100644
index 000000000..c5860b265
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidsurfacetexture.cpp
@@ -0,0 +1,152 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "androidsurfacetexture_p.h"
+#include <QtCore/qmutex.h>
+#include <QtCore/qcoreapplication.h>
+
+QT_BEGIN_NAMESPACE
+
+static const char QtSurfaceTextureListenerClassName[] = "org/qtproject/qt/android/multimedia/QtSurfaceTextureListener";
+typedef QList<jlong> SurfaceTextures;
+Q_GLOBAL_STATIC(SurfaceTextures, g_surfaceTextures)
+Q_GLOBAL_STATIC(QMutex, g_textureMutex)
+
+static QAtomicInteger<quint64> indexCounter = 0u;
+
+// native method for QtSurfaceTexture.java
+static void notifyFrameAvailable(JNIEnv* , jobject, jlong id)
+{
+ const QMutexLocker lock(g_textureMutex());
+ const int idx = g_surfaceTextures->indexOf(id);
+ if (idx == -1)
+ return;
+
+ AndroidSurfaceTexture *obj = reinterpret_cast<AndroidSurfaceTexture *>(g_surfaceTextures->at(idx));
+ if (obj)
+ Q_EMIT obj->frameAvailable();
+}
+
+AndroidSurfaceTexture::AndroidSurfaceTexture(quint32 texName)
+ : QObject()
+ , m_index(indexCounter.fetchAndAddRelaxed(1))
+{
+ Q_STATIC_ASSERT(sizeof (jlong) >= sizeof (void *));
+ m_surfaceTexture = QJniObject("android/graphics/SurfaceTexture", "(I)V", jint(texName));
+
+ if (!m_surfaceTexture.isValid())
+ return;
+
+ const QMutexLocker lock(g_textureMutex());
+ g_surfaceTextures->append(jlong(this));
+ QJniObject listener(QtSurfaceTextureListenerClassName, "(J)V", jlong(this));
+ setOnFrameAvailableListener(listener);
+}
+
+AndroidSurfaceTexture::~AndroidSurfaceTexture()
+{
+ if (m_surface.isValid())
+ m_surface.callMethod<void>("release");
+
+ if (m_surfaceTexture.isValid()) {
+ release();
+ const QMutexLocker lock(g_textureMutex());
+ const int idx = g_surfaceTextures->indexOf(jlong(this));
+ if (idx != -1)
+ g_surfaceTextures->remove(idx);
+ }
+}
+
+QMatrix4x4 AndroidSurfaceTexture::getTransformMatrix()
+{
+ QMatrix4x4 matrix;
+ if (!m_surfaceTexture.isValid())
+ return matrix;
+
+ QJniEnvironment env;
+ jfloatArray array = env->NewFloatArray(16);
+ m_surfaceTexture.callMethod<void>("getTransformMatrix", "([F)V", array);
+ env->GetFloatArrayRegion(array, 0, 16, matrix.data());
+ env->DeleteLocalRef(array);
+
+ return matrix;
+}
+
+void AndroidSurfaceTexture::release()
+{
+ m_surfaceTexture.callMethod<void>("release");
+}
+
+void AndroidSurfaceTexture::updateTexImage()
+{
+ if (!m_surfaceTexture.isValid())
+ return;
+
+ m_surfaceTexture.callMethod<void>("updateTexImage");
+}
+
+jobject AndroidSurfaceTexture::surfaceTexture()
+{
+ return m_surfaceTexture.object();
+}
+
+jobject AndroidSurfaceTexture::surface()
+{
+ if (!m_surface.isValid()) {
+ m_surface = QJniObject("android/view/Surface",
+ "(Landroid/graphics/SurfaceTexture;)V",
+ m_surfaceTexture.object());
+ }
+
+ return m_surface.object();
+}
+
+jobject AndroidSurfaceTexture::surfaceHolder()
+{
+ if (!m_surfaceHolder.isValid()) {
+ m_surfaceHolder = QJniObject("org/qtproject/qt/android/multimedia/QtSurfaceTextureHolder",
+ "(Landroid/view/Surface;)V",
+ surface());
+ }
+
+ return m_surfaceHolder.object();
+}
+
+void AndroidSurfaceTexture::attachToGLContext(quint32 texName)
+{
+ if (!m_surfaceTexture.isValid())
+ return;
+
+ m_surfaceTexture.callMethod<void>("attachToGLContext", "(I)V", texName);
+}
+
+void AndroidSurfaceTexture::detachFromGLContext()
+{
+ if (!m_surfaceTexture.isValid())
+ return;
+
+ m_surfaceTexture.callMethod<void>("detachFromGLContext");
+}
+
+bool AndroidSurfaceTexture::registerNativeMethods()
+{
+ static const JNINativeMethod methods[] = {
+ {"notifyFrameAvailable", "(J)V", (void *)notifyFrameAvailable}
+ };
+ const int size = std::size(methods);
+    if (!QJniEnvironment().registerNativeMethods(QtSurfaceTextureListenerClassName, methods, size))
+ return false;
+
+ return true;
+}
+
+void AndroidSurfaceTexture::setOnFrameAvailableListener(const QJniObject &listener)
+{
+ m_surfaceTexture.callMethod<void>("setOnFrameAvailableListener",
+ "(Landroid/graphics/SurfaceTexture$OnFrameAvailableListener;)V",
+ listener.object());
+}
+
+QT_END_NAMESPACE
+
+#include "moc_androidsurfacetexture_p.cpp"
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidsurfacetexture_p.h b/src/plugins/multimedia/android/wrappers/jni/androidsurfacetexture_p.h
new file mode 100644
index 000000000..24581ca8d
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidsurfacetexture_p.h
@@ -0,0 +1,61 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef ANDROIDSURFACETEXTURE_H
+#define ANDROIDSURFACETEXTURE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qobject.h>
+#include <QtCore/qjniobject.h>
+
+#include <QMatrix4x4>
+
+QT_BEGIN_NAMESPACE
+
+class AndroidSurfaceTexture : public QObject
+{
+ Q_OBJECT
+public:
+ explicit AndroidSurfaceTexture(quint32 texName);
+ ~AndroidSurfaceTexture();
+
+ jobject surfaceTexture();
+ jobject surface();
+ jobject surfaceHolder();
+ inline bool isValid() const { return m_surfaceTexture.isValid(); }
+
+ QMatrix4x4 getTransformMatrix();
+ void release(); // API level 14
+ void updateTexImage();
+
+ void attachToGLContext(quint32 texName); // API level 16
+ void detachFromGLContext(); // API level 16
+
+ static bool registerNativeMethods();
+
+ quint64 index() const { return m_index; }
+Q_SIGNALS:
+ void frameAvailable();
+
+private:
+ void setOnFrameAvailableListener(const QJniObject &listener);
+
+ QJniObject m_surfaceTexture;
+ QJniObject m_surface;
+ QJniObject m_surfaceHolder;
+ const quint64 m_index = 0;
+};
+
+QT_END_NAMESPACE
+
+#endif // ANDROIDSURFACETEXTURE_H
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidsurfaceview.cpp b/src/plugins/multimedia/android/wrappers/jni/androidsurfaceview.cpp
new file mode 100644
index 000000000..dae9516c3
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidsurfaceview.cpp
@@ -0,0 +1,152 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "androidsurfaceview_p.h"
+
+#include <QtCore/qcoreapplication.h>
+#include <QtCore/qdebug.h>
+#include <QtCore/qlist.h>
+#include <QtCore/qmutex.h>
+#include <QtGui/qwindow.h>
+
+QT_BEGIN_NAMESPACE
+
+static const char QtSurfaceHolderCallbackClassName[] = "org/qtproject/qt/android/multimedia/QtSurfaceHolderCallback";
+typedef QList<AndroidSurfaceHolder *> SurfaceHolders;
+Q_GLOBAL_STATIC(SurfaceHolders, surfaceHolders)
+Q_GLOBAL_STATIC(QMutex, shLock)
+
+AndroidSurfaceHolder::AndroidSurfaceHolder(QJniObject object)
+ : m_surfaceHolder(object)
+ , m_surfaceCreated(false)
+{
+ if (!m_surfaceHolder.isValid())
+ return;
+
+ {
+ QMutexLocker locker(shLock());
+ surfaceHolders->append(this);
+ }
+
+ QJniObject callback(QtSurfaceHolderCallbackClassName, "(J)V", reinterpret_cast<jlong>(this));
+ m_surfaceHolder.callMethod<void>("addCallback",
+ "(Landroid/view/SurfaceHolder$Callback;)V",
+ callback.object());
+}
+
+AndroidSurfaceHolder::~AndroidSurfaceHolder()
+{
+ QMutexLocker locker(shLock());
+ const int i = surfaceHolders->indexOf(this);
+ if (Q_UNLIKELY(i == -1))
+ return;
+
+ surfaceHolders->remove(i);
+}
+
+jobject AndroidSurfaceHolder::surfaceHolder() const
+{
+ return m_surfaceHolder.object();
+}
+
+bool AndroidSurfaceHolder::isSurfaceCreated() const
+{
+ QMutexLocker locker(shLock());
+ return m_surfaceCreated;
+}
+
+void AndroidSurfaceHolder::handleSurfaceCreated(JNIEnv*, jobject, jlong id)
+{
+ QMutexLocker locker(shLock());
+ const int i = surfaceHolders->indexOf(reinterpret_cast<AndroidSurfaceHolder *>(id));
+ if (Q_UNLIKELY(i == -1))
+ return;
+
+ (*surfaceHolders)[i]->m_surfaceCreated = true;
+ Q_EMIT (*surfaceHolders)[i]->surfaceCreated();
+}
+
+void AndroidSurfaceHolder::handleSurfaceDestroyed(JNIEnv*, jobject, jlong id)
+{
+ QMutexLocker locker(shLock());
+ const int i = surfaceHolders->indexOf(reinterpret_cast<AndroidSurfaceHolder *>(id));
+ if (Q_UNLIKELY(i == -1))
+ return;
+
+ (*surfaceHolders)[i]->m_surfaceCreated = false;
+}
+
+bool AndroidSurfaceHolder::registerNativeMethods()
+{
+ static const JNINativeMethod methods[] = {
+ {"notifySurfaceCreated", "(J)V", (void *)AndroidSurfaceHolder::handleSurfaceCreated},
+ {"notifySurfaceDestroyed", "(J)V", (void *)AndroidSurfaceHolder::handleSurfaceDestroyed}
+ };
+
+ const int size = std::size(methods);
+ return QJniEnvironment().registerNativeMethods(QtSurfaceHolderCallbackClassName, methods, size);
+}
+
+AndroidSurfaceView::AndroidSurfaceView()
+ : m_window(0)
+ , m_surfaceHolder(0)
+ , m_pendingVisible(-1)
+{
+ QNativeInterface::QAndroidApplication::runOnAndroidMainThread([this] {
+ m_surfaceView = QJniObject("android/view/SurfaceView",
+ "(Landroid/content/Context;)V",
+ QNativeInterface::QAndroidApplication::context().object());
+ }).waitForFinished();
+
+ Q_ASSERT(m_surfaceView.isValid());
+
+ QJniObject holder = m_surfaceView.callObjectMethod("getHolder",
+ "()Landroid/view/SurfaceHolder;");
+ if (!holder.isValid()) {
+ m_surfaceView = QJniObject();
+ } else {
+ m_surfaceHolder = new AndroidSurfaceHolder(holder);
+ connect(m_surfaceHolder, &AndroidSurfaceHolder::surfaceCreated,
+ this, &AndroidSurfaceView::surfaceCreated);
+ { // Lock now to avoid a race with handleSurfaceCreated()
+ QMutexLocker locker(shLock());
+ m_window = QWindow::fromWinId(WId(m_surfaceView.object()));
+
+ if (m_pendingVisible != -1)
+ m_window->setVisible(m_pendingVisible);
+ if (m_pendingGeometry.isValid())
+ m_window->setGeometry(m_pendingGeometry);
+ }
+ }
+}
+
+AndroidSurfaceView::~AndroidSurfaceView()
+{
+ delete m_surfaceHolder;
+ delete m_window;
+}
+
+AndroidSurfaceHolder *AndroidSurfaceView::holder() const
+{
+ return m_surfaceHolder;
+}
+
+void AndroidSurfaceView::setVisible(bool v)
+{
+ if (m_window)
+ m_window->setVisible(v);
+ else
+ m_pendingVisible = int(v);
+}
+
+void AndroidSurfaceView::setGeometry(int x, int y, int width, int height)
+{
+ if (m_window)
+ m_window->setGeometry(x, y, width, height);
+ else
+ m_pendingGeometry = QRect(x, y, width, height);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_androidsurfaceview_p.cpp"
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidsurfaceview_p.h b/src/plugins/multimedia/android/wrappers/jni/androidsurfaceview_p.h
new file mode 100644
index 000000000..e6be60ef1
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidsurfaceview_p.h
@@ -0,0 +1,78 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef ANDROIDSURFACEVIEW_H
+#define ANDROIDSURFACEVIEW_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qjniobject.h>
+#include <qrect.h>
+#include <QtCore/qrunnable.h>
+#include <QtCore/qobject.h>
+
+QT_BEGIN_NAMESPACE
+
+class QWindow;
+
+class AndroidSurfaceHolder : public QObject
+{
+ Q_OBJECT
+public:
+ ~AndroidSurfaceHolder();
+
+ jobject surfaceHolder() const;
+ bool isSurfaceCreated() const;
+
+ static bool registerNativeMethods();
+
+Q_SIGNALS:
+ void surfaceCreated();
+
+private:
+ AndroidSurfaceHolder(QJniObject object);
+
+ static void handleSurfaceCreated(JNIEnv*, jobject, jlong id);
+ static void handleSurfaceDestroyed(JNIEnv*, jobject, jlong id);
+
+ QJniObject m_surfaceHolder;
+ bool m_surfaceCreated;
+
+ friend class AndroidSurfaceView;
+};
+
+class AndroidSurfaceView : public QObject
+{
+ Q_OBJECT
+public:
+ AndroidSurfaceView();
+ ~AndroidSurfaceView();
+
+ AndroidSurfaceHolder *holder() const;
+
+ void setVisible(bool v);
+ void setGeometry(int x, int y, int width, int height);
+
+Q_SIGNALS:
+ void surfaceCreated();
+
+private:
+ QJniObject m_surfaceView;
+ QWindow *m_window;
+ AndroidSurfaceHolder *m_surfaceHolder;
+ int m_pendingVisible;
+ QRect m_pendingGeometry;
+};
+
+QT_END_NAMESPACE
+
+#endif // ANDROIDSURFACEVIEW_H
diff --git a/src/plugins/multimedia/darwin/CMakeLists.txt b/src/plugins/multimedia/darwin/CMakeLists.txt
new file mode 100644
index 000000000..0bbc054eb
--- /dev/null
+++ b/src/plugins/multimedia/darwin/CMakeLists.txt
@@ -0,0 +1,70 @@
+# Copyright (C) 2022 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+
+qt_internal_find_apple_system_framework(FWCoreMedia CoreMedia) # special case
+qt_internal_find_apple_system_framework(FWCoreAudio CoreAudio) # special case
+qt_internal_find_apple_system_framework(FWAudioUnit AudioUnit) # special case
+qt_internal_find_apple_system_framework(FWVideoToolbox VideoToolbox) # special case
+qt_internal_find_apple_system_framework(FWAVFoundation AVFoundation) # special case
+
+qt_internal_add_plugin(QDarwinMediaPlugin
+ OUTPUT_NAME darwinmediaplugin
+ PLUGIN_TYPE multimedia
+ SOURCES
+ avfaudiodecoder.mm avfaudiodecoder_p.h
+ mediaplayer/avfdisplaylink.mm mediaplayer/avfdisplaylink_p.h
+ mediaplayer/avfmediaplayer.mm mediaplayer/avfmediaplayer_p.h
+ common/avfmetadata.mm common/avfmetadata_p.h
+ mediaplayer/avfvideorenderercontrol.mm mediaplayer/avfvideorenderercontrol_p.h
+ avfvideosink.mm avfvideosink_p.h
+ avfvideobuffer.mm avfvideobuffer_p.h
+ qavfhelpers.mm qavfhelpers_p.h
+ qdarwinformatsinfo.mm qdarwinformatsinfo_p.h
+ qdarwinintegration.mm qdarwinintegration_p.h
+ INCLUDE_DIRECTORIES
+ audio
+ camera
+ common
+ mediaplayer
+ LIBRARIES
+ Qt::MultimediaPrivate
+ Qt::CorePrivate
+ ${FWCoreMedia}
+ ${FWCoreAudio}
+ ${FWCoreVideo}
+ ${FWFoundation}
+ ${FWMetal}
+ ${FWQuartzCore}
+ ${FWAudioToolbox}
+ AVFoundation::AVFoundation
+)
+
+qt_internal_extend_target(QDarwinMediaPlugin CONDITION NOT TVOS
+ SOURCES
+ camera/qavfcamerabase.mm camera/qavfcamerabase_p.h
+ camera/avfcamera.mm camera/avfcamera_p.h
+ camera/avfcameradebug_p.h
+ camera/avfaudiopreviewdelegate.mm camera/avfaudiopreviewdelegate_p.h
+ camera/avfcamerarenderer.mm camera/avfcamerarenderer_p.h
+ camera/avfcameraservice.mm camera/avfcameraservice_p.h
+ camera/avfcamerasession.mm camera/avfcamerasession_p.h
+ camera/avfcamerautility.mm camera/avfcamerautility_p.h
+ camera/avfimagecapture.mm camera/avfimagecapture_p.h
+ camera/avfmediaassetwriter.mm camera/avfmediaassetwriter_p.h
+ camera/avfmediaencoder.mm camera/avfmediaencoder_p.h
+)
+
+qt_internal_extend_target(QDarwinMediaPlugin CONDITION MACOS
+ LIBRARIES
+ ${FWAppKit}
+ ${FWAudioUnit}
+ ${FWVideoToolbox}
+ ${FWApplicationServices}
+)
+
+qt_internal_extend_target(QDarwinMediaPlugin CONDITION IOS
+ LIBRARIES
+ ${FWCoreGraphics}
+ ${FWCoreVideo}
+)
diff --git a/src/plugins/multimedia/darwin/avfaudiodecoder.mm b/src/plugins/multimedia/darwin/avfaudiodecoder.mm
new file mode 100644
index 000000000..3191b7db0
--- /dev/null
+++ b/src/plugins/multimedia/darwin/avfaudiodecoder.mm
@@ -0,0 +1,544 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfaudiodecoder_p.h"
+
+#include <QtCore/qmutex.h>
+#include <QtCore/qiodevice.h>
+#include <QMimeDatabase>
+#include <QThread>
+#include "private/qcoreaudioutils_p.h"
+#include <QtCore/qloggingcategory.h>
+
+#include <AVFoundation/AVFoundation.h>
+
+QT_USE_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcAVFAudioDecoder, "qt.multimedia.darwin.AVFAudioDecoder")
+constexpr static int MAX_BUFFERS_IN_QUEUE = 5;
+
+QAudioBuffer handleNextSampleBuffer(CMSampleBufferRef sampleBuffer)
+{
+ if (!sampleBuffer)
+ return {};
+
+ // Check format
+ CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
+ if (!formatDescription)
+ return {};
+ const AudioStreamBasicDescription* const asbd = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription);
+ QAudioFormat qtFormat = CoreAudioUtils::toQAudioFormat(*asbd);
+ if (qtFormat.sampleFormat() == QAudioFormat::Unknown && asbd->mBitsPerChannel == 8)
+ qtFormat.setSampleFormat(QAudioFormat::UInt8);
+ if (!qtFormat.isValid())
+ return {};
+
+ // Get the required size to allocate to audioBufferList
+ size_t audioBufferListSize = 0;
+ OSStatus err = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer,
+ &audioBufferListSize,
+ NULL,
+ 0,
+ NULL,
+ NULL,
+ kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
+ NULL);
+ if (err != noErr)
+ return {};
+
+ CMBlockBufferRef blockBuffer = NULL;
+ AudioBufferList* audioBufferList = (AudioBufferList*) malloc(audioBufferListSize);
+ // This ensures the buffers placed in audioBufferList are contiguous
+ err = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer,
+ NULL,
+ audioBufferList,
+ audioBufferListSize,
+ NULL,
+ NULL,
+ kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
+ &blockBuffer);
+ if (err != noErr) {
+ free(audioBufferList);
+ return {};
+ }
+
+ QByteArray abuf;
+ for (UInt32 i = 0; i < audioBufferList->mNumberBuffers; i++)
+ {
+ AudioBuffer audioBuffer = audioBufferList->mBuffers[i];
+ abuf.push_back(QByteArray((const char*)audioBuffer.mData, audioBuffer.mDataByteSize));
+ }
+
+ free(audioBufferList);
+ CFRelease(blockBuffer);
+
+ CMTime sampleStartTime = (CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
+ float sampleStartTimeSecs = CMTimeGetSeconds(sampleStartTime);
+
+ return QAudioBuffer(abuf, qtFormat, qint64(sampleStartTimeSecs * 1000000));
+}
+
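+// In short, handleNextSampleBuffer() converts one CMSampleBuffer into a
+// QAudioBuffer: it derives a QAudioFormat from the stream description, copies
+// the contiguous AudioBufferList payload into a QByteArray, and stamps the
+// buffer with the presentation time in microseconds. A rough call-site sketch,
+// assuming a sample buffer obtained from an AVAssetReaderTrackOutput:
+//
+//     CMSampleBufferRef sample = [readerOutput copyNextSampleBuffer];
+//     QAudioBuffer buffer = handleNextSampleBuffer(sample);
+//     CFRelease(sample);
+//
+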
+@interface AVFResourceReaderDelegate : NSObject <AVAssetResourceLoaderDelegate> {
+ AVFAudioDecoder *m_decoder;
+ QMutex m_mutex;
+}
+
+- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader
+ shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
+
+@end
+
+@implementation AVFResourceReaderDelegate
+
+- (id)initWithDecoder:(AVFAudioDecoder *)decoder
+{
+ if (!(self = [super init]))
+ return nil;
+
+ m_decoder = decoder;
+
+ return self;
+}
+
+-(BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader
+ shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
+{
+ Q_UNUSED(resourceLoader);
+
+ if (![loadingRequest.request.URL.scheme isEqualToString:@"iodevice"])
+ return NO;
+
+ QMutexLocker locker(&m_mutex);
+
+ QIODevice *device = m_decoder->sourceDevice();
+ if (!device)
+ return NO;
+
+ device->seek(loadingRequest.dataRequest.requestedOffset);
+ if (loadingRequest.contentInformationRequest) {
+ loadingRequest.contentInformationRequest.contentLength = device->size();
+ loadingRequest.contentInformationRequest.byteRangeAccessSupported = YES;
+ }
+
+ if (loadingRequest.dataRequest) {
+ NSInteger requestedLength = loadingRequest.dataRequest.requestedLength;
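+        // Serve the request in chunks of at most 32 KiB. Note that the
+        // variable-length stack array below relies on a compiler extension
+        // (accepted by clang for Objective-C++).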
+ int maxBytes = qMin(32 * 1024, int(requestedLength));
+ char buffer[maxBytes];
+ NSInteger submitted = 0;
+ while (submitted < requestedLength) {
+ qint64 len = device->read(buffer, maxBytes);
+ if (len < 1)
+ break;
+
+ [loadingRequest.dataRequest respondWithData:[NSData dataWithBytes:buffer length:len]];
+ submitted += len;
+ }
+
+        // Finish loading even if not all of the requested bytes were submitted.
+ [loadingRequest finishLoading];
+ }
+
+ return YES;
+}
+
+@end
+
+namespace {
+
+NSDictionary *av_audio_settings_for_format(const QAudioFormat &format)
+{
+ float sampleRate = format.sampleRate();
+ int nChannels = format.channelCount();
+ int sampleSize = format.bytesPerSample() * 8;
+ BOOL isFloat = format.sampleFormat() == QAudioFormat::Float;
+
+ NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
+ [NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
+ [NSNumber numberWithFloat:sampleRate], AVSampleRateKey,
+ [NSNumber numberWithInt:nChannels], AVNumberOfChannelsKey,
+ [NSNumber numberWithInt:sampleSize], AVLinearPCMBitDepthKey,
+ [NSNumber numberWithBool:isFloat], AVLinearPCMIsFloatKey,
+ [NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
+ [NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
+ nil];
+
+ return audioSettings;
+}
+
+QAudioFormat qt_format_for_audio_track(AVAssetTrack *track)
+{
+ QAudioFormat format;
+ CMFormatDescriptionRef desc = (__bridge CMFormatDescriptionRef)track.formatDescriptions[0];
+ const AudioStreamBasicDescription* const asbd =
+ CMAudioFormatDescriptionGetStreamBasicDescription(desc);
+ format = CoreAudioUtils::toQAudioFormat(*asbd);
+ // AudioStreamBasicDescription's mBitsPerChannel is 0 for compressed formats
+ // In this case set default Int16 sample format
+ if (asbd->mBitsPerChannel == 0)
+ format.setSampleFormat(QAudioFormat::Int16);
+ return format;
+}
+
+}
+
+struct AVFAudioDecoder::DecodingContext
+{
+ AVAssetReader *m_reader = nullptr;
+ AVAssetReaderTrackOutput *m_readerOutput = nullptr;
+
+ ~DecodingContext()
+ {
+ if (m_reader) {
+ [m_reader cancelReading];
+ [m_reader release];
+ }
+
+ [m_readerOutput release];
+ }
+};
+
+AVFAudioDecoder::AVFAudioDecoder(QAudioDecoder *parent)
+ : QPlatformAudioDecoder(parent)
+{
+ m_readingQueue = dispatch_queue_create("reader_queue", DISPATCH_QUEUE_SERIAL);
+ m_decodingQueue = dispatch_queue_create("decoder_queue", DISPATCH_QUEUE_SERIAL);
+
+ m_readerDelegate = [[AVFResourceReaderDelegate alloc] initWithDecoder:this];
+}
+
+AVFAudioDecoder::~AVFAudioDecoder()
+{
+ stop();
+
+ [m_readerDelegate release];
+ [m_asset release];
+
+ dispatch_release(m_readingQueue);
+ dispatch_release(m_decodingQueue);
+}
+
+QUrl AVFAudioDecoder::source() const
+{
+ return m_source;
+}
+
+void AVFAudioDecoder::setSource(const QUrl &fileName)
+{
+ if (!m_device && m_source == fileName)
+ return;
+
+ stop();
+ m_device = nullptr;
+ [m_asset release];
+ m_asset = nil;
+
+ m_source = fileName;
+
+ if (!m_source.isEmpty()) {
+ NSURL *nsURL = m_source.toNSURL();
+ m_asset = [[AVURLAsset alloc] initWithURL:nsURL options:nil];
+ }
+
+ sourceChanged();
+}
+
+QIODevice *AVFAudioDecoder::sourceDevice() const
+{
+ return m_device;
+}
+
+void AVFAudioDecoder::setSourceDevice(QIODevice *device)
+{
+ if (m_device == device && m_source.isEmpty())
+ return;
+
+ stop();
+ m_source.clear();
+ [m_asset release];
+ m_asset = nil;
+
+ m_device = device;
+
+ if (m_device) {
+ const QString ext = QMimeDatabase().mimeTypeForData(m_device).preferredSuffix();
+ const QString url = "iodevice:///iodevice." + ext;
+ NSString *urlString = url.toNSString();
+ NSURL *nsURL = [NSURL URLWithString:urlString];
+
+ m_asset = [[AVURLAsset alloc] initWithURL:nsURL options:nil];
+
+        // Use the decoding queue instead of the reading queue to avoid sporadic
+        // stalls; the decoding queue is empty at this point anyway.
+ [m_asset.resourceLoader setDelegate:m_readerDelegate queue:m_decodingQueue];
+ }
+
+ sourceChanged();
+}
+
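+// The iodevice:/// scheme above is a private convention: AVURLAsset cannot
+// read from a QIODevice directly, so the asset is given a synthetic URL and
+// AVFResourceReaderDelegate serves the bytes from the device on demand. A
+// usage sketch from the public API side (the file name is hypothetical):
+//
+//     QFile file(QStringLiteral("track.m4a"));
+//     file.open(QIODevice::ReadOnly);
+//     QAudioDecoder decoder;
+//     decoder.setSourceDevice(&file); // routed to AVFAudioDecoder::setSourceDevice()
+//
+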
+void AVFAudioDecoder::start()
+{
+ if (m_decodingContext) {
+        qCDebug(qLcAVFAudioDecoder()) << "AVFAudioDecoder has already been started";
+ return;
+ }
+
+ positionChanged(-1);
+
+ if (m_device && (!m_device->isOpen() || !m_device->isReadable())) {
+ processInvalidMedia(QAudioDecoder::ResourceError, tr("Unable to read from specified device"));
+ return;
+ }
+
+ m_decodingContext = std::make_shared<DecodingContext>();
+ std::weak_ptr<DecodingContext> weakContext(m_decodingContext);
+
+ auto handleLoadingResult = [=]() {
+ NSError *error = nil;
+ AVKeyValueStatus status = [m_asset statusOfValueForKey:@"tracks" error:&error];
+
+ if (status == AVKeyValueStatusFailed) {
+ if (error.domain == NSURLErrorDomain)
+ processInvalidMedia(QAudioDecoder::ResourceError,
+ QString::fromNSString(error.localizedDescription));
+ else
+ processInvalidMedia(QAudioDecoder::FormatError,
+ tr("Could not load media source's tracks"));
+ } else if (status != AVKeyValueStatusLoaded) {
+ qWarning() << "Unexpected AVKeyValueStatus:" << status;
+ stop();
+        } else {
+ initAssetReader();
+ }
+ };
+
+ [m_asset loadValuesAsynchronouslyForKeys:@[ @"tracks" ]
+ completionHandler:[=]() {
+ invokeWithDecodingContext(weakContext, handleLoadingResult);
+ }];
+}
+
+void AVFAudioDecoder::decBuffersCounter(uint val)
+{
+ if (val) {
+ QMutexLocker locker(&m_buffersCounterMutex);
+ m_buffersCounter -= val;
+ }
+
+ Q_ASSERT(m_buffersCounter >= 0);
+
+ m_buffersCounterCondition.wakeAll();
+}
+
+void AVFAudioDecoder::stop()
+{
+ qCDebug(qLcAVFAudioDecoder()) << "stop decoding";
+
+ m_decodingContext.reset();
+ decBuffersCounter(m_cachedBuffers.size());
+ m_cachedBuffers.clear();
+
+ bufferAvailableChanged(false);
+ positionChanged(-1);
+ durationChanged(-1);
+
+ onFinished();
+}
+
+QAudioFormat AVFAudioDecoder::audioFormat() const
+{
+ return m_format;
+}
+
+void AVFAudioDecoder::setAudioFormat(const QAudioFormat &format)
+{
+ if (m_format != format) {
+ m_format = format;
+ formatChanged(m_format);
+ }
+}
+
+QAudioBuffer AVFAudioDecoder::read()
+{
+ if (m_cachedBuffers.empty())
+ return QAudioBuffer();
+
+ Q_ASSERT(m_cachedBuffers.size() > 0);
+ QAudioBuffer buffer = m_cachedBuffers.dequeue();
+ decBuffersCounter(1);
+
+ positionChanged(buffer.startTime() / 1000);
+ bufferAvailableChanged(!m_cachedBuffers.empty());
+ return buffer;
+}
+
+void AVFAudioDecoder::processInvalidMedia(QAudioDecoder::Error errorCode,
+ const QString &errorString)
+{
+ qCDebug(qLcAVFAudioDecoder()) << "Invalid media. Error code:" << errorCode
+ << "Description:" << errorString;
+
+ Q_ASSERT(QThread::currentThread() == thread());
+
+ error(int(errorCode), errorString);
+
+    // TODO: maybe check whether the decoding context was changed by a user
+    // action (e.g. a restart) triggered from the emitted error signal, and
+    // handle that case (skip stop(), print a warning, etc.).
+
+ stop();
+}
+
+void AVFAudioDecoder::onFinished()
+{
+ m_decodingContext.reset();
+
+ if (isDecoding())
+ finished();
+}
+
+void AVFAudioDecoder::initAssetReader()
+{
+ qCDebug(qLcAVFAudioDecoder()) << "Init asset reader";
+
+ Q_ASSERT(m_asset);
+ Q_ASSERT(QThread::currentThread() == thread());
+
+ NSArray<AVAssetTrack *> *tracks = [m_asset tracksWithMediaType:AVMediaTypeAudio];
+ if (!tracks.count) {
+ processInvalidMedia(QAudioDecoder::FormatError, tr("No audio tracks found"));
+ return;
+ }
+
+ AVAssetTrack *track = [tracks objectAtIndex:0];
+ QAudioFormat format = m_format.isValid() ? m_format : qt_format_for_audio_track(track);
+ if (!format.isValid()) {
+ processInvalidMedia(QAudioDecoder::FormatError, tr("Unsupported source format"));
+ return;
+ }
+
+ durationChanged(CMTimeGetSeconds(track.timeRange.duration) * 1000);
+
+ NSError *error = nil;
+ NSDictionary *audioSettings = av_audio_settings_for_format(format);
+
+ AVAssetReaderTrackOutput *readerOutput =
+ [[AVAssetReaderTrackOutput alloc] initWithTrack:track outputSettings:audioSettings];
+ AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:m_asset error:&error];
+ if (error) {
+ processInvalidMedia(QAudioDecoder::ResourceError, QString::fromNSString(error.localizedDescription));
+ return;
+ }
+ if (![reader canAddOutput:readerOutput]) {
+ processInvalidMedia(QAudioDecoder::ResourceError, tr("Failed to add asset reader output"));
+ return;
+ }
+
+ [reader addOutput:readerOutput];
+
+ Q_ASSERT(m_decodingContext);
+ m_decodingContext->m_reader = reader;
+ m_decodingContext->m_readerOutput = readerOutput;
+
+ startReading();
+}
+
+void AVFAudioDecoder::startReading()
+{
+ Q_ASSERT(m_decodingContext);
+ Q_ASSERT(m_decodingContext->m_reader);
+ Q_ASSERT(QThread::currentThread() == thread());
+
+ // Prepares the receiver for obtaining sample buffers from the asset.
+ if (![m_decodingContext->m_reader startReading]) {
+ processInvalidMedia(QAudioDecoder::ResourceError, tr("Could not start reading"));
+ return;
+ }
+
+ setIsDecoding(true);
+
+ std::weak_ptr<DecodingContext> weakContext = m_decodingContext;
+
+ // Since copyNextSampleBuffer is synchronous, submit it to an async dispatch queue
+ // to run in a separate thread. Call the handleNextSampleBuffer "callback" on another
+    // thread whenever a new audio sample has been read.
+ auto copyNextSampleBuffer = [=]() {
+ auto decodingContext = weakContext.lock();
+ if (!decodingContext)
+ return false;
+
+ CMSampleBufferRef sampleBuffer = [decodingContext->m_readerOutput copyNextSampleBuffer];
+ if (!sampleBuffer)
+ return false;
+
+ dispatch_async(m_decodingQueue, [=]() {
+ if (!weakContext.expired() && CMSampleBufferDataIsReady(sampleBuffer)) {
+ auto audioBuffer = handleNextSampleBuffer(sampleBuffer);
+
+ if (audioBuffer.isValid())
+ invokeWithDecodingContext(weakContext,
+ [=]() { handleNewAudioBuffer(audioBuffer); });
+ }
+
+ CFRelease(sampleBuffer);
+ });
+
+ return true;
+ };
+
+ dispatch_async(m_readingQueue, [=]() {
+ qCDebug(qLcAVFAudioDecoder()) << "start reading thread";
+
+ do {
+            // Note: waiting here doesn't guarantee strict control of the counter.
+            // However, that doesn't affect the logic: the reading flow works fine
+            // even if the counter occasionally exceeds the maximum value.
+ waitUntilBuffersCounterLessMax();
+ } while (copyNextSampleBuffer());
+
+ // TODO: check m_reader.status == AVAssetReaderStatusFailed
+ invokeWithDecodingContext(weakContext, [this]() { onFinished(); });
+ });
+}
+
+void AVFAudioDecoder::waitUntilBuffersCounterLessMax()
+{
+ if (m_buffersCounter >= MAX_BUFFERS_IN_QUEUE) {
+        // The unlocked pre-check avoids taking the mutex when it isn't needed.
+
+ QMutexLocker locker(&m_buffersCounterMutex);
+
+ while (m_buffersCounter >= MAX_BUFFERS_IN_QUEUE)
+ m_buffersCounterCondition.wait(&m_buffersCounterMutex);
+ }
+}
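+// waitUntilBuffersCounterLessMax() and decBuffersCounter() implement simple
+// backpressure: the reading thread blocks once MAX_BUFFERS_IN_QUEUE decoded
+// buffers are pending, and the consumer side (read()/stop()) wakes it up
+// again through the wait condition.
+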
+
+void AVFAudioDecoder::handleNewAudioBuffer(QAudioBuffer buffer)
+{
+ m_cachedBuffers.enqueue(buffer);
+ ++m_buffersCounter;
+
+ Q_ASSERT(m_cachedBuffers.size() == m_buffersCounter);
+
+ bufferAvailableChanged(true);
+ bufferReady();
+}
+
+/*
+ * This method calls the passed functor on the AVFAudioDecoder's thread and guarantees
+ * that the passed decoding context has not expired. In other words, it prevents any
+ * callbacks from running after the decoder has been stopped.
+ */
+template<typename F>
+void AVFAudioDecoder::invokeWithDecodingContext(std::weak_ptr<DecodingContext> weakContext, F &&f)
+{
+ if (!weakContext.expired())
+ QMetaObject::invokeMethod(this, [=]() {
+        // Strong check: compare with the actual decoding context. Otherwise,
+        // the context could still be temporarily locked by one of the
+        // dispatch queues.
+ if (auto context = weakContext.lock(); context && context == m_decodingContext)
+ f();
+ });
+}
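+
+// For example, the reading flow above uses it to deliver decoded buffers and
+// the finished notification back on the decoder's thread:
+//
+//     invokeWithDecodingContext(weakContext,
+//                               [=]() { handleNewAudioBuffer(audioBuffer); });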
+
+#include "moc_avfaudiodecoder_p.cpp"
diff --git a/src/plugins/multimedia/darwin/avfaudiodecoder_p.h b/src/plugins/multimedia/darwin/avfaudiodecoder_p.h
new file mode 100644
index 000000000..81ef3f49e
--- /dev/null
+++ b/src/plugins/multimedia/darwin/avfaudiodecoder_p.h
@@ -0,0 +1,99 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFAUDIODECODER_H
+#define AVFAUDIODECODER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/private/qtmultimediaglobal_p.h>
+#include <QObject>
+#include <QtCore/qurl.h>
+#include <QWaitCondition>
+#include <QMutex>
+#include <QQueue>
+
+#include "private/qplatformaudiodecoder_p.h"
+#include "qaudiodecoder.h"
+
+#include <dispatch/dispatch.h>
+
+Q_FORWARD_DECLARE_OBJC_CLASS(AVURLAsset);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVAssetReader);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVAssetReaderTrackOutput);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVFResourceReaderDelegate);
+
+QT_BEGIN_NAMESPACE
+
+class AVFAudioDecoder : public QPlatformAudioDecoder
+{
+ Q_OBJECT
+
+ struct DecodingContext;
+
+public:
+ AVFAudioDecoder(QAudioDecoder *parent);
+ virtual ~AVFAudioDecoder();
+
+ QUrl source() const override;
+ void setSource(const QUrl &fileName) override;
+
+ QIODevice *sourceDevice() const override;
+ void setSourceDevice(QIODevice *device) override;
+
+ void start() override;
+ void stop() override;
+
+ QAudioFormat audioFormat() const override;
+ void setAudioFormat(const QAudioFormat &format) override;
+
+ QAudioBuffer read() override;
+
+private:
+ void handleNewAudioBuffer(QAudioBuffer);
+ void startReading();
+
+ void processInvalidMedia(QAudioDecoder::Error errorCode, const QString& errorString);
+ void initAssetReader();
+ void onFinished();
+
+ void waitUntilBuffersCounterLessMax();
+
+ void decBuffersCounter(uint val);
+
+ template<typename F>
+ void invokeWithDecodingContext(std::weak_ptr<DecodingContext> weakContext, F &&f);
+
+private:
+ QUrl m_source;
+ QIODevice *m_device = nullptr;
+ QAudioFormat m_format;
+
+    // Use a separate counter instead of the buffer queue's size to ensure
+    // atomic access and to keep the mutex-locked sections short
+ std::atomic<int> m_buffersCounter = 0;
+ QQueue<QAudioBuffer> m_cachedBuffers;
+
+ AVURLAsset *m_asset = nullptr;
+
+ AVFResourceReaderDelegate *m_readerDelegate = nullptr;
+ dispatch_queue_t m_readingQueue;
+ dispatch_queue_t m_decodingQueue;
+
+ std::shared_ptr<DecodingContext> m_decodingContext;
+ QMutex m_buffersCounterMutex;
+ QWaitCondition m_buffersCounterCondition;
+};
+
+QT_END_NAMESPACE
+
+#endif // AVFAUDIODECODER_H
diff --git a/src/plugins/multimedia/darwin/avfvideobuffer.mm b/src/plugins/multimedia/darwin/avfvideobuffer.mm
new file mode 100644
index 000000000..57ec89ae7
--- /dev/null
+++ b/src/plugins/multimedia/darwin/avfvideobuffer.mm
@@ -0,0 +1,207 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfvideobuffer_p.h"
+#include <rhi/qrhi.h>
+#include <CoreVideo/CVMetalTexture.h>
+#include <CoreVideo/CVMetalTextureCache.h>
+#include <QtGui/qopenglcontext.h>
+
+#include <private/qvideotexturehelper_p.h>
+#include "qavfhelpers_p.h"
+
+#import <AVFoundation/AVFoundation.h>
+#import <Metal/Metal.h>
+
+QT_USE_NAMESPACE
+
+AVFVideoBuffer::AVFVideoBuffer(AVFVideoSinkInterface *sink, CVImageBufferRef buffer)
+ : QHwVideoBuffer(sink->rhi() ? QVideoFrame::RhiTextureHandle : QVideoFrame::NoHandle,
+ sink->rhi()),
+ sink(sink),
+ m_buffer(buffer)
+{
+ CVPixelBufferRetain(m_buffer);
+ const bool rhiIsOpenGL = sink && sink->rhi() && sink->rhi()->backend() == QRhi::OpenGLES2;
+ m_format = QAVFHelpers::videoFormatForImageBuffer(m_buffer, rhiIsOpenGL);
+}
+
+AVFVideoBuffer::~AVFVideoBuffer()
+{
+ AVFVideoBuffer::unmap();
+ for (int i = 0; i < 3; ++i)
+ if (cvMetalTexture[i])
+ CFRelease(cvMetalTexture[i]);
+#if defined(Q_OS_MACOS)
+ if (cvOpenGLTexture)
+ CVOpenGLTextureRelease(cvOpenGLTexture);
+#elif defined(Q_OS_IOS)
+ if (cvOpenGLESTexture)
+ CFRelease(cvOpenGLESTexture);
+#endif
+ CVPixelBufferRelease(m_buffer);
+}
+
+AVFVideoBuffer::MapData AVFVideoBuffer::map(QtVideo::MapMode mode)
+{
+ MapData mapData;
+
+ if (m_mode == QtVideo::MapMode::NotMapped) {
+ CVPixelBufferLockBaseAddress(m_buffer, mode == QtVideo::MapMode::ReadOnly
+ ? kCVPixelBufferLock_ReadOnly
+ : 0);
+ m_mode = mode;
+ }
+
+ mapData.planeCount = CVPixelBufferGetPlaneCount(m_buffer);
+ Q_ASSERT(mapData.planeCount <= 3);
+
+ if (!mapData.planeCount) {
+ // single plane
+ mapData.bytesPerLine[0] = CVPixelBufferGetBytesPerRow(m_buffer);
+ mapData.data[0] = static_cast<uchar*>(CVPixelBufferGetBaseAddress(m_buffer));
+ mapData.dataSize[0] = CVPixelBufferGetDataSize(m_buffer);
+ mapData.planeCount = mapData.data[0] ? 1 : 0;
+ return mapData;
+ }
+
+ // For a bi-planar or tri-planar format we have to set the parameters correctly:
+ for (int i = 0; i < mapData.planeCount; ++i) {
+ mapData.bytesPerLine[i] = CVPixelBufferGetBytesPerRowOfPlane(m_buffer, i);
+ mapData.dataSize[i] = mapData.bytesPerLine[i]*CVPixelBufferGetHeightOfPlane(m_buffer, i);
+ mapData.data[i] = static_cast<uchar*>(CVPixelBufferGetBaseAddressOfPlane(m_buffer, i));
+ }
+
+ return mapData;
+}
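+
+// Illustrative consumer view (a sketch, not part of this patch, assuming the
+// Qt 6.7+ QtVideo::MapMode overload): the mapping above is what backs
+// QVideoFrame::map() for frames carrying this buffer:
+//
+//     QVideoFrame frame = ...; // e.g. delivered by a camera or video sink
+//     if (frame.map(QtVideo::MapMode::ReadOnly)) {
+//         const uchar *plane0 = frame.bits(0);      // first plane (Y for NV12)
+//         const int stride = frame.bytesPerLine(0); // matches bytesPerLine[0]
+//         // ... read pixels ...
+//         frame.unmap();
+//     }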
+
+void AVFVideoBuffer::unmap()
+{
+ if (m_mode != QtVideo::MapMode::NotMapped) {
+ CVPixelBufferUnlockBaseAddress(m_buffer, m_mode == QtVideo::MapMode::ReadOnly
+ ? kCVPixelBufferLock_ReadOnly
+ : 0);
+ m_mode = QtVideo::MapMode::NotMapped;
+ }
+}
+
+static MTLPixelFormat rhiTextureFormatToMetalFormat(QRhiTexture::Format f)
+{
+ switch (f) {
+ default:
+ case QRhiTexture::UnknownFormat:
+ return MTLPixelFormatInvalid;
+ case QRhiTexture::RGBA8:
+ return MTLPixelFormatRGBA8Unorm;
+ case QRhiTexture::BGRA8:
+ return MTLPixelFormatBGRA8Unorm;
+ case QRhiTexture::R8:
+ return MTLPixelFormatR8Unorm;
+ case QRhiTexture::RG8:
+ return MTLPixelFormatRG8Unorm;
+ case QRhiTexture::R16:
+ return MTLPixelFormatR16Unorm;
+ case QRhiTexture::RG16:
+ return MTLPixelFormatRG16Unorm;
+
+ case QRhiTexture::RGBA16F:
+ return MTLPixelFormatRGBA16Float;
+ case QRhiTexture::RGBA32F:
+ return MTLPixelFormatRGBA32Float;
+ case QRhiTexture::R16F:
+ return MTLPixelFormatR16Float;
+ case QRhiTexture::R32F:
+ return MTLPixelFormatR32Float;
+ }
+}
+
+
+quint64 AVFVideoBuffer::textureHandle(QRhi *, int plane) const
+{
+ auto *textureDescription = QVideoTextureHelper::textureDescription(m_format.pixelFormat());
+ int bufferPlanes = CVPixelBufferGetPlaneCount(m_buffer);
+// qDebug() << "texture handle" << plane << m_rhi << (m_rhi->backend() == QRhi::Metal) << bufferPlanes;
+ if (plane > 0 && plane >= bufferPlanes)
+ return 0;
+ if (!m_rhi)
+ return 0;
+ if (m_rhi->backend() == QRhi::Metal) {
+ if (!cvMetalTexture[plane]) {
+ size_t width = CVPixelBufferGetWidth(m_buffer);
+ size_t height = CVPixelBufferGetHeight(m_buffer);
+ width = textureDescription->widthForPlane(width, plane);
+ height = textureDescription->heightForPlane(height, plane);
+
+ // Create a CoreVideo pixel buffer backed Metal texture image from the texture cache.
+ QMutexLocker locker(sink->textureCacheMutex());
+ if (!metalCache && sink->cvMetalTextureCache)
+ metalCache = CVMetalTextureCacheRef(CFRetain(sink->cvMetalTextureCache));
+ if (!metalCache) {
+ qWarning("cannot create texture, Metal texture cache was released?");
+ return {};
+ }
+ auto ret = CVMetalTextureCacheCreateTextureFromImage(
+ kCFAllocatorDefault,
+ metalCache,
+ m_buffer, nil,
+ rhiTextureFormatToMetalFormat(textureDescription->textureFormat[plane]),
+ width, height,
+ plane,
+ &cvMetalTexture[plane]);
+
+ if (ret != kCVReturnSuccess)
+ qWarning() << "texture creation failed" << ret;
+ }
+
+ // Get a Metal texture using the CoreVideo Metal texture reference.
+// qDebug() << " -> " << quint64(CVMetalTextureGetTexture(cvMetalTexture[plane]));
+ return cvMetalTexture[plane] ? quint64(CVMetalTextureGetTexture(cvMetalTexture[plane])) : 0;
+ } else if (m_rhi->backend() == QRhi::OpenGLES2) {
+#if QT_CONFIG(opengl)
+#ifdef Q_OS_MACOS
+ CVOpenGLTextureCacheFlush(sink->cvOpenGLTextureCache, 0);
+ // Create a CVPixelBuffer-backed OpenGL texture image from the texture cache.
+ const CVReturn cvret = CVOpenGLTextureCacheCreateTextureFromImage(
+ kCFAllocatorDefault,
+ sink->cvOpenGLTextureCache,
+ m_buffer,
+ nil,
+ &cvOpenGLTexture);
+ if (cvret != kCVReturnSuccess)
+ qWarning() << "OpenGL texture creation failed" << cvret;
+
+ Q_ASSERT(CVOpenGLTextureGetTarget(cvOpenGLTexture) == GL_TEXTURE_RECTANGLE);
+ // Get an OpenGL texture name from the CVPixelBuffer-backed OpenGL texture image.
+ return CVOpenGLTextureGetName(cvOpenGLTexture);
+#endif
+#ifdef Q_OS_IOS
+ CVOpenGLESTextureCacheFlush(sink->cvOpenGLESTextureCache, 0);
+ // Create a CVPixelBuffer-backed OpenGL texture image from the texture cache.
+ const CVReturn cvret = CVOpenGLESTextureCacheCreateTextureFromImage(
+ kCFAllocatorDefault,
+ sink->cvOpenGLESTextureCache,
+ m_buffer,
+ nil,
+ GL_TEXTURE_2D,
+ GL_RGBA,
+ CVPixelBufferGetWidth(m_buffer),
+ CVPixelBufferGetHeight(m_buffer),
+ GL_RGBA,
+ GL_UNSIGNED_BYTE,
+ 0,
+ &cvOpenGLESTexture);
+ if (cvret != kCVReturnSuccess)
+ qWarning() << "OpenGL ES texture creation failed" << cvret;
+
+ // Get an OpenGL texture name from the CVPixelBuffer-backed OpenGL texture image.
+ return CVOpenGLESTextureGetName(cvOpenGLESTexture);
+#endif
+#endif
+ }
+ return 0;
+}
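+
+// Illustrative sketch (not part of this patch): a QRhi-based consumer could
+// wrap the returned native handle, e.g. with the Metal backend ("planeSize"
+// is a hypothetical QSize for the requested plane):
+//
+//     QRhiTexture *tex = rhi->newTexture(QRhiTexture::BGRA8, planeSize, 1);
+//     tex->createFrom({ buffer->textureHandle(rhi, /*plane=*/0), /*layout=*/0 });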
diff --git a/src/plugins/multimedia/darwin/avfvideobuffer_p.h b/src/plugins/multimedia/darwin/avfvideobuffer_p.h
new file mode 100644
index 000000000..f70961c15
--- /dev/null
+++ b/src/plugins/multimedia/darwin/avfvideobuffer_p.h
@@ -0,0 +1,64 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFVIDEOBUFFER_H
+#define AVFVIDEOBUFFER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qhwvideobuffer_p.h>
+#include <private/qcore_mac_p.h>
+
+#include <QtCore/qobject.h>
+#include <QtCore/qmutex.h>
+#include <avfvideosink_p.h>
+
+#include <CoreVideo/CVImageBuffer.h>
+
+#import "Metal/Metal.h"
+#import "MetalKit/MetalKit.h"
+
+QT_BEGIN_NAMESPACE
+
+struct AVFMetalTexture;
+class AVFVideoBuffer : public QHwVideoBuffer
+{
+public:
+ AVFVideoBuffer(AVFVideoSinkInterface *sink, CVImageBufferRef buffer);
+ ~AVFVideoBuffer();
+
+ MapData map(QtVideo::MapMode mode);
+ void unmap();
+
+ virtual quint64 textureHandle(QRhi *, int plane) const;
+
+ QVideoFrameFormat videoFormat() const { return m_format; }
+
+private:
+ AVFVideoSinkInterface *sink = nullptr;
+
+ mutable CVMetalTextureRef cvMetalTexture[3] = {};
+ mutable QCFType<CVMetalTextureCacheRef> metalCache;
+#if defined(Q_OS_MACOS)
+ mutable CVOpenGLTextureRef cvOpenGLTexture = nullptr;
+#elif defined(Q_OS_IOS)
+ mutable CVOpenGLESTextureRef cvOpenGLESTexture = nullptr;
+#endif
+
+ CVImageBufferRef m_buffer = nullptr;
+ QtVideo::MapMode m_mode = QtVideo::MapMode::NotMapped;
+ QVideoFrameFormat m_format;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/avfvideosink.mm b/src/plugins/multimedia/darwin/avfvideosink.mm
new file mode 100644
index 000000000..f4c8bdb2e
--- /dev/null
+++ b/src/plugins/multimedia/darwin/avfvideosink.mm
@@ -0,0 +1,228 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfvideosink_p.h"
+
+#include <rhi/qrhi.h>
+#include <QtGui/qopenglcontext.h>
+
+#include <AVFoundation/AVFoundation.h>
+#import <QuartzCore/CATransaction.h>
+
+#if __has_include(<AppKit/AppKit.h>)
+#include <AppKit/AppKit.h>
+#endif
+
+#if __has_include(<UIKit/UIKit.h>)
+#include <UIKit/UIKit.h>
+#endif
+
+QT_USE_NAMESPACE
+
+AVFVideoSink::AVFVideoSink(QVideoSink *parent)
+ : QPlatformVideoSink(parent)
+{
+}
+
+AVFVideoSink::~AVFVideoSink()
+{
+}
+
+void AVFVideoSink::setRhi(QRhi *rhi)
+{
+ if (m_rhi == rhi)
+ return;
+ m_rhi = rhi;
+ if (m_interface)
+ m_interface->setRhi(rhi);
+}
+
+void AVFVideoSink::setNativeSize(QSize size)
+{
+ if (size == nativeSize())
+ return;
+ QPlatformVideoSink::setNativeSize(size);
+ if (m_interface)
+ m_interface->nativeSizeChanged();
+}
+
+void AVFVideoSink::setVideoSinkInterface(AVFVideoSinkInterface *interface)
+{
+ m_interface = interface;
+ if (m_interface)
+ m_interface->setRhi(m_rhi);
+}
+
+AVFVideoSinkInterface::~AVFVideoSinkInterface()
+{
+ if (m_layer)
+ [m_layer release];
+ if (m_outputSettings)
+ [m_outputSettings release];
+ freeTextureCaches();
+}
+
+void AVFVideoSinkInterface::freeTextureCaches()
+{
+ if (cvMetalTextureCache)
+ CFRelease(cvMetalTextureCache);
+ cvMetalTextureCache = nullptr;
+#if defined(Q_OS_MACOS)
+ if (cvOpenGLTextureCache)
+ CFRelease(cvOpenGLTextureCache);
+ cvOpenGLTextureCache = nullptr;
+#elif defined(Q_OS_IOS)
+ if (cvOpenGLESTextureCache)
+ CFRelease(cvOpenGLESTextureCache);
+ cvOpenGLESTextureCache = nullptr;
+#endif
+}
+
+void AVFVideoSinkInterface::setVideoSink(AVFVideoSink *sink)
+{
+ if (sink == m_sink)
+ return;
+
+ if (m_sink)
+ m_sink->setVideoSinkInterface(nullptr);
+
+ m_sink = sink;
+
+ if (m_sink) {
+ m_sink->setVideoSinkInterface(this);
+ reconfigure();
+ }
+}
+
+void AVFVideoSinkInterface::setRhi(QRhi *rhi)
+{
+ QMutexLocker locker(&m_textureCacheMutex);
+ if (m_rhi == rhi)
+ return;
+ freeTextureCaches();
+ m_rhi = rhi;
+
+ if (!rhi)
+ return;
+ if (rhi->backend() == QRhi::Metal) {
+ const auto *metal = static_cast<const QRhiMetalNativeHandles *>(rhi->nativeHandles());
+
+ // Create a Metal Core Video texture cache from the pixel buffer.
+ Q_ASSERT(!cvMetalTextureCache);
+ if (CVMetalTextureCacheCreate(
+ kCFAllocatorDefault,
+ nil,
+ (id<MTLDevice>)metal->dev,
+ nil,
+ &cvMetalTextureCache) != kCVReturnSuccess) {
+ qWarning() << "Metal texture cache creation failed";
+ m_rhi = nullptr;
+ }
+ } else if (rhi->backend() == QRhi::OpenGLES2) {
+#if QT_CONFIG(opengl)
+#ifdef Q_OS_MACOS
+ const auto *gl = static_cast<const QRhiGles2NativeHandles *>(rhi->nativeHandles());
+
+ auto nsGLContext = gl->context->nativeInterface<QNativeInterface::QCocoaGLContext>()->nativeContext();
+ auto nsGLPixelFormat = nsGLContext.pixelFormat.CGLPixelFormatObj;
+
+ // Create an OpenGL CoreVideo texture cache from the pixel buffer.
+ if (CVOpenGLTextureCacheCreate(
+ kCFAllocatorDefault,
+ nullptr,
+ reinterpret_cast<CGLContextObj>(nsGLContext.CGLContextObj),
+ nsGLPixelFormat,
+ nil,
+ &cvOpenGLTextureCache)) {
+ qWarning() << "OpenGL texture cache creation failed";
+ m_rhi = nullptr;
+ }
+#endif
+#ifdef Q_OS_IOS
+ // Create an OpenGL CoreVideo texture cache from the pixel buffer.
+ if (CVOpenGLESTextureCacheCreate(
+ kCFAllocatorDefault,
+ nullptr,
+ [EAGLContext currentContext],
+ nullptr,
+ &cvOpenGLESTextureCache)) {
+ qWarning() << "OpenGL texture cache creation failed";
+ m_rhi = nullptr;
+ }
+#endif
+#else
+ m_rhi = nullptr;
+#endif // QT_CONFIG(opengl)
+ }
+ setOutputSettings();
+}
+
+void AVFVideoSinkInterface::setLayer(CALayer *layer)
+{
+ if (layer == m_layer)
+ return;
+
+ if (m_layer)
+ [m_layer release];
+
+ m_layer = layer;
+ if (m_layer)
+ [m_layer retain];
+
+ reconfigure();
+}
+
+void AVFVideoSinkInterface::setOutputSettings()
+{
+ if (m_outputSettings)
+ [m_outputSettings release];
+ m_outputSettings = nil;
+
+ // Set pixel format
+ NSDictionary *dictionary = nil;
+ if (m_rhi && m_rhi->backend() == QRhi::OpenGLES2) {
+#if QT_CONFIG(opengl)
+ dictionary = @{(NSString *)kCVPixelBufferPixelFormatTypeKey:
+ @(kCVPixelFormatType_32BGRA)
+#ifndef Q_OS_IOS // On iOS this key generates a warning about unsupported key.
+ , (NSString *)kCVPixelBufferOpenGLCompatibilityKey: @true
+#endif // Q_OS_IOS
+ };
+#endif
+ } else {
+ dictionary = @{(NSString *)kCVPixelBufferPixelFormatTypeKey:
+ @[
+ @(kCVPixelFormatType_32BGRA),
+ @(kCVPixelFormatType_32RGBA),
+ @(kCVPixelFormatType_422YpCbCr8),
+ @(kCVPixelFormatType_422YpCbCr8_yuvs),
+ @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
+ @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
+ @(kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange),
+ @(kCVPixelFormatType_420YpCbCr10BiPlanarFullRange),
+ @(kCVPixelFormatType_OneComponent8),
+ @(kCVPixelFormatType_OneComponent16),
+ @(kCVPixelFormatType_420YpCbCr8Planar),
+ @(kCVPixelFormatType_420YpCbCr8PlanarFullRange)
+ ]
+#ifndef Q_OS_IOS // On iOS this key is not supported and generates a warning.
+ , (NSString *)kCVPixelBufferMetalCompatibilityKey: @true
+#endif // Q_OS_IOS
+ };
+ }
+
+ m_outputSettings = [[NSDictionary alloc] initWithDictionary:dictionary];
+}
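+
+// Note: the concrete sinks apply these settings themselves; for the camera
+// path, AVFCameraRenderer::setOutputSettings() assigns them to
+// AVCaptureVideoDataOutput.videoSettings.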
+
+void AVFVideoSinkInterface::updateLayerBounds()
+{
+ if (!m_layer)
+ return;
+ [CATransaction begin];
+ [CATransaction setDisableActions: YES]; // disable animation/flicks
+ m_layer.frame = QRectF(0, 0, nativeSize().width(), nativeSize().height()).toCGRect();
+ m_layer.bounds = m_layer.frame;
+ [CATransaction commit];
+}
+
+#include "moc_avfvideosink_p.cpp"
diff --git a/src/plugins/multimedia/darwin/avfvideosink_p.h b/src/plugins/multimedia/darwin/avfvideosink_p.h
new file mode 100644
index 000000000..9b66e79f2
--- /dev/null
+++ b/src/plugins/multimedia/darwin/avfvideosink_p.h
@@ -0,0 +1,99 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFVIDEOSINK_H
+#define AVFVIDEOSINK_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/QMutex>
+#include "private/qplatformvideosink_p.h"
+
+Q_FORWARD_DECLARE_OBJC_CLASS(CALayer);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVPlayerLayer);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureVideoPreviewLayer);
+
+#include <CoreVideo/CVBase.h>
+#include <CoreVideo/CVPixelBuffer.h>
+#include <CoreVideo/CVImageBuffer.h>
+
+#import "Metal/Metal.h"
+#import "MetalKit/MetalKit.h"
+
+QT_BEGIN_NAMESPACE
+
+class AVFVideoSinkInterface;
+
+class AVFVideoSink : public QPlatformVideoSink
+{
+ Q_OBJECT
+
+public:
+ AVFVideoSink(QVideoSink *parent = nullptr);
+ virtual ~AVFVideoSink();
+
+ // QPlatformVideoSink interface
+public:
+ void setRhi(QRhi *rhi) override;
+
+ void setNativeSize(QSize size);
+
+ void setVideoSinkInterface(AVFVideoSinkInterface *interface);
+
+private:
+ AVFVideoSinkInterface *m_interface = nullptr;
+ QRhi *m_rhi = nullptr;
+};
+
+class AVFVideoSinkInterface
+{
+public:
+ ~AVFVideoSinkInterface();
+
+ void setVideoSink(AVFVideoSink *sink);
+
+
+ virtual void reconfigure() = 0;
+ virtual void setRhi(QRhi *);
+ virtual void setLayer(CALayer *layer);
+ virtual void setOutputSettings();
+
+ QMutex *textureCacheMutex() { return &m_textureCacheMutex; }
+
+ QRhi *rhi() const { return m_rhi; }
+
+ void updateLayerBounds();
+ void nativeSizeChanged() { updateLayerBounds(); }
+ QSize nativeSize() const { return m_sink ? m_sink->nativeSize() : QSize(); }
+
+ CVMetalTextureCacheRef cvMetalTextureCache = nullptr;
+#if defined(Q_OS_MACOS)
+ CVOpenGLTextureCacheRef cvOpenGLTextureCache = nullptr;
+#elif defined(Q_OS_IOS)
+ CVOpenGLESTextureCacheRef cvOpenGLESTextureCache = nullptr;
+#endif
+private:
+ void freeTextureCaches();
+
+protected:
+
+ AVFVideoSink *m_sink = nullptr;
+ QRhi *m_rhi = nullptr;
+ CALayer *m_layer = nullptr;
+ NSDictionary *m_outputSettings = nullptr;
+ QMutex m_textureCacheMutex;
+};
+
+
+QT_END_NAMESPACE
+
+#endif // AVFVIDEOSINK_H
diff --git a/src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate.mm b/src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate.mm
new file mode 100644
index 000000000..1b2d4b15d
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate.mm
@@ -0,0 +1,98 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfcamerasession_p.h"
+#include "avfaudiopreviewdelegate_p.h"
+
+QT_USE_NAMESPACE
+
+@implementation AVFAudioPreviewDelegate
+{
+@private
+ AVSampleBufferAudioRenderer *m_audioRenderer;
+ AVFCameraSession *m_session;
+ AVSampleBufferRenderSynchronizer *m_audioBufferSynchronizer;
+ dispatch_queue_t m_audioPreviewQueue;
+}
+
+- (id)init
+{
+ if (self = [super init]) {
+ m_session = nil;
+ m_audioBufferSynchronizer = [[AVSampleBufferRenderSynchronizer alloc] init];
+ m_audioRenderer = [[AVSampleBufferAudioRenderer alloc] init];
+ [m_audioBufferSynchronizer addRenderer:m_audioRenderer];
+ return self;
+ }
+ return nil;
+}
+
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ fromConnection:(AVCaptureConnection *)connection
+{
+ Q_UNUSED(connection);
+ Q_ASSERT(m_session);
+
+ if (!CMSampleBufferDataIsReady(sampleBuffer)) {
+ qWarning() << Q_FUNC_INFO << "sample buffer is not ready, skipping.";
+ return;
+ }
+
+ CFRetain(sampleBuffer);
+
+ dispatch_async(m_audioPreviewQueue, ^{
+ [self renderAudioSampleBuffer:sampleBuffer];
+ CFRelease(sampleBuffer);
+ });
+}
+
+- (void)renderAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer
+{
+ Q_ASSERT(sampleBuffer);
+ Q_ASSERT(m_session);
+
+ if (m_audioBufferSynchronizer && m_audioRenderer) {
+ [m_audioRenderer enqueueSampleBuffer:sampleBuffer];
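+        // A rate of 0 means the synchronizer is paused; start playback
+        // anchored at this buffer's presentation timestamp.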
+ if (m_audioBufferSynchronizer.rate == 0)
+ [m_audioBufferSynchronizer setRate:1 time:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
+ }
+}
+
+- (void)resetAudioPreviewDelegate
+{
+ [m_session->audioOutput() setSampleBufferDelegate:self queue:m_audioPreviewQueue];
+}
+
+- (void)setupWithCaptureSession: (AVFCameraSession*)session
+ audioOutputDevice: (NSString*)deviceId
+{
+ m_session = session;
+
+ m_audioPreviewQueue = dispatch_queue_create("audio-preview-queue", nullptr);
+ [m_session->audioOutput() setSampleBufferDelegate:self queue:m_audioPreviewQueue];
+#ifdef Q_OS_MACOS
+ m_audioRenderer.audioOutputDeviceUniqueID = deviceId;
+#endif
+}
+
+- (void)setVolume: (float)volume
+{
+ m_audioRenderer.volume = volume;
+}
+
+- (void)setMuted: (bool)muted
+{
+ m_audioRenderer.muted = muted;
+}
+
+- (void)dealloc
+{
+ m_session = nil;
+ [m_audioRenderer release];
+ [m_audioBufferSynchronizer release];
+ dispatch_release(m_audioPreviewQueue);
+
+ [super dealloc];
+}
+
+@end
diff --git a/src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate_p.h b/src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate_p.h
new file mode 100644
index 000000000..8fa06ef39
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate_p.h
@@ -0,0 +1,40 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFAUDIOPREVIEWDELEGATE_P_H
+#define AVFAUDIOPREVIEWDELEGATE_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qglobal.h>
+
+#include <AVFoundation/AVFoundation.h>
+
+QT_BEGIN_NAMESPACE
+
+class AVFCameraSession;
+
+QT_END_NAMESPACE
+
+@interface AVFAudioPreviewDelegate : NSObject<AVCaptureAudioDataOutputSampleBufferDelegate>
+
+- (id)init;
+- (void)setupWithCaptureSession: (AVFCameraSession*)session
+ audioOutputDevice: (NSString*)deviceId;
+- (void)renderAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;
+- (void)resetAudioPreviewDelegate;
+- (void)setVolume: (float)volume;
+- (void)setMuted: (bool)muted;
+
+@end
+
+#endif // AVFAUDIOPREVIEWDELEGATE_P_H
diff --git a/src/plugins/multimedia/darwin/camera/avfcamera.mm b/src/plugins/multimedia/darwin/camera/avfcamera.mm
new file mode 100644
index 000000000..05cdbae17
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamera.mm
@@ -0,0 +1,89 @@
+// Copyright (C) 2022 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfcameradebug_p.h"
+#include "avfcamera_p.h"
+#include "avfcamerasession_p.h"
+#include "avfcameraservice_p.h"
+#include "avfcamerautility_p.h"
+#include "avfcamerarenderer_p.h"
+#include <qmediacapturesession.h>
+
+QT_USE_NAMESPACE
+
+AVFCamera::AVFCamera(QCamera *camera)
+ : QAVFCameraBase(camera)
+{
+ Q_ASSERT(camera);
+}
+
+AVFCamera::~AVFCamera()
+{
+}
+
+void AVFCamera::setActive(bool active)
+{
+ if (m_active == active)
+ return;
+ if (m_cameraDevice.isNull() && active)
+ return;
+
+ m_active = active;
+ if (m_session)
+ m_session->setActive(active);
+
+ if (active)
+ updateCameraConfiguration();
+ Q_EMIT activeChanged(m_active);
+}
+
+void AVFCamera::setCamera(const QCameraDevice &camera)
+{
+ if (m_cameraDevice == camera)
+ return;
+ m_cameraDevice = camera;
+ if (m_session)
+ m_session->setActiveCamera(camera);
+ setCameraFormat({});
+}
+
+bool AVFCamera::setCameraFormat(const QCameraFormat &format)
+{
+ if (!format.isNull() && !m_cameraDevice.videoFormats().contains(format))
+ return false;
+
+ m_cameraFormat = format.isNull() ? findBestCameraFormat(m_cameraDevice) : format;
+
+ if (m_session)
+ m_session->setCameraFormat(m_cameraFormat);
+
+ return true;
+}
+
+void AVFCamera::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+ AVFCameraService *captureSession = static_cast<AVFCameraService *>(session);
+ if (m_service == captureSession)
+ return;
+
+ if (m_session) {
+ m_session->disconnect(this);
+ m_session->setActiveCamera({});
+ m_session->setCameraFormat({});
+ }
+
+ m_service = captureSession;
+ if (!m_service) {
+ m_session = nullptr;
+ return;
+ }
+
+ m_session = m_service->session();
+ Q_ASSERT(m_session);
+
+ m_session->setActiveCamera(m_cameraDevice);
+ m_session->setCameraFormat(m_cameraFormat);
+ m_session->setActive(m_active);
+}
+
+#include "moc_avfcamera_p.cpp"
diff --git a/src/plugins/multimedia/darwin/camera/avfcamera_p.h b/src/plugins/multimedia/darwin/camera/avfcamera_p.h
new file mode 100644
index 000000000..3c3e6da09
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamera_p.h
@@ -0,0 +1,48 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFCAMERA_H
+#define AVFCAMERA_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qavfcamerabase_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class AVFCameraSession;
+class AVFCameraService;
+
+class AVFCamera : public QAVFCameraBase
+{
+Q_OBJECT
+public:
+ AVFCamera(QCamera *camera);
+ ~AVFCamera();
+
+    void setActive(bool active) override;
+
+ void setCamera(const QCameraDevice &camera) override;
+ bool setCameraFormat(const QCameraFormat &format) override;
+
+ void setCaptureSession(QPlatformMediaCaptureSession *) override;
+
+private:
+ friend class AVFCameraSession;
+ AVFCameraService *m_service = nullptr;
+ AVFCameraSession *m_session = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/camera/avfcameradebug_p.h b/src/plugins/multimedia/darwin/camera/avfcameradebug_p.h
new file mode 100644
index 000000000..f93c85142
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcameradebug_p.h
@@ -0,0 +1,26 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFDEBUG_H
+#define AVFDEBUG_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qtmultimediaglobal.h"
+
+#include <QtCore/qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+Q_DECLARE_LOGGING_CATEGORY(qLcCamera)
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/camera/avfcamerarenderer.mm b/src/plugins/multimedia/darwin/camera/avfcamerarenderer.mm
new file mode 100644
index 000000000..0c9ab3f2c
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamerarenderer.mm
@@ -0,0 +1,292 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qabstractvideobuffer.h"
+#include "private/qcameradevice_p.h"
+#include "private/qvideoframe_p.h"
+#include "avfcamerarenderer_p.h"
+#include "avfcamerasession_p.h"
+#include "avfcameraservice_p.h"
+#include "avfcameradebug_p.h"
+#include "avfcamera_p.h"
+#include <avfvideosink_p.h>
+#include <avfvideobuffer_p.h>
+#include "qvideosink.h"
+#include "qavfhelpers_p.h"
+
+#include <rhi/qrhi.h>
+
+#import <AVFoundation/AVFoundation.h>
+
+#ifdef Q_OS_IOS
+#include <QtGui/qopengl.h>
+#endif
+
+#include <QtMultimedia/qvideoframeformat.h>
+
+QT_USE_NAMESPACE
+
+@interface AVFCaptureFramesDelegate : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
+
+- (AVFCaptureFramesDelegate *) initWithRenderer:(AVFCameraRenderer*)renderer;
+
+- (void) captureOutput:(AVCaptureOutput *)captureOutput
+ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ fromConnection:(AVCaptureConnection *)connection;
+
+@end
+
+@implementation AVFCaptureFramesDelegate
+{
+@private
+ AVFCameraRenderer *m_renderer;
+}
+
+- (AVFCaptureFramesDelegate *) initWithRenderer:(AVFCameraRenderer*)renderer
+{
+ if (!(self = [super init]))
+ return nil;
+
+ self->m_renderer = renderer;
+ return self;
+}
+
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ fromConnection:(AVCaptureConnection *)connection
+{
+ Q_UNUSED(connection);
+ Q_UNUSED(captureOutput);
+
+ // NB: on iOS captureOutput/connection can be nil (when recording a video -
+ // avfmediaassetwriter).
+
+ CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+ auto buffer = std::make_unique<AVFVideoBuffer>(m_renderer, imageBuffer);
+ auto format = buffer->videoFormat();
+ if (!format.isValid()) {
+ return;
+ }
+
+ QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(buffer), format);
+ m_renderer->syncHandleViewfinderFrame(frame);
+}
+
+@end
+
+AVFCameraRenderer::AVFCameraRenderer(QObject *parent)
+ : QObject(parent)
+{
+ m_viewfinderFramesDelegate = [[AVFCaptureFramesDelegate alloc] initWithRenderer:this];
+ connect(&m_orientationHandler, &QVideoOutputOrientationHandler::orientationChanged,
+ this, &AVFCameraRenderer::deviceOrientationChanged);
+}
+
+AVFCameraRenderer::~AVFCameraRenderer()
+{
+ [m_cameraSession->captureSession() removeOutput:m_videoDataOutput];
+ [m_viewfinderFramesDelegate release];
+ [m_videoDataOutput release];
+
+ if (m_delegateQueue)
+ dispatch_release(m_delegateQueue);
+#ifdef Q_OS_IOS
+ if (m_textureCache)
+ CFRelease(m_textureCache);
+#endif
+}
+
+void AVFCameraRenderer::reconfigure()
+{
+ QMutexLocker lock(&m_vfMutex);
+
+ // ### This is a hack, need to use a reliable way to determine the size and not use the preview layer
+ if (m_layer)
+ m_sink->setNativeSize(QSize(m_layer.bounds.size.width, m_layer.bounds.size.height));
+ nativeSizeChanged();
+ deviceOrientationChanged();
+}
+
+void AVFCameraRenderer::setOutputSettings()
+{
+ if (!m_videoDataOutput)
+ return;
+
+ if (m_cameraSession) {
+ const auto format = m_cameraSession->cameraFormat();
+ if (format.pixelFormat() != QVideoFrameFormat::Format_Invalid)
+ setPixelFormat(format.pixelFormat(), QCameraFormatPrivate::getColorRange(format));
+ }
+
+    // If no output settings were set above, it's most likely because the rhi
+    // is OpenGL and the pixel format is not BGRA; the base class
+    // implementation forces BGRA in that case.
+ if (!m_outputSettings)
+ AVFVideoSinkInterface::setOutputSettings();
+
+ if (m_outputSettings)
+ m_videoDataOutput.videoSettings = m_outputSettings;
+}
+
+void AVFCameraRenderer::configureAVCaptureSession(AVFCameraSession *cameraSession)
+{
+ m_cameraSession = cameraSession;
+ connect(m_cameraSession, SIGNAL(readyToConfigureConnections()),
+ this, SLOT(updateCaptureConnection()));
+
+ m_needsHorizontalMirroring = false;
+
+ m_videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
+
+ // Configure video output
+ m_delegateQueue = dispatch_queue_create("vf_queue", nullptr);
+ [m_videoDataOutput
+ setSampleBufferDelegate:m_viewfinderFramesDelegate
+ queue:m_delegateQueue];
+
+ [m_cameraSession->captureSession() addOutput:m_videoDataOutput];
+}
+
+void AVFCameraRenderer::updateCaptureConnection()
+{
+ AVCaptureConnection *connection = [m_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
+ if (connection == nil || !m_cameraSession->videoCaptureDevice())
+ return;
+
+ // Frames of front-facing cameras should be mirrored horizontally (it's the default when using
+ // AVCaptureVideoPreviewLayer but not with AVCaptureVideoDataOutput)
+ if (connection.isVideoMirroringSupported)
+ connection.videoMirrored = m_cameraSession->videoCaptureDevice().position == AVCaptureDevicePositionFront;
+
+    // If the connection doesn't support mirroring, we'll have to do it ourselves
+ m_needsHorizontalMirroring = !connection.isVideoMirrored
+ && m_cameraSession->videoCaptureDevice().position == AVCaptureDevicePositionFront;
+
+ deviceOrientationChanged();
+}
+
+void AVFCameraRenderer::deviceOrientationChanged(int angle)
+{
+ AVCaptureConnection *connection = [m_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
+ if (connection == nil || !m_cameraSession->videoCaptureDevice())
+ return;
+
+ if (!connection.supportsVideoOrientation)
+ return;
+
+ if (angle < 0)
+ angle = m_orientationHandler.currentOrientation();
+
+ AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
+ switch (angle) {
+ default:
+ break;
+ case 90:
+ orientation = AVCaptureVideoOrientationLandscapeRight;
+ break;
+ case 180:
+        // keep the last orientation; don't do anything
+ return;
+ case 270:
+ orientation = AVCaptureVideoOrientationLandscapeLeft;
+ break;
+ }
+
+ connection.videoOrientation = orientation;
+}
+
+//can be called from non main thread
+void AVFCameraRenderer::syncHandleViewfinderFrame(const QVideoFrame &frame)
+{
+ Q_EMIT newViewfinderFrame(frame);
+
+ QMutexLocker lock(&m_vfMutex);
+
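+    // An invalid last frame means no handleViewfinderFrame() invocation is
+    // queued yet; otherwise the pending invocation will pick up the newest
+    // frame, coalescing frames that arrive faster than they are presented.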
+ if (!m_lastViewfinderFrame.isValid()) {
+ static QMetaMethod handleViewfinderFrameSlot = metaObject()->method(
+ metaObject()->indexOfMethod("handleViewfinderFrame()"));
+
+ handleViewfinderFrameSlot.invoke(this, Qt::QueuedConnection);
+ }
+
+ m_lastViewfinderFrame = frame;
+}
+
+AVCaptureVideoDataOutput *AVFCameraRenderer::videoDataOutput() const
+{
+ return m_videoDataOutput;
+}
+
+AVFCaptureFramesDelegate *AVFCameraRenderer::captureDelegate() const
+{
+ return m_viewfinderFramesDelegate;
+}
+
+void AVFCameraRenderer::resetCaptureDelegate() const
+{
+ [m_videoDataOutput setSampleBufferDelegate:m_viewfinderFramesDelegate queue:m_delegateQueue];
+}
+
+void AVFCameraRenderer::handleViewfinderFrame()
+{
+ QVideoFrame frame;
+ {
+ QMutexLocker lock(&m_vfMutex);
+ frame = m_lastViewfinderFrame;
+ m_lastViewfinderFrame = QVideoFrame();
+ }
+
+ if (m_sink && frame.isValid()) {
+        // frame.setMirrored(m_needsHorizontalMirroring) ?
+ m_sink->setVideoFrame(frame);
+ }
+}
+
+void AVFCameraRenderer::setPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat,
+ QVideoFrameFormat::ColorRange colorRange)
+{
+ if (rhi() && rhi()->backend() == QRhi::OpenGLES2) {
+ if (pixelFormat != QVideoFrameFormat::Format_BGRA8888)
+ qWarning() << "OpenGL rhi backend only supports 32BGRA pixel format.";
+ return;
+ }
+
+    // Default to the 32BGRA pixel format for the viewfinder in case the
+    // requested format can't be used (shouldn't happen unless the developer
+    // sets a wrong camera format on the camera).
+ auto cvPixelFormat = QAVFHelpers::toCVPixelFormat(pixelFormat, colorRange);
+ if (cvPixelFormat == CvPixelFormatInvalid) {
+ cvPixelFormat = kCVPixelFormatType_32BGRA;
+ qWarning() << "QCamera::setCameraFormat: couldn't convert requested pixel format, using ARGB32";
+ }
+
+ bool isSupported = false;
+ NSArray *supportedPixelFormats = m_videoDataOutput.availableVideoCVPixelFormatTypes;
+ for (NSNumber *currentPixelFormat in supportedPixelFormats)
+ {
+ if ([currentPixelFormat unsignedIntValue] == cvPixelFormat) {
+ isSupported = true;
+ break;
+ }
+ }
+
+ if (isSupported) {
+ NSDictionary *outputSettings = @{
+ (NSString *)
+ kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithUnsignedInt:cvPixelFormat]
+#ifndef Q_OS_IOS // On iOS this key generates a warning about 'unsupported key'.
+ ,
+ (NSString *)kCVPixelBufferMetalCompatibilityKey : @true
+#endif // Q_OS_IOS
+ };
+ if (m_outputSettings)
+ [m_outputSettings release];
+ m_outputSettings = [[NSDictionary alloc] initWithDictionary:outputSettings];
+ } else {
+ qWarning() << "QCamera::setCameraFormat: requested pixel format not supported. Did you use a camera format from another camera?";
+ }
+}
+
+#include "moc_avfcamerarenderer_p.cpp"
+
diff --git a/src/plugins/multimedia/darwin/camera/avfcamerarenderer_p.h b/src/plugins/multimedia/darwin/camera/avfcamerarenderer_p.h
new file mode 100644
index 000000000..57f665cd6
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamerarenderer_p.h
@@ -0,0 +1,95 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFCAMERARENDERER_H
+#define AVFCAMERARENDERER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qobject.h>
+#include <QtMultimedia/qvideoframe.h>
+#include <QtCore/qmutex.h>
+#include <avfvideosink_p.h>
+#include <private/qvideooutputorientationhandler_p.h>
+
+#include <CoreVideo/CVBase.h>
+#include <CoreVideo/CVPixelBuffer.h>
+#include <CoreVideo/CVImageBuffer.h>
+#ifdef Q_OS_IOS
+#include <CoreVideo/CVOpenGLESTexture.h>
+#include <CoreVideo/CVOpenGLESTextureCache.h>
+#endif
+
+#include <dispatch/dispatch.h>
+
+Q_FORWARD_DECLARE_OBJC_CLASS(AVFCaptureFramesDelegate);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureVideoDataOutput);
+
+QT_BEGIN_NAMESPACE
+
+class AVFCameraSession;
+class AVFCameraService;
+class AVFCameraRenderer;
+class AVFVideoSink;
+
+class AVFCameraRenderer : public QObject, public AVFVideoSinkInterface
+{
+Q_OBJECT
+public:
+ AVFCameraRenderer(QObject *parent = nullptr);
+ ~AVFCameraRenderer();
+
+ void reconfigure() override;
+ void setOutputSettings() override;
+
+ void configureAVCaptureSession(AVFCameraSession *cameraSession);
+ void syncHandleViewfinderFrame(const QVideoFrame &frame);
+
+ AVCaptureVideoDataOutput *videoDataOutput() const;
+
+ AVFCaptureFramesDelegate *captureDelegate() const;
+ void resetCaptureDelegate() const;
+
+ void setPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat,
+ QVideoFrameFormat::ColorRange colorRange);
+
+Q_SIGNALS:
+ void newViewfinderFrame(const QVideoFrame &frame);
+
+private Q_SLOTS:
+ void handleViewfinderFrame();
+ void updateCaptureConnection();
+public Q_SLOTS:
+ void deviceOrientationChanged(int angle = -1);
+
+private:
+ AVFCaptureFramesDelegate *m_viewfinderFramesDelegate = nullptr;
+ AVFCameraSession *m_cameraSession = nullptr;
+ AVCaptureVideoDataOutput *m_videoDataOutput = nullptr;
+
+ bool m_needsHorizontalMirroring = false;
+
+#ifdef Q_OS_IOS
+ CVOpenGLESTextureCacheRef m_textureCache = nullptr;
+#endif
+
+ QVideoFrame m_lastViewfinderFrame;
+ QMutex m_vfMutex;
+    dispatch_queue_t m_delegateQueue = nullptr;
+ QVideoOutputOrientationHandler m_orientationHandler;
+
+ friend class CVImageVideoBuffer;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/camera/avfcameraservice.mm b/src/plugins/multimedia/darwin/camera/avfcameraservice.mm
new file mode 100644
index 000000000..b25fb50a9
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcameraservice.mm
@@ -0,0 +1,169 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QtCore/qvariant.h>
+#include <QtCore/qdebug.h>
+
+#include "avfcameraservice_p.h"
+#include "avfcamera_p.h"
+#include "avfcamerasession_p.h"
+#include "avfimagecapture_p.h"
+#include "avfcamerarenderer_p.h"
+#include "avfimagecapture_p.h"
+#include "avfmediaencoder_p.h"
+#include <qmediadevices.h>
+#include <private/qplatformaudioinput_p.h>
+#include <private/qplatformaudiooutput_p.h>
+#include <qaudioinput.h>
+#include <qaudiooutput.h>
+
+QT_USE_NAMESPACE
+
+AVFCameraService::AVFCameraService()
+{
+ m_session = new AVFCameraSession(this);
+}
+
+AVFCameraService::~AVFCameraService()
+{
+ if (m_session)
+ delete m_session;
+}
+
+QPlatformCamera *AVFCameraService::camera()
+{
+ return m_cameraControl;
+}
+
+void AVFCameraService::setCamera(QPlatformCamera *camera)
+{
+ AVFCamera *control = static_cast<AVFCamera *>(camera);
+ if (m_cameraControl == control)
+ return;
+
+ if (m_cameraControl) {
+ if (m_encoder)
+ m_cameraControl->disconnect(m_encoder);
+ m_cameraControl->setCaptureSession(nullptr);
+ }
+
+ m_cameraControl = control;
+
+ if (m_cameraControl)
+ m_cameraControl->setCaptureSession(this);
+
+ emit cameraChanged();
+}
+
+QPlatformImageCapture *AVFCameraService::imageCapture()
+{
+ return m_imageCaptureControl;
+}
+
+void AVFCameraService::setImageCapture(QPlatformImageCapture *imageCapture)
+{
+ AVFImageCapture *control = static_cast<AVFImageCapture *>(imageCapture);
+ if (m_imageCaptureControl == control)
+ return;
+
+ if (m_imageCaptureControl)
+ m_imageCaptureControl->setCaptureSession(nullptr);
+
+ m_imageCaptureControl = control;
+ if (m_imageCaptureControl)
+ m_imageCaptureControl->setCaptureSession(this);
+}
+
+QPlatformMediaRecorder *AVFCameraService::mediaRecorder()
+{
+ return m_encoder;
+}
+
+void AVFCameraService::setMediaRecorder(QPlatformMediaRecorder *recorder)
+{
+ AVFMediaEncoder *control = static_cast<AVFMediaEncoder *>(recorder);
+ if (m_encoder == control)
+ return;
+
+ if (m_encoder)
+ m_encoder->setCaptureSession(nullptr);
+
+ m_encoder = control;
+ if (m_encoder)
+ m_encoder->setCaptureSession(this);
+
+ emit encoderChanged();
+}
+
+void AVFCameraService::setAudioInput(QPlatformAudioInput *input)
+{
+ if (m_audioInput == input)
+ return;
+ if (m_audioInput)
+ m_audioInput->q->disconnect(this);
+
+ m_audioInput = input;
+
+ if (input) {
+ connect(m_audioInput->q, &QAudioInput::destroyed, this, &AVFCameraService::audioInputDestroyed);
+ connect(m_audioInput->q, &QAudioInput::deviceChanged, this, &AVFCameraService::audioInputChanged);
+ connect(m_audioInput->q, &QAudioInput::mutedChanged, this, &AVFCameraService::setAudioInputMuted);
+ connect(m_audioInput->q, &QAudioInput::volumeChanged, this, &AVFCameraService::setAudioInputVolume);
+ }
+ audioInputChanged();
+}
+
+void AVFCameraService::setAudioOutput(QPlatformAudioOutput *output)
+{
+ if (m_audioOutput == output)
+ return;
+ if (m_audioOutput)
+ m_audioOutput->q->disconnect(this);
+
+ m_audioOutput = output;
+
+ if (m_audioOutput) {
+ connect(m_audioOutput->q, &QAudioOutput::destroyed, this, &AVFCameraService::audioOutputDestroyed);
+ connect(m_audioOutput->q, &QAudioOutput::deviceChanged, this, &AVFCameraService::audioOutputChanged);
+ connect(m_audioOutput->q, &QAudioOutput::mutedChanged, this, &AVFCameraService::setAudioOutputMuted);
+ connect(m_audioOutput->q, &QAudioOutput::volumeChanged, this, &AVFCameraService::setAudioOutputVolume);
+ }
+ audioOutputChanged();
+}
+
+void AVFCameraService::audioInputChanged()
+{
+ m_session->updateAudioInput();
+}
+
+void AVFCameraService::audioOutputChanged()
+{
+ m_session->updateAudioOutput();
+}
+
+void AVFCameraService::setAudioInputMuted(bool muted)
+{
+ m_session->setAudioInputMuted(muted);
+}
+
+void AVFCameraService::setAudioInputVolume(float volume)
+{
+ m_session->setAudioInputVolume(volume);
+}
+
+void AVFCameraService::setAudioOutputMuted(bool muted)
+{
+ m_session->setAudioOutputMuted(muted);
+}
+
+void AVFCameraService::setAudioOutputVolume(float volume)
+{
+ m_session->setAudioOutputVolume(volume);
+}
+
+void AVFCameraService::setVideoPreview(QVideoSink *sink)
+{
+ m_session->setVideoSink(sink);
+}
+
+#include "moc_avfcameraservice_p.cpp"
diff --git a/src/plugins/multimedia/darwin/camera/avfcameraservice_p.h b/src/plugins/multimedia/darwin/camera/avfcameraservice_p.h
new file mode 100644
index 000000000..f3ef8d08e
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcameraservice_p.h
@@ -0,0 +1,84 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFCAMERASERVICE_H
+#define AVFCAMERASERVICE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qobject.h>
+#include <QtCore/qset.h>
+#include <private/qplatformmediacapture_p.h>
+
+Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureDevice);
+
+QT_BEGIN_NAMESPACE
+class QPlatformCamera;
+class QPlatformMediaRecorder;
+class AVFCamera;
+class AVFImageCapture;
+class AVFCameraSession;
+class AVFMediaEncoder;
+
+class AVFCameraService : public QPlatformMediaCaptureSession
+{
+ Q_OBJECT
+public:
+ AVFCameraService();
+ ~AVFCameraService();
+
+ QPlatformCamera *camera() override;
+ void setCamera(QPlatformCamera *camera) override;
+
+ QPlatformImageCapture *imageCapture() override;
+ void setImageCapture(QPlatformImageCapture *imageCapture) override;
+
+ QPlatformMediaRecorder *mediaRecorder() override;
+ void setMediaRecorder(QPlatformMediaRecorder *recorder) override;
+
+ void setAudioInput(QPlatformAudioInput *) override;
+ void setAudioOutput(QPlatformAudioOutput *) override;
+
+ void setVideoPreview(QVideoSink *sink) override;
+
+ AVFCameraSession *session() const { return m_session; }
+ AVFCamera *avfCameraControl() const { return m_cameraControl; }
+ AVFMediaEncoder *recorderControl() const { return m_encoder; }
+ AVFImageCapture *avfImageCaptureControl() const { return m_imageCaptureControl; }
+
+ QPlatformAudioInput *audioInput() { return m_audioInput; }
+ QPlatformAudioOutput *audioOutput() { return m_audioOutput; }
+
+public Q_SLOTS:
+ void audioInputDestroyed() { setAudioInput(nullptr); }
+ void audioInputChanged();
+ void audioOutputDestroyed() { setAudioOutput(nullptr); }
+ void audioOutputChanged();
+
+ void setAudioInputMuted(bool muted);
+ void setAudioInputVolume(float volume);
+ void setAudioOutputMuted(bool muted);
+ void setAudioOutputVolume(float volume);
+
+private:
+ QPlatformAudioInput *m_audioInput = nullptr;
+ QPlatformAudioOutput *m_audioOutput = nullptr;
+
+ AVFCameraSession *m_session = nullptr;
+ AVFCamera *m_cameraControl = nullptr;
+ AVFMediaEncoder *m_encoder = nullptr;
+ AVFImageCapture *m_imageCaptureControl = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/camera/avfcamerasession.mm b/src/plugins/multimedia/darwin/camera/avfcamerasession.mm
new file mode 100644
index 000000000..52e2eadfa
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamerasession.mm
@@ -0,0 +1,513 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfcameradebug_p.h"
+#include "avfcamerasession_p.h"
+#include "avfcameraservice_p.h"
+#include "avfcamera_p.h"
+#include "avfcamerarenderer_p.h"
+#include "avfimagecapture_p.h"
+#include "avfmediaencoder_p.h"
+#include "avfcamerautility_p.h"
+#include <avfvideosink_p.h>
+
+#include <CoreFoundation/CoreFoundation.h>
+#include <Foundation/Foundation.h>
+
+#include <QtCore/qcoreapplication.h>
+#include <QtCore/qdatetime.h>
+#include <QtCore/qurl.h>
+#include <QtCore/qelapsedtimer.h>
+#include <QtCore/qpermissions.h>
+#include <QtCore/qpointer.h>
+
+#include <private/qplatformaudioinput_p.h>
+#include <private/qplatformaudiooutput_p.h>
+
+#include <QtCore/qdebug.h>
+
+QT_USE_NAMESPACE
+
+@interface AVFCameraSessionObserver : NSObject
+
+- (AVFCameraSessionObserver *) initWithCameraSession:(AVFCameraSession*)session;
+- (void) processRuntimeError:(NSNotification *)notification;
+- (void) processSessionStarted:(NSNotification *)notification;
+- (void) processSessionStopped:(NSNotification *)notification;
+
+@end
+
+@implementation AVFCameraSessionObserver
+{
+@private
+ AVFCameraSession *m_session;
+ AVCaptureSession *m_captureSession;
+}
+
+- (AVFCameraSessionObserver *) initWithCameraSession:(AVFCameraSession*)session
+{
+ if (!(self = [super init]))
+ return nil;
+
+ self->m_session = session;
+ self->m_captureSession = session->captureSession();
+
+ [m_captureSession retain];
+ [[NSNotificationCenter defaultCenter] addObserver:self
+ selector:@selector(processRuntimeError:)
+ name:AVCaptureSessionRuntimeErrorNotification
+ object:m_captureSession];
+
+ [[NSNotificationCenter defaultCenter] addObserver:self
+ selector:@selector(processSessionStarted:)
+ name:AVCaptureSessionDidStartRunningNotification
+ object:m_captureSession];
+
+ [[NSNotificationCenter defaultCenter] addObserver:self
+ selector:@selector(processSessionStopped:)
+ name:AVCaptureSessionDidStopRunningNotification
+ object:m_captureSession];
+
+ return self;
+}
+
+- (void) dealloc
+{
+ [[NSNotificationCenter defaultCenter] removeObserver:self
+ name:AVCaptureSessionRuntimeErrorNotification
+ object:m_captureSession];
+
+ [[NSNotificationCenter defaultCenter] removeObserver:self
+ name:AVCaptureSessionDidStartRunningNotification
+ object:m_captureSession];
+
+ [[NSNotificationCenter defaultCenter] removeObserver:self
+ name:AVCaptureSessionDidStopRunningNotification
+ object:m_captureSession];
+ [m_captureSession release];
+ [super dealloc];
+}
+
+- (void) processRuntimeError:(NSNotification *)notification
+{
+ Q_UNUSED(notification);
+ QMetaObject::invokeMethod(m_session, "processRuntimeError", Qt::AutoConnection);
+}
+
+- (void) processSessionStarted:(NSNotification *)notification
+{
+ Q_UNUSED(notification);
+ QMetaObject::invokeMethod(m_session, "processSessionStarted", Qt::AutoConnection);
+}
+
+- (void) processSessionStopped:(NSNotification *)notification
+{
+ Q_UNUSED(notification);
+ QMetaObject::invokeMethod(m_session, "processSessionStopped", Qt::AutoConnection);
+}
+
+@end
+
+AVFCameraSession::AVFCameraSession(AVFCameraService *service, QObject *parent)
+ : QObject(parent)
+ , m_service(service)
+ , m_defaultCodec(0)
+{
+ m_captureSession = [[AVCaptureSession alloc] init];
+ m_observer = [[AVFCameraSessionObserver alloc] initWithCameraSession:this];
+}
+
+AVFCameraSession::~AVFCameraSession()
+{
+ if (m_videoInput) {
+ [m_captureSession removeInput:m_videoInput];
+ [m_videoInput release];
+ }
+
+ if (m_audioInput) {
+ [m_captureSession removeInput:m_audioInput];
+ [m_audioInput release];
+ }
+
+ if (m_audioOutput) {
+ [m_captureSession removeOutput:m_audioOutput];
+ [m_audioOutput release];
+ }
+
+ if (m_videoOutput)
+ delete m_videoOutput;
+
+ [m_observer release];
+ [m_captureSession release];
+}
+
+void AVFCameraSession::setActiveCamera(const QCameraDevice &info)
+{
+ if (m_activeCameraDevice != info) {
+ m_activeCameraDevice = info;
+
+ if (checkCameraPermission())
+ updateVideoInput();
+ }
+}
+
+void AVFCameraSession::setCameraFormat(const QCameraFormat &format)
+{
+ if (m_cameraFormat == format)
+ return;
+
+ updateCameraFormat(format);
+}
+
+QCameraFormat AVFCameraSession::cameraFormat() const
+{
+ return m_cameraFormat;
+}
+
+void AVFCameraSession::updateCameraFormat(const QCameraFormat &format)
+{
+ m_cameraFormat = format;
+
+ AVCaptureDevice *captureDevice = videoCaptureDevice();
+ if (!captureDevice)
+ return;
+
+ AVCaptureDeviceFormat *newFormat = qt_convert_to_capture_device_format(captureDevice, format);
+ if (newFormat)
+ qt_set_active_format(captureDevice, newFormat, false);
+}
+
+void AVFCameraSession::setVideoOutput(AVFCameraRenderer *output)
+{
+ if (m_videoOutput == output)
+ return;
+
+ delete m_videoOutput;
+ m_videoOutput = output;
+ if (output)
+ output->configureAVCaptureSession(this);
+}
+
+void AVFCameraSession::addAudioCapture()
+{
+ if (!m_audioOutput) {
+ m_audioOutput = [[AVCaptureAudioDataOutput alloc] init];
+ if (m_audioOutput && [m_captureSession canAddOutput:m_audioOutput]) {
+ [m_captureSession addOutput:m_audioOutput];
+ } else {
+ qWarning() << Q_FUNC_INFO << "failed to add audio output";
+ }
+ }
+}
+
+AVCaptureDevice *AVFCameraSession::videoCaptureDevice() const
+{
+ if (m_videoInput)
+ return m_videoInput.device;
+
+ return nullptr;
+}
+
+AVCaptureDevice *AVFCameraSession::audioCaptureDevice() const
+{
+ if (m_audioInput)
+ return m_audioInput.device;
+
+ return nullptr;
+}
+
+void AVFCameraSession::setAudioInputVolume(float volume)
+{
+ m_inputVolume = volume;
+
+ if (m_inputMuted)
+ volume = 0.0;
+
+#ifdef Q_OS_MACOS
+ AVCaptureConnection *audioInputConnection = [m_audioOutput connectionWithMediaType:AVMediaTypeAudio];
+ NSArray<AVCaptureAudioChannel *> *audioChannels = audioInputConnection.audioChannels;
+ if (audioChannels) {
+ for (AVCaptureAudioChannel *channel in audioChannels) {
+ channel.volume = volume;
+ }
+ }
+#endif
+}
+
+void AVFCameraSession::setAudioInputMuted(bool muted)
+{
+ m_inputMuted = muted;
+ setAudioInputVolume(m_inputVolume);
+}
+
+void AVFCameraSession::setAudioOutputVolume(float volume)
+{
+ if (m_audioPreviewDelegate)
+ [m_audioPreviewDelegate setVolume:volume];
+}
+
+void AVFCameraSession::setAudioOutputMuted(bool muted)
+{
+ if (m_audioPreviewDelegate)
+ [m_audioPreviewDelegate setMuted:muted];
+}
+
+bool AVFCameraSession::isActive() const
+{
+ return m_active;
+}
+
+void AVFCameraSession::setActive(bool active)
+{
+ if (m_active == active)
+ return;
+
+    qCDebug(qLcCamera) << Q_FUNC_INFO << m_active << " -> " << active;
+
+    m_active = active;
+
+ if (active) {
+ if (!m_activeCameraDevice.isNull()) {
+ Q_EMIT readyToConfigureConnections();
+ m_defaultCodec = 0;
+ defaultCodec();
+ }
+
+ applyImageEncoderSettings();
+
+ // According to the doc, the capture device must be locked before
+ // startRunning to prevent the format we set to be overridden by the
+ // session preset.
+ [videoCaptureDevice() lockForConfiguration:nil];
+ [m_captureSession startRunning];
+ [videoCaptureDevice() unlockForConfiguration];
+ } else {
+ [m_captureSession stopRunning];
+ }
+}
+
+void AVFCameraSession::processRuntimeError()
+{
+ qWarning() << tr("Runtime camera error");
+ m_active = false;
+ Q_EMIT error(QCamera::CameraError, tr("Runtime camera error"));
+}
+
+void AVFCameraSession::processSessionStarted()
+{
+ qCDebug(qLcCamera) << Q_FUNC_INFO;
+ if (!m_active) {
+ m_active = true;
+ Q_EMIT activeChanged(m_active);
+ }
+}
+
+void AVFCameraSession::processSessionStopped()
+{
+ qCDebug(qLcCamera) << Q_FUNC_INFO;
+ if (m_active) {
+ m_active = false;
+ Q_EMIT activeChanged(m_active);
+ }
+}
+
+AVCaptureDevice *AVFCameraSession::createVideoCaptureDevice()
+{
+ AVCaptureDevice *device = nullptr;
+
+ QByteArray deviceId = m_activeCameraDevice.id();
+ if (!deviceId.isEmpty()) {
+ device = [AVCaptureDevice deviceWithUniqueID:
+ [NSString stringWithUTF8String:
+ deviceId.constData()]];
+ }
+
+ return device;
+}
+
+AVCaptureDevice *AVFCameraSession::createAudioCaptureDevice()
+{
+ AVCaptureDevice *device = nullptr;
+
+ QByteArray deviceId = m_service->audioInput() ? m_service->audioInput()->device.id()
+ : QByteArray();
+ if (!deviceId.isEmpty())
+ device = [AVCaptureDevice deviceWithUniqueID: [NSString stringWithUTF8String:deviceId.constData()]];
+
+ return device;
+}
+
+void AVFCameraSession::attachVideoInputDevice()
+{
+ if (!checkCameraPermission())
+ return;
+
+ if (m_videoInput) {
+ [m_captureSession removeInput:m_videoInput];
+ [m_videoInput release];
+ m_videoInput = nullptr;
+ }
+
+ AVCaptureDevice *videoDevice = createVideoCaptureDevice();
+ if (!videoDevice)
+ return;
+
+ m_videoInput = [AVCaptureDeviceInput
+ deviceInputWithDevice:videoDevice
+ error:nil];
+ if (m_videoInput && [m_captureSession canAddInput:m_videoInput]) {
+ [m_videoInput retain];
+ [m_captureSession addInput:m_videoInput];
+ } else {
+ qWarning() << "Failed to create video device input";
+ }
+}
+
+void AVFCameraSession::attachAudioInputDevice()
+{
+ if (m_audioInput) {
+ [m_captureSession removeInput:m_audioInput];
+ [m_audioInput release];
+ m_audioInput = nullptr;
+ }
+
+ AVCaptureDevice *audioDevice = createAudioCaptureDevice();
+ if (!audioDevice)
+ return;
+
+ m_audioInput = [AVCaptureDeviceInput
+ deviceInputWithDevice:audioDevice
+ error:nil];
+
+ if (m_audioInput && [m_captureSession canAddInput:m_audioInput]) {
+ [m_audioInput retain];
+ [m_captureSession addInput:m_audioInput];
+ } else {
+ qWarning() << "Failed to create audio device input";
+ }
+}
+
+bool AVFCameraSession::applyImageEncoderSettings()
+{
+ if (AVFImageCapture *control = m_service->avfImageCaptureControl())
+ return control->applySettings();
+
+ return false;
+}
+
+FourCharCode AVFCameraSession::defaultCodec()
+{
+ if (!m_defaultCodec) {
+ if (AVCaptureDevice *device = videoCaptureDevice()) {
+ AVCaptureDeviceFormat *format = device.activeFormat;
+ if (!format || !format.formatDescription)
+ return m_defaultCodec;
+ m_defaultCodec = CMVideoFormatDescriptionGetCodecType(format.formatDescription);
+ }
+ }
+ return m_defaultCodec;
+}
+
+void AVFCameraSession::setVideoSink(QVideoSink *sink)
+{
+ auto *videoSink = sink ? static_cast<AVFVideoSink *>(sink->platformVideoSink()) : nullptr;
+
+ if (m_videoSink == videoSink)
+ return;
+
+ m_videoSink = videoSink;
+
+ updateVideoOutput();
+}
+
+void AVFCameraSession::updateVideoInput()
+{
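+    // Pause an in-progress recording while the capture inputs are swapped,
+    // then resume it once the new configuration has been committed.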
+ auto recorder = m_service->recorderControl();
+ if (recorder && recorder->state() == QMediaRecorder::RecordingState)
+ recorder->toggleRecord(false);
+
+ [m_captureSession beginConfiguration];
+
+ attachVideoInputDevice();
+ if (!m_activeCameraDevice.isNull() && !m_videoOutput) {
+ setVideoOutput(new AVFCameraRenderer(this));
+ connect(m_videoOutput, &AVFCameraRenderer::newViewfinderFrame,
+ this, &AVFCameraSession::newViewfinderFrame);
+ updateVideoOutput();
+ }
+ if (m_videoOutput)
+ m_videoOutput->deviceOrientationChanged();
+
+ [m_captureSession commitConfiguration];
+
+ if (recorder && recorder->state() == QMediaRecorder::RecordingState)
+ recorder->toggleRecord(true);
+ Q_EMIT readyToConfigureConnections();
+}
+
+void AVFCameraSession::updateAudioInput()
+{
+ if (!checkMicrophonePermission())
+ return;
+
+ auto recorder = m_service->recorderControl();
+ if (recorder && recorder->state() == QMediaRecorder::RecordingState)
+ recorder->toggleRecord(false);
+
+ [m_captureSession beginConfiguration];
+ if (m_audioOutput) {
+ AVCaptureConnection *lastConnection = [m_audioOutput connectionWithMediaType:AVMediaTypeAudio];
+ [m_captureSession removeConnection:lastConnection];
+ }
+ attachAudioInputDevice();
+ if (m_audioInput)
+ addAudioCapture();
+ [m_captureSession commitConfiguration];
+
+ if (recorder && recorder->state() == QMediaRecorder::RecordingState)
+ recorder->toggleRecord(true);
+}
+
+void AVFCameraSession::updateAudioOutput()
+{
+ QByteArray deviceId = m_service->audioOutput()
+ ? m_service->audioOutput()->device.id()
+ : QByteArray();
+
+ [m_audioPreviewDelegate release];
+ m_audioPreviewDelegate = nil;
+ if (!deviceId.isEmpty()) {
+ m_audioPreviewDelegate = [[AVFAudioPreviewDelegate alloc] init];
+ [m_audioPreviewDelegate setupWithCaptureSession:this
+ audioOutputDevice:[NSString stringWithUTF8String:
+ deviceId.constData()]];
+ }
+}
+
+void AVFCameraSession::updateVideoOutput()
+{
+ if (m_videoOutput)
+ m_videoOutput->setVideoSink(m_videoSink);
+}
+
+bool AVFCameraSession::checkCameraPermission()
+{
+ const QCameraPermission permission;
+ const bool granted = qApp->checkPermission(permission) == Qt::PermissionStatus::Granted;
+ if (!granted)
+ qWarning() << "Access to camera not granted";
+
+ return granted;
+}
+
+bool AVFCameraSession::checkMicrophonePermission()
+{
+ const QMicrophonePermission permission;
+ const bool granted = qApp->checkPermission(permission) == Qt::PermissionStatus::Granted;
+ if (!granted)
+ qWarning() << "Access to microphone not granted";
+
+ return granted;
+}
+
+#include "moc_avfcamerasession_p.cpp"
diff --git a/src/plugins/multimedia/darwin/camera/avfcamerasession_p.h b/src/plugins/multimedia/darwin/camera/avfcamerasession_p.h
new file mode 100644
index 000000000..76e31ab48
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamerasession_p.h
@@ -0,0 +1,132 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFCAMERASESSION_H
+#define AVFCAMERASESSION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qmutex.h>
+#include <QtMultimedia/qcamera.h>
+#include <QVideoFrame>
+#include <qcameradevice.h>
+#include "avfaudiopreviewdelegate_p.h"
+
+#import <AVFoundation/AVFoundation.h>
+
+@class AVFCameraSessionObserver;
+
+QT_BEGIN_NAMESPACE
+
+class AVFCamera;
+class AVFCameraService;
+class AVFCameraRenderer;
+class AVFVideoSink;
+class QVideoSink;
+
+class AVFCameraSession : public QObject
+{
+ Q_OBJECT
+public:
+ AVFCameraSession(AVFCameraService *service, QObject *parent = nullptr);
+ ~AVFCameraSession();
+
+    QCameraDevice activeCameraDevice() const { return m_activeCameraDevice; }
+ void setActiveCamera(const QCameraDevice &info);
+
+ void setCameraFormat(const QCameraFormat &format);
+ QCameraFormat cameraFormat() const;
+
+ AVFCameraRenderer *videoOutput() const { return m_videoOutput; }
+ AVCaptureAudioDataOutput *audioOutput() const { return m_audioOutput; }
+ AVFAudioPreviewDelegate *audioPreviewDelegate() const { return m_audioPreviewDelegate; }
+
+ AVCaptureSession *captureSession() const { return m_captureSession; }
+ AVCaptureDevice *videoCaptureDevice() const;
+ AVCaptureDevice *audioCaptureDevice() const;
+
+ bool isActive() const;
+
+ FourCharCode defaultCodec();
+
+ AVCaptureDeviceInput *videoInput() const { return m_videoInput; }
+ AVCaptureDeviceInput *audioInput() const { return m_audioInput; }
+
+ void setVideoSink(QVideoSink *sink);
+
+ void updateVideoInput();
+
+ void updateAudioInput();
+ void updateAudioOutput();
+
+public Q_SLOTS:
+ void setActive(bool active);
+
+ void setAudioInputVolume(float volume);
+ void setAudioInputMuted(bool muted);
+ void setAudioOutputMuted(bool muted);
+ void setAudioOutputVolume(float volume);
+
+ void processRuntimeError();
+ void processSessionStarted();
+ void processSessionStopped();
+
+Q_SIGNALS:
+ void readyToConfigureConnections();
+ void activeChanged(bool);
+ void error(int error, const QString &errorString);
+ void newViewfinderFrame(const QVideoFrame &frame);
+
+private:
+ void updateCameraFormat(const QCameraFormat &format);
+
+ void setVideoOutput(AVFCameraRenderer *output);
+ void updateVideoOutput();
+
+ void addAudioCapture();
+
+ AVCaptureDevice *createVideoCaptureDevice();
+ AVCaptureDevice *createAudioCaptureDevice();
+ void attachVideoInputDevice();
+ void attachAudioInputDevice();
+ bool checkCameraPermission();
+ bool checkMicrophonePermission();
+
+ bool applyImageEncoderSettings();
+
+ QCameraDevice m_activeCameraDevice;
+ QCameraFormat m_cameraFormat;
+
+ AVFCameraService *m_service;
+ AVCaptureSession *m_captureSession;
+ AVFCameraSessionObserver *m_observer;
+
+ AVFCameraRenderer *m_videoOutput = nullptr;
+ AVFVideoSink *m_videoSink = nullptr;
+
+ AVCaptureDeviceInput *m_videoInput = nullptr;
+ AVCaptureDeviceInput *m_audioInput = nullptr;
+
+ AVCaptureAudioDataOutput *m_audioOutput = nullptr;
+ AVFAudioPreviewDelegate *m_audioPreviewDelegate = nullptr;
+
+ bool m_active = false;
+
+ float m_inputVolume = 1.0;
+ bool m_inputMuted = false;
+
+ FourCharCode m_defaultCodec;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/camera/avfcamerautility.mm b/src/plugins/multimedia/darwin/camera/avfcamerautility.mm
new file mode 100644
index 000000000..1864eb0e8
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamerautility.mm
@@ -0,0 +1,730 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfcamerautility_p.h"
+#include "avfcameradebug_p.h"
+
+#include <QtCore/qvector.h>
+#include <QtCore/qpair.h>
+#include <private/qmultimediautils_p.h>
+#include <private/qcameradevice_p.h>
+#include "avfvideobuffer_p.h"
+#include "qavfhelpers_p.h"
+
+#include <functional>
+#include <algorithm>
+#include <limits>
+#include <tuple>
+
+QT_BEGIN_NAMESPACE
+
+Q_LOGGING_CATEGORY(qLcCamera, "qt.multimedia.camera")
+
+AVFPSRange qt_connection_framerates(AVCaptureConnection *videoConnection)
+{
+ Q_ASSERT(videoConnection);
+
+ AVFPSRange newRange;
+ // "The value in the videoMinFrameDuration is equivalent to the reciprocal
+ // of the maximum framerate, the value in the videoMaxFrameDuration is equivalent
+ // to the reciprocal of the minimum framerate."
+ if (videoConnection.supportsVideoMinFrameDuration) {
+ const CMTime cmMin = videoConnection.videoMinFrameDuration;
+ if (CMTimeCompare(cmMin, kCMTimeInvalid)) { // Has some non-default value:
+ if (const Float64 minSeconds = CMTimeGetSeconds(cmMin))
+ newRange.second = 1. / minSeconds;
+ }
+ }
+
+ if (videoConnection.supportsVideoMaxFrameDuration) {
+ const CMTime cmMax = videoConnection.videoMaxFrameDuration;
+ if (CMTimeCompare(cmMax, kCMTimeInvalid)) {
+ if (const Float64 maxSeconds = CMTimeGetSeconds(cmMax))
+ newRange.first = 1. / maxSeconds;
+ }
+ }
+
+ return newRange;
+}
+
+namespace {
+
+inline bool qt_area_sane(const QSize &size)
+{
+ return !size.isNull() && size.isValid()
+ && std::numeric_limits<int>::max() / size.width() >= size.height();
+}
+
+template <template <typename...> class Comp> // std::less or std::greater (or std::equal_to)
+struct ByResolution
+{
+    bool operator() (AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2) const
+ {
+ Q_ASSERT(f1 && f2);
+ const QSize r1(qt_device_format_resolution(f1));
+ const QSize r2(qt_device_format_resolution(f2));
+        // use std::tuple for lexicographical sorting:
+ const Comp<std::tuple<int, int>> op = {};
+ return op(std::make_tuple(r1.width(), r1.height()),
+ std::make_tuple(r2.width(), r2.height()));
+ }
+};
+
+struct FormatHasNoFPSRange
+{
+ bool operator() (AVCaptureDeviceFormat *format) const
+ {
+ Q_ASSERT(format);
+ return !format.videoSupportedFrameRateRanges || !format.videoSupportedFrameRateRanges.count;
+ }
+};
+
+Float64 qt_find_min_framerate_distance(AVCaptureDeviceFormat *format, Float64 fps)
+{
+ Q_ASSERT(format && format.videoSupportedFrameRateRanges
+ && format.videoSupportedFrameRateRanges.count);
+
+ AVFrameRateRange *range = [format.videoSupportedFrameRateRanges objectAtIndex:0];
+ Float64 distance = qAbs(range.maxFrameRate - fps);
+ for (NSUInteger i = 1, e = format.videoSupportedFrameRateRanges.count; i < e; ++i) {
+ range = [format.videoSupportedFrameRateRanges objectAtIndex:i];
+ distance = qMin(distance, qAbs(range.maxFrameRate - fps));
+ }
+
+ return distance;
+}
+
+} // Unnamed namespace.
+
+AVCaptureDeviceFormat *
+qt_convert_to_capture_device_format(AVCaptureDevice *captureDevice,
+ const QCameraFormat &cameraFormat,
+ const std::function<bool(uint32_t)> &cvFormatValidator)
+{
+ const auto cameraFormatPrivate = QCameraFormatPrivate::handle(cameraFormat);
+ if (!cameraFormatPrivate)
+ return nil;
+
+ const auto requiredCvPixFormat = QAVFHelpers::toCVPixelFormat(cameraFormatPrivate->pixelFormat,
+ cameraFormatPrivate->colorRange);
+
+ if (requiredCvPixFormat == CvPixelFormatInvalid)
+ return nil;
+
+ AVCaptureDeviceFormat *newFormat = nil;
+ Float64 newFormatMaxFrameRate = {};
+ NSArray<AVCaptureDeviceFormat *> *formats = captureDevice.formats;
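+    // Among the formats with the requested pixel format and resolution, pick
+    // the one whose supported frame rate range covers the requested range,
+    // preferring the highest maximum frame rate.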
+ for (AVCaptureDeviceFormat *format in formats) {
+ CMFormatDescriptionRef formatDesc = format.formatDescription;
+ CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(formatDesc);
+ FourCharCode cvPixFormat = CMVideoFormatDescriptionGetCodecType(formatDesc);
+
+ if (cvPixFormat != requiredCvPixFormat)
+ continue;
+
+ if (cameraFormatPrivate->resolution != QSize(dim.width, dim.height))
+ continue;
+
+ if (cvFormatValidator && !cvFormatValidator(cvPixFormat))
+ continue;
+
+ const float epsilon = 0.001f;
+ for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) {
+ if (frameRateRange.minFrameRate >= cameraFormatPrivate->minFrameRate - epsilon
+ && frameRateRange.maxFrameRate <= cameraFormatPrivate->maxFrameRate + epsilon
+ && newFormatMaxFrameRate < frameRateRange.maxFrameRate) {
+ newFormat = format;
+ newFormatMaxFrameRate = frameRateRange.maxFrameRate;
+ }
+ }
+ }
+ return newFormat;
+}
+
+QVector<AVCaptureDeviceFormat *> qt_unique_device_formats(AVCaptureDevice *captureDevice, FourCharCode filter)
+{
+ // 'filter' is the format we prefer if we have duplicates.
+ Q_ASSERT(captureDevice);
+
+ QVector<AVCaptureDeviceFormat *> formats;
+
+ if (!captureDevice.formats || !captureDevice.formats.count)
+ return formats;
+
+ formats.reserve(captureDevice.formats.count);
+ for (AVCaptureDeviceFormat *format in captureDevice.formats) {
+ const QSize resolution(qt_device_format_resolution(format));
+ if (resolution.isNull() || !resolution.isValid())
+ continue;
+ formats << format;
+ }
+
+ if (!formats.size())
+ return formats;
+
+ std::sort(formats.begin(), formats.end(), ByResolution<std::less>());
+
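+    // The formats are now sorted by resolution; collapse runs of equal
+    // resolutions, keeping the entry whose codec matches 'filter' when
+    // duplicates occur.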
+ QSize size(qt_device_format_resolution(formats[0]));
+ FourCharCode codec = CMVideoFormatDescriptionGetCodecType(formats[0].formatDescription);
+ int last = 0;
+ for (int i = 1; i < formats.size(); ++i) {
+ const QSize nextSize(qt_device_format_resolution(formats[i]));
+ if (nextSize == size) {
+ if (codec == filter)
+ continue;
+ formats[last] = formats[i];
+ } else {
+ ++last;
+ formats[last] = formats[i];
+ size = nextSize;
+ }
+ codec = CMVideoFormatDescriptionGetCodecType(formats[i].formatDescription);
+ }
+ formats.resize(last + 1);
+
+ return formats;
+}
+
+QSize qt_device_format_resolution(AVCaptureDeviceFormat *format)
+{
+ if (!format || !format.formatDescription)
+ return QSize();
+
+ const CMVideoDimensions res = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+ return QSize(res.width, res.height);
+}
+
+QSize qt_device_format_high_resolution(AVCaptureDeviceFormat *format)
+{
+ Q_ASSERT(format);
+ QSize res;
+#if defined(Q_OS_IOS)
+ const CMVideoDimensions hrDim(format.highResolutionStillImageDimensions);
+ res.setWidth(hrDim.width);
+ res.setHeight(hrDim.height);
+#endif
+ return res;
+}
+
+QVector<AVFPSRange> qt_device_format_framerates(AVCaptureDeviceFormat *format)
+{
+ Q_ASSERT(format);
+
+ QVector<AVFPSRange> qtRanges;
+
+ if (!format.videoSupportedFrameRateRanges || !format.videoSupportedFrameRateRanges.count)
+ return qtRanges;
+
+ qtRanges.reserve(format.videoSupportedFrameRateRanges.count);
+ for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges)
+ qtRanges << AVFPSRange(range.minFrameRate, range.maxFrameRate);
+
+ return qtRanges;
+}
+
+QSize qt_device_format_pixel_aspect_ratio(AVCaptureDeviceFormat *format)
+{
+ Q_ASSERT(format);
+
+ if (!format.formatDescription) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "no format description found";
+ return QSize();
+ }
+
+ const CMVideoDimensions res = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+ const CGSize resPAR = CMVideoFormatDescriptionGetPresentationDimensions(format.formatDescription, true, false);
+
+ if (qAbs(resPAR.width - res.width) < 1.) {
+ // "Pixel aspect ratio is used to adjust the width, leaving the height alone."
+ return QSize(1, 1);
+ }
+
+ if (!res.width || !resPAR.width)
+ return QSize();
+
+ auto frac = qRealToFraction(resPAR.width > res.width ? res.width / qreal(resPAR.width)
+ : resPAR.width / qreal(res.width));
+
+ return QSize(frac.numerator, frac.denominator);
+}
+
+AVCaptureDeviceFormat *qt_find_best_resolution_match(AVCaptureDevice *captureDevice,
+ const QSize &request,
+ FourCharCode filter,
+ bool stillImage)
+{
+ Q_ASSERT(captureDevice);
+ Q_ASSERT(!request.isNull() && request.isValid());
+
+ if (!captureDevice.formats || !captureDevice.formats.count)
+ return nullptr;
+
+ QVector<AVCaptureDeviceFormat *> formats(qt_unique_device_formats(captureDevice, filter));
+
+ for (int i = 0; i < formats.size(); ++i) {
+ AVCaptureDeviceFormat *format = formats[i];
+ if (qt_device_format_resolution(format) == request)
+ return format;
+ // iOS only (still images).
+ if (stillImage && qt_device_format_high_resolution(format) == request)
+ return format;
+ }
+
+ if (!qt_area_sane(request))
+ return nullptr;
+
+ typedef QPair<QSize, AVCaptureDeviceFormat *> FormatPair;
+
+ QVector<FormatPair> pairs; // default|HR sizes
+ pairs.reserve(formats.size());
+
+ for (int i = 0; i < formats.size(); ++i) {
+ AVCaptureDeviceFormat *format = formats[i];
+ const QSize res(qt_device_format_resolution(format));
+ if (!res.isNull() && res.isValid() && qt_area_sane(res))
+ pairs << FormatPair(res, format);
+ const QSize highRes(qt_device_format_high_resolution(format));
+ if (stillImage && !highRes.isNull() && highRes.isValid() && qt_area_sane(highRes))
+ pairs << FormatPair(highRes, format);
+ }
+
+ if (!pairs.size())
+ return nullptr;
+
+ AVCaptureDeviceFormat *best = pairs[0].second;
+ QSize next(pairs[0].first);
+ int wDiff = qAbs(request.width() - next.width());
+ int hDiff = qAbs(request.height() - next.height());
+ const int area = request.width() * request.height();
+ int areaDiff = qAbs(area - next.width() * next.height());
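+    // Pick the closest match: strictly smaller width and height differences
+    // always win; otherwise a candidate no worse on one axis wins if its
+    // area difference is no larger.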
+ for (int i = 1; i < pairs.size(); ++i) {
+ next = pairs[i].first;
+ const int newWDiff = qAbs(next.width() - request.width());
+ const int newHDiff = qAbs(next.height() - request.height());
+ const int newAreaDiff = qAbs(area - next.width() * next.height());
+
+ if ((newWDiff < wDiff && newHDiff < hDiff)
+ || ((newWDiff <= wDiff || newHDiff <= hDiff) && newAreaDiff <= areaDiff)) {
+ wDiff = newWDiff;
+ hDiff = newHDiff;
+ best = pairs[i].second;
+ areaDiff = newAreaDiff;
+ }
+ }
+
+ return best;
+}
+
+AVCaptureDeviceFormat *qt_find_best_framerate_match(AVCaptureDevice *captureDevice,
+ FourCharCode filter,
+ Float64 fps)
+{
+ Q_ASSERT(captureDevice);
+ Q_ASSERT(fps > 0.);
+
+ const qreal epsilon = 0.1;
+
+    QVector<AVCaptureDeviceFormat *> sorted(qt_unique_device_formats(captureDevice, filter));
+ // Sort formats by their resolution in decreasing order:
+ std::sort(sorted.begin(), sorted.end(), ByResolution<std::greater>());
+ // We can use only formats with framerate ranges:
+ sorted.erase(std::remove_if(sorted.begin(), sorted.end(), FormatHasNoFPSRange()), sorted.end());
+
+ if (!sorted.size())
+ return nil;
+
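+    // First pass: return the highest-resolution format whose supported range
+    // contains 'fps' (point ranges, as seen on OS X, are matched with an
+    // epsilon). If none qualifies, fall back below to the format whose
+    // ranges' maximum frame rate is closest to 'fps'.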
+ for (int i = 0; i < sorted.size(); ++i) {
+ AVCaptureDeviceFormat *format = sorted[i];
+ for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
+ if (range.maxFrameRate - range.minFrameRate < epsilon) {
+ // On OS X ranges are points (built-in camera).
+ if (qAbs(fps - range.maxFrameRate) < epsilon)
+ return format;
+ }
+
+ if (fps >= range.minFrameRate && fps <= range.maxFrameRate)
+ return format;
+ }
+ }
+
+ Float64 distance = qt_find_min_framerate_distance(sorted[0], fps);
+ AVCaptureDeviceFormat *match = sorted[0];
+ for (int i = 1; i < sorted.size(); ++i) {
+ const Float64 newDistance = qt_find_min_framerate_distance(sorted[i], fps);
+ if (newDistance < distance) {
+ distance = newDistance;
+ match = sorted[i];
+ }
+ }
+
+ return match;
+}
+
+AVFrameRateRange *qt_find_supported_framerate_range(AVCaptureDeviceFormat *format, Float64 fps)
+{
+ Q_ASSERT(format && format.videoSupportedFrameRateRanges
+ && format.videoSupportedFrameRateRanges.count);
+
+ const qreal epsilon = 0.1;
+
+ for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
+ if (range.maxFrameRate - range.minFrameRate < epsilon) {
+ // On OS X ranges are points (built-in camera).
+ if (qAbs(fps - range.maxFrameRate) < epsilon)
+ return range;
+ }
+
+ if (fps >= range.minFrameRate && fps <= range.maxFrameRate)
+ return range;
+ }
+
+ AVFrameRateRange *match = [format.videoSupportedFrameRateRanges objectAtIndex:0];
+ Float64 distance = qAbs(match.maxFrameRate - fps);
+ for (NSUInteger i = 1, e = format.videoSupportedFrameRateRanges.count; i < e; ++i) {
+ AVFrameRateRange *range = [format.videoSupportedFrameRateRanges objectAtIndex:i];
+ const Float64 newDistance = qAbs(range.maxFrameRate - fps);
+ if (newDistance < distance) {
+ distance = newDistance;
+ match = range;
+ }
+ }
+
+ return match;
+}
+
+bool qt_format_supports_framerate(AVCaptureDeviceFormat *format, qreal fps)
+{
+ if (format && fps > qreal(0)) {
+ const qreal epsilon = 0.1;
+ for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
+ if (fps >= range.minFrameRate - epsilon && fps <= range.maxFrameRate + epsilon)
+ return true;
+ }
+ }
+
+ return false;
+}
+
+bool qt_formats_are_equal(AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2)
+{
+ if (f1 == f2)
+ return true;
+
+ if (![f1.mediaType isEqualToString:f2.mediaType])
+ return false;
+
+ return CMFormatDescriptionEqual(f1.formatDescription, f2.formatDescription);
+}
+
+bool qt_set_active_format(AVCaptureDevice *captureDevice, AVCaptureDeviceFormat *format, bool preserveFps)
+{
+ static bool firstSet = true;
+
+ if (!captureDevice || !format)
+ return false;
+
+ if (qt_formats_are_equal(captureDevice.activeFormat, format)) {
+ if (firstSet) {
+ // The capture device format is persistent. The first time we set a format, report that
+ // it changed even if the formats are the same.
+ // This prevents the session from resetting the format to the default value.
+ firstSet = false;
+ return true;
+ }
+ return false;
+ }
+
+ firstSet = false;
+
+ const AVFConfigurationLock lock(captureDevice);
+ if (!lock) {
+ qWarning("Failed to set active format (lock failed)");
+ return false;
+ }
+
+ // Changing the activeFormat resets the frame rate.
+ AVFPSRange fps;
+ if (preserveFps)
+ fps = qt_current_framerates(captureDevice, nil);
+
+ captureDevice.activeFormat = format;
+
+ if (preserveFps)
+ qt_set_framerate_limits(captureDevice, nil, fps.first, fps.second);
+
+ return true;
+}
+
+void qt_set_framerate_limits(AVCaptureConnection *videoConnection, qreal minFPS, qreal maxFPS)
+{
+ Q_ASSERT(videoConnection);
+
+ if (minFPS < 0. || maxFPS < 0. || (maxFPS && maxFPS < minFPS)) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid framerates (min, max):"
+ << minFPS << maxFPS;
+ return;
+ }
+
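+    // Frame durations are reciprocals of frame rates: the minimum duration
+    // bounds the maximum FPS, and the maximum duration bounds the minimum FPS.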
+ CMTime minDuration = kCMTimeInvalid;
+ if (maxFPS > 0.) {
+ if (!videoConnection.supportsVideoMinFrameDuration)
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "maximum framerate is not supported";
+ else
+ minDuration = CMTimeMake(1, maxFPS);
+ }
+ if (videoConnection.supportsVideoMinFrameDuration)
+ videoConnection.videoMinFrameDuration = minDuration;
+
+ CMTime maxDuration = kCMTimeInvalid;
+ if (minFPS > 0.) {
+ if (!videoConnection.supportsVideoMaxFrameDuration)
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "minimum framerate is not supported";
+ else
+ maxDuration = CMTimeMake(1, minFPS);
+ }
+ if (videoConnection.supportsVideoMaxFrameDuration)
+ videoConnection.videoMaxFrameDuration = maxDuration;
+}
+
+CMTime qt_adjusted_frame_duration(AVFrameRateRange *range, qreal fps)
+{
+ Q_ASSERT(range);
+ Q_ASSERT(fps > 0.);
+
+ if (range.maxFrameRate - range.minFrameRate < 0.1) {
+ // Can happen on OS X.
+ return range.minFrameDuration;
+ }
+
+ if (fps <= range.minFrameRate)
+ return range.maxFrameDuration;
+ if (fps >= range.maxFrameRate)
+ return range.minFrameDuration;
+
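+    // Inside the supported range, approximate 1/fps as a fraction so that
+    // CMTimeMake receives integer value/timescale arguments.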
+ auto frac = qRealToFraction(1. / fps);
+ return CMTimeMake(frac.numerator, frac.denominator);
+}
+
+void qt_set_framerate_limits(AVCaptureDevice *captureDevice, qreal minFPS, qreal maxFPS)
+{
+ Q_ASSERT(captureDevice);
+ if (!captureDevice.activeFormat) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "no active capture device format";
+ return;
+ }
+
+ if (minFPS < 0. || maxFPS < 0. || (maxFPS && maxFPS < minFPS)) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid framerates (min, max):"
+ << minFPS << maxFPS;
+ return;
+ }
+
+ CMTime minFrameDuration = kCMTimeInvalid;
+ CMTime maxFrameDuration = kCMTimeInvalid;
+ if (maxFPS || minFPS) {
+ AVFrameRateRange *range = qt_find_supported_framerate_range(captureDevice.activeFormat,
+ maxFPS ? maxFPS : minFPS);
+ if (!range) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "no framerate range found, (min, max):"
+ << minFPS << maxFPS;
+ return;
+ }
+
+ if (maxFPS)
+ minFrameDuration = qt_adjusted_frame_duration(range, maxFPS);
+ if (minFPS)
+ maxFrameDuration = qt_adjusted_frame_duration(range, minFPS);
+ }
+
+ const AVFConfigurationLock lock(captureDevice);
+ if (!lock) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
+ return;
+ }
+
+    // While Apple's docs say kCMTimeInvalid will result in the default
+    // settings for this format, on OS X kCMTimeInvalid ends with a runtime
+    // exception:
+    // "The activeVideoMinFrameDuration passed is not supported by the device."
+    // Instead, use the first item in the supported frame rate ranges.
+#ifdef Q_OS_IOS
+ [captureDevice setActiveVideoMinFrameDuration:minFrameDuration];
+ [captureDevice setActiveVideoMaxFrameDuration:maxFrameDuration];
+#elif defined(Q_OS_MACOS)
+ if (CMTimeCompare(minFrameDuration, kCMTimeInvalid) == 0
+ && CMTimeCompare(maxFrameDuration, kCMTimeInvalid) == 0) {
+ AVFrameRateRange *range = captureDevice.activeFormat.videoSupportedFrameRateRanges.firstObject;
+ minFrameDuration = range.minFrameDuration;
+ maxFrameDuration = range.maxFrameDuration;
+ }
+
+ if (CMTimeCompare(minFrameDuration, kCMTimeInvalid))
+ [captureDevice setActiveVideoMinFrameDuration:minFrameDuration];
+
+ if (CMTimeCompare(maxFrameDuration, kCMTimeInvalid))
+ [captureDevice setActiveVideoMaxFrameDuration:maxFrameDuration];
+#endif // Q_OS_MACOS
+}
+
+void qt_set_framerate_limits(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection,
+ qreal minFPS, qreal maxFPS)
+{
+ Q_UNUSED(videoConnection);
+ Q_ASSERT(captureDevice);
+ qt_set_framerate_limits(captureDevice, minFPS, maxFPS);
+}
+
+AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection)
+{
+ Q_UNUSED(videoConnection);
+ Q_ASSERT(captureDevice);
+
+ AVFPSRange fps;
+ const CMTime minDuration = captureDevice.activeVideoMinFrameDuration;
+ if (CMTimeCompare(minDuration, kCMTimeInvalid)) {
+ if (const Float64 minSeconds = CMTimeGetSeconds(minDuration))
+ fps.second = 1. / minSeconds; // Max FPS = 1 / MinDuration.
+ }
+
+ const CMTime maxDuration = captureDevice.activeVideoMaxFrameDuration;
+ if (CMTimeCompare(maxDuration, kCMTimeInvalid)) {
+ if (const Float64 maxSeconds = CMTimeGetSeconds(maxDuration))
+ fps.first = 1. / maxSeconds; // Min FPS = 1 / MaxDuration.
+ }
+
+ return fps;
+}
+
+QList<AudioValueRange> qt_supported_sample_rates_for_format(int codecId)
+{
+ QList<AudioValueRange> result;
+ UInt32 format = codecId;
+ UInt32 size;
+ OSStatus err = AudioFormatGetPropertyInfo(
+ kAudioFormatProperty_AvailableEncodeSampleRates,
+ sizeof(format),
+ &format,
+ &size);
+
+ if (err != noErr)
+ return result;
+
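+    // The reported size is in bytes; derive the number of AudioValueRange
+    // entries before fetching the actual values.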
+ UInt32 numRanges = size / sizeof(AudioValueRange);
+ AudioValueRange sampleRanges[numRanges];
+
+ err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeSampleRates,
+ sizeof(format),
+ &format,
+ &size,
+ sampleRanges);
+ if (err != noErr)
+ return result;
+
+ for (UInt32 i = 0; i < numRanges; i++)
+ result << sampleRanges[i];
+
+ return result;
+}
+
+QList<AudioValueRange> qt_supported_bit_rates_for_format(int codecId)
+{
+ QList<AudioValueRange> result;
+ UInt32 format = codecId;
+ UInt32 size;
+ OSStatus err = AudioFormatGetPropertyInfo(
+ kAudioFormatProperty_AvailableEncodeBitRates,
+ sizeof(format),
+ &format,
+ &size);
+
+ if (err != noErr)
+ return result;
+
+ UInt32 numRanges = size / sizeof(AudioValueRange);
+ AudioValueRange bitRanges[numRanges];
+
+ err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeBitRates,
+ sizeof(format),
+ &format,
+ &size,
+ bitRanges);
+ if (err != noErr)
+ return result;
+
+ for (UInt32 i = 0; i < numRanges; i++)
+ result << bitRanges[i];
+
+ return result;
+}
+
+std::optional<QList<UInt32>> qt_supported_channel_counts_for_format(int codecId)
+{
+ QList<UInt32> result;
+ AudioStreamBasicDescription sf = {};
+ sf.mFormatID = codecId;
+ UInt32 size;
+ OSStatus err = AudioFormatGetPropertyInfo(
+ kAudioFormatProperty_AvailableEncodeNumberChannels,
+ sizeof(sf),
+ &sf,
+ &size);
+
+ if (err != noErr)
+ return result;
+
+ // From Apple's docs:
+ // A value of 0xFFFFFFFF indicates that any number of channels may be encoded.
+ if (int(size) == -1)
+ return std::nullopt;
+
+ UInt32 numCounts = size / sizeof(UInt32);
+ UInt32 channelCounts[numCounts];
+
+ err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeNumberChannels,
+ sizeof(sf),
+ &sf,
+ &size,
+ channelCounts);
+ if (err != noErr)
+ return result;
+
+ for (UInt32 i = 0; i < numCounts; i++)
+ result << channelCounts[i];
+
+ return result;
+}
+
+QList<UInt32> qt_supported_channel_layout_tags_for_format(int codecId, int noChannels)
+{
+ QList<UInt32> result;
+ AudioStreamBasicDescription sf = {};
+ sf.mFormatID = codecId;
+ sf.mChannelsPerFrame = noChannels;
+ UInt32 size;
+ OSStatus err = AudioFormatGetPropertyInfo(
+ kAudioFormatProperty_AvailableEncodeChannelLayoutTags,
+ sizeof(sf),
+ &sf,
+ &size);
+
+ if (err != noErr)
+ return result;
+
+ UInt32 noTags = (UInt32)size / sizeof(UInt32);
+ AudioChannelLayoutTag tagsArr[noTags];
+
+ err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeChannelLayoutTags,
+ sizeof(sf),
+ &sf,
+ &size,
+ tagsArr);
+ if (err != noErr)
+ return result;
+
+ for (UInt32 i = 0; i < noTags; i++)
+ result << tagsArr[i];
+
+ return result;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/darwin/camera/avfcamerautility_p.h b/src/plugins/multimedia/darwin/camera/avfcamerautility_p.h
new file mode 100644
index 000000000..b5c9e9bda
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamerautility_p.h
@@ -0,0 +1,165 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFCAMERAUTILITY_H
+#define AVFCAMERAUTILITY_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qglobal.h>
+#include <QtCore/qdebug.h>
+#include <QtCore/qlist.h>
+#include <QtCore/qpair.h>
+#include <QtCore/qsize.h>
+
+#include <functional>
+#include <memory>
+#include <optional>
+
+#include "qcameradevice.h"
+
+#include <AVFoundation/AVFoundation.h>
+
+// In case we have SDK below 10.7/7.0:
+@class AVCaptureDeviceFormat;
+
+QT_BEGIN_NAMESPACE
+
+class AVFConfigurationLock
+{
+public:
+ explicit AVFConfigurationLock(AVCaptureDevice *captureDevice)
+ : m_captureDevice(captureDevice),
+ m_locked(false)
+ {
+ Q_ASSERT(m_captureDevice);
+ NSError *error = nil;
+ m_locked = [m_captureDevice lockForConfiguration:&error];
+ }
+
+ ~AVFConfigurationLock()
+ {
+ if (m_locked)
+ [m_captureDevice unlockForConfiguration];
+ }
+
+ operator bool() const
+ {
+ return m_locked;
+ }
+
+private:
+ Q_DISABLE_COPY(AVFConfigurationLock)
+
+ AVCaptureDevice *m_captureDevice;
+ bool m_locked;
+};
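+// A typical usage sketch: construct the lock on the stack, check that the
+// configuration lock was acquired, then mutate the device while it is held:
+//
+//     const AVFConfigurationLock lock(captureDevice);
+//     if (!lock)
+//         return; // lockForConfiguration failed
+//     captureDevice.activeFormat = someFormat;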
+
+struct AVFObjectDeleter {
+ void operator()(NSObject *obj)
+ {
+ if (obj)
+ [obj release];
+ }
+};
+
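+// Owning wrapper for Obj-C objects under manual retain/release: it assumes a
+// +1 reference is passed in and releases it on destruction (see AVFObjectDeleter).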
+template<class T>
+class AVFScopedPointer : public std::unique_ptr<NSObject, AVFObjectDeleter>
+{
+public:
+ AVFScopedPointer() {}
+ explicit AVFScopedPointer(T *ptr) : std::unique_ptr<NSObject, AVFObjectDeleter>(ptr) {}
+ operator T*() const
+ {
+ // Quite handy operator to enable Obj-C messages: [ptr someMethod];
+ return data();
+ }
+
+ T *data() const
+ {
+ return static_cast<T *>(get());
+ }
+
+ T *take()
+ {
+ return static_cast<T *>(release());
+ }
+};
+
+template<>
+class AVFScopedPointer<dispatch_queue_t>
+{
+public:
+ AVFScopedPointer() : m_queue(nullptr) {}
+ explicit AVFScopedPointer(dispatch_queue_t q) : m_queue(q) {}
+
+ ~AVFScopedPointer()
+ {
+ if (m_queue)
+ dispatch_release(m_queue);
+ }
+
+ operator dispatch_queue_t() const
+ {
+ // Quite handy operator to enable Obj-C messages: [ptr someMethod];
+ return m_queue;
+ }
+
+ dispatch_queue_t data() const
+ {
+ return m_queue;
+ }
+
+ void reset(dispatch_queue_t q = nullptr)
+ {
+ if (m_queue)
+ dispatch_release(m_queue);
+ m_queue = q;
+ }
+
+private:
+ dispatch_queue_t m_queue;
+
+ Q_DISABLE_COPY(AVFScopedPointer)
+};
+
+typedef QPair<qreal, qreal> AVFPSRange;
+AVFPSRange qt_connection_framerates(AVCaptureConnection *videoConnection);
+
+AVCaptureDeviceFormat *qt_convert_to_capture_device_format(
+ AVCaptureDevice *captureDevice, const QCameraFormat &format,
+ const std::function<bool(uint32_t)> &cvFormatValidator = nullptr);
+QList<AVCaptureDeviceFormat *> qt_unique_device_formats(AVCaptureDevice *captureDevice,
+ FourCharCode preferredFormat);
+QSize qt_device_format_resolution(AVCaptureDeviceFormat *format);
+QSize qt_device_format_high_resolution(AVCaptureDeviceFormat *format);
+QSize qt_device_format_pixel_aspect_ratio(AVCaptureDeviceFormat *format);
+QList<AVFPSRange> qt_device_format_framerates(AVCaptureDeviceFormat *format);
+AVCaptureDeviceFormat *qt_find_best_resolution_match(AVCaptureDevice *captureDevice, const QSize &res,
+ FourCharCode preferredFormat, bool stillImage = true);
+AVCaptureDeviceFormat *qt_find_best_framerate_match(AVCaptureDevice *captureDevice,
+ FourCharCode preferredFormat,
+ Float64 fps);
+AVFrameRateRange *qt_find_supported_framerate_range(AVCaptureDeviceFormat *format, Float64 fps);
+bool qt_format_supports_framerate(AVCaptureDeviceFormat *format, qreal fps);
+
+bool qt_formats_are_equal(AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2);
+bool qt_set_active_format(AVCaptureDevice *captureDevice, AVCaptureDeviceFormat *format, bool preserveFps);
+
+AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection);
+void qt_set_framerate_limits(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection,
+ qreal minFPS, qreal maxFPS);
+
+QList<AudioValueRange> qt_supported_sample_rates_for_format(int codecId);
+QList<AudioValueRange> qt_supported_bit_rates_for_format(int codecId);
+std::optional<QList<UInt32>> qt_supported_channel_counts_for_format(int codecId);
+QList<UInt32> qt_supported_channel_layout_tags_for_format(int codecId, int noChannels);
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/camera/avfimagecapture.mm b/src/plugins/multimedia/darwin/camera/avfimagecapture.mm
new file mode 100644
index 000000000..2ee7b0597
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfimagecapture.mm
@@ -0,0 +1,385 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfcameradebug_p.h"
+#include "avfimagecapture_p.h"
+#include "avfcameraservice_p.h"
+#include "avfcamerautility_p.h"
+#include "avfcamera_p.h"
+#include "avfcamerasession_p.h"
+#include "avfcamerarenderer_p.h"
+#include "private/qmediastoragelocation_p.h"
+#include <private/qplatformimagecapture_p.h>
+#include <private/qmemoryvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
+
+#include <QtCore/qurl.h>
+#include <QtCore/qfile.h>
+#include <QtCore/qbuffer.h>
+#include <QtConcurrent/qtconcurrentrun.h>
+#include <QtGui/qimagereader.h>
+
+#import <AVFoundation/AVFoundation.h>
+
+QT_USE_NAMESPACE
+
+AVFImageCapture::AVFImageCapture(QImageCapture *parent)
+ : QPlatformImageCapture(parent)
+{
+ m_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
+
+ NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:
+ AVVideoCodecTypeJPEG, AVVideoCodecKey, nil];
+
+ [m_stillImageOutput setOutputSettings:outputSettings];
+ [outputSettings release];
+}
+
+AVFImageCapture::~AVFImageCapture()
+{
+ [m_stillImageOutput release];
+}
+
+bool AVFImageCapture::isReadyForCapture() const
+{
+ return m_cameraControl && m_videoConnection && m_cameraControl->isActive();
+}
+
+void AVFImageCapture::updateReadyStatus()
+{
+ if (m_ready != isReadyForCapture()) {
+ m_ready = !m_ready;
+ qCDebug(qLcCamera) << "ReadyToCapture status changed:" << m_ready;
+ Q_EMIT readyForCaptureChanged(m_ready);
+ }
+}
+
+int AVFImageCapture::doCapture(const QString &actualFileName)
+{
+ if (!m_session) {
+ QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
+ Q_ARG(int, m_lastCaptureId),
+ Q_ARG(int, QImageCapture::ResourceError),
+ Q_ARG(QString, QPlatformImageCapture::msgImageCaptureNotSet()));
+ return -1;
+ }
+ if (!isReadyForCapture()) {
+ QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
+ Q_ARG(int, m_lastCaptureId),
+ Q_ARG(int, QImageCapture::NotReadyError),
+ Q_ARG(QString, QPlatformImageCapture::msgCameraNotReady()));
+ return -1;
+ }
+ m_lastCaptureId++;
+
+ bool captureToBuffer = actualFileName.isEmpty();
+
+ CaptureRequest request = { m_lastCaptureId, QSharedPointer<QSemaphore>::create()};
+ m_requestsMutex.lock();
+ m_captureRequests.enqueue(request);
+ m_requestsMutex.unlock();
+
+ QString fileName(actualFileName);
+
+ [m_stillImageOutput captureStillImageAsynchronouslyFromConnection:m_videoConnection
+ completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
+
+ if (error) {
+ QStringList messageParts;
+ messageParts << QString::fromUtf8([[error localizedDescription] UTF8String]);
+ messageParts << QString::fromUtf8([[error localizedFailureReason] UTF8String]);
+ messageParts << QString::fromUtf8([[error localizedRecoverySuggestion] UTF8String]);
+
+ QString errorMessage = messageParts.join(QChar(u' '));
+ qCDebug(qLcCamera) << "Image capture failed:" << errorMessage;
+
+ QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
+ Q_ARG(int, request.captureId),
+ Q_ARG(int, QImageCapture::ResourceError),
+ Q_ARG(QString, errorMessage));
+ return;
+ }
+
+ // Wait for the preview to be generated before saving the JPEG (but only
+ // if we have AVFCameraRenderer attached).
+        // It is possible to stop the camera immediately after trying to
+        // capture an image; this can leave the callback's thread blocked,
+        // waiting for a new viewfinder frame to arrive and the semaphore to
+        // be released. It is also unspecified on which thread this callback
+        // executes (probably not the same thread that initiated the capture
+        // and stopped the camera), so we cannot reliably check the camera's
+        // status. Instead, we wait with a timeout and treat a failure to
+        // acquire the semaphore as an error.
+ if (!m_session->videoOutput() || request.previewReady->tryAcquire(1, 1000)) {
+ qCDebug(qLcCamera) << "Image capture completed";
+
+ NSData *nsJpgData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
+ QByteArray jpgData = QByteArray::fromRawData((const char *)[nsJpgData bytes], [nsJpgData length]);
+
+ if (captureToBuffer) {
+ QBuffer data(&jpgData);
+ QImageReader reader(&data, "JPEG");
+ QSize size = reader.size();
+ auto buffer = std::make_unique<QMemoryVideoBuffer>(
+ QByteArray(jpgData.constData(), jpgData.size()), -1);
+ QVideoFrame frame = QVideoFramePrivate::createFrame(
+ std::move(buffer), QVideoFrameFormat(size, QVideoFrameFormat::Format_Jpeg));
+ QMetaObject::invokeMethod(this, "imageAvailable", Qt::QueuedConnection,
+ Q_ARG(int, request.captureId),
+ Q_ARG(QVideoFrame, frame));
+ } else {
+ QFile f(fileName);
+ if (f.open(QFile::WriteOnly)) {
+ if (f.write(jpgData) != -1) {
+ QMetaObject::invokeMethod(this, "imageSaved", Qt::QueuedConnection,
+ Q_ARG(int, request.captureId),
+ Q_ARG(QString, fileName));
+ } else {
+ QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
+ Q_ARG(int, request.captureId),
+ Q_ARG(int, QImageCapture::OutOfSpaceError),
+ Q_ARG(QString, f.errorString()));
+ }
+ } else {
+ QString errorMessage = tr("Could not open destination file:\n%1").arg(fileName);
+ QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
+ Q_ARG(int, request.captureId),
+ Q_ARG(int, QImageCapture::ResourceError),
+ Q_ARG(QString, errorMessage));
+ }
+ }
+ } else {
+ const QLatin1String errorMessage("Image capture failed: timed out waiting"
+ " for a preview frame.");
+ qCDebug(qLcCamera) << errorMessage;
+ QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
+ Q_ARG(int, request.captureId),
+ Q_ARG(int, QImageCapture::ResourceError),
+ Q_ARG(QString, errorMessage));
+ }
+ }];
+
+ return request.captureId;
+}
+
+int AVFImageCapture::capture(const QString &fileName)
+{
+ auto actualFileName = QMediaStorageLocation::generateFileName(fileName, QStandardPaths::PicturesLocation, QLatin1String("jpg"));
+
+ qCDebug(qLcCamera) << "Capture image to" << actualFileName;
+ return doCapture(actualFileName);
+}
+
+int AVFImageCapture::captureToBuffer()
+{
+ return doCapture(QString());
+}
+
+void AVFImageCapture::onNewViewfinderFrame(const QVideoFrame &frame)
+{
+ QMutexLocker locker(&m_requestsMutex);
+
+ if (m_captureRequests.isEmpty())
+ return;
+
+ CaptureRequest request = m_captureRequests.dequeue();
+ Q_EMIT imageExposed(request.captureId);
+
+ (void) QtConcurrent::run(&AVFImageCapture::makeCapturePreview, this,
+ request,
+ frame,
+ 0 /* rotation */);
+}
+
+void AVFImageCapture::onCameraChanged()
+{
+ auto camera = m_service ? static_cast<AVFCamera *>(m_service->camera()) : nullptr;
+
+ if (camera == m_cameraControl)
+ return;
+
+ m_cameraControl = camera;
+
+ if (m_cameraControl)
+ connect(m_cameraControl, SIGNAL(activeChanged(bool)), this, SLOT(updateReadyStatus()));
+ updateReadyStatus();
+}
+
+void AVFImageCapture::makeCapturePreview(CaptureRequest request,
+ const QVideoFrame &frame,
+ int rotation)
+{
+ QTransform transform;
+ transform.rotate(rotation);
+
+ Q_EMIT imageCaptured(request.captureId, frame.toImage().transformed(transform));
+
+ request.previewReady->release();
+}
+
+void AVFImageCapture::updateCaptureConnection()
+{
+ if (m_session && m_session->videoCaptureDevice()) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO;
+ AVCaptureSession *captureSession = m_session->captureSession();
+
+ if (![captureSession.outputs containsObject:m_stillImageOutput]) {
+ if ([captureSession canAddOutput:m_stillImageOutput]) {
+ [captureSession beginConfiguration];
+ // Lock the video capture device to make sure the active format is not reset
+ const AVFConfigurationLock lock(m_session->videoCaptureDevice());
+ [captureSession addOutput:m_stillImageOutput];
+ m_videoConnection = [m_stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
+ [captureSession commitConfiguration];
+ updateReadyStatus();
+ }
+ } else {
+ m_videoConnection = [m_stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
+ }
+ }
+}
+
+
+QImageEncoderSettings AVFImageCapture::imageSettings() const
+{
+ QImageEncoderSettings settings;
+
+ if (!videoCaptureDeviceIsValid())
+ return settings;
+
+ AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
+ if (!captureDevice.activeFormat) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "no active format";
+ return settings;
+ }
+
+ QSize res(qt_device_format_resolution(captureDevice.activeFormat));
+#ifdef Q_OS_IOS
+ if (!m_service->avfImageCaptureControl() || !m_service->avfImageCaptureControl()->stillImageOutput()) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "no still image output";
+ return settings;
+ }
+
+ AVCaptureStillImageOutput *stillImageOutput = m_service->avfImageCaptureControl()->stillImageOutput();
+ if (stillImageOutput.highResolutionStillImageOutputEnabled)
+ res = qt_device_format_high_resolution(captureDevice.activeFormat);
+#endif
+ if (res.isNull() || !res.isValid()) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to extract the image resolution";
+ return settings;
+ }
+
+ settings.setResolution(res);
+ settings.setFormat(QImageCapture::JPEG);
+
+ return settings;
+}
+
+void AVFImageCapture::setImageSettings(const QImageEncoderSettings &settings)
+{
+ if (m_settings == settings)
+ return;
+
+ m_settings = settings;
+ applySettings();
+}
+
+bool AVFImageCapture::applySettings()
+{
+ if (!videoCaptureDeviceIsValid())
+ return false;
+
+ AVFCameraSession *session = m_service->session();
+ if (!session)
+ return false;
+
+ if (!m_service->imageCapture()
+ || !m_service->avfImageCaptureControl()->stillImageOutput()) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "no still image output";
+ return false;
+ }
+
+ if (m_settings.format() != QImageCapture::UnspecifiedFormat && m_settings.format() != QImageCapture::JPEG) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "unsupported format:" << m_settings.format();
+ return false;
+ }
+
+ QSize res(m_settings.resolution());
+ if (res.isNull()) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid resolution:" << res;
+ return false;
+ }
+
+ if (!res.isValid()) {
+ // Invalid == default value.
+ // Here we could choose the best format available, but
+ // activeFormat is already equal to 'preset high' by default,
+        // which is good enough; otherwise we could end up in a format with low framerates.
+ return false;
+ }
+
+ bool activeFormatChanged = false;
+
+ AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
+ AVCaptureDeviceFormat *match = qt_find_best_resolution_match(captureDevice, res,
+ m_service->session()->defaultCodec());
+
+ if (!match) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "unsupported resolution:" << res;
+ return false;
+ }
+
+ activeFormatChanged = qt_set_active_format(captureDevice, match, true);
+
+#ifdef Q_OS_IOS
+ AVCaptureStillImageOutput *imageOutput = m_service->avfImageCaptureControl()->stillImageOutput();
+ if (res == qt_device_format_high_resolution(captureDevice.activeFormat))
+ imageOutput.highResolutionStillImageOutputEnabled = YES;
+ else
+ imageOutput.highResolutionStillImageOutputEnabled = NO;
+#endif
+
+ return activeFormatChanged;
+}
+
+void AVFImageCapture::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+ AVFCameraService *captureSession = static_cast<AVFCameraService *>(session);
+ if (m_service == captureSession)
+ return;
+
+ m_service = captureSession;
+ if (!m_service) {
+ m_session->disconnect(this);
+ if (m_cameraControl)
+ m_cameraControl->disconnect(this);
+ m_session = nullptr;
+ m_cameraControl = nullptr;
+ m_videoConnection = nil;
+ } else {
+ m_session = m_service->session();
+ Q_ASSERT(m_session);
+
+ connect(m_service, &AVFCameraService::cameraChanged, this, &AVFImageCapture::onCameraChanged);
+ connect(m_session, SIGNAL(readyToConfigureConnections()), SLOT(updateCaptureConnection()));
+ connect(m_session, &AVFCameraSession::newViewfinderFrame,
+ this, &AVFImageCapture::onNewViewfinderFrame);
+ }
+
+ updateCaptureConnection();
+ onCameraChanged();
+ updateReadyStatus();
+}
+
+bool AVFImageCapture::videoCaptureDeviceIsValid() const
+{
+ if (!m_service || !m_service->session() || !m_service->session()->videoCaptureDevice())
+ return false;
+
+ AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
+ if (!captureDevice.formats || !captureDevice.formats.count)
+ return false;
+
+ return true;
+}
+
+#include "moc_avfimagecapture_p.cpp"
diff --git a/src/plugins/multimedia/darwin/camera/avfimagecapture_p.h b/src/plugins/multimedia/darwin/camera/avfimagecapture_p.h
new file mode 100644
index 000000000..0714fa3cc
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfimagecapture_p.h
@@ -0,0 +1,81 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFCAMERAIMAGECAPTURE_H
+#define AVFCAMERAIMAGECAPTURE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#import <AVFoundation/AVFoundation.h>
+
+#include <QtCore/qqueue.h>
+#include <QtCore/qsemaphore.h>
+#include <QtCore/qsharedpointer.h>
+#include <private/qplatformimagecapture_p.h>
+#include "avfcamerasession_p.h"
+
+QT_BEGIN_NAMESPACE
+
+class AVFImageCapture : public QPlatformImageCapture
+{
+    Q_OBJECT
+public:
+ struct CaptureRequest {
+ int captureId;
+ QSharedPointer<QSemaphore> previewReady;
+ };
+
+ AVFImageCapture(QImageCapture *parent = nullptr);
+ ~AVFImageCapture();
+
+ bool isReadyForCapture() const override;
+
+    AVCaptureStillImageOutput *stillImageOutput() const { return m_stillImageOutput; }
+
+ int doCapture(const QString &fileName);
+ int capture(const QString &fileName) override;
+ int captureToBuffer() override;
+
+ QImageEncoderSettings imageSettings() const override;
+ void setImageSettings(const QImageEncoderSettings &settings) override;
+ bool applySettings();
+
+ void setCaptureSession(QPlatformMediaCaptureSession *session);
+
+private Q_SLOTS:
+ void updateCaptureConnection();
+ void updateReadyStatus();
+ void onNewViewfinderFrame(const QVideoFrame &frame);
+ void onCameraChanged();
+
+private:
+ void makeCapturePreview(CaptureRequest request, const QVideoFrame &frame, int rotation);
+ bool videoCaptureDeviceIsValid() const;
+
+ AVFCameraService *m_service = nullptr;
+ AVFCameraSession *m_session = nullptr;
+ AVFCamera *m_cameraControl = nullptr;
+ bool m_ready = false;
+ int m_lastCaptureId = 0;
+ AVCaptureStillImageOutput *m_stillImageOutput;
+ AVCaptureConnection *m_videoConnection = nullptr;
+
+ QMutex m_requestsMutex;
+ QQueue<CaptureRequest> m_captureRequests;
+ QImageEncoderSettings m_settings;
+};
+
+Q_DECLARE_TYPEINFO(AVFImageCapture::CaptureRequest, Q_MOVABLE_TYPE);
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/camera/avfmediaassetwriter.mm b/src/plugins/multimedia/darwin/camera/avfmediaassetwriter.mm
new file mode 100644
index 000000000..37fc69926
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfmediaassetwriter.mm
@@ -0,0 +1,556 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfmediaencoder_p.h"
+#include "avfcamerarenderer_p.h"
+#include "avfmediaassetwriter_p.h"
+#include "avfcameraservice_p.h"
+#include "avfcamerasession_p.h"
+#include "avfcameradebug_p.h"
+#include <qdarwinformatsinfo_p.h>
+#include <avfmetadata_p.h>
+
+#include <QtCore/qmetaobject.h>
+#include <QtCore/qatomic.h>
+
+QT_USE_NAMESPACE
+
+namespace {
+
+bool qt_capture_session_isValid(AVFCameraService *service)
+{
+ if (!service || !service->session())
+ return false;
+
+ AVFCameraSession *session = service->session();
+ if (!session->captureSession())
+ return false;
+
+ if (!session->videoInput() && !session->audioInput())
+ return false;
+
+ return true;
+}
+
+enum WriterState
+{
+ WriterStateIdle,
+ WriterStateActive,
+ WriterStatePaused,
+ WriterStateAborted
+};
+
+using AVFAtomicInt64 = QAtomicInteger<qint64>;
+
+} // unnamed namespace
+
+@interface QT_MANGLE_NAMESPACE(AVFMediaAssetWriter) (PrivateAPI)
+- (bool)addWriterInputs;
+- (void)setQueues;
+- (void)updateDuration:(CMTime)newTimeStamp;
+- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset;
+@end
+
+@implementation QT_MANGLE_NAMESPACE(AVFMediaAssetWriter)
+{
+@private
+ AVFCameraService *m_service;
+
+ AVFScopedPointer<AVAssetWriterInput> m_cameraWriterInput;
+ AVFScopedPointer<AVAssetWriterInput> m_audioWriterInput;
+
+ // Queue to write sample buffers:
+ AVFScopedPointer<dispatch_queue_t> m_writerQueue;
+ // High priority serial queue for video output:
+ AVFScopedPointer<dispatch_queue_t> m_videoQueue;
+ // Serial queue for audio output:
+ AVFScopedPointer<dispatch_queue_t> m_audioQueue;
+
+ AVFScopedPointer<AVAssetWriter> m_assetWriter;
+
+ AVFMediaEncoder *m_delegate;
+
+ bool m_setStartTime;
+
+ QAtomicInt m_state;
+
+ bool m_writeFirstAudioBuffer;
+
+ CMTime m_startTime;
+ CMTime m_lastTimeStamp;
+ CMTime m_lastVideoTimestamp;
+ CMTime m_lastAudioTimestamp;
+ CMTime m_timeOffset;
+ bool m_adjustTime;
+
+ NSDictionary *m_audioSettings;
+ NSDictionary *m_videoSettings;
+
+ AVFAtomicInt64 m_durationInMs;
+}
+
+- (id)initWithDelegate:(AVFMediaEncoder *)delegate
+{
+ Q_ASSERT(delegate);
+
+ if (self = [super init]) {
+ m_delegate = delegate;
+ m_setStartTime = true;
+ m_state.storeRelaxed(WriterStateIdle);
+ m_startTime = kCMTimeInvalid;
+ m_lastTimeStamp = kCMTimeInvalid;
+ m_lastAudioTimestamp = kCMTimeInvalid;
+ m_lastVideoTimestamp = kCMTimeInvalid;
+ m_timeOffset = kCMTimeInvalid;
+ m_adjustTime = false;
+ m_durationInMs.storeRelaxed(0);
+ m_audioSettings = nil;
+ m_videoSettings = nil;
+ m_writeFirstAudioBuffer = false;
+ }
+
+ return self;
+}
+
+- (bool)setupWithFileURL:(NSURL *)fileURL
+ cameraService:(AVFCameraService *)service
+ audioSettings:(NSDictionary *)audioSettings
+ videoSettings:(NSDictionary *)videoSettings
+ fileFormat:(QMediaFormat::FileFormat)fileFormat
+ transform:(CGAffineTransform)transform
+{
+ Q_ASSERT(fileURL);
+
+ if (!qt_capture_session_isValid(service)) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid capture session";
+ return false;
+ }
+
+ m_service = service;
+ m_audioSettings = audioSettings;
+ m_videoSettings = videoSettings;
+
+ AVFCameraSession *session = m_service->session();
+
+ m_writerQueue.reset(dispatch_queue_create("asset-writer-queue", DISPATCH_QUEUE_SERIAL));
+ if (!m_writerQueue) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create an asset writer's queue";
+ return false;
+ }
+
+ m_videoQueue.reset();
+ if (session->videoInput() && session->videoOutput() && session->videoOutput()->videoDataOutput()) {
+ m_videoQueue.reset(dispatch_queue_create("video-output-queue", DISPATCH_QUEUE_SERIAL));
+ if (!m_videoQueue) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create video queue";
+ return false;
+ }
+ dispatch_set_target_queue(m_videoQueue, dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0));
+ }
+
+ m_audioQueue.reset();
+ if (session->audioInput() && session->audioOutput()) {
+ m_audioQueue.reset(dispatch_queue_create("audio-output-queue", DISPATCH_QUEUE_SERIAL));
+ if (!m_audioQueue) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create audio queue";
+ if (!m_videoQueue)
+ return false;
+ // But we still can write video!
+ }
+ }
+
+ auto fileType = QDarwinFormatInfo::avFileTypeForContainerFormat(fileFormat);
+ m_assetWriter.reset([[AVAssetWriter alloc] initWithURL:fileURL
+ fileType:fileType
+ error:nil]);
+ if (!m_assetWriter) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create asset writer";
+ return false;
+ }
+
+ if (!m_videoQueue)
+ m_writeFirstAudioBuffer = true;
+
+ if (![self addWriterInputs]) {
+ m_assetWriter.reset();
+ return false;
+ }
+
+ if (m_cameraWriterInput)
+ m_cameraWriterInput.data().transform = transform;
+
+ [self setMetaData:fileType];
+
+ // Ready to start ...
+ return true;
+}
+
+- (void)setMetaData:(AVFileType)fileType
+{
+ m_assetWriter.data().metadata = AVFMetaData::toAVMetadataForFormat(m_delegate->metaData(), fileType);
+}
+
+- (void)start
+{
+ [self setQueues];
+
+ m_setStartTime = true;
+
+ m_state.storeRelease(WriterStateActive);
+
+ [m_assetWriter startWriting];
+ AVCaptureSession *session = m_service->session()->captureSession();
+ if (!session.running)
+ [session startRunning];
+}
+
+- (void)stop
+{
+ if (m_state.loadAcquire() != WriterStateActive && m_state.loadAcquire() != WriterStatePaused)
+ return;
+
+ if ([m_assetWriter status] != AVAssetWriterStatusWriting
+ && [m_assetWriter status] != AVAssetWriterStatusFailed)
+ return;
+
+    // Do this here so that:
+    // 1. '-abort' does not try calling finishWriting again, and
+    // 2. the async block (see below) knows whether the recorder control was
+    //    deleted before the block's execution:
+ m_state.storeRelease(WriterStateIdle);
+ // Now, since we have to ensure no sample buffers are
+ // appended after a call to finishWriting, we must
+ // ensure writer's queue sees this change in m_state
+ // _before_ we call finishWriting:
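+    // (dispatch_sync with an empty block on a serial queue acts as a flush:
+    // it returns only after every block submitted before it has completed.)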
+ dispatch_sync(m_writerQueue, ^{});
+ // Done, but now we also want to prevent video queue
+ // from updating our viewfinder:
+ if (m_videoQueue)
+ dispatch_sync(m_videoQueue, ^{});
+
+ // Now we're safe to stop:
+ [m_assetWriter finishWritingWithCompletionHandler:^{
+        // This block is async, so by the time it executes,
+        // the recorder control may already have been deleted ...
+ if (m_state.loadAcquire() == WriterStateAborted)
+ return;
+
+ AVCaptureSession *session = m_service->session()->captureSession();
+ if (session.running)
+ [session stopRunning];
+ QMetaObject::invokeMethod(m_delegate, "assetWriterFinished", Qt::QueuedConnection);
+ }];
+}
+
+- (void)abort
+{
+ // -abort is to be called from recorder control's dtor.
+
+ if (m_state.fetchAndStoreRelease(WriterStateAborted) != WriterStateActive) {
+ // Not recording, nothing to stop.
+ return;
+ }
+
+ // From Apple's docs:
+ // "To guarantee that all sample buffers are successfully written,
+ // you must ensure that all calls to appendSampleBuffer: and
+ // appendPixelBuffer:withPresentationTime: have returned before
+ // invoking this method."
+ //
+ // The only way we can ensure this is:
+ dispatch_sync(m_writerQueue, ^{});
+ // At this point next block (if any) on the writer's queue
+ // will see m_state preventing it from any further processing.
+ if (m_videoQueue)
+ dispatch_sync(m_videoQueue, ^{});
+    // After this point the video queue will not try to modify our
+    // viewfinder, so we're safe to delete now.
+
+ [m_assetWriter finishWritingWithCompletionHandler:^{
+ }];
+}
+
+- (void)pause
+{
+ if (m_state.loadAcquire() != WriterStateActive)
+ return;
+ if ([m_assetWriter status] != AVAssetWriterStatusWriting)
+ return;
+
+ m_state.storeRelease(WriterStatePaused);
+ m_adjustTime = true;
+}
+
+- (void)resume
+{
+ if (m_state.loadAcquire() != WriterStatePaused)
+ return;
+ if ([m_assetWriter status] != AVAssetWriterStatusWriting)
+ return;
+
+ m_state.storeRelease(WriterStateActive);
+}
+
+- (void)setStartTimeFrom:(CMSampleBufferRef)sampleBuffer
+{
+ // Writer's queue only.
+ Q_ASSERT(m_setStartTime);
+ Q_ASSERT(sampleBuffer);
+
+ if (m_state.loadAcquire() != WriterStateActive)
+ return;
+
+ QMetaObject::invokeMethod(m_delegate, "assetWriterStarted", Qt::QueuedConnection);
+
+ m_durationInMs.storeRelease(0);
+ m_startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+ m_lastTimeStamp = m_startTime;
+ [m_assetWriter startSessionAtSourceTime:m_startTime];
+ m_setStartTime = false;
+}
+
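+// Returns a copy of 'sample' with every timing entry shifted back by 'offset'
+// (PTS' = PTS - offset, DTS' = DTS - offset); the caller owns the returned
+// buffer. This is what makes a recording paused for N seconds come out N
+// seconds shorter instead of containing a gap.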
+- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset
+{
+ CMItemCount count;
+ CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
+ CMSampleTimingInfo* timingInfo = (CMSampleTimingInfo*) malloc(sizeof(CMSampleTimingInfo) * count);
+ CMSampleBufferGetSampleTimingInfoArray(sample, count, timingInfo, &count);
+ for (CMItemCount i = 0; i < count; i++)
+ {
+ timingInfo[i].decodeTimeStamp = CMTimeSubtract(timingInfo[i].decodeTimeStamp, offset);
+ timingInfo[i].presentationTimeStamp = CMTimeSubtract(timingInfo[i].presentationTimeStamp, offset);
+ }
+ CMSampleBufferRef updatedBuffer;
+ CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, sample, count, timingInfo, &updatedBuffer);
+ free(timingInfo);
+ return updatedBuffer;
+}
+
+- (void)writeVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
+{
+ // This code is executed only on a writer's queue.
+ Q_ASSERT(sampleBuffer);
+
+ if (m_state.loadAcquire() == WriterStateActive) {
+ if (m_setStartTime)
+ [self setStartTimeFrom:sampleBuffer];
+
+ if (m_cameraWriterInput.data().readyForMoreMediaData) {
+ [self updateDuration:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
+ [m_cameraWriterInput appendSampleBuffer:sampleBuffer];
+ }
+ }
+}
+
+- (void)writeAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer
+{
+ Q_ASSERT(sampleBuffer);
+
+ // This code is executed only on a writer's queue.
+ if (m_state.loadAcquire() == WriterStateActive) {
+ if (m_setStartTime)
+ [self setStartTimeFrom:sampleBuffer];
+
+ if (m_audioWriterInput.data().readyForMoreMediaData) {
+ [self updateDuration:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
+ [m_audioWriterInput appendSampleBuffer:sampleBuffer];
+ }
+ }
+}
+
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ fromConnection:(AVCaptureConnection *)connection
+{
+ Q_UNUSED(connection);
+ Q_ASSERT(m_service && m_service->session());
+
+ if (m_state.loadAcquire() != WriterStateActive && m_state.loadAcquire() != WriterStatePaused)
+ return;
+
+ if ([m_assetWriter status] != AVAssetWriterStatusWriting) {
+ if ([m_assetWriter status] == AVAssetWriterStatusFailed) {
+ NSError *error = [m_assetWriter error];
+ NSString *failureReason = error.localizedFailureReason;
+ NSString *suggestion = error.localizedRecoverySuggestion;
+ NSString *errorString = suggestion ? [failureReason stringByAppendingString:suggestion] : failureReason;
+ QMetaObject::invokeMethod(m_delegate, "assetWriterError",
+ Qt::QueuedConnection,
+ Q_ARG(QString, QString::fromNSString(errorString)));
+ }
+ return;
+ }
+
+ if (!CMSampleBufferDataIsReady(sampleBuffer)) {
+ qWarning() << Q_FUNC_INFO << "sample buffer is not ready, skipping.";
+ return;
+ }
+
+ CFRetain(sampleBuffer);
+
+    const bool isVideoBuffer = (captureOutput != m_service->session()->audioOutput());
+ if (isVideoBuffer) {
+        // Find the renderer control's delegate and invoke its method to
+        // show the updated viewfinder frame.
+ if (m_service->session()->videoOutput()) {
+ NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> *vfDelegate =
+ (NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> *)m_service->session()->videoOutput()->captureDelegate();
+ if (vfDelegate) {
+ AVCaptureOutput *output = nil;
+ AVCaptureConnection *connection = nil;
+ [vfDelegate captureOutput:output didOutputSampleBuffer:sampleBuffer fromConnection:connection];
+ }
+ }
+ } else {
+ if (m_service->session()->audioOutput()) {
+ NSObject<AVCaptureAudioDataOutputSampleBufferDelegate> *audioPreviewDelegate =
+ (NSObject<AVCaptureAudioDataOutputSampleBufferDelegate> *)m_service->session()->audioPreviewDelegate();
+ if (audioPreviewDelegate) {
+ AVCaptureOutput *output = nil;
+ AVCaptureConnection *connection = nil;
+ [audioPreviewDelegate captureOutput:output didOutputSampleBuffer:sampleBuffer fromConnection:connection];
+ }
+ }
+ }
+
+ if (m_state.loadAcquire() != WriterStateActive) {
+ CFRelease(sampleBuffer);
+ return;
+ }
+
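+    // First buffer after a resume: fold the length of the pause into
+    // m_timeOffset. E.g. pausing at t = 10s and resuming at t = 15s yields a
+    // pauseDuration of roughly 5s, by which every subsequent buffer is then
+    // shifted back.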
+ if (m_adjustTime) {
+ CMTime currentTimestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+ CMTime lastTimestamp = isVideoBuffer ? m_lastVideoTimestamp : m_lastAudioTimestamp;
+
+ if (!CMTIME_IS_INVALID(lastTimestamp)) {
+ if (!CMTIME_IS_INVALID(m_timeOffset))
+ currentTimestamp = CMTimeSubtract(currentTimestamp, m_timeOffset);
+
+ CMTime pauseDuration = CMTimeSubtract(currentTimestamp, lastTimestamp);
+
+ if (m_timeOffset.value == 0)
+ m_timeOffset = pauseDuration;
+ else
+ m_timeOffset = CMTimeAdd(m_timeOffset, pauseDuration);
+ }
+ m_lastVideoTimestamp = kCMTimeInvalid;
+ m_adjustTime = false;
+ }
+
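+    // Dropping our retain before calling adjustTime is safe: AVFoundation
+    // keeps the buffer alive for the duration of this callback, and adjustTime
+    // returns a new +1 buffer that the async blocks below release.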
+ if (m_timeOffset.value > 0) {
+ CFRelease(sampleBuffer);
+ sampleBuffer = [self adjustTime:sampleBuffer by:m_timeOffset];
+ }
+
+ CMTime currentTimestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+ CMTime currentDuration = CMSampleBufferGetDuration(sampleBuffer);
+ if (currentDuration.value > 0)
+ currentTimestamp = CMTimeAdd(currentTimestamp, currentDuration);
+
+ if (isVideoBuffer)
+ {
+ m_lastVideoTimestamp = currentTimestamp;
+ dispatch_async(m_writerQueue, ^{
+ [self writeVideoSampleBuffer:sampleBuffer];
+ m_writeFirstAudioBuffer = true;
+ CFRelease(sampleBuffer);
+ });
+ } else if (m_writeFirstAudioBuffer) {
+ m_lastAudioTimestamp = currentTimestamp;
+ dispatch_async(m_writerQueue, ^{
+ [self writeAudioSampleBuffer:sampleBuffer];
+ CFRelease(sampleBuffer);
+ });
+    } else {
+        // Audio arriving before the first video frame is not written; balance
+        // the CFRetain at the top of this callback so the buffer is not leaked.
+        CFRelease(sampleBuffer);
+    }
+}
+
+- (bool)addWriterInputs
+{
+ Q_ASSERT(m_service && m_service->session());
+ Q_ASSERT(m_assetWriter.data());
+
+ AVFCameraSession *session = m_service->session();
+
+ m_cameraWriterInput.reset();
+ if (m_videoQueue)
+ {
+ Q_ASSERT(session->videoCaptureDevice() && session->videoOutput() && session->videoOutput()->videoDataOutput());
+ m_cameraWriterInput.reset([[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
+ outputSettings:m_videoSettings
+ sourceFormatHint:session->videoCaptureDevice().activeFormat.formatDescription]);
+
+ if (m_cameraWriterInput && [m_assetWriter canAddInput:m_cameraWriterInput]) {
+ [m_assetWriter addInput:m_cameraWriterInput];
+ } else {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to add camera writer input";
+ m_cameraWriterInput.reset();
+ return false;
+ }
+
+ m_cameraWriterInput.data().expectsMediaDataInRealTime = YES;
+ }
+
+ m_audioWriterInput.reset();
+ if (m_audioQueue) {
+ m_audioWriterInput.reset([[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio
+ outputSettings:m_audioSettings]);
+ if (!m_audioWriterInput) {
+ qWarning() << Q_FUNC_INFO << "failed to create audio writer input";
+ // But we still can record video.
+ if (!m_cameraWriterInput)
+ return false;
+ } else if ([m_assetWriter canAddInput:m_audioWriterInput]) {
+ [m_assetWriter addInput:m_audioWriterInput];
+ m_audioWriterInput.data().expectsMediaDataInRealTime = YES;
+ } else {
+ qWarning() << Q_FUNC_INFO << "failed to add audio writer input";
+ m_audioWriterInput.reset();
+ if (!m_cameraWriterInput)
+ return false;
+ // We can (still) write video though ...
+ }
+ }
+
+ return true;
+}
+
+- (void)setQueues
+{
+ Q_ASSERT(m_service && m_service->session());
+ AVFCameraSession *session = m_service->session();
+
+ if (m_videoQueue) {
+ Q_ASSERT(session->videoOutput() && session->videoOutput()->videoDataOutput());
+ [session->videoOutput()->videoDataOutput() setSampleBufferDelegate:self queue:m_videoQueue];
+ }
+
+ if (m_audioQueue) {
+ Q_ASSERT(session->audioOutput());
+ [session->audioOutput() setSampleBufferDelegate:self queue:m_audioQueue];
+ }
+}
+
+- (void)updateDuration:(CMTime)newTimeStamp
+{
+ Q_ASSERT(CMTimeCompare(m_startTime, kCMTimeInvalid));
+ Q_ASSERT(CMTimeCompare(m_lastTimeStamp, kCMTimeInvalid));
+ if (CMTimeCompare(newTimeStamp, m_lastTimeStamp) > 0) {
+
+ const CMTime duration = CMTimeSubtract(newTimeStamp, m_startTime);
+ if (!CMTimeCompare(duration, kCMTimeInvalid))
+ return;
+
+ m_durationInMs.storeRelease(CMTimeGetSeconds(duration) * 1000);
+ m_lastTimeStamp = newTimeStamp;
+
+ m_delegate->updateDuration([self durationInMs]);
+ }
+}
+
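+// m_durationInMs is written on the writer's queue (updateDuration:) and read
+// from the recorder control's thread; the release/acquire pairing makes those
+// updates visible across threads.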
+- (qint64)durationInMs
+{
+ return m_durationInMs.loadAcquire();
+}
+
+@end
diff --git a/src/plugins/multimedia/darwin/camera/avfmediaassetwriter_p.h b/src/plugins/multimedia/darwin/camera/avfmediaassetwriter_p.h
new file mode 100644
index 000000000..8fe3e8522
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfmediaassetwriter_p.h
@@ -0,0 +1,54 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFMEDIAASSETWRITER_H
+#define AVFMEDIAASSETWRITER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "avfcamerautility_p.h"
+#include "qmediaformat.h"
+
+#include <QtCore/qglobal.h>
+
+#include <AVFoundation/AVFoundation.h>
+
+QT_BEGIN_NAMESPACE
+
+class AVFMediaEncoder;
+class AVFCameraService;
+
+QT_END_NAMESPACE
+
+@interface QT_MANGLE_NAMESPACE(AVFMediaAssetWriter) : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate,
+ AVCaptureAudioDataOutputSampleBufferDelegate>
+- (id)initWithDelegate:(QT_PREPEND_NAMESPACE(AVFMediaEncoder) *)delegate;
+
+- (bool)setupWithFileURL:(NSURL *)fileURL
+ cameraService:(QT_PREPEND_NAMESPACE(AVFCameraService) *)service
+ audioSettings:(NSDictionary *)audioSettings
+ videoSettings:(NSDictionary *)videoSettings
+ fileFormat:(QMediaFormat::FileFormat)fileFormat
+ transform:(CGAffineTransform)transform;
+
+// These are to be called from the recorder control's thread:
+- (void)start;
+- (void)stop;
+- (void)pause;
+- (void)resume;
+// This is to be called from the recorder control's dtor:
+- (void)abort;
+- (qint64)durationInMs;
+
+@end
+
+#endif // AVFMEDIAASSETWRITER_H
diff --git a/src/plugins/multimedia/darwin/camera/avfmediaencoder.mm b/src/plugins/multimedia/darwin/camera/avfmediaencoder.mm
new file mode 100644
index 000000000..3fbc57995
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfmediaencoder.mm
@@ -0,0 +1,664 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+
+#include "avfmediaencoder_p.h"
+#include "avfcamerarenderer_p.h"
+#include "avfcamerasession_p.h"
+#include "avfcamera_p.h"
+#include "avfcameraservice_p.h"
+#include "avfcameradebug_p.h"
+#include "avfcamerautility_p.h"
+#include "qaudiodevice.h"
+
+#include "qmediadevices.h"
+#include "private/qmediastoragelocation_p.h"
+#include "private/qmediarecorder_p.h"
+#include "qdarwinformatsinfo_p.h"
+#include "private/qplatformaudiooutput_p.h"
+#include <private/qplatformaudioinput_p.h>
+
+#include <QtCore/qmath.h>
+#include <QtCore/qdebug.h>
+#include <QtCore/qmimetype.h>
+
+#include <private/qcoreaudioutils_p.h>
+
+QT_USE_NAMESPACE
+
+namespace {
+
+bool qt_is_writable_file_URL(NSURL *fileURL)
+{
+ Q_ASSERT(fileURL);
+
+ if (![fileURL isFileURL])
+ return false;
+
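+    // The output file does not exist yet at this point, so what is actually
+    // checked is whether its parent directory is writable.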
+ if (NSString *path = [[fileURL path] stringByExpandingTildeInPath]) {
+ return [[NSFileManager defaultManager]
+ isWritableFileAtPath:[path stringByDeletingLastPathComponent]];
+ }
+
+ return false;
+}
+
+bool qt_file_exists(NSURL *fileURL)
+{
+ Q_ASSERT(fileURL);
+
+ if (NSString *path = [[fileURL path] stringByExpandingTildeInPath])
+ return [[NSFileManager defaultManager] fileExistsAtPath:path];
+
+ return false;
+}
+
+}
+
+AVFMediaEncoder::AVFMediaEncoder(QMediaRecorder *parent)
+ : QObject(parent)
+ , QPlatformMediaRecorder(parent)
+ , m_state(QMediaRecorder::StoppedState)
+ , m_duration(0)
+ , m_audioSettings(nil)
+ , m_videoSettings(nil)
+ //, m_restoreFPS(-1, -1)
+{
+ m_writer.reset([[QT_MANGLE_NAMESPACE(AVFMediaAssetWriter) alloc] initWithDelegate:this]);
+ if (!m_writer) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create an asset writer";
+ return;
+ }
+}
+
+AVFMediaEncoder::~AVFMediaEncoder()
+{
+ [m_writer abort];
+
+ if (m_audioSettings)
+ [m_audioSettings release];
+ if (m_videoSettings)
+ [m_videoSettings release];
+}
+
+bool AVFMediaEncoder::isLocationWritable(const QUrl &location) const
+{
+ return location.scheme() == QLatin1String("file") || location.scheme().isEmpty();
+}
+
+QMediaRecorder::RecorderState AVFMediaEncoder::state() const
+{
+ return m_state;
+}
+
+qint64 AVFMediaEncoder::duration() const
+{
+ return m_duration;
+}
+
+void AVFMediaEncoder::updateDuration(qint64 duration)
+{
+ m_duration = duration;
+ durationChanged(m_duration);
+}
+
+static NSDictionary *avfAudioSettings(const QMediaEncoderSettings &encoderSettings, const QAudioFormat &format)
+{
+ NSMutableDictionary *settings = [NSMutableDictionary dictionary];
+
+ // Codec
+ int codecId = QDarwinFormatInfo::audioFormatForCodec(encoderSettings.mediaFormat().audioCodec());
+ [settings setObject:[NSNumber numberWithInt:codecId] forKey:AVFormatIDKey];
+
+ // Setting AVEncoderQualityKey is not allowed when format ID is alac or lpcm
+ if (codecId != kAudioFormatAppleLossless && codecId != kAudioFormatLinearPCM
+ && encoderSettings.encodingMode() == QMediaRecorder::ConstantQualityEncoding) {
+ // AudioQuality
+ int quality;
+ switch (encoderSettings.quality()) {
+ case QMediaRecorder::VeryLowQuality:
+ quality = AVAudioQualityMin;
+ break;
+ case QMediaRecorder::LowQuality:
+ quality = AVAudioQualityLow;
+ break;
+ case QMediaRecorder::HighQuality:
+ quality = AVAudioQualityHigh;
+ break;
+ case QMediaRecorder::VeryHighQuality:
+ quality = AVAudioQualityMax;
+ break;
+ case QMediaRecorder::NormalQuality:
+ default:
+ quality = AVAudioQualityMedium;
+ break;
+ }
+ [settings setObject:[NSNumber numberWithInt:quality] forKey:AVEncoderAudioQualityKey];
+ } else {
+ // BitRate
+ bool isBitRateSupported = false;
+ int bitRate = encoderSettings.audioBitRate();
+ if (bitRate > 0) {
+ QList<AudioValueRange> bitRates = qt_supported_bit_rates_for_format(codecId);
+ for (int i = 0; i < bitRates.count(); i++) {
+ if (bitRate >= bitRates[i].mMinimum &&
+ bitRate <= bitRates[i].mMaximum) {
+ isBitRateSupported = true;
+ break;
+ }
+ }
+ if (isBitRateSupported)
+ [settings setObject:[NSNumber numberWithInt:encoderSettings.audioBitRate()]
+ forKey:AVEncoderBitRateKey];
+ }
+ }
+
+ // SampleRate
+ int sampleRate = encoderSettings.audioSampleRate();
+ bool isSampleRateSupported = false;
+ if (sampleRate >= 8000 && sampleRate <= 192000) {
+ QList<AudioValueRange> sampleRates = qt_supported_sample_rates_for_format(codecId);
+ for (int i = 0; i < sampleRates.count(); i++) {
+ if (sampleRate >= sampleRates[i].mMinimum && sampleRate <= sampleRates[i].mMaximum) {
+ isSampleRateSupported = true;
+ break;
+ }
+ }
+ }
+ if (!isSampleRateSupported)
+ sampleRate = 44100;
+ [settings setObject:[NSNumber numberWithInt:sampleRate] forKey:AVSampleRateKey];
+
+ // Channels
+ int channelCount = encoderSettings.audioChannelCount();
+ bool isChannelCountSupported = false;
+ if (channelCount > 0) {
+ std::optional<QList<UInt32>> channelCounts = qt_supported_channel_counts_for_format(codecId);
+ // An std::nullopt result indicates that
+ // any number of channels can be encoded.
+ if (channelCounts == std::nullopt) {
+ isChannelCountSupported = true;
+ } else {
+ for (int i = 0; i < channelCounts.value().count(); i++) {
+ if ((UInt32)channelCount == channelCounts.value()[i]) {
+ isChannelCountSupported = true;
+ break;
+ }
+ }
+ }
+
+        // If a channel count is provided and it's bigger than 2,
+        // provide a supported channel layout.
+ if (isChannelCountSupported && channelCount > 2) {
+ AudioChannelLayout channelLayout;
+ memset(&channelLayout, 0, sizeof(AudioChannelLayout));
+ auto channelLayoutTags = qt_supported_channel_layout_tags_for_format(codecId, channelCount);
+ if (channelLayoutTags.size()) {
+ channelLayout.mChannelLayoutTag = channelLayoutTags.first();
+ [settings setObject:[NSData dataWithBytes: &channelLayout length: sizeof(channelLayout)] forKey:AVChannelLayoutKey];
+ } else {
+ isChannelCountSupported = false;
+ }
+ }
+
+ if (isChannelCountSupported)
+ [settings setObject:[NSNumber numberWithInt:channelCount] forKey:AVNumberOfChannelsKey];
+ }
+
+ if (!isChannelCountSupported) {
+ // fallback to providing channel layout if channel count is not specified or supported
+ UInt32 size = 0;
+ if (format.isValid()) {
+ auto layout = CoreAudioUtils::toAudioChannelLayout(format, &size);
+ [settings setObject:[NSData dataWithBytes:layout.get() length:sizeof(AudioChannelLayout)] forKey:AVChannelLayoutKey];
+ } else {
+ // finally default to setting channel count to 1
+ [settings setObject:[NSNumber numberWithInt:1] forKey:AVNumberOfChannelsKey];
+ }
+ }
+
+ if (codecId == kAudioFormatAppleLossless)
+ [settings setObject:[NSNumber numberWithInt:24] forKey:AVEncoderBitDepthHintKey];
+
+ if (codecId == kAudioFormatLinearPCM) {
+ [settings setObject:[NSNumber numberWithInt:16] forKey:AVLinearPCMBitDepthKey];
+        [settings setObject:[NSNumber numberWithBool:NO] forKey:AVLinearPCMIsBigEndianKey];
+        [settings setObject:[NSNumber numberWithBool:NO] forKey:AVLinearPCMIsFloatKey];
+        [settings setObject:[NSNumber numberWithBool:NO] forKey:AVLinearPCMIsNonInterleaved];
+ }
+
+ return settings;
+}
+
+NSDictionary *avfVideoSettings(QMediaEncoderSettings &encoderSettings, AVCaptureDevice *device, AVCaptureConnection *connection, QSize nativeSize)
+{
+ if (!device)
+ return nil;
+
+
+ // ### re-add needFpsChange
+// AVFPSRange currentFps = qt_current_framerates(device, connection);
+
+ NSMutableDictionary *videoSettings = [NSMutableDictionary dictionary];
+
+ // -- Codec
+
+ // AVVideoCodecKey is the only mandatory key
+ auto codec = encoderSettings.mediaFormat().videoCodec();
+ NSString *c = QDarwinFormatInfo::videoFormatForCodec(codec);
+ [videoSettings setObject:c forKey:AVVideoCodecKey];
+ [c release];
+
+ // -- Resolution
+
+ int w = encoderSettings.videoResolution().width();
+ int h = encoderSettings.videoResolution().height();
+
+ if (AVCaptureDeviceFormat *currentFormat = device.activeFormat) {
+ CMFormatDescriptionRef formatDesc = currentFormat.formatDescription;
+ CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(formatDesc);
+ FourCharCode formatCodec = CMVideoFormatDescriptionGetCodecType(formatDesc);
+
+ // We have to change the device's activeFormat in 3 cases:
+ // - the requested recording resolution is higher than the current device resolution
+ // - the requested recording resolution has a different aspect ratio than the current device aspect ratio
+ // - the requested frame rate is not available for the current device format
+ AVCaptureDeviceFormat *newFormat = nil;
+ if ((w <= 0 || h <= 0)
+ && encoderSettings.videoFrameRate() > 0
+ && !qt_format_supports_framerate(currentFormat, encoderSettings.videoFrameRate())) {
+
+ newFormat = qt_find_best_framerate_match(device,
+ formatCodec,
+ encoderSettings.videoFrameRate());
+
+ } else if (w > 0 && h > 0) {
+ AVCaptureDeviceFormat *f = qt_find_best_resolution_match(device,
+ encoderSettings.videoResolution(),
+ formatCodec);
+
+ if (f) {
+ CMVideoDimensions d = CMVideoFormatDescriptionGetDimensions(f.formatDescription);
+ qreal fAspectRatio = qreal(d.width) / d.height;
+
+ if (w > dim.width || h > dim.height
+ || qAbs((qreal(dim.width) / dim.height) - fAspectRatio) > 0.01) {
+ newFormat = f;
+ }
+ }
+ }
+
+ if (qt_set_active_format(device, newFormat, false /*### !needFpsChange*/)) {
+ formatDesc = newFormat.formatDescription;
+ dim = CMVideoFormatDescriptionGetDimensions(formatDesc);
+ }
+
+ if (w < 0 || h < 0) {
+ w = dim.width;
+ h = dim.height;
+ }
+
+
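+        // Example: with the device at 1920x1080 (16:9) and a requested
+        // 1280x1024 (5:4), the height is recomputed as 1280 / (16/9) = 720,
+        // so the recording becomes 1280x720 and keeps the sensor's aspect ratio.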
+ if (w > 0 && h > 0) {
+ // Make sure the recording resolution has the same aspect ratio as the device's
+ // current resolution
+ qreal deviceAspectRatio = qreal(dim.width) / dim.height;
+ qreal recAspectRatio = qreal(w) / h;
+ if (qAbs(deviceAspectRatio - recAspectRatio) > 0.01) {
+ if (recAspectRatio > deviceAspectRatio)
+ w = qRound(h * deviceAspectRatio);
+ else
+ h = qRound(w / deviceAspectRatio);
+ }
+
+ // recording resolution can't be higher than the device's active resolution
+ w = qMin(w, dim.width);
+ h = qMin(h, dim.height);
+ }
+ }
+
+ if (w > 0 && h > 0) {
+        // Width and height must be divisible by 2; 'w & 1' rounds odd values up (e.g. 1281 -> 1282)
+ w += w & 1;
+ h += h & 1;
+
+ bool isPortrait = nativeSize.width() < nativeSize.height();
+ // Make sure the video has the right aspect ratio
+ if (isPortrait && h < w)
+ qSwap(w, h);
+ else if (!isPortrait && w < h)
+ qSwap(w, h);
+
+ encoderSettings.setVideoResolution(QSize(w, h));
+ } else {
+ w = nativeSize.width();
+ h = nativeSize.height();
+ encoderSettings.setVideoResolution(nativeSize);
+ }
+ [videoSettings setObject:[NSNumber numberWithInt:w] forKey:AVVideoWidthKey];
+ [videoSettings setObject:[NSNumber numberWithInt:h] forKey:AVVideoHeightKey];
+
+ // -- FPS
+
+ if (true /*needFpsChange*/) {
+ const qreal fps = encoderSettings.videoFrameRate();
+ qt_set_framerate_limits(device, connection, fps, fps);
+ }
+ encoderSettings.setVideoFrameRate(qt_current_framerates(device, connection).second);
+
+ // -- Codec Settings
+
+ NSMutableDictionary *codecProperties = [NSMutableDictionary dictionary];
+ int bitrate = -1;
+ float quality = -1.f;
+
+ if (encoderSettings.encodingMode() == QMediaRecorder::ConstantQualityEncoding) {
+ if (encoderSettings.quality() != QMediaRecorder::NormalQuality) {
+ if (codec != QMediaFormat::VideoCodec::MotionJPEG) {
+ qWarning("ConstantQualityEncoding is not supported for MotionJPEG");
+ } else {
+ switch (encoderSettings.quality()) {
+ case QMediaRecorder::VeryLowQuality:
+ quality = 0.f;
+ break;
+ case QMediaRecorder::LowQuality:
+ quality = 0.25f;
+ break;
+ case QMediaRecorder::HighQuality:
+ quality = 0.75f;
+ break;
+ case QMediaRecorder::VeryHighQuality:
+ quality = 1.f;
+ break;
+ default:
+ quality = -1.f; // NormalQuality, let the system decide
+ break;
+ }
+ }
+ }
+    } else if (encoderSettings.encodingMode() == QMediaRecorder::AverageBitRateEncoding) {
+ if (codec != QMediaFormat::VideoCodec::H264 && codec != QMediaFormat::VideoCodec::H265)
+ qWarning() << "AverageBitRateEncoding is not supported for codec" << QMediaFormat::videoCodecName(codec);
+ else
+ bitrate = encoderSettings.videoBitRate();
+ } else {
+ qWarning("Encoding mode is not supported");
+ }
+
+ if (bitrate != -1)
+ [codecProperties setObject:[NSNumber numberWithInt:bitrate] forKey:AVVideoAverageBitRateKey];
+ if (quality != -1.f)
+ [codecProperties setObject:[NSNumber numberWithFloat:quality] forKey:AVVideoQualityKey];
+
+ [videoSettings setObject:codecProperties forKey:AVVideoCompressionPropertiesKey];
+
+ return videoSettings;
+}
+
+void AVFMediaEncoder::applySettings(QMediaEncoderSettings &settings)
+{
+ unapplySettings();
+
+ AVFCameraSession *session = m_service->session();
+
+ // audio settings
+ const auto audioInput = m_service->audioInput();
+ const QAudioFormat audioFormat = audioInput ? audioInput->device.preferredFormat() : QAudioFormat();
+ m_audioSettings = avfAudioSettings(settings, audioFormat);
+ if (m_audioSettings)
+ [m_audioSettings retain];
+
+ // video settings
+ AVCaptureDevice *device = session->videoCaptureDevice();
+ if (!device)
+ return;
+ const AVFConfigurationLock lock(device); // prevents activeFormat from being overridden
+ AVCaptureConnection *conn = [session->videoOutput()->videoDataOutput() connectionWithMediaType:AVMediaTypeVideo];
+ auto nativeSize = session->videoOutput()->nativeSize();
+ m_videoSettings = avfVideoSettings(settings, device, conn, nativeSize);
+ if (m_videoSettings)
+ [m_videoSettings retain];
+}
+
+void AVFMediaEncoder::unapplySettings()
+{
+ if (m_audioSettings) {
+ [m_audioSettings release];
+ m_audioSettings = nil;
+ }
+ if (m_videoSettings) {
+ [m_videoSettings release];
+ m_videoSettings = nil;
+ }
+}
+
+void AVFMediaEncoder::setMetaData(const QMediaMetaData &metaData)
+{
+ m_metaData = metaData;
+}
+
+QMediaMetaData AVFMediaEncoder::metaData() const
+{
+ return m_metaData;
+}
+
+void AVFMediaEncoder::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+ AVFCameraService *captureSession = static_cast<AVFCameraService *>(session);
+ if (m_service == captureSession)
+ return;
+
+ if (m_service)
+ stop();
+
+ m_service = captureSession;
+ if (!m_service)
+ return;
+
+ connect(m_service, &AVFCameraService::cameraChanged, this, &AVFMediaEncoder::onCameraChanged);
+ onCameraChanged();
+}
+
+void AVFMediaEncoder::record(QMediaEncoderSettings &settings)
+{
+ if (!m_service || !m_service->session()) {
+ qWarning() << Q_FUNC_INFO << "Encoder is not set to a capture session";
+ return;
+ }
+
+ if (!m_writer) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "Invalid recorder";
+ return;
+ }
+
+ if (QMediaRecorder::RecordingState == m_state)
+ return;
+
+ AVFCamera *cameraControl = m_service->avfCameraControl();
+ auto audioInput = m_service->audioInput();
+
+ if (!cameraControl && !audioInput) {
+ qWarning() << Q_FUNC_INFO << "Cannot record without any inputs";
+ updateError(QMediaRecorder::ResourceError, tr("No inputs specified"));
+ return;
+ }
+
+ m_service->session()->setActive(true);
+ const bool audioOnly = settings.videoCodec() == QMediaFormat::VideoCodec::Unspecified;
+ AVCaptureSession *session = m_service->session()->captureSession();
+ float rotation = 0;
+
+ if (!audioOnly) {
+ if (!cameraControl || !cameraControl->isActive()) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "can not start record while camera is not active";
+ updateError(QMediaRecorder::ResourceError,
+ QMediaRecorderPrivate::msgFailedStartRecording());
+ return;
+ }
+ }
+
+ const QString path(outputLocation().scheme() == QLatin1String("file") ?
+ outputLocation().path() : outputLocation().toString());
+ const QUrl fileURL(QUrl::fromLocalFile(QMediaStorageLocation::generateFileName(path,
+ audioOnly ? QStandardPaths::MusicLocation : QStandardPaths::MoviesLocation,
+ settings.mimeType().preferredSuffix())));
+
+ NSURL *nsFileURL = fileURL.toNSURL();
+ if (!nsFileURL) {
+ qWarning() << Q_FUNC_INFO << "invalid output URL:" << fileURL;
+ updateError(QMediaRecorder::ResourceError, tr("Invalid output file URL"));
+ return;
+ }
+ if (!qt_is_writable_file_URL(nsFileURL)) {
+ qWarning() << Q_FUNC_INFO << "invalid output URL:" << fileURL
+ << "(the location is not writable)";
+ updateError(QMediaRecorder::ResourceError, tr("Non-writeable file location"));
+ return;
+ }
+ if (qt_file_exists(nsFileURL)) {
+        // We test for/handle this error here since AVAssetWriter would otherwise
+        // raise an Objective-C exception, which is not good at all.
+ qWarning() << Q_FUNC_INFO << "invalid output URL:" << fileURL
+ << "(file already exists)";
+ updateError(QMediaRecorder::ResourceError, tr("File already exists"));
+ return;
+ }
+
+ applySettings(settings);
+
+ QVideoOutputOrientationHandler::setIsRecording(true);
+
+    // We stop the session now so that no more frames are generated for the
+    // renderer's queue; it will be restarted in assetWriterStarted.
+ [session stopRunning];
+
+ if ([m_writer setupWithFileURL:nsFileURL
+ cameraService:m_service
+ audioSettings:m_audioSettings
+ videoSettings:m_videoSettings
+ fileFormat:settings.fileFormat()
+ transform:CGAffineTransformMakeRotation(qDegreesToRadians(rotation))]) {
+
+ m_state = QMediaRecorder::RecordingState;
+
+ Q_EMIT actualLocationChanged(fileURL);
+ Q_EMIT stateChanged(m_state);
+
+        // Apple recommends calling startRunning and doing all
+        // setup on a dedicated queue, and that's what we had
+        // initially (dispatch_async to writerQueue). Unfortunately,
+        // the writer's queue is not the only queue/thread that can
+        // access/modify the session, and as a result we had
+        // all possible data races, with Obj-C exceptions
+        // at best and something worse in general.
+        // Now we try to modify the session only on the same thread.
+ [m_writer start];
+ } else {
+ [session startRunning];
+ updateError(QMediaRecorder::FormatError, QMediaRecorderPrivate::msgFailedStartRecording());
+ }
+}
+
+void AVFMediaEncoder::pause()
+{
+ if (!m_service || !m_service->session() || state() != QMediaRecorder::RecordingState)
+ return;
+
+ toggleRecord(false);
+ m_state = QMediaRecorder::PausedState;
+ stateChanged(m_state);
+}
+
+void AVFMediaEncoder::resume()
+{
+ if (!m_service || !m_service->session() || state() != QMediaRecorder::PausedState)
+ return;
+
+ toggleRecord(true);
+ m_state = QMediaRecorder::RecordingState;
+ stateChanged(m_state);
+}
+
+void AVFMediaEncoder::stop()
+{
+ if (m_state != QMediaRecorder::StoppedState) {
+ // Do not check the camera status, we can stop if we started.
+ stopWriter();
+ }
+ QVideoOutputOrientationHandler::setIsRecording(false);
+}
+
+
+void AVFMediaEncoder::toggleRecord(bool enable)
+{
+ if (!m_service || !m_service->session())
+ return;
+
+ if (!enable)
+ [m_writer pause];
+ else
+ [m_writer resume];
+}
+
+void AVFMediaEncoder::assetWriterStarted()
+{
+}
+
+void AVFMediaEncoder::assetWriterFinished()
+{
+
+ const QMediaRecorder::RecorderState lastState = m_state;
+
+ unapplySettings();
+
+ if (m_service) {
+ AVFCameraSession *session = m_service->session();
+
+ if (session->videoOutput()) {
+ session->videoOutput()->resetCaptureDelegate();
+ }
+ if (session->audioPreviewDelegate()) {
+ [session->audioPreviewDelegate() resetAudioPreviewDelegate];
+ }
+ if (session->videoOutput() || session->audioPreviewDelegate())
+ [session->captureSession() startRunning];
+ }
+
+ m_state = QMediaRecorder::StoppedState;
+ if (m_state != lastState)
+ Q_EMIT stateChanged(m_state);
+}
+
+void AVFMediaEncoder::assetWriterError(QString err)
+{
+ updateError(QMediaRecorder::FormatError, err);
+ if (m_state != QMediaRecorder::StoppedState)
+ stopWriter();
+}
+
+void AVFMediaEncoder::onCameraChanged()
+{
+ if (m_service && m_service->avfCameraControl()) {
+ AVFCamera *cameraControl = m_service->avfCameraControl();
+ connect(cameraControl, SIGNAL(activeChanged(bool)),
+ SLOT(cameraActiveChanged(bool)));
+ }
+}
+
+void AVFMediaEncoder::cameraActiveChanged(bool active)
+{
+ Q_ASSERT(m_service);
+ AVFCamera *cameraControl = m_service->avfCameraControl();
+ Q_ASSERT(cameraControl);
+
+    if (!active)
+        stopWriter();
+}
+
+void AVFMediaEncoder::stopWriter()
+{
+ [m_writer stop];
+}
+
+#include "moc_avfmediaencoder_p.cpp"
diff --git a/src/plugins/multimedia/darwin/camera/avfmediaencoder_p.h b/src/plugins/multimedia/darwin/camera/avfmediaencoder_p.h
new file mode 100644
index 000000000..23aced325
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfmediaencoder_p.h
@@ -0,0 +1,96 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFMEDIAENCODER_H
+#define AVFMEDIAENCODER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "avfmediaassetwriter_p.h"
+#include "avfcamerautility_p.h"
+#include "qaudiodevice.h"
+
+#include <private/qplatformmediarecorder_p.h>
+#include <private/qplatformmediacapture_p.h>
+#include <QtMultimedia/qmediametadata.h>
+
+#include <QtCore/qglobal.h>
+#include <QtCore/qurl.h>
+
+#include <AVFoundation/AVFoundation.h>
+
+QT_BEGIN_NAMESPACE
+
+class AVFCameraService;
+class QString;
+class QUrl;
+
+class AVFMediaEncoder : public QObject, public QPlatformMediaRecorder
+{
+ Q_OBJECT
+public:
+ AVFMediaEncoder(QMediaRecorder *parent);
+ ~AVFMediaEncoder() override;
+
+ bool isLocationWritable(const QUrl &location) const override;
+
+ QMediaRecorder::RecorderState state() const override;
+
+ qint64 duration() const override;
+
+ void record(QMediaEncoderSettings &settings) override;
+ void pause() override;
+ void resume() override;
+ void stop() override;
+
+ void setMetaData(const QMediaMetaData &) override;
+ QMediaMetaData metaData() const override;
+
+ AVFCameraService *cameraService() const { return m_service; }
+
+ void setCaptureSession(QPlatformMediaCaptureSession *session);
+
+ void updateDuration(qint64 duration);
+
+ void toggleRecord(bool enable);
+
+private:
+ void applySettings(QMediaEncoderSettings &settings);
+ void unapplySettings();
+
+ Q_INVOKABLE void assetWriterStarted();
+ Q_INVOKABLE void assetWriterFinished();
+ Q_INVOKABLE void assetWriterError(QString error);
+
+private Q_SLOTS:
+ void onCameraChanged();
+ void cameraActiveChanged(bool);
+
+private:
+ void stopWriter();
+
+ AVFCameraService *m_service = nullptr;
+ AVFScopedPointer<QT_MANGLE_NAMESPACE(AVFMediaAssetWriter)> m_writer;
+
+ QMediaRecorder::RecorderState m_state;
+
+ QMediaMetaData m_metaData;
+
+ qint64 m_duration;
+
+ NSDictionary *m_audioSettings;
+ NSDictionary *m_videoSettings;
+};
+
+QT_END_NAMESPACE
+
+#endif // AVFMEDIAENCODER_H
diff --git a/src/plugins/multimedia/darwin/camera/qavfcamerabase.mm b/src/plugins/multimedia/darwin/camera/qavfcamerabase.mm
new file mode 100644
index 000000000..9d99de0b9
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/qavfcamerabase.mm
@@ -0,0 +1,1084 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfcameradebug_p.h"
+#include "qavfcamerabase_p.h"
+#include "avfcamerautility_p.h"
+#include <private/qcameradevice_p.h>
+#include "qavfhelpers_p.h"
+#include <private/qplatformmediaintegration_p.h>
+#include <QtCore/qset.h>
+#include <QtCore/qsystemdetection.h>
+
+QT_USE_NAMESPACE
+
+namespace {
+
+// None of these helpers for working with exposure/ISO/shutter speed in custom mode are supported on macOS.
+
+#ifdef Q_OS_IOS
+
+// Misc. helpers to check values/ranges:
+
+bool qt_check_exposure_duration(AVCaptureDevice *captureDevice, CMTime duration)
+{
+ Q_ASSERT(captureDevice);
+
+ AVCaptureDeviceFormat *activeFormat = captureDevice.activeFormat;
+ if (!activeFormat) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to obtain capture device format";
+ return false;
+ }
+
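+    // CMTimeCompare returns -1, 0 or 1; '!= -1' therefore reads as
+    // duration >= minExposureDuration and maxExposureDuration >= duration.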
+ return CMTimeCompare(duration, activeFormat.minExposureDuration) != -1
+ && CMTimeCompare(activeFormat.maxExposureDuration, duration) != -1;
+}
+
+bool qt_check_ISO_value(AVCaptureDevice *captureDevice, int newISO)
+{
+ Q_ASSERT(captureDevice);
+
+ AVCaptureDeviceFormat *activeFormat = captureDevice.activeFormat;
+ if (!activeFormat) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to obtain capture device format";
+ return false;
+ }
+
+ return !(newISO < activeFormat.minISO || newISO > activeFormat.maxISO);
+}
+
+bool qt_exposure_duration_equal(AVCaptureDevice *captureDevice, qreal qDuration)
+{
+ Q_ASSERT(captureDevice);
+ const CMTime avDuration = CMTimeMakeWithSeconds(qDuration, captureDevice.exposureDuration.timescale);
+ return !CMTimeCompare(avDuration, captureDevice.exposureDuration);
+}
+
+bool qt_iso_equal(AVCaptureDevice *captureDevice, int iso)
+{
+ Q_ASSERT(captureDevice);
+ return qFuzzyCompare(float(iso), captureDevice.ISO);
+}
+
+bool qt_exposure_bias_equal(AVCaptureDevice *captureDevice, qreal bias)
+{
+ Q_ASSERT(captureDevice);
+ return qFuzzyCompare(bias, qreal(captureDevice.exposureTargetBias));
+}
+
+// Converters:
+
+bool qt_convert_exposure_mode(AVCaptureDevice *captureDevice, QCamera::ExposureMode mode,
+ AVCaptureExposureMode &avMode)
+{
+ // Test if mode supported and convert.
+ Q_ASSERT(captureDevice);
+
+ if (mode == QCamera::ExposureAuto) {
+ if ([captureDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
+ avMode = AVCaptureExposureModeContinuousAutoExposure;
+ return true;
+ }
+ }
+
+ if (mode == QCamera::ExposureManual) {
+ if ([captureDevice isExposureModeSupported:AVCaptureExposureModeCustom]) {
+ avMode = AVCaptureExposureModeCustom;
+ return true;
+ }
+ }
+
+ return false;
+}
+
+#endif // defined(Q_OS_IOS)
+
+} // Unnamed namespace.
+
+
+QAVFVideoDevices::QAVFVideoDevices(QPlatformMediaIntegration *integration)
+ : QPlatformVideoDevices(integration)
+{
+ NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
+ m_deviceConnectedObserver = [notificationCenter addObserverForName:AVCaptureDeviceWasConnectedNotification
+ object:nil
+ queue:[NSOperationQueue mainQueue]
+ usingBlock:^(NSNotification *) {
+ this->updateCameraDevices();
+ }];
+
+ m_deviceDisconnectedObserver = [notificationCenter addObserverForName:AVCaptureDeviceWasDisconnectedNotification
+ object:nil
+ queue:[NSOperationQueue mainQueue]
+ usingBlock:^(NSNotification *) {
+ this->updateCameraDevices();
+ }];
+ updateCameraDevices();
+}
+
+QAVFVideoDevices::~QAVFVideoDevices()
+{
+ NSNotificationCenter* notificationCenter = [NSNotificationCenter defaultCenter];
+ [notificationCenter removeObserver:(id)m_deviceConnectedObserver];
+ [notificationCenter removeObserver:(id)m_deviceDisconnectedObserver];
+}
+
+QList<QCameraDevice> QAVFVideoDevices::videoDevices() const
+{
+ return m_cameraDevices;
+}
+
+void QAVFVideoDevices::updateCameraDevices()
+{
+#ifdef Q_OS_IOS
+ // Cameras can't change dynamically on iOS. Update only once.
+ if (!m_cameraDevices.isEmpty())
+ return;
+#endif
+
+ QList<QCameraDevice> cameras;
+
+    // List of all capture device types that we want to discover. This seems to be the
+    // only way to discover all types. The filter is mandatory and has no "unspecified"
+    // option the way AVCaptureDevicePositionUnspecified has. The order of the list matters,
+    // because devices are discovered in the same order and we want the first one found
+    // to be our default device.
+ NSArray *discoveryDevices = @[
+#ifdef Q_OS_IOS
+ AVCaptureDeviceTypeBuiltInTripleCamera, // We always prefer triple camera.
+ AVCaptureDeviceTypeBuiltInDualCamera, // If triple is not available, we prefer
+ // dual with wide + tele lens.
+ AVCaptureDeviceTypeBuiltInDualWideCamera, // Dual with wide and ultrawide is still
+ // better than single.
+#endif
+ AVCaptureDeviceTypeBuiltInWideAngleCamera, // This is the most common single camera type.
+ // We prefer that over tele and ultra-wide.
+#ifdef Q_OS_IOS
+ AVCaptureDeviceTypeBuiltInTelephotoCamera, // Cannot imagine how, but if only tele and
+ // ultrawide are available, we prefer tele.
+ AVCaptureDeviceTypeBuiltInUltraWideCamera,
+#endif
+ ];
+
+#if QT_DARWIN_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_14_0, __IPHONE_17_0, __TVOS_NA, __WATCHOS_NA)
+ if (@available(macOS 14, iOS 17, *)) {
+ discoveryDevices = [discoveryDevices arrayByAddingObjectsFromArray: @[
+ AVCaptureDeviceTypeExternal,
+ AVCaptureDeviceTypeContinuityCamera
+ ]];
+ } else
+#endif
+ {
+#ifdef Q_OS_MACOS
+ QT_WARNING_PUSH
+ QT_WARNING_DISABLE_DEPRECATED
+ discoveryDevices = [discoveryDevices arrayByAddingObjectsFromArray: @[
+ AVCaptureDeviceTypeExternalUnknown
+ ]];
+ QT_WARNING_POP
+#endif
+ }
+ // Create discovery session to discover all possible camera types of the system.
+ // Both "hard" and "soft" types.
+ AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession
+ discoverySessionWithDeviceTypes:discoveryDevices
+ mediaType:AVMediaTypeVideo
+ position:AVCaptureDevicePositionUnspecified];
+ NSArray<AVCaptureDevice *> *videoDevices = discoverySession.devices;
+
+ for (AVCaptureDevice *device in videoDevices) {
+ auto info = std::make_unique<QCameraDevicePrivate>();
+ if ([videoDevices[0].uniqueID isEqualToString:device.uniqueID])
+ info->isDefault = true;
+ info->id = QByteArray([[device uniqueID] UTF8String]);
+ info->description = QString::fromNSString([device localizedName]);
+
+ qCDebug(qLcCamera) << "Handling camera info" << info->description
+ << (info->isDefault ? "(default)" : "");
+
+ QSet<QSize> photoResolutions;
+ QList<QCameraFormat> videoFormats;
+
+ for (AVCaptureDeviceFormat *format in device.formats) {
+ if (![format.mediaType isEqualToString:AVMediaTypeVideo])
+ continue;
+
+ auto dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+ QSize resolution(dimensions.width, dimensions.height);
+ photoResolutions.insert(resolution);
+
+ float maxFrameRate = 0;
+ float minFrameRate = 1.e6;
+
+ auto encoding = CMVideoFormatDescriptionGetCodecType(format.formatDescription);
+ auto pixelFormat = QAVFHelpers::fromCVPixelFormat(encoding);
+ auto colorRange = QAVFHelpers::colorRangeForCVPixelFormat(encoding);
+ // Ignore pixel formats we can't handle
+ if (pixelFormat == QVideoFrameFormat::Format_Invalid) {
+ qCDebug(qLcCamera) << "ignore camera CV format" << encoding
+ << "as no matching video format found";
+ continue;
+ }
+
+ for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) {
+ if (frameRateRange.minFrameRate < minFrameRate)
+ minFrameRate = frameRateRange.minFrameRate;
+ if (frameRateRange.maxFrameRate > maxFrameRate)
+ maxFrameRate = frameRateRange.maxFrameRate;
+ }
+
+#ifdef Q_OS_IOS
+ // From Apple's docs (iOS):
+ // By default, AVCaptureStillImageOutput emits images with the same dimensions as
+ // its source AVCaptureDevice instance’s activeFormat.formatDescription. However,
+ // if you set this property to YES, the receiver emits still images at the capture
+ // device’s highResolutionStillImageDimensions value.
+ const QSize hrRes(qt_device_format_high_resolution(format));
+ if (!hrRes.isNull() && hrRes.isValid())
+ photoResolutions.insert(hrRes);
+#endif
+
+ qCDebug(qLcCamera) << "Add camera format. pixelFormat:" << pixelFormat
+ << "colorRange:" << colorRange << "cvPixelFormat" << encoding
+ << "resolution:" << resolution << "frameRate: [" << minFrameRate
+ << maxFrameRate << "]";
+
+ auto *f = new QCameraFormatPrivate{ QSharedData(), pixelFormat, resolution,
+ minFrameRate, maxFrameRate, colorRange };
+ videoFormats << f->create();
+ }
+ if (videoFormats.isEmpty()) {
+ // skip broken cameras without valid formats
+ qCWarning(qLcCamera())
+ << "Skip camera" << info->description << "without supported formats";
+ continue;
+ }
+ info->videoFormats = videoFormats;
+ info->photoResolutions = photoResolutions.values();
+
+ cameras.append(info.release()->create());
+ }
+
+ if (cameras != m_cameraDevices) {
+ m_cameraDevices = cameras;
+ emit videoInputsChanged();
+ }
+}
+
+
+QAVFCameraBase::QAVFCameraBase(QCamera *camera)
+ : QPlatformCamera(camera)
+{
+ Q_ASSERT(camera);
+}
+
+QAVFCameraBase::~QAVFCameraBase()
+{
+}
+
+bool QAVFCameraBase::isActive() const
+{
+ return m_active;
+}
+
+void QAVFCameraBase::setActive(bool active)
+{
+ if (m_active == active)
+ return;
+ if (m_cameraDevice.isNull() && active)
+ return;
+
+ m_active = active;
+
+ if (active)
+ updateCameraConfiguration();
+ Q_EMIT activeChanged(m_active);
+}
+
+void QAVFCameraBase::setCamera(const QCameraDevice &camera)
+{
+ if (m_cameraDevice == camera)
+ return;
+ m_cameraDevice = camera;
+ setCameraFormat({});
+}
+
+bool QAVFCameraBase::setCameraFormat(const QCameraFormat &format)
+{
+ if (!format.isNull() && !m_cameraDevice.videoFormats().contains(format))
+ return false;
+
+ m_cameraFormat = format.isNull() ? findBestCameraFormat(m_cameraDevice) : format;
+
+ return true;
+}
+
+AVCaptureDevice *QAVFCameraBase::device() const
+{
+ AVCaptureDevice *device = nullptr;
+ QByteArray deviceId = m_cameraDevice.id();
+ if (!deviceId.isEmpty()) {
+ device = [AVCaptureDevice deviceWithUniqueID:
+ [NSString stringWithUTF8String:
+ deviceId.constData()]];
+ }
+ return device;
+}
+
+#ifdef Q_OS_IOS
+namespace
+{
+
+bool qt_focus_mode_supported(QCamera::FocusMode mode)
+{
+ // Check if QCamera::FocusMode has counterpart in AVFoundation.
+
+ // AVFoundation has 'Manual', 'Auto' and 'Continuous',
+ // where 'Manual' is actually 'Locked' + writable property 'lensPosition'.
+ return mode == QCamera::FocusModeAuto
+ || mode == QCamera::FocusModeManual;
+}
+
+AVCaptureFocusMode avf_focus_mode(QCamera::FocusMode requestedMode)
+{
+ switch (requestedMode) {
+ case QCamera::FocusModeHyperfocal:
+ case QCamera::FocusModeInfinity:
+ case QCamera::FocusModeManual:
+ return AVCaptureFocusModeLocked;
+ default:
+ return AVCaptureFocusModeContinuousAutoFocus;
+ }
+
+
+}
+#endif
+
+void QAVFCameraBase::setFocusMode(QCamera::FocusMode mode)
+{
+#ifdef Q_OS_IOS
+ if (focusMode() == mode)
+ return;
+
+ AVCaptureDevice *captureDevice = device();
+ if (!captureDevice) {
+ if (qt_focus_mode_supported(mode)) {
+ focusModeChanged(mode);
+ } else {
+ qCDebug(qLcCamera) << Q_FUNC_INFO
+ << "focus mode not supported";
+ }
+ return;
+ }
+
+ if (isFocusModeSupported(mode)) {
+ const AVFConfigurationLock lock(captureDevice);
+ if (!lock) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO
+ << "failed to lock for configuration";
+ return;
+ }
+
+ captureDevice.focusMode = avf_focus_mode(mode);
+ } else {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "focus mode not supported";
+ return;
+ }
+
+ Q_EMIT focusModeChanged(mode);
+#else
+ Q_UNUSED(mode);
+#endif
+}
+
+bool QAVFCameraBase::isFocusModeSupported(QCamera::FocusMode mode) const
+{
+#ifdef Q_OS_IOS
+ AVCaptureDevice *captureDevice = device();
+ if (captureDevice) {
+ AVCaptureFocusMode avMode = avf_focus_mode(mode);
+ switch (mode) {
+ case QCamera::FocusModeAuto:
+ case QCamera::FocusModeHyperfocal:
+ case QCamera::FocusModeInfinity:
+ case QCamera::FocusModeManual:
+ return [captureDevice isFocusModeSupported:avMode];
+ case QCamera::FocusModeAutoNear:
+ Q_FALLTHROUGH();
+ case QCamera::FocusModeAutoFar:
+ return captureDevice.autoFocusRangeRestrictionSupported
+ && [captureDevice isFocusModeSupported:avMode];
+ }
+ }
+#endif
+ return mode == QCamera::FocusModeAuto; // stupid builtin webcam doesn't do any focus handling, but hey it's usually focused :)
+}
+
+void QAVFCameraBase::setCustomFocusPoint(const QPointF &point)
+{
+ if (customFocusPoint() == point)
+ return;
+
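+    // Focus points are normalized: (0,0) is the top-left and (1,1) the
+    // bottom-right corner of the frame, matching AVFoundation's
+    // focusPointOfInterest coordinate space.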
+ if (!QRectF(0.f, 0.f, 1.f, 1.f).contains(point)) {
+ // ### release custom focus point, tell the camera to focus where it wants...
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid focus point (out of range)";
+ return;
+ }
+
+ AVCaptureDevice *captureDevice = device();
+ if (!captureDevice)
+ return;
+
+ if ([captureDevice isFocusPointOfInterestSupported]) {
+ const AVFConfigurationLock lock(captureDevice);
+ if (!lock) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
+ return;
+ }
+
+ const CGPoint focusPOI = CGPointMake(point.x(), point.y());
+ [captureDevice setFocusPointOfInterest:focusPOI];
+ if (focusMode() != QCamera::FocusModeAuto)
+ [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus];
+
+ customFocusPointChanged(point);
+ }
+}
+
+void QAVFCameraBase::setFocusDistance(float d)
+{
+#ifdef Q_OS_IOS
+ AVCaptureDevice *captureDevice = device();
+ if (!captureDevice)
+ return;
+
+    if (!captureDevice.lockingFocusWithCustomLensPositionSupported) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "setting a custom focus distance is not supported";
+        return;
+    }
+
+ {
+ AVFConfigurationLock lock(captureDevice);
+ if (!lock) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
+ return;
+ }
+ [captureDevice setFocusModeLockedWithLensPosition:d completionHandler:nil];
+ }
+ focusDistanceChanged(d);
+#else
+ Q_UNUSED(d);
+#endif
+}
+
+void QAVFCameraBase::updateCameraConfiguration()
+{
+ AVCaptureDevice *captureDevice = device();
+ if (!captureDevice) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "capture device is nil in 'active' state";
+ return;
+ }
+
+ const AVFConfigurationLock lock(captureDevice);
+ if (!lock) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
+ return;
+ }
+
+ if ([captureDevice isFocusPointOfInterestSupported]) {
+ auto point = customFocusPoint();
+ const CGPoint focusPOI = CGPointMake(point.x(), point.y());
+ [captureDevice setFocusPointOfInterest:focusPOI];
+ }
+
+#ifdef Q_OS_IOS
+ if (focusMode() != QCamera::FocusModeAuto) {
+ const AVCaptureFocusMode avMode = avf_focus_mode(focusMode());
+ if (captureDevice.focusMode != avMode) {
+ if ([captureDevice isFocusModeSupported:avMode]) {
+ [captureDevice setFocusMode:avMode];
+ } else {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "focus mode not supported";
+ }
+ }
+ }
+
+ if (!captureDevice.activeFormat) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "camera state is active, but active format is nil";
+ return;
+ }
+
+ minimumZoomFactorChanged(captureDevice.minAvailableVideoZoomFactor);
+ maximumZoomFactorChanged(captureDevice.activeFormat.videoMaxZoomFactor);
+
+ captureDevice.videoZoomFactor = zoomFactor();
+
+ CMTime newDuration = AVCaptureExposureDurationCurrent;
+ bool setCustomMode = false;
+
+ float exposureTime = manualExposureTime();
+ if (exposureTime > 0
+ && !qt_exposure_duration_equal(captureDevice, exposureTime)) {
+ newDuration = CMTimeMakeWithSeconds(exposureTime, captureDevice.exposureDuration.timescale);
+ if (!qt_check_exposure_duration(captureDevice, newDuration)) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "requested exposure duration is out of range";
+ return;
+ }
+ setCustomMode = true;
+ }
+
+ float newISO = AVCaptureISOCurrent;
+ int iso = manualIsoSensitivity();
+ if (iso > 0 && !qt_iso_equal(captureDevice, iso)) {
+ newISO = iso;
+ if (!qt_check_ISO_value(captureDevice, newISO)) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "requested ISO value is out of range";
+ return;
+ }
+ setCustomMode = true;
+ }
+
+ float bias = exposureCompensation();
+ if (bias != 0 && !qt_exposure_bias_equal(captureDevice, bias)) {
+ // TODO: mixed fpns.
+ if (bias < captureDevice.minExposureTargetBias || bias > captureDevice.maxExposureTargetBias) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "exposure compensation value is"
+ << "out of range";
+ return;
+ }
+ [captureDevice setExposureTargetBias:bias completionHandler:nil];
+ }
+
+    // Setting the shutter speed (exposure duration) or ISO values
+    // also resets the exposure mode to Custom. While such settings
+    // are in effect we ignore any attempt to set the exposure mode.
+
+ if (setCustomMode) {
+ [captureDevice setExposureModeCustomWithDuration:newDuration
+ ISO:newISO
+ completionHandler:nil];
+ return;
+ }
+
+ QCamera::ExposureMode qtMode = exposureMode();
+ AVCaptureExposureMode avMode = AVCaptureExposureModeContinuousAutoExposure;
+ if (!qt_convert_exposure_mode(captureDevice, qtMode, avMode)) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "requested exposure mode is not supported";
+ return;
+ }
+
+ captureDevice.exposureMode = avMode;
+#endif
+
+ isFlashSupported = isFlashAutoSupported = false;
+ isTorchSupported = isTorchAutoSupported = false;
+
+ if (captureDevice.hasFlash) {
+ if ([captureDevice isFlashModeSupported:AVCaptureFlashModeOn])
+ isFlashSupported = true;
+ if ([captureDevice isFlashModeSupported:AVCaptureFlashModeAuto])
+ isFlashAutoSupported = true;
+ }
+
+ if (captureDevice.hasTorch) {
+ if ([captureDevice isTorchModeSupported:AVCaptureTorchModeOn])
+ isTorchSupported = true;
+ if ([captureDevice isTorchModeSupported:AVCaptureTorchModeAuto])
+ isTorchAutoSupported = true;
+ }
+
+ applyFlashSettings();
+ flashReadyChanged(isFlashSupported);
+}
+
+void QAVFCameraBase::updateCameraProperties()
+{
+ QCamera::Features features;
+ AVCaptureDevice *captureDevice = device();
+
+#ifdef Q_OS_IOS
+ features = QCamera::Feature::ColorTemperature | QCamera::Feature::ExposureCompensation |
+ QCamera::Feature::IsoSensitivity | QCamera::Feature::ManualExposureTime;
+
+ if (captureDevice && [captureDevice isLockingFocusWithCustomLensPositionSupported])
+ features |= QCamera::Feature::FocusDistance;
+#endif
+
+ if (captureDevice && [captureDevice isFocusPointOfInterestSupported])
+ features |= QCamera::Feature::CustomFocusPoint;
+
+ supportedFeaturesChanged(features);
+}
+
+void QAVFCameraBase::zoomTo(float factor, float rate)
+{
+ Q_UNUSED(factor);
+ Q_UNUSED(rate);
+
+#ifdef Q_OS_IOS
+ if (zoomFactor() == factor)
+ return;
+
+ AVCaptureDevice *captureDevice = device();
+ if (!captureDevice || !captureDevice.activeFormat)
+ return;
+
+ factor = qBound(captureDevice.minAvailableVideoZoomFactor, factor,
+ captureDevice.activeFormat.videoMaxZoomFactor);
+
+ const AVFConfigurationLock lock(captureDevice);
+ if (!lock) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
+ return;
+ }
+
+ if (rate <= 0)
+ captureDevice.videoZoomFactor = factor;
+ else
+ [captureDevice rampToVideoZoomFactor:factor withRate:rate];
+#endif
+}
+
+void QAVFCameraBase::setFlashMode(QCamera::FlashMode mode)
+{
+ if (flashMode() == mode)
+ return;
+
+ if (isActive() && !isFlashModeSupported(mode)) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "unsupported mode" << mode;
+ return;
+ }
+
+ flashModeChanged(mode);
+
+ if (!isActive())
+ return;
+
+ applyFlashSettings();
+}
+
+bool QAVFCameraBase::isFlashModeSupported(QCamera::FlashMode mode) const
+{
+ if (mode == QCamera::FlashOff)
+ return true;
+ else if (mode == QCamera::FlashOn)
+ return isFlashSupported;
+ else //if (mode == QCamera::FlashAuto)
+ return isFlashAutoSupported;
+}
+
+bool QAVFCameraBase::isFlashReady() const
+{
+ if (!isActive())
+ return false;
+
+ AVCaptureDevice *captureDevice = device();
+ if (!captureDevice)
+ return false;
+
+ if (!captureDevice.hasFlash)
+ return false;
+
+ if (!isFlashModeSupported(flashMode()))
+ return false;
+
+ // AVCaptureDevice's docs:
+ // "The flash may become unavailable if, for example,
+ // the device overheats and needs to cool off."
+ return [captureDevice isFlashAvailable];
+}
+
+void QAVFCameraBase::setTorchMode(QCamera::TorchMode mode)
+{
+ if (torchMode() == mode)
+ return;
+
+ if (isActive() && !isTorchModeSupported(mode)) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "unsupported torch mode" << mode;
+ return;
+ }
+
+ torchModeChanged(mode);
+
+ if (!isActive())
+ return;
+
+ applyFlashSettings();
+}
+
+bool QAVFCameraBase::isTorchModeSupported(QCamera::TorchMode mode) const
+{
+ if (mode == QCamera::TorchOff)
+ return true;
+ else if (mode == QCamera::TorchOn)
+ return isTorchSupported;
+ else //if (mode == QCamera::TorchAuto)
+ return isTorchAutoSupported;
+}
+
+void QAVFCameraBase::setExposureMode(QCamera::ExposureMode qtMode)
+{
+#ifdef Q_OS_IOS
+ if (qtMode != QCamera::ExposureAuto && qtMode != QCamera::ExposureManual) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "exposure mode not supported";
+ return;
+ }
+
+ AVCaptureDevice *captureDevice = device();
+ if (!captureDevice) {
+ exposureModeChanged(qtMode);
+ return;
+ }
+
+ AVCaptureExposureMode avMode = AVCaptureExposureModeContinuousAutoExposure;
+ if (!qt_convert_exposure_mode(captureDevice, qtMode, avMode)) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "exposure mode not supported";
+ return;
+ }
+
+ const AVFConfigurationLock lock(captureDevice);
+ if (!lock) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock a capture device"
+ << "for configuration";
+ return;
+ }
+
+ [captureDevice setExposureMode:avMode];
+ exposureModeChanged(qtMode);
+#else
+ Q_UNUSED(qtMode);
+#endif
+}
+
+bool QAVFCameraBase::isExposureModeSupported(QCamera::ExposureMode mode) const
+{
+ if (mode == QCamera::ExposureAuto)
+ return true;
+ if (mode != QCamera::ExposureManual)
+ return false;
+
+ if (@available(macOS 10.15, *)) {
+ AVCaptureDevice *captureDevice = device();
+ return captureDevice && [captureDevice isExposureModeSupported:AVCaptureExposureModeCustom];
+ }
+
+ return false;
+}
+
+void QAVFCameraBase::applyFlashSettings()
+{
+ Q_ASSERT(isActive());
+
+ AVCaptureDevice *captureDevice = device();
+ if (!captureDevice) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "no capture device found";
+ return;
+ }
+
+ const AVFConfigurationLock lock(captureDevice);
+
+ if (captureDevice.hasFlash) {
+ const auto mode = flashMode();
+
+ auto setAvFlashModeSafe = [&captureDevice](AVCaptureFlashMode avFlashMode) {
+ // Note: captureDevice.hasFlash may be true even though a given
+ // flash mode is unsupported, so check before setting it.
+ if ([captureDevice isFlashModeSupported:avFlashMode])
+ captureDevice.flashMode = avFlashMode;
+ else
+ qCDebug(qLcCamera) << "Attempt to setup unsupported flash mode " << avFlashMode;
+ };
+
+ if (mode == QCamera::FlashOff) {
+ setAvFlashModeSafe(AVCaptureFlashModeOff);
+ } else {
+ if ([captureDevice isFlashAvailable]) {
+ if (mode == QCamera::FlashOn)
+ setAvFlashModeSafe(AVCaptureFlashModeOn);
+ else if (mode == QCamera::FlashAuto)
+ setAvFlashModeSafe(AVCaptureFlashModeAuto);
+ } else {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "flash is not available at the moment";
+ }
+ }
+ }
+
+ if (captureDevice.hasTorch) {
+ const auto mode = torchMode();
+
+ auto setAvTorchModeSafe = [&captureDevice](AVCaptureTorchMode avTorchMode) {
+ if ([captureDevice isTorchModeSupported:avTorchMode])
+ captureDevice.torchMode = avTorchMode;
+ else
+ qCDebug(qLcCamera) << "Attempt to setup unsupported torch mode " << avTorchMode;
+ };
+
+ if (mode == QCamera::TorchOff) {
+ setAvTorchModeSafe(AVCaptureTorchModeOff);
+ } else {
+ if ([captureDevice isTorchAvailable]) {
+ if (mode == QCamera::TorchOn)
+ setAvTorchModeSafe(AVCaptureTorchModeOn);
+ else if (mode == QCamera::TorchAuto)
+ setAvTorchModeSafe(AVCaptureTorchModeAuto);
+ } else {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "torch is not available at the moment";
+ }
+ }
+ }
+}
+
+
+void QAVFCameraBase::setExposureCompensation(float bias)
+{
+#ifdef Q_OS_IOS
+ AVCaptureDevice *captureDevice = device();
+ if (!captureDevice) {
+ exposureCompensationChanged(bias);
+ return;
+ }
+
+ bias = qBound(captureDevice.minExposureTargetBias, bias, captureDevice.maxExposureTargetBias);
+
+ const AVFConfigurationLock lock(captureDevice);
+ if (!lock) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
+ return;
+ }
+
+ [captureDevice setExposureTargetBias:bias completionHandler:nil];
+ exposureCompensationChanged(bias);
+#else
+ Q_UNUSED(bias);
+#endif
+}
+
+void QAVFCameraBase::setManualExposureTime(float value)
+{
+#ifdef Q_OS_IOS
+ if (value < 0) {
+ setExposureMode(QCamera::ExposureAuto);
+ return;
+ }
+
+ AVCaptureDevice *captureDevice = device();
+ if (!captureDevice) {
+ exposureTimeChanged(value);
+ return;
+ }
+
+ const CMTime newDuration = CMTimeMakeWithSeconds(value, captureDevice.exposureDuration.timescale);
+ if (!qt_check_exposure_duration(captureDevice, newDuration)) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "shutter speed value is out of range";
+ return;
+ }
+
+ const AVFConfigurationLock lock(captureDevice);
+ if (!lock) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
+ return;
+ }
+
+ // Setting the shutter speed (exposure duration in Apple's terms,
+ // since there is no physical shutter) also resets the
+ // exposure mode to custom.
+ [captureDevice setExposureModeCustomWithDuration:newDuration
+ ISO:AVCaptureISOCurrent
+ completionHandler:nil];
+
+ exposureTimeChanged(value);
+
+#else
+ Q_UNUSED(value);
+#endif
+}
+
+float QAVFCameraBase::exposureTime() const
+{
+#ifdef Q_OS_IOS
+ AVCaptureDevice *captureDevice = device();
+ if (!captureDevice)
+ return -1.;
+ auto duration = captureDevice.exposureDuration;
+ return CMTimeGetSeconds(duration);
+#else
+ return -1;
+#endif
+}
+
+#ifdef Q_OS_IOS
+namespace {
+
+void avf_convert_white_balance_mode(QCamera::WhiteBalanceMode qtMode,
+ AVCaptureWhiteBalanceMode &avMode)
+{
+ if (qtMode == QCamera::WhiteBalanceAuto)
+ avMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
+ else
+ avMode = AVCaptureWhiteBalanceModeLocked;
+}
+
+bool avf_set_white_balance_mode(AVCaptureDevice *captureDevice,
+ AVCaptureWhiteBalanceMode avMode)
+{
+ Q_ASSERT(captureDevice);
+
+ const bool lock = [captureDevice lockForConfiguration:nil];
+ if (!lock) {
+ qDebug() << "Failed to lock a capture device for configuration\n";
+ return false;
+ }
+
+ captureDevice.whiteBalanceMode = avMode;
+ [captureDevice unlockForConfiguration];
+ return true;
+}
+
+bool avf_convert_temp_and_tint_to_wb_gains(AVCaptureDevice *captureDevice,
+ float temp, float tint, AVCaptureWhiteBalanceGains &wbGains)
+{
+ Q_ASSERT(captureDevice);
+
+ AVCaptureWhiteBalanceTemperatureAndTintValues wbTTValues = {
+ .temperature = temp,
+ .tint = tint
+ };
+ wbGains = [captureDevice deviceWhiteBalanceGainsForTemperatureAndTintValues:wbTTValues];
+
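+ // AVFoundation accepts gains only in [1.0, maxWhiteBalanceGain];
+ // reject temperature/tint combinations that map outside that range.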
+ if (wbGains.redGain >= 1.0 && wbGains.redGain <= captureDevice.maxWhiteBalanceGain
+ && wbGains.greenGain >= 1.0 && wbGains.greenGain <= captureDevice.maxWhiteBalanceGain
+ && wbGains.blueGain >= 1.0 && wbGains.blueGain <= captureDevice.maxWhiteBalanceGain)
+ return true;
+
+ return false;
+}
+
+bool avf_set_white_balance_gains(AVCaptureDevice *captureDevice,
+ AVCaptureWhiteBalanceGains wbGains)
+{
+ const bool lock = [captureDevice lockForConfiguration:nil];
+ if (!lock) {
+ qDebug() << "Failed to lock a capture device for configuration\n";
+ return false;
+ }
+
+ [captureDevice setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:wbGains
+ completionHandler:nil];
+ [captureDevice unlockForConfiguration];
+ return true;
+}
+
+}
+
+bool QAVFCameraBase::isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const
+{
+ if (mode == QCamera::WhiteBalanceAuto)
+ return true;
+ AVCaptureDevice *captureDevice = device();
+ if (!captureDevice)
+ return false;
+ return [captureDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeLocked];
+}
+
+void QAVFCameraBase::setWhiteBalanceMode(QCamera::WhiteBalanceMode mode)
+{
+ if (!isWhiteBalanceModeSupported(mode))
+ return;
+
+ AVCaptureDevice *captureDevice = device();
+ Q_ASSERT(captureDevice);
+
+ const AVFConfigurationLock lock(captureDevice);
+ if (!lock) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock a capture device"
+ << "for configuration";
+ return;
+ }
+
+ AVCaptureWhiteBalanceMode avMode;
+ avf_convert_white_balance_mode(mode, avMode);
+ avf_set_white_balance_mode(captureDevice, avMode);
+
+ if (mode == QCamera::WhiteBalanceAuto || mode == QCamera::WhiteBalanceManual) {
+ whiteBalanceModeChanged(mode);
+ return;
+ }
+
+ const int colorTemp = colorTemperatureForWhiteBalance(mode);
+ AVCaptureWhiteBalanceGains wbGains;
+ if (avf_convert_temp_and_tint_to_wb_gains(captureDevice, colorTemp, 0., wbGains)
+ && avf_set_white_balance_gains(captureDevice, wbGains))
+ whiteBalanceModeChanged(mode);
+}
+
+void QAVFCameraBase::setColorTemperature(int colorTemp)
+{
+ if (colorTemp == 0) {
+ colorTemperatureChanged(colorTemp);
+ return;
+ }
+
+ AVCaptureDevice *captureDevice = device();
+ if (!captureDevice || ![captureDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeLocked])
+ return;
+
+ const AVFConfigurationLock lock(captureDevice);
+ if (!lock) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock a capture device"
+ << "for configuration";
+ return;
+ }
+
+ AVCaptureWhiteBalanceGains wbGains;
+ if (avf_convert_temp_and_tint_to_wb_gains(captureDevice, colorTemp, 0., wbGains)
+ && avf_set_white_balance_gains(captureDevice, wbGains))
+ colorTemperatureChanged(colorTemp);
+}
+#endif
+
+void QAVFCameraBase::setManualIsoSensitivity(int value)
+{
+#ifdef Q_OS_IOS
+ if (value < 0) {
+ setExposureMode(QCamera::ExposureAuto);
+ return;
+ }
+
+ AVCaptureDevice *captureDevice = device();
+ if (!captureDevice) {
+ isoSensitivityChanged(value);
+ return;
+ }
+
+ if (!qt_check_ISO_value(captureDevice, value)) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "ISO value is out of range";
+ return;
+ }
+
+ const AVFConfigurationLock lock(captureDevice);
+ if (!lock) {
+ qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock a capture device"
+ << "for configuration";
+ return;
+ }
+
+ // Setting the ISO also resets the exposure mode to custom.
+ [captureDevice setExposureModeCustomWithDuration:AVCaptureExposureDurationCurrent
+ ISO:value
+ completionHandler:nil];
+
+ isoSensitivityChanged(value);
+#else
+ Q_UNUSED(value);
+#endif
+}
+
+int QAVFCameraBase::isoSensitivity() const
+{
+ return manualIsoSensitivity();
+}
+
+
+#include "moc_qavfcamerabase_p.cpp"
diff --git a/src/plugins/multimedia/darwin/camera/qavfcamerabase_p.h b/src/plugins/multimedia/darwin/camera/qavfcamerabase_p.h
new file mode 100644
index 000000000..1ad3ba250
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/qavfcamerabase_p.h
@@ -0,0 +1,110 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QAVFCAMERABASE_H
+#define QAVFCAMERABASE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qobject.h>
+
+#include <private/qplatformcamera_p.h>
+#include <private/qplatformvideodevices_p.h>
+
+Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureDeviceFormat);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureConnection);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureDevice);
+
+QT_BEGIN_NAMESPACE
+class QPlatformMediaIntegration;
+
+class QAVFVideoDevices : public QPlatformVideoDevices
+{
+public:
+ QAVFVideoDevices(QPlatformMediaIntegration *integration);
+ ~QAVFVideoDevices();
+
+ QList<QCameraDevice> videoDevices() const override;
+
+private:
+ void updateCameraDevices();
+
+ NSObject *m_deviceConnectedObserver;
+ NSObject *m_deviceDisconnectedObserver;
+
+ QList<QCameraDevice> m_cameraDevices;
+};
+
+
+class QAVFCameraBase : public QPlatformCamera
+{
+Q_OBJECT
+public:
+ QAVFCameraBase(QCamera *camera);
+ ~QAVFCameraBase();
+
+ bool isActive() const override;
+ void setActive(bool active) override;
+
+ void setCamera(const QCameraDevice &camera) override;
+ bool setCameraFormat(const QCameraFormat &format) override;
+
+ void setFocusMode(QCamera::FocusMode mode) override;
+ bool isFocusModeSupported(QCamera::FocusMode mode) const override;
+
+ void setCustomFocusPoint(const QPointF &point) override;
+
+ void setFocusDistance(float d) override;
+ void zoomTo(float factor, float rate) override;
+
+ void setFlashMode(QCamera::FlashMode mode) override;
+ bool isFlashModeSupported(QCamera::FlashMode mode) const override;
+ bool isFlashReady() const override;
+
+ void setTorchMode(QCamera::TorchMode mode) override;
+ bool isTorchModeSupported(QCamera::TorchMode mode) const override;
+
+ void setExposureMode(QCamera::ExposureMode) override;
+ bool isExposureModeSupported(QCamera::ExposureMode mode) const override;
+
+ void setExposureCompensation(float bias) override;
+ void setManualIsoSensitivity(int value) override;
+ int isoSensitivity() const override;
+ void setManualExposureTime(float value) override;
+ float exposureTime() const override;
+
+#ifdef Q_OS_IOS
+ // not supported on macOS
+ bool isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const override;
+ void setWhiteBalanceMode(QCamera::WhiteBalanceMode /*mode*/) override;
+ void setColorTemperature(int /*temperature*/) override;
+#endif
+
+ AVCaptureDevice *device() const;
+
+protected:
+ void updateCameraConfiguration();
+ void updateCameraProperties();
+ void applyFlashSettings();
+
+ QCameraDevice m_cameraDevice;
+ bool m_active = false;
+private:
+ bool isFlashSupported = false;
+ bool isFlashAutoSupported = false;
+ bool isTorchSupported = false;
+ bool isTorchAutoSupported = false;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/common/avfmetadata.mm b/src/plugins/multimedia/darwin/common/avfmetadata.mm
new file mode 100644
index 000000000..da07f69c6
--- /dev/null
+++ b/src/plugins/multimedia/darwin/common/avfmetadata.mm
@@ -0,0 +1,382 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfmetadata_p.h"
+#include <qdarwinformatsinfo_p.h>
+#include <avfmediaplayer_p.h>
+
+#include <QtCore/qbuffer.h>
+#include <QtCore/qiodevice.h>
+#include <QtCore/qdatetime.h>
+#include <QtCore/qlocale.h>
+#include <QtCore/qurl.h>
+#include <QImage>
+#include <QtMultimedia/qvideoframe.h>
+
+#if __has_include(<AppKit/AppKit.h>)
+#include <AppKit/AppKit.h>
+#endif
+
+#include <CoreFoundation/CoreFoundation.h>
+
+QT_USE_NAMESPACE
+
+struct AVMetadataIDs {
+ AVMetadataIdentifier common;
+ AVMetadataIdentifier iTunes;
+ AVMetadataIdentifier quickTime;
+ AVMetadataIdentifier ID3;
+ AVMetadataIdentifier quickTimeUserData;
+ AVMetadataIdentifier isoUserData;
+};
+
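+// One row per QMediaMetaData::Key, in enum order (the static_asserts
+// below enforce the count); nil marks a key space that has no
+// equivalent identifier for that key.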
+const AVMetadataIDs keyToAVMetaDataID[] = {
+ // Title
+ { AVMetadataCommonIdentifierTitle, AVMetadataIdentifieriTunesMetadataSongName,
+ AVMetadataIdentifierQuickTimeMetadataTitle,
+ AVMetadataIdentifierID3MetadataTitleDescription,
+ nil, AVMetadata3GPUserDataKeyTitle },
+ // Author
+ { AVMetadataCommonIdentifierAuthor, AVMetadataIdentifieriTunesMetadataAuthor,
+ AVMetadataIdentifierQuickTimeMetadataAuthor, nil,
+ AVMetadataQuickTimeUserDataKeyAuthor, AVMetadata3GPUserDataKeyAuthor },
+ // Comment
+ { nil, AVMetadataIdentifieriTunesMetadataUserComment,
+ AVMetadataIdentifierQuickTimeMetadataComment, AVMetadataIdentifierID3MetadataComments,
+ AVMetadataQuickTimeUserDataKeyComment, nil },
+ // Description
+ { AVMetadataCommonIdentifierDescription, AVMetadataIdentifieriTunesMetadataDescription,
+ AVMetadataIdentifierQuickTimeMetadataDescription, nil,
+ AVMetadataQuickTimeUserDataKeyDescription, AVMetadata3GPUserDataKeyDescription },
+ // Genre
+ { nil, AVMetadataIdentifieriTunesMetadataUserGenre,
+ AVMetadataIdentifierQuickTimeMetadataGenre, nil,
+ AVMetadataQuickTimeUserDataKeyGenre, AVMetadata3GPUserDataKeyGenre },
+ // Date
+ { AVMetadataCommonIdentifierCreationDate, AVMetadataIdentifieriTunesMetadataReleaseDate,
+ AVMetadataIdentifierQuickTimeMetadataCreationDate, AVMetadataIdentifierID3MetadataDate,
+ AVMetadataQuickTimeUserDataKeyCreationDate, AVMetadataISOUserDataKeyDate },
+ // Language
+ { AVMetadataCommonIdentifierLanguage, nil, nil, AVMetadataIdentifierID3MetadataLanguage, nil, nil },
+ // Publisher
+ { AVMetadataCommonIdentifierPublisher, AVMetadataIdentifieriTunesMetadataPublisher,
+ AVMetadataIdentifierQuickTimeMetadataPublisher, AVMetadataIdentifierID3MetadataPublisher, nil, nil },
+ // Copyright
+ { AVMetadataCommonIdentifierCopyrights, AVMetadataIdentifieriTunesMetadataCopyright,
+ AVMetadataIdentifierQuickTimeMetadataCopyright, AVMetadataIdentifierID3MetadataCopyright,
+ AVMetadataQuickTimeUserDataKeyCopyright, AVMetadataISOUserDataKeyCopyright },
+ // Url
+ { nil, nil, nil, AVMetadataIdentifierID3MetadataOfficialAudioSourceWebpage, nil, nil },
+ // Duration
+ { nil, nil, nil, AVMetadataIdentifierID3MetadataLength, nil, nil },
+ // MediaType
+ { AVMetadataCommonIdentifierType, nil, nil, AVMetadataIdentifierID3MetadataContentType, nil, nil },
+ // FileFormat
+ { nil, nil, nil, AVMetadataIdentifierID3MetadataFileType, nil, nil },
+ // AudioBitRate
+ { nil, nil, nil, nil, nil, nil },
+ // AudioCodec
+ { nil, nil, nil, nil, nil, nil },
+ // VideoBitRate
+ { nil, nil, nil, nil, nil, nil },
+ // VideoCodec
+ { nil, nil, nil, nil, nil, nil },
+ // VideoFrameRate
+ { nil, nil, AVMetadataIdentifierQuickTimeMetadataCameraFrameReadoutTime, nil, nil, nil },
+ // AlbumTitle
+ { AVMetadataCommonIdentifierAlbumName, AVMetadataIdentifieriTunesMetadataAlbum,
+ AVMetadataIdentifierQuickTimeMetadataAlbum, AVMetadataIdentifierID3MetadataAlbumTitle,
+ AVMetadataQuickTimeUserDataKeyAlbum, AVMetadata3GPUserDataKeyAlbumAndTrack },
+ // AlbumArtist
+ { nil, AVMetadataIdentifieriTunesMetadataAlbumArtist, nil, nil,
+ AVMetadataQuickTimeUserDataKeyArtist, AVMetadata3GPUserDataKeyPerformer },
+ // ContributingArtist
+ { AVMetadataCommonIdentifierArtist, AVMetadataIdentifieriTunesMetadataArtist,
+ AVMetadataIdentifierQuickTimeMetadataArtist, nil, nil, nil },
+ // TrackNumber
+ { nil, AVMetadataIdentifieriTunesMetadataTrackNumber,
+ nil, AVMetadataIdentifierID3MetadataTrackNumber, nil, nil },
+ // Composer
+ { nil, AVMetadataIdentifieriTunesMetadataComposer,
+ AVMetadataIdentifierQuickTimeMetadataComposer, AVMetadataIdentifierID3MetadataComposer, nil, nil },
+ // LeadPerformer
+ { nil, AVMetadataIdentifieriTunesMetadataPerformer,
+ AVMetadataIdentifierQuickTimeMetadataPerformer, AVMetadataIdentifierID3MetadataLeadPerformer, nil, nil },
+ // ThumbnailImage
+ { nil, nil, nil, AVMetadataIdentifierID3MetadataAttachedPicture, nil, nil },
+ // CoverArtImage
+ { AVMetadataCommonIdentifierArtwork, AVMetadataIdentifieriTunesMetadataCoverArt,
+ AVMetadataIdentifierQuickTimeMetadataArtwork, nil, nil, nil },
+ // Orientation
+ { nil, nil, AVMetadataIdentifierQuickTimeMetadataVideoOrientation, nil, nil, nil },
+ // Resolution
+ { nil, nil, nil, nil, nil, nil }
+};
+
+static AVMetadataIdentifier toIdentifier(QMediaMetaData::Key key, AVMetadataKeySpace keySpace)
+{
+ static_assert(sizeof(keyToAVMetaDataID)/sizeof(AVMetadataIDs) == QMediaMetaData::Key::Resolution + 1);
+
+ AVMetadataIdentifier identifier = nil;
+ if ([keySpace isEqualToString:AVMetadataKeySpaceiTunes]) {
+ identifier = keyToAVMetaDataID[key].iTunes;
+ } else if ([keySpace isEqualToString:AVMetadataKeySpaceID3]) {
+ identifier = keyToAVMetaDataID[key].ID3;
+ } else if ([keySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata]) {
+ identifier = keyToAVMetaDataID[key].quickTime;
+ } else {
+ identifier = keyToAVMetaDataID[key].common;
+ }
+ return identifier;
+}
+
+static std::optional<QMediaMetaData::Key> toKey(AVMetadataItem *item)
+{
+ static_assert(sizeof(keyToAVMetaDataID)/sizeof(AVMetadataIDs) == QMediaMetaData::Key::Resolution + 1);
+
+ // The item identifier may be different from the ones we support,
+ // so check the common key first: it yields the metadata
+ // irrespective of the format.
+ AVMetadataKey commonKey = item.commonKey;
+ if (commonKey.length != 0) {
+ if ([commonKey isEqualToString:AVMetadataCommonKeyTitle]) {
+ return QMediaMetaData::Title;
+ } else if ([commonKey isEqualToString:AVMetadataCommonKeyDescription]) {
+ return QMediaMetaData::Description;
+ } else if ([commonKey isEqualToString:AVMetadataCommonKeyPublisher]) {
+ return QMediaMetaData::Publisher;
+ } else if ([commonKey isEqualToString:AVMetadataCommonKeyCreationDate]) {
+ return QMediaMetaData::Date;
+ } else if ([commonKey isEqualToString:AVMetadataCommonKeyType]) {
+ return QMediaMetaData::MediaType;
+ } else if ([commonKey isEqualToString:AVMetadataCommonKeyLanguage]) {
+ return QMediaMetaData::Language;
+ } else if ([commonKey isEqualToString:AVMetadataCommonKeyCopyrights]) {
+ return QMediaMetaData::Copyright;
+ } else if ([commonKey isEqualToString:AVMetadataCommonKeyAlbumName]) {
+ return QMediaMetaData::AlbumTitle;
+ } else if ([commonKey isEqualToString:AVMetadataCommonKeyAuthor]) {
+ return QMediaMetaData::Author;
+ } else if ([commonKey isEqualToString:AVMetadataCommonKeyArtist]) {
+ return QMediaMetaData::ContributingArtist;
+ }
+ }
+
+ // Fall back to checking the identifier when no common key was found.
+ // There is no need to check the common key space, since the item has no common key.
+ enum keySpaces { iTunes, QuickTime, QuickTimeUserData, IsoUserData, ID3, Other } itemKeySpace;
+ itemKeySpace = Other;
+ AVMetadataKeySpace keySpace = [item keySpace];
+ AVMetadataIdentifier identifier = [item identifier];
+
+ if ([keySpace isEqualToString:AVMetadataKeySpaceiTunes]) {
+ itemKeySpace = iTunes;
+ } else if ([keySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata]) {
+ itemKeySpace = QuickTime;
+ } else if ([keySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData]) {
+ itemKeySpace = QuickTimeUserData;
+ } else if ([keySpace isEqualToString:AVMetadataKeySpaceISOUserData]) {
+ itemKeySpace = IsoUserData;
+ } else if (([keySpace isEqualToString:AVMetadataKeySpaceID3])) {
+ itemKeySpace = ID3;
+ }
+
+ for (int key = 0; key < QMediaMetaData::Resolution + 1; key++) {
+ AVMetadataIdentifier idForKey = nil;
+ switch (itemKeySpace) {
+ case iTunes:
+ idForKey = keyToAVMetaDataID[key].iTunes;
+ break;
+ case QuickTime:
+ idForKey = keyToAVMetaDataID[key].quickTime;
+ break;
+ case ID3:
+ idForKey = keyToAVMetaDataID[key].ID3;
+ break;
+ case QuickTimeUserData:
+ idForKey = keyToAVMetaDataID[key].quickTimeUserData;
+ break;
+ case IsoUserData:
+ idForKey = keyToAVMetaDataID[key].isoUserData;
+ break;
+ default:
+ break;
+ }
+
+ if ([identifier isEqualToString:idForKey])
+ return QMediaMetaData::Key(key);
+ }
+
+ return std::nullopt;
+}
+
+static QMediaMetaData fromAVMetadata(NSArray *metadataItems)
+{
+ QMediaMetaData metadata;
+
+ for (AVMetadataItem* item in metadataItems) {
+ auto key = toKey(item);
+ if (!key)
+ continue;
+
+ const QString value = QString::fromNSString([item stringValue]);
+ if (!value.isNull())
+ metadata.insert(*key, value);
+ }
+ return metadata;
+}
+
+QMediaMetaData AVFMetaData::fromAsset(AVAsset *asset)
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO;
+#endif
+ QMediaMetaData metadata = fromAVMetadata([asset metadata]);
+
+ // add duration
+ const CMTime time = [asset duration];
+ const qint64 duration = static_cast<qint64>(float(time.value) / float(time.timescale) * 1000.0f);
+ metadata.insert(QMediaMetaData::Duration, duration);
+
+ return metadata;
+}
+
+QMediaMetaData AVFMetaData::fromAssetTrack(AVAssetTrack *asset)
+{
+ QMediaMetaData metadata = fromAVMetadata([asset metadata]);
+ if ([asset.mediaType isEqualToString:AVMediaTypeAudio]) {
+ if (metadata.value(QMediaMetaData::Language).isNull()) {
+ auto *languageCode = asset.languageCode;
+ if (languageCode) {
+ // languageCode is encoded as ISO 639-2, which QLocale does not handle.
+ // Convert it to 639-1 first.
+ auto id = CFLocaleCreateCanonicalLanguageIdentifierFromString(kCFAllocatorDefault,
+ (__bridge CFStringRef)languageCode);
+ QString lang = QString::fromCFString(id);
+ CFRelease(id);
+ metadata.insert(QMediaMetaData::Language, QLocale::codeToLanguage(lang));
+ }
+ }
+ }
+ if ([asset.mediaType isEqualToString:AVMediaTypeVideo]) {
+ // add orientation
+ if (metadata.value(QMediaMetaData::Orientation).isNull()) {
+ QtVideo::Rotation angle = QtVideo::Rotation::None;
+ bool mirrored;
+ AVFMediaPlayer::videoOrientationForAssetTrack(asset, angle, mirrored);
+ Q_UNUSED(mirrored);
+ metadata.insert(QMediaMetaData::Orientation, int(angle));
+ }
+ }
+ return metadata;
+}
+
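+// Builds an AVMutableMetadataItem for the given key/value in the requested
+// key space; returns nil when the key has no identifier in that space.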
+static AVMutableMetadataItem *setAVMetadataItemForKey(QMediaMetaData::Key key, const QVariant &value,
+ AVMetadataKeySpace keySpace = AVMetadataKeySpaceCommon)
+{
+ AVMetadataIdentifier identifier = toIdentifier(key, keySpace);
+ if (!identifier.length)
+ return nil;
+
+ AVMutableMetadataItem *item = [AVMutableMetadataItem metadataItem];
+ item.keySpace = keySpace;
+ item.identifier = identifier;
+
+ switch (key) {
+ case QMediaMetaData::ThumbnailImage:
+ case QMediaMetaData::CoverArtImage: {
+#if defined(Q_OS_MACOS)
+ QImage img = value.value<QImage>();
+ if (!img.isNull()) {
+ QByteArray arr;
+ QBuffer buffer(&arr);
+ buffer.open(QIODevice::WriteOnly);
+ img.save(&buffer);
+ NSData *data = arr.toNSData();
+ NSImage *nsImg = [[NSImage alloc] initWithData:data];
+ item.value = nsImg;
+ [nsImg release];
+ }
+#endif
+ break;
+ }
+ case QMediaMetaData::FileFormat: {
+ QMediaFormat::FileFormat qtFormat = value.value<QMediaFormat::FileFormat>();
+ AVFileType avFormat = QDarwinFormatInfo::avFileTypeForContainerFormat(qtFormat);
+ item.value = avFormat;
+ break;
+ }
+ case QMediaMetaData::Language: {
+ QString lang = QLocale::languageToCode(value.value<QLocale::Language>());
+ if (!lang.isEmpty())
+ item.value = lang.toNSString();
+ break;
+ }
+ case QMediaMetaData::Orientation: {
+ bool ok;
+ int rotation = value.toInt(&ok);
+ if (ok)
+ item.value = [NSNumber numberWithInt:rotation];
+ break;
+ }
+ default: {
+ switch (value.typeId()) {
+ case QMetaType::QString: {
+ item.value = value.toString().toNSString();
+ break;
+ }
+ case QMetaType::Int: {
+ item.value = [NSNumber numberWithInt:value.toInt()];
+ break;
+ }
+ case QMetaType::LongLong: {
+ item.value = [NSNumber numberWithLongLong:value.toLongLong()];
+ break;
+ }
+ case QMetaType::Double: {
+ item.value = [NSNumber numberWithDouble:value.toDouble()];
+ break;
+ }
+ case QMetaType::QDate:
+ case QMetaType::QDateTime: {
+ item.value = value.toDateTime().toNSDate();
+ break;
+ }
+ case QMetaType::QUrl: {
+ item.value = value.toUrl().toNSURL();
+ break;
+ }
+ default:
+ break;
+ }
+ }
+ }
+
+ return item;
+}
+
+NSMutableArray<AVMetadataItem *> *AVFMetaData::toAVMetadataForFormat(QMediaMetaData metadata, AVFileType format)
+{
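+ // Pick the metadata key spaces that match the container format
+ // (iTunes for M4A, ID3 plus iTunes for MP3, QuickTime metadata for
+ // QuickTime movies), falling back to the common key space otherwise.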
+ NSMutableArray<AVMetadataKeySpace> *keySpaces = [NSMutableArray<AVMetadataKeySpace> array];
+ if (format == AVFileTypeAppleM4A) {
+ [keySpaces addObject:AVMetadataKeySpaceiTunes];
+ } else if (format == AVFileTypeMPEGLayer3) {
+ [keySpaces addObject:AVMetadataKeySpaceID3];
+ [keySpaces addObject:AVMetadataKeySpaceiTunes];
+ } else if (format == AVFileTypeQuickTimeMovie) {
+ [keySpaces addObject:AVMetadataKeySpaceQuickTimeMetadata];
+ } else {
+ [keySpaces addObject:AVMetadataKeySpaceCommon];
+ }
+ NSMutableArray<AVMetadataItem *> *avMetaDataArr = [NSMutableArray array];
+ for (const auto &key : metadata.keys()) {
+ for (NSUInteger i = 0; i < [keySpaces count]; i++) {
+ const QVariant &value = metadata.value(key);
+ // set format-specific metadata
+ AVMetadataItem *item = setAVMetadataItemForKey(key, value, keySpaces[i]);
+ if (item)
+ [avMetaDataArr addObject:item];
+ }
+ }
+ return avMetaDataArr;
+}
+
diff --git a/src/plugins/multimedia/darwin/common/avfmetadata_p.h b/src/plugins/multimedia/darwin/common/avfmetadata_p.h
new file mode 100644
index 000000000..d1cb2e7e8
--- /dev/null
+++ b/src/plugins/multimedia/darwin/common/avfmetadata_p.h
@@ -0,0 +1,37 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFMEDIAPLAYERMETADATACONTROL_H
+#define AVFMEDIAPLAYERMETADATACONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/QMediaMetaData>
+#include <QtCore/qvariant.h>
+
+#import <AVFoundation/AVFoundation.h>
+
+QT_BEGIN_NAMESPACE
+
+class AVFMediaPlayer;
+
+class AVFMetaData
+{
+public:
+ static QMediaMetaData fromAsset(AVAsset *asset);
+ static QMediaMetaData fromAssetTrack(AVAssetTrack *asset);
+ static NSMutableArray<AVMetadataItem *> *toAVMetadataForFormat(QMediaMetaData metaData, AVFileType format);
+};
+
+QT_END_NAMESPACE
+
+#endif // AVFMEDIAPLAYERMETADATACONTROL_H
diff --git a/src/plugins/multimedia/darwin/darwin.json b/src/plugins/multimedia/darwin/darwin.json
new file mode 100644
index 000000000..f72350b17
--- /dev/null
+++ b/src/plugins/multimedia/darwin/darwin.json
@@ -0,0 +1,3 @@
+{
+ "Keys": [ "darwin" ]
+}
diff --git a/src/plugins/multimedia/darwin/mediaplayer/avfdisplaylink.mm b/src/plugins/multimedia/darwin/mediaplayer/avfdisplaylink.mm
new file mode 100644
index 000000000..8c6561f37
--- /dev/null
+++ b/src/plugins/multimedia/darwin/mediaplayer/avfdisplaylink.mm
@@ -0,0 +1,207 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfdisplaylink_p.h"
+#include <QtCore/qcoreapplication.h>
+
+#ifdef QT_DEBUG_AVF
+#include <QtCore/qdebug.h>
+#endif
+
+#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+#import <QuartzCore/CADisplayLink.h>
+#import <Foundation/NSRunLoop.h>
+#define _m_displayLink static_cast<DisplayLinkObserver*>(m_displayLink)
+#endif
+
+QT_USE_NAMESPACE
+
+#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+@interface DisplayLinkObserver : NSObject
+
+- (void)start;
+- (void)stop;
+- (void)displayLinkNotification:(CADisplayLink *)sender;
+
+@end
+
+@implementation DisplayLinkObserver
+{
+ AVFDisplayLink *m_avfDisplayLink;
+ CADisplayLink *m_displayLink;
+}
+
+- (id)initWithAVFDisplayLink:(AVFDisplayLink *)link
+{
+ self = [super init];
+
+ if (self) {
+ m_avfDisplayLink = link;
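+ // displayLinkWithTarget: returns an autoreleased object (this file
+ // uses manual reference counting), so retain it; dealloc releases it.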
+ m_displayLink = [[CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkNotification:)] retain];
+ }
+
+ return self;
+}
+
+- (void) dealloc
+{
+ if (m_displayLink) {
+ [m_displayLink release];
+ m_displayLink = nullptr;
+ }
+
+ [super dealloc];
+}
+
+- (void)start
+{
+ [m_displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
+}
+
+- (void)stop
+{
+ [m_displayLink removeFromRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
+}
+
+- (void)displayLinkNotification:(CADisplayLink *)sender
+{
+ Q_UNUSED(sender);
+ m_avfDisplayLink->displayLinkEvent(nullptr);
+}
+
+@end
+#else
+static CVReturn CVDisplayLinkCallback(CVDisplayLinkRef displayLink,
+ const CVTimeStamp *inNow,
+ const CVTimeStamp *inOutputTime,
+ CVOptionFlags flagsIn,
+ CVOptionFlags *flagsOut,
+ void *displayLinkContext)
+{
+ Q_UNUSED(displayLink);
+ Q_UNUSED(inNow);
+ Q_UNUSED(flagsIn);
+ Q_UNUSED(flagsOut);
+
+ AVFDisplayLink *link = (AVFDisplayLink *)displayLinkContext;
+
+ link->displayLinkEvent(inOutputTime);
+ return kCVReturnSuccess;
+}
+#endif
+
+AVFDisplayLink::AVFDisplayLink(QObject *parent)
+ : QObject(parent)
+ , m_displayLink(nullptr)
+ , m_pendingDisplayLinkEvent(false)
+ , m_isActive(false)
+{
+#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+ m_displayLink = [[DisplayLinkObserver alloc] initWithAVFDisplayLink:this];
+#else
+ // create display link for the main display
+ CVDisplayLinkCreateWithCGDisplay(kCGDirectMainDisplay, &m_displayLink);
+ if (m_displayLink) {
+ // set the current display of a display link.
+ CVDisplayLinkSetCurrentCGDisplay(m_displayLink, kCGDirectMainDisplay);
+
+ // set the renderer output callback function
+ CVDisplayLinkSetOutputCallback(m_displayLink, &CVDisplayLinkCallback, this);
+ }
+#endif
+}
+
+AVFDisplayLink::~AVFDisplayLink()
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO;
+#endif
+
+ if (m_displayLink) {
+ stop();
+#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+ [_m_displayLink release];
+#else
+ CVDisplayLinkRelease(m_displayLink);
+#endif
+ m_displayLink = nullptr;
+ }
+}
+
+bool AVFDisplayLink::isValid() const
+{
+ return m_displayLink != nullptr;
+}
+
+bool AVFDisplayLink::isActive() const
+{
+ return m_isActive;
+}
+
+void AVFDisplayLink::start()
+{
+ if (m_displayLink && !m_isActive) {
+#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+ [_m_displayLink start];
+#else
+ CVDisplayLinkStart(m_displayLink);
+#endif
+ m_isActive = true;
+ }
+}
+
+void AVFDisplayLink::stop()
+{
+ if (m_displayLink && m_isActive) {
+#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+ [_m_displayLink stop];
+#else
+ CVDisplayLinkStop(m_displayLink);
+#endif
+ m_isActive = false;
+ }
+}
+
+void AVFDisplayLink::displayLinkEvent(const CVTimeStamp *ts)
+{
+ // This function is called from a thread other than the GUI thread,
+ // so we post an event instead. We also have to make sure we don't
+ // post events faster than the event loop can consume them:
+ m_displayLinkMutex.lock();
+ bool pending = m_pendingDisplayLinkEvent;
+ m_pendingDisplayLinkEvent = true;
+#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+ Q_UNUSED(ts);
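+ // CADisplayLink does not supply a CVTimeStamp, so publish a zeroed one.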
+ memset(&m_frameTimeStamp, 0, sizeof(CVTimeStamp));
+#else
+ m_frameTimeStamp = *ts;
+#endif
+ m_displayLinkMutex.unlock();
+
+ if (!pending)
+ qApp->postEvent(this, new QEvent(QEvent::User), Qt::HighEventPriority);
+}
+
+bool AVFDisplayLink::event(QEvent *event)
+{
+ switch (event->type()) {
+ case QEvent::User: {
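+ // Consume the pending flag and copy the timestamp under the mutex
+ // so displayLinkEvent() may post the next event.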
+ m_displayLinkMutex.lock();
+ m_pendingDisplayLinkEvent = false;
+ CVTimeStamp ts = m_frameTimeStamp;
+ m_displayLinkMutex.unlock();
+
+ Q_EMIT tick(ts);
+
+ return false;
+ }
+ default:
+ break;
+ }
+ return QObject::event(event);
+}
+
+#include "moc_avfdisplaylink_p.cpp"
diff --git a/src/plugins/multimedia/darwin/mediaplayer/avfdisplaylink_p.h b/src/plugins/multimedia/darwin/mediaplayer/avfdisplaylink_p.h
new file mode 100644
index 000000000..c4eb504a5
--- /dev/null
+++ b/src/plugins/multimedia/darwin/mediaplayer/avfdisplaylink_p.h
@@ -0,0 +1,65 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFDISPLAYLINK_H
+#define AVFDISPLAYLINK_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qobject.h>
+#include <QtCore/qmutex.h>
+
+#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+#include <CoreVideo/CVBase.h>
+#else
+#include <QuartzCore/CVDisplayLink.h>
+#endif
+
+QT_BEGIN_NAMESPACE
+
+class AVFDisplayLink : public QObject
+{
+ Q_OBJECT
+public:
+ explicit AVFDisplayLink(QObject *parent = nullptr);
+ virtual ~AVFDisplayLink();
+ bool isValid() const;
+ bool isActive() const;
+
+public Q_SLOTS:
+ void start();
+ void stop();
+
+Q_SIGNALS:
+ void tick(const CVTimeStamp &ts);
+
+public:
+ void displayLinkEvent(const CVTimeStamp *);
+
+protected:
+ virtual bool event(QEvent *) override;
+
+private:
+#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+ void *m_displayLink;
+#else
+ CVDisplayLinkRef m_displayLink;
+#endif
+ QMutex m_displayLinkMutex;
+ bool m_pendingDisplayLinkEvent;
+ bool m_isActive;
+ CVTimeStamp m_frameTimeStamp;
+};
+
+QT_END_NAMESPACE
+
+#endif // AVFDISPLAYLINK_H
diff --git a/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer.mm b/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer.mm
new file mode 100644
index 000000000..964964a8e
--- /dev/null
+++ b/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer.mm
@@ -0,0 +1,1270 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfmediaplayer_p.h"
+#include "avfvideorenderercontrol_p.h"
+#include <avfvideosink_p.h>
+#include <avfmetadata_p.h>
+
+#include "qaudiooutput.h"
+#include "private/qplatformaudiooutput_p.h"
+
+#include <qpointer.h>
+#include <QFileInfo>
+#include <QtCore/qmath.h>
+#include <QtCore/qmutex.h>
+
+#import <AVFoundation/AVFoundation.h>
+
+QT_USE_NAMESPACE
+
+//AVAsset Keys
+static NSString* const AVF_TRACKS_KEY = @"tracks";
+static NSString* const AVF_PLAYABLE_KEY = @"playable";
+
+//AVPlayerItem keys
+static NSString* const AVF_STATUS_KEY = @"status";
+static NSString* const AVF_BUFFER_LIKELY_KEEP_UP_KEY = @"playbackLikelyToKeepUp";
+
+//AVPlayer keys
+static NSString* const AVF_RATE_KEY = @"rate";
+static NSString* const AVF_CURRENT_ITEM_KEY = @"currentItem";
+static NSString* const AVF_CURRENT_ITEM_DURATION_KEY = @"currentItem.duration";
+
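+// Unique addresses used as KVO contexts, so the observer callback can
+// tell its own registrations apart from those of a superclass.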
+static void *AVFMediaPlayerObserverRateObservationContext = &AVFMediaPlayerObserverRateObservationContext;
+static void *AVFMediaPlayerObserverStatusObservationContext = &AVFMediaPlayerObserverStatusObservationContext;
+static void *AVFMediaPlayerObserverPresentationSizeContext = &AVFMediaPlayerObserverPresentationSizeContext;
+static void *AVFMediaPlayerObserverBufferLikelyToKeepUpContext = &AVFMediaPlayerObserverBufferLikelyToKeepUpContext;
+static void *AVFMediaPlayerObserverTracksContext = &AVFMediaPlayerObserverTracksContext;
+static void *AVFMediaPlayerObserverCurrentItemObservationContext = &AVFMediaPlayerObserverCurrentItemObservationContext;
+static void *AVFMediaPlayerObserverCurrentItemDurationObservationContext = &AVFMediaPlayerObserverCurrentItemDurationObservationContext;
+
+@interface AVFMediaPlayerObserver : NSObject<AVAssetResourceLoaderDelegate>
+
+@property (readonly, getter=player) AVPlayer* m_player;
+@property (readonly, getter=playerItem) AVPlayerItem* m_playerItem;
+@property (readonly, getter=playerLayer) AVPlayerLayer* m_playerLayer;
+@property (readonly, getter=session) AVFMediaPlayer* m_session;
+@property (retain) AVPlayerItemTrack *videoTrack;
+
+- (AVFMediaPlayerObserver *) initWithMediaPlayerSession:(AVFMediaPlayer *)session;
+- (void) setURL:(NSURL *)url mimeType:(NSString *)mimeType;
+- (void) unloadMedia;
+- (void) prepareToPlayAsset:(AVURLAsset *)asset withKeys:(NSArray *)requestedKeys;
+- (void) assetFailedToPrepareForPlayback:(NSError *)error;
+- (void) playerItemDidReachEnd:(NSNotification *)notification;
+- (void) observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object
+ change:(NSDictionary *)change context:(void *)context;
+- (void) detachSession;
+- (void) dealloc;
+- (BOOL) resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
+@end
+
+#ifdef Q_OS_IOS
+// Alas, no such thing as 'class variable', hence globals:
+static unsigned sessionActivationCount;
+static QMutex sessionMutex;
+#endif // Q_OS_IOS
+
+@implementation AVFMediaPlayerObserver
+{
+@private
+ AVFMediaPlayer *m_session;
+ AVPlayer *m_player;
+ AVPlayerItem *m_playerItem;
+ AVPlayerLayer *m_playerLayer;
+ NSURL *m_URL;
+ BOOL m_bufferIsLikelyToKeepUp;
+ NSData *m_data;
+ NSString *m_mimeType;
+#ifdef Q_OS_IOS
+ BOOL m_activated;
+#endif
+}
+
+@synthesize m_player, m_playerItem, m_playerLayer, m_session;
+
+#ifdef Q_OS_IOS
+- (void)setSessionActive:(BOOL)active
+{
+ const QMutexLocker lock(&sessionMutex);
+ if (active) {
+ // Don't count the same player twice if already activated,
+ // unless it tried to deactivate first:
+ if (m_activated)
+ return;
+ if (!sessionActivationCount)
+ [AVAudioSession.sharedInstance setActive:YES error:nil];
+ ++sessionActivationCount;
+ m_activated = YES;
+ } else {
+ if (!sessionActivationCount || !m_activated) {
+ qWarning("Unbalanced audio session deactivation, ignoring.");
+ return;
+ }
+ --sessionActivationCount;
+ m_activated = NO;
+ if (!sessionActivationCount)
+ [AVAudioSession.sharedInstance setActive:NO error:nil];
+ }
+}
+#endif // Q_OS_IOS
+
+- (AVFMediaPlayerObserver *) initWithMediaPlayerSession:(AVFMediaPlayer *)session
+{
+ if (!(self = [super init]))
+ return nil;
+
+ m_session = session;
+ m_bufferIsLikelyToKeepUp = NO;
+
+ m_playerLayer = [AVPlayerLayer playerLayerWithPlayer:nil];
+ [m_playerLayer retain];
+ m_playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
+ m_playerLayer.anchorPoint = CGPointMake(0.0f, 0.0f);
+ return self;
+}
+
+- (void) setURL:(NSURL *)url mimeType:(NSString *)mimeType
+{
+ if (!m_session)
+ return;
+
+ [m_mimeType release];
+ m_mimeType = [mimeType retain];
+
+ if (m_URL != url)
+ {
+ [m_URL release];
+ m_URL = [url copy];
+
+ //Create an asset for inspection of a resource referenced by a given URL.
+ //Load the values for the asset keys "tracks", "playable".
+
+ // use __block to avoid maintaining strong references on variables captured by the
+ // following block callback
+#if defined(Q_OS_IOS)
+ BOOL isAccessing = [m_URL startAccessingSecurityScopedResource];
+#endif
+ __block AVURLAsset *asset = [[AVURLAsset URLAssetWithURL:m_URL options:nil] retain];
+ [asset.resourceLoader setDelegate:self queue:dispatch_get_main_queue()];
+
+ __block NSArray *requestedKeys = [[NSArray arrayWithObjects:AVF_TRACKS_KEY, AVF_PLAYABLE_KEY, nil] retain];
+
+ __block AVFMediaPlayerObserver *blockSelf = [self retain];
+
+ // Tells the asset to load the values of any of the specified keys that are not already loaded.
+ [asset loadValuesAsynchronouslyForKeys:requestedKeys completionHandler:
+ ^{
+ dispatch_async( dispatch_get_main_queue(),
+ ^{
+#if defined(Q_OS_IOS)
+ if (isAccessing)
+ [m_URL stopAccessingSecurityScopedResource];
+#endif
+ [blockSelf prepareToPlayAsset:asset withKeys:requestedKeys];
+ [asset release];
+ [requestedKeys release];
+ [blockSelf release];
+ });
+ }];
+ }
+}
+
+- (void) unloadMedia
+{
+ if (m_playerItem) {
+ [m_playerItem removeObserver:self forKeyPath:@"presentationSize"];
+ [m_playerItem removeObserver:self forKeyPath:AVF_STATUS_KEY];
+ [m_playerItem removeObserver:self forKeyPath:AVF_BUFFER_LIKELY_KEEP_UP_KEY];
+ [m_playerItem removeObserver:self forKeyPath:AVF_TRACKS_KEY];
+
+ [[NSNotificationCenter defaultCenter] removeObserver:self
+ name:AVPlayerItemDidPlayToEndTimeNotification
+ object:m_playerItem];
+ m_playerItem = 0;
+ }
+ if (m_player) {
+ [m_player setRate:0.0];
+ [m_player removeObserver:self forKeyPath:AVF_CURRENT_ITEM_DURATION_KEY];
+ [m_player removeObserver:self forKeyPath:AVF_CURRENT_ITEM_KEY];
+ [m_player removeObserver:self forKeyPath:AVF_RATE_KEY];
+ [m_player release];
+ m_player = 0;
+ }
+ if (m_playerLayer)
+ m_playerLayer.player = nil;
+#if defined(Q_OS_IOS)
+ [self setSessionActive:NO];
+#endif
+}
+
+- (void) prepareToPlayAsset:(AVURLAsset *)asset
+ withKeys:(NSArray *)requestedKeys
+{
+ if (!m_session)
+ return;
+
+ //Make sure that the value of each key has loaded successfully.
+ for (NSString *thisKey in requestedKeys)
+ {
+ NSError *error = nil;
+ AVKeyValueStatus keyStatus = [asset statusOfValueForKey:thisKey error:&error];
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO << [thisKey UTF8String] << " status: " << keyStatus;
+#endif
+ if (keyStatus == AVKeyValueStatusFailed)
+ {
+ [self assetFailedToPrepareForPlayback:error];
+ return;
+ }
+ }
+
+ //Use the AVAsset playable property to detect whether the asset can be played.
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO << "isPlayable: " << [asset isPlayable];
+#endif
+ if (!asset.playable)
+ qWarning() << "Asset reported to be not playable. Playback of this asset may not be possible.";
+
+ //At this point we're ready to set up for playback of the asset.
+ //Stop observing our prior AVPlayerItem, if we have one.
+ if (m_playerItem)
+ {
+ //Remove existing player item key value observers and notifications.
+ [self unloadMedia];
+ }
+
+ //Create a new instance of AVPlayerItem from the now successfully loaded AVAsset.
+ m_playerItem = [AVPlayerItem playerItemWithAsset:asset];
+ if (!m_playerItem) {
+ qWarning() << "Failed to create player item";
+ //Generate an error describing the failure.
+ NSString *localizedDescription = NSLocalizedString(@"Item cannot be played", @"Item cannot be played description");
+ NSString *localizedFailureReason = NSLocalizedString(@"The asset's tracks were loaded, but the player item could not be created.", @"Item cannot be played failure reason");
+ NSDictionary *errorDict = [NSDictionary dictionaryWithObjectsAndKeys:
+ localizedDescription, NSLocalizedDescriptionKey,
+ localizedFailureReason, NSLocalizedFailureReasonErrorKey,
+ nil];
+ NSError *assetCannotBePlayedError = [NSError errorWithDomain:@"StitchedStreamPlayer" code:0 userInfo:errorDict];
+
+ [self assetFailedToPrepareForPlayback:assetCannotBePlayedError];
+ return;
+ }
+
+ //Observe the player item "status" key to determine when it is ready to play.
+ [m_playerItem addObserver:self
+ forKeyPath:AVF_STATUS_KEY
+ options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
+ context:AVFMediaPlayerObserverStatusObservationContext];
+
+ [m_playerItem addObserver:self
+ forKeyPath:@"presentationSize"
+ options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
+ context:AVFMediaPlayerObserverPresentationSizeContext];
+
+ [m_playerItem addObserver:self
+ forKeyPath:AVF_BUFFER_LIKELY_KEEP_UP_KEY
+ options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
+ context:AVFMediaPlayerObserverBufferLikelyToKeepUpContext];
+
+ [m_playerItem addObserver:self
+ forKeyPath:AVF_TRACKS_KEY
+ options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
+ context:AVFMediaPlayerObserverTracksContext];
+
+ //When the player item has played to its end time we'll toggle
+ //the movie controller Pause button to be the Play button
+ [[NSNotificationCenter defaultCenter] addObserver:self
+ selector:@selector(playerItemDidReachEnd:)
+ name:AVPlayerItemDidPlayToEndTimeNotification
+ object:m_playerItem];
+
+ //Get a new AVPlayer initialized to play the specified player item.
+ m_player = [AVPlayer playerWithPlayerItem:m_playerItem];
+ [m_player retain];
+
+ //Set the initial volume on new player object
+ if (self.session) {
+ auto *audioOutput = m_session->m_audioOutput;
+ m_player.volume = (audioOutput ? audioOutput->volume : 1.);
+ m_player.muted = (audioOutput ? audioOutput->muted : true);
+ }
+
+ //Assign the output layer to the new player
+ m_playerLayer.player = m_player;
+
+ //Observe the AVPlayer "currentItem" property to find out when any
+ //AVPlayer replaceCurrentItemWithPlayerItem: replacement will/did
+ //occur.
+ [m_player addObserver:self
+ forKeyPath:AVF_CURRENT_ITEM_KEY
+ options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
+ context:AVFMediaPlayerObserverCurrentItemObservationContext];
+
+ //Observe the AVPlayer "rate" property to update the scrubber control.
+ [m_player addObserver:self
+ forKeyPath:AVF_RATE_KEY
+ options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
+ context:AVFMediaPlayerObserverRateObservationContext];
+
+ //Observe the duration for getting the buffer state
+ [m_player addObserver:self
+ forKeyPath:AVF_CURRENT_ITEM_DURATION_KEY
+ options:0
+ context:AVFMediaPlayerObserverCurrentItemDurationObservationContext];
+#if defined(Q_OS_IOS)
+ [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback withOptions:AVAudioSessionCategoryOptionMixWithOthers error:nil];
+ [self setSessionActive:YES];
+#endif
+}
+
+-(void) assetFailedToPrepareForPlayback:(NSError *)error
+{
+ Q_UNUSED(error);
+ QMetaObject::invokeMethod(m_session, "processMediaLoadError", Qt::AutoConnection);
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO;
+ qDebug() << [[error localizedDescription] UTF8String];
+ qDebug() << [[error localizedFailureReason] UTF8String];
+ qDebug() << [[error localizedRecoverySuggestion] UTF8String];
+#endif
+}
+
+- (void) playerItemDidReachEnd:(NSNotification *)notification
+{
+ Q_UNUSED(notification);
+ if (self.session)
+ QMetaObject::invokeMethod(m_session, "processEOS", Qt::AutoConnection);
+}
+
+- (void) observeValueForKeyPath:(NSString*) path
+ ofObject:(id)object
+ change:(NSDictionary*)change
+ context:(void*)context
+{
+ //AVPlayerItem "status" property value observer.
+ if (context == AVFMediaPlayerObserverStatusObservationContext)
+ {
+ AVPlayerStatus status = (AVPlayerStatus)[[change objectForKey:NSKeyValueChangeNewKey] integerValue];
+ switch (status)
+ {
+ //Indicates that the status of the player is not yet known because
+ //it has not tried to load new media resources for playback
+ case AVPlayerStatusUnknown:
+ {
+ //QMetaObject::invokeMethod(m_session, "processLoadStateChange", Qt::AutoConnection);
+ }
+ break;
+
+ case AVPlayerStatusReadyToPlay:
+ {
+ //Once the AVPlayerItem becomes ready to play, i.e.
+ //[playerItem status] == AVPlayerItemStatusReadyToPlay,
+ //its duration can be fetched from the item.
+ if (self.session)
+ QMetaObject::invokeMethod(m_session, "processLoadStateChange", Qt::AutoConnection);
+ }
+ break;
+
+ case AVPlayerStatusFailed:
+ {
+ AVPlayerItem *playerItem = static_cast<AVPlayerItem*>(object);
+ [self assetFailedToPrepareForPlayback:playerItem.error];
+
+ if (self.session)
+ QMetaObject::invokeMethod(m_session, "processLoadStateFailure", Qt::AutoConnection);
+ }
+ break;
+ }
+ } else if (context == AVFMediaPlayerObserverPresentationSizeContext) {
+ QSize size(m_playerItem.presentationSize.width, m_playerItem.presentationSize.height);
+ QMetaObject::invokeMethod(m_session, "nativeSizeChanged", Qt::AutoConnection, Q_ARG(QSize, size));
+ } else if (context == AVFMediaPlayerObserverBufferLikelyToKeepUpContext)
+ {
+ const bool isPlaybackLikelyToKeepUp = [m_playerItem isPlaybackLikelyToKeepUp];
+ if (isPlaybackLikelyToKeepUp != m_bufferIsLikelyToKeepUp) {
+ m_bufferIsLikelyToKeepUp = isPlaybackLikelyToKeepUp;
+ QMetaObject::invokeMethod(m_session, "processBufferStateChange", Qt::AutoConnection,
+ Q_ARG(int, isPlaybackLikelyToKeepUp ? 100 : 0));
+ }
+ }
+ else if (context == AVFMediaPlayerObserverTracksContext)
+ {
+ QMetaObject::invokeMethod(m_session, "updateTracks", Qt::AutoConnection);
+ }
+ //AVPlayer "rate" property value observer.
+ else if (context == AVFMediaPlayerObserverRateObservationContext)
+ {
+ //QMetaObject::invokeMethod(m_session, "setPlaybackRate", Qt::AutoConnection, Q_ARG(qreal, [m_player rate]));
+ }
+ //AVPlayer "currentItem" property observer.
+ //Called when the AVPlayer replaceCurrentItemWithPlayerItem:
+ //replacement will/did occur.
+ else if (context == AVFMediaPlayerObserverCurrentItemObservationContext)
+ {
+ AVPlayerItem *newPlayerItem = [change objectForKey:NSKeyValueChangeNewKey];
+ if (m_playerItem != newPlayerItem)
+ m_playerItem = newPlayerItem;
+ }
+ else if (context == AVFMediaPlayerObserverCurrentItemDurationObservationContext)
+ {
+ const CMTime time = [m_playerItem duration];
+ const qint64 duration = static_cast<qint64>(float(time.value) / float(time.timescale) * 1000.0f);
+ if (self.session)
+ QMetaObject::invokeMethod(m_session, "processDurationChange", Qt::AutoConnection, Q_ARG(qint64, duration));
+ }
+ else
+ {
+ [super observeValueForKeyPath:path ofObject:object change:change context:context];
+ }
+}
+
+- (void) detachSession
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO;
+#endif
+ m_session = 0;
+}
+
+- (void) dealloc
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO;
+#endif
+ [self unloadMedia];
+
+ if (m_URL) {
+ [m_URL release];
+ }
+
+ [m_mimeType release];
+ [m_playerLayer release];
+ [super dealloc];
+}
+
+- (BOOL) resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
+{
+ Q_UNUSED(resourceLoader);
+
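+ // Serve only requests that use the custom iodevice:// scheme installed
+ // by setStreamURL(); everything else is loaded by AVFoundation itself.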
+ if (![loadingRequest.request.URL.scheme isEqualToString:@"iodevice"])
+ return NO;
+
+ QIODevice *device = m_session->mediaStream();
+ if (!device)
+ return NO;
+
+ device->seek(loadingRequest.dataRequest.requestedOffset);
+ if (loadingRequest.contentInformationRequest) {
+ loadingRequest.contentInformationRequest.contentType = m_mimeType;
+ loadingRequest.contentInformationRequest.contentLength = device->size();
+ loadingRequest.contentInformationRequest.byteRangeAccessSupported = YES;
+ }
+
+ if (loadingRequest.dataRequest) {
+ NSInteger requestedLength = loadingRequest.dataRequest.requestedLength;
+ int maxBytes = qMin(32 * 1024, int(requestedLength));
+ char buffer[maxBytes];
+ NSInteger submitted = 0;
+ while (submitted < requestedLength) {
+ qint64 len = device->read(buffer, qMin(qint64(maxBytes), qint64(requestedLength - submitted)));
+ if (len < 1)
+ break;
+
+ [loadingRequest.dataRequest respondWithData:[NSData dataWithBytes:buffer length:len]];
+ submitted += len;
+ }
+
+ // Finish loading even if not all bytes submitted.
+ [loadingRequest finishLoading];
+ }
+
+ return YES;
+}
+@end
+
+AVFMediaPlayer::AVFMediaPlayer(QMediaPlayer *player)
+ : QObject(player),
+ QPlatformMediaPlayer(player),
+ m_state(QMediaPlayer::StoppedState),
+ m_mediaStatus(QMediaPlayer::NoMedia),
+ m_mediaStream(nullptr),
+ m_rate(1.0),
+ m_requestedPosition(-1),
+ m_duration(0),
+ m_bufferProgress(0),
+ m_videoAvailable(false),
+ m_audioAvailable(false),
+ m_seekable(false)
+{
+ m_observer = [[AVFMediaPlayerObserver alloc] initWithMediaPlayerSession:this];
+ connect(&m_playbackTimer, &QTimer::timeout, this, &AVFMediaPlayer::processPositionChange);
+ setVideoOutput(new AVFVideoRendererControl(this));
+}
+
+AVFMediaPlayer::~AVFMediaPlayer()
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO;
+#endif
+ // Detach the session from the observer (which could still be alive and trying to communicate with this session).
+ [m_observer detachSession];
+ [static_cast<AVFMediaPlayerObserver*>(m_observer) release];
+}
+
+void AVFMediaPlayer::setVideoSink(QVideoSink *sink)
+{
+ m_videoSink = sink ? static_cast<AVFVideoSink *>(sink->platformVideoSink()): nullptr;
+ m_videoOutput->setVideoSink(m_videoSink);
+}
+
+void AVFMediaPlayer::setVideoOutput(AVFVideoRendererControl *output)
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO << output;
+#endif
+
+ if (m_videoOutput == output)
+ return;
+
+ //Set the current output layer to null to stop rendering
+ if (m_videoOutput) {
+ m_videoOutput->setLayer(nullptr);
+ }
+
+ m_videoOutput = output;
+
+ if (m_videoOutput && m_state != QMediaPlayer::StoppedState)
+ m_videoOutput->setLayer([static_cast<AVFMediaPlayerObserver*>(m_observer) playerLayer]);
+}
+
+AVAsset *AVFMediaPlayer::currentAssetHandle()
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO;
+#endif
+ AVAsset *currentAsset = [[static_cast<AVFMediaPlayerObserver*>(m_observer) playerItem] asset];
+ return currentAsset;
+}
+
+QMediaPlayer::PlaybackState AVFMediaPlayer::state() const
+{
+ return m_state;
+}
+
+QMediaPlayer::MediaStatus AVFMediaPlayer::mediaStatus() const
+{
+ return m_mediaStatus;
+}
+
+QUrl AVFMediaPlayer::media() const
+{
+ return m_resources;
+}
+
+QIODevice *AVFMediaPlayer::mediaStream() const
+{
+ return m_mediaStream;
+}
+
+static void setURL(AVFMediaPlayerObserver *observer, const QByteArray &url, const QString &mimeType = QString())
+{
+ NSString *urlString = [NSString stringWithUTF8String:url.constData()];
+ NSURL *nsurl = [NSURL URLWithString:urlString];
+ [observer setURL:nsurl mimeType:[NSString stringWithUTF8String:mimeType.toUtf8().constData()]];
+}
+
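+// Wraps the URL in the custom iodevice:// scheme so that the
+// AVAssetResourceLoader delegate above serves the data from the
+// QIODevice media stream.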
+static void setStreamURL(AVFMediaPlayerObserver *observer, const QByteArray &url)
+{
+ setURL(observer, QByteArrayLiteral("iodevice://") + url, QFileInfo(QString::fromUtf8(url)).suffix());
+}
+
+void AVFMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO << content.request().url();
+#endif
+
+ [static_cast<AVFMediaPlayerObserver*>(m_observer) unloadMedia];
+
+ m_resources = content;
+ resetStream(stream);
+
+ setAudioAvailable(false);
+ setVideoAvailable(false);
+ setSeekable(false);
+ m_requestedPosition = -1;
+ orientationChanged(QtVideo::Rotation::None, false);
+ Q_EMIT positionChanged(position());
+ if (m_duration != 0) {
+ m_duration = 0;
+ Q_EMIT durationChanged(0);
+ }
+ if (!m_metaData.isEmpty()) {
+ m_metaData.clear();
+ metaDataChanged();
+ }
+ for (int i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i) {
+ tracks[i].clear();
+ nativeTracks[i].clear();
+ }
+ tracksChanged();
+
+ const QMediaPlayer::MediaStatus oldMediaStatus = m_mediaStatus;
+ const QMediaPlayer::PlaybackState oldState = m_state;
+
+ if (!m_mediaStream && content.isEmpty()) {
+ m_mediaStatus = QMediaPlayer::NoMedia;
+ if (m_mediaStatus != oldMediaStatus)
+ Q_EMIT mediaStatusChanged(m_mediaStatus);
+
+ m_state = QMediaPlayer::StoppedState;
+ if (m_state != oldState)
+ Q_EMIT stateChanged(m_state);
+
+ return;
+ }
+
+ m_mediaStatus = QMediaPlayer::LoadingMedia;
+ if (m_mediaStatus != oldMediaStatus)
+ Q_EMIT mediaStatusChanged(m_mediaStatus);
+
+ if (m_mediaStream) {
+ // If there is data available already, try to load it;
+ // otherwise wait for readyRead.
+ if (m_mediaStream->size())
+ setStreamURL(m_observer, m_resources.toEncoded());
+ } else {
+ //Load AVURLAsset
+ //initialize asset using content's URL
+ setURL(m_observer, m_resources.toEncoded());
+ }
+
+ m_state = QMediaPlayer::StoppedState;
+ if (m_state != oldState)
+ Q_EMIT stateChanged(m_state);
+}
+
+qint64 AVFMediaPlayer::position() const
+{
+ AVPlayerItem *playerItem = [static_cast<AVFMediaPlayerObserver*>(m_observer) playerItem];
+
+ if (m_requestedPosition != -1)
+ return m_requestedPosition;
+
+ if (!playerItem)
+ return 0;
+
+ CMTime time = [playerItem currentTime];
+ return static_cast<qint64>(float(time.value) / float(time.timescale) * 1000.0f);
+}
+
+qint64 AVFMediaPlayer::duration() const
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO;
+#endif
+ return m_duration;
+}
+
+float AVFMediaPlayer::bufferProgress() const
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO;
+#endif
+ return m_bufferProgress / 100.;
+}
+
+void AVFMediaPlayer::setAudioAvailable(bool available)
+{
+ if (m_audioAvailable == available)
+ return;
+
+ m_audioAvailable = available;
+ Q_EMIT audioAvailableChanged(available);
+}
+
+bool AVFMediaPlayer::isAudioAvailable() const
+{
+ return m_audioAvailable;
+}
+
+void AVFMediaPlayer::setVideoAvailable(bool available)
+{
+ if (m_videoAvailable == available)
+ return;
+
+ m_videoAvailable = available;
+ Q_EMIT videoAvailableChanged(available);
+}
+
+bool AVFMediaPlayer::isVideoAvailable() const
+{
+ return m_videoAvailable;
+}
+
+bool AVFMediaPlayer::isSeekable() const
+{
+ return m_seekable;
+}
+
+void AVFMediaPlayer::setSeekable(bool seekable)
+{
+ if (m_seekable == seekable)
+ return;
+
+ m_seekable = seekable;
+ Q_EMIT seekableChanged(seekable);
+}
+
+QMediaTimeRange AVFMediaPlayer::availablePlaybackRanges() const
+{
+ AVPlayerItem *playerItem = [static_cast<AVFMediaPlayerObserver*>(m_observer) playerItem];
+
+ if (playerItem) {
+ QMediaTimeRange timeRanges;
+
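+ // loadedTimeRanges reports the buffered intervals as CMTimeRanges;
+ // convert each one to a millisecond interval.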
+ NSArray *ranges = [playerItem loadedTimeRanges];
+ for (NSValue *timeRange in ranges) {
+ CMTimeRange currentTimeRange = [timeRange CMTimeRangeValue];
+ qint64 startTime = qint64(float(currentTimeRange.start.value) / currentTimeRange.start.timescale * 1000.0);
+ timeRanges.addInterval(startTime, startTime + qint64(float(currentTimeRange.duration.value) / currentTimeRange.duration.timescale * 1000.0));
+ }
+ if (!timeRanges.isEmpty())
+ return timeRanges;
+ }
+ return QMediaTimeRange(0, duration());
+}
+
+qreal AVFMediaPlayer::playbackRate() const
+{
+ return m_rate;
+}
+
+void AVFMediaPlayer::setAudioOutput(QPlatformAudioOutput *output)
+{
+ if (m_audioOutput == output)
+ return;
+ if (m_audioOutput)
+ m_audioOutput->q->disconnect(this);
+ m_audioOutput = output;
+ if (m_audioOutput) {
+ connect(m_audioOutput->q, &QAudioOutput::deviceChanged, this, &AVFMediaPlayer::audioOutputChanged);
+ connect(m_audioOutput->q, &QAudioOutput::volumeChanged, this, &AVFMediaPlayer::setVolume);
+ connect(m_audioOutput->q, &QAudioOutput::mutedChanged, this, &AVFMediaPlayer::setMuted);
+ //connect(m_audioOutput->q, &QAudioOutput::audioRoleChanged, this, &AVFMediaPlayer::setAudioRole);
+ }
+ audioOutputChanged();
+ setMuted(m_audioOutput ? m_audioOutput->muted : true);
+ setVolume(m_audioOutput ? m_audioOutput->volume : 1.);
+}
+
+QMediaMetaData AVFMediaPlayer::metaData() const
+{
+ return m_metaData;
+}
+
+void AVFMediaPlayer::setPlaybackRate(qreal rate)
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO << rate;
+#endif
+
+ if (qFuzzyCompare(m_rate, rate))
+ return;
+
+ m_rate = rate;
+
+ AVPlayer *player = [static_cast<AVFMediaPlayerObserver*>(m_observer) player];
+ if (player && m_state == QMediaPlayer::PlayingState)
+ [player setRate:m_rate];
+
+ Q_EMIT playbackRateChanged(m_rate);
+}
+
+void AVFMediaPlayer::setPosition(qint64 pos)
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO << pos;
+#endif
+
+ if (pos == position())
+ return;
+
+ AVPlayerItem *playerItem = [static_cast<AVFMediaPlayerObserver*>(m_observer) playerItem];
+ if (!playerItem) {
+ m_requestedPosition = pos;
+ Q_EMIT positionChanged(m_requestedPosition);
+ return;
+ }
+
+ if (!isSeekable()) {
+ if (m_requestedPosition != -1) {
+ m_requestedPosition = -1;
+ Q_EMIT positionChanged(position());
+ }
+ return;
+ }
+
+ pos = qMax(qint64(0), pos);
+ if (duration() > 0)
+ pos = qMin(pos, duration());
+ m_requestedPosition = pos;
+
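+ // Seek with zero tolerance so AVFoundation lands on the exact time
+ // instead of snapping to the nearest keyframe.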
+ CMTime newTime = [playerItem currentTime];
+ newTime.value = (pos / 1000.0f) * newTime.timescale;
+ [playerItem seekToTime:newTime toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero
+ completionHandler:^(BOOL finished) {
+ if (finished)
+ m_requestedPosition = -1;
+ }];
+
+ Q_EMIT positionChanged(pos);
+
+ // Reset media status if the current status is EndOfMedia
+ if (m_mediaStatus == QMediaPlayer::EndOfMedia) {
+ QMediaPlayer::MediaStatus newMediaStatus = (m_state == QMediaPlayer::PausedState) ? QMediaPlayer::BufferedMedia
+ : QMediaPlayer::LoadedMedia;
+ Q_EMIT mediaStatusChanged((m_mediaStatus = newMediaStatus));
+ }
+}
+
+void AVFMediaPlayer::play()
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO << "currently: " << m_state;
+#endif
+
+ if (m_mediaStatus == QMediaPlayer::NoMedia || m_mediaStatus == QMediaPlayer::InvalidMedia)
+ return;
+
+ if (m_state == QMediaPlayer::PlayingState)
+ return;
+
+ resetCurrentLoop();
+
+ if (m_videoOutput && m_videoSink)
+ m_videoOutput->setLayer([static_cast<AVFMediaPlayerObserver*>(m_observer) playerLayer]);
+
+ // Reset media status if the current status is EndOfMedia
+ if (m_mediaStatus == QMediaPlayer::EndOfMedia)
+ setPosition(0);
+
+ if (m_mediaStatus == QMediaPlayer::LoadedMedia || m_mediaStatus == QMediaPlayer::BufferedMedia) {
+ // Setting the rate starts playback
+ [[static_cast<AVFMediaPlayerObserver*>(m_observer) player] setRate:m_rate];
+ }
+
+ m_state = QMediaPlayer::PlayingState;
+ processLoadStateChange();
+
+ Q_EMIT stateChanged(m_state);
+ m_playbackTimer.start(100);
+}
+
+void AVFMediaPlayer::pause()
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO << "currently: " << m_state;
+#endif
+
+ if (m_mediaStatus == QMediaPlayer::NoMedia)
+ return;
+
+ if (m_state == QMediaPlayer::PausedState)
+ return;
+
+ m_state = QMediaPlayer::PausedState;
+
+ if (m_videoOutput && m_videoSink)
+ m_videoOutput->setLayer([static_cast<AVFMediaPlayerObserver*>(m_observer) playerLayer]);
+
+ [[static_cast<AVFMediaPlayerObserver*>(m_observer) player] pause];
+
+ // Reset media status if the current status is EndOfMedia
+ if (m_mediaStatus == QMediaPlayer::EndOfMedia)
+ setPosition(0);
+
+ Q_EMIT positionChanged(position());
+ Q_EMIT stateChanged(m_state);
+ m_playbackTimer.stop();
+}
+
+void AVFMediaPlayer::stop()
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO << "currently: " << m_state;
+#endif
+
+ if (m_state == QMediaPlayer::StoppedState)
+ return;
+
+ // AVPlayer doesn't have stop(), only pause() and play().
+ [[static_cast<AVFMediaPlayerObserver*>(m_observer) player] pause];
+ setPosition(0);
+
+ if (m_videoOutput)
+ m_videoOutput->setLayer(nullptr);
+
+ if (m_mediaStatus == QMediaPlayer::BufferedMedia)
+ Q_EMIT mediaStatusChanged((m_mediaStatus = QMediaPlayer::LoadedMedia));
+
+ Q_EMIT stateChanged((m_state = QMediaPlayer::StoppedState));
+ m_playbackTimer.stop();
+}
+
+void AVFMediaPlayer::setVolume(float volume)
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO << volume;
+#endif
+
+ AVPlayer *player = [static_cast<AVFMediaPlayerObserver*>(m_observer) player];
+ if (player)
+ player.volume = volume;
+}
+
+void AVFMediaPlayer::setMuted(bool muted)
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO << muted;
+#endif
+
+ AVPlayer *player = [static_cast<AVFMediaPlayerObserver*>(m_observer) player];
+ if (player)
+ player.muted = muted;
+}
+
+void AVFMediaPlayer::audioOutputChanged()
+{
+#ifdef Q_OS_MACOS
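+ // AVPlayer's audioOutputDeviceUniqueID is macOS-only; on other Apple
+ // platforms audio follows the system's default output route.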
+ AVPlayer *player = [static_cast<AVFMediaPlayerObserver*>(m_observer) player];
+ if (!m_audioOutput || m_audioOutput->device.id().isEmpty()) {
+ player.audioOutputDeviceUniqueID = nil;
+ if (!m_audioOutput)
+ player.muted = true;
+ } else {
+ NSString *str = QString::fromUtf8(m_audioOutput->device.id()).toNSString();
+ player.audioOutputDeviceUniqueID = str;
+ }
+#endif
+}
+
+void AVFMediaPlayer::processEOS()
+{
+ if (doLoop()) {
+ setPosition(0);
+ [[static_cast<AVFMediaPlayerObserver*>(m_observer) player] setRate:m_rate];
+ return;
+ }
+
+ // AVPlayerItem has reached the end of the track/stream
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO;
+#endif
+ Q_EMIT positionChanged(position());
+ m_mediaStatus = QMediaPlayer::EndOfMedia;
+ m_state = QMediaPlayer::StoppedState;
+
+ if (m_videoOutput)
+ m_videoOutput->setLayer(nullptr);
+
+ Q_EMIT mediaStatusChanged(m_mediaStatus);
+ Q_EMIT stateChanged(m_state);
+}
+
+void AVFMediaPlayer::processLoadStateChange(QMediaPlayer::PlaybackState newState)
+{
+ AVPlayerStatus currentStatus = [[static_cast<AVFMediaPlayerObserver*>(m_observer) player] status];
+
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO << currentStatus << ", " << m_mediaStatus << ", " << newState;
+#endif
+
+ if (m_mediaStatus == QMediaPlayer::NoMedia)
+ return;
+
+ if (currentStatus == AVPlayerStatusReadyToPlay) {
+
+ QMediaPlayer::MediaStatus newStatus = m_mediaStatus;
+
+ AVPlayerItem *playerItem = [m_observer playerItem];
+
+ // get the meta data
+ m_metaData = AVFMetaData::fromAsset(playerItem.asset);
+ Q_EMIT metaDataChanged();
+ updateTracks();
+
+ if (playerItem) {
+ setSeekable([[playerItem seekableTimeRanges] count] > 0);
+
+ // Get the native size of the video, and reset the bounds of the player layer
+ AVPlayerLayer *playerLayer = [m_observer playerLayer];
+ if (m_observer.videoTrack && playerLayer) {
+ if (!playerLayer.bounds.size.width || !playerLayer.bounds.size.height) {
+ playerLayer.bounds = CGRectMake(0.0f, 0.0f,
+ m_observer.videoTrack.assetTrack.naturalSize.width,
+ m_observer.videoTrack.assetTrack.naturalSize.height);
+ }
+ }
+
+ if (m_requestedPosition != -1) {
+ setPosition(m_requestedPosition);
+ m_requestedPosition = -1;
+ }
+ }
+
+ newStatus = (newState != QMediaPlayer::StoppedState) ? QMediaPlayer::BufferedMedia
+ : QMediaPlayer::LoadedMedia;
+
+ if (newStatus != m_mediaStatus)
+ Q_EMIT mediaStatusChanged((m_mediaStatus = newStatus));
+
+ }
+
+ if (newState == QMediaPlayer::PlayingState && [static_cast<AVFMediaPlayerObserver*>(m_observer) player]) {
+ // Setting the rate is enough to start playback, no need to call play()
+ [[static_cast<AVFMediaPlayerObserver*>(m_observer) player] setRate:m_rate];
+ m_playbackTimer.start();
+ }
+}
+
+
+void AVFMediaPlayer::processLoadStateChange()
+{
+ processLoadStateChange(m_state);
+}
+
+
+void AVFMediaPlayer::processLoadStateFailure()
+{
+ Q_EMIT stateChanged((m_state = QMediaPlayer::StoppedState));
+}
+
+void AVFMediaPlayer::processBufferStateChange(int bufferProgress)
+{
+ if (bufferProgress == m_bufferProgress)
+ return;
+
+ auto status = m_mediaStatus;
+ // Buffered -> unbuffered.
+ if (!bufferProgress) {
+ status = QMediaPlayer::StalledMedia;
+ } else if (status == QMediaPlayer::StalledMedia) {
+ status = QMediaPlayer::BufferedMedia;
+ // Resume playback.
+ if (m_state == QMediaPlayer::PlayingState) {
+ [[static_cast<AVFMediaPlayerObserver*>(m_observer) player] setRate:m_rate];
+ m_playbackTimer.start();
+ }
+ }
+
+ if (m_mediaStatus != status)
+ Q_EMIT mediaStatusChanged(m_mediaStatus = status);
+
+ m_bufferProgress = bufferProgress;
+ Q_EMIT bufferProgressChanged(bufferProgress / 100.);
+}
+
+void AVFMediaPlayer::processDurationChange(qint64 duration)
+{
+ if (duration == m_duration)
+ return;
+
+ m_duration = duration;
+ Q_EMIT durationChanged(duration);
+}
+
+void AVFMediaPlayer::processPositionChange()
+{
+ if (m_state == QMediaPlayer::StoppedState)
+ return;
+
+ Q_EMIT positionChanged(position());
+}
+
+void AVFMediaPlayer::processMediaLoadError()
+{
+ if (m_requestedPosition != -1) {
+ m_requestedPosition = -1;
+ Q_EMIT positionChanged(position());
+ }
+
+ Q_EMIT mediaStatusChanged((m_mediaStatus = QMediaPlayer::InvalidMedia));
+
+ Q_EMIT error(QMediaPlayer::FormatError, tr("Failed to load media"));
+}
+
+void AVFMediaPlayer::streamReady()
+{
+ setStreamURL(m_observer, m_resources.toEncoded());
+}
+
+void AVFMediaPlayer::streamDestroyed()
+{
+ resetStream(nullptr);
+}
+
+void AVFMediaPlayer::updateTracks()
+{
+ bool firstLoad = true;
+ for (int i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i) {
+ if (tracks[i].count())
+ firstLoad = false;
+ tracks[i].clear();
+ nativeTracks[i].clear();
+ }
+ AVPlayerItem *playerItem = [m_observer playerItem];
+ if (playerItem) {
+ // Check each track for audio and video content
+ NSArray *tracks = playerItem.tracks;
+ for (AVPlayerItemTrack *track in tracks) {
+ AVAssetTrack *assetTrack = track.assetTrack;
+ if (assetTrack) {
+ int qtTrack = -1;
+ if ([assetTrack.mediaType isEqualToString:AVMediaTypeAudio]) {
+ qtTrack = QPlatformMediaPlayer::AudioStream;
+ setAudioAvailable(true);
+ } else if ([assetTrack.mediaType isEqualToString:AVMediaTypeVideo]) {
+ qtTrack = QPlatformMediaPlayer::VideoStream;
+ setVideoAvailable(true);
+ if (m_observer.videoTrack != track) {
+ m_observer.videoTrack = track;
+ bool isMirrored = false;
+ QtVideo::Rotation orientation = QtVideo::Rotation::None;
+ videoOrientationForAssetTrack(assetTrack, orientation, isMirrored);
+ orientationChanged(orientation, isMirrored);
+ }
+ }
+ else if ([assetTrack.mediaType isEqualToString:AVMediaTypeSubtitle]) {
+ qtTrack = QPlatformMediaPlayer::SubtitleStream;
+ }
+ if (qtTrack != -1) {
+ QMediaMetaData metaData = AVFMetaData::fromAssetTrack(assetTrack);
+ this->tracks[qtTrack].append(metaData);
+ nativeTracks[qtTrack].append(track);
+ }
+ }
+ }
+ // subtitles are disabled by default
+ if (firstLoad)
+ setActiveTrack(SubtitleStream, -1);
+ }
+ Q_EMIT tracksChanged();
+}
+
+void AVFMediaPlayer::setActiveTrack(QPlatformMediaPlayer::TrackType type, int index)
+{
+ const auto &t = nativeTracks[type];
+ if (type == QPlatformMediaPlayer::SubtitleStream) {
+ // Subtitle streams are not always automatically enabled on macOS/iOS.
+ // This hack ensures they get enabled so we actually receive the text.
+ AVPlayerItem *playerItem = m_observer.m_playerItem;
+ if (playerItem) {
+ AVAsset *asset = playerItem.asset;
+ if (!asset)
+ return;
+ AVMediaSelectionGroup *group = [asset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
+ if (!group)
+ return;
+ auto *options = group.options;
+ if (options.count)
+ [playerItem selectMediaOption:options.firstObject inMediaSelectionGroup:group];
+ }
+ }
+ for (int i = 0; i < t.count(); ++i)
+ t.at(i).enabled = (i == index);
+ emit activeTracksChanged();
+}
+
+int AVFMediaPlayer::activeTrack(QPlatformMediaPlayer::TrackType type)
+{
+ const auto &t = nativeTracks[type];
+ for (int i = 0; i < t.count(); ++i)
+ if (t.at(i).enabled)
+ return i;
+ return -1;
+}
+
+int AVFMediaPlayer::trackCount(QPlatformMediaPlayer::TrackType type)
+{
+ return nativeTracks[type].count();
+}
+
+QMediaMetaData AVFMediaPlayer::trackMetaData(QPlatformMediaPlayer::TrackType type, int trackNumber)
+{
+ const auto &t = tracks[type];
+ if (trackNumber < 0 || trackNumber >= t.count())
+ return QMediaMetaData();
+ return t.at(trackNumber);
+}
+
+void AVFMediaPlayer::resetStream(QIODevice *stream)
+{
+ if (m_mediaStream) {
+ disconnect(m_mediaStream, &QIODevice::readyRead, this, &AVFMediaPlayer::streamReady);
+ disconnect(m_mediaStream, &QIODevice::destroyed, this, &AVFMediaPlayer::streamDestroyed);
+ }
+
+ m_mediaStream = stream;
+
+ if (m_mediaStream) {
+ connect(m_mediaStream, &QIODevice::readyRead, this, &AVFMediaPlayer::streamReady);
+ connect(m_mediaStream, &QIODevice::destroyed, this, &AVFMediaPlayer::streamDestroyed);
+ }
+}
+
+void AVFMediaPlayer::nativeSizeChanged(QSize size)
+{
+ if (!m_videoSink)
+ return;
+ m_videoSink->setNativeSize(size);
+}
+
+void AVFMediaPlayer::orientationChanged(QtVideo::Rotation rotation, bool mirrored)
+{
+ if (!m_videoOutput)
+ return;
+
+ m_videoOutput->setVideoRotation(rotation);
+ m_videoOutput->setVideoMirrored(mirrored);
+}
+
+void AVFMediaPlayer::videoOrientationForAssetTrack(AVAssetTrack *videoTrack,
+ QtVideo::Rotation &angle,
+ bool &mirrored)
+{
+ angle = QtVideo::Rotation::None;
+ if (videoTrack) {
+ CGAffineTransform transform = videoTrack.preferredTransform;
+ if (CGAffineTransformIsIdentity(transform))
+ return;
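+ // Decompose the preferred transform: a negative determinant of the 2x2
+ // part means the transform contains a reflection (mirrored video), and
+ // atan2(b, a) gives the rotation angle.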
+ qreal delta = transform.a * transform.d - transform.b * transform.c;
+ qreal radians = qAtan2(transform.b, transform.a);
+ qreal degrees = qRadiansToDegrees(radians);
+ qreal scaleX = (transform.a / qAbs(transform.a)) * qSqrt(qPow(transform.a, 2) + qPow(transform.c, 2));
+ qreal scaleY = (transform.d / qAbs(transform.d)) * qSqrt(qPow(transform.b, 2) + qPow(transform.d, 2));
+
+ if (delta < 0.0) { // flipped
+ if (scaleX < 0.0) {
+ // vertical flip
+ degrees = -degrees;
+ } else if (scaleY < 0.0) {
+ // horizontal flip
+ degrees = (180 + (int)degrees) % 360;
+ }
+ mirrored = true;
+ }
+
+ if (qFuzzyCompare(degrees, qreal(90)) || qFuzzyCompare(degrees, qreal(-270))) {
+ angle = QtVideo::Rotation::Clockwise90;
+ } else if (qFuzzyCompare(degrees, qreal(-90)) || qFuzzyCompare(degrees, qreal(270))) {
+ angle = QtVideo::Rotation::Clockwise270;
+ } else if (qFuzzyCompare(degrees, qreal(180)) || qFuzzyCompare(degrees, qreal(-180))) {
+ angle = QtVideo::Rotation::Clockwise180;
+ }
+ }
+}
+
+#include "moc_avfmediaplayer_p.cpp"
diff --git a/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer_p.h b/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer_p.h
new file mode 100644
index 000000000..d04ab0818
--- /dev/null
+++ b/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer_p.h
@@ -0,0 +1,151 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFMEDIAPLAYER_H
+#define AVFMEDIAPLAYER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/QObject>
+#include <QtCore/QByteArray>
+#include <QtCore/QSet>
+#include <QtCore/QResource>
+#include <QtCore/QUrl>
+#include <QtCore/QTimer>
+
+#include <private/qplatformmediaplayer_p.h>
+#include <QtMultimedia/QMediaPlayer>
+#include <QtMultimedia/QVideoFrame>
+
+Q_FORWARD_DECLARE_OBJC_CLASS(AVAsset);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVPlayerItemTrack);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVFMediaPlayerObserver);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVAssetTrack);
+
+QT_BEGIN_NAMESPACE
+
+class AVFMediaPlayer;
+class AVFVideoRendererControl;
+class AVFVideoSink;
+
+class AVFMediaPlayer : public QObject, public QPlatformMediaPlayer
+{
+ Q_OBJECT
+public:
+ AVFMediaPlayer(QMediaPlayer *parent);
+ virtual ~AVFMediaPlayer();
+
+ void setVideoSink(QVideoSink *sink) override;
+ void setVideoOutput(AVFVideoRendererControl *output);
+ AVAsset *currentAssetHandle();
+
+ QMediaPlayer::PlaybackState state() const override;
+ QMediaPlayer::MediaStatus mediaStatus() const override;
+
+ QUrl media() const override;
+ QIODevice *mediaStream() const override;
+ void setMedia(const QUrl &content, QIODevice *stream) override;
+
+ qint64 position() const override;
+ qint64 duration() const override;
+
+ float bufferProgress() const override;
+
+ bool isAudioAvailable() const override;
+ bool isVideoAvailable() const override;
+
+ bool isSeekable() const override;
+ QMediaTimeRange availablePlaybackRanges() const override;
+
+ qreal playbackRate() const override;
+
+ void setAudioOutput(QPlatformAudioOutput *output) override;
+ QPlatformAudioOutput *m_audioOutput = nullptr;
+
+ QMediaMetaData metaData() const override;
+
+ static void videoOrientationForAssetTrack(AVAssetTrack *track,
+ QtVideo::Rotation &angle,
+ bool &mirrored);
+
+public Q_SLOTS:
+ void setPlaybackRate(qreal rate) override;
+ void nativeSizeChanged(QSize size);
+
+ void setPosition(qint64 pos) override;
+
+ void play() override;
+ void pause() override;
+ void stop() override;
+
+ void setVolume(float volume);
+ void setMuted(bool muted);
+ void audioOutputChanged();
+
+ void processEOS();
+ void processLoadStateChange(QMediaPlayer::PlaybackState newState);
+ void processPositionChange();
+ void processMediaLoadError();
+
+ void processLoadStateChange();
+ void processLoadStateFailure();
+
+ void processBufferStateChange(int bufferProgress);
+
+ void processDurationChange(qint64 duration);
+
+ void streamReady();
+ void streamDestroyed();
+ void updateTracks();
+ void setActiveTrack(QPlatformMediaPlayer::TrackType type, int index) override;
+ int activeTrack(QPlatformMediaPlayer::TrackType type) override;
+ int trackCount(TrackType) override;
+ QMediaMetaData trackMetaData(TrackType type, int trackNumber) override;
+
+public:
+ QList<QMediaMetaData> tracks[QPlatformMediaPlayer::NTrackTypes];
+ QList<AVPlayerItemTrack *> nativeTracks[QPlatformMediaPlayer::NTrackTypes];
+
+private:
+ void setAudioAvailable(bool available);
+ void setVideoAvailable(bool available);
+ void setSeekable(bool seekable);
+ void resetStream(QIODevice *stream = nullptr);
+
+ void orientationChanged(QtVideo::Rotation rotation, bool mirrored);
+
+ AVFVideoRendererControl *m_videoOutput = nullptr;
+ AVFVideoSink *m_videoSink = nullptr;
+
+ QMediaPlayer::PlaybackState m_state;
+ QMediaPlayer::MediaStatus m_mediaStatus;
+ QIODevice *m_mediaStream;
+ QUrl m_resources;
+ QMediaMetaData m_metaData;
+
+ qreal m_rate;
+ qint64 m_requestedPosition;
+
+ qint64 m_duration;
+ int m_bufferProgress;
+ bool m_videoAvailable;
+ bool m_audioAvailable;
+ bool m_seekable;
+
+ AVFMediaPlayerObserver *m_observer;
+
+ QTimer m_playbackTimer;
+};
+
+QT_END_NAMESPACE
+
+#endif // AVFMEDIAPLAYER_H
diff --git a/src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol.mm b/src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol.mm
new file mode 100644
index 000000000..66687c931
--- /dev/null
+++ b/src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol.mm
@@ -0,0 +1,222 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfvideorenderercontrol_p.h"
+#include "avfdisplaylink_p.h"
+#include <avfvideobuffer_p.h>
+#include "qavfhelpers_p.h"
+#include "private/qvideoframe_p.h"
+
+#include <QtMultimedia/qvideoframeformat.h>
+
+#include <avfvideosink_p.h>
+#include <rhi/qrhi.h>
+
+#include <QtCore/qdebug.h>
+
+#import <AVFoundation/AVFoundation.h>
+#include <CoreVideo/CVPixelBuffer.h>
+#include <CoreVideo/CVImageBuffer.h>
+
+QT_USE_NAMESPACE
+
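+// Receives rendered subtitle strings from the AVPlayerItemLegibleOutput on
+// the main queue and forwards the combined plain text to the renderer.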
+@interface SubtitleDelegate : NSObject <AVPlayerItemLegibleOutputPushDelegate>
+{
+ AVFVideoRendererControl *m_renderer;
+}
+
+- (void)legibleOutput:(AVPlayerItemLegibleOutput *)output
+ didOutputAttributedStrings:(NSArray<NSAttributedString *> *)strings
+ nativeSampleBuffers:(NSArray *)nativeSamples
+ forItemTime:(CMTime)itemTime;
+
+@end
+
+@implementation SubtitleDelegate
+
+-(id)initWithRenderer: (AVFVideoRendererControl *)renderer
+{
+ if (!(self = [super init]))
+ return nil;
+
+ m_renderer = renderer;
+
+ return self;
+}
+
+- (void)legibleOutput:(AVPlayerItemLegibleOutput *)output
+ didOutputAttributedStrings:(NSArray<NSAttributedString *> *)strings
+ nativeSampleBuffers:(NSArray *)nativeSamples
+ forItemTime:(CMTime)itemTime
+{
+ QString text;
+ for (NSAttributedString *s : strings) {
+ if (!text.isEmpty())
+ text += QChar::LineSeparator;
+ text += QString::fromNSString(s.string);
+ }
+ m_renderer->setSubtitleText(text);
+}
+
+@end
+
+
+AVFVideoRendererControl::AVFVideoRendererControl(QObject *parent)
+ : QObject(parent)
+{
+ m_displayLink = new AVFDisplayLink(this);
+ connect(m_displayLink, SIGNAL(tick(CVTimeStamp)), SLOT(updateVideoFrame(CVTimeStamp)));
+}
+
+AVFVideoRendererControl::~AVFVideoRendererControl()
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO;
+#endif
+ m_displayLink->stop();
+ if (m_videoOutput)
+ [m_videoOutput release];
+ if (m_subtitleOutput)
+ [m_subtitleOutput release];
+ if (m_subtitleDelegate)
+ [m_subtitleDelegate release];
+}
+
+void AVFVideoRendererControl::reconfigure()
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << "reconfigure";
+#endif
+ if (!m_layer) {
+ m_displayLink->stop();
+ return;
+ }
+
+ QMutexLocker locker(&m_mutex);
+
+ m_displayLink->start();
+
+ nativeSizeChanged();
+}
+
+void AVFVideoRendererControl::setLayer(CALayer *layer)
+{
+ if (m_layer == layer)
+ return;
+
+ AVPlayerLayer *plLayer = playerLayer();
+ if (plLayer) {
+ if (m_videoOutput)
+ [[[plLayer player] currentItem] removeOutput:m_videoOutput];
+
+ if (m_subtitleOutput)
+ [[[plLayer player] currentItem] removeOutput:m_subtitleOutput];
+ }
+
+ if (!layer && m_sink)
+ m_sink->setVideoFrame(QVideoFrame());
+
+ AVFVideoSinkInterface::setLayer(layer);
+}
+
+void AVFVideoRendererControl::setVideoRotation(QtVideo::Rotation rotation)
+{
+ m_rotation = rotation;
+}
+
+void AVFVideoRendererControl::setVideoMirrored(bool mirrored)
+{
+ m_mirrored = mirrored;
+}
+
+void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
+{
+ Q_UNUSED(ts);
+
+ if (!m_sink)
+ return;
+
+ if (!m_layer)
+ return;
+
+ auto *layer = playerLayer();
+ if (!layer.readyForDisplay)
+ return;
+ nativeSizeChanged();
+
+ QVideoFrame frame;
+ size_t width, height;
+ CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(width, height);
+ if (!pixelBuffer)
+ return;
+ auto buffer = std::make_unique<AVFVideoBuffer>(this, pixelBuffer);
+ // qDebug() << "Got pixelbuffer with format" << fmt << Qt::hex <<
+ // CVPixelBufferGetPixelFormatType(pixelBuffer);
+ CVPixelBufferRelease(pixelBuffer);
+
+ // Read the format before std::move(buffer): the evaluation order of call
+ // arguments is unspecified, so buffer may already be moved from by the
+ // time buffer->videoFormat() would otherwise run.
+ const QVideoFrameFormat format = buffer->videoFormat();
+ frame = QVideoFramePrivate::createFrame(std::move(buffer), format);
+ frame.setRotation(m_rotation);
+ frame.setMirrored(m_mirrored);
+ m_sink->setVideoFrame(frame);
+}
+
+CVPixelBufferRef AVFVideoRendererControl::copyPixelBufferFromLayer(size_t& width, size_t& height)
+{
+ AVPlayerLayer *layer = playerLayer();
+ // Check that the layer is valid
+ if (!layer) {
+#ifdef QT_DEBUG_AVF
+ qWarning("copyPixelBufferFromLayer: invalid layer");
+#endif
+ return nullptr;
+ }
+
+ AVPlayerItem *item = [[layer player] currentItem];
+
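+ // Lazily create the video and subtitle outputs and attach them to the
+ // current player item on first use.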
+ if (!m_videoOutput) {
+ if (!m_outputSettings)
+ setOutputSettings();
+ m_videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:m_outputSettings];
+ [m_videoOutput setDelegate:nil queue:nil];
+ }
+ if (!m_subtitleOutput) {
+ m_subtitleOutput = [[AVPlayerItemLegibleOutput alloc] init];
+ m_subtitleDelegate = [[SubtitleDelegate alloc] initWithRenderer:this];
+ [m_subtitleOutput setDelegate:m_subtitleDelegate queue:dispatch_get_main_queue()];
+ }
+ if (![item.outputs containsObject:m_videoOutput])
+ [item addOutput:m_videoOutput];
+ if (![item.outputs containsObject:m_subtitleOutput])
+ [item addOutput:m_subtitleOutput];
+
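+ // Map the current host (display) time onto the item's timeline to ask the
+ // video output for the frame that should be visible right now.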
+ CFTimeInterval currentCAFrameTime = CACurrentMediaTime();
+ CMTime currentCMFrameTime = [m_videoOutput itemTimeForHostTime:currentCAFrameTime];
+ // happens when buffering / loading
+ if (CMTimeCompare(currentCMFrameTime, kCMTimeZero) < 0) {
+ return nullptr;
+ }
+
+ if (![m_videoOutput hasNewPixelBufferForItemTime:currentCMFrameTime])
+ return nullptr;
+
+ CVPixelBufferRef pixelBuffer = [m_videoOutput copyPixelBufferForItemTime:currentCMFrameTime
+ itemTimeForDisplay:nil];
+ if (!pixelBuffer) {
+#ifdef QT_DEBUG_AVF
+ qWarning("copyPixelBufferForItemTime returned nil");
+ CMTimeShow(currentCMFrameTime);
+#endif
+ return nullptr;
+ }
+
+ width = CVPixelBufferGetWidth(pixelBuffer);
+ height = CVPixelBufferGetHeight(pixelBuffer);
+// auto f = CVPixelBufferGetPixelFormatType(pixelBuffer);
+// char fmt[5];
+// memcpy(fmt, &f, 4);
+// fmt[4] = 0;
+// qDebug() << "copyPixelBuffer" << f << fmt << width << height;
+ return pixelBuffer;
+}
+
+#include "moc_avfvideorenderercontrol_p.cpp"
diff --git a/src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol_p.h b/src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol_p.h
new file mode 100644
index 000000000..177114127
--- /dev/null
+++ b/src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol_p.h
@@ -0,0 +1,72 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFVIDEORENDERERCONTROL_H
+#define AVFVIDEORENDERERCONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/QObject>
+#include <QtCore/QMutex>
+#include <QtCore/QSize>
+
+#include <avfvideosink_p.h>
+
+#include <CoreVideo/CVBase.h>
+#include <CoreVideo/CVPixelBuffer.h>
+
+Q_FORWARD_DECLARE_OBJC_CLASS(CALayer);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVPlayerItemVideoOutput);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVPlayerItemLegibleOutput);
+Q_FORWARD_DECLARE_OBJC_CLASS(SubtitleDelegate);
+
+QT_BEGIN_NAMESPACE
+
+class AVFDisplayLink;
+
+class AVFVideoRendererControl : public QObject, public AVFVideoSinkInterface
+{
+ Q_OBJECT
+public:
+ explicit AVFVideoRendererControl(QObject *parent = nullptr);
+ virtual ~AVFVideoRendererControl();
+
+ // AVFVideoSinkInterface
+ void reconfigure() override;
+ void setLayer(CALayer *layer) override;
+
+ void setVideoRotation(QtVideo::Rotation);
+ void setVideoMirrored(bool mirrored);
+
+ void setSubtitleText(const QString &subtitle)
+ {
+ m_sink->setSubtitleText(subtitle);
+ }
+private Q_SLOTS:
+ void updateVideoFrame(const CVTimeStamp &ts);
+
+private:
+ AVPlayerLayer *playerLayer() const { return static_cast<AVPlayerLayer *>(m_layer); }
+ CVPixelBufferRef copyPixelBufferFromLayer(size_t& width, size_t& height);
+
+ QMutex m_mutex;
+ AVFDisplayLink *m_displayLink = nullptr;
+ AVPlayerItemVideoOutput *m_videoOutput = nullptr;
+ AVPlayerItemLegibleOutput *m_subtitleOutput = nullptr;
+ SubtitleDelegate *m_subtitleDelegate = nullptr;
+ QtVideo::Rotation m_rotation = QtVideo::Rotation::None;
+ bool m_mirrored = false;
+};
+
+QT_END_NAMESPACE
+
+#endif // AVFVIDEORENDERERCONTROL_H
diff --git a/src/plugins/multimedia/darwin/qavfhelpers.mm b/src/plugins/multimedia/darwin/qavfhelpers.mm
new file mode 100644
index 000000000..51ae9eedc
--- /dev/null
+++ b/src/plugins/multimedia/darwin/qavfhelpers.mm
@@ -0,0 +1,143 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include <qavfhelpers_p.h>
+#include <CoreMedia/CMFormatDescription.h>
+#include <CoreVideo/CoreVideo.h>
+#include <qdebug.h>
+
+#import <CoreVideo/CoreVideo.h>
+
+namespace {
+
+using PixelFormat = QVideoFrameFormat::PixelFormat;
+using ColorRange = QVideoFrameFormat::ColorRange;
+
+// clang-format off
+constexpr std::tuple<CvPixelFormat, PixelFormat, ColorRange> PixelFormatMap[] = {
+ { kCVPixelFormatType_32ARGB, PixelFormat::Format_ARGB8888, ColorRange::ColorRange_Unknown },
+ { kCVPixelFormatType_32BGRA, PixelFormat::Format_BGRA8888, ColorRange::ColorRange_Unknown },
+ { kCVPixelFormatType_420YpCbCr8Planar, PixelFormat::Format_YUV420P, ColorRange::ColorRange_Unknown },
+ { kCVPixelFormatType_420YpCbCr8PlanarFullRange, PixelFormat::Format_YUV420P, ColorRange::ColorRange_Full },
+ { kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, PixelFormat::Format_NV12, ColorRange::ColorRange_Video },
+ { kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, PixelFormat::Format_NV12, ColorRange::ColorRange_Full },
+ { kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange, PixelFormat::Format_P010, ColorRange::ColorRange_Video },
+ { kCVPixelFormatType_420YpCbCr10BiPlanarFullRange, PixelFormat::Format_P010, ColorRange::ColorRange_Full },
+ { kCVPixelFormatType_422YpCbCr8, PixelFormat::Format_UYVY, ColorRange::ColorRange_Video },
+ { kCVPixelFormatType_422YpCbCr8_yuvs, PixelFormat::Format_YUYV, ColorRange::ColorRange_Video },
+ { kCVPixelFormatType_OneComponent8, PixelFormat::Format_Y8, ColorRange::ColorRange_Unknown },
+ { kCVPixelFormatType_OneComponent16, PixelFormat::Format_Y16, ColorRange::ColorRange_Unknown },
+
+ // The cases with kCMVideoCodecType_JPEG/kCMVideoCodecType_JPEG_OpenDML as cv pixel format should be investigated.
+ // Matching kCMVideoCodecType_JPEG_OpenDML to ColorRange_Full is a little hack to distinguish between
+ // kCMVideoCodecType_JPEG and kCMVideoCodecType_JPEG_OpenDML.
+ { kCMVideoCodecType_JPEG, PixelFormat::Format_Jpeg, ColorRange::ColorRange_Unknown },
+ { kCMVideoCodecType_JPEG_OpenDML, PixelFormat::Format_Jpeg, ColorRange::ColorRange_Full }
+};
+// clang-format on
+
+template<typename Type, typename... Args>
+Type findInPixelFormatMap(Type defaultValue, Args... args)
+{
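+    // Match each argument against the tuple element of the same type; the
+    // fold expression requires all supplied fields to match simultaneously.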
+ auto checkElement = [&](const auto &element) {
+ return ((args == std::get<Args>(element)) && ...);
+ };
+
+ auto found = std::find_if(std::begin(PixelFormatMap), std::end(PixelFormatMap), checkElement);
+ return found == std::end(PixelFormatMap) ? defaultValue : std::get<Type>(*found);
+}
+
+} // namespace
+
+ColorRange QAVFHelpers::colorRangeForCVPixelFormat(CvPixelFormat cvPixelFormat)
+{
+ return findInPixelFormatMap(ColorRange::ColorRange_Unknown, cvPixelFormat);
+}
+
+PixelFormat QAVFHelpers::fromCVPixelFormat(CvPixelFormat cvPixelFormat)
+{
+ return findInPixelFormatMap(PixelFormat::Format_Invalid, cvPixelFormat);
+}
+
+CvPixelFormat QAVFHelpers::toCVPixelFormat(PixelFormat pixelFmt, ColorRange colorRange)
+{
+ return findInPixelFormatMap(CvPixelFormatInvalid, pixelFmt, colorRange);
+}
+
+QVideoFrameFormat QAVFHelpers::videoFormatForImageBuffer(CVImageBufferRef buffer, bool openGL)
+{
+ auto cvPixelFormat = CVPixelBufferGetPixelFormatType(buffer);
+ auto pixelFormat = fromCVPixelFormat(cvPixelFormat);
+ if (openGL) {
+ if (cvPixelFormat == kCVPixelFormatType_32BGRA)
+ pixelFormat = QVideoFrameFormat::Format_SamplerRect;
+ else
+ qWarning() << "Accelerated macOS OpenGL video supports BGRA only, got CV pixel format"
+ << cvPixelFormat;
+ }
+
+ size_t width = CVPixelBufferGetWidth(buffer);
+ size_t height = CVPixelBufferGetHeight(buffer);
+
+ QVideoFrameFormat format(QSize(width, height), pixelFormat);
+
+ auto colorSpace = QVideoFrameFormat::ColorSpace_Undefined;
+ auto colorTransfer = QVideoFrameFormat::ColorTransfer_Unknown;
+
+ if (CFStringRef cSpace = reinterpret_cast<CFStringRef>(
+ CVBufferGetAttachment(buffer, kCVImageBufferYCbCrMatrixKey, nullptr))) {
+ if (CFEqual(cSpace, kCVImageBufferYCbCrMatrix_ITU_R_709_2)) {
+ colorSpace = QVideoFrameFormat::ColorSpace_BT709;
+ } else if (CFEqual(cSpace, kCVImageBufferYCbCrMatrix_ITU_R_601_4)
+ || CFEqual(cSpace, kCVImageBufferYCbCrMatrix_SMPTE_240M_1995)) {
+ colorSpace = QVideoFrameFormat::ColorSpace_BT601;
+ } else if (@available(macOS 10.11, iOS 9.0, *)) {
+ if (CFEqual(cSpace, kCVImageBufferYCbCrMatrix_ITU_R_2020)) {
+ colorSpace = QVideoFrameFormat::ColorSpace_BT2020;
+ }
+ }
+ }
+
+ if (CFStringRef cTransfer = reinterpret_cast<CFStringRef>(
+ CVBufferGetAttachment(buffer, kCVImageBufferTransferFunctionKey, nullptr))) {
+
+ if (CFEqual(cTransfer, kCVImageBufferTransferFunction_ITU_R_709_2)) {
+ colorTransfer = QVideoFrameFormat::ColorTransfer_BT709;
+ } else if (CFEqual(cTransfer, kCVImageBufferTransferFunction_SMPTE_240M_1995)) {
+ colorTransfer = QVideoFrameFormat::ColorTransfer_BT601;
+ } else if (CFEqual(cTransfer, kCVImageBufferTransferFunction_sRGB)) {
+ colorTransfer = QVideoFrameFormat::ColorTransfer_Gamma22;
+ } else if (CFEqual(cTransfer, kCVImageBufferTransferFunction_UseGamma)) {
+ auto gamma = reinterpret_cast<CFNumberRef>(
+ CVBufferGetAttachment(buffer, kCVImageBufferGammaLevelKey, nullptr));
+ // kCFNumberFloat32Type would write only four bytes into the double;
+ // request kCFNumberDoubleType to match the destination type.
+ double g = 0.0; // 0.0 keeps the transfer "unknown" if the attachment is missing
+ if (gamma)
+ CFNumberGetValue(gamma, kCFNumberDoubleType, &g);
+ // These are best fit values given what we have in our enum
+ if (g < 0.8)
+ ; // unknown
+ else if (g < 1.5)
+ colorTransfer = QVideoFrameFormat::ColorTransfer_Linear;
+ else if (g < 2.1)
+ colorTransfer = QVideoFrameFormat::ColorTransfer_BT709;
+ else if (g < 2.5)
+ colorTransfer = QVideoFrameFormat::ColorTransfer_Gamma22;
+ else if (g < 3.2)
+ colorTransfer = QVideoFrameFormat::ColorTransfer_Gamma28;
+ }
+ if (@available(macOS 10.12, iOS 11.0, *)) {
+ if (CFEqual(cTransfer, kCVImageBufferTransferFunction_ITU_R_2020)) {
+ // BT.2020 reuses the BT.709 transfer characteristics.
+ colorTransfer = QVideoFrameFormat::ColorTransfer_BT709;
+ } else if (CFEqual(cTransfer, kCVImageBufferTransferFunction_ITU_R_2100_HLG)) {
+ colorTransfer = QVideoFrameFormat::ColorTransfer_STD_B67;
+ } else if (CFEqual(cTransfer, kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ)) {
+ colorTransfer = QVideoFrameFormat::ColorTransfer_ST2084;
+ }
+ }
+ }
+
+ format.setColorRange(colorRangeForCVPixelFormat(cvPixelFormat));
+ format.setColorSpace(colorSpace);
+ format.setColorTransfer(colorTransfer);
+ return format;
+}
diff --git a/src/plugins/multimedia/darwin/qavfhelpers_p.h b/src/plugins/multimedia/darwin/qavfhelpers_p.h
new file mode 100644
index 000000000..8133d5500
--- /dev/null
+++ b/src/plugins/multimedia/darwin/qavfhelpers_p.h
@@ -0,0 +1,41 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QAVFHELPERS_H
+#define QAVFHELPERS_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/qvideoframe.h>
+#include <qvideoframeformat.h>
+
+#include <CoreVideo/CVBase.h>
+#include <CoreVideo/CVPixelBuffer.h>
+#include <CoreVideo/CVImageBuffer.h>
+
+QT_BEGIN_NAMESPACE
+
+using CvPixelFormat = unsigned;
+constexpr CvPixelFormat CvPixelFormatInvalid = 0;
+
+namespace QAVFHelpers
+{
+QVideoFrameFormat::ColorRange colorRangeForCVPixelFormat(CvPixelFormat cvPixelFormat);
+QVideoFrameFormat::PixelFormat fromCVPixelFormat(CvPixelFormat cvPixelFormat);
+CvPixelFormat toCVPixelFormat(QVideoFrameFormat::PixelFormat pixFmt,
+ QVideoFrameFormat::ColorRange colorRange);
+
+QVideoFrameFormat videoFormatForImageBuffer(CVImageBufferRef buffer, bool openGL = false);
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/qdarwinformatsinfo.mm b/src/plugins/multimedia/darwin/qdarwinformatsinfo.mm
new file mode 100644
index 000000000..582060a6c
--- /dev/null
+++ b/src/plugins/multimedia/darwin/qdarwinformatsinfo.mm
@@ -0,0 +1,211 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qdarwinformatsinfo_p.h"
+#include <AVFoundation/AVFoundation.h>
+#include <qdebug.h>
+
+QT_BEGIN_NAMESPACE
+
+static struct {
+ const char *name;
+ QMediaFormat::FileFormat value;
+} mediaContainerMap[] = {
+ { "video/x-ms-asf", QMediaFormat::WMV },
+ { "video/avi", QMediaFormat::AVI },
+ { "video/x-matroska", QMediaFormat::Matroska },
+ { "video/mp4", QMediaFormat::MPEG4 },
+ { "video/quicktime", QMediaFormat::QuickTime },
+ { "video/ogg", QMediaFormat::Ogg },
+ { "audio/mp3", QMediaFormat::MP3 },
+ { "audio/flac", QMediaFormat::FLAC },
+ { nullptr, QMediaFormat::UnspecifiedFormat }
+};
+
+static struct {
+ const char *name;
+ QMediaFormat::VideoCodec value;
+} videoCodecMap[] = {
+ // See CMVideoCodecType for the four character code names of codecs
+ { "; codecs=\"mp1v\"", QMediaFormat::VideoCodec::MPEG1 },
+ { "; codecs=\"mp2v\"", QMediaFormat::VideoCodec::MPEG2 },
+ { "; codecs=\"mp4v\"", QMediaFormat::VideoCodec::MPEG4 },
+ { "; codecs=\"avc1\"", QMediaFormat::VideoCodec::H264 },
+ { "; codecs=\"hvc1\"", QMediaFormat::VideoCodec::H265 },
+ { "; codecs=\"vp09\"", QMediaFormat::VideoCodec::VP9 },
+ { "; codecs=\"av01\"", QMediaFormat::VideoCodec::AV1 }, // ### ????
+ { "; codecs=\"jpeg\"", QMediaFormat::VideoCodec::MotionJPEG },
+ { nullptr, QMediaFormat::VideoCodec::Unspecified }
+};
+
+static struct {
+ const char *name;
+ QMediaFormat::AudioCodec value;
+} audioCodecMap[] = {
+ // AudioFile.h
+ // ### The next two entries do not work, probably because they contain a space and a period respectively, and AVFoundation doesn't like that.
+ // We know they are supported on all Apple platforms, so we add them manually below.
+// { "; codecs=\".mp3\"", QMediaFormat::AudioCodec::MP3 },
+// { "; codecs=\"aac \"", QMediaFormat::AudioCodec::AAC },
+ { "; codecs=\"ac-3\"", QMediaFormat::AudioCodec::AC3 },
+ { "; codecs=\"ec-3\"", QMediaFormat::AudioCodec::EAC3 },
+ { "; codecs=\"flac\"", QMediaFormat::AudioCodec::FLAC },
+ { "; codecs=\"alac\"", QMediaFormat::AudioCodec::ALAC },
+ { "; codecs=\"opus\"", QMediaFormat::AudioCodec::Opus },
+ { nullptr, QMediaFormat::AudioCodec::Unspecified },
+};
+
+QDarwinFormatInfo::QDarwinFormatInfo()
+{
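+ // Probe decoder support by appending each codec suffix (RFC 6381 style)
+ // to every container MIME type and asking AVFoundation whether the
+ // resulting extended MIME type is playable.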
+ auto avtypes = [AVURLAsset audiovisualMIMETypes];
+ for (AVFileType filetype in avtypes) {
+ auto *m = mediaContainerMap;
+ while (m->name) {
+ if (strcmp(filetype.UTF8String, m->name)) {
+ ++m;
+ continue;
+ }
+
+ QList<QMediaFormat::VideoCodec> video;
+ QList<QMediaFormat::AudioCodec> audio;
+
+ auto *v = videoCodecMap;
+ while (v->name) {
+ QByteArray extendedMimetype = m->name;
+ extendedMimetype += v->name;
+ if ([AVURLAsset isPlayableExtendedMIMEType:[NSString stringWithUTF8String:extendedMimetype.constData()]])
+ video << v->value;
+ ++v;
+ }
+
+ auto *a = audioCodecMap;
+ while (a->name) {
+ QByteArray extendedMimetype = m->name;
+ extendedMimetype += a->name;
+ if ([AVURLAsset isPlayableExtendedMIMEType:[NSString stringWithUTF8String:extendedMimetype.constData()]])
+ audio << a->value;
+ ++a;
+ }
+ // Added manually, see comment in the list above
+ if (m->value <= QMediaFormat::AAC)
+ audio << QMediaFormat::AudioCodec::AAC;
+ if (m->value < QMediaFormat::AAC || m->value == QMediaFormat::MP3)
+ audio << QMediaFormat::AudioCodec::MP3;
+
+ decoders << CodecMap{ m->value, audio, video };
+ ++m;
+ }
+ }
+
+ // seems AVFoundation only supports those for encoding
+ encoders = {
+ { QMediaFormat::MPEG4,
+ { QMediaFormat::AudioCodec::AAC, QMediaFormat::AudioCodec::ALAC },
+ { QMediaFormat::VideoCodec::H264, QMediaFormat::VideoCodec::H265, QMediaFormat::VideoCodec::MotionJPEG } },
+ { QMediaFormat::QuickTime,
+ { QMediaFormat::AudioCodec::AAC, QMediaFormat::AudioCodec::ALAC },
+ { QMediaFormat::VideoCodec::H264, QMediaFormat::VideoCodec::H265, QMediaFormat::VideoCodec::MotionJPEG } },
+ { QMediaFormat::Mpeg4Audio,
+ { QMediaFormat::AudioCodec::AAC },
+ {} },
+ { QMediaFormat::Wave,
+ { QMediaFormat::AudioCodec::Wave },
+ {} },
+ };
+
+ // ###
+ imageFormats << QImageCapture::JPEG;
+}
+
+QDarwinFormatInfo::~QDarwinFormatInfo()
+{
+}
+
+int QDarwinFormatInfo::audioFormatForCodec(QMediaFormat::AudioCodec codec)
+{
+ int codecId = kAudioFormatMPEG4AAC;
+ switch (codec) {
+ case QMediaFormat::AudioCodec::Unspecified:
+ case QMediaFormat::AudioCodec::DolbyTrueHD:
+ case QMediaFormat::AudioCodec::Vorbis:
+ case QMediaFormat::AudioCodec::WMA:
+ // Unsupported, shouldn't happen. Fall back to AAC
+ case QMediaFormat::AudioCodec::AAC:
+ codecId = kAudioFormatMPEG4AAC;
+ break;
+ case QMediaFormat::AudioCodec::MP3:
+ codecId = kAudioFormatMPEGLayer3;
+ break;
+ case QMediaFormat::AudioCodec::AC3:
+ codecId = kAudioFormatAC3;
+ break;
+ case QMediaFormat::AudioCodec::EAC3:
+ codecId = kAudioFormatEnhancedAC3;
+ break;
+ case QMediaFormat::AudioCodec::FLAC:
+ codecId = kAudioFormatFLAC;
+ break;
+ case QMediaFormat::AudioCodec::ALAC:
+ codecId = kAudioFormatAppleLossless;
+ break;
+ case QMediaFormat::AudioCodec::Opus:
+ codecId = kAudioFormatOpus;
+ break;
+ case QMediaFormat::AudioCodec::Wave:
+ codecId = kAudioFormatLinearPCM;
+ }
+ return codecId;
+}
+
+NSString *QDarwinFormatInfo::videoFormatForCodec(QMediaFormat::VideoCodec codec)
+{
+ const char *c = "hvc1"; // fallback is H265
+ switch (codec) {
+ case QMediaFormat::VideoCodec::Unspecified:
+ case QMediaFormat::VideoCodec::VP8:
+ case QMediaFormat::VideoCodec::H265:
+ case QMediaFormat::VideoCodec::AV1:
+ case QMediaFormat::VideoCodec::Theora:
+ case QMediaFormat::VideoCodec::WMV:
+ break;
+
+ case QMediaFormat::VideoCodec::MPEG1:
+ c = "mp1v";
+ break;
+ case QMediaFormat::VideoCodec::MPEG2:
+ c = "mp2v";
+ break;
+ case QMediaFormat::VideoCodec::MPEG4:
+ c = "mp4v";
+ break;
+ case QMediaFormat::VideoCodec::H264:
+ c = "avc1";
+ break;
+ case QMediaFormat::VideoCodec::VP9:
+ c = "vp09";
+ break;
+ case QMediaFormat::VideoCodec::MotionJPEG:
+ c = "jpeg";
+ }
+ return [NSString stringWithUTF8String:c];
+}
+
+NSString *QDarwinFormatInfo::avFileTypeForContainerFormat(QMediaFormat::FileFormat container)
+{
+ switch (container) {
+ case QMediaFormat::MPEG4:
+ return AVFileTypeMPEG4;
+ case QMediaFormat::QuickTime:
+ return AVFileTypeQuickTimeMovie;
+ case QMediaFormat::MP3:
+ return AVFileTypeMPEGLayer3;
+ case QMediaFormat::Mpeg4Audio:
+ return AVFileTypeAppleM4A;
+ case QMediaFormat::Wave:
+ return AVFileTypeWAVE;
+ default:
+ return AVFileTypeQuickTimeMovie;
+ }
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/darwin/qdarwinformatsinfo_p.h b/src/plugins/multimedia/darwin/qdarwinformatsinfo_p.h
new file mode 100644
index 000000000..e01486286
--- /dev/null
+++ b/src/plugins/multimedia/darwin/qdarwinformatsinfo_p.h
@@ -0,0 +1,38 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QDARWINFORMATINFO_H
+#define QDARWINFORMATINFO_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediaformatinfo_p.h>
+#include <qlist.h>
+
+QT_BEGIN_NAMESPACE
+
+class QDarwinMediaDevices;
+
+class QDarwinFormatInfo : public QPlatformMediaFormatInfo
+{
+public:
+ QDarwinFormatInfo();
+ ~QDarwinFormatInfo();
+
+ static int audioFormatForCodec(QMediaFormat::AudioCodec codec);
+ static NSString *videoFormatForCodec(QMediaFormat::VideoCodec codec);
+ static NSString *avFileTypeForContainerFormat(QMediaFormat::FileFormat fileType);
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/qdarwinintegration.mm b/src/plugins/multimedia/darwin/qdarwinintegration.mm
new file mode 100644
index 000000000..0e880447e
--- /dev/null
+++ b/src/plugins/multimedia/darwin/qdarwinintegration.mm
@@ -0,0 +1,93 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qdarwinintegration_p.h"
+#include <avfmediaplayer_p.h>
+#include <avfcameraservice_p.h>
+#include <avfcamera_p.h>
+#include <avfimagecapture_p.h>
+#include <avfmediaencoder_p.h>
+#include <qdarwinformatsinfo_p.h>
+#include <avfvideosink_p.h>
+#include <avfaudiodecoder_p.h>
+#include <VideoToolbox/VideoToolbox.h>
+#include <qdebug.h>
+#include <private/qplatformmediaplugin_p.h>
+#include <qavfcamerabase_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QDarwinMediaPlugin : public QPlatformMediaPlugin
+{
+ Q_OBJECT
+ Q_PLUGIN_METADATA(IID QPlatformMediaPlugin_iid FILE "darwin.json")
+
+public:
+ QDarwinMediaPlugin()
+ : QPlatformMediaPlugin()
+ {}
+
+ QPlatformMediaIntegration* create(const QString &name) override
+ {
+ if (name == u"darwin")
+ return new QDarwinIntegration;
+ return nullptr;
+ }
+};
+
+QDarwinIntegration::QDarwinIntegration() : QPlatformMediaIntegration(QLatin1String("darwin"))
+{
+#if defined(Q_OS_MACOS) && QT_MACOS_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_11_0)
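+ // VP9 ships only as a supplemental decoder on macOS 11+, so it has to be
+ // registered explicitly before VideoToolbox will use it.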
+ if (__builtin_available(macOS 11.0, *))
+ VTRegisterSupplementalVideoDecoderIfAvailable(kCMVideoCodecType_VP9);
+#endif
+}
+
+QPlatformMediaFormatInfo *QDarwinIntegration::createFormatInfo()
+{
+ return new QDarwinFormatInfo();
+}
+
+QPlatformVideoDevices *QDarwinIntegration::createVideoDevices()
+{
+ return new QAVFVideoDevices(this);
+}
+
+QMaybe<QPlatformAudioDecoder *> QDarwinIntegration::createAudioDecoder(QAudioDecoder *decoder)
+{
+ return new AVFAudioDecoder(decoder);
+}
+
+QMaybe<QPlatformMediaCaptureSession *> QDarwinIntegration::createCaptureSession()
+{
+ return new AVFCameraService;
+}
+
+QMaybe<QPlatformMediaPlayer *> QDarwinIntegration::createPlayer(QMediaPlayer *player)
+{
+ return new AVFMediaPlayer(player);
+}
+
+QMaybe<QPlatformCamera *> QDarwinIntegration::createCamera(QCamera *camera)
+{
+ return new AVFCamera(camera);
+}
+
+QMaybe<QPlatformMediaRecorder *> QDarwinIntegration::createRecorder(QMediaRecorder *recorder)
+{
+ return new AVFMediaEncoder(recorder);
+}
+
+QMaybe<QPlatformImageCapture *> QDarwinIntegration::createImageCapture(QImageCapture *imageCapture)
+{
+ return new AVFImageCapture(imageCapture);
+}
+
+QMaybe<QPlatformVideoSink *> QDarwinIntegration::createVideoSink(QVideoSink *sink)
+{
+ return new AVFVideoSink(sink);
+}
+
+QT_END_NAMESPACE
+
+#include "qdarwinintegration.moc"
diff --git a/src/plugins/multimedia/darwin/qdarwinintegration_p.h b/src/plugins/multimedia/darwin/qdarwinintegration_p.h
new file mode 100644
index 000000000..8333de4ec
--- /dev/null
+++ b/src/plugins/multimedia/darwin/qdarwinintegration_p.h
@@ -0,0 +1,45 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QDARWININTEGRATION_H
+#define QDARWININTEGRATION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediaintegration_p.h>
+
+Q_FORWARD_DECLARE_OBJC_CLASS(NSObject);
+
+QT_BEGIN_NAMESPACE
+
+class QDarwinIntegration : public QPlatformMediaIntegration
+{
+public:
+ QDarwinIntegration();
+
+ QMaybe<QPlatformAudioDecoder *> createAudioDecoder(QAudioDecoder *) override;
+ QMaybe<QPlatformMediaCaptureSession *> createCaptureSession() override;
+ QMaybe<QPlatformMediaPlayer *> createPlayer(QMediaPlayer *player) override;
+ QMaybe<QPlatformCamera *> createCamera(QCamera *camera) override;
+ QMaybe<QPlatformMediaRecorder *> createRecorder(QMediaRecorder *) override;
+ QMaybe<QPlatformImageCapture *> createImageCapture(QImageCapture *) override;
+
+ QMaybe<QPlatformVideoSink *> createVideoSink(QVideoSink *) override;
+
+protected:
+ QPlatformMediaFormatInfo *createFormatInfo() override;
+ QPlatformVideoDevices *createVideoDevices() override;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/ffmpeg/CMakeLists.txt b/src/plugins/multimedia/ffmpeg/CMakeLists.txt
new file mode 100644
index 000000000..c6ab93273
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/CMakeLists.txt
@@ -0,0 +1,260 @@
+# Copyright (C) 2022 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+qt_find_package(EGL)
+qt_find_package(VAAPI COMPONENTS VA DRM PROVIDED_TARGETS VAAPI::VAAPI MODULE_NAME multimedia QMAKE_LIB vaapi)
+
+qt_internal_find_apple_system_framework(FWCoreMedia CoreMedia)
+qt_internal_find_apple_system_framework(FWCoreAudio CoreAudio)
+qt_internal_find_apple_system_framework(FWAudioUnit AudioUnit)
+qt_internal_find_apple_system_framework(FWVideoToolbox VideoToolbox)
+qt_internal_find_apple_system_framework(FWAVFoundation AVFoundation)
+qt_internal_find_apple_system_framework(FWSecurity Security)
+
+qt_internal_add_plugin(QFFmpegMediaPlugin
+ OUTPUT_NAME ffmpegmediaplugin
+ PLUGIN_TYPE multimedia
+ SOURCES
+ qffmpeg.cpp qffmpeg_p.h
+ qffmpegdefs_p.h
+ qffmpegioutils.cpp qffmpegioutils_p.h
+ qffmpegavaudioformat.cpp qffmpegavaudioformat_p.h
+ qffmpegaudiodecoder.cpp qffmpegaudiodecoder_p.h
+ qffmpegaudioinput.cpp qffmpegaudioinput_p.h
+ qffmpegconverter.cpp qffmpegconverter_p.h
+ qffmpeghwaccel.cpp qffmpeghwaccel_p.h
+ qffmpegmediametadata.cpp qffmpegmediametadata_p.h
+ qffmpegmediaplayer.cpp qffmpegmediaplayer_p.h
+ qffmpegvideosink.cpp qffmpegvideosink_p.h
+ qffmpegmediaformatinfo.cpp qffmpegmediaformatinfo_p.h
+ qffmpegmediaintegration.cpp qffmpegmediaintegration_p.h
+ qffmpegvideobuffer.cpp qffmpegvideobuffer_p.h
+ qffmpegimagecapture.cpp qffmpegimagecapture_p.h
+ qffmpegmediacapturesession.cpp qffmpegmediacapturesession_p.h
+ qffmpegmediarecorder.cpp qffmpegmediarecorder_p.h
+ qffmpegthread.cpp qffmpegthread_p.h
+ qffmpegresampler.cpp qffmpegresampler_p.h
+ qffmpegencodingformatcontext.cpp qffmpegencodingformatcontext_p.h
+ qgrabwindowsurfacecapture.cpp qgrabwindowsurfacecapture_p.h
+ qffmpegsurfacecapturegrabber.cpp qffmpegsurfacecapturegrabber_p.h
+
+ qffmpegplaybackengine.cpp qffmpegplaybackengine_p.h
+ playbackengine/qffmpegplaybackenginedefs_p.h
+ playbackengine/qffmpegplaybackengineobject.cpp playbackengine/qffmpegplaybackengineobject_p.h
+ playbackengine/qffmpegdemuxer.cpp playbackengine/qffmpegdemuxer_p.h
+ playbackengine/qffmpegstreamdecoder.cpp playbackengine/qffmpegstreamdecoder_p.h
+ playbackengine/qffmpegrenderer.cpp playbackengine/qffmpegrenderer_p.h
+ playbackengine/qffmpegaudiorenderer.cpp playbackengine/qffmpegaudiorenderer_p.h
+ playbackengine/qffmpegvideorenderer.cpp playbackengine/qffmpegvideorenderer_p.h
+ playbackengine/qffmpegsubtitlerenderer.cpp playbackengine/qffmpegsubtitlerenderer_p.h
+ playbackengine/qffmpegtimecontroller.cpp playbackengine/qffmpegtimecontroller_p.h
+ playbackengine/qffmpegmediadataholder.cpp playbackengine/qffmpegmediadataholder_p.h
+ playbackengine/qffmpegcodec.cpp playbackengine/qffmpegcodec_p.h
+ playbackengine/qffmpegpacket_p.h
+ playbackengine/qffmpegframe_p.h
+ playbackengine/qffmpegpositionwithoffset_p.h
+
+ recordingengine/qffmpegaudioencoder_p.h
+ recordingengine/qffmpegaudioencoder.cpp
+ recordingengine/qffmpegaudioencoderutils_p.h
+ recordingengine/qffmpegaudioencoderutils.cpp
+ recordingengine/qffmpegencoderthread_p.h
+ recordingengine/qffmpegencoderthread.cpp
+ recordingengine/qffmpegencoderoptions_p.h
+ recordingengine/qffmpegencoderoptions.cpp
+ recordingengine/qffmpegmuxer_p.h
+ recordingengine/qffmpegmuxer.cpp
+ recordingengine/qffmpegrecordingengine_p.h
+ recordingengine/qffmpegrecordingengine.cpp
+ recordingengine/qffmpegencodinginitializer_p.h
+ recordingengine/qffmpegencodinginitializer.cpp
+ recordingengine/qffmpegrecordingengineutils_p.h
+ recordingengine/qffmpegrecordingengineutils.cpp
+ recordingengine/qffmpegvideoencoder_p.h
+ recordingengine/qffmpegvideoencoder.cpp
+ recordingengine/qffmpegvideoencoderutils_p.h
+ recordingengine/qffmpegvideoencoderutils.cpp
+ recordingengine/qffmpegvideoframeencoder_p.h
+ recordingengine/qffmpegvideoframeencoder.cpp
+
+ DEFINES
+ QT_COMPILING_FFMPEG
+ LIBRARIES
+ Qt::MultimediaPrivate
+ Qt::CorePrivate
+)
+
+if (LINUX OR ANDROID)
+ # We have 2 options: link shared stubs to QFFmpegMediaPlugin vs
+ # static compilation of the needed stubs to the FFmpeg plugin.
+ # Currently, we chose the second option so that user could trivially
+ # remove the FFmpeg libs we ship.
+ # Set QT_LINK_STUBS_TO_FFMPEG_PLUGIN = TRUE to change the behavior.
+
+ # set(QT_LINK_STUBS_TO_FFMPEG_PLUGIN TRUE)
+
+ include("${CMAKE_CURRENT_SOURCE_DIR}/cmake/QtAddFFmpegStubs.cmake")
+ qt_internal_multimedia_add_ffmpeg_stubs()
+endif()
+
+
+if (QT_FEATURE_vaapi)
+ qt_internal_extend_target(QFFmpegMediaPlugin
+ SOURCES
+ qffmpeghwaccel_vaapi.cpp qffmpeghwaccel_vaapi_p.h
+ NO_UNITY_BUILD_SOURCES
+ # Conflicts with macros defined in X11.h, and Xlib.h
+ qffmpeghwaccel_vaapi.cpp
+ LIBRARIES
+ EGL::EGL
+ )
+
+ list(FIND FFMPEG_STUBS "va" va_stub_index)
+ if (NOT QT_LINK_STUBS_TO_FFMPEG_PLUGIN AND (FFMPEG_SHARED_LIBRARIES OR ${va_stub_index} EQUAL -1))
+ target_compile_definitions(QFFmpegMediaPlugin PRIVATE Q_FFMPEG_PLUGIN_STUBS_ONLY)
+ qt_internal_multimedia_find_vaapi_soversion()
+ qt_internal_multimedia_add_private_stub_to_plugin("va")
+ endif()
+endif()
+
+
+qt_internal_extend_target(QFFmpegMediaPlugin CONDITION APPLE
+ SOURCES
+ ../darwin/qavfhelpers.mm ../darwin/qavfhelpers_p.h
+ ../darwin/camera/qavfcamerabase_p.h ../darwin/camera/qavfcamerabase.mm
+ ../darwin/camera/avfcamerautility_p.h ../darwin/camera/avfcamerautility.mm
+ qffmpeghwaccel_videotoolbox.mm qffmpeghwaccel_videotoolbox_p.h
+ qavfcamera.mm qavfcamera_p.h
+ qavfsamplebufferdelegate.mm qavfsamplebufferdelegate_p.h
+
+ NO_UNITY_BUILD_SOURCES
+ qffmpeghwaccel_videotoolbox.mm # AVMediaType clash between libavformat and AVFoundation
+
+ INCLUDE_DIRECTORIES
+ ../darwin
+ ../darwin/camera
+ LIBRARIES
+ ${FWAudioToolbox}
+ ${FWCoreAudio}
+ ${FWCoreFoundation}
+ ${FWCoreMedia}
+ ${FWCoreVideo}
+ ${FWVideoToolbox}
+ ${FWSecurity}
+ AVFoundation::AVFoundation
+)
+
+qt_internal_extend_target(QFFmpegMediaPlugin CONDITION MACOS
+ SOURCES
+ qavfscreencapture.mm qavfscreencapture_p.h
+ qcgwindowcapture.mm qcgwindowcapture_p.h
+ qcgcapturablewindows.mm qcgcapturablewindows_p.h
+)
+
+qt_internal_extend_target(QFFmpegMediaPlugin CONDITION WIN32
+ SOURCES
+ ../windows/qwindowsvideodevices.cpp ../windows/qwindowsvideodevices_p.h
+ qwindowscamera.cpp qwindowscamera_p.h
+ qffmpeghwaccel_d3d11.cpp qffmpeghwaccel_d3d11_p.h
+ qgdiwindowcapture.cpp qgdiwindowcapture_p.h
+ qffmpegscreencapture_dxgi.cpp qffmpegscreencapture_dxgi_p.h
+ qwincapturablewindows.cpp qwincapturablewindows_p.h
+ INCLUDE_DIRECTORIES
+ ../windows
+ LIBRARIES
+ Qt::MultimediaPrivate
+ WMF::WMF
+ mfreadwrite
+)
+
+qt_internal_extend_target(QFFmpegMediaPlugin CONDITION QT_FEATURE_cpp_winrt
+ SOURCES
+ qffmpegwindowcapture_uwp.cpp qffmpegwindowcapture_uwp_p.h
+ INCLUDE_DIRECTORIES
+ ../windows
+ LIBRARIES
+ Dwmapi
+ Dxva2
+ windowsapp
+)
+
+qt_internal_extend_target(QFFmpegMediaPlugin CONDITION QT_FEATURE_xlib
+ SOURCES
+ qx11surfacecapture.cpp qx11surfacecapture_p.h
+ qx11capturablewindows.cpp qx11capturablewindows_p.h
+ LIBRARIES
+ X11
+ Xrandr
+ Xext
+)
+
+qt_internal_extend_target(QFFmpegMediaPlugin CONDITION QT_FEATURE_eglfs
+ SOURCES
+ qeglfsscreencapture.cpp qeglfsscreencapture_p.h
+ qopenglvideobuffer.cpp qopenglvideobuffer_p.h
+ LIBRARIES
+ Qt::OpenGLPrivate
+ Qt::Quick
+)
+
+set_source_files_properties(qx11surfacecapture.cpp qx11capturablewindows.cpp # X headers
+ PROPERTIES SKIP_UNITY_BUILD_INCLUSION ON)
+
+qt_internal_extend_target(QFFmpegMediaPlugin CONDITION QT_FEATURE_linux_v4l
+ SOURCES
+ qv4l2camera.cpp qv4l2camera_p.h
+ qv4l2filedescriptor.cpp qv4l2filedescriptor_p.h
+ qv4l2memorytransfer.cpp qv4l2memorytransfer_p.h
+ qv4l2cameradevices.cpp qv4l2cameradevices_p.h
+)
+
+if (ANDROID)
+ qt_internal_extend_target(QFFmpegMediaPlugin
+ SOURCES
+ qffmpeghwaccel_mediacodec.cpp qffmpeghwaccel_mediacodec_p.h
+ qandroidcamera_p.h qandroidcamera.cpp
+ qandroidvideodevices.cpp qandroidvideodevices_p.h
+ qandroidcameraframe_p.h qandroidcameraframe.cpp
+ qandroidimagecapture_p.h qandroidimagecapture.cpp
+ ../android/wrappers/jni/androidsurfacetexture_p.h
+ ../android/wrappers/jni/androidsurfacetexture.cpp
+ NO_UNITY_BUILD_SOURCES
+ # Duplicate declaration of JNI classes using `Q_DECLARE_JNI_CLASS`
+ qandroidcamera.cpp
+ qandroidvideodevices.cpp
+ qandroidcameraframe.cpp
+ INCLUDE_DIRECTORIES
+ ${FFMPEG_DIR}/include
+ ../android/wrappers/jni/
+ LIBRARIES
+ OpenSLES
+ mediandk
+ android
+ )
+
+ set_property(TARGET QFFmpegMediaPlugin APPEND PROPERTY QT_ANDROID_LIB_DEPENDENCIES
+ ${INSTALL_PLUGINSDIR}/multimedia/libplugins_multimedia_ffmpegmediaplugin.so
+ )
+
+ set_property(TARGET QFFmpegMediaPlugin APPEND PROPERTY QT_ANDROID_PERMISSIONS
+ android.permission.CAMERA
+ android.permission.RECORD_AUDIO
+ android.permission.BLUETOOTH
+ android.permission.MODIFY_AUDIO_SETTINGS
+ )
+endif()
+
+# TODO: get libs from FindFFmpeg.cmake
+set(ffmpeg_libs FFmpeg::avformat FFmpeg::avcodec FFmpeg::swresample FFmpeg::swscale FFmpeg::avutil)
+
+if (QT_DEPLOY_FFMPEG AND NOT BUILD_SHARED_LIBS AND NOT UIKIT)
+ message(FATAL_ERROR "QT_DEPLOY_FFMPEG is not implemented yet for static builds")
+endif()
+
+if (QT_DEPLOY_FFMPEG AND FFMPEG_SHARED_LIBRARIES AND (BUILD_SHARED_LIBS OR UIKIT))
+ include("${CMAKE_CURRENT_SOURCE_DIR}/cmake/QtDeployFFmpeg.cmake")
+ qt_internal_multimedia_copy_or_install_ffmpeg()
+endif()
+
+qt_internal_extend_target(QFFmpegMediaPlugin LIBRARIES ${ffmpeg_libs})
+
diff --git a/src/plugins/multimedia/ffmpeg/cmake/QtAddFFmpegStubs.cmake b/src/plugins/multimedia/ffmpeg/cmake/QtAddFFmpegStubs.cmake
new file mode 100644
index 000000000..5778ae4d2
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/cmake/QtAddFFmpegStubs.cmake
@@ -0,0 +1,199 @@
+# Copyright (C) 2024 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+# Utilities
+
+function(qt_internal_multimedia_find_ffmpeg_stubs)
+ foreach (stub ${FFMPEG_STUBS})
+ if (${stub} MATCHES ${vaapi_regex})
+ set(ffmpeg_has_vaapi TRUE PARENT_SCOPE)
+ elseif (${stub} MATCHES ${openssl_regex})
+ set(ffmpeg_has_openssl TRUE PARENT_SCOPE)
+ else()
+ set(unknown_ffmpeg_stubs
+ ${unknown_ffmpeg_stubs} ${stub} PARENT_SCOPE)
+ endif()
+ endforeach()
+endfunction()
+
+function(qt_internal_multimedia_check_ffmpeg_stubs_configuration)
+ if (NOT LINUX AND NOT ANDROID)
+ message(FATAL_ERROR "Currently, stubs are supported on Linux and Android")
+ endif()
+
+ if (unknown_ffmpeg_stubs)
+ message(FATAL_ERROR "Unknown ffmpeg stubs: ${unknown_ffmpeg_stubs}")
+ endif()
+
+ if (BUILD_SHARED_LIBS AND FFMPEG_SHARED_LIBRARIES AND FFMPEG_STUBS AND NOT QT_DEPLOY_FFMPEG)
+ message(FATAL_ERROR
+ "FFmpeg stubs have been found but QT_DEPLOY_FFMPEG is not specified. "
+ "Set -DQT_DEPLOY_FFMPEG=TRUE to continue.")
+ endif()
+
+ if (ffmpeg_has_vaapi AND NOT QT_FEATURE_vaapi)
+ message(FATAL_ERROR
+ "QT_FEATURE_vaapi is OFF but FFmpeg includes VAAPI.")
+ elseif (NOT ffmpeg_has_vaapi AND QT_FEATURE_vaapi)
+ message(WARNING
+ "QT_FEATURE_vaapi is ON "
+ "but FFmpeg includes VAAPI and dynamic symbols resolve is enabled.")
+ elseif(ffmpeg_has_vaapi AND NOT VAAPI_SUFFIX)
+ message(FATAL_ERROR "Cannot find VAAPI_SUFFIX, fix FindVAAPI.cmake")
+ elseif (ffmpeg_has_vaapi AND "${VAAPI_SUFFIX}" MATCHES "^1\\.32.*")
+ # drop the ancient vaapi version to avoid ABI problems
+ message(FATAL_ERROR "VAAPI ${VAAPI_SUFFIX} is not supported")
+ endif()
+
+ if (ffmpeg_has_openssl AND NOT QT_FEATURE_openssl)
+ message(FATAL_ERROR
+ "QT_FEATURE_openssl is OFF but FFmpeg includes OpenSSL.")
+ endif()
+endfunction()
+
+macro(qt_internal_multimedia_find_vaapi_soversion)
+ string(REGEX MATCH "^[0-9]+" va_soversion "${VAAPI_SUFFIX}")
+
+ set(va-drm_soversion "${va_soversion}")
+ set(va-x11_soversion "${va_soversion}")
+endmacro()
+
+macro(qt_internal_multimedia_find_openssl_soversion)
+ # Update OpenSSL variables since OPENSSL_SSL_LIBRARY is not propagated to this place in some cases.
+ qt_find_package(OpenSSL)
+
+ if (NOT OPENSSL_INCLUDE_DIR AND OPENSSL_ROOT_DIR)
+ set(OPENSSL_INCLUDE_DIR "${OPENSSL_ROOT_DIR}/include")
+ endif()
+
+ if (LINUX)
+ if (NOT OPENSSL_SSL_LIBRARY)
+ message(FATAL_ERROR "OPENSSL_SSL_LIBRARY is not found")
+ endif()
+
+ get_filename_component(ssl_lib_realpath "${OPENSSL_SSL_LIBRARY}" REALPATH)
+ string(REGEX MATCH "[0-9]+(\\.[0-9]+)*$" ssl_soversion "${ssl_lib_realpath}")
+ string(REGEX REPLACE "^3(\\..*|$)" "3" ssl_soversion "${ssl_soversion}")
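+ # For illustration: a hypothetical realpath of libssl.so.1.1 yields
+ # ssl_soversion "1.1", while libssl.so.3.0.2 is collapsed to "3" by the
+ # replace above, matching OpenSSL 3's unified soname.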
+ endif()
+
+ # TODO: improve OpenSSL version detection and raise an error if the version is not found.
+
+ set(crypto_soversion "${ssl_soversion}")
+endmacro()
+
+function(qt_internal_multimedia_set_stub_version_script stub stub_target)
+ if ("${stub}" MATCHES "${openssl_regex}")
+ if ("${ssl_soversion}" STREQUAL "3" OR
+ (NOT ssl_soversion AND "${OPENSSL_VERSION}" MATCHES "^3\\..*"))
+ # Symbols in OpenSSL 1.* are not versioned; only OpenSSL 3 needs a version script.
+ set(file_name "openssl3.ver")
+ endif()
+ elseif("${stub}" STREQUAL "va")
+ set(file_name "va.ver")
+ endif()
+
+ if (file_name)
+ set(version_script "${CMAKE_CURRENT_SOURCE_DIR}/symbolstubs/${file_name}")
+ set_property(TARGET ${stub_target} APPEND_STRING
+ PROPERTY LINK_FLAGS " -Wl,--version-script=${version_script}")
+ set_target_properties(${stub_target} PROPERTIES LINK_DEPENDS ${version_script})
+ source_group("Stubs Version Scripts" FILES ${version_script})
+ endif()
+endfunction()
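+
+# Note: the shipped version scripts live in symbolstubs/. As a rough sketch
+# (not the actual file contents), a GNU ld version script that pins all
+# exported symbols to a single version node looks like:
+#
+#   SOME_VERSION_NODE {
+#       global: *;
+#   };
+#
+# and is passed to the linker via -Wl,--version-script=<file>, as done above.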
+
+function(qt_internal_multimedia_set_stub_output stub stub_target)
+ set(output_dir "${QT_BUILD_DIR}/${INSTALL_LIBDIR}")
+
+ set_target_properties(${stub_target} PROPERTIES
+ RUNTIME_OUTPUT_DIRECTORY "${output_dir}"
+ LIBRARY_OUTPUT_DIRECTORY "${output_dir}"
+ )
+
+ if (${stub}_soversion)
+ set_target_properties(${stub_target} PROPERTIES
+ VERSION "${${stub}_soversion}"
+ SOVERSION "${${stub}_soversion}")
+ endif()
+
+ qt_apply_rpaths(TARGET ${stub_target} INSTALL_PATH "${INSTALL_LIBDIR}" RELATIVE_RPATH)
+endfunction()
+
+function(qt_internal_multimedia_set_stub_include_directories stub target)
+ qt_internal_extend_target(${target}
+ CONDITION ${stub} MATCHES "${openssl_regex}"
+ INCLUDE_DIRECTORIES "${OPENSSL_INCLUDE_DIR}")
+
+ qt_internal_extend_target(${target}
+ CONDITION ${stub} MATCHES "${vaapi_regex}"
+ INCLUDE_DIRECTORIES "${VAAPI_INCLUDE_DIR}")
+endfunction()
+
+function(qt_internal_multimedia_set_stub_symbols_visibility stub stub_target)
+ set_target_properties(${stub_target} PROPERTIES
+ C_VISIBILITY_PRESET hidden
+ CXX_VISIBILITY_PRESET hidden)
+ target_compile_definitions(${stub_target} PRIVATE Q_EXPORT_STUB_SYMBOLS)
+endfunction()
+
+function(qt_internal_multimedia_set_stub_libraries stub stub_target)
+ qt_internal_extend_target(${stub_target} LIBRARIES Qt::Core Qt::MultimediaPrivate)
+
+ if (QT_LINK_STUBS_TO_FFMPEG_PLUGIN AND ${stub} STREQUAL "va")
+ qt_internal_extend_target(QFFmpegMediaPlugin LIBRARIES ${stub_target})
+ endif()
+endfunction()
+
+function(qt_internal_multimedia_define_stub_needed_version stub target)
+ string(TOUPPER ${stub} prefix)
+ string(REPLACE "-" "_" prefix ${prefix})
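+
+ # e.g. a stub named "va-drm" yields the prefix "VA_DRM", so the target is
+ # compiled with a definition like VA_DRM_NEEDED_SOVERSION="2" (the value
+ # being the soversion computed earlier).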
+
+ target_compile_definitions(${target} PRIVATE
+ "${prefix}_NEEDED_SOVERSION=\"${${stub}_soversion}\"")
+endfunction()
+
+function(qt_internal_multimedia_add_shared_stub stub)
+ set(stub_target "Qt${PROJECT_VERSION_MAJOR}FFmpegStub-${stub}")
+
+ qt_add_library(${stub_target} SHARED "symbolstubs/qffmpegsymbols-${stub}.cpp")
+
+ qt_internal_multimedia_set_stub_include_directories(${stub} ${stub_target})
+ qt_internal_multimedia_set_stub_output(${stub} ${stub_target})
+ qt_internal_multimedia_set_stub_symbols_visibility(${stub} ${stub_target})
+ qt_internal_multimedia_set_stub_version_script(${stub} ${stub_target})
+ qt_internal_multimedia_define_stub_needed_version(${stub} ${stub_target})
+ qt_internal_multimedia_set_stub_libraries(${stub} ${stub_target})
+
+ qt_install(TARGETS ${stub_target} LIBRARY NAMELINK_SKIP)
+endfunction()
+
+function(qt_internal_multimedia_add_private_stub_to_plugin stub)
+ qt_internal_multimedia_set_stub_include_directories(${stub} QFFmpegMediaPlugin)
+ qt_internal_multimedia_define_stub_needed_version(${stub} QFFmpegMediaPlugin)
+ qt_internal_extend_target(QFFmpegMediaPlugin SOURCES "symbolstubs/qffmpegsymbols-${stub}.cpp")
+endfunction()
+
+# Main function
+
+set(vaapi_regex "^(va|va-drm|va-x11)$")
+set(openssl_regex "^(ssl|crypto)$")
+
+function(qt_internal_multimedia_add_ffmpeg_stubs)
+ qt_internal_multimedia_find_ffmpeg_stubs()
+ qt_internal_multimedia_check_ffmpeg_stubs_configuration()
+
+ if (ffmpeg_has_vaapi)
+ qt_internal_multimedia_find_vaapi_soversion()
+ endif()
+
+ if (ffmpeg_has_openssl)
+ qt_internal_multimedia_find_openssl_soversion()
+ endif()
+
+ foreach (stub ${FFMPEG_STUBS})
+ if (FFMPEG_SHARED_LIBRARIES)
+ qt_internal_multimedia_add_shared_stub("${stub}")
+ else()
+ qt_internal_multimedia_add_private_stub_to_plugin("${stub}")
+ endif()
+ endforeach()
+endfunction()
diff --git a/src/plugins/multimedia/ffmpeg/cmake/QtDeployFFmpeg.cmake b/src/plugins/multimedia/ffmpeg/cmake/QtDeployFFmpeg.cmake
new file mode 100644
index 000000000..5e7d4b552
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/cmake/QtDeployFFmpeg.cmake
@@ -0,0 +1,43 @@
+# Copyright (C) 2023 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+function(qt_internal_multimedia_set_ffmpeg_link_directory directory)
+ foreach (lib ${ffmpeg_libs} FFmpeg)
+ set_target_properties(${lib} PROPERTIES INTERFACE_LINK_DIRECTORIES ${directory})
+ endforeach()
+endfunction()
+
+function(qt_internal_multimedia_copy_or_install_ffmpeg)
+ if (WIN32)
+ set(install_dir ${INSTALL_BINDIR})
+ else()
+ set(install_dir ${INSTALL_LIBDIR})
+ endif()
+
+ if (QT_WILL_INSTALL)
+ qt_install(FILES "${FFMPEG_SHARED_LIBRARIES}" DESTINATION ${install_dir})
+ else()
+ # This could be elseif(NOT WIN32): we could actually drop the copying for Unix
+ # platforms. However, it makes sense to copy anyway for consistency,
+ # so that developer builds get the same configuration.
+
+ set(ffmpeg_output_dir "${QT_BUILD_DIR}/${install_dir}")
+ file(MAKE_DIRECTORY ${ffmpeg_output_dir})
+
+ foreach(lib_path ${FFMPEG_SHARED_LIBRARIES})
+ get_filename_component(lib_name ${lib_path} NAME)
+ if(NOT EXISTS "${ffmpeg_output_dir}/${lib_name}")
+ file(COPY ${lib_path} DESTINATION ${ffmpeg_output_dir})
+ endif()
+ endforeach()
+
+ # On Windows, shared linking goes through 'integration' static libs;
+ # on other platforms we must add the directory containing the copied libs.
+ if (NOT WIN32)
+ qt_internal_multimedia_set_ffmpeg_link_directory(${ffmpeg_output_dir})
+ endif()
+ endif()
+
+ # Should we set the compile definition for the plugin or for the QtMM module?
+ # target_compile_definitions(QFFmpegMediaPlugin PRIVATE FFMPEG_DEPLOY_FOLDER="${FFMPEG_DEPLOY_FOLDER}")
+endfunction()
diff --git a/src/plugins/multimedia/ffmpeg/ffmpeg.json b/src/plugins/multimedia/ffmpeg/ffmpeg.json
new file mode 100644
index 000000000..d8e7e4456
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/ffmpeg.json
@@ -0,0 +1,3 @@
+{
+ "Keys": [ "ffmpeg" ]
+}
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer.cpp b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer.cpp
new file mode 100644
index 000000000..64bd82dc0
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer.cpp
@@ -0,0 +1,407 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "playbackengine/qffmpegaudiorenderer_p.h"
+#include "qaudiosink.h"
+#include "qaudiooutput.h"
+#include "qaudiobufferoutput.h"
+#include "private/qplatformaudiooutput_p.h"
+#include <QtCore/qloggingcategory.h>
+
+#include "qffmpegresampler_p.h"
+#include "qffmpegmediaformatinfo_p.h"
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcAudioRenderer, "qt.multimedia.ffmpeg.audiorenderer");
+
+namespace QFFmpeg {
+
+using namespace std::chrono_literals;
+using namespace std::chrono;
+
+namespace {
+constexpr auto DesiredBufferTime = 110000us;
+constexpr auto MinDesiredBufferTime = 22000us;
+constexpr auto MaxDesiredBufferTime = 64000us;
+constexpr auto MinDesiredFreeBufferTime = 10000us;
+
+// This value may need to be adjusted per platform with an #ifdef: on Linux,
+// QPulseAudioSink has quite unstable timings and needs much more time to
+// confirm that the buffer is overloaded.
+constexpr auto BufferLoadingMeasureTime = 400ms;
+
+constexpr auto DurationBias = 2ms; // avoids extra timer events
+
+qreal sampleRateFactor() {
+ // Test purposes:
+ //
+ // The env var describes a factor for the sample rate of
+ // audio data that we feed to the audio sink.
+ //
+ // In some cases the audio sink might consume data slightly slower or faster than expected;
+ // even though the synchronization in the audio renderer is supposed to handle it,
+ // it makes sense to experiment with QT_MEDIA_PLAYER_AUDIO_SAMPLE_RATE_FACTOR != 1.
+ //
+ // Set QT_MEDIA_PLAYER_AUDIO_SAMPLE_RATE_FACTOR > 1 (e.g. 1.01 - 1.1) to test high buffer loading
+ // or to compensate for too-fast data consumption by the audio sink.
+ // Set QT_MEDIA_PLAYER_AUDIO_SAMPLE_RATE_FACTOR < 1 to test low buffer loading
+ // or to compensate for too-slow data consumption by the audio sink.
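+ //
+ // For example (shell syntax illustrative):
+ //   QT_MEDIA_PLAYER_AUDIO_SAMPLE_RATE_FACTOR=1.05 ./player file.mp4
+ // feeds the sink audio resampled to a 5% higher rate than the sink
+ // format's nominal sample rate.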
+
+
+ static const qreal result = []() {
+ const auto sampleRateFactorStr = qEnvironmentVariable("QT_MEDIA_PLAYER_AUDIO_SAMPLE_RATE_FACTOR");
+ bool ok = false;
+ const auto result = sampleRateFactorStr.toDouble(&ok);
+ return ok ? result : 1.;
+ }();
+
+ return result;
+}
+
+QAudioFormat audioFormatFromFrame(const Frame &frame)
+{
+ return QFFmpegMediaFormatInfo::audioFormatFromCodecParameters(
+ frame.codec()->stream()->codecpar);
+}
+
+std::unique_ptr<QFFmpegResampler> createResampler(const Frame &frame,
+ const QAudioFormat &outputFormat)
+{
+ return std::make_unique<QFFmpegResampler>(frame.codec(), outputFormat, frame.pts());
+}
+
+} // namespace
+
+AudioRenderer::AudioRenderer(const TimeController &tc, QAudioOutput *output,
+ QAudioBufferOutput *bufferOutput)
+ : Renderer(tc), m_output(output), m_bufferOutput(bufferOutput)
+{
+ if (output) {
+ // TODO: implement the signals in QPlatformAudioOutput and connect to them, QTBUG-112294
+ connect(output, &QAudioOutput::deviceChanged, this, &AudioRenderer::onDeviceChanged);
+ connect(output, &QAudioOutput::volumeChanged, this, &AudioRenderer::updateVolume);
+ connect(output, &QAudioOutput::mutedChanged, this, &AudioRenderer::updateVolume);
+ }
+}
+
+void AudioRenderer::setOutput(QAudioOutput *output)
+{
+ setOutputInternal(m_output, output, [this](QAudioOutput *) { onDeviceChanged(); });
+}
+
+void AudioRenderer::setOutput(QAudioBufferOutput *bufferOutput)
+{
+ setOutputInternal(m_bufferOutput, bufferOutput,
+ [this](QAudioBufferOutput *) { m_bufferOutputChanged = true; });
+}
+
+AudioRenderer::~AudioRenderer()
+{
+ freeOutput();
+}
+
+void AudioRenderer::updateVolume()
+{
+ if (m_sink)
+ m_sink->setVolume(m_output->isMuted() ? 0.f : m_output->volume());
+}
+
+void AudioRenderer::onDeviceChanged()
+{
+ m_deviceChanged = true;
+}
+
+Renderer::RenderingResult AudioRenderer::renderInternal(Frame frame)
+{
+ if (frame.isValid())
+ updateOutputs(frame);
+
+ // push to sink first in order not to waste time on resampling
+ // for QAudioBufferOutput
+ const RenderingResult result = pushFrameToOutput(frame);
+
+ if (m_lastFramePushDone)
+ pushFrameToBufferOutput(frame);
+ // else // skip pushing the same data to QAudioBufferOutput
+
+ m_lastFramePushDone = result.done;
+
+ return result;
+}
+
+AudioRenderer::RenderingResult AudioRenderer::pushFrameToOutput(const Frame &frame)
+{
+ if (!m_ioDevice || !m_resampler)
+ return {};
+
+ Q_ASSERT(m_sink);
+
+ auto firstFrameFlagGuard = qScopeGuard([&]() { m_firstFrameToSink = false; });
+
+ const SynchronizationStamp syncStamp{ m_sink->state(), m_sink->bytesFree(),
+ m_bufferedData.offset, Clock::now() };
+
+ if (!m_bufferedData.isValid()) {
+ if (!frame.isValid()) {
+ if (std::exchange(m_drained, true))
+ return {};
+
+ const auto time = bufferLoadingTime(syncStamp);
+
+ qCDebug(qLcAudioRenderer) << "Draining AudioRenderer, time:" << time;
+
+ return { time.count() == 0, time };
+ }
+
+ m_bufferedData = { m_resampler->resample(frame.avFrame()) };
+ }
+
+ if (m_bufferedData.isValid()) {
+ // synchronize after "QIODevice::write" to deliver audio data to the sink ASAP.
+ auto syncGuard = qScopeGuard([&]() { updateSynchronization(syncStamp, frame); });
+
+ const auto bytesWritten = m_ioDevice->write(m_bufferedData.data(), m_bufferedData.size());
+
+ m_bufferedData.offset += bytesWritten;
+
+ if (m_bufferedData.size() <= 0) {
+ m_bufferedData = {};
+
+ return {};
+ }
+
+ const auto remainingDuration = durationForBytes(m_bufferedData.size());
+
+ return { false,
+ std::min(remainingDuration + DurationBias, m_timings.actualBufferDuration / 2) };
+ }
+
+ return {};
+}
+
+void AudioRenderer::pushFrameToBufferOutput(const Frame &frame)
+{
+ if (!m_bufferOutput)
+ return;
+
+ Q_ASSERT(m_bufferOutputResampler);
+
+ if (frame.isValid()) {
+ // TODO: get buffer from m_bufferedData if resample formats are equal
+ QAudioBuffer buffer = m_resampler->resample(frame.avFrame());
+ emit m_bufferOutput->audioBufferReceived(buffer);
+ } else {
+ emit m_bufferOutput->audioBufferReceived({});
+ }
+}
+
+void AudioRenderer::onPlaybackRateChanged()
+{
+ m_resampler.reset();
+}
+
+int AudioRenderer::timerInterval() const
+{
+ constexpr auto MaxFixableInterval = 50; // ms
+
+ const auto interval = Renderer::timerInterval();
+
+ if (m_firstFrameToSink || !m_sink || m_sink->state() != QAudio::IdleState
+ || interval > MaxFixableInterval)
+ return interval;
+
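+ // The sink has gone idle while playback is active and the next step is
+ // close: return 0 to process the next frame immediately and refill the
+ // sink buffer, avoiding an audible gap.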
+ return 0;
+}
+
+void AudioRenderer::onPauseChanged()
+{
+ m_firstFrameToSink = true;
+ Renderer::onPauseChanged();
+}
+
+void AudioRenderer::initResampler(const Frame &frame)
+{
+ // We recreate the resampler whenever the format changes
+
+ auto resamplerFormat = m_sinkFormat;
+ resamplerFormat.setSampleRate(
+ qRound(m_sinkFormat.sampleRate() / playbackRate() * sampleRateFactor()));
+ m_resampler = createResampler(frame, resamplerFormat);
+}
+
+void AudioRenderer::freeOutput()
+{
+ qCDebug(qLcAudioRenderer) << "Free audio output";
+ if (m_sink) {
+ m_sink->reset();
+
+ // TODO: investigate whether resetting the sink without deleting it is enough
+ m_sink.reset();
+ }
+
+ m_ioDevice = nullptr;
+
+ m_bufferedData = {};
+ m_deviceChanged = false;
+ m_sinkFormat = {};
+ m_timings = {};
+ m_bufferLoadingInfo = {};
+}
+
+void AudioRenderer::updateOutputs(const Frame &frame)
+{
+ if (m_deviceChanged) {
+ freeOutput();
+ m_resampler.reset();
+ }
+
+ if (m_bufferOutput) {
+ if (m_bufferOutputChanged) {
+ m_bufferOutputChanged = false;
+ m_bufferOutputResampler.reset();
+ }
+
+ if (!m_bufferOutputResampler) {
+ QAudioFormat outputFormat = m_bufferOutput->format();
+ if (!outputFormat.isValid())
+ outputFormat = audioFormatFromFrame(frame);
+ m_bufferOutputResampler = createResampler(frame, outputFormat);
+ }
+ }
+
+ if (!m_output)
+ return;
+
+ if (!m_sinkFormat.isValid()) {
+ m_sinkFormat = audioFormatFromFrame(frame);
+ m_sinkFormat.setChannelConfig(m_output->device().channelConfiguration());
+ }
+
+ if (!m_sink) {
+ // Insert a delay here to test time offset synchronization, e.g. QThread::sleep(1)
+ m_sink = std::make_unique<QAudioSink>(m_output->device(), m_sinkFormat);
+ updateVolume();
+ m_sink->setBufferSize(m_sinkFormat.bytesForDuration(DesiredBufferTime.count()));
+ m_ioDevice = m_sink->start();
+ m_firstFrameToSink = true;
+
+ connect(m_sink.get(), &QAudioSink::stateChanged, this,
+ &AudioRenderer::onAudioSinkStateChanged);
+
+ m_timings.actualBufferDuration = durationForBytes(m_sink->bufferSize());
+ m_timings.maxSoundDelay = qMin(MaxDesiredBufferTime,
+ m_timings.actualBufferDuration - MinDesiredFreeBufferTime);
+ m_timings.minSoundDelay = MinDesiredBufferTime;
+
+ Q_ASSERT(DurationBias < m_timings.minSoundDelay
+ && m_timings.maxSoundDelay < m_timings.actualBufferDuration);
+ }
+
+ if (!m_resampler)
+ initResampler(frame);
+}
+
+void AudioRenderer::updateSynchronization(const SynchronizationStamp &stamp, const Frame &frame)
+{
+ if (!frame.isValid())
+ return;
+
+ Q_ASSERT(m_sink);
+
+ const auto bufferLoadingTime = this->bufferLoadingTime(stamp);
+ const auto currentFrameDelay = frameDelay(frame, stamp.timePoint);
+ const auto writtenTime = durationForBytes(stamp.bufferBytesWritten);
+ const auto soundDelay = currentFrameDelay + bufferLoadingTime - writtenTime;
+
+ auto synchronize = [&](microseconds fixedDelay, microseconds targetSoundDelay) {
+ // TODO: investigate if we need sample compensation here
+
+ changeRendererTime(fixedDelay - targetSoundDelay);
+ if (qLcAudioRenderer().isDebugEnabled()) {
+ // clang-format off
+ qCDebug(qLcAudioRenderer)
+ << "Change rendering time:"
+ << "\n First frame:" << m_firstFrameToSink
+ << "\n Delay (frame+buffer-written):" << currentFrameDelay << "+"
+ << bufferLoadingTime << "-"
+ << writtenTime << "="
+ << soundDelay
+ << "\n Fixed delay:" << fixedDelay
+ << "\n Target delay:" << targetSoundDelay
+ << "\n Buffer durations (min/max/limit):" << m_timings.minSoundDelay
+ << m_timings.maxSoundDelay
+ << m_timings.actualBufferDuration
+ << "\n Audio sink state:" << stamp.audioSinkState;
+ // clang-format on
+ }
+ };
+
+ const auto loadingType = soundDelay > m_timings.maxSoundDelay ? BufferLoadingInfo::High
+ : soundDelay < m_timings.minSoundDelay ? BufferLoadingInfo::Low
+ : BufferLoadingInfo::Moderate;
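+
+ // Example with the constants above: minSoundDelay is 22ms and maxSoundDelay
+ // is at most 64ms, so a soundDelay of e.g. 80ms classifies as High, 10ms as
+ // Low, and anything in between as Moderate.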
+
+ if (loadingType != m_bufferLoadingInfo.type) {
+ // qCDebug(qLcAudioRenderer) << "Change buffer loading type:" <<
+ // m_bufferLoadingInfo.type
+ // << "->" << loadingType << "soundDelay:" << soundDelay;
+ m_bufferLoadingInfo = { loadingType, stamp.timePoint, soundDelay };
+ }
+
+ if (loadingType != BufferLoadingInfo::Moderate) {
+ const auto isHigh = loadingType == BufferLoadingInfo::High;
+ const auto shouldHandleIdle = stamp.audioSinkState == QAudio::IdleState && !isHigh;
+
+ auto &fixedDelay = m_bufferLoadingInfo.delay;
+
+ fixedDelay = shouldHandleIdle ? soundDelay
+ : isHigh ? qMin(soundDelay, fixedDelay)
+ : qMax(soundDelay, fixedDelay);
+
+ if (stamp.timePoint - m_bufferLoadingInfo.timePoint > BufferLoadingMeasureTime
+ || (m_firstFrameToSink && isHigh) || shouldHandleIdle) {
+ const auto targetDelay = isHigh
+ ? (m_timings.maxSoundDelay + m_timings.minSoundDelay) / 2
+ : m_timings.minSoundDelay + DurationBias;
+
+ synchronize(fixedDelay, targetDelay);
+ m_bufferLoadingInfo = { BufferLoadingInfo::Moderate, stamp.timePoint, targetDelay };
+ }
+ }
+}
+
+microseconds AudioRenderer::bufferLoadingTime(const SynchronizationStamp &syncStamp) const
+{
+ Q_ASSERT(m_sink);
+
+ if (syncStamp.audioSinkState == QAudio::IdleState)
+ return microseconds(0);
+
+ const auto bytes = qMax(m_sink->bufferSize() - syncStamp.audioSinkBytesFree, 0);
+
+#ifdef Q_OS_ANDROID
+ // This hack was added due to QAndroidAudioSink issues (QTBUG-118609):
+ // QAndroidAudioSink::bytesFree currently returns only 0 or bufferSize,
+ // never intermediate values; to be fixed.
+ if (bytes == 0)
+ return m_timings.minSoundDelay + MinDesiredBufferTime;
+#endif
+
+ return durationForBytes(bytes);
+}
+
+void AudioRenderer::onAudioSinkStateChanged(QAudio::State state)
+{
+ if (state == QAudio::IdleState && !m_firstFrameToSink)
+ scheduleNextStep();
+}
+
+microseconds AudioRenderer::durationForBytes(qsizetype bytes) const
+{
+ return microseconds(m_sinkFormat.durationForBytes(static_cast<qint32>(bytes)));
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#include "moc_qffmpegaudiorenderer_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer_p.h b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer_p.h
new file mode 100644
index 000000000..9a22a8a48
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegaudiorenderer_p.h
@@ -0,0 +1,132 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGAUDIORENDERER_P_H
+#define QFFMPEGAUDIORENDERER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "playbackengine/qffmpegrenderer_p.h"
+
+#include "qaudiobuffer.h"
+
+QT_BEGIN_NAMESPACE
+
+class QAudioOutput;
+class QAudioBufferOutput;
+class QAudioSink;
+class QFFmpegResampler;
+
+namespace QFFmpeg {
+
+class AudioRenderer : public Renderer
+{
+ Q_OBJECT
+public:
+ AudioRenderer(const TimeController &tc, QAudioOutput *output, QAudioBufferOutput *bufferOutput);
+
+ void setOutput(QAudioOutput *output);
+
+ void setOutput(QAudioBufferOutput *bufferOutput);
+
+ ~AudioRenderer() override;
+
+protected:
+ using Microseconds = std::chrono::microseconds;
+ struct SynchronizationStamp
+ {
+ QAudio::State audioSinkState = QAudio::IdleState;
+ qsizetype audioSinkBytesFree = 0;
+ qsizetype bufferBytesWritten = 0;
+ TimePoint timePoint = TimePoint::max();
+ };
+
+ struct BufferLoadingInfo
+ {
+ enum Type { Low, Moderate, High };
+ Type type = Moderate;
+ TimePoint timePoint = TimePoint::max();
+ Microseconds delay = Microseconds(0);
+ };
+
+ struct AudioTimings
+ {
+ Microseconds actualBufferDuration = Microseconds(0);
+ Microseconds maxSoundDelay = Microseconds(0);
+ Microseconds minSoundDelay = Microseconds(0);
+ };
+
+ struct BufferedDataWithOffset
+ {
+ QAudioBuffer buffer;
+ qsizetype offset = 0;
+
+ bool isValid() const { return buffer.isValid(); }
+ qsizetype size() const { return buffer.byteCount() - offset; }
+ const char *data() const { return buffer.constData<char>() + offset; }
+ };
+
+ RenderingResult renderInternal(Frame frame) override;
+
+ RenderingResult pushFrameToOutput(const Frame &frame);
+
+ void pushFrameToBufferOutput(const Frame &frame);
+
+ void onPlaybackRateChanged() override;
+
+ int timerInterval() const override;
+
+ void onPauseChanged() override;
+
+ void freeOutput();
+
+ void updateOutputs(const Frame &frame);
+
+ void initResampler(const Frame &frame);
+
+ void onDeviceChanged();
+
+ void updateVolume();
+
+ void updateSynchronization(const SynchronizationStamp &stamp, const Frame &frame);
+
+ Microseconds bufferLoadingTime(const SynchronizationStamp &syncStamp) const;
+
+ void onAudioSinkStateChanged(QAudio::State state);
+
+ Microseconds durationForBytes(qsizetype bytes) const;
+
+private:
+ QPointer<QAudioOutput> m_output;
+ QPointer<QAudioBufferOutput> m_bufferOutput;
+ std::unique_ptr<QAudioSink> m_sink;
+ AudioTimings m_timings;
+ BufferLoadingInfo m_bufferLoadingInfo;
+ std::unique_ptr<QFFmpegResampler> m_resampler;
+ std::unique_ptr<QFFmpegResampler> m_bufferOutputResampler;
+ QAudioFormat m_sinkFormat;
+
+ BufferedDataWithOffset m_bufferedData;
+ QIODevice *m_ioDevice = nullptr;
+
+ bool m_lastFramePushDone = true;
+
+ bool m_deviceChanged = false;
+ bool m_bufferOutputChanged = false;
+ bool m_drained = false;
+ bool m_firstFrameToSink = true;
+};
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGAUDIORENDERER_P_H
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec.cpp b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec.cpp
new file mode 100644
index 000000000..457b3603d
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec.cpp
@@ -0,0 +1,82 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "playbackengine/qffmpegcodec_p.h"
+#include "qloggingcategory.h"
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcPlaybackEngineCodec, "qt.multimedia.playbackengine.codec");
+
+namespace QFFmpeg {
+
+Codec::Data::Data(AVCodecContextUPtr context, AVStream *stream, AVFormatContext *formatContext,
+ std::unique_ptr<QFFmpeg::HWAccel> hwAccel)
+ : context(std::move(context)), stream(stream), hwAccel(std::move(hwAccel))
+{
+ if (stream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
+ pixelAspectRatio = av_guess_sample_aspect_ratio(formatContext, stream, nullptr);
+}
+
+QMaybe<Codec> Codec::create(AVStream *stream, AVFormatContext *formatContext)
+{
+ if (!stream)
+ return { "Invalid stream" };
+
+ const AVCodec *decoder = nullptr;
+ std::unique_ptr<QFFmpeg::HWAccel> hwAccel;
+
+ if (stream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
+ std::tie(decoder, hwAccel) = HWAccel::findDecoderWithHwAccel(stream->codecpar->codec_id);
+
+ if (!decoder)
+ decoder = QFFmpeg::findAVDecoder(stream->codecpar->codec_id);
+
+ if (!decoder)
+ return { "Failed to find a valid FFmpeg decoder" };
+
+ qCDebug(qLcPlaybackEngineCodec) << "found decoder" << decoder->name << "for id" << decoder->id;
+
+ AVCodecContextUPtr context(avcodec_alloc_context3(decoder));
+ if (!context)
+ return { "Failed to allocate a FFmpeg codec context" };
+
+ if (hwAccel)
+ context->hw_device_ctx = av_buffer_ref(hwAccel->hwDeviceContextAsBuffer());
+
+ if (context->codec_type != AVMEDIA_TYPE_AUDIO && context->codec_type != AVMEDIA_TYPE_VIDEO
+ && context->codec_type != AVMEDIA_TYPE_SUBTITLE) {
+ return { "Unknown codec type" };
+ }
+
+ int ret = avcodec_parameters_to_context(context.get(), stream->codecpar);
+ if (ret < 0)
+ return { "Failed to set FFmpeg codec parameters" };
+
+ // ### This still gives errors about wrong HW formats (as we accept all of them),
+ // but it would be good to fix so we can filter out pixel formats we don't support natively
+ context->get_format = QFFmpeg::getFormat;
+
+ /* Init the decoder, with reference counting and threading */
+ AVDictionaryHolder opts;
+ av_dict_set(opts, "refcounted_frames", "1", 0);
+ av_dict_set(opts, "threads", "auto", 0);
+ applyExperimentalCodecOptions(decoder, opts);
+
+ ret = avcodec_open2(context.get(), decoder, opts);
+ if (ret < 0)
+ return QString("Failed to open FFmpeg codec context " + err2str(ret));
+
+ return Codec(new Data(std::move(context), stream, formatContext, std::move(hwAccel)));
+}
+
+AVRational Codec::pixelAspectRatio(AVFrame *frame) const
+{
+ // Does the same as av_guess_sample_aspect_ratio, but more efficiently
+ return d->pixelAspectRatio.num && d->pixelAspectRatio.den ? d->pixelAspectRatio
+ : frame->sample_aspect_ratio;
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec_p.h b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec_p.h
new file mode 100644
index 000000000..449fb1f65
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegcodec_p.h
@@ -0,0 +1,62 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGCODEC_P_H
+#define QFFMPEGCODEC_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qshareddata.h"
+#include "qqueue.h"
+#include "private/qmultimediautils_p.h"
+#include "qffmpeg_p.h"
+#include "qffmpeghwaccel_p.h"
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+class Codec
+{
+ struct Data
+ {
+ Data(AVCodecContextUPtr context, AVStream *stream, AVFormatContext *formatContext,
+ std::unique_ptr<QFFmpeg::HWAccel> hwAccel);
+ QAtomicInt ref;
+ AVCodecContextUPtr context;
+ AVStream *stream = nullptr;
+ AVRational pixelAspectRatio = { 0, 1 };
+ std::unique_ptr<QFFmpeg::HWAccel> hwAccel;
+ };
+
+public:
+ static QMaybe<Codec> create(AVStream *stream, AVFormatContext *formatContext);
+
+ AVRational pixelAspectRatio(AVFrame *frame) const;
+
+ AVCodecContext *context() const { return d->context.get(); }
+ AVStream *stream() const { return d->stream; }
+ uint streamIndex() const { return d->stream->index; }
+ HWAccel *hwAccel() const { return d->hwAccel.get(); }
+ qint64 toMs(qint64 ts) const { return timeStampMs(ts, d->stream->time_base).value_or(0); }
+ qint64 toUs(qint64 ts) const { return timeStampUs(ts, d->stream->time_base).value_or(0); }
+
+private:
+ Codec(Data *data) : d(data) { }
+ QExplicitlySharedDataPointer<Data> d;
+};
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGCODEC_P_H
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegdemuxer.cpp b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegdemuxer.cpp
new file mode 100644
index 000000000..8cced835c
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegdemuxer.cpp
@@ -0,0 +1,228 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "playbackengine/qffmpegdemuxer_p.h"
+#include <qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+// 4 sec of buffering. TODO: maybe make this customizable via an env var
+static constexpr qint64 MaxBufferedDurationUs = 4'000'000;
+
+// around 4 sec of HDR video
+static constexpr qint64 MaxBufferedSize = 32 * 1024 * 1024;
+
+namespace QFFmpeg {
+
+static Q_LOGGING_CATEGORY(qLcDemuxer, "qt.multimedia.ffmpeg.demuxer");
+
+static qint64 streamTimeToUs(const AVStream *stream, qint64 time)
+{
+ Q_ASSERT(stream);
+
+ const auto res = mul(time * 1000000, stream->time_base);
+ return res ? *res : time;
+}
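+
+// For example, with a typical MPEG-TS time base of {1, 90000}, a stream time
+// of 90000 converts to 1'000'000 us (one second); when mul() cannot produce a
+// value, the raw time is returned unchanged.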
+
+static qint64 packetEndPos(const AVStream *stream, const Packet &packet)
+{
+ return packet.loopOffset().pos
+ + streamTimeToUs(stream, packet.avPacket()->pts + packet.avPacket()->duration);
+}
+
+Demuxer::Demuxer(AVFormatContext *context, const PositionWithOffset &posWithOffset,
+ const StreamIndexes &streamIndexes, int loops)
+ : m_context(context), m_posWithOffset(posWithOffset), m_loops(loops)
+{
+ qCDebug(qLcDemuxer) << "Create demuxer."
+ << "pos:" << posWithOffset.pos << "loop offset:" << posWithOffset.offset.pos
+ << "loop index:" << posWithOffset.offset.index << "loops:" << loops;
+
+ Q_ASSERT(m_context);
+
+ for (auto i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i) {
+ if (streamIndexes[i] >= 0) {
+ const auto trackType = static_cast<QPlatformMediaPlayer::TrackType>(i);
+ qCDebug(qLcDemuxer) << "Activate demuxing stream" << i << ", trackType:" << trackType;
+ m_streams[streamIndexes[i]] = { trackType };
+ }
+ }
+}
+
+void Demuxer::doNextStep()
+{
+ ensureSeeked();
+
+ Packet packet(m_posWithOffset.offset, AVPacketUPtr{ av_packet_alloc() }, id());
+ if (av_read_frame(m_context, packet.avPacket()) < 0) {
+ ++m_posWithOffset.offset.index;
+
+ const auto loops = m_loops.loadAcquire();
+ if (loops >= 0 && m_posWithOffset.offset.index >= loops) {
+ qCDebug(qLcDemuxer) << "finish demuxing";
+
+ if (!std::exchange(m_buffered, true))
+ emit packetsBuffered();
+
+ setAtEnd(true);
+ } else {
+ m_seeked = false;
+ m_posWithOffset.pos = 0;
+ m_posWithOffset.offset.pos = m_maxPacketsEndPos;
+ m_maxPacketsEndPos = 0;
+
+ ensureSeeked();
+
+ qCDebug(qLcDemuxer) << "Demuxer loops changed. Index:" << m_posWithOffset.offset.index
+ << "Offset:" << m_posWithOffset.offset.pos;
+
+ scheduleNextStep(false);
+ }
+
+ return;
+ }
+
+ auto &avPacket = *packet.avPacket();
+
+ const auto streamIndex = avPacket.stream_index;
+ const auto stream = m_context->streams[streamIndex];
+
+ auto it = m_streams.find(streamIndex);
+ if (it != m_streams.end()) {
+ auto &streamData = it->second;
+
+ const auto endPos = packetEndPos(stream, packet);
+ m_maxPacketsEndPos = qMax(m_maxPacketsEndPos, endPos);
+
+ // Increase buffered metrics as the packet is sent downstream for processing.
+
+ streamData.bufferedDuration += streamTimeToUs(stream, avPacket.duration);
+ streamData.bufferedSize += avPacket.size;
+ streamData.maxSentPacketsPos = qMax(streamData.maxSentPacketsPos, endPos);
+ updateStreamDataLimitFlag(streamData);
+
+ if (!m_buffered && streamData.isDataLimitReached) {
+ m_buffered = true;
+ emit packetsBuffered();
+ }
+
+ if (!m_firstPacketFound) {
+ m_firstPacketFound = true;
+ const auto pos = streamTimeToUs(stream, avPacket.pts);
+ emit firstPacketFound(std::chrono::steady_clock::now(), pos);
+ }
+
+ auto signal = signalByTrackType(it->second.trackType);
+ emit (this->*signal)(packet);
+ }
+
+ scheduleNextStep(false);
+}
+
+void Demuxer::onPacketProcessed(Packet packet)
+{
+ Q_ASSERT(packet.isValid());
+
+ if (packet.sourceId() != id())
+ return;
+
+ auto &avPacket = *packet.avPacket();
+
+ const auto streamIndex = avPacket.stream_index;
+ const auto stream = m_context->streams[streamIndex];
+ auto it = m_streams.find(streamIndex);
+
+ if (it != m_streams.end()) {
+ auto &streamData = it->second;
+
+ // Decrease buffered metrics as the packet has been processed downstream.
+
+ streamData.bufferedDuration -= streamTimeToUs(stream, avPacket.duration);
+ streamData.bufferedSize -= avPacket.size;
+ streamData.maxProcessedPacketPos =
+ qMax(streamData.maxProcessedPacketPos, packetEndPos(stream, packet));
+
+ Q_ASSERT(it->second.bufferedDuration >= 0);
+ Q_ASSERT(it->second.bufferedSize >= 0);
+
+ updateStreamDataLimitFlag(streamData);
+ }
+
+ scheduleNextStep();
+}
+
+bool Demuxer::canDoNextStep() const
+{
+ auto isDataLimitReached = [](const auto &streamIndexToData) {
+ return streamIndexToData.second.isDataLimitReached;
+ };
+
+ // Demuxer waits:
+ // - if it's paused
+ // - if the end has been reached
+ // - if streams are empty (probably, should be handled on the initialization)
+ // - if at least one of the streams has reached the data limit (duration or size)
+
+ return PlaybackEngineObject::canDoNextStep() && !isAtEnd() && !m_streams.empty()
+ && std::none_of(m_streams.begin(), m_streams.end(), isDataLimitReached);
+}
+
+void Demuxer::ensureSeeked()
+{
+ if (std::exchange(m_seeked, true))
+ return;
+
+ if ((m_context->ctx_flags & AVFMTCTX_UNSEEKABLE) == 0) {
+ const qint64 seekPos = m_posWithOffset.pos * AV_TIME_BASE / 1000000;
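+ // Positions are in microseconds and AV_TIME_BASE is 1'000'000, so this
+ // conversion is numerically a no-op, kept explicit for unit clarity.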
+ auto err = av_seek_frame(m_context, -1, seekPos, AVSEEK_FLAG_BACKWARD);
+
+ if (err < 0) {
+ qCWarning(qLcDemuxer) << "Failed to seek, pos" << seekPos;
+
+ // Ignore errors when seeking to the initial position of streams with
+ // undefined duration. This needs improvement.
+ if (seekPos != 0 || m_context->duration > 0)
+ emit error(QMediaPlayer::ResourceError,
+ QLatin1StringView("Failed to seek: ") + err2str(err));
+ }
+ }
+
+ setAtEnd(false);
+}
+
+Demuxer::RequestingSignal Demuxer::signalByTrackType(QPlatformMediaPlayer::TrackType trackType)
+{
+ switch (trackType) {
+ case QPlatformMediaPlayer::TrackType::VideoStream:
+ return &Demuxer::requestProcessVideoPacket;
+ case QPlatformMediaPlayer::TrackType::AudioStream:
+ return &Demuxer::requestProcessAudioPacket;
+ case QPlatformMediaPlayer::TrackType::SubtitleStream:
+ return &Demuxer::requestProcessSubtitlePacket;
+ default:
+ Q_ASSERT(!"Unknown track type");
+ }
+
+ return nullptr;
+}
+
+void Demuxer::setLoops(int loopsCount)
+{
+ qCDebug(qLcDemuxer) << "setLoops to demuxer" << loopsCount;
+ m_loops.storeRelease(loopsCount);
+}
+
+void Demuxer::updateStreamDataLimitFlag(StreamData &streamData)
+{
+ const auto packetsPosDiff = streamData.maxSentPacketsPos - streamData.maxProcessedPacketPos;
+ streamData.isDataLimitReached =
+ streamData.bufferedDuration >= MaxBufferedDurationUs
+ || (streamData.bufferedDuration == 0 && packetsPosDiff >= MaxBufferedDurationUs)
+ || streamData.bufferedSize >= MaxBufferedSize;
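+
+ // In other words: roughly 4 s of buffered packets or 32 MiB of payload
+ // saturates a stream; streams reporting zero packet durations fall back to
+ // the span between the last sent and the last processed packet positions.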
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#include "moc_qffmpegdemuxer_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegdemuxer_p.h b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegdemuxer_p.h
new file mode 100644
index 000000000..b72056185
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegdemuxer_p.h
@@ -0,0 +1,87 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGDEMUXER_P_H
+#define QFFMPEGDEMUXER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "playbackengine/qffmpegplaybackengineobject_p.h"
+#include "private/qplatformmediaplayer_p.h"
+#include "playbackengine/qffmpegpacket_p.h"
+#include "playbackengine/qffmpegpositionwithoffset_p.h"
+
+#include <unordered_map>
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+class Demuxer : public PlaybackEngineObject
+{
+ Q_OBJECT
+public:
+ Demuxer(AVFormatContext *context, const PositionWithOffset &posWithOffset,
+ const StreamIndexes &streamIndexes, int loops);
+
+ using RequestingSignal = void (Demuxer::*)(Packet);
+ static RequestingSignal signalByTrackType(QPlatformMediaPlayer::TrackType trackType);
+
+ void setLoops(int loopsCount);
+
+public slots:
+ void onPacketProcessed(Packet);
+
+signals:
+ void requestProcessAudioPacket(Packet);
+ void requestProcessVideoPacket(Packet);
+ void requestProcessSubtitlePacket(Packet);
+ void firstPacketFound(TimePoint tp, qint64 trackPos);
+ void packetsBuffered();
+
+private:
+ bool canDoNextStep() const override;
+
+ void doNextStep() override;
+
+ void ensureSeeked();
+
+private:
+ struct StreamData
+ {
+ QPlatformMediaPlayer::TrackType trackType = QPlatformMediaPlayer::TrackType::NTrackTypes;
+ qint64 bufferedDuration = 0;
+ qint64 bufferedSize = 0;
+
+ qint64 maxSentPacketsPos = 0;
+ qint64 maxProcessedPacketPos = 0;
+
+ bool isDataLimitReached = false;
+ };
+
+ void updateStreamDataLimitFlag(StreamData &streamData);
+
+private:
+ AVFormatContext *m_context = nullptr;
+ bool m_seeked = false;
+ bool m_firstPacketFound = false;
+ std::unordered_map<int, StreamData> m_streams;
+ PositionWithOffset m_posWithOffset;
+ qint64 m_maxPacketsEndPos = 0;
+ QAtomicInt m_loops = QMediaPlayer::Once;
+ bool m_buffered = false;
+};
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGDEMUXER_P_H
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegframe_p.h b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegframe_p.h
new file mode 100644
index 000000000..84fe2fead
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegframe_p.h
@@ -0,0 +1,109 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGFRAME_P_H
+#define QFFMPEGFRAME_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qffmpeg_p.h"
+#include "playbackengine/qffmpegcodec_p.h"
+#include "playbackengine/qffmpegpositionwithoffset_p.h"
+#include "QtCore/qsharedpointer.h"
+#include "qpointer.h"
+#include "qobject.h"
+
+#include <optional>
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+struct Frame
+{
+ struct Data
+ {
+ Data(const LoopOffset &offset, AVFrameUPtr f, const Codec &codec, qint64 /*pts*/, quint64 sourceId)
+ : loopOffset(offset), codec(codec), frame(std::move(f)), sourceId(sourceId)
+ {
+ Q_ASSERT(frame);
+ if (frame->pts != AV_NOPTS_VALUE)
+ pts = codec.toUs(frame->pts);
+ else
+ pts = codec.toUs(frame->best_effort_timestamp);
+
+ if (auto frameDuration = getAVFrameDuration(*frame)) {
+ duration = codec.toUs(frameDuration);
+ } else {
+ const auto &avgFrameRate = codec.stream()->avg_frame_rate;
+ duration = mul(qint64(1000000), { avgFrameRate.den, avgFrameRate.num }).value_or(0);
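+ // e.g. an avg_frame_rate of 25/1 gives a fallback duration of
+ // 1'000'000 * 1/25 = 40'000 us per frame.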
+ }
+ }
+ Data(const LoopOffset &offset, const QString &text, qint64 pts, qint64 duration,
+ quint64 sourceId)
+ : loopOffset(offset), text(text), pts(pts), duration(duration), sourceId(sourceId)
+ {
+ }
+
+ QAtomicInt ref;
+ LoopOffset loopOffset;
+ std::optional<Codec> codec;
+ AVFrameUPtr frame;
+ QString text;
+ qint64 pts = -1;
+ qint64 duration = -1;
+ quint64 sourceId = 0;
+ };
+ Frame() = default;
+
+ Frame(const LoopOffset &offset, AVFrameUPtr f, const Codec &codec, qint64 pts,
+ quint64 sourceIndex)
+ : d(new Data(offset, std::move(f), codec, pts, sourceIndex))
+ {
+ }
+ Frame(const LoopOffset &offset, const QString &text, qint64 pts, qint64 duration,
+ quint64 sourceIndex)
+ : d(new Data(offset, text, pts, duration, sourceIndex))
+ {
+ }
+ bool isValid() const { return !!d; }
+
+ AVFrame *avFrame() const { return data().frame.get(); }
+ AVFrameUPtr takeAVFrame() { return std::move(data().frame); }
+ const Codec *codec() const { return data().codec ? &data().codec.value() : nullptr; }
+ qint64 pts() const { return data().pts; }
+ qint64 duration() const { return data().duration; }
+ qint64 end() const { return data().pts + data().duration; }
+ QString text() const { return data().text; }
+ quint64 sourceId() const { return data().sourceId; }
+ const LoopOffset &loopOffset() const { return data().loopOffset; }
+ qint64 absolutePts() const { return pts() + loopOffset().pos; }
+ qint64 absoluteEnd() const { return end() + loopOffset().pos; }
+
+private:
+ Data &data() const
+ {
+ Q_ASSERT(d);
+ return *d;
+ }
+
+private:
+ QExplicitlySharedDataPointer<Data> d;
+};
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+Q_DECLARE_METATYPE(QFFmpeg::Frame);
+
+#endif // QFFMPEGFRAME_P_H
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegmediadataholder.cpp b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegmediadataholder.cpp
new file mode 100644
index 000000000..f92f93ddb
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegmediadataholder.cpp
@@ -0,0 +1,390 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "playbackengine/qffmpegmediadataholder_p.h"
+
+#include "qffmpegmediametadata_p.h"
+#include "qffmpegmediaformatinfo_p.h"
+#include "qffmpegioutils_p.h"
+#include "qiodevice.h"
+#include "qdatetime.h"
+#include "qloggingcategory.h"
+
+#include <math.h>
+#include <optional>
+
+extern "C" {
+#include "libavutil/display.h"
+}
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcMediaDataHolder, "qt.multimedia.ffmpeg.mediadataholder")
+
+namespace QFFmpeg {
+
+static std::optional<qint64> streamDuration(const AVStream &stream)
+{
+ const auto &factor = stream.time_base;
+
+ if (stream.duration > 0 && factor.num > 0 && factor.den > 0) {
+ return qint64(1000000) * stream.duration * factor.num / factor.den;
+ }
+
+ // In some cases FFmpeg reports a negative duration, which is definitely invalid.
+ // However, the correct duration may be read from the metadata.
+
+ if (stream.duration < 0) {
+ qCWarning(qLcMediaDataHolder) << "AVStream duration" << stream.duration
+ << "is invalid. Taking it from the metadata";
+ }
+
+ if (const auto duration = av_dict_get(stream.metadata, "DURATION", nullptr, 0)) {
+ const auto time = QTime::fromString(QString::fromUtf8(duration->value));
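+ // e.g. a Matroska-style tag DURATION="00:42:07.640" is expected to parse
+ // to 00:42:07.640, i.e. 2527640 ms since the start of day, returned below
+ // in microseconds.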
+ return qint64(1000) * time.msecsSinceStartOfDay();
+ }
+
+ return {};
+}
+
+static int streamOrientation(const AVStream *stream)
+{
+ Q_ASSERT(stream);
+
+ using SideDataSize = decltype(AVPacketSideData::size);
+ constexpr SideDataSize displayMatrixSize = sizeof(int32_t) * 9;
+ const auto *sideData = streamSideData(stream, AV_PKT_DATA_DISPLAYMATRIX);
+ if (!sideData || sideData->size < displayMatrixSize)
+ return 0;
+
+ auto displayMatrix = reinterpret_cast<const int32_t *>(sideData->data);
+ auto rotation = static_cast<int>(std::round(av_display_rotation_get(displayMatrix)));
+ // Convert counterclockwise rotation angle to clockwise, restricted to 0, 90, 180 and 270
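+ // e.g. av_display_rotation_get() == 90 (90 degrees counterclockwise) maps
+ // to a clockwise 270 below, while -90 maps to 90.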
+ if (rotation % 90 != 0)
+ return 0;
+ return rotation < 0 ? -rotation % 360 : -rotation % 360 + 360;
+}
+
+
+static bool colorTransferSupportsHdr(const AVStream *stream)
+{
+ if (!stream)
+ return false;
+
+ const AVCodecParameters *codecPar = stream->codecpar;
+ if (!codecPar)
+ return false;
+
+ const QVideoFrameFormat::ColorTransfer colorTransfer = fromAvColorTransfer(codecPar->color_trc);
+
+ // Assume that content is using HDR if the color transfer supports high
+ // dynamic range. The video may still not utilize the extended range,
+ // but we can't determine the actual range without decoding frames.
+ return colorTransfer == QVideoFrameFormat::ColorTransfer_ST2084
+ || colorTransfer == QVideoFrameFormat::ColorTransfer_STD_B67;
+}
+
+QtVideo::Rotation MediaDataHolder::rotation() const
+{
+ int orientation = m_metaData.value(QMediaMetaData::Orientation).toInt();
+ return static_cast<QtVideo::Rotation>(orientation);
+}
+
+AVFormatContext *MediaDataHolder::avContext()
+{
+ return m_context.get();
+}
+
+int MediaDataHolder::currentStreamIndex(QPlatformMediaPlayer::TrackType trackType) const
+{
+ return m_currentAVStreamIndex[trackType];
+}
+
+static void insertMediaData(QMediaMetaData &metaData, QPlatformMediaPlayer::TrackType trackType,
+ const AVStream *stream)
+{
+ Q_ASSERT(stream);
+ const auto *codecPar = stream->codecpar;
+
+ switch (trackType) {
+ case QPlatformMediaPlayer::VideoStream:
+ metaData.insert(QMediaMetaData::VideoBitRate, (int)codecPar->bit_rate);
+ metaData.insert(QMediaMetaData::VideoCodec,
+ QVariant::fromValue(QFFmpegMediaFormatInfo::videoCodecForAVCodecId(
+ codecPar->codec_id)));
+ metaData.insert(QMediaMetaData::Resolution, QSize(codecPar->width, codecPar->height));
+ metaData.insert(QMediaMetaData::VideoFrameRate,
+ qreal(stream->avg_frame_rate.num) / qreal(stream->avg_frame_rate.den));
+ metaData.insert(QMediaMetaData::Orientation, QVariant::fromValue(streamOrientation(stream)));
+ metaData.insert(QMediaMetaData::HasHdrContent, colorTransferSupportsHdr(stream));
+ break;
+ case QPlatformMediaPlayer::AudioStream:
+ metaData.insert(QMediaMetaData::AudioBitRate, (int)codecPar->bit_rate);
+ metaData.insert(QMediaMetaData::AudioCodec,
+ QVariant::fromValue(QFFmpegMediaFormatInfo::audioCodecForAVCodecId(
+ codecPar->codec_id)));
+ break;
+ default:
+ break;
+ }
+}
+
+QPlatformMediaPlayer::TrackType MediaDataHolder::trackTypeFromMediaType(int mediaType)
+{
+ switch (mediaType) {
+ case AVMEDIA_TYPE_AUDIO:
+ return QPlatformMediaPlayer::AudioStream;
+ case AVMEDIA_TYPE_VIDEO:
+ return QPlatformMediaPlayer::VideoStream;
+ case AVMEDIA_TYPE_SUBTITLE:
+ return QPlatformMediaPlayer::SubtitleStream;
+ default:
+ return QPlatformMediaPlayer::NTrackTypes;
+ }
+}
+
+namespace {
+QMaybe<AVFormatContextUPtr, MediaDataHolder::ContextError>
+loadMedia(const QUrl &mediaUrl, QIODevice *stream, const std::shared_ptr<ICancelToken> &cancelToken)
+{
+ const QByteArray url = mediaUrl.toString(QUrl::PreferLocalFile).toUtf8();
+
+ AVFormatContextUPtr context{ avformat_alloc_context() };
+
+ if (stream) {
+ if (!stream->isOpen()) {
+ if (!stream->open(QIODevice::ReadOnly))
+ return MediaDataHolder::ContextError{
+ QMediaPlayer::ResourceError, QLatin1String("Could not open source device.")
+ };
+ }
+ if (!stream->isSequential())
+ stream->seek(0);
+
+ constexpr int bufferSize = 32768;
+ unsigned char *buffer = (unsigned char *)av_malloc(bufferSize);
+ context->pb = avio_alloc_context(buffer, bufferSize, false, stream, &readQIODevice, nullptr,
+ &seekQIODevice);
+ }
+
+ AVDictionaryHolder dict;
+ constexpr auto NetworkTimeoutUs = "5000000";
+ av_dict_set(dict, "timeout", NetworkTimeoutUs, 0);
+
+ const QByteArray protocolWhitelist = qgetenv("QT_FFMPEG_PROTOCOL_WHITELIST");
+ if (!protocolWhitelist.isNull())
+ av_dict_set(dict, "protocol_whitelist", protocolWhitelist.data(), 0);
+
+ context->interrupt_callback.opaque = cancelToken.get();
+ context->interrupt_callback.callback = [](void *opaque) {
+ const auto *cancelToken = static_cast<const ICancelToken *>(opaque);
+ if (cancelToken && cancelToken->isCancelled())
+ return 1;
+ return 0;
+ };
+
+ int ret = 0;
+ {
+ AVFormatContext *contextRaw = context.release();
+ ret = avformat_open_input(&contextRaw, url.constData(), nullptr, dict);
+ context.reset(contextRaw);
+ }
+
+ if (ret < 0) {
+ auto code = QMediaPlayer::ResourceError;
+ if (ret == AVERROR(EACCES))
+ code = QMediaPlayer::AccessDeniedError;
+ else if (ret == AVERROR(EINVAL))
+ code = QMediaPlayer::FormatError;
+
+ return MediaDataHolder::ContextError{ code, QMediaPlayer::tr("Could not open file") };
+ }
+
+ ret = avformat_find_stream_info(context.get(), nullptr);
+ if (ret < 0) {
+ return MediaDataHolder::ContextError{
+ QMediaPlayer::FormatError,
+ QMediaPlayer::tr("Could not find stream information for media file")
+ };
+ }
+
+#ifndef QT_NO_DEBUG
+ av_dump_format(context.get(), 0, url.constData(), 0);
+#endif
+ return context;
+}
+
+} // namespace
+
+MediaDataHolder::Maybe MediaDataHolder::create(const QUrl &url, QIODevice *stream,
+ const std::shared_ptr<ICancelToken> &cancelToken)
+{
+ QMaybe context = loadMedia(url, stream, cancelToken);
+ if (context) {
+ // MediaDataHolder is wrapped in a shared pointer to interop with signal/slot mechanism
+ return QSharedPointer<MediaDataHolder>{ new MediaDataHolder{ std::move(context.value()), cancelToken } };
+ }
+ return context.error();
+}
+
+MediaDataHolder::MediaDataHolder(AVFormatContextUPtr context,
+ const std::shared_ptr<ICancelToken> &cancelToken)
+ : m_cancelToken{ cancelToken }
+{
+ Q_ASSERT(context);
+
+ m_context = std::move(context);
+ m_isSeekable = !(m_context->ctx_flags & AVFMTCTX_UNSEEKABLE);
+
+ for (unsigned int i = 0; i < m_context->nb_streams; ++i) {
+
+ const auto *stream = m_context->streams[i];
+ const auto trackType = trackTypeFromMediaType(stream->codecpar->codec_type);
+
+ if (trackType == QPlatformMediaPlayer::NTrackTypes)
+ continue;
+
+ if (stream->disposition & AV_DISPOSITION_ATTACHED_PIC)
+ continue; // Ignore attached picture streams because we treat them as metadata
+
+ auto metaData = QFFmpegMetaData::fromAVMetaData(stream->metadata);
+ const bool isDefault = stream->disposition & AV_DISPOSITION_DEFAULT;
+
+ if (trackType != QPlatformMediaPlayer::SubtitleStream) {
+ insertMediaData(metaData, trackType, stream);
+
+ if (isDefault && m_requestedStreams[trackType] < 0)
+ m_requestedStreams[trackType] = m_streamMap[trackType].size();
+ }
+
+ if (auto duration = streamDuration(*stream)) {
+ m_duration = qMax(m_duration, *duration);
+ metaData.insert(QMediaMetaData::Duration, *duration / qint64(1000));
+ }
+
+ m_streamMap[trackType].append({ (int)i, isDefault, metaData });
+ }
+
+ // With some media files, streams may be lacking duration info. Let's
+ // get it from ffmpeg's duration estimation instead.
+ if (m_duration == 0 && m_context->duration > 0ll) {
+ m_duration = m_context->duration;
+ }
+
+ for (auto trackType :
+ { QPlatformMediaPlayer::VideoStream, QPlatformMediaPlayer::AudioStream }) {
+ auto &requestedStream = m_requestedStreams[trackType];
+ auto &streamMap = m_streamMap[trackType];
+
+ if (requestedStream < 0 && !streamMap.empty())
+ requestedStream = 0;
+
+ if (requestedStream >= 0)
+ m_currentAVStreamIndex[trackType] = streamMap[requestedStream].avStreamIndex;
+ }
+
+ updateMetaData();
+}
+
+namespace {
+
+/*!
+ \internal
+
+ Attempt to find an attached picture from the context's streams.
+ This will find ID3v2 pictures on audio files, and also pictures
+ attached to videos.
+ */
+QImage getAttachedPicture(const AVFormatContext *context)
+{
+ if (!context)
+ return {};
+
+ for (unsigned int i = 0; i < context->nb_streams; ++i) {
+ const AVStream* stream = context->streams[i];
+ if (!stream || !(stream->disposition & AV_DISPOSITION_ATTACHED_PIC))
+ continue;
+
+ const AVPacket *compressedImage = &stream->attached_pic;
+ if (!compressedImage || !compressedImage->data || compressedImage->size <= 0)
+ continue;
+
+ // Feed raw compressed data to QImage::fromData, which will decompress it
+ // if it is a recognized format.
+ QImage image = QImage::fromData({ compressedImage->data, compressedImage->size });
+ if (!image.isNull())
+ return image;
+ }
+
+ return {};
+}
+
+} // namespace
+
+void MediaDataHolder::updateMetaData()
+{
+ m_metaData = {};
+
+ if (!m_context)
+ return;
+
+ m_metaData = QFFmpegMetaData::fromAVMetaData(m_context->metadata);
+ m_metaData.insert(QMediaMetaData::FileFormat,
+ QVariant::fromValue(QFFmpegMediaFormatInfo::fileFormatForAVInputFormat(
+ m_context->iformat)));
+ m_metaData.insert(QMediaMetaData::Duration, m_duration / qint64(1000));
+
+ if (!m_cachedThumbnail.has_value())
+ m_cachedThumbnail = getAttachedPicture(m_context.get());
+
+ if (!m_cachedThumbnail->isNull())
+ m_metaData.insert(QMediaMetaData::ThumbnailImage, m_cachedThumbnail.value());
+
+ for (auto trackType :
+ { QPlatformMediaPlayer::AudioStream, QPlatformMediaPlayer::VideoStream }) {
+ const auto streamIndex = m_currentAVStreamIndex[trackType];
+ if (streamIndex >= 0)
+ insertMediaData(m_metaData, trackType, m_context->streams[streamIndex]);
+ }
+}
+
+bool MediaDataHolder::setActiveTrack(QPlatformMediaPlayer::TrackType type, int streamNumber)
+{
+ if (!m_context)
+ return false;
+
+ if (streamNumber < 0 || streamNumber >= m_streamMap[type].size())
+ streamNumber = -1;
+ if (m_requestedStreams[type] == streamNumber)
+ return false;
+ m_requestedStreams[type] = streamNumber;
+ const int avStreamIndex = m_streamMap[type].value(streamNumber).avStreamIndex;
+
+ const int oldIndex = m_currentAVStreamIndex[type];
+ qCDebug(qLcMediaDataHolder) << ">>>>> change track" << type << "from" << oldIndex << "to"
+ << avStreamIndex;
+
+ // TODO: maybe add additional verifications
+ m_currentAVStreamIndex[type] = avStreamIndex;
+
+ updateMetaData();
+
+ return true;
+}
+
+int MediaDataHolder::activeTrack(QPlatformMediaPlayer::TrackType type) const
+{
+ return type < QPlatformMediaPlayer::NTrackTypes ? m_requestedStreams[type] : -1;
+}
+
+const QList<MediaDataHolder::StreamInfo> &MediaDataHolder::streamInfo(
+ QPlatformMediaPlayer::TrackType trackType) const
+{
+ Q_ASSERT(trackType < QPlatformMediaPlayer::NTrackTypes);
+
+ return m_streamMap[trackType];
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
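
A hypothetical caller of MediaDataHolder::create() would look roughly like the
sketch below. This is editorial illustration only, not part of the patch; the
DummyCancelToken type and openMedia() function are invented names, and the
QMaybe accessors (value(), error()) are used just as this file itself uses them.

    struct DummyCancelToken : QFFmpeg::ICancelToken
    {
        bool isCancelled() const override { return false; }
    };

    void openMedia(const QUrl &url)
    {
        auto token = std::make_shared<DummyCancelToken>();
        const QFFmpeg::MediaDataHolder::Maybe result =
                QFFmpeg::MediaDataHolder::create(url, /*stream=*/nullptr, token);
        if (!result) {
            qWarning() << "open failed:" << result.error().description;
            return;
        }
        qDebug() << "duration (us):" << result.value()->duration()
                 << "seekable:" << result.value()->isSeekable();
    }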
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegmediadataholder_p.h b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegmediadataholder_p.h
new file mode 100644
index 000000000..a55b0766a
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegmediadataholder_p.h
@@ -0,0 +1,107 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGMEDIADATAHOLDER_P_H
+#define QFFMPEGMEDIADATAHOLDER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qmediametadata.h"
+#include "private/qplatformmediaplayer_p.h"
+#include "qffmpeg_p.h"
+#include "qvideoframe.h"
+#include <private/qmultimediautils_p.h>
+
+#include <array>
+#include <optional>
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+struct ICancelToken
+{
+ virtual ~ICancelToken() = default;
+ virtual bool isCancelled() const = 0;
+};
+
+using AVFormatContextUPtr = std::unique_ptr<AVFormatContext, AVDeleter<decltype(&avformat_close_input), &avformat_close_input>>;
+
+class MediaDataHolder
+{
+public:
+ struct StreamInfo
+ {
+ int avStreamIndex = -1;
+ bool isDefault = false;
+ QMediaMetaData metaData;
+ };
+
+ struct ContextError
+ {
+ int code = 0;
+ QString description;
+ };
+
+ using StreamsMap = std::array<QList<StreamInfo>, QPlatformMediaPlayer::NTrackTypes>;
+ using StreamIndexes = std::array<int, QPlatformMediaPlayer::NTrackTypes>;
+
+ MediaDataHolder() = default;
+ MediaDataHolder(AVFormatContextUPtr context, const std::shared_ptr<ICancelToken> &cancelToken);
+
+ static QPlatformMediaPlayer::TrackType trackTypeFromMediaType(int mediaType);
+
+ int activeTrack(QPlatformMediaPlayer::TrackType type) const;
+
+ const QList<StreamInfo> &streamInfo(QPlatformMediaPlayer::TrackType trackType) const;
+
+ qint64 duration() const { return m_duration; }
+
+ const QMediaMetaData &metaData() const { return m_metaData; }
+
+ bool isSeekable() const { return m_isSeekable; }
+
+ QtVideo::Rotation rotation() const;
+
+ AVFormatContext *avContext();
+
+ int currentStreamIndex(QPlatformMediaPlayer::TrackType trackType) const;
+
+ using Maybe = QMaybe<QSharedPointer<MediaDataHolder>, ContextError>;
+ static Maybe create(const QUrl &url, QIODevice *stream,
+ const std::shared_ptr<ICancelToken> &cancelToken);
+
+ bool setActiveTrack(QPlatformMediaPlayer::TrackType type, int streamNumber);
+
+private:
+ void updateMetaData();
+
+ std::shared_ptr<ICancelToken> m_cancelToken; // NOTE: Cancel token may be accessed by
+ // AVFormatContext during destruction and
+ // must outlive the context object
+ AVFormatContextUPtr m_context;
+
+ bool m_isSeekable = false;
+
+ StreamIndexes m_currentAVStreamIndex = { -1, -1, -1 };
+ StreamsMap m_streamMap;
+ StreamIndexes m_requestedStreams = { -1, -1, -1 };
+ qint64 m_duration = 0;
+ QMediaMetaData m_metaData;
+ std::optional<QImage> m_cachedThumbnail;
+};
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGMEDIADATAHOLDER_P_H
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegpacket_p.h b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegpacket_p.h
new file mode 100644
index 000000000..5e15bf012
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegpacket_p.h
@@ -0,0 +1,61 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGPACKET_P_H
+#define QFFMPEGPACKET_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qffmpeg_p.h"
+#include "QtCore/qsharedpointer.h"
+#include "playbackengine/qffmpegpositionwithoffset_p.h"
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+struct Packet
+{
+ struct Data
+ {
+ Data(const LoopOffset &offset, AVPacketUPtr p, quint64 sourceId)
+ : loopOffset(offset), packet(std::move(p)), sourceId(sourceId)
+ {
+ }
+
+ QAtomicInt ref;
+ LoopOffset loopOffset;
+ AVPacketUPtr packet;
+ quint64 sourceId;
+ };
+ Packet() = default;
+ Packet(const LoopOffset &offset, AVPacketUPtr p, quint64 sourceId)
+ : d(new Data(offset, std::move(p), sourceId))
+ {
+ }
+
+ bool isValid() const { return !!d; }
+ AVPacket *avPacket() const { return d->packet.get(); }
+ const LoopOffset &loopOffset() const { return d->loopOffset; }
+ quint64 sourceId() const { return d->sourceId; }
+
+private:
+ QExplicitlySharedDataPointer<Data> d;
+};
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+Q_DECLARE_METATYPE(QFFmpeg::Packet)
+
+#endif // QFFMPEGPACKET_P_H
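
Packet is an explicitly shared handle: copies are cheap, and the wrapped
AVPacket is released together with the last copy, which is what makes it safe
to pass through queued signal/slot connections. A minimal sketch of
constructing one, assuming AVPacketUPtr (from qffmpeg_p.h) owns and frees the
packet; makePacket() is an illustrative helper, not part of the patch:

    QFFmpeg::Packet makePacket(AVPacket *raw, quint64 sourceId)
    {
        QFFmpeg::AVPacketUPtr owned(raw); // Packet::Data takes ownership
        return QFFmpeg::Packet({ /*pos=*/ 0, /*index=*/ 0 }, std::move(owned), sourceId);
    }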
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegplaybackenginedefs_p.h b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegplaybackenginedefs_p.h
new file mode 100644
index 000000000..18254ef64
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegplaybackenginedefs_p.h
@@ -0,0 +1,46 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGPLAYBACKENGINEDEFS_P_H
+#define QFFMPEGPLAYBACKENGINEDEFS_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+#include "qobject.h"
+#include "qpointer.h"
+
+#include <memory>
+#include <array>
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+class PlaybackEngine;
+}
+
+namespace QFFmpeg {
+
+using StreamIndexes = std::array<int, 3>;
+
+class PlaybackEngineObjectsController;
+class PlaybackEngineObject;
+class Demuxer;
+class StreamDecoder;
+class Renderer;
+class SubtitleRenderer;
+class AudioRenderer;
+class VideoRenderer;
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGPLAYBACKENGINEDEFS_P_H
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegplaybackengineobject.cpp b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegplaybackengineobject.cpp
new file mode 100644
index 000000000..2d23802de
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegplaybackengineobject.cpp
@@ -0,0 +1,109 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "playbackengine/qffmpegplaybackengineobject_p.h"
+
+#include "qtimer.h"
+#include "qdebug.h"
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+static QAtomicInteger<PlaybackEngineObject::Id> PersistentId = 0;
+
+PlaybackEngineObject::PlaybackEngineObject() : m_id(PersistentId.fetchAndAddRelaxed(1)) { }
+
+PlaybackEngineObject::~PlaybackEngineObject()
+{
+ if (thread() != QThread::currentThread())
+ qWarning() << "The playback engine object is being removed in an unexpected thread";
+}
+
+bool PlaybackEngineObject::isPaused() const
+{
+ return m_paused;
+}
+
+void PlaybackEngineObject::setAtEnd(bool isAtEnd)
+{
+ if (m_atEnd.testAndSetRelease(!isAtEnd, isAtEnd) && isAtEnd)
+ emit atEnd();
+}
+
+bool PlaybackEngineObject::isAtEnd() const
+{
+ return m_atEnd;
+}
+
+PlaybackEngineObject::Id PlaybackEngineObject::id() const
+{
+ return m_id;
+}
+
+void PlaybackEngineObject::setPaused(bool isPaused)
+{
+ if (m_paused.testAndSetRelease(!isPaused, isPaused))
+ QMetaObject::invokeMethod(this, &PlaybackEngineObject::onPauseChanged);
+}
+
+void PlaybackEngineObject::kill()
+{
+ m_deleting.storeRelease(true);
+
+ disconnect();
+ deleteLater();
+}
+
+bool PlaybackEngineObject::canDoNextStep() const
+{
+ return !m_paused;
+}
+
+QTimer &PlaybackEngineObject::timer()
+{
+ if (!m_timer) {
+ m_timer = std::make_unique<QTimer>();
+ m_timer->setTimerType(Qt::PreciseTimer);
+ m_timer->setSingleShot(true);
+ connect(m_timer.get(), &QTimer::timeout, this, &PlaybackEngineObject::onTimeout);
+ }
+
+ return *m_timer;
+}
+
+void PlaybackEngineObject::onTimeout()
+{
+ if (!m_deleting && canDoNextStep())
+ doNextStep();
+}
+
+int PlaybackEngineObject::timerInterval() const
+{
+ return 0;
+}
+
+void PlaybackEngineObject::onPauseChanged()
+{
+ scheduleNextStep();
+}
+
+void PlaybackEngineObject::scheduleNextStep(bool allowDoImmediately)
+{
+ if (!m_deleting && canDoNextStep()) {
+ const auto interval = timerInterval();
+        if (interval == 0 && allowDoImmediately) {
+ timer().stop();
+ doNextStep();
+ } else {
+ timer().start(interval);
+ }
+ } else {
+ timer().stop();
+ }
+}
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#include "moc_qffmpegplaybackengineobject_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegplaybackengineobject_p.h b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegplaybackengineobject_p.h
new file mode 100644
index 000000000..02943a55b
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegplaybackengineobject_p.h
@@ -0,0 +1,84 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGPLAYBACKENGINEOBJECT_P_H
+#define QFFMPEGPLAYBACKENGINEOBJECT_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "playbackengine/qffmpegplaybackenginedefs_p.h"
+#include "qthread.h"
+#include "qatomic.h"
+
+QT_BEGIN_NAMESPACE
+
+class QTimer;
+
+namespace QFFmpeg {
+
+class PlaybackEngineObject : public QObject
+{
+ Q_OBJECT
+public:
+ using TimePoint = std::chrono::steady_clock::time_point;
+ using TimePointOpt = std::optional<TimePoint>;
+ using Id = quint64;
+
+ PlaybackEngineObject();
+
+ ~PlaybackEngineObject();
+
+ bool isPaused() const;
+
+ bool isAtEnd() const;
+
+ void kill();
+
+ void setPaused(bool isPaused);
+
+ Id id() const;
+
+signals:
+ void atEnd();
+
+ void error(int code, const QString &errorString);
+
+protected:
+ QTimer &timer();
+
+    void scheduleNextStep(bool allowDoImmediately = true);
+
+ virtual void onPauseChanged();
+
+ virtual bool canDoNextStep() const;
+
+ virtual int timerInterval() const;
+
+ void setAtEnd(bool isAtEnd);
+
+ virtual void doNextStep() { }
+
+private slots:
+ void onTimeout();
+
+private:
+ std::unique_ptr<QTimer> m_timer;
+
+ QAtomicInteger<bool> m_paused = true;
+ QAtomicInteger<bool> m_atEnd = false;
+ QAtomicInteger<bool> m_deleting = false;
+ const Id m_id;
+};
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGPLAYBACKENGINEOBJECT_P_H
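
The stepping contract of PlaybackEngineObject can be summarized with a small
hypothetical subclass: work goes into doNextStep(), the base class drives it
through a single-shot timer, and unpausing re-arms the scheduling. A sketch
under those assumptions (CountingObject is an invented example class):

    class CountingObject : public QFFmpeg::PlaybackEngineObject
    {
        Q_OBJECT
    public:
        void start() { setPaused(false); } // triggers onPauseChanged -> scheduleNextStep
    private:
        void doNextStep() override
        {
            if (++m_steps >= 10) {
                setAtEnd(true);      // emits atEnd() exactly once
                return;
            }
            scheduleNextStep(false); // re-arm the timer instead of recursing
        }
        int m_steps = 0;
    };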
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegpositionwithoffset_p.h b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegpositionwithoffset_p.h
new file mode 100644
index 000000000..a30fdc119
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegpositionwithoffset_p.h
@@ -0,0 +1,40 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGPOSITIONWITHOFFSET_P_H
+#define QFFMPEGPOSITIONWITHOFFSET_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qtypes.h>
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+struct LoopOffset
+{
+ qint64 pos = 0;
+ int index = 0;
+};
+
+struct PositionWithOffset
+{
+ qint64 pos = 0;
+ LoopOffset offset;
+};
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGPOSITIONWITHOFFSET_P_H
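
The intent of these structs, judging from their use in the packet and frame
types, is that positions local to one loop iteration are mapped onto a
monotonically growing absolute timeline by adding the loop offset. A one-line
sketch of that assumed relation:

    qint64 absolutePosition(const QFFmpeg::PositionWithOffset &p)
    {
        return p.pos + p.offset.pos; // local position plus accumulated loop offset
    }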
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegrenderer.cpp b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegrenderer.cpp
new file mode 100644
index 000000000..e763c786b
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegrenderer.cpp
@@ -0,0 +1,216 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "playbackengine/qffmpegrenderer_p.h"
+#include <qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+static Q_LOGGING_CATEGORY(qLcRenderer, "qt.multimedia.ffmpeg.renderer");
+
+Renderer::Renderer(const TimeController &tc, const std::chrono::microseconds &seekPosTimeOffset)
+ : m_timeController(tc),
+ m_lastFrameEnd(tc.currentPosition()),
+ m_lastPosition(m_lastFrameEnd),
+ m_seekPos(tc.currentPosition(-seekPosTimeOffset))
+{
+}
+
+void Renderer::syncSoft(TimePoint tp, qint64 trackTime)
+{
+ QMetaObject::invokeMethod(this, [this, tp, trackTime]() {
+ m_timeController.syncSoft(tp, trackTime);
+ scheduleNextStep(true);
+ });
+}
+
+qint64 Renderer::seekPosition() const
+{
+ return m_seekPos;
+}
+
+qint64 Renderer::lastPosition() const
+{
+ return m_lastPosition;
+}
+
+void Renderer::setPlaybackRate(float rate)
+{
+ QMetaObject::invokeMethod(this, [this, rate]() {
+ m_timeController.setPlaybackRate(rate);
+ onPlaybackRateChanged();
+ scheduleNextStep();
+ });
+}
+
+void Renderer::doForceStep()
+{
+ if (m_isStepForced.testAndSetOrdered(false, true))
+ QMetaObject::invokeMethod(this, [this]() {
+ // maybe set m_forceStepMaxPos
+
+ if (isAtEnd()) {
+ setForceStepDone();
+ }
+ else {
+ m_explicitNextFrameTime = Clock::now();
+ scheduleNextStep();
+ }
+ });
+}
+
+bool Renderer::isStepForced() const
+{
+ return m_isStepForced;
+}
+
+void Renderer::setInitialPosition(TimePoint tp, qint64 trackPos)
+{
+ QMetaObject::invokeMethod(this, [this, tp, trackPos]() {
+ Q_ASSERT(m_loopIndex == 0);
+ Q_ASSERT(m_frames.empty());
+
+ m_loopIndex = 0;
+ m_lastPosition.storeRelease(trackPos);
+ m_seekPos.storeRelease(trackPos);
+
+ m_timeController.sync(tp, trackPos);
+ });
+}
+
+void Renderer::onFinalFrameReceived()
+{
+ render({});
+}
+
+void Renderer::render(Frame frame)
+{
+ const auto isFrameOutdated = frame.isValid() && frame.absoluteEnd() < seekPosition();
+
+ if (isFrameOutdated) {
+ qCDebug(qLcRenderer) << "frame outdated! absEnd:" << frame.absoluteEnd() << "absPts"
+ << frame.absolutePts() << "seekPos:" << seekPosition();
+ emit frameProcessed(frame);
+ return;
+ }
+
+ m_frames.enqueue(frame);
+
+ if (m_frames.size() == 1)
+ scheduleNextStep();
+}
+
+void Renderer::onPauseChanged()
+{
+ m_timeController.setPaused(isPaused());
+ PlaybackEngineObject::onPauseChanged();
+}
+
+bool Renderer::canDoNextStep() const
+{
+ return !m_frames.empty() && (m_isStepForced || PlaybackEngineObject::canDoNextStep());
+}
+
+float Renderer::playbackRate() const
+{
+ return m_timeController.playbackRate();
+}
+
+int Renderer::timerInterval() const
+{
+ if (m_frames.empty())
+ return 0;
+
+ auto calculateInterval = [](const TimePoint &nextTime) {
+ using namespace std::chrono;
+
+ const auto delay = nextTime - Clock::now();
+ return std::max(0, static_cast<int>(duration_cast<milliseconds>(delay).count()));
+ };
+
+ if (m_explicitNextFrameTime)
+ return calculateInterval(*m_explicitNextFrameTime);
+
+ if (m_frames.front().isValid())
+ return calculateInterval(m_timeController.timeFromPosition(m_frames.front().absolutePts()));
+
+ if (m_lastFrameEnd > 0)
+ return calculateInterval(m_timeController.timeFromPosition(m_lastFrameEnd));
+
+ return 0;
+}
+
+bool Renderer::setForceStepDone()
+{
+ if (!m_isStepForced.testAndSetOrdered(true, false))
+ return false;
+
+ m_explicitNextFrameTime.reset();
+ emit forceStepDone();
+ return true;
+}
+
+void Renderer::doNextStep()
+{
+ auto frame = m_frames.front();
+
+ if (setForceStepDone()) {
+ // if (frame.isValid() && frame.pts() > m_forceStepMaxPos) {
+ // scheduleNextStep(false);
+ // return;
+ // }
+ }
+
+ const auto result = renderInternal(frame);
+
+ if (result.done) {
+ m_explicitNextFrameTime.reset();
+ m_frames.dequeue();
+
+ if (frame.isValid()) {
+ m_lastPosition.storeRelease(std::max(frame.absolutePts(), lastPosition()));
+
+ // TODO: get rid of m_lastFrameEnd or m_seekPos
+ m_lastFrameEnd = frame.absoluteEnd();
+ m_seekPos.storeRelaxed(m_lastFrameEnd);
+
+ const auto loopIndex = frame.loopOffset().index;
+ if (m_loopIndex < loopIndex) {
+ m_loopIndex = loopIndex;
+ emit loopChanged(id(), frame.loopOffset().pos, m_loopIndex);
+ }
+
+ emit frameProcessed(frame);
+ } else {
+ m_lastPosition.storeRelease(std::max(m_lastFrameEnd, lastPosition()));
+ }
+ } else {
+ m_explicitNextFrameTime = Clock::now() + result.recheckInterval;
+ }
+
+ setAtEnd(result.done && !frame.isValid());
+
+ scheduleNextStep(false);
+}
+
+std::chrono::microseconds Renderer::frameDelay(const Frame &frame, TimePoint timePoint) const
+{
+ return std::chrono::duration_cast<std::chrono::microseconds>(
+ timePoint - m_timeController.timeFromPosition(frame.absolutePts()));
+}
+
+void Renderer::changeRendererTime(std::chrono::microseconds offset)
+{
+ const auto now = Clock::now();
+ const auto pos = m_timeController.positionFromTime(now);
+ m_timeController.sync(now + offset, pos);
+ emit synchronized(id(), now + offset, pos);
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#include "moc_qffmpegrenderer_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegrenderer_p.h b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegrenderer_p.h
new file mode 100644
index 000000000..99c5ef1b1
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegrenderer_p.h
@@ -0,0 +1,125 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGRENDERER_P_H
+#define QFFMPEGRENDERER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "playbackengine/qffmpegplaybackengineobject_p.h"
+#include "playbackengine/qffmpegtimecontroller_p.h"
+#include "playbackengine/qffmpegframe_p.h"
+
+#include <QtCore/qpointer.h>
+#include <QtCore/qqueue.h>
+
+#include <chrono>
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+class Renderer : public PlaybackEngineObject
+{
+ Q_OBJECT
+public:
+ using TimePoint = TimeController::TimePoint;
+ using Clock = TimeController::Clock;
+ Renderer(const TimeController &tc, const std::chrono::microseconds &seekPosTimeOffset = {});
+
+ void syncSoft(TimePoint tp, qint64 trackPos);
+
+ qint64 seekPosition() const;
+
+ qint64 lastPosition() const;
+
+ void setPlaybackRate(float rate);
+
+ void doForceStep();
+
+ bool isStepForced() const;
+
+public slots:
+ void setInitialPosition(TimePoint tp, qint64 trackPos);
+
+ void onFinalFrameReceived();
+
+ void render(Frame);
+
+signals:
+ void frameProcessed(Frame);
+
+ void synchronized(Id id, TimePoint tp, qint64 pos);
+
+ void forceStepDone();
+
+ void loopChanged(Id id, qint64 offset, int index);
+
+protected:
+ bool setForceStepDone();
+
+ void onPauseChanged() override;
+
+ bool canDoNextStep() const override;
+
+ int timerInterval() const override;
+
+ virtual void onPlaybackRateChanged() { }
+
+ struct RenderingResult
+ {
+ bool done = true;
+ std::chrono::microseconds recheckInterval = std::chrono::microseconds(0);
+ };
+
+ virtual RenderingResult renderInternal(Frame frame) = 0;
+
+ float playbackRate() const;
+
+ std::chrono::microseconds frameDelay(const Frame &frame,
+ TimePoint timePoint = Clock::now()) const;
+
+ void changeRendererTime(std::chrono::microseconds offset);
+
+ template<typename Output, typename ChangeHandler>
+ void setOutputInternal(QPointer<Output> &actual, Output *desired, ChangeHandler &&changeHandler)
+ {
+ const auto connectionType = thread() == QThread::currentThread()
+ ? Qt::AutoConnection
+ : Qt::BlockingQueuedConnection;
+ auto doer = [desired, changeHandler, &actual]() {
+ const auto prev = std::exchange(actual, desired);
+ if (prev != desired)
+ changeHandler(prev);
+ };
+ QMetaObject::invokeMethod(this, doer, connectionType);
+ }
+
+private:
+ void doNextStep() override;
+
+private:
+ TimeController m_timeController;
+ qint64 m_lastFrameEnd = 0;
+ QAtomicInteger<qint64> m_lastPosition = 0;
+ QAtomicInteger<qint64> m_seekPos = 0;
+
+ int m_loopIndex = 0;
+ QQueue<Frame> m_frames;
+
+ QAtomicInteger<bool> m_isStepForced = false;
+ std::optional<TimePoint> m_explicitNextFrameTime;
+};
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGRENDERER_P_H
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegstreamdecoder.cpp b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegstreamdecoder.cpp
new file mode 100644
index 000000000..2f40c53aa
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegstreamdecoder.cpp
@@ -0,0 +1,240 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "playbackengine/qffmpegstreamdecoder_p.h"
+#include "playbackengine/qffmpegmediadataholder_p.h"
+#include <qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcStreamDecoder, "qt.multimedia.ffmpeg.streamdecoder");
+
+namespace QFFmpeg {
+
+StreamDecoder::StreamDecoder(const Codec &codec, qint64 absSeekPos)
+ : m_codec(codec),
+ m_absSeekPos(absSeekPos),
+ m_trackType(MediaDataHolder::trackTypeFromMediaType(codec.context()->codec_type))
+{
+ qCDebug(qLcStreamDecoder) << "Create stream decoder, trackType" << m_trackType
+ << "absSeekPos:" << absSeekPos;
+ Q_ASSERT(m_trackType != QPlatformMediaPlayer::NTrackTypes);
+}
+
+StreamDecoder::~StreamDecoder()
+{
+ avcodec_flush_buffers(m_codec.context());
+}
+
+void StreamDecoder::onFinalPacketReceived()
+{
+ decode({});
+}
+
+void StreamDecoder::setInitialPosition(TimePoint, qint64 trackPos)
+{
+ m_absSeekPos = trackPos;
+}
+
+void StreamDecoder::decode(Packet packet)
+{
+ m_packets.enqueue(packet);
+
+ scheduleNextStep();
+}
+
+void StreamDecoder::doNextStep()
+{
+ auto packet = m_packets.dequeue();
+
+ auto decodePacket = [this](Packet packet) {
+ if (trackType() == QPlatformMediaPlayer::SubtitleStream)
+ decodeSubtitle(packet);
+ else
+ decodeMedia(packet);
+ };
+
+ if (packet.isValid() && packet.loopOffset().index != m_offset.index) {
+ decodePacket({});
+
+ qCDebug(qLcStreamDecoder) << "flush buffers due to new loop:" << packet.loopOffset().index;
+
+ avcodec_flush_buffers(m_codec.context());
+ m_offset = packet.loopOffset();
+ }
+
+ decodePacket(packet);
+
+ setAtEnd(!packet.isValid());
+
+ if (packet.isValid())
+ emit packetProcessed(packet);
+
+ scheduleNextStep(false);
+}
+
+QPlatformMediaPlayer::TrackType StreamDecoder::trackType() const
+{
+ return m_trackType;
+}
+
+qint32 StreamDecoder::maxQueueSize(QPlatformMediaPlayer::TrackType type)
+{
+ switch (type) {
+
+ case QPlatformMediaPlayer::VideoStream:
+ return 3;
+ case QPlatformMediaPlayer::AudioStream:
+ return 9;
+ case QPlatformMediaPlayer::SubtitleStream:
+ return 6; /*main packet and closing packet*/
+ default:
+ Q_UNREACHABLE_RETURN(-1);
+ }
+}
+
+void StreamDecoder::onFrameProcessed(Frame frame)
+{
+ if (frame.sourceId() != id())
+ return;
+
+ --m_pendingFramesCount;
+ Q_ASSERT(m_pendingFramesCount >= 0);
+
+ scheduleNextStep();
+}
+
+bool StreamDecoder::canDoNextStep() const
+{
+ const qint32 maxCount = maxQueueSize(m_trackType);
+
+ return !m_packets.empty() && m_pendingFramesCount < maxCount
+ && PlaybackEngineObject::canDoNextStep();
+}
+
+void StreamDecoder::onFrameFound(Frame frame)
+{
+ if (frame.isValid() && frame.absoluteEnd() < m_absSeekPos)
+ return;
+
+ Q_ASSERT(m_pendingFramesCount >= 0);
+ ++m_pendingFramesCount;
+ emit requestHandleFrame(frame);
+}
+
+void StreamDecoder::decodeMedia(Packet packet)
+{
+ auto sendPacketResult = sendAVPacket(packet);
+
+ if (sendPacketResult == AVERROR(EAGAIN)) {
+ // Doc says:
+ // AVERROR(EAGAIN): input is not accepted in the current state - user
+ // must read output with avcodec_receive_frame() (once
+ // all output is read, the packet should be resent, and
+ // the call will not fail with EAGAIN).
+ receiveAVFrames();
+ sendPacketResult = sendAVPacket(packet);
+
+ if (sendPacketResult != AVERROR(EAGAIN))
+ qWarning() << "Unexpected FFmpeg behavior";
+ }
+
+ if (sendPacketResult == 0)
+ receiveAVFrames();
+}
+
+int StreamDecoder::sendAVPacket(Packet packet)
+{
+ return avcodec_send_packet(m_codec.context(), packet.isValid() ? packet.avPacket() : nullptr);
+}
+
+void StreamDecoder::receiveAVFrames()
+{
+ while (true) {
+ auto avFrame = makeAVFrame();
+
+ const auto receiveFrameResult = avcodec_receive_frame(m_codec.context(), avFrame.get());
+
+ if (receiveFrameResult == AVERROR_EOF || receiveFrameResult == AVERROR(EAGAIN))
+ break;
+
+ if (receiveFrameResult < 0) {
+ emit error(QMediaPlayer::FormatError, err2str(receiveFrameResult));
+ break;
+ }
+
+ onFrameFound({ m_offset, std::move(avFrame), m_codec, 0, id() });
+ }
+}
+
+void StreamDecoder::decodeSubtitle(Packet packet)
+{
+ if (!packet.isValid())
+ return;
+ // qCDebug(qLcDecoder) << " decoding subtitle" << "has delay:" <<
+ // (codec->codec->capabilities & AV_CODEC_CAP_DELAY);
+ AVSubtitle subtitle;
+ memset(&subtitle, 0, sizeof(subtitle));
+ int gotSubtitle = 0;
+
+ const int res =
+ avcodec_decode_subtitle2(m_codec.context(), &subtitle, &gotSubtitle, packet.avPacket());
+ // qCDebug(qLcDecoder) << " subtitle got:" << res << gotSubtitle << subtitle.format <<
+ // Qt::hex << (quint64)subtitle.pts;
+ if (res < 0 || !gotSubtitle)
+ return;
+
+    // Apparently the timestamps in the AVSubtitle structure are not always
+    // filled in; if they are missing, use the packet's pts and duration
+    // values instead.
+ qint64 start, end;
+ if (subtitle.pts == AV_NOPTS_VALUE) {
+ start = m_codec.toUs(packet.avPacket()->pts);
+ end = start + m_codec.toUs(packet.avPacket()->duration);
+ } else {
+ auto pts = timeStampUs(subtitle.pts, AVRational{ 1, AV_TIME_BASE });
+ start = *pts + qint64(subtitle.start_display_time) * 1000;
+ end = *pts + qint64(subtitle.end_display_time) * 1000;
+ }
+
+ if (end <= start) {
+ qWarning() << "Invalid subtitle time";
+ return;
+ }
+ // qCDebug(qLcDecoder) << " got subtitle (" << start << "--" << end << "):";
+ QString text;
+ for (uint i = 0; i < subtitle.num_rects; ++i) {
+ const auto *r = subtitle.rects[i];
+ // qCDebug(qLcDecoder) << " subtitletext:" << r->text << "/" << r->ass;
+ if (i)
+ text += QLatin1Char('\n');
+ if (r->text)
+ text += QString::fromUtf8(r->text);
+        else {
+            // An ASS event line carries eight comma-separated header fields
+            // before the text; skip past them to reach the plain text.
+            const char *ass = r->ass;
+ int nCommas = 0;
+ while (*ass) {
+ if (nCommas == 8)
+ break;
+ if (*ass == ',')
+ ++nCommas;
+ ++ass;
+ }
+ text += QString::fromUtf8(ass);
+ }
+ }
+ text.replace(QLatin1String("\\N"), QLatin1String("\n"));
+ text.replace(QLatin1String("\\n"), QLatin1String("\n"));
+ text.replace(QLatin1String("\r\n"), QLatin1String("\n"));
+ if (text.endsWith(QLatin1Char('\n')))
+ text.chop(1);
+
+ onFrameFound({ m_offset, text, start, end - start, id() });
+
+ // TODO: maybe optimize
+ onFrameFound({ m_offset, QString(), end, 0, id() });
+}
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#include "moc_qffmpegstreamdecoder_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegstreamdecoder_p.h b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegstreamdecoder_p.h
new file mode 100644
index 000000000..1acc07983
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegstreamdecoder_p.h
@@ -0,0 +1,87 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGSTREAMDECODER_P_H
+#define QFFMPEGSTREAMDECODER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+#include "playbackengine/qffmpegplaybackengineobject_p.h"
+#include "playbackengine/qffmpegframe_p.h"
+#include "playbackengine/qffmpegpacket_p.h"
+#include "playbackengine/qffmpegpositionwithoffset_p.h"
+#include "private/qplatformmediaplayer_p.h"
+
+#include <optional>
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+class StreamDecoder : public PlaybackEngineObject
+{
+ Q_OBJECT
+public:
+ StreamDecoder(const Codec &codec, qint64 absSeekPos);
+
+ ~StreamDecoder();
+
+ QPlatformMediaPlayer::TrackType trackType() const;
+
+    // Maximum number of frames that we are allowed to keep in the render queue
+ static qint32 maxQueueSize(QPlatformMediaPlayer::TrackType type);
+
+public slots:
+ void setInitialPosition(TimePoint tp, qint64 trackPos);
+
+ void decode(Packet);
+
+ void onFinalPacketReceived();
+
+ void onFrameProcessed(Frame frame);
+
+signals:
+ void requestHandleFrame(Frame frame);
+
+ void packetProcessed(Packet);
+
+protected:
+ bool canDoNextStep() const override;
+
+ void doNextStep() override;
+
+private:
+ void decodeMedia(Packet);
+
+ void decodeSubtitle(Packet);
+
+ void onFrameFound(Frame frame);
+
+ int sendAVPacket(Packet);
+
+ void receiveAVFrames();
+
+private:
+ Codec m_codec;
+ qint64 m_absSeekPos = 0;
+ const QPlatformMediaPlayer::TrackType m_trackType;
+
+ qint32 m_pendingFramesCount = 0;
+
+ LoopOffset m_offset;
+
+ QQueue<Packet> m_packets;
+};
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGSTREAMDECODER_P_H
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegsubtitlerenderer.cpp b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegsubtitlerenderer.cpp
new file mode 100644
index 000000000..789c9b53b
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegsubtitlerenderer.cpp
@@ -0,0 +1,44 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "playbackengine/qffmpegsubtitlerenderer_p.h"
+
+#include "qvideosink.h"
+#include "qdebug.h"
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+SubtitleRenderer::SubtitleRenderer(const TimeController &tc, QVideoSink *sink)
+ : Renderer(tc), m_sink(sink)
+{
+}
+
+void SubtitleRenderer::setOutput(QVideoSink *sink, bool cleanPrevSink)
+{
+ setOutputInternal(m_sink, sink, [cleanPrevSink](QVideoSink *prev) {
+ if (prev && cleanPrevSink)
+ prev->setSubtitleText({});
+ });
+}
+
+SubtitleRenderer::~SubtitleRenderer()
+{
+ if (m_sink)
+ m_sink->setSubtitleText({});
+}
+
+Renderer::RenderingResult SubtitleRenderer::renderInternal(Frame frame)
+{
+ if (m_sink)
+ m_sink->setSubtitleText(frame.isValid() ? frame.text() : QString());
+
+ return {};
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#include "moc_qffmpegsubtitlerenderer_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegsubtitlerenderer_p.h b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegsubtitlerenderer_p.h
new file mode 100644
index 000000000..805212e83
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegsubtitlerenderer_p.h
@@ -0,0 +1,48 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGSUBTITLERENDERER_P_H
+#define QFFMPEGSUBTITLERENDERER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "playbackengine/qffmpegrenderer_p.h"
+
+#include <QtCore/qpointer.h>
+
+QT_BEGIN_NAMESPACE
+
+class QVideoSink;
+
+namespace QFFmpeg {
+
+class SubtitleRenderer : public Renderer
+{
+ Q_OBJECT
+public:
+ SubtitleRenderer(const TimeController &tc, QVideoSink *sink);
+
+ ~SubtitleRenderer() override;
+
+ void setOutput(QVideoSink *sink, bool cleanPrevSink = false);
+
+protected:
+ RenderingResult renderInternal(Frame frame) override;
+
+private:
+ QPointer<QVideoSink> m_sink;
+};
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGSUBTITLERENDERER_P_H
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegtimecontroller.cpp b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegtimecontroller.cpp
new file mode 100644
index 000000000..8352384b4
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegtimecontroller.cpp
@@ -0,0 +1,165 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "playbackengine/qffmpegtimecontroller_p.h"
+
+#include "qglobal.h"
+#include "qdebug.h"
+
+#include <algorithm>
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+TimeController::TimeController()
+{
+ sync();
+}
+
+TimeController::PlaybackRate TimeController::playbackRate() const
+{
+ return m_playbackRate;
+}
+
+void TimeController::setPlaybackRate(PlaybackRate playbackRate)
+{
+ if (playbackRate == m_playbackRate)
+ return;
+
+ Q_ASSERT(playbackRate > 0.f);
+
+ scrollTimeTillNow();
+ m_playbackRate = playbackRate;
+
+ if (m_softSyncData)
+ m_softSyncData = makeSoftSyncData(m_timePoint, m_position, m_softSyncData->dstTimePoint);
+}
+
+void TimeController::sync(qint64 trackPos)
+{
+ sync(Clock::now(), trackPos);
+}
+
+void TimeController::sync(const TimePoint &tp, qint64 pos)
+{
+ m_softSyncData.reset();
+ m_position = TrackTime(pos);
+ m_timePoint = tp;
+}
+
+void TimeController::syncSoft(const TimePoint &tp, qint64 pos, const Clock::duration &fixingTime)
+{
+ const auto srcTime = Clock::now();
+ const auto srcPos = positionFromTime(srcTime, true);
+ const auto dstTime = srcTime + fixingTime;
+
+ m_position = TrackTime(pos);
+ m_timePoint = tp;
+
+ m_softSyncData = makeSoftSyncData(srcTime, TrackTime(srcPos), dstTime);
+}
+
+qint64 TimeController::currentPosition(const Clock::duration &offset) const
+{
+ return positionFromTime(Clock::now() + offset);
+}
+
+void TimeController::setPaused(bool paused)
+{
+ if (m_paused == paused)
+ return;
+
+ scrollTimeTillNow();
+ m_paused = paused;
+}
+
+qint64 TimeController::positionFromTime(TimePoint tp, bool ignorePause) const
+{
+ tp = m_paused && !ignorePause ? m_timePoint : tp;
+
+ if (m_softSyncData && tp < m_softSyncData->dstTimePoint) {
+ const PlaybackRate rate =
+ tp > m_softSyncData->srcTimePoint ? m_softSyncData->internalRate : m_playbackRate;
+
+ return (m_softSyncData->srcPosition
+ + toTrackTime((tp - m_softSyncData->srcTimePoint) * rate))
+ .count();
+ }
+
+ return positionFromTimeInternal(tp).count();
+}
+
+TimeController::TimePoint TimeController::timeFromPosition(qint64 pos, bool ignorePause) const
+{
+ auto position = m_paused && !ignorePause ? m_position : TrackTime(pos);
+
+ if (m_softSyncData && position < m_softSyncData->dstPosition) {
+ const auto rate = position > m_softSyncData->srcPosition ? m_softSyncData->internalRate
+ : m_playbackRate;
+ return m_softSyncData->srcTimePoint
+ + toClockTime((position - m_softSyncData->srcPosition) / rate);
+ }
+
+ return timeFromPositionInternal(position);
+}
+
+TimeController::SoftSyncData TimeController::makeSoftSyncData(const TimePoint &srcTp,
+ const TrackTime &srcPos,
+ const TimePoint &dstTp) const
+{
+ SoftSyncData result;
+ result.srcTimePoint = srcTp;
+ result.srcPosition = srcPos;
+ result.dstTimePoint = dstTp;
+    result.srcPosOffset = srcPos - positionFromTimeInternal(srcTp);
+ result.dstPosition = positionFromTimeInternal(dstTp);
+ result.internalRate =
+ static_cast<PlaybackRate>(toClockTime(TrackTime(result.dstPosition - srcPos)).count())
+ / (dstTp - srcTp).count();
+
+ return result;
+}
+
+TimeController::TrackTime TimeController::positionFromTimeInternal(const TimePoint &tp) const
+{
+ return m_position + toTrackTime((tp - m_timePoint) * m_playbackRate);
+}
+
+TimeController::TimePoint TimeController::timeFromPositionInternal(const TrackTime &pos) const
+{
+ return m_timePoint + toClockTime(TrackTime(pos - m_position) / m_playbackRate);
+}
+
+void TimeController::scrollTimeTillNow()
+{
+ const auto now = Clock::now();
+ if (!m_paused) {
+ m_position = positionFromTimeInternal(now);
+
+        // let's forget outdated synchronizations
+ if (m_softSyncData && m_softSyncData->dstTimePoint <= now)
+ m_softSyncData.reset();
+ } else if (m_softSyncData) {
+ m_softSyncData->dstTimePoint += now - m_timePoint;
+ m_softSyncData->srcTimePoint += now - m_timePoint;
+ }
+
+ m_timePoint = now;
+}
+
+template<typename T>
+TimeController::Clock::duration TimeController::toClockTime(const T &t)
+{
+ return std::chrono::duration_cast<Clock::duration>(t);
+}
+
+template<typename T>
+TimeController::TrackTime TimeController::toTrackTime(const T &t)
+{
+ return std::chrono::duration_cast<TrackTime>(t);
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
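
Apart from the soft-sync ramp, TimeController's clock<->track mapping is
linear: a track position is the anchored position plus the elapsed wall-clock
time scaled by the playback rate, and timeFromPositionInternal() is the
inverse. A standalone sketch of the forward mapping (editorial illustration):

    #include <chrono>

    using Clock = std::chrono::steady_clock;
    using TrackTime = std::chrono::microseconds;

    TrackTime positionAt(Clock::time_point t, Clock::time_point anchorTime,
                         TrackTime anchorPos, float rate)
    {
        return anchorPos
                + std::chrono::duration_cast<TrackTime>((t - anchorTime) * rate);
    }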
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegtimecontroller_p.h b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegtimecontroller_p.h
new file mode 100644
index 000000000..93ced7e64
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegtimecontroller_p.h
@@ -0,0 +1,94 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGTIMECONTROLLER_P_H
+#define QFFMPEGTIMECONTROLLER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qglobal.h"
+
+#include <chrono>
+#include <optional>
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+class TimeController
+{
+ using TrackTime = std::chrono::microseconds;
+
+public:
+ using Clock = std::chrono::steady_clock;
+ using TimePoint = Clock::time_point;
+ using PlaybackRate = float;
+
+ TimeController();
+
+ PlaybackRate playbackRate() const;
+
+ void setPlaybackRate(PlaybackRate playbackRate);
+
+ void sync(qint64 trackPos = 0);
+
+ void sync(const TimePoint &tp, qint64 pos);
+
+ void syncSoft(const TimePoint &tp, qint64 pos,
+ const Clock::duration &fixingTime = std::chrono::seconds(4));
+
+ qint64 currentPosition(const Clock::duration &offset = Clock::duration{ 0 }) const;
+
+ void setPaused(bool paused);
+
+ qint64 positionFromTime(TimePoint tp, bool ignorePause = false) const;
+
+ TimePoint timeFromPosition(qint64 pos, bool ignorePause = false) const;
+
+private:
+ struct SoftSyncData
+ {
+ TimePoint srcTimePoint;
+ TrackTime srcPosition;
+ TimePoint dstTimePoint;
+        TrackTime srcPosOffset;
+ TrackTime dstPosition;
+ PlaybackRate internalRate = 1;
+ };
+
+ SoftSyncData makeSoftSyncData(const TimePoint &srcTp, const TrackTime &srcPos,
+ const TimePoint &dstTp) const;
+
+ TrackTime positionFromTimeInternal(const TimePoint &tp) const;
+
+ TimePoint timeFromPositionInternal(const TrackTime &pos) const;
+
+ void scrollTimeTillNow();
+
+ template<typename T>
+ static Clock::duration toClockTime(const T &t);
+
+ template<typename T>
+ static TrackTime toTrackTime(const T &t);
+
+private:
+ bool m_paused = true;
+ PlaybackRate m_playbackRate = 1;
+ TrackTime m_position;
+ TimePoint m_timePoint = {};
+ std::optional<SoftSyncData> m_softSyncData;
+};
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGTIMECONTROLLER_P_H
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegvideorenderer.cpp b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegvideorenderer.cpp
new file mode 100644
index 000000000..dceb00f83
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegvideorenderer.cpp
@@ -0,0 +1,79 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "playbackengine/qffmpegvideorenderer_p.h"
+#include "qffmpegvideobuffer_p.h"
+#include "qvideosink.h"
+#include "private/qvideoframe_p.h"
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+VideoRenderer::VideoRenderer(const TimeController &tc, QVideoSink *sink, QtVideo::Rotation rotation)
+ : Renderer(tc), m_sink(sink), m_rotation(rotation)
+{
+}
+
+void VideoRenderer::setOutput(QVideoSink *sink, bool cleanPrevSink)
+{
+ setOutputInternal(m_sink, sink, [cleanPrevSink](QVideoSink *prev) {
+ if (prev && cleanPrevSink)
+ prev->setVideoFrame({});
+ });
+}
+
+VideoRenderer::RenderingResult VideoRenderer::renderInternal(Frame frame)
+{
+ if (!m_sink)
+ return {};
+
+ if (!frame.isValid()) {
+ m_sink->setVideoFrame({});
+ return {};
+ }
+
+ // qCDebug(qLcVideoRenderer) << "RHI:" << accel.isNull() << accel.rhi() << sink->rhi();
+
+ const auto codec = frame.codec();
+ Q_ASSERT(codec);
+
+#ifdef Q_OS_ANDROID
+    // QTBUG-108446
+    // In the general case, merely creating a frames context is not correct,
+    // since frames may require additional data specific to the hw context,
+    // so just setting hw_frames_ctx is not enough.
+    // TODO: investigate the case in order to remove or fix the code.
+ if (codec->hwAccel() && !frame.avFrame()->hw_frames_ctx) {
+ HWAccel *hwaccel = codec->hwAccel();
+ AVFrame *avframe = frame.avFrame();
+ if (!hwaccel->hwFramesContext())
+ hwaccel->createFramesContext(AVPixelFormat(avframe->format),
+ { avframe->width, avframe->height });
+
+ if (hwaccel->hwFramesContext())
+ avframe->hw_frames_ctx = av_buffer_ref(hwaccel->hwFramesContextAsBuffer());
+ }
+#endif
+
+ const auto pixelAspectRatio = codec->pixelAspectRatio(frame.avFrame());
+ auto buffer = std::make_unique<QFFmpegVideoBuffer>(frame.takeAVFrame(), pixelAspectRatio);
+ QVideoFrameFormat format(buffer->size(), buffer->pixelFormat());
+ format.setColorSpace(buffer->colorSpace());
+ format.setColorTransfer(buffer->colorTransfer());
+ format.setColorRange(buffer->colorRange());
+ format.setMaxLuminance(buffer->maxNits());
+ format.setRotation(m_rotation);
+ QVideoFrame videoFrame = QVideoFramePrivate::createFrame(std::move(buffer), format);
+ videoFrame.setStartTime(frame.pts());
+ videoFrame.setEndTime(frame.end());
+ m_sink->setVideoFrame(videoFrame);
+
+ return {};
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#include "moc_qffmpegvideorenderer_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegvideorenderer_p.h b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegvideorenderer_p.h
new file mode 100644
index 000000000..4866420e8
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/playbackengine/qffmpegvideorenderer_p.h
@@ -0,0 +1,47 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGVIDEORENDERER_P_H
+#define QFFMPEGVIDEORENDERER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "playbackengine/qffmpegrenderer_p.h"
+
+#include <QtCore/qpointer.h>
+
+QT_BEGIN_NAMESPACE
+
+class QVideoSink;
+
+namespace QFFmpeg {
+
+class VideoRenderer : public Renderer
+{
+ Q_OBJECT
+public:
+ VideoRenderer(const TimeController &tc, QVideoSink *sink, QtVideo::Rotation rotation);
+
+ void setOutput(QVideoSink *sink, bool cleanPrevSink = false);
+
+protected:
+ RenderingResult renderInternal(Frame frame) override;
+
+private:
+ QPointer<QVideoSink> m_sink;
+ QtVideo::Rotation m_rotation;
+};
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGVIDEORENDERER_P_H
diff --git a/src/plugins/multimedia/ffmpeg/qandroidcamera.cpp b/src/plugins/multimedia/ffmpeg/qandroidcamera.cpp
new file mode 100644
index 000000000..56725b2bb
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qandroidcamera.cpp
@@ -0,0 +1,697 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidcamera_p.h"
+
+#include <jni.h>
+#include <QMediaFormat>
+#include <memory>
+#include <optional>
+#include <qmediadevices.h>
+#include <qguiapplication.h>
+#include <qscreen.h>
+#include <QDebug>
+#include <qloggingcategory.h>
+#include <QtCore/qcoreapplication.h>
+#include <QtCore/qpermissions.h>
+#include <QtCore/private/qandroidextras_p.h>
+#include <private/qcameradevice_p.h>
+#include <QReadWriteLock>
+#include <private/qvideoframeconverter_p.h>
+#include <private/qvideotexturehelper_p.h>
+#include <qffmpegvideobuffer_p.h>
+
+#include <qandroidcameraframe_p.h>
+#include <utility>
+
+extern "C" {
+#include "libavutil/hwcontext.h"
+#include "libavutil/pixfmt.h"
+}
+
+Q_DECLARE_JNI_CLASS(QtCamera2, "org/qtproject/qt/android/multimedia/QtCamera2");
+Q_DECLARE_JNI_CLASS(QtVideoDeviceManager,
+ "org/qtproject/qt/android/multimedia/QtVideoDeviceManager");
+
+Q_DECLARE_JNI_CLASS(AndroidImageFormat, "android/graphics/ImageFormat");
+
+Q_DECLARE_JNI_CLASS(AndroidImage, "android/media/Image")
+Q_DECLARE_JNI_TYPE(AndroidImagePlaneArray, "[Landroid/media/Image$Plane;")
+Q_DECLARE_JNI_CLASS(JavaByteBuffer, "java/nio/ByteBuffer")
+Q_DECLARE_JNI_TYPE(StringArray, "[Ljava/lang/String;")
+
+QT_BEGIN_NAMESPACE
+static Q_LOGGING_CATEGORY(qLCAndroidCamera, "qt.multimedia.ffmpeg.androidCamera");
+
+typedef QMap<QString, QAndroidCamera *> QAndroidCameraMap;
+Q_GLOBAL_STATIC(QAndroidCameraMap, g_qcameras)
+Q_GLOBAL_STATIC(QReadWriteLock, rwLock)
+
+namespace {
+
+QCameraFormat getDefaultCameraFormat()
+{
+ // default settings
+ QCameraFormatPrivate *defaultFormat = new QCameraFormatPrivate{
+ .pixelFormat = QVideoFrameFormat::Format_YUV420P,
+ .resolution = { 1920, 1080 },
+ .minFrameRate = 12,
+ .maxFrameRate = 30,
+ };
+ return defaultFormat->create();
+}
+
+bool checkCameraPermission()
+{
+ QCameraPermission permission;
+
+ const bool granted = qApp->checkPermission(permission) == Qt::PermissionStatus::Granted;
+ if (!granted)
+ qCWarning(qLCAndroidCamera) << "Access to camera not granted!";
+
+ return granted;
+}
+
+int sensorOrientation(QString cameraId)
+{
+ QJniObject deviceManager(QtJniTypes::Traits<QtJniTypes::QtVideoDeviceManager>::className(),
+ QNativeInterface::QAndroidApplication::context());
+
+ if (!deviceManager.isValid()) {
+ qCWarning(qLCAndroidCamera) << "Failed to connect to Qt Video Device Manager.";
+ return 0;
+ }
+
+ return deviceManager.callMethod<jint>("getSensorOrientation",
+ QJniObject::fromString(cameraId).object<jstring>());
+}
+} // namespace
+
+// QAndroidCamera
+
+QAndroidCamera::QAndroidCamera(QCamera *camera) : QPlatformCamera(camera)
+{
+ m_jniCamera = QJniObject(QtJniTypes::Traits<QtJniTypes::QtCamera2>::className(),
+ QNativeInterface::QAndroidApplication::context());
+
+ m_hwAccel = QFFmpeg::HWAccel::create(AVHWDeviceType::AV_HWDEVICE_TYPE_MEDIACODEC);
+ if (camera) {
+ m_cameraDevice = camera->cameraDevice();
+ m_cameraFormat = !camera->cameraFormat().isNull() ? camera->cameraFormat()
+ : getDefaultCameraFormat();
+ updateCameraCharacteristics();
+ }
+
+ if (qApp) {
+ connect(qApp, &QGuiApplication::applicationStateChanged,
+ this, &QAndroidCamera::onApplicationStateChanged);
+ }
+}
+
+QAndroidCamera::~QAndroidCamera()
+{
+ {
+ QWriteLocker locker(rwLock);
+ g_qcameras->remove(m_cameraDevice.id());
+
+ m_jniCamera.callMethod<void>("stopAndClose");
+ setState(State::Closed);
+ }
+
+ m_jniCamera.callMethod<void>("stopBackgroundThread");
+}
+
+void QAndroidCamera::setCamera(const QCameraDevice &camera)
+{
+ const bool active = isActive();
+ if (active)
+ setActive(false);
+
+ m_cameraDevice = camera;
+ updateCameraCharacteristics();
+ m_cameraFormat = getDefaultCameraFormat();
+
+ if (active)
+ setActive(true);
+}
+
+std::optional<int> QAndroidCamera::ffmpegHWPixelFormat() const
+{
+ return QFFmpegVideoBuffer::toAVPixelFormat(m_androidFramePixelFormat);
+}
+
+static void deleteFrame(void *opaque, uint8_t *data)
+{
+ Q_UNUSED(data);
+
+ auto frame = reinterpret_cast<QAndroidCameraFrame *>(opaque);
+
+ if (frame)
+ delete frame;
+}
+
+void QAndroidCamera::frameAvailable(QJniObject image, bool takePhoto)
+{
+ if (!(m_state == State::WaitingStart || m_state == State::Started) && !m_waitingForFirstFrame) {
+ qCWarning(qLCAndroidCamera) << "Received frame when not active... ignoring";
+ qCWarning(qLCAndroidCamera) << "state:" << m_state;
+ image.callMethod<void>("close");
+ return;
+ }
+
+ auto androidFrame = new QAndroidCameraFrame(image);
+ if (!androidFrame->isParsed()) {
+ qCWarning(qLCAndroidCamera) << "Failed to parse frame.. dropping frame";
+ delete androidFrame;
+ return;
+ }
+
+ int timestamp = androidFrame->timestamp();
+ m_androidFramePixelFormat = androidFrame->format();
+ if (m_waitingForFirstFrame) {
+ m_waitingForFirstFrame = false;
+ setState(State::Started);
+ }
+ auto avframe = QFFmpeg::makeAVFrame();
+
+ avframe->width = androidFrame->size().width();
+ avframe->height = androidFrame->size().height();
+ avframe->format = QFFmpegVideoBuffer::toAVPixelFormat(androidFrame->format());
+
+ for (int planeNumber = 0; planeNumber < androidFrame->numberPlanes(); planeNumber++) {
+ QAndroidCameraFrame::Plane plane = androidFrame->plane(planeNumber);
+ avframe->linesize[planeNumber] = plane.rowStride;
+ avframe->data[planeNumber] = plane.data;
+ }
+
+ avframe->data[3] = nullptr;
+ avframe->buf[0] = nullptr;
+
+ avframe->opaque_ref = av_buffer_create(NULL, 1, deleteFrame, androidFrame, 0);
+ avframe->extended_data = avframe->data;
+ avframe->pts = timestamp;
+
+ QVideoFrameFormat format(androidFrame->size(), androidFrame->format());
+
+ QVideoFrame videoFrame(new QFFmpegVideoBuffer(std::move(avframe)), format);
+
+ if (lastTimestamp == 0)
+ lastTimestamp = timestamp;
+
+ videoFrame.setRotation(rotation());
+ videoFrame.setMirrored(m_cameraDevice.position() == QCameraDevice::Position::FrontFace);
+
+ videoFrame.setStartTime(lastTimestamp);
+ videoFrame.setEndTime(timestamp);
+
+ if (!takePhoto)
+ emit newVideoFrame(videoFrame);
+ else
+ emit onCaptured(videoFrame);
+
+ lastTimestamp = timestamp;
+}
+
+QtVideo::Rotation QAndroidCamera::rotation()
+{
+ auto screen = QGuiApplication::primaryScreen();
+ auto screenOrientation = screen->orientation();
+ if (screenOrientation == Qt::PrimaryOrientation)
+ screenOrientation = screen->primaryOrientation();
+
+    // Display rotation is the opposite direction of the physical device
+    // rotation. We need the device rotation, which is why Landscape is 270
+    // and InvertedLandscape is 90.
+ int deviceOrientation = 0;
+ switch (screenOrientation) {
+ case Qt::PrimaryOrientation:
+ case Qt::PortraitOrientation:
+ break;
+ case Qt::LandscapeOrientation:
+ deviceOrientation = 270;
+ break;
+ case Qt::InvertedPortraitOrientation:
+ deviceOrientation = 180;
+ break;
+ case Qt::InvertedLandscapeOrientation:
+ deviceOrientation = 90;
+ break;
+ }
+
+ int sign = (m_cameraDevice.position() == QCameraDevice::Position::FrontFace) ? 1 : -1;
+ int rotation = (sensorOrientation(m_cameraDevice.id()) - deviceOrientation * sign + 360) % 360;
+
+ return QtVideo::Rotation(rotation);
+}
+
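
As a worked example of the formula above (editorial sketch; 90 degrees is a
typical back-camera sensor orientation, assumed here only for illustration):

    constexpr int rotationFor(int sensor, int deviceOrientation, int sign)
    {
        return (sensor - deviceOrientation * sign + 360) % 360;
    }
    static_assert(rotationFor(90, 0, -1) == 90);  // portrait: rotate frames 90 deg
    static_assert(rotationFor(90, 270, -1) == 0); // landscape: already upright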
+void QAndroidCamera::setActive(bool active)
+{
+ if (isActive() == active)
+ return;
+
+ if (!m_jniCamera.isValid()) {
+ updateError(QCamera::CameraError, QStringLiteral("No connection to Android Camera2 API"));
+ return;
+ }
+
+ if (active && checkCameraPermission()) {
+ QWriteLocker locker(rwLock);
+ int width = m_cameraFormat.resolution().width();
+ int height = m_cameraFormat.resolution().height();
+
+ if (width < 0 || height < 0) {
+ m_cameraFormat = getDefaultCameraFormat();
+ width = m_cameraFormat.resolution().width();
+ height = m_cameraFormat.resolution().height();
+ }
+
+ width = FFALIGN(width, 16);
+ height = FFALIGN(height, 16);
+
+ setState(State::WaitingOpen);
+ g_qcameras->insert(m_cameraDevice.id(), this);
+
+        // This should use the camera format, but only two formats are fully
+        // supported on Android - JPEG and YUV420P - and JPEG is not
+        // supported for encoding in FFmpeg, so it is locked to YUV for now.
+ const static int imageFormat =
+ QJniObject::getStaticField<QtJniTypes::AndroidImageFormat, jint>("YUV_420_888");
+ m_jniCamera.callMethod<void>("prepareCamera", jint(width), jint(height),
+ jint(imageFormat), jint(m_cameraFormat.minFrameRate()),
+ jint(m_cameraFormat.maxFrameRate()));
+
+ bool canOpen = m_jniCamera.callMethod<jboolean>(
+ "open", QJniObject::fromString(m_cameraDevice.id()).object<jstring>());
+
+ if (!canOpen) {
+ g_qcameras->remove(m_cameraDevice.id());
+ setState(State::Closed);
+ updateError(QCamera::CameraError,
+ QString("Failed to start camera: ").append(m_cameraDevice.description()));
+ }
+ } else {
+ m_jniCamera.callMethod<void>("stopAndClose");
+ m_jniCamera.callMethod<void>("clearSurfaces");
+ setState(State::Closed);
+ }
+}
+
+void QAndroidCamera::setState(QAndroidCamera::State newState)
+{
+ if (newState == m_state)
+ return;
+
+ bool wasActive = isActive();
+
+ if (newState == State::Started)
+ m_state = State::Started;
+
+ if (m_state == State::Started && newState == State::Closed)
+ m_state = State::Closed;
+
+ if ((m_state == State::WaitingOpen || m_state == State::WaitingStart)
+ && newState == State::Closed) {
+
+ m_state = State::Closed;
+
+ updateError(QCamera::CameraError,
+ QString("Failed to start Camera %1").arg(m_cameraDevice.description()));
+ }
+
+ if (m_state == State::Closed && newState == State::WaitingOpen)
+ m_state = State::WaitingOpen;
+
+ if (m_state == State::WaitingOpen && newState == State::WaitingStart)
+ m_state = State::WaitingStart;
+
+ if (wasActive != isActive())
+ emit activeChanged(isActive());
+}
+
+bool QAndroidCamera::setCameraFormat(const QCameraFormat &format)
+{
+ const auto chosenFormat = format.isNull() ? getDefaultCameraFormat() : format;
+
+ if (chosenFormat == m_cameraFormat || !m_cameraDevice.videoFormats().contains(chosenFormat))
+ return false;
+
+ m_cameraFormat = chosenFormat;
+
+ if (isActive()) {
+ // Restart the camera to set new camera format
+ setActive(false);
+ setActive(true);
+ }
+
+ return true;
+}
+
+void QAndroidCamera::updateCameraCharacteristics()
+{
+ if (m_cameraDevice.id().isEmpty()) {
+ cleanCameraCharacteristics();
+ return;
+ }
+
+ QJniObject deviceManager(QtJniTypes::Traits<QtJniTypes::QtVideoDeviceManager>::className(),
+ QNativeInterface::QAndroidApplication::context());
+
+ if (!deviceManager.isValid()) {
+ qCWarning(qLCAndroidCamera) << "Failed to connect to Qt Video Device Manager.";
+ cleanCameraCharacteristics();
+ return;
+ }
+
+ const float maxZoom = deviceManager.callMethod<jfloat>(
+ "getMaxZoom", QJniObject::fromString(m_cameraDevice.id()).object<jstring>());
+ maximumZoomFactorChanged(maxZoom);
+ if (maxZoom < zoomFactor()) {
+ zoomTo(1.0, -1.0);
+ }
+
+ m_TorchModeSupported = deviceManager.callMethod<jboolean>(
+ "isTorchModeSupported", QJniObject::fromString(m_cameraDevice.id()).object<jstring>());
+
+ m_supportedFlashModes.clear();
+ m_supportedFlashModes.append(QCamera::FlashOff);
+ QJniObject flashModesObj = deviceManager.callMethod<QtJniTypes::StringArray>(
+ "getSupportedFlashModes",
+ QJniObject::fromString(m_cameraDevice.id()).object<jstring>());
+ QJniEnvironment jniEnv;
+ jobjectArray flashModes = flashModesObj.object<jobjectArray>();
+ int size = jniEnv->GetArrayLength(flashModes);
+ for (int i = 0; i < size; ++i) {
+ QJniObject flashModeObj = jniEnv->GetObjectArrayElement(flashModes, i);
+ QString flashMode = flashModeObj.toString();
+ if (flashMode == QLatin1String("auto"))
+ m_supportedFlashModes.append(QCamera::FlashAuto);
+ else if (flashMode == QLatin1String("on"))
+ m_supportedFlashModes.append(QCamera::FlashOn);
+ }
+}
+
+void QAndroidCamera::cleanCameraCharacteristics()
+{
+ maximumZoomFactorChanged(1.0);
+ if (zoomFactor() != 1.0) {
+ zoomTo(1.0, -1.0);
+ }
+ if (torchMode() != QCamera::TorchOff) {
+ setTorchMode(QCamera::TorchOff);
+ }
+ m_TorchModeSupported = false;
+
+ if (flashMode() != QCamera::FlashOff) {
+ setFlashMode(QCamera::FlashOff);
+ }
+ m_supportedFlashModes.clear();
+ m_supportedFlashModes.append(QCamera::FlashOff);
+}
+
+void QAndroidCamera::setFlashMode(QCamera::FlashMode mode)
+{
+ if (!isFlashModeSupported(mode))
+ return;
+
+ QString flashMode;
+ switch (mode) {
+ case QCamera::FlashAuto:
+ flashMode = QLatin1String("auto");
+ break;
+ case QCamera::FlashOn:
+ flashMode = QLatin1String("on");
+ break;
+ case QCamera::FlashOff:
+ default:
+ flashMode = QLatin1String("off");
+ break;
+ }
+
+ m_jniCamera.callMethod<void>("setFlashMode", QJniObject::fromString(flashMode).object<jstring>());
+ flashModeChanged(mode);
+}
+
+bool QAndroidCamera::isFlashModeSupported(QCamera::FlashMode mode) const
+{
+ return m_supportedFlashModes.contains(mode);
+}
+
+bool QAndroidCamera::isFlashReady() const
+{
+ // Android has no API for this; just report whether the device supports
+ // more flash modes than FlashOff.
+ return m_supportedFlashModes.size() > 1;
+}
+
+bool QAndroidCamera::isTorchModeSupported(QCamera::TorchMode mode) const
+{
+ if (mode == QCamera::TorchOff)
+ return true;
+ else if (mode == QCamera::TorchOn)
+ return m_TorchModeSupported;
+
+ return false;
+}
+
+void QAndroidCamera::setTorchMode(QCamera::TorchMode mode)
+{
+ bool torchMode;
+ if (mode == QCamera::TorchOff) {
+ torchMode = false;
+ } else if (mode == QCamera::TorchOn) {
+ torchMode = true;
+ } else {
+ qWarning() << "Unknown Torch mode";
+ return;
+ }
+ m_jniCamera.callMethod<void>("setTorchMode", jboolean(torchMode));
+ torchModeChanged(mode);
+}
+
+void QAndroidCamera::zoomTo(float factor, float rate)
+{
+ Q_UNUSED(rate);
+ m_jniCamera.callMethod<void>("zoomTo", factor);
+ zoomFactorChanged(factor);
+}
+
+void QAndroidCamera::onApplicationStateChanged()
+{
+ switch (QGuiApplication::applicationState()) {
+ case Qt::ApplicationInactive:
+ if (isActive()) {
+ setActive(false);
+ m_wasActive = true;
+ }
+ break;
+ case Qt::ApplicationActive:
+ if (m_wasActive) {
+ setActive(true);
+ m_wasActive = false;
+ }
+ break;
+ default:
+ break;
+ }
+}
+
+void QAndroidCamera::onCaptureSessionConfigured()
+{
+ bool canStart = m_jniCamera.callMethod<jboolean>("start", 3);
+ setState(canStart ? State::WaitingStart : State::Closed);
+}
+
+void QAndroidCamera::onCaptureSessionConfigureFailed()
+{
+ setState(State::Closed);
+}
+
+void QAndroidCamera::onCameraOpened()
+{
+ bool canStart = m_jniCamera.callMethod<jboolean>("createSession");
+ setState(canStart ? State::WaitingStart : State::Closed);
+}
+
+void QAndroidCamera::onCameraDisconnect()
+{
+ setState(State::Closed);
+}
+
+void QAndroidCamera::onCameraError(int reason)
+{
+ updateError(QCamera::CameraError,
+ QString("Capture error with Camera %1. Camera2 Api error code: %2")
+ .arg(m_cameraDevice.description())
+ .arg(reason));
+}
+
+void QAndroidCamera::onSessionActive()
+{
+ m_waitingForFirstFrame = true;
+}
+
+void QAndroidCamera::onSessionClosed()
+{
+ m_waitingForFirstFrame = false;
+ setState(State::Closed);
+}
+
+void QAndroidCamera::capture()
+{
+ m_jniCamera.callMethod<void>("takePhoto");
+}
+
+void QAndroidCamera::updateExif(const QString &filename)
+{
+ m_jniCamera.callMethod<void>("saveExifToFile", QJniObject::fromString(filename).object<jstring>());
+}
+
+void QAndroidCamera::onCaptureSessionFailed(int reason, long frameNumber)
+{
+ Q_UNUSED(frameNumber);
+
+ updateError(QCamera::CameraError,
+ QStringLiteral("Capture session failure with Camera %1. Camera2 Api error code: %2")
+ .arg(m_cameraDevice.description())
+ .arg(reason));
+}
+
+// JNI logic
+
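+ // GET_CAMERA looks up the QAndroidCamera registered for the given camera
+ // id under the read lock, bailing out if it was already destroyed.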
+#define GET_CAMERA(cameraId) \
+ QString key = QJniObject(cameraId).toString(); \
+ QReadLocker locker(rwLock); \
+ if (!g_qcameras->contains(key)) { \
+ qCWarning(qLCAndroidCamera) << "QtCamera2 called back after the camera was destroyed."; \
+ return; \
+ } \
+ QAndroidCamera *camera = g_qcameras->find(key).value();
+
+static void onFrameAvailable(JNIEnv *env, jobject obj, jstring cameraId,
+ QtJniTypes::AndroidImage image)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(obj);
+ GET_CAMERA(cameraId);
+
+ camera->frameAvailable(QJniObject(image));
+}
+Q_DECLARE_JNI_NATIVE_METHOD(onFrameAvailable)
+
+static void onPhotoAvailable(JNIEnv *env, jobject obj, jstring cameraId,
+ QtJniTypes::AndroidImage image)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(obj);
+ GET_CAMERA(cameraId);
+
+ camera->frameAvailable(QJniObject(image), true);
+}
+Q_DECLARE_JNI_NATIVE_METHOD(onPhotoAvailable)
+
+
+static void onCameraOpened(JNIEnv *env, jobject obj, jstring cameraId)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(obj);
+ GET_CAMERA(cameraId);
+
+ camera->onCameraOpened();
+}
+Q_DECLARE_JNI_NATIVE_METHOD(onCameraOpened)
+
+static void onCameraDisconnect(JNIEnv *env, jobject obj, jstring cameraId)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(obj);
+ GET_CAMERA(cameraId);
+
+ camera->onCameraDisconnect();
+}
+Q_DECLARE_JNI_NATIVE_METHOD(onCameraDisconnect)
+
+static void onCameraError(JNIEnv *env, jobject obj, jstring cameraId, jint error)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(obj);
+ GET_CAMERA(cameraId);
+
+ camera->onCameraError(error);
+}
+Q_DECLARE_JNI_NATIVE_METHOD(onCameraError)
+
+static void onCaptureSessionConfigured(JNIEnv *env, jobject obj, jstring cameraId)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(obj);
+ GET_CAMERA(cameraId);
+
+ camera->onCaptureSessionConfigured();
+}
+Q_DECLARE_JNI_NATIVE_METHOD(onCaptureSessionConfigured)
+
+static void onCaptureSessionConfigureFailed(JNIEnv *env, jobject obj, jstring cameraId)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(obj);
+ GET_CAMERA(cameraId);
+
+ camera->onCaptureSessionConfigureFailed();
+}
+Q_DECLARE_JNI_NATIVE_METHOD(onCaptureSessionConfigureFailed)
+
+static void onSessionActive(JNIEnv *env, jobject obj, jstring cameraId)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(obj);
+ GET_CAMERA(cameraId);
+
+ camera->onSessionActive();
+}
+Q_DECLARE_JNI_NATIVE_METHOD(onSessionActive)
+
+static void onSessionClosed(JNIEnv *env, jobject obj, jstring cameraId)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(obj);
+ GET_CAMERA(cameraId);
+
+ camera->onSessionClosed();
+}
+Q_DECLARE_JNI_NATIVE_METHOD(onSessionClosed)
+
+static void onCaptureSessionFailed(JNIEnv *env, jobject obj, jstring cameraId, jint reason,
+ jlong framenumber)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(obj);
+ GET_CAMERA(cameraId);
+
+ camera->onCaptureSessionFailed(reason, framenumber);
+}
+Q_DECLARE_JNI_NATIVE_METHOD(onCaptureSessionFailed)
+
+bool QAndroidCamera::registerNativeMethods()
+{
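+ // The static local ensures the native methods are registered exactly once
+ // (thread-safe since C++11 magic statics).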
+ static const bool registered = []() {
+ return QJniEnvironment().registerNativeMethods(
+ QtJniTypes::Traits<QtJniTypes::QtCamera2>::className(),
+ {
+ Q_JNI_NATIVE_METHOD(onCameraOpened),
+ Q_JNI_NATIVE_METHOD(onCameraDisconnect),
+ Q_JNI_NATIVE_METHOD(onCameraError),
+ Q_JNI_NATIVE_METHOD(onCaptureSessionConfigured),
+ Q_JNI_NATIVE_METHOD(onCaptureSessionConfigureFailed),
+ Q_JNI_NATIVE_METHOD(onCaptureSessionFailed),
+ Q_JNI_NATIVE_METHOD(onFrameAvailable),
+ Q_JNI_NATIVE_METHOD(onPhotoAvailable),
+ Q_JNI_NATIVE_METHOD(onSessionActive),
+ Q_JNI_NATIVE_METHOD(onSessionClosed),
+ });
+ }();
+ return registered;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qandroidcamera_p.h b/src/plugins/multimedia/ffmpeg/qandroidcamera_p.h
new file mode 100644
index 000000000..26606a7e0
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qandroidcamera_p.h
@@ -0,0 +1,91 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDCAMERA_H
+#define QANDROIDCAMERA_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qffmpeghwaccel_p.h"
+#include <private/qplatformcamera_p.h>
+#include <QObject>
+#include <QJniObject>
+
+QT_BEGIN_NAMESPACE
+
+class QVideoFrame;
+
+class QAndroidCamera : public QPlatformCamera
+{
+ Q_OBJECT
+public:
+ enum State { Closed, WaitingOpen, WaitingStart, Started };
+ explicit QAndroidCamera(QCamera *camera);
+ ~QAndroidCamera() override;
+
+ bool isActive() const override { return m_state == State::Started; }
+ bool isFlashModeSupported(QCamera::FlashMode mode) const override;
+ bool isFlashReady() const override;
+ bool isTorchModeSupported(QCamera::TorchMode mode) const override;
+ void setActive(bool active) override;
+ void setCamera(const QCameraDevice &camera) override;
+ bool setCameraFormat(const QCameraFormat &format) override;
+ void setFlashMode(QCamera::FlashMode mode) override;
+ void setTorchMode(QCamera::TorchMode mode) override;
+ void zoomTo(float factor, float rate) override;
+
+ std::optional<int> ffmpegHWPixelFormat() const override;
+
+ static bool registerNativeMethods();
+
+ void capture();
+ void updateExif(const QString &filename);
+public slots:
+ void onApplicationStateChanged();
+ void onCameraOpened();
+ void onCameraDisconnect();
+ void onCameraError(int error);
+ void frameAvailable(QJniObject image, bool takePhoto = false);
+ void onCaptureSessionConfigured();
+ void onCaptureSessionConfigureFailed();
+ void onCaptureSessionFailed(int reason, long frameNumber);
+ void onSessionActive();
+ void onSessionClosed();
+
+Q_SIGNALS:
+ void onCaptured(const QVideoFrame&);
+
+private:
+ bool isActivating() const { return m_state != State::Closed; }
+
+ void setState(State newState);
+ QtVideo::Rotation rotation();
+ void updateCameraCharacteristics();
+ void cleanCameraCharacteristics();
+
+ State m_state = State::Closed;
+ QCameraDevice m_cameraDevice;
+ long lastTimestamp = 0;
+ QJniObject m_jniCamera;
+
+ std::unique_ptr<QFFmpeg::HWAccel> m_hwAccel;
+
+ QVideoFrameFormat::PixelFormat m_androidFramePixelFormat;
+ QList<QCamera::FlashMode> m_supportedFlashModes;
+ bool m_waitingForFirstFrame = false;
+ bool m_TorchModeSupported = false;
+ bool m_wasActive = false;
+};
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDCAMERA_H
diff --git a/src/plugins/multimedia/ffmpeg/qandroidcameraframe.cpp b/src/plugins/multimedia/ffmpeg/qandroidcameraframe.cpp
new file mode 100644
index 000000000..28d02b20e
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qandroidcameraframe.cpp
@@ -0,0 +1,224 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidcameraframe_p.h"
+#include <jni.h>
+#include <QDebug>
+#include <QtCore/qjnitypes.h>
+#include <QtCore/QLoggingCategory>
+
+Q_DECLARE_JNI_CLASS(AndroidImageFormat, "android/graphics/ImageFormat");
+
+Q_DECLARE_JNI_CLASS(AndroidImage, "android/media/Image")
+Q_DECLARE_JNI_TYPE(AndroidImagePlaneArray, "[Landroid/media/Image$Plane;")
+Q_DECLARE_JNI_CLASS(JavaByteBuffer, "java/nio/ByteBuffer")
+Q_DECLARE_JNI_CLASS(QtVideoDeviceManager,
+ "org/qtproject/qt/android/multimedia/QtVideoDeviceManager");
+
+QT_BEGIN_NAMESPACE
+static Q_LOGGING_CATEGORY(qLCAndroidCameraFrame, "qt.multimedia.ffmpeg.android.camera.frame");
+
+namespace {
+bool isWorkaroundForEmulatorNeeded() {
+ const static bool workaroundForEmulator
+ = QtJniTypes::QtVideoDeviceManager::callStaticMethod<jboolean>("isEmulator");
+ return workaroundForEmulator;
+}
+}
+
+bool QAndroidCameraFrame::parse(const QJniObject &frame)
+{
+ QJniEnvironment jniEnv;
+
+ if (!frame.isValid())
+ return false;
+
+ auto planes = frame.callMethod<QtJniTypes::AndroidImagePlaneArray>("getPlanes");
+ if (!planes.isValid())
+ return false;
+
+ int numberPlanes = jniEnv->GetArrayLength(planes.object<jarray>());
+ // Create and populate temporary per-plane arrays (relies on the VLA compiler extension)
+ int pixelStrides[numberPlanes];
+ int rowStrides[numberPlanes];
+ int bufferSize[numberPlanes];
+ uint8_t *buffer[numberPlanes];
+
+ auto resetPlane = [&](int index) {
+ if (index < 0 || index >= numberPlanes)
+ return;
+
+ rowStrides[index] = 0;
+ pixelStrides[index] = 0;
+ bufferSize[index] = 0;
+ buffer[index] = nullptr;
+ };
+
+ for (int index = 0; index < numberPlanes; index++) {
+ QJniObject plane = jniEnv->GetObjectArrayElement(planes.object<jobjectArray>(), index);
+ if (jniEnv.checkAndClearExceptions() || !plane.isValid()) {
+ resetPlane(index);
+ continue;
+ }
+
+ rowStrides[index] = plane.callMethod<jint>("getRowStride");
+ pixelStrides[index] = plane.callMethod<jint>("getPixelStride");
+
+ auto byteBuffer = plane.callMethod<QtJniTypes::JavaByteBuffer>("getBuffer");
+ if (!byteBuffer.isValid()) {
+ resetPlane(index);
+ continue;
+ }
+
+ // Uses direct buffer access, which Android guarantees to work for
+ // ImageReader ByteBuffers
+ buffer[index] = static_cast<uint8_t *>(jniEnv->GetDirectBufferAddress(byteBuffer.object()));
+ bufferSize[index] = byteBuffer.callMethod<jint>("remaining");
+ }
+
+ QVideoFrameFormat::PixelFormat calculatedPixelFormat = QVideoFrameFormat::Format_Invalid;
+
+ // Determine the image format. The ImageFormats that can occur here are
+ // documented at:
+ // https://developer.android.com/reference/android/media/Image#getFormat()
+ int format = frame.callMethod<jint>("getFormat");
+ AndroidImageFormat imageFormat = AndroidImageFormat(format);
+
+ switch (imageFormat) {
+ case AndroidImageFormat::JPEG:
+ calculatedPixelFormat = QVideoFrameFormat::Format_Jpeg;
+ break;
+ case AndroidImageFormat::YUV_420_888:
+ if (numberPlanes < 3) {
+ // Something went wrong during parsing; YUV_420_888 must always have 3 planes
+ calculatedPixelFormat = QVideoFrameFormat::Format_Invalid;
+ break;
+ }
+ if (pixelStrides[1] == 1)
+ calculatedPixelFormat = QVideoFrameFormat::Format_YUV420P;
+ else if (pixelStrides[1] == 2 && abs(buffer[1] - buffer[2]) == 1)
+ // this may be NV21, but it will be converted below
+ calculatedPixelFormat = QVideoFrameFormat::Format_NV12;
+ break;
+ case AndroidImageFormat::HEIC:
+ // QImage cannot parse HEIC
+ calculatedPixelFormat = QVideoFrameFormat::Format_Invalid;
+ break;
+ case AndroidImageFormat::RAW_PRIVATE:
+ case AndroidImageFormat::RAW_SENSOR:
+ // raw formats cannot be interpreted without sensor-specific knowledge
+ calculatedPixelFormat = QVideoFrameFormat::Format_Invalid;
+ break;
+ case AndroidImageFormat::FLEX_RGBA_8888:
+ case AndroidImageFormat::FLEX_RGB_888:
+ // these formats are only returned by MediaCodec.getOutputImage; they are
+ // never delivered as Camera2 image frames
+ calculatedPixelFormat = QVideoFrameFormat::Format_Invalid;
+ break;
+ case AndroidImageFormat::YUV_422_888:
+ case AndroidImageFormat::YUV_444_888:
+ case AndroidImageFormat::YCBCR_P010:
+ // not handled; these formats require a higher API level than Qt's current minimum
+ calculatedPixelFormat = QVideoFrameFormat::Format_Invalid;
+ break;
+ default:
+ calculatedPixelFormat = QVideoFrameFormat::Format_Invalid;
+ break;
+ }
+
+ if (calculatedPixelFormat == QVideoFrameFormat::Format_Invalid) {
+ qCWarning(qLCAndroidCameraFrame) << "Cannot determine image format!";
+ return false;
+ }
+
+ auto copyPlane = [&](int mapIndex, int arrayIndex) {
+ if (arrayIndex >= numberPlanes)
+ return;
+
+ m_planes[mapIndex].rowStride = rowStrides[arrayIndex];
+ m_planes[mapIndex].size = bufferSize[arrayIndex];
+ m_planes[mapIndex].data = buffer[arrayIndex];
+ };
+
+ int width = frame.callMethod<jint>("getWidth");
+ int height = frame.callMethod<jint>("getHeight");
+ m_size = QSize(width, height);
+
+ switch (calculatedPixelFormat) {
+ case QVideoFrameFormat::Format_YUV420P:
+ m_numberPlanes = 3;
+ copyPlane(0, 0);
+ copyPlane(1, 1);
+ copyPlane(2, 2);
+
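+ // On the emulator the direct ByteBuffers may not outlive the Image, so
+ // deep-copy the plane data; the copies are freed in the destructor.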
+ if (isWorkaroundForEmulatorNeeded()) {
+ for (int i = 0; i < 3; ++i) {
+ const int dataSize = (i == 0) ? width * height : width * height / 4;
+ m_planes[i].data = new uint8_t[dataSize];
+ memcpy(m_planes[i].data, buffer[i], dataSize);
+ }
+ }
+
+ m_pixelFormat = QVideoFrameFormat::Format_YUV420P;
+ break;
+ case QVideoFrameFormat::Format_NV12:
+ m_numberPlanes = 2;
+ copyPlane(0, 0);
+ copyPlane(1, 1);
+ m_pixelFormat = QVideoFrameFormat::Format_NV12;
+ break;
+ case QVideoFrameFormat::Format_Jpeg:
+ qCWarning(qLCAndroidCameraFrame)
+ << "FFmpeg's hardware MediaCodec encoder only accepts YCbCr formats";
+ // still parse the JPEG so the frame can be previewed
+ m_image = QImage::fromData(buffer[0], bufferSize[0]);
+ m_planes[0].rowStride = m_image.bytesPerLine();
+ m_planes[0].size = m_image.sizeInBytes();
+ m_planes[0].data = m_image.bits();
+ m_pixelFormat = QVideoFrameFormat::pixelFormatFromImageFormat(m_image.format());
+ break;
+ default:
+ break;
+ }
+
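+ // Image.getTimestamp() reports nanoseconds; store the value in microseconds.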
+ long timestamp = frame.callMethod<jlong>("getTimestamp");
+ m_timestamp = timestamp / 1000;
+
+ return true;
+}
+
+QAndroidCameraFrame::QAndroidCameraFrame(QJniObject frame)
+ : m_pixelFormat(QVideoFrameFormat::Format_Invalid), m_parsed(parse(frame))
+{
+ if (isParsed()) {
+ // keep a global reference to the Java frame object alive
+ QJniEnvironment jniEnv;
+ m_frame = jniEnv->NewGlobalRef(frame.object());
+ jniEnv.checkAndClearExceptions();
+ } else if (frame.isValid()) {
+ frame.callMethod<void>("close");
+ }
+}
+
+QAndroidCameraFrame::~QAndroidCameraFrame()
+{
+ if (!isParsed()) // nothing to clean
+ return;
+
+ QJniObject qFrame(m_frame);
+ if (qFrame.isValid())
+ qFrame.callMethod<void>("close");
+
+ QJniEnvironment jniEnv;
+ if (m_frame)
+ jniEnv->DeleteGlobalRef(m_frame);
+
+ if (isWorkaroundForEmulatorNeeded()) {
+ if (m_pixelFormat == QVideoFrameFormat::Format_YUV420P) {
+ for (int i = 0; i < 3; ++i)
+ delete[] m_planes[i].data;
+ }
+ }
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qandroidcameraframe_p.h b/src/plugins/multimedia/ffmpeg/qandroidcameraframe_p.h
new file mode 100644
index 000000000..23a737f7d
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qandroidcameraframe_p.h
@@ -0,0 +1,75 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDCAMERAFRAME_H
+#define QANDROIDCAMERAFRAME_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QVideoFrameFormat>
+#include <QJniObject>
+
+class QAndroidCameraFrame
+{
+public:
+ struct Plane
+ {
+ int pixelStride = 0;
+ int rowStride = 0;
+ int size = 0;
+ uint8_t *data = nullptr;
+ };
+
+ QAndroidCameraFrame(QJniObject frame);
+ ~QAndroidCameraFrame();
+
+ QVideoFrameFormat::PixelFormat format() const { return m_pixelFormat; }
+ int numberPlanes() const { return m_numberPlanes; }
+ Plane plane(int index) const
+ {
+ if (index < 0 || index >= numberPlanes())
+ return {};
+
+ return m_planes[index];
+ }
+ QSize size() const { return m_size; }
+ long timestamp() const { return m_timestamp; }
+
+ bool isParsed() const { return m_parsed; }
+
+private:
+ bool parse(const QJniObject &frame);
+ QVideoFrameFormat::PixelFormat m_pixelFormat;
+
+ QSize m_size = {};
+ long m_timestamp = 0;
+ int m_numberPlanes = 0;
+ Plane m_planes[3]; // at most 3 planes
+ jobject m_frame = nullptr;
+ bool m_parsed = false;
+ QImage m_image;
+
+ enum AndroidImageFormat {
+ RAW_SENSOR = 32,
+ YUV_420_888 = 35,
+ RAW_PRIVATE = 36,
+ YUV_422_888 = 39,
+ YUV_444_888 = 40,
+ FLEX_RGB_888 = 41,
+ FLEX_RGBA_8888 = 42,
+ YCBCR_P010 = 54,
+ JPEG = 256,
+ HEIC = 1212500294
+ };
+};
+
+#endif // QANDROIDCAMERAFRAME_H
diff --git a/src/plugins/multimedia/ffmpeg/qandroidimagecapture.cpp b/src/plugins/multimedia/ffmpeg/qandroidimagecapture.cpp
new file mode 100644
index 000000000..ed0f2de9d
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qandroidimagecapture.cpp
@@ -0,0 +1,46 @@
+// Copyright (C) 2023 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidimagecapture_p.h"
+#include <qandroidcamera_p.h>
+
+QT_BEGIN_NAMESPACE
+
+QAndroidImageCapture::QAndroidImageCapture(QImageCapture *parent)
+ : QFFmpegImageCapture(parent)
+{
+ connect(this, &QPlatformImageCapture::imageSaved, this, &QAndroidImageCapture::updateExif);
+}
+
+QAndroidImageCapture::~QAndroidImageCapture()
+{
+}
+
+int QAndroidImageCapture::doCapture(const QString &fileName)
+{
+ auto ret = QFFmpegImageCapture::doCapture(fileName);
+ if (ret >= 0) {
+ auto androidCamera = qobject_cast<QAndroidCamera *>(videoSource());
+ if (androidCamera)
+ androidCamera->capture();
+ }
+
+ return ret;
+}
+
+void QAndroidImageCapture::setupVideoSourceConnections()
+{
+ auto androidCamera = qobject_cast<QAndroidCamera *>(videoSource());
+ if (androidCamera)
+ connect(androidCamera, &QAndroidCamera::onCaptured, this, &QAndroidImageCapture::newVideoFrame);
+ else
+ QFFmpegImageCapture::setupVideoSourceConnections();
+}
+
+void QAndroidImageCapture::updateExif(int id, const QString &filename)
+{
+ Q_UNUSED(id);
+ auto androidCamera = qobject_cast<QAndroidCamera *>(videoSource());
+ if (androidCamera)
+ androidCamera->updateExif(filename);
+}
diff --git a/src/plugins/multimedia/ffmpeg/qandroidimagecapture_p.h b/src/plugins/multimedia/ffmpeg/qandroidimagecapture_p.h
new file mode 100644
index 000000000..8a997b595
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qandroidimagecapture_p.h
@@ -0,0 +1,38 @@
+// Copyright (C) 2023 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDIMAGECAPTURE_H
+#define QANDROIDIMAGECAPTURE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qffmpegimagecapture_p.h"
+
+QT_BEGIN_NAMESPACE
+
+class QAndroidImageCapture : public QFFmpegImageCapture
+{
+public:
+ QAndroidImageCapture(QImageCapture *parent);
+ ~QAndroidImageCapture() override;
+
+protected:
+ void setupVideoSourceConnections() override;
+ int doCapture(const QString &fileName) override;
+
+private slots:
+ void updateExif(int id, const QString &filename);
+};
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDIMAGECAPTURE_H
diff --git a/src/plugins/multimedia/ffmpeg/qandroidvideodevices.cpp b/src/plugins/multimedia/ffmpeg/qandroidvideodevices.cpp
new file mode 100644
index 000000000..fd4221d55
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qandroidvideodevices.cpp
@@ -0,0 +1,150 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidvideodevices_p.h"
+
+#include <private/qcameradevice_p.h>
+
+#include <QtCore/QLoggingCategory>
+#include <QtCore/qcoreapplication.h>
+#include <QtCore/private/qandroidextras_p.h>
+#include <QtCore/qcoreapplication_platform.h>
+#include <QJniEnvironment>
+#include <jni.h>
+
+static Q_LOGGING_CATEGORY(qLCAndroidVideoDevices, "qt.multimedia.ffmpeg.android.videoDevices")
+
+Q_DECLARE_JNI_CLASS(QtVideoDeviceManager,
+ "org/qtproject/qt/android/multimedia/QtVideoDeviceManager");
+Q_DECLARE_JNI_TYPE(StringArray, "[Ljava/lang/String;")
+Q_DECLARE_JNI_CLASS(AndroidImageFormat, "android/graphics/ImageFormat");
+
+QCameraFormat createCameraFormat(int width, int height, int fpsMin, int fpsMax)
+{
+ QCameraFormatPrivate *format = new QCameraFormatPrivate();
+
+ format->resolution = { width, height };
+
+ format->minFrameRate = fpsMin;
+ format->maxFrameRate = fpsMax;
+
+ format->pixelFormat = QVideoFrameFormat::PixelFormat::Format_YUV420P;
+
+ return format->create();
+}
+
+QList<QCameraDevice> QAndroidVideoDevices::findVideoDevices()
+{
+ QList<QCameraDevice> devices;
+
+ QJniObject deviceManager(QtJniTypes::Traits<QtJniTypes::QtVideoDeviceManager>::className(),
+ QNativeInterface::QAndroidApplication::context());
+
+ if (!deviceManager.isValid()) {
+ qCWarning(qLCAndroidVideoDevices) << "Failed to connect to Qt Video Device Manager.";
+ return devices;
+ }
+
+ QJniObject cameraIdList = deviceManager.callMethod<QtJniTypes::StringArray>("getCameraIdList");
+
+ QJniEnvironment jniEnv;
+ int numCameras = jniEnv->GetArrayLength(cameraIdList.object<jarray>());
+ if (jniEnv.checkAndClearExceptions())
+ return devices;
+
+ for (int cameraIndex = 0; cameraIndex < numCameras; cameraIndex++) {
+
+ QJniObject cameraIdObject =
+ jniEnv->GetObjectArrayElement(cameraIdList.object<jobjectArray>(), cameraIndex);
+ if (jniEnv.checkAndClearExceptions())
+ continue;
+
+ jstring cameraId = cameraIdObject.object<jstring>();
+
+ QCameraDevicePrivate *info = new QCameraDevicePrivate;
+ info->id = cameraIdObject.toString().toUtf8();
+
+ info->orientation = deviceManager.callMethod<jint>("getSensorOrientation", cameraId);
+
+ int facing = deviceManager.callMethod<jint>("getLensFacing", cameraId);
+
+ const int LENS_FACING_FRONT = 0;
+ const int LENS_FACING_BACK = 1;
+ const int LENS_FACING_EXTERNAL = 2;
+
+ switch (facing) {
+ case LENS_FACING_EXTERNAL:
+ case LENS_FACING_BACK:
+ info->position = QCameraDevice::BackFace;
+ info->description = QString("Rear Camera: %1").arg(cameraIndex);
+ break;
+ case LENS_FACING_FRONT:
+ info->position = QCameraDevice::FrontFace;
+ info->description = QString("Front Camera: %1").arg(cameraIndex);
+ break;
+ }
+
+ QJniObject fpsRangesObject =
+ deviceManager.callMethod<QtJniTypes::StringArray>("getFpsRange", cameraId);
+ jobjectArray fpsRanges = fpsRangesObject.object<jobjectArray>();
+
+ int numRanges = jniEnv->GetArrayLength(fpsRanges);
+ if (jniEnv.checkAndClearExceptions())
+ continue;
+
+ int maxFps = 0, minFps = 0;
+
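+ // Pick the advertised FPS range with the highest maximum frame rate.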
+ for (int rangeIndex = 0; rangeIndex < numRanges; rangeIndex++) {
+
+ QJniObject rangeString = jniEnv->GetObjectArrayElement(fpsRanges, rangeIndex);
+ if (jniEnv.checkAndClearExceptions())
+ continue;
+
+ QString range = rangeString.toString();
+
+ range = range.remove("[");
+ range = range.remove("]");
+
+ auto split = range.split(",");
+
+ int min = split[0].toInt();
+ int max = split[1].toInt();
+
+ if (max > maxFps) {
+ maxFps = max;
+ minFps = min;
+ }
+ }
+
+ const static int imageFormat =
+ QJniObject::getStaticField<QtJniTypes::AndroidImageFormat, jint>("YUV_420_888");
+
+ QJniObject sizesObject = deviceManager.callMethod<QtJniTypes::StringArray>(
+ "getStreamConfigurationsSizes", cameraId, imageFormat);
+
+ jobjectArray streamSizes = sizesObject.object<jobjectArray>();
+ int numSizes = jniEnv->GetArrayLength(streamSizes);
+ if (jniEnv.checkAndClearExceptions())
+ continue;
+
+ for (int sizesIndex = 0; sizesIndex < numSizes; sizesIndex++) {
+
+ QJniObject sizeStringObject = jniEnv->GetObjectArrayElement(streamSizes, sizesIndex);
+ if (jniEnv.checkAndClearExceptions())
+ continue;
+
+ QString sizeString = sizeStringObject.toString();
+
+ auto split = sizeString.split("x");
+
+ int width = split[0].toInt();
+ int height = split[1].toInt();
+
+ info->videoFormats.append(createCameraFormat(width, height, minFps, maxFps));
+ }
+
+ devices.push_back(info->create());
+ }
+
+ return devices;
+}
diff --git a/src/plugins/multimedia/ffmpeg/qandroidvideodevices_p.h b/src/plugins/multimedia/ffmpeg/qandroidvideodevices_p.h
new file mode 100644
index 000000000..f89ea7f05
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qandroidvideodevices_p.h
@@ -0,0 +1,35 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDVIDEODEVICES_H
+#define QANDROIDVIDEODEVICES_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QObject>
+#include <private/qplatformvideodevices_p.h>
+
+class QAndroidVideoDevices : public QPlatformVideoDevices
+{
+ Q_OBJECT
+public:
+ QAndroidVideoDevices(QPlatformMediaIntegration *integration)
+ : QPlatformVideoDevices(integration), m_videoDevices(findVideoDevices()) { }
+
+ QList<QCameraDevice> videoDevices() const override { return m_videoDevices; }
+
+private:
+ QList<QCameraDevice> findVideoDevices();
+ QList<QCameraDevice> m_videoDevices;
+};
+
+#endif // QANDROIDVIDEODEVICES_H
diff --git a/src/plugins/multimedia/ffmpeg/qavfcamera.mm b/src/plugins/multimedia/ffmpeg/qavfcamera.mm
new file mode 100644
index 000000000..891c4b376
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qavfcamera.mm
@@ -0,0 +1,349 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include <qavfcamera_p.h>
+#include <qpointer.h>
+#include <qmediacapturesession.h>
+#include <private/qplatformmediacapture_p.h>
+#include "avfcamerautility_p.h"
+#include "qavfhelpers_p.h"
+#include "avfcameradebug_p.h"
+#include "qavfsamplebufferdelegate_p.h"
+#include <qvideosink.h>
+#include <rhi/qrhi.h>
+#include <QtCore/qcoreapplication.h>
+#include <QtCore/qpermissions.h>
+#define AVMediaType XAVMediaType
+#include "qffmpegvideobuffer_p.h"
+#include "qffmpegvideosink_p.h"
+extern "C" {
+#include <libavutil/hwcontext_videotoolbox.h>
+#include <libavutil/hwcontext.h>
+}
+#undef AVMediaType
+
+QT_BEGIN_NAMESPACE
+
+using namespace QFFmpeg;
+
+QAVFCamera::QAVFCamera(QCamera *parent)
+ : QAVFCameraBase(parent)
+{
+ m_captureSession = [[AVCaptureSession alloc] init];
+ m_sampleBufferDelegate = [[QAVFSampleBufferDelegate alloc]
+ initWithFrameHandler:[this](const QVideoFrame &frame) { syncHandleFrame(frame); }];
+}
+
+QAVFCamera::~QAVFCamera()
+{
+ [m_sampleBufferDelegate release];
+ [m_videoInput release];
+ [m_videoDataOutput release];
+ [m_captureSession release];
+}
+
+bool QAVFCamera::checkCameraPermission()
+{
+ const QCameraPermission permission;
+ const bool granted = qApp->checkPermission(permission) == Qt::PermissionStatus::Granted;
+ if (!granted)
+ qWarning() << "Access to camera not granted";
+
+ return granted;
+}
+
+void QAVFCamera::updateVideoInput()
+{
+ if (!checkCameraPermission())
+ return;
+
+ [m_captureSession beginConfiguration];
+
+ attachVideoInputDevice();
+
+ if (!m_videoDataOutput) {
+ m_videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
+
+ // Configure video output
+ m_delegateQueue = dispatch_queue_create("vf_queue", nullptr);
+ [m_videoDataOutput
+ setSampleBufferDelegate:m_sampleBufferDelegate
+ queue:m_delegateQueue];
+
+ [m_captureSession addOutput:m_videoDataOutput];
+ }
+ [m_captureSession commitConfiguration];
+ deviceOrientationChanged();
+}
+
+void QAVFCamera::deviceOrientationChanged(int angle)
+{
+ AVCaptureConnection *connection = [m_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
+ if (connection == nil || !m_videoDataOutput)
+ return;
+
+ if (!connection.supportsVideoOrientation)
+ return;
+
+ if (angle < 0)
+ angle = m_orientationHandler.currentOrientation();
+
+ AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
+ switch (angle) {
+ default:
+ break;
+ case 90:
+ orientation = AVCaptureVideoOrientationLandscapeRight;
+ break;
+ case 180:
+ // a 180 degree rotation keeps the previous orientation; nothing to do
+ return;
+ case 270:
+ orientation = AVCaptureVideoOrientationLandscapeLeft;
+ break;
+ }
+
+ connection.videoOrientation = orientation;
+}
+
+void QAVFCamera::attachVideoInputDevice()
+{
+ if (m_videoInput) {
+ [m_captureSession removeInput:m_videoInput];
+ [m_videoInput release];
+ m_videoInput = nullptr;
+ }
+
+ QByteArray deviceId = m_cameraDevice.id();
+ if (deviceId.isEmpty())
+ return;
+
+ AVCaptureDevice *videoDevice = [AVCaptureDevice deviceWithUniqueID:
+ [NSString stringWithUTF8String: deviceId.constData()]];
+
+ if (!videoDevice)
+ return;
+
+ m_videoInput = [AVCaptureDeviceInput
+ deviceInputWithDevice:videoDevice
+ error:nil];
+ if (m_videoInput && [m_captureSession canAddInput:m_videoInput]) {
+ [m_videoInput retain];
+ [m_captureSession addInput:m_videoInput];
+ } else {
+ qWarning() << "Failed to create video device input";
+ }
+}
+
+AVCaptureDevice *QAVFCamera::device() const
+{
+ return m_videoInput ? m_videoInput.device : nullptr;
+}
+
+bool QAVFCamera::isActive() const
+{
+ return m_active;
+}
+
+void QAVFCamera::setActive(bool active)
+{
+ if (m_active == active)
+ return;
+ if (!checkCameraPermission())
+ return;
+
+ m_active = active;
+
+ if (active) {
+ // According to the docs, the capture device must be locked before
+ // startRunning to prevent the format we set from being overridden by
+ // the session preset.
+ [m_videoInput.device lockForConfiguration:nil];
+ [m_captureSession startRunning];
+ [m_videoInput.device unlockForConfiguration];
+ } else {
+ [m_captureSession stopRunning];
+ }
+
+ emit activeChanged(active);
+}
+
+void QAVFCamera::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+ m_session = session ? session->captureSession() : nullptr;
+}
+
+void QAVFCamera::setCamera(const QCameraDevice &camera)
+{
+ if (m_cameraDevice == camera)
+ return;
+
+ m_cameraDevice = camera;
+
+ if (checkCameraPermission())
+ updateVideoInput();
+ setCameraFormat({});
+}
+
+bool QAVFCamera::setCameraFormat(const QCameraFormat &format)
+{
+ if (m_cameraFormat == format && !format.isNull())
+ return true;
+
+ if (!QAVFCameraBase::setCameraFormat(format))
+ return false;
+
+ updateCameraFormat();
+ return true;
+}
+
+void QAVFCamera::updateCameraFormat()
+{
+ m_framePixelFormat = QVideoFrameFormat::Format_Invalid;
+
+ AVCaptureDevice *captureDevice = device();
+ if (!captureDevice)
+ return;
+
+ AVCaptureDeviceFormat *newFormat = qt_convert_to_capture_device_format(
+ captureDevice, m_cameraFormat, &isCVFormatSupported);
+
+ if (!newFormat)
+ newFormat = qt_convert_to_capture_device_format(captureDevice, m_cameraFormat);
+
+ std::uint32_t cvPixelFormat = 0;
+ if (newFormat) {
+ qt_set_active_format(captureDevice, newFormat, false);
+ const auto captureDeviceCVFormat =
+ CMVideoFormatDescriptionGetCodecType(newFormat.formatDescription);
+ cvPixelFormat = setPixelFormat(m_cameraFormat.pixelFormat(), captureDeviceCVFormat);
+ if (captureDeviceCVFormat != cvPixelFormat) {
+ qCWarning(qLcCamera) << "Output CV format differs with capture device format!"
+ << cvPixelFormat << cvFormatToString(cvPixelFormat) << "vs"
+ << captureDeviceCVFormat
+ << cvFormatToString(captureDeviceCVFormat);
+
+ m_framePixelFormat = QAVFHelpers::fromCVPixelFormat(cvPixelFormat);
+ }
+ } else {
+ qWarning() << "Cannot find AVCaptureDeviceFormat; Did you use format from another camera?";
+ }
+
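+ // Map the CoreVideo pixel format to an FFmpeg pixel format; AV_PIX_FMT_NONE
+ // means VideoToolbox interop has no mapping for it.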
+ const AVPixelFormat avPixelFormat = av_map_videotoolbox_format_to_pixfmt(cvPixelFormat);
+
+ std::unique_ptr<HWAccel> hwAccel;
+
+ if (avPixelFormat == AV_PIX_FMT_NONE) {
+ qCWarning(qLcCamera) << "Videotoolbox doesn't support cvPixelFormat:" << cvPixelFormat
+ << cvFormatToString(cvPixelFormat)
+ << "Camera pix format:" << m_cameraFormat.pixelFormat();
+ } else {
+ hwAccel = HWAccel::create(AV_HWDEVICE_TYPE_VIDEOTOOLBOX);
+ qCDebug(qLcCamera) << "Create VIDEOTOOLBOX hw context" << hwAccel.get() << "for camera";
+ }
+
+ if (hwAccel) {
+ hwAccel->createFramesContext(avPixelFormat, adjustedResolution());
+ m_hwPixelFormat = hwAccel->hwFormat();
+ } else {
+ m_hwPixelFormat = AV_PIX_FMT_NONE;
+ }
+
+ [m_sampleBufferDelegate setHWAccel:std::move(hwAccel)];
+ [m_sampleBufferDelegate setVideoFormatFrameRate:m_cameraFormat.maxFrameRate()];
+}
+
+uint32_t QAVFCamera::setPixelFormat(QVideoFrameFormat::PixelFormat cameraPixelFormat,
+ uint32_t inputCvPixFormat)
+{
+ auto bestScore = MinAVScore;
+ NSNumber *bestFormat = nullptr;
+ for (NSNumber *cvPixFmtNumber in m_videoDataOutput.availableVideoCVPixelFormatTypes) {
+ auto cvPixFmt = [cvPixFmtNumber unsignedIntValue];
+ const auto pixFmt = QAVFHelpers::fromCVPixelFormat(cvPixFmt);
+ if (pixFmt == QVideoFrameFormat::Format_Invalid)
+ continue;
+
+ auto score = DefaultAVScore;
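+ // Prefer an exact CV format match, then a matching Qt pixel format.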
+ if (cvPixFmt == inputCvPixFormat)
+ score += 100;
+ if (pixFmt == cameraPixelFormat)
+ score += 10;
+
+ // This flag sets the priority between FFmpeg hw-frame support and an
+ // exact camera format match. It could be made configurable, e.g. via an
+ // environment variable.
+ constexpr bool ShouldSuppressNotSupportedByFFmpeg = false;
+
+ if (!isCVFormatSupported(cvPixFmt))
+ score -= ShouldSuppressNotSupportedByFFmpeg ? 100000 : 5;
+
+ if (score > bestScore) {
+ bestScore = score;
+ bestFormat = cvPixFmtNumber;
+ }
+ }
+
+ if (!bestFormat) {
+ qWarning() << "QCamera::setCameraFormat: availableVideoCVPixelFormatTypes empty";
+ return 0;
+ }
+
+ if (bestScore < DefaultAVScore)
+ qWarning() << "QCamera::setCameraFormat: Cannot find hw FFmpeg supported cv pix format";
+
+ NSDictionary *outputSettings = @{
+ (NSString *)kCVPixelBufferPixelFormatTypeKey : bestFormat,
+ (NSString *)kCVPixelBufferMetalCompatibilityKey : @true
+ };
+ m_videoDataOutput.videoSettings = outputSettings;
+
+ return [bestFormat unsignedIntValue];
+}
+
+QSize QAVFCamera::adjustedResolution() const
+{
+#ifdef Q_OS_MACOS
+ return m_cameraFormat.resolution();
+#else
+ // Check that we have matching dimensions.
+ QSize resolution = m_cameraFormat.resolution();
+ AVCaptureConnection *connection = [m_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
+ if (!connection.supportsVideoOrientation)
+ return resolution;
+
+ // The resolution may be portrait while the connection reports landscape,
+ // or vice versa; what the sample delegate reports depends on the
+ // videoOrientation set, so transpose when the two disagree.
+ const bool isPortraitOrientation = connection.videoOrientation == AVCaptureVideoOrientationPortrait;
+ const bool isPortraitResolution = resolution.height() > resolution.width();
+ if (isPortraitOrientation != isPortraitResolution)
+ resolution.transpose();
+
+ return resolution;
+#endif // Q_OS_MACOS
+}
+
+void QAVFCamera::syncHandleFrame(const QVideoFrame &frame)
+{
+ Q_EMIT newVideoFrame(frame);
+}
+
+std::optional<int> QAVFCamera::ffmpegHWPixelFormat() const
+{
+ return m_hwPixelFormat == AV_PIX_FMT_NONE ? std::optional<int>{} : m_hwPixelFormat;
+}
+
+int QAVFCamera::cameraPixelFormatScore(QVideoFrameFormat::PixelFormat pixelFormat,
+ QVideoFrameFormat::ColorRange colorRange) const
+{
+ auto cvFormat = QAVFHelpers::toCVPixelFormat(pixelFormat, colorRange);
+ return static_cast<int>(isCVFormatSupported(cvFormat));
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qavfcamera_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/qavfcamera_p.h b/src/plugins/multimedia/ffmpeg/qavfcamera_p.h
new file mode 100644
index 000000000..0c88c520c
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qavfcamera_p.h
@@ -0,0 +1,90 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QAVFCAMERA_H
+#define QAVFCAMERA_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qavfcamerabase_p.h"
+#include <private/qplatformmediaintegration_p.h>
+#include <private/qvideooutputorientationhandler_p.h>
+#define AVMediaType XAVMediaType
+#include "qffmpeghwaccel_p.h"
+#undef AVMediaType
+
+#include <qfilesystemwatcher.h>
+#include <qsocketnotifier.h>
+#include <qmutex.h>
+
+#include <dispatch/dispatch.h>
+
+Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureSession);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureDeviceInput);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureVideoDataOutput);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureDevice);
+Q_FORWARD_DECLARE_OBJC_CLASS(QAVFSampleBufferDelegate);
+
+QT_BEGIN_NAMESPACE
+
+class QFFmpegVideoSink;
+
+class QAVFCamera : public QAVFCameraBase
+{
+ Q_OBJECT
+
+public:
+ explicit QAVFCamera(QCamera *parent);
+ ~QAVFCamera();
+
+ bool isActive() const override;
+ void setActive(bool active) override;
+
+ void setCaptureSession(QPlatformMediaCaptureSession *) override;
+
+ void setCamera(const QCameraDevice &camera) override;
+ bool setCameraFormat(const QCameraFormat &format) override;
+
+ void syncHandleFrame(const QVideoFrame &frame);
+
+ void deviceOrientationChanged(int angle = -1);
+
+ std::optional<int> ffmpegHWPixelFormat() const override;
+
+ int cameraPixelFormatScore(QVideoFrameFormat::PixelFormat pixelFmt,
+ QVideoFrameFormat::ColorRange colorRange) const override;
+
+private:
+ bool checkCameraPermission();
+ void updateCameraFormat();
+ void updateVideoInput();
+ void attachVideoInputDevice();
+ uint32_t setPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat, uint32_t inputCvPixFormat);
+ QSize adjustedResolution() const;
+
+ AVCaptureDevice *device() const;
+
+ QMediaCaptureSession *m_session = nullptr;
+ AVCaptureSession *m_captureSession = nullptr;
+ AVCaptureDeviceInput *m_videoInput = nullptr;
+ AVCaptureVideoDataOutput *m_videoDataOutput = nullptr;
+ QAVFSampleBufferDelegate *m_sampleBufferDelegate = nullptr;
+ dispatch_queue_t m_delegateQueue;
+ QVideoOutputOrientationHandler m_orientationHandler;
+ AVPixelFormat m_hwPixelFormat = AV_PIX_FMT_NONE;
+};
+
+QT_END_NAMESPACE
+
+
+#endif // QAVFCAMERA_H
+
diff --git a/src/plugins/multimedia/ffmpeg/qavfsamplebufferdelegate.mm b/src/plugins/multimedia/ffmpeg/qavfsamplebufferdelegate.mm
new file mode 100644
index 000000000..8a4c77a9e
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qavfsamplebufferdelegate.mm
@@ -0,0 +1,224 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qavfsamplebufferdelegate_p.h"
+
+#define AVMediaType XAVMediaType
+
+#include "qffmpeghwaccel_p.h"
+#include "qavfhelpers_p.h"
+#include "qffmpegvideobuffer_p.h"
+#include "private/qvideoframe_p.h"
+
+#undef AVMediaType
+
+#include <optional>
+
+QT_USE_NAMESPACE
+
+static void releaseHwFrame(void * /*opaque*/, uint8_t *data)
+{
+ CVPixelBufferRelease(CVPixelBufferRef(data));
+}
+
+namespace {
+
+class CVImageVideoBuffer : public QAbstractVideoBuffer
+{
+public:
+ CVImageVideoBuffer(CVImageBufferRef imageBuffer) : m_buffer(imageBuffer)
+ {
+ CVPixelBufferRetain(imageBuffer);
+ }
+
+ ~CVImageVideoBuffer()
+ {
+ CVImageVideoBuffer::unmap();
+ CVPixelBufferRelease(m_buffer);
+ }
+
+ CVImageVideoBuffer::MapData map(QtVideo::MapMode mode) override
+ {
+ MapData mapData;
+
+ if (m_mode == QtVideo::MapMode::NotMapped) {
+ CVPixelBufferLockBaseAddress(
+ m_buffer, mode == QtVideo::MapMode::ReadOnly ? kCVPixelBufferLock_ReadOnly : 0);
+ m_mode = mode;
+ }
+
+ mapData.planeCount = CVPixelBufferGetPlaneCount(m_buffer);
+ Q_ASSERT(mapData.planeCount <= 3);
+
+ if (!mapData.planeCount) {
+ // single plane
+ mapData.bytesPerLine[0] = CVPixelBufferGetBytesPerRow(m_buffer);
+ mapData.data[0] = static_cast<uchar *>(CVPixelBufferGetBaseAddress(m_buffer));
+ mapData.dataSize[0] = CVPixelBufferGetDataSize(m_buffer);
+ mapData.planeCount = mapData.data[0] ? 1 : 0;
+ return mapData;
+ }
+
+ // For a bi-planar or tri-planar format we have to set the parameters correctly:
+ for (int i = 0; i < mapData.planeCount; ++i) {
+ mapData.bytesPerLine[i] = CVPixelBufferGetBytesPerRowOfPlane(m_buffer, i);
+ mapData.dataSize[i] = mapData.bytesPerLine[i] * CVPixelBufferGetHeightOfPlane(m_buffer, i);
+ mapData.data[i] = static_cast<uchar *>(CVPixelBufferGetBaseAddressOfPlane(m_buffer, i));
+ }
+
+ return mapData;
+ }
+
+ void unmap() override
+ {
+ if (m_mode != QtVideo::MapMode::NotMapped) {
+ CVPixelBufferUnlockBaseAddress(
+ m_buffer, m_mode == QtVideo::MapMode::ReadOnly ? kCVPixelBufferLock_ReadOnly : 0);
+ m_mode = QtVideo::MapMode::NotMapped;
+ }
+ }
+
+ QVideoFrameFormat format() const override { return {}; }
+
+private:
+ CVImageBufferRef m_buffer;
+ QtVideo::MapMode m_mode = QtVideo::MapMode::NotMapped;
+};
+
+}
+
+// Make sure this is compatible with the layout used in ffmpeg's hwcontext_videotoolbox
+static QFFmpeg::AVFrameUPtr allocHWFrame(AVBufferRef *hwContext, const CVPixelBufferRef &pixbuf)
+{
+ AVHWFramesContext *ctx = (AVHWFramesContext *)hwContext->data;
+ auto frame = QFFmpeg::makeAVFrame();
+ frame->hw_frames_ctx = av_buffer_ref(hwContext);
+ frame->extended_data = frame->data;
+
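+ // For AV_PIX_FMT_VIDEOTOOLBOX frames, FFmpeg expects the CVPixelBufferRef
+ // in data[3]; buf[0] owns a reference so releaseHwFrame runs when the
+ // AVFrame is freed.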
+ frame->buf[0] = av_buffer_create((uint8_t *)pixbuf, 1, releaseHwFrame, NULL, 0);
+ frame->data[3] = (uint8_t *)pixbuf;
+ CVPixelBufferRetain(pixbuf);
+ frame->width = ctx->width;
+ frame->height = ctx->height;
+ frame->format = AV_PIX_FMT_VIDEOTOOLBOX;
+ if (frame->width != (int)CVPixelBufferGetWidth(pixbuf)
+ || frame->height != (int)CVPixelBufferGetHeight(pixbuf)) {
+
+ // This can happen while changing camera format
+ return nullptr;
+ }
+ return frame;
+}
+
+@implementation QAVFSampleBufferDelegate {
+@private
+ std::function<void(const QVideoFrame &)> frameHandler;
+ AVBufferRef *hwFramesContext;
+ std::unique_ptr<QFFmpeg::HWAccel> m_accel;
+ qint64 startTime;
+ std::optional<qint64> baseTime;
+ qreal frameRate;
+}
+
+static QVideoFrame createHwVideoFrame(QAVFSampleBufferDelegate &delegate,
+ CVImageBufferRef imageBuffer, QVideoFrameFormat format)
+{
+ Q_ASSERT(delegate.baseTime);
+
+ if (!delegate.m_accel)
+ return {};
+
+ auto avFrame = allocHWFrame(delegate.m_accel->hwFramesContextAsBuffer(), imageBuffer);
+ if (!avFrame)
+ return {};
+
+#ifdef USE_SW_FRAMES
+ {
+ auto swFrame = QFFmpeg::makeAVFrame();
+ /* retrieve data from GPU to CPU */
+ const int ret = av_hwframe_transfer_data(swFrame.get(), avFrame.get(), 0);
+ if (ret < 0) {
+ qWarning() << "Error transferring the data to system memory:" << ret;
+ } else {
+ avFrame = std::move(swFrame);
+ }
+ }
+#endif
+
+ avFrame->pts = delegate.startTime - *delegate.baseTime;
+
+ return QVideoFramePrivate::createFrame(std::make_unique<QFFmpegVideoBuffer>(std::move(avFrame)),
+ format);
+}
+
+- (instancetype)initWithFrameHandler:(std::function<void(const QVideoFrame &)>)handler
+{
+ if (!(self = [super init]))
+ return nil;
+
+ Q_ASSERT(handler);
+
+ frameHandler = std::move(handler);
+ hwFramesContext = nullptr;
+ startTime = 0;
+ frameRate = 0.;
+ return self;
+}
+
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ fromConnection:(AVCaptureConnection *)connection
+{
+ Q_UNUSED(connection);
+ Q_UNUSED(captureOutput);
+
+ // NB: on iOS captureOutput/connection can be nil (when recording a video -
+ // avfmediaassetwriter).
+
+ CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+
+ if (!imageBuffer) {
+ qWarning() << "Cannot get image buffer from sample buffer";
+ return;
+ }
+
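+ // Convert the CMTime presentation timestamp to microseconds; a zero
+ // timescale would divide by zero, hence the guard.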
+ const CMTime time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+ const qint64 frameTime = time.timescale ? time.value * 1000000 / time.timescale : 0;
+ if (!baseTime) {
+ baseTime = frameTime;
+ startTime = frameTime;
+ }
+
+ QVideoFrameFormat format = QAVFHelpers::videoFormatForImageBuffer(imageBuffer);
+ if (!format.isValid()) {
+ qWarning() << "Cannot get get video format for image buffer"
+ << CVPixelBufferGetWidth(imageBuffer) << 'x'
+ << CVPixelBufferGetHeight(imageBuffer);
+ return;
+ }
+
+ format.setStreamFrameRate(frameRate);
+
+ auto frame = createHwVideoFrame(*self, imageBuffer, format);
+ if (!frame.isValid())
+ frame = QVideoFramePrivate::createFrame(std::make_unique<CVImageVideoBuffer>(imageBuffer),
+ std::move(format));
+
+ frame.setStartTime(startTime - *baseTime);
+ frame.setEndTime(frameTime - *baseTime);
+ startTime = frameTime;
+
+ frameHandler(frame);
+}
+
+- (void)setHWAccel:(std::unique_ptr<QFFmpeg::HWAccel> &&)accel
+{
+ m_accel = std::move(accel);
+}
+
+- (void)setVideoFormatFrameRate:(qreal)rate
+{
+ frameRate = rate;
+}
+
+@end
diff --git a/src/plugins/multimedia/ffmpeg/qavfsamplebufferdelegate_p.h b/src/plugins/multimedia/ffmpeg/qavfsamplebufferdelegate_p.h
new file mode 100644
index 000000000..5a7e16c71
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qavfsamplebufferdelegate_p.h
@@ -0,0 +1,51 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QAVFSAMPLEBUFFERDELEGATE_P_H
+#define QAVFSAMPLEBUFFERDELEGATE_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#import <AVFoundation/AVFoundation.h>
+#import <CoreVideo/CoreVideo.h>
+
+#include <qtconfigmacros.h>
+#include <qtypes.h>
+
+#include <memory>
+#include <functional>
+
+QT_BEGIN_NAMESPACE
+
+class QAVSampleBufferDelegateFrameHandler;
+class QVideoFrame;
+namespace QFFmpeg {
+class HWAccel;
+}
+
+QT_END_NAMESPACE
+
+@interface QAVFSampleBufferDelegate : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
+
+- (instancetype)initWithFrameHandler:(std::function<void(const QVideoFrame &)>)handler;
+
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ fromConnection:(AVCaptureConnection *)connection;
+
+- (void)setHWAccel:(std::unique_ptr<QT_PREPEND_NAMESPACE(QFFmpeg::HWAccel)> &&)accel;
+
+- (void)setVideoFormatFrameRate:(qreal)frameRate;
+
+@end
+
+#endif
diff --git a/src/plugins/multimedia/ffmpeg/qavfscreencapture.mm b/src/plugins/multimedia/ffmpeg/qavfscreencapture.mm
new file mode 100644
index 000000000..715dea09c
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qavfscreencapture.mm
@@ -0,0 +1,201 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qavfscreencapture_p.h"
+#include "qavfsamplebufferdelegate_p.h"
+#include "qffmpegsurfacecapturegrabber_p.h"
+
+#include <qscreen.h>
+
+#define AVMediaType XAVMediaType
+#include "qffmpeghwaccel_p.h"
+
+extern "C" {
+#include <libavutil/hwcontext_videotoolbox.h>
+#include <libavutil/hwcontext.h>
+}
+#undef AVMediaType
+
+#include <AppKit/NSScreen.h>
+
+#include <dispatch/dispatch.h>
+
+namespace {
+
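+ // 32BGRA is used as the capture format since it is broadly supported and
+ // maps to an FFmpeg pixel format via av_map_videotoolbox_format_to_pixfmt.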
+const auto DefaultCVPixelFormat = kCVPixelFormatType_32BGRA;
+
+CGDirectDisplayID findDisplayByName(const QString &name)
+{
+ for (NSScreen *screen in NSScreen.screens) {
+ if (name == QString::fromNSString(screen.localizedName))
+ return [screen.deviceDescription[@"NSScreenNumber"] unsignedIntValue];
+ }
+ return kCGNullDirectDisplay;
+}
+}
+
+QT_BEGIN_NAMESPACE
+
+QAVFScreenCapture::QAVFScreenCapture() : QPlatformSurfaceCapture(ScreenSource{})
+{
+ CGRequestScreenCaptureAccess();
+}
+
+QAVFScreenCapture::~QAVFScreenCapture()
+{
+ resetCapture();
+}
+
+bool QAVFScreenCapture::setActiveInternal(bool active)
+{
+ if (active) {
+ if (!CGPreflightScreenCaptureAccess()) {
+ updateError(CaptureFailed, QLatin1String("Permissions denied"));
+ return false;
+ }
+
+ auto screen = source<ScreenSource>();
+
+ if (!checkScreenWithError(screen))
+ return false;
+
+ return initScreenCapture(screen);
+ } else {
+ resetCapture();
+ }
+
+ return true;
+}
+
+void QAVFScreenCapture::onNewFrame(const QVideoFrame &frame)
+{
+ // The format is only ever written from one thread, so the read-only
+ // comparison without a mutex is thread-safe
+ if (!m_format || m_format != frame.surfaceFormat()) {
+ QMutexLocker<QMutex> locker(&m_formatMutex);
+
+ m_format = frame.surfaceFormat();
+
+ locker.unlock();
+
+ m_waitForFormat.notify_one();
+ }
+
+ emit newVideoFrame(frame);
+}
+
+QVideoFrameFormat QAVFScreenCapture::frameFormat() const
+{
+ if (!m_grabber)
+ return {};
+
+ QMutexLocker<QMutex> locker(&m_formatMutex);
+ while (!m_format)
+ m_waitForFormat.wait(&m_formatMutex);
+ return *m_format;
+}
+
+std::optional<int> QAVFScreenCapture::ffmpegHWPixelFormat() const
+{
+ return AV_PIX_FMT_VIDEOTOOLBOX;
+}
+
+class QAVFScreenCapture::Grabber
+{
+public:
+ Grabber(QAVFScreenCapture &capture, QScreen *screen, CGDirectDisplayID screenID,
+ std::unique_ptr<QFFmpeg::HWAccel> hwAccel)
+ {
+ m_captureSession = [[AVCaptureSession alloc] init];
+
+ m_sampleBufferDelegate = [[QAVFSampleBufferDelegate alloc]
+ initWithFrameHandler:[&capture](const QVideoFrame &frame) {
+ capture.onNewFrame(frame);
+ }];
+
+ m_videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
+
+ NSDictionary *videoSettings = [NSDictionary
+ dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:DefaultCVPixelFormat],
+ kCVPixelBufferPixelFormatTypeKey, nil];
+
+ [m_videoDataOutput setVideoSettings:videoSettings];
+ [m_videoDataOutput setAlwaysDiscardsLateVideoFrames:true];
+
+ // Configure video output
+ m_dispatchQueue = dispatch_queue_create("vf_queue", nullptr);
+ [m_videoDataOutput setSampleBufferDelegate:m_sampleBufferDelegate queue:m_dispatchQueue];
+
+ [m_captureSession addOutput:m_videoDataOutput];
+
+ [m_sampleBufferDelegate setHWAccel:std::move(hwAccel)];
+
+ const auto frameRate = std::min(screen->refreshRate(), MaxScreenCaptureFrameRate);
+ [m_sampleBufferDelegate setVideoFormatFrameRate:frameRate];
+
+ m_screenInput = [[AVCaptureScreenInput alloc] initWithDisplayID:screenID];
+ [m_screenInput setMinFrameDuration:CMTimeMake(1, static_cast<int32_t>(frameRate))];
+ [m_captureSession addInput:m_screenInput];
+
+ [m_captureSession startRunning];
+ }
+
+ ~Grabber()
+ {
+ if (m_captureSession)
+ [m_captureSession stopRunning];
+
+ if (m_dispatchQueue)
+ dispatch_release(m_dispatchQueue);
+
+ [m_sampleBufferDelegate release];
+ [m_screenInput release];
+ [m_videoDataOutput release];
+ [m_captureSession release];
+ }
+
+private:
+ AVCaptureSession *m_captureSession = nullptr;
+ AVCaptureScreenInput *m_screenInput = nullptr;
+ AVCaptureVideoDataOutput *m_videoDataOutput = nullptr;
+ QAVFSampleBufferDelegate *m_sampleBufferDelegate = nullptr;
+ dispatch_queue_t m_dispatchQueue = nullptr;
+};
+
+bool QAVFScreenCapture::initScreenCapture(QScreen *screen)
+{
+ const auto screenID = findDisplayByName(screen->name());
+
+ if (screenID == kCGNullDirectDisplay) {
+ updateError(InternalError, QLatin1String("Screen exists but couldn't been found by name"));
+ return false;
+ }
+
+ auto hwAccel = QFFmpeg::HWAccel::create(AV_HWDEVICE_TYPE_VIDEOTOOLBOX);
+
+ if (!hwAccel) {
+ updateError(CaptureFailed, QLatin1String("Couldn't create videotoolbox hw acceleration"));
+ return false;
+ }
+
+ hwAccel->createFramesContext(av_map_videotoolbox_format_to_pixfmt(DefaultCVPixelFormat),
+ screen->size() * screen->devicePixelRatio());
+
+ if (!hwAccel->hwFramesContextAsBuffer()) {
+ updateError(CaptureFailed, QLatin1String("Couldn't create hw frames context"));
+ return false;
+ }
+
+ m_grabber = std::make_unique<Grabber>(*this, screen, screenID, std::move(hwAccel));
+ return true;
+}
+
+void QAVFScreenCapture::resetCapture()
+{
+ m_grabber.reset();
+ m_format = {};
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qavfscreencapture_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/qavfscreencapture_p.h b/src/plugins/multimedia/ffmpeg/qavfscreencapture_p.h
new file mode 100644
index 000000000..b95aabcf3
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qavfscreencapture_p.h
@@ -0,0 +1,60 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QAVFSCREENCAPTURE_H
+#define QAVFSCREENCAPTURE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "private/qplatformsurfacecapture_p.h"
+#include <qmutex.h>
+#include <qwaitcondition.h>
+
+QT_BEGIN_NAMESPACE
+
+class QFFmpegVideoSink;
+
+class QAVFScreenCapture : public QPlatformSurfaceCapture
+{
+ Q_OBJECT
+
+ class Grabber;
+
+public:
+ explicit QAVFScreenCapture();
+ ~QAVFScreenCapture() override;
+
+ QVideoFrameFormat frameFormat() const override;
+
+ std::optional<int> ffmpegHWPixelFormat() const override;
+
+protected:
+ bool setActiveInternal(bool active) override;
+
+private:
+ void onNewFrame(const QVideoFrame &frame);
+
+ bool initScreenCapture(QScreen *screen);
+
+ void resetCapture();
+
+private:
+ std::optional<QVideoFrameFormat> m_format;
+ mutable QMutex m_formatMutex;
+ mutable QWaitCondition m_waitForFormat;
+
+ std::unique_ptr<Grabber> m_grabber;
+};
+
+QT_END_NAMESPACE
+
+#endif // QAVFSCREENCAPTURE_H
diff --git a/src/plugins/multimedia/ffmpeg/qcgcapturablewindows.mm b/src/plugins/multimedia/ffmpeg/qcgcapturablewindows.mm
new file mode 100644
index 000000000..eb2035208
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qcgcapturablewindows.mm
@@ -0,0 +1,48 @@
+// Copyright (C) 2023 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qcgcapturablewindows_p.h"
+#include "private/qcapturablewindow_p.h"
+#include "QtCore/private/qcore_mac_p.h"
+
+#include <AppKit/NSWindow.h>
+
+QT_BEGIN_NAMESPACE
+
+QList<QCapturableWindow> QCGCapturableWindows::windows() const
+{
+ QList<QCapturableWindow> result;
+ QCFType<CFArrayRef> windowList(
+ CGWindowListCopyWindowInfo(kCGWindowListOptionOnScreenOnly, kCGNullWindowID));
+
+ // Iterate through the window dictionaries
+ CFIndex count = CFArrayGetCount(windowList);
+ for (CFIndex i = 0; i < count; ++i) {
+ auto windowInfo = (CFDictionaryRef)CFArrayGetValueAtIndex(windowList, i);
+ auto windowNumber = (CFNumberRef)CFDictionaryGetValue(windowInfo, kCGWindowNumber);
+ auto windowName = (CFStringRef)CFDictionaryGetValue(windowInfo, kCGWindowName);
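+        // Note: kCGWindowName is typically null unless the app has been granted
+        // screen recording permission (macOS 10.15+); the description then stays empty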
+
+ CGWindowID windowId = 0;
+ static_assert(sizeof(windowId) == 4,
+ "CGWindowID size is not compatible with kCFNumberSInt32Type");
+ CFNumberGetValue(windowNumber, kCFNumberSInt32Type, &windowId);
+
+ auto windowData = std::make_unique<QCapturableWindowPrivate>();
+ windowData->id = static_cast<QCapturableWindowPrivate::Id>(windowId);
+ if (windowName)
+ windowData->description = QString::fromCFString(windowName);
+
+ result.push_back(windowData.release()->create());
+ }
+
+ return result;
+}
+
+bool QCGCapturableWindows::isWindowValid(const QCapturableWindowPrivate &window) const
+{
+ QCFType<CFArrayRef> windowList(
+ CGWindowListCreate(kCGWindowListOptionIncludingWindow, window.id));
+ return CFArrayGetCount(windowList) > 0;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qcgcapturablewindows_p.h b/src/plugins/multimedia/ffmpeg/qcgcapturablewindows_p.h
new file mode 100644
index 000000000..6f779ae0d
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qcgcapturablewindows_p.h
@@ -0,0 +1,32 @@
+// Copyright (C) 2023 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QCGCAPTURABLEWINDOWS_P_H
+#define QCGCAPTURABLEWINDOWS_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "private/qplatformcapturablewindows_p.h"
+
+QT_BEGIN_NAMESPACE
+
+class QCGCapturableWindows : public QPlatformCapturableWindows
+{
+public:
+ QList<QCapturableWindow> windows() const override;
+
+ bool isWindowValid(const QCapturableWindowPrivate &window) const override;
+};
+
+QT_END_NAMESPACE
+
+#endif // QCGCAPTURABLEWINDOWS_P_H
diff --git a/src/plugins/multimedia/ffmpeg/qcgwindowcapture.mm b/src/plugins/multimedia/ffmpeg/qcgwindowcapture.mm
new file mode 100644
index 000000000..6fa2f620f
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qcgwindowcapture.mm
@@ -0,0 +1,203 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qabstractvideobuffer.h"
+
+#include "qcgwindowcapture_p.h"
+#include "private/qcapturablewindow_p.h"
+#include "qffmpegsurfacecapturegrabber_p.h"
+#include "private/qvideoframe_p.h"
+
+#include "qscreen.h"
+#include "qguiapplication.h"
+#include <qmutex.h>
+#include <qwaitcondition.h>
+
+#include <ApplicationServices/ApplicationServices.h>
+#include <IOKit/graphics/IOGraphicsLib.h>
+#include <AppKit/NSScreen.h>
+#include <AppKit/NSApplication.h>
+#include <AppKit/NSWindow.h>
+
+namespace {
+
+std::optional<qreal> frameRateForWindow(CGWindowID /*wid*/)
+{
+ // TODO: detect the frame rate
+    // if (window && window.screen) {
+    //     CGDirectDisplayID displayID =
+    //         [window.screen.deviceDescription[@"NSScreenNumber"] unsignedIntValue];
+    //     const auto displayRefreshRate =
+    //         CGDisplayModeGetRefreshRate(CGDisplayCopyDisplayMode(displayID));
+    //     if (displayRefreshRate > 0 && displayRefreshRate < frameRate)
+    //         frameRate = displayRefreshRate;
+    // }
+
+ return {};
+}
+
+}
+
+QT_BEGIN_NAMESPACE
+
+class QCGImageVideoBuffer : public QAbstractVideoBuffer
+{
+public:
+ QCGImageVideoBuffer(CGImageRef image)
+ {
+ auto provider = CGImageGetDataProvider(image);
+ m_data = CGDataProviderCopyData(provider);
+ m_bytesPerLine = CGImageGetBytesPerRow(image);
+ }
+
+ ~QCGImageVideoBuffer() override { CFRelease(m_data); }
+
+ MapData map(QtVideo::MapMode mode) override
+ {
+ MapData mapData;
+ if (m_mapMode == QtVideo::MapMode::NotMapped) {
+ m_mapMode = mode;
+
+ mapData.planeCount = 1;
+ mapData.bytesPerLine[0] = static_cast<int>(m_bytesPerLine);
+ mapData.data[0] = (uchar *)CFDataGetBytePtr(m_data);
+ mapData.dataSize[0] = static_cast<int>(CFDataGetLength(m_data));
+ }
+
+ return mapData;
+ }
+
+ void unmap() override { m_mapMode = QtVideo::MapMode::NotMapped; }
+
+ QVideoFrameFormat format() const override { return {}; }
+
+private:
+ QtVideo::MapMode m_mapMode = QtVideo::MapMode::NotMapped;
+ CFDataRef m_data;
+ size_t m_bytesPerLine = 0;
+};
+
+class QCGWindowCapture::Grabber : public QFFmpegSurfaceCaptureGrabber
+{
+public:
+ Grabber(QCGWindowCapture &capture, CGWindowID wid) : m_capture(capture), m_wid(wid)
+ {
+ addFrameCallback(*this, &Grabber::onNewFrame);
+ connect(this, &Grabber::errorUpdated, &capture, &QCGWindowCapture::updateError);
+
+ if (auto screen = QGuiApplication::primaryScreen())
+ setFrameRate(screen->refreshRate());
+
+ start();
+ }
+
+ ~Grabber() override { stop(); }
+
+ QVideoFrameFormat frameFormat() const
+ {
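+        // Blocks the caller until the grabber thread has delivered the first frame
+        // and published its format in onNewFrame()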
+ QMutexLocker<QMutex> locker(&m_formatMutex);
+ while (!m_format)
+ m_waitForFormat.wait(&m_formatMutex);
+ return *m_format;
+ }
+
+protected:
+ QVideoFrame grabFrame() override
+ {
+ if (auto rate = frameRateForWindow(m_wid))
+ setFrameRate(*rate);
+
+ auto imageRef = CGWindowListCreateImage(CGRectNull, kCGWindowListOptionIncludingWindow,
+ m_wid, kCGWindowImageBoundsIgnoreFraming);
+ if (!imageRef) {
+ updateError(QPlatformSurfaceCapture::CaptureFailed,
+                        QLatin1String("Cannot create image for window"));
+ return {};
+ }
+
+ auto imageDeleter = qScopeGuard([imageRef]() { CGImageRelease(imageRef); });
+
+ if (CGImageGetBitsPerPixel(imageRef) != 32
+ || CGImageGetPixelFormatInfo(imageRef) != kCGImagePixelFormatPacked
+ || CGImageGetByteOrderInfo(imageRef) != kCGImageByteOrder32Little) {
+ qWarning() << "Unexpected image format. PixelFormatInfo:"
+ << CGImageGetPixelFormatInfo(imageRef)
+ << "BitsPerPixel:" << CGImageGetBitsPerPixel(imageRef) << "AlphaInfo"
+ << CGImageGetAlphaInfo(imageRef)
+ << "ByteOrderInfo:" << CGImageGetByteOrderInfo(imageRef);
+
+ updateError(QPlatformSurfaceCapture::CapturingNotSupported,
+                        QLatin1String("Unsupported pixel format"));
+ return {};
+ }
+
+ QVideoFrameFormat format(QSize(CGImageGetWidth(imageRef), CGImageGetHeight(imageRef)),
+ QVideoFrameFormat::Format_BGRA8888);
+ format.setStreamFrameRate(frameRate());
+
+ return QVideoFramePrivate::createFrame(std::make_unique<QCGImageVideoBuffer>(imageRef),
+ std::move(format));
+ }
+
+ void onNewFrame(QVideoFrame frame)
+ {
+        // Since the format is only ever written from one thread, comparing it
+        // here without holding the mutex is thread-safe
+ if (!m_format || m_format != frame.surfaceFormat()) {
+ QMutexLocker<QMutex> locker(&m_formatMutex);
+
+ m_format = frame.surfaceFormat();
+
+ locker.unlock();
+
+ m_waitForFormat.notify_one();
+ }
+
+ emit m_capture.newVideoFrame(frame);
+ }
+
+private:
+ QCGWindowCapture &m_capture;
+ std::optional<QVideoFrameFormat> m_format;
+ mutable QMutex m_formatMutex;
+ mutable QWaitCondition m_waitForFormat;
+ CGWindowID m_wid;
+};
+
+QCGWindowCapture::QCGWindowCapture() : QPlatformSurfaceCapture(WindowSource{})
+{
+ CGRequestScreenCaptureAccess();
+}
+
+QCGWindowCapture::~QCGWindowCapture() = default;
+
+bool QCGWindowCapture::setActiveInternal(bool active)
+{
+ if (active) {
+ if (!CGPreflightScreenCaptureAccess()) {
+ updateError(QPlatformSurfaceCapture::CaptureFailed,
+ QLatin1String("Permissions denied"));
+ return false;
+ }
+
+ auto window = source<WindowSource>();
+
+ auto handle = QCapturableWindowPrivate::handle(window);
+ if (!handle || !handle->id)
+ updateError(QPlatformSurfaceCapture::NotFound, QLatin1String("Invalid window"));
+ else
+ m_grabber = std::make_unique<Grabber>(*this, handle->id);
+
+ } else {
+ m_grabber.reset();
+ }
+
+ return active == static_cast<bool>(m_grabber);
+}
+
+QVideoFrameFormat QCGWindowCapture::frameFormat() const
+{
+ return m_grabber ? m_grabber->frameFormat() : QVideoFrameFormat();
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qcgwindowcapture_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/qcgwindowcapture_p.h b/src/plugins/multimedia/ffmpeg/qcgwindowcapture_p.h
new file mode 100644
index 000000000..796c01ab3
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qcgwindowcapture_p.h
@@ -0,0 +1,43 @@
+// Copyright (C) 2023 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QCGWINDOWCAPTURE_H
+#define QCGWINDOWCAPTURE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "private/qplatformsurfacecapture_p.h"
+
+QT_BEGIN_NAMESPACE
+
+class QCGWindowCapture : public QPlatformSurfaceCapture
+{
+ Q_OBJECT
+
+ class Grabber;
+
+public:
+ explicit QCGWindowCapture();
+ ~QCGWindowCapture() override;
+
+ QVideoFrameFormat frameFormat() const override;
+
+protected:
+ bool setActiveInternal(bool active) override;
+
+private:
+ std::unique_ptr<Grabber> m_grabber;
+};
+
+QT_END_NAMESPACE
+
+#endif // QCGWINDOWCAPTURE_H
diff --git a/src/plugins/multimedia/ffmpeg/qeglfsscreencapture.cpp b/src/plugins/multimedia/ffmpeg/qeglfsscreencapture.cpp
new file mode 100644
index 000000000..871cafd4f
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qeglfsscreencapture.cpp
@@ -0,0 +1,180 @@
+// Copyright (C) 2023 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qeglfsscreencapture_p.h"
+
+#include "qffmpegsurfacecapturegrabber_p.h"
+#include "qguiapplication.h"
+#include "qopenglvideobuffer_p.h"
+#include "private/qimagevideobuffer_p.h"
+#include "private/qvideoframe_p.h"
+
+#include <QtOpenGL/private/qopenglcompositor_p.h>
+#include <QtOpenGL/private/qopenglframebufferobject_p.h>
+
+#include <QtQuick/qquickwindow.h>
+
+QT_BEGIN_NAMESPACE
+
+class QEglfsScreenCapture::Grabber : public QFFmpegSurfaceCaptureGrabber
+{
+public:
+ Grabber(QEglfsScreenCapture &screenCapture, QScreen *screen)
+ : QFFmpegSurfaceCaptureGrabber(QFFmpegSurfaceCaptureGrabber::UseCurrentThread)
+ {
+ addFrameCallback(screenCapture, &QEglfsScreenCapture::newVideoFrame);
+ connect(this, &Grabber::errorUpdated, &screenCapture, &QEglfsScreenCapture::updateError);
+ // Limit frame rate to 30 fps for performance reasons,
+ // to be reviewed at the next optimization round
+ setFrameRate(std::min(screen->refreshRate(), 30.0));
+ }
+
+ ~Grabber() override { stop(); }
+
+ QVideoFrameFormat format() { return m_format; }
+
+protected:
+ QVideoFrame grabFrame() override
+ {
+ auto nativeSize = QOpenGLCompositor::instance()->nativeTargetGeometry().size();
+ auto fbo = std::make_unique<QOpenGLFramebufferObject>(nativeSize);
+
+ if (!QOpenGLCompositor::instance()->grabToFrameBufferObject(
+ fbo.get(), QOpenGLCompositor::NotFlipped)) {
+ updateError(Error::InternalError, QLatin1String("Couldn't grab to framebuffer object"));
+ return {};
+ }
+
+ if (!fbo->isValid()) {
+ updateError(Error::InternalError, QLatin1String("Framebuffer object invalid"));
+ return {};
+ }
+
+ auto videoBuffer = std::make_unique<QOpenGLVideoBuffer>(std::move(fbo));
+
+ if (!m_format.isValid()) {
+ auto image = videoBuffer->ensureImageBuffer().underlyingImage();
+ m_format = { image.size(), QVideoFrameFormat::pixelFormatFromImageFormat(image.format()) };
+ m_format.setStreamFrameRate(frameRate());
+ }
+
+ return QVideoFramePrivate::createFrame(std::move(videoBuffer), m_format);
+ }
+
+ QVideoFrameFormat m_format;
+};
+
+class QEglfsScreenCapture::QuickGrabber : public Grabber
+{
+public:
+ QuickGrabber(QEglfsScreenCapture &screenCapture, QScreen *screen, QQuickWindow *quickWindow)
+ : Grabber(screenCapture, screen), m_quickWindow(quickWindow)
+ {
+ Q_ASSERT(m_quickWindow);
+ }
+
+protected:
+ QVideoFrame grabFrame() override
+ {
+ if (!m_quickWindow) {
+ updateError(Error::InternalError, QLatin1String("Window deleted"));
+ return {};
+ }
+
+ QImage image = m_quickWindow->grabWindow();
+
+ if (image.isNull()) {
+ updateError(Error::InternalError, QLatin1String("Image invalid"));
+ return {};
+ }
+
+ if (!m_format.isValid()) {
+ m_format = { image.size(),
+ QVideoFrameFormat::pixelFormatFromImageFormat(image.format()) };
+ m_format.setStreamFrameRate(frameRate());
+ }
+
+ return QVideoFramePrivate::createFrame(
+ std::make_unique<QImageVideoBuffer>(std::move(image)), m_format);
+ }
+
+private:
+ QPointer<QQuickWindow> m_quickWindow;
+};
+
+QEglfsScreenCapture::QEglfsScreenCapture() : QPlatformSurfaceCapture(ScreenSource{}) { }
+
+QEglfsScreenCapture::~QEglfsScreenCapture() = default;
+
+QVideoFrameFormat QEglfsScreenCapture::frameFormat() const
+{
+ return m_grabber ? m_grabber->format() : QVideoFrameFormat();
+}
+
+bool QEglfsScreenCapture::setActiveInternal(bool active)
+{
+ if (static_cast<bool>(m_grabber) == active)
+ return true;
+
+ if (m_grabber)
+ m_grabber.reset();
+
+ if (!active)
+ return true;
+
+ m_grabber = createGrabber();
+
+ if (!m_grabber) {
+        // TODO: This could mean that the UI is not started yet, so we should wait, try again,
+        // and report an error only if it still hasn't started. That might not be possible here.
+ return false;
+ }
+
+ m_grabber->start();
+ return true;
+}
+
+bool QEglfsScreenCapture::isSupported()
+{
+ return QGuiApplication::platformName() == QLatin1String("eglfs");
+}
+
+std::unique_ptr<QEglfsScreenCapture::Grabber> QEglfsScreenCapture::createGrabber()
+{
+ auto screen = source<ScreenSource>();
+ if (!checkScreenWithError(screen))
+ return nullptr;
+
+ QOpenGLCompositor *compositor = QOpenGLCompositor::instance();
+
+ if (compositor->context()) {
+ // Create OpenGL grabber
+ if (!compositor->targetWindow()) {
+ updateError(Error::CaptureFailed,
+ QLatin1String("Target window is not set for OpenGL compositor"));
+ return nullptr;
+ }
+
+ return std::make_unique<Grabber>(*this, screen);
+ }
+
+ // Check for QQuickWindow
+ auto windows = QGuiApplication::topLevelWindows();
+ auto it = std::find_if(windows.begin(), windows.end(), [screen](QWindow *window) {
+ auto quickWindow = qobject_cast<QQuickWindow *>(window);
+ if (!quickWindow)
+ return false;
+
+ return quickWindow->screen() == screen;
+ });
+
+ if (it != windows.end()) {
+ // Create grabber that calls QQuickWindow::grabWindow
+ return std::make_unique<QuickGrabber>(*this, screen, qobject_cast<QQuickWindow *>(*it));
+ }
+
+ updateError(Error::CaptureFailed, QLatin1String("No existing OpenGL context or QQuickWindow"));
+ return nullptr;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qeglfsscreencapture_p.h b/src/plugins/multimedia/ffmpeg/qeglfsscreencapture_p.h
new file mode 100644
index 000000000..840cdbeb0
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qeglfsscreencapture_p.h
@@ -0,0 +1,48 @@
+// Copyright (C) 2023 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QEGLFSSCREENCAPTURE_H
+#define QEGLFSSCREENCAPTURE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformsurfacecapture_p.h>
+#include <memory>
+
+QT_BEGIN_NAMESPACE
+
+class QEglfsScreenCapture : public QPlatformSurfaceCapture
+{
+public:
+ QEglfsScreenCapture();
+
+ ~QEglfsScreenCapture() override;
+
+ QVideoFrameFormat frameFormat() const override;
+
+ static bool isSupported();
+
+private:
+ bool setActiveInternal(bool active) override;
+
+private:
+ class Grabber;
+ class QuickGrabber;
+
+ std::unique_ptr<Grabber> createGrabber();
+
+ std::unique_ptr<Grabber> m_grabber;
+};
+
+QT_END_NAMESPACE
+
+#endif // QEGLFSSCREENCAPTURE_H
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeg.cpp b/src/plugins/multimedia/ffmpeg/qffmpeg.cpp
new file mode 100644
index 000000000..ce7dfc682
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpeg.cpp
@@ -0,0 +1,645 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpeg_p.h"
+
+#include <qdebug.h>
+#include <qloggingcategory.h>
+#include <qffmpeghwaccel_p.h> // TODO: probably decompose HWAccel and get rid of the header in the base utils
+
+#include <algorithm>
+#include <vector>
+#include <array>
+#include <optional>
+#include <unordered_set>
+
+extern "C" {
+#include <libavutil/pixdesc.h>
+#include <libavutil/samplefmt.h>
+
+#ifdef Q_OS_DARWIN
+#include <libavutil/hwcontext_videotoolbox.h>
+#endif
+}
+
+#ifdef Q_OS_ANDROID
+#include <QtCore/qjniobject.h>
+#include <QtCore/qjniarray.h>
+#include <QtCore/qjnitypes.h>
+#endif
+
+QT_BEGIN_NAMESPACE
+
+#ifdef Q_OS_ANDROID
+Q_DECLARE_JNI_CLASS(QtVideoDeviceManager,
+ "org/qtproject/qt/android/multimedia/QtVideoDeviceManager");
+Q_DECLARE_JNI_CLASS(String, "java/lang/String");
+#endif
+
+static Q_LOGGING_CATEGORY(qLcFFmpegUtils, "qt.multimedia.ffmpeg.utils");
+
+namespace QFFmpeg {
+
+namespace {
+
+enum CodecStorageType {
+ ENCODERS,
+ DECODERS,
+
+ // TODO: maybe split sw/hw codecs
+
+ CODEC_STORAGE_TYPE_COUNT
+};
+
+using CodecsStorage = std::vector<const AVCodec *>;
+
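+// Orders codecs by id, placing experimental codecs after stable ones with the same
+// id, so that the binary search in findAVCodec() encounters stable codecs first.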
+struct CodecsComparator
+{
+ bool operator()(const AVCodec *a, const AVCodec *b) const
+ {
+ return a->id < b->id
+ || (a->id == b->id && isAVCodecExperimental(a) < isAVCodecExperimental(b));
+ }
+
+ bool operator()(const AVCodec *a, AVCodecID id) const { return a->id < id; }
+};
+
+template<typename FlagNames>
+QString flagsToString(int flags, const FlagNames &flagNames)
+{
+ QString result;
+ int leftover = flags;
+ for (const auto &flagAndName : flagNames)
+ if ((flags & flagAndName.first) != 0) {
+ leftover &= ~flagAndName.first;
+ if (!result.isEmpty())
+ result += ", ";
+ result += flagAndName.second;
+ }
+
+ if (leftover) {
+ if (!result.isEmpty())
+ result += ", ";
+ result += QString::number(leftover, 16);
+ }
+ return result;
+}
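+// e.g. with a table like the capability names in dumpCodecInfo() below,
+// flagsToString(AV_CODEC_CAP_DELAY | AV_CODEC_CAP_HARDWARE, names) yields
+// "DELAY, HARDWARE"; any unmatched leftover bits are appended in hex.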
+
+void dumpCodecInfo(const AVCodec *codec)
+{
+ using FlagNames = std::initializer_list<std::pair<int, const char *>>;
+ const auto mediaType = codec->type == AVMEDIA_TYPE_VIDEO ? "video"
+ : codec->type == AVMEDIA_TYPE_AUDIO ? "audio"
+ : codec->type == AVMEDIA_TYPE_SUBTITLE ? "subtitle"
+ : "other_type";
+
+ const auto type = av_codec_is_encoder(codec)
+ ? av_codec_is_decoder(codec) ? "encoder/decoder:" : "encoder:"
+ : "decoder:";
+
+ static const FlagNames capabilitiesNames = {
+ { AV_CODEC_CAP_DRAW_HORIZ_BAND, "DRAW_HORIZ_BAND" },
+ { AV_CODEC_CAP_DR1, "DRAW_HORIZ_DR1" },
+ { AV_CODEC_CAP_DELAY, "DELAY" },
+ { AV_CODEC_CAP_SMALL_LAST_FRAME, "SMALL_LAST_FRAME" },
+ { AV_CODEC_CAP_SUBFRAMES, "SUBFRAMES" },
+ { AV_CODEC_CAP_EXPERIMENTAL, "EXPERIMENTAL" },
+ { AV_CODEC_CAP_CHANNEL_CONF, "CHANNEL_CONF" },
+ { AV_CODEC_CAP_FRAME_THREADS, "FRAME_THREADS" },
+ { AV_CODEC_CAP_SLICE_THREADS, "SLICE_THREADS" },
+ { AV_CODEC_CAP_PARAM_CHANGE, "PARAM_CHANGE" },
+#ifdef AV_CODEC_CAP_OTHER_THREADS
+ { AV_CODEC_CAP_OTHER_THREADS, "OTHER_THREADS" },
+#endif
+ { AV_CODEC_CAP_VARIABLE_FRAME_SIZE, "VARIABLE_FRAME_SIZE" },
+ { AV_CODEC_CAP_AVOID_PROBING, "AVOID_PROBING" },
+ { AV_CODEC_CAP_HARDWARE, "HARDWARE" },
+ { AV_CODEC_CAP_HYBRID, "HYBRID" },
+ { AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE, "ENCODER_REORDERED_OPAQUE" },
+#ifdef AV_CODEC_CAP_ENCODER_FLUSH
+ { AV_CODEC_CAP_ENCODER_FLUSH, "ENCODER_FLUSH" },
+#endif
+ };
+
+ qCDebug(qLcFFmpegUtils) << mediaType << type << codec->name << "id:" << codec->id
+ << "capabilities:"
+ << flagsToString(codec->capabilities, capabilitiesNames);
+
+ if (codec->pix_fmts) {
+ static const FlagNames flagNames = {
+ { AV_PIX_FMT_FLAG_BE, "BE" },
+ { AV_PIX_FMT_FLAG_PAL, "PAL" },
+ { AV_PIX_FMT_FLAG_BITSTREAM, "BITSTREAM" },
+ { AV_PIX_FMT_FLAG_HWACCEL, "HWACCEL" },
+ { AV_PIX_FMT_FLAG_PLANAR, "PLANAR" },
+ { AV_PIX_FMT_FLAG_RGB, "RGB" },
+ { AV_PIX_FMT_FLAG_ALPHA, "ALPHA" },
+ { AV_PIX_FMT_FLAG_BAYER, "BAYER" },
+ { AV_PIX_FMT_FLAG_FLOAT, "FLOAT" },
+ };
+
+ qCDebug(qLcFFmpegUtils) << " pix_fmts:";
+ for (auto f = codec->pix_fmts; *f != AV_PIX_FMT_NONE; ++f) {
+ auto desc = av_pix_fmt_desc_get(*f);
+ qCDebug(qLcFFmpegUtils)
+ << " id:" << *f << desc->name << "depth:" << desc->comp[0].depth
+ << "flags:" << flagsToString(desc->flags, flagNames);
+ }
+ } else if (codec->type == AVMEDIA_TYPE_VIDEO) {
+ qCDebug(qLcFFmpegUtils) << " pix_fmts: null";
+ }
+
+ if (codec->sample_fmts) {
+ qCDebug(qLcFFmpegUtils) << " sample_fmts:";
+ for (auto f = codec->sample_fmts; *f != AV_SAMPLE_FMT_NONE; ++f) {
+ const auto name = av_get_sample_fmt_name(*f);
+ qCDebug(qLcFFmpegUtils) << " id:" << *f << (name ? name : "unknown")
+ << "bytes_per_sample:" << av_get_bytes_per_sample(*f)
+ << "is_planar:" << av_sample_fmt_is_planar(*f);
+ }
+ } else if (codec->type == AVMEDIA_TYPE_AUDIO) {
+ qCDebug(qLcFFmpegUtils) << " sample_fmts: null";
+ }
+
+ if (avcodec_get_hw_config(codec, 0)) {
+ static const FlagNames hwConfigMethodNames = {
+ { AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX, "HW_DEVICE_CTX" },
+ { AV_CODEC_HW_CONFIG_METHOD_HW_FRAMES_CTX, "HW_FRAMES_CTX" },
+ { AV_CODEC_HW_CONFIG_METHOD_INTERNAL, "INTERNAL" },
+ { AV_CODEC_HW_CONFIG_METHOD_AD_HOC, "AD_HOC" }
+ };
+
+ qCDebug(qLcFFmpegUtils) << " hw config:";
+ for (int index = 0; auto config = avcodec_get_hw_config(codec, index); ++index) {
+ const auto pixFmtForDevice = pixelFormatForHwDevice(config->device_type);
+ auto pixFmtDesc = av_pix_fmt_desc_get(config->pix_fmt);
+ auto pixFmtForDeviceDesc = av_pix_fmt_desc_get(pixFmtForDevice);
+ qCDebug(qLcFFmpegUtils)
+ << " device_type:" << config->device_type << "pix_fmt:" << config->pix_fmt
+ << (pixFmtDesc ? pixFmtDesc->name : "unknown")
+ << "pixelFormatForHwDevice:" << pixelFormatForHwDevice(config->device_type)
+ << (pixFmtForDeviceDesc ? pixFmtForDeviceDesc->name : "unknown")
+ << "hw_config_methods:" << flagsToString(config->methods, hwConfigMethodNames);
+ }
+ }
+}
+
+bool isCodecValid(const AVCodec *codec, const std::vector<AVHWDeviceType> &availableHwDeviceTypes,
+ const std::optional<std::unordered_set<AVCodecID>> &codecAvailableOnDevice)
+{
+ if (codec->type != AVMEDIA_TYPE_VIDEO)
+ return true;
+
+ if (!codec->pix_fmts) {
+#if defined(Q_OS_LINUX) || defined(Q_OS_ANDROID)
+        // Disable V4L2 M2M codecs for encoding for now.
+        // TODO: Investigate how to get them working
+ if (std::strstr(codec->name, "_v4l2m2m") && av_codec_is_encoder(codec))
+ return false;
+
+        // MediaCodec on Android is used for hardware-accelerated media processing. That is why,
+        // before marking it as valid, we need to make sure it is available on the current device.
+ if (std::strstr(codec->name, "_mediacodec")
+ && (codec->capabilities & AV_CODEC_CAP_HARDWARE)
+ && codecAvailableOnDevice && codecAvailableOnDevice->count(codec->id) == 0)
+ return false;
+#endif
+
+        return true; // To be investigated. This happens for RAW_VIDEO, which is supposed to be OK,
+                     // and with v4l2m2m codecs, which is suspicious.
+ }
+
+ if (findAVPixelFormat(codec, &isHwPixelFormat) == AV_PIX_FMT_NONE)
+ return true;
+
+ if ((codec->capabilities & AV_CODEC_CAP_HARDWARE) == 0)
+ return true;
+
+ auto checkDeviceType = [codec](AVHWDeviceType type) {
+ return isAVFormatSupported(codec, pixelFormatForHwDevice(type));
+ };
+
+ if (codecAvailableOnDevice && codecAvailableOnDevice->count(codec->id) == 0)
+ return false;
+
+ return std::any_of(availableHwDeviceTypes.begin(), availableHwDeviceTypes.end(),
+ checkDeviceType);
+}
+
+std::optional<std::unordered_set<AVCodecID>> availableHWCodecs(const CodecStorageType type)
+{
+#ifdef Q_OS_ANDROID
+ using namespace Qt::StringLiterals;
+    std::unordered_set<AVCodecID> availableCodecs;
+
+ auto getCodecId = [] (const QString& codecName) {
+ if (codecName == "3gpp"_L1) return AV_CODEC_ID_H263;
+ if (codecName == "avc"_L1) return AV_CODEC_ID_H264;
+ if (codecName == "hevc"_L1) return AV_CODEC_ID_HEVC;
+ if (codecName == "mp4v-es"_L1) return AV_CODEC_ID_MPEG4;
+ if (codecName == "x-vnd.on2.vp8"_L1) return AV_CODEC_ID_VP8;
+ if (codecName == "x-vnd.on2.vp9"_L1) return AV_CODEC_ID_VP9;
+ return AV_CODEC_ID_NONE;
+ };
+
+ const QJniObject jniCodecs =
+ QtJniTypes::QtVideoDeviceManager::callStaticMethod<QtJniTypes::String[]>(
+ type == ENCODERS ? "getHWVideoEncoders" : "getHWVideoDecoders");
+
+ QJniArray<QtJniTypes::String> arrCodecs(jniCodecs.object<jobjectArray>());
+ for (int i = 0; i < arrCodecs.size(); ++i) {
+        availableCodecs.insert(getCodecId(arrCodecs.at(i).toString()));
+ }
+    return availableCodecs;
+#else
+ Q_UNUSED(type);
+ return {};
+#endif
+}
+
+const CodecsStorage &codecsStorage(CodecStorageType codecsType)
+{
+ static const auto &storages = []() {
+ std::array<CodecsStorage, CODEC_STORAGE_TYPE_COUNT> result;
+ void *opaque = nullptr;
+ const auto platformHwEncoders = availableHWCodecs(ENCODERS);
+ const auto platformHwDecoders = availableHWCodecs(DECODERS);
+
+ while (auto codec = av_codec_iterate(&opaque)) {
+            // TODO: to be investigated.
+            // The FFmpeg functions avcodec_find_decoder/avcodec_find_encoder
+            // find experimental codecs last. For now we don't consider them at all,
+            // since they are supposed to be unstable, though maybe we shouldn't
+            // skip them entirely.
+            // Currently, it's possible to turn them on for testing purposes.
+
+ static const auto experimentalCodecsEnabled =
+ qEnvironmentVariableIntValue("QT_ENABLE_EXPERIMENTAL_CODECS");
+
+ if (!experimentalCodecsEnabled && isAVCodecExperimental(codec)) {
+ qCDebug(qLcFFmpegUtils) << "Skip experimental codec" << codec->name;
+ continue;
+ }
+
+ if (av_codec_is_decoder(codec)) {
+ if (isCodecValid(codec, HWAccel::decodingDeviceTypes(), platformHwDecoders))
+ result[DECODERS].emplace_back(codec);
+ else
+ qCDebug(qLcFFmpegUtils)
+ << "Skip decoder" << codec->name
+ << "due to disabled matching hw acceleration, or dysfunctional codec";
+ }
+
+ if (av_codec_is_encoder(codec)) {
+ if (isCodecValid(codec, HWAccel::encodingDeviceTypes(), platformHwEncoders))
+ result[ENCODERS].emplace_back(codec);
+ else
+ qCDebug(qLcFFmpegUtils)
+ << "Skip encoder" << codec->name
+ << "due to disabled matching hw acceleration, or dysfunctional codec";
+ }
+ }
+
+ for (auto &storage : result) {
+ storage.shrink_to_fit();
+
+            // stable_sort preserves FFmpeg's registration order among equivalent codecs
+ std::stable_sort(storage.begin(), storage.end(), CodecsComparator{});
+ }
+
+        // This prints quite a lot of logs, so do it only in this special case
+ const bool shouldDumpCodecsInfo = qLcFFmpegUtils().isEnabled(QtDebugMsg)
+ && qEnvironmentVariableIsSet("QT_FFMPEG_DEBUG");
+
+ if (shouldDumpCodecsInfo) {
+ qCDebug(qLcFFmpegUtils) << "Advanced FFmpeg codecs info:";
+ for (auto &storage : result) {
+ std::for_each(storage.begin(), storage.end(), &dumpCodecInfo);
+ qCDebug(qLcFFmpegUtils) << "---------------------------";
+ }
+ }
+
+ return result;
+ }();
+
+ return storages[codecsType];
+}
+
+const char *preferredHwCodecNameSuffix(bool isEncoder, AVHWDeviceType deviceType)
+{
+ switch (deviceType) {
+ case AV_HWDEVICE_TYPE_VAAPI:
+ return "_vaapi";
+ case AV_HWDEVICE_TYPE_MEDIACODEC:
+ return "_mediacodec";
+ case AV_HWDEVICE_TYPE_VIDEOTOOLBOX:
+ return "_videotoolbox";
+ case AV_HWDEVICE_TYPE_D3D11VA:
+ case AV_HWDEVICE_TYPE_DXVA2:
+#if QT_FFMPEG_HAS_D3D12VA
+ case AV_HWDEVICE_TYPE_D3D12VA:
+#endif
+ return "_mf";
+ case AV_HWDEVICE_TYPE_CUDA:
+ case AV_HWDEVICE_TYPE_VDPAU:
+ return isEncoder ? "_nvenc" : "_cuvid";
+ default:
+ return nullptr;
+ }
+}
+
+template<typename CodecScoreGetter>
+const AVCodec *findAVCodec(CodecStorageType codecsType, AVCodecID codecId,
+ const CodecScoreGetter &scoreGetter)
+{
+ const auto &storage = codecsStorage(codecsType);
+ auto it = std::lower_bound(storage.begin(), storage.end(), codecId, CodecsComparator{});
+
+ const AVCodec *result = nullptr;
+ AVScore resultScore = NotSuitableAVScore;
+
+ for (; it != storage.end() && (*it)->id == codecId && resultScore != BestAVScore; ++it) {
+ const auto score = scoreGetter(*it);
+
+ if (score > resultScore) {
+ resultScore = score;
+ result = *it;
+ }
+ }
+
+ return result;
+}
+
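+// Prefers codecs whose name ends with the platform's preferred suffix: e.g. for
+// AV_HWDEVICE_TYPE_VIDEOTOOLBOX, "h264_videotoolbox" gets BestAVScore while other
+// matching codecs get DefaultAVScore; device types without a suffix accept any name.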
+AVScore hwCodecNameScores(const AVCodec *codec, AVHWDeviceType deviceType)
+{
+ if (auto suffix = preferredHwCodecNameSuffix(av_codec_is_encoder(codec), deviceType)) {
+ const auto substr = strstr(codec->name, suffix);
+ if (substr && !substr[strlen(suffix)])
+ return BestAVScore;
+
+ return DefaultAVScore;
+ }
+
+ return BestAVScore;
+}
+
+const AVCodec *findAVCodec(CodecStorageType codecsType, AVCodecID codecId,
+ const std::optional<AVHWDeviceType> &deviceType,
+ const std::optional<PixelOrSampleFormat> &format)
+{
+ // TODO: remove deviceType and use only isAVFormatSupported to check the format
+
+ return findAVCodec(codecsType, codecId, [&](const AVCodec *codec) {
+ if (format && !isAVFormatSupported(codec, *format))
+ return NotSuitableAVScore;
+
+ if (!deviceType)
+ return BestAVScore; // find any codec with the id
+
+ if (*deviceType == AV_HWDEVICE_TYPE_NONE
+ && findAVFormat(codec->pix_fmts, &isSwPixelFormat) != AV_PIX_FMT_NONE)
+ return BestAVScore;
+
+ if (*deviceType != AV_HWDEVICE_TYPE_NONE) {
+ for (int index = 0; auto config = avcodec_get_hw_config(codec, index); ++index) {
+ if (config->device_type != deviceType)
+ continue;
+
+ if (format && config->pix_fmt != AV_PIX_FMT_NONE && config->pix_fmt != *format)
+ continue;
+
+ return hwCodecNameScores(codec, *deviceType);
+ }
+
+            // This situation happens mostly with encoders.
+            // It's probably an FFmpeg bug: avcodec_get_hw_config returns null even though
+            // hw acceleration is supported.
+ // To be removed: only isAVFormatSupported should be used.
+ if (hasAVFormat(codec->pix_fmts, pixelFormatForHwDevice(*deviceType)))
+ return hwCodecNameScores(codec, *deviceType);
+ }
+
+ return NotSuitableAVScore;
+ });
+}
+
+} // namespace
+
+const AVCodec *findAVDecoder(AVCodecID codecId, const std::optional<AVHWDeviceType> &deviceType,
+ const std::optional<PixelOrSampleFormat> &format)
+{
+ return findAVCodec(DECODERS, codecId, deviceType, format);
+}
+
+const AVCodec *findAVEncoder(AVCodecID codecId, const std::optional<AVHWDeviceType> &deviceType,
+ const std::optional<PixelOrSampleFormat> &format)
+{
+ return findAVCodec(ENCODERS, codecId, deviceType, format);
+}
+
+const AVCodec *findAVEncoder(AVCodecID codecId,
+ const std::function<AVScore(const AVCodec *)> &scoresGetter)
+{
+ return findAVCodec(ENCODERS, codecId, scoresGetter);
+}
+
+bool isAVFormatSupported(const AVCodec *codec, PixelOrSampleFormat format)
+{
+ if (codec->type == AVMEDIA_TYPE_VIDEO) {
+ auto checkFormat = [format](AVPixelFormat f) { return f == format; };
+ return findAVPixelFormat(codec, checkFormat) != AV_PIX_FMT_NONE;
+ }
+
+ if (codec->type == AVMEDIA_TYPE_AUDIO)
+ return hasAVFormat(codec->sample_fmts, AVSampleFormat(format));
+
+ return false;
+}
+
+bool isHwPixelFormat(AVPixelFormat format)
+{
+ const auto desc = av_pix_fmt_desc_get(format);
+ return desc && (desc->flags & AV_PIX_FMT_FLAG_HWACCEL) != 0;
+}
+
+bool isAVCodecExperimental(const AVCodec *codec)
+{
+ return (codec->capabilities & AV_CODEC_CAP_EXPERIMENTAL) != 0;
+}
+
+void applyExperimentalCodecOptions(const AVCodec *codec, AVDictionary** opts)
+{
+ if (isAVCodecExperimental(codec)) {
+ qCWarning(qLcFFmpegUtils) << "Applying the option 'strict -2' for the experimental codec"
+                                  << codec->name << ". It's unlikely to work properly";
+ av_dict_set(opts, "strict", "-2", 0);
+ }
+}
+
+AVPixelFormat pixelFormatForHwDevice(AVHWDeviceType deviceType)
+{
+ switch (deviceType) {
+ case AV_HWDEVICE_TYPE_VIDEOTOOLBOX:
+ return AV_PIX_FMT_VIDEOTOOLBOX;
+ case AV_HWDEVICE_TYPE_VAAPI:
+ return AV_PIX_FMT_VAAPI;
+ case AV_HWDEVICE_TYPE_MEDIACODEC:
+ return AV_PIX_FMT_MEDIACODEC;
+ case AV_HWDEVICE_TYPE_CUDA:
+ return AV_PIX_FMT_CUDA;
+ case AV_HWDEVICE_TYPE_VDPAU:
+ return AV_PIX_FMT_VDPAU;
+ case AV_HWDEVICE_TYPE_OPENCL:
+ return AV_PIX_FMT_OPENCL;
+ case AV_HWDEVICE_TYPE_QSV:
+ return AV_PIX_FMT_QSV;
+ case AV_HWDEVICE_TYPE_D3D11VA:
+ return AV_PIX_FMT_D3D11;
+#if QT_FFMPEG_HAS_D3D12VA
+ case AV_HWDEVICE_TYPE_D3D12VA:
+ return AV_PIX_FMT_D3D12;
+#endif
+ case AV_HWDEVICE_TYPE_DXVA2:
+ return AV_PIX_FMT_DXVA2_VLD;
+ case AV_HWDEVICE_TYPE_DRM:
+ return AV_PIX_FMT_DRM_PRIME;
+#if QT_FFMPEG_HAS_VULKAN
+ case AV_HWDEVICE_TYPE_VULKAN:
+ return AV_PIX_FMT_VULKAN;
+#endif
+ default:
+ return AV_PIX_FMT_NONE;
+ }
+}
+
+AVPacketSideData *addStreamSideData(AVStream *stream, AVPacketSideData sideData)
+{
+ QScopeGuard freeData([&sideData]() { av_free(sideData.data); });
+#if QT_FFMPEG_STREAM_SIDE_DATA_DEPRECATED
+ AVPacketSideData *result = av_packet_side_data_add(
+ &stream->codecpar->coded_side_data,
+ &stream->codecpar->nb_coded_side_data,
+ sideData.type,
+ sideData.data,
+ sideData.size,
+ 0);
+ if (result) {
+ // If the result is not null, the ownership is taken by AVStream,
+ // otherwise the data must be deleted.
+ freeData.dismiss();
+ return result;
+ }
+#else
+ Q_UNUSED(stream);
+ // TODO: implement for older FFmpeg versions
+ qWarning() << "Adding stream side data is not supported for FFmpeg < 6.1";
+#endif
+
+ return nullptr;
+}
+
+const AVPacketSideData *streamSideData(const AVStream *stream, AVPacketSideDataType type)
+{
+ Q_ASSERT(stream);
+
+#if QT_FFMPEG_STREAM_SIDE_DATA_DEPRECATED
+ return av_packet_side_data_get(stream->codecpar->coded_side_data,
+ stream->codecpar->nb_coded_side_data, type);
+#else
+ auto checkType = [type](const auto &item) { return item.type == type; };
+ const auto end = stream->side_data + stream->nb_side_data;
+ const auto found = std::find_if(stream->side_data, end, checkType);
+ return found == end ? nullptr : found;
+#endif
+}
+
+SwrContextUPtr createResampleContext(const AVAudioFormat &inputFormat,
+ const AVAudioFormat &outputFormat)
+{
+ SwrContext *resampler = nullptr;
+#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
+ resampler = swr_alloc_set_opts(nullptr,
+ outputFormat.channelLayoutMask,
+ outputFormat.sampleFormat,
+ outputFormat.sampleRate,
+ inputFormat.channelLayoutMask,
+ inputFormat.sampleFormat,
+ inputFormat.sampleRate,
+ 0,
+ nullptr);
+#else
+
+#if QT_FFMPEG_SWR_CONST_CH_LAYOUT
+ using AVChannelLayoutPrm = const AVChannelLayout*;
+#else
+ using AVChannelLayoutPrm = AVChannelLayout*;
+#endif
+
+ swr_alloc_set_opts2(&resampler,
+ const_cast<AVChannelLayoutPrm>(&outputFormat.channelLayout),
+ outputFormat.sampleFormat,
+ outputFormat.sampleRate,
+ const_cast<AVChannelLayoutPrm>(&inputFormat.channelLayout),
+ inputFormat.sampleFormat,
+ inputFormat.sampleRate,
+ 0,
+ nullptr);
+#endif
+
+ swr_init(resampler);
+ return SwrContextUPtr(resampler);
+}
+
+QVideoFrameFormat::ColorTransfer fromAvColorTransfer(AVColorTransferCharacteristic colorTrc) {
+ switch (colorTrc) {
+ case AVCOL_TRC_BT709:
+ // The following three cases have transfer characteristics identical to BT709
+ case AVCOL_TRC_BT1361_ECG:
+ case AVCOL_TRC_BT2020_10:
+ case AVCOL_TRC_BT2020_12:
+ case AVCOL_TRC_SMPTE240M: // almost identical to bt709
+ return QVideoFrameFormat::ColorTransfer_BT709;
+ case AVCOL_TRC_GAMMA22:
+ case AVCOL_TRC_SMPTE428: // No idea, let's hope for the best...
+ case AVCOL_TRC_IEC61966_2_1: // sRGB, close enough to 2.2...
+ case AVCOL_TRC_IEC61966_2_4: // not quite, but probably close enough
+ return QVideoFrameFormat::ColorTransfer_Gamma22;
+ case AVCOL_TRC_GAMMA28:
+ return QVideoFrameFormat::ColorTransfer_Gamma28;
+ case AVCOL_TRC_SMPTE170M:
+ return QVideoFrameFormat::ColorTransfer_BT601;
+ case AVCOL_TRC_LINEAR:
+ return QVideoFrameFormat::ColorTransfer_Linear;
+ case AVCOL_TRC_SMPTE2084:
+ return QVideoFrameFormat::ColorTransfer_ST2084;
+ case AVCOL_TRC_ARIB_STD_B67:
+ return QVideoFrameFormat::ColorTransfer_STD_B67;
+ default:
+ break;
+ }
+ return QVideoFrameFormat::ColorTransfer_Unknown;
+}
+
+#ifdef Q_OS_DARWIN
+bool isCVFormatSupported(uint32_t cvFormat)
+{
+ return av_map_videotoolbox_format_to_pixfmt(cvFormat) != AV_PIX_FMT_NONE;
+}
+
+std::string cvFormatToString(uint32_t cvFormat)
+{
+ auto formatDescIt = std::make_reverse_iterator(reinterpret_cast<const char *>(&cvFormat));
+ return std::string(formatDescIt - 4, formatDescIt);
+}
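+// e.g. cvFormatToString(kCVPixelFormatType_32BGRA) yields "BGRA" on little-endian hosts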
+
+#endif
+
+} // namespace QFFmpeg
+
+QDebug operator<<(QDebug dbg, const AVRational &value)
+{
+ dbg << value.num << "/" << value.den;
+ return dbg;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeg_p.h b/src/plugins/multimedia/ffmpeg/qffmpeg_p.h
new file mode 100644
index 000000000..09bf7e4f4
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpeg_p.h
@@ -0,0 +1,280 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEG_P_H
+#define QFFMPEG_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qffmpegdefs_p.h"
+#include "qffmpegavaudioformat_p.h"
+#include <QtMultimedia/qvideoframeformat.h>
+
+#include <qstring.h>
+#include <optional>
+
+inline bool operator==(const AVRational &lhs, const AVRational &rhs)
+{
+ return lhs.num == rhs.num && lhs.den == rhs.den;
+}
+
+inline bool operator!=(const AVRational &lhs, const AVRational &rhs)
+{
+ return !(lhs == rhs);
+}
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg
+{
+
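+// Multiply by a rational: the integer overload rounds to nearest; both overloads
+// return nullopt when the denominator is zero.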
+inline std::optional<qint64> mul(qint64 a, AVRational b)
+{
+ return b.den != 0 ? (a * b.num + b.den / 2) / b.den : std::optional<qint64>{};
+}
+
+inline std::optional<qreal> mul(qreal a, AVRational b)
+{
+ return b.den != 0 ? a * qreal(b.num) / qreal(b.den) : std::optional<qreal>{};
+}
+
+inline std::optional<qint64> timeStampMs(qint64 ts, AVRational base)
+{
+ return mul(1'000 * ts, base);
+}
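+// e.g. timeStampMs(90'000, AVRational{ 1, 90'000 }) yields 1'000 (one second in ms)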
+
+inline std::optional<qint64> timeStampUs(qint64 ts, AVRational base)
+{
+ return mul(1'000'000 * ts, base);
+}
+
+inline std::optional<float> toFloat(AVRational r)
+{
+ return r.den != 0 ? float(r.num) / float(r.den) : std::optional<float>{};
+}
+
+inline QString err2str(int errnum)
+{
+ char buffer[AV_ERROR_MAX_STRING_SIZE + 1] = {};
+ av_make_error_string(buffer, AV_ERROR_MAX_STRING_SIZE, errnum);
+ return QString::fromLocal8Bit(buffer);
+}
+
+inline void setAVFrameTime(AVFrame &frame, int64_t pts, const AVRational &timeBase)
+{
+ frame.pts = pts;
+#if QT_FFMPEG_HAS_FRAME_TIME_BASE
+ frame.time_base = timeBase;
+#else
+ Q_UNUSED(timeBase);
+#endif
+}
+
+inline void getAVFrameTime(const AVFrame &frame, int64_t &pts, AVRational &timeBase)
+{
+ pts = frame.pts;
+#if QT_FFMPEG_HAS_FRAME_TIME_BASE
+ timeBase = frame.time_base;
+#else
+ timeBase = { 0, 1 };
+#endif
+}
+
+inline int64_t getAVFrameDuration(const AVFrame &frame)
+{
+#if QT_FFMPEG_HAS_FRAME_DURATION
+ return frame.duration;
+#else
+ Q_UNUSED(frame);
+ return 0;
+#endif
+}
+
+struct AVDictionaryHolder
+{
+ AVDictionary *opts = nullptr;
+
+ operator AVDictionary **() { return &opts; }
+
+ AVDictionaryHolder() = default;
+
+ Q_DISABLE_COPY(AVDictionaryHolder)
+
+ AVDictionaryHolder(AVDictionaryHolder &&other) noexcept
+ : opts(std::exchange(other.opts, nullptr))
+ {
+ }
+
+ ~AVDictionaryHolder()
+ {
+ if (opts)
+ av_dict_free(&opts);
+ }
+};
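+// A minimal usage sketch: the AVDictionary** conversion lets a holder be passed
+// straight to FFmpeg, e.g. av_dict_set(holder, "threads", "auto", 0); the dictionary
+// is freed automatically when the holder goes out of scope.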
+
+template<typename FunctionType, FunctionType F>
+struct AVDeleter
+{
+ template<typename T>
+ void operator()(T *object) const
+ {
+ if (object)
+ F(&object);
+ }
+};
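+// Deleters for FFmpeg types; the wrapped *_free()/*_unref() functions take a T**
+// and null the pointer after releasing it.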
+
+using AVFrameUPtr = std::unique_ptr<AVFrame, AVDeleter<decltype(&av_frame_free), &av_frame_free>>;
+
+inline AVFrameUPtr makeAVFrame()
+{
+ return AVFrameUPtr(av_frame_alloc());
+}
+
+using AVPacketUPtr =
+ std::unique_ptr<AVPacket, AVDeleter<decltype(&av_packet_free), &av_packet_free>>;
+
+using AVCodecContextUPtr =
+ std::unique_ptr<AVCodecContext,
+ AVDeleter<decltype(&avcodec_free_context), &avcodec_free_context>>;
+
+using AVBufferUPtr =
+ std::unique_ptr<AVBufferRef, AVDeleter<decltype(&av_buffer_unref), &av_buffer_unref>>;
+
+using AVHWFramesConstraintsUPtr = std::unique_ptr<
+ AVHWFramesConstraints,
+ AVDeleter<decltype(&av_hwframe_constraints_free), &av_hwframe_constraints_free>>;
+
+using SwrContextUPtr = std::unique_ptr<SwrContext, AVDeleter<decltype(&swr_free), &swr_free>>;
+
+using PixelOrSampleFormat = int;
+using AVScore = int;
+constexpr AVScore BestAVScore = std::numeric_limits<AVScore>::max();
+constexpr AVScore DefaultAVScore = 0;
+constexpr AVScore NotSuitableAVScore = std::numeric_limits<AVScore>::min();
+constexpr AVScore MinAVScore = NotSuitableAVScore + 1;
+
+const AVCodec *findAVDecoder(AVCodecID codecId,
+ const std::optional<AVHWDeviceType> &deviceType = {},
+ const std::optional<PixelOrSampleFormat> &format = {});
+
+const AVCodec *findAVEncoder(AVCodecID codecId,
+ const std::optional<AVHWDeviceType> &deviceType = {},
+ const std::optional<PixelOrSampleFormat> &format = {});
+
+const AVCodec *findAVEncoder(AVCodecID codecId,
+ const std::function<AVScore(const AVCodec *)> &scoresGetter);
+
+bool isAVFormatSupported(const AVCodec *codec, PixelOrSampleFormat format);
+
+template<typename Format>
+bool hasAVFormat(const Format *fmts, Format format)
+{
+ return findAVFormat(fmts, [format](Format f) { return f == format; }) != Format(-1);
+}
+
+template<typename Format, typename Predicate>
+Format findAVFormat(const Format *fmts, const Predicate &predicate)
+{
+ auto scoresGetter = [&predicate](Format fmt) {
+ return predicate(fmt) ? BestAVScore : NotSuitableAVScore;
+ };
+ return findBestAVFormat(fmts, scoresGetter).first;
+}
+
+template <typename Predicate>
+const AVCodecHWConfig *findHwConfig(const AVCodec *codec, const Predicate &predicate)
+{
+ for (int i = 0; const auto hwConfig = avcodec_get_hw_config(codec, i); ++i) {
+ if (predicate(hwConfig))
+ return hwConfig;
+ }
+
+ return nullptr;
+}
+
+template <typename Predicate>
+AVPixelFormat findAVPixelFormat(const AVCodec *codec, const Predicate &predicate)
+{
+ const AVPixelFormat format = findAVFormat(codec->pix_fmts, predicate);
+ if (format != AV_PIX_FMT_NONE)
+ return format;
+
+ auto checkHwConfig = [&predicate](const AVCodecHWConfig *config) {
+ return config->pix_fmt != AV_PIX_FMT_NONE && predicate(config->pix_fmt);
+ };
+
+ if (auto hwConfig = findHwConfig(codec, checkHwConfig))
+ return hwConfig->pix_fmt;
+
+ return AV_PIX_FMT_NONE;
+}
+
+template <typename Value, typename CalculateScore>
+auto findBestAVValue(const Value *values, const CalculateScore &calculateScore,
+ Value invalidValue = {})
+{
+ using Limits = std::numeric_limits<decltype(calculateScore(*values))>;
+ std::pair result(invalidValue, Limits::min());
+ if (values) {
+ for (; *values != invalidValue && result.second != Limits::max(); ++values) {
+ const auto score = calculateScore(*values);
+ if (score > result.second)
+ result = { *values, score };
+ }
+ }
+
+ return result;
+}
+
+template <typename Format, typename CalculateScore>
+std::pair<Format, AVScore> findBestAVFormat(const Format *fmts,
+ const CalculateScore &calculateScore)
+{
+ static_assert(std::is_same_v<Format, AVSampleFormat> || std::is_same_v<Format, AVPixelFormat>,
+                  "The input value is not an AV format; use findBestAVValue instead.");
+ return findBestAVValue(fmts, calculateScore, Format(-1));
+}
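+// e.g. findBestAVFormat(codec->pix_fmts, [](AVPixelFormat f) {
+//     return isSwPixelFormat(f) ? BestAVScore : DefaultAVScore;
+// }) picks the first software pixel format when one is present.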
+
+bool isHwPixelFormat(AVPixelFormat format);
+
+inline bool isSwPixelFormat(AVPixelFormat format)
+{
+ return !isHwPixelFormat(format);
+}
+
+bool isAVCodecExperimental(const AVCodec *codec);
+
+void applyExperimentalCodecOptions(const AVCodec *codec, AVDictionary** opts);
+
+AVPixelFormat pixelFormatForHwDevice(AVHWDeviceType deviceType);
+
+AVPacketSideData *addStreamSideData(AVStream *stream, AVPacketSideData sideData);
+
+const AVPacketSideData *streamSideData(const AVStream *stream, AVPacketSideDataType type);
+
+SwrContextUPtr createResampleContext(const AVAudioFormat &inputFormat,
+ const AVAudioFormat &outputFormat);
+
+QVideoFrameFormat::ColorTransfer fromAvColorTransfer(AVColorTransferCharacteristic colorTrc);
+
+#ifdef Q_OS_DARWIN
+bool isCVFormatSupported(uint32_t format);
+
+std::string cvFormatToString(uint32_t format);
+
+#endif
+} // namespace QFFmpeg
+
+QDebug operator<<(QDebug, const AVRational &);
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegaudiodecoder.cpp b/src/plugins/multimedia/ffmpeg/qffmpegaudiodecoder.cpp
new file mode 100644
index 000000000..69820cc79
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegaudiodecoder.cpp
@@ -0,0 +1,247 @@
+// Copyright (C) 2020 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qffmpegaudiodecoder_p.h"
+#include "qffmpegresampler_p.h"
+#include "qaudiobuffer.h"
+
+#include "qffmpegplaybackengine_p.h"
+#include "playbackengine/qffmpegrenderer_p.h"
+
+#include <qloggingcategory.h>
+
+static Q_LOGGING_CATEGORY(qLcAudioDecoder, "qt.multimedia.ffmpeg.audioDecoder");
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg
+{
+
+class SteppingAudioRenderer : public Renderer
+{
+ Q_OBJECT
+public:
+ SteppingAudioRenderer(const QAudioFormat &format) : Renderer({}), m_format(format) { }
+
+ RenderingResult renderInternal(Frame frame) override
+ {
+ if (!frame.isValid())
+ return {};
+
+ if (!m_resampler)
+ m_resampler = std::make_unique<QFFmpegResampler>(frame.codec(), m_format);
+
+ emit newAudioBuffer(m_resampler->resample(frame.avFrame()));
+
+ return {};
+ }
+
+signals:
+ void newAudioBuffer(QAudioBuffer);
+
+private:
+ QAudioFormat m_format;
+ std::unique_ptr<QFFmpegResampler> m_resampler;
+};
+
+class AudioDecoder : public PlaybackEngine
+{
+ Q_OBJECT
+public:
+ explicit AudioDecoder(const QAudioFormat &format) : m_format(format) { }
+
+ RendererPtr createRenderer(QPlatformMediaPlayer::TrackType trackType) override
+ {
+ if (trackType != QPlatformMediaPlayer::AudioStream)
+ return RendererPtr{ {}, {} };
+
+ auto result = createPlaybackEngineObject<SteppingAudioRenderer>(m_format);
+ m_audioRenderer = result.get();
+
+ connect(result.get(), &SteppingAudioRenderer::newAudioBuffer, this,
+ &AudioDecoder::newAudioBuffer);
+
+ return result;
+ }
+
+ void nextBuffer()
+ {
+ Q_ASSERT(m_audioRenderer);
+ Q_ASSERT(!m_audioRenderer->isStepForced());
+
+ m_audioRenderer->doForceStep();
+ // updateObjectsPausedState();
+ }
+
+signals:
+ void newAudioBuffer(QAudioBuffer);
+
+private:
+ QPointer<Renderer> m_audioRenderer;
+ QAudioFormat m_format;
+};
+} // namespace QFFmpeg
+
+
+QFFmpegAudioDecoder::QFFmpegAudioDecoder(QAudioDecoder *parent)
+ : QPlatformAudioDecoder(parent)
+{
+}
+
+QFFmpegAudioDecoder::~QFFmpegAudioDecoder() = default;
+
+QUrl QFFmpegAudioDecoder::source() const
+{
+ return m_url;
+}
+
+void QFFmpegAudioDecoder::setSource(const QUrl &fileName)
+{
+ stop();
+ m_sourceDevice = nullptr;
+
+ if (std::exchange(m_url, fileName) != fileName)
+ sourceChanged();
+}
+
+QIODevice *QFFmpegAudioDecoder::sourceDevice() const
+{
+ return m_sourceDevice;
+}
+
+void QFFmpegAudioDecoder::setSourceDevice(QIODevice *device)
+{
+ stop();
+ m_url.clear();
+ if (std::exchange(m_sourceDevice, device) != device)
+ sourceChanged();
+}
+
+void QFFmpegAudioDecoder::start()
+{
+ qCDebug(qLcAudioDecoder) << "start";
+ auto checkNoError = [this]() {
+ if (error() == QAudioDecoder::NoError)
+ return true;
+
+ durationChanged(-1);
+ positionChanged(-1);
+
+ m_decoder.reset();
+
+ return false;
+ };
+
+ m_decoder = std::make_unique<AudioDecoder>(m_audioFormat);
+ connect(m_decoder.get(), &AudioDecoder::errorOccured, this, &QFFmpegAudioDecoder::errorSignal);
+ connect(m_decoder.get(), &AudioDecoder::endOfStream, this, &QFFmpegAudioDecoder::done);
+ connect(m_decoder.get(), &AudioDecoder::newAudioBuffer, this,
+ &QFFmpegAudioDecoder::newAudioBuffer);
+
+    QFFmpeg::MediaDataHolder::Maybe media =
+            QFFmpeg::MediaDataHolder::create(m_url, m_sourceDevice, nullptr);
+
+ if (media) {
+ Q_ASSERT(media.value());
+ if (media.value()->streamInfo(QPlatformMediaPlayer::AudioStream).isEmpty())
+ error(QAudioDecoder::FormatError,
+ QLatin1String("The media doesn't contain an audio stream"));
+ else
+ m_decoder->setMedia(std::move(*media.value()));
+ } else {
+ auto [code, description] = media.error();
+ errorSignal(code, description);
+ }
+
+ if (!checkNoError())
+ return;
+
+ m_decoder->setState(QMediaPlayer::PausedState);
+ if (!checkNoError())
+ return;
+
+ m_decoder->nextBuffer();
+ if (!checkNoError())
+ return;
+
+ durationChanged(m_decoder->duration() / 1000);
+ setIsDecoding(true);
+}
+
+void QFFmpegAudioDecoder::stop()
+{
+ qCDebug(qLcAudioDecoder) << ">>>>> stop";
+ if (m_decoder) {
+ m_decoder.reset();
+ done();
+ }
+}
+
+QAudioFormat QFFmpegAudioDecoder::audioFormat() const
+{
+ return m_audioFormat;
+}
+
+void QFFmpegAudioDecoder::setAudioFormat(const QAudioFormat &format)
+{
+ if (std::exchange(m_audioFormat, format) != format)
+ formatChanged(m_audioFormat);
+}
+
+QAudioBuffer QFFmpegAudioDecoder::read()
+{
+ auto buffer = std::exchange(m_audioBuffer, QAudioBuffer{});
+ if (!buffer.isValid())
+ return buffer;
+ qCDebug(qLcAudioDecoder) << "reading buffer" << buffer.startTime();
+ bufferAvailableChanged(false);
+ if (m_decoder)
+ m_decoder->nextBuffer();
+ return buffer;
+}
+
+void QFFmpegAudioDecoder::newAudioBuffer(const QAudioBuffer &b)
+{
+ Q_ASSERT(b.isValid());
+ Q_ASSERT(!m_audioBuffer.isValid());
+ Q_ASSERT(!bufferAvailable());
+
+ qCDebug(qLcAudioDecoder) << "new audio buffer" << b.startTime();
+ m_audioBuffer = b;
+ const qint64 pos = b.startTime();
+ positionChanged(pos/1000);
+ bufferAvailableChanged(b.isValid());
+ bufferReady();
+}
+
+void QFFmpegAudioDecoder::done()
+{
+ qCDebug(qLcAudioDecoder) << ">>>>> DONE!";
+ finished();
+}
+
+void QFFmpegAudioDecoder::errorSignal(int err, const QString &errorString)
+{
+    // Unfortunately, the error enums for QAudioDecoder and QMediaPlayer aren't identical.
+ // Map them.
+ switch (QMediaPlayer::Error(err)) {
+ case QMediaPlayer::NoError:
+ error(QAudioDecoder::NoError, errorString);
+ break;
+ case QMediaPlayer::ResourceError:
+ error(QAudioDecoder::ResourceError, errorString);
+ break;
+ case QMediaPlayer::FormatError:
+ error(QAudioDecoder::FormatError, errorString);
+ break;
+ case QMediaPlayer::NetworkError:
+ // fall through, Network error doesn't exist in QAudioDecoder
+ case QMediaPlayer::AccessDeniedError:
+ error(QAudioDecoder::AccessDeniedError, errorString);
+ break;
+ }
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qffmpegaudiodecoder_p.cpp"
+
+#include "qffmpegaudiodecoder.moc"
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegaudiodecoder_p.h b/src/plugins/multimedia/ffmpeg/qffmpegaudiodecoder_p.h
new file mode 100644
index 000000000..11816cd6f
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegaudiodecoder_p.h
@@ -0,0 +1,68 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGAUDIODECODER_H
+#define QFFMPEGAUDIODECODER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "private/qplatformaudiodecoder_p.h"
+#include <qffmpeg_p.h>
+#include <qurl.h>
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+class AudioDecoder;
+}
+
+class QFFmpegAudioDecoder : public QPlatformAudioDecoder
+{
+ Q_OBJECT
+
+public:
+ QFFmpegAudioDecoder(QAudioDecoder *parent);
+ virtual ~QFFmpegAudioDecoder();
+
+ QUrl source() const override;
+ void setSource(const QUrl &fileName) override;
+
+ QIODevice *sourceDevice() const override;
+ void setSourceDevice(QIODevice *device) override;
+
+ void start() override;
+ void stop() override;
+
+ QAudioFormat audioFormat() const override;
+ void setAudioFormat(const QAudioFormat &format) override;
+
+ QAudioBuffer read() override;
+
+public Q_SLOTS:
+ void newAudioBuffer(const QAudioBuffer &b);
+ void done();
+ void errorSignal(int err, const QString &errorString);
+
+private:
+ using AudioDecoder = QFFmpeg::AudioDecoder;
+
+ QUrl m_url;
+ QIODevice *m_sourceDevice = nullptr;
+ std::unique_ptr<AudioDecoder> m_decoder;
+ QAudioFormat m_audioFormat;
+
+ QAudioBuffer m_audioBuffer;
+};
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGAUDIODECODER_H
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegaudioinput.cpp b/src/plugins/multimedia/ffmpeg/qffmpegaudioinput.cpp
new file mode 100644
index 000000000..014c53ca4
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegaudioinput.cpp
@@ -0,0 +1,195 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qffmpegaudioinput_p.h"
+#include <qiodevice.h>
+#include <qaudiosource.h>
+#include <qaudiobuffer.h>
+#include <qatomic.h>
+#include <qdebug.h>
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+class AudioSourceIO : public QIODevice
+{
+ Q_OBJECT
+public:
+ AudioSourceIO(QFFmpegAudioInput *audioInput) : QIODevice(), m_input(audioInput)
+ {
+ m_muted = m_input->muted;
+ m_volume = m_input->volume;
+ updateVolume();
+ open(QIODevice::WriteOnly);
+ }
+
+ ~AudioSourceIO() override = default;
+
+ void setDevice(const QAudioDevice &device)
+ {
+ QMutexLocker locker(&m_mutex);
+ if (m_device == device)
+ return;
+ m_device = device;
+ QMetaObject::invokeMethod(this, "updateSource");
+ }
+ void setFrameSize(int frameSize)
+ {
+ m_bufferSize.storeRelease((frameSize > 0 && m_format.isValid())
+ ? m_format.bytesForFrames(frameSize)
+ : DefaultAudioInputBufferSize);
+ }
+ void setRunning(bool r) {
+ QMutexLocker locker(&m_mutex);
+ if (m_running == r)
+ return;
+ m_running = r;
+ QMetaObject::invokeMethod(this, "updateRunning");
+ }
+
+ void setVolume(float vol) {
+ QMutexLocker locker(&m_mutex);
+ m_volume = vol;
+ QMetaObject::invokeMethod(this, "updateVolume");
+ }
+ void setMuted(bool muted) {
+ QMutexLocker locker(&m_mutex);
+ m_muted = muted;
+ QMetaObject::invokeMethod(this, "updateVolume");
+ }
+
+ int bufferSize() const { return m_bufferSize.loadAcquire(); }
+
+protected:
+ qint64 readData(char *, qint64) override
+ {
+ return 0;
+ }
+ qint64 writeData(const char *data, qint64 len) override
+ {
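+        // Accumulates incoming PCM into chunks of bufferSize bytes and emits one
+        // QAudioBuffer per full chunk via sendBuffer()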
+        const qint64 l = len; // keep the full 64-bit length for the return value
+ while (len > 0) {
+ const auto bufferSize = m_bufferSize.loadAcquire();
+ int toAppend = qMin(len, bufferSize - m_pcm.size());
+ m_pcm.append(data, toAppend);
+ data += toAppend;
+ len -= toAppend;
+ if (m_pcm.size() == bufferSize)
+ sendBuffer();
+ }
+
+ return l;
+ }
+
+private Q_SLOTS:
+ void updateSource() {
+ QMutexLocker locker(&m_mutex);
+ m_format = m_device.preferredFormat();
+ if (std::exchange(m_src, nullptr))
+ m_pcm.clear();
+
+ m_src = std::make_unique<QAudioSource>(m_device, m_format);
+ updateVolume();
+ if (m_running)
+ m_src->start(this);
+ }
+ void updateVolume()
+ {
+ if (m_src)
+ m_src->setVolume(m_muted ? 0. : m_volume);
+ }
+ void updateRunning()
+ {
+ QMutexLocker locker(&m_mutex);
+ if (m_running) {
+ if (!m_src)
+ updateSource();
+ m_src->start(this);
+ } else {
+ m_src->stop();
+ }
+ }
+
+private:
+
+ void sendBuffer()
+ {
+ QAudioFormat fmt = m_src->format();
+ qint64 time = fmt.durationForBytes(m_processed);
+ QAudioBuffer buffer(m_pcm, fmt, time);
+ emit m_input->newAudioBuffer(buffer);
+ m_processed += m_pcm.size();
+ m_pcm.clear();
+ }
+
+ QMutex m_mutex;
+ QAudioDevice m_device;
+ float m_volume = 1.;
+ bool m_muted = false;
+ bool m_running = false;
+
+ QFFmpegAudioInput *m_input = nullptr;
+ std::unique_ptr<QAudioSource> m_src;
+ QAudioFormat m_format;
+ QAtomicInt m_bufferSize = DefaultAudioInputBufferSize;
+ qint64 m_processed = 0;
+ QByteArray m_pcm;
+};
+
+} // namespace QFFmpeg
+
+QFFmpegAudioInput::QFFmpegAudioInput(QAudioInput *qq)
+ : QPlatformAudioInput(qq)
+{
+ qRegisterMetaType<QAudioBuffer>();
+
+ inputThread = std::make_unique<QThread>();
+ audioIO = new QFFmpeg::AudioSourceIO(this);
+ audioIO->moveToThread(inputThread.get());
+ inputThread->start();
+}
+
+QFFmpegAudioInput::~QFFmpegAudioInput()
+{
+ // Ensure that COM is uninitialized by nested QWindowsResampler
+ // on the same thread that initialized it.
+ audioIO->deleteLater();
+ inputThread->exit();
+ inputThread->wait();
+}
+
+void QFFmpegAudioInput::setAudioDevice(const QAudioDevice &device)
+{
+ audioIO->setDevice(device);
+}
+
+void QFFmpegAudioInput::setMuted(bool muted)
+{
+ audioIO->setMuted(muted);
+}
+
+void QFFmpegAudioInput::setVolume(float volume)
+{
+ audioIO->setVolume(volume);
+}
+
+void QFFmpegAudioInput::setFrameSize(int s)
+{
+ audioIO->setFrameSize(s);
+}
+
+void QFFmpegAudioInput::setRunning(bool b)
+{
+ audioIO->setRunning(b);
+}
+
+int QFFmpegAudioInput::bufferSize() const
+{
+ return audioIO->bufferSize();
+}
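+
+// A minimal usage sketch (illustrative only, not part of the sources):
+//   QAudioInput in;
+//   QFFmpegAudioInput platformInput(&in);
+//   platformInput.setFrameSize(1024);  // frames per emitted QAudioBuffer
+//   platformInput.setRunning(true);    // capture runs on the internal inputThread
+//   // then consume the newAudioBuffer(QAudioBuffer) signal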
+
+QT_END_NAMESPACE
+
+#include "moc_qffmpegaudioinput_p.cpp"
+
+#include "qffmpegaudioinput.moc"
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegaudioinput_p.h b/src/plugins/multimedia/ffmpeg/qffmpegaudioinput_p.h
new file mode 100644
index 000000000..288b3f432
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegaudioinput_p.h
@@ -0,0 +1,57 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGAUDIOINPUT_H
+#define QFFMPEGAUDIOINPUT_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformaudioinput_p.h>
+#include <private/qplatformaudiobufferinput_p.h>
+#include "qffmpegthread_p.h"
+#include <qaudioinput.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAudioSource;
+class QAudioBuffer;
+namespace QFFmpeg {
+class AudioSourceIO;
+}
+
+constexpr int DefaultAudioInputBufferSize = 4096;
+
+class QFFmpegAudioInput : public QPlatformAudioBufferInputBase, public QPlatformAudioInput
+{
+ // for qobject_cast
+ Q_OBJECT
+public:
+ QFFmpegAudioInput(QAudioInput *qq);
+ ~QFFmpegAudioInput();
+
+ void setAudioDevice(const QAudioDevice &/*device*/) override;
+ void setMuted(bool /*muted*/) override;
+ void setVolume(float /*volume*/) override;
+
+ void setFrameSize(int frameSize);
+ void setRunning(bool b);
+
+ int bufferSize() const;
+
+private:
+ QFFmpeg::AudioSourceIO *audioIO = nullptr;
+ std::unique_ptr<QThread> inputThread;
+};
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGAUDIOINPUT_H
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegavaudioformat.cpp b/src/plugins/multimedia/ffmpeg/qffmpegavaudioformat.cpp
new file mode 100644
index 000000000..417219a48
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegavaudioformat.cpp
@@ -0,0 +1,78 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegavaudioformat_p.h"
+#include "qaudioformat.h"
+#include "qffmpegmediaformatinfo_p.h"
+
+extern "C" {
+#include <libavutil/opt.h>
+}
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+AVAudioFormat::AVAudioFormat(const AVCodecContext *context)
+ : sampleFormat(context->sample_fmt), sampleRate(context->sample_rate)
+{
+#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
+ if (context->channel_layout) {
+ channelLayoutMask = context->channel_layout;
+ } else {
+ const auto channelConfig =
+ QAudioFormat::defaultChannelConfigForChannelCount(context->channels);
+ channelLayoutMask = QFFmpegMediaFormatInfo::avChannelLayout(channelConfig);
+ }
+#else
+ channelLayout = context->ch_layout;
+#endif
+}
+
+AVAudioFormat::AVAudioFormat(const AVCodecParameters *codecPar)
+ : sampleFormat(AVSampleFormat(codecPar->format)), sampleRate(codecPar->sample_rate)
+{
+#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
+ if (codecPar->channel_layout) {
+ channelLayoutMask = codecPar->channel_layout;
+ } else {
+ const auto channelConfig =
+ QAudioFormat::defaultChannelConfigForChannelCount(codecPar->channels);
+ channelLayoutMask = QFFmpegMediaFormatInfo::avChannelLayout(channelConfig);
+ }
+#else
+ channelLayout = codecPar->ch_layout;
+#endif
+}
+
+AVAudioFormat::AVAudioFormat(const QAudioFormat &audioFormat)
+ : sampleFormat(QFFmpegMediaFormatInfo::avSampleFormat(audioFormat.sampleFormat())),
+ sampleRate(audioFormat.sampleRate())
+{
+ const auto channelConfig = audioFormat.channelConfig() == QAudioFormat::ChannelConfigUnknown
+ ? QAudioFormat::defaultChannelConfigForChannelCount(audioFormat.channelCount())
+ : audioFormat.channelConfig();
+
+ const auto mask = QFFmpegMediaFormatInfo::avChannelLayout(channelConfig);
+
+#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
+ channelLayoutMask = mask;
+#else
+ av_channel_layout_from_mask(&channelLayout, mask);
+#endif
+}
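+
+// A hedged example of the intended use (an assumption, not taken from this
+// patch): the equality operator below lets callers detect when resampling is
+// needed, e.g.
+//   if (AVAudioFormat(codecContext) != AVAudioFormat(qtFormat))
+//       ; // set up a resampler (SwrContext) between the two formats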
+
+bool operator==(const AVAudioFormat &lhs, const AVAudioFormat &rhs)
+{
+ return lhs.sampleFormat == rhs.sampleFormat && lhs.sampleRate == rhs.sampleRate &&
+#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
+ lhs.channelLayoutMask == rhs.channelLayoutMask
+#else
+ lhs.channelLayout == rhs.channelLayout
+#endif
+ ;
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegavaudioformat_p.h b/src/plugins/multimedia/ffmpeg/qffmpegavaudioformat_p.h
new file mode 100644
index 000000000..9fd4dacdf
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegavaudioformat_p.h
@@ -0,0 +1,68 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGAVAUDIOFORMAT_P_H
+#define QFFMPEGAVAUDIOFORMAT_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qffmpegdefs_p.h"
+#include <private/qtmultimediaglobal_p.h>
+
+#if !QT_FFMPEG_OLD_CHANNEL_LAYOUT
+inline bool operator==(const AVChannelLayout &lhs, const AVChannelLayout &rhs)
+{
+ return lhs.order == rhs.order && lhs.nb_channels == rhs.nb_channels && lhs.u.mask == rhs.u.mask;
+}
+
+inline bool operator!=(const AVChannelLayout &lhs, const AVChannelLayout &rhs)
+{
+ return !(lhs == rhs);
+}
+
+#endif
+
+QT_BEGIN_NAMESPACE
+
+class QAudioFormat;
+
+namespace QFFmpeg {
+
+struct AVAudioFormat
+{
+ AVAudioFormat(const AVCodecContext *context);
+
+ AVAudioFormat(const AVCodecParameters *codecPar);
+
+ AVAudioFormat(const QAudioFormat &audioFormat);
+
+#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
+ uint64_t channelLayoutMask;
+#else
+ AVChannelLayout channelLayout;
+#endif
+ AVSampleFormat sampleFormat;
+ int sampleRate;
+};
+
+bool operator==(const AVAudioFormat &lhs, const AVAudioFormat &rhs);
+
+inline bool operator!=(const AVAudioFormat &lhs, const AVAudioFormat &rhs)
+{
+ return !(lhs == rhs);
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGAVAUDIOFORMAT_P_H
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegconverter.cpp b/src/plugins/multimedia/ffmpeg/qffmpegconverter.cpp
new file mode 100644
index 000000000..ba87ce3ed
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegconverter.cpp
@@ -0,0 +1,272 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegconverter_p.h"
+#include <QtMultimedia/qvideoframeformat.h>
+#include <QtMultimedia/qvideoframe.h>
+#include <QtCore/qloggingcategory.h>
+#include <private/qvideotexturehelper_p.h>
+
+extern "C" {
+#include <libswscale/swscale.h>
+}
+
+QT_BEGIN_NAMESPACE
+
+namespace {
+
+Q_LOGGING_CATEGORY(lc, "qt.multimedia.ffmpeg.converter");
+
+
+// Converts to the FFmpeg pixel format. This function differs from
+// QFFmpegVideoBuffer::toAVPixelFormat, which only covers the subset of
+// pixel formats required for encoding. Here we need to cover more pixel
+// formats to be able to generate test images for decoding/display.
+AVPixelFormat toAVPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat)
+{
+ switch (pixelFormat) {
+ default:
+ case QVideoFrameFormat::Format_Invalid:
+ return AV_PIX_FMT_NONE;
+ case QVideoFrameFormat::Format_AYUV:
+ case QVideoFrameFormat::Format_AYUV_Premultiplied:
+ return AV_PIX_FMT_NONE; // TODO: Fixme (No corresponding FFmpeg format available)
+ case QVideoFrameFormat::Format_YV12:
+ case QVideoFrameFormat::Format_IMC1:
+ case QVideoFrameFormat::Format_IMC3:
+ case QVideoFrameFormat::Format_IMC2:
+ case QVideoFrameFormat::Format_IMC4:
+ return AV_PIX_FMT_YUV420P;
+ case QVideoFrameFormat::Format_Jpeg:
+ return AV_PIX_FMT_BGRA;
+ case QVideoFrameFormat::Format_ARGB8888:
+ return AV_PIX_FMT_ARGB;
+ case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
+ case QVideoFrameFormat::Format_XRGB8888:
+ return AV_PIX_FMT_0RGB;
+ case QVideoFrameFormat::Format_BGRA8888:
+ return AV_PIX_FMT_BGRA;
+ case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
+ case QVideoFrameFormat::Format_BGRX8888:
+ return AV_PIX_FMT_BGR0;
+ case QVideoFrameFormat::Format_ABGR8888:
+ return AV_PIX_FMT_ABGR;
+ case QVideoFrameFormat::Format_XBGR8888:
+ return AV_PIX_FMT_0BGR;
+ case QVideoFrameFormat::Format_RGBA8888:
+ return AV_PIX_FMT_RGBA;
+ case QVideoFrameFormat::Format_RGBX8888:
+ return AV_PIX_FMT_RGB0;
+ case QVideoFrameFormat::Format_YUV422P:
+ return AV_PIX_FMT_YUV422P;
+ case QVideoFrameFormat::Format_YUV420P:
+ return AV_PIX_FMT_YUV420P;
+ case QVideoFrameFormat::Format_YUV420P10:
+ return AV_PIX_FMT_YUV420P10;
+ case QVideoFrameFormat::Format_UYVY:
+ return AV_PIX_FMT_UYVY422;
+ case QVideoFrameFormat::Format_YUYV:
+ return AV_PIX_FMT_YUYV422;
+ case QVideoFrameFormat::Format_NV12:
+ return AV_PIX_FMT_NV12;
+ case QVideoFrameFormat::Format_NV21:
+ return AV_PIX_FMT_NV21;
+ case QVideoFrameFormat::Format_Y8:
+ return AV_PIX_FMT_GRAY8;
+ case QVideoFrameFormat::Format_Y16:
+ return AV_PIX_FMT_GRAY16;
+ case QVideoFrameFormat::Format_P010:
+ return AV_PIX_FMT_P010;
+ case QVideoFrameFormat::Format_P016:
+ return AV_PIX_FMT_P016;
+ case QVideoFrameFormat::Format_SamplerExternalOES:
+ return AV_PIX_FMT_MEDIACODEC;
+ }
+}
+
+struct SwsFrameData
+{
+ static constexpr int arraySize = 4; // Array size required by sws_scale
+ std::array<uchar *, arraySize> bits;
+ std::array<int, arraySize> stride;
+};
+
+SwsFrameData getSwsData(QVideoFrame &dst)
+{
+ switch (dst.pixelFormat()) {
+ case QVideoFrameFormat::Format_YV12:
+ case QVideoFrameFormat::Format_IMC1:
+ return { { dst.bits(0), dst.bits(2), dst.bits(1), nullptr },
+ { dst.bytesPerLine(0), dst.bytesPerLine(2), dst.bytesPerLine(1), 0 } };
+
+ case QVideoFrameFormat::Format_IMC2:
+ return { { dst.bits(0), dst.bits(1) + dst.bytesPerLine(1) / 2, dst.bits(1), nullptr },
+ { dst.bytesPerLine(0), dst.bytesPerLine(1), dst.bytesPerLine(1), 0 } };
+
+ case QVideoFrameFormat::Format_IMC4:
+ return { { dst.bits(0), dst.bits(1), dst.bits(1) + dst.bytesPerLine(1) / 2, nullptr },
+ { dst.bytesPerLine(0), dst.bytesPerLine(1), dst.bytesPerLine(1), 0 } };
+ default:
+ return { { dst.bits(0), dst.bits(1), dst.bits(2), nullptr },
+ { dst.bytesPerLine(0), dst.bytesPerLine(1), dst.bytesPerLine(2), 0 } };
+ }
+}
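+
+// Note on the special cases above: YV12/IMC1 store the V plane before the U
+// plane, so the pointers are swapped into the Y/U/V order sws_scale expects;
+// IMC2/IMC4 pack U and V side by side within a single plane, hence the
+// half-stride pointer offsets.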
+
+struct SwsColorSpace
+{
+ int colorSpace;
+ int colorRange; // 0 - mpeg/video, 1 - jpeg/full
+};
+
+// Qt's heuristics for determining the color space require checking both
+// the frame's color space and its color range. This function mimics logic
+// used elsewhere in Qt Multimedia.
+SwsColorSpace toSwsColorSpace(QVideoFrameFormat::ColorRange colorRange,
+ QVideoFrameFormat::ColorSpace colorSpace)
+{
+ const int avRange = colorRange == QVideoFrameFormat::ColorRange_Video ? 0 : 1;
+
+ switch (colorSpace) {
+ case QVideoFrameFormat::ColorSpace_BT601:
+ if (colorRange == QVideoFrameFormat::ColorRange_Full)
+ return { SWS_CS_ITU709, 1 }; // TODO: FIXME - Not exact match
+ return { SWS_CS_ITU601, 0 };
+ case QVideoFrameFormat::ColorSpace_BT709:
+ return { SWS_CS_ITU709, avRange };
+ case QVideoFrameFormat::ColorSpace_AdobeRgb:
+ return { SWS_CS_ITU601, 1 }; // TODO: Why do ITU601 and Adobe RGB match well?
+ case QVideoFrameFormat::ColorSpace_BT2020:
+ return { SWS_CS_BT2020, avRange };
+ case QVideoFrameFormat::ColorSpace_Undefined:
+ default:
+ return { SWS_CS_DEFAULT, avRange };
+ }
+}
+
+using SwsContextUPtr = std::unique_ptr<SwsContext, decltype(&sws_freeContext)>;
+using PixelFormat = QVideoFrameFormat::PixelFormat;
+
+// clang-format off
+
+SwsContextUPtr createConverter(const QSize &srcSize, PixelFormat srcPixFmt,
+ const QSize &dstSize, PixelFormat dstPixFmt)
+{
+ SwsContext* context = sws_getContext(
+ srcSize.width(), srcSize.height(), toAVPixelFormat(srcPixFmt),
+ dstSize.width(), dstSize.height(), toAVPixelFormat(dstPixFmt),
+ SWS_BILINEAR, nullptr, nullptr, nullptr);
+
+ return { context, &sws_freeContext };
+}
+
+bool setColorSpaceDetails(SwsContext *context,
+ const QVideoFrameFormat &srcFormat,
+ const QVideoFrameFormat &dstFormat)
+{
+ const SwsColorSpace src = toSwsColorSpace(srcFormat.colorRange(), srcFormat.colorSpace());
+ const SwsColorSpace dst = toSwsColorSpace(dstFormat.colorRange(), dstFormat.colorSpace());
+
+ constexpr int brightness = 0;
+ constexpr int contrast = 0;
+ constexpr int saturation = 0;
+ const int status = sws_setColorspaceDetails(context,
+ sws_getCoefficients(src.colorSpace), src.colorRange,
+ sws_getCoefficients(dst.colorSpace), dst.colorRange,
+ brightness, contrast, saturation);
+
+ return status == 0;
+}
+
+bool convert(SwsContext *context, QVideoFrame &src, int srcHeight, QVideoFrame &dst)
+{
+ if (!src.map(QtVideo::MapMode::ReadOnly))
+ return false;
+
+ QScopeGuard unmapSrc{[&] {
+ src.unmap();
+ }};
+
+ if (!dst.map(QtVideo::MapMode::WriteOnly))
+ return false;
+
+ QScopeGuard unmapDst{[&] {
+ dst.unmap();
+ }};
+
+ const SwsFrameData srcData = getSwsData(src);
+ const SwsFrameData dstData = getSwsData(dst);
+
+ constexpr int firstSrcSliceRow = 0;
+ const int scaledHeight = sws_scale(context,
+ srcData.bits.data(), srcData.stride.data(),
+ firstSrcSliceRow, srcHeight,
+ dstData.bits.data(), dstData.stride.data());
+
+ if (scaledHeight != srcHeight)
+ return false;
+
+ return true;
+}
+
+// Ensure even size if using planar format with chroma subsampling
+QSize adjustSize(const QSize& size, PixelFormat srcFmt, PixelFormat dstFmt)
+{
+ const auto* srcDesc = QVideoTextureHelper::textureDescription(srcFmt);
+ const auto* dstDesc = QVideoTextureHelper::textureDescription(dstFmt);
+
+ QSize output = size;
+ for (const auto desc : { srcDesc, dstDesc }) {
+ for (int i = 0; i < desc->nplanes; ++i) {
+ // TODO: Assumes that max subsampling is 2
+ if (desc->sizeScale[i].x != 1)
+ output.setWidth(output.width() & ~1); // Make even
+
+ if (desc->sizeScale[i].y != 1)
+ output.setHeight(output.height() & ~1); // Make even
+ }
+ }
+
+ return output;
+}
+
+} // namespace
+
+// Converts a video frame to the dstFormat video frame format.
+QVideoFrame convertFrame(QVideoFrame &src, const QVideoFrameFormat &dstFormat)
+{
+ if (src.size() != dstFormat.frameSize()) {
+ qCCritical(lc) << "Resizing is not supported";
+ return {};
+ }
+
+ // Adjust size to even width/height if we have chroma subsampling
+ const QSize size = adjustSize(src.size(), src.pixelFormat(), dstFormat.pixelFormat());
+ if (size != src.size())
+ qCWarning(lc) << "Input truncated to even width/height";
+
+ const SwsContextUPtr conv = createConverter(
+ size, src.pixelFormat(), size, dstFormat.pixelFormat());
+
+ if (!conv) {
+ qCCritical(lc) << "Failed to create SW converter";
+ return {};
+ }
+
+ if (!setColorSpaceDetails(conv.get(), src.surfaceFormat(), dstFormat)) {
+ qCCritical(lc) << "Failed to set color space details";
+ return {};
+ }
+
+ QVideoFrame dst{ dstFormat };
+
+ if (!convert(conv.get(), src, size.height(), dst)) {
+ qCCritical(lc) << "Frame conversion failed";
+ return {};
+ }
+
+ return dst;
+}
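+
+// Hedged usage sketch (illustrative; assumes 'src' is a mappable CPU frame):
+//   QVideoFrameFormat outFmt(src.size(), QVideoFrameFormat::Format_RGBA8888);
+//   QVideoFrame converted = convertFrame(src, outFmt);
+//   if (!converted.isValid())
+//       ; // conversion failed, see the qt.multimedia.ffmpeg.converter log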
+
+// clang-format on
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegconverter_p.h b/src/plugins/multimedia/ffmpeg/qffmpegconverter_p.h
new file mode 100644
index 000000000..57ee3135f
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegconverter_p.h
@@ -0,0 +1,30 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGCONVERTER_P_H
+#define QFFMPEGCONVERTER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qtconfigmacros.h>
+#include <private/qtmultimediaglobal_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QVideoFrameFormat;
+class QVideoFrame;
+
+QVideoFrame convertFrame(QVideoFrame &src, const QVideoFrameFormat &dstFormat);
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegdefs_p.h b/src/plugins/multimedia/ffmpeg/qffmpegdefs_p.h
new file mode 100644
index 000000000..239d8ff0c
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegdefs_p.h
@@ -0,0 +1,41 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGDEFS_P_H
+#define QFFMPEGDEFS_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+extern "C" {
+#include <libavformat/avformat.h>
+#include <libavcodec/avcodec.h>
+#include <libswresample/swresample.h>
+#include <libavutil/avutil.h>
+#include <libswscale/swscale.h>
+}
+
+#define QT_FFMPEG_OLD_CHANNEL_LAYOUT (LIBAVCODEC_VERSION_INT < AV_VERSION_INT(59, 24, 100))
+#define QT_FFMPEG_HAS_VULKAN \
+ (LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(58, 91, 100)) // since ffmpeg n4.3
+#define QT_FFMPEG_HAS_FRAME_TIME_BASE \
+ (LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(59, 18, 100)) // since ffmpeg n5.0
+#define QT_FFMPEG_HAS_FRAME_DURATION \
+ (LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(60, 3, 100)) // since ffmpeg n6.0
+#define QT_FFMPEG_STREAM_SIDE_DATA_DEPRECATED \
+ (LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(60, 15, 100)) // since ffmpeg n6.1
+#define QT_FFMPEG_HAS_D3D12VA \
+ (LIBAVUTIL_VERSION_INT >= AV_VERSION_INT(59, 8, 100)) // since ffmpeg n7.0
+#define QT_FFMPEG_SWR_CONST_CH_LAYOUT (LIBSWRESAMPLE_VERSION_INT >= AV_VERSION_INT(4, 9, 100))
+#define QT_FFMPEG_AVIO_WRITE_CONST \
+ (LIBAVFORMAT_VERSION_MAJOR >= 61)
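+
+// Sketch of how such a version guard is typically consumed (illustrative):
+//   #if QT_FFMPEG_AVIO_WRITE_CONST
+//   static int write(void *opaque, const uint8_t *buf, int size);
+//   #else
+//   static int write(void *opaque, uint8_t *buf, int size);
+//   #endif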
+
+#endif // QFFMPEGDEFS_P_H
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegencodingformatcontext.cpp b/src/plugins/multimedia/ffmpeg/qffmpegencodingformatcontext.cpp
new file mode 100644
index 000000000..8b367a822
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegencodingformatcontext.cpp
@@ -0,0 +1,116 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegencodingformatcontext_p.h"
+#include "qffmpegmediaformatinfo_p.h"
+#include "qffmpegioutils_p.h"
+#include "qfile.h"
+#include "QtCore/qloggingcategory.h"
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+static Q_LOGGING_CATEGORY(qLcEncodingFormatContext, "qt.multimedia.ffmpeg.encodingformatcontext");
+
+namespace {
+// In the example https://ffmpeg.org/doxygen/trunk/avio_read_callback_8c-example.html,
+// BufferSize = 4096 is suggested; however, it might not be optimal. To be investigated.
+constexpr size_t DefaultBufferSize = 4096;
+} // namespace
+
+EncodingFormatContext::EncodingFormatContext(QMediaFormat::FileFormat fileFormat)
+ : m_avFormatContext(avformat_alloc_context())
+{
+ const AVOutputFormat *avFormat = QFFmpegMediaFormatInfo::outputFormatForFileFormat(fileFormat);
+    m_avFormatContext->oformat = const_cast<AVOutputFormat *>(avFormat); // oformat's constness varies across FFmpeg versions
+}
+
+EncodingFormatContext::~EncodingFormatContext()
+{
+ closeAVIO();
+
+ avformat_free_context(m_avFormatContext);
+}
+
+void EncodingFormatContext::openAVIO(const QString &filePath)
+{
+ Q_ASSERT(!isAVIOOpen());
+ Q_ASSERT(!filePath.isEmpty());
+
+ const QByteArray filePathUtf8 = filePath.toUtf8();
+
+ std::unique_ptr<char, decltype(&av_free)> url(
+ reinterpret_cast<char *>(av_malloc(filePathUtf8.size() + 1)), &av_free);
+ memcpy(url.get(), filePathUtf8.constData(), filePathUtf8.size() + 1);
+
+ // Initialize the AVIOContext for accessing the resource indicated by the url
+ auto result = avio_open2(&m_avFormatContext->pb, url.get(), AVIO_FLAG_WRITE, nullptr, nullptr);
+
+ qCDebug(qLcEncodingFormatContext)
+ << "opened by file path:" << url.get() << ", result:" << result;
+
+ Q_ASSERT(m_avFormatContext->url == nullptr);
+ if (isAVIOOpen())
+ m_avFormatContext->url = url.release();
+ else
+ openAVIOWithQFile(filePath);
+}
+
+void EncodingFormatContext::openAVIOWithQFile(const QString &filePath)
+{
+ // QTBUG-123082, To be investigated:
+ // - should we use the logic with QFile for all file paths?
+ // - does avio_open2 handle network protocols that QFile doesn't?
+ // - which buffer size should we set to opening with QFile to ensure the best performance?
+
+ auto file = std::make_unique<QFile>(filePath);
+
+ if (!file->open(QFile::WriteOnly)) {
+ qCDebug(qLcEncodingFormatContext) << "Cannot open QFile" << filePath;
+ return;
+ }
+
+ openAVIO(file.get());
+
+ if (isAVIOOpen())
+ m_outputFile = std::move(file);
+}
+
+void EncodingFormatContext::openAVIO(QIODevice *device)
+{
+ Q_ASSERT(!isAVIOOpen());
+ Q_ASSERT(device);
+
+ if (!device->isWritable())
+ return;
+
+ auto buffer = static_cast<uint8_t *>(av_malloc(DefaultBufferSize));
+ m_avFormatContext->pb = avio_alloc_context(buffer, DefaultBufferSize, 1, device, nullptr,
+ &writeQIODevice, &seekQIODevice);
+}
+
+void EncodingFormatContext::closeAVIO()
+{
+ // Close the AVIOContext and release any file handles
+ if (isAVIOOpen()) {
+ if (m_avFormatContext->url && *m_avFormatContext->url != '\0') {
+ auto closeResult = avio_closep(&m_avFormatContext->pb);
+ Q_ASSERT(closeResult == 0);
+ } else {
+ av_free(std::exchange(m_avFormatContext->pb->buffer, nullptr));
+ avio_context_free(&m_avFormatContext->pb);
+ }
+
+        // Delete the url even though it might be deleted by avformat_free_context,
+        // to keep openAVIO and closeAVIO consistent.
+ av_freep(&m_avFormatContext->url);
+ m_outputFile.reset();
+ } else {
+ Q_ASSERT(!m_outputFile);
+ }
+}
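+
+// Hedged usage sketch (illustrative only):
+//   EncodingFormatContext ctx(QMediaFormat::MPEG4);
+//   ctx.openAVIO(QStringLiteral("/tmp/out.mp4"));
+//   if (ctx.isAVIOOpen())
+//       ; // write the header, packets, and trailer via ctx.avFormatContext()
+//   ctx.closeAVIO();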
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegencodingformatcontext_p.h b/src/plugins/multimedia/ffmpeg/qffmpegencodingformatcontext_p.h
new file mode 100644
index 000000000..69d0cd873
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegencodingformatcontext_p.h
@@ -0,0 +1,60 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGENCODINGFORMATCONTEXT_P_H
+#define QFFMPEGENCODINGFORMATCONTEXT_P_H
+
+#include "qffmpegdefs_p.h"
+#include "qmediaformat.h"
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+QT_BEGIN_NAMESPACE
+
+class QIODevice;
+class QFile;
+
+namespace QFFmpeg {
+
+class EncodingFormatContext
+{
+public:
+ explicit EncodingFormatContext(QMediaFormat::FileFormat fileFormat);
+ ~EncodingFormatContext();
+
+ void openAVIO(const QString &filePath);
+
+ void openAVIO(QIODevice *device);
+
+ bool isAVIOOpen() const { return m_avFormatContext->pb != nullptr; }
+
+ void closeAVIO();
+
+ AVFormatContext *avFormatContext() { return m_avFormatContext; }
+
+ const AVFormatContext *avFormatContext() const { return m_avFormatContext; }
+
+private:
+ Q_DISABLE_COPY_MOVE(EncodingFormatContext)
+
+ void openAVIOWithQFile(const QString &filePath);
+
+private:
+ AVFormatContext *m_avFormatContext;
+ std::unique_ptr<QFile> m_outputFile;
+};
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGENCODINGFORMATCONTEXT_P_H
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp
new file mode 100644
index 000000000..5b140f0ca
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp
@@ -0,0 +1,504 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "libavutil/version.h"
+
+#include "qffmpeghwaccel_p.h"
+#if QT_CONFIG(vaapi)
+#include "qffmpeghwaccel_vaapi_p.h"
+#endif
+#ifdef Q_OS_DARWIN
+#include "qffmpeghwaccel_videotoolbox_p.h"
+#endif
+#if QT_CONFIG(wmf)
+#include "qffmpeghwaccel_d3d11_p.h"
+#include <QtCore/private/qsystemlibrary_p.h>
+
+#endif
+#ifdef Q_OS_ANDROID
+# include "qffmpeghwaccel_mediacodec_p.h"
+#endif
+#include "qffmpeg_p.h"
+#include "qffmpegvideobuffer_p.h"
+#include "qscopedvaluerollback.h"
+#include "QtCore/qfile.h"
+
+#include <rhi/qrhi.h>
+#include <qloggingcategory.h>
+#include <unordered_set>
+#ifdef Q_OS_LINUX
+#include <QLibrary>
+#endif
+
+/* Infrastructure for HW acceleration goes into this file. */
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLHWAccel, "qt.multimedia.ffmpeg.hwaccel");
+extern bool thread_local FFmpegLogsEnabledInThread;
+
+namespace QFFmpeg {
+
+static const std::initializer_list<AVHWDeviceType> preferredHardwareAccelerators = {
+#if defined(Q_OS_ANDROID)
+ AV_HWDEVICE_TYPE_MEDIACODEC,
+#elif defined(Q_OS_LINUX)
+ AV_HWDEVICE_TYPE_CUDA,
+ AV_HWDEVICE_TYPE_VAAPI,
+
+ // TODO: investigate VDPAU advantages.
+ // nvenc/nvdec codecs use AV_HWDEVICE_TYPE_CUDA by default, but they can also use VDPAU
+ // if it's included into the ffmpeg build and vdpau drivers are installed.
+ // AV_HWDEVICE_TYPE_VDPAU
+#elif defined (Q_OS_WIN)
+ AV_HWDEVICE_TYPE_D3D11VA,
+#elif defined (Q_OS_DARWIN)
+ AV_HWDEVICE_TYPE_VIDEOTOOLBOX,
+#endif
+};
+
+static AVBufferUPtr loadHWContext(AVHWDeviceType type)
+{
+ AVBufferRef *hwContext = nullptr;
+ qCDebug(qLHWAccel) << " Checking HW context:" << av_hwdevice_get_type_name(type);
+ int ret = av_hwdevice_ctx_create(&hwContext, type, nullptr, nullptr, 0);
+
+ if (ret == 0) {
+ qCDebug(qLHWAccel) << " Using above hw context.";
+ return AVBufferUPtr(hwContext);
+ }
+ qCDebug(qLHWAccel) << " Could not create hw context:" << ret << strerror(-ret);
+ return nullptr;
+}
+
+// FFmpeg might crash when loading non-existent hw devices.
+// Roughly precheck the drivers/libraries first.
+static bool precheckDriver(AVHWDeviceType type)
+{
+    // the prechecks might need some improvement
+#if defined(Q_OS_LINUX)
+ if (type == AV_HWDEVICE_TYPE_CUDA) {
+ if (!QFile::exists(QLatin1String("/proc/driver/nvidia/version")))
+ return false;
+
+ // QTBUG-122199
+ // CUDA backend requires libnvcuvid in libavcodec
+ QLibrary lib("libnvcuvid.so");
+ if (!lib.load())
+ return false;
+ lib.unload();
+ return true;
+ }
+#elif defined(Q_OS_WINDOWS)
+ if (type == AV_HWDEVICE_TYPE_D3D11VA)
+ return QSystemLibrary(QLatin1String("d3d11.dll")).load();
+
+#if QT_FFMPEG_HAS_D3D12VA
+ if (type == AV_HWDEVICE_TYPE_D3D12VA)
+ return QSystemLibrary(QLatin1String("d3d12.dll")).load();
+#endif
+
+ if (type == AV_HWDEVICE_TYPE_DXVA2)
+ return QSystemLibrary(QLatin1String("d3d9.dll")).load();
+
+ // TODO: check nvenc/nvdec and revisit the checking
+ if (type == AV_HWDEVICE_TYPE_CUDA)
+ return QSystemLibrary(QLatin1String("nvml.dll")).load();
+#else
+ Q_UNUSED(type);
+#endif
+
+ return true;
+}
+
+static bool checkHwType(AVHWDeviceType type)
+{
+ const auto deviceName = av_hwdevice_get_type_name(type);
+ if (!deviceName) {
+ qWarning() << "Internal FFmpeg error, unknow hw type:" << type;
+ return false;
+ }
+
+ if (!precheckDriver(type)) {
+ qCDebug(qLHWAccel) << "Drivers for hw device" << deviceName << "is not installed";
+ return false;
+ }
+
+ if (type == AV_HWDEVICE_TYPE_MEDIACODEC ||
+ type == AV_HWDEVICE_TYPE_VIDEOTOOLBOX ||
+ type == AV_HWDEVICE_TYPE_D3D11VA ||
+#if QT_FFMPEG_HAS_D3D12VA
+ type == AV_HWDEVICE_TYPE_D3D12VA ||
+#endif
+ type == AV_HWDEVICE_TYPE_DXVA2)
+        return true; // Don't waste time; it's expected to work fine if the precheck is OK
+
+ QScopedValueRollback rollback(FFmpegLogsEnabledInThread);
+ FFmpegLogsEnabledInThread = false;
+
+ return loadHWContext(type) != nullptr;
+}
+
+static const std::vector<AVHWDeviceType> &deviceTypes()
+{
+ static const auto types = []() {
+ qCDebug(qLHWAccel) << "Check device types";
+ QElapsedTimer timer;
+ timer.start();
+
+ // gather hw pix formats
+ std::unordered_set<AVPixelFormat> hwPixFormats;
+ void *opaque = nullptr;
+ while (auto codec = av_codec_iterate(&opaque)) {
+ findAVPixelFormat(codec, [&](AVPixelFormat format) {
+ if (isHwPixelFormat(format))
+ hwPixFormats.insert(format);
+ return false;
+ });
+ }
+
+ // create a device types list
+ std::vector<AVHWDeviceType> result;
+ AVHWDeviceType type = AV_HWDEVICE_TYPE_NONE;
+ while ((type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE)
+ if (hwPixFormats.count(pixelFormatForHwDevice(type)) && checkHwType(type))
+ result.push_back(type);
+ result.shrink_to_fit();
+
+        // reorder the list according to preferredHardwareAccelerators
+        auto it = result.begin();
+        for (const auto preferred : preferredHardwareAccelerators) {
+            auto found = std::find(it, result.end(), preferred);
+ if (found != result.end())
+ std::rotate(it++, found, std::next(found));
+ }
+
+ using namespace std::chrono;
+ qCDebug(qLHWAccel) << "Device types checked. Spent time:" << duration_cast<microseconds>(timer.durationElapsed());
+
+ return result;
+ }();
+
+ return types;
+}
+
+static std::vector<AVHWDeviceType> deviceTypes(const char *envVarName)
+{
+ const auto definedDeviceTypes = qgetenv(envVarName);
+
+ if (definedDeviceTypes.isNull())
+ return deviceTypes();
+
+ std::vector<AVHWDeviceType> result;
+ const auto definedDeviceTypesString = QString::fromUtf8(definedDeviceTypes).toLower();
+ for (const auto &deviceType : definedDeviceTypesString.split(',')) {
+ if (!deviceType.isEmpty()) {
+ const auto foundType = av_hwdevice_find_type_by_name(deviceType.toUtf8().data());
+ if (foundType == AV_HWDEVICE_TYPE_NONE)
+ qWarning() << "Unknown hw device type" << deviceType;
+ else
+ result.emplace_back(foundType);
+ }
+ }
+
+ result.shrink_to_fit();
+ return result;
+}
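+
+// The override accepts a comma-separated list of FFmpeg device type names, as
+// understood by av_hwdevice_find_type_by_name, e.g. (illustrative):
+//   QT_FFMPEG_DECODING_HW_DEVICE_TYPES=vaapi,cuda
+// Unknown names are warned about and skipped.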
+
+template<typename CodecFinder>
+std::pair<const AVCodec *, std::unique_ptr<HWAccel>>
+findCodecWithHwAccel(AVCodecID id, const std::vector<AVHWDeviceType> &deviceTypes,
+ CodecFinder codecFinder,
+ const std::function<bool(const HWAccel &)> &hwAccelPredicate)
+{
+ for (auto type : deviceTypes) {
+ const auto codec = codecFinder(id, type, {});
+
+ if (!codec)
+ continue;
+
+ qCDebug(qLHWAccel) << "Found potential codec" << codec->name << "for hw accel" << type
+ << "; Checking the hw device...";
+
+ auto hwAccel = QFFmpeg::HWAccel::create(type);
+
+ if (!hwAccel)
+ continue;
+
+ if (hwAccelPredicate && !hwAccelPredicate(*hwAccel)) {
+ qCDebug(qLHWAccel) << "HW device is available but doesn't suit due to restrictions";
+ continue;
+ }
+
+ qCDebug(qLHWAccel) << "HW device is OK";
+
+ return { codec, std::move(hwAccel) };
+ }
+
+ qCDebug(qLHWAccel) << "No hw acceleration found for codec id" << id;
+
+ return { nullptr, nullptr };
+}
+
+static bool isNoConversionFormat(AVPixelFormat f)
+{
+ bool needsConversion = true;
+ QFFmpegVideoBuffer::toQtPixelFormat(f, &needsConversion);
+ return !needsConversion;
+}
+
+namespace {
+
+bool hwTextureConversionEnabled()
+{
+    // HW texture conversions are not stable in some cases, depending on the hardware and OS.
+    // We need the env var to allow testing without texture conversion on the user's side.
+ static const int disableHwConversion =
+ qEnvironmentVariableIntValue("QT_DISABLE_HW_TEXTURES_CONVERSION");
+
+ return !disableHwConversion;
+}
+
+void setupDecoder(const AVPixelFormat format, AVCodecContext *const codecContext)
+{
+ if (!hwTextureConversionEnabled())
+ return;
+
+#if QT_CONFIG(wmf)
+ if (format == AV_PIX_FMT_D3D11)
+ QFFmpeg::D3D11TextureConverter::SetupDecoderTextures(codecContext);
+#elif defined Q_OS_ANDROID
+ if (format == AV_PIX_FMT_MEDIACODEC)
+ QFFmpeg::MediaCodecTextureConverter::setupDecoderSurface(codecContext);
+#else
+ Q_UNUSED(codecContext);
+ Q_UNUSED(format);
+#endif
+}
+
+} // namespace
+
+// Used for the AVCodecContext::get_format callback
+AVPixelFormat getFormat(AVCodecContext *codecContext, const AVPixelFormat *suggestedFormats)
+{
+ // First check HW accelerated codecs, the HW device context must be set
+ if (codecContext->hw_device_ctx) {
+ auto *device_ctx = (AVHWDeviceContext *)codecContext->hw_device_ctx->data;
+ std::pair formatAndScore(AV_PIX_FMT_NONE, NotSuitableAVScore);
+
+ // to be rewritten via findBestAVFormat
+ for (int i = 0;
+ const AVCodecHWConfig *config = avcodec_get_hw_config(codecContext->codec, i); i++) {
+ if (!(config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX))
+ continue;
+
+ if (device_ctx->type != config->device_type)
+ continue;
+
+ const bool isDeprecated = (config->methods & AV_CODEC_HW_CONFIG_METHOD_AD_HOC) != 0;
+ const bool shouldCheckCodecFormats = config->pix_fmt == AV_PIX_FMT_NONE;
+
+            auto scoresGetter = [&](AVPixelFormat format) {
+ // check in supported codec->pix_fmts;
+ // no reason to use findAVPixelFormat as we're already in the hw_config loop
+ if (shouldCheckCodecFormats && !hasAVFormat(codecContext->codec->pix_fmts, format))
+ return NotSuitableAVScore;
+
+ if (!shouldCheckCodecFormats && config->pix_fmt != format)
+ return NotSuitableAVScore;
+
+ auto result = DefaultAVScore;
+
+ if (isDeprecated)
+ result -= 10000;
+ if (isHwPixelFormat(format))
+ result += 10;
+
+ return result;
+ };
+
+            const auto found = findBestAVFormat(suggestedFormats, scoresGetter);
+
+ if (found.second > formatAndScore.second)
+ formatAndScore = found;
+ }
+
+ const auto &format = formatAndScore.first;
+ if (format != AV_PIX_FMT_NONE) {
+ setupDecoder(format, codecContext);
+ qCDebug(qLHWAccel) << "Selected format" << format << "for hw" << device_ctx->type;
+ return format;
+ }
+ }
+
+ // prefer video formats we can handle directly
+ const auto noConversionFormat = findAVFormat(suggestedFormats, &isNoConversionFormat);
+ if (noConversionFormat != AV_PIX_FMT_NONE) {
+ qCDebug(qLHWAccel) << "Selected format with no conversion" << noConversionFormat;
+ return noConversionFormat;
+ }
+
+ qCDebug(qLHWAccel) << "Selected format with conversion" << *suggestedFormats;
+
+ // take the native format, this will involve one additional format conversion on the CPU side
+ return *suggestedFormats;
+}
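+
+// Presumably installed in the usual FFmpeg way by the decoder setup code
+// (an assumption; the wiring is not part of this file):
+//   codecContext->get_format = QFFmpeg::getFormat;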
+
+HWAccel::~HWAccel() = default;
+
+std::unique_ptr<HWAccel> HWAccel::create(AVHWDeviceType deviceType)
+{
+ if (auto ctx = loadHWContext(deviceType))
+ return std::unique_ptr<HWAccel>(new HWAccel(std::move(ctx)));
+ else
+ return {};
+}
+
+AVPixelFormat HWAccel::format(AVFrame *frame)
+{
+ if (!frame->hw_frames_ctx)
+ return AVPixelFormat(frame->format);
+
+ auto *hwFramesContext = (AVHWFramesContext *)frame->hw_frames_ctx->data;
+ Q_ASSERT(hwFramesContext);
+ return AVPixelFormat(hwFramesContext->sw_format);
+}
+
+const std::vector<AVHWDeviceType> &HWAccel::encodingDeviceTypes()
+{
+ static const auto &result = deviceTypes("QT_FFMPEG_ENCODING_HW_DEVICE_TYPES");
+ return result;
+}
+
+const std::vector<AVHWDeviceType> &HWAccel::decodingDeviceTypes()
+{
+ static const auto &result = deviceTypes("QT_FFMPEG_DECODING_HW_DEVICE_TYPES");
+ return result;
+}
+
+AVHWDeviceContext *HWAccel::hwDeviceContext() const
+{
+ return m_hwDeviceContext ? (AVHWDeviceContext *)m_hwDeviceContext->data : nullptr;
+}
+
+AVPixelFormat HWAccel::hwFormat() const
+{
+ return pixelFormatForHwDevice(deviceType());
+}
+
+const AVHWFramesConstraints *HWAccel::constraints() const
+{
+ std::call_once(m_constraintsOnceFlag, [this]() {
+ if (auto context = hwDeviceContextAsBuffer())
+ m_constraints.reset(av_hwdevice_get_hwframe_constraints(context, nullptr));
+ });
+
+ return m_constraints.get();
+}
+
+std::pair<const AVCodec *, std::unique_ptr<HWAccel>>
+HWAccel::findEncoderWithHwAccel(AVCodecID id, const std::function<bool(const HWAccel &)>& hwAccelPredicate)
+{
+ auto finder = qOverload<AVCodecID, const std::optional<AVHWDeviceType> &,
+ const std::optional<PixelOrSampleFormat> &>(&QFFmpeg::findAVEncoder);
+ return findCodecWithHwAccel(id, encodingDeviceTypes(), finder, hwAccelPredicate);
+}
+
+std::pair<const AVCodec *, std::unique_ptr<HWAccel>>
+HWAccel::findDecoderWithHwAccel(AVCodecID id, const std::function<bool(const HWAccel &)>& hwAccelPredicate)
+{
+ return findCodecWithHwAccel(id, decodingDeviceTypes(), &QFFmpeg::findAVDecoder,
+ hwAccelPredicate);
+}
+
+AVHWDeviceType HWAccel::deviceType() const
+{
+ return m_hwDeviceContext ? hwDeviceContext()->type : AV_HWDEVICE_TYPE_NONE;
+}
+
+void HWAccel::createFramesContext(AVPixelFormat swFormat, const QSize &size)
+{
+ if (m_hwFramesContext) {
+ qWarning() << "Frames context has been already created!";
+ return;
+ }
+
+ if (!m_hwDeviceContext)
+ return;
+
+ m_hwFramesContext.reset(av_hwframe_ctx_alloc(m_hwDeviceContext.get()));
+ auto *c = (AVHWFramesContext *)m_hwFramesContext->data;
+ c->format = hwFormat();
+ c->sw_format = swFormat;
+ c->width = size.width();
+ c->height = size.height();
+ qCDebug(qLHWAccel) << "init frames context";
+ int err = av_hwframe_ctx_init(m_hwFramesContext.get());
+ if (err < 0)
+ qWarning() << "failed to init HW frame context" << err << err2str(err);
+ else
+ qCDebug(qLHWAccel) << "Initialized frames context" << size << c->format << c->sw_format;
+}
+
+AVHWFramesContext *HWAccel::hwFramesContext() const
+{
+ return m_hwFramesContext ? (AVHWFramesContext *)m_hwFramesContext->data : nullptr;
+}
+
+
+TextureConverter::TextureConverter(QRhi *rhi)
+ : d(new Data)
+{
+ d->rhi = rhi;
+}
+
+TextureSet *TextureConverter::getTextures(AVFrame *frame)
+{
+ if (!frame || isNull())
+ return nullptr;
+
+ Q_ASSERT(frame->format == d->format);
+ return d->backend->getTextures(frame);
+}
+
+void TextureConverter::updateBackend(AVPixelFormat fmt)
+{
+ d->backend = nullptr;
+ if (!d->rhi)
+ return;
+
+ if (!hwTextureConversionEnabled())
+ return;
+
+ switch (fmt) {
+#if QT_CONFIG(vaapi)
+ case AV_PIX_FMT_VAAPI:
+ d->backend = std::make_unique<VAAPITextureConverter>(d->rhi);
+ break;
+#endif
+#ifdef Q_OS_DARWIN
+ case AV_PIX_FMT_VIDEOTOOLBOX:
+ d->backend = std::make_unique<VideoToolBoxTextureConverter>(d->rhi);
+ break;
+#endif
+#if QT_CONFIG(wmf)
+ case AV_PIX_FMT_D3D11:
+ d->backend = std::make_unique<D3D11TextureConverter>(d->rhi);
+ break;
+#endif
+#ifdef Q_OS_ANDROID
+ case AV_PIX_FMT_MEDIACODEC:
+ d->backend = std::make_unique<MediaCodecTextureConverter>(d->rhi);
+ break;
+#endif
+ default:
+ break;
+ }
+ d->format = fmt;
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11.cpp b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11.cpp
new file mode 100644
index 000000000..a2533a132
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11.cpp
@@ -0,0 +1,309 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpeghwaccel_d3d11_p.h"
+#include "playbackengine/qffmpegstreamdecoder_p.h"
+
+#include <qvideoframeformat.h>
+#include "qffmpegvideobuffer_p.h"
+
+#include <private/qvideotexturehelper_p.h>
+#include <private/qcomptr_p.h>
+#include <private/quniquehandle_p.h>
+
+#include <rhi/qrhi.h>
+
+#include <qopenglfunctions.h>
+#include <qdebug.h>
+#include <qloggingcategory.h>
+
+#include <libavutil/hwcontext_d3d11va.h>
+#include <d3d11_1.h>
+#include <dxgi1_2.h>
+
+QT_BEGIN_NAMESPACE
+
+namespace {
+
+Q_LOGGING_CATEGORY(qLcMediaFFmpegHWAccel, "qt.multimedia.hwaccel");
+
+ComPtr<ID3D11Device1> GetD3DDevice(QRhi *rhi)
+{
+ const auto native = static_cast<const QRhiD3D11NativeHandles *>(rhi->nativeHandles());
+ if (!native)
+ return {};
+
+ const ComPtr<ID3D11Device> rhiDevice = static_cast<ID3D11Device *>(native->dev);
+
+ ComPtr<ID3D11Device1> dev1;
+ if (rhiDevice.As(&dev1) != S_OK)
+ return nullptr;
+
+ return dev1;
+}
+
+} // namespace
+namespace QFFmpeg {
+
+bool TextureBridge::copyToSharedTex(ID3D11Device *dev, ID3D11DeviceContext *ctx,
+ const ComPtr<ID3D11Texture2D> &tex, UINT index,
+ const QSize &frameSize)
+{
+ if (!ensureSrcTex(dev, tex, frameSize))
+ return false;
+
+ // Flush to ensure that texture is fully updated before we share it.
+ ctx->Flush();
+
+ if (m_srcMutex->AcquireSync(m_srcKey, INFINITE) != S_OK)
+ return false;
+
+ const UINT width = static_cast<UINT>(frameSize.width());
+ const UINT height = static_cast<UINT>(frameSize.height());
+
+ // A crop box is needed because FFmpeg may have created textures
+ // that are bigger than the frame size to account for the decoder's
+ // surface alignment requirements.
+ const D3D11_BOX crop{ 0, 0, 0, width, height, 1 };
+ ctx->CopySubresourceRegion(m_srcTex.Get(), 0, 0, 0, 0, tex.Get(), index, &crop);
+
+ m_srcMutex->ReleaseSync(m_destKey);
+ return true;
+}
+
+ComPtr<ID3D11Texture2D> TextureBridge::copyFromSharedTex(const ComPtr<ID3D11Device1> &dev,
+ const ComPtr<ID3D11DeviceContext> &ctx)
+{
+ if (!ensureDestTex(dev))
+ return {};
+
+ if (m_destMutex->AcquireSync(m_destKey, INFINITE) != S_OK)
+ return {};
+
+ ctx->CopySubresourceRegion(m_outputTex.Get(), 0, 0, 0, 0, m_destTex.Get(), 0, nullptr);
+
+ m_destMutex->ReleaseSync(m_srcKey);
+
+ return m_outputTex;
+}
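+
+// The two keyed-mutex keys form a ping-pong handoff: the producer acquires
+// with m_srcKey (0) and releases with m_destKey (1); the consumer acquires
+// with m_destKey and releases with m_srcKey, so each side can only proceed
+// once the other has finished with the shared texture.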
+
+bool TextureBridge::ensureDestTex(const ComPtr<ID3D11Device1> &dev)
+{
+ if (m_destDevice != dev) {
+ // Destination device changed. Recreate texture.
+ m_destTex = nullptr;
+ m_destDevice = dev;
+ }
+
+ if (m_destTex)
+ return true;
+
+ if (m_destDevice->OpenSharedResource1(m_sharedHandle.get(), IID_PPV_ARGS(&m_destTex)) != S_OK)
+ return false;
+
+ CD3D11_TEXTURE2D_DESC desc{};
+ m_destTex->GetDesc(&desc);
+
+ desc.MiscFlags = 0;
+ desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
+
+ if (m_destDevice->CreateTexture2D(&desc, nullptr, m_outputTex.ReleaseAndGetAddressOf()) != S_OK)
+ return false;
+
+ if (m_destTex.As(&m_destMutex) != S_OK)
+ return false;
+
+ return true;
+}
+
+bool TextureBridge::ensureSrcTex(ID3D11Device *dev, const ComPtr<ID3D11Texture2D> &tex, const QSize &frameSize)
+{
+ if (!isSrcInitialized(dev, tex, frameSize))
+ return recreateSrc(dev, tex, frameSize);
+
+ return true;
+}
+
+bool TextureBridge::isSrcInitialized(const ID3D11Device *dev,
+ const ComPtr<ID3D11Texture2D> &tex,
+ const QSize &frameSize) const
+{
+ if (!m_srcTex)
+ return false;
+
+ // Check if device has changed
+ ComPtr<ID3D11Device> texDevice;
+ m_srcTex->GetDevice(texDevice.GetAddressOf());
+ if (dev != texDevice.Get())
+ return false;
+
+ // Check if shared texture has correct size and format
+ CD3D11_TEXTURE2D_DESC inputDesc{};
+ tex->GetDesc(&inputDesc);
+
+ CD3D11_TEXTURE2D_DESC currentDesc{};
+ m_srcTex->GetDesc(&currentDesc);
+
+ if (inputDesc.Format != currentDesc.Format)
+ return false;
+
+ const UINT width = static_cast<UINT>(frameSize.width());
+ const UINT height = static_cast<UINT>(frameSize.height());
+
+ if (currentDesc.Width != width || currentDesc.Height != height)
+ return false;
+
+ return true;
+}
+
+bool TextureBridge::recreateSrc(ID3D11Device *dev, const ComPtr<ID3D11Texture2D> &tex, const QSize &frameSize)
+{
+ m_sharedHandle.close();
+
+ CD3D11_TEXTURE2D_DESC desc{};
+ tex->GetDesc(&desc);
+
+ const UINT width = static_cast<UINT>(frameSize.width());
+ const UINT height = static_cast<UINT>(frameSize.height());
+
+ CD3D11_TEXTURE2D_DESC texDesc{ desc.Format, width, height };
+ texDesc.MipLevels = 1;
+ texDesc.MiscFlags = D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX | D3D11_RESOURCE_MISC_SHARED_NTHANDLE;
+
+ if (dev->CreateTexture2D(&texDesc, nullptr, m_srcTex.ReleaseAndGetAddressOf()) != S_OK)
+ return false;
+
+ ComPtr<IDXGIResource1> res;
+ if (m_srcTex.As(&res) != S_OK)
+ return false;
+
+ const HRESULT hr =
+ res->CreateSharedHandle(nullptr, DXGI_SHARED_RESOURCE_READ, nullptr, &m_sharedHandle);
+
+ if (hr != S_OK || !m_sharedHandle)
+ return false;
+
+ if (m_srcTex.As(&m_srcMutex) != S_OK || !m_srcMutex)
+ return false;
+
+ m_destTex = nullptr;
+ m_destMutex = nullptr;
+ return true;
+}
+
+class D3D11TextureSet : public TextureSet
+{
+public:
+ D3D11TextureSet(QRhi *rhi, ComPtr<ID3D11Texture2D> &&tex)
+ : m_owner{ rhi }, m_tex(std::move(tex))
+ {
+ }
+
+ qint64 textureHandle(QRhi *rhi, int /*plane*/) override
+ {
+ if (rhi != m_owner)
+ return 0u;
+ return reinterpret_cast<qint64>(m_tex.Get());
+ }
+
+private:
+ QRhi *m_owner = nullptr;
+ ComPtr<ID3D11Texture2D> m_tex;
+};
+
+D3D11TextureConverter::D3D11TextureConverter(QRhi *rhi)
+ : TextureConverterBackend(rhi), m_rhiDevice{ GetD3DDevice(rhi) }
+{
+ if (!m_rhiDevice)
+ return;
+
+ m_rhiDevice->GetImmediateContext(m_rhiCtx.GetAddressOf());
+}
+
+TextureSet *D3D11TextureConverter::getTextures(AVFrame *frame)
+{
+ if (!m_rhiDevice)
+ return nullptr;
+
+ if (!frame || !frame->hw_frames_ctx || frame->format != AV_PIX_FMT_D3D11)
+ return nullptr;
+
+ const auto *fCtx = reinterpret_cast<AVHWFramesContext *>(frame->hw_frames_ctx->data);
+ const auto *ctx = fCtx->device_ctx;
+
+ if (!ctx || ctx->type != AV_HWDEVICE_TYPE_D3D11VA)
+ return nullptr;
+
+ const ComPtr<ID3D11Texture2D> ffmpegTex = reinterpret_cast<ID3D11Texture2D *>(frame->data[0]);
+ const int index = static_cast<int>(reinterpret_cast<intptr_t>(frame->data[1]));
+
+ if (rhi->backend() == QRhi::D3D11) {
+ {
+ const auto *avDeviceCtx = static_cast<AVD3D11VADeviceContext *>(ctx->hwctx);
+
+ if (!avDeviceCtx)
+ return nullptr;
+
+ // Lock the FFmpeg device context while we copy from FFmpeg's
+ // frame pool into a shared texture because the underlying ID3D11DeviceContext
+ // is not thread safe.
+ avDeviceCtx->lock(avDeviceCtx->lock_ctx);
+ QScopeGuard autoUnlock([&] { avDeviceCtx->unlock(avDeviceCtx->lock_ctx); });
+
+ // Populate the shared texture with one slice from the frame pool, cropping away
+ // extra surface alignment areas that FFmpeg adds to the textures
+ QSize frameSize{ frame->width, frame->height };
+ if (!m_bridge.copyToSharedTex(avDeviceCtx->device, avDeviceCtx->device_context,
+ ffmpegTex, index, frameSize)) {
+ return nullptr;
+ }
+ }
+
+ // Get a copy of the texture on the RHI device
+ ComPtr<ID3D11Texture2D> output = m_bridge.copyFromSharedTex(m_rhiDevice, m_rhiCtx);
+
+ if (!output)
+ return nullptr;
+
+ return new D3D11TextureSet(rhi, std::move(output));
+ }
+
+ return nullptr;
+}
+
+void D3D11TextureConverter::SetupDecoderTextures(AVCodecContext *s)
+{
+ // We are holding pool frames alive for quite long, which may cause
+ // codecs to run out of frames because FFmpeg has a fixed size
+ // decoder frame pool. We must therefore add extra frames to the pool
+ // to account for the frames we keep alive. First, we need to account
+ // for the maximum number of queued frames during rendering. In
+ // addition, we add one frame for the RHI rendering pipeline, and one
+ // additional frame because we may hold one in the Qt event loop.
+
+ const qint32 maxRenderQueueSize = StreamDecoder::maxQueueSize(QPlatformMediaPlayer::VideoStream);
+ constexpr qint32 framesHeldByRhi = 1;
+ constexpr qint32 framesHeldByQtEventLoop = 1;
+ s->extra_hw_frames = maxRenderQueueSize + framesHeldByRhi + framesHeldByQtEventLoop;
+
+ int ret = avcodec_get_hw_frames_parameters(s, s->hw_device_ctx, AV_PIX_FMT_D3D11,
+ &s->hw_frames_ctx);
+ if (ret < 0) {
+ qCDebug(qLcMediaFFmpegHWAccel) << "Failed to allocate HW frames context" << ret;
+ return;
+ }
+
+ const auto *frames_ctx = reinterpret_cast<const AVHWFramesContext *>(s->hw_frames_ctx->data);
+ auto *hwctx = static_cast<AVD3D11VAFramesContext *>(frames_ctx->hwctx);
+ hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
+ hwctx->BindFlags = D3D11_BIND_DECODER | D3D11_BIND_SHADER_RESOURCE;
+ ret = av_hwframe_ctx_init(s->hw_frames_ctx);
+ if (ret < 0) {
+ qCDebug(qLcMediaFFmpegHWAccel) << "Failed to initialize HW frames context" << ret;
+ av_buffer_unref(&s->hw_frames_ctx);
+ }
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11_p.h b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11_p.h
new file mode 100644
index 000000000..bfcc1f10c
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_d3d11_p.h
@@ -0,0 +1,104 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGHWACCEL_D3D11_P_H
+#define QFFMPEGHWACCEL_D3D11_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qffmpeghwaccel_p.h"
+#include <private/quniquehandle_p.h>
+#include <private/qcomptr_p.h>
+#include <qt_windows.h>
+
+#include <d3d11.h>
+#include <d3d11_1.h>
+
+#if QT_CONFIG(wmf)
+
+QT_BEGIN_NAMESPACE
+
+class QRhi;
+
+namespace QFFmpeg {
+
+struct SharedTextureHandleTraits
+{
+ using Type = HANDLE;
+ static Type invalidValue() { return nullptr; }
+ static bool close(Type handle) { return CloseHandle(handle) != 0; }
+};
+
+using SharedTextureHandle = QUniqueHandle<SharedTextureHandleTraits>;
+
+/*! \internal Utility class for synchronized transfer of a texture between two D3D devices
+ *
+ * This class is used to copy a texture from one device to another. This
+ * is implemented using a shared texture, along with keyed mutexes that
+ * synchronize access to the texture.
+ *
+ * This is needed because we must copy data from FFmpeg to RHI, and FFmpeg
+ * and RHI use different D3D devices.
+ */
+class TextureBridge final
+{
+public:
+ /** Copy a texture slice at position 'index' belonging to device 'dev'
+ * into a shared texture, limiting the texture size to the frame size */
+ bool copyToSharedTex(ID3D11Device *dev, ID3D11DeviceContext *ctx,
+ const ComPtr<ID3D11Texture2D> &tex, UINT index, const QSize &frameSize);
+
+ /** Obtain a copy of the texture on a second device 'dev' */
+ ComPtr<ID3D11Texture2D> copyFromSharedTex(const ComPtr<ID3D11Device1> &dev,
+ const ComPtr<ID3D11DeviceContext> &ctx);
+
+private:
+ bool ensureDestTex(const ComPtr<ID3D11Device1> &dev);
+ bool ensureSrcTex(ID3D11Device *dev, const ComPtr<ID3D11Texture2D> &tex, const QSize &frameSize);
+ bool isSrcInitialized(const ID3D11Device *dev, const ComPtr<ID3D11Texture2D> &tex, const QSize &frameSize) const;
+ bool recreateSrc(ID3D11Device *dev, const ComPtr<ID3D11Texture2D> &tex, const QSize &frameSize);
+
+ SharedTextureHandle m_sharedHandle{};
+
+ const UINT m_srcKey = 0;
+ ComPtr<ID3D11Texture2D> m_srcTex;
+ ComPtr<IDXGIKeyedMutex> m_srcMutex;
+
+ const UINT m_destKey = 1;
+ ComPtr<ID3D11Device1> m_destDevice;
+ ComPtr<ID3D11Texture2D> m_destTex;
+ ComPtr<IDXGIKeyedMutex> m_destMutex;
+
+ ComPtr<ID3D11Texture2D> m_outputTex;
+};
+
+class D3D11TextureConverter : public TextureConverterBackend
+{
+public:
+ D3D11TextureConverter(QRhi *rhi);
+
+ TextureSet *getTextures(AVFrame *frame) override;
+
+ static void SetupDecoderTextures(AVCodecContext *s);
+
+private:
+ ComPtr<ID3D11Device1> m_rhiDevice;
+ ComPtr<ID3D11DeviceContext> m_rhiCtx;
+ TextureBridge m_bridge;
+};
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif
+
+#endif
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_mediacodec.cpp b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_mediacodec.cpp
new file mode 100644
index 000000000..e9dd8705a
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_mediacodec.cpp
@@ -0,0 +1,120 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpeghwaccel_mediacodec_p.h"
+
+#include "androidsurfacetexture_p.h"
+#include <rhi/qrhi.h>
+
+extern "C" {
+#include <libavcodec/mediacodec.h>
+#include <libavutil/hwcontext_mediacodec.h>
+}
+
+#if !defined(Q_OS_ANDROID)
+# error "Configuration error"
+#endif
+
+namespace QFFmpeg {
+
+class MediaCodecTextureSet : public TextureSet
+{
+public:
+ MediaCodecTextureSet(qint64 textureHandle) : handle(textureHandle) { }
+
+ qint64 textureHandle(QRhi *, int plane) override { return (plane == 0) ? handle : 0; }
+
+private:
+ qint64 handle;
+};
+
+namespace {
+
+void deleteSurface(AVHWDeviceContext *ctx)
+{
+ AndroidSurfaceTexture* s = reinterpret_cast<AndroidSurfaceTexture *>(ctx->user_opaque);
+ delete s;
+}
+
+AndroidSurfaceTexture* getTextureSurface(AVFrame *frame)
+{
+ if (!frame || !frame->hw_frames_ctx)
+ return nullptr;
+
+ auto *frameContext = reinterpret_cast<AVHWFramesContext *>(frame->hw_frames_ctx->data);
+
+ if (!frameContext || !frameContext->device_ctx)
+ return nullptr;
+
+ AVHWDeviceContext *deviceContext = frameContext->device_ctx;
+
+ return reinterpret_cast<AndroidSurfaceTexture *>(deviceContext->user_opaque);
+}
+} // namespace
+
+void MediaCodecTextureConverter::setupDecoderSurface(AVCodecContext *avCodecContext)
+{
+ std::unique_ptr<AndroidSurfaceTexture> androidSurfaceTexture(new AndroidSurfaceTexture(0));
+ AVMediaCodecContext *mediacodecContext = av_mediacodec_alloc_context();
+ av_mediacodec_default_init(avCodecContext, mediacodecContext, androidSurfaceTexture->surface());
+
+ if (!avCodecContext->hw_device_ctx || !avCodecContext->hw_device_ctx->data)
+ return;
+
+ AVHWDeviceContext *deviceContext =
+ reinterpret_cast<AVHWDeviceContext *>(avCodecContext->hw_device_ctx->data);
+
+ if (!deviceContext->hwctx)
+ return;
+
+ AVMediaCodecDeviceContext *mediaDeviceContext =
+ reinterpret_cast<AVMediaCodecDeviceContext *>(deviceContext->hwctx);
+
+ if (!mediaDeviceContext)
+ return;
+
+ mediaDeviceContext->surface = androidSurfaceTexture->surface();
+
+ Q_ASSERT(deviceContext->user_opaque == nullptr);
+ deviceContext->user_opaque = androidSurfaceTexture.release();
+ deviceContext->free = deleteSurface;
+}
+
+TextureSet *MediaCodecTextureConverter::getTextures(AVFrame *frame)
+{
+ AndroidSurfaceTexture * androidSurfaceTexture = getTextureSurface(frame);
+
+ if (!androidSurfaceTexture || !androidSurfaceTexture->isValid())
+ return {};
+
+ if (!externalTexture || m_currentSurfaceIndex != androidSurfaceTexture->index()) {
+ m_currentSurfaceIndex = androidSurfaceTexture->index();
+ androidSurfaceTexture->detachFromGLContext();
+ externalTexture = std::unique_ptr<QRhiTexture>(
+ rhi->newTexture(QRhiTexture::Format::RGBA8, { frame->width, frame->height }, 1,
+ QRhiTexture::ExternalOES));
+
+ if (!externalTexture->create()) {
+ qWarning() << "Failed to create the external texture!";
+ return {};
+ }
+
+ quint64 textureHandle = externalTexture->nativeTexture().object;
+ androidSurfaceTexture->attachToGLContext(textureHandle);
+ }
+
+ // release a MediaCodec buffer and render it to the surface
+ AVMediaCodecBuffer *buffer = (AVMediaCodecBuffer *)frame->data[3];
+
+ if (!buffer) {
+ qWarning() << "Received a frame without AVMediaCodecBuffer.";
+ } else if (av_mediacodec_release_buffer(buffer, 1) < 0) {
+ qWarning() << "Failed to render buffer to surface.";
+ return {};
+ }
+
+ androidSurfaceTexture->updateTexImage();
+
+ return new MediaCodecTextureSet(externalTexture->nativeTexture().object);
+}
+} // namespace QFFmpeg
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_mediacodec_p.h b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_mediacodec_p.h
new file mode 100644
index 000000000..79d33d03e
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_mediacodec_p.h
@@ -0,0 +1,36 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGHWACCEL_MEDIACODEC_P_H
+#define QFFMPEGHWACCEL_MEDIACODEC_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qffmpeghwaccel_p.h"
+#include <memory>
+
+namespace QFFmpeg {
+struct Frame;
+
+class MediaCodecTextureConverter : public TextureConverterBackend
+{
+public:
+    MediaCodecTextureConverter(QRhi *rhi) : TextureConverterBackend(rhi) { }
+ TextureSet *getTextures(AVFrame *frame) override;
+
+ static void setupDecoderSurface(AVCodecContext *s);
+private:
+ std::unique_ptr<QRhiTexture> externalTexture;
+ quint64 m_currentSurfaceIndex = 0;
+};
+}
+#endif // QFFMPEGHWACCEL_MEDIACODEC_P_H
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_p.h b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_p.h
new file mode 100644
index 000000000..bc6547f12
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_p.h
@@ -0,0 +1,130 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGHWACCEL_P_H
+#define QFFMPEGHWACCEL_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qffmpeg_p.h"
+#include "qvideoframeformat.h"
+#include "qabstractvideobuffer.h"
+
+#include <qshareddata.h>
+#include <memory>
+#include <functional>
+#include <mutex>
+
+QT_BEGIN_NAMESPACE
+
+class QRhi;
+class QRhiTexture;
+class QFFmpegVideoBuffer;
+
+namespace QFFmpeg {
+
+// used for the get_format callback for the decoder
+enum AVPixelFormat getFormat(struct AVCodecContext *s, const enum AVPixelFormat * fmt);
+
+class HWAccel;
+
+class TextureSet {
+public:
+ // ### Should add QVideoFrameFormat::PixelFormat here
+ virtual ~TextureSet() {}
+ virtual qint64 textureHandle(QRhi *, int /*plane*/) { return 0; }
+};
+
+class TextureConverterBackend
+{
+public:
+ TextureConverterBackend(QRhi *rhi)
+ : rhi(rhi)
+ {}
+ virtual ~TextureConverterBackend() {}
+ virtual TextureSet *getTextures(AVFrame * /*frame*/) { return nullptr; }
+
+ QRhi *rhi = nullptr;
+};
+
+class TextureConverter
+{
+ class Data final
+ {
+ public:
+ QAtomicInt ref = 0;
+ QRhi *rhi = nullptr;
+ AVPixelFormat format = AV_PIX_FMT_NONE;
+ std::unique_ptr<TextureConverterBackend> backend;
+ };
+public:
+ TextureConverter(QRhi *rhi = nullptr);
+
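+    // Lazily (re)creates the conversion backend: updateBackend() is only
+    // invoked when the incoming frame's pixel format differs from the cached one.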
+ void init(AVFrame *frame) {
+ AVPixelFormat fmt = frame ? AVPixelFormat(frame->format) : AV_PIX_FMT_NONE;
+ if (fmt != d->format)
+ updateBackend(fmt);
+ }
+ TextureSet *getTextures(AVFrame *frame);
+ bool isNull() const { return !d->backend || !d->backend->rhi; }
+
+private:
+ void updateBackend(AVPixelFormat format);
+
+ QExplicitlySharedDataPointer<Data> d;
+};
+
+class HWAccel
+{
+ AVBufferUPtr m_hwDeviceContext;
+ AVBufferUPtr m_hwFramesContext;
+
+ mutable std::once_flag m_constraintsOnceFlag;
+ mutable AVHWFramesConstraintsUPtr m_constraints;
+
+public:
+ ~HWAccel();
+
+ static std::unique_ptr<HWAccel> create(AVHWDeviceType deviceType);
+
+ static std::pair<const AVCodec *, std::unique_ptr<HWAccel>>
+ findEncoderWithHwAccel(AVCodecID id,
+ const std::function<bool(const HWAccel &)>& hwAccelPredicate = nullptr);
+
+ static std::pair<const AVCodec *, std::unique_ptr<HWAccel>>
+ findDecoderWithHwAccel(AVCodecID id,
+ const std::function<bool(const HWAccel &)>& hwAccelPredicate = nullptr);
+
+ AVHWDeviceType deviceType() const;
+
+ AVBufferRef *hwDeviceContextAsBuffer() const { return m_hwDeviceContext.get(); }
+ AVHWDeviceContext *hwDeviceContext() const;
+ AVPixelFormat hwFormat() const;
+ const AVHWFramesConstraints *constraints() const;
+
+ void createFramesContext(AVPixelFormat swFormat, const QSize &size);
+ AVBufferRef *hwFramesContextAsBuffer() const { return m_hwFramesContext.get(); }
+ AVHWFramesContext *hwFramesContext() const;
+
+ static AVPixelFormat format(AVFrame *frame);
+ static const std::vector<AVHWDeviceType> &encodingDeviceTypes();
+
+ static const std::vector<AVHWDeviceType> &decodingDeviceTypes();
+
+private:
+ HWAccel(AVBufferUPtr hwDeviceContext) : m_hwDeviceContext(std::move(hwDeviceContext)) { }
+};
+
+}
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi.cpp b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi.cpp
new file mode 100644
index 000000000..7e46e3537
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi.cpp
@@ -0,0 +1,364 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpeghwaccel_vaapi_p.h"
+
+#if !QT_CONFIG(vaapi)
+#error "Configuration error"
+#endif
+
+#include <va/va.h>
+
+#include <qvideoframeformat.h>
+#include "qffmpegvideobuffer_p.h"
+#include "private/qvideotexturehelper_p.h"
+
+#include <rhi/qrhi.h>
+
+#include <qguiapplication.h>
+#include <qpa/qplatformnativeinterface.h>
+
+#include <qopenglfunctions.h>
+
+//#define VA_EXPORT_USE_LAYERS
+
+#if __has_include("drm/drm_fourcc.h")
+#include <drm/drm_fourcc.h>
+#elif __has_include("libdrm/drm_fourcc.h")
+#include <libdrm/drm_fourcc.h>
+#else
+// keep things building without drm_fourcc.h
+#define fourcc_code(a, b, c, d) ((uint32_t)(a) | ((uint32_t)(b) << 8) | \
+ ((uint32_t)(c) << 16) | ((uint32_t)(d) << 24))
+
+#define DRM_FORMAT_RGBA8888 fourcc_code('R', 'A', '2', '4') /* [31:0] R:G:B:A 8:8:8:8 little endian */
+#define DRM_FORMAT_RGB888 fourcc_code('R', 'G', '2', '4') /* [23:0] R:G:B little endian */
+#define DRM_FORMAT_RG88 fourcc_code('R', 'G', '8', '8') /* [15:0] R:G 8:8 little endian */
+#define DRM_FORMAT_ABGR8888 fourcc_code('A', 'B', '2', '4') /* [31:0] A:B:G:R 8:8:8:8 little endian */
+#define DRM_FORMAT_BGR888 fourcc_code('B', 'G', '2', '4') /* [23:0] B:G:R little endian */
+#define DRM_FORMAT_GR88 fourcc_code('G', 'R', '8', '8') /* [15:0] G:R 8:8 little endian */
+#define DRM_FORMAT_R8 fourcc_code('R', '8', ' ', ' ') /* [7:0] R */
+#define DRM_FORMAT_R16 fourcc_code('R', '1', '6', ' ') /* [15:0] R little endian */
+#define DRM_FORMAT_RGB565 fourcc_code('R', 'G', '1', '6') /* [15:0] R:G:B 5:6:5 little endian */
+#define DRM_FORMAT_RG1616 fourcc_code('R', 'G', '3', '2') /* [31:0] R:G 16:16 little endian */
+#define DRM_FORMAT_GR1616 fourcc_code('G', 'R', '3', '2') /* [31:0] G:R 16:16 little endian */
+#define DRM_FORMAT_BGRA1010102 fourcc_code('B', 'A', '3', '0') /* [31:0] B:G:R:A 10:10:10:2 little endian */
+#endif
+
+extern "C" {
+#include <libavutil/hwcontext_vaapi.h>
+}
+
+#include <va/va_drm.h>
+#include <va/va_drmcommon.h>
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+
+#include <unistd.h>
+
+#include <qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLHWAccelVAAPI, "qt.multimedia.ffmpeg.hwaccelvaapi");
+
+namespace QFFmpeg {
+
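+// Maps a Qt pixel format to the per-plane DRM fourcc codes used when the
+// VAAPI surface planes are imported as EGL dma-buf images. The returned
+// array holds one fourcc per plane and is zero-terminated.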
+static const quint32 *fourccFromPixelFormat(const QVideoFrameFormat::PixelFormat format)
+{
+#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN
+ const quint32 rgba_fourcc = DRM_FORMAT_ABGR8888;
+ const quint32 rg_fourcc = DRM_FORMAT_GR88;
+ const quint32 rg16_fourcc = DRM_FORMAT_GR1616;
+#else
+ const quint32 rgba_fourcc = DRM_FORMAT_RGBA8888;
+ const quint32 rg_fourcc = DRM_FORMAT_RG88;
+ const quint32 rg16_fourcc = DRM_FORMAT_RG1616;
+#endif
+
+// qCDebug(qLHWAccelVAAPI) << "Getting DRM fourcc for pixel format" << format;
+
+ switch (format) {
+ case QVideoFrameFormat::Format_Invalid:
+ case QVideoFrameFormat::Format_IMC1:
+ case QVideoFrameFormat::Format_IMC2:
+ case QVideoFrameFormat::Format_IMC3:
+ case QVideoFrameFormat::Format_IMC4:
+ case QVideoFrameFormat::Format_SamplerExternalOES:
+ case QVideoFrameFormat::Format_Jpeg:
+ case QVideoFrameFormat::Format_SamplerRect:
+ return nullptr;
+
+ case QVideoFrameFormat::Format_ARGB8888:
+ case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
+ case QVideoFrameFormat::Format_XRGB8888:
+ case QVideoFrameFormat::Format_BGRA8888:
+ case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
+ case QVideoFrameFormat::Format_BGRX8888:
+ case QVideoFrameFormat::Format_ABGR8888:
+ case QVideoFrameFormat::Format_XBGR8888:
+ case QVideoFrameFormat::Format_RGBA8888:
+ case QVideoFrameFormat::Format_RGBX8888:
+ case QVideoFrameFormat::Format_AYUV:
+ case QVideoFrameFormat::Format_AYUV_Premultiplied:
+ case QVideoFrameFormat::Format_UYVY:
+ case QVideoFrameFormat::Format_YUYV:
+ {
+ static constexpr quint32 format[] = { rgba_fourcc, 0, 0, 0 };
+ return format;
+ }
+
+ case QVideoFrameFormat::Format_Y8:
+ {
+ static constexpr quint32 format[] = { DRM_FORMAT_R8, 0, 0, 0 };
+ return format;
+ }
+ case QVideoFrameFormat::Format_Y16:
+ {
+ static constexpr quint32 format[] = { DRM_FORMAT_R16, 0, 0, 0 };
+ return format;
+ }
+
+ case QVideoFrameFormat::Format_YUV420P:
+ case QVideoFrameFormat::Format_YUV422P:
+ case QVideoFrameFormat::Format_YV12:
+ {
+ static constexpr quint32 format[] = { DRM_FORMAT_R8, DRM_FORMAT_R8, DRM_FORMAT_R8, 0 };
+ return format;
+ }
+ case QVideoFrameFormat::Format_YUV420P10:
+ {
+ static constexpr quint32 format[] = { DRM_FORMAT_R16, DRM_FORMAT_R16, DRM_FORMAT_R16, 0 };
+ return format;
+ }
+
+ case QVideoFrameFormat::Format_NV12:
+ case QVideoFrameFormat::Format_NV21:
+ {
+ static constexpr quint32 format[] = { DRM_FORMAT_R8, rg_fourcc, 0, 0 };
+ return format;
+ }
+
+ case QVideoFrameFormat::Format_P010:
+ case QVideoFrameFormat::Format_P016:
+ {
+ static constexpr quint32 format[] = { DRM_FORMAT_R16, rg16_fourcc, 0, 0 };
+ return format;
+ }
+ }
+ return nullptr;
+}
+
+class VAAPITextureSet : public TextureSet
+{
+public:
+ ~VAAPITextureSet();
+ qint64 textureHandle(QRhi *, int plane) override {
+ return textures[plane];
+ }
+
+ QRhi *rhi = nullptr;
+ QOpenGLContext *glContext = nullptr;
+ int nPlanes = 0;
+ GLuint textures[4] = {};
+};
+
+
+VAAPITextureConverter::VAAPITextureConverter(QRhi *rhi)
+ : TextureConverterBackend(nullptr)
+{
+ qCDebug(qLHWAccelVAAPI) << ">>>> Creating VAAPI HW accelerator";
+
+ if (!rhi || rhi->backend() != QRhi::OpenGLES2) {
+ qWarning() << "VAAPITextureConverter: No rhi or non openGL based RHI";
+ this->rhi = nullptr;
+ return;
+ }
+
+ auto *nativeHandles = static_cast<const QRhiGles2NativeHandles *>(rhi->nativeHandles());
+ glContext = nativeHandles->context;
+ if (!glContext) {
+ qCDebug(qLHWAccelVAAPI) << " no GL context, disabling";
+ return;
+ }
+ const QString platform = QGuiApplication::platformName();
+ QPlatformNativeInterface *pni = QGuiApplication::platformNativeInterface();
+ eglDisplay = pni->nativeResourceForIntegration(QByteArrayLiteral("egldisplay"));
+ qCDebug(qLHWAccelVAAPI) << " platform is" << platform << eglDisplay;
+
+ if (!eglDisplay) {
+ qCDebug(qLHWAccelVAAPI) << " no egl display, disabling";
+ return;
+ }
+ eglImageTargetTexture2D = eglGetProcAddress("glEGLImageTargetTexture2DOES");
+    if (!eglImageTargetTexture2D) {
+ qCDebug(qLHWAccelVAAPI) << " no eglImageTargetTexture2D, disabling";
+ return;
+ }
+
+ // everything ok, indicate that we can do zero copy
+ this->rhi = rhi;
+}
+
+VAAPITextureConverter::~VAAPITextureConverter()
+{
+}
+
+//#define VA_EXPORT_USE_LAYERS
+TextureSet *VAAPITextureConverter::getTextures(AVFrame *frame)
+{
+// qCDebug(qLHWAccelVAAPI) << "VAAPIAccel::getTextures";
+ if (frame->format != AV_PIX_FMT_VAAPI || !eglDisplay) {
+ qCDebug(qLHWAccelVAAPI) << "format/egl error" << frame->format << eglDisplay;
+ return nullptr;
+ }
+
+ if (!frame->hw_frames_ctx)
+ return nullptr;
+
+ auto *fCtx = (AVHWFramesContext *)frame->hw_frames_ctx->data;
+ auto *ctx = fCtx->device_ctx;
+ if (!ctx)
+ return nullptr;
+
+ auto *vaCtx = (AVVAAPIDeviceContext *)ctx->hwctx;
+ auto vaDisplay = vaCtx->display;
+ if (!vaDisplay) {
+ qCDebug(qLHWAccelVAAPI) << " no VADisplay, disabling";
+ return nullptr;
+ }
+
+ VASurfaceID vaSurface = (uintptr_t)frame->data[3];
+
+ VADRMPRIMESurfaceDescriptor prime = {};
+ if (vaExportSurfaceHandle(vaDisplay, vaSurface,
+ VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME_2,
+ VA_EXPORT_SURFACE_READ_ONLY |
+#ifdef VA_EXPORT_USE_LAYERS
+ VA_EXPORT_SURFACE_SEPARATE_LAYERS,
+#else
+ VA_EXPORT_SURFACE_COMPOSED_LAYERS,
+#endif
+ &prime) != VA_STATUS_SUCCESS)
+ {
+ qWarning() << "vaExportSurfaceHandle failed";
+ return nullptr;
+ }
+
+ // Make sure all fd's in 'prime' are closed when we return from this function
+ QScopeGuard closeObjectsGuard([&prime]() {
+ for (uint32_t i = 0; i < prime.num_objects; ++i)
+ close(prime.objects[i].fd);
+ });
+
+ // ### Check that prime.fourcc is what we expect
+ vaSyncSurface(vaDisplay, vaSurface);
+
+// qCDebug(qLHWAccelVAAPI) << "VAAPIAccel: vaSufraceDesc: width/height" << prime.width << prime.height << "num objects"
+// << prime.num_objects << "num layers" << prime.num_layers;
+
+ QOpenGLFunctions functions(glContext);
+
+ AVPixelFormat fmt = HWAccel::format(frame);
+ bool needsConversion;
+ auto qtFormat = QFFmpegVideoBuffer::toQtPixelFormat(fmt, &needsConversion);
+ auto *drm_formats = fourccFromPixelFormat(qtFormat);
+ if (!drm_formats || needsConversion) {
+ qWarning() << "can't use DMA transfer for pixel format" << fmt << qtFormat;
+ return nullptr;
+ }
+
+ auto *desc = QVideoTextureHelper::textureDescription(qtFormat);
+ int nPlanes = 0;
+    for (; nPlanes < 4; ++nPlanes) { // drm_formats holds at most four entries
+ if (drm_formats[nPlanes] == 0)
+ break;
+ }
+ Q_ASSERT(nPlanes == desc->nplanes);
+ nPlanes = desc->nplanes;
+// qCDebug(qLHWAccelVAAPI) << "VAAPIAccel: nPlanes" << nPlanes;
+
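+    // Import each plane as an EGL dma-buf image and bind it to a freshly
+    // generated GL texture via glEGLImageTargetTexture2DOES; the EGLImages
+    // themselves are released again once the textures have been bound.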
+ rhi->makeThreadLocalNativeContextCurrent();
+
+ EGLImage images[4];
+ GLuint glTextures[4] = {};
+ functions.glGenTextures(nPlanes, glTextures);
+ for (int i = 0; i < nPlanes; ++i) {
+#ifdef VA_EXPORT_USE_LAYERS
+#define LAYER i
+#define PLANE 0
+ if (prime.layers[i].drm_format != drm_formats[i]) {
+ qWarning() << "expected DRM format check failed expected"
+ << Qt::hex << drm_formats[i] << "got" << prime.layers[i].drm_format;
+ }
+#else
+#define LAYER 0
+#define PLANE i
+#endif
+
+ EGLAttrib img_attr[] = {
+ EGL_LINUX_DRM_FOURCC_EXT, (EGLint)drm_formats[i],
+ EGL_WIDTH, desc->widthForPlane(frame->width, i),
+ EGL_HEIGHT, desc->heightForPlane(frame->height, i),
+ EGL_DMA_BUF_PLANE0_FD_EXT, prime.objects[prime.layers[LAYER].object_index[PLANE]].fd,
+ EGL_DMA_BUF_PLANE0_OFFSET_EXT, (EGLint)prime.layers[LAYER].offset[PLANE],
+ EGL_DMA_BUF_PLANE0_PITCH_EXT, (EGLint)prime.layers[LAYER].pitch[PLANE],
+ EGL_NONE
+ };
+ images[i] = eglCreateImage(eglDisplay, EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, nullptr, img_attr);
+ if (!images[i]) {
+                const EGLint error = eglGetError();
+ if (error == EGL_BAD_MATCH) {
+ qWarning() << "eglCreateImage failed for plane" << i << "with error code EGL_BAD_MATCH, "
+ "disabling hardware acceleration. This could indicate an EGL implementation issue."
+ "\nVAAPI driver: " << vaQueryVendorString(vaDisplay)
+ << "\nEGL vendor:" << eglQueryString(eglDisplay, EGL_VENDOR);
+ this->rhi = nullptr; // Disabling texture conversion here to fix QTBUG-112312
+ return nullptr;
+ }
+            if (error != EGL_SUCCESS) {
+ qWarning() << "eglCreateImage failed for plane" << i << "with error code" << error;
+ return nullptr;
+ }
+ }
+ functions.glActiveTexture(GL_TEXTURE0 + i);
+ functions.glBindTexture(GL_TEXTURE_2D, glTextures[i]);
+
+ PFNGLEGLIMAGETARGETTEXTURE2DOESPROC eglImageTargetTexture2D = (PFNGLEGLIMAGETARGETTEXTURE2DOESPROC)this->eglImageTargetTexture2D;
+ eglImageTargetTexture2D(GL_TEXTURE_2D, images[i]);
+ GLenum error = glGetError();
+ if (error)
+ qWarning() << "eglImageTargetTexture2D failed with error code" << error;
+ }
+
+ for (int i = 0; i < nPlanes; ++i) {
+ functions.glActiveTexture(GL_TEXTURE0 + i);
+ functions.glBindTexture(GL_TEXTURE_2D, 0);
+ eglDestroyImage(eglDisplay, images[i]);
+ }
+
+ VAAPITextureSet *textureSet = new VAAPITextureSet;
+ textureSet->nPlanes = nPlanes;
+ textureSet->rhi = rhi;
+ textureSet->glContext = glContext;
+
+ for (int i = 0; i < 4; ++i)
+ textureSet->textures[i] = glTextures[i];
+// qCDebug(qLHWAccelVAAPI) << "VAAPIAccel: got textures" << textures[0] << textures[1] << textures[2] << textures[3];
+
+ return textureSet;
+}
+
+VAAPITextureSet::~VAAPITextureSet()
+{
+ if (rhi) {
+ rhi->makeThreadLocalNativeContextCurrent();
+ QOpenGLFunctions functions(glContext);
+ functions.glDeleteTextures(nPlanes, textures);
+ }
+}
+
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi_p.h b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi_p.h
new file mode 100644
index 000000000..03084cc72
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_vaapi_p.h
@@ -0,0 +1,48 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGHWACCEL_VAAPI_P_H
+#define QFFMPEGHWACCEL_VAAPI_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qffmpeghwaccel_p.h"
+
+#if QT_CONFIG(vaapi)
+
+#include <qshareddata.h>
+
+QT_BEGIN_NAMESPACE
+
+class QRhi;
+class QOpenGLContext;
+
+namespace QFFmpeg {
+
+class VAAPITextureConverter : public TextureConverterBackend
+{
+public:
+ VAAPITextureConverter(QRhi *rhi);
+ ~VAAPITextureConverter();
+
+ TextureSet *getTextures(AVFrame *frame) override;
+
+ Qt::HANDLE eglDisplay = nullptr;
+ QOpenGLContext *glContext = nullptr;
+ QFunctionPointer eglImageTargetTexture2D = nullptr;
+};
+}
+
+QT_END_NAMESPACE
+
+#endif
+
+#endif
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_videotoolbox.mm b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_videotoolbox.mm
new file mode 100644
index 000000000..948f7fc23
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_videotoolbox.mm
@@ -0,0 +1,288 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpeghwaccel_videotoolbox_p.h"
+
+#if !defined(Q_OS_DARWIN)
+#error "Configuration error"
+#endif
+
+#include <qvideoframeformat.h>
+#include <qffmpegvideobuffer_p.h>
+#include <qloggingcategory.h>
+#include "private/qvideotexturehelper_p.h"
+
+#include <rhi/qrhi.h>
+
+#include <CoreVideo/CVMetalTexture.h>
+#include <CoreVideo/CVMetalTextureCache.h>
+
+#include <qopenglcontext.h>
+#ifdef Q_OS_MACOS
+#import <AppKit/AppKit.h>
+#endif
+#ifdef Q_OS_IOS
+#import <OpenGLES/EAGL.h>
+#endif
+#import <Metal/Metal.h>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcVideotoolbox, "qt.multimedia.ffmpeg.videotoolbox")
+
+namespace QFFmpeg
+{
+
+static CVMetalTextureCacheRef &mtc(void *&cache) { return reinterpret_cast<CVMetalTextureCacheRef &>(cache); }
+
+class VideoToolBoxTextureSet : public TextureSet
+{
+public:
+ ~VideoToolBoxTextureSet();
+ qint64 textureHandle(QRhi *, int plane) override;
+
+ QRhi *rhi = nullptr;
+ CVMetalTextureRef cvMetalTexture[3] = {};
+
+#if defined(Q_OS_MACOS)
+ CVOpenGLTextureRef cvOpenGLTexture = nullptr;
+#elif defined(Q_OS_IOS)
+ CVOpenGLESTextureRef cvOpenGLESTexture = nullptr;
+#endif
+
+ CVImageBufferRef m_buffer = nullptr;
+};
+
+VideoToolBoxTextureConverter::VideoToolBoxTextureConverter(QRhi *rhi)
+ : TextureConverterBackend(rhi)
+{
+ if (!rhi)
+ return;
+
+ if (rhi->backend() == QRhi::Metal) {
+ const auto *metal = static_cast<const QRhiMetalNativeHandles *>(rhi->nativeHandles());
+
+ // Create a Metal Core Video texture cache from the pixel buffer.
+ Q_ASSERT(!cvMetalTextureCache);
+ if (CVMetalTextureCacheCreate(
+ kCFAllocatorDefault,
+ nil,
+ (id<MTLDevice>)metal->dev,
+ nil,
+ &mtc(cvMetalTextureCache)) != kCVReturnSuccess) {
+ qWarning() << "Metal texture cache creation failed";
+            this->rhi = nullptr; // assign the member; the ctor parameter shadows it
+ }
+ } else if (rhi->backend() == QRhi::OpenGLES2) {
+#if QT_CONFIG(opengl)
+#ifdef Q_OS_MACOS
+ const auto *gl = static_cast<const QRhiGles2NativeHandles *>(rhi->nativeHandles());
+
+ auto nsGLContext = gl->context->nativeInterface<QNativeInterface::QCocoaGLContext>()->nativeContext();
+ auto nsGLPixelFormat = nsGLContext.pixelFormat.CGLPixelFormatObj;
+
+ // Create an OpenGL CoreVideo texture cache from the pixel buffer.
+ if (CVOpenGLTextureCacheCreate(
+ kCFAllocatorDefault,
+ nullptr,
+ reinterpret_cast<CGLContextObj>(nsGLContext.CGLContextObj),
+ nsGLPixelFormat,
+ nil,
+ &cvOpenGLTextureCache)) {
+ qWarning() << "OpenGL texture cache creation failed";
+            this->rhi = nullptr;
+ }
+#endif
+#ifdef Q_OS_IOS
+ // Create an OpenGL CoreVideo texture cache from the pixel buffer.
+ if (CVOpenGLESTextureCacheCreate(
+ kCFAllocatorDefault,
+ nullptr,
+ [EAGLContext currentContext],
+ nullptr,
+ &cvOpenGLESTextureCache)) {
+ qWarning() << "OpenGL texture cache creation failed";
+            this->rhi = nullptr;
+ }
+#endif
+#else
+        this->rhi = nullptr;
+#endif // QT_CONFIG(opengl)
+ }
+}
+
+VideoToolBoxTextureConverter::~VideoToolBoxTextureConverter()
+{
+ freeTextureCaches();
+}
+
+void VideoToolBoxTextureConverter::freeTextureCaches()
+{
+ if (cvMetalTextureCache)
+ CFRelease(cvMetalTextureCache);
+ cvMetalTextureCache = nullptr;
+#if defined(Q_OS_MACOS)
+ if (cvOpenGLTextureCache)
+ CFRelease(cvOpenGLTextureCache);
+ cvOpenGLTextureCache = nullptr;
+#elif defined(Q_OS_IOS)
+ if (cvOpenGLESTextureCache)
+ CFRelease(cvOpenGLESTextureCache);
+ cvOpenGLESTextureCache = nullptr;
+#endif
+}
+
+static MTLPixelFormat rhiTextureFormatToMetalFormat(QRhiTexture::Format f)
+{
+ switch (f) {
+ default:
+ case QRhiTexture::UnknownFormat:
+ return MTLPixelFormatInvalid;
+ case QRhiTexture::RGBA8:
+ return MTLPixelFormatRGBA8Unorm;
+ case QRhiTexture::BGRA8:
+ return MTLPixelFormatBGRA8Unorm;
+ case QRhiTexture::R8:
+ return MTLPixelFormatR8Unorm;
+ case QRhiTexture::RG8:
+ return MTLPixelFormatRG8Unorm;
+ case QRhiTexture::R16:
+ return MTLPixelFormatR16Unorm;
+ case QRhiTexture::RG16:
+ return MTLPixelFormatRG16Unorm;
+
+ case QRhiTexture::RGBA16F:
+ return MTLPixelFormatRGBA16Float;
+ case QRhiTexture::RGBA32F:
+ return MTLPixelFormatRGBA32Float;
+ case QRhiTexture::R16F:
+ return MTLPixelFormatR16Float;
+ case QRhiTexture::R32F:
+ return MTLPixelFormatR32Float;
+ }
+}
+
+TextureSet *VideoToolBoxTextureConverter::getTextures(AVFrame *frame)
+{
+ if (!rhi)
+ return nullptr;
+
+ bool needsConversion = false;
+ QVideoFrameFormat::PixelFormat pixelFormat = QFFmpegVideoBuffer::toQtPixelFormat(HWAccel::format(frame), &needsConversion);
+ if (needsConversion) {
+ // qDebug() << "XXXXXXXXXXXX pixel format needs conversion" << pixelFormat << HWAccel::format(frame);
+ return nullptr;
+ }
+
+ CVPixelBufferRef buffer = (CVPixelBufferRef)frame->data[3];
+
+ auto textureSet = std::make_unique<VideoToolBoxTextureSet>();
+ textureSet->m_buffer = buffer;
+ textureSet->rhi = rhi;
+ CVPixelBufferRetain(buffer);
+
+ auto *textureDescription = QVideoTextureHelper::textureDescription(pixelFormat);
+ int bufferPlanes = CVPixelBufferGetPlaneCount(buffer);
+// qDebug() << "XXXXX getTextures" << pixelFormat << bufferPlanes << buffer;
+
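+    // Zero-copy path: wrap the CVPixelBuffer planes in GPU textures obtained
+    // from the texture caches instead of downloading the frame data.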
+ if (rhi->backend() == QRhi::Metal) {
+ for (int plane = 0; plane < bufferPlanes; ++plane) {
+ size_t width = CVPixelBufferGetWidth(buffer);
+ size_t height = CVPixelBufferGetHeight(buffer);
+ width = textureDescription->widthForPlane(width, plane);
+ height = textureDescription->heightForPlane(height, plane);
+
+ // Create a CoreVideo pixel buffer backed Metal texture image from the texture cache.
+ auto ret = CVMetalTextureCacheCreateTextureFromImage(
+ kCFAllocatorDefault,
+ mtc(cvMetalTextureCache),
+ buffer, nil,
+ rhiTextureFormatToMetalFormat(textureDescription->textureFormat[plane]),
+ width, height,
+ plane,
+ &textureSet->cvMetalTexture[plane]);
+
+ if (ret != kCVReturnSuccess)
+ qWarning() << "texture creation failed" << ret;
+// auto t = CVMetalTextureGetTexture(textureSet->cvMetalTexture[plane]);
+// qDebug() << " metal texture for plane" << plane << "is" << quint64(textureSet->cvMetalTexture[plane]) << width << height;
+// qDebug() << " " << t.iosurfacePlane << t.pixelFormat << t.width << t.height;
+ }
+ } else if (rhi->backend() == QRhi::OpenGLES2) {
+#if QT_CONFIG(opengl)
+#ifdef Q_OS_MACOS
+ CVOpenGLTextureCacheFlush(cvOpenGLTextureCache, 0);
+ // Create a CVPixelBuffer-backed OpenGL texture image from the texture cache.
+ const CVReturn cvret = CVOpenGLTextureCacheCreateTextureFromImage(
+ kCFAllocatorDefault,
+ cvOpenGLTextureCache,
+ buffer,
+ nil,
+ &textureSet->cvOpenGLTexture);
+ if (cvret != kCVReturnSuccess) {
+ qCWarning(qLcVideotoolbox) << "OpenGL texture creation failed" << cvret;
+ return nullptr;
+ }
+
+ Q_ASSERT(CVOpenGLTextureGetTarget(textureSet->cvOpenGLTexture) == GL_TEXTURE_RECTANGLE);
+#endif
+#ifdef Q_OS_IOS
+ CVOpenGLESTextureCacheFlush(cvOpenGLESTextureCache, 0);
+ // Create a CVPixelBuffer-backed OpenGL texture image from the texture cache.
+ const CVReturn cvret = CVOpenGLESTextureCacheCreateTextureFromImage(
+ kCFAllocatorDefault,
+ cvOpenGLESTextureCache,
+ buffer,
+ nil,
+ GL_TEXTURE_2D,
+ GL_RGBA,
+ CVPixelBufferGetWidth(buffer),
+ CVPixelBufferGetHeight(buffer),
+ GL_RGBA,
+ GL_UNSIGNED_BYTE,
+ 0,
+ &textureSet->cvOpenGLESTexture);
+ if (cvret != kCVReturnSuccess) {
+ qCWarning(qLcVideotoolbox) << "OpenGL ES texture creation failed" << cvret;
+ return nullptr;
+ }
+#endif
+#endif
+ }
+
+ return textureSet.release();
+}
+
+VideoToolBoxTextureSet::~VideoToolBoxTextureSet()
+{
+    for (int i = 0; i < 3; ++i) // cvMetalTexture only has three plane slots
+ if (cvMetalTexture[i])
+ CFRelease(cvMetalTexture[i]);
+#if defined(Q_OS_MACOS)
+ if (cvOpenGLTexture)
+ CVOpenGLTextureRelease(cvOpenGLTexture);
+#elif defined(Q_OS_IOS)
+ if (cvOpenGLESTexture)
+ CFRelease(cvOpenGLESTexture);
+#endif
+ CVPixelBufferRelease(m_buffer);
+}
+
+qint64 VideoToolBoxTextureSet::textureHandle(QRhi *, int plane)
+{
+ if (rhi->backend() == QRhi::Metal)
+ return cvMetalTexture[plane] ? qint64(CVMetalTextureGetTexture(cvMetalTexture[plane])) : 0;
+#if QT_CONFIG(opengl)
+ Q_ASSERT(plane == 0);
+#ifdef Q_OS_MACOS
+ return CVOpenGLTextureGetName(cvOpenGLTexture);
+#endif
+#ifdef Q_OS_IOS
+ return CVOpenGLESTextureGetName(cvOpenGLESTexture);
+#endif
+#endif
+    return 0;
+}
+
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_videotoolbox_p.h b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_videotoolbox_p.h
new file mode 100644
index 000000000..44fa32dd2
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_videotoolbox_p.h
@@ -0,0 +1,63 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGHWACCEL_VIDEOTOOLBOX_P_H
+#define QFFMPEGHWACCEL_VIDEOTOOLBOX_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qffmpeghwaccel_p.h"
+
+#ifdef Q_OS_DARWIN
+
+#include <CoreVideo/CVBase.h>
+#include <CoreVideo/CVPixelBuffer.h>
+#include <CoreVideo/CVImageBuffer.h>
+
+#include <CoreVideo/CVMetalTexture.h>
+#if defined(Q_OS_MACOS)
+#include <CoreVideo/CVOpenGLTextureCache.h>
+#elif defined(Q_OS_IOS)
+#include <CoreVideo/CVOpenGLESTextureCache.h>
+#endif
+
+QT_BEGIN_NAMESPACE
+
+class QRhi;
+
+namespace QFFmpeg {
+
+class VideoToolBoxTextureConverter : public TextureConverterBackend
+{
+public:
+ VideoToolBoxTextureConverter(QRhi *rhi);
+ ~VideoToolBoxTextureConverter();
+ TextureSet *getTextures(AVFrame *frame) override;
+
+private:
+ void freeTextureCaches();
+
+    // CVMetalTextureCacheRef cannot be forward-declared from C++, so store it as void *.
+ void *cvMetalTextureCache = nullptr;
+#if defined(Q_OS_MACOS)
+ CVOpenGLTextureCacheRef cvOpenGLTextureCache = nullptr;
+#elif defined(Q_OS_IOS)
+ CVOpenGLESTextureCacheRef cvOpenGLESTextureCache = nullptr;
+#endif
+};
+
+}
+
+QT_END_NAMESPACE
+
+#endif
+
+#endif
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegimagecapture.cpp b/src/plugins/multimedia/ffmpeg/qffmpegimagecapture.cpp
new file mode 100644
index 000000000..2fb878784
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegimagecapture.cpp
@@ -0,0 +1,271 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegimagecapture_p.h"
+#include <private/qplatformmediaformatinfo_p.h>
+#include <private/qplatformcamera_p.h>
+#include <private/qplatformimagecapture_p.h>
+#include <qvideoframeformat.h>
+#include <private/qmediastoragelocation_p.h>
+#include <qimagewriter.h>
+
+#include <QtCore/QDebug>
+#include <QtCore/QDir>
+#include <qstandardpaths.h>
+
+#include <qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+// This could probably be increased; to be investigated and tested on the Android implementation.
+static constexpr int MaxPendingImagesCount = 1;
+
+static Q_LOGGING_CATEGORY(qLcImageCapture, "qt.multimedia.imageCapture")
+
+QFFmpegImageCapture::QFFmpegImageCapture(QImageCapture *parent)
+ : QPlatformImageCapture(parent)
+{
+ qRegisterMetaType<QVideoFrame>();
+}
+
+QFFmpegImageCapture::~QFFmpegImageCapture()
+{
+}
+
+bool QFFmpegImageCapture::isReadyForCapture() const
+{
+ return m_isReadyForCapture;
+}
+
+static const char *extensionForFormat(QImageCapture::FileFormat format)
+{
+ const char *fmt = "jpg";
+ switch (format) {
+ case QImageCapture::UnspecifiedFormat:
+ case QImageCapture::JPEG:
+ fmt = "jpg";
+ break;
+ case QImageCapture::PNG:
+ fmt = "png";
+ break;
+ case QImageCapture::WebP:
+ fmt = "webp";
+ break;
+ case QImageCapture::Tiff:
+ fmt = "tiff";
+ break;
+ }
+ return fmt;
+}
+
+int QFFmpegImageCapture::capture(const QString &fileName)
+{
+ QString path = QMediaStorageLocation::generateFileName(fileName, QStandardPaths::PicturesLocation, QLatin1String(extensionForFormat(m_settings.format())));
+ return doCapture(path);
+}
+
+int QFFmpegImageCapture::captureToBuffer()
+{
+ return doCapture(QString());
+}
+
+int QFFmpegImageCapture::doCapture(const QString &fileName)
+{
+ qCDebug(qLcImageCapture) << "do capture";
+ if (!m_session) {
+        // Emit the error in the next event loop iteration so the application
+        // can associate it with the returned request id.
+ QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
+ Q_ARG(int, -1),
+ Q_ARG(int, QImageCapture::ResourceError),
+ Q_ARG(QString, QPlatformImageCapture::msgImageCaptureNotSet()));
+
+ qCDebug(qLcImageCapture) << "error 1";
+ return -1;
+ }
+ if (!m_videoSource) {
+        // Emit the error in the next event loop iteration so the application
+        // can associate it with the returned request id.
+ QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
+ Q_ARG(int, -1),
+ Q_ARG(int, QImageCapture::ResourceError),
+                                  Q_ARG(QString, tr("No camera available.")));
+
+ qCDebug(qLcImageCapture) << "error 2";
+ return -1;
+ }
+ if (m_pendingImages.size() >= MaxPendingImagesCount) {
+        // Emit the error in the next event loop iteration so the application
+        // can associate it with the returned request id.
+ QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
+ Q_ARG(int, -1),
+ Q_ARG(int, QImageCapture::NotReadyError),
+ Q_ARG(QString, QPlatformImageCapture::msgCameraNotReady()));
+
+ qCDebug(qLcImageCapture) << "error 3";
+ return -1;
+ }
+
+ m_lastId++;
+
+ m_pendingImages.enqueue({ m_lastId, fileName, QMediaMetaData{} });
+ updateReadyForCapture();
+
+ return m_lastId;
+}
+
+void QFFmpegImageCapture::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+ auto *captureSession = static_cast<QFFmpegMediaCaptureSession *>(session);
+ if (m_session == captureSession)
+ return;
+
+ if (m_session) {
+ m_session->disconnect(this);
+ m_lastId = 0;
+ m_pendingImages.clear();
+ }
+
+ m_session = captureSession;
+
+ if (m_session)
+ connect(m_session, &QFFmpegMediaCaptureSession::primaryActiveVideoSourceChanged, this,
+ &QFFmpegImageCapture::onVideoSourceChanged);
+
+ onVideoSourceChanged();
+}
+
+void QFFmpegImageCapture::updateReadyForCapture()
+{
+ const bool ready = m_session && m_pendingImages.size() < MaxPendingImagesCount && m_videoSource
+ && m_videoSource->isActive();
+
+ qCDebug(qLcImageCapture) << "updateReadyForCapture" << ready;
+
+ if (std::exchange(m_isReadyForCapture, ready) != ready)
+ emit readyForCaptureChanged(ready);
+}
+
+void QFFmpegImageCapture::newVideoFrame(const QVideoFrame &frame)
+{
+ if (m_pendingImages.empty())
+ return;
+
+ auto pending = m_pendingImages.dequeue();
+
+ qCDebug(qLcImageCapture) << "Taking image" << pending.id;
+
+ emit imageExposed(pending.id);
+ // ### Add metadata from the AVFrame
+ emit imageMetadataAvailable(pending.id, pending.metaData);
+ emit imageAvailable(pending.id, frame);
+ QImage image = frame.toImage();
+ if (m_settings.resolution().isValid() && m_settings.resolution() != image.size())
+ image = image.scaled(m_settings.resolution());
+
+ emit imageCaptured(pending.id, image);
+ if (!pending.filename.isEmpty()) {
+ const char *fmt = nullptr;
+ switch (m_settings.format()) {
+ case QImageCapture::UnspecifiedFormat:
+ case QImageCapture::JPEG:
+ fmt = "jpeg";
+ break;
+ case QImageCapture::PNG:
+ fmt = "png";
+ break;
+ case QImageCapture::WebP:
+ fmt = "webp";
+ break;
+ case QImageCapture::Tiff:
+ fmt = "tiff";
+ break;
+ }
+ int quality = -1;
+ switch (m_settings.quality()) {
+ case QImageCapture::VeryLowQuality:
+ quality = 25;
+ break;
+ case QImageCapture::LowQuality:
+ quality = 50;
+ break;
+ case QImageCapture::NormalQuality:
+ break;
+ case QImageCapture::HighQuality:
+ quality = 75;
+ break;
+ case QImageCapture::VeryHighQuality:
+ quality = 99;
+ break;
+ }
+
+ QImageWriter writer(pending.filename, fmt);
+ writer.setQuality(quality);
+
+ if (writer.write(image)) {
+ emit imageSaved(pending.id, pending.filename);
+ } else {
+ QImageCapture::Error err = QImageCapture::ResourceError;
+ if (writer.error() == QImageWriter::UnsupportedFormatError)
+ err = QImageCapture::FormatError;
+ emit error(pending.id, err, writer.errorString());
+ }
+ }
+
+ updateReadyForCapture();
+}
+
+void QFFmpegImageCapture::setupVideoSourceConnections()
+{
+ connect(m_videoSource, &QPlatformCamera::newVideoFrame, this,
+ &QFFmpegImageCapture::newVideoFrame);
+}
+
+QPlatformVideoSource *QFFmpegImageCapture::videoSource() const
+{
+ return m_videoSource;
+}
+
+void QFFmpegImageCapture::onVideoSourceChanged()
+{
+ if (m_videoSource)
+ m_videoSource->disconnect(this);
+
+ m_videoSource = m_session ? m_session->primaryActiveVideoSource() : nullptr;
+
+ // TODO: optimize, setup the connection only when the capture is ready
+ if (m_videoSource)
+ setupVideoSourceConnections();
+
+ updateReadyForCapture();
+}
+
+QImageEncoderSettings QFFmpegImageCapture::imageSettings() const
+{
+ return m_settings;
+}
+
+void QFFmpegImageCapture::setImageSettings(const QImageEncoderSettings &settings)
+{
+ auto s = settings;
+ const auto supportedFormats = QPlatformMediaIntegration::instance()->formatInfo()->imageFormats;
+ if (supportedFormats.isEmpty()) {
+ emit error(-1, QImageCapture::FormatError, "No image formats supported, can't capture.");
+ return;
+ }
+ if (s.format() == QImageCapture::UnspecifiedFormat) {
+ auto f = QImageCapture::JPEG;
+ if (!supportedFormats.contains(f))
+ f = supportedFormats.first();
+ s.setFormat(f);
+ } else if (!supportedFormats.contains(settings.format())) {
+ emit error(-1, QImageCapture::FormatError, "Image format not supported.");
+ return;
+ }
+
+    m_settings = s; // store the resolved settings, including the adjusted format
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qffmpegimagecapture_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegimagecapture_p.h b/src/plugins/multimedia/ffmpeg/qffmpegimagecapture_p.h
new file mode 100644
index 000000000..d8174ae05
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegimagecapture_p.h
@@ -0,0 +1,71 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+
+#ifndef QFFMPEGIMAGECAPTURE_H
+#define QFFMPEGIMAGECAPTURE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformimagecapture_p.h>
+#include "qffmpegmediacapturesession_p.h"
+
+#include <QtCore/qpointer.h>
+#include <qqueue.h>
+
+QT_BEGIN_NAMESPACE
+
+class QFFmpegImageCapture : public QPlatformImageCapture
+{
+ Q_OBJECT
+public:
+ QFFmpegImageCapture(QImageCapture *parent);
+ virtual ~QFFmpegImageCapture();
+
+ bool isReadyForCapture() const override;
+ int capture(const QString &fileName) override;
+ int captureToBuffer() override;
+
+ QImageEncoderSettings imageSettings() const override;
+ void setImageSettings(const QImageEncoderSettings &settings) override;
+
+ void setCaptureSession(QPlatformMediaCaptureSession *session);
+
+protected:
+ virtual int doCapture(const QString &fileName);
+ virtual void setupVideoSourceConnections();
+ QPlatformVideoSource *videoSource() const;
+ void updateReadyForCapture();
+
+protected Q_SLOTS:
+ void newVideoFrame(const QVideoFrame &frame);
+ void onVideoSourceChanged();
+
+private:
+ QFFmpegMediaCaptureSession *m_session = nullptr;
+ QPointer<QPlatformVideoSource> m_videoSource;
+ int m_lastId = 0;
+ QImageEncoderSettings m_settings;
+
+ struct PendingImage {
+ int id;
+ QString filename;
+ QMediaMetaData metaData;
+ };
+
+ QQueue<PendingImage> m_pendingImages;
+ bool m_isReadyForCapture = false;
+};
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGIMAGECAPTURE_H
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegioutils.cpp b/src/plugins/multimedia/ffmpeg/qffmpegioutils.cpp
new file mode 100644
index 000000000..cbef88f2b
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegioutils.cpp
@@ -0,0 +1,55 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegioutils_p.h"
+#include "qiodevice.h"
+#include "qffmpegdefs_p.h"
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+int readQIODevice(void *opaque, uint8_t *buf, int buf_size)
+{
+ auto *dev = static_cast<QIODevice *>(opaque);
+ Q_ASSERT(dev);
+
+ if (dev->atEnd())
+ return AVERROR_EOF;
+    const qint64 bytesRead = dev->read(reinterpret_cast<char *>(buf), buf_size);
+    return bytesRead < 0 ? AVERROR(EIO) : static_cast<int>(bytesRead);
+}
+
+int writeQIODevice(void *opaque, AvioWriteBufferType buf, int buf_size)
+{
+ auto dev = static_cast<QIODevice *>(opaque);
+ Q_ASSERT(dev);
+
+    const qint64 bytesWritten = dev->write(reinterpret_cast<const char *>(buf), buf_size);
+    return bytesWritten < 0 ? AVERROR(EIO) : static_cast<int>(bytesWritten);
+}
+
+int64_t seekQIODevice(void *opaque, int64_t offset, int whence)
+{
+ QIODevice *dev = static_cast<QIODevice *>(opaque);
+ Q_ASSERT(dev);
+
+ if (dev->isSequential())
+ return AVERROR(EINVAL);
+
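+    // AVSEEK_SIZE does not seek: FFmpeg uses it to query the stream size.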
+ if (whence & AVSEEK_SIZE)
+ return dev->size();
+
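+    // AVSEEK_FORCE merely requests that the seek happen even if it is slow;
+    // strip the flag before interpreting whence.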
+ whence &= ~AVSEEK_FORCE;
+
+ if (whence == SEEK_CUR)
+ offset += dev->pos();
+ else if (whence == SEEK_END)
+ offset += dev->size();
+
+ if (!dev->seek(offset))
+ return AVERROR(EINVAL);
+ return offset;
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegioutils_p.h b/src/plugins/multimedia/ffmpeg/qffmpegioutils_p.h
new file mode 100644
index 000000000..7f00990f6
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegioutils_p.h
@@ -0,0 +1,40 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGIOUTILS_P_H
+#define QFFMPEGIOUTILS_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qtmultimediaglobal.h"
+#include "qffmpegdefs_p.h"
+
+#include <type_traits>
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+int readQIODevice(void *opaque, uint8_t *buf, int buf_size);
+
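+// Newer FFmpeg versions declare the avio write callback with a const buffer;
+// QT_FFMPEG_AVIO_WRITE_CONST selects the signature matching the linked FFmpeg.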
+using AvioWriteBufferType =
+ std::conditional_t<QT_FFMPEG_AVIO_WRITE_CONST, const uint8_t *, uint8_t *>;
+
+int writeQIODevice(void *opaque, AvioWriteBufferType buf, int buf_size);
+
+int64_t seekQIODevice(void *opaque, int64_t offset, int whence);
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGIOUTILS_P_H
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession.cpp
new file mode 100644
index 000000000..1b6db5813
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession.cpp
@@ -0,0 +1,318 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegmediacapturesession_p.h"
+
+#include "private/qplatformaudioinput_p.h"
+#include "private/qplatformaudiooutput_p.h"
+#include "private/qplatformsurfacecapture_p.h"
+#include "private/qplatformaudiobufferinput_p.h"
+#include "private/qplatformvideoframeinput_p.h"
+#include "private/qplatformcamera_p.h"
+
+#include "qffmpegimagecapture_p.h"
+#include "qffmpegmediarecorder_p.h"
+#include "qvideosink.h"
+#include "qffmpegaudioinput_p.h"
+#include "qaudiosink.h"
+#include "qaudiobuffer.h"
+#include "qaudiooutput.h"
+
+#include <qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcFFmpegMediaCaptureSession, "qt.multimedia.ffmpeg.mediacapturesession")
+
+static int preferredAudioSinkBufferSize(const QFFmpegAudioInput &input)
+{
+ // Heuristic params to avoid jittering
+ // TODO: investigate the reason of jittering and probably reduce the factor
+ constexpr int BufferSizeFactor = 2;
+ constexpr int BufferSizeExceeding = 4096;
+
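+    // e.g. a 4096-byte input buffer yields 2 * 4096 + 4096 = 12288 bytes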
+ return input.bufferSize() * BufferSizeFactor + BufferSizeExceeding;
+}
+
+QFFmpegMediaCaptureSession::QFFmpegMediaCaptureSession()
+{
+ connect(this, &QFFmpegMediaCaptureSession::primaryActiveVideoSourceChanged, this,
+ &QFFmpegMediaCaptureSession::updateVideoFrameConnection);
+}
+
+QFFmpegMediaCaptureSession::~QFFmpegMediaCaptureSession() = default;
+
+QPlatformCamera *QFFmpegMediaCaptureSession::camera()
+{
+ return m_camera;
+}
+
+void QFFmpegMediaCaptureSession::setCamera(QPlatformCamera *camera)
+{
+ if (setVideoSource(m_camera, camera))
+ emit cameraChanged();
+}
+
+QPlatformSurfaceCapture *QFFmpegMediaCaptureSession::screenCapture()
+{
+ return m_screenCapture;
+}
+
+void QFFmpegMediaCaptureSession::setScreenCapture(QPlatformSurfaceCapture *screenCapture)
+{
+ if (setVideoSource(m_screenCapture, screenCapture))
+ emit screenCaptureChanged();
+}
+
+QPlatformSurfaceCapture *QFFmpegMediaCaptureSession::windowCapture()
+{
+ return m_windowCapture;
+}
+
+void QFFmpegMediaCaptureSession::setWindowCapture(QPlatformSurfaceCapture *windowCapture)
+{
+ if (setVideoSource(m_windowCapture, windowCapture))
+ emit windowCaptureChanged();
+}
+
+QPlatformVideoFrameInput *QFFmpegMediaCaptureSession::videoFrameInput()
+{
+ return m_videoFrameInput;
+}
+
+void QFFmpegMediaCaptureSession::setVideoFrameInput(QPlatformVideoFrameInput *input)
+{
+ if (setVideoSource(m_videoFrameInput, input))
+ emit videoFrameInputChanged();
+}
+
+QPlatformImageCapture *QFFmpegMediaCaptureSession::imageCapture()
+{
+ return m_imageCapture;
+}
+
+void QFFmpegMediaCaptureSession::setImageCapture(QPlatformImageCapture *imageCapture)
+{
+ if (m_imageCapture == imageCapture)
+ return;
+
+ if (m_imageCapture)
+ m_imageCapture->setCaptureSession(nullptr);
+
+ m_imageCapture = static_cast<QFFmpegImageCapture *>(imageCapture);
+
+ if (m_imageCapture)
+ m_imageCapture->setCaptureSession(this);
+
+ emit imageCaptureChanged();
+}
+
+void QFFmpegMediaCaptureSession::setMediaRecorder(QPlatformMediaRecorder *recorder)
+{
+ auto *r = static_cast<QFFmpegMediaRecorder *>(recorder);
+ if (m_mediaRecorder == r)
+ return;
+
+ if (m_mediaRecorder)
+ m_mediaRecorder->setCaptureSession(nullptr);
+ m_mediaRecorder = r;
+ if (m_mediaRecorder)
+ m_mediaRecorder->setCaptureSession(this);
+
+ emit encoderChanged();
+}
+
+QPlatformMediaRecorder *QFFmpegMediaCaptureSession::mediaRecorder()
+{
+ return m_mediaRecorder;
+}
+
+void QFFmpegMediaCaptureSession::setAudioInput(QPlatformAudioInput *input)
+{
+ qCDebug(qLcFFmpegMediaCaptureSession)
+ << "set audio input:" << (input ? input->device.description() : "null");
+
+ auto ffmpegAudioInput = dynamic_cast<QFFmpegAudioInput *>(input);
+ Q_ASSERT(!!input == !!ffmpegAudioInput);
+
+ if (m_audioInput == ffmpegAudioInput)
+ return;
+
+ if (m_audioInput)
+ m_audioInput->q->disconnect(this);
+
+ m_audioInput = ffmpegAudioInput;
+ if (m_audioInput)
+ // TODO: implement the signal in QPlatformAudioInput and connect to it, QTBUG-112294
+ connect(m_audioInput->q, &QAudioInput::deviceChanged, this,
+ &QFFmpegMediaCaptureSession::updateAudioSink);
+
+ updateAudioSink();
+}
+
+void QFFmpegMediaCaptureSession::setAudioBufferInput(QPlatformAudioBufferInput *input)
+{
+ // TODO: implement binding to audio sink like setAudioInput does
+ m_audioBufferInput = input;
+}
+
+void QFFmpegMediaCaptureSession::updateAudioSink()
+{
+ if (m_audioSink) {
+ m_audioSink->reset();
+ m_audioSink.reset();
+ }
+
+ if (!m_audioInput || !m_audioOutput)
+ return;
+
+ auto format = m_audioInput->device.preferredFormat();
+
+ if (!m_audioOutput->device.isFormatSupported(format))
+ qWarning() << "Audio source format" << format << "is not compatible with the audio output";
+
+ m_audioSink = std::make_unique<QAudioSink>(m_audioOutput->device, format);
+
+ m_audioBufferSize = preferredAudioSinkBufferSize(*m_audioInput);
+ m_audioSink->setBufferSize(m_audioBufferSize);
+
+ qCDebug(qLcFFmpegMediaCaptureSession)
+ << "Create audiosink, format:" << format << "bufferSize:" << m_audioSink->bufferSize()
+ << "output device:" << m_audioOutput->device.description();
+
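+    // QAudioSink::start() without a device argument runs in push mode and
+    // returns the QIODevice that captured audio buffers are written to.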
+ m_audioIODevice = m_audioSink->start();
+ if (m_audioIODevice) {
+ auto writeToDevice = [this](const QAudioBuffer &buffer) {
+ if (m_audioBufferSize < preferredAudioSinkBufferSize(*m_audioInput)) {
+ qCDebug(qLcFFmpegMediaCaptureSession)
+ << "Recreate audiosink due to small buffer size:" << m_audioBufferSize;
+
+ updateAudioSink();
+ }
+
+ const auto written =
+ m_audioIODevice->write(buffer.data<const char>(), buffer.byteCount());
+
+ if (written < buffer.byteCount())
+ qCWarning(qLcFFmpegMediaCaptureSession)
+ << "Not all bytes written:" << written << "vs" << buffer.byteCount();
+ };
+ connect(m_audioInput, &QFFmpegAudioInput::newAudioBuffer, m_audioSink.get(), writeToDevice);
+ } else {
+ qWarning() << "Failed to start audiosink push mode";
+ }
+
+ updateVolume();
+}
+
+void QFFmpegMediaCaptureSession::updateVolume()
+{
+ if (m_audioSink)
+ m_audioSink->setVolume(m_audioOutput->muted ? 0.f : m_audioOutput->volume);
+}
+
+QPlatformAudioInput *QFFmpegMediaCaptureSession::audioInput() const
+{
+ return m_audioInput;
+}
+
+void QFFmpegMediaCaptureSession::setVideoPreview(QVideoSink *sink)
+{
+ if (std::exchange(m_videoSink, sink) == sink)
+ return;
+
+ updateVideoFrameConnection();
+}
+
+void QFFmpegMediaCaptureSession::setAudioOutput(QPlatformAudioOutput *output)
+{
+ qCDebug(qLcFFmpegMediaCaptureSession)
+ << "set audio output:" << (output ? output->device.description() : "null");
+
+ if (m_audioOutput == output)
+ return;
+
+ if (m_audioOutput)
+ m_audioOutput->q->disconnect(this);
+
+ m_audioOutput = output;
+ if (m_audioOutput) {
+ // TODO: implement the signals in QPlatformAudioOutput and connect to them, QTBUG-112294
+ connect(m_audioOutput->q, &QAudioOutput::deviceChanged, this,
+ &QFFmpegMediaCaptureSession::updateAudioSink);
+ connect(m_audioOutput->q, &QAudioOutput::volumeChanged, this,
+ &QFFmpegMediaCaptureSession::updateVolume);
+ connect(m_audioOutput->q, &QAudioOutput::mutedChanged, this,
+ &QFFmpegMediaCaptureSession::updateVolume);
+ }
+
+ updateAudioSink();
+}
+
+void QFFmpegMediaCaptureSession::updateVideoFrameConnection()
+{
+ disconnect(m_videoFrameConnection);
+
+ if (m_primaryActiveVideoSource && m_videoSink) {
+        // Deliver frames directly to the video sink. The AutoConnection type
+        // may be a pessimization because of the extra queuing it introduces.
+        // TODO: investigate and switch to a direct connection
+ m_videoFrameConnection =
+ connect(m_primaryActiveVideoSource, &QPlatformVideoSource::newVideoFrame,
+ m_videoSink, &QVideoSink::setVideoFrame);
+ }
+}
+
+void QFFmpegMediaCaptureSession::updatePrimaryActiveVideoSource()
+{
+ auto sources = activeVideoSources();
+ auto source = sources.empty() ? nullptr : sources.front();
+ if (std::exchange(m_primaryActiveVideoSource, source) != source)
+ emit primaryActiveVideoSourceChanged();
+}
+
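+// Shared helper for the camera, screen capture, window capture and video
+// frame input slots: it rebinds the capture session and tracks source
+// (de)activation so m_primaryActiveVideoSource stays the first active source.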
+template<typename VideoSource>
+bool QFFmpegMediaCaptureSession::setVideoSource(QPointer<VideoSource> &source,
+ VideoSource *newSource)
+{
+ if (source == newSource)
+ return false;
+
+ if (auto prevSource = std::exchange(source, newSource)) {
+ prevSource->setCaptureSession(nullptr);
+ prevSource->disconnect(this);
+ }
+
+ if (source) {
+ source->setCaptureSession(this);
+ connect(source, &QPlatformVideoSource::activeChanged, this,
+ &QFFmpegMediaCaptureSession::updatePrimaryActiveVideoSource);
+ connect(source, &QObject::destroyed, this,
+ &QFFmpegMediaCaptureSession::updatePrimaryActiveVideoSource, Qt::QueuedConnection);
+ }
+
+ updatePrimaryActiveVideoSource();
+
+ return true;
+}
+
+QPlatformVideoSource *QFFmpegMediaCaptureSession::primaryActiveVideoSource()
+{
+ return m_primaryActiveVideoSource;
+}
+
+std::vector<QPlatformAudioBufferInputBase *> QFFmpegMediaCaptureSession::activeAudioInputs() const
+{
+ std::vector<QPlatformAudioBufferInputBase *> result;
+ if (m_audioInput)
+ result.push_back(m_audioInput);
+
+ if (m_audioBufferInput)
+ result.push_back(m_audioBufferInput);
+
+ return result;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qffmpegmediacapturesession_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession_p.h b/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession_p.h
new file mode 100644
index 000000000..25340dad5
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediacapturesession_p.h
@@ -0,0 +1,112 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGMEDIACAPTURESESSION_H
+#define QFFMPEGMEDIACAPTURESESSION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediacapture_p.h>
+#include <private/qplatformmediaintegration_p.h>
+#include "qpointer.h"
+#include "qiodevice.h"
+
+QT_BEGIN_NAMESPACE
+
+class QFFmpegMediaRecorder;
+class QFFmpegImageCapture;
+class QVideoFrame;
+class QAudioSink;
+class QFFmpegAudioInput;
+class QAudioBuffer;
+class QPlatformVideoSource;
+class QPlatformAudioBufferInput;
+class QPlatformAudioBufferInputBase;
+
+class QFFmpegMediaCaptureSession : public QPlatformMediaCaptureSession
+{
+ Q_OBJECT
+
+public:
+ using VideoSources = std::vector<QPointer<QPlatformVideoSource>>;
+
+ QFFmpegMediaCaptureSession();
+ ~QFFmpegMediaCaptureSession() override;
+
+ QPlatformCamera *camera() override;
+ void setCamera(QPlatformCamera *camera) override;
+
+ QPlatformSurfaceCapture *screenCapture() override;
+ void setScreenCapture(QPlatformSurfaceCapture *) override;
+
+ QPlatformSurfaceCapture *windowCapture() override;
+ void setWindowCapture(QPlatformSurfaceCapture *) override;
+
+ QPlatformVideoFrameInput *videoFrameInput() override;
+ void setVideoFrameInput(QPlatformVideoFrameInput *) override;
+
+ QPlatformImageCapture *imageCapture() override;
+ void setImageCapture(QPlatformImageCapture *imageCapture) override;
+
+ QPlatformMediaRecorder *mediaRecorder() override;
+ void setMediaRecorder(QPlatformMediaRecorder *recorder) override;
+
+ void setAudioInput(QPlatformAudioInput *input) override;
+ QPlatformAudioInput *audioInput() const;
+
+ void setAudioBufferInput(QPlatformAudioBufferInput *input) override;
+
+ void setVideoPreview(QVideoSink *sink) override;
+ void setAudioOutput(QPlatformAudioOutput *output) override;
+
+ QPlatformVideoSource *primaryActiveVideoSource();
+
+ // it might be moved to the base class, but it needs QPlatformAudioInput
+ // to be QPlatformAudioBufferInputBase, which might not make sense
+ std::vector<QPlatformAudioBufferInputBase *> activeAudioInputs() const;
+
+private Q_SLOTS:
+ void updateAudioSink();
+ void updateVolume();
+ void updateVideoFrameConnection();
+ void updatePrimaryActiveVideoSource();
+
+Q_SIGNALS:
+ void primaryActiveVideoSourceChanged();
+
+private:
+ template<typename VideoSource>
+ bool setVideoSource(QPointer<VideoSource> &source, VideoSource *newSource);
+
+ QPointer<QPlatformCamera> m_camera;
+ QPointer<QPlatformSurfaceCapture> m_screenCapture;
+ QPointer<QPlatformSurfaceCapture> m_windowCapture;
+ QPointer<QPlatformVideoFrameInput> m_videoFrameInput;
+ QPointer<QPlatformVideoSource> m_primaryActiveVideoSource;
+
+ QPointer<QFFmpegAudioInput> m_audioInput;
+ QPointer<QPlatformAudioBufferInput> m_audioBufferInput;
+
+ QFFmpegImageCapture *m_imageCapture = nullptr;
+ QFFmpegMediaRecorder *m_mediaRecorder = nullptr;
+ QPlatformAudioOutput *m_audioOutput = nullptr;
+ QVideoSink *m_videoSink = nullptr;
+ std::unique_ptr<QAudioSink> m_audioSink;
+ QPointer<QIODevice> m_audioIODevice;
+ qsizetype m_audioBufferSize = 0;
+
+ QMetaObject::Connection m_videoFrameConnection;
+};
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGMEDIACAPTURESESSION_H
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediaformatinfo.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediaformatinfo.cpp
new file mode 100644
index 000000000..6389b4eed
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediaformatinfo.cpp
@@ -0,0 +1,517 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegmediaformatinfo_p.h"
+#include "qaudioformat.h"
+#include "qimagewriter.h"
+
+#include <qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcMediaFormatInfo, "qt.multimedia.ffmpeg.mediaformatinfo")
+
+static struct {
+ AVCodecID id;
+ QMediaFormat::VideoCodec codec;
+} videoCodecMap [] = {
+ { AV_CODEC_ID_MPEG1VIDEO, QMediaFormat::VideoCodec::MPEG1 },
+ { AV_CODEC_ID_MPEG2VIDEO, QMediaFormat::VideoCodec::MPEG2 },
+ { AV_CODEC_ID_MPEG4, QMediaFormat::VideoCodec::MPEG4 },
+ { AV_CODEC_ID_H264, QMediaFormat::VideoCodec::H264 },
+ { AV_CODEC_ID_HEVC, QMediaFormat::VideoCodec::H265 },
+ { AV_CODEC_ID_VP8, QMediaFormat::VideoCodec::VP8 },
+ { AV_CODEC_ID_VP9, QMediaFormat::VideoCodec::VP9 },
+ { AV_CODEC_ID_AV1, QMediaFormat::VideoCodec::AV1 },
+ { AV_CODEC_ID_THEORA, QMediaFormat::VideoCodec::Theora },
+ { AV_CODEC_ID_WMV3, QMediaFormat::VideoCodec::WMV },
+ { AV_CODEC_ID_MJPEG, QMediaFormat::VideoCodec::MotionJPEG }
+};
+
+static AVCodecID codecId(QMediaFormat::VideoCodec codec)
+{
+ for (const auto &c : videoCodecMap) {
+ if (c.codec == codec)
+ return c.id;
+ }
+ return AV_CODEC_ID_NONE;
+}
+
+static struct {
+ AVCodecID id;
+ QMediaFormat::AudioCodec codec;
+} audioCodecMap [] = {
+ { AV_CODEC_ID_MP3, QMediaFormat::AudioCodec::MP3 },
+ { AV_CODEC_ID_AAC, QMediaFormat::AudioCodec::AAC },
+ { AV_CODEC_ID_AC3, QMediaFormat::AudioCodec::AC3 },
+ { AV_CODEC_ID_EAC3, QMediaFormat::AudioCodec::EAC3 },
+ { AV_CODEC_ID_FLAC, QMediaFormat::AudioCodec::FLAC },
+ { AV_CODEC_ID_TRUEHD, QMediaFormat::AudioCodec::DolbyTrueHD },
+ { AV_CODEC_ID_OPUS, QMediaFormat::AudioCodec::Opus },
+ { AV_CODEC_ID_VORBIS, QMediaFormat::AudioCodec::Vorbis },
+ { AV_CODEC_ID_PCM_S16LE, QMediaFormat::AudioCodec::Wave },
+ { AV_CODEC_ID_WMAPRO, QMediaFormat::AudioCodec::WMA },
+ { AV_CODEC_ID_ALAC, QMediaFormat::AudioCodec::ALAC }
+};
+
+static AVCodecID codecId(QMediaFormat::AudioCodec codec)
+{
+ for (const auto &c : audioCodecMap) {
+ if (c.codec == codec)
+ return c.id;
+ }
+ return AV_CODEC_ID_NONE;
+}
+
+// MIME types are mostly copied from qmediaformat.cpp. Unfortunately, FFmpeg
+// in some cases uses slightly different MIME types.
+static const struct
+{
+ QMediaFormat::FileFormat fileFormat;
+ const char *mimeType;
+ const char *name; // disambiguate if we have several muxers/demuxers
+} map[QMediaFormat::LastFileFormat + 1] = {
+ { QMediaFormat::WMV, "video/x-ms-asf", "asf" },
+ { QMediaFormat::AVI, "video/x-msvideo", nullptr },
+ { QMediaFormat::Matroska, "video/x-matroska", nullptr },
+ { QMediaFormat::MPEG4, "video/mp4", "mp4" },
+ { QMediaFormat::Ogg, "video/ogg", nullptr },
+ // QuickTime is the same as MP4
+ { QMediaFormat::WebM, "video/webm", "webm" },
+ // Audio Formats
+ // Mpeg4Audio is the same as MP4 without the video codecs
+ { QMediaFormat::AAC, "audio/aac", nullptr },
+ // WMA is the same as WMV
+ { QMediaFormat::FLAC, "audio/x-flac", nullptr },
+ { QMediaFormat::MP3, "audio/mpeg", "mp3" },
+ { QMediaFormat::Wave, "audio/x-wav", nullptr },
+ { QMediaFormat::UnspecifiedFormat, nullptr, nullptr }
+};
+
+template <typename AVFormat>
+static QMediaFormat::FileFormat formatForAVFormat(AVFormat *format)
+{
+ if (!format->mime_type || !*format->mime_type)
+ return QMediaFormat::UnspecifiedFormat;
+
+ auto *m = map;
+ while (m->fileFormat != QMediaFormat::UnspecifiedFormat) {
+ if (m->mimeType && !strcmp(m->mimeType, format->mime_type)) {
+ // check if the name matches. This is used to disambiguate where FFmpeg provides
+ // multiple muxers or demuxers
+ if (!m->name || !strcmp(m->name, format->name))
+ return m->fileFormat;
+ }
+ ++m;
+ }
+
+ return QMediaFormat::UnspecifiedFormat;
+}
+
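+// Map the aliased Qt formats onto the FFmpeg container that actually backs
+// them before looking up the muxer.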
+static const AVOutputFormat *avFormatForFormat(QMediaFormat::FileFormat format)
+{
+ if (format == QMediaFormat::QuickTime || format == QMediaFormat::Mpeg4Audio)
+ format = QMediaFormat::MPEG4;
+ if (format == QMediaFormat::WMA)
+ format = QMediaFormat::WMV;
+
+ auto *m = map;
+ while (m->fileFormat != QMediaFormat::UnspecifiedFormat) {
+ if (m->fileFormat == format)
+ return av_guess_format(m->name, nullptr, m->mimeType);
+ ++m;
+ }
+
+ return nullptr;
+}
+
+
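+// Build up the tables of supported encoders and decoders: first collect the
+// codecs FFmpeg can encode or decode, then match them against the available
+// muxers.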
+QFFmpegMediaFormatInfo::QFFmpegMediaFormatInfo()
+{
+ qCDebug(qLcMediaFormatInfo) << ">>>> listing codecs";
+
+ QList<QMediaFormat::AudioCodec> audioEncoders;
+ QList<QMediaFormat::AudioCodec> extraAudioDecoders;
+ QList<QMediaFormat::VideoCodec> videoEncoders;
+ QList<QMediaFormat::VideoCodec> extraVideoDecoders;
+
+ const AVCodecDescriptor *descriptor = nullptr;
+ while ((descriptor = avcodec_descriptor_next(descriptor))) {
+ const bool canEncode = QFFmpeg::findAVEncoder(descriptor->id) != nullptr;
+ const bool canDecode = QFFmpeg::findAVDecoder(descriptor->id) != nullptr;
+ auto videoCodec = videoCodecForAVCodecId(descriptor->id);
+ auto audioCodec = audioCodecForAVCodecId(descriptor->id);
+ if (descriptor->type == AVMEDIA_TYPE_VIDEO && videoCodec != QMediaFormat::VideoCodec::Unspecified) {
+ if (canEncode) {
+ if (!videoEncoders.contains(videoCodec))
+ videoEncoders.append(videoCodec);
+ } else if (canDecode) {
+ if (!extraVideoDecoders.contains(videoCodec))
+ extraVideoDecoders.append(videoCodec);
+ }
+ } else if (descriptor->type == AVMEDIA_TYPE_AUDIO
+ && audioCodec != QMediaFormat::AudioCodec::Unspecified) {
+ if (canEncode) {
+ if (!audioEncoders.contains(audioCodec))
+ audioEncoders.append(audioCodec);
+ } else if (canDecode) {
+ if (!extraAudioDecoders.contains(audioCodec))
+ extraAudioDecoders.append(audioCodec);
+ }
+ }
+ }
+
+    // get muxers
+// qCDebug(qLcMediaFormatInfo) << ">>>> Muxers";
+ void *opaque = nullptr;
+ const AVOutputFormat *outputFormat = nullptr;
+ while ((outputFormat = av_muxer_iterate(&opaque))) {
+ auto mediaFormat = formatForAVFormat(outputFormat);
+ if (mediaFormat == QMediaFormat::UnspecifiedFormat)
+ continue;
+// qCDebug(qLcMediaFormatInfo) << " mux:" << outputFormat->name << outputFormat->long_name << outputFormat->mime_type << outputFormat->extensions << mediaFormat;
+
+ CodecMap encoder;
+ encoder.format = mediaFormat;
+
+ for (auto codec : audioEncoders) {
+ auto id = codecId(codec);
+ // only add the codec if it can be used with this container
+ if (avformat_query_codec(outputFormat, id, FF_COMPLIANCE_NORMAL) == 1) {
+ // add codec for container
+// qCDebug(qLcMediaFormatInfo) << " " << codec << Qt::hex << av_codec_get_tag(outputFormat->codec_tag, id);
+ encoder.audio.append(codec);
+ }
+ }
+ for (auto codec : videoEncoders) {
+ auto id = codecId(codec);
+ // only add the codec if it can be used with this container
+ if (avformat_query_codec(outputFormat, id, FF_COMPLIANCE_NORMAL) == 1) {
+ // add codec for container
+// qCDebug(qLcMediaFormatInfo) << " " << codec << Qt::hex << av_codec_get_tag(outputFormat->codec_tag, id);
+ encoder.video.append(codec);
+ }
+ }
+
+ // sanity checks and handling special cases
+ if (encoder.audio.isEmpty() && encoder.video.isEmpty())
+ continue;
+ switch (encoder.format) {
+ case QMediaFormat::WMV:
+ // add WMA
+ encoders.append({ QMediaFormat::WMA, encoder.audio, {} });
+ break;
+ case QMediaFormat::MPEG4:
+ // add Mpeg4Audio and QuickTime
+ encoders.append({ QMediaFormat::QuickTime, encoder.audio, encoder.video });
+ encoders.append({ QMediaFormat::Mpeg4Audio, encoder.audio, {} });
+ break;
+ case QMediaFormat::Wave:
+ // FFmpeg allows other encoded formats in WAV containers, but we do not want that
+ if (!encoder.audio.contains(QMediaFormat::AudioCodec::Wave))
+ continue;
+ encoder.audio = { QMediaFormat::AudioCodec::Wave };
+ break;
+ default:
+ break;
+ }
+ encoders.append(encoder);
+ }
+
+    // FFmpeg doesn't allow querying the supported codecs for decoders.
+    // We take a simple approximation: we can decode what we can encode.
+    // That's a safe subset.
+ decoders = encoders;
+
+#ifdef Q_OS_WINDOWS
+    // The MediaFoundation HEVC encoder fails when processing frames
+ for (auto &encoder : encoders) {
+ auto h265index = encoder.video.indexOf(QMediaFormat::VideoCodec::H265);
+ if (h265index >= 0)
+ encoder.video.removeAt(h265index);
+ }
+#endif
+
+// qCDebug(qLcMediaFormatInfo) << "extraDecoders:" << extraAudioDecoders << extraVideoDecoders;
+    // FFmpeg can currently only decode WMA and WMV, not encode them
+ if (extraAudioDecoders.contains(QMediaFormat::AudioCodec::WMA)) {
+ decoders[QMediaFormat::WMA].audio.append(QMediaFormat::AudioCodec::WMA);
+ decoders[QMediaFormat::WMV].audio.append(QMediaFormat::AudioCodec::WMA);
+ }
+ if (extraVideoDecoders.contains(QMediaFormat::VideoCodec::WMV)) {
+ decoders[QMediaFormat::WMV].video.append(QMediaFormat::VideoCodec::WMV);
+ }
+
+    // Add image formats we support. We currently simply use Qt's built-in image
+    // writer to save images. That doesn't give us HDR support or support for
+    // larger bit depths, but most cameras currently cannot generate those anyway.
+ const auto imgFormats = QImageWriter::supportedImageFormats();
+ for (const auto &f : imgFormats) {
+ if (f == "png")
+ imageFormats.append(QImageCapture::PNG);
+ else if (f == "jpeg")
+ imageFormats.append(QImageCapture::JPEG);
+ else if (f == "tiff")
+ imageFormats.append(QImageCapture::Tiff);
+ else if (f == "webp")
+ imageFormats.append(QImageCapture::WebP);
+ }
+}
+
+QFFmpegMediaFormatInfo::~QFFmpegMediaFormatInfo() = default;
+
+QMediaFormat::AudioCodec QFFmpegMediaFormatInfo::audioCodecForAVCodecId(AVCodecID id)
+{
+ for (const auto &c : audioCodecMap) {
+ if (c.id == id)
+ return c.codec;
+ }
+ return QMediaFormat::AudioCodec::Unspecified;
+}
+
+QMediaFormat::VideoCodec QFFmpegMediaFormatInfo::videoCodecForAVCodecId(AVCodecID id)
+{
+ for (const auto &c : videoCodecMap) {
+ if (c.id == id)
+ return c.codec;
+ }
+ return QMediaFormat::VideoCodec::Unspecified;
+}
+
+QMediaFormat::FileFormat
+QFFmpegMediaFormatInfo::fileFormatForAVInputFormat(const AVInputFormat *format)
+{
+    // FFmpeg seems to use different names for muxers and demuxers of the same format,
+    // which makes it somewhat cumbersome to detect things correctly.
+    // The input formats have a comma-separated list of short names. We check the
+    // first one of those, as the docs specify that you only append to the list.
+ static const struct
+ {
+ QMediaFormat::FileFormat fileFormat;
+ const char *name;
+ } map[QMediaFormat::LastFileFormat + 1] = {
+ { QMediaFormat::WMV, "asf" },
+ { QMediaFormat::AVI, "avi" },
+ { QMediaFormat::Matroska, "matroska" },
+ { QMediaFormat::MPEG4, "mov" },
+ { QMediaFormat::Ogg, "ogg" },
+ { QMediaFormat::WebM, "webm" },
+ // Audio Formats
+ // Mpeg4Audio is the same as MP4 without the video codecs
+ { QMediaFormat::AAC, "aac"},
+ // WMA is the same as WMV
+ { QMediaFormat::FLAC, "flac" },
+ { QMediaFormat::MP3, "mp3" },
+ { QMediaFormat::Wave, "wav" },
+ { QMediaFormat::UnspecifiedFormat, nullptr }
+ };
+
+ if (!format->name)
+ return QMediaFormat::UnspecifiedFormat;
+
+ auto *m = map;
+ while (m->fileFormat != QMediaFormat::UnspecifiedFormat) {
+ if (!strncmp(m->name, format->name, strlen(m->name)))
+ return m->fileFormat;
+ ++m;
+ }
+
+ return QMediaFormat::UnspecifiedFormat;
+}
+
+const AVOutputFormat *
+QFFmpegMediaFormatInfo::outputFormatForFileFormat(QMediaFormat::FileFormat format)
+{
+ return avFormatForFormat(format);
+}
+
+AVCodecID QFFmpegMediaFormatInfo::codecIdForVideoCodec(QMediaFormat::VideoCodec codec)
+{
+ return codecId(codec);
+}
+
+AVCodecID QFFmpegMediaFormatInfo::codecIdForAudioCodec(QMediaFormat::AudioCodec codec)
+{
+ return codecId(codec);
+}
+
+QAudioFormat::SampleFormat QFFmpegMediaFormatInfo::sampleFormat(AVSampleFormat format)
+{
+ switch (format) {
+ case AV_SAMPLE_FMT_NONE:
+ default:
+ return QAudioFormat::Unknown;
+ case AV_SAMPLE_FMT_U8: ///< unsigned 8 bits
+ case AV_SAMPLE_FMT_U8P: ///< unsigned 8 bits: planar
+ return QAudioFormat::UInt8;
+ case AV_SAMPLE_FMT_S16: ///< signed 16 bits
+ case AV_SAMPLE_FMT_S16P: ///< signed 16 bits: planar
+ return QAudioFormat::Int16;
+ case AV_SAMPLE_FMT_S32: ///< signed 32 bits
+ case AV_SAMPLE_FMT_S32P: ///< signed 32 bits: planar
+ return QAudioFormat::Int32;
+ case AV_SAMPLE_FMT_FLT: ///< float
+ case AV_SAMPLE_FMT_FLTP: ///< float: planar
+ return QAudioFormat::Float;
+ case AV_SAMPLE_FMT_DBL: ///< double
+ case AV_SAMPLE_FMT_DBLP: ///< double: planar
+ case AV_SAMPLE_FMT_S64: ///< signed 64 bits
+ case AV_SAMPLE_FMT_S64P: ///< signed 64 bits, planar
+        // QAudioFormat has no matching sample format, so fall back to float
+ return QAudioFormat::Float;
+ }
+}
+
+AVSampleFormat QFFmpegMediaFormatInfo::avSampleFormat(QAudioFormat::SampleFormat format)
+{
+ switch (format) {
+ case QAudioFormat::UInt8:
+ return AV_SAMPLE_FMT_U8;
+ case QAudioFormat::Int16:
+ return AV_SAMPLE_FMT_S16;
+ case QAudioFormat::Int32:
+ return AV_SAMPLE_FMT_S32;
+ case QAudioFormat::Float:
+ return AV_SAMPLE_FMT_FLT;
+ default:
+ return AV_SAMPLE_FMT_NONE;
+ }
+}
+
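+// Translate a QAudioFormat channel configuration into the corresponding
+// FFmpeg AV_CH_* layout mask, one channel flag at a time.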
+int64_t QFFmpegMediaFormatInfo::avChannelLayout(QAudioFormat::ChannelConfig channelConfig)
+{
+ int64_t avChannelLayout = 0;
+ if (channelConfig & (1 << QAudioFormat::FrontLeft))
+ avChannelLayout |= AV_CH_FRONT_LEFT;
+ if (channelConfig & (1 << QAudioFormat::FrontRight))
+ avChannelLayout |= AV_CH_FRONT_RIGHT;
+ if (channelConfig & (1 << QAudioFormat::FrontCenter))
+ avChannelLayout |= AV_CH_FRONT_CENTER;
+ if (channelConfig & (1 << QAudioFormat::LFE))
+ avChannelLayout |= AV_CH_LOW_FREQUENCY;
+ if (channelConfig & (1 << QAudioFormat::BackLeft))
+ avChannelLayout |= AV_CH_BACK_LEFT;
+ if (channelConfig & (1 << QAudioFormat::BackRight))
+ avChannelLayout |= AV_CH_BACK_RIGHT;
+ if (channelConfig & (1 << QAudioFormat::FrontLeftOfCenter))
+ avChannelLayout |= AV_CH_FRONT_LEFT_OF_CENTER;
+ if (channelConfig & (1 << QAudioFormat::FrontRightOfCenter))
+ avChannelLayout |= AV_CH_FRONT_RIGHT_OF_CENTER;
+ if (channelConfig & (1 << QAudioFormat::BackCenter))
+ avChannelLayout |= AV_CH_BACK_CENTER;
+ if (channelConfig & (1 << QAudioFormat::LFE2))
+ avChannelLayout |= AV_CH_LOW_FREQUENCY_2;
+ if (channelConfig & (1 << QAudioFormat::SideLeft))
+ avChannelLayout |= AV_CH_SIDE_LEFT;
+ if (channelConfig & (1 << QAudioFormat::SideRight))
+ avChannelLayout |= AV_CH_SIDE_RIGHT;
+ if (channelConfig & (1 << QAudioFormat::TopFrontLeft))
+ avChannelLayout |= AV_CH_TOP_FRONT_LEFT;
+ if (channelConfig & (1 << QAudioFormat::TopFrontRight))
+ avChannelLayout |= AV_CH_TOP_FRONT_RIGHT;
+ if (channelConfig & (1 << QAudioFormat::TopFrontCenter))
+ avChannelLayout |= AV_CH_TOP_FRONT_CENTER;
+ if (channelConfig & (1 << QAudioFormat::TopCenter))
+ avChannelLayout |= AV_CH_TOP_CENTER;
+ if (channelConfig & (1 << QAudioFormat::TopBackLeft))
+ avChannelLayout |= AV_CH_TOP_BACK_LEFT;
+ if (channelConfig & (1 << QAudioFormat::TopBackRight))
+ avChannelLayout |= AV_CH_TOP_BACK_RIGHT;
+ if (channelConfig & (1 << QAudioFormat::TopBackCenter))
+ avChannelLayout |= AV_CH_TOP_BACK_CENTER;
+    // The defines used below were all added together in FFmpeg 4.4
+#ifdef AV_CH_TOP_SIDE_LEFT
+ if (channelConfig & (1 << QAudioFormat::TopSideLeft))
+ avChannelLayout |= AV_CH_TOP_SIDE_LEFT;
+ if (channelConfig & (1 << QAudioFormat::TopSideRight))
+ avChannelLayout |= AV_CH_TOP_SIDE_RIGHT;
+ if (channelConfig & (1 << QAudioFormat::BottomFrontCenter))
+ avChannelLayout |= AV_CH_BOTTOM_FRONT_CENTER;
+ if (channelConfig & (1 << QAudioFormat::BottomFrontLeft))
+ avChannelLayout |= AV_CH_BOTTOM_FRONT_LEFT;
+ if (channelConfig & (1 << QAudioFormat::BottomFrontRight))
+ avChannelLayout |= AV_CH_BOTTOM_FRONT_RIGHT;
+#endif
+ return avChannelLayout;
+}
+
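+// The reverse mapping: FFmpeg AV_CH_* layout mask to QAudioFormat channel config.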
+QAudioFormat::ChannelConfig QFFmpegMediaFormatInfo::channelConfigForAVLayout(int64_t avChannelLayout)
+{
+ quint32 channelConfig = 0;
+ if (avChannelLayout & AV_CH_FRONT_LEFT)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::FrontLeft);
+ if (avChannelLayout & AV_CH_FRONT_RIGHT)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::FrontRight);
+ if (avChannelLayout & AV_CH_FRONT_CENTER)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::FrontCenter);
+ if (avChannelLayout & AV_CH_LOW_FREQUENCY)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::LFE);
+ if (avChannelLayout & AV_CH_BACK_LEFT)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::BackLeft);
+ if (avChannelLayout & AV_CH_BACK_RIGHT)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::BackRight);
+ if (avChannelLayout & AV_CH_FRONT_LEFT_OF_CENTER)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::FrontLeftOfCenter);
+ if (avChannelLayout & AV_CH_FRONT_RIGHT_OF_CENTER)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::FrontRightOfCenter);
+ if (avChannelLayout & AV_CH_BACK_CENTER)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::BackCenter);
+ if (avChannelLayout & AV_CH_LOW_FREQUENCY_2)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::LFE2);
+ if (avChannelLayout & AV_CH_SIDE_LEFT)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::SideLeft);
+ if (avChannelLayout & AV_CH_SIDE_RIGHT)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::SideRight);
+ if (avChannelLayout & AV_CH_TOP_FRONT_LEFT)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::TopFrontLeft);
+ if (avChannelLayout & AV_CH_TOP_FRONT_RIGHT)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::TopFrontRight);
+ if (avChannelLayout & AV_CH_TOP_FRONT_CENTER)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::TopFrontCenter);
+ if (avChannelLayout & AV_CH_TOP_CENTER)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::TopCenter);
+ if (avChannelLayout & AV_CH_TOP_BACK_LEFT)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::TopBackLeft);
+ if (avChannelLayout & AV_CH_TOP_BACK_RIGHT)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::TopBackRight);
+ if (avChannelLayout & AV_CH_TOP_BACK_CENTER)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::TopBackCenter);
+    // The defines used below were all added together in FFmpeg 4.4
+#ifdef AV_CH_TOP_SIDE_LEFT
+ if (avChannelLayout & AV_CH_TOP_SIDE_LEFT)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::TopSideLeft);
+ if (avChannelLayout & AV_CH_TOP_SIDE_RIGHT)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::TopSideRight);
+ if (avChannelLayout & AV_CH_BOTTOM_FRONT_CENTER)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::BottomFrontCenter);
+ if (avChannelLayout & AV_CH_BOTTOM_FRONT_LEFT)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::BottomFrontLeft);
+ if (avChannelLayout & AV_CH_BOTTOM_FRONT_RIGHT)
+ channelConfig |= QAudioFormat::channelConfig(QAudioFormat::BottomFrontRight);
+#endif
+ return QAudioFormat::ChannelConfig(channelConfig);
+}
+
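+// Build a QAudioFormat from the FFmpeg codec parameters. If the stream does
+// not carry an explicit channel layout, fall back to Qt's default layout for
+// the reported channel count.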
+QAudioFormat QFFmpegMediaFormatInfo::audioFormatFromCodecParameters(AVCodecParameters *codecpar)
+{
+ QAudioFormat format;
+ format.setSampleFormat(sampleFormat(AVSampleFormat(codecpar->format)));
+ format.setSampleRate(codecpar->sample_rate);
+#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
+ uint64_t channelLayout = codecpar->channel_layout;
+ if (!channelLayout)
+ channelLayout = avChannelLayout(QAudioFormat::defaultChannelConfigForChannelCount(codecpar->channels));
+#else
+ uint64_t channelLayout = 0;
+ if (codecpar->ch_layout.order == AV_CHANNEL_ORDER_NATIVE)
+ channelLayout = codecpar->ch_layout.u.mask;
+ else
+ channelLayout = avChannelLayout(QAudioFormat::defaultChannelConfigForChannelCount(codecpar->ch_layout.nb_channels));
+#endif
+ format.setChannelConfig(channelConfigForAVLayout(channelLayout));
+ return format;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediaformatinfo_p.h b/src/plugins/multimedia/ffmpeg/qffmpegmediaformatinfo_p.h
new file mode 100644
index 000000000..52fcf6f72
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediaformatinfo_p.h
@@ -0,0 +1,52 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGMEDIAFORMATINFO_H
+#define QFFMPEGMEDIAFORMATINFO_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediaformatinfo_p.h>
+#include <qhash.h>
+#include <qlist.h>
+#include <qaudioformat.h>
+#include "qffmpeg_p.h"
+
+QT_BEGIN_NAMESPACE
+
+class QFFmpegMediaFormatInfo : public QPlatformMediaFormatInfo
+{
+public:
+ QFFmpegMediaFormatInfo();
+ ~QFFmpegMediaFormatInfo();
+
+ static QMediaFormat::VideoCodec videoCodecForAVCodecId(AVCodecID id);
+ static QMediaFormat::AudioCodec audioCodecForAVCodecId(AVCodecID id);
+ static QMediaFormat::FileFormat fileFormatForAVInputFormat(const AVInputFormat *format);
+
+ static const AVOutputFormat *outputFormatForFileFormat(QMediaFormat::FileFormat format);
+
+ static AVCodecID codecIdForVideoCodec(QMediaFormat::VideoCodec codec);
+ static AVCodecID codecIdForAudioCodec(QMediaFormat::AudioCodec codec);
+
+ static QAudioFormat::SampleFormat sampleFormat(AVSampleFormat format);
+ static AVSampleFormat avSampleFormat(QAudioFormat::SampleFormat format);
+
+ static int64_t avChannelLayout(QAudioFormat::ChannelConfig channelConfig);
+ static QAudioFormat::ChannelConfig channelConfigForAVLayout(int64_t avChannelLayout);
+
+ static QAudioFormat audioFormatFromCodecParameters(AVCodecParameters *codecPar);
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp
new file mode 100644
index 000000000..ba1fff3b3
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp
@@ -0,0 +1,375 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QtMultimedia/private/qplatformmediaplugin_p.h>
+#include <qcameradevice.h>
+#include "qffmpegmediaintegration_p.h"
+#include "qffmpegmediaformatinfo_p.h"
+#include "qffmpegmediaplayer_p.h"
+#include "qffmpegvideosink_p.h"
+#include "qffmpegmediacapturesession_p.h"
+#include "qffmpegmediarecorder_p.h"
+#include "qffmpegimagecapture_p.h"
+#include "qffmpegaudioinput_p.h"
+#include "qffmpegaudiodecoder_p.h"
+#include "qffmpegresampler_p.h"
+#include "qgrabwindowsurfacecapture_p.h"
+#include "qffmpegconverter_p.h"
+
+#ifdef Q_OS_MACOS
+#include <VideoToolbox/VideoToolbox.h>
+
+#include "qcgcapturablewindows_p.h"
+#include "qcgwindowcapture_p.h"
+#include "qavfscreencapture_p.h"
+#endif
+
+#ifdef Q_OS_DARWIN
+#include "qavfcamera_p.h"
+
+#elif defined(Q_OS_WINDOWS)
+#include "qwindowscamera_p.h"
+#include "qwindowsvideodevices_p.h"
+#include "qffmpegscreencapture_dxgi_p.h"
+#include "qwincapturablewindows_p.h"
+#include "qgdiwindowcapture_p.h"
+#endif
+
+#ifdef Q_OS_ANDROID
+# include "jni.h"
+# include "qandroidvideodevices_p.h"
+# include "qandroidcamera_p.h"
+# include "qandroidimagecapture_p.h"
+extern "C" {
+# include <libavutil/log.h>
+# include <libavcodec/jni.h>
+}
+#endif
+
+#if QT_CONFIG(linux_v4l)
+#include "qv4l2camera_p.h"
+#include "qv4l2cameradevices_p.h"
+#endif
+
+#if QT_CONFIG(cpp_winrt)
+#include "qffmpegwindowcapture_uwp_p.h"
+#endif
+
+#if QT_CONFIG(xlib)
+#include "qx11surfacecapture_p.h"
+#include "qx11capturablewindows_p.h"
+#endif
+
+#if QT_CONFIG(eglfs)
+#include "qeglfsscreencapture_p.h"
+#endif
+
+QT_BEGIN_NAMESPACE
+
+class QFFmpegMediaPlugin : public QPlatformMediaPlugin
+{
+ Q_OBJECT
+ Q_PLUGIN_METADATA(IID QPlatformMediaPlugin_iid FILE "ffmpeg.json")
+
+public:
+ QFFmpegMediaPlugin()
+ : QPlatformMediaPlugin()
+ {}
+
+ QPlatformMediaIntegration* create(const QString &name) override
+ {
+ if (name == u"ffmpeg")
+ return new QFFmpegMediaIntegration;
+ return nullptr;
+ }
+};
+
+thread_local bool FFmpegLogsEnabledInThread = true;
+static bool UseCustomFFmpegLogger = false;
+
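+// Log callback installed into FFmpeg: routes FFmpeg's log output through Qt's
+// logging functions when QT_FFMPEG_DEBUG is set, and suppresses it entirely
+// for threads that have disabled FFmpeg logging.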
+static void qffmpegLogCallback(void *ptr, int level, const char *fmt, va_list vl)
+{
+ if (!FFmpegLogsEnabledInThread)
+ return;
+
+ if (!UseCustomFFmpegLogger)
+ return av_log_default_callback(ptr, level, fmt, vl);
+
+ // filter logs above the chosen level and AV_LOG_QUIET (negative level)
+ if (level < 0 || level > av_log_get_level())
+ return;
+
+ QString message = QStringLiteral("FFmpeg log: %1").arg(QString::vasprintf(fmt, vl));
+ if (message.endsWith("\n"))
+ message.removeLast();
+
+ if (level == AV_LOG_DEBUG || level == AV_LOG_TRACE)
+ qDebug() << message;
+ else if (level == AV_LOG_VERBOSE || level == AV_LOG_INFO)
+ qInfo() << message;
+ else if (level == AV_LOG_WARNING)
+ qWarning() << message;
+ else if (level == AV_LOG_ERROR || level == AV_LOG_FATAL || level == AV_LOG_PANIC)
+ qCritical() << message;
+}
+
+static void setupFFmpegLogger()
+{
+ if (qEnvironmentVariableIsSet("QT_FFMPEG_DEBUG")) {
+ av_log_set_level(AV_LOG_DEBUG);
+ UseCustomFFmpegLogger = true;
+ }
+
+ av_log_set_callback(&qffmpegLogCallback);
+}
+
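+// Resolve the screen capture backend requested via QT_SCREEN_CAPTURE_BACKEND.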
+static QPlatformSurfaceCapture *createScreenCaptureByBackend(QString backend)
+{
+ if (backend == u"grabwindow")
+ return new QGrabWindowSurfaceCapture(QPlatformSurfaceCapture::ScreenSource{});
+
+#if QT_CONFIG(eglfs)
+ if (backend == u"eglfs")
+ return new QEglfsScreenCapture;
+#endif
+
+#if QT_CONFIG(xlib)
+ if (backend == u"x11")
+ return new QX11SurfaceCapture(QPlatformSurfaceCapture::ScreenSource{});
+#elif defined(Q_OS_WINDOWS)
+ if (backend == u"dxgi")
+ return new QFFmpegScreenCaptureDxgi;
+#elif defined(Q_OS_MACOS)
+ if (backend == u"avf")
+ return new QAVFScreenCapture;
+#endif
+ return nullptr;
+}
+
+static QPlatformSurfaceCapture *createWindowCaptureByBackend(QString backend)
+{
+ if (backend == u"grabwindow")
+ return new QGrabWindowSurfaceCapture(QPlatformSurfaceCapture::WindowSource{});
+
+#if QT_CONFIG(xlib)
+ if (backend == u"x11")
+ return new QX11SurfaceCapture(QPlatformSurfaceCapture::WindowSource{});
+#elif defined(Q_OS_WINDOWS)
+ if (backend == u"gdi")
+ return new QGdiWindowCapture;
+#if QT_CONFIG(cpp_winrt)
+ if (backend == u"uwp")
+ return new QFFmpegWindowCaptureUwp;
+#endif
+#elif defined(Q_OS_MACOS)
+ if (backend == u"cg")
+ return new QCGWindowCapture;
+#endif
+ return nullptr;
+}
+
+QFFmpegMediaIntegration::QFFmpegMediaIntegration()
+ : QPlatformMediaIntegration(QLatin1String("ffmpeg"))
+{
+ setupFFmpegLogger();
+
+#ifndef QT_NO_DEBUG
+ qDebug() << "Available HW decoding frameworks:";
+ for (auto type : QFFmpeg::HWAccel::decodingDeviceTypes())
+ qDebug() << " " << av_hwdevice_get_type_name(type);
+
+ qDebug() << "Available HW encoding frameworks:";
+ for (auto type : QFFmpeg::HWAccel::encodingDeviceTypes())
+ qDebug() << " " << av_hwdevice_get_type_name(type);
+#endif
+}
+
+QMaybe<QPlatformAudioDecoder *> QFFmpegMediaIntegration::createAudioDecoder(QAudioDecoder *decoder)
+{
+ return new QFFmpegAudioDecoder(decoder);
+}
+
+QMaybe<std::unique_ptr<QPlatformAudioResampler>>
+QFFmpegMediaIntegration::createAudioResampler(const QAudioFormat &inputFormat,
+ const QAudioFormat &outputFormat)
+{
+ return { std::make_unique<QFFmpegResampler>(inputFormat, outputFormat) };
+}
+
+QMaybe<QPlatformMediaCaptureSession *> QFFmpegMediaIntegration::createCaptureSession()
+{
+ return new QFFmpegMediaCaptureSession();
+}
+
+QMaybe<QPlatformMediaPlayer *> QFFmpegMediaIntegration::createPlayer(QMediaPlayer *player)
+{
+ return new QFFmpegMediaPlayer(player);
+}
+
+QMaybe<QPlatformCamera *> QFFmpegMediaIntegration::createCamera(QCamera *camera)
+{
+#ifdef Q_OS_DARWIN
+ return new QAVFCamera(camera);
+#elif defined(Q_OS_ANDROID)
+ return new QAndroidCamera(camera);
+#elif QT_CONFIG(linux_v4l)
+ return new QV4L2Camera(camera);
+#elif defined(Q_OS_WINDOWS)
+ return new QWindowsCamera(camera);
+#else
+ Q_UNUSED(camera);
+    return nullptr; // new QFFmpegCamera(camera);
+#endif
+}
+
+QPlatformSurfaceCapture *QFFmpegMediaIntegration::createScreenCapture(QScreenCapture *)
+{
+ static const QString screenCaptureBackend = qgetenv("QT_SCREEN_CAPTURE_BACKEND").toLower();
+
+ if (!screenCaptureBackend.isEmpty()) {
+ if (auto screenCapture = createScreenCaptureByBackend(screenCaptureBackend))
+ return screenCapture;
+
+ qWarning() << "Not supported QT_SCREEN_CAPTURE_BACKEND:" << screenCaptureBackend;
+ }
+
+#if QT_CONFIG(xlib)
+ if (QX11SurfaceCapture::isSupported())
+ return new QX11SurfaceCapture(QPlatformSurfaceCapture::ScreenSource{});
+#endif
+
+#if QT_CONFIG(eglfs)
+ if (QEglfsScreenCapture::isSupported())
+ return new QEglfsScreenCapture;
+#endif
+
+#if defined(Q_OS_WINDOWS)
+ return new QFFmpegScreenCaptureDxgi;
+#elif defined(Q_OS_MACOS) // TODO: probably use it for iOS as well
+ return new QAVFScreenCapture;
+#else
+ return new QGrabWindowSurfaceCapture(QPlatformSurfaceCapture::ScreenSource{});
+#endif
+}
+
+QPlatformSurfaceCapture *QFFmpegMediaIntegration::createWindowCapture(QWindowCapture *)
+{
+ static const QString windowCaptureBackend = qgetenv("QT_WINDOW_CAPTURE_BACKEND").toLower();
+
+ if (!windowCaptureBackend.isEmpty()) {
+ if (auto windowCapture = createWindowCaptureByBackend(windowCaptureBackend))
+ return windowCapture;
+
+ qWarning() << "Not supported QT_WINDOW_CAPTURE_BACKEND:" << windowCaptureBackend;
+ }
+
+#if QT_CONFIG(xlib)
+ if (QX11SurfaceCapture::isSupported())
+ return new QX11SurfaceCapture(QPlatformSurfaceCapture::WindowSource{});
+#endif
+
+#if defined(Q_OS_WINDOWS)
+# if QT_CONFIG(cpp_winrt)
+ if (QFFmpegWindowCaptureUwp::isSupported())
+ return new QFFmpegWindowCaptureUwp;
+# endif
+
+ return new QGdiWindowCapture;
+#elif defined(Q_OS_MACOS) // TODO: probably use it for iOS as well
+ return new QCGWindowCapture;
+#else
+ return new QGrabWindowSurfaceCapture(QPlatformSurfaceCapture::WindowSource{});
+#endif
+}
+
+QMaybe<QPlatformMediaRecorder *> QFFmpegMediaIntegration::createRecorder(QMediaRecorder *recorder)
+{
+ return new QFFmpegMediaRecorder(recorder);
+}
+
+QMaybe<QPlatformImageCapture *> QFFmpegMediaIntegration::createImageCapture(QImageCapture *imageCapture)
+{
+#if defined(Q_OS_ANDROID)
+ return new QAndroidImageCapture(imageCapture);
+#else
+ return new QFFmpegImageCapture(imageCapture);
+#endif
+}
+
+QMaybe<QPlatformVideoSink *> QFFmpegMediaIntegration::createVideoSink(QVideoSink *sink)
+{
+ return new QFFmpegVideoSink(sink);
+}
+
+QMaybe<QPlatformAudioInput *> QFFmpegMediaIntegration::createAudioInput(QAudioInput *input)
+{
+ return new QFFmpegAudioInput(input);
+}
+
+QVideoFrame QFFmpegMediaIntegration::convertVideoFrame(QVideoFrame &srcFrame,
+ const QVideoFrameFormat &destFormat)
+{
+ return convertFrame(srcFrame, destFormat);
+}
+
+QPlatformMediaFormatInfo *QFFmpegMediaIntegration::createFormatInfo()
+{
+ return new QFFmpegMediaFormatInfo;
+}
+
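+// Pick the platform's video device enumerator; returning nullptr means no
+// camera support on this platform.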
+QPlatformVideoDevices *QFFmpegMediaIntegration::createVideoDevices()
+{
+#if defined(Q_OS_ANDROID)
+ return new QAndroidVideoDevices(this);
+#elif QT_CONFIG(linux_v4l)
+ return new QV4L2CameraDevices(this);
+#elif defined Q_OS_DARWIN
+ return new QAVFVideoDevices(this);
+#elif defined(Q_OS_WINDOWS)
+ return new QWindowsVideoDevices(this);
+#else
+ return nullptr;
+#endif
+}
+
+QPlatformCapturableWindows *QFFmpegMediaIntegration::createCapturableWindows()
+{
+#if QT_CONFIG(xlib)
+ if (QX11SurfaceCapture::isSupported())
+ return new QX11CapturableWindows;
+#elif defined Q_OS_MACOS
+ return new QCGCapturableWindows;
+#elif defined(Q_OS_WINDOWS)
+ return new QWinCapturableWindows;
+#endif
+ return nullptr;
+}
+
+#ifdef Q_OS_ANDROID
+
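+// Called by the JVM when the plugin is loaded. Hands the JavaVM over to
+// FFmpeg (needed for its MediaCodec backend) and registers our native methods.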
+Q_DECL_EXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void * /*reserved*/)
+{
+ static bool initialized = false;
+ if (initialized)
+ return JNI_VERSION_1_6;
+ initialized = true;
+
+ QT_USE_NAMESPACE
+ void *environment;
+ if (vm->GetEnv(&environment, JNI_VERSION_1_6))
+ return JNI_ERR;
+
+    // Register our JavaVM with FFmpeg.
+ if (av_jni_set_java_vm(vm, nullptr))
+ return JNI_ERR;
+
+ if (!QAndroidCamera::registerNativeMethods())
+ return JNI_ERR;
+
+ return JNI_VERSION_1_6;
+}
+#endif
+
+QT_END_NAMESPACE
+
+#include "qffmpegmediaintegration.moc"
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration_p.h b/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration_p.h
new file mode 100644
index 000000000..473a5f044
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration_p.h
@@ -0,0 +1,57 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGMEDIAINTEGRATION_H
+#define QFFMPEGMEDIAINTEGRATION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediaintegration_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QFFmpegMediaFormatInfo;
+
+class QFFmpegMediaIntegration : public QPlatformMediaIntegration
+{
+public:
+ QFFmpegMediaIntegration();
+
+ QMaybe<QPlatformAudioDecoder *> createAudioDecoder(QAudioDecoder *decoder) override;
+ QMaybe<std::unique_ptr<QPlatformAudioResampler>> createAudioResampler(const QAudioFormat &inputFormat, const QAudioFormat &outputFormat) override;
+ QMaybe<QPlatformMediaCaptureSession *> createCaptureSession() override;
+ QMaybe<QPlatformMediaPlayer *> createPlayer(QMediaPlayer *player) override;
+ QMaybe<QPlatformCamera *> createCamera(QCamera *) override;
+ QPlatformSurfaceCapture *createScreenCapture(QScreenCapture *) override;
+ QPlatformSurfaceCapture *createWindowCapture(QWindowCapture *) override;
+ QMaybe<QPlatformMediaRecorder *> createRecorder(QMediaRecorder *) override;
+ QMaybe<QPlatformImageCapture *> createImageCapture(QImageCapture *) override;
+
+ QMaybe<QPlatformVideoSink *> createVideoSink(QVideoSink *sink) override;
+
+ QMaybe<QPlatformAudioInput *> createAudioInput(QAudioInput *input) override;
+// QPlatformAudioOutput *createAudioOutput(QAudioOutput *) override;
+
+ QVideoFrame convertVideoFrame(QVideoFrame &srcFrame,
+ const QVideoFrameFormat &destFormat) override;
+
+protected:
+ QPlatformMediaFormatInfo *createFormatInfo() override;
+
+ QPlatformVideoDevices *createVideoDevices() override;
+
+ QPlatformCapturableWindows *createCapturableWindows() override;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediametadata.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediametadata.cpp
new file mode 100644
index 000000000..465e380db
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediametadata.cpp
@@ -0,0 +1,182 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegmediametadata_p.h"
+#include <QDebug>
+#include <QtCore/qdatetime.h>
+#include <qstringlist.h>
+#include <qurl.h>
+#include <qlocale.h>
+
+#include <qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcMetaData, "qt.multimedia.ffmpeg.metadata")
+
+namespace {
+
+struct ffmpegTagToMetaDataKey
+{
+ const char *tag;
+ QMediaMetaData::Key key;
+};
+
+constexpr ffmpegTagToMetaDataKey ffmpegTagToMetaDataKey[] = {
+ { "title", QMediaMetaData::Title },
+ { "comment", QMediaMetaData::Comment },
+ { "description", QMediaMetaData::Description },
+ { "genre", QMediaMetaData::Genre },
+ { "date", QMediaMetaData::Date },
+ { "year", QMediaMetaData::Date },
+ { "creation_time", QMediaMetaData::Date },
+
+ { "language", QMediaMetaData::Language },
+
+ { "copyright", QMediaMetaData::Copyright },
+
+ // Music
+ { "album", QMediaMetaData::AlbumTitle },
+ { "album_artist", QMediaMetaData::AlbumArtist },
+ { "artist", QMediaMetaData::ContributingArtist },
+ { "track", QMediaMetaData::TrackNumber },
+
+ // Movie
+ { "performer", QMediaMetaData::LeadPerformer },
+
+ { nullptr, QMediaMetaData::Title }
+};
+
+}
+
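+// Map an FFmpeg tag name to the corresponding Qt metadata key, or -1 if the
+// tag is unknown. A linear search is fine here, the table is small.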
+static QMediaMetaData::Key tagToKey(const char *tag)
+{
+ const auto *map = ffmpegTagToMetaDataKey;
+ while (map->tag) {
+ if (!strcmp(map->tag, tag))
+ return map->key;
+ ++map;
+ }
+ return QMediaMetaData::Key(-1);
+}
+
+static const char *keyToTag(QMediaMetaData::Key key)
+{
+ const auto *map = ffmpegTagToMetaDataKey;
+ while (map->tag) {
+ if (map->key == key)
+ return map->tag;
+ ++map;
+ }
+ return nullptr;
+}
+
+// Internal: convert a single FFmpeg dictionary entry into the matching QMediaMetaData entry.
+void QFFmpegMetaData::addEntry(QMediaMetaData &metaData, AVDictionaryEntry *entry)
+{
+ qCDebug(qLcMetaData) << " checking:" << entry->key << entry->value;
+ QByteArray tag(entry->key);
+ QMediaMetaData::Key key = tagToKey(tag.toLower());
+ if (key == QMediaMetaData::Key(-1))
+ return;
+ qCDebug(qLcMetaData) << " adding" << key;
+
+ auto *map = &metaData;
+
+ int metaTypeId = keyType(key).id();
+ switch (metaTypeId) {
+ case qMetaTypeId<QString>():
+ map->insert(key, QString::fromUtf8(entry->value));
+ return;
+ case qMetaTypeId<QStringList>():
+ map->insert(key, QString::fromUtf8(entry->value).split(QLatin1Char(',')));
+ return;
+ case qMetaTypeId<QDateTime>(): {
+ QDateTime date;
+ if (!qstrcmp(entry->key, "year")) {
+ if (map->keys().contains(QMediaMetaData::Date))
+ return;
+ date = QDateTime(QDate(QByteArray(entry->value).toInt(), 1, 1), QTime(0, 0, 0));
+ } else {
+ date = QDateTime::fromString(QString::fromUtf8(entry->value), Qt::ISODate);
+ }
+ map->insert(key, date);
+ return;
+ }
+ case qMetaTypeId<QUrl>():
+ map->insert(key, QUrl::fromEncoded(entry->value));
+ return;
+ case qMetaTypeId<qint64>():
+ map->insert(key, (qint64)QByteArray(entry->value).toLongLong());
+ return;
+ case qMetaTypeId<int>():
+ map->insert(key, QByteArray(entry->value).toInt());
+ return;
+ case qMetaTypeId<qreal>():
+ map->insert(key, (qreal)QByteArray(entry->value).toDouble());
+ return;
+ default:
+ break;
+ }
+ if (metaTypeId == qMetaTypeId<QLocale::Language>()) {
+ map->insert(key, QVariant::fromValue(QLocale::codeToLanguage(QString::fromUtf8(entry->value), QLocale::ISO639Part2)));
+ }
+}
+
+
+QMediaMetaData QFFmpegMetaData::fromAVMetaData(const AVDictionary *tags)
+{
+ QMediaMetaData metaData;
+ AVDictionaryEntry *entry = nullptr;
+ while ((entry = av_dict_get(tags, "", entry, AV_DICT_IGNORE_SUFFIX)))
+ addEntry(metaData, entry);
+
+ return metaData;
+}
+
+QByteArray QFFmpegMetaData::value(const QMediaMetaData &metaData, QMediaMetaData::Key key)
+{
+ const int metaTypeId = keyType(key).id();
+ const QVariant val = metaData.value(key);
+ switch (metaTypeId) {
+ case qMetaTypeId<QString>():
+ return val.toString().toUtf8();
+ case qMetaTypeId<QStringList>():
+ return val.toStringList().join(u",").toUtf8();
+ case qMetaTypeId<QDateTime>():
+ return val.toDateTime().toString(Qt::ISODate).toUtf8();
+ case qMetaTypeId<QUrl>():
+ return val.toUrl().toEncoded();
+ case qMetaTypeId<qint64>():
+ case qMetaTypeId<int>():
+ return QByteArray::number(val.toLongLong());
+ case qMetaTypeId<qreal>():
+ return QByteArray::number(val.toDouble());
+ default:
+ break;
+ }
+ if (metaTypeId == qMetaTypeId<QLocale::Language>())
+ return QLocale::languageToCode(val.value<QLocale::Language>(), QLocale::ISO639Part2).toUtf8();
+ return {};
+}
+
+
+AVDictionary *QFFmpegMetaData::toAVMetaData(const QMediaMetaData &metaData)
+{
+ const QList<Key> keys = metaData.keys();
+ AVDictionary *dict = nullptr;
+ for (const auto &k : keys) {
+ const char *key = ::keyToTag(k);
+ if (!key)
+ continue;
+ QByteArray val = value(metaData, k);
+ if (val.isEmpty())
+ continue;
+ av_dict_set(&dict, key, val.constData(), 0);
+ }
+ return dict;
+}
+
+
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediametadata_p.h b/src/plugins/multimedia/ffmpeg/qffmpegmediametadata_p.h
new file mode 100644
index 000000000..201287495
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediametadata_p.h
@@ -0,0 +1,35 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGMEDIAMETADATA_H
+#define QFFMPEGMEDIAMETADATA_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qmediametadata.h>
+#include <qffmpeg_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QFFmpegMetaData : public QMediaMetaData
+{
+public:
+ static void addEntry(QMediaMetaData &metaData, AVDictionaryEntry *entry);
+ static QMediaMetaData fromAVMetaData(const AVDictionary *tags);
+
+ static QByteArray value(const QMediaMetaData &metaData, QMediaMetaData::Key key);
+ static AVDictionary *toAVMetaData(const QMediaMetaData &metaData);
+};
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGMEDIAMETADATA_H
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer.cpp
new file mode 100644
index 000000000..951144692
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer.cpp
@@ -0,0 +1,411 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegmediaplayer_p.h"
+#include "private/qplatformaudiooutput_p.h"
+#include "qvideosink.h"
+#include "qaudiooutput.h"
+#include "qaudiobufferoutput.h"
+
+#include "qffmpegplaybackengine_p.h"
+#include <qiodevice.h>
+#include <qvideosink.h>
+#include <qtimer.h>
+#include <QtConcurrent/QtConcurrent>
+
+#include <qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
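+// Shared token used to abort an in-flight media load from another thread.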
+class CancelToken : public ICancelToken
+{
+public:
+
+ bool isCancelled() const override { return m_cancelled.load(std::memory_order_acquire); }
+
+ void cancel() { m_cancelled.store(true, std::memory_order_release); }
+
+private:
+ std::atomic_bool m_cancelled = false;
+};
+
+} // namespace QFFmpeg
+
+using namespace QFFmpeg;
+
+QFFmpegMediaPlayer::QFFmpegMediaPlayer(QMediaPlayer *player)
+ : QPlatformMediaPlayer(player)
+{
+ m_positionUpdateTimer.setInterval(50);
+ m_positionUpdateTimer.setTimerType(Qt::PreciseTimer);
+ connect(&m_positionUpdateTimer, &QTimer::timeout, this, &QFFmpegMediaPlayer::updatePosition);
+}
+
+QFFmpegMediaPlayer::~QFFmpegMediaPlayer()
+{
+ if (m_cancelToken)
+ m_cancelToken->cancel();
+
+ m_loadMedia.waitForFinished();
+}
+
+qint64 QFFmpegMediaPlayer::duration() const
+{
+ return m_playbackEngine ? m_playbackEngine->duration() / 1000 : 0;
+}
+
+void QFFmpegMediaPlayer::setPosition(qint64 position)
+{
+ if (mediaStatus() == QMediaPlayer::LoadingMedia)
+ return;
+
+ if (m_playbackEngine) {
+ m_playbackEngine->seek(position * 1000);
+ updatePosition();
+ }
+
+ mediaStatusChanged(QMediaPlayer::LoadedMedia);
+}
+
+void QFFmpegMediaPlayer::updatePosition()
+{
+ positionChanged(m_playbackEngine ? m_playbackEngine->currentPosition() / 1000 : 0);
+}
+
+void QFFmpegMediaPlayer::endOfStream()
+{
+ // stop update timer and report end position anyway
+ m_positionUpdateTimer.stop();
+ QPointer currentPlaybackEngine(m_playbackEngine.get());
+ positionChanged(duration());
+
+    // skip changing state and mediaStatus if the playbackEngine has been recreated,
+    // e.g. if new media has been loaded in response to the positionChanged signal
+ if (currentPlaybackEngine)
+ stateChanged(QMediaPlayer::StoppedState);
+ if (currentPlaybackEngine)
+ mediaStatusChanged(QMediaPlayer::EndOfMedia);
+}
+
+void QFFmpegMediaPlayer::onLoopChanged()
+{
+    // Report both the end and the start position.
+    // Reporting both signals is a bit controversial, but it ensures
+    // notifications about important position points.
+    // Also, it ensures a more predictable flow for testing.
+ positionChanged(duration());
+ positionChanged(0);
+ m_positionUpdateTimer.stop();
+ m_positionUpdateTimer.start();
+}
+
+void QFFmpegMediaPlayer::onBuffered()
+{
+ if (mediaStatus() == QMediaPlayer::BufferingMedia)
+ mediaStatusChanged(QMediaPlayer::BufferedMedia);
+}
+
+float QFFmpegMediaPlayer::bufferProgress() const
+{
+ return m_bufferProgress;
+}
+
+void QFFmpegMediaPlayer::mediaStatusChanged(QMediaPlayer::MediaStatus status)
+{
+ if (mediaStatus() == status)
+ return;
+
+ const auto newBufferProgress = status == QMediaPlayer::BufferingMedia ? 0.25f // to be improved
+ : status == QMediaPlayer::BufferedMedia ? 1.f
+ : 0.f;
+
+ if (!qFuzzyCompare(newBufferProgress, m_bufferProgress)) {
+ m_bufferProgress = newBufferProgress;
+ bufferProgressChanged(newBufferProgress);
+ }
+
+ QPlatformMediaPlayer::mediaStatusChanged(status);
+}
+
+QMediaTimeRange QFFmpegMediaPlayer::availablePlaybackRanges() const
+{
+ return {};
+}
+
+qreal QFFmpegMediaPlayer::playbackRate() const
+{
+ return m_playbackRate;
+}
+
+void QFFmpegMediaPlayer::setPlaybackRate(qreal rate)
+{
+ const float effectiveRate = std::max(static_cast<float>(rate), 0.0f);
+
+ if (qFuzzyCompare(m_playbackRate, effectiveRate))
+ return;
+
+ m_playbackRate = effectiveRate;
+
+ if (m_playbackEngine)
+ m_playbackEngine->setPlaybackRate(effectiveRate);
+
+ playbackRateChanged(effectiveRate);
+}
+
+QUrl QFFmpegMediaPlayer::media() const
+{
+ return m_url;
+}
+
+const QIODevice *QFFmpegMediaPlayer::mediaStream() const
+{
+ return m_device;
+}
+
+void QFFmpegMediaPlayer::handleIncorrectMedia(QMediaPlayer::MediaStatus status)
+{
+ seekableChanged(false);
+ audioAvailableChanged(false);
+ videoAvailableChanged(false);
+ metaDataChanged();
+ mediaStatusChanged(status);
+ m_playbackEngine = nullptr;
+}
+
+void QFFmpegMediaPlayer::setMedia(const QUrl &media, QIODevice *stream)
+{
+ // Wait for previous unfinished load attempts.
+ if (m_cancelToken)
+ m_cancelToken->cancel();
+
+ m_loadMedia.waitForFinished();
+
+ m_url = media;
+ m_device = stream;
+ m_playbackEngine = nullptr;
+
+ if (media.isEmpty() && !stream) {
+ handleIncorrectMedia(QMediaPlayer::NoMedia);
+ return;
+ }
+
+ mediaStatusChanged(QMediaPlayer::LoadingMedia);
+
+ m_requestedStatus = QMediaPlayer::StoppedState;
+
+ m_cancelToken = std::make_shared<CancelToken>();
+
+    // Load media asynchronously to keep the GUI thread responsive while loading
+ m_loadMedia = QtConcurrent::run([this, media, stream, cancelToken = m_cancelToken] {
+ // On worker thread
+ const MediaDataHolder::Maybe mediaHolder =
+ MediaDataHolder::create(media, stream, cancelToken);
+
+        // Transition back to the calling thread using invokeMethod, because QFuture
+        // continuations on the calling thread may deadlock (QTBUG-117918)
+ QMetaObject::invokeMethod(this, [this, mediaHolder, cancelToken] {
+ setMediaAsync(mediaHolder, cancelToken);
+ });
+ });
+}
+
+void QFFmpegMediaPlayer::setMediaAsync(QFFmpeg::MediaDataHolder::Maybe mediaDataHolder,
+ const std::shared_ptr<QFFmpeg::CancelToken> &cancelToken)
+{
+ Q_ASSERT(mediaStatus() == QMediaPlayer::LoadingMedia);
+
+ // If loading was cancelled, we do not emit any signals about failing
+ // to load media (or any other events). The rationale is that cancellation
+ // either happens during destruction, where the signals are no longer
+ // of interest, or it happens as a response to user requesting to load
+ // another media file. In the latter case, we don't want to risk popping
+ // up error dialogs or similar.
+ if (cancelToken->isCancelled()) {
+ return;
+ }
+
+ if (!mediaDataHolder) {
+ const auto [code, description] = mediaDataHolder.error();
+ error(code, description);
+ handleIncorrectMedia(QMediaPlayer::MediaStatus::InvalidMedia);
+ return;
+ }
+
+ m_playbackEngine = std::make_unique<PlaybackEngine>();
+
+ connect(m_playbackEngine.get(), &PlaybackEngine::endOfStream, this,
+ &QFFmpegMediaPlayer::endOfStream);
+ connect(m_playbackEngine.get(), &PlaybackEngine::errorOccured, this,
+ &QFFmpegMediaPlayer::error);
+ connect(m_playbackEngine.get(), &PlaybackEngine::loopChanged, this,
+ &QFFmpegMediaPlayer::onLoopChanged);
+ connect(m_playbackEngine.get(), &PlaybackEngine::buffered, this,
+ &QFFmpegMediaPlayer::onBuffered);
+
+ m_playbackEngine->setMedia(std::move(*mediaDataHolder.value()));
+
+ m_playbackEngine->setAudioBufferOutput(m_audioBufferOutput);
+ m_playbackEngine->setAudioSink(m_audioOutput);
+ m_playbackEngine->setVideoSink(m_videoSink);
+
+ m_playbackEngine->setLoops(loops());
+ m_playbackEngine->setPlaybackRate(m_playbackRate);
+
+ durationChanged(duration());
+ tracksChanged();
+ metaDataChanged();
+ seekableChanged(m_playbackEngine->isSeekable());
+
+ audioAvailableChanged(
+ !m_playbackEngine->streamInfo(QPlatformMediaPlayer::AudioStream).isEmpty());
+ videoAvailableChanged(
+ !m_playbackEngine->streamInfo(QPlatformMediaPlayer::VideoStream).isEmpty());
+
+ mediaStatusChanged(QMediaPlayer::LoadedMedia);
+
+ if (m_requestedStatus != QMediaPlayer::StoppedState) {
+ if (m_requestedStatus == QMediaPlayer::PlayingState)
+ play();
+ else if (m_requestedStatus == QMediaPlayer::PausedState)
+ pause();
+ }
+}
+
+void QFFmpegMediaPlayer::play()
+{
+ if (mediaStatus() == QMediaPlayer::LoadingMedia) {
+ m_requestedStatus = QMediaPlayer::PlayingState;
+ return;
+ }
+
+ if (!m_playbackEngine)
+ return;
+
+ if (mediaStatus() == QMediaPlayer::EndOfMedia && state() == QMediaPlayer::StoppedState) {
+ m_playbackEngine->seek(0);
+ positionChanged(0);
+ }
+
+ runPlayback();
+}
+
+void QFFmpegMediaPlayer::runPlayback()
+{
+ m_playbackEngine->play();
+ m_positionUpdateTimer.start();
+ stateChanged(QMediaPlayer::PlayingState);
+
+ if (mediaStatus() == QMediaPlayer::LoadedMedia || mediaStatus() == QMediaPlayer::EndOfMedia)
+ mediaStatusChanged(QMediaPlayer::BufferingMedia);
+}
+
+void QFFmpegMediaPlayer::pause()
+{
+ if (mediaStatus() == QMediaPlayer::LoadingMedia) {
+ m_requestedStatus = QMediaPlayer::PausedState;
+ return;
+ }
+
+ if (!m_playbackEngine)
+ return;
+
+ if (mediaStatus() == QMediaPlayer::EndOfMedia && state() == QMediaPlayer::StoppedState) {
+ m_playbackEngine->seek(0);
+ positionChanged(0);
+ }
+ m_playbackEngine->pause();
+ m_positionUpdateTimer.stop();
+ stateChanged(QMediaPlayer::PausedState);
+
+ if (mediaStatus() == QMediaPlayer::LoadedMedia || mediaStatus() == QMediaPlayer::EndOfMedia)
+ mediaStatusChanged(QMediaPlayer::BufferingMedia);
+}
+
+void QFFmpegMediaPlayer::stop()
+{
+ if (mediaStatus() == QMediaPlayer::LoadingMedia) {
+ m_requestedStatus = QMediaPlayer::StoppedState;
+ return;
+ }
+
+ if (!m_playbackEngine)
+ return;
+
+ m_playbackEngine->stop();
+ m_positionUpdateTimer.stop();
+ m_playbackEngine->seek(0);
+ positionChanged(0);
+ stateChanged(QMediaPlayer::StoppedState);
+ mediaStatusChanged(QMediaPlayer::LoadedMedia);
+}
+
+void QFFmpegMediaPlayer::setAudioOutput(QPlatformAudioOutput *output)
+{
+ m_audioOutput = output;
+ if (m_playbackEngine)
+ m_playbackEngine->setAudioSink(output);
+}
+
+void QFFmpegMediaPlayer::setAudioBufferOutput(QAudioBufferOutput *output)
+{
+ m_audioBufferOutput = output;
+ if (m_playbackEngine)
+ m_playbackEngine->setAudioBufferOutput(output);
+}
+
+QMediaMetaData QFFmpegMediaPlayer::metaData() const
+{
+ return m_playbackEngine ? m_playbackEngine->metaData() : QMediaMetaData{};
+}
+
+void QFFmpegMediaPlayer::setVideoSink(QVideoSink *sink)
+{
+ m_videoSink = sink;
+ if (m_playbackEngine)
+ m_playbackEngine->setVideoSink(sink);
+}
+
+QVideoSink *QFFmpegMediaPlayer::videoSink() const
+{
+ return m_videoSink;
+}
+
+int QFFmpegMediaPlayer::trackCount(TrackType type)
+{
+ return m_playbackEngine ? m_playbackEngine->streamInfo(type).count() : 0;
+}
+
+QMediaMetaData QFFmpegMediaPlayer::trackMetaData(TrackType type, int streamNumber)
+{
+ if (!m_playbackEngine || streamNumber < 0
+ || streamNumber >= m_playbackEngine->streamInfo(type).count())
+ return {};
+ return m_playbackEngine->streamInfo(type).at(streamNumber).metaData;
+}
+
+int QFFmpegMediaPlayer::activeTrack(TrackType type)
+{
+ return m_playbackEngine ? m_playbackEngine->activeTrack(type) : -1;
+}
+
+void QFFmpegMediaPlayer::setActiveTrack(TrackType type, int streamNumber)
+{
+ if (m_playbackEngine)
+ m_playbackEngine->setActiveTrack(type, streamNumber);
+ else
+ qWarning() << "Cannot set active track without open source";
+}
+
+void QFFmpegMediaPlayer::setLoops(int loops)
+{
+ if (m_playbackEngine)
+ m_playbackEngine->setLoops(loops);
+
+ QPlatformMediaPlayer::setLoops(loops);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qffmpegmediaplayer_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer_p.h b/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer_p.h
new file mode 100644
index 000000000..4ab5701da
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediaplayer_p.h
@@ -0,0 +1,119 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#ifndef QFFMPEGMEDIAPLAYER_H
+#define QFFMPEGMEDIAPLAYER_H
+
+#include <private/qplatformmediaplayer_p.h>
+#include <qmediametadata.h>
+#include <qtimer.h>
+#include <qpointer.h>
+#include <qfuture.h>
+#include "qffmpeg_p.h"
+#include "playbackengine/qffmpegmediadataholder_p.h"
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+class CancelToken;
+
+class PlaybackEngine;
+}
+
+class QPlatformAudioOutput;
+
+class QFFmpegMediaPlayer : public QObject, public QPlatformMediaPlayer
+{
+ Q_OBJECT
+public:
+ QFFmpegMediaPlayer(QMediaPlayer *player);
+ ~QFFmpegMediaPlayer();
+
+ qint64 duration() const override;
+
+ void setPosition(qint64 position) override;
+
+ float bufferProgress() const override;
+
+ QMediaTimeRange availablePlaybackRanges() const override;
+
+ qreal playbackRate() const override;
+ void setPlaybackRate(qreal rate) override;
+
+ QUrl media() const override;
+ const QIODevice *mediaStream() const override;
+ void setMedia(const QUrl &media, QIODevice *stream) override;
+
+ void play() override;
+ void pause() override;
+ void stop() override;
+
+ void setAudioOutput(QPlatformAudioOutput *) override;
+
+ void setAudioBufferOutput(QAudioBufferOutput *) override;
+
+ QMediaMetaData metaData() const override;
+
+ void setVideoSink(QVideoSink *sink) override;
+ QVideoSink *videoSink() const;
+
+ int trackCount(TrackType) override;
+ QMediaMetaData trackMetaData(TrackType type, int streamNumber) override;
+ int activeTrack(TrackType) override;
+ void setActiveTrack(TrackType, int streamNumber) override;
+ void setLoops(int loops) override;
+
+private:
+ void runPlayback();
+ void handleIncorrectMedia(QMediaPlayer::MediaStatus status);
+ void setMediaAsync(QFFmpeg::MediaDataHolder::Maybe mediaDataHolder,
+ const std::shared_ptr<QFFmpeg::CancelToken> &cancelToken);
+
+ void mediaStatusChanged(QMediaPlayer::MediaStatus);
+
+private slots:
+ void updatePosition();
+ void endOfStream();
+ void error(int error, const QString &errorString)
+ {
+ QPlatformMediaPlayer::error(error, errorString);
+ }
+ void onLoopChanged();
+ void onBuffered();
+
+private:
+ QTimer m_positionUpdateTimer;
+ QMediaPlayer::PlaybackState m_requestedStatus = QMediaPlayer::StoppedState;
+
+ using PlaybackEngine = QFFmpeg::PlaybackEngine;
+
+ std::unique_ptr<PlaybackEngine> m_playbackEngine;
+ QPlatformAudioOutput *m_audioOutput = nullptr;
+ QPointer<QAudioBufferOutput> m_audioBufferOutput;
+ QPointer<QVideoSink> m_videoSink;
+
+ QUrl m_url;
+ QPointer<QIODevice> m_device;
+ float m_playbackRate = 1.;
+ float m_bufferProgress = 0.f;
+ QFuture<void> m_loadMedia;
+ std::shared_ptr<QFFmpeg::CancelToken> m_cancelToken; // For interrupting ongoing
+ // network connection attempt
+};
+
+QT_END_NAMESPACE
+
+
+#endif // QFFMPEGMEDIAPLAYER_H
+
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder.cpp
new file mode 100644
index 000000000..2f9580581
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder.cpp
@@ -0,0 +1,200 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegmediarecorder_p.h"
+#include "qaudiodevice.h"
+#include <private/qmediastoragelocation_p.h>
+#include <private/qplatformcamera_p.h>
+#include <private/qplatformsurfacecapture_p.h>
+#include "qaudiosource.h"
+#include "qffmpegaudioinput_p.h"
+#include "qaudiobuffer.h"
+#include "recordingengine/qffmpegrecordingengine_p.h"
+#include "qffmpegmediacapturesession_p.h"
+
+#include <qdebug.h>
+#include <qloggingcategory.h>
+
+static Q_LOGGING_CATEGORY(qLcMediaEncoder, "qt.multimedia.ffmpeg.encoder");
+
+QT_BEGIN_NAMESPACE
+
+QFFmpegMediaRecorder::QFFmpegMediaRecorder(QMediaRecorder *parent) : QPlatformMediaRecorder(parent)
+{
+}
+
+QFFmpegMediaRecorder::~QFFmpegMediaRecorder() = default;
+
+bool QFFmpegMediaRecorder::isLocationWritable(const QUrl &) const
+{
+ return true;
+}
+
+void QFFmpegMediaRecorder::handleSessionError(QMediaRecorder::Error code, const QString &description)
+{
+ updateError(code, description);
+ stop();
+}
+
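+// Start a new recording: validate the inputs, open the output target (an
+// output device if set and writable, otherwise a file location), then set up
+// the recording engine and wire up its signals before initializing it.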
+void QFFmpegMediaRecorder::record(QMediaEncoderSettings &settings)
+{
+ if (!m_session || state() != QMediaRecorder::StoppedState)
+ return;
+
+ auto videoSources = m_session->activeVideoSources();
+ auto audioInputs = m_session->activeAudioInputs();
+ const auto hasVideo = !videoSources.empty();
+ const auto hasAudio = !audioInputs.empty();
+
+ if (!hasVideo && !hasAudio) {
+ updateError(QMediaRecorder::ResourceError, QMediaRecorder::tr("No video or audio input"));
+ return;
+ }
+
+ if (outputDevice() && !outputLocation().isEmpty())
+        qCWarning(qLcMediaEncoder)
+                << "Both outputDevice and outputLocation have been set on QMediaRecorder";
+
+ if (outputDevice() && !outputDevice()->isWritable())
+ qCWarning(qLcMediaEncoder) << "Output device has been set but not it's not writable";
+
+ QString actualLocation;
+ auto formatContext = std::make_unique<QFFmpeg::EncodingFormatContext>(settings.fileFormat());
+
+ if (outputDevice() && outputDevice()->isWritable()) {
+ formatContext->openAVIO(outputDevice());
+ } else {
+ actualLocation = findActualLocation(settings);
+ qCDebug(qLcMediaEncoder) << "recording new media to" << actualLocation;
+ formatContext->openAVIO(actualLocation);
+ }
+
+ qCDebug(qLcMediaEncoder) << "requested format:" << settings.fileFormat()
+ << settings.audioCodec();
+
+ if (!formatContext->isAVIOOpen()) {
+ updateError(QMediaRecorder::LocationNotWritable,
+ QMediaRecorder::tr("Cannot open the output location for writing"));
+ return;
+ }
+
+ m_recordingEngine.reset(new RecordingEngine(settings, std::move(formatContext)));
+ m_recordingEngine->setMetaData(m_metaData);
+
+ connect(m_recordingEngine.get(), &QFFmpeg::RecordingEngine::durationChanged, this,
+ &QFFmpegMediaRecorder::newDuration);
+ connect(m_recordingEngine.get(), &QFFmpeg::RecordingEngine::finalizationDone, this,
+ &QFFmpegMediaRecorder::finalizationDone);
+ connect(m_recordingEngine.get(), &QFFmpeg::RecordingEngine::sessionError, this,
+ &QFFmpegMediaRecorder::handleSessionError);
+
+ updateAutoStop();
+
+ auto handleStreamInitializationError = [this](QMediaRecorder::Error code,
+ const QString &description) {
+ qCWarning(qLcMediaEncoder) << "Stream initialization error:" << description;
+ updateError(code, description);
+ };
+
+ connect(m_recordingEngine.get(), &QFFmpeg::RecordingEngine::streamInitializationError, this,
+ handleStreamInitializationError);
+
+ durationChanged(0);
+ stateChanged(QMediaRecorder::RecordingState);
+ actualLocationChanged(QUrl::fromLocalFile(actualLocation));
+
+ m_recordingEngine->initialize(audioInputs, videoSources);
+}
+
+void QFFmpegMediaRecorder::pause()
+{
+ if (!m_session || state() != QMediaRecorder::RecordingState)
+ return;
+
+ Q_ASSERT(m_recordingEngine);
+ m_recordingEngine->setPaused(true);
+
+ stateChanged(QMediaRecorder::PausedState);
+}
+
+void QFFmpegMediaRecorder::resume()
+{
+ if (!m_session || state() != QMediaRecorder::PausedState)
+ return;
+
+ Q_ASSERT(m_recordingEngine);
+ m_recordingEngine->setPaused(false);
+
+ stateChanged(QMediaRecorder::RecordingState);
+}
+
+void QFFmpegMediaRecorder::stop()
+{
+ if (!m_session || state() == QMediaRecorder::StoppedState)
+ return;
+ auto * input = m_session ? m_session->audioInput() : nullptr;
+ if (input)
+ static_cast<QFFmpegAudioInput *>(input)->setRunning(false);
+ qCDebug(qLcMediaEncoder) << "stop";
+
+ m_recordingEngine.reset();
+}
+
+void QFFmpegMediaRecorder::finalizationDone()
+{
+ stateChanged(QMediaRecorder::StoppedState);
+}
+
+void QFFmpegMediaRecorder::setMetaData(const QMediaMetaData &metaData)
+{
+ if (!m_session)
+ return;
+ m_metaData = metaData;
+}
+
+QMediaMetaData QFFmpegMediaRecorder::metaData() const
+{
+ return m_metaData;
+}
+
+void QFFmpegMediaRecorder::setCaptureSession(QFFmpegMediaCaptureSession *session)
+{
+    if (m_session == session)
+        return;
+
+    if (m_session)
+        stop();
+
+    m_session = session;
+}
+
+void QFFmpegMediaRecorder::updateAutoStop()
+{
+ const bool autoStop = mediaRecorder()->autoStop();
+ if (!m_recordingEngine || m_recordingEngine->autoStop() == autoStop)
+ return;
+
+ if (autoStop)
+ connect(m_recordingEngine.get(), &QFFmpeg::RecordingEngine::autoStopped, this,
+ &QFFmpegMediaRecorder::stop);
+ else
+ disconnect(m_recordingEngine.get(), &QFFmpeg::RecordingEngine::autoStopped, this,
+ &QFFmpegMediaRecorder::stop);
+
+ m_recordingEngine->setAutoStop(autoStop);
+}
+
+void QFFmpegMediaRecorder::RecordingEngineDeleter::operator()(
+ RecordingEngine *recordingEngine) const
+{
+    // ### all of the below should be done asynchronously. finalize() should do its work in a
+    // thread to avoid blocking the UI in case of slow codecs
+ recordingEngine->finalize();
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qffmpegmediarecorder_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder_p.h b/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder_p.h
new file mode 100644
index 000000000..af3ee1509
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediarecorder_p.h
@@ -0,0 +1,73 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGMEDIARECODER_H
+#define QFFMPEGMEDIARECODER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediarecorder_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAudioSource;
+class QAudioSourceIO;
+class QAudioBuffer;
+class QMediaMetaData;
+class QFFmpegMediaCaptureSession;
+
+namespace QFFmpeg {
+class RecordingEngine;
+}
+
+class QFFmpegMediaRecorder : public QObject, public QPlatformMediaRecorder
+{
+ Q_OBJECT
+public:
+ QFFmpegMediaRecorder(QMediaRecorder *parent);
+ virtual ~QFFmpegMediaRecorder();
+
+ bool isLocationWritable(const QUrl &sink) const override;
+
+ void record(QMediaEncoderSettings &settings) override;
+ void pause() override;
+ void resume() override;
+ void stop() override;
+
+ void setMetaData(const QMediaMetaData &) override;
+ QMediaMetaData metaData() const override;
+
+ void setCaptureSession(QFFmpegMediaCaptureSession *session);
+
+ void updateAutoStop() override;
+
+private Q_SLOTS:
+ void newDuration(qint64 d) { durationChanged(d); }
+ void finalizationDone();
+ void handleSessionError(QMediaRecorder::Error code, const QString &description);
+
+private:
+ using RecordingEngine = QFFmpeg::RecordingEngine;
+ struct RecordingEngineDeleter
+ {
+ void operator()(RecordingEngine *) const;
+ };
+
+ QFFmpegMediaCaptureSession *m_session = nullptr;
+ QMediaMetaData m_metaData;
+
+ std::unique_ptr<RecordingEngine, RecordingEngineDeleter> m_recordingEngine;
+};
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGMEDIARECODER_H
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine.cpp b/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine.cpp
new file mode 100644
index 000000000..11dccb149
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine.cpp
@@ -0,0 +1,649 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qffmpegplaybackengine_p.h"
+
+#include "qvideosink.h"
+#include "qaudiooutput.h"
+#include "private/qplatformaudiooutput_p.h"
+#include "private/qplatformvideosink_p.h"
+#include "private/qaudiobufferoutput_p.h"
+#include "qiodevice.h"
+#include "playbackengine/qffmpegdemuxer_p.h"
+#include "playbackengine/qffmpegstreamdecoder_p.h"
+#include "playbackengine/qffmpegsubtitlerenderer_p.h"
+#include "playbackengine/qffmpegvideorenderer_p.h"
+#include "playbackengine/qffmpegaudiorenderer_p.h"
+
+#include <qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+static Q_LOGGING_CATEGORY(qLcPlaybackEngine, "qt.multimedia.ffmpeg.playbackengine");
+
+// The helper is needed since, on some compilers, std::unique_ptr
+// has no default constructor when sizeof(CustomDeleter) > 0
+template<typename Array>
+inline Array defaultObjectsArray()
+{
+ using T = typename Array::value_type;
+ return { T{ {}, {} }, T{ {}, {} }, T{ {}, {} } };
+}
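+// For illustration (an assumption about the affected toolchains): with a
+// stateful deleter, `std::array<ObjectPtr<T>, 3> arr{};` may fail to compile
+// there, while spelling each element out as `T{ {}, {} }`, i.e.
+// `ObjectPtr<T>{ nullptr, ObjectDeleter{} }`, always works.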
+
+// TODO: investigate what's better: profile and try the network case.
+// Most likely, shouldPauseStreams = false is better because:
+// - the packet and frame buffers are not big, so their saturation is pretty fast;
+// - after any pause the user has some preloaded buffers, so the playback is
+//   supposed to be more stable in cases of a weak processor or a bad internet connection;
+// - the code is simpler and its usage is more convenient.
+//
+static constexpr bool shouldPauseStreams = false;
+
+PlaybackEngine::PlaybackEngine()
+ : m_demuxer({}, {}),
+ m_streams(defaultObjectsArray<decltype(m_streams)>()),
+ m_renderers(defaultObjectsArray<decltype(m_renderers)>())
+{
+ qCDebug(qLcPlaybackEngine) << "Create PlaybackEngine";
+ qRegisterMetaType<QFFmpeg::Packet>();
+ qRegisterMetaType<QFFmpeg::Frame>();
+}
+
+PlaybackEngine::~PlaybackEngine() {
+ qCDebug(qLcPlaybackEngine) << "Delete PlaybackEngine";
+
+ finalizeOutputs();
+ forEachExistingObject([](auto &object) { object.reset(); });
+ deleteFreeThreads();
+}
+
+void PlaybackEngine::onRendererFinished()
+{
+ auto isAtEnd = [this](auto trackType) {
+ return !m_renderers[trackType] || m_renderers[trackType]->isAtEnd();
+ };
+
+ if (!isAtEnd(QPlatformMediaPlayer::VideoStream))
+ return;
+
+ if (!isAtEnd(QPlatformMediaPlayer::AudioStream))
+ return;
+
+ if (!isAtEnd(QPlatformMediaPlayer::SubtitleStream) && !hasMediaStream())
+ return;
+
+ if (std::exchange(m_state, QMediaPlayer::StoppedState) == QMediaPlayer::StoppedState)
+ return;
+
+ finilizeTime(duration());
+
+ forceUpdate();
+
+ qCDebug(qLcPlaybackEngine) << "Playback engine end of stream";
+
+ emit endOfStream();
+}
+
+void PlaybackEngine::onRendererLoopChanged(quint64 id, qint64 offset, int loopIndex)
+{
+ if (!hasRenderer(id))
+ return;
+
+ if (loopIndex > m_currentLoopOffset.index) {
+ m_currentLoopOffset = { offset, loopIndex };
+ emit loopChanged();
+ } else if (loopIndex == m_currentLoopOffset.index && offset != m_currentLoopOffset.pos) {
+ qWarning() << "Unexpected offset for loop" << loopIndex << ":" << offset << "vs"
+ << m_currentLoopOffset.pos;
+ m_currentLoopOffset.pos = offset;
+ }
+}
+
+void PlaybackEngine::onRendererSynchronized(quint64 id, std::chrono::steady_clock::time_point tp,
+ qint64 pos)
+{
+ if (!hasRenderer(id))
+ return;
+
+ Q_ASSERT(m_renderers[QPlatformMediaPlayer::AudioStream]
+ && m_renderers[QPlatformMediaPlayer::AudioStream]->id() == id);
+
+ m_timeController.sync(tp, pos);
+
+ forEachExistingObject<Renderer>([&](auto &renderer) {
+ if (id != renderer->id())
+ renderer->syncSoft(tp, pos);
+ });
+}
+
+void PlaybackEngine::setState(QMediaPlayer::PlaybackState state) {
+ if (!m_media.avContext())
+ return;
+
+ if (state == m_state)
+ return;
+
+ const auto prevState = std::exchange(m_state, state);
+
+ if (m_state == QMediaPlayer::StoppedState) {
+ finalizeOutputs();
+ finilizeTime(0);
+ }
+
+ if (prevState == QMediaPlayer::StoppedState || m_state == QMediaPlayer::StoppedState)
+ recreateObjects();
+
+ if (prevState == QMediaPlayer::StoppedState)
+ triggerStepIfNeeded();
+
+ updateObjectsPausedState();
+}
+
+void PlaybackEngine::updateObjectsPausedState()
+{
+ const auto paused = m_state != QMediaPlayer::PlayingState;
+ m_timeController.setPaused(paused);
+
+ forEachExistingObject([&](auto &object) {
+ bool objectPaused = false;
+
+ if constexpr (std::is_same_v<decltype(*object), Renderer &>)
+ objectPaused = paused;
+ else if constexpr (shouldPauseStreams) {
+ auto streamPaused = [](bool p, auto &r) {
+ const auto needMoreFrames = r && r->stepInProgress();
+ return p && !needMoreFrames;
+ };
+
+ if constexpr (std::is_same_v<decltype(*object), StreamDecoder &>)
+ objectPaused = streamPaused(paused, renderer(object->trackType()));
+ else
+ objectPaused = std::accumulate(m_renderers.begin(), m_renderers.end(), paused,
+ streamPaused);
+ }
+
+ object->setPaused(objectPaused);
+ });
+}
+
+void PlaybackEngine::ObjectDeleter::operator()(PlaybackEngineObject *object) const
+{
+ Q_ASSERT(engine);
+ if (!std::exchange(engine->m_threadsDirty, true))
+ QMetaObject::invokeMethod(engine, &PlaybackEngine::deleteFreeThreads, Qt::QueuedConnection);
+
+ object->kill();
+}
+
+void PlaybackEngine::registerObject(PlaybackEngineObject &object)
+{
+ connect(&object, &PlaybackEngineObject::error, this, &PlaybackEngine::errorOccured);
+
+ auto threadName = objectThreadName(object);
+ auto &thread = m_threads[threadName];
+ if (!thread) {
+ thread = std::make_unique<QThread>();
+ thread->setObjectName(threadName);
+ thread->start();
+ }
+
+ Q_ASSERT(object.thread() != thread.get());
+ object.moveToThread(thread.get());
+}
+
+PlaybackEngine::RendererPtr
+PlaybackEngine::createRenderer(QPlatformMediaPlayer::TrackType trackType)
+{
+ switch (trackType) {
+ case QPlatformMediaPlayer::VideoStream:
+ return m_videoSink
+ ? createPlaybackEngineObject<VideoRenderer>(m_timeController, m_videoSink, m_media.rotation())
+ : RendererPtr{ {}, {} };
+ case QPlatformMediaPlayer::AudioStream:
+ return m_audioOutput || m_audioBufferOutput
+ ? createPlaybackEngineObject<AudioRenderer>(m_timeController, m_audioOutput, m_audioBufferOutput)
+ : RendererPtr{ {}, {} };
+ case QPlatformMediaPlayer::SubtitleStream:
+ return m_videoSink
+ ? createPlaybackEngineObject<SubtitleRenderer>(m_timeController, m_videoSink)
+ : RendererPtr{ {}, {} };
+ default:
+ return { {}, {} };
+ }
+}
+
+template<typename C, typename Action>
+void PlaybackEngine::forEachExistingObject(Action &&action)
+{
+ auto handleNotNullObject = [&](auto &object) {
+ if constexpr (std::is_base_of_v<C, std::remove_reference_t<decltype(*object)>>)
+ if (object)
+ action(object);
+ };
+
+ handleNotNullObject(m_demuxer);
+ std::for_each(m_streams.begin(), m_streams.end(), handleNotNullObject);
+ std::for_each(m_renderers.begin(), m_renderers.end(), handleNotNullObject);
+}
+
+template<typename Action>
+void PlaybackEngine::forEachExistingObject(Action &&action)
+{
+ forEachExistingObject<PlaybackEngineObject>(std::forward<Action>(action));
+}
+
+void PlaybackEngine::seek(qint64 pos)
+{
+ pos = boundPosition(pos);
+
+ m_timeController.setPaused(true);
+ m_timeController.sync(m_currentLoopOffset.pos + pos);
+
+ forceUpdate();
+}
+
+void PlaybackEngine::setLoops(int loops)
+{
+ if (!isSeekable()) {
+ qWarning() << "Cannot set loops for non-seekable source";
+ return;
+ }
+
+    const int prevLoops = std::exchange(m_loops, loops);
+    if (prevLoops == loops)
+        return;
+
+    qCDebug(qLcPlaybackEngine) << "set playback engine loops:" << loops
+                               << "prev loops:" << prevLoops
+                               << "index:" << m_currentLoopOffset.index;
+
+ if (m_demuxer)
+ m_demuxer->setLoops(loops);
+}
+
+void PlaybackEngine::triggerStepIfNeeded()
+{
+ if (m_state != QMediaPlayer::PausedState)
+ return;
+
+ if (m_renderers[QPlatformMediaPlayer::VideoStream])
+ m_renderers[QPlatformMediaPlayer::VideoStream]->doForceStep();
+
+ // TODO: maybe trigger SubtitleStream.
+    // If we trigger it, we have to make seeking to the current subtitle frame more stable,
+    // or set some timeout for seeking.
+}
+
+QString PlaybackEngine::objectThreadName(const PlaybackEngineObject &object)
+{
+ QString result = object.metaObject()->className();
+ if (auto stream = qobject_cast<const StreamDecoder *>(&object))
+ result += QString::number(stream->trackType());
+
+ return result;
+}
+
+void PlaybackEngine::setPlaybackRate(float rate) {
+ if (rate == playbackRate())
+ return;
+
+ m_timeController.setPlaybackRate(rate);
+ forEachExistingObject<Renderer>([rate](auto &renderer) { renderer->setPlaybackRate(rate); });
+}
+
+float PlaybackEngine::playbackRate() const {
+ return m_timeController.playbackRate();
+}
+
+void PlaybackEngine::recreateObjects()
+{
+ m_timeController.setPaused(true);
+
+ forEachExistingObject([](auto &object) { object.reset(); });
+
+ createObjectsIfNeeded();
+}
+
+void PlaybackEngine::createObjectsIfNeeded()
+{
+ if (m_state == QMediaPlayer::StoppedState || !m_media.avContext())
+ return;
+
+ for (int i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i)
+ createStreamAndRenderer(static_cast<QPlatformMediaPlayer::TrackType>(i));
+
+ createDemuxer();
+}
+
+void PlaybackEngine::forceUpdate()
+{
+ recreateObjects();
+ triggerStepIfNeeded();
+ updateObjectsPausedState();
+}
+
+void PlaybackEngine::createStreamAndRenderer(QPlatformMediaPlayer::TrackType trackType)
+{
+ auto codec = codecForTrack(trackType);
+
+ auto &renderer = m_renderers[trackType];
+
+ if (!codec)
+ return;
+
+ if (!renderer) {
+ renderer = createRenderer(trackType);
+
+ if (!renderer)
+ return;
+
+ connect(renderer.get(), &Renderer::synchronized, this,
+ &PlaybackEngine::onRendererSynchronized);
+
+ connect(renderer.get(), &Renderer::loopChanged, this,
+ &PlaybackEngine::onRendererLoopChanged);
+
+ if constexpr (shouldPauseStreams)
+ connect(renderer.get(), &Renderer::forceStepDone, this,
+ &PlaybackEngine::updateObjectsPausedState);
+
+ connect(renderer.get(), &PlaybackEngineObject::atEnd, this,
+ &PlaybackEngine::onRendererFinished);
+ }
+
+ auto &stream = m_streams[trackType] =
+ createPlaybackEngineObject<StreamDecoder>(*codec, renderer->seekPosition());
+
+ Q_ASSERT(trackType == stream->trackType());
+
+ connect(stream.get(), &StreamDecoder::requestHandleFrame, renderer.get(), &Renderer::render);
+ connect(stream.get(), &PlaybackEngineObject::atEnd, renderer.get(),
+ &Renderer::onFinalFrameReceived);
+ connect(renderer.get(), &Renderer::frameProcessed, stream.get(),
+ &StreamDecoder::onFrameProcessed);
+}
+
+std::optional<Codec> PlaybackEngine::codecForTrack(QPlatformMediaPlayer::TrackType trackType)
+{
+ const auto streamIndex = m_media.currentStreamIndex(trackType);
+ if (streamIndex < 0)
+ return {};
+
+ auto &result = m_codecs[trackType];
+
+ if (!result) {
+ qCDebug(qLcPlaybackEngine)
+ << "Create codec for stream:" << streamIndex << "trackType:" << trackType;
+ auto maybeCodec =
+ Codec::create(m_media.avContext()->streams[streamIndex], m_media.avContext());
+
+ if (!maybeCodec) {
+            emit errorOccured(QMediaPlayer::FormatError,
+                              "Cannot create codec: " + maybeCodec.error());
+ return {};
+ }
+
+ result = maybeCodec.value();
+ }
+
+ return result;
+}
+
+bool PlaybackEngine::hasMediaStream() const
+{
+ return m_renderers[QPlatformMediaPlayer::AudioStream]
+ || m_renderers[QPlatformMediaPlayer::VideoStream];
+}
+
+void PlaybackEngine::createDemuxer()
+{
+ std::array<int, QPlatformMediaPlayer::NTrackTypes> streamIndexes = { -1, -1, -1 };
+
+ bool hasStreams = false;
+ forEachExistingObject<StreamDecoder>([&](auto &stream) {
+ hasStreams = true;
+ const auto trackType = stream->trackType();
+ streamIndexes[trackType] = m_media.currentStreamIndex(trackType);
+ });
+
+ if (!hasStreams)
+ return;
+
+ const PositionWithOffset positionWithOffset{ currentPosition(false), m_currentLoopOffset };
+
+ m_demuxer = createPlaybackEngineObject<Demuxer>(m_media.avContext(), positionWithOffset,
+ streamIndexes, m_loops);
+
+ connect(m_demuxer.get(), &Demuxer::packetsBuffered, this, &PlaybackEngine::buffered);
+
+ forEachExistingObject<StreamDecoder>([&](auto &stream) {
+ connect(m_demuxer.get(), Demuxer::signalByTrackType(stream->trackType()), stream.get(),
+ &StreamDecoder::decode);
+ connect(m_demuxer.get(), &PlaybackEngineObject::atEnd, stream.get(),
+ &StreamDecoder::onFinalPacketReceived);
+ connect(stream.get(), &StreamDecoder::packetProcessed, m_demuxer.get(),
+ &Demuxer::onPacketProcessed);
+ });
+
+ if (!isSeekable() || duration() <= 0) {
+ // We need initial synchronization for such streams
+ forEachExistingObject([&](auto &object) {
+ using Type = std::remove_reference_t<decltype(*object)>;
+ if constexpr (!std::is_same_v<Type, Demuxer>)
+ connect(m_demuxer.get(), &Demuxer::firstPacketFound, object.get(),
+ &Type::setInitialPosition);
+ });
+
+ auto updateTimeController = [this](TimeController::TimePoint tp, qint64 pos) {
+ m_timeController.sync(tp, pos);
+ };
+
+ connect(m_demuxer.get(), &Demuxer::firstPacketFound, this, updateTimeController);
+ }
+}
+
+void PlaybackEngine::deleteFreeThreads() {
+ m_threadsDirty = false;
+ auto freeThreads = std::move(m_threads);
+
+ forEachExistingObject([&](auto &object) {
+ m_threads.insert(freeThreads.extract(objectThreadName(*object)));
+ });
+
+ for (auto &[name, thr] : freeThreads)
+ thr->quit();
+
+ for (auto &[name, thr] : freeThreads)
+ thr->wait();
+}
+
+void PlaybackEngine::setMedia(MediaDataHolder media)
+{
+ Q_ASSERT(!m_media.avContext()); // Playback engine does not support reloading media
+ Q_ASSERT(m_state == QMediaPlayer::StoppedState);
+ Q_ASSERT(m_threads.empty());
+
+ m_media = std::move(media);
+ updateVideoSinkSize();
+}
+
+void PlaybackEngine::setVideoSink(QVideoSink *sink)
+{
+ auto prev = std::exchange(m_videoSink, sink);
+ if (prev == sink)
+ return;
+
+ updateVideoSinkSize(prev);
+ updateActiveVideoOutput(sink);
+
+ if (!sink || !prev) {
+ // might need some improvements
+ forceUpdate();
+ }
+}
+
+void PlaybackEngine::setAudioSink(QPlatformAudioOutput *output) {
+ setAudioSink(output ? output->q : nullptr);
+}
+
+void PlaybackEngine::setAudioSink(QAudioOutput *output)
+{
+ QAudioOutput *prev = std::exchange(m_audioOutput, output);
+ if (prev == output)
+ return;
+
+ updateActiveAudioOutput(output);
+
+ if (!output || !prev) {
+ // might need some improvements
+ forceUpdate();
+ }
+}
+
+void PlaybackEngine::setAudioBufferOutput(QAudioBufferOutput *output)
+{
+ QAudioBufferOutput *prev = std::exchange(m_audioBufferOutput, output);
+ if (prev == output)
+ return;
+ updateActiveAudioOutput(output);
+}
+
+qint64 PlaybackEngine::currentPosition(bool topPos) const {
+ std::optional<qint64> pos;
+
+ for (size_t i = 0; i < m_renderers.size(); ++i) {
+ const auto &renderer = m_renderers[i];
+ if (!renderer)
+ continue;
+
+        // skip the subtitle stream when looking for the lowest rendering position
+ if (!topPos && i == QPlatformMediaPlayer::SubtitleStream && hasMediaStream())
+ continue;
+
+ const auto rendererPos = renderer->lastPosition();
+ pos = !pos ? rendererPos
+ : topPos ? std::max(*pos, rendererPos)
+ : std::min(*pos, rendererPos);
+ }
+
+ if (!pos)
+ pos = m_timeController.currentPosition();
+
+ return boundPosition(*pos - m_currentLoopOffset.pos);
+}
+
+qint64 PlaybackEngine::duration() const
+{
+ return m_media.duration();
+}
+
+bool PlaybackEngine::isSeekable() const { return m_media.isSeekable(); }
+
+const QList<MediaDataHolder::StreamInfo> &
+PlaybackEngine::streamInfo(QPlatformMediaPlayer::TrackType trackType) const
+{
+ return m_media.streamInfo(trackType);
+}
+
+const QMediaMetaData &PlaybackEngine::metaData() const
+{
+ return m_media.metaData();
+}
+
+int PlaybackEngine::activeTrack(QPlatformMediaPlayer::TrackType type) const
+{
+ return m_media.activeTrack(type);
+}
+
+void PlaybackEngine::setActiveTrack(QPlatformMediaPlayer::TrackType trackType, int streamNumber)
+{
+ if (!m_media.setActiveTrack(trackType, streamNumber))
+ return;
+
+ m_codecs[trackType] = {};
+
+ m_renderers[trackType].reset();
+ m_streams = defaultObjectsArray<decltype(m_streams)>();
+ m_demuxer.reset();
+
+ updateVideoSinkSize();
+ createObjectsIfNeeded();
+ updateObjectsPausedState();
+}
+
+void PlaybackEngine::finilizeTime(qint64 pos)
+{
+ Q_ASSERT(pos >= 0 && pos <= duration());
+
+ m_timeController.setPaused(true);
+ m_timeController.sync(pos);
+ m_currentLoopOffset = {};
+}
+
+void PlaybackEngine::finalizeOutputs()
+{
+ if (m_audioBufferOutput)
+ updateActiveAudioOutput(static_cast<QAudioBufferOutput *>(nullptr));
+ if (m_audioOutput)
+ updateActiveAudioOutput(static_cast<QAudioOutput *>(nullptr));
+ updateActiveVideoOutput(nullptr, true);
+}
+
+bool PlaybackEngine::hasRenderer(quint64 id) const
+{
+ return std::any_of(m_renderers.begin(), m_renderers.end(),
+ [id](auto &renderer) { return renderer && renderer->id() == id; });
+}
+
+template <typename AudioOutput>
+void PlaybackEngine::updateActiveAudioOutput(AudioOutput *output)
+{
+ if (auto renderer =
+ qobject_cast<AudioRenderer *>(m_renderers[QPlatformMediaPlayer::AudioStream].get()))
+ renderer->setOutput(output);
+}
+
+void PlaybackEngine::updateActiveVideoOutput(QVideoSink *sink, bool cleanOutput)
+{
+ if (auto renderer = qobject_cast<SubtitleRenderer *>(
+ m_renderers[QPlatformMediaPlayer::SubtitleStream].get()))
+ renderer->setOutput(sink, cleanOutput);
+ if (auto renderer =
+ qobject_cast<VideoRenderer *>(m_renderers[QPlatformMediaPlayer::VideoStream].get()))
+ renderer->setOutput(sink, cleanOutput);
+}
+
+void PlaybackEngine::updateVideoSinkSize(QVideoSink *prevSink)
+{
+ auto platformVideoSink = m_videoSink ? m_videoSink->platformVideoSink() : nullptr;
+ if (!platformVideoSink)
+ return;
+
+ if (prevSink && prevSink->platformVideoSink())
+ platformVideoSink->setNativeSize(prevSink->platformVideoSink()->nativeSize());
+ else {
+ const auto streamIndex = m_media.currentStreamIndex(QPlatformMediaPlayer::VideoStream);
+ if (streamIndex >= 0) {
+ const auto context = m_media.avContext();
+ const auto stream = context->streams[streamIndex];
+ const AVRational pixelAspectRatio =
+ av_guess_sample_aspect_ratio(context, stream, nullptr);
+ // auto size = metaData().value(QMediaMetaData::Resolution)
+ const QSize size =
+ qCalculateFrameSize({ stream->codecpar->width, stream->codecpar->height },
+ { pixelAspectRatio.num, pixelAspectRatio.den });
+
+ platformVideoSink->setNativeSize(qRotatedFrameSize(size, m_media.rotation()));
+ }
+ }
+}
+
+qint64 PlaybackEngine::boundPosition(qint64 position) const
+{
+ position = qMax(position, 0);
+ return duration() > 0 ? qMin(position, duration()) : position;
+}
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qffmpegplaybackengine_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine_p.h b/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine_p.h
new file mode 100644
index 000000000..50c94c955
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine_p.h
@@ -0,0 +1,234 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGPLAYBACKENGINE_P_H
+#define QFFMPEGPLAYBACKENGINE_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+/* Playback engine design description.
+ *
+ *
+ * PLAYBACK ENGINE OBJECTS
+ *
+ * - The playback engine manages 7 objects, each of which works in a separate thread.
+ * Each object inherits PlaybackEngineObject. The objects are:
+ * Demuxer
+ * Stream Decoders: audio, video, subtitles
+ * Renderers: audio, video, subtitles
+ *
+ *
+ * THREADS:
+ *
+ * - By default, each object works in a separate thread. It's easy to reconfigure
+ *   the engine to run several objects in one thread.
+ * - A new thread is allocated if a new object is created and the engine doesn't
+ *   have any free threads; otherwise, a free thread is reused.
+ * - If all the objects of some thread are deleted, the thread becomes free and the
+ *   engine postpones its termination (see deleteFreeThreads()).
+ *
+ * OBJECTS WEAK CONNECTIVITY
+ *
+ * - The objects know nothing about each other or about the PlaybackEngine.
+ *   For any interaction the objects use signals and slots.
+ *
+ * - The PlaybackEngine knows the objects, and is able to create/delete them and
+ *   call their public methods.
+ *
+ */
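+
+/* A condensed sketch of the wiring (taken from createStreamAndRenderer() and
+ * createDemuxer() in qffmpegplaybackengine.cpp):
+ *
+ *     connect(stream.get(), &StreamDecoder::requestHandleFrame,
+ *             renderer.get(), &Renderer::render);   // crosses threads, queued
+ *     connect(renderer.get(), &Renderer::frameProcessed,
+ *             stream.get(), &StreamDecoder::onFrameProcessed);
+ *
+ * i.e. the engine wires the objects together; the objects themselves never
+ * hold pointers to each other.
+ */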
+
+#include "playbackengine/qffmpegplaybackenginedefs_p.h"
+#include "playbackengine/qffmpegtimecontroller_p.h"
+#include "playbackengine/qffmpegmediadataholder_p.h"
+#include "playbackengine/qffmpegcodec_p.h"
+#include "playbackengine/qffmpegpositionwithoffset_p.h"
+
+#include <QtCore/qpointer.h>
+
+#include <unordered_map>
+
+QT_BEGIN_NAMESPACE
+
+class QAudioSink;
+class QVideoSink;
+class QAudioOutput;
+class QAudioBufferOutput;
+class QFFmpegMediaPlayer;
+
+namespace QFFmpeg
+{
+
+class PlaybackEngine : public QObject
+{
+ Q_OBJECT
+public:
+ PlaybackEngine();
+
+ ~PlaybackEngine() override;
+
+ void setMedia(MediaDataHolder media);
+
+ void setVideoSink(QVideoSink *sink);
+
+ void setAudioSink(QAudioOutput *output);
+
+ void setAudioSink(QPlatformAudioOutput *output);
+
+ void setAudioBufferOutput(QAudioBufferOutput *output);
+
+ void setState(QMediaPlayer::PlaybackState state);
+
+ void play() {
+ setState(QMediaPlayer::PlayingState);
+ }
+ void pause() {
+ setState(QMediaPlayer::PausedState);
+ }
+ void stop() {
+ setState(QMediaPlayer::StoppedState);
+ }
+
+ void seek(qint64 pos);
+
+ void setLoops(int loopsCount);
+
+ void setPlaybackRate(float rate);
+
+ float playbackRate() const;
+
+ void setActiveTrack(QPlatformMediaPlayer::TrackType type, int streamNumber);
+
+ qint64 currentPosition(bool topPos = true) const;
+
+ qint64 duration() const;
+
+ bool isSeekable() const;
+
+ const QList<MediaDataHolder::StreamInfo> &
+ streamInfo(QPlatformMediaPlayer::TrackType trackType) const;
+
+ const QMediaMetaData &metaData() const;
+
+ int activeTrack(QPlatformMediaPlayer::TrackType type) const;
+
+signals:
+ void endOfStream();
+ void errorOccured(int, const QString &);
+ void loopChanged();
+ void buffered();
+
+protected: // objects managing
+ struct ObjectDeleter
+ {
+ void operator()(PlaybackEngineObject *) const;
+
+ PlaybackEngine *engine = nullptr;
+ };
+
+ template<typename T>
+ using ObjectPtr = std::unique_ptr<T, ObjectDeleter>;
+
+ using RendererPtr = ObjectPtr<Renderer>;
+ using StreamPtr = ObjectPtr<StreamDecoder>;
+
+ template<typename T, typename... Args>
+ ObjectPtr<T> createPlaybackEngineObject(Args &&...args);
+
+ virtual RendererPtr createRenderer(QPlatformMediaPlayer::TrackType trackType);
+
+ template <typename AudioOutput>
+ void updateActiveAudioOutput(AudioOutput *output);
+
+ void updateActiveVideoOutput(QVideoSink *sink, bool cleanOutput = false);
+
+private:
+ void createStreamAndRenderer(QPlatformMediaPlayer::TrackType trackType);
+
+ void createDemuxer();
+
+ void registerObject(PlaybackEngineObject &object);
+
+ template<typename C, typename Action>
+ void forEachExistingObject(Action &&action);
+
+ template<typename Action>
+ void forEachExistingObject(Action &&action);
+
+ void forceUpdate();
+
+ void recreateObjects();
+
+ void createObjectsIfNeeded();
+
+ void updateObjectsPausedState();
+
+ void deleteFreeThreads();
+
+ void onRendererSynchronized(quint64 id, std::chrono::steady_clock::time_point time,
+ qint64 trackTime);
+
+ void onRendererFinished();
+
+ void onRendererLoopChanged(quint64 id, qint64 offset, int loopIndex);
+
+ void triggerStepIfNeeded();
+
+ static QString objectThreadName(const PlaybackEngineObject &object);
+
+ std::optional<Codec> codecForTrack(QPlatformMediaPlayer::TrackType trackType);
+
+ bool hasMediaStream() const;
+
+ void finilizeTime(qint64 pos);
+
+ void finalizeOutputs();
+
+ bool hasRenderer(quint64 id) const;
+
+ void updateVideoSinkSize(QVideoSink *prevSink = nullptr);
+
+ qint64 boundPosition(qint64 position) const;
+
+private:
+ MediaDataHolder m_media;
+
+ TimeController m_timeController;
+
+ std::unordered_map<QString, std::unique_ptr<QThread>> m_threads;
+ bool m_threadsDirty = false;
+
+ QPointer<QVideoSink> m_videoSink;
+ QPointer<QAudioOutput> m_audioOutput;
+ QPointer<QAudioBufferOutput> m_audioBufferOutput;
+
+ QMediaPlayer::PlaybackState m_state = QMediaPlayer::StoppedState;
+
+ ObjectPtr<Demuxer> m_demuxer;
+ std::array<StreamPtr, QPlatformMediaPlayer::NTrackTypes> m_streams;
+ std::array<RendererPtr, QPlatformMediaPlayer::NTrackTypes> m_renderers;
+
+ std::array<std::optional<Codec>, QPlatformMediaPlayer::NTrackTypes> m_codecs;
+ int m_loops = QMediaPlayer::Once;
+ LoopOffset m_currentLoopOffset;
+};
+
+template<typename T, typename... Args>
+PlaybackEngine::ObjectPtr<T> PlaybackEngine::createPlaybackEngineObject(Args &&...args)
+{
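+    // The deleter keeps a back-pointer to the engine so that destroying an
+    // object can schedule deleteFreeThreads() (see ObjectDeleter::operator()).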
+ auto result = ObjectPtr<T>(new T(std::forward<Args>(args)...), { this });
+ registerObject(*result);
+ return result;
+}
+}
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGPLAYBACKENGINE_P_H
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegresampler.cpp b/src/plugins/multimedia/ffmpeg/qffmpegresampler.cpp
new file mode 100644
index 000000000..141a6ade2
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegresampler.cpp
@@ -0,0 +1,112 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qffmpegresampler_p.h"
+#include "playbackengine/qffmpegcodec_p.h"
+#include "qffmpegmediaformatinfo_p.h"
+#include <qloggingcategory.h>
+
+static Q_LOGGING_CATEGORY(qLcResampler, "qt.multimedia.ffmpeg.resampler")
+
+QT_BEGIN_NAMESPACE
+
+using namespace QFFmpeg;
+
+QFFmpegResampler::QFFmpegResampler(const QAudioFormat &inputFormat, const QAudioFormat &outputFormat) :
+ m_inputFormat(inputFormat), m_outputFormat(outputFormat)
+{
+ Q_ASSERT(inputFormat.isValid());
+ Q_ASSERT(outputFormat.isValid());
+
+ m_resampler =
+ createResampleContext(AVAudioFormat(m_inputFormat), AVAudioFormat(m_outputFormat));
+}
+
+QFFmpegResampler::QFFmpegResampler(const Codec *codec, const QAudioFormat &outputFormat,
+ qint64 startTime)
+ : m_outputFormat(outputFormat), m_startTime(startTime)
+{
+ Q_ASSERT(codec);
+
+ qCDebug(qLcResampler) << "createResampler";
+ const AVStream *audioStream = codec->stream();
+
+ if (!m_outputFormat.isValid())
+ // want the native format
+ m_outputFormat = QFFmpegMediaFormatInfo::audioFormatFromCodecParameters(audioStream->codecpar);
+
+ m_resampler = createResampleContext(AVAudioFormat(audioStream->codecpar),
+ AVAudioFormat(m_outputFormat));
+}
+
+QFFmpegResampler::~QFFmpegResampler() = default;
+
+QAudioBuffer QFFmpegResampler::resample(const char* data, size_t size)
+{
+ if (!m_inputFormat.isValid())
+ return {};
+
+ return resample(reinterpret_cast<const uint8_t **>(&data),
+ m_inputFormat.framesForBytes(static_cast<qint32>(size)));
+}
+
+QAudioBuffer QFFmpegResampler::resample(const AVFrame *frame)
+{
+ return resample(const_cast<const uint8_t **>(frame->extended_data), frame->nb_samples);
+}
+
+QAudioBuffer QFFmpegResampler::resample(const uint8_t **inputData, int inputSamplesCount)
+{
+ const int maxOutSamples = adjustMaxOutSamples(inputSamplesCount);
+
+ QByteArray samples(m_outputFormat.bytesForFrames(maxOutSamples), Qt::Uninitialized);
+ auto *out = reinterpret_cast<uint8_t *>(samples.data());
+ const int outSamples =
+ swr_convert(m_resampler.get(), &out, maxOutSamples, inputData, inputSamplesCount);
+
+ samples.resize(m_outputFormat.bytesForFrames(outSamples));
+
+ const qint64 startTime = m_outputFormat.durationForFrames(m_samplesProcessed) + m_startTime;
+ m_samplesProcessed += outSamples;
+
+ qCDebug(qLcResampler) << " new frame" << startTime << "in_samples" << inputSamplesCount
+ << outSamples << maxOutSamples;
+ return QAudioBuffer(samples, m_outputFormat, startTime);
+}
+
+int QFFmpegResampler::adjustMaxOutSamples(int inputSamplesCount)
+{
+ int maxOutSamples = swr_get_out_samples(m_resampler.get(), inputSamplesCount);
+
+ const auto remainingCompensationDistance = m_endCompensationSample - m_samplesProcessed;
+
+ if (remainingCompensationDistance > 0 && maxOutSamples > remainingCompensationDistance) {
+        // If the remaining compensation distance is less than the output frame,
+        // the ffmpeg resampler buffers the rest of the frames, which causes
+        // unexpected delays on large frames.
+        // The hack might cause some compensation bias on large frames;
+        // however, in practice it's not significant for our logic.
+        // TODO: probably, it will need some improvements
+ setSampleCompensation(0, 0);
+ maxOutSamples = swr_get_out_samples(m_resampler.get(), inputSamplesCount);
+ }
+
+ return maxOutSamples;
+}
+
+void QFFmpegResampler::setSampleCompensation(qint32 delta, quint32 distance)
+{
+ const int res = swr_set_compensation(m_resampler.get(), delta, static_cast<int>(distance));
+ if (res < 0)
+ qCWarning(qLcResampler) << "swr_set_compensation fail:" << res;
+ else {
+ m_sampleCompensationDelta = delta;
+ m_endCompensationSample = m_samplesProcessed + distance;
+ }
+}
+
+qint32 QFFmpegResampler::activeSampleCompensationDelta() const
+{
+ return m_samplesProcessed < m_endCompensationSample ? m_sampleCompensationDelta : 0;
+}
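+
+// A minimal usage sketch (illustrative, not part of this patch): converting raw
+// PCM between two formats; `bytes` stands for any QByteArray of input samples.
+//
+//     QAudioFormat in, out;
+//     in.setSampleRate(44100);
+//     in.setChannelCount(2);
+//     in.setSampleFormat(QAudioFormat::Int16);
+//     out = in;
+//     out.setSampleRate(48000);
+//
+//     QFFmpegResampler resampler(in, out);
+//     QAudioBuffer converted = resampler.resample(bytes.constData(), bytes.size());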
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegresampler_p.h b/src/plugins/multimedia/ffmpeg/qffmpegresampler_p.h
new file mode 100644
index 000000000..530f40aa2
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegresampler_p.h
@@ -0,0 +1,62 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGRESAMPLER_P_H
+#define QFFMPEGRESAMPLER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qaudiobuffer.h"
+#include "qffmpeg_p.h"
+#include "private/qplatformaudioresampler_p.h"
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg
+{
+class Codec;
+}
+
+class QFFmpegResampler : public QPlatformAudioResampler
+{
+public:
+ QFFmpegResampler(const QAudioFormat &inputFormat, const QAudioFormat &outputFormat);
+ QFFmpegResampler(const QFFmpeg::Codec *codec, const QAudioFormat &outputFormat,
+ qint64 startTime = 0);
+
+ ~QFFmpegResampler() override;
+
+ QAudioBuffer resample(const char* data, size_t size) override;
+
+ QAudioBuffer resample(const AVFrame *frame);
+
+ qint64 samplesProcessed() const { return m_samplesProcessed; }
+ void setSampleCompensation(qint32 delta, quint32 distance);
+ qint32 activeSampleCompensationDelta() const;
+
+private:
+ int adjustMaxOutSamples(int inputSamplesCount);
+
+ QAudioBuffer resample(const uint8_t **inputData, int inputSamplesCount);
+
+private:
+ QAudioFormat m_inputFormat;
+ QAudioFormat m_outputFormat;
+ qint64 m_startTime = 0;
+ QFFmpeg::SwrContextUPtr m_resampler;
+ qint64 m_samplesProcessed = 0;
+ qint64 m_endCompensationSample = std::numeric_limits<qint64>::min();
+ qint32 m_sampleCompensationDelta = 0;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegscreencapture_dxgi.cpp b/src/plugins/multimedia/ffmpeg/qffmpegscreencapture_dxgi.cpp
new file mode 100644
index 000000000..feab39697
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegscreencapture_dxgi.cpp
@@ -0,0 +1,467 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegscreencapture_dxgi_p.h"
+#include "qffmpegsurfacecapturegrabber_p.h"
+#include "qabstractvideobuffer.h"
+#include <private/qmultimediautils_p.h>
+#include <private/qwindowsmultimediautils_p.h>
+#include <private/qvideoframe_p.h>
+#include <qtgui/qscreen_platform.h>
+#include "qvideoframe.h"
+
+#include <qloggingcategory.h>
+#include <qwaitcondition.h>
+#include <qmutex.h>
+
+#include "D3d11.h"
+#include "dxgi1_2.h"
+
+#include <system_error>
+#include <thread>
+#include <chrono>
+
+#include <mutex> // std::scoped_lock
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcScreenCaptureDxgi, "qt.multimedia.ffmpeg.screencapturedxgi")
+
+using namespace std::chrono;
+using namespace QWindowsMultimediaUtils;
+using namespace Qt::StringLiterals;
+
+namespace {
+
+// Convenience wrapper that combines an HRESULT
+// status code with an optional textual description.
+class ComStatus
+{
+public:
+ ComStatus() = default;
+ ComStatus(HRESULT hr) : m_hr{ hr } { }
+ ComStatus(HRESULT hr, QAnyStringView msg) : m_hr{ hr }, m_msg{ msg.toString() } { }
+
+ ComStatus(const ComStatus &) = default;
+ ComStatus(ComStatus &&) = default;
+ ComStatus &operator=(const ComStatus &) = default;
+ ComStatus &operator=(ComStatus &&) = default;
+
+ explicit operator bool() const { return m_hr == S_OK; }
+
+ HRESULT code() const { return m_hr; }
+ QString str() const
+ {
+ if (!m_msg)
+ return errorString(m_hr);
+ return *m_msg + " " + errorString(m_hr);
+ }
+
+private:
+ HRESULT m_hr = S_OK;
+ std::optional<QString> m_msg;
+};
+
+template <typename T>
+using ComProduct = QMaybe<ComPtr<T>, ComStatus>;
+
+}
+
+class QD3D11TextureVideoBuffer : public QAbstractVideoBuffer
+{
+public:
+ QD3D11TextureVideoBuffer(const ComPtr<ID3D11Device> &device, std::shared_ptr<QMutex> &mutex,
+ const ComPtr<ID3D11Texture2D> &texture, QSize size)
+ : m_device(device), m_texture(texture), m_ctxMutex(mutex), m_size(size)
+ {}
+
+ ~QD3D11TextureVideoBuffer()
+ {
+ QD3D11TextureVideoBuffer::unmap();
+ }
+
+ MapData map(QtVideo::MapMode mode) override
+ {
+ MapData mapData;
+ if (!m_ctx && mode == QtVideo::MapMode::ReadOnly) {
+ D3D11_TEXTURE2D_DESC texDesc = {};
+ m_texture->GetDesc(&texDesc);
+ texDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
+ texDesc.Usage = D3D11_USAGE_STAGING;
+ texDesc.MiscFlags = 0;
+ texDesc.BindFlags = 0;
+
+ HRESULT hr = m_device->CreateTexture2D(&texDesc, nullptr, m_cpuTexture.GetAddressOf());
+ if (FAILED(hr)) {
+ qCDebug(qLcScreenCaptureDxgi) << "Failed to create texture with CPU access"
+ << std::system_category().message(hr).c_str();
+ qCDebug(qLcScreenCaptureDxgi) << m_device->GetDeviceRemovedReason();
+ return {};
+ }
+
+ m_device->GetImmediateContext(m_ctx.GetAddressOf());
+ m_ctxMutex->lock();
+ m_ctx->CopyResource(m_cpuTexture.Get(), m_texture.Get());
+
+ D3D11_MAPPED_SUBRESOURCE resource = {};
+ hr = m_ctx->Map(m_cpuTexture.Get(), 0, D3D11_MAP_READ, 0, &resource);
+ m_ctxMutex->unlock();
+ if (FAILED(hr)) {
+ qCDebug(qLcScreenCaptureDxgi) << "Failed to map texture" << m_cpuTexture.Get()
+ << std::system_category().message(hr).c_str();
+ return {};
+ }
+
+ m_mapMode = mode;
+ mapData.planeCount = 1;
+ mapData.bytesPerLine[0] = int(resource.RowPitch);
+ mapData.data[0] = reinterpret_cast<uchar*>(resource.pData);
+ mapData.dataSize[0] = m_size.height() * int(resource.RowPitch);
+ }
+
+ return mapData;
+ }
+
+ void unmap() override
+ {
+ if (m_mapMode == QtVideo::MapMode::NotMapped)
+ return;
+ if (m_ctx) {
+ m_ctxMutex->lock();
+ m_ctx->Unmap(m_cpuTexture.Get(), 0);
+ m_ctxMutex->unlock();
+ m_ctx.Reset();
+ }
+ m_cpuTexture.Reset();
+ m_mapMode = QtVideo::MapMode::NotMapped;
+ }
+
+ QVideoFrameFormat format() const override { return {}; }
+
+ QSize getSize() const
+ {
+ if (!m_texture)
+ return {};
+
+ D3D11_TEXTURE2D_DESC desc{};
+ m_texture->GetDesc(&desc);
+
+ return { static_cast<int>(desc.Width), static_cast<int>(desc.Height) };
+ }
+
+private:
+ ComPtr<ID3D11Device> m_device;
+ ComPtr<ID3D11Texture2D> m_texture;
+ ComPtr<ID3D11Texture2D> m_cpuTexture;
+ ComPtr<ID3D11DeviceContext> m_ctx;
+ std::shared_ptr<QMutex> m_ctxMutex;
+ QSize m_size;
+ QtVideo::MapMode m_mapMode = QtVideo::MapMode::NotMapped;
+};
+
+namespace {
+class DxgiDuplication
+{
+ struct DxgiScreen
+ {
+ ComPtr<IDXGIAdapter1> adapter;
+ ComPtr<IDXGIOutput> output;
+ };
+
+public:
+ ~DxgiDuplication()
+ {
+ if (m_releaseFrame)
+ m_dup->ReleaseFrame();
+ }
+
+ ComStatus initialize(QScreen const *screen)
+ {
+ const QMaybe<DxgiScreen, ComStatus> dxgiScreen = findDxgiScreen(screen);
+ if (!dxgiScreen)
+ return dxgiScreen.error();
+
+ const ComPtr<IDXGIAdapter1> adapter = dxgiScreen->adapter;
+
+ ComPtr<ID3D11Device> d3d11dev;
+ HRESULT hr =
+ D3D11CreateDevice(adapter.Get(), D3D_DRIVER_TYPE_UNKNOWN, nullptr, 0, nullptr, 0,
+ D3D11_SDK_VERSION, d3d11dev.GetAddressOf(), nullptr, nullptr);
+ if (FAILED(hr))
+ return { hr, "Failed to create ID3D11Device device"_L1 };
+
+ ComPtr<IDXGIOutput1> output;
+ hr = dxgiScreen->output.As(&output);
+ if (FAILED(hr))
+ return { hr, "Failed to create IDXGIOutput1"_L1 };
+
+ ComPtr<IDXGIOutputDuplication> dup;
+ hr = output->DuplicateOutput(d3d11dev.Get(), dup.GetAddressOf());
+ if (FAILED(hr))
+ return { hr, "Failed to duplicate IDXGIOutput1"_L1 };
+
+ m_adapter = dxgiScreen->adapter;
+ m_output = output;
+ m_device = d3d11dev;
+ m_dup = dup;
+ return { S_OK };
+ }
+
+ bool valid() const { return m_dup != nullptr; }
+
+ QSize getFrameSize() const
+ {
+ DXGI_OUTDUPL_DESC outputDesc = {};
+ m_dup->GetDesc(&outputDesc);
+
+ return { static_cast<int>(outputDesc.ModeDesc.Width),
+ static_cast<int>(outputDesc.ModeDesc.Height) };
+ }
+
+ QMaybe<std::unique_ptr<QD3D11TextureVideoBuffer>, ComStatus> getNextVideoFrame()
+ {
+ const ComProduct<ID3D11Texture2D> texture = getNextFrame();
+
+ if (!texture)
+ return texture.error();
+
+ return std::make_unique<QD3D11TextureVideoBuffer>(m_device, m_ctxMutex, *texture,
+ getFrameSize());
+ }
+
+private:
+ ComProduct<ID3D11Texture2D> getNextFrame()
+ {
+ std::scoped_lock guard{ *m_ctxMutex };
+
+ if (m_releaseFrame) {
+ m_releaseFrame = false;
+
+ HRESULT hr = m_dup->ReleaseFrame();
+
+ if (hr != S_OK)
+ return ComStatus{ hr, "Failed to release duplication frame."_L1 };
+ }
+
+ ComPtr<IDXGIResource> frame;
+ DXGI_OUTDUPL_FRAME_INFO info;
+
+ HRESULT hr = m_dup->AcquireNextFrame(0, &info, frame.GetAddressOf());
+
+ if (hr != S_OK)
+ return { unexpect, hr, "Failed to grab the screen content"_L1 };
+
+ m_releaseFrame = true;
+
+ ComPtr<ID3D11Texture2D> tex;
+ hr = frame.As(&tex);
+ if (hr != S_OK)
+ return { unexpect, hr, "Failed to obtain D3D11 texture"_L1 };
+
+ D3D11_TEXTURE2D_DESC texDesc = {};
+ tex->GetDesc(&texDesc);
+ texDesc.MiscFlags = 0;
+ texDesc.BindFlags = 0;
+
+ ComPtr<ID3D11Texture2D> texCopy;
+ hr = m_device->CreateTexture2D(&texDesc, nullptr, texCopy.GetAddressOf());
+ if (hr != S_OK)
+ return { unexpect, hr, "Failed to create texture with CPU access"_L1 };
+
+ ComPtr<ID3D11DeviceContext> ctx;
+ m_device->GetImmediateContext(ctx.GetAddressOf());
+ ctx->CopyResource(texCopy.Get(), tex.Get());
+
+ return texCopy;
+ }
+
+ static QMaybe<DxgiScreen, ComStatus> findDxgiScreen(const QScreen *screen)
+ {
+ if (!screen)
+ return { unexpect, E_FAIL, "Cannot find nullptr screen"_L1 };
+
+ auto *winScreen = screen->nativeInterface<QNativeInterface::QWindowsScreen>();
+ HMONITOR handle = winScreen ? winScreen->handle() : nullptr;
+
+ ComPtr<IDXGIFactory1> factory;
+ HRESULT hr = CreateDXGIFactory1(IID_PPV_ARGS(&factory));
+ if (FAILED(hr))
+ return { unexpect, hr, "Failed to create IDXGIFactory"_L1 };
+
+ ComPtr<IDXGIAdapter1> adapter;
+ for (quint32 i = 0; factory->EnumAdapters1(i, adapter.ReleaseAndGetAddressOf()) == S_OK; i++) {
+ ComPtr<IDXGIOutput> output;
+ for (quint32 j = 0; adapter->EnumOutputs(j, output.ReleaseAndGetAddressOf()) == S_OK; ++j) {
+ DXGI_OUTPUT_DESC desc = {};
+ output->GetDesc(&desc);
+ qCDebug(qLcScreenCaptureDxgi) << i << j << QString::fromWCharArray(desc.DeviceName);
+ auto match = handle ? handle == desc.Monitor
+ : QString::fromWCharArray(desc.DeviceName) == screen->name();
+ if (match)
+ return DxgiScreen{ adapter, output };
+ }
+ }
+ return { unexpect, DXGI_ERROR_NOT_FOUND,
+ "Could not find screen adapter "_L1 + screen->name() };
+ }
+
+ ComPtr<IDXGIAdapter1> m_adapter;
+ ComPtr<IDXGIOutput> m_output;
+ ComPtr<ID3D11Device> m_device;
+ ComPtr<IDXGIOutputDuplication> m_dup;
+ bool m_releaseFrame = false;
+ std::shared_ptr<QMutex> m_ctxMutex = std::make_shared<QMutex>();
+};
+
+QSize getPhysicalSizePixels(const QScreen *screen)
+{
+ const auto *winScreen = screen->nativeInterface<QNativeInterface::QWindowsScreen>();
+ if (!winScreen)
+ return {};
+
+ const HMONITOR handle = winScreen->handle();
+ if (!handle)
+ return {};
+
+ MONITORINFO info{};
+ info.cbSize = sizeof(info);
+
+ if (!GetMonitorInfoW(handle, &info))
+ return {};
+
+ return { info.rcMonitor.right - info.rcMonitor.left,
+ info.rcMonitor.bottom - info.rcMonitor.top };
+}
+
+QVideoFrameFormat getFrameFormat(QScreen* screen)
+{
+ const QSize screenSize = getPhysicalSizePixels(screen);
+
+ QVideoFrameFormat format = { screenSize, QVideoFrameFormat::Format_BGRA8888 };
+ format.setStreamFrameRate(static_cast<int>(screen->refreshRate()));
+
+ return format;
+}
+
+} // namespace
+
+class QFFmpegScreenCaptureDxgi::Grabber : public QFFmpegSurfaceCaptureGrabber
+{
+public:
+ Grabber(QFFmpegScreenCaptureDxgi &screenCapture, QScreen *screen,
+ const QVideoFrameFormat &format)
+ : m_screen(screen)
+ , m_format(format)
+ {
+ setFrameRate(screen->refreshRate());
+ addFrameCallback(screenCapture, &QFFmpegScreenCaptureDxgi::newVideoFrame);
+ connect(this, &Grabber::errorUpdated, &screenCapture, &QFFmpegScreenCaptureDxgi::updateError);
+ }
+
+ ~Grabber() {
+ stop();
+ }
+
+ QVideoFrameFormat format() {
+ return m_format;
+ }
+
+ QVideoFrame grabFrame() override
+ {
+ QVideoFrame frame;
+ if (!m_duplication.valid()) {
+ const ComStatus status = m_duplication.initialize(m_screen);
+ if (!status) {
+ if (status.code() == E_ACCESSDENIED) {
+ // May occur for some time after pushing Ctrl+Alt+Del.
+ updateError(QPlatformSurfaceCapture::NoError, status.str());
+ qCWarning(qLcScreenCaptureDxgi) << status.str();
+ }
+ return frame;
+ }
+ }
+
+ auto maybeBuf = m_duplication.getNextVideoFrame();
+ const ComStatus &status = maybeBuf.error();
+
+ if (status.code() == DXGI_ERROR_WAIT_TIMEOUT) {
+ // All is good, we just didn't get a new frame yet
+ updateError(QPlatformSurfaceCapture::NoError, status.str());
+ } else if (status.code() == DXGI_ERROR_ACCESS_LOST) {
+ // Can happen for example when pushing Ctrl + Alt + Del
+ m_duplication = {};
+ updateError(QPlatformSurfaceCapture::NoError, status.str());
+ qCWarning(qLcScreenCaptureDxgi) << status.str();
+ } else if (!status) {
+ updateError(QPlatformSurfaceCapture::CaptureFailed, status.str());
+ qCWarning(qLcScreenCaptureDxgi) << status.str();
+ } else if (maybeBuf) {
+ std::unique_ptr<QD3D11TextureVideoBuffer> buffer = std::move(*maybeBuf);
+
+ const QSize bufSize = buffer->getSize();
+ if (bufSize != m_format.frameSize())
+ m_format.setFrameSize(bufSize);
+
+ frame = QVideoFramePrivate::createFrame(std::move(buffer), format());
+ }
+
+ return frame;
+ }
+
+ protected:
+ void initializeGrabbingContext() override
+ {
+ m_duplication = DxgiDuplication();
+ const ComStatus status = m_duplication.initialize(m_screen);
+ if (!status) {
+ updateError(CaptureFailed, status.str());
+ return;
+ }
+
+ QFFmpegSurfaceCaptureGrabber::initializeGrabbingContext();
+ }
+
+private:
+ const QScreen *m_screen = nullptr;
+ QVideoFrameFormat m_format;
+ DxgiDuplication m_duplication;
+};
+
+QFFmpegScreenCaptureDxgi::QFFmpegScreenCaptureDxgi() : QPlatformSurfaceCapture(ScreenSource{}) { }
+
+QFFmpegScreenCaptureDxgi::~QFFmpegScreenCaptureDxgi() = default;
+
+QVideoFrameFormat QFFmpegScreenCaptureDxgi::frameFormat() const
+{
+ if (m_grabber)
+ return m_grabber->format();
+ return {};
+}
+
+bool QFFmpegScreenCaptureDxgi::setActiveInternal(bool active)
+{
+ if (static_cast<bool>(m_grabber) == active)
+ return true;
+
+ if (m_grabber) {
+ m_grabber.reset();
+ } else {
+ auto screen = source<ScreenSource>();
+
+ if (!checkScreenWithError(screen))
+ return false;
+
+ const QVideoFrameFormat format = getFrameFormat(screen);
+ if (!format.isValid()) {
+ updateError(NotFound, QLatin1String("Unable to determine screen size or format"));
+ return false;
+ }
+
+ m_grabber.reset(new Grabber(*this, screen, format));
+ m_grabber->start();
+ }
+
+ return true;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegscreencapture_dxgi_p.h b/src/plugins/multimedia/ffmpeg/qffmpegscreencapture_dxgi_p.h
new file mode 100644
index 000000000..8f866b135
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegscreencapture_dxgi_p.h
@@ -0,0 +1,44 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGSCREENCAPTURE_WINDOWS_H
+#define QFFMPEGSCREENCAPTURE_WINDOWS_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qvideoframeformat.h"
+#include <private/qcomptr_p.h>
+#include <private/qplatformsurfacecapture_p.h>
+#include <memory>
+
+QT_BEGIN_NAMESPACE
+
+class QFFmpegScreenCaptureDxgi : public QPlatformSurfaceCapture
+{
+public:
+ explicit QFFmpegScreenCaptureDxgi();
+
+ ~QFFmpegScreenCaptureDxgi() override;
+
+ QVideoFrameFormat frameFormat() const override;
+
+private:
+ bool setActiveInternal(bool active) override;
+
+private:
+ class Grabber;
+ std::unique_ptr<Grabber> m_grabber;
+};
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGSCREENCAPTURE_WINDOWS_H
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegsurfacecapturegrabber.cpp b/src/plugins/multimedia/ffmpeg/qffmpegsurfacecapturegrabber.cpp
new file mode 100644
index 000000000..f708f5021
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegsurfacecapturegrabber.cpp
@@ -0,0 +1,202 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegsurfacecapturegrabber_p.h"
+
+#include <qelapsedtimer.h>
+#include <qloggingcategory.h>
+#include <qthread.h>
+#include <qtimer.h>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcScreenCaptureGrabber, "qt.multimedia.ffmpeg.surfacecapturegrabber");
+
+namespace {
+
+class GrabbingProfiler
+{
+public:
+ auto measure()
+ {
+ m_elapsedTimer.start();
+ return qScopeGuard([&]() {
+ const auto nsecsElapsed = m_elapsedTimer.nsecsElapsed();
+ ++m_number;
+ m_wholeTime += nsecsElapsed;
+
+#ifdef DUMP_SCREEN_CAPTURE_PROFILING
+ qDebug() << "screen grabbing time:" << nsecsElapsed << "avg:" << avgTime()
+ << "number:" << m_number;
+#endif
+ });
+ }
+
+ qreal avgTime() const
+ {
+ return m_number ? m_wholeTime / (m_number * 1000000.) : 0.;
+ }
+
+ qint64 number() const
+ {
+ return m_number;
+ }
+
+private:
+ QElapsedTimer m_elapsedTimer;
+ qint64 m_wholeTime = 0;
+ qint64 m_number = 0;
+};
+
+} // namespace
+
+struct QFFmpegSurfaceCaptureGrabber::GrabbingContext
+{
+ GrabbingProfiler profiler;
+ QTimer timer;
+ QElapsedTimer elapsedTimer;
+ qint64 lastFrameTime = 0;
+};
+
+class QFFmpegSurfaceCaptureGrabber::GrabbingThread : public QThread
+{
+public:
+ GrabbingThread(QFFmpegSurfaceCaptureGrabber& grabber)
+ : m_grabber(grabber)
+ {}
+
+protected:
+ void run() override
+ {
+ m_grabber.initializeGrabbingContext();
+
+ if (!m_grabber.isGrabbingContextInitialized())
+ return;
+
+ exec();
+ m_grabber.finalizeGrabbingContext();
+ }
+
+private:
+ QFFmpegSurfaceCaptureGrabber& m_grabber;
+};
+
+QFFmpegSurfaceCaptureGrabber::QFFmpegSurfaceCaptureGrabber(ThreadPolicy threadPolicy)
+{
+ setFrameRate(DefaultScreenCaptureFrameRate);
+
+ if (threadPolicy == CreateGrabbingThread)
+ m_thread = std::make_unique<GrabbingThread>(*this);
+}
+
+void QFFmpegSurfaceCaptureGrabber::start()
+{
+ if (m_thread)
+ m_thread->start();
+ else if (!isGrabbingContextInitialized())
+ initializeGrabbingContext();
+}
+
+QFFmpegSurfaceCaptureGrabber::~QFFmpegSurfaceCaptureGrabber() = default;
+
+void QFFmpegSurfaceCaptureGrabber::setFrameRate(qreal rate)
+{
+ rate = qBound(MinScreenCaptureFrameRate, rate, MaxScreenCaptureFrameRate);
+ if (std::exchange(m_rate, rate) != rate) {
+ qCDebug(qLcScreenCaptureGrabber) << "Screen capture rate has been changed:" << m_rate;
+
+ updateTimerInterval();
+ }
+}
+
+qreal QFFmpegSurfaceCaptureGrabber::frameRate() const
+{
+ return m_rate;
+}
+
+void QFFmpegSurfaceCaptureGrabber::stop()
+{
+ if (m_thread)
+ {
+ m_thread->quit();
+ m_thread->wait();
+ }
+ else if (isGrabbingContextInitialized())
+ {
+ finalizeGrabbingContext();
+ }
+}
+
+void QFFmpegSurfaceCaptureGrabber::updateError(QPlatformSurfaceCapture::Error error,
+ const QString &description)
+{
+ const auto prevError = std::exchange(m_prevError, error);
+
+ if (error != QPlatformSurfaceCapture::NoError
+ || prevError != QPlatformSurfaceCapture::NoError) {
+ emit errorUpdated(error, description);
+ }
+
+ updateTimerInterval();
+}
+
+void QFFmpegSurfaceCaptureGrabber::updateTimerInterval()
+{
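+    // While in an error state, throttle down to the minimum rate to avoid
+    // busy retries; the requested rate is restored once grabbing succeeds again.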
+ const qreal rate = m_prevError && *m_prevError != QPlatformSurfaceCapture::NoError
+ ? MinScreenCaptureFrameRate
+ : m_rate;
+ const int interval = static_cast<int>(1000 / rate);
+ if (m_context && m_context->timer.interval() != interval)
+ m_context->timer.setInterval(interval);
+}
+
+void QFFmpegSurfaceCaptureGrabber::initializeGrabbingContext()
+{
+ Q_ASSERT(!isGrabbingContextInitialized());
+ qCDebug(qLcScreenCaptureGrabber) << "screen capture started";
+
+ m_context = std::make_unique<GrabbingContext>();
+ m_context->timer.setTimerType(Qt::PreciseTimer);
+ updateTimerInterval();
+
+ m_context->elapsedTimer.start();
+
+ auto doGrab = [this]() {
+ auto measure = m_context->profiler.measure();
+
+ auto frame = grabFrame();
+
+ if (frame.isValid()) {
+ frame.setStartTime(m_context->lastFrameTime);
+ frame.setEndTime(m_context->elapsedTimer.nsecsElapsed() / 1000);
+ m_context->lastFrameTime = frame.endTime();
+
+ updateError(QPlatformSurfaceCapture::NoError);
+
+ emit frameGrabbed(frame);
+ }
+ };
+
+ doGrab();
+
+ m_context->timer.callOnTimeout(&m_context->timer, doGrab);
+ m_context->timer.start();
+}
+
+void QFFmpegSurfaceCaptureGrabber::finalizeGrabbingContext()
+{
+ Q_ASSERT(isGrabbingContextInitialized());
+ qCDebug(qLcScreenCaptureGrabber)
+ << "end screen capture thread; avg grabbing time:" << m_context->profiler.avgTime()
+ << "ms, grabbings number:" << m_context->profiler.number();
+ m_context.reset();
+}
+
+bool QFFmpegSurfaceCaptureGrabber::isGrabbingContextInitialized() const
+{
+ return m_context != nullptr;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qffmpegsurfacecapturegrabber_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegsurfacecapturegrabber_p.h b/src/plugins/multimedia/ffmpeg/qffmpegsurfacecapturegrabber_p.h
new file mode 100644
index 000000000..9a617bd7a
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegsurfacecapturegrabber_p.h
@@ -0,0 +1,92 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGSURFACECAPTUREGRABBER_P_H
+#define QFFMPEGSURFACECAPTUREGRABBER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qvideoframe.h"
+#include "private/qplatformsurfacecapture_p.h"
+
+#include <memory>
+#include <optional>
+
+QT_BEGIN_NAMESPACE
+
+class QThread;
+
+static constexpr qreal DefaultScreenCaptureFrameRate = 60.;
+
+// Mac screens often support 120 frames per second; it looks like this is not
+// needed for capturing now, since it just adds CPU load without valuable
+// advantages. In the future, the frame rate should be customizable via the
+// user-facing API.
+static constexpr qreal MaxScreenCaptureFrameRate = 60.;
+static constexpr qreal MinScreenCaptureFrameRate = 1.;
+
+class QFFmpegSurfaceCaptureGrabber : public QObject
+{
+ Q_OBJECT
+public:
+ enum ThreadPolicy {
+ UseCurrentThread,
+ CreateGrabbingThread,
+ };
+
+ QFFmpegSurfaceCaptureGrabber(ThreadPolicy threadPolicy = CreateGrabbingThread);
+
+ ~QFFmpegSurfaceCaptureGrabber() override;
+
+ void start();
+ void stop();
+
+ template<typename Object, typename Method>
+ void addFrameCallback(Object &object, Method method)
+ {
+ connect(this, &QFFmpegSurfaceCaptureGrabber::frameGrabbed,
+ &object, method, Qt::DirectConnection);
+ }
+
+signals:
+ void frameGrabbed(const QVideoFrame&);
+ void errorUpdated(QPlatformSurfaceCapture::Error error, const QString &description);
+
+protected:
+ void updateError(QPlatformSurfaceCapture::Error error, const QString &description = {});
+
+ virtual QVideoFrame grabFrame() = 0;
+
+ void setFrameRate(qreal rate);
+
+ qreal frameRate() const;
+
+ void updateTimerInterval();
+
+ virtual void initializeGrabbingContext();
+ virtual void finalizeGrabbingContext();
+
+ bool isGrabbingContextInitialized() const;
+
+private:
+ struct GrabbingContext;
+ class GrabbingThread;
+
+ std::unique_ptr<GrabbingContext> m_context;
+ qreal m_rate = 0;
+ std::optional<QPlatformSurfaceCapture::Error> m_prevError;
+ std::unique_ptr<QThread> m_thread;
+};
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGSURFACECAPTUREGRABBER_P_H
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegthread.cpp b/src/plugins/multimedia/ffmpeg/qffmpegthread.cpp
new file mode 100644
index 000000000..fb14ced54
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegthread.cpp
@@ -0,0 +1,53 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegthread_p.h"
+
+
+QT_BEGIN_NAMESPACE
+
+using namespace QFFmpeg;
+
+void ConsumerThread::stopAndDelete()
+{
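+ // Set the exit flag under the mutex, wake the loop, join the thread, and then
+ // self-delete. Must not be called from the consumer thread itself.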
+ {
+ QMutexLocker locker(&m_loopDataMutex);
+ m_exit = true;
+ }
+ dataReady();
+ wait();
+ delete this;
+}
+
+void ConsumerThread::dataReady()
+{
+ m_condition.wakeAll();
+}
+
+void ConsumerThread::run()
+{
+ init();
+
+ while (true) {
+
+ {
+ QMutexLocker locker(&m_loopDataMutex);
+ while (!hasData() && !m_exit)
+ m_condition.wait(&m_loopDataMutex);
+
+ if (m_exit)
+ break;
+ }
+
+ processOne();
+ }
+
+ cleanup();
+}
+
+QMutexLocker<QMutex> ConsumerThread::lockLoopData() const
+{
+ return QMutexLocker(&m_loopDataMutex);
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegthread_p.h b/src/plugins/multimedia/ffmpeg/qffmpegthread_p.h
new file mode 100644
index 000000000..a7c5b0927
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegthread_p.h
@@ -0,0 +1,96 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGTHREAD_P_H
+#define QFFMPEGTHREAD_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qtmultimediaglobal_p.h>
+
+#include <qmutex.h>
+#include <qwaitcondition.h>
+#include <qthread.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAudioSink;
+
+namespace QFFmpeg
+{
+
+/*!
+ FFmpeg thread that implements a consumer pattern.
+
+ The thread processes work items until no more data is available;
+ it then sleeps until it is notified that new data has arrived.
+ */
+class ConsumerThread : public QThread
+{
+public:
+ /*!
+ Stops the thread and deletes this object
+ */
+ void stopAndDelete();
+
+protected:
+
+ /*!
+ Called on this thread when thread starts
+ */
+ virtual void init() = 0;
+
+ /*!
+ Called on this thread before thread exits
+ */
+ virtual void cleanup() = 0;
+
+ /*!
+ Process one work item. Called repeatedly until hasData() returns
+ false, in which case the thread sleeps until the next dataReady()
+ notification.
+
+ Note: processOne() should never block.
+ */
+ virtual void processOne() = 0;
+
+ /*!
+ Wakes the thread from sleep to process data until hasData()
+ returns false. This method should be invoked right after leaving
+ the scope of the QMutexLocker returned by lockLoopData().
+ */
+ void dataReady();
+
+ /*!
+ Must return true when data is available for processing
+ */
+ virtual bool hasData() const = 0;
+
+ /*!
+ Locks the loop data mutex. It must be used to protect loop data
+ like a queue of video frames.
+ */
+ QMutexLocker<QMutex> lockLoopData() const;
+
+private:
+ void run() final;
+
+ mutable QMutex m_loopDataMutex;
+ QWaitCondition m_condition;
+ bool m_exit = false;
+};
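+
+// A minimal consumer sketch under the above contract (illustrative only;
+// FrameConsumer and its queue are hypothetical, not part of this patch):
+//
+// class FrameConsumer : public ConsumerThread
+// {
+// public:
+// void enqueue(QVideoFrame frame)
+// {
+// {
+// auto locker = lockLoopData();
+// m_queue.push_back(std::move(frame));
+// }
+// dataReady(); // wake the loop outside the locked scope
+// }
+// protected:
+// void init() override { }
+// void cleanup() override { }
+// bool hasData() const override { return !m_queue.empty(); }
+// void processOne() override // must not block
+// {
+// QVideoFrame frame;
+// {
+// auto locker = lockLoopData();
+// frame = std::move(m_queue.front());
+// m_queue.pop_front();
+// }
+// // consume the frame here
+// }
+// private:
+// std::deque<QVideoFrame> m_queue;
+// };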
+
+}
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp b/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp
new file mode 100644
index 000000000..0edf355d8
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp
@@ -0,0 +1,363 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegvideobuffer_p.h"
+#include "private/qvideotexturehelper_p.h"
+#include "private/qmultimediautils_p.h"
+#include "qffmpeghwaccel_p.h"
+#include "qloggingcategory.h"
+
+extern "C" {
+#include <libavutil/pixdesc.h>
+#include <libavutil/hdr_dynamic_metadata.h>
+#include <libavutil/mastering_display_metadata.h>
+}
+
+QT_BEGIN_NAMESPACE
+
+static bool isFrameFlipped(const AVFrame& frame) {
+ for (int i = 0; i < AV_NUM_DATA_POINTERS && frame.data[i]; ++i) {
+ if (frame.linesize[i] < 0)
+ return true;
+ }
+
+ return false;
+}
+
+static Q_LOGGING_CATEGORY(qLcFFmpegVideoBuffer, "qt.multimedia.ffmpeg.videobuffer");
+
+QFFmpegVideoBuffer::QFFmpegVideoBuffer(AVFrameUPtr frame, AVRational pixelAspectRatio)
+ : QHwVideoBuffer(QVideoFrame::NoHandle),
+ m_frame(frame.get()),
+ m_size(qCalculateFrameSize({ frame->width, frame->height },
+ { pixelAspectRatio.num, pixelAspectRatio.den }))
+{
+ if (frame->hw_frames_ctx) {
+ m_hwFrame = std::move(frame);
+ m_pixelFormat = toQtPixelFormat(QFFmpeg::HWAccel::format(m_hwFrame.get()));
+ return;
+ }
+
+ m_swFrame = std::move(frame);
+ m_pixelFormat = toQtPixelFormat(AVPixelFormat(m_swFrame->format));
+
+ convertSWFrame();
+}
+
+QFFmpegVideoBuffer::~QFFmpegVideoBuffer() = default;
+
+void QFFmpegVideoBuffer::convertSWFrame()
+{
+ Q_ASSERT(m_swFrame);
+
+ const auto actualAVPixelFormat = AVPixelFormat(m_swFrame->format);
+ const auto targetAVPixelFormat = toAVPixelFormat(m_pixelFormat);
+
+ if (actualAVPixelFormat != targetAVPixelFormat || isFrameFlipped(*m_swFrame)
+ || m_size != QSize(m_swFrame->width, m_swFrame->height)) {
+ Q_ASSERT(toQtPixelFormat(targetAVPixelFormat) == m_pixelFormat);
+ // convert the format into something we can handle
+ SwsContext *c = sws_getContext(m_swFrame->width, m_swFrame->height, actualAVPixelFormat,
+ m_size.width(), m_size.height(), targetAVPixelFormat,
+ SWS_BICUBIC, nullptr, nullptr, nullptr);
+
+ auto newFrame = QFFmpeg::makeAVFrame();
+ newFrame->width = m_size.width();
+ newFrame->height = m_size.height();
+ newFrame->format = targetAVPixelFormat;
+ av_frame_get_buffer(newFrame.get(), 0);
+
+ sws_scale(c, m_swFrame->data, m_swFrame->linesize, 0, m_swFrame->height, newFrame->data, newFrame->linesize);
+ if (m_frame == m_swFrame.get())
+ m_frame = newFrame.get();
+ m_swFrame = std::move(newFrame);
+ sws_freeContext(c);
+ }
+}
+
+void QFFmpegVideoBuffer::setTextureConverter(const QFFmpeg::TextureConverter &converter)
+{
+ m_textureConverter = converter;
+ m_textureConverter.init(m_hwFrame.get());
+ m_type = converter.isNull() ? QVideoFrame::NoHandle : QVideoFrame::RhiTextureHandle;
+}
+
+QVideoFrameFormat::ColorSpace QFFmpegVideoBuffer::colorSpace() const
+{
+ switch (m_frame->colorspace) {
+ default:
+ case AVCOL_SPC_UNSPECIFIED:
+ case AVCOL_SPC_RESERVED:
+ case AVCOL_SPC_FCC:
+ case AVCOL_SPC_SMPTE240M:
+ case AVCOL_SPC_YCGCO:
+ case AVCOL_SPC_SMPTE2085:
+ case AVCOL_SPC_CHROMA_DERIVED_NCL:
+ case AVCOL_SPC_CHROMA_DERIVED_CL:
+ case AVCOL_SPC_ICTCP: // BT.2100 ICtCp
+ return QVideoFrameFormat::ColorSpace_Undefined;
+ case AVCOL_SPC_RGB:
+ return QVideoFrameFormat::ColorSpace_AdobeRgb;
+ case AVCOL_SPC_BT709:
+ return QVideoFrameFormat::ColorSpace_BT709;
+ case AVCOL_SPC_BT470BG: // BT601
+ case AVCOL_SPC_SMPTE170M: // Also BT601
+ return QVideoFrameFormat::ColorSpace_BT601;
+ case AVCOL_SPC_BT2020_NCL: // Non-constant luminance
+ case AVCOL_SPC_BT2020_CL: // Constant luminance
+ return QVideoFrameFormat::ColorSpace_BT2020;
+ }
+}
+
+QVideoFrameFormat::ColorTransfer QFFmpegVideoBuffer::colorTransfer() const
+{
+ return QFFmpeg::fromAvColorTransfer(m_frame->color_trc);
+}
+
+QVideoFrameFormat::ColorRange QFFmpegVideoBuffer::colorRange() const
+{
+ switch (m_frame->color_range) {
+ case AVCOL_RANGE_MPEG:
+ return QVideoFrameFormat::ColorRange_Video;
+ case AVCOL_RANGE_JPEG:
+ return QVideoFrameFormat::ColorRange_Full;
+ default:
+ return QVideoFrameFormat::ColorRange_Unknown;
+ }
+}
+
+float QFFmpegVideoBuffer::maxNits()
+{
+ float maxNits = -1;
+ for (int i = 0; i < m_frame->nb_side_data; ++i) {
+ AVFrameSideData *sd = m_frame->side_data[i];
+ // TODO: Longer term we might want to also support HDR10+ dynamic metadata
+ if (sd->type == AV_FRAME_DATA_MASTERING_DISPLAY_METADATA) {
+ auto *data = reinterpret_cast<AVMasteringDisplayMetadata *>(sd->data);
+ auto maybeLum = QFFmpeg::mul(10'000., data->max_luminance);
+ if (maybeLum)
+ maxNits = float(maybeLum.value());
+ }
+ }
+ return maxNits;
+}
+
+QAbstractVideoBuffer::MapData QFFmpegVideoBuffer::map(QtVideo::MapMode mode)
+{
+ if (!m_swFrame) {
+ Q_ASSERT(m_hwFrame && m_hwFrame->hw_frames_ctx);
+ m_swFrame = QFFmpeg::makeAVFrame();
+ /* retrieve data from GPU to CPU */
+ int ret = av_hwframe_transfer_data(m_swFrame.get(), m_hwFrame.get(), 0);
+ if (ret < 0) {
+ qWarning() << "Error transferring the data to system memory:" << ret;
+ return {};
+ }
+ convertSWFrame();
+ }
+
+ m_mode = mode;
+
+ MapData mapData;
+ auto *desc = QVideoTextureHelper::textureDescription(pixelFormat());
+ mapData.planeCount = desc->nplanes;
+ for (int i = 0; i < mapData.planeCount; ++i) {
+ Q_ASSERT(m_swFrame->linesize[i] >= 0);
+
+ mapData.data[i] = m_swFrame->data[i];
+ mapData.bytesPerLine[i] = m_swFrame->linesize[i];
+ mapData.dataSize[i] = mapData.bytesPerLine[i]*desc->heightForPlane(m_swFrame->height, i);
+ }
+
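+ // A write mapping makes the CPU copy the source of truth, so the code below
+ // drops the GPU frame (and any textures derived from it) to avoid rendering
+ // stale data.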
+ if ((mode & QtVideo::MapMode::WriteOnly) != QtVideo::MapMode::NotMapped && m_hwFrame) {
+ m_type = QVideoFrame::NoHandle;
+ m_hwFrame.reset();
+ if (m_textures) {
+ qCDebug(qLcFFmpegVideoBuffer)
+ << "Mapping of FFmpeg video buffer with write mode when "
+ "textures have been created. Visual artifacts might "
+ "happen if the frame is still in the rendering pipeline";
+ m_textures.reset();
+ }
+ }
+
+ return mapData;
+}
+
+void QFFmpegVideoBuffer::unmap()
+{
+ // nothing to do here for SW buffers.
+ // Set NotMapped mode to ensure map/unmap/mapMode consistency.
+ m_mode = QtVideo::MapMode::NotMapped;
+}
+
+std::unique_ptr<QVideoFrameTextures> QFFmpegVideoBuffer::mapTextures(QRhi *)
+{
+ if (m_textures)
+ return {};
+ if (!m_hwFrame)
+ return {};
+ if (m_textureConverter.isNull()) {
+ m_textures = nullptr;
+ return {};
+ }
+
+ m_textures.reset(m_textureConverter.getTextures(m_hwFrame.get()));
+ if (!m_textures) {
+ static thread_local int lastFormat = 0;
+ if (std::exchange(lastFormat, m_hwFrame->format) != m_hwFrame->format) // prevent logging spam
+ qWarning() << "Failed to get textures for frame; format:" << m_hwFrame->format;
+ }
+ return {};
+}
+
+quint64 QFFmpegVideoBuffer::textureHandle(QRhi *rhi, int plane) const
+{
+ return m_textures ? m_textures->textureHandle(rhi, plane) : 0;
+}
+
+QVideoFrameFormat::PixelFormat QFFmpegVideoBuffer::pixelFormat() const
+{
+ return m_pixelFormat;
+}
+
+QSize QFFmpegVideoBuffer::size() const
+{
+ return m_size;
+}
+
+QVideoFrameFormat::PixelFormat QFFmpegVideoBuffer::toQtPixelFormat(AVPixelFormat avPixelFormat, bool *needsConversion)
+{
+ if (needsConversion)
+ *needsConversion = false;
+
+ switch (avPixelFormat) {
+ default:
+ break;
+ case AV_PIX_FMT_NONE:
+ Q_ASSERT(!"Invalid avPixelFormat!");
+ return QVideoFrameFormat::Format_Invalid;
+ case AV_PIX_FMT_ARGB:
+ return QVideoFrameFormat::Format_ARGB8888;
+ case AV_PIX_FMT_0RGB:
+ return QVideoFrameFormat::Format_XRGB8888;
+ case AV_PIX_FMT_BGRA:
+ return QVideoFrameFormat::Format_BGRA8888;
+ case AV_PIX_FMT_BGR0:
+ return QVideoFrameFormat::Format_BGRX8888;
+ case AV_PIX_FMT_ABGR:
+ return QVideoFrameFormat::Format_ABGR8888;
+ case AV_PIX_FMT_0BGR:
+ return QVideoFrameFormat::Format_XBGR8888;
+ case AV_PIX_FMT_RGBA:
+ return QVideoFrameFormat::Format_RGBA8888;
+ case AV_PIX_FMT_RGB0:
+ return QVideoFrameFormat::Format_RGBX8888;
+
+ case AV_PIX_FMT_YUV422P:
+ return QVideoFrameFormat::Format_YUV422P;
+ case AV_PIX_FMT_YUV420P:
+ return QVideoFrameFormat::Format_YUV420P;
+ case AV_PIX_FMT_YUV420P10:
+ return QVideoFrameFormat::Format_YUV420P10;
+ case AV_PIX_FMT_UYVY422:
+ return QVideoFrameFormat::Format_UYVY;
+ case AV_PIX_FMT_YUYV422:
+ return QVideoFrameFormat::Format_YUYV;
+ case AV_PIX_FMT_NV12:
+ return QVideoFrameFormat::Format_NV12;
+ case AV_PIX_FMT_NV21:
+ return QVideoFrameFormat::Format_NV21;
+ case AV_PIX_FMT_GRAY8:
+ return QVideoFrameFormat::Format_Y8;
+ case AV_PIX_FMT_GRAY16:
+ return QVideoFrameFormat::Format_Y16;
+
+ case AV_PIX_FMT_P010:
+ return QVideoFrameFormat::Format_P010;
+ case AV_PIX_FMT_P016:
+ return QVideoFrameFormat::Format_P016;
+ case AV_PIX_FMT_MEDIACODEC:
+ return QVideoFrameFormat::Format_SamplerExternalOES;
+ }
+
+ if (needsConversion)
+ *needsConversion = true;
+
+ const AVPixFmtDescriptor *descriptor = av_pix_fmt_desc_get(avPixelFormat);
+
+ if (descriptor->flags & AV_PIX_FMT_FLAG_RGB)
+ return QVideoFrameFormat::Format_RGBA8888;
+
+ if (descriptor->comp[0].depth > 8)
+ return QVideoFrameFormat::Format_P016;
+ return QVideoFrameFormat::Format_YUV420P;
+}
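+
+// Illustrative fallback behavior (hypothetical call; values follow from the
+// switch above): AV_PIX_FMT_YUV444P is not handled explicitly, is not RGB, and
+// has 8-bit components, so
+// bool conv = false;
+// auto fmt = QFFmpegVideoBuffer::toQtPixelFormat(AV_PIX_FMT_YUV444P, &conv);
+// yields fmt == QVideoFrameFormat::Format_YUV420P with conv == true; the frame
+// is then rescaled to that format by convertSWFrame() before CPU mapping.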
+
+AVPixelFormat QFFmpegVideoBuffer::toAVPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat)
+{
+ switch (pixelFormat) {
+ default:
+ case QVideoFrameFormat::Format_Invalid:
+ case QVideoFrameFormat::Format_AYUV:
+ case QVideoFrameFormat::Format_AYUV_Premultiplied:
+ case QVideoFrameFormat::Format_YV12:
+ case QVideoFrameFormat::Format_IMC1:
+ case QVideoFrameFormat::Format_IMC2:
+ case QVideoFrameFormat::Format_IMC3:
+ case QVideoFrameFormat::Format_IMC4:
+ return AV_PIX_FMT_NONE;
+ case QVideoFrameFormat::Format_Jpeg:
+ // We're using the data from the converted QImage here, which is in BGRA.
+ return AV_PIX_FMT_BGRA;
+ case QVideoFrameFormat::Format_ARGB8888:
+ return AV_PIX_FMT_ARGB;
+ case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
+ case QVideoFrameFormat::Format_XRGB8888:
+ return AV_PIX_FMT_0RGB;
+ case QVideoFrameFormat::Format_BGRA8888:
+ return AV_PIX_FMT_BGRA;
+ case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
+ case QVideoFrameFormat::Format_BGRX8888:
+ return AV_PIX_FMT_BGR0;
+ case QVideoFrameFormat::Format_ABGR8888:
+ return AV_PIX_FMT_ABGR;
+ case QVideoFrameFormat::Format_XBGR8888:
+ return AV_PIX_FMT_0BGR;
+ case QVideoFrameFormat::Format_RGBA8888:
+ return AV_PIX_FMT_RGBA;
+ // to be added in 6.8:
+ // case QVideoFrameFormat::Format_RGBA8888_Premultiplied:
+ case QVideoFrameFormat::Format_RGBX8888:
+ return AV_PIX_FMT_RGB0;
+
+ case QVideoFrameFormat::Format_YUV422P:
+ return AV_PIX_FMT_YUV422P;
+ case QVideoFrameFormat::Format_YUV420P:
+ return AV_PIX_FMT_YUV420P;
+ case QVideoFrameFormat::Format_YUV420P10:
+ return AV_PIX_FMT_YUV420P10;
+ case QVideoFrameFormat::Format_UYVY:
+ return AV_PIX_FMT_UYVY422;
+ case QVideoFrameFormat::Format_YUYV:
+ return AV_PIX_FMT_YUYV422;
+ case QVideoFrameFormat::Format_NV12:
+ return AV_PIX_FMT_NV12;
+ case QVideoFrameFormat::Format_NV21:
+ return AV_PIX_FMT_NV21;
+ case QVideoFrameFormat::Format_Y8:
+ return AV_PIX_FMT_GRAY8;
+ case QVideoFrameFormat::Format_Y16:
+ return AV_PIX_FMT_GRAY16;
+
+ case QVideoFrameFormat::Format_P010:
+ return AV_PIX_FMT_P010;
+ case QVideoFrameFormat::Format_P016:
+ return AV_PIX_FMT_P016;
+
+ case QVideoFrameFormat::Format_SamplerExternalOES:
+ return AV_PIX_FMT_MEDIACODEC;
+ }
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer_p.h b/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer_p.h
new file mode 100644
index 000000000..c61c3f5ff
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer_p.h
@@ -0,0 +1,71 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGVIDEOBUFFER_P_H
+#define QFFMPEGVIDEOBUFFER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qhwvideobuffer_p.h>
+#include <QtCore/qvariant.h>
+
+#include "qffmpeg_p.h"
+#include "qffmpeghwaccel_p.h"
+
+QT_BEGIN_NAMESPACE
+
+class QFFmpegVideoBuffer : public QHwVideoBuffer
+{
+public:
+ using AVFrameUPtr = QFFmpeg::AVFrameUPtr;
+
+ QFFmpegVideoBuffer(AVFrameUPtr frame, AVRational pixelAspectRatio = { 1, 1 });
+ ~QFFmpegVideoBuffer() override;
+
+ MapData map(QtVideo::MapMode mode) override;
+ void unmap() override;
+
+ virtual std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *) override;
+ virtual quint64 textureHandle(QRhi *rhi, int plane) const override;
+
+ QVideoFrameFormat::PixelFormat pixelFormat() const;
+ QSize size() const;
+
+ static QVideoFrameFormat::PixelFormat toQtPixelFormat(AVPixelFormat avPixelFormat, bool *needsConversion = nullptr);
+ static AVPixelFormat toAVPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat);
+
+ void convertSWFrame();
+
+ AVFrame *getHWFrame() const { return m_hwFrame.get(); }
+
+ void setTextureConverter(const QFFmpeg::TextureConverter &converter);
+
+ QVideoFrameFormat::ColorSpace colorSpace() const;
+ QVideoFrameFormat::ColorTransfer colorTransfer() const;
+ QVideoFrameFormat::ColorRange colorRange() const;
+
+ float maxNits();
+
+private:
+ QVideoFrameFormat::PixelFormat m_pixelFormat;
+ AVFrame *m_frame = nullptr;
+ AVFrameUPtr m_hwFrame;
+ AVFrameUPtr m_swFrame;
+ QSize m_size;
+ QFFmpeg::TextureConverter m_textureConverter;
+ QtVideo::MapMode m_mode = QtVideo::MapMode::NotMapped;
+ std::unique_ptr<QFFmpeg::TextureSet> m_textures;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegvideosink.cpp b/src/plugins/multimedia/ffmpeg/qffmpegvideosink.cpp
new file mode 100644
index 000000000..2f02f09c1
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegvideosink.cpp
@@ -0,0 +1,34 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include <qffmpegvideosink_p.h>
+#include <qffmpegvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
+
+QT_BEGIN_NAMESPACE
+
+QFFmpegVideoSink::QFFmpegVideoSink(QVideoSink *sink)
+ : QPlatformVideoSink(sink)
+{
+}
+
+void QFFmpegVideoSink::setRhi(QRhi *rhi)
+{
+ if (m_rhi == rhi)
+ return;
+ m_rhi = rhi;
+ textureConverter = QFFmpeg::TextureConverter(rhi);
+ emit rhiChanged(rhi);
+}
+
+void QFFmpegVideoSink::setVideoFrame(const QVideoFrame &frame)
+{
+ auto *buffer = dynamic_cast<QFFmpegVideoBuffer *>(QVideoFramePrivate::hwBuffer(frame));
+ if (buffer)
+ buffer->setTextureConverter(textureConverter);
+
+ QPlatformVideoSink::setVideoFrame(frame);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qffmpegvideosink_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegvideosink_p.h b/src/plugins/multimedia/ffmpeg/qffmpegvideosink_p.h
new file mode 100644
index 000000000..92b537ee3
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegvideosink_p.h
@@ -0,0 +1,44 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGVIDEOSINK_H
+#define QFFMPEGVIDEOSINK_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformvideosink_p.h>
+#include <qffmpeghwaccel_p.h>
+
+QT_BEGIN_NAMESPACE
+
+// Required for QDoc workaround
+class QString;
+
+class QFFmpegVideoSink : public QPlatformVideoSink
+{
+ Q_OBJECT
+
+public:
+ QFFmpegVideoSink(QVideoSink *sink);
+ void setRhi(QRhi *rhi) override;
+
+ void setVideoFrame(const QVideoFrame &frame) override;
+
+private:
+ QFFmpeg::TextureConverter textureConverter;
+ QRhi *m_rhi = nullptr;
+};
+
+QT_END_NAMESPACE
+
+
+#endif
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegwindowcapture_uwp.cpp b/src/plugins/multimedia/ffmpeg/qffmpegwindowcapture_uwp.cpp
new file mode 100644
index 000000000..b36279cc3
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegwindowcapture_uwp.cpp
@@ -0,0 +1,508 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegwindowcapture_uwp_p.h"
+#include "qffmpegsurfacecapturegrabber_p.h"
+#include "qabstractvideobuffer.h"
+#include <private/qvideoframe_p.h>
+
+#include <unknwn.h>
+#include <winrt/base.h>
+#include <QtCore/private/qfactorycacheregistration_p.h>
+// Workaround for Windows SDK bug.
+// See https://github.com/microsoft/Windows.UI.Composition-Win32-Samples/issues/47
+namespace winrt::impl
+{
+template <typename Async>
+auto wait_for(Async const& async, Windows::Foundation::TimeSpan const& timeout);
+}
+#include <winrt/Windows.Foundation.Collections.h>
+#include <winrt/Windows.Graphics.Capture.h>
+#include <winrt/Windows.Graphics.DirectX.h>
+#include <winrt/Windows.Graphics.DirectX.Direct3D11.h>
+#include <Windows.Graphics.Capture.h>
+#include <Windows.Graphics.Capture.Interop.h>
+#include <windows.graphics.directx.direct3d11.interop.h>
+
+#include <D3d11.h>
+#include <dwmapi.h>
+#include <lowlevelmonitorconfigurationapi.h>
+#include <physicalmonitorenumerationapi.h>
+
+#include "qvideoframe.h"
+#include <qwindow.h>
+#include <qthread.h>
+#include <qloggingcategory.h>
+#include <qguiapplication.h>
+#include <private/qmultimediautils_p.h>
+#include <private/qwindowsmultimediautils_p.h>
+#include <private/qcapturablewindow_p.h>
+#include <qpa/qplatformscreen_p.h>
+
+#include <memory>
+#include <system_error>
+
+QT_BEGIN_NAMESPACE
+
+using namespace winrt::Windows::Graphics::Capture;
+using namespace winrt::Windows::Graphics::DirectX;
+using namespace winrt::Windows::Graphics::DirectX::Direct3D11;
+using namespace Windows::Graphics::DirectX::Direct3D11;
+using namespace QWindowsMultimediaUtils;
+
+using winrt::check_hresult;
+using winrt::com_ptr;
+using winrt::guid_of;
+
+namespace {
+
+Q_LOGGING_CATEGORY(qLcWindowCaptureUwp, "qt.multimedia.ffmpeg.windowcapture.uwp");
+
+winrt::Windows::Graphics::SizeInt32 getWindowSize(HWND hwnd)
+{
+ RECT windowRect{};
+ ::GetWindowRect(hwnd, &windowRect);
+
+ return { windowRect.right - windowRect.left, windowRect.bottom - windowRect.top };
+}
+
+QSize asQSize(winrt::Windows::Graphics::SizeInt32 size)
+{
+ return { size.Width, size.Height };
+}
+
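+// RAII guard: winrt::init_apartment() enters a COM apartment (multithreaded by
+// default) on the current thread, and the destructor leaves it again.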
+struct MultithreadedApartment
+{
+ MultithreadedApartment(const MultithreadedApartment &) = delete;
+ MultithreadedApartment &operator=(const MultithreadedApartment &) = delete;
+
+ MultithreadedApartment() { winrt::init_apartment(); }
+ ~MultithreadedApartment() { winrt::uninit_apartment(); }
+};
+
+class QUwpTextureVideoBuffer : public QAbstractVideoBuffer
+{
+public:
+ QUwpTextureVideoBuffer(com_ptr<IDXGISurface> &&surface) : m_surface(surface) { }
+
+ ~QUwpTextureVideoBuffer() override { QUwpTextureVideoBuffer::unmap(); }
+
+ MapData map(QtVideo::MapMode mode) override
+ {
+ if (m_mapMode != QtVideo::MapMode::NotMapped)
+ return {};
+
+ if (mode == QtVideo::MapMode::ReadOnly) {
+ DXGI_MAPPED_RECT rect = {};
+ HRESULT hr = m_surface->Map(&rect, DXGI_MAP_READ);
+ if (SUCCEEDED(hr)) {
+ DXGI_SURFACE_DESC desc = {};
+ hr = m_surface->GetDesc(&desc);
+
+ MapData md = {};
+ md.planeCount = 1;
+ md.bytesPerLine[0] = rect.Pitch;
+ md.data[0] = rect.pBits;
+ md.dataSize[0] = rect.Pitch * desc.Height;
+
+ m_mapMode = QtVideo::MapMode::ReadOnly;
+
+ return md;
+ } else {
+ qCDebug(qLcWindowCaptureUwp) << "Failed to map DXGI surface" << errorString(hr);
+ return {};
+ }
+ }
+
+ return {};
+ }
+
+ void unmap() override
+ {
+ if (m_mapMode == QtVideo::MapMode::NotMapped)
+ return;
+
+ const HRESULT hr = m_surface->Unmap();
+ if (FAILED(hr))
+ qCDebug(qLcWindowCaptureUwp) << "Failed to unmap surface" << errorString(hr);
+
+ m_mapMode = QtVideo::MapMode::NotMapped;
+ }
+
+ QVideoFrameFormat format() const override { return {}; }
+
+private:
+ QtVideo::MapMode m_mapMode = QtVideo::MapMode::NotMapped;
+ com_ptr<IDXGISurface> m_surface;
+};
+
+struct WindowGrabber
+{
+ WindowGrabber() = default;
+
+ WindowGrabber(IDXGIAdapter1 *adapter, HWND hwnd)
+ : m_frameSize{ getWindowSize(hwnd) }, m_captureWindow{ hwnd }
+ {
+ check_hresult(D3D11CreateDevice(adapter, D3D_DRIVER_TYPE_UNKNOWN, nullptr, 0, nullptr, 0,
+ D3D11_SDK_VERSION, m_device.put(), nullptr, nullptr));
+
+ const auto captureItem = createCaptureItem(hwnd);
+
+ m_framePool = Direct3D11CaptureFramePool::CreateFreeThreaded(
+ getCaptureDevice(m_device), m_pixelFormat, 1,
+ captureItem.Size());
+
+ m_session = m_framePool.CreateCaptureSession(captureItem);
+
+ // If supported, enable cursor capture
+ if (const auto session2 = m_session.try_as<IGraphicsCaptureSession2>())
+ session2.IsCursorCaptureEnabled(true);
+
+ // If supported, disable colored border around captured window to match other platforms
+ if (const auto session3 = m_session.try_as<IGraphicsCaptureSession3>())
+ session3.IsBorderRequired(false);
+
+ m_session.StartCapture();
+ }
+
+ ~WindowGrabber()
+ {
+ m_framePool.Close();
+ m_session.Close();
+ }
+
+ com_ptr<IDXGISurface> tryGetFrame()
+ {
+ const Direct3D11CaptureFrame frame = m_framePool.TryGetNextFrame();
+ if (!frame) {
+
+ // Stop capturing and report failure if the window was closed. If we don't
+ // stop, testing shows that we either get no frames at all or blank frames.
+ // Emitting an error prevents this inconsistent behavior and makes the
+ // Windows implementation behave like the Linux implementation.
+ if (!IsWindow(m_captureWindow))
+ throw std::runtime_error("Window was closed");
+
+ // A null frame may be returned spuriously if no new window texture
+ // is available yet.
+ return {};
+ }
+
+ if (m_frameSize != frame.ContentSize()) {
+ m_frameSize = frame.ContentSize();
+ m_framePool.Recreate(getCaptureDevice(m_device), m_pixelFormat, 1, frame.ContentSize());
+ return {};
+ }
+
+ return copyTexture(m_device, frame.Surface());
+ }
+
+private:
+ static GraphicsCaptureItem createCaptureItem(HWND hwnd)
+ {
+ const auto factory = winrt::get_activation_factory<GraphicsCaptureItem>();
+ const auto interop = factory.as<IGraphicsCaptureItemInterop>();
+
+ GraphicsCaptureItem item = { nullptr };
+ winrt::hresult status = S_OK;
+
+ // Attempt to create capture item with retry, because this occasionally fails,
+ // particularly in unit tests. When the failure code is E_INVALIDARG, it
+ // seems to help to sleep for a bit and retry. See QTBUG-116025.
+ constexpr int maxRetry = 10;
+ constexpr std::chrono::milliseconds retryDelay{ 100 };
+ for (int retryNum = 0; retryNum < maxRetry; ++retryNum) {
+
+ status = interop->CreateForWindow(hwnd, winrt::guid_of<GraphicsCaptureItem>(),
+ winrt::put_abi(item));
+
+ if (status != E_INVALIDARG)
+ break;
+
+ qCWarning(qLcWindowCaptureUwp)
+ << "Failed to create capture item:"
+ << QString::fromStdWString(winrt::hresult_error(status).message().c_str())
+ << "Retry number" << retryNum;
+
+ if (retryNum + 1 < maxRetry)
+ QThread::sleep(retryDelay);
+ }
+
+ // Throw if we fail to create the capture item
+ check_hresult(status);
+
+ return item;
+ }
+
+ static IDirect3DDevice getCaptureDevice(const com_ptr<ID3D11Device> &d3dDevice)
+ {
+ const auto dxgiDevice = d3dDevice.as<IDXGIDevice>();
+
+ com_ptr<IInspectable> device;
+ check_hresult(CreateDirect3D11DeviceFromDXGIDevice(dxgiDevice.get(), device.put()));
+
+ return device.as<IDirect3DDevice>();
+ }
+
+ static com_ptr<IDXGISurface> copyTexture(const com_ptr<ID3D11Device> &device,
+ const IDirect3DSurface &capturedTexture)
+ {
+ const auto dxgiInterop{ capturedTexture.as<IDirect3DDxgiInterfaceAccess>() };
+ if (!dxgiInterop)
+ return {};
+
+ com_ptr<IDXGISurface> dxgiSurface;
+ check_hresult(dxgiInterop->GetInterface(guid_of<IDXGISurface>(), dxgiSurface.put_void()));
+
+ DXGI_SURFACE_DESC desc = {};
+ check_hresult(dxgiSurface->GetDesc(&desc));
+
+ D3D11_TEXTURE2D_DESC texDesc = {};
+ texDesc.Width = desc.Width;
+ texDesc.Height = desc.Height;
+ texDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
+ texDesc.Usage = D3D11_USAGE_STAGING;
+ texDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
+ texDesc.MiscFlags = 0;
+ texDesc.BindFlags = 0;
+ texDesc.ArraySize = 1;
+ texDesc.MipLevels = 1;
+ texDesc.SampleDesc = { 1, 0 };
+
+ com_ptr<ID3D11Texture2D> texture;
+ check_hresult(device->CreateTexture2D(&texDesc, nullptr, texture.put()));
+
+ com_ptr<ID3D11DeviceContext> ctx;
+ device->GetImmediateContext(ctx.put());
+ ctx->CopyResource(texture.get(), dxgiSurface.as<ID3D11Resource>().get());
+
+ return texture.as<IDXGISurface>();
+ }
+
+ MultithreadedApartment m_comApartment{};
+ HWND m_captureWindow{};
+ winrt::Windows::Graphics::SizeInt32 m_frameSize{};
+ com_ptr<ID3D11Device> m_device;
+ Direct3D11CaptureFramePool m_framePool{ nullptr };
+ GraphicsCaptureSession m_session{ nullptr };
+ const DirectXPixelFormat m_pixelFormat = DirectXPixelFormat::R8G8B8A8UIntNormalized;
+};
+
+} // namespace
+
+class QFFmpegWindowCaptureUwp::Grabber : public QFFmpegSurfaceCaptureGrabber
+{
+ Q_OBJECT
+public:
+ Grabber(QFFmpegWindowCaptureUwp &capture, HWND hwnd)
+ : m_hwnd(hwnd),
+ m_format(QVideoFrameFormat(asQSize(getWindowSize(hwnd)),
+ QVideoFrameFormat::Format_RGBX8888))
+ {
+ const HMONITOR monitor = MonitorFromWindow(hwnd, MONITOR_DEFAULTTONULL);
+ m_adapter = getAdapter(monitor);
+
+ const qreal refreshRate = getMonitorRefreshRateHz(monitor);
+
+ m_format.setStreamFrameRate(refreshRate);
+ setFrameRate(refreshRate);
+
+ addFrameCallback(capture, &QFFmpegWindowCaptureUwp::newVideoFrame);
+ connect(this, &Grabber::errorUpdated, &capture, &QFFmpegWindowCaptureUwp::updateError);
+ }
+
+ ~Grabber() override { stop(); }
+
+ QVideoFrameFormat frameFormat() const { return m_format; }
+
+protected:
+
+ void initializeGrabbingContext() override
+ {
+ if (!m_adapter || !IsWindow(m_hwnd))
+ return; // Error already logged
+
+ try {
+ m_windowGrabber = std::make_unique<WindowGrabber>(m_adapter.get(), m_hwnd);
+
+ QFFmpegSurfaceCaptureGrabber::initializeGrabbingContext();
+ } catch (const winrt::hresult_error &err) {
+
+ const QString message = QLatin1String("Unable to capture window: ")
+ + QString::fromWCharArray(err.message().c_str());
+
+ updateError(InternalError, message);
+ }
+ }
+
+ void finalizeGrabbingContext() override
+ {
+ QFFmpegSurfaceCaptureGrabber::finalizeGrabbingContext();
+ m_windowGrabber = nullptr;
+ }
+
+ QVideoFrame grabFrame() override
+ {
+ try {
+ com_ptr<IDXGISurface> texture = m_windowGrabber->tryGetFrame();
+ if (!texture)
+ return {}; // No frame available yet
+
+ const QSize size = getTextureSize(texture);
+
+ m_format.setFrameSize(size);
+
+ return QVideoFramePrivate::createFrame(
+ std::make_unique<QUwpTextureVideoBuffer>(std::move(texture)), m_format);
+
+ } catch (const winrt::hresult_error &err) {
+
+ const QString message = QLatin1String("Window capture failed: ")
+ + QString::fromWCharArray(err.message().c_str());
+
+ updateError(InternalError, message);
+ } catch (const std::runtime_error& e) {
+ updateError(CaptureFailed, QString::fromLatin1(e.what()));
+ }
+
+ return {};
+ }
+
+private:
+ static com_ptr<IDXGIAdapter1> getAdapter(HMONITOR handle)
+ {
+ com_ptr<IDXGIFactory1> factory;
+ check_hresult(CreateDXGIFactory1(guid_of<IDXGIFactory1>(), factory.put_void()));
+
+ com_ptr<IDXGIAdapter1> adapter;
+ for (quint32 i = 0; factory->EnumAdapters1(i, adapter.put()) == S_OK; adapter = nullptr, i++) {
+ com_ptr<IDXGIOutput> output;
+ for (quint32 j = 0; adapter->EnumOutputs(j, output.put()) == S_OK; output = nullptr, j++) {
+ DXGI_OUTPUT_DESC desc = {};
+ HRESULT hr = output->GetDesc(&desc);
+ if (hr == S_OK && desc.Monitor == handle)
+ return adapter;
+ }
+ }
+ return {};
+ }
+
+ static QSize getTextureSize(const com_ptr<IDXGISurface> &surf)
+ {
+ if (!surf)
+ return {};
+
+ DXGI_SURFACE_DESC desc;
+ check_hresult(surf->GetDesc(&desc));
+
+ return { static_cast<int>(desc.Width), static_cast<int>(desc.Height) };
+ }
+
+ static qreal getMonitorRefreshRateHz(HMONITOR handle)
+ {
+ DWORD count = 0;
+ if (GetNumberOfPhysicalMonitorsFromHMONITOR(handle, &count)) {
+ std::vector<PHYSICAL_MONITOR> monitors{ count };
+ if (GetPhysicalMonitorsFromHMONITOR(handle, count, monitors.data())) {
+ for (const auto &monitor : std::as_const(monitors)) {
+ MC_TIMING_REPORT screenTiming = {};
+ if (GetTimingReport(monitor.hPhysicalMonitor, &screenTiming)) {
+ // Empirically we found that GetTimingReport does not return
+ // the frequency in updates per second as documented, but in
+ // updates per 100 seconds.
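+ // For example, a reported value of 6000 means 60 Hz.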
+ return static_cast<qreal>(screenTiming.dwVerticalFrequencyInHZ) / 100.0;
+ }
+ }
+ }
+ }
+ return DefaultScreenCaptureFrameRate;
+ }
+
+ HWND m_hwnd{};
+ com_ptr<IDXGIAdapter1> m_adapter{};
+ std::unique_ptr<WindowGrabber> m_windowGrabber;
+ QVideoFrameFormat m_format;
+};
+
+QFFmpegWindowCaptureUwp::QFFmpegWindowCaptureUwp() : QPlatformSurfaceCapture(WindowSource{})
+{
+ qCDebug(qLcWindowCaptureUwp) << "Creating UWP screen capture";
+}
+
+QFFmpegWindowCaptureUwp::~QFFmpegWindowCaptureUwp() = default;
+
+static QString isCapturableWindow(HWND hwnd)
+{
+ if (!IsWindow(hwnd))
+ return "Invalid window handle";
+
+ if (hwnd == GetShellWindow())
+ return "Cannot capture the shell window";
+
+ wchar_t className[MAX_PATH] = {};
+ GetClassName(hwnd, className, MAX_PATH);
+ if (QString::fromWCharArray(className).length() == 0)
+ return "Cannot capture windows without a class name";
+
+ if (!IsWindowVisible(hwnd))
+ return "Cannot capture invisible windows";
+
+ if (GetAncestor(hwnd, GA_ROOT) != hwnd)
+ return "Can only capture root windows";
+
+ const LONG_PTR style = GetWindowLongPtr(hwnd, GWL_STYLE);
+ if (style & WS_DISABLED)
+ return "Cannot capture disabled windows";
+
+ const LONG_PTR exStyle = GetWindowLongPtr(hwnd, GWL_EXSTYLE);
+ if (exStyle & WS_EX_TOOLWINDOW)
+ return "Cannot capture tool windows";
+
+ DWORD cloaked = FALSE;
+ const HRESULT hr = DwmGetWindowAttribute(hwnd, DWMWA_CLOAKED, &cloaked, sizeof(cloaked));
+ if (SUCCEEDED(hr) && cloaked == DWM_CLOAKED_SHELL)
+ return "Cannot capture cloaked windows";
+
+ return {};
+}
+
+bool QFFmpegWindowCaptureUwp::setActiveInternal(bool active)
+{
+ if (static_cast<bool>(m_grabber) == active)
+ return false;
+
+ if (m_grabber) {
+ m_grabber.reset();
+ return true;
+ }
+
+ const auto window = source<WindowSource>();
+ const auto handle = QCapturableWindowPrivate::handle(window);
+
+ const auto hwnd = reinterpret_cast<HWND>(handle ? handle->id : 0);
+ if (const QString error = isCapturableWindow(hwnd); !error.isEmpty()) {
+ updateError(InternalError, error);
+ return false;
+ }
+
+ m_grabber = std::make_unique<Grabber>(*this, hwnd);
+ m_grabber->start();
+
+ return true;
+}
+
+bool QFFmpegWindowCaptureUwp::isSupported()
+{
+ return GraphicsCaptureSession::IsSupported();
+}
+
+QVideoFrameFormat QFFmpegWindowCaptureUwp::frameFormat() const
+{
+ if (m_grabber)
+ return m_grabber->frameFormat();
+ return {};
+}
+
+QT_END_NAMESPACE
+
+#include "qffmpegwindowcapture_uwp.moc"
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegwindowcapture_uwp_p.h b/src/plugins/multimedia/ffmpeg/qffmpegwindowcapture_uwp_p.h
new file mode 100644
index 000000000..10f6e62be
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qffmpegwindowcapture_uwp_p.h
@@ -0,0 +1,46 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGWINDOWCAPTURE_UWP_P_H
+#define QFFMPEGWINDOWCAPTURE_UWP_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qnamespace.h>
+#include "private/qplatformsurfacecapture_p.h"
+#include "qvideoframeformat.h"
+#include <memory>
+
+QT_BEGIN_NAMESPACE
+
+class QFFmpegWindowCaptureUwp : public QPlatformSurfaceCapture
+{
+public:
+ QFFmpegWindowCaptureUwp();
+ ~QFFmpegWindowCaptureUwp() override;
+
+ QVideoFrameFormat frameFormat() const override;
+
+ static bool isSupported();
+
+private:
+ class Grabber;
+
+ bool setActiveInternal(bool active) override;
+
+private:
+ std::unique_ptr<Grabber> m_grabber;
+};
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGWINDOWCAPTURE_UWP_P_H
diff --git a/src/plugins/multimedia/ffmpeg/qgdiwindowcapture.cpp b/src/plugins/multimedia/ffmpeg/qgdiwindowcapture.cpp
new file mode 100644
index 000000000..97742043c
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qgdiwindowcapture.cpp
@@ -0,0 +1,197 @@
+// Copyright (C) 2023 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qgdiwindowcapture_p.h"
+
+#include "qvideoframe.h"
+#include "qffmpegsurfacecapturegrabber_p.h"
+#include "private/qcapturablewindow_p.h"
+#include "private/qmemoryvideobuffer_p.h"
+#include "private/qvideoframe_p.h"
+
+#include <qt_windows.h>
+#include <QtCore/qloggingcategory.h>
+
+static Q_LOGGING_CATEGORY(qLcGdiWindowCapture, "qt.multimedia.ffmpeg.gdiwindowcapture");
+
+QT_BEGIN_NAMESPACE
+
+class QGdiWindowCapture::Grabber : public QFFmpegSurfaceCaptureGrabber
+{
+public:
+ static std::unique_ptr<Grabber> create(QGdiWindowCapture &capture, HWND hWnd)
+ {
+ auto hdcWindow = GetDC(hWnd);
+ if (!hdcWindow) {
+ capture.updateError(QPlatformSurfaceCapture::CaptureFailed,
+ QLatin1String("Cannot create a window drawing context"));
+ return nullptr;
+ }
+
+ auto hdcMem = CreateCompatibleDC(hdcWindow);
+
+ if (!hdcMem) {
+ capture.updateError(QPlatformSurfaceCapture::CaptureFailed,
+ QLatin1String("Cannot create a compatible drawing context"));
+ return nullptr;
+ }
+
+ std::unique_ptr<Grabber> result(new Grabber(capture, hWnd, hdcWindow, hdcMem));
+ if (!result->update())
+ return nullptr;
+
+ result->start();
+ return result;
+ }
+
+ ~Grabber() override
+ {
+ stop();
+
+ if (m_hBitmap)
+ DeleteObject(m_hBitmap);
+
+ if (m_hdcMem)
+ DeleteDC(m_hdcMem);
+
+ if (m_hdcWindow)
+ ReleaseDC(m_hwnd, m_hdcWindow);
+ }
+
+ QVideoFrameFormat format() const { return m_format; }
+
+private:
+ Grabber(QGdiWindowCapture &capture, HWND hWnd, HDC hdcWindow, HDC hdcMem)
+ : m_hwnd(hWnd), m_hdcWindow(hdcWindow), m_hdcMem(hdcMem)
+ {
+ if (auto rate = GetDeviceCaps(hdcWindow, VREFRESH); rate > 0)
+ setFrameRate(rate);
+
+ addFrameCallback(capture, &QGdiWindowCapture::newVideoFrame);
+ connect(this, &Grabber::errorUpdated, &capture, &QGdiWindowCapture::updateError);
+ }
+
+ bool update()
+ {
+ RECT windowRect{};
+ if (!GetWindowRect(m_hwnd, &windowRect)) {
+ updateError(QPlatformSurfaceCapture::CaptureFailed,
+ QLatin1String("Cannot get window size"));
+ return false;
+ }
+
+ const QSize size{ windowRect.right - windowRect.left, windowRect.bottom - windowRect.top };
+
+ if (m_format.isValid() && size == m_format.frameSize() && m_hBitmap)
+ return true;
+
+ if (m_hBitmap)
+ DeleteObject(std::exchange(m_hBitmap, nullptr));
+
+ if (size.isEmpty()) {
+ m_format = {};
+ updateError(QPlatformSurfaceCapture::CaptureFailed,
+ QLatin1String("Invalid window size"));
+ return false;
+ }
+
+ m_hBitmap = CreateCompatibleBitmap(m_hdcWindow, size.width(), size.height());
+
+ if (!m_hBitmap) {
+ m_format = {};
+ updateError(QPlatformSurfaceCapture::CaptureFailed,
+ QLatin1String("Cannot create a compatible bitmap"));
+ return false;
+ }
+
+ QVideoFrameFormat format(size, QVideoFrameFormat::Format_BGRX8888);
+ format.setStreamFrameRate(frameRate());
+ m_format = format;
+ return true;
+ }
+
+ QVideoFrame grabFrame() override
+ {
+ if (!update())
+ return {};
+
+ const auto oldBitmap = SelectObject(m_hdcMem, m_hBitmap);
+ auto deselect = qScopeGuard([&]() { SelectObject(m_hdcMem, oldBitmap); });
+
+ const auto size = m_format.frameSize();
+
+ if (!BitBlt(m_hdcMem, 0, 0, size.width(), size.height(), m_hdcWindow, 0, 0, SRCCOPY)) {
+ updateError(QPlatformSurfaceCapture::CaptureFailed,
+ QLatin1String("Cannot copy image to the compatible DC"));
+ return {};
+ }
+
+ BITMAPINFO info{};
+ auto &header = info.bmiHeader;
+ header.biSize = sizeof(BITMAPINFOHEADER);
+ header.biWidth = size.width();
+ header.biHeight = -size.height(); // negative height to ensure top-down orientation
+ header.biPlanes = 1;
+ header.biBitCount = 32;
+ header.biCompression = BI_RGB;
+
+ const auto bytesPerLine = size.width() * 4;
+
+ QByteArray array(size.height() * bytesPerLine, Qt::Uninitialized);
+
+ const auto copiedHeight = GetDIBits(m_hdcMem, m_hBitmap, 0, size.height(), array.data(), &info, DIB_RGB_COLORS);
+ if (copiedHeight != size.height()) {
+ qCWarning(qLcGdiWindowCapture) << copiedHeight << "lines have been copied, expected:" << size.height();
+ // In practice, this may randomly fail the first time after start, so we don't consider it an error.
+ // TODO: investigate the reasons and handle the error properly
+ // updateError(QPlatformSurfaceCapture::CaptureFailed,
+ // QLatin1String("Cannot get raw image data"));
+ return {};
+ }
+
+ if (header.biWidth != size.width() || header.biHeight != -size.height()
+ || header.biPlanes != 1 || header.biBitCount != 32 || header.biCompression != BI_RGB) {
+ updateError(QPlatformSurfaceCapture::CaptureFailed,
+ QLatin1String("Output bitmap info is unexpected"));
+ return {};
+ }
+
+ return QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(std::move(array), bytesPerLine), m_format);
+ }
+
+private:
+ HWND m_hwnd = {};
+ QVideoFrameFormat m_format;
+ HDC m_hdcWindow = {};
+ HDC m_hdcMem = {};
+ HBITMAP m_hBitmap = {};
+};
+
+QGdiWindowCapture::QGdiWindowCapture() : QPlatformSurfaceCapture(WindowSource{}) { }
+
+QGdiWindowCapture::~QGdiWindowCapture() = default;
+
+QVideoFrameFormat QGdiWindowCapture::frameFormat() const
+{
+ return m_grabber ? m_grabber->format() : QVideoFrameFormat();
+}
+
+bool QGdiWindowCapture::setActiveInternal(bool active)
+{
+ if (active == static_cast<bool>(m_grabber))
+ return true;
+
+ if (m_grabber) {
+ m_grabber.reset();
+ } else {
+ auto window = source<WindowSource>();
+ auto handle = QCapturableWindowPrivate::handle(window);
+
+ m_grabber = Grabber::create(*this, reinterpret_cast<HWND>(handle ? handle->id : 0));
+ }
+
+ return static_cast<bool>(m_grabber) == active;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qgdiwindowcapture_p.h b/src/plugins/multimedia/ffmpeg/qgdiwindowcapture_p.h
new file mode 100644
index 000000000..d4498455c
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qgdiwindowcapture_p.h
@@ -0,0 +1,43 @@
+// Copyright (C) 2023 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGDIWINDOWCAPTURE_P_H
+#define QGDIWINDOWCAPTURE_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "private/qplatformsurfacecapture_p.h"
+
+#include <memory>
+
+QT_BEGIN_NAMESPACE
+
+class QGdiWindowCapture : public QPlatformSurfaceCapture
+{
+ class Grabber;
+
+public:
+ QGdiWindowCapture();
+ ~QGdiWindowCapture() override;
+
+ QVideoFrameFormat frameFormat() const override;
+
+protected:
+ bool setActiveInternal(bool active) override;
+
+private:
+ std::unique_ptr<Grabber> m_grabber;
+};
+
+QT_END_NAMESPACE
+
+#endif // QGDIWINDOWCAPTURE_P_H
diff --git a/src/plugins/multimedia/ffmpeg/qgrabwindowsurfacecapture.cpp b/src/plugins/multimedia/ffmpeg/qgrabwindowsurfacecapture.cpp
new file mode 100644
index 000000000..4bd1f6a65
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qgrabwindowsurfacecapture.cpp
@@ -0,0 +1,221 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qvideoframe.h"
+#include "qgrabwindowsurfacecapture_p.h"
+#include "qscreencapture.h"
+#include "qffmpegsurfacecapturegrabber_p.h"
+
+#include "private/qimagevideobuffer_p.h"
+#include "private/qcapturablewindow_p.h"
+#include "private/qvideoframe_p.h"
+
+#include "qscreen.h"
+#include "qmutex.h"
+#include "qwaitcondition.h"
+#include "qpixmap.h"
+#include "qguiapplication.h"
+#include "qwindow.h"
+#include "qpointer.h"
+
+#include <QtCore/qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+namespace {
+
+using WindowUPtr = std::unique_ptr<QWindow>;
+
+} // namespace
+
+class QGrabWindowSurfaceCapture::Grabber : public QFFmpegSurfaceCaptureGrabber
+{
+public:
+ Grabber(QGrabWindowSurfaceCapture &capture, QScreen *screen) : Grabber(capture, screen, nullptr)
+ {
+ Q_ASSERT(screen);
+ }
+
+ Grabber(QGrabWindowSurfaceCapture &capture, WindowUPtr window)
+ : Grabber(capture, nullptr, std::move(window))
+ {
+ Q_ASSERT(m_window);
+ }
+
+ ~Grabber() override {
+ stop();
+
+ Q_ASSERT(!m_screenRemovingLocked);
+ }
+
+ const QVideoFrameFormat &format()
+ {
+ QMutexLocker locker(&m_formatMutex);
+ while (!m_format)
+ m_waitForFormat.wait(&m_formatMutex);
+ return *m_format;
+ }
+
+private:
+ Grabber(QGrabWindowSurfaceCapture &capture, QScreen *screen, WindowUPtr window)
+ : QFFmpegSurfaceCaptureGrabber(
+ QGuiApplication::platformName() == QLatin1String("eglfs")
+ ? QFFmpegSurfaceCaptureGrabber::UseCurrentThread
+ : QFFmpegSurfaceCaptureGrabber::CreateGrabbingThread),
+ m_capture(capture),
+ m_screen(screen),
+ m_window(std::move(window))
+ {
+ connect(qApp, &QGuiApplication::screenRemoved, this, &Grabber::onScreenRemoved);
+ addFrameCallback(m_capture, &QGrabWindowSurfaceCapture::newVideoFrame);
+ connect(this, &Grabber::errorUpdated, &m_capture, &QGrabWindowSurfaceCapture::updateError);
+ }
+
+ void onScreenRemoved(QScreen *screen)
+ {
+ /* This hack locks screen removal while QScreen::grabWindow is in progress.
+ * The current solution works because QGuiApplication::screenRemoved is emitted
+ * from the destructor of QScreen before the object's members are destroyed.
+ * Note that QGuiApplication manages screens in the main thread, so any removal
+ * of a screen must be synchronized with the grabbing thread.
+ */
+ QMutexLocker locker(&m_screenRemovingMutex);
+
+ if (m_screenRemovingLocked) {
+ qDebug() << "Screen" << screen->name()
+ << "is removed while screen window grabbing lock is active";
+ }
+
+ while (m_screenRemovingLocked)
+ m_screenRemovingWc.wait(&m_screenRemovingMutex);
+ }
+
+ void setScreenRemovingLocked(bool locked)
+ {
+ Q_ASSERT(locked != m_screenRemovingLocked);
+
+ {
+ QMutexLocker locker(&m_screenRemovingMutex);
+ m_screenRemovingLocked = locked;
+ }
+
+ if (!locked)
+ m_screenRemovingWc.wakeAll();
+ }
+
+ void updateFormat(const QVideoFrameFormat &format)
+ {
+ if (m_format && m_format->isValid())
+ return;
+
+ {
+ QMutexLocker locker(&m_formatMutex);
+ m_format = format;
+ }
+
+ m_waitForFormat.wakeAll();
+ }
+
+ QVideoFrame grabFrame() override
+ {
+ setScreenRemovingLocked(true);
+ auto screenGuard = qScopeGuard(std::bind(&Grabber::setScreenRemovingLocked, this, false));
+
+ WId wid = m_window ? m_window->winId() : 0;
+ QScreen *screen = m_window ? m_window->screen() : m_screen ? m_screen.data() : nullptr;
+
+ if (!screen) {
+ updateError(QPlatformSurfaceCapture::CaptureFailed, "Screen not found");
+ return {};
+ }
+
+ setFrameRate(screen->refreshRate());
+
+ QPixmap p = screen->grabWindow(wid);
+ auto buffer = std::make_unique<QImageVideoBuffer>(p.toImage());
+ const auto img = buffer->underlyingImage();
+
+ QVideoFrameFormat format(img.size(),
+ QVideoFrameFormat::pixelFormatFromImageFormat(img.format()));
+ format.setStreamFrameRate(screen->refreshRate());
+ updateFormat(format);
+
+ if (!format.isValid()) {
+ updateError(QPlatformSurfaceCapture::CaptureFailed,
+ "Failed to grab the screen content");
+ return {};
+ }
+
+ return QVideoFramePrivate::createFrame(std::move(buffer), std::move(format));
+ }
+
+private:
+ QGrabWindowSurfaceCapture &m_capture;
+ QPointer<QScreen> m_screen;
+ WindowUPtr m_window;
+
+ QMutex m_formatMutex;
+ QWaitCondition m_waitForFormat;
+ std::optional<QVideoFrameFormat> m_format;
+
+ QMutex m_screenRemovingMutex;
+ bool m_screenRemovingLocked = false;
+ QWaitCondition m_screenRemovingWc;
+};
+
+QGrabWindowSurfaceCapture::QGrabWindowSurfaceCapture(Source initialSource)
+ : QPlatformSurfaceCapture(initialSource)
+{
+}
+
+QGrabWindowSurfaceCapture::~QGrabWindowSurfaceCapture() = default;
+
+QVideoFrameFormat QGrabWindowSurfaceCapture::frameFormat() const
+{
+ if (m_grabber)
+ return m_grabber->format();
+ else
+ return {};
+}
+
+bool QGrabWindowSurfaceCapture::setActiveInternal(bool active)
+{
+ if (active == static_cast<bool>(m_grabber))
+ return true;
+
+ if (m_grabber)
+ m_grabber.reset();
+ else
+ std::visit([this](auto source) { activate(source); }, source());
+
+ return static_cast<bool>(m_grabber) == active;
+}
+
+void QGrabWindowSurfaceCapture::activate(ScreenSource screen)
+{
+ if (!checkScreenWithError(screen))
+ return;
+
+ m_grabber = std::make_unique<Grabber>(*this, screen);
+ m_grabber->start();
+}
+
+void QGrabWindowSurfaceCapture::activate(WindowSource window)
+{
+ auto handle = QCapturableWindowPrivate::handle(window);
+ auto wid = handle ? handle->id : 0;
+ if (auto wnd = WindowUPtr(QWindow::fromWinId(wid))) {
+ if (!wnd->screen()) {
+ updateError(InternalError,
+ "Window " + QString::number(wid) + " doesn't belong to any screen");
+ } else {
+ m_grabber = std::make_unique<Grabber>(*this, std::move(wnd));
+ m_grabber->start();
+ }
+ } else {
+ updateError(NotFound,
+ "Window " + QString::number(wid) + " doesn't exist or permissions denied");
+ }
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qgrabwindowsurfacecapture_p.h b/src/plugins/multimedia/ffmpeg/qgrabwindowsurfacecapture_p.h
new file mode 100644
index 000000000..a76ce9507
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qgrabwindowsurfacecapture_p.h
@@ -0,0 +1,48 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGRABWINDOWSURFACECAPTURE_P_H
+#define QGRABWINDOWSURFACECAPTURE_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "private/qplatformsurfacecapture_p.h"
+
+#include <memory>
+
+QT_BEGIN_NAMESPACE
+
+class QGrabWindowSurfaceCapture : public QPlatformSurfaceCapture
+{
+ class Grabber;
+
+public:
+ explicit QGrabWindowSurfaceCapture(Source initialSource);
+ ~QGrabWindowSurfaceCapture() override;
+
+ QVideoFrameFormat frameFormat() const override;
+
+protected:
+ bool setActiveInternal(bool active) override;
+
+private:
+ void activate(ScreenSource);
+
+ void activate(WindowSource);
+
+private:
+ std::unique_ptr<Grabber> m_grabber;
+};
+
+QT_END_NAMESPACE
+
+#endif // QGRABWINDOWSURFACECAPTURE_P_H
diff --git a/src/plugins/multimedia/ffmpeg/qopenglvideobuffer.cpp b/src/plugins/multimedia/ffmpeg/qopenglvideobuffer.cpp
new file mode 100644
index 000000000..4ac08fd24
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qopenglvideobuffer.cpp
@@ -0,0 +1,96 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qopenglvideobuffer_p.h"
+
+#include <qoffscreensurface.h>
+#include <qthread.h>
+#include <private/qimagevideobuffer_p.h>
+
+#include <QtOpenGL/private/qopenglcompositor_p.h>
+#include <QtOpenGL/private/qopenglframebufferobject_p.h>
+
+QT_BEGIN_NAMESPACE
+
+static QOpenGLContext *createContext(QOpenGLContext *shareContext)
+{
+ // Create an OpenGL context for the current thread. The lifetime of the context is tied to the
+ // lifetime of the current thread.
+ auto context = std::make_unique<QOpenGLContext>();
+ context->setShareContext(shareContext);
+
+ if (!context->create()) {
+ qWarning() << "Couldn't create an OpenGL context for QOpenGLVideoBuffer";
+ return nullptr;
+ }
+
+ QObject::connect(QThread::currentThread(), &QThread::finished,
+ context.get(), &QOpenGLContext::deleteLater);
+ return context.release();
+}
+
+static bool setCurrentOpenGLContext()
+{
+ auto compositorContext = QOpenGLCompositor::instance()->context();
+
+ // A thread-local variable is used to avoid creating a new context if we're called on the same
+ // thread. The context lifetime is tied to the current thread lifetime (see createContext()).
+ static thread_local QOpenGLContext *context = nullptr;
+ static thread_local QOffscreenSurface *surface = nullptr;
+
+ if (!context) {
+ context = (compositorContext->thread() == QThread::currentThread())
+ ? compositorContext
+ : createContext(compositorContext);
+
+ if (!context)
+ return false;
+
+ surface = new QOffscreenSurface(nullptr, context);
+ surface->setFormat(context->format());
+ surface->create();
+ }
+
+ return context->makeCurrent(surface);
+}
+
+QOpenGLVideoBuffer::QOpenGLVideoBuffer(std::unique_ptr<QOpenGLFramebufferObject> fbo)
+ : QHwVideoBuffer(QVideoFrame::RhiTextureHandle), m_fbo(std::move(fbo))
+{
+ Q_ASSERT(m_fbo);
+}
+
+QOpenGLVideoBuffer::~QOpenGLVideoBuffer() { }
+
+QAbstractVideoBuffer::MapData QOpenGLVideoBuffer::map(QtVideo::MapMode mode)
+{
+ return ensureImageBuffer().map(mode);
+}
+
+void QOpenGLVideoBuffer::unmap()
+{
+ if (m_imageBuffer)
+ m_imageBuffer->unmap();
+}
+
+quint64 QOpenGLVideoBuffer::textureHandle(QRhi *, int plane) const
+{
+ Q_UNUSED(plane);
+ return m_fbo->texture();
+}
+
+QImageVideoBuffer &QOpenGLVideoBuffer::ensureImageBuffer()
+{
+ // Create image buffer if not yet created.
+ // This is protected by mapMutex in QVideoFrame::map.
+ if (!m_imageBuffer) {
+ if (!setCurrentOpenGLContext())
+ qWarning() << "Failed to set current OpenGL context";
+
+ m_imageBuffer = std::make_unique<QImageVideoBuffer>(m_fbo->toImage(false));
+ }
+
+ return *m_imageBuffer;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qopenglvideobuffer_p.h b/src/plugins/multimedia/ffmpeg/qopenglvideobuffer_p.h
new file mode 100644
index 000000000..6e62625d0
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qopenglvideobuffer_p.h
@@ -0,0 +1,44 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QOPENGLVIDEOBUFFER_P_H
+#define QOPENGLVIDEOBUFFER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qhwvideobuffer_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QImageVideoBuffer;
+class QOpenGLFramebufferObject;
+
+class QOpenGLVideoBuffer : public QHwVideoBuffer
+{
+public:
+ QOpenGLVideoBuffer(std::unique_ptr<QOpenGLFramebufferObject> fbo);
+ ~QOpenGLVideoBuffer();
+
+ MapData map(QtVideo::MapMode mode) override;
+ void unmap() override;
+ quint64 textureHandle(QRhi *, int plane) const override;
+
+ QImageVideoBuffer &ensureImageBuffer();
+
+private:
+ std::unique_ptr<QOpenGLFramebufferObject> m_fbo;
+ std::unique_ptr<QImageVideoBuffer> m_imageBuffer;
+};
+
+QT_END_NAMESPACE
+
+#endif // QOPENGLVIDEOBUFFER_P_H
diff --git a/src/plugins/multimedia/ffmpeg/qv4l2camera.cpp b/src/plugins/multimedia/ffmpeg/qv4l2camera.cpp
new file mode 100644
index 000000000..800460f14
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qv4l2camera.cpp
@@ -0,0 +1,708 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qv4l2camera_p.h"
+#include "qv4l2filedescriptor_p.h"
+#include "qv4l2memorytransfer_p.h"
+
+#include <private/qcameradevice_p.h>
+#include <private/qmultimediautils_p.h>
+#include <private/qmemoryvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
+#include <private/qcore_unix_p.h>
+
+#include <qsocketnotifier.h>
+#include <qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcV4L2Camera, "qt.multimedia.ffmpeg.v4l2camera");
+
+static const struct {
+ QVideoFrameFormat::PixelFormat fmt;
+ uint32_t v4l2Format;
+} formatMap[] = {
+ // ### How do we handle V4L2_PIX_FMT_H264 and V4L2_PIX_FMT_MPEG4?
+ { QVideoFrameFormat::Format_YUV420P, V4L2_PIX_FMT_YUV420 },
+ { QVideoFrameFormat::Format_YUV422P, V4L2_PIX_FMT_YUV422P },
+ { QVideoFrameFormat::Format_YUYV, V4L2_PIX_FMT_YUYV },
+ { QVideoFrameFormat::Format_UYVY, V4L2_PIX_FMT_UYVY },
+ { QVideoFrameFormat::Format_XBGR8888, V4L2_PIX_FMT_XBGR32 },
+ { QVideoFrameFormat::Format_XRGB8888, V4L2_PIX_FMT_XRGB32 },
+ { QVideoFrameFormat::Format_ABGR8888, V4L2_PIX_FMT_ABGR32 },
+ { QVideoFrameFormat::Format_ARGB8888, V4L2_PIX_FMT_ARGB32 },
+ { QVideoFrameFormat::Format_BGRX8888, V4L2_PIX_FMT_BGR32 },
+ { QVideoFrameFormat::Format_RGBX8888, V4L2_PIX_FMT_RGB32 },
+ { QVideoFrameFormat::Format_BGRA8888, V4L2_PIX_FMT_BGRA32 },
+ { QVideoFrameFormat::Format_RGBA8888, V4L2_PIX_FMT_RGBA32 },
+ { QVideoFrameFormat::Format_Y8, V4L2_PIX_FMT_GREY },
+ { QVideoFrameFormat::Format_Y16, V4L2_PIX_FMT_Y16 },
+ { QVideoFrameFormat::Format_NV12, V4L2_PIX_FMT_NV12 },
+ { QVideoFrameFormat::Format_NV21, V4L2_PIX_FMT_NV21 },
+ { QVideoFrameFormat::Format_Jpeg, V4L2_PIX_FMT_MJPEG },
+ { QVideoFrameFormat::Format_Jpeg, V4L2_PIX_FMT_JPEG },
+ { QVideoFrameFormat::Format_Invalid, 0 },
+};
+
+QVideoFrameFormat::PixelFormat formatForV4L2Format(uint32_t v4l2Format)
+{
+ auto *f = formatMap;
+ while (f->v4l2Format) {
+ if (f->v4l2Format == v4l2Format)
+ return f->fmt;
+ ++f;
+ }
+ return QVideoFrameFormat::Format_Invalid;
+}
+
+uint32_t v4l2FormatForPixelFormat(QVideoFrameFormat::PixelFormat format)
+{
+ auto *f = formatMap;
+ while (f->v4l2Format) {
+ if (f->fmt == format)
+ return f->v4l2Format;
+ ++f;
+ }
+ return 0;
+}
+
+QV4L2Camera::QV4L2Camera(QCamera *camera)
+ : QPlatformCamera(camera)
+{
+}
+
+QV4L2Camera::~QV4L2Camera()
+{
+ stopCapturing();
+ closeV4L2Fd();
+}
+
+bool QV4L2Camera::isActive() const
+{
+ return m_active;
+}
+
+void QV4L2Camera::setActive(bool active)
+{
+ if (m_active == active)
+ return;
+ if (m_cameraDevice.isNull() && active)
+ return;
+
+ if (m_cameraFormat.isNull())
+ resolveCameraFormat({});
+
+ m_active = active;
+ if (m_active)
+ startCapturing();
+ else
+ stopCapturing();
+
+ emit newVideoFrame({});
+
+ emit activeChanged(active);
+}
+
+void QV4L2Camera::setCamera(const QCameraDevice &camera)
+{
+ if (m_cameraDevice == camera)
+ return;
+
+ stopCapturing();
+ closeV4L2Fd();
+
+ m_cameraDevice = camera;
+ resolveCameraFormat({});
+
+ initV4L2Controls();
+
+ if (m_active)
+ startCapturing();
+}
+
+bool QV4L2Camera::setCameraFormat(const QCameraFormat &format)
+{
+ if (!format.isNull() && !m_cameraDevice.videoFormats().contains(format))
+ return false;
+
+ if (!resolveCameraFormat(format))
+ return true;
+
+ if (m_active) {
+ stopCapturing();
+ closeV4L2Fd();
+
+ initV4L2Controls();
+ startCapturing();
+ }
+
+ return true;
+}
+
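+// Returns true if the effective camera format changed as a result of this call.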
+bool QV4L2Camera::resolveCameraFormat(const QCameraFormat &format)
+{
+ auto fmt = format;
+ if (fmt.isNull())
+ fmt = findBestCameraFormat(m_cameraDevice);
+
+ if (fmt == m_cameraFormat)
+ return false;
+
+ m_cameraFormat = fmt;
+ return true;
+}
+
+void QV4L2Camera::setFocusMode(QCamera::FocusMode mode)
+{
+ if (mode == focusMode())
+ return;
+
+ bool focusDist = supportedFeatures() & QCamera::Feature::FocusDistance;
+ if (!focusDist && !m_v4l2Info.rangedFocus)
+ return;
+
+ switch (mode) {
+ default:
+ case QCamera::FocusModeAuto:
+ setV4L2Parameter(V4L2_CID_FOCUS_AUTO, 1);
+ if (m_v4l2Info.rangedFocus)
+ setV4L2Parameter(V4L2_CID_AUTO_FOCUS_RANGE, V4L2_AUTO_FOCUS_RANGE_AUTO);
+ break;
+ case QCamera::FocusModeAutoNear:
+ setV4L2Parameter(V4L2_CID_FOCUS_AUTO, 1);
+ if (m_v4l2Info.rangedFocus)
+ setV4L2Parameter(V4L2_CID_AUTO_FOCUS_RANGE, V4L2_AUTO_FOCUS_RANGE_MACRO);
+ else if (focusDist)
+ setV4L2Parameter(V4L2_CID_FOCUS_ABSOLUTE, m_v4l2Info.minFocus);
+ break;
+ case QCamera::FocusModeAutoFar:
+ setV4L2Parameter(V4L2_CID_FOCUS_AUTO, 1);
+ if (m_v4l2Info.rangedFocus)
+ setV4L2Parameter(V4L2_CID_AUTO_FOCUS_RANGE, V4L2_AUTO_FOCUS_RANGE_INFINITY);
+ break;
+ case QCamera::FocusModeInfinity:
+ setV4L2Parameter(V4L2_CID_FOCUS_AUTO, 0);
+ setV4L2Parameter(V4L2_CID_FOCUS_ABSOLUTE, m_v4l2Info.maxFocus);
+ break;
+ case QCamera::FocusModeManual:
+ setV4L2Parameter(V4L2_CID_FOCUS_AUTO, 0);
+ setFocusDistance(focusDistance());
+ break;
+ }
+ focusModeChanged(mode);
+}
+
+void QV4L2Camera::setFocusDistance(float d)
+{
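+    // d is normalized to [0, 1]; map it linearly onto the driver's absolute focus range.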
+ int distance = m_v4l2Info.minFocus + int((m_v4l2Info.maxFocus - m_v4l2Info.minFocus) * d);
+ setV4L2Parameter(V4L2_CID_FOCUS_ABSOLUTE, distance);
+ focusDistanceChanged(d);
+}
+
+void QV4L2Camera::zoomTo(float factor, float)
+{
+ if (m_v4l2Info.maxZoom == m_v4l2Info.minZoom)
+ return;
+ factor = qBound(1., factor, 2.);
+ int zoom = m_v4l2Info.minZoom + (factor - 1.) * (m_v4l2Info.maxZoom - m_v4l2Info.minZoom);
+ setV4L2Parameter(V4L2_CID_ZOOM_ABSOLUTE, zoom);
+ zoomFactorChanged(factor);
+}
+
+bool QV4L2Camera::isFocusModeSupported(QCamera::FocusMode mode) const
+{
+ if (supportedFeatures() & QCamera::Feature::FocusDistance &&
+ (mode == QCamera::FocusModeManual || mode == QCamera::FocusModeAutoNear || mode == QCamera::FocusModeInfinity))
+ return true;
+
+ return mode == QCamera::FocusModeAuto;
+}
+
+void QV4L2Camera::setFlashMode(QCamera::FlashMode mode)
+{
+ if (!m_v4l2Info.flashSupported || mode == QCamera::FlashOn)
+ return;
+ setV4L2Parameter(V4L2_CID_FLASH_LED_MODE, mode == QCamera::FlashAuto ? V4L2_FLASH_LED_MODE_FLASH : V4L2_FLASH_LED_MODE_NONE);
+ flashModeChanged(mode);
+}
+
+bool QV4L2Camera::isFlashModeSupported(QCamera::FlashMode mode) const
+{
+ if (m_v4l2Info.flashSupported && mode == QCamera::FlashAuto)
+ return true;
+ return mode == QCamera::FlashOff;
+}
+
+bool QV4L2Camera::isFlashReady() const
+{
+ struct v4l2_queryctrl queryControl;
+ ::memset(&queryControl, 0, sizeof(queryControl));
+ queryControl.id = V4L2_CID_AUTO_WHITE_BALANCE;
+
+ return m_v4l2FileDescriptor && m_v4l2FileDescriptor->call(VIDIOC_QUERYCTRL, &queryControl);
+}
+
+void QV4L2Camera::setTorchMode(QCamera::TorchMode mode)
+{
+    if (!m_v4l2Info.torchSupported)
+        return;
+    setV4L2Parameter(V4L2_CID_FLASH_LED_MODE, mode == QCamera::TorchOn ? V4L2_FLASH_LED_MODE_TORCH : V4L2_FLASH_LED_MODE_NONE);
+ torchModeChanged(mode);
+}
+
+bool QV4L2Camera::isTorchModeSupported(QCamera::TorchMode mode) const
+{
+ if (mode == QCamera::TorchOn)
+ return m_v4l2Info.torchSupported;
+ return mode == QCamera::TorchOff;
+}
+
+void QV4L2Camera::setExposureMode(QCamera::ExposureMode mode)
+{
+ if (m_v4l2Info.autoExposureSupported && m_v4l2Info.manualExposureSupported) {
+ if (mode != QCamera::ExposureAuto && mode != QCamera::ExposureManual)
+ return;
+        int value = mode == QCamera::ExposureAuto ? V4L2_EXPOSURE_AUTO : V4L2_EXPOSURE_MANUAL;
+ setV4L2Parameter(V4L2_CID_EXPOSURE_AUTO, value);
+ exposureModeChanged(mode);
+ return;
+ }
+}
+
+bool QV4L2Camera::isExposureModeSupported(QCamera::ExposureMode mode) const
+{
+ if (mode == QCamera::ExposureAuto)
+ return true;
+ if (m_v4l2Info.manualExposureSupported && m_v4l2Info.autoExposureSupported)
+ return mode == QCamera::ExposureManual;
+ return false;
+}
+
+void QV4L2Camera::setExposureCompensation(float compensation)
+{
+ if ((m_v4l2Info.minExposureAdjustment != 0 || m_v4l2Info.maxExposureAdjustment != 0)) {
+ int value = qBound(m_v4l2Info.minExposureAdjustment, (int)(compensation * 1000),
+ m_v4l2Info.maxExposureAdjustment);
+ setV4L2Parameter(V4L2_CID_AUTO_EXPOSURE_BIAS, value);
+ exposureCompensationChanged(value/1000.);
+ return;
+ }
+}
+
+void QV4L2Camera::setManualIsoSensitivity(int iso)
+{
+ if (!(supportedFeatures() & QCamera::Feature::IsoSensitivity))
+ return;
+ setV4L2Parameter(V4L2_CID_ISO_SENSITIVITY_AUTO, iso <= 0 ? V4L2_ISO_SENSITIVITY_AUTO : V4L2_ISO_SENSITIVITY_MANUAL);
+ if (iso > 0) {
+ iso = qBound(minIso(), iso, maxIso());
+ setV4L2Parameter(V4L2_CID_ISO_SENSITIVITY, iso);
+ }
+ return;
+}
+
+int QV4L2Camera::isoSensitivity() const
+{
+ if (!(supportedFeatures() & QCamera::Feature::IsoSensitivity))
+ return -1;
+ return getV4L2Parameter(V4L2_CID_ISO_SENSITIVITY);
+}
+
+void QV4L2Camera::setManualExposureTime(float secs)
+{
+ if (m_v4l2Info.manualExposureSupported && m_v4l2Info.autoExposureSupported) {
+ int exposure =
+ qBound(m_v4l2Info.minExposure, qRound(secs * 10000.), m_v4l2Info.maxExposure);
+ setV4L2Parameter(V4L2_CID_EXPOSURE_ABSOLUTE, exposure);
+ exposureTimeChanged(exposure/10000.);
+ return;
+ }
+}
+
+float QV4L2Camera::exposureTime() const
+{
+ return getV4L2Parameter(V4L2_CID_EXPOSURE_ABSOLUTE)/10000.;
+}
+
+bool QV4L2Camera::isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const
+{
+ if (m_v4l2Info.autoWhiteBalanceSupported && m_v4l2Info.colorTemperatureSupported)
+ return true;
+
+ return mode == QCamera::WhiteBalanceAuto;
+}
+
+void QV4L2Camera::setWhiteBalanceMode(QCamera::WhiteBalanceMode mode)
+{
+ Q_ASSERT(isWhiteBalanceModeSupported(mode));
+
+ int temperature = colorTemperatureForWhiteBalance(mode);
+ int t = setV4L2ColorTemperature(temperature);
+ if (t == 0)
+ mode = QCamera::WhiteBalanceAuto;
+ whiteBalanceModeChanged(mode);
+}
+
+void QV4L2Camera::setColorTemperature(int temperature)
+{
+ if (temperature == 0) {
+ setWhiteBalanceMode(QCamera::WhiteBalanceAuto);
+ return;
+ }
+
+ Q_ASSERT(isWhiteBalanceModeSupported(QCamera::WhiteBalanceManual));
+
+ int t = setV4L2ColorTemperature(temperature);
+ if (t)
+ colorTemperatureChanged(t);
+}
+
+void QV4L2Camera::readFrame()
+{
+ Q_ASSERT(m_memoryTransfer);
+
+ auto buffer = m_memoryTransfer->dequeueBuffer();
+ if (!buffer) {
+ qCWarning(qLcV4L2Camera) << "Cannot take buffer";
+
+ if (errno == ENODEV) {
+ // camera got removed while being active
+ stopCapturing();
+ closeV4L2Fd();
+ }
+
+ return;
+ }
+
+ auto videoBuffer = std::make_unique<QMemoryVideoBuffer>(buffer->data, m_bytesPerLine);
+ QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(videoBuffer), frameFormat());
+
+ auto &v4l2Buffer = buffer->v4l2Buffer;
+
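+    // Make timestamps relative to the first captured frame so frame start times begin at 0.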
+ if (m_firstFrameTime.tv_sec == -1)
+ m_firstFrameTime = v4l2Buffer.timestamp;
+ qint64 secs = v4l2Buffer.timestamp.tv_sec - m_firstFrameTime.tv_sec;
+ qint64 usecs = v4l2Buffer.timestamp.tv_usec - m_firstFrameTime.tv_usec;
+ frame.setStartTime(secs*1000000 + usecs);
+ frame.setEndTime(frame.startTime() + m_frameDuration);
+
+ emit newVideoFrame(frame);
+
+ if (!m_memoryTransfer->enqueueBuffer(v4l2Buffer.index))
+ qCWarning(qLcV4L2Camera) << "Cannot add buffer";
+}
+
+void QV4L2Camera::setCameraBusy()
+{
+ m_cameraBusy = true;
+ updateError(QCamera::CameraError, QLatin1String("Camera is in use"));
+}
+
+void QV4L2Camera::initV4L2Controls()
+{
+ m_v4l2Info = {};
+ QCamera::Features features;
+
+ const QByteArray deviceName = m_cameraDevice.id();
+ Q_ASSERT(!deviceName.isEmpty());
+
+ closeV4L2Fd();
+
+ const int descriptor = qt_safe_open(deviceName.constData(), O_RDWR);
+ if (descriptor == -1) {
+ qCWarning(qLcV4L2Camera) << "Unable to open the camera" << deviceName
+ << "for read to query the parameter info:"
+ << qt_error_string(errno);
+ updateError(QCamera::CameraError, QLatin1String("Cannot open camera"));
+ return;
+ }
+
+ m_v4l2FileDescriptor = std::make_shared<QV4L2FileDescriptor>(descriptor);
+
+ qCDebug(qLcV4L2Camera) << "FD=" << descriptor;
+
+ struct v4l2_queryctrl queryControl;
+ ::memset(&queryControl, 0, sizeof(queryControl));
+ queryControl.id = V4L2_CID_AUTO_WHITE_BALANCE;
+
+ if (m_v4l2FileDescriptor->call(VIDIOC_QUERYCTRL, &queryControl)) {
+ m_v4l2Info.autoWhiteBalanceSupported = true;
+ setV4L2Parameter(V4L2_CID_AUTO_WHITE_BALANCE, true);
+ }
+
+ ::memset(&queryControl, 0, sizeof(queryControl));
+ queryControl.id = V4L2_CID_WHITE_BALANCE_TEMPERATURE;
+ if (m_v4l2FileDescriptor->call(VIDIOC_QUERYCTRL, &queryControl)) {
+ m_v4l2Info.minColorTemp = queryControl.minimum;
+ m_v4l2Info.maxColorTemp = queryControl.maximum;
+ m_v4l2Info.colorTemperatureSupported = true;
+ features |= QCamera::Feature::ColorTemperature;
+ }
+
+ ::memset(&queryControl, 0, sizeof(queryControl));
+ queryControl.id = V4L2_CID_EXPOSURE_AUTO;
+ if (m_v4l2FileDescriptor->call(VIDIOC_QUERYCTRL, &queryControl)) {
+ m_v4l2Info.autoExposureSupported = true;
+ }
+
+ ::memset(&queryControl, 0, sizeof(queryControl));
+ queryControl.id = V4L2_CID_EXPOSURE_ABSOLUTE;
+ if (m_v4l2FileDescriptor->call(VIDIOC_QUERYCTRL, &queryControl)) {
+ m_v4l2Info.manualExposureSupported = true;
+ m_v4l2Info.minExposure = queryControl.minimum;
+ m_v4l2Info.maxExposure = queryControl.maximum;
+ features |= QCamera::Feature::ManualExposureTime;
+ }
+
+ ::memset(&queryControl, 0, sizeof(queryControl));
+ queryControl.id = V4L2_CID_AUTO_EXPOSURE_BIAS;
+ if (m_v4l2FileDescriptor->call(VIDIOC_QUERYCTRL, &queryControl)) {
+ m_v4l2Info.minExposureAdjustment = queryControl.minimum;
+ m_v4l2Info.maxExposureAdjustment = queryControl.maximum;
+ features |= QCamera::Feature::ExposureCompensation;
+ }
+
+ ::memset(&queryControl, 0, sizeof(queryControl));
+ queryControl.id = V4L2_CID_ISO_SENSITIVITY_AUTO;
+ if (m_v4l2FileDescriptor->call(VIDIOC_QUERYCTRL, &queryControl)) {
+ queryControl.id = V4L2_CID_ISO_SENSITIVITY;
+ if (m_v4l2FileDescriptor->call(VIDIOC_QUERYCTRL, &queryControl)) {
+ features |= QCamera::Feature::IsoSensitivity;
+            minIsoChanged(queryControl.minimum);
+            maxIsoChanged(queryControl.maximum);
+ }
+ }
+
+ ::memset(&queryControl, 0, sizeof(queryControl));
+ queryControl.id = V4L2_CID_FOCUS_ABSOLUTE;
+ if (m_v4l2FileDescriptor->call(VIDIOC_QUERYCTRL, &queryControl)) {
+        m_v4l2Info.minFocus = queryControl.minimum;
+        m_v4l2Info.maxFocus = queryControl.maximum;
+ features |= QCamera::Feature::FocusDistance;
+ }
+
+ ::memset(&queryControl, 0, sizeof(queryControl));
+ queryControl.id = V4L2_CID_AUTO_FOCUS_RANGE;
+ if (m_v4l2FileDescriptor->call(VIDIOC_QUERYCTRL, &queryControl)) {
+ m_v4l2Info.rangedFocus = true;
+ }
+
+ ::memset(&queryControl, 0, sizeof(queryControl));
+ queryControl.id = V4L2_CID_FLASH_LED_MODE;
+ if (m_v4l2FileDescriptor->call(VIDIOC_QUERYCTRL, &queryControl)) {
+ m_v4l2Info.flashSupported = queryControl.minimum <= V4L2_FLASH_LED_MODE_FLASH
+ && queryControl.maximum >= V4L2_FLASH_LED_MODE_FLASH;
+ m_v4l2Info.torchSupported = queryControl.minimum <= V4L2_FLASH_LED_MODE_TORCH
+ && queryControl.maximum >= V4L2_FLASH_LED_MODE_TORCH;
+ }
+
+ ::memset(&queryControl, 0, sizeof(queryControl));
+ queryControl.id = V4L2_CID_ZOOM_ABSOLUTE;
+ if (m_v4l2FileDescriptor->call(VIDIOC_QUERYCTRL, &queryControl)) {
+ m_v4l2Info.minZoom = queryControl.minimum;
+ m_v4l2Info.maxZoom = queryControl.maximum;
+ }
+ // zoom factors are in arbitrary units, so we simply normalize them to go from 1 to 2
+ // if they are different
+ minimumZoomFactorChanged(1);
+ maximumZoomFactorChanged(m_v4l2Info.minZoom != m_v4l2Info.maxZoom ? 2 : 1);
+
+ supportedFeaturesChanged(features);
+}
+
+void QV4L2Camera::closeV4L2Fd()
+{
+ Q_ASSERT(!m_memoryTransfer);
+
+ m_v4l2Info = {};
+ m_cameraBusy = false;
+ m_v4l2FileDescriptor = nullptr;
+}
+
+int QV4L2Camera::setV4L2ColorTemperature(int temperature)
+{
+ struct v4l2_control control;
+ ::memset(&control, 0, sizeof(control));
+
+ if (m_v4l2Info.autoWhiteBalanceSupported) {
+        setV4L2Parameter(V4L2_CID_AUTO_WHITE_BALANCE, temperature == 0);
+ } else if (temperature == 0) {
+ temperature = 5600;
+ }
+
+ if (temperature != 0 && m_v4l2Info.colorTemperatureSupported) {
+ temperature = qBound(m_v4l2Info.minColorTemp, temperature, m_v4l2Info.maxColorTemp);
+        if (!setV4L2Parameter(V4L2_CID_WHITE_BALANCE_TEMPERATURE, temperature))
+ temperature = 0;
+ } else {
+ temperature = 0;
+ }
+
+ return temperature;
+}
+
+bool QV4L2Camera::setV4L2Parameter(quint32 id, qint32 value)
+{
+ v4l2_control control{ id, value };
+ if (!m_v4l2FileDescriptor->call(VIDIOC_S_CTRL, &control)) {
+ qWarning() << "Unable to set the V4L2 Parameter" << Qt::hex << id << "to" << value << qt_error_string(errno);
+ return false;
+ }
+ return true;
+}
+
+int QV4L2Camera::getV4L2Parameter(quint32 id) const
+{
+ struct v4l2_control control{id, 0};
+ if (!m_v4l2FileDescriptor->call(VIDIOC_G_CTRL, &control)) {
+ qWarning() << "Unable to get the V4L2 Parameter" << Qt::hex << id << qt_error_string(errno);
+ return 0;
+ }
+ return control.value;
+}
+
+void QV4L2Camera::setV4L2CameraFormat()
+{
+ if (m_v4l2Info.formatInitialized || !m_v4l2FileDescriptor)
+ return;
+
+ Q_ASSERT(!m_cameraFormat.isNull());
+ qCDebug(qLcV4L2Camera) << "XXXXX" << this << m_cameraDevice.id() << m_cameraFormat.pixelFormat()
+ << m_cameraFormat.resolution();
+
+ v4l2_format fmt = {};
+ fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ auto size = m_cameraFormat.resolution();
+ fmt.fmt.pix.width = size.width();
+ fmt.fmt.pix.height = size.height();
+ fmt.fmt.pix.pixelformat = v4l2FormatForPixelFormat(m_cameraFormat.pixelFormat());
+ fmt.fmt.pix.field = V4L2_FIELD_ANY;
+
+ qCDebug(qLcV4L2Camera) << "setting camera format to" << size << fmt.fmt.pix.pixelformat;
+
+ if (!m_v4l2FileDescriptor->call(VIDIOC_S_FMT, &fmt)) {
+ if (errno == EBUSY) {
+ setCameraBusy();
+ return;
+ }
+ qWarning() << "Couldn't set video format on v4l2 camera" << strerror(errno);
+ }
+
+ m_v4l2Info.formatInitialized = true;
+ m_cameraBusy = false;
+
+ m_bytesPerLine = fmt.fmt.pix.bytesperline;
+ m_imageSize = std::max(fmt.fmt.pix.sizeimage, m_bytesPerLine * fmt.fmt.pix.height);
+
+ switch (v4l2_colorspace(fmt.fmt.pix.colorspace)) {
+ default:
+ case V4L2_COLORSPACE_DCI_P3:
+ m_colorSpace = QVideoFrameFormat::ColorSpace_Undefined;
+ break;
+ case V4L2_COLORSPACE_REC709:
+ m_colorSpace = QVideoFrameFormat::ColorSpace_BT709;
+ break;
+ case V4L2_COLORSPACE_JPEG:
+ m_colorSpace = QVideoFrameFormat::ColorSpace_AdobeRgb;
+ break;
+ case V4L2_COLORSPACE_SRGB:
+ // ##### is this correct???
+ m_colorSpace = QVideoFrameFormat::ColorSpace_BT601;
+ break;
+ case V4L2_COLORSPACE_BT2020:
+ m_colorSpace = QVideoFrameFormat::ColorSpace_BT2020;
+ break;
+ }
+
+ v4l2_streamparm streamParam = {};
+ streamParam.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ streamParam.parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
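+    // V4L2 expects a time-per-frame fraction, i.e. the inverse of the frame rate.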
+ auto [num, den] = qRealToFraction(1./m_cameraFormat.maxFrameRate());
+ streamParam.parm.capture.timeperframe = { (uint)num, (uint)den };
+ m_v4l2FileDescriptor->call(VIDIOC_S_PARM, &streamParam);
+
+ m_frameDuration = 1000000 * streamParam.parm.capture.timeperframe.numerator
+ / streamParam.parm.capture.timeperframe.denominator;
+}
+
+void QV4L2Camera::initV4L2MemoryTransfer()
+{
+ if (m_cameraBusy)
+ return;
+
+ Q_ASSERT(!m_memoryTransfer);
+
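+    // Prefer V4L2_MEMORY_USERPTR, which lets the driver fill our QByteArrays directly;
+    // fall back to MMAP below if the driver rejects it.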
+ m_memoryTransfer = makeUserPtrMemoryTransfer(m_v4l2FileDescriptor, m_imageSize);
+
+ if (m_memoryTransfer)
+ return;
+
+ if (errno == EBUSY) {
+ setCameraBusy();
+ return;
+ }
+
+ qCDebug(qLcV4L2Camera) << "Cannot init V4L2_MEMORY_USERPTR; trying V4L2_MEMORY_MMAP";
+
+ m_memoryTransfer = makeMMapMemoryTransfer(m_v4l2FileDescriptor);
+
+ if (!m_memoryTransfer) {
+ qCWarning(qLcV4L2Camera) << "Cannot init v4l2 memory transfer," << qt_error_string(errno);
+ updateError(QCamera::CameraError, QLatin1String("Cannot init V4L2 memory transfer"));
+ }
+}
+
+void QV4L2Camera::stopCapturing()
+{
+ if (!m_memoryTransfer || !m_v4l2FileDescriptor)
+ return;
+
+ m_notifier = nullptr;
+
+ if (!m_v4l2FileDescriptor->stopStream()) {
+ // TODO: handle the case carefully to avoid possible memory corruption
+ if (errno != ENODEV)
+ qWarning() << "failed to stop capture";
+ }
+
+ m_memoryTransfer = nullptr;
+ m_cameraBusy = false;
+}
+
+void QV4L2Camera::startCapturing()
+{
+ if (!m_v4l2FileDescriptor)
+ return;
+
+ setV4L2CameraFormat();
+ initV4L2MemoryTransfer();
+
+ if (m_cameraBusy || !m_memoryTransfer)
+ return;
+
+ if (!m_v4l2FileDescriptor->startStream()) {
+ qWarning() << "Couldn't start v4l2 camera stream";
+ return;
+ }
+
+ m_notifier =
+ std::make_unique<QSocketNotifier>(m_v4l2FileDescriptor->get(), QSocketNotifier::Read);
+ connect(m_notifier.get(), &QSocketNotifier::activated, this, &QV4L2Camera::readFrame);
+
+ m_firstFrameTime = { -1, -1 };
+}
+
+QVideoFrameFormat QV4L2Camera::frameFormat() const
+{
+ auto result = QPlatformCamera::frameFormat();
+ result.setColorSpace(m_colorSpace);
+ return result;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qv4l2camera_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/qv4l2camera_p.h b/src/plugins/multimedia/ffmpeg/qv4l2camera_p.h
new file mode 100644
index 000000000..3033f5ff9
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qv4l2camera_p.h
@@ -0,0 +1,133 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QV4L2CAMERA_H
+#define QV4L2CAMERA_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformcamera_p.h>
+#include <sys/time.h>
+
+QT_BEGIN_NAMESPACE
+
+class QV4L2FileDescriptor;
+class QV4L2MemoryTransfer;
+class QSocketNotifier;
+
+struct V4L2CameraInfo
+{
+ bool formatInitialized = false;
+
+ bool autoWhiteBalanceSupported = false;
+ bool colorTemperatureSupported = false;
+ bool autoExposureSupported = false;
+ bool manualExposureSupported = false;
+ bool flashSupported = false;
+ bool torchSupported = false;
+ qint32 minColorTemp = 5600; // Daylight...
+ qint32 maxColorTemp = 5600;
+ qint32 minExposure = 0;
+ qint32 maxExposure = 0;
+ qint32 minExposureAdjustment = 0;
+ qint32 maxExposureAdjustment = 0;
+ qint32 minFocus = 0;
+ qint32 maxFocus = 0;
+    bool rangedFocus = false;
+
+ int minZoom = 0;
+ int maxZoom = 0;
+};
+
+QVideoFrameFormat::PixelFormat formatForV4L2Format(uint32_t v4l2Format);
+uint32_t v4l2FormatForPixelFormat(QVideoFrameFormat::PixelFormat format);
+
+class QV4L2Camera : public QPlatformCamera
+{
+ Q_OBJECT
+
+public:
+ explicit QV4L2Camera(QCamera *parent);
+ ~QV4L2Camera();
+
+ bool isActive() const override;
+ void setActive(bool active) override;
+
+ void setCamera(const QCameraDevice &camera) override;
+ bool setCameraFormat(const QCameraFormat &format) override;
+ bool resolveCameraFormat(const QCameraFormat &format);
+
+ bool isFocusModeSupported(QCamera::FocusMode mode) const override;
+ void setFocusMode(QCamera::FocusMode /*mode*/) override;
+
+// void setCustomFocusPoint(const QPointF &/*point*/) override;
+ void setFocusDistance(float) override;
+ void zoomTo(float /*newZoomFactor*/, float /*rate*/ = -1.) override;
+
+ void setFlashMode(QCamera::FlashMode /*mode*/) override;
+ bool isFlashModeSupported(QCamera::FlashMode mode) const override;
+ bool isFlashReady() const override;
+
+ void setTorchMode(QCamera::TorchMode /*mode*/) override;
+ bool isTorchModeSupported(QCamera::TorchMode mode) const override;
+
+ void setExposureMode(QCamera::ExposureMode) override;
+ bool isExposureModeSupported(QCamera::ExposureMode mode) const override;
+ void setExposureCompensation(float) override;
+ int isoSensitivity() const override;
+ void setManualIsoSensitivity(int) override;
+ void setManualExposureTime(float) override;
+ float exposureTime() const override;
+
+ bool isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const override;
+ void setWhiteBalanceMode(QCamera::WhiteBalanceMode /*mode*/) override;
+ void setColorTemperature(int /*temperature*/) override;
+
+ QVideoFrameFormat frameFormat() const override;
+
+private Q_SLOTS:
+ void readFrame();
+
+private:
+ void setCameraBusy();
+ void initV4L2Controls();
+ void closeV4L2Fd();
+ int setV4L2ColorTemperature(int temperature);
+ bool setV4L2Parameter(quint32 id, qint32 value);
+ int getV4L2Parameter(quint32 id) const;
+
+ void setV4L2CameraFormat();
+ void initV4L2MemoryTransfer();
+ void startCapturing();
+ void stopCapturing();
+
+private:
+ bool m_active = false;
+ QCameraDevice m_cameraDevice;
+
+ std::unique_ptr<QSocketNotifier> m_notifier;
+ std::unique_ptr<QV4L2MemoryTransfer> m_memoryTransfer;
+ std::shared_ptr<QV4L2FileDescriptor> m_v4l2FileDescriptor;
+
+ V4L2CameraInfo m_v4l2Info;
+
+ timeval m_firstFrameTime = { -1, -1 };
+ quint32 m_bytesPerLine = 0;
+ quint32 m_imageSize = 0;
+ QVideoFrameFormat::ColorSpace m_colorSpace = QVideoFrameFormat::ColorSpace_Undefined;
+ qint64 m_frameDuration = -1;
+ bool m_cameraBusy = false;
+};
+
+QT_END_NAMESPACE
+
+#endif // QV4L2CAMERA_H
diff --git a/src/plugins/multimedia/ffmpeg/qv4l2cameradevices.cpp b/src/plugins/multimedia/ffmpeg/qv4l2cameradevices.cpp
new file mode 100644
index 000000000..e450cf7bc
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qv4l2cameradevices.cpp
@@ -0,0 +1,182 @@
+// Copyright (C) 2023 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qv4l2cameradevices_p.h"
+#include "qv4l2filedescriptor_p.h"
+#include "qv4l2camera_p.h"
+
+#include <private/qcameradevice_p.h>
+#include <private/qcore_unix_p.h>
+
+#include <qdir.h>
+#include <qfile.h>
+#include <qdebug.h>
+#include <qloggingcategory.h>
+
+#include <linux/videodev2.h>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcV4L2CameraDevices, "qt.multimedia.ffmpeg.v4l2cameradevices");
+
+static bool areCamerasEqual(QList<QCameraDevice> a, QList<QCameraDevice> b)
+{
+ auto areCamerasDataEqual = [](const QCameraDevice &a, const QCameraDevice &b) {
+ Q_ASSERT(QCameraDevicePrivate::handle(a));
+ Q_ASSERT(QCameraDevicePrivate::handle(b));
+ return *QCameraDevicePrivate::handle(a) == *QCameraDevicePrivate::handle(b);
+ };
+
+ return std::equal(a.cbegin(), a.cend(), b.cbegin(), b.cend(), areCamerasDataEqual);
+}
+
+QV4L2CameraDevices::QV4L2CameraDevices(QPlatformMediaIntegration *integration)
+ : QPlatformVideoDevices(integration)
+{
+ m_deviceWatcher.addPath(QLatin1String("/dev"));
+ connect(&m_deviceWatcher, &QFileSystemWatcher::directoryChanged, this,
+ &QV4L2CameraDevices::checkCameras);
+ doCheckCameras();
+}
+
+QList<QCameraDevice> QV4L2CameraDevices::videoDevices() const
+{
+ return m_cameras;
+}
+
+void QV4L2CameraDevices::checkCameras()
+{
+ if (doCheckCameras())
+ emit videoInputsChanged();
+}
+
+bool QV4L2CameraDevices::doCheckCameras()
+{
+ QList<QCameraDevice> newCameras;
+
+ QDir dir(QLatin1String("/dev"));
+ const auto devices = dir.entryList(QDir::System);
+
+ bool first = true;
+
+ for (auto device : devices) {
+ if (!device.startsWith(QLatin1String("video")))
+ continue;
+
+ QByteArray file = QFile::encodeName(dir.filePath(device));
+ const int fd = open(file.constData(), O_RDONLY);
+ if (fd < 0)
+ continue;
+
+ auto fileCloseGuard = qScopeGuard([fd]() { close(fd); });
+
+ v4l2_fmtdesc formatDesc = {};
+
+ struct v4l2_capability cap;
+ if (xioctl(fd, VIDIOC_QUERYCAP, &cap) < 0)
+ continue;
+
+ if (cap.device_caps & V4L2_CAP_META_CAPTURE)
+ continue;
+ if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
+ continue;
+ if (!(cap.capabilities & V4L2_CAP_STREAMING))
+ continue;
+
+ auto camera = std::make_unique<QCameraDevicePrivate>();
+
+ camera->id = file;
+ camera->description = QString::fromUtf8((const char *)cap.card);
+ qCDebug(qLcV4L2CameraDevices) << "found camera" << camera->id << camera->description;
+
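+        // Enumerate every pixel format the device offers; each VIDIOC_ENUM_* loop below
+        // runs until the ioctl fails once the index passes the last entry.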
+ formatDesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ while (!xioctl(fd, VIDIOC_ENUM_FMT, &formatDesc)) {
+ auto pixelFmt = formatForV4L2Format(formatDesc.pixelformat);
+ qCDebug(qLcV4L2CameraDevices) << " " << pixelFmt;
+
+ if (pixelFmt == QVideoFrameFormat::Format_Invalid) {
+ ++formatDesc.index;
+ continue;
+ }
+
+ qCDebug(qLcV4L2CameraDevices) << "frame sizes:";
+ v4l2_frmsizeenum frameSize = {};
+ frameSize.pixel_format = formatDesc.pixelformat;
+
+ while (!xioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frameSize)) {
+ QList<QSize> resolutions;
+ if (frameSize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
+ resolutions.append(QSize(frameSize.discrete.width,
+ frameSize.discrete.height));
+ } else {
+ resolutions.append(QSize(frameSize.stepwise.max_width,
+ frameSize.stepwise.max_height));
+ resolutions.append(QSize(frameSize.stepwise.min_width,
+ frameSize.stepwise.min_height));
+ }
+
+ for (auto resolution : resolutions) {
+ float min = 1e10;
+ float max = 0;
+ auto updateMaxMinFrameRate = [&max, &min](auto discreteFrameRate) {
+ const float rate = float(discreteFrameRate.denominator)
+ / float(discreteFrameRate.numerator);
+ if (rate > max)
+ max = rate;
+ if (rate < min)
+ min = rate;
+ };
+
+ v4l2_frmivalenum frameInterval = {};
+ frameInterval.pixel_format = formatDesc.pixelformat;
+ frameInterval.width = resolution.width();
+ frameInterval.height = resolution.height();
+
+ while (!xioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frameInterval)) {
+ if (frameInterval.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
+ updateMaxMinFrameRate(frameInterval.discrete);
+ } else {
+ updateMaxMinFrameRate(frameInterval.stepwise.max);
+ updateMaxMinFrameRate(frameInterval.stepwise.min);
+ }
+ ++frameInterval.index;
+ }
+
+ qCDebug(qLcV4L2CameraDevices) << " " << resolution << min << max;
+
+ if (min <= max) {
+ auto fmt = std::make_unique<QCameraFormatPrivate>();
+ fmt->pixelFormat = pixelFmt;
+ fmt->resolution = resolution;
+ fmt->minFrameRate = min;
+ fmt->maxFrameRate = max;
+ camera->videoFormats.append(fmt.release()->create());
+ camera->photoResolutions.append(resolution);
+ }
+ }
+ ++frameSize.index;
+ }
+ ++formatDesc.index;
+ }
+
+ if (camera->videoFormats.empty())
+ continue;
+
+ // first camera is default
+ camera->isDefault = std::exchange(first, false);
+
+ newCameras.append(camera.release()->create());
+ }
+
+ if (areCamerasEqual(m_cameras, newCameras))
+ return false;
+
+ m_cameras = std::move(newCameras);
+ return true;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qv4l2cameradevices_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/qv4l2cameradevices_p.h b/src/plugins/multimedia/ffmpeg/qv4l2cameradevices_p.h
new file mode 100644
index 000000000..ce424d3b6
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qv4l2cameradevices_p.h
@@ -0,0 +1,46 @@
+// Copyright (C) 2023 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QV4L2CAMERADEVICES_P_H
+#define QV4L2CAMERADEVICES_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformvideodevices_p.h>
+#include <private/qplatformmediaintegration_p.h>
+
+#include <qfilesystemwatcher.h>
+
+QT_BEGIN_NAMESPACE
+
+class QV4L2CameraDevices : public QPlatformVideoDevices
+{
+ Q_OBJECT
+public:
+ QV4L2CameraDevices(QPlatformMediaIntegration *integration);
+
+ QList<QCameraDevice> videoDevices() const override;
+
+public Q_SLOTS:
+ void checkCameras();
+
+private:
+ bool doCheckCameras();
+
+private:
+ QList<QCameraDevice> m_cameras;
+ QFileSystemWatcher m_deviceWatcher;
+};
+
+QT_END_NAMESPACE
+
+#endif // QV4L2CAMERADEVICES_P_H
diff --git a/src/plugins/multimedia/ffmpeg/qv4l2filedescriptor.cpp b/src/plugins/multimedia/ffmpeg/qv4l2filedescriptor.cpp
new file mode 100644
index 000000000..7f7b099c7
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qv4l2filedescriptor.cpp
@@ -0,0 +1,71 @@
+// Copyright (C) 2023 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qv4l2filedescriptor_p.h"
+
+#include <sys/ioctl.h>
+#include <private/qcore_unix_p.h>
+
+#include <linux/videodev2.h>
+
+QT_BEGIN_NAMESPACE
+
+int xioctl(int fd, int request, void *arg)
+{
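+    // Retry as long as the ioctl is interrupted by a signal (EINTR).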
+ int res;
+
+ do {
+ res = ::ioctl(fd, request, arg);
+ } while (res == -1 && EINTR == errno);
+
+ return res;
+}
+
+QV4L2FileDescriptor::QV4L2FileDescriptor(int descriptor) : m_descriptor(descriptor)
+{
+ Q_ASSERT(descriptor >= 0);
+}
+
+QV4L2FileDescriptor::~QV4L2FileDescriptor()
+{
+ qt_safe_close(m_descriptor);
+}
+
+bool QV4L2FileDescriptor::call(int request, void *arg) const
+{
+ return ::xioctl(m_descriptor, request, arg) >= 0;
+}
+
+bool QV4L2FileDescriptor::requestBuffers(quint32 memoryType, quint32 &buffersCount) const
+{
+ v4l2_requestbuffers req = {};
+ req.count = buffersCount;
+ req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ req.memory = memoryType;
+
+ if (!call(VIDIOC_REQBUFS, &req))
+ return false;
+
+ buffersCount = req.count;
+ return true;
+}
+
+bool QV4L2FileDescriptor::startStream()
+{
+ int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (!call(VIDIOC_STREAMON, &type))
+ return false;
+
+ m_streamStarted = true;
+ return true;
+}
+
+bool QV4L2FileDescriptor::stopStream()
+{
+ int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ auto result = call(VIDIOC_STREAMOFF, &type);
+ m_streamStarted = false;
+ return result;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qv4l2filedescriptor_p.h b/src/plugins/multimedia/ffmpeg/qv4l2filedescriptor_p.h
new file mode 100644
index 000000000..1058c7a82
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qv4l2filedescriptor_p.h
@@ -0,0 +1,50 @@
+// Copyright (C) 2023 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QV4L2FILEDESCRIPTOR_P_H
+#define QV4L2FILEDESCRIPTOR_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qtmultimediaglobal_p.h>
+
+QT_BEGIN_NAMESPACE
+
+int xioctl(int fd, int request, void *arg);
+
+class QV4L2FileDescriptor
+{
+public:
+ QV4L2FileDescriptor(int descriptor);
+
+ ~QV4L2FileDescriptor();
+
+ bool call(int request, void *arg) const;
+
+ int get() const { return m_descriptor; }
+
+ bool requestBuffers(quint32 memoryType, quint32 &buffersCount) const;
+
+ bool startStream();
+
+ bool stopStream();
+
+ bool streamStarted() const { return m_streamStarted; }
+
+private:
+ int m_descriptor;
+ bool m_streamStarted = false;
+};
+
+QT_END_NAMESPACE
+
+#endif // QV4L2FILEDESCRIPTOR_P_H
diff --git a/src/plugins/multimedia/ffmpeg/qv4l2memorytransfer.cpp b/src/plugins/multimedia/ffmpeg/qv4l2memorytransfer.cpp
new file mode 100644
index 000000000..32ee4f8f8
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qv4l2memorytransfer.cpp
@@ -0,0 +1,223 @@
+// Copyright (C) 2023 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qv4l2memorytransfer_p.h"
+#include "qv4l2filedescriptor_p.h"
+
+#include <qloggingcategory.h>
+#include <qdebug.h>
+#include <sys/mman.h>
+#include <optional>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcV4L2MemoryTransfer, "qt.multimedia.ffmpeg.v4l2camera.memorytransfer");
+
+namespace {
+
+v4l2_buffer makeV4l2Buffer(quint32 memoryType, quint32 index = 0)
+{
+ v4l2_buffer buf = {};
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = memoryType;
+ buf.index = index;
+ return buf;
+}
+
+class UserPtrMemoryTransfer : public QV4L2MemoryTransfer
+{
+public:
+ static QV4L2MemoryTransferUPtr create(QV4L2FileDescriptorPtr fileDescriptor, quint32 imageSize)
+ {
+ quint32 buffersCount = 2;
+ if (!fileDescriptor->requestBuffers(V4L2_MEMORY_USERPTR, buffersCount)) {
+ qCWarning(qLcV4L2MemoryTransfer) << "Cannot request V4L2_MEMORY_USERPTR buffers";
+ return {};
+ }
+
+ std::unique_ptr<UserPtrMemoryTransfer> result(
+ new UserPtrMemoryTransfer(std::move(fileDescriptor), buffersCount, imageSize));
+
+ return result->enqueueBuffers() ? std::move(result) : nullptr;
+ }
+
+ std::optional<Buffer> dequeueBuffer() override
+ {
+ auto v4l2Buffer = makeV4l2Buffer(V4L2_MEMORY_USERPTR);
+ if (!fileDescriptor().call(VIDIOC_DQBUF, &v4l2Buffer))
+ return {};
+
+ Q_ASSERT(v4l2Buffer.index < m_byteArrays.size());
+ Q_ASSERT(!m_byteArrays[v4l2Buffer.index].isEmpty());
+
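+        // Hand the byte array over to the caller; enqueueBuffer() allocates a fresh one for this slot.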
+ return Buffer{ v4l2Buffer, std::move(m_byteArrays[v4l2Buffer.index]) };
+ }
+
+ bool enqueueBuffer(quint32 index) override
+ {
+ Q_ASSERT(index < m_byteArrays.size());
+ Q_ASSERT(m_byteArrays[index].isEmpty());
+
+ auto buf = makeV4l2Buffer(V4L2_MEMORY_USERPTR, index);
+        static_assert(sizeof(decltype(buf.m.userptr)) == sizeof(size_t), "Incompatible sizes");
+
+ m_byteArrays[index] = QByteArray(static_cast<int>(m_imageSize), Qt::Uninitialized);
+
+ buf.m.userptr = (decltype(buf.m.userptr))m_byteArrays[index].data();
+ buf.length = m_byteArrays[index].size();
+
+ if (!fileDescriptor().call(VIDIOC_QBUF, &buf)) {
+ qWarning() << "Couldn't add V4L2 buffer" << errno << strerror(errno) << index;
+ return false;
+ }
+
+ return true;
+ }
+
+ quint32 buffersCount() const override { return static_cast<quint32>(m_byteArrays.size()); }
+
+private:
+ UserPtrMemoryTransfer(QV4L2FileDescriptorPtr fileDescriptor, quint32 buffersCount,
+ quint32 imageSize)
+ : QV4L2MemoryTransfer(std::move(fileDescriptor)),
+ m_imageSize(imageSize),
+ m_byteArrays(buffersCount)
+ {
+ }
+
+private:
+ quint32 m_imageSize;
+ std::vector<QByteArray> m_byteArrays;
+};
+
+class MMapMemoryTransfer : public QV4L2MemoryTransfer
+{
+public:
+ struct MemorySpan
+ {
+ void *data = nullptr;
+ size_t size = 0;
+ bool inQueue = false;
+ };
+
+ static QV4L2MemoryTransferUPtr create(QV4L2FileDescriptorPtr fileDescriptor)
+ {
+ quint32 buffersCount = 2;
+ if (!fileDescriptor->requestBuffers(V4L2_MEMORY_MMAP, buffersCount)) {
+ qCWarning(qLcV4L2MemoryTransfer) << "Cannot request V4L2_MEMORY_MMAP buffers";
+ return {};
+ }
+
+ std::unique_ptr<MMapMemoryTransfer> result(
+ new MMapMemoryTransfer(std::move(fileDescriptor)));
+
+ return result->init(buffersCount) ? std::move(result) : nullptr;
+ }
+
+ bool init(quint32 buffersCount)
+ {
+ for (quint32 index = 0; index < buffersCount; ++index) {
+ auto buf = makeV4l2Buffer(V4L2_MEMORY_MMAP, index);
+
+ if (!fileDescriptor().call(VIDIOC_QUERYBUF, &buf)) {
+ qWarning() << "Can't map buffer" << index;
+ return false;
+ }
+
+ auto mappedData = mmap(nullptr, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED,
+ fileDescriptor().get(), buf.m.offset);
+
+ if (mappedData == MAP_FAILED) {
+ qWarning() << "mmap failed" << index << buf.length << buf.m.offset;
+ return false;
+ }
+
+ m_spans.push_back(MemorySpan{ mappedData, buf.length, false });
+ }
+
+ m_spans.shrink_to_fit();
+
+ return enqueueBuffers();
+ }
+
+ ~MMapMemoryTransfer() override
+ {
+ for (const auto &span : m_spans)
+ munmap(span.data, span.size);
+ }
+
+ std::optional<Buffer> dequeueBuffer() override
+ {
+ auto v4l2Buffer = makeV4l2Buffer(V4L2_MEMORY_MMAP);
+ if (!fileDescriptor().call(VIDIOC_DQBUF, &v4l2Buffer))
+ return {};
+
+ const auto index = v4l2Buffer.index;
+
+ Q_ASSERT(index < m_spans.size());
+
+ auto &span = m_spans[index];
+
+ Q_ASSERT(span.inQueue);
+ span.inQueue = false;
+
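+        // Deep-copy the mapped region so the returned data stays valid once the buffer is re-queued.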
+ return Buffer{ v4l2Buffer,
+ QByteArray(reinterpret_cast<const char *>(span.data), span.size) };
+ }
+
+ bool enqueueBuffer(quint32 index) override
+ {
+ Q_ASSERT(index < m_spans.size());
+ Q_ASSERT(!m_spans[index].inQueue);
+
+ auto buf = makeV4l2Buffer(V4L2_MEMORY_MMAP, index);
+ if (!fileDescriptor().call(VIDIOC_QBUF, &buf))
+ return false;
+
+ m_spans[index].inQueue = true;
+ return true;
+ }
+
+ quint32 buffersCount() const override { return static_cast<quint32>(m_spans.size()); }
+
+private:
+ using QV4L2MemoryTransfer::QV4L2MemoryTransfer;
+
+private:
+ std::vector<MemorySpan> m_spans;
+};
+} // namespace
+
+QV4L2MemoryTransfer::QV4L2MemoryTransfer(QV4L2FileDescriptorPtr fileDescriptor)
+ : m_fileDescriptor(std::move(fileDescriptor))
+{
+ Q_ASSERT(m_fileDescriptor);
+ Q_ASSERT(!m_fileDescriptor->streamStarted());
+}
+
+QV4L2MemoryTransfer::~QV4L2MemoryTransfer()
+{
+ Q_ASSERT(!m_fileDescriptor->streamStarted()); // to avoid possible corruptions
+}
+
+bool QV4L2MemoryTransfer::enqueueBuffers()
+{
+ for (quint32 i = 0; i < buffersCount(); ++i)
+ if (!enqueueBuffer(i))
+ return false;
+
+ return true;
+}
+
+QV4L2MemoryTransferUPtr makeUserPtrMemoryTransfer(QV4L2FileDescriptorPtr fileDescriptor,
+ quint32 imageSize)
+{
+ return UserPtrMemoryTransfer::create(std::move(fileDescriptor), imageSize);
+}
+
+QV4L2MemoryTransferUPtr makeMMapMemoryTransfer(QV4L2FileDescriptorPtr fileDescriptor)
+{
+ return MMapMemoryTransfer::create(std::move(fileDescriptor));
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qv4l2memorytransfer_p.h b/src/plugins/multimedia/ffmpeg/qv4l2memorytransfer_p.h
new file mode 100644
index 000000000..6b5e3913f
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qv4l2memorytransfer_p.h
@@ -0,0 +1,66 @@
+// Copyright (C) 2023 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QV4L2MEMORYTRANSFER_P_H
+#define QV4L2MEMORYTRANSFER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qtmultimediaglobal_p.h>
+#include <qbytearray.h>
+#include <linux/videodev2.h>
+
+#include <memory>
+#include <optional>
+
+QT_BEGIN_NAMESPACE
+
+class QV4L2FileDescriptor;
+using QV4L2FileDescriptorPtr = std::shared_ptr<QV4L2FileDescriptor>;
+
+class QV4L2MemoryTransfer
+{
+public:
+ struct Buffer
+ {
+ v4l2_buffer v4l2Buffer = {};
+ QByteArray data;
+ };
+
+ QV4L2MemoryTransfer(QV4L2FileDescriptorPtr fileDescriptor);
+
+ virtual ~QV4L2MemoryTransfer();
+
+ virtual std::optional<Buffer> dequeueBuffer() = 0;
+
+ virtual bool enqueueBuffer(quint32 index) = 0;
+
+ virtual quint32 buffersCount() const = 0;
+
+protected:
+ bool enqueueBuffers();
+
+ const QV4L2FileDescriptor &fileDescriptor() const { return *m_fileDescriptor; }
+
+private:
+ QV4L2FileDescriptorPtr m_fileDescriptor;
+};
+
+using QV4L2MemoryTransferUPtr = std::unique_ptr<QV4L2MemoryTransfer>;
+
+QV4L2MemoryTransferUPtr makeUserPtrMemoryTransfer(QV4L2FileDescriptorPtr fileDescriptor,
+ quint32 imageSize);
+
+QV4L2MemoryTransferUPtr makeMMapMemoryTransfer(QV4L2FileDescriptorPtr fileDescriptor);
+
+QT_END_NAMESPACE
+
+#endif // QV4L2MEMORYTRANSFER_P_H
diff --git a/src/plugins/multimedia/ffmpeg/qwincapturablewindows.cpp b/src/plugins/multimedia/ffmpeg/qwincapturablewindows.cpp
new file mode 100644
index 000000000..aac77aec4
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qwincapturablewindows.cpp
@@ -0,0 +1,74 @@
+// Copyright (C) 2023 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwincapturablewindows_p.h"
+#include "private/qcapturablewindow_p.h"
+
+#include <qt_windows.h>
+
+QT_BEGIN_NAMESPACE
+
+static bool isTopLevelWindow(HWND hwnd)
+{
+ return hwnd && ::GetAncestor(hwnd, GA_ROOT) == hwnd;
+}
+
+static bool canCaptureWindow(HWND hwnd)
+{
+ Q_ASSERT(hwnd);
+
+ if (!::IsWindowVisible(hwnd))
+ return false;
+
+ RECT rect{};
+ if (!::GetWindowRect(hwnd, &rect))
+ return false;
+
+ if (rect.left >= rect.right || rect.top >= rect.bottom)
+ return false;
+
+ return true;
+}
+
+static QString windowTitle(HWND hwnd) {
+ // QTBUG-114890
+    // TODO: investigate the case where hwnd is an inner window belonging to another thread.
+    // It might cause deadlocks in specific cases.
+ auto titleLength = ::GetWindowTextLengthW(hwnd);
+ std::wstring buffer(titleLength + 1, L'\0');
+ titleLength = ::GetWindowTextW(hwnd, buffer.data(), titleLength + 1);
+ buffer.resize(titleLength);
+
+ return QString::fromStdWString(buffer);
+}
+
+QList<QCapturableWindow> QWinCapturableWindows::windows() const
+{
+ QList<QCapturableWindow> result;
+
+ auto windowHandler = [](HWND hwnd, LPARAM lParam) {
+ if (!canCaptureWindow(hwnd))
+ return TRUE; // Ignore window and continue enumerating
+
+ auto& windows = *reinterpret_cast<QList<QCapturableWindow>*>(lParam);
+
+ auto windowData = std::make_unique<QCapturableWindowPrivate>();
+ windowData->id = reinterpret_cast<QCapturableWindowPrivate::Id>(hwnd);
+ windowData->description = windowTitle(hwnd);
+ windows.push_back(windowData.release()->create());
+
+ return TRUE;
+ };
+
+ ::EnumWindows(windowHandler, reinterpret_cast<LPARAM>(&result));
+
+ return result;
+}
+
+bool QWinCapturableWindows::isWindowValid(const QCapturableWindowPrivate &window) const
+{
+ const auto hwnd = reinterpret_cast<HWND>(window.id);
+ return isTopLevelWindow(hwnd) && canCaptureWindow(hwnd);
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qwincapturablewindows_p.h b/src/plugins/multimedia/ffmpeg/qwincapturablewindows_p.h
new file mode 100644
index 000000000..1e38708ef
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qwincapturablewindows_p.h
@@ -0,0 +1,32 @@
+// Copyright (C) 2023 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWINCAPTURABLEWINDOWS_P_H
+#define QWINCAPTURABLEWINDOWS_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "private/qplatformcapturablewindows_p.h"
+
+QT_BEGIN_NAMESPACE
+
+class QWinCapturableWindows : public QPlatformCapturableWindows
+{
+public:
+ QList<QCapturableWindow> windows() const override;
+
+ bool isWindowValid(const QCapturableWindowPrivate &window) const override;
+};
+
+QT_END_NAMESPACE
+
+#endif // QWINCAPTURABLEWINDOWS_P_H
diff --git a/src/plugins/multimedia/ffmpeg/qwindowscamera.cpp b/src/plugins/multimedia/ffmpeg/qwindowscamera.cpp
new file mode 100644
index 000000000..61a4ebe52
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qwindowscamera.cpp
@@ -0,0 +1,330 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwindowscamera_p.h"
+#include "qsemaphore.h"
+#include "qmutex.h"
+
+#include <private/qmemoryvideobuffer_p.h>
+#include <private/qwindowsmfdefs_p.h>
+#include <private/qwindowsmultimediautils_p.h>
+#include <private/qvideoframe_p.h>
+#include <private/qcomobject_p.h>
+
+#include <mfapi.h>
+#include <mfidl.h>
+#include <mferror.h>
+#include <mfreadwrite.h>
+
+#include <system_error>
+
+QT_BEGIN_NAMESPACE
+
+using namespace QWindowsMultimediaUtils;
+
+class CameraReaderCallback : public QComObject<IMFSourceReaderCallback>
+{
+public:
+ //from IMFSourceReaderCallback
+ STDMETHODIMP OnReadSample(HRESULT status, DWORD, DWORD, LONGLONG timestamp, IMFSample *sample) override;
+ STDMETHODIMP OnFlush(DWORD) override;
+ STDMETHODIMP OnEvent(DWORD, IMFMediaEvent *) override { return S_OK; }
+
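+    // m_mutex serializes callback delivery (on a Media Foundation worker thread) with camera teardown.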
+ void setActiveCamera(ActiveCamera *activeCamera)
+ {
+ QMutexLocker locker(&m_mutex);
+ m_activeCamera = activeCamera;
+ }
+private:
+ // Destructor is not public. Caller should call Release.
+ ~CameraReaderCallback() override = default;
+
+ ActiveCamera *m_activeCamera = nullptr;
+ QMutex m_mutex;
+};
+
+static ComPtr<IMFSourceReader> createCameraReader(IMFMediaSource *mediaSource,
+ const ComPtr<CameraReaderCallback> &callback)
+{
+ ComPtr<IMFSourceReader> sourceReader;
+ ComPtr<IMFAttributes> readerAttributes;
+
+ HRESULT hr = MFCreateAttributes(readerAttributes.GetAddressOf(), 1);
+ if (SUCCEEDED(hr)) {
+ hr = readerAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, callback.Get());
+ if (SUCCEEDED(hr)) {
+ hr = MFCreateSourceReaderFromMediaSource(mediaSource, readerAttributes.Get(), sourceReader.GetAddressOf());
+ if (SUCCEEDED(hr))
+ return sourceReader;
+ }
+ }
+
+ qWarning() << "Failed to create camera IMFSourceReader" << hr;
+ return sourceReader;
+}
+
+static ComPtr<IMFMediaSource> createCameraSource(const QString &deviceId)
+{
+ ComPtr<IMFMediaSource> mediaSource;
+ ComPtr<IMFAttributes> sourceAttributes;
+ HRESULT hr = MFCreateAttributes(sourceAttributes.GetAddressOf(), 2);
+ if (SUCCEEDED(hr)) {
+ hr = sourceAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, QMM_MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
+ if (SUCCEEDED(hr)) {
+ hr = sourceAttributes->SetString(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK,
+ reinterpret_cast<LPCWSTR>(deviceId.utf16()));
+ if (SUCCEEDED(hr)) {
+ hr = MFCreateDeviceSource(sourceAttributes.Get(), mediaSource.GetAddressOf());
+ if (SUCCEEDED(hr))
+ return mediaSource;
+ }
+ }
+ }
+ qWarning() << "Failed to create camera IMFMediaSource" << hr;
+ return mediaSource;
+}
+
+static int calculateVideoFrameStride(IMFMediaType *videoType, int width)
+{
+ Q_ASSERT(videoType);
+
+ GUID subtype = GUID_NULL;
+ HRESULT hr = videoType->GetGUID(MF_MT_SUBTYPE, &subtype);
+ if (SUCCEEDED(hr)) {
+ LONG stride = 0;
+ hr = MFGetStrideForBitmapInfoHeader(subtype.Data1, width, &stride);
+ if (SUCCEEDED(hr))
+ return int(qAbs(stride));
+ }
+
+ qWarning() << "Failed to calculate video stride" << errorString(hr);
+ return 0;
+}
+
+static bool setCameraReaderFormat(IMFSourceReader *sourceReader, IMFMediaType *videoType)
+{
+ Q_ASSERT(sourceReader);
+ Q_ASSERT(videoType);
+
+ HRESULT hr = sourceReader->SetCurrentMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM, nullptr,
+ videoType);
+ if (FAILED(hr))
+ qWarning() << "Failed to set video format" << errorString(hr);
+
+ return SUCCEEDED(hr);
+}
+
+static ComPtr<IMFMediaType> findVideoType(IMFSourceReader *reader,
+ const QCameraFormat &format)
+{
+ for (DWORD i = 0;; ++i) {
+ ComPtr<IMFMediaType> candidate;
+ HRESULT hr = reader->GetNativeMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM, i,
+ candidate.GetAddressOf());
+ if (FAILED(hr))
+ break;
+
+ GUID subtype = GUID_NULL;
+ if (FAILED(candidate->GetGUID(MF_MT_SUBTYPE, &subtype)))
+ continue;
+
+ if (format.pixelFormat() != pixelFormatFromMediaSubtype(subtype))
+ continue;
+
+ UINT32 width = 0u;
+ UINT32 height = 0u;
+ if (FAILED(MFGetAttributeSize(candidate.Get(), MF_MT_FRAME_SIZE, &width, &height)))
+ continue;
+
+ if (format.resolution() != QSize{ int(width), int(height) })
+ continue;
+
+ return candidate;
+ }
+ return {};
+}
+
+class ActiveCamera {
+public:
+ static std::unique_ptr<ActiveCamera> create(QWindowsCamera &wc, const QCameraDevice &device, const QCameraFormat &format)
+ {
+ auto ac = std::unique_ptr<ActiveCamera>(new ActiveCamera(wc));
+ ac->m_source = createCameraSource(device.id());
+ if (!ac->m_source)
+ return {};
+
+ ac->m_readerCallback = makeComObject<CameraReaderCallback>();
+ ac->m_readerCallback->setActiveCamera(ac.get());
+ ac->m_reader = createCameraReader(ac->m_source.Get(), ac->m_readerCallback);
+ if (!ac->m_reader)
+ return {};
+
+ if (!ac->setFormat(format))
+ return {};
+
+ return ac;
+ }
+
+ bool setFormat(const QCameraFormat &format)
+ {
+ flush();
+
+ auto videoType = findVideoType(m_reader.Get(), format);
+ if (videoType) {
+ if (setCameraReaderFormat(m_reader.Get(), videoType.Get())) {
+ m_frameFormat = { format.resolution(), format.pixelFormat() };
+ m_videoFrameStride =
+ calculateVideoFrameStride(videoType.Get(), format.resolution().width());
+ }
+ }
+
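+        // Kick off the asynchronous capture loop; onReadSample() queues the next read after each sample.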
+ m_reader->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, nullptr, nullptr, nullptr,
+ nullptr);
+ return true;
+ }
+
+ void onReadSample(HRESULT status, LONGLONG timestamp, IMFSample *sample)
+ {
+ if (FAILED(status)) {
+ const std::string msg{ std::system_category().message(status) };
+ m_windowsCamera.updateError(QCamera::CameraError, QString::fromStdString(msg));
+ return;
+ }
+
+ if (sample) {
+ ComPtr<IMFMediaBuffer> mediaBuffer;
+ if (SUCCEEDED(sample->ConvertToContiguousBuffer(mediaBuffer.GetAddressOf()))) {
+
+ DWORD bufLen = 0;
+ BYTE *buffer = nullptr;
+ if (SUCCEEDED(mediaBuffer->Lock(&buffer, nullptr, &bufLen))) {
+ QByteArray bytes(reinterpret_cast<char*>(buffer), qsizetype(bufLen));
+                    auto videoBuffer = std::make_unique<QMemoryVideoBuffer>(std::move(bytes),
+                                                                            m_videoFrameStride);
+                    QVideoFrame frame =
+                            QVideoFramePrivate::createFrame(std::move(videoBuffer), m_frameFormat);
+
+ // WMF uses 100-nanosecond units, Qt uses microseconds
+ frame.setStartTime(timestamp / 10);
+
+ LONGLONG duration = -1;
+ if (SUCCEEDED(sample->GetSampleDuration(&duration)))
+ frame.setEndTime((timestamp + duration) / 10);
+
+ emit m_windowsCamera.newVideoFrame(frame);
+ mediaBuffer->Unlock();
+ }
+ }
+ }
+
+ m_reader->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, nullptr,
+ nullptr, nullptr, nullptr);
+ }
+
+ void onFlush()
+ {
+ m_flushWait.release();
+ }
+
+ ~ActiveCamera()
+ {
+ flush();
+ m_readerCallback->setActiveCamera(nullptr);
+ }
+
+private:
+    explicit ActiveCamera(QWindowsCamera &wc) : m_windowsCamera(wc), m_flushWait(0) { }
+
+ void flush()
+ {
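+        // Flush() is asynchronous; block until the OnFlush() callback releases the semaphore.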
+ if (SUCCEEDED(m_reader->Flush(MF_SOURCE_READER_FIRST_VIDEO_STREAM))) {
+ m_flushWait.acquire();
+ }
+ }
+
+ QWindowsCamera &m_windowsCamera;
+
+ QSemaphore m_flushWait;
+
+ ComPtr<IMFMediaSource> m_source;
+ ComPtr<IMFSourceReader> m_reader;
+ ComPtr<CameraReaderCallback> m_readerCallback;
+
+ QVideoFrameFormat m_frameFormat;
+ int m_videoFrameStride = 0;
+};
+
+STDMETHODIMP CameraReaderCallback::OnReadSample(HRESULT status, DWORD, DWORD, LONGLONG timestamp, IMFSample *sample)
+{
+ QMutexLocker locker(&m_mutex);
+ if (m_activeCamera)
+ m_activeCamera->onReadSample(status, timestamp, sample);
+
+ return status;
+}
+
+STDMETHODIMP CameraReaderCallback::OnFlush(DWORD)
+{
+ QMutexLocker locker(&m_mutex);
+ if (m_activeCamera)
+ m_activeCamera->onFlush();
+ return S_OK;
+}
+
+QWindowsCamera::QWindowsCamera(QCamera *camera)
+ : QPlatformCamera(camera)
+{
+ m_cameraDevice = camera ? camera->cameraDevice() : QCameraDevice{};
+}
+
+QWindowsCamera::~QWindowsCamera()
+{
+ QWindowsCamera::setActive(false);
+}
+
+void QWindowsCamera::setActive(bool active)
+{
+ if (bool(m_active) == active)
+ return;
+
+ if (active) {
+ if (m_cameraDevice.isNull())
+ return;
+
+ if (m_cameraFormat.isNull())
+ m_cameraFormat = findBestCameraFormat(m_cameraDevice);
+
+ m_active = ActiveCamera::create(*this, m_cameraDevice, m_cameraFormat);
+        if (m_active)
+            emit activeChanged(true);
+
+ } else {
+ m_active.reset();
+ emit activeChanged(false);
+ }
+}
+
+void QWindowsCamera::setCamera(const QCameraDevice &camera)
+{
+ bool active = bool(m_active);
+ if (active)
+ setActive(false);
+ m_cameraDevice = camera;
+ m_cameraFormat = {};
+ if (active)
+ setActive(true);
+}
+
+bool QWindowsCamera::setCameraFormat(const QCameraFormat &format)
+{
+ if (format.isNull())
+ return false;
+
+ bool ok = m_active ? m_active->setFormat(format) : true;
+ if (ok)
+ m_cameraFormat = format;
+
+ return ok;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qwindowscamera_p.h b/src/plugins/multimedia/ffmpeg/qwindowscamera_p.h
new file mode 100644
index 000000000..80c05ff59
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qwindowscamera_p.h
@@ -0,0 +1,45 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWINDOWSCAMERA_H
+#define QWINDOWSCAMERA_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformcamera_p.h>
+#include <private/qcomptr_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class ActiveCamera;
+
+class QWindowsCamera : public QPlatformCamera
+{
+ Q_OBJECT
+
+public:
+ explicit QWindowsCamera(QCamera *parent);
+ ~QWindowsCamera() override;
+
+ bool isActive() const override { return bool(m_active); }
+ void setActive(bool active) override;
+ void setCamera(const QCameraDevice &camera) override;
+ bool setCameraFormat(const QCameraFormat &/*format*/) override;
+
+private:
+ QCameraDevice m_cameraDevice;
+ std::unique_ptr<ActiveCamera> m_active;
+};
+
+QT_END_NAMESPACE
+
+#endif //QWINDOWSCAMERA_H
diff --git a/src/plugins/multimedia/ffmpeg/qx11capturablewindows.cpp b/src/plugins/multimedia/ffmpeg/qx11capturablewindows.cpp
new file mode 100644
index 000000000..9e57cbc64
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qx11capturablewindows.cpp
@@ -0,0 +1,74 @@
+// Copyright (C) 2023 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qx11capturablewindows_p.h"
+#include "private/qcapturablewindow_p.h"
+#include <qdebug.h>
+
+#include <X11/Xlib.h>
+
+QT_BEGIN_NAMESPACE
+
+QX11CapturableWindows::~QX11CapturableWindows()
+{
+ if (m_display)
+ XCloseDisplay(m_display);
+}
+
+QList<QCapturableWindow> QX11CapturableWindows::windows() const
+{
+ auto display = this->display();
+
+ if (!display)
+ return {};
+
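+    // _NET_CLIENT_LIST is the EWMH root-window property listing all windows managed by the window manager.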
+ Atom atom = XInternAtom(display, "_NET_CLIENT_LIST", true);
+ Atom actualType = 0;
+ int format = 0;
+ unsigned long windowsCount = 0;
+ unsigned long bytesAfter = 0;
+ unsigned char *data = nullptr;
+ const int status = XGetWindowProperty(display, XDefaultRootWindow(display), atom, 0L, (~0L),
+ false, AnyPropertyType, &actualType, &format,
+ &windowsCount, &bytesAfter, &data);
+
+    if (status != Success || !data)
+ return {};
+
+ QList<QCapturableWindow> result;
+
+ auto freeDataGuard = qScopeGuard([data]() { XFree(data); });
+ auto windows = reinterpret_cast<XID *>(data);
+ for (unsigned long i = 0; i < windowsCount; i++) {
+ auto windowData = std::make_unique<QCapturableWindowPrivate>();
+ windowData->id = static_cast<QCapturableWindowPrivate::Id>(windows[i]);
+
+ char *windowTitle = nullptr;
+ if (XFetchName(display, windows[i], &windowTitle) && windowTitle) {
+ windowData->description = QString::fromUtf8(windowTitle);
+ XFree(windowTitle);
+ }
+
+ if (isWindowValid(*windowData))
+ result.push_back(windowData.release()->create());
+ }
+
+ return result;
+}
+
+bool QX11CapturableWindows::isWindowValid(const QCapturableWindowPrivate &window) const
+{
+ auto display = this->display();
+ XWindowAttributes windowAttributes = {};
+ return display
+ && XGetWindowAttributes(display, static_cast<Window>(window.id), &windowAttributes) != 0
+ && windowAttributes.depth > 0;
+}
+
+Display *QX11CapturableWindows::display() const
+{
+ std::call_once(m_displayOnceFlag, [this]() { m_display = XOpenDisplay(nullptr); });
+ return m_display;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qx11capturablewindows_p.h b/src/plugins/multimedia/ffmpeg/qx11capturablewindows_p.h
new file mode 100644
index 000000000..088fe97cb
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qx11capturablewindows_p.h
@@ -0,0 +1,45 @@
+// Copyright (C) 2023 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QX11CAPTURABLEWINDOWS_P_H
+#define QX11CAPTURABLEWINDOWS_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "private/qplatformcapturablewindows_p.h"
+#include <mutex>
+
+struct _XDisplay;
+typedef struct _XDisplay Display;
+
+QT_BEGIN_NAMESPACE
+
+class QX11CapturableWindows : public QPlatformCapturableWindows
+{
+public:
+ ~QX11CapturableWindows() override;
+
+ QList<QCapturableWindow> windows() const override;
+
+ bool isWindowValid(const QCapturableWindowPrivate &window) const override;
+
+private:
+ Display *display() const;
+
+private:
+ mutable std::once_flag m_displayOnceFlag;
+ mutable Display *m_display = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif // QX11CAPTURABLEWINDOWS_P_H
diff --git a/src/plugins/multimedia/ffmpeg/qx11surfacecapture.cpp b/src/plugins/multimedia/ffmpeg/qx11surfacecapture.cpp
new file mode 100644
index 000000000..d9343cdfe
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qx11surfacecapture.cpp
@@ -0,0 +1,342 @@
+// Copyright (C) 2023 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qx11surfacecapture_p.h"
+#include "qffmpegsurfacecapturegrabber_p.h"
+
+#include <qvideoframe.h>
+#include <qscreen.h>
+#include <qwindow.h>
+#include <qdebug.h>
+#include <qguiapplication.h>
+#include <qloggingcategory.h>
+
+#include "private/qcapturablewindow_p.h"
+#include "private/qmemoryvideobuffer_p.h"
+#include "private/qvideoframeconversionhelper_p.h"
+#include "private/qvideoframe_p.h"
+
+#include <X11/Xlib.h>
+#include <sys/shm.h>
+#include <X11/extensions/XShm.h>
+#include <X11/Xutil.h>
+#include <X11/extensions/Xrandr.h>
+
+#include <optional>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcX11SurfaceCapture, "qt.multimedia.ffmpeg.qx11surfacecapture");
+
+namespace {
+
+void destroyXImage(XImage* image) {
+ XDestroyImage(image); // macro
+}
+
+template <typename T, typename D>
+std::unique_ptr<T, D> makeXUptr(T* ptr, D deleter) {
+ return std::unique_ptr<T, D>(ptr, deleter);
+}
+
+int screenNumberByName(Display *display, const QString &name)
+{
+ int size = 0;
+ auto monitors = makeXUptr(
+ XRRGetMonitors(display, XDefaultRootWindow(display), true, &size),
+ &XRRFreeMonitors);
+ const auto end = monitors.get() + size;
+ auto found = std::find_if(monitors.get(), end, [&](const XRRMonitorInfo &info) {
+ auto atomName = makeXUptr(XGetAtomName(display, info.name), &XFree);
+ return atomName && name == QString::fromUtf8(atomName.get());
+ });
+
+ return found == end ? -1 : std::distance(monitors.get(), found);
+}
+
+QVideoFrameFormat::PixelFormat xImagePixelFormat(const XImage &image)
+{
+ if (image.bits_per_pixel != 32) return QVideoFrameFormat::Format_Invalid;
+
+ if (image.red_mask == 0xff0000 &&
+ image.green_mask == 0xff00 &&
+ image.blue_mask == 0xff)
+ return QVideoFrameFormat::Format_BGRX8888;
+
+ if (image.red_mask == 0xff00 &&
+ image.green_mask == 0xff0000 &&
+ image.blue_mask == 0xff000000)
+ return QVideoFrameFormat::Format_XBGR8888;
+
+ if (image.blue_mask == 0xff0000 &&
+ image.green_mask == 0xff00 &&
+ image.red_mask == 0xff)
+ return QVideoFrameFormat::Format_RGBX8888;
+
+    if (image.blue_mask == 0xff00 &&
+        image.green_mask == 0xff0000 &&
+        image.red_mask == 0xff000000)
+ return QVideoFrameFormat::Format_XRGB8888;
+
+ return QVideoFrameFormat::Format_Invalid;
+}
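+
+// For illustration: a typical little-endian TrueColor visual reports
+// red_mask == 0xff0000, green_mask == 0xff00 and blue_mask == 0xff, which the
+// mapping above resolves to Format_BGRX8888.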
+
+} // namespace
+
+class QX11SurfaceCapture::Grabber : private QFFmpegSurfaceCaptureGrabber
+{
+public:
+ static std::unique_ptr<Grabber> create(QX11SurfaceCapture &capture, QScreen *screen)
+ {
+ std::unique_ptr<Grabber> result(new Grabber(capture));
+ return result->init(screen) ? std::move(result) : nullptr;
+ }
+
+ static std::unique_ptr<Grabber> create(QX11SurfaceCapture &capture, WId wid)
+ {
+ std::unique_ptr<Grabber> result(new Grabber(capture));
+ return result->init(wid) ? std::move(result) : nullptr;
+ }
+
+ ~Grabber() override
+ {
+ stop();
+
+ detachShm();
+ }
+
+ const QVideoFrameFormat &format() const { return m_format; }
+
+private:
+ Grabber(QX11SurfaceCapture &capture)
+ {
+ addFrameCallback(capture, &QX11SurfaceCapture::newVideoFrame);
+ connect(this, &Grabber::errorUpdated, &capture, &QX11SurfaceCapture::updateError);
+ }
+
+ bool createDisplay()
+ {
+ if (!m_display)
+ m_display.reset(XOpenDisplay(nullptr));
+
+ if (!m_display)
+ updateError(QPlatformSurfaceCapture::InternalError,
+ QLatin1String("Cannot open X11 display"));
+
+ return m_display != nullptr;
+ }
+
+ bool init(WId wid)
+ {
+ if (auto screen = QGuiApplication::primaryScreen())
+ setFrameRate(screen->refreshRate());
+
+ return createDisplay() && initWithXID(static_cast<XID>(wid));
+ }
+
+ bool init(QScreen *screen)
+ {
+ if (!screen) {
+            updateError(QPlatformSurfaceCapture::NotFound, QLatin1String("Screen not found"));
+ return false;
+ }
+
+ if (!createDisplay())
+ return false;
+
+ auto screenNumber = screenNumberByName(m_display.get(), screen->name());
+
+ if (screenNumber < 0)
+ return false;
+
+ setFrameRate(screen->refreshRate());
+
+ return initWithXID(RootWindow(m_display.get(), screenNumber));
+ }
+
+ bool initWithXID(XID xid)
+ {
+ m_xid = xid;
+
+ if (update()) {
+ start();
+ return true;
+ }
+
+ return false;
+ }
+
+ void detachShm()
+ {
+ if (std::exchange(m_attached, false)) {
+ XShmDetach(m_display.get(), &m_shmInfo);
+ shmdt(m_shmInfo.shmaddr);
+ shmctl(m_shmInfo.shmid, IPC_RMID, 0);
+ }
+ }
+
+ void attachShm()
+ {
+ Q_ASSERT(!m_attached);
+
+ m_shmInfo.shmid =
+ shmget(IPC_PRIVATE, m_xImage->bytes_per_line * m_xImage->height, IPC_CREAT | 0777);
+
+ if (m_shmInfo.shmid == -1)
+ return;
+
+ m_shmInfo.readOnly = false;
+ m_shmInfo.shmaddr = m_xImage->data = (char *)shmat(m_shmInfo.shmid, 0, 0);
+
+ m_attached = XShmAttach(m_display.get(), &m_shmInfo);
+ }
+
+ bool update()
+ {
+ XWindowAttributes wndattr = {};
+ if (XGetWindowAttributes(m_display.get(), m_xid, &wndattr) == 0) {
+ updateError(QPlatformSurfaceCapture::CaptureFailed,
+ QLatin1String("Cannot get window attributes"));
+ return false;
+ }
+
+        // TODO: when capturing windows, we should adjust the offsets and size if
+        // the window extends beyond the screen borders
+        // m_xOffset = ...
+        // m_yOffset = ...
+
+        // Check the window parameters for the root window as well, since
+        // they can change (e.g. on a VM when resizing)
+ if (!m_xImage || wndattr.width != m_xImage->width || wndattr.height != m_xImage->height
+ || wndattr.depth != m_xImage->depth || wndattr.visual->visualid != m_visualID) {
+
+ qCDebug(qLcX11SurfaceCapture) << "recreate ximage: " << wndattr.width << wndattr.height
+ << wndattr.depth << wndattr.visual->visualid;
+
+ detachShm();
+ m_xImage.reset();
+
+ m_visualID = wndattr.visual->visualid;
+ m_xImage.reset(XShmCreateImage(m_display.get(), wndattr.visual, wndattr.depth, ZPixmap,
+ nullptr, &m_shmInfo, wndattr.width, wndattr.height));
+
+ if (!m_xImage) {
+ updateError(QPlatformSurfaceCapture::CaptureFailed,
+ QLatin1String("Cannot create image"));
+ return false;
+ }
+
+ const auto pixelFormat = xImagePixelFormat(*m_xImage);
+
+ // TODO: probably, add a converter instead
+ if (pixelFormat == QVideoFrameFormat::Format_Invalid) {
+ updateError(QPlatformSurfaceCapture::CaptureFailed,
+ QLatin1String("Not handled pixel format, bpp=")
+ + QString::number(m_xImage->bits_per_pixel));
+ return false;
+ }
+
+ attachShm();
+
+ if (!m_attached) {
+ updateError(QPlatformSurfaceCapture::CaptureFailed,
+ QLatin1String("Cannot attach shared memory"));
+ return false;
+ }
+
+ QVideoFrameFormat format(QSize(m_xImage->width, m_xImage->height), pixelFormat);
+ format.setStreamFrameRate(frameRate());
+ m_format = format;
+ }
+
+ return m_attached;
+ }
+
+protected:
+ QVideoFrame grabFrame() override
+ {
+ if (!update())
+ return {};
+
+ if (!XShmGetImage(m_display.get(), m_xid, m_xImage.get(), m_xOffset, m_yOffset,
+ AllPlanes)) {
+ updateError(QPlatformSurfaceCapture::CaptureFailed,
+ QLatin1String(
+ "Cannot get ximage; the window may be out of the screen borders"));
+ return {};
+ }
+
+ QByteArray data(m_xImage->bytes_per_line * m_xImage->height, Qt::Uninitialized);
+
+ const auto pixelSrc = reinterpret_cast<const uint32_t *>(m_xImage->data);
+ const auto pixelDst = reinterpret_cast<uint32_t *>(data.data());
+ const auto pixelCount = data.size() / 4;
+        const auto xImageAlphaVaries = false; // In known cases it doesn't vary - it's 0x00 or 0xff
+
+ qCopyPixelsWithAlphaMask(pixelDst, pixelSrc, pixelCount, m_format.pixelFormat(),
+ xImageAlphaVaries);
+
+ auto buffer = std::make_unique<QMemoryVideoBuffer>(data, m_xImage->bytes_per_line);
+ return QVideoFramePrivate::createFrame(std::move(buffer), m_format);
+ }
+
+private:
+ std::optional<QPlatformSurfaceCapture::Error> m_prevGrabberError;
+ XID m_xid = None;
+ int m_xOffset = 0;
+ int m_yOffset = 0;
+ std::unique_ptr<Display, decltype(&XCloseDisplay)> m_display{ nullptr, &XCloseDisplay };
+ std::unique_ptr<XImage, decltype(&destroyXImage)> m_xImage{ nullptr, &destroyXImage };
+ XShmSegmentInfo m_shmInfo;
+ bool m_attached = false;
+ VisualID m_visualID = None;
+ QVideoFrameFormat m_format;
+};
+
+QX11SurfaceCapture::QX11SurfaceCapture(Source initialSource)
+ : QPlatformSurfaceCapture(initialSource)
+{
+ // For debug
+ // XSetErrorHandler([](Display *, XErrorEvent * e) {
+ // qDebug() << "error handler" << e->error_code;
+ // return 0;
+ // });
+}
+
+QX11SurfaceCapture::~QX11SurfaceCapture() = default;
+
+QVideoFrameFormat QX11SurfaceCapture::frameFormat() const
+{
+ return m_grabber ? m_grabber->format() : QVideoFrameFormat{};
+}
+
+bool QX11SurfaceCapture::setActiveInternal(bool active)
+{
+ qCDebug(qLcX11SurfaceCapture) << "set active" << active;
+
+ if (m_grabber)
+ m_grabber.reset();
+ else
+ std::visit([this](auto source) { activate(source); }, source());
+
+ return static_cast<bool>(m_grabber) == active;
+}
+
+void QX11SurfaceCapture::activate(ScreenSource screen)
+{
+ if (checkScreenWithError(screen))
+ m_grabber = Grabber::create(*this, screen);
+}
+
+void QX11SurfaceCapture::activate(WindowSource window)
+{
+ auto handle = QCapturableWindowPrivate::handle(window);
+ m_grabber = Grabber::create(*this, handle ? handle->id : 0);
+}
+
+bool QX11SurfaceCapture::isSupported()
+{
+ return qgetenv("XDG_SESSION_TYPE").compare(QLatin1String("x11"), Qt::CaseInsensitive) == 0;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qx11surfacecapture_p.h b/src/plugins/multimedia/ffmpeg/qx11surfacecapture_p.h
new file mode 100644
index 000000000..7f794fd8b
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qx11surfacecapture_p.h
@@ -0,0 +1,48 @@
+// Copyright (C) 2023 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QX11SURFACECAPTURE_P_H
+#define QX11SURFACECAPTURE_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "private/qplatformsurfacecapture_p.h"
+
+QT_BEGIN_NAMESPACE
+
+class QX11SurfaceCapture : public QPlatformSurfaceCapture
+{
+ class Grabber;
+
+public:
+ explicit QX11SurfaceCapture(Source initialSource);
+ ~QX11SurfaceCapture() override;
+
+ QVideoFrameFormat frameFormat() const override;
+
+ static bool isSupported();
+
+protected:
+ bool setActiveInternal(bool active) override;
+
+private:
+ void activate(ScreenSource);
+
+ void activate(WindowSource);
+
+private:
+ std::unique_ptr<Grabber> m_grabber;
+};
+
+QT_END_NAMESPACE
+
+#endif // QX11SURFACECAPTURE_P_H
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder.cpp
new file mode 100644
index 000000000..bcd8a39c8
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder.cpp
@@ -0,0 +1,343 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qffmpegaudioencoder_p.h"
+#include "qffmpegrecordingengineutils_p.h"
+#include "qffmpegaudioencoderutils_p.h"
+#include "qffmpegaudioinput_p.h"
+#include "qffmpegencoderoptions_p.h"
+#include "qffmpegmuxer_p.h"
+#include "qffmpegrecordingengine_p.h"
+#include "qffmpegmediaformatinfo_p.h"
+#include <QtCore/qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+static Q_LOGGING_CATEGORY(qLcFFmpegAudioEncoder, "qt.multimedia.ffmpeg.audioencoder");
+
+AudioEncoder::AudioEncoder(RecordingEngine &recordingEngine, const QAudioFormat &sourceFormat,
+ const QMediaEncoderSettings &settings)
+ : EncoderThread(recordingEngine), m_format(sourceFormat), m_settings(settings)
+{
+ setObjectName(QLatin1String("AudioEncoder"));
+ qCDebug(qLcFFmpegAudioEncoder) << "AudioEncoder" << settings.audioCodec();
+
+ auto codecID = QFFmpegMediaFormatInfo::codecIdForAudioCodec(settings.audioCodec());
+ Q_ASSERT(avformat_query_codec(recordingEngine.avFormatContext()->oformat, codecID,
+ FF_COMPLIANCE_NORMAL));
+
+ const AVAudioFormat requestedAudioFormat(m_format);
+
+ m_avCodec = QFFmpeg::findAVEncoder(codecID, {}, requestedAudioFormat.sampleFormat);
+
+ if (!m_avCodec)
+ m_avCodec = QFFmpeg::findAVEncoder(codecID);
+
+ qCDebug(qLcFFmpegAudioEncoder) << "found audio codec" << m_avCodec->name;
+
+ Q_ASSERT(m_avCodec);
+
+ m_stream = avformat_new_stream(recordingEngine.avFormatContext(), nullptr);
+ m_stream->id = recordingEngine.avFormatContext()->nb_streams - 1;
+ m_stream->codecpar->codec_type = AVMEDIA_TYPE_AUDIO;
+ m_stream->codecpar->codec_id = codecID;
+#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
+ m_stream->codecpar->channel_layout =
+ adjustChannelLayout(m_avCodec->channel_layouts, requestedAudioFormat.channelLayoutMask);
+ m_stream->codecpar->channels = qPopulationCount(m_stream->codecpar->channel_layout);
+#else
+ m_stream->codecpar->ch_layout =
+ adjustChannelLayout(m_avCodec->ch_layouts, requestedAudioFormat.channelLayout);
+#endif
+ const auto sampleRate =
+ adjustSampleRate(m_avCodec->supported_samplerates, requestedAudioFormat.sampleRate);
+
+ m_stream->codecpar->sample_rate = sampleRate;
+ m_stream->codecpar->frame_size = 1024;
+ m_stream->codecpar->format =
+ adjustSampleFormat(m_avCodec->sample_fmts, requestedAudioFormat.sampleFormat);
+
+ m_stream->time_base = AVRational{ 1, sampleRate };
+
+ qCDebug(qLcFFmpegAudioEncoder) << "set stream time_base" << m_stream->time_base.num << "/"
+ << m_stream->time_base.den;
+}
+
+void AudioEncoder::open()
+{
+ m_codecContext.reset(avcodec_alloc_context3(m_avCodec));
+
+ if (m_stream->time_base.num != 1 || m_stream->time_base.den != m_format.sampleRate()) {
+ qCDebug(qLcFFmpegAudioEncoder) << "Most likely, av_format_write_header changed time base from"
+ << 1 << "/" << m_format.sampleRate() << "to"
+ << m_stream->time_base;
+ }
+
+ m_codecContext->time_base = m_stream->time_base;
+
+ avcodec_parameters_to_context(m_codecContext.get(), m_stream->codecpar);
+
+ AVDictionaryHolder opts;
+ applyAudioEncoderOptions(m_settings, m_avCodec->name, m_codecContext.get(), opts);
+ applyExperimentalCodecOptions(m_avCodec, opts);
+
+ const int res = avcodec_open2(m_codecContext.get(), m_avCodec, opts);
+
+ qCDebug(qLcFFmpegAudioEncoder) << "audio codec opened" << res;
+ qCDebug(qLcFFmpegAudioEncoder) << "audio codec params: fmt=" << m_codecContext->sample_fmt
+ << "rate=" << m_codecContext->sample_rate;
+
+ updateResampler();
+}
+
+void AudioEncoder::addBuffer(const QAudioBuffer &buffer)
+{
+ if (!buffer.isValid()) {
+ setEndOfSourceStream();
+ return;
+ }
+
+ {
+ const std::chrono::microseconds bufferDuration(buffer.duration());
+ auto guard = lockLoopData();
+
+ resetEndOfSourceStream();
+
+ if (m_paused)
+ return;
+
+ // TODO: apply logic with canPushFrame
+
+ m_audioBufferQueue.push(buffer);
+ m_queueDuration += bufferDuration;
+ }
+
+ dataReady();
+}
+
+QAudioBuffer AudioEncoder::takeBuffer()
+{
+ auto locker = lockLoopData();
+ QAudioBuffer result = dequeueIfPossible(m_audioBufferQueue);
+ m_queueDuration -= std::chrono::microseconds(result.duration());
+ return result;
+}
+
+void AudioEncoder::init()
+{
+ open();
+
+ // TODO: try to address this dependency here.
+ if (auto input = qobject_cast<QFFmpegAudioInput *>(source()))
+ input->setFrameSize(m_codecContext->frame_size);
+
+ qCDebug(qLcFFmpegAudioEncoder) << "AudioEncoder::init started audio device thread.";
+}
+
+void AudioEncoder::cleanup()
+{
+ while (!m_audioBufferQueue.empty())
+ processOne();
+
+ if (m_avFrameSamplesOffset) {
+ // the size of the last frame can be less than m_codecContext->frame_size
+
+ retrievePackets();
+ sendPendingFrameToAVCodec();
+ }
+
+ while (avcodec_send_frame(m_codecContext.get(), nullptr) == AVERROR(EAGAIN))
+ retrievePackets();
+ retrievePackets();
+}
+
+bool AudioEncoder::hasData() const
+{
+ return !m_audioBufferQueue.empty();
+}
+
+void AudioEncoder::retrievePackets()
+{
+ while (1) {
+ AVPacketUPtr packet(av_packet_alloc());
+ int ret = avcodec_receive_packet(m_codecContext.get(), packet.get());
+        if (ret < 0) {
+            if (ret != AVERROR(EAGAIN) && ret != AVERROR_EOF) {
+                char errStr[AV_ERROR_MAX_STRING_SIZE];
+                av_strerror(ret, errStr, AV_ERROR_MAX_STRING_SIZE);
+                qCDebug(qLcFFmpegAudioEncoder) << "receive packet" << ret << errStr;
+            }
+            break;
+        }
+
+ // qCDebug(qLcFFmpegEncoder) << "writing audio packet" << packet->size << packet->pts <<
+ // packet->dts;
+ packet->stream_index = m_stream->id;
+ m_recordingEngine.getMuxer()->addPacket(std::move(packet));
+ }
+}
+
+void AudioEncoder::processOne()
+{
+ QAudioBuffer buffer = takeBuffer();
+ Q_ASSERT(buffer.isValid());
+
+ // qCDebug(qLcFFmpegEncoder) << "new audio buffer" << buffer.byteCount() << buffer.format()
+ // << buffer.frameCount() << codec->frame_size;
+
+ if (buffer.format() != m_format) {
+ m_format = buffer.format();
+ updateResampler();
+ }
+
+ int samplesOffset = 0;
+ const int bufferSamplesCount = static_cast<int>(buffer.frameCount());
+
+ while (samplesOffset < bufferSamplesCount)
+ handleAudioData(buffer.constData<uint8_t>(), samplesOffset, bufferSamplesCount);
+
+ Q_ASSERT(samplesOffset == bufferSamplesCount);
+}
+
+bool AudioEncoder::checkIfCanPushFrame() const
+{
+ if (isRunning())
+ return m_audioBufferQueue.size() <= 1 || m_queueDuration < m_maxQueueDuration;
+ if (!isFinished())
+ return m_audioBufferQueue.empty();
+
+ return false;
+}
+
+void AudioEncoder::updateResampler()
+{
+ m_resampler.reset();
+
+ const AVAudioFormat requestedAudioFormat(m_format);
+ const AVAudioFormat codecAudioFormat(m_codecContext.get());
+
+ if (requestedAudioFormat != codecAudioFormat)
+ m_resampler = createResampleContext(requestedAudioFormat, codecAudioFormat);
+
+ qCDebug(qLcFFmpegAudioEncoder)
+ << "Resampler updated. Input format:" << m_format << "Resampler:" << m_resampler.get();
+}
+
+void AudioEncoder::ensurePendingFrame(int availableSamplesCount)
+{
+ Q_ASSERT(availableSamplesCount >= 0);
+
+ if (m_avFrame)
+ return;
+
+ m_avFrame = makeAVFrame();
+
+ m_avFrame->format = m_codecContext->sample_fmt;
+#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
+ m_avFrame->channel_layout = m_codecContext->channel_layout;
+ m_avFrame->channels = m_codecContext->channels;
+#else
+ m_avFrame->ch_layout = m_codecContext->ch_layout;
+#endif
+ m_avFrame->sample_rate = m_codecContext->sample_rate;
+
+ const bool isFixedFrameSize = !(m_avCodec->capabilities & AV_CODEC_CAP_VARIABLE_FRAME_SIZE)
+ && m_codecContext->frame_size;
+ m_avFrame->nb_samples = isFixedFrameSize ? m_codecContext->frame_size : availableSamplesCount;
+ if (m_avFrame->nb_samples)
+ av_frame_get_buffer(m_avFrame.get(), 0);
+
+ const auto &timeBase = m_stream->time_base;
+ const auto pts = timeBase.den && timeBase.num
+ ? timeBase.den * m_samplesWritten / (m_codecContext->sample_rate * timeBase.num)
+ : m_samplesWritten;
+ setAVFrameTime(*m_avFrame, pts, timeBase);
+}
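+
+// Worked example for the pts computation above: with a stream time base of
+// 1/48000, a 48000 Hz codec rate and m_samplesWritten == 1024, we get
+// pts = 48000 * 1024 / (48000 * 1) = 1024, i.e. the pts advances by exactly
+// one tick per written sample.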
+
+void AudioEncoder::writeDataToPendingFrame(const uchar *data, int &samplesOffset, int samplesCount)
+{
+ Q_ASSERT(m_avFrame);
+ Q_ASSERT(m_avFrameSamplesOffset <= m_avFrame->nb_samples);
+
+ const int bytesPerSample = av_get_bytes_per_sample(m_codecContext->sample_fmt);
+ const bool isPlanar = av_sample_fmt_is_planar(m_codecContext->sample_fmt);
+
+#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
+ const int channelsCount = m_codecContext->channels;
+#else
+ const int channelsCount = m_codecContext->ch_layout.nb_channels;
+#endif
+
+ const int audioDataOffset = isPlanar ? bytesPerSample * m_avFrameSamplesOffset
+ : bytesPerSample * m_avFrameSamplesOffset * channelsCount;
+
+ const int planesCount = isPlanar ? channelsCount : 1;
+ m_avFramePlanesData.resize(planesCount);
+ for (int plane = 0; plane < planesCount; ++plane)
+ m_avFramePlanesData[plane] = m_avFrame->extended_data[plane] + audioDataOffset;
+
+ const int samplesToRead =
+ std::min(m_avFrame->nb_samples - m_avFrameSamplesOffset, samplesCount - samplesOffset);
+
+ data += m_format.bytesForFrames(samplesOffset);
+
+ if (m_resampler) {
+ m_avFrameSamplesOffset += swr_convert(m_resampler.get(), m_avFramePlanesData.data(),
+ samplesToRead, &data, samplesToRead);
+ } else {
+ Q_ASSERT(planesCount == 1);
+ m_avFrameSamplesOffset += samplesToRead;
+ memcpy(m_avFramePlanesData[0], data, m_format.bytesForFrames(samplesToRead));
+ }
+
+ samplesOffset += samplesToRead;
+}
+
+void AudioEncoder::sendPendingFrameToAVCodec()
+{
+ Q_ASSERT(m_avFrame);
+ Q_ASSERT(m_avFrameSamplesOffset <= m_avFrame->nb_samples);
+
+ m_avFrame->nb_samples = m_avFrameSamplesOffset;
+
+ m_samplesWritten += m_avFrameSamplesOffset;
+
+ const qint64 time = m_format.durationForFrames(m_samplesWritten);
+ m_recordingEngine.newTimeStamp(time / 1000);
+
+ // qCDebug(qLcFFmpegEncoder) << "sending audio frame" << buffer.byteCount() << frame->pts <<
+ // ((double)buffer.frameCount()/frame->sample_rate);
+
+ int ret = avcodec_send_frame(m_codecContext.get(), m_avFrame.get());
+ if (ret < 0) {
+ char errStr[AV_ERROR_MAX_STRING_SIZE];
+ av_strerror(ret, errStr, AV_ERROR_MAX_STRING_SIZE);
+ qCDebug(qLcFFmpegAudioEncoder) << "error sending frame" << ret << errStr;
+ }
+
+ m_avFrame = nullptr;
+ m_avFrameSamplesOffset = 0;
+ std::fill(m_avFramePlanesData.begin(), m_avFramePlanesData.end(), nullptr);
+}
+
+void AudioEncoder::handleAudioData(const uchar *data, int &samplesOffset, int samplesCount)
+{
+ ensurePendingFrame(samplesCount - samplesOffset);
+
+ writeDataToPendingFrame(data, samplesOffset, samplesCount);
+
+ // The frame is not ready yet
+ if (m_avFrameSamplesOffset < m_avFrame->nb_samples)
+ return;
+
+ retrievePackets();
+
+ sendPendingFrameToAVCodec();
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder_p.h
new file mode 100644
index 000000000..4408ff54f
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoder_p.h
@@ -0,0 +1,77 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGAUDIOENCODER_P_H
+#define QFFMPEGAUDIOENCODER_P_H
+
+#include "qffmpeg_p.h"
+#include "qffmpegencoderthread_p.h"
+#include "private/qplatformmediarecorder_p.h"
+#include <qaudiobuffer.h>
+#include <queue>
+#include <chrono>
+
+QT_BEGIN_NAMESPACE
+
+class QMediaEncoderSettings;
+
+namespace QFFmpeg {
+
+class AudioEncoder : public EncoderThread
+{
+public:
+ AudioEncoder(RecordingEngine &recordingEngine, const QAudioFormat &sourceFormat,
+ const QMediaEncoderSettings &settings);
+
+ void addBuffer(const QAudioBuffer &buffer);
+
+protected:
+ bool checkIfCanPushFrame() const override;
+
+private:
+ void open();
+
+ QAudioBuffer takeBuffer();
+ void retrievePackets();
+ void updateResampler();
+
+ void init() override;
+ void cleanup() override;
+ bool hasData() const override;
+ void processOne() override;
+
+ void handleAudioData(const uchar *data, int &samplesOffset, int samplesCount);
+
+ void ensurePendingFrame(int availableSamplesCount);
+
+ void writeDataToPendingFrame(const uchar *data, int &samplesOffset, int samplesCount);
+
+ void sendPendingFrameToAVCodec();
+
+private:
+ std::queue<QAudioBuffer> m_audioBufferQueue;
+
+ // Arbitrarily chosen to limit audio queue duration
+ const std::chrono::microseconds m_maxQueueDuration = std::chrono::seconds(5);
+
+ std::chrono::microseconds m_queueDuration{ 0 };
+
+ AVStream *m_stream = nullptr;
+ AVCodecContextUPtr m_codecContext;
+ QAudioFormat m_format;
+
+ SwrContextUPtr m_resampler;
+ qint64 m_samplesWritten = 0;
+ const AVCodec *m_avCodec = nullptr;
+ QMediaEncoderSettings m_settings;
+
+ AVFrameUPtr m_avFrame;
+ int m_avFrameSamplesOffset = 0;
+ std::vector<uint8_t *> m_avFramePlanesData;
+};
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGAUDIOENCODER_P_H
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoderutils.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoderutils.cpp
new file mode 100644
index 000000000..ea36a8138
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoderutils.cpp
@@ -0,0 +1,97 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegaudioencoderutils_p.h"
+#include "qalgorithms.h"
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+AVSampleFormat adjustSampleFormat(const AVSampleFormat *supportedFormats, AVSampleFormat requested)
+{
+ auto calcScore = [requested](AVSampleFormat format) {
+ return format == requested ? BestAVScore
+ : format == av_get_planar_sample_fmt(requested) ? BestAVScore - 1
+ : 0;
+ };
+
+ const auto result = findBestAVFormat(supportedFormats, calcScore).first;
+ return result == AV_SAMPLE_FMT_NONE ? requested : result;
+}
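+
+// Example: if AV_SAMPLE_FMT_FLT is requested, an encoder that only supports
+// the planar AV_SAMPLE_FMT_FLTP scores BestAVScore - 1 and wins over
+// unrelated formats, which all score 0.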
+
+int adjustSampleRate(const int *supportedRates, int requested)
+{
+ auto calcScore = [requested](int rate) {
+ return requested == rate ? BestAVScore
+ : requested <= rate ? rate - requested
+ : requested - rate - 1000000;
+ };
+
+ const auto result = findBestAVValue(supportedRates, calcScore).first;
+ return result == 0 ? requested : result;
+}
+
+static AVScore calculateScoreByChannelsCount(int supportedChannelsNumber,
+ int requestedChannelsNumber)
+{
+ if (supportedChannelsNumber >= requestedChannelsNumber)
+ return requestedChannelsNumber - supportedChannelsNumber;
+
+ return supportedChannelsNumber - requestedChannelsNumber - 10000;
+}
+
+static AVScore calculateScoreByChannelsMask(int supportedChannelsNumber, uint64_t supportedMask,
+ int requestedChannelsNumber, uint64_t requestedMask)
+{
+ if ((supportedMask & requestedMask) == requestedMask)
+ return BestAVScore - qPopulationCount(supportedMask & ~requestedMask);
+
+ return calculateScoreByChannelsCount(supportedChannelsNumber, requestedChannelsNumber);
+}
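+
+// Example: requesting stereo (mask 0x3, 2 channels) against a supported 5.1
+// layout (mask 0x3f, 6 channels) covers the requested mask, so the score is
+// BestAVScore - qPopulationCount(0x3f & ~0x3) = BestAVScore - 4; a mono
+// layout misses channels and falls back to the channel-count penalty instead.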
+
+#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
+
+uint64_t adjustChannelLayout(const uint64_t *supportedMasks, uint64_t requested)
+{
+ auto calcScore = [requested](uint64_t mask) {
+ return calculateScoreByChannelsMask(qPopulationCount(mask), mask,
+ qPopulationCount(requested), requested);
+ };
+
+ const auto result = findBestAVValue(supportedMasks, calcScore).first;
+ return result == 0 ? requested : result;
+}
+
+#else
+
+AVChannelLayout adjustChannelLayout(const AVChannelLayout *supportedLayouts,
+ const AVChannelLayout &requested)
+{
+ auto calcScore = [&requested](const AVChannelLayout &layout) {
+ if (layout == requested)
+ return BestAVScore;
+
+        // The only realistic case for now is
+        // layout.order == requested.order == AV_CHANNEL_ORDER_NATIVE,
+        // but handle the other orders as well, to be on the safe side.
+
+ if (layout.order == AV_CHANNEL_ORDER_CUSTOM || requested.order == AV_CHANNEL_ORDER_CUSTOM)
+ return calculateScoreByChannelsCount(layout.nb_channels, requested.nb_channels) - 1000;
+
+ const auto offset = layout.order == requested.order ? 1 : 100;
+
+ return calculateScoreByChannelsMask(layout.nb_channels, layout.u.mask,
+ requested.nb_channels, requested.u.mask)
+ - offset;
+ };
+
+ const auto result = findBestAVValue(supportedLayouts, calcScore);
+ return result.second == NotSuitableAVScore ? requested : result.first;
+}
+
+#endif
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoderutils_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoderutils_p.h
new file mode 100644
index 000000000..8a7c184ec
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegaudioencoderutils_p.h
@@ -0,0 +1,28 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGAUDIOENCODERUTILS_P_H
+#define QFFMPEGAUDIOENCODERUTILS_P_H
+
+#include "qffmpeg_p.h"
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+AVSampleFormat adjustSampleFormat(const AVSampleFormat *supportedFormats, AVSampleFormat requested);
+
+int adjustSampleRate(const int *supportedRates, int requested);
+
+#if QT_FFMPEG_OLD_CHANNEL_LAYOUT
+uint64_t adjustChannelLayout(const uint64_t *supportedLayouts, uint64_t requested);
+#else
+AVChannelLayout adjustChannelLayout(const AVChannelLayout *supportedLayouts,
+ const AVChannelLayout &requested);
+#endif
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGAUDIOENCODERUTILS_P_H
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderoptions.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderoptions.cpp
new file mode 100644
index 000000000..bd6a8e09b
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderoptions.cpp
@@ -0,0 +1,362 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qffmpegencoderoptions_p.h"
+
+#if QT_CONFIG(vaapi)
+#include <va/va.h>
+#endif
+
+QT_BEGIN_NAMESPACE
+
+// Unfortunately, there is no common way to specify options for the encoders. The code here tries to map our settings sensibly
+// to the options available in the different encoders.
+
+// For constant quality options, we're trying to map things to approx those bit rates for 1080p@30fps (in Mbps):
+// VeryLow Low Normal High VeryHigh
+// H264: 0.8M 1.5M 3.5M 6M 10M
+// H265: 0.5M 1.0M 2.5M 4M 7M
+
+[[maybe_unused]]
+static int bitrateForSettings(const QMediaEncoderSettings &settings, bool hdr = false)
+{
+ // calculate an acceptable bitrate depending on video codec, resolution, framerate and requested quality
+ // The calculations are rather heuristic here, trying to take into account how well codecs compress using
+ // the tables above.
+
+ // The table here is for 30FPS
+ const double bitsPerPixel[int(QMediaFormat::VideoCodec::LastVideoCodec)+1][QMediaRecorder::VeryHighQuality+1] = {
+ { 1.2, 2.25, 5, 9, 15 }, // MPEG1,
+ { 0.8, 1.5, 3.5, 6, 10 }, // MPEG2
+ { 0.4, 0.75, 1.75, 3, 5 }, // MPEG4
+ { 0.4, 0.75, 1.75, 3, 5 }, // H264
+        { 0.3, 0.5, 1.2, 2, 3 }, // H265
+ { 0.4, 0.75, 1.75, 3, 5 }, // VP8
+        { 0.3, 0.5, 1.2, 2, 3 }, // VP9
+ { 0.2, 0.4, 0.9, 1.5, 2.5 }, // AV1
+ { 0.4, 0.75, 1.75, 3, 5 }, // Theora
+ { 0.8, 1.5, 3.5, 6, 10 }, // WMV
+ { 16, 24, 32, 40, 48 }, // MotionJPEG
+ };
+
+ QSize s = settings.videoResolution();
+ double bitrate = bitsPerPixel[int(settings.videoCodec())][settings.quality()]*s.width()*s.height();
+
+ if (settings.videoCodec() != QMediaFormat::VideoCodec::MotionJPEG) {
+        // We assume that doubling the framerate requires 1.5 times the amount of data (not twice, as interframe
+        // differences will be smaller). 4 times the frame rate thus uses 2.25 times the data, etc.
+ float rateMultiplier = log2(settings.videoFrameRate()/30.);
+ bitrate *= pow(1.5, rateMultiplier);
+ } else {
+ // MotionJPEG doesn't optimize between frames, so we have a linear dependency on framerate
+ bitrate *= settings.videoFrameRate()/30.;
+ }
+
+ // HDR requires 10bits per pixel instead of 8, so apply a factor of 1.25.
+ if (hdr)
+ bitrate *= 1.25;
+ return bitrate;
+}
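+
+// Worked example: H.264 at Normal quality, 1920x1080 at 60 fps:
+// base bitrate = 1.75 * 1920 * 1080 ≈ 3.6 Mbps; the rate multiplier is
+// log2(60 / 30) = 1, so the result is scaled by 1.5^1, giving ≈ 5.4 Mbps.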
+
+static void apply_openh264(const QMediaEncoderSettings &settings, AVCodecContext *codec,
+ AVDictionary **opts)
+{
+ if (settings.encodingMode() == QMediaRecorder::ConstantBitRateEncoding
+ || settings.encodingMode() == QMediaRecorder::AverageBitRateEncoding) {
+ codec->bit_rate = settings.videoBitRate();
+ av_dict_set(opts, "rc_mode", "bitrate", 0);
+ } else {
+ av_dict_set(opts, "rc_mode", "quality", 0);
+ static const int q[] = { 51, 48, 38, 25, 5 };
+ codec->qmax = codec->qmin = q[settings.quality()];
+ }
+}
+
+static void apply_x264(const QMediaEncoderSettings &settings, AVCodecContext *codec, AVDictionary **opts)
+{
+ if (settings.encodingMode() == QMediaRecorder::ConstantBitRateEncoding || settings.encodingMode() == QMediaRecorder::AverageBitRateEncoding) {
+ codec->bit_rate = settings.videoBitRate();
+ } else {
+ const char *scales[] = {
+ "29", "26", "23", "21", "19"
+ };
+ av_dict_set(opts, "crf", scales[settings.quality()], 0);
+ }
+}
+
+static void apply_x265(const QMediaEncoderSettings &settings, AVCodecContext *codec, AVDictionary **opts)
+{
+ if (settings.encodingMode() == QMediaRecorder::ConstantBitRateEncoding || settings.encodingMode() == QMediaRecorder::AverageBitRateEncoding) {
+ codec->bit_rate = settings.videoBitRate();
+ } else {
+ const char *scales[QMediaRecorder::VeryHighQuality+1] = {
+ "40", "34", "28", "26", "24",
+ };
+ av_dict_set(opts, "crf", scales[settings.quality()], 0);
+ }
+}
+
+static void apply_libvpx(const QMediaEncoderSettings &settings, AVCodecContext *codec, AVDictionary **opts)
+{
+ if (settings.encodingMode() == QMediaRecorder::ConstantBitRateEncoding || settings.encodingMode() == QMediaRecorder::AverageBitRateEncoding) {
+ codec->bit_rate = settings.videoBitRate();
+ } else {
+ const char *scales[QMediaRecorder::VeryHighQuality+1] = {
+ "38", "34", "31", "28", "25",
+ };
+ av_dict_set(opts, "crf", scales[settings.quality()], 0);
+ av_dict_set(opts, "b", 0, 0);
+ }
+ av_dict_set(opts, "row-mt", "1", 0); // better multithreading
+}
+
+#ifdef Q_OS_DARWIN
+static void apply_videotoolbox(const QMediaEncoderSettings &settings, AVCodecContext *codec, AVDictionary **opts)
+{
+ if (settings.encodingMode() == QMediaRecorder::ConstantBitRateEncoding || settings.encodingMode() == QMediaRecorder::AverageBitRateEncoding) {
+ codec->bit_rate = settings.videoBitRate();
+ } else {
+ // only use quality on macOS/ARM, as FFmpeg doesn't support it on the other platforms and would throw
+ // an error when initializing the codec
+#if defined(Q_OS_MACOS) && defined(Q_PROCESSOR_ARM_64)
+        // VideoToolbox describes quality as a number from 0 to 1, with low = 0.25, normal = 0.5, high = 0.75 and lossless = 1;
+        // ffmpeg uses a different scale going from 0 to 11800.
+ // Values here are adjusted to agree approximately with the target bit rates listed above
+ const int scales[] = {
+ 3000, 4800, 5900, 6900, 7700,
+ };
+ codec->global_quality = scales[settings.quality()];
+ codec->flags |= AV_CODEC_FLAG_QSCALE;
+#else
+ codec->bit_rate = bitrateForSettings(settings);
+#endif
+ }
+
+    // VideoToolbox hw acceleration fails on some hardware;
+    // allow_sw makes sw encoding available if hw encoding fails.
+ // Under the hood, ffmpeg sets
+ // kVTVideoEncoderSpecification_EnableHardwareAcceleratedVideoEncoder instead of
+ // kVTVideoEncoderSpecification_RequireHardwareAcceleratedVideoEncoder
+ av_dict_set(opts, "allow_sw", "1", 0);
+}
+#endif
+
+#if QT_CONFIG(vaapi)
+static void apply_vaapi(const QMediaEncoderSettings &settings, AVCodecContext *codec, AVDictionary **/*opts*/)
+{
+ // See also vaapi_encode_init_rate_control() in libavcodec
+ if (settings.encodingMode() == QMediaRecorder::ConstantBitRateEncoding) {
+ codec->bit_rate = settings.videoBitRate();
+ codec->rc_max_rate = settings.videoBitRate();
+ } else if (settings.encodingMode() == QMediaRecorder::AverageBitRateEncoding) {
+ codec->bit_rate = settings.videoBitRate();
+ } else {
+ const int *quality = nullptr;
+ // unfortunately, all VA codecs use different quality scales :/
+ switch (settings.videoCodec()) {
+ case QMediaFormat::VideoCodec::MPEG2: {
+ static const int q[] = { 20, 15, 10, 8, 6 };
+ quality = q;
+ break;
+ }
+ case QMediaFormat::VideoCodec::MPEG4:
+ case QMediaFormat::VideoCodec::H264: {
+ static const int q[] = { 29, 26, 23, 21, 19 };
+ quality = q;
+ break;
+ }
+ case QMediaFormat::VideoCodec::H265: {
+ static const int q[] = { 40, 34, 28, 26, 24 };
+ quality = q;
+ break;
+ }
+ case QMediaFormat::VideoCodec::VP8: {
+ static const int q[] = { 56, 48, 40, 34, 28 };
+ quality = q;
+ break;
+ }
+ case QMediaFormat::VideoCodec::VP9: {
+ static const int q[] = { 124, 112, 100, 88, 76 };
+ quality = q;
+ break;
+ }
+ case QMediaFormat::VideoCodec::MotionJPEG: {
+ static const int q[] = { 40, 60, 80, 90, 95 };
+ quality = q;
+ break;
+ }
+ case QMediaFormat::VideoCodec::AV1:
+ case QMediaFormat::VideoCodec::Theora:
+ case QMediaFormat::VideoCodec::WMV:
+ default:
+ break;
+ }
+
+ if (quality)
+ codec->global_quality = quality[settings.quality()];
+ }
+}
+#endif
+
+static void apply_nvenc(const QMediaEncoderSettings &settings, AVCodecContext *codec,
+ AVDictionary **opts)
+{
+ switch (settings.encodingMode()) {
+ case QMediaRecorder::EncodingMode::AverageBitRateEncoding:
+ av_dict_set(opts, "vbr", "1", 0);
+ codec->bit_rate = settings.videoBitRate();
+ break;
+ case QMediaRecorder::EncodingMode::ConstantBitRateEncoding:
+ av_dict_set(opts, "cbr", "1", 0);
+ codec->bit_rate = settings.videoBitRate();
+ codec->rc_max_rate = codec->rc_min_rate = codec->bit_rate;
+ break;
+ case QMediaRecorder::EncodingMode::ConstantQualityEncoding: {
+ static const char *q[] = { "51", "48", "35", "15", "1" };
+ av_dict_set(opts, "cq", q[settings.quality()], 0);
+ } break;
+ default:
+ break;
+ }
+}
+
+#ifdef Q_OS_WINDOWS
+static void apply_mf(const QMediaEncoderSettings &settings, AVCodecContext *codec, AVDictionary **opts)
+{
+ if (settings.encodingMode() == QMediaRecorder::ConstantBitRateEncoding || settings.encodingMode() == QMediaRecorder::AverageBitRateEncoding) {
+ codec->bit_rate = settings.videoBitRate();
+ av_dict_set(opts, "rate_control", "cbr", 0);
+ } else {
+ av_dict_set(opts, "rate_control", "quality", 0);
+ const char *scales[] = {
+ "25", "50", "75", "90", "100"
+ };
+ av_dict_set(opts, "quality", scales[settings.quality()], 0);
+ }
+}
+#endif
+
+#ifdef Q_OS_ANDROID
+static void apply_mediacodec(const QMediaEncoderSettings &settings, AVCodecContext *codec,
+ AVDictionary **opts)
+{
+ codec->bit_rate = settings.videoBitRate();
+
+ const int quality[] = { 25, 50, 75, 90, 100 };
+ codec->global_quality = quality[settings.quality()];
+
+ switch (settings.encodingMode()) {
+ case QMediaRecorder::EncodingMode::AverageBitRateEncoding:
+ av_dict_set(opts, "bitrate_mode", "vbr", 1);
+ break;
+ case QMediaRecorder::EncodingMode::ConstantBitRateEncoding:
+ av_dict_set(opts, "bitrate_mode", "cbr", 1);
+ break;
+ case QMediaRecorder::EncodingMode::ConstantQualityEncoding:
+ // av_dict_set(opts, "bitrate_mode", "cq", 1);
+ av_dict_set(opts, "bitrate_mode", "cbr", 1);
+ break;
+ default:
+ break;
+ }
+
+ switch (settings.videoCodec()) {
+ case QMediaFormat::VideoCodec::H264: {
+ const char *levels[] = { "2.2", "3.2", "4.2", "5.2", "6.2" };
+ av_dict_set(opts, "level", levels[settings.quality()], 1);
+ codec->profile = FF_PROFILE_H264_HIGH;
+ break;
+ }
+ case QMediaFormat::VideoCodec::H265: {
+ const char *levels[] = { "h2.1", "h3.1", "h4.1", "h5.1", "h6.1" };
+ av_dict_set(opts, "level", levels[settings.quality()], 1);
+ codec->profile = FF_PROFILE_HEVC_MAIN;
+ break;
+ }
+ default:
+ break;
+ }
+}
+#endif
+
+namespace QFFmpeg {
+
+using ApplyOptions = void (*)(const QMediaEncoderSettings &settings, AVCodecContext *codec, AVDictionary **opts);
+
+const struct {
+ const char *name;
+ ApplyOptions apply;
+} videoCodecOptionTable[] = { { "libx264", apply_x264 },
+ { "libx265xx", apply_x265 },
+ { "libvpx", apply_libvpx },
+ { "libvpx_vp9", apply_libvpx },
+ { "libopenh264", apply_openh264 },
+ { "h264_nvenc", apply_nvenc },
+ { "hevc_nvenc", apply_nvenc },
+ { "av1_nvenc", apply_nvenc },
+#ifdef Q_OS_DARWIN
+ { "h264_videotoolbox", apply_videotoolbox },
+ { "hevc_videotoolbox", apply_videotoolbox },
+ { "prores_videotoolbox", apply_videotoolbox },
+ { "vp9_videotoolbox", apply_videotoolbox },
+#endif
+#if QT_CONFIG(vaapi)
+ { "mpeg2_vaapi", apply_vaapi },
+ { "mjpeg_vaapi", apply_vaapi },
+ { "h264_vaapi", apply_vaapi },
+ { "hevc_vaapi", apply_vaapi },
+ { "vp8_vaapi", apply_vaapi },
+ { "vp9_vaapi", apply_vaapi },
+#endif
+#ifdef Q_OS_WINDOWS
+ { "hevc_mf", apply_mf },
+ { "h264_mf", apply_mf },
+#endif
+#ifdef Q_OS_ANDROID
+ { "hevc_mediacodec", apply_mediacodec },
+ { "h264_mediacodec", apply_mediacodec },
+#endif
+ { nullptr, nullptr } };
+
+const struct {
+ const char *name;
+ ApplyOptions apply;
+} audioCodecOptionTable[] = {
+ { nullptr, nullptr }
+};
+
+void applyVideoEncoderOptions(const QMediaEncoderSettings &settings, const QByteArray &codecName, AVCodecContext *codec, AVDictionary **opts)
+{
+ av_dict_set(opts, "threads", "auto", 0); // we always want automatic threading
+
+ auto *table = videoCodecOptionTable;
+ while (table->name) {
+ if (codecName == table->name) {
+ table->apply(settings, codec, opts);
+ return;
+ }
+
+ ++table;
+ }
+}
+
+void applyAudioEncoderOptions(const QMediaEncoderSettings &settings, const QByteArray &codecName, AVCodecContext *codec, AVDictionary **opts)
+{
+ codec->thread_count = -1; // we always want automatic threading
+ if (settings.encodingMode() == QMediaRecorder::ConstantBitRateEncoding || settings.encodingMode() == QMediaRecorder::AverageBitRateEncoding)
+ codec->bit_rate = settings.audioBitRate();
+
+ auto *table = audioCodecOptionTable;
+ while (table->name) {
+ if (codecName == table->name) {
+ table->apply(settings, codec, opts);
+ return;
+ }
+
+ ++table;
+ }
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderoptions_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderoptions_p.h
new file mode 100644
index 000000000..005ad7652
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderoptions_p.h
@@ -0,0 +1,32 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGENCODEROPTIONS_P_H
+#define QFFMPEGENCODEROPTIONS_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qffmpeghwaccel_p.h"
+#include "qvideoframeformat.h"
+#include "private/qplatformmediarecorder_p.h"
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+void applyVideoEncoderOptions(const QMediaEncoderSettings &settings, const QByteArray &codecName, AVCodecContext *codec, AVDictionary **opts);
+void applyAudioEncoderOptions(const QMediaEncoderSettings &settings, const QByteArray &codecName, AVCodecContext *codec, AVDictionary **opts);
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGENCODEROPTIONS_P_H
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread.cpp
new file mode 100644
index 000000000..61fe954c8
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread.cpp
@@ -0,0 +1,40 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qffmpegencoderthread_p.h"
+#include "qmetaobject.h"
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+EncoderThread::EncoderThread(RecordingEngine &recordingEngine) : m_recordingEngine(recordingEngine)
+{
+}
+
+void EncoderThread::setPaused(bool paused)
+{
+ auto guard = lockLoopData();
+ m_paused = paused;
+}
+
+void EncoderThread::setAutoStop(bool autoStop)
+{
+ auto guard = lockLoopData();
+ m_autoStop = autoStop;
+}
+
+void EncoderThread::setEndOfSourceStream()
+{
+ {
+ auto guard = lockLoopData();
+ m_endOfSourceStream = true;
+ }
+
+ emit endOfSourceStream();
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#include "moc_qffmpegencoderthread_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread_p.h
new file mode 100644
index 000000000..f1f6b610a
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencoderthread_p.h
@@ -0,0 +1,72 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGENCODERTHREAD_P_H
+#define QFFMPEGENCODERTHREAD_P_H
+
+#include "qffmpegthread_p.h"
+#include "qpointer.h"
+
+#include "private/qmediainputencoderinterface_p.h"
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+class RecordingEngine;
+
+class EncoderThread : public ConsumerThread, public QMediaInputEncoderInterface
+{
+ Q_OBJECT
+public:
+ EncoderThread(RecordingEngine &recordingEngine);
+
+ void setPaused(bool paused);
+
+ void setAutoStop(bool autoStop);
+
+ void setSource(QObject *source) { m_source = source; }
+
+ QObject *source() const { return m_source; }
+
+ bool canPushFrame() const override { return m_canPushFrame.load(std::memory_order_relaxed); }
+
+ void setEndOfSourceStream();
+
+ bool isEndOfSourceStream() const { return m_endOfSourceStream; }
+
+protected:
+ void updateCanPushFrame();
+
+ virtual bool checkIfCanPushFrame() const = 0;
+
+ void resetEndOfSourceStream() { m_endOfSourceStream = false; }
+
+ auto lockLoopData()
+ {
+ return QScopeGuard([this, locker = ConsumerThread::lockLoopData()]() mutable {
+ const bool autoStopActivated = m_endOfSourceStream && m_autoStop;
+ const bool canPush = !autoStopActivated && !m_paused && checkIfCanPushFrame();
+ locker.unlock();
+ if (m_canPushFrame.exchange(canPush, std::memory_order_relaxed) != canPush)
+ emit canPushFrameChanged();
+ });
+ }
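+
+    // Usage sketch (illustrative): a subclass queues data under the lock; when
+    // the guard goes out of scope it re-evaluates checkIfCanPushFrame() and
+    // emits canPushFrameChanged() only on transitions:
+    //
+    //     void addBuffer(const QAudioBuffer &buffer)
+    //     {
+    //         auto guard = lockLoopData(); // mutex held from here
+    //         m_queue.push(buffer);
+    //     } // guard unlocks, then updates m_canPushFrame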
+
+Q_SIGNALS:
+ void canPushFrameChanged();
+ void endOfSourceStream();
+
+protected:
+ bool m_paused = false;
+ bool m_endOfSourceStream = false;
+ bool m_autoStop = false;
+ std::atomic_bool m_canPushFrame = false;
+ RecordingEngine &m_recordingEngine;
+ QPointer<QObject> m_source;
+};
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGENCODERTHREAD_P_H
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer.cpp
new file mode 100644
index 000000000..4f8c21bd5
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer.cpp
@@ -0,0 +1,165 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegencodinginitializer_p.h"
+#include "qffmpegrecordingengineutils_p.h"
+#include "qffmpegrecordingengine_p.h"
+#include "qffmpegaudioinput_p.h"
+#include "qvideoframe.h"
+
+#include "private/qplatformvideoframeinput_p.h"
+#include "private/qplatformaudiobufferinput_p.h"
+#include "private/qplatformaudiobufferinput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+EncodingInitializer::EncodingInitializer(RecordingEngine &engine) : m_recordingEngine(engine) { }
+
+EncodingInitializer::~EncodingInitializer()
+{
+ for (QObject *source : m_pendingSources)
+ setEncoderInterface(source, nullptr);
+}
+
+void EncodingInitializer::start(const std::vector<QPlatformAudioBufferInputBase *> &audioSources,
+ const std::vector<QPlatformVideoSource *> &videoSources)
+{
+ for (auto source : audioSources) {
+ if (auto audioInput = qobject_cast<QFFmpegAudioInput *>(source))
+ m_recordingEngine.addAudioInput(audioInput);
+ else if (auto audioBufferInput = qobject_cast<QPlatformAudioBufferInput *>(source))
+ addAudioBufferInput(audioBufferInput);
+ else
+ Q_ASSERT(!"Undefined source type");
+ }
+
+ for (auto source : videoSources)
+ addVideoSource(source);
+
+ tryStartRecordingEngine();
+}
+
+void EncodingInitializer::addAudioBufferInput(QPlatformAudioBufferInput *input)
+{
+ Q_ASSERT(input);
+
+ if (input->audioFormat().isValid())
+ m_recordingEngine.addAudioBufferInput(input, {});
+ else
+ addPendingAudioBufferInput(input);
+}
+
+void EncodingInitializer::addPendingAudioBufferInput(QPlatformAudioBufferInput *input)
+{
+ addPendingSource(input);
+
+ connect(input, &QPlatformAudioBufferInput::destroyed, this, [this, input]() {
+ erasePendingSource(input, QStringLiteral("Audio source deleted"), true);
+ });
+
+ connect(input, &QPlatformAudioBufferInput::newAudioBuffer, this,
+ [this, input](const QAudioBuffer &buffer) {
+ if (buffer.isValid())
+ erasePendingSource(
+ input, [&]() { m_recordingEngine.addAudioBufferInput(input, buffer); });
+ else
+ erasePendingSource(input,
+ QStringLiteral("Audio source has sent the end frame"));
+ });
+}
+
+void EncodingInitializer::addVideoSource(QPlatformVideoSource *source)
+{
+ Q_ASSERT(source);
+ Q_ASSERT(source->isActive());
+
+ if (source->frameFormat().isValid())
+ m_recordingEngine.addVideoSource(source, {});
+ else if (source->hasError())
+ emitStreamInitializationError(QStringLiteral("Video source error: ")
+ + source->errorString());
+ else
+ addPendingVideoSource(source);
+}
+
+void EncodingInitializer::addPendingVideoSource(QPlatformVideoSource *source)
+{
+ addPendingSource(source);
+
+ connect(source, &QPlatformVideoSource::errorChanged, this, [this, source]() {
+ if (source->hasError())
+ erasePendingSource(source,
+ QStringLiteral("Videio source error: ") + source->errorString());
+ });
+
+ connect(source, &QPlatformVideoSource::destroyed, this, [this, source]() {
+ erasePendingSource(source, QStringLiteral("Source deleted"), true);
+ });
+
+ connect(source, &QPlatformVideoSource::activeChanged, this, [this, source]() {
+ if (!source->isActive())
+ erasePendingSource(source, QStringLiteral("Video source deactivated"));
+ });
+
+ connect(source, &QPlatformVideoSource::newVideoFrame, this,
+ [this, source](const QVideoFrame &frame) {
+ if (frame.isValid())
+ erasePendingSource(source,
+ [&]() { m_recordingEngine.addVideoSource(source, frame); });
+ else
+ erasePendingSource(source,
+ QStringLiteral("Video source has sent the end frame"));
+ });
+}
+
+void EncodingInitializer::tryStartRecordingEngine()
+{
+ if (m_pendingSources.empty())
+ m_recordingEngine.start();
+}
+
+void EncodingInitializer::emitStreamInitializationError(QString error)
+{
+ emit m_recordingEngine.streamInitializationError(
+ QMediaRecorder::ResourceError,
+ QStringLiteral("Video steam initialization error. ") + error);
+}
+
+void EncodingInitializer::addPendingSource(QObject *source)
+{
+ Q_ASSERT(m_pendingSources.count(source) == 0);
+
+ setEncoderInterface(source, this);
+ m_pendingSources.emplace(source);
+}
+
+template <typename F>
+void EncodingInitializer::erasePendingSource(QObject *source, F &&functionOrError, bool destroyed)
+{
+ const auto erasedCount = m_pendingSources.erase(source);
+ if (erasedCount == 0)
+ return; // got a queued event, just ignore it.
+
+ if (!destroyed) {
+ setEncoderInterface(source, nullptr);
+ disconnect(source, nullptr, this, nullptr);
+ }
+
+ if constexpr (std::is_invocable_v<F>)
+ functionOrError();
+ else
+ emitStreamInitializationError(functionOrError);
+
+ tryStartRecordingEngine();
+}
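+
+// The second argument above is either an error string or a callable,
+// dispatched via std::is_invocable_v; both forms appear in the connects
+// earlier in this file, e.g.
+//   erasePendingSource(source, QStringLiteral("Video source deactivated"));
+//   erasePendingSource(source, [&] { m_recordingEngine.addVideoSource(source, frame); });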
+
+bool EncodingInitializer::canPushFrame() const
+{
+ return true;
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer_p.h
new file mode 100644
index 000000000..e3bcb3428
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegencodinginitializer_p.h
@@ -0,0 +1,77 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QENCODINGINITIALIZER_P_H
+#define QENCODINGINITIALIZER_P_H
+
+#include "qobject.h"
+#include "private/qmediainputencoderinterface_p.h"
+#include <unordered_set>
+#include <vector>
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+QT_BEGIN_NAMESPACE
+
+class QFFmpegAudioInput;
+class QPlatformVideoSource;
+class QPlatformAudioBufferInput;
+class QPlatformAudioBufferInputBase;
+class QMediaInputEncoderInterface;
+
+namespace QFFmpeg {
+
+class RecordingEngine;
+
+// Initializes RecordingEngine with audio and video sources, potentially lazily
+// upon first frame arrival if video frame format is not pre-determined.
+class EncodingInitializer : public QObject, private QMediaInputEncoderInterface
+{
+public:
+ EncodingInitializer(RecordingEngine &engine);
+
+ ~EncodingInitializer() override;
+
+ void start(const std::vector<QPlatformAudioBufferInputBase *> &audioSources,
+ const std::vector<QPlatformVideoSource *> &videoSources);
+
+private:
+ void addAudioBufferInput(QPlatformAudioBufferInput *input);
+
+ void addPendingAudioBufferInput(QPlatformAudioBufferInput *input);
+
+ void addVideoSource(QPlatformVideoSource *source);
+
+ void addPendingVideoSource(QPlatformVideoSource *source);
+
+ void addPendingSource(QObject *source);
+
+ void tryStartRecordingEngine();
+
+private:
+ void emitStreamInitializationError(QString error);
+
+ template <typename F>
+ void erasePendingSource(QObject *source, F &&functionOrError, bool destroyed = false);
+
+ bool canPushFrame() const override;
+
+private:
+ RecordingEngine &m_recordingEngine;
+ std::unordered_set<QObject *> m_pendingSources;
+};
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QENCODINGINITIALIZER_P_H
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer.cpp
new file mode 100644
index 000000000..6a33e79dd
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer.cpp
@@ -0,0 +1,64 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qffmpegmuxer_p.h"
+#include "qffmpegrecordingengine_p.h"
+#include "qffmpegrecordingengineutils_p.h"
+#include <QtCore/qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+static Q_LOGGING_CATEGORY(qLcFFmpegMuxer, "qt.multimedia.ffmpeg.muxer");
+
+Muxer::Muxer(RecordingEngine *encoder) : m_encoder(encoder)
+{
+ setObjectName(QLatin1String("Muxer"));
+}
+
+void Muxer::addPacket(AVPacketUPtr packet)
+{
+ {
+ QMutexLocker locker = lockLoopData();
+ m_packetQueue.push(std::move(packet));
+ }
+
+ // qCDebug(qLcFFmpegEncoder) << "Muxer::addPacket" << packet->pts << packet->stream_index;
+ dataReady();
+}
+
+AVPacketUPtr Muxer::takePacket()
+{
+ QMutexLocker locker = lockLoopData();
+ return dequeueIfPossible(m_packetQueue);
+}
+
+void Muxer::init()
+{
+ qCDebug(qLcFFmpegMuxer) << "Muxer::init started thread.";
+}
+
+void Muxer::cleanup()
+{
+ while (!m_packetQueue.empty())
+ processOne();
+}
+
+bool Muxer::hasData() const
+{
+ return !m_packetQueue.empty();
+}
+
+void Muxer::processOne()
+{
+ auto packet = takePacket();
+ // qCDebug(qLcFFmpegEncoder) << "writing packet to file" << packet->pts << packet->duration <<
+ // packet->stream_index;
+
+ // the function takes ownership for the packet
+ av_interleaved_write_frame(m_encoder->avFormatContext(), packet.release());
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer_p.h
new file mode 100644
index 000000000..4f8f4d27a
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegmuxer_p.h
@@ -0,0 +1,41 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGMUXER_P_H
+#define QFFMPEGMUXER_P_H
+
+#include "qffmpegthread_p.h"
+#include "qffmpeg_p.h"
+#include <queue>
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+class RecordingEngine;
+
+class Muxer : public ConsumerThread
+{
+public:
+ Muxer(RecordingEngine *encoder);
+
+ void addPacket(AVPacketUPtr packet);
+
+private:
+ AVPacketUPtr takePacket();
+
+ void init() override;
+ void cleanup() override;
+ bool hasData() const override;
+ void processOne() override;
+
+private:
+ std::queue<AVPacketUPtr> m_packetQueue;
+
+ RecordingEngine *m_encoder;
+};
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGMUXER_P_H
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine.cpp
new file mode 100644
index 000000000..2c0931780
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine.cpp
@@ -0,0 +1,278 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qffmpegrecordingengine_p.h"
+#include "qffmpegencodinginitializer_p.h"
+#include "qffmpegaudioencoder_p.h"
+#include "qffmpegaudioinput_p.h"
+#include "qffmpegrecordingengineutils_p.h"
+
+#include "private/qmultimediautils_p.h"
+#include "private/qplatformaudiobufferinput_p.h"
+#include "private/qplatformvideosource_p.h"
+#include "private/qplatformvideoframeinput_p.h"
+
+#include "qdebug.h"
+#include "qffmpegvideoencoder_p.h"
+#include "qffmpegmediametadata_p.h"
+#include "qffmpegmuxer_p.h"
+#include "qloggingcategory.h"
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcFFmpegEncoder, "qt.multimedia.ffmpeg.encoder");
+
+namespace QFFmpeg
+{
+
+RecordingEngine::RecordingEngine(const QMediaEncoderSettings &settings,
+ std::unique_ptr<EncodingFormatContext> context)
+ : m_settings(settings), m_formatContext(std::move(context)), m_muxer(new Muxer(this))
+{
+ Q_ASSERT(m_formatContext);
+ Q_ASSERT(m_formatContext->isAVIOOpen());
+}
+
+RecordingEngine::~RecordingEngine() = default;
+
+void RecordingEngine::addAudioInput(QFFmpegAudioInput *input)
+{
+ Q_ASSERT(input);
+
+ if (input->device.isNull()) {
+ emit streamInitializationError(QMediaRecorder::ResourceError,
+ QLatin1StringView("Audio device is null"));
+ return;
+ }
+
+ const QAudioFormat format = input->device.preferredFormat();
+
+ if (!format.isValid()) {
+ emit streamInitializationError(
+ QMediaRecorder::FormatError,
+ QLatin1StringView("Audio device has invalid preferred format"));
+ return;
+ }
+
+ AudioEncoder *audioEncoder = createAudioEncoder(format);
+ connectEncoderToSource(audioEncoder, input);
+
+ input->setRunning(true);
+}
+
+void RecordingEngine::addAudioBufferInput(QPlatformAudioBufferInput *input,
+ const QAudioBuffer &firstBuffer)
+{
+ Q_ASSERT(input);
+ const QAudioFormat format = firstBuffer.isValid() ? firstBuffer.format() : input->audioFormat();
+
+ AudioEncoder *audioEncoder = createAudioEncoder(format);
+
+ // set the buffer before connecting to avoid potential races
+ if (firstBuffer.isValid())
+ audioEncoder->addBuffer(firstBuffer);
+
+ connectEncoderToSource(audioEncoder, input);
+}
+
+AudioEncoder *RecordingEngine::createAudioEncoder(const QAudioFormat &format)
+{
+ Q_ASSERT(format.isValid());
+
+ auto audioEncoder = new AudioEncoder(*this, format, m_settings);
+ m_audioEncoders.push_back(audioEncoder);
+ connect(audioEncoder, &EncoderThread::endOfSourceStream, this,
+ &RecordingEngine::handleSourceEndOfStream);
+ if (m_autoStop)
+ audioEncoder->setAutoStop(true);
+
+ return audioEncoder;
+}
+
+void RecordingEngine::addVideoSource(QPlatformVideoSource *source, const QVideoFrame &firstFrame)
+{
+ QVideoFrameFormat frameFormat =
+ firstFrame.isValid() ? firstFrame.surfaceFormat() : source->frameFormat();
+
+ Q_ASSERT(frameFormat.isValid());
+
+ if (firstFrame.isValid() && frameFormat.streamFrameRate() <= 0.f) {
+ const qint64 startTime = firstFrame.startTime();
+ const qint64 endTime = firstFrame.endTime();
+ if (startTime != -1 && endTime > startTime)
+ frameFormat.setStreamFrameRate(static_cast<qreal>(VideoFrameTimeBase)
+ / (endTime - startTime));
+ }
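+    // e.g., a first frame spanning startTime = 0 us to endTime = 33'333 us yields
+    // streamFrameRate = 1'000'000 / 33'333 ~ 30 fps (VideoFrameTimeBase is in us).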
+
+ std::optional<AVPixelFormat> hwPixelFormat = source->ffmpegHWPixelFormat()
+ ? AVPixelFormat(*source->ffmpegHWPixelFormat())
+ : std::optional<AVPixelFormat>{};
+
+ qCDebug(qLcFFmpegEncoder) << "adding video source" << source->metaObject()->className() << ":"
+ << "pixelFormat=" << frameFormat.pixelFormat()
+ << "frameSize=" << frameFormat.frameSize()
+ << "frameRate=" << frameFormat.streamFrameRate()
+ << "ffmpegHWPixelFormat=" << (hwPixelFormat ? *hwPixelFormat : AV_PIX_FMT_NONE);
+
+ auto veUPtr = std::make_unique<VideoEncoder>(*this, m_settings, frameFormat, hwPixelFormat);
+ if (!veUPtr->isValid()) {
+ emit streamInitializationError(QMediaRecorder::FormatError,
+ QLatin1StringView("Cannot initialize encoder"));
+ return;
+ }
+
+ auto videoEncoder = veUPtr.release();
+ m_videoEncoders.append(videoEncoder);
+ if (m_autoStop)
+ videoEncoder->setAutoStop(true);
+
+ connect(videoEncoder, &EncoderThread::endOfSourceStream, this,
+ &RecordingEngine::handleSourceEndOfStream);
+
+ // set the frame before connecting to avoid potential races
+ if (firstFrame.isValid())
+ videoEncoder->addFrame(firstFrame);
+
+ connectEncoderToSource(videoEncoder, source);
+}
+
+void RecordingEngine::start()
+{
+ Q_ASSERT(m_initializer);
+ m_initializer.reset();
+
+ if (m_audioEncoders.empty() && m_videoEncoders.empty()) {
+ emit sessionError(QMediaRecorder::ResourceError,
+ QLatin1StringView("No valid stream found for encoding"));
+ return;
+ }
+
+ qCDebug(qLcFFmpegEncoder) << "RecordingEngine::start!";
+
+ avFormatContext()->metadata = QFFmpegMetaData::toAVMetaData(m_metaData);
+
+ Q_ASSERT(!m_isHeaderWritten);
+
+ int res = avformat_write_header(avFormatContext(), nullptr);
+ if (res < 0) {
+ qWarning() << "could not write header, error:" << res << err2str(res);
+ emit sessionError(QMediaRecorder::ResourceError,
+ QLatin1StringView("Cannot start writing the stream"));
+ return;
+ }
+
+ m_isHeaderWritten = true;
+
+ qCDebug(qLcFFmpegEncoder) << "stream header is successfully written";
+
+ m_muxer->start();
+
+ forEachEncoder([](QThread *thread) { thread->start(); });
+}
+
+void RecordingEngine::initialize(const std::vector<QPlatformAudioBufferInputBase *> &audioSources,
+ const std::vector<QPlatformVideoSource *> &videoSources)
+{
+ qCDebug(qLcFFmpegEncoder) << ">>>>>>>>>>>>>>> initialize";
+
+ m_initializer = std::make_unique<EncodingInitializer>(*this);
+ m_initializer->start(audioSources, videoSources);
+}
+
+RecordingEngine::EncodingFinalizer::EncodingFinalizer(RecordingEngine &recordingEngine)
+ : m_recordingEngine(recordingEngine)
+{
+ connect(this, &QThread::finished, this, &QObject::deleteLater);
+}
+
+void RecordingEngine::EncodingFinalizer::run()
+{
+ m_recordingEngine.forEachEncoder(&EncoderThread::stopAndDelete);
+ m_recordingEngine.m_muxer->stopAndDelete();
+
+ if (m_recordingEngine.m_isHeaderWritten) {
+ const int res = av_write_trailer(m_recordingEngine.avFormatContext());
+ if (res < 0) {
+ const auto errorDescription = err2str(res);
+ qCWarning(qLcFFmpegEncoder) << "could not write trailer" << res << errorDescription;
+ emit m_recordingEngine.sessionError(QMediaRecorder::FormatError,
+ QLatin1String("Cannot write trailer: ")
+ + errorDescription);
+ }
+ }
+    // If the header was never written, calling av_write_trailer could crash inside FFmpeg.
+
+ // close AVIO before emitting finalizationDone.
+ m_recordingEngine.m_formatContext->closeAVIO();
+
+ qCDebug(qLcFFmpegEncoder) << " done finalizing.";
+ emit m_recordingEngine.finalizationDone();
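+    // m_recordingEngine is a reference, so take its address to delete the engine,
+    // whose lifetime ends with this finalizer run.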
+ auto recordingEnginePtr = &m_recordingEngine;
+ delete recordingEnginePtr;
+}
+
+void RecordingEngine::finalize()
+{
+ qCDebug(qLcFFmpegEncoder) << ">>>>>>>>>>>>>>> finalize";
+
+ m_initializer.reset();
+
+ forEachEncoder(&disconnectEncoderFromSource);
+
+ auto *finalizer = new EncodingFinalizer(*this);
+ finalizer->start();
+}
+
+void RecordingEngine::setPaused(bool paused)
+{
+ forEachEncoder(&EncoderThread::setPaused, paused);
+}
+
+void RecordingEngine::setAutoStop(bool autoStop)
+{
+ m_autoStop = autoStop;
+ forEachEncoder(&EncoderThread::setAutoStop, autoStop);
+ handleSourceEndOfStream();
+}
+
+void RecordingEngine::setMetaData(const QMediaMetaData &metaData)
+{
+ m_metaData = metaData;
+}
+
+void RecordingEngine::newTimeStamp(qint64 time)
+{
+ QMutexLocker locker(&m_timeMutex);
+ if (time > m_timeRecorded) {
+ m_timeRecorded = time;
+ emit durationChanged(time);
+ }
+}
+
+bool RecordingEngine::isEndOfSourceStreams() const
+{
+ auto isAtEnd = [](EncoderThread *encoder) { return encoder->isEndOfSourceStream(); };
+ return std::all_of(m_videoEncoders.cbegin(), m_videoEncoders.cend(), isAtEnd)
+ && std::all_of(m_audioEncoders.cbegin(), m_audioEncoders.cend(), isAtEnd);
+}
+
+void RecordingEngine::handleSourceEndOfStream()
+{
+ if (m_autoStop && isEndOfSourceStreams())
+ emit autoStopped();
+}
+
+template <typename F, typename... Args>
+void RecordingEngine::forEachEncoder(F &&f, Args &&...args)
+{
+ for (AudioEncoder *audioEncoder : m_audioEncoders)
+ std::invoke(f, audioEncoder, args...);
+ for (VideoEncoder *videoEncoder : m_videoEncoders)
+ std::invoke(f, videoEncoder, args...);
+}
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#include "moc_qffmpegrecordingengine_p.cpp"
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine_p.h
new file mode 100644
index 000000000..ce3aaa6bb
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengine_p.h
@@ -0,0 +1,121 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGENCODER_P_H
+#define QFFMPEGENCODER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qffmpegthread_p.h"
+#include "qffmpegencodingformatcontext_p.h"
+
+#include <private/qplatformmediarecorder_p.h>
+#include <qmediarecorder.h>
+
+QT_BEGIN_NAMESPACE
+
+class QFFmpegAudioInput;
+class QPlatformAudioBufferInput;
+class QPlatformAudioBufferInputBase;
+class QVideoFrame;
+class QAudioBuffer;
+class QPlatformVideoSource;
+
+namespace QFFmpeg
+{
+
+class RecordingEngine;
+class Muxer;
+class AudioEncoder;
+class VideoEncoder;
+class VideoFrameEncoder;
+class EncodingInitializer;
+
+class RecordingEngine : public QObject
+{
+ Q_OBJECT
+public:
+ RecordingEngine(const QMediaEncoderSettings &settings, std::unique_ptr<EncodingFormatContext> context);
+ ~RecordingEngine();
+
+ void initialize(const std::vector<QPlatformAudioBufferInputBase *> &audioSources,
+ const std::vector<QPlatformVideoSource *> &videoSources);
+ void finalize();
+
+ void setPaused(bool p);
+
+ void setAutoStop(bool autoStop);
+
+ bool autoStop() const { return m_autoStop; }
+
+ void setMetaData(const QMediaMetaData &metaData);
+ AVFormatContext *avFormatContext() { return m_formatContext->avFormatContext(); }
+ Muxer *getMuxer() { return m_muxer; }
+
+ bool isEndOfSourceStreams() const;
+
+public Q_SLOTS:
+ void newTimeStamp(qint64 time);
+
+Q_SIGNALS:
+ void durationChanged(qint64 duration);
+ void sessionError(QMediaRecorder::Error code, const QString &description);
+ void streamInitializationError(QMediaRecorder::Error code, const QString &description);
+ void finalizationDone();
+ void autoStopped();
+
+private:
+ class EncodingFinalizer : public QThread
+ {
+ public:
+ EncodingFinalizer(RecordingEngine &recordingEngine);
+
+ void run() override;
+
+ private:
+ RecordingEngine &m_recordingEngine;
+ };
+
+ friend class EncodingInitializer;
+ void addAudioInput(QFFmpegAudioInput *input);
+ void addAudioBufferInput(QPlatformAudioBufferInput *input, const QAudioBuffer &firstBuffer);
+ AudioEncoder *createAudioEncoder(const QAudioFormat &format);
+
+ void addVideoSource(QPlatformVideoSource *source, const QVideoFrame &firstFrame);
+ void handleSourceEndOfStream();
+
+ void start();
+
+ template <typename F, typename... Args>
+ void forEachEncoder(F &&f, Args &&...args);
+
+private:
+ QMediaEncoderSettings m_settings;
+ QMediaMetaData m_metaData;
+ std::unique_ptr<EncodingFormatContext> m_formatContext;
+ Muxer *m_muxer = nullptr;
+
+ QList<AudioEncoder *> m_audioEncoders;
+ QList<VideoEncoder *> m_videoEncoders;
+ std::unique_ptr<EncodingInitializer> m_initializer;
+
+ QMutex m_timeMutex;
+ qint64 m_timeRecorded = 0;
+
+ bool m_isHeaderWritten = false;
+ bool m_autoStop = false;
+};
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGENCODER_P_H
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils.cpp
new file mode 100644
index 000000000..6c2ba8b15
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils.cpp
@@ -0,0 +1,63 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "recordingengine/qffmpegrecordingengineutils_p.h"
+#include "recordingengine/qffmpegencoderthread_p.h"
+#include "private/qplatformaudiobufferinput_p.h"
+#include "private/qplatformvideoframeinput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+template <typename F>
+void doWithMediaFrameInput(QObject *source, F &&f)
+{
+ if (auto videoFrameInput = qobject_cast<QPlatformVideoFrameInput *>(source))
+ f(videoFrameInput);
+ else if (auto audioBufferInput = qobject_cast<QPlatformAudioBufferInput *>(source))
+ f(audioBufferInput);
+}
+
+void setEncoderInterface(QObject *source, QMediaInputEncoderInterface *interface)
+{
+ doWithMediaFrameInput(source, [&](auto source) {
+ using Source = std::remove_pointer_t<decltype(source)>;
+
+ source->setEncoderInterface(interface);
+ if (interface)
+            // Postpone emitting 'encoderUpdated', as the encoding pipeline may not
+            // be completely ready yet. One such case is QMediaRecorder::stop being
+            // called while 'readyToSendFrame' is handled.
+ QMetaObject::invokeMethod(source, &Source::encoderUpdated, Qt::QueuedConnection);
+ else
+ emit source->encoderUpdated();
+ });
+}
+
+void setEncoderUpdateConnection(QObject *source, EncoderThread *encoder)
+{
+ doWithMediaFrameInput(source, [&](auto source) {
+ using Source = std::remove_pointer_t<decltype(source)>;
+ QObject::connect(encoder, &EncoderThread::canPushFrameChanged, source,
+ &Source::encoderUpdated);
+ });
+}
+
+void disconnectEncoderFromSource(EncoderThread *encoder)
+{
+ QObject *source = encoder->source();
+ if (!source)
+ return;
+
+    // TODO: Remove AudioEncoder's dependency on QFFmpegAudioInput so that the
+    // source can be reset here:
+    // encoder->setSource(nullptr);
+
+ QObject::disconnect(source, nullptr, encoder, nullptr);
+ setEncoderInterface(source, nullptr);
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils_p.h
new file mode 100644
index 000000000..a60f81696
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegrecordingengineutils_p.h
@@ -0,0 +1,81 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QFFMPEGRECORDINGENGINEUTILS_P_H
+#define QFFMPEGRECORDINGENGINEUTILS_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qobject.h"
+#include <queue>
+
+QT_BEGIN_NAMESPACE
+
+class QMediaInputEncoderInterface;
+class QPlatformVideoSource;
+
+namespace QFFmpeg {
+
+constexpr qint64 VideoFrameTimeBase = 1000000; // microseconds per second
+
+class EncoderThread;
+
+template <typename T>
+T dequeueIfPossible(std::queue<T> &queue)
+{
+ if (queue.empty())
+ return T{};
+
+ auto result = std::move(queue.front());
+ queue.pop();
+ return result;
+}
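+// Note: an empty queue yields a value-initialized T, so callers dequeuing
+// from std::queue<AVPacketUPtr> simply receive a null pointer.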
+
+void setEncoderInterface(QObject *source, QMediaInputEncoderInterface *interface);
+
+void setEncoderUpdateConnection(QObject *source, EncoderThread *encoder);
+
+template <typename Encoder, typename Source>
+void connectEncoderToSource(Encoder *encoder, Source *source)
+{
+ Q_ASSERT(!encoder->source());
+ encoder->setSource(source);
+
+ if constexpr (std::is_same_v<Source, QPlatformVideoSource>) {
+ QObject::connect(source, &Source::newVideoFrame, encoder, &Encoder::addFrame,
+ Qt::DirectConnection);
+
+ QObject::connect(source, &Source::activeChanged, encoder, [=]() {
+ if (!source->isActive())
+ encoder->setEndOfSourceStream();
+ });
+ } else {
+ QObject::connect(source, &Source::newAudioBuffer, encoder, &Encoder::addBuffer,
+ Qt::DirectConnection);
+ }
+
+ // TODO:
+ // QObject::connect(source, &Source::disconnectedFromSession, encoder, [=]() {
+ // encoder->setSourceEndOfStream();
+ // });
+
+ setEncoderUpdateConnection(source, encoder);
+ setEncoderInterface(source, encoder);
+}
+
+void disconnectEncoderFromSource(EncoderThread *encoder);
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGRECORDINGENGINEUTILS_P_H
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder.cpp
new file mode 100644
index 000000000..7c7d4a55f
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder.cpp
@@ -0,0 +1,245 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qffmpegvideoencoder_p.h"
+#include "qffmpegmuxer_p.h"
+#include "qffmpegvideobuffer_p.h"
+#include "qffmpegrecordingengine_p.h"
+#include "qffmpegvideoframeencoder_p.h"
+#include "qffmpegrecordingengineutils_p.h"
+#include "private/qvideoframe_p.h"
+#include <QtCore/qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+static Q_LOGGING_CATEGORY(qLcFFmpegVideoEncoder, "qt.multimedia.ffmpeg.videoencoder");
+
+VideoEncoder::VideoEncoder(RecordingEngine &recordingEngine, const QMediaEncoderSettings &settings,
+ const QVideoFrameFormat &format, std::optional<AVPixelFormat> hwFormat)
+ : EncoderThread(recordingEngine)
+{
+ setObjectName(QLatin1String("VideoEncoder"));
+
+ AVPixelFormat swFormat = QFFmpegVideoBuffer::toAVPixelFormat(format.pixelFormat());
+ AVPixelFormat ffmpegPixelFormat =
+ hwFormat && *hwFormat != AV_PIX_FMT_NONE ? *hwFormat : swFormat;
+ auto frameRate = format.streamFrameRate();
+ if (frameRate <= 0.) {
+ qWarning() << "Invalid frameRate" << frameRate << "; Using the default instead";
+
+ // set some default frame rate since ffmpeg has UB if it's 0.
+ frameRate = 30.;
+ }
+
+ m_frameEncoder = VideoFrameEncoder::create(settings,
+ format.frameSize(),
+ format.rotation(),
+ frameRate,
+ ffmpegPixelFormat,
+ swFormat,
+ recordingEngine.avFormatContext());
+}
+
+VideoEncoder::~VideoEncoder() = default;
+
+bool VideoEncoder::isValid() const
+{
+ return m_frameEncoder != nullptr;
+}
+
+void VideoEncoder::addFrame(const QVideoFrame &frame)
+{
+ if (!frame.isValid()) {
+ setEndOfSourceStream();
+ return;
+ }
+
+ {
+ auto guard = lockLoopData();
+
+ resetEndOfSourceStream();
+
+ if (m_paused) {
+ m_shouldAdjustTimeBaseForNextFrame = true;
+ return;
+ }
+
+        // Drop frames if the encoder cannot keep up with the video source's data
+        // rate; sources may use canPushFrame to back off instead.
+ const bool queueFull = m_videoFrameQueue.size() >= m_maxQueueSize;
+
+ if (queueFull) {
+ qCDebug(qLcFFmpegVideoEncoder) << "RecordingEngine frame queue full. Frame lost.";
+ return;
+ }
+
+ m_videoFrameQueue.push({ frame, m_shouldAdjustTimeBaseForNextFrame });
+ m_shouldAdjustTimeBaseForNextFrame = false;
+ }
+
+ dataReady();
+}
+
+VideoEncoder::FrameInfo VideoEncoder::takeFrame()
+{
+ auto guard = lockLoopData();
+ return dequeueIfPossible(m_videoFrameQueue);
+}
+
+void VideoEncoder::retrievePackets()
+{
+ if (!m_frameEncoder)
+ return;
+ while (auto packet = m_frameEncoder->retrievePacket())
+ m_recordingEngine.getMuxer()->addPacket(std::move(packet));
+}
+
+void VideoEncoder::init()
+{
+ Q_ASSERT(isValid());
+
+ qCDebug(qLcFFmpegVideoEncoder) << "VideoEncoder::init started video device thread.";
+ bool ok = m_frameEncoder->open();
+ if (!ok)
+ emit m_recordingEngine.sessionError(QMediaRecorder::ResourceError,
+ "Could not initialize encoder");
+}
+
+void VideoEncoder::cleanup()
+{
+ while (!m_videoFrameQueue.empty())
+ processOne();
+ if (m_frameEncoder) {
+ while (m_frameEncoder->sendFrame(nullptr) == AVERROR(EAGAIN))
+ retrievePackets();
+ retrievePackets();
+ }
+}
+
+bool VideoEncoder::hasData() const
+{
+ return !m_videoFrameQueue.empty();
+}
+
+struct QVideoFrameHolder
+{
+ QVideoFrame f;
+ QImage i;
+};
+
+static void freeQVideoFrame(void *opaque, uint8_t *)
+{
+ delete reinterpret_cast<QVideoFrameHolder *>(opaque);
+}
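+// Passed to av_buffer_create() below so that the mapped QVideoFrame (and its
+// cached QImage, for JPEG frames) stays alive until the AVFrame drops its
+// opaque_ref.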
+
+void VideoEncoder::processOne()
+{
+ retrievePackets();
+
+ FrameInfo frameInfo = takeFrame();
+ QVideoFrame &frame = frameInfo.frame;
+ Q_ASSERT(frame.isValid());
+
+ if (!isValid())
+ return;
+
+ // qCDebug(qLcFFmpegEncoder) << "new video buffer" << frame.startTime();
+
+ AVFrameUPtr avFrame;
+
+ auto *videoBuffer = dynamic_cast<QFFmpegVideoBuffer *>(QVideoFramePrivate::hwBuffer(frame));
+ if (videoBuffer) {
+ // ffmpeg video buffer, let's use the native AVFrame stored in there
+ auto *hwFrame = videoBuffer->getHWFrame();
+ if (hwFrame && hwFrame->format == m_frameEncoder->sourceFormat())
+ avFrame.reset(av_frame_clone(hwFrame));
+ }
+
+ if (!avFrame) {
+ frame.map(QtVideo::MapMode::ReadOnly);
+ auto size = frame.size();
+ avFrame = makeAVFrame();
+ avFrame->format = m_frameEncoder->sourceFormat();
+ avFrame->width = size.width();
+ avFrame->height = size.height();
+
+ for (int i = 0; i < 4; ++i) {
+ avFrame->data[i] = const_cast<uint8_t *>(frame.bits(i));
+ avFrame->linesize[i] = frame.bytesPerLine(i);
+ }
+
+ QImage img;
+ if (frame.pixelFormat() == QVideoFrameFormat::Format_Jpeg) {
+ // the QImage is cached inside the video frame, so we can take the pointer to the image
+ // data here
+ img = frame.toImage();
+ avFrame->data[0] = (uint8_t *)img.bits();
+ avFrame->linesize[0] = img.bytesPerLine();
+ }
+
+ Q_ASSERT(avFrame->data[0]);
+        // ensure the video frame and its data stay alive as long as the encoder uses them
+ avFrame->opaque_ref = av_buffer_create(nullptr, 0, freeQVideoFrame,
+ new QVideoFrameHolder{ frame, img }, 0);
+ }
+
+ const auto [startTime, endTime] = frameTimeStamps(frame);
+
+ if (frameInfo.shouldAdjustTimeBase) {
+ m_baseTime += startTime - m_lastFrameTime;
+ qCDebug(qLcFFmpegVideoEncoder)
+ << ">>>> adjusting base time to" << m_baseTime << startTime << m_lastFrameTime;
+ }
+
+ const qint64 time = startTime - m_baseTime;
+ m_lastFrameTime = endTime;
+
+ setAVFrameTime(*avFrame, m_frameEncoder->getPts(time), m_frameEncoder->getTimeBase());
+
+ m_recordingEngine.newTimeStamp(time / 1000);
+
+ qCDebug(qLcFFmpegVideoEncoder)
+ << ">>> sending frame" << avFrame->pts << time << m_lastFrameTime;
+ int ret = m_frameEncoder->sendFrame(std::move(avFrame));
+ if (ret < 0) {
+ qCDebug(qLcFFmpegVideoEncoder) << "error sending frame" << ret << err2str(ret);
+ emit m_recordingEngine.sessionError(QMediaRecorder::ResourceError, err2str(ret));
+ }
+}
+
+bool VideoEncoder::checkIfCanPushFrame() const
+{
+ if (isRunning())
+ return m_videoFrameQueue.size() < m_maxQueueSize;
+ if (!isFinished())
+ return m_videoFrameQueue.empty();
+
+ return false;
+}
+
+std::pair<qint64, qint64> VideoEncoder::frameTimeStamps(const QVideoFrame &frame) const
+{
+ qint64 startTime = frame.startTime();
+ qint64 endTime = frame.endTime();
+
+ if (startTime == -1) {
+ startTime = m_lastFrameTime;
+ endTime = -1;
+ }
+
+ if (endTime == -1) {
+ qreal frameRate = frame.streamFrameRate();
+ if (frameRate <= 0.)
+ frameRate = m_frameEncoder->settings().videoFrameRate();
+
+ Q_ASSERT(frameRate > 0.f);
+ endTime = startTime + static_cast<qint64>(std::round(VideoFrameTimeBase / frameRate));
+ }
+
+ return { startTime, endTime };
+}
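+// Worked example (hypothetical values): a frame with startTime = 1'000'000 us,
+// endTime = -1 and streamFrameRate = 25 yields
+// { 1'000'000, 1'000'000 + round(1'000'000 / 25) } = { 1'000'000, 1'040'000 }.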
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder_p.h
new file mode 100644
index 000000000..ff6a74fc8
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoder_p.h
@@ -0,0 +1,64 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGVIDEOENCODER_P_H
+#define QFFMPEGVIDEOENCODER_P_H
+
+#include "qffmpegencoderthread_p.h"
+#include "qffmpeg_p.h"
+#include <qvideoframe.h>
+#include <queue>
+
+QT_BEGIN_NAMESPACE
+
+class QVideoFrameFormat;
+class QMediaEncoderSettings;
+
+namespace QFFmpeg {
+class VideoFrameEncoder;
+
+class VideoEncoder : public EncoderThread
+{
+public:
+ VideoEncoder(RecordingEngine &recordingEngine, const QMediaEncoderSettings &settings,
+ const QVideoFrameFormat &format, std::optional<AVPixelFormat> hwFormat);
+ ~VideoEncoder() override;
+
+ bool isValid() const;
+
+ void addFrame(const QVideoFrame &frame);
+
+protected:
+ bool checkIfCanPushFrame() const override;
+
+private:
+ struct FrameInfo
+ {
+ QVideoFrame frame;
+ bool shouldAdjustTimeBase = false;
+ };
+
+ FrameInfo takeFrame();
+ void retrievePackets();
+
+ void init() override;
+ void cleanup() override;
+ bool hasData() const override;
+ void processOne() override;
+
+ std::pair<qint64, qint64> frameTimeStamps(const QVideoFrame &frame) const;
+
+private:
+ std::queue<FrameInfo> m_videoFrameQueue;
+ const size_t m_maxQueueSize = 10; // Arbitrarily chosen to limit memory usage (332 MB @ 4K)
+
+ std::unique_ptr<VideoFrameEncoder> m_frameEncoder;
+ qint64 m_baseTime = 0;
+ bool m_shouldAdjustTimeBaseForNextFrame = true;
+ qint64 m_lastFrameTime = 0;
+};
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGVIDEOENCODER_P_H
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoderutils.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoderutils.cpp
new file mode 100644
index 000000000..eef2a64bf
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoderutils.cpp
@@ -0,0 +1,214 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegvideoencoderutils_p.h"
+#include "private/qmultimediautils_p.h"
+
+extern "C" {
+#include <libavutil/pixdesc.h>
+}
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+static AVScore calculateTargetSwFormatScore(const AVPixFmtDescriptor *sourceSwFormatDesc,
+ AVPixelFormat fmt)
+{
+    // Determine the format used by the encoder. We prefer YUV 4:2:0 based formats
+    // such as NV12 or P010. The selection tries to find the best matching format
+    // for the encoder depending on the bit depth of the source format.
+
+ const auto *desc = av_pix_fmt_desc_get(fmt);
+ if (!desc)
+ return NotSuitableAVScore;
+
+ const int sourceDepth = sourceSwFormatDesc ? sourceSwFormatDesc->comp[0].depth : 0;
+
+ if (desc->flags & AV_PIX_FMT_FLAG_HWACCEL)
+ // we really don't want HW accelerated formats here
+ return NotSuitableAVScore;
+
+ auto score = DefaultAVScore;
+
+ if (desc == sourceSwFormatDesc)
+ // prefer exact matches
+ score += 10;
+ if (desc->comp[0].depth == sourceDepth)
+ score += 100;
+ else if (desc->comp[0].depth < sourceDepth)
+ score -= 100 + (sourceDepth - desc->comp[0].depth);
+ if (desc->log2_chroma_h == 1)
+ score += 1;
+ if (desc->log2_chroma_w == 1)
+ score += 1;
+ if (desc->flags & AV_PIX_FMT_FLAG_BE)
+ score -= 10;
+ if (desc->flags & AV_PIX_FMT_FLAG_PAL)
+ // we don't want paletted formats
+ score -= 10000;
+ if (desc->flags & AV_PIX_FMT_FLAG_RGB)
+ // we don't want RGB formats
+ score -= 1000;
+
+ // qCDebug(qLcVideoFrameEncoder)
+ // << "checking format" << fmt << Qt::hex << desc->flags << desc->comp[0].depth
+ // << desc->log2_chroma_h << desc->log2_chroma_w << "score:" << score;
+
+ return score;
+}
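+// Scoring sketch (hypothetical): for an 8-bit 4:2:0 source such as NV12, NV12
+// itself scores DefaultAVScore + 112 (+10 exact match, +100 equal depth, +2 for
+// 4:2:0 chroma), YUV420P scores DefaultAVScore + 102, while an RGB format of
+// equal depth loses 1000 points and is effectively ruled out.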
+
+static auto targetSwFormatScoreCalculator(AVPixelFormat sourceFormat)
+{
+ const auto sourceSwFormatDesc = av_pix_fmt_desc_get(sourceFormat);
+ return [=](AVPixelFormat fmt) { return calculateTargetSwFormatScore(sourceSwFormatDesc, fmt); };
+}
+
+static bool isHwFormatAcceptedByCodec(AVPixelFormat pixFormat)
+{
+ switch (pixFormat) {
+ case AV_PIX_FMT_MEDIACODEC:
+ // Mediacodec doesn't accept AV_PIX_FMT_MEDIACODEC (QTBUG-116836)
+ return false;
+ default:
+ return true;
+ }
+}
+
+AVPixelFormat findTargetSWFormat(AVPixelFormat sourceSWFormat, const AVCodec *codec,
+ const HWAccel &accel)
+{
+ auto scoreCalculator = targetSwFormatScoreCalculator(sourceSWFormat);
+
+ const auto constraints = accel.constraints();
+ if (constraints && constraints->valid_sw_formats)
+ return findBestAVFormat(constraints->valid_sw_formats, scoreCalculator).first;
+
+    // Some codecs, e.g. mediacodec, don't expose constraints; look for the format
+    // in codec->pix_fmts instead.
+ if (codec->pix_fmts)
+ return findBestAVFormat(codec->pix_fmts, scoreCalculator).first;
+
+ return AV_PIX_FMT_NONE;
+}
+
+AVPixelFormat findTargetFormat(AVPixelFormat sourceFormat, AVPixelFormat sourceSWFormat,
+ const AVCodec *codec, const HWAccel *accel)
+{
+ Q_UNUSED(sourceFormat);
+
+ if (accel) {
+ const auto hwFormat = accel->hwFormat();
+
+ // TODO: handle codec->capabilities & AV_CODEC_CAP_HARDWARE here
+ if (!isHwFormatAcceptedByCodec(hwFormat))
+ return findTargetSWFormat(sourceSWFormat, codec, *accel);
+
+ const auto constraints = accel->constraints();
+ if (constraints && hasAVFormat(constraints->valid_hw_formats, hwFormat))
+ return hwFormat;
+
+        // Some codecs don't expose constraints; look for the format in
+        // codec->pix_fmts and hw_config instead.
+ if (isAVFormatSupported(codec, hwFormat))
+ return hwFormat;
+ }
+
+ if (!codec->pix_fmts) {
+ qWarning() << "Codec pix formats are undefined, it's likely to behave incorrectly";
+
+ return sourceSWFormat;
+ }
+
+ auto swScoreCalculator = targetSwFormatScoreCalculator(sourceSWFormat);
+ return findBestAVFormat(codec->pix_fmts, swScoreCalculator).first;
+}
+
+std::pair<const AVCodec *, std::unique_ptr<HWAccel>> findHwEncoder(AVCodecID codecID,
+ const QSize &resolution)
+{
+ auto matchesSizeConstraints = [&resolution](const HWAccel &accel) {
+ const auto constraints = accel.constraints();
+ if (!constraints)
+ return true;
+
+ return resolution.width() >= constraints->min_width
+ && resolution.height() >= constraints->min_height
+ && resolution.width() <= constraints->max_width
+ && resolution.height() <= constraints->max_height;
+ };
+
+    // First, attempt to find a hardware-accelerated encoder
+ auto result = HWAccel::findEncoderWithHwAccel(codecID, matchesSizeConstraints);
+ Q_ASSERT(!!result.first == !!result.second);
+
+ return result;
+}
+
+const AVCodec *findSwEncoder(AVCodecID codecID, AVPixelFormat sourceSWFormat)
+{
+ auto formatScoreCalculator = targetSwFormatScoreCalculator(sourceSWFormat);
+
+ return findAVEncoder(codecID, [&formatScoreCalculator](const AVCodec *codec) {
+ if (!codec->pix_fmts)
+ // codecs without pix_fmts are suspicious
+ return MinAVScore;
+
+ return findBestAVFormat(codec->pix_fmts, formatScoreCalculator).second;
+ });
+}
+
+AVRational adjustFrameRate(const AVRational *supportedRates, qreal requestedRate)
+{
+ auto calcScore = [requestedRate](const AVRational &rate) {
+ // relative comparison
+ return qMin(requestedRate * rate.den, qreal(rate.num))
+ / qMax(requestedRate * rate.den, qreal(rate.num));
+ };
+
+ const auto result = findBestAVValue(supportedRates, calcScore).first;
+ if (result.num && result.den)
+ return result;
+
+ const auto [num, den] = qRealToFraction(requestedRate);
+ return { num, den };
+}
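+// e.g. (hypothetical): requestedRate = 29.97 against supportedRates
+// {30000/1001, 25/1} scores ~1.0 and ~0.83 respectively, so 30000/1001 wins;
+// with null supportedRates the rate is converted via qRealToFraction().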
+
+AVRational adjustFrameTimeBase(const AVRational *supportedRates, AVRational frameRate)
+{
+ // TODO: user-specified frame rate might be required.
+ if (supportedRates) {
+ auto hasFrameRate = [&]() {
+ for (auto rate = supportedRates; rate->num && rate->den; ++rate)
+ if (rate->den == frameRate.den && rate->num == frameRate.num)
+ return true;
+
+ return false;
+ };
+
+ Q_ASSERT(hasFrameRate());
+
+ return { frameRate.den, frameRate.num };
+ }
+
+    constexpr int TimeScaleFactor = 1000; // allows deviating from a fixed frame rate
+ return { frameRate.den, frameRate.num * TimeScaleFactor };
+}
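+// e.g.: a fixed-rate codec with frameRate = 30/1 gets time base 1/30; without
+// supportedRates it becomes 1/(30 * 1000) = 1/30000, allowing timestamps at
+// finer granularity than one tick per frame.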
+
+QSize adjustVideoResolution(const AVCodec *codec, QSize requestedResolution)
+{
+#ifdef Q_OS_WINDOWS
+    // TODO: investigate; other encoders may also reject odd resolutions
+ if (strcmp(codec->name, "h264_mf") == 0) {
+ auto makeEven = [](int size) { return size & ~1; };
+ return QSize(makeEven(requestedResolution.width()), makeEven(requestedResolution.height()));
+ }
+#else
+ Q_UNUSED(codec);
+#endif
+ return requestedResolution;
+}
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoderutils_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoderutils_p.h
new file mode 100644
index 000000000..3a16a7de3
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoencoderutils_p.h
@@ -0,0 +1,64 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGVIDEOENCODERUTILS_P_H
+#define QFFMPEGVIDEOENCODERUTILS_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qffmpeg_p.h"
+#include "qffmpeghwaccel_p.h"
+
+QT_BEGIN_NAMESPACE
+
+namespace QFFmpeg {
+
+AVPixelFormat findTargetSWFormat(AVPixelFormat sourceSWFormat, const AVCodec *codec,
+ const HWAccel &accel);
+
+AVPixelFormat findTargetFormat(AVPixelFormat sourceFormat, AVPixelFormat sourceSWFormat,
+ const AVCodec *codec, const HWAccel *accel);
+
+std::pair<const AVCodec *, std::unique_ptr<HWAccel>> findHwEncoder(AVCodecID codecID,
+ const QSize &sourceSize);
+
+const AVCodec *findSwEncoder(AVCodecID codecID, AVPixelFormat sourceSWFormat);
+
+/**
+ * @brief adjustFrameRate gets a rational frame rate for the requested qreal rate.
+ * If the codec supports only fixed frame rates (non-null supportedRates),
+ * the function selects the most suitable one;
+ * otherwise it just converts the qreal to an AVRational.
+ */
+AVRational adjustFrameRate(const AVRational *supportedRates, qreal requestedRate);
+
+/**
+ * @brief adjustFrameTimeBase gets adjusted timebase by a list of supported frame rates
+ * and an already adjusted frame rate.
+ *
+ * Timebase is the fundamental unit of time (in seconds) in terms
+ * of which frame timestamps are represented.
+ * For fixed-fps content (non-null supportedRates),
+ * timebase should be 1/framerate.
+ *
+ * For more information, see AVStream::time_base and AVCodecContext::time_base.
+ *
+ * The adjusted time base is supposed to be set on both the stream and the codec context.
+ */
+AVRational adjustFrameTimeBase(const AVRational *supportedRates, AVRational frameRate);
+
+QSize adjustVideoResolution(const AVCodec *codec, QSize requestedResolution);
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGVIDEOENCODERUTILS_P_H
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder.cpp b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder.cpp
new file mode 100644
index 000000000..6f03c75a6
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder.cpp
@@ -0,0 +1,477 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qffmpegvideoframeencoder_p.h"
+#include "qffmpegmediaformatinfo_p.h"
+#include "qffmpegencoderoptions_p.h"
+#include "qffmpegvideoencoderutils_p.h"
+#include <qloggingcategory.h>
+
+extern "C" {
+#include "libavutil/display.h"
+}
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcVideoFrameEncoder, "qt.multimedia.ffmpeg.videoencoder");
+
+namespace QFFmpeg {
+
+std::unique_ptr<VideoFrameEncoder>
+VideoFrameEncoder::create(const QMediaEncoderSettings &encoderSettings,
+ const QSize &sourceSize,
+ QtVideo::Rotation sourceRotation,
+ qreal sourceFrameRate,
+ AVPixelFormat sourceFormat,
+ AVPixelFormat sourceSWFormat,
+ AVFormatContext *formatContext)
+{
+ Q_ASSERT(isSwPixelFormat(sourceSWFormat));
+ Q_ASSERT(isHwPixelFormat(sourceFormat) || sourceSWFormat == sourceFormat);
+
+ std::unique_ptr<VideoFrameEncoder> result(new VideoFrameEncoder);
+
+ result->m_settings = encoderSettings;
+ result->m_sourceSize = sourceSize;
+ result->m_sourceFormat = sourceFormat;
+ result->m_sourceRotation = sourceRotation;
+
+ // Temporary: check isSwPixelFormat because of android issue (QTBUG-116836)
+ result->m_sourceSWFormat = isSwPixelFormat(sourceFormat) ? sourceFormat : sourceSWFormat;
+
+ if (!result->m_settings.videoResolution().isValid())
+ result->m_settings.setVideoResolution(sourceSize);
+
+ if (result->m_settings.videoFrameRate() <= 0.)
+ result->m_settings.setVideoFrameRate(sourceFrameRate);
+
+ if (!result->initCodec() || !result->initTargetFormats()
+ || !result->initCodecContext(formatContext)) {
+ return nullptr;
+ }
+
+    // TODO: make VideoFrameEncoder::open() private and do the opening here:
+ // if (!open()) {
+ // m_error = QMediaRecorder::FormatError;
+ // m_errorStr = QLatin1StringView("Cannot open codec");
+ // return;
+ // }
+
+ result->updateConversions();
+
+ return result;
+}
+
+bool VideoFrameEncoder::initCodec()
+{
+ const auto qVideoCodec = m_settings.videoCodec();
+ const auto codecID = QFFmpegMediaFormatInfo::codecIdForVideoCodec(qVideoCodec);
+ const auto resolution = m_settings.videoResolution();
+
+ std::tie(m_codec, m_accel) = findHwEncoder(codecID, resolution);
+
+ if (!m_codec)
+ m_codec = findSwEncoder(codecID, m_sourceSWFormat);
+
+ if (!m_codec) {
+ qWarning() << "Could not find encoder for codecId" << codecID;
+ return false;
+ }
+
+ qCDebug(qLcVideoFrameEncoder) << "found encoder" << m_codec->name << "for id" << m_codec->id;
+
+ auto fixedResolution = adjustVideoResolution(m_codec, m_settings.videoResolution());
+ if (resolution != fixedResolution) {
+ qCDebug(qLcVideoFrameEncoder) << "Fix odd video resolution for codec" << m_codec->name
+ << ":" << resolution << "->" << fixedResolution;
+
+ m_settings.setVideoResolution(fixedResolution);
+ }
+
+ if (m_codec->supported_framerates && qLcVideoFrameEncoder().isEnabled(QtDebugMsg))
+ for (auto rate = m_codec->supported_framerates; rate->num && rate->den; ++rate)
+ qCDebug(qLcVideoFrameEncoder) << "supported frame rate:" << *rate;
+
+ m_codecFrameRate = adjustFrameRate(m_codec->supported_framerates, m_settings.videoFrameRate());
+ qCDebug(qLcVideoFrameEncoder) << "Adjusted frame rate:" << m_codecFrameRate;
+
+ return true;
+}
+
+bool VideoFrameEncoder::initTargetFormats()
+{
+ m_targetFormat = findTargetFormat(m_sourceFormat, m_sourceSWFormat, m_codec, m_accel.get());
+
+ if (m_targetFormat == AV_PIX_FMT_NONE) {
+ qWarning() << "Could not find target format for codecId" << m_codec->id;
+ return false;
+ }
+
+ if (isHwPixelFormat(m_targetFormat)) {
+ Q_ASSERT(m_accel);
+
+ m_targetSWFormat = findTargetSWFormat(m_sourceSWFormat, m_codec, *m_accel);
+
+ if (m_targetSWFormat == AV_PIX_FMT_NONE) {
+ qWarning() << "Cannot find software target format. sourceSWFormat:" << m_sourceSWFormat
+ << "targetFormat:" << m_targetFormat;
+ return false;
+ }
+
+ m_accel->createFramesContext(m_targetSWFormat, m_settings.videoResolution());
+ if (!m_accel->hwFramesContextAsBuffer())
+ return false;
+ } else {
+ m_targetSWFormat = m_targetFormat;
+ }
+
+ return true;
+}
+
+VideoFrameEncoder::~VideoFrameEncoder() = default;
+
+bool VideoFrameEncoder::initCodecContext(AVFormatContext *formatContext)
+{
+ m_stream = avformat_new_stream(formatContext, nullptr);
+ m_stream->id = formatContext->nb_streams - 1;
+    // qCDebug(qLcVideoFrameEncoder) << "Video stream: index" << m_stream->id;
+ m_stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
+ m_stream->codecpar->codec_id = m_codec->id;
+
+    // Apple's HEVC decoders don't like the 'hev1' tag FFmpeg uses by default;
+    // use 'hvc1', the more commonly accepted tag.
+ if (m_codec->id == AV_CODEC_ID_HEVC)
+ m_stream->codecpar->codec_tag = MKTAG('h', 'v', 'c', '1');
+
+ const auto resolution = m_settings.videoResolution();
+
+ // ### Fix hardcoded values
+ m_stream->codecpar->format = m_targetFormat;
+ m_stream->codecpar->width = resolution.width();
+ m_stream->codecpar->height = resolution.height();
+ m_stream->codecpar->sample_aspect_ratio = AVRational{ 1, 1 };
+
+ if (m_sourceRotation != QtVideo::Rotation::None) {
+ constexpr auto displayMatrixSize = sizeof(int32_t) * 9;
+ AVPacketSideData sideData = { reinterpret_cast<uint8_t *>(av_malloc(displayMatrixSize)),
+ displayMatrixSize, AV_PKT_DATA_DISPLAYMATRIX };
+ av_display_rotation_set(reinterpret_cast<int32_t *>(sideData.data),
+ static_cast<double>(m_sourceRotation));
+ addStreamSideData(m_stream, sideData);
+ }
+
+ Q_ASSERT(m_codec);
+
+ m_stream->time_base = adjustFrameTimeBase(m_codec->supported_framerates, m_codecFrameRate);
+ m_codecContext.reset(avcodec_alloc_context3(m_codec));
+ if (!m_codecContext) {
+ qWarning() << "Could not allocate codec context";
+ return false;
+ }
+
+ avcodec_parameters_to_context(m_codecContext.get(), m_stream->codecpar);
+ m_codecContext->time_base = m_stream->time_base;
+ qCDebug(qLcVideoFrameEncoder) << "codecContext time base" << m_codecContext->time_base.num
+ << m_codecContext->time_base.den;
+
+ m_codecContext->framerate = m_codecFrameRate;
+ m_codecContext->pix_fmt = m_targetFormat;
+ m_codecContext->width = resolution.width();
+ m_codecContext->height = resolution.height();
+
+ if (m_accel) {
+ auto deviceContext = m_accel->hwDeviceContextAsBuffer();
+ Q_ASSERT(deviceContext);
+ m_codecContext->hw_device_ctx = av_buffer_ref(deviceContext);
+
+ if (auto framesContext = m_accel->hwFramesContextAsBuffer())
+ m_codecContext->hw_frames_ctx = av_buffer_ref(framesContext);
+ }
+
+ return true;
+}
+
+bool VideoFrameEncoder::open()
+{
+ if (!m_codecContext)
+ return false;
+
+ AVDictionaryHolder opts;
+ applyVideoEncoderOptions(m_settings, m_codec->name, m_codecContext.get(), opts);
+ applyExperimentalCodecOptions(m_codec, opts);
+
+ int res = avcodec_open2(m_codecContext.get(), m_codec, opts);
+ if (res < 0) {
+ m_codecContext.reset();
+ qWarning() << "Couldn't open codec for writing" << err2str(res);
+ return false;
+ }
+ qCDebug(qLcVideoFrameEncoder) << "video codec opened" << res << "time base"
+ << m_codecContext->time_base;
+ return true;
+}
+
+qint64 VideoFrameEncoder::getPts(qint64 us) const
+{
+ qint64 div = 1'000'000 * m_stream->time_base.num;
+ return div != 0 ? (us * m_stream->time_base.den + div / 2) / div : 0;
+}
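+// e.g.: with a stream time base of 1/90000, getPts(40'000 /*us*/) returns
+// (40'000 * 90'000 + 500'000) / 1'000'000 = 3600 stream ticks.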
+
+const AVRational &VideoFrameEncoder::getTimeBase() const
+{
+ return m_stream->time_base;
+}
+
+int VideoFrameEncoder::sendFrame(AVFrameUPtr frame)
+{
+ if (!m_codecContext) {
+ qWarning() << "codec context is not initialized!";
+ return AVERROR(EINVAL);
+ }
+
+ if (!frame)
+ return avcodec_send_frame(m_codecContext.get(), frame.get());
+
+ if (!updateSourceFormatAndSize(frame.get()))
+ return AVERROR(EINVAL);
+
+ int64_t pts = 0;
+ AVRational timeBase = {};
+ getAVFrameTime(*frame, pts, timeBase);
+
+ if (m_downloadFromHW) {
+ auto f = makeAVFrame();
+
+ int err = av_hwframe_transfer_data(f.get(), frame.get(), 0);
+ if (err < 0) {
+ qCDebug(qLcVideoFrameEncoder) << "Error transferring frame data to surface." << err2str(err);
+ return err;
+ }
+
+ frame = std::move(f);
+ }
+
+ if (m_converter) {
+ auto f = makeAVFrame();
+
+ f->format = m_targetSWFormat;
+ f->width = m_settings.videoResolution().width();
+ f->height = m_settings.videoResolution().height();
+
+ av_frame_get_buffer(f.get(), 0);
+ const auto scaledHeight = sws_scale(m_converter.get(), frame->data, frame->linesize, 0,
+ frame->height, f->data, f->linesize);
+
+ if (scaledHeight != f->height)
+ qCWarning(qLcVideoFrameEncoder) << "Scaled height" << scaledHeight << "!=" << f->height;
+
+ frame = std::move(f);
+ }
+
+ if (m_uploadToHW) {
+ auto *hwFramesContext = m_accel->hwFramesContextAsBuffer();
+ Q_ASSERT(hwFramesContext);
+ auto f = makeAVFrame();
+
+ if (!f)
+ return AVERROR(ENOMEM);
+ int err = av_hwframe_get_buffer(hwFramesContext, f.get(), 0);
+ if (err < 0) {
+ qCDebug(qLcVideoFrameEncoder) << "Error getting HW buffer" << err2str(err);
+ return err;
+ } else {
+ qCDebug(qLcVideoFrameEncoder) << "got HW buffer";
+ }
+ if (!f->hw_frames_ctx) {
+ qCDebug(qLcVideoFrameEncoder) << "no hw frames context";
+ return AVERROR(ENOMEM);
+ }
+ err = av_hwframe_transfer_data(f.get(), frame.get(), 0);
+ if (err < 0) {
+ qCDebug(qLcVideoFrameEncoder) << "Error transferring frame data to surface." << err2str(err);
+ return err;
+ }
+ frame = std::move(f);
+ }
+
+ qCDebug(qLcVideoFrameEncoder) << "sending frame" << pts << "*" << timeBase;
+
+ setAVFrameTime(*frame, pts, timeBase);
+ return avcodec_send_frame(m_codecContext.get(), frame.get());
+}
+
+qint64 VideoFrameEncoder::estimateDuration(const AVPacket &packet, bool isFirstPacket)
+{
+ qint64 duration = 0; // In stream units, multiply by time_base to get seconds
+
+ if (isFirstPacket) {
+ // First packet - Estimate duration from frame rate. Duration must
+ // be set for single-frame videos, otherwise they won't open in
+ // media player.
+ const AVRational frameDuration = av_inv_q(m_codecContext->framerate);
+ duration = av_rescale_q(1, frameDuration, m_stream->time_base);
+ } else {
+ // Duration is calculated from actual packet times. TODO: Handle discontinuities
+ duration = packet.pts - m_lastPacketTime;
+ }
+
+ return duration;
+}
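+// e.g.: at 30 fps with time_base = 1/30000, the first packet gets duration
+// av_rescale_q(1, 1/30, 1/30000) = 1000 ticks; later packets use the pts delta
+// to the previous packet.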
+
+AVPacketUPtr VideoFrameEncoder::retrievePacket()
+{
+ if (!m_codecContext)
+ return nullptr;
+
+ auto getPacket = [&]() {
+ AVPacketUPtr packet(av_packet_alloc());
+ const int ret = avcodec_receive_packet(m_codecContext.get(), packet.get());
+ if (ret < 0) {
+ if (ret != AVERROR(EOF) && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF)
+ qCDebug(qLcVideoFrameEncoder) << "Error receiving packet" << ret << err2str(ret);
+ return AVPacketUPtr{};
+ }
+ auto ts = timeStampMs(packet->pts, m_stream->time_base);
+
+ qCDebug(qLcVideoFrameEncoder)
+ << "got a packet" << packet->pts << packet->dts << (ts ? *ts : 0);
+
+ packet->stream_index = m_stream->id;
+
+ if (packet->duration == 0) {
+ const bool firstFrame = m_lastPacketTime == AV_NOPTS_VALUE;
+ packet->duration = estimateDuration(*packet, firstFrame);
+ }
+
+ m_lastPacketTime = packet->pts;
+
+ return packet;
+ };
+
+ auto fixPacketDts = [&](AVPacket &packet) {
+        // Workaround for bugs in some FFmpeg codecs (e.g. nvenc):
+        // packet->pts < packet->dts should never occur, but does in practice
+
+ if (packet.dts == AV_NOPTS_VALUE)
+ return true;
+
+ packet.dts -= m_packetDtsOffset;
+
+ if (packet.pts != AV_NOPTS_VALUE && packet.pts < packet.dts) {
+ m_packetDtsOffset += packet.dts - packet.pts;
+ packet.dts = packet.pts;
+
+ if (m_prevPacketDts != AV_NOPTS_VALUE && packet.dts < m_prevPacketDts) {
+ qCWarning(qLcVideoFrameEncoder)
+ << "Skip packet; failed to fix dts:" << packet.dts << m_prevPacketDts;
+ return false;
+ }
+ }
+
+ m_prevPacketDts = packet.dts;
+
+ return true;
+ };
+
+ while (auto packet = getPacket()) {
+ if (fixPacketDts(*packet))
+ return packet;
+ }
+
+ return nullptr;
+}
+
+bool VideoFrameEncoder::updateSourceFormatAndSize(const AVFrame *frame)
+{
+ Q_ASSERT(frame);
+
+ const QSize frameSize(frame->width, frame->height);
+ const AVPixelFormat frameFormat = static_cast<AVPixelFormat>(frame->format);
+
+ if (frameSize == m_sourceSize && frameFormat == m_sourceFormat)
+ return true;
+
+ auto applySourceFormatAndSize = [&](AVPixelFormat swFormat) {
+ m_sourceSize = frameSize;
+ m_sourceFormat = frameFormat;
+ m_sourceSWFormat = swFormat;
+ updateConversions();
+ return true;
+ };
+
+ if (frameFormat == m_sourceFormat)
+ return applySourceFormatAndSize(m_sourceSWFormat);
+
+ if (frameFormat == AV_PIX_FMT_NONE) {
+ qWarning() << "Got a frame with invalid pixel format";
+ return false;
+ }
+
+ if (isSwPixelFormat(frameFormat))
+ return applySourceFormatAndSize(frameFormat);
+
+ auto framesCtx = reinterpret_cast<const AVHWFramesContext *>(frame->hw_frames_ctx->data);
+ if (!framesCtx || framesCtx->sw_format == AV_PIX_FMT_NONE) {
+ qWarning() << "Cannot update conversions as hw frame has invalid framesCtx" << framesCtx;
+ return false;
+ }
+
+ return applySourceFormatAndSize(framesCtx->sw_format);
+}
+
+void VideoFrameEncoder::updateConversions()
+{
+ const bool needToScale = m_sourceSize != m_settings.videoResolution();
+ const bool zeroCopy = m_sourceFormat == m_targetFormat && !needToScale;
+
+ m_converter.reset();
+
+ if (zeroCopy) {
+ m_downloadFromHW = false;
+ m_uploadToHW = false;
+
+ qCDebug(qLcVideoFrameEncoder) << "zero copy encoding, format" << m_targetFormat;
+ // no need to initialize any converters
+ return;
+ }
+
+ m_downloadFromHW = m_sourceFormat != m_sourceSWFormat;
+ m_uploadToHW = m_targetFormat != m_targetSWFormat;
+
+ if (m_sourceSWFormat != m_targetSWFormat || needToScale) {
+ const auto targetSize = m_settings.videoResolution();
+ qCDebug(qLcVideoFrameEncoder)
+ << "video source and encoder use different formats:" << m_sourceSWFormat
+ << m_targetSWFormat << "or sizes:" << m_sourceSize << targetSize;
+
+ m_converter.reset(sws_getContext(m_sourceSize.width(), m_sourceSize.height(),
+ m_sourceSWFormat, targetSize.width(), targetSize.height(),
+ m_targetSWFormat, SWS_FAST_BILINEAR, nullptr, nullptr,
+ nullptr));
+ }
+
+ qCDebug(qLcVideoFrameEncoder) << "VideoFrameEncoder conversions initialized:"
+ << "sourceFormat:" << m_sourceFormat
+ << (isHwPixelFormat(m_sourceFormat) ? "(hw)" : "(sw)")
+ << "targetFormat:" << m_targetFormat
+ << (isHwPixelFormat(m_targetFormat) ? "(hw)" : "(sw)")
+ << "sourceSWFormat:" << m_sourceSWFormat
+ << "targetSWFormat:" << m_targetSWFormat
+ << "converter:" << m_converter.get();
+}
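+// The resulting pipeline is, at most:
+//   hw source frame -> av_hwframe_transfer_data() (download to m_sourceSWFormat)
+//                   -> sws_scale() (convert/scale to m_targetSWFormat)
+//                   -> av_hwframe_transfer_data() (upload to m_targetFormat);
+// sendFrame() skips every stage whose flag or converter is not set.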
+
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder_p.h b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder_p.h
new file mode 100644
index 000000000..193590a64
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/recordingengine/qffmpegvideoframeencoder_p.h
@@ -0,0 +1,96 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QFFMPEGVIDEOFRAMEENCODER_P_H
+#define QFFMPEGVIDEOFRAMEENCODER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qffmpeghwaccel_p.h"
+#include "private/qplatformmediarecorder_p.h"
+
+QT_BEGIN_NAMESPACE
+
+class QMediaEncoderSettings;
+
+namespace QFFmpeg {
+
+class VideoFrameEncoder
+{
+public:
+ static std::unique_ptr<VideoFrameEncoder> create(const QMediaEncoderSettings &encoderSettings,
+ const QSize &sourceSize,
+ QtVideo::Rotation sourceRotation,
+ qreal sourceFrameRate,
+ AVPixelFormat sourceFormat,
+ AVPixelFormat sourceSWFormat,
+ AVFormatContext *formatContext);
+
+ ~VideoFrameEncoder();
+
+ bool open();
+
+ AVPixelFormat sourceFormat() const { return m_sourceFormat; }
+ AVPixelFormat targetFormat() const { return m_targetFormat; }
+
+ qint64 getPts(qint64 ms) const;
+
+ const AVRational &getTimeBase() const;
+
+ int sendFrame(AVFrameUPtr frame);
+ AVPacketUPtr retrievePacket();
+
+ const QMediaEncoderSettings &settings() { return m_settings; }
+
+private:
+ VideoFrameEncoder() = default;
+
+ bool updateSourceFormatAndSize(const AVFrame *frame);
+
+ void updateConversions();
+
+ bool initCodec();
+
+ bool initTargetFormats();
+
+ bool initCodecContext(AVFormatContext *formatContext);
+
+ qint64 estimateDuration(const AVPacket &packet, bool isFirstPacket);
+
+private:
+ QMediaEncoderSettings m_settings;
+ QSize m_sourceSize;
+ QtVideo::Rotation m_sourceRotation = QtVideo::Rotation::None;
+
+ std::unique_ptr<HWAccel> m_accel;
+ const AVCodec *m_codec = nullptr;
+ AVStream *m_stream = nullptr;
+ qint64 m_lastPacketTime = AV_NOPTS_VALUE;
+ AVCodecContextUPtr m_codecContext;
+ std::unique_ptr<SwsContext, decltype(&sws_freeContext)> m_converter = { nullptr,
+ &sws_freeContext };
+ AVPixelFormat m_sourceFormat = AV_PIX_FMT_NONE;
+ AVPixelFormat m_sourceSWFormat = AV_PIX_FMT_NONE;
+ AVPixelFormat m_targetFormat = AV_PIX_FMT_NONE;
+ AVPixelFormat m_targetSWFormat = AV_PIX_FMT_NONE;
+ bool m_downloadFromHW = false;
+ bool m_uploadToHW = false;
+
+ AVRational m_codecFrameRate = { 0, 1 };
+
+ int64_t m_prevPacketDts = AV_NOPTS_VALUE;
+ int64_t m_packetDtsOffset = 0;
+};
+} // namespace QFFmpeg
+
+QT_END_NAMESPACE
+
+#endif // QFFMPEGVIDEOFRAMEENCODER_P_H
diff --git a/src/plugins/multimedia/ffmpeg/symbolstubs/openssl3.ver b/src/plugins/multimedia/ffmpeg/symbolstubs/openssl3.ver
new file mode 100644
index 000000000..88235a94c
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/symbolstubs/openssl3.ver
@@ -0,0 +1,7 @@
+# Copyright (C) 2024 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+OPENSSL_3.0.0 {
+ global:
+ *;
+};
diff --git a/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-crypto.cpp b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-crypto.cpp
new file mode 100644
index 000000000..fbf3b783c
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-crypto.cpp
@@ -0,0 +1,6 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QtMultimedia/private/qsymbolsresolveutils_p.h>
+
+// No stub functions are needed for ffmpeg
diff --git a/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-ssl.cpp b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-ssl.cpp
new file mode 100644
index 000000000..3e38e398c
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-ssl.cpp
@@ -0,0 +1,300 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QtMultimedia/private/qsymbolsresolveutils_p.h>
+
+#include <qstringliteral.h>
+
+#include <openssl/bio.h>
+#include <openssl/ssl.h>
+#include <openssl/bn.h>
+#include <openssl/err.h>
+#include <openssl/rand.h>
+
+using namespace Qt::StringLiterals;
+
+[[maybe_unused]] static constexpr auto SHLIB_VERSION =
+#if defined(OPENSSL_SHLIB_VERSION)
+ OPENSSL_SHLIB_VERSION;
+#elif defined(SHLIB_VERSION_NUMBER)
+ SHLIB_VERSION_NUMBER;
+#endif
+
+
+#if !defined(Q_OS_ANDROID)
+CHECK_VERSIONS("ssl", SSL_NEEDED_SOVERSION, SHLIB_VERSION);
+#endif
+
+static std::unique_ptr<QLibrary> loadLib()
+{
+ auto lib = std::make_unique<QLibrary>();
+
+ auto tryLoad = [&](QString sslName, auto version) {
+ lib->setFileNameAndVersion(sslName, version);
+ return lib->load();
+ };
+
+// openssl on Android has specific suffixes
+#if defined(Q_OS_ANDROID)
+ {
+ auto suffix = qEnvironmentVariable("ANDROID_OPENSSL_SUFFIX");
+ if (suffix.isEmpty()) {
+#if (OPENSSL_VERSION_NUMBER >> 28) < 3 // major version < 3
+ suffix = "_1_1"_L1;
+#elif OPENSSL_VERSION_MAJOR == 3
+ suffix = "_3"_L1;
+#else
+ static_assert(false, "Unexpected openssl version");
+#endif
+ }
+
+ if (tryLoad("ssl"_L1 + suffix, -1))
+ return lib;
+ }
+#endif
+
+ if (tryLoad("ssl"_L1, SSL_NEEDED_SOVERSION ""_L1))
+ return lib;
+
+ return {};
+}
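+// e.g.: on a typical Linux build with SSL_NEEDED_SOVERSION "3" this resolves
+// libssl.so.3; on Android, suffixed names such as libssl_3.so are tried first.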
+
+
+BEGIN_INIT_FUNCS("ssl", loadLib)
+
+// BN functions
+
+INIT_FUNC(BN_value_one);
+INIT_FUNC(BN_mod_word);
+
+INIT_FUNC(BN_div_word)
+INIT_FUNC(BN_mul_word)
+INIT_FUNC(BN_add_word)
+INIT_FUNC(BN_sub_word)
+INIT_FUNC(BN_set_word)
+INIT_FUNC(BN_new)
+INIT_FUNC(BN_cmp)
+
+INIT_FUNC(BN_free);
+
+INIT_FUNC(BN_copy);
+
+INIT_FUNC(BN_CTX_new);
+
+INIT_FUNC(BN_CTX_free);
+INIT_FUNC(BN_CTX_start);
+
+INIT_FUNC(BN_CTX_get);
+INIT_FUNC(BN_CTX_end);
+
+INIT_FUNC(BN_rand);
+INIT_FUNC(BN_mod_exp);
+
+INIT_FUNC(BN_num_bits);
+INIT_FUNC(BN_num_bits_word);
+
+INIT_FUNC(BN_bn2hex);
+INIT_FUNC(BN_bn2dec);
+
+INIT_FUNC(BN_hex2bn);
+INIT_FUNC(BN_dec2bn);
+INIT_FUNC(BN_asc2bn);
+
+INIT_FUNC(BN_bn2bin);
+INIT_FUNC(BN_bin2bn);
+
+// BIO-related functions
+
+INIT_FUNC(BIO_new);
+INIT_FUNC(BIO_free);
+
+INIT_FUNC(BIO_read);
+INIT_FUNC(BIO_write);
+INIT_FUNC(BIO_s_mem);
+
+INIT_FUNC(BIO_set_data);
+
+INIT_FUNC(BIO_get_data);
+INIT_FUNC(BIO_set_init);
+
+INIT_FUNC(BIO_set_flags);
+INIT_FUNC(BIO_test_flags);
+INIT_FUNC(BIO_clear_flags);
+
+INIT_FUNC(BIO_meth_new);
+INIT_FUNC(BIO_meth_free);
+
+INIT_FUNC(BIO_meth_set_write);
+INIT_FUNC(BIO_meth_set_read);
+INIT_FUNC(BIO_meth_set_puts);
+INIT_FUNC(BIO_meth_set_gets);
+INIT_FUNC(BIO_meth_set_ctrl);
+INIT_FUNC(BIO_meth_set_create);
+INIT_FUNC(BIO_meth_set_destroy);
+INIT_FUNC(BIO_meth_set_callback_ctrl);
+
+// SSL functions
+
+INIT_FUNC(SSL_CTX_new);
+INIT_FUNC(SSL_CTX_up_ref);
+INIT_FUNC(SSL_CTX_free);
+
+INIT_FUNC(SSL_new);
+INIT_FUNC(SSL_up_ref);
+INIT_FUNC(SSL_free);
+
+INIT_FUNC(SSL_accept);
+INIT_FUNC(SSL_stateless);
+INIT_FUNC(SSL_connect);
+INIT_FUNC(SSL_read);
+INIT_FUNC(SSL_peek);
+INIT_FUNC(SSL_write);
+INIT_FUNC(SSL_ctrl);
+INIT_FUNC(SSL_shutdown);
+INIT_FUNC(SSL_set_bio);
+
+// options are unsigned long in OpenSSL 1.1.1 and uint64_t in 3.x
+INIT_FUNC(SSL_CTX_set_options);
+
+INIT_FUNC(SSL_get_error);
+INIT_FUNC(SSL_CTX_load_verify_locations);
+
+INIT_FUNC(SSL_CTX_set_verify);
+INIT_FUNC(SSL_CTX_use_PrivateKey);
+
+INIT_FUNC(SSL_CTX_use_PrivateKey_file);
+INIT_FUNC(SSL_CTX_use_certificate_chain_file);
+
+INIT_FUNC(ERR_get_error);
+
+INIT_FUNC(ERR_error_string);
+
+// TLS functions
+
+INIT_FUNC(TLS_client_method);
+INIT_FUNC(TLS_server_method);
+
+// RAND functions
+
+INIT_FUNC(RAND_bytes);
+
+END_INIT_FUNCS()
+
+//////////// Define
+
+// BN functions
+
+DEFINE_FUNC(BN_value_one, 0);
+DEFINE_FUNC(BN_mod_word, 2);
+
+DEFINE_FUNC(BN_div_word, 2)
+DEFINE_FUNC(BN_mul_word, 2)
+DEFINE_FUNC(BN_add_word, 2)
+DEFINE_FUNC(BN_sub_word, 2)
+DEFINE_FUNC(BN_set_word, 2)
+DEFINE_FUNC(BN_new, 0)
+DEFINE_FUNC(BN_cmp, 2)
+
+DEFINE_FUNC(BN_free, 1);
+
+DEFINE_FUNC(BN_copy, 2);
+
+DEFINE_FUNC(BN_CTX_new, 0);
+
+DEFINE_FUNC(BN_CTX_free, 1);
+DEFINE_FUNC(BN_CTX_start, 1);
+
+DEFINE_FUNC(BN_CTX_get, 1);
+DEFINE_FUNC(BN_CTX_end, 1);
+
+DEFINE_FUNC(BN_rand, 4);
+DEFINE_FUNC(BN_mod_exp, 5);
+
+DEFINE_FUNC(BN_num_bits, 1);
+DEFINE_FUNC(BN_num_bits_word, 1);
+
+DEFINE_FUNC(BN_bn2hex, 1);
+DEFINE_FUNC(BN_bn2dec, 1);
+
+DEFINE_FUNC(BN_hex2bn, 2);
+DEFINE_FUNC(BN_dec2bn, 2);
+DEFINE_FUNC(BN_asc2bn, 2);
+
+DEFINE_FUNC(BN_bn2bin, 2);
+DEFINE_FUNC(BN_bin2bn, 3);
+
+// BIO-related functions
+
+DEFINE_FUNC(BIO_new, 1);
+DEFINE_FUNC(BIO_free, 1);
+
+DEFINE_FUNC(BIO_read, 3, -1);
+DEFINE_FUNC(BIO_write, 3, -1);
+DEFINE_FUNC(BIO_s_mem, 0);
+
+DEFINE_FUNC(BIO_set_data, 2);
+
+DEFINE_FUNC(BIO_get_data, 1);
+DEFINE_FUNC(BIO_set_init, 2);
+
+DEFINE_FUNC(BIO_set_flags, 2);
+DEFINE_FUNC(BIO_test_flags, 2);
+DEFINE_FUNC(BIO_clear_flags, 2);
+
+DEFINE_FUNC(BIO_meth_new, 2);
+DEFINE_FUNC(BIO_meth_free, 1);
+
+DEFINE_FUNC(BIO_meth_set_write, 2);
+DEFINE_FUNC(BIO_meth_set_read, 2);
+DEFINE_FUNC(BIO_meth_set_puts, 2);
+DEFINE_FUNC(BIO_meth_set_gets, 2);
+DEFINE_FUNC(BIO_meth_set_ctrl, 2);
+DEFINE_FUNC(BIO_meth_set_create, 2);
+DEFINE_FUNC(BIO_meth_set_destroy, 2);
+DEFINE_FUNC(BIO_meth_set_callback_ctrl, 2);
+
+// SSL functions
+
+DEFINE_FUNC(SSL_CTX_new, 1);
+DEFINE_FUNC(SSL_CTX_up_ref, 1);
+DEFINE_FUNC(SSL_CTX_free, 1);
+
+DEFINE_FUNC(SSL_new, 1);
+DEFINE_FUNC(SSL_up_ref, 1);
+DEFINE_FUNC(SSL_free, 1);
+
+DEFINE_FUNC(SSL_accept, 1);
+DEFINE_FUNC(SSL_stateless, 1);
+DEFINE_FUNC(SSL_connect, 1);
+DEFINE_FUNC(SSL_read, 3, -1);
+DEFINE_FUNC(SSL_peek, 3);
+DEFINE_FUNC(SSL_write, 3, -1);
+DEFINE_FUNC(SSL_ctrl, 4);
+DEFINE_FUNC(SSL_shutdown, 1);
+DEFINE_FUNC(SSL_set_bio, 3);
+
+// options are unsigned long in OpenSSL 1.1.1 and uint64_t in 3.x
+DEFINE_FUNC(SSL_CTX_set_options, 2);
+
+DEFINE_FUNC(SSL_get_error, 2);
+DEFINE_FUNC(SSL_CTX_load_verify_locations, 3, -1);
+
+DEFINE_FUNC(SSL_CTX_set_verify, 3);
+DEFINE_FUNC(SSL_CTX_use_PrivateKey, 2);
+
+DEFINE_FUNC(SSL_CTX_use_PrivateKey_file, 3);
+DEFINE_FUNC(SSL_CTX_use_certificate_chain_file, 2);
+
+DEFINE_FUNC(ERR_get_error, 0);
+
+static char ErrorString[] = "Ssl not found";
+DEFINE_FUNC(ERR_error_string, 2, ErrorString);
+
+// TLS functions
+
+DEFINE_FUNC(TLS_client_method, 0);
+DEFINE_FUNC(TLS_server_method, 0);
+
+// RAND functions
+
+DEFINE_FUNC(RAND_bytes, 2);
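
Taken together, the two halves above implement a resolve-and-forward scheme: INIT_FUNC looks each symbol up in the QLibrary returned by loadLib(), and DEFINE_FUNC emits a stub with the real signature (the number is the argument count) that forwards to the resolved pointer, falling back to the optional trailing default when resolution failed. A minimal sketch of the idea, assuming the macros behave roughly like this (the real definitions live in qsymbolsresolveutils_p.h; the helper names below are invented):

```cpp
#include <QtCore/qlibrary.h>
#include <openssl/bn.h>

// Rough equivalent of INIT_FUNC(BN_new): resolve the symbol once at load time.
using BN_new_fn = BIGNUM *(*)();
static BN_new_fn q_resolved_BN_new = nullptr;

static void q_init_BN_new(QLibrary &lib)
{
    q_resolved_BN_new = reinterpret_cast<BN_new_fn>(lib.resolve("BN_new"));
}

// Rough equivalent of DEFINE_FUNC(BN_new, 0): forward, or return a default.
extern "C" BIGNUM *BN_new()
{
    return q_resolved_BN_new ? q_resolved_BN_new() : nullptr;
}
```

Under this reading, DEFINE_FUNC(ERR_error_string, 2, ErrorString) is the same pattern with two forwarded arguments and the "Ssl not found" string as the fallback return value.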
diff --git a/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-drm.cpp b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-drm.cpp
new file mode 100644
index 000000000..655a6b2b6
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-drm.cpp
@@ -0,0 +1,14 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QtMultimedia/private/qsymbolsresolveutils_p.h>
+
+#include <va/va_drm.h>
+
+CHECK_VERSIONS("va-drm", VA_DRM_NEEDED_SOVERSION, VA_MAJOR_VERSION + 1);
+
+BEGIN_INIT_FUNCS("va-drm", VA_DRM_NEEDED_SOVERSION)
+INIT_FUNC(vaGetDisplayDRM)
+END_INIT_FUNCS()
+
+DEFINE_FUNC(vaGetDisplayDRM, 1);
diff --git a/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-x11.cpp b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-x11.cpp
new file mode 100644
index 000000000..3bada9e69
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va-x11.cpp
@@ -0,0 +1,14 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QtMultimedia/private/qsymbolsresolveutils_p.h>
+
+#include <va/va_x11.h>
+
+CHECK_VERSIONS("va-x11", VA_X11_NEEDED_SOVERSION, VA_MAJOR_VERSION + 1);
+
+BEGIN_INIT_FUNCS("va-x11", VA_X11_NEEDED_SOVERSION)
+INIT_FUNC(vaGetDisplay)
+END_INIT_FUNCS()
+
+DEFINE_FUNC(vaGetDisplay, 1);
diff --git a/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va.cpp b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va.cpp
new file mode 100644
index 000000000..cfd2e5686
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/symbolstubs/qffmpegsymbols-va.cpp
@@ -0,0 +1,150 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QtMultimedia/private/qsymbolsresolveutils_p.h>
+
+#include <va/va.h>
+#include <va/va_str.h>
+
+// VAAPI generates the actual *.so name according to the rule in:
+// https://github.com/intel/libva/blob/master/configure.ac
+//
+// The library name is generated as libva.<x>.<y>.0, where
+// <x> = VA-API major version + 1
+// <y> = 100 * VA-API minor version + VA-API micro version
+// (e.g. VA-API 1.14.0 maps to <x> = 2 and <y> = 1400)
+CHECK_VERSIONS("va", VA_NEEDED_SOVERSION, VA_MAJOR_VERSION + 1);
+
+#ifdef Q_FFMPEG_PLUGIN_STUBS_ONLY
+constexpr const char *loggingName = "va(in plugin)";
+#else
+constexpr const char *loggingName = nullptr;
+#endif
+
+BEGIN_INIT_FUNCS("va", VA_NEEDED_SOVERSION, loggingName)
+
+
+INIT_FUNC(vaExportSurfaceHandle);
+INIT_FUNC(vaSyncSurface);
+INIT_FUNC(vaQueryVendorString);
+
+#ifndef Q_FFMPEG_PLUGIN_STUBS_ONLY
+
+INIT_FUNC(vaInitialize);
+INIT_FUNC(vaTerminate);
+INIT_FUNC(vaErrorStr);
+INIT_FUNC(vaSetErrorCallback);
+INIT_FUNC(vaSetInfoCallback);
+
+INIT_FUNC(vaCreateImage);
+INIT_FUNC(vaGetImage);
+INIT_FUNC(vaPutImage);
+INIT_FUNC(vaDeriveImage);
+INIT_FUNC(vaDestroyImage);
+INIT_FUNC(vaQueryImageFormats);
+
+INIT_FUNC(vaBeginPicture);
+INIT_FUNC(vaRenderPicture);
+INIT_FUNC(vaEndPicture);
+
+INIT_FUNC(vaCreateBuffer);
+INIT_FUNC(vaMapBuffer);
+INIT_FUNC(vaUnmapBuffer);
+#if VA_CHECK_VERSION(1, 9, 0)
+INIT_FUNC(vaSyncBuffer);
+#endif
+INIT_FUNC(vaDestroyBuffer);
+
+INIT_FUNC(vaCreateSurfaces);
+INIT_FUNC(vaDestroySurfaces);
+
+INIT_FUNC(vaCreateConfig);
+INIT_FUNC(vaGetConfigAttributes);
+INIT_FUNC(vaMaxNumProfiles);
+INIT_FUNC(vaMaxNumImageFormats);
+INIT_FUNC(vaMaxNumEntrypoints);
+INIT_FUNC(vaQueryConfigProfiles);
+INIT_FUNC(vaQueryConfigEntrypoints);
+INIT_FUNC(vaQuerySurfaceAttributes);
+INIT_FUNC(vaDestroyConfig);
+
+INIT_FUNC(vaCreateContext);
+INIT_FUNC(vaDestroyContext);
+
+INIT_FUNC(vaProfileStr);
+INIT_FUNC(vaEntrypointStr);
+
+INIT_FUNC(vaGetDisplayAttributes);
+
+INIT_FUNC(vaSetDriverName);
+
+INIT_FUNC(vaAcquireBufferHandle);
+INIT_FUNC(vaReleaseBufferHandle);
+
+#endif
+
+END_INIT_FUNCS()
+
+constexpr auto emptyString = "";
+
+DEFINE_FUNC(vaExportSurfaceHandle, 5, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaSyncSurface, 2, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaQueryVendorString, 1, emptyString);
+
+#ifndef Q_FFMPEG_PLUGIN_STUBS_ONLY
+
+DEFINE_FUNC(vaInitialize, 3, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaTerminate, 1, VA_STATUS_ERROR_OPERATION_FAILED);
+
+constexpr auto errorStr = "VAAPI is not available";
+DEFINE_FUNC(vaErrorStr, 1, errorStr);
+DEFINE_FUNC(vaSetErrorCallback, 3);
+DEFINE_FUNC(vaSetInfoCallback, 3);
+
+DEFINE_FUNC(vaCreateImage, 5, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaGetImage, 7, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaPutImage, 11, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaDeriveImage, 3, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaDestroyImage, 2, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaQueryImageFormats, 3, VA_STATUS_ERROR_OPERATION_FAILED);
+
+DEFINE_FUNC(vaBeginPicture, 3, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaRenderPicture, 4, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaEndPicture, 2, VA_STATUS_ERROR_OPERATION_FAILED);
+
+DEFINE_FUNC(vaCreateBuffer, 7, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaMapBuffer, 3, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaUnmapBuffer, 2, VA_STATUS_ERROR_OPERATION_FAILED);
+#if VA_CHECK_VERSION(1, 9, 0)
+DEFINE_FUNC(vaSyncBuffer, 3, VA_STATUS_ERROR_OPERATION_FAILED);
+#endif
+DEFINE_FUNC(vaDestroyBuffer, 2, VA_STATUS_ERROR_OPERATION_FAILED);
+
+DEFINE_FUNC(vaCreateSurfaces, 8, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaDestroySurfaces, 3, VA_STATUS_ERROR_OPERATION_FAILED);
+
+DEFINE_FUNC(vaCreateConfig, 6, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaGetConfigAttributes, 5, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaMaxNumProfiles, 1);
+DEFINE_FUNC(vaMaxNumImageFormats, 1);
+DEFINE_FUNC(vaMaxNumEntrypoints, 1);
+DEFINE_FUNC(vaQueryConfigProfiles, 3, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaQueryConfigEntrypoints, 4, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaQuerySurfaceAttributes, 4, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaDestroyConfig, 2, VA_STATUS_ERROR_OPERATION_FAILED);
+
+DEFINE_FUNC(vaCreateContext, 8);
+DEFINE_FUNC(vaDestroyContext, 2);
+
+
+DEFINE_FUNC(vaProfileStr, 1, emptyString);
+DEFINE_FUNC(vaEntrypointStr, 1, emptyString);
+
+DEFINE_FUNC(vaGetDisplayAttributes, 3, VA_STATUS_ERROR_OPERATION_FAILED);
+
+DEFINE_FUNC(vaSetDriverName, 2, VA_STATUS_ERROR_OPERATION_FAILED);
+
+DEFINE_FUNC(vaAcquireBufferHandle, 3, VA_STATUS_ERROR_OPERATION_FAILED);
+DEFINE_FUNC(vaReleaseBufferHandle, 2, VA_STATUS_ERROR_OPERATION_FAILED);
+
+#endif
+
diff --git a/src/plugins/multimedia/ffmpeg/symbolstubs/va.ver b/src/plugins/multimedia/ffmpeg/symbolstubs/va.ver
new file mode 100644
index 000000000..80c9a6dc0
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/symbolstubs/va.ver
@@ -0,0 +1,7 @@
+# Copyright (C) 2024 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+VA_API_0.33.0 {
+ global:
+ vaCreateSurfaces;
+};
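
Like openssl3.ver above, this linker version script shapes the stub's dynamic symbol table; pinning vaCreateSurfaces to the VA_API_0.33.0 version node lets binaries that reference the versioned symbol resolve against the stub. A hedged illustration of how such a versioned lookup behaves, using the GNU dlvsym extension (standalone sketch, not part of the plugin; the soname follows the <major + 1> rule noted earlier):

```cpp
#include <dlfcn.h>
#include <cstdio>

int main()
{
    void *handle = dlopen("libva.so.2", RTLD_NOW); // VA-API major 1 -> soname 2
    if (!handle)
        return 1;
    // Ask for the symbol at the specific version node the .ver file declares.
    void *sym = dlvsym(handle, "vaCreateSurfaces", "VA_API_0.33.0");
    std::printf("vaCreateSurfaces@VA_API_0.33.0 -> %p\n", sym);
    dlclose(handle);
    return 0;
}
```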
diff --git a/src/plugins/multimedia/gstreamer/CMakeLists.txt b/src/plugins/multimedia/gstreamer/CMakeLists.txt
new file mode 100644
index 000000000..1ef1f9a36
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/CMakeLists.txt
@@ -0,0 +1,73 @@
+# Copyright (C) 2022 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+qt_find_package(EGL)
+
+qt_internal_add_module(QGstreamerMediaPluginPrivate
+ STATIC
+ INTERNAL_MODULE
+ SOURCES
+ audio/qgstreameraudiodevice.cpp audio/qgstreameraudiodevice_p.h
+ audio/qgstreameraudiodecoder.cpp audio/qgstreameraudiodecoder_p.h
+ common/qglist_helper_p.h
+ common/qgst.cpp common/qgst_p.h
+ common/qgst_debug.cpp common/qgst_debug_p.h
+ common/qgst_handle_types_p.h
+ common/qgstappsource.cpp common/qgstappsource_p.h
+ common/qgstreameraudioinput.cpp common/qgstreameraudioinput_p.h
+ common/qgstreameraudiooutput.cpp common/qgstreameraudiooutput_p.h
+ common/qgstreamerbufferprobe.cpp common/qgstreamerbufferprobe_p.h
+ common/qgstreamermetadata.cpp common/qgstreamermetadata_p.h
+ common/qgstreamermessage_p.h
+ common/qgstreamermediaplayer.cpp common/qgstreamermediaplayer_p.h
+ common/qgstreamervideooutput.cpp common/qgstreamervideooutput_p.h
+ common/qgstreamervideooverlay.cpp common/qgstreamervideooverlay_p.h
+ common/qgstreamervideosink.cpp common/qgstreamervideosink_p.h
+ common/qgstpipeline.cpp common/qgstpipeline_p.h
+ common/qgstutils.cpp common/qgstutils_p.h
+ common/qgstvideobuffer.cpp common/qgstvideobuffer_p.h
+ common/qgstvideorenderersink.cpp common/qgstvideorenderersink_p.h
+ common/qgstsubtitlesink.cpp common/qgstsubtitlesink_p.h
+ qgstreamerintegration.cpp qgstreamerintegration_p.h
+ qgstreamerformatinfo.cpp qgstreamerformatinfo_p.h
+ qgstreamervideodevices.cpp qgstreamervideodevices_p.h
+ mediacapture/qgstreamercamera.cpp mediacapture/qgstreamercamera_p.h
+ mediacapture/qgstreamerimagecapture.cpp mediacapture/qgstreamerimagecapture_p.h
+ mediacapture/qgstreamermediacapture.cpp mediacapture/qgstreamermediacapture_p.h
+ mediacapture/qgstreamermediaencoder.cpp mediacapture/qgstreamermediaencoder_p.h
+ NO_UNITY_BUILD_SOURCES
+ # Conflicts with macros defined in X11.h and Xlib.h
+ common/qgstvideobuffer.cpp
+ common/qgstreamervideosink.cpp
+ NO_GENERATE_CPP_EXPORTS
+ DEFINES
+ GLIB_VERSION_MIN_REQUIRED=GLIB_VERSION_2_26
+ PUBLIC_LIBRARIES
+ Qt::MultimediaPrivate
+ Qt::CorePrivate
+ GStreamer::GStreamer
+ GStreamer::App
+)
+
+qt_internal_extend_target(QGstreamerMediaPluginPrivate CONDITION QT_FEATURE_gstreamer_photography
+ PUBLIC_LIBRARIES
+ GStreamer::Photography
+)
+
+qt_internal_extend_target(QGstreamerMediaPluginPrivate CONDITION QT_FEATURE_gstreamer_gl
+ PUBLIC_LIBRARIES
+ GStreamer::Gl
+ LIBRARIES
+ EGL::EGL
+)
+
+qt_internal_add_plugin(QGstreamerMediaPlugin
+ OUTPUT_NAME gstreamermediaplugin
+ PLUGIN_TYPE multimedia
+ SOURCES
+ qgstreamerplugin.cpp
+ gstreamer.json
+ LIBRARIES
+ Qt::QGstreamerMediaPluginPrivate
+ Qt::MultimediaPrivate
+)
diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder.cpp b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder.cpp
new file mode 100644
index 000000000..ba1582877
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder.cpp
@@ -0,0 +1,535 @@
+// Copyright (C) 2020 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+//#define DEBUG_DECODER
+
+#include <audio/qgstreameraudiodecoder_p.h>
+
+#include <common/qgstreamermessage_p.h>
+#include <common/qgst_debug_p.h>
+#include <common/qgstutils_p.h>
+
+#include <gst/gstvalue.h>
+#include <gst/base/gstbasesrc.h>
+
+#include <QtCore/qdatetime.h>
+#include <QtCore/qdebug.h>
+#include <QtCore/qsize.h>
+#include <QtCore/qtimer.h>
+#include <QtCore/qdir.h>
+#include <QtCore/qstandardpaths.h>
+#include <QtCore/qurl.h>
+#include <QtCore/qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcGstreamerAudioDecoder, "qt.multimedia.gstreameraudiodecoder");
+
+typedef enum {
+ GST_PLAY_FLAG_VIDEO = 0x00000001,
+ GST_PLAY_FLAG_AUDIO = 0x00000002,
+ GST_PLAY_FLAG_TEXT = 0x00000004,
+ GST_PLAY_FLAG_VIS = 0x00000008,
+ GST_PLAY_FLAG_SOFT_VOLUME = 0x00000010,
+ GST_PLAY_FLAG_NATIVE_AUDIO = 0x00000020,
+ GST_PLAY_FLAG_NATIVE_VIDEO = 0x00000040,
+ GST_PLAY_FLAG_DOWNLOAD = 0x00000080,
+ GST_PLAY_FLAG_BUFFERING = 0x00000100
+} GstPlayFlags;
+
+
+QMaybe<QPlatformAudioDecoder *> QGstreamerAudioDecoder::create(QAudioDecoder *parent)
+{
+ static const auto error = qGstErrorMessageIfElementsNotAvailable("audioconvert", "playbin");
+ if (error)
+ return *error;
+
+ return new QGstreamerAudioDecoder(parent);
+}
+
+QGstreamerAudioDecoder::QGstreamerAudioDecoder(QAudioDecoder *parent)
+ : QPlatformAudioDecoder(parent),
+ m_playbin{
+ QGstPipeline::adopt(GST_PIPELINE_CAST(
+ QGstElement::createFromFactory("playbin", "playbin").element())),
+ },
+ m_audioConvert{
+ QGstElement::createFromFactory("audioconvert", "audioconvert"),
+ }
+{
+ // Route bus messages through our message filter
+ m_playbin.installMessageFilter(this);
+
+ // Set the rest of the pipeline up
+ setAudioFlags(true);
+
+ m_outputBin = QGstBin::create("audio-output-bin");
+ m_outputBin.add(m_audioConvert);
+
+ // add a ghost pad exposing audioconvert's sink pad on the bin
+ m_outputBin.addGhostPad(m_audioConvert, "sink");
+
+ g_object_set(m_playbin.object(), "audio-sink", m_outputBin.element(), NULL);
+
+ m_deepNotifySourceConnection = m_playbin.connect(
+ "deep-notify::source", (GCallback)&configureAppSrcElement, (gpointer)this);
+
+ // Set volume to 100%
+ gdouble volume = 1.0;
+ m_playbin.set("volume", volume);
+}
+
+QGstreamerAudioDecoder::~QGstreamerAudioDecoder()
+{
+ stop();
+
+ m_playbin.removeMessageFilter(this);
+
+#if QT_CONFIG(gstreamer_app)
+ delete m_appSrc;
+#endif
+}
+
+#if QT_CONFIG(gstreamer_app)
+void QGstreamerAudioDecoder::configureAppSrcElement([[maybe_unused]] GObject *object, GObject *orig,
+ [[maybe_unused]] GParamSpec *pspec,
+ QGstreamerAudioDecoder *self)
+{
+ // Nothing to do if we are not using an appsrc source
+ if (!self->m_appSrc)
+ return;
+
+ QGstElementHandle appsrc;
+ g_object_get(orig, "source", &appsrc, NULL);
+
+ auto *qAppSrc = self->m_appSrc;
+ qAppSrc->setExternalAppSrc(QGstAppSrc{
+ qGstSafeCast<GstAppSrc>(appsrc.get()),
+ QGstAppSrc::NeedsRef, // CHECK: can we `release()`?
+ });
+ qAppSrc->setup(self->mDevice);
+}
+#endif
+
+bool QGstreamerAudioDecoder::processBusMessage(const QGstreamerMessage &message)
+{
+ qCDebug(qLcGstreamerAudioDecoder) << "received bus message:" << message;
+
+ GstMessage *gm = message.message();
+
+ switch (message.type()) {
+ case GST_MESSAGE_DURATION: {
+ updateDuration();
+ return false;
+ }
+
+ case GST_MESSAGE_ERROR: {
+ qCDebug(qLcGstreamerAudioDecoder) << " error" << QCompactGstMessageAdaptor(message);
+
+ QUniqueGErrorHandle err;
+ QGString debug;
+ gst_message_parse_error(gm, &err, &debug);
+
+ if (message.source() == m_playbin) {
+ if (err.get()->domain == GST_STREAM_ERROR
+ && err.get()->code == GST_STREAM_ERROR_CODEC_NOT_FOUND)
+ processInvalidMedia(QAudioDecoder::FormatError,
+ tr("Cannot play stream of type: <unknown>"));
+ else
+ processInvalidMedia(QAudioDecoder::ResourceError,
+ QString::fromUtf8(err.get()->message));
+ } else {
+ QAudioDecoder::Error qerror = QAudioDecoder::ResourceError;
+ if (err.get()->domain == GST_STREAM_ERROR) {
+ switch (err.get()->code) {
+ case GST_STREAM_ERROR_DECRYPT:
+ case GST_STREAM_ERROR_DECRYPT_NOKEY:
+ qerror = QAudioDecoder::AccessDeniedError;
+ break;
+ case GST_STREAM_ERROR_FORMAT:
+ case GST_STREAM_ERROR_DEMUX:
+ case GST_STREAM_ERROR_DECODE:
+ case GST_STREAM_ERROR_WRONG_TYPE:
+ case GST_STREAM_ERROR_TYPE_NOT_FOUND:
+ case GST_STREAM_ERROR_CODEC_NOT_FOUND:
+ qerror = QAudioDecoder::FormatError;
+ break;
+ default:
+ break;
+ }
+ } else if (err.get()->domain == GST_CORE_ERROR) {
+ switch (err.get()->code) {
+ case GST_CORE_ERROR_MISSING_PLUGIN:
+ qerror = QAudioDecoder::FormatError;
+ break;
+ default:
+ break;
+ }
+ }
+
+ processInvalidMedia(qerror, QString::fromUtf8(err.get()->message));
+ }
+ break;
+ }
+
+ default:
+ if (message.source() == m_playbin)
+ return handlePlaybinMessage(message);
+ }
+
+ return false;
+}
+
+bool QGstreamerAudioDecoder::handlePlaybinMessage(const QGstreamerMessage &message)
+{
+ GstMessage *gm = message.message();
+
+ switch (GST_MESSAGE_TYPE(gm)) {
+ case GST_MESSAGE_STATE_CHANGED: {
+ GstState oldState;
+ GstState newState;
+ GstState pending;
+
+ gst_message_parse_state_changed(gm, &oldState, &newState, &pending);
+
+ bool isDecoding = false;
+ switch (newState) {
+ case GST_STATE_VOID_PENDING:
+ case GST_STATE_NULL:
+ case GST_STATE_READY:
+ break;
+ case GST_STATE_PLAYING:
+ isDecoding = true;
+ break;
+ case GST_STATE_PAUSED:
+ isDecoding = true;
+
+ // GStreamer doesn't give a reliable indication that the duration
+ // information is ready, and GST_MESSAGE_DURATION is not sent by most
+ // elements, so the duration is queried up to 5 times with increasing delay.
+ m_durationQueries = 5;
+ updateDuration();
+ break;
+ }
+
+ setIsDecoding(isDecoding);
+ break;
+ }
+
+ case GST_MESSAGE_EOS:
+ m_playbin.setState(GST_STATE_NULL);
+ finished();
+ break;
+
+ case GST_MESSAGE_ERROR:
+ Q_UNREACHABLE_RETURN(false); // handled in processBusMessage
+
+ case GST_MESSAGE_WARNING:
+ qCWarning(qLcGstreamerAudioDecoder) << "Warning:" << QCompactGstMessageAdaptor(message);
+ break;
+
+ case GST_MESSAGE_INFO: {
+ if (qLcGstreamerAudioDecoder().isDebugEnabled())
+ qCWarning(qLcGstreamerAudioDecoder) << "Info:" << QCompactGstMessageAdaptor(message);
+ break;
+ }
+ default:
+ break;
+ }
+
+ return false;
+}
+
+QUrl QGstreamerAudioDecoder::source() const
+{
+ return mSource;
+}
+
+void QGstreamerAudioDecoder::setSource(const QUrl &fileName)
+{
+ stop();
+ mDevice = nullptr;
+ delete m_appSrc;
+ m_appSrc = nullptr;
+
+ bool isSignalRequired = (mSource != fileName);
+ mSource = fileName;
+ if (isSignalRequired)
+ sourceChanged();
+}
+
+QIODevice *QGstreamerAudioDecoder::sourceDevice() const
+{
+ return mDevice;
+}
+
+void QGstreamerAudioDecoder::setSourceDevice(QIODevice *device)
+{
+ stop();
+ mSource.clear();
+ bool isSignalRequired = (mDevice != device);
+ mDevice = device;
+ if (isSignalRequired)
+ sourceChanged();
+}
+
+void QGstreamerAudioDecoder::start()
+{
+ addAppSink();
+
+ if (!mSource.isEmpty()) {
+ m_playbin.set("uri", mSource.toEncoded().constData());
+ } else if (mDevice) {
+ // make sure we can read from device
+ if (!mDevice->isOpen() || !mDevice->isReadable()) {
+ processInvalidMedia(QAudioDecoder::ResourceError, QLatin1String("Unable to read from specified device"));
+ return;
+ }
+
+ if (!m_appSrc) {
+ auto maybeAppSrc = QGstAppSource::create(this);
+ if (maybeAppSrc) {
+ m_appSrc = maybeAppSrc.value();
+ } else {
+ processInvalidMedia(QAudioDecoder::ResourceError, maybeAppSrc.error());
+ return;
+ }
+ }
+
+ m_playbin.set("uri", "appsrc://");
+ } else {
+ return;
+ }
+
+ // Set audio format
+ if (m_appSink) {
+ if (mFormat.isValid()) {
+ setAudioFlags(false);
+ auto caps = QGstUtils::capsForAudioFormat(mFormat);
+ m_appSink.setCaps(caps);
+ } else {
+ // We want whatever the native audio format is
+ setAudioFlags(true);
+ m_appSink.setCaps({});
+ }
+ }
+
+ if (m_playbin.setState(GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
+ qWarning() << "GStreamer; Unable to start decoding process";
+ m_playbin.dumpGraph("failed");
+ return;
+ }
+}
+
+void QGstreamerAudioDecoder::stop()
+{
+ m_playbin.setState(GST_STATE_NULL);
+ m_currentSessionId += 1;
+ removeAppSink();
+
+ // The GStreamer thread is stopped, so m_buffersAvailable can be accessed safely.
+ if (m_buffersAvailable != 0) {
+ m_buffersAvailable = 0;
+ bufferAvailableChanged(false);
+ }
+
+ if (m_position != invalidPosition) {
+ m_position = invalidPosition;
+ positionChanged(m_position.count());
+ }
+
+ if (m_duration != invalidDuration) {
+ m_duration = invalidDuration;
+ durationChanged(m_duration.count());
+ }
+
+ setIsDecoding(false);
+}
+
+QAudioFormat QGstreamerAudioDecoder::audioFormat() const
+{
+ return mFormat;
+}
+
+void QGstreamerAudioDecoder::setAudioFormat(const QAudioFormat &format)
+{
+ if (mFormat != format) {
+ mFormat = format;
+ formatChanged(mFormat);
+ }
+}
+
+QAudioBuffer QGstreamerAudioDecoder::read()
+{
+ using namespace std::chrono;
+
+ QAudioBuffer audioBuffer;
+
+ if (m_buffersAvailable == 0)
+ return audioBuffer;
+
+ m_buffersAvailable -= 1;
+
+ if (m_buffersAvailable == 0)
+ bufferAvailableChanged(false);
+
+ QGstSampleHandle sample = m_appSink.pullSample();
+ GstBuffer *buffer = gst_sample_get_buffer(sample.get());
+ GstMapInfo mapInfo;
+ gst_buffer_map(buffer, &mapInfo, GST_MAP_READ);
+ const char *bufferData = (const char *)mapInfo.data;
+ int bufferSize = mapInfo.size;
+ QAudioFormat format = QGstUtils::audioFormatForSample(sample.get());
+
+ if (format.isValid()) {
+ // XXX At the moment we have to copy data from GstBuffer into QAudioBuffer.
+ // We could improve performance by implementing QAbstractAudioBuffer for GstBuffer.
+ nanoseconds position = getPositionFromBuffer(buffer);
+ audioBuffer = QAudioBuffer{
+ QByteArray(bufferData, bufferSize),
+ format,
+ round<microseconds>(position).count(),
+ };
+ milliseconds positionInMs = round<milliseconds>(position);
+ if (positionInMs != m_position) {
+ m_position = positionInMs;
+ positionChanged(m_position.count());
+ }
+ }
+ gst_buffer_unmap(buffer, &mapInfo);
+
+ return audioBuffer;
+}
+
+qint64 QGstreamerAudioDecoder::position() const
+{
+ return m_position.count();
+}
+
+qint64 QGstreamerAudioDecoder::duration() const
+{
+ return m_duration.count();
+}
+
+void QGstreamerAudioDecoder::processInvalidMedia(QAudioDecoder::Error errorCode, const QString& errorString)
+{
+ stop();
+ error(int(errorCode), errorString);
+}
+
+GstFlowReturn QGstreamerAudioDecoder::newSample(GstAppSink *)
+{
+ // "Note that the preroll buffer will also be returned as the first buffer when calling
+ // gst_app_sink_pull_buffer()."
+
+ QMetaObject::invokeMethod(this, [this, sessionId = m_currentSessionId] {
+ if (sessionId != m_currentSessionId)
+ return; // stop()ed before message is executed
+
+ m_buffersAvailable += 1;
+ bufferAvailableChanged(true);
+ bufferReady();
+ });
+
+ return GST_FLOW_OK;
+}
+
+GstFlowReturn QGstreamerAudioDecoder::new_sample(GstAppSink *sink, gpointer user_data)
+{
+ QGstreamerAudioDecoder *decoder = reinterpret_cast<QGstreamerAudioDecoder *>(user_data);
+ qCDebug(qLcGstreamerAudioDecoder) << "QGstreamerAudioDecoder::new_sample";
+ return decoder->newSample(sink);
+}
+
+void QGstreamerAudioDecoder::setAudioFlags(bool wantNativeAudio)
+{
+ int flags = m_playbin.getInt("flags");
+ // make sure not to use GST_PLAY_FLAG_NATIVE_AUDIO unless desired,
+ // as it prevents audio format conversion
+ flags &= ~(GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_TEXT | GST_PLAY_FLAG_VIS | GST_PLAY_FLAG_NATIVE_AUDIO);
+ flags |= GST_PLAY_FLAG_AUDIO;
+ if (wantNativeAudio)
+ flags |= GST_PLAY_FLAG_NATIVE_AUDIO;
+ m_playbin.set("flags", flags);
+}
+
+void QGstreamerAudioDecoder::addAppSink()
+{
+ using namespace std::chrono_literals;
+
+ if (m_appSink)
+ return;
+
+ qCDebug(qLcGstreamerAudioDecoder) << "QGstreamerAudioDecoder::addAppSink";
+ m_appSink = QGstAppSink::create("decoderAppSink");
+ GstAppSinkCallbacks callbacks{};
+ callbacks.new_sample = new_sample;
+ m_appSink.setCallbacks(callbacks, this, nullptr);
+
+#if GST_CHECK_VERSION(1, 24, 0)
+ static constexpr auto maxBufferTime = 500ms;
+ m_appSink.setMaxBufferTime(maxBufferTime);
+#else
+ static constexpr int maxBuffers = 16;
+ m_appSink.setMaxBuffers(maxBuffers);
+#endif
+
+ static constexpr bool sync = false;
+ m_appSink.setSync(sync);
+
+ QGstPipeline::modifyPipelineWhileNotRunning(m_playbin.getPipeline(), [&] {
+ m_outputBin.add(m_appSink);
+ qLinkGstElements(m_audioConvert, m_appSink);
+ });
+}
+
+void QGstreamerAudioDecoder::removeAppSink()
+{
+ if (!m_appSink)
+ return;
+
+ qCDebug(qLcGstreamerAudioDecoder) << "QGstreamerAudioDecoder::removeAppSink";
+
+ QGstPipeline::modifyPipelineWhileNotRunning(m_playbin.getPipeline(), [&] {
+ qUnlinkGstElements(m_audioConvert, m_appSink);
+ m_outputBin.stopAndRemoveElements(m_appSink);
+ });
+ m_appSink = {};
+}
+
+void QGstreamerAudioDecoder::updateDuration()
+{
+ std::optional<std::chrono::milliseconds> duration = m_playbin.durationInMs();
+ if (!duration)
+ duration = invalidDuration;
+
+ if (m_duration != duration) {
+ m_duration = *duration;
+ durationChanged(m_duration.count());
+ }
+
+ if (m_duration.count() > 0)
+ m_durationQueries = 0;
+
+ if (m_durationQueries > 0) {
+ // increase the delay between duration requests; counting m_durationQueries
+ // down from 5 yields delays of 25, 50, 100, 200 and 400 ms
+ int delay = 25 << (5 - m_durationQueries);
+ QTimer::singleShot(delay, this, &QGstreamerAudioDecoder::updateDuration);
+ m_durationQueries--;
+ }
+}
+
+std::chrono::nanoseconds QGstreamerAudioDecoder::getPositionFromBuffer(GstBuffer *buffer)
+{
+ using namespace std::chrono;
+ using namespace std::chrono_literals;
+ nanoseconds position{ GST_BUFFER_TIMESTAMP(buffer) };
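+ // An unset timestamp (GST_CLOCK_TIME_NONE, all bits set) converts to a negative
+ // count here, so the check below reports it as an invalid position.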
+ if (position >= 0ns)
+ return position;
+ else
+ return invalidPosition;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qgstreameraudiodecoder_p.cpp"
diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder_p.h b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder_p.h
new file mode 100644
index 000000000..a5e192a38
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder_p.h
@@ -0,0 +1,116 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTREAMERAUDIODECODERCONTROL_H
+#define QGSTREAMERAUDIODECODERCONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/private/qmultimediautils_p.h>
+#include <QtMultimedia/private/qplatformaudiodecoder_p.h>
+#include <QtMultimedia/private/qtmultimediaglobal_p.h>
+#include <QtMultimedia/qaudiodecoder.h>
+#include <QtCore/qobject.h>
+#include <QtCore/qmutex.h>
+#include <QtCore/qurl.h>
+
+#include <common/qgstpipeline_p.h>
+#include <common/qgst_p.h>
+
+#if QT_CONFIG(gstreamer_app)
+# include <common/qgstappsource_p.h>
+#endif
+
+#include <gst/app/gstappsink.h>
+
+QT_BEGIN_NAMESPACE
+
+class QGstreamerMessage;
+
+class QGstreamerAudioDecoder final : public QPlatformAudioDecoder, public QGstreamerBusMessageFilter
+{
+ Q_OBJECT
+
+public:
+ static QMaybe<QPlatformAudioDecoder *> create(QAudioDecoder *parent);
+ virtual ~QGstreamerAudioDecoder();
+
+ QUrl source() const override;
+ void setSource(const QUrl &fileName) override;
+
+ QIODevice *sourceDevice() const override;
+ void setSourceDevice(QIODevice *device) override;
+
+ void start() override;
+ void stop() override;
+
+ QAudioFormat audioFormat() const override;
+ void setAudioFormat(const QAudioFormat &format) override;
+
+ QAudioBuffer read() override;
+
+ qint64 position() const override;
+ qint64 duration() const override;
+
+ // GStreamerBusMessageFilter interface
+ bool processBusMessage(const QGstreamerMessage &message) override;
+
+private slots:
+ void updateDuration();
+
+private:
+ explicit QGstreamerAudioDecoder(QAudioDecoder *parent);
+
+#if QT_CONFIG(gstreamer_app)
+ static GstFlowReturn new_sample(GstAppSink *sink, gpointer user_data);
+ GstFlowReturn newSample(GstAppSink *sink);
+
+ static void configureAppSrcElement(GObject *, GObject *, GParamSpec *,
+ QGstreamerAudioDecoder *_this);
+#endif
+
+ void setAudioFlags(bool wantNativeAudio);
+ void addAppSink();
+ void removeAppSink();
+
+ bool handlePlaybinMessage(const QGstreamerMessage &);
+
+ void processInvalidMedia(QAudioDecoder::Error errorCode, const QString &errorString);
+ static std::chrono::nanoseconds getPositionFromBuffer(GstBuffer *buffer);
+
+ QGstPipeline m_playbin;
+ QGstBin m_outputBin;
+ QGstElement m_audioConvert;
+ QGstAppSink m_appSink;
+ QGstAppSource *m_appSrc = nullptr;
+
+ QUrl mSource;
+ QIODevice *mDevice = nullptr;
+ QAudioFormat mFormat;
+
+ int m_buffersAvailable = 0;
+
+ static constexpr auto invalidDuration = std::chrono::milliseconds{ -1 };
+ static constexpr auto invalidPosition = std::chrono::milliseconds{ -1 };
+ std::chrono::milliseconds m_position{ invalidPosition };
+ std::chrono::milliseconds m_duration{ invalidDuration };
+
+ int m_durationQueries = 0;
+
+ qint32 m_currentSessionId{};
+
+ QGObjectHandlerScopedConnection m_deepNotifySourceConnection;
+};
+
+QT_END_NAMESPACE
+
+#endif // QGSTREAMERAUDIODECODERCONTROL_H
diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice.cpp b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice.cpp
new file mode 100644
index 000000000..dc6975030
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice.cpp
@@ -0,0 +1,77 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qgstreameraudiodevice_p.h"
+
+#include <common/qgst_p.h>
+#include <common/qgstutils_p.h>
+#include <private/qplatformmediaintegration_p.h>
+
+QT_BEGIN_NAMESPACE
+
+QGStreamerAudioDeviceInfo::QGStreamerAudioDeviceInfo(GstDevice *d, const QByteArray &device,
+ QAudioDevice::Mode mode)
+ : QAudioDevicePrivate(device, mode),
+ gstDevice{
+ d,
+ QGstDeviceHandle::NeedsRef,
+ }
+{
+ QGString name{
+ gst_device_get_display_name(gstDevice.get()),
+ };
+ description = name.toQString();
+
+ auto caps = QGstCaps(gst_device_get_caps(gstDevice.get()), QGstCaps::HasRef);
+ int size = caps.size();
+ for (int i = 0; i < size; ++i) {
+ auto c = caps.at(i);
+ if (c.name() == "audio/x-raw") {
+ auto rate = c["rate"].toIntRange();
+ if (rate) {
+ minimumSampleRate = rate->min;
+ maximumSampleRate = rate->max;
+ }
+ auto channels = c["channels"].toIntRange();
+ if (channels) {
+ minimumChannelCount = channels->min;
+ maximumChannelCount = channels->max;
+ }
+ supportedSampleFormats = c["format"].getSampleFormats();
+ }
+ }
+
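+ // Prefer a conventional stereo, 48 kHz default, clamped into the advertised ranges
+ // (e.g. a device offering 1..8 channels at 8000..192000 Hz yields 2 ch / 48000 Hz).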
+ preferredFormat.setChannelCount(qBound(minimumChannelCount, 2, maximumChannelCount));
+ preferredFormat.setSampleRate(qBound(minimumSampleRate, 48000, maximumSampleRate));
+ QAudioFormat::SampleFormat f = QAudioFormat::Int16;
+ if (!supportedSampleFormats.contains(f))
+ f = supportedSampleFormats.value(0, QAudioFormat::Unknown);
+ preferredFormat.setSampleFormat(f);
+}
+
+QGStreamerCustomAudioDeviceInfo::QGStreamerCustomAudioDeviceInfo(
+ const QByteArray &gstreamerPipeline, QAudioDevice::Mode mode)
+ : QAudioDevicePrivate{
+ gstreamerPipeline,
+ mode,
+ }
+{
+}
+
+QAudioDevice qMakeCustomGStreamerAudioInput(const QByteArray &gstreamerPipeline)
+{
+ auto deviceInfo = std::make_unique<QGStreamerCustomAudioDeviceInfo>(gstreamerPipeline,
+ QAudioDevice::Mode::Input);
+
+ return deviceInfo.release()->create();
+}
+
+QAudioDevice qMakeCustomGStreamerAudioOutput(const QByteArray &gstreamerPipeline)
+{
+ auto deviceInfo = std::make_unique<QGStreamerCustomAudioDeviceInfo>(gstreamerPipeline,
+ QAudioDevice::Mode::Output);
+
+ return deviceInfo.release()->create();
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice_p.h b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice_p.h
new file mode 100644
index 000000000..34d25bceb
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice_p.h
@@ -0,0 +1,52 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTREAMERAUDIODEVICEINFO_H
+#define QGSTREAMERAUDIODEVICEINFO_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qbytearray.h>
+#include <QtCore/qstringlist.h>
+#include <QtCore/qlist.h>
+
+#include <QtMultimedia/qaudio.h>
+#include <QtMultimedia/qaudiodevice.h>
+#include <QtMultimedia/private/qaudiodevice_p.h>
+
+#include <QtQGstreamerMediaPlugin/private/qgst_handle_types_p.h>
+
+#include <gst/gst.h>
+
+QT_BEGIN_NAMESPACE
+
+class QGStreamerAudioDeviceInfo : public QAudioDevicePrivate
+{
+public:
+ QGStreamerAudioDeviceInfo(GstDevice *gstDevice, const QByteArray &device, QAudioDevice::Mode mode);
+
+ QGstDeviceHandle gstDevice;
+};
+
+class QGStreamerCustomAudioDeviceInfo : public QAudioDevicePrivate
+{
+public:
+ QGStreamerCustomAudioDeviceInfo(const QByteArray &gstreamerPipeline, QAudioDevice::Mode mode);
+};
+
+QAudioDevice qMakeCustomGStreamerAudioInput(const QByteArray &gstreamerPipeline);
+QAudioDevice qMakeCustomGStreamerAudioOutput(const QByteArray &gstreamerPipeline);
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/gstreamer/common/qglist_helper_p.h b/src/plugins/multimedia/gstreamer/common/qglist_helper_p.h
new file mode 100644
index 000000000..54108e1c3
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qglist_helper_p.h
@@ -0,0 +1,82 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGLIST_HELPER_P_H
+#define QGLIST_HELPER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qtconfigmacros.h>
+
+#include <glib.h>
+#include <iterator>
+
+QT_BEGIN_NAMESPACE
+
+namespace QGstUtils {
+
+template <typename ListType>
+struct GListIterator
+{
+ explicit GListIterator(const GList *element = nullptr) : element(element) { }
+
+ const ListType &operator*() const noexcept { return *operator->(); }
+ const ListType *operator->() const noexcept
+ {
+ return reinterpret_cast<const ListType *>(&element->data);
+ }
+
+ GListIterator &operator++() noexcept
+ {
+ if (element)
+ element = element->next;
+
+ return *this;
+ }
+ GListIterator operator++(int) noexcept
+ {
+ GListIterator tmp = *this;
+ operator++();
+ return tmp;
+ }
+
+ bool operator==(const GListIterator &r) const noexcept { return element == r.element; }
+ bool operator!=(const GListIterator &r) const noexcept { return element != r.element; }
+
+ using difference_type = std::ptrdiff_t;
+ using value_type = ListType;
+ using pointer = value_type *;
+ using reference = value_type &;
+ using iterator_category = std::input_iterator_tag;
+
+ const GList *element = nullptr;
+};
+
+template <typename ListType>
+struct GListRangeAdaptor
+{
+ static_assert(std::is_pointer_v<ListType>);
+
+ explicit GListRangeAdaptor(const GList *list) : head(list) { }
+
+ auto begin() { return GListIterator<ListType>(head); }
+ auto end() { return GListIterator<ListType>(nullptr); }
+
+ const GList *head;
+};
+
+} // namespace QGstUtils
+
+QT_END_NAMESPACE
+
+#endif
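
A usage sketch for the adaptor above (hypothetical example; a GstDeviceMonitor is just one possible producer of a GList):

```cpp
#include <gst/gst.h>
// #include <common/qglist_helper_p.h>

static void printDeviceNames(GstDeviceMonitor *monitor)
{
    GList *devices = gst_device_monitor_get_devices(monitor); // transfer full
    for (GstDevice *device : QGstUtils::GListRangeAdaptor<GstDevice *>(devices)) {
        gchar *name = gst_device_get_display_name(device);
        g_print("%s\n", name);
        g_free(name);
    }
    g_list_free_full(devices, gst_object_unref);
}
```

Note that GListIterator hands out the stored pointers by reinterpreting GList::data, so the template argument must be exactly the pointer type stored in the list.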
diff --git a/src/plugins/multimedia/gstreamer/common/qgst.cpp b/src/plugins/multimedia/gstreamer/common/qgst.cpp
new file mode 100644
index 000000000..6cf133d6c
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgst.cpp
@@ -0,0 +1,1392 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <common/qgst_p.h>
+#include <common/qgst_debug_p.h>
+#include <common/qgstpipeline_p.h>
+#include <common/qgstreamermessage_p.h>
+
+#include <QtCore/qdebug.h>
+#include <QtMultimedia/qcameradevice.h>
+
+#include <array>
+
+QT_BEGIN_NAMESPACE
+
+namespace {
+
+struct VideoFormat
+{
+ QVideoFrameFormat::PixelFormat pixelFormat;
+ GstVideoFormat gstFormat;
+};
+
+constexpr std::array<VideoFormat, 19> qt_videoFormatLookup{ {
+ { QVideoFrameFormat::Format_YUV420P, GST_VIDEO_FORMAT_I420 },
+ { QVideoFrameFormat::Format_YUV422P, GST_VIDEO_FORMAT_Y42B },
+ { QVideoFrameFormat::Format_YV12, GST_VIDEO_FORMAT_YV12 },
+ { QVideoFrameFormat::Format_UYVY, GST_VIDEO_FORMAT_UYVY },
+ { QVideoFrameFormat::Format_YUYV, GST_VIDEO_FORMAT_YUY2 },
+ { QVideoFrameFormat::Format_NV12, GST_VIDEO_FORMAT_NV12 },
+ { QVideoFrameFormat::Format_NV21, GST_VIDEO_FORMAT_NV21 },
+ { QVideoFrameFormat::Format_AYUV, GST_VIDEO_FORMAT_AYUV },
+ { QVideoFrameFormat::Format_Y8, GST_VIDEO_FORMAT_GRAY8 },
+ { QVideoFrameFormat::Format_XRGB8888, GST_VIDEO_FORMAT_xRGB },
+ { QVideoFrameFormat::Format_XBGR8888, GST_VIDEO_FORMAT_xBGR },
+ { QVideoFrameFormat::Format_RGBX8888, GST_VIDEO_FORMAT_RGBx },
+ { QVideoFrameFormat::Format_BGRX8888, GST_VIDEO_FORMAT_BGRx },
+ { QVideoFrameFormat::Format_ARGB8888, GST_VIDEO_FORMAT_ARGB },
+ { QVideoFrameFormat::Format_ABGR8888, GST_VIDEO_FORMAT_ABGR },
+ { QVideoFrameFormat::Format_RGBA8888, GST_VIDEO_FORMAT_RGBA },
+ { QVideoFrameFormat::Format_BGRA8888, GST_VIDEO_FORMAT_BGRA },
+#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN
+ { QVideoFrameFormat::Format_Y16, GST_VIDEO_FORMAT_GRAY16_LE },
+ { QVideoFrameFormat::Format_P010, GST_VIDEO_FORMAT_P010_10LE },
+#else
+ { QVideoFrameFormat::Format_Y16, GST_VIDEO_FORMAT_GRAY16_BE },
+ { QVideoFrameFormat::Format_P010, GST_VIDEO_FORMAT_P010_10BE },
+#endif
+} };
+
+int indexOfVideoFormat(QVideoFrameFormat::PixelFormat format)
+{
+ for (size_t i = 0; i < qt_videoFormatLookup.size(); ++i)
+ if (qt_videoFormatLookup[i].pixelFormat == format)
+ return int(i);
+
+ return -1;
+}
+
+int indexOfVideoFormat(GstVideoFormat format)
+{
+ for (size_t i = 0; i < qt_videoFormatLookup.size(); ++i)
+ if (qt_videoFormatLookup[i].gstFormat == format)
+ return int(i);
+
+ return -1;
+}
+
+} // namespace
+
+// QGValue
+
+QGValue::QGValue(const GValue *v) : value(v) { }
+
+bool QGValue::isNull() const
+{
+ return !value;
+}
+
+std::optional<bool> QGValue::toBool() const
+{
+ if (!G_VALUE_HOLDS_BOOLEAN(value))
+ return std::nullopt;
+ return g_value_get_boolean(value);
+}
+
+std::optional<int> QGValue::toInt() const
+{
+ if (!G_VALUE_HOLDS_INT(value))
+ return std::nullopt;
+ return g_value_get_int(value);
+}
+
+std::optional<qint64> QGValue::toInt64() const
+{
+ if (!G_VALUE_HOLDS_INT64(value))
+ return std::nullopt;
+ return g_value_get_int64(value);
+}
+
+const char *QGValue::toString() const
+{
+ return value ? g_value_get_string(value) : nullptr;
+}
+
+std::optional<float> QGValue::getFraction() const
+{
+ if (!GST_VALUE_HOLDS_FRACTION(value))
+ return std::nullopt;
+ return (float)gst_value_get_fraction_numerator(value)
+ / (float)gst_value_get_fraction_denominator(value);
+}
+
+std::optional<QGRange<float>> QGValue::getFractionRange() const
+{
+ if (!GST_VALUE_HOLDS_FRACTION_RANGE(value))
+ return std::nullopt;
+ QGValue min = QGValue{ gst_value_get_fraction_range_min(value) };
+ QGValue max = QGValue{ gst_value_get_fraction_range_max(value) };
+ return QGRange<float>{ *min.getFraction(), *max.getFraction() };
+}
+
+std::optional<QGRange<int>> QGValue::toIntRange() const
+{
+ if (!GST_VALUE_HOLDS_INT_RANGE(value))
+ return std::nullopt;
+ return QGRange<int>{ gst_value_get_int_range_min(value), gst_value_get_int_range_max(value) };
+}
+
+QGstStructureView QGValue::toStructure() const
+{
+ if (!value || !GST_VALUE_HOLDS_STRUCTURE(value))
+ return QGstStructureView(nullptr);
+ return QGstStructureView(gst_value_get_structure(value));
+}
+
+QGstCaps QGValue::toCaps() const
+{
+ if (!value || !GST_VALUE_HOLDS_CAPS(value))
+ return {};
+ return QGstCaps(gst_caps_copy(gst_value_get_caps(value)), QGstCaps::HasRef);
+}
+
+bool QGValue::isList() const
+{
+ return value && GST_VALUE_HOLDS_LIST(value);
+}
+
+int QGValue::listSize() const
+{
+ return gst_value_list_get_size(value);
+}
+
+QGValue QGValue::at(int index) const
+{
+ return QGValue{ gst_value_list_get_value(value, index) };
+}
+
+// QGstStructureView
+
+QGstStructureView::QGstStructureView(const GstStructure *s) : structure(s) { }
+
+QGstStructureView::QGstStructureView(const QUniqueGstStructureHandle &handle)
+ : QGstStructureView{ handle.get() }
+{
+}
+
+QUniqueGstStructureHandle QGstStructureView::clone() const
+{
+ return QUniqueGstStructureHandle{ gst_structure_copy(structure) };
+}
+
+bool QGstStructureView::isNull() const
+{
+ return !structure;
+}
+
+QByteArrayView QGstStructureView::name() const
+{
+ return gst_structure_get_name(structure);
+}
+
+QGValue QGstStructureView::operator[](const char *fieldname) const
+{
+ return QGValue{ gst_structure_get_value(structure, fieldname) };
+}
+
+QGstCaps QGstStructureView::caps() const
+{
+ return operator[]("caps").toCaps();
+}
+
+QGstTagListHandle QGstStructureView::tags() const
+{
+ QGValue tags = operator[]("tags");
+ if (tags.isNull())
+ return {};
+
+ QGstTagListHandle tagList;
+ gst_structure_get(structure, "tags", GST_TYPE_TAG_LIST, &tagList, nullptr);
+ return tagList;
+}
+
+QSize QGstStructureView::resolution() const
+{
+ QSize size;
+
+ int w, h;
+ if (structure && gst_structure_get_int(structure, "width", &w)
+ && gst_structure_get_int(structure, "height", &h)) {
+ size.rwidth() = w;
+ size.rheight() = h;
+ }
+
+ return size;
+}
+
+QVideoFrameFormat::PixelFormat QGstStructureView::pixelFormat() const
+{
+ QVideoFrameFormat::PixelFormat pixelFormat = QVideoFrameFormat::Format_Invalid;
+
+ if (!structure)
+ return pixelFormat;
+
+ if (gst_structure_has_name(structure, "video/x-raw")) {
+ const gchar *s = gst_structure_get_string(structure, "format");
+ if (s) {
+ GstVideoFormat format = gst_video_format_from_string(s);
+ int index = indexOfVideoFormat(format);
+
+ if (index != -1)
+ pixelFormat = qt_videoFormatLookup[index].pixelFormat;
+ }
+ } else if (gst_structure_has_name(structure, "image/jpeg")) {
+ pixelFormat = QVideoFrameFormat::Format_Jpeg;
+ }
+
+ return pixelFormat;
+}
+
+QGRange<float> QGstStructureView::frameRateRange() const
+{
+ float minRate = 0.;
+ float maxRate = 0.;
+
+ if (!structure)
+ return { 0.f, 0.f };
+
+ auto extractFraction = [](const GValue *v) -> float {
+ return (float)gst_value_get_fraction_numerator(v)
+ / (float)gst_value_get_fraction_denominator(v);
+ };
+ auto extractFrameRate = [&](const GValue *v) {
+ auto insert = [&](float min, float max) {
+ if (max > maxRate)
+ maxRate = max;
+ if (min < minRate)
+ minRate = min;
+ };
+
+ if (GST_VALUE_HOLDS_FRACTION(v)) {
+ float rate = extractFraction(v);
+ insert(rate, rate);
+ } else if (GST_VALUE_HOLDS_FRACTION_RANGE(v)) {
+ auto *min = gst_value_get_fraction_range_min(v);
+ auto *max = gst_value_get_fraction_range_max(v);
+ insert(extractFraction(min), extractFraction(max));
+ }
+ };
+
+ const GValue *gstFrameRates = gst_structure_get_value(structure, "framerate");
+ if (gstFrameRates) {
+ if (GST_VALUE_HOLDS_LIST(gstFrameRates)) {
+ guint nFrameRates = gst_value_list_get_size(gstFrameRates);
+ for (guint f = 0; f < nFrameRates; ++f) {
+ extractFrameRate(gst_value_list_get_value(gstFrameRates, f));
+ }
+ } else {
+ extractFrameRate(gstFrameRates);
+ }
+ } else {
+ const GValue *min = gst_structure_get_value(structure, "min-framerate");
+ const GValue *max = gst_structure_get_value(structure, "max-framerate");
+ if (min && max) {
+ minRate = extractFraction(min);
+ maxRate = extractFraction(max);
+ }
+ }
+
+ return { minRate, maxRate };
+}
+
+QGstreamerMessage QGstStructureView::getMessage()
+{
+ GstMessage *message = nullptr;
+ gst_structure_get(structure, "message", GST_TYPE_MESSAGE, &message, nullptr);
+ return QGstreamerMessage(message, QGstreamerMessage::HasRef);
+}
+
+std::optional<Fraction> QGstStructureView::pixelAspectRatio() const
+{
+ gint numerator;
+ gint denominator;
+ if (gst_structure_get_fraction(structure, "pixel-aspect-ratio", &numerator, &denominator)) {
+ return Fraction{
+ numerator,
+ denominator,
+ };
+ }
+
+ return std::nullopt;
+}
+
+// QTBUG-125249: gstreamer tries "to keep the input height (because of interlacing)". Can we align
+// the behavior between gstreamer and ffmpeg?
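+// For illustration (values hypothetical): a 720x576 frame with a 16:15 pixel
+// aspect ratio maps to 768x576 here, since 720 * 16 / 15 = 768 and the height is kept.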
+static QSize qCalculateFrameSizeGStreamer(QSize resolution, Fraction par)
+{
+ if (par.numerator == par.denominator || par.numerator < 1 || par.denominator < 1)
+ return resolution;
+
+ return QSize{
+ resolution.width() * par.numerator / par.denominator,
+ resolution.height(),
+ };
+}
+
+QSize QGstStructureView::nativeSize() const
+{
+ QSize size = resolution();
+ if (!size.isValid()) {
+ qWarning() << Q_FUNC_INFO << "invalid resolution when querying nativeSize";
+ return size;
+ }
+
+ std::optional<Fraction> par = pixelAspectRatio();
+ if (par)
+ size = qCalculateFrameSizeGStreamer(size, *par);
+ return size;
+}
+
+// QGstCaps
+
+std::optional<std::pair<QVideoFrameFormat, GstVideoInfo>> QGstCaps::formatAndVideoInfo() const
+{
+ GstVideoInfo vidInfo;
+
+ bool success = gst_video_info_from_caps(&vidInfo, get());
+ if (!success)
+ return std::nullopt;
+
+ int index = indexOfVideoFormat(vidInfo.finfo->format);
+ if (index == -1)
+ return std::nullopt;
+
+ QVideoFrameFormat format(QSize(vidInfo.width, vidInfo.height),
+ qt_videoFormatLookup[index].pixelFormat);
+
+ if (vidInfo.fps_d > 0)
+ format.setStreamFrameRate(qreal(vidInfo.fps_n) / vidInfo.fps_d);
+
+ QVideoFrameFormat::ColorRange range = QVideoFrameFormat::ColorRange_Unknown;
+ switch (vidInfo.colorimetry.range) {
+ case GST_VIDEO_COLOR_RANGE_UNKNOWN:
+ break;
+ case GST_VIDEO_COLOR_RANGE_0_255:
+ range = QVideoFrameFormat::ColorRange_Full;
+ break;
+ case GST_VIDEO_COLOR_RANGE_16_235:
+ range = QVideoFrameFormat::ColorRange_Video;
+ break;
+ }
+ format.setColorRange(range);
+
+ QVideoFrameFormat::ColorSpace colorSpace = QVideoFrameFormat::ColorSpace_Undefined;
+ switch (vidInfo.colorimetry.matrix) {
+ case GST_VIDEO_COLOR_MATRIX_UNKNOWN:
+ case GST_VIDEO_COLOR_MATRIX_RGB:
+ case GST_VIDEO_COLOR_MATRIX_FCC:
+ break;
+ case GST_VIDEO_COLOR_MATRIX_BT709:
+ colorSpace = QVideoFrameFormat::ColorSpace_BT709;
+ break;
+ case GST_VIDEO_COLOR_MATRIX_BT601:
+ colorSpace = QVideoFrameFormat::ColorSpace_BT601;
+ break;
+ case GST_VIDEO_COLOR_MATRIX_SMPTE240M:
+ colorSpace = QVideoFrameFormat::ColorSpace_AdobeRgb;
+ break;
+ case GST_VIDEO_COLOR_MATRIX_BT2020:
+ colorSpace = QVideoFrameFormat::ColorSpace_BT2020;
+ break;
+ }
+ format.setColorSpace(colorSpace);
+
+ QVideoFrameFormat::ColorTransfer transfer = QVideoFrameFormat::ColorTransfer_Unknown;
+ switch (vidInfo.colorimetry.transfer) {
+ case GST_VIDEO_TRANSFER_UNKNOWN:
+ break;
+ case GST_VIDEO_TRANSFER_GAMMA10:
+ transfer = QVideoFrameFormat::ColorTransfer_Linear;
+ break;
+ case GST_VIDEO_TRANSFER_GAMMA22:
+ case GST_VIDEO_TRANSFER_SMPTE240M:
+ case GST_VIDEO_TRANSFER_SRGB:
+ case GST_VIDEO_TRANSFER_ADOBERGB:
+ transfer = QVideoFrameFormat::ColorTransfer_Gamma22;
+ break;
+ case GST_VIDEO_TRANSFER_GAMMA18:
+ case GST_VIDEO_TRANSFER_GAMMA20:
+ // not quite, but best fit
+ case GST_VIDEO_TRANSFER_BT709:
+ case GST_VIDEO_TRANSFER_BT2020_12:
+ transfer = QVideoFrameFormat::ColorTransfer_BT709;
+ break;
+ case GST_VIDEO_TRANSFER_GAMMA28:
+ transfer = QVideoFrameFormat::ColorTransfer_Gamma28;
+ break;
+ case GST_VIDEO_TRANSFER_LOG100:
+ case GST_VIDEO_TRANSFER_LOG316:
+ break;
+#if GST_CHECK_VERSION(1, 18, 0)
+ case GST_VIDEO_TRANSFER_SMPTE2084:
+ transfer = QVideoFrameFormat::ColorTransfer_ST2084;
+ break;
+ case GST_VIDEO_TRANSFER_ARIB_STD_B67:
+ transfer = QVideoFrameFormat::ColorTransfer_STD_B67;
+ break;
+ case GST_VIDEO_TRANSFER_BT2020_10:
+ transfer = QVideoFrameFormat::ColorTransfer_BT709;
+ break;
+ case GST_VIDEO_TRANSFER_BT601:
+ transfer = QVideoFrameFormat::ColorTransfer_BT601;
+ break;
+#endif
+ }
+ format.setColorTransfer(transfer);
+
+ return std::pair{
+ std::move(format),
+ vidInfo,
+ };
+}
+
+void QGstCaps::addPixelFormats(const QList<QVideoFrameFormat::PixelFormat> &formats,
+ const char *modifier)
+{
+ if (!gst_caps_is_writable(get()))
+ *this = QGstCaps(gst_caps_make_writable(release()), QGstCaps::RefMode::HasRef);
+
+ GValue list = {};
+ g_value_init(&list, GST_TYPE_LIST);
+
+ for (QVideoFrameFormat::PixelFormat format : formats) {
+ int index = indexOfVideoFormat(format);
+ if (index == -1)
+ continue;
+ GValue item = {};
+
+ g_value_init(&item, G_TYPE_STRING);
+ g_value_set_string(&item,
+ gst_video_format_to_string(qt_videoFormatLookup[index].gstFormat));
+ gst_value_list_append_value(&list, &item);
+ g_value_unset(&item);
+ }
+
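+ // The appended structure serializes (via gst_caps_to_string) to roughly:
+ // video/x-raw, framerate=(fraction)[ 0/1, 2147483647/1 ], width=(int)[ 1, 2147483647 ],
+ // height=(int)[ 1, 2147483647 ], format=(string){ ... } -- rendering is approximate.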
+ auto *structure = gst_structure_new("video/x-raw", "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
+ INT_MAX, 1, "width", GST_TYPE_INT_RANGE, 1, INT_MAX,
+ "height", GST_TYPE_INT_RANGE, 1, INT_MAX, nullptr);
+ gst_structure_set_value(structure, "format", &list);
+ gst_caps_append_structure(get(), structure);
+ g_value_unset(&list);
+
+ if (modifier)
+ gst_caps_set_features(get(), size() - 1, gst_caps_features_from_string(modifier));
+}
+
+void QGstCaps::setResolution(QSize resolution)
+{
+ Q_ASSERT(resolution.isValid());
+ GValue width{};
+ g_value_init(&width, G_TYPE_INT);
+ g_value_set_int(&width, resolution.width());
+ GValue height{};
+ g_value_init(&height, G_TYPE_INT);
+ g_value_set_int(&height, resolution.height());
+
+ gst_caps_set_value(caps(), "width", &width);
+ gst_caps_set_value(caps(), "height", &height);
+}
+
+QGstCaps QGstCaps::fromCameraFormat(const QCameraFormat &format)
+{
+ QSize size = format.resolution();
+ GstStructure *structure = nullptr;
+ if (format.pixelFormat() == QVideoFrameFormat::Format_Jpeg) {
+ structure = gst_structure_new("image/jpeg", "width", G_TYPE_INT, size.width(), "height",
+ G_TYPE_INT, size.height(), nullptr);
+ } else {
+ int index = indexOfVideoFormat(format.pixelFormat());
+ if (index < 0)
+ return {};
+ auto gstFormat = qt_videoFormatLookup[index].gstFormat;
+ structure = gst_structure_new("video/x-raw", "format", G_TYPE_STRING,
+ gst_video_format_to_string(gstFormat), "width", G_TYPE_INT,
+ size.width(), "height", G_TYPE_INT, size.height(), nullptr);
+ }
+ auto caps = QGstCaps::create();
+ gst_caps_append_structure(caps.get(), structure);
+ return caps;
+}
+
+QGstCaps QGstCaps::copy() const
+{
+ return QGstCaps{
+ gst_caps_copy(caps()),
+ QGstCaps::HasRef,
+ };
+}
+
+QGstCaps::MemoryFormat QGstCaps::memoryFormat() const
+{
+ auto *features = gst_caps_get_features(get(), 0);
+ if (gst_caps_features_contains(features, "memory:GLMemory"))
+ return GLTexture;
+ if (gst_caps_features_contains(features, "memory:DMABuf"))
+ return DMABuf;
+ return CpuMemory;
+}
+
+int QGstCaps::size() const
+{
+ return int(gst_caps_get_size(get()));
+}
+
+QGstStructureView QGstCaps::at(int index) const
+{
+ return QGstStructureView{
+ gst_caps_get_structure(get(), index),
+ };
+}
+
+GstCaps *QGstCaps::caps() const
+{
+ return get();
+}
+
+QGstCaps QGstCaps::create()
+{
+ return QGstCaps(gst_caps_new_empty(), HasRef);
+}
+
+// QGstObject
+
+void QGstObject::set(const char *property, const char *str)
+{
+ g_object_set(get(), property, str, nullptr);
+}
+
+void QGstObject::set(const char *property, bool b)
+{
+ g_object_set(get(), property, gboolean(b), nullptr);
+}
+
+void QGstObject::set(const char *property, uint i)
+{
+ g_object_set(get(), property, guint(i), nullptr);
+}
+
+void QGstObject::set(const char *property, int i)
+{
+ g_object_set(get(), property, gint(i), nullptr);
+}
+
+void QGstObject::set(const char *property, qint64 i)
+{
+ g_object_set(get(), property, gint64(i), nullptr);
+}
+
+void QGstObject::set(const char *property, quint64 i)
+{
+ g_object_set(get(), property, guint64(i), nullptr);
+}
+
+void QGstObject::set(const char *property, double d)
+{
+ g_object_set(get(), property, gdouble(d), nullptr);
+}
+
+void QGstObject::set(const char *property, const QGstObject &o)
+{
+ g_object_set(get(), property, o.object(), nullptr);
+}
+
+void QGstObject::set(const char *property, const QGstCaps &c)
+{
+ g_object_set(get(), property, c.caps(), nullptr);
+}
+
+QGString QGstObject::getString(const char *property) const
+{
+ char *s = nullptr;
+ g_object_get(get(), property, &s, nullptr);
+ return QGString(s);
+}
+
+QGstStructureView QGstObject::getStructure(const char *property) const
+{
+ GstStructure *s = nullptr;
+ g_object_get(get(), property, &s, nullptr);
+ return QGstStructureView(s);
+}
+
+bool QGstObject::getBool(const char *property) const
+{
+ gboolean b = false;
+ g_object_get(get(), property, &b, nullptr);
+ return b;
+}
+
+uint QGstObject::getUInt(const char *property) const
+{
+ guint i = 0;
+ g_object_get(get(), property, &i, nullptr);
+ return i;
+}
+
+int QGstObject::getInt(const char *property) const
+{
+ gint i = 0;
+ g_object_get(get(), property, &i, nullptr);
+ return i;
+}
+
+quint64 QGstObject::getUInt64(const char *property) const
+{
+ guint64 i = 0;
+ g_object_get(get(), property, &i, nullptr);
+ return i;
+}
+
+qint64 QGstObject::getInt64(const char *property) const
+{
+ gint64 i = 0;
+ g_object_get(get(), property, &i, nullptr);
+ return i;
+}
+
+float QGstObject::getFloat(const char *property) const
+{
+ gfloat d = 0;
+ g_object_get(get(), property, &d, nullptr);
+ return d;
+}
+
+double QGstObject::getDouble(const char *property) const
+{
+ gdouble d = 0;
+ g_object_get(get(), property, &d, nullptr);
+ return d;
+}
+
+QGstObject QGstObject::getObject(const char *property) const
+{
+ GstObject *o = nullptr;
+ g_object_get(get(), property, &o, nullptr);
+ return QGstObject(o, HasRef);
+}
+
+QGObjectHandlerConnection QGstObject::connect(const char *name, GCallback callback,
+ gpointer userData)
+{
+ return QGObjectHandlerConnection{
+ *this,
+ g_signal_connect(get(), name, callback, userData),
+ };
+}
+
+void QGstObject::disconnect(gulong handlerId)
+{
+ g_signal_handler_disconnect(get(), handlerId);
+}
+
+GType QGstObject::type() const
+{
+ return G_OBJECT_TYPE(get());
+}
+
+QLatin1StringView QGstObject::typeName() const
+{
+ return QLatin1StringView{
+ g_type_name(type()),
+ };
+}
+
+GstObject *QGstObject::object() const
+{
+ return get();
+}
+
+QLatin1StringView QGstObject::name() const
+{
+ using namespace Qt::StringLiterals;
+
+ return get() ? QLatin1StringView{ GST_OBJECT_NAME(get()) } : "(null)"_L1;
+}
+
+// QGObjectHandlerConnection
+
+QGObjectHandlerConnection::QGObjectHandlerConnection(QGstObject object, gulong handlerId)
+ : object{ std::move(object) }, handlerId{ handlerId }
+{
+}
+
+void QGObjectHandlerConnection::disconnect()
+{
+ if (!object)
+ return;
+
+ object.disconnect(handlerId);
+ object = {};
+ handlerId = invalidHandlerId;
+}
+
+// QGObjectHandlerScopedConnection
+
+QGObjectHandlerScopedConnection::QGObjectHandlerScopedConnection(
+ QGObjectHandlerConnection connection)
+ : connection{
+ std::move(connection),
+ }
+{
+}
+
+QGObjectHandlerScopedConnection::~QGObjectHandlerScopedConnection()
+{
+ connection.disconnect();
+}
+
+void QGObjectHandlerScopedConnection::disconnect()
+{
+ connection.disconnect();
+}
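+
+// Illustrative usage (a sketch; `onNotify` is a hypothetical GCallback):
+//
+//     QGObjectHandlerScopedConnection conn =
+//             element.connect("notify::volume", G_CALLBACK(onNotify), this);
+//     // the handler is disconnected automatically when `conn` goes out of scope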
+
+// QGstPad
+
+QGstPad::QGstPad(const QGstObject &o)
+ : QGstPad{
+ qGstSafeCast<GstPad>(o.object()),
+ QGstElement::NeedsRef,
+ }
+{
+}
+
+QGstPad::QGstPad(GstPad *pad, RefMode mode)
+ : QGstObject{
+ qGstCheckedCast<GstObject>(pad),
+ mode,
+ }
+{
+}
+
+QGstCaps QGstPad::currentCaps() const
+{
+ return QGstCaps(gst_pad_get_current_caps(pad()), QGstCaps::HasRef);
+}
+
+QGstCaps QGstPad::queryCaps() const
+{
+ return QGstCaps(gst_pad_query_caps(pad(), nullptr), QGstCaps::HasRef);
+}
+
+QGstTagListHandle QGstPad::tags() const
+{
+ QGstTagListHandle tagList;
+ g_object_get(object(), "tags", &tagList, nullptr);
+ return tagList;
+}
+
+std::optional<QPlatformMediaPlayer::TrackType> QGstPad::inferTrackTypeFromName() const
+{
+ using namespace Qt::Literals;
+ QLatin1StringView padName = name();
+
+ if (padName.startsWith("video_"_L1))
+ return QPlatformMediaPlayer::TrackType::VideoStream;
+ if (padName.startsWith("audio_"_L1))
+ return QPlatformMediaPlayer::TrackType::AudioStream;
+ if (padName.startsWith("text_"_L1))
+ return QPlatformMediaPlayer::TrackType::SubtitleStream;
+
+ return std::nullopt;
+}
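+
+// decodebin3 and friends name their pads "video_0", "audio_1", "text_0", ...,
+// so the prefix classifies a stream before caps are known, e.g. (sketch):
+//
+//     if (pad.inferTrackTypeFromName() == QPlatformMediaPlayer::TrackType::VideoStream)
+//         connectVideoSink(pad); // hypothetical helper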
+
+bool QGstPad::isLinked() const
+{
+ return gst_pad_is_linked(pad());
+}
+
+bool QGstPad::link(const QGstPad &sink) const
+{
+ return gst_pad_link(pad(), sink.pad()) == GST_PAD_LINK_OK;
+}
+
+bool QGstPad::unlink(const QGstPad &sink) const
+{
+ return gst_pad_unlink(pad(), sink.pad());
+}
+
+bool QGstPad::unlinkPeer() const
+{
+ return unlink(peer());
+}
+
+QGstPad QGstPad::peer() const
+{
+ return QGstPad(gst_pad_get_peer(pad()), HasRef);
+}
+
+QGstElement QGstPad::parent() const
+{
+ return QGstElement(gst_pad_get_parent_element(pad()), HasRef);
+}
+
+GstPad *QGstPad::pad() const
+{
+ return qGstCheckedCast<GstPad>(object());
+}
+
+GstEvent *QGstPad::stickyEvent(GstEventType type)
+{
+ return gst_pad_get_sticky_event(pad(), type, 0);
+}
+
+bool QGstPad::sendEvent(GstEvent *event)
+{
+ return gst_pad_send_event(pad(), event);
+}
+
+// QGstClock
+
+QGstClock::QGstClock(const QGstObject &o)
+ : QGstClock{
+ qGstSafeCast<GstClock>(o.object()),
+ QGstElement::NeedsRef,
+ }
+{
+}
+
+QGstClock::QGstClock(GstClock *clock, RefMode mode)
+ : QGstObject{
+ qGstCheckedCast<GstObject>(clock),
+ mode,
+ }
+{
+}
+
+GstClock *QGstClock::clock() const
+{
+ return qGstCheckedCast<GstClock>(object());
+}
+
+GstClockTime QGstClock::time() const
+{
+ return gst_clock_get_time(clock());
+}
+
+// QGstElement
+
+QGstElement::QGstElement(GstElement *element, RefMode mode)
+ : QGstObject{
+ qGstCheckedCast<GstObject>(element),
+ mode,
+ }
+{
+}
+
+QGstElement QGstElement::createFromFactory(const char *factory, const char *name)
+{
+ GstElement *element = gst_element_factory_make(factory, name);
+
+#ifndef QT_NO_DEBUG
+ if (!element) {
+ qWarning() << "Failed to make element" << name << "from factory" << factory;
+ return QGstElement{};
+ }
+#endif
+
+ return QGstElement{
+ element,
+ NeedsRef,
+ };
+}
+
+QGstElement QGstElement::createFromFactory(GstElementFactory *factory, const char *name)
+{
+ return QGstElement{
+ gst_element_factory_create(factory, name),
+ NeedsRef,
+ };
+}
+
+QGstElement QGstElement::createFromFactory(const QGstElementFactoryHandle &factory,
+ const char *name)
+{
+ return createFromFactory(factory.get(), name);
+}
+
+QGstElement QGstElement::createFromDevice(const QGstDeviceHandle &device, const char *name)
+{
+ return createFromDevice(device.get(), name);
+}
+
+QGstElement QGstElement::createFromDevice(GstDevice *device, const char *name)
+{
+ return QGstElement{
+ gst_device_create_element(device, name),
+ QGstElement::NeedsRef,
+ };
+}
+
+QGstElement QGstElement::createFromPipelineDescription(const char *str)
+{
+ QUniqueGErrorHandle error;
+ QGstElement element{
+ gst_parse_launch(str, &error),
+ QGstElement::NeedsRef,
+ };
+
+ if (error) // error does not mean that the element could not be constructed
+ qWarning() << "gst_parse_launch error:" << error;
+
+ return element;
+}
+
+QGstElement QGstElement::createFromPipelineDescription(const QByteArray &str)
+{
+ return createFromPipelineDescription(str.constData());
+}
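+
+// Illustrative usage (a sketch; the description string is only an example):
+//
+//     QGstElement src =
+//             QGstElement::createFromPipelineDescription("videotestsrc pattern=smpte");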
+
+QGstElementFactoryHandle QGstElement::findFactory(const char *name)
+{
+ return QGstElementFactoryHandle{
+ gst_element_factory_find(name),
+ QGstElementFactoryHandle::HasRef,
+ };
+}
+
+QGstElementFactoryHandle QGstElement::findFactory(const QByteArray &name)
+{
+ return findFactory(name.constData());
+}
+
+QGstPad QGstElement::staticPad(const char *name) const
+{
+ return QGstPad(gst_element_get_static_pad(element(), name), HasRef);
+}
+
+QGstPad QGstElement::src() const
+{
+ return staticPad("src");
+}
+
+QGstPad QGstElement::sink() const
+{
+ return staticPad("sink");
+}
+
+QGstPad QGstElement::getRequestPad(const char *name) const
+{
+#if GST_CHECK_VERSION(1, 19, 1)
+ return QGstPad(gst_element_request_pad_simple(element(), name), HasRef);
+#else
+ return QGstPad(gst_element_get_request_pad(element(), name), HasRef);
+#endif
+}
+
+void QGstElement::releaseRequestPad(const QGstPad &pad) const
+{
+ return gst_element_release_request_pad(element(), pad.pad());
+}
+
+GstState QGstElement::state(std::chrono::nanoseconds timeout) const
+{
+ using namespace std::chrono_literals;
+
+ GstState state;
+ GstStateChangeReturn change =
+ gst_element_get_state(element(), &state, nullptr, timeout.count());
+
+ if (Q_UNLIKELY(change == GST_STATE_CHANGE_ASYNC))
+ qWarning() << "QGstElement::state detected an asynchronous state change. Return value not "
+ "reliable";
+
+ return state;
+}
+
+GstStateChangeReturn QGstElement::setState(GstState state)
+{
+ return gst_element_set_state(element(), state);
+}
+
+bool QGstElement::setStateSync(GstState state, std::chrono::nanoseconds timeout)
+{
+ if (state == GST_STATE_NULL) {
+        // QTBUG-125251: when changing pipeline state too quickly between NULL->PAUSED->NULL,
+        // there may be a pending task to activate pads while we try to switch to NULL. This can
+        // cause an assertion failure in GStreamer. We therefore finish the state change when
+        // called on a bin or pipeline.
+ if (qIsGstObjectOfType<GstBin>(element()))
+ finishStateChange();
+ }
+
+ GstStateChangeReturn change = gst_element_set_state(element(), state);
+ if (change == GST_STATE_CHANGE_ASYNC)
+ change = gst_element_get_state(element(), nullptr, &state, timeout.count());
+
+ if (change != GST_STATE_CHANGE_SUCCESS && change != GST_STATE_CHANGE_NO_PREROLL) {
+ qWarning() << "Could not change state of" << name() << "to" << state << change;
+ dumpPipelineGraph("setStatSyncFailure");
+ }
+ return change == GST_STATE_CHANGE_SUCCESS;
+}
+
+bool QGstElement::syncStateWithParent()
+{
+ Q_ASSERT(element());
+ return gst_element_sync_state_with_parent(element()) == TRUE;
+}
+
+bool QGstElement::finishStateChange(std::chrono::nanoseconds timeout)
+{
+ GstState state, pending;
+ GstStateChangeReturn change =
+ gst_element_get_state(element(), &state, &pending, timeout.count());
+
+ if (change != GST_STATE_CHANGE_SUCCESS && change != GST_STATE_CHANGE_NO_PREROLL) {
+ qWarning() << "Could not finish change state of" << name() << change << state << pending;
+ dumpPipelineGraph("finishStateChangeFailure");
+ }
+ return change == GST_STATE_CHANGE_SUCCESS;
+}
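+
+// Illustrative state handling (a sketch): setStateSync() blocks until the
+// transition completes, finishStateChange() waits for one started elsewhere.
+//
+//     element.setState(GST_STATE_PAUSED);       // may return GST_STATE_CHANGE_ASYNC
+//     element.finishStateChange();              // wait for the async transition
+//     element.setStateSync(GST_STATE_PLAYING);  // set and wait in one call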
+
+void QGstElement::lockState(bool locked)
+{
+ gst_element_set_locked_state(element(), locked);
+}
+
+bool QGstElement::isStateLocked() const
+{
+ return gst_element_is_locked_state(element());
+}
+
+void QGstElement::sendEvent(GstEvent *event) const
+{
+ gst_element_send_event(element(), event);
+}
+
+void QGstElement::sendEos() const
+{
+ sendEvent(gst_event_new_eos());
+}
+
+std::optional<std::chrono::nanoseconds> QGstElement::duration() const
+{
+ gint64 d;
+ if (!gst_element_query_duration(element(), GST_FORMAT_TIME, &d)) {
+ qDebug() << "QGstElement: failed to query duration";
+ return std::nullopt;
+ }
+ return std::chrono::nanoseconds{ d };
+}
+
+std::optional<std::chrono::milliseconds> QGstElement::durationInMs() const
+{
+ using namespace std::chrono;
+ auto dur = duration();
+ if (dur)
+ return round<milliseconds>(*dur);
+ return std::nullopt;
+}
+
+std::optional<std::chrono::nanoseconds> QGstElement::position() const
+{
+ QGstQueryHandle &query = positionQuery();
+
+ gint64 pos;
+ if (gst_element_query(element(), query.get())) {
+ gst_query_parse_position(query.get(), nullptr, &pos);
+ return std::chrono::nanoseconds{ pos };
+ }
+
+ qDebug() << "QGstElement: failed to query position";
+ return std::nullopt;
+}
+
+std::optional<std::chrono::milliseconds> QGstElement::positionInMs() const
+{
+ using namespace std::chrono;
+ auto pos = position();
+ if (pos)
+ return round<milliseconds>(*pos);
+ return std::nullopt;
+}
+
+GstClockTime QGstElement::baseTime() const
+{
+ return gst_element_get_base_time(element());
+}
+
+void QGstElement::setBaseTime(GstClockTime time) const
+{
+ gst_element_set_base_time(element(), time);
+}
+
+GstElement *QGstElement::element() const
+{
+ return GST_ELEMENT_CAST(get());
+}
+
+QGstElement QGstElement::getParent() const
+{
+ return QGstElement{
+ qGstCheckedCast<GstElement>(gst_element_get_parent(object())),
+ QGstElement::HasRef,
+ };
+}
+
+QGstPipeline QGstElement::getPipeline() const
+{
+ QGstElement ancestor = *this;
+ for (;;) {
+ QGstElement greatAncestor = ancestor.getParent();
+ if (greatAncestor) {
+ ancestor = std::move(greatAncestor);
+ continue;
+ }
+
+ return QGstPipeline{
+ qGstSafeCast<GstPipeline>(ancestor.element()),
+ QGstPipeline::NeedsRef,
+ };
+ }
+}
+
+void QGstElement::dumpPipelineGraph(const char *filename) const
+{
+ static const bool dumpEnabled = qEnvironmentVariableIsSet("GST_DEBUG_DUMP_DOT_DIR");
+ if (dumpEnabled) {
+ QGstPipeline pipeline = getPipeline();
+ if (pipeline)
+ pipeline.dumpGraph(filename);
+ }
+}
+
+QGstQueryHandle &QGstElement::positionQuery() const
+{
+ if (Q_UNLIKELY(!m_positionQuery))
+ m_positionQuery = QGstQueryHandle{
+ gst_query_new_position(GST_FORMAT_TIME),
+ QGstQueryHandle::HasRef,
+ };
+
+ return m_positionQuery;
+}
+
+// QGstBin
+
+QGstBin QGstBin::create(const char *name)
+{
+ return QGstBin(gst_bin_new(name), NeedsRef);
+}
+
+QGstBin QGstBin::createFromFactory(const char *factory, const char *name)
+{
+ QGstElement element = QGstElement::createFromFactory(factory, name);
+ Q_ASSERT(GST_IS_BIN(element.element()));
+ return QGstBin{
+ GST_BIN(element.release()),
+ RefMode::HasRef,
+ };
+}
+
+QGstBin QGstBin::createFromPipelineDescription(const QByteArray &pipelineDescription,
+ const char *name, bool ghostUnlinkedPads)
+{
+ return createFromPipelineDescription(pipelineDescription.constData(), name, ghostUnlinkedPads);
+}
+
+QGstBin QGstBin::createFromPipelineDescription(const char *pipelineDescription, const char *name,
+ bool ghostUnlinkedPads)
+{
+ QUniqueGErrorHandle error;
+
+ GstElement *element =
+ gst_parse_bin_from_description_full(pipelineDescription, ghostUnlinkedPads,
+ /*context=*/nullptr, GST_PARSE_FLAG_NONE, &error);
+
+ if (!element) {
+ qWarning() << "Failed to make element from pipeline description" << pipelineDescription
+ << error;
+ return QGstBin{};
+ }
+
+ if (name)
+ gst_element_set_name(element, name);
+
+ return QGstBin{
+ element,
+ NeedsRef,
+ };
+}
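+
+// Illustrative usage (a sketch; the description string is only an example):
+//
+//     QGstBin filter = QGstBin::createFromPipelineDescription(
+//             "videoconvert ! videoscale", /*name=*/nullptr,
+//             /*ghostUnlinkedPads=*/true); // expose unlinked pads as ghost pads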
+
+QGstBin::QGstBin(GstBin *bin, RefMode mode)
+ : QGstElement{
+ qGstCheckedCast<GstElement>(bin),
+ mode,
+ }
+{
+}
+
+GstBin *QGstBin::bin() const
+{
+ return qGstCheckedCast<GstBin>(object());
+}
+
+void QGstBin::addGhostPad(const QGstElement &child, const char *name)
+{
+ addGhostPad(name, child.staticPad(name));
+}
+
+void QGstBin::addGhostPad(const char *name, const QGstPad &pad)
+{
+ gst_element_add_pad(element(), gst_ghost_pad_new(name, pad.pad()));
+}
+
+bool QGstBin::syncChildrenState()
+{
+ return gst_bin_sync_children_states(bin());
+}
+
+void QGstBin::dumpGraph(const char *fileNamePrefix)
+{
+ if (isNull())
+ return;
+
+ GST_DEBUG_BIN_TO_DOT_FILE(bin(), GST_DEBUG_GRAPH_SHOW_VERBOSE, fileNamePrefix);
+}
+
+QGstElement QGstBin::findByName(const char *name)
+{
+ return QGstElement{
+ gst_bin_get_by_name(bin(), name),
+ QGstElement::NeedsRef,
+ };
+}
+
+// QGstBaseSink
+
+QGstBaseSink::QGstBaseSink(GstBaseSink *element, RefMode mode)
+ : QGstElement{
+ qGstCheckedCast<GstElement>(element),
+ mode,
+ }
+{
+}
+
+void QGstBaseSink::setSync(bool arg)
+{
+ gst_base_sink_set_sync(baseSink(), arg ? TRUE : FALSE);
+}
+
+GstBaseSink *QGstBaseSink::baseSink() const
+{
+ return qGstCheckedCast<GstBaseSink>(element());
+}
+
+// QGstBaseSrc
+
+QGstBaseSrc::QGstBaseSrc(GstBaseSrc *element, RefMode mode)
+ : QGstElement{
+ qGstCheckedCast<GstElement>(element),
+ mode,
+ }
+{
+}
+
+GstBaseSrc *QGstBaseSrc::baseSrc() const
+{
+ return qGstCheckedCast<GstBaseSrc>(element());
+}
+
+#if QT_CONFIG(gstreamer_app)
+
+// QGstAppSink
+
+QGstAppSink::QGstAppSink(GstAppSink *element, RefMode mode)
+ : QGstBaseSink{
+ qGstCheckedCast<GstBaseSink>(element),
+ mode,
+ }
+{
+}
+
+QGstAppSink QGstAppSink::create(const char *name)
+{
+ QGstElement created = QGstElement::createFromFactory("appsink", name);
+ return QGstAppSink{
+ qGstCheckedCast<GstAppSink>(created.element()),
+ QGstAppSink::NeedsRef,
+ };
+}
+
+GstAppSink *QGstAppSink::appSink() const
+{
+ return qGstCheckedCast<GstAppSink>(element());
+}
+
+# if GST_CHECK_VERSION(1, 24, 0)
+void QGstAppSink::setMaxBufferTime(std::chrono::nanoseconds ns)
+{
+ gst_app_sink_set_max_time(appSink(), qGstClockTimeFromChrono(ns));
+}
+# endif
+
+void QGstAppSink::setMaxBuffers(int n)
+{
+ gst_app_sink_set_max_buffers(appSink(), n);
+}
+
+void QGstAppSink::setCaps(const QGstCaps &caps)
+{
+ gst_app_sink_set_caps(appSink(), caps.caps());
+}
+
+void QGstAppSink::setCallbacks(GstAppSinkCallbacks &callbacks, gpointer user_data,
+ GDestroyNotify notify)
+{
+ gst_app_sink_set_callbacks(appSink(), &callbacks, user_data, notify);
+}
+
+QGstSampleHandle QGstAppSink::pullSample()
+{
+ return QGstSampleHandle{
+ gst_app_sink_pull_sample(appSink()),
+ QGstSampleHandle::HasRef,
+ };
+}
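+
+// Illustrative pull-mode usage (a sketch):
+//
+//     QGstAppSink sink = QGstAppSink::create("sink");
+//     sink.setMaxBuffers(1);
+//     QGstSampleHandle sample = sink.pullSample(); // blocks; null on EOS/flush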
+
+// QGstAppSrc
+
+QGstAppSrc::QGstAppSrc(GstAppSrc *element, RefMode mode)
+ : QGstBaseSrc{
+ qGstCheckedCast<GstBaseSrc>(element),
+ mode,
+ }
+{
+}
+
+QGstAppSrc QGstAppSrc::create(const char *name)
+{
+ QGstElement created = QGstElement::createFromFactory("appsrc", name);
+ return QGstAppSrc{
+ qGstCheckedCast<GstAppSrc>(created.element()),
+ QGstAppSrc::NeedsRef,
+ };
+}
+
+GstAppSrc *QGstAppSrc::appSrc() const
+{
+ return qGstCheckedCast<GstAppSrc>(element());
+}
+
+void QGstAppSrc::setCallbacks(GstAppSrcCallbacks &callbacks, gpointer user_data,
+ GDestroyNotify notify)
+{
+ gst_app_src_set_callbacks(appSrc(), &callbacks, user_data, notify);
+}
+
+GstFlowReturn QGstAppSrc::pushBuffer(GstBuffer *buffer)
+{
+ return gst_app_src_push_buffer(appSrc(), buffer);
+}
+
+#endif
+
+QString qGstErrorMessageCannotFindElement(std::string_view element)
+{
+ return QStringLiteral("Could not find the %1 GStreamer element")
+ .arg(QLatin1StringView(element));
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgst_debug.cpp b/src/plugins/multimedia/gstreamer/common/qgst_debug.cpp
new file mode 100644
index 000000000..413b02f44
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgst_debug.cpp
@@ -0,0 +1,565 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qgst_debug_p.h"
+#include "qgstreamermessage_p.h"
+
+#include <gst/gstclock.h>
+
+QT_BEGIN_NAMESPACE
+
+// NOLINTBEGIN(performance-unnecessary-value-param)
+
+QDebug operator<<(QDebug dbg, const QGString &str)
+{
+ return dbg << str.get();
+}
+
+QDebug operator<<(QDebug dbg, const QGstCaps &caps)
+{
+ return dbg << caps.caps();
+}
+
+QDebug operator<<(QDebug dbg, const QGstStructureView &structure)
+{
+ return dbg << structure.structure;
+}
+
+QDebug operator<<(QDebug dbg, const QGValue &value)
+{
+ return dbg << value.value;
+}
+
+QDebug operator<<(QDebug dbg, const QGstreamerMessage &msg)
+{
+ return dbg << msg.message();
+}
+
+QDebug operator<<(QDebug dbg, const QUniqueGErrorHandle &handle)
+{
+ return dbg << handle.get();
+}
+
+QDebug operator<<(QDebug dbg, const QUniqueGStringHandle &handle)
+{
+ return dbg << handle.get();
+}
+
+QDebug operator<<(QDebug dbg, const QGstStreamCollectionHandle &handle)
+{
+ return dbg << handle.get();
+}
+
+QDebug operator<<(QDebug dbg, const QGstStreamHandle &handle)
+{
+ return dbg << handle.get();
+}
+
+QDebug operator<<(QDebug dbg, const QGstTagListHandle &handle)
+{
+ return dbg << handle.get();
+}
+
+QDebug operator<<(QDebug dbg, const QGstElement &element)
+{
+ return dbg << element.element();
+}
+
+QDebug operator<<(QDebug dbg, const QGstPad &pad)
+{
+ return dbg << pad.pad();
+}
+
+QDebug operator<<(QDebug dbg, const GstCaps *caps)
+{
+ if (caps)
+ return dbg << QGString(gst_caps_to_string(caps));
+ else
+ return dbg << "null";
+}
+
+QDebug operator<<(QDebug dbg, const GstVideoInfo *info)
+{
+#if GST_CHECK_VERSION(1, 20, 0)
+    return dbg << QGstCaps{
+        gst_video_info_to_caps(info),
+        QGstCaps::HasRef, // gst_video_info_to_caps returns a new reference
+    };
+#else
+    return dbg << QGstCaps{
+        gst_video_info_to_caps(const_cast<GstVideoInfo *>(info)),
+        QGstCaps::HasRef, // gst_video_info_to_caps returns a new reference
+    };
+#endif
+}
+
+QDebug operator<<(QDebug dbg, const GstStructure *structure)
+{
+ if (structure)
+ return dbg << QGString(gst_structure_to_string(structure));
+ else
+ return dbg << "null";
+}
+
+QDebug operator<<(QDebug dbg, const GstObject *object)
+{
+    dbg << QGString{ gst_object_get_name(const_cast<GstObject *>(object)) };
+
+ {
+ QDebugStateSaver saver(dbg);
+ dbg.nospace();
+
+ dbg << "{";
+
+ guint numProperties;
+ GParamSpec **properties = g_object_class_list_properties(G_OBJECT_GET_CLASS(object), &numProperties);
+
+ for (guint i = 0; i < numProperties; i++) {
+ GParamSpec *param = properties[i];
+
+ const gchar *name = g_param_spec_get_name(param);
+ constexpr bool trace_blurb = false;
+ if constexpr (trace_blurb) {
+ const gchar *blurb = g_param_spec_get_blurb(param);
+ dbg << name << " (" << blurb << "): ";
+ } else
+ dbg << name << ": ";
+
+ bool readable = bool(param->flags & G_PARAM_READABLE);
+ if (!readable) {
+ dbg << "(not readable)";
+ } else if (QLatin1StringView(name) == QLatin1StringView("parent")) {
+ if (object->parent)
+ dbg << QGString{ gst_object_get_name(object->parent) };
+ else
+ dbg << "(none)";
+ } else {
+ GValue value = {};
+ g_object_get_property(&const_cast<GstObject *>(object)->object, param->name,
+ &value);
+ dbg << &value;
+ }
+ if (i != numProperties - 1)
+ dbg << ", ";
+ }
+
+ dbg << "}";
+
+ g_free(properties);
+ }
+ return dbg;
+}
+
+QDebug operator<<(QDebug dbg, const GstElement *element)
+{
+ return dbg << GST_OBJECT_CAST(element); // LATER: output other members?
+}
+
+QDebug operator<<(QDebug dbg, const GstPad *pad)
+{
+ return dbg << GST_OBJECT_CAST(pad); // LATER: output other members?
+}
+
+QDebug operator<<(QDebug dbg, const GstDevice *device)
+{
+ GstDevice *d = const_cast<GstDevice *>(device);
+ QDebugStateSaver saver(dbg);
+ dbg.nospace();
+
+    dbg << QGString{ gst_device_get_display_name(d) } << "("
+        << QGString{ gst_device_get_device_class(d) } << ") ";
+    dbg << "Caps: " << QGstCaps{ gst_device_get_caps(d), QGstCaps::HasRef } << ", ";
+ dbg << "Properties: " << QUniqueGstStructureHandle{ gst_device_get_properties(d) }.get();
+ return dbg;
+}
+
+namespace {
+
+struct Timepoint
+{
+ explicit Timepoint(guint64 us) : ts{ us } { }
+ guint64 ts;
+};
+
+QDebug operator<<(QDebug dbg, Timepoint ts)
+{
+ char buffer[128];
+ snprintf(buffer, sizeof(buffer), "%" GST_TIME_FORMAT, GST_TIME_ARGS(ts.ts));
+ dbg << buffer;
+ return dbg;
+}
+
+} // namespace
+
+QDebug operator<<(QDebug dbg, const GstMessage *msg)
+{
+ QDebugStateSaver saver(dbg);
+ dbg.nospace();
+
+ dbg << GST_MESSAGE_TYPE_NAME(msg) << ", Source: " << GST_MESSAGE_SRC_NAME(msg);
+    if (GST_MESSAGE_TIMESTAMP(msg) != GST_CLOCK_TIME_NONE)
+ dbg << ", Timestamp: " << GST_MESSAGE_TIMESTAMP(msg);
+
+ switch (msg->type) {
+ case GST_MESSAGE_ERROR: {
+ QUniqueGErrorHandle err;
+ QGString debug;
+ gst_message_parse_error(const_cast<GstMessage *>(msg), &err, &debug);
+
+ dbg << ", Error: " << err << " (" << debug << ")";
+ break;
+ }
+
+ case GST_MESSAGE_WARNING: {
+ QUniqueGErrorHandle err;
+ QGString debug;
+ gst_message_parse_warning(const_cast<GstMessage *>(msg), &err, &debug);
+
+ dbg << ", Warning: " << err << " (" << debug << ")";
+ break;
+ }
+
+ case GST_MESSAGE_INFO: {
+ QUniqueGErrorHandle err;
+ QGString debug;
+ gst_message_parse_info(const_cast<GstMessage *>(msg), &err, &debug);
+
+ dbg << ", Info: " << err << " (" << debug << ")";
+ break;
+ }
+
+ case GST_MESSAGE_TAG: {
+ QGstTagListHandle tagList;
+ gst_message_parse_tag(const_cast<GstMessage *>(msg), &tagList);
+
+ dbg << ", Tags: " << tagList;
+ break;
+ }
+
+ case GST_MESSAGE_QOS: {
+ gboolean live;
+ guint64 running_time;
+ guint64 stream_time;
+ guint64 timestamp;
+ guint64 duration;
+
+ gst_message_parse_qos(const_cast<GstMessage *>(msg), &live, &running_time, &stream_time,
+ &timestamp, &duration);
+
+ dbg << ", Live: " << bool(live) << ", Running time: " << Timepoint{ running_time }
+ << ", Stream time: " << Timepoint{ stream_time }
+ << ", Timestamp: " << Timepoint{ timestamp } << ", Duration: " << Timepoint{ duration };
+ break;
+ }
+
+ case GST_MESSAGE_STATE_CHANGED: {
+ GstState oldState;
+ GstState newState;
+ GstState pending;
+
+ gst_message_parse_state_changed(const_cast<GstMessage *>(msg), &oldState, &newState,
+ &pending);
+
+ dbg << ", Transition: " << oldState << "->" << newState;
+
+ if (pending != GST_STATE_VOID_PENDING)
+ dbg << ", Pending State: " << pending;
+ break;
+ }
+
+ case GST_MESSAGE_STREAM_COLLECTION: {
+ QGstStreamCollectionHandle collection;
+ gst_message_parse_stream_collection(const_cast<GstMessage *>(msg), &collection);
+
+ dbg << ", " << collection;
+ break;
+ }
+
+ case GST_MESSAGE_STREAMS_SELECTED: {
+ QGstStreamCollectionHandle collection;
+ gst_message_parse_streams_selected(const_cast<GstMessage *>(msg), &collection);
+
+ dbg << ", " << collection;
+ break;
+ }
+
+ case GST_MESSAGE_STREAM_STATUS: {
+ GstStreamStatusType streamStatus;
+ gst_message_parse_stream_status(const_cast<GstMessage *>(msg), &streamStatus, nullptr);
+
+ dbg << ", Stream Status: " << streamStatus;
+ break;
+ }
+
+ default:
+ break;
+ }
+ return dbg;
+}
+
+QDebug operator<<(QDebug dbg, const GstTagList *tagList)
+{
+ dbg << QGString{ gst_tag_list_to_string(tagList) };
+ return dbg;
+}
+
+QDebug operator<<(QDebug dbg, const GstQuery *query)
+{
+ dbg << GST_QUERY_TYPE_NAME(query);
+ return dbg;
+}
+
+QDebug operator<<(QDebug dbg, const GstEvent *event)
+{
+ dbg << GST_EVENT_TYPE_NAME(event);
+ return dbg;
+}
+
+QDebug operator<<(QDebug dbg, const GstPadTemplate *padTemplate)
+{
+ QGstCaps caps = padTemplate
+ ? QGstCaps{ gst_pad_template_get_caps(const_cast<GstPadTemplate *>(padTemplate)), QGstCaps::HasRef, }
+ : QGstCaps{};
+
+ dbg << caps;
+ return dbg;
+}
+
+QDebug operator<<(QDebug dbg, const GstStreamCollection *streamCollection)
+{
+ GstStreamCollection *collection = const_cast<GstStreamCollection *>(streamCollection);
+ guint size = gst_stream_collection_get_size(collection);
+
+ dbg << "Stream Collection: {";
+ for (guint index = 0; index != size; ++index) {
+ dbg << gst_stream_collection_get_stream(collection, index);
+ if (index + 1 != size)
+ dbg << ", ";
+ }
+
+ dbg << "}";
+ return dbg;
+}
+
+QDebug operator<<(QDebug dbg, const GstStream *cstream)
+{
+ GstStream *stream = const_cast<GstStream *>(cstream);
+
+ dbg << "GstStream { ";
+ dbg << "Type: " << gst_stream_type_get_name(gst_stream_get_stream_type(stream));
+
+ QGstTagListHandle tagList{
+ gst_stream_get_tags(stream),
+ QGstTagListHandle::HasRef,
+ };
+
+ if (tagList)
+ dbg << ", Tags: " << tagList;
+
+ QGstCaps caps{
+ gst_stream_get_caps(stream),
+ QGstCaps::HasRef,
+ };
+
+ if (caps)
+ dbg << ", Caps: " << caps;
+
+ dbg << "}";
+
+ return dbg;
+}
+
+QDebug operator<<(QDebug dbg, GstState state)
+{
+ return dbg << gst_element_state_get_name(state);
+}
+
+QDebug operator<<(QDebug dbg, GstStateChange transition)
+{
+ return dbg << gst_state_change_get_name(transition);
+}
+
+QDebug operator<<(QDebug dbg, GstStateChangeReturn stateChangeReturn)
+{
+ return dbg << gst_element_state_change_return_get_name(stateChangeReturn);
+}
+
+QDebug operator<<(QDebug dbg, GstMessageType type)
+{
+ return dbg << gst_message_type_get_name(type);
+}
+
+#define ADD_ENUM_SWITCH(value) \
+ case value: \
+ return dbg << #value; \
+ static_assert(true, "enforce semicolon")
+
+QDebug operator<<(QDebug dbg, GstPadDirection direction)
+{
+ switch (direction) {
+ ADD_ENUM_SWITCH(GST_PAD_UNKNOWN);
+ ADD_ENUM_SWITCH(GST_PAD_SRC);
+ ADD_ENUM_SWITCH(GST_PAD_SINK);
+ default:
+ Q_UNREACHABLE_RETURN(dbg);
+ }
+}
+
+QDebug operator<<(QDebug dbg, GstStreamStatusType type)
+{
+ switch (type) {
+ ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_CREATE);
+ ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_ENTER);
+ ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_LEAVE);
+ ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_DESTROY);
+ ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_START);
+ ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_PAUSE);
+ ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_STOP);
+ default:
+ Q_UNREACHABLE_RETURN(dbg);
+ }
+}
+
+#undef ADD_ENUM_SWITCH
+
+QDebug operator<<(QDebug dbg, const GValue *value)
+{
+ switch (G_VALUE_TYPE(value)) {
+ case G_TYPE_STRING:
+ return dbg << g_value_get_string(value);
+ case G_TYPE_BOOLEAN:
+ return dbg << g_value_get_boolean(value);
+ case G_TYPE_ULONG:
+ return dbg << g_value_get_ulong(value);
+ case G_TYPE_LONG:
+ return dbg << g_value_get_long(value);
+ case G_TYPE_UINT:
+ return dbg << g_value_get_uint(value);
+ case G_TYPE_INT:
+ return dbg << g_value_get_int(value);
+ case G_TYPE_UINT64:
+ return dbg << g_value_get_uint64(value);
+ case G_TYPE_INT64:
+ return dbg << g_value_get_int64(value);
+ case G_TYPE_FLOAT:
+ return dbg << g_value_get_float(value);
+ case G_TYPE_DOUBLE:
+ return dbg << g_value_get_double(value);
+ default:
+ break;
+ }
+
+ if (GST_VALUE_HOLDS_BITMASK(value)) {
+ QDebugStateSaver saver(dbg);
+ return dbg << Qt::hex << gst_value_get_bitmask(value);
+ }
+
+ if (GST_VALUE_HOLDS_FRACTION(value))
+ return dbg << gst_value_get_fraction_numerator(value) << "/"
+ << gst_value_get_fraction_denominator(value);
+
+ if (GST_VALUE_HOLDS_CAPS(value))
+ return dbg << gst_value_get_caps(value);
+
+ if (GST_VALUE_HOLDS_STRUCTURE(value))
+ return dbg << gst_value_get_structure(value);
+
+ if (GST_VALUE_HOLDS_ARRAY(value)) {
+ const guint size = gst_value_array_get_size(value);
+ const guint last = size - 1;
+ dbg << "[";
+ for (guint index = 0; index != size; ++index) {
+ dbg << gst_value_array_get_value(value, index);
+ if (index != last)
+ dbg << ", ";
+ }
+ dbg << "}";
+ return dbg;
+ }
+
+ if (G_VALUE_TYPE(value) == GST_TYPE_PAD_DIRECTION) {
+ GstPadDirection direction = static_cast<GstPadDirection>(g_value_get_enum(value));
+ return dbg << direction;
+ }
+
+ if (G_VALUE_TYPE(value) == GST_TYPE_PAD_TEMPLATE) {
+ GstPadTemplate *padTemplate = static_cast<GstPadTemplate *>(g_value_get_object(value));
+ return dbg << padTemplate;
+ }
+
+ dbg << "(not implemented: " << G_VALUE_TYPE_NAME(value) << ")";
+
+ return dbg;
+}
+
+QDebug operator<<(QDebug dbg, const GError *error)
+{
+ return dbg << error->message;
+}
+
+QCompactGstMessageAdaptor::QCompactGstMessageAdaptor(const QGstreamerMessage &m)
+ : QCompactGstMessageAdaptor{
+ m.message(),
+ }
+{
+}
+
+QCompactGstMessageAdaptor::QCompactGstMessageAdaptor(GstMessage *m)
+ : msg{
+ m,
+ }
+{
+}
+
+QDebug operator<<(QDebug dbg, const QCompactGstMessageAdaptor &m)
+{
+ std::optional<QDebugStateSaver> saver(dbg);
+ dbg.nospace();
+
+ switch (GST_MESSAGE_TYPE(m.msg)) {
+ case GST_MESSAGE_ERROR: {
+ QUniqueGErrorHandle err;
+ QGString debug;
+ gst_message_parse_error(m.msg, &err, &debug);
+ dbg << err << " (" << debug << ")";
+ return dbg;
+ }
+
+ case GST_MESSAGE_WARNING: {
+ QUniqueGErrorHandle err;
+ QGString debug;
+ gst_message_parse_warning(m.msg, &err, &debug);
+ dbg << err << " (" << debug << ")";
+ return dbg;
+ }
+
+ case GST_MESSAGE_INFO: {
+ QUniqueGErrorHandle err;
+ QGString debug;
+ gst_message_parse_info(m.msg, &err, &debug);
+ dbg << err << " (" << debug << ")";
+ return dbg;
+ }
+
+ case GST_MESSAGE_STATE_CHANGED: {
+ GstState oldState;
+ GstState newState;
+ GstState pending;
+
+ gst_message_parse_state_changed(m.msg, &oldState, &newState, &pending);
+
+ dbg << oldState << " -> " << newState;
+ if (pending != GST_STATE_VOID_PENDING)
+ dbg << " (pending: " << pending << ")";
+ return dbg;
+ }
+
+ default: {
+ saver.reset();
+ return dbg << m.msg;
+ }
+ }
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgst_debug_p.h b/src/plugins/multimedia/gstreamer/common/qgst_debug_p.h
new file mode 100644
index 000000000..df13c6c13
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgst_debug_p.h
@@ -0,0 +1,74 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGST_DEBUG_P_H
+#define QGST_DEBUG_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qgst_p.h"
+#include <qdebug.h>
+
+QT_BEGIN_NAMESPACE
+
+class QGstreamerMessage;
+
+QDebug operator<<(QDebug, const QGstCaps &);
+QDebug operator<<(QDebug, const QGstStructureView &);
+QDebug operator<<(QDebug, const QGstElement &);
+QDebug operator<<(QDebug, const QGstPad &);
+QDebug operator<<(QDebug, const QGString &);
+QDebug operator<<(QDebug, const QGValue &);
+QDebug operator<<(QDebug, const QGstreamerMessage &);
+QDebug operator<<(QDebug, const QUniqueGErrorHandle &);
+QDebug operator<<(QDebug, const QUniqueGStringHandle &);
+QDebug operator<<(QDebug, const QGstStreamCollectionHandle &);
+QDebug operator<<(QDebug, const QGstStreamHandle &);
+QDebug operator<<(QDebug, const QGstTagListHandle &);
+
+QDebug operator<<(QDebug, const GstCaps *);
+QDebug operator<<(QDebug, const GstVideoInfo *);
+QDebug operator<<(QDebug, const GstStructure *);
+QDebug operator<<(QDebug, const GstObject *);
+QDebug operator<<(QDebug, const GstElement *);
+QDebug operator<<(QDebug, const GstPad *);
+QDebug operator<<(QDebug, const GstDevice *);
+QDebug operator<<(QDebug, const GstMessage *);
+QDebug operator<<(QDebug, const GstTagList *);
+QDebug operator<<(QDebug, const GstQuery *);
+QDebug operator<<(QDebug, const GstEvent *);
+QDebug operator<<(QDebug, const GstPadTemplate *);
+QDebug operator<<(QDebug, const GstStreamCollection *);
+QDebug operator<<(QDebug, const GstStream *);
+
+QDebug operator<<(QDebug, GstState);
+QDebug operator<<(QDebug, GstStateChange);
+QDebug operator<<(QDebug, GstStateChangeReturn);
+QDebug operator<<(QDebug, GstMessageType);
+QDebug operator<<(QDebug, GstPadDirection);
+QDebug operator<<(QDebug, GstStreamStatusType);
+
+QDebug operator<<(QDebug, const GValue *);
+QDebug operator<<(QDebug, const GError *);
+
+struct QCompactGstMessageAdaptor
+{
+ explicit QCompactGstMessageAdaptor(const QGstreamerMessage &m);
+ explicit QCompactGstMessageAdaptor(GstMessage *m);
+ GstMessage *msg;
+};
+
+QDebug operator<<(QDebug, const QCompactGstMessageAdaptor &);
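+
+// Illustrative usage (a sketch): prints only the interesting payload of
+// error/warning/info/state-changed messages instead of the full dump.
+//
+//     qDebug() << QCompactGstMessageAdaptor(msg);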
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/gstreamer/common/qgst_handle_types_p.h b/src/plugins/multimedia/gstreamer/common/qgst_handle_types_p.h
new file mode 100644
index 000000000..e813f4181
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgst_handle_types_p.h
@@ -0,0 +1,270 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGST_HANDLE_TYPES_P_H
+#define QGST_HANDLE_TYPES_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/private/qcore_unix_p.h>
+#include <QtCore/private/quniquehandle_p.h>
+#include <QtCore/qtconfigmacros.h>
+
+#include <QtMultimedia/private/qtmultimedia-config_p.h>
+
+#include <gst/gst.h>
+
+#if QT_CONFIG(gstreamer_gl)
+# include <gst/gl/gstglcontext.h>
+#endif
+
+QT_BEGIN_NAMESPACE
+
+namespace QGstImpl {
+
+template <typename HandleTraits>
+struct QSharedHandle : private QUniqueHandle<HandleTraits>
+{
+ using BaseClass = QUniqueHandle<HandleTraits>;
+
+ enum RefMode { HasRef, NeedsRef };
+
+ QSharedHandle() = default;
+
+ explicit QSharedHandle(typename HandleTraits::Type object, RefMode mode)
+ : BaseClass{ mode == NeedsRef ? HandleTraits::ref(object) : object }
+ {
+ }
+
+ QSharedHandle(const QSharedHandle &o)
+ : BaseClass{
+ HandleTraits::ref(o.get()),
+ }
+ {
+ }
+
+ QSharedHandle(QSharedHandle &&) noexcept = default;
+
+ QSharedHandle &operator=(const QSharedHandle &o) // NOLINT: bugprone-unhandled-self-assign
+ {
+ if (BaseClass::get() != o.get())
+ reset(HandleTraits::ref(o.get()));
+ return *this;
+ };
+
+ QSharedHandle &operator=(QSharedHandle &&) noexcept = default;
+
+ [[nodiscard]] friend bool operator==(const QSharedHandle &lhs,
+ const QSharedHandle &rhs) noexcept
+ {
+ return lhs.get() == rhs.get();
+ }
+
+ [[nodiscard]] friend bool operator!=(const QSharedHandle &lhs,
+ const QSharedHandle &rhs) noexcept
+ {
+ return lhs.get() != rhs.get();
+ }
+
+ [[nodiscard]] friend bool operator<(const QSharedHandle &lhs, const QSharedHandle &rhs) noexcept
+ {
+ return lhs.get() < rhs.get();
+ }
+
+ [[nodiscard]] friend bool operator<=(const QSharedHandle &lhs,
+ const QSharedHandle &rhs) noexcept
+ {
+ return lhs.get() <= rhs.get();
+ }
+
+ [[nodiscard]] friend bool operator>(const QSharedHandle &lhs, const QSharedHandle &rhs) noexcept
+ {
+ return lhs.get() > rhs.get();
+ }
+
+ [[nodiscard]] friend bool operator>=(const QSharedHandle &lhs,
+ const QSharedHandle &rhs) noexcept
+ {
+ return lhs.get() >= rhs.get();
+ }
+
+ using BaseClass::get;
+ using BaseClass::isValid;
+ using BaseClass::operator bool;
+ using BaseClass::release;
+ using BaseClass::reset;
+ using BaseClass::operator&;
+ using BaseClass::close;
+};
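+
+// A sketch of the RefMode semantics (not part of the change): HasRef adopts a
+// reference the caller already owns ("transfer full"), NeedsRef takes an
+// additional one ("transfer none"); `someBorrowedTagList` is hypothetical:
+//
+//     QGstTagListHandle owned{ gst_tag_list_new_empty(), QGstTagListHandle::HasRef };
+//     QGstTagListHandle borrowed{ someBorrowedTagList, QGstTagListHandle::NeedsRef };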
+
+struct QGstTagListHandleTraits
+{
+ using Type = GstTagList *;
+ static constexpr Type invalidValue() noexcept { return nullptr; }
+ static bool close(Type handle) noexcept
+ {
+ gst_tag_list_unref(handle);
+ return true;
+ }
+ static Type ref(Type handle) noexcept { return gst_tag_list_ref(handle); }
+};
+
+struct QGstSampleHandleTraits
+{
+ using Type = GstSample *;
+ static constexpr Type invalidValue() noexcept { return nullptr; }
+ static bool close(Type handle) noexcept
+ {
+ gst_sample_unref(handle);
+ return true;
+ }
+ static Type ref(Type handle) noexcept { return gst_sample_ref(handle); }
+};
+
+struct QUniqueGstStructureHandleTraits
+{
+ using Type = GstStructure *;
+ static constexpr Type invalidValue() noexcept { return nullptr; }
+ static bool close(Type handle) noexcept
+ {
+ gst_structure_free(handle);
+ return true;
+ }
+};
+
+struct QUniqueGStringHandleTraits
+{
+ using Type = gchar *;
+ static constexpr Type invalidValue() noexcept { return nullptr; }
+ static bool close(Type handle) noexcept
+ {
+ g_free(handle);
+ return true;
+ }
+};
+
+struct QUniqueGErrorHandleTraits
+{
+ using Type = GError *;
+ static constexpr Type invalidValue() noexcept { return nullptr; }
+ static bool close(Type handle) noexcept
+ {
+ g_error_free(handle);
+ return true;
+ }
+};
+
+struct QUniqueGstDateTimeHandleTraits
+{
+ using Type = GstDateTime *;
+ static constexpr Type invalidValue() noexcept { return nullptr; }
+ static bool close(Type handle) noexcept
+ {
+ gst_date_time_unref(handle);
+ return true;
+ }
+};
+
+struct QFileDescriptorHandleTraits
+{
+ using Type = int;
+ static constexpr Type invalidValue() noexcept { return -1; }
+ static bool close(Type fd) noexcept
+ {
+ int closeResult = qt_safe_close(fd);
+ return closeResult == 0;
+ }
+};
+
+template <typename GstType>
+struct QGstHandleHelper
+{
+ struct QGstSafeObjectHandleTraits
+ {
+ using Type = GstType *;
+ static constexpr Type invalidValue() noexcept { return nullptr; }
+ static bool close(Type handle) noexcept
+ {
+ gst_object_unref(G_OBJECT(handle));
+ return true;
+ }
+
+ static Type ref(Type handle) noexcept
+ {
+ gst_object_ref_sink(G_OBJECT(handle));
+ return handle;
+ }
+ };
+
+ using SharedHandle = QSharedHandle<QGstSafeObjectHandleTraits>;
+ using UniqueHandle = QUniqueHandle<QGstSafeObjectHandleTraits>;
+};
+
+template <typename GstType>
+struct QGstMiniObjectHandleHelper
+{
+ struct Traits
+ {
+ using Type = GstType *;
+ static constexpr Type invalidValue() noexcept { return nullptr; }
+ static bool close(Type handle) noexcept
+ {
+ gst_mini_object_unref(GST_MINI_OBJECT_CAST(handle));
+ return true;
+ }
+
+ static Type ref(Type handle) noexcept
+ {
+ if (GST_MINI_OBJECT_CAST(handle))
+ gst_mini_object_ref(GST_MINI_OBJECT_CAST(handle));
+ return handle;
+ }
+ };
+
+ using SharedHandle = QSharedHandle<Traits>;
+ using UniqueHandle = QUniqueHandle<Traits>;
+};
+
+} // namespace QGstImpl
+
+using QGstClockHandle = QGstImpl::QGstHandleHelper<GstClock>::UniqueHandle;
+using QGstElementHandle = QGstImpl::QGstHandleHelper<GstElement>::UniqueHandle;
+using QGstElementFactoryHandle = QGstImpl::QGstHandleHelper<GstElementFactory>::SharedHandle;
+using QGstDeviceHandle = QGstImpl::QGstHandleHelper<GstDevice>::SharedHandle;
+using QGstDeviceMonitorHandle = QGstImpl::QGstHandleHelper<GstDeviceMonitor>::UniqueHandle;
+using QGstBusHandle = QGstImpl::QGstHandleHelper<GstBus>::UniqueHandle;
+using QGstStreamCollectionHandle = QGstImpl::QGstHandleHelper<GstStreamCollection>::SharedHandle;
+using QGstStreamHandle = QGstImpl::QGstHandleHelper<GstStream>::SharedHandle;
+
+using QGstTagListHandle = QGstImpl::QSharedHandle<QGstImpl::QGstTagListHandleTraits>;
+using QGstSampleHandle = QGstImpl::QSharedHandle<QGstImpl::QGstSampleHandleTraits>;
+
+using QUniqueGstStructureHandle = QUniqueHandle<QGstImpl::QUniqueGstStructureHandleTraits>;
+using QUniqueGStringHandle = QUniqueHandle<QGstImpl::QUniqueGStringHandleTraits>;
+using QUniqueGErrorHandle = QUniqueHandle<QGstImpl::QUniqueGErrorHandleTraits>;
+using QUniqueGstDateTimeHandle = QUniqueHandle<QGstImpl::QUniqueGstDateTimeHandleTraits>;
+using QFileDescriptorHandle = QUniqueHandle<QGstImpl::QFileDescriptorHandleTraits>;
+using QGstBufferHandle = QGstImpl::QGstMiniObjectHandleHelper<GstBuffer>::SharedHandle;
+using QGstContextHandle = QGstImpl::QGstMiniObjectHandleHelper<GstContext>::UniqueHandle;
+using QGstGstDateTimeHandle = QGstImpl::QGstMiniObjectHandleHelper<GstDateTime>::SharedHandle;
+using QGstPluginFeatureHandle = QGstImpl::QGstHandleHelper<GstPluginFeature>::SharedHandle;
+using QGstQueryHandle = QGstImpl::QGstMiniObjectHandleHelper<GstQuery>::SharedHandle;
+
+#if QT_CONFIG(gstreamer_gl)
+using QGstGLContextHandle = QGstImpl::QGstHandleHelper<GstGLContext>::UniqueHandle;
+using QGstGLDisplayHandle = QGstImpl::QGstHandleHelper<GstGLDisplay>::UniqueHandle;
+#endif
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/gstreamer/common/qgst_p.h b/src/plugins/multimedia/gstreamer/common/qgst_p.h
new file mode 100644
index 000000000..865b5895d
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgst_p.h
@@ -0,0 +1,853 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGST_P_H
+#define QGST_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qdebug.h>
+#include <QtCore/qlist.h>
+#include <QtCore/qsemaphore.h>
+
+#include <QtMultimedia/qaudioformat.h>
+#include <QtMultimedia/qvideoframe.h>
+#include <QtMultimedia/private/qtmultimediaglobal_p.h>
+#include <QtMultimedia/private/qmultimediautils_p.h>
+#include <QtMultimedia/private/qplatformmediaplayer_p.h>
+
+#include <gst/gst.h>
+#include <gst/video/video-info.h>
+
+#include "qgst_handle_types_p.h"
+
+#include <type_traits>
+
+#if QT_CONFIG(gstreamer_photography)
+# define GST_USE_UNSTABLE_API
+# include <gst/interfaces/photography.h>
+# undef GST_USE_UNSTABLE_API
+#endif
+
+#if QT_CONFIG(gstreamer_app)
+# include <gst/app/gstappsink.h>
+# include <gst/app/gstappsrc.h>
+#endif
+
+QT_BEGIN_NAMESPACE
+
+namespace QGstImpl {
+
+template <typename T>
+struct GstObjectTraits
+{
+ // using Type = T;
+ // template <typename U>
+ // static bool isObjectOfType(U *);
+ // template <typename U>
+ // static T *cast(U *);
+};
+
+#define QGST_DEFINE_CAST_TRAITS(ClassName, MACRO_LABEL) \
+ template <> \
+ struct GstObjectTraits<ClassName> \
+ { \
+ using Type = ClassName; \
+ template <typename U> \
+ static bool isObjectOfType(U *arg) \
+ { \
+ return GST_IS_##MACRO_LABEL(arg); \
+ } \
+ template <typename U> \
+ static Type *cast(U *arg) \
+ { \
+ return GST_##MACRO_LABEL##_CAST(arg); \
+ } \
+ template <typename U> \
+ static Type *checked_cast(U *arg) \
+ { \
+ return GST_##MACRO_LABEL(arg); \
+ } \
+ }; \
+ static_assert(true, "ensure semicolon")
+
+#define QGST_DEFINE_CAST_TRAITS_FOR_INTERFACE(ClassName, MACRO_LABEL) \
+ template <> \
+ struct GstObjectTraits<ClassName> \
+ { \
+ using Type = ClassName; \
+ template <typename U> \
+ static bool isObjectOfType(U *arg) \
+ { \
+ return GST_IS_##MACRO_LABEL(arg); \
+ } \
+ template <typename U> \
+ static Type *cast(U *arg) \
+ { \
+ return checked_cast(arg); \
+ } \
+ template <typename U> \
+ static Type *checked_cast(U *arg) \
+ { \
+ return GST_##MACRO_LABEL(arg); \
+ } \
+ }; \
+ static_assert(true, "ensure semicolon")
+
+QGST_DEFINE_CAST_TRAITS(GstBin, BIN);
+QGST_DEFINE_CAST_TRAITS(GstClock, CLOCK);
+QGST_DEFINE_CAST_TRAITS(GstElement, ELEMENT);
+QGST_DEFINE_CAST_TRAITS(GstObject, OBJECT);
+QGST_DEFINE_CAST_TRAITS(GstPad, PAD);
+QGST_DEFINE_CAST_TRAITS(GstPipeline, PIPELINE);
+QGST_DEFINE_CAST_TRAITS(GstBaseSink, BASE_SINK);
+QGST_DEFINE_CAST_TRAITS(GstBaseSrc, BASE_SRC);
+
+QGST_DEFINE_CAST_TRAITS_FOR_INTERFACE(GstTagSetter, TAG_SETTER);
+
+#if QT_CONFIG(gstreamer_app)
+QGST_DEFINE_CAST_TRAITS(GstAppSink, APP_SINK);
+QGST_DEFINE_CAST_TRAITS(GstAppSrc, APP_SRC);
+#endif
+
+template <>
+struct GstObjectTraits<GObject>
+{
+ using Type = GObject;
+ template <typename U>
+ static bool isObjectOfType(U *arg)
+ {
+ return G_IS_OBJECT(arg);
+ }
+ template <typename U>
+ static Type *cast(U *arg)
+ {
+ return G_OBJECT(arg);
+ }
+ template <typename U>
+ static Type *checked_cast(U *arg)
+ {
+ return G_OBJECT(arg);
+ }
+};
+
+#undef QGST_DEFINE_CAST_TRAITS
+#undef QGST_DEFINE_CAST_TRAITS_FOR_INTERFACE
+
+} // namespace QGstImpl
+
+template <typename DestinationType, typename SourceType>
+bool qIsGstObjectOfType(SourceType *arg)
+{
+ using Traits = QGstImpl::GstObjectTraits<DestinationType>;
+ return arg && Traits::isObjectOfType(arg);
+}
+
+template <typename DestinationType, typename SourceType>
+DestinationType *qGstSafeCast(SourceType *arg)
+{
+ using Traits = QGstImpl::GstObjectTraits<DestinationType>;
+ if (arg && Traits::isObjectOfType(arg))
+ return Traits::cast(arg);
+ return nullptr;
+}
+
+template <typename DestinationType, typename SourceType>
+DestinationType *qGstCheckedCast(SourceType *arg)
+{
+ using Traits = QGstImpl::GstObjectTraits<DestinationType>;
+ if (arg)
+ Q_ASSERT(Traits::isObjectOfType(arg));
+ return Traits::cast(arg);
+}
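+
+// Illustrative usage of the three cast helpers (a sketch; `object` stands for
+// any GstElement *):
+//
+//     if (qIsGstObjectOfType<GstBin>(object))              // type check only
+//         ...;
+//     GstBin *maybeBin = qGstSafeCast<GstBin>(object);     // nullptr on mismatch
+//     GstBin *surelyBin = qGstCheckedCast<GstBin>(object); // asserts on mismatch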
+
+class QSize;
+class QGstStructureView;
+class QGstCaps;
+class QGstPipelinePrivate;
+class QCameraFormat;
+
+template <typename T> struct QGRange
+{
+ T min;
+ T max;
+};
+
+struct QGString : QUniqueGStringHandle
+{
+ using QUniqueGStringHandle::QUniqueGStringHandle;
+
+ QLatin1StringView asStringView() const { return QLatin1StringView{ get() }; }
+ QString toQString() const { return QString::fromUtf8(get()); }
+};
+
+class QGValue
+{
+public:
+ explicit QGValue(const GValue *v);
+ const GValue *value;
+
+ bool isNull() const;
+
+ std::optional<bool> toBool() const;
+ std::optional<int> toInt() const;
+    std::optional<qint64> toInt64() const;
+ template<typename T>
+ T *getPointer() const
+ {
+ return value ? static_cast<T *>(g_value_get_pointer(value)) : nullptr;
+ }
+
+ const char *toString() const;
+ std::optional<float> getFraction() const;
+ std::optional<QGRange<float>> getFractionRange() const;
+ std::optional<QGRange<int>> toIntRange() const;
+
+ QGstStructureView toStructure() const;
+ QGstCaps toCaps() const;
+
+ bool isList() const;
+ int listSize() const;
+ QGValue at(int index) const;
+
+ QList<QAudioFormat::SampleFormat> getSampleFormats() const;
+};
+
+namespace QGstPointerImpl {
+
+template <typename RefcountedObject>
+struct QGstRefcountingAdaptor;
+
+template <typename GstType>
+class QGstObjectWrapper
+{
+ using Adaptor = QGstRefcountingAdaptor<GstType>;
+
+ GstType *m_object = nullptr;
+
+public:
+ enum RefMode { HasRef, NeedsRef };
+
+ constexpr QGstObjectWrapper() = default;
+
+ explicit QGstObjectWrapper(GstType *object, RefMode mode) : m_object(object)
+ {
+ if (m_object && mode == NeedsRef)
+ Adaptor::ref(m_object);
+ }
+
+ QGstObjectWrapper(const QGstObjectWrapper &other) : m_object(other.m_object)
+ {
+ if (m_object)
+ Adaptor::ref(m_object);
+ }
+
+ ~QGstObjectWrapper()
+ {
+ if (m_object)
+ Adaptor::unref(m_object);
+ }
+
+ QGstObjectWrapper(QGstObjectWrapper &&other) noexcept
+ : m_object(std::exchange(other.m_object, nullptr))
+ {
+ }
+
+ QGstObjectWrapper &
+ operator=(const QGstObjectWrapper &other) // NOLINT: bugprone-unhandled-self-assign
+ {
+ if (m_object != other.m_object) {
+ GstType *originalObject = m_object;
+
+ m_object = other.m_object;
+ if (m_object)
+ Adaptor::ref(m_object);
+ if (originalObject)
+ Adaptor::unref(originalObject);
+ }
+ return *this;
+ }
+
+ QGstObjectWrapper &operator=(QGstObjectWrapper &&other) noexcept
+ {
+ if (this != &other) {
+ GstType *originalObject = m_object;
+ m_object = std::exchange(other.m_object, nullptr);
+
+ if (originalObject)
+ Adaptor::unref(originalObject);
+ }
+ return *this;
+ }
+
+ friend bool operator==(const QGstObjectWrapper &a, const QGstObjectWrapper &b)
+ {
+ return a.m_object == b.m_object;
+ }
+ friend bool operator!=(const QGstObjectWrapper &a, const QGstObjectWrapper &b)
+ {
+ return a.m_object != b.m_object;
+ }
+
+ explicit operator bool() const { return bool(m_object); }
+ bool isNull() const { return !m_object; }
+ GstType *release() { return std::exchange(m_object, nullptr); }
+
+protected:
+ GstType *get() const { return m_object; }
+};
+
+} // namespace QGstPointerImpl
+
+class QGstreamerMessage;
+
+class QGstStructureView
+{
+public:
+ const GstStructure *structure = nullptr;
+ explicit QGstStructureView(const GstStructure *);
+ explicit QGstStructureView(const QUniqueGstStructureHandle &);
+
+ QUniqueGstStructureHandle clone() const;
+
+ bool isNull() const;
+ QByteArrayView name() const;
+ QGValue operator[](const char *fieldname) const;
+
+ QGstCaps caps() const;
+ QGstTagListHandle tags() const;
+
+ QSize resolution() const;
+ QVideoFrameFormat::PixelFormat pixelFormat() const;
+ QGRange<float> frameRateRange() const;
+ QGstreamerMessage getMessage();
+ std::optional<Fraction> pixelAspectRatio() const;
+ QSize nativeSize() const;
+};
+
+template <>
+struct QGstPointerImpl::QGstRefcountingAdaptor<GstCaps>
+{
+ static void ref(GstCaps *arg) noexcept { gst_caps_ref(arg); }
+ static void unref(GstCaps *arg) noexcept { gst_caps_unref(arg); }
+};
+
+class QGstCaps : public QGstPointerImpl::QGstObjectWrapper<GstCaps>
+{
+ using BaseClass = QGstPointerImpl::QGstObjectWrapper<GstCaps>;
+
+public:
+ using BaseClass::BaseClass;
+ QGstCaps(const QGstCaps &) = default;
+ QGstCaps(QGstCaps &&) noexcept = default;
+ QGstCaps &operator=(const QGstCaps &) = default;
+ QGstCaps &operator=(QGstCaps &&) noexcept = default;
+
+ enum MemoryFormat { CpuMemory, GLTexture, DMABuf };
+
+ int size() const;
+ QGstStructureView at(int index) const;
+ GstCaps *caps() const;
+
+ MemoryFormat memoryFormat() const;
+ std::optional<std::pair<QVideoFrameFormat, GstVideoInfo>> formatAndVideoInfo() const;
+
+ void addPixelFormats(const QList<QVideoFrameFormat::PixelFormat> &formats, const char *modifier = nullptr);
+ void setResolution(QSize);
+
+ static QGstCaps create();
+
+ static QGstCaps fromCameraFormat(const QCameraFormat &format);
+
+ QGstCaps copy() const;
+};
+
+template <>
+struct QGstPointerImpl::QGstRefcountingAdaptor<GstObject>
+{
+ static void ref(GstObject *arg) noexcept { gst_object_ref_sink(arg); }
+ static void unref(GstObject *arg) noexcept { gst_object_unref(arg); }
+};
+
+class QGObjectHandlerConnection;
+
+class QGstObject : public QGstPointerImpl::QGstObjectWrapper<GstObject>
+{
+ using BaseClass = QGstPointerImpl::QGstObjectWrapper<GstObject>;
+
+public:
+ using BaseClass::BaseClass;
+ QGstObject(const QGstObject &) = default;
+ QGstObject(QGstObject &&) noexcept = default;
+
+ QGstObject &operator=(const QGstObject &) = default;
+ QGstObject &operator=(QGstObject &&) noexcept = default;
+
+ void set(const char *property, const char *str);
+ void set(const char *property, bool b);
+ void set(const char *property, uint i);
+ void set(const char *property, int i);
+ void set(const char *property, qint64 i);
+ void set(const char *property, quint64 i);
+ void set(const char *property, double d);
+ void set(const char *property, const QGstObject &o);
+ void set(const char *property, const QGstCaps &c);
+
+ QGString getString(const char *property) const;
+ QGstStructureView getStructure(const char *property) const;
+ bool getBool(const char *property) const;
+ uint getUInt(const char *property) const;
+ int getInt(const char *property) const;
+ quint64 getUInt64(const char *property) const;
+ qint64 getInt64(const char *property) const;
+ float getFloat(const char *property) const;
+ double getDouble(const char *property) const;
+ QGstObject getObject(const char *property) const;
+
+ QGObjectHandlerConnection connect(const char *name, GCallback callback, gpointer userData);
+ void disconnect(gulong handlerId);
+
+ GType type() const;
+ QLatin1StringView typeName() const;
+ GstObject *object() const;
+ QLatin1StringView name() const;
+};
+
+class QGObjectHandlerConnection
+{
+public:
+ QGObjectHandlerConnection(QGstObject object, gulong handler);
+
+ QGObjectHandlerConnection() = default;
+ QGObjectHandlerConnection(const QGObjectHandlerConnection &) = default;
+ QGObjectHandlerConnection(QGObjectHandlerConnection &&) = default;
+ QGObjectHandlerConnection &operator=(const QGObjectHandlerConnection &) = default;
+ QGObjectHandlerConnection &operator=(QGObjectHandlerConnection &&) = default;
+
+ void disconnect();
+
+private:
+ static constexpr gulong invalidHandlerId = std::numeric_limits<gulong>::max();
+
+ QGstObject object;
+ gulong handlerId = invalidHandlerId;
+};
+
+// disconnects in dtor
+class QGObjectHandlerScopedConnection
+{
+public:
+ QGObjectHandlerScopedConnection(QGObjectHandlerConnection connection);
+
+ QGObjectHandlerScopedConnection() = default;
+ QGObjectHandlerScopedConnection(const QGObjectHandlerScopedConnection &) = delete;
+ QGObjectHandlerScopedConnection &operator=(const QGObjectHandlerScopedConnection &) = delete;
+ QGObjectHandlerScopedConnection(QGObjectHandlerScopedConnection &&) = default;
+ QGObjectHandlerScopedConnection &operator=(QGObjectHandlerScopedConnection &&) = default;
+
+ ~QGObjectHandlerScopedConnection();
+
+ void disconnect();
+
+private:
+ QGObjectHandlerConnection connection;
+};
+
+class QGstElement;
+
+class QGstPad : public QGstObject
+{
+public:
+ using QGstObject::QGstObject;
+ QGstPad(const QGstPad &) = default;
+ QGstPad(QGstPad &&) noexcept = default;
+
+ explicit QGstPad(const QGstObject &o);
+ explicit QGstPad(GstPad *pad, RefMode mode);
+
+ QGstPad &operator=(const QGstPad &) = default;
+ QGstPad &operator=(QGstPad &&) noexcept = default;
+
+ QGstCaps currentCaps() const;
+ QGstCaps queryCaps() const;
+
+ QGstTagListHandle tags() const;
+
+ std::optional<QPlatformMediaPlayer::TrackType>
+ inferTrackTypeFromName() const; // for decodebin3 etc
+
+ bool isLinked() const;
+ bool link(const QGstPad &sink) const;
+ bool unlink(const QGstPad &sink) const;
+ bool unlinkPeer() const;
+ QGstPad peer() const;
+ QGstElement parent() const;
+
+ GstPad *pad() const;
+
+ GstEvent *stickyEvent(GstEventType type);
+ bool sendEvent(GstEvent *event);
+
+ template<auto Member, typename T>
+ void addProbe(T *instance, GstPadProbeType type) {
+ auto callback = [](GstPad *pad, GstPadProbeInfo *info, gpointer userData) {
+ return (static_cast<T *>(userData)->*Member)(QGstPad(pad, NeedsRef), info);
+ };
+
+ gst_pad_add_probe(pad(), type, callback, instance, nullptr);
+ }
+
+ template <typename Functor>
+ void doInIdleProbe(Functor &&work)
+ {
+ struct CallbackData {
+ QSemaphore waitDone;
+ Functor work;
+ };
+
+ CallbackData cd{
+ .waitDone = QSemaphore{},
+ .work = std::forward<Functor>(work),
+ };
+
+        auto callback = [](GstPad *, GstPadProbeInfo *, gpointer p) {
+            auto cd = reinterpret_cast<CallbackData *>(p);
+ cd->work();
+ cd->waitDone.release();
+ return GST_PAD_PROBE_REMOVE;
+ };
+
+ gst_pad_add_probe(pad(), GST_PAD_PROBE_TYPE_IDLE, callback, &cd, nullptr);
+ cd.waitDone.acquire();
+ }
+
+ template<auto Member, typename T>
+ void addEosProbe(T *instance) {
+ auto callback = [](GstPad *, GstPadProbeInfo *info, gpointer userData) {
+ if (GST_EVENT_TYPE(GST_PAD_PROBE_INFO_DATA(info)) != GST_EVENT_EOS)
+ return GST_PAD_PROBE_PASS;
+ (static_cast<T *>(userData)->*Member)();
+ return GST_PAD_PROBE_REMOVE;
+ };
+
+ gst_pad_add_probe(pad(), GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, callback, instance, nullptr);
+ }
+};
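+
+// Illustrative probe usage (a sketch): doInIdleProbe() blocks the calling
+// thread until the functor has run while the pad is idle.
+//
+//     pad.doInIdleProbe([&] {
+//         qUnlinkGstElements(upstream, downstream); // safe: no data is flowing
+//     });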
+
+class QGstClock : public QGstObject
+{
+public:
+ QGstClock() = default;
+ explicit QGstClock(const QGstObject &o);
+ explicit QGstClock(GstClock *clock, RefMode mode);
+
+ GstClock *clock() const;
+ GstClockTime time() const;
+};
+
+class QGstPipeline;
+
+class QGstElement : public QGstObject
+{
+public:
+ using QGstObject::QGstObject;
+
+ QGstElement(const QGstElement &) = default;
+ QGstElement(QGstElement &&) noexcept = default;
+ QGstElement &operator=(const QGstElement &) = default;
+ QGstElement &operator=(QGstElement &&) noexcept = default;
+
+ explicit QGstElement(GstElement *element, RefMode mode);
+ static QGstElement createFromFactory(const char *factory, const char *name = nullptr);
+ static QGstElement createFromFactory(GstElementFactory *, const char *name = nullptr);
+ static QGstElement createFromFactory(const QGstElementFactoryHandle &,
+ const char *name = nullptr);
+ static QGstElement createFromDevice(const QGstDeviceHandle &, const char *name = nullptr);
+ static QGstElement createFromDevice(GstDevice *, const char *name = nullptr);
+ static QGstElement createFromPipelineDescription(const char *);
+ static QGstElement createFromPipelineDescription(const QByteArray &);
+
+ static QGstElementFactoryHandle findFactory(const char *);
+ static QGstElementFactoryHandle findFactory(const QByteArray &name);
+
+ QGstPad staticPad(const char *name) const;
+ QGstPad src() const;
+ QGstPad sink() const;
+ QGstPad getRequestPad(const char *name) const;
+ void releaseRequestPad(const QGstPad &pad) const;
+
+ GstState state(std::chrono::nanoseconds timeout = std::chrono::seconds(0)) const;
+ GstStateChangeReturn setState(GstState state);
+ bool setStateSync(GstState state, std::chrono::nanoseconds timeout = std::chrono::seconds(1));
+ bool syncStateWithParent();
+ bool finishStateChange(std::chrono::nanoseconds timeout = std::chrono::seconds(5));
+
+ void lockState(bool locked);
+ bool isStateLocked() const;
+
+ void sendEvent(GstEvent *event) const;
+ void sendEos() const;
+
+ std::optional<std::chrono::nanoseconds> duration() const;
+ std::optional<std::chrono::milliseconds> durationInMs() const;
+ std::optional<std::chrono::nanoseconds> position() const;
+ std::optional<std::chrono::milliseconds> positionInMs() const;
+
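+    // Connects a member function to the element's "pad-added" signal. The
+    // member takes (QGstElement, QGstPad). A sketch with hypothetical names:
+    //
+    //     decodebin.onPadAdded<&MyPlayer::handleNewPad>(this);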
+ template <auto Member, typename T>
+ QGObjectHandlerConnection onPadAdded(T *instance)
+ {
+ struct Impl
+ {
+ static void callback(GstElement *e, GstPad *pad, gpointer userData)
+ {
+ (static_cast<T *>(userData)->*Member)(QGstElement(e, NeedsRef),
+ QGstPad(pad, NeedsRef));
+ };
+ };
+
+ return connect("pad-added", G_CALLBACK(Impl::callback), instance);
+ }
+ template <auto Member, typename T>
+ QGObjectHandlerConnection onPadRemoved(T *instance)
+ {
+ struct Impl
+ {
+ static void callback(GstElement *e, GstPad *pad, gpointer userData)
+ {
+ (static_cast<T *>(userData)->*Member)(QGstElement(e, NeedsRef),
+ QGstPad(pad, NeedsRef));
+ };
+ };
+
+ return connect("pad-removed", G_CALLBACK(Impl::callback), instance);
+ }
+ template <auto Member, typename T>
+ QGObjectHandlerConnection onNoMorePads(T *instance)
+ {
+ struct Impl
+ {
+ static void callback(GstElement *e, gpointer userData)
+ {
+ (static_cast<T *>(userData)->*Member)(QGstElement(e, NeedsRef));
+ };
+ };
+
+ return connect("no-more-pads", G_CALLBACK(Impl::callback), instance);
+ }
+
+ GstClockTime baseTime() const;
+ void setBaseTime(GstClockTime time) const;
+
+ GstElement *element() const;
+
+ QGstElement getParent() const;
+ QGstPipeline getPipeline() const;
+ void dumpPipelineGraph(const char *filename) const;
+
+private:
+ QGstQueryHandle &positionQuery() const;
+ mutable QGstQueryHandle m_positionQuery;
+};
+
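+// Links two or more elements left to right and warns when linking fails.
+// A sketch with hypothetical elements:
+//
+//     qLinkGstElements(source, convert, sink); // source ! convert ! sink
+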
+template <typename... Ts>
+std::enable_if_t<(std::is_base_of_v<QGstElement, Ts> && ...), void>
+qLinkGstElements(const Ts &...ts)
+{
+ bool link_success = [&] {
+ if constexpr (sizeof...(Ts) == 2)
+ return gst_element_link(ts.element()...);
+ else
+ return gst_element_link_many(ts.element()..., nullptr);
+ }();
+
+ if (Q_UNLIKELY(!link_success)) {
+ qWarning() << "qLinkGstElements: could not link elements: "
+ << std::initializer_list<const char *>{
+ (GST_ELEMENT_NAME(ts.element()))...,
+ };
+ }
+}
+
+template <typename... Ts>
+std::enable_if_t<(std::is_base_of_v<QGstElement, Ts> && ...), void>
+qUnlinkGstElements(const Ts &...ts)
+{
+ if constexpr (sizeof...(Ts) == 2)
+ gst_element_unlink(ts.element()...);
+ else
+ gst_element_unlink_many(ts.element()..., nullptr);
+}
+
+class QGstBin : public QGstElement
+{
+public:
+ using QGstElement::QGstElement;
+ QGstBin(const QGstBin &) = default;
+ QGstBin(QGstBin &&) noexcept = default;
+ QGstBin &operator=(const QGstBin &) = default;
+ QGstBin &operator=(QGstBin &&) noexcept = default;
+
+ explicit QGstBin(GstBin *bin, RefMode mode = NeedsRef);
+ static QGstBin create(const char *name);
+ static QGstBin createFromFactory(const char *factory, const char *name);
+ static QGstBin createFromPipelineDescription(const QByteArray &pipelineDescription,
+ const char *name = nullptr,
+ bool ghostUnlinkedPads = false);
+ static QGstBin createFromPipelineDescription(const char *pipelineDescription,
+ const char *name = nullptr,
+ bool ghostUnlinkedPads = false);
+
+ template <typename... Ts>
+ std::enable_if_t<(std::is_base_of_v<QGstElement, Ts> && ...), void> add(const Ts &...ts)
+ {
+ if constexpr (sizeof...(Ts) == 1)
+ gst_bin_add(bin(), ts.element()...);
+ else
+ gst_bin_add_many(bin(), ts.element()..., nullptr);
+ }
+
+ template <typename... Ts>
+ std::enable_if_t<(std::is_base_of_v<QGstElement, Ts> && ...), void> remove(const Ts &...ts)
+ {
+ if constexpr (sizeof...(Ts) == 1)
+ gst_bin_remove(bin(), ts.element()...);
+ else
+ gst_bin_remove_many(bin(), ts.element()..., nullptr);
+ }
+
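+    // Synchronously drives each element to GST_STATE_NULL before removing it
+    // from the bin, so the elements can safely be disposed of afterwards.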
+ template <typename... Ts>
+ std::enable_if_t<(std::is_base_of_v<QGstElement, Ts> && ...), void>
+ stopAndRemoveElements(Ts... ts)
+ {
+ bool stateChangeSuccessful = (ts.setStateSync(GST_STATE_NULL) && ...);
+ Q_ASSERT(stateChangeSuccessful);
+ remove(ts...);
+ }
+
+ GstBin *bin() const;
+
+ void addGhostPad(const QGstElement &child, const char *name);
+ void addGhostPad(const char *name, const QGstPad &pad);
+
+ bool syncChildrenState();
+
+ void dumpGraph(const char *fileNamePrefix);
+
+ QGstElement findByName(const char *);
+};
+
+class QGstBaseSink : public QGstElement
+{
+public:
+ using QGstElement::QGstElement;
+
+ explicit QGstBaseSink(GstBaseSink *, RefMode);
+
+ QGstBaseSink(const QGstBaseSink &) = default;
+ QGstBaseSink(QGstBaseSink &&) noexcept = default;
+ QGstBaseSink &operator=(const QGstBaseSink &) = default;
+ QGstBaseSink &operator=(QGstBaseSink &&) noexcept = default;
+
+ void setSync(bool);
+
+ GstBaseSink *baseSink() const;
+};
+
+class QGstBaseSrc : public QGstElement
+{
+public:
+ using QGstElement::QGstElement;
+
+ explicit QGstBaseSrc(GstBaseSrc *, RefMode);
+
+ QGstBaseSrc(const QGstBaseSrc &) = default;
+ QGstBaseSrc(QGstBaseSrc &&) noexcept = default;
+ QGstBaseSrc &operator=(const QGstBaseSrc &) = default;
+ QGstBaseSrc &operator=(QGstBaseSrc &&) noexcept = default;
+
+ GstBaseSrc *baseSrc() const;
+};
+
+#if QT_CONFIG(gstreamer_app)
+class QGstAppSink : public QGstBaseSink
+{
+public:
+ using QGstBaseSink::QGstBaseSink;
+
+ explicit QGstAppSink(GstAppSink *, RefMode);
+
+ QGstAppSink(const QGstAppSink &) = default;
+ QGstAppSink(QGstAppSink &&) noexcept = default;
+ QGstAppSink &operator=(const QGstAppSink &) = default;
+ QGstAppSink &operator=(QGstAppSink &&) noexcept = default;
+
+ static QGstAppSink create(const char *name);
+
+ GstAppSink *appSink() const;
+
+ void setMaxBuffers(int);
+# if GST_CHECK_VERSION(1, 24, 0)
+ void setMaxBufferTime(std::chrono::nanoseconds);
+# endif
+
+ void setCaps(const QGstCaps &caps);
+ void setCallbacks(GstAppSinkCallbacks &callbacks, gpointer user_data, GDestroyNotify notify);
+
+ QGstSampleHandle pullSample();
+};
+
+class QGstAppSrc : public QGstBaseSrc
+{
+public:
+ using QGstBaseSrc::QGstBaseSrc;
+
+ explicit QGstAppSrc(GstAppSrc *, RefMode);
+
+ QGstAppSrc(const QGstAppSrc &) = default;
+ QGstAppSrc(QGstAppSrc &&) noexcept = default;
+ QGstAppSrc &operator=(const QGstAppSrc &) = default;
+ QGstAppSrc &operator=(QGstAppSrc &&) noexcept = default;
+
+ static QGstAppSrc create(const char *name);
+
+ GstAppSrc *appSrc() const;
+
+ void setCallbacks(GstAppSrcCallbacks &callbacks, gpointer user_data, GDestroyNotify notify);
+
+ GstFlowReturn pushBuffer(GstBuffer *); // take ownership
+};
+
+#endif
+
+inline GstClockTime qGstClockTimeFromChrono(std::chrono::nanoseconds ns)
+{
+ return ns.count();
+}
+
+QString qGstErrorMessageCannotFindElement(std::string_view element);
+
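+// Returns an error message for the first element factory that cannot be
+// found, or std::nullopt when all of them are available. Typical use, as in
+// the audio input/output factories below:
+//
+//     static const auto error =
+//             qGstErrorMessageIfElementsNotAvailable("autoaudiosrc", "volume");
+//     if (error)
+//         return *error;
+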
+template <typename Arg, typename... Args>
+std::optional<QString> qGstErrorMessageIfElementsNotAvailable(const Arg &arg, Args... args)
+{
+ QGstElementFactoryHandle factory = QGstElement::findFactory(arg);
+ if (!factory)
+ return qGstErrorMessageCannotFindElement(arg);
+
+ if constexpr (sizeof...(args) != 0)
+ return qGstErrorMessageIfElementsNotAvailable(args...);
+ else
+ return std::nullopt;
+}
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/gstreamer/common/qgstappsource.cpp b/src/plugins/multimedia/gstreamer/common/qgstappsource.cpp
new file mode 100644
index 000000000..3c345de82
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstappsource.cpp
@@ -0,0 +1,319 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QDebug>
+
+#include "qgstappsource_p.h"
+#include <common/qgstutils_p.h>
+#include "qnetworkreply.h"
+#include "qloggingcategory.h"
+
+static Q_LOGGING_CATEGORY(qLcAppSrc, "qt.multimedia.appsrc")
+
+QT_BEGIN_NAMESPACE
+
+QMaybe<QGstAppSource *> QGstAppSource::create(QObject *parent)
+{
+ QGstAppSrc appsrc = QGstAppSrc::create("appsrc");
+ if (!appsrc)
+ return qGstErrorMessageCannotFindElement("appsrc");
+
+ return new QGstAppSource(appsrc, parent);
+}
+
+QGstAppSource::QGstAppSource(QGstAppSrc appsrc, QObject *parent)
+ : QObject(parent), m_appSrc(std::move(appsrc))
+{
+ m_appSrc.set("emit-signals", false);
+}
+
+QGstAppSource::~QGstAppSource()
+{
+ m_appSrc.setStateSync(GST_STATE_NULL);
+ streamDestroyed();
+    qCDebug(qLcAppSrc) << "~QGstAppSource";
+}
+
+bool QGstAppSource::setup(QIODevice *stream, qint64 offset)
+{
+ QMutexLocker locker(&m_mutex);
+
+ if (m_appSrc.isNull())
+ return false;
+
+ if (!setStream(stream, offset))
+ return false;
+
+ GstAppSrcCallbacks callbacks{};
+ callbacks.need_data = QGstAppSource::on_need_data;
+ callbacks.enough_data = QGstAppSource::on_enough_data;
+ callbacks.seek_data = QGstAppSource::on_seek_data;
+
+ m_appSrc.setCallbacks(callbacks, this, nullptr);
+
+ GstAppSrc *appSrc = m_appSrc.appSrc();
+ m_maxBytes = gst_app_src_get_max_bytes(appSrc);
+ m_suspended = false;
+
+ if (m_sequential)
+ m_streamType = GST_APP_STREAM_TYPE_STREAM;
+ else
+ m_streamType = GST_APP_STREAM_TYPE_RANDOM_ACCESS;
+ gst_app_src_set_stream_type(appSrc, m_streamType);
+ gst_app_src_set_size(appSrc, m_sequential ? -1 : m_stream->size() - m_offset);
+
+ m_noMoreData = true;
+
+ return true;
+}
+
+void QGstAppSource::setAudioFormat(const QAudioFormat &f)
+{
+ QMutexLocker locker(&m_mutex);
+
+ m_format = f;
+ if (!m_format.isValid())
+ return;
+
+ auto caps = QGstUtils::capsForAudioFormat(m_format);
+ Q_ASSERT(!caps.isNull());
+ m_appSrc.set("caps", caps);
+ m_appSrc.set("format", GST_FORMAT_TIME);
+}
+
+void QGstAppSource::setExternalAppSrc(QGstAppSrc appsrc)
+{
+ QMutexLocker locker(&m_mutex);
+ m_appSrc = std::move(appsrc);
+}
+
+bool QGstAppSource::setStream(QIODevice *stream, qint64 offset)
+{
+ if (m_stream) {
+ disconnect(m_stream, &QIODevice::readyRead, this, &QGstAppSource::onDataReady);
+ disconnect(m_stream, &QIODevice::destroyed, this, &QGstAppSource::streamDestroyed);
+ m_stream = nullptr;
+ }
+
+ m_dataRequestSize = 0;
+ m_sequential = true;
+ m_maxBytes = 0;
+ streamedSamples = 0;
+
+ if (stream) {
+ if (!stream->isOpen() && !stream->open(QIODevice::ReadOnly))
+ return false;
+ m_stream = stream;
+ connect(m_stream, &QIODevice::destroyed, this, &QGstAppSource::streamDestroyed);
+ connect(m_stream, &QIODevice::readyRead, this, &QGstAppSource::onDataReady);
+ m_sequential = m_stream->isSequential();
+ m_offset = offset;
+ }
+ return true;
+}
+
+bool QGstAppSource::isStreamValid() const
+{
+ return m_stream != nullptr && m_stream->isOpen();
+}
+
+QGstElement QGstAppSource::element() const
+{
+ return m_appSrc;
+}
+
+void QGstAppSource::write(const char *data, qsizetype size)
+{
+ QMutexLocker locker(&m_mutex);
+
+ qCDebug(qLcAppSrc) << "write" << size << m_noMoreData << m_dataRequestSize;
+ if (!size)
+ return;
+ Q_ASSERT(!m_stream);
+ m_buffer.append(data, size);
+ m_noMoreData = false;
+ pushData();
+}
+
+bool QGstAppSource::canAcceptMoreData() const
+{
+ QMutexLocker locker(&m_mutex);
+ return m_noMoreData || m_dataRequestSize != 0;
+}
+
+void QGstAppSource::suspend()
+{
+ QMutexLocker locker(&m_mutex);
+ m_suspended = true;
+}
+
+void QGstAppSource::resume()
+{
+ QMutexLocker locker(&m_mutex);
+ m_suspended = false;
+ m_noMoreData = true;
+}
+
+void QGstAppSource::onDataReady()
+{
+ qCDebug(qLcAppSrc) << "onDataReady" << m_stream->bytesAvailable() << m_stream->size();
+ pushData();
+}
+
+void QGstAppSource::streamDestroyed()
+{
+ qCDebug(qLcAppSrc) << "stream destroyed";
+ m_stream = nullptr;
+ m_dataRequestSize = 0;
+ streamedSamples = 0;
+ sendEOS();
+}
+
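+// Feeds pending data into the appsrc: reads at most m_dataRequestSize bytes
+// from the QIODevice (or from the internal ring buffer when writing directly),
+// wraps them in a GstBuffer (timestamped when an audio format is set) and
+// pushes the buffer downstream. An exhausted source ends in eosOrIdle().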
+void QGstAppSource::pushData()
+{
+ if (m_appSrc.isNull() || !m_dataRequestSize || m_suspended) {
+ qCDebug(qLcAppSrc) << "push data: return immediately" << m_appSrc.isNull() << m_dataRequestSize << m_suspended;
+ return;
+ }
+
+    qCDebug(qLcAppSrc) << "pushData" << m_stream << m_buffer.size();
+    if (m_stream && m_stream->atEnd()) {
+        eosOrIdle();
+        qCDebug(qLcAppSrc) << "end pushData" << m_stream << m_buffer.size();
+        return;
+    }
+
+ qint64 size;
+ if (m_stream)
+ size = m_stream->bytesAvailable();
+ else
+ size = m_buffer.size();
+
+ if (!m_dataRequestSize)
+ m_dataRequestSize = m_maxBytes;
+ size = qMin(size, (qint64)m_dataRequestSize);
+    qCDebug(qLcAppSrc) << "    reading" << size << "bytes of" << m_dataRequestSize << "requested";
+
+ GstBuffer* buffer = gst_buffer_new_and_alloc(size);
+
+ if (m_sequential || !m_stream)
+ buffer->offset = bytesReadSoFar;
+ else
+ buffer->offset = m_stream->pos();
+
+ if (m_format.isValid()) {
+ // timestamp raw audio data
+ uint nSamples = size/m_format.bytesPerFrame();
+
+ GST_BUFFER_TIMESTAMP(buffer) = gst_util_uint64_scale(streamedSamples, GST_SECOND, m_format.sampleRate());
+ GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale(nSamples, GST_SECOND, m_format.sampleRate());
+ streamedSamples += nSamples;
+ }
+
+ GstMapInfo mapInfo;
+ gst_buffer_map(buffer, &mapInfo, GST_MAP_WRITE);
+ void* bufferData = mapInfo.data;
+
+ qint64 bytesRead;
+ if (m_stream)
+ bytesRead = m_stream->read((char*)bufferData, size);
+ else
+ bytesRead = m_buffer.read((char*)bufferData, size);
+ buffer->offset_end = buffer->offset + bytesRead - 1;
+ bytesReadSoFar += bytesRead;
+
+ gst_buffer_unmap(buffer, &mapInfo);
+ qCDebug(qLcAppSrc) << "pushing bytes into gstreamer" << buffer->offset << bytesRead;
+ if (bytesRead == 0) {
+ gst_buffer_unref(buffer);
+ eosOrIdle();
+        qCDebug(qLcAppSrc) << "end pushData" << m_stream << m_buffer.size();
+ return;
+ }
+ m_noMoreData = false;
+ emit bytesProcessed(bytesRead);
+
+ GstFlowReturn ret = m_appSrc.pushBuffer(buffer);
+ if (ret == GST_FLOW_ERROR) {
+ qWarning() << "QGstAppSrc: push buffer error";
+ } else if (ret == GST_FLOW_FLUSHING) {
+ qWarning() << "QGstAppSrc: push buffer wrong state";
+ }
+    qCDebug(qLcAppSrc) << "end pushData" << m_stream << m_buffer.size();
+}
+
+bool QGstAppSource::doSeek(qint64 value)
+{
+ if (isStreamValid())
+ return m_stream->seek(value + m_offset);
+ return false;
+}
+
+gboolean QGstAppSource::on_seek_data(GstAppSrc *, guint64 arg0, gpointer userdata)
+{
+    // we sometimes get spurious seeks to the maximum guint64 value; ignore those
+ if (arg0 == std::numeric_limits<quint64>::max())
+ return true;
+
+ QGstAppSource *self = reinterpret_cast<QGstAppSource *>(userdata);
+ Q_ASSERT(self);
+
+ QMutexLocker locker(&self->m_mutex);
+
+ if (self->m_sequential)
+ return false;
+
+ self->doSeek(arg0);
+ return true;
+}
+
+void QGstAppSource::on_enough_data(GstAppSrc *, gpointer userdata)
+{
+ qCDebug(qLcAppSrc) << "on_enough_data";
+ QGstAppSource *self = static_cast<QGstAppSource *>(userdata);
+ Q_ASSERT(self);
+ QMutexLocker locker(&self->m_mutex);
+ self->m_dataRequestSize = 0;
+}
+
+void QGstAppSource::on_need_data(GstAppSrc *, guint arg0, gpointer userdata)
+{
+ qCDebug(qLcAppSrc) << "on_need_data requesting bytes" << arg0;
+ QGstAppSource *self = static_cast<QGstAppSource *>(userdata);
+ Q_ASSERT(self);
+ QMutexLocker locker(&self->m_mutex);
+ self->m_dataRequestSize = arg0;
+ self->pushData();
+ qCDebug(qLcAppSrc) << "done on_need_data";
+}
+
+void QGstAppSource::sendEOS()
+{
+ qCDebug(qLcAppSrc) << "sending EOS";
+ if (m_appSrc.isNull())
+ return;
+
+ gst_app_src_end_of_stream(GST_APP_SRC(m_appSrc.element()));
+}
+
+void QGstAppSource::eosOrIdle()
+{
+ qCDebug(qLcAppSrc) << "eosOrIdle";
+ if (m_appSrc.isNull())
+ return;
+
+ if (!m_sequential) {
+ sendEOS();
+ return;
+ }
+ if (m_noMoreData)
+ return;
+ qCDebug(qLcAppSrc) << " idle!";
+ m_noMoreData = true;
+ emit noMoreData();
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qgstappsource_p.cpp"
diff --git a/src/plugins/multimedia/gstreamer/common/qgstappsource_p.h b/src/plugins/multimedia/gstreamer/common/qgstappsource_p.h
new file mode 100644
index 000000000..59ced00dc
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstappsource_p.h
@@ -0,0 +1,96 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTAPPSRC_H
+#define QGSTAPPSRC_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qtmultimediaglobal_p.h>
+#include <private/qmultimediautils_p.h>
+#include <qaudioformat.h>
+
+#include <QtCore/qobject.h>
+#include <QtCore/qiodevice.h>
+#include <QtCore/private/qringbuffer_p.h>
+#include <QtCore/qatomic.h>
+#include <QtCore/qmutex.h>
+
+#include <common/qgst_p.h>
+#include <gst/app/gstappsrc.h>
+
+QT_BEGIN_NAMESPACE
+
+class QGstAppSource : public QObject
+{
+ Q_OBJECT
+public:
+ static QMaybe<QGstAppSource *> create(QObject *parent = nullptr);
+ ~QGstAppSource();
+
+ bool setup(QIODevice *stream = nullptr, qint64 offset = 0);
+ void setAudioFormat(const QAudioFormat &f);
+
+ void setExternalAppSrc(QGstAppSrc);
+ QGstElement element() const;
+
+ void write(const char *data, qsizetype size);
+
+ bool canAcceptMoreData() const;
+
+ void suspend();
+ void resume();
+
+Q_SIGNALS:
+ void bytesProcessed(int bytes);
+ void noMoreData();
+
+private Q_SLOTS:
+ void pushData();
+ bool doSeek(qint64);
+ void onDataReady();
+
+    void streamDestroyed();
+
+private:
+ QGstAppSource(QGstAppSrc appsrc, QObject *parent);
+
+ bool setStream(QIODevice *, qint64 offset);
+ bool isStreamValid() const;
+
+ static gboolean on_seek_data(GstAppSrc *element, guint64 arg0, gpointer userdata);
+ static void on_enough_data(GstAppSrc *element, gpointer userdata);
+ static void on_need_data(GstAppSrc *element, uint arg0, gpointer userdata);
+
+ void sendEOS();
+ void eosOrIdle();
+
+ mutable QMutex m_mutex;
+
+ QIODevice *m_stream = nullptr;
+ QRingBuffer m_buffer;
+ QAudioFormat m_format;
+
+ QGstAppSrc m_appSrc;
+ bool m_sequential = true;
+ bool m_suspended = false;
+ bool m_noMoreData = false;
+ GstAppStreamType m_streamType = GST_APP_STREAM_TYPE_RANDOM_ACCESS;
+ qint64 m_offset = 0;
+ qint64 m_maxBytes = 0;
+ qint64 bytesReadSoFar = 0;
+ QAtomicInteger<unsigned int> m_dataRequestSize = 0;
+ int streamedSamples = 0;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/gstreamer/common/qgstpipeline.cpp b/src/plugins/multimedia/gstreamer/common/qgstpipeline.cpp
new file mode 100644
index 000000000..c92a12764
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstpipeline.cpp
@@ -0,0 +1,414 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QtCore/qmap.h>
+#include <QtCore/qtimer.h>
+#include <QtCore/qmutex.h>
+#include <QtCore/qlist.h>
+#include <QtCore/qabstracteventdispatcher.h>
+#include <QtCore/qcoreapplication.h>
+#include <QtCore/qproperty.h>
+
+#include "qgstpipeline_p.h"
+#include "qgstreamermessage_p.h"
+
+QT_BEGIN_NAMESPACE
+
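+// Instant rate changes (GST_SEEK_FLAG_INSTANT_RATE_CHANGE) require
+// GStreamer 1.18; older versions fall back to a flushing seek.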
+static constexpr GstSeekFlags rateChangeSeekFlags =
+#if GST_CHECK_VERSION(1, 18, 0)
+ GST_SEEK_FLAG_INSTANT_RATE_CHANGE;
+#else
+ GST_SEEK_FLAG_FLUSH;
+#endif
+
+class QGstPipelinePrivate : public QObject
+{
+public:
+ int m_ref = 0;
+ guint m_tag = 0;
+ GstBus *m_bus = nullptr;
+ QTimer *m_intervalTimer = nullptr;
+ QMutex filterMutex;
+ QList<QGstreamerSyncMessageFilter*> syncFilters;
+ QList<QGstreamerBusMessageFilter*> busFilters;
+ bool inStoppedState = true;
+ mutable std::chrono::nanoseconds m_position{};
+ double m_rate = 1.;
+ bool m_flushOnConfigChanges = false;
+ bool m_pendingFlush = false;
+
+ int m_configCounter = 0;
+ GstState m_savedState = GST_STATE_NULL;
+
+ explicit QGstPipelinePrivate(GstBus *bus, QObject *parent = nullptr);
+ ~QGstPipelinePrivate();
+
+    void ref() { ++m_ref; }
+ void deref() { if (!--m_ref) delete this; }
+
+ void installMessageFilter(QGstreamerSyncMessageFilter *filter);
+ void removeMessageFilter(QGstreamerSyncMessageFilter *filter);
+ void installMessageFilter(QGstreamerBusMessageFilter *filter);
+ void removeMessageFilter(QGstreamerBusMessageFilter *filter);
+
+ void processMessage(const QGstreamerMessage &msg)
+ {
+ for (QGstreamerBusMessageFilter *filter : std::as_const(busFilters)) {
+ if (filter->processBusMessage(msg))
+ break;
+ }
+ }
+
+private:
+ static GstBusSyncReply syncGstBusFilter(GstBus *bus, GstMessage *message,
+ QGstPipelinePrivate *d)
+ {
+ if (!message)
+ return GST_BUS_PASS;
+
+ Q_UNUSED(bus);
+ QMutexLocker lock(&d->filterMutex);
+
+ for (QGstreamerSyncMessageFilter *filter : std::as_const(d->syncFilters)) {
+ if (filter->processSyncMessage(
+ QGstreamerMessage{ message, QGstreamerMessage::NeedsRef })) {
+ gst_message_unref(message);
+ return GST_BUS_DROP;
+ }
+ }
+
+ return GST_BUS_PASS;
+ }
+
+ void processMessage(GstMessage *message)
+ {
+ if (!message)
+ return;
+
+ QGstreamerMessage msg{
+ message,
+ QGstreamerMessage::NeedsRef,
+ };
+
+ processMessage(msg);
+ }
+
+ static gboolean busCallback(GstBus *, GstMessage *message, gpointer data)
+ {
+ static_cast<QGstPipelinePrivate *>(data)->processMessage(message);
+ return TRUE;
+ }
+};
+
+QGstPipelinePrivate::QGstPipelinePrivate(GstBus* bus, QObject* parent)
+ : QObject(parent),
+ m_bus(bus)
+{
+    // The GLib event loop can be disabled either by an environment variable or the
+    // QT_NO_GLIB define, so check the event dispatcher that is actually installed.
+ QAbstractEventDispatcher *dispatcher = QCoreApplication::eventDispatcher();
+ const bool hasGlib = dispatcher && dispatcher->inherits("QEventDispatcherGlib");
+ if (!hasGlib) {
+ m_intervalTimer = new QTimer(this);
+ m_intervalTimer->setInterval(250);
+ QObject::connect(m_intervalTimer, &QTimer::timeout, this, [this] {
+ GstMessage *message;
+ while ((message = gst_bus_poll(m_bus, GST_MESSAGE_ANY, 0)) != nullptr) {
+ processMessage(message);
+ gst_message_unref(message);
+ }
+ });
+ m_intervalTimer->start();
+ } else {
+ m_tag = gst_bus_add_watch_full(bus, G_PRIORITY_DEFAULT, busCallback, this, nullptr);
+ }
+
+ gst_bus_set_sync_handler(bus, (GstBusSyncHandler)syncGstBusFilter, this, nullptr);
+}
+
+QGstPipelinePrivate::~QGstPipelinePrivate()
+{
+ delete m_intervalTimer;
+
+ if (m_tag)
+ gst_bus_remove_watch(m_bus);
+
+ gst_bus_set_sync_handler(m_bus, nullptr, nullptr, nullptr);
+ gst_object_unref(GST_OBJECT(m_bus));
+}
+
+void QGstPipelinePrivate::installMessageFilter(QGstreamerSyncMessageFilter *filter)
+{
+ if (filter) {
+ QMutexLocker lock(&filterMutex);
+ if (!syncFilters.contains(filter))
+ syncFilters.append(filter);
+ }
+}
+
+void QGstPipelinePrivate::removeMessageFilter(QGstreamerSyncMessageFilter *filter)
+{
+ if (filter) {
+ QMutexLocker lock(&filterMutex);
+ syncFilters.removeAll(filter);
+ }
+}
+
+void QGstPipelinePrivate::installMessageFilter(QGstreamerBusMessageFilter *filter)
+{
+ if (filter && !busFilters.contains(filter))
+ busFilters.append(filter);
+}
+
+void QGstPipelinePrivate::removeMessageFilter(QGstreamerBusMessageFilter *filter)
+{
+ if (filter)
+ busFilters.removeAll(filter);
+}
+
+QGstPipeline QGstPipeline::create(const char *name)
+{
+ GstPipeline *pipeline = qGstCheckedCast<GstPipeline>(gst_pipeline_new(name));
+ return adopt(pipeline);
+}
+
+QGstPipeline QGstPipeline::adopt(GstPipeline *pipeline)
+{
+ QGstPipelinePrivate *d = new QGstPipelinePrivate(gst_pipeline_get_bus(pipeline));
+ g_object_set_data_full(qGstCheckedCast<GObject>(pipeline), "pipeline-private", d,
+ [](gpointer ptr) {
+ delete reinterpret_cast<QGstPipelinePrivate *>(ptr);
+ });
+
+ return QGstPipeline{
+ pipeline,
+ QGstPipeline::NeedsRef,
+ };
+}
+
+QGstPipeline::QGstPipeline(GstPipeline *p, RefMode mode) : QGstBin(qGstCheckedCast<GstBin>(p), mode)
+{
+}
+
+QGstPipeline::~QGstPipeline() = default;
+
+bool QGstPipeline::inStoppedState() const
+{
+ QGstPipelinePrivate *d = getPrivate();
+ return d->inStoppedState;
+}
+
+void QGstPipeline::setInStoppedState(bool stopped)
+{
+ QGstPipelinePrivate *d = getPrivate();
+ d->inStoppedState = stopped;
+}
+
+void QGstPipeline::setFlushOnConfigChanges(bool flush)
+{
+ QGstPipelinePrivate *d = getPrivate();
+ d->m_flushOnConfigChanges = flush;
+}
+
+void QGstPipeline::installMessageFilter(QGstreamerSyncMessageFilter *filter)
+{
+ QGstPipelinePrivate *d = getPrivate();
+ d->installMessageFilter(filter);
+}
+
+void QGstPipeline::removeMessageFilter(QGstreamerSyncMessageFilter *filter)
+{
+ QGstPipelinePrivate *d = getPrivate();
+ d->removeMessageFilter(filter);
+}
+
+void QGstPipeline::installMessageFilter(QGstreamerBusMessageFilter *filter)
+{
+ QGstPipelinePrivate *d = getPrivate();
+ d->installMessageFilter(filter);
+}
+
+void QGstPipeline::removeMessageFilter(QGstreamerBusMessageFilter *filter)
+{
+ QGstPipelinePrivate *d = getPrivate();
+ d->removeMessageFilter(filter);
+}
+
+GstStateChangeReturn QGstPipeline::setState(GstState state)
+{
+ QGstPipelinePrivate *d = getPrivate();
+ auto retval = gst_element_set_state(element(), state);
+ if (d->m_pendingFlush) {
+ d->m_pendingFlush = false;
+ flush();
+ }
+ return retval;
+}
+
+void QGstPipeline::processMessages(GstMessageType types)
+{
+ QGstPipelinePrivate *d = getPrivate();
+ QGstreamerMessage message{
+ gst_bus_pop_filtered(d->m_bus, types),
+ QGstreamerMessage::HasRef,
+ };
+ d->processMessage(message);
+}
+
+void QGstPipeline::dumpGraph(const char *fileName)
+{
+ if (isNull())
+ return;
+
+ QGstBin{ bin(), QGstBin::NeedsRef }.dumpGraph(fileName);
+}
+
+void QGstPipeline::beginConfig()
+{
+ QGstPipelinePrivate *d = getPrivate();
+ Q_ASSERT(!isNull());
+
+ ++d->m_configCounter;
+ if (d->m_configCounter > 1)
+ return;
+
+ GstState state;
+ GstState pending;
+ GstStateChangeReturn stateChangeReturn = gst_element_get_state(element(), &state, &pending, 0);
+ switch (stateChangeReturn) {
+ case GST_STATE_CHANGE_ASYNC: {
+ if (state == GST_STATE_PLAYING) {
+ // playing->paused transition in progress. wait for it to finish
+ bool stateChangeSuccessful = this->finishStateChange();
+ if (!stateChangeSuccessful)
+ qWarning() << "QGstPipeline::beginConfig: timeout when waiting for state change";
+ }
+
+ state = pending;
+ break;
+ }
+ case GST_STATE_CHANGE_FAILURE: {
+ qDebug() << "QGstPipeline::beginConfig: state change failure";
+ dumpGraph("beginConfigFailure");
+ break;
+ }
+
+ case GST_STATE_CHANGE_NO_PREROLL:
+ case GST_STATE_CHANGE_SUCCESS:
+ break;
+ }
+
+ d->m_savedState = state;
+ if (d->m_savedState == GST_STATE_PLAYING)
+ setStateSync(GST_STATE_PAUSED);
+}
+
+void QGstPipeline::endConfig()
+{
+ QGstPipelinePrivate *d = getPrivate();
+ Q_ASSERT(!isNull());
+
+ --d->m_configCounter;
+ if (d->m_configCounter)
+ return;
+
+ if (d->m_flushOnConfigChanges)
+ d->m_pendingFlush = true;
+ if (d->m_savedState == GST_STATE_PLAYING)
+ setState(GST_STATE_PLAYING);
+ d->m_savedState = GST_STATE_NULL;
+}
+
+void QGstPipeline::flush()
+{
+ seek(position());
+}
+
+void QGstPipeline::seek(std::chrono::nanoseconds pos, double rate)
+{
+ using namespace std::chrono_literals;
+
+ QGstPipelinePrivate *d = getPrivate();
+ // always adjust the rate, so it can be set before playback starts
+ // setting position needs a loaded media file that's seekable
+
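+    // A positive rate seeks from pos to the end of the stream; a negative
+    // rate plays the segment from the start up to pos in reverse.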
+ bool success = (rate > 0)
+ ? gst_element_seek(element(), d->m_rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
+ GST_SEEK_TYPE_SET, pos.count(), GST_SEEK_TYPE_END, 0)
+ : gst_element_seek(element(), d->m_rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
+ GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET, pos.count());
+
+ if (!success) {
+ qDebug() << "seek: gst_element_seek failed" << pos;
+ return;
+ }
+
+ d->m_position = pos;
+}
+
+void QGstPipeline::seek(std::chrono::nanoseconds pos)
+{
+ seek(pos, getPrivate()->m_rate);
+}
+
+void QGstPipeline::setPlaybackRate(double rate)
+{
+ QGstPipelinePrivate *d = getPrivate();
+ if (rate == d->m_rate)
+ return;
+
+ d->m_rate = rate;
+
+ applyPlaybackRate(/*instantRateChange =*/true);
+}
+
+double QGstPipeline::playbackRate() const
+{
+ QGstPipelinePrivate *d = getPrivate();
+ return d->m_rate;
+}
+
+void QGstPipeline::applyPlaybackRate(bool instantRateChange)
+{
+ QGstPipelinePrivate *d = getPrivate();
+
+ bool success = gst_element_seek(element(), d->m_rate, GST_FORMAT_UNDEFINED,
+ instantRateChange ? rateChangeSeekFlags : GST_SEEK_FLAG_FLUSH,
+ GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE, GST_SEEK_TYPE_NONE,
+ GST_CLOCK_TIME_NONE);
+ if (!success)
+ qDebug() << "setPlaybackRate: gst_element_seek failed";
+}
+
+void QGstPipeline::setPosition(std::chrono::nanoseconds pos)
+{
+ seek(pos);
+}
+
+std::chrono::nanoseconds QGstPipeline::position() const
+{
+ QGstPipelinePrivate *d = getPrivate();
+ std::optional<std::chrono::nanoseconds> pos = QGstElement::position();
+ if (pos)
+ d->m_position = *pos;
+ else
+ qDebug() << "QGstPipeline: failed to query position, using previous position";
+
+ return d->m_position;
+}
+
+std::chrono::milliseconds QGstPipeline::positionInMs() const
+{
+ using namespace std::chrono;
+ return round<milliseconds>(position());
+}
+
+QGstPipelinePrivate *QGstPipeline::getPrivate() const
+{
+ gpointer p = g_object_get_data(qGstCheckedCast<GObject>(object()), "pipeline-private");
+ auto *d = reinterpret_cast<QGstPipelinePrivate *>(p);
+ Q_ASSERT(d);
+ return d;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstpipeline_p.h b/src/plugins/multimedia/gstreamer/common/qgstpipeline_p.h
new file mode 100644
index 000000000..ef08bfaaa
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstpipeline_p.h
@@ -0,0 +1,119 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTPIPELINE_P_H
+#define QGSTPIPELINE_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/private/qtmultimediaglobal_p.h>
+#include <QtCore/qobject.h>
+
+#include "qgst_p.h"
+
+QT_BEGIN_NAMESPACE
+
+class QGstreamerMessage;
+
+class QGstreamerSyncMessageFilter {
+public:
+ //returns true if message was processed and should be dropped, false otherwise
+ virtual bool processSyncMessage(const QGstreamerMessage &message) = 0;
+};
+
+
+class QGstreamerBusMessageFilter {
+public:
+ //returns true if message was processed and should be dropped, false otherwise
+ virtual bool processBusMessage(const QGstreamerMessage &message) = 0;
+};
+
+class QGstPipelinePrivate;
+
+class QGstPipeline : public QGstBin
+{
+public:
+ constexpr QGstPipeline() = default;
+ QGstPipeline(const QGstPipeline &) = default;
+    QGstPipeline(QGstPipeline &&) noexcept = default;
+ QGstPipeline &operator=(const QGstPipeline &) = default;
+ QGstPipeline &operator=(QGstPipeline &&) noexcept = default;
+ QGstPipeline(GstPipeline *, RefMode mode);
+ ~QGstPipeline();
+
+ // installs QGstPipelinePrivate as "pipeline-private" gobject property
+ static QGstPipeline create(const char *name);
+ static QGstPipeline adopt(GstPipeline *);
+
+    // This is needed to avoid sending QVideoFrames or audio buffers to the
+    // application while the pipeline is prerolling: QMediaPlayer is still in the
+    // stopped state while we put the GStreamer pipeline into the PAUSED state, so
+    // that we can read the required stream metadata and still get a fast
+    // transition to playing.
+ bool inStoppedState() const;
+ void setInStoppedState(bool stopped);
+
+ void setFlushOnConfigChanges(bool flush);
+
+ void installMessageFilter(QGstreamerSyncMessageFilter *filter);
+ void removeMessageFilter(QGstreamerSyncMessageFilter *filter);
+ void installMessageFilter(QGstreamerBusMessageFilter *filter);
+ void removeMessageFilter(QGstreamerBusMessageFilter *filter);
+
+ GstStateChangeReturn setState(GstState state);
+
+ GstPipeline *pipeline() const { return GST_PIPELINE_CAST(get()); }
+
+ void processMessages(GstMessageType = GST_MESSAGE_ANY);
+
+ void dumpGraph(const char *fileName);
+
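+    // Runs `fn` between beginConfig() and endConfig(): a playing pipeline is
+    // paused while the functor rewires elements and is resumed afterwards.
+    // A sketch with hypothetical elements:
+    //
+    //     pipeline.modifyPipelineWhileNotRunning([&] {
+    //         qUnlinkGstElements(oldSink, tee);
+    //         pipeline.stopAndRemoveElements(oldSink);
+    //     });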
+ template <typename Functor>
+ void modifyPipelineWhileNotRunning(Functor &&fn)
+ {
+ beginConfig();
+ fn();
+ endConfig();
+ }
+
+ template <typename Functor>
+ static void modifyPipelineWhileNotRunning(QGstPipeline &&pipeline, Functor &&fn)
+ {
+ if (pipeline)
+ pipeline.modifyPipelineWhileNotRunning(fn);
+ else
+ fn();
+ }
+
+ void flush();
+
+ void setPlaybackRate(double rate);
+ double playbackRate() const;
+ void applyPlaybackRate(bool instantRateChange);
+
+ void setPosition(std::chrono::nanoseconds pos);
+ std::chrono::nanoseconds position() const;
+ std::chrono::milliseconds positionInMs() const;
+
+private:
+ void seek(std::chrono::nanoseconds pos, double rate);
+ void seek(std::chrono::nanoseconds pos);
+
+ QGstPipelinePrivate *getPrivate() const;
+
+ void beginConfig();
+ void endConfig();
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput.cpp b/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput.cpp
new file mode 100644
index 000000000..7c620da39
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput.cpp
@@ -0,0 +1,137 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QtMultimedia/qaudiodevice.h>
+#include <QtMultimedia/qaudioinput.h>
+
+#include <QtCore/qloggingcategory.h>
+
+#include <audio/qgstreameraudiodevice_p.h>
+#include <common/qgstreameraudioinput_p.h>
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+
+#include <utility>
+
+static Q_LOGGING_CATEGORY(qLcMediaAudioInput, "qt.multimedia.audioInput")
+
+QT_BEGIN_NAMESPACE
+
+QMaybe<QPlatformAudioInput *> QGstreamerAudioInput::create(QAudioInput *parent)
+{
+ static const auto error = qGstErrorMessageIfElementsNotAvailable("autoaudiosrc", "volume");
+ if (error)
+ return *error;
+
+ return new QGstreamerAudioInput(parent);
+}
+
+QGstreamerAudioInput::QGstreamerAudioInput(QAudioInput *parent)
+ : QObject(parent),
+ QPlatformAudioInput(parent),
+ gstAudioInput(QGstBin::create("audioInput")),
+ audioSrc{
+ QGstElement::createFromFactory("autoaudiosrc", "autoaudiosrc"),
+ },
+ audioVolume{
+ QGstElement::createFromFactory("volume", "volume"),
+ }
+{
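+    // Builds the capture bin, autoaudiosrc ! volume, and ghosts the volume
+    // element's src pad as the bin's output.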
+ gstAudioInput.add(audioSrc, audioVolume);
+ qLinkGstElements(audioSrc, audioVolume);
+
+ gstAudioInput.addGhostPad(audioVolume, "src");
+}
+
+QGstElement QGstreamerAudioInput::createGstElement()
+{
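+    // Resolution order: a custom pipeline description from the device info,
+    // then the platform source (pulsesrc or alsasrc) or the enumerated
+    // GstDevice, finally falling back to autoaudiosrc.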
+ const auto *customDeviceInfo =
+ dynamic_cast<const QGStreamerCustomAudioDeviceInfo *>(m_audioDevice.handle());
+
+ if (customDeviceInfo) {
+ qCDebug(qLcMediaAudioInput)
+ << "requesting custom audio src element: " << customDeviceInfo->id;
+
+ QGstElement element = QGstBin::createFromPipelineDescription(customDeviceInfo->id,
+ /*name=*/nullptr,
+ /*ghostUnlinkedPads=*/true);
+ if (element)
+ return element;
+
+ qCWarning(qLcMediaAudioInput)
+ << "Cannot create audio source element:" << customDeviceInfo->id;
+ }
+
+ const QByteArray &id = m_audioDevice.id();
+ if constexpr (QT_CONFIG(pulseaudio)) {
+ QGstElement newSrc = QGstElement::createFromFactory("pulsesrc", "audiosrc");
+ if (newSrc) {
+ newSrc.set("device", id.constData());
+ return newSrc;
+ } else {
+ qWarning() << "Cannot create pulsesrc";
+ }
+ } else if constexpr (QT_CONFIG(alsa)) {
+ QGstElement newSrc = QGstElement::createFromFactory("alsasrc", "audiosrc");
+ if (newSrc) {
+ newSrc.set("device", id.constData());
+ return newSrc;
+ } else {
+ qWarning() << "Cannot create alsasrc";
+ }
+ } else {
+ auto *deviceInfo = dynamic_cast<const QGStreamerAudioDeviceInfo *>(m_audioDevice.handle());
+ if (deviceInfo && deviceInfo->gstDevice) {
+ QGstElement element = QGstElement::createFromDevice(deviceInfo->gstDevice, "audiosrc");
+ if (element)
+ return element;
+ }
+ }
+ qCWarning(qLcMediaAudioInput) << "Invalid audio device";
+ qCWarning(qLcMediaAudioInput)
+ << "Failed to create a gst element for the audio device, using a default audio source";
+ return QGstElement::createFromFactory("autoaudiosrc", "audiosrc");
+}
+
+QGstreamerAudioInput::~QGstreamerAudioInput()
+{
+ gstAudioInput.setStateSync(GST_STATE_NULL);
+}
+
+void QGstreamerAudioInput::setVolume(float volume)
+{
+ audioVolume.set("volume", volume);
+}
+
+void QGstreamerAudioInput::setMuted(bool muted)
+{
+ audioVolume.set("mute", muted);
+}
+
+void QGstreamerAudioInput::setAudioDevice(const QAudioDevice &device)
+{
+ if (device == m_audioDevice)
+ return;
+ qCDebug(qLcMediaAudioInput) << "setAudioInput" << device.description() << device.isNull();
+ m_audioDevice = device;
+
+ QGstElement newSrc = createGstElement();
+
+ QGstPipeline::modifyPipelineWhileNotRunning(gstAudioInput.getPipeline(), [&] {
+ qUnlinkGstElements(audioSrc, audioVolume);
+ gstAudioInput.stopAndRemoveElements(audioSrc);
+ audioSrc = std::move(newSrc);
+ gstAudioInput.add(audioSrc);
+ qLinkGstElements(audioSrc, audioVolume);
+ audioSrc.syncStateWithParent();
+ });
+}
+
+QAudioDevice QGstreamerAudioInput::audioInput() const
+{
+ return m_audioDevice;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput_p.h b/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput_p.h
new file mode 100644
index 000000000..5ca0e1a49
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput_p.h
@@ -0,0 +1,62 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTREAMERAUDIOINPUT_P_H
+#define QGSTREAMERAUDIOINPUT_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qobject.h>
+#include <QtMultimedia/private/qmultimediautils_p.h>
+#include <QtMultimedia/private/qplatformaudioinput_p.h>
+#include <QtMultimedia/private/qtmultimediaglobal_p.h>
+#include <QtMultimedia/qaudiodevice.h>
+
+#include <common/qgst_p.h>
+#include <common/qgstpipeline_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAudioDevice;
+
+class QGstreamerAudioInput : public QObject, public QPlatformAudioInput
+{
+public:
+ static QMaybe<QPlatformAudioInput *> create(QAudioInput *parent);
+ ~QGstreamerAudioInput();
+
+ bool setAudioInput(const QAudioDevice &);
+ QAudioDevice audioInput() const;
+
+ void setAudioDevice(const QAudioDevice &) override;
+ void setVolume(float) override;
+ void setMuted(bool) override;
+
+ QGstElement gstElement() const { return gstAudioInput; }
+
+private:
+ explicit QGstreamerAudioInput(QAudioInput *parent);
+
+ QGstElement createGstElement();
+
+ QAudioDevice m_audioDevice;
+
+ // Gst elements
+ QGstBin gstAudioInput;
+
+ QGstElement audioSrc;
+ QGstElement audioVolume;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput.cpp b/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput.cpp
new file mode 100644
index 000000000..9cea7fb62
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput.cpp
@@ -0,0 +1,138 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <common/qgstreameraudiooutput_p.h>
+#include <audio/qgstreameraudiodevice_p.h>
+
+#include <QtMultimedia/qaudiodevice.h>
+#include <QtMultimedia/qaudiooutput.h>
+#include <QtCore/qloggingcategory.h>
+
+#include <utility>
+
+static Q_LOGGING_CATEGORY(qLcMediaAudioOutput, "qt.multimedia.audiooutput")
+
+QT_BEGIN_NAMESPACE
+
+QMaybe<QPlatformAudioOutput *> QGstreamerAudioOutput::create(QAudioOutput *parent)
+{
+ static const auto error = qGstErrorMessageIfElementsNotAvailable(
+ "audioconvert", "audioresample", "volume", "autoaudiosink");
+ if (error)
+ return *error;
+
+ return new QGstreamerAudioOutput(parent);
+}
+
+QGstreamerAudioOutput::QGstreamerAudioOutput(QAudioOutput *parent)
+ : QObject(parent),
+ QPlatformAudioOutput(parent),
+ gstAudioOutput(QGstBin::create("audioOutput")),
+ audioQueue{
+ QGstElement::createFromFactory("queue", "audioQueue"),
+ },
+ audioConvert{
+ QGstElement::createFromFactory("audioconvert", "audioConvert"),
+ },
+ audioResample{
+ QGstElement::createFromFactory("audioresample", "audioResample"),
+ },
+ audioVolume{
+ QGstElement::createFromFactory("volume", "volume"),
+ },
+ audioSink{
+ QGstElement::createFromFactory("autoaudiosink", "autoAudioSink"),
+ }
+{
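+    // Builds the playback bin,
+    //   queue ! audioconvert ! audioresample ! volume ! autoaudiosink,
+    // and ghosts the queue's sink pad as the bin's input.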
+ gstAudioOutput.add(audioQueue, audioConvert, audioResample, audioVolume, audioSink);
+ qLinkGstElements(audioQueue, audioConvert, audioResample, audioVolume, audioSink);
+
+ gstAudioOutput.addGhostPad(audioQueue, "sink");
+}
+
+QGstElement QGstreamerAudioOutput::createGstElement()
+{
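+    // Resolution order: a custom pipeline description from the device info,
+    // then the platform sink (pulsesink or alsasink) or the enumerated
+    // GstDevice, finally falling back to autoaudiosink.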
+ const auto *customDeviceInfo =
+ dynamic_cast<const QGStreamerCustomAudioDeviceInfo *>(m_audioOutput.handle());
+
+ if (customDeviceInfo) {
+ qCDebug(qLcMediaAudioOutput)
+ << "requesting custom audio sink element: " << customDeviceInfo->id;
+
+ QGstElement element =
+ QGstBin::createFromPipelineDescription(customDeviceInfo->id, /*name=*/nullptr,
+ /*ghostUnlinkedPads=*/true);
+ if (element)
+ return element;
+
+ qCWarning(qLcMediaAudioOutput)
+ << "Cannot create audio sink element:" << customDeviceInfo->id;
+ }
+
+ const QByteArray &id = m_audioOutput.id();
+ if constexpr (QT_CONFIG(pulseaudio)) {
+ QGstElement newSink = QGstElement::createFromFactory("pulsesink", "audiosink");
+ if (newSink) {
+ newSink.set("device", id.constData());
+ return newSink;
+ } else {
+ qWarning() << "Cannot create pulsesink";
+ }
+ } else if constexpr (QT_CONFIG(alsa)) {
+ QGstElement newSink = QGstElement::createFromFactory("alsasink", "audiosink");
+ if (newSink) {
+ newSink.set("device", id.constData());
+ return newSink;
+ } else {
+ qWarning() << "Cannot create alsasink";
+ }
+ } else {
+ auto *deviceInfo = dynamic_cast<const QGStreamerAudioDeviceInfo *>(m_audioOutput.handle());
+ if (deviceInfo && deviceInfo->gstDevice) {
+ QGstElement element = QGstElement::createFromDevice(deviceInfo->gstDevice, "audiosink");
+ if (element)
+ return element;
+ }
+ }
+ qCWarning(qLcMediaAudioOutput) << "Invalid audio device:" << m_audioOutput.id();
+ qCWarning(qLcMediaAudioOutput)
+ << "Failed to create a gst element for the audio device, using a default audio sink";
+ return QGstElement::createFromFactory("autoaudiosink", "audiosink");
+}
+
+QGstreamerAudioOutput::~QGstreamerAudioOutput()
+{
+ gstAudioOutput.setStateSync(GST_STATE_NULL);
+}
+
+void QGstreamerAudioOutput::setVolume(float volume)
+{
+ audioVolume.set("volume", volume);
+}
+
+void QGstreamerAudioOutput::setMuted(bool muted)
+{
+ audioVolume.set("mute", muted);
+}
+
+void QGstreamerAudioOutput::setAudioDevice(const QAudioDevice &info)
+{
+ if (info == m_audioOutput)
+ return;
+ qCDebug(qLcMediaAudioOutput) << "setAudioOutput" << info.description() << info.isNull();
+
+ m_audioOutput = info;
+
+ QGstElement newSink = createGstElement();
+
+ QGstPipeline::modifyPipelineWhileNotRunning(gstAudioOutput.getPipeline(), [&] {
+ qUnlinkGstElements(audioVolume, audioSink);
+ gstAudioOutput.stopAndRemoveElements(audioSink);
+ audioSink = std::move(newSink);
+ gstAudioOutput.add(audioSink);
+ audioSink.syncStateWithParent();
+ qLinkGstElements(audioVolume, audioSink);
+ });
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput_p.h b/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput_p.h
new file mode 100644
index 000000000..dea53e5c4
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput_p.h
@@ -0,0 +1,63 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTREAMERAUDIOOUTPUT_P_H
+#define QGSTREAMERAUDIOOUTPUT_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qobject.h>
+#include <QtMultimedia/private/qmultimediautils_p.h>
+#include <QtMultimedia/private/qplatformaudiooutput_p.h>
+#include <QtMultimedia/private/qtmultimediaglobal_p.h>
+#include <QtMultimedia/qaudiodevice.h>
+
+#include <common/qgst_p.h>
+#include <common/qgstpipeline_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAudioDevice;
+
+class QGstreamerAudioOutput : public QObject, public QPlatformAudioOutput
+{
+public:
+ static QMaybe<QPlatformAudioOutput *> create(QAudioOutput *parent);
+ ~QGstreamerAudioOutput();
+
+ void setAudioDevice(const QAudioDevice &) override;
+ void setVolume(float) override;
+ void setMuted(bool) override;
+
+ QGstElement gstElement() const { return gstAudioOutput; }
+
+private:
+ explicit QGstreamerAudioOutput(QAudioOutput *parent);
+
+ QGstElement createGstElement();
+
+ QAudioDevice m_audioOutput;
+
+ // Gst elements
+ QGstPipeline gstPipeline;
+ QGstBin gstAudioOutput;
+
+ QGstElement audioQueue;
+ QGstElement audioConvert;
+ QGstElement audioResample;
+ QGstElement audioVolume;
+ QGstElement audioSink;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamerbufferprobe.cpp b/src/plugins/multimedia/gstreamer/common/qgstreamerbufferprobe.cpp
new file mode 100644
index 000000000..9cba810db
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamerbufferprobe.cpp
@@ -0,0 +1,88 @@
+// Copyright (C) 2016 Jolla Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <common/qgstreamerbufferprobe_p.h>
+
+#include <common/qgst_p.h>
+
+QT_BEGIN_NAMESPACE
+
+QGstreamerBufferProbe::QGstreamerBufferProbe(Flags flags)
+ : m_flags(flags)
+{
+}
+
+QGstreamerBufferProbe::~QGstreamerBufferProbe() = default;
+
+void QGstreamerBufferProbe::addProbeToPad(GstPad *pad, bool downstream)
+{
+ QGstCaps caps{
+ gst_pad_get_current_caps(pad),
+ QGstCaps::HasRef,
+ };
+
+ if (caps)
+ probeCaps(caps.caps());
+
+ if (m_flags & ProbeCaps) {
+ m_capsProbeId = gst_pad_add_probe(
+ pad,
+ downstream
+ ? GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM
+ : GST_PAD_PROBE_TYPE_EVENT_UPSTREAM,
+ capsProbe,
+ this,
+ nullptr);
+ }
+ if (m_flags & ProbeBuffers) {
+ m_bufferProbeId = gst_pad_add_probe(
+ pad, GST_PAD_PROBE_TYPE_BUFFER, bufferProbe, this, nullptr);
+ }
+}
+
+void QGstreamerBufferProbe::removeProbeFromPad(GstPad *pad)
+{
+ if (m_capsProbeId != -1) {
+ gst_pad_remove_probe(pad, m_capsProbeId);
+ m_capsProbeId = -1;
+ }
+ if (m_bufferProbeId != -1) {
+ gst_pad_remove_probe(pad, m_bufferProbeId);
+ m_bufferProbeId = -1;
+ }
+}
+
+void QGstreamerBufferProbe::probeCaps(GstCaps *)
+{
+}
+
+bool QGstreamerBufferProbe::probeBuffer(GstBuffer *)
+{
+ return true;
+}
+
+GstPadProbeReturn QGstreamerBufferProbe::capsProbe(GstPad *, GstPadProbeInfo *info, gpointer user_data)
+{
+ QGstreamerBufferProbe * const control = static_cast<QGstreamerBufferProbe *>(user_data);
+
+ if (GstEvent * const event = gst_pad_probe_info_get_event(info)) {
+ if (GST_EVENT_TYPE(event) == GST_EVENT_CAPS) {
+ GstCaps *caps;
+ gst_event_parse_caps(event, &caps);
+
+ control->probeCaps(caps);
+ }
+ }
+ return GST_PAD_PROBE_OK;
+}
+
+GstPadProbeReturn QGstreamerBufferProbe::bufferProbe(
+ GstPad *, GstPadProbeInfo *info, gpointer user_data)
+{
+ QGstreamerBufferProbe * const control = static_cast<QGstreamerBufferProbe *>(user_data);
+ if (GstBuffer * const buffer = gst_pad_probe_info_get_buffer(info))
+ return control->probeBuffer(buffer) ? GST_PAD_PROBE_OK : GST_PAD_PROBE_DROP;
+ return GST_PAD_PROBE_OK;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamerbufferprobe_p.h b/src/plugins/multimedia/gstreamer/common/qgstreamerbufferprobe_p.h
new file mode 100644
index 000000000..71996a0cc
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamerbufferprobe_p.h
@@ -0,0 +1,56 @@
+// Copyright (C) 2016 Jolla Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTREAMERBUFFERPROBE_H
+#define QGSTREAMERBUFFERPROBE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qtmultimediaglobal_p.h>
+#include <gst/gst.h>
+
+#include <QtCore/qglobal.h>
+
+
+QT_BEGIN_NAMESPACE
+
+class QGstreamerBufferProbe
+{
+public:
+ enum Flags
+ {
+ ProbeCaps = 0x01,
+ ProbeBuffers = 0x02,
+ ProbeAll = ProbeCaps | ProbeBuffers
+ };
+
+ explicit QGstreamerBufferProbe(Flags flags = ProbeAll);
+ virtual ~QGstreamerBufferProbe();
+
+ void addProbeToPad(GstPad *pad, bool downstream = true);
+ void removeProbeFromPad(GstPad *pad);
+
+protected:
+ virtual void probeCaps(GstCaps *caps);
+ virtual bool probeBuffer(GstBuffer *buffer);
+
+private:
+ static GstPadProbeReturn capsProbe(GstPad *pad, GstPadProbeInfo *info, gpointer user_data);
+ static GstPadProbeReturn bufferProbe(GstPad *pad, GstPadProbeInfo *info, gpointer user_data);
+ int m_capsProbeId = -1;
+ int m_bufferProbeId = -1;
+ const Flags m_flags;
+};
+
+QT_END_NAMESPACE
+
+#endif // QGSTREAMERBUFFERPROBE_H
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer.cpp b/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer.cpp
new file mode 100644
index 000000000..ce5efb648
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer.cpp
@@ -0,0 +1,1114 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <common/qgstreamermediaplayer_p.h>
+
+#include <audio/qgstreameraudiodevice_p.h>
+#include <common/qgst_debug_p.h>
+#include <common/qgstappsource_p.h>
+#include <common/qgstpipeline_p.h>
+#include <common/qgstreameraudiooutput_p.h>
+#include <common/qgstreamermessage_p.h>
+#include <common/qgstreamermetadata_p.h>
+#include <common/qgstreamervideooutput_p.h>
+#include <common/qgstreamervideosink_p.h>
+#include <qgstreamerformatinfo_p.h>
+
+#include <QtMultimedia/qaudiodevice.h>
+#include <QtCore/qdir.h>
+#include <QtCore/qsocketnotifier.h>
+#include <QtCore/qurl.h>
+#include <QtCore/qdebug.h>
+#include <QtCore/qloggingcategory.h>
+#include <QtCore/private/quniquehandle_p.h>
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+
+#if QT_CONFIG(gstreamer_gl)
+# include <gst/gl/gl.h>
+#endif
+
+static Q_LOGGING_CATEGORY(qLcMediaPlayer, "qt.multimedia.player")
+
+QT_BEGIN_NAMESPACE
+
+QGstreamerMediaPlayer::TrackSelector::TrackSelector(TrackType type, QGstElement selector)
+ : selector(selector), type(type)
+{
+ selector.set("sync-streams", true);
+ selector.set("sync-mode", 1 /*clock*/);
+
+ if (type == SubtitleStream)
+ selector.set("cache-buffers", true);
+}
+
+QGstPad QGstreamerMediaPlayer::TrackSelector::createInputPad()
+{
+ auto pad = selector.getRequestPad("sink_%u");
+ tracks.append(pad);
+ return pad;
+}
+
+void QGstreamerMediaPlayer::TrackSelector::removeAllInputPads()
+{
+ for (auto &pad : tracks)
+ selector.releaseRequestPad(pad);
+ tracks.clear();
+}
+
+void QGstreamerMediaPlayer::TrackSelector::removeInputPad(QGstPad pad)
+{
+ selector.releaseRequestPad(pad);
+ tracks.removeOne(pad);
+}
+
+QGstPad QGstreamerMediaPlayer::TrackSelector::inputPad(int index)
+{
+ if (index >= 0 && index < tracks.count())
+ return tracks[index];
+ return {};
+}
+
+QGstreamerMediaPlayer::TrackSelector &QGstreamerMediaPlayer::trackSelector(TrackType type)
+{
+ auto &ts = trackSelectors[type];
+ Q_ASSERT(ts.type == type);
+ return ts;
+}
+
+void QGstreamerMediaPlayer::updateBufferProgress(float newProgress)
+{
+ if (qFuzzyIsNull(newProgress - m_bufferProgress))
+ return;
+
+ m_bufferProgress = newProgress;
+ bufferProgressChanged(m_bufferProgress);
+}
+
+void QGstreamerMediaPlayer::disconnectDecoderHandlers()
+{
+ auto handlers = std::initializer_list<QGObjectHandlerScopedConnection *>{
+ &padAdded, &padRemoved, &sourceSetup, &uridecodebinElementAdded,
+ &unknownType, &elementAdded, &elementRemoved,
+ };
+ for (QGObjectHandlerScopedConnection *handler : handlers)
+ handler->disconnect();
+
+ decodeBinQueues = 0;
+}
+
+QMaybe<QPlatformMediaPlayer *> QGstreamerMediaPlayer::create(QMediaPlayer *parent)
+{
+ auto videoOutput = QGstreamerVideoOutput::create();
+ if (!videoOutput)
+ return videoOutput.error();
+
+ static const auto error =
+ qGstErrorMessageIfElementsNotAvailable("input-selector", "decodebin", "uridecodebin");
+ if (error)
+ return *error;
+
+ return new QGstreamerMediaPlayer(videoOutput.value(), parent);
+}
+
+QGstreamerMediaPlayer::QGstreamerMediaPlayer(QGstreamerVideoOutput *videoOutput,
+ QMediaPlayer *parent)
+ : QObject(parent),
+ QPlatformMediaPlayer(parent),
+ trackSelectors{ {
+ { VideoStream,
+ QGstElement::createFromFactory("input-selector", "videoInputSelector") },
+ { AudioStream,
+ QGstElement::createFromFactory("input-selector", "audioInputSelector") },
+ { SubtitleStream,
+ QGstElement::createFromFactory("input-selector", "subTitleInputSelector") },
+ } },
+ playerPipeline(QGstPipeline::create("playerPipeline")),
+ gstVideoOutput(videoOutput)
+{
+ playerPipeline.setFlushOnConfigChanges(true);
+
+ gstVideoOutput->setParent(this);
+ gstVideoOutput->setPipeline(playerPipeline);
+
+ for (auto &ts : trackSelectors)
+ playerPipeline.add(ts.selector);
+
+ playerPipeline.installMessageFilter(static_cast<QGstreamerBusMessageFilter *>(this));
+ playerPipeline.installMessageFilter(static_cast<QGstreamerSyncMessageFilter *>(this));
+
+ QGstClockHandle systemClock{
+ gst_system_clock_obtain(),
+ };
+
+ gst_pipeline_use_clock(playerPipeline.pipeline(), systemClock.get());
+
+ connect(&positionUpdateTimer, &QTimer::timeout, this, [this] {
+ updatePositionFromPipeline();
+ });
+}
+
+QGstreamerMediaPlayer::~QGstreamerMediaPlayer()
+{
+ playerPipeline.removeMessageFilter(static_cast<QGstreamerBusMessageFilter *>(this));
+ playerPipeline.removeMessageFilter(static_cast<QGstreamerSyncMessageFilter *>(this));
+ playerPipeline.setStateSync(GST_STATE_NULL);
+}
+
+std::chrono::nanoseconds QGstreamerMediaPlayer::pipelinePosition() const
+{
+ if (m_url.isEmpty())
+ return {};
+
+ Q_ASSERT(playerPipeline);
+ return playerPipeline.position();
+}
+
+void QGstreamerMediaPlayer::updatePositionFromPipeline()
+{
+ using namespace std::chrono;
+
+ positionChanged(round<milliseconds>(pipelinePosition()));
+}
+
+void QGstreamerMediaPlayer::updateDurationFromPipeline()
+{
+ std::optional<std::chrono::milliseconds> duration = playerPipeline.durationInMs();
+ if (!duration)
+ duration = std::chrono::milliseconds{ -1 };
+
+ if (duration != m_duration) {
+ qCDebug(qLcMediaPlayer) << "updateDurationFromPipeline" << *duration;
+ m_duration = *duration;
+ durationChanged(m_duration);
+ }
+}
+
+qint64 QGstreamerMediaPlayer::duration() const
+{
+ return m_duration.count();
+}
+
+float QGstreamerMediaPlayer::bufferProgress() const
+{
+ return m_bufferProgress;
+}
+
+QMediaTimeRange QGstreamerMediaPlayer::availablePlaybackRanges() const
+{
+ return QMediaTimeRange();
+}
+
+qreal QGstreamerMediaPlayer::playbackRate() const
+{
+ return playerPipeline.playbackRate();
+}
+
+void QGstreamerMediaPlayer::setPlaybackRate(qreal rate)
+{
+ if (rate == m_rate)
+ return;
+
+ m_rate = rate;
+
+ playerPipeline.setPlaybackRate(rate);
+ playbackRateChanged(rate);
+}
+
+void QGstreamerMediaPlayer::setPosition(qint64 pos)
+{
+ std::chrono::milliseconds posInMs{ pos };
+ setPosition(posInMs);
+}
+
+void QGstreamerMediaPlayer::setPosition(std::chrono::milliseconds pos)
+{
+ if (pos == playerPipeline.position())
+ return;
+ playerPipeline.finishStateChange();
+ playerPipeline.setPosition(pos);
+ qCDebug(qLcMediaPlayer) << Q_FUNC_INFO << pos << playerPipeline.positionInMs();
+ if (mediaStatus() == QMediaPlayer::EndOfMedia)
+ mediaStatusChanged(QMediaPlayer::LoadedMedia);
+ positionChanged(pos);
+}
+
+void QGstreamerMediaPlayer::play()
+{
+ if (state() == QMediaPlayer::PlayingState || m_url.isEmpty())
+ return;
+
+ if (state() != QMediaPlayer::PausedState)
+ resetCurrentLoop();
+
+ playerPipeline.setInStoppedState(false);
+ if (mediaStatus() == QMediaPlayer::EndOfMedia) {
+ playerPipeline.setPosition({});
+ positionChanged(0);
+ }
+
+ qCDebug(qLcMediaPlayer) << "play().";
+    GstStateChangeReturn ret = playerPipeline.setState(GST_STATE_PLAYING);
+ if (m_requiresSeekOnPlay) {
+        // Flushing the pipeline is required to apply track changes
+        // immediately when they happen while paused.
+ playerPipeline.flush();
+ m_requiresSeekOnPlay = false;
+ } else {
+ // we get an assertion failure during instant playback rate changes
+ // https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/3545
+ constexpr bool performInstantRateChange = false;
+ playerPipeline.applyPlaybackRate(/*instantRateChange=*/performInstantRateChange);
+ }
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ qCDebug(qLcMediaPlayer) << "Unable to set the pipeline to the playing state.";
+
+ positionUpdateTimer.start(100);
+ stateChanged(QMediaPlayer::PlayingState);
+}
+
+void QGstreamerMediaPlayer::pause()
+{
+ if (state() == QMediaPlayer::PausedState || m_url.isEmpty()
+ || m_resourceErrorState != ResourceErrorState::NoError)
+ return;
+
+ positionUpdateTimer.stop();
+ if (playerPipeline.inStoppedState()) {
+ playerPipeline.setInStoppedState(false);
+ playerPipeline.flush();
+ }
+    bool ret = playerPipeline.setStateSync(GST_STATE_PAUSED);
+    if (!ret)
+ qCDebug(qLcMediaPlayer) << "Unable to set the pipeline to the paused state.";
+ if (mediaStatus() == QMediaPlayer::EndOfMedia) {
+ playerPipeline.setPosition({});
+ positionChanged(0);
+ } else {
+ updatePositionFromPipeline();
+ }
+ stateChanged(QMediaPlayer::PausedState);
+
+ if (m_bufferProgress > 0 || !canTrackProgress())
+ mediaStatusChanged(QMediaPlayer::BufferedMedia);
+ else
+ mediaStatusChanged(QMediaPlayer::BufferingMedia);
+}
+
+void QGstreamerMediaPlayer::stop()
+{
+ using namespace std::chrono_literals;
+ if (state() == QMediaPlayer::StoppedState) {
+ if (position() != 0) {
+ playerPipeline.setPosition({});
+ positionChanged(0ms);
+ mediaStatusChanged(QMediaPlayer::LoadedMedia);
+ }
+ return;
+ }
+ stopOrEOS(false);
+}
+
+const QGstPipeline &QGstreamerMediaPlayer::pipeline() const
+{
+ return playerPipeline;
+}
+
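+// Common teardown path for stop() and end-of-stream. The pipeline is only
+// taken down to PAUSED (not NULL), presumably so playback can resume without
+// a full reload; the position is reset only on an explicit stop, so
+// EndOfMedia keeps the final position visible.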
+void QGstreamerMediaPlayer::stopOrEOS(bool eos)
+{
+ using namespace std::chrono_literals;
+
+ positionUpdateTimer.stop();
+ playerPipeline.setInStoppedState(true);
+ bool ret = playerPipeline.setStateSync(GST_STATE_PAUSED);
+ if (!ret)
+ qCDebug(qLcMediaPlayer) << "Unable to set the pipeline to the stopped state.";
+ if (!eos) {
+ playerPipeline.setPosition(0ms);
+ positionChanged(0ms);
+ }
+ stateChanged(QMediaPlayer::StoppedState);
+ if (eos)
+ mediaStatusChanged(QMediaPlayer::EndOfMedia);
+ else
+ mediaStatusChanged(QMediaPlayer::LoadedMedia);
+ m_initialBufferProgressSent = false;
+ bufferProgressChanged(0.f);
+}
+
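+// Runs a GST_FORMAT_TIME seeking query against the pipeline; triggered from
+// GST_MESSAGE_ASYNC_DONE once the pipeline has settled.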
+void QGstreamerMediaPlayer::detectPipelineIsSeekable()
+{
+ qCDebug(qLcMediaPlayer) << "detectPipelineIsSeekable";
+ QGstQueryHandle query{
+ gst_query_new_seeking(GST_FORMAT_TIME),
+ QGstQueryHandle::HasRef,
+ };
+ gboolean canSeek = false;
+ if (gst_element_query(playerPipeline.element(), query.get())) {
+ gst_query_parse_seeking(query.get(), nullptr, &canSeek, nullptr, nullptr);
+ qCDebug(qLcMediaPlayer) << " pipeline is seekable:" << canSeek;
+ } else {
+ qCWarning(qLcMediaPlayer) << " query for seekable failed.";
+ }
+ seekableChanged(canSeek);
+}
+
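+// Central dispatcher for asynchronous bus messages. Always returns false so
+// that any other installed bus message filters can also see the message.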
+bool QGstreamerMediaPlayer::processBusMessage(const QGstreamerMessage &message)
+{
+ qCDebug(qLcMediaPlayer) << "received bus message:" << message;
+
+ GstMessage* gm = message.message();
+ switch (message.type()) {
+ case GST_MESSAGE_TAG: {
+ // #### This isn't ideal. We shouldn't catch stream specific tags here, rather the global ones
+ QGstTagListHandle tagList;
+ gst_message_parse_tag(gm, &tagList);
+
+ qCDebug(qLcMediaPlayer) << " Got tags: " << tagList.get();
+
+ QMediaMetaData originalMetaData = m_metaData;
+ extendMetaDataFromTagList(m_metaData, tagList);
+ if (originalMetaData != m_metaData)
+ metaDataChanged();
+
+ if (gstVideoOutput) {
+ QVariant rotation = m_metaData.value(QMediaMetaData::Orientation);
+ gstVideoOutput->setRotation(rotation.value<QtVideo::Rotation>());
+ }
+ break;
+ }
+ case GST_MESSAGE_DURATION_CHANGED: {
+ if (!prerolling)
+ updateDurationFromPipeline();
+
+ return false;
+ }
+ case GST_MESSAGE_EOS: {
+ positionChanged(m_duration);
+ if (doLoop()) {
+ setPosition(0);
+ break;
+ }
+ stopOrEOS(true);
+ break;
+ }
+ case GST_MESSAGE_BUFFERING: {
+ int progress = 0;
+ gst_message_parse_buffering(gm, &progress);
+
+ qCDebug(qLcMediaPlayer) << " buffering message: " << progress;
+
+ if (state() != QMediaPlayer::StoppedState && !prerolling) {
+ if (!m_initialBufferProgressSent) {
+ mediaStatusChanged(QMediaPlayer::BufferingMedia);
+ m_initialBufferProgressSent = true;
+ }
+
+ if (m_bufferProgress > 0 && progress == 0)
+ mediaStatusChanged(QMediaPlayer::StalledMedia);
+ else if (progress >= 50)
+ // QTBUG-124517: rethink buffering
+ mediaStatusChanged(QMediaPlayer::BufferedMedia);
+ else
+ mediaStatusChanged(QMediaPlayer::BufferingMedia);
+ }
+
+ updateBufferProgress(progress * 0.01);
+ break;
+ }
+ case GST_MESSAGE_STATE_CHANGED: {
+ if (message.source() != playerPipeline)
+ return false;
+
+ GstState oldState;
+ GstState newState;
+ GstState pending;
+
+ gst_message_parse_state_changed(gm, &oldState, &newState, &pending);
+ qCDebug(qLcMediaPlayer) << " state changed message from"
+ << QCompactGstMessageAdaptor(message);
+
+ switch (newState) {
+ case GST_STATE_VOID_PENDING:
+ case GST_STATE_NULL:
+ case GST_STATE_READY:
+ break;
+ case GST_STATE_PAUSED: {
+ if (prerolling) {
+ qCDebug(qLcMediaPlayer) << "Preroll done, setting status to Loaded";
+ playerPipeline.dumpGraph("playerPipelinePrerollDone");
+
+ prerolling = false;
+ updateDurationFromPipeline();
+
+ m_metaData.insert(QMediaMetaData::Duration, duration());
+ m_metaData.insert(QMediaMetaData::Url, m_url);
+ parseStreamsAndMetadata();
+ metaDataChanged();
+
+ tracksChanged();
+ mediaStatusChanged(QMediaPlayer::LoadedMedia);
+
+ if (!playerPipeline.inStoppedState()) {
+ Q_ASSERT(!m_initialBufferProgressSent);
+
+ bool immediatelySendBuffered = !canTrackProgress() || m_bufferProgress > 0;
+ mediaStatusChanged(QMediaPlayer::BufferingMedia);
+ m_initialBufferProgressSent = true;
+ if (immediatelySendBuffered)
+ mediaStatusChanged(QMediaPlayer::BufferedMedia);
+ }
+ }
+
+ break;
+ }
+ case GST_STATE_PLAYING: {
+ if (!m_initialBufferProgressSent) {
+ bool immediatelySendBuffered = !canTrackProgress() || m_bufferProgress > 0;
+ mediaStatusChanged(QMediaPlayer::BufferingMedia);
+ m_initialBufferProgressSent = true;
+ if (immediatelySendBuffered)
+ mediaStatusChanged(QMediaPlayer::BufferedMedia);
+ }
+ break;
+ }
+ }
+ break;
+ }
+ case GST_MESSAGE_ERROR: {
+ qCDebug(qLcMediaPlayer) << " error" << QCompactGstMessageAdaptor(message);
+
+ QUniqueGErrorHandle err;
+ QUniqueGStringHandle debug;
+ gst_message_parse_error(gm, &err, &debug);
+ GQuark errorDomain = err.get()->domain;
+ gint errorCode = err.get()->code;
+
+ if (errorDomain == GST_STREAM_ERROR) {
+ if (errorCode == GST_STREAM_ERROR_CODEC_NOT_FOUND)
+ error(QMediaPlayer::FormatError, tr("Cannot play stream of type: <unknown>"));
+            else
+                error(QMediaPlayer::FormatError, QString::fromUtf8(err.get()->message));
+ } else if (errorDomain == GST_RESOURCE_ERROR) {
+ if (errorCode == GST_RESOURCE_ERROR_NOT_FOUND) {
+ if (m_resourceErrorState != ResourceErrorState::ErrorReported) {
+ // gstreamer seems to deliver multiple GST_RESOURCE_ERROR_NOT_FOUND events
+ error(QMediaPlayer::ResourceError, QString::fromUtf8(err.get()->message));
+ m_resourceErrorState = ResourceErrorState::ErrorReported;
+ m_url.clear();
+ }
+ } else {
+ error(QMediaPlayer::ResourceError, QString::fromUtf8(err.get()->message));
+ }
+ } else {
+ playerPipeline.dumpGraph("error");
+ }
+ mediaStatusChanged(QMediaPlayer::InvalidMedia);
+ break;
+ }
+
+ case GST_MESSAGE_WARNING:
+ qCWarning(qLcMediaPlayer) << "Warning:" << QCompactGstMessageAdaptor(message);
+ playerPipeline.dumpGraph("warning");
+ break;
+
+ case GST_MESSAGE_INFO:
+ if (qLcMediaPlayer().isDebugEnabled())
+ qCDebug(qLcMediaPlayer) << "Info:" << QCompactGstMessageAdaptor(message);
+ break;
+
+ case GST_MESSAGE_SEGMENT_START: {
+ qCDebug(qLcMediaPlayer) << " segment start message, updating position";
+ QGstStructureView structure(gst_message_get_structure(gm));
+ auto p = structure["position"].toInt64();
+ if (p) {
+ std::chrono::milliseconds position{
+ (*p) / 1000000,
+ };
+ positionChanged(position);
+ }
+ break;
+ }
+ case GST_MESSAGE_ELEMENT: {
+ QGstStructureView structure(gst_message_get_structure(gm));
+ auto type = structure.name();
+ if (type == "stream-topology")
+ topology = structure.clone();
+
+ break;
+ }
+
+ case GST_MESSAGE_ASYNC_DONE: {
+ detectPipelineIsSeekable();
+ break;
+ }
+
+ default:
+        // qCDebug(qLcMediaPlayer) << "    default message handler, doing nothing";
+ break;
+ }
+
+ return false;
+}
+
+bool QGstreamerMediaPlayer::processSyncMessage(const QGstreamerMessage &message)
+{
+#if QT_CONFIG(gstreamer_gl)
+ if (message.type() != GST_MESSAGE_NEED_CONTEXT)
+ return false;
+ const gchar *type = nullptr;
+    gst_message_parse_context_type(message.message(), &type);
+    if (!type || strcmp(type, GST_GL_DISPLAY_CONTEXT_TYPE) != 0)
+ return false;
+ if (!gstVideoOutput || !gstVideoOutput->gstreamerVideoSink())
+ return false;
+ auto *context = gstVideoOutput->gstreamerVideoSink()->gstGlDisplayContext();
+ if (!context)
+ return false;
+ gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message.message())), context);
+ playerPipeline.dumpGraph("need_context");
+ return true;
+#else
+ Q_UNUSED(message);
+ return false;
+#endif
+}
+
+QUrl QGstreamerMediaPlayer::media() const
+{
+ return m_url;
+}
+
+const QIODevice *QGstreamerMediaPlayer::mediaStream() const
+{
+ return m_stream;
+}
+
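+// Invoked for every dynamic pad the decoder exposes. The pad is classified
+// by its caps and linked into the input-selector for the matching track
+// type; the first track of each type is activated immediately.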
+void QGstreamerMediaPlayer::decoderPadAdded(const QGstElement &src, const QGstPad &pad)
+{
+ if (src != decoder)
+ return;
+
+ auto caps = pad.currentCaps();
+ auto type = caps.at(0).name();
+ qCDebug(qLcMediaPlayer) << "Received new pad" << pad.name() << "from" << src.name() << "type" << type;
+ qCDebug(qLcMediaPlayer) << " " << caps;
+
+ TrackType streamType = NTrackTypes;
+ if (type.startsWith("video/x-raw")) {
+ streamType = VideoStream;
+ } else if (type.startsWith("audio/x-raw")) {
+ streamType = AudioStream;
+ } else if (type.startsWith("text/")) {
+ streamType = SubtitleStream;
+ } else {
+ qCWarning(qLcMediaPlayer) << "Ignoring unknown media stream:" << pad.name() << type;
+ return;
+ }
+
+ auto &ts = trackSelector(streamType);
+ QGstPad sinkPad = ts.createInputPad();
+ if (!pad.link(sinkPad)) {
+ qCWarning(qLcMediaPlayer) << "Failed to add track, cannot link pads";
+ return;
+ }
+ qCDebug(qLcMediaPlayer) << "Adding track";
+
+ if (ts.trackCount() == 1) {
+ if (streamType == VideoStream) {
+ connectOutput(ts);
+ ts.setActiveInputPad(sinkPad);
+ videoAvailableChanged(true);
+        } else if (streamType == AudioStream) {
+ connectOutput(ts);
+ ts.setActiveInputPad(sinkPad);
+ audioAvailableChanged(true);
+ }
+ }
+
+ if (!prerolling)
+ tracksChanged();
+
+ decoderOutputMap.emplace(pad, sinkPad);
+}
+
+void QGstreamerMediaPlayer::decoderPadRemoved(const QGstElement &src, const QGstPad &pad)
+{
+ if (src != decoder)
+ return;
+
+ qCDebug(qLcMediaPlayer) << "Removed pad" << pad.name() << "from" << src.name();
+
+ auto it = decoderOutputMap.find(pad);
+ if (it == decoderOutputMap.end())
+ return;
+ QGstPad track = it->second;
+
+ auto ts = std::find_if(std::begin(trackSelectors), std::end(trackSelectors),
+ [&](TrackSelector &ts){ return ts.selector == track.parent(); });
+ if (ts == std::end(trackSelectors))
+ return;
+
+ qCDebug(qLcMediaPlayer) << " was linked to pad" << track.name() << "from" << ts->selector.name();
+ ts->removeInputPad(track);
+
+ if (ts->trackCount() == 0) {
+ removeOutput(*ts);
+ if (ts->type == AudioStream)
+ audioAvailableChanged(false);
+ else if (ts->type == VideoStream)
+ videoAvailableChanged(false);
+ }
+
+ if (!prerolling)
+ tracksChanged();
+}
+
+void QGstreamerMediaPlayer::removeAllOutputs()
+{
+ for (auto &ts : trackSelectors) {
+ removeOutput(ts);
+ ts.removeAllInputPads();
+ }
+ audioAvailableChanged(false);
+ videoAvailableChanged(false);
+}
+
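+// Lazily attaches the output element for a track type (the audio/video sink,
+// or the subtitle sink on the video output) the first time it is needed.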
+void QGstreamerMediaPlayer::connectOutput(TrackSelector &ts)
+{
+ if (ts.isConnected)
+ return;
+
+ QGstElement e;
+ switch (ts.type) {
+ case AudioStream:
+ e = gstAudioOutput ? gstAudioOutput->gstElement() : QGstElement{};
+ break;
+ case VideoStream:
+ e = gstVideoOutput ? gstVideoOutput->gstElement() : QGstElement{};
+ break;
+ case SubtitleStream:
+ if (gstVideoOutput)
+ gstVideoOutput->linkSubtitleStream(ts.selector);
+ break;
+ default:
+ return;
+ }
+
+ if (!e.isNull()) {
+ qCDebug(qLcMediaPlayer) << "connecting output for track type" << ts.type;
+ playerPipeline.add(e);
+ qLinkGstElements(ts.selector, e);
+ e.syncStateWithParent();
+ }
+
+ ts.isConnected = true;
+}
+
+void QGstreamerMediaPlayer::removeOutput(TrackSelector &ts)
+{
+ if (!ts.isConnected)
+ return;
+
+ QGstElement e;
+ switch (ts.type) {
+ case AudioStream:
+ e = gstAudioOutput ? gstAudioOutput->gstElement() : QGstElement{};
+ break;
+ case VideoStream:
+ e = gstVideoOutput ? gstVideoOutput->gstElement() : QGstElement{};
+ break;
+ case SubtitleStream:
+ if (gstVideoOutput)
+ gstVideoOutput->unlinkSubtitleStream();
+ break;
+ default:
+ break;
+ }
+
+ if (!e.isNull()) {
+ qCDebug(qLcMediaPlayer) << "removing output for track type" << ts.type;
+ playerPipeline.stopAndRemoveElements(e);
+ }
+
+ ts.isConnected = false;
+}
+
+void QGstreamerMediaPlayer::removeDynamicPipelineElements()
+{
+ for (QGstElement *element : { &src, &decoder }) {
+ if (element->isNull())
+ continue;
+
+ element->setStateSync(GstState::GST_STATE_NULL);
+ playerPipeline.remove(*element);
+ *element = QGstElement{};
+ }
+}
+
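+// Pre-GStreamer-1.22 fallback: propagate post-stream-topology to every
+// decodebin that uridecodebin creates internally (see setMedia()).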
+void QGstreamerMediaPlayer::uridecodebinElementAddedCallback(GstElement * /*uridecodebin*/,
+ GstElement *child,
+ QGstreamerMediaPlayer *)
+{
+ QGstElement c(child, QGstElement::NeedsRef);
+ qCDebug(qLcMediaPlayer) << "New element added to uridecodebin:" << c.name();
+
+ static const GType decodeBinType = [] {
+ QGstElementFactoryHandle factory = QGstElement::findFactory("decodebin");
+ return gst_element_factory_get_element_type(factory.get());
+ }();
+
+ if (c.type() == decodeBinType) {
+ qCDebug(qLcMediaPlayer) << " -> setting post-stream-topology property";
+ c.set("post-stream-topology", true);
+ }
+}
+
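+// Tunes RTSP sources. The defaults (40 ms latency, drop-on-latency enabled,
+// retransmission disabled) can be overridden via the QT_MEDIA_RTSP_LATENCY,
+// QT_MEDIA_RTSP_DROP_ON_LATENCY and QT_MEDIA_RTSP_DO_RETRANSMISSION
+// environment variables.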
+void QGstreamerMediaPlayer::sourceSetupCallback(GstElement *uridecodebin, GstElement *source, QGstreamerMediaPlayer *that)
+{
+ Q_UNUSED(uridecodebin)
+ Q_UNUSED(that)
+
+ qCDebug(qLcMediaPlayer) << "Setting up source:" << g_type_name_from_instance((GTypeInstance*)source);
+
+ if (std::string_view("GstRTSPSrc") == g_type_name_from_instance((GTypeInstance *)source)) {
+ QGstElement s(source, QGstElement::NeedsRef);
+ int latency{40};
+ bool ok{false};
+ int v = qEnvironmentVariableIntValue("QT_MEDIA_RTSP_LATENCY", &ok);
+ if (ok)
+ latency = v;
+ qCDebug(qLcMediaPlayer) << " -> setting source latency to:" << latency << "ms";
+ s.set("latency", latency);
+
+ bool drop{true};
+ v = qEnvironmentVariableIntValue("QT_MEDIA_RTSP_DROP_ON_LATENCY", &ok);
+ if (ok && v == 0)
+ drop = false;
+ qCDebug(qLcMediaPlayer) << " -> setting drop-on-latency to:" << drop;
+ s.set("drop-on-latency", drop);
+
+ bool retrans{false};
+ v = qEnvironmentVariableIntValue("QT_MEDIA_RTSP_DO_RETRANSMISSION", &ok);
+ if (ok && v != 0)
+ retrans = true;
+ qCDebug(qLcMediaPlayer) << " -> setting do-retransmission to:" << retrans;
+ s.set("do-retransmission", retrans);
+ }
+}
+
+void QGstreamerMediaPlayer::unknownTypeCallback(GstElement *decodebin, GstPad *pad, GstCaps *caps,
+ QGstreamerMediaPlayer *self)
+{
+ Q_UNUSED(decodebin)
+ Q_UNUSED(pad)
+ qCDebug(qLcMediaPlayer) << "Unknown type:" << caps;
+
+ QMetaObject::invokeMethod(self, [self] {
+ self->stop();
+ });
+}
+
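+// decodebin queues are counted as they are added and removed; buffering
+// progress is only considered trackable while at least one queue exists
+// (see canTrackProgress()).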
+static bool isQueue(const QGstElement &element)
+{
+ static const GType queueType = [] {
+ QGstElementFactoryHandle factory = QGstElement::findFactory("queue");
+ return gst_element_factory_get_element_type(factory.get());
+ }();
+
+ static const GType multiQueueType = [] {
+ QGstElementFactoryHandle factory = QGstElement::findFactory("multiqueue");
+ return gst_element_factory_get_element_type(factory.get());
+ }();
+
+ return element.type() == queueType || element.type() == multiQueueType;
+}
+
+void QGstreamerMediaPlayer::decodebinElementAddedCallback(GstBin * /*decodebin*/,
+ GstBin * /*sub_bin*/, GstElement *child,
+ QGstreamerMediaPlayer *self)
+{
+ QGstElement c(child, QGstElement::NeedsRef);
+ if (isQueue(c))
+ self->decodeBinQueues += 1;
+}
+
+void QGstreamerMediaPlayer::decodebinElementRemovedCallback(GstBin * /*decodebin*/,
+ GstBin * /*sub_bin*/, GstElement *child,
+ QGstreamerMediaPlayer *self)
+{
+ QGstElement c(child, QGstElement::NeedsRef);
+ if (isQueue(c))
+ self->decodeBinQueues -= 1;
+}
+
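+// Tears the dynamic part of the pipeline down and rebuilds it for the new
+// media: an appsrc + decodebin pair when playing from a QIODevice, a single
+// uridecodebin otherwise.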
+void QGstreamerMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
+{
+ using namespace std::chrono_literals;
+
+ qCDebug(qLcMediaPlayer) << Q_FUNC_INFO << "setting location to" << content;
+
+ prerolling = true;
+ m_resourceErrorState = ResourceErrorState::NoError;
+
+ bool ret = playerPipeline.setStateSync(GST_STATE_NULL);
+ if (!ret)
+ qCDebug(qLcMediaPlayer) << "Unable to set the pipeline to the stopped state.";
+
+ m_url = content;
+ m_stream = stream;
+
+ removeDynamicPipelineElements();
+ disconnectDecoderHandlers();
+ removeAllOutputs();
+ seekableChanged(false);
+ Q_ASSERT(playerPipeline.inStoppedState());
+
+ if (m_duration != 0ms) {
+ m_duration = 0ms;
+ durationChanged(0ms);
+ }
+ stateChanged(QMediaPlayer::StoppedState);
+ if (position() != 0)
+ positionChanged(0ms);
+ if (!m_metaData.isEmpty()) {
+ m_metaData.clear();
+ metaDataChanged();
+ }
+
+ if (content.isEmpty() && !stream)
+ mediaStatusChanged(QMediaPlayer::NoMedia);
+
+ if (content.isEmpty())
+ return;
+
+ if (m_stream) {
+ if (!m_appSrc) {
+ auto maybeAppSrc = QGstAppSource::create(this);
+ if (maybeAppSrc) {
+ m_appSrc = maybeAppSrc.value();
+ } else {
+ error(QMediaPlayer::ResourceError, maybeAppSrc.error());
+ return;
+ }
+ }
+ src = m_appSrc->element();
+ decoder = QGstElement::createFromFactory("decodebin", "decoder");
+ if (!decoder) {
+ error(QMediaPlayer::ResourceError, qGstErrorMessageCannotFindElement("decodebin"));
+ return;
+ }
+ decoder.set("post-stream-topology", true);
+ decoder.set("use-buffering", true);
+ unknownType = decoder.connect("unknown-type", GCallback(unknownTypeCallback), this);
+ elementAdded = decoder.connect("deep-element-added",
+ GCallback(decodebinElementAddedCallback), this);
+        elementRemoved = decoder.connect("deep-element-removed",
+                                         GCallback(decodebinElementRemovedCallback), this);
+
+ playerPipeline.add(src, decoder);
+ qLinkGstElements(src, decoder);
+
+ m_appSrc->setup(m_stream);
+ seekableChanged(!stream->isSequential());
+ } else {
+ // use uridecodebin
+ decoder = QGstElement::createFromFactory("uridecodebin", "decoder");
+ if (!decoder) {
+ error(QMediaPlayer::ResourceError, qGstErrorMessageCannotFindElement("uridecodebin"));
+ return;
+ }
+ playerPipeline.add(decoder);
+
+ constexpr bool hasPostStreamTopology = GST_CHECK_VERSION(1, 22, 0);
+ if constexpr (hasPostStreamTopology) {
+ decoder.set("post-stream-topology", true);
+ } else {
+            // uridecodebin doesn't expose the post-stream-topology property before
+            // GStreamer 1.22, so propagate it to each internally created decodebin instead.
+ uridecodebinElementAdded = decoder.connect(
+ "element-added", GCallback(uridecodebinElementAddedCallback), this);
+ }
+
+ sourceSetup = decoder.connect("source-setup", GCallback(sourceSetupCallback), this);
+ unknownType = decoder.connect("unknown-type", GCallback(unknownTypeCallback), this);
+
+ decoder.set("uri", content.toEncoded().constData());
+ decoder.set("use-buffering", true);
+
+ constexpr int mb = 1024 * 1024;
+ decoder.set("ring-buffer-max-size", 2 * mb);
+
+ updateBufferProgress(0.f);
+
+ elementAdded = decoder.connect("deep-element-added",
+ GCallback(decodebinElementAddedCallback), this);
+        elementRemoved = decoder.connect("deep-element-removed",
+                                         GCallback(decodebinElementRemovedCallback), this);
+ }
+ padAdded = decoder.onPadAdded<&QGstreamerMediaPlayer::decoderPadAdded>(this);
+ padRemoved = decoder.onPadRemoved<&QGstreamerMediaPlayer::decoderPadRemoved>(this);
+
+ mediaStatusChanged(QMediaPlayer::LoadingMedia);
+ if (!playerPipeline.setStateSync(GST_STATE_PAUSED)) {
+ qCWarning(qLcMediaPlayer) << "Unable to set the pipeline to the paused state.";
+ // Note: no further error handling: errors will be delivered via a GstMessage
+ return;
+ }
+
+ playerPipeline.setPosition(0ms);
+ positionChanged(0ms);
+}
+
+void QGstreamerMediaPlayer::setAudioOutput(QPlatformAudioOutput *output)
+{
+ if (gstAudioOutput == output)
+ return;
+
+ auto &ts = trackSelector(AudioStream);
+
+ playerPipeline.modifyPipelineWhileNotRunning([&] {
+ if (gstAudioOutput)
+ removeOutput(ts);
+
+ gstAudioOutput = static_cast<QGstreamerAudioOutput *>(output);
+ if (gstAudioOutput)
+ connectOutput(ts);
+ });
+}
+
+QMediaMetaData QGstreamerMediaPlayer::metaData() const
+{
+ return m_metaData;
+}
+
+void QGstreamerMediaPlayer::setVideoSink(QVideoSink *sink)
+{
+ gstVideoOutput->setVideoSink(sink);
+}
+
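+// Follows the "next" links of a stream-topology structure to its last
+// element, i.e. the node whose "next" field fans out into the individual
+// streams.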
+static QGstStructureView endOfChain(const QGstStructureView &s)
+{
+ QGstStructureView e = s;
+    for (;;) {
+        auto next = e["next"].toStructure();
+        if (next.isNull())
+            break;
+        e = next;
+    }
+ return e;
+}
+
+void QGstreamerMediaPlayer::parseStreamsAndMetadata()
+{
+ qCDebug(qLcMediaPlayer) << "============== parse topology ============";
+
+ if (!topology) {
+ qCDebug(qLcMediaPlayer) << " null topology";
+ return;
+ }
+
+ QGstStructureView topologyView{ topology };
+
+ QGstCaps caps = topologyView.caps();
+ extendMetaDataFromCaps(m_metaData, caps);
+
+ QGstTagListHandle tagList = QGstStructureView{ topology }.tags();
+ if (tagList)
+ extendMetaDataFromTagList(m_metaData, tagList);
+
+ QGstStructureView demux = endOfChain(topologyView);
+ QGValue next = demux["next"];
+ if (!next.isList()) {
+ qCDebug(qLcMediaPlayer) << " no additional streams";
+ metaDataChanged();
+ return;
+ }
+
+ // collect stream info
+ int size = next.listSize();
+ for (int i = 0; i < size; ++i) {
+ auto val = next.at(i);
+ caps = val.toStructure().caps();
+
+ extendMetaDataFromCaps(m_metaData, caps);
+
+ QGstStructureView structure = caps.at(0);
+
+ if (structure.name().startsWith("video/")) {
+ QSize nativeSize = structure.nativeSize();
+ gstVideoOutput->setNativeSize(nativeSize);
+ }
+ }
+
+ auto sinkPad = trackSelector(VideoStream).activeInputPad();
+ if (sinkPad) {
+ QGstTagListHandle tagList = sinkPad.tags();
+ if (tagList)
+ qCDebug(qLcMediaPlayer) << " tags=" << tagList.get();
+ else
+ qCDebug(qLcMediaPlayer) << " tags=(null)";
+ }
+
+ qCDebug(qLcMediaPlayer) << "============== end parse topology ============";
+ playerPipeline.dumpGraph("playback");
+}
+
+int QGstreamerMediaPlayer::trackCount(QPlatformMediaPlayer::TrackType type)
+{
+ return trackSelector(type).trackCount();
+}
+
+QMediaMetaData QGstreamerMediaPlayer::trackMetaData(QPlatformMediaPlayer::TrackType type, int index)
+{
+ auto track = trackSelector(type).inputPad(index);
+ if (!track)
+ return {};
+
+ QGstTagListHandle tagList = track.tags();
+ return taglistToMetaData(tagList);
+}
+
+int QGstreamerMediaPlayer::activeTrack(TrackType type)
+{
+ return trackSelector(type).activeInputIndex();
+}
+
+void QGstreamerMediaPlayer::setActiveTrack(TrackType type, int index)
+{
+ auto &ts = trackSelector(type);
+ auto track = ts.inputPad(index);
+ if (track.isNull() && index != -1) {
+ qCWarning(qLcMediaPlayer) << "Attempt to set an incorrect index" << index
+ << "for the track type" << type;
+ return;
+ }
+
+ qCDebug(qLcMediaPlayer) << "Setting the index" << index << "for the track type" << type;
+ if (type == QPlatformMediaPlayer::SubtitleStream)
+ gstVideoOutput->flushSubtitles();
+
+ playerPipeline.modifyPipelineWhileNotRunning([&] {
+ if (track.isNull()) {
+ removeOutput(ts);
+ } else {
+ ts.setActiveInputPad(track);
+ connectOutput(ts);
+ }
+ });
+
+ // seek to force an immediate change of the stream
+ if (playerPipeline.state() == GST_STATE_PLAYING)
+ playerPipeline.flush();
+ else
+ m_requiresSeekOnPlay = true;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer_p.h b/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer_p.h
new file mode 100644
index 000000000..28e7a0c31
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer_p.h
@@ -0,0 +1,199 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTREAMERMEDIAPLAYER_P_H
+#define QGSTREAMERMEDIAPLAYER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qstack.h>
+#include <private/qplatformmediaplayer_p.h>
+#include <private/qtmultimediaglobal_p.h>
+#include <private/qmultimediautils_p.h>
+#include <qurl.h>
+#include <common/qgst_p.h>
+#include <common/qgstpipeline_p.h>
+
+#include <QtCore/qtimer.h>
+
+#include <array>
+
+QT_BEGIN_NAMESPACE
+
+class QNetworkAccessManager;
+class QGstreamerMessage;
+class QGstAppSource;
+class QGstreamerAudioOutput;
+class QGstreamerVideoOutput;
+
+class QGstreamerMediaPlayer : public QObject,
+ public QPlatformMediaPlayer,
+ public QGstreamerBusMessageFilter,
+ public QGstreamerSyncMessageFilter
+{
+public:
+ static QMaybe<QPlatformMediaPlayer *> create(QMediaPlayer *parent = nullptr);
+ ~QGstreamerMediaPlayer();
+
+ qint64 duration() const override;
+
+ float bufferProgress() const override;
+
+ QMediaTimeRange availablePlaybackRanges() const override;
+
+ qreal playbackRate() const override;
+ void setPlaybackRate(qreal rate) override;
+
+ QUrl media() const override;
+ const QIODevice *mediaStream() const override;
+ void setMedia(const QUrl &, QIODevice *) override;
+
+ bool streamPlaybackSupported() const override { return true; }
+
+ void setAudioOutput(QPlatformAudioOutput *output) override;
+
+ QMediaMetaData metaData() const override;
+
+ void setVideoSink(QVideoSink *sink) override;
+
+ int trackCount(TrackType) override;
+ QMediaMetaData trackMetaData(TrackType /*type*/, int /*streamNumber*/) override;
+ int activeTrack(TrackType) override;
+ void setActiveTrack(TrackType, int /*streamNumber*/) override;
+
+ void setPosition(qint64 pos) override;
+ void setPosition(std::chrono::milliseconds pos);
+
+ void play() override;
+ void pause() override;
+ void stop() override;
+
+ const QGstPipeline &pipeline() const;
+
+ bool processBusMessage(const QGstreamerMessage& message) override;
+ bool processSyncMessage(const QGstreamerMessage& message) override;
+
+private:
+ QGstreamerMediaPlayer(QGstreamerVideoOutput *videoOutput, QMediaPlayer *parent);
+
+ struct TrackSelector
+ {
+ TrackSelector(TrackType, QGstElement selector);
+ QGstPad createInputPad();
+ void removeInputPad(QGstPad pad);
+ void removeAllInputPads();
+ QGstPad inputPad(int index);
+ int activeInputIndex() const { return isConnected ? tracks.indexOf(activeInputPad()) : -1; }
+ QGstPad activeInputPad() const
+ {
+ return isConnected ? QGstPad{ selector.getObject("active-pad") } : QGstPad{};
+ }
+ void setActiveInputPad(QGstPad input) { selector.set("active-pad", input); }
+ int trackCount() const { return tracks.count(); }
+
+ QGstElement selector;
+ TrackType type;
+ QList<QGstPad> tracks;
+ bool isConnected = false;
+ };
+
+ friend class QGstreamerStreamsControl;
+ void decoderPadAdded(const QGstElement &src, const QGstPad &pad);
+ void decoderPadRemoved(const QGstElement &src, const QGstPad &pad);
+ void disconnectDecoderHandlers();
+ static void uridecodebinElementAddedCallback(GstElement *uridecodebin, GstElement *child,
+ QGstreamerMediaPlayer *that);
+ static void sourceSetupCallback(GstElement *uridecodebin, GstElement *source,
+ QGstreamerMediaPlayer *that);
+ static void unknownTypeCallback(GstElement *decodebin, GstPad *pad, GstCaps *caps,
+ QGstreamerMediaPlayer *self);
+ static void decodebinElementAddedCallback(GstBin *decodebin, GstBin *sub_bin,
+ GstElement *element, QGstreamerMediaPlayer *self);
+ static void decodebinElementRemovedCallback(GstBin *decodebin, GstBin *sub_bin,
+ GstElement *element, QGstreamerMediaPlayer *self);
+
+ void parseStreamsAndMetadata();
+ void connectOutput(TrackSelector &ts);
+ void removeOutput(TrackSelector &ts);
+ void removeDynamicPipelineElements();
+ void removeAllOutputs();
+ void stopOrEOS(bool eos);
+ bool canTrackProgress() const { return decodeBinQueues > 0; }
+ void detectPipelineIsSeekable();
+
+ std::chrono::nanoseconds pipelinePosition() const;
+ void updatePositionFromPipeline();
+ void updateDurationFromPipeline();
+ void updateBufferProgress(float);
+
+ std::array<TrackSelector, NTrackTypes> trackSelectors;
+ TrackSelector &trackSelector(TrackType type);
+
+ QMediaMetaData m_metaData;
+
+ QUrl m_url;
+ QIODevice *m_stream = nullptr;
+
+ enum class ResourceErrorState : uint8_t {
+ NoError,
+ ErrorOccurred,
+ ErrorReported,
+ };
+
+ bool prerolling = false;
+ bool m_requiresSeekOnPlay = false;
+ bool m_initialBufferProgressSent = false;
+ ResourceErrorState m_resourceErrorState = ResourceErrorState::NoError;
+ float m_rate = 1.f;
+ float m_bufferProgress = 0.f;
+ std::chrono::milliseconds m_duration{};
+ QTimer positionUpdateTimer;
+
+ QGstAppSource *m_appSrc = nullptr;
+
+ QUniqueGstStructureHandle topology;
+
+ // Gst elements
+ QGstPipeline playerPipeline;
+ QGstElement src;
+ QGstElement decoder;
+
+ QGstreamerAudioOutput *gstAudioOutput = nullptr;
+ QGstreamerVideoOutput *gstVideoOutput = nullptr;
+
+ // QGstElement streamSynchronizer;
+
+ struct QGstPadLess
+ {
+ bool operator()(const QGstPad &lhs, const QGstPad &rhs) const
+ {
+ return lhs.pad() < rhs.pad();
+ }
+ };
+
+ std::map<QGstPad, QGstPad, QGstPadLess> decoderOutputMap;
+
+ // decoder connections
+ QGObjectHandlerScopedConnection padAdded;
+ QGObjectHandlerScopedConnection padRemoved;
+ QGObjectHandlerScopedConnection sourceSetup;
+ QGObjectHandlerScopedConnection uridecodebinElementAdded;
+ QGObjectHandlerScopedConnection unknownType;
+ QGObjectHandlerScopedConnection elementAdded;
+ QGObjectHandlerScopedConnection elementRemoved;
+
+ int decodeBinQueues = 0;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamermessage_p.h b/src/plugins/multimedia/gstreamer/common/qgstreamermessage_p.h
new file mode 100644
index 000000000..9836bd0cb
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamermessage_p.h
@@ -0,0 +1,55 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTREAMERMESSAGE_P_H
+#define QGSTREAMERMESSAGE_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qtmultimediaglobal_p.h>
+#include <common/qgst_p.h>
+
+QT_BEGIN_NAMESPACE
+
+// Required for QDoc workaround
+class QString;
+
+template <>
+struct QGstPointerImpl::QGstRefcountingAdaptor<GstMessage>
+{
+ static void ref(GstMessage *arg) noexcept { gst_message_ref(arg); }
+ static void unref(GstMessage *arg) noexcept { gst_message_unref(arg); }
+};
+
+class QGstreamerMessage : public QGstPointerImpl::QGstObjectWrapper<GstMessage>
+{
+ using BaseClass = QGstPointerImpl::QGstObjectWrapper<GstMessage>;
+
+public:
+ using BaseClass::BaseClass;
+ QGstreamerMessage(const QGstreamerMessage &) = default;
+ QGstreamerMessage(QGstreamerMessage &&) noexcept = default;
+ QGstreamerMessage &operator=(const QGstreamerMessage &) = default;
+ QGstreamerMessage &operator=(QGstreamerMessage &&) noexcept = default;
+
+ GstMessageType type() const { return GST_MESSAGE_TYPE(get()); }
+ QGstObject source() const { return QGstObject(GST_MESSAGE_SRC(get()), QGstObject::NeedsRef); }
+ QGstStructureView structure() const { return QGstStructureView(gst_message_get_structure(get())); }
+
+ GstMessage *message() const { return get(); }
+};
+
+QT_END_NAMESPACE
+
+Q_DECLARE_METATYPE(QGstreamerMessage);
+
+#endif
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamermetadata.cpp b/src/plugins/multimedia/gstreamer/common/qgstreamermetadata.cpp
new file mode 100644
index 000000000..9aa9406b9
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamermetadata.cpp
@@ -0,0 +1,489 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qgstreamermetadata_p.h"
+#include <QtMultimedia/qmediametadata.h>
+#include <QtMultimedia/qtvideo.h>
+#include <QtCore/qdebug.h>
+#include <QtCore/qdatetime.h>
+#include <QtCore/qlocale.h>
+#include <QtCore/qtimezone.h>
+#include <QtGui/qimage.h>
+
+#include <gst/gstversion.h>
+#include <common/qgst_handle_types_p.h>
+#include <common/qgstutils_p.h>
+#include <qgstreamerformatinfo_p.h>
+
+QT_BEGIN_NAMESPACE
+
+namespace {
+
+namespace MetadataLookupImpl {
+
+#ifdef __cpp_lib_constexpr_algorithms
+# define constexpr_lookup constexpr
+#else
+# define constexpr_lookup /*constexpr*/
+#endif
+
+struct MetadataKeyValuePair
+{
+ const char *tag;
+ QMediaMetaData::Key key;
+};
+
+constexpr const char *toTag(const char *t)
+{
+ return t;
+}
+constexpr const char *toTag(const MetadataKeyValuePair &kv)
+{
+ return kv.tag;
+}
+
+constexpr QMediaMetaData::Key toKey(QMediaMetaData::Key k)
+{
+ return k;
+}
+constexpr QMediaMetaData::Key toKey(const MetadataKeyValuePair &kv)
+{
+ return kv.key;
+}
+
+constexpr auto compareByKey = [](const auto &lhs, const auto &rhs) {
+ return toKey(lhs) < toKey(rhs);
+};
+
+constexpr auto compareByTag = [](const auto &lhs, const auto &rhs) {
+ return std::strcmp(toTag(lhs), toTag(rhs)) < 0;
+};
+
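+// The same table serves both lookup directions: sorted by tag here and
+// re-sorted by key below, so tagToKey() and keyToTag() can binary-search.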
+constexpr_lookup auto makeLookupTable()
+{
+ std::array<MetadataKeyValuePair, 22> lookupTable{ {
+ { GST_TAG_TITLE, QMediaMetaData::Title },
+ { GST_TAG_COMMENT, QMediaMetaData::Comment },
+ { GST_TAG_DESCRIPTION, QMediaMetaData::Description },
+ { GST_TAG_GENRE, QMediaMetaData::Genre },
+ { GST_TAG_DATE_TIME, QMediaMetaData::Date },
+ { GST_TAG_DATE, QMediaMetaData::Date },
+
+ { GST_TAG_LANGUAGE_CODE, QMediaMetaData::Language },
+
+ { GST_TAG_ORGANIZATION, QMediaMetaData::Publisher },
+ { GST_TAG_COPYRIGHT, QMediaMetaData::Copyright },
+
+ // Media
+ { GST_TAG_DURATION, QMediaMetaData::Duration },
+
+ // Audio
+ { GST_TAG_BITRATE, QMediaMetaData::AudioBitRate },
+ { GST_TAG_AUDIO_CODEC, QMediaMetaData::AudioCodec },
+
+ // Music
+ { GST_TAG_ALBUM, QMediaMetaData::AlbumTitle },
+ { GST_TAG_ALBUM_ARTIST, QMediaMetaData::AlbumArtist },
+ { GST_TAG_ARTIST, QMediaMetaData::ContributingArtist },
+ { GST_TAG_TRACK_NUMBER, QMediaMetaData::TrackNumber },
+
+ { GST_TAG_PREVIEW_IMAGE, QMediaMetaData::ThumbnailImage },
+ { GST_TAG_IMAGE, QMediaMetaData::CoverArtImage },
+
+ // Image/Video
+ { "resolution", QMediaMetaData::Resolution },
+ { GST_TAG_IMAGE_ORIENTATION, QMediaMetaData::Orientation },
+
+ // Video
+ { GST_TAG_VIDEO_CODEC, QMediaMetaData::VideoCodec },
+
+ // Movie
+ { GST_TAG_PERFORMER, QMediaMetaData::LeadPerformer },
+ } };
+
+ std::sort(lookupTable.begin(), lookupTable.end(),
+ [](const MetadataKeyValuePair &lhs, const MetadataKeyValuePair &rhs) {
+ return std::string_view(lhs.tag) < std::string_view(rhs.tag);
+ });
+ return lookupTable;
+}
+
+constexpr_lookup auto gstTagToMetaDataKey = makeLookupTable();
+constexpr_lookup auto metaDataKeyToGstTag = [] {
+ auto array = gstTagToMetaDataKey;
+ std::sort(array.begin(), array.end(), compareByKey);
+ return array;
+}();
+
+} // namespace MetadataLookupImpl
+
+QMediaMetaData::Key tagToKey(const char *tag)
+{
+ if (tag == nullptr)
+ return QMediaMetaData::Key(-1);
+
+ using namespace MetadataLookupImpl;
+ auto foundIterator = std::lower_bound(gstTagToMetaDataKey.begin(), gstTagToMetaDataKey.end(),
+ tag, compareByTag);
+    if (foundIterator != gstTagToMetaDataKey.end() && std::strcmp(foundIterator->tag, tag) == 0)
+ return foundIterator->key;
+
+ return QMediaMetaData::Key(-1);
+}
+
+const char *keyToTag(QMediaMetaData::Key key)
+{
+ using namespace MetadataLookupImpl;
+ auto foundIterator = std::lower_bound(metaDataKeyToGstTag.begin(), metaDataKeyToGstTag.end(),
+ key, compareByKey);
+    if (foundIterator != metaDataKeyToGstTag.end() && foundIterator->key == key)
+ return foundIterator->tag;
+
+ return nullptr;
+}
+
+#undef constexpr_lookup
+
+QtVideo::Rotation parseRotationTag(const char *string)
+{
+ using namespace std::string_view_literals;
+
+ if (string == "rotate-90"sv)
+ return QtVideo::Rotation::Clockwise90;
+ if (string == "rotate-180"sv)
+ return QtVideo::Rotation::Clockwise180;
+ if (string == "rotate-270"sv)
+ return QtVideo::Rotation::Clockwise270;
+ if (string == "rotate-0"sv)
+ return QtVideo::Rotation::None;
+
+    qCritical() << "cannot parse orientation:" << string;
+ return QtVideo::Rotation::None;
+}
+
+QDateTime parseDate(const GValue &val)
+{
+ Q_ASSERT(G_VALUE_TYPE(&val) == G_TYPE_DATE);
+
+ const GDate *date = (const GDate *)g_value_get_boxed(&val);
+ if (!g_date_valid(date))
+ return {};
+
+ int year = g_date_get_year(date);
+ int month = g_date_get_month(date);
+ int day = g_date_get_day(date);
+ return QDateTime(QDate(year, month, day), QTime());
+}
+
+QDateTime parseDateTime(const GValue &val)
+{
+ Q_ASSERT(G_VALUE_TYPE(&val) == GST_TYPE_DATE_TIME);
+
+ const GstDateTime *dateTime = (const GstDateTime *)g_value_get_boxed(&val);
+ int year = gst_date_time_has_year(dateTime) ? gst_date_time_get_year(dateTime) : 0;
+ int month = gst_date_time_has_month(dateTime) ? gst_date_time_get_month(dateTime) : 0;
+ int day = gst_date_time_has_day(dateTime) ? gst_date_time_get_day(dateTime) : 0;
+ int hour = 0;
+ int minute = 0;
+ int second = 0;
+ float tz = 0;
+ if (gst_date_time_has_time(dateTime)) {
+ hour = gst_date_time_get_hour(dateTime);
+ minute = gst_date_time_get_minute(dateTime);
+ second = gst_date_time_get_second(dateTime);
+ tz = gst_date_time_get_time_zone_offset(dateTime);
+ }
+ return QDateTime{
+ QDate(year, month, day),
+ QTime(hour, minute, second),
+ QTimeZone(tz * 60 * 60),
+ };
+}
+
+QImage parseImage(const GValue &val)
+{
+ Q_ASSERT(G_VALUE_TYPE(&val) == GST_TYPE_SAMPLE);
+
+ GstSample *sample = (GstSample *)g_value_get_boxed(&val);
+ GstCaps *caps = gst_sample_get_caps(sample);
+ if (caps && !gst_caps_is_empty(caps)) {
+ GstStructure *structure = gst_caps_get_structure(caps, 0);
+ const gchar *name = gst_structure_get_name(structure);
+ if (QByteArray(name).startsWith("image/")) {
+ GstBuffer *buffer = gst_sample_get_buffer(sample);
+ if (buffer) {
+ GstMapInfo info;
+ gst_buffer_map(buffer, &info, GST_MAP_READ);
+ QImage image = QImage::fromData(info.data, info.size, name);
+ gst_buffer_unmap(buffer, &info);
+ return image;
+ }
+ }
+ }
+
+ return {};
+}
+
+std::optional<double> parseFractionAsDouble(const GValue &val)
+{
+ Q_ASSERT(G_VALUE_TYPE(&val) == GST_TYPE_FRACTION);
+
+ int nom = gst_value_get_fraction_numerator(&val);
+ int denom = gst_value_get_fraction_denominator(&val);
+ if (denom == 0)
+ return std::nullopt;
+ return double(nom) / double(denom);
+}
+
+constexpr std::string_view extendedComment{ GST_TAG_EXTENDED_COMMENT };
+
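+// GST_TAG_EXTENDED_COMMENT entries are free-form "KEY=VALUE" strings;
+// currently only the DURATION key is extracted and mapped to
+// QMediaMetaData::Duration.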
+void addTagsFromExtendedComment(const GstTagList *list, const gchar *tag, QMediaMetaData &metadata)
+{
+ using namespace Qt::Literals;
+ assert(tag == extendedComment);
+
+ int entryCount = gst_tag_list_get_tag_size(list, tag);
+ for (int i = 0; i != entryCount; ++i) {
+ const GValue *value = gst_tag_list_get_value_index(list, tag, i);
+
+ const QLatin1StringView strValue{ g_value_get_string(value) };
+
+ auto equalIndex = strValue.indexOf(QLatin1StringView("="));
+ if (equalIndex == -1) {
+            qDebug() << "Cannot parse GST_TAG_EXTENDED_COMMENT entry:" << strValue;
+ continue;
+ }
+
+ const QLatin1StringView key = strValue.first(equalIndex);
+ const QLatin1StringView valueString = strValue.last(strValue.size() - equalIndex - 1);
+
+ if (key == "DURATION"_L1) {
+ QUniqueGstDateTimeHandle duration{
+ gst_date_time_new_from_iso8601_string(valueString.data()),
+ };
+
+ if (duration) {
+ using namespace std::chrono;
+
+ auto chronoDuration = hours(gst_date_time_get_hour(duration.get()))
+ + minutes(gst_date_time_get_minute(duration.get()))
+ + seconds(gst_date_time_get_second(duration.get()))
+ + microseconds(gst_date_time_get_microsecond(duration.get()));
+
+ metadata.insert(QMediaMetaData::Duration,
+ QVariant::fromValue(round<milliseconds>(chronoDuration).count()));
+ }
+ }
+ }
+}
+
+void addTagToMetaData(const GstTagList *list, const gchar *tag, void *userdata)
+{
+ QMediaMetaData &metadata = *reinterpret_cast<QMediaMetaData *>(userdata);
+
+ QMediaMetaData::Key key = tagToKey(tag);
+ if (key == QMediaMetaData::Key(-1)) {
+ if (tag == extendedComment)
+ addTagsFromExtendedComment(list, tag, metadata);
+
+ return;
+ }
+
+ GValue val{};
+ gst_tag_list_copy_value(&val, list, tag);
+
+ GType type = G_VALUE_TYPE(&val);
+
+    if (guint entryCount = gst_tag_list_get_tag_size(list, tag); entryCount != 1)
+        qWarning() << "addTagToMetaData: invalid entry count for" << tag << "-" << entryCount;
+
+ if (type == G_TYPE_STRING) {
+ const gchar *str_value = g_value_get_string(&val);
+
+ switch (key) {
+ case QMediaMetaData::Language: {
+ metadata.insert(key,
+ QVariant::fromValue(QLocale::codeToLanguage(
+ QString::fromUtf8(str_value), QLocale::AnyLanguageCode)));
+ break;
+ }
+ case QMediaMetaData::Orientation: {
+ metadata.insert(key, QVariant::fromValue(parseRotationTag(str_value)));
+ break;
+ }
+ default:
+ metadata.insert(key, QString::fromUtf8(str_value));
+ break;
+ };
+ } else if (type == G_TYPE_INT) {
+ metadata.insert(key, g_value_get_int(&val));
+ } else if (type == G_TYPE_UINT) {
+ metadata.insert(key, g_value_get_uint(&val));
+ } else if (type == G_TYPE_LONG) {
+ metadata.insert(key, qint64(g_value_get_long(&val)));
+ } else if (type == G_TYPE_BOOLEAN) {
+ metadata.insert(key, g_value_get_boolean(&val));
+ } else if (type == G_TYPE_CHAR) {
+ metadata.insert(key, g_value_get_schar(&val));
+ } else if (type == G_TYPE_DOUBLE) {
+ metadata.insert(key, g_value_get_double(&val));
+ } else if (type == G_TYPE_DATE) {
+ if (!metadata.keys().contains(key)) {
+ QDateTime date = parseDate(val);
+ if (date.isValid())
+ metadata.insert(key, date);
+ }
+ } else if (type == GST_TYPE_DATE_TIME) {
+ metadata.insert(key, parseDateTime(val));
+ } else if (type == GST_TYPE_SAMPLE) {
+ QImage image = parseImage(val);
+ if (!image.isNull())
+ metadata.insert(key, image);
+ } else if (type == GST_TYPE_FRACTION) {
+ std::optional<double> fraction = parseFractionAsDouble(val);
+
+ if (fraction)
+ metadata.insert(key, *fraction);
+ }
+
+ g_value_unset(&val);
+}
+
+} // namespace
+
+QMediaMetaData taglistToMetaData(const QGstTagListHandle &handle)
+{
+ QMediaMetaData m;
+ extendMetaDataFromTagList(m, handle);
+ return m;
+}
+
+void extendMetaDataFromTagList(QMediaMetaData &metadata, const QGstTagListHandle &handle)
+{
+ if (handle)
+ gst_tag_list_foreach(handle.get(), reinterpret_cast<GstTagForeachFunc>(&addTagToMetaData),
+ &metadata);
+}
+
+static void applyMetaDataToTagSetter(const QMediaMetaData &metadata, GstTagSetter *element)
+{
+ gst_tag_setter_reset_tags(element);
+
+ for (QMediaMetaData::Key key : metadata.keys()) {
+ const char *tagName = keyToTag(key);
+ if (!tagName)
+ continue;
+ const QVariant &tagValue = metadata.value(key);
+
+ auto setTag = [&](const auto &value) {
+ gst_tag_setter_add_tags(element, GST_TAG_MERGE_REPLACE, tagName, value, nullptr);
+ };
+
+ switch (tagValue.typeId()) {
+ case QMetaType::QString:
+ setTag(tagValue.toString().toUtf8().constData());
+ break;
+ case QMetaType::Int:
+ case QMetaType::LongLong:
+ setTag(tagValue.toInt());
+ break;
+ case QMetaType::Double:
+ setTag(tagValue.toDouble());
+ break;
+ case QMetaType::QDate:
+ case QMetaType::QDateTime: {
+ QDateTime date = tagValue.toDateTime();
+
+ QGstGstDateTimeHandle dateTime{
+ gst_date_time_new(date.offsetFromUtc() / 60. / 60., date.date().year(),
+ date.date().month(), date.date().day(), date.time().hour(),
+ date.time().minute(), date.time().second()),
+ QGstGstDateTimeHandle::HasRef,
+ };
+
+ setTag(dateTime.get());
+ break;
+ }
+ default: {
+ if (tagValue.typeId() == qMetaTypeId<QLocale::Language>()) {
+ QByteArray language = QLocale::languageToCode(tagValue.value<QLocale::Language>(),
+ QLocale::ISO639Part2)
+ .toUtf8();
+ setTag(language.constData());
+ }
+
+ break;
+ }
+ }
+ }
+}
+
+void applyMetaDataToTagSetter(const QMediaMetaData &metadata, const QGstElement &element)
+{
+ GstTagSetter *tagSetter = qGstSafeCast<GstTagSetter>(element.element());
+ if (tagSetter)
+ applyMetaDataToTagSetter(metadata, tagSetter);
+ else
+ qWarning() << "applyMetaDataToTagSetter failed: element not a GstTagSetter"
+ << element.name();
+}
+
+void applyMetaDataToTagSetter(const QMediaMetaData &metadata, const QGstBin &bin)
+{
+ GstIterator *elements = gst_bin_iterate_all_by_interface(bin.bin(), GST_TYPE_TAG_SETTER);
+ GValue item = {};
+
+ while (gst_iterator_next(elements, &item) == GST_ITERATOR_OK) {
+ GstElement *element = static_cast<GstElement *>(g_value_get_object(&item));
+ if (!element)
+ continue;
+
+ GstTagSetter *tagSetter = qGstSafeCast<GstTagSetter>(element);
+
+ if (tagSetter)
+ applyMetaDataToTagSetter(metadata, tagSetter);
+ }
+
+ gst_iterator_free(elements);
+}
+
+void extendMetaDataFromCaps(QMediaMetaData &metadata, const QGstCaps &caps)
+{
+ QGstStructureView structure = caps.at(0);
+
+ QMediaFormat::FileFormat fileFormat = QGstreamerFormatInfo::fileFormatForCaps(structure);
+ if (fileFormat != QMediaFormat::FileFormat::UnspecifiedFormat) {
+ // Container caps
+ metadata.insert(QMediaMetaData::FileFormat, fileFormat);
+ return;
+ }
+
+ QMediaFormat::AudioCodec audioCodec = QGstreamerFormatInfo::audioCodecForCaps(structure);
+ if (audioCodec != QMediaFormat::AudioCodec::Unspecified) {
+ // Audio stream caps
+ metadata.insert(QMediaMetaData::AudioCodec, QVariant::fromValue(audioCodec));
+ return;
+ }
+
+ QMediaFormat::VideoCodec videoCodec = QGstreamerFormatInfo::videoCodecForCaps(structure);
+ if (videoCodec != QMediaFormat::VideoCodec::Unspecified) {
+ // Video stream caps
+ metadata.insert(QMediaMetaData::VideoCodec, QVariant::fromValue(videoCodec));
+ std::optional<float> framerate = structure["framerate"].getFraction();
+ if (framerate)
+ metadata.insert(QMediaMetaData::VideoFrameRate, *framerate);
+
+ QSize resolution = structure.resolution();
+ if (resolution.isValid())
+ metadata.insert(QMediaMetaData::Resolution, resolution);
+ }
+}
+
+QMediaMetaData capsToMetaData(const QGstCaps &caps)
+{
+ QMediaMetaData metadata;
+ extendMetaDataFromCaps(metadata, caps);
+ return metadata;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamermetadata_p.h b/src/plugins/multimedia/gstreamer/common/qgstreamermetadata_p.h
new file mode 100644
index 000000000..f04a9aba9
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamermetadata_p.h
@@ -0,0 +1,35 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTREAMERMETADATA_H
+#define QGSTREAMERMETADATA_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qmediametadata.h>
+
+#include "qgst_p.h"
+
+QT_BEGIN_NAMESPACE
+
+QMediaMetaData taglistToMetaData(const QGstTagListHandle &);
+void extendMetaDataFromTagList(QMediaMetaData &, const QGstTagListHandle &);
+
+QMediaMetaData capsToMetaData(const QGstCaps &);
+void extendMetaDataFromCaps(QMediaMetaData &, const QGstCaps &);
+
+void applyMetaDataToTagSetter(const QMediaMetaData &metadata, const QGstBin &);
+void applyMetaDataToTagSetter(const QMediaMetaData &metadata, const QGstElement &);
+
+QT_END_NAMESPACE
+
+#endif // QGSTREAMERMETADATA_H
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput.cpp b/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput.cpp
new file mode 100644
index 000000000..ef991f5b4
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput.cpp
@@ -0,0 +1,220 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QtMultimedia/qvideosink.h>
+
+#include <QtCore/qloggingcategory.h>
+#include <QtCore/qthread.h>
+
+#include <common/qgstreamervideooutput_p.h>
+#include <common/qgstreamervideosink_p.h>
+#include <common/qgstsubtitlesink_p.h>
+
+static Q_LOGGING_CATEGORY(qLcMediaVideoOutput, "qt.multimedia.videooutput")
+
+QT_BEGIN_NAMESPACE
+
+static QGstElement makeVideoConvertScale(const char *name)
+{
+ QGstElementFactoryHandle factory = QGstElement::findFactory("videoconvertscale");
+    if (factory) // videoconvertscale is only available since GStreamer 1.20
+ return QGstElement::createFromFactory(factory, name);
+
+ return QGstBin::createFromPipelineDescription("videoconvert ! videoscale", name,
+ /*ghostUnlinkedPads=*/true);
+}
+
+QMaybe<QGstreamerVideoOutput *> QGstreamerVideoOutput::create(QObject *parent)
+{
+ static std::optional<QString> elementCheck = []() -> std::optional<QString> {
+ std::optional<QString> error = qGstErrorMessageIfElementsNotAvailable("fakesink", "queue");
+ if (error)
+ return error;
+
+ QGstElementFactoryHandle factory = QGstElement::findFactory("videoconvertscale");
+ if (factory)
+ return std::nullopt;
+
+ return qGstErrorMessageIfElementsNotAvailable("videoconvert", "videoscale");
+ }();
+
+ if (elementCheck)
+ return *elementCheck;
+
+ return new QGstreamerVideoOutput(parent);
+}
+
+QGstreamerVideoOutput::QGstreamerVideoOutput(QObject *parent)
+ : QObject(parent),
+ m_outputBin(QGstBin::create("videoOutput")),
+ m_videoQueue{
+ QGstElement::createFromFactory("queue", "videoQueue"),
+ },
+ m_videoConvertScale{
+ makeVideoConvertScale("videoConvertScale"),
+ },
+ m_videoSink{
+ QGstElement::createFromFactory("fakesink", "fakeVideoSink"),
+ }
+{
+ m_videoSink.set("sync", true);
+ m_videoSink.set("async", false); // no asynchronous state changes
+
+ m_outputBin.add(m_videoQueue, m_videoConvertScale, m_videoSink);
+ qLinkGstElements(m_videoQueue, m_videoConvertScale, m_videoSink);
+
+ m_outputBin.addGhostPad(m_videoQueue, "sink");
+}
+
+QGstreamerVideoOutput::~QGstreamerVideoOutput()
+{
+ m_outputBin.setStateSync(GST_STATE_NULL);
+}
+
+void QGstreamerVideoOutput::setVideoSink(QVideoSink *sink)
+{
+ auto *gstVideoSink = sink ? static_cast<QGstreamerVideoSink *>(sink->platformVideoSink()) : nullptr;
+ if (gstVideoSink == m_platformVideoSink)
+ return;
+
+ if (m_platformVideoSink)
+ m_platformVideoSink->setPipeline({});
+
+ m_platformVideoSink = gstVideoSink;
+ if (m_platformVideoSink) {
+ m_platformVideoSink->setPipeline(m_pipeline);
+ if (m_nativeSize.isValid())
+ m_platformVideoSink->setNativeSize(m_nativeSize);
+ }
+ QGstElement gstSink;
+ if (m_platformVideoSink) {
+ gstSink = m_platformVideoSink->gstSink();
+ } else {
+ gstSink = QGstElement::createFromFactory("fakesink", "fakevideosink");
+ Q_ASSERT(gstSink);
+ gstSink.set("sync", true);
+ gstSink.set("async", false); // no asynchronous state changes
+ }
+
+ if (m_videoSink == gstSink)
+ return;
+
+ m_pipeline.modifyPipelineWhileNotRunning([&] {
+ if (!m_videoSink.isNull())
+ m_outputBin.stopAndRemoveElements(m_videoSink);
+
+ m_videoSink = gstSink;
+ m_outputBin.add(m_videoSink);
+
+ qLinkGstElements(m_videoConvertScale, m_videoSink);
+
+ GstEvent *event = gst_event_new_reconfigure();
+ gst_element_send_event(m_videoSink.element(), event);
+ m_videoSink.syncStateWithParent();
+
+ doLinkSubtitleStream();
+ });
+
+ qCDebug(qLcMediaVideoOutput) << "sinkChanged" << gstSink.name();
+
+ m_pipeline.dumpGraph(m_videoSink.name().constData());
+}
+
+void QGstreamerVideoOutput::setPipeline(const QGstPipeline &pipeline)
+{
+ m_pipeline = pipeline;
+ if (m_platformVideoSink)
+ m_platformVideoSink->setPipeline(m_pipeline);
+}
+
+void QGstreamerVideoOutput::linkSubtitleStream(QGstElement src)
+{
+ qCDebug(qLcMediaVideoOutput) << "link subtitle stream" << src.isNull();
+ if (src == m_subtitleSrc)
+ return;
+
+ m_pipeline.modifyPipelineWhileNotRunning([&] {
+ m_subtitleSrc = src;
+ doLinkSubtitleStream();
+ });
+}
+
+void QGstreamerVideoOutput::unlinkSubtitleStream()
+{
+ if (m_subtitleSrc.isNull())
+ return;
+ qCDebug(qLcMediaVideoOutput) << "unlink subtitle stream";
+ m_subtitleSrc = {};
+ if (!m_subtitleSink.isNull()) {
+ m_pipeline.modifyPipelineWhileNotRunning([&] {
+ m_pipeline.stopAndRemoveElements(m_subtitleSink);
+ });
+ m_subtitleSink = {};
+ }
+ if (m_platformVideoSink)
+ m_platformVideoSink->setSubtitleText({});
+}
+
+void QGstreamerVideoOutput::doLinkSubtitleStream()
+{
+ if (!m_subtitleSink.isNull()) {
+ m_pipeline.stopAndRemoveElements(m_subtitleSink);
+ m_subtitleSink = {};
+ }
+ if (!m_platformVideoSink || m_subtitleSrc.isNull())
+ return;
+ if (m_subtitleSink.isNull()) {
+ m_subtitleSink = m_platformVideoSink->subtitleSink();
+ m_pipeline.add(m_subtitleSink);
+ }
+ qLinkGstElements(m_subtitleSrc, m_subtitleSink);
+}
+
+void QGstreamerVideoOutput::updateNativeSize()
+{
+ if (!m_platformVideoSink)
+ return;
+
+ m_platformVideoSink->setNativeSize(qRotatedFrameSize(m_nativeSize, m_rotation));
+}
+
+void QGstreamerVideoOutput::setIsPreview()
+{
+    // Configure the queue to be fast and lightweight for camera preview;
+    // this also avoids blocking the queue in case an encodebin is attached
+    // to the tee as well.
+ m_videoQueue.set("leaky", 2 /*downstream*/);
+ m_videoQueue.set("silent", true);
+ m_videoQueue.set("max-size-buffers", uint(1));
+ m_videoQueue.set("max-size-bytes", uint(0));
+ m_videoQueue.set("max-size-time", quint64(0));
+}
+
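+// Pushes a flush-start/flush-stop pair straight into the subtitle sink pad
+// so pending subtitle buffers are dropped, e.g. after a track change.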
+void QGstreamerVideoOutput::flushSubtitles()
+{
+ if (!m_subtitleSink.isNull()) {
+ auto pad = m_subtitleSink.staticPad("sink");
+ auto *event = gst_event_new_flush_start();
+ pad.sendEvent(event);
+ event = gst_event_new_flush_stop(false);
+ pad.sendEvent(event);
+ }
+}
+
+void QGstreamerVideoOutput::setNativeSize(QSize sz)
+{
+ m_nativeSize = sz;
+ updateNativeSize();
+}
+
+void QGstreamerVideoOutput::setRotation(QtVideo::Rotation rot)
+{
+ m_rotation = rot;
+ updateNativeSize();
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qgstreamervideooutput_p.cpp"
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput_p.h b/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput_p.h
new file mode 100644
index 000000000..10d2f3ee7
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput_p.h
@@ -0,0 +1,80 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTREAMERVIDEOOUTPUT_P_H
+#define QGSTREAMERVIDEOOUTPUT_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qobject.h>
+#include <private/qtmultimediaglobal_p.h>
+#include <private/qmultimediautils_p.h>
+#include <common/qgst_p.h>
+#include <common/qgstpipeline_p.h>
+#include <common/qgstreamervideosink_p.h>
+#include <qwaitcondition.h>
+#include <qmutex.h>
+#include <qpointer.h>
+
+QT_BEGIN_NAMESPACE
+
+class QVideoSink;
+
+class QGstreamerVideoOutput : public QObject
+{
+ Q_OBJECT
+
+public:
+ static QMaybe<QGstreamerVideoOutput *> create(QObject *parent = nullptr);
+ ~QGstreamerVideoOutput();
+
+ void setVideoSink(QVideoSink *sink);
+ QGstreamerVideoSink *gstreamerVideoSink() const { return m_platformVideoSink; }
+
+ void setPipeline(const QGstPipeline &pipeline);
+
+ QGstElement gstElement() const { return m_outputBin; }
+ void linkSubtitleStream(QGstElement subtitleSrc);
+ void unlinkSubtitleStream();
+
+ void setIsPreview();
+ void flushSubtitles();
+
+ void setNativeSize(QSize);
+ void setRotation(QtVideo::Rotation);
+
+private:
+ explicit QGstreamerVideoOutput(QObject *parent);
+
+ void doLinkSubtitleStream();
+ void updateNativeSize();
+
+ QPointer<QGstreamerVideoSink> m_platformVideoSink;
+
+ // Gst elements
+ QGstPipeline m_pipeline;
+
+ QGstBin m_outputBin;
+ QGstElement m_videoQueue;
+ QGstElement m_videoConvertScale;
+ QGstElement m_videoSink;
+
+ QGstElement m_subtitleSrc;
+ QGstElement m_subtitleSink;
+
+ QSize m_nativeSize;
+ QtVideo::Rotation m_rotation{};
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamervideooverlay.cpp b/src/plugins/multimedia/gstreamer/common/qgstreamervideooverlay.cpp
new file mode 100644
index 000000000..6ca23006b
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamervideooverlay.cpp
@@ -0,0 +1,218 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qgstreamervideooverlay_p.h"
+
+#include <QtGui/qguiapplication.h>
+#include <QtMultimedia/private/qtmultimediaglobal_p.h>
+
+#include <common/qglist_helper_p.h>
+#include <common/qgst_p.h>
+#include <common/qgstreamermessage_p.h>
+#include <common/qgstreamervideosink_p.h>
+#include <common/qgstutils_p.h>
+
+#include <gst/video/videooverlay.h>
+
+QT_BEGIN_NAMESPACE
+
+struct ElementMap
+{
+ QStringView qtPlatform;
+ const char *gstreamerElement = nullptr;
+};
+
+// Ordered by descending priority
+static constexpr ElementMap elementMap[] = {
+ // xcb
+ { u"xcb", "xvimagesink" },
+ { u"xcb", "ximagesink" },
+
+ // wayland
+ { u"wayland", "waylandsink" },
+};
+
+static bool qt_gst_element_is_functioning(QGstElement element)
+{
+ GstStateChangeReturn ret = element.setState(GST_STATE_READY);
+ if (ret == GST_STATE_CHANGE_SUCCESS) {
+ element.setState(GST_STATE_NULL);
+ return true;
+ }
+
+ return false;
+}
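
Forcing the candidate into the READY state makes it acquire its platform resources, so a sink that merely exists in the registry but cannot actually run on the current system (for example xvimagesink on a machine without a reachable X server) is filtered out before it is ever placed in a live pipeline.
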
+
+static QGstElement findBestVideoSink()
+{
+ using namespace Qt::StringLiterals;
+ QString platform = QGuiApplication::platformName();
+
+ // First, try some known video sinks, depending on the Qt platform plugin in use.
+ for (const auto &i : elementMap) {
+ if (platform != i.qtPlatform)
+ continue;
+ QGstElement choice = QGstElement::createFromFactory(i.gstreamerElement, i.gstreamerElement);
+ if (choice.isNull())
+ continue;
+
+ if (qt_gst_element_is_functioning(choice))
+ return choice;
+ }
+
+ // We need a native window ID to use the GstVideoOverlay interface.
+ // Bail out if the Qt platform plugin in use cannot provide a sensible WId.
+ if (platform != QStringView{ u"xcb" } && platform != QStringView{ u"wayland" })
+ return {};
+
+ QGstElement choice;
+ // If none of the known video sinks are available, try to find one that implements the
+ // GstVideoOverlay interface and has autoplugging rank.
+ GList *list = qt_gst_video_sinks();
+ for (GstElementFactory *f : QGstUtils::GListRangeAdaptor<GstElementFactory *>(list)) {
+ if (!gst_element_factory_has_interface(f, "GstVideoOverlay"))
+ continue;
+
+ choice = QGstElement::createFromFactory(f, nullptr);
+ if (choice.isNull())
+ continue;
+
+ if (qt_gst_element_is_functioning(choice))
+ break;
+ choice = {};
+ }
+
+ gst_plugin_feature_list_free(list);
+ if (choice.isNull())
+ qWarning() << "Could not find a valid windowed video sink";
+
+ return choice;
+}
+
+QGstreamerVideoOverlay::QGstreamerVideoOverlay(QGstreamerVideoSink *parent, const QByteArray &elementName)
+ : QObject(parent)
+ , QGstreamerBufferProbe(QGstreamerBufferProbe::ProbeCaps)
+ , m_gstreamerVideoSink(parent)
+{
+ QGstElement sink;
+ if (!elementName.isEmpty())
+ sink = QGstElement::createFromFactory(elementName.constData());
+ else
+ sink = findBestVideoSink();
+
+ setVideoSink(sink);
+}
+
+QGstreamerVideoOverlay::~QGstreamerVideoOverlay()
+{
+ if (!m_videoSink.isNull()) {
+ QGstPad pad = m_videoSink.staticPad("sink");
+ removeProbeFromPad(pad.pad());
+ }
+}
+
+QGstElement QGstreamerVideoOverlay::videoSink() const
+{
+ return m_videoSink;
+}
+
+void QGstreamerVideoOverlay::setVideoSink(QGstElement sink)
+{
+ if (sink.isNull())
+ return;
+
+ m_videoSink = std::move(sink);
+
+ QGstPad pad = m_videoSink.staticPad("sink");
+ addProbeToPad(pad.pad());
+
+ auto *klass = G_OBJECT_GET_CLASS(m_videoSink.object());
+ m_hasForceAspectRatio = g_object_class_find_property(klass, "force-aspect-ratio");
+ m_hasFullscreen = g_object_class_find_property(klass, "fullscreen");
+}
+
+QSize QGstreamerVideoOverlay::nativeVideoSize() const
+{
+ return m_nativeVideoSize;
+}
+
+void QGstreamerVideoOverlay::setWindowHandle(WId id)
+{
+ m_windowId = id;
+
+ if (!m_videoSink.isNull() && GST_IS_VIDEO_OVERLAY(m_videoSink.object())) {
+ // Properties need to be reset when changing the winId.
+ setAspectRatioMode(m_aspectRatioMode);
+ setFullScreen(m_fullScreen);
+ applyRenderRect();
+ }
+}
+
+void QGstreamerVideoOverlay::setRenderRectangle(const QRect &rect)
+{
+ renderRect = rect;
+ applyRenderRect();
+}
+
+void QGstreamerVideoOverlay::applyRenderRect()
+{
+ if (!m_windowId)
+ return;
+
+ int x = -1;
+ int y = -1;
+ int w = -1;
+ int h = -1;
+
+ if (!renderRect.isEmpty()) {
+ x = renderRect.x();
+ y = renderRect.y();
+ w = renderRect.width();
+ h = renderRect.height();
+ QSize scaledVideo = m_nativeVideoSize.scaled(w, h, m_aspectRatioMode);
+ x += (w - scaledVideo.width())/2;
+ y += (h - scaledVideo.height())/2;
+ w = scaledVideo.width();
+ h = scaledVideo.height();
+ }
+
+ if (!m_videoSink.isNull() && GST_IS_VIDEO_OVERLAY(m_videoSink.object()))
+ gst_video_overlay_set_render_rectangle(GST_VIDEO_OVERLAY(m_videoSink.object()), x, y, w, h);
+}
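
As a worked example: a render rectangle of 400x300 at the origin with a native video size of 1280x720 and Qt::KeepAspectRatio scales the video to 400x225, and the rectangle passed to the overlay becomes (0, 37, 400, 225), centering the video vertically inside the requested area.
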
+
+void QGstreamerVideoOverlay::probeCaps(GstCaps *caps)
+{
+ QSize size = QGstCaps(caps, QGstCaps::NeedsRef).at(0).resolution();
+ if (size != m_nativeVideoSize) {
+ m_nativeVideoSize = size;
+ m_gstreamerVideoSink->setNativeSize(m_nativeVideoSize);
+ applyRenderRect();
+ }
+}
+
+void QGstreamerVideoOverlay::setAspectRatioMode(Qt::AspectRatioMode mode)
+{
+ m_aspectRatioMode = mode;
+ if (m_hasForceAspectRatio)
+ m_videoSink.set("force-aspect-ratio", (mode == Qt::KeepAspectRatio));
+}
+
+void QGstreamerVideoOverlay::setFullScreen(bool fullscreen)
+{
+ m_fullScreen = fullscreen;
+ if (m_hasFullscreen)
+ m_videoSink.set("fullscreen", fullscreen);
+}
+
+bool QGstreamerVideoOverlay::processSyncMessage(const QGstreamerMessage &message)
+{
+ if (!gst_is_video_overlay_prepare_window_handle_message(message.message()))
+ return false;
+ gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(m_videoSink.object()), m_windowId);
+ return true;
+}
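
processSyncMessage() is designed to be driven from a bus sync handler, the standard GStreamer pattern for window embedding: the window handle must be supplied synchronously, before the sink falls back to creating its own window. A minimal sketch of that pattern in plain GStreamer terms (the handler and variable names are illustrative, not part of this patch):

    static GstBusSyncReply busSyncHandler(GstBus *, GstMessage *message, gpointer userData)
    {
        if (!gst_is_video_overlay_prepare_window_handle_message(message))
            return GST_BUS_PASS;

        // userData points at the native window id owned by the application
        auto windowId = *static_cast<guintptr *>(userData);
        gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(GST_MESSAGE_SRC(message)), windowId);
        gst_message_unref(message); // GST_BUS_DROP transfers ownership to the handler
        return GST_BUS_DROP;
    }

    // installed with: gst_bus_set_sync_handler(bus, busSyncHandler, &windowId, nullptr);
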
+
+QT_END_NAMESPACE
+
+#include "moc_qgstreamervideooverlay_p.cpp"
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamervideooverlay_p.h b/src/plugins/multimedia/gstreamer/common/qgstreamervideooverlay_p.h
new file mode 100644
index 000000000..588e8b5e4
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamervideooverlay_p.h
@@ -0,0 +1,74 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTREAMERVIDEOOVERLAY_P_H
+#define QGSTREAMERVIDEOOVERLAY_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <common/qgstpipeline_p.h>
+#include <common/qgstreamerbufferprobe_p.h>
+#include <common/qgst_p.h>
+#include <QtGui/qwindowdefs.h>
+
+QT_BEGIN_NAMESPACE
+class QGstreamerVideoSink;
+
+class QGstreamerVideoOverlay : public QObject,
+ public QGstreamerSyncMessageFilter,
+ private QGstreamerBufferProbe
+{
+ Q_OBJECT
+public:
+ explicit QGstreamerVideoOverlay(QGstreamerVideoSink *parent = nullptr,
+ const QByteArray &elementName = QByteArray());
+ virtual ~QGstreamerVideoOverlay();
+
+ QGstElement videoSink() const;
+ void setVideoSink(QGstElement);
+ QSize nativeVideoSize() const;
+
+ void setWindowHandle(WId id);
+ void setRenderRectangle(const QRect &rect);
+
+ void setAspectRatioMode(Qt::AspectRatioMode mode);
+ void setFullScreen(bool fullscreen);
+
+ bool processSyncMessage(const QGstreamerMessage &message) override;
+
+ bool isNull() const { return m_videoSink.isNull(); }
+
+Q_SIGNALS:
+ void nativeVideoSizeChanged();
+ void activeChanged();
+
+private:
+ void probeCaps(GstCaps *caps) override;
+ void applyRenderRect();
+
+ QGstreamerVideoSink *m_gstreamerVideoSink = nullptr;
+ QGstElement m_videoSink;
+ QSize m_nativeVideoSize;
+
+ bool m_hasForceAspectRatio = false;
+ bool m_hasFullscreen = false;
+ Qt::AspectRatioMode m_aspectRatioMode = Qt::KeepAspectRatio;
+ bool m_fullScreen = false;
+
+ WId m_windowId = 0;
+ QRect renderRect;
+};
+
+QT_END_NAMESPACE
+
+#endif // QGSTREAMERVIDEOOVERLAY_P_H
+
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamervideosink.cpp b/src/plugins/multimedia/gstreamer/common/qgstreamervideosink.cpp
new file mode 100644
index 000000000..39377265a
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamervideosink.cpp
@@ -0,0 +1,314 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <common/qgstreamervideosink_p.h>
+#include <common/qgstvideorenderersink_p.h>
+#include <common/qgstsubtitlesink_p.h>
+#include <common/qgst_debug_p.h>
+#include <common/qgstutils_p.h>
+#include <rhi/qrhi.h>
+
+#if QT_CONFIG(gstreamer_gl)
+#include <QGuiApplication>
+#include <QtGui/qopenglcontext.h>
+#include <QWindow>
+#include <qpa/qplatformnativeinterface.h>
+#include <gst/gl/gstglconfig.h>
+
+#if GST_GL_HAVE_WINDOW_X11 && __has_include("X11/Xlib-xcb.h")
+# include <gst/gl/x11/gstgldisplay_x11.h>
+#endif
+#if GST_GL_HAVE_PLATFORM_EGL
+# include <gst/gl/egl/gstgldisplay_egl.h>
+# include <EGL/egl.h>
+# include <EGL/eglext.h>
+#endif
+#if GST_GL_HAVE_WINDOW_WAYLAND && __has_include("wayland-client.h")
+# include <gst/gl/wayland/gstgldisplay_wayland.h>
+#endif
+#endif // #if QT_CONFIG(gstreamer_gl)
+
+#include <QtCore/qdebug.h>
+
+#include <QtCore/qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcGstVideoSink, "qt.multimedia.gstvideosink");
+
+QGstreamerVideoSink::QGstreamerVideoSink(QVideoSink *parent)
+ : QPlatformVideoSink{
+ parent,
+ },
+ m_sinkBin{
+ QGstBin::create("videoSinkBin"),
+ }
+{
+ // This is a hack for some i.MX and NVIDIA platforms. These require a special video
+ // conversion element in the pipeline before the video sink, as they unfortunately
+ // output a proprietary format from the decoder even though it's sometimes marked as
+ // a regular supported video/x-raw format.
+ //
+ // To work around this, insert the conversion element into the pipeline if it's
+ // available; otherwise the sink bin starts directly with the caps filter set up below.
+ QGstElementFactoryHandle factory;
+
+ // QT_GSTREAMER_OVERRIDE_VIDEO_CONVERSION_ELEMENT allows users to override the
+ // conversion element. Ideally we would construct the element programmatically, though.
+ QByteArray preprocessOverride = qgetenv("QT_GSTREAMER_OVERRIDE_VIDEO_CONVERSION_ELEMENT");
+ if (!preprocessOverride.isEmpty()) {
+ qCDebug(qLcGstVideoSink) << "requesting conversion element from environment:"
+ << preprocessOverride;
+
+ m_gstPreprocess = QGstBin::createFromPipelineDescription(preprocessOverride, nullptr,
+ /*ghostUnlinkedPads=*/true);
+ if (!m_gstPreprocess)
+ qCWarning(qLcGstVideoSink) << "Cannot create conversion element:" << preprocessOverride;
+ }
+
+ if (!m_gstPreprocess) {
+ // No override given: probe for the platform-specific conversion elements
+ // described above.
+ static constexpr auto convertersToTest = {
+ "imxvideoconvert_g2d",
+ "nvvidconv",
+ };
+
+ for (const char *converter : convertersToTest) {
+ factory = QGstElement::findFactory(converter);
+ if (factory)
+ break;
+ }
+
+ if (factory) {
+ qCDebug(qLcGstVideoSink)
+ << "instantiating conversion element:"
+ << g_type_name(gst_element_factory_get_element_type(factory.get()));
+
+ m_gstPreprocess = QGstElement::createFromFactory(factory, "preprocess");
+ }
+ }
+
+ bool disablePixelAspectRatio =
+ qEnvironmentVariableIsSet("QT_GSTREAMER_DISABLE_PIXEL_ASPECT_RATIO");
+ if (disablePixelAspectRatio) {
+ // Enforcing a pixel aspect ratio may expose a GStreamer bug with cameras that don't
+ // report a pixel-aspect-ratio via `VIDIOC_CROPCAP`, which can cause the caps
+ // negotiation to fail. Setting the QT_GSTREAMER_DISABLE_PIXEL_ASPECT_RATIO
+ // environment variable disables pixel-aspect-ratio handling.
+ //
+ // compare: https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/6242
+ m_gstCapsFilter =
+ QGstElement::createFromFactory("identity", "nullPixelAspectRatioCapsFilter");
+ } else {
+ m_gstCapsFilter =
+ QGstElement::createFromFactory("capsfilter", "pixelAspectRatioCapsFilter");
+ QGstCaps capsFilterCaps{
+ gst_caps_new_simple("video/x-raw", "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1, NULL),
+ QGstCaps::HasRef,
+ };
+ g_object_set(m_gstCapsFilter.element(), "caps", capsFilterCaps.caps(), NULL);
+ }
+
+ if (m_gstPreprocess) {
+ m_sinkBin.add(m_gstPreprocess, m_gstCapsFilter);
+ qLinkGstElements(m_gstPreprocess, m_gstCapsFilter);
+ m_sinkBin.addGhostPad(m_gstPreprocess, "sink");
+ } else {
+ m_sinkBin.add(m_gstCapsFilter);
+ m_sinkBin.addGhostPad(m_gstCapsFilter, "sink");
+ }
+
+ m_gstSubtitleSink =
+ QGstElement(GST_ELEMENT(QGstSubtitleSink::createSink(this)), QGstElement::NeedsRef);
+}
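
For example, launching an application with QT_GSTREAMER_OVERRIDE_VIDEO_CONVERSION_ELEMENT=videoconvert (or a longer description such as "videoconvert ! videoscale") makes createFromPipelineDescription() build that bin and place it in front of the caps filter, bypassing the probe for imxvideoconvert_g2d and nvvidconv.
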
+
+QGstreamerVideoSink::~QGstreamerVideoSink()
+{
+ emit aboutToBeDestroyed();
+
+ unrefGstContexts();
+
+ setPipeline(QGstPipeline());
+}
+
+QGstElement QGstreamerVideoSink::gstSink()
+{
+ updateSinkElement();
+ return m_sinkBin;
+}
+
+void QGstreamerVideoSink::setPipeline(QGstPipeline pipeline)
+{
+ m_pipeline = std::move(pipeline);
+}
+
+bool QGstreamerVideoSink::inStoppedState() const
+{
+ if (m_pipeline.isNull())
+ return true;
+ return m_pipeline.inStoppedState();
+}
+
+void QGstreamerVideoSink::setRhi(QRhi *rhi)
+{
+ if (rhi && rhi->backend() != QRhi::OpenGLES2)
+ rhi = nullptr;
+ if (m_rhi == rhi)
+ return;
+
+ m_rhi = rhi;
+ updateGstContexts();
+ if (!m_gstQtSink.isNull()) {
+ // force creation of a new sink with proper caps
+ createQtSink();
+ updateSinkElement();
+ }
+}
+
+void QGstreamerVideoSink::createQtSink()
+{
+ if (m_gstQtSink)
+ m_gstQtSink.setStateSync(GST_STATE_NULL);
+
+ m_gstQtSink =
+ QGstElement(reinterpret_cast<GstElement *>(QGstVideoRendererSink::createSink(this)),
+ QGstElement::NeedsRef);
+}
+
+void QGstreamerVideoSink::updateSinkElement()
+{
+ QGstElement newSink;
+ if (m_gstQtSink.isNull())
+ createQtSink();
+ newSink = m_gstQtSink;
+
+ if (newSink == m_gstVideoSink)
+ return;
+
+ m_pipeline.modifyPipelineWhileNotRunning([&] {
+ if (!m_gstVideoSink.isNull())
+ m_sinkBin.stopAndRemoveElements(m_gstVideoSink);
+
+ newSink.set("async", false); // no asynchronous state changes
+
+ m_gstVideoSink = newSink;
+ m_sinkBin.add(m_gstVideoSink);
+ qLinkGstElements(m_gstCapsFilter, m_gstVideoSink);
+ m_gstVideoSink.setState(GST_STATE_PAUSED);
+ });
+
+ m_pipeline.dumpGraph("updateVideoSink");
+}
+
+void QGstreamerVideoSink::unrefGstContexts()
+{
+ m_gstGlDisplayContext.close();
+ m_gstGlLocalContext.close();
+ m_eglDisplay = nullptr;
+ m_eglImageTargetTexture2D = nullptr;
+}
+
+void QGstreamerVideoSink::updateGstContexts()
+{
+ using namespace Qt::Literals;
+
+ unrefGstContexts();
+
+#if QT_CONFIG(gstreamer_gl)
+ if (!m_rhi || m_rhi->backend() != QRhi::OpenGLES2)
+ return;
+
+ auto *nativeHandles = static_cast<const QRhiGles2NativeHandles *>(m_rhi->nativeHandles());
+ auto glContext = nativeHandles->context;
+ Q_ASSERT(glContext);
+
+ const QString platform = QGuiApplication::platformName();
+ QPlatformNativeInterface *pni = QGuiApplication::platformNativeInterface();
+ m_eglDisplay = pni->nativeResourceForIntegration("egldisplay"_ba);
+// qDebug() << "platform is" << platform << m_eglDisplay;
+
+ QGstGLDisplayHandle gstGlDisplay;
+
+ QByteArray contextName = "eglcontext"_ba;
+ GstGLPlatform glPlatform = GST_GL_PLATFORM_EGL;
+ // use the egl display if we have one
+ if (m_eglDisplay) {
+#if GST_GL_HAVE_PLATFORM_EGL
+ gstGlDisplay.reset(
+ GST_GL_DISPLAY_CAST(gst_gl_display_egl_new_with_egl_display(m_eglDisplay)));
+ m_eglImageTargetTexture2D = eglGetProcAddress("glEGLImageTargetTexture2DOES");
+#endif
+ } else {
+ auto display = pni->nativeResourceForIntegration("display"_ba);
+
+ if (display) {
+#if GST_GL_HAVE_WINDOW_X11 && __has_include("X11/Xlib-xcb.h")
+ if (platform == QLatin1String("xcb")) {
+ contextName = "glxcontext"_ba;
+ glPlatform = GST_GL_PLATFORM_GLX;
+
+ gstGlDisplay.reset(GST_GL_DISPLAY_CAST(
+ gst_gl_display_x11_new_with_display(reinterpret_cast<Display *>(display))));
+ }
+#endif
+#if GST_GL_HAVE_WINDOW_WAYLAND && __has_include("wayland-client.h")
+ if (platform.startsWith(QLatin1String("wayland"))) {
+ Q_ASSERT(!gstGlDisplay);
+ gstGlDisplay.reset(GST_GL_DISPLAY_CAST(gst_gl_display_wayland_new_with_display(
+ reinterpret_cast<struct wl_display *>(display))));
+ }
+#endif
+ }
+ }
+
+ if (!gstGlDisplay) {
+ qWarning() << "Could not create GstGLDisplay";
+ return;
+ }
+
+ void *nativeContext = pni->nativeResourceForContext(contextName, glContext);
+ if (!nativeContext)
+ qWarning() << "Could not find resource for" << contextName;
+
+ GstGLAPI glApi = QOpenGLContext::openGLModuleType() == QOpenGLContext::LibGL ? GST_GL_API_OPENGL : GST_GL_API_GLES2;
+ QGstGLContextHandle appContext{
+ gst_gl_context_new_wrapped(gstGlDisplay.get(), guintptr(nativeContext), glPlatform, glApi),
+ };
+ if (!appContext)
+ qWarning() << "Could not create wrappped context for platform:" << glPlatform;
+
+ gst_gl_context_activate(appContext.get(), true);
+
+ QUniqueGErrorHandle error;
+ gst_gl_context_fill_info(appContext.get(), &error);
+ if (error) {
+ qWarning() << "Could not fill context info:" << error;
+ error = {};
+ }
+
+ QGstGLContextHandle displayContext;
+ gst_gl_display_create_context(gstGlDisplay.get(), appContext.get(), &displayContext, &error);
+ if (error)
+ qWarning() << "Could not create display context:" << error;
+
+ appContext.close();
+
+ m_gstGlDisplayContext.reset(gst_context_new(GST_GL_DISPLAY_CONTEXT_TYPE, false));
+ gst_context_set_gl_display(m_gstGlDisplayContext.get(), gstGlDisplay.get());
+
+ m_gstGlLocalContext.reset(gst_context_new("gst.gl.local_context", false));
+ GstStructure *structure = gst_context_writable_structure(m_gstGlLocalContext.get());
+ gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, displayContext.get(), nullptr);
+ displayContext.close();
+
+ if (m_pipeline)
+ gst_element_set_context(m_pipeline.element(), m_gstGlLocalContext.get());
+#endif // #if QT_CONFIG(gstreamer_gl)
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qgstreamervideosink_p.cpp"
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamervideosink_p.h b/src/plugins/multimedia/gstreamer/common/qgstreamervideosink_p.h
new file mode 100644
index 000000000..7ee1dd2e6
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstreamervideosink_p.h
@@ -0,0 +1,76 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTREAMERVIDEOSINK_H
+#define QGSTREAMERVIDEOSINK_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/qvideosink.h>
+#include <QtMultimedia/private/qplatformvideosink_p.h>
+
+#include <common/qgstpipeline_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QGstreamerVideoSink : public QPlatformVideoSink
+{
+ Q_OBJECT
+
+public:
+ explicit QGstreamerVideoSink(QVideoSink *parent = nullptr);
+ ~QGstreamerVideoSink();
+
+ void setRhi(QRhi *rhi) override;
+ QRhi *rhi() const { return m_rhi; }
+
+ QGstElement gstSink();
+ QGstElement subtitleSink() const { return m_gstSubtitleSink; }
+
+ void setPipeline(QGstPipeline pipeline);
+ bool inStoppedState() const;
+
+ GstContext *gstGlDisplayContext() const { return m_gstGlDisplayContext.get(); }
+ GstContext *gstGlLocalContext() const { return m_gstGlLocalContext.get(); }
+ Qt::HANDLE eglDisplay() const { return m_eglDisplay; }
+ QFunctionPointer eglImageTargetTexture2D() const { return m_eglImageTargetTexture2D; }
+
+Q_SIGNALS:
+ void aboutToBeDestroyed();
+
+private:
+ void createQtSink();
+ void updateSinkElement();
+
+ void unrefGstContexts();
+ void updateGstContexts();
+
+ QGstPipeline m_pipeline;
+ QGstBin m_sinkBin;
+ QGstElement m_gstPreprocess;
+ QGstElement m_gstCapsFilter;
+ QGstElement m_gstVideoSink;
+ QGstElement m_gstQtSink;
+ QGstElement m_gstSubtitleSink;
+
+ QRhi *m_rhi = nullptr;
+
+ Qt::HANDLE m_eglDisplay = nullptr;
+ QFunctionPointer m_eglImageTargetTexture2D = nullptr;
+
+ QGstContextHandle m_gstGlLocalContext;
+ QGstContextHandle m_gstGlDisplayContext;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/gstreamer/common/qgstsubtitlesink.cpp b/src/plugins/multimedia/gstreamer/common/qgstsubtitlesink.cpp
new file mode 100644
index 000000000..c6b230d85
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstsubtitlesink.cpp
@@ -0,0 +1,155 @@
+// Copyright (C) 2021 The Qt Company
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QDebug>
+#include <QThread>
+#include <QEvent>
+
+#include "qgstreamervideosink_p.h"
+#include "qgstsubtitlesink_p.h"
+
+QT_BEGIN_NAMESPACE
+
+static GstBaseSinkClass *gst_sink_parent_class;
+static thread_local QGstreamerVideoSink *gst_current_sink;
+
+#define ST_SINK(s) QGstSubtitleSink *sink(reinterpret_cast<QGstSubtitleSink *>(s))
+
+QGstSubtitleSink *QGstSubtitleSink::createSink(QGstreamerVideoSink *sink)
+{
+ gst_current_sink = sink;
+
+ QGstSubtitleSink *gstSink = reinterpret_cast<QGstSubtitleSink *>(
+ g_object_new(QGstSubtitleSink::get_type(), nullptr));
+ g_object_set(gstSink, "async", false, nullptr);
+
+ return gstSink;
+}
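
The thread_local gst_current_sink is how the C++ context reaches the GObject constructor: this type defines no construct-time properties, so g_object_new() cannot carry the pointer, and createSink() instead parks it in thread-local storage for instance_init() to claim immediately. QGstVideoRendererSink further down uses the same handoff pattern.
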
+
+GType QGstSubtitleSink::get_type()
+{
+ static const GTypeInfo info =
+ {
+ sizeof(QGstSubtitleSinkClass), // class_size
+ base_init, // base_init
+ nullptr, // base_finalize
+ class_init, // class_init
+ nullptr, // class_finalize
+ nullptr, // class_data
+ sizeof(QGstSubtitleSink), // instance_size
+ 0, // n_preallocs
+ instance_init, // instance_init
+ nullptr // value_table
+ };
+
+ static const GType type = []() {
+ const auto result = g_type_register_static(
+ GST_TYPE_BASE_SINK, "QGstSubtitleSink", &info, GTypeFlags(0));
+
+ // Register the sink type so it can be used in custom pipelines.
+ // Once the surface is ready, the sink can be used.
+ gst_element_register(nullptr, "qtsubtitlesink", GST_RANK_PRIMARY, result);
+
+ return result;
+ }();
+
+ return type;
+}
+
+void QGstSubtitleSink::class_init(gpointer g_class, gpointer class_data)
+{
+ Q_UNUSED(class_data);
+
+ gst_sink_parent_class = reinterpret_cast<GstBaseSinkClass *>(g_type_class_peek_parent(g_class));
+
+ GstBaseSinkClass *base_sink_class = reinterpret_cast<GstBaseSinkClass *>(g_class);
+ base_sink_class->render = QGstSubtitleSink::render;
+ base_sink_class->get_caps = QGstSubtitleSink::get_caps;
+ base_sink_class->set_caps = QGstSubtitleSink::set_caps;
+ base_sink_class->propose_allocation = QGstSubtitleSink::propose_allocation;
+ base_sink_class->wait_event = QGstSubtitleSink::wait_event;
+
+ GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class);
+ element_class->change_state = QGstSubtitleSink::change_state;
+ gst_element_class_set_metadata(element_class,
+ "Qt built-in subtitle sink",
+ "Sink/Subtitle",
+ "Qt default built-in subtitle sink",
+ "The Qt Company");
+
+ GObjectClass *object_class = reinterpret_cast<GObjectClass *>(g_class);
+ object_class->finalize = QGstSubtitleSink::finalize;
+}
+
+void QGstSubtitleSink::base_init(gpointer g_class)
+{
+ static GstStaticPadTemplate sink_pad_template = GST_STATIC_PAD_TEMPLATE(
+ "sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS("ANY"));
+
+ gst_element_class_add_pad_template(
+ GST_ELEMENT_CLASS(g_class), gst_static_pad_template_get(&sink_pad_template));
+}
+
+void QGstSubtitleSink::instance_init(GTypeInstance *instance, gpointer g_class)
+{
+ Q_UNUSED(g_class);
+ ST_SINK(instance);
+
+ Q_ASSERT(gst_current_sink);
+ sink->sink = gst_current_sink;
+ gst_current_sink = nullptr;
+}
+
+void QGstSubtitleSink::finalize(GObject *object)
+{
+ // Chain up
+ G_OBJECT_CLASS(gst_sink_parent_class)->finalize(object);
+}
+
+GstStateChangeReturn QGstSubtitleSink::change_state(GstElement *element, GstStateChange transition)
+{
+ return GST_ELEMENT_CLASS(gst_sink_parent_class)->change_state(element, transition);
+}
+
+GstCaps *QGstSubtitleSink::get_caps(GstBaseSink *base, GstCaps *filter)
+{
+ return gst_sink_parent_class->get_caps(base, filter);
+}
+
+gboolean QGstSubtitleSink::set_caps(GstBaseSink *base, GstCaps *caps)
+{
+// qDebug() << "set_caps:" << caps;
+ return gst_sink_parent_class->set_caps(base, caps);
+}
+
+gboolean QGstSubtitleSink::propose_allocation(GstBaseSink *base, GstQuery *query)
+{
+ return gst_sink_parent_class->propose_allocation(base, query);
+}
+
+GstFlowReturn QGstSubtitleSink::wait_event(GstBaseSink *base, GstEvent *event)
+{
+ GstFlowReturn retval = gst_sink_parent_class->wait_event(base, event);
+ ST_SINK(base);
+ if (event->type == GST_EVENT_GAP) {
+// qDebug() << "gap, clearing subtitle";
+ sink->sink->setSubtitleText(QString());
+ }
+ return retval;
+}
+
+GstFlowReturn QGstSubtitleSink::render(GstBaseSink *base, GstBuffer *buffer)
+{
+ ST_SINK(base);
+ GstMemory *mem = gst_buffer_get_memory(buffer, 0);
+ GstMapInfo info;
+ QString subtitle;
+ if (gst_memory_map(mem, &info, GST_MAP_READ)) {
+ subtitle = QString::fromUtf8(reinterpret_cast<const char *>(info.data));
+ gst_memory_unmap(mem, &info); // only unmap after a successful map
+ }
+ gst_memory_unref(mem); // gst_buffer_get_memory() returns a new reference
+// qDebug() << "render" << buffer << subtitle;
+ sink->sink->setSubtitleText(subtitle);
+ return GST_FLOW_OK;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstsubtitlesink_p.h b/src/plugins/multimedia/gstreamer/common/qgstsubtitlesink_p.h
new file mode 100644
index 000000000..0f515cb99
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstsubtitlesink_p.h
@@ -0,0 +1,70 @@
+// Copyright (C) 2021 The Qt Company
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTSUBTITLESINK_P_H
+#define QGSTSUBTITLESINK_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/private/qtmultimediaglobal_p.h>
+
+#include <QtCore/qlist.h>
+#include <QtCore/qmutex.h>
+#include <QtCore/qqueue.h>
+#include <QtCore/qpointer.h>
+#include <QtCore/qwaitcondition.h>
+#include <common/qgst_p.h>
+#include <gst/base/gstbasesink.h>
+
+QT_BEGIN_NAMESPACE
+
+class QGstreamerVideoSink;
+
+class QGstSubtitleSink
+{
+public:
+ GstBaseSink parent{};
+
+ static QGstSubtitleSink *createSink(QGstreamerVideoSink *sink);
+
+private:
+ static GType get_type();
+ static void class_init(gpointer g_class, gpointer class_data);
+ static void base_init(gpointer g_class);
+ static void instance_init(GTypeInstance *instance, gpointer g_class);
+
+ static void finalize(GObject *object);
+
+ static GstStateChangeReturn change_state(GstElement *element, GstStateChange transition);
+
+ static GstCaps *get_caps(GstBaseSink *sink, GstCaps *filter);
+ static gboolean set_caps(GstBaseSink *sink, GstCaps *caps);
+
+ static gboolean propose_allocation(GstBaseSink *sink, GstQuery *query);
+
+ static GstFlowReturn wait_event(GstBaseSink * sink, GstEvent * event);
+ static GstFlowReturn render(GstBaseSink *sink, GstBuffer *buffer);
+
+private:
+ QGstreamerVideoSink *sink = nullptr;
+};
+
+
+class QGstSubtitleSinkClass
+{
+public:
+ GstBaseSinkClass parent_class;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/gstreamer/common/qgstutils.cpp b/src/plugins/multimedia/gstreamer/common/qgstutils.cpp
new file mode 100644
index 000000000..8ec2bde3c
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstutils.cpp
@@ -0,0 +1,141 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <common/qgstutils_p.h>
+#include <common/qgst_p.h>
+
+#include <QtMultimedia/qaudioformat.h>
+
+#include <chrono>
+
+QT_BEGIN_NAMESPACE
+
+namespace {
+
+const char *audioSampleFormatNames[QAudioFormat::NSampleFormats] = {
+ nullptr,
+#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN
+ "U8",
+ "S16LE",
+ "S32LE",
+ "F32LE"
+#else
+ "U8",
+ "S16BE",
+ "S32BE",
+ "F32BE"
+#endif
+};
+
+QAudioFormat::SampleFormat gstSampleFormatToSampleFormat(const char *fmt)
+{
+ if (fmt) {
+ for (int i = 1; i < QAudioFormat::NSampleFormats; ++i) {
+ if (strcmp(fmt, audioSampleFormatNames[i]))
+ continue;
+ return QAudioFormat::SampleFormat(i);
+ }
+ }
+ return QAudioFormat::Unknown;
+}
+
+} // namespace
+
+/*
+ Returns the audio format for the sample \a sample.
+ If the sample doesn't have a valid audio format, an invalid QAudioFormat is returned.
+*/
+QAudioFormat QGstUtils::audioFormatForSample(GstSample *sample)
+{
+ auto caps = QGstCaps(gst_sample_get_caps(sample), QGstCaps::NeedsRef);
+ if (caps.isNull())
+ return {};
+ return audioFormatForCaps(caps);
+}
+
+QAudioFormat QGstUtils::audioFormatForCaps(const QGstCaps &caps)
+{
+ QAudioFormat format;
+ QGstStructureView s = caps.at(0);
+ if (s.name() != "audio/x-raw")
+ return format;
+
+ auto rate = s["rate"].toInt();
+ auto channels = s["channels"].toInt();
+ QAudioFormat::SampleFormat fmt = gstSampleFormatToSampleFormat(s["format"].toString());
+ if (!rate || !channels || fmt == QAudioFormat::Unknown)
+ return format;
+
+ format.setSampleRate(*rate);
+ format.setChannelCount(*channels);
+ format.setSampleFormat(fmt);
+
+ return format;
+}
+
+/*
+ Builds GstCaps for an audio format \a format.
+ Returns an empty QGstCaps if the audio format is not valid; the returned
+ QGstCaps owns its reference.
+*/
+
+QGstCaps QGstUtils::capsForAudioFormat(const QAudioFormat &format)
+{
+ if (!format.isValid())
+ return {};
+
+ auto sampleFormat = format.sampleFormat();
+ auto caps = gst_caps_new_simple(
+ "audio/x-raw",
+ "format" , G_TYPE_STRING, audioSampleFormatNames[sampleFormat],
+ "rate" , G_TYPE_INT , format.sampleRate(),
+ "channels", G_TYPE_INT , format.channelCount(),
+ "layout" , G_TYPE_STRING, "interleaved",
+ nullptr);
+
+ return QGstCaps(caps, QGstCaps::HasRef);
+}
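
A minimal usage sketch; for CD-quality stereo audio the resulting caps serialize to "audio/x-raw, format=(string)S16LE, rate=(int)44100, channels=(int)2, layout=(string)interleaved" on a little-endian host:

    QAudioFormat format;
    format.setSampleRate(44100);
    format.setChannelCount(2);
    format.setSampleFormat(QAudioFormat::Int16); // maps to "S16LE" above

    QGstCaps caps = QGstUtils::capsForAudioFormat(format);
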
+
+QList<QAudioFormat::SampleFormat> QGValue::getSampleFormats() const
+{
+ if (!GST_VALUE_HOLDS_LIST(value))
+ return {};
+
+ QList<QAudioFormat::SampleFormat> formats;
+ guint nFormats = gst_value_list_get_size(value);
+ for (guint f = 0; f < nFormats; ++f) {
+ QGValue v = QGValue{ gst_value_list_get_value(value, f) };
+ auto *name = v.toString();
+ QAudioFormat::SampleFormat fmt = gstSampleFormatToSampleFormat(name);
+ if (fmt == QAudioFormat::Unknown)
+ continue;
+ formats.append(fmt);
+ }
+ return formats;
+}
+
+void QGstUtils::setFrameTimeStampsFromBuffer(QVideoFrame *frame, GstBuffer *buffer)
+{
+ using namespace std::chrono;
+ using namespace std::chrono_literals;
+
+ // GStreamer uses nanoseconds, Qt uses microseconds
+ nanoseconds startTime{ GST_BUFFER_TIMESTAMP(buffer) };
+ if (startTime >= 0ns) {
+ frame->setStartTime(floor<microseconds>(startTime).count());
+
+ nanoseconds duration{ GST_BUFFER_DURATION(buffer) };
+ if (duration >= 0ns)
+ frame->setEndTime(floor<microseconds>(startTime + duration).count());
+ }
+}
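
For example, a buffer with a PTS of 40 000 000 ns and a duration of 16 666 667 ns yields a frame start time of 40 000 µs and an end time of 56 666 µs, both floored to whole microseconds.
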
+
+GList *qt_gst_video_sinks()
+{
+ return gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_SINK
+ | GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO,
+ GST_RANK_MARGINAL);
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstutils_p.h b/src/plugins/multimedia/gstreamer/common/qgstutils_p.h
new file mode 100644
index 000000000..c65fcf090
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstutils_p.h
@@ -0,0 +1,41 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTUTILS_P_H
+#define QGSTUTILS_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <gst/gstsample.h>
+#include <gst/gstbuffer.h>
+
+#include <QtCore/qglobal.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAudioFormat;
+class QGstCaps;
+class QVideoFrame;
+
+namespace QGstUtils {
+QAudioFormat audioFormatForSample(GstSample *sample);
+QAudioFormat audioFormatForCaps(const QGstCaps &caps);
+QGstCaps capsForAudioFormat(const QAudioFormat &format);
+
+void setFrameTimeStampsFromBuffer(QVideoFrame *frame, GstBuffer *buffer);
+} // namespace QGstUtils
+
+GList *qt_gst_video_sinks();
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/gstreamer/common/qgstvideobuffer.cpp b/src/plugins/multimedia/gstreamer/common/qgstvideobuffer.cpp
new file mode 100644
index 000000000..be6342ea8
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstvideobuffer.cpp
@@ -0,0 +1,393 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qgstvideobuffer_p.h"
+#include "qgstreamervideosink_p.h"
+#include <private/qvideotexturehelper_p.h>
+#include <qpa/qplatformnativeinterface.h>
+#include <qguiapplication.h>
+
+#include <gst/video/video.h>
+#include <gst/video/video-frame.h>
+#include <gst/video/gstvideometa.h>
+#include <gst/pbutils/gstpluginsbaseversion.h>
+
+#include <common/qgstutils_p.h>
+
+#if QT_CONFIG(gstreamer_gl)
+# include <QtGui/rhi/qrhi.h>
+# include <QtGui/qopenglcontext.h>
+# include <QtGui/qopenglfunctions.h>
+# include <QtGui/qopengl.h>
+
+# include <gst/gl/gstglconfig.h>
+# include <gst/gl/gstglmemory.h>
+# include <gst/gl/gstglsyncmeta.h>
+
+# include <EGL/egl.h>
+# include <EGL/eglext.h>
+
+# if QT_CONFIG(linux_dmabuf)
+# include <gst/allocators/gstdmabuf.h>
+# endif
+#endif
+
+QT_BEGIN_NAMESPACE
+
+// keep things building without drm_fourcc.h
+#define fourcc_code(a, b, c, d) ((uint32_t)(a) | ((uint32_t)(b) << 8) | \
+ ((uint32_t)(c) << 16) | ((uint32_t)(d) << 24))
+
+#define DRM_FORMAT_RGBA8888 fourcc_code('R', 'A', '2', '4') /* [31:0] R:G:B:A 8:8:8:8 little endian */
+#define DRM_FORMAT_RGB888 fourcc_code('R', 'G', '2', '4') /* [23:0] R:G:B little endian */
+#define DRM_FORMAT_RG88 fourcc_code('R', 'G', '8', '8') /* [15:0] R:G 8:8 little endian */
+#define DRM_FORMAT_ABGR8888 fourcc_code('A', 'B', '2', '4') /* [31:0] A:B:G:R 8:8:8:8 little endian */
+#define DRM_FORMAT_BGR888 fourcc_code('B', 'G', '2', '4') /* [23:0] B:G:R little endian */
+#define DRM_FORMAT_GR88 fourcc_code('G', 'R', '8', '8') /* [15:0] G:R 8:8 little endian */
+#define DRM_FORMAT_R8 fourcc_code('R', '8', ' ', ' ') /* [7:0] R */
+#define DRM_FORMAT_R16 fourcc_code('R', '1', '6', ' ') /* [15:0] R little endian */
+#define DRM_FORMAT_RGB565 fourcc_code('R', 'G', '1', '6') /* [15:0] R:G:B 5:6:5 little endian */
+#define DRM_FORMAT_RG1616 fourcc_code('R', 'G', '3', '2') /* [31:0] R:G 16:16 little endian */
+#define DRM_FORMAT_GR1616 fourcc_code('G', 'R', '3', '2') /* [31:0] G:R 16:16 little endian */
+#define DRM_FORMAT_BGRA1010102 fourcc_code('B', 'A', '3', '0') /* [31:0] B:G:R:A 10:10:10:2 little endian */
+
+QGstVideoBuffer::QGstVideoBuffer(QGstBufferHandle buffer, const GstVideoInfo &info,
+ QGstreamerVideoSink *sink, const QVideoFrameFormat &frameFormat,
+ QGstCaps::MemoryFormat format)
+ : QHwVideoBuffer((sink && sink->rhi() && format != QGstCaps::CpuMemory)
+ ? QVideoFrame::RhiTextureHandle
+ : QVideoFrame::NoHandle,
+ sink ? sink->rhi() : nullptr),
+ memoryFormat(format),
+ m_frameFormat(frameFormat),
+ m_rhi(sink ? sink->rhi() : nullptr),
+ m_videoInfo(info),
+ m_buffer(std::move(buffer))
+{
+ if (sink) {
+ eglDisplay = sink->eglDisplay();
+ eglImageTargetTexture2D = sink->eglImageTargetTexture2D();
+ }
+
+#if !QT_CONFIG(gstreamer_gl)
+ Q_UNUSED(memoryFormat);
+#endif
+}
+
+QGstVideoBuffer::~QGstVideoBuffer()
+{
+ unmap();
+}
+
+QAbstractVideoBuffer::MapData QGstVideoBuffer::map(QtVideo::MapMode mode)
+{
+ const GstMapFlags flags = GstMapFlags(
+ ((mode & QtVideo::MapMode::ReadOnly ) == QtVideo::MapMode::NotMapped ? 0 : GST_MAP_READ)
+ | ((mode & QtVideo::MapMode::WriteOnly) == QtVideo::MapMode::NotMapped ? 0 : GST_MAP_WRITE));
+
+ MapData mapData;
+ if (mode == QtVideo::MapMode::NotMapped || m_mode != QtVideo::MapMode::NotMapped)
+ return mapData;
+
+ if (m_videoInfo.finfo->n_planes == 0) { // Encoded
+ if (gst_buffer_map(m_buffer.get(), &m_frame.map[0], flags)) {
+ mapData.planeCount = 1;
+ mapData.bytesPerLine[0] = -1;
+ mapData.dataSize[0] = m_frame.map[0].size;
+ mapData.data[0] = static_cast<uchar *>(m_frame.map[0].data);
+
+ m_mode = mode;
+ }
+ } else if (gst_video_frame_map(&m_frame, &m_videoInfo, m_buffer.get(), flags)) {
+ mapData.planeCount = GST_VIDEO_FRAME_N_PLANES(&m_frame);
+
+ for (guint i = 0; i < GST_VIDEO_FRAME_N_PLANES(&m_frame); ++i) {
+ mapData.bytesPerLine[i] = GST_VIDEO_FRAME_PLANE_STRIDE(&m_frame, i);
+ mapData.data[i] = static_cast<uchar *>(GST_VIDEO_FRAME_PLANE_DATA(&m_frame, i));
+ mapData.dataSize[i] = mapData.bytesPerLine[i]*GST_VIDEO_FRAME_COMP_HEIGHT(&m_frame, i);
+ }
+
+ m_mode = mode;
+ }
+ return mapData;
+}
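
For an NV12 frame of 640x480, for instance, this reports two planes: plane 0 with a 640-byte stride and 480 rows of luma, and plane 1 with a 640-byte stride and 240 rows of interleaved chroma, giving dataSize values of 307200 and 153600 bytes (assuming the strides carry no extra padding).
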
+
+void QGstVideoBuffer::unmap()
+{
+ if (m_mode != QtVideo::MapMode::NotMapped) {
+ if (m_videoInfo.finfo->n_planes == 0)
+ gst_buffer_unmap(m_buffer.get(), &m_frame.map[0]);
+ else
+ gst_video_frame_unmap(&m_frame);
+ }
+ m_mode = QtVideo::MapMode::NotMapped;
+}
+
+#if QT_CONFIG(gstreamer_gl) && QT_CONFIG(linux_dmabuf)
+static int
+fourccFromVideoInfo(const GstVideoInfo * info, int plane)
+{
+ GstVideoFormat format = GST_VIDEO_INFO_FORMAT (info);
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ const gint rgba_fourcc = DRM_FORMAT_ABGR8888;
+ const gint rgb_fourcc = DRM_FORMAT_BGR888;
+ const gint rg_fourcc = DRM_FORMAT_GR88;
+#else
+ const gint rgba_fourcc = DRM_FORMAT_RGBA8888;
+ const gint rgb_fourcc = DRM_FORMAT_RGB888;
+ const gint rg_fourcc = DRM_FORMAT_RG88;
+#endif
+
+ GST_DEBUG ("Getting DRM fourcc for %s plane %i",
+ gst_video_format_to_string (format), plane);
+
+ switch (format) {
+ case GST_VIDEO_FORMAT_RGB16:
+ case GST_VIDEO_FORMAT_BGR16:
+ return DRM_FORMAT_RGB565;
+
+ case GST_VIDEO_FORMAT_RGB:
+ case GST_VIDEO_FORMAT_BGR:
+ return rgb_fourcc;
+
+ case GST_VIDEO_FORMAT_RGBA:
+ case GST_VIDEO_FORMAT_RGBx:
+ case GST_VIDEO_FORMAT_BGRA:
+ case GST_VIDEO_FORMAT_BGRx:
+ case GST_VIDEO_FORMAT_ARGB:
+ case GST_VIDEO_FORMAT_xRGB:
+ case GST_VIDEO_FORMAT_ABGR:
+ case GST_VIDEO_FORMAT_xBGR:
+ case GST_VIDEO_FORMAT_AYUV:
+#if GST_CHECK_PLUGINS_BASE_VERSION(1,16,0)
+ case GST_VIDEO_FORMAT_VUYA:
+#endif
+ return rgba_fourcc;
+
+ case GST_VIDEO_FORMAT_GRAY8:
+ return DRM_FORMAT_R8;
+
+ case GST_VIDEO_FORMAT_YUY2:
+ case GST_VIDEO_FORMAT_UYVY:
+ case GST_VIDEO_FORMAT_GRAY16_LE:
+ case GST_VIDEO_FORMAT_GRAY16_BE:
+ return rg_fourcc;
+
+ case GST_VIDEO_FORMAT_NV12:
+ case GST_VIDEO_FORMAT_NV21:
+ return plane == 0 ? DRM_FORMAT_R8 : rg_fourcc;
+
+ case GST_VIDEO_FORMAT_I420:
+ case GST_VIDEO_FORMAT_YV12:
+ case GST_VIDEO_FORMAT_Y41B:
+ case GST_VIDEO_FORMAT_Y42B:
+ case GST_VIDEO_FORMAT_Y444:
+ return DRM_FORMAT_R8;
+
+#if GST_CHECK_PLUGINS_BASE_VERSION(1,16,0)
+ case GST_VIDEO_FORMAT_BGR10A2_LE:
+ return DRM_FORMAT_BGRA1010102;
+#endif
+
+// case GST_VIDEO_FORMAT_RGB10A2_LE:
+// return DRM_FORMAT_RGBA1010102;
+
+ case GST_VIDEO_FORMAT_P010_10LE:
+// case GST_VIDEO_FORMAT_P012_LE:
+// case GST_VIDEO_FORMAT_P016_LE:
+ return plane == 0 ? DRM_FORMAT_R16 : DRM_FORMAT_GR1616;
+
+ case GST_VIDEO_FORMAT_P010_10BE:
+// case GST_VIDEO_FORMAT_P012_BE:
+// case GST_VIDEO_FORMAT_P016_BE:
+ return plane == 0 ? DRM_FORMAT_R16 : DRM_FORMAT_RG1616;
+
+ default:
+ GST_ERROR ("Unsupported format for DMABuf.");
+ return -1;
+ }
+}
+#endif
+
+#if QT_CONFIG(gstreamer_gl)
+struct GlTextures
+{
+ uint count = 0;
+ bool owned = false;
+ std::array<guint32, QVideoTextureHelper::TextureDescription::maxPlanes> names{};
+};
+
+class QGstQVideoFrameTextures : public QVideoFrameTextures
+{
+public:
+ QGstQVideoFrameTextures(QRhi *rhi, QSize size, QVideoFrameFormat::PixelFormat format, GlTextures &textures)
+ : m_rhi(rhi)
+ , m_glTextures(textures)
+ {
+ auto desc = QVideoTextureHelper::textureDescription(format);
+ for (uint i = 0; i < textures.count; ++i) {
+ QSize planeSize(desc->widthForPlane(size.width(), int(i)),
+ desc->heightForPlane(size.height(), int(i)));
+ m_textures[i].reset(rhi->newTexture(desc->textureFormat[i], planeSize, 1, {}));
+ m_textures[i]->createFrom({textures.names[i], 0});
+ }
+ }
+
+ ~QGstQVideoFrameTextures()
+ {
+ m_rhi->makeThreadLocalNativeContextCurrent();
+ auto ctx = QOpenGLContext::currentContext();
+ if (m_glTextures.owned && ctx)
+ ctx->functions()->glDeleteTextures(int(m_glTextures.count), m_glTextures.names.data());
+ }
+
+ QRhiTexture *texture(uint plane) const override
+ {
+ return plane < m_glTextures.count ? m_textures[plane].get() : nullptr;
+ }
+
+private:
+ QRhi *m_rhi = nullptr;
+ GlTextures m_glTextures;
+ std::unique_ptr<QRhiTexture> m_textures[QVideoTextureHelper::TextureDescription::maxPlanes];
+};
+
+static GlTextures mapFromGlTexture(const QGstBufferHandle &bufferHandle, GstVideoFrame &frame,
+ GstVideoInfo &videoInfo)
+{
+ GstBuffer *buffer = bufferHandle.get();
+ auto *mem = GST_GL_BASE_MEMORY_CAST(gst_buffer_peek_memory(buffer, 0));
+ if (!mem)
+ return {};
+
+ if (!gst_video_frame_map(&frame, &videoInfo, buffer, GstMapFlags(GST_MAP_READ|GST_MAP_GL))) {
+ qWarning() << "Could not map GL textures";
+ return {};
+ }
+
+ auto *sync_meta = gst_buffer_get_gl_sync_meta(buffer);
+ GstBuffer *sync_buffer = nullptr;
+ if (!sync_meta) {
+ sync_buffer = gst_buffer_new();
+ sync_meta = gst_buffer_add_gl_sync_meta(mem->context, sync_buffer);
+ }
+ gst_gl_sync_meta_set_sync_point (sync_meta, mem->context);
+ gst_gl_sync_meta_wait (sync_meta, mem->context);
+ if (sync_buffer)
+ gst_buffer_unref(sync_buffer);
+
+ GlTextures textures;
+ textures.count = frame.info.finfo->n_planes;
+
+ for (uint i = 0; i < textures.count; ++i)
+ textures.names[i] = *(guint32 *)frame.data[i];
+
+ gst_video_frame_unmap(&frame);
+
+ return textures;
+}
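
The sync-meta handling above (setting a sync point in the producer's GL context and then waiting on it) ensures that the decoder's GL commands writing the textures have completed before the texture names are handed over; the temporary sync_buffer exists only to host a sync meta when the incoming buffer doesn't already carry one.
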
+
+#if GST_GL_HAVE_PLATFORM_EGL && QT_CONFIG(linux_dmabuf)
+static GlTextures mapFromDmaBuffer(QRhi *rhi, const QGstBufferHandle &bufferHandle,
+ GstVideoFrame &frame, GstVideoInfo &videoInfo,
+ Qt::HANDLE eglDisplay, QFunctionPointer eglImageTargetTexture2D)
+{
+ GstBuffer *buffer = bufferHandle.get();
+
+ Q_ASSERT(gst_is_dmabuf_memory(gst_buffer_peek_memory(buffer, 0)));
+ Q_ASSERT(eglDisplay);
+ Q_ASSERT(eglImageTargetTexture2D);
+
+ auto *nativeHandles = static_cast<const QRhiGles2NativeHandles *>(rhi->nativeHandles());
+ auto glContext = nativeHandles->context;
+ if (!glContext) {
+ qWarning() << "no GL context";
+ return {};
+ }
+
+ if (!gst_video_frame_map(&frame, &videoInfo, buffer, GstMapFlags(GST_MAP_READ))) {
+ qDebug() << "Couldn't map DMA video frame";
+ return {};
+ }
+
+ GlTextures textures = {};
+ textures.owned = true;
+ textures.count = GST_VIDEO_FRAME_N_PLANES(&frame);
+ // int width = GST_VIDEO_FRAME_WIDTH(&frame);
+ // int height = GST_VIDEO_FRAME_HEIGHT(&frame);
+ Q_ASSERT(GST_VIDEO_FRAME_N_PLANES(&frame) == gst_buffer_n_memory(buffer));
+
+ QOpenGLFunctions functions(glContext);
+ functions.glGenTextures(int(textures.count), textures.names.data());
+
+ // qDebug() << Qt::hex << "glGenTextures: glerror" << glGetError() << "egl error" << eglGetError();
+ // qDebug() << "converting DMA buffer nPlanes=" << nPlanes << m_textures[0] << m_textures[1] << m_textures[2];
+
+ for (int i = 0; i < int(textures.count); ++i) {
+ auto offset = GST_VIDEO_FRAME_PLANE_OFFSET(&frame, i);
+ auto stride = GST_VIDEO_FRAME_PLANE_STRIDE(&frame, i);
+ int planeWidth = GST_VIDEO_FRAME_COMP_WIDTH(&frame, i);
+ int planeHeight = GST_VIDEO_FRAME_COMP_HEIGHT(&frame, i);
+ auto mem = gst_buffer_peek_memory(buffer, i);
+ int fd = gst_dmabuf_memory_get_fd(mem);
+
+ // qDebug() << " plane" << i << "size" << width << height << "stride" << stride << "offset" << offset << "fd=" << fd;
+ // ### do we need to open/close the fd?
+ // ### can we convert several planes at once?
+ // Get the correct DRM_FORMATs from the texture format in the description
+ EGLAttrib const attribute_list[] = {
+ EGL_WIDTH, planeWidth,
+ EGL_HEIGHT, planeHeight,
+ EGL_LINUX_DRM_FOURCC_EXT, fourccFromVideoInfo(&videoInfo, i),
+ EGL_DMA_BUF_PLANE0_FD_EXT, fd,
+ EGL_DMA_BUF_PLANE0_OFFSET_EXT, (EGLAttrib)offset,
+ EGL_DMA_BUF_PLANE0_PITCH_EXT, stride,
+ EGL_NONE
+ };
+ EGLImage image = eglCreateImage(eglDisplay,
+ EGL_NO_CONTEXT,
+ EGL_LINUX_DMA_BUF_EXT,
+ nullptr,
+ attribute_list);
+ if (image == EGL_NO_IMAGE_KHR) {
+ qWarning() << "could not create EGL image for plane" << i << Qt::hex << eglGetError();
+ }
+ // qDebug() << Qt::hex << "eglCreateImage: glerror" << glGetError() << "egl error" << eglGetError();
+ functions.glBindTexture(GL_TEXTURE_2D, textures.names[i]);
+ // qDebug() << Qt::hex << "bind texture: glerror" << glGetError() << "egl error" << eglGetError();
+ auto EGLImageTargetTexture2D = (PFNGLEGLIMAGETARGETTEXTURE2DOESPROC)eglImageTargetTexture2D;
+ EGLImageTargetTexture2D(GL_TEXTURE_2D, image);
+ // qDebug() << Qt::hex << "glerror" << glGetError() << "egl error" << eglGetError();
+ eglDestroyImage(eglDisplay, image);
+ }
+ gst_video_frame_unmap(&frame);
+
+ return textures;
+}
+#endif
+#endif
+
+std::unique_ptr<QVideoFrameTextures> QGstVideoBuffer::mapTextures(QRhi *rhi)
+{
+ if (!rhi)
+ return {};
+
+#if QT_CONFIG(gstreamer_gl)
+ GlTextures textures = {};
+ if (memoryFormat == QGstCaps::GLTexture)
+ textures = mapFromGlTexture(m_buffer, m_frame, m_videoInfo);
+
+# if GST_GL_HAVE_PLATFORM_EGL && QT_CONFIG(linux_dmabuf)
+ else if (memoryFormat == QGstCaps::DMABuf)
+ textures = mapFromDmaBuffer(m_rhi, m_buffer, m_frame, m_videoInfo, eglDisplay,
+ eglImageTargetTexture2D);
+
+# endif
+ if (textures.count > 0)
+ return std::make_unique<QGstQVideoFrameTextures>(rhi, QSize{m_videoInfo.width, m_videoInfo.height},
+ m_frameFormat.pixelFormat(), textures);
+#endif
+ return {};
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstvideobuffer_p.h b/src/plugins/multimedia/gstreamer/common/qgstvideobuffer_p.h
new file mode 100644
index 000000000..573a4662c
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstvideobuffer_p.h
@@ -0,0 +1,55 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTVIDEOBUFFER_P_H
+#define QGSTVIDEOBUFFER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qhwvideobuffer_p.h>
+#include <QtCore/qvariant.h>
+
+#include <common/qgst_p.h>
+#include <gst/video/video.h>
+
+QT_BEGIN_NAMESPACE
+class QVideoFrameFormat;
+class QGstreamerVideoSink;
+class QOpenGLContext;
+
+class QGstVideoBuffer final : public QHwVideoBuffer
+{
+public:
+ QGstVideoBuffer(QGstBufferHandle buffer, const GstVideoInfo &info, QGstreamerVideoSink *sink,
+ const QVideoFrameFormat &frameFormat, QGstCaps::MemoryFormat format);
+ ~QGstVideoBuffer();
+
+ MapData map(QtVideo::MapMode mode) override;
+ void unmap() override;
+
+ std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *) override;
+
+private:
+ const QGstCaps::MemoryFormat memoryFormat = QGstCaps::CpuMemory;
+ const QVideoFrameFormat m_frameFormat;
+ QRhi *m_rhi = nullptr;
+ mutable GstVideoInfo m_videoInfo;
+ mutable GstVideoFrame m_frame{};
+ const QGstBufferHandle m_buffer;
+ QtVideo::MapMode m_mode = QtVideo::MapMode::NotMapped;
+ Qt::HANDLE eglDisplay = nullptr;
+ QFunctionPointer eglImageTargetTexture2D = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink.cpp b/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink.cpp
new file mode 100644
index 000000000..f9c936ea6
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink.cpp
@@ -0,0 +1,499 @@
+// Copyright (C) 2016 Jolla Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qgstvideorenderersink_p.h"
+
+#include <QtMultimedia/qvideoframe.h>
+#include <QtMultimedia/qvideosink.h>
+#include <QtCore/private/qfactoryloader_p.h>
+#include <QtCore/private/quniquehandle_p.h>
+#include <QtCore/qcoreapplication.h>
+#include <QtCore/qdebug.h>
+#include <QtCore/qloggingcategory.h>
+#include <QtCore/qmap.h>
+#include <QtCore/qthread.h>
+#include <QtGui/qevent.h>
+
+#include <common/qgstvideobuffer_p.h>
+#include <common/qgstreamervideosink_p.h>
+#include <common/qgst_debug_p.h>
+#include <common/qgstutils_p.h>
+
+#include <private/qvideoframe_p.h>
+
+#include <gst/video/video.h>
+#include <gst/video/gstvideometa.h>
+
+
+#include <rhi/qrhi.h>
+#if QT_CONFIG(gstreamer_gl)
+#include <gst/gl/gl.h>
+#endif // #if QT_CONFIG(gstreamer_gl)
+
+// DMA support
+#if QT_CONFIG(linux_dmabuf)
+#include <gst/allocators/gstdmabuf.h>
+#endif
+
+static Q_LOGGING_CATEGORY(qLcGstVideoRenderer, "qt.multimedia.gstvideorenderer")
+
+QT_BEGIN_NAMESPACE
+
+QGstVideoRenderer::QGstVideoRenderer(QGstreamerVideoSink *sink)
+ : m_sink(sink), m_surfaceCaps(createSurfaceCaps(sink))
+{
+ QObject::connect(
+ sink, &QGstreamerVideoSink::aboutToBeDestroyed, this,
+ [this] {
+ QMutexLocker locker(&m_sinkMutex);
+ m_sink = nullptr;
+ },
+ Qt::DirectConnection);
+}
+
+QGstVideoRenderer::~QGstVideoRenderer() = default;
+
+QGstCaps QGstVideoRenderer::createSurfaceCaps([[maybe_unused]] QGstreamerVideoSink *sink)
+{
+ QGstCaps caps = QGstCaps::create();
+
+ // All the formats that both we and gstreamer support
+ auto formats = QList<QVideoFrameFormat::PixelFormat>()
+ << QVideoFrameFormat::Format_YUV420P
+ << QVideoFrameFormat::Format_YUV422P
+ << QVideoFrameFormat::Format_YV12
+ << QVideoFrameFormat::Format_UYVY
+ << QVideoFrameFormat::Format_YUYV
+ << QVideoFrameFormat::Format_NV12
+ << QVideoFrameFormat::Format_NV21
+ << QVideoFrameFormat::Format_AYUV
+ << QVideoFrameFormat::Format_P010
+ << QVideoFrameFormat::Format_XRGB8888
+ << QVideoFrameFormat::Format_XBGR8888
+ << QVideoFrameFormat::Format_RGBX8888
+ << QVideoFrameFormat::Format_BGRX8888
+ << QVideoFrameFormat::Format_ARGB8888
+ << QVideoFrameFormat::Format_ABGR8888
+ << QVideoFrameFormat::Format_RGBA8888
+ << QVideoFrameFormat::Format_BGRA8888
+ << QVideoFrameFormat::Format_Y8
+ << QVideoFrameFormat::Format_Y16
+ ;
+#if QT_CONFIG(gstreamer_gl)
+ QRhi *rhi = sink->rhi();
+ if (rhi && rhi->backend() == QRhi::OpenGLES2) {
+ caps.addPixelFormats(formats, GST_CAPS_FEATURE_MEMORY_GL_MEMORY);
+#if QT_CONFIG(linux_dmabuf)
+ if (sink->eglDisplay() && sink->eglImageTargetTexture2D()) {
+ // We currently do not handle planar DMA buffers, as it's somewhat unclear how to
+ // convert the planar EGLImage into something we can use from OpenGL
+ auto singlePlaneFormats = QList<QVideoFrameFormat::PixelFormat>()
+ << QVideoFrameFormat::Format_UYVY
+ << QVideoFrameFormat::Format_YUYV
+ << QVideoFrameFormat::Format_AYUV
+ << QVideoFrameFormat::Format_XRGB8888
+ << QVideoFrameFormat::Format_XBGR8888
+ << QVideoFrameFormat::Format_RGBX8888
+ << QVideoFrameFormat::Format_BGRX8888
+ << QVideoFrameFormat::Format_ARGB8888
+ << QVideoFrameFormat::Format_ABGR8888
+ << QVideoFrameFormat::Format_RGBA8888
+ << QVideoFrameFormat::Format_BGRA8888
+ << QVideoFrameFormat::Format_Y8
+ << QVideoFrameFormat::Format_Y16
+ ;
+ caps.addPixelFormats(singlePlaneFormats, GST_CAPS_FEATURE_MEMORY_DMABUF);
+ }
+#endif
+ }
+#endif
+ caps.addPixelFormats(formats);
+ return caps;
+}
+
+const QGstCaps &QGstVideoRenderer::caps()
+{
+ return m_surfaceCaps;
+}
+
+bool QGstVideoRenderer::start(const QGstCaps& caps)
+{
+ qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::start" << caps;
+
+ {
+ m_frameRotationAngle = QtVideo::Rotation::None;
+ auto optionalFormatAndVideoInfo = caps.formatAndVideoInfo();
+ if (optionalFormatAndVideoInfo) {
+ std::tie(m_format, m_videoInfo) = std::move(*optionalFormatAndVideoInfo);
+ } else {
+ m_format = {};
+ m_videoInfo = {};
+ }
+ m_memoryFormat = caps.memoryFormat();
+ }
+
+ return true;
+}
+
+void QGstVideoRenderer::stop()
+{
+ qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::stop";
+
+ QMetaObject::invokeMethod(this, [this] {
+ m_currentState.buffer = {};
+ if (m_sink)
+ m_sink->setVideoFrame(QVideoFrame{});
+ });
+}
+
+void QGstVideoRenderer::unlock()
+{
+ qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::unlock";
+}
+
+bool QGstVideoRenderer::proposeAllocation(GstQuery *)
+{
+ qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::proposeAllocation";
+ return true;
+}
+
+GstFlowReturn QGstVideoRenderer::render(GstBuffer *buffer)
+{
+ qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::render";
+
+ GstVideoCropMeta *meta = gst_buffer_get_video_crop_meta(buffer);
+ if (meta) {
+ QRect vp(meta->x, meta->y, meta->width, meta->height);
+ if (m_format.viewport() != vp) {
+ qCDebug(qLcGstVideoRenderer)
+ << Q_FUNC_INFO << " Update viewport on Metadata: [" << meta->width << "x"
+ << meta->height << " at " << meta->x << "," << meta->y << "]";
+ // Update viewport if data is not the same
+ m_format.setViewport(vp);
+ }
+ }
+
+ RenderBufferState state{
+ .buffer = QGstBufferHandle{ buffer, QGstBufferHandle::NeedsRef },
+ .format = m_format,
+ .memoryFormat = m_memoryFormat,
+ .mirrored = m_frameMirrored,
+ .rotationAngle = m_frameRotationAngle,
+ };
+
+ qCDebug(qLcGstVideoRenderer) << " sending video frame";
+
+ QMetaObject::invokeMethod(this, [this, state = std::move(state)]() mutable {
+ if (state == m_currentState) {
+ // same buffer received twice
+ if (!m_sink || !m_sink->inStoppedState())
+ return;
+
+ qCDebug(qLcGstVideoRenderer) << " showing empty video frame";
+ m_currentVideoFrame = {};
+ m_sink->setVideoFrame(m_currentVideoFrame);
+ m_currentState = {};
+ return;
+ }
+
+ auto videoBuffer = std::make_unique<QGstVideoBuffer>(state.buffer, m_videoInfo, m_sink,
+ state.format, state.memoryFormat);
+ QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(videoBuffer), state.format);
+ QGstUtils::setFrameTimeStampsFromBuffer(&frame, state.buffer.get());
+ frame.setMirrored(state.mirrored);
+ frame.setRotation(state.rotationAngle);
+ m_currentVideoFrame = std::move(frame);
+ m_currentState = std::move(state);
+
+ if (!m_sink)
+ return;
+
+ if (m_sink->inStoppedState()) {
+ qCDebug(qLcGstVideoRenderer) << " showing empty video frame";
+ m_currentVideoFrame = {};
+ }
+
+ m_sink->setVideoFrame(m_currentVideoFrame);
+ });
+
+ return GST_FLOW_OK;
+}
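
Note the threading here: render() is called on a GStreamer streaming thread, while the lambda executes on the renderer's own thread (it is moved to the Qt sink's thread in instance_init() below), so QMetaObject::invokeMethod() effectively posts the latest buffer state across threads as a queued call.
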
+
+bool QGstVideoRenderer::query(GstQuery *query)
+{
+#if QT_CONFIG(gstreamer_gl)
+ if (GST_QUERY_TYPE(query) == GST_QUERY_CONTEXT) {
+ const gchar *type;
+ gst_query_parse_context_type(query, &type);
+
+ if (strcmp(type, "gst.gl.local_context") != 0)
+ return false;
+
+ QMutexLocker locker(&m_sinkMutex);
+ if (!m_sink)
+ return false;
+
+ auto *gstGlContext = m_sink->gstGlLocalContext();
+ if (!gstGlContext)
+ return false;
+
+ gst_query_set_context(query, gstGlContext);
+
+ return true;
+ }
+#else
+ Q_UNUSED(query);
+#endif
+ return false;
+}
+
+void QGstVideoRenderer::gstEvent(GstEvent *event)
+{
+ switch (GST_EVENT_TYPE(event)) {
+ case GST_EVENT_TAG:
+ qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::gstEvent: Tag";
+ return gstEventHandleTag(event);
+ case GST_EVENT_EOS:
+ qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::gstEvent: EOS";
+ return gstEventHandleEOS(event);
+
+ default:
+ qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::gstEvent: unhandled event - " << event;
+ return;
+ }
+}
+
+void QGstVideoRenderer::gstEventHandleTag(GstEvent *event)
+{
+ GstTagList *taglist = nullptr;
+ gst_event_parse_tag(event, &taglist);
+ if (!taglist)
+ return;
+
+ QGString value;
+ if (!gst_tag_list_get_string(taglist, GST_TAG_IMAGE_ORIENTATION, &value))
+ return;
+
+ constexpr const char rotate[] = "rotate-";
+ constexpr const char flipRotate[] = "flip-rotate-";
+ constexpr size_t rotateLen = sizeof(rotate) - 1;
+ constexpr size_t flipRotateLen = sizeof(flipRotate) - 1;
+
+ bool mirrored = false;
+ int rotationAngle = 0;
+
+ if (!strncmp(rotate, value.get(), rotateLen)) {
+ rotationAngle = atoi(value.get() + rotateLen);
+ } else if (!strncmp(flipRotate, value.get(), flipRotateLen)) {
+ // To flip by horizontal axis is the same as to mirror by vertical axis
+ // and rotate by 180 degrees.
+ mirrored = true;
+ rotationAngle = (180 + atoi(value.get() + flipRotateLen)) % 360;
+ }
+
+ m_frameMirrored = mirrored;
+ switch (rotationAngle) {
+ case 0:
+ m_frameRotationAngle = QtVideo::Rotation::None;
+ break;
+ case 90:
+ m_frameRotationAngle = QtVideo::Rotation::Clockwise90;
+ break;
+ case 180:
+ m_frameRotationAngle = QtVideo::Rotation::Clockwise180;
+ break;
+ case 270:
+ m_frameRotationAngle = QtVideo::Rotation::Clockwise270;
+ break;
+ default:
+ m_frameRotationAngle = QtVideo::Rotation::None;
+ }
+}
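
For example, the tag value "flip-rotate-90" yields mirrored = true and rotationAngle = (180 + 90) % 360 = 270, i.e. QtVideo::Rotation::Clockwise270 combined with a mirror.
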
+
+void QGstVideoRenderer::gstEventHandleEOS(GstEvent *)
+{
+ stop();
+}
+
+static GstVideoSinkClass *gvrs_sink_parent_class;
+static thread_local QGstreamerVideoSink *gvrs_current_sink;
+
+#define VO_SINK(s) QGstVideoRendererSink *sink(reinterpret_cast<QGstVideoRendererSink *>(s))
+
+QGstVideoRendererSink *QGstVideoRendererSink::createSink(QGstreamerVideoSink *sink)
+{
+ setSink(sink);
+ QGstVideoRendererSink *gstSink = reinterpret_cast<QGstVideoRendererSink *>(
+ g_object_new(QGstVideoRendererSink::get_type(), nullptr));
+
+ return gstSink;
+}
+
+void QGstVideoRendererSink::setSink(QGstreamerVideoSink *sink)
+{
+ gvrs_current_sink = sink;
+}
+
+GType QGstVideoRendererSink::get_type()
+{
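+ // One-time GObject type registration; the function-local static makes the
+ // registration thread-safe under C++11 initialization rules.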
+ static const GTypeInfo info =
+ {
+ sizeof(QGstVideoRendererSinkClass), // class_size
+ base_init, // base_init
+ nullptr, // base_finalize
+ class_init, // class_init
+ nullptr, // class_finalize
+ nullptr, // class_data
+ sizeof(QGstVideoRendererSink), // instance_size
+ 0, // n_preallocs
+ instance_init, // instance_init
+ nullptr // value_table
+ };
+
+ static const GType type = g_type_register_static(GST_TYPE_VIDEO_SINK, "QGstVideoRendererSink",
+ &info, GTypeFlags(0));
+
+ return type;
+}
+
+void QGstVideoRendererSink::class_init(gpointer g_class, gpointer class_data)
+{
+ Q_UNUSED(class_data);
+
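+ // Wire the GObject/GstBaseSink/GstVideoSink virtual function tables to the
+ // static trampolines below; each trampoline recovers the C++ renderer via
+ // the VO_SINK macro.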
+ gvrs_sink_parent_class = reinterpret_cast<GstVideoSinkClass *>(g_type_class_peek_parent(g_class));
+
+ GstVideoSinkClass *video_sink_class = reinterpret_cast<GstVideoSinkClass *>(g_class);
+ video_sink_class->show_frame = QGstVideoRendererSink::show_frame;
+
+ GstBaseSinkClass *base_sink_class = reinterpret_cast<GstBaseSinkClass *>(g_class);
+ base_sink_class->get_caps = QGstVideoRendererSink::get_caps;
+ base_sink_class->set_caps = QGstVideoRendererSink::set_caps;
+ base_sink_class->propose_allocation = QGstVideoRendererSink::propose_allocation;
+ base_sink_class->stop = QGstVideoRendererSink::stop;
+ base_sink_class->unlock = QGstVideoRendererSink::unlock;
+ base_sink_class->query = QGstVideoRendererSink::query;
+ base_sink_class->event = QGstVideoRendererSink::event;
+
+ GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class);
+ element_class->change_state = QGstVideoRendererSink::change_state;
+ gst_element_class_set_metadata(element_class,
+ "Qt built-in video renderer sink",
+ "Sink/Video",
+ "Qt default built-in video renderer sink",
+ "The Qt Company");
+
+ GObjectClass *object_class = reinterpret_cast<GObjectClass *>(g_class);
+ object_class->finalize = QGstVideoRendererSink::finalize;
+}
+
+void QGstVideoRendererSink::base_init(gpointer g_class)
+{
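+ // Advertise a pad template accepting any raw video format; the concrete
+ // formats are negotiated later through get_caps()/set_caps().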
+ static GstStaticPadTemplate sink_pad_template = GST_STATIC_PAD_TEMPLATE(
+ "sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS(
+ "video/x-raw, "
+ "framerate = (fraction) [ 0, MAX ], "
+ "width = (int) [ 1, MAX ], "
+ "height = (int) [ 1, MAX ]"));
+
+ gst_element_class_add_pad_template(
+ GST_ELEMENT_CLASS(g_class), gst_static_pad_template_get(&sink_pad_template));
+}
+
+void QGstVideoRendererSink::instance_init(GTypeInstance *instance, gpointer g_class)
+{
+ Q_UNUSED(g_class);
+ VO_SINK(instance);
+
+ Q_ASSERT(gvrs_current_sink);
+
+ sink->renderer = new QGstVideoRenderer(gvrs_current_sink);
+ sink->renderer->moveToThread(gvrs_current_sink->thread());
+ gvrs_current_sink = nullptr;
+}
+
+void QGstVideoRendererSink::finalize(GObject *object)
+{
+ VO_SINK(object);
+
+ delete sink->renderer;
+
+ // Chain up
+ G_OBJECT_CLASS(gvrs_sink_parent_class)->finalize(object);
+}
+
+GstStateChangeReturn QGstVideoRendererSink::change_state(
+ GstElement *element, GstStateChange transition)
+{
+ return GST_ELEMENT_CLASS(gvrs_sink_parent_class)->change_state(element, transition);
+}
+
+GstCaps *QGstVideoRendererSink::get_caps(GstBaseSink *base, GstCaps *filter)
+{
+ VO_SINK(base);
+
+ QGstCaps caps = sink->renderer->caps();
+ if (filter)
+ caps = QGstCaps(gst_caps_intersect(caps.caps(), filter), QGstCaps::HasRef);
+
+ return caps.release();
+}
+
+gboolean QGstVideoRendererSink::set_caps(GstBaseSink *base, GstCaps *gcaps)
+{
+ VO_SINK(base);
+ auto caps = QGstCaps(gcaps, QGstCaps::NeedsRef);
+
+ qCDebug(qLcGstVideoRenderer) << "set_caps:" << caps;
+
+ if (caps.isNull()) {
+ sink->renderer->stop();
+ return TRUE;
+ }
+
+ return sink->renderer->start(caps);
+}
+
+gboolean QGstVideoRendererSink::propose_allocation(GstBaseSink *base, GstQuery *query)
+{
+ VO_SINK(base);
+ return sink->renderer->proposeAllocation(query);
+}
+
+gboolean QGstVideoRendererSink::stop(GstBaseSink *base)
+{
+ VO_SINK(base);
+ sink->renderer->stop();
+ return TRUE;
+}
+
+gboolean QGstVideoRendererSink::unlock(GstBaseSink *base)
+{
+ VO_SINK(base);
+ sink->renderer->unlock();
+ return TRUE;
+}
+
+GstFlowReturn QGstVideoRendererSink::show_frame(GstVideoSink *base, GstBuffer *buffer)
+{
+ VO_SINK(base);
+ return sink->renderer->render(buffer);
+}
+
+gboolean QGstVideoRendererSink::query(GstBaseSink *base, GstQuery *query)
+{
+ VO_SINK(base);
+ if (sink->renderer->query(query))
+ return TRUE;
+
+ return GST_BASE_SINK_CLASS(gvrs_sink_parent_class)->query(base, query);
+}
+
+gboolean QGstVideoRendererSink::event(GstBaseSink *base, GstEvent *event)
+{
+ VO_SINK(base);
+ sink->renderer->gstEvent(event);
+ return GST_BASE_SINK_CLASS(gvrs_sink_parent_class)->event(base, event);
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink_p.h b/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink_p.h
new file mode 100644
index 000000000..d9e3db462
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink_p.h
@@ -0,0 +1,138 @@
+// Copyright (C) 2016 Jolla Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTVIDEORENDERERSINK_P_H
+#define QGSTVIDEORENDERERSINK_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/qvideoframeformat.h>
+#include <QtMultimedia/qvideoframe.h>
+#include <QtMultimedia/private/qtmultimediaglobal_p.h>
+
+#include <QtCore/qlist.h>
+#include <QtCore/qmutex.h>
+#include <QtCore/qpointer.h>
+#include <QtCore/qqueue.h>
+#include <QtCore/qwaitcondition.h>
+
+#include <gst/video/gstvideosink.h>
+#include <gst/video/video.h>
+
+#include <common/qgstvideobuffer_p.h>
+#include <common/qgst_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QGstVideoRenderer : public QObject
+{
+public:
+ explicit QGstVideoRenderer(QGstreamerVideoSink *);
+ ~QGstVideoRenderer();
+
+ const QGstCaps &caps();
+
+ bool start(const QGstCaps &);
+ void stop();
+ void unlock();
+ bool proposeAllocation(GstQuery *);
+ GstFlowReturn render(GstBuffer *);
+ bool query(GstQuery *);
+ void gstEvent(GstEvent *);
+
+private:
+ void notify();
+ static QGstCaps createSurfaceCaps(QGstreamerVideoSink *);
+
+ void gstEventHandleTag(GstEvent *);
+ void gstEventHandleEOS(GstEvent *);
+
+ QMutex m_sinkMutex;
+ QGstreamerVideoSink *m_sink = nullptr; // written only from the Qt thread, so only
+ // readers on worker threads need to acquire the lock
+
+ // --- only accessed from gstreamer thread
+ const QGstCaps m_surfaceCaps;
+ QVideoFrameFormat m_format;
+ GstVideoInfo m_videoInfo{};
+ QGstCaps::MemoryFormat m_memoryFormat = QGstCaps::CpuMemory;
+ bool m_frameMirrored = false;
+ QtVideo::Rotation m_frameRotationAngle = QtVideo::Rotation::None;
+
+ // --- only accessed from qt thread
+ QVideoFrame m_currentVideoFrame;
+
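+ // Snapshot of everything needed to materialize a QVideoFrame on the Qt
+ // thread; render() compares it with the previous snapshot to detect the same
+ // buffer being delivered twice (e.g. on preroll).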
+ struct RenderBufferState
+ {
+ QGstBufferHandle buffer;
+ QVideoFrameFormat format;
+ QGstCaps::MemoryFormat memoryFormat;
+ bool mirrored;
+ QtVideo::Rotation rotationAngle;
+
+ bool operator==(const RenderBufferState &rhs) const
+ {
+ return std::tie(buffer, format, memoryFormat, mirrored, rotationAngle)
+ == std::tie(rhs.buffer, rhs.format, rhs.memoryFormat, rhs.mirrored,
+ rhs.rotationAngle);
+ }
+ };
+ RenderBufferState m_currentState;
+};
+
+class QGstVideoRendererSink
+{
+public:
+ GstVideoSink parent{};
+
+ static QGstVideoRendererSink *createSink(QGstreamerVideoSink *surface);
+ static void setSink(QGstreamerVideoSink *surface);
+
+private:
+ static GType get_type();
+ static void class_init(gpointer g_class, gpointer class_data);
+ static void base_init(gpointer g_class);
+ static void instance_init(GTypeInstance *instance, gpointer g_class);
+
+ static void finalize(GObject *object);
+
+ static GstStateChangeReturn change_state(GstElement *element, GstStateChange transition);
+
+ static GstCaps *get_caps(GstBaseSink *sink, GstCaps *filter);
+ static gboolean set_caps(GstBaseSink *sink, GstCaps *caps);
+
+ static gboolean propose_allocation(GstBaseSink *sink, GstQuery *query);
+
+ static gboolean stop(GstBaseSink *sink);
+
+ static gboolean unlock(GstBaseSink *sink);
+
+ static GstFlowReturn show_frame(GstVideoSink *sink, GstBuffer *buffer);
+ static gboolean query(GstBaseSink *element, GstQuery *query);
+ static gboolean event(GstBaseSink *element, GstEvent *event);
+
+private:
+ QGstVideoRenderer *renderer = nullptr;
+};
+
+class QGstVideoRendererSinkClass
+{
+public:
+ GstVideoSinkClass parent_class;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/gstreamer/gstreamer.json b/src/plugins/multimedia/gstreamer/gstreamer.json
new file mode 100644
index 000000000..6a709d9f4
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/gstreamer.json
@@ -0,0 +1,3 @@
+{
+ "Keys": [ "gstreamer" ]
+}
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera.cpp b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera.cpp
new file mode 100644
index 000000000..c54e8b74b
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera.cpp
@@ -0,0 +1,771 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <mediacapture/qgstreamercamera_p.h>
+
+#include <QtMultimedia/qcameradevice.h>
+#include <QtMultimedia/qmediacapturesession.h>
+#include <QtMultimedia/private/qcameradevice_p.h>
+#include <QtCore/qdebug.h>
+
+#include <common/qgst_debug_p.h>
+#include <qgstreamervideodevices_p.h>
+#include <qgstreamerintegration_p.h>
+
+#if QT_CONFIG(linux_v4l)
+#include <linux/videodev2.h>
+#include <private/qcore_unix_p.h>
+#endif
+
+QT_BEGIN_NAMESPACE
+
+QMaybe<QPlatformCamera *> QGstreamerCamera::create(QCamera *camera)
+{
+ static const auto error = qGstErrorMessageIfElementsNotAvailable(
+ "videotestsrc", "capsfilter", "videoconvert", "videoscale", "identity");
+ if (error)
+ return *error;
+
+ return new QGstreamerCamera(camera);
+}
+
+QGstreamerCamera::QGstreamerCamera(QCamera *camera)
+ : QGstreamerCameraBase(camera),
+ gstCameraBin{
+ QGstBin::create("camerabin"),
+ },
+ gstCamera{
+ QGstElement::createFromFactory("videotestsrc"),
+ },
+ gstCapsFilter{
+ QGstElement::createFromFactory("capsfilter", "videoCapsFilter"),
+ },
+ gstDecode{
+ QGstElement::createFromFactory("identity"),
+ },
+ gstVideoConvert{
+ QGstElement::createFromFactory("videoconvert", "videoConvert"),
+ },
+ gstVideoScale{
+ QGstElement::createFromFactory("videoscale", "videoScale"),
+ }
+{
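+ // Initial placeholder topology:
+ // videotestsrc ! capsfilter ! identity ! videoconvert ! videoscale
+ // setCamera()/setCameraFormat() later swap the source and the decode step
+ // (identity or jpegdec) while the other elements stay in place.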
+ gstCameraBin.add(gstCamera, gstCapsFilter, gstDecode, gstVideoConvert, gstVideoScale);
+ qLinkGstElements(gstCamera, gstCapsFilter, gstDecode, gstVideoConvert, gstVideoScale);
+ gstCameraBin.addGhostPad(gstVideoScale, "src");
+}
+
+QGstreamerCamera::~QGstreamerCamera()
+{
+ gstCameraBin.setStateSync(GST_STATE_NULL);
+}
+
+bool QGstreamerCamera::isActive() const
+{
+ return m_active;
+}
+
+void QGstreamerCamera::setActive(bool active)
+{
+ if (m_active == active)
+ return;
+ if (m_cameraDevice.isNull() && active)
+ return;
+
+ m_active = active;
+
+ emit activeChanged(active);
+}
+
+void QGstreamerCamera::setCamera(const QCameraDevice &camera)
+{
+ using namespace Qt::Literals;
+
+ if (m_cameraDevice == camera)
+ return;
+
+ m_cameraDevice = camera;
+
+ QGstElement gstNewCamera;
+ if (camera.isNull()) {
+ gstNewCamera = QGstElement::createFromFactory("videotestsrc");
+ } else {
+ auto *integration = static_cast<QGstreamerIntegration *>(QGstreamerIntegration::instance());
+ GstDevice *device = integration->videoDevice(camera.id());
+
+ if (!device) {
+ updateError(QCamera::Error::CameraError,
+ u"Failed to create GstDevice for camera: "_s
+ + QString::fromUtf8(camera.id()));
+ return;
+ }
+
+ gstNewCamera = QGstElement::createFromDevice(device, "camerasrc");
+ QUniqueGstStructureHandle properties{
+ gst_device_get_properties(device),
+ };
+
+ if (properties) {
+ QGstStructureView propertiesView{ properties };
+ if (propertiesView.name() == "v4l2deviceprovider")
+ m_v4l2DevicePath = QString::fromUtf8(propertiesView["device.path"].toString());
+ }
+ }
+
+ QCameraFormat f = findBestCameraFormat(camera);
+ auto caps = QGstCaps::fromCameraFormat(f);
+ auto gstNewDecode = QGstElement::createFromFactory(
+ f.pixelFormat() == QVideoFrameFormat::Format_Jpeg ? "jpegdec" : "identity");
+
+ QGstPipeline::modifyPipelineWhileNotRunning(gstCamera.getPipeline(), [&] {
+ gstCamera.setStateSync(GST_STATE_READY); // stop camera, as it may have active tasks
+
+ qUnlinkGstElements(gstCamera, gstCapsFilter, gstDecode, gstVideoConvert);
+ gstCameraBin.stopAndRemoveElements(gstCamera, gstDecode);
+
+ gstCapsFilter.set("caps", caps);
+
+ gstCamera = std::move(gstNewCamera);
+ gstDecode = std::move(gstNewDecode);
+
+ gstCameraBin.add(gstCamera, gstDecode);
+ qLinkGstElements(gstCamera, gstCapsFilter, gstDecode, gstVideoConvert);
+
+ gstCameraBin.syncChildrenState();
+ });
+
+ updateCameraProperties();
+}
+
+bool QGstreamerCamera::setCameraFormat(const QCameraFormat &format)
+{
+ if (!format.isNull() && !m_cameraDevice.videoFormats().contains(format))
+ return false;
+
+ QCameraFormat f = format;
+ if (f.isNull())
+ f = findBestCameraFormat(m_cameraDevice);
+
+ auto caps = QGstCaps::fromCameraFormat(f);
+
+ auto newGstDecode = QGstElement::createFromFactory(
+ f.pixelFormat() == QVideoFrameFormat::Format_Jpeg ? "jpegdec" : "identity");
+
+ QGstPipeline::modifyPipelineWhileNotRunning(gstCamera.getPipeline(), [&] {
+ gstCamera.setStateSync(GST_STATE_READY); // stop camera, as it may have active tasks
+
+ qUnlinkGstElements(gstCamera, gstCapsFilter, gstDecode, gstVideoConvert);
+ gstCameraBin.stopAndRemoveElements(gstDecode);
+
+ gstCapsFilter.set("caps", caps);
+
+ gstDecode = std::move(newGstDecode);
+
+ gstCameraBin.add(gstDecode);
+ qLinkGstElements(gstCamera, gstCapsFilter, gstDecode, gstVideoConvert);
+ gstCameraBin.syncChildrenState();
+ });
+
+ return true;
+}
+
+void QGstreamerCamera::updateCameraProperties()
+{
+#if QT_CONFIG(linux_v4l)
+ if (isV4L2Camera()) {
+ initV4L2Controls();
+ return;
+ }
+#endif
+#if QT_CONFIG(gstreamer_photography)
+ if (auto *p = photography())
+ gst_photography_set_white_balance_mode(p, GST_PHOTOGRAPHY_WB_MODE_AUTO);
+ QCamera::Features f = QCamera::Feature::ColorTemperature | QCamera::Feature::ExposureCompensation |
+ QCamera::Feature::IsoSensitivity | QCamera::Feature::ManualExposureTime;
+ supportedFeaturesChanged(f);
+#endif
+}
+
+#if QT_CONFIG(gstreamer_photography)
+GstPhotography *QGstreamerCamera::photography() const
+{
+ if (!gstCamera.isNull() && GST_IS_PHOTOGRAPHY(gstCamera.element()))
+ return GST_PHOTOGRAPHY(gstCamera.element());
+ return nullptr;
+}
+#endif
+
+void QGstreamerCamera::setFocusMode(QCamera::FocusMode mode)
+{
+ if (mode == focusMode())
+ return;
+
+#if QT_CONFIG(gstreamer_photography)
+ auto p = photography();
+ if (p) {
+ GstPhotographyFocusMode photographyMode = GST_PHOTOGRAPHY_FOCUS_MODE_CONTINUOUS_NORMAL;
+
+ switch (mode) {
+ case QCamera::FocusModeAutoNear:
+ photographyMode = GST_PHOTOGRAPHY_FOCUS_MODE_MACRO;
+ break;
+ case QCamera::FocusModeAutoFar:
+ // not quite, but hey :)
+ Q_FALLTHROUGH();
+ case QCamera::FocusModeHyperfocal:
+ photographyMode = GST_PHOTOGRAPHY_FOCUS_MODE_HYPERFOCAL;
+ break;
+ case QCamera::FocusModeInfinity:
+ photographyMode = GST_PHOTOGRAPHY_FOCUS_MODE_INFINITY;
+ break;
+ case QCamera::FocusModeManual:
+ photographyMode = GST_PHOTOGRAPHY_FOCUS_MODE_MANUAL;
+ break;
+ default: // QCamera::FocusModeAuto:
+ break;
+ }
+
+ if (gst_photography_set_focus_mode(p, photographyMode))
+ focusModeChanged(mode);
+ }
+#endif
+}
+
+bool QGstreamerCamera::isFocusModeSupported(QCamera::FocusMode mode) const
+{
+#if QT_CONFIG(gstreamer_photography)
+ if (photography())
+ return true;
+#endif
+ return mode == QCamera::FocusModeAuto;
+}
+
+void QGstreamerCamera::setFlashMode(QCamera::FlashMode mode)
+{
+ Q_UNUSED(mode);
+
+#if QT_CONFIG(gstreamer_photography)
+ if (auto *p = photography()) {
+ GstPhotographyFlashMode flashMode;
+ gst_photography_get_flash_mode(p, &flashMode);
+
+ switch (mode) {
+ case QCamera::FlashAuto:
+ flashMode = GST_PHOTOGRAPHY_FLASH_MODE_AUTO;
+ break;
+ case QCamera::FlashOff:
+ flashMode = GST_PHOTOGRAPHY_FLASH_MODE_OFF;
+ break;
+ case QCamera::FlashOn:
+ flashMode = GST_PHOTOGRAPHY_FLASH_MODE_ON;
+ break;
+ }
+
+ if (gst_photography_set_flash_mode(p, flashMode))
+ flashModeChanged(mode);
+ }
+#endif
+}
+
+bool QGstreamerCamera::isFlashModeSupported(QCamera::FlashMode mode) const
+{
+#if QT_CONFIG(gstreamer_photography)
+ if (photography())
+ return true;
+#endif
+
+ return mode == QCamera::FlashAuto;
+}
+
+bool QGstreamerCamera::isFlashReady() const
+{
+#if QT_CONFIG(gstreamer_photography)
+ if (photography())
+ return true;
+#endif
+
+ return false;
+}
+
+void QGstreamerCamera::setExposureMode(QCamera::ExposureMode mode)
+{
+ Q_UNUSED(mode);
+#if QT_CONFIG(linux_v4l)
+ if (isV4L2Camera() && v4l2AutoExposureSupported && v4l2ManualExposureSupported) {
+ if (mode != QCamera::ExposureAuto && mode != QCamera::ExposureManual)
+ return;
+ int value = mode == QCamera::ExposureAuto ? V4L2_EXPOSURE_AUTO : V4L2_EXPOSURE_MANUAL;
+ setV4L2Parameter(V4L2_CID_EXPOSURE_AUTO, value);
+ exposureModeChanged(mode);
+ return;
+ }
+#endif
+
+#if QT_CONFIG(gstreamer_photography)
+ auto *p = photography();
+ if (!p)
+ return;
+
+ GstPhotographySceneMode sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_AUTO;
+
+ switch (mode) {
+ case QCamera::ExposureManual:
+ sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_MANUAL;
+ break;
+ case QCamera::ExposurePortrait:
+ sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_PORTRAIT;
+ break;
+ case QCamera::ExposureSports:
+ sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_SPORT;
+ break;
+ case QCamera::ExposureNight:
+ sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_NIGHT;
+ break;
+ case QCamera::ExposureAuto:
+ sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_AUTO;
+ break;
+ case QCamera::ExposureLandscape:
+ sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_LANDSCAPE;
+ break;
+ case QCamera::ExposureSnow:
+ sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_SNOW;
+ break;
+ case QCamera::ExposureBeach:
+ sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_BEACH;
+ break;
+ case QCamera::ExposureAction:
+ sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_ACTION;
+ break;
+ case QCamera::ExposureNightPortrait:
+ sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_NIGHT_PORTRAIT;
+ break;
+ case QCamera::ExposureTheatre:
+ sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_THEATRE;
+ break;
+ case QCamera::ExposureSunset:
+ sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_SUNSET;
+ break;
+ case QCamera::ExposureSteadyPhoto:
+ sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_STEADY_PHOTO;
+ break;
+ case QCamera::ExposureFireworks:
+ sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_FIREWORKS;
+ break;
+ case QCamera::ExposureParty:
+ sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_PARTY;
+ break;
+ case QCamera::ExposureCandlelight:
+ sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_CANDLELIGHT;
+ break;
+ case QCamera::ExposureBarcode:
+ sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_BARCODE;
+ break;
+ default:
+ return;
+ }
+
+ if (gst_photography_set_scene_mode(p, sceneMode))
+ exposureModeChanged(mode);
+#endif
+}
+
+bool QGstreamerCamera::isExposureModeSupported(QCamera::ExposureMode mode) const
+{
+ if (mode == QCamera::ExposureAuto)
+ return true;
+#if QT_CONFIG(linux_v4l)
+ if (isV4L2Camera() && v4l2ManualExposureSupported && v4l2AutoExposureSupported)
+ return mode == QCamera::ExposureManual;
+#endif
+#if QT_CONFIG(gstreamer_photography)
+ if (photography())
+ return true;
+#endif
+
+ return false;
+}
+
+void QGstreamerCamera::setExposureCompensation(float compensation)
+{
+ Q_UNUSED(compensation);
+#if QT_CONFIG(linux_v4l)
+ if (isV4L2Camera() && (v4l2MinExposureAdjustment != 0 || v4l2MaxExposureAdjustment != 0)) {
+ int value = qBound(v4l2MinExposureAdjustment, (int)(compensation*1000), v4l2MaxExposureAdjustment);
+ setV4L2Parameter(V4L2_CID_AUTO_EXPOSURE_BIAS, value);
+ exposureCompensationChanged(value/1000.);
+ return;
+ }
+#endif
+
+#if QT_CONFIG(gstreamer_photography)
+ if (auto *p = photography()) {
+ if (gst_photography_set_ev_compensation(p, compensation))
+ exposureCompensationChanged(compensation);
+ }
+#endif
+}
+
+void QGstreamerCamera::setManualIsoSensitivity(int iso)
+{
+ Q_UNUSED(iso);
+#if QT_CONFIG(linux_v4l)
+ if (isV4L2Camera()) {
+ if (!(supportedFeatures() & QCamera::Feature::IsoSensitivity))
+ return;
+ setV4L2Parameter(V4L2_CID_ISO_SENSITIVITY_AUTO, iso <= 0 ? V4L2_ISO_SENSITIVITY_AUTO : V4L2_ISO_SENSITIVITY_MANUAL);
+ if (iso > 0) {
+ iso = qBound(minIso(), iso, maxIso());
+ setV4L2Parameter(V4L2_CID_ISO_SENSITIVITY, iso);
+ }
+ return;
+ }
+#endif
+#if QT_CONFIG(gstreamer_photography)
+ if (auto *p = photography()) {
+ if (gst_photography_set_iso_speed(p, iso))
+ isoSensitivityChanged(iso);
+ }
+#endif
+}
+
+int QGstreamerCamera::isoSensitivity() const
+{
+#if QT_CONFIG(linux_v4l)
+ if (isV4L2Camera()) {
+ if (!(supportedFeatures() & QCamera::Feature::IsoSensitivity))
+ return -1;
+ return getV4L2Parameter(V4L2_CID_ISO_SENSITIVITY);
+ }
+#endif
+#if QT_CONFIG(gstreamer_photography)
+ if (auto *p = photography()) {
+ guint speed = 0;
+ if (gst_photography_get_iso_speed(p, &speed))
+ return speed;
+ }
+#endif
+ return 100;
+}
+
+void QGstreamerCamera::setManualExposureTime(float secs)
+{
+ Q_UNUSED(secs);
+#if QT_CONFIG(linux_v4l)
+ if (isV4L2Camera() && v4l2ManualExposureSupported && v4l2AutoExposureSupported) {
+ int exposure = qBound(v4l2MinExposure, qRound(secs*10000.), v4l2MaxExposure);
+ setV4L2Parameter(V4L2_CID_EXPOSURE_ABSOLUTE, exposure);
+ exposureTimeChanged(exposure/10000.);
+ return;
+ }
+#endif
+
+#if QT_CONFIG(gstreamer_photography)
+ if (auto *p = photography()) {
+ if (gst_photography_set_exposure(p, guint(secs*1000000)))
+ exposureTimeChanged(secs);
+ }
+#endif
+}
+
+float QGstreamerCamera::exposureTime() const
+{
+#if QT_CONFIG(linux_v4l)
+ if (isV4L2Camera()) {
+ return getV4L2Parameter(V4L2_CID_EXPOSURE_ABSOLUTE)/10000.;
+ }
+#endif
+#if QT_CONFIG(gstreamer_photography)
+ if (auto *p = photography()) {
+ guint32 exposure = 0;
+ if (gst_photography_get_exposure(p, &exposure))
+ return exposure/1000000.;
+ }
+#endif
+ return -1;
+}
+
+bool QGstreamerCamera::isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const
+{
+ if (mode == QCamera::WhiteBalanceAuto)
+ return true;
+
+#if QT_CONFIG(linux_v4l)
+ if (isV4L2Camera()) {
+ if (v4l2AutoWhiteBalanceSupported && v4l2ColorTemperatureSupported)
+ return true;
+ }
+#endif
+#if QT_CONFIG(gstreamer_photography)
+ if (auto *p = photography()) {
+ Q_UNUSED(p);
+ switch (mode) {
+ case QCamera::WhiteBalanceAuto:
+ case QCamera::WhiteBalanceSunlight:
+ case QCamera::WhiteBalanceCloudy:
+ case QCamera::WhiteBalanceShade:
+ case QCamera::WhiteBalanceSunset:
+ case QCamera::WhiteBalanceTungsten:
+ case QCamera::WhiteBalanceFluorescent:
+ return true;
+ case QCamera::WhiteBalanceManual: {
+#if GST_CHECK_VERSION(1, 18, 0)
+ GstPhotographyInterface *iface = GST_PHOTOGRAPHY_GET_INTERFACE(p);
+ if (iface->set_color_temperature && iface->get_color_temperature)
+ return true;
+#endif
+ break;
+ }
+ default:
+ break;
+ }
+ }
+#endif
+
+ return false; // WhiteBalanceAuto was already handled above
+}
+
+void QGstreamerCamera::setWhiteBalanceMode(QCamera::WhiteBalanceMode mode)
+{
+ Q_ASSERT(isWhiteBalanceModeSupported(mode));
+
+#if QT_CONFIG(linux_v4l)
+ if (isV4L2Camera()) {
+ int temperature = colorTemperatureForWhiteBalance(mode);
+ int t = setV4L2ColorTemperature(temperature);
+ if (t == 0)
+ mode = QCamera::WhiteBalanceAuto;
+ whiteBalanceModeChanged(mode);
+ return;
+ }
+#endif
+
+#if QT_CONFIG(gstreamer_photography)
+ if (auto *p = photography()) {
+ GstPhotographyWhiteBalanceMode gstMode = GST_PHOTOGRAPHY_WB_MODE_AUTO;
+ switch (mode) {
+ case QCamera::WhiteBalanceSunlight:
+ gstMode = GST_PHOTOGRAPHY_WB_MODE_DAYLIGHT;
+ break;
+ case QCamera::WhiteBalanceCloudy:
+ gstMode = GST_PHOTOGRAPHY_WB_MODE_CLOUDY;
+ break;
+ case QCamera::WhiteBalanceShade:
+ gstMode = GST_PHOTOGRAPHY_WB_MODE_SHADE;
+ break;
+ case QCamera::WhiteBalanceSunset:
+ gstMode = GST_PHOTOGRAPHY_WB_MODE_SUNSET;
+ break;
+ case QCamera::WhiteBalanceTungsten:
+ gstMode = GST_PHOTOGRAPHY_WB_MODE_TUNGSTEN;
+ break;
+ case QCamera::WhiteBalanceFluorescent:
+ gstMode = GST_PHOTOGRAPHY_WB_MODE_FLUORESCENT;
+ break;
+ case QCamera::WhiteBalanceAuto:
+ default:
+ break;
+ }
+ if (gst_photography_set_white_balance_mode(p, gstMode)) {
+ whiteBalanceModeChanged(mode);
+ return;
+ }
+ }
+#endif
+}
+
+void QGstreamerCamera::setColorTemperature(int temperature)
+{
+ if (temperature == 0) {
+ setWhiteBalanceMode(QCamera::WhiteBalanceAuto);
+ return;
+ }
+
+ Q_ASSERT(isWhiteBalanceModeSupported(QCamera::WhiteBalanceManual));
+
+#if QT_CONFIG(linux_v4l)
+ if (isV4L2Camera()) {
+ int t = setV4L2ColorTemperature(temperature);
+ if (t)
+ colorTemperatureChanged(t);
+ return;
+ }
+#endif
+
+#if QT_CONFIG(gstreamer_photography) && GST_CHECK_VERSION(1, 18, 0)
+ if (auto *p = photography()) {
+ GstPhotographyInterface *iface = GST_PHOTOGRAPHY_GET_INTERFACE(p);
+ Q_ASSERT(iface->set_color_temperature);
+ iface->set_color_temperature(p, temperature);
+ return;
+ }
+#endif
+}
+
+#if QT_CONFIG(linux_v4l)
+bool QGstreamerCamera::isV4L2Camera() const
+{
+ return !m_v4l2DevicePath.isEmpty();
+}
+
+void QGstreamerCamera::initV4L2Controls()
+{
+ v4l2AutoWhiteBalanceSupported = false;
+ v4l2ColorTemperatureSupported = false;
+ QCamera::Features features{};
+
+ Q_ASSERT(!m_v4l2DevicePath.isEmpty());
+
+ withV4L2DeviceFileDescriptor([&](int fd) {
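+ // VIDIOC_QUERYCTRL succeeds (returns 0) when a control exists; its minimum
+ // and maximum come back in the same struct, so one probe per control is
+ // enough to record feature support and its range.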
+ struct v4l2_queryctrl queryControl = {};
+ queryControl.id = V4L2_CID_AUTO_WHITE_BALANCE;
+
+ if (::ioctl(fd, VIDIOC_QUERYCTRL, &queryControl) == 0) {
+ v4l2AutoWhiteBalanceSupported = true;
+ setV4L2Parameter(V4L2_CID_AUTO_WHITE_BALANCE, true);
+ }
+
+ queryControl = {};
+ queryControl.id = V4L2_CID_WHITE_BALANCE_TEMPERATURE;
+ if (::ioctl(fd, VIDIOC_QUERYCTRL, &queryControl) == 0) {
+ v4l2MinColorTemp = queryControl.minimum;
+ v4l2MaxColorTemp = queryControl.maximum;
+ v4l2ColorTemperatureSupported = true;
+ features |= QCamera::Feature::ColorTemperature;
+ }
+
+ queryControl = {};
+ queryControl.id = V4L2_CID_EXPOSURE_AUTO;
+ if (::ioctl(fd, VIDIOC_QUERYCTRL, &queryControl) == 0) {
+ v4l2AutoExposureSupported = true;
+ }
+
+ queryControl = {};
+ queryControl.id = V4L2_CID_EXPOSURE_ABSOLUTE;
+ if (::ioctl(fd, VIDIOC_QUERYCTRL, &queryControl) == 0) {
+ v4l2ManualExposureSupported = true;
+ v4l2MinExposure = queryControl.minimum;
+ v4l2MaxExposure = queryControl.maximum;
+ features |= QCamera::Feature::ManualExposureTime;
+ }
+
+ queryControl = {};
+ queryControl.id = V4L2_CID_AUTO_EXPOSURE_BIAS;
+ if (::ioctl(fd, VIDIOC_QUERYCTRL, &queryControl) == 0) {
+ v4l2MinExposureAdjustment = queryControl.minimum;
+ v4l2MaxExposureAdjustment = queryControl.maximum;
+ features |= QCamera::Feature::ExposureCompensation;
+ }
+
+ queryControl = {};
+ queryControl.id = V4L2_CID_ISO_SENSITIVITY_AUTO;
+ if (::ioctl(fd, VIDIOC_QUERYCTRL, &queryControl) == 0) {
+ queryControl.id = V4L2_CID_ISO_SENSITIVITY;
+ if (::ioctl(fd, VIDIOC_QUERYCTRL, &queryControl) == 0) {
+ features |= QCamera::Feature::IsoSensitivity;
+ minIsoChanged(queryControl.minimum);
+ maxIsoChanged(queryControl.maximum);
+ }
+ }
+ });
+
+ supportedFeaturesChanged(features);
+}
+
+int QGstreamerCamera::setV4L2ColorTemperature(int temperature)
+{
+ if (v4l2AutoWhiteBalanceSupported) {
+ setV4L2Parameter(V4L2_CID_AUTO_WHITE_BALANCE, temperature == 0);
+ } else if (temperature == 0) {
+ temperature = 5600;
+ }
+
+ if (temperature != 0 && v4l2ColorTemperatureSupported) {
+ temperature = qBound(v4l2MinColorTemp, temperature, v4l2MaxColorTemp);
+ if (!setV4L2Parameter(V4L2_CID_WHITE_BALANCE_TEMPERATURE, temperature))
+ temperature = 0;
+ } else {
+ temperature = 0;
+ }
+
+ return temperature;
+}
+
+bool QGstreamerCamera::setV4L2Parameter(quint32 id, qint32 value)
+{
+ return withV4L2DeviceFileDescriptor([&](int fd) {
+ v4l2_control control{ id, value };
+ if (::ioctl(fd, VIDIOC_S_CTRL, &control) != 0) {
+ qWarning() << "Unable to set the V4L2 Parameter" << Qt::hex << id << "to" << value
+ << qt_error_string(errno);
+ return false;
+ }
+ return true;
+ });
+}
+
+int QGstreamerCamera::getV4L2Parameter(quint32 id) const
+{
+ return withV4L2DeviceFileDescriptor([&](int fd) {
+ v4l2_control control{ id, 0 };
+ if (::ioctl(fd, VIDIOC_G_CTRL, &control) != 0) {
+ qWarning() << "Unable to get the V4L2 Parameter" << Qt::hex << id
+ << qt_error_string(errno);
+ return 0;
+ }
+ return control.value;
+ });
+}
+
+QGstreamerCustomCamera::QGstreamerCustomCamera(QCamera *camera)
+ : QGstreamerCameraBase{
+ camera,
+ },
+ m_userProvidedGstElement{
+ false,
+ }
+{
+}
+
+QGstreamerCustomCamera::QGstreamerCustomCamera(QCamera *camera, QGstElement element)
+ : QGstreamerCameraBase{
+ camera,
+ },
+ gstCamera{
+ std::move(element),
+ },
+ m_userProvidedGstElement{
+ true,
+ }
+{
+}
+
+void QGstreamerCustomCamera::setCamera(const QCameraDevice &device)
+{
+ if (m_userProvidedGstElement)
+ return;
+
+ gstCamera = QGstBin::createFromPipelineDescription(device.id(), /*name=*/nullptr,
+ /*ghostUnlinkedPads=*/true);
+}
+
+bool QGstreamerCustomCamera::isActive() const
+{
+ return m_active;
+}
+
+void QGstreamerCustomCamera::setActive(bool active)
+{
+ if (m_active == active)
+ return;
+
+ m_active = active;
+
+ emit activeChanged(active);
+}
+
+#endif
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera_p.h b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera_p.h
new file mode 100644
index 000000000..f43c01f34
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera_p.h
@@ -0,0 +1,152 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTREAMERCAMERACONTROL_H
+#define QGSTREAMERCAMERACONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformcamera_p.h>
+#include <private/qmultimediautils_p.h>
+
+#include <mediacapture/qgstreamermediacapture_p.h>
+#include <common/qgst_p.h>
+#include <common/qgstpipeline_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QGstreamerCameraBase : public QPlatformCamera
+{
+public:
+ using QPlatformCamera::QPlatformCamera;
+
+ virtual QGstElement gstElement() const = 0;
+};
+
+class QGstreamerCamera : public QGstreamerCameraBase
+{
+public:
+ static QMaybe<QPlatformCamera *> create(QCamera *camera);
+
+ virtual ~QGstreamerCamera();
+
+ bool isActive() const override;
+ void setActive(bool active) override;
+
+ void setCamera(const QCameraDevice &camera) override;
+ bool setCameraFormat(const QCameraFormat &format) override;
+
+ QGstElement gstElement() const override { return gstCameraBin; }
+#if QT_CONFIG(gstreamer_photography)
+ GstPhotography *photography() const;
+#endif
+
+ void setFocusMode(QCamera::FocusMode mode) override;
+ bool isFocusModeSupported(QCamera::FocusMode mode) const override;
+
+ void setFlashMode(QCamera::FlashMode mode) override;
+ bool isFlashModeSupported(QCamera::FlashMode mode) const override;
+ bool isFlashReady() const override;
+
+ void setExposureMode(QCamera::ExposureMode) override;
+ bool isExposureModeSupported(QCamera::ExposureMode mode) const override;
+ void setExposureCompensation(float) override;
+ void setManualIsoSensitivity(int) override;
+ int isoSensitivity() const override;
+ void setManualExposureTime(float) override;
+ float exposureTime() const override;
+
+ bool isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const override;
+ void setWhiteBalanceMode(QCamera::WhiteBalanceMode mode) override;
+ void setColorTemperature(int temperature) override;
+
+private:
+ QGstreamerCamera(QCamera *camera);
+
+ void updateCameraProperties();
+
+#if QT_CONFIG(linux_v4l)
+ bool isV4L2Camera() const;
+ void initV4L2Controls();
+ int setV4L2ColorTemperature(int temperature);
+ bool setV4L2Parameter(quint32 id, qint32 value);
+ int getV4L2Parameter(quint32 id) const;
+
+ bool v4l2AutoWhiteBalanceSupported = false;
+ bool v4l2ColorTemperatureSupported = false;
+ bool v4l2AutoExposureSupported = false;
+ bool v4l2ManualExposureSupported = false;
+ qint32 v4l2MinColorTemp = 5600; // Daylight...
+ qint32 v4l2MaxColorTemp = 5600;
+ qint32 v4l2MinExposure = 0;
+ qint32 v4l2MaxExposure = 0;
+ qint32 v4l2MinExposureAdjustment = 0;
+ qint32 v4l2MaxExposureAdjustment = 0;
+
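+ // Invokes f with a usable V4L2 file descriptor: prefer the descriptor the
+ // GStreamer source already holds (its "device-fd" property), otherwise open
+ // the device node read-only for the duration of the call.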
+ template <typename Functor>
+ auto withV4L2DeviceFileDescriptor(Functor &&f) const
+ {
+ using ReturnType = std::invoke_result_t<Functor, int>;
+ Q_ASSERT(isV4L2Camera());
+
+ if (int gstreamerDeviceFd = gstCamera.getInt("device-fd"); gstreamerDeviceFd != -1)
+ return f(gstreamerDeviceFd);
+
+ auto v4l2FileDescriptor = QFileDescriptorHandle{
+ qt_safe_open(m_v4l2DevicePath.toLocal8Bit().constData(), O_RDONLY),
+ };
+ if (!v4l2FileDescriptor) {
+ qWarning() << "Unable to open the camera" << m_v4l2DevicePath
+ << "for read to query the parameter info:" << qt_error_string(errno);
+ if constexpr (std::is_void_v<ReturnType>)
+ return;
+ else
+ return ReturnType{};
+ }
+ return f(v4l2FileDescriptor.get());
+ }
+#endif
+
+ QCameraDevice m_cameraDevice;
+
+ QGstBin gstCameraBin;
+ QGstElement gstCamera;
+ QGstElement gstCapsFilter;
+ QGstElement gstDecode;
+ QGstElement gstVideoConvert;
+ QGstElement gstVideoScale;
+
+ bool m_active = false;
+ QString m_v4l2DevicePath;
+};
+
+class QGstreamerCustomCamera : public QGstreamerCameraBase
+{
+public:
+ explicit QGstreamerCustomCamera(QCamera *);
+ explicit QGstreamerCustomCamera(QCamera *, QGstElement element);
+
+ QGstElement gstElement() const override { return gstCamera; }
+ void setCamera(const QCameraDevice &) override;
+
+ bool isActive() const override;
+ void setActive(bool) override;
+
+private:
+ QGstElement gstCamera;
+ bool m_active{};
+ const bool m_userProvidedGstElement;
+};
+
+QT_END_NAMESPACE
+
+#endif // QGSTREAMERCAMERACONTROL_H
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture.cpp b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture.cpp
new file mode 100644
index 000000000..9c21dc083
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture.cpp
@@ -0,0 +1,450 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qgstreamerimagecapture_p.h"
+
+#include <QtMultimedia/qvideoframeformat.h>
+#include <QtMultimedia/private/qmediastoragelocation_p.h>
+#include <QtMultimedia/private/qplatformcamera_p.h>
+#include <QtMultimedia/private/qplatformimagecapture_p.h>
+#include <QtMultimedia/private/qvideoframe_p.h>
+#include <QtCore/qdebug.h>
+#include <QtCore/qdir.h>
+#include <QtCore/qstandardpaths.h>
+#include <QtCore/qcoreapplication.h>
+#include <QtCore/qloggingcategory.h>
+
+#include <common/qgstreamermetadata_p.h>
+#include <common/qgstvideobuffer_p.h>
+#include <common/qgstutils_p.h>
+
+#include <utility>
+
+QT_BEGIN_NAMESPACE
+
+namespace {
+Q_LOGGING_CATEGORY(qLcImageCaptureGst, "qt.multimedia.imageCapture")
+
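+// Lazily created single-threaded pool used to run QVideoFrame::toImage() off
+// the GStreamer streaming thread; it is torn down on aboutToQuit so that
+// thread-local QRhi state is destroyed before the application (QTBUG-124189).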
+struct ThreadPoolSingleton
+{
+ QObject m_context;
+ QMutex m_poolMutex;
+ QThreadPool *m_instance{};
+ bool m_appUnderDestruction = false;
+
+ QThreadPool *get(const QMutexLocker<QMutex> &)
+ {
+ if (m_instance)
+ return m_instance;
+ if (m_appUnderDestruction || !qApp)
+ return nullptr;
+
+ using namespace std::chrono;
+
+ m_instance = new QThreadPool(qApp);
+ m_instance->setMaxThreadCount(1); // single worker thread
+ static constexpr auto expiryTimeout = minutes(5);
+ m_instance->setExpiryTimeout(round<milliseconds>(expiryTimeout).count());
+
+ QObject::connect(qApp, &QCoreApplication::aboutToQuit, &m_context, [&] {
+ // we need to make sure that thread-local QRhi is destroyed before the application to
+ // prevent QTBUG-124189
+ QMutexLocker guard(&m_poolMutex);
+ delete m_instance;
+ m_instance = {};
+ m_appUnderDestruction = true;
+ });
+
+ QObject::connect(qApp, &QCoreApplication::destroyed, &m_context, [&] {
+ m_appUnderDestruction = false;
+ });
+ return m_instance;
+ }
+
+ template <typename Functor>
+ QFuture<void> run(Functor &&f)
+ {
+ QMutexLocker guard(&m_poolMutex);
+ QThreadPool *pool = get(guard);
+ if (!pool)
+ return QFuture<void>{};
+
+ return QtConcurrent::run(pool, std::forward<Functor>(f));
+ }
+};
+
+ThreadPoolSingleton s_threadPoolSingleton;
+
+} // namespace
+
+QMaybe<QPlatformImageCapture *> QGstreamerImageCapture::create(QImageCapture *parent)
+{
+ static const auto error = qGstErrorMessageIfElementsNotAvailable(
+ "queue", "capsfilter", "videoconvert", "jpegenc", "jifmux", "fakesink");
+ if (error)
+ return *error;
+
+ return new QGstreamerImageCapture(parent);
+}
+
+QGstreamerImageCapture::QGstreamerImageCapture(QImageCapture *parent)
+ : QPlatformImageCapture(parent),
+ QGstreamerBufferProbe(ProbeBuffers),
+ bin{
+ QGstBin::create("imageCaptureBin"),
+ },
+ queue{
+ QGstElement::createFromFactory("queue", "imageCaptureQueue"),
+ },
+ filter{
+ QGstElement::createFromFactory("capsfilter", "filter"),
+ },
+ videoConvert{
+ QGstElement::createFromFactory("videoconvert", "imageCaptureConvert"),
+ },
+ encoder{
+ QGstElement::createFromFactory("jpegenc", "jpegEncoder"),
+ },
+ muxer{
+ QGstElement::createFromFactory("jifmux", "jpegMuxer"),
+ },
+ sink{
+ QGstElement::createFromFactory("fakesink", "imageCaptureSink"),
+ }
+{
+ // configure the queue to be fast, lightweight and non-blocking
+ queue.set("leaky", 2 /*downstream*/);
+ queue.set("silent", true);
+ queue.set("max-size-buffers", uint(1));
+ queue.set("max-size-bytes", uint(0));
+ queue.set("max-size-time", quint64(0));
+
+ // imageCaptureSink must not wait for a preroll buffer when going READY -> PAUSED,
+ // as no buffer will arrive until capture() is called
+ sink.set("async", false);
+
+ bin.add(queue, filter, videoConvert, encoder, muxer, sink);
+ qLinkGstElements(queue, filter, videoConvert, encoder, muxer, sink);
+ bin.addGhostPad(queue, "sink");
+
+ addProbeToPad(queue.staticPad("src").pad(), false);
+
+ sink.set("signal-handoffs", true);
+ m_handoffConnection = sink.connect("handoff", G_CALLBACK(&saveImageFilter), this);
+}
+
+QGstreamerImageCapture::~QGstreamerImageCapture()
+{
+ bin.setStateSync(GST_STATE_NULL);
+
+ // wait for pending futures
+ auto pendingFutures = [&] {
+ QMutexLocker guard(&m_mutex);
+ return std::move(m_pendingFutures);
+ }();
+
+ for (QFuture<void> &pendingImage : pendingFutures)
+ pendingImage.waitForFinished();
+}
+
+bool QGstreamerImageCapture::isReadyForCapture() const
+{
+ QMutexLocker guard(&m_mutex);
+ return m_session && !passImage && cameraActive;
+}
+
+int QGstreamerImageCapture::capture(const QString &fileName)
+{
+ using namespace Qt::Literals;
+ QString path = QMediaStorageLocation::generateFileName(
+ fileName, QStandardPaths::PicturesLocation, u"jpg"_s);
+ return doCapture(path);
+}
+
+int QGstreamerImageCapture::captureToBuffer()
+{
+ return doCapture(QString());
+}
+
+int QGstreamerImageCapture::doCapture(const QString &fileName)
+{
+ qCDebug(qLcImageCaptureGst) << "do capture";
+
+ {
+ QMutexLocker guard(&m_mutex);
+ if (!m_session) {
+ invokeDeferred([this] {
+ emit error(-1, QImageCapture::ResourceError,
+ QPlatformImageCapture::msgImageCaptureNotSet());
+ });
+
+ qCDebug(qLcImageCaptureGst) << "error 1";
+ return -1;
+ }
+ if (!m_session->camera()) {
+ invokeDeferred([this] {
+ emit error(-1, QImageCapture::ResourceError, tr("No camera available."));
+ });
+
+ qCDebug(qLcImageCaptureGst) << "error 2";
+ return -1;
+ }
+ if (passImage) {
+ invokeDeferred([this] {
+ emit error(-1, QImageCapture::NotReadyError,
+ QPlatformImageCapture::msgCameraNotReady());
+ });
+
+ qCDebug(qLcImageCaptureGst) << "error 3";
+ return -1;
+ }
+ m_lastId++;
+
+ pendingImages.enqueue({ m_lastId, fileName, QMediaMetaData{} });
+ // let one image pass the pipeline
+ passImage = true;
+ }
+
+ emit readyForCaptureChanged(false);
+ return m_lastId;
+}
+
+void QGstreamerImageCapture::setResolution(const QSize &resolution)
+{
+ QGstCaps padCaps = bin.staticPad("sink").currentCaps();
+ if (padCaps.isNull()) {
+ qDebug() << "Camera not ready";
+ return;
+ }
+ QGstCaps caps = padCaps.copy();
+ if (caps.isNull())
+ return;
+
+ gst_caps_set_simple(caps.caps(), "width", G_TYPE_INT, resolution.width(), "height", G_TYPE_INT,
+ resolution.height(), nullptr);
+ filter.set("caps", caps);
+}
+
+// HACK: gcc-10 and earlier reject [=,this] when building with c++17
+#if __cplusplus >= 202002L
+# define EQ_THIS_CAPTURE =, this
+#else
+# define EQ_THIS_CAPTURE =
+#endif
+
+bool QGstreamerImageCapture::probeBuffer(GstBuffer *buffer)
+{
+ QMutexLocker guard(&m_mutex);
+
+ if (!passImage)
+ return false;
+ qCDebug(qLcImageCaptureGst) << "probe buffer";
+
+ QGstBufferHandle bufferHandle{
+ buffer,
+ QGstBufferHandle::NeedsRef,
+ };
+
+ passImage = false;
+
+ bool ready = isReadyForCapture();
+ invokeDeferred([this, ready] {
+ emit readyForCaptureChanged(ready);
+ });
+
+ QGstCaps caps = bin.staticPad("sink").currentCaps();
+ auto memoryFormat = caps.memoryFormat();
+
+ GstVideoInfo previewInfo;
+ QVideoFrameFormat fmt;
+ auto optionalFormatAndVideoInfo = caps.formatAndVideoInfo();
+ if (optionalFormatAndVideoInfo)
+ std::tie(fmt, previewInfo) = std::move(*optionalFormatAndVideoInfo);
+
+ int futureId = futureIDAllocator += 1;
+
+ // ensure QVideoFrame::toImage is executed on a worker thread that is joined before the
+ // qApplication is destroyed
+ QFuture<void> future = s_threadPoolSingleton.run([EQ_THIS_CAPTURE]() mutable {
+ QMutexLocker guard(&m_mutex);
+ auto scopeExit = qScopeGuard([&] {
+ m_pendingFutures.remove(futureId);
+ });
+
+ if (!m_session) {
+ qDebug() << "QGstreamerImageCapture::probeBuffer: no session";
+ return;
+ }
+
+ auto *sink = m_session->gstreamerVideoSink();
+ auto gstBuffer = std::make_unique<QGstVideoBuffer>(std::move(bufferHandle), previewInfo,
+ sink, fmt, memoryFormat);
+
+ QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(gstBuffer), fmt);
+ QImage img = frame.toImage();
+ if (img.isNull()) {
+ qDebug() << "received a null image";
+ return;
+ }
+
+ QMediaMetaData imageMetaData = metaData();
+ imageMetaData.insert(QMediaMetaData::Resolution, frame.size());
+ pendingImages.head().metaData = std::move(imageMetaData);
+ PendingImage pendingImage = pendingImages.head();
+
+ invokeDeferred([this, pendingImage = std::move(pendingImage), frame = std::move(frame),
+ img = std::move(img)]() mutable {
+ emit imageExposed(pendingImage.id);
+ qCDebug(qLcImageCaptureGst) << "Image available!";
+ emit imageAvailable(pendingImage.id, frame);
+ emit imageCaptured(pendingImage.id, img);
+ emit imageMetadataAvailable(pendingImage.id, pendingImage.metaData);
+ });
+ });
+
+ if (!future.isValid()) // during qApplication shutdown the threadpool becomes unusable
+ return true;
+
+ m_pendingFutures.insert(futureId, future);
+
+ return true;
+}
+
+#undef EQ_THIS_CAPTURE
+
+void QGstreamerImageCapture::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+ QMutexLocker guard(&m_mutex);
+ QGstreamerMediaCapture *captureSession = static_cast<QGstreamerMediaCapture *>(session);
+ if (m_session == captureSession)
+ return;
+
+ bool readyForCapture = isReadyForCapture();
+ if (m_session) {
+ disconnect(m_session, nullptr, this, nullptr);
+ m_lastId = 0;
+ pendingImages.clear();
+ passImage = false;
+ cameraActive = false;
+ }
+
+ m_session = captureSession;
+ if (!m_session) {
+ if (readyForCapture)
+ emit readyForCaptureChanged(false);
+ return;
+ }
+
+ connect(m_session, &QPlatformMediaCaptureSession::cameraChanged, this,
+ &QGstreamerImageCapture::onCameraChanged);
+ onCameraChanged();
+}
+
+void QGstreamerImageCapture::setMetaData(const QMediaMetaData &m)
+{
+ {
+ QMutexLocker guard(&m_mutex);
+ QPlatformImageCapture::setMetaData(m);
+ }
+
+ // ensure taginject injects this metaData
+ applyMetaDataToTagSetter(m, muxer);
+}
+
+void QGstreamerImageCapture::cameraActiveChanged(bool active)
+{
+ qCDebug(qLcImageCaptureGst) << "cameraActiveChanged" << cameraActive << active;
+ if (cameraActive == active)
+ return;
+ cameraActive = active;
+ qCDebug(qLcImageCaptureGst) << "isReady" << isReadyForCapture();
+ emit readyForCaptureChanged(isReadyForCapture());
+}
+
+void QGstreamerImageCapture::onCameraChanged()
+{
+ QMutexLocker guard(&m_mutex);
+ if (m_session->camera()) {
+ cameraActiveChanged(m_session->camera()->isActive());
+ connect(m_session->camera(), &QPlatformCamera::activeChanged, this,
+ &QGstreamerImageCapture::cameraActiveChanged);
+ } else {
+ cameraActiveChanged(false);
+ }
+}
+
+gboolean QGstreamerImageCapture::saveImageFilter(GstElement *, GstBuffer *buffer, GstPad *,
+ QGstreamerImageCapture *capture)
+{
+ capture->saveBufferToImage(buffer);
+ return TRUE;
+}
+
+void QGstreamerImageCapture::saveBufferToImage(GstBuffer *buffer)
+{
+ QMutexLocker guard(&m_mutex);
+ passImage = false;
+
+ if (pendingImages.isEmpty())
+ return;
+
+ PendingImage imageData = pendingImages.dequeue();
+ if (imageData.filename.isEmpty())
+ return;
+
+ int id = futureIDAllocator++;
+ QGstBufferHandle bufferHandle{
+ buffer,
+ QGstBufferHandle::NeedsRef,
+ };
+
+ QFuture<void> saveImageFuture = QtConcurrent::run([this, imageData, bufferHandle,
+ id]() mutable {
+ auto cleanup = qScopeGuard([&] {
+ QMutexLocker guard(&m_mutex);
+ m_pendingFutures.remove(id);
+ });
+
+ qCDebug(qLcImageCaptureGst) << "saving image as" << imageData.filename;
+
+ QFile f(imageData.filename);
+ if (!f.open(QFile::WriteOnly)) {
+ qCDebug(qLcImageCaptureGst) << " could not open image file for writing";
+ return;
+ }
+
+ GstMapInfo info;
+ GstBuffer *buffer = bufferHandle.get();
+ if (gst_buffer_map(buffer, &info, GST_MAP_READ)) {
+ f.write(reinterpret_cast<const char *>(info.data), info.size);
+ gst_buffer_unmap(buffer, &info);
+ }
+ f.close();
+
+ QMetaObject::invokeMethod(this, [this, imageData = std::move(imageData)]() mutable {
+ emit imageSaved(imageData.id, imageData.filename);
+ });
+ });
+
+ m_pendingFutures.insert(id, saveImageFuture);
+}
+
+QImageEncoderSettings QGstreamerImageCapture::imageSettings() const
+{
+ return m_settings;
+}
+
+void QGstreamerImageCapture::setImageSettings(const QImageEncoderSettings &settings)
+{
+ if (m_settings != settings) {
+ QSize resolution = settings.resolution();
+ if (m_settings.resolution() != resolution && !resolution.isEmpty())
+ setResolution(resolution);
+
+ m_settings = settings;
+ }
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qgstreamerimagecapture_p.cpp"
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture_p.h b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture_p.h
new file mode 100644
index 000000000..04a7c00b4
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture_p.h
@@ -0,0 +1,109 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTREAMERIMAGECAPTURECONTROL_H
+#define QGSTREAMERIMAGECAPTURECONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/private/qplatformimagecapture_p.h>
+#include <QtMultimedia/private/qmultimediautils_p.h>
+
+#include <QtCore/qmutex.h>
+#include <QtCore/qqueue.h>
+#include <QtConcurrent/QtConcurrentRun>
+
+#include <common/qgst_p.h>
+#include <common/qgstreamerbufferprobe_p.h>
+#include <mediacapture/qgstreamermediacapture_p.h>
+#include <gst/video/video.h>
+
+QT_BEGIN_NAMESPACE
+
+class QGstreamerImageCapture : public QPlatformImageCapture, private QGstreamerBufferProbe
+{
+ Q_OBJECT
+
+public:
+ static QMaybe<QPlatformImageCapture *> create(QImageCapture *parent);
+ virtual ~QGstreamerImageCapture();
+
+ bool isReadyForCapture() const override;
+ int capture(const QString &fileName) override;
+ int captureToBuffer() override;
+
+ QImageEncoderSettings imageSettings() const override;
+ void setImageSettings(const QImageEncoderSettings &settings) override;
+
+ bool probeBuffer(GstBuffer *buffer) override;
+
+ void setCaptureSession(QPlatformMediaCaptureSession *session);
+
+ QGstElement gstElement() const { return bin; }
+
+ void setMetaData(const QMediaMetaData &m) override;
+
+public Q_SLOTS:
+ void cameraActiveChanged(bool active);
+ void onCameraChanged();
+
+private:
+ QGstreamerImageCapture(QImageCapture *parent);
+
+ void setResolution(const QSize &resolution);
+ int doCapture(const QString &fileName);
+ static gboolean saveImageFilter(GstElement *element, GstBuffer *buffer, GstPad *pad,
+ QGstreamerImageCapture *capture);
+
+ void saveBufferToImage(GstBuffer *buffer);
+
+ mutable QRecursiveMutex
+ m_mutex; // guard all elements accessed from probeBuffer/saveBufferToImage
+ QGstreamerMediaCapture *m_session = nullptr;
+ int m_lastId = 0;
+ QImageEncoderSettings m_settings;
+
+ struct PendingImage {
+ int id;
+ QString filename;
+ QMediaMetaData metaData;
+ };
+
+ QQueue<PendingImage> pendingImages;
+
+ QGstBin bin;
+ QGstElement queue;
+ QGstElement filter;
+ QGstElement videoConvert;
+ QGstElement encoder;
+ QGstElement muxer;
+ QGstElement sink;
+ QGstPad videoSrcPad;
+
+ bool passImage = false;
+ bool cameraActive = false;
+
+ QGObjectHandlerScopedConnection m_handoffConnection;
+
+ QMap<int, QFuture<void>> m_pendingFutures;
+ int futureIDAllocator = 0;
+
+ template <typename Functor>
+ void invokeDeferred(Functor &&fn)
+ {
+ QMetaObject::invokeMethod(this, std::forward<Functor>(fn), Qt::QueuedConnection);
+ }
+};
+
+QT_END_NAMESPACE
+
+#endif // QGSTREAMERIMAGECAPTURECONTROL_H
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture.cpp b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture.cpp
new file mode 100644
index 000000000..7ecbb07d7
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture.cpp
@@ -0,0 +1,326 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <mediacapture/qgstreamermediacapture_p.h>
+#include <mediacapture/qgstreamermediaencoder_p.h>
+#include <mediacapture/qgstreamerimagecapture_p.h>
+#include <mediacapture/qgstreamercamera_p.h>
+#include <common/qgstpipeline_p.h>
+#include <common/qgstreameraudioinput_p.h>
+#include <common/qgstreameraudiooutput_p.h>
+#include <common/qgstreamervideooutput_p.h>
+
+#include <QtCore/qloggingcategory.h>
+#include <QtCore/private/quniquehandle_p.h>
+
+QT_BEGIN_NAMESPACE
+
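+// tee source pads are request pads ("src_%u"); each consumer branch (encoder,
+// preview, image capture) links through its own freshly requested pad.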
+static void linkTeeToPad(QGstElement tee, QGstPad sink)
+{
+ if (tee.isNull() || sink.isNull())
+ return;
+
+ auto source = tee.getRequestPad("src_%u");
+ source.link(sink);
+}
+
+QMaybe<QPlatformMediaCaptureSession *> QGstreamerMediaCapture::create()
+{
+ auto videoOutput = QGstreamerVideoOutput::create();
+ if (!videoOutput)
+ return videoOutput.error();
+
+ static const auto error = qGstErrorMessageIfElementsNotAvailable("tee", "capsfilter");
+ if (error)
+ return *error;
+
+ return new QGstreamerMediaCapture(videoOutput.value());
+}
+
+QGstreamerMediaCapture::QGstreamerMediaCapture(QGstreamerVideoOutput *videoOutput)
+ : capturePipeline(QGstPipeline::create("mediaCapturePipeline")), gstVideoOutput(videoOutput)
+{
+ gstVideoOutput->setParent(this);
+ gstVideoOutput->setIsPreview();
+ gstVideoOutput->setPipeline(capturePipeline);
+
+ // Use system clock to drive all elements in the pipeline. Otherwise,
+ // the clock is sourced from the elements (e.g. from an audio source).
+ // Since the elements are added and removed dynamically, the clock would
+ // also change, causing loss of synchronization in the pipeline.
+
+ QGstClockHandle systemClock{
+ gst_system_clock_obtain(),
+ };
+ gst_pipeline_use_clock(capturePipeline.pipeline(), systemClock.get());
+
+ // This is the recording pipeline with only live sources, thus the pipeline
+ // will be always in the playing state.
+ capturePipeline.setState(GST_STATE_PLAYING);
+ capturePipeline.setInStoppedState(false);
+
+ capturePipeline.dumpGraph("initial");
+}
+
+QGstreamerMediaCapture::~QGstreamerMediaCapture()
+{
+ setMediaRecorder(nullptr);
+ setImageCapture(nullptr);
+ setCamera(nullptr);
+ capturePipeline.setStateSync(GST_STATE_NULL);
+}
+
+QPlatformCamera *QGstreamerMediaCapture::camera()
+{
+ return gstCamera;
+}
+
+void QGstreamerMediaCapture::setCamera(QPlatformCamera *platformCamera)
+{
+ auto *camera = static_cast<QGstreamerCameraBase *>(platformCamera);
+ if (gstCamera == camera)
+ return;
+
+ if (gstCamera) {
+ QObject::disconnect(gstCameraActiveConnection);
+ if (gstVideoTee)
+ setCameraActive(false);
+ }
+
+ gstCamera = camera;
+
+ if (gstCamera) {
+ gstCameraActiveConnection = QObject::connect(camera, &QPlatformCamera::activeChanged, this,
+ &QGstreamerMediaCapture::setCameraActive);
+ if (gstCamera->isActive())
+ setCameraActive(true);
+ }
+
+ emit cameraChanged();
+}
+
+void QGstreamerMediaCapture::setCameraActive(bool activate)
+{
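+ // The camera feeds a tee created with allow-not-linked, so the preview,
+ // encoder and image-capture branches can attach and detach independently
+ // without stalling the rest of the pipeline.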
+ capturePipeline.modifyPipelineWhileNotRunning([&] {
+ if (activate) {
+ QGstElement cameraElement = gstCamera->gstElement();
+ gstVideoTee = QGstElement::createFromFactory("tee", "videotee");
+ gstVideoTee.set("allow-not-linked", true);
+
+ capturePipeline.add(gstVideoOutput->gstElement(), cameraElement, gstVideoTee);
+
+ linkTeeToPad(gstVideoTee, encoderVideoSink);
+ linkTeeToPad(gstVideoTee, gstVideoOutput->gstElement().staticPad("sink"));
+ linkTeeToPad(gstVideoTee, imageCaptureSink);
+
+ qLinkGstElements(cameraElement, gstVideoTee);
+
+ capturePipeline.syncChildrenState();
+ } else {
+ if (encoderVideoCapsFilter)
+ qUnlinkGstElements(gstVideoTee, encoderVideoCapsFilter);
+ if (m_imageCapture)
+ qUnlinkGstElements(gstVideoTee, m_imageCapture->gstElement());
+
+ auto camera = gstCamera->gstElement();
+
+ capturePipeline.stopAndRemoveElements(camera, gstVideoTee,
+ gstVideoOutput->gstElement());
+
+ gstVideoTee = {};
+ gstCamera->setCaptureSession(nullptr);
+ }
+ });
+
+ capturePipeline.dumpGraph("camera");
+}
+
+QPlatformImageCapture *QGstreamerMediaCapture::imageCapture()
+{
+ return m_imageCapture;
+}
+
+void QGstreamerMediaCapture::setImageCapture(QPlatformImageCapture *imageCapture)
+{
+ QGstreamerImageCapture *control = static_cast<QGstreamerImageCapture *>(imageCapture);
+ if (m_imageCapture == control)
+ return;
+
+ capturePipeline.modifyPipelineWhileNotRunning([&] {
+ if (m_imageCapture) {
+ qUnlinkGstElements(gstVideoTee, m_imageCapture->gstElement());
+ capturePipeline.stopAndRemoveElements(m_imageCapture->gstElement());
+ imageCaptureSink = {};
+ m_imageCapture->setCaptureSession(nullptr);
+ }
+
+ m_imageCapture = control;
+ if (m_imageCapture) {
+ imageCaptureSink = m_imageCapture->gstElement().staticPad("sink");
+ capturePipeline.add(m_imageCapture->gstElement());
+ m_imageCapture->gstElement().syncStateWithParent();
+ linkTeeToPad(gstVideoTee, imageCaptureSink);
+ m_imageCapture->setCaptureSession(this);
+ }
+ });
+
+ capturePipeline.dumpGraph("imageCapture");
+
+ emit imageCaptureChanged();
+}
+
+void QGstreamerMediaCapture::setMediaRecorder(QPlatformMediaRecorder *recorder)
+{
+ QGstreamerMediaEncoder *control = static_cast<QGstreamerMediaEncoder *>(recorder);
+ if (m_mediaEncoder == control)
+ return;
+
+ if (m_mediaEncoder)
+ m_mediaEncoder->setCaptureSession(nullptr);
+ m_mediaEncoder = control;
+ if (m_mediaEncoder)
+ m_mediaEncoder->setCaptureSession(this);
+
+ emit encoderChanged();
+ capturePipeline.dumpGraph("encoder");
+}
+
+QPlatformMediaRecorder *QGstreamerMediaCapture::mediaRecorder()
+{
+ return m_mediaEncoder;
+}
+
+void QGstreamerMediaCapture::linkEncoder(QGstPad audioSink, QGstPad videoSink)
+{
+ capturePipeline.modifyPipelineWhileNotRunning([&] {
+ if (!gstVideoTee.isNull() && !videoSink.isNull()) {
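+ // Pin the encoder branch to the caps currently flowing through the
+ // tee, so that linking the encoder cannot trigger renegotiation.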
+ QGstCaps caps = gstVideoTee.sink().currentCaps();
+
+ encoderVideoCapsFilter =
+ QGstElement::createFromFactory("capsfilter", "encoderVideoCapsFilter");
+ Q_ASSERT(encoderVideoCapsFilter);
+ encoderVideoCapsFilter.set("caps", caps);
+
+ capturePipeline.add(encoderVideoCapsFilter);
+
+ encoderVideoCapsFilter.src().link(videoSink);
+ linkTeeToPad(gstVideoTee, encoderVideoCapsFilter.sink());
+ encoderVideoSink = encoderVideoCapsFilter.sink();
+ }
+
+ if (!gstAudioTee.isNull() && !audioSink.isNull()) {
+ QGstCaps caps = gstAudioTee.sink().currentCaps();
+
+ encoderAudioCapsFilter =
+ QGstElement::createFromFactory("capsfilter", "encoderAudioCapsFilter");
+ Q_ASSERT(encoderAudioCapsFilter);
+ encoderAudioCapsFilter.set("caps", caps);
+
+ capturePipeline.add(encoderAudioCapsFilter);
+
+ encoderAudioCapsFilter.src().link(audioSink);
+ linkTeeToPad(gstAudioTee, encoderAudioCapsFilter.sink());
+ encoderAudioSink = encoderAudioCapsFilter.sink();
+ }
+ });
+}
+
+void QGstreamerMediaCapture::unlinkEncoder()
+{
+ capturePipeline.modifyPipelineWhileNotRunning([&] {
+ if (encoderVideoCapsFilter) {
+ qUnlinkGstElements(gstVideoTee, encoderVideoCapsFilter);
+ capturePipeline.stopAndRemoveElements(encoderVideoCapsFilter);
+ encoderVideoCapsFilter = {};
+ }
+
+ if (encoderAudioCapsFilter) {
+ qUnlinkGstElements(gstAudioTee, encoderAudioCapsFilter);
+ capturePipeline.stopAndRemoveElements(encoderAudioCapsFilter);
+ encoderAudioCapsFilter = {};
+ }
+
+ encoderAudioSink = {};
+ encoderVideoSink = {};
+ });
+}
+
+const QGstPipeline &QGstreamerMediaCapture::pipeline() const
+{
+ return capturePipeline;
+}
+
+void QGstreamerMediaCapture::setAudioInput(QPlatformAudioInput *input)
+{
+ if (gstAudioInput == input)
+ return;
+
+ capturePipeline.modifyPipelineWhileNotRunning([&] {
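+ // Tear down the previous audio branch (input, tee, monitoring output
+ // and encoder link) before rebuilding it around the new input.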
+ if (gstAudioInput) {
+ if (encoderAudioCapsFilter)
+ qUnlinkGstElements(gstAudioTee, encoderAudioCapsFilter);
+
+ if (gstAudioOutput) {
+ qUnlinkGstElements(gstAudioTee, gstAudioOutput->gstElement());
+ capturePipeline.stopAndRemoveElements(gstAudioOutput->gstElement());
+ }
+
+ capturePipeline.stopAndRemoveElements(gstAudioInput->gstElement(), gstAudioTee);
+ gstAudioTee = {};
+ }
+
+ gstAudioInput = static_cast<QGstreamerAudioInput *>(input);
+ if (gstAudioInput) {
+ Q_ASSERT(gstAudioTee.isNull());
+ gstAudioTee = QGstElement::createFromFactory("tee", "audiotee");
+ gstAudioTee.set("allow-not-linked", true);
+ capturePipeline.add(gstAudioInput->gstElement(), gstAudioTee);
+ qLinkGstElements(gstAudioInput->gstElement(), gstAudioTee);
+
+ if (gstAudioOutput) {
+ capturePipeline.add(gstAudioOutput->gstElement());
+ gstAudioOutput->gstElement().setState(GST_STATE_PLAYING);
+ linkTeeToPad(gstAudioTee, gstAudioOutput->gstElement().staticPad("sink"));
+ }
+
+ capturePipeline.syncChildrenState();
+
+ linkTeeToPad(gstAudioTee, encoderAudioSink);
+ }
+ });
+}
+
+void QGstreamerMediaCapture::setVideoPreview(QVideoSink *sink)
+{
+ gstVideoOutput->setVideoSink(sink);
+}
+
+void QGstreamerMediaCapture::setAudioOutput(QPlatformAudioOutput *output)
+{
+ if (gstAudioOutput == output)
+ return;
+
+ capturePipeline.modifyPipelineWhileNotRunning([&] {
+ if (gstAudioOutput && gstAudioInput) {
+ // If audio input is set, the output is in the pipeline
+ qUnlinkGstElements(gstAudioTee, gstAudioOutput->gstElement());
+ capturePipeline.stopAndRemoveElements(gstAudioOutput->gstElement());
+ }
+
+ gstAudioOutput = static_cast<QGstreamerAudioOutput *>(output);
+ if (gstAudioOutput && gstAudioInput) {
+ capturePipeline.add(gstAudioOutput->gstElement());
+ capturePipeline.syncChildrenState();
+ linkTeeToPad(gstAudioTee, gstAudioOutput->gstElement().staticPad("sink"));
+ }
+ });
+}
+
+QGstreamerVideoSink *QGstreamerMediaCapture::gstreamerVideoSink() const
+{
+ return gstVideoOutput ? gstVideoOutput->gstreamerVideoSink() : nullptr;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qgstreamermediacapture_p.cpp"
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture_p.h b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture_p.h
new file mode 100644
index 000000000..c44e31f0e
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture_p.h
@@ -0,0 +1,97 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTREAMERCAPTURESERVICE_H
+#define QGSTREAMERCAPTURESERVICE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediacapture_p.h>
+#include <private/qplatformmediaintegration_p.h>
+
+#include <common/qgst_p.h>
+#include <common/qgstpipeline_p.h>
+
+#include <qtimer.h>
+
+QT_BEGIN_NAMESPACE
+
+class QGstreamerCameraBase;
+class QGstreamerImageCapture;
+class QGstreamerMediaEncoder;
+class QGstreamerAudioInput;
+class QGstreamerAudioOutput;
+class QGstreamerVideoOutput;
+class QGstreamerVideoSink;
+
+class QGstreamerMediaCapture final : public QPlatformMediaCaptureSession
+{
+ Q_OBJECT
+
+public:
+ static QMaybe<QPlatformMediaCaptureSession *> create();
+ virtual ~QGstreamerMediaCapture();
+
+ QPlatformCamera *camera() override;
+ void setCamera(QPlatformCamera *camera) override;
+
+ QPlatformImageCapture *imageCapture() override;
+ void setImageCapture(QPlatformImageCapture *imageCapture) override;
+
+ QPlatformMediaRecorder *mediaRecorder() override;
+ void setMediaRecorder(QPlatformMediaRecorder *recorder) override;
+
+ void setAudioInput(QPlatformAudioInput *input) override;
+ QGstreamerAudioInput *audioInput() { return gstAudioInput; }
+
+ void setVideoPreview(QVideoSink *sink) override;
+ void setAudioOutput(QPlatformAudioOutput *output) override;
+
+ void linkEncoder(QGstPad audioSink, QGstPad videoSink);
+ void unlinkEncoder();
+
+ const QGstPipeline &pipeline() const;
+
+ QGstreamerVideoSink *gstreamerVideoSink() const;
+
+private:
+ void setCameraActive(bool activate);
+
+ explicit QGstreamerMediaCapture(QGstreamerVideoOutput *videoOutput);
+
+ friend QGstreamerMediaEncoder;
+ // Gst elements
+ QGstPipeline capturePipeline;
+
+ QGstreamerAudioInput *gstAudioInput = nullptr;
+ QGstreamerCameraBase *gstCamera = nullptr;
+ QMetaObject::Connection gstCameraActiveConnection;
+
+ QGstElement gstAudioTee;
+ QGstElement gstVideoTee;
+ QGstElement encoderVideoCapsFilter;
+ QGstElement encoderAudioCapsFilter;
+
+ QGstPad encoderAudioSink;
+ QGstPad encoderVideoSink;
+ QGstPad imageCaptureSink;
+
+ QGstreamerAudioOutput *gstAudioOutput = nullptr;
+ QGstreamerVideoOutput *gstVideoOutput = nullptr;
+
+ QGstreamerMediaEncoder *m_mediaEncoder = nullptr;
+ QGstreamerImageCapture *m_imageCapture = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif // QGSTREAMERCAPTURESERVICE_H
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder.cpp b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder.cpp
new file mode 100644
index 000000000..4ec10ca84
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder.cpp
@@ -0,0 +1,419 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <mediacapture/qgstreamermediaencoder_p.h>
+#include <qgstreamerformatinfo_p.h>
+#include <common/qgstpipeline_p.h>
+#include <common/qgstreamermessage_p.h>
+#include <common/qgst_debug_p.h>
+#include <qgstreamerintegration_p.h>
+
+#include <QtMultimedia/private/qmediastoragelocation_p.h>
+#include <QtMultimedia/private/qplatformcamera_p.h>
+#include <QtMultimedia/qaudiodevice.h>
+
+#include <QtCore/qdebug.h>
+#include <QtCore/qeventloop.h>
+#include <QtCore/qstandardpaths.h>
+#include <QtCore/qmimetype.h>
+#include <QtCore/qloggingcategory.h>
+
+#include <gst/gsttagsetter.h>
+#include <gst/gstversion.h>
+#include <gst/video/video.h>
+#include <gst/pbutils/encoding-profile.h>
+
+static Q_LOGGING_CATEGORY(qLcMediaEncoderGst, "qt.multimedia.encoder")
+
+QT_BEGIN_NAMESPACE
+
+QGstreamerMediaEncoder::QGstreamerMediaEncoder(QMediaRecorder *parent)
+ : QPlatformMediaRecorder(parent),
+ audioPauseControl(*this),
+ videoPauseControl(*this)
+{
+ signalDurationChangedTimer.setInterval(100);
+ signalDurationChangedTimer.callOnTimeout(&signalDurationChangedTimer, [this]() {
+ durationChanged(duration());
+ });
+}
+
+QGstreamerMediaEncoder::~QGstreamerMediaEncoder()
+{
+ if (!capturePipeline.isNull()) {
+ finalize();
+ capturePipeline.removeMessageFilter(this);
+ capturePipeline.setStateSync(GST_STATE_NULL);
+ }
+}
+
+bool QGstreamerMediaEncoder::isLocationWritable(const QUrl &) const
+{
+ return true;
+}
+
+void QGstreamerMediaEncoder::handleSessionError(QMediaRecorder::Error code, const QString &description)
+{
+ updateError(code, description);
+ stop();
+}
+
+bool QGstreamerMediaEncoder::processBusMessage(const QGstreamerMessage &msg)
+{
+ constexpr bool traceStateChange = false;
+ constexpr bool traceAllEvents = false;
+
+ if constexpr (traceAllEvents)
+ qCDebug(qLcMediaEncoderGst) << "received event:" << msg;
+
+ switch (msg.type()) {
+ case GST_MESSAGE_ELEMENT: {
+ QGstStructureView s = msg.structure();
+ if (s.name() == "GstBinForwarded")
+ return processBusMessage(s.getMessage());
+
+ qCDebug(qLcMediaEncoderGst)
+ << "received element message from" << msg.source().name() << s.name();
+ return false;
+ }
+
+ case GST_MESSAGE_EOS: {
+ qCDebug(qLcMediaEncoderGst) << "received EOS from" << msg.source().name();
+ finalize();
+ return false;
+ }
+
+ case GST_MESSAGE_ERROR: {
+ qCDebug(qLcMediaEncoderGst)
+ << "received error:" << msg.source().name() << QCompactGstMessageAdaptor(msg);
+
+ QUniqueGErrorHandle err;
+ QGString debug;
+ gst_message_parse_error(msg.message(), &err, &debug);
+ updateError(QMediaRecorder::ResourceError, QString::fromUtf8(err.get()->message));
+ if (!m_finalizing)
+ stop();
+ finalize();
+ return false;
+ }
+
+ case GST_MESSAGE_STATE_CHANGED: {
+ if constexpr (traceStateChange)
+ qCDebug(qLcMediaEncoderGst)
+ << "received state change" << QCompactGstMessageAdaptor(msg);
+
+ return false;
+ }
+
+ default:
+ return false;
+ }
+}
+
+qint64 QGstreamerMediaEncoder::duration() const
+{
+ return std::max(audioPauseControl.duration, videoPauseControl.duration);
+}
+
+
+static GstEncodingContainerProfile *createContainerProfile(const QMediaEncoderSettings &settings)
+{
+ auto *formatInfo = QGstreamerIntegration::instance()->gstFormatsInfo();
+
+ auto caps = formatInfo->formatCaps(settings.fileFormat());
+
+ GstEncodingContainerProfile *profile =
+ (GstEncodingContainerProfile *)gst_encoding_container_profile_new(
+ "container_profile", (gchar *)"custom container profile",
+ const_cast<GstCaps *>(caps.caps()),
+ nullptr); // preset
+ return profile;
+}
+
+static GstEncodingProfile *createVideoProfile(const QMediaEncoderSettings &settings)
+{
+ auto *formatInfo = QGstreamerIntegration::instance()->gstFormatsInfo();
+
+ QGstCaps caps = formatInfo->videoCaps(settings.mediaFormat());
+ if (caps.isNull())
+ return nullptr;
+
+ QSize videoResolution = settings.videoResolution();
+ if (videoResolution.isValid())
+ caps.setResolution(videoResolution);
+
+ GstEncodingVideoProfile *profile =
+ gst_encoding_video_profile_new(const_cast<GstCaps *>(caps.caps()), nullptr,
+ nullptr, // restriction
+ 0); // presence
+
+ gst_encoding_video_profile_set_pass(profile, 0);
+ gst_encoding_video_profile_set_variableframerate(profile, TRUE);
+
+ return (GstEncodingProfile *)profile;
+}
+
+static GstEncodingProfile *createAudioProfile(const QMediaEncoderSettings &settings)
+{
+ auto *formatInfo = QGstreamerIntegration::instance()->gstFormatsInfo();
+
+ auto caps = formatInfo->audioCaps(settings.mediaFormat());
+ if (caps.isNull())
+ return nullptr;
+
+ GstEncodingProfile *profile =
+ (GstEncodingProfile *)gst_encoding_audio_profile_new(const_cast<GstCaps *>(caps.caps()),
+ nullptr, // preset
+ nullptr, // restriction
+ 0); // presence
+
+ return profile;
+}
+
+
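+// Builds the GstEncodingProfile consumed by encodebin: a container profile
+// with optional audio and video sub-profiles derived from the settings.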
+static GstEncodingContainerProfile *createEncodingProfile(const QMediaEncoderSettings &settings)
+{
+ auto *containerProfile = createContainerProfile(settings);
+ if (!containerProfile) {
+ qWarning() << "QGstreamerMediaEncoder: failed to create container profile!";
+ return nullptr;
+ }
+
+ GstEncodingProfile *audioProfile = createAudioProfile(settings);
+ GstEncodingProfile *videoProfile = nullptr;
+ if (settings.videoCodec() != QMediaFormat::VideoCodec::Unspecified)
+ videoProfile = createVideoProfile(settings);
+// qDebug() << "audio profile" << (audioProfile ? gst_caps_to_string(gst_encoding_profile_get_format(audioProfile)) : "(null)");
+// qDebug() << "video profile" << (videoProfile ? gst_caps_to_string(gst_encoding_profile_get_format(videoProfile)) : "(null)");
+// qDebug() << "conta profile" << gst_caps_to_string(gst_encoding_profile_get_format((GstEncodingProfile *)containerProfile));
+
+ if (videoProfile) {
+ if (!gst_encoding_container_profile_add_profile(containerProfile, videoProfile)) {
+ qWarning() << "QGstreamerMediaEncoder: failed to add video profile!";
+ gst_encoding_profile_unref(videoProfile);
+ }
+ }
+ if (audioProfile) {
+ if (!gst_encoding_container_profile_add_profile(containerProfile, audioProfile)) {
+ qWarning() << "QGstreamerMediaEncoder: failed to add audio profile!";
+ gst_encoding_profile_unref(audioProfile);
+ }
+ }
+
+ return containerProfile;
+}
+
+void QGstreamerMediaEncoder::PauseControl::reset()
+{
+ pauseOffsetPts = 0;
+ pauseStartPts.reset();
+ duration = 0;
+ firstBufferPts.reset();
+}
+
+void QGstreamerMediaEncoder::PauseControl::installOn(QGstPad pad)
+{
+ pad.addProbe<&QGstreamerMediaEncoder::PauseControl::processBuffer>(this, GST_PAD_PROBE_TYPE_BUFFER);
+}
+
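+// Pad probe that drops buffers while the recorder is paused and shifts the
+// timestamps of later buffers back by the accumulated pause time, keeping
+// the recorded stream gapless.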
+GstPadProbeReturn QGstreamerMediaEncoder::PauseControl::processBuffer(QGstPad, GstPadProbeInfo *info)
+{
+ auto buffer = GST_PAD_PROBE_INFO_BUFFER(info);
+ if (!buffer)
+ return GST_PAD_PROBE_OK;
+
+ buffer = gst_buffer_make_writable(buffer);
+
+ if (!buffer)
+ return GST_PAD_PROBE_OK;
+
+ GST_PAD_PROBE_INFO_DATA(info) = buffer;
+
+ if (!GST_BUFFER_PTS_IS_VALID(buffer))
+ return GST_PAD_PROBE_OK;
+
+ if (!firstBufferPts)
+ firstBufferPts = GST_BUFFER_PTS(buffer);
+
+ if (encoder.state() == QMediaRecorder::PausedState) {
+ if (!pauseStartPts)
+ pauseStartPts = GST_BUFFER_PTS(buffer);
+
+ return GST_PAD_PROBE_DROP;
+ }
+
+ if (pauseStartPts) {
+ pauseOffsetPts += GST_BUFFER_PTS(buffer) - *pauseStartPts;
+ pauseStartPts.reset();
+ }
+ GST_BUFFER_PTS(buffer) -= pauseOffsetPts;
+
+ duration = (GST_BUFFER_PTS(buffer) - *firstBufferPts) / GST_MSECOND;
+
+ return GST_PAD_PROBE_OK;
+}
+
+void QGstreamerMediaEncoder::record(QMediaEncoderSettings &settings)
+{
+ if (!m_session || m_finalizing || state() != QMediaRecorder::StoppedState)
+ return;
+
+ const auto hasVideo = m_session->camera() && m_session->camera()->isActive();
+ const auto hasAudio = m_session->audioInput() != nullptr;
+
+ if (!hasVideo && !hasAudio) {
+ updateError(QMediaRecorder::ResourceError, QMediaRecorder::tr("No camera or audio input"));
+ return;
+ }
+
+ const auto audioOnly = settings.videoCodec() == QMediaFormat::VideoCodec::Unspecified;
+
+ auto primaryLocation = audioOnly ? QStandardPaths::MusicLocation : QStandardPaths::MoviesLocation;
+ auto container = settings.mimeType().preferredSuffix();
+ auto location = QMediaStorageLocation::generateFileName(outputLocation().toLocalFile(), primaryLocation, container);
+
+ QUrl actualSink = QUrl::fromLocalFile(QDir::currentPath()).resolved(location);
+ qCDebug(qLcMediaEncoderGst) << "recording new video to" << actualSink;
+
+ Q_ASSERT(!actualSink.isEmpty());
+
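+ // encodebin picks suitable encoder and muxer elements based on the
+ // encoding profile built from the requested settings.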
+ gstEncoder = QGstBin::createFromFactory("encodebin", "encodebin");
+ Q_ASSERT(gstEncoder);
+ auto *encodingProfile = createEncodingProfile(settings);
+ g_object_set(gstEncoder.object(), "profile", encodingProfile, nullptr);
+ gst_encoding_profile_unref(encodingProfile);
+
+ gstFileSink = QGstElement::createFromFactory("filesink", "filesink");
+ Q_ASSERT(gstFileSink);
+ gstFileSink.set("location", QFile::encodeName(actualSink.toLocalFile()).constData());
+ gstFileSink.set("async", false);
+
+ QGstPad audioSink = {};
+ QGstPad videoSink = {};
+
+ audioPauseControl.reset();
+ videoPauseControl.reset();
+
+ if (hasAudio) {
+ audioSink = gstEncoder.getRequestPad("audio_%u");
+ if (audioSink.isNull())
+ qWarning() << "Unsupported audio codec";
+ else
+ audioPauseControl.installOn(audioSink);
+ }
+
+ if (hasVideo) {
+ videoSink = gstEncoder.getRequestPad("video_%u");
+ if (videoSink.isNull())
+ qWarning() << "Unsupported video codec";
+ else
+ videoPauseControl.installOn(videoSink);
+ }
+
+ capturePipeline.modifyPipelineWhileNotRunning([&] {
+ capturePipeline.add(gstEncoder, gstFileSink);
+ qLinkGstElements(gstEncoder, gstFileSink);
+ applyMetaDataToTagSetter(m_metaData, gstEncoder);
+
+ m_session->linkEncoder(audioSink, videoSink);
+
+ gstEncoder.syncStateWithParent();
+ gstFileSink.syncStateWithParent();
+ });
+
+ signalDurationChangedTimer.start();
+ capturePipeline.dumpGraph("recording");
+
+ durationChanged(0);
+ stateChanged(QMediaRecorder::RecordingState);
+ actualLocationChanged(QUrl::fromLocalFile(location));
+}
+
+void QGstreamerMediaEncoder::pause()
+{
+ if (!m_session || m_finalizing || state() != QMediaRecorder::RecordingState)
+ return;
+ signalDurationChangedTimer.stop();
+ durationChanged(duration());
+ capturePipeline.dumpGraph("before-pause");
+ stateChanged(QMediaRecorder::PausedState);
+}
+
+void QGstreamerMediaEncoder::resume()
+{
+ capturePipeline.dumpGraph("before-resume");
+ if (!m_session || m_finalizing || state() != QMediaRecorder::PausedState)
+ return;
+ signalDurationChangedTimer.start();
+ stateChanged(QMediaRecorder::RecordingState);
+}
+
+void QGstreamerMediaEncoder::stop()
+{
+ if (!m_session || m_finalizing || state() == QMediaRecorder::StoppedState)
+ return;
+ durationChanged(duration());
+ qCDebug(qLcMediaEncoderGst) << "stop";
+ m_finalizing = true;
+ m_session->unlinkEncoder();
+ signalDurationChangedTimer.stop();
+
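+ // Draining through EOS completes asynchronously; finalize() runs once
+ // the EOS message travels back over the bus (see processBusMessage).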
+ qCDebug(qLcMediaEncoderGst) << ">>>>>>>>>>>>> sending EOS";
+ gstEncoder.sendEos();
+}
+
+void QGstreamerMediaEncoder::finalize()
+{
+ if (!m_session || gstEncoder.isNull())
+ return;
+
+ qCDebug(qLcMediaEncoderGst) << "finalize";
+
+ capturePipeline.stopAndRemoveElements(gstEncoder, gstFileSink);
+ gstFileSink = {};
+ gstEncoder = {};
+ m_finalizing = false;
+ stateChanged(QMediaRecorder::StoppedState);
+}
+
+void QGstreamerMediaEncoder::setMetaData(const QMediaMetaData &metaData)
+{
+ if (!m_session)
+ return;
+ m_metaData = metaData;
+}
+
+QMediaMetaData QGstreamerMediaEncoder::metaData() const
+{
+ return m_metaData;
+}
+
+void QGstreamerMediaEncoder::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+ QGstreamerMediaCapture *captureSession = static_cast<QGstreamerMediaCapture *>(session);
+ if (m_session == captureSession)
+ return;
+
+ if (m_session) {
+ stop();
+ if (m_finalizing) {
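+ // stop() drains the encoder asynchronously via EOS, so block until
+ // the recorder reports Stopped before detaching from the pipeline.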
+ QEventLoop loop;
+ QObject::connect(mediaRecorder(), &QMediaRecorder::recorderStateChanged, &loop,
+ &QEventLoop::quit);
+ loop.exec();
+ }
+
+ capturePipeline.removeMessageFilter(this);
+ capturePipeline = {};
+ }
+
+ m_session = captureSession;
+ if (!m_session)
+ return;
+
+ capturePipeline = captureSession->capturePipeline;
+ capturePipeline.set("message-forward", true);
+ capturePipeline.installMessageFilter(this);
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder_p.h b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder_p.h
new file mode 100644
index 000000000..56e8c193b
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder_p.h
@@ -0,0 +1,91 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+
+#ifndef QGSTREAMERENCODERCONTROL_H
+#define QGSTREAMERENCODERCONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <mediacapture/qgstreamermediacapture_p.h>
+#include <common/qgstreamermetadata_p.h>
+
+#include <QtMultimedia/private/qplatformmediarecorder_p.h>
+#include <QtCore/qurl.h>
+#include <QtCore/qdir.h>
+#include <QtCore/qelapsedtimer.h>
+#include <QtCore/qtimer.h>
+
+QT_BEGIN_NAMESPACE
+
+class QMediaMetaData;
+class QGstreamerMessage;
+
+class QGstreamerMediaEncoder : public QPlatformMediaRecorder, QGstreamerBusMessageFilter
+{
+public:
+ explicit QGstreamerMediaEncoder(QMediaRecorder *parent);
+ virtual ~QGstreamerMediaEncoder();
+
+ bool isLocationWritable(const QUrl &sink) const override;
+
+ qint64 duration() const override;
+
+ void record(QMediaEncoderSettings &settings) override;
+ void pause() override;
+ void resume() override;
+ void stop() override;
+
+ void setMetaData(const QMediaMetaData &) override;
+ QMediaMetaData metaData() const override;
+
+ void setCaptureSession(QPlatformMediaCaptureSession *session);
+
+ QGstElement getEncoder() { return gstEncoder; }
+
+private:
+ bool processBusMessage(const QGstreamerMessage& message) override;
+
+private:
+ struct PauseControl {
+ explicit PauseControl(QPlatformMediaRecorder &encoder) : encoder(encoder) { }
+
+ GstPadProbeReturn processBuffer(QGstPad pad, GstPadProbeInfo *info);
+ void installOn(QGstPad pad);
+ void reset();
+
+ QPlatformMediaRecorder &encoder;
+ GstClockTime pauseOffsetPts = 0;
+ std::optional<GstClockTime> pauseStartPts;
+ std::optional<GstClockTime> firstBufferPts;
+ qint64 duration = 0;
+ };
+
+ PauseControl audioPauseControl;
+ PauseControl videoPauseControl;
+
+ void handleSessionError(QMediaRecorder::Error code, const QString &description);
+ void finalize();
+
+ QGstreamerMediaCapture *m_session = nullptr;
+ QMediaMetaData m_metaData;
+ QTimer signalDurationChangedTimer;
+
+ QGstPipeline capturePipeline;
+ QGstBin gstEncoder;
+ QGstElement gstFileSink;
+
+ bool m_finalizing = false;
+};
+
+QT_END_NAMESPACE
+
+#endif // QGSTREAMERENCODERCONTROL_H
diff --git a/src/plugins/multimedia/gstreamer/qgstreamerformatinfo.cpp b/src/plugins/multimedia/gstreamer/qgstreamerformatinfo.cpp
new file mode 100644
index 000000000..a657fc52f
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/qgstreamerformatinfo.cpp
@@ -0,0 +1,445 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <common/qglist_helper_p.h>
+#include "qgstreamerformatinfo_p.h"
+
+#include <gst/gst.h>
+
+QT_BEGIN_NAMESPACE
+
+QMediaFormat::AudioCodec QGstreamerFormatInfo::audioCodecForCaps(QGstStructureView structure)
+{
+ using namespace std::string_view_literals;
+ const char *name = structure.name().data();
+
+ if (!name || (strncmp(name, "audio/", 6) != 0))
+ return QMediaFormat::AudioCodec::Unspecified;
+ name += 6;
+ if (name == "mpeg"sv) {
+ auto version = structure["mpegversion"].toInt();
+ if (version == 1) {
+ auto layer = structure["layer"];
+ if (!layer.isNull())
+ return QMediaFormat::AudioCodec::MP3;
+ }
+ if (version == 4)
+ return QMediaFormat::AudioCodec::AAC;
+ return QMediaFormat::AudioCodec::Unspecified;
+ }
+ if (name == "x-ac3"sv)
+ return QMediaFormat::AudioCodec::AC3;
+
+ if (name == "x-eac3"sv)
+ return QMediaFormat::AudioCodec::EAC3;
+
+ if (name == "x-flac"sv)
+ return QMediaFormat::AudioCodec::FLAC;
+
+ if (name == "x-alac"sv)
+ return QMediaFormat::AudioCodec::ALAC;
+
+ if (name == "x-true-hd"sv)
+ return QMediaFormat::AudioCodec::DolbyTrueHD;
+
+ if (name == "x-vorbis"sv)
+ return QMediaFormat::AudioCodec::Vorbis;
+
+ if (name == "x-opus"sv)
+ return QMediaFormat::AudioCodec::Opus;
+
+ if (name == "x-wav"sv)
+ return QMediaFormat::AudioCodec::Wave;
+
+ if (name == "x-wma"sv)
+ return QMediaFormat::AudioCodec::WMA;
+
+ return QMediaFormat::AudioCodec::Unspecified;
+}
+
+QMediaFormat::VideoCodec QGstreamerFormatInfo::videoCodecForCaps(QGstStructureView structure)
+{
+ using namespace std::string_view_literals;
+ const char *name = structure.name().data();
+
+ if (!name || (strncmp(name, "video/", 6) != 0))
+ return QMediaFormat::VideoCodec::Unspecified;
+ name += 6;
+
+ if (name == "mpeg"sv) {
+ auto version = structure["mpegversion"].toInt();
+ if (version == 1)
+ return QMediaFormat::VideoCodec::MPEG1;
+ if (version == 2)
+ return QMediaFormat::VideoCodec::MPEG2;
+ if (version == 4)
+ return QMediaFormat::VideoCodec::MPEG4;
+ return QMediaFormat::VideoCodec::Unspecified;
+ }
+ if (name == "x-h264"sv)
+ return QMediaFormat::VideoCodec::H264;
+
+#if GST_CHECK_VERSION(1, 17, 0) // x265enc seems to be broken on 1.16 at least
+ if (name == "x-h265"sv)
+ return QMediaFormat::VideoCodec::H265;
+#endif
+
+ if (name == "x-vp8"sv)
+ return QMediaFormat::VideoCodec::VP8;
+
+ if (name == "x-vp9"sv)
+ return QMediaFormat::VideoCodec::VP9;
+
+ if (name == "x-av1"sv)
+ return QMediaFormat::VideoCodec::AV1;
+
+ if (name == "x-theora"sv)
+ return QMediaFormat::VideoCodec::Theora;
+
+ if (name == "x-jpeg"sv)
+ return QMediaFormat::VideoCodec::MotionJPEG;
+
+ if (name == "x-wmv"sv)
+ return QMediaFormat::VideoCodec::WMV;
+
+ return QMediaFormat::VideoCodec::Unspecified;
+}
+
+QMediaFormat::FileFormat QGstreamerFormatInfo::fileFormatForCaps(QGstStructureView structure)
+{
+ using namespace std::string_view_literals;
+ const char *name = structure.name().data();
+ if (!name)
+ return QMediaFormat::UnspecifiedFormat;
+
+ if (name == "video/x-ms-asf"sv)
+ return QMediaFormat::FileFormat::WMV;
+
+ if (name == "video/x-msvideo"sv)
+ return QMediaFormat::FileFormat::AVI;
+
+ if (name == "video/x-matroska"sv)
+ return QMediaFormat::FileFormat::Matroska;
+
+ if (name == "video/quicktime"sv) {
+ const char *variant = structure["variant"].toString();
+ if (!variant)
+ return QMediaFormat::FileFormat::QuickTime;
+ if (variant == "iso"sv)
+ return QMediaFormat::FileFormat::MPEG4;
+ }
+ if (name == "video/ogg"sv)
+ return QMediaFormat::FileFormat::Ogg;
+
+ if (name == "video/webm"sv)
+ return QMediaFormat::FileFormat::WebM;
+
+ if (name == "audio/x-m4a"sv)
+ return QMediaFormat::FileFormat::Mpeg4Audio;
+
+ if (name == "audio/x-wav"sv)
+ return QMediaFormat::FileFormat::Wave;
+
+ if (name == "audio/mpeg"sv) {
+ auto mpegversion = structure["mpegversion"].toInt();
+ if (mpegversion == 1) {
+ auto layer = structure["layer"];
+ if (!layer.isNull())
+ return QMediaFormat::FileFormat::MP3;
+ }
+ }
+
+ return QMediaFormat::UnspecifiedFormat;
+}
+
+
+QImageCapture::FileFormat QGstreamerFormatInfo::imageFormatForCaps(QGstStructureView structure)
+{
+ using namespace std::string_view_literals;
+ const char *name = structure.name().data();
+ if (!name)
+ return QImageCapture::UnspecifiedFormat;
+
+ if (name == "image/jpeg"sv)
+ return QImageCapture::JPEG;
+
+ if (name == "image/png"sv)
+ return QImageCapture::PNG;
+
+ if (name == "image/webp"sv)
+ return QImageCapture::WebP;
+
+ if (name == "image/tiff"sv)
+ return QImageCapture::Tiff;
+
+ return QImageCapture::UnspecifiedFormat;
+}
+
+static QPair<QList<QMediaFormat::AudioCodec>, QList<QMediaFormat::VideoCodec>> getCodecsList(bool decode)
+{
+ QList<QMediaFormat::AudioCodec> audio;
+ QList<QMediaFormat::VideoCodec> video;
+
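+ // Decoders consume encoded data on their sink pads while encoders
+ // produce it on their source pads, so probe the matching direction.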
+ GstPadDirection padDirection = decode ? GST_PAD_SINK : GST_PAD_SRC;
+
+ GList *elementList = gst_element_factory_list_get_elements(decode ? GST_ELEMENT_FACTORY_TYPE_DECODER : GST_ELEMENT_FACTORY_TYPE_ENCODER,
+ GST_RANK_MARGINAL);
+
+ for (GstElementFactory *factory :
+ QGstUtils::GListRangeAdaptor<GstElementFactory *>(elementList)) {
+ for (GstStaticPadTemplate *padTemplate :
+ QGstUtils::GListRangeAdaptor<GstStaticPadTemplate *>(
+ gst_element_factory_get_static_pad_templates(factory))) {
+ if (padTemplate->direction == padDirection) {
+ auto caps = QGstCaps(gst_static_caps_get(&padTemplate->static_caps), QGstCaps::HasRef);
+
+ for (int i = 0; i < caps.size(); i++) {
+ QGstStructureView structure = caps.at(i);
+ auto a = QGstreamerFormatInfo::audioCodecForCaps(structure);
+ if (a != QMediaFormat::AudioCodec::Unspecified && !audio.contains(a))
+ audio.append(a);
+ auto v = QGstreamerFormatInfo::videoCodecForCaps(structure);
+ if (v != QMediaFormat::VideoCodec::Unspecified && !video.contains(v))
+ video.append(v);
+ }
+ }
+ }
+ }
+ gst_plugin_feature_list_free(elementList);
+ return {audio, video};
+}
+
+
+QList<QGstreamerFormatInfo::CodecMap> QGstreamerFormatInfo::getMuxerList(bool demuxer,
+ QList<QMediaFormat::AudioCodec> supportedAudioCodecs,
+ QList<QMediaFormat::VideoCodec> supportedVideoCodecs)
+{
+ QList<QGstreamerFormatInfo::CodecMap> muxers;
+
+ GstPadDirection padDirection = demuxer ? GST_PAD_SINK : GST_PAD_SRC;
+
+ GList *elementList = gst_element_factory_list_get_elements(
+ demuxer ? GST_ELEMENT_FACTORY_TYPE_DEMUXER : GST_ELEMENT_FACTORY_TYPE_MUXER,
+ GST_RANK_MARGINAL);
+
+ for (GstElementFactory *factory :
+ QGstUtils::GListRangeAdaptor<GstElementFactory *>(elementList)) {
+ QList<QMediaFormat::FileFormat> fileFormats;
+
+ for (GstStaticPadTemplate *padTemplate :
+ QGstUtils::GListRangeAdaptor<GstStaticPadTemplate *>(
+ gst_element_factory_get_static_pad_templates(factory))) {
+
+ if (padTemplate->direction == padDirection) {
+ auto caps = QGstCaps(gst_static_caps_get(&padTemplate->static_caps), QGstCaps::HasRef);
+
+ for (int i = 0; i < caps.size(); i++) {
+ QGstStructureView structure = caps.at(i);
+ auto fmt = fileFormatForCaps(structure);
+ if (fmt != QMediaFormat::UnspecifiedFormat)
+ fileFormats.append(fmt);
+ }
+ }
+ }
+ if (fileFormats.isEmpty())
+ continue;
+
+ QList<QMediaFormat::AudioCodec> audioCodecs;
+ QList<QMediaFormat::VideoCodec> videoCodecs;
+
+ for (GstStaticPadTemplate *padTemplate :
+ QGstUtils::GListRangeAdaptor<GstStaticPadTemplate *>(
+ gst_element_factory_get_static_pad_templates(factory))) {
+
+ // check the other side for supported inputs/outputs
+ if (padTemplate->direction != padDirection) {
+ auto caps = QGstCaps(gst_static_caps_get(&padTemplate->static_caps), QGstCaps::HasRef);
+
+ bool acceptsRawAudio = false;
+ for (int i = 0; i < caps.size(); i++) {
+ QGstStructureView structure = caps.at(i);
+ if (structure.name() == "audio/x-raw")
+ acceptsRawAudio = true;
+ auto audio = audioCodecForCaps(structure);
+ if (audio != QMediaFormat::AudioCodec::Unspecified && supportedAudioCodecs.contains(audio))
+ audioCodecs.append(audio);
+ auto video = videoCodecForCaps(structure);
+ if (video != QMediaFormat::VideoCodec::Unspecified && supportedVideoCodecs.contains(video))
+ videoCodecs.append(video);
+ }
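+ // Formats like MP3, FLAC and WAVE are written by elements that accept
+ // raw audio and encode it themselves, so map them to their codec here.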
+ if (acceptsRawAudio && fileFormats.size() == 1) {
+ switch (fileFormats.at(0)) {
+ case QMediaFormat::Mpeg4Audio:
+ default:
+ break;
+ case QMediaFormat::MP3:
+ audioCodecs.append(QMediaFormat::AudioCodec::MP3);
+ break;
+ case QMediaFormat::FLAC:
+ audioCodecs.append(QMediaFormat::AudioCodec::FLAC);
+ break;
+ case QMediaFormat::Wave:
+ audioCodecs.append(QMediaFormat::AudioCodec::Wave);
+ break;
+ }
+ }
+ }
+ }
+ if (!audioCodecs.isEmpty() || !videoCodecs.isEmpty()) {
+ for (auto f : std::as_const(fileFormats)) {
+ muxers.append({f, audioCodecs, videoCodecs});
+ if (f == QMediaFormat::MPEG4 && !fileFormats.contains(QMediaFormat::Mpeg4Audio)) {
+ muxers.append({QMediaFormat::Mpeg4Audio, audioCodecs, {}});
+ if (audioCodecs.contains(QMediaFormat::AudioCodec::AAC))
+ muxers.append({QMediaFormat::AAC, { QMediaFormat::AudioCodec::AAC }, {}});
+ } else if (f == QMediaFormat::WMV && !fileFormats.contains(QMediaFormat::WMA)) {
+ muxers.append({QMediaFormat::WMA, audioCodecs, {}});
+ }
+ }
+ }
+ }
+ gst_plugin_feature_list_free(elementList);
+ return muxers;
+}
+
+static QList<QImageCapture::FileFormat> getImageFormatList()
+{
+ QSet<QImageCapture::FileFormat> formats;
+
+ GList *elementList = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_ENCODER,
+ GST_RANK_MARGINAL);
+
+ for (GstElementFactory *factory :
+ QGstUtils::GListRangeAdaptor<GstElementFactory *>(elementList)) {
+
+ for (GstStaticPadTemplate *padTemplate :
+ QGstUtils::GListRangeAdaptor<GstStaticPadTemplate *>(
+ gst_element_factory_get_static_pad_templates(factory))) {
+ if (padTemplate->direction == GST_PAD_SRC) {
+ QGstCaps caps = QGstCaps(gst_static_caps_get(&padTemplate->static_caps), QGstCaps::HasRef);
+
+ for (int i = 0; i < caps.size(); i++) {
+ QGstStructureView structure = caps.at(i);
+ auto f = QGstreamerFormatInfo::imageFormatForCaps(structure);
+ if (f != QImageCapture::UnspecifiedFormat) {
+// qDebug() << structure.toString() << f;
+ formats.insert(f);
+ }
+ }
+ }
+ }
+ }
+ gst_plugin_feature_list_free(elementList);
+ return formats.values();
+}
+
+#if 0
+static void dumpAudioCodecs(const QList<QMediaFormat::AudioCodec> &codecList)
+{
+ qDebug() << "Audio codecs:";
+ for (const auto &c : codecList)
+ qDebug() << " " << QMediaFormat::audioCodecName(c);
+}
+
+static void dumpVideoCodecs(const QList<QMediaFormat::VideoCodec> &codecList)
+{
+ qDebug() << "Video codecs:";
+ for (const auto &c : codecList)
+ qDebug() << " " << QMediaFormat::videoCodecName(c);
+}
+
+static void dumpMuxers(const QList<QPlatformMediaFormatInfo::CodecMap> &muxerList)
+{
+ for (const auto &m : muxerList) {
+ qDebug() << " " << QMediaFormat::fileFormatName(m.format);
+ qDebug() << " Audio";
+ for (const auto &a : m.audio)
+ qDebug() << " " << QMediaFormat::audioCodecName(a);
+ qDebug() << " Video";
+ for (const auto &v : m.video)
+ qDebug() << " " << QMediaFormat::videoCodecName(v);
+ }
+
+}
+#endif
+
+QGstreamerFormatInfo::QGstreamerFormatInfo()
+{
+ auto codecs = getCodecsList(/*decode = */ true);
+ decoders = getMuxerList(true, codecs.first, codecs.second);
+
+ codecs = getCodecsList(/*decode = */ false);
+ encoders = getMuxerList(/* demuxer = */false, codecs.first, codecs.second);
+// dumpAudioCodecs(codecs.first);
+// dumpVideoCodecs(codecs.second);
+// dumpMuxers(encoders);
+
+ imageFormats = getImageFormatList();
+}
+
+QGstreamerFormatInfo::~QGstreamerFormatInfo() = default;
+
+QGstCaps QGstreamerFormatInfo::formatCaps(const QMediaFormat &f) const
+{
+ auto format = f.fileFormat();
+ Q_ASSERT(format != QMediaFormat::UnspecifiedFormat);
+
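+ // Indexed by QMediaFormat::FileFormat; entries must stay in enum order.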
+ const char *capsForFormat[QMediaFormat::LastFileFormat + 1] = {
+ "video/x-ms-asf", // WMV
+ "video/x-msvideo", // AVI
+ "video/x-matroska", // Matroska
+ "video/quicktime, variant=(string)iso", // MPEG4
+ "video/ogg", // Ogg
+ "video/quicktime", // QuickTime
+ "video/webm", // WebM
+ "video/quicktime, variant=(string)iso", // Mpeg4Audio is the same is mp4...
+ "video/quicktime, variant=(string)iso", // AAC is also an MP4 container
+ "video/x-ms-asf", // WMA, same as WMV
+ "audio/mpeg, mpegversion=(int)1, layer=(int)3", // MP3
+ "audio/x-flac", // FLAC
+ "audio/x-wav" // Wave
+ };
+ return QGstCaps(gst_caps_from_string(capsForFormat[format]), QGstCaps::HasRef);
+}
+
+QGstCaps QGstreamerFormatInfo::audioCaps(const QMediaFormat &f) const
+{
+ auto codec = f.audioCodec();
+ if (codec == QMediaFormat::AudioCodec::Unspecified)
+ return {};
+
+ const char *capsForCodec[(int)QMediaFormat::AudioCodec::LastAudioCodec + 1] = {
+ "audio/mpeg, mpegversion=(int)1, layer=(int)3", // MP3
+ "audio/mpeg, mpegversion=(int)4", // AAC
+ "audio/x-ac3", // AC3
+ "audio/x-eac3", // EAC3
+ "audio/x-flac", // FLAC
+ "audio/x-true-hd", // DolbyTrueHD
+ "audio/x-opus", // Opus
+ "audio/x-vorbis", // Vorbis
+ "audio/x-raw", // WAVE
+ "audio/x-wma", // WMA
+ "audio/x-alac", // ALAC
+ };
+ return QGstCaps(gst_caps_from_string(capsForCodec[(int)codec]), QGstCaps::HasRef);
+}
+
+QGstCaps QGstreamerFormatInfo::videoCaps(const QMediaFormat &f) const
+{
+ auto codec = f.videoCodec();
+ if (codec == QMediaFormat::VideoCodec::Unspecified)
+ return {};
+
+ const char *capsForCodec[(int)QMediaFormat::VideoCodec::LastVideoCodec + 1] = {
+ "video/mpeg, mpegversion=(int)1", // MPEG1,
+ "video/mpeg, mpegversion=(int)2", // MPEG2,
+ "video/mpeg, mpegversion=(int)4", // MPEG4,
+ "video/x-h264", // H264,
+ "video/x-h265", // H265,
+ "video/x-vp8", // VP8,
+ "video/x-vp9", // VP9,
+ "video/x-av1", // AV1,
+ "video/x-theora", // Theora,
+ "audio/x-wmv", // WMV
+ "video/x-jpeg", // MotionJPEG,
+ };
+ return QGstCaps(gst_caps_from_string(capsForCodec[(int)codec]), QGstCaps::HasRef);
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/qgstreamerformatinfo_p.h b/src/plugins/multimedia/gstreamer/qgstreamerformatinfo_p.h
new file mode 100644
index 000000000..bba10edb9
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/qgstreamerformatinfo_p.h
@@ -0,0 +1,44 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTREAMERFORMATINFO_H
+#define QGSTREAMERFORMATINFO_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediaformatinfo_p.h>
+#include <qlist.h>
+#include <common/qgst_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QGstreamerFormatInfo : public QPlatformMediaFormatInfo
+{
+public:
+ QGstreamerFormatInfo();
+ ~QGstreamerFormatInfo();
+
+ QGstCaps formatCaps(const QMediaFormat &f) const;
+ QGstCaps audioCaps(const QMediaFormat &f) const;
+ QGstCaps videoCaps(const QMediaFormat &f) const;
+
+ static QMediaFormat::AudioCodec audioCodecForCaps(QGstStructureView structure);
+ static QMediaFormat::VideoCodec videoCodecForCaps(QGstStructureView structure);
+ static QMediaFormat::FileFormat fileFormatForCaps(QGstStructureView structure);
+ static QImageCapture::FileFormat imageFormatForCaps(QGstStructureView structure);
+
+ QList<CodecMap> getMuxerList(bool demuxer, QList<QMediaFormat::AudioCodec> audioCodecs, QList<QMediaFormat::VideoCodec> videoCodecs);
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/gstreamer/qgstreamerintegration.cpp b/src/plugins/multimedia/gstreamer/qgstreamerintegration.cpp
new file mode 100644
index 000000000..87c514f2e
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/qgstreamerintegration.cpp
@@ -0,0 +1,242 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <qgstreamerintegration_p.h>
+#include <qgstreamerformatinfo_p.h>
+#include <qgstreamervideodevices_p.h>
+#include <audio/qgstreameraudiodevice_p.h>
+#include <audio/qgstreameraudiodecoder_p.h>
+#include <common/qgstreameraudioinput_p.h>
+#include <common/qgstreameraudiooutput_p.h>
+#include <common/qgstreamermediaplayer_p.h>
+#include <common/qgstreamervideosink_p.h>
+#include <mediacapture/qgstreamercamera_p.h>
+#include <mediacapture/qgstreamerimagecapture_p.h>
+#include <mediacapture/qgstreamermediacapture_p.h>
+#include <mediacapture/qgstreamermediaencoder_p.h>
+
+#include <QtCore/qloggingcategory.h>
+#include <QtMultimedia/private/qmediaplayer_p.h>
+#include <QtMultimedia/private/qmediacapturesession_p.h>
+#include <QtMultimedia/private/qcameradevice_p.h>
+
+QT_BEGIN_NAMESPACE
+
+static thread_local bool inCustomCameraConstruction = false;
+static thread_local QGstElement pendingCameraElement{};
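+// Hand-off to createCamera(): constructing the QCamera below re-enters the
+// integration, which consumes these thread-local values.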
+
+QGStreamerPlatformSpecificInterfaceImplementation::
+ ~QGStreamerPlatformSpecificInterfaceImplementation() = default;
+
+QAudioDevice QGStreamerPlatformSpecificInterfaceImplementation::makeCustomGStreamerAudioInput(
+ const QByteArray &gstreamerPipeline)
+{
+ return qMakeCustomGStreamerAudioInput(gstreamerPipeline);
+}
+
+QAudioDevice QGStreamerPlatformSpecificInterfaceImplementation::makeCustomGStreamerAudioOutput(
+ const QByteArray &gstreamerPipeline)
+{
+ return qMakeCustomGStreamerAudioOutput(gstreamerPipeline);
+}
+
+QCamera *QGStreamerPlatformSpecificInterfaceImplementation::makeCustomGStreamerCamera(
+ const QByteArray &gstreamerPipeline, QObject *parent)
+{
+ QCameraDevicePrivate *info = new QCameraDevicePrivate;
+ info->id = gstreamerPipeline;
+ QCameraDevice device = info->create();
+
+ inCustomCameraConstruction = true;
+ auto guard = qScopeGuard([] {
+ inCustomCameraConstruction = false;
+ });
+
+ return new QCamera(device, parent);
+}
+
+QCamera *
+QGStreamerPlatformSpecificInterfaceImplementation::makeCustomGStreamerCamera(GstElement *element,
+ QObject *parent)
+{
+ QCameraDevicePrivate *info = new QCameraDevicePrivate;
+ info->id = "Custom Camera from GstElement";
+ QCameraDevice device = info->create();
+
+ pendingCameraElement = QGstElement{
+ element,
+ QGstElement::NeedsRef,
+ };
+
+ inCustomCameraConstruction = true;
+ auto guard = qScopeGuard([] {
+ inCustomCameraConstruction = false;
+ Q_ASSERT(!pendingCameraElement);
+ });
+
+ return new QCamera(device, parent);
+}
+
+GstPipeline *QGStreamerPlatformSpecificInterfaceImplementation::gstPipeline(QMediaPlayer *player)
+{
+ auto *priv = reinterpret_cast<QMediaPlayerPrivate *>(QMediaPlayerPrivate::get(player));
+ if (!priv)
+ return nullptr;
+
+ QGstreamerMediaPlayer *gstreamerPlayer = dynamic_cast<QGstreamerMediaPlayer *>(priv->control);
+ return gstreamerPlayer ? gstreamerPlayer->pipeline().pipeline() : nullptr;
+}
+
+GstPipeline *
+QGStreamerPlatformSpecificInterfaceImplementation::gstPipeline(QMediaCaptureSession *session)
+{
+ auto *priv = QMediaCaptureSessionPrivate::get(session);
+ if (!priv)
+ return nullptr;
+
+ QGstreamerMediaCapture *gstreamerCapture =
+ dynamic_cast<QGstreamerMediaCapture *>(priv->captureSession.get());
+ return gstreamerCapture ? gstreamerCapture->pipeline().pipeline() : nullptr;
+}
+
+Q_LOGGING_CATEGORY(lcGstreamer, "qt.multimedia.gstreamer")
+
+namespace {
+
+void rankDownPlugin(GstRegistry *reg, const char *name)
+{
+ QGstPluginFeatureHandle pluginFeature{
+ gst_registry_lookup_feature(reg, name),
+ QGstPluginFeatureHandle::HasRef,
+ };
+ if (pluginFeature)
+ gst_plugin_feature_set_rank(pluginFeature.get(), GST_RANK_PRIMARY - 1);
+}
+
+// https://gstreamer.freedesktop.org/documentation/vaapi/index.html
+constexpr auto vaapiPluginNames = {
+ "vaapidecodebin", "vaapih264dec", "vaapih264enc", "vaapih265dec",
+ "vaapijpegdec", "vaapijpegenc", "vaapimpeg2dec", "vaapipostproc",
+ "vaapisink", "vaapivp8dec", "vaapivp9dec",
+};
+
+// https://gstreamer.freedesktop.org/documentation/va/index.html
+constexpr auto vaPluginNames = {
+ "vaav1dec", "vacompositor", "vadeinterlace", "vah264dec", "vah264enc", "vah265dec",
+ "vajpegdec", "vampeg2dec", "vapostproc", "vavp8dec", "vavp9dec",
+};
+
+// https://gstreamer.freedesktop.org/documentation/nvcodec/index.html
+constexpr auto nvcodecPluginNames = {
+ "cudaconvert", "cudaconvertscale", "cudadownload", "cudaipcsink", "cudaipcsrc",
+ "cudascale", "cudaupload", "nvautogpuh264enc", "nvautogpuh265enc", "nvav1dec",
+ "nvcudah264enc", "nvcudah265enc", "nvd3d11h264enc", "nvd3d11h265enc", "nvh264dec",
+ "nvh264enc", "nvh265dec", "nvh265enc", "nvjpegdec", "nvjpegenc",
+ "nvmpeg2videodec", "nvmpeg4videodec", "nvmpegvideodec", "nvvp8dec", "nvvp9dec",
+};
+
+} // namespace
+
+QGstreamerIntegration::QGstreamerIntegration()
+ : QPlatformMediaIntegration(QLatin1String("gstreamer"))
+{
+ gst_init(nullptr, nullptr);
+ qCDebug(lcGstreamer) << "Using gstreamer version: " << gst_version_string();
+
+ GstRegistry *reg = gst_registry_get();
+
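+ // Demote selected plugin sets one step below GST_RANK_PRIMARY so that
+ // autoplugging prefers other elements: vaapi on GStreamer < 1.22, va and
+ // nvcodec only when requested through the environment variables below.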
+ if constexpr (!GST_CHECK_VERSION(1, 22, 0)) {
+ for (const char *name : vaapiPluginNames)
+ rankDownPlugin(reg, name);
+ }
+
+ if (qEnvironmentVariableIsSet("QT_GSTREAMER_DISABLE_VA")) {
+ for (const char *name : vaPluginNames)
+ rankDownPlugin(reg, name);
+ }
+
+ if (qEnvironmentVariableIsSet("QT_GSTREAMER_DISABLE_NVCODEC")) {
+ for (const char *name : nvcodecPluginNames)
+ rankDownPlugin(reg, name);
+ }
+}
+
+QPlatformMediaFormatInfo *QGstreamerIntegration::createFormatInfo()
+{
+ return new QGstreamerFormatInfo();
+}
+
+QPlatformVideoDevices *QGstreamerIntegration::createVideoDevices()
+{
+ return new QGstreamerVideoDevices(this);
+}
+
+const QGstreamerFormatInfo *QGstreamerIntegration::gstFormatsInfo()
+{
+ return static_cast<const QGstreamerFormatInfo *>(formatInfo());
+}
+
+QMaybe<QPlatformAudioDecoder *> QGstreamerIntegration::createAudioDecoder(QAudioDecoder *decoder)
+{
+ return QGstreamerAudioDecoder::create(decoder);
+}
+
+QMaybe<QPlatformMediaCaptureSession *> QGstreamerIntegration::createCaptureSession()
+{
+ return QGstreamerMediaCapture::create();
+}
+
+QMaybe<QPlatformMediaPlayer *> QGstreamerIntegration::createPlayer(QMediaPlayer *player)
+{
+ return QGstreamerMediaPlayer::create(player);
+}
+
+QMaybe<QPlatformCamera *> QGstreamerIntegration::createCamera(QCamera *camera)
+{
+ if (inCustomCameraConstruction) {
+ QGstElement element = std::exchange(pendingCameraElement, {});
+ return element ? new QGstreamerCustomCamera{ camera, std::move(element) }
+ : new QGstreamerCustomCamera{ camera };
+ }
+
+ return QGstreamerCamera::create(camera);
+}
+
+QMaybe<QPlatformMediaRecorder *> QGstreamerIntegration::createRecorder(QMediaRecorder *recorder)
+{
+ return new QGstreamerMediaEncoder(recorder);
+}
+
+QMaybe<QPlatformImageCapture *> QGstreamerIntegration::createImageCapture(QImageCapture *imageCapture)
+{
+ return QGstreamerImageCapture::create(imageCapture);
+}
+
+QMaybe<QPlatformVideoSink *> QGstreamerIntegration::createVideoSink(QVideoSink *sink)
+{
+ return new QGstreamerVideoSink(sink);
+}
+
+QMaybe<QPlatformAudioInput *> QGstreamerIntegration::createAudioInput(QAudioInput *q)
+{
+ return QGstreamerAudioInput::create(q);
+}
+
+QMaybe<QPlatformAudioOutput *> QGstreamerIntegration::createAudioOutput(QAudioOutput *q)
+{
+ return QGstreamerAudioOutput::create(q);
+}
+
+GstDevice *QGstreamerIntegration::videoDevice(const QByteArray &id)
+{
+ const auto devices = videoDevices();
+ return devices ? static_cast<QGstreamerVideoDevices *>(devices)->videoDevice(id) : nullptr;
+}
+
+QAbstractPlatformSpecificInterface *QGstreamerIntegration::platformSpecificInterface()
+{
+ return &m_platformSpecificImplementation;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/qgstreamerintegration_p.h b/src/plugins/multimedia/gstreamer/qgstreamerintegration_p.h
new file mode 100644
index 000000000..229bbd48e
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/qgstreamerintegration_p.h
@@ -0,0 +1,79 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTREAMERINTEGRATION_H
+#define QGSTREAMERINTEGRATION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/private/qplatformmediaintegration_p.h>
+#include <QtMultimedia/private/qgstreamer_platformspecificinterface_p.h>
+
+#include <gst/gst.h>
+
+QT_BEGIN_NAMESPACE
+
+class QGstreamerFormatInfo;
+
+class QGStreamerPlatformSpecificInterfaceImplementation : public QGStreamerPlatformSpecificInterface
+{
+public:
+ ~QGStreamerPlatformSpecificInterfaceImplementation() override;
+
+ QAudioDevice makeCustomGStreamerAudioInput(const QByteArray &gstreamerPipeline) override;
+ QAudioDevice makeCustomGStreamerAudioOutput(const QByteArray &gstreamerPipeline) override;
+ QCamera *makeCustomGStreamerCamera(const QByteArray &gstreamerPipeline,
+ QObject *parent) override;
+
+ QCamera *makeCustomGStreamerCamera(GstElement *, QObject *parent) override;
+
+ GstPipeline *gstPipeline(QMediaPlayer *) override;
+ GstPipeline *gstPipeline(QMediaCaptureSession *) override;
+};
+
+class QGstreamerIntegration : public QPlatformMediaIntegration
+{
+public:
+ QGstreamerIntegration();
+
+ static QGstreamerIntegration *instance()
+ {
+ return static_cast<QGstreamerIntegration *>(QPlatformMediaIntegration::instance());
+ }
+
+ QMaybe<QPlatformAudioDecoder *> createAudioDecoder(QAudioDecoder *decoder) override;
+ QMaybe<QPlatformMediaCaptureSession *> createCaptureSession() override;
+ QMaybe<QPlatformMediaPlayer *> createPlayer(QMediaPlayer *player) override;
+ QMaybe<QPlatformCamera *> createCamera(QCamera *) override;
+ QMaybe<QPlatformMediaRecorder *> createRecorder(QMediaRecorder *) override;
+ QMaybe<QPlatformImageCapture *> createImageCapture(QImageCapture *) override;
+
+ QMaybe<QPlatformVideoSink *> createVideoSink(QVideoSink *sink) override;
+
+ QMaybe<QPlatformAudioInput *> createAudioInput(QAudioInput *) override;
+ QMaybe<QPlatformAudioOutput *> createAudioOutput(QAudioOutput *) override;
+
+ const QGstreamerFormatInfo *gstFormatsInfo();
+ GstDevice *videoDevice(const QByteArray &id);
+
+ QAbstractPlatformSpecificInterface *platformSpecificInterface() override;
+
+protected:
+ QPlatformMediaFormatInfo *createFormatInfo() override;
+ QPlatformVideoDevices *createVideoDevices() override;
+
+ QGStreamerPlatformSpecificInterfaceImplementation m_platformSpecificImplementation;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/gstreamer/qgstreamerplugin.cpp b/src/plugins/multimedia/gstreamer/qgstreamerplugin.cpp
new file mode 100644
index 000000000..66ad7f712
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/qgstreamerplugin.cpp
@@ -0,0 +1,28 @@
+// Copyright (C) 2024 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QtMultimedia/private/qplatformmediaplugin_p.h>
+
+#include <qgstreamerintegration_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QGstreamerMediaPlugin : public QPlatformMediaPlugin
+{
+ Q_OBJECT
+ Q_PLUGIN_METADATA(IID QPlatformMediaPlugin_iid FILE "gstreamer.json")
+
+public:
+ QGstreamerMediaPlugin() = default;
+
+ QPlatformMediaIntegration* create(const QString &name) override
+ {
+ if (name == u"gstreamer")
+ return new QGstreamerIntegration;
+ return nullptr;
+ }
+};
+
+QT_END_NAMESPACE
+
+#include "qgstreamerplugin.moc"
diff --git a/src/plugins/multimedia/gstreamer/qgstreamervideodevices.cpp b/src/plugins/multimedia/gstreamer/qgstreamervideodevices.cpp
new file mode 100644
index 000000000..78ac16eb4
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/qgstreamervideodevices.cpp
@@ -0,0 +1,158 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qgstreamervideodevices_p.h"
+#include <QtMultimedia/qmediadevices.h>
+#include <QtMultimedia/private/qcameradevice_p.h>
+
+#include <common/qgst_p.h>
+#include <common/qgstutils_p.h>
+#include <common/qglist_helper_p.h>
+
+QT_BEGIN_NAMESPACE
+
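+// Bus watch for the device monitor: translates hot-plug add/remove messages
+// into updates of the video device list.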
+static gboolean deviceMonitorCallback(GstBus *, GstMessage *message, gpointer m)
+{
+ auto *manager = static_cast<QGstreamerVideoDevices *>(m);
+ QGstDeviceHandle device;
+
+ switch (GST_MESSAGE_TYPE(message)) {
+ case GST_MESSAGE_DEVICE_ADDED:
+ gst_message_parse_device_added(message, &device);
+ manager->addDevice(std::move(device));
+ break;
+ case GST_MESSAGE_DEVICE_REMOVED:
+ gst_message_parse_device_removed(message, &device);
+ manager->removeDevice(std::move(device));
+ break;
+ default:
+ break;
+ }
+
+ return G_SOURCE_CONTINUE;
+}
+
+QGstreamerVideoDevices::QGstreamerVideoDevices(QPlatformMediaIntegration *integration)
+ : QPlatformVideoDevices(integration),
+ m_deviceMonitor{
+ gst_device_monitor_new(),
+ }
+{
+ gst_device_monitor_add_filter(m_deviceMonitor.get(), "Video/Source", nullptr);
+
+ QGstBusHandle bus{
+ gst_device_monitor_get_bus(m_deviceMonitor.get()),
+ };
+ gst_bus_add_watch(bus.get(), deviceMonitorCallback, this);
+ gst_device_monitor_start(m_deviceMonitor.get());
+
+ GList *devices = gst_device_monitor_get_devices(m_deviceMonitor.get());
+
+ for (GstDevice *device : QGstUtils::GListRangeAdaptor<GstDevice *>(devices)) {
+ addDevice(QGstDeviceHandle{
+ device,
+ QGstDeviceHandle::HasRef,
+ });
+ }
+
+ g_list_free(devices);
+}
+
+QGstreamerVideoDevices::~QGstreamerVideoDevices()
+{
+ gst_device_monitor_stop(m_deviceMonitor.get());
+}
+
+QList<QCameraDevice> QGstreamerVideoDevices::videoDevices() const
+{
+ QList<QCameraDevice> devices;
+
+ for (const auto &device : m_videoSources) {
+ QCameraDevicePrivate *info = new QCameraDevicePrivate;
+
+ QGString desc{
+ gst_device_get_display_name(device.gstDevice.get()),
+ };
+ info->description = desc.toQString();
+ info->id = device.id;
+
+ QUniqueGstStructureHandle properties{
+ gst_device_get_properties(device.gstDevice.get()),
+ };
+ if (properties) {
+ QGstStructureView view{ properties };
+ auto def = view["is-default"].toBool();
+ info->isDefault = def && *def;
+ }
+
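+ // Keep the default device at the front of the list.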
+ if (info->isDefault)
+ devices.prepend(info->create());
+ else
+ devices.append(info->create());
+
+ auto caps = QGstCaps(gst_device_get_caps(device.gstDevice.get()), QGstCaps::HasRef);
+ if (!caps.isNull()) {
+ QList<QCameraFormat> formats;
+ QSet<QSize> photoResolutions;
+
+ int size = caps.size();
+ for (int i = 0; i < size; ++i) {
+ auto cap = caps.at(i);
+
+ QSize resolution = cap.resolution();
+ if (!resolution.isValid())
+ continue;
+
+ auto pixelFormat = cap.pixelFormat();
+ auto frameRate = cap.frameRateRange();
+
+ auto *f = new QCameraFormatPrivate{ QSharedData(), pixelFormat, resolution,
+ frameRate.min, frameRate.max };
+ formats << f->create();
+ photoResolutions.insert(resolution);
+ }
+ info->videoFormats = formats;
+ // ### sort resolutions?
+ info->photoResolutions = photoResolutions.values();
+ }
+ }
+ return devices;
+}
+
+void QGstreamerVideoDevices::addDevice(QGstDeviceHandle device)
+{
+ Q_ASSERT(gst_device_has_classes(device.get(), "Video/Source"));
+
+ auto it = std::find_if(m_videoSources.begin(), m_videoSources.end(),
+ [&](const QGstRecordDevice &a) { return a.gstDevice == device; });
+
+ if (it != m_videoSources.end())
+ return;
+
+ m_videoSources.push_back(QGstRecordDevice{
+ std::move(device),
+ QByteArray::number(m_idGenerator),
+ });
+ emit videoInputsChanged();
+ m_idGenerator++;
+}
+
+void QGstreamerVideoDevices::removeDevice(QGstDeviceHandle device)
+{
+ auto it = std::find_if(m_videoSources.begin(), m_videoSources.end(),
+ [&](const QGstRecordDevice &a) { return a.gstDevice == device; });
+
+ if (it != m_videoSources.end()) {
+ m_videoSources.erase(it);
+ emit videoInputsChanged();
+ }
+}
+
+GstDevice *QGstreamerVideoDevices::videoDevice(const QByteArray &id) const
+{
+ auto it = std::find_if(m_videoSources.begin(), m_videoSources.end(),
+ [&](const QGstRecordDevice &a) { return a.id == id; });
+ return it != m_videoSources.end() ? it->gstDevice.get() : nullptr;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/qgstreamervideodevices_p.h b/src/plugins/multimedia/gstreamer/qgstreamervideodevices_p.h
new file mode 100644
index 000000000..a321ae66b
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/qgstreamervideodevices_p.h
@@ -0,0 +1,54 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QGSTREAMERMEDIADEVICES_H
+#define QGSTREAMERMEDIADEVICES_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformvideodevices_p.h>
+#include <gst/gst.h>
+#include <qaudiodevice.h>
+#include <vector>
+
+#include <common/qgst_handle_types_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QGstreamerVideoDevices : public QPlatformVideoDevices
+{
+public:
+ explicit QGstreamerVideoDevices(QPlatformMediaIntegration *integration);
+ ~QGstreamerVideoDevices();
+
+ QList<QCameraDevice> videoDevices() const override;
+ GstDevice *videoDevice(const QByteArray &id) const;
+
+ void addDevice(QGstDeviceHandle);
+ void removeDevice(QGstDeviceHandle);
+
+private:
+ struct QGstRecordDevice
+ {
+ QGstDeviceHandle gstDevice;
+ QByteArray id;
+ };
+
+ quint64 m_idGenerator = 0;
+ std::vector<QGstRecordDevice> m_videoSources;
+
+ QGstDeviceMonitorHandle m_deviceMonitor;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/qnx/CMakeLists.txt b/src/plugins/multimedia/qnx/CMakeLists.txt
new file mode 100644
index 000000000..e1ac0ffa3
--- /dev/null
+++ b/src/plugins/multimedia/qnx/CMakeLists.txt
@@ -0,0 +1,39 @@
+# Copyright (C) 2022 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+
+qt_internal_add_plugin(QQnxMediaPlugin
+ OUTPUT_NAME qnxmediaplugin
+ PLUGIN_TYPE multimedia
+ SOURCES
+ camera/qqnxcamera.cpp camera/qqnxcamera_p.h
+ camera/qqnxplatformcamera.cpp camera/qqnxplatformcamera_p.h
+ camera/qqnxcameraframebuffer.cpp camera/qqnxcameraframebuffer_p.h
+ camera/qqnximagecapture.cpp camera/qqnximagecapture_p.h
+ common/qqnxaudioinput.cpp common/qqnxaudioinput_p.h
+ common/qqnxaudiooutput.cpp common/qqnxaudiooutput_p.h
+ common/qqnxmediaeventthread.cpp common/qqnxmediaeventthread_p.h
+ common/qqnxwindowgrabber.cpp common/qqnxwindowgrabber_p.h
+ capture/qqnxaudiorecorder.cpp capture/qqnxaudiorecorder_p.h
+ capture/qqnxmediacapturesession.cpp capture/qqnxmediacapturesession_p.h
+ capture/qqnxmediarecorder.cpp capture/qqnxmediarecorder_p.h
+ mediaplayer/qqnxmediaplayer.cpp mediaplayer/qqnxmediaplayer_p.h
+ mediaplayer/qqnxmediametadata.cpp mediaplayer/qqnxmediametadata_p.h
+ mediaplayer/qqnxvideosink.cpp mediaplayer/qqnxvideosink_p.h
+ mediaplayer/qqnxmediautil.cpp mediaplayer/qqnxmediautil_p.h
+ qqnxformatinfo.cpp qqnxformatinfo_p.h
+ qqnxmediaintegration.cpp qqnxmediaintegration_p.h
+ qqnxvideodevices.cpp qqnxvideodevices_p.h
+ INCLUDE_DIRECTORIES
+ audio
+ camera
+ capture
+ common
+ mediaplayer
+ LIBRARIES
+ Qt::MultimediaPrivate
+ Qt::CorePrivate
+ MMRenderer::MMRenderer
+ strm
+ camapi
+)
diff --git a/src/plugins/multimedia/qnx/camera/qqnxcamera.cpp b/src/plugins/multimedia/qnx/camera/qqnxcamera.cpp
new file mode 100644
index 000000000..6976221bd
--- /dev/null
+++ b/src/plugins/multimedia/qnx/camera/qqnxcamera.cpp
@@ -0,0 +1,820 @@
+// Copyright (C) 2016 Research In Motion
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qqnxcamera_p.h"
+#include "qqnxcameraframebuffer_p.h"
+#include "qqnxmediacapturesession_p.h"
+#include "qqnxvideosink_p.h"
+
+#include <qcameradevice.h>
+#include <qmediadevices.h>
+
+#include <private/qmediastoragelocation_p.h>
+
+QDebug &operator<<(QDebug &d, const QQnxCamera::VideoFormat &f)
+{
+ d << "VideoFormat - width=" << f.width
+ << "height=" << f.height
+ << "rotation=" << f.rotation
+ << "frameRate=" << f.frameRate
+ << "frameType=" << f.frameType;
+
+ return d;
+}
+
+static QString statusToString(camera_devstatus_t status)
+{
+ switch (status) {
+ case CAMERA_STATUS_DISCONNECTED:
+ return QStringLiteral("No user is connected to the camera");
+ case CAMERA_STATUS_POWERDOWN:
+ return QStringLiteral("Power down");
+ case CAMERA_STATUS_VIDEOVF:
+ return QStringLiteral("The video viewfinder has started");
+ case CAMERA_STATUS_CAPTURE_ABORTED:
+ return QStringLiteral("The capture of a still image failed and was aborted");
+ case CAMERA_STATUS_FILESIZE_WARNING:
+ return QStringLiteral("Time-remaining threshold has been exceeded");
+ case CAMERA_STATUS_FOCUS_CHANGE:
+ return QStringLiteral("The focus has changed on the camera");
+ case CAMERA_STATUS_RESOURCENOTAVAIL:
+ return QStringLiteral("The camera is about to free resources");
+ case CAMERA_STATUS_VIEWFINDER_ERROR:
+ return QStringLiteral(" An unexpected error was encountered while the "
+ "viewfinder was active");
+ case CAMERA_STATUS_MM_ERROR:
+ return QStringLiteral("The recording has stopped due to a memory error or multimedia "
+ "framework error");
+ case CAMERA_STATUS_FILESIZE_ERROR:
+ return QStringLiteral("A file has exceeded the maximum size.");
+ case CAMERA_STATUS_NOSPACE_ERROR:
+ return QStringLiteral("Not enough disk space");
+ case CAMERA_STATUS_BUFFER_UNDERFLOW:
+ return QStringLiteral("The viewfinder is out of buffers");
+ default:
+ break;
+ }
+
+ return {};
+}
+
+QT_BEGIN_NAMESPACE
+
+QQnxCamera::QQnxCamera(camera_unit_t unit, QObject *parent)
+ : QObject(parent)
+ , m_cameraUnit(unit)
+{
+ if (!m_handle.open(m_cameraUnit, CAMERA_MODE_RW)) {
+ qWarning("QQnxCamera: Failed to open camera (0x%x)", m_handle.lastError());
+ return;
+ }
+
+ if (camera_set_vf_mode(m_handle.get(), CAMERA_VFMODE_VIDEO) != CAMERA_EOK) {
+ qWarning("QQnxCamera: unable to configure viewfinder mode");
+ return;
+ }
+
+ if (camera_set_vf_property(m_handle.get(), CAMERA_IMGPROP_CREATEWINDOW, 0,
+ CAMERA_IMGPROP_RENDERTOWINDOW, 0) != CAMERA_EOK) {
+ qWarning("QQnxCamera: failed to set camera properties");
+ return;
+ }
+
+ updateZoomLimits();
+ updateSupportedWhiteBalanceValues();
+
+ m_valid = true;
+}
+
+QQnxCamera::~QQnxCamera()
+{
+ stop();
+}
+
+camera_unit_t QQnxCamera::unit() const
+{
+ return m_cameraUnit;
+}
+
+QString QQnxCamera::name() const
+{
+ char name[CAMERA_LOCATION_NAMELEN];
+
+ if (camera_get_location_property(m_cameraUnit,
+ CAMERA_LOCATION_NAME, &name, CAMERA_LOCATION_END) != CAMERA_EOK) {
+ qWarning("QQnxCamera: unable to obtain camera name");
+ return {};
+ }
+
+ return QString::fromUtf8(name);
+}
+
+bool QQnxCamera::isValid() const
+{
+ return m_valid;
+}
+
+bool QQnxCamera::isActive() const
+{
+ return m_handle.isOpen() && m_viewfinderActive;
+}
+
+void QQnxCamera::start()
+{
+ if (isActive())
+ return;
+
+ if (camera_start_viewfinder(m_handle.get(), viewfinderCallback,
+ statusCallback, this) != CAMERA_EOK) {
+ qWarning("QQnxCamera: unable to start viewfinder");
+ return;
+ }
+
+ m_viewfinderActive = true;
+}
+
+void QQnxCamera::stop()
+{
+ if (!isActive())
+ return;
+
+ if (m_recordingVideo)
+ stopVideoRecording();
+
+ if (camera_stop_viewfinder(m_handle.get()) != CAMERA_EOK)
+ qWarning("QQnxCamera: Failed to stop camera");
+
+ m_viewfinderActive = false;
+}
+
+bool QQnxCamera::setCameraFormat(uint32_t width, uint32_t height, double frameRate)
+{
+ if (!m_handle.isOpen())
+ return false;
+
+ const camera_error_t error = camera_set_vf_property(m_handle.get(),
+ CAMERA_IMGPROP_WIDTH, width,
+ CAMERA_IMGPROP_HEIGHT, height,
+ CAMERA_IMGPROP_FRAMERATE, frameRate);
+
+ if (error != CAMERA_EOK) {
+ qWarning("QQnxCamera: failed to set camera format");
+ return false;
+ }
+
+ return true;
+}
+
+bool QQnxCamera::isFocusModeSupported(camera_focusmode_t mode) const
+{
+ return supportedFocusModes().contains(mode);
+}
+
+bool QQnxCamera::setFocusMode(camera_focusmode_t mode)
+{
+ if (!isActive())
+ return false;
+
+ const camera_error_t result = camera_set_focus_mode(m_handle.get(), mode);
+
+ if (result != CAMERA_EOK) {
+ qWarning("QQnxCamera: Unable to set focus mode (0x%x)", result);
+ return false;
+ }
+
+ Q_EMIT focusModeChanged(mode);
+
+ return true;
+}
+
+camera_focusmode_t QQnxCamera::focusMode() const
+{
+ if (!isActive())
+ return CAMERA_FOCUSMODE_OFF;
+
+ camera_focusmode_t mode;
+
+ const camera_error_t result = camera_get_focus_mode(m_handle.get(), &mode);
+
+ if (result != CAMERA_EOK) {
+ qWarning("QQnxCamera: Unable to set focus mode (0x%x)", result);
+ return CAMERA_FOCUSMODE_OFF;
+ }
+
+ return mode;
+}
+
+QQnxCamera::VideoFormat QQnxCamera::vfFormat() const
+{
+ VideoFormat f = {};
+
+ if (camera_get_vf_property(m_handle.get(),
+ CAMERA_IMGPROP_WIDTH, &f.width,
+ CAMERA_IMGPROP_HEIGHT, &f.height,
+ CAMERA_IMGPROP_ROTATION, &f.rotation,
+ CAMERA_IMGPROP_FRAMERATE, &f.frameRate,
+ CAMERA_IMGPROP_FORMAT, &f.frameType) != CAMERA_EOK) {
+ qWarning("QQnxCamera: Failed to query video finder frameType");
+ }
+
+ return f;
+}
+
+void QQnxCamera::setVfFormat(const VideoFormat &f)
+{
+ const bool active = isActive();
+
+ if (active)
+ stop();
+
+ if (camera_set_vf_property(m_handle.get(),
+ CAMERA_IMGPROP_WIDTH, f.width,
+ CAMERA_IMGPROP_HEIGHT, f.height,
+ CAMERA_IMGPROP_ROTATION, f.rotation,
+ CAMERA_IMGPROP_FRAMERATE, f.frameRate,
+ CAMERA_IMGPROP_FORMAT, f.frameType) != CAMERA_EOK) {
+ qWarning("QQnxCamera: Failed to set video finder frameType");
+ }
+
+ if (active)
+ start();
+}
+
+QQnxCamera::VideoFormat QQnxCamera::recordingFormat() const
+{
+ VideoFormat f = {};
+
+ if (camera_get_video_property(m_handle.get(),
+ CAMERA_IMGPROP_WIDTH, &f.width,
+ CAMERA_IMGPROP_HEIGHT, &f.height,
+ CAMERA_IMGPROP_ROTATION, &f.rotation,
+ CAMERA_IMGPROP_FRAMERATE, &f.frameRate,
+ CAMERA_IMGPROP_FORMAT, &f.frameType) != CAMERA_EOK) {
+ qWarning("QQnxCamera: Failed to query recording frameType");
+ }
+
+ return f;
+}
+
+void QQnxCamera::setRecordingFormat(const VideoFormat &f)
+{
+ if (camera_set_video_property(m_handle.get(),
+ CAMERA_IMGPROP_WIDTH, f.width,
+ CAMERA_IMGPROP_HEIGHT, f.height,
+ CAMERA_IMGPROP_ROTATION, f.rotation,
+ CAMERA_IMGPROP_FRAMERATE, f.frameRate,
+ CAMERA_IMGPROP_FORMAT, f.frameType) != CAMERA_EOK) {
+ qWarning("QQnxCamera: Failed to set recording frameType");
+ }
+}
+
+void QQnxCamera::setCustomFocusPoint(const QPointF &point)
+{
+ const QSize vfSize = viewFinderSize();
+
+ if (vfSize.isEmpty())
+ return;
+
+ const auto toUint32 = [](double value) {
+ return static_cast<uint32_t>(std::max(0.0, value));
+ };
+
+ // define a 40x40 pixel focus region around the custom focus point
+ constexpr int pixelSize = 40;
+
+ const auto left = toUint32(point.x() * vfSize.width() - pixelSize / 2);
+ const auto top = toUint32(point.y() * vfSize.height() - pixelSize / 2);
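+ // e.g. a normalized point of (0.5, 0.5) on a 640x480 viewfinder yields
+ // left = 0.5 * 640 - 20 = 300 and top = 0.5 * 480 - 20 = 220, i.e. a
+ // 40x40 region centered in the frame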
+
+ camera_region_t focusRegion {
+ .left = left,
+ .top = top,
+ .width = pixelSize,
+ .height = pixelSize,
+ .extra = 0
+ };
+
+ if (camera_set_focus_regions(m_handle.get(), 1, &focusRegion) != CAMERA_EOK) {
+ qWarning("QQnxCamera: Unable to set focus region");
+ return;
+ }
+
+ if (setFocusMode(focusMode()))
+ Q_EMIT customFocusPointChanged(point);
+}
+
+void QQnxCamera::setManualFocusStep(int step)
+{
+ if (!isActive()) {
+ qWarning("QQnxCamera: Failed to set focus distance - view finder not active");
+ return;
+ }
+
+ if (!isFocusModeSupported(CAMERA_FOCUSMODE_MANUAL)) {
+ qWarning("QQnxCamera: Failed to set focus distance - manual focus mode not supported");
+ return;
+ }
+
+ if (camera_set_manual_focus_step(m_handle.get(), step) != CAMERA_EOK)
+ qWarning("QQnxCamera: Failed to set focus distance");
+}
+
+int QQnxCamera::manualFocusStep() const
+{
+ return focusStep().step;
+}
+
+int QQnxCamera::maxFocusStep() const
+{
+ return focusStep().maxStep;
+}
+
+QQnxCamera::FocusStep QQnxCamera::focusStep() const
+{
+ constexpr FocusStep invalidStep { -1, -1 };
+
+ if (!isActive()) {
+ qWarning("QQnxCamera: Failed to query max focus distance - view finder not active");
+ return invalidStep;
+ }
+
+ if (!isFocusModeSupported(CAMERA_FOCUSMODE_MANUAL)) {
+ qWarning("QQnxCamera: Failed to query max focus distance - "
+ "manual focus mode not supported");
+ return invalidStep;
+ }
+
+ FocusStep focusStep;
+
+ if (camera_get_manual_focus_step(m_handle.get(),
+ &focusStep.maxStep, &focusStep.step) != CAMERA_EOK) {
+ qWarning("QQnxCamera: Unable to query camera focus step");
+ return invalidStep;
+ }
+
+ return focusStep;
+}
+
+QSize QQnxCamera::viewFinderSize() const
+{
+ // get the size of the viewfinder
+ int width = 0;
+ int height = 0;
+
+ if (camera_get_vf_property(m_handle.get(),
+ CAMERA_IMGPROP_WIDTH, &width,
+ CAMERA_IMGPROP_HEIGHT, &height) != CAMERA_EOK) {
+ qWarning("QQnxCamera: failed to query view finder size");
+ return {};
+ }
+
+ return { width, height };
+}
+
+uint32_t QQnxCamera::minimumZoomLevel() const
+{
+ return m_minZoom;
+}
+
+uint32_t QQnxCamera::maximumZoomLevel() const
+{
+ return m_maxZoom;
+}
+
+bool QQnxCamera::isSmoothZoom() const
+{
+ return m_smoothZoom;
+}
+
+double QQnxCamera::zoomRatio(uint32_t zoomLevel) const
+{
+ double ratio;
+
+ if (camera_get_zoom_ratio_from_zoom_level(m_handle.get(), zoomLevel, &ratio) != CAMERA_EOK) {
+ qWarning("QQnxCamera: failed to query zoom ratio from zoom level");
+ return 0.0;
+ }
+
+ return ratio;
+}
+
+bool QQnxCamera::setZoomFactor(uint32_t factor)
+{
+ if (camera_set_vf_property(m_handle.get(), CAMERA_IMGPROP_ZOOMFACTOR, factor) != CAMERA_EOK) {
+ qWarning("QQnxCamera: failed to set zoom factor");
+ return false;
+ }
+
+ return true;
+}
+
+void QQnxCamera::setEvOffset(float ev)
+{
+ if (!isActive())
+ return;
+
+ if (camera_set_ev_offset(m_handle.get(), ev) != CAMERA_EOK)
+ qWarning("QQnxCamera: Failed to set up exposure compensation");
+}
+
+uint32_t QQnxCamera::manualIsoSensitivity() const
+{
+ if (!isActive())
+ return 0;
+
+ uint32_t isoValue;
+
+ if (camera_get_manual_iso(m_handle.get(), &isoValue) != CAMERA_EOK) {
+ qWarning("QQnxCamera: Failed to query ISO value");
+ return 0;
+ }
+
+ return isoValue;
+}
+
+void QQnxCamera::setManualIsoSensitivity(uint32_t value)
+{
+ if (!isActive())
+ return;
+
+ if (camera_set_manual_iso(m_handle.get(), value) != CAMERA_EOK)
+ qWarning("QQnxCamera: Failed to set ISO value");
+}
+
+void QQnxCamera::setManualExposureTime(double seconds)
+{
+ if (!isActive())
+ return;
+
+ if (camera_set_manual_shutter_speed(m_handle.get(), seconds) != CAMERA_EOK)
+ qWarning("QQnxCamera: Failed to set exposure time");
+}
+
+double QQnxCamera::manualExposureTime() const
+{
+ if (!isActive())
+ return 0.0;
+
+ double shutterSpeed;
+
+ if (camera_get_manual_shutter_speed(m_handle.get(), &shutterSpeed) != CAMERA_EOK) {
+ qWarning("QQnxCamera: Failed to get exposure time");
+ return 0.0;
+ }
+
+ return shutterSpeed;
+}
+
+bool QQnxCamera::hasFeature(camera_feature_t feature) const
+{
+ return camera_has_feature(m_handle.get(), feature);
+}
+
+void QQnxCamera::setWhiteBalanceMode(camera_whitebalancemode_t mode)
+{
+ if (!isActive())
+ return;
+
+ if (camera_set_whitebalance_mode(m_handle.get(), mode) != CAMERA_EOK)
+ qWarning("QQnxCamera: failed to set whitebalance mode");
+}
+
+camera_whitebalancemode_t QQnxCamera::whiteBalanceMode() const
+{
+ if (!isActive())
+ return CAMERA_WHITEBALANCEMODE_OFF;
+
+ camera_whitebalancemode_t mode;
+
+ if (camera_get_whitebalance_mode(m_handle.get(), &mode) != CAMERA_EOK) {
+ qWarning("QQnxCamera: failed to get white balance mode");
+ return CAMERA_WHITEBALANCEMODE_OFF;
+ }
+
+ return mode;
+}
+
+void QQnxCamera::setManualWhiteBalance(uint32_t value)
+{
+ if (!isActive())
+ return;
+
+ if (camera_set_manual_white_balance(m_handle.get(), value) != CAMERA_EOK)
+ qWarning("QQnxCamera: failed to set manual white balance");
+}
+
+uint32_t QQnxCamera::manualWhiteBalance() const
+{
+ if (!isActive())
+ return 0;
+
+ uint32_t value;
+
+ if (camera_get_manual_white_balance(m_handle.get(), &value) != CAMERA_EOK) {
+ qWarning("QQnxCamera: failed to get manual white balance");
+ return 0;
+ }
+
+ return value;
+}
+
+bool QQnxCamera::startVideoRecording(const QString &filename)
+{
+ // when preview is video, we must ensure that the recording properties
+ // match the view finder properties
+ if (hasFeature(CAMERA_FEATURE_PREVIEWISVIDEO)) {
+ VideoFormat newFormat = vfFormat();
+
+ const QList<camera_frametype_t> recordingTypes = supportedRecordingFrameTypes();
+
+ // find a suitable matching frame type in case the current view finder
+ // frametype is not supported
+ if (newFormat.frameType != recordingFormat().frameType
+ && !recordingTypes.contains(newFormat.frameType)) {
+
+ bool found = false;
+
+ for (const camera_frametype_t type : supportedVfFrameTypes()) {
+ if (recordingTypes.contains(type)) {
+ newFormat.frameType = type;
+ found = true;
+ break;
+ }
+ }
+
+ if (found) {
+ m_originalVfFormat = vfFormat();
+
+ // reconfigure and restart the view finder
+ setVfFormat(newFormat);
+ } else {
+ qWarning("QQnxCamera: failed to find suitable frame type for recording - aborting");
+ return false;
+ }
+ }
+
+ setRecordingFormat(newFormat);
+ }
+
+ if (camera_start_video(m_handle.get(), qPrintable(filename),
+ nullptr, nullptr, nullptr) == CAMERA_EOK) {
+ m_recordingVideo = true;
+ } else {
+ qWarning("QQnxCamera: failed to start video encoding");
+ }
+
+ return m_recordingVideo;
+}
+
+void QQnxCamera::stopVideoRecording()
+{
+ m_recordingVideo = false;
+
+ if (camera_stop_video(m_handle.get()) != CAMERA_EOK)
+ qWarning("QQnxCamera: error when stopping video recording");
+
+ // restore original vf format
+ if (m_originalVfFormat) {
+ setVfFormat(*m_originalVfFormat);
+ m_originalVfFormat.reset();
+ }
+}
+
+bool QQnxCamera::isVideoEncodingSupported() const
+{
+ if (!isActive())
+ return false;
+
+ return camera_has_feature(m_handle.get(), CAMERA_FEATURE_VIDEO);
+}
+
+camera_handle_t QQnxCamera::handle() const
+{
+ return m_handle.get();
+}
+
+void QQnxCamera::updateZoomLimits()
+{
+ if (camera_get_zoom_limits(m_handle.get(), &m_minZoom, &m_maxZoom,
+ &m_smoothZoom) != CAMERA_EOK) {
+ qWarning("QQnxCamera: failed to update zoom limits - using default values");
+ m_minZoom = m_maxZoom = 0;
+ m_smoothZoom = false;
+ }
+}
+
+void QQnxCamera::updateSupportedWhiteBalanceValues()
+{
+ uint32_t numSupported = 0;
+
+ const camera_error_t result = camera_get_supported_manual_white_balance_values(
+ m_handle.get(), 0, &numSupported, nullptr, &m_continuousWhiteBalanceValues);
+
+ if (result != CAMERA_EOK) {
+ if (result == CAMERA_EOPNOTSUPP)
+ qWarning("QQnxCamera: white balance not supported");
+ else
+ qWarning("QQnxCamera: unable to query manual white balance value count");
+
+ m_supportedWhiteBalanceValues.clear();
+
+ return;
+ }
+
+ m_supportedWhiteBalanceValues.resize(numSupported);
+
+ if (camera_get_supported_manual_white_balance_values(m_handle.get(),
+ m_supportedWhiteBalanceValues.size(),
+ &numSupported,
+ m_supportedWhiteBalanceValues.data(),
+ &m_continuousWhiteBalanceValues) != CAMERA_EOK) {
+ qWarning("QQnxCamera: unable to query manual white balance values");
+
+ m_supportedWhiteBalanceValues.clear();
+ }
+}
+
+QList<camera_vfmode_t> QQnxCamera::supportedVfModes() const
+{
+ return queryValues(camera_get_supported_vf_modes);
+}
+
+QList<camera_res_t> QQnxCamera::supportedVfResolutions() const
+{
+ return queryValues(camera_get_supported_vf_resolutions);
+}
+
+QList<camera_frametype_t> QQnxCamera::supportedVfFrameTypes() const
+{
+ return queryValues(camera_get_supported_vf_frame_types);
+}
+
+QList<camera_focusmode_t> QQnxCamera::supportedFocusModes() const
+{
+ return queryValues(camera_get_focus_modes);
+}
+
+QList<double> QQnxCamera::specifiedVfFrameRates(camera_frametype_t frameType,
+ camera_res_t resolution) const
+{
+ uint32_t numSupported = 0;
+
+ if (camera_get_specified_vf_framerates(m_handle.get(), frameType, resolution,
+ 0, &numSupported, nullptr, nullptr) != CAMERA_EOK) {
+ qWarning("QQnxCamera: unable to query specified framerates count");
+ return {};
+ }
+
+ QList<double> values(numSupported);
+
+ if (camera_get_specified_vf_framerates(m_handle.get(), frameType, resolution,
+ values.size(), &numSupported, values.data(), nullptr) != CAMERA_EOK) {
+ qWarning("QQnxCamera: unable to query specified framerates values");
+ return {};
+ }
+
+ return values;
+}
+
+QList<camera_frametype_t> QQnxCamera::supportedRecordingFrameTypes() const
+{
+ return queryValues(camera_get_video_frame_types);
+}
+
+QList<uint32_t> QQnxCamera::supportedWhiteBalanceValues() const
+{
+ return m_supportedWhiteBalanceValues;
+}
+
+bool QQnxCamera::hasContinuousWhiteBalanceValues() const
+{
+ return m_continuousWhiteBalanceValues;
+}
+
+QList<camera_unit_t> QQnxCamera::supportedUnits()
+{
+ unsigned int numSupported = 0;
+
+ if (camera_get_supported_cameras(0, &numSupported, nullptr) != CAMERA_EOK) {
+ qWarning("QQnxCamera: failed to query supported camera unit count");
+ return {};
+ }
+
+ QList<camera_unit_t> cameraUnits(numSupported);
+
+ if (camera_get_supported_cameras(cameraUnits.size(), &numSupported,
+ cameraUnits.data()) != CAMERA_EOK) {
+ qWarning("QQnxCamera: failed to enumerate supported camera units");
+ return {};
+ }
+
+ return cameraUnits;
+}
+
+template <typename T, typename U>
+QList<T> QQnxCamera::queryValues(QueryFuncPtr<T,U> func) const
+{
+ static_assert(std::is_integral_v<U>, "Parameter U must be of integral type");
+
+ U numSupported = 0;
+
+ if (func(m_handle.get(), 0, &numSupported, nullptr) != CAMERA_EOK) {
+ qWarning("QQnxCamera: unable to query camera value count");
+ return {};
+ }
+
+ QList<T> values(numSupported);
+
+ if (func(m_handle.get(), values.size(), &numSupported, values.data()) != CAMERA_EOK) {
+ qWarning("QQnxCamera: unable to query camera values");
+ return {};
+ }
+
+ return values;
+}
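+
+// The QNX camera API reports list-valued properties through a two-call idiom:
+// query with a zero-sized buffer to learn the element count, then query again
+// with storage of that size. A minimal sketch of the same idiom outside this
+// wrapper, assuming an already-open handle:
+//
+//     uint32_t count = 0;
+//     camera_get_supported_vf_frame_types(handle, 0, &count, nullptr);
+//     QList<camera_frametype_t> types(count);
+//     camera_get_supported_vf_frame_types(handle, types.size(), &count, types.data());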
+
+void QQnxCamera::handleVfBuffer(camera_buffer_t *buffer)
+{
+ // process the frame on this thread before locking the mutex
+ auto frame = std::make_unique<QQnxCameraFrameBuffer>(buffer);
+
+ // skip a frame if mutex is busy
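+ // (takeCurrentFrame() holds the same lock on the consumer side; dropping
+ // the frame is cheaper than blocking the camera service's callback thread)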
+ if (m_currentFrameMutex.tryLock()) {
+ m_currentFrame = std::move(frame);
+ m_currentFrameMutex.unlock();
+
+ Q_EMIT frameAvailable();
+ }
+}
+
+void QQnxCamera::handleVfStatus(camera_devstatus_t status, uint16_t extraData)
+{
+ QMetaObject::invokeMethod(this, "handleStatusChange", Qt::QueuedConnection,
+ Q_ARG(camera_devstatus_t, status),
+ Q_ARG(uint16_t, extraData));
+}
+
+void QQnxCamera::handleStatusChange(camera_devstatus_t status, uint16_t extraData)
+{
+ Q_UNUSED(extraData);
+
+ switch (status) {
+ case CAMERA_STATUS_BUFFER_UNDERFLOW:
+ case CAMERA_STATUS_CAPTURECOMPLETE:
+ case CAMERA_STATUS_CAPTURE_ABORTED:
+ case CAMERA_STATUS_CONNECTED:
+ case CAMERA_STATUS_DISCONNECTED:
+ case CAMERA_STATUS_FILESIZE_ERROR:
+ case CAMERA_STATUS_FILESIZE_LIMIT_WARNING:
+ case CAMERA_STATUS_FILESIZE_WARNING:
+ case CAMERA_STATUS_FLASH_LEVEL_CHANGE:
+ case CAMERA_STATUS_FOCUS_CHANGE:
+ case CAMERA_STATUS_FRAME_DROPPED:
+ case CAMERA_STATUS_LOWLIGHT:
+ case CAMERA_STATUS_MM_ERROR:
+ case CAMERA_STATUS_NOSPACE_ERROR:
+ case CAMERA_STATUS_PHOTOVF:
+ case CAMERA_STATUS_POWERDOWN:
+ case CAMERA_STATUS_POWERUP:
+ case CAMERA_STATUS_RESOURCENOTAVAIL:
+ case CAMERA_STATUS_UNKNOWN:
+ case CAMERA_STATUS_VIDEOLIGHT_CHANGE:
+ case CAMERA_STATUS_VIDEOLIGHT_LEVEL_CHANGE:
+ case CAMERA_STATUS_VIDEOVF:
+ case CAMERA_STATUS_VIDEO_PAUSE:
+ case CAMERA_STATUS_VIDEO_RESUME:
+ case CAMERA_STATUS_VIEWFINDER_ACTIVE:
+ case CAMERA_STATUS_VIEWFINDER_ERROR:
+ case CAMERA_STATUS_VIEWFINDER_FREEZE:
+ case CAMERA_STATUS_VIEWFINDER_SUSPEND:
+ case CAMERA_STATUS_VIEWFINDER_UNFREEZE:
+ case CAMERA_STATUS_VIEWFINDER_UNSUSPEND:
+ qDebug() << "QQnxCamera:" << ::statusToString(status);
+ break;
+ }
+}
+
+std::unique_ptr<QQnxCameraFrameBuffer> QQnxCamera::takeCurrentFrame()
+{
+ QMutexLocker l(&m_currentFrameMutex);
+
+ return std::move(m_currentFrame);
+}
+
+void QQnxCamera::viewfinderCallback(camera_handle_t handle, camera_buffer_t *buffer, void *arg)
+{
+ Q_UNUSED(handle);
+
+ auto *camera = static_cast<QQnxCamera*>(arg);
+ camera->handleVfBuffer(buffer);
+}
+
+void QQnxCamera::statusCallback(camera_handle_t handle, camera_devstatus_t status,
+ uint16_t extraData, void *arg)
+{
+ Q_UNUSED(handle);
+
+ auto *camera = static_cast<QQnxCamera*>(arg);
+ camera->handleVfStatus(status, extraData);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qqnxcamera_p.cpp"
diff --git a/src/plugins/multimedia/qnx/camera/qqnxcamera_p.h b/src/plugins/multimedia/qnx/camera/qqnxcamera_p.h
new file mode 100644
index 000000000..a4ddbfed6
--- /dev/null
+++ b/src/plugins/multimedia/qnx/camera/qqnxcamera_p.h
@@ -0,0 +1,201 @@
+// Copyright (C) 2016 Research In Motion
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QQNXCAMERA_H
+#define QQNXCAMERA_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qqnxcamerahandle_p.h"
+
+#include <QtCore/qlist.h>
+#include <QtCore/qmutex.h>
+#include <QtCore/qobject.h>
+#include <QtCore/qurl.h>
+
+#include <camera/camera_api.h>
+#include <camera/camera_3a.h>
+
+#include <memory>
+#include <optional>
+
+QT_BEGIN_NAMESPACE
+
+class QQnxCameraFrameBuffer;
+class QQnxMediaCaptureSession;
+class QQnxVideoSink;
+
+class QQnxCamera : public QObject
+{
+ Q_OBJECT
+public:
+ explicit QQnxCamera(camera_unit_t unit, QObject *parent = nullptr);
+ ~QQnxCamera();
+
+ camera_unit_t unit() const;
+
+ QString name() const;
+
+ bool isValid() const;
+
+ bool isActive() const;
+ void start();
+ void stop();
+
+ bool startVideoRecording(const QString &filename);
+ void stopVideoRecording();
+
+ bool setCameraFormat(uint32_t width, uint32_t height, double frameRate);
+
+ bool isFocusModeSupported(camera_focusmode_t mode) const;
+ bool setFocusMode(camera_focusmode_t mode);
+ camera_focusmode_t focusMode() const;
+
+ void setCustomFocusPoint(const QPointF &point);
+
+ void setManualFocusStep(int step);
+ int manualFocusStep() const;
+ int maxFocusStep() const;
+
+ QSize viewFinderSize() const;
+
+ uint32_t minimumZoomLevel() const;
+ uint32_t maximumZoomLevel() const;
+ bool isSmoothZoom() const;
+ double zoomRatio(uint32_t zoomLevel) const;
+ bool setZoomFactor(uint32_t factor);
+
+ void setEvOffset(float ev);
+
+ uint32_t manualIsoSensitivity() const;
+ void setManualIsoSensitivity(uint32_t value);
+ void setManualExposureTime(double seconds);
+ double manualExposureTime() const;
+
+ void setWhiteBalanceMode(camera_whitebalancemode_t mode);
+ camera_whitebalancemode_t whiteBalanceMode() const;
+
+ void setManualWhiteBalance(uint32_t value);
+ uint32_t manualWhiteBalance() const;
+
+ bool hasFeature(camera_feature_t feature) const;
+
+ camera_handle_t handle() const;
+
+ QList<camera_vfmode_t> supportedVfModes() const;
+ QList<camera_res_t> supportedVfResolutions() const;
+ QList<camera_frametype_t> supportedVfFrameTypes() const;
+ QList<camera_focusmode_t> supportedFocusModes() const;
+ QList<double> specifiedVfFrameRates(camera_frametype_t frameType,
+ camera_res_t resolution) const;
+
+ QList<camera_frametype_t> supportedRecordingFrameTypes() const;
+
+ QList<uint32_t> supportedWhiteBalanceValues() const;
+
+ bool hasContinuousWhiteBalanceValues() const;
+
+ static QList<camera_unit_t> supportedUnits();
+
+ std::unique_ptr<QQnxCameraFrameBuffer> takeCurrentFrame();
+
+Q_SIGNALS:
+ void focusModeChanged(camera_focusmode_t mode);
+ void customFocusPointChanged(const QPointF &point);
+ void minimumZoomFactorChanged(double factor);
+ void maximumZoomFactorChanged(double factor);
+
+ void frameAvailable();
+
+private:
+ struct FocusStep
+ {
+ int step; // current step
+ int maxStep; // max supported step
+ };
+
+ FocusStep focusStep() const;
+
+ struct VideoFormat
+ {
+ uint32_t width;
+ uint32_t height;
+ uint32_t rotation;
+ double frameRate;
+ camera_frametype_t frameType;
+ };
+
+ friend QDebug &operator<<(QDebug&, const VideoFormat&);
+
+ VideoFormat vfFormat() const;
+ void setVfFormat(const VideoFormat &format);
+
+ VideoFormat recordingFormat() const;
+ void setRecordingFormat(const VideoFormat &format);
+
+ void updateZoomLimits();
+ void updateSupportedWhiteBalanceValues();
+ void setColorTemperatureInternal(unsigned temp);
+
+ bool isVideoEncodingSupported() const;
+
+ void handleVfBuffer(camera_buffer_t *buffer);
+
+ // viewfinder callback
+ void handleVfStatus(camera_devstatus_t status, uint16_t extraData);
+
+ // our handler running on main thread
+ Q_INVOKABLE void handleStatusChange(camera_devstatus_t status, uint16_t extraData);
+
+ template <typename T, typename U>
+ using QueryFuncPtr = camera_error_t (*)(camera_handle_t, U, U *, T *);
+
+ template <typename T, typename U>
+ QList<T> queryValues(QueryFuncPtr<T, U> func) const;
+
+ static void viewfinderCallback(camera_handle_t handle,
+ camera_buffer_t *buffer, void *arg);
+
+ static void statusCallback(camera_handle_t handle, camera_devstatus_t status,
+ uint16_t extraData, void *arg);
+
+ QQnxMediaCaptureSession *m_session = nullptr;
+
+ camera_unit_t m_cameraUnit = CAMERA_UNIT_NONE;
+
+ QQnxCameraHandle m_handle;
+
+ uint32_t m_minZoom = 0;
+ uint32_t m_maxZoom = 0;
+
+ QMutex m_currentFrameMutex;
+
+ QList<uint32_t> m_supportedWhiteBalanceValues;
+
+ std::unique_ptr<QQnxCameraFrameBuffer> m_currentFrame;
+
+ std::optional<VideoFormat> m_originalVfFormat;
+
+ bool m_viewfinderActive = false;
+ bool m_recordingVideo = false;
+ bool m_valid = false;
+ bool m_smoothZoom = false;
+ bool m_continuousWhiteBalanceValues = false;
+};
+
+QT_END_NAMESPACE
+
+Q_DECLARE_METATYPE(camera_devstatus_t)
+Q_DECLARE_METATYPE(uint16_t)
+
+#endif
diff --git a/src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer.cpp b/src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer.cpp
new file mode 100644
index 000000000..6595c5d42
--- /dev/null
+++ b/src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer.cpp
@@ -0,0 +1,299 @@
+// Copyright (C) 2022 The Qt Company
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qqnxcameraframebuffer_p.h"
+
+#include <limits>
+
+template <typename T>
+static constexpr int toInt(T value)
+{
+ if constexpr (sizeof(T) >= sizeof(int)) {
+ if (std::is_signed_v<T>) {
+ return static_cast<int>(std::clamp<T>(value,
+ std::numeric_limits<int>::min(), std::numeric_limits<int>::max()));
+ } else {
+ return static_cast<int>(std::min<T>(value, std::numeric_limits<int>::max()));
+ }
+ } else {
+ return static_cast<int>(value);
+ }
+}
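+
+// For example, toInt(std::numeric_limits<uint32_t>::max()) clamps to INT_MAX
+// instead of wrapping to -1, while toInt(uint16_t(640)) casts directly because
+// every uint16_t value fits in an int.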
+
+template <typename T>
+static constexpr QSize frameSize(const T &frame)
+{
+ return { toInt(frame.width), toInt(frame.height) };
+}
+
+static constexpr QVideoFrameFormat::PixelFormat frameTypeToPixelFormat(camera_frametype_t type)
+{
+ switch (type) {
+ case CAMERA_FRAMETYPE_NV12:
+ return QVideoFrameFormat::Format_NV12;
+ case CAMERA_FRAMETYPE_RGB8888:
+ return QVideoFrameFormat::Format_ARGB8888;
+ case CAMERA_FRAMETYPE_GRAY8:
+ return QVideoFrameFormat::Format_Y8;
+ case CAMERA_FRAMETYPE_CBYCRY:
+ return QVideoFrameFormat::Format_UYVY;
+ case CAMERA_FRAMETYPE_YCBCR420P:
+ return QVideoFrameFormat::Format_YUV420P;
+ case CAMERA_FRAMETYPE_YCBYCR:
+ return QVideoFrameFormat::Format_YUYV;
+ default:
+ break;
+ }
+
+ return QVideoFrameFormat::Format_Invalid;
+}
+
+static constexpr size_t bufferDataSize(const camera_frame_nv12_t &frame)
+{
+ return frame.uv_offset + frame.uv_stride * frame.height / 2;
+}
+
+static constexpr size_t bufferDataSize(const camera_frame_rgb8888_t &frame)
+{
+ return frame.stride * frame.height;
+}
+
+static constexpr size_t bufferDataSize(const camera_frame_gray8_t &frame)
+{
+ return frame.stride * frame.height;
+}
+
+static constexpr size_t bufferDataSize(const camera_frame_cbycry_t &frame)
+{
+ return frame.bufsize;
+}
+
+static constexpr size_t bufferDataSize(const camera_frame_ycbcr420p_t &frame)
+{
+ return frame.cr_offset + frame.cr_stride * frame.height / 2;
+}
+
+static constexpr size_t bufferDataSize(const camera_frame_ycbycr_t &frame)
+{
+ return frame.stride * frame.height;
+}
+
+static constexpr size_t bufferDataSize(const camera_buffer_t *buffer)
+{
+ switch (buffer->frametype) {
+ case CAMERA_FRAMETYPE_NV12:
+ return bufferDataSize(buffer->framedesc.nv12);
+ case CAMERA_FRAMETYPE_RGB8888:
+ return bufferDataSize(buffer->framedesc.rgb8888);
+ case CAMERA_FRAMETYPE_GRAY8:
+ return bufferDataSize(buffer->framedesc.gray8);
+ case CAMERA_FRAMETYPE_CBYCRY:
+ return bufferDataSize(buffer->framedesc.cbycry);
+ case CAMERA_FRAMETYPE_YCBCR420P:
+ return bufferDataSize(buffer->framedesc.ycbcr420p);
+ case CAMERA_FRAMETYPE_YCBYCR:
+ return bufferDataSize(buffer->framedesc.ycbycr);
+ default:
+ break;
+ }
+
+ return 0;
+}
+
+static QAbstractVideoBuffer::MapData mapData(const camera_frame_nv12_t &frame,
+ unsigned char *baseAddress)
+{
+ return {
+ .planeCount = 2,
+ .bytesPerLine = {
+ toInt(frame.stride),
+ toInt(frame.uv_stride)
+ },
+ .data = {
+ baseAddress,
+ baseAddress + frame.uv_offset
+ },
+ .dataSize = {
+ toInt(frame.stride * frame.height),
+ toInt(frame.uv_stride * frame.height / 2)
+ }
+ };
+}
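+
+// NV12 lays out a full-resolution Y plane followed, at uv_offset, by a
+// half-height plane of interleaved CbCr samples; that is why the second plane
+// above starts at uv_offset and spans uv_stride * height / 2 bytes.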
+
+static QAbstractVideoBuffer::MapData mapData(const camera_frame_rgb8888_t &frame,
+ unsigned char *baseAddress)
+{
+ return {
+ .planeCount = 1,
+ .bytesPerLine = {
+ toInt(frame.stride)
+ },
+ .data = {
+ baseAddress
+ },
+ .dataSize = {
+ toInt(frame.stride * frame.height),
+ }
+ };
+}
+
+static QAbstractVideoBuffer::MapData mapData(const camera_frame_gray8_t &frame,
+ unsigned char *baseAddress)
+{
+ return {
+ .planeCount = 1,
+ .bytesPerLine = {
+ toInt(frame.stride)
+ },
+ .data = {
+ baseAddress
+ },
+ .dataSize = {
+ toInt(frame.stride * frame.height)
+ }
+ };
+}
+
+static QAbstractVideoBuffer::MapData mapData(const camera_frame_cbycry_t &frame,
+ unsigned char *baseAddress)
+{
+ return {
+ .planeCount = 1,
+ .bytesPerLine = {
+ toInt(frame.stride)
+ },
+ .data = {
+ baseAddress
+ },
+ .dataSize = {
+ toInt(frame.bufsize),
+ }
+ };
+}
+
+static QAbstractVideoBuffer::MapData mapData(const camera_frame_ycbcr420p_t &frame,
+ unsigned char *baseAddress)
+{
+ return {
+ .planeCount = 3,
+ .bytesPerLine = {
+ toInt(frame.y_stride),
+ toInt(frame.cb_stride),
+ toInt(frame.cr_stride),
+ },
+ .data = {
+ baseAddress,
+ baseAddress + frame.cb_offset,
+ baseAddress + frame.cr_offset,
+ },
+ .dataSize = {
+ toInt(frame.y_stride * frame.height),
+ toInt(frame.cb_stride * frame.height / 2),
+ toInt(frame.cr_stride * frame.height / 2)
+ }
+ };
+}
+
+static QAbstractVideoBuffer::MapData mapData(const camera_frame_ycbycr_t &frame,
+ unsigned char *baseAddress)
+{
+ return {
+ .planeCount = 1,
+ .bytesPerLine = {
+ toInt(frame.stride)
+ },
+ .data = {
+ baseAddress
+ },
+ .dataSize = {
+ toInt(frame.stride * frame.height)
+ }
+ };
+}
+
+static QAbstractVideoBuffer::MapData mapData(const camera_buffer_t *buffer,
+ unsigned char *baseAddress)
+{
+ switch (buffer->frametype) {
+ case CAMERA_FRAMETYPE_NV12:
+ return mapData(buffer->framedesc.nv12, baseAddress);
+ case CAMERA_FRAMETYPE_RGB8888:
+ return mapData(buffer->framedesc.rgb8888, baseAddress);
+ case CAMERA_FRAMETYPE_GRAY8:
+ return mapData(buffer->framedesc.gray8, baseAddress);
+ case CAMERA_FRAMETYPE_CBYCRY:
+ return mapData(buffer->framedesc.cbycry, baseAddress);
+ case CAMERA_FRAMETYPE_YCBCR420P:
+ return mapData(buffer->framedesc.ycbcr420p, baseAddress);
+ case CAMERA_FRAMETYPE_YCBYCR:
+ return mapData(buffer->framedesc.ycbycr, baseAddress);
+ default:
+ break;
+ }
+
+ return {};
+}
+
+static constexpr QSize frameSize(const camera_buffer_t *buffer)
+{
+ switch (buffer->frametype) {
+ case CAMERA_FRAMETYPE_NV12:
+ return frameSize(buffer->framedesc.nv12);
+ case CAMERA_FRAMETYPE_RGB8888:
+ return frameSize(buffer->framedesc.rgb8888);
+ case CAMERA_FRAMETYPE_GRAY8:
+ return frameSize(buffer->framedesc.gray8);
+ case CAMERA_FRAMETYPE_CBYCRY:
+ return frameSize(buffer->framedesc.cbycry);
+ case CAMERA_FRAMETYPE_YCBCR420P:
+ return frameSize(buffer->framedesc.ycbcr420p);
+ case CAMERA_FRAMETYPE_YCBYCR:
+ return frameSize(buffer->framedesc.ycbycr);
+ default:
+ break;
+ }
+
+ return {};
+}
+
+QT_BEGIN_NAMESPACE
+
+QQnxCameraFrameBuffer::QQnxCameraFrameBuffer(const camera_buffer_t *buffer, QRhi *rhi)
+ : QHwVideoBuffer(rhi ? QVideoFrame::RhiTextureHandle : QVideoFrame::NoHandle, rhi),
+ m_rhi(rhi),
+ m_pixelFormat(::frameTypeToPixelFormat(buffer->frametype)),
+ m_dataSize(::bufferDataSize(buffer))
+{
+ if (m_dataSize <= 0)
+ return;
+
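+ // deep-copy the pixels: the camera service is free to recycle
+ // buffer->framebuf once the viewfinder callback returns, so the frame
+ // keeps its own storage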
+ m_data = std::make_unique<unsigned char[]>(m_dataSize);
+
+ memcpy(m_data.get(), buffer->framebuf, m_dataSize);
+
+ m_mapData = ::mapData(buffer, m_data.get());
+
+ m_frameSize = ::frameSize(buffer);
+}
+
+QAbstractVideoBuffer::MapData QQnxCameraFrameBuffer::map(QtVideo::MapMode)
+{
+ return m_mapData;
+}
+
+void QQnxCameraFrameBuffer::unmap()
+{
+}
+
+QVideoFrameFormat::PixelFormat QQnxCameraFrameBuffer::pixelFormat() const
+{
+ return m_pixelFormat;
+}
+
+QSize QQnxCameraFrameBuffer::size() const
+{
+ return m_frameSize;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer_p.h b/src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer_p.h
new file mode 100644
index 000000000..20f724552
--- /dev/null
+++ b/src/plugins/multimedia/qnx/camera/qqnxcameraframebuffer_p.h
@@ -0,0 +1,60 @@
+// Copyright (C) 2022 The Qt Company
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QQNXCAMERAFRAMEBUFFER_H
+#define QQNXCAMERAFRAMEBUFFER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qhwvideobuffer_p.h>
+
+#include <QtCore/qsize.h>
+
+#include <camera/camera_api.h>
+
+#include <memory>
+
+QT_BEGIN_NAMESPACE
+
+class QRhi;
+
+class QQnxCameraFrameBuffer : public QHwVideoBuffer
+{
+public:
+ explicit QQnxCameraFrameBuffer(const camera_buffer_t *buffer, QRhi *rhi = nullptr);
+
+ QQnxCameraFrameBuffer(const QQnxCameraFrameBuffer&) = delete;
+ QQnxCameraFrameBuffer& operator=(const QQnxCameraFrameBuffer&) = delete;
+
+ MapData map(QtVideo::MapMode mode) override;
+ void unmap() override;
+
+ QVideoFrameFormat::PixelFormat pixelFormat() const;
+
+ QSize size() const;
+
+private:
+ QRhi *m_rhi;
+
+ QVideoFrameFormat::PixelFormat m_pixelFormat;
+
+ std::unique_ptr<unsigned char[]> m_data;
+
+ size_t m_dataSize;
+
+ MapData m_mapData;
+
+ QSize m_frameSize;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/qnx/camera/qqnxcamerahandle_p.h b/src/plugins/multimedia/qnx/camera/qqnxcamerahandle_p.h
new file mode 100644
index 000000000..3d7863dc2
--- /dev/null
+++ b/src/plugins/multimedia/qnx/camera/qqnxcamerahandle_p.h
@@ -0,0 +1,102 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QQNXCAMERAHANDLE_P_H
+#define QQNXCAMERAHANDLE_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <camera/camera_api.h>
+
+#include <utility>
+
+class QQnxCameraHandle
+{
+public:
+ QQnxCameraHandle() = default;
+
+ explicit QQnxCameraHandle(camera_handle_t h)
+ : m_handle(h) {}
+
+ explicit QQnxCameraHandle(QQnxCameraHandle &&other)
+ : m_handle(other.m_handle)
+ , m_lastError(other.m_lastError)
+ {
+ other = QQnxCameraHandle();
+ }
+
+ QQnxCameraHandle(const QQnxCameraHandle&) = delete;
+
+ QQnxCameraHandle& operator=(QQnxCameraHandle&& other)
+ {
+ m_handle = other.m_handle;
+ m_lastError = other.m_lastError;
+
+ other = QQnxCameraHandle();
+
+ return *this;
+ }
+
+ ~QQnxCameraHandle()
+ {
+ close();
+ }
+
+ bool open(camera_unit_t unit, uint32_t mode)
+ {
+ if (isOpen()) {
+ m_lastError = CAMERA_EALREADY;
+ return false;
+ }
+
+ return cacheError(camera_open, unit, mode, &m_handle);
+ }
+
+ bool close()
+ {
+ if (!isOpen())
+ return true;
+
+ const bool success = cacheError(camera_close, m_handle);
+ m_handle = CAMERA_HANDLE_INVALID;
+
+ return success;
+ }
+
+ camera_handle_t get() const
+ {
+ return m_handle;
+ }
+
+ bool isOpen() const
+ {
+ return m_handle != CAMERA_HANDLE_INVALID;
+ }
+
+ camera_error_t lastError() const
+ {
+ return m_lastError;
+ }
+
+private:
+ template <typename Func, typename ...Args>
+ bool cacheError(Func f, Args &&...args)
+ {
+ m_lastError = f(std::forward<Args>(args)...);
+
+ return m_lastError == CAMERA_EOK;
+ }
+
+ camera_handle_t m_handle = CAMERA_HANDLE_INVALID;
+ camera_error_t m_lastError = CAMERA_EOK;
+};
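+
+// Typical usage of the wrapper (sketch; CAMERA_UNIT_FRONT stands in for
+// whatever unit the target actually reports):
+//
+//     QQnxCameraHandle handle;
+//     if (!handle.open(CAMERA_UNIT_FRONT, CAMERA_MODE_RW))
+//         qWarning("open failed (0x%x)", handle.lastError());
+//     // the camera is closed automatically when handle goes out of scope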
+
+#endif
diff --git a/src/plugins/multimedia/qnx/camera/qqnximagecapture.cpp b/src/plugins/multimedia/qnx/camera/qqnximagecapture.cpp
new file mode 100644
index 000000000..3983dddbb
--- /dev/null
+++ b/src/plugins/multimedia/qnx/camera/qqnximagecapture.cpp
@@ -0,0 +1,257 @@
+// Copyright (C) 2016 Research In Motion
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qqnximagecapture_p.h"
+
+#include "qqnxplatformcamera_p.h"
+#include "qqnxmediacapturesession_p.h"
+#include "qqnxcamera_p.h"
+#include "qfile.h"
+
+#include <private/qmediastoragelocation_p.h>
+
+#include <QtCore/qfileinfo.h>
+#include <QtCore/qfuture.h>
+#include <QtCore/qpromise.h>
+#include <QtCore/qthread.h>
+
+#include <camera/camera_api.h>
+
+using namespace Qt::Literals::StringLiterals;
+
+static QString formatExtension(QImageCapture::FileFormat format)
+{
+ switch (format) {
+ case QImageCapture::JPEG:
+ return u"jpg"_s;
+ case QImageCapture::PNG:
+ return u"png"_s;
+ case QImageCapture::WebP:
+ case QImageCapture::Tiff:
+ case QImageCapture::UnspecifiedFormat:
+ break;
+ }
+
+ return {};
+}
+
+static QString resolveFileName(const QString &fileName, QImageCapture::FileFormat format)
+{
+ const QString extension = formatExtension(format);
+
+ if (extension.isEmpty())
+ return {};
+
+ if (fileName.isEmpty()) {
+ return QMediaStorageLocation::generateFileName(QString(),
+ QStandardPaths::PicturesLocation, extension);
+ }
+
+ // match the suffix including its dot; use absoluteFilePath() because
+ // canonicalFilePath() returns an empty string for not-yet-existing files
+ if (fileName.endsWith(u".%1"_s.arg(extension)))
+ return QFileInfo(fileName).absoluteFilePath();
+
+ QString path = fileName;
+ path.append(u".%1"_s.arg(extension));
+
+ return QFileInfo(path).absoluteFilePath();
+}
+
+QT_BEGIN_NAMESPACE
+
+QQnxImageCapture::QQnxImageCapture(QImageCapture *parent)
+ : QPlatformImageCapture(parent)
+{
+}
+
+bool QQnxImageCapture::isReadyForCapture() const
+{
+ return m_camera && m_camera->isActive();
+}
+
+int QQnxImageCapture::capture(const QString &fileName)
+{
+ if (!isReadyForCapture()) {
+ Q_EMIT error(-1, QImageCapture::NotReadyError, QPlatformImageCapture::msgCameraNotReady());
+ return -1;
+ }
+
+ // default to PNG format if no format has been specified
+ const QImageCapture::FileFormat format =
+ m_settings.format() == QImageCapture::UnspecifiedFormat
+ ? QImageCapture::PNG : m_settings.format();
+
+ const QString resolvedFileName = resolveFileName(fileName, format);
+
+ if (resolvedFileName.isEmpty()) {
+ const QString errorMessage = (u"Invalid file format: %1"_s).arg(
+ QImageCapture::fileFormatName(format));
+
+ Q_EMIT error(-1, QImageCapture::NotSupportedFeatureError, errorMessage);
+ return -1;
+ }
+
+ const int id = m_lastId++;
+
+ auto callback = [this, id, fn=std::move(resolvedFileName)](const QVideoFrame &frame) {
+ saveFrame(id, frame, fn);
+ };
+
+ m_camera->requestVideoFrame(std::move(callback));
+
+ return id;
+}
+
+int QQnxImageCapture::captureToBuffer()
+{
+ if (!isReadyForCapture()) {
+ Q_EMIT error(-1, QImageCapture::NotReadyError, QPlatformImageCapture::msgCameraNotReady());
+ return -1;
+ }
+
+ const int id = m_lastId++;
+
+ auto callback = [this, id](const QVideoFrame &frame) { decodeFrame(id, frame); };
+
+ m_camera->requestVideoFrame(std::move(callback));
+
+ return id;
+}
+
+QFuture<QImage> QQnxImageCapture::decodeFrame(int id, const QVideoFrame &frame)
+{
+ if (!frame.isValid()) {
+ Q_EMIT error(id, QImageCapture::NotReadyError, u"Invalid frame"_s);
+ return {};
+ }
+
+ QPromise<QImage> promise;
+ QFuture<QImage> future = promise.future();
+
+ // converting a QVideoFrame to QImage is an expensive operation
+ // run it on a background thread to prevent it from stalling the UI
+ auto runner = [frame, promise=std::move(promise)]() mutable {
+ promise.start();
+ promise.addResult(frame.toImage());
+ promise.finish();
+ };
+
+ auto *worker = QThread::create(std::move(runner));
+
+ auto onFinished = [this, worker, id, future]() mutable {
+ worker->deleteLater();
+
+ if (future.isValid()) {
+ Q_EMIT imageCaptured(id, future.result());
+ } else {
+ qWarning("QQnxImageCapture: failed to capture image to buffer");
+ }
+ };
+
+ connect(worker, &QThread::finished, this, std::move(onFinished));
+
+ Q_EMIT imageExposed(id);
+
+ worker->start();
+
+ return future;
+}
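+
+// The offload pattern above in miniature (sketch, not part of this class;
+// compute() is a hypothetical expensive function):
+//
+//     QPromise<int> promise;
+//     QFuture<int> future = promise.future();
+//     auto *worker = QThread::create([p = std::move(promise)]() mutable {
+//         p.start();
+//         p.addResult(compute());
+//         p.finish();
+//     });
+//     QObject::connect(worker, &QThread::finished, worker, &QObject::deleteLater);
+//     worker->start();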
+
+void QQnxImageCapture::saveFrame(int id, const QVideoFrame &frame, const QString &fileName)
+{
+ QFuture<QImage> decodeFuture = decodeFrame(id, frame);
+
+ if (decodeFuture.isCanceled())
+ return;
+
+ QPromise<bool> promise;
+ QFuture<bool> saveFuture = promise.future();
+
+ // writing a QImage to disk is a _very_ expensive operation
+ // run it on a background thread to prevent it from stalling the UI
+ auto runner = [future=std::move(decodeFuture),
+ promise=std::move(promise), fileName]() mutable {
+ promise.start();
+ promise.addResult(future.result().save(fileName));
+ promise.finish();
+ };
+
+ auto *worker = QThread::create(std::move(runner));
+
+ auto onFinished = [this, worker, id, future=std::move(saveFuture), fn=std::move(fileName)]() {
+ worker->deleteLater();
+
+ if (future.isValid() && future.result())
+ Q_EMIT imageSaved(id, fn);
+ else
+ Q_EMIT error(id, QImageCapture::NotSupportedFeatureError, u"Failed to save image"_s);
+ };
+
+ connect(worker, &QThread::finished, this, std::move(onFinished));
+
+ worker->start();
+}
+
+QImageEncoderSettings QQnxImageCapture::imageSettings() const
+{
+ return m_settings;
+}
+
+void QQnxImageCapture::setImageSettings(const QImageEncoderSettings &settings)
+{
+ m_settings = settings;
+}
+
+void QQnxImageCapture::setCaptureSession(QQnxMediaCaptureSession *captureSession)
+{
+ if (m_session == captureSession)
+ return;
+
+ if (m_session)
+ m_session->disconnect(this);
+
+ m_session = captureSession;
+
+ if (m_session) {
+ connect(m_session, &QQnxMediaCaptureSession::cameraChanged,
+ this, &QQnxImageCapture::onCameraChanged);
+ }
+
+ onCameraChanged();
+}
+
+void QQnxImageCapture::onCameraChanged()
+{
+ if (m_camera)
+ m_camera->disconnect(this);
+
+ m_camera = m_session ? static_cast<QQnxPlatformCamera*>(m_session->camera()) : nullptr;
+
+ if (m_camera) {
+ connect(m_camera, &QQnxPlatformCamera::activeChanged,
+ this, &QQnxImageCapture::onCameraChanged);
+ }
+
+ updateReadyForCapture();
+}
+
+void QQnxImageCapture::onCameraActiveChanged(bool active)
+{
+ Q_UNUSED(active);
+
+ updateReadyForCapture();
+}
+
+void QQnxImageCapture::updateReadyForCapture()
+{
+ const bool readyForCapture = isReadyForCapture();
+
+ if (m_lastReadyForCapture == readyForCapture)
+ return;
+
+ m_lastReadyForCapture = readyForCapture;
+
+ Q_EMIT readyForCaptureChanged(m_lastReadyForCapture);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qqnximagecapture_p.cpp"
diff --git a/src/plugins/multimedia/qnx/camera/qqnximagecapture_p.h b/src/plugins/multimedia/qnx/camera/qqnximagecapture_p.h
new file mode 100644
index 000000000..832039654
--- /dev/null
+++ b/src/plugins/multimedia/qnx/camera/qqnximagecapture_p.h
@@ -0,0 +1,63 @@
+// Copyright (C) 2016 Research In Motion
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QQnxImageCapture_H
+#define QQnxImageCapture_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformimagecapture_p.h>
+
+#include <QtCore/qfuture.h>
+
+QT_BEGIN_NAMESPACE
+
+class QQnxMediaCaptureSession;
+class QQnxPlatformCamera;
+
+class QThread;
+
+class QQnxImageCapture : public QPlatformImageCapture
+{
+ Q_OBJECT
+public:
+ explicit QQnxImageCapture(QImageCapture *parent);
+
+ bool isReadyForCapture() const override;
+
+ int capture(const QString &fileName) override;
+ int captureToBuffer() override;
+
+ QImageEncoderSettings imageSettings() const override;
+ void setImageSettings(const QImageEncoderSettings &settings) override;
+
+ void setCaptureSession(QQnxMediaCaptureSession *session);
+
+private:
+ QFuture<QImage> decodeFrame(int id, const QVideoFrame &frame);
+ void saveFrame(int id, const QVideoFrame &frame, const QString &fileName);
+
+ void onCameraChanged();
+ void onCameraActiveChanged(bool active);
+ void updateReadyForCapture();
+
+ QQnxMediaCaptureSession *m_session = nullptr;
+ QQnxPlatformCamera *m_camera = nullptr;
+
+ int m_lastId = 0;
+ QImageEncoderSettings m_settings;
+
+ bool m_lastReadyForCapture = false;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/qnx/camera/qqnxplatformcamera.cpp b/src/plugins/multimedia/qnx/camera/qqnxplatformcamera.cpp
new file mode 100644
index 000000000..b604f4561
--- /dev/null
+++ b/src/plugins/multimedia/qnx/camera/qqnxplatformcamera.cpp
@@ -0,0 +1,426 @@
+// Copyright (C) 2016 Research In Motion
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qqnxplatformcamera_p.h"
+#include "qqnxcameraframebuffer_p.h"
+#include "qqnxmediacapturesession_p.h"
+#include "qqnxvideosink_p.h"
+
+#include <qcameradevice.h>
+#include <qmediadevices.h>
+
+#include <private/qmediastoragelocation_p.h>
+#include <private/qvideoframe_p.h>
+
+#include <camera/camera_api.h>
+#include <camera/camera_3a.h>
+
+#include <algorithm>
+#include <array>
+
+#include <dlfcn.h>
+
+struct FocusModeMapping
+{
+ QCamera::FocusMode qt;
+ camera_focusmode_t qnx;
+};
+
+constexpr std::array<FocusModeMapping, 6> focusModes {{
+ { QCamera::FocusModeAuto, CAMERA_FOCUSMODE_CONTINUOUS_AUTO },
+ { QCamera::FocusModeAutoFar, CAMERA_FOCUSMODE_CONTINUOUS_AUTO },
+ { QCamera::FocusModeInfinity, CAMERA_FOCUSMODE_CONTINUOUS_AUTO },
+ { QCamera::FocusModeAutoNear, CAMERA_FOCUSMODE_CONTINUOUS_MACRO },
+ { QCamera::FocusModeHyperfocal, CAMERA_FOCUSMODE_EDOF },
+ { QCamera::FocusModeManual, CAMERA_FOCUSMODE_MANUAL },
+}};
+
+template <typename Mapping, typename From, typename To, size_t N>
+static constexpr To convert(const std::array<Mapping, N> &mapping,
+ From Mapping::* from, To Mapping::* to, From value, To defaultValue)
+{
+ for (const Mapping &m : mapping) {
+ const auto fromValue = m.*from;
+ const auto toValue = m.*to;
+
+ if (value == fromValue)
+ return toValue;
+ }
+
+ return defaultValue;
+}
+
+static constexpr camera_focusmode_t qnxFocusMode(QCamera::FocusMode mode)
+{
+ return convert(focusModes, &FocusModeMapping::qt,
+ &FocusModeMapping::qnx, mode, CAMERA_FOCUSMODE_CONTINUOUS_AUTO);
+}
+
+static constexpr QCamera::FocusMode qtFocusMode(camera_focusmode_t mode)
+{
+ return convert(focusModes, &FocusModeMapping::qnx,
+ &FocusModeMapping::qt, mode, QCamera::FocusModeAuto);
+}
+
+QT_BEGIN_NAMESPACE
+
+QQnxPlatformCamera::QQnxPlatformCamera(QCamera *parent)
+ : QPlatformCamera(parent)
+{
+ if (parent)
+ setCamera(parent->cameraDevice());
+ else
+ setCamera(QMediaDevices::defaultVideoInput());
+}
+
+QQnxPlatformCamera::~QQnxPlatformCamera()
+{
+ stop();
+}
+
+bool QQnxPlatformCamera::isActive() const
+{
+ return m_qnxCamera && m_qnxCamera->isActive();
+}
+
+void QQnxPlatformCamera::setActive(bool active)
+{
+ if (active)
+ start();
+ else
+ stop();
+}
+
+void QQnxPlatformCamera::start()
+{
+ if (!m_qnxCamera || isActive())
+ return;
+
+ if (m_session)
+ m_videoSink = m_session->videoSink();
+
+ m_qnxCamera->start();
+
+ Q_EMIT activeChanged(true);
+}
+
+void QQnxPlatformCamera::stop()
+{
+ if (!m_qnxCamera)
+ return;
+
+ m_qnxCamera->stop();
+
+ m_videoSink = nullptr;
+
+ Q_EMIT activeChanged(false);
+}
+
+void QQnxPlatformCamera::setCamera(const QCameraDevice &camera)
+{
+ if (m_cameraDevice == camera)
+ return;
+
+ const auto cameraUnit = static_cast<camera_unit_t>(camera.id().toUInt());
+
+ m_qnxCamera = std::make_unique<QQnxCamera>(cameraUnit);
+
+ connect(m_qnxCamera.get(), &QQnxCamera::focusModeChanged,
+ this, [this](camera_focusmode_t mode) { Q_EMIT focusModeChanged(qtFocusMode(mode)); });
+ connect(m_qnxCamera.get(), &QQnxCamera::customFocusPointChanged,
+ this, &QQnxPlatformCamera::customFocusPointChanged);
+ connect(m_qnxCamera.get(), &QQnxCamera::frameAvailable,
+ this, &QQnxPlatformCamera::onFrameAvailable, Qt::QueuedConnection);
+
+ m_cameraDevice = camera;
+
+ updateCameraFeatures();
+}
+
+bool QQnxPlatformCamera::setCameraFormat(const QCameraFormat &format)
+{
+ const QSize resolution = format.resolution();
+
+ if (resolution.isEmpty()) {
+ qWarning("QQnxPlatformCamera: invalid resolution requested");
+ return false;
+ }
+
+ return m_qnxCamera->setCameraFormat(resolution.width(),
+ resolution.height(), format.maxFrameRate());
+}
+
+void QQnxPlatformCamera::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+ if (m_session == session)
+ return;
+
+ m_session = static_cast<QQnxMediaCaptureSession *>(session);
+}
+
+bool QQnxPlatformCamera::isFocusModeSupported(QCamera::FocusMode mode) const
+{
+ if (!m_qnxCamera)
+ return false;
+
+ return m_qnxCamera->supportedFocusModes().contains(::qnxFocusMode(mode));
+}
+
+void QQnxPlatformCamera::setFocusMode(QCamera::FocusMode mode)
+{
+ if (!m_qnxCamera)
+ return;
+
+ m_qnxCamera->setFocusMode(::qnxFocusMode(mode));
+}
+
+void QQnxPlatformCamera::setCustomFocusPoint(const QPointF &point)
+{
+ if (!m_qnxCamera)
+ return;
+
+ m_qnxCamera->setCustomFocusPoint(point);
+}
+
+void QQnxPlatformCamera::setFocusDistance(float distance)
+{
+ if (!m_qnxCamera)
+ return;
+
+ const int maxDistance = m_qnxCamera->maxFocusStep();
+
+ if (maxDistance < 0)
+ return;
+
+ const int qnxDistance = maxDistance * std::clamp(distance, 0.0f, 1.0f);
+
+ m_qnxCamera->setManualFocusStep(qnxDistance);
+}
+
+void QQnxPlatformCamera::zoomTo(float factor, float)
+{
+ if (!m_qnxCamera)
+ return;
+
+ const uint32_t minZoom = m_qnxCamera->minimumZoomLevel();
+ const uint32_t maxZoom = m_qnxCamera->maximumZoomLevel();
+
+ if (maxZoom <= minZoom)
+ return;
+
+ // QNX has an integer based API. Interpolate between the levels according to the factor we get
+ const float max = maxZoomFactor();
+ const float min = minZoomFactor();
+
+ if (max <= min)
+ return;
+
+ const float clamped = qBound(min, factor, max);
+
+ const uint32_t zoom = minZoom
+ + static_cast<uint32_t>(qRound((clamped - min) * (maxZoom - minZoom) / (max - min)));
+
+ if (m_qnxCamera->setZoomFactor(zoom))
+ zoomFactorChanged(clamped); // report the clamped factor, not the rebased one
+}
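+
+// Worked example (assumed numbers): with minZoom = 0, maxZoom = 64 and a
+// factor range of 1.0 .. 8.0, a requested factor of 4.5 maps to level
+// 0 + round((4.5 - 1.0) * 64 / 7.0) = 32, the middle of the zoom range.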
+
+void QQnxPlatformCamera::setExposureCompensation(float ev)
+{
+ if (!m_qnxCamera)
+ return;
+
+ m_qnxCamera->setEvOffset(ev);
+}
+
+int QQnxPlatformCamera::isoSensitivity() const
+{
+ if (!m_qnxCamera)
+ return 0;
+
+ return m_qnxCamera->manualIsoSensitivity();
+}
+
+void QQnxPlatformCamera::setManualIsoSensitivity(int value)
+{
+ if (!m_qnxCamera)
+ return;
+
+ const uint32_t isoValue = std::max(0, value);
+
+ m_qnxCamera->setManualIsoSensitivity(isoValue);
+}
+
+void QQnxPlatformCamera::setManualExposureTime(float seconds)
+{
+ if (!m_qnxCamera)
+ return;
+
+ m_qnxCamera->setManualExposureTime(seconds);
+}
+
+float QQnxPlatformCamera::exposureTime() const
+{
+ if (!m_qnxCamera)
+ return 0.0;
+
+ return static_cast<float>(m_qnxCamera->manualExposureTime());
+}
+
+bool QQnxPlatformCamera::isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const
+{
+ if (m_maxColorTemperature != 0)
+ return true;
+
+ return mode == QCamera::WhiteBalanceAuto;
+}
+
+void QQnxPlatformCamera::setWhiteBalanceMode(QCamera::WhiteBalanceMode mode)
+{
+ if (!m_qnxCamera)
+ return;
+
+ if (mode == QCamera::WhiteBalanceAuto) {
+ m_qnxCamera->setWhiteBalanceMode(CAMERA_WHITEBALANCEMODE_AUTO);
+ } else {
+ m_qnxCamera->setWhiteBalanceMode(CAMERA_WHITEBALANCEMODE_MANUAL);
+ setColorTemperature(colorTemperatureForWhiteBalance(mode));
+ }
+}
+
+void QQnxPlatformCamera::setColorTemperature(int temperature)
+{
+ if (!m_qnxCamera)
+ return;
+
+ const auto normalizedTemp = std::clamp<uint32_t>(std::max(0, temperature),
+ m_minColorTemperature, m_maxColorTemperature);
+
+ if (m_qnxCamera->hasContinuousWhiteBalanceValues()) {
+ m_qnxCamera->setManualWhiteBalance(normalizedTemp);
+ } else {
+ uint32_t delta = std::numeric_limits<uint32_t>::max();
+ uint32_t closestTemp = 0;
+
+ for (uint32_t value : m_qnxCamera->supportedWhiteBalanceValues()) {
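+ // std::minmax orders the pair so the unsigned subtraction below cannot
+ // underflow: currentDelta is |value - normalizedTemp|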
+ const auto &[min, max] = std::minmax(value, normalizedTemp);
+ const uint32_t currentDelta = max - min;
+
+ if (currentDelta < delta) {
+ closestTemp = value;
+ delta = currentDelta;
+ }
+ }
+
+ m_qnxCamera->setManualWhiteBalance(closestTemp);
+ }
+}
+
+bool QQnxPlatformCamera::startVideoRecording()
+{
+ if (!m_qnxCamera) {
+ qWarning("QQnxPlatformCamera: cannot start video recording - no no camera assigned");
+ return false;
+ }
+
+ if (!isVideoEncodingSupported()) {
+ qWarning("QQnxPlatformCamera: cannot start video recording - not supported");
+ return false;
+ }
+
+ if (!m_qnxCamera->isActive()) {
+ qWarning("QQnxPlatformCamera: cannot start video recording - camera not started");
+ return false;
+ }
+
+ const QString container = m_encoderSettings.mimeType().preferredSuffix();
+ const QString location = QMediaStorageLocation::generateFileName(m_outputUrl.toLocalFile(),
+ QStandardPaths::MoviesLocation, container);
+
+#if 0
+ {
+ static void *libScreen = nullptr;
+
+ if (!libScreen)
+ libScreen = dlopen("/usr/lib/libscreen.so.1", RTLD_GLOBAL);
+ }
+#endif
+
+ qDebug() << "Recording to" << location;
+ return m_qnxCamera->startVideoRecording(location);
+}
+
+void QQnxPlatformCamera::requestVideoFrame(VideoFrameCallback cb)
+{
+ m_videoFrameRequests.emplace_back(std::move(cb));
+}
+
+bool QQnxPlatformCamera::isVideoEncodingSupported() const
+{
+ return m_qnxCamera && m_qnxCamera->hasFeature(CAMERA_FEATURE_VIDEO);
+}
+
+void QQnxPlatformCamera::setOutputUrl(const QUrl &url)
+{
+ m_outputUrl = url;
+}
+
+void QQnxPlatformCamera::setMediaEncoderSettings(const QMediaEncoderSettings &settings)
+{
+ m_encoderSettings = settings;
+}
+
+void QQnxPlatformCamera::updateCameraFeatures()
+{
+ if (!m_qnxCamera)
+ return;
+
+ QCamera::Features features = {};
+
+ if (m_qnxCamera->hasFeature(CAMERA_FEATURE_REGIONFOCUS))
+ features |= QCamera::Feature::CustomFocusPoint;
+
+ supportedFeaturesChanged(features);
+
+ minimumZoomFactorChanged(m_qnxCamera->minimumZoomLevel());
+ maximumZoomFactorChanged(m_qnxCamera->maximumZoomLevel());
+
+ const QList<uint32_t> wbValues = m_qnxCamera->supportedWhiteBalanceValues();
+
+ if (wbValues.isEmpty()) {
+ m_minColorTemperature = m_maxColorTemperature = 0;
+ } else {
+ const auto &[minTemp, maxTemp] = std::minmax_element(wbValues.begin(), wbValues.end());
+
+ m_minColorTemperature = *minTemp;
+ m_maxColorTemperature = *maxTemp;
+ }
+}
+
+void QQnxPlatformCamera::onFrameAvailable()
+{
+ if (!m_videoSink)
+ return;
+
+ std::unique_ptr<QQnxCameraFrameBuffer> currentFrameBuffer = m_qnxCamera->takeCurrentFrame();
+
+ if (!currentFrameBuffer)
+ return;
+
+ QVideoFrameFormat format(currentFrameBuffer->size(), currentFrameBuffer->pixelFormat());
+ const QVideoFrame actualFrame =
+ QVideoFramePrivate::createFrame(std::move(currentFrameBuffer), std::move(format));
+
+ m_videoSink->setVideoFrame(actualFrame);
+
+ if (!m_videoFrameRequests.empty()) {
+ VideoFrameCallback cb = std::move(m_videoFrameRequests.front());
+ m_videoFrameRequests.pop_front();
+ cb(actualFrame);
+ }
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qqnxplatformcamera_p.cpp"
diff --git a/src/plugins/multimedia/qnx/camera/qqnxplatformcamera_p.h b/src/plugins/multimedia/qnx/camera/qqnxplatformcamera_p.h
new file mode 100644
index 000000000..3cbd17a4f
--- /dev/null
+++ b/src/plugins/multimedia/qnx/camera/qqnxplatformcamera_p.h
@@ -0,0 +1,113 @@
+// Copyright (C) 2016 Research In Motion
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QQNXPLATFORMCAMERA_H
+#define QQNXPLATFORMCAMERA_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qqnxcamera_p.h"
+
+#include <private/qplatformcamera_p.h>
+#include <private/qplatformmediarecorder_p.h>
+
+#include <QtCore/qlist.h>
+#include <QtCore/qmutex.h>
+#include <QtCore/qurl.h>
+
+#include <deque>
+#include <functional>
+#include <memory>
+
+QT_BEGIN_NAMESPACE
+
+class QQnxPlatformCameraFrameBuffer;
+class QQnxMediaCaptureSession;
+class QQnxVideoSink;
+class QQnxCameraFrameBuffer;
+
+class QQnxPlatformCamera : public QPlatformCamera
+{
+ Q_OBJECT
+public:
+ explicit QQnxPlatformCamera(QCamera *parent);
+ ~QQnxPlatformCamera();
+
+ bool isActive() const override;
+ void setActive(bool active) override;
+ void start();
+ void stop();
+
+ void setCamera(const QCameraDevice &camera) override;
+
+ bool setCameraFormat(const QCameraFormat &format) override;
+
+ void setCaptureSession(QPlatformMediaCaptureSession *session) override;
+
+ bool isFocusModeSupported(QCamera::FocusMode mode) const override;
+ void setFocusMode(QCamera::FocusMode mode) override;
+
+ void setCustomFocusPoint(const QPointF &point) override;
+
+ void setFocusDistance(float distance) override;
+
+ void zoomTo(float newZoomFactor, float rate = -1.) override;
+
+ void setExposureCompensation(float ev) override;
+
+ int isoSensitivity() const override;
+ void setManualIsoSensitivity(int value) override;
+ void setManualExposureTime(float seconds) override;
+ float exposureTime() const override;
+
+ bool isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const override;
+ void setWhiteBalanceMode(QCamera::WhiteBalanceMode mode) override;
+ void setColorTemperature(int temperature) override;
+
+ void setOutputUrl(const QUrl &url);
+ void setMediaEncoderSettings(const QMediaEncoderSettings &settings);
+
+ bool startVideoRecording();
+
+ using VideoFrameCallback = std::function<void(const QVideoFrame&)>;
+ void requestVideoFrame(VideoFrameCallback cb);
+
+private:
+ void updateCameraFeatures();
+ void setColorTemperatureInternal(unsigned temp);
+
+ bool isVideoEncodingSupported() const;
+
+ void onFrameAvailable();
+
+ QQnxMediaCaptureSession *m_session = nullptr;
+ QQnxVideoSink *m_videoSink = nullptr;
+
+ QCameraDevice m_cameraDevice;
+
+ QUrl m_outputUrl;
+
+ QMediaEncoderSettings m_encoderSettings;
+
+ uint32_t m_minColorTemperature = 0;
+ uint32_t m_maxColorTemperature = 0;
+
+ QMutex m_currentFrameMutex;
+
+ std::unique_ptr<QQnxCamera> m_qnxCamera;
+ std::unique_ptr<QQnxCameraFrameBuffer> m_currentFrame;
+
+ std::deque<VideoFrameCallback> m_videoFrameRequests;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/qnx/capture/qqnxaudiorecorder.cpp b/src/plugins/multimedia/qnx/capture/qqnxaudiorecorder.cpp
new file mode 100644
index 000000000..00a20bbd7
--- /dev/null
+++ b/src/plugins/multimedia/qnx/capture/qqnxaudiorecorder.cpp
@@ -0,0 +1,284 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qqnxaudiorecorder_p.h"
+#include "qqnxmediaeventthread_p.h"
+
+#include <QtCore/qcoreapplication.h>
+
+#include <private/qmediastoragelocation_p.h>
+
+#include <mm/renderer.h>
+
+#include <sys/stat.h>
+#include <sys/strm.h>
+
+static QByteArray buildDevicePath(const QByteArray &deviceId, const QMediaEncoderSettings &settings)
+{
+ QByteArray devicePath = QByteArrayLiteral("snd:/dev/snd/") + deviceId + QByteArrayLiteral("?");
+
+ if (settings.audioSampleRate() > 0)
+ devicePath += QByteArrayLiteral("frate=") + QByteArray::number(settings.audioSampleRate());
+
+ if (settings.audioChannelCount() > 0)
+ devicePath += QByteArrayLiteral("nchan=") + QByteArray::number(settings.audioChannelCount());
+
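+ // e.g. a deviceId of "pcm_in" with 44100 Hz / 2 channels yields
+ // "snd:/dev/snd/pcm_in?frate=44100nchan=2" ("pcm_in" is a hypothetical
+ // id; the string merely traces what this helper produces)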
+ return devicePath;
+}
+
+QT_BEGIN_NAMESPACE
+
+QQnxAudioRecorder::QQnxAudioRecorder(QObject *parent)
+ : QObject(parent)
+{
+ openConnection();
+}
+
+QQnxAudioRecorder::~QQnxAudioRecorder()
+{
+ stop();
+ closeConnection();
+}
+
+void QQnxAudioRecorder::openConnection()
+{
+ static int idCounter = 0;
+
+ m_connection = ConnectionUniquePtr { mmr_connect(nullptr) };
+
+ if (!m_connection) {
+ qWarning("QQnxAudioRecorder: Unable to connect to the multimedia renderer");
+ return;
+ }
+
+ m_id = idCounter++;
+
+ char contextName[256];
+
+ std::snprintf(contextName, sizeof contextName, "QQnxAudioRecorder_%d_%lld",
+ m_id, static_cast<long long>(QCoreApplication::applicationPid()));
+
+ m_context = ContextUniquePtr { mmr_context_create(m_connection.get(),
+ contextName, 0, S_IRWXU|S_IRWXG|S_IRWXO) };
+
+ if (m_context) {
+ startMonitoring();
+ } else {
+ qWarning("QQnxAudioRecorder: Unable to create context");
+ closeConnection();
+ }
+}
+
+void QQnxAudioRecorder::closeConnection()
+{
+ m_context.reset();
+ m_connection.reset();
+
+ stopMonitoring();
+}
+
+void QQnxAudioRecorder::attach()
+{
+ if (isAttached())
+ return;
+
+ const QString container = m_encoderSettings.mimeType().preferredSuffix();
+ const QString location = QMediaStorageLocation::generateFileName(m_outputUrl.toLocalFile(),
+ QStandardPaths::MusicLocation, container);
+
+ m_audioId = mmr_output_attach(m_context.get(), qPrintable(location), "file");
+
+ if (m_audioId == -1) {
+ qWarning("QQnxAudioRecorder: mmr_output_attach() for file failed");
+ return;
+ }
+
+ configureOutputBitRate();
+
+ const QByteArray devicePath = buildDevicePath(m_inputDeviceId, m_encoderSettings);
+
+ if (mmr_input_attach(m_context.get(), devicePath.constData(), "track") != 0) {
+ qWarning("QQnxAudioRecorder: mmr_input_attach() failed");
+ detach();
+ } else {
+ Q_EMIT actualLocationChanged(location);
+ }
+}
+
+void QQnxAudioRecorder::detach()
+{
+ if (!isAttached())
+ return;
+
+ mmr_input_detach(m_context.get());
+ mmr_output_detach(m_context.get(), m_audioId);
+
+ m_audioId = -1;
+}
+
+void QQnxAudioRecorder::configureOutputBitRate()
+{
+ const int bitRate = m_encoderSettings.audioBitRate();
+
+ if (!isAttached() || bitRate <= 0)
+ return;
+
+ char buf[12];
+ std::snprintf(buf, sizeof buf, "%d", bitRate);
+
+ strm_dict_t *dict = strm_dict_new();
+ dict = strm_dict_set(dict, "audio_bitrate", buf);
+
+ if (mmr_output_parameters(m_context.get(), m_audioId, dict) != 0)
+ qWarning("mmr_output_parameters: setting bitrate failed");
+}
+
+bool QQnxAudioRecorder::isAttached() const
+{
+ return m_context && m_audioId != -1;
+}
+
+void QQnxAudioRecorder::setInputDeviceId(const QByteArray &id)
+{
+ m_inputDeviceId = id;
+}
+
+void QQnxAudioRecorder::setOutputUrl(const QUrl &url)
+{
+ m_outputUrl = url;
+}
+
+void QQnxAudioRecorder::setMediaEncoderSettings(const QMediaEncoderSettings &settings)
+{
+ m_encoderSettings = settings;
+}
+
+void QQnxAudioRecorder::record()
+{
+ if (!isAttached()) {
+ attach();
+
+ if (!isAttached())
+ return;
+ }
+
+ if (mmr_play(m_context.get()) != 0)
+ qWarning("QQnxAudioRecorder: mmr_play() failed");
+}
+
+void QQnxAudioRecorder::stop()
+{
+ if (!isAttached())
+ return;
+
+ mmr_stop(m_context.get());
+
+ detach();
+}
+
+void QQnxAudioRecorder::startMonitoring()
+{
+ m_eventThread = std::make_unique<QQnxMediaEventThread>(m_context.get());
+
+ connect(m_eventThread.get(), &QQnxMediaEventThread::eventPending,
+ this, &QQnxAudioRecorder::readEvents);
+
+ m_eventThread->setObjectName(QStringLiteral("MmrAudioEventThread-") + QString::number(m_id));
+ m_eventThread->start();
+}
+
+void QQnxAudioRecorder::stopMonitoring()
+{
+ if (m_eventThread)
+ m_eventThread.reset();
+}
+
+void QQnxAudioRecorder::readEvents()
+{
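+ // Drain every pending event, then ask the event thread to re-arm its
+ // notification pulse via signalRead().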
+ while (const mmr_event_t *event = mmr_event_get(m_context.get())) {
+ if (event->type == MMR_EVENT_NONE)
+ break;
+
+ switch (event->type) {
+ case MMR_EVENT_STATUS:
+ handleMmEventStatus(event);
+ break;
+ case MMR_EVENT_STATE:
+ handleMmEventState(event);
+ break;
+ case MMR_EVENT_ERROR:
+ handleMmEventError(event);
+ break;
+ case MMR_EVENT_METADATA:
+ case MMR_EVENT_NONE:
+ case MMR_EVENT_OVERFLOW:
+ case MMR_EVENT_WARNING:
+ case MMR_EVENT_PLAYLIST:
+ case MMR_EVENT_INPUT:
+ case MMR_EVENT_OUTPUT:
+ case MMR_EVENT_CTXTPAR:
+ case MMR_EVENT_TRKPAR:
+ case MMR_EVENT_OTHER:
+ break;
+ }
+ }
+
+ if (m_eventThread)
+ m_eventThread->signalRead();
+}
+
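+// STATUS events report the current position in event->pos_str as a base-10
+// millisecond string (an assumption consistent with the parsing below).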
+void QQnxAudioRecorder::handleMmEventStatus(const mmr_event_t *event)
+{
+ if (!event || event->type != MMR_EVENT_STATUS)
+ return;
+
+ if (!event->pos_str)
+ return;
+
+ const QByteArray valueBa(event->pos_str);
+
+ bool ok;
+ const qint64 duration = valueBa.toLongLong(&ok);
+
+ if (!ok)
+ qCritical("Could not parse duration from '%s'", valueBa.constData());
+ else
+ Q_EMIT durationChanged(duration);
+}
+
+void QQnxAudioRecorder::handleMmEventState(const mmr_event_t *event)
+{
+ if (!event || event->type != MMR_EVENT_STATE)
+ return;
+
+ switch (event->state) {
+ case MMR_STATE_DESTROYED:
+ case MMR_STATE_IDLE:
+ case MMR_STATE_STOPPED:
+ Q_EMIT stateChanged(QMediaRecorder::StoppedState);
+ break;
+ case MMR_STATE_PLAYING:
+ Q_EMIT stateChanged(QMediaRecorder::RecordingState);
+ break;
+ }
+}
+
+void QQnxAudioRecorder::handleMmEventError(const mmr_event_t *event)
+{
+ if (!event)
+ return;
+
+ // When playback is explicitly stopped using mmr_stop(), mm-renderer
+ // generates a STATE event. When the end of media is reached, an ERROR
+ // event is generated and the error code contained in the event information
+ // is set to MMR_ERROR_NONE. When an error causes playback to stop,
+ // the error code is set to something else.
+ if (event->details.error.info.error_code == MMR_ERROR_NONE) {
+ //TODO add error
+ Q_EMIT stateChanged(QMediaRecorder::StoppedState);
+ detach();
+ }
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qqnxaudiorecorder_p.cpp"
diff --git a/src/plugins/multimedia/qnx/capture/qqnxaudiorecorder_p.h b/src/plugins/multimedia/qnx/capture/qqnxaudiorecorder_p.h
new file mode 100644
index 000000000..f343cee14
--- /dev/null
+++ b/src/plugins/multimedia/qnx/capture/qqnxaudiorecorder_p.h
@@ -0,0 +1,103 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QQNXAUDIORECORDER_H
+#define QQNXAUDIORECORDER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "mmrenderertypes.h"
+
+#include <QByteArray>
+#include <QUrl>
+
+#include <QtCore/qobject.h>
+#include <QtCore/qtconfigmacros.h>
+
+#include <QtMultimedia/qmediarecorder.h>
+
+#include <private/qplatformmediarecorder_p.h>
+
+#include <mm/renderer.h>
+#include <mm/renderer/types.h>
+
+#include <memory>
+
+QT_BEGIN_NAMESPACE
+
+class QQnxMediaEventThread;
+
+class QQnxAudioRecorder : public QObject
+{
+ Q_OBJECT
+
+public:
+ explicit QQnxAudioRecorder(QObject *parent = nullptr);
+ ~QQnxAudioRecorder();
+
+ void setInputDeviceId(const QByteArray &id);
+ void setOutputUrl(const QUrl &url);
+ void setMediaEncoderSettings(const QMediaEncoderSettings &settings);
+
+ void record();
+ void stop();
+
+Q_SIGNALS:
+ void stateChanged(QMediaRecorder::RecorderState state);
+ void durationChanged(qint64 durationMs);
+ void actualLocationChanged(const QUrl &location);
+
+private:
+ void openConnection();
+ void closeConnection();
+ void attach();
+ void detach();
+ void configureOutputBitRate();
+ void startMonitoring();
+ void stopMonitoring();
+ void readEvents();
+ void handleMmEventStatus(const mmr_event_t *event);
+ void handleMmEventState(const mmr_event_t *event);
+ void handleMmEventError(const mmr_event_t *event);
+
+ bool isAttached() const;
+
+ struct ContextDeleter
+ {
+ void operator()(mmr_context_t *ctx) { if (ctx) mmr_context_destroy(ctx); }
+ };
+
+ struct ConnectionDeleter
+ {
+ void operator()(mmr_connection_t *conn) { if (conn) mmr_disconnect(conn); }
+ };
+
+ using ContextUniquePtr = std::unique_ptr<mmr_context_t, ContextDeleter>;
+ ContextUniquePtr m_context;
+
+ using ConnectionUniquePtr = std::unique_ptr<mmr_connection_t, ConnectionDeleter>;
+ ConnectionUniquePtr m_connection;
+
+ int m_id = -1;
+ int m_audioId = -1;
+
+ QByteArray m_inputDeviceId;
+
+ QUrl m_outputUrl;
+
+ QMediaEncoderSettings m_encoderSettings;
+
+ std::unique_ptr<QQnxMediaEventThread> m_eventThread;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/qnx/capture/qqnxmediacapturesession.cpp b/src/plugins/multimedia/qnx/capture/qqnxmediacapturesession.cpp
new file mode 100644
index 000000000..d73ca7e54
--- /dev/null
+++ b/src/plugins/multimedia/qnx/capture/qqnxmediacapturesession.cpp
@@ -0,0 +1,121 @@
+// Copyright (C) 2016 Research In Motion
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qqnxmediacapturesession_p.h"
+
+#include "qqnxaudioinput_p.h"
+#include "qqnxplatformcamera_p.h"
+#include "qqnximagecapture_p.h"
+#include "qqnxmediarecorder_p.h"
+#include "qqnxvideosink_p.h"
+#include "qvideosink.h"
+
+QT_BEGIN_NAMESPACE
+
+QQnxMediaCaptureSession::QQnxMediaCaptureSession()
+ : QPlatformMediaCaptureSession()
+{
+}
+
+QQnxMediaCaptureSession::~QQnxMediaCaptureSession()
+{
+}
+
+QPlatformCamera *QQnxMediaCaptureSession::camera()
+{
+ return m_camera;
+}
+
+void QQnxMediaCaptureSession::setCamera(QPlatformCamera *camera)
+{
+ if (camera == m_camera)
+ return;
+
+ if (m_camera)
+ m_camera->setCaptureSession(nullptr);
+
+ m_camera = static_cast<QQnxPlatformCamera *>(camera);
+
+ if (m_camera)
+ m_camera->setCaptureSession(this);
+
+ emit cameraChanged();
+}
+
+QPlatformImageCapture *QQnxMediaCaptureSession::imageCapture()
+{
+ return m_imageCapture;
+}
+
+void QQnxMediaCaptureSession::setImageCapture(QPlatformImageCapture *imageCapture)
+{
+ if (m_imageCapture == imageCapture)
+ return;
+
+ if (m_imageCapture)
+ m_imageCapture->setCaptureSession(nullptr);
+
+ m_imageCapture = static_cast<QQnxImageCapture *>(imageCapture);
+
+ if (m_imageCapture)
+ m_imageCapture->setCaptureSession(this);
+
+ emit imageCaptureChanged();
+}
+
+QPlatformMediaRecorder *QQnxMediaCaptureSession::mediaRecorder()
+{
+ return m_mediaRecorder;
+}
+
+void QQnxMediaCaptureSession::setMediaRecorder(QPlatformMediaRecorder *mediaRecorder)
+{
+ if (m_mediaRecorder == mediaRecorder)
+ return;
+
+ if (m_mediaRecorder)
+ m_mediaRecorder->setCaptureSession(nullptr);
+
+ m_mediaRecorder = static_cast<QQnxMediaRecorder *>(mediaRecorder);
+
+ if (m_mediaRecorder)
+ m_mediaRecorder->setCaptureSession(this);
+
+ emit encoderChanged();
+}
+
+void QQnxMediaCaptureSession::setAudioInput(QPlatformAudioInput *input)
+{
+ if (m_audioInput == input)
+ return;
+
+ m_audioInput = static_cast<QQnxAudioInput*>(input);
+}
+
+void QQnxMediaCaptureSession::setVideoPreview(QVideoSink *sink)
+{
+ auto qnxSink = sink ? static_cast<QQnxVideoSink *>(sink->platformVideoSink()) : nullptr;
+ if (m_videoSink == qnxSink)
+ return;
+ m_videoSink = qnxSink;
+}
+
+void QQnxMediaCaptureSession::setAudioOutput(QPlatformAudioOutput *output)
+{
+ if (m_audioOutput == output)
+ return;
+ m_audioOutput = output;
+}
+
+QQnxAudioInput * QQnxMediaCaptureSession::audioInput() const
+{
+ return m_audioInput;
+}
+
+QQnxVideoSink * QQnxMediaCaptureSession::videoSink() const
+{
+ return m_videoSink;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qqnxmediacapturesession_p.cpp"
diff --git a/src/plugins/multimedia/qnx/capture/qqnxmediacapturesession_p.h b/src/plugins/multimedia/qnx/capture/qqnxmediacapturesession_p.h
new file mode 100644
index 000000000..551416a61
--- /dev/null
+++ b/src/plugins/multimedia/qnx/capture/qqnxmediacapturesession_p.h
@@ -0,0 +1,67 @@
+// Copyright (C) 2016 Research In Motion
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QQNXMEDIACAPTURESESSION_H
+#define QQNXMEDIACAPTURESESSION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QObject>
+
+#include <private/qplatformmediacapture_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QQnxAudioInput;
+class QQnxPlatformCamera;
+class QQnxImageCapture;
+class QQnxMediaRecorder;
+class QQnxVideoSink;
+
+class QQnxMediaCaptureSession : public QPlatformMediaCaptureSession
+{
+ Q_OBJECT
+
+public:
+ QQnxMediaCaptureSession();
+ ~QQnxMediaCaptureSession();
+
+ QPlatformCamera *camera() override;
+ void setCamera(QPlatformCamera *camera) override;
+
+ QPlatformImageCapture *imageCapture() override;
+ void setImageCapture(QPlatformImageCapture *imageCapture) override;
+
+ QPlatformMediaRecorder *mediaRecorder() override;
+ void setMediaRecorder(QPlatformMediaRecorder *mediaRecorder) override;
+
+ void setAudioInput(QPlatformAudioInput *input) override;
+
+ void setVideoPreview(QVideoSink *sink) override;
+
+ void setAudioOutput(QPlatformAudioOutput *output) override;
+
+ QQnxAudioInput *audioInput() const;
+
+ QQnxVideoSink *videoSink() const;
+
+private:
+ QQnxPlatformCamera *m_camera = nullptr;
+ QQnxImageCapture *m_imageCapture = nullptr;
+ QQnxMediaRecorder *m_mediaRecorder = nullptr;
+ QQnxAudioInput *m_audioInput = nullptr;
+ QPlatformAudioOutput *m_audioOutput = nullptr;
+ QQnxVideoSink *m_videoSink = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/qnx/capture/qqnxmediarecorder.cpp b/src/plugins/multimedia/qnx/capture/qqnxmediarecorder.cpp
new file mode 100644
index 000000000..62ac030db
--- /dev/null
+++ b/src/plugins/multimedia/qnx/capture/qqnxmediarecorder.cpp
@@ -0,0 +1,115 @@
+// Copyright (C) 2016 Research In Motion
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#undef QT_NO_CONTEXTLESS_CONNECT // Remove after porting connect() calls
+
+#include "qqnxmediarecorder_p.h"
+
+#include "qqnxplatformcamera_p.h"
+#include "qqnxaudioinput_p.h"
+#include "qqnxcamera_p.h"
+#include "qqnxmediacapturesession_p.h"
+
+#include <private/qplatformcamera_p.h>
+
+#include <QDebug>
+#include <QUrl>
+
+QT_BEGIN_NAMESPACE
+
+QQnxMediaRecorder::QQnxMediaRecorder(QMediaRecorder *parent)
+ : QPlatformMediaRecorder(parent)
+{
+}
+
+bool QQnxMediaRecorder::isLocationWritable(const QUrl &/*location*/) const
+{
+ return true;
+}
+
+void QQnxMediaRecorder::setCaptureSession(QQnxMediaCaptureSession *session)
+{
+ m_session = session;
+}
+
+void QQnxMediaRecorder::record(QMediaEncoderSettings &settings)
+{
+ if (!m_session)
+ return;
+
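+ // With a camera in the session, recording is delegated to the camera's
+ // video pipeline; otherwise an audio-only recording is made through
+ // m_audioRecorder, whose signals are forwarded to the public recorder API.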
+ m_audioRecorder.disconnect();
+
+ if (hasCamera()) {
+ startVideoRecording(settings);
+ } else {
+ QObject::connect(&m_audioRecorder, &QQnxAudioRecorder::durationChanged,
+ [this](qint64 d) { durationChanged(d); });
+
+ QObject::connect(&m_audioRecorder, &QQnxAudioRecorder::stateChanged,
+ [this](QMediaRecorder::RecorderState s) { stateChanged(s); });
+
+ QObject::connect(&m_audioRecorder, &QQnxAudioRecorder::actualLocationChanged,
+ [this](const QUrl &l) { actualLocationChanged(l); });
+
+ startAudioRecording(settings);
+ }
+}
+
+void QQnxMediaRecorder::stop()
+{
+ if (hasCamera()) {
+ stopVideoRecording();
+ } else {
+ m_audioRecorder.stop();
+ }
+}
+
+void QQnxMediaRecorder::startAudioRecording(QMediaEncoderSettings &settings)
+{
+ if (!m_session)
+ return;
+
+ QQnxAudioInput *audioInput = m_session->audioInput();
+
+ if (!audioInput)
+ return;
+
+ m_audioRecorder.setInputDeviceId(audioInput->device.id());
+ m_audioRecorder.setMediaEncoderSettings(settings);
+ m_audioRecorder.setOutputUrl(outputLocation());
+ m_audioRecorder.record();
+}
+
+void QQnxMediaRecorder::startVideoRecording(QMediaEncoderSettings &settings)
+{
+ if (!hasCamera())
+ return;
+
+ auto *camera = static_cast<QQnxPlatformCamera*>(m_session->camera());
+
+ camera->setMediaEncoderSettings(settings);
+ camera->setOutputUrl(outputLocation());
+
+ if (camera->startVideoRecording())
+ stateChanged(QMediaRecorder::RecordingState);
+}
+
+void QQnxMediaRecorder::stopVideoRecording()
+{
+ if (!hasCamera())
+ return;
+
+ auto *camera = static_cast<QQnxPlatformCamera*>(m_session->camera());
+
+ camera->stop();
+
+ stateChanged(QMediaRecorder::StoppedState);
+}
+
+bool QQnxMediaRecorder::hasCamera() const
+{
+ return m_session && m_session->camera();
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/qnx/capture/qqnxmediarecorder_p.h b/src/plugins/multimedia/qnx/capture/qqnxmediarecorder_p.h
new file mode 100644
index 000000000..8b3ea21d3
--- /dev/null
+++ b/src/plugins/multimedia/qnx/capture/qqnxmediarecorder_p.h
@@ -0,0 +1,51 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QQNXMEDIARECORDER_H
+#define QQNXMEDIARECORDER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qqnxaudiorecorder_p.h"
+
+#include <private/qplatformmediarecorder_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QQnxMediaCaptureSession;
+
+class QQnxMediaRecorder : public QPlatformMediaRecorder
+{
+public:
+ explicit QQnxMediaRecorder(QMediaRecorder *parent);
+
+ bool isLocationWritable(const QUrl &location) const override;
+
+ void record(QMediaEncoderSettings &settings) override;
+ void stop() override;
+
+ void setCaptureSession(QQnxMediaCaptureSession *session);
+
+private:
+ bool hasCamera() const;
+
+ void startAudioRecording(QMediaEncoderSettings &settings);
+ void startVideoRecording(QMediaEncoderSettings &settings);
+ void stopVideoRecording();
+
+ QQnxAudioRecorder m_audioRecorder;
+
+ QQnxMediaCaptureSession *m_session = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/qnx/common/mmrenderertypes.h b/src/plugins/multimedia/qnx/common/mmrenderertypes.h
new file mode 100644
index 000000000..f1d498388
--- /dev/null
+++ b/src/plugins/multimedia/qnx/common/mmrenderertypes.h
@@ -0,0 +1,95 @@
+// Copyright (C) 2016 Research In Motion
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef MMRENDERERTYPES_H
+#define MMRENDERERTYPES_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <mm/renderer.h>
+#include <mm/renderer/types.h>
+
+extern "C" {
+// ### replace with proper include: mm/renderer/events.h
+typedef enum mmr_state {
+ MMR_STATE_DESTROYED,
+ MMR_STATE_IDLE,
+ MMR_STATE_STOPPED,
+ MMR_STATE_PLAYING
+} mmr_state_t;
+
+typedef enum mmr_event_type {
+ MMR_EVENT_NONE,
+ MMR_EVENT_ERROR,
+ MMR_EVENT_STATE,
+ MMR_EVENT_OVERFLOW,
+ MMR_EVENT_WARNING,
+ MMR_EVENT_STATUS,
+ MMR_EVENT_METADATA,
+ MMR_EVENT_PLAYLIST,
+ MMR_EVENT_INPUT,
+ MMR_EVENT_OUTPUT,
+ MMR_EVENT_CTXTPAR,
+ MMR_EVENT_TRKPAR,
+ MMR_EVENT_OTHER
+} mmr_event_type_t;
+
+typedef struct mmr_event {
+ mmr_event_type_t type;
+ mmr_state_t state;
+ int speed;
+ union mmr_event_details {
+
+ struct mmr_event_state {
+ mmr_state_t oldstate;
+ int oldspeed;
+ } state;
+
+ struct mmr_event_error {
+ mmr_error_info_t info;
+ } error;
+
+ struct mmr_event_warning {
+ const char *str;
+ const strm_string_t *obj;
+ } warning;
+
+ struct mmr_event_metadata {
+ unsigned index;
+ } metadata;
+
+ struct mmr_event_trkparam {
+ unsigned index;
+ } trkparam;
+
+ struct mmr_event_playlist {
+ unsigned start;
+ unsigned end;
+ unsigned length;
+ } playlist;
+
+ struct mmr_event_output {
+ unsigned id;
+ } output;
+ } details;
+
+ const strm_string_t* pos_obj;
+ const char* pos_str;
+ const strm_dict_t* data;
+ const char* objname;
+ void* usrdata;
+} mmr_event_t;
+
+const mmr_event_t* mmr_event_get(mmr_context_t *ctxt);
+
+}
+
+#endif
diff --git a/src/plugins/multimedia/qnx/common/qqnxaudioinput.cpp b/src/plugins/multimedia/qnx/common/qqnxaudioinput.cpp
new file mode 100644
index 000000000..fff3cf1eb
--- /dev/null
+++ b/src/plugins/multimedia/qnx/common/qqnxaudioinput.cpp
@@ -0,0 +1,25 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qqnxaudioinput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+QQnxAudioInput::QQnxAudioInput(QAudioInput *parent)
+ : QPlatformAudioInput(parent)
+{
+}
+
+QQnxAudioInput::~QQnxAudioInput()
+{
+}
+
+void QQnxAudioInput::setAudioDevice(const QAudioDevice &info)
+{
+ if (info == device)
+ return;
+
+ device = info;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/qnx/common/qqnxaudioinput_p.h b/src/plugins/multimedia/qnx/common/qqnxaudioinput_p.h
new file mode 100644
index 000000000..62a573cc1
--- /dev/null
+++ b/src/plugins/multimedia/qnx/common/qqnxaudioinput_p.h
@@ -0,0 +1,33 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QQNXAUDIOINPUT_P_H
+#define QQNXAUDIOINPUT_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qtmultimediaglobal_p.h>
+#include <private/qplatformaudioinput_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class Q_MULTIMEDIA_EXPORT QQnxAudioInput : public QPlatformAudioInput
+{
+public:
+ explicit QQnxAudioInput(QAudioInput *parent);
+ ~QQnxAudioInput();
+
+ void setAudioDevice(const QAudioDevice &device) override;
+};
+
+QT_END_NAMESPACE
+#endif
diff --git a/src/plugins/multimedia/qnx/common/qqnxaudiooutput.cpp b/src/plugins/multimedia/qnx/common/qqnxaudiooutput.cpp
new file mode 100644
index 000000000..76f8fbafd
--- /dev/null
+++ b/src/plugins/multimedia/qnx/common/qqnxaudiooutput.cpp
@@ -0,0 +1,52 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qqnxaudiooutput_p.h"
+
+#include <private/qqnxaudiodevice_p.h>
+
+#include <qaudiodevice.h>
+#include <qaudiooutput.h>
+
+#include <QtCore/qloggingcategory.h>
+
+static Q_LOGGING_CATEGORY(qLcMediaAudioOutput, "qt.multimedia.audiooutput")
+
+QT_BEGIN_NAMESPACE
+
+QQnxAudioOutput::QQnxAudioOutput(QAudioOutput *parent)
+ : QPlatformAudioOutput(parent)
+{
+}
+
+QQnxAudioOutput::~QQnxAudioOutput()
+{
+}
+
+void QQnxAudioOutput::setVolume(float vol)
+{
+ if (vol == volume)
+ return;
+ volume = vol;
+ q->volumeChanged(volume);
+}
+
+void QQnxAudioOutput::setMuted(bool m)
+{
+ if (muted == m)
+ return;
+ muted = m;
+ q->mutedChanged(muted);
+}
+
+void QQnxAudioOutput::setAudioDevice(const QAudioDevice &info)
+{
+ if (info == device)
+ return;
+ qCDebug(qLcMediaAudioOutput) << "setAudioDevice" << info.description() << info.isNull();
+ device = info;
+
+ // ### handle device changes
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/qnx/common/qqnxaudiooutput_p.h b/src/plugins/multimedia/qnx/common/qqnxaudiooutput_p.h
new file mode 100644
index 000000000..2ae5844e6
--- /dev/null
+++ b/src/plugins/multimedia/qnx/common/qqnxaudiooutput_p.h
@@ -0,0 +1,39 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QQNXAUDIOOUTPUT_P_H
+#define QQNXAUDIOOUTPUT_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qtmultimediaglobal_p.h>
+#include <private/qplatformaudiooutput_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAudioDevice;
+class QAudioOutput;
+
+class Q_MULTIMEDIA_EXPORT QQnxAudioOutput : public QPlatformAudioOutput
+{
+public:
+ explicit QQnxAudioOutput(QAudioOutput *parent);
+ ~QQnxAudioOutput();
+
+ void setAudioDevice(const QAudioDevice &) override;
+ void setVolume(float volume) override;
+ void setMuted(bool muted) override;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/qnx/common/qqnxmediaeventthread.cpp b/src/plugins/multimedia/qnx/common/qqnxmediaeventthread.cpp
new file mode 100644
index 000000000..f0cc9b1c0
--- /dev/null
+++ b/src/plugins/multimedia/qnx/common/qqnxmediaeventthread.cpp
@@ -0,0 +1,98 @@
+// Copyright (C) 2017 QNX Software Systems. All rights reserved.
+// Copyright (C) 2021 The Qt Company
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qqnxmediaeventthread_p.h"
+
+#include <QtCore/QDebug>
+
+#include <errno.h>
+#include <mm/renderer/types.h>
+#include <sys/neutrino.h>
+
+extern "C" {
+// ### Include mm/renderer/events.h once we have it
+int mmr_event_arm(mmr_context_t *ctxt,
+ struct sigevent const *sev);
+}
+
+QT_BEGIN_NAMESPACE
+
+static const int c_mmrCode = _PULSE_CODE_MINAVAIL + 0;
+static const int c_readCode = _PULSE_CODE_MINAVAIL + 1;
+static const int c_quitCode = _PULSE_CODE_MINAVAIL + 2;
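+// Pulse protocol: mm-renderer delivers c_mmrCode when events are pending,
+// signalRead() posts c_readCode to re-arm event delivery once the consumer
+// has drained the queue, and shutdown() posts c_quitCode to end run().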
+
+QQnxMediaEventThread::QQnxMediaEventThread(mmr_context_t *context)
+ : QThread(),
+ m_mmrContext(context)
+{
+ if (Q_UNLIKELY((m_channelId = ChannelCreate(_NTO_CHF_DISCONNECT
+ | _NTO_CHF_UNBLOCK
+ | _NTO_CHF_PRIVATE)) == -1)) {
+ qFatal("QQnxMediaEventThread: Can't continue without a channel");
+ }
+
+ if (Q_UNLIKELY((m_connectionId = ConnectAttach(0, 0, m_channelId,
+ _NTO_SIDE_CHANNEL, 0)) == -1)) {
+ ChannelDestroy(m_channelId);
+ qFatal("QQnxMediaEventThread: Can't continue without a channel connection");
+ }
+
+ SIGEV_PULSE_INIT(&m_mmrEvent, m_connectionId, SIGEV_PULSE_PRIO_INHERIT, c_mmrCode, 0);
+}
+
+QQnxMediaEventThread::~QQnxMediaEventThread()
+{
+ // block until thread terminates
+ shutdown();
+
+ ConnectDetach(m_connectionId);
+ ChannelDestroy(m_channelId);
+}
+
+void QQnxMediaEventThread::run()
+{
+ int armResult = mmr_event_arm(m_mmrContext, &m_mmrEvent);
+ if (armResult > 0)
+ emit eventPending();
+
+ while (1) {
+ struct _pulse msg;
+ memset(&msg, 0, sizeof(msg));
+ int receiveId = MsgReceive(m_channelId, &msg, sizeof(msg), nullptr);
+ if (receiveId == 0) {
+ if (msg.code == c_mmrCode) {
+ emit eventPending();
+ } else if (msg.code == c_readCode) {
+ armResult = mmr_event_arm(m_mmrContext, &m_mmrEvent);
+ if (armResult > 0)
+ emit eventPending();
+ } else if (msg.code == c_quitCode) {
+ break;
+ } else {
+ qWarning() << Q_FUNC_INFO << "Unexpected pulse" << msg.code;
+ }
+ } else if (receiveId > 0) {
+ qWarning() << Q_FUNC_INFO << "Unexpected message" << msg.code;
+ } else {
+ qWarning() << Q_FUNC_INFO << "MsgReceive error" << strerror(errno);
+ }
+ }
+}
+
+void QQnxMediaEventThread::signalRead()
+{
+ MsgSendPulse(m_connectionId, SIGEV_PULSE_PRIO_INHERIT, c_readCode, 0);
+}
+
+void QQnxMediaEventThread::shutdown()
+{
+ MsgSendPulse(m_connectionId, SIGEV_PULSE_PRIO_INHERIT, c_quitCode, 0);
+
+ // block until thread terminates
+ wait();
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qqnxmediaeventthread_p.cpp"
diff --git a/src/plugins/multimedia/qnx/common/qqnxmediaeventthread_p.h b/src/plugins/multimedia/qnx/common/qqnxmediaeventthread_p.h
new file mode 100644
index 000000000..a622fcb62
--- /dev/null
+++ b/src/plugins/multimedia/qnx/common/qqnxmediaeventthread_p.h
@@ -0,0 +1,55 @@
+// Copyright (C) 2017 QNX Software Systems. All rights reserved.
+// Copyright (C) 2021 The Qt Company
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QQNXMEDIAEVENTTHREAD_P_H
+#define QQNXMEDIAEVENTTHREAD_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/QThread>
+
+#include <sys/neutrino.h>
+#include <sys/siginfo.h>
+
+QT_BEGIN_NAMESPACE
+
+typedef struct mmr_context mmr_context_t;
+
+class QQnxMediaEventThread : public QThread
+{
+ Q_OBJECT
+
+public:
+ QQnxMediaEventThread(mmr_context_t *context);
+ ~QQnxMediaEventThread() override;
+
+ void signalRead();
+
+protected:
+ void run() override;
+
+Q_SIGNALS:
+ void eventPending();
+
+private:
+ void shutdown();
+
+ int m_channelId;
+ int m_connectionId;
+ struct sigevent m_mmrEvent;
+ mmr_context_t *m_mmrContext;
+};
+
+QT_END_NAMESPACE
+
+#endif // QQNXMEDIAEVENTTHREAD_P_H
diff --git a/src/plugins/multimedia/qnx/common/qqnxwindowgrabber.cpp b/src/plugins/multimedia/qnx/common/qqnxwindowgrabber.cpp
new file mode 100644
index 000000000..28f16b70a
--- /dev/null
+++ b/src/plugins/multimedia/qnx/common/qqnxwindowgrabber.cpp
@@ -0,0 +1,435 @@
+// Copyright (C) 2016 Research In Motion
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qqnxwindowgrabber_p.h"
+
+#include <QAbstractEventDispatcher>
+#include <QDebug>
+#include <QGuiApplication>
+#include <QImage>
+#include <QThread>
+#include <qpa/qplatformnativeinterface.h>
+
+#include <QOpenGLContext>
+#include <QOpenGLFunctions>
+
+#include <rhi/qrhi.h>
+
+#include <cstring>
+
+#include <EGL/egl.h>
+#include <errno.h>
+
+QT_BEGIN_NAMESPACE
+
+static PFNEGLCREATEIMAGEKHRPROC s_eglCreateImageKHR;
+static PFNEGLDESTROYIMAGEKHRPROC s_eglDestroyImageKHR;
+
+class QQnxWindowGrabberImage
+{
+public:
+ QQnxWindowGrabberImage();
+ ~QQnxWindowGrabberImage();
+
+ bool initialize(screen_context_t screenContext);
+
+ QQnxWindowGrabber::BufferView getBuffer(screen_window_t window, const QSize &size);
+ GLuint getTexture(screen_window_t window, const QSize &size);
+
+private:
+ bool grab(screen_window_t window);
+ bool resize(const QSize &size);
+
+ QSize m_size;
+ screen_pixmap_t m_pixmap;
+ screen_buffer_t m_pixmapBuffer;
+ EGLImageKHR m_eglImage;
+ GLuint m_glTexture;
+ unsigned char *m_bufferAddress;
+ int m_bufferStride;
+};
+
+QQnxWindowGrabber::QQnxWindowGrabber(QObject *parent)
+ : QObject(parent),
+ m_windowParent(nullptr),
+ m_window(nullptr),
+ m_screenContext(nullptr),
+ m_rhi(nullptr),
+ m_active(false),
+ m_eglImageSupported(false),
+ m_startPending(false)
+{
+ // grab the window frame with 60 frames per second
+ m_timer.setInterval(1000/60);
+
+ connect(&m_timer, &QTimer::timeout, this, &QQnxWindowGrabber::triggerUpdate);
+
+ QCoreApplication::eventDispatcher()->installNativeEventFilter(this);
+
+ // Use of EGL images can be disabled by setting QQNX_MM_DISABLE_EGLIMAGE_SUPPORT to something
+ // non-zero. This is probably useful only to test that this path still works since it results
+ // in a high CPU load.
+ if (!s_eglCreateImageKHR && qgetenv("QQNX_MM_DISABLE_EGLIMAGE_SUPPORT").toInt() == 0) {
+ s_eglCreateImageKHR = reinterpret_cast<PFNEGLCREATEIMAGEKHRPROC>(eglGetProcAddress("eglCreateImageKHR"));
+ s_eglDestroyImageKHR = reinterpret_cast<PFNEGLDESTROYIMAGEKHRPROC>(eglGetProcAddress("eglDestroyImageKHR"));
+ }
+
+ QPlatformNativeInterface *const nativeInterface = QGuiApplication::platformNativeInterface();
+ if (nativeInterface) {
+ m_screenContext = static_cast<screen_context_t>(
+ nativeInterface->nativeResourceForIntegration("screenContext"));
+ }
+
+ // Create a parent window for the window whose content will be grabbed. Since the
+ // window is only a buffer conduit, the characteristics of the parent window are
+ // irrelevant. The contents of the window can be grabbed so long as the window
+ // joins the parent window's group and the parent window is in this process.
+ // Using the window that displays this content isn't possible because there's no
+ // way to reliably retrieve it from this code or any calling code.
+ screen_create_window(&m_windowParent, m_screenContext);
+ screen_create_window_group(m_windowParent, nullptr);
+}
+
+QQnxWindowGrabber::~QQnxWindowGrabber()
+{
+ screen_destroy_window(m_windowParent);
+ QCoreApplication::eventDispatcher()->removeNativeEventFilter(this);
+}
+
+void QQnxWindowGrabber::setFrameRate(int frameRate)
+{
+ m_timer.setInterval(1000/frameRate);
+}
+
+void QQnxWindowGrabber::setWindowId(const QByteArray &windowId)
+{
+ m_windowId = windowId;
+}
+
+void QQnxWindowGrabber::setRhi(QRhi *rhi)
+{
+ m_rhi = rhi;
+
+ checkForEglImageExtension();
+}
+
+void QQnxWindowGrabber::start()
+{
+ if (m_active)
+ return;
+
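+ // The target window is discovered asynchronously via a SCREEN_EVENT_CREATE
+ // event (see handleScreenEvent()), so defer starting until it shows up.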
+ if (!m_window) {
+ m_startPending = true;
+ return;
+ }
+
+ m_startPending = false;
+
+ if (!m_screenContext)
+ screen_get_window_property_pv(m_window, SCREEN_PROPERTY_CONTEXT, reinterpret_cast<void**>(&m_screenContext));
+
+ m_timer.start();
+
+ m_active = true;
+}
+
+void QQnxWindowGrabber::stop()
+{
+ if (!m_active)
+ return;
+
+ resetBuffers();
+
+ m_timer.stop();
+
+ m_active = false;
+}
+
+void QQnxWindowGrabber::pause()
+{
+ m_timer.stop();
+}
+
+void QQnxWindowGrabber::resume()
+{
+ if (!m_active)
+ return;
+
+ m_timer.start();
+}
+
+void QQnxWindowGrabber::forceUpdate()
+{
+ if (!m_active)
+ return;
+
+ triggerUpdate();
+}
+
+bool QQnxWindowGrabber::handleScreenEvent(screen_event_t screen_event)
+{
+ int eventType;
+ if (screen_get_event_property_iv(screen_event, SCREEN_PROPERTY_TYPE, &eventType) != 0) {
+ qWarning() << "QQnxWindowGrabber: Failed to query screen event type";
+ return false;
+ }
+
+ if (eventType != SCREEN_EVENT_CREATE)
+ return false;
+
+ screen_window_t window = 0;
+ if (screen_get_event_property_pv(screen_event, SCREEN_PROPERTY_WINDOW, (void**)&window) != 0) {
+ qWarning() << "QQnxWindowGrabber: Failed to query window property";
+ return false;
+ }
+
+ const int maxIdStrLength = 128;
+ char idString[maxIdStrLength];
+ if (screen_get_window_property_cv(window, SCREEN_PROPERTY_ID_STRING, maxIdStrLength, idString) != 0) {
+ qWarning() << "QQnxWindowGrabber: Failed to query window ID string";
+ return false;
+ }
+
+ // Grab windows that have a non-empty ID string and a matching window id to grab
+ if (idString[0] != '\0' && m_windowId == idString) {
+ m_window = window;
+
+ if (m_startPending)
+ start();
+ }
+
+ return false;
+}
+
+bool QQnxWindowGrabber::nativeEventFilter(const QByteArray &eventType, void *message, qintptr *)
+{
+ if (eventType == "screen_event_t") {
+ const screen_event_t event = static_cast<screen_event_t>(message);
+ return handleScreenEvent(event);
+ }
+
+ return false;
+}
+
+QByteArray QQnxWindowGrabber::windowGroupId() const
+{
+ char groupName[256];
+ memset(groupName, 0, sizeof(groupName));
+ screen_get_window_property_cv(m_windowParent,
+ SCREEN_PROPERTY_GROUP,
+ sizeof(groupName) - 1,
+ groupName);
+ return QByteArray(groupName);
+}
+
+bool QQnxWindowGrabber::isEglImageSupported() const
+{
+ return m_eglImageSupported;
+}
+
+void QQnxWindowGrabber::checkForEglImageExtension()
+{
+ m_eglImageSupported = false;
+
+ if (!m_rhi || m_rhi->backend() != QRhi::OpenGLES2)
+ return;
+
+ const EGLDisplay defaultDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
+
+ const char *vendor = eglQueryString(defaultDisplay, EGL_VENDOR);
+
+ if (vendor && std::strstr(vendor, "VMWare"))
+ return;
+
+ const char *eglExtensions = eglQueryString(defaultDisplay, EGL_EXTENSIONS);
+
+ if (!eglExtensions)
+ return;
+
+ m_eglImageSupported = std::strstr(eglExtensions, "EGL_KHR_image")
+ && s_eglCreateImageKHR
+ && s_eglDestroyImageKHR;
+}
+
+void QQnxWindowGrabber::triggerUpdate()
+{
+ int size[2] = { 0, 0 };
+
+ const int result = screen_get_window_property_iv(m_window, SCREEN_PROPERTY_SOURCE_SIZE, size);
+
+ if (result != 0) {
+ resetBuffers();
+ qWarning() << "QQnxWindowGrabber: cannot get window size:" << strerror(errno);
+ return;
+ }
+
+ if (m_size.width() != size[0] || m_size.height() != size[1])
+ m_size = QSize(size[0], size[1]);
+
+ emit updateScene(m_size);
+}
+
+bool QQnxWindowGrabber::selectBuffer()
+{
+ // If we're using egl images we need to double buffer since the gpu may still be using the last
+ // video frame. If we're not, it doesn't matter since the data is immediately copied.
+ if (isEglImageSupported())
+ std::swap(m_frontBuffer, m_backBuffer);
+
+ if (m_frontBuffer)
+ return true;
+
+ auto frontBuffer = std::make_unique<QQnxWindowGrabberImage>();
+
+ if (!frontBuffer->initialize(m_screenContext))
+ return false;
+
+ m_frontBuffer = std::move(frontBuffer);
+
+ return true;
+}
+
+int QQnxWindowGrabber::getNextTextureId()
+{
+ if (!selectBuffer())
+ return 0;
+
+ return m_frontBuffer->getTexture(m_window, m_size);
+}
+
+QQnxWindowGrabber::BufferView QQnxWindowGrabber::getNextBuffer()
+{
+ if (!selectBuffer())
+ return {};
+
+ return m_frontBuffer->getBuffer(m_window, m_size);
+}
+
+void QQnxWindowGrabber::resetBuffers()
+{
+ m_frontBuffer.reset();
+ m_backBuffer.reset();
+}
+
+QQnxWindowGrabberImage::QQnxWindowGrabberImage()
+ : m_pixmap(0),
+ m_pixmapBuffer(0),
+ m_eglImage(0),
+ m_glTexture(0),
+ m_bufferAddress(nullptr),
+ m_bufferStride(0)
+{
+}
+
+QQnxWindowGrabberImage::~QQnxWindowGrabberImage()
+{
+ if (m_glTexture)
+ glDeleteTextures(1, &m_glTexture);
+ if (m_eglImage)
+ s_eglDestroyImageKHR(eglGetDisplay(EGL_DEFAULT_DISPLAY), m_eglImage);
+ if (m_pixmap)
+ screen_destroy_pixmap(m_pixmap);
+}
+
+bool QQnxWindowGrabberImage::initialize(screen_context_t screenContext)
+{
+ if (screen_create_pixmap(&m_pixmap, screenContext) != 0) {
+ qWarning() << "QQnxWindowGrabber: cannot create pixmap:" << strerror(errno);
+ return false;
+ }
+ const int usage = SCREEN_USAGE_WRITE | SCREEN_USAGE_READ | SCREEN_USAGE_NATIVE;
+ screen_set_pixmap_property_iv(m_pixmap, SCREEN_PROPERTY_USAGE, &usage);
+
+ // XXX as a matter of fact, the underlying buffer is BGRX8888 (according to
+ // QNX, screen formats can be loose on the ARGB ordering) - as there is no
+ // SCREEN_FORMAT_BGRX8888 constant, we use SCREEN_FORMAT_RGBX8888, which
+ // carries the same depth and allows us to use the buffer.
+ const int format = SCREEN_FORMAT_RGBX8888;
+ screen_set_pixmap_property_iv(m_pixmap, SCREEN_PROPERTY_FORMAT, &format);
+
+ return true;
+}
+
+bool QQnxWindowGrabberImage::resize(const QSize &newSize)
+{
+ if (m_pixmapBuffer) {
+ screen_destroy_pixmap_buffer(m_pixmap);
+ m_pixmapBuffer = 0;
+ m_bufferAddress = nullptr;
+ m_bufferStride = 0;
+ }
+
+ const int size[2] = { newSize.width(), newSize.height() };
+
+ screen_set_pixmap_property_iv(m_pixmap, SCREEN_PROPERTY_BUFFER_SIZE, size);
+
+ if (screen_create_pixmap_buffer(m_pixmap) == 0) {
+ screen_get_pixmap_property_pv(m_pixmap, SCREEN_PROPERTY_RENDER_BUFFERS,
+ reinterpret_cast<void**>(&m_pixmapBuffer));
+ screen_get_buffer_property_pv(m_pixmapBuffer, SCREEN_PROPERTY_POINTER,
+ reinterpret_cast<void**>(&m_bufferAddress));
+ screen_get_buffer_property_iv(m_pixmapBuffer, SCREEN_PROPERTY_STRIDE, &m_bufferStride);
+ m_size = newSize;
+
+ return true;
+ } else {
+ m_size = QSize();
+ return false;
+ }
+}
+
+bool QQnxWindowGrabberImage::grab(screen_window_t window)
+{
+ const int rect[] = { 0, 0, m_size.width(), m_size.height() };
+ return screen_read_window(window, m_pixmapBuffer, 1, rect, 0) == 0;
+}
+
+QQnxWindowGrabber::BufferView QQnxWindowGrabberImage::getBuffer(
+ screen_window_t window, const QSize &size)
+{
+ if (size != m_size && !resize(size))
+ return {};
+
+ if (!m_bufferAddress || !grab(window))
+ return {};
+
+ return {
+ .width = m_size.width(),
+ .height = m_size.height(),
+ .stride = m_bufferStride,
+ .data = m_bufferAddress
+ };
+}
+
+GLuint QQnxWindowGrabberImage::getTexture(screen_window_t window, const QSize &size)
+{
+ if (size != m_size) {
+ // create a brand new texture to be the KHR image sibling, as
+ // previously used textures cannot be reused with new KHR image
+ // sources - note that glDeleteTextures ignores a texture id of 0
+ glDeleteTextures(1, &m_glTexture);
+ glGenTextures(1, &m_glTexture);
+
+ glBindTexture(GL_TEXTURE_2D, m_glTexture);
+ if (m_eglImage) {
+ glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, 0);
+ s_eglDestroyImageKHR(eglGetDisplay(EGL_DEFAULT_DISPLAY), m_eglImage);
+ m_eglImage = 0;
+ }
+ if (!resize(size))
+ return 0;
+ m_eglImage = s_eglCreateImageKHR(eglGetDisplay(EGL_DEFAULT_DISPLAY), EGL_NO_CONTEXT,
+ EGL_NATIVE_PIXMAP_KHR, m_pixmap, 0);
+ glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, m_eglImage);
+ }
+
+ if (!m_pixmap || !grab(window))
+ return 0;
+
+ return m_glTexture;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qqnxwindowgrabber_p.cpp"
diff --git a/src/plugins/multimedia/qnx/common/qqnxwindowgrabber_p.h b/src/plugins/multimedia/qnx/common/qqnxwindowgrabber_p.h
new file mode 100644
index 000000000..1ffd96b63
--- /dev/null
+++ b/src/plugins/multimedia/qnx/common/qqnxwindowgrabber_p.h
@@ -0,0 +1,114 @@
+// Copyright (C) 2016 Research In Motion
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QQNXWINDOWGRABBER_H
+#define QQNXWINDOWGRABBER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#define EGL_EGLEXT_PROTOTYPES
+#define GL_GLEXT_PROTOTYPES
+#include <EGL/egl.h>
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+#include <EGL/eglext.h>
+#include <QAbstractNativeEventFilter>
+#include <QObject>
+#include <QSize>
+#include <QTimer>
+
+#include <memory>
+
+#include <screen/screen.h>
+
+QT_BEGIN_NAMESPACE
+
+class QRhi;
+class QQnxWindowGrabberImage;
+
+class QQnxWindowGrabber : public QObject, public QAbstractNativeEventFilter
+{
+ Q_OBJECT
+
+public:
+ struct BufferView
+ {
+ int width = -1;
+ int height = -1;
+ int stride = -1;
+
+ unsigned char *data = nullptr;
+
+ static constexpr int pixelSize = 4; // BGRX8888;
+ };
+
+ explicit QQnxWindowGrabber(QObject *parent = nullptr);
+ ~QQnxWindowGrabber();
+
+ void setFrameRate(int frameRate);
+
+ void setWindowId(const QByteArray &windowId);
+
+ void setRhi(QRhi *rhi);
+
+ void start();
+ void stop();
+
+ void pause();
+ void resume();
+
+ void forceUpdate();
+
+ bool nativeEventFilter(const QByteArray &eventType, void *message, qintptr *result) override;
+
+ bool handleScreenEvent(screen_event_t event);
+
+ QByteArray windowGroupId() const;
+
+ bool isEglImageSupported() const;
+
+ int getNextTextureId();
+ BufferView getNextBuffer();
+
+signals:
+ void updateScene(const QSize &size);
+
+private slots:
+ void triggerUpdate();
+
+private:
+ bool selectBuffer();
+ void resetBuffers();
+ void checkForEglImageExtension();
+
+ QTimer m_timer;
+
+ QByteArray m_windowId;
+
+ screen_window_t m_windowParent;
+ screen_window_t m_window;
+ screen_context_t m_screenContext;
+
+ std::unique_ptr<QQnxWindowGrabberImage> m_frontBuffer;
+ std::unique_ptr<QQnxWindowGrabberImage> m_backBuffer;
+
+ QSize m_size;
+
+ QRhi *m_rhi;
+
+ bool m_active;
+ bool m_eglImageSupported;
+ bool m_startPending;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/qnx/mediaplayer/qqnxmediametadata.cpp b/src/plugins/multimedia/qnx/mediaplayer/qqnxmediametadata.cpp
new file mode 100644
index 000000000..fcd535814
--- /dev/null
+++ b/src/plugins/multimedia/qnx/mediaplayer/qqnxmediametadata.cpp
@@ -0,0 +1,262 @@
+// Copyright (C) 2016 Research In Motion
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qqnxmediametadata_p.h"
+
+#include <QtCore/qdebug.h>
+#include <QtCore/qfile.h>
+#include <QtCore/qstringlist.h>
+
+#include <mm/renderer/types.h>
+#include <sys/neutrino.h>
+#include <sys/strm.h>
+
+extern "C" {
+// ### include this properly from mm/renderer/events.h once the toolchain is fixed
+extern strm_dict_t* mmr_metadata_split(strm_dict_t const *md,
+ const char *type,
+ unsigned idx);
+}
+
+static const char *strm_string_getx(const strm_string_t *sstr, const char *defaultValue)
+{
+ return sstr ? strm_string_get(sstr) : defaultValue;
+}
+
+QT_BEGIN_NAMESPACE
+
+QQnxMediaMetaData::QQnxMediaMetaData()
+{
+ clear();
+}
+
+static const char * titleKey = "md_title_name";
+static const char * artistKey = "md_title_artist";
+static const char * commentKey = "md_title_comment";
+static const char * genreKey = "md_title_genre";
+static const char * yearKey = "md_title_year";
+static const char * durationKey = "md_title_duration";
+static const char * bitRateKey = "md_title_bitrate";
+static const char * sampleKey = "md_title_samplerate";
+static const char * albumKey = "md_title_album";
+static const char * trackKey = "md_title_track";
+static const char * widthKey = "md_video_width";
+static const char * heightKey = "md_video_height";
+static const char * mediaTypeKey = "md_title_mediatype";
+static const char * pixelWidthKey = "md_video_pixel_width";
+static const char * pixelHeightKey = "md_video_pixel_height";
+static const char * seekableKey = "md_title_seekable";
+static const char * trackSampleKey = "sample_rate";
+static const char * trackBitRateKey = "bitrate";
+static const char * trackWidthKey = "width";
+static const char * trackHeightKey = "height";
+static const char * trackPixelWidthKey = "pixel_width";
+static const char * trackPixelHeightKey = "pixel_height";
+
+static const int mediaTypeAudioFlag = 4;
+static const int mediaTypeVideoFlag = 2;
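+// md_title_mediatype is interpreted as a bit mask: bit value 4 marks audio,
+// bit value 2 marks video; -1 means the type could not be determined
+// (see hasAudio()/hasVideo() below).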
+
+bool QQnxMediaMetaData::update(const strm_dict_t *dict)
+{
+ if (!dict) {
+ clear();
+ return true;
+ }
+
+ const strm_string_t *value;
+
+ value = strm_dict_find_rstr(dict, durationKey);
+ m_duration = QByteArray(strm_string_getx(value, "0")).toLongLong();
+
+ value = strm_dict_find_rstr(dict, mediaTypeKey);
+ m_mediaType = QByteArray(strm_string_getx(value, "-1")).toInt();
+
+ value = strm_dict_find_rstr(dict, titleKey);
+ m_title = QString::fromLatin1(QByteArray(strm_string_getx(value, nullptr)));
+
+ value = strm_dict_find_rstr(dict, seekableKey);
+ m_seekable = (strcmp(strm_string_getx(value, "1"), "0") != 0);
+
+ value = strm_dict_find_rstr(dict, artistKey);
+ m_artist = QString::fromLatin1(QByteArray(strm_string_getx(value, nullptr)));
+
+ value = strm_dict_find_rstr(dict, commentKey);
+ m_comment = QString::fromLatin1(QByteArray(strm_string_getx(value, nullptr)));
+
+ value = strm_dict_find_rstr(dict, genreKey);
+ m_genre = QString::fromLatin1(QByteArray(strm_string_getx(value, nullptr)));
+
+ value = strm_dict_find_rstr(dict, yearKey);
+ m_year = QByteArray(strm_string_getx(value, "0")).toInt();
+
+ value = strm_dict_find_rstr(dict, albumKey);
+ m_album = QString::fromLatin1(QByteArray(strm_string_getx(value, nullptr)));
+
+ value = strm_dict_find_rstr(dict, trackKey);
+ m_track = QByteArray(strm_string_getx(value, "0")).toInt();
+
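+ // Prefer per-track dictionaries when mmr_metadata_split() can extract
+ // them; otherwise fall back to the title-level md_* keys.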
+ strm_dict_t *at = mmr_metadata_split(dict, "audio", 0);
+ if (at) {
+ value = strm_dict_find_rstr(at, trackSampleKey);
+ m_sampleRate = QByteArray(strm_string_getx(value, "0")).toInt();
+
+ value = strm_dict_find_rstr(at, trackBitRateKey);
+ m_audioBitRate = QByteArray(strm_string_getx(value, "0")).toInt();
+
+ strm_dict_destroy(at);
+ } else {
+ value = strm_dict_find_rstr(dict, sampleKey);
+ m_sampleRate = QByteArray(strm_string_getx(value, "0")).toInt();
+
+ value = strm_dict_find_rstr(dict, bitRateKey);
+ m_audioBitRate = QByteArray(strm_string_getx(value, "0")).toInt();
+ }
+
+ strm_dict_t *vt = mmr_metadata_split(dict, "video", 0);
+ if (vt) {
+ value = strm_dict_find_rstr(vt, trackWidthKey);
+ m_width = QByteArray(strm_string_getx(value, "0")).toInt();
+
+ value = strm_dict_find_rstr(vt, trackHeightKey);
+ m_height = QByteArray(strm_string_getx(value, "0")).toInt();
+
+ value = strm_dict_find_rstr(vt, trackPixelWidthKey);
+ m_pixelWidth = QByteArray(strm_string_getx(value, "1")).toFloat();
+
+ value = strm_dict_find_rstr(vt, trackPixelHeightKey);
+ m_pixelHeight = QByteArray(strm_string_getx(value, "1")).toFloat();
+
+ strm_dict_destroy(vt);
+ } else {
+ value = strm_dict_find_rstr(dict, widthKey);
+ m_width = QByteArray(strm_string_getx(value, "0")).toInt();
+
+ value = strm_dict_find_rstr(dict, heightKey);
+ m_height = QByteArray(strm_string_getx(value, "0")).toInt();
+
+ value = strm_dict_find_rstr(dict, pixelWidthKey);
+ m_pixelWidth = QByteArray(strm_string_getx(value, "1")).toFloat();
+
+ value = strm_dict_find_rstr(dict, pixelHeightKey);
+ m_pixelHeight = QByteArray(strm_string_getx(value, "1")).toFloat();
+ }
+
+ return true;
+}
+
+void QQnxMediaMetaData::clear()
+{
+ strm_dict_t *dict;
+ dict = strm_dict_new();
+ update(dict);
+ strm_dict_destroy(dict);
+}
+
+qlonglong QQnxMediaMetaData::duration() const
+{
+ return m_duration;
+}
+
+// Handling of pixel aspect ratio
+//
+// If the pixel aspect ratio is different from 1:1, it means the video needs to be stretched in
+// order to look natural.
+// For example, if the pixel width is 2, and the pixel height is 1, it means a video of 300x200
+// pixels needs to be displayed as 600x200 to look correct.
+// In order to support this the easiest way, we simply pretend that the actual size of the video
+// is 600x200, which will cause the video to be displayed in an aspect ratio of 3:1 instead of 3:2,
+// and therefore look correct.
+
+int QQnxMediaMetaData::height() const
+{
+ return m_height * m_pixelHeight;
+}
+
+int QQnxMediaMetaData::width() const
+{
+ return m_width * m_pixelWidth;
+}
+
+bool QQnxMediaMetaData::hasVideo() const
+{
+ // By default, assume no video if we can't extract the information
+ if (m_mediaType == -1)
+ return false;
+
+ return (m_mediaType & mediaTypeVideoFlag);
+}
+
+bool QQnxMediaMetaData::hasAudio() const
+{
+ // By default, assume audio only if we can't extract the information
+ if (m_mediaType == -1)
+ return true;
+
+ return (m_mediaType & mediaTypeAudioFlag);
+}
+
+QString QQnxMediaMetaData::title() const
+{
+ return m_title;
+}
+
+bool QQnxMediaMetaData::isSeekable() const
+{
+ return m_seekable;
+}
+
+QString QQnxMediaMetaData::artist() const
+{
+ return m_artist;
+}
+
+QString QQnxMediaMetaData::comment() const
+{
+ return m_comment;
+}
+
+QString QQnxMediaMetaData::genre() const
+{
+ return m_genre;
+}
+
+int QQnxMediaMetaData::year() const
+{
+ return m_year;
+}
+
+QString QQnxMediaMetaData::mediaType() const
+{
+ if (hasVideo())
+ return QLatin1String("video");
+ else if (hasAudio())
+ return QLatin1String("audio");
+ else
+ return QString();
+}
+
+int QQnxMediaMetaData::audioBitRate() const
+{
+ return m_audioBitRate;
+}
+
+int QQnxMediaMetaData::sampleRate() const
+{
+ return m_sampleRate;
+}
+
+QString QQnxMediaMetaData::album() const
+{
+ return m_album;
+}
+
+int QQnxMediaMetaData::track() const
+{
+ return m_track;
+}
+
+QSize QQnxMediaMetaData::resolution() const
+{
+ return QSize(width(), height());
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/qnx/mediaplayer/qqnxmediametadata_p.h b/src/plugins/multimedia/qnx/mediaplayer/qqnxmediametadata_p.h
new file mode 100644
index 000000000..db7639dc5
--- /dev/null
+++ b/src/plugins/multimedia/qnx/mediaplayer/qqnxmediametadata_p.h
@@ -0,0 +1,74 @@
+// Copyright (C) 2016 Research In Motion
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QQNXMEDIAMETADATA_H
+#define QQNXMEDIAMETADATA_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qglobal.h>
+#include <QtCore/QSize>
+#include <QtCore/QString>
+
+typedef struct strm_dict strm_dict_t;
+
+QT_BEGIN_NAMESPACE
+
+class QQnxMediaMetaData
+{
+public:
+ QQnxMediaMetaData();
+ bool update(const strm_dict_t *dict);
+ void clear();
+
+ // Duration in milliseconds
+ qlonglong duration() const;
+
+ int height() const;
+ int width() const;
+ bool hasVideo() const;
+ bool hasAudio() const;
+ bool isSeekable() const;
+
+ QString title() const;
+ QString artist() const;
+ QString comment() const;
+ QString genre() const;
+ int year() const;
+ QString mediaType() const;
+ int audioBitRate() const;
+ int sampleRate() const;
+ QString album() const;
+ int track() const;
+ QSize resolution() const;
+
+private:
+ qlonglong m_duration;
+ int m_height;
+ int m_width;
+ int m_mediaType;
+ float m_pixelWidth;
+ float m_pixelHeight;
+ bool m_seekable;
+ QString m_title;
+ QString m_artist;
+ QString m_comment;
+ QString m_genre;
+ int m_year;
+ int m_audioBitRate;
+ int m_sampleRate;
+ QString m_album;
+ int m_track;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/qnx/mediaplayer/qqnxmediaplayer.cpp b/src/plugins/multimedia/qnx/mediaplayer/qqnxmediaplayer.cpp
new file mode 100644
index 000000000..14b190836
--- /dev/null
+++ b/src/plugins/multimedia/qnx/mediaplayer/qqnxmediaplayer.cpp
@@ -0,0 +1,887 @@
+// Copyright (C) 2016 Research In Motion
+// Copyright (C) 2021 The Qt Company
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qqnxmediaplayer_p.h"
+#include "qqnxvideosink_p.h"
+#include "qqnxmediautil_p.h"
+#include "qqnxmediaeventthread_p.h"
+#include "qqnxwindowgrabber_p.h"
+
+#include <private/qhwvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
+
+#include <QtCore/qabstracteventdispatcher.h>
+#include <QtCore/qcoreapplication.h>
+#include <QtCore/qdir.h>
+#include <QtCore/qfileinfo.h>
+#include <QtCore/quuid.h>
+#include <mm/renderer.h>
+#include <qmediaplayer.h>
+#include <qqnxaudiooutput_p.h>
+#include <qaudiooutput.h>
+
+#include <errno.h>
+#include <sys/strm.h>
+#include <sys/stat.h>
+
+#include <algorithm>
+#include <tuple>
+
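+// mm-renderer expresses playback speed in thousandths of normal speed: 1000 is
+// 1x and 0 is paused (see handleMmEventState() below). So rateToSpeed(1.0)
+// yields 1000 and rateToSpeed(0.5) yields 500.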
+static constexpr int rateToSpeed(qreal rate)
+{
+ return std::floor(rate * 1000);
+}
+
+static constexpr qreal speedToRate(int speed)
+{
+    // No flooring here: a speed of 1500 must map back to a rate of 1.5, not 1.0.
+    return speed / 1000.0;
+}
+
+static constexpr int normalizeVolume(float volume)
+{
+ return std::clamp<int>(std::floor(volume * 100.0), 0, 100);
+}
+
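+// Parses mm-renderer's "bufferlevel" status string. For example, "2500/10000"
+// yields { level = 2500, capacity = 10000, ok = true }; malformed input returns
+// a default-constructed tuple with ok == false.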
+static std::tuple<int, int, bool> parseBufferLevel(const QString &value)
+{
+ if (value.isEmpty())
+ return {};
+
+ const int slashPos = value.indexOf('/');
+ if (slashPos <= 0)
+ return {};
+
+ bool ok = false;
+ const int level = value.left(slashPos).toInt(&ok);
+ if (!ok || level < 0)
+ return {};
+
+ const int capacity = value.mid(slashPos + 1).toInt(&ok);
+ if (!ok || capacity < 0)
+ return {};
+
+ return { level, capacity, true };
+}
+
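+// Wraps a frame grabbed from the video window as a GL texture. The texture id
+// is fetched lazily on the first textureHandle() call, which presumably runs on
+// the render thread where the GL context is current.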
+class QnxTextureBuffer : public QHwVideoBuffer
+{
+public:
+    QnxTextureBuffer(QQnxWindowGrabber *windowGrabber)
+        : QHwVideoBuffer(QVideoFrame::RhiTextureHandle)
+        , m_windowGrabber(windowGrabber)
+        , m_handle(0)
+    {
+    }
+
+ void unmap() override {}
+
+ MapData map(QtVideo::MapMode /*mode*/) override
+ {
+ return {};
+ }
+
+ quint64 textureHandle(QRhi *, int plane) const override
+ {
+ if (plane != 0)
+ return 0;
+        if (!m_handle)
+            m_handle = m_windowGrabber->getNextTextureId();
+ return m_handle;
+ }
+
+private:
+ QQnxWindowGrabber *m_windowGrabber;
+    mutable quint64 m_handle;
+};
+
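+// CPU fallback for when zero-copy texture sharing is unavailable: map() hands
+// out the most recently grabbed window buffer for read-only access.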
+class QnxRasterBuffer : public QAbstractVideoBuffer
+{
+public:
+ QnxRasterBuffer(QQnxWindowGrabber *windowGrabber) { m_windowGrabber = windowGrabber; }
+
+ MapData map(QtVideo::MapMode mode) override
+ {
+ if (mode != QtVideo::MapMode::ReadOnly)
+ return {};
+
+ if (buffer.data) {
+ qWarning("QnxRasterBuffer: need to unmap before mapping");
+ return {};
+ }
+
+ buffer = m_windowGrabber->getNextBuffer();
+
+ return {
+ .planeCount = 1,
+ .bytesPerLine = { buffer.stride },
+ .data = { buffer.data },
+ .dataSize = { buffer.width * buffer.height * buffer.pixelSize }
+ };
+ }
+
+ void unmap() override
+ {
+ buffer = {};
+ }
+
+ QVideoFrameFormat format() const override { return {}; }
+
+private:
+ QQnxWindowGrabber *m_windowGrabber;
+ QQnxWindowGrabber::BufferView buffer;
+};
+
+QT_BEGIN_NAMESPACE
+
+QQnxMediaPlayer::QQnxMediaPlayer(QMediaPlayer *parent)
+ : QObject(parent)
+ , QPlatformMediaPlayer(parent)
+ , m_windowGrabber(new QQnxWindowGrabber(this))
+{
+ m_flushPositionTimer.setSingleShot(true);
+ m_flushPositionTimer.setInterval(100);
+
+ connect(&m_flushPositionTimer, &QTimer::timeout, this, &QQnxMediaPlayer::flushPosition);
+
+ connect(m_windowGrabber, &QQnxWindowGrabber::updateScene, this, &QQnxMediaPlayer::updateScene);
+
+ openConnection();
+}
+
+QQnxMediaPlayer::~QQnxMediaPlayer()
+{
+ stop();
+ detach();
+ closeConnection();
+}
+
+void QQnxMediaPlayer::openConnection()
+{
+ static int idCounter = 0;
+
+ m_connection = mmr_connect(nullptr);
+ if (!m_connection) {
+ emitPError(QString::fromLatin1("Unable to connect to the multimedia renderer"));
+ return;
+ }
+
+ m_id = idCounter++;
+ m_contextName = QString::fromLatin1("QQnxMediaPlayer_%1_%2").arg(m_id)
+ .arg(QCoreApplication::applicationPid());
+ m_context = mmr_context_create(m_connection, m_contextName.toLatin1(),
+ 0, S_IRWXU|S_IRWXG|S_IRWXO);
+ if (!m_context) {
+ emitPError(QString::fromLatin1("Unable to create context"));
+ closeConnection();
+ return;
+ }
+
+ startMonitoring();
+}
+
+void QQnxMediaPlayer::handleMmEventState(const mmr_event_t *event)
+{
+ if (!event || event->type != MMR_EVENT_STATE)
+ return;
+
+ switch (event->state) {
+ case MMR_STATE_DESTROYED:
+ break;
+ case MMR_STATE_IDLE:
+ mediaStatusChanged(QMediaPlayer::NoMedia);
+ stateChanged(QMediaPlayer::StoppedState);
+ detachVideoOutput();
+ detachInput();
+ break;
+ case MMR_STATE_STOPPED:
+ stateChanged(QMediaPlayer::StoppedState);
+ m_windowGrabber->stop();
+
+ if (m_platformVideoSink)
+ m_platformVideoSink->setVideoFrame({});
+ break;
+ case MMR_STATE_PLAYING:
+ if (event->speed == 0) {
+ stateChanged(QMediaPlayer::PausedState);
+ m_windowGrabber->pause();
+ } else if (state() == QMediaPlayer::PausedState) {
+ m_windowGrabber->resume();
+ stateChanged(QMediaPlayer::PlayingState);
+ } else {
+ m_windowGrabber->start();
+ stateChanged(QMediaPlayer::PlayingState);
+ }
+
+ if (event->speed != m_speed) {
+ m_speed = event->speed;
+
+ if (state() != QMediaPlayer::PausedState)
+ m_configuredSpeed = m_speed;
+
+ playbackRateChanged(::speedToRate(m_speed));
+ }
+ break;
+ }
+}
+
+void QQnxMediaPlayer::handleMmEventStatus(const mmr_event_t *event)
+{
+ if (!event || event->type != MMR_EVENT_STATUS)
+ return;
+
+ if (event->data)
+ handleMmEventStatusData(event->data);
+
+ // update pos
+ if (!event->pos_str || isPendingPositionFlush())
+ return;
+
+ const QByteArray valueBa(event->pos_str);
+
+ bool ok;
+ const qint64 position = valueBa.toLongLong(&ok);
+
+ if (!ok)
+ qCritical("Could not parse position from '%s'", valueBa.constData());
+ else
+ handleMmPositionChanged(position);
+}
+
+void QQnxMediaPlayer::handleMmEventStatusData(const strm_dict_t *data)
+{
+ if (!data)
+ return;
+
+ const auto getValue = [data](const char *key) -> QString {
+ const strm_string_t *value = strm_dict_find_rstr(data, key);
+
+ if (!value)
+ return {};
+
+ return QString::fromUtf8(strm_string_get(value));
+ };
+
+ // update bufferProgress
+ const QString bufferLevel = getValue("bufferlevel");
+
+ if (!bufferLevel.isEmpty()) {
+ const auto & [level, capacity, ok] = ::parseBufferLevel(bufferLevel);
+
+ if (ok)
+ updateBufferLevel(level, capacity);
+ else
+ qCritical("Could not parse buffer capacity from '%s'", qUtf8Printable(bufferLevel));
+ }
+
+ // update MediaStatus
+ const QString bufferStatus = getValue("bufferstatus");
+ const QString suspended = getValue("suspended");
+
+ if (suspended == QStringLiteral("yes"))
+ mediaStatusChanged(QMediaPlayer::StalledMedia);
+ else if (bufferStatus == QStringLiteral("buffering"))
+ mediaStatusChanged(QMediaPlayer::BufferingMedia);
+ else if (bufferStatus == QStringLiteral("playing"))
+ mediaStatusChanged(QMediaPlayer::BufferedMedia);
+}
+
+void QQnxMediaPlayer::handleMmEventError(const mmr_event_t *event)
+{
+ if (!event)
+ return;
+
+ // When playback is explicitly stopped using mmr_stop(), mm-renderer
+ // generates a STATE event. When the end of media is reached, an ERROR
+ // event is generated and the error code contained in the event information
+ // is set to MMR_ERROR_NONE. When an error causes playback to stop,
+ // the error code is set to something else.
+ if (event->details.error.info.error_code == MMR_ERROR_NONE) {
+ mediaStatusChanged(QMediaPlayer::EndOfMedia);
+ stateChanged(QMediaPlayer::StoppedState);
+ }
+}
+
+void QQnxMediaPlayer::closeConnection()
+{
+ stopMonitoring();
+
+ if (m_context) {
+ mmr_context_destroy(m_context);
+ m_context = nullptr;
+ m_contextName.clear();
+ }
+
+ if (m_connection) {
+ mmr_disconnect(m_connection);
+ m_connection = nullptr;
+ }
+}
+
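+// For example (paths here are purely illustrative), QUrl("/movies/clip.mp4")
+// and QUrl("file:///movies/clip.mp4") both map to "file:///movies/clip.mp4",
+// while QUrl("http://host/clip.mp4") is passed through in encoded form.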
+QByteArray QQnxMediaPlayer::resourcePathForUrl(const QUrl &url)
+{
+ // If this is a local file, mmrenderer expects the file:// prefix and an absolute path.
+ // We treat URLs without scheme as local files, most likely someone just forgot to set the
+ // file:// prefix when constructing the URL.
+ if (url.isLocalFile() || url.scheme().isEmpty()) {
+ const QString relativeFilePath = url.scheme().isEmpty() ? url.path() : url.toLocalFile();
+ const QFileInfo fileInfo(relativeFilePath);
+ return QFile::encodeName(QStringLiteral("file://") + fileInfo.absoluteFilePath());
+    } else {
+        // HTTP or similar URL
+        return url.toEncoded();
+    }
+}
+
+void QQnxMediaPlayer::attach()
+{
+ // Should only be called in detached state
+ if (isInputAttached())
+ return;
+
+ if (!m_media.isValid() || !m_context) {
+ mediaStatusChanged(QMediaPlayer::NoMedia);
+ return;
+ }
+
+ resetMonitoring();
+
+ if (!(attachVideoOutput() && attachAudioOutput() && attachInput())) {
+ detach();
+ return;
+ }
+
+ mediaStatusChanged(QMediaPlayer::LoadedMedia);
+}
+
+bool QQnxMediaPlayer::attachVideoOutput()
+{
+ if (isVideoOutputAttached()) {
+ qWarning() << "QQnxMediaPlayer: Video output already attached!";
+ return true;
+ }
+
+ if (!m_context) {
+ qWarning() << "QQnxMediaPlayer: No media player context!";
+ return false;
+ }
+
+ const QByteArray windowGroupId = m_windowGrabber->windowGroupId();
+ if (windowGroupId.isEmpty()) {
+ qWarning() << "QQnxMediaPlayer: Unable to find window group";
+ return false;
+ }
+
+ static int winIdCounter = 0;
+
+ const QString windowName = QStringLiteral("QQnxVideoSink_%1_%2")
+ .arg(winIdCounter++)
+ .arg(QCoreApplication::applicationPid());
+
+ m_windowGrabber->setWindowId(windowName.toLatin1());
+
+ if (m_platformVideoSink)
+ m_windowGrabber->setRhi(m_platformVideoSink->rhi());
+
+ // Start with an invisible window, because we just want to grab the frames from it.
+ const QString videoDeviceUrl = QStringLiteral("screen:?winid=%1&wingrp=%2&initflags=invisible&nodstviewport=1")
+ .arg(windowName, QString::fromLatin1(windowGroupId));
+
+ m_videoId = mmr_output_attach(m_context, videoDeviceUrl.toLatin1(), "video");
+
+ if (m_videoId == -1) {
+ qWarning() << "mmr_output_attach() for video failed";
+ return false;
+ }
+
+ return true;
+}
+
+bool QQnxMediaPlayer::attachAudioOutput()
+{
+ if (isAudioOutputAttached()) {
+ qWarning() << "QQnxMediaPlayer: Audio output already attached!";
+ return true;
+ }
+
+ const QByteArray defaultAudioDevice = qgetenv("QQNX_RENDERER_DEFAULT_AUDIO_SINK");
+
+ m_audioId = mmr_output_attach(m_context,
+ defaultAudioDevice.isEmpty() ? "snd:" : defaultAudioDevice.constData(), "audio");
+
+ if (m_audioId == -1) {
+ emitMmError("mmr_output_attach() for audio failed");
+
+ return false;
+ }
+
+ return true;
+}
+
+bool QQnxMediaPlayer::attachInput()
+{
+ if (isInputAttached())
+ return true;
+
+ const QByteArray resourcePath = resourcePathForUrl(m_media);
+
+ if (resourcePath.isEmpty())
+ return false;
+
+ if (mmr_input_attach(m_context, resourcePath.constData(), "track") != 0) {
+ emitMmError(QStringLiteral("mmr_input_attach() failed for ")
+ + QString::fromUtf8(resourcePath));
+
+ mediaStatusChanged(QMediaPlayer::InvalidMedia);
+
+ return false;
+ }
+
+ m_inputAttached = true;
+
+ return true;
+}
+
+void QQnxMediaPlayer::detach()
+{
+ if (!m_context)
+ return;
+
+ if (isVideoOutputAttached())
+ detachVideoOutput();
+
+ if (isAudioOutputAttached())
+ detachAudioOutput();
+
+ if (isInputAttached())
+ detachInput();
+
+ resetMonitoring();
+}
+
+void QQnxMediaPlayer::detachVideoOutput()
+{
+ m_windowGrabber->stop();
+
+ if (m_platformVideoSink)
+ m_platformVideoSink->setVideoFrame({});
+
+ if (isVideoOutputAttached())
+ mmr_output_detach(m_context, m_videoId);
+
+ m_videoId = -1;
+}
+
+void QQnxMediaPlayer::detachAudioOutput()
+{
+ if (isAudioOutputAttached())
+ mmr_output_detach(m_context, m_audioId);
+
+ m_audioId = -1;
+}
+
+void QQnxMediaPlayer::detachInput()
+{
+ if (isInputAttached())
+ mmr_input_detach(m_context);
+
+ m_inputAttached = false;
+}
+
+bool QQnxMediaPlayer::isVideoOutputAttached() const
+{
+ return m_videoId != -1;
+}
+
+bool QQnxMediaPlayer::isAudioOutputAttached() const
+{
+ return m_audioId != -1;
+}
+
+bool QQnxMediaPlayer::isInputAttached() const
+{
+ return m_inputAttached;
+}
+
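+// Invoked whenever the window grabber reports a new frame. When EGL images are
+// supported the frame is exposed as a GL texture; otherwise it is read back
+// through the raster buffer path.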
+void QQnxMediaPlayer::updateScene(const QSize &size)
+{
+ if (!m_platformVideoSink)
+ return;
+
+ QVideoFrameFormat format(size, QVideoFrameFormat::Format_BGRX8888);
+
+ const QVideoFrame actualFrame = m_windowGrabber->isEglImageSupported()
+ ? QVideoFramePrivate::createFrame(std::make_unique<QnxTextureBuffer>(m_windowGrabber),
+ std::move(format))
+ : QVideoFramePrivate::createFrame(std::make_unique<QnxRasterBuffer>(m_windowGrabber),
+ std::move(format));
+
+ m_platformVideoSink->setVideoFrame(actualFrame);
+}
+
+qint64 QQnxMediaPlayer::duration() const
+{
+ return m_metaData.duration();
+}
+
+qint64 QQnxMediaPlayer::position() const
+{
+ return m_position;
+}
+
+void QQnxMediaPlayer::setPosition(qint64 position)
+{
+ if (m_position == position)
+ return;
+
+ m_pendingPosition = position;
+ m_flushPositionTimer.start();
+}
+
+void QQnxMediaPlayer::setPositionInternal(qint64 position)
+{
+ if (!m_context || !m_metaData.isSeekable() || mediaStatus() == QMediaPlayer::NoMedia)
+ return;
+
+ if (mmr_seek(m_context, QString::number(position).toLatin1()) != 0)
+ emitMmError("Seeking failed");
+}
+
+void QQnxMediaPlayer::flushPosition()
+{
+ setPositionInternal(m_pendingPosition);
+}
+
+bool QQnxMediaPlayer::isPendingPositionFlush() const
+{
+ return m_flushPositionTimer.isActive();
+}
+
+void QQnxMediaPlayer::setDeferredSpeedEnabled(bool enabled)
+{
+ m_deferredSpeedEnabled = enabled;
+}
+
+bool QQnxMediaPlayer::isDeferredSpeedEnabled() const
+{
+ return m_deferredSpeedEnabled;
+}
+
+void QQnxMediaPlayer::setVolume(float volume)
+{
+ const int normalizedVolume = ::normalizeVolume(volume);
+
+ if (m_volume == normalizedVolume)
+ return;
+
+ m_volume = normalizedVolume;
+
+ if (!m_muted)
+ updateVolume();
+}
+
+void QQnxMediaPlayer::setMuted(bool muted)
+{
+ if (m_muted == muted)
+ return;
+
+ m_muted = muted;
+
+ updateVolume();
+}
+
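+// mm-renderer takes the audio volume as a string ("0" to "100") in the output
+// parameter dictionary, hence the formatting below.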
+void QQnxMediaPlayer::updateVolume()
+{
+ if (!m_context || m_audioId == -1)
+ return;
+
+ const int volume = m_muted ? 0 : m_volume;
+
+ char buf[] = "100";
+ std::snprintf(buf, sizeof buf, "%d", volume);
+
+ strm_dict_t * dict = strm_dict_new();
+ dict = strm_dict_set(dict, "volume", buf);
+
+ if (mmr_output_parameters(m_context, m_audioId, dict) != 0)
+ emitMmError("mmr_output_parameters: Setting volume failed");
+}
+
+void QQnxMediaPlayer::setAudioOutput(QPlatformAudioOutput *output)
+{
+ QAudioOutput *out = output ? output->q : nullptr;
+ if (m_audioOutput == out)
+ return;
+
+ if (m_audioOutput)
+ disconnect(m_audioOutput.get());
+ m_audioOutput = out;
+ if (m_audioOutput) {
+ connect(out, &QAudioOutput::volumeChanged, this, &QQnxMediaPlayer::setVolume);
+ connect(out, &QAudioOutput::mutedChanged, this, &QQnxMediaPlayer::setMuted);
+ }
+ setVolume(out ? out->volume() : 1.);
+ setMuted(out ? out->isMuted() : true);
+}
+
+float QQnxMediaPlayer::bufferProgress() const
+{
+ // mm-renderer has buffer properties "status" and "level"
+ // QMediaPlayer's buffer status maps to mm-renderer's buffer level
+ return m_bufferLevel/100.0f;
+}
+
+bool QQnxMediaPlayer::isAudioAvailable() const
+{
+ return m_metaData.hasAudio();
+}
+
+bool QQnxMediaPlayer::isVideoAvailable() const
+{
+ return m_metaData.hasVideo();
+}
+
+bool QQnxMediaPlayer::isSeekable() const
+{
+ return m_metaData.isSeekable();
+}
+
+QMediaTimeRange QQnxMediaPlayer::availablePlaybackRanges() const
+{
+ // We can't get this information from the mmrenderer API yet, so pretend we can seek everywhere
+ return QMediaTimeRange(0, m_metaData.duration());
+}
+
+qreal QQnxMediaPlayer::playbackRate() const
+{
+ return ::speedToRate(m_speed);
+}
+
+void QQnxMediaPlayer::setPlaybackRate(qreal rate)
+{
+ if (!m_context)
+ return;
+
+ const int speed = ::rateToSpeed(rate);
+
+ if (m_speed == speed)
+ return;
+
+    // Defer setting the playback speed until play() is called, to prevent
+    // mm-renderer from inadvertently transitioning into the playing state.
+ if (isDeferredSpeedEnabled() && state() != QMediaPlayer::PlayingState) {
+ m_deferredSpeed = speed;
+ return;
+ }
+
+ if (mmr_speed_set(m_context, speed) != 0)
+ emitMmError("mmr_speed_set failed");
+}
+
+QUrl QQnxMediaPlayer::media() const
+{
+ return m_media;
+}
+
+const QIODevice *QQnxMediaPlayer::mediaStream() const
+{
+    // Always null; QIODevice streams are not supported.
+    return nullptr;
+}
+
+void QQnxMediaPlayer::setMedia(const QUrl &media, QIODevice *stream)
+{
+ Q_UNUSED(stream); // not supported
+
+ stop();
+ detach();
+
+ stateChanged(QMediaPlayer::StoppedState);
+ mediaStatusChanged(QMediaPlayer::LoadingMedia);
+
+ m_media = media;
+
+ updateMetaData(nullptr);
+ attach();
+}
+
+void QQnxMediaPlayer::play()
+{
+ if (!m_media.isValid() || !m_connection || !m_context || m_audioId == -1) {
+ stateChanged(QMediaPlayer::StoppedState);
+ return;
+ }
+
+ if (state() == QMediaPlayer::PlayingState)
+ return;
+
+ setDeferredSpeedEnabled(false);
+
+ if (m_deferredSpeed) {
+ setPlaybackRate(::speedToRate(*m_deferredSpeed));
+ m_deferredSpeed = {};
+ } else {
+ setPlaybackRate(::speedToRate(m_configuredSpeed));
+ }
+
+ setDeferredSpeedEnabled(true);
+
+    // A paused player has already been un-paused by the speed change above,
+    // so there is nothing left to do here.
+    if (state() == QMediaPlayer::PausedState)
+        return;
+
+ if (mediaStatus() == QMediaPlayer::EndOfMedia)
+ setPositionInternal(0);
+
+ resetMonitoring();
+ updateVolume();
+
+ if (mmr_play(m_context) != 0) {
+ stateChanged(QMediaPlayer::StoppedState);
+ emitMmError("mmr_play() failed");
+ return;
+ }
+
+ stateChanged(QMediaPlayer::PlayingState);
+}
+
+void QQnxMediaPlayer::pause()
+{
+ if (state() != QMediaPlayer::PlayingState)
+ return;
+
+ setPlaybackRate(0);
+}
+
+void QQnxMediaPlayer::stop()
+{
+ if (!m_context
+ || state() == QMediaPlayer::StoppedState
+ || mediaStatus() == QMediaPlayer::NoMedia)
+ return;
+
+ // mm-renderer does not rewind by default
+ setPositionInternal(0);
+
+ mmr_stop(m_context);
+}
+
+void QQnxMediaPlayer::setVideoSink(QVideoSink *videoSink)
+{
+ m_platformVideoSink = videoSink
+ ? static_cast<QQnxVideoSink *>(videoSink->platformVideoSink())
+ : nullptr;
+}
+
+void QQnxMediaPlayer::startMonitoring()
+{
+ m_eventThread = new QQnxMediaEventThread(m_context);
+
+ connect(m_eventThread, &QQnxMediaEventThread::eventPending,
+ this, &QQnxMediaPlayer::readEvents);
+
+ m_eventThread->setObjectName(QStringLiteral("MmrEventThread-") + QString::number(m_id));
+ m_eventThread->start();
+}
+
+void QQnxMediaPlayer::stopMonitoring()
+{
+ delete m_eventThread;
+ m_eventThread = nullptr;
+}
+
+void QQnxMediaPlayer::resetMonitoring()
+{
+ m_bufferLevel = 0;
+ m_position = 0;
+ m_speed = 0;
+}
+
+void QQnxMediaPlayer::handleMmPositionChanged(qint64 newPosition)
+{
+ m_position = newPosition;
+
+ if (state() == QMediaPlayer::PausedState)
+ m_windowGrabber->forceUpdate();
+
+ positionChanged(m_position);
+}
+
+void QQnxMediaPlayer::updateBufferLevel(int level, int capacity)
+{
+ m_bufferLevel = capacity == 0 ? 0 : level / static_cast<float>(capacity) * 100.0f;
+ m_bufferLevel = qBound(0, m_bufferLevel, 100);
+ bufferProgressChanged(m_bufferLevel/100.0f);
+}
+
+void QQnxMediaPlayer::updateMetaData(const strm_dict *dict)
+{
+ m_metaData.update(dict);
+
+ durationChanged(m_metaData.duration());
+ audioAvailableChanged(m_metaData.hasAudio());
+ videoAvailableChanged(m_metaData.hasVideo());
+ seekableChanged(m_metaData.isSeekable());
+}
+
+void QQnxMediaPlayer::emitMmError(const char *msg)
+{
+ emitMmError(QString::fromUtf8(msg));
+}
+
+void QQnxMediaPlayer::emitMmError(const QString &msg)
+{
+ int errorCode = MMR_ERROR_NONE;
+ const QString errorMessage = mmErrorMessage(msg, m_context, &errorCode);
+ emit error(errorCode, errorMessage);
+}
+
+void QQnxMediaPlayer::emitPError(const QString &msg)
+{
+ const QString errorMessage = QString::fromLatin1("%1: %2").arg(msg).arg(QString::fromUtf8(strerror(errno)));
+ emit error(errno, errorMessage);
+}
+
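+// Drains all queued mm-renderer events, dispatches them by type, and then
+// signals the event thread so it can wait for the next batch.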
+void QQnxMediaPlayer::readEvents()
+{
+ while (const mmr_event_t *event = mmr_event_get(m_context)) {
+ if (event->type == MMR_EVENT_NONE)
+ break;
+
+ switch (event->type) {
+ case MMR_EVENT_STATUS:
+ handleMmEventStatus(event);
+ break;
+ case MMR_EVENT_STATE:
+ handleMmEventState(event);
+ break;
+ case MMR_EVENT_METADATA:
+ updateMetaData(event->data);
+ break;
+ case MMR_EVENT_ERROR:
+ handleMmEventError(event);
+ break;
+ case MMR_EVENT_NONE:
+ case MMR_EVENT_OVERFLOW:
+ case MMR_EVENT_WARNING:
+ case MMR_EVENT_PLAYLIST:
+ case MMR_EVENT_INPUT:
+ case MMR_EVENT_OUTPUT:
+ case MMR_EVENT_CTXTPAR:
+ case MMR_EVENT_TRKPAR:
+ case MMR_EVENT_OTHER:
+ break;
+ }
+ }
+
+ if (m_eventThread)
+ m_eventThread->signalRead();
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qqnxmediaplayer_p.cpp"
diff --git a/src/plugins/multimedia/qnx/mediaplayer/qqnxmediaplayer_p.h b/src/plugins/multimedia/qnx/mediaplayer/qqnxmediaplayer_p.h
new file mode 100644
index 000000000..c570a6334
--- /dev/null
+++ b/src/plugins/multimedia/qnx/mediaplayer/qqnxmediaplayer_p.h
@@ -0,0 +1,167 @@
+// Copyright (C) 2016 Research In Motion
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QQnxMediaPlayer_H
+#define QQnxMediaPlayer_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qqnxmediametadata_p.h"
+#include "mmrenderertypes.h"
+
+#include <private/qplatformmediaplayer_p.h>
+#include <QtCore/qabstractnativeeventfilter.h>
+#include <QtCore/qpointer.h>
+#include <QtCore/qtimer.h>
+
+#include <mm/renderer.h>
+#include <mm/renderer/types.h>
+
+#include <optional>
+
+QT_BEGIN_NAMESPACE
+
+class QQnxVideoSink;
+class QQnxMediaEventThread;
+class QQnxWindowGrabber;
+
+class QQnxMediaPlayer : public QObject
+ , public QPlatformMediaPlayer
+{
+ Q_OBJECT
+public:
+ explicit QQnxMediaPlayer(QMediaPlayer *parent = nullptr);
+ ~QQnxMediaPlayer();
+
+ qint64 duration() const override;
+
+ qint64 position() const override;
+ void setPosition(qint64 position) override;
+
+ void setAudioOutput(QPlatformAudioOutput *) override;
+
+ float bufferProgress() const override;
+
+ bool isAudioAvailable() const override;
+ bool isVideoAvailable() const override;
+
+ bool isSeekable() const override;
+
+ QMediaTimeRange availablePlaybackRanges() const override;
+
+ qreal playbackRate() const override;
+ void setPlaybackRate(qreal rate) override;
+
+ QUrl media() const override;
+ const QIODevice *mediaStream() const override;
+ void setMedia(const QUrl &media, QIODevice *stream) override;
+
+ void play() override;
+ void pause() override;
+ void stop() override;
+
+ void setVideoSink(QVideoSink *videoSink);
+
+private Q_SLOTS:
+ void setVolume(float volume);
+ void setMuted(bool muted);
+ void readEvents();
+
+private:
+ void startMonitoring();
+ void stopMonitoring();
+ void resetMonitoring();
+
+ void openConnection();
+ void emitMmError(const char *msg);
+ void emitMmError(const QString &msg);
+ void emitPError(const QString &msg);
+
+ void handleMmPositionChanged(qint64 newPosition);
+ void updateBufferLevel(int level, int capacity);
+ void updateMetaData(const strm_dict_t *dict);
+
+ void handleMmEventState(const mmr_event_t *event);
+ void handleMmEventStatus(const mmr_event_t *event);
+ void handleMmEventStatusData(const strm_dict_t *data);
+ void handleMmEventError(const mmr_event_t *event);
+
+ QByteArray resourcePathForUrl(const QUrl &url);
+
+ void closeConnection();
+ void attach();
+
+ bool attachVideoOutput();
+ bool attachAudioOutput();
+ bool attachInput();
+
+ void detach();
+ void detachVideoOutput();
+ void detachAudioOutput();
+ void detachInput();
+
+ bool isVideoOutputAttached() const;
+ bool isAudioOutputAttached() const;
+ bool isInputAttached() const;
+
+ void updateScene(const QSize &size);
+
+ void updateVolume();
+
+ void setPositionInternal(qint64 position);
+ void flushPosition();
+
+ bool isPendingPositionFlush() const;
+
+ void setDeferredSpeedEnabled(bool enabled);
+ bool isDeferredSpeedEnabled() const;
+
+ mmr_context_t *m_context = nullptr;
+ mmr_connection_t *m_connection = nullptr;
+
+ QString m_contextName;
+
+ int m_id = -1;
+ int m_audioId = -1;
+ int m_volume = 50; // range is 0-100
+
+ QUrl m_media;
+ QPointer<QAudioOutput> m_audioOutput;
+ QPointer<QQnxVideoSink> m_platformVideoSink;
+
+ QQnxMediaMetaData m_metaData;
+
+ qint64 m_position = 0;
+ qint64 m_pendingPosition = 0;
+
+ int m_bufferLevel = 0;
+
+ int m_videoId = -1;
+
+ QTimer m_flushPositionTimer;
+
+ QQnxMediaEventThread *m_eventThread = nullptr;
+
+ int m_speed = 1000;
+ int m_configuredSpeed = 1000;
+
+ std::optional<int> m_deferredSpeed;
+
+    QQnxWindowGrabber *m_windowGrabber = nullptr;
+
+ bool m_inputAttached = false;
+ bool m_muted = false;
+ bool m_deferredSpeedEnabled = false;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/qnx/mediaplayer/qqnxmediautil.cpp b/src/plugins/multimedia/qnx/mediaplayer/qqnxmediautil.cpp
new file mode 100644
index 000000000..074989642
--- /dev/null
+++ b/src/plugins/multimedia/qnx/mediaplayer/qqnxmediautil.cpp
@@ -0,0 +1,126 @@
+// Copyright (C) 2016 Research In Motion
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qqnxmediautil_p.h"
+
+#include <QDebug>
+#include <QDir>
+#include <QFile>
+#include <QJsonDocument>
+#include <QJsonObject>
+#include <QJsonValue>
+#include <QMutex>
+#include <QString>
+#include <QXmlStreamReader>
+
+#include <mm/renderer.h>
+#include <mm/renderer/types.h>
+
+QT_BEGIN_NAMESPACE
+
+struct MmError {
+ int errorCode;
+ const char *name;
+};
+
+#define MM_ERROR_ENTRY(error) { error, #error }
+static const MmError mmErrors[] = {
+ MM_ERROR_ENTRY(MMR_ERROR_NONE),
+    MM_ERROR_ENTRY(MMR_ERROR_UNKNOWN),
+    MM_ERROR_ENTRY(MMR_ERROR_INVALID_PARAMETER),
+ MM_ERROR_ENTRY(MMR_ERROR_INVALID_STATE),
+ MM_ERROR_ENTRY(MMR_ERROR_UNSUPPORTED_VALUE),
+ MM_ERROR_ENTRY(MMR_ERROR_UNSUPPORTED_MEDIA_TYPE),
+ MM_ERROR_ENTRY(MMR_ERROR_MEDIA_PROTECTED),
+ MM_ERROR_ENTRY(MMR_ERROR_UNSUPPORTED_OPERATION),
+ MM_ERROR_ENTRY(MMR_ERROR_READ),
+ MM_ERROR_ENTRY(MMR_ERROR_WRITE),
+ MM_ERROR_ENTRY(MMR_ERROR_MEDIA_UNAVAILABLE),
+ MM_ERROR_ENTRY(MMR_ERROR_MEDIA_CORRUPTED),
+ MM_ERROR_ENTRY(MMR_ERROR_OUTPUT_UNAVAILABLE),
+ MM_ERROR_ENTRY(MMR_ERROR_NO_MEMORY),
+ MM_ERROR_ENTRY(MMR_ERROR_RESOURCE_UNAVAILABLE),
+ MM_ERROR_ENTRY(MMR_ERROR_MEDIA_DRM_NO_RIGHTS),
+ MM_ERROR_ENTRY(MMR_ERROR_DRM_CORRUPTED_DATA_STORE),
+ MM_ERROR_ENTRY(MMR_ERROR_DRM_OUTPUT_PROTECTION),
+ MM_ERROR_ENTRY(MMR_ERROR_DRM_OPL_HDMI),
+ MM_ERROR_ENTRY(MMR_ERROR_DRM_OPL_DISPLAYPORT),
+ MM_ERROR_ENTRY(MMR_ERROR_DRM_OPL_DVI),
+ MM_ERROR_ENTRY(MMR_ERROR_DRM_OPL_ANALOG_VIDEO),
+ MM_ERROR_ENTRY(MMR_ERROR_DRM_OPL_ANALOG_AUDIO),
+ MM_ERROR_ENTRY(MMR_ERROR_DRM_OPL_TOSLINK),
+ MM_ERROR_ENTRY(MMR_ERROR_DRM_OPL_SPDIF),
+ MM_ERROR_ENTRY(MMR_ERROR_DRM_OPL_BLUETOOTH),
+ MM_ERROR_ENTRY(MMR_ERROR_DRM_OPL_WIRELESSHD),
+};
+template <typename T, size_t N>
+constexpr size_t countof(T (&)[N])
+{
+    return N;
+}
+
+static constexpr size_t numMmErrors = countof(mmErrors);
+
+QString keyValueMapsLocation()
+{
+ QByteArray qtKeyValueMaps = qgetenv("QT_KEY_VALUE_MAPS");
+ if (qtKeyValueMaps.isNull())
+ return QString::fromUtf8("/etc/qt/keyvaluemaps");
+ else
+ return QString::fromUtf8(qtKeyValueMaps);
+}
+
+QJsonObject loadMapObject(const QString &keyValueMapPath)
+{
+ QFile mapFile(keyValueMapsLocation() + keyValueMapPath);
+ if (mapFile.open(QIODevice::ReadOnly)) {
+ QByteArray mapFileContents = mapFile.readAll();
+ QJsonDocument mapDocument = QJsonDocument::fromJson(mapFileContents);
+ if (mapDocument.isObject()) {
+ QJsonObject mapObject = mapDocument.object();
+ return mapObject;
+ }
+ }
+ return QJsonObject();
+}
+
+QString mmErrorMessage(const QString &msg, mmr_context_t *context, int *errorCode)
+{
+ const mmr_error_info_t * const mmError = mmr_error_info(context);
+
+ if (errorCode)
+ *errorCode = mmError->error_code;
+
+    // Guard against negative codes before the unsigned comparison; any
+    // out-of-range code falls through to the generic message.
+    if (mmError->error_code >= 0 && size_t(mmError->error_code) < numMmErrors) {
+        return QString::fromLatin1("%1: %2 (code %3)").arg(msg)
+                .arg(QString::fromUtf8(mmErrors[mmError->error_code].name))
+                .arg(mmError->error_code);
+    } else {
+        return QString::fromLatin1("%1: Unknown error code %2").arg(msg).arg(mmError->error_code);
+    }
+}
+
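+// Scans the application's bar-descriptor.xml for the access_protected_media
+// permission. A matching entry looks roughly like this (illustration only):
+//
+//   <permission>access_protected_media</permission>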
+bool checkForDrmPermission()
+{
+ QDir sandboxDir = QDir::home(); // always returns 'data' directory
+ sandboxDir.cdUp(); // change to app sandbox directory
+
+ QFile file(sandboxDir.filePath(QString::fromUtf8("app/native/bar-descriptor.xml")));
+ if (!file.open(QIODevice::ReadOnly)) {
+ qWarning() << "checkForDrmPermission: Unable to open bar-descriptor.xml";
+ return false;
+ }
+
+ QXmlStreamReader reader(&file);
+ while (!reader.atEnd()) {
+ reader.readNextStartElement();
+ if (reader.name() == QLatin1String("action")
+ || reader.name() == QLatin1String("permission")) {
+ if (reader.readElementText().trimmed() == QLatin1String("access_protected_media"))
+ return true;
+ }
+ }
+
+ return false;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/qnx/mediaplayer/qqnxmediautil_p.h b/src/plugins/multimedia/qnx/mediaplayer/qqnxmediautil_p.h
new file mode 100644
index 000000000..7b709142f
--- /dev/null
+++ b/src/plugins/multimedia/qnx/mediaplayer/qqnxmediautil_p.h
@@ -0,0 +1,32 @@
+// Copyright (C) 2016 Research In Motion
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QQNXMEDIAUTIL_P_H
+#define QQNXMEDIAUTIL_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qglobal.h>
+#include <QtMultimedia/qaudio.h>
+
+typedef struct mmr_context mmr_context_t;
+
+QT_BEGIN_NAMESPACE
+
+class QString;
+
+QString mmErrorMessage(const QString &msg, mmr_context_t *context, int *errorCode = nullptr);
+
+bool checkForDrmPermission();
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/qnx/mediaplayer/qqnxvideosink.cpp b/src/plugins/multimedia/qnx/mediaplayer/qqnxvideosink.cpp
new file mode 100644
index 000000000..18d4d1828
--- /dev/null
+++ b/src/plugins/multimedia/qnx/mediaplayer/qqnxvideosink.cpp
@@ -0,0 +1,26 @@
+// Copyright (C) 2016 Research In Motion
+// Copyright (C) 2021 The Qt Company
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qqnxvideosink_p.h"
+
+QT_BEGIN_NAMESPACE
+
+QQnxVideoSink::QQnxVideoSink(QVideoSink *parent)
+ : QPlatformVideoSink(parent)
+{
+}
+
+void QQnxVideoSink::setRhi(QRhi *rhi)
+{
+ m_rhi = rhi;
+}
+
+QRhi *QQnxVideoSink::rhi() const
+{
+ return m_rhi;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qqnxvideosink_p.cpp"
diff --git a/src/plugins/multimedia/qnx/mediaplayer/qqnxvideosink_p.h b/src/plugins/multimedia/qnx/mediaplayer/qqnxvideosink_p.h
new file mode 100644
index 000000000..2cc7990db
--- /dev/null
+++ b/src/plugins/multimedia/qnx/mediaplayer/qqnxvideosink_p.h
@@ -0,0 +1,41 @@
+// Copyright (C) 2016 Research In Motion
+// Copyright (C) 2021 The Qt Company
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QQNXVIDEOSINK_P_H
+#define QQNXVIDEOSINK_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformvideosink_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QQnxWindowGrabber;
+class QVideoSink;
+
+class QQnxVideoSink : public QPlatformVideoSink
+{
+ Q_OBJECT
+public:
+    explicit QQnxVideoSink(QVideoSink *parent = nullptr);
+
+ void setRhi(QRhi *) override;
+
+ QRhi *rhi() const;
+
+private:
+ QRhi *m_rhi = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/qnx/qnx.json b/src/plugins/multimedia/qnx/qnx.json
new file mode 100644
index 000000000..38df228ef
--- /dev/null
+++ b/src/plugins/multimedia/qnx/qnx.json
@@ -0,0 +1,3 @@
+{
+ "Keys": [ "qnx" ]
+}
diff --git a/src/plugins/multimedia/qnx/qqnxformatinfo.cpp b/src/plugins/multimedia/qnx/qqnxformatinfo.cpp
new file mode 100644
index 000000000..77492e80d
--- /dev/null
+++ b/src/plugins/multimedia/qnx/qqnxformatinfo.cpp
@@ -0,0 +1,36 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qqnxformatinfo_p.h"
+
+QT_BEGIN_NAMESPACE
+
+QQnxFormatInfo::QQnxFormatInfo()
+{
+ // ### This is probably somewhat correct for encoding, but should be checked
+ encoders = {
+ { QMediaFormat::MPEG4,
+ { QMediaFormat::AudioCodec::AAC },
+ { QMediaFormat::VideoCodec::H264 } },
+ { QMediaFormat::Mpeg4Audio,
+ { QMediaFormat::AudioCodec::AAC },
+ {} },
+ { QMediaFormat::Wave,
+ { QMediaFormat::AudioCodec::Wave },
+ {} },
+ { QMediaFormat::AAC,
+ { QMediaFormat::AudioCodec::AAC },
+ {} },
+ };
+
+ // ### There can apparently be more codecs and demuxers installed on the system as plugins
+ // Need to find a way to determine the list at compile time or runtime
+ decoders = encoders;
+
+ // ###
+ imageFormats << QImageCapture::JPEG;
+}
+
+QQnxFormatInfo::~QQnxFormatInfo() = default;
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/qnx/qqnxformatinfo_p.h b/src/plugins/multimedia/qnx/qqnxformatinfo_p.h
new file mode 100644
index 000000000..aae3a026a
--- /dev/null
+++ b/src/plugins/multimedia/qnx/qqnxformatinfo_p.h
@@ -0,0 +1,33 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QQNXFORMATINFO_H
+#define QQNXFORMATINFO_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediaformatinfo_p.h>
+#include <qhash.h>
+#include <qlist.h>
+
+QT_BEGIN_NAMESPACE
+
+class QQnxFormatInfo : public QPlatformMediaFormatInfo
+{
+public:
+ QQnxFormatInfo();
+ ~QQnxFormatInfo();
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/qnx/qqnxmediaintegration.cpp b/src/plugins/multimedia/qnx/qqnxmediaintegration.cpp
new file mode 100644
index 000000000..8567a69fd
--- /dev/null
+++ b/src/plugins/multimedia/qnx/qqnxmediaintegration.cpp
@@ -0,0 +1,79 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qqnxmediaintegration_p.h"
+#include "qqnxmediacapturesession_p.h"
+#include "qqnxmediarecorder_p.h"
+#include "qqnxformatinfo_p.h"
+#include "qqnxvideodevices_p.h"
+#include "qqnxvideosink_p.h"
+#include "qqnxmediaplayer_p.h"
+#include "qqnximagecapture_p.h"
+#include "qqnxplatformcamera_p.h"
+#include <QtMultimedia/private/qplatformmediaplugin_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QQnxMediaPlugin : public QPlatformMediaPlugin
+{
+ Q_OBJECT
+ Q_PLUGIN_METADATA(IID QPlatformMediaPlugin_iid FILE "qnx.json")
+
+public:
+ QQnxMediaPlugin()
+ : QPlatformMediaPlugin()
+ {}
+
+ QPlatformMediaIntegration* create(const QString &name) override
+ {
+ if (name == u"qnx")
+ return new QQnxMediaIntegration;
+ return nullptr;
+ }
+};
+
+QQnxMediaIntegration::QQnxMediaIntegration() : QPlatformMediaIntegration(QLatin1String("qnx")) { }
+
+QPlatformMediaFormatInfo *QQnxMediaIntegration::createFormatInfo()
+{
+ return new QQnxFormatInfo;
+}
+
+QPlatformVideoDevices *QQnxMediaIntegration::createVideoDevices()
+{
+ return new QQnxVideoDevices(this);
+}
+
+QMaybe<QPlatformVideoSink *> QQnxMediaIntegration::createVideoSink(QVideoSink *sink)
+{
+ return new QQnxVideoSink(sink);
+}
+
+QMaybe<QPlatformMediaPlayer *> QQnxMediaIntegration::createPlayer(QMediaPlayer *parent)
+{
+ return new QQnxMediaPlayer(parent);
+}
+
+QMaybe<QPlatformMediaCaptureSession *> QQnxMediaIntegration::createCaptureSession()
+{
+ return new QQnxMediaCaptureSession();
+}
+
+QMaybe<QPlatformMediaRecorder *> QQnxMediaIntegration::createRecorder(QMediaRecorder *parent)
+{
+ return new QQnxMediaRecorder(parent);
+}
+
+QMaybe<QPlatformCamera *> QQnxMediaIntegration::createCamera(QCamera *parent)
+{
+ return new QQnxPlatformCamera(parent);
+}
+
+QMaybe<QPlatformImageCapture *> QQnxMediaIntegration::createImageCapture(QImageCapture *parent)
+{
+ return new QQnxImageCapture(parent);
+}
+
+QT_END_NAMESPACE
+
+#include "qqnxmediaintegration.moc"
diff --git a/src/plugins/multimedia/qnx/qqnxmediaintegration_p.h b/src/plugins/multimedia/qnx/qqnxmediaintegration_p.h
new file mode 100644
index 000000000..60fafc246
--- /dev/null
+++ b/src/plugins/multimedia/qnx/qqnxmediaintegration_p.h
@@ -0,0 +1,50 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QQnxMediaIntegration_H
+#define QQnxMediaIntegration_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediaintegration_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QQnxPlayerInterface;
+class QQnxFormatInfo;
+
+class QQnxMediaIntegration : public QPlatformMediaIntegration
+{
+public:
+ QQnxMediaIntegration();
+
+ QMaybe<QPlatformVideoSink *> createVideoSink(QVideoSink *sink) override;
+
+ QMaybe<QPlatformMediaPlayer *> createPlayer(QMediaPlayer *parent) override;
+
+ QMaybe<QPlatformMediaCaptureSession *> createCaptureSession() override;
+
+ QMaybe<QPlatformMediaRecorder *> createRecorder(QMediaRecorder *parent) override;
+
+ QMaybe<QPlatformCamera *> createCamera(QCamera *parent) override;
+
+ QMaybe<QPlatformImageCapture *> createImageCapture(QImageCapture *parent) override;
+
+protected:
+ QPlatformMediaFormatInfo *createFormatInfo() override;
+
+ QPlatformVideoDevices *createVideoDevices() override;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/qnx/qqnxvideodevices.cpp b/src/plugins/multimedia/qnx/qqnxvideodevices.cpp
new file mode 100644
index 000000000..ea0cfd956
--- /dev/null
+++ b/src/plugins/multimedia/qnx/qqnxvideodevices.cpp
@@ -0,0 +1,111 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qqnxvideodevices_p.h"
+#include "qqnxcamera_p.h"
+#include "private/qcameradevice_p.h"
+#include "qcameradevice.h"
+
+#include <qdir.h>
+#include <qdebug.h>
+
+#include <optional>
+
+QT_BEGIN_NAMESPACE
+
+static QVideoFrameFormat::PixelFormat fromCameraFrametype(camera_frametype_t type)
+{
+ switch (type) {
+ default:
+ case CAMERA_FRAMETYPE_UNSPECIFIED:
+ return QVideoFrameFormat::Format_Invalid;
+ case CAMERA_FRAMETYPE_NV12:
+ return QVideoFrameFormat::Format_NV12;
+ case CAMERA_FRAMETYPE_RGB8888:
+ return QVideoFrameFormat::Format_ARGB8888;
+ case CAMERA_FRAMETYPE_JPEG:
+ return QVideoFrameFormat::Format_Jpeg;
+ case CAMERA_FRAMETYPE_GRAY8:
+ return QVideoFrameFormat::Format_Y8;
+ case CAMERA_FRAMETYPE_CBYCRY:
+ return QVideoFrameFormat::Format_UYVY;
+ case CAMERA_FRAMETYPE_YCBCR420P:
+ return QVideoFrameFormat::Format_YUV420P;
+ case CAMERA_FRAMETYPE_YCBYCR:
+ return QVideoFrameFormat::Format_YUYV;
+ }
+}
+
+static std::optional<QCameraDevice> createCameraDevice(camera_unit_t unit, bool isDefault)
+{
+ const QQnxCamera camera(unit);
+
+ if (!camera.isValid()) {
+ qWarning() << "Invalid camera unit:" << unit;
+ return {};
+ }
+
+ auto *p = new QCameraDevicePrivate;
+
+ p->id = QByteArray::number(camera.unit());
+ p->description = camera.name();
+ p->isDefault = isDefault;
+
+ const QList<camera_frametype_t> frameTypes = camera.supportedVfFrameTypes();
+
+ for (camera_res_t res : camera.supportedVfResolutions()) {
+ const QSize resolution(res.width, res.height);
+
+ p->photoResolutions.append(resolution);
+
+        for (camera_frametype_t frameType : frameTypes) {
+ const QVideoFrameFormat::PixelFormat pixelFormat = fromCameraFrametype(frameType);
+
+ if (pixelFormat == QVideoFrameFormat::Format_Invalid)
+ continue;
+
+ auto *f = new QCameraFormatPrivate;
+ p->videoFormats.append(f->create());
+
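+            // The QCameraFormat appended above shares f's d-pointer (an
+            // assumption about the private API), so the fields set here are
+            // reflected in the appended format as well.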
+ f->resolution = resolution;
+ f->pixelFormat = pixelFormat;
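+            // Sentinel: start the minimum above any realistic frame rate so
+            // the first rate reported below always replaces it.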
+ f->minFrameRate = 1.e10;
+
+ for (double fr : camera.specifiedVfFrameRates(frameType, res)) {
+ if (fr < f->minFrameRate)
+ f->minFrameRate = fr;
+ if (fr > f->maxFrameRate)
+ f->maxFrameRate = fr;
+ }
+ }
+ }
+
+ return p->create();
+}
+
+QQnxVideoDevices::QQnxVideoDevices(QPlatformMediaIntegration *integration)
+ : QPlatformVideoDevices(integration)
+{
+}
+
+QList<QCameraDevice> QQnxVideoDevices::videoDevices() const
+{
+ QList<QCameraDevice> cameras;
+
+ bool isDefault = true;
+
+ for (const camera_unit_t cameraUnit : QQnxCamera::supportedUnits()) {
+ const std::optional<QCameraDevice> cameraDevice = createCameraDevice(cameraUnit, isDefault);
+
+ if (!cameraDevice)
+ continue;
+
+ cameras.append(*cameraDevice);
+
+ isDefault = false;
+ }
+
+ return cameras;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/qnx/qqnxvideodevices_p.h b/src/plugins/multimedia/qnx/qqnxvideodevices_p.h
new file mode 100644
index 000000000..cc2284e57
--- /dev/null
+++ b/src/plugins/multimedia/qnx/qqnxvideodevices_p.h
@@ -0,0 +1,32 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QQNXVIDEODEVICES_H
+#define QQNXVIDEODEVICES_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformvideodevices_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QQnxVideoDevices : public QPlatformVideoDevices
+{
+public:
+ explicit QQnxVideoDevices(QPlatformMediaIntegration *integration);
+
+ QList<QCameraDevice> videoDevices() const override;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/wasm/CMakeLists.txt b/src/plugins/multimedia/wasm/CMakeLists.txt
new file mode 100644
index 000000000..21f3e3472
--- /dev/null
+++ b/src/plugins/multimedia/wasm/CMakeLists.txt
@@ -0,0 +1,25 @@
+qt_internal_add_plugin(QWasmMediaPlugin
+ OUTPUT_NAME wasmmediaplugin
+ PLUGIN_TYPE multimedia
+ SOURCES
+ qwasmmediaintegration.cpp qwasmmediaintegration_p.h
+ mediaplayer/qwasmmediaplayer.cpp mediaplayer/qwasmmediaplayer_p.h
+ mediaplayer/qwasmvideosink.cpp mediaplayer/qwasmvideosink_p.h
+ common/qwasmvideooutput.cpp common/qwasmvideooutput_p.h
+ common/qwasmaudiooutput.cpp common/qwasmaudiooutput_p.h
+ common/qwasmaudioinput.cpp common/qwasmaudioinput_p.h
+ mediacapture/qwasmmediacapturesession.cpp mediacapture/qwasmmediacapturesession_p.h
+ mediacapture/qwasmmediarecorder.cpp mediacapture/qwasmmediarecorder_p.h
+ mediacapture/qwasmcamera.cpp mediacapture/qwasmcamera_p.h
+ mediacapture/qwasmimagecapture.cpp mediacapture/qwasmimagecapture_p.h
+ INCLUDE_DIRECTORIES
+ common
+ mediaplayer
+ mediacapture
+ LIBRARIES
+ Qt::MultimediaPrivate
+ Qt::CorePrivate
+ openal
+)
+
+target_link_libraries(QWasmMediaPlugin PUBLIC embind)
diff --git a/src/plugins/multimedia/wasm/common/qwasmaudioinput.cpp b/src/plugins/multimedia/wasm/common/qwasmaudioinput.cpp
new file mode 100644
index 000000000..a0418c5c2
--- /dev/null
+++ b/src/plugins/multimedia/wasm/common/qwasmaudioinput.cpp
@@ -0,0 +1,107 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwasmaudioinput_p.h"
+
+#include <qaudioinput.h>
+#include <private/qstdweb_p.h>
+
+QT_BEGIN_NAMESPACE
+
+Q_LOGGING_CATEGORY(qWasmAudioInput, "qt.multimedia.wasm.audioinput")
+
+QWasmAudioInput::QWasmAudioInput(QAudioInput *parent)
+ : QObject(parent), QPlatformAudioInput(parent)
+{
+ m_wasMuted = false;
+ setDeviceSourceStream("");
+}
+
+QWasmAudioInput::~QWasmAudioInput() = default;
+
+void QWasmAudioInput::setMuted(bool muted)
+{
+ qCDebug(qWasmAudioInput) << Q_FUNC_INFO << muted;
+ if (muted == m_wasMuted)
+ return;
+ if (m_mediaStream.isNull() || m_mediaStream.isUndefined())
+ return;
+ emscripten::val audioTracks = m_mediaStream.call<emscripten::val>("getAudioTracks");
+ if (audioTracks.isNull() || audioTracks.isUndefined())
+ return;
+ if (audioTracks["length"].as<int>() < 1)
+ return;
+ audioTracks[0].set("muted", muted);
+
+ emit mutedChanged(muted);
+ m_wasMuted = muted;
+
+}
+
+bool QWasmAudioInput::isMuted() const
+{
+ return m_wasMuted;
+}
+
+void QWasmAudioInput::setAudioDevice(const QAudioDevice &audioDevice)
+{
+ if (device == audioDevice)
+ return;
+
+ device = audioDevice;
+ setDeviceSourceStream(device.id().toStdString());
+}
+
+void QWasmAudioInput::setVolume(float volume)
+{
+ Q_UNUSED(volume)
+ // TODO seems no easy way to set input volume
+}
+
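+// Requests an audio MediaStream via navigator.mediaDevices.getUserMedia().
+// A non-empty id constrains capture to that device; the empty id used by the
+// constructor selects the browser's default microphone.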
+void QWasmAudioInput::setDeviceSourceStream(const std::string &id)
+{
+ qCDebug(qWasmAudioInput) << Q_FUNC_INFO << id;
+ emscripten::val navigator = emscripten::val::global("navigator");
+ emscripten::val mediaDevices = navigator["mediaDevices"];
+
+ if (mediaDevices.isNull() || mediaDevices.isUndefined()) {
+ qWarning() << "No media devices found";
+ return;
+ }
+
+ qstdweb::PromiseCallbacks getUserMediaCallback{
+ .thenFunc =
+ [this](emscripten::val stream) {
+ qCDebug(qWasmAudioInput) << "getUserMediaSuccess";
+ m_mediaStream = stream;
+ },
+ .catchFunc =
+ [](emscripten::val error) {
+ qCDebug(qWasmAudioInput)
+ << "addCameraSourceElement getUserMedia fail"
+ << QString::fromStdString(error["name"].as<std::string>())
+ << QString::fromStdString(error["message"].as<std::string>());
+ }
+ };
+
+ emscripten::val constraints = emscripten::val::object();
+ constraints.set("audio", true);
+ if (!id.empty())
+ constraints.set("deviceId", id);
+
+    // Going through getUserMedia() also prompts the user for microphone permission.
+ qstdweb::Promise::make(mediaDevices, QStringLiteral("getUserMedia"),
+ std::move(getUserMediaCallback), constraints);
+}
+
+emscripten::val QWasmAudioInput::mediaStream()
+{
+ return m_mediaStream;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qwasmaudioinput_p.cpp"
diff --git a/src/plugins/multimedia/wasm/common/qwasmaudioinput_p.h b/src/plugins/multimedia/wasm/common/qwasmaudioinput_p.h
new file mode 100644
index 000000000..c772ee956
--- /dev/null
+++ b/src/plugins/multimedia/wasm/common/qwasmaudioinput_p.h
@@ -0,0 +1,57 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWASMAUDIOINPUT_H
+#define QWASMAUDIOINPUT_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qobject.h>
+#include <QtCore/qloggingcategory.h>
+
+#include <private/qtmultimediaglobal_p.h>
+#include <private/qplatformaudioinput_p.h>
+
+#include <emscripten.h>
+#include <emscripten/val.h>
+
+QT_BEGIN_NAMESPACE
+
+Q_DECLARE_LOGGING_CATEGORY(qWasmAudioInput)
+
+class QWasmAudioInput : public QObject, public QPlatformAudioInput
+{
+ Q_OBJECT
+public:
+ explicit QWasmAudioInput(QAudioInput *parent);
+ ~QWasmAudioInput();
+
+ void setMuted(bool muted) override;
+ void setAudioDevice(const QAudioDevice & device) final;
+
+ bool isMuted() const;
+ void setVolume(float volume) final;
+ emscripten::val mediaStream();
+
+Q_SIGNALS:
+ void mutedChanged(bool muted);
+
+private:
+ bool m_wasMuted = false;
+ void setDeviceSourceStream(const std::string &id);
+ emscripten::val m_mediaStream;
+};
+
+QT_END_NAMESPACE
+
+#endif // QWASMAUDIOINPUT_H
diff --git a/src/plugins/multimedia/wasm/common/qwasmaudiooutput.cpp b/src/plugins/multimedia/wasm/common/qwasmaudiooutput.cpp
new file mode 100644
index 000000000..a9a644140
--- /dev/null
+++ b/src/plugins/multimedia/wasm/common/qwasmaudiooutput.cpp
@@ -0,0 +1,378 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <qaudiodevice.h>
+#include <qaudiooutput.h>
+#include <qwasmaudiooutput_p.h>
+
+#include <QMimeDatabase>
+#include <QtCore/qloggingcategory.h>
+#include <QMediaDevices>
+#include <QUrl>
+#include <QFile>
+#include <QFileInfo>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qWasmMediaAudioOutput, "qt.multimedia.wasm.audiooutput")
+
+QWasmAudioOutput::QWasmAudioOutput(QAudioOutput *parent)
+ : QPlatformAudioOutput(parent)
+{
+}
+
+QWasmAudioOutput::~QWasmAudioOutput() = default;
+
+void QWasmAudioOutput::setAudioDevice(const QAudioDevice &audioDevice)
+{
+    qCDebug(qWasmMediaAudioOutput) << Q_FUNC_INFO << audioDevice.id();
+    device = audioDevice;
+}
+
+void QWasmAudioOutput::setVideoElement(emscripten::val videoElement)
+{
+ m_videoElement = videoElement;
+}
+
+emscripten::val QWasmAudioOutput::videoElement()
+{
+ return m_videoElement;
+}
+
+void QWasmAudioOutput::setMuted(bool muted)
+{
+ emscripten::val realElement = videoElement();
+ if (!realElement.isUndefined()) {
+ realElement.set("muted", muted);
+ return;
+ }
+ if (m_audio.isUndefined() || m_audio.isNull()) {
+ qCDebug(qWasmMediaAudioOutput) << "Error"
+ << "Audio element could not be created";
+        emit errorOccured(QMediaPlayer::ResourceError,
+                          QStringLiteral("Audio element not available"));
+        return;
+    }
+    // The HTMLMediaElement property is "muted"; "mute" does not exist.
+    m_audio.set("muted", muted);
+}
+
+void QWasmAudioOutput::setVolume(float volume)
+{
+    volume = qBound(0.0f, volume, 1.0f); // qBound needs all three args the same type
+ emscripten::val realElement = videoElement();
+ if (!realElement.isUndefined()) {
+ realElement.set("volume", volume);
+ return;
+ }
+ if (m_audio.isUndefined() || m_audio.isNull()) {
+ qCDebug(qWasmMediaAudioOutput) << "Error"
+ << "Audio element not available";
+ emit errorOccured(QMediaPlayer::ResourceError,
+ QStringLiteral("Media file could not be opened"));
+ return;
+ }
+
+ m_audio.set("volume", volume);
+}
+
+void QWasmAudioOutput::setSource(const QUrl &url)
+{
+ qCDebug(qWasmMediaAudioOutput) << Q_FUNC_INFO << url;
+ if (url.isEmpty()) {
+ stop();
+ return;
+ }
+
+ createAudioElement(device.id().toStdString());
+
+ if (m_audio.isUndefined() || m_audio.isNull()) {
+ qCDebug(qWasmMediaAudioOutput) << "Error"
+ << "Audio element could not be created";
+ emit errorOccured(QMediaPlayer::ResourceError,
+ QStringLiteral("Audio element could not be created"));
+ return;
+ }
+
+ emscripten::val document = emscripten::val::global("document");
+ emscripten::val body = document["body"];
+
+ m_audio.set("id", device.id().toStdString());
+
+ body.call<void>("appendChild", m_audio);
+
+ if (url.isLocalFile()) { // is localfile
+ qCDebug(qWasmMediaAudioOutput) << "is localfile";
+ m_source = url.toLocalFile();
+
+ QFile mediaFile(m_source);
+ if (!mediaFile.open(QIODevice::ReadOnly)) {
+ qCDebug(qWasmMediaAudioOutput) << "Error"
+ << "Media file could not be opened";
+ emit errorOccured(QMediaPlayer::ResourceError,
+ QStringLiteral("Media file could not be opened"));
+ return;
+ }
+
+ // local files are relatively small due to browser filesystem being restricted
+ QByteArray content = mediaFile.readAll();
+
+ QMimeDatabase db;
+ qCDebug(qWasmMediaAudioOutput) << db.mimeTypeForData(content).name();
+
+ qstdweb::Blob contentBlob = qstdweb::Blob::copyFrom(content.constData(), content.size());
+ emscripten::val contentUrl =
+ qstdweb::window()["URL"].call<emscripten::val>("createObjectURL", contentBlob.val());
+
+ emscripten::val audioSourceElement =
+ document.call<emscripten::val>("createElement", std::string("source"));
+
+ audioSourceElement.set("src", contentUrl);
+
+ // work around Safari not being able to read audio from blob URLs.
+ QFileInfo info(m_source);
+ QMimeType mimeType = db.mimeTypeForFile(info);
+
+ audioSourceElement.set("type", mimeType.name().toStdString());
+ m_audio.call<void>("appendChild", audioSourceElement);
+
+ m_audio.call<void>("setAttribute", emscripten::val("srcObject"), contentUrl);
+
+ } else {
+ m_source = url.toString();
+ m_audio.set("src", m_source.toStdString());
+ }
+ m_audio.set("id", device.id().toStdString());
+
+ body.call<void>("appendChild", m_audio);
+ qCDebug(qWasmMediaAudioOutput) << Q_FUNC_INFO << device.id();
+
+ doElementCallbacks();
+}
+
+void QWasmAudioOutput::setSource(QIODevice *stream)
+{
+ m_audioIODevice = stream;
+}
+
+void QWasmAudioOutput::start()
+{
+ if (m_audio.isNull() || m_audio.isUndefined()) {
+ qCDebug(qWasmMediaAudioOutput) << "audio failed to start";
+ emit errorOccured(QMediaPlayer::ResourceError,
+ QStringLiteral("Audio element resource error"));
+ return;
+ }
+
+ m_audio.call<void>("play");
+}
+
+void QWasmAudioOutput::stop()
+{
+ if (m_audio.isNull() || m_audio.isUndefined()) {
+ qCDebug(qWasmMediaAudioOutput) << "audio failed to start";
+ emit errorOccured(QMediaPlayer::ResourceError,
+ QStringLiteral("Audio element resource error"));
+ return;
+ }
+ if (!m_source.isEmpty()) {
+ pause();
+ m_audio.set("currentTime", emscripten::val(0));
+ }
+ if (m_audioIODevice) {
+ m_audioIODevice->close();
+ delete m_audioIODevice;
+        m_audioIODevice = nullptr;
+ }
+}
+
+void QWasmAudioOutput::pause()
+{
+ if (m_audio.isNull() || m_audio.isUndefined()) {
+ qCDebug(qWasmMediaAudioOutput) << "audio failed to start";
+ emit errorOccured(QMediaPlayer::ResourceError,
+ QStringLiteral("Audio element resource error"));
+ return;
+ }
+ m_audio.call<emscripten::val>("pause");
+}
+
+void QWasmAudioOutput::createAudioElement(const std::string &id)
+{
+ emscripten::val document = emscripten::val::global("document");
+ m_audio = document.call<emscripten::val>("createElement", std::string("audio"));
+
+    // setSinkId() lets the user pick the audio output device, but it only
+    // works in Chrome and Firefox; in Firefox it is additionally gated behind
+    // the media.setsinkid.enabled preference.
+
+ if (!m_audio.hasOwnProperty("sinkId") || m_audio["sinkId"].isUndefined()) {
+ return;
+ }
+
+ std::string usableId = id;
+ if (usableId.empty())
+ usableId = QMediaDevices::defaultAudioOutput().id();
+
+ qstdweb::PromiseCallbacks sinkIdCallbacks{
+        .thenFunc = [](emscripten::val) { qCDebug(qWasmMediaAudioOutput) << "setSinkId ok"; },
+ .catchFunc =
+ [](emscripten::val) {
+ qCWarning(qWasmMediaAudioOutput) << "Error while trying to setSinkId";
+ }
+ };
+ qstdweb::Promise::make(m_audio, "setSinkId", std::move(sinkIdCallbacks), std::move(usableId));
+
+ m_audio.set("id", usableId.c_str());
+}
+
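+// Hooks the standard HTMLMediaElement events (error, loadeddata, canplay,
+// canplaythrough, play, durationchange, ended, progress) up to the signals
+// consumed by the media player.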
+void QWasmAudioOutput::doElementCallbacks()
+{
+ // error
+ auto errorCallback = [&](emscripten::val event) {
+ qCDebug(qWasmMediaAudioOutput) << "error";
+ if (event.isUndefined() || event.isNull())
+ return;
+ emit errorOccured(m_audio["error"]["code"].as<int>(),
+ QString::fromStdString(m_audio["error"]["message"].as<std::string>()));
+
+ QString errorMessage =
+ QString::fromStdString(m_audio["error"]["message"].as<std::string>());
+ if (errorMessage.isEmpty()) {
+ switch (m_audio["error"]["code"].as<int>()) {
+ case AudioElementError::MEDIA_ERR_ABORTED:
+ errorMessage = QStringLiteral("aborted by the user agent at the user's request.");
+ break;
+ case AudioElementError::MEDIA_ERR_NETWORK:
+ errorMessage = QStringLiteral("network error.");
+ break;
+ case AudioElementError::MEDIA_ERR_DECODE:
+ errorMessage = QStringLiteral("decoding error.");
+ break;
+ case AudioElementError::MEDIA_ERR_SRC_NOT_SUPPORTED:
+ errorMessage = QStringLiteral("src attribute not suitable.");
+ break;
+ };
+ }
+ qCDebug(qWasmMediaAudioOutput) << m_audio["error"]["code"].as<int>() << errorMessage;
+
+ emit errorOccured(m_audio["error"]["code"].as<int>(), errorMessage);
+ };
+ m_errorChangeEvent.reset(new qstdweb::EventCallback(m_audio, "error", errorCallback));
+
+ // loadeddata
+ auto loadedDataCallback = [&](emscripten::val event) {
+ Q_UNUSED(event)
+ qCDebug(qWasmMediaAudioOutput) << "loaded data";
+ qstdweb::window()["URL"].call<emscripten::val>("revokeObjectURL", m_audio["src"]);
+ };
+ m_loadedDataEvent.reset(new qstdweb::EventCallback(m_audio, "loadeddata", loadedDataCallback));
+
+ // canplay
+ auto canPlayCallback = [&](emscripten::val event) {
+ if (event.isUndefined() || event.isNull())
+ return;
+ qCDebug(qWasmMediaAudioOutput) << "can play";
+ emit readyChanged(true);
+ emit stateChanged(QWasmMediaPlayer::Preparing);
+ };
+ m_canPlayChangeEvent.reset(new qstdweb::EventCallback(m_audio, "canplay", canPlayCallback));
+
+ // canplaythrough
+ auto canPlayThroughCallback = [&](emscripten::val event) {
+ Q_UNUSED(event)
+ emit stateChanged(QWasmMediaPlayer::Prepared);
+ };
+ m_canPlayThroughChangeEvent.reset(
+ new qstdweb::EventCallback(m_audio, "canplaythrough", canPlayThroughCallback));
+
+ // play
+ auto playCallback = [&](emscripten::val event) {
+ Q_UNUSED(event)
+ qCDebug(qWasmMediaAudioOutput) << "play";
+ emit stateChanged(QWasmMediaPlayer::Started);
+ };
+ m_playEvent.reset(new qstdweb::EventCallback(m_audio, "play", playCallback));
+
+ // durationchange
+ auto durationChangeCallback = [&](emscripten::val event) {
+ qCDebug(qWasmMediaAudioOutput) << "durationChange";
+
+ // duration in ms
+ emit durationChanged(event["target"]["duration"].as<double>() * 1000);
+ };
+ m_durationChangeEvent.reset(
+ new qstdweb::EventCallback(m_audio, "durationchange", durationChangeCallback));
+
+ // ended
+ auto endedCallback = [&](emscripten::val event) {
+ Q_UNUSED(event)
+ qCDebug(qWasmMediaAudioOutput) << "ended";
+ m_currentMediaStatus = QMediaPlayer::EndOfMedia;
+ emit statusChanged(m_currentMediaStatus);
+ };
+ m_endedEvent.reset(new qstdweb::EventCallback(m_audio, "ended", endedCallback));
+
+ // progress (buffering progress)
+    auto progressCallback = [&](emscripten::val event) {
+        if (event.isUndefined() || event.isNull())
+            return;
+        qCDebug(qWasmMediaAudioOutput) << "progress";
+        const double duration = event["target"]["duration"].as<double>();
+        if (duration < 0) // track not quite ready yet
+            return;
+
+        emscripten::val timeRanges = event["target"]["buffered"];
+
+        if (!timeRanges.isNull() && !timeRanges.isUndefined()
+            && timeRanges["length"].as<int>() == 1) {
+            emscripten::val dVal = timeRanges.call<emscripten::val>("end", 0);
+
+            if (!dVal.isNull() && !dVal.isUndefined()) {
+                double bufferedEnd = dVal.as<double>();
+
+                if (duration > 0 && bufferedEnd > 0) {
+                    m_currentBufferedValue = (bufferedEnd / duration * 100);
+                    qCDebug(qWasmMediaAudioOutput)
+                            << "progress buffered" << m_currentBufferedValue;
+
+                    emit bufferingChanged(m_currentBufferedValue);
+                    if (bufferedEnd == duration)
+                        m_currentMediaStatus = QMediaPlayer::BufferedMedia;
+                    else
+                        m_currentMediaStatus = QMediaPlayer::BufferingMedia;
+
+                    emit statusChanged(m_currentMediaStatus);
+                }
+            }
+        }
+    };
+    m_progressChangeEvent.reset(new qstdweb::EventCallback(m_audio, "progress", progressCallback));
+
+    // timeupdate
+ auto timeUpdateCallback = [&](emscripten::val event) {
+ qCDebug(qWasmMediaAudioOutput)
+ << "timeupdate" << (event["target"]["currentTime"].as<double>() * 1000);
+
+ // qt progress is ms
+ emit progressChanged(event["target"]["currentTime"].as<double>() * 1000);
+ };
+ m_timeUpdateEvent.reset(new qstdweb::EventCallback(m_audio, "timeupdate", timeUpdateCallback));
+
+ // pause
+ auto pauseCallback = [&](emscripten::val event) {
+ Q_UNUSED(event)
+ qCDebug(qWasmMediaAudioOutput) << "pause";
+
+        const double currentTime = m_audio["currentTime"].as<double>(); // in seconds
+        const double duration = m_audio["duration"].as<double>(); // in seconds
+        if (currentTime > 0 && currentTime < duration) {
+ emit stateChanged(QWasmMediaPlayer::Paused);
+ } else {
+ emit stateChanged(QWasmMediaPlayer::Stopped);
+ }
+ };
+ m_pauseChangeEvent.reset(new qstdweb::EventCallback(m_audio, "pause", pauseCallback));
+}
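+
+// The subscription pattern used above, as a self-contained sketch: a
+// qstdweb::EventCallback keeps the underlying JS listener alive, so each one
+// is stored in a member for as long as the events are wanted. The member
+// m_volumeChangeEvent below is hypothetical, for illustration only:
+//
+//     auto volumeChangeCallback = [&](emscripten::val event) {
+//         qCDebug(qWasmMediaAudioOutput)
+//                 << "volume" << event["target"]["volume"].as<double>();
+//     };
+//     m_volumeChangeEvent.reset(
+//             new qstdweb::EventCallback(m_audio, "volumechange", volumeChangeCallback));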
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/wasm/common/qwasmaudiooutput_p.h b/src/plugins/multimedia/wasm/common/qwasmaudiooutput_p.h
new file mode 100644
index 000000000..69fda120b
--- /dev/null
+++ b/src/plugins/multimedia/wasm/common/qwasmaudiooutput_p.h
@@ -0,0 +1,97 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWASMAUDIOOUTPUT_H
+#define QWASMAUDIOOUTPUT_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformaudiooutput_p.h>
+#include "qwasmmediaplayer_p.h"
+
+#include <emscripten/val.h>
+#include <private/qstdweb_p.h>
+#include <private/qwasmaudiosink_p.h>
+#include <QIODevice>
+#include <QObject>
+
+QT_BEGIN_NAMESPACE
+
+class Q_MULTIMEDIA_EXPORT QWasmAudioOutput : public QObject, public QPlatformAudioOutput
+{
+ Q_OBJECT
+
+public:
+ QWasmAudioOutput(QAudioOutput *qq);
+ ~QWasmAudioOutput();
+
+ enum AudioElementError {
+ MEDIA_ERR_ABORTED = 1,
+ MEDIA_ERR_NETWORK,
+ MEDIA_ERR_DECODE,
+ MEDIA_ERR_SRC_NOT_SUPPORTED
+ };
+
+ void setAudioDevice(const QAudioDevice &device) final;
+ void setMuted(bool muted) override;
+ void setVolume(float volume) override;
+
+ void start();
+ void stop();
+ void pause();
+
+ void setSource(const QUrl &url);
+ void setSource(QIODevice *stream);
+ void setVideoElement(emscripten::val videoElement);
+
+Q_SIGNALS:
+ void readyChanged(bool);
+ void bufferingChanged(qint32 percent);
+ void errorOccured(qint32 code, const QString &message);
+ void stateChanged(QWasmMediaPlayer::QWasmMediaPlayerState newState);
+ void progressChanged(qint32 position);
+ void durationChanged(qint64 duration);
+ void statusChanged(QMediaPlayer::MediaStatus status);
+ void sizeChange(qint32 width, qint32 height);
+ void metaDataLoaded();
+
+private:
+ void doElementCallbacks();
+ void createAudioElement(const std::string &id);
+
+ emscripten::val videoElement();
+
+ QScopedPointer<QWasmAudioSink> m_sink;
+ QScopedPointer<qstdweb::EventCallback> m_playEvent;
+ QScopedPointer<qstdweb::EventCallback> m_endedEvent;
+ QScopedPointer<qstdweb::EventCallback> m_durationChangeEvent;
+ QScopedPointer<qstdweb::EventCallback> m_errorChangeEvent;
+ QScopedPointer<qstdweb::EventCallback> m_canPlayChangeEvent;
+ QScopedPointer<qstdweb::EventCallback> m_canPlayThroughChangeEvent;
+
+ QScopedPointer<qstdweb::EventCallback> m_playingChangeEvent;
+ QScopedPointer<qstdweb::EventCallback> m_progressChangeEvent;
+ QScopedPointer<qstdweb::EventCallback> m_pauseChangeEvent;
+ QScopedPointer<qstdweb::EventCallback> m_timeUpdateEvent;
+ QScopedPointer<qstdweb::EventCallback> m_loadedDataEvent;
+
+ QString m_source;
+ QIODevice *m_audioIODevice = nullptr;
+ emscripten::val m_audio = emscripten::val::undefined();
+ emscripten::val m_videoElement = emscripten::val::undefined();
+    QMediaPlayer::MediaStatus m_currentMediaStatus = QMediaPlayer::NoMedia;
+    qreal m_currentBufferedValue = 0.;
+};
+
+QT_END_NAMESPACE
+
+#endif // QWASMAUDIOOUTPUT_H
diff --git a/src/plugins/multimedia/wasm/common/qwasmvideooutput.cpp b/src/plugins/multimedia/wasm/common/qwasmvideooutput.cpp
new file mode 100644
index 000000000..84d325635
--- /dev/null
+++ b/src/plugins/multimedia/wasm/common/qwasmvideooutput.cpp
@@ -0,0 +1,1071 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QDebug>
+#include <QUrl>
+#include <QPoint>
+#include <QRect>
+#include <QMediaPlayer>
+#include <QVideoFrame>
+#include <QFile>
+#include <QBuffer>
+#include <QMimeDatabase>
+#include "qwasmvideooutput_p.h"
+
+#include <qvideosink.h>
+#include <private/qplatformvideosink_p.h>
+#include <private/qmemoryvideobuffer_p.h>
+#include <private/qvideotexturehelper_p.h>
+#include <private/qvideoframe_p.h>
+#include <private/qstdweb_p.h>
+#include <QTimer>
+
+#include <emscripten/bind.h>
+#include <emscripten/html5.h>
+#include <emscripten/val.h>
+
+
+QT_BEGIN_NAMESPACE
+
+
+using namespace emscripten;
+
+Q_LOGGING_CATEGORY(qWasmMediaVideoOutput, "qt.multimedia.wasm.videooutput")
+
+// TODO unique videosurface ?
+static std::string m_videoSurfaceId;
+
+void qtVideoBeforeUnload(emscripten::val event)
+{
+ Q_UNUSED(event)
+ // large videos will leave the unloading window
+ // in a frozen state, so remove the video element first
+ emscripten::val document = emscripten::val::global("document");
+ emscripten::val videoElement =
+ document.call<emscripten::val>("getElementById", std::string(m_videoSurfaceId));
+ videoElement.call<void>("removeAttribute", emscripten::val("src"));
+ videoElement.call<void>("load");
+}
+
+EMSCRIPTEN_BINDINGS(video_module)
+{
+ emscripten::function("mbeforeUnload", qtVideoBeforeUnload);
+}
+
+static bool checkForVideoFrame()
+{
+ emscripten::val videoFrame = emscripten::val::global("VideoFrame");
+ return (!videoFrame.isNull() && !videoFrame.isUndefined());
+}
+
+QWasmVideoOutput::QWasmVideoOutput(QObject *parent) : QObject{ parent }
+{
+ m_hasVideoFrame = checkForVideoFrame();
+}
+
+void QWasmVideoOutput::setVideoSize(const QSize &newSize)
+{
+ if (m_pendingVideoSize == newSize)
+ return;
+
+ m_pendingVideoSize = newSize;
+ updateVideoElementGeometry(QRect(0, 0, m_pendingVideoSize.width(), m_pendingVideoSize.height()));
+}
+
+void QWasmVideoOutput::setVideoMode(QWasmVideoOutput::WasmVideoMode mode)
+{
+ m_currentVideoMode = mode;
+}
+
+void QWasmVideoOutput::start()
+{
+ if (m_video.isUndefined() || m_video.isNull()
+ || !m_wasmSink) {
+ // error
+ emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
+ return;
+ }
+
+ switch (m_currentVideoMode) {
+ case QWasmVideoOutput::VideoOutput: {
+ emscripten::val sourceObj = m_video["src"];
+ if ((sourceObj.isUndefined() || sourceObj.isNull()) && !m_source.isEmpty()) {
+ qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << "calling load" << m_source;
+ m_video.set("src", m_source);
+ m_video.call<void>("load");
+ }
+ } break;
+ case QWasmVideoOutput::Camera: {
+ if (!m_cameraIsReady) {
+ m_shouldBeStarted = true;
+ }
+
+ emscripten::val stream = m_video["srcObject"];
+ if (stream.isNull() || stream.isUndefined()) { // camera device
+ qCDebug(qWasmMediaVideoOutput) << "ERROR";
+ emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
+ return;
+ } else {
+ emscripten::val videoTracks = stream.call<emscripten::val>("getVideoTracks");
+ if (videoTracks.isNull() || videoTracks.isUndefined()) {
+ qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << "videoTracks is null";
+ emit errorOccured(QMediaPlayer::ResourceError,
+ QStringLiteral("video surface error"));
+ return;
+ }
+ if (videoTracks["length"].as<int>() == 0) {
+ qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << "videoTracks count is 0";
+ emit errorOccured(QMediaPlayer::ResourceError,
+ QStringLiteral("video surface error"));
+ return;
+ }
+ emscripten::val videoSettings = videoTracks[0].call<emscripten::val>("getSettings");
+            if (!videoSettings.isNull() && !videoSettings.isUndefined()) {
+ // double fRate = videoSettings["frameRate"].as<double>(); TODO
+ const int width = videoSettings["width"].as<int>();
+ const int height = videoSettings["height"].as<int>();
+
+ qCDebug(qWasmMediaVideoOutput)
+ << "width" << width << "height" << height;
+
+ updateVideoElementGeometry(QRect(0, 0, width, height));
+ }
+ }
+ } break;
+ };
+
+ m_shouldStop = false;
+ m_toBePaused = false;
+ m_video.call<void>("play");
+
+ if (m_currentVideoMode == QWasmVideoOutput::Camera) {
+ if (m_hasVideoFrame) {
+ m_video.call<emscripten::val>("requestVideoFrameCallback",
+ emscripten::val::module_property("qtVideoFrameTimerCallback"));
+ } else {
+ videoFrameTimerCallback();
+ }
+ }
+}
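+
+// For reference: requestVideoFrameCallback() is one-shot, firing once per
+// composited video frame, so the callback must re-register itself; this is
+// why videoFrameCallback() further down ends with
+//
+//     videoElement.call<emscripten::val>("requestVideoFrameCallback",
+//             emscripten::val::module_property("qtVideoFrameTimerCallback"));
+//
+// before returning, which keeps the frame delivery loop alive.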
+
+void QWasmVideoOutput::stop()
+{
+ qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;
+
+ if (m_video.isUndefined() || m_video.isNull()) {
+ // error
+ emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("Resource error"));
+ return;
+ }
+ m_shouldStop = true;
+ if (m_toBePaused) {
+        // we are stopped, need to reset
+ m_toBePaused = false;
+ m_video.call<void>("load");
+ } else {
+ m_video.call<void>("pause");
+ }
+}
+
+void QWasmVideoOutput::pause()
+{
+ qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;
+
+ if (m_video.isUndefined() || m_video.isNull()) {
+ // error
+ emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
+ return;
+ }
+ m_shouldStop = false;
+ m_toBePaused = true;
+ m_video.call<void>("pause");
+}
+
+void QWasmVideoOutput::reset()
+{
+ // flush pending frame
+ if (m_wasmSink)
+ m_wasmSink->platformVideoSink()->setVideoFrame(QVideoFrame());
+
+ m_source = "";
+ m_video.set("currentTime", emscripten::val(0));
+ m_video.call<void>("load");
+}
+
+emscripten::val QWasmVideoOutput::surfaceElement()
+{
+ return m_video;
+}
+
+void QWasmVideoOutput::setSurface(QVideoSink *surface)
+{
+ if (!surface || surface == m_wasmSink) {
+ qWarning() << "Surface not ready";
+ return;
+ }
+
+ m_wasmSink = surface;
+}
+
+bool QWasmVideoOutput::isReady() const
+{
+    if (m_video.isUndefined() || m_video.isNull())
+        return false;
+
+    constexpr int hasCurrentData = 2;
+    return m_video["readyState"].as<int>() >= hasCurrentData;
+}
+
+void QWasmVideoOutput::setSource(const QUrl &url)
+{
+ qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << url;
+
+ if (m_video.isUndefined() || m_video.isNull()) {
+ return;
+ }
+
+ m_source = url.toString();
+ if (url.isEmpty()) {
+ stop();
+ return;
+ }
+ if (url.isLocalFile()) {
+ QFile localFile(url.toLocalFile());
+ if (localFile.open(QIODevice::ReadOnly)) {
+            QDataStream buffer(&localFile); // stream the local file's data
+ setSource(buffer.device());
+ } else {
+ qWarning() << "Failed to open file";
+ }
+ return;
+ }
+
+ updateVideoElementSource(m_source);
+}
+
+void QWasmVideoOutput::updateVideoElementSource(const QString &src)
+{
+ m_video.set("src", src.toStdString());
+ m_video.call<void>("load");
+}
+
+void QWasmVideoOutput::addCameraSourceElement(const std::string &id)
+{
+ m_cameraIsReady = false;
+ emscripten::val navigator = emscripten::val::global("navigator");
+ emscripten::val mediaDevices = navigator["mediaDevices"];
+
+ if (mediaDevices.isNull() || mediaDevices.isUndefined()) {
+ qWarning() << "No media devices found";
+ emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("Resource error"));
+ return;
+ }
+
+ qstdweb::PromiseCallbacks getUserMediaCallback{
+ .thenFunc =
+ [this](emscripten::val stream) {
+ qCDebug(qWasmMediaVideoOutput) << "getUserMediaSuccess";
+
+ m_video.set("srcObject", stream);
+ m_cameraIsReady = true;
+ if (m_shouldBeStarted) {
+ start();
+ m_shouldBeStarted = false;
+ }
+ },
+ .catchFunc =
+ [](emscripten::val error) {
+ qCDebug(qWasmMediaVideoOutput)
+ << "getUserMedia fail"
+ << QString::fromStdString(error["name"].as<std::string>())
+ << QString::fromStdString(error["message"].as<std::string>());
+ }
+ };
+
+ emscripten::val constraints = emscripten::val::object();
+
+ constraints.set("audio", m_hasAudio);
+
+    emscripten::val videoConstraints = emscripten::val::object();
+    emscripten::val deviceIdConstraint = emscripten::val::object();
+    deviceIdConstraint.set("exact", id);
+    videoConstraints.set("deviceId", deviceIdConstraint);
+    constraints.set("video", videoConstraints);
+
+    // going through getUserMedia() prompts the user for mic/camera permissions
+ qstdweb::Promise::make(mediaDevices, QStringLiteral("getUserMedia"),
+ std::move(getUserMediaCallback), constraints);
+}
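+
+// For reference, the constraints object assembled above serializes to the
+// standard MediaStreamConstraints shape (values illustrative):
+//
+//     { "audio": <bool>, "video": { "deviceId": { "exact": "<device id>" } } }
+//
+// getUserMedia() resolves with a MediaStream once the user grants permission,
+// and rejects (catchFunc above) if permission is denied or no device matches.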
+
+void QWasmVideoOutput::setSource(QIODevice *stream)
+{
+ if (stream->bytesAvailable() == 0) {
+ qWarning() << "data not available";
+ emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("data not available"));
+ return;
+ }
+ if (m_video.isUndefined() || m_video.isNull()) {
+ emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
+ return;
+ }
+
+ QMimeDatabase db;
+ QMimeType mime = db.mimeTypeForData(stream);
+
+ QByteArray buffer = stream->readAll();
+
+ qstdweb::Blob contentBlob = qstdweb::Blob::copyFrom(buffer.data(), buffer.size(), mime.name().toStdString());
+
+ emscripten::val window = qstdweb::window();
+
+ if (window["safari"].isUndefined()) {
+ emscripten::val contentUrl = window["URL"].call<emscripten::val>("createObjectURL", contentBlob.val());
+ m_video.set("src", contentUrl);
+ m_source = QString::fromStdString(contentUrl.as<std::string>());
+ } else {
+ // only Safari currently supports Blob with srcObject
+ m_video.set("srcObject", contentBlob.val());
+ }
+}
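+
+// For reference: on the non-Safari path above, URL.createObjectURL() yields a
+// short-lived URL of the form "blob:<origin>/<uuid>" that the media element
+// streams from. The mapping should be revoked once it is no longer needed, as
+// the audio path above does in its loadeddata handler; a matching sketch for
+// the video element (illustration only):
+//
+//     qstdweb::window()["URL"].call<emscripten::val>("revokeObjectURL", m_video["src"]);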
+
+void QWasmVideoOutput::setVolume(qreal volume)
+{ // between 0 - 1
+ volume = qBound(qreal(0.0), volume, qreal(1.0));
+ m_video.set("volume", volume);
+}
+
+void QWasmVideoOutput::setMuted(bool muted)
+{
+ if (m_video.isUndefined() || m_video.isNull()) {
+ // error
+ emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
+ return;
+ }
+ m_video.set("muted", muted);
+}
+
+qint64 QWasmVideoOutput::getCurrentPosition()
+{
+    return (!m_video.isUndefined() && !m_video.isNull())
+ ? (m_video["currentTime"].as<double>() * 1000)
+ : 0;
+}
+
+void QWasmVideoOutput::seekTo(qint64 positionMSecs)
+{
+ if (isVideoSeekable()) {
+ float positionToSetInSeconds = float(positionMSecs) / 1000;
+ emscripten::val seekableTimeRange = m_video["seekable"];
+        if (!seekableTimeRange.isNull() && !seekableTimeRange.isUndefined()) {
+ // range user can seek
+ if (seekableTimeRange["length"].as<int>() < 1)
+ return;
+ if (positionToSetInSeconds
+ >= seekableTimeRange.call<emscripten::val>("start", 0).as<double>()
+ && positionToSetInSeconds
+ <= seekableTimeRange.call<emscripten::val>("end", 0).as<double>()) {
+ m_requestedPosition = positionToSetInSeconds;
+
+ m_video.set("currentTime", m_requestedPosition);
+ }
+ }
+ }
+ qCDebug(qWasmMediaVideoOutput) << "m_requestedPosition" << m_requestedPosition;
+}
+
+bool QWasmVideoOutput::isVideoSeekable()
+{
+ if (m_video.isUndefined() || m_video.isNull()) {
+ // error
+ emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
+ return false;
+ }
+
+    emscripten::val seekableTimeRange = m_video["seekable"];
+    if (seekableTimeRange.isNull() || seekableTimeRange.isUndefined())
+        return false;
+    if (seekableTimeRange["length"].as<int>() < 1)
+        return false;
+
+    return !qFuzzyCompare(seekableTimeRange.call<emscripten::val>("start", 0).as<double>(),
+                          seekableTimeRange.call<emscripten::val>("end", 0).as<double>());
+}
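+
+// For reference: the "seekable" property queried above is a TimeRanges object
+// of the shape { length, start(i), end(i) }; the code above only inspects
+// range 0. A sketch walking every range (illustration only):
+//
+//     emscripten::val ranges = m_video["seekable"];
+//     for (int i = 0; i < ranges["length"].as<int>(); ++i) {
+//         qCDebug(qWasmMediaVideoOutput)
+//                 << "seekable" << i
+//                 << ranges.call<emscripten::val>("start", i).as<double>()
+//                 << ranges.call<emscripten::val>("end", i).as<double>();
+//     }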
+
+void QWasmVideoOutput::createVideoElement(const std::string &id)
+{
+    // TODO: there can be more than one element
+ qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << id;
+ // Create <video> element and add it to the page body
+ emscripten::val document = emscripten::val::global("document");
+ emscripten::val body = document["body"];
+
+ emscripten::val oldVideo = document.call<emscripten::val>("getElementsByClassName",
+ (m_currentVideoMode == QWasmVideoOutput::Camera
+ ? std::string("Camera")
+ : std::string("Video")));
+
+ // we don't provide alternate tracks
+ // but need to remove stale track
+ if (oldVideo["length"].as<int>() > 0)
+ oldVideo[0].call<void>("remove");
+
+ m_videoSurfaceId = id;
+ m_video = document.call<emscripten::val>("createElement", std::string("video"));
+
+ m_video.set("id", m_videoSurfaceId.c_str());
+ m_video.call<void>("setAttribute", std::string("class"),
+ (m_currentVideoMode == QWasmVideoOutput::Camera ? std::string("Camera")
+ : std::string("Video")));
+ m_video.set("data-qvideocontext",
+ emscripten::val(quintptr(reinterpret_cast<void *>(this))));
+
+ // if video
+ m_video.set("preload", "metadata");
+
+    // Without a crossorigin attribute, frame grabbing fails with:
+    // Uncaught DOMException: Failed to execute 'getImageData' on
+    // 'OffscreenCanvasRenderingContext2D': The canvas has been tainted by
+    // cross-origin data.
+    // TODO: find a way to let the user choose between the two modes
+    std::string originString = "anonymous"; // requires the server to send Access-Control-Allow-Origin: *
+    // std::string originString = "use-credentials"; // the server must not
+    // send Access-Control-Allow-Origin: *
+
+ m_video.call<void>("setAttribute", std::string("crossorigin"), originString);
+ body.call<void>("appendChild", m_video);
+
+    // Create the video source element and attach it to the video element
+    if (!m_source.isEmpty()) {
+        emscripten::val sourceElement =
+                document.call<emscripten::val>("createElement", std::string("source"));
+        sourceElement.set("src", m_source.toStdString());
+        m_video.call<void>("appendChild", sourceElement);
+    }
+
+ // Set position:absolute, which makes it possible to position the video
+ // element using x,y. coordinates, relative to its parent (the page's <body>
+ // element)
+ emscripten::val style = m_video["style"];
+ style.set("position", "absolute");
+ style.set("display", "none"); // hide
+
+ if (!m_source.isEmpty())
+ updateVideoElementSource(m_source);
+}
+
+void QWasmVideoOutput::createOffscreenElement(const QSize &offscreenSize)
+{
+ qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;
+
+ if (m_hasVideoFrame) // VideoFrame does not require offscreen canvas/context
+ return;
+
+ // create offscreen element for grabbing frames
+ // OffscreenCanvas - no safari :(
+ // https://developer.mozilla.org/en-US/docs/Web/API/OffscreenCanvas
+
+ emscripten::val document = emscripten::val::global("document");
+
+ // TODO use correct frameBytesAllocationSize?
+ // offscreen render buffer
+ m_offscreen = emscripten::val::global("OffscreenCanvas");
+
+ if (m_offscreen.isUndefined()) {
+ // Safari OffscreenCanvas not supported, try old skool way
+ m_offscreen = document.call<emscripten::val>("createElement", std::string("canvas"));
+
+ m_offscreen.set("style",
+ "position:absolute;left:-1000px;top:-1000px"); // offscreen
+ m_offscreen.set("width", offscreenSize.width());
+ m_offscreen.set("height", offscreenSize.height());
+ m_offscreenContext = m_offscreen.call<emscripten::val>("getContext", std::string("2d"));
+ } else {
+ m_offscreen = emscripten::val::global("OffscreenCanvas")
+ .new_(offscreenSize.width(), offscreenSize.height());
+        emscripten::val offscreenAttributes = emscripten::val::object();
+ offscreenAttributes.set("willReadFrequently", true);
+ m_offscreenContext = m_offscreen.call<emscripten::val>("getContext", std::string("2d"),
+ offscreenAttributes);
+ }
+ std::string offscreenId = m_videoSurfaceId + "_offscreenOutputSurface";
+ m_offscreen.set("id", offscreenId.c_str());
+}
+
+void QWasmVideoOutput::doElementCallbacks()
+{
+ qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;
+
+ // event callbacks
+    // timeupdate
+ auto timeUpdateCallback = [=](emscripten::val event) {
+ qCDebug(qWasmMediaVideoOutput) << "timeupdate";
+
+ // qt progress is ms
+ emit progressChanged(event["target"]["currentTime"].as<double>() * 1000);
+ };
+ m_timeUpdateEvent.reset(new qstdweb::EventCallback(m_video, "timeupdate", timeUpdateCallback));
+
+ // play
+ auto playCallback = [=](emscripten::val event) {
+ Q_UNUSED(event)
+ qCDebug(qWasmMediaVideoOutput) << "play" << m_video["src"].as<std::string>();
+ if (!m_isSeeking)
+ emit stateChanged(QWasmMediaPlayer::Preparing);
+ };
+ m_playEvent.reset(new qstdweb::EventCallback(m_video, "play", playCallback));
+
+ // ended
+ auto endedCallback = [=](emscripten::val event) {
+ Q_UNUSED(event)
+ qCDebug(qWasmMediaVideoOutput) << "ended";
+ m_currentMediaStatus = QMediaPlayer::EndOfMedia;
+ emit statusChanged(m_currentMediaStatus);
+ m_shouldStop = true;
+ stop();
+ };
+ m_endedEvent.reset(new qstdweb::EventCallback(m_video, "ended", endedCallback));
+
+ // durationchange
+ auto durationChangeCallback = [=](emscripten::val event) {
+ qCDebug(qWasmMediaVideoOutput) << "durationChange";
+
+ // qt duration is in milliseconds.
+ qint64 dur = event["target"]["duration"].as<double>() * 1000;
+ emit durationChanged(dur);
+ };
+ m_durationChangeEvent.reset(
+ new qstdweb::EventCallback(m_video, "durationchange", durationChangeCallback));
+
+ // loadeddata
+ auto loadedDataCallback = [=](emscripten::val event) {
+ Q_UNUSED(event)
+ qCDebug(qWasmMediaVideoOutput) << "loaded data";
+
+ emit stateChanged(QWasmMediaPlayer::Prepared);
+ };
+ m_loadedDataEvent.reset(new qstdweb::EventCallback(m_video, "loadeddata", loadedDataCallback));
+
+ // error
+ auto errorCallback = [=](emscripten::val event) {
+ qCDebug(qWasmMediaVideoOutput) << "error";
+ if (event.isUndefined() || event.isNull())
+ return;
+ emit errorOccured(m_video["error"]["code"].as<int>(),
+ QString::fromStdString(m_video["error"]["message"].as<std::string>()));
+ };
+ m_errorChangeEvent.reset(new qstdweb::EventCallback(m_video, "error", errorCallback));
+
+ // resize
+ auto resizeCallback = [=](emscripten::val event) {
+ Q_UNUSED(event)
+ qCDebug(qWasmMediaVideoOutput) << "resize";
+
+ updateVideoElementGeometry(
+ QRect(0, 0, m_video["videoWidth"].as<int>(), m_video["videoHeight"].as<int>()));
+ emit sizeChange(m_video["videoWidth"].as<int>(), m_video["videoHeight"].as<int>());
+
+ };
+ m_resizeChangeEvent.reset(new qstdweb::EventCallback(m_video, "resize", resizeCallback));
+
+ // loadedmetadata
+ auto loadedMetadataCallback = [=](emscripten::val event) {
+ Q_UNUSED(event)
+ qCDebug(qWasmMediaVideoOutput) << "loaded meta data";
+
+ emit metaDataLoaded();
+ };
+ m_loadedMetadataChangeEvent.reset(
+ new qstdweb::EventCallback(m_video, "loadedmetadata", loadedMetadataCallback));
+
+ // loadstart
+ auto loadStartCallback = [=](emscripten::val event) {
+ Q_UNUSED(event)
+ qCDebug(qWasmMediaVideoOutput) << "load started";
+ m_currentMediaStatus = QMediaPlayer::LoadingMedia;
+ emit statusChanged(m_currentMediaStatus);
+ m_shouldStop = false;
+ };
+ m_loadStartChangeEvent.reset(new qstdweb::EventCallback(m_video, "loadstart", loadStartCallback));
+
+ // canplay
+
+ auto canPlayCallback = [=](emscripten::val event) {
+ if (event.isUndefined() || event.isNull())
+ return;
+ qCDebug(qWasmMediaVideoOutput) << "can play"
+ << "m_requestedPosition" << m_requestedPosition;
+
+ if (!m_shouldStop)
+ emit readyChanged(true); // sets video available
+ };
+ m_canPlayChangeEvent.reset(new qstdweb::EventCallback(m_video, "canplay", canPlayCallback));
+
+ // canplaythrough
+ auto canPlayThroughCallback = [=](emscripten::val event) {
+ Q_UNUSED(event)
+ qCDebug(qWasmMediaVideoOutput) << "can play through"
+ << "m_shouldStop" << m_shouldStop;
+
+ if (m_currentMediaStatus == QMediaPlayer::EndOfMedia)
+ return;
+ if (!m_isSeeking && !m_shouldStop) {
+ emscripten::val timeRanges = m_video["buffered"];
+            if (!timeRanges.isNull() && !timeRanges.isUndefined()
+                && timeRanges["length"].as<int>() == 1) {
+ double buffered = m_video["buffered"].call<emscripten::val>("end", 0).as<double>();
+ const double duration = m_video["duration"].as<double>();
+
+ if (duration == buffered) {
+ m_currentBufferedValue = 100;
+ emit bufferingChanged(m_currentBufferedValue);
+ }
+ }
+ m_currentMediaStatus = QMediaPlayer::LoadedMedia;
+ emit statusChanged(m_currentMediaStatus);
+ if (m_hasVideoFrame) {
+ m_video.call<emscripten::val>("requestVideoFrameCallback",
+ emscripten::val::module_property("qtVideoFrameTimerCallback"));
+ } else {
+ videoFrameTimerCallback();
+ }
+ } else {
+ m_shouldStop = false;
+ }
+ };
+ m_canPlayThroughChangeEvent.reset(
+ new qstdweb::EventCallback(m_video, "canplaythrough", canPlayThroughCallback));
+
+ // seeking
+ auto seekingCallback = [=](emscripten::val event) {
+ Q_UNUSED(event)
+ qCDebug(qWasmMediaVideoOutput)
+ << "seeking started" << (m_video["currentTime"].as<double>() * 1000);
+ m_isSeeking = true;
+ };
+ m_seekingChangeEvent.reset(new qstdweb::EventCallback(m_video, "seeking", seekingCallback));
+
+ // seeked
+ auto seekedCallback = [=](emscripten::val event) {
+ Q_UNUSED(event)
+ qCDebug(qWasmMediaVideoOutput) << "seeked" << (m_video["currentTime"].as<double>() * 1000);
+ emit progressChanged(m_video["currentTime"].as<double>() * 1000);
+ m_isSeeking = false;
+ };
+ m_seekedChangeEvent.reset(new qstdweb::EventCallback(m_video, "seeked", seekedCallback));
+
+ // emptied
+ auto emptiedCallback = [=](emscripten::val event) {
+ Q_UNUSED(event)
+ qCDebug(qWasmMediaVideoOutput) << "emptied";
+ emit readyChanged(false);
+ m_currentMediaStatus = QMediaPlayer::EndOfMedia;
+ emit statusChanged(m_currentMediaStatus);
+ };
+ m_emptiedChangeEvent.reset(new qstdweb::EventCallback(m_video, "emptied", emptiedCallback));
+
+ // stalled
+ auto stalledCallback = [=](emscripten::val event) {
+ Q_UNUSED(event)
+ qCDebug(qWasmMediaVideoOutput) << "stalled";
+ m_currentMediaStatus = QMediaPlayer::StalledMedia;
+ emit statusChanged(m_currentMediaStatus);
+ };
+ m_stalledChangeEvent.reset(new qstdweb::EventCallback(m_video, "stalled", stalledCallback));
+
+ // waiting
+ auto waitingCallback = [=](emscripten::val event) {
+ Q_UNUSED(event)
+
+ qCDebug(qWasmMediaVideoOutput) << "waiting";
+ // check buffer
+ };
+ m_waitingChangeEvent.reset(new qstdweb::EventCallback(m_video, "waiting", waitingCallback));
+
+ // suspend
+
+ // playing
+ auto playingCallback = [=](emscripten::val event) {
+ Q_UNUSED(event)
+ qCDebug(qWasmMediaVideoOutput) << "playing";
+ if (m_isSeeking)
+ return;
+ emit stateChanged(QWasmMediaPlayer::Started);
+ if (m_toBePaused || !m_shouldStop) { // paused
+ m_toBePaused = false;
+
+ if (m_hasVideoFrame) {
+ m_video.call<emscripten::val>("requestVideoFrameCallback",
+ emscripten::val::module_property("qtVideoFrameTimerCallback"));
+ } else {
+ videoFrameTimerCallback(); // get the ball rolling
+ }
+ }
+ };
+ m_playingChangeEvent.reset(new qstdweb::EventCallback(m_video, "playing", playingCallback));
+
+ // progress (buffering progress)
+    auto progressCallback = [=](emscripten::val event) {
+ if (event.isUndefined() || event.isNull())
+ return;
+
+ const double duration = event["target"]["duration"].as<double>();
+ if (duration < 0) // track not exactly ready yet
+ return;
+
+ emscripten::val timeRanges = event["target"]["buffered"];
+
+        if (!timeRanges.isNull() && !timeRanges.isUndefined()
+            && timeRanges["length"].as<int>() == 1) {
+            emscripten::val dVal = timeRanges.call<emscripten::val>("end", 0);
+            if (!dVal.isNull() && !dVal.isUndefined()) {
+ double bufferedEnd = dVal.as<double>();
+
+ if (duration > 0 && bufferedEnd > 0) {
+ const double bufferedValue = (bufferedEnd / duration * 100);
+ qCDebug(qWasmMediaVideoOutput) << "progress buffered";
+ m_currentBufferedValue = bufferedValue;
+ emit bufferingChanged(m_currentBufferedValue);
+ if (bufferedEnd == duration)
+ m_currentMediaStatus = QMediaPlayer::BufferedMedia;
+ else
+ m_currentMediaStatus = QMediaPlayer::BufferingMedia;
+ emit statusChanged(m_currentMediaStatus);
+ }
+ }
+ }
+ };
+    m_progressChangeEvent.reset(new qstdweb::EventCallback(m_video, "progress", progressCallback));
+
+ // pause
+ auto pauseCallback = [=](emscripten::val event) {
+ Q_UNUSED(event)
+ qCDebug(qWasmMediaVideoOutput) << "pause";
+
+ const double currentTime = m_video["currentTime"].as<double>(); // in seconds
+ const double duration = m_video["duration"].as<double>(); // in seconds
+ if ((currentTime > 0 && currentTime < duration) && (!m_shouldStop && m_toBePaused)) {
+ emit stateChanged(QWasmMediaPlayer::Paused);
+ } else {
+            // not a mid-stream pause: rewind and report Stopped
+ m_video.set("currentTime", emscripten::val(0));
+ emit stateChanged(QWasmMediaPlayer::Stopped);
+ }
+ };
+ m_pauseChangeEvent.reset(new qstdweb::EventCallback(m_video, "pause", pauseCallback));
+
+    // onunload
+    // use the lower-level addEventListener API here, rather than
+    // qstdweb::EventCallback, to avert a crash on activation; see
+    // qtVideoBeforeUnload() above
+ emscripten::val window = emscripten::val::global("window");
+ window.call<void>("addEventListener", std::string("beforeunload"),
+ emscripten::val::module_property("mbeforeUnload"));
+}
+
+void QWasmVideoOutput::updateVideoElementGeometry(const QRect &windowGeometry)
+{
+ qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << windowGeometry;
+ QRect m_videoElementSource(windowGeometry.topLeft(), windowGeometry.size());
+
+ emscripten::val style = m_video["style"];
+ style.set("left", QString("%1px").arg(m_videoElementSource.left()).toStdString());
+ style.set("top", QString("%1px").arg(m_videoElementSource.top()).toStdString());
+ style.set("width", QString("%1px").arg(m_videoElementSource.width()).toStdString());
+ style.set("height", QString("%1px").arg(m_videoElementSource.height()).toStdString());
+ style.set("z-index", "999");
+
+ if (!m_hasVideoFrame) {
+ // offscreen
+ m_offscreen.set("width", m_videoElementSource.width());
+ m_offscreen.set("height", m_videoElementSource.height());
+ }
+}
+
+qint64 QWasmVideoOutput::getDuration()
+{
+ // qt duration is in ms
+ // js is sec
+
+ if (m_video.isUndefined() || m_video.isNull())
+ return 0;
+ return m_video["duration"].as<double>() * 1000;
+}
+
+void QWasmVideoOutput::newFrame(const QVideoFrame &frame)
+{
+    if (m_wasmSink)
+        m_wasmSink->setVideoFrame(frame);
+}
+
+void QWasmVideoOutput::setPlaybackRate(qreal rate)
+{
+ m_video.set("playbackRate", emscripten::val(rate));
+}
+
+qreal QWasmVideoOutput::playbackRate()
+{
+ return (m_video.isUndefined() || m_video.isNull()) ? 0 : m_video["playbackRate"].as<float>();
+}
+
+void QWasmVideoOutput::checkNetworkState()
+{
+ int netState = m_video["networkState"].as<int>();
+
+ qCDebug(qWasmMediaVideoOutput) << netState;
+
+ switch (netState) {
+ case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkEmpty: // no data
+ break;
+ case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkIdle:
+ break;
+ case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkLoading:
+ break;
+ case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkNoSource: // no source
+ emit errorOccured(netState, QStringLiteral("No media source found"));
+ break;
+ };
+}
+
+void QWasmVideoOutput::videoComputeFrame(void *context)
+{
+ if (m_offscreenContext.isUndefined() || m_offscreenContext.isNull()) {
+ qCDebug(qWasmMediaVideoOutput) << "offscreen canvas context could not be found";
+ return;
+ }
+ emscripten::val document = emscripten::val::global("document");
+
+ emscripten::val videoElement =
+ document.call<emscripten::val>("getElementById", std::string(m_videoSurfaceId));
+
+ if (videoElement.isUndefined() || videoElement.isNull()) {
+ qCDebug(qWasmMediaVideoOutput) << "video element could not be found";
+ return;
+ }
+
+ const int videoWidth = videoElement["videoWidth"].as<int>();
+ const int videoHeight = videoElement["videoHeight"].as<int>();
+
+ if (videoWidth == 0 || videoHeight == 0)
+ return;
+
+ m_offscreenContext.call<void>("drawImage", videoElement, 0, 0, videoWidth, videoHeight);
+
+ emscripten::val frame = // one frame, Uint8ClampedArray
+ m_offscreenContext.call<emscripten::val>("getImageData", 0, 0, videoWidth, videoHeight);
+
+ const QSize frameBytesAllocationSize(videoWidth, videoHeight);
+
+ // this seems to work ok, even though getImageData returns a Uint8ClampedArray
+ QByteArray frameBytes = qstdweb::Uint8Array(frame["data"]).copyToQByteArray();
+
+ QVideoFrameFormat frameFormat =
+ QVideoFrameFormat(frameBytesAllocationSize, QVideoFrameFormat::Format_RGBA8888);
+
+ auto *textureDescription = QVideoTextureHelper::textureDescription(frameFormat.pixelFormat());
+
+ QVideoFrame vFrame = QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(
+ std::move(frameBytes),
+ textureDescription->strideForWidth(frameFormat.frameWidth())),
+ frameFormat);
+ QWasmVideoOutput *wasmVideoOutput = reinterpret_cast<QWasmVideoOutput *>(context);
+
+    if (!wasmVideoOutput->m_wasmSink) {
+        qWarning() << "video sink not set";
+        return;
+    }
+    wasmVideoOutput->m_wasmSink->setVideoFrame(vFrame);
+}
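+
+// For reference: getImageData() returns tightly packed RGBA, 4 bytes per
+// pixel, so the copied buffer is expected to hold videoWidth * videoHeight * 4
+// bytes. A sanity-check sketch (illustration only, not applied):
+//
+//     const qsizetype expected = qsizetype(videoWidth) * videoHeight * 4;
+//     if (frameBytes.size() != expected)
+//         qCDebug(qWasmMediaVideoOutput)
+//                 << "unexpected frame size" << frameBytes.size() << expected;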
+
+
+void QWasmVideoOutput::videoFrameCallback(emscripten::val now, emscripten::val metadata)
+{
+ Q_UNUSED(now)
+ Q_UNUSED(metadata)
+
+ emscripten::val videoElement =
+ emscripten::val::global("document").
+ call<emscripten::val>("getElementById",
+ std::string(m_videoSurfaceId));
+
+ emscripten::val oneVideoFrame = val::global("VideoFrame").new_(videoElement);
+
+ if (oneVideoFrame.isNull() || oneVideoFrame.isUndefined()) {
+ qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO
+ << "ERROR" << "failed to construct VideoFrame";
+ return;
+ }
+ emscripten::val frameBytesAllocationSize = oneVideoFrame.call<emscripten::val>("allocationSize");
+
+ emscripten::val frameBuffer =
+ emscripten::val::global("Uint8Array").new_(frameBytesAllocationSize);
+ QWasmVideoOutput *wasmVideoOutput =
+ reinterpret_cast<QWasmVideoOutput*>(videoElement["data-qvideocontext"].as<quintptr>());
+
+ qstdweb::PromiseCallbacks copyToCallback;
+ copyToCallback.thenFunc = [wasmVideoOutput, oneVideoFrame, frameBuffer, videoElement]
+ (emscripten::val frameLayout)
+ {
+ if (frameLayout.isNull() || frameLayout.isUndefined()) {
+            qCDebug(qWasmMediaVideoOutput) << "no frameLayout returned";
+ return;
+ }
+
+ // frameBuffer now has a new frame, send to Qt
+ const QSize frameSize(oneVideoFrame["displayWidth"].as<int>(),
+ oneVideoFrame["displayHeight"].as<int>());
+
+
+ QByteArray frameBytes = QByteArray::fromEcmaUint8Array(frameBuffer);
+
+ QVideoFrameFormat::PixelFormat pixelFormat = fromJsPixelFormat(oneVideoFrame["format"].as<std::string>());
+ if (pixelFormat == QVideoFrameFormat::Format_Invalid) {
+ qWarning() << "Invalid pixel format";
+ return;
+ }
+ QVideoFrameFormat frameFormat = QVideoFrameFormat(frameSize, pixelFormat);
+
+ auto *textureDescription = QVideoTextureHelper::textureDescription(frameFormat.pixelFormat());
+
+ auto buffer = std::make_unique<QMemoryVideoBuffer>(
+ std::move(frameBytes),
+ textureDescription->strideForWidth(frameFormat.frameWidth()));
+
+ QVideoFrame vFrame =
+ QVideoFramePrivate::createFrame(std::move(buffer), std::move(frameFormat));
+
+ if (!wasmVideoOutput) {
+ qCDebug(qWasmMediaVideoOutput) << "ERROR:"
+ << "data-qvideocontext not found";
+ return;
+ }
+ if (!wasmVideoOutput->m_wasmSink) {
+            qWarning() << "video sink not set";
+ return;
+ }
+ wasmVideoOutput->m_wasmSink->setVideoFrame(vFrame);
+ oneVideoFrame.call<emscripten::val>("close");
+ };
+    copyToCallback.catchFunc = [wasmVideoOutput, oneVideoFrame](emscripten::val error)
+ {
+ qCDebug(qWasmMediaVideoOutput) << "Error"
+ << QString::fromStdString(error["name"].as<std::string>())
+ << QString::fromStdString(error["message"].as<std::string>()) ;
+
+ oneVideoFrame.call<emscripten::val>("close");
+ wasmVideoOutput->stop();
+ return;
+ };
+
+ qstdweb::Promise::make(oneVideoFrame, "copyTo", std::move(copyToCallback), frameBuffer);
+
+ videoElement.call<emscripten::val>("requestVideoFrameCallback",
+ emscripten::val::module_property("qtVideoFrameTimerCallback"));
+
+}
+
+void QWasmVideoOutput::videoFrameTimerCallback()
+{
+ static auto frame = [](double frameTime, void *context) -> int {
+ Q_UNUSED(frameTime);
+ QWasmVideoOutput *videoOutput = reinterpret_cast<QWasmVideoOutput *>(context);
+
+ emscripten::val document = emscripten::val::global("document");
+ emscripten::val videoElement =
+ document.call<emscripten::val>("getElementById", std::string(m_videoSurfaceId));
+
+ if (videoElement["paused"].as<bool>() || videoElement["ended"].as<bool>())
+ return false;
+
+ videoOutput->videoComputeFrame(context);
+
+ return true;
+ };
+
+ emscripten_request_animation_frame_loop(frame, this);
+ // about 60 fps
+}
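+
+// For reference, the emscripten_request_animation_frame_loop() contract used
+// above: the callback keeps firing (roughly at display refresh rate, hence
+// "about 60 fps") for as long as it returns a true value, and returning false
+// ends the loop; this is how the lambda above stops grabbing frames once the
+// element reports paused or ended. A standalone sketch (illustration only):
+//
+//     static EM_BOOL tick(double frameTimeMs, void *userData)
+//     {
+//         // ... grab/process one frame ...
+//         return EM_TRUE; // request the next animation frame
+//     }
+//     emscripten_request_animation_frame_loop(tick, this);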
+
+
+QVideoFrameFormat::PixelFormat QWasmVideoOutput::fromJsPixelFormat(std::string videoFormat)
+{
+ if (videoFormat == "I420")
+ return QVideoFrameFormat::Format_YUV420P;
+ // no equivalent pixel format
+ // else if (videoFormat == "I420A")
+ else if (videoFormat == "I422")
+ return QVideoFrameFormat::Format_YUV422P;
+ // no equivalent pixel format
+ // else if (videoFormat == "I444")
+ else if (videoFormat == "NV12")
+ return QVideoFrameFormat::Format_NV12;
+ else if (videoFormat == "RGBA")
+ return QVideoFrameFormat::Format_RGBA8888;
+ else if (videoFormat == "RGBX")
+ return QVideoFrameFormat::Format_RGBX8888;
+ else if (videoFormat == "BGRA")
+ return QVideoFrameFormat::Format_BGRA8888;
+ else if (videoFormat == "BGRX")
+ return QVideoFrameFormat::Format_BGRX8888;
+
+ return QVideoFrameFormat::Format_Invalid;
+}
+
+
+emscripten::val QWasmVideoOutput::getDeviceCapabilities()
+{
+ emscripten::val stream = m_video["srcObject"];
+    if (!stream.isUndefined() && !stream["getVideoTracks"].isUndefined()) {
+ emscripten::val tracks = stream.call<emscripten::val>("getVideoTracks");
+ if (!tracks.isUndefined()) {
+ if (tracks["length"].as<int>() == 0)
+ return emscripten::val::undefined();
+
+ emscripten::val track = tracks[0];
+ if (!track.isUndefined()) {
+ emscripten::val trackCaps = emscripten::val::undefined();
+ if (!track["getCapabilities"].isUndefined())
+ trackCaps = track.call<emscripten::val>("getCapabilities");
+ else // firefox does not support getCapabilities
+ trackCaps = track.call<emscripten::val>("getSettings");
+
+ if (!trackCaps.isUndefined())
+ return trackCaps;
+ }
+ }
+ } else {
+ // camera not started track capabilities not available
+ emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("capabilities not available"));
+ }
+
+ return emscripten::val::undefined();
+}
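+
+// For reference, a typical (illustrative, device-dependent) payload returned
+// by MediaStreamTrack.getCapabilities(), which the camera code further down
+// in this patch probes for keys such as "focusMode", "exposureMode", "torch"
+// and "colorTemperature":
+//
+//     {
+//       "deviceId": "...",
+//       "width": { "min": 1, "max": 1280 },
+//       "height": { "min": 1, "max": 720 },
+//       "frameRate": { "min": 0, "max": 30 },
+//       "focusMode": [ "continuous", "manual" ],
+//       "exposureMode": [ "continuous", "manual" ],
+//       "torch": false
+//     }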
+
+bool QWasmVideoOutput::setDeviceSetting(const std::string &key, emscripten::val value)
+{
+ emscripten::val stream = m_video["srcObject"];
+ if (stream.isNull() || stream.isUndefined()
+ || stream["getVideoTracks"].isUndefined())
+ return false;
+
+ emscripten::val tracks = stream.call<emscripten::val>("getVideoTracks");
+    if (!tracks.isNull() && !tracks.isUndefined()) {
+ if (tracks["length"].as<int>() == 0)
+ return false;
+
+ emscripten::val track = tracks[0];
+        emscripten::val constraint = emscripten::val::object();
+        constraint.set(key, value);
+        track.call<emscripten::val>("applyConstraints", constraint);
+ return true;
+ }
+
+ return false;
+}
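+
+// Usage sketch: the key/value pair is wrapped into a one-entry
+// MediaTrackConstraints object and applied to the first video track, e.g.
+// (mirroring QWasmCamera further down in this patch):
+//
+//     m_cameraOutput->setDeviceSetting("torch", emscripten::val(true));
+//     m_cameraOutput->setDeviceSetting("exposureMode", emscripten::val("manual"));
+//
+// Note that applyConstraints() is asynchronous and returns a Promise; the
+// bool returned above only reports that a request was issued, not that the
+// constraint was accepted.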
+
+EMSCRIPTEN_BINDINGS(qtwasmvideooutput) {
+ emscripten::function("qtVideoFrameTimerCallback", &QWasmVideoOutput::videoFrameCallback);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qwasmvideooutput_p.cpp"
diff --git a/src/plugins/multimedia/wasm/common/qwasmvideooutput_p.h b/src/plugins/multimedia/wasm/common/qwasmvideooutput_p.h
new file mode 100644
index 000000000..f078ffb44
--- /dev/null
+++ b/src/plugins/multimedia/wasm/common/qwasmvideooutput_p.h
@@ -0,0 +1,153 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#ifndef QWASMVIDEOOUTPUT_H
+#define QWASMVIDEOOUTPUT_H
+
+#include <QObject>
+
+#include <emscripten/val.h>
+#include <QMediaPlayer>
+#include <QVideoFrame>
+
+#include "qwasmmediaplayer_p.h"
+#include <QtCore/qloggingcategory.h>
+
+#include <private/qstdweb_p.h>
+
+QT_BEGIN_NAMESPACE
+
+Q_DECLARE_LOGGING_CATEGORY(qWasmMediaVideoOutput)
+
+class QVideoSink;
+
+class QWasmVideoOutput : public QObject
+{
+ Q_OBJECT
+public:
+ enum WasmVideoMode { VideoOutput, Camera };
+ Q_ENUM(WasmVideoMode)
+
+ explicit QWasmVideoOutput(QObject *parent = nullptr);
+
+ void setVideoSize(const QSize &);
+ void start();
+ void stop();
+ void reset();
+ void pause();
+
+ void setSurface(QVideoSink *surface);
+ emscripten::val surfaceElement();
+
+ bool isReady() const;
+
+ void setSource(const QUrl &url);
+ void setSource(QIODevice *stream);
+ void setVolume(qreal volume);
+ void setMuted(bool muted);
+
+ qint64 getCurrentPosition();
+ void seekTo(qint64 position);
+ bool isVideoSeekable();
+ void setPlaybackRate(qreal rate);
+ qreal playbackRate();
+
+ qint64 getDuration();
+ void newFrame(const QVideoFrame &newFrame);
+
+ void createVideoElement(const std::string &id);
+ void createOffscreenElement(const QSize &offscreenSize);
+ void doElementCallbacks();
+ void updateVideoElementGeometry(const QRect &windowGeometry);
+ void updateVideoElementSource(const QString &src);
+ void addCameraSourceElement(const std::string &id);
+ void removeSourceElement();
+ void setVideoMode(QWasmVideoOutput::WasmVideoMode mode);
+
+ void setHasAudio(bool needsAudio) { m_hasAudio = needsAudio; }
+
+ bool hasCapability(const QString &cap);
+ emscripten::val getDeviceCapabilities();
+ bool setDeviceSetting(const std::string &key, emscripten::val value);
+ bool isCameraReady() { return m_cameraIsReady; }
+ bool m_hasVideoFrame = false;
+
+ static void videoFrameCallback(emscripten::val now, emscripten::val metadata);
+ void videoFrameTimerCallback();
+ // mediacapturesession has the videosink
+ QVideoSink *m_wasmSink = nullptr;
+
+ emscripten::val currentVideoElement() { return m_video; }
+
+Q_SIGNALS:
+ void readyChanged(bool);
+ void bufferingChanged(qint32 percent);
+ void errorOccured(qint32 code, const QString &message);
+ void stateChanged(QWasmMediaPlayer::QWasmMediaPlayerState newState);
+ void progressChanged(qint32 position);
+ void durationChanged(qint64 duration);
+ void statusChanged(QMediaPlayer::MediaStatus status);
+ void sizeChange(qint32 width, qint32 height);
+ void metaDataLoaded();
+
+private:
+ void checkNetworkState();
+ void videoComputeFrame(void *context);
+ void getDeviceSettings();
+
+ static QVideoFrameFormat::PixelFormat fromJsPixelFormat(std::string videoFormat);
+
+ emscripten::val m_video = emscripten::val::undefined();
+ emscripten::val m_videoElementSource = emscripten::val::undefined();
+
+ QString m_source;
+ float m_requestedPosition = 0.0;
+ emscripten::val m_offscreen = emscripten::val::undefined();
+
+ bool m_shouldStop = false;
+ bool m_toBePaused = false;
+ bool m_isSeeking = false;
+ bool m_hasAudio = false;
+ bool m_cameraIsReady = false;
+ bool m_shouldBeStarted = false;
+
+ emscripten::val m_offscreenContext = emscripten::val::undefined();
+ QSize m_pendingVideoSize;
+ QWasmVideoOutput::WasmVideoMode m_currentVideoMode = QWasmVideoOutput::VideoOutput;
+    QMediaPlayer::MediaStatus m_currentMediaStatus = QMediaPlayer::NoMedia;
+    qreal m_currentBufferedValue = 0.;
+
+ QScopedPointer<qstdweb::EventCallback> m_timeUpdateEvent;
+ QScopedPointer<qstdweb::EventCallback> m_playEvent;
+ QScopedPointer<qstdweb::EventCallback> m_endedEvent;
+ QScopedPointer<qstdweb::EventCallback> m_durationChangeEvent;
+ QScopedPointer<qstdweb::EventCallback> m_loadedDataEvent;
+ QScopedPointer<qstdweb::EventCallback> m_errorChangeEvent;
+ QScopedPointer<qstdweb::EventCallback> m_resizeChangeEvent;
+ QScopedPointer<qstdweb::EventCallback> m_loadedMetadataChangeEvent;
+ QScopedPointer<qstdweb::EventCallback> m_loadStartChangeEvent;
+ QScopedPointer<qstdweb::EventCallback> m_canPlayChangeEvent;
+ QScopedPointer<qstdweb::EventCallback> m_canPlayThroughChangeEvent;
+ QScopedPointer<qstdweb::EventCallback> m_seekingChangeEvent;
+ QScopedPointer<qstdweb::EventCallback> m_seekedChangeEvent;
+ QScopedPointer<qstdweb::EventCallback> m_emptiedChangeEvent;
+ QScopedPointer<qstdweb::EventCallback> m_stalledChangeEvent;
+ QScopedPointer<qstdweb::EventCallback> m_waitingChangeEvent;
+ QScopedPointer<qstdweb::EventCallback> m_playingChangeEvent;
+ QScopedPointer<qstdweb::EventCallback> m_progressChangeEvent;
+ QScopedPointer<qstdweb::EventCallback> m_pauseChangeEvent;
+};
+
+QT_END_NAMESPACE
+#endif // QWASMVIDEOOUTPUT_H
diff --git a/src/plugins/multimedia/wasm/mediacapture/qwasmcamera.cpp b/src/plugins/multimedia/wasm/mediacapture/qwasmcamera.cpp
new file mode 100644
index 000000000..fbc5cf262
--- /dev/null
+++ b/src/plugins/multimedia/wasm/mediacapture/qwasmcamera.cpp
@@ -0,0 +1,478 @@
+// Copyright (C) 2022 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwasmcamera_p.h"
+#include "qmediadevices.h"
+#include <qcameradevice.h>
+#include "private/qplatformvideosink_p.h"
+#include <private/qmemoryvideobuffer_p.h>
+#include <private/qvideotexturehelper_p.h>
+#include <private/qwasmmediadevices_p.h>
+
+#include "qwasmmediacapturesession_p.h"
+#include <common/qwasmvideooutput_p.h>
+
+#include <emscripten/val.h>
+#include <emscripten/bind.h>
+#include <emscripten/html5.h>
+#include <QUuid>
+#include <QTimer>
+
+#include <private/qstdweb_p.h>
+
+Q_LOGGING_CATEGORY(qWasmCamera, "qt.multimedia.wasm.camera")
+
+QWasmCamera::QWasmCamera(QCamera *camera)
+ : QPlatformCamera(camera),
+ m_cameraOutput(new QWasmVideoOutput),
+ m_cameraIsReady(false)
+{
+ QWasmMediaDevices *wasmMediaDevices =
+ static_cast<QWasmMediaDevices *>(QPlatformMediaIntegration::instance()->mediaDevices());
+
+    connect(wasmMediaDevices, &QWasmMediaDevices::videoInputsChanged, this, [this]() {
+ const QList<QCameraDevice> cameras = QMediaDevices::videoInputs();
+
+ if (!cameras.isEmpty()) {
+ if (m_cameraDev.id().isEmpty())
+ setCamera(cameras.at(0)); // default camera
+ else
+ setCamera(m_cameraDev);
+ return;
+ }
+ });
+
+ connect(this, &QWasmCamera::cameraIsReady, this, [this]() {
+ m_cameraIsReady = true;
+ if (m_cameraShouldStartActive) {
+            // give the getUserMedia stream a moment to attach before activating
+            QTimer::singleShot(50, this, [this]() {
+ setActive(true);
+ });
+ }
+ });
+}
+
+QWasmCamera::~QWasmCamera() = default;
+
+bool QWasmCamera::isActive() const
+{
+ return m_cameraActive;
+}
+
+void QWasmCamera::setActive(bool active)
+{
+ if (!m_CaptureSession) {
+ updateError(QCamera::CameraError, QStringLiteral("video surface error"));
+ m_shouldBeActive = true;
+ return;
+ }
+
+ if (!m_cameraIsReady) {
+ m_cameraShouldStartActive = true;
+ return;
+ }
+
+ QVideoSink *sink = m_CaptureSession->videoSink();
+ if (!sink) {
+ qWarning() << Q_FUNC_INFO << "sink not ready";
+ return;
+ }
+
+ m_cameraOutput->setSurface(m_CaptureSession->videoSink());
+ m_cameraActive = active;
+ m_shouldBeActive = false;
+
+ if (m_cameraActive)
+ m_cameraOutput->start();
+ else
+ m_cameraOutput->pause();
+
+ updateCameraFeatures();
+ emit activeChanged(active);
+}
+
+void QWasmCamera::setCamera(const QCameraDevice &camera)
+{
+ if (!m_cameraDev.id().isEmpty())
+ return;
+
+ m_cameraOutput->setVideoMode(QWasmVideoOutput::Camera);
+
+ constexpr QSize initialSize(0, 0);
+ constexpr QRect initialRect(QPoint(0, 0), initialSize);
+ m_cameraOutput->createVideoElement(camera.id().toStdString()); // videoElementId
+ m_cameraOutput->createOffscreenElement(initialSize);
+ m_cameraOutput->updateVideoElementGeometry(initialRect);
+
+ const auto cameras = QMediaDevices::videoInputs();
+
+ if (std::find(cameras.begin(), cameras.end(), camera) != cameras.end()) {
+ m_cameraDev = camera;
+ createCamera(m_cameraDev);
+ emit cameraIsReady();
+ return;
+ }
+
+ if (cameras.count() > 0) {
+ m_cameraDev = camera;
+ createCamera(m_cameraDev);
+ emit cameraIsReady();
+ } else {
+ updateError(QCamera::CameraError, QStringLiteral("Failed to find a camera"));
+ }
+}
+
+bool QWasmCamera::setCameraFormat(const QCameraFormat &format)
+{
+ m_cameraFormat = format;
+
+ return true;
+}
+
+void QWasmCamera::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+ QWasmMediaCaptureSession *captureSession = static_cast<QWasmMediaCaptureSession *>(session);
+ if (m_CaptureSession == captureSession)
+ return;
+
+ m_CaptureSession = captureSession;
+
+ if (m_shouldBeActive)
+ setActive(true);
+}
+
+void QWasmCamera::setFocusMode(QCamera::FocusMode mode)
+{
+ if (!isFocusModeSupported(mode))
+ return;
+
+ static constexpr std::string_view focusModeString = "focusMode";
+ if (mode == QCamera::FocusModeManual)
+ m_cameraOutput->setDeviceSetting(focusModeString.data(), emscripten::val("manual"));
+ if (mode == QCamera::FocusModeAuto)
+ m_cameraOutput->setDeviceSetting(focusModeString.data(), emscripten::val("continuous"));
+ focusModeChanged(mode);
+}
+
+bool QWasmCamera::isFocusModeSupported(QCamera::FocusMode mode) const
+{
+ emscripten::val caps = m_cameraOutput->getDeviceCapabilities();
+ if (caps.isUndefined())
+ return false;
+
+ emscripten::val focusMode = caps["focusMode"];
+ if (focusMode.isUndefined())
+ return false;
+
+ std::vector<std::string> focalModes;
+
+ for (int i = 0; i < focusMode["length"].as<int>(); i++)
+ focalModes.push_back(focusMode[i].as<std::string>());
+
+    // TODO: should focusDistance be taken into account? It is not always
+    // available, and it is unclear what distance would count as far or near.
+
+ bool found = false;
+ switch (mode) {
+ case QCamera::FocusModeAuto:
+ return std::find(focalModes.begin(), focalModes.end(), "continuous") != focalModes.end()
+ || std::find(focalModes.begin(), focalModes.end(), "single-shot")
+ != focalModes.end();
+ case QCamera::FocusModeAutoNear:
+ case QCamera::FocusModeAutoFar:
+ case QCamera::FocusModeHyperfocal:
+ case QCamera::FocusModeInfinity:
+ break;
+ case QCamera::FocusModeManual:
+ found = std::find(focalModes.begin(), focalModes.end(), "manual") != focalModes.end();
+ };
+ return found;
+}
+
+void QWasmCamera::setTorchMode(QCamera::TorchMode mode)
+{
+ if (!isTorchModeSupported(mode))
+ return;
+
+ if (m_wasmTorchMode == mode)
+ return;
+
+    static constexpr std::string_view torchModeString = "torch";
+ bool hasChanged = false;
+ switch (mode) {
+ case QCamera::TorchOff:
+ m_cameraOutput->setDeviceSetting(torchModeString.data(), emscripten::val(false));
+ hasChanged = true;
+ break;
+ case QCamera::TorchOn:
+ m_cameraOutput->setDeviceSetting(torchModeString.data(), emscripten::val(true));
+ hasChanged = true;
+ break;
+ case QCamera::TorchAuto:
+ break;
+ };
+ m_wasmTorchMode = mode;
+ if (hasChanged)
+ torchModeChanged(m_wasmTorchMode);
+}
+
+bool QWasmCamera::isTorchModeSupported(QCamera::TorchMode mode) const
+{
+ if (!m_cameraIsReady)
+ return false;
+
+ emscripten::val caps = m_cameraOutput->getDeviceCapabilities();
+ if (caps.isUndefined())
+ return false;
+
+    emscripten::val torchCap = caps["torch"];
+    if (torchCap.isUndefined())
+        return false;
+
+ return (mode != QCamera::TorchAuto);
+}
+
+void QWasmCamera::setExposureMode(QCamera::ExposureMode mode)
+{
+ // TODO manually come up with exposureTime values ?
+ if (!isExposureModeSupported(mode))
+ return;
+
+ if (m_wasmExposureMode == mode)
+ return;
+
+ bool hasChanged = false;
+ static constexpr std::string_view exposureModeString = "exposureMode";
+ switch (mode) {
+ case QCamera::ExposureManual:
+ m_cameraOutput->setDeviceSetting(exposureModeString.data(), emscripten::val("manual"));
+ hasChanged = true;
+ break;
+ case QCamera::ExposureAuto:
+ m_cameraOutput->setDeviceSetting(exposureModeString.data(), emscripten::val("continuous"));
+ hasChanged = true;
+ break;
+ default:
+ break;
+ };
+
+ if (hasChanged) {
+ m_wasmExposureMode = mode;
+ exposureModeChanged(m_wasmExposureMode);
+ }
+}
+
+bool QWasmCamera::isExposureModeSupported(QCamera::ExposureMode mode) const
+{
+ if (!m_cameraIsReady)
+ return false;
+
+ emscripten::val caps = m_cameraOutput->getDeviceCapabilities();
+ if (caps.isUndefined())
+ return false;
+
+ emscripten::val exposureMode = caps["exposureMode"];
+ if (exposureMode.isUndefined())
+ return false;
+
+ std::vector<std::string> exposureModes;
+
+ for (int i = 0; i < exposureMode["length"].as<int>(); i++)
+ exposureModes.push_back(exposureMode[i].as<std::string>());
+
+ bool found = false;
+ switch (mode) {
+ case QCamera::ExposureAuto:
+ found = std::find(exposureModes.begin(), exposureModes.end(), "continuous")
+ != exposureModes.end();
+ break;
+ case QCamera::ExposureManual:
+ found = std::find(exposureModes.begin(), exposureModes.end(), "manual")
+ != exposureModes.end();
+ break;
+ default:
+ break;
+ };
+
+ return found;
+}
+
+void QWasmCamera::setExposureCompensation(float bias)
+{
+ if (!m_cameraIsReady)
+ return;
+
+ emscripten::val caps = m_cameraOutput->getDeviceCapabilities();
+ if (caps.isUndefined())
+ return;
+
+ emscripten::val exposureComp = caps["exposureCompensation"];
+ if (exposureComp.isUndefined())
+ return;
+ if (m_wasmExposureCompensation == bias)
+ return;
+
+ static constexpr std::string_view exposureCompensationModeString = "exposureCompensation";
+ m_cameraOutput->setDeviceSetting(exposureCompensationModeString.data(), emscripten::val(bias));
+ m_wasmExposureCompensation = bias;
+ emit exposureCompensationChanged(m_wasmExposureCompensation);
+}
+
+void QWasmCamera::setManualExposureTime(float secs)
+{
+ if (m_wasmExposureTime == secs)
+ return;
+
+ if (!m_cameraIsReady)
+ return;
+
+ emscripten::val caps = m_cameraOutput->getDeviceCapabilities();
+ if (caps.isUndefined())
+ return;
+
+ emscripten::val exposureTime = caps["exposureTime"];
+ if (exposureTime.isUndefined())
+ return;
+ static constexpr std::string_view exposureTimeString = "exposureTime";
+ m_cameraOutput->setDeviceSetting(exposureTimeString.data(), emscripten::val(secs));
+ m_wasmExposureTime = secs;
+ emit exposureTimeChanged(m_wasmExposureTime);
+}
+
+int QWasmCamera::isoSensitivity() const
+{
+ if (!m_cameraIsReady)
+ return 0;
+
+ emscripten::val caps = m_cameraOutput->getDeviceCapabilities();
+ if (caps.isUndefined())
+ return 0;
+
+ emscripten::val isoSpeed = caps["iso"];
+ if (isoSpeed.isUndefined())
+ return 0;
+
+ return static_cast<int>(isoSpeed.as<double>());
+}
+
+void QWasmCamera::setManualIsoSensitivity(int sens)
+{
+ if (!m_cameraIsReady)
+ return;
+
+ emscripten::val caps = m_cameraOutput->getDeviceCapabilities();
+ if (caps.isUndefined())
+ return;
+
+ emscripten::val isoSpeed = caps["iso"];
+ if (isoSpeed.isUndefined())
+ return;
+ if (m_wasmIsoSensitivity == sens)
+ return;
+ static constexpr std::string_view isoString = "iso";
+ m_cameraOutput->setDeviceSetting(isoString.data(), emscripten::val(sens));
+ m_wasmIsoSensitivity = sens;
+ emit isoSensitivityChanged(m_wasmIsoSensitivity);
+}
+
+bool QWasmCamera::isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const
+{
+ if (!m_cameraIsReady)
+ return false;
+
+ emscripten::val caps = m_cameraOutput->getDeviceCapabilities();
+ if (caps.isUndefined())
+ return false;
+
+ emscripten::val whiteBalanceMode = caps["whiteBalanceMode"];
+ if (whiteBalanceMode.isUndefined())
+ return false;
+
+ return mode == QCamera::WhiteBalanceAuto || mode == QCamera::WhiteBalanceManual;
+}
+
+void QWasmCamera::setWhiteBalanceMode(QCamera::WhiteBalanceMode mode)
+{
+ if (!isWhiteBalanceModeSupported(mode))
+ return;
+
+ if (m_wasmWhiteBalanceMode == mode)
+ return;
+
+ bool hasChanged = false;
+ static constexpr std::string_view whiteBalanceModeString = "whiteBalanceMode";
+ switch (mode) {
+ case QCamera::WhiteBalanceAuto:
+ m_cameraOutput->setDeviceSetting(whiteBalanceModeString.data(), emscripten::val("auto"));
+ hasChanged = true;
+ break;
+ case QCamera::WhiteBalanceManual:
+ m_cameraOutput->setDeviceSetting(whiteBalanceModeString.data(), emscripten::val("manual"));
+ hasChanged = true;
+ break;
+ default:
+ break;
+ }
+
+ if (hasChanged) {
+ m_wasmWhiteBalanceMode = mode;
+ emit whiteBalanceModeChanged(m_wasmWhiteBalanceMode);
+ }
+}
+
+void QWasmCamera::setColorTemperature(int temperature)
+{
+ if (!m_cameraIsReady)
+ return;
+
+ emscripten::val caps = m_cameraOutput->getDeviceCapabilities();
+ if (caps.isUndefined())
+ return;
+
+ emscripten::val colorTemperature = caps["colorTemperature"];
+ if (colorTemperature.isUndefined())
+ return;
+ if (m_wasmColorTemperature == temperature)
+ return;
+
+ static constexpr std::string_view colorTemperatureString = "colorTemperature";
+ m_cameraOutput->setDeviceSetting(colorTemperatureString.data(), emscripten::val(temperature));
+ m_wasmColorTemperature = temperature;
+ emit colorTemperatureChanged(m_wasmColorTemperature);
+}
+
+void QWasmCamera::createCamera(const QCameraDevice &camera)
+{
+ m_cameraOutput->addCameraSourceElement(camera.id().toStdString());
+}
+
+void QWasmCamera::updateCameraFeatures()
+{
+ if (!m_cameraIsReady)
+ return;
+
+ emscripten::val caps = m_cameraOutput->getDeviceCapabilities();
+ if (caps.isUndefined())
+ return;
+
+ QCamera::Features cameraFeatures;
+
+ if (!caps["colorTemperature"].isUndefined())
+ cameraFeatures |= QCamera::Feature::ColorTemperature;
+
+ if (!caps["exposureCompensation"].isUndefined())
+ cameraFeatures |= QCamera::Feature::ExposureCompensation;
+
+ if (!caps["iso"].isUndefined())
+ cameraFeatures |= QCamera::Feature::IsoSensitivity;
+
+ if (!caps["exposureTime"].isUndefined())
+ cameraFeatures |= QCamera::Feature::ManualExposureTime;
+
+ if (!caps["focusDistance"].isUndefined())
+ cameraFeatures |= QCamera::Feature::FocusDistance;
+
+ emit supportedFeaturesChanged(cameraFeatures);
+}
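+
+// Application-side sketch (hypothetical usage): the flags published above
+// surface through QCamera::supportedFeatures():
+//
+//     QCamera camera;
+//     if (camera.supportedFeatures() & QCamera::Feature::ColorTemperature)
+//         camera.setColorTemperature(6500); // roughly daylight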
diff --git a/src/plugins/multimedia/wasm/mediacapture/qwasmcamera_p.h b/src/plugins/multimedia/wasm/mediacapture/qwasmcamera_p.h
new file mode 100644
index 000000000..7bb6d02d7
--- /dev/null
+++ b/src/plugins/multimedia/wasm/mediacapture/qwasmcamera_p.h
@@ -0,0 +1,99 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWASMCAMERA_H
+#define QWASMCAMERA_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformcamera_p.h>
+#include <private/qplatformvideodevices_p.h>
+#include <common/qwasmvideooutput_p.h>
+
+#include <QCameraDevice>
+#include <QtCore/qloggingcategory.h>
+
+#include <emscripten/val.h>
+#include <emscripten/bind.h>
+
+QT_BEGIN_NAMESPACE
+
+Q_DECLARE_LOGGING_CATEGORY(qWasmCamera)
+
+class QWasmMediaCaptureSession;
+
+class QWasmCamera : public QPlatformCamera
+{
+ Q_OBJECT
+
+public:
+ explicit QWasmCamera(QCamera *camera);
+ ~QWasmCamera();
+
+ bool isActive() const override;
+ void setActive(bool active) override;
+
+ void setCamera(const QCameraDevice &camera) override;
+ bool setCameraFormat(const QCameraFormat &format) override;
+
+ void setCaptureSession(QPlatformMediaCaptureSession *session) override;
+
+ void setFocusMode(QCamera::FocusMode mode) override;
+ bool isFocusModeSupported(QCamera::FocusMode mode) const override;
+
+ void setTorchMode(QCamera::TorchMode mode) override;
+ bool isTorchModeSupported(QCamera::TorchMode mode) const override;
+
+ void setExposureMode(QCamera::ExposureMode mode) override;
+ bool isExposureModeSupported(QCamera::ExposureMode mode) const override;
+ void setExposureCompensation(float bias) override;
+
+ void setManualExposureTime(float) override;
+ int isoSensitivity() const override;
+ void setManualIsoSensitivity(int) override;
+
+ bool isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const override;
+ void setWhiteBalanceMode(QCamera::WhiteBalanceMode mode) override;
+
+ void setColorTemperature(int temperature) override;
+
+ QWasmVideoOutput *cameraOutput() { return m_cameraOutput.data(); }
+
+Q_SIGNALS:
+ void cameraIsReady();
+
+private:
+ void createCamera(const QCameraDevice &camera);
+ void updateCameraFeatures();
+
+ QCameraDevice m_cameraDev;
+ QWasmMediaCaptureSession *m_CaptureSession = nullptr;
+ bool m_cameraActive = false;
+ QScopedPointer<QWasmVideoOutput> m_cameraOutput;
+
+ emscripten::val supportedCapabilities = emscripten::val::object(); // browser
+ emscripten::val currentCapabilities = emscripten::val::object(); // camera track
+ emscripten::val currentSettings = emscripten::val::object(); // camera track
+
+ // the setters compare against these members before the first assignment,
+ // so they must not be left uninitialized
+ QCamera::TorchMode m_wasmTorchMode = QCamera::TorchOff;
+ QCamera::ExposureMode m_wasmExposureMode = QCamera::ExposureAuto;
+ float m_wasmExposureTime = 0.0f;
+ float m_wasmExposureCompensation = 0.0f;
+ int m_wasmIsoSensitivity = 0;
+ QCamera::WhiteBalanceMode m_wasmWhiteBalanceMode = QCamera::WhiteBalanceAuto;
+ int m_wasmColorTemperature = 0;
+ bool m_cameraIsReady = false;
+ bool m_cameraShouldStartActive = false;
+ bool m_shouldBeActive = false;
+};
+
+QT_END_NAMESPACE
+
+#endif // QWASMCAMERA_H
diff --git a/src/plugins/multimedia/wasm/mediacapture/qwasmimagecapture.cpp b/src/plugins/multimedia/wasm/mediacapture/qwasmimagecapture.cpp
new file mode 100644
index 000000000..f62d6f1a6
--- /dev/null
+++ b/src/plugins/multimedia/wasm/mediacapture/qwasmimagecapture.cpp
@@ -0,0 +1,130 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwasmimagecapture_p.h"
+#include <qimagewriter.h>
+#include "qwasmmediacapturesession_p.h"
+#include "qwasmcamera_p.h"
+#include "qwasmvideosink_p.h"
+
+QT_BEGIN_NAMESPACE
+
+Q_LOGGING_CATEGORY(qWasmImageCapture, "qt.multimedia.wasm.imagecapture")
+// TODO: emit the imageExposed() signal once the capture pipeline can report exposure
+QWasmImageCapture::QWasmImageCapture(QImageCapture *parent) : QPlatformImageCapture(parent) { }
+
+QWasmImageCapture::~QWasmImageCapture() = default;
+
+int QWasmImageCapture::capture(const QString &fileName)
+{
+ if (!isReadyForCapture()) {
+ emit error(m_lastId, QImageCapture::NotReadyError, msgCameraNotReady());
+ return -1;
+ }
+
+ // TODO if fileName.isEmpty() we choose filename and location
+
+ QImage image = takePicture();
+ if (image.isNull())
+ return -1;
+
+ QImageWriter writer(fileName);
+ // TODO
+ // writer.setQuality(quality);
+ // writer.setFormat("png");
+
+ if (writer.write(image)) {
+ qCDebug(qWasmImageCapture) << Q_FUNC_INFO << "image saved";
+ emit imageSaved(m_lastId, fileName);
+ } else {
+ QImageCapture::Error err = (writer.error() == QImageWriter::UnsupportedFormatError)
+ ? QImageCapture::FormatError
+ : QImageCapture::ResourceError;
+
+ emit error(m_lastId, err, writer.errorString());
+ }
+
+ return m_lastId;
+}
+
+int QWasmImageCapture::captureToBuffer()
+{
+ if (!isReadyForCapture()) {
+ emit error(m_lastId, QImageCapture::NotReadyError, msgCameraNotReady());
+ return -1;
+ }
+
+ QImage image = takePicture();
+ if (image.isNull())
+ return -1;
+
+ // takePicture() has already emitted imageCaptured() for this frame
+ return m_lastId;
+}
+
+QImage QWasmImageCapture::takePicture()
+{
+ QVideoFrame thisFrame = m_captureSession->videoSink()->videoFrame();
+ if (!thisFrame.isValid())
+ return QImage();
+
+ m_lastId++;
+ emit imageAvailable(m_lastId, thisFrame);
+
+ QImage image = thisFrame.toImage();
+ if (image.isNull()) {
+ qCDebug(qWasmImageCapture) << Q_FUNC_INFO << "image is null";
+ emit error(m_lastId, QImageCapture::ResourceError, QStringLiteral("Resource error"));
+ return QImage();
+ }
+
+ if (m_settings.resolution().isValid() && m_settings.resolution() != image.size())
+ image = image.scaled(m_settings.resolution());
+
+ emit imageCaptured(m_lastId, image);
+
+ return image;
+}
+
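+// Capture flow sketch (hypothetical usage): both capture() and
+// captureToBuffer() funnel through takePicture(), which converts the most
+// recent frame of the session's video sink; "session" here is an assumed
+// QMediaCaptureSession wired to this backend:
+//
+//     QImageCapture imageCapture;
+//     session.setImageCapture(&imageCapture);
+//     QObject::connect(&imageCapture, &QImageCapture::imageCaptured,
+//                      [](int id, const QImage &img) { qDebug() << id << img.size(); });
+//     imageCapture.capture(QStringLiteral("/home/web_user/shot.png"));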
+bool QWasmImageCapture::isReadyForCapture() const
+{
+ return m_isReadyForCapture;
+}
+
+QImageEncoderSettings QWasmImageCapture::imageSettings() const
+{
+ return m_settings;
+}
+
+void QWasmImageCapture::setImageSettings(const QImageEncoderSettings &settings)
+{
+ m_settings = settings;
+}
+
+void QWasmImageCapture::setReadyForCapture(bool isReady)
+{
+ if (m_isReadyForCapture != isReady) {
+ m_isReadyForCapture = isReady;
+ emit readyForCaptureChanged(m_isReadyForCapture);
+ }
+}
+
+void QWasmImageCapture::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+ QWasmMediaCaptureSession *captureSession = static_cast<QWasmMediaCaptureSession *>(session);
+ // nullptr clears the session
+ if (m_captureSession == captureSession)
+ return;
+
+ m_captureSession = captureSession;
+ m_isReadyForCapture = (captureSession != nullptr);
+ if (captureSession)
+ m_lastId = 0;
+
+ emit readyForCaptureChanged(m_isReadyForCapture);
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/wasm/mediacapture/qwasmimagecapture_p.h b/src/plugins/multimedia/wasm/mediacapture/qwasmimagecapture_p.h
new file mode 100644
index 000000000..2e9e9b227
--- /dev/null
+++ b/src/plugins/multimedia/wasm/mediacapture/qwasmimagecapture_p.h
@@ -0,0 +1,58 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWASMIMAGECAPTURE_H
+#define QWASMIMAGECAPTURE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QObject>
+#include <private/qplatformimagecapture_p.h>
+#include <QtCore/qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+Q_DECLARE_LOGGING_CATEGORY(qWasmImageCapture)
+
+class QWasmMediaCaptureSession;
+
+class QWasmImageCapture : public QPlatformImageCapture
+{
+ Q_OBJECT
+public:
+ explicit QWasmImageCapture(QImageCapture *parent = nullptr);
+ ~QWasmImageCapture();
+
+ bool isReadyForCapture() const override;
+
+ int capture(const QString &fileName) override;
+ int captureToBuffer() override;
+
+ QImageEncoderSettings imageSettings() const override;
+ void setImageSettings(const QImageEncoderSettings &settings) override;
+
+ void setReadyForCapture(bool isReady);
+
+ void setCaptureSession(QPlatformMediaCaptureSession *session);
+
+private:
+ QImage takePicture();
+
+ // weak
+ QWasmMediaCaptureSession *m_captureSession = nullptr;
+ QImageEncoderSettings m_settings;
+ bool m_isReadyForCapture = false;
+ int m_lastId = 0;
+};
+
+QT_END_NAMESPACE
+#endif // QWASMIMAGECAPTURE_H
diff --git a/src/plugins/multimedia/wasm/mediacapture/qwasmmediacapturesession.cpp b/src/plugins/multimedia/wasm/mediacapture/qwasmmediacapturesession.cpp
new file mode 100644
index 000000000..826650570
--- /dev/null
+++ b/src/plugins/multimedia/wasm/mediacapture/qwasmmediacapturesession.cpp
@@ -0,0 +1,111 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwasmmediacapturesession_p.h"
+#include "mediacapture/qwasmimagecapture_p.h"
+
+#include "qwasmcamera_p.h"
+#include <private/qplatformmediadevices_p.h>
+#include <private/qplatformmediaintegration_p.h>
+#include <private/qwasmmediadevices_p.h>
+
+QT_BEGIN_NAMESPACE
+
+Q_LOGGING_CATEGORY(qWasmMediaCaptureSession, "qt.multimedia.wasm.capturesession")
+
+QWasmMediaCaptureSession::QWasmMediaCaptureSession()
+{
+ QWasmMediaDevices *wasmMediaDevices = static_cast<QWasmMediaDevices *>(QPlatformMediaIntegration::instance()->mediaDevices());
+ wasmMediaDevices->initDevices();
+}
+
+QWasmMediaCaptureSession::~QWasmMediaCaptureSession() = default;
+
+QPlatformCamera *QWasmMediaCaptureSession::camera()
+{
+ return m_camera.data();
+}
+
+void QWasmMediaCaptureSession::setCamera(QPlatformCamera *camera)
+{
+ if (!camera) {
+ if (m_camera == nullptr)
+ return;
+ m_camera.reset(nullptr);
+ } else {
+ QWasmCamera *wasmCamera = static_cast<QWasmCamera *>(camera);
+ if (m_camera.data() == wasmCamera)
+ return;
+ m_camera.reset(wasmCamera);
+ m_camera->setCaptureSession(this);
+ }
+ emit cameraChanged();
+}
+
+QPlatformImageCapture *QWasmMediaCaptureSession::imageCapture()
+{
+ return m_imageCapture;
+}
+
+void QWasmMediaCaptureSession::setImageCapture(QPlatformImageCapture *imageCapture)
+{
+ if (m_imageCapture == imageCapture)
+ return;
+
+ if (m_imageCapture)
+ m_imageCapture->setCaptureSession(nullptr);
+
+ m_imageCapture = static_cast<QWasmImageCapture *>(imageCapture);
+
+ if (m_imageCapture) {
+ m_imageCapture->setCaptureSession(this);
+ m_imageCapture->setReadyForCapture(true);
+ }
+
+ emit imageCaptureChanged();
+}
+
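+// Wiring sketch (hypothetical usage): a complete WASM capture session built
+// from the public API; note that ownership of the platform camera passes to
+// this session through the QScopedPointer member:
+//
+//     QMediaCaptureSession session;
+//     QCamera camera;
+//     QImageCapture imageCapture;
+//     session.setCamera(&camera);
+//     session.setImageCapture(&imageCapture);
+//     camera.start();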
+QPlatformMediaRecorder *QWasmMediaCaptureSession::mediaRecorder()
+{
+ return m_mediaRecorder;
+}
+
+void QWasmMediaCaptureSession::setMediaRecorder(QPlatformMediaRecorder *mediaRecorder)
+{
+ if (m_mediaRecorder == mediaRecorder)
+ return;
+
+ if (m_mediaRecorder)
+ m_mediaRecorder->setCaptureSession(nullptr);
+
+ m_mediaRecorder = static_cast<QWasmMediaRecorder *>(mediaRecorder);
+
+ if (m_mediaRecorder)
+ m_mediaRecorder->setCaptureSession(this);
+}
+
+void QWasmMediaCaptureSession::setAudioInput(QPlatformAudioInput *input)
+{
+ if (m_audioInput == input)
+ return;
+
+ m_needsAudio = (input != nullptr);
+ m_audioInput = input;
+}
+
+bool QWasmMediaCaptureSession::hasAudio()
+{
+ return m_needsAudio;
+}
+
+void QWasmMediaCaptureSession::setVideoPreview(QVideoSink *sink)
+{
+ if (m_wasmSink == sink)
+ return;
+ m_wasmSink = sink;
+}
+
+void QWasmMediaCaptureSession::setAudioOutput(QPlatformAudioOutput *output)
+{
+ if (m_audioOutput == output)
+ return;
+ m_audioOutput = output;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/wasm/mediacapture/qwasmmediacapturesession_p.h b/src/plugins/multimedia/wasm/mediacapture/qwasmmediacapturesession_p.h
new file mode 100644
index 000000000..817580c90
--- /dev/null
+++ b/src/plugins/multimedia/wasm/mediacapture/qwasmmediacapturesession_p.h
@@ -0,0 +1,71 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWASMMEDIACAPTURESESSION_H
+#define QWASMMEDIACAPTURESESSION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qwasmimagecapture_p.h"
+
+#include <private/qplatformmediacapture_p.h>
+#include <private/qplatformmediaintegration_p.h>
+#include "qwasmmediarecorder_p.h"
+#include <QScopedPointer>
+#include <QtCore/qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+Q_DECLARE_LOGGING_CATEGORY(qWasmMediaCaptureSession)
+
+class QAudioInput;
+class QWasmCamera;
+
+class QWasmMediaCaptureSession : public QPlatformMediaCaptureSession
+{
+public:
+ explicit QWasmMediaCaptureSession();
+ ~QWasmMediaCaptureSession() override;
+
+ QPlatformCamera *camera() override;
+ void setCamera(QPlatformCamera *camera) override;
+
+ QPlatformImageCapture *imageCapture() override;
+ void setImageCapture(QPlatformImageCapture *imageCapture) override;
+
+ QPlatformMediaRecorder *mediaRecorder() override;
+ void setMediaRecorder(QPlatformMediaRecorder *recorder) override;
+
+ void setAudioInput(QPlatformAudioInput *input) override;
+ QPlatformAudioInput *audioInput() const { return m_audioInput; }
+ void setVideoPreview(QVideoSink *sink) override;
+ void setAudioOutput(QPlatformAudioOutput *output) override;
+
+ bool hasAudio();
+ QVideoSink *videoSink() { return m_wasmSink; }
+
+private:
+ QWasmMediaRecorder *m_mediaRecorder = nullptr;
+
+ QScopedPointer<QWasmCamera> m_camera;
+
+ QWasmImageCapture *m_imageCapture = nullptr;
+
+ QPlatformAudioInput *m_audioInput = nullptr;
+ QPlatformAudioOutput *m_audioOutput = nullptr;
+ bool m_needsAudio = false;
+ QVideoSink *m_wasmSink = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif // QWASMMEDIACAPTURESESSION_H
diff --git a/src/plugins/multimedia/wasm/mediacapture/qwasmmediarecorder.cpp b/src/plugins/multimedia/wasm/mediacapture/qwasmmediarecorder.cpp
new file mode 100644
index 000000000..98f04616a
--- /dev/null
+++ b/src/plugins/multimedia/wasm/mediacapture/qwasmmediarecorder.cpp
@@ -0,0 +1,520 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwasmmediarecorder_p.h"
+#include "qwasmmediacapturesession_p.h"
+#include <private/qplatformmediadevices_p.h>
+#include <private/qplatformmediaintegration_p.h>
+#include "qwasmcamera_p.h"
+#include "qwasmaudioinput_p.h"
+
+#include <private/qstdweb_p.h>
+#include <QtCore/QIODevice>
+#include <QFile>
+#include <QTimer>
+#include <QDebug>
+
+QT_BEGIN_NAMESPACE
+
+Q_LOGGING_CATEGORY(qWasmMediaRecorder, "qt.multimedia.wasm.mediarecorder")
+
+QWasmMediaRecorder::QWasmMediaRecorder(QMediaRecorder *parent)
+ : QPlatformMediaRecorder(parent)
+{
+ m_durationTimer.reset(new QElapsedTimer());
+ QPlatformMediaIntegration::instance()->mediaDevices(); // initialize getUserMedia
+}
+
+QWasmMediaRecorder::~QWasmMediaRecorder()
+{
+ // m_outputTarget is only created once record() runs, so it may still be null
+ if (m_outputTarget && m_outputTarget->isOpen())
+ m_outputTarget->close();
+
+ if (!m_mediaRecorder.isNull()) {
+ m_mediaStreamDataAvailable.reset(nullptr);
+ m_mediaStreamStopped.reset(nullptr);
+ m_mediaStreamError.reset(nullptr);
+ m_mediaStreamStart.reset(nullptr);
+ }
+}
+
+bool QWasmMediaRecorder::isLocationWritable(const QUrl &location) const
+{
+ return location.isValid() && (location.isLocalFile() || location.isRelative());
+}
+
+QMediaRecorder::RecorderState QWasmMediaRecorder::state() const
+{
+ QMediaRecorder::RecorderState recordingState = QMediaRecorder::StoppedState;
+
+ if (!m_mediaRecorder.isUndefined()) {
+ std::string state = m_mediaRecorder["state"].as<std::string>();
+ if (state == "recording")
+ recordingState = QMediaRecorder::RecordingState;
+ else if (state == "paused")
+ recordingState = QMediaRecorder::PausedState;
+ }
+ return recordingState;
+}
+
+qint64 QWasmMediaRecorder::duration() const
+{ // milliseconds
+ return m_durationMs;
+}
+
+void QWasmMediaRecorder::record(QMediaEncoderSettings &settings)
+{
+ if (!m_session)
+ return;
+
+ m_mediaSettings = settings;
+ initUserMedia();
+}
+
+void QWasmMediaRecorder::pause()
+{
+ if (!m_session || (m_mediaRecorder.isUndefined() || m_mediaRecorder.isNull())) {
+ qCDebug(qWasmMediaRecorder) << Q_FUNC_INFO << "could not find MediaRecorder";
+ return;
+ }
+ m_mediaRecorder.call<void>("pause");
+ emit stateChanged(state());
+}
+
+void QWasmMediaRecorder::resume()
+{
+ if (!m_session || (m_mediaRecorder.isUndefined() || m_mediaRecorder.isNull())) {
+ qCDebug(qWasmMediaRecorder)<< Q_FUNC_INFO << "could not find MediaRecorder";
+ return;
+ }
+
+ m_mediaRecorder.call<void>("resume");
+ emit stateChanged(state());
+}
+
+void QWasmMediaRecorder::stop()
+{
+ if (!m_session || (m_mediaRecorder.isUndefined() || m_mediaRecorder.isNull())) {
+ qCDebug(qWasmMediaRecorder)<< Q_FUNC_INFO << "could not find MediaRecorder";
+ return;
+ }
+ if (m_mediaRecorder["state"].as<std::string>() == "recording")
+ m_mediaRecorder.call<void>("stop");
+}
+
+void QWasmMediaRecorder::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+ m_session = static_cast<QWasmMediaCaptureSession *>(session);
+}
+
+bool QWasmMediaRecorder::hasCamera() const
+{
+ return m_session && m_session->camera();
+}
+
+void QWasmMediaRecorder::initUserMedia()
+{
+ setUpFileSink();
+ emscripten::val navigator = emscripten::val::global("navigator");
+ emscripten::val mediaDevices = navigator["mediaDevices"];
+
+ if (mediaDevices.isNull() || mediaDevices.isUndefined()) {
+ qCDebug(qWasmMediaRecorder) << "MediaDevices are undefined or null";
+ return;
+ }
+
+ if (!m_session)
+ return;
+ qCDebug(qWasmMediaRecorder) << Q_FUNC_INFO << m_session;
+
+ emscripten::val stream = emscripten::val::undefined();
+ if (hasCamera()) {
+ qCDebug(qWasmMediaRecorder) << Q_FUNC_INFO << "has camera";
+ QWasmCamera *wasmCamera = reinterpret_cast<QWasmCamera *>(m_session->camera());
+
+ if (wasmCamera) {
+ emscripten::val m_video = wasmCamera->cameraOutput()->surfaceElement();
+ if (m_video.isNull() || m_video.isUndefined()) {
+ qCDebug(qWasmMediaRecorder) << Q_FUNC_INFO << "video element not found";
+ return;
+ }
+
+ stream = m_video["srcObject"];
+ if (stream.isNull() || stream.isUndefined()) {
+ qCDebug(qWasmMediaRecorder) << Q_FUNC_INFO << "Video input stream not found";
+ return;
+ }
+ }
+ } else {
+ qCDebug(qWasmMediaRecorder) << Q_FUNC_INFO << "has audio";
+ stream = static_cast<QWasmAudioInput *>(m_session->audioInput())->mediaStream();
+
+ if (stream.isNull() || stream.isUndefined()) {
+ qCDebug(qWasmMediaRecorder) << Q_FUNC_INFO << "Audio input stream not found";
+ return;
+ }
+ }
+ if (stream.isNull() || stream.isUndefined()) {
+ qCDebug(qWasmMediaRecorder) << Q_FUNC_INFO << "No input stream found";
+ return;
+ }
+
+ setStream(stream);
+}
+
+void QWasmMediaRecorder::startAudioRecording()
+{
+ startStream();
+}
+
+void QWasmMediaRecorder::setStream(emscripten::val stream)
+{
+ emscripten::val emMediaSettings = emscripten::val::object();
+ QMediaFormat::VideoCodec videoCodec = m_mediaSettings.videoCodec();
+ QMediaFormat::AudioCodec audioCodec = m_mediaSettings.audioCodec();
+ QMediaFormat::FileFormat fileFormat = m_mediaSettings.fileFormat();
+
+ // mime and codecs
+ QString mimeCodec;
+ if (!m_mediaSettings.mimeType().name().isEmpty()) {
+ mimeCodec = m_mediaSettings.mimeType().name();
+
+ if (videoCodec != QMediaFormat::VideoCodec::Unspecified)
+ mimeCodec += QStringLiteral(";codecs="); // MIME parameter syntax, e.g. "video/webm;codecs=vp8"
+
+ if (audioCodec != QMediaFormat::AudioCodec::Unspecified) {
+ // TODO
+ }
+
+ if (fileFormat != QMediaFormat::UnspecifiedFormat)
+ mimeCodec += QMediaFormat::fileFormatName(m_mediaSettings.fileFormat());
+
+ emMediaSettings.set("mimeType", mimeCodec.toStdString());
+ }
+
+ if (m_mediaSettings.audioBitRate() > 0)
+ emMediaSettings.set("audioBitsPerSecond", emscripten::val(m_mediaSettings.audioBitRate()));
+
+ if (m_mediaSettings.videoBitRate() > 0)
+ emMediaSettings.set("videoBitsPerSecond", emscripten::val(m_mediaSettings.videoBitRate()));
+
+ // create the MediaRecorder, and set up data callback
+ m_mediaRecorder = emscripten::val::global("MediaRecorder").new_(stream, emMediaSettings);
+
+ qCDebug(qWasmMediaRecorder) << Q_FUNC_INFO << "m_mediaRecorder state:"
+ << QString::fromStdString(m_mediaRecorder["state"].as<std::string>());
+
+ if (m_mediaRecorder.isNull() || m_mediaRecorder.isUndefined()) {
+ qWarning() << "MediaRecorder could not be found";
+ return;
+ }
+ m_mediaRecorder.set("data-mediarecordercontext",
+ emscripten::val(quintptr(reinterpret_cast<void *>(this))));
+
+ if (!m_mediaStreamDataAvailable.isNull()) {
+ m_mediaStreamDataAvailable.reset();
+ m_mediaStreamStopped.reset();
+ m_mediaStreamError.reset();
+ m_mediaStreamStart.reset();
+ m_mediaStreamPause.reset();
+ m_mediaStreamResume.reset();
+ }
+
+ // dataavailable; the handler receives a BlobEvent, not the Blob itself
+ auto callback = [](emscripten::val event) {
+ if (event.isUndefined() || event.isNull()) {
+ qCDebug(qWasmMediaRecorder) << "dataavailable event is null";
+ return;
+ }
+ if (event["target"].isUndefined() || event["target"].isNull())
+ return;
+ if (event["data"].isUndefined() || event["data"].isNull())
+ return;
+ if (event["target"]["data-mediarecordercontext"].isUndefined()
+ || event["target"]["data-mediarecordercontext"].isNull())
+ return;
+
+ QWasmMediaRecorder *recorder = reinterpret_cast<QWasmMediaRecorder *>(
+ event["target"]["data-mediarecordercontext"].as<quintptr>());
+
+ if (recorder) {
+ const double timeCode =
+ event.hasOwnProperty("timecode") ? event["timecode"].as<double>() : 0;
+ recorder->audioDataAvailable(event["data"], timeCode);
+ }
+ };
+
+ m_mediaStreamDataAvailable.reset(
+ new qstdweb::EventCallback(m_mediaRecorder, "dataavailable", callback));
+
+ // stopped
+ auto stoppedCallback = [](emscripten::val event) {
+ if (event.isUndefined() || event.isNull()) {
+ qCDebug(qWasmMediaRecorder) << "event is null";
+ return;
+ }
+ qCDebug(qWasmMediaRecorder)
+ << "STOPPED: state changed"
+ << QString::fromStdString(event["target"]["state"].as<std::string>());
+
+ QWasmMediaRecorder *recorder = reinterpret_cast<QWasmMediaRecorder *>(
+ event["target"]["data-mediarecordercontext"].as<quintptr>());
+
+ if (recorder) {
+ recorder->m_isRecording = false;
+ recorder->m_durationTimer->invalidate();
+
+ emit recorder->stateChanged(recorder->state());
+ }
+ };
+
+ m_mediaStreamStopped.reset(
+ new qstdweb::EventCallback(m_mediaRecorder, "stop", stoppedCallback));
+
+ // error
+ auto errorCallback = [](emscripten::val theError) {
+ if (theError.isUndefined() || theError.isNull()) {
+ qCDebug(qWasmMediaRecorder) << "error is null";
+ return;
+ }
+ qCDebug(qWasmMediaRecorder)
+ << theError["code"].as<int>()
+ << QString::fromStdString(theError["message"].as<std::string>());
+
+ QWasmMediaRecorder *recorder = reinterpret_cast<QWasmMediaRecorder *>(
+ theError["target"]["data-mediarecordercontext"].as<quintptr>());
+
+ if (recorder) {
+ recorder->updateError(QMediaRecorder::ResourceError,
+ QString::fromStdString(theError["message"].as<std::string>()));
+ emit recorder->stateChanged(recorder->state());
+ }
+ };
+
+ m_mediaStreamError.reset(new qstdweb::EventCallback(m_mediaRecorder, "error", errorCallback));
+
+ // start
+ auto startCallback = [](emscripten::val event) {
+ if (event.isUndefined() || event.isNull()) {
+ qCDebug(qWasmMediaRecorder) << "event is null";
+ return;
+ }
+
+ qCDebug(qWasmMediaRecorder)
+ << "START: state changed"
+ << QString::fromStdString(event["target"]["state"].as<std::string>());
+
+ QWasmMediaRecorder *recorder = reinterpret_cast<QWasmMediaRecorder *>(
+ event["target"]["data-mediarecordercontext"].as<quintptr>());
+
+ if (recorder) {
+ recorder->m_isRecording = true;
+ recorder->m_durationTimer->start();
+ emit recorder->stateChanged(recorder->state());
+ }
+ };
+
+ m_mediaStreamStart.reset(new qstdweb::EventCallback(m_mediaRecorder, "start", startCallback));
+
+ // pause
+ auto pauseCallback = [](emscripten::val event) {
+ if (event.isUndefined() || event.isNull()) {
+ qCDebug(qWasmMediaRecorder) << "event is null";
+ return;
+ }
+
+ qCDebug(qWasmMediaRecorder)
+ << "pause: state changed"
+ << QString::fromStdString(event["target"]["state"].as<std::string>());
+
+ QWasmMediaRecorder *recorder = reinterpret_cast<QWasmMediaRecorder *>(
+ event["target"]["data-mediarecordercontext"].as<quintptr>());
+
+ if (recorder) {
+ // do not restart the duration timer here: that would discard the time
+ // recorded so far (accurate pause accounting is a known TODO)
+ emit recorder->stateChanged(recorder->state());
+ }
+ };
+
+ m_mediaStreamPause.reset(new qstdweb::EventCallback(m_mediaRecorder, "pause", pauseCallback));
+
+ // resume
+ auto resumeCallback = [](emscripten::val event) {
+ if (event.isUndefined() || event.isNull()) {
+ qCDebug(qWasmMediaRecorder) << "event is null";
+ return;
+ }
+
+ qCDebug(qWasmMediaRecorder)
+ << "resume: state changed"
+ << QString::fromStdString(event["target"]["state"].as<std::string>());
+
+ QWasmMediaRecorder *recorder = reinterpret_cast<QWasmMediaRecorder *>(
+ event["target"]["data-mediarecordercontext"].as<quintptr>());
+
+ if (recorder) {
+ recorder->m_isRecording = true;
+ // the duration timer is intentionally not restarted; see the pause handler
+ emit recorder->stateChanged(recorder->state());
+ }
+ };
+
+ m_mediaStreamResume.reset(
+ new qstdweb::EventCallback(m_mediaRecorder, "resume", resumeCallback));
+
+ // set up what options we can
+ if (hasCamera())
+ setTrackConstraints(m_mediaSettings, stream);
+ else
+ startStream();
+}
+
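+// The event handlers installed above recover the C++ object through a JS
+// expando property: setStream() stores quintptr(this) under
+// "data-mediarecordercontext" on the MediaRecorder, and each handler casts it
+// back. A condensed sketch of the pattern (assumes the wasm heap pointer
+// round-trips losslessly through a JS number):
+//
+//     recorder.set("ctx", emscripten::val(quintptr(this)));      // store
+//     auto *self = reinterpret_cast<QWasmMediaRecorder *>(
+//             event["target"]["ctx"].as<quintptr>());            // recover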
+void QWasmMediaRecorder::audioDataAvailable(emscripten::val blob, double timeCodeDifference)
+{
+ Q_UNUSED(timeCodeDifference)
+ if (blob.isUndefined() || blob.isNull()) {
+ qCDebug(qWasmMediaRecorder) << "blob is null";
+ return;
+ }
+
+ auto fileReader = std::make_shared<qstdweb::FileReader>();
+
+ fileReader->onError([=](emscripten::val theError) {
+ updateError(QMediaRecorder::ResourceError,
+ QString::fromStdString(theError["message"].as<std::string>()));
+ });
+
+ fileReader->onAbort([=](emscripten::val) {
+ updateError(QMediaRecorder::ResourceError, QStringLiteral("File read aborted"));
+ });
+
+ fileReader->onLoad([=](emscripten::val) {
+ if (fileReader->val().isNull() || fileReader->val().isUndefined())
+ return;
+ qstdweb::ArrayBuffer result = fileReader->result();
+ if (result.val().isNull() || result.val().isUndefined())
+ return;
+ QByteArray fileContent = qstdweb::Uint8Array(result).copyToQByteArray();
+
+ if (m_isRecording && !fileContent.isEmpty()) {
+ m_durationMs = m_durationTimer->elapsed();
+ if (m_outputTarget->isOpen())
+ m_outputTarget->write(fileContent, fileContent.length());
+ // we've read everything
+ emit durationChanged(m_durationMs);
+ qCDebug(qWasmMediaRecorder) << "duration changed" << m_durationMs;
+ }
+ });
+
+ fileReader->readAsArrayBuffer(qstdweb::Blob(blob));
+}
+
+// constraints are suggestions, as not all hardware supports all settings
+void QWasmMediaRecorder::setTrackConstraints(QMediaEncoderSettings &settings, emscripten::val stream)
+{
+ qCDebug(qWasmMediaRecorder) << Q_FUNC_INFO << settings.audioSampleRate();
+
+ if (stream.isUndefined() || stream.isNull()) {
+ qCDebug(qWasmMediaRecorder)<< Q_FUNC_INFO << "could not find MediaStream";
+ return;
+ }
+
+ emscripten::val navigator = emscripten::val::global("navigator");
+ emscripten::val mediaDevices = navigator["mediaDevices"];
+
+ // check which ones are supported
+ // emscripten::val allConstraints = mediaDevices.call<emscripten::val>("getSupportedConstraints");
+ // browsers only support some settings
+
+ emscripten::val videoParams = emscripten::val::object();
+ emscripten::val constraints = emscripten::val::object();
+
+ if (hasCamera()) {
+ if (settings.videoFrameRate() > 0)
+ videoParams.set("frameRate", emscripten::val(settings.videoFrameRate()));
+ if (settings.videoResolution().height() > 0)
+ videoParams.set("height",
+ emscripten::val(settings.videoResolution().height())); // viewportHeight?
+ if (settings.videoResolution().width() > 0)
+ videoParams.set("width", emscripten::val(settings.videoResolution().width()));
+
+ constraints.set("video", videoParams); // only video here
+ }
+
+ emscripten::val audioParams = emscripten::val::object();
+ if (settings.audioSampleRate() > 0)
+ audioParams.set("sampleRate", emscripten::val(settings.audioSampleRate())); // may not work
+ if (settings.audioBitRate() > 0)
+ audioParams.set("sampleSize", emscripten::val(settings.audioBitRate())); // may not work
+ if (settings.audioChannelCount() > 0)
+ audioParams.set("channelCount", emscripten::val(settings.audioChannelCount()));
+
+ constraints.set("audio", audioParams); // only audio here
+
+ if (hasCamera() && stream["active"].as<bool>()) {
+ emscripten::val videoTracks = stream.call<emscripten::val>("getVideoTracks");
+ if (videoTracks.isNull() || videoTracks.isUndefined()) {
+ qCDebug(qWasmMediaRecorder) << "no video tracks";
+ return;
+ }
+ if (videoTracks["length"].as<int>() > 0) {
+ // try to apply the video options
+ qstdweb::Promise::make(videoTracks[0], QStringLiteral("applyConstraints"),
+ { .thenFunc =
+ [this](emscripten::val result) {
+ Q_UNUSED(result)
+ startStream();
+ },
+ .catchFunc =
+ [this](emscripten::val theError) {
+ qWarning() << "setting video params failed error";
+ qCDebug(qWasmMediaRecorder)
+ << theError["code"].as<int>()
+ << QString::fromStdString(theError["message"].as<std::string>());
+ updateError(QMediaRecorder::ResourceError,
+ QString::fromStdString(theError["message"].as<std::string>()));
+ } },
+ constraints);
+ }
+ }
+}
+
+// this starts the recording stream
+void QWasmMediaRecorder::startStream()
+{
+ if (m_mediaRecorder.isUndefined() || m_mediaRecorder.isNull()) {
+ qCDebug(qWasmMediaRecorder) << Q_FUNC_INFO << "could not find MediaStream";
+ return;
+ }
+ qCDebug(qWasmMediaRecorder) << "m_mediaRecorder state:" <<
+ QString::fromStdString(m_mediaRecorder["state"].as<std::string>());
+
+ /* MediaRecorder.start() can optionally be passed a timeslice in milliseconds.
+ * If specified, the media is delivered in separate dataavailable chunks of
+ * that duration, rather than as a single large chunk when recording stops. */
+ constexpr int sliceSizeInMs = 250; // TODO find what slice size works best
+ m_mediaRecorder.call<void>("start", emscripten::val(sliceSizeInMs));
+
+ emit stateChanged(state());
+}
+
+void QWasmMediaRecorder::setUpFileSink()
+{
+ QString targetFileName = outputLocation().toLocalFile();
+ const QString suffix = m_mediaSettings.mimeType().preferredSuffix();
+ if (targetFileName.isEmpty()) {
+ targetFileName = "/home/web_user/tmp." + suffix;
+ QPlatformMediaRecorder::setOutputLocation(targetFileName);
+ }
+
+ // release any output target left over from a previous recording
+ if (m_outputTarget) {
+ if (m_outputTarget->isOpen())
+ m_outputTarget->close();
+ m_outputTarget->deleteLater();
+ }
+
+ m_outputTarget = new QFile(targetFileName, this);
+ if (!m_outputTarget->open(QIODevice::WriteOnly)) {
+ qWarning() << "target file" << targetFileName << "is not writable";
+ return;
+ }
+}
+
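+// The default output target lives in Emscripten's in-memory filesystem
+// (MEMFS), so the finished recording can be read back from the same page; a
+// hypothetical retrieval sketch:
+//
+//     QFile recording(recorder.actualLocation().toLocalFile());
+//     if (recording.open(QIODevice::ReadOnly))
+//         QByteArray media = recording.readAll(); // hand off for download etc.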
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/wasm/mediacapture/qwasmmediarecorder_p.h b/src/plugins/multimedia/wasm/mediacapture/qwasmmediarecorder_p.h
new file mode 100644
index 000000000..c325e411b
--- /dev/null
+++ b/src/plugins/multimedia/wasm/mediacapture/qwasmmediarecorder_p.h
@@ -0,0 +1,89 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWASMMEDIARECORDER_H
+#define QWASMMEDIARECORDER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediarecorder_p.h>
+#include <private/qplatformmediacapture_p.h>
+#include <QtCore/qglobal.h>
+#include <QtCore/qloggingcategory.h>
+#include <QElapsedTimer>
+
+#include <emscripten.h>
+#include <emscripten/val.h>
+#include <emscripten/bind.h>
+#include <private/qstdweb_p.h>
+
+QT_BEGIN_NAMESPACE
+
+Q_DECLARE_LOGGING_CATEGORY(qWasmMediaRecorder)
+
+class QWasmMediaCaptureSession;
+class QIODevice;
+
+class QWasmMediaRecorder final : public QObject, public QPlatformMediaRecorder
+{
+ Q_OBJECT
+public:
+ explicit QWasmMediaRecorder(QMediaRecorder *parent);
+ ~QWasmMediaRecorder() final;
+
+ bool isLocationWritable(const QUrl &location) const override;
+ QMediaRecorder::RecorderState state() const override;
+ qint64 duration() const override;
+ void record(QMediaEncoderSettings &settings) override;
+ void pause() override;
+ void resume() override;
+ void stop() override;
+
+ void setCaptureSession(QPlatformMediaCaptureSession *session);
+
+private:
+ bool hasCamera() const;
+ void startAudioRecording();
+ void setStream(emscripten::val stream);
+ void startStream();
+ void setTrackConstraints(QMediaEncoderSettings &settings, emscripten::val stream);
+ void initUserMedia();
+ void audioDataAvailable(emscripten::val blob, double timeCodeDifference);
+ void setUpFileSink();
+
+ emscripten::val m_mediaRecorder = emscripten::val::undefined();
+ emscripten::val m_mediaStream = emscripten::val::undefined();
+
+ QWasmMediaCaptureSession *m_session = nullptr;
+ QMediaEncoderSettings m_mediaSettings;
+ QIODevice *m_outputTarget = nullptr;
+ QScopedPointer<qstdweb::EventCallback> m_mediaStreamDataAvailable;
+ QScopedPointer<qstdweb::EventCallback> m_mediaStreamStopped;
+ QScopedPointer<qstdweb::EventCallback> m_mediaStreamError;
+ QScopedPointer<qstdweb::EventCallback> m_mediaStreamStart;
+ QScopedPointer<qstdweb::EventCallback> m_mediaStreamPause;
+ QScopedPointer<qstdweb::EventCallback> m_mediaStreamResume;
+
+ qint64 m_durationMs = 0;
+ bool m_isRecording = false;
+ QScopedPointer<QElapsedTimer> m_durationTimer;
+};
+
+QT_END_NAMESPACE
+
+#endif // QWASMMEDIARECORDER_H
diff --git a/src/plugins/multimedia/wasm/mediaplayer/qwasmmediaplayer.cpp b/src/plugins/multimedia/wasm/mediaplayer/qwasmmediaplayer.cpp
new file mode 100644
index 000000000..75886b7c2
--- /dev/null
+++ b/src/plugins/multimedia/wasm/mediaplayer/qwasmmediaplayer.cpp
@@ -0,0 +1,475 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwasmmediaplayer_p.h"
+#include <common/qwasmvideooutput_p.h>
+#include <common/qwasmaudiooutput_p.h>
+#include "qaudiooutput.h"
+
+#include <QtCore/qloggingcategory.h>
+#include <QUuid>
+#include <QtGlobal>
+#include <QMimeDatabase>
+#include <QFileInfo>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(lcMediaPlayer, "qt.multimedia.wasm.mediaplayer");
+
+QWasmMediaPlayer::QWasmMediaPlayer(QMediaPlayer *parent)
+ : QPlatformMediaPlayer(parent),
+ m_videoOutput(new QWasmVideoOutput),
+ m_State(QWasmMediaPlayer::Idle)
+{
+ qCDebug(lcMediaPlayer) << Q_FUNC_INFO << this;
+}
+
+QWasmMediaPlayer::~QWasmMediaPlayer()
+{
+ delete m_videoOutput;
+}
+
+void QWasmMediaPlayer::initVideo()
+{
+ m_videoOutput->setVideoMode(QWasmVideoOutput::VideoOutput);
+ QUuid videoElementId = QUuid::createUuid();
+ qCDebug(lcMediaPlayer) << Q_FUNC_INFO << "videoElementId"<< videoElementId << this;
+
+ m_videoOutput->createVideoElement(videoElementId.toString(QUuid::WithoutBraces).toStdString());
+ m_videoOutput->doElementCallbacks();
+ m_videoOutput->createOffscreenElement(QSize(1280, 720));
+ m_videoOutput->updateVideoElementGeometry(QRect(0, 0, 1280, 720)); // initial size 720p standard
+
+ connect(m_videoOutput, &QWasmVideoOutput::bufferingChanged, this,
+ &QWasmMediaPlayer::bufferingChanged);
+ connect(m_videoOutput, &QWasmVideoOutput::errorOccured, this,
+ &QWasmMediaPlayer::errorOccured);
+ connect(m_videoOutput, &QWasmVideoOutput::stateChanged, this,
+ &QWasmMediaPlayer::mediaStateChanged);
+ connect(m_videoOutput, &QWasmVideoOutput::progressChanged, this,
+ &QWasmMediaPlayer::setPositionChanged);
+ connect(m_videoOutput, &QWasmVideoOutput::durationChanged, this,
+ &QWasmMediaPlayer::setDurationChanged);
+ connect(m_videoOutput, &QWasmVideoOutput::sizeChange, this,
+ &QWasmMediaPlayer::videoSizeChanged);
+ connect(m_videoOutput, &QWasmVideoOutput::readyChanged, this,
+ &QWasmMediaPlayer::videoOutputReady);
+ connect(m_videoOutput, &QWasmVideoOutput::statusChanged, this,
+ &QWasmMediaPlayer::onMediaStatusChanged);
+ connect(m_videoOutput, &QWasmVideoOutput::metaDataLoaded, this,
+ &QWasmMediaPlayer::videoMetaDataChanged);
+
+ setVideoAvailable(true);
+}
+
+void QWasmMediaPlayer::initAudio()
+{
+ connect(m_audioOutput->q, &QAudioOutput::deviceChanged,
+ this, &QWasmMediaPlayer::updateAudioDevice);
+ connect(m_audioOutput->q, &QAudioOutput::volumeChanged,
+ this, &QWasmMediaPlayer::volumeChanged);
+ connect(m_audioOutput->q, &QAudioOutput::mutedChanged,
+ this, &QWasmMediaPlayer::mutedChanged);
+
+ connect(m_audioOutput, &QWasmAudioOutput::bufferingChanged, this,
+ &QWasmMediaPlayer::bufferingChanged);
+ connect(m_audioOutput, &QWasmAudioOutput::errorOccured, this,
+ &QWasmMediaPlayer::errorOccured);
+ connect(m_audioOutput, &QWasmAudioOutput::progressChanged, this,
+ &QWasmMediaPlayer::setPositionChanged);
+ connect(m_audioOutput, &QWasmAudioOutput::durationChanged, this,
+ &QWasmMediaPlayer::setDurationChanged);
+ connect(m_audioOutput, &QWasmAudioOutput::statusChanged, this,
+ &QWasmMediaPlayer::onMediaStatusChanged);
+ connect(m_audioOutput, &QWasmAudioOutput::stateChanged, this,
+ &QWasmMediaPlayer::mediaStateChanged);
+ setAudioAvailable(true);
+}
+
+qint64 QWasmMediaPlayer::duration() const
+{
+ return m_videoOutput->getDuration();
+}
+
+qint64 QWasmMediaPlayer::position() const
+{
+ if (mediaStatus() == QMediaPlayer::EndOfMedia)
+ return duration();
+
+ if (m_videoAvailable)
+ return m_videoOutput->getCurrentPosition();
+
+ return 0;
+}
+
+void QWasmMediaPlayer::setPosition(qint64 position)
+{
+ if (!isSeekable())
+ return;
+
+ const qint64 seekPosition = qMin(position, qint64(INT_MAX));
+
+ if (seekPosition == this->position())
+ return;
+
+ if (mediaStatus() == QMediaPlayer::EndOfMedia)
+ setMediaStatus(QMediaPlayer::LoadedMedia);
+
+ if (m_videoAvailable) {
+ m_videoOutput->seekTo(seekPosition);
+ return;
+ }
+
+ emit positionChanged(seekPosition);
+}
+
+void QWasmMediaPlayer::volumeChanged(float gain)
+{
+ if (m_State != QWasmMediaPlayer::Started)
+ return;
+
+ if (m_videoAvailable)
+ m_videoOutput->setVolume(gain);
+}
+
+void QWasmMediaPlayer::mutedChanged(bool muted)
+{
+ if (m_State != QWasmMediaPlayer::Started)
+ return;
+
+ if (m_videoAvailable)
+ m_videoOutput->setMuted(muted);
+}
+
+float QWasmMediaPlayer::bufferProgress() const
+{
+ return qBound(0.0, (m_bufferPercent * .01), 1.0);
+}
+
+bool QWasmMediaPlayer::isAudioAvailable() const
+{
+ return m_audioAvailable;
+}
+
+bool QWasmMediaPlayer::isVideoAvailable() const
+{
+ return m_videoAvailable;
+}
+
+QMediaTimeRange QWasmMediaPlayer::availablePlaybackRanges() const
+{
+ return m_availablePlaybackRange;
+}
+
+void QWasmMediaPlayer::updateAvailablePlaybackRanges()
+{
+ if (m_buffering) {
+ const qint64 pos = position();
+ const qint64 end = duration() * m_bufferPercent / 100; // multiply first to limit truncation
+ m_availablePlaybackRange.addInterval(pos, end);
+ } else if (isSeekable()) {
+ m_availablePlaybackRange = QMediaTimeRange(0, duration());
+ } else {
+ m_availablePlaybackRange = QMediaTimeRange();
+ }
+}
+
+qreal QWasmMediaPlayer::playbackRate() const
+{
+ if (m_State != QWasmMediaPlayer::Started)
+ return 0;
+
+ if (isVideoAvailable())
+ return m_videoOutput->playbackRate();
+ return 0;
+}
+
+void QWasmMediaPlayer::setPlaybackRate(qreal rate)
+{
+ if (m_State != QWasmMediaPlayer::Started || !isVideoAvailable())
+ return;
+
+ m_videoOutput->setPlaybackRate(rate);
+ emit playbackRateChanged(rate);
+}
+
+QUrl QWasmMediaPlayer::media() const
+{
+ return m_mediaContent;
+}
+
+const QIODevice *QWasmMediaPlayer::mediaStream() const
+{
+ return m_mediaStream;
+}
+
+void QWasmMediaPlayer::setMedia(const QUrl &mediaContent, QIODevice *stream)
+{
+ qCDebug(lcMediaPlayer) << Q_FUNC_INFO << mediaContent << stream;
+ QMimeDatabase db;
+
+ if (mediaContent.isEmpty()) {
+ if (stream) {
+ m_mediaStream = stream;
+ qCDebug(lcMediaPlayer) << db.mimeTypeForData(stream).name();
+ if (db.mimeTypeForData(stream).name().contains("audio")) {
+ setAudioAvailable(true);
+ m_audioOutput->setSource(m_mediaStream);
+ } else { // treat octet-stream as video
+ setVideoAvailable(true);
+ m_videoOutput->setSource(m_mediaStream);
+ }
+ } else {
+ setMediaStatus(QMediaPlayer::NoMedia);
+ }
+ } else {
+ QString sourceFile = mediaContent.toLocalFile();
+ qCDebug(lcMediaPlayer) << db.mimeTypeForFile(QFileInfo(sourceFile)).name();
+ if (db.mimeTypeForFile(QFileInfo(sourceFile)).name().contains("audio")) {
+ setAudioAvailable(true);
+ m_audioOutput->setSource(mediaContent);
+ } else { // treat octet-stream as video
+ setVideoAvailable(true);
+ m_videoOutput->setSource(mediaContent);
+ }
+ }
+
+ resetBufferingProgress();
+}
+
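+// Source-selection sketch (hypothetical usage): a URL goes through the
+// mime-type probe above, while a QIODevice takes the stream branch:
+//
+//     QMediaPlayer player;
+//     player.setSource(QUrl::fromLocalFile("/home/web_user/clip.mp4")); // video branch
+//     // or: player.setSourceDevice(&ioDevice);                        // stream branch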
+void QWasmMediaPlayer::setVideoSink(QVideoSink *sink)
+{
+ if (m_videoSink == sink)
+ return;
+
+ m_videoSink = sink;
+
+ if (!m_videoSink)
+ return;
+
+ initVideo();
+ m_videoOutput->setSurface(sink);
+ setVideoAvailable(true);
+ if (isAudioAvailable() && m_audioOutput)
+ m_audioOutput->setVideoElement(m_videoOutput->currentVideoElement());
+}
+
+void QWasmMediaPlayer::setAudioOutput(QPlatformAudioOutput *output)
+{
+ if (m_audioOutput == output)
+ return;
+
+ if (m_audioOutput)
+ m_audioOutput->q->disconnect(this);
+ m_audioOutput = static_cast<QWasmAudioOutput *>(output);
+ setAudioAvailable(true);
+}
+
+void QWasmMediaPlayer::updateAudioDevice()
+{
+ if (m_audioOutput) {
+ m_audioOutput->setAudioDevice(m_audioOutput->q->device());
+ }
+}
+
+void QWasmMediaPlayer::play()
+{
+ resetCurrentLoop();
+
+ if (isVideoAvailable()) {
+ m_videoOutput->start();
+ m_playWhenReady = true;
+ } else {
+ initAudio(); // TODO guard against re-connecting the same signals on every play()
+ if (isAudioAvailable()) {
+ m_audioOutput->start();
+ }
+ }
+
+#ifdef DEBUG_AUDIOENGINE
+ QAudioEnginePrivate::checkNoError("play");
+#endif
+}
+
+void QWasmMediaPlayer::pause()
+{
+ // the state enum values are sequential, not flags, so test each explicitly
+ if (m_State != QWasmMediaPlayer::Started && m_State != QWasmMediaPlayer::Paused
+ && m_State != QWasmMediaPlayer::PlaybackCompleted) {
+ return;
+ }
+ if (isVideoAvailable()) {
+ m_videoOutput->pause();
+ } else {
+ m_audioOutput->pause();
+ stateChanged(QMediaPlayer::PausedState);
+ }
+}
+
+void QWasmMediaPlayer::stop()
+{
+ m_playWhenReady = false;
+
+ if (m_State == QWasmMediaPlayer::Idle || m_State == QWasmMediaPlayer::PlaybackCompleted
+ || m_State == QWasmMediaPlayer::Stopped) {
+ qCDebug(lcMediaPlayer) << Q_FUNC_INFO << "already stopped";
+ return;
+ }
+
+ if (isVideoAvailable())
+ m_videoOutput->stop();
+ else
+ m_audioOutput->stop();
+}
+
+bool QWasmMediaPlayer::isSeekable() const
+{
+ return isVideoAvailable() && m_videoOutput->isVideoSeekable();
+}
+
+void QWasmMediaPlayer::errorOccured(qint32 code, const QString &message)
+{
+ QString errorString;
+ QMediaPlayer::Error error = QMediaPlayer::ResourceError;
+
+ switch (code) {
+ case QWasmMediaNetworkState::NetworkEmpty: // no data yet
+ case QWasmMediaNetworkState::NetworkIdle:
+ case QWasmMediaNetworkState::NetworkLoading:
+ return; // transient network states, not errors
+ case QWasmMediaNetworkState::NetworkNoSource: // no usable source
+ error = QMediaPlayer::ResourceError;
+ errorString = message;
+ break;
+ }
+
+ emit QPlatformMediaPlayer::error(error, errorString);
+}
+
+void QWasmMediaPlayer::bufferingChanged(qint32 percent)
+{
+ m_buffering = percent != 100;
+ m_bufferPercent = percent;
+
+ updateAvailablePlaybackRanges();
+ emit bufferProgressChanged(bufferProgress());
+}
+
+void QWasmMediaPlayer::videoSizeChanged(qint32 width, qint32 height)
+{
+ QSize newSize(width, height);
+
+ if (width == 0 || height == 0 || newSize == m_videoSize)
+ return;
+
+ m_videoSize = newSize;
+}
+
+void QWasmMediaPlayer::mediaStateChanged(QWasmMediaPlayer::QWasmMediaPlayerState state)
+{
+ m_State = state;
+ QMediaPlayer::PlaybackState mediaPlayerState;
+ switch (m_State) {
+ case QWasmMediaPlayer::Started:
+ mediaPlayerState = QMediaPlayer::PlayingState;
+ break;
+ case QWasmMediaPlayer::Paused:
+ mediaPlayerState = QMediaPlayer::PausedState;
+ break;
+ default:
+ mediaPlayerState = QMediaPlayer::StoppedState;
+ break;
+ }
+
+ QPlatformMediaPlayer::stateChanged(mediaPlayerState);
+}
+
+int QWasmMediaPlayer::trackCount(TrackType trackType)
+{
+ Q_UNUSED(trackType)
+ // TODO QTBUG-108517
+ return 0; // tracks.count();
+}
+
+void QWasmMediaPlayer::setPositionChanged(qint64 position)
+{
+ QPlatformMediaPlayer::positionChanged(position);
+}
+
+void QWasmMediaPlayer::setDurationChanged(qint64 duration)
+{
+ QPlatformMediaPlayer::durationChanged(duration);
+}
+
+void QWasmMediaPlayer::videoOutputReady(bool ready)
+{
+ setVideoAvailable(ready);
+
+ if (m_playWhenReady && m_videoOutput->isReady())
+ play();
+}
+
+void QWasmMediaPlayer::setMediaStatus(QMediaPlayer::MediaStatus status)
+{
+ mediaStatusChanged(status);
+
+ switch (status) {
+ case QMediaPlayer::NoMedia:
+ case QMediaPlayer::InvalidMedia:
+ emit durationChanged(0);
+ break;
+ case QMediaPlayer::EndOfMedia:
+ setPositionChanged(position());
+ break;
+ default:
+ break;
+ }
+}
+
+void QWasmMediaPlayer::setAudioAvailable(bool available)
+{
+ if (m_audioAvailable == available)
+ return;
+
+ m_audioAvailable = available;
+ emit audioAvailableChanged(m_audioAvailable);
+}
+
+void QWasmMediaPlayer::setVideoAvailable(bool available)
+{
+ if (m_videoAvailable == available)
+ return;
+
+ if (!available)
+ m_videoSize = QSize();
+
+ m_videoAvailable = available;
+ emit videoAvailableChanged(m_videoAvailable);
+}
+
+void QWasmMediaPlayer::resetBufferingProgress()
+{
+ m_buffering = false;
+ m_bufferPercent = 0;
+ m_availablePlaybackRange = QMediaTimeRange();
+}
+
+void QWasmMediaPlayer::onMediaStatusChanged(QMediaPlayer::MediaStatus status)
+{
+ setMediaStatus(status);
+}
+
+void QWasmMediaPlayer::videoMetaDataChanged()
+{
+ metaDataChanged();
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qwasmmediaplayer_p.cpp"
diff --git a/src/plugins/multimedia/wasm/mediaplayer/qwasmmediaplayer_p.h b/src/plugins/multimedia/wasm/mediaplayer/qwasmmediaplayer_p.h
new file mode 100644
index 000000000..9269ecdb6
--- /dev/null
+++ b/src/plugins/multimedia/wasm/mediaplayer/qwasmmediaplayer_p.h
@@ -0,0 +1,124 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWASMMEDIAPLAYER_H
+#define QWASMMEDIAPLAYER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qglobal.h>
+#include <private/qplatformmediaplayer_p.h>
+#include <qsize.h>
+#include <qurl.h>
+#include <QtCore/qpointer.h>
+
+QT_BEGIN_NAMESPACE
+
+class QWasmAudioOutput;
+class QWasmVideoOutput;
+
+class QWasmMediaPlayer : public QObject, public QPlatformMediaPlayer
+{
+ Q_OBJECT
+
+public:
+ explicit QWasmMediaPlayer(QMediaPlayer *parent = nullptr);
+ ~QWasmMediaPlayer() override;
+
+ enum QWasmMediaPlayerState {
+ Error,
+ Idle,
+ Uninitialized,
+ Preparing,
+ Prepared,
+ Started,
+ Paused,
+ Stopped,
+ PlaybackCompleted
+ };
+ Q_ENUM(QWasmMediaPlayerState)
+
+ enum QWasmMediaNetworkState { NetworkEmpty = 0, NetworkIdle, NetworkLoading, NetworkNoSource };
+ Q_ENUM(QWasmMediaNetworkState)
+
+ qint64 duration() const override;
+ qint64 position() const override;
+ float bufferProgress() const override;
+ bool isAudioAvailable() const override;
+ bool isVideoAvailable() const override;
+ QMediaTimeRange availablePlaybackRanges() const override;
+ qreal playbackRate() const override;
+ void setPlaybackRate(qreal rate) override;
+ QUrl media() const override;
+ const QIODevice *mediaStream() const override;
+ void setMedia(const QUrl &mediaContent, QIODevice *stream) override;
+ void setVideoSink(QVideoSink *surface) override;
+ void setAudioOutput(QPlatformAudioOutput *output) override;
+ void setPosition(qint64 position) override;
+ void play() override;
+ void pause() override;
+ void stop() override;
+ bool isSeekable() const override;
+ int trackCount(TrackType trackType) override;
+
+ void updateAudioDevice();
+
+private Q_SLOTS:
+ void volumeChanged(float volume);
+ void mutedChanged(bool muted);
+ void videoOutputReady(bool ready);
+ void errorOccured(qint32 code, const QString &message);
+ void bufferingChanged(qint32 percent);
+ void videoSizeChanged(qint32 width, qint32 height);
+ void mediaStateChanged(QWasmMediaPlayer::QWasmMediaPlayerState state);
+ void setPositionChanged(qint64 position);
+ void setDurationChanged(qint64 duration);
+ void videoMetaDataChanged();
+
+ void onMediaStatusChanged(QMediaPlayer::MediaStatus status);
+
+private:
+ void setMediaStatus(QMediaPlayer::MediaStatus status);
+ void setAudioAvailable(bool available);
+ void setVideoAvailable(bool available);
+ void updateAvailablePlaybackRanges();
+ void resetBufferingProgress();
+
+ void initVideo();
+ void initAudio();
+
+ friend class StateChangeNotifier;
+
+ QPointer<QWasmVideoOutput> m_videoOutput;
+ QWasmAudioOutput *m_audioOutput = nullptr;
+
+ QUrl m_mediaContent;
+ QIODevice *m_mediaStream = nullptr;
+
+ QVideoSink *m_videoSink = nullptr;
+ int m_bufferPercent = -1;
+ bool m_audioAvailable = false;
+ bool m_videoAvailable = false;
+ QSize m_videoSize;
+ bool m_buffering = false;
+ QMediaTimeRange m_availablePlaybackRange;
+ QWasmMediaPlayerState m_State = Idle;
+
+ bool m_playWhenReady = false;
+};
+
+QT_END_NAMESPACE
+
+#endif // QWASMMEDIAPLAYER_H
diff --git a/src/plugins/multimedia/wasm/mediaplayer/qwasmvideosink.cpp b/src/plugins/multimedia/wasm/mediaplayer/qwasmvideosink.cpp
new file mode 100644
index 000000000..b6fe0e8e0
--- /dev/null
+++ b/src/plugins/multimedia/wasm/mediaplayer/qwasmvideosink.cpp
@@ -0,0 +1,26 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwasmvideosink_p.h"
+
+#include <QtGui/rhi/qrhi.h>
+
+QT_BEGIN_NAMESPACE
+
+QWasmVideoSink::QWasmVideoSink(QVideoSink *parent)
+ : QPlatformVideoSink(parent)
+{
+}
+
+void QWasmVideoSink::setRhi(QRhi *rhi)
+{
+ if (rhi && rhi->backend() != QRhi::OpenGLES2)
+ rhi = nullptr;
+ if (m_rhi == rhi)
+ return;
+ m_rhi = rhi;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qwasmvideosink_p.cpp"
diff --git a/src/plugins/multimedia/wasm/mediaplayer/qwasmvideosink_p.h b/src/plugins/multimedia/wasm/mediaplayer/qwasmvideosink_p.h
new file mode 100644
index 000000000..5f2885249
--- /dev/null
+++ b/src/plugins/multimedia/wasm/mediaplayer/qwasmvideosink_p.h
@@ -0,0 +1,40 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#ifndef QWASMVIDEOSINK_H
+#define QWASMVIDEOSINK_H
+
+#include <private/qplatformvideosink_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QVideoSink;
+class QRhi;
+
+class QWasmVideoSink : public QPlatformVideoSink
+{
+ Q_OBJECT
+
+public:
+ explicit QWasmVideoSink(QVideoSink *parent = nullptr);
+
+ void setRhi(QRhi *) override;
+
+private:
+ QRhi *m_rhi = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif // QWASMVIDEOSINK_H
diff --git a/src/plugins/multimedia/wasm/qwasmmediaintegration.cpp b/src/plugins/multimedia/wasm/qwasmmediaintegration.cpp
new file mode 100644
index 000000000..effc194a4
--- /dev/null
+++ b/src/plugins/multimedia/wasm/qwasmmediaintegration.cpp
@@ -0,0 +1,109 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwasmmediaintegration_p.h"
+#include <QLoggingCategory>
+
+#include <QCamera>
+#include <QCameraDevice>
+
+#include <private/qplatformmediaformatinfo_p.h>
+#include <private/qplatformmediaplugin_p.h>
+#include <private/qplatformmediadevices_p.h>
+#include <private/qplatformvideodevices_p.h>
+
+#include "mediaplayer/qwasmmediaplayer_p.h"
+#include "mediaplayer/qwasmvideosink_p.h"
+#include "qwasmaudioinput_p.h"
+#include "common/qwasmaudiooutput_p.h"
+
+#include "mediacapture/qwasmmediacapturesession_p.h"
+#include "mediacapture/qwasmmediarecorder_p.h"
+#include "mediacapture/qwasmcamera_p.h"
+#include "mediacapture/qwasmmediacapturesession_p.h"
+#include "mediacapture/qwasmimagecapture_p.h"
+
+QT_BEGIN_NAMESPACE
+
+
+class QWasmMediaPlugin : public QPlatformMediaPlugin
+{
+ Q_OBJECT
+ Q_PLUGIN_METADATA(IID QPlatformMediaPlugin_iid FILE "wasm.json")
+
+public:
+ QWasmMediaPlugin()
+ : QPlatformMediaPlugin()
+ {}
+
+ QPlatformMediaIntegration *create(const QString &name) override
+ {
+ if (name == u"wasm")
+ return new QWasmMediaIntegration;
+ return nullptr;
+ }
+};
+
+QWasmMediaIntegration::QWasmMediaIntegration()
+ : QPlatformMediaIntegration(QLatin1String("wasm")) { }
+
+QMaybe<QPlatformMediaPlayer *> QWasmMediaIntegration::createPlayer(QMediaPlayer *player)
+{
+ return new QWasmMediaPlayer(player);
+}
+
+QMaybe<QPlatformVideoSink *> QWasmMediaIntegration::createVideoSink(QVideoSink *sink)
+{
+ return new QWasmVideoSink(sink);
+}
+
+QMaybe<QPlatformAudioInput *> QWasmMediaIntegration::createAudioInput(QAudioInput *audioInput)
+{
+ return new QWasmAudioInput(audioInput);
+}
+
+QMaybe<QPlatformAudioOutput *> QWasmMediaIntegration::createAudioOutput(QAudioOutput *q)
+{
+ return new QWasmAudioOutput(q);
+}
+
+QPlatformMediaFormatInfo *QWasmMediaIntegration::createFormatInfo()
+{
+ // TODO: create custom implementation
+ return new QPlatformMediaFormatInfo;
+}
+
+QPlatformVideoDevices *QWasmMediaIntegration::createVideoDevices()
+{
+ return new QWasmCameraDevices(this);
+}
+
+QMaybe<QPlatformMediaCaptureSession *> QWasmMediaIntegration::createCaptureSession()
+{
+ return new QWasmMediaCaptureSession();
+}
+
+QMaybe<QPlatformMediaRecorder *> QWasmMediaIntegration::createRecorder(QMediaRecorder *recorder)
+{
+ return new QWasmMediaRecorder(recorder);
+}
+
+QMaybe<QPlatformCamera *> QWasmMediaIntegration::createCamera(QCamera *camera)
+{
+ return new QWasmCamera(camera);
+}
+
+QMaybe<QPlatformImageCapture *>
+QWasmMediaIntegration::createImageCapture(QImageCapture *imageCapture)
+{
+ return new QWasmImageCapture(imageCapture);
+}
+
+QList<QCameraDevice> QWasmMediaIntegration::videoInputs()
+{
+ return videoDevices()->videoDevices();
+}
+
+QT_END_NAMESPACE
+
+#include "qwasmmediaintegration.moc"
diff --git a/src/plugins/multimedia/wasm/qwasmmediaintegration_p.h b/src/plugins/multimedia/wasm/qwasmmediaintegration_p.h
new file mode 100644
index 000000000..d946c1854
--- /dev/null
+++ b/src/plugins/multimedia/wasm/qwasmmediaintegration_p.h
@@ -0,0 +1,50 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWASMMEDIAINTEGRATION_H
+#define QWASMMEDIAINTEGRATION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediaintegration_p.h>
+
+#include <private/qwasmmediadevices_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QWasmMediaDevices;
+
+class QWasmMediaIntegration : public QPlatformMediaIntegration
+{
+public:
+ QWasmMediaIntegration();
+
+ QMaybe<QPlatformMediaPlayer *> createPlayer(QMediaPlayer *player) override;
+ QMaybe<QPlatformVideoSink *> createVideoSink(QVideoSink *sink) override;
+
+ QMaybe<QPlatformAudioInput *> createAudioInput(QAudioInput *audioInput) override;
+ QMaybe<QPlatformAudioOutput *> createAudioOutput(QAudioOutput *q) override;
+
+ QMaybe<QPlatformMediaCaptureSession *> createCaptureSession() override;
+ QMaybe<QPlatformCamera *> createCamera(QCamera *camera) override;
+ QMaybe<QPlatformMediaRecorder *> createRecorder(QMediaRecorder *recorder) override;
+ QMaybe<QPlatformImageCapture *> createImageCapture(QImageCapture *imageCapture) override;
+ QList<QCameraDevice> videoInputs() override;
+
+protected:
+ QPlatformMediaFormatInfo *createFormatInfo() override;
+ QPlatformVideoDevices *createVideoDevices() override;
+};
+
+QT_END_NAMESPACE
+
+#endif // QWASMMEDIAINTEGRATION_H
diff --git a/src/plugins/multimedia/wasm/wasm.json b/src/plugins/multimedia/wasm/wasm.json
new file mode 100644
index 000000000..02335aebe
--- /dev/null
+++ b/src/plugins/multimedia/wasm/wasm.json
@@ -0,0 +1,5 @@
+{
+ "Keys": [
+ "wasm"
+ ]
+}
diff --git a/src/plugins/multimedia/windows/CMakeLists.txt b/src/plugins/multimedia/windows/CMakeLists.txt
new file mode 100644
index 000000000..963081e0a
--- /dev/null
+++ b/src/plugins/multimedia/windows/CMakeLists.txt
@@ -0,0 +1,69 @@
+# Copyright (C) 2022 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+qt_internal_add_plugin(QWindowsMediaPlugin
+ OUTPUT_NAME windowsmediaplugin
+ PLUGIN_TYPE multimedia
+ SOURCES
+ common/mfmetadata.cpp common/mfmetadata_p.h
+ decoder/mfaudiodecodercontrol.cpp decoder/mfaudiodecodercontrol_p.h
+ decoder/mfdecodersourcereader.cpp decoder/mfdecodersourcereader_p.h
+ evr/evrcustompresenter.cpp evr/evrcustompresenter_p.h
+ evr/evrd3dpresentengine.cpp evr/evrd3dpresentengine_p.h
+ evr/evrhelpers.cpp evr/evrhelpers_p.h
+ evr/evrvideowindowcontrol.cpp evr/evrvideowindowcontrol_p.h
+ mfstream.cpp mfstream_p.h
+ player/mfactivate.cpp player/mfactivate_p.h
+ player/mfevrvideowindowcontrol.cpp player/mfevrvideowindowcontrol_p.h
+ player/mfplayercontrol.cpp player/mfplayercontrol_p.h
+ player/mfplayersession.cpp player/mfplayersession_p.h
+ player/mfvideorenderercontrol.cpp player/mfvideorenderercontrol_p.h
+ mediacapture/qwindowscamera.cpp
+ mediacapture/qwindowscamera_p.h
+ mediacapture/qwindowsimagecapture.cpp
+ mediacapture/qwindowsimagecapture_p.h
+ mediacapture/qwindowsmediacapture.cpp
+ mediacapture/qwindowsmediacapture_p.h
+ mediacapture/qwindowsmediadevicereader.cpp
+ mediacapture/qwindowsmediadevicereader_p.h
+ mediacapture/qwindowsmediadevicesession.cpp
+ mediacapture/qwindowsmediadevicesession_p.h
+ mediacapture/qwindowsmediaencoder.cpp
+ mediacapture/qwindowsmediaencoder_p.h
+ qwindowsformatinfo.cpp qwindowsformatinfo_p.h
+ qwindowsintegration.cpp qwindowsintegration_p.h
+ qwindowsvideodevices.cpp qwindowsvideodevices_p.h
+ sourceresolver.cpp sourceresolver_p.h
+ INCLUDE_DIRECTORIES
+ audio
+ common
+ decoder
+ evr
+ player
+ mediacapture
+ LIBRARIES
+ Qt::MultimediaPrivate
+ Qt::CorePrivate
+ uuid
+ WMF::WMF
+ d3d9
+ dxva2
+ evr
+ gdi32
+ ksuser
+ mf
+ mfcore
+ mfplat
+ mfreadwrite
+ mfuuid
+ ole32
+ oleaut32
+ propsys
+ shlwapi
+ strmiids
+ amstrmid
+ user32
+ winmm
+ wmcodecdspuuid
+)
+
diff --git a/src/plugins/multimedia/windows/common/mfmetadata.cpp b/src/plugins/multimedia/windows/common/mfmetadata.cpp
new file mode 100644
index 000000000..cc8c425e3
--- /dev/null
+++ b/src/plugins/multimedia/windows/common/mfmetadata.cpp
@@ -0,0 +1,408 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <qmediametadata.h>
+#include <qdatetime.h>
+#include <qtimezone.h>
+#include <qimage.h>
+#include <quuid.h>
+
+#include <mfapi.h>
+#include <mfidl.h>
+#include <propvarutil.h>
+#include <propkey.h>
+
+#include "private/qwindowsmultimediautils_p.h"
+#include "mfmetadata_p.h"
+
+//#define DEBUG_MEDIAFOUNDATION
+
+static const PROPERTYKEY PROP_KEY_NULL = {GUID_NULL, 0};
+
+static QVariant convertValue(const PROPVARIANT& var)
+{
+ QVariant value;
+ switch (var.vt) {
+ case VT_LPWSTR:
+ value = QString::fromUtf16(reinterpret_cast<const char16_t *>(var.pwszVal));
+ break;
+ case VT_UI4:
+ value = uint(var.ulVal);
+ break;
+ case VT_UI8:
+ value = qulonglong(var.uhVal.QuadPart);
+ break;
+ case VT_BOOL:
+ value = bool(var.boolVal);
+ break;
+ case VT_FILETIME:
+ SYSTEMTIME t;
+ if (!FileTimeToSystemTime(&var.filetime, &t))
+ break;
+
+ value = QDateTime(QDate(t.wYear, t.wMonth, t.wDay),
+ QTime(t.wHour, t.wMinute, t.wSecond, t.wMilliseconds),
+ QTimeZone(QTimeZone::UTC));
+ break;
+ case VT_STREAM:
+ {
+ STATSTG stat;
+ if (FAILED(var.pStream->Stat(&stat, STATFLAG_NONAME)))
+ break;
+        void *data = malloc(stat.cbSize.QuadPart);
+        if (!data)
+            break;
+        ULONG read = 0;
+        if (FAILED(var.pStream->Read(data, stat.cbSize.QuadPart, &read))) {
+            free(data);
+            break;
+        }
+        value = QImage::fromData(static_cast<const uchar *>(data), read);
+ free(data);
+ }
+ break;
+    case VT_VECTOR | VT_LPWSTR:
+    {
+        QStringList vList;
+        for (ULONG i = 0; i < var.calpwstr.cElems; ++i)
+            vList.append(QString::fromUtf16(reinterpret_cast<const char16_t *>(var.calpwstr.pElems[i])));
+        value = vList;
+    }
+        break;
+ }
+ return value;
+}
+
+static QVariant metaDataValue(IPropertyStore *content, const PROPERTYKEY &key)
+{
+ QVariant value;
+
+ PROPVARIANT var;
+ PropVariantInit(&var);
+ HRESULT hr = S_FALSE;
+ if (content)
+ hr = content->GetValue(key, &var);
+
+ if (SUCCEEDED(hr)) {
+ value = convertValue(var);
+
+ // some metadata needs to be reformatted
+ if (value.isValid() && content) {
+ if (key == PKEY_Media_ClassPrimaryID /*QMediaMetaData::MediaType*/) {
+ QString v = value.toString();
+ if (v == QLatin1String("{D1607DBC-E323-4BE2-86A1-48A42A28441E}"))
+ value = QStringLiteral("Music");
+ else if (v == QLatin1String("{DB9830BD-3AB3-4FAB-8A37-1A995F7FF74B}"))
+ value = QStringLiteral("Video");
+ else if (v == QLatin1String("{01CD0F29-DA4E-4157-897B-6275D50C4F11}"))
+ value = QStringLiteral("Audio");
+ else if (v == QLatin1String("{FCF24A76-9A57-4036-990D-E35DD8B244E1}"))
+ value = QStringLiteral("Other");
+ } else if (key == PKEY_Media_Duration) {
+ // duration is provided in 100-nanosecond units, convert to milliseconds
+ value = (value.toLongLong() + 10000) / 10000;
+ } else if (key == PKEY_Video_Compression) {
+ value = int(QWindowsMultimediaUtils::codecForVideoFormat(value.toUuid()));
+ } else if (key == PKEY_Audio_Format) {
+ value = int(QWindowsMultimediaUtils::codecForAudioFormat(value.toUuid()));
+ } else if (key == PKEY_Video_FrameHeight /*Resolution*/) {
+ QSize res;
+ res.setHeight(value.toUInt());
+ if (content && SUCCEEDED(content->GetValue(PKEY_Video_FrameWidth, &var)))
+ res.setWidth(convertValue(var).toUInt());
+ value = res;
+ } else if (key == PKEY_Video_Orientation) {
+ uint orientation = 0;
+ if (content && SUCCEEDED(content->GetValue(PKEY_Video_Orientation, &var)))
+ orientation = convertValue(var).toUInt();
+ value = orientation;
+ } else if (key == PKEY_Video_FrameRate) {
+ value = value.toReal() / 1000.f;
+ }
+ }
+ }
+
+ PropVariantClear(&var);
+ return value;
+}
+
+QMediaMetaData MFMetaData::fromNative(IMFMediaSource* mediaSource)
+{
+ QMediaMetaData metaData;
+
+ IPropertyStore *content = nullptr;
+ if (!SUCCEEDED(MFGetService(mediaSource, MF_PROPERTY_HANDLER_SERVICE, IID_PPV_ARGS(&content))))
+ return metaData;
+
+ Q_ASSERT(content);
+ DWORD cProps;
+ if (SUCCEEDED(content->GetCount(&cProps))) {
+ for (DWORD i = 0; i < cProps; i++)
+ {
+ PROPERTYKEY key;
+ if (FAILED(content->GetAt(i, &key)))
+ continue;
+ QMediaMetaData::Key mediaKey;
+ if (key == PKEY_Author) {
+ mediaKey = QMediaMetaData::Author;
+ } else if (key == PKEY_Title) {
+ mediaKey = QMediaMetaData::Title;
+// } else if (key == PKEY_Media_SubTitle) {
+// mediaKey = QMediaMetaData::SubTitle;
+// } else if (key == PKEY_ParentalRating) {
+// mediaKey = QMediaMetaData::ParentalRating;
+ } else if (key == PKEY_Media_EncodingSettings) {
+ mediaKey = QMediaMetaData::Description;
+ } else if (key == PKEY_Copyright) {
+ mediaKey = QMediaMetaData::Copyright;
+ } else if (key == PKEY_Comment) {
+ mediaKey = QMediaMetaData::Comment;
+ } else if (key == PKEY_Media_ProviderStyle) {
+ mediaKey = QMediaMetaData::Genre;
+ } else if (key == PKEY_Media_DateEncoded) {
+ mediaKey = QMediaMetaData::Date;
+// } else if (key == PKEY_Rating) {
+// mediaKey = QMediaMetaData::UserRating;
+// } else if (key == PKEY_Keywords) {
+// mediaKey = QMediaMetaData::Keywords;
+ } else if (key == PKEY_Language) {
+ mediaKey = QMediaMetaData::Language;
+ } else if (key == PKEY_Media_Publisher) {
+ mediaKey = QMediaMetaData::Publisher;
+ } else if (key == PKEY_Media_ClassPrimaryID) {
+ mediaKey = QMediaMetaData::MediaType;
+ } else if (key == PKEY_Media_Duration) {
+ mediaKey = QMediaMetaData::Duration;
+ } else if (key == PKEY_Audio_EncodingBitrate) {
+ mediaKey = QMediaMetaData::AudioBitRate;
+ } else if (key == PKEY_Audio_Format) {
+ mediaKey = QMediaMetaData::AudioCodec;
+// } else if (key == PKEY_Media_AverageLevel) {
+// mediaKey = QMediaMetaData::AverageLevel;
+// } else if (key == PKEY_Audio_ChannelCount) {
+// mediaKey = QMediaMetaData::ChannelCount;
+// } else if (key == PKEY_Audio_PeakValue) {
+// mediaKey = QMediaMetaData::PeakValue;
+// } else if (key == PKEY_Audio_SampleRate) {
+// mediaKey = QMediaMetaData::SampleRate;
+ } else if (key == PKEY_Music_AlbumTitle) {
+ mediaKey = QMediaMetaData::AlbumTitle;
+ } else if (key == PKEY_Music_AlbumArtist) {
+ mediaKey = QMediaMetaData::AlbumArtist;
+ } else if (key == PKEY_Music_Artist) {
+ mediaKey = QMediaMetaData::ContributingArtist;
+ } else if (key == PKEY_Music_Composer) {
+ mediaKey = QMediaMetaData::Composer;
+// } else if (key == PKEY_Music_Conductor) {
+// mediaKey = QMediaMetaData::Conductor;
+// } else if (key == PKEY_Music_Lyrics) {
+// mediaKey = QMediaMetaData::Lyrics;
+// } else if (key == PKEY_Music_Mood) {
+// mediaKey = QMediaMetaData::Mood;
+ } else if (key == PKEY_Music_TrackNumber) {
+ mediaKey = QMediaMetaData::TrackNumber;
+ } else if (key == PKEY_Music_Genre) {
+ mediaKey = QMediaMetaData::Genre;
+ } else if (key == PKEY_ThumbnailStream) {
+ mediaKey = QMediaMetaData::ThumbnailImage;
+ } else if (key == PKEY_Video_FrameHeight) {
+ mediaKey = QMediaMetaData::Resolution;
+ } else if (key == PKEY_Video_Orientation) {
+ mediaKey = QMediaMetaData::Orientation;
+ } else if (key == PKEY_Video_FrameRate) {
+ mediaKey = QMediaMetaData::VideoFrameRate;
+ } else if (key == PKEY_Video_EncodingBitrate) {
+ mediaKey = QMediaMetaData::VideoBitRate;
+ } else if (key == PKEY_Video_Compression) {
+ mediaKey = QMediaMetaData::VideoCodec;
+// } else if (key == PKEY_Video_Director) {
+// mediaKey = QMediaMetaData::Director;
+// } else if (key == PKEY_Media_Writer) {
+// mediaKey = QMediaMetaData::Writer;
+ } else {
+ continue;
+ }
+ metaData.insert(mediaKey, metaDataValue(content, key));
+ }
+ }
+
+ content->Release();
+
+ return metaData;
+}
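+// Usage sketch (illustrative only): converting the metadata of an already
+// resolved Media Foundation source in one call; the source pointer is assumed
+// to come from elsewhere, e.g. the SourceResolver used by the player:
+//
+//     IMFMediaSource *source = ...;    // hypothetical, already resolved
+//     const QMediaMetaData md = MFMetaData::fromNative(source);
+//     qDebug() << md.stringValue(QMediaMetaData::Title)
+//              << md.value(QMediaMetaData::Duration).toLongLong();  // ms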
+
+static REFPROPERTYKEY propertyKeyForMetaDataKey(QMediaMetaData::Key key)
+{
+ switch (key) {
+ case QMediaMetaData::Key::Title:
+ return PKEY_Title;
+ case QMediaMetaData::Key::Author:
+ return PKEY_Author;
+ case QMediaMetaData::Key::Comment:
+ return PKEY_Comment;
+ case QMediaMetaData::Key::Genre:
+ return PKEY_Music_Genre;
+ case QMediaMetaData::Key::Copyright:
+ return PKEY_Copyright;
+ case QMediaMetaData::Key::Publisher:
+ return PKEY_Media_Publisher;
+ case QMediaMetaData::Key::Url:
+ return PKEY_Media_AuthorUrl;
+ case QMediaMetaData::Key::AlbumTitle:
+ return PKEY_Music_AlbumTitle;
+ case QMediaMetaData::Key::AlbumArtist:
+ return PKEY_Music_AlbumArtist;
+ case QMediaMetaData::Key::TrackNumber:
+ return PKEY_Music_TrackNumber;
+ case QMediaMetaData::Key::Date:
+ return PKEY_Media_DateEncoded;
+ case QMediaMetaData::Key::Composer:
+ return PKEY_Music_Composer;
+ case QMediaMetaData::Key::Duration:
+ return PKEY_Media_Duration;
+ case QMediaMetaData::Key::Language:
+ return PKEY_Language;
+ case QMediaMetaData::Key::Description:
+ return PKEY_Media_EncodingSettings;
+ case QMediaMetaData::Key::AudioBitRate:
+ return PKEY_Audio_EncodingBitrate;
+ case QMediaMetaData::Key::ContributingArtist:
+ return PKEY_Music_Artist;
+ case QMediaMetaData::Key::ThumbnailImage:
+ return PKEY_ThumbnailStream;
+ case QMediaMetaData::Key::Orientation:
+ return PKEY_Video_Orientation;
+ case QMediaMetaData::Key::VideoFrameRate:
+ return PKEY_Video_FrameRate;
+ case QMediaMetaData::Key::VideoBitRate:
+ return PKEY_Video_EncodingBitrate;
+ case QMediaMetaData::MediaType:
+ return PKEY_Media_ClassPrimaryID;
+ default:
+ return PROP_KEY_NULL;
+ }
+}
+
+static void setStringProperty(IPropertyStore *content, REFPROPERTYKEY key, const QString &value)
+{
+ PROPVARIANT propValue = {};
+ if (SUCCEEDED(InitPropVariantFromString(reinterpret_cast<LPCWSTR>(value.utf16()), &propValue))) {
+ if (SUCCEEDED(PSCoerceToCanonicalValue(key, &propValue)))
+ content->SetValue(key, propValue);
+ PropVariantClear(&propValue);
+ }
+}
+
+static void setUInt32Property(IPropertyStore *content, REFPROPERTYKEY key, quint32 value)
+{
+ PROPVARIANT propValue = {};
+ if (SUCCEEDED(InitPropVariantFromUInt32(ULONG(value), &propValue))) {
+ if (SUCCEEDED(PSCoerceToCanonicalValue(key, &propValue)))
+ content->SetValue(key, propValue);
+ PropVariantClear(&propValue);
+ }
+}
+
+static void setUInt64Property(IPropertyStore *content, REFPROPERTYKEY key, quint64 value)
+{
+ PROPVARIANT propValue = {};
+ if (SUCCEEDED(InitPropVariantFromUInt64(ULONGLONG(value), &propValue))) {
+ if (SUCCEEDED(PSCoerceToCanonicalValue(key, &propValue)))
+ content->SetValue(key, propValue);
+ PropVariantClear(&propValue);
+ }
+}
+
+static void setFileTimeProperty(IPropertyStore *content, REFPROPERTYKEY key, const FILETIME *ft)
+{
+ PROPVARIANT propValue = {};
+ if (SUCCEEDED(InitPropVariantFromFileTime(ft, &propValue))) {
+ if (SUCCEEDED(PSCoerceToCanonicalValue(key, &propValue)))
+ content->SetValue(key, propValue);
+ PropVariantClear(&propValue);
+ }
+}
+
+void MFMetaData::toNative(const QMediaMetaData &metaData, IPropertyStore *content)
+{
+ if (content) {
+
+ for (const auto &key : metaData.keys()) {
+
+ QVariant value = metaData.value(key);
+
+ if (key == QMediaMetaData::Key::MediaType) {
+
+ QString strValue = metaData.stringValue(key);
+ QString v;
+
+ // Sets property to one of the MediaClassPrimaryID values defined by Microsoft:
+ // https://docs.microsoft.com/en-us/windows/win32/wmformat/wm-mediaprimaryid
+ if (strValue == QLatin1String("Music"))
+ v = QLatin1String("{D1607DBC-E323-4BE2-86A1-48A42A28441E}");
+ else if (strValue == QLatin1String("Video"))
+ v = QLatin1String("{DB9830BD-3AB3-4FAB-8A37-1A995F7FF74B}");
+ else if (strValue == QLatin1String("Audio"))
+ v = QLatin1String("{01CD0F29-DA4E-4157-897B-6275D50C4F11}");
+ else
+ v = QLatin1String("{FCF24A76-9A57-4036-990D-E35DD8B244E1}");
+
+ setStringProperty(content, PKEY_Media_ClassPrimaryID, v);
+
+ } else if (key == QMediaMetaData::Key::Duration) {
+
+ setUInt64Property(content, PKEY_Media_Duration, value.toULongLong() * 10000);
+
+ } else if (key == QMediaMetaData::Key::Resolution) {
+
+ QSize res = value.toSize();
+ setUInt32Property(content, PKEY_Video_FrameWidth, quint32(res.width()));
+ setUInt32Property(content, PKEY_Video_FrameHeight, quint32(res.height()));
+
+ } else if (key == QMediaMetaData::Key::Orientation) {
+
+ setUInt32Property(content, PKEY_Video_Orientation, value.toUInt());
+
+ } else if (key == QMediaMetaData::Key::VideoFrameRate) {
+
+ qreal fps = value.toReal();
+ setUInt32Property(content, PKEY_Video_FrameRate, quint32(fps * 1000));
+
+ } else if (key == QMediaMetaData::Key::TrackNumber) {
+
+ setUInt32Property(content, PKEY_Music_TrackNumber, value.toUInt());
+
+ } else if (key == QMediaMetaData::Key::AudioBitRate) {
+
+ setUInt32Property(content, PKEY_Audio_EncodingBitrate, value.toUInt());
+
+ } else if (key == QMediaMetaData::Key::VideoBitRate) {
+
+ setUInt32Property(content, PKEY_Video_EncodingBitrate, value.toUInt());
+
+ } else if (key == QMediaMetaData::Key::Date) {
+
+ // Convert QDateTime to FILETIME by converting to 100-nsecs since
+ // 01/01/1970 UTC and adding the difference from 1601 to 1970.
+ ULARGE_INTEGER t = {};
+ t.QuadPart = ULONGLONG(value.toDateTime().toUTC().toMSecsSinceEpoch() * 10000
+ + 116444736000000000LL);
+
+ FILETIME ft = {};
+ ft.dwHighDateTime = t.HighPart;
+ ft.dwLowDateTime = t.LowPart;
+
+ setFileTimeProperty(content, PKEY_Media_DateEncoded, &ft);
+
+ } else {
+
+ // By default use as string and let PSCoerceToCanonicalValue()
+ // do validation and type conversion.
+ REFPROPERTYKEY propKey = propertyKeyForMetaDataKey(key);
+
+ if (propKey != PROP_KEY_NULL) {
+ QString strValue = metaData.stringValue(key);
+ if (!strValue.isEmpty())
+ setStringProperty(content, propKey, strValue);
+ }
+ }
+ }
+ }
+}
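+// The epoch constant used in the Date branch above checks out: 1601-01-01 to
+// 1970-01-01 spans 369 years containing 89 leap days, i.e.
+// (369 * 365 + 89) * 86400 = 11644473600 seconds, and multiplying by 10^7
+// (100 ns units per second) gives 116444736000000000.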
+
diff --git a/src/plugins/multimedia/windows/common/mfmetadata_p.h b/src/plugins/multimedia/windows/common/mfmetadata_p.h
new file mode 100644
index 000000000..9ff196240
--- /dev/null
+++ b/src/plugins/multimedia/windows/common/mfmetadata_p.h
@@ -0,0 +1,30 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef MFMETADATACONTROL_H
+#define MFMETADATACONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qmediametadata.h>
+#include "mfidl.h"
+
+QT_USE_NAMESPACE
+
+class MFMetaData
+{
+public:
+ static QMediaMetaData fromNative(IMFMediaSource* mediaSource);
+ static void toNative(const QMediaMetaData &metaData, IPropertyStore *content);
+};
+
+#endif // MFMETADATACONTROL_H
diff --git a/src/plugins/multimedia/windows/decoder/mfaudiodecodercontrol.cpp b/src/plugins/multimedia/windows/decoder/mfaudiodecodercontrol.cpp
new file mode 100644
index 000000000..912ab5e94
--- /dev/null
+++ b/src/plugins/multimedia/windows/decoder/mfaudiodecodercontrol.cpp
@@ -0,0 +1,225 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <system_error>
+#include <mferror.h>
+#include <qglobal.h>
+#include "wmcodecdsp.h"
+#include "mfaudiodecodercontrol_p.h"
+#include <private/qwindowsaudioutils_p.h>
+
+QT_BEGIN_NAMESPACE
+
+MFAudioDecoderControl::MFAudioDecoderControl(QAudioDecoder *parent)
+ : QPlatformAudioDecoder(parent)
+ , m_sourceResolver(new SourceResolver)
+{
+ connect(m_sourceResolver, &SourceResolver::mediaSourceReady, this, &MFAudioDecoderControl::handleMediaSourceReady);
+ connect(m_sourceResolver, &SourceResolver::error, this, &MFAudioDecoderControl::handleMediaSourceError);
+}
+
+MFAudioDecoderControl::~MFAudioDecoderControl()
+{
+ m_sourceResolver->shutdown();
+ m_sourceResolver->Release();
+}
+
+void MFAudioDecoderControl::setSource(const QUrl &fileName)
+{
+ if (!m_device && m_source == fileName)
+ return;
+ stop();
+ m_sourceResolver->cancel();
+ m_sourceResolver->shutdown();
+ m_device = nullptr;
+ m_source = fileName;
+ sourceChanged();
+
+ if (!m_source.isEmpty()) {
+        m_sourceResolver->load(m_source, nullptr);
+ m_loadingSource = true;
+ }
+}
+
+void MFAudioDecoderControl::setSourceDevice(QIODevice *device)
+{
+ if (m_device == device && m_source.isEmpty())
+ return;
+ stop();
+ m_sourceResolver->cancel();
+ m_sourceResolver->shutdown();
+ m_source.clear();
+ m_device = device;
+ sourceChanged();
+
+ if (m_device) {
+ if (m_device->isOpen() && m_device->isReadable()) {
+ m_sourceResolver->load(QUrl(), m_device);
+ m_loadingSource = true;
+ }
+ }
+}
+
+void MFAudioDecoderControl::handleMediaSourceReady()
+{
+ m_loadingSource = false;
+ if (m_deferredStart) {
+ m_deferredStart = false;
+ startReadingSource(m_sourceResolver->mediaSource());
+ }
+}
+
+void MFAudioDecoderControl::handleMediaSourceError(long hr)
+{
+ m_loadingSource = false;
+ m_deferredStart = false;
+ if (hr == MF_E_UNSUPPORTED_BYTESTREAM_TYPE) {
+ error(QAudioDecoder::FormatError, tr("Unsupported media type"));
+ } else if (hr == ERROR_FILE_NOT_FOUND) {
+ error(QAudioDecoder::ResourceError, tr("Media not found"));
+ } else {
+ error(QAudioDecoder::ResourceError, tr("Unable to load specified URL")
+ + QString::fromStdString(std::system_category().message(hr)));
+ }
+}
+
+void MFAudioDecoderControl::startReadingSource(IMFMediaSource *source)
+{
+ Q_ASSERT(source);
+
+ m_decoderSourceReader = makeComObject<MFDecoderSourceReader>();
+ if (!m_decoderSourceReader) {
+ error(QAudioDecoder::ResourceError, tr("Could not instantiate MFDecoderSourceReader"));
+ return;
+ }
+
+ auto mediaType = m_decoderSourceReader->setSource(source, m_outputFormat.sampleFormat());
+ QAudioFormat mediaFormat = QWindowsAudioUtils::mediaTypeToFormat(mediaType.Get());
+ if (!mediaFormat.isValid()) {
+ error(QAudioDecoder::FormatError, tr("Invalid media format"));
+ m_decoderSourceReader.Reset();
+ return;
+ }
+
+ ComPtr<IMFPresentationDescriptor> pd;
+ if (SUCCEEDED(source->CreatePresentationDescriptor(pd.GetAddressOf()))) {
+ UINT64 duration = 0;
+ pd->GetUINT64(MF_PD_DURATION, &duration);
+ duration /= 10000;
+ m_duration = qint64(duration);
+ durationChanged(m_duration);
+ }
+
+ if (!m_resampler.setup(mediaFormat, m_outputFormat.isValid() ? m_outputFormat : mediaFormat)) {
+ qWarning() << "Failed to set up resampler";
+ return;
+ }
+
+ connect(m_decoderSourceReader.Get(), &MFDecoderSourceReader::finished, this, &MFAudioDecoderControl::handleSourceFinished);
+ connect(m_decoderSourceReader.Get(), &MFDecoderSourceReader::newSample, this, &MFAudioDecoderControl::handleNewSample);
+
+ setIsDecoding(true);
+
+ m_decoderSourceReader->readNextSample();
+}
+
+void MFAudioDecoderControl::start()
+{
+ if (isDecoding())
+ return;
+
+ if (m_loadingSource) {
+ m_deferredStart = true;
+ } else {
+ IMFMediaSource *source = m_sourceResolver->mediaSource();
+ if (!source) {
+ if (m_device)
+ error(QAudioDecoder::ResourceError, tr("Unable to read from specified device"));
+ else if (m_source.isValid())
+ error(QAudioDecoder::ResourceError, tr("Unable to load specified URL"));
+ else
+ error(QAudioDecoder::ResourceError, tr("No media source specified"));
+ return;
+ } else {
+ startReadingSource(source);
+ }
+ }
+}
+
+void MFAudioDecoderControl::stop()
+{
+ m_deferredStart = false;
+ if (!isDecoding())
+ return;
+
+ disconnect(m_decoderSourceReader.Get());
+ m_decoderSourceReader->clearSource();
+ m_decoderSourceReader.Reset();
+
+ if (bufferAvailable()) {
+ QAudioBuffer buffer;
+ m_audioBuffer.swap(buffer);
+ bufferAvailableChanged(false);
+ }
+ setIsDecoding(false);
+
+ if (m_position != -1) {
+ m_position = -1;
+ positionChanged(m_position);
+ }
+ if (m_duration != -1) {
+ m_duration = -1;
+ durationChanged(m_duration);
+ }
+}
+
+void MFAudioDecoderControl::handleNewSample(ComPtr<IMFSample> sample)
+{
+ Q_ASSERT(sample);
+
+ qint64 sampleStartTimeUs = m_resampler.outputFormat().durationForBytes(m_resampler.totalOutputBytes());
+ QByteArray out = m_resampler.resample(sample.Get());
+
+ if (out.isEmpty()) {
+ error(QAudioDecoder::Error::ResourceError, tr("Failed processing a sample"));
+
+ } else {
+ m_audioBuffer = QAudioBuffer(out, m_resampler.outputFormat(), sampleStartTimeUs);
+
+ bufferAvailableChanged(true);
+ bufferReady();
+ }
+}
+
+void MFAudioDecoderControl::handleSourceFinished()
+{
+ stop();
+ finished();
+}
+
+void MFAudioDecoderControl::setAudioFormat(const QAudioFormat &format)
+{
+ if (m_outputFormat == format)
+ return;
+ m_outputFormat = format;
+ formatChanged(m_outputFormat);
+}
+
+QAudioBuffer MFAudioDecoderControl::read()
+{
+ QAudioBuffer buffer;
+
+ if (bufferAvailable()) {
+ buffer.swap(m_audioBuffer);
+ m_position = buffer.startTime() / 1000;
+ positionChanged(m_position);
+ bufferAvailableChanged(false);
+ m_decoderSourceReader->readNextSample();
+ }
+
+ return buffer;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_mfaudiodecodercontrol_p.cpp"
diff --git a/src/plugins/multimedia/windows/decoder/mfaudiodecodercontrol_p.h b/src/plugins/multimedia/windows/decoder/mfaudiodecodercontrol_p.h
new file mode 100644
index 000000000..9bb2371ec
--- /dev/null
+++ b/src/plugins/multimedia/windows/decoder/mfaudiodecodercontrol_p.h
@@ -0,0 +1,75 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef MFAUDIODECODERCONTROL_H
+#define MFAUDIODECODERCONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "mfdecodersourcereader_p.h"
+#include <private/qplatformaudiodecoder_p.h>
+#include <sourceresolver_p.h>
+#include <private/qcomptr_p.h>
+#include <private/qwindowsresampler_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class MFAudioDecoderControl : public QPlatformAudioDecoder
+{
+ Q_OBJECT
+public:
+ MFAudioDecoderControl(QAudioDecoder *parent);
+ ~MFAudioDecoderControl() override;
+
+ QUrl source() const override { return m_source; }
+ void setSource(const QUrl &fileName) override;
+
+ QIODevice* sourceDevice() const override { return m_device; }
+ void setSourceDevice(QIODevice *device) override;
+
+ void start() override;
+ void stop() override;
+
+ QAudioFormat audioFormat() const override { return m_outputFormat; }
+ void setAudioFormat(const QAudioFormat &format) override;
+
+ QAudioBuffer read() override;
+ bool bufferAvailable() const override { return m_audioBuffer.sampleCount() > 0; }
+
+ qint64 position() const override { return m_position; }
+ qint64 duration() const override { return m_duration; }
+
+private Q_SLOTS:
+ void handleMediaSourceReady();
+ void handleMediaSourceError(long hr);
+ void handleNewSample(ComPtr<IMFSample>);
+ void handleSourceFinished();
+
+private:
+ void startReadingSource(IMFMediaSource *source);
+
+ ComPtr<MFDecoderSourceReader> m_decoderSourceReader;
+ SourceResolver *m_sourceResolver;
+ QWindowsResampler m_resampler;
+ QUrl m_source;
+ QIODevice *m_device = nullptr;
+ QAudioFormat m_outputFormat;
+ QAudioBuffer m_audioBuffer;
+ qint64 m_duration = -1;
+ qint64 m_position = -1;
+ bool m_loadingSource = false;
+ bool m_deferredStart = false;
+};
+
+QT_END_NAMESPACE
+
+#endif // MFAUDIODECODERCONTROL_H
diff --git a/src/plugins/multimedia/windows/decoder/mfdecodersourcereader.cpp b/src/plugins/multimedia/windows/decoder/mfdecodersourcereader.cpp
new file mode 100644
index 000000000..097f83437
--- /dev/null
+++ b/src/plugins/multimedia/windows/decoder/mfdecodersourcereader.cpp
@@ -0,0 +1,103 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <system_error>
+#include <mferror.h>
+#include <qlogging.h>
+#include <qdebug.h>
+#include "mfdecodersourcereader_p.h"
+
+QT_BEGIN_NAMESPACE
+
+ComPtr<IMFMediaType> MFDecoderSourceReader::setSource(IMFMediaSource *source, QAudioFormat::SampleFormat sampleFormat)
+{
+ ComPtr<IMFMediaType> mediaType;
+ m_sourceReader.Reset();
+
+ if (!source)
+ return mediaType;
+
+ ComPtr<IMFAttributes> attr;
+ MFCreateAttributes(attr.GetAddressOf(), 1);
+ if (FAILED(attr->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, this)))
+ return mediaType;
+ if (FAILED(attr->SetUINT32(MF_SOURCE_READER_DISCONNECT_MEDIASOURCE_ON_SHUTDOWN, TRUE)))
+ return mediaType;
+
+ HRESULT hr = MFCreateSourceReaderFromMediaSource(source, attr.Get(), m_sourceReader.GetAddressOf());
+ if (FAILED(hr)) {
+ qWarning() << "MFDecoderSourceReader: failed to set up source reader: "
+ << std::system_category().message(hr).c_str();
+ return mediaType;
+ }
+
+ m_sourceReader->SetStreamSelection(DWORD(MF_SOURCE_READER_ALL_STREAMS), FALSE);
+ m_sourceReader->SetStreamSelection(DWORD(MF_SOURCE_READER_FIRST_AUDIO_STREAM), TRUE);
+
+ ComPtr<IMFMediaType> pPartialType;
+ MFCreateMediaType(pPartialType.GetAddressOf());
+ pPartialType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
+ pPartialType->SetGUID(MF_MT_SUBTYPE, sampleFormat == QAudioFormat::Float ? MFAudioFormat_Float : MFAudioFormat_PCM);
+ m_sourceReader->SetCurrentMediaType(DWORD(MF_SOURCE_READER_FIRST_AUDIO_STREAM), nullptr, pPartialType.Get());
+ m_sourceReader->GetCurrentMediaType(DWORD(MF_SOURCE_READER_FIRST_AUDIO_STREAM), mediaType.GetAddressOf());
+ // Ensure the stream is selected.
+ m_sourceReader->SetStreamSelection(DWORD(MF_SOURCE_READER_FIRST_AUDIO_STREAM), TRUE);
+
+ return mediaType;
+}
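+// Note on the negotiation above: the reader is handed a *partial* media type
+// (major type and subtype only). SetCurrentMediaType lets Media Foundation
+// insert whatever decoder is needed, and the subsequent GetCurrentMediaType
+// reports the fully specified PCM/float format that actually comes out.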
+
+void MFDecoderSourceReader::readNextSample()
+{
+ if (m_sourceReader)
+ m_sourceReader->ReadSample(MF_SOURCE_READER_FIRST_AUDIO_STREAM, 0, NULL, NULL, NULL, NULL);
+}
+
+//from IUnknown
+STDMETHODIMP MFDecoderSourceReader::QueryInterface(REFIID riid, LPVOID *ppvObject)
+{
+ if (!ppvObject)
+ return E_POINTER;
+ if (riid == IID_IMFSourceReaderCallback) {
+ *ppvObject = static_cast<IMFSourceReaderCallback*>(this);
+ } else if (riid == IID_IUnknown) {
+ *ppvObject = static_cast<IUnknown*>(this);
+ } else {
+ *ppvObject = NULL;
+ return E_NOINTERFACE;
+ }
+ AddRef();
+ return S_OK;
+}
+
+STDMETHODIMP_(ULONG) MFDecoderSourceReader::AddRef()
+{
+ return InterlockedIncrement(&m_cRef);
+}
+
+STDMETHODIMP_(ULONG) MFDecoderSourceReader::Release()
+{
+ LONG cRef = InterlockedDecrement(&m_cRef);
+ if (cRef == 0) {
+ this->deleteLater();
+ }
+ return cRef;
+}
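+// Note: Release() defers destruction through deleteLater() instead of
+// "delete this", so the QObject is destroyed on its own thread's event loop
+// even when the last COM reference is dropped from a Media Foundation
+// worker thread.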
+
+//from IMFSourceReaderCallback
+STDMETHODIMP MFDecoderSourceReader::OnReadSample(HRESULT hrStatus, DWORD dwStreamIndex,
+ DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample *pSample)
+{
+ Q_UNUSED(hrStatus);
+ Q_UNUSED(dwStreamIndex);
+ Q_UNUSED(llTimestamp);
+ if (pSample) {
+ emit newSample(ComPtr<IMFSample>{pSample});
+ } else if ((dwStreamFlags & MF_SOURCE_READERF_ENDOFSTREAM) == MF_SOURCE_READERF_ENDOFSTREAM) {
+ emit finished();
+ }
+ return S_OK;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_mfdecodersourcereader_p.cpp"
diff --git a/src/plugins/multimedia/windows/decoder/mfdecodersourcereader_p.h b/src/plugins/multimedia/windows/decoder/mfdecodersourcereader_p.h
new file mode 100644
index 000000000..dee6f8bf5
--- /dev/null
+++ b/src/plugins/multimedia/windows/decoder/mfdecodersourcereader_p.h
@@ -0,0 +1,63 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef MFDECODERSOURCEREADER_H
+#define MFDECODERSOURCEREADER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <mfapi.h>
+#include <mfidl.h>
+#include <mfreadwrite.h>
+
+#include <QtCore/qobject.h>
+#include "qaudioformat.h"
+#include <private/qcomptr_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class MFDecoderSourceReader : public QObject, public IMFSourceReaderCallback
+{
+ Q_OBJECT
+public:
+ MFDecoderSourceReader() {}
+ ~MFDecoderSourceReader() override {}
+
+ void clearSource() { m_sourceReader.Reset(); }
+ ComPtr<IMFMediaType> setSource(IMFMediaSource *source, QAudioFormat::SampleFormat);
+
+ void readNextSample();
+
+ //from IUnknown
+ STDMETHODIMP QueryInterface(REFIID riid, LPVOID *ppvObject) override;
+ STDMETHODIMP_(ULONG) AddRef() override;
+ STDMETHODIMP_(ULONG) Release() override;
+
+ //from IMFSourceReaderCallback
+ STDMETHODIMP OnReadSample(HRESULT hrStatus, DWORD dwStreamIndex,
+ DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample *pSample) override;
+ STDMETHODIMP OnFlush(DWORD) override { return S_OK; }
+ STDMETHODIMP OnEvent(DWORD, IMFMediaEvent *) override { return S_OK; }
+
+Q_SIGNALS:
+ void newSample(ComPtr<IMFSample>);
+ void finished();
+
+private:
+ long m_cRef = 1;
+ ComPtr<IMFSourceReader> m_sourceReader;
+
+};
+
+QT_END_NAMESPACE
+
+#endif // MFDECODERSOURCEREADER_H
diff --git a/src/plugins/multimedia/windows/evr/evrcustompresenter.cpp b/src/plugins/multimedia/windows/evr/evrcustompresenter.cpp
new file mode 100644
index 000000000..2a3433f4d
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrcustompresenter.cpp
@@ -0,0 +1,1849 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "evrcustompresenter_p.h"
+
+#include "evrd3dpresentengine_p.h"
+#include "evrhelpers_p.h"
+#include <private/qwindowsmultimediautils_p.h>
+#include <private/qplatformvideosink_p.h>
+#include <private/qwindowsmfdefs_p.h>
+
+#include <rhi/qrhi.h>
+
+#include <QtCore/qmutex.h>
+#include <QtCore/qvarlengtharray.h>
+#include <QtCore/qrect.h>
+#include <qthread.h>
+#include <qcoreapplication.h>
+#include <qmath.h>
+#include <qloggingcategory.h>
+
+#include <mutex>
+
+#include <float.h>
+#include <evcode.h>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcEvrCustomPresenter, "qt.multimedia.evrcustompresenter")
+
+static const MFRatio g_DefaultFrameRate = { 30, 1 };
+static const DWORD SCHEDULER_TIMEOUT = 5000;
+static const MFTIME ONE_SECOND = 10000000;
+static const LONG ONE_MSEC = 1000;
+
+// Function declarations.
+static HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect& nrcSource);
+static QVideoFrameFormat::PixelFormat pixelFormatFromMediaType(IMFMediaType *type);
+
+static inline LONG MFTimeToMsec(const LONGLONG& time)
+{
+ return (LONG)(time / (ONE_SECOND / ONE_MSEC));
+}
+
+bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter)
+{
+ if (!evr || !presenter)
+ return false;
+
+ HRESULT result = E_FAIL;
+
+ IMFVideoRenderer *renderer = NULL;
+ if (SUCCEEDED(evr->QueryInterface(IID_PPV_ARGS(&renderer)))) {
+ result = renderer->InitializeRenderer(NULL, presenter);
+ renderer->Release();
+ }
+
+ return result == S_OK;
+}
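+// Usage sketch (illustrative only): installing the custom presenter on an EVR
+// instance before its topology is resolved; the EVR pointer is assumed to
+// come from the media session's renderer activate:
+//
+//     QVideoSink *sink = ...;                    // target sink, hypothetical
+//     IUnknown *evr = ...;                       // hypothetical EVR instance
+//     auto *presenter = new EVRCustomPresenter(sink);
+//     if (!qt_evr_setCustomPresenter(evr, presenter))
+//         presenter->Release();                  // EVR did not take a reference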
+
+class PresentSampleEvent : public QEvent
+{
+public:
+ explicit PresentSampleEvent(const ComPtr<IMFSample> &sample)
+ : QEvent(static_cast<Type>(EVRCustomPresenter::PresentSample)), m_sample(sample)
+ {
+ }
+
+ ComPtr<IMFSample> sample() const { return m_sample; }
+
+private:
+ const ComPtr<IMFSample> m_sample;
+};
+
+Scheduler::Scheduler(EVRCustomPresenter *presenter)
+ : m_presenter(presenter)
+ , m_threadID(0)
+ , m_playbackRate(1.0f)
+ , m_perFrame_1_4th(0)
+{
+}
+
+Scheduler::~Scheduler()
+{
+ m_scheduledSamples.clear();
+}
+
+void Scheduler::setFrameRate(const MFRatio& fps)
+{
+ UINT64 AvgTimePerFrame = 0;
+
+ // Convert to a duration.
+ MFFrameRateToAverageTimePerFrame(fps.Numerator, fps.Denominator, &AvgTimePerFrame);
+
+ // Calculate 1/4th of this value, because we use it frequently.
+ m_perFrame_1_4th = AvgTimePerFrame / 4;
+}
+
+HRESULT Scheduler::startScheduler(ComPtr<IMFClock> clock)
+{
+ if (m_schedulerThread)
+ return E_UNEXPECTED;
+
+ HRESULT hr = S_OK;
+ DWORD dwID = 0;
+ HANDLE hObjects[2];
+ DWORD dwWait = 0;
+
+ m_clock = clock;
+
+    // Set a high timer resolution (i.e., a short timer period).
+ timeBeginPeriod(1);
+
+ // Create an event to wait for the thread to start.
+ m_threadReadyEvent = EventHandle{ CreateEvent(NULL, FALSE, FALSE, NULL) };
+ if (!m_threadReadyEvent) {
+ hr = HRESULT_FROM_WIN32(GetLastError());
+ goto done;
+ }
+
+ // Create an event to wait for flush commands to complete.
+ m_flushEvent = EventHandle{ CreateEvent(NULL, FALSE, FALSE, NULL) };
+ if (!m_flushEvent) {
+ hr = HRESULT_FROM_WIN32(GetLastError());
+ goto done;
+ }
+
+ // Create the scheduler thread.
+ m_schedulerThread = ThreadHandle{ CreateThread(NULL, 0, schedulerThreadProc, (LPVOID)this, 0, &dwID) };
+ if (!m_schedulerThread) {
+ hr = HRESULT_FROM_WIN32(GetLastError());
+ goto done;
+ }
+
+ // Wait for the thread to signal the "thread ready" event.
+ hObjects[0] = m_threadReadyEvent.get();
+ hObjects[1] = m_schedulerThread.get();
+ dwWait = WaitForMultipleObjects(2, hObjects, FALSE, INFINITE); // Wait for EITHER of these handles.
+ if (WAIT_OBJECT_0 != dwWait) {
+ // The thread terminated early for some reason. This is an error condition.
+ m_schedulerThread = {};
+
+ hr = E_UNEXPECTED;
+ goto done;
+ }
+
+ m_threadID = dwID;
+
+done:
+ // Regardless success/failure, we are done using the "thread ready" event.
+ m_threadReadyEvent = {};
+
+ return hr;
+}
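+// Note on the handshake above: waiting on both the "thread ready" event and
+// the thread handle means startScheduler() also wakes up, and fails cleanly,
+// if the worker thread dies before it ever signals readiness.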
+
+HRESULT Scheduler::stopScheduler()
+{
+ if (!m_schedulerThread)
+ return S_OK;
+
+ // Ask the scheduler thread to exit.
+ PostThreadMessage(m_threadID, Terminate, 0, 0);
+
+ // Wait for the thread to exit.
+ WaitForSingleObject(m_schedulerThread.get(), INFINITE);
+
+ // Close handles.
+ m_schedulerThread = {};
+ m_flushEvent = {};
+
+ // Discard samples.
+ m_mutex.lock();
+ m_scheduledSamples.clear();
+ m_mutex.unlock();
+
+ // Restore the timer resolution.
+ timeEndPeriod(1);
+
+ return S_OK;
+}
+
+HRESULT Scheduler::flush()
+{
+ if (m_schedulerThread) {
+ // Ask the scheduler thread to flush.
+ PostThreadMessage(m_threadID, Flush, 0 , 0);
+
+ // Wait for the scheduler thread to signal the flush event,
+ // OR for the thread to terminate.
+ HANDLE objects[] = { m_flushEvent.get(), m_schedulerThread.get() };
+
+ WaitForMultipleObjects(ARRAYSIZE(objects), objects, FALSE, SCHEDULER_TIMEOUT);
+ }
+
+ return S_OK;
+}
+
+bool Scheduler::areSamplesScheduled()
+{
+ QMutexLocker locker(&m_mutex);
+ return m_scheduledSamples.count() > 0;
+}
+
+HRESULT Scheduler::scheduleSample(const ComPtr<IMFSample> &sample, bool presentNow)
+{
+ if (!m_schedulerThread)
+ return MF_E_NOT_INITIALIZED;
+
+ HRESULT hr = S_OK;
+ DWORD dwExitCode = 0;
+
+ GetExitCodeThread(m_schedulerThread.get(), &dwExitCode);
+ if (dwExitCode != STILL_ACTIVE)
+ return E_FAIL;
+
+ if (presentNow || !m_clock) {
+ m_presenter->presentSample(sample);
+ } else {
+ if (m_playbackRate > 0.0f && qt_evr_isSampleTimePassed(m_clock.Get(), sample.Get())) {
+ qCDebug(qLcEvrCustomPresenter) << "Discard the sample, it came too late";
+ return hr;
+ }
+
+ // Queue the sample and ask the scheduler thread to wake up.
+ m_mutex.lock();
+ m_scheduledSamples.enqueue(sample);
+ m_mutex.unlock();
+
+ if (SUCCEEDED(hr))
+ PostThreadMessage(m_threadID, Schedule, 0, 0);
+ }
+
+ return hr;
+}
+
+HRESULT Scheduler::processSamplesInQueue(LONG *nextSleep)
+{
+ HRESULT hr = S_OK;
+ LONG wait = 0;
+
+ QQueue<ComPtr<IMFSample>> scheduledSamples;
+
+ m_mutex.lock();
+ m_scheduledSamples.swap(scheduledSamples);
+ m_mutex.unlock();
+
+ // Process samples until the queue is empty or until the wait time > 0.
+ while (!scheduledSamples.isEmpty()) {
+ ComPtr<IMFSample> sample = scheduledSamples.dequeue();
+
+        // Process the next sample in the queue. If the sample is not ready
+        // for presentation, the value returned in wait is > 0, which
+        // means the scheduler should sleep for that amount of time.
+ if (isSampleReadyToPresent(sample.Get(), &wait)) {
+ m_presenter->presentSample(sample.Get());
+ continue;
+ }
+
+ if (wait > 0) {
+ // return the sample to scheduler
+ scheduledSamples.prepend(sample);
+ break;
+ }
+ }
+
+ m_mutex.lock();
+ scheduledSamples.append(std::move(m_scheduledSamples));
+ m_scheduledSamples.swap(scheduledSamples);
+ m_mutex.unlock();
+
+ // If the wait time is zero, it means we stopped because the queue is
+ // empty (or an error occurred). Set the wait time to infinite; this will
+ // make the scheduler thread sleep until it gets another thread message.
+ if (wait == 0)
+ wait = INFINITE;
+
+ *nextSleep = wait;
+ return hr;
+}
+
+bool Scheduler::isSampleReadyToPresent(IMFSample *sample, LONG *pNextSleep) const
+{
+ *pNextSleep = 0;
+ if (!m_clock)
+ return true;
+
+ MFTIME hnsPresentationTime = 0;
+ MFTIME hnsTimeNow = 0;
+ MFTIME hnsSystemTime = 0;
+
+ // Get the sample's time stamp. It is valid for a sample to
+ // have no time stamp.
+ HRESULT hr = sample->GetSampleTime(&hnsPresentationTime);
+
+ // Get the clock time. (But if the sample does not have a time stamp,
+ // we don't need the clock time.)
+ if (SUCCEEDED(hr))
+ hr = m_clock->GetCorrelatedTime(0, &hnsTimeNow, &hnsSystemTime);
+
+ // Calculate the time until the sample's presentation time.
+ // A negative value means the sample is late.
+ MFTIME hnsDelta = hnsPresentationTime - hnsTimeNow;
+ if (m_playbackRate < 0) {
+ // For reverse playback, the clock runs backward. Therefore, the
+ // delta is reversed.
+ hnsDelta = - hnsDelta;
+ }
+
+ if (hnsDelta < - m_perFrame_1_4th) {
+ // This sample is late - skip.
+ return false;
+ } else if (hnsDelta > (3 * m_perFrame_1_4th)) {
+ // This sample came too early - reschedule
+ *pNextSleep = MFTimeToMsec(hnsDelta - (3 * m_perFrame_1_4th));
+
+            // Adjust the sleep time for the clock rate. (The presentation clock
+            // runs at m_playbackRate, but sleeping uses the system clock.)
+ if (m_playbackRate != 0)
+ *pNextSleep = (LONG)(*pNextSleep / qFabs(m_playbackRate));
+ return *pNextSleep == 0;
+ } else {
+ // This sample can be presented right now
+ return true;
+ }
+}
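+// Worked example for the window above: at the default 30 fps the average
+// frame duration is 333333 hns, so m_perFrame_1_4th is 83333 hns (~8.3 ms).
+// A sample is presented if its timestamp lies within [-1/4, +3/4] of a frame
+// around the clock time; anything later is dropped, anything earlier is
+// rescheduled with a proportional sleep.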
+
+DWORD WINAPI Scheduler::schedulerThreadProc(LPVOID parameter)
+{
+ Scheduler* scheduler = reinterpret_cast<Scheduler*>(parameter);
+ if (!scheduler)
+ return -1;
+ return scheduler->schedulerThreadProcPrivate();
+}
+
+DWORD Scheduler::schedulerThreadProcPrivate()
+{
+ HRESULT hr = S_OK;
+ MSG msg;
+ LONG wait = INFINITE;
+ bool exitThread = false;
+
+ // Force the system to create a message queue for this thread.
+ // (See MSDN documentation for PostThreadMessage.)
+ PeekMessage(&msg, NULL, WM_USER, WM_USER, PM_NOREMOVE);
+
+ // Signal to the scheduler that the thread is ready.
+ SetEvent(m_threadReadyEvent.get());
+
+ while (!exitThread) {
+ // Wait for a thread message OR until the wait time expires.
+ DWORD result = MsgWaitForMultipleObjects(0, NULL, FALSE, wait, QS_POSTMESSAGE);
+
+ if (result == WAIT_TIMEOUT) {
+ // If we timed out, then process the samples in the queue
+ hr = processSamplesInQueue(&wait);
+ if (FAILED(hr))
+ exitThread = true;
+ }
+
+        // Avoid a redundant pass over the queue when several Schedule
+        // messages have piled up and the previous pass already reported an
+        // infinite wait.
+        bool processSamples = true;
+
+        while (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) {
+
+ switch (msg.message) {
+ case Terminate:
+ exitThread = true;
+ break;
+ case Flush:
+ // Flushing: Clear the sample queue and set the event.
+ m_mutex.lock();
+ m_scheduledSamples.clear();
+ m_mutex.unlock();
+ wait = INFINITE;
+ SetEvent(m_flushEvent.get());
+ break;
+ case Schedule:
+ // Process as many samples as we can.
+ if (processSamples) {
+ hr = processSamplesInQueue(&wait);
+ if (FAILED(hr))
+ exitThread = true;
+ processSamples = (wait != (LONG)INFINITE);
+ }
+ break;
+ }
+ }
+
+ }
+
+ return (SUCCEEDED(hr) ? 0 : 1);
+}
+
+
+SamplePool::SamplePool()
+ : m_initialized(false)
+{
+}
+
+SamplePool::~SamplePool()
+{
+ clear();
+}
+
+ComPtr<IMFSample> SamplePool::takeSample()
+{
+ QMutexLocker locker(&m_mutex);
+
+ Q_ASSERT(m_initialized);
+ if (!m_initialized) {
+ qCWarning(qLcEvrCustomPresenter) << "SamplePool is not initialized yet";
+ return nullptr;
+ }
+
+ if (m_videoSampleQueue.isEmpty()) {
+ qCDebug(qLcEvrCustomPresenter) << "SamplePool is empty";
+ return nullptr;
+ }
+
+ // Get a sample from the allocated queue.
+
+    // It doesn't matter whether we take a sample from the head or the tail of
+    // the list, but when it comes back we re-insert it at the opposite end
+    // (see returnSample).
+
+ return m_videoSampleQueue.takeFirst();
+}
+
+void SamplePool::returnSample(const ComPtr<IMFSample> &sample)
+{
+ QMutexLocker locker(&m_mutex);
+
+ Q_ASSERT(m_initialized);
+ if (!m_initialized) {
+ qCWarning(qLcEvrCustomPresenter) << "SamplePool is not initialized yet";
+ return;
+ }
+
+ m_videoSampleQueue.append(sample);
+}
+
+HRESULT SamplePool::initialize(QList<ComPtr<IMFSample>> &&samples)
+{
+ QMutexLocker locker(&m_mutex);
+
+ if (m_initialized)
+ return MF_E_INVALIDREQUEST;
+
+ // Move these samples into our allocated queue.
+ m_videoSampleQueue.append(std::move(samples));
+
+ m_initialized = true;
+
+ return S_OK;
+}
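+// Note: the pool is filled exactly once with the samples handed to
+// initialize() and then only recycles them via takeSample()/returnSample(),
+// so steady-state playback should not allocate new video samples.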
+
+HRESULT SamplePool::clear()
+{
+ QMutexLocker locker(&m_mutex);
+
+ m_videoSampleQueue.clear();
+ m_initialized = false;
+
+ return S_OK;
+}
+
+
+EVRCustomPresenter::EVRCustomPresenter(QVideoSink *sink)
+ : QObject()
+ , m_sampleFreeCB(this, &EVRCustomPresenter::onSampleFree)
+ , m_refCount(1)
+ , m_renderState(RenderShutdown)
+ , m_scheduler(this)
+ , m_tokenCounter(0)
+ , m_sampleNotify(false)
+ , m_prerolled(false)
+ , m_endStreaming(false)
+ , m_playbackRate(1.0f)
+ , m_presentEngine(new D3DPresentEngine(sink))
+ , m_mediaType(0)
+ , m_videoSink(0)
+ , m_canRenderToSurface(false)
+ , m_positionOffset(0)
+{
+ // Initial source rectangle = (0,0,1,1)
+ m_sourceRect.top = 0;
+ m_sourceRect.left = 0;
+ m_sourceRect.bottom = 1;
+ m_sourceRect.right = 1;
+
+ setSink(sink);
+}
+
+EVRCustomPresenter::~EVRCustomPresenter()
+{
+ m_scheduler.flush();
+ m_scheduler.stopScheduler();
+ m_samplePool.clear();
+
+ delete m_presentEngine;
+}
+
+HRESULT EVRCustomPresenter::QueryInterface(REFIID riid, void ** ppvObject)
+{
+ if (!ppvObject)
+ return E_POINTER;
+ if (riid == IID_IMFGetService) {
+ *ppvObject = static_cast<IMFGetService*>(this);
+ } else if (riid == IID_IMFTopologyServiceLookupClient) {
+ *ppvObject = static_cast<IMFTopologyServiceLookupClient*>(this);
+ } else if (riid == IID_IMFVideoDeviceID) {
+ *ppvObject = static_cast<IMFVideoDeviceID*>(this);
+ } else if (riid == IID_IMFVideoPresenter) {
+ *ppvObject = static_cast<IMFVideoPresenter*>(this);
+ } else if (riid == IID_IMFRateSupport) {
+ *ppvObject = static_cast<IMFRateSupport*>(this);
+ } else if (riid == IID_IUnknown) {
+ *ppvObject = static_cast<IUnknown*>(static_cast<IMFGetService*>(this));
+ } else if (riid == IID_IMFClockStateSink) {
+ *ppvObject = static_cast<IMFClockStateSink*>(this);
+ } else {
+ *ppvObject = NULL;
+ return E_NOINTERFACE;
+ }
+ AddRef();
+ return S_OK;
+}
+
+ULONG EVRCustomPresenter::AddRef()
+{
+ return InterlockedIncrement(&m_refCount);
+}
+
+ULONG EVRCustomPresenter::Release()
+{
+ ULONG uCount = InterlockedDecrement(&m_refCount);
+ if (uCount == 0)
+ deleteLater();
+ return uCount;
+}
+
+HRESULT EVRCustomPresenter::GetService(REFGUID guidService, REFIID riid, LPVOID *ppvObject)
+{
+ HRESULT hr = S_OK;
+
+ if (!ppvObject)
+ return E_POINTER;
+
+ // The only service GUID that we support is MR_VIDEO_RENDER_SERVICE.
+ if (guidService != MR_VIDEO_RENDER_SERVICE)
+ return MF_E_UNSUPPORTED_SERVICE;
+
+ // First try to get the service interface from the D3DPresentEngine object.
+ hr = m_presentEngine->getService(guidService, riid, ppvObject);
+ if (FAILED(hr))
+ // Next, check if this object supports the interface.
+ hr = QueryInterface(riid, ppvObject);
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::GetDeviceID(IID* deviceID)
+{
+ if (!deviceID)
+ return E_POINTER;
+
+ *deviceID = IID_IDirect3DDevice9;
+
+ return S_OK;
+}
+
+HRESULT EVRCustomPresenter::InitServicePointers(IMFTopologyServiceLookup *lookup)
+{
+ if (!lookup)
+ return E_POINTER;
+
+ HRESULT hr = S_OK;
+ DWORD objectCount = 0;
+
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ // Do not allow initializing when playing or paused.
+ if (isActive())
+ return MF_E_INVALIDREQUEST;
+
+ m_clock.Reset();
+ m_mixer.Reset();
+ m_mediaEventSink.Reset();
+
+ // Ask for the clock. Optional, because the EVR might not have a clock.
+ objectCount = 1;
+
+ lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
+ MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_clock),
+ &objectCount
+ );
+
+ // Ask for the mixer. (Required.)
+ objectCount = 1;
+
+ hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
+ MR_VIDEO_MIXER_SERVICE, IID_PPV_ARGS(&m_mixer),
+ &objectCount
+ );
+
+ if (FAILED(hr))
+ return hr;
+
+ // Make sure that we can work with this mixer.
+ hr = configureMixer(m_mixer.Get());
+ if (FAILED(hr))
+ return hr;
+
+ // Ask for the EVR's event-sink interface. (Required.)
+ objectCount = 1;
+
+ hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
+ MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_mediaEventSink),
+ &objectCount
+ );
+
+ if (SUCCEEDED(hr))
+ m_renderState = RenderStopped;
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::ReleaseServicePointers()
+{
+ // Enter the shut-down state.
+ m_mutex.lock();
+
+ m_renderState = RenderShutdown;
+
+ m_mutex.unlock();
+
+ // Flush any samples that were scheduled.
+ flush();
+
+ // Clear the media type and release related resources.
+ setMediaType(NULL);
+
+ // Release all services that were acquired from InitServicePointers.
+ m_clock.Reset();
+ m_mixer.Reset();
+ m_mediaEventSink.Reset();
+
+ return S_OK;
+}
+
+bool EVRCustomPresenter::isValid() const
+{
+ return m_presentEngine->isValid() && m_canRenderToSurface;
+}
+
+HRESULT EVRCustomPresenter::ProcessMessage(MFVP_MESSAGE_TYPE message, ULONG_PTR param)
+{
+ HRESULT hr = S_OK;
+
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ hr = checkShutdown();
+ if (FAILED(hr))
+ return hr;
+
+ switch (message) {
+ // Flush all pending samples.
+ case MFVP_MESSAGE_FLUSH:
+ hr = flush();
+ break;
+
+ // Renegotiate the media type with the mixer.
+ case MFVP_MESSAGE_INVALIDATEMEDIATYPE:
+ hr = renegotiateMediaType();
+ break;
+
+ // The mixer received a new input sample.
+ case MFVP_MESSAGE_PROCESSINPUTNOTIFY:
+ hr = processInputNotify();
+ break;
+
+ // Streaming is about to start.
+ case MFVP_MESSAGE_BEGINSTREAMING:
+ hr = beginStreaming();
+ break;
+
+ // Streaming has ended. (The EVR has stopped.)
+ case MFVP_MESSAGE_ENDSTREAMING:
+ hr = endStreaming();
+ break;
+
+ // All input streams have ended.
+ case MFVP_MESSAGE_ENDOFSTREAM:
+ // Set the EOS flag.
+ m_endStreaming = true;
+ // Check if it's time to send the EC_COMPLETE event to the EVR.
+ hr = checkEndOfStream();
+ break;
+
+ // Frame-stepping is starting.
+ case MFVP_MESSAGE_STEP:
+ hr = prepareFrameStep(DWORD(param));
+ break;
+
+ // Cancels frame-stepping.
+ case MFVP_MESSAGE_CANCELSTEP:
+ hr = cancelFrameStep();
+ break;
+
+ default:
+ hr = E_INVALIDARG; // Unknown message. This case should never occur.
+ break;
+ }
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::GetCurrentMediaType(IMFVideoMediaType **mediaType)
+{
+ HRESULT hr = S_OK;
+
+ if (!mediaType)
+ return E_POINTER;
+
+ *mediaType = NULL;
+
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ hr = checkShutdown();
+ if (FAILED(hr))
+ return hr;
+
+ if (!m_mediaType)
+ return MF_E_NOT_INITIALIZED;
+
+ return m_mediaType->QueryInterface(IID_PPV_ARGS(mediaType));
+}
+
+HRESULT EVRCustomPresenter::OnClockStart(MFTIME, LONGLONG clockStartOffset)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ // We cannot start after shutdown.
+ HRESULT hr = checkShutdown();
+ if (FAILED(hr))
+ return hr;
+
+ // Check if the clock is already active (not stopped).
+ if (isActive()) {
+ m_renderState = RenderStarted;
+
+ // If the clock position changes while the clock is active, it
+ // is a seek request. We need to flush all pending samples.
+ if (clockStartOffset != QMM_PRESENTATION_CURRENT_POSITION)
+ flush();
+ } else {
+ m_renderState = RenderStarted;
+
+ // The clock has started from the stopped state.
+
+ // Possibly we are in the middle of frame-stepping OR have samples waiting
+ // in the frame-step queue. Deal with these two cases first:
+ hr = startFrameStep();
+ if (FAILED(hr))
+ return hr;
+ }
+
+ // Now try to get new output samples from the mixer.
+ processOutputLoop();
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::OnClockRestart(MFTIME)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ HRESULT hr = checkShutdown();
+ if (FAILED(hr))
+ return hr;
+
+ // The EVR calls OnClockRestart only while paused.
+
+ m_renderState = RenderStarted;
+
+ // Possibly we are in the middle of frame-stepping OR we have samples waiting
+ // in the frame-step queue. Deal with these two cases first:
+ hr = startFrameStep();
+ if (FAILED(hr))
+ return hr;
+
+ // Now resume the presentation loop.
+ processOutputLoop();
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::OnClockStop(MFTIME)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ HRESULT hr = checkShutdown();
+ if (FAILED(hr))
+ return hr;
+
+ if (m_renderState != RenderStopped) {
+ m_renderState = RenderStopped;
+ flush();
+
+ // If we are in the middle of frame-stepping, cancel it now.
+ if (m_frameStep.state != FrameStepNone)
+ cancelFrameStep();
+ }
+
+ return S_OK;
+}
+
+HRESULT EVRCustomPresenter::OnClockPause(MFTIME)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ // We cannot pause the clock after shutdown.
+ HRESULT hr = checkShutdown();
+
+ if (SUCCEEDED(hr))
+ m_renderState = RenderPaused;
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::OnClockSetRate(MFTIME, float rate)
+{
+ // Note:
+ // The presenter reports its maximum rate through the IMFRateSupport interface.
+ // Here, we assume that the EVR honors the maximum rate.
+
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ HRESULT hr = checkShutdown();
+ if (FAILED(hr))
+ return hr;
+
+ // If the rate is changing from zero (scrubbing) to non-zero, cancel the
+ // frame-step operation.
+ if ((m_playbackRate == 0.0f) && (rate != 0.0f)) {
+ cancelFrameStep();
+ m_frameStep.samples.clear();
+ }
+
+ m_playbackRate = rate;
+
+ // Tell the scheduler about the new rate.
+ m_scheduler.setClockRate(rate);
+
+ return S_OK;
+}
+
+HRESULT EVRCustomPresenter::GetSlowestRate(MFRATE_DIRECTION, BOOL, float *rate)
+{
+ if (!rate)
+ return E_POINTER;
+
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ HRESULT hr = checkShutdown();
+
+ if (SUCCEEDED(hr)) {
+ // There is no minimum playback rate, so the minimum is zero.
+ *rate = 0;
+ }
+
+    return hr;
+}
+
+HRESULT EVRCustomPresenter::GetFastestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate)
+{
+ if (!rate)
+ return E_POINTER;
+
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ float maxRate = 0.0f;
+
+ HRESULT hr = checkShutdown();
+ if (FAILED(hr))
+ return hr;
+
+ // Get the maximum *forward* rate.
+ maxRate = getMaxRate(thin);
+
+ // For reverse playback, it's the negative of maxRate.
+ if (direction == MFRATE_REVERSE)
+ maxRate = -maxRate;
+
+ *rate = maxRate;
+
+ return S_OK;
+}
+
+HRESULT EVRCustomPresenter::IsRateSupported(BOOL thin, float rate, float *nearestSupportedRate)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ float maxRate = 0.0f;
+ float nearestRate = rate; // If we support rate, that is the nearest.
+
+ HRESULT hr = checkShutdown();
+ if (FAILED(hr))
+ return hr;
+
+ // Find the maximum forward rate.
+ // Note: We have no minimum rate (that is, we support anything down to 0).
+ maxRate = getMaxRate(thin);
+
+ if (qFabs(rate) > maxRate) {
+ // The (absolute) requested rate exceeds the maximum rate.
+ hr = MF_E_UNSUPPORTED_RATE;
+
+ // The nearest supported rate is maxRate.
+ nearestRate = maxRate;
+ if (rate < 0) {
+ // Negative for reverse playback.
+ nearestRate = -nearestRate;
+ }
+ }
+
+ // Return the nearest supported rate.
+ if (nearestSupportedRate)
+ *nearestSupportedRate = nearestRate;
+
+ return hr;
+}
+
+void EVRCustomPresenter::supportedFormatsChanged()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_mutex);
+
+ m_canRenderToSurface = false;
+
+ // check if we can render to the surface (compatible formats)
+ if (m_videoSink) {
+ for (int f = 0; f < QVideoFrameFormat::NPixelFormats; ++f) {
+ // ### set a better preference order
+ QVideoFrameFormat::PixelFormat format = QVideoFrameFormat::PixelFormat(f);
+ if (SUCCEEDED(m_presentEngine->checkFormat(qt_evr_D3DFormatFromPixelFormat(format)))) {
+ m_canRenderToSurface = true;
+ break;
+ }
+ }
+ }
+
+ // TODO: if media type already set, renegotiate?
+}
+
+void EVRCustomPresenter::setSink(QVideoSink *sink)
+{
+ m_mutex.lock();
+ m_videoSink = sink;
+ m_presentEngine->setSink(sink);
+ m_mutex.unlock();
+
+ supportedFormatsChanged();
+}
+
+void EVRCustomPresenter::setCropRect(QRect cropRect)
+{
+ m_mutex.lock();
+ m_cropRect = cropRect;
+ m_mutex.unlock();
+}
+
+HRESULT EVRCustomPresenter::configureMixer(IMFTransform *mixer)
+{
+ // Set the zoom rectangle (ie, the source clipping rectangle).
+    // Set the zoom rectangle (i.e., the source clipping rectangle).
+}
+
+HRESULT EVRCustomPresenter::renegotiateMediaType()
+{
+ HRESULT hr = S_OK;
+ bool foundMediaType = false;
+
+ IMFMediaType *mixerType = NULL;
+ IMFMediaType *optimalType = NULL;
+
+ if (!m_mixer)
+ return MF_E_INVALIDREQUEST;
+
+ // Loop through all of the mixer's proposed output types.
+ DWORD typeIndex = 0;
+ while (!foundMediaType && (hr != MF_E_NO_MORE_TYPES)) {
+ qt_evr_safe_release(&mixerType);
+ qt_evr_safe_release(&optimalType);
+
+ // Step 1. Get the next media type supported by mixer.
+ hr = m_mixer->GetOutputAvailableType(0, typeIndex++, &mixerType);
+ if (FAILED(hr))
+ break;
+
+ // From now on, if anything in this loop fails, try the next type,
+ // until we succeed or the mixer runs out of types.
+
+ // Step 2. Check if we support this media type.
+ if (SUCCEEDED(hr))
+ hr = isMediaTypeSupported(mixerType);
+
+ // Step 3. Adjust the mixer's type to match our requirements.
+ if (SUCCEEDED(hr))
+ hr = createOptimalVideoType(mixerType, &optimalType);
+
+ // Step 4. Check if the mixer will accept this media type.
+ if (SUCCEEDED(hr))
+ hr = m_mixer->SetOutputType(0, optimalType, MFT_SET_TYPE_TEST_ONLY);
+
+ // Step 5. Try to set the media type on ourselves.
+ if (SUCCEEDED(hr))
+ hr = setMediaType(optimalType);
+
+ // Step 6. Set output media type on mixer.
+ if (SUCCEEDED(hr)) {
+ hr = m_mixer->SetOutputType(0, optimalType, 0);
+
+ // If something went wrong, clear the media type.
+ if (FAILED(hr))
+ setMediaType(NULL);
+ }
+
+ if (SUCCEEDED(hr))
+ foundMediaType = true;
+ }
+
+ qt_evr_safe_release(&mixerType);
+ qt_evr_safe_release(&optimalType);
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::flush()
+{
+ m_prerolled = false;
+
+ // The scheduler might have samples that are waiting for
+ // their presentation time. Tell the scheduler to flush.
+
+    // This call blocks until the scheduler thread discards all scheduled samples.
+ m_scheduler.flush();
+
+ // Flush the frame-step queue.
+ m_frameStep.samples.clear();
+
+ if (m_renderState == RenderStopped && m_videoSink) {
+ // Repaint with black.
+ presentSample(nullptr);
+ }
+
+ return S_OK;
+}
+
+HRESULT EVRCustomPresenter::processInputNotify()
+{
+ HRESULT hr = S_OK;
+
+ // Set the flag that says the mixer has a new sample.
+ m_sampleNotify = true;
+
+ if (!m_mediaType) {
+ // We don't have a valid media type yet.
+ hr = MF_E_TRANSFORM_TYPE_NOT_SET;
+ } else {
+ // Try to process an output sample.
+ processOutputLoop();
+ }
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::beginStreaming()
+{
+ HRESULT hr = S_OK;
+
+ // Start the scheduler thread.
+ hr = m_scheduler.startScheduler(m_clock);
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::endStreaming()
+{
+ HRESULT hr = S_OK;
+
+ // Stop the scheduler thread.
+ hr = m_scheduler.stopScheduler();
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::checkEndOfStream()
+{
+ if (!m_endStreaming) {
+ // The EVR did not send the MFVP_MESSAGE_ENDOFSTREAM message.
+ return S_OK;
+ }
+
+ if (m_sampleNotify) {
+ // The mixer still has input.
+ return S_OK;
+ }
+
+ if (m_scheduler.areSamplesScheduled()) {
+ // Samples are still scheduled for rendering.
+ return S_OK;
+ }
+
+ // Everything is complete. Now we can tell the EVR that we are done.
+ notifyEvent(EC_COMPLETE, (LONG_PTR)S_OK, 0);
+ m_endStreaming = false;
+
+ stopSurface();
+ return S_OK;
+}
+
+HRESULT EVRCustomPresenter::prepareFrameStep(DWORD steps)
+{
+ HRESULT hr = S_OK;
+
+ // Cache the step count.
+ m_frameStep.steps += steps;
+
+ // Set the frame-step state.
+ m_frameStep.state = FrameStepWaitingStart;
+
+    // If the clock is already running, we can start frame-stepping now.
+ // Otherwise, we will start when the clock starts.
+ if (m_renderState == RenderStarted)
+ hr = startFrameStep();
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::startFrameStep()
+{
+ if (m_frameStep.state == FrameStepWaitingStart) {
+ // We have a frame-step request, and are waiting for the clock to start.
+ // Set the state to "pending," which means we are waiting for samples.
+ m_frameStep.state = FrameStepPending;
+
+ // If the frame-step queue already has samples, process them now.
+ while (!m_frameStep.samples.isEmpty() && (m_frameStep.state == FrameStepPending)) {
+ const ComPtr<IMFSample> sample = m_frameStep.samples.takeFirst();
+
+ const HRESULT hr = deliverFrameStepSample(sample.Get());
+ if (FAILED(hr))
+ return hr;
+
+ // We break from this loop when:
+ // (a) the frame-step queue is empty, or
+ // (b) the frame-step operation is complete.
+ }
+ } else if (m_frameStep.state == FrameStepNone) {
+ // We are not frame stepping. Therefore, if the frame-step queue has samples,
+ // we need to process them normally.
+ while (!m_frameStep.samples.isEmpty()) {
+ const ComPtr<IMFSample> sample = m_frameStep.samples.takeFirst();
+
+ const HRESULT hr = deliverSample(sample.Get());
+ if (FAILED(hr))
+ return hr;
+ }
+ }
+
+ return S_OK;
+}
+
+HRESULT EVRCustomPresenter::completeFrameStep(const ComPtr<IMFSample> &sample)
+{
+ HRESULT hr = S_OK;
+ MFTIME sampleTime = 0;
+ MFTIME systemTime = 0;
+
+ // Update our state.
+ m_frameStep.state = FrameStepComplete;
+ m_frameStep.sampleNoRef = 0;
+
+ // Notify the EVR that the frame-step is complete.
+ notifyEvent(EC_STEP_COMPLETE, FALSE, 0); // FALSE = completed (not cancelled)
+
+ // If we are scrubbing (rate == 0), also send the "scrub time" event.
+ if (isScrubbing()) {
+ // Get the time stamp from the sample.
+ hr = sample->GetSampleTime(&sampleTime);
+ if (FAILED(hr)) {
+ // No time stamp. Use the current presentation time.
+ if (m_clock)
+ m_clock->GetCorrelatedTime(0, &sampleTime, &systemTime);
+
+ hr = S_OK; // (Not an error condition.)
+ }
+
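+        // The 64-bit sample time is split across the two 32-bit event
+        // parameters (low part in param1, high part in param2).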
+ notifyEvent(EC_SCRUB_TIME, DWORD(sampleTime), DWORD(((sampleTime) >> 32) & 0xffffffff));
+ }
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::cancelFrameStep()
+{
+ FrameStepState oldState = m_frameStep.state;
+
+ m_frameStep.state = FrameStepNone;
+ m_frameStep.steps = 0;
+ m_frameStep.sampleNoRef = 0;
+ // Don't clear the frame-step queue yet, because we might frame step again.
+
+ if (oldState > FrameStepNone && oldState < FrameStepComplete) {
+ // We were in the middle of frame-stepping when it was cancelled.
+ // Notify the EVR.
+ notifyEvent(EC_STEP_COMPLETE, TRUE, 0); // TRUE = cancelled
+ }
+ return S_OK;
+}
+
+HRESULT EVRCustomPresenter::createOptimalVideoType(IMFMediaType *proposedType, IMFMediaType **optimalType)
+{
+ HRESULT hr = S_OK;
+
+ RECT rcOutput;
+ ZeroMemory(&rcOutput, sizeof(rcOutput));
+
+ MFVideoArea displayArea;
+ ZeroMemory(&displayArea, sizeof(displayArea));
+
+ IMFMediaType *mtOptimal = NULL;
+
+ UINT64 size;
+ int width;
+ int height;
+
+ // Clone the proposed type.
+
+ hr = MFCreateMediaType(&mtOptimal);
+ if (FAILED(hr))
+ goto done;
+
+ hr = proposedType->CopyAllItems(mtOptimal);
+ if (FAILED(hr))
+ goto done;
+
+ // Modify the new type.
+
+    hr = proposedType->GetUINT64(MF_MT_FRAME_SIZE, &size);
+    if (FAILED(hr))
+        goto done;
+
+    width = int(HI32(size));
+    height = int(LO32(size));
+
+ if (m_cropRect.isValid()) {
+ rcOutput.left = m_cropRect.x();
+ rcOutput.top = m_cropRect.y();
+ rcOutput.right = m_cropRect.x() + m_cropRect.width();
+ rcOutput.bottom = m_cropRect.y() + m_cropRect.height();
+
+ m_sourceRect.left = float(m_cropRect.x()) / width;
+ m_sourceRect.top = float(m_cropRect.y()) / height;
+ m_sourceRect.right = float(m_cropRect.x() + m_cropRect.width()) / width;
+ m_sourceRect.bottom = float(m_cropRect.y() + m_cropRect.height()) / height;
+
+ if (m_mixer)
+ configureMixer(m_mixer.Get());
+ } else {
+ rcOutput.left = 0;
+ rcOutput.top = 0;
+ rcOutput.right = width;
+ rcOutput.bottom = height;
+ }
+
+ // Set the geometric aperture, and disable pan/scan.
+ displayArea = qt_evr_makeMFArea(0, 0, rcOutput.right - rcOutput.left,
+ rcOutput.bottom - rcOutput.top);
+
+ hr = mtOptimal->SetUINT32(MF_MT_PAN_SCAN_ENABLED, FALSE);
+ if (FAILED(hr))
+ goto done;
+
+ hr = mtOptimal->SetBlob(MF_MT_GEOMETRIC_APERTURE, reinterpret_cast<UINT8*>(&displayArea),
+ sizeof(displayArea));
+ if (FAILED(hr))
+ goto done;
+
+    // Set the pan/scan aperture and the minimum display aperture. We don't care
+    // about them per se, but the mixer will reject the type if these exceed the
+    // frame dimensions.
+ hr = mtOptimal->SetBlob(MF_MT_PAN_SCAN_APERTURE, reinterpret_cast<UINT8*>(&displayArea),
+ sizeof(displayArea));
+ if (FAILED(hr))
+ goto done;
+
+ hr = mtOptimal->SetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE, reinterpret_cast<UINT8*>(&displayArea),
+ sizeof(displayArea));
+ if (FAILED(hr))
+ goto done;
+
+ // Return the pointer to the caller.
+ *optimalType = mtOptimal;
+ (*optimalType)->AddRef();
+
+done:
+ qt_evr_safe_release(&mtOptimal);
+    return hr;
+}
+
+HRESULT EVRCustomPresenter::setMediaType(IMFMediaType *mediaType)
+{
+ // Note: mediaType can be NULL (to clear the type)
+
+ // Clearing the media type is allowed in any state (including shutdown).
+ if (!mediaType) {
+ stopSurface();
+ m_mediaType.Reset();
+ releaseResources();
+ return S_OK;
+ }
+
+ MFRatio fps = { 0, 0 };
+ QList<ComPtr<IMFSample>> sampleQueue;
+
+ // Cannot set the media type after shutdown.
+ HRESULT hr = checkShutdown();
+ if (FAILED(hr))
+ goto done;
+
+ // Check if the new type is actually different.
+ // Note: This function safely handles NULL input parameters.
+ if (qt_evr_areMediaTypesEqual(m_mediaType.Get(), mediaType))
+ goto done; // Nothing more to do.
+
+ // We're really changing the type. First get rid of the old type.
+ m_mediaType.Reset();
+ releaseResources();
+
+ // Initialize the presenter engine with the new media type.
+ // The presenter engine allocates the samples.
+
+ hr = m_presentEngine->createVideoSamples(mediaType, sampleQueue, m_cropRect.size());
+ if (FAILED(hr))
+ goto done;
+
+ // Mark each sample with our token counter. If this batch of samples becomes
+ // invalid, we increment the counter, so that we know they should be discarded.
+    for (const auto &sample : std::as_const(sampleQueue)) {
+ hr = sample->SetUINT32(MFSamplePresenter_SampleCounter, m_tokenCounter);
+ if (FAILED(hr))
+ goto done;
+ }
+
+ // Add the samples to the sample pool.
+ hr = m_samplePool.initialize(std::move(sampleQueue));
+ if (FAILED(hr))
+ goto done;
+
+ // Set the frame rate on the scheduler.
+ if (SUCCEEDED(qt_evr_getFrameRate(mediaType, &fps)) && (fps.Numerator != 0) && (fps.Denominator != 0)) {
+ m_scheduler.setFrameRate(fps);
+ } else {
+ // NOTE: The mixer's proposed type might not have a frame rate, in which case
+ // we'll use an arbitrary default. (Although it's unlikely the video source
+ // does not have a frame rate.)
+ m_scheduler.setFrameRate(g_DefaultFrameRate);
+ }
+
+    // Store the media type. The ComPtr assignment takes its own reference,
+    // so no explicit AddRef is needed here.
+    m_mediaType = mediaType;
+
+ startSurface();
+
+done:
+ if (FAILED(hr))
+ releaseResources();
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::isMediaTypeSupported(IMFMediaType *proposed)
+{
+ D3DFORMAT d3dFormat = D3DFMT_UNKNOWN;
+ BOOL compressed = FALSE;
+ MFVideoInterlaceMode interlaceMode = MFVideoInterlace_Unknown;
+ MFVideoArea videoCropArea;
+ UINT32 width = 0, height = 0;
+
+ // Validate the format.
+ HRESULT hr = qt_evr_getFourCC(proposed, reinterpret_cast<DWORD*>(&d3dFormat));
+ if (FAILED(hr))
+ return hr;
+
+ QVideoFrameFormat::PixelFormat pixelFormat = pixelFormatFromMediaType(proposed);
+ if (pixelFormat == QVideoFrameFormat::Format_Invalid)
+ return MF_E_INVALIDMEDIATYPE;
+
+ // Reject compressed media types.
+ hr = proposed->IsCompressedFormat(&compressed);
+ if (FAILED(hr))
+ return hr;
+
+ if (compressed)
+ return MF_E_INVALIDMEDIATYPE;
+
+ // The D3DPresentEngine checks whether surfaces can be created using this format
+ hr = m_presentEngine->checkFormat(d3dFormat);
+ if (FAILED(hr))
+ return hr;
+
+ // Reject interlaced formats.
+ hr = proposed->GetUINT32(MF_MT_INTERLACE_MODE, reinterpret_cast<UINT32*>(&interlaceMode));
+ if (FAILED(hr))
+ return hr;
+
+ if (interlaceMode != MFVideoInterlace_Progressive)
+ return MF_E_INVALIDMEDIATYPE;
+
+ hr = MFGetAttributeSize(proposed, MF_MT_FRAME_SIZE, &width, &height);
+ if (FAILED(hr))
+ return hr;
+
+ // Validate the various apertures (cropping regions) against the frame size.
+ // Any of these apertures may be unspecified in the media type, in which case
+ // we ignore it. We just want to reject invalid apertures.
+
+    if (SUCCEEDED(proposed->GetBlob(MF_MT_PAN_SCAN_APERTURE,
+                                    reinterpret_cast<UINT8*>(&videoCropArea),
+                                    sizeof(videoCropArea), nullptr))) {
+        hr = qt_evr_validateVideoArea(videoCropArea, width, height);
+        if (FAILED(hr))
+            return hr;
+    }
+    if (SUCCEEDED(proposed->GetBlob(MF_MT_GEOMETRIC_APERTURE,
+                                    reinterpret_cast<UINT8*>(&videoCropArea),
+                                    sizeof(videoCropArea), nullptr))) {
+        hr = qt_evr_validateVideoArea(videoCropArea, width, height);
+        if (FAILED(hr))
+            return hr;
+    }
+    if (SUCCEEDED(proposed->GetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE,
+                                    reinterpret_cast<UINT8*>(&videoCropArea),
+                                    sizeof(videoCropArea), nullptr))) {
+        hr = qt_evr_validateVideoArea(videoCropArea, width, height);
+        if (FAILED(hr))
+            return hr;
+    }
+ return hr;
+}
+
+void EVRCustomPresenter::processOutputLoop()
+{
+ HRESULT hr = S_OK;
+
+ // Process as many samples as possible.
+ while (hr == S_OK) {
+ // If the mixer doesn't have a new input sample, break from the loop.
+ if (!m_sampleNotify) {
+ hr = MF_E_TRANSFORM_NEED_MORE_INPUT;
+ break;
+ }
+
+ // Try to process a sample.
+ hr = processOutput();
+
+ // NOTE: ProcessOutput can return S_FALSE to indicate it did not
+ // process a sample. If so, break out of the loop.
+ }
+
+ if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
+ // The mixer has run out of input data. Check for end-of-stream.
+ checkEndOfStream();
+ }
+}
+
+HRESULT EVRCustomPresenter::processOutput()
+{
+ // If the clock is not running, we present the first sample,
+ // and then don't present any more until the clock starts.
+ if ((m_renderState != RenderStarted) && m_prerolled)
+ return S_FALSE;
+
+ // Make sure we have a pointer to the mixer.
+ if (!m_mixer)
+ return MF_E_INVALIDREQUEST;
+
+ // Try to get a free sample from the video sample pool.
+ ComPtr<IMFSample> sample = m_samplePool.takeSample();
+ if (!sample)
+ return S_FALSE; // No free samples. Try again when a sample is released.
+
+ // From now on, we have a valid video sample pointer, where the mixer will
+ // write the video data.
+
+ LONGLONG mixerStartTime = 0, mixerEndTime = 0;
+ MFTIME systemTime = 0;
+
+ if (m_clock) {
+ // Latency: Record the starting time for ProcessOutput.
+ m_clock->GetCorrelatedTime(0, &mixerStartTime, &systemTime);
+ }
+
+ // Now we are ready to get an output sample from the mixer.
+ DWORD status = 0;
+ MFT_OUTPUT_DATA_BUFFER dataBuffer = {};
+ dataBuffer.pSample = sample.Get();
+ HRESULT hr = m_mixer->ProcessOutput(0, 1, &dataBuffer, &status);
+ // Important: Release any events returned from the ProcessOutput method.
+ qt_evr_safe_release(&dataBuffer.pEvents);
+
+ if (FAILED(hr)) {
+ // Return the sample to the pool.
+ m_samplePool.returnSample(sample);
+
+ // Handle some known error codes from ProcessOutput.
+ if (hr == MF_E_TRANSFORM_TYPE_NOT_SET) {
+ // The mixer's format is not set. Negotiate a new format.
+ hr = renegotiateMediaType();
+ } else if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
+ // There was a dynamic media type change. Clear our media type.
+ setMediaType(NULL);
+ } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
+ // The mixer needs more input.
+ // We have to wait for the mixer to get more input.
+ m_sampleNotify = false;
+ }
+
+ return hr;
+ }
+
+ // We got an output sample from the mixer.
+ if (m_clock) {
+ // Latency: Record the ending time for the ProcessOutput operation,
+ // and notify the EVR of the latency.
+
+ m_clock->GetCorrelatedTime(0, &mixerEndTime, &systemTime);
+
+ LONGLONG latencyTime = mixerEndTime - mixerStartTime;
+ notifyEvent(EC_PROCESSING_LATENCY, reinterpret_cast<LONG_PTR>(&latencyTime), 0);
+ }
+
+ // Set up notification for when the sample is released.
+ hr = trackSample(sample);
+ if (FAILED(hr))
+ return hr;
+
+ // Schedule the sample.
+ if (m_frameStep.state == FrameStepNone)
+ hr = deliverSample(sample);
+ else // We are frame-stepping
+ hr = deliverFrameStepSample(sample);
+
+ if (FAILED(hr))
+ return hr;
+
+ m_prerolled = true; // We have presented at least one sample now.
+ return S_OK;
+}
+
+HRESULT EVRCustomPresenter::deliverSample(const ComPtr<IMFSample> &sample)
+{
+ // If we are not actively playing, OR we are scrubbing (rate = 0),
+ // then we need to present the sample immediately. Otherwise,
+ // schedule it normally.
+
+ bool presentNow = ((m_renderState != RenderStarted) || isScrubbing());
+
+ HRESULT hr = m_scheduler.scheduleSample(sample, presentNow);
+
+ if (FAILED(hr)) {
+ // Notify the EVR that we have failed during streaming. The EVR will notify the
+ // pipeline.
+
+ notifyEvent(EC_ERRORABORT, hr, 0);
+ }
+
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::deliverFrameStepSample(const ComPtr<IMFSample> &sample)
+{
+ HRESULT hr = S_OK;
+ IUnknown *unk = NULL;
+
+ // For rate 0, discard any sample that ends earlier than the clock time.
+ if (isScrubbing() && m_clock && qt_evr_isSampleTimePassed(m_clock.Get(), sample.Get())) {
+ // Discard this sample.
+ } else if (m_frameStep.state >= FrameStepScheduled) {
+ // A frame was already submitted. Put this sample on the frame-step queue,
+ // in case we are asked to step to the next frame. If frame-stepping is
+ // cancelled, this sample will be processed normally.
+ m_frameStep.samples.append(sample);
+ } else {
+ // We're ready to frame-step.
+
+ // Decrement the number of steps.
+ if (m_frameStep.steps > 0)
+ m_frameStep.steps--;
+
+ if (m_frameStep.steps > 0) {
+ // This is not the last step. Discard this sample.
+ } else if (m_frameStep.state == FrameStepWaitingStart) {
+ // This is the right frame, but the clock hasn't started yet. Put the
+ // sample on the frame-step queue. When the clock starts, the sample
+ // will be processed.
+ m_frameStep.samples.append(sample);
+ } else {
+ // This is the right frame *and* the clock has started. Deliver this sample.
+ hr = deliverSample(sample);
+ if (FAILED(hr))
+ goto done;
+
+ // Query for IUnknown so that we can identify the sample later.
+ // Per COM rules, an object always returns the same pointer when QI'ed for IUnknown.
+ hr = sample->QueryInterface(IID_PPV_ARGS(&unk));
+ if (FAILED(hr))
+ goto done;
+
+ m_frameStep.sampleNoRef = reinterpret_cast<DWORD_PTR>(unk); // No add-ref.
+
+ // NOTE: We do not AddRef the IUnknown pointer, because that would prevent the
+ // sample from invoking the OnSampleFree callback after the sample is presented.
+ // We use this IUnknown pointer purely to identify the sample later; we never
+ // attempt to dereference the pointer.
+
+ m_frameStep.state = FrameStepScheduled;
+ }
+ }
+done:
+ qt_evr_safe_release(&unk);
+ return hr;
+}
+
+HRESULT EVRCustomPresenter::trackSample(const ComPtr<IMFSample> &sample)
+{
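+    // Ask the sample to call us back (via m_sampleFreeCB, which routes to
+    // onSampleFree) once every reference to it has been released.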
+ IMFTrackedSample *tracked = NULL;
+
+ HRESULT hr = sample->QueryInterface(IID_PPV_ARGS(&tracked));
+
+ if (SUCCEEDED(hr))
+ hr = tracked->SetAllocator(&m_sampleFreeCB, NULL);
+
+ qt_evr_safe_release(&tracked);
+ return hr;
+}
+
+void EVRCustomPresenter::releaseResources()
+{
+ // Increment the token counter to indicate that all existing video samples
+ // are "stale." As these samples get released, we'll dispose of them.
+ //
+ // Note: The token counter is required because the samples are shared
+ // between more than one thread, and they are returned to the presenter
+ // through an asynchronous callback (onSampleFree). Without the token, we
+ // might accidentally re-use a stale sample after the ReleaseResources
+ // method returns.
+
+ m_tokenCounter++;
+
+ flush();
+
+ m_samplePool.clear();
+
+ m_presentEngine->releaseResources();
+}
+
+HRESULT EVRCustomPresenter::onSampleFree(IMFAsyncResult *result)
+{
+ IUnknown *object = NULL;
+ IMFSample *sample = NULL;
+ IUnknown *unk = NULL;
+ UINT32 token;
+
+ // Get the sample from the async result object.
+ HRESULT hr = result->GetObject(&object);
+ if (FAILED(hr))
+ goto done;
+
+ hr = object->QueryInterface(IID_PPV_ARGS(&sample));
+ if (FAILED(hr))
+ goto done;
+
+    // If this sample was submitted for a frame-step, the frame-step operation
+    // is complete.
+
+ if (m_frameStep.state == FrameStepScheduled) {
+ // Query the sample for IUnknown and compare it to our cached value.
+ hr = sample->QueryInterface(IID_PPV_ARGS(&unk));
+ if (FAILED(hr))
+ goto done;
+
+ if (m_frameStep.sampleNoRef == reinterpret_cast<DWORD_PTR>(unk)) {
+ // Notify the EVR.
+ hr = completeFrameStep(sample);
+ if (FAILED(hr))
+ goto done;
+ }
+
+ // Note: Although object is also an IUnknown pointer, it is not
+ // guaranteed to be the exact pointer value returned through
+ // QueryInterface. Therefore, the second QueryInterface call is
+ // required.
+ }
+
+ m_mutex.lock();
+
+ token = MFGetAttributeUINT32(sample, MFSamplePresenter_SampleCounter, (UINT32)-1);
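+    // If the attribute is missing, (UINT32)-1 is returned, which will not match
+    // the current token in practice; such stale samples are simply dropped here.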
+
+ if (token == m_tokenCounter) {
+ // Return the sample to the sample pool.
+ m_samplePool.returnSample(sample);
+ // A free sample is available. Process more data if possible.
+ processOutputLoop();
+ }
+
+ m_mutex.unlock();
+
+done:
+ if (FAILED(hr))
+ notifyEvent(EC_ERRORABORT, hr, 0);
+ qt_evr_safe_release(&object);
+ qt_evr_safe_release(&sample);
+ qt_evr_safe_release(&unk);
+ return hr;
+}
+
+float EVRCustomPresenter::getMaxRate(bool thin)
+{
+ // Non-thinned:
+ // If we have a valid frame rate and a monitor refresh rate, the maximum
+ // playback rate is equal to the refresh rate. Otherwise, the maximum rate
+ // is unbounded (FLT_MAX).
+
+ // Thinned: The maximum rate is unbounded.
+
+ float maxRate = FLT_MAX;
+ MFRatio fps = { 0, 0 };
+ UINT monitorRateHz = 0;
+
+ if (!thin && m_mediaType) {
+ qt_evr_getFrameRate(m_mediaType.Get(), &fps);
+ monitorRateHz = m_presentEngine->refreshRate();
+
+ if (fps.Denominator && fps.Numerator && monitorRateHz) {
+ // Max Rate = Refresh Rate / Frame Rate
+ maxRate = (float)MulDiv(monitorRateHz, fps.Denominator, fps.Numerator);
+ }
+ }
+
+ return maxRate;
+}
+
+bool EVRCustomPresenter::event(QEvent *e)
+{
+ switch (int(e->type())) {
+ case StartSurface:
+ startSurface();
+ return true;
+ case StopSurface:
+ stopSurface();
+ return true;
+ case PresentSample:
+ presentSample(static_cast<PresentSampleEvent *>(e)->sample());
+ return true;
+ default:
+ break;
+ }
+ return QObject::event(e);
+}
+
+void EVRCustomPresenter::startSurface()
+{
+ if (thread() != QThread::currentThread()) {
+ QCoreApplication::postEvent(this, new QEvent(QEvent::Type(StartSurface)));
+ return;
+ }
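+    // Intentionally empty on the presenter's own thread; the method exists to
+    // marshal the call onto the right thread via the StartSurface event.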
+}
+
+void EVRCustomPresenter::stopSurface()
+{
+ if (thread() != QThread::currentThread()) {
+ QCoreApplication::postEvent(this, new QEvent(QEvent::Type(StopSurface)));
+ return;
+ }
+}
+
+void EVRCustomPresenter::presentSample(const ComPtr<IMFSample> &sample)
+{
+ if (thread() != QThread::currentThread()) {
+ QCoreApplication::postEvent(this, new PresentSampleEvent(sample));
+ return;
+ }
+
+ if (!m_videoSink || !m_presentEngine->videoSurfaceFormat().isValid())
+ return;
+
+ QVideoFrame frame = m_presentEngine->makeVideoFrame(sample);
+
+    // Sample start/end times are relative to the position at which the clock was
+    // started. To report times relative to the beginning of the stream, shift them
+    // by the last seek position.
+ if (m_positionOffset) {
+ if (frame.startTime())
+ frame.setStartTime(frame.startTime() + m_positionOffset);
+ if (frame.endTime())
+ frame.setEndTime(frame.endTime() + m_positionOffset);
+ }
+
+ ComPtr<IMFMediaType> inputStreamType;
+ if (SUCCEEDED(m_mixer->GetInputCurrentType(0, inputStreamType.GetAddressOf()))) {
+ auto rotation = static_cast<MFVideoRotationFormat>(MFGetAttributeUINT32(inputStreamType.Get(), MF_MT_VIDEO_ROTATION, 0));
+ switch (rotation) {
+ case MFVideoRotationFormat_0: frame.setRotation(QtVideo::Rotation::None); break;
+ case MFVideoRotationFormat_90: frame.setRotation(QtVideo::Rotation::Clockwise90); break;
+ case MFVideoRotationFormat_180: frame.setRotation(QtVideo::Rotation::Clockwise180); break;
+ case MFVideoRotationFormat_270: frame.setRotation(QtVideo::Rotation::Clockwise270); break;
+ default: frame.setRotation(QtVideo::Rotation::None);
+ }
+ }
+
+ m_videoSink->platformVideoSink()->setVideoFrame(frame);
+}
+
+void EVRCustomPresenter::positionChanged(qint64 position)
+{
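+    // The incoming position is in milliseconds; frame timestamps (and thus the
+    // stored offset) are in microseconds.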
+ m_positionOffset = position * 1000;
+}
+
+HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect &sourceRect)
+{
+ if (!mixer)
+ return E_POINTER;
+
+ IMFAttributes *attributes = NULL;
+
+ HRESULT hr = mixer->GetAttributes(&attributes);
+ if (SUCCEEDED(hr)) {
+ hr = attributes->SetBlob(VIDEO_ZOOM_RECT, reinterpret_cast<const UINT8*>(&sourceRect),
+ sizeof(sourceRect));
+ attributes->Release();
+ }
+ return hr;
+}
+
+static QVideoFrameFormat::PixelFormat pixelFormatFromMediaType(IMFMediaType *type)
+{
+ GUID majorType;
+ if (FAILED(type->GetMajorType(&majorType)))
+ return QVideoFrameFormat::Format_Invalid;
+ if (majorType != MFMediaType_Video)
+ return QVideoFrameFormat::Format_Invalid;
+
+ GUID subtype;
+ if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &subtype)))
+ return QVideoFrameFormat::Format_Invalid;
+
+ return QWindowsMultimediaUtils::pixelFormatFromMediaSubtype(subtype);
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/windows/evr/evrcustompresenter_p.h b/src/plugins/multimedia/windows/evr/evrcustompresenter_p.h
new file mode 100644
index 000000000..28f1cbc68
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrcustompresenter_p.h
@@ -0,0 +1,357 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef EVRCUSTOMPRESENTER_H
+#define EVRCUSTOMPRESENTER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QObject>
+#include <qmutex.h>
+#include <qqueue.h>
+#include <qevent.h>
+#include <qrect.h>
+#include <qvideoframeformat.h>
+#include <qvideosink.h>
+#include <qpointer.h>
+#include <private/qcomptr_p.h>
+#include "evrhelpers_p.h"
+
+#include <d3d9.h>
+#include <dxva2api.h>
+#include <evr9.h>
+#include <evr.h>
+#include <mfidl.h>
+#include <mfapi.h>
+#include <mferror.h>
+
+QT_BEGIN_NAMESPACE
+
+class EVRCustomPresenter;
+class D3DPresentEngine;
+
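+// Forwards IMFAsyncCallback::Invoke to a member function of the parent object.
+// AddRef()/Release() delegate to the parent, so the callback shares its lifetime.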
+template<class T>
+class AsyncCallback : public IMFAsyncCallback
+{
+ Q_DISABLE_COPY(AsyncCallback)
+public:
+ typedef HRESULT (T::*InvokeFn)(IMFAsyncResult *asyncResult);
+
+ AsyncCallback(T *parent, InvokeFn fn) : m_parent(parent), m_invokeFn(fn)
+ {
+ }
+
+ // IUnknown
+ STDMETHODIMP QueryInterface(REFIID iid, void** ppv) override
+ {
+ if (!ppv)
+ return E_POINTER;
+
+ if (iid == __uuidof(IUnknown)) {
+ *ppv = static_cast<IUnknown*>(static_cast<IMFAsyncCallback*>(this));
+ } else if (iid == __uuidof(IMFAsyncCallback)) {
+ *ppv = static_cast<IMFAsyncCallback*>(this);
+ } else {
+ *ppv = NULL;
+ return E_NOINTERFACE;
+ }
+ AddRef();
+ return S_OK;
+ }
+
+ STDMETHODIMP_(ULONG) AddRef() override {
+ // Delegate to parent class.
+ return m_parent->AddRef();
+ }
+ STDMETHODIMP_(ULONG) Release() override {
+ // Delegate to parent class.
+ return m_parent->Release();
+ }
+
+ // IMFAsyncCallback methods
+ STDMETHODIMP GetParameters(DWORD*, DWORD*) override
+ {
+ // Implementation of this method is optional.
+ return E_NOTIMPL;
+ }
+
+ STDMETHODIMP Invoke(IMFAsyncResult* asyncResult) override
+ {
+ return (m_parent->*m_invokeFn)(asyncResult);
+ }
+
+ T *m_parent;
+ InvokeFn m_invokeFn;
+};
+
+class Scheduler
+{
+ Q_DISABLE_COPY(Scheduler)
+public:
+ enum ScheduleEvent
+ {
+ Terminate = WM_USER,
+ Schedule = WM_USER + 1,
+ Flush = WM_USER + 2
+ };
+
+ Scheduler(EVRCustomPresenter *presenter);
+ ~Scheduler();
+
+ void setFrameRate(const MFRatio &fps);
+ void setClockRate(float rate) { m_playbackRate = rate; }
+
+ HRESULT startScheduler(ComPtr<IMFClock> clock);
+ HRESULT stopScheduler();
+
+ HRESULT scheduleSample(const ComPtr<IMFSample> &sample, bool presentNow);
+ HRESULT processSamplesInQueue(LONG *nextSleep);
+ HRESULT flush();
+
+ bool areSamplesScheduled();
+
+ // ThreadProc for the scheduler thread.
+ static DWORD WINAPI schedulerThreadProc(LPVOID parameter);
+
+private:
+ DWORD schedulerThreadProcPrivate();
+ bool isSampleReadyToPresent(IMFSample *sample, LONG *pNextSleep) const;
+
+ EVRCustomPresenter *m_presenter;
+
+ QQueue<ComPtr<IMFSample>> m_scheduledSamples; // Samples waiting to be presented.
+
+ ComPtr<IMFClock> m_clock; // Presentation clock. Can be NULL.
+
+ DWORD m_threadID;
+ ThreadHandle m_schedulerThread;
+ EventHandle m_threadReadyEvent;
+ EventHandle m_flushEvent;
+
+ float m_playbackRate;
+ MFTIME m_perFrame_1_4th; // 1/4th of the frame duration.
+
+ QMutex m_mutex;
+};
+
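+// Thread-safe pool of allocated video samples, shared between the mixer output
+// loop and the asynchronous onSampleFree callback.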
+class SamplePool
+{
+ Q_DISABLE_COPY(SamplePool)
+public:
+ SamplePool();
+ ~SamplePool();
+
+ HRESULT initialize(QList<ComPtr<IMFSample>> &&samples);
+ HRESULT clear();
+
+ ComPtr<IMFSample> takeSample();
+ void returnSample(const ComPtr<IMFSample> &sample);
+
+private:
+ QMutex m_mutex;
+ QList<ComPtr<IMFSample>> m_videoSampleQueue;
+ bool m_initialized;
+};
+
+class EVRCustomPresenter
+ : public QObject
+ , public IMFVideoDeviceID
+ , public IMFVideoPresenter // Inherits IMFClockStateSink
+ , public IMFRateSupport
+ , public IMFGetService
+ , public IMFTopologyServiceLookupClient
+{
+ Q_DISABLE_COPY(EVRCustomPresenter)
+public:
+ // Defines the state of the presenter.
+ enum RenderState
+ {
+ RenderStarted = 1,
+ RenderStopped,
+ RenderPaused,
+ RenderShutdown // Initial state.
+ };
+
+ // Defines the presenter's state with respect to frame-stepping.
+ enum FrameStepState
+ {
+ FrameStepNone, // Not frame stepping.
+ FrameStepWaitingStart, // Frame stepping, but the clock is not started.
+ FrameStepPending, // Clock is started. Waiting for samples.
+ FrameStepScheduled, // Submitted a sample for rendering.
+ FrameStepComplete // Sample was rendered.
+ };
+
+ enum PresenterEvents
+ {
+ StartSurface = QEvent::User,
+ StopSurface = QEvent::User + 1,
+ PresentSample = QEvent::User + 2
+ };
+
+    EVRCustomPresenter(QVideoSink *sink = nullptr);
+ ~EVRCustomPresenter() override;
+
+ bool isValid() const;
+
+ // IUnknown methods
+ STDMETHODIMP QueryInterface(REFIID riid, void ** ppv) override;
+ STDMETHODIMP_(ULONG) AddRef() override;
+ STDMETHODIMP_(ULONG) Release() override;
+
+ // IMFGetService methods
+ STDMETHODIMP GetService(REFGUID guidService, REFIID riid, LPVOID *ppvObject) override;
+
+ // IMFVideoPresenter methods
+ STDMETHODIMP ProcessMessage(MFVP_MESSAGE_TYPE message, ULONG_PTR param) override;
+ STDMETHODIMP GetCurrentMediaType(IMFVideoMediaType** mediaType) override;
+
+ // IMFClockStateSink methods
+ STDMETHODIMP OnClockStart(MFTIME systemTime, LONGLONG clockStartOffset) override;
+ STDMETHODIMP OnClockStop(MFTIME systemTime) override;
+ STDMETHODIMP OnClockPause(MFTIME systemTime) override;
+ STDMETHODIMP OnClockRestart(MFTIME systemTime) override;
+ STDMETHODIMP OnClockSetRate(MFTIME systemTime, float rate) override;
+
+ // IMFRateSupport methods
+ STDMETHODIMP GetSlowestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate) override;
+ STDMETHODIMP GetFastestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate) override;
+ STDMETHODIMP IsRateSupported(BOOL thin, float rate, float *nearestSupportedRate) override;
+
+ // IMFVideoDeviceID methods
+ STDMETHODIMP GetDeviceID(IID* deviceID) override;
+
+ // IMFTopologyServiceLookupClient methods
+ STDMETHODIMP InitServicePointers(IMFTopologyServiceLookup *lookup) override;
+ STDMETHODIMP ReleaseServicePointers() override;
+
+ void supportedFormatsChanged();
+ void setSink(QVideoSink *sink);
+ void setCropRect(QRect cropRect);
+
+ void startSurface();
+ void stopSurface();
+ void presentSample(const ComPtr<IMFSample> &sample);
+
+ bool event(QEvent *) override;
+
+public Q_SLOTS:
+ void positionChanged(qint64 position);
+
+private:
+ HRESULT checkShutdown() const
+ {
+ if (m_renderState == RenderShutdown)
+ return MF_E_SHUTDOWN;
+ else
+ return S_OK;
+ }
+
+ // The "active" state is started or paused.
+ inline bool isActive() const
+ {
+ return ((m_renderState == RenderStarted) || (m_renderState == RenderPaused));
+ }
+
+ // Scrubbing occurs when the frame rate is 0.
+    // Scrubbing occurs when the playback rate is 0.
+
+ // Send an event to the EVR through its IMediaEventSink interface.
+ void notifyEvent(long eventCode, LONG_PTR param1, LONG_PTR param2)
+ {
+ if (m_mediaEventSink)
+ m_mediaEventSink->Notify(eventCode, param1, param2);
+ }
+
+ float getMaxRate(bool thin);
+
+ // Mixer operations
+ HRESULT configureMixer(IMFTransform *mixer);
+
+ // Formats
+ HRESULT createOptimalVideoType(IMFMediaType* proposed, IMFMediaType **optimal);
+ HRESULT setMediaType(IMFMediaType *mediaType);
+ HRESULT isMediaTypeSupported(IMFMediaType *mediaType);
+
+ // Message handlers
+ HRESULT flush();
+ HRESULT renegotiateMediaType();
+ HRESULT processInputNotify();
+ HRESULT beginStreaming();
+ HRESULT endStreaming();
+ HRESULT checkEndOfStream();
+
+ // Managing samples
+ void processOutputLoop();
+ HRESULT processOutput();
+ HRESULT deliverSample(const ComPtr<IMFSample> &sample);
+ HRESULT trackSample(const ComPtr<IMFSample> &sample);
+ void releaseResources();
+
+ // Frame-stepping
+ HRESULT prepareFrameStep(DWORD steps);
+ HRESULT startFrameStep();
+ HRESULT deliverFrameStepSample(const ComPtr<IMFSample> &sample);
+ HRESULT completeFrameStep(const ComPtr<IMFSample> &sample);
+ HRESULT cancelFrameStep();
+
+ // Callback when a video sample is released.
+ HRESULT onSampleFree(IMFAsyncResult *result);
+ AsyncCallback<EVRCustomPresenter> m_sampleFreeCB;
+
+ // Holds information related to frame-stepping.
+ struct FrameStep
+ {
+ FrameStepState state = FrameStepNone;
+ QList<ComPtr<IMFSample>> samples;
+ DWORD steps = 0;
+ DWORD_PTR sampleNoRef = 0;
+ };
+
+ long m_refCount;
+
+ RenderState m_renderState;
+ FrameStep m_frameStep;
+
+ QRecursiveMutex m_mutex;
+
+ // Samples and scheduling
+ Scheduler m_scheduler; // Manages scheduling of samples.
+ SamplePool m_samplePool; // Pool of allocated samples.
+ DWORD m_tokenCounter; // Counter. Incremented whenever we create new samples.
+
+ // Rendering state
+ bool m_sampleNotify; // Did the mixer signal it has an input sample?
+ bool m_prerolled; // Have we presented at least one sample?
+ bool m_endStreaming; // Did we reach the end of the stream (EOS)?
+
+ MFVideoNormalizedRect m_sourceRect;
+ float m_playbackRate;
+
+ D3DPresentEngine *m_presentEngine; // Rendering engine. (Never null if the constructor succeeds.)
+
+ ComPtr<IMFClock> m_clock; // The EVR's clock.
+ ComPtr<IMFTransform> m_mixer; // The EVR's mixer.
+ ComPtr<IMediaEventSink> m_mediaEventSink; // The EVR's event-sink interface.
+ ComPtr<IMFMediaType> m_mediaType; // Output media type
+
+ QPointer<QVideoSink> m_videoSink;
+ bool m_canRenderToSurface;
+ qint64 m_positionOffset; // Seek position in microseconds.
+ QRect m_cropRect; // Video crop rectangle
+};
+
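+// Replaces the EVR's default presenter with the given custom presenter.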
+bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter);
+
+QT_END_NAMESPACE
+
+#endif // EVRCUSTOMPRESENTER_H
diff --git a/src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp b/src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp
new file mode 100644
index 000000000..517f1d969
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrd3dpresentengine.cpp
@@ -0,0 +1,699 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "evrd3dpresentengine_p.h"
+
+#include "evrhelpers_p.h"
+
+#include <private/qhwvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
+#include <qvideoframe.h>
+#include <QDebug>
+#include <qthread.h>
+#include <qvideosink.h>
+#include <qloggingcategory.h>
+
+#include <d3d11_1.h>
+
+#include <rhi/qrhi.h>
+
+#if QT_CONFIG(opengl)
+# include <qopenglcontext.h>
+# include <qopenglfunctions.h>
+# include <qoffscreensurface.h>
+#endif
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcEvrD3DPresentEngine, "qt.multimedia.evrd3dpresentengine");
+
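+// CPU-mapping fallback: copies the sample's D3D9 render-target surface into a
+// system-memory surface and exposes the locked bits as plane data.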
+class IMFSampleVideoBuffer : public QHwVideoBuffer
+{
+public:
+ IMFSampleVideoBuffer(ComPtr<IDirect3DDevice9Ex> device, const ComPtr<IMFSample> &sample,
+ QRhi *rhi, QVideoFrame::HandleType type = QVideoFrame::NoHandle)
+ : QHwVideoBuffer(type, rhi),
+ m_device(device),
+ m_sample(sample),
+ m_mapMode(QtVideo::MapMode::NotMapped)
+ {
+ }
+
+ ~IMFSampleVideoBuffer() override
+ {
+ if (m_memSurface && m_mapMode != QtVideo::MapMode::NotMapped)
+ m_memSurface->UnlockRect();
+ }
+
+ MapData map(QtVideo::MapMode mode) override
+ {
+ if (!m_sample || m_mapMode != QtVideo::MapMode::NotMapped || mode != QtVideo::MapMode::ReadOnly)
+ return {};
+
+ D3DSURFACE_DESC desc;
+ if (m_memSurface) {
+ if (FAILED(m_memSurface->GetDesc(&desc)))
+ return {};
+
+ } else {
+ ComPtr<IMFMediaBuffer> buffer;
+ HRESULT hr = m_sample->GetBufferByIndex(0, buffer.GetAddressOf());
+ if (FAILED(hr))
+ return {};
+
+ ComPtr<IDirect3DSurface9> surface;
+ hr = MFGetService(buffer.Get(), MR_BUFFER_SERVICE, IID_IDirect3DSurface9, (void **)(surface.GetAddressOf()));
+ if (FAILED(hr))
+ return {};
+
+ if (FAILED(surface->GetDesc(&desc)))
+ return {};
+
+ if (FAILED(m_device->CreateOffscreenPlainSurface(desc.Width, desc.Height, desc.Format, D3DPOOL_SYSTEMMEM, m_memSurface.GetAddressOf(), nullptr)))
+ return {};
+
+ if (FAILED(m_device->GetRenderTargetData(surface.Get(), m_memSurface.Get()))) {
+ m_memSurface.Reset();
+ return {};
+ }
+ }
+
+ D3DLOCKED_RECT rect;
+ if (FAILED(m_memSurface->LockRect(&rect, NULL, mode == QtVideo::MapMode::ReadOnly ? D3DLOCK_READONLY : 0)))
+ return {};
+
+ m_mapMode = mode;
+
+ MapData mapData;
+ mapData.planeCount = 1;
+ mapData.bytesPerLine[0] = (int)rect.Pitch;
+ mapData.data[0] = reinterpret_cast<uchar *>(rect.pBits);
+ mapData.dataSize[0] = (int)(rect.Pitch * desc.Height);
+ return mapData;
+ }
+
+ void unmap() override
+ {
+ if (m_mapMode == QtVideo::MapMode::NotMapped)
+ return;
+
+ m_mapMode = QtVideo::MapMode::NotMapped;
+ if (m_memSurface)
+ m_memSurface->UnlockRect();
+ }
+
+protected:
+ ComPtr<IDirect3DDevice9Ex> m_device;
+ ComPtr<IMFSample> m_sample;
+
+private:
+ ComPtr<IDirect3DSurface9> m_memSurface;
+ QtVideo::MapMode m_mapMode;
+};
+
+class QVideoFrameD3D11Textures: public QVideoFrameTextures
+{
+public:
+ QVideoFrameD3D11Textures(std::unique_ptr<QRhiTexture> &&tex, ComPtr<ID3D11Texture2D> &&d3d11tex)
+ : m_tex(std::move(tex))
+ , m_d3d11tex(std::move(d3d11tex))
+ {}
+
+ QRhiTexture *texture(uint plane) const override
+ {
+ return plane == 0 ? m_tex.get() : nullptr;
+    }
+
+private:
+ std::unique_ptr<QRhiTexture> m_tex;
+ ComPtr<ID3D11Texture2D> m_d3d11tex;
+};
+
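+// Zero-copy path for the D3D11 RHI backend: opens the D3D9 texture's shared
+// handle as an ID3D11Texture2D and wraps it in a QRhiTexture.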
+class D3D11TextureVideoBuffer: public IMFSampleVideoBuffer
+{
+public:
+ D3D11TextureVideoBuffer(ComPtr<IDirect3DDevice9Ex> device, const ComPtr<IMFSample> &sample,
+ HANDLE sharedHandle, QRhi *rhi)
+ : IMFSampleVideoBuffer(std::move(device), sample, rhi, QVideoFrame::RhiTextureHandle)
+ , m_sharedHandle(sharedHandle)
+ {}
+
+ std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *rhi) override
+ {
+ if (!rhi || rhi->backend() != QRhi::D3D11)
+ return {};
+
+ auto nh = static_cast<const QRhiD3D11NativeHandles*>(rhi->nativeHandles());
+ if (!nh)
+ return {};
+
+ auto dev = reinterpret_cast<ID3D11Device *>(nh->dev);
+ if (!dev)
+ return {};
+
+ ComPtr<ID3D11Texture2D> d3d11tex;
+ HRESULT hr = dev->OpenSharedResource(m_sharedHandle, __uuidof(ID3D11Texture2D), (void**)(d3d11tex.GetAddressOf()));
+ if (SUCCEEDED(hr)) {
+ D3D11_TEXTURE2D_DESC desc = {};
+ d3d11tex->GetDesc(&desc);
+ QRhiTexture::Format format;
+ if (desc.Format == DXGI_FORMAT_B8G8R8A8_UNORM)
+ format = QRhiTexture::BGRA8;
+ else if (desc.Format == DXGI_FORMAT_R8G8B8A8_UNORM)
+ format = QRhiTexture::RGBA8;
+ else
+ return {};
+
+ std::unique_ptr<QRhiTexture> tex(rhi->newTexture(format, QSize{int(desc.Width), int(desc.Height)}, 1, {}));
+ tex->createFrom({quint64(d3d11tex.Get()), 0});
+ return std::make_unique<QVideoFrameD3D11Textures>(std::move(tex), std::move(d3d11tex));
+
+ } else {
+ qCDebug(qLcEvrD3DPresentEngine) << "Failed to obtain D3D11Texture2D from D3D9Texture2D handle";
+ }
+ return {};
+ }
+
+private:
+ HANDLE m_sharedHandle = nullptr;
+};
+
+#if QT_CONFIG(opengl)
+class QVideoFrameOpenGlTextures : public QVideoFrameTextures
+{
+ struct InterOpHandles {
+ GLuint textureName = 0;
+ HANDLE device = nullptr;
+ HANDLE texture = nullptr;
+ };
+
+public:
+ Q_DISABLE_COPY(QVideoFrameOpenGlTextures);
+
+ QVideoFrameOpenGlTextures(std::unique_ptr<QRhiTexture> &&tex, const WglNvDxInterop &wgl, InterOpHandles &handles)
+ : m_tex(std::move(tex))
+ , m_wgl(wgl)
+ , m_handles(handles)
+ {}
+
+ ~QVideoFrameOpenGlTextures() override {
+ if (QOpenGLContext::currentContext()) {
+ if (!m_wgl.wglDXUnlockObjectsNV(m_handles.device, 1, &m_handles.texture))
+ qCDebug(qLcEvrD3DPresentEngine) << "Failed to unlock OpenGL texture";
+
+ if (!m_wgl.wglDXUnregisterObjectNV(m_handles.device, m_handles.texture))
+ qCDebug(qLcEvrD3DPresentEngine) << "Failed to unregister OpenGL texture";
+
+ QOpenGLFunctions *funcs = QOpenGLContext::currentContext()->functions();
+ if (funcs)
+ funcs->glDeleteTextures(1, &m_handles.textureName);
+ else
+ qCDebug(qLcEvrD3DPresentEngine) << "Could not delete texture, OpenGL context functions missing";
+
+ if (!m_wgl.wglDXCloseDeviceNV(m_handles.device))
+ qCDebug(qLcEvrD3DPresentEngine) << "Failed to close D3D-GL device";
+
+ } else {
+ qCDebug(qLcEvrD3DPresentEngine) << "Could not release texture, OpenGL context missing";
+ }
+ }
+
+ static std::unique_ptr<QVideoFrameOpenGlTextures> create(const WglNvDxInterop &wgl, QRhi *rhi,
+ IDirect3DDevice9Ex *device, IDirect3DTexture9 *texture,
+ HANDLE sharedHandle)
+ {
+ if (!rhi || rhi->backend() != QRhi::OpenGLES2)
+ return {};
+
+ if (!QOpenGLContext::currentContext())
+ return {};
+
+ InterOpHandles handles = {};
+ handles.device = wgl.wglDXOpenDeviceNV(device);
+ if (!handles.device) {
+ qCDebug(qLcEvrD3DPresentEngine) << "Failed to open D3D device";
+ return {};
+ }
+
+ wgl.wglDXSetResourceShareHandleNV(texture, sharedHandle);
+
+ QOpenGLFunctions *funcs = QOpenGLContext::currentContext()->functions();
+ if (funcs) {
+ funcs->glGenTextures(1, &handles.textureName);
+ handles.texture = wgl.wglDXRegisterObjectNV(handles.device, texture, handles.textureName,
+ GL_TEXTURE_2D, WglNvDxInterop::WGL_ACCESS_READ_ONLY_NV);
+ if (handles.texture) {
+ if (wgl.wglDXLockObjectsNV(handles.device, 1, &handles.texture)) {
+ D3DSURFACE_DESC desc;
+ texture->GetLevelDesc(0, &desc);
+ QRhiTexture::Format format;
+ if (desc.Format == D3DFMT_A8R8G8B8)
+ format = QRhiTexture::BGRA8;
+ else if (desc.Format == D3DFMT_A8B8G8R8)
+ format = QRhiTexture::RGBA8;
+ else
+ return {};
+
+ std::unique_ptr<QRhiTexture> tex(rhi->newTexture(format, QSize{int(desc.Width), int(desc.Height)}, 1, {}));
+ tex->createFrom({quint64(handles.textureName), 0});
+ return std::make_unique<QVideoFrameOpenGlTextures>(std::move(tex), wgl, handles);
+ }
+
+ qCDebug(qLcEvrD3DPresentEngine) << "Failed to lock OpenGL texture";
+ wgl.wglDXUnregisterObjectNV(handles.device, handles.texture);
+ } else {
+ qCDebug(qLcEvrD3DPresentEngine) << "Could not register D3D9 texture in OpenGL";
+ }
+
+ funcs->glDeleteTextures(1, &handles.textureName);
+ } else {
+            qCDebug(qLcEvrD3DPresentEngine) << "Failed to generate texture names, OpenGL context functions missing";
+ }
+ return {};
+ }
+
+ QRhiTexture *texture(uint plane) const override
+ {
+ return plane == 0 ? m_tex.get() : nullptr;
+    }
+private:
+ std::unique_ptr<QRhiTexture> m_tex;
+ WglNvDxInterop m_wgl;
+ InterOpHandles m_handles;
+};
+
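+// Zero-copy path for the OpenGL RHI backend, using the WGL_NV_DX_interop
+// extension to expose the D3D9 texture as a GL texture.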
+class OpenGlVideoBuffer: public IMFSampleVideoBuffer
+{
+public:
+ OpenGlVideoBuffer(ComPtr<IDirect3DDevice9Ex> device, const ComPtr<IMFSample> &sample,
+ const WglNvDxInterop &wglNvDxInterop, HANDLE sharedHandle, QRhi *rhi)
+ : IMFSampleVideoBuffer(std::move(device), sample, rhi, QVideoFrame::RhiTextureHandle)
+ , m_sharedHandle(sharedHandle)
+ , m_wgl(wglNvDxInterop)
+ {}
+
+ std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *rhi) override
+ {
+ if (!m_texture) {
+ ComPtr<IMFMediaBuffer> buffer;
+ HRESULT hr = m_sample->GetBufferByIndex(0, buffer.GetAddressOf());
+ if (FAILED(hr))
+ return {};
+
+ ComPtr<IDirect3DSurface9> surface;
+ hr = MFGetService(buffer.Get(), MR_BUFFER_SERVICE, IID_IDirect3DSurface9,
+ (void **)(surface.GetAddressOf()));
+ if (FAILED(hr))
+ return {};
+
+ hr = surface->GetContainer(IID_IDirect3DTexture9, (void **)m_texture.GetAddressOf());
+ if (FAILED(hr))
+ return {};
+ }
+
+ return QVideoFrameOpenGlTextures::create(m_wgl, rhi, m_device.Get(), m_texture.Get(), m_sharedHandle);
+ }
+
+private:
+ HANDLE m_sharedHandle = nullptr;
+ WglNvDxInterop m_wgl;
+ ComPtr<IDirect3DTexture9> m_texture;
+};
+#endif
+
+D3DPresentEngine::D3DPresentEngine(QVideoSink *sink)
+ : m_deviceResetToken(0)
+{
+ ZeroMemory(&m_displayMode, sizeof(m_displayMode));
+ setSink(sink);
+}
+
+D3DPresentEngine::~D3DPresentEngine()
+{
+ releaseResources();
+}
+
+void D3DPresentEngine::setSink(QVideoSink *sink)
+{
+ if (sink == m_sink)
+ return;
+
+ m_sink = sink;
+
+ releaseResources();
+ m_device.Reset();
+ m_devices.Reset();
+ m_D3D9.Reset();
+
+ if (!m_sink)
+ return;
+
+ HRESULT hr = initializeD3D();
+
+ if (SUCCEEDED(hr)) {
+ hr = createD3DDevice();
+ if (FAILED(hr))
+ qWarning("Failed to create D3D device");
+ } else {
+ qWarning("Failed to initialize D3D");
+ }
+}
+
+HRESULT D3DPresentEngine::initializeD3D()
+{
+ HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, m_D3D9.GetAddressOf());
+
+ if (SUCCEEDED(hr))
+ hr = DXVA2CreateDirect3DDeviceManager9(&m_deviceResetToken, m_devices.GetAddressOf());
+
+ return hr;
+}
+
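+// Matches the D3D9 adapter to the adapter used by the D3D11-based RHI by
+// comparing LUIDs, so that both devices end up on the same GPU.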
+static bool findD3D11AdapterID(QRhi &rhi, IDirect3D9Ex *D3D9, UINT &adapterID)
+{
+ auto nh = static_cast<const QRhiD3D11NativeHandles*>(rhi.nativeHandles());
+ if (D3D9 && nh) {
+ for (auto i = 0u; i < D3D9->GetAdapterCount(); ++i) {
+ LUID luid = {};
+ D3D9->GetAdapterLUID(i, &luid);
+ if (luid.LowPart == nh->adapterLuidLow && luid.HighPart == nh->adapterLuidHigh) {
+ adapterID = i;
+ return true;
+ }
+ }
+ }
+
+ return false;
+}
+
+#if QT_CONFIG(opengl)
+template <typename T>
+static bool getProc(const QOpenGLContext *ctx, T &fn, const char *fName)
+{
+ fn = reinterpret_cast<T>(ctx->getProcAddress(fName));
+ return fn != nullptr;
+}
+
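+// Resolves the WGL_NV_DX_interop entry points on a temporary offscreen
+// OpenGL context; returns false if the extension is unavailable.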
+static bool readWglNvDxInteropProc(WglNvDxInterop &f)
+{
+ QScopedPointer<QOffscreenSurface> surface(new QOffscreenSurface);
+ surface->create();
+ QScopedPointer<QOpenGLContext> ctx(new QOpenGLContext);
+ ctx->create();
+ ctx->makeCurrent(surface.get());
+
+ auto wglGetExtensionsStringARB = reinterpret_cast<const char* (WINAPI* )(HDC)>
+ (ctx->getProcAddress("wglGetExtensionsStringARB"));
+ if (!wglGetExtensionsStringARB) {
+ qCDebug(qLcEvrD3DPresentEngine) << "WGL extensions missing (no wglGetExtensionsStringARB function)";
+ return false;
+ }
+
+ HWND hwnd = ::GetShellWindow();
+ auto dc = ::GetDC(hwnd);
+
+ const char *wglExtString = wglGetExtensionsStringARB(dc);
+ if (!wglExtString)
+ qCDebug(qLcEvrD3DPresentEngine) << "WGL extensions missing (wglGetExtensionsStringARB returned null)";
+
+ bool hasExtension = wglExtString && strstr(wglExtString, "WGL_NV_DX_interop");
+ ReleaseDC(hwnd, dc);
+ if (!hasExtension) {
+ qCDebug(qLcEvrD3DPresentEngine) << "WGL_NV_DX_interop missing";
+ return false;
+ }
+
+ return getProc(ctx.get(), f.wglDXOpenDeviceNV, "wglDXOpenDeviceNV")
+ && getProc(ctx.get(), f.wglDXCloseDeviceNV, "wglDXCloseDeviceNV")
+ && getProc(ctx.get(), f.wglDXSetResourceShareHandleNV, "wglDXSetResourceShareHandleNV")
+ && getProc(ctx.get(), f.wglDXRegisterObjectNV, "wglDXRegisterObjectNV")
+ && getProc(ctx.get(), f.wglDXUnregisterObjectNV, "wglDXUnregisterObjectNV")
+ && getProc(ctx.get(), f.wglDXLockObjectsNV, "wglDXLockObjectsNV")
+ && getProc(ctx.get(), f.wglDXUnlockObjectsNV, "wglDXUnlockObjectsNV");
+}
+#endif
+
+namespace {
+
+bool hwTextureRenderingEnabled() {
+    // Allow the user to opt out of HW video rendering,
+    // using the same environment variable as the FFmpeg backend.
+ static bool isDisableConversionSet = false;
+ static const int disableHwConversion = qEnvironmentVariableIntValue(
+ "QT_DISABLE_HW_TEXTURES_CONVERSION", &isDisableConversionSet);
+
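+    // Hardware texture rendering stays enabled unless the variable is set to a
+    // non-zero value.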
+ return !isDisableConversionSet || !disableHwConversion;
+}
+
+}
+
+HRESULT D3DPresentEngine::createD3DDevice()
+{
+ if (!m_D3D9 || !m_devices)
+ return MF_E_NOT_INITIALIZED;
+
+ m_useTextureRendering = false;
+ UINT adapterID = 0;
+
+ if (hwTextureRenderingEnabled()) {
+ QRhi *rhi = m_sink ? m_sink->rhi() : nullptr;
+ if (rhi) {
+ if (rhi->backend() == QRhi::D3D11) {
+ m_useTextureRendering = findD3D11AdapterID(*rhi, m_D3D9.Get(), adapterID);
+#if QT_CONFIG(opengl)
+ } else if (rhi->backend() == QRhi::OpenGLES2) {
+ m_useTextureRendering = readWglNvDxInteropProc(m_wglNvDxInterop);
+#endif
+ } else {
+                qCDebug(qLcEvrD3DPresentEngine) << "Unsupported RHI backend type";
+ }
+ } else {
+ qCDebug(qLcEvrD3DPresentEngine) << "No RHI associated with this sink";
+ }
+
+ if (!m_useTextureRendering)
+ qCDebug(qLcEvrD3DPresentEngine) << "Could not find compatible RHI adapter, zero copy disabled";
+ }
+
+ D3DCAPS9 ddCaps;
+ ZeroMemory(&ddCaps, sizeof(ddCaps));
+
+ HRESULT hr = m_D3D9->GetDeviceCaps(adapterID, D3DDEVTYPE_HAL, &ddCaps);
+ if (FAILED(hr))
+ return hr;
+
+ DWORD vp = 0;
+ if (ddCaps.DevCaps & D3DDEVCAPS_HWTRANSFORMANDLIGHT)
+ vp = D3DCREATE_HARDWARE_VERTEXPROCESSING;
+ else
+ vp = D3DCREATE_SOFTWARE_VERTEXPROCESSING;
+
+ D3DPRESENT_PARAMETERS pp;
+ ZeroMemory(&pp, sizeof(pp));
+
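+    // The device never presents this swap chain; samples are rendered into
+    // offscreen textures, so a minimal 1x1 windowed back buffer is enough.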
+ pp.BackBufferWidth = 1;
+ pp.BackBufferHeight = 1;
+ pp.BackBufferCount = 1;
+ pp.Windowed = TRUE;
+ pp.SwapEffect = D3DSWAPEFFECT_DISCARD;
+ pp.BackBufferFormat = D3DFMT_UNKNOWN;
+ pp.hDeviceWindow = nullptr;
+ pp.Flags = D3DPRESENTFLAG_VIDEO;
+ pp.PresentationInterval = D3DPRESENT_INTERVAL_DEFAULT;
+
+ ComPtr<IDirect3DDevice9Ex> device;
+
+ hr = m_D3D9->CreateDeviceEx(
+ adapterID,
+ D3DDEVTYPE_HAL,
+ pp.hDeviceWindow,
+ vp | D3DCREATE_NOWINDOWCHANGES | D3DCREATE_MULTITHREADED | D3DCREATE_FPU_PRESERVE,
+ &pp,
+ NULL,
+ device.GetAddressOf()
+ );
+ if (FAILED(hr))
+ return hr;
+
+ hr = m_D3D9->GetAdapterDisplayMode(adapterID, &m_displayMode);
+ if (FAILED(hr))
+ return hr;
+
+ hr = m_devices->ResetDevice(device.Get(), m_deviceResetToken);
+ if (FAILED(hr))
+ return hr;
+
+ m_device = device;
+ return hr;
+}
+
+bool D3DPresentEngine::isValid() const
+{
+ return m_device.Get() != nullptr;
+}
+
+void D3DPresentEngine::releaseResources()
+{
+ m_surfaceFormat = QVideoFrameFormat();
+}
+
+HRESULT D3DPresentEngine::getService(REFGUID, REFIID riid, void** ppv)
+{
+ HRESULT hr = S_OK;
+
+ if (riid == __uuidof(IDirect3DDeviceManager9)) {
+ if (!m_devices) {
+ hr = MF_E_UNSUPPORTED_SERVICE;
+ } else {
+ *ppv = m_devices.Get();
+ m_devices->AddRef();
+ }
+ } else {
+ hr = MF_E_UNSUPPORTED_SERVICE;
+ }
+
+ return hr;
+}
+
+HRESULT D3DPresentEngine::checkFormat(D3DFORMAT format)
+{
+ if (!m_D3D9 || !m_device)
+ return E_FAIL;
+
+ HRESULT hr = S_OK;
+
+ D3DDISPLAYMODE mode;
+ D3DDEVICE_CREATION_PARAMETERS params;
+
+ hr = m_device->GetCreationParameters(&params);
+ if (FAILED(hr))
+ return hr;
+
+ UINT uAdapter = params.AdapterOrdinal;
+ D3DDEVTYPE type = params.DeviceType;
+
+ hr = m_D3D9->GetAdapterDisplayMode(uAdapter, &mode);
+ if (FAILED(hr))
+ return hr;
+
+ hr = m_D3D9->CheckDeviceFormat(uAdapter, type, mode.Format,
+ D3DUSAGE_RENDERTARGET,
+ D3DRTYPE_SURFACE,
+ format);
+ if (FAILED(hr))
+ return hr;
+
+ bool ok = format == D3DFMT_X8R8G8B8
+ || format == D3DFMT_A8R8G8B8
+ || format == D3DFMT_X8B8G8R8
+ || format == D3DFMT_A8B8G8R8;
+
+ return ok ? S_OK : D3DERR_NOTAVAILABLE;
+}
+
+HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format,
+ QList<ComPtr<IMFSample>> &videoSampleQueue,
+ QSize frameSize)
+{
+ if (!format || !m_device)
+ return MF_E_UNEXPECTED;
+
+ HRESULT hr = S_OK;
+ releaseResources();
+
+ UINT32 width = 0, height = 0;
+ hr = MFGetAttributeSize(format, MF_MT_FRAME_SIZE, &width, &height);
+ if (FAILED(hr))
+ return hr;
+
+ if (frameSize.isValid() && !frameSize.isEmpty()) {
+ width = frameSize.width();
+ height = frameSize.height();
+ }
+
+ DWORD d3dFormat = 0;
+ hr = qt_evr_getFourCC(format, &d3dFormat);
+ if (FAILED(hr))
+ return hr;
+
+    // FIXME: RHI only exposes formats with an alpha channel here, so map the
+    // X8 variants to their A8 equivalents.
+ if (d3dFormat == D3DFMT_X8R8G8B8)
+ d3dFormat = D3DFMT_A8R8G8B8;
+ else if (d3dFormat == D3DFMT_X8B8G8R8)
+ d3dFormat = D3DFMT_A8B8G8R8;
+
+ for (int i = 0; i < PRESENTER_BUFFER_COUNT; i++) {
+        // The texture's ref count is increased by GetSurfaceLevel()/MFCreateVideoSampleFromSurface()
+        // below, so it is destroyed only when the sample pool is released.
+ ComPtr<IDirect3DTexture9> texture;
+ HANDLE sharedHandle = nullptr;
+ hr = m_device->CreateTexture(width, height, 1, D3DUSAGE_RENDERTARGET, (D3DFORMAT)d3dFormat, D3DPOOL_DEFAULT, texture.GetAddressOf(), &sharedHandle);
+ if (FAILED(hr))
+ break;
+
+ ComPtr<IDirect3DSurface9> surface;
+ hr = texture->GetSurfaceLevel(0, surface.GetAddressOf());
+ if (FAILED(hr))
+ break;
+
+ ComPtr<IMFSample> videoSample;
+ hr = MFCreateVideoSampleFromSurface(surface.Get(), videoSample.GetAddressOf());
+ if (FAILED(hr))
+ break;
+
+ m_sampleTextureHandle[i] = {videoSample.Get(), sharedHandle};
+ videoSampleQueue.append(videoSample);
+ }
+
+ if (SUCCEEDED(hr)) {
+ m_surfaceFormat = QVideoFrameFormat(QSize(width, height), qt_evr_pixelFormatFromD3DFormat(d3dFormat));
+ } else {
+ releaseResources();
+ }
+
+ return hr;
+}
+
+QVideoFrame D3DPresentEngine::makeVideoFrame(const ComPtr<IMFSample> &sample)
+{
+ if (!sample)
+ return {};
+
+ HANDLE sharedHandle = nullptr;
+ for (const auto &p : m_sampleTextureHandle)
+ if (p.first == sample.Get())
+ sharedHandle = p.second;
+
+ std::unique_ptr<IMFSampleVideoBuffer> vb;
+ QRhi *rhi = m_sink ? m_sink->rhi() : nullptr;
+ if (m_useTextureRendering && sharedHandle && rhi) {
+ if (rhi->backend() == QRhi::D3D11) {
+ vb = std::make_unique<D3D11TextureVideoBuffer>(m_device, sample, sharedHandle, rhi);
+#if QT_CONFIG(opengl)
+ } else if (rhi->backend() == QRhi::OpenGLES2) {
+ vb = std::make_unique<OpenGlVideoBuffer>(m_device, sample, m_wglNvDxInterop,
+ sharedHandle, rhi);
+#endif
+ }
+ }
+
+ if (!vb)
+ vb = std::make_unique<IMFSampleVideoBuffer>(m_device, sample, rhi);
+
+ QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(vb), m_surfaceFormat);
+
+ // WMF uses 100-nanosecond units, Qt uses microseconds
+ LONGLONG startTime = 0;
+ auto hr = sample->GetSampleTime(&startTime);
+ if (SUCCEEDED(hr)) {
+ frame.setStartTime(startTime / 10);
+
+ LONGLONG duration = -1;
+ if (SUCCEEDED(sample->GetSampleDuration(&duration)))
+ frame.setEndTime((startTime + duration) / 10);
+ }
+
+ return frame;
+}
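+
+// A worked example of the conversion above: an IMFSample time of 10000000
+// (one second in 100 ns units) becomes a frame start time of 1000000
+// microseconds after the division by 10.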
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/windows/evr/evrd3dpresentengine_p.h b/src/plugins/multimedia/windows/evr/evrd3dpresentengine_p.h
new file mode 100644
index 000000000..93aa90b71
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrd3dpresentengine_p.h
@@ -0,0 +1,153 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef EVRD3DPRESENTENGINE_H
+#define EVRD3DPRESENTENGINE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QMutex>
+#include <QSize>
+#include <QVideoFrameFormat>
+#include <private/qcomptr_p.h>
+#include <qpointer.h>
+
+#include <d3d9.h>
+
+struct IDirect3D9Ex;
+struct IDirect3DDevice9Ex;
+struct IDirect3DDeviceManager9;
+struct IDirect3DSurface9;
+struct IDirect3DTexture9;
+struct IMFSample;
+struct IMFMediaType;
+
+QT_BEGIN_NAMESPACE
+class QVideoFrame;
+class QVideoSink;
+QT_END_NAMESPACE
+
+// Randomly generated GUID
+static const GUID MFSamplePresenter_SampleCounter =
+{ 0xb0bb83cc, 0xf10f, 0x4e2e, { 0xaa, 0x2b, 0x29, 0xea, 0x5e, 0x92, 0xef, 0x85 } };
+
+#if QT_CONFIG(opengl)
+# include <qopengl.h>
+#endif
+
+QT_BEGIN_NAMESPACE
+
+#ifdef MAYBE_ANGLE
+
+class OpenGLResources;
+
+class EGLWrapper
+{
+ Q_DISABLE_COPY(EGLWrapper)
+public:
+ EGLWrapper();
+
+ __eglMustCastToProperFunctionPointerType getProcAddress(const char *procname);
+ EGLSurface createPbufferSurface(EGLDisplay dpy, EGLConfig config, const EGLint *attrib_list);
+ EGLBoolean destroySurface(EGLDisplay dpy, EGLSurface surface);
+ EGLBoolean bindTexImage(EGLDisplay dpy, EGLSurface surface, EGLint buffer);
+ EGLBoolean releaseTexImage(EGLDisplay dpy, EGLSurface surface, EGLint buffer);
+
+private:
+ typedef __eglMustCastToProperFunctionPointerType (EGLAPIENTRYP EglGetProcAddress)(const char *procname);
+ typedef EGLSurface (EGLAPIENTRYP EglCreatePbufferSurface)(EGLDisplay dpy, EGLConfig config, const EGLint *attrib_list);
+ typedef EGLBoolean (EGLAPIENTRYP EglDestroySurface)(EGLDisplay dpy, EGLSurface surface);
+ typedef EGLBoolean (EGLAPIENTRYP EglBindTexImage)(EGLDisplay dpy, EGLSurface surface, EGLint buffer);
+ typedef EGLBoolean (EGLAPIENTRYP EglReleaseTexImage)(EGLDisplay dpy, EGLSurface surface, EGLint buffer);
+
+ EglGetProcAddress m_eglGetProcAddress;
+ EglCreatePbufferSurface m_eglCreatePbufferSurface;
+ EglDestroySurface m_eglDestroySurface;
+ EglBindTexImage m_eglBindTexImage;
+ EglReleaseTexImage m_eglReleaseTexImage;
+};
+
+#endif // MAYBE_ANGLE
+
+#if QT_CONFIG(opengl)
+
+struct WglNvDxInterop {
+ HANDLE (WINAPI* wglDXOpenDeviceNV) (void* dxDevice);
+ BOOL (WINAPI* wglDXCloseDeviceNV) (HANDLE hDevice);
+ HANDLE (WINAPI* wglDXRegisterObjectNV) (HANDLE hDevice, void *dxObject, GLuint name, GLenum type, GLenum access);
+ BOOL (WINAPI* wglDXSetResourceShareHandleNV) (void *dxResource, HANDLE shareHandle);
+ BOOL (WINAPI* wglDXLockObjectsNV) (HANDLE hDevice, GLint count, HANDLE *hObjects);
+ BOOL (WINAPI* wglDXUnlockObjectsNV) (HANDLE hDevice, GLint count, HANDLE *hObjects);
+ BOOL (WINAPI* wglDXUnregisterObjectNV) (HANDLE hDevice, HANDLE hObject);
+
+ static const int WGL_ACCESS_READ_ONLY_NV = 0;
+};
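+
+// A minimal loading sketch (illustrative; assumes a current WGL context and
+// that the WGL_NV_DX_interop extension is available, otherwise the returned
+// pointers are null):
+//
+//     WglNvDxInterop interop = {};
+//     interop.wglDXOpenDeviceNV =
+//             reinterpret_cast<decltype(interop.wglDXOpenDeviceNV)>(
+//                     wglGetProcAddress("wglDXOpenDeviceNV"));
+//     // ...resolve the remaining entry points the same way before use.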
+
+#endif
+
+class D3DPresentEngine
+{
+ Q_DISABLE_COPY(D3DPresentEngine)
+public:
+ D3DPresentEngine(QVideoSink *sink);
+ virtual ~D3DPresentEngine();
+
+ bool isValid() const;
+
+ HRESULT getService(REFGUID guidService, REFIID riid, void** ppv);
+ HRESULT checkFormat(D3DFORMAT format);
+ UINT refreshRate() const { return m_displayMode.RefreshRate; }
+
+ HRESULT createVideoSamples(IMFMediaType *format, QList<ComPtr<IMFSample>> &videoSampleQueue,
+ QSize frameSize);
+ QVideoFrameFormat videoSurfaceFormat() const { return m_surfaceFormat; }
+ QVideoFrame makeVideoFrame(const ComPtr<IMFSample> &sample);
+
+ void releaseResources();
+ void setSink(QVideoSink *sink);
+
+private:
+ static const int PRESENTER_BUFFER_COUNT = 3;
+
+ HRESULT initializeD3D();
+ HRESULT createD3DDevice();
+
+ std::pair<IMFSample *, HANDLE> m_sampleTextureHandle[PRESENTER_BUFFER_COUNT] = {};
+
+ UINT m_deviceResetToken;
+ D3DDISPLAYMODE m_displayMode;
+
+ ComPtr<IDirect3D9Ex> m_D3D9;
+ ComPtr<IDirect3DDevice9Ex> m_device;
+ ComPtr<IDirect3DDeviceManager9> m_devices;
+
+ QVideoFrameFormat m_surfaceFormat;
+
+ QPointer<QVideoSink> m_sink;
+ bool m_useTextureRendering = false;
+#if QT_CONFIG(opengl)
+    WglNvDxInterop m_wglNvDxInterop = {};
+#endif
+
+#ifdef MAYBE_ANGLE
+ unsigned int updateTexture(IDirect3DSurface9 *src);
+
+ OpenGLResources *m_glResources;
+ IDirect3DTexture9 *m_texture;
+#endif
+
+ friend class IMFSampleVideoBuffer;
+};
+
+QT_END_NAMESPACE
+
+#endif // EVRD3DPRESENTENGINE_H
diff --git a/src/plugins/multimedia/windows/evr/evrhelpers.cpp b/src/plugins/multimedia/windows/evr/evrhelpers.cpp
new file mode 100644
index 000000000..bf4347c69
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrhelpers.cpp
@@ -0,0 +1,140 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "evrhelpers_p.h"
+
+#ifndef D3DFMT_YV12
+#define D3DFMT_YV12 (D3DFORMAT)MAKEFOURCC ('Y', 'V', '1', '2')
+#endif
+#ifndef D3DFMT_NV12
+#define D3DFMT_NV12 (D3DFORMAT)MAKEFOURCC ('N', 'V', '1', '2')
+#endif
+
+QT_BEGIN_NAMESPACE
+
+HRESULT qt_evr_getFourCC(IMFMediaType *type, DWORD *fourCC)
+{
+    if (!type || !fourCC)
+        return E_POINTER;
+
+    GUID guidSubType = GUID_NULL;
+    HRESULT hr = type->GetGUID(MF_MT_SUBTYPE, &guidSubType);
+
+    if (SUCCEEDED(hr))
+        *fourCC = guidSubType.Data1;
+
+    return hr;
+}
+
+bool qt_evr_areMediaTypesEqual(IMFMediaType *type1, IMFMediaType *type2)
+{
+ if (!type1 && !type2)
+ return true;
+ if (!type1 || !type2)
+ return false;
+
+ DWORD dwFlags = 0;
+ HRESULT hr = type1->IsEqual(type2, &dwFlags);
+
+ return (hr == S_OK);
+}
+
+HRESULT qt_evr_validateVideoArea(const MFVideoArea& area, UINT32 width, UINT32 height)
+{
+ float fOffsetX = qt_evr_MFOffsetToFloat(area.OffsetX);
+ float fOffsetY = qt_evr_MFOffsetToFloat(area.OffsetY);
+
+ if ( ((LONG)fOffsetX + area.Area.cx > (LONG)width) ||
+ ((LONG)fOffsetY + area.Area.cy > (LONG)height) ) {
+ return MF_E_INVALIDMEDIATYPE;
+ }
+ return S_OK;
+}
+
+bool qt_evr_isSampleTimePassed(IMFClock *clock, IMFSample *sample)
+{
+ if (!sample || !clock)
+ return false;
+
+ HRESULT hr = S_OK;
+ MFTIME hnsTimeNow = 0;
+ MFTIME hnsSystemTime = 0;
+ MFTIME hnsSampleStart = 0;
+ MFTIME hnsSampleDuration = 0;
+
+ hr = clock->GetCorrelatedTime(0, &hnsTimeNow, &hnsSystemTime);
+
+ if (SUCCEEDED(hr))
+ hr = sample->GetSampleTime(&hnsSampleStart);
+
+ if (SUCCEEDED(hr))
+ hr = sample->GetSampleDuration(&hnsSampleDuration);
+
+ if (SUCCEEDED(hr)) {
+ if (hnsSampleStart + hnsSampleDuration < hnsTimeNow)
+ return true;
+ }
+
+ return false;
+}
+
+QVideoFrameFormat::PixelFormat qt_evr_pixelFormatFromD3DFormat(DWORD format)
+{
+ switch (format) {
+ case D3DFMT_A8R8G8B8:
+ return QVideoFrameFormat::Format_BGRA8888;
+ case D3DFMT_X8R8G8B8:
+ return QVideoFrameFormat::Format_BGRX8888;
+ case D3DFMT_A8:
+ return QVideoFrameFormat::Format_Y8;
+ case D3DFMT_A8B8G8R8:
+ return QVideoFrameFormat::Format_RGBA8888;
+ case D3DFMT_X8B8G8R8:
+ return QVideoFrameFormat::Format_RGBX8888;
+ case D3DFMT_UYVY:
+ return QVideoFrameFormat::Format_UYVY;
+ case D3DFMT_YUY2:
+ return QVideoFrameFormat::Format_YUYV;
+ case D3DFMT_NV12:
+ return QVideoFrameFormat::Format_NV12;
+ case D3DFMT_YV12:
+ return QVideoFrameFormat::Format_YV12;
+ case D3DFMT_UNKNOWN:
+ default:
+ return QVideoFrameFormat::Format_Invalid;
+ }
+}
+
+D3DFORMAT qt_evr_D3DFormatFromPixelFormat(QVideoFrameFormat::PixelFormat format)
+{
+ switch (format) {
+ case QVideoFrameFormat::Format_ARGB8888:
+ return D3DFMT_A8B8G8R8;
+ case QVideoFrameFormat::Format_BGRA8888:
+ return D3DFMT_A8R8G8B8;
+ case QVideoFrameFormat::Format_BGRX8888:
+ return D3DFMT_X8R8G8B8;
+ case QVideoFrameFormat::Format_Y8:
+ return D3DFMT_A8;
+ case QVideoFrameFormat::Format_RGBA8888:
+ return D3DFMT_A8B8G8R8;
+ case QVideoFrameFormat::Format_RGBX8888:
+ return D3DFMT_X8B8G8R8;
+ case QVideoFrameFormat::Format_UYVY:
+ return D3DFMT_UYVY;
+ case QVideoFrameFormat::Format_YUYV:
+ return D3DFMT_YUY2;
+ case QVideoFrameFormat::Format_NV12:
+ return D3DFMT_NV12;
+ case QVideoFrameFormat::Format_YV12:
+ return D3DFMT_YV12;
+ case QVideoFrameFormat::Format_Invalid:
+ default:
+ return D3DFMT_UNKNOWN;
+ }
+}
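+
+// Note that the two mappings are not exact inverses: for example
+// qt_evr_D3DFormatFromPixelFormat(QVideoFrameFormat::Format_ARGB8888)
+// returns D3DFMT_A8B8G8R8, which qt_evr_pixelFormatFromD3DFormat() maps
+// back to Format_RGBA8888 rather than Format_ARGB8888.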
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/windows/evr/evrhelpers_p.h b/src/plugins/multimedia/windows/evr/evrhelpers_p.h
new file mode 100644
index 000000000..30779c835
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrhelpers_p.h
@@ -0,0 +1,93 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef EVRHELPERS_H
+#define EVRHELPERS_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qvideoframe.h>
+#include <d3d9.h>
+#include <dxva2api.h>
+#include <evr9.h>
+#include <evr.h>
+#include <mfidl.h>
+#include <mfapi.h>
+#include <mferror.h>
+#include <private/quniquehandle_p.h>
+
+QT_BEGIN_NAMESPACE
+
+template<class T>
+static inline void qt_evr_safe_release(T **unk)
+{
+ if (*unk) {
+ (*unk)->Release();
+ *unk = NULL;
+ }
+}
+
+HRESULT qt_evr_getFourCC(IMFMediaType *type, DWORD *fourCC);
+
+bool qt_evr_areMediaTypesEqual(IMFMediaType *type1, IMFMediaType *type2);
+
+HRESULT qt_evr_validateVideoArea(const MFVideoArea& area, UINT32 width, UINT32 height);
+
+bool qt_evr_isSampleTimePassed(IMFClock *clock, IMFSample *sample);
+
+inline float qt_evr_MFOffsetToFloat(const MFOffset& offset)
+{
+ return offset.value + (float(offset.fract) / 65536);
+}
+
+inline MFOffset qt_evr_makeMFOffset(float v)
+{
+ MFOffset offset;
+ offset.value = short(v);
+ offset.fract = WORD(65536 * (v-offset.value));
+ return offset;
+}
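+
+// Worked example: qt_evr_makeMFOffset(1.25f) stores value = 1 and
+// fract = 16384 (0.25 * 65536), and qt_evr_MFOffsetToFloat() recovers
+// 1 + 16384 / 65536 = 1.25. Negative offsets do not round-trip, because
+// fract is an unsigned WORD.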
+
+inline MFVideoArea qt_evr_makeMFArea(float x, float y, DWORD width, DWORD height)
+{
+ MFVideoArea area;
+ area.OffsetX = qt_evr_makeMFOffset(x);
+ area.OffsetY = qt_evr_makeMFOffset(y);
+ area.Area.cx = width;
+ area.Area.cy = height;
+ return area;
+}
+
+inline HRESULT qt_evr_getFrameRate(IMFMediaType *pType, MFRatio *pRatio)
+{
+ return MFGetAttributeRatio(pType, MF_MT_FRAME_RATE,
+ reinterpret_cast<UINT32*>(&pRatio->Numerator),
+ reinterpret_cast<UINT32*>(&pRatio->Denominator));
+}
+
+QVideoFrameFormat::PixelFormat qt_evr_pixelFormatFromD3DFormat(DWORD format);
+D3DFORMAT qt_evr_D3DFormatFromPixelFormat(QVideoFrameFormat::PixelFormat format);
+
+struct NullHandleTraits
+{
+ using Type = HANDLE;
+ static Type invalidValue() { return nullptr; }
+ static bool close(Type handle) { return CloseHandle(handle) != 0; }
+};
+
+using EventHandle = QUniqueHandle<NullHandleTraits>;
+using ThreadHandle = QUniqueHandle<NullHandleTraits>;
+
+QT_END_NAMESPACE
+
+#endif // EVRHELPERS_H
+
diff --git a/src/plugins/multimedia/windows/evr/evrvideowindowcontrol.cpp b/src/plugins/multimedia/windows/evr/evrvideowindowcontrol.cpp
new file mode 100644
index 000000000..854c9ddb2
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrvideowindowcontrol.cpp
@@ -0,0 +1,228 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "evrvideowindowcontrol_p.h"
+
+QT_BEGIN_NAMESPACE
+
+EvrVideoWindowControl::EvrVideoWindowControl(QVideoSink *parent)
+ : QPlatformVideoSink(parent)
+ , m_windowId(0)
+ , m_windowColor(RGB(0, 0, 0))
+ , m_dirtyValues(0)
+ , m_aspectRatioMode(Qt::KeepAspectRatio)
+ , m_brightness(0)
+ , m_contrast(0)
+ , m_hue(0)
+ , m_saturation(0)
+ , m_fullScreen(false)
+ , m_displayControl(0)
+ , m_processor(0)
+{
+}
+
+EvrVideoWindowControl::~EvrVideoWindowControl()
+{
+ clear();
+}
+
+bool EvrVideoWindowControl::setEvr(IUnknown *evr)
+{
+ clear();
+
+ if (!evr)
+ return true;
+
+ IMFGetService *service = NULL;
+
+ if (SUCCEEDED(evr->QueryInterface(IID_PPV_ARGS(&service)))
+ && SUCCEEDED(service->GetService(MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_displayControl)))) {
+
+ service->GetService(MR_VIDEO_MIXER_SERVICE, IID_PPV_ARGS(&m_processor));
+
+ setWinId(m_windowId);
+ setDisplayRect(m_displayRect);
+ setAspectRatioMode(m_aspectRatioMode);
+ m_dirtyValues = DXVA2_ProcAmp_Brightness | DXVA2_ProcAmp_Contrast | DXVA2_ProcAmp_Hue | DXVA2_ProcAmp_Saturation;
+ applyImageControls();
+ }
+
+ if (service)
+ service->Release();
+
+ return m_displayControl != NULL;
+}
+
+void EvrVideoWindowControl::clear()
+{
+ if (m_displayControl)
+ m_displayControl->Release();
+ m_displayControl = NULL;
+
+ if (m_processor)
+ m_processor->Release();
+ m_processor = NULL;
+}
+
+void EvrVideoWindowControl::setWinId(WId id)
+{
+ m_windowId = id;
+
+ if (m_displayControl)
+ m_displayControl->SetVideoWindow(HWND(m_windowId));
+}
+
+void EvrVideoWindowControl::setDisplayRect(const QRect &rect)
+{
+ m_displayRect = rect;
+
+ if (m_displayControl) {
+ RECT displayRect = { rect.left(), rect.top(), rect.right() + 1, rect.bottom() + 1 };
+ QSize sourceSize = nativeSize();
+
+ RECT sourceRect = { 0, 0, sourceSize.width(), sourceSize.height() };
+
+ if (m_aspectRatioMode == Qt::KeepAspectRatioByExpanding) {
+ QSize clippedSize = rect.size();
+ clippedSize.scale(sourceRect.right, sourceRect.bottom, Qt::KeepAspectRatio);
+
+ sourceRect.left = (sourceRect.right - clippedSize.width()) / 2;
+ sourceRect.top = (sourceRect.bottom - clippedSize.height()) / 2;
+ sourceRect.right = sourceRect.left + clippedSize.width();
+ sourceRect.bottom = sourceRect.top + clippedSize.height();
+ }
+
+ if (sourceSize.width() > 0 && sourceSize.height() > 0) {
+ MFVideoNormalizedRect sourceNormRect;
+ sourceNormRect.left = float(sourceRect.left) / float(sourceRect.right);
+ sourceNormRect.top = float(sourceRect.top) / float(sourceRect.bottom);
+ sourceNormRect.right = float(sourceRect.right) / float(sourceRect.right);
+ sourceNormRect.bottom = float(sourceRect.bottom) / float(sourceRect.bottom);
+ m_displayControl->SetVideoPosition(&sourceNormRect, &displayRect);
+ } else {
+ m_displayControl->SetVideoPosition(NULL, &displayRect);
+ }
+ }
+}
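+
+// Worked example: a 640x480 source shown with KeepAspectRatioByExpanding in
+// a 16:9 display rectangle is clipped to 640x360, so the normalized source
+// rect becomes (0, 0.125, 1, 0.875) and the mixer crops the frame
+// vertically before scaling it into the display rectangle.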
+
+void EvrVideoWindowControl::setFullScreen(bool fullScreen)
+{
+    if (m_fullScreen == fullScreen)
+        return;
+    // The EVR window takes its geometry from setDisplayRect(); here we only
+    // record the requested state.
+    m_fullScreen = fullScreen;
+}
+
+void EvrVideoWindowControl::setAspectRatioMode(Qt::AspectRatioMode mode)
+{
+ m_aspectRatioMode = mode;
+
+ if (m_displayControl) {
+ switch (mode) {
+ case Qt::IgnoreAspectRatio:
+            // comment from MSDN: Do not maintain the aspect ratio of the video. Stretch the video to fit the output rectangle.
+ m_displayControl->SetAspectRatioMode(MFVideoARMode_None);
+ break;
+ case Qt::KeepAspectRatio:
+            // comment from MSDN: Preserve the aspect ratio of the video by letterboxing or pillarboxing within the output rectangle.
+ m_displayControl->SetAspectRatioMode(MFVideoARMode_PreservePicture);
+ break;
+ case Qt::KeepAspectRatioByExpanding:
+            // for this mode, more adjustment will be done in setDisplayRect
+ m_displayControl->SetAspectRatioMode(MFVideoARMode_PreservePicture);
+ break;
+ default:
+ break;
+ }
+ setDisplayRect(m_displayRect);
+ }
+}
+
+void EvrVideoWindowControl::setBrightness(float brightness)
+{
+ if (m_brightness == brightness)
+ return;
+
+ m_brightness = brightness;
+
+ m_dirtyValues |= DXVA2_ProcAmp_Brightness;
+
+ applyImageControls();
+}
+
+void EvrVideoWindowControl::setContrast(float contrast)
+{
+ if (m_contrast == contrast)
+ return;
+
+ m_contrast = contrast;
+
+ m_dirtyValues |= DXVA2_ProcAmp_Contrast;
+
+ applyImageControls();
+}
+
+void EvrVideoWindowControl::setHue(float hue)
+{
+ if (m_hue == hue)
+ return;
+
+ m_hue = hue;
+
+ m_dirtyValues |= DXVA2_ProcAmp_Hue;
+
+ applyImageControls();
+}
+
+void EvrVideoWindowControl::setSaturation(float saturation)
+{
+ if (m_saturation == saturation)
+ return;
+
+ m_saturation = saturation;
+
+ m_dirtyValues |= DXVA2_ProcAmp_Saturation;
+
+ applyImageControls();
+}
+
+void EvrVideoWindowControl::applyImageControls()
+{
+    if (m_processor) {
+        DXVA2_ProcAmpValues values = {};
+ if (m_dirtyValues & DXVA2_ProcAmp_Brightness) {
+ values.Brightness = scaleProcAmpValue(DXVA2_ProcAmp_Brightness, m_brightness);
+ }
+ if (m_dirtyValues & DXVA2_ProcAmp_Contrast) {
+ values.Contrast = scaleProcAmpValue(DXVA2_ProcAmp_Contrast, m_contrast);
+ }
+ if (m_dirtyValues & DXVA2_ProcAmp_Hue) {
+ values.Hue = scaleProcAmpValue(DXVA2_ProcAmp_Hue, m_hue);
+ }
+ if (m_dirtyValues & DXVA2_ProcAmp_Saturation) {
+ values.Saturation = scaleProcAmpValue(DXVA2_ProcAmp_Saturation, m_saturation);
+ }
+
+ if (SUCCEEDED(m_processor->SetProcAmpValues(m_dirtyValues, &values))) {
+ m_dirtyValues = 0;
+ }
+ }
+}
+
+DXVA2_Fixed32 EvrVideoWindowControl::scaleProcAmpValue(DWORD prop, float value) const
+{
+    float scaledValue = 0.0f;
+
+ DXVA2_ValueRange range;
+ if (SUCCEEDED(m_processor->GetProcAmpRange(prop, &range))) {
+ scaledValue = DXVA2FixedToFloat(range.DefaultValue);
+ if (value > 0)
+ scaledValue += float(value) * (DXVA2FixedToFloat(range.MaxValue) - DXVA2FixedToFloat(range.DefaultValue));
+ else if (value < 0)
+ scaledValue -= float(value) * (DXVA2FixedToFloat(range.MinValue) - DXVA2FixedToFloat(range.DefaultValue));
+ }
+
+ return DXVA2FloatToFixed(scaledValue);
+}
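+
+// Worked example: with a driver range of MinValue = -100, DefaultValue = 0
+// and MaxValue = 100, an input of 0.5 maps to 0 + 0.5 * (100 - 0) = 50 and
+// an input of -0.5 maps to 0 - (-0.5) * (-100 - 0) = -50, so [-1, 1] is
+// spread linearly over [MinValue, MaxValue] around the default.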
+
+QT_END_NAMESPACE
+
+#include "moc_evrvideowindowcontrol_p.cpp"
diff --git a/src/plugins/multimedia/windows/evr/evrvideowindowcontrol_p.h b/src/plugins/multimedia/windows/evr/evrvideowindowcontrol_p.h
new file mode 100644
index 000000000..c4875d28d
--- /dev/null
+++ b/src/plugins/multimedia/windows/evr/evrvideowindowcontrol_p.h
@@ -0,0 +1,72 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef EVRVIDEOWINDOWCONTROL_H
+#define EVRVIDEOWINDOWCONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <d3d9.h>
+#include <dxva2api.h>
+#include <evr9.h>
+#include <evr.h>
+#include <private/qplatformvideosink_p.h>
+#include <private/qwindowsmfdefs_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class EvrVideoWindowControl : public QPlatformVideoSink
+{
+ Q_OBJECT
+public:
+ EvrVideoWindowControl(QVideoSink *parent = 0);
+ ~EvrVideoWindowControl() override;
+
+ bool setEvr(IUnknown *evr);
+
+ void setWinId(WId id) override;
+
+ void setDisplayRect(const QRect &rect) override;
+
+ void setFullScreen(bool fullScreen) override;
+
+ void setAspectRatioMode(Qt::AspectRatioMode mode) override;
+
+ void setBrightness(float brightness) override;
+ void setContrast(float contrast) override;
+ void setHue(float hue) override;
+ void setSaturation(float saturation) override;
+
+ void applyImageControls();
+
+private:
+ void clear();
+ DXVA2_Fixed32 scaleProcAmpValue(DWORD prop, float value) const;
+
+ WId m_windowId;
+ COLORREF m_windowColor;
+ DWORD m_dirtyValues;
+ Qt::AspectRatioMode m_aspectRatioMode;
+ QRect m_displayRect;
+ float m_brightness;
+ float m_contrast;
+ float m_hue;
+ float m_saturation;
+ bool m_fullScreen;
+
+ IMFVideoDisplayControl *m_displayControl;
+ IMFVideoProcessor *m_processor;
+};
+
+QT_END_NAMESPACE
+
+#endif // EVRVIDEOWINDOWCONTROL_H
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowscamera.cpp b/src/plugins/multimedia/windows/mediacapture/qwindowscamera.cpp
new file mode 100644
index 000000000..d5e25e1c5
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowscamera.cpp
@@ -0,0 +1,101 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwindowscamera_p.h"
+
+#include "qwindowsmediadevicesession_p.h"
+#include "qwindowsmediacapture_p.h"
+#include <qcameradevice.h>
+
+QT_BEGIN_NAMESPACE
+
+QWindowsCamera::QWindowsCamera(QCamera *camera)
+ : QPlatformCamera(camera)
+{
+}
+
+QWindowsCamera::~QWindowsCamera() = default;
+
+bool QWindowsCamera::isActive() const
+{
+ return m_active;
+}
+
+void QWindowsCamera::setActive(bool active)
+{
+ if (m_active == active)
+ return;
+ if (m_cameraDevice.isNull() && active)
+ return;
+ m_active = active;
+ if (m_mediaDeviceSession)
+ m_mediaDeviceSession->setActive(active);
+
+ emit activeChanged(m_active);
+}
+
+void QWindowsCamera::setCamera(const QCameraDevice &camera)
+{
+ if (m_cameraDevice == camera)
+ return;
+ m_cameraDevice = camera;
+ if (m_mediaDeviceSession)
+ m_mediaDeviceSession->setActiveCamera(camera);
+}
+
+void QWindowsCamera::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+ QWindowsMediaCaptureService *captureService = static_cast<QWindowsMediaCaptureService *>(session);
+ if (m_captureService == captureService)
+ return;
+
+ if (m_mediaDeviceSession) {
+ m_mediaDeviceSession->disconnect(this);
+ m_mediaDeviceSession->setActive(false);
+ m_mediaDeviceSession->setCameraFormat({});
+ m_mediaDeviceSession->setActiveCamera({});
+ }
+
+ m_captureService = captureService;
+ if (!m_captureService) {
+ m_mediaDeviceSession = nullptr;
+ return;
+ }
+
+ m_mediaDeviceSession = m_captureService->session();
+ Q_ASSERT(m_mediaDeviceSession);
+
+ m_mediaDeviceSession->setActive(false);
+ m_mediaDeviceSession->setActiveCamera(m_cameraDevice);
+ m_mediaDeviceSession->setCameraFormat(m_cameraFormat);
+ m_mediaDeviceSession->setActive(m_active);
+
+ connect(m_mediaDeviceSession, &QWindowsMediaDeviceSession::activeChanged,
+ this, &QWindowsCamera::onActiveChanged);
+}
+
+bool QWindowsCamera::setCameraFormat(const QCameraFormat &format)
+{
+ if (!format.isNull() && !m_cameraDevice.videoFormats().contains(format))
+ return false;
+
+ m_cameraFormat = format.isNull() ? findBestCameraFormat(m_cameraDevice) : format;
+
+ if (m_mediaDeviceSession)
+ m_mediaDeviceSession->setCameraFormat(m_cameraFormat);
+ return true;
+}
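+
+// A minimal usage sketch (illustrative; assumes `camera` is the owning
+// QCamera and `device` is a QCameraDevice from QMediaDevices::videoInputs()):
+//
+//     QWindowsCamera cam(camera);
+//     cam.setCamera(device);
+//     cam.setCameraFormat({});  // a null format falls back to findBestCameraFormat()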
+
+void QWindowsCamera::onActiveChanged(bool active)
+{
+ if (m_active == active)
+ return;
+ if (m_cameraDevice.isNull() && active)
+ return;
+ m_active = active;
+ emit activeChanged(m_active);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qwindowscamera_p.cpp"
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowscamera_p.h b/src/plugins/multimedia/windows/mediacapture/qwindowscamera_p.h
new file mode 100644
index 000000000..2aec11165
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowscamera_p.h
@@ -0,0 +1,55 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWINDOWSCAMERA_H
+#define QWINDOWSCAMERA_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformcamera_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QWindowsMediaCaptureService;
+class QWindowsMediaDeviceSession;
+
+class QWindowsCamera : public QPlatformCamera
+{
+ Q_OBJECT
+public:
+ explicit QWindowsCamera(QCamera *camera);
+ virtual ~QWindowsCamera();
+
+ bool isActive() const override;
+
+ void setCamera(const QCameraDevice &camera) override;
+
+ void setCaptureSession(QPlatformMediaCaptureSession *) override;
+
+ bool setCameraFormat(const QCameraFormat &format) override;
+
+ void setActive(bool active) override;
+
+private Q_SLOTS:
+ void onActiveChanged(bool active);
+
+private:
+ QWindowsMediaCaptureService *m_captureService = nullptr;
+ QWindowsMediaDeviceSession *m_mediaDeviceSession = nullptr;
+ QCameraDevice m_cameraDevice;
+ QCameraFormat m_cameraFormat;
+ bool m_active = false;
+};
+
+QT_END_NAMESPACE
+
+#endif // QWINDOWSCAMERA_H
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsimagecapture.cpp b/src/plugins/multimedia/windows/mediacapture/qwindowsimagecapture.cpp
new file mode 100644
index 000000000..ea66d561a
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsimagecapture.cpp
@@ -0,0 +1,207 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwindowsimagecapture_p.h"
+
+#include "qwindowsmediadevicesession_p.h"
+#include "qwindowsmediacapture_p.h"
+#include <private/qmediastoragelocation_p.h>
+
+#include <QtConcurrent/qtconcurrentrun.h>
+#include <QtGui/qimagewriter.h>
+
+QT_BEGIN_NAMESPACE
+
+QWindowsImageCapture::QWindowsImageCapture(QImageCapture *parent)
+ : QPlatformImageCapture(parent)
+{
+}
+
+QWindowsImageCapture::~QWindowsImageCapture() = default;
+
+bool QWindowsImageCapture::isReadyForCapture() const
+{
+ if (!m_mediaDeviceSession)
+ return false;
+ return !m_capturing && m_mediaDeviceSession->isActive() && !m_mediaDeviceSession->activeCamera().isNull();
+}
+
+int QWindowsImageCapture::capture(const QString &fileName)
+{
+ auto ext = writerFormat(m_settings.format());
+ auto path = QMediaStorageLocation::generateFileName(fileName, QStandardPaths::PicturesLocation, ext);
+ return doCapture(path);
+}
+
+int QWindowsImageCapture::captureToBuffer()
+{
+ return doCapture(QString());
+}
+
+int QWindowsImageCapture::doCapture(const QString &fileName)
+{
+ if (!isReadyForCapture())
+ return -1;
+ m_fileName = fileName;
+ m_capturing = true;
+ return m_captureId;
+}
+
+QImageEncoderSettings QWindowsImageCapture::imageSettings() const
+{
+ return m_settings;
+}
+
+void QWindowsImageCapture::setImageSettings(const QImageEncoderSettings &settings)
+{
+ m_settings = settings;
+}
+
+void QWindowsImageCapture::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+ QWindowsMediaCaptureService *captureService = static_cast<QWindowsMediaCaptureService *>(session);
+ if (m_captureService == captureService)
+ return;
+
+ auto readyForCapture = isReadyForCapture();
+ if (m_mediaDeviceSession)
+ disconnect(m_mediaDeviceSession, nullptr, this, nullptr);
+
+ m_captureService = captureService;
+ if (!m_captureService) {
+ if (readyForCapture)
+ emit readyForCaptureChanged(false);
+ m_mediaDeviceSession = nullptr;
+ return;
+ }
+
+ m_mediaDeviceSession = m_captureService->session();
+ Q_ASSERT(m_mediaDeviceSession);
+
+ if (isReadyForCapture() != readyForCapture)
+ emit readyForCaptureChanged(isReadyForCapture());
+
+ connect(m_mediaDeviceSession, &QWindowsMediaDeviceSession::readyForCaptureChanged,
+ this, &QWindowsImageCapture::readyForCaptureChanged);
+
+ connect(m_mediaDeviceSession, &QWindowsMediaDeviceSession::videoFrameChanged,
+ this, &QWindowsImageCapture::handleVideoFrameChanged);
+}
+
+void QWindowsImageCapture::handleVideoFrameChanged(const QVideoFrame &frame)
+{
+ if (m_capturing) {
+
+ QImage image = frame.toImage();
+
+ QSize size = m_settings.resolution();
+ if (size.isValid() && image.size() != size) {
+ image = image.scaled(size, Qt::KeepAspectRatioByExpanding);
+ if (image.size() != size) {
+ int xoff = (image.size().width() - size.width()) / 2;
+ int yoff = (image.size().height() - size.height()) / 2;
+ image = image.copy(xoff, yoff, size.width(), size.height());
+ }
+ }
+
+ emit imageExposed(m_captureId);
+ emit imageAvailable(m_captureId, frame);
+ emit imageCaptured(m_captureId, image);
+
+ QMediaMetaData metaData = this->metaData();
+ metaData.insert(QMediaMetaData::Date, QDateTime::currentDateTime());
+ metaData.insert(QMediaMetaData::Resolution, size);
+
+ emit imageMetadataAvailable(m_captureId, metaData);
+
+ if (!m_fileName.isEmpty()) {
+
+ (void)QtConcurrent::run(&QWindowsImageCapture::saveImage, this,
+ m_captureId, m_fileName, image, metaData, m_settings);
+ }
+
+ ++m_captureId;
+ m_capturing = false;
+ }
+}
+
+void QWindowsImageCapture::saveImage(int captureId, const QString &fileName,
+ const QImage &image, const QMediaMetaData &metaData,
+ const QImageEncoderSettings &settings)
+{
+ QImageWriter imageWriter;
+ imageWriter.setFileName(fileName);
+
+ QString format = writerFormat(settings.format());
+ imageWriter.setFormat(format.toUtf8());
+
+ int quality = writerQuality(format, settings.quality());
+ if (quality > -1)
+ imageWriter.setQuality(quality);
+
+ for (auto key : metaData.keys())
+ imageWriter.setText(QMediaMetaData::metaDataKeyToString(key),
+ metaData.stringValue(key));
+
+ imageWriter.write(image);
+
+ QMetaObject::invokeMethod(this, "imageSaved", Qt::QueuedConnection,
+ Q_ARG(int, captureId), Q_ARG(QString, fileName));
+}
+
+QString QWindowsImageCapture::writerFormat(QImageCapture::FileFormat reqFormat)
+{
+ QString format;
+
+ switch (reqFormat) {
+ case QImageCapture::FileFormat::JPEG:
+ format = QLatin1String("jpg");
+ break;
+ case QImageCapture::FileFormat::PNG:
+ format = QLatin1String("png");
+ break;
+ case QImageCapture::FileFormat::WebP:
+ format = QLatin1String("webp");
+ break;
+ case QImageCapture::FileFormat::Tiff:
+ format = QLatin1String("tiff");
+ break;
+ default:
+ format = QLatin1String("jpg");
+ }
+
+ auto supported = QImageWriter::supportedImageFormats();
+ for (const auto &f : supported)
+ if (format.compare(QString::fromUtf8(f), Qt::CaseInsensitive) == 0)
+ return format;
+
+ return QLatin1String("jpg");
+}
+
+int QWindowsImageCapture::writerQuality(const QString &writerFormat,
+ QImageCapture::Quality quality)
+{
+ if (writerFormat.compare(QLatin1String("jpg"), Qt::CaseInsensitive) == 0 ||
+ writerFormat.compare(QLatin1String("jpeg"), Qt::CaseInsensitive) == 0) {
+
+ switch (quality) {
+ case QImageCapture::Quality::VeryLowQuality:
+ return 10;
+ case QImageCapture::Quality::LowQuality:
+ return 30;
+ case QImageCapture::Quality::NormalQuality:
+ return 75;
+ case QImageCapture::Quality::HighQuality:
+ return 90;
+ case QImageCapture::Quality::VeryHighQuality:
+ return 98;
+ default:
+ return 75;
+ }
+ }
+ return -1;
+}
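+
+// For example, writerQuality(writerFormat(QImageCapture::FileFormat::JPEG),
+// QImageCapture::Quality::HighQuality) yields 90, while any non-JPEG format
+// returns -1 so that QImageWriter keeps its default quality.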
+
+QT_END_NAMESPACE
+
+#include "moc_qwindowsimagecapture_p.cpp"
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsimagecapture_p.h b/src/plugins/multimedia/windows/mediacapture/qwindowsimagecapture_p.h
new file mode 100644
index 000000000..746732e73
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsimagecapture_p.h
@@ -0,0 +1,64 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWINDOWSIMAGECAPTURE_H
+#define QWINDOWSIMAGECAPTURE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformimagecapture_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QWindowsMediaDeviceSession;
+class QWindowsMediaCaptureService;
+
+class QWindowsImageCapture : public QPlatformImageCapture
+{
+ Q_OBJECT
+public:
+ explicit QWindowsImageCapture(QImageCapture *parent);
+ virtual ~QWindowsImageCapture();
+
+ bool isReadyForCapture() const override;
+
+ int capture(const QString &fileName) override;
+ int captureToBuffer() override;
+
+ QImageEncoderSettings imageSettings() const override;
+ void setImageSettings(const QImageEncoderSettings &settings) override;
+
+ void setCaptureSession(QPlatformMediaCaptureSession *session);
+
+private Q_SLOTS:
+ void handleVideoFrameChanged(const QVideoFrame &frame);
+
+private:
+ int doCapture(const QString &fileName);
+ void saveImage(int captureId, const QString &fileName,
+ const QImage &image, const QMediaMetaData &metaData,
+ const QImageEncoderSettings &settings);
+ QString writerFormat(QImageCapture::FileFormat reqFormat);
+ int writerQuality(const QString &writerFormat,
+ QImageCapture::Quality quality);
+
+ QWindowsMediaCaptureService *m_captureService = nullptr;
+ QWindowsMediaDeviceSession *m_mediaDeviceSession = nullptr;
+ QImageEncoderSettings m_settings;
+ int m_captureId = 0;
+ bool m_capturing = false;
+ QString m_fileName;
+};
+
+QT_END_NAMESPACE
+
+#endif // QWINDOWSIMAGECAPTURE_H
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsmediacapture.cpp b/src/plugins/multimedia/windows/mediacapture/qwindowsmediacapture.cpp
new file mode 100644
index 000000000..d349b2c43
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsmediacapture.cpp
@@ -0,0 +1,109 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwindowsmediacapture_p.h"
+
+#include "qwindowsmediaencoder_p.h"
+#include "qwindowscamera_p.h"
+#include "qwindowsmediadevicesession_p.h"
+#include "qwindowsimagecapture_p.h"
+#include "qmediadevices.h"
+#include "qaudiodevice.h"
+#include "private/qplatformaudioinput_p.h"
+#include "private/qplatformaudiooutput_p.h"
+
+QT_BEGIN_NAMESPACE
+
+QWindowsMediaCaptureService::QWindowsMediaCaptureService()
+{
+ m_mediaDeviceSession = new QWindowsMediaDeviceSession(this);
+}
+
+QWindowsMediaCaptureService::~QWindowsMediaCaptureService()
+{
+ delete m_mediaDeviceSession;
+}
+
+QPlatformCamera *QWindowsMediaCaptureService::camera()
+{
+ return m_camera;
+}
+
+void QWindowsMediaCaptureService::setCamera(QPlatformCamera *camera)
+{
+ QWindowsCamera *control = static_cast<QWindowsCamera*>(camera);
+ if (m_camera == control)
+ return;
+
+ if (m_camera)
+ m_camera->setCaptureSession(nullptr);
+
+ m_camera = control;
+ if (m_camera)
+ m_camera->setCaptureSession(this);
+ emit cameraChanged();
+}
+
+QPlatformImageCapture *QWindowsMediaCaptureService::imageCapture()
+{
+ return m_imageCapture;
+}
+
+void QWindowsMediaCaptureService::setImageCapture(QPlatformImageCapture *imageCapture)
+{
+ QWindowsImageCapture *control = static_cast<QWindowsImageCapture *>(imageCapture);
+ if (m_imageCapture == control)
+ return;
+
+ if (m_imageCapture)
+ m_imageCapture->setCaptureSession(nullptr);
+
+ m_imageCapture = control;
+ if (m_imageCapture)
+ m_imageCapture->setCaptureSession(this);
+ emit imageCaptureChanged();
+}
+
+QPlatformMediaRecorder *QWindowsMediaCaptureService::mediaRecorder()
+{
+ return m_encoder;
+}
+
+void QWindowsMediaCaptureService::setMediaRecorder(QPlatformMediaRecorder *recorder)
+{
+ QWindowsMediaEncoder *control = static_cast<QWindowsMediaEncoder *>(recorder);
+ if (m_encoder == control)
+ return;
+
+ if (m_encoder)
+ m_encoder->setCaptureSession(nullptr);
+
+ m_encoder = control;
+ if (m_encoder)
+ m_encoder->setCaptureSession(this);
+ emit encoderChanged();
+}
+
+void QWindowsMediaCaptureService::setAudioInput(QPlatformAudioInput *input)
+{
+ m_mediaDeviceSession->setAudioInput(input ? input->q : nullptr);
+}
+
+void QWindowsMediaCaptureService::setAudioOutput(QPlatformAudioOutput *output)
+{
+ m_mediaDeviceSession->setAudioOutput(output ? output->q : nullptr);
+}
+
+void QWindowsMediaCaptureService::setVideoPreview(QVideoSink *sink)
+{
+ m_mediaDeviceSession->setVideoSink(sink);
+}
+
+QWindowsMediaDeviceSession *QWindowsMediaCaptureService::session() const
+{
+ return m_mediaDeviceSession;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qwindowsmediacapture_p.cpp"
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsmediacapture_p.h b/src/plugins/multimedia/windows/mediacapture/qwindowsmediacapture_p.h
new file mode 100644
index 000000000..579310afd
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsmediacapture_p.h
@@ -0,0 +1,62 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QWINDOWSMEDIACAPTURE_H
+#define QWINDOWSMEDIACAPTURE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediacapture_p.h>
+#include <private/qplatformmediaintegration_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QWindowsMediaEncoder;
+class QWindowsCamera;
+class QWindowsMediaDeviceSession;
+class QWindowsImageCapture;
+class QPlatformAudioInput;
+
+class QWindowsMediaCaptureService : public QPlatformMediaCaptureSession
+{
+ Q_OBJECT
+
+public:
+ QWindowsMediaCaptureService();
+ virtual ~QWindowsMediaCaptureService();
+
+ QPlatformCamera *camera() override;
+ void setCamera(QPlatformCamera *camera) override;
+
+ QPlatformImageCapture *imageCapture() override;
+ void setImageCapture(QPlatformImageCapture *imageCapture) override;
+
+ QPlatformMediaRecorder *mediaRecorder() override;
+ void setMediaRecorder(QPlatformMediaRecorder *recorder) override;
+
+ void setAudioInput(QPlatformAudioInput *) override;
+
+ void setAudioOutput(QPlatformAudioOutput *output) override;
+
+ void setVideoPreview(QVideoSink *sink) override;
+
+ QWindowsMediaDeviceSession *session() const;
+
+private:
+ QWindowsCamera *m_camera = nullptr;
+ QWindowsMediaDeviceSession *m_mediaDeviceSession = nullptr;
+ QWindowsImageCapture *m_imageCapture = nullptr;
+ QWindowsMediaEncoder *m_encoder = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif // QWINDOWSMEDIACAPTURE_H
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader.cpp b/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader.cpp
new file mode 100644
index 000000000..e99b95ad2
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader.cpp
@@ -0,0 +1,1019 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwindowsmediadevicereader_p.h"
+
+#include "private/qwindowsmultimediautils_p.h"
+#include <qvideosink.h>
+#include <qmediadevices.h>
+#include <qaudiodevice.h>
+#include <private/qmemoryvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
+#include <private/qwindowsmfdefs_p.h>
+#include <private/qcomptr_p.h>
+#include <QtCore/qdebug.h>
+
+#include <mmdeviceapi.h>
+
+QT_BEGIN_NAMESPACE
+
+enum { MEDIA_TYPE_INDEX_DEFAULT = 0xffffffff };
+
+QWindowsMediaDeviceReader::QWindowsMediaDeviceReader(QObject *parent)
+ : QObject(parent)
+{
+ m_durationTimer.setInterval(100);
+ connect(&m_durationTimer, &QTimer::timeout, this, &QWindowsMediaDeviceReader::updateDuration);
+}
+
+QWindowsMediaDeviceReader::~QWindowsMediaDeviceReader()
+{
+ stopRecording();
+ deactivate();
+}
+
+// Creates a video or audio media source specified by deviceId (symbolic link)
+HRESULT QWindowsMediaDeviceReader::createSource(const QString &deviceId, bool video, IMFMediaSource **source)
+{
+ if (!source)
+ return E_INVALIDARG;
+
+ *source = nullptr;
+ IMFAttributes *sourceAttributes = nullptr;
+
+ HRESULT hr = MFCreateAttributes(&sourceAttributes, 2);
+ if (SUCCEEDED(hr)) {
+
+ hr = sourceAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
+ video ? QMM_MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
+ : QMM_MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_AUDCAP_GUID);
+ if (SUCCEEDED(hr)) {
+
+ hr = sourceAttributes->SetString(video ? MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK
+ : MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_AUDCAP_ENDPOINT_ID,
+ reinterpret_cast<LPCWSTR>(deviceId.utf16()));
+ if (SUCCEEDED(hr)) {
+
+ hr = MFCreateDeviceSource(sourceAttributes, source);
+ }
+ }
+ sourceAttributes->Release();
+ }
+
+ return hr;
+}
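+
+// A minimal call sketch (illustrative; assumes `deviceId` holds the symbolic
+// link of a capture device enumerated elsewhere):
+//
+//     IMFMediaSource *source = nullptr;
+//     if (SUCCEEDED(createSource(deviceId, /*video=*/true, &source))) {
+//         // ...hand the source to createAggregateReader()...
+//         source->Release();
+//     }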
+
+// Creates a source/reader aggregating two other sources (video/audio).
+// If one of the sources is null the result will be video-only or audio-only.
+HRESULT QWindowsMediaDeviceReader::createAggregateReader(IMFMediaSource *firstSource,
+ IMFMediaSource *secondSource,
+ IMFMediaSource **aggregateSource,
+ IMFSourceReader **sourceReader)
+{
+ if ((!firstSource && !secondSource) || !aggregateSource || !sourceReader)
+ return E_INVALIDARG;
+
+ *aggregateSource = nullptr;
+ *sourceReader = nullptr;
+
+ IMFCollection *sourceCollection = nullptr;
+
+ HRESULT hr = MFCreateCollection(&sourceCollection);
+ if (SUCCEEDED(hr)) {
+
+ if (firstSource)
+ sourceCollection->AddElement(firstSource);
+
+ if (secondSource)
+ sourceCollection->AddElement(secondSource);
+
+ hr = MFCreateAggregateSource(sourceCollection, aggregateSource);
+ if (SUCCEEDED(hr)) {
+
+ IMFAttributes *readerAttributes = nullptr;
+
+ hr = MFCreateAttributes(&readerAttributes, 1);
+ if (SUCCEEDED(hr)) {
+
+ // Set callback so OnReadSample() is called for each new video frame or audio sample.
+ hr = readerAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK,
+ static_cast<IMFSourceReaderCallback*>(this));
+ if (SUCCEEDED(hr)) {
+
+ hr = MFCreateSourceReaderFromMediaSource(*aggregateSource, readerAttributes, sourceReader);
+ }
+ readerAttributes->Release();
+ }
+ }
+ sourceCollection->Release();
+ }
+ return hr;
+}
+
+// Selects the requested resolution/frame rate (if specified),
+// or chooses a high quality configuration otherwise.
+DWORD QWindowsMediaDeviceReader::findMediaTypeIndex(const QCameraFormat &reqFormat)
+{
+ DWORD mediaIndex = MEDIA_TYPE_INDEX_DEFAULT;
+
+ if (m_sourceReader && m_videoSource) {
+
+ DWORD index = 0;
+ IMFMediaType *mediaType = nullptr;
+
+ UINT32 currArea = 0;
+ float currFrameRate = 0.0f;
+
+ while (SUCCEEDED(m_sourceReader->GetNativeMediaType(DWORD(MF_SOURCE_READER_FIRST_VIDEO_STREAM),
+ index, &mediaType))) {
+
+ GUID subtype = GUID_NULL;
+ if (SUCCEEDED(mediaType->GetGUID(MF_MT_SUBTYPE, &subtype))) {
+
+ auto pixelFormat = QWindowsMultimediaUtils::pixelFormatFromMediaSubtype(subtype);
+ if (pixelFormat != QVideoFrameFormat::Format_Invalid) {
+
+ UINT32 width, height;
+ if (SUCCEEDED(MFGetAttributeSize(mediaType, MF_MT_FRAME_SIZE, &width, &height))) {
+
+ UINT32 num, den;
+ if (SUCCEEDED(MFGetAttributeRatio(mediaType, MF_MT_FRAME_RATE, &num, &den))) {
+
+ UINT32 area = width * height;
+ float frameRate = float(num) / den;
+
+ if (!reqFormat.isNull()
+ && UINT32(reqFormat.resolution().width()) == width
+ && UINT32(reqFormat.resolution().height()) == height
+ && qFuzzyCompare(reqFormat.maxFrameRate(), frameRate)
+ && reqFormat.pixelFormat() == pixelFormat) {
+ mediaType->Release();
+ return index;
+ }
+
+                            if ((currFrameRate < 29.9f && currFrameRate < frameRate) ||
+                                (qFuzzyCompare(currFrameRate, frameRate) && currArea < area)) {
+ currArea = area;
+ currFrameRate = frameRate;
+ mediaIndex = index;
+ }
+ }
+ }
+ }
+ }
+ mediaType->Release();
+ ++index;
+ }
+ }
+
+ return mediaIndex;
+}
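+
+// For example, starting from a 640x480 @ 30 fps type, a 1280x720 @ 30 fps
+// type replaces it on area alone (same rate, larger frame), while a
+// 1920x1080 @ 15 fps type is skipped: rates below ~30 fps only win as long
+// as no faster type has been seen yet.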
+
+
+// Prepares the source video stream and gets some metadata.
+HRESULT QWindowsMediaDeviceReader::prepareVideoStream(DWORD mediaTypeIndex)
+{
+ if (!m_sourceReader)
+ return E_FAIL;
+
+ if (!m_videoSource)
+ return S_OK; // It may be audio-only
+
+ HRESULT hr;
+
+ if (mediaTypeIndex == MEDIA_TYPE_INDEX_DEFAULT) {
+ hr = m_sourceReader->GetCurrentMediaType(DWORD(MF_SOURCE_READER_FIRST_VIDEO_STREAM),
+ &m_videoMediaType);
+ } else {
+ hr = m_sourceReader->GetNativeMediaType(DWORD(MF_SOURCE_READER_FIRST_VIDEO_STREAM),
+ mediaTypeIndex, &m_videoMediaType);
+ if (SUCCEEDED(hr))
+ hr = m_sourceReader->SetCurrentMediaType(DWORD(MF_SOURCE_READER_FIRST_VIDEO_STREAM),
+ nullptr, m_videoMediaType);
+ }
+
+ if (SUCCEEDED(hr)) {
+
+ GUID subtype = GUID_NULL;
+ hr = m_videoMediaType->GetGUID(MF_MT_SUBTYPE, &subtype);
+ if (SUCCEEDED(hr)) {
+
+ m_pixelFormat = QWindowsMultimediaUtils::pixelFormatFromMediaSubtype(subtype);
+
+ if (m_pixelFormat == QVideoFrameFormat::Format_Invalid) {
+ hr = E_FAIL;
+ } else {
+
+ // get the frame dimensions
+ hr = MFGetAttributeSize(m_videoMediaType, MF_MT_FRAME_SIZE, &m_frameWidth, &m_frameHeight);
+ if (SUCCEEDED(hr)) {
+
+ // and the stride, which we need to convert the frame later
+ hr = MFGetStrideForBitmapInfoHeader(subtype.Data1, m_frameWidth, &m_stride);
+ if (SUCCEEDED(hr)) {
+ m_stride = qAbs(m_stride);
+ UINT32 frameRateNum, frameRateDen;
+ hr = MFGetAttributeRatio(m_videoMediaType, MF_MT_FRAME_RATE, &frameRateNum, &frameRateDen);
+ if (SUCCEEDED(hr)) {
+
+ m_frameRate = qreal(frameRateNum) / frameRateDen;
+
+ hr = m_sourceReader->SetStreamSelection(DWORD(MF_SOURCE_READER_FIRST_VIDEO_STREAM), TRUE);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ return hr;
+}
+
+HRESULT QWindowsMediaDeviceReader::initAudioType(IMFMediaType *mediaType, UINT32 channels, UINT32 samplesPerSec, bool flt)
+{
+ if (!mediaType)
+ return E_INVALIDARG;
+
+ HRESULT hr = mediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
+ if (SUCCEEDED(hr)) {
+ hr = mediaType->SetGUID(MF_MT_SUBTYPE, flt ? MFAudioFormat_Float : MFAudioFormat_PCM);
+ if (SUCCEEDED(hr)) {
+ hr = mediaType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, channels);
+ if (SUCCEEDED(hr)) {
+ hr = mediaType->SetUINT32(MF_MT_AUDIO_CHANNEL_MASK,
+ (channels == 1) ? SPEAKER_FRONT_CENTER : (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT ));
+ if (SUCCEEDED(hr)) {
+ hr = mediaType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, samplesPerSec);
+ if (SUCCEEDED(hr)) {
+ UINT32 bitsPerSample = flt ? 32 : 16;
+ UINT32 bytesPerFrame = channels * bitsPerSample/8;
+ hr = mediaType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, bitsPerSample);
+ if (SUCCEEDED(hr)) {
+ hr = mediaType->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, bytesPerFrame);
+ if (SUCCEEDED(hr)) {
+ hr = mediaType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, bytesPerFrame * samplesPerSec);
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ return hr;
+}
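+
+// Worked example: for the float configuration requested below (channels = 2,
+// samplesPerSec = 48000, flt = true) this sets bitsPerSample = 32,
+// bytesPerFrame = 2 * 32 / 8 = 8, and an average rate of
+// 8 * 48000 = 384000 bytes per second.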
+
+// Prepares the source audio stream.
+HRESULT QWindowsMediaDeviceReader::prepareAudioStream()
+{
+ if (!m_sourceReader)
+ return E_FAIL;
+
+ if (!m_audioSource)
+ return S_OK; // It may be video-only
+
+ HRESULT hr = m_sourceReader->GetCurrentMediaType(DWORD(MF_SOURCE_READER_FIRST_AUDIO_STREAM),
+ &m_audioMediaType);
+ if (SUCCEEDED(hr)) {
+ hr = initAudioType(m_audioMediaType, 2, 48000, true);
+ if (SUCCEEDED(hr)) {
+ hr = m_sourceReader->SetCurrentMediaType(DWORD(MF_SOURCE_READER_FIRST_AUDIO_STREAM),
+ nullptr, m_audioMediaType);
+ if (SUCCEEDED(hr)) {
+ hr = m_sourceReader->SetStreamSelection(DWORD(MF_SOURCE_READER_FIRST_AUDIO_STREAM), TRUE);
+ }
+ }
+ }
+ return hr;
+}
+
+// Retrieves the indexes for selected video/audio streams.
+HRESULT QWindowsMediaDeviceReader::initSourceIndexes()
+{
+ if (!m_sourceReader)
+ return E_FAIL;
+
+ m_sourceVideoStreamIndex = MF_SOURCE_READER_INVALID_STREAM_INDEX;
+ m_sourceAudioStreamIndex = MF_SOURCE_READER_INVALID_STREAM_INDEX;
+
+ DWORD index = 0;
+ BOOL selected = FALSE;
+
+ while (m_sourceReader->GetStreamSelection(index, &selected) == S_OK) {
+ if (selected) {
+ IMFMediaType *mediaType = nullptr;
+ if (SUCCEEDED(m_sourceReader->GetCurrentMediaType(index, &mediaType))) {
+ GUID majorType = GUID_NULL;
+ if (SUCCEEDED(mediaType->GetGUID(MF_MT_MAJOR_TYPE, &majorType))) {
+ if (majorType == MFMediaType_Video)
+ m_sourceVideoStreamIndex = index;
+ else if (majorType == MFMediaType_Audio)
+ m_sourceAudioStreamIndex = index;
+ }
+ mediaType->Release();
+ }
+ }
+ ++index;
+ }
+ if ((m_videoSource && m_sourceVideoStreamIndex == MF_SOURCE_READER_INVALID_STREAM_INDEX) ||
+ (m_audioSource && m_sourceAudioStreamIndex == MF_SOURCE_READER_INVALID_STREAM_INDEX))
+ return E_FAIL;
+ return S_OK;
+}
+
+bool QWindowsMediaDeviceReader::setAudioOutput(const QString &audioOutputId)
+{
+ QMutexLocker locker(&m_mutex);
+
+ stopMonitoring();
+
+ m_audioOutputId = audioOutputId;
+
+ if (!m_active || m_audioOutputId.isEmpty())
+ return true;
+
+ HRESULT hr = startMonitoring();
+
+ return SUCCEEDED(hr);
+}
+
+HRESULT QWindowsMediaDeviceReader::startMonitoring()
+{
+ if (m_audioOutputId.isEmpty())
+ return E_FAIL;
+
+ IMFAttributes *sinkAttributes = nullptr;
+
+ HRESULT hr = MFCreateAttributes(&sinkAttributes, 1);
+ if (SUCCEEDED(hr)) {
+
+ hr = sinkAttributes->SetString(MF_AUDIO_RENDERER_ATTRIBUTE_ENDPOINT_ID,
+ reinterpret_cast<LPCWSTR>(m_audioOutputId.utf16()));
+ if (SUCCEEDED(hr)) {
+
+ IMFMediaSink *mediaSink = nullptr;
+ hr = MFCreateAudioRenderer(sinkAttributes, &mediaSink);
+ if (SUCCEEDED(hr)) {
+
+ IMFStreamSink *streamSink = nullptr;
+ hr = mediaSink->GetStreamSinkByIndex(0, &streamSink);
+ if (SUCCEEDED(hr)) {
+
+ IMFMediaTypeHandler *typeHandler = nullptr;
+ hr = streamSink->GetMediaTypeHandler(&typeHandler);
+ if (SUCCEEDED(hr)) {
+
+ hr = typeHandler->IsMediaTypeSupported(m_audioMediaType, nullptr);
+ if (SUCCEEDED(hr)) {
+
+ hr = typeHandler->SetCurrentMediaType(m_audioMediaType);
+ if (SUCCEEDED(hr)) {
+
+ IMFAttributes *writerAttributes = nullptr;
+
+                                    hr = MFCreateAttributes(&writerAttributes, 1);
+ if (SUCCEEDED(hr)) {
+
+ hr = writerAttributes->SetUINT32(MF_SINK_WRITER_DISABLE_THROTTLING, TRUE);
+ if (SUCCEEDED(hr)) {
+
+ IMFSinkWriter *sinkWriter = nullptr;
+ hr = MFCreateSinkWriterFromMediaSink(mediaSink, writerAttributes, &sinkWriter);
+ if (SUCCEEDED(hr)) {
+
+ hr = sinkWriter->SetInputMediaType(0, m_audioMediaType, nullptr);
+ if (SUCCEEDED(hr)) {
+
+ IMFSimpleAudioVolume *audioVolume = nullptr;
+
+ if (SUCCEEDED(MFGetService(mediaSink, QMM_MR_POLICY_VOLUME_SERVICE, IID_PPV_ARGS(&audioVolume)))) {
+ audioVolume->SetMasterVolume(float(m_outputVolume));
+ audioVolume->SetMute(m_outputMuted);
+ audioVolume->Release();
+ }
+
+ hr = sinkWriter->BeginWriting();
+ if (SUCCEEDED(hr)) {
+ m_monitorSink = mediaSink;
+ m_monitorSink->AddRef();
+ m_monitorWriter = sinkWriter;
+ m_monitorWriter->AddRef();
+ }
+ }
+ sinkWriter->Release();
+ }
+ }
+ writerAttributes->Release();
+ }
+ }
+ }
+ typeHandler->Release();
+ }
+ streamSink->Release();
+ }
+ mediaSink->Release();
+ }
+ }
+ sinkAttributes->Release();
+ }
+
+ return hr;
+}
+
+void QWindowsMediaDeviceReader::stopMonitoring()
+{
+ if (m_monitorWriter) {
+ m_monitorWriter->Release();
+ m_monitorWriter = nullptr;
+ }
+ if (m_monitorSink) {
+ m_monitorSink->Shutdown();
+ m_monitorSink->Release();
+ m_monitorSink = nullptr;
+ }
+}
+
+// Activates the requested camera/microphone for streaming.
+// One of the IDs may be empty for video-only/audio-only.
+bool QWindowsMediaDeviceReader::activate(const QString &cameraId,
+ const QCameraFormat &cameraFormat,
+ const QString &microphoneId)
+{
+ QMutexLocker locker(&m_mutex);
+
+ if (cameraId.isEmpty() && microphoneId.isEmpty())
+ return false;
+
+ stopMonitoring();
+ releaseResources();
+
+ m_active = false;
+ m_streaming = false;
+
+ if (!cameraId.isEmpty()) {
+ if (!SUCCEEDED(createSource(cameraId, true, &m_videoSource))) {
+ releaseResources();
+ return false;
+ }
+ }
+
+ if (!microphoneId.isEmpty()) {
+ if (!SUCCEEDED(createSource(microphoneId, false, &m_audioSource))) {
+ releaseResources();
+ return false;
+ }
+ }
+
+ if (!SUCCEEDED(createAggregateReader(m_videoSource, m_audioSource, &m_aggregateSource, &m_sourceReader))) {
+ releaseResources();
+ return false;
+ }
+
+ DWORD mediaTypeIndex = findMediaTypeIndex(cameraFormat);
+
+ if (!SUCCEEDED(prepareVideoStream(mediaTypeIndex))) {
+ releaseResources();
+ return false;
+ }
+
+ if (!SUCCEEDED(prepareAudioStream())) {
+ releaseResources();
+ return false;
+ }
+
+ if (!SUCCEEDED(initSourceIndexes())) {
+ releaseResources();
+ return false;
+ }
+
+ updateSinkInputMediaTypes();
+ startMonitoring();
+
+ // Request the first frame or audio sample.
+ if (!SUCCEEDED(m_sourceReader->ReadSample(MF_SOURCE_READER_ANY_STREAM, 0, nullptr, nullptr, nullptr, nullptr))) {
+ releaseResources();
+ return false;
+ }
+
+ m_active = true;
+ return true;
+}
+
+void QWindowsMediaDeviceReader::deactivate()
+{
+ stopMonitoring();
+ stopStreaming();
+ m_active = false;
+ m_streaming = false;
+}
+
+void QWindowsMediaDeviceReader::stopStreaming()
+{
+ QMutexLocker locker(&m_mutex);
+ releaseResources();
+}
+
+// Releases the resources allocated for streaming.
+void QWindowsMediaDeviceReader::releaseResources()
+{
+ if (m_videoMediaType) {
+ m_videoMediaType->Release();
+ m_videoMediaType = nullptr;
+ }
+ if (m_audioMediaType) {
+ m_audioMediaType->Release();
+ m_audioMediaType = nullptr;
+ }
+ if (m_sourceReader) {
+ m_sourceReader->Release();
+ m_sourceReader = nullptr;
+ }
+ if (m_aggregateSource) {
+ m_aggregateSource->Release();
+ m_aggregateSource = nullptr;
+ }
+ if (m_videoSource) {
+ m_videoSource->Release();
+ m_videoSource = nullptr;
+ }
+ if (m_audioSource) {
+ m_audioSource->Release();
+ m_audioSource = nullptr;
+ }
+}
+
+HRESULT QWindowsMediaDeviceReader::createVideoMediaType(const GUID &format, UINT32 bitRate, UINT32 width,
+ UINT32 height, qreal frameRate, IMFMediaType **mediaType)
+{
+ if (!mediaType)
+ return E_INVALIDARG;
+
+ *mediaType = nullptr;
+ IMFMediaType *targetMediaType = nullptr;
+
+ if (SUCCEEDED(MFCreateMediaType(&targetMediaType))) {
+
+ if (SUCCEEDED(targetMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video))) {
+
+ if (SUCCEEDED(targetMediaType->SetGUID(MF_MT_SUBTYPE, format))) {
+
+ if (SUCCEEDED(targetMediaType->SetUINT32(MF_MT_AVG_BITRATE, bitRate))) {
+
+ if (SUCCEEDED(MFSetAttributeSize(targetMediaType, MF_MT_FRAME_SIZE, width, height))) {
+
+ if (SUCCEEDED(MFSetAttributeRatio(targetMediaType, MF_MT_FRAME_RATE,
+ UINT32(frameRate * 1000), 1000))) {
+ UINT32 t1, t2;
+ if (SUCCEEDED(MFGetAttributeRatio(m_videoMediaType, MF_MT_PIXEL_ASPECT_RATIO, &t1, &t2))) {
+
+ if (SUCCEEDED(MFSetAttributeRatio(targetMediaType, MF_MT_PIXEL_ASPECT_RATIO, t1, t2))) {
+
+ if (SUCCEEDED(m_videoMediaType->GetUINT32(MF_MT_INTERLACE_MODE, &t1))) {
+
+ if (SUCCEEDED(targetMediaType->SetUINT32(MF_MT_INTERLACE_MODE, t1))) {
+
+ *mediaType = targetMediaType;
+ return S_OK;
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ targetMediaType->Release();
+ }
+ return E_FAIL;
+}
+
+HRESULT QWindowsMediaDeviceReader::createAudioMediaType(const GUID &format, UINT32 bitRate, IMFMediaType **mediaType)
+{
+ if (!mediaType)
+ return E_INVALIDARG;
+
+ *mediaType = nullptr;
+ IMFMediaType *targetMediaType = nullptr;
+
+ if (SUCCEEDED(MFCreateMediaType(&targetMediaType))) {
+
+ if (SUCCEEDED(targetMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio))) {
+
+ if (SUCCEEDED(targetMediaType->SetGUID(MF_MT_SUBTYPE, format))) {
+
+ if (bitRate == 0 || SUCCEEDED(targetMediaType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, bitRate / 8))) {
+
+ *mediaType = targetMediaType;
+ return S_OK;
+ }
+ }
+ }
+ targetMediaType->Release();
+ }
+ return E_FAIL;
+}
+
+HRESULT QWindowsMediaDeviceReader::updateSinkInputMediaTypes()
+{
+ HRESULT hr = S_OK;
+ if (m_sinkWriter) {
+ if (m_videoSource && m_videoMediaType && m_sinkVideoStreamIndex != MF_SINK_WRITER_INVALID_STREAM_INDEX) {
+ hr = m_sinkWriter->SetInputMediaType(m_sinkVideoStreamIndex, m_videoMediaType, nullptr);
+ }
+ if (SUCCEEDED(hr)) {
+ if (m_audioSource && m_audioMediaType && m_sinkAudioStreamIndex != MF_SINK_WRITER_INVALID_STREAM_INDEX) {
+ hr = m_sinkWriter->SetInputMediaType(m_sinkAudioStreamIndex, m_audioMediaType, nullptr);
+ }
+ }
+ }
+ return hr;
+}
+
+QMediaRecorder::Error QWindowsMediaDeviceReader::startRecording(
+ const QString &fileName, const GUID &container, const GUID &videoFormat, UINT32 videoBitRate,
+ UINT32 width, UINT32 height, qreal frameRate, const GUID &audioFormat, UINT32 audioBitRate)
+{
+ QMutexLocker locker(&m_mutex);
+
+ if (!m_active || m_recording || (videoFormat == GUID_NULL && audioFormat == GUID_NULL))
+ return QMediaRecorder::ResourceError;
+
+ ComPtr<IMFAttributes> writerAttributes;
+
+ HRESULT hr = MFCreateAttributes(writerAttributes.GetAddressOf(), 2);
+ if (FAILED(hr))
+ return QMediaRecorder::ResourceError;
+
+ // Set callback so OnFinalize() is called after video is saved.
+ hr = writerAttributes->SetUnknown(MF_SINK_WRITER_ASYNC_CALLBACK,
+ static_cast<IMFSinkWriterCallback*>(this));
+ if (FAILED(hr))
+ return QMediaRecorder::ResourceError;
+
+ hr = writerAttributes->SetGUID(QMM_MF_TRANSCODE_CONTAINERTYPE, container);
+ if (FAILED(hr))
+ return QMediaRecorder::ResourceError;
+
+ ComPtr<IMFSinkWriter> sinkWriter;
+ hr = MFCreateSinkWriterFromURL(reinterpret_cast<LPCWSTR>(fileName.utf16()),
+ nullptr, writerAttributes.Get(), sinkWriter.GetAddressOf());
+ if (FAILED(hr))
+ return QMediaRecorder::LocationNotWritable;
+
+ m_sinkVideoStreamIndex = MF_SINK_WRITER_INVALID_STREAM_INDEX;
+ m_sinkAudioStreamIndex = MF_SINK_WRITER_INVALID_STREAM_INDEX;
+
+ if (m_videoSource && videoFormat != GUID_NULL) {
+ IMFMediaType *targetMediaType = nullptr;
+
+ hr = createVideoMediaType(videoFormat, videoBitRate, width, height, frameRate, &targetMediaType);
+ if (SUCCEEDED(hr)) {
+
+ hr = sinkWriter->AddStream(targetMediaType, &m_sinkVideoStreamIndex);
+ if (SUCCEEDED(hr)) {
+
+ hr = sinkWriter->SetInputMediaType(m_sinkVideoStreamIndex, m_videoMediaType, nullptr);
+ }
+ targetMediaType->Release();
+ }
+ }
+
+ if (SUCCEEDED(hr)) {
+ if (m_audioSource && audioFormat != GUID_NULL) {
+ IMFMediaType *targetMediaType = nullptr;
+
+ hr = createAudioMediaType(audioFormat, audioBitRate, &targetMediaType);
+ if (SUCCEEDED(hr)) {
+
+ hr = sinkWriter->AddStream(targetMediaType, &m_sinkAudioStreamIndex);
+ if (SUCCEEDED(hr)) {
+
+ hr = sinkWriter->SetInputMediaType(m_sinkAudioStreamIndex, m_audioMediaType, nullptr);
+ }
+ targetMediaType->Release();
+ }
+ }
+ }
+
+ if (FAILED(hr))
+ return QMediaRecorder::FormatError;
+
+ hr = sinkWriter->BeginWriting();
+ if (FAILED(hr))
+ return QMediaRecorder::ResourceError;
+
+ m_sinkWriter = sinkWriter.Detach();
+ m_lastDuration = -1;
+ m_currentDuration = 0;
+ updateDuration();
+ m_durationTimer.start();
+ m_recording = true;
+ m_firstFrame = true;
+ m_paused = false;
+ m_pauseChanging = false;
+
+ return QMediaRecorder::NoError;
+}
+
+void QWindowsMediaDeviceReader::stopRecording()
+{
+ QMutexLocker locker(&m_mutex);
+
+ if (m_sinkWriter && m_recording) {
+
+ HRESULT hr = m_sinkWriter->Finalize();
+
+ if (SUCCEEDED(hr)) {
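+            // wait() releases m_mutex so the OnFinalize() callback can lock it;
+            // execution blocks here until finalization has completed.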
+ m_hasFinalized.wait(&m_mutex);
+ } else {
+ m_sinkWriter->Release();
+ m_sinkWriter = nullptr;
+
+ QMetaObject::invokeMethod(this, "recordingError",
+ Qt::QueuedConnection, Q_ARG(int, hr));
+ }
+ }
+
+ m_recording = false;
+ m_paused = false;
+ m_pauseChanging = false;
+
+ m_durationTimer.stop();
+ m_lastDuration = -1;
+ m_currentDuration = -1;
+}
+
+bool QWindowsMediaDeviceReader::pauseRecording()
+{
+ if (!m_recording || m_paused)
+ return false;
+ m_pauseTime = m_lastTimestamp;
+ m_paused = true;
+ m_pauseChanging = true;
+ return true;
+}
+
+bool QWindowsMediaDeviceReader::resumeRecording()
+{
+ if (!m_recording || !m_paused)
+ return false;
+ m_paused = false;
+ m_pauseChanging = true;
+ return true;
+}
+
+//from IUnknown
+STDMETHODIMP QWindowsMediaDeviceReader::QueryInterface(REFIID riid, LPVOID *ppvObject)
+{
+ if (!ppvObject)
+ return E_POINTER;
+ if (riid == IID_IMFSourceReaderCallback) {
+ *ppvObject = static_cast<IMFSourceReaderCallback*>(this);
+ } else if (riid == IID_IMFSinkWriterCallback) {
+ *ppvObject = static_cast<IMFSinkWriterCallback*>(this);
+ } else if (riid == IID_IUnknown) {
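+        // IUnknown is an ambiguous base here (both callback interfaces derive
+        // from it), so upcast through one of them explicitly.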
+ *ppvObject = static_cast<IUnknown*>(static_cast<IMFSourceReaderCallback*>(this));
+ } else {
+ *ppvObject = nullptr;
+ return E_NOINTERFACE;
+ }
+ AddRef();
+ return S_OK;
+}
+
+STDMETHODIMP_(ULONG) QWindowsMediaDeviceReader::AddRef(void)
+{
+ return InterlockedIncrement(&m_cRef);
+}
+
+STDMETHODIMP_(ULONG) QWindowsMediaDeviceReader::Release(void)
+{
+ LONG cRef = InterlockedDecrement(&m_cRef);
+ if (cRef == 0) {
+ this->deleteLater();
+ }
+ return cRef;
+}
+
+UINT32 QWindowsMediaDeviceReader::frameWidth() const
+{
+ return m_frameWidth;
+}
+
+UINT32 QWindowsMediaDeviceReader::frameHeight() const
+{
+ return m_frameHeight;
+}
+
+qreal QWindowsMediaDeviceReader::frameRate() const
+{
+ return m_frameRate;
+}
+
+void QWindowsMediaDeviceReader::setInputMuted(bool muted)
+{
+ m_inputMuted = muted;
+}
+
+void QWindowsMediaDeviceReader::setInputVolume(qreal volume)
+{
+ m_inputVolume = qBound(0.0, volume, 1.0);
+}
+
+void QWindowsMediaDeviceReader::setOutputMuted(bool muted)
+{
+ QMutexLocker locker(&m_mutex);
+
+ m_outputMuted = muted;
+
+ if (m_active && m_monitorSink) {
+ IMFSimpleAudioVolume *audioVolume = nullptr;
+ if (SUCCEEDED(MFGetService(m_monitorSink, QMM_MR_POLICY_VOLUME_SERVICE,
+ IID_PPV_ARGS(&audioVolume)))) {
+ audioVolume->SetMute(m_outputMuted);
+ audioVolume->Release();
+ }
+ }
+}
+
+void QWindowsMediaDeviceReader::setOutputVolume(qreal volume)
+{
+ QMutexLocker locker(&m_mutex);
+
+ m_outputVolume = qBound(0.0, volume, 1.0);
+
+ if (m_active && m_monitorSink) {
+ IMFSimpleAudioVolume *audioVolume = nullptr;
+ if (SUCCEEDED(MFGetService(m_monitorSink, QMM_MR_POLICY_VOLUME_SERVICE,
+ IID_PPV_ARGS(&audioVolume)))) {
+ audioVolume->SetMasterVolume(float(m_outputVolume));
+ audioVolume->Release();
+ }
+ }
+}
+
+void QWindowsMediaDeviceReader::updateDuration()
+{
+ if (m_currentDuration >= 0 && m_lastDuration != m_currentDuration) {
+ m_lastDuration = m_currentDuration;
+ emit durationChanged(m_currentDuration);
+ }
+}
+
+//from IMFSourceReaderCallback
+STDMETHODIMP QWindowsMediaDeviceReader::OnReadSample(HRESULT hrStatus, DWORD dwStreamIndex,
+ DWORD dwStreamFlags, LONGLONG llTimestamp,
+ IMFSample *pSample)
+{
+ QMutexLocker locker(&m_mutex);
+
+ if (FAILED(hrStatus)) {
+ emit streamingError(int(hrStatus));
+ return hrStatus;
+ }
+
+ m_lastTimestamp = llTimestamp;
+
+ if ((dwStreamFlags & MF_SOURCE_READERF_ENDOFSTREAM) == MF_SOURCE_READERF_ENDOFSTREAM) {
+ m_streaming = false;
+ emit streamingStopped();
+ } else {
+
+ if (!m_streaming) {
+ m_streaming = true;
+ emit streamingStarted();
+ }
+ if (pSample) {
+
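+            // Forward audio samples to the monitoring writer so that they
+            // remain audible on the selected audio output.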
+ if (m_monitorWriter && dwStreamIndex == m_sourceAudioStreamIndex)
+ m_monitorWriter->WriteSample(0, pSample);
+
+ if (m_recording) {
+
+ if (m_firstFrame) {
+ m_timeOffset = llTimestamp;
+ m_firstFrame = false;
+ emit recordingStarted();
+ }
+
+ if (m_pauseChanging) {
+ // Recording time should not pass while paused.
+ if (m_paused)
+ m_pauseTime = llTimestamp;
+ else
+ m_timeOffset += llTimestamp - m_pauseTime;
+ m_pauseChanging = false;
+ }
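+                    // (m_timeOffset has absorbed any paused interval, so the
+                    // sample times written below continue without a gap.)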
+
+ // Send the video frame or audio sample to be encoded.
+ if (m_sinkWriter && !m_paused) {
+
+ pSample->SetSampleTime(llTimestamp - m_timeOffset);
+
+ if (dwStreamIndex == m_sourceVideoStreamIndex) {
+
+ m_sinkWriter->WriteSample(m_sinkVideoStreamIndex, pSample);
+
+ } else if (dwStreamIndex == m_sourceAudioStreamIndex) {
+
+ float volume = m_inputMuted ? 0.0f : float(m_inputVolume);
+
+ // Change the volume of the audio sample, if needed.
+ if (volume != 1.0f) {
+ IMFMediaBuffer *mediaBuffer = nullptr;
+ if (SUCCEEDED(pSample->ConvertToContiguousBuffer(&mediaBuffer))) {
+
+ DWORD bufLen = 0;
+ BYTE *buffer = nullptr;
+
+ if (SUCCEEDED(mediaBuffer->Lock(&buffer, nullptr, &bufLen))) {
+
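+                            // Assumes 32-bit float samples (cf. the 'flt' flag of
+                            // initAudioType); each sample is scaled in place.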
+ float *floatBuffer = reinterpret_cast<float*>(buffer);
+
+ for (DWORD i = 0; i < bufLen/4; ++i)
+ floatBuffer[i] *= volume;
+
+ mediaBuffer->Unlock();
+ }
+ mediaBuffer->Release();
+ }
+ }
+
+ m_sinkWriter->WriteSample(m_sinkAudioStreamIndex, pSample);
+ }
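+                    // Convert from 100-nanosecond units to milliseconds.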
+ m_currentDuration = (llTimestamp - m_timeOffset) / 10000;
+ }
+ }
+
+ // Generate a new QVideoFrame from IMFSample.
+ if (dwStreamIndex == m_sourceVideoStreamIndex) {
+ IMFMediaBuffer *mediaBuffer = nullptr;
+ if (SUCCEEDED(pSample->ConvertToContiguousBuffer(&mediaBuffer))) {
+
+ DWORD bufLen = 0;
+ BYTE *buffer = nullptr;
+
+ if (SUCCEEDED(mediaBuffer->Lock(&buffer, nullptr, &bufLen))) {
+ auto bytes = QByteArray(reinterpret_cast<char*>(buffer), bufLen);
+ QVideoFrameFormat format(QSize(m_frameWidth, m_frameHeight), m_pixelFormat);
+
+ QVideoFrame frame = QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(std::move(bytes), m_stride),
+ std::move(format));
+
+ // WMF uses 100-nanosecond units, Qt uses microseconds
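+                    // (multiplying by 0.1 divides by ten: 10,000,000 ticks = 1 s = 1,000,000 us)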
+ frame.setStartTime(llTimestamp * 0.1);
+
+ LONGLONG duration = -1;
+ if (SUCCEEDED(pSample->GetSampleDuration(&duration)))
+ frame.setEndTime((llTimestamp + duration) * 0.1);
+
+ emit videoFrameChanged(frame);
+
+ mediaBuffer->Unlock();
+ }
+ mediaBuffer->Release();
+ }
+ }
+ }
+            // Request the next video frame or audio sample.
+ if (m_sourceReader)
+ m_sourceReader->ReadSample(MF_SOURCE_READER_ANY_STREAM,
+ 0, nullptr, nullptr, nullptr, nullptr);
+ }
+
+ return S_OK;
+}
+
+STDMETHODIMP QWindowsMediaDeviceReader::OnFlush(DWORD)
+{
+ return S_OK;
+}
+
+STDMETHODIMP QWindowsMediaDeviceReader::OnEvent(DWORD, IMFMediaEvent*)
+{
+ return S_OK;
+}
+
+//from IMFSinkWriterCallback
+STDMETHODIMP QWindowsMediaDeviceReader::OnFinalize(HRESULT)
+{
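+    // Called on a Media Foundation thread once the sink writer has finished;
+    // release the writer and wake the thread blocked in stopRecording().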
+ QMutexLocker locker(&m_mutex);
+ if (m_sinkWriter) {
+ m_sinkWriter->Release();
+ m_sinkWriter = nullptr;
+ }
+ emit recordingStopped();
+ m_hasFinalized.notify_one();
+ return S_OK;
+}
+
+STDMETHODIMP QWindowsMediaDeviceReader::OnMarker(DWORD, LPVOID)
+{
+ return S_OK;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qwindowsmediadevicereader_p.cpp"
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader_p.h b/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader_p.h
new file mode 100644
index 000000000..4699a463a
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicereader_p.h
@@ -0,0 +1,154 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWINDOWSMEDIADEVICEREADER_H
+#define QWINDOWSMEDIADEVICEREADER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <mfapi.h>
+#include <mfidl.h>
+#include <mferror.h>
+#include <mfreadwrite.h>
+
+#include <QtCore/qobject.h>
+#include <QtCore/qmutex.h>
+#include <QtCore/qwaitcondition.h>
+#include <QtCore/qtimer.h>
+#include <qvideoframe.h>
+#include <qcameradevice.h>
+#include <qmediarecorder.h>
+
+QT_BEGIN_NAMESPACE
+
+class QVideoSink;
+
+class QWindowsMediaDeviceReader : public QObject,
+ public IMFSourceReaderCallback,
+ public IMFSinkWriterCallback
+{
+ Q_OBJECT
+public:
+ explicit QWindowsMediaDeviceReader(QObject *parent = nullptr);
+ ~QWindowsMediaDeviceReader();
+
+ //from IUnknown
+ STDMETHODIMP QueryInterface(REFIID riid, LPVOID *ppvObject) override;
+ STDMETHODIMP_(ULONG) AddRef(void) override;
+ STDMETHODIMP_(ULONG) Release(void) override;
+
+ //from IMFSourceReaderCallback
+ STDMETHODIMP OnReadSample(HRESULT hrStatus, DWORD dwStreamIndex,
+ DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample *pSample) override;
+ STDMETHODIMP OnFlush(DWORD dwStreamIndex) override;
+ STDMETHODIMP OnEvent(DWORD dwStreamIndex, IMFMediaEvent *pEvent) override;
+
+ //from IMFSinkWriterCallback
+ STDMETHODIMP OnFinalize(HRESULT hrStatus) override;
+ STDMETHODIMP OnMarker(DWORD dwStreamIndex, LPVOID pvContext) override;
+
+ bool activate(const QString &cameraId,
+ const QCameraFormat &cameraFormat,
+ const QString &microphoneId);
+ void deactivate();
+
+ QMediaRecorder::Error startRecording(const QString &fileName, const GUID &container,
+ const GUID &videoFormat, UINT32 videoBitRate, UINT32 width,
+ UINT32 height, qreal frameRate, const GUID &audioFormat,
+ UINT32 audioBitRate);
+ void stopRecording();
+ bool pauseRecording();
+ bool resumeRecording();
+
+ UINT32 frameWidth() const;
+ UINT32 frameHeight() const;
+ qreal frameRate() const;
+ void setInputMuted(bool muted);
+ void setInputVolume(qreal volume);
+ void setOutputMuted(bool muted);
+ void setOutputVolume(qreal volume);
+ bool setAudioOutput(const QString &audioOutputId);
+
+Q_SIGNALS:
+ void streamingStarted();
+ void streamingStopped();
+ void streamingError(int errorCode);
+ void recordingStarted();
+ void recordingStopped();
+ void recordingError(int errorCode);
+ void durationChanged(qint64 duration);
+ void videoFrameChanged(const QVideoFrame &frame);
+
+private slots:
+ void updateDuration();
+
+private:
+ HRESULT createSource(const QString &deviceId, bool video, IMFMediaSource **source);
+ HRESULT createAggregateReader(IMFMediaSource *firstSource, IMFMediaSource *secondSource,
+ IMFMediaSource **aggregateSource, IMFSourceReader **sourceReader);
+ HRESULT createVideoMediaType(const GUID &format, UINT32 bitRate, UINT32 width, UINT32 height,
+ qreal frameRate, IMFMediaType **mediaType);
+ HRESULT createAudioMediaType(const GUID &format, UINT32 bitRate, IMFMediaType **mediaType);
+ HRESULT initAudioType(IMFMediaType *mediaType, UINT32 channels, UINT32 samplesPerSec, bool flt);
+ HRESULT prepareVideoStream(DWORD mediaTypeIndex);
+ HRESULT prepareAudioStream();
+ HRESULT initSourceIndexes();
+ HRESULT updateSinkInputMediaTypes();
+ HRESULT startMonitoring();
+ void stopMonitoring();
+ void releaseResources();
+ void stopStreaming();
+ DWORD findMediaTypeIndex(const QCameraFormat &reqFormat);
+
+ long m_cRef = 1;
+ QMutex m_mutex;
+ QWaitCondition m_hasFinalized;
+ IMFMediaSource *m_videoSource = nullptr;
+ IMFMediaType *m_videoMediaType = nullptr;
+ IMFMediaSource *m_audioSource = nullptr;
+ IMFMediaType *m_audioMediaType = nullptr;
+ IMFMediaSource *m_aggregateSource = nullptr;
+ IMFSourceReader *m_sourceReader = nullptr;
+ IMFSinkWriter *m_sinkWriter = nullptr;
+ IMFMediaSink *m_monitorSink = nullptr;
+ IMFSinkWriter *m_monitorWriter = nullptr;
+ QString m_audioOutputId;
+ DWORD m_sourceVideoStreamIndex = MF_SOURCE_READER_INVALID_STREAM_INDEX;
+ DWORD m_sourceAudioStreamIndex = MF_SOURCE_READER_INVALID_STREAM_INDEX;
+ DWORD m_sinkVideoStreamIndex = MF_SINK_WRITER_INVALID_STREAM_INDEX;
+ DWORD m_sinkAudioStreamIndex = MF_SINK_WRITER_INVALID_STREAM_INDEX;
+ UINT32 m_frameWidth = 0;
+ UINT32 m_frameHeight = 0;
+ qreal m_frameRate = 0.0;
+ LONG m_stride = 0;
+ bool m_active = false;
+ bool m_streaming = false;
+ bool m_recording = false;
+ bool m_firstFrame = false;
+ bool m_paused = false;
+ bool m_pauseChanging = false;
+ bool m_inputMuted = false;
+ bool m_outputMuted = false;
+ qreal m_inputVolume = 1.0;
+ qreal m_outputVolume = 1.0;
+ QVideoFrameFormat::PixelFormat m_pixelFormat = QVideoFrameFormat::Format_Invalid;
+ LONGLONG m_timeOffset = 0;
+ LONGLONG m_pauseTime = 0;
+ LONGLONG m_lastTimestamp = 0;
+ QTimer m_durationTimer;
+ qint64 m_currentDuration = -1;
+ qint64 m_lastDuration = -1;
+};
+
+QT_END_NAMESPACE
+
+#endif // QWINDOWSMEDIADEVICEREADER_H
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicesession.cpp b/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicesession.cpp
new file mode 100644
index 000000000..b13599444
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicesession.cpp
@@ -0,0 +1,376 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwindowsmediadevicesession_p.h"
+
+#include "qwindowsmediadevicereader_p.h"
+#include "private/qwindowsmultimediautils_p.h"
+#include "private/qplatformvideosink_p.h"
+#include <qvideosink.h>
+#include <QtCore/qdebug.h>
+#include <qaudioinput.h>
+#include <qaudiooutput.h>
+
+QT_BEGIN_NAMESPACE
+
+QWindowsMediaDeviceSession::QWindowsMediaDeviceSession(QObject *parent)
+ : QObject(parent)
+{
+ m_mediaDeviceReader = new QWindowsMediaDeviceReader(this);
+ connect(m_mediaDeviceReader, &QWindowsMediaDeviceReader::streamingStarted,
+ this, &QWindowsMediaDeviceSession::handleStreamingStarted);
+ connect(m_mediaDeviceReader, &QWindowsMediaDeviceReader::streamingStopped,
+ this, &QWindowsMediaDeviceSession::handleStreamingStopped);
+ connect(m_mediaDeviceReader, &QWindowsMediaDeviceReader::streamingError,
+ this, &QWindowsMediaDeviceSession::handleStreamingError);
+ connect(m_mediaDeviceReader, &QWindowsMediaDeviceReader::videoFrameChanged,
+ this, &QWindowsMediaDeviceSession::handleVideoFrameChanged);
+ connect(m_mediaDeviceReader, &QWindowsMediaDeviceReader::recordingStarted,
+ this, &QWindowsMediaDeviceSession::recordingStarted);
+ connect(m_mediaDeviceReader, &QWindowsMediaDeviceReader::recordingStopped,
+ this, &QWindowsMediaDeviceSession::recordingStopped);
+ connect(m_mediaDeviceReader, &QWindowsMediaDeviceReader::recordingError,
+ this, &QWindowsMediaDeviceSession::recordingError);
+ connect(m_mediaDeviceReader, &QWindowsMediaDeviceReader::durationChanged,
+ this, &QWindowsMediaDeviceSession::durationChanged);
+}
+
+QWindowsMediaDeviceSession::~QWindowsMediaDeviceSession()
+{
+ delete m_mediaDeviceReader;
+}
+
+bool QWindowsMediaDeviceSession::isActive() const
+{
+ return m_active;
+}
+
+bool QWindowsMediaDeviceSession::isActivating() const
+{
+ return m_activating;
+}
+
+void QWindowsMediaDeviceSession::setActive(bool active)
+{
+ if ((active && (m_active || m_activating)) || (!active && !m_active && !m_activating))
+ return;
+
+ if (active) {
+ auto camId = QString::fromUtf8(m_activeCameraDevice.id());
+ auto micId = m_audioInput ? QString::fromUtf8(m_audioInput->device().id()) : QString();
+ if (!camId.isEmpty() || !micId.isEmpty()) {
+ if (m_mediaDeviceReader->activate(camId, m_cameraFormat, micId)) {
+ m_activating = true;
+ } else {
+ emit streamingError(MF_E_NOT_AVAILABLE);
+ }
+ } else {
+ qWarning() << Q_FUNC_INFO << "Camera ID and Microphone ID both undefined.";
+ }
+ } else {
+ m_mediaDeviceReader->deactivate();
+ m_active = false;
+ m_activating = false;
+ emit activeChanged(m_active);
+ emit readyForCaptureChanged(m_active);
+ }
+}
+
+void QWindowsMediaDeviceSession::reactivate()
+{
+ if (m_active || m_activating) {
+ pauseRecording();
+ setActive(false);
+ setActive(true);
+ resumeRecording();
+ }
+}
+
+void QWindowsMediaDeviceSession::setActiveCamera(const QCameraDevice &camera)
+{
+ m_activeCameraDevice = camera;
+ reactivate();
+}
+
+QCameraDevice QWindowsMediaDeviceSession::activeCamera() const
+{
+ return m_activeCameraDevice;
+}
+
+void QWindowsMediaDeviceSession::setCameraFormat(const QCameraFormat &cameraFormat)
+{
+ m_cameraFormat = cameraFormat;
+}
+
+void QWindowsMediaDeviceSession::setVideoSink(QVideoSink *surface)
+{
+ m_surface = surface;
+}
+
+void QWindowsMediaDeviceSession::handleStreamingStarted()
+{
+ if (m_activating) {
+ m_active = true;
+ m_activating = false;
+ emit activeChanged(m_active);
+ emit readyForCaptureChanged(m_active);
+ }
+}
+
+void QWindowsMediaDeviceSession::handleStreamingStopped()
+{
+ m_active = false;
+ emit activeChanged(m_active);
+ emit readyForCaptureChanged(m_active);
+}
+
+void QWindowsMediaDeviceSession::handleStreamingError(int errorCode)
+{
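+    // Present an empty frame to clear the last image from the sink.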
+ if (m_surface)
+ m_surface->platformVideoSink()->setVideoFrame(QVideoFrame());
+ emit streamingError(errorCode);
+}
+
+void QWindowsMediaDeviceSession::handleVideoFrameChanged(const QVideoFrame &frame)
+{
+ if (m_surface)
+ m_surface->platformVideoSink()->setVideoFrame(frame);
+ emit videoFrameChanged(frame);
+}
+
+void QWindowsMediaDeviceSession::setAudioInputMuted(bool muted)
+{
+ m_mediaDeviceReader->setInputMuted(muted);
+}
+
+void QWindowsMediaDeviceSession::setAudioInputVolume(float volume)
+{
+ m_mediaDeviceReader->setInputVolume(volume);
+}
+
+void QWindowsMediaDeviceSession::audioInputDeviceChanged()
+{
+ reactivate();
+}
+
+void QWindowsMediaDeviceSession::setAudioOutputMuted(bool muted)
+{
+ m_mediaDeviceReader->setOutputMuted(muted);
+}
+
+void QWindowsMediaDeviceSession::setAudioOutputVolume(float volume)
+{
+ m_mediaDeviceReader->setOutputVolume(volume);
+}
+
+void QWindowsMediaDeviceSession::audioOutputDeviceChanged()
+{
+ if (m_active || m_activating)
+ m_mediaDeviceReader->setAudioOutput(QString::fromUtf8(m_audioOutput->device().id()));
+}
+
+void QWindowsMediaDeviceSession::setAudioInput(QAudioInput *input)
+{
+ if (m_audioInput == input)
+ return;
+ if (m_audioInput)
+ m_audioInput->disconnect(this);
+ m_audioInput = input;
+
+ audioInputDeviceChanged();
+
+ if (!m_audioInput)
+ return;
+ connect(m_audioInput, &QAudioInput::mutedChanged, this, &QWindowsMediaDeviceSession::setAudioInputMuted);
+ connect(m_audioInput, &QAudioInput::volumeChanged, this, &QWindowsMediaDeviceSession::setAudioInputVolume);
+ connect(m_audioInput, &QAudioInput::deviceChanged, this, &QWindowsMediaDeviceSession::audioInputDeviceChanged);
+}
+
+void QWindowsMediaDeviceSession::setAudioOutput(QAudioOutput *output)
+{
+ if (m_audioOutput == output)
+ return;
+ if (m_audioOutput)
+ m_audioOutput->disconnect(this);
+ m_audioOutput = output;
+ if (!m_audioOutput) {
+ m_mediaDeviceReader->setAudioOutput({});
+ return;
+ }
+
+ m_mediaDeviceReader->setAudioOutput(QString::fromUtf8(m_audioOutput->device().id()));
+
+ connect(m_audioOutput, &QAudioOutput::mutedChanged, this, &QWindowsMediaDeviceSession::setAudioOutputMuted);
+ connect(m_audioOutput, &QAudioOutput::volumeChanged, this, &QWindowsMediaDeviceSession::setAudioOutputVolume);
+ connect(m_audioOutput, &QAudioOutput::deviceChanged, this, &QWindowsMediaDeviceSession::audioOutputDeviceChanged);
+}
+
+QMediaRecorder::Error QWindowsMediaDeviceSession::startRecording(QMediaEncoderSettings &settings, const QString &fileName, bool audioOnly)
+{
+ GUID container = audioOnly ? QWindowsMultimediaUtils::containerForAudioFileFormat(settings.mediaFormat().fileFormat())
+ : QWindowsMultimediaUtils::containerForVideoFileFormat(settings.mediaFormat().fileFormat());
+ GUID videoFormat = QWindowsMultimediaUtils::videoFormatForCodec(settings.videoCodec());
+ GUID audioFormat = QWindowsMultimediaUtils::audioFormatForCodec(settings.audioCodec());
+
+ QSize res = settings.videoResolution();
+ UINT32 width, height;
+ if (res.width() > 0 && res.height() > 0) {
+ width = UINT32(res.width());
+ height = UINT32(res.height());
+ } else {
+ width = m_mediaDeviceReader->frameWidth();
+ height = m_mediaDeviceReader->frameHeight();
+ settings.setVideoResolution(QSize(int(width), int(height)));
+ }
+
+ qreal frameRate = settings.videoFrameRate();
+ if (frameRate <= 0) {
+ frameRate = m_mediaDeviceReader->frameRate();
+ settings.setVideoFrameRate(frameRate);
+ }
+
+ auto quality = settings.quality();
+
+ UINT32 videoBitRate = 0;
+ if (settings.videoBitRate() > 0) {
+ videoBitRate = UINT32(settings.videoBitRate());
+ } else {
+ videoBitRate = estimateVideoBitRate(videoFormat, width, height, frameRate, quality);
+ settings.setVideoBitRate(int(videoBitRate));
+ }
+
+ UINT32 audioBitRate = 0;
+ if (settings.audioBitRate() > 0) {
+ audioBitRate = UINT32(settings.audioBitRate());
+ } else {
+ audioBitRate = estimateAudioBitRate(audioFormat, quality);
+ settings.setAudioBitRate(int(audioBitRate));
+ }
+
+ return m_mediaDeviceReader->startRecording(fileName, container, audioOnly ? GUID_NULL : videoFormat,
+ videoBitRate, width, height, frameRate,
+ audioFormat, audioBitRate);
+}
+
+void QWindowsMediaDeviceSession::stopRecording()
+{
+ m_mediaDeviceReader->stopRecording();
+}
+
+bool QWindowsMediaDeviceSession::pauseRecording()
+{
+ return m_mediaDeviceReader->pauseRecording();
+}
+
+bool QWindowsMediaDeviceSession::resumeRecording()
+{
+ return m_mediaDeviceReader->resumeRecording();
+}
+
+// empirical estimate of the required video bitrate (for H.264)
+quint32 QWindowsMediaDeviceSession::estimateVideoBitRate(const GUID &videoFormat, quint32 width, quint32 height,
+ qreal frameRate, QMediaRecorder::Quality quality)
+{
+ Q_UNUSED(videoFormat);
+
+ qreal bitsPerPixel;
+ switch (quality) {
+ case QMediaRecorder::Quality::VeryLowQuality:
+ bitsPerPixel = 0.08;
+ break;
+ case QMediaRecorder::Quality::LowQuality:
+ bitsPerPixel = 0.2;
+ break;
+ case QMediaRecorder::Quality::NormalQuality:
+ bitsPerPixel = 0.3;
+ break;
+ case QMediaRecorder::Quality::HighQuality:
+ bitsPerPixel = 0.5;
+ break;
+ case QMediaRecorder::Quality::VeryHighQuality:
+ bitsPerPixel = 0.8;
+ break;
+ default:
+ bitsPerPixel = 0.3;
+ }
+
+    // The required bitrate is not linear in the number of pixels; small
+    // resolutions need more bits per pixel, so minimum width, height and frame
+    // rate are enforced below to compensate.
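+    // Example: 1920x1080 at 30 fps with NormalQuality gives
+    // 62,208,000 px/s * 0.3 bpp ~= 18.7 Mbps.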
+ quint32 pixelsPerSec = quint32(qMax(width, 320u) * qMax(height, 240u) * qMax(frameRate, 6.0));
+ return pixelsPerSec * bitsPerPixel;
+}
+
+quint32 QWindowsMediaDeviceSession::estimateAudioBitRate(const GUID &audioFormat, QMediaRecorder::Quality quality)
+{
+ if (audioFormat == MFAudioFormat_AAC) {
+ // Bitrates supported by the AAC encoder are 96K, 128K, 160K, 192K.
+ switch (quality) {
+ case QMediaRecorder::Quality::VeryLowQuality:
+ return 96000;
+ case QMediaRecorder::Quality::LowQuality:
+ return 96000;
+ case QMediaRecorder::Quality::NormalQuality:
+ return 128000;
+ case QMediaRecorder::Quality::HighQuality:
+ return 160000;
+ case QMediaRecorder::Quality::VeryHighQuality:
+ return 192000;
+ default:
+ return 128000;
+ }
+ } else if (audioFormat == MFAudioFormat_MP3) {
+ // Bitrates supported by the MP3 encoder are
+ // 32K, 40K, 48K, 56K, 64K, 80K, 96K, 112K, 128K, 160K, 192K, 224K, 256K, 320K.
+ switch (quality) {
+ case QMediaRecorder::Quality::VeryLowQuality:
+ return 48000;
+ case QMediaRecorder::Quality::LowQuality:
+ return 96000;
+ case QMediaRecorder::Quality::NormalQuality:
+ return 128000;
+ case QMediaRecorder::Quality::HighQuality:
+ return 224000;
+ case QMediaRecorder::Quality::VeryHighQuality:
+ return 320000;
+ default:
+ return 128000;
+ }
+ } else if (audioFormat == MFAudioFormat_WMAudioV8) {
+ // Bitrates supported by the Windows Media Audio 8 encoder
+ switch (quality) {
+ case QMediaRecorder::Quality::VeryLowQuality:
+ return 32000;
+ case QMediaRecorder::Quality::LowQuality:
+ return 96000;
+ case QMediaRecorder::Quality::NormalQuality:
+ return 192000;
+ case QMediaRecorder::Quality::HighQuality:
+ return 256016;
+ case QMediaRecorder::Quality::VeryHighQuality:
+ return 320032;
+ default:
+ return 192000;
+ }
+ } else if (audioFormat == MFAudioFormat_WMAudioV9) {
+ // Bitrates supported by the Windows Media Audio 9 encoder
+ switch (quality) {
+ case QMediaRecorder::Quality::VeryLowQuality:
+ return 32000;
+ case QMediaRecorder::Quality::LowQuality:
+ return 96000;
+ case QMediaRecorder::Quality::NormalQuality:
+ return 192000;
+ case QMediaRecorder::Quality::HighQuality:
+ return 256016;
+ case QMediaRecorder::Quality::VeryHighQuality:
+ return 384000;
+ default:
+ return 192000;
+ }
+ }
+ return 0; // Use default for format
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qwindowsmediadevicesession_p.cpp"
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicesession_p.h b/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicesession_p.h
new file mode 100644
index 000000000..c3998ce6c
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsmediadevicesession_p.h
@@ -0,0 +1,100 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWINDOWSMEDIADEVICESESSION_H
+#define QWINDOWSMEDIADEVICESESSION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qtmultimediaglobal_p.h>
+#include <qcamera.h>
+#include <qaudiodevice.h>
+#include <private/qwindowsmultimediautils_p.h>
+#include <private/qplatformmediarecorder_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAudioInput;
+class QAudioOutput;
+class QVideoSink;
+class QWindowsMediaDeviceReader;
+
+class QWindowsMediaDeviceSession : public QObject
+{
+ Q_OBJECT
+public:
+ explicit QWindowsMediaDeviceSession(QObject *parent = nullptr);
+ ~QWindowsMediaDeviceSession();
+
+ bool isActive() const;
+ void setActive(bool active);
+
+ bool isActivating() const;
+
+ void setActiveCamera(const QCameraDevice &camera);
+ QCameraDevice activeCamera() const;
+
+ void setCameraFormat(const QCameraFormat &cameraFormat);
+
+ void setVideoSink(QVideoSink *surface);
+
+public Q_SLOTS:
+ void setAudioInputMuted(bool muted);
+ void setAudioInputVolume(float volume);
+ void audioInputDeviceChanged();
+ void setAudioOutputMuted(bool muted);
+ void setAudioOutputVolume(float volume);
+ void audioOutputDeviceChanged();
+
+public:
+ void setAudioInput(QAudioInput *input);
+ void setAudioOutput(QAudioOutput *output);
+
+ QMediaRecorder::Error startRecording(QMediaEncoderSettings &settings, const QString &fileName, bool audioOnly);
+ void stopRecording();
+ bool pauseRecording();
+ bool resumeRecording();
+
+Q_SIGNALS:
+ void activeChanged(bool);
+ void readyForCaptureChanged(bool);
+ void durationChanged(qint64 duration);
+ void recordingStarted();
+ void recordingStopped();
+ void streamingError(int errorCode);
+ void recordingError(int errorCode);
+ void videoFrameChanged(const QVideoFrame &frame);
+
+private Q_SLOTS:
+ void handleStreamingStarted();
+ void handleStreamingStopped();
+ void handleStreamingError(int errorCode);
+ void handleVideoFrameChanged(const QVideoFrame &frame);
+
+private:
+ void reactivate();
+ quint32 estimateVideoBitRate(const GUID &videoFormat, quint32 width, quint32 height,
+ qreal frameRate, QMediaRecorder::Quality quality);
+ quint32 estimateAudioBitRate(const GUID &audioFormat, QMediaRecorder::Quality quality);
+ bool m_active = false;
+ bool m_activating = false;
+ QCameraDevice m_activeCameraDevice;
+ QCameraFormat m_cameraFormat;
+ QWindowsMediaDeviceReader *m_mediaDeviceReader = nullptr;
+ QAudioInput *m_audioInput = nullptr;
+ QAudioOutput *m_audioOutput = nullptr;
+ QVideoSink *m_surface = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif // QWINDOWSMEDIADEVICESESSION_H
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder.cpp b/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder.cpp
new file mode 100644
index 000000000..512110af6
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder.cpp
@@ -0,0 +1,225 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwindowsmediaencoder_p.h"
+
+#include "qwindowsmediadevicesession_p.h"
+#include "qwindowsmediacapture_p.h"
+#include "mfmetadata_p.h"
+#include <QtCore/QUrl>
+#include <QtCore/QMimeType>
+#include <mferror.h>
+#include <shobjidl.h>
+#include <private/qmediastoragelocation_p.h>
+#include <private/qmediarecorder_p.h>
+
+QT_BEGIN_NAMESPACE
+
+QWindowsMediaEncoder::QWindowsMediaEncoder(QMediaRecorder *parent)
+ : QObject(parent),
+ QPlatformMediaRecorder(parent)
+{
+}
+
+bool QWindowsMediaEncoder::isLocationWritable(const QUrl &location) const
+{
+ return location.scheme() == QLatin1String("file") || location.scheme().isEmpty();
+}
+
+QMediaRecorder::RecorderState QWindowsMediaEncoder::state() const
+{
+ return m_state;
+}
+
+qint64 QWindowsMediaEncoder::duration() const
+{
+ return m_duration;
+}
+
+void QWindowsMediaEncoder::record(QMediaEncoderSettings &settings)
+{
+ if (!m_captureService || !m_mediaDeviceSession) {
+ qWarning() << Q_FUNC_INFO << "Encoder is not set to a capture session";
+ return;
+ }
+ if (m_state != QMediaRecorder::StoppedState)
+ return;
+
+ m_sessionWasActive = m_mediaDeviceSession->isActive() || m_mediaDeviceSession->isActivating();
+
+ if (!m_sessionWasActive) {
+
+ m_mediaDeviceSession->setActive(true);
+
+ if (!m_mediaDeviceSession->isActivating()) {
+ updateError(QMediaRecorder::ResourceError,
+ QMediaRecorderPrivate::msgFailedStartRecording());
+ return;
+ }
+ }
+
+ const auto audioOnly = settings.videoCodec() == QMediaFormat::VideoCodec::Unspecified;
+ m_fileName = QMediaStorageLocation::generateFileName(outputLocation().toLocalFile(), audioOnly
+ ? QStandardPaths::MusicLocation
+ : QStandardPaths::MoviesLocation,
+ settings.mimeType().preferredSuffix());
+
+ QMediaRecorder::Error ec = m_mediaDeviceSession->startRecording(settings, m_fileName, audioOnly);
+ if (ec == QMediaRecorder::NoError) {
+ m_state = QMediaRecorder::RecordingState;
+
+ actualLocationChanged(QUrl::fromLocalFile(m_fileName));
+ stateChanged(m_state);
+
+ } else {
+ updateError(ec, QMediaRecorderPrivate::msgFailedStartRecording());
+ }
+}
+
+void QWindowsMediaEncoder::pause()
+{
+ if (!m_mediaDeviceSession || m_state != QMediaRecorder::RecordingState)
+ return;
+
+ if (m_mediaDeviceSession->pauseRecording()) {
+ m_state = QMediaRecorder::PausedState;
+ stateChanged(m_state);
+ } else {
+ updateError(QMediaRecorder::FormatError, tr("Failed to pause recording"));
+ }
+}
+
+void QWindowsMediaEncoder::resume()
+{
+ if (!m_mediaDeviceSession || m_state != QMediaRecorder::PausedState)
+ return;
+
+ if (m_mediaDeviceSession->resumeRecording()) {
+ m_state = QMediaRecorder::RecordingState;
+ stateChanged(m_state);
+ } else {
+ updateError(QMediaRecorder::FormatError, tr("Failed to resume recording"));
+ }
+}
+
+void QWindowsMediaEncoder::stop()
+{
+ if (m_mediaDeviceSession && m_state != QMediaRecorder::StoppedState) {
+ m_mediaDeviceSession->stopRecording();
+ if (!m_sessionWasActive)
+ m_mediaDeviceSession->setActive(false);
+ }
+}
+
+void QWindowsMediaEncoder::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+ QWindowsMediaCaptureService *captureSession = static_cast<QWindowsMediaCaptureService *>(session);
+ if (m_captureService == captureSession)
+ return;
+
+ if (m_captureService)
+ stop();
+
+ m_captureService = captureSession;
+ if (!m_captureService) {
+ m_mediaDeviceSession = nullptr;
+ return;
+ }
+
+ m_mediaDeviceSession = m_captureService->session();
+ Q_ASSERT(m_mediaDeviceSession);
+
+ connect(m_mediaDeviceSession, &QWindowsMediaDeviceSession::recordingStarted, this, &QWindowsMediaEncoder::onRecordingStarted);
+ connect(m_mediaDeviceSession, &QWindowsMediaDeviceSession::recordingStopped, this, &QWindowsMediaEncoder::onRecordingStopped);
+ connect(m_mediaDeviceSession, &QWindowsMediaDeviceSession::streamingError, this, &QWindowsMediaEncoder::onStreamingError);
+ connect(m_mediaDeviceSession, &QWindowsMediaDeviceSession::recordingError, this, &QWindowsMediaEncoder::onRecordingError);
+ connect(m_mediaDeviceSession, &QWindowsMediaDeviceSession::durationChanged, this, &QWindowsMediaEncoder::onDurationChanged);
+ connect(m_captureService, &QWindowsMediaCaptureService::cameraChanged, this, &QWindowsMediaEncoder::onCameraChanged);
+ onCameraChanged();
+}
+
+void QWindowsMediaEncoder::setMetaData(const QMediaMetaData &metaData)
+{
+ m_metaData = metaData;
+}
+
+QMediaMetaData QWindowsMediaEncoder::metaData() const
+{
+ return m_metaData;
+}
+
+void QWindowsMediaEncoder::saveMetadata()
+{
+ if (!m_metaData.isEmpty()) {
+
+ const QString nativeFileName = QDir::toNativeSeparators(m_fileName);
+
+ IPropertyStore *store = nullptr;
+
+ if (SUCCEEDED(SHGetPropertyStoreFromParsingName(reinterpret_cast<LPCWSTR>(nativeFileName.utf16()),
+ nullptr, GPS_READWRITE, IID_PPV_ARGS(&store)))) {
+
+ MFMetaData::toNative(m_metaData, store);
+
+ store->Commit();
+ store->Release();
+ }
+ }
+}
+
+void QWindowsMediaEncoder::onDurationChanged(qint64 duration)
+{
+ m_duration = duration;
+ durationChanged(m_duration);
+}
+
+void QWindowsMediaEncoder::onStreamingError(int errorCode)
+{
+ if (errorCode == MF_E_VIDEO_RECORDING_DEVICE_INVALIDATED)
+ updateError(QMediaRecorder::ResourceError, tr("Camera is no longer present"));
+ else if (errorCode == MF_E_AUDIO_RECORDING_DEVICE_INVALIDATED)
+ updateError(QMediaRecorder::ResourceError, tr("Audio input is no longer present"));
+ else
+ updateError(QMediaRecorder::ResourceError, tr("Streaming error"));
+
+ if (m_state != QMediaRecorder::StoppedState) {
+ m_mediaDeviceSession->stopRecording();
+ if (!m_sessionWasActive)
+ m_mediaDeviceSession->setActive(false);
+ }
+}
+
+void QWindowsMediaEncoder::onRecordingError(int errorCode)
+{
+ Q_UNUSED(errorCode);
+ updateError(QMediaRecorder::ResourceError, tr("Recording error"));
+
+ auto lastState = m_state;
+ m_state = QMediaRecorder::StoppedState;
+ if (m_state != lastState)
+ stateChanged(m_state);
+}
+
+void QWindowsMediaEncoder::onCameraChanged()
+{
+}
+
+void QWindowsMediaEncoder::onRecordingStarted()
+{
+}
+
+void QWindowsMediaEncoder::onRecordingStopped()
+{
+ saveMetadata();
+
+ auto lastState = m_state;
+ m_state = QMediaRecorder::StoppedState;
+ if (m_state != lastState)
+ stateChanged(m_state);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qwindowsmediaencoder_p.cpp"
diff --git a/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder_p.h b/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder_p.h
new file mode 100644
index 000000000..51f35ce9d
--- /dev/null
+++ b/src/plugins/multimedia/windows/mediacapture/qwindowsmediaencoder_p.h
@@ -0,0 +1,71 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#ifndef QWINDOWSMEDIAENCODER_H
+#define QWINDOWSMEDIAENCODER_H
+
+#include <private/qplatformmediarecorder_p.h>
+
+#include <QtCore/qglobal.h>
+#include <QtCore/qurl.h>
+
+QT_BEGIN_NAMESPACE
+
+class QWindowsMediaDeviceSession;
+class QPlatformMediaCaptureSession;
+class QWindowsMediaCaptureService;
+
+class QWindowsMediaEncoder : public QObject, public QPlatformMediaRecorder
+{
+ Q_OBJECT
+public:
+ explicit QWindowsMediaEncoder(QMediaRecorder *parent);
+
+ bool isLocationWritable(const QUrl &location) const override;
+ QMediaRecorder::RecorderState state() const override;
+ qint64 duration() const override;
+
+ void setMetaData(const QMediaMetaData &metaData) override;
+ QMediaMetaData metaData() const override;
+
+ void setCaptureSession(QPlatformMediaCaptureSession *session);
+
+ void record(QMediaEncoderSettings &settings) override;
+ void pause() override;
+ void resume() override;
+ void stop() override;
+
+private Q_SLOTS:
+ void onCameraChanged();
+ void onRecordingStarted();
+ void onRecordingStopped();
+ void onDurationChanged(qint64 duration);
+ void onStreamingError(int errorCode);
+ void onRecordingError(int errorCode);
+
+private:
+ void saveMetadata();
+
+ QWindowsMediaCaptureService *m_captureService = nullptr;
+ QWindowsMediaDeviceSession *m_mediaDeviceSession = nullptr;
+ QMediaRecorder::RecorderState m_state = QMediaRecorder::StoppedState;
+ QString m_fileName;
+ QMediaMetaData m_metaData;
+ qint64 m_duration = 0;
+ bool m_sessionWasActive = false;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/windows/mfstream.cpp b/src/plugins/multimedia/windows/mfstream.cpp
new file mode 100644
index 000000000..fb37ce293
--- /dev/null
+++ b/src/plugins/multimedia/windows/mfstream.cpp
@@ -0,0 +1,326 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "mfstream_p.h"
+#include <QtCore/qcoreapplication.h>
+
+QT_BEGIN_NAMESPACE
+//MFStream adds support for QIODevice-based media sources.
+//It delegates invocations from Media Foundation (through IMFByteStream) to the QIODevice.
+
+MFStream::MFStream(QIODevice *stream, bool ownStream)
+ : m_cRef(1)
+ , m_stream(stream)
+ , m_ownStream(ownStream)
+ , m_currentReadResult(0)
+{
+    //Move to the stream object's thread to make sure that invocations
+    //on the stream happen in that same thread.
+ this->moveToThread(stream->thread());
+}
+
+MFStream::~MFStream()
+{
+ if (m_currentReadResult)
+ m_currentReadResult->Release();
+ if (m_ownStream)
+ m_stream->deleteLater();
+}
+
+//from IUnknown
+STDMETHODIMP MFStream::QueryInterface(REFIID riid, LPVOID *ppvObject)
+{
+ if (!ppvObject)
+ return E_POINTER;
+ if (riid == IID_IMFByteStream) {
+ *ppvObject = static_cast<IMFByteStream*>(this);
+ } else if (riid == IID_IUnknown) {
+ *ppvObject = static_cast<IUnknown*>(this);
+ } else {
+ *ppvObject = NULL;
+ return E_NOINTERFACE;
+ }
+ AddRef();
+ return S_OK;
+}
+
+STDMETHODIMP_(ULONG) MFStream::AddRef(void)
+{
+ return InterlockedIncrement(&m_cRef);
+}
+
+STDMETHODIMP_(ULONG) MFStream::Release(void)
+{
+ LONG cRef = InterlockedDecrement(&m_cRef);
+ if (cRef == 0) {
+ this->deleteLater();
+ }
+ return cRef;
+}
+
+
+//from IMFByteStream
+STDMETHODIMP MFStream::GetCapabilities(DWORD *pdwCapabilities)
+{
+ if (!pdwCapabilities)
+ return E_INVALIDARG;
+ *pdwCapabilities = MFBYTESTREAM_IS_READABLE;
+ if (!m_stream->isSequential())
+ *pdwCapabilities |= MFBYTESTREAM_IS_SEEKABLE;
+ return S_OK;
+}
+
+STDMETHODIMP MFStream::GetLength(QWORD *pqwLength)
+{
+ if (!pqwLength)
+ return E_INVALIDARG;
+ QMutexLocker locker(&m_mutex);
+ *pqwLength = QWORD(m_stream->size());
+ return S_OK;
+}
+
+STDMETHODIMP MFStream::SetLength(QWORD)
+{
+ return E_NOTIMPL;
+}
+
+STDMETHODIMP MFStream::GetCurrentPosition(QWORD *pqwPosition)
+{
+ if (!pqwPosition)
+ return E_INVALIDARG;
+ QMutexLocker locker(&m_mutex);
+ *pqwPosition = m_stream->pos();
+ return S_OK;
+}
+
+STDMETHODIMP MFStream::SetCurrentPosition(QWORD qwPosition)
+{
+ QMutexLocker locker(&m_mutex);
+    //SetCurrentPosition may be called during a BeginRead/EndRead pair;
+    //refusing to execute it during that time seems to be the simplest
+    //workable solution
+ if (m_currentReadResult)
+ return S_FALSE;
+
+ bool seekOK = m_stream->seek(qint64(qwPosition));
+ if (seekOK)
+ return S_OK;
+ else
+ return S_FALSE;
+}
+
+STDMETHODIMP MFStream::IsEndOfStream(BOOL *pfEndOfStream)
+{
+ if (!pfEndOfStream)
+ return E_INVALIDARG;
+ QMutexLocker locker(&m_mutex);
+ *pfEndOfStream = m_stream->atEnd() ? TRUE : FALSE;
+ return S_OK;
+}
+
+STDMETHODIMP MFStream::Read(BYTE *pb, ULONG cb, ULONG *pcbRead)
+{
+ QMutexLocker locker(&m_mutex);
+ qint64 read = m_stream->read((char*)(pb), qint64(cb));
+ if (pcbRead)
+ *pcbRead = ULONG(read);
+ return S_OK;
+}
+
+STDMETHODIMP MFStream::BeginRead(BYTE *pb, ULONG cb, IMFAsyncCallback *pCallback,
+ IUnknown *punkState)
+{
+ if (!pCallback || !pb)
+ return E_INVALIDARG;
+
+ Q_ASSERT(m_currentReadResult == NULL);
+
+ AsyncReadState *state = new (std::nothrow) AsyncReadState(pb, cb);
+ if (state == NULL)
+ return E_OUTOFMEMORY;
+
+ HRESULT hr = MFCreateAsyncResult(state, pCallback, punkState, &m_currentReadResult);
+ state->Release();
+ if (FAILED(hr))
+ return hr;
+
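+    // Defer the actual read to the stream's thread: customEvent() will run
+    // doRead() there once the posted event is delivered.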
+ QCoreApplication::postEvent(this, new QEvent(QEvent::User));
+ return hr;
+}
+
+STDMETHODIMP MFStream::EndRead(IMFAsyncResult* pResult, ULONG *pcbRead)
+{
+ if (!pcbRead)
+ return E_INVALIDARG;
+ IUnknown *pUnk;
+ pResult->GetObject(&pUnk);
+ AsyncReadState *state = static_cast<AsyncReadState*>(pUnk);
+ *pcbRead = state->bytesRead();
+ pUnk->Release();
+
+ m_currentReadResult->Release();
+ m_currentReadResult = NULL;
+
+ return S_OK;
+}
+
+STDMETHODIMP MFStream::Write(const BYTE *, ULONG, ULONG *)
+{
+ return E_NOTIMPL;
+}
+
+STDMETHODIMP MFStream::BeginWrite(const BYTE *, ULONG ,
+ IMFAsyncCallback *,
+ IUnknown *)
+{
+ return E_NOTIMPL;
+}
+
+STDMETHODIMP MFStream::EndWrite(IMFAsyncResult *,
+ ULONG *)
+{
+ return E_NOTIMPL;
+}
+
+STDMETHODIMP MFStream::Seek(
+ MFBYTESTREAM_SEEK_ORIGIN SeekOrigin,
+ LONGLONG llSeekOffset,
+ DWORD,
+ QWORD *pqwCurrentPosition)
+{
+ QMutexLocker locker(&m_mutex);
+ if (m_currentReadResult)
+ return S_FALSE;
+
+ qint64 pos = qint64(llSeekOffset);
+ switch (SeekOrigin) {
+ case msoBegin:
+ break;
+ case msoCurrent:
+ pos += m_stream->pos();
+ break;
+ }
+ bool seekOK = m_stream->seek(pos);
+ if (pqwCurrentPosition)
+ *pqwCurrentPosition = pos;
+ if (seekOK)
+ return S_OK;
+ else
+ return S_FALSE;
+}
+
+STDMETHODIMP MFStream::Flush()
+{
+ return E_NOTIMPL;
+}
+
+STDMETHODIMP MFStream::Close()
+{
+ QMutexLocker locker(&m_mutex);
+ if (m_ownStream)
+ m_stream->close();
+ return S_OK;
+}
+
+void MFStream::doRead()
+{
+ if (!m_stream)
+ return;
+
+ bool readDone = true;
+ IUnknown *pUnk = NULL;
+ HRESULT hr = m_currentReadResult->GetObject(&pUnk);
+ if (SUCCEEDED(hr)) {
+ //do actual read
+ AsyncReadState *state = static_cast<AsyncReadState*>(pUnk);
+ ULONG cbRead;
+ Read(state->pb(), state->cb() - state->bytesRead(), &cbRead);
+ pUnk->Release();
+
+ state->setBytesRead(cbRead + state->bytesRead());
+ if (state->cb() > state->bytesRead() && !m_stream->atEnd()) {
+ readDone = false;
+ }
+ }
+
+ if (readDone) {
+ //now inform the original caller
+ m_currentReadResult->SetStatus(hr);
+ MFInvokeCallback(m_currentReadResult);
+ }
+}
+
+void MFStream::customEvent(QEvent *event)
+{
+ if (event->type() != QEvent::User) {
+ QObject::customEvent(event);
+ return;
+ }
+ doRead();
+}
+
+//AsyncReadState is a helper class used by BeginRead for asynchronous operation.
+//It records the BeginRead parameters so that they can be used later, when the
+//read operation is actually executed in another thread.
+MFStream::AsyncReadState::AsyncReadState(BYTE *pb, ULONG cb)
+ : m_cRef(1)
+ , m_pb(pb)
+ , m_cb(cb)
+ , m_cbRead(0)
+{
+}
+
+//from IUnknown
+STDMETHODIMP MFStream::AsyncReadState::QueryInterface(REFIID riid, LPVOID *ppvObject)
+{
+ if (!ppvObject)
+ return E_POINTER;
+
+ if (riid == IID_IUnknown) {
+ *ppvObject = static_cast<IUnknown*>(this);
+ } else {
+ *ppvObject = NULL;
+ return E_NOINTERFACE;
+ }
+ AddRef();
+ return S_OK;
+}
+
+STDMETHODIMP_(ULONG) MFStream::AsyncReadState::AddRef(void)
+{
+ return InterlockedIncrement(&m_cRef);
+}
+
+STDMETHODIMP_(ULONG) MFStream::AsyncReadState::Release(void)
+{
+ LONG cRef = InterlockedDecrement(&m_cRef);
+ if (cRef == 0)
+ delete this;
+    // For thread safety, return a local copy; the object may already have been
+    // deleted once the reference count reaches zero.
+ return cRef;
+}
+
+BYTE* MFStream::AsyncReadState::pb() const
+{
+ return m_pb;
+}
+
+ULONG MFStream::AsyncReadState::cb() const
+{
+ return m_cb;
+}
+
+ULONG MFStream::AsyncReadState::bytesRead() const
+{
+ return m_cbRead;
+}
+
+void MFStream::AsyncReadState::setBytesRead(ULONG cbRead)
+{
+ m_cbRead = cbRead;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_mfstream_p.cpp"
diff --git a/src/plugins/multimedia/windows/mfstream_p.h b/src/plugins/multimedia/windows/mfstream_p.h
new file mode 100644
index 000000000..a5221ed75
--- /dev/null
+++ b/src/plugins/multimedia/windows/mfstream_p.h
@@ -0,0 +1,124 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef MFSTREAM_H
+#define MFSTREAM_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <mfapi.h>
+#include <mfidl.h>
+#include <QtCore/qmutex.h>
+#include <QtCore/qiodevice.h>
+#include <QtCore/qcoreevent.h>
+#include <QtCore/qpointer.h>
+
+QT_BEGIN_NAMESPACE
+
+class MFStream : public QObject, public IMFByteStream
+{
+ Q_OBJECT
+public:
+ MFStream(QIODevice *stream, bool ownStream);
+
+ ~MFStream();
+
+ //from IUnknown
+ STDMETHODIMP QueryInterface(REFIID riid, LPVOID *ppvObject) override;
+
+ STDMETHODIMP_(ULONG) AddRef(void) override;
+
+ STDMETHODIMP_(ULONG) Release(void) override;
+
+
+ //from IMFByteStream
+ STDMETHODIMP GetCapabilities(DWORD *pdwCapabilities) override;
+
+ STDMETHODIMP GetLength(QWORD *pqwLength) override;
+
+ STDMETHODIMP SetLength(QWORD) override;
+
+ STDMETHODIMP GetCurrentPosition(QWORD *pqwPosition) override;
+
+ STDMETHODIMP SetCurrentPosition(QWORD qwPosition) override;
+
+ STDMETHODIMP IsEndOfStream(BOOL *pfEndOfStream) override;
+
+ STDMETHODIMP Read(BYTE *pb, ULONG cb, ULONG *pcbRead) override;
+
+ STDMETHODIMP BeginRead(BYTE *pb, ULONG cb, IMFAsyncCallback *pCallback,
+ IUnknown *punkState) override;
+
+ STDMETHODIMP EndRead(IMFAsyncResult* pResult, ULONG *pcbRead) override;
+
+ STDMETHODIMP Write(const BYTE *, ULONG, ULONG *) override;
+
+ STDMETHODIMP BeginWrite(const BYTE *, ULONG ,
+ IMFAsyncCallback *,
+ IUnknown *) override;
+
+ STDMETHODIMP EndWrite(IMFAsyncResult *,
+ ULONG *) override;
+
+ STDMETHODIMP Seek(
+ MFBYTESTREAM_SEEK_ORIGIN SeekOrigin,
+ LONGLONG llSeekOffset,
+ DWORD,
+ QWORD *pqwCurrentPosition) override;
+
+ STDMETHODIMP Flush() override;
+
+ STDMETHODIMP Close() override;
+
+private:
+ class AsyncReadState : public IUnknown
+ {
+ public:
+ AsyncReadState(BYTE *pb, ULONG cb);
+ virtual ~AsyncReadState() = default;
+
+ //from IUnknown
+ STDMETHODIMP QueryInterface(REFIID riid, LPVOID *ppvObject) override;
+
+ STDMETHODIMP_(ULONG) AddRef(void) override;
+
+ STDMETHODIMP_(ULONG) Release(void) override;
+
+ BYTE* pb() const;
+ ULONG cb() const;
+ ULONG bytesRead() const;
+
+ void setBytesRead(ULONG cbRead);
+
+ private:
+ long m_cRef;
+ BYTE *m_pb;
+ ULONG m_cb;
+ ULONG m_cbRead;
+ };
+
+ long m_cRef;
+ QPointer<QIODevice> m_stream;
+ bool m_ownStream;
+ DWORD m_workQueueId;
+ QMutex m_mutex;
+
+ void doRead();
+
+protected:
+ void customEvent(QEvent *event) override;
+ IMFAsyncResult *m_currentReadResult;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/windows/player/mfactivate.cpp b/src/plugins/multimedia/windows/player/mfactivate.cpp
new file mode 100644
index 000000000..644c96529
--- /dev/null
+++ b/src/plugins/multimedia/windows/player/mfactivate.cpp
@@ -0,0 +1,17 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "mfactivate_p.h"
+
+#include <mfapi.h>
+
+MFAbstractActivate::MFAbstractActivate()
+{
+ MFCreateAttributes(&m_attributes, 0);
+}
+
+MFAbstractActivate::~MFAbstractActivate()
+{
+ if (m_attributes)
+ m_attributes->Release();
+}
diff --git a/src/plugins/multimedia/windows/player/mfactivate_p.h b/src/plugins/multimedia/windows/player/mfactivate_p.h
new file mode 100644
index 000000000..efe75474b
--- /dev/null
+++ b/src/plugins/multimedia/windows/player/mfactivate_p.h
@@ -0,0 +1,202 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef MFACTIVATE_H
+#define MFACTIVATE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <mfidl.h>
+#include <private/qcomobject_p.h>
+
+QT_BEGIN_NAMESPACE
+
+namespace QtPrivate {
+
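+// Let QComObject answer QueryInterface() for IMFAttributes as well, since
+// IMFActivate derives from it.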
+template <>
+struct QComObjectTraits<IMFActivate>
+{
+ static constexpr bool isGuidOf(REFIID riid) noexcept
+ {
+ return QComObjectTraits<IMFActivate, IMFAttributes>::isGuidOf(riid);
+ }
+};
+
+} // namespace QtPrivate
+
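+// Implements IMFActivate by delegating the entire IMFAttributes interface to an
+// internal attribute store created in the constructor.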
+class MFAbstractActivate : public QComObject<IMFActivate>
+{
+public:
+ explicit MFAbstractActivate();
+
+ //from IMFAttributes
+ STDMETHODIMP GetItem(REFGUID guidKey, PROPVARIANT *pValue) override
+ {
+ return m_attributes->GetItem(guidKey, pValue);
+ }
+
+ STDMETHODIMP GetItemType(REFGUID guidKey, MF_ATTRIBUTE_TYPE *pType) override
+ {
+ return m_attributes->GetItemType(guidKey, pType);
+ }
+
+ STDMETHODIMP CompareItem(REFGUID guidKey, REFPROPVARIANT Value, BOOL *pbResult) override
+ {
+ return m_attributes->CompareItem(guidKey, Value, pbResult);
+ }
+
+ STDMETHODIMP Compare(IMFAttributes *pTheirs, MF_ATTRIBUTES_MATCH_TYPE MatchType, BOOL *pbResult) override
+ {
+ return m_attributes->Compare(pTheirs, MatchType, pbResult);
+ }
+
+ STDMETHODIMP GetUINT32(REFGUID guidKey, UINT32 *punValue) override
+ {
+ return m_attributes->GetUINT32(guidKey, punValue);
+ }
+
+ STDMETHODIMP GetUINT64(REFGUID guidKey, UINT64 *punValue) override
+ {
+ return m_attributes->GetUINT64(guidKey, punValue);
+ }
+
+ STDMETHODIMP GetDouble(REFGUID guidKey, double *pfValue) override
+ {
+ return m_attributes->GetDouble(guidKey, pfValue);
+ }
+
+ STDMETHODIMP GetGUID(REFGUID guidKey, GUID *pguidValue) override
+ {
+ return m_attributes->GetGUID(guidKey, pguidValue);
+ }
+
+ STDMETHODIMP GetStringLength(REFGUID guidKey, UINT32 *pcchLength) override
+ {
+ return m_attributes->GetStringLength(guidKey, pcchLength);
+ }
+
+ STDMETHODIMP GetString(REFGUID guidKey, LPWSTR pwszValue, UINT32 cchBufSize, UINT32 *pcchLength) override
+ {
+ return m_attributes->GetString(guidKey, pwszValue, cchBufSize, pcchLength);
+ }
+
+ STDMETHODIMP GetAllocatedString(REFGUID guidKey, LPWSTR *ppwszValue, UINT32 *pcchLength) override
+ {
+ return m_attributes->GetAllocatedString(guidKey, ppwszValue, pcchLength);
+ }
+
+ STDMETHODIMP GetBlobSize(REFGUID guidKey, UINT32 *pcbBlobSize) override
+ {
+ return m_attributes->GetBlobSize(guidKey, pcbBlobSize);
+ }
+
+ STDMETHODIMP GetBlob(REFGUID guidKey, UINT8 *pBuf, UINT32 cbBufSize, UINT32 *pcbBlobSize) override
+ {
+ return m_attributes->GetBlob(guidKey, pBuf, cbBufSize, pcbBlobSize);
+ }
+
+ STDMETHODIMP GetAllocatedBlob(REFGUID guidKey, UINT8 **ppBuf, UINT32 *pcbSize) override
+ {
+ return m_attributes->GetAllocatedBlob(guidKey, ppBuf, pcbSize);
+ }
+
+ STDMETHODIMP GetUnknown(REFGUID guidKey, REFIID riid, LPVOID *ppv) override
+ {
+ return m_attributes->GetUnknown(guidKey, riid, ppv);
+ }
+
+ STDMETHODIMP SetItem(REFGUID guidKey, REFPROPVARIANT Value) override
+ {
+ return m_attributes->SetItem(guidKey, Value);
+ }
+
+ STDMETHODIMP DeleteItem(REFGUID guidKey) override
+ {
+ return m_attributes->DeleteItem(guidKey);
+ }
+
+ STDMETHODIMP DeleteAllItems() override
+ {
+ return m_attributes->DeleteAllItems();
+ }
+
+ STDMETHODIMP SetUINT32(REFGUID guidKey, UINT32 unValue) override
+ {
+ return m_attributes->SetUINT32(guidKey, unValue);
+ }
+
+ STDMETHODIMP SetUINT64(REFGUID guidKey, UINT64 unValue) override
+ {
+ return m_attributes->SetUINT64(guidKey, unValue);
+ }
+
+ STDMETHODIMP SetDouble(REFGUID guidKey, double fValue) override
+ {
+ return m_attributes->SetDouble(guidKey, fValue);
+ }
+
+ STDMETHODIMP SetGUID(REFGUID guidKey, REFGUID guidValue) override
+ {
+ return m_attributes->SetGUID(guidKey, guidValue);
+ }
+
+ STDMETHODIMP SetString(REFGUID guidKey, LPCWSTR wszValue) override
+ {
+ return m_attributes->SetString(guidKey, wszValue);
+ }
+
+ STDMETHODIMP SetBlob(REFGUID guidKey, const UINT8 *pBuf, UINT32 cbBufSize) override
+ {
+ return m_attributes->SetBlob(guidKey, pBuf, cbBufSize);
+ }
+
+ STDMETHODIMP SetUnknown(REFGUID guidKey, IUnknown *pUnknown) override
+ {
+ return m_attributes->SetUnknown(guidKey, pUnknown);
+ }
+
+ STDMETHODIMP LockStore() override
+ {
+ return m_attributes->LockStore();
+ }
+
+ STDMETHODIMP UnlockStore() override
+ {
+ return m_attributes->UnlockStore();
+ }
+
+ STDMETHODIMP GetCount(UINT32 *pcItems) override
+ {
+ return m_attributes->GetCount(pcItems);
+ }
+
+ STDMETHODIMP GetItemByIndex(UINT32 unIndex, GUID *pguidKey, PROPVARIANT *pValue) override
+ {
+ return m_attributes->GetItemByIndex(unIndex, pguidKey, pValue);
+ }
+
+ STDMETHODIMP CopyAllItems(IMFAttributes *pDest) override
+ {
+ return m_attributes->CopyAllItems(pDest);
+ }
+
+protected:
+ // Destructor is not public. Caller should call Release.
+ ~MFAbstractActivate() override;
+
+private:
+ IMFAttributes *m_attributes = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif // MFACTIVATE_H
diff --git a/src/plugins/multimedia/windows/player/mfevrvideowindowcontrol.cpp b/src/plugins/multimedia/windows/player/mfevrvideowindowcontrol.cpp
new file mode 100644
index 000000000..109f7964b
--- /dev/null
+++ b/src/plugins/multimedia/windows/player/mfevrvideowindowcontrol.cpp
@@ -0,0 +1,55 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "mfevrvideowindowcontrol_p.h"
+
+#include <qdebug.h>
+
+MFEvrVideoWindowControl::MFEvrVideoWindowControl(QVideoSink *parent)
+ : EvrVideoWindowControl(parent)
+ , m_currentActivate(NULL)
+ , m_evrSink(NULL)
+{
+}
+
+MFEvrVideoWindowControl::~MFEvrVideoWindowControl()
+{
+ clear();
+}
+
+void MFEvrVideoWindowControl::clear()
+{
+ setEvr(NULL);
+
+ if (m_evrSink)
+ m_evrSink->Release();
+ if (m_currentActivate) {
+ m_currentActivate->ShutdownObject();
+ m_currentActivate->Release();
+ }
+ m_evrSink = NULL;
+ m_currentActivate = NULL;
+}
+
+IMFActivate* MFEvrVideoWindowControl::createActivate()
+{
+ clear();
+
+ if (FAILED(MFCreateVideoRendererActivate(0, &m_currentActivate))) {
+ qWarning() << "Failed to create evr video renderer activate!";
+ return NULL;
+ }
+ if (FAILED(m_currentActivate->ActivateObject(IID_IMFMediaSink, (LPVOID*)(&m_evrSink)))) {
+ qWarning() << "Failed to activate evr media sink!";
+ return NULL;
+ }
+ if (!setEvr(m_evrSink))
+ return NULL;
+
+ return m_currentActivate;
+}
+
+void MFEvrVideoWindowControl::releaseActivate()
+{
+ clear();
+}
diff --git a/src/plugins/multimedia/windows/player/mfevrvideowindowcontrol_p.h b/src/plugins/multimedia/windows/player/mfevrvideowindowcontrol_p.h
new file mode 100644
index 000000000..1ac90e8ce
--- /dev/null
+++ b/src/plugins/multimedia/windows/player/mfevrvideowindowcontrol_p.h
@@ -0,0 +1,38 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef MFEVRVIDEOWINDOWCONTROL_H
+#define MFEVRVIDEOWINDOWCONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "evrvideowindowcontrol_p.h"
+
+QT_USE_NAMESPACE
+
+class MFEvrVideoWindowControl : public EvrVideoWindowControl
+{
+public:
+ MFEvrVideoWindowControl(QVideoSink *parent = 0);
+ ~MFEvrVideoWindowControl();
+
+ IMFActivate* createActivate();
+ void releaseActivate();
+
+private:
+ void clear();
+
+ IMFActivate *m_currentActivate;
+ IMFMediaSink *m_evrSink;
+};
+
+#endif // MFEVRVIDEOWINDOWCONTROL_H
diff --git a/src/plugins/multimedia/windows/player/mfplayercontrol.cpp b/src/plugins/multimedia/windows/player/mfplayercontrol.cpp
new file mode 100644
index 000000000..ae0022773
--- /dev/null
+++ b/src/plugins/multimedia/windows/player/mfplayercontrol.cpp
@@ -0,0 +1,306 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "mfplayercontrol_p.h"
+#include "mfplayersession_p.h"
+#include "mfvideorenderercontrol_p.h"
+#include <qdebug.h>
+
+//#define DEBUG_MEDIAFOUNDATION
+
+MFPlayerControl::MFPlayerControl(QMediaPlayer *player)
+ : QPlatformMediaPlayer(player)
+ , m_state(QMediaPlayer::StoppedState)
+ , m_stateDirty(false)
+ , m_videoAvailable(false)
+ , m_audioAvailable(false)
+ , m_duration(0)
+ , m_seekable(false)
+{
+ m_session = new MFPlayerSession(this);
+}
+
+MFPlayerControl::~MFPlayerControl()
+{
+ m_session->close();
+ m_session->setPlayerControl(nullptr);
+ m_session->Release();
+}
+
+void MFPlayerControl::setMedia(const QUrl &media, QIODevice *stream)
+{
+ if (m_state != QMediaPlayer::StoppedState) {
+ changeState(QMediaPlayer::StoppedState);
+ m_session->stop(true);
+ refreshState();
+ }
+
+ m_media = media;
+ m_stream = stream;
+ resetAudioVideoAvailable();
+ handleDurationUpdate(0);
+ handleSeekableUpdate(false);
+ m_session->load(media, stream);
+}
+
+void MFPlayerControl::play()
+{
+ if (m_state == QMediaPlayer::PlayingState)
+ return;
+ resetCurrentLoop();
+ if (QMediaPlayer::InvalidMedia == m_session->status())
+ m_session->load(m_media, m_stream);
+
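+    // Only start the session right away if the media is ready. While loading
+    // or stalled, just record the playing state; playback is started later in
+    // handleStatusChanged() once enough data is available.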
+ switch (m_session->status()) {
+ case QMediaPlayer::NoMedia:
+ case QMediaPlayer::InvalidMedia:
+ return;
+ case QMediaPlayer::LoadedMedia:
+ case QMediaPlayer::BufferingMedia:
+ case QMediaPlayer::BufferedMedia:
+ case QMediaPlayer::EndOfMedia:
+ changeState(QMediaPlayer::PlayingState);
+ m_session->start();
+ break;
+ default: //Loading/Stalled
+ changeState(QMediaPlayer::PlayingState);
+ break;
+ }
+ refreshState();
+}
+
+void MFPlayerControl::pause()
+{
+ if (m_state == QMediaPlayer::PausedState)
+ return;
+
+ if (m_session->status() == QMediaPlayer::NoMedia ||
+ m_session->status() == QMediaPlayer::InvalidMedia)
+ return;
+
+ changeState(QMediaPlayer::PausedState);
+ m_session->pause();
+ refreshState();
+}
+
+void MFPlayerControl::stop()
+{
+ if (m_state == QMediaPlayer::StoppedState)
+ return;
+ changeState(QMediaPlayer::StoppedState);
+ m_session->stop();
+ refreshState();
+}
+
+QMediaMetaData MFPlayerControl::metaData() const
+{
+ return m_session->metaData();
+}
+
+void MFPlayerControl::setAudioOutput(QPlatformAudioOutput *output)
+{
+ m_session->setAudioOutput(output);
+}
+
+void MFPlayerControl::setVideoSink(QVideoSink *sink)
+{
+ m_session->setVideoSink(sink);
+}
+
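+// State changes are coalesced: changeState() only records the new state and
+// marks it dirty, and the stateChanged() notification is deferred until
+// refreshState() runs, so each operation emits at most one state change.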
+void MFPlayerControl::changeState(QMediaPlayer::PlaybackState state)
+{
+ if (m_state == state)
+ return;
+ m_state = state;
+ m_stateDirty = true;
+}
+
+void MFPlayerControl::refreshState()
+{
+ if (!m_stateDirty)
+ return;
+ m_stateDirty = false;
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "MFPlayerControl::emit stateChanged" << m_state;
+#endif
+ stateChanged(m_state);
+}
+
+void MFPlayerControl::handleStatusChanged()
+{
+ QMediaPlayer::MediaStatus status = m_session->status();
+ switch (status) {
+ case QMediaPlayer::EndOfMedia:
+ if (doLoop()) {
+ setPosition(0);
+ m_session->start();
+ } else {
+ changeState(QMediaPlayer::StoppedState);
+ }
+ break;
+ case QMediaPlayer::InvalidMedia:
+ break;
+ case QMediaPlayer::LoadedMedia:
+ case QMediaPlayer::BufferingMedia:
+ case QMediaPlayer::BufferedMedia:
+ if (m_state == QMediaPlayer::PlayingState)
+ m_session->start();
+ break;
+ default:
+ break;
+ }
+ mediaStatusChanged(m_session->status());
+ refreshState();
+}
+
+void MFPlayerControl::handleTracksChanged()
+{
+ tracksChanged();
+}
+
+void MFPlayerControl::handleVideoAvailable()
+{
+ if (m_videoAvailable)
+ return;
+ m_videoAvailable = true;
+ videoAvailableChanged(m_videoAvailable);
+}
+
+void MFPlayerControl::handleAudioAvailable()
+{
+ if (m_audioAvailable)
+ return;
+ m_audioAvailable = true;
+ audioAvailableChanged(m_audioAvailable);
+}
+
+void MFPlayerControl::resetAudioVideoAvailable()
+{
+ bool videoDirty = false;
+ if (m_videoAvailable) {
+ m_videoAvailable = false;
+ videoDirty = true;
+ }
+ if (m_audioAvailable) {
+ m_audioAvailable = false;
+ audioAvailableChanged(m_audioAvailable);
+ }
+ if (videoDirty)
+ videoAvailableChanged(m_videoAvailable);
+}
+
+void MFPlayerControl::handleDurationUpdate(qint64 duration)
+{
+ if (m_duration == duration)
+ return;
+ m_duration = duration;
+ durationChanged(m_duration);
+}
+
+void MFPlayerControl::handleSeekableUpdate(bool seekable)
+{
+ if (m_seekable == seekable)
+ return;
+ m_seekable = seekable;
+ seekableChanged(m_seekable);
+}
+
+QMediaPlayer::PlaybackState MFPlayerControl::state() const
+{
+ return m_state;
+}
+
+QMediaPlayer::MediaStatus MFPlayerControl::mediaStatus() const
+{
+ return m_session->status();
+}
+
+qint64 MFPlayerControl::duration() const
+{
+ return m_duration;
+}
+
+qint64 MFPlayerControl::position() const
+{
+ return m_session->position();
+}
+
+void MFPlayerControl::setPosition(qint64 position)
+{
+ if (!m_seekable || position == m_session->position())
+ return;
+ m_session->setPosition(position);
+}
+
+float MFPlayerControl::bufferProgress() const
+{
+ return m_session->bufferProgress() / 100.;
+}
+
+bool MFPlayerControl::isAudioAvailable() const
+{
+ return m_audioAvailable;
+}
+
+bool MFPlayerControl::isVideoAvailable() const
+{
+ return m_videoAvailable;
+}
+
+bool MFPlayerControl::isSeekable() const
+{
+ return m_seekable;
+}
+
+QMediaTimeRange MFPlayerControl::availablePlaybackRanges() const
+{
+ return m_session->availablePlaybackRanges();
+}
+
+qreal MFPlayerControl::playbackRate() const
+{
+ return m_session->playbackRate();
+}
+
+void MFPlayerControl::setPlaybackRate(qreal rate)
+{
+ m_session->setPlaybackRate(rate);
+}
+
+QUrl MFPlayerControl::media() const
+{
+ return m_media;
+}
+
+const QIODevice* MFPlayerControl::mediaStream() const
+{
+ return m_stream;
+}
+
+void MFPlayerControl::handleError(QMediaPlayer::Error errorCode, const QString& errorString, bool isFatal)
+{
+ if (isFatal)
+ stop();
+ error(int(errorCode), errorString);
+}
+
+void MFPlayerControl::setActiveTrack(TrackType type, int index)
+{
+ m_session->setActiveTrack(type, index);
+}
+
+int MFPlayerControl::activeTrack(TrackType type)
+{
+ return m_session->activeTrack(type);
+}
+
+int MFPlayerControl::trackCount(TrackType type)
+{
+ return m_session->trackCount(type);
+}
+
+QMediaMetaData MFPlayerControl::trackMetaData(TrackType type, int trackNumber)
+{
+ return m_session->trackMetaData(type, trackNumber);
+}
+
diff --git a/src/plugins/multimedia/windows/player/mfplayercontrol_p.h b/src/plugins/multimedia/windows/player/mfplayercontrol_p.h
new file mode 100644
index 000000000..db863afaa
--- /dev/null
+++ b/src/plugins/multimedia/windows/player/mfplayercontrol_p.h
@@ -0,0 +1,103 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef MFPLAYERCONTROL_H
+#define MFPLAYERCONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qurl.h"
+#include "private/qplatformmediaplayer_p.h"
+
+#include <QtCore/qcoreevent.h>
+
+QT_BEGIN_NAMESPACE
+
+class MFPlayerSession;
+
+class MFPlayerControl : public QPlatformMediaPlayer
+{
+public:
+ MFPlayerControl(QMediaPlayer *player);
+ ~MFPlayerControl();
+
+ QMediaPlayer::PlaybackState state() const override;
+
+ QMediaPlayer::MediaStatus mediaStatus() const override;
+
+ qint64 duration() const override;
+
+ qint64 position() const override;
+ void setPosition(qint64 position) override;
+
+ float bufferProgress() const override;
+
+ bool isAudioAvailable() const override;
+ bool isVideoAvailable() const override;
+
+ bool isSeekable() const override;
+
+ QMediaTimeRange availablePlaybackRanges() const override;
+
+ qreal playbackRate() const override;
+ void setPlaybackRate(qreal rate) override;
+
+ QUrl media() const override;
+ const QIODevice *mediaStream() const override;
+ void setMedia(const QUrl &media, QIODevice *stream) override;
+
+ void play() override;
+ void pause() override;
+ void stop() override;
+
+ bool streamPlaybackSupported() const override { return true; }
+
+ QMediaMetaData metaData() const override;
+
+ void setAudioOutput(QPlatformAudioOutput *output) override;
+
+ void setVideoSink(QVideoSink *sink) override;
+
+ void setActiveTrack(TrackType type, int index) override;
+ int activeTrack(TrackType type) override;
+ int trackCount(TrackType type) override;
+ QMediaMetaData trackMetaData(TrackType type, int trackNumber) override;
+
+ void handleStatusChanged();
+ void handleTracksChanged();
+ void handleVideoAvailable();
+ void handleAudioAvailable();
+ void handleDurationUpdate(qint64 duration);
+ void handleSeekableUpdate(bool seekable);
+ void handleError(QMediaPlayer::Error errorCode, const QString& errorString, bool isFatal);
+
+private:
+ void changeState(QMediaPlayer::PlaybackState state);
+ void resetAudioVideoAvailable();
+ void refreshState();
+
+ QMediaPlayer::PlaybackState m_state;
+ bool m_stateDirty;
+
+ bool m_videoAvailable;
+ bool m_audioAvailable;
+ qint64 m_duration;
+ bool m_seekable;
+
+ QIODevice *m_stream;
+ QUrl m_media;
+ MFPlayerSession *m_session;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/windows/player/mfplayersession.cpp b/src/plugins/multimedia/windows/player/mfplayersession.cpp
new file mode 100644
index 000000000..996ce35d8
--- /dev/null
+++ b/src/plugins/multimedia/windows/player/mfplayersession.cpp
@@ -0,0 +1,1736 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "private/qplatformmediaplayer_p.h"
+
+#include <QtCore/qcoreapplication.h>
+#include <QtCore/qdatetime.h>
+#include <QtCore/qthread.h>
+#include <QtCore/qvarlengtharray.h>
+#include <QtCore/qdebug.h>
+#include <QtCore/qfile.h>
+#include <QtCore/qbuffer.h>
+
+#include "private/qplatformaudiooutput_p.h"
+#include "qaudiooutput.h"
+
+#include "mfplayercontrol_p.h"
+#include "mfvideorenderercontrol_p.h"
+#include <mfmetadata_p.h>
+#include <private/qwindowsmfdefs_p.h>
+#include <private/qwindowsaudioutils_p.h>
+
+#include "mfplayersession_p.h"
+#include <mferror.h>
+#include <nserror.h>
+#include <winerror.h>
+#include "sourceresolver_p.h"
+#include <wmcodecdsp.h>
+
+#include <mmdeviceapi.h>
+#include <propvarutil.h>
+#include <functiondiscoverykeys_devpkey.h>
+
+//#define DEBUG_MEDIAFOUNDATION
+
+QT_BEGIN_NAMESPACE
+
+MFPlayerSession::MFPlayerSession(MFPlayerControl *playerControl)
+ : m_cRef(1),
+ m_playerControl(playerControl),
+ m_scrubbing(false),
+ m_restoreRate(1),
+ m_closing(false),
+ m_mediaTypes(0),
+ m_pendingRate(1),
+ m_status(QMediaPlayer::NoMedia)
+
+{
+ connect(this, &MFPlayerSession::sessionEvent, this, &MFPlayerSession::handleSessionEvent);
+
+ m_signalPositionChangeTimer.setInterval(10);
+ m_signalPositionChangeTimer.setTimerType(Qt::PreciseTimer);
+ m_signalPositionChangeTimer.callOnTimeout(this, &MFPlayerSession::timeout);
+
+ m_pendingState = NoPending;
+ ZeroMemory(&m_state, sizeof(m_state));
+ m_state.command = CmdStop;
+ m_state.prevCmd = CmdNone;
+ m_state.rate = 1.0f;
+ ZeroMemory(&m_request, sizeof(m_request));
+ m_request.command = CmdNone;
+ m_request.prevCmd = CmdNone;
+ m_request.rate = 1.0f;
+
+ m_videoRendererControl = new MFVideoRendererControl(this);
+}
+
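+// Called every 10 ms while playing. Position updates are throttled to every
+// tenth tick (roughly 100 ms). Near the end of the media (m_duration is in
+// 100-ns units, so within ~20 ms of the end) looping is handled by pausing
+// and seeking back to the start.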
+void MFPlayerSession::timeout()
+{
+ const qint64 pos = position();
+
+ if (pos != m_lastPosition) {
+ const bool updatePos = m_timeCounter++ % 10 == 0;
+ if (pos >= qint64(m_duration / 10000 - 20)) {
+ if (m_playerControl->doLoop()) {
+ m_session->Pause();
+ setPosition(0);
+ positionChanged(0);
+ } else {
+ if (updatePos)
+ positionChanged(pos);
+ }
+ } else {
+ if (updatePos)
+ positionChanged(pos);
+ }
+ m_lastPosition = pos;
+ }
+}
+
+void MFPlayerSession::close()
+{
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "close";
+#endif
+
+ m_signalPositionChangeTimer.stop();
+ clear();
+ if (!m_session)
+ return;
+
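+    // IMFMediaSession::Close() is asynchronous; completion is signalled by the
+    // MESessionClosed event (see Invoke()), which sets m_hCloseEvent. Wait up
+    // to two seconds for it before shutting the session down.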
+ HRESULT hr = S_OK;
+ if (m_session) {
+ m_closing = true;
+ hr = m_session->Close();
+ if (SUCCEEDED(hr)) {
+ DWORD dwWaitResult = WaitForSingleObject(m_hCloseEvent.get(), 2000);
+ if (dwWaitResult == WAIT_TIMEOUT) {
+                qWarning() << "Session close timed out!";
+ }
+ }
+ m_closing = false;
+ }
+
+ if (SUCCEEDED(hr)) {
+ if (m_session)
+ m_session->Shutdown();
+ if (m_sourceResolver)
+ m_sourceResolver->shutdown();
+ }
+ m_sourceResolver.Reset();
+
+ m_videoRendererControl->releaseActivate();
+
+ m_session.Reset();
+ m_hCloseEvent = {};
+ m_lastPosition = -1;
+ m_position = 0;
+}
+
+void MFPlayerSession::load(const QUrl &url, QIODevice *stream)
+{
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "load";
+#endif
+ clear();
+
+ if (m_status == QMediaPlayer::LoadingMedia && m_sourceResolver)
+ m_sourceResolver->cancel();
+
+ if (url.isEmpty() && !stream) {
+ close();
+ changeStatus(QMediaPlayer::NoMedia);
+ } else if (stream && (!stream->isReadable())) {
+ close();
+ changeStatus(QMediaPlayer::InvalidMedia);
+ error(QMediaPlayer::ResourceError, tr("Invalid stream source."), true);
+ } else if (createSession()) {
+ changeStatus(QMediaPlayer::LoadingMedia);
+ m_sourceResolver->load(url, stream);
+ if (url.isLocalFile())
+ m_updateRoutingOnStart = true;
+ }
+ positionChanged(position());
+}
+
+void MFPlayerSession::handleSourceError(long hr)
+{
+ QString errorString;
+ QMediaPlayer::Error errorCode = QMediaPlayer::ResourceError;
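+    // Note: hr is not always a genuine HRESULT; the source resolver is assumed
+    // to pass QMediaPlayer::FormatError directly when an invalid Qt resource
+    // was requested, which is why it appears as a case label below.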
+ switch (hr) {
+ case QMediaPlayer::FormatError:
+ errorCode = QMediaPlayer::FormatError;
+        errorString = tr("Attempting to play an invalid Qt resource.");
+ break;
+ case NS_E_FILE_NOT_FOUND:
+ errorString = tr("The system cannot find the file specified.");
+ break;
+ case NS_E_SERVER_NOT_FOUND:
+ errorString = tr("The specified server could not be found.");
+ break;
+ case MF_E_UNSUPPORTED_BYTESTREAM_TYPE:
+ errorCode = QMediaPlayer::FormatError;
+ errorString = tr("Unsupported media type.");
+ break;
+ case MF_E_UNSUPPORTED_SCHEME:
+ errorCode = QMediaPlayer::ResourceError;
+ errorString = tr("Unsupported URL scheme.");
+ break;
+ case QMM_WININET_E_CANNOT_CONNECT:
+ errorCode = QMediaPlayer::NetworkError;
+ errorString = tr("Connection to server could not be established.");
+ break;
+ default:
+ qWarning() << "handleSourceError:"
+ << Qt::showbase << Qt::hex << Qt::uppercasedigits << static_cast<quint32>(hr);
+ errorString = tr("Failed to load source.");
+ break;
+ }
+ changeStatus(QMediaPlayer::InvalidMedia);
+ error(errorCode, errorString, true);
+}
+
+void MFPlayerSession::handleMediaSourceReady()
+{
+ if (QMediaPlayer::LoadingMedia != m_status || !m_sourceResolver
+ || m_sourceResolver.Get() != sender())
+ return;
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "handleMediaSourceReady";
+#endif
+ HRESULT hr = S_OK;
+ IMFMediaSource* mediaSource = m_sourceResolver->mediaSource();
+
+ DWORD dwCharacteristics = 0;
+ mediaSource->GetCharacteristics(&dwCharacteristics);
+ seekableUpdate(MFMEDIASOURCE_CAN_SEEK & dwCharacteristics);
+
+ ComPtr<IMFPresentationDescriptor> sourcePD;
+ hr = mediaSource->CreatePresentationDescriptor(&sourcePD);
+ if (SUCCEEDED(hr)) {
+ m_duration = 0;
+ m_metaData = MFMetaData::fromNative(mediaSource);
+ metaDataChanged();
+ sourcePD->GetUINT64(MF_PD_DURATION, &m_duration);
+        // convert from 100-nanosecond units to milliseconds
+ durationUpdate(qint64(m_duration / 10000));
+ setupPlaybackTopology(mediaSource, sourcePD.Get());
+ tracksChanged();
+ } else {
+ changeStatus(QMediaPlayer::InvalidMedia);
+ error(QMediaPlayer::ResourceError, tr("Cannot create presentation descriptor."), true);
+ }
+}
+
+bool MFPlayerSession::getStreamInfo(IMFStreamDescriptor *stream,
+ MFPlayerSession::MediaType *type,
+ QString *name,
+ QString *language,
+ GUID *format) const
+{
+ if (!stream || !type || !name || !language || !format)
+ return false;
+
+ *type = Unknown;
+ *name = QString();
+ *language = QString();
+
+ ComPtr<IMFMediaTypeHandler> typeHandler;
+
+ if (SUCCEEDED(stream->GetMediaTypeHandler(&typeHandler))) {
+
+ UINT32 len = 0;
+ if (SUCCEEDED(stream->GetStringLength(QMM_MF_SD_STREAM_NAME, &len)) && len > 0) {
+ WCHAR *wstr = new WCHAR[len+1];
+ if (SUCCEEDED(stream->GetString(QMM_MF_SD_STREAM_NAME, wstr, len+1, &len))) {
+ *name = QString::fromUtf16(reinterpret_cast<const char16_t *>(wstr));
+ }
+ delete []wstr;
+ }
+ if (SUCCEEDED(stream->GetStringLength(QMM_MF_SD_LANGUAGE, &len)) && len > 0) {
+ WCHAR *wstr = new WCHAR[len+1];
+ if (SUCCEEDED(stream->GetString(QMM_MF_SD_LANGUAGE, wstr, len+1, &len))) {
+ *language = QString::fromUtf16(reinterpret_cast<const char16_t *>(wstr));
+ }
+ delete []wstr;
+ }
+
+ GUID guidMajorType;
+ if (SUCCEEDED(typeHandler->GetMajorType(&guidMajorType))) {
+ if (guidMajorType == MFMediaType_Audio)
+ *type = Audio;
+ else if (guidMajorType == MFMediaType_Video)
+ *type = Video;
+ }
+
+ ComPtr<IMFMediaType> mediaType;
+ if (SUCCEEDED(typeHandler->GetCurrentMediaType(&mediaType))) {
+ mediaType->GetGUID(MF_MT_SUBTYPE, format);
+ }
+ }
+
+ return *type != Unknown;
+}
+
+void MFPlayerSession::setupPlaybackTopology(IMFMediaSource *source, IMFPresentationDescriptor *sourcePD)
+{
+ HRESULT hr = S_OK;
+ // Get the number of streams in the media source.
+ DWORD cSourceStreams = 0;
+ hr = sourcePD->GetStreamDescriptorCount(&cSourceStreams);
+ if (FAILED(hr)) {
+ changeStatus(QMediaPlayer::InvalidMedia);
+ error(QMediaPlayer::ResourceError, tr("Failed to get stream count."), true);
+ return;
+ }
+
+ ComPtr<IMFTopology> topology;
+ hr = MFCreateTopology(&topology);
+ if (FAILED(hr)) {
+ changeStatus(QMediaPlayer::InvalidMedia);
+ error(QMediaPlayer::ResourceError, tr("Failed to create topology."), true);
+ return;
+ }
+
+ // For each stream, create the topology nodes and add them to the topology.
+ DWORD succeededCount = 0;
+ for (DWORD i = 0; i < cSourceStreams; i++) {
+ BOOL selected = FALSE;
+ bool streamAdded = false;
+ ComPtr<IMFStreamDescriptor> streamDesc;
+
+ HRESULT hr = sourcePD->GetStreamDescriptorByIndex(i, &selected, &streamDesc);
+ if (SUCCEEDED(hr)) {
+            // The media might have multiple audio and video streams;
+            // only use one of each kind, and only if it is selected by default.
+ MediaType mediaType = Unknown;
+ QString streamName;
+ QString streamLanguage;
+ GUID format = GUID_NULL;
+
+ if (getStreamInfo(streamDesc.Get(), &mediaType, &streamName, &streamLanguage,
+ &format)) {
+
+ QPlatformMediaPlayer::TrackType trackType = (mediaType == Audio) ?
+ QPlatformMediaPlayer::AudioStream : QPlatformMediaPlayer::VideoStream;
+
+ QLocale::Language lang = streamLanguage.isEmpty() ?
+ QLocale::Language::AnyLanguage : QLocale(streamLanguage).language();
+
+ QMediaMetaData metaData;
+ metaData.insert(QMediaMetaData::Title, streamName);
+ metaData.insert(QMediaMetaData::Language, lang);
+
+ m_trackInfo[trackType].metaData.append(metaData);
+ m_trackInfo[trackType].nativeIndexes.append(i);
+ m_trackInfo[trackType].format = format;
+
+ if (((m_mediaTypes & mediaType) == 0) && selected) { // Check if this type isn't already added
+ ComPtr<IMFTopologyNode> sourceNode =
+ addSourceNode(topology.Get(), source, sourcePD, streamDesc.Get());
+ if (sourceNode) {
+ ComPtr<IMFTopologyNode> outputNode =
+ addOutputNode(mediaType, topology.Get(), 0);
+ if (outputNode) {
+ sourceNode->GetTopoNodeID(&m_trackInfo[trackType].sourceNodeId);
+ outputNode->GetTopoNodeID(&m_trackInfo[trackType].outputNodeId);
+
+ hr = sourceNode->ConnectOutput(0, outputNode.Get(), 0);
+
+ if (FAILED(hr)) {
+ error(QMediaPlayer::FormatError, tr("Unable to play any stream."), false);
+ } else {
+ m_trackInfo[trackType].currentIndex = m_trackInfo[trackType].nativeIndexes.count() - 1;
+ streamAdded = true;
+ succeededCount++;
+ m_mediaTypes |= mediaType;
+ switch (mediaType) {
+ case Audio:
+ audioAvailable();
+ break;
+ case Video:
+ videoAvailable();
+ break;
+ default:
+ break;
+ }
+ }
+ } else {
+ // remove the source node if the output node cannot be created
+ topology->RemoveNode(sourceNode.Get());
+ }
+ }
+ }
+ }
+
+ if (selected && !streamAdded)
+ sourcePD->DeselectStream(i);
+ }
+ }
+
+ if (succeededCount == 0) {
+ changeStatus(QMediaPlayer::InvalidMedia);
+ error(QMediaPlayer::ResourceError, tr("Unable to play."), true);
+ } else {
+ if (m_trackInfo[QPlatformMediaPlayer::VideoStream].outputNodeId != TOPOID(-1))
+ topology = insertMFT(topology, m_trackInfo[QPlatformMediaPlayer::VideoStream].outputNodeId);
+
+ hr = m_session->SetTopology(MFSESSION_SETTOPOLOGY_IMMEDIATE, topology.Get());
+ if (SUCCEEDED(hr)) {
+ m_updatingTopology = true;
+ } else {
+ changeStatus(QMediaPlayer::InvalidMedia);
+ error(QMediaPlayer::ResourceError, tr("Failed to set topology."), true);
+ }
+ }
+}
+
+ComPtr<IMFTopologyNode> MFPlayerSession::addSourceNode(IMFTopology *topology,
+ IMFMediaSource *source,
+ IMFPresentationDescriptor *presentationDesc,
+ IMFStreamDescriptor *streamDesc)
+{
+ ComPtr<IMFTopologyNode> node;
+ HRESULT hr = MFCreateTopologyNode(MF_TOPOLOGY_SOURCESTREAM_NODE, &node);
+ if (SUCCEEDED(hr)) {
+ hr = node->SetUnknown(MF_TOPONODE_SOURCE, source);
+ if (SUCCEEDED(hr)) {
+ hr = node->SetUnknown(MF_TOPONODE_PRESENTATION_DESCRIPTOR, presentationDesc);
+ if (SUCCEEDED(hr)) {
+ hr = node->SetUnknown(MF_TOPONODE_STREAM_DESCRIPTOR, streamDesc);
+ if (SUCCEEDED(hr)) {
+ hr = topology->AddNode(node.Get());
+ if (SUCCEEDED(hr))
+ return node;
+ }
+ }
+ }
+ }
+ return NULL;
+}
+
+ComPtr<IMFTopologyNode> MFPlayerSession::addOutputNode(MediaType mediaType, IMFTopology *topology,
+ DWORD sinkID)
+{
+ ComPtr<IMFTopologyNode> node;
+ if (FAILED(MFCreateTopologyNode(MF_TOPOLOGY_OUTPUT_NODE, &node)))
+ return NULL;
+
+ ComPtr<IMFActivate> activate;
+ if (mediaType == Audio) {
+ if (m_audioOutput) {
+ auto id = m_audioOutput->device.id();
+ if (id.isEmpty()) {
+ qInfo() << "No audio output";
+ return NULL;
+ }
+
+ HRESULT hr = MFCreateAudioRendererActivate(&activate);
+ if (FAILED(hr)) {
+ qWarning() << "Failed to create audio renderer activate";
+ return NULL;
+ }
+
+ QString s = QString::fromUtf8(id);
+ hr = activate->SetString(MF_AUDIO_RENDERER_ATTRIBUTE_ENDPOINT_ID, (LPCWSTR)s.utf16());
+ if (FAILED(hr)) {
+ qWarning() << "Failed to set attribute for audio device"
+ << m_audioOutput->device.description();
+ return NULL;
+ }
+ }
+ } else if (mediaType == Video) {
+ activate = m_videoRendererControl->createActivate();
+
+ QSize resolution = m_metaData.value(QMediaMetaData::Resolution).toSize();
+
+ if (resolution.isValid())
+ m_videoRendererControl->setCropRect(QRect(QPoint(), resolution));
+
+ } else {
+ // Unknown stream type.
+ error(QMediaPlayer::FormatError, tr("Unknown stream type."), false);
+ }
+
+ if (!activate || FAILED(node->SetObject(activate.Get()))
+ || FAILED(node->SetUINT32(MF_TOPONODE_STREAMID, sinkID))
+ || FAILED(node->SetUINT32(MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE))
+ || FAILED(topology->AddNode(node.Get()))) {
+ node.Reset();
+ }
+
+ if (activate && mediaType == Audio)
+ activate.Reset();
+
+ return node;
+}
+
+// BindOutputNode
+// Sets the IMFStreamSink pointer on an output node.
+// IMFActivate pointer in the output node must be converted to an
+// IMFStreamSink pointer before the topology loader resolves the topology.
+HRESULT BindOutputNode(IMFTopologyNode *pNode)
+{
+ ComPtr<IUnknown> nodeObject;
+ ComPtr<IMFActivate> activate;
+ ComPtr<IMFStreamSink> stream;
+ ComPtr<IMFMediaSink> sink;
+
+ HRESULT hr = pNode->GetObject(&nodeObject);
+ if (FAILED(hr))
+ return hr;
+
+ hr = nodeObject->QueryInterface(IID_PPV_ARGS(&activate));
+ if (SUCCEEDED(hr)) {
+ DWORD dwStreamID = 0;
+
+ // Try to create the media sink.
+ hr = activate->ActivateObject(IID_PPV_ARGS(&sink));
+ if (SUCCEEDED(hr))
+ dwStreamID = MFGetAttributeUINT32(pNode, MF_TOPONODE_STREAMID, 0);
+
+ if (SUCCEEDED(hr)) {
+ // First check if the media sink already has a stream sink with the requested ID.
+ hr = sink->GetStreamSinkById(dwStreamID, &stream);
+ if (FAILED(hr)) {
+ // Create the stream sink.
+ hr = sink->AddStreamSink(dwStreamID, NULL, &stream);
+ }
+ }
+
+ // Replace the node's object pointer with the stream sink.
+ if (SUCCEEDED(hr)) {
+ hr = pNode->SetObject(stream.Get());
+ }
+ } else {
+ hr = nodeObject->QueryInterface(IID_PPV_ARGS(&stream));
+ }
+
+ return hr;
+}
+
+// BindOutputNodes
+// Sets the IMFStreamSink pointers on all of the output nodes in a topology.
+HRESULT BindOutputNodes(IMFTopology *pTopology)
+{
+ ComPtr<IMFCollection> collection;
+
+ // Get the collection of output nodes.
+ HRESULT hr = pTopology->GetOutputNodeCollection(&collection);
+
+ // Enumerate all of the nodes in the collection.
+ if (SUCCEEDED(hr)) {
+ DWORD cNodes;
+ hr = collection->GetElementCount(&cNodes);
+
+ if (SUCCEEDED(hr)) {
+ for (DWORD i = 0; i < cNodes; i++) {
+ ComPtr<IUnknown> element;
+ hr = collection->GetElement(i, &element);
+ if (FAILED(hr))
+ break;
+
+ ComPtr<IMFTopologyNode> node;
+ hr = element->QueryInterface(IID_IMFTopologyNode, &node);
+ if (FAILED(hr))
+ break;
+
+ // Bind this node.
+ hr = BindOutputNode(node.Get());
+ if (FAILED(hr))
+ break;
+ }
+ }
+ }
+
+ return hr;
+}
+
+// This method binds output nodes to complete the topology,
+// then loads the topology and inserts MFT between the output node
+// and a filter connected to the output node.
+ComPtr<IMFTopology> MFPlayerSession::insertMFT(const ComPtr<IMFTopology> &topology,
+ TOPOID outputNodeId)
+{
+ bool isNewTopology = false;
+
+ ComPtr<IMFTopoLoader> topoLoader;
+ ComPtr<IMFTopology> resolvedTopology;
+ ComPtr<IMFCollection> outputNodes;
+
+ do {
+ if (FAILED(BindOutputNodes(topology.Get())))
+ break;
+
+ if (FAILED(MFCreateTopoLoader(&topoLoader)))
+ break;
+
+ if (FAILED(topoLoader->Load(topology.Get(), &resolvedTopology, NULL))) {
+            // The topology could not be resolved; inserting a color converter
+            // into the topology might solve the problem.
+ insertColorConverter(topology.Get(), outputNodeId);
+ if (FAILED(topoLoader->Load(topology.Get(), &resolvedTopology, NULL)))
+ break;
+ }
+
+ if (insertResizer(resolvedTopology.Get()))
+ isNewTopology = true;
+ } while (false);
+
+ if (isNewTopology) {
+ return resolvedTopology;
+ }
+
+ return topology;
+}
+
+// This method checks if the topology contains a color converter transform (CColorConvertDMO),
+// if it does it inserts a resizer transform (CResizerDMO) to handle dynamic frame size change
+// of the video stream.
+// Returns true if it inserted a resizer
+bool MFPlayerSession::insertResizer(IMFTopology *topology)
+{
+ bool inserted = false;
+ WORD elementCount = 0;
+ ComPtr<IMFTopologyNode> node;
+ ComPtr<IUnknown> object;
+ ComPtr<IWMColorConvProps> colorConv;
+ ComPtr<IMFTransform> resizer;
+ ComPtr<IMFTopologyNode> resizerNode;
+ ComPtr<IMFTopologyNode> inputNode;
+
+ HRESULT hr = topology->GetNodeCount(&elementCount);
+ if (FAILED(hr))
+ return false;
+
+ for (WORD i = 0; i < elementCount; ++i) {
+ node.Reset();
+ object.Reset();
+
+ if (FAILED(topology->GetNode(i, &node)))
+ break;
+
+ MF_TOPOLOGY_TYPE nodeType;
+ if (FAILED(node->GetNodeType(&nodeType)))
+ break;
+
+ if (nodeType != MF_TOPOLOGY_TRANSFORM_NODE)
+ continue;
+
+ if (FAILED(node->GetObject(&object)))
+ break;
+
+ if (FAILED(object->QueryInterface(IID_PPV_ARGS(&colorConv))))
+ continue;
+
+ if (FAILED(CoCreateInstance(CLSID_CResizerDMO, NULL, CLSCTX_INPROC_SERVER, IID_IMFTransform,
+ &resizer)))
+ break;
+
+ if (FAILED(MFCreateTopologyNode(MF_TOPOLOGY_TRANSFORM_NODE, &resizerNode)))
+ break;
+
+ if (FAILED(resizerNode->SetObject(resizer.Get())))
+ break;
+
+ if (FAILED(topology->AddNode(resizerNode.Get())))
+ break;
+
+ DWORD outputIndex = 0;
+ if (FAILED(node->GetInput(0, &inputNode, &outputIndex))) {
+ topology->RemoveNode(resizerNode.Get());
+ break;
+ }
+
+ if (FAILED(inputNode->ConnectOutput(0, resizerNode.Get(), 0))) {
+ topology->RemoveNode(resizerNode.Get());
+ break;
+ }
+
+ if (FAILED(resizerNode->ConnectOutput(0, node.Get(), 0))) {
+ inputNode->ConnectOutput(0, node.Get(), 0);
+ topology->RemoveNode(resizerNode.Get());
+ break;
+ }
+
+ inserted = true;
+ break;
+ }
+
+ return inserted;
+}
+
+// This method inserts a color converter (CColorConvertDMO) in the topology,
+// typically to convert to RGB format.
+// Usually this converter is inserted automatically when the topology is resolved,
+// but in some cases that fails, so we then insert it ourselves.
+void MFPlayerSession::insertColorConverter(IMFTopology *topology, TOPOID outputNodeId)
+{
+ ComPtr<IMFCollection> outputNodes;
+
+ if (FAILED(topology->GetOutputNodeCollection(&outputNodes)))
+ return;
+
+ DWORD elementCount = 0;
+ if (FAILED(outputNodes->GetElementCount(&elementCount)))
+ return;
+
+ for (DWORD n = 0; n < elementCount; n++) {
+ ComPtr<IUnknown> element;
+ ComPtr<IMFTopologyNode> node;
+ ComPtr<IMFTopologyNode> inputNode;
+ ComPtr<IMFTopologyNode> mftNode;
+ ComPtr<IMFTransform> converter;
+
+ do {
+ if (FAILED(outputNodes->GetElement(n, &element)))
+ break;
+
+ if (FAILED(element->QueryInterface(IID_IMFTopologyNode, &node)))
+ break;
+
+ TOPOID id;
+ if (FAILED(node->GetTopoNodeID(&id)))
+ break;
+
+ if (id != outputNodeId)
+ break;
+
+ DWORD outputIndex = 0;
+ if (FAILED(node->GetInput(0, &inputNode, &outputIndex)))
+ break;
+
+ if (FAILED(MFCreateTopologyNode(MF_TOPOLOGY_TRANSFORM_NODE, &mftNode)))
+ break;
+
+ if (FAILED(CoCreateInstance(CLSID_CColorConvertDMO, NULL, CLSCTX_INPROC_SERVER,
+ IID_IMFTransform, &converter)))
+ break;
+
+ if (FAILED(mftNode->SetObject(converter.Get())))
+ break;
+
+ if (FAILED(topology->AddNode(mftNode.Get())))
+ break;
+
+ if (FAILED(inputNode->ConnectOutput(0, mftNode.Get(), 0)))
+ break;
+
+ if (FAILED(mftNode->ConnectOutput(0, node.Get(), 0)))
+ break;
+
+ } while (false);
+ }
+}
+
+void MFPlayerSession::stop(bool immediate)
+{
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "stop";
+#endif
+ if (!immediate && m_pendingState != NoPending) {
+ m_request.setCommand(CmdStop);
+ } else {
+ if (m_state.command == CmdStop)
+ return;
+
+ if (m_scrubbing)
+ scrub(false);
+
+ if (SUCCEEDED(m_session->Stop())) {
+
+ m_state.setCommand(CmdStop);
+ m_pendingState = CmdPending;
+ if (m_status != QMediaPlayer::EndOfMedia) {
+ m_position = 0;
+ positionChanged(0);
+ }
+ } else {
+ error(QMediaPlayer::ResourceError, tr("Failed to stop."), true);
+ }
+ }
+}
+
+void MFPlayerSession::start()
+{
+ if (status() == QMediaPlayer::LoadedMedia && m_updateRoutingOnStart) {
+ m_updateRoutingOnStart = false;
+ updateOutputRouting();
+ }
+
+ if (m_status == QMediaPlayer::EndOfMedia) {
+ m_position = 0; // restart from the beginning
+ positionChanged(0);
+ }
+
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "start";
+#endif
+
+ if (m_pendingState != NoPending) {
+ m_request.setCommand(CmdStart);
+ } else {
+ if (m_state.command == CmdStart)
+ return;
+
+ if (m_scrubbing) {
+ scrub(false);
+ m_position = position() * 10000;
+ }
+
+ if (m_restorePosition >= 0) {
+ m_position = m_restorePosition;
+ if (!m_updatingTopology)
+ m_restorePosition = -1;
+ }
+
+ PROPVARIANT varStart;
+ InitPropVariantFromInt64(m_position, &varStart);
+
+ if (SUCCEEDED(m_session->Start(&GUID_NULL, &varStart))) {
+ m_state.setCommand(CmdStart);
+ m_pendingState = CmdPending;
+ } else {
+            error(QMediaPlayer::ResourceError, tr("Failed to start playback."), true);
+ }
+ PropVariantClear(&varStart);
+ }
+}
+
+void MFPlayerSession::pause()
+{
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "pause";
+#endif
+ if (m_pendingState != NoPending) {
+ m_request.setCommand(CmdPause);
+ } else {
+ if (m_state.command == CmdPause)
+ return;
+
+ if (SUCCEEDED(m_session->Pause())) {
+ m_state.setCommand(CmdPause);
+ m_pendingState = CmdPending;
+ } else {
+ error(QMediaPlayer::ResourceError, tr("Failed to pause."), false);
+ }
+ if (m_status == QMediaPlayer::EndOfMedia) {
+ setPosition(0);
+ positionChanged(0);
+ }
+ }
+}
+
+void MFPlayerSession::changeStatus(QMediaPlayer::MediaStatus newStatus)
+{
+ if (m_status == newStatus)
+ return;
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "MFPlayerSession::changeStatus" << newStatus;
+#endif
+ m_status = newStatus;
+ statusChanged();
+}
+
+QMediaPlayer::MediaStatus MFPlayerSession::status() const
+{
+ return m_status;
+}
+
+bool MFPlayerSession::createSession()
+{
+ close();
+
+ Q_ASSERT(m_session == NULL);
+
+ HRESULT hr = MFCreateMediaSession(NULL, &m_session);
+ if (FAILED(hr)) {
+ changeStatus(QMediaPlayer::InvalidMedia);
+        error(QMediaPlayer::ResourceError, tr("Unable to create media session."), true);
+ return false;
+ }
+
+ m_hCloseEvent = EventHandle{ CreateEvent(NULL, FALSE, FALSE, NULL) };
+
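+    // Start pumping session events; Invoke() re-arms BeginGetEvent after
+    // handling each event until MESessionClosed arrives.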
+ hr = m_session->BeginGetEvent(this, m_session.Get());
+ if (FAILED(hr)) {
+ changeStatus(QMediaPlayer::InvalidMedia);
+ error(QMediaPlayer::ResourceError, tr("Unable to pull session events."), false);
+ close();
+ return false;
+ }
+
+ m_sourceResolver = makeComObject<SourceResolver>();
+ QObject::connect(m_sourceResolver.Get(), &SourceResolver::mediaSourceReady, this,
+ &MFPlayerSession::handleMediaSourceReady);
+ QObject::connect(m_sourceResolver.Get(), &SourceResolver::error, this,
+ &MFPlayerSession::handleSourceError);
+
+ m_position = 0;
+ return true;
+}
+
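+// Returns the current position in milliseconds. Media Foundation reports time
+// in 100-nanosecond units, so internal values are divided by 10000. Pending
+// seek requests take precedence over the presentation clock.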
+qint64 MFPlayerSession::position()
+{
+ if (m_request.command == CmdSeek || m_request.command == CmdSeekResume)
+ return m_request.start;
+
+ if (m_pendingState == SeekPending)
+ return m_state.start;
+
+ if (m_state.command == CmdStop)
+ return m_position / 10000;
+
+ if (m_presentationClock) {
+ MFTIME time, sysTime;
+ if (FAILED(m_presentationClock->GetCorrelatedTime(0, &time, &sysTime)))
+ return m_position / 10000;
+ return qint64(time / 10000);
+ }
+ return m_position / 10000;
+}
+
+void MFPlayerSession::setPosition(qint64 position)
+{
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "setPosition";
+#endif
+ if (m_pendingState != NoPending) {
+ m_request.setCommand(CmdSeek);
+ m_request.start = position;
+ } else {
+ setPositionInternal(position, CmdNone);
+ }
+}
+
+void MFPlayerSession::setPositionInternal(qint64 position, Command requestCmd)
+{
+ if (m_status == QMediaPlayer::EndOfMedia)
+ changeStatus(QMediaPlayer::LoadedMedia);
+ if (m_state.command == CmdStop && requestCmd != CmdSeekResume) {
+ m_position = position * 10000;
+ // Even though the position is not actually set on the session yet,
+        // report it as changed anyway so that UI controls are updated
+ positionChanged(this->position());
+ return;
+ }
+
+ if (m_state.command == CmdPause)
+ scrub(true);
+
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "setPositionInternal";
+#endif
+
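+    // Convert from milliseconds to the 100-nanosecond units used by the session.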
+ PROPVARIANT varStart;
+ varStart.vt = VT_I8;
+ varStart.hVal.QuadPart = LONGLONG(position * 10000);
+ if (SUCCEEDED(m_session->Start(NULL, &varStart))) {
+ PropVariantClear(&varStart);
+ // Store the pending state.
+ m_state.setCommand(CmdStart);
+ m_state.start = position;
+ m_pendingState = SeekPending;
+ } else {
+ error(QMediaPlayer::ResourceError, tr("Failed to seek."), true);
+ }
+}
+
+qreal MFPlayerSession::playbackRate() const
+{
+ if (m_scrubbing)
+ return m_restoreRate;
+ return m_state.rate;
+}
+
+void MFPlayerSession::setPlaybackRate(qreal rate)
+{
+ if (m_scrubbing) {
+ m_restoreRate = rate;
+ playbackRateChanged(rate);
+ return;
+ }
+ setPlaybackRateInternal(rate);
+}
+
+void MFPlayerSession::setPlaybackRateInternal(qreal rate)
+{
+ if (rate == m_request.rate)
+ return;
+
+ m_pendingRate = rate;
+ if (!m_rateSupport)
+ return;
+
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "setPlaybackRate";
+#endif
+ BOOL isThin = FALSE;
+
+ //from MSDN http://msdn.microsoft.com/en-us/library/aa965220%28v=vs.85%29.aspx
+ //Thinning applies primarily to video streams.
+    //In thinned mode, the source drops delta frames and delivers only key frames.
+    //At very high playback rates, the source might skip some key frames (for example, delivering every other key frame).
+
+ if (FAILED(m_rateSupport->IsRateSupported(FALSE, rate, NULL))) {
+ isThin = TRUE;
+ if (FAILED(m_rateSupport->IsRateSupported(isThin, rate, NULL))) {
+            qWarning() << "Unable to set playback rate =" << rate;
+ m_pendingRate = m_request.rate = m_state.rate;
+ return;
+ }
+ }
+ if (m_pendingState != NoPending) {
+ m_request.rate = rate;
+ m_request.isThin = isThin;
+ // Remember the current transport state (play, paused, etc), so that we
+ // can restore it after the rate change, if necessary. However, if
+        // another command is already pending, that one takes precedence.
+ if (m_request.command == CmdNone)
+ m_request.setCommand(m_state.command);
+ } else {
+ //No pending operation. Commit the new rate.
+ commitRateChange(rate, isThin);
+ }
+}
+
+void MFPlayerSession::commitRateChange(qreal rate, BOOL isThin)
+{
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "commitRateChange";
+#endif
+ Q_ASSERT(m_pendingState == NoPending);
+ MFTIME hnsSystemTime = 0;
+ MFTIME hnsClockTime = 0;
+ Command cmdNow = m_state.command;
+ bool resetPosition = false;
+ // Allowed rate transitions:
+ // Positive <-> negative: Stopped
+ // Negative <-> zero: Stopped
+    // Positive <-> zero: Paused or stopped
+ if ((rate > 0 && m_state.rate <= 0) || (rate < 0 && m_state.rate >= 0)) {
+ if (cmdNow == CmdStart) {
+ // Get the current clock position. This will be the restart time.
+ m_presentationClock->GetCorrelatedTime(0, &hnsClockTime, &hnsSystemTime);
+ Q_ASSERT(hnsSystemTime != 0);
+
+ if (rate < 0 || m_state.rate < 0)
+ m_request.setCommand(CmdSeekResume);
+ else if (isThin || m_state.isThin)
+ m_request.setCommand(CmdStartAndSeek);
+ else
+ m_request.setCommand(CmdStart);
+
+ // We need to stop only when dealing with negative rates
+ if (rate >= 0 && m_state.rate >= 0)
+ pause();
+ else
+ stop();
+
+ // If we deal with negative rates, we stopped the session and consequently
+ // reset the position to zero. We then need to resume to the current position.
+ m_request.start = hnsClockTime / 10000;
+ } else if (cmdNow == CmdPause) {
+ if (rate < 0 || m_state.rate < 0) {
+ // The current state is paused.
+ // For this rate change, the session must be stopped. However, the
+ // session cannot transition back from stopped to paused.
+ // Therefore, this rate transition is not supported while paused.
+ qWarning() << "Unable to change rate from positive to negative or vice versa in paused state";
+ rate = m_state.rate;
+ isThin = m_state.isThin;
+ goto done;
+ }
+
+ // This happens when resuming playback after scrubbing in pause mode.
+ // This transition requires the session to be paused. Even though our
+            // internal state is set to paused, the session might not be, so we
+            // need to enforce it.
+ if (rate > 0 && m_state.rate == 0) {
+ m_state.setCommand(CmdNone);
+ pause();
+ }
+ }
+ } else if (rate == 0 && m_state.rate > 0) {
+ if (cmdNow != CmdPause) {
+ // Transition to paused.
+            // This transition requires the paused state.
+ // Pause and set the rate.
+ pause();
+
+ // Request: Switch back to current state.
+ m_request.setCommand(cmdNow);
+ }
+ } else if (rate == 0 && m_state.rate < 0) {
+        // Changing the rate from negative to zero requires stopping the session
+ m_presentationClock->GetCorrelatedTime(0, &hnsClockTime, &hnsSystemTime);
+
+ m_request.setCommand(CmdSeekResume);
+
+ stop();
+
+ // Resume to the current position (stop() will reset the position to 0)
+ m_request.start = hnsClockTime / 10000;
+ } else if (!isThin && m_state.isThin) {
+ if (cmdNow == CmdStart) {
+ // When thinning, only key frames are read and presented. Going back
+            // to normal playback requires resetting the current position to force
+ // the pipeline to decode the actual frame at the current position
+ // (which might be earlier than the last decoded key frame)
+ resetPosition = true;
+ } else if (cmdNow == CmdPause) {
+ // If paused, don't reset the position until we resume, otherwise
+ // a new frame will be rendered
+ m_presentationClock->GetCorrelatedTime(0, &hnsClockTime, &hnsSystemTime);
+ m_request.setCommand(CmdSeekResume);
+ m_request.start = hnsClockTime / 10000;
+ }
+
+ }
+
+ // Set the rate.
+ if (FAILED(m_rateControl->SetRate(isThin, rate))) {
+        qWarning() << "Failed to set playback rate =" << rate;
+ rate = m_state.rate;
+ isThin = m_state.isThin;
+ goto done;
+ }
+
+ if (resetPosition) {
+ m_presentationClock->GetCorrelatedTime(0, &hnsClockTime, &hnsSystemTime);
+ setPosition(hnsClockTime / 10000);
+ }
+
+done:
+ // Adjust our current rate and requested rate.
+ m_pendingRate = m_request.rate = m_state.rate = rate;
+ if (rate != 0)
+ m_state.isThin = isThin;
+ playbackRateChanged(rate);
+}
+
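+// Scrubbing pauses presentation by setting the playback rate to 0.0 while the
+// session keeps running, so a seek issued while paused still renders a frame.
+// The previous rate is cached in m_restoreRate and restored afterwards.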
+void MFPlayerSession::scrub(bool enableScrub)
+{
+ if (m_scrubbing == enableScrub)
+ return;
+
+ m_scrubbing = enableScrub;
+
+ if (!canScrub()) {
+ if (!enableScrub)
+ m_pendingRate = m_restoreRate;
+ return;
+ }
+
+ if (enableScrub) {
+ // Enter scrubbing mode. Cache the rate.
+ m_restoreRate = m_request.rate;
+ setPlaybackRateInternal(0.0f);
+ } else {
+ // Leaving scrubbing mode. Restore the old rate.
+ setPlaybackRateInternal(m_restoreRate);
+ }
+}
+
+void MFPlayerSession::setVolume(float volume)
+{
+ if (m_volume == volume)
+ return;
+ m_volume = volume;
+
+ if (!m_muted)
+ setVolumeInternal(volume);
+}
+
+void MFPlayerSession::setMuted(bool muted)
+{
+ if (m_muted == muted)
+ return;
+ m_muted = muted;
+
+ setVolumeInternal(muted ? 0 : m_volume);
+}
+
+void MFPlayerSession::setVolumeInternal(float volume)
+{
+ if (m_volumeControl) {
+ quint32 channelCount = 0;
+ if (!SUCCEEDED(m_volumeControl->GetChannelCount(&channelCount))
+ || channelCount == 0)
+ return;
+
+ for (quint32 i = 0; i < channelCount; ++i)
+ m_volumeControl->SetChannelVolume(i, volume);
+ }
+}
+
+float MFPlayerSession::bufferProgress()
+{
+ if (!m_netsourceStatistics)
+ return 0;
+ PROPVARIANT var;
+ PropVariantInit(&var);
+ PROPERTYKEY key;
+ key.fmtid = MFNETSOURCE_STATISTICS;
+ key.pid = MFNETSOURCE_BUFFERPROGRESS_ID;
+ int progress = -1;
+    // GetValue returns S_FALSE when the property is not available. S_FALSE still
+    // counts as success, so we compare against S_OK instead of using SUCCEEDED.
+ if (m_netsourceStatistics->GetValue(key, &var) == S_OK) {
+ progress = var.lVal;
+ PropVariantClear(&var);
+ }
+
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "bufferProgress: progress = " << progress;
+#endif
+
+ return progress/100.;
+}
+
+QMediaTimeRange MFPlayerSession::availablePlaybackRanges()
+{
+ // defaults to the whole media
+ qint64 start = 0;
+ qint64 end = qint64(m_duration / 10000);
+
+ if (m_netsourceStatistics) {
+ PROPVARIANT var;
+ PropVariantInit(&var);
+ PROPERTYKEY key;
+ key.fmtid = MFNETSOURCE_STATISTICS;
+ key.pid = MFNETSOURCE_SEEKRANGESTART_ID;
+        // GetValue returns S_FALSE when the property is not available. S_FALSE still
+        // counts as success, so we compare against S_OK instead of using SUCCEEDED.
+ if (m_netsourceStatistics->GetValue(key, &var) == S_OK) {
+ start = qint64(var.uhVal.QuadPart / 10000);
+ PropVariantClear(&var);
+ PropVariantInit(&var);
+ key.pid = MFNETSOURCE_SEEKRANGEEND_ID;
+ if (m_netsourceStatistics->GetValue(key, &var) == S_OK) {
+ end = qint64(var.uhVal.QuadPart / 10000);
+ PropVariantClear(&var);
+ }
+ }
+ }
+
+ return QMediaTimeRange(start, end);
+}
+
+HRESULT MFPlayerSession::QueryInterface(REFIID riid, void** ppvObject)
+{
+ if (!ppvObject)
+ return E_POINTER;
+ if (riid == IID_IMFAsyncCallback) {
+ *ppvObject = static_cast<IMFAsyncCallback*>(this);
+ } else if (riid == IID_IUnknown) {
+ *ppvObject = static_cast<IUnknown*>(this);
+ } else {
+ *ppvObject = NULL;
+ return E_NOINTERFACE;
+ }
+ return S_OK;
+}
+
+ULONG MFPlayerSession::AddRef(void)
+{
+ return InterlockedIncrement(&m_cRef);
+}
+
+ULONG MFPlayerSession::Release(void)
+{
+ LONG cRef = InterlockedDecrement(&m_cRef);
+ if (cRef == 0) {
+ deleteLater();
+
+ // In rare cases the session has queued events to be run between deleteLater and deleting,
+        // so we set the parent control to nullptr in order to prevent crashes in those cases.
+ m_playerControl = nullptr;
+ }
+ return cRef;
+}
+
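+// Invoked by Media Foundation on one of its worker threads. The event is
+// forwarded through the sessionEvent signal, whose automatic (queued)
+// connection delivers handleSessionEvent() on this object's thread.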
+HRESULT MFPlayerSession::Invoke(IMFAsyncResult *pResult)
+{
+ if (pResult->GetStateNoAddRef() != m_session.Get())
+ return S_OK;
+
+ ComPtr<IMFMediaEvent> pEvent;
+ // Get the event from the event queue.
+ HRESULT hr = m_session->EndGetEvent(pResult, &pEvent);
+ if (FAILED(hr)) {
+ return S_OK;
+ }
+
+ MediaEventType meType = MEUnknown;
+ hr = pEvent->GetType(&meType);
+ if (FAILED(hr)) {
+ return S_OK;
+ }
+
+ if (meType == MESessionClosed) {
+ SetEvent(m_hCloseEvent.get());
+ return S_OK;
+ } else {
+ hr = m_session->BeginGetEvent(this, m_session.Get());
+ if (FAILED(hr)) {
+ return S_OK;
+ }
+ }
+
+ if (!m_closing) {
+ emit sessionEvent(pEvent);
+ }
+ return S_OK;
+}
+
+void MFPlayerSession::handleSessionEvent(const ComPtr<IMFMediaEvent> &sessionEvent)
+{
+ HRESULT hrStatus = S_OK;
+ HRESULT hr = sessionEvent->GetStatus(&hrStatus);
+ if (FAILED(hr) || !m_session) {
+ return;
+ }
+
+ MediaEventType meType = MEUnknown;
+ hr = sessionEvent->GetType(&meType);
+#ifdef DEBUG_MEDIAFOUNDATION
+ if (FAILED(hrStatus))
+ qDebug() << "handleSessionEvent: MediaEventType = " << meType << "Failed";
+ else
+ qDebug() << "handleSessionEvent: MediaEventType = " << meType;
+#endif
+
+ switch (meType) {
+ case MENonFatalError: {
+ PROPVARIANT var;
+ PropVariantInit(&var);
+ sessionEvent->GetValue(&var);
+        qWarning() << "handleSessionEvent: non-fatal error = " << var.ulVal;
+ PropVariantClear(&var);
+ error(QMediaPlayer::ResourceError, tr("Media session non-fatal error."), false);
+ }
+ break;
+ case MESourceUnknown:
+ changeStatus(QMediaPlayer::InvalidMedia);
+ break;
+ case MEError:
+ if (hrStatus == MF_E_ALREADY_INITIALIZED) {
+ // Workaround for a possible WMF issue that causes an error
+ // with some specific videos, which play fine otherwise.
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "handleSessionEvent: ignoring MF_E_ALREADY_INITIALIZED";
+#endif
+ break;
+ }
+ changeStatus(QMediaPlayer::InvalidMedia);
+ qWarning() << "handleSessionEvent: serious error = "
+ << Qt::showbase << Qt::hex << Qt::uppercasedigits << static_cast<quint32>(hrStatus);
+ switch (hrStatus) {
+ case MF_E_NET_READ:
+ error(QMediaPlayer::NetworkError, tr("Error reading from the network."), true);
+ break;
+ case MF_E_NET_WRITE:
+ error(QMediaPlayer::NetworkError, tr("Error writing to the network."), true);
+ break;
+ case NS_E_FIREWALL:
+ error(QMediaPlayer::NetworkError, tr("Network packets might be blocked by a firewall."), true);
+ break;
+ case MF_E_MEDIAPROC_WRONGSTATE:
+ error(QMediaPlayer::ResourceError, tr("Media session state error."), true);
+ break;
+ case MF_E_INVALID_STREAM_DATA:
+ error(QMediaPlayer::ResourceError, tr("Invalid stream data."), true);
+ break;
+ default:
+ error(QMediaPlayer::ResourceError, tr("Media session serious error."), true);
+ break;
+ }
+ break;
+ case MESessionRateChanged:
+ // If the rate change succeeded, we've already got the rate
+ // cached. If it failed, try to get the actual rate.
+ if (FAILED(hrStatus)) {
+ PROPVARIANT var;
+ PropVariantInit(&var);
+ if (SUCCEEDED(sessionEvent->GetValue(&var)) && (var.vt == VT_R4)) {
+ m_state.rate = var.fltVal;
+ }
+ playbackRateChanged(playbackRate());
+ }
+ break;
+ case MESessionScrubSampleComplete :
+ if (m_scrubbing)
+ updatePendingCommands(CmdStart);
+ break;
+ case MESessionStarted:
+ if (m_status == QMediaPlayer::EndOfMedia
+ || m_status == QMediaPlayer::LoadedMedia) {
+ // If the session started, then enough data is buffered to play
+ changeStatus(QMediaPlayer::BufferedMedia);
+ }
+
+ updatePendingCommands(CmdStart);
+ m_signalPositionChangeTimer.start();
+ break;
+ case MESessionStopped:
+ if (m_status != QMediaPlayer::EndOfMedia) {
+ m_position = 0;
+
+ // Reset to Loaded status unless we are loading a new media
+ // or changing the playback rate to negative values (stop required)
+ if (m_status != QMediaPlayer::LoadingMedia && m_request.command != CmdSeekResume)
+ changeStatus(QMediaPlayer::LoadedMedia);
+ }
+ updatePendingCommands(CmdStop);
+ m_signalPositionChangeTimer.stop();
+ break;
+ case MESessionPaused:
+ m_position = position() * 10000;
+ updatePendingCommands(CmdPause);
+ m_signalPositionChangeTimer.stop();
+ if (m_status == QMediaPlayer::LoadedMedia)
+ setPosition(position());
+ break;
+ case MEReconnectStart:
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "MEReconnectStart" << ((hrStatus == S_OK) ? "OK" : "Failed");
+#endif
+ break;
+ case MEReconnectEnd:
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "MEReconnectEnd" << ((hrStatus == S_OK) ? "OK" : "Failed");
+#endif
+ break;
+ case MESessionTopologySet:
+ if (FAILED(hrStatus)) {
+ changeStatus(QMediaPlayer::InvalidMedia);
+ error(QMediaPlayer::FormatError, tr("Unsupported media, a codec is missing."), true);
+ } else {
+            // The topology is resolved and successfully set; this happens only after loading new media.
+ // Make sure we always start the media from the beginning
+ m_lastPosition = -1;
+ m_position = 0;
+ positionChanged(0);
+ changeStatus(QMediaPlayer::LoadedMedia);
+ }
+ break;
+ }
+
+ if (FAILED(hrStatus)) {
+ return;
+ }
+
+ switch (meType) {
+ case MEBufferingStarted:
+ changeStatus(QMediaPlayer::StalledMedia);
+ bufferProgressChanged(bufferProgress());
+ break;
+ case MEBufferingStopped:
+ changeStatus(QMediaPlayer::BufferedMedia);
+ bufferProgressChanged(bufferProgress());
+ break;
+ case MESessionEnded:
+ m_pendingState = NoPending;
+ m_state.command = CmdStop;
+ m_state.prevCmd = CmdNone;
+ m_request.command = CmdNone;
+ m_request.prevCmd = CmdNone;
+
+ //keep reporting the final position after end of media
+ m_position = qint64(m_duration);
+ positionChanged(position());
+
+ changeStatus(QMediaPlayer::EndOfMedia);
+ break;
+ case MEEndOfPresentationSegment:
+ break;
+ case MESessionTopologyStatus: {
+ UINT32 status;
+ if (SUCCEEDED(sessionEvent->GetUINT32(MF_EVENT_TOPOLOGY_STATUS, &status))) {
+ if (status == MF_TOPOSTATUS_READY) {
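+                // The topology is ready: cache the presentation clock and the
+                // rate, volume and network-statistics services for later use.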
+ ComPtr<IMFClock> clock;
+ if (SUCCEEDED(m_session->GetClock(&clock))) {
+ clock->QueryInterface(IID_IMFPresentationClock, &m_presentationClock);
+ }
+
+ if (SUCCEEDED(MFGetService(m_session.Get(), MF_RATE_CONTROL_SERVICE,
+ IID_PPV_ARGS(&m_rateControl)))) {
+ if (SUCCEEDED(MFGetService(m_session.Get(), MF_RATE_CONTROL_SERVICE,
+ IID_PPV_ARGS(&m_rateSupport)))) {
+ if (SUCCEEDED(m_rateSupport->IsRateSupported(TRUE, 0, NULL)))
+ m_canScrub = true;
+ }
+ BOOL isThin = FALSE;
+ float rate = 1;
+ if (SUCCEEDED(m_rateControl->GetRate(&isThin, &rate))) {
+ if (m_pendingRate != rate) {
+ m_state.rate = m_request.rate = rate;
+ setPlaybackRate(m_pendingRate);
+ }
+ }
+ }
+ MFGetService(m_session.Get(), MFNETSOURCE_STATISTICS_SERVICE,
+ IID_PPV_ARGS(&m_netsourceStatistics));
+
+ if (SUCCEEDED(MFGetService(m_session.Get(), MR_STREAM_VOLUME_SERVICE,
+ IID_PPV_ARGS(&m_volumeControl))))
+ setVolumeInternal(m_muted ? 0 : m_volume);
+
+ m_updatingTopology = false;
+ stop();
+ }
+ }
+ }
+ break;
+ default:
+ break;
+ }
+}
+
+void MFPlayerSession::updatePendingCommands(Command command)
+{
+ positionChanged(position());
+ if (m_state.command != command || m_pendingState == NoPending)
+ return;
+
+ // Seek while paused completed
+ if (m_pendingState == SeekPending && m_state.prevCmd == CmdPause) {
+ m_pendingState = NoPending;
+ // A seek operation actually restarts playback. If scrubbing is possible, playback rate
+ // is set to 0.0 at this point and we just need to reset the current state to Pause.
+ // If scrubbing is not possible, the playback rate was not changed and we explicitly need
+ // to re-pause playback.
+ if (!canScrub())
+ pause();
+ else
+ m_state.setCommand(CmdPause);
+ }
+
+ m_pendingState = NoPending;
+
+ //First look for rate changes.
+ if (m_request.rate != m_state.rate) {
+ commitRateChange(m_request.rate, m_request.isThin);
+ }
+
+ // Now look for new requests.
+ if (m_pendingState == NoPending) {
+ switch (m_request.command) {
+ case CmdStart:
+ start();
+ break;
+ case CmdPause:
+ pause();
+ break;
+ case CmdStop:
+ stop();
+ break;
+ case CmdSeek:
+ case CmdSeekResume:
+ setPositionInternal(m_request.start, m_request.command);
+ break;
+ case CmdStartAndSeek:
+ start();
+ setPositionInternal(m_request.start, m_request.command);
+ break;
+ default:
+ break;
+ }
+ m_request.setCommand(CmdNone);
+ }
+}
+
+bool MFPlayerSession::canScrub() const
+{
+ return m_canScrub && m_rateSupport && m_rateControl;
+}
+
+void MFPlayerSession::clear()
+{
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "MFPlayerSession::clear";
+#endif
+ m_mediaTypes = 0;
+ m_canScrub = false;
+
+ m_pendingState = NoPending;
+ m_state.command = CmdStop;
+ m_state.prevCmd = CmdNone;
+ m_request.command = CmdNone;
+ m_request.prevCmd = CmdNone;
+
+ for (int i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i) {
+ m_trackInfo[i].metaData.clear();
+ m_trackInfo[i].nativeIndexes.clear();
+ m_trackInfo[i].currentIndex = -1;
+ m_trackInfo[i].sourceNodeId = TOPOID(-1);
+ m_trackInfo[i].outputNodeId = TOPOID(-1);
+ m_trackInfo[i].format = GUID_NULL;
+ }
+
+ if (!m_metaData.isEmpty()) {
+ m_metaData.clear();
+ metaDataChanged();
+ }
+
+ m_presentationClock.Reset();
+ m_rateControl.Reset();
+ m_rateSupport.Reset();
+ m_volumeControl.Reset();
+ m_netsourceStatistics.Reset();
+}
+
+void MFPlayerSession::setAudioOutput(QPlatformAudioOutput *device)
+{
+ if (m_audioOutput == device)
+ return;
+
+ if (m_audioOutput)
+ m_audioOutput->q->disconnect(this);
+
+ m_audioOutput = device;
+ if (m_audioOutput) {
+ setMuted(m_audioOutput->q->isMuted());
+ setVolume(m_audioOutput->q->volume());
+ updateOutputRouting();
+ connect(m_audioOutput->q, &QAudioOutput::deviceChanged, this, &MFPlayerSession::updateOutputRouting);
+ connect(m_audioOutput->q, &QAudioOutput::volumeChanged, this, &MFPlayerSession::setVolume);
+ connect(m_audioOutput->q, &QAudioOutput::mutedChanged, this, &MFPlayerSession::setMuted);
+ }
+}
+
+void MFPlayerSession::updateOutputRouting()
+{
+ int currentAudioTrack = m_trackInfo[QPlatformMediaPlayer::AudioStream].currentIndex;
+ if (currentAudioTrack > -1)
+ setActiveTrack(QPlatformMediaPlayer::AudioStream, currentAudioTrack);
+}
+
+void MFPlayerSession::setVideoSink(QVideoSink *sink)
+{
+ m_videoRendererControl->setSink(sink);
+}
+
+void MFPlayerSession::setActiveTrack(QPlatformMediaPlayer::TrackType type, int index)
+{
+ if (!m_session)
+ return;
+
+ // Only audio track selection is currently supported.
+ if (type != QPlatformMediaPlayer::AudioStream)
+ return;
+
+ const auto &nativeIndexes = m_trackInfo[type].nativeIndexes;
+
+ if (index < -1 || index >= nativeIndexes.count())
+ return;
+
+ // Updating the topology fails if there is a HEVC video stream,
+ // which causes other issues. Ignoring the change, for now.
+ if (m_trackInfo[QPlatformMediaPlayer::VideoStream].format == MFVideoFormat_HEVC)
+ return;
+
+ ComPtr<IMFTopology> topology;
+
+ if (SUCCEEDED(m_session->GetFullTopology(QMM_MFSESSION_GETFULLTOPOLOGY_CURRENT, 0, &topology))) {
+
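+        // Remember the current position (in 100-ns units) so playback can be
+        // restored there once the rebuilt topology has been set.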
+ m_restorePosition = position() * 10000;
+
+ if (m_state.command == CmdStart)
+ stop();
+
+ if (m_trackInfo[type].outputNodeId != TOPOID(-1)) {
+ ComPtr<IMFTopologyNode> node;
+ if (SUCCEEDED(topology->GetNodeByID(m_trackInfo[type].outputNodeId, &node))) {
+ topology->RemoveNode(node.Get());
+ m_trackInfo[type].outputNodeId = TOPOID(-1);
+ }
+ }
+ if (m_trackInfo[type].sourceNodeId != TOPOID(-1)) {
+ ComPtr<IMFTopologyNode> node;
+ if (SUCCEEDED(topology->GetNodeByID(m_trackInfo[type].sourceNodeId, &node))) {
+ topology->RemoveNode(node.Get());
+ m_trackInfo[type].sourceNodeId = TOPOID(-1);
+ }
+ }
+
+ IMFMediaSource *mediaSource = m_sourceResolver->mediaSource();
+
+ ComPtr<IMFPresentationDescriptor> sourcePD;
+ if (SUCCEEDED(mediaSource->CreatePresentationDescriptor(&sourcePD))) {
+
+ if (m_trackInfo[type].currentIndex >= 0 && m_trackInfo[type].currentIndex < nativeIndexes.count())
+ sourcePD->DeselectStream(nativeIndexes.at(m_trackInfo[type].currentIndex));
+
+ m_trackInfo[type].currentIndex = index;
+
+ if (index == -1) {
+ m_session->SetTopology(MFSESSION_SETTOPOLOGY_IMMEDIATE, topology.Get());
+ } else {
+ int nativeIndex = nativeIndexes.at(index);
+ sourcePD->SelectStream(nativeIndex);
+
+ ComPtr<IMFStreamDescriptor> streamDesc;
+ BOOL selected = FALSE;
+
+ if (SUCCEEDED(sourcePD->GetStreamDescriptorByIndex(nativeIndex, &selected, &streamDesc))) {
+ ComPtr<IMFTopologyNode> sourceNode = addSourceNode(
+ topology.Get(), mediaSource, sourcePD.Get(), streamDesc.Get());
+ if (sourceNode) {
+ ComPtr<IMFTopologyNode> outputNode =
+ addOutputNode(MFPlayerSession::Audio, topology.Get(), 0);
+ if (outputNode) {
+ if (SUCCEEDED(sourceNode->ConnectOutput(0, outputNode.Get(), 0))) {
+ sourceNode->GetTopoNodeID(&m_trackInfo[type].sourceNodeId);
+ outputNode->GetTopoNodeID(&m_trackInfo[type].outputNodeId);
+ m_session->SetTopology(MFSESSION_SETTOPOLOGY_IMMEDIATE,
+ topology.Get());
+ }
+ }
+ }
+ }
+ }
+ m_updatingTopology = true;
+ }
+ }
+}
+
+int MFPlayerSession::activeTrack(QPlatformMediaPlayer::TrackType type)
+{
+ if (type >= QPlatformMediaPlayer::NTrackTypes)
+ return -1;
+ return m_trackInfo[type].currentIndex;
+}
+
+int MFPlayerSession::trackCount(QPlatformMediaPlayer::TrackType type)
+{
+ if (type >= QPlatformMediaPlayer::NTrackTypes)
+ return -1;
+ return m_trackInfo[type].metaData.count();
+}
+
+QMediaMetaData MFPlayerSession::trackMetaData(QPlatformMediaPlayer::TrackType type, int trackNumber)
+{
+ if (type >= QPlatformMediaPlayer::NTrackTypes)
+ return {};
+
+ if (trackNumber < 0 || trackNumber >= m_trackInfo[type].metaData.count())
+ return {};
+
+ return m_trackInfo[type].metaData.at(trackNumber);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_mfplayersession_p.cpp"
diff --git a/src/plugins/multimedia/windows/player/mfplayersession_p.h b/src/plugins/multimedia/windows/player/mfplayersession_p.h
new file mode 100644
index 000000000..50141a7fb
--- /dev/null
+++ b/src/plugins/multimedia/windows/player/mfplayersession_p.h
@@ -0,0 +1,240 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef MFPLAYERSESSION_H
+#define MFPLAYERSESSION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <mfapi.h>
+#include <mfidl.h>
+
+#include "qmediaplayer.h"
+#include "qmediatimerange.h"
+
+#include <QtCore/qcoreevent.h>
+#include <QtCore/qmutex.h>
+#include <QtCore/qurl.h>
+#include <QtCore/qwaitcondition.h>
+#include <QtMultimedia/qaudioformat.h>
+#include <QtMultimedia/qvideoframeformat.h>
+#include <qaudiodevice.h>
+#include <qtimer.h>
+#include "mfplayercontrol_p.h"
+#include <private/qcomptr_p.h>
+#include <evrhelpers_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QUrl;
+
+class SourceResolver;
+class MFVideoRendererControl;
+class MFPlayerControl;
+class MFPlayerService;
+class AudioSampleGrabberCallback;
+class MFTransform;
+
+class MFPlayerSession : public QObject, public IMFAsyncCallback
+{
+ Q_OBJECT
+ friend class SourceResolver;
+public:
+ MFPlayerSession(MFPlayerControl *playerControl = 0);
+
+ STDMETHODIMP QueryInterface(REFIID riid, LPVOID *ppvObject) override;
+
+ STDMETHODIMP_(ULONG) AddRef(void) override;
+
+ STDMETHODIMP_(ULONG) Release(void) override;
+
+ STDMETHODIMP Invoke(IMFAsyncResult *pResult) override;
+
+ STDMETHODIMP GetParameters(DWORD *pdwFlags, DWORD *pdwQueue) override
+ {
+ Q_UNUSED(pdwFlags);
+ Q_UNUSED(pdwQueue);
+ return E_NOTIMPL;
+ }
+
+ void load(const QUrl &media, QIODevice *stream);
+ void stop(bool immediate = false);
+ void start();
+ void pause();
+
+ QMediaPlayer::MediaStatus status() const;
+ qint64 position();
+ void setPosition(qint64 position);
+ qreal playbackRate() const;
+ void setPlaybackRate(qreal rate);
+ float bufferProgress();
+ QMediaTimeRange availablePlaybackRanges();
+
+ void changeStatus(QMediaPlayer::MediaStatus newStatus);
+
+ void close();
+
+ void setAudioOutput(QPlatformAudioOutput *device);
+
+ QMediaMetaData metaData() const { return m_metaData; }
+
+ void setVideoSink(QVideoSink *sink);
+
+ void setActiveTrack(QPlatformMediaPlayer::TrackType type, int index);
+ int activeTrack(QPlatformMediaPlayer::TrackType type);
+ int trackCount(QPlatformMediaPlayer::TrackType);
+ QMediaMetaData trackMetaData(QPlatformMediaPlayer::TrackType type, int trackNumber);
+
+ void setPlayerControl(MFPlayerControl *playerControl) { m_playerControl = playerControl; }
+
+ void statusChanged() { if (m_playerControl) m_playerControl->handleStatusChanged(); }
+ void tracksChanged() { if (m_playerControl) m_playerControl->handleTracksChanged(); }
+ void audioAvailable() { if (m_playerControl) m_playerControl->handleAudioAvailable(); }
+ void videoAvailable() { if (m_playerControl) m_playerControl->handleVideoAvailable(); }
+ void durationUpdate(qint64 duration) { if (m_playerControl) m_playerControl->handleDurationUpdate(duration); }
+ void seekableUpdate(bool seekable) { if (m_playerControl) m_playerControl->handleSeekableUpdate(seekable); }
+ void error(QMediaPlayer::Error error, QString errorString, bool isFatal) { if (m_playerControl) m_playerControl->handleError(error, errorString, isFatal); }
+ void playbackRateChanged(qreal rate) { if (m_playerControl) m_playerControl->playbackRateChanged(rate); }
+ void bufferProgressChanged(float percentFilled) { if (m_playerControl) m_playerControl->bufferProgressChanged(percentFilled); }
+ void metaDataChanged() { if (m_playerControl) m_playerControl->metaDataChanged(); }
+ void positionChanged(qint64 position) { if (m_playerControl) m_playerControl->positionChanged(position); }
+
+public Q_SLOTS:
+ void setVolume(float volume);
+ void setMuted(bool muted);
+ void updateOutputRouting();
+
+Q_SIGNALS:
+ void sessionEvent(const ComPtr<IMFMediaEvent> &sessionEvent);
+
+private Q_SLOTS:
+ void handleMediaSourceReady();
+ void handleSessionEvent(const ComPtr<IMFMediaEvent> &sessionEvent);
+ void handleSourceError(long hr);
+ void timeout();
+
+private:
+ long m_cRef;
+ MFPlayerControl *m_playerControl = nullptr;
+ MFVideoRendererControl *m_videoRendererControl = nullptr;
+ ComPtr<IMFMediaSession> m_session;
+ ComPtr<IMFPresentationClock> m_presentationClock;
+ ComPtr<IMFRateControl> m_rateControl;
+ ComPtr<IMFRateSupport> m_rateSupport;
+ ComPtr<IMFAudioStreamVolume> m_volumeControl;
+ ComPtr<IPropertyStore> m_netsourceStatistics;
+ qint64 m_position = 0;
+ qint64 m_restorePosition = -1;
+ qint64 m_timeCounter = 0;
+ UINT64 m_duration = 0;
+ bool m_updatingTopology = false;
+ bool m_updateRoutingOnStart = false;
+
+ enum Command
+ {
+ CmdNone = 0,
+ CmdStop,
+ CmdStart,
+ CmdPause,
+ CmdSeek,
+ CmdSeekResume,
+ CmdStartAndSeek
+ };
+
+ void clear();
+ void setPositionInternal(qint64 position, Command requestCmd);
+ void setPlaybackRateInternal(qreal rate);
+ void commitRateChange(qreal rate, BOOL isThin);
+ bool canScrub() const;
+ void scrub(bool enableScrub);
+ bool m_scrubbing;
+ float m_restoreRate;
+
+ ComPtr<SourceResolver> m_sourceResolver;
+ EventHandle m_hCloseEvent;
+ bool m_closing;
+
+ enum MediaType
+ {
+ Unknown = 0,
+ Audio = 1,
+ Video = 2,
+ };
+ DWORD m_mediaTypes;
+
+ enum PendingState
+ {
+ NoPending = 0,
+ CmdPending,
+ SeekPending,
+ };
+
+ struct SeekState
+ {
+ void setCommand(Command cmd) {
+ prevCmd = command;
+ command = cmd;
+ }
+ Command command;
+ Command prevCmd;
+ float rate; // Playback rate
+ BOOL isThin; // Thinned playback?
+ qint64 start; // Start position
+ };
+ SeekState m_state; // Current nominal state.
+ SeekState m_request; // Pending request.
+ PendingState m_pendingState;
+ float m_pendingRate;
+ void updatePendingCommands(Command command);
+
+ struct TrackInfo
+ {
+ QList<QMediaMetaData> metaData;
+ QList<int> nativeIndexes;
+ int currentIndex = -1;
+ TOPOID sourceNodeId = -1;
+ TOPOID outputNodeId = -1;
+ GUID format = GUID_NULL;
+ };
+ TrackInfo m_trackInfo[QPlatformMediaPlayer::NTrackTypes];
+
+ QMediaPlayer::MediaStatus m_status;
+ bool m_canScrub;
+ float m_volume = 1.;
+ bool m_muted = false;
+
+ QPlatformAudioOutput *m_audioOutput = nullptr;
+ QMediaMetaData m_metaData;
+
+ void setVolumeInternal(float volume);
+
+ bool createSession();
+ void setupPlaybackTopology(IMFMediaSource *source, IMFPresentationDescriptor *sourcePD);
+ bool getStreamInfo(IMFStreamDescriptor *stream, MFPlayerSession::MediaType *type, QString *name, QString *language, GUID *format) const;
+ ComPtr<IMFTopologyNode> addSourceNode(IMFTopology *topology, IMFMediaSource *source,
+ IMFPresentationDescriptor *presentationDesc,
+ IMFStreamDescriptor *streamDesc);
+ ComPtr<IMFTopologyNode> addOutputNode(MediaType mediaType, IMFTopology *topology, DWORD sinkID);
+
+ QAudioFormat audioFormatForMFMediaType(IMFMediaType *mediaType) const;
+
+ ComPtr<IMFTopology> insertMFT(const ComPtr<IMFTopology> &topology, TOPOID outputNodeId);
+ bool insertResizer(IMFTopology *topology);
+ void insertColorConverter(IMFTopology *topology, TOPOID outputNodeId);
+
+ QTimer m_signalPositionChangeTimer;
+ qint64 m_lastPosition = -1;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/windows/player/mfvideorenderercontrol.cpp b/src/plugins/multimedia/windows/player/mfvideorenderercontrol.cpp
new file mode 100644
index 000000000..7c79c3a8a
--- /dev/null
+++ b/src/plugins/multimedia/windows/player/mfvideorenderercontrol.cpp
@@ -0,0 +1,152 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "mfvideorenderercontrol_p.h"
+#include "mfactivate_p.h"
+
+#include "evrcustompresenter_p.h"
+
+#include <private/qplatformvideosink_p.h>
+
+QT_BEGIN_NAMESPACE
+
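+// IMFActivate implementation that creates the custom EVR presenter on demand;
+// the sink and crop rectangle can be updated before or after activation.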
+class EVRCustomPresenterActivate : public MFAbstractActivate
+{
+public:
+ EVRCustomPresenterActivate(QVideoSink *sink);
+
+ STDMETHODIMP ActivateObject(REFIID riid, void **ppv) override;
+ STDMETHODIMP ShutdownObject() override;
+ STDMETHODIMP DetachObject() override;
+
+ void setSink(QVideoSink *sink);
+ void setCropRect(QRect cropRect);
+
+private:
+    // The destructor is not public; callers must release the object via Release().
+ ~EVRCustomPresenterActivate() override { }
+
+ EVRCustomPresenter *m_presenter;
+ QVideoSink *m_videoSink;
+ QRect m_cropRect;
+ QMutex m_mutex;
+};
+
+
+MFVideoRendererControl::MFVideoRendererControl(QObject *parent)
+ : QObject(parent)
+{
+}
+
+MFVideoRendererControl::~MFVideoRendererControl()
+{
+ releaseActivate();
+}
+
+void MFVideoRendererControl::releaseActivate()
+{
+ if (m_sink)
+ m_sink->platformVideoSink()->setVideoFrame(QVideoFrame());
+
+ if (m_presenterActivate) {
+ m_presenterActivate->ShutdownObject();
+ m_presenterActivate->Release();
+ m_presenterActivate = NULL;
+ }
+
+ if (m_currentActivate) {
+ m_currentActivate->ShutdownObject();
+ m_currentActivate->Release();
+ }
+ m_currentActivate = NULL;
+}
+
+void MFVideoRendererControl::setSink(QVideoSink *sink)
+{
+ m_sink = sink;
+
+ if (m_presenterActivate)
+ m_presenterActivate->setSink(m_sink);
+}
+
+void MFVideoRendererControl::setCropRect(const QRect &cropRect)
+{
+ if (m_presenterActivate)
+ m_presenterActivate->setCropRect(cropRect);
+}
+
+IMFActivate* MFVideoRendererControl::createActivate()
+{
+ releaseActivate();
+
+ if (m_sink) {
+ // Create the EVR media sink, but replace the presenter with our own
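+        // The activate needs a window handle, but frames never reach that
+        // window: the custom presenter forwards them to the QVideoSink instead.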
+ if (SUCCEEDED(MFCreateVideoRendererActivate(::GetShellWindow(), &m_currentActivate))) {
+ m_presenterActivate = new EVRCustomPresenterActivate(m_sink);
+ m_currentActivate->SetUnknown(MF_ACTIVATE_CUSTOM_VIDEO_PRESENTER_ACTIVATE, m_presenterActivate);
+ }
+ }
+
+ return m_currentActivate;
+}
+
+EVRCustomPresenterActivate::EVRCustomPresenterActivate(QVideoSink *sink)
+ : MFAbstractActivate()
+ , m_presenter(0)
+ , m_videoSink(sink)
+{ }
+
+HRESULT EVRCustomPresenterActivate::ActivateObject(REFIID riid, void **ppv)
+{
+ if (!ppv)
+ return E_INVALIDARG;
+ QMutexLocker locker(&m_mutex);
+ if (!m_presenter) {
+ m_presenter = new EVRCustomPresenter(m_videoSink);
+ m_presenter->setCropRect(m_cropRect);
+ }
+ return m_presenter->QueryInterface(riid, ppv);
+}
+
+HRESULT EVRCustomPresenterActivate::ShutdownObject()
+{
+ // The presenter does not implement IMFShutdown so
+ // this function is the same as DetachObject()
+ return DetachObject();
+}
+
+HRESULT EVRCustomPresenterActivate::DetachObject()
+{
+ QMutexLocker locker(&m_mutex);
+ if (m_presenter) {
+ m_presenter->Release();
+ m_presenter = 0;
+ }
+ return S_OK;
+}
+
+void EVRCustomPresenterActivate::setSink(QVideoSink *sink)
+{
+ QMutexLocker locker(&m_mutex);
+ if (m_videoSink == sink)
+ return;
+
+ m_videoSink = sink;
+
+ if (m_presenter)
+ m_presenter->setSink(sink);
+}
+
+void EVRCustomPresenterActivate::setCropRect(QRect cropRect)
+{
+ QMutexLocker locker(&m_mutex);
+ if (m_cropRect == cropRect)
+ return;
+
+ m_cropRect = cropRect;
+
+ if (m_presenter)
+ m_presenter->setCropRect(cropRect);
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/windows/player/mfvideorenderercontrol_p.h b/src/plugins/multimedia/windows/player/mfvideorenderercontrol_p.h
new file mode 100644
index 000000000..ed5195240
--- /dev/null
+++ b/src/plugins/multimedia/windows/player/mfvideorenderercontrol_p.h
@@ -0,0 +1,47 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef MFVIDEORENDERERCONTROL_H
+#define MFVIDEORENDERERCONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qobject.h>
+#include <qpointer.h>
+#include <qrect.h>
+#include <mfobjects.h>
+
+QT_BEGIN_NAMESPACE
+class EVRCustomPresenterActivate;
+class QVideoSink;
+
+class MFVideoRendererControl : public QObject
+{
+public:
+ MFVideoRendererControl(QObject *parent = 0);
+ ~MFVideoRendererControl();
+
+ void setSink(QVideoSink *surface);
+ void setCropRect(const QRect &cropRect);
+
+ IMFActivate* createActivate();
+ void releaseActivate();
+
+private:
+ QPointer<QVideoSink> m_sink;
+ IMFActivate *m_currentActivate = nullptr;
+ EVRCustomPresenterActivate *m_presenterActivate = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/windows/qwindowsformatinfo.cpp b/src/plugins/multimedia/windows/qwindowsformatinfo.cpp
new file mode 100644
index 000000000..6ef1f7f7f
--- /dev/null
+++ b/src/plugins/multimedia/windows/qwindowsformatinfo.cpp
@@ -0,0 +1,187 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwindowsformatinfo_p.h"
+
+#include <mfapi.h>
+#include <mftransform.h>
+#include <private/qcomptr_p.h>
+#include <private/qwindowsmultimediautils_p.h>
+#include <private/qcomtaskresource_p.h>
+
+#include <QtCore/qlist.h>
+#include <QtCore/qset.h>
+#include <QtCore/qhash.h>
+#include <QtGui/qimagewriter.h>
+
+QT_BEGIN_NAMESPACE
+
+namespace {
+
+template<typename T>
+using CheckedCodecs = QHash<QPair<T, QMediaFormat::ConversionMode>, bool>;
+
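+// Probe for a usable Media Foundation transform in the given category. The
+// type acts as the MFTEnumEx input type when decoding and as the output type
+// when encoding; enumeration alone is not enough, so also try to instantiate
+// one of the matches.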
+bool isSupportedMFT(const GUID &category, const MFT_REGISTER_TYPE_INFO &type, QMediaFormat::ConversionMode mode)
+{
+ UINT32 count = 0;
+ IMFActivate **activateArrayRaw = nullptr;
+ HRESULT hr = MFTEnumEx(
+ category,
+ MFT_ENUM_FLAG_ALL,
+ (mode == QMediaFormat::Encode) ? nullptr : &type, // Input type
+ (mode == QMediaFormat::Encode) ? &type : nullptr, // Output type
+ &activateArrayRaw,
+ &count
+ );
+
+ if (FAILED(hr))
+ return false;
+
+ QComTaskResource<IMFActivate *[], QComDeleter> activateArray(activateArrayRaw, count);
+ for (UINT32 i = 0; i < count; ++i) {
+ ComPtr<IMFTransform> transform;
+ hr = activateArray[i]->ActivateObject(IID_PPV_ARGS(transform.GetAddressOf()));
+ if (SUCCEEDED(hr))
+ return true;
+ }
+
+ return false;
+}
+
+bool isSupportedCodec(QMediaFormat::AudioCodec codec, QMediaFormat::ConversionMode mode)
+{
+ return isSupportedMFT((mode == QMediaFormat::Encode) ? MFT_CATEGORY_AUDIO_ENCODER : MFT_CATEGORY_AUDIO_DECODER,
+ { MFMediaType_Audio, QWindowsMultimediaUtils::audioFormatForCodec(codec) },
+ mode);
+}
+
+bool isSupportedCodec(QMediaFormat::VideoCodec codec, QMediaFormat::ConversionMode mode)
+{
+ return isSupportedMFT((mode == QMediaFormat::Encode) ? MFT_CATEGORY_VIDEO_ENCODER : MFT_CATEGORY_VIDEO_DECODER,
+ { MFMediaType_Video, QWindowsMultimediaUtils::videoFormatForCodec(codec) },
+ mode);
+}
+
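+// Memoizing overload: enumerating and activating MFTs is expensive, so cache
+// the result per (codec, conversion mode) pair.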
+template <typename T>
+bool isSupportedCodec(T codec, QMediaFormat::ConversionMode m, CheckedCodecs<T> &checkedCodecs)
+{
+ if (auto it = checkedCodecs.constFind(qMakePair(codec, m)); it != checkedCodecs.constEnd())
+ return it.value();
+
+ const bool supported = isSupportedCodec(codec, m);
+
+ checkedCodecs.insert(qMakePair(codec, m), supported);
+ return supported;
+}
+
+}
+
+static QList<QImageCapture::FileFormat> getImageFormatList()
+{
+ QList<QImageCapture::FileFormat> list;
+ const auto formats = QImageWriter::supportedImageFormats();
+
+ for (const auto &f : formats) {
+ auto format = QString::fromUtf8(f);
+ if (format.compare(QLatin1String("jpg"), Qt::CaseInsensitive) == 0)
+ list.append(QImageCapture::FileFormat::JPEG);
+ else if (format.compare(QLatin1String("png"), Qt::CaseInsensitive) == 0)
+ list.append(QImageCapture::FileFormat::PNG);
+ else if (format.compare(QLatin1String("webp"), Qt::CaseInsensitive) == 0)
+ list.append(QImageCapture::FileFormat::WebP);
+ else if (format.compare(QLatin1String("tiff"), Qt::CaseInsensitive) == 0)
+ list.append(QImageCapture::FileFormat::Tiff);
+ }
+
+ return list;
+}
+
+QWindowsFormatInfo::QWindowsFormatInfo()
+{
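+    // The table below lists every container/codec combination the backend is
+    // prepared to handle; entries are filtered against the MFTs actually
+    // available on this machine before being published.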
+ const QList<CodecMap> containerTable = {
+ { QMediaFormat::MPEG4,
+ { QMediaFormat::AudioCodec::AAC, QMediaFormat::AudioCodec::MP3, QMediaFormat::AudioCodec::ALAC, QMediaFormat::AudioCodec::AC3, QMediaFormat::AudioCodec::EAC3 },
+ { QMediaFormat::VideoCodec::H264, QMediaFormat::VideoCodec::H265, QMediaFormat::VideoCodec::MotionJPEG } },
+ { QMediaFormat::Matroska,
+ { QMediaFormat::AudioCodec::AAC, QMediaFormat::AudioCodec::MP3, QMediaFormat::AudioCodec::ALAC, QMediaFormat::AudioCodec::AC3, QMediaFormat::AudioCodec::EAC3, QMediaFormat::AudioCodec::FLAC, QMediaFormat::AudioCodec::Vorbis, QMediaFormat::AudioCodec::Opus },
+ { QMediaFormat::VideoCodec::H264, QMediaFormat::VideoCodec::H265, QMediaFormat::VideoCodec::VP8, QMediaFormat::VideoCodec::VP9, QMediaFormat::VideoCodec::MotionJPEG } },
+ { QMediaFormat::WebM,
+ { QMediaFormat::AudioCodec::Vorbis, QMediaFormat::AudioCodec::Opus },
+ { QMediaFormat::VideoCodec::VP8, QMediaFormat::VideoCodec::VP9 } },
+ { QMediaFormat::QuickTime,
+ { QMediaFormat::AudioCodec::AAC, QMediaFormat::AudioCodec::MP3, QMediaFormat::AudioCodec::ALAC, QMediaFormat::AudioCodec::AC3, QMediaFormat::AudioCodec::EAC3 },
+ { QMediaFormat::VideoCodec::H264, QMediaFormat::VideoCodec::H265, QMediaFormat::VideoCodec::MotionJPEG } },
+ { QMediaFormat::AAC,
+ { QMediaFormat::AudioCodec::AAC },
+ {} },
+ { QMediaFormat::MP3,
+ { QMediaFormat::AudioCodec::MP3 },
+ {} },
+ { QMediaFormat::FLAC,
+ { QMediaFormat::AudioCodec::FLAC },
+ {} },
+ { QMediaFormat::Mpeg4Audio,
+ { QMediaFormat::AudioCodec::AAC, QMediaFormat::AudioCodec::MP3, QMediaFormat::AudioCodec::ALAC, QMediaFormat::AudioCodec::AC3, QMediaFormat::AudioCodec::EAC3 },
+ {} },
+ { QMediaFormat::WMA,
+ { QMediaFormat::AudioCodec::WMA },
+ {} },
+ { QMediaFormat::WMV,
+ { QMediaFormat::AudioCodec::WMA },
+ { QMediaFormat::VideoCodec::WMV } }
+ };
+
+ const QSet<QMediaFormat::FileFormat> decoderFormats = {
+ QMediaFormat::MPEG4,
+ QMediaFormat::Matroska,
+ QMediaFormat::WebM,
+ QMediaFormat::QuickTime,
+ QMediaFormat::AAC,
+ QMediaFormat::MP3,
+ QMediaFormat::FLAC,
+ QMediaFormat::Mpeg4Audio,
+ QMediaFormat::WMA,
+ QMediaFormat::WMV,
+ };
+
+ const QSet<QMediaFormat::FileFormat> encoderFormats = {
+ QMediaFormat::MPEG4,
+ QMediaFormat::AAC,
+ QMediaFormat::MP3,
+ QMediaFormat::FLAC,
+ QMediaFormat::Mpeg4Audio,
+ QMediaFormat::WMA,
+ QMediaFormat::WMV,
+ };
+
+ CheckedCodecs<QMediaFormat::AudioCodec> checkedAudioCodecs;
+ CheckedCodecs<QMediaFormat::VideoCodec> checkedVideoCodecs;
+
+ auto ensureCodecs = [&] (CodecMap &codecs, QMediaFormat::ConversionMode mode) {
+ codecs.audio.removeIf([&] (auto codec) { return !isSupportedCodec(codec, mode, checkedAudioCodecs); });
+ codecs.video.removeIf([&] (auto codec) { return !isSupportedCodec(codec, mode, checkedVideoCodecs); });
+ return !codecs.video.empty() || !codecs.audio.empty();
+ };
+
+ for (const auto &codecMap : containerTable) {
+ if (decoderFormats.contains(codecMap.format)) {
+ auto m = codecMap;
+ if (ensureCodecs(m, QMediaFormat::Decode))
+ decoders.append(m);
+ }
+
+ if (encoderFormats.contains(codecMap.format)) {
+ auto m = codecMap;
+ if (ensureCodecs(m, QMediaFormat::Encode))
+ encoders.append(m);
+ }
+ }
+
+ imageFormats = getImageFormatList();
+}
+
+QWindowsFormatInfo::~QWindowsFormatInfo()
+{
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/windows/qwindowsformatinfo_p.h b/src/plugins/multimedia/windows/qwindowsformatinfo_p.h
new file mode 100644
index 000000000..31e6dd986
--- /dev/null
+++ b/src/plugins/multimedia/windows/qwindowsformatinfo_p.h
@@ -0,0 +1,31 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWINDOWSFORMATSINFO_H
+#define QWINDOWSFORMATSINFO_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediaformatinfo_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QWindowsFormatInfo : public QPlatformMediaFormatInfo
+{
+public:
+ QWindowsFormatInfo();
+ ~QWindowsFormatInfo();
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/windows/qwindowsintegration.cpp b/src/plugins/multimedia/windows/qwindowsintegration.cpp
new file mode 100644
index 000000000..1053f3c95
--- /dev/null
+++ b/src/plugins/multimedia/windows/qwindowsintegration.cpp
@@ -0,0 +1,96 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwindowsintegration_p.h"
+#include <private/qwindowsmediadevices_p.h>
+#include <qwindowsformatinfo_p.h>
+#include <qwindowsmediacapture_p.h>
+#include <qwindowsimagecapture_p.h>
+#include <qwindowscamera_p.h>
+#include <qwindowsmediaencoder_p.h>
+#include <mfplayercontrol_p.h>
+#include <mfaudiodecodercontrol_p.h>
+#include <mfevrvideowindowcontrol_p.h>
+#include <private/qplatformmediaplugin_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QWindowsMediaPlugin : public QPlatformMediaPlugin
+{
+ Q_OBJECT
+ Q_PLUGIN_METADATA(IID QPlatformMediaPlugin_iid FILE "windows.json")
+
+public:
+ QWindowsMediaPlugin()
+ : QPlatformMediaPlugin()
+ {}
+
+ QPlatformMediaIntegration* create(const QString &name) override
+ {
+ if (name == u"windows")
+ return new QWindowsMediaIntegration;
+ return nullptr;
+ }
+};
+
+QWindowsMediaIntegration::QWindowsMediaIntegration()
+ : QPlatformMediaIntegration(QLatin1String("windows"))
+{
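+    // Initialize COM and Media Foundation for the lifetime of the integration;
+    // both are torn down again in the destructor.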
+ CoInitialize(NULL);
+ MFStartup(MF_VERSION);
+}
+
+QWindowsMediaIntegration::~QWindowsMediaIntegration()
+{
+ MFShutdown();
+ CoUninitialize();
+}
+
+QPlatformMediaFormatInfo *QWindowsMediaIntegration::createFormatInfo()
+{
+ return new QWindowsFormatInfo();
+}
+
+QPlatformVideoDevices *QWindowsMediaIntegration::createVideoDevices()
+{
+ return new QWindowsVideoDevices(this);
+}
+
+QMaybe<QPlatformMediaCaptureSession *> QWindowsMediaIntegration::createCaptureSession()
+{
+ return new QWindowsMediaCaptureService();
+}
+
+QMaybe<QPlatformAudioDecoder *> QWindowsMediaIntegration::createAudioDecoder(QAudioDecoder *decoder)
+{
+ return new MFAudioDecoderControl(decoder);
+}
+
+QMaybe<QPlatformMediaPlayer *> QWindowsMediaIntegration::createPlayer(QMediaPlayer *parent)
+{
+ return new MFPlayerControl(parent);
+}
+
+QMaybe<QPlatformCamera *> QWindowsMediaIntegration::createCamera(QCamera *camera)
+{
+ return new QWindowsCamera(camera);
+}
+
+QMaybe<QPlatformMediaRecorder *> QWindowsMediaIntegration::createRecorder(QMediaRecorder *recorder)
+{
+ return new QWindowsMediaEncoder(recorder);
+}
+
+QMaybe<QPlatformImageCapture *> QWindowsMediaIntegration::createImageCapture(QImageCapture *imageCapture)
+{
+ return new QWindowsImageCapture(imageCapture);
+}
+
+QMaybe<QPlatformVideoSink *> QWindowsMediaIntegration::createVideoSink(QVideoSink *sink)
+{
+ return new MFEvrVideoWindowControl(sink);
+}
+
+QT_END_NAMESPACE
+
+#include "qwindowsintegration.moc"
diff --git a/src/plugins/multimedia/windows/qwindowsintegration_p.h b/src/plugins/multimedia/windows/qwindowsintegration_p.h
new file mode 100644
index 000000000..29498fa42
--- /dev/null
+++ b/src/plugins/multimedia/windows/qwindowsintegration_p.h
@@ -0,0 +1,51 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWINDOWSINTEGRATION_H
+#define QWINDOWSINTEGRATION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediaintegration_p.h>
+#include "qwindowsvideodevices_p.h"
+
+QT_BEGIN_NAMESPACE
+
+class QWindowsMediaDevices;
+class QWindowsFormatInfo;
+
+class QWindowsMediaIntegration : public QPlatformMediaIntegration
+{
+ Q_OBJECT
+public:
+ QWindowsMediaIntegration();
+ ~QWindowsMediaIntegration();
+
+ QMaybe<QPlatformMediaCaptureSession *> createCaptureSession() override;
+
+ QMaybe<QPlatformAudioDecoder *> createAudioDecoder(QAudioDecoder *decoder) override;
+ QMaybe<QPlatformMediaPlayer *> createPlayer(QMediaPlayer *parent) override;
+ QMaybe<QPlatformCamera *> createCamera(QCamera *camera) override;
+ QMaybe<QPlatformMediaRecorder *> createRecorder(QMediaRecorder *recorder) override;
+ QMaybe<QPlatformImageCapture *> createImageCapture(QImageCapture *imageCapture) override;
+
+ QMaybe<QPlatformVideoSink *> createVideoSink(QVideoSink *sink) override;
+
+protected:
+ QPlatformMediaFormatInfo *createFormatInfo() override;
+
+ QPlatformVideoDevices *createVideoDevices() override;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/windows/qwindowsvideodevices.cpp b/src/plugins/multimedia/windows/qwindowsvideodevices.cpp
new file mode 100644
index 000000000..8e5081d3b
--- /dev/null
+++ b/src/plugins/multimedia/windows/qwindowsvideodevices.cpp
@@ -0,0 +1,228 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qwindowsvideodevices_p.h"
+
+#include <private/qcameradevice_p.h>
+#include <private/qwindowsmfdefs_p.h>
+#include <private/qwindowsmultimediautils_p.h>
+#include <private/qcomptr_p.h>
+#include <private/qcomtaskresource_p.h>
+
+#include <dbt.h>
+
+#include <mfapi.h>
+#include <mfreadwrite.h>
+#include <mferror.h>
+
+QT_BEGIN_NAMESPACE
+
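+// Window procedure for the message-only window below; translates device
+// interface arrival/removal broadcasts into the videoInputsChanged() signal.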
+LRESULT QT_WIN_CALLBACK deviceNotificationWndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
+{
+ if (message == WM_DEVICECHANGE) {
+ auto b = (PDEV_BROADCAST_HDR)lParam;
+ if (b && b->dbch_devicetype == DBT_DEVTYP_DEVICEINTERFACE) {
+ auto wmd = reinterpret_cast<QWindowsVideoDevices *>(GetWindowLongPtr(hWnd, GWLP_USERDATA));
+ if (wmd) {
+ if (wParam == DBT_DEVICEARRIVAL || wParam == DBT_DEVICEREMOVECOMPLETE) {
+ emit wmd->videoInputsChanged();
+ }
+ }
+ }
+ }
+
+ return 1;
+}
+
+static const auto windowClassName = TEXT("QWindowsMediaDevicesMessageWindow");
+
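+// A message-only window (parent HWND_MESSAGE) is never shown; it exists solely
+// to receive the device change notifications registered in the constructor.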
+static HWND createMessageOnlyWindow()
+{
+ WNDCLASSEX wx = {};
+ wx.cbSize = sizeof(WNDCLASSEX);
+ wx.lpfnWndProc = deviceNotificationWndProc;
+ wx.hInstance = GetModuleHandle(nullptr);
+ wx.lpszClassName = windowClassName;
+
+ if (!RegisterClassEx(&wx))
+ return nullptr;
+
+ auto hwnd = CreateWindowEx(0, windowClassName, TEXT("Message"),
+ 0, 0, 0, 0, 0, HWND_MESSAGE, nullptr, nullptr, nullptr);
+ if (!hwnd) {
+ UnregisterClass(windowClassName, GetModuleHandle(nullptr));
+ return nullptr;
+ }
+
+ return hwnd;
+}
+
+QWindowsVideoDevices::QWindowsVideoDevices(QPlatformMediaIntegration *integration)
+ : QPlatformVideoDevices(integration)
+{
+ CoInitialize(nullptr);
+
+ m_videoDeviceMsgWindow = createMessageOnlyWindow();
+ if (m_videoDeviceMsgWindow) {
+ SetWindowLongPtr(m_videoDeviceMsgWindow, GWLP_USERDATA, (LONG_PTR)this);
+
+ DEV_BROADCAST_DEVICEINTERFACE di = {};
+ di.dbcc_size = sizeof(di);
+ di.dbcc_devicetype = DBT_DEVTYP_DEVICEINTERFACE;
+ di.dbcc_classguid = QMM_KSCATEGORY_VIDEO_CAMERA;
+
+ m_videoDeviceNotification =
+ RegisterDeviceNotification(m_videoDeviceMsgWindow, &di, DEVICE_NOTIFY_WINDOW_HANDLE);
+ if (!m_videoDeviceNotification) {
+ DestroyWindow(m_videoDeviceMsgWindow);
+ m_videoDeviceMsgWindow = nullptr;
+
+ UnregisterClass(windowClassName, GetModuleHandle(nullptr));
+ }
+ }
+
+ if (!m_videoDeviceNotification) {
+ qWarning() << "Video device change notification disabled";
+ }
+}
+
+QWindowsVideoDevices::~QWindowsVideoDevices()
+{
+ if (m_videoDeviceNotification) {
+ UnregisterDeviceNotification(m_videoDeviceNotification);
+ }
+
+ if (m_videoDeviceMsgWindow) {
+ DestroyWindow(m_videoDeviceMsgWindow);
+ UnregisterClass(windowClassName, GetModuleHandle(nullptr));
+ }
+
+ CoUninitialize();
+}
+
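+// Translate an IMFMediaType into a QCameraFormat by reading its subtype,
+// frame size and frame rate range; returns nothing for unsupported formats.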
+static std::optional<QCameraFormat> createCameraFormat(IMFMediaType *mediaFormat)
+{
+ GUID subtype = GUID_NULL;
+ if (FAILED(mediaFormat->GetGUID(MF_MT_SUBTYPE, &subtype)))
+ return {};
+
+ auto pixelFormat = QWindowsMultimediaUtils::pixelFormatFromMediaSubtype(subtype);
+ if (pixelFormat == QVideoFrameFormat::Format_Invalid)
+ return {};
+
+ UINT32 width = 0u;
+ UINT32 height = 0u;
+ if (FAILED(MFGetAttributeSize(mediaFormat, MF_MT_FRAME_SIZE, &width, &height)))
+ return {};
+ QSize resolution{ int(width), int(height) };
+
+ UINT32 num = 0u;
+ UINT32 den = 0u;
+ float minFr = 0.f;
+ float maxFr = 0.f;
+
+ if (SUCCEEDED(MFGetAttributeRatio(mediaFormat, MF_MT_FRAME_RATE_RANGE_MIN, &num, &den)))
+ minFr = float(num) / float(den);
+
+ if (SUCCEEDED(MFGetAttributeRatio(mediaFormat, MF_MT_FRAME_RATE_RANGE_MAX, &num, &den)))
+ maxFr = float(num) / float(den);
+
+ auto *f = new QCameraFormatPrivate{ QSharedData(), pixelFormat, resolution, minFr, maxFr };
+ return f->create();
+}
+
+static QString getString(IMFActivate *device, const IID &id)
+{
+ QComTaskResource<WCHAR> str;
+ UINT32 length = 0;
+ HRESULT hr = device->GetAllocatedString(id, str.address(), &length);
+ if (SUCCEEDED(hr)) {
+ return QString::fromWCharArray(str.get());
+ } else {
+ return {};
+ }
+}
+
+static std::optional<QCameraDevice> createCameraDevice(IMFActivate *device)
+{
+ auto info = std::make_unique<QCameraDevicePrivate>();
+ info->description = getString(device, MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME);
+ info->id = getString(device, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK).toUtf8();
+
+    // Hold the activated source in a ComPtr so it is released on every return path.
+    ComPtr<IMFMediaSource> source;
+    HRESULT hr = device->ActivateObject(IID_PPV_ARGS(&source));
+    if (FAILED(hr))
+        return {};
+
+    ComPtr<IMFSourceReader> reader;
+    hr = MFCreateSourceReaderFromMediaSource(source.Get(), NULL, reader.GetAddressOf());
+ if (FAILED(hr))
+ return {};
+
+ QList<QSize> photoResolutions;
+ QList<QCameraFormat> videoFormats;
+ for (DWORD i = 0;; ++i) {
+ // Loop through the supported formats for the video device
+ ComPtr<IMFMediaType> mediaFormat;
+ hr = reader->GetNativeMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, i,
+ mediaFormat.GetAddressOf());
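+        // GetNativeMediaType() fails with MF_E_NO_MORE_TYPES once every
+        // supported format has been enumerated, which ends the loop.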
+ if (FAILED(hr))
+ break;
+
+ auto maybeCamera = createCameraFormat(mediaFormat.Get());
+ if (maybeCamera) {
+ videoFormats << *maybeCamera;
+ photoResolutions << maybeCamera->resolution();
+ }
+ }
+
+ info->videoFormats = videoFormats;
+ info->photoResolutions = photoResolutions;
+ return info.release()->create();
+}
+
+static QList<QCameraDevice> readCameraDevices(IMFAttributes *attr)
+{
+ QList<QCameraDevice> cameras;
+ UINT32 count = 0;
+ IMFActivate **devicesRaw = nullptr;
+ HRESULT hr = MFEnumDeviceSources(attr, &devicesRaw, &count);
+ if (SUCCEEDED(hr)) {
+ QComTaskResource<IMFActivate *[], QComDeleter> devices(devicesRaw, count);
+
+ for (UINT32 i = 0; i < count; i++) {
+ IMFActivate *device = devices[i];
+ if (device) {
+ auto maybeCamera = createCameraDevice(device);
+ if (maybeCamera)
+ cameras << *maybeCamera;
+ }
+ }
+ }
+ return cameras;
+}
+
+QList<QCameraDevice> QWindowsVideoDevices::videoDevices() const
+{
+ QList<QCameraDevice> cameras;
+
+ ComPtr<IMFAttributes> attr;
+ HRESULT hr = MFCreateAttributes(attr.GetAddressOf(), 2);
+ if (FAILED(hr))
+ return {};
+
+ hr = attr->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
+ MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
+ if (SUCCEEDED(hr)) {
+ cameras << readCameraDevices(attr.Get());
+
+ hr = attr->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_CATEGORY,
+ QMM_KSCATEGORY_SENSOR_CAMERA);
+ if (SUCCEEDED(hr))
+ cameras << readCameraDevices(attr.Get());
+ }
+
+ return cameras;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/windows/qwindowsvideodevices_p.h b/src/plugins/multimedia/windows/qwindowsvideodevices_p.h
new file mode 100644
index 000000000..f8f5ed920
--- /dev/null
+++ b/src/plugins/multimedia/windows/qwindowsvideodevices_p.h
@@ -0,0 +1,44 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QWINDOWSVIDEODEVICES_H
+#define QWINDOWSVIDEODEVICES_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformvideodevices_p.h>
+#include <QtCore/qt_windows.h>
+
+QT_BEGIN_NAMESPACE
+
+LRESULT QT_WIN_CALLBACK deviceNotificationWndProc(HWND, UINT, WPARAM, LPARAM);
+
+class QWindowsVideoDevices : public QPlatformVideoDevices
+{
+public:
+ QWindowsVideoDevices(QPlatformMediaIntegration *integration);
+ ~QWindowsVideoDevices();
+
+ QList<QCameraDevice> videoDevices() const override;
+
+private:
+ HWND m_videoDeviceMsgWindow = nullptr;
+ HDEVNOTIFY m_videoDeviceNotification = nullptr;
+
+ friend LRESULT QT_WIN_CALLBACK deviceNotificationWndProc(HWND, UINT, WPARAM, LPARAM);
+};
+
+
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/windows/sourceresolver.cpp b/src/plugins/multimedia/windows/sourceresolver.cpp
new file mode 100644
index 000000000..52fb024be
--- /dev/null
+++ b/src/plugins/multimedia/windows/sourceresolver.cpp
@@ -0,0 +1,294 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "mfstream_p.h"
+#include "sourceresolver_p.h"
+#include <mferror.h>
+#include <nserror.h>
+#include <QtCore/qfile.h>
+#include <QtCore/qdebug.h>
+#include <QtMultimedia/qmediaplayer.h>
+
+QT_BEGIN_NAMESPACE
+
+/*
+    SourceResolver is separated from MFPlayerSession to handle the work of resolving a media
+    source asynchronously. Call SourceResolver::load to start resolving a media source; it
+    emits mediaSourceReady() when resolving is done. Call SourceResolver::cancel to stop a
+    previous load operation, if one is in progress.
+*/
+
+SourceResolver::SourceResolver()
+ : m_cRef(1)
+ , m_cancelCookie(0)
+ , m_sourceResolver(0)
+ , m_mediaSource(0)
+ , m_stream(0)
+{
+}
+
+SourceResolver::~SourceResolver()
+{
+ shutdown();
+ if (m_mediaSource) {
+ m_mediaSource->Release();
+ m_mediaSource = NULL;
+ }
+
+ if (m_cancelCookie)
+ m_cancelCookie->Release();
+ if (m_sourceResolver)
+ m_sourceResolver->Release();
+}
+
+STDMETHODIMP SourceResolver::QueryInterface(REFIID riid, LPVOID *ppvObject)
+{
+ if (!ppvObject)
+ return E_POINTER;
+ if (riid == IID_IUnknown) {
+ *ppvObject = static_cast<IUnknown*>(this);
+ } else if (riid == IID_IMFAsyncCallback) {
+ *ppvObject = static_cast<IMFAsyncCallback*>(this);
+ } else {
+ *ppvObject = NULL;
+ return E_NOINTERFACE;
+ }
+ AddRef();
+ return S_OK;
+}
+
+STDMETHODIMP_(ULONG) SourceResolver::AddRef(void)
+{
+ return InterlockedIncrement(&m_cRef);
+}
+
+STDMETHODIMP_(ULONG) SourceResolver::Release(void)
+{
+ LONG cRef = InterlockedDecrement(&m_cRef);
+ if (cRef == 0)
+ this->deleteLater();
+ return cRef;
+}
+
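+// Invoked on a Media Foundation worker thread when the asynchronous source
+// resolution completes or fails.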
+HRESULT STDMETHODCALLTYPE SourceResolver::Invoke(IMFAsyncResult *pAsyncResult)
+{
+ QMutexLocker locker(&m_mutex);
+
+ if (!m_sourceResolver)
+ return S_OK;
+
+ MF_OBJECT_TYPE ObjectType = MF_OBJECT_INVALID;
+ IUnknown* pSource = NULL;
+ State *state = static_cast<State*>(pAsyncResult->GetStateNoAddRef());
+
+ HRESULT hr = S_OK;
+ if (state->fromStream())
+ hr = m_sourceResolver->EndCreateObjectFromByteStream(pAsyncResult, &ObjectType, &pSource);
+ else
+ hr = m_sourceResolver->EndCreateObjectFromURL(pAsyncResult, &ObjectType, &pSource);
+
+ if (state->sourceResolver() != m_sourceResolver) {
+        // This result belongs to a request that was cancelled; ignore it.
+ return S_OK;
+ }
+
+ if (m_cancelCookie) {
+ m_cancelCookie->Release();
+ m_cancelCookie = NULL;
+ }
+
+ if (FAILED(hr)) {
+ emit error(hr);
+ return S_OK;
+ }
+
+ if (m_mediaSource) {
+ m_mediaSource->Release();
+ m_mediaSource = NULL;
+ }
+
+ hr = pSource->QueryInterface(IID_PPV_ARGS(&m_mediaSource));
+ pSource->Release();
+ if (FAILED(hr)) {
+ emit error(hr);
+ return S_OK;
+ }
+
+ emit mediaSourceReady();
+
+ return S_OK;
+}
+
+HRESULT STDMETHODCALLTYPE SourceResolver::GetParameters(DWORD*, DWORD*)
+{
+ return E_NOTIMPL;
+}
+
+void SourceResolver::load(const QUrl &url, QIODevice* stream)
+{
+ QMutexLocker locker(&m_mutex);
+ HRESULT hr = S_OK;
+ if (!m_sourceResolver)
+ hr = MFCreateSourceResolver(&m_sourceResolver);
+
+ if (m_stream) {
+ m_stream->Release();
+ m_stream = NULL;
+ }
+
+ if (FAILED(hr)) {
+ qWarning() << "Failed to create Source Resolver!";
+ emit error(hr);
+ } else if (stream) {
+ QString urlString = url.toString();
+ m_stream = new MFStream(stream, false);
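+        // MF_RESOLUTION_CONTENT_DOES_NOT_HAVE_TO_MATCH_EXTENSION_OR_MIME_TYPE
+        // lets the resolver sniff the container format from the data itself.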
+ hr = m_sourceResolver->BeginCreateObjectFromByteStream(
+ m_stream, urlString.isEmpty() ? 0 : reinterpret_cast<LPCWSTR>(urlString.utf16()),
+ MF_RESOLUTION_MEDIASOURCE | MF_RESOLUTION_CONTENT_DOES_NOT_HAVE_TO_MATCH_EXTENSION_OR_MIME_TYPE
+ , NULL, &m_cancelCookie, this, new State(m_sourceResolver, true));
+ if (FAILED(hr)) {
+ qWarning() << "Unsupported stream!";
+ emit error(hr);
+ }
+ } else {
+#ifdef DEBUG_MEDIAFOUNDATION
+ qDebug() << "loading :" << url;
+ qDebug() << "url path =" << url.path().mid(1);
+#endif
+#ifdef TEST_STREAMING
+    // Test the stream playback path for local file URLs.
+ if (url.scheme() == QLatin1String("file")) {
+ stream = new QFile(url.path().mid(1));
+ if (stream->open(QIODevice::ReadOnly)) {
+ m_stream = new MFStream(stream, true);
+ hr = m_sourceResolver->BeginCreateObjectFromByteStream(
+ m_stream, reinterpret_cast<const OLECHAR *>(url.toString().utf16()),
+ MF_RESOLUTION_MEDIASOURCE | MF_RESOLUTION_CONTENT_DOES_NOT_HAVE_TO_MATCH_EXTENSION_OR_MIME_TYPE,
+ NULL, &m_cancelCookie, this, new State(m_sourceResolver, true));
+ if (FAILED(hr)) {
+ qWarning() << "Unsupported stream!";
+ emit error(hr);
+ }
+ } else {
+ delete stream;
+ emit error(QMediaPlayer::FormatError);
+ }
+ } else
+#endif
+ if (url.scheme() == QLatin1String("qrc")) {
+ // If the canonical URL refers to a Qt resource, open with QFile and use
+ // the stream playback capability to play.
+ stream = new QFile(QLatin1Char(':') + url.path());
+ if (stream->open(QIODevice::ReadOnly)) {
+ m_stream = new MFStream(stream, true);
+ hr = m_sourceResolver->BeginCreateObjectFromByteStream(
+ m_stream, reinterpret_cast<const OLECHAR *>(url.toString().utf16()),
+ MF_RESOLUTION_MEDIASOURCE | MF_RESOLUTION_CONTENT_DOES_NOT_HAVE_TO_MATCH_EXTENSION_OR_MIME_TYPE,
+ NULL, &m_cancelCookie, this, new State(m_sourceResolver, true));
+ if (FAILED(hr)) {
+ qWarning() << "Unsupported stream!";
+ emit error(hr);
+ }
+ } else {
+ delete stream;
+ emit error(QMediaPlayer::FormatError);
+ }
+ } else {
+ hr = m_sourceResolver->BeginCreateObjectFromURL(
+ reinterpret_cast<const OLECHAR *>(url.toString().utf16()),
+ MF_RESOLUTION_MEDIASOURCE | MF_RESOLUTION_CONTENT_DOES_NOT_HAVE_TO_MATCH_EXTENSION_OR_MIME_TYPE,
+ NULL, &m_cancelCookie, this, new State(m_sourceResolver, false));
+ if (FAILED(hr)) {
+ qWarning() << "Unsupported url scheme!";
+ emit error(hr);
+ }
+ }
+ }
+}
+
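+// Cancellation is best-effort: releasing the resolver here lets Invoke()
+// recognise a late completion as stale and drop it.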
+void SourceResolver::cancel()
+{
+ QMutexLocker locker(&m_mutex);
+ if (m_cancelCookie) {
+ m_sourceResolver->CancelObjectCreation(m_cancelCookie);
+ m_cancelCookie->Release();
+ m_cancelCookie = NULL;
+ m_sourceResolver->Release();
+ m_sourceResolver = NULL;
+ }
+}
+
+void SourceResolver::shutdown()
+{
+ if (m_mediaSource) {
+ m_mediaSource->Shutdown();
+ m_mediaSource->Release();
+ m_mediaSource = NULL;
+ }
+
+ if (m_stream) {
+ m_stream->Release();
+ m_stream = NULL;
+ }
+}
+
+IMFMediaSource* SourceResolver::mediaSource() const
+{
+ return m_mediaSource;
+}
+
+/////////////////////////////////////////////////////////////////////////////////
+SourceResolver::State::State(IMFSourceResolver *sourceResolver, bool fromStream)
+ : m_cRef(0)
+ , m_sourceResolver(sourceResolver)
+ , m_fromStream(fromStream)
+{
+ sourceResolver->AddRef();
+}
+
+SourceResolver::State::~State()
+{
+ m_sourceResolver->Release();
+}
+
+STDMETHODIMP SourceResolver::State::QueryInterface(REFIID riid, LPVOID *ppvObject)
+{
+ if (!ppvObject)
+ return E_POINTER;
+ if (riid == IID_IUnknown) {
+ *ppvObject = static_cast<IUnknown*>(this);
+ } else {
+ *ppvObject = NULL;
+ return E_NOINTERFACE;
+ }
+ AddRef();
+ return S_OK;
+}
+
+STDMETHODIMP_(ULONG) SourceResolver::State::AddRef(void)
+{
+ return InterlockedIncrement(&m_cRef);
+}
+
+STDMETHODIMP_(ULONG) SourceResolver::State::Release(void)
+{
+ LONG cRef = InterlockedDecrement(&m_cRef);
+ if (cRef == 0)
+ delete this;
+    // Return the local copy; the object may already have been deleted.
+ return cRef;
+}
+
+IMFSourceResolver* SourceResolver::State::sourceResolver() const
+{
+ return m_sourceResolver;
+}
+
+bool SourceResolver::State::fromStream() const
+{
+ return m_fromStream;
+}
+
+QT_END_NAMESPACE
+
+#include "moc_sourceresolver_p.cpp"
diff --git a/src/plugins/multimedia/windows/sourceresolver_p.h b/src/plugins/multimedia/windows/sourceresolver_p.h
new file mode 100644
index 000000000..57ac6fc9d
--- /dev/null
+++ b/src/plugins/multimedia/windows/sourceresolver_p.h
@@ -0,0 +1,83 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef SOURCERESOLVER_H
+#define SOURCERESOLVER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "mfstream_p.h"
+#include <QUrl>
+
+QT_BEGIN_NAMESPACE
+
+class SourceResolver: public QObject, public IMFAsyncCallback
+{
+ Q_OBJECT
+public:
+ SourceResolver();
+
+ ~SourceResolver();
+
+ STDMETHODIMP QueryInterface(REFIID riid, LPVOID *ppvObject) override;
+ STDMETHODIMP_(ULONG) AddRef(void) override;
+ STDMETHODIMP_(ULONG) Release(void) override;
+
+ HRESULT STDMETHODCALLTYPE Invoke(IMFAsyncResult *pAsyncResult) override;
+
+ HRESULT STDMETHODCALLTYPE GetParameters(DWORD*, DWORD*) override;
+
+ void load(const QUrl &url, QIODevice* stream);
+
+ void cancel();
+
+ void shutdown();
+
+ IMFMediaSource* mediaSource() const;
+
+Q_SIGNALS:
+ void error(long hr);
+ void mediaSourceReady();
+
+private:
+ class State : public IUnknown
+ {
+ public:
+ State(IMFSourceResolver *sourceResolver, bool fromStream);
+ virtual ~State();
+
+ STDMETHODIMP QueryInterface(REFIID riid, LPVOID *ppvObject) override;
+
+ STDMETHODIMP_(ULONG) AddRef(void) override;
+
+ STDMETHODIMP_(ULONG) Release(void) override;
+
+ IMFSourceResolver* sourceResolver() const;
+ bool fromStream() const;
+
+ private:
+ long m_cRef;
+ IMFSourceResolver *m_sourceResolver;
+ bool m_fromStream;
+ };
+
+ long m_cRef;
+ IUnknown *m_cancelCookie;
+ IMFSourceResolver *m_sourceResolver;
+ IMFMediaSource *m_mediaSource;
+ MFStream *m_stream;
+ QMutex m_mutex;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/windows/windows.json b/src/plugins/multimedia/windows/windows.json
new file mode 100644
index 000000000..05032c1b7
--- /dev/null
+++ b/src/plugins/multimedia/windows/windows.json
@@ -0,0 +1,3 @@
+{
+ "Keys": [ "windows" ]
+}