summaryrefslogtreecommitdiffstats
path: root/src/plugins/multimedia/darwin
diff options
context:
space:
mode:
Diffstat (limited to 'src/plugins/multimedia/darwin')
-rw-r--r--src/plugins/multimedia/darwin/CMakeLists.txt70
-rw-r--r--src/plugins/multimedia/darwin/avfaudiodecoder.mm544
-rw-r--r--src/plugins/multimedia/darwin/avfaudiodecoder_p.h99
-rw-r--r--src/plugins/multimedia/darwin/avfvideobuffer.mm207
-rw-r--r--src/plugins/multimedia/darwin/avfvideobuffer_p.h64
-rw-r--r--src/plugins/multimedia/darwin/avfvideosink.mm228
-rw-r--r--src/plugins/multimedia/darwin/avfvideosink_p.h99
-rw-r--r--src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate.mm98
-rw-r--r--src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate_p.h40
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcamera.mm89
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcamera_p.h48
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcameradebug_p.h26
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcamerarenderer.mm292
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcamerarenderer_p.h95
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcameraservice.mm169
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcameraservice_p.h84
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcamerasession.mm513
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcamerasession_p.h132
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcamerautility.mm730
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcamerautility_p.h165
-rw-r--r--src/plugins/multimedia/darwin/camera/avfimagecapture.mm385
-rw-r--r--src/plugins/multimedia/darwin/camera/avfimagecapture_p.h81
-rw-r--r--src/plugins/multimedia/darwin/camera/avfmediaassetwriter.mm556
-rw-r--r--src/plugins/multimedia/darwin/camera/avfmediaassetwriter_p.h54
-rw-r--r--src/plugins/multimedia/darwin/camera/avfmediaencoder.mm664
-rw-r--r--src/plugins/multimedia/darwin/camera/avfmediaencoder_p.h96
-rw-r--r--src/plugins/multimedia/darwin/camera/qavfcamerabase.mm1084
-rw-r--r--src/plugins/multimedia/darwin/camera/qavfcamerabase_p.h110
-rw-r--r--src/plugins/multimedia/darwin/common/avfmetadata.mm382
-rw-r--r--src/plugins/multimedia/darwin/common/avfmetadata_p.h37
-rw-r--r--src/plugins/multimedia/darwin/darwin.json3
-rw-r--r--src/plugins/multimedia/darwin/mediaplayer/avfdisplaylink.mm207
-rw-r--r--src/plugins/multimedia/darwin/mediaplayer/avfdisplaylink_p.h65
-rw-r--r--src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer.mm1270
-rw-r--r--src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer_p.h151
-rw-r--r--src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol.mm222
-rw-r--r--src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol_p.h72
-rw-r--r--src/plugins/multimedia/darwin/qavfhelpers.mm143
-rw-r--r--src/plugins/multimedia/darwin/qavfhelpers_p.h41
-rw-r--r--src/plugins/multimedia/darwin/qdarwinformatsinfo.mm211
-rw-r--r--src/plugins/multimedia/darwin/qdarwinformatsinfo_p.h38
-rw-r--r--src/plugins/multimedia/darwin/qdarwinintegration.mm93
-rw-r--r--src/plugins/multimedia/darwin/qdarwinintegration_p.h45
43 files changed, 9802 insertions, 0 deletions
diff --git a/src/plugins/multimedia/darwin/CMakeLists.txt b/src/plugins/multimedia/darwin/CMakeLists.txt
new file mode 100644
index 000000000..0bbc054eb
--- /dev/null
+++ b/src/plugins/multimedia/darwin/CMakeLists.txt
@@ -0,0 +1,70 @@
+# Copyright (C) 2022 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+
+qt_internal_find_apple_system_framework(FWCoreMedia CoreMedia) # special case
+qt_internal_find_apple_system_framework(FWCoreAudio CoreAudio) # special case
+qt_internal_find_apple_system_framework(FWAudioUnit AudioUnit) # special case
+qt_internal_find_apple_system_framework(FWVideoToolbox VideoToolbox) # special case
+qt_internal_find_apple_system_framework(FWAVFoundation AVFoundation) # special case
+
+qt_internal_add_plugin(QDarwinMediaPlugin
+ OUTPUT_NAME darwinmediaplugin
+ PLUGIN_TYPE multimedia
+ SOURCES
+ avfaudiodecoder.mm avfaudiodecoder_p.h
+ mediaplayer/avfdisplaylink.mm mediaplayer/avfdisplaylink_p.h
+ mediaplayer/avfmediaplayer.mm mediaplayer/avfmediaplayer_p.h
+ common/avfmetadata.mm common/avfmetadata_p.h
+ mediaplayer/avfvideorenderercontrol.mm mediaplayer/avfvideorenderercontrol_p.h
+ avfvideosink.mm avfvideosink_p.h
+ avfvideobuffer.mm avfvideobuffer_p.h
+ qavfhelpers.mm qavfhelpers_p.h
+ qdarwinformatsinfo.mm qdarwinformatsinfo_p.h
+ qdarwinintegration.mm qdarwinintegration_p.h
+ INCLUDE_DIRECTORIES
+ audio
+ camera
+ common
+ mediaplayer
+ LIBRARIES
+ Qt::MultimediaPrivate
+ Qt::CorePrivate
+ ${FWCoreMedia}
+ ${FWCoreAudio}
+ ${FWCoreVideo}
+ ${FWFoundation}
+ ${FWMetal}
+ ${FWQuartzCore}
+ ${FWAudioToolbox}
+ AVFoundation::AVFoundation
+)
+
+qt_internal_extend_target(QDarwinMediaPlugin CONDITION NOT TVOS
+ SOURCES
+ camera/qavfcamerabase.mm camera/qavfcamerabase_p.h
+ camera/avfcamera.mm camera/avfcamera_p.h
+ camera/avfcameradebug_p.h
+ camera/avfaudiopreviewdelegate.mm camera/avfaudiopreviewdelegate_p.h
+ camera/avfcamerarenderer.mm camera/avfcamerarenderer_p.h
+ camera/avfcameraservice.mm camera/avfcameraservice_p.h
+ camera/avfcamerasession.mm camera/avfcamerasession_p.h
+ camera/avfcamerautility.mm camera/avfcamerautility_p.h
+ camera/avfimagecapture.mm camera/avfimagecapture_p.h
+ camera/avfmediaassetwriter.mm camera/avfmediaassetwriter_p.h
+ camera/avfmediaencoder.mm camera/avfmediaencoder_p.h
+)
+
+qt_internal_extend_target(QDarwinMediaPlugin CONDITION MACOS
+ LIBRARIES
+ ${FWAppKit}
+ ${FWAudioUnit}
+ ${FWVideoToolbox}
+ ${FWApplicationServices}
+)
+
+qt_internal_extend_target(QDarwinMediaPlugin CONDITION IOS
+ LIBRARIES
+ ${FWCoreGraphics}
+ ${FWCoreVideo}
+)
diff --git a/src/plugins/multimedia/darwin/avfaudiodecoder.mm b/src/plugins/multimedia/darwin/avfaudiodecoder.mm
new file mode 100644
index 000000000..3191b7db0
--- /dev/null
+++ b/src/plugins/multimedia/darwin/avfaudiodecoder.mm
@@ -0,0 +1,544 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfaudiodecoder_p.h"
+
+#include <QtCore/qmutex.h>
+#include <QtCore/qiodevice.h>
+#include <QMimeDatabase>
+#include <QThread>
+#include "private/qcoreaudioutils_p.h"
+#include <QtCore/qloggingcategory.h>
+
+#include <AVFoundation/AVFoundation.h>
+
+QT_USE_NAMESPACE
+
+static Q_LOGGING_CATEGORY(qLcAVFAudioDecoder, "qt.multimedia.darwin.AVFAudioDecoder")
+constexpr static int MAX_BUFFERS_IN_QUEUE = 5;
+
+QAudioBuffer handleNextSampleBuffer(CMSampleBufferRef sampleBuffer)
+{
+ if (!sampleBuffer)
+ return {};
+
+ // Check format
+ CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
+ if (!formatDescription)
+ return {};
+ const AudioStreamBasicDescription* const asbd = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription);
+ QAudioFormat qtFormat = CoreAudioUtils::toQAudioFormat(*asbd);
+ if (qtFormat.sampleFormat() == QAudioFormat::Unknown && asbd->mBitsPerChannel == 8)
+ qtFormat.setSampleFormat(QAudioFormat::UInt8);
+ if (!qtFormat.isValid())
+ return {};
+
+ // Get the required size to allocate to audioBufferList
+ size_t audioBufferListSize = 0;
+ OSStatus err = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer,
+ &audioBufferListSize,
+ NULL,
+ 0,
+ NULL,
+ NULL,
+ kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
+ NULL);
+ if (err != noErr)
+ return {};
+
+ CMBlockBufferRef blockBuffer = NULL;
+ AudioBufferList* audioBufferList = (AudioBufferList*) malloc(audioBufferListSize);
+ // This ensures the buffers placed in audioBufferList are contiguous
+ err = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer,
+ NULL,
+ audioBufferList,
+ audioBufferListSize,
+ NULL,
+ NULL,
+ kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
+ &blockBuffer);
+ if (err != noErr) {
+ free(audioBufferList);
+ return {};
+ }
+
+ QByteArray abuf;
+ for (UInt32 i = 0; i < audioBufferList->mNumberBuffers; i++)
+ {
+ AudioBuffer audioBuffer = audioBufferList->mBuffers[i];
+ abuf.push_back(QByteArray((const char*)audioBuffer.mData, audioBuffer.mDataByteSize));
+ }
+
+ free(audioBufferList);
+ CFRelease(blockBuffer);
+
+ CMTime sampleStartTime = (CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
+ float sampleStartTimeSecs = CMTimeGetSeconds(sampleStartTime);
+
+ return QAudioBuffer(abuf, qtFormat, qint64(sampleStartTimeSecs * 1000000));
+}
+
+@interface AVFResourceReaderDelegate : NSObject <AVAssetResourceLoaderDelegate> {
+ AVFAudioDecoder *m_decoder;
+ QMutex m_mutex;
+}
+
+- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader
+ shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
+
+@end
+
+@implementation AVFResourceReaderDelegate
+
+- (id)initWithDecoder:(AVFAudioDecoder *)decoder
+{
+ if (!(self = [super init]))
+ return nil;
+
+ m_decoder = decoder;
+
+ return self;
+}
+
+-(BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader
+ shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
+{
+ Q_UNUSED(resourceLoader);
+
+ if (![loadingRequest.request.URL.scheme isEqualToString:@"iodevice"])
+ return NO;
+
+ QMutexLocker locker(&m_mutex);
+
+ QIODevice *device = m_decoder->sourceDevice();
+ if (!device)
+ return NO;
+
+ device->seek(loadingRequest.dataRequest.requestedOffset);
+ if (loadingRequest.contentInformationRequest) {
+ loadingRequest.contentInformationRequest.contentLength = device->size();
+ loadingRequest.contentInformationRequest.byteRangeAccessSupported = YES;
+ }
+
+ if (loadingRequest.dataRequest) {
+ NSInteger requestedLength = loadingRequest.dataRequest.requestedLength;
+ int maxBytes = qMin(32 * 1024, int(requestedLength));
+ char buffer[maxBytes];
+ NSInteger submitted = 0;
+ while (submitted < requestedLength) {
+ qint64 len = device->read(buffer, maxBytes);
+ if (len < 1)
+ break;
+
+ [loadingRequest.dataRequest respondWithData:[NSData dataWithBytes:buffer length:len]];
+ submitted += len;
+ }
+
+ // Finish loading even if not all bytes submitted.
+ [loadingRequest finishLoading];
+ }
+
+ return YES;
+}
+
+@end
+
+namespace {
+
+NSDictionary *av_audio_settings_for_format(const QAudioFormat &format)
+{
+ float sampleRate = format.sampleRate();
+ int nChannels = format.channelCount();
+ int sampleSize = format.bytesPerSample() * 8;
+ BOOL isFloat = format.sampleFormat() == QAudioFormat::Float;
+
+ NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
+ [NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
+ [NSNumber numberWithFloat:sampleRate], AVSampleRateKey,
+ [NSNumber numberWithInt:nChannels], AVNumberOfChannelsKey,
+ [NSNumber numberWithInt:sampleSize], AVLinearPCMBitDepthKey,
+ [NSNumber numberWithBool:isFloat], AVLinearPCMIsFloatKey,
+ [NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
+ [NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
+ nil];
+
+ return audioSettings;
+}
+
+QAudioFormat qt_format_for_audio_track(AVAssetTrack *track)
+{
+ QAudioFormat format;
+ CMFormatDescriptionRef desc = (__bridge CMFormatDescriptionRef)track.formatDescriptions[0];
+ const AudioStreamBasicDescription* const asbd =
+ CMAudioFormatDescriptionGetStreamBasicDescription(desc);
+ format = CoreAudioUtils::toQAudioFormat(*asbd);
+ // AudioStreamBasicDescription's mBitsPerChannel is 0 for compressed formats
+ // In this case set default Int16 sample format
+ if (asbd->mBitsPerChannel == 0)
+ format.setSampleFormat(QAudioFormat::Int16);
+ return format;
+}
+
+}
+
+struct AVFAudioDecoder::DecodingContext
+{
+ AVAssetReader *m_reader = nullptr;
+ AVAssetReaderTrackOutput *m_readerOutput = nullptr;
+
+ ~DecodingContext()
+ {
+ if (m_reader) {
+ [m_reader cancelReading];
+ [m_reader release];
+ }
+
+ [m_readerOutput release];
+ }
+};
+
+AVFAudioDecoder::AVFAudioDecoder(QAudioDecoder *parent)
+ : QPlatformAudioDecoder(parent)
+{
+ m_readingQueue = dispatch_queue_create("reader_queue", DISPATCH_QUEUE_SERIAL);
+ m_decodingQueue = dispatch_queue_create("decoder_queue", DISPATCH_QUEUE_SERIAL);
+
+ m_readerDelegate = [[AVFResourceReaderDelegate alloc] initWithDecoder:this];
+}
+
+AVFAudioDecoder::~AVFAudioDecoder()
+{
+ stop();
+
+ [m_readerDelegate release];
+ [m_asset release];
+
+ dispatch_release(m_readingQueue);
+ dispatch_release(m_decodingQueue);
+}
+
+QUrl AVFAudioDecoder::source() const
+{
+ return m_source;
+}
+
+void AVFAudioDecoder::setSource(const QUrl &fileName)
+{
+ if (!m_device && m_source == fileName)
+ return;
+
+ stop();
+ m_device = nullptr;
+ [m_asset release];
+ m_asset = nil;
+
+ m_source = fileName;
+
+ if (!m_source.isEmpty()) {
+ NSURL *nsURL = m_source.toNSURL();
+ m_asset = [[AVURLAsset alloc] initWithURL:nsURL options:nil];
+ }
+
+ sourceChanged();
+}
+
+QIODevice *AVFAudioDecoder::sourceDevice() const
+{
+ return m_device;
+}
+
+void AVFAudioDecoder::setSourceDevice(QIODevice *device)
+{
+ if (m_device == device && m_source.isEmpty())
+ return;
+
+ stop();
+ m_source.clear();
+ [m_asset release];
+ m_asset = nil;
+
+ m_device = device;
+
+ if (m_device) {
+ const QString ext = QMimeDatabase().mimeTypeForData(m_device).preferredSuffix();
+ const QString url = "iodevice:///iodevice." + ext;
+ NSString *urlString = url.toNSString();
+ NSURL *nsURL = [NSURL URLWithString:urlString];
+
+ m_asset = [[AVURLAsset alloc] initWithURL:nsURL options:nil];
+
+        // Use the decoding queue instead of the reading queue in order to fix random stalls.
+        // The decoding queue is empty at the moment anyway.
+ [m_asset.resourceLoader setDelegate:m_readerDelegate queue:m_decodingQueue];
+ }
+
+ sourceChanged();
+}
+
+void AVFAudioDecoder::start()
+{
+ if (m_decodingContext) {
+ qCDebug(qLcAVFAudioDecoder()) << "AVFAudioDecoder has been already started";
+ return;
+ }
+
+ positionChanged(-1);
+
+ if (m_device && (!m_device->isOpen() || !m_device->isReadable())) {
+ processInvalidMedia(QAudioDecoder::ResourceError, tr("Unable to read from specified device"));
+ return;
+ }
+
+ m_decodingContext = std::make_shared<DecodingContext>();
+ std::weak_ptr<DecodingContext> weakContext(m_decodingContext);
+
+ auto handleLoadingResult = [=]() {
+ NSError *error = nil;
+ AVKeyValueStatus status = [m_asset statusOfValueForKey:@"tracks" error:&error];
+
+ if (status == AVKeyValueStatusFailed) {
+ if (error.domain == NSURLErrorDomain)
+ processInvalidMedia(QAudioDecoder::ResourceError,
+ QString::fromNSString(error.localizedDescription));
+ else
+ processInvalidMedia(QAudioDecoder::FormatError,
+ tr("Could not load media source's tracks"));
+ } else if (status != AVKeyValueStatusLoaded) {
+ qWarning() << "Unexpected AVKeyValueStatus:" << status;
+ stop();
+ }
+ else {
+ initAssetReader();
+ }
+ };
+
+ [m_asset loadValuesAsynchronouslyForKeys:@[ @"tracks" ]
+ completionHandler:[=]() {
+ invokeWithDecodingContext(weakContext, handleLoadingResult);
+ }];
+}
+
+void AVFAudioDecoder::decBuffersCounter(uint val)
+{
+ if (val) {
+ QMutexLocker locker(&m_buffersCounterMutex);
+ m_buffersCounter -= val;
+ }
+
+ Q_ASSERT(m_buffersCounter >= 0);
+
+ m_buffersCounterCondition.wakeAll();
+}
+
+void AVFAudioDecoder::stop()
+{
+ qCDebug(qLcAVFAudioDecoder()) << "stop decoding";
+
+ m_decodingContext.reset();
+ decBuffersCounter(m_cachedBuffers.size());
+ m_cachedBuffers.clear();
+
+ bufferAvailableChanged(false);
+ positionChanged(-1);
+ durationChanged(-1);
+
+ onFinished();
+}
+
+QAudioFormat AVFAudioDecoder::audioFormat() const
+{
+ return m_format;
+}
+
+void AVFAudioDecoder::setAudioFormat(const QAudioFormat &format)
+{
+ if (m_format != format) {
+ m_format = format;
+ formatChanged(m_format);
+ }
+}
+
+QAudioBuffer AVFAudioDecoder::read()
+{
+ if (m_cachedBuffers.empty())
+ return QAudioBuffer();
+
+ Q_ASSERT(m_cachedBuffers.size() > 0);
+ QAudioBuffer buffer = m_cachedBuffers.dequeue();
+ decBuffersCounter(1);
+
+ positionChanged(buffer.startTime() / 1000);
+ bufferAvailableChanged(!m_cachedBuffers.empty());
+ return buffer;
+}
+
+void AVFAudioDecoder::processInvalidMedia(QAudioDecoder::Error errorCode,
+ const QString &errorString)
+{
+ qCDebug(qLcAVFAudioDecoder()) << "Invalid media. Error code:" << errorCode
+ << "Description:" << errorString;
+
+ Q_ASSERT(QThread::currentThread() == thread());
+
+ error(int(errorCode), errorString);
+
+    // TODO: maybe check whether decodingContext was changed by
+    // the user's action (restart) from the emitted error.
+    // We should handle it somehow (don't run stop, print a warning, etc...)
+
+ stop();
+}
+
+void AVFAudioDecoder::onFinished()
+{
+ m_decodingContext.reset();
+
+ if (isDecoding())
+ finished();
+}
+
+void AVFAudioDecoder::initAssetReader()
+{
+ qCDebug(qLcAVFAudioDecoder()) << "Init asset reader";
+
+ Q_ASSERT(m_asset);
+ Q_ASSERT(QThread::currentThread() == thread());
+
+ NSArray<AVAssetTrack *> *tracks = [m_asset tracksWithMediaType:AVMediaTypeAudio];
+ if (!tracks.count) {
+ processInvalidMedia(QAudioDecoder::FormatError, tr("No audio tracks found"));
+ return;
+ }
+
+ AVAssetTrack *track = [tracks objectAtIndex:0];
+ QAudioFormat format = m_format.isValid() ? m_format : qt_format_for_audio_track(track);
+ if (!format.isValid()) {
+ processInvalidMedia(QAudioDecoder::FormatError, tr("Unsupported source format"));
+ return;
+ }
+
+ durationChanged(CMTimeGetSeconds(track.timeRange.duration) * 1000);
+
+ NSError *error = nil;
+ NSDictionary *audioSettings = av_audio_settings_for_format(format);
+
+ AVAssetReaderTrackOutput *readerOutput =
+ [[AVAssetReaderTrackOutput alloc] initWithTrack:track outputSettings:audioSettings];
+ AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:m_asset error:&error];
+ if (error) {
+ processInvalidMedia(QAudioDecoder::ResourceError, QString::fromNSString(error.localizedDescription));
+ return;
+ }
+ if (![reader canAddOutput:readerOutput]) {
+ processInvalidMedia(QAudioDecoder::ResourceError, tr("Failed to add asset reader output"));
+ return;
+ }
+
+ [reader addOutput:readerOutput];
+
+ Q_ASSERT(m_decodingContext);
+ m_decodingContext->m_reader = reader;
+ m_decodingContext->m_readerOutput = readerOutput;
+
+ startReading();
+}
+
+void AVFAudioDecoder::startReading()
+{
+ Q_ASSERT(m_decodingContext);
+ Q_ASSERT(m_decodingContext->m_reader);
+ Q_ASSERT(QThread::currentThread() == thread());
+
+ // Prepares the receiver for obtaining sample buffers from the asset.
+ if (![m_decodingContext->m_reader startReading]) {
+ processInvalidMedia(QAudioDecoder::ResourceError, tr("Could not start reading"));
+ return;
+ }
+
+ setIsDecoding(true);
+
+ std::weak_ptr<DecodingContext> weakContext = m_decodingContext;
+
+ // Since copyNextSampleBuffer is synchronous, submit it to an async dispatch queue
+ // to run in a separate thread. Call the handleNextSampleBuffer "callback" on another
+    // thread when a new audio sample is read.
+ auto copyNextSampleBuffer = [=]() {
+ auto decodingContext = weakContext.lock();
+ if (!decodingContext)
+ return false;
+
+ CMSampleBufferRef sampleBuffer = [decodingContext->m_readerOutput copyNextSampleBuffer];
+ if (!sampleBuffer)
+ return false;
+
+ dispatch_async(m_decodingQueue, [=]() {
+ if (!weakContext.expired() && CMSampleBufferDataIsReady(sampleBuffer)) {
+ auto audioBuffer = handleNextSampleBuffer(sampleBuffer);
+
+ if (audioBuffer.isValid())
+ invokeWithDecodingContext(weakContext,
+ [=]() { handleNewAudioBuffer(audioBuffer); });
+ }
+
+ CFRelease(sampleBuffer);
+ });
+
+ return true;
+ };
+
+ dispatch_async(m_readingQueue, [=]() {
+ qCDebug(qLcAVFAudioDecoder()) << "start reading thread";
+
+ do {
+            // Note, waiting here doesn't ensure strong control of the counter.
+            // However, it doesn't affect the logic: the reading flow works fine
+            // even if the counter occasionally exceeds the max value
+ waitUntilBuffersCounterLessMax();
+ } while (copyNextSampleBuffer());
+
+ // TODO: check m_reader.status == AVAssetReaderStatusFailed
+ invokeWithDecodingContext(weakContext, [this]() { onFinished(); });
+ });
+}
+
+void AVFAudioDecoder::waitUntilBuffersCounterLessMax()
+{
+ if (m_buffersCounter >= MAX_BUFFERS_IN_QUEUE) {
+        // the check avoids an extra mutex lock.
+
+ QMutexLocker locker(&m_buffersCounterMutex);
+
+ while (m_buffersCounter >= MAX_BUFFERS_IN_QUEUE)
+ m_buffersCounterCondition.wait(&m_buffersCounterMutex);
+ }
+}
+
+void AVFAudioDecoder::handleNewAudioBuffer(QAudioBuffer buffer)
+{
+ m_cachedBuffers.enqueue(buffer);
+ ++m_buffersCounter;
+
+ Q_ASSERT(m_cachedBuffers.size() == m_buffersCounter);
+
+ bufferAvailableChanged(true);
+ bufferReady();
+}
+
+/*
+ * The method calls the passed functor in the thread of AVFAudioDecoder and guarantees that
+ * the passed decoding context has not expired. In other words, it helps avoid all callbacks
+ * after the decoder has been stopped.
+ */
+template<typename F>
+void AVFAudioDecoder::invokeWithDecodingContext(std::weak_ptr<DecodingContext> weakContext, F &&f)
+{
+ if (!weakContext.expired())
+ QMetaObject::invokeMethod(this, [=]() {
+ // strong check: compare with actual decoding context.
+ // Otherwise, the context can be temporary locked by one of dispatch queues.
+ if (auto context = weakContext.lock(); context && context == m_decodingContext)
+ f();
+ });
+}
+
+#include "moc_avfaudiodecoder_p.cpp"
diff --git a/src/plugins/multimedia/darwin/avfaudiodecoder_p.h b/src/plugins/multimedia/darwin/avfaudiodecoder_p.h
new file mode 100644
index 000000000..81ef3f49e
--- /dev/null
+++ b/src/plugins/multimedia/darwin/avfaudiodecoder_p.h
@@ -0,0 +1,99 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFAUDIODECODER_H
+#define AVFAUDIODECODER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/private/qtmultimediaglobal_p.h>
+#include <QObject>
+#include <QtCore/qurl.h>
+#include <QWaitCondition>
+#include <QMutex>
+#include <QQueue>
+
+#include "private/qplatformaudiodecoder_p.h"
+#include "qaudiodecoder.h"
+
+#include <dispatch/dispatch.h>
+
+Q_FORWARD_DECLARE_OBJC_CLASS(AVURLAsset);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVAssetReader);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVAssetReaderTrackOutput);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVFResourceReaderDelegate);
+
+QT_BEGIN_NAMESPACE
+
+class AVFAudioDecoder : public QPlatformAudioDecoder
+{
+ Q_OBJECT
+
+ struct DecodingContext;
+
+public:
+ AVFAudioDecoder(QAudioDecoder *parent);
+ virtual ~AVFAudioDecoder();
+
+ QUrl source() const override;
+ void setSource(const QUrl &fileName) override;
+
+ QIODevice *sourceDevice() const override;
+ void setSourceDevice(QIODevice *device) override;
+
+ void start() override;
+ void stop() override;
+
+ QAudioFormat audioFormat() const override;
+ void setAudioFormat(const QAudioFormat &format) override;
+
+ QAudioBuffer read() override;
+
+private:
+ void handleNewAudioBuffer(QAudioBuffer);
+ void startReading();
+
+ void processInvalidMedia(QAudioDecoder::Error errorCode, const QString& errorString);
+ void initAssetReader();
+ void onFinished();
+
+ void waitUntilBuffersCounterLessMax();
+
+ void decBuffersCounter(uint val);
+
+ template<typename F>
+ void invokeWithDecodingContext(std::weak_ptr<DecodingContext> weakContext, F &&f);
+
+private:
+ QUrl m_source;
+ QIODevice *m_device = nullptr;
+ QAudioFormat m_format;
+
+    // Use a separate counter instead of the buffers queue's size in order to
+    // ensure atomic access and also to shorten mutex locking
+ std::atomic<int> m_buffersCounter = 0;
+ QQueue<QAudioBuffer> m_cachedBuffers;
+
+ AVURLAsset *m_asset = nullptr;
+
+ AVFResourceReaderDelegate *m_readerDelegate = nullptr;
+ dispatch_queue_t m_readingQueue;
+ dispatch_queue_t m_decodingQueue;
+
+ std::shared_ptr<DecodingContext> m_decodingContext;
+ QMutex m_buffersCounterMutex;
+ QWaitCondition m_buffersCounterCondition;
+};
+
+QT_END_NAMESPACE
+
+#endif // AVFAUDIODECODER_H
diff --git a/src/plugins/multimedia/darwin/avfvideobuffer.mm b/src/plugins/multimedia/darwin/avfvideobuffer.mm
new file mode 100644
index 000000000..57ec89ae7
--- /dev/null
+++ b/src/plugins/multimedia/darwin/avfvideobuffer.mm
@@ -0,0 +1,207 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfvideobuffer_p.h"
+#include <rhi/qrhi.h>
+#include <CoreVideo/CVMetalTexture.h>
+#include <CoreVideo/CVMetalTextureCache.h>
+#include <QtGui/qopenglcontext.h>
+
+#include <private/qvideotexturehelper_p.h>
+#include "qavfhelpers_p.h"
+
+#import <AVFoundation/AVFoundation.h>
+#import <Metal/Metal.h>
+
+QT_USE_NAMESPACE
+
+AVFVideoBuffer::AVFVideoBuffer(AVFVideoSinkInterface *sink, CVImageBufferRef buffer)
+ : QHwVideoBuffer(sink->rhi() ? QVideoFrame::RhiTextureHandle : QVideoFrame::NoHandle,
+ sink->rhi()),
+ sink(sink),
+ m_buffer(buffer)
+{
+// m_type = QVideoFrame::NoHandle;
+// qDebug() << "RHI" << m_rhi;
+ CVPixelBufferRetain(m_buffer);
+ const bool rhiIsOpenGL = sink && sink->rhi() && sink->rhi()->backend() == QRhi::OpenGLES2;
+ m_format = QAVFHelpers::videoFormatForImageBuffer(m_buffer, rhiIsOpenGL);
+}
+
+AVFVideoBuffer::~AVFVideoBuffer()
+{
+ AVFVideoBuffer::unmap();
+ for (int i = 0; i < 3; ++i)
+ if (cvMetalTexture[i])
+ CFRelease(cvMetalTexture[i]);
+#if defined(Q_OS_MACOS)
+ if (cvOpenGLTexture)
+ CVOpenGLTextureRelease(cvOpenGLTexture);
+#elif defined(Q_OS_IOS)
+ if (cvOpenGLESTexture)
+ CFRelease(cvOpenGLESTexture);
+#endif
+ CVPixelBufferRelease(m_buffer);
+}
+
+AVFVideoBuffer::MapData AVFVideoBuffer::map(QtVideo::MapMode mode)
+{
+ MapData mapData;
+
+ if (m_mode == QtVideo::MapMode::NotMapped) {
+ CVPixelBufferLockBaseAddress(m_buffer, mode == QtVideo::MapMode::ReadOnly
+ ? kCVPixelBufferLock_ReadOnly
+ : 0);
+ m_mode = mode;
+ }
+
+ mapData.planeCount = CVPixelBufferGetPlaneCount(m_buffer);
+ Q_ASSERT(mapData.planeCount <= 3);
+
+ if (!mapData.planeCount) {
+ // single plane
+ mapData.bytesPerLine[0] = CVPixelBufferGetBytesPerRow(m_buffer);
+ mapData.data[0] = static_cast<uchar*>(CVPixelBufferGetBaseAddress(m_buffer));
+ mapData.dataSize[0] = CVPixelBufferGetDataSize(m_buffer);
+ mapData.planeCount = mapData.data[0] ? 1 : 0;
+ return mapData;
+ }
+
+ // For a bi-planar or tri-planar format we have to set the parameters correctly:
+ for (int i = 0; i < mapData.planeCount; ++i) {
+ mapData.bytesPerLine[i] = CVPixelBufferGetBytesPerRowOfPlane(m_buffer, i);
+ mapData.dataSize[i] = mapData.bytesPerLine[i]*CVPixelBufferGetHeightOfPlane(m_buffer, i);
+ mapData.data[i] = static_cast<uchar*>(CVPixelBufferGetBaseAddressOfPlane(m_buffer, i));
+ }
+
+ return mapData;
+}
+
+void AVFVideoBuffer::unmap()
+{
+ if (m_mode != QtVideo::MapMode::NotMapped) {
+ CVPixelBufferUnlockBaseAddress(m_buffer, m_mode == QtVideo::MapMode::ReadOnly
+ ? kCVPixelBufferLock_ReadOnly
+ : 0);
+ m_mode = QtVideo::MapMode::NotMapped;
+ }
+}
+
+static MTLPixelFormat rhiTextureFormatToMetalFormat(QRhiTexture::Format f)
+{
+ switch (f) {
+ default:
+ case QRhiTexture::UnknownFormat:
+ return MTLPixelFormatInvalid;
+ case QRhiTexture::RGBA8:
+ return MTLPixelFormatRGBA8Unorm;
+ case QRhiTexture::BGRA8:
+ return MTLPixelFormatBGRA8Unorm;
+ case QRhiTexture::R8:
+ return MTLPixelFormatR8Unorm;
+ case QRhiTexture::RG8:
+ return MTLPixelFormatRG8Unorm;
+ case QRhiTexture::R16:
+ return MTLPixelFormatR16Unorm;
+ case QRhiTexture::RG16:
+ return MTLPixelFormatRG16Unorm;
+
+ case QRhiTexture::RGBA16F:
+ return MTLPixelFormatRGBA16Float;
+ case QRhiTexture::RGBA32F:
+ return MTLPixelFormatRGBA32Float;
+ case QRhiTexture::R16F:
+ return MTLPixelFormatR16Float;
+ case QRhiTexture::R32F:
+ return MTLPixelFormatR32Float;
+ }
+}
+
+
+quint64 AVFVideoBuffer::textureHandle(QRhi *, int plane) const
+{
+ auto *textureDescription = QVideoTextureHelper::textureDescription(m_format.pixelFormat());
+ int bufferPlanes = CVPixelBufferGetPlaneCount(m_buffer);
+// qDebug() << "texture handle" << plane << m_rhi << (m_rhi->backend() == QRhi::Metal) << bufferPlanes;
+ if (plane > 0 && plane >= bufferPlanes)
+ return 0;
+ if (!m_rhi)
+ return 0;
+ if (m_rhi->backend() == QRhi::Metal) {
+ if (!cvMetalTexture[plane]) {
+ size_t width = CVPixelBufferGetWidth(m_buffer);
+ size_t height = CVPixelBufferGetHeight(m_buffer);
+ width = textureDescription->widthForPlane(width, plane);
+ height = textureDescription->heightForPlane(height, plane);
+
+ // Create a CoreVideo pixel buffer backed Metal texture image from the texture cache.
+ QMutexLocker locker(sink->textureCacheMutex());
+ if (!metalCache && sink->cvMetalTextureCache)
+ metalCache = CVMetalTextureCacheRef(CFRetain(sink->cvMetalTextureCache));
+ if (!metalCache) {
+ qWarning("cannot create texture, Metal texture cache was released?");
+ return {};
+ }
+ auto ret = CVMetalTextureCacheCreateTextureFromImage(
+ kCFAllocatorDefault,
+ metalCache,
+ m_buffer, nil,
+ rhiTextureFormatToMetalFormat(textureDescription->textureFormat[plane]),
+ width, height,
+ plane,
+ &cvMetalTexture[plane]);
+
+ if (ret != kCVReturnSuccess)
+ qWarning() << "texture creation failed" << ret;
+// auto t = CVMetalTextureGetTexture(cvMetalTexture[plane]);
+// qDebug() << " metal texture is" << quint64(cvMetalTexture[plane]) << width << height;
+// qDebug() << " " << t.iosurfacePlane << t.pixelFormat << t.width << t.height;
+ }
+
+ // Get a Metal texture using the CoreVideo Metal texture reference.
+// qDebug() << " -> " << quint64(CVMetalTextureGetTexture(cvMetalTexture[plane]));
+ return cvMetalTexture[plane] ? quint64(CVMetalTextureGetTexture(cvMetalTexture[plane])) : 0;
+ } else if (m_rhi->backend() == QRhi::OpenGLES2) {
+#if QT_CONFIG(opengl)
+#ifdef Q_OS_MACOS
+ CVOpenGLTextureCacheFlush(sink->cvOpenGLTextureCache, 0);
+ // Create a CVPixelBuffer-backed OpenGL texture image from the texture cache.
+ const CVReturn cvret = CVOpenGLTextureCacheCreateTextureFromImage(
+ kCFAllocatorDefault,
+ sink->cvOpenGLTextureCache,
+ m_buffer,
+ nil,
+ &cvOpenGLTexture);
+ if (cvret != kCVReturnSuccess)
+ qWarning() << "OpenGL texture creation failed" << cvret;
+
+ Q_ASSERT(CVOpenGLTextureGetTarget(cvOpenGLTexture) == GL_TEXTURE_RECTANGLE);
+ // Get an OpenGL texture name from the CVPixelBuffer-backed OpenGL texture image.
+ return CVOpenGLTextureGetName(cvOpenGLTexture);
+#endif
+#ifdef Q_OS_IOS
+ CVOpenGLESTextureCacheFlush(sink->cvOpenGLESTextureCache, 0);
+ // Create a CVPixelBuffer-backed OpenGL texture image from the texture cache.
+ const CVReturn cvret = CVOpenGLESTextureCacheCreateTextureFromImage(
+ kCFAllocatorDefault,
+ sink->cvOpenGLESTextureCache,
+ m_buffer,
+ nil,
+ GL_TEXTURE_2D,
+ GL_RGBA,
+ CVPixelBufferGetWidth(m_buffer),
+ CVPixelBufferGetHeight(m_buffer),
+ GL_RGBA,
+ GL_UNSIGNED_BYTE,
+ 0,
+ &cvOpenGLESTexture);
+ if (cvret != kCVReturnSuccess)
+ qWarning() << "OpenGL ES texture creation failed" << cvret;
+
+ // Get an OpenGL texture name from the CVPixelBuffer-backed OpenGL texture image.
+ return CVOpenGLESTextureGetName(cvOpenGLESTexture);
+#endif
+#endif
+ }
+ return 0;
+}
diff --git a/src/plugins/multimedia/darwin/avfvideobuffer_p.h b/src/plugins/multimedia/darwin/avfvideobuffer_p.h
new file mode 100644
index 000000000..f70961c15
--- /dev/null
+++ b/src/plugins/multimedia/darwin/avfvideobuffer_p.h
@@ -0,0 +1,64 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFVIDEOBUFFER_H
+#define AVFVIDEOBUFFER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qhwvideobuffer_p.h>
+#include <private/qcore_mac_p.h>
+
+#include <QtCore/qobject.h>
+#include <QtCore/qmutex.h>
+#include <avfvideosink_p.h>
+
+#include <CoreVideo/CVImageBuffer.h>
+
+#import "Metal/Metal.h"
+#import "MetalKit/MetalKit.h"
+
+QT_BEGIN_NAMESPACE
+
+struct AVFMetalTexture;
// Wraps a CoreVideo CVImageBufferRef in Qt's QHwVideoBuffer interface so a
// camera/viewfinder frame can either be mapped to CPU memory or handed to the
// GPU as a texture (Metal, or OpenGL/OpenGL ES depending on the platform).
// Instances are created per frame by the capture delegate; the matching
// implementation lives in avfvideobuffer.mm.
class AVFVideoBuffer : public QHwVideoBuffer
{
public:
    // 'sink' provides the texture caches; 'buffer' is retained for the
    // lifetime of this object (see the .mm file).
    AVFVideoBuffer(AVFVideoSinkInterface *sink, CVImageBufferRef buffer);
    ~AVFVideoBuffer();

    // CPU mapping of the pixel buffer planes (QHwVideoBuffer interface).
    MapData map(QtVideo::MapMode mode);
    void unmap();

    // Returns a native texture handle for the given plane, created through
    // the sink's CoreVideo texture cache for the active RHI backend.
    virtual quint64 textureHandle(QRhi *, int plane) const;

    // Frame format derived from the underlying CVImageBuffer.
    QVideoFrameFormat videoFormat() const { return m_format; }

private:
    AVFVideoSinkInterface *sink = nullptr;

    // Per-plane Metal texture references; 'mutable' because textureHandle()
    // is const but caches the created textures.
    mutable CVMetalTextureRef cvMetalTexture[3] = {};
    mutable QCFType<CVMetalTextureCacheRef> metalCache;
#if defined(Q_OS_MACOS)
    mutable CVOpenGLTextureRef cvOpenGLTexture = nullptr;
#elif defined(Q_OS_IOS)
    mutable CVOpenGLESTextureRef cvOpenGLESTexture = nullptr;
#endif

    CVImageBufferRef m_buffer = nullptr;
    QtVideo::MapMode m_mode = QtVideo::MapMode::NotMapped;
    QVideoFrameFormat m_format;
};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/avfvideosink.mm b/src/plugins/multimedia/darwin/avfvideosink.mm
new file mode 100644
index 000000000..f4c8bdb2e
--- /dev/null
+++ b/src/plugins/multimedia/darwin/avfvideosink.mm
@@ -0,0 +1,228 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfvideosink_p.h"
+
+#include <rhi/qrhi.h>
+#include <QtGui/qopenglcontext.h>
+
+#include <AVFoundation/AVFoundation.h>
+#import <QuartzCore/CATransaction.h>
+
+#if __has_include(<AppKit/AppKit.h>)
+#include <AppKit/AppKit.h>
+#endif
+
+#if __has_include(<UIKit/UIKit.h>)
+#include <UIKit/UIKit.h>
+#endif
+
+QT_USE_NAMESPACE
+
// AVFVideoSink is a thin QPlatformVideoSink that forwards RHI and native-size
// changes to the currently attached AVFVideoSinkInterface (the actual
// renderer / layer owner).
AVFVideoSink::AVFVideoSink(QVideoSink *parent)
    : QPlatformVideoSink(parent)
{
}

AVFVideoSink::~AVFVideoSink()
{
}
+
// Remember the RHI and propagate it to the attached interface so it can
// (re)create its CoreVideo texture caches for the new backend.
void AVFVideoSink::setRhi(QRhi *rhi)
{
    if (rhi == m_rhi)
        return;

    m_rhi = rhi;

    if (auto *iface = m_interface)
        iface->setRhi(rhi);
}
+
// Store the native frame size and let the interface adjust its CALayer
// geometry; only real changes are propagated.
void AVFVideoSink::setNativeSize(QSize size)
{
    if (nativeSize() == size)
        return;

    QPlatformVideoSink::setNativeSize(size);

    if (auto *iface = m_interface)
        iface->nativeSizeChanged();
}
+
// Attach (or detach, when passed nullptr) the rendering interface and hand it
// the current RHI immediately so it is ready to produce textures.
void AVFVideoSink::setVideoSinkInterface(AVFVideoSinkInterface *interface)
{
    m_interface = interface;
    if (interface != nullptr)
        interface->setRhi(m_rhi);
}
+
// Releases the retained Objective-C objects (layer, output settings) and the
// CoreVideo texture caches owned by this interface.
AVFVideoSinkInterface::~AVFVideoSinkInterface()
{
    if (m_layer)
        [m_layer release];
    if (m_outputSettings)
        [m_outputSettings release];
    freeTextureCaches();
}
+
// Drops all platform texture caches. The null checks are required:
// CFRelease(NULL) is not a no-op and would crash.
void AVFVideoSinkInterface::freeTextureCaches()
{
    if (cvMetalTextureCache)
        CFRelease(cvMetalTextureCache);
    cvMetalTextureCache = nullptr;
#if defined(Q_OS_MACOS)
    if (cvOpenGLTextureCache)
        CFRelease(cvOpenGLTextureCache);
    cvOpenGLTextureCache = nullptr;
#elif defined(Q_OS_IOS)
    if (cvOpenGLESTextureCache)
        CFRelease(cvOpenGLESTextureCache);
    cvOpenGLESTextureCache = nullptr;
#endif
}
+
// Switch this interface over to a new sink: detach from the old one first,
// then attach and re-apply the current configuration.
void AVFVideoSinkInterface::setVideoSink(AVFVideoSink *sink)
{
    if (m_sink == sink)
        return;

    if (m_sink)
        m_sink->setVideoSinkInterface(nullptr);

    m_sink = sink;
    if (!m_sink)
        return;

    m_sink->setVideoSinkInterface(this);
    reconfigure();
}
+
// (Re)creates the CoreVideo texture cache matching the RHI backend:
// Metal everywhere, OpenGL on macOS, OpenGL ES on iOS. On failure m_rhi is
// reset to nullptr so callers fall back to CPU frame mapping. The texture
// cache mutex guards against concurrent texture creation on the capture queue.
void AVFVideoSinkInterface::setRhi(QRhi *rhi)
{
    QMutexLocker locker(&m_textureCacheMutex);
    if (m_rhi == rhi)
        return;
    freeTextureCaches();
    m_rhi = rhi;

    if (!rhi)
        return;
    if (rhi->backend() == QRhi::Metal) {
        const auto *metal = static_cast<const QRhiMetalNativeHandles *>(rhi->nativeHandles());

        // Create a Metal Core Video texture cache from the pixel buffer.
        Q_ASSERT(!cvMetalTextureCache);
        if (CVMetalTextureCacheCreate(
                        kCFAllocatorDefault,
                        nil,
                        (id<MTLDevice>)metal->dev,
                        nil,
                        &cvMetalTextureCache) != kCVReturnSuccess) {
            qWarning() << "Metal texture cache creation failed";
            m_rhi = nullptr;
        }
    } else if (rhi->backend() == QRhi::OpenGLES2) {
#if QT_CONFIG(opengl)
#ifdef Q_OS_MACOS
        const auto *gl = static_cast<const QRhiGles2NativeHandles *>(rhi->nativeHandles());

        // The CGL context/pixel format are needed to build the GL texture cache.
        auto nsGLContext = gl->context->nativeInterface<QNativeInterface::QCocoaGLContext>()->nativeContext();
        auto nsGLPixelFormat = nsGLContext.pixelFormat.CGLPixelFormatObj;

        // Create an OpenGL CoreVideo texture cache from the pixel buffer.
        // NOTE: CVOpenGLTextureCacheCreate returns kCVReturnSuccess (0) on
        // success, so a non-zero return enters the error branch.
        if (CVOpenGLTextureCacheCreate(
                        kCFAllocatorDefault,
                        nullptr,
                        reinterpret_cast<CGLContextObj>(nsGLContext.CGLContextObj),
                        nsGLPixelFormat,
                        nil,
                        &cvOpenGLTextureCache)) {
            qWarning() << "OpenGL texture cache creation failed";
            m_rhi = nullptr;
        }
#endif
#ifdef Q_OS_IOS
        // Create an OpenGL CoreVideo texture cache from the pixel buffer.
        // NOTE(review): relies on the current EAGL context being the one the
        // RHI renders with — confirm at the call site.
        if (CVOpenGLESTextureCacheCreate(
                        kCFAllocatorDefault,
                        nullptr,
                        [EAGLContext currentContext],
                        nullptr,
                        &cvOpenGLESTextureCache)) {
            qWarning() << "OpenGL texture cache creation failed";
            m_rhi = nullptr;
        }
#endif
#else
        m_rhi = nullptr;
#endif // QT_CONFIG(opengl)
    }
    // Output settings depend on the backend (BGRA-only for GL), so refresh them.
    setOutputSettings();
}
+
// Replace the rendered CALayer. Retain the incoming layer before releasing
// the old one (messaging nil is a harmless no-op either way), then
// reconfigure the output for the new target.
void AVFVideoSinkInterface::setLayer(CALayer *layer)
{
    if (m_layer == layer)
        return;

    [layer retain];
    [m_layer release];
    m_layer = layer;

    reconfigure();
}
+
// Builds the AVFoundation output settings dictionary describing which
// CVPixelBuffer formats we accept. The OpenGL backend can only consume
// 32BGRA; Metal (and CPU mapping) accept the full list below.
void AVFVideoSinkInterface::setOutputSettings()
{
    if (m_outputSettings)
        [m_outputSettings release];
    m_outputSettings = nil;

    // Set pixel format
    NSDictionary *dictionary = nil;
    if (m_rhi && m_rhi->backend() == QRhi::OpenGLES2) {
#if QT_CONFIG(opengl)
        dictionary = @{(NSString *)kCVPixelBufferPixelFormatTypeKey:
                        @(kCVPixelFormatType_32BGRA)
#ifndef Q_OS_IOS // On iOS this key generates a warning about unsupported key.
                        , (NSString *)kCVPixelBufferOpenGLCompatibilityKey: @true
#endif // Q_OS_IOS
        };
#endif
    } else {
        // All formats QAVFHelpers can translate to a QVideoFrameFormat.
        dictionary = @{(NSString *)kCVPixelBufferPixelFormatTypeKey:
                        @[
                            @(kCVPixelFormatType_32BGRA),
                            @(kCVPixelFormatType_32RGBA),
                            @(kCVPixelFormatType_422YpCbCr8),
                            @(kCVPixelFormatType_422YpCbCr8_yuvs),
                            @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
                            @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
                            @(kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange),
                            @(kCVPixelFormatType_420YpCbCr10BiPlanarFullRange),
                            @(kCVPixelFormatType_OneComponent8),
                            @(kCVPixelFormatType_OneComponent16),
                            @(kCVPixelFormatType_420YpCbCr8Planar),
                            @(kCVPixelFormatType_420YpCbCr8PlanarFullRange)
                        ]
#ifndef Q_OS_IOS // This key is not supported and generates a warning.
                        , (NSString *)kCVPixelBufferMetalCompatibilityKey: @true
#endif // Q_OS_IOS
        };
    }

    // Own a copy; released on the next call or in the destructor.
    m_outputSettings = [[NSDictionary alloc] initWithDictionary:dictionary];
}
+
// Resize the CALayer to the sink's native size, suppressing the implicit
// animation CoreAnimation would otherwise run for geometry changes.
void AVFVideoSinkInterface::updateLayerBounds()
{
    if (!m_layer)
        return;

    const QSize size = nativeSize();
    const CGRect rect = QRectF(0, 0, size.width(), size.height()).toCGRect();

    [CATransaction begin];
    [CATransaction setDisableActions: YES]; // disable animation/flicks
    m_layer.frame = rect;
    m_layer.bounds = rect;
    [CATransaction commit];
}
+
+#include "moc_avfvideosink_p.cpp"
diff --git a/src/plugins/multimedia/darwin/avfvideosink_p.h b/src/plugins/multimedia/darwin/avfvideosink_p.h
new file mode 100644
index 000000000..9b66e79f2
--- /dev/null
+++ b/src/plugins/multimedia/darwin/avfvideosink_p.h
@@ -0,0 +1,99 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFVIDEOWINDOWCONTROL_H
+#define AVFVIDEOWINDOWCONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/QMutex>
+#include "private/qplatformvideosink_p.h"
+
+Q_FORWARD_DECLARE_OBJC_CLASS(CALayer);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVPlayerLayer);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureVideoPreviewLayer);
+
+#include <CoreVideo/CVBase.h>
+#include <CoreVideo/CVPixelBuffer.h>
+#include <CoreVideo/CVImageBuffer.h>
+
+#import "Metal/Metal.h"
+#import "MetalKit/MetalKit.h"
+
+QT_BEGIN_NAMESPACE
+
+class AVFVideoSinkInterface;
+
// Qt-facing video sink for the darwin plugin. It owns no rendering resources
// itself; it forwards RHI and size changes to an AVFVideoSinkInterface
// implemented by the renderer (camera viewfinder or player layer).
class AVFVideoSink : public QPlatformVideoSink
{
    Q_OBJECT

public:
    AVFVideoSink(QVideoSink *parent = nullptr);
    virtual ~AVFVideoSink();

    // QPlatformVideoSink interface
public:
    void setRhi(QRhi *rhi) override;

    // NOTE(review): no 'override' here — presumably hides/extends a base
    // member; confirm against QPlatformVideoSink before changing.
    void setNativeSize(QSize size);

    // Called by the interface itself on attach/detach; passes the current RHI on.
    void setVideoSinkInterface(AVFVideoSinkInterface *interface);

private:
    AVFVideoSinkInterface *m_interface = nullptr;
    QRhi *m_rhi = nullptr;
};
+
// Base class for the actual AVFoundation renderers (camera viewfinder,
// media player). Owns the per-backend CoreVideo texture caches and the
// CALayer / output-settings state shared by its subclasses.
class AVFVideoSinkInterface
{
public:
    ~AVFVideoSinkInterface();

    void setVideoSink(AVFVideoSink *sink);


    // Re-apply the current configuration (layer, sizes) — subclass-specific.
    virtual void reconfigure() = 0;
    virtual void setRhi(QRhi *);
    virtual void setLayer(CALayer *layer);
    virtual void setOutputSettings();

    // Serializes texture-cache access between render and capture threads.
    QMutex *textureCacheMutex() { return &m_textureCacheMutex; }

    QRhi *rhi() const { return m_rhi; }

    void updateLayerBounds();
    void nativeSizeChanged() { updateLayerBounds(); }
    QSize nativeSize() const { return m_sink ? m_sink->nativeSize() : QSize(); }

    // Texture caches are public so AVFVideoBuffer can create textures from them.
    CVMetalTextureCacheRef cvMetalTextureCache = nullptr;
#if defined(Q_OS_MACOS)
    CVOpenGLTextureCacheRef cvOpenGLTextureCache = nullptr;
#elif defined(Q_OS_IOS)
    CVOpenGLESTextureCacheRef cvOpenGLESTextureCache = nullptr;
#endif
private:
    void freeTextureCaches();

protected:

    AVFVideoSink *m_sink = nullptr;
    QRhi *m_rhi = nullptr;
    CALayer *m_layer = nullptr;        // retained
    NSDictionary *m_outputSettings = nullptr; // retained
    QMutex m_textureCacheMutex;
};
+
+
+QT_END_NAMESPACE
+
+#endif // AVFVIDEOWINDOWCONTROL_H
diff --git a/src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate.mm b/src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate.mm
new file mode 100644
index 000000000..1b2d4b15d
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate.mm
@@ -0,0 +1,98 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfcamerasession_p.h"
+#include "avfaudiopreviewdelegate_p.h"
+
+QT_USE_NAMESPACE
+
// Receives captured audio sample buffers and plays them back ("audio
// monitoring") through an AVSampleBufferAudioRenderer driven by a render
// synchronizer. Rendering happens on a private serial dispatch queue.
@implementation AVFAudioPreviewDelegate
{
@private
    AVSampleBufferAudioRenderer *m_audioRenderer;
    AVFCameraSession *m_session;
    AVSampleBufferRenderSynchronizer *m_audioBufferSynchronizer;
    dispatch_queue_t m_audioPreviewQueue;
}

- (id)init
{
    if (self = [super init]) {
        m_session = nil;
        m_audioBufferSynchronizer = [[AVSampleBufferRenderSynchronizer alloc] init];
        m_audioRenderer = [[AVSampleBufferAudioRenderer alloc] init];
        [m_audioBufferSynchronizer addRenderer:m_audioRenderer];
        // NOTE(review): m_audioPreviewQueue is only created in
        // setupWithCaptureSession:; until then it stays nil (zeroed ivar).
        return self;
    }
    return nil;
}

// AVCaptureAudioDataOutputSampleBufferDelegate — called on the capture queue.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
  fromConnection:(AVCaptureConnection *)connection
{
    Q_UNUSED(connection);
    Q_ASSERT(m_session);

    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
        qWarning() << Q_FUNC_INFO << "sample buffer is not ready, skipping.";
        return;
    }

    // Keep the buffer alive across the async hop to the preview queue.
    CFRetain(sampleBuffer);

    dispatch_async(m_audioPreviewQueue, ^{
        [self renderAudioSampleBuffer:sampleBuffer];
        CFRelease(sampleBuffer);
    });
}

- (void)renderAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    Q_ASSERT(sampleBuffer);
    Q_ASSERT(m_session);

    if (m_audioBufferSynchronizer && m_audioRenderer) {
        [m_audioRenderer enqueueSampleBuffer:sampleBuffer];
        // Kick off playback the first time; anchor the timeline to this buffer.
        if (m_audioBufferSynchronizer.rate == 0)
            [m_audioBufferSynchronizer setRate:1 time:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
    }
}

// Re-registers self as the audio output's delegate (after session changes).
- (void)resetAudioPreviewDelegate
{
    [m_session->audioOutput() setSampleBufferDelegate:self queue:m_audioPreviewQueue];
}

- (void)setupWithCaptureSession: (AVFCameraSession*)session
                audioOutputDevice: (NSString*)deviceId
{
    m_session = session;

    m_audioPreviewQueue = dispatch_queue_create("audio-preview-queue", nullptr);
    [m_session->audioOutput() setSampleBufferDelegate:self queue:m_audioPreviewQueue];
#ifdef Q_OS_MACOS
    // Route monitoring to the requested output device (macOS only).
    m_audioRenderer.audioOutputDeviceUniqueID = deviceId;
#endif
}

- (void)setVolume: (float)volume
{
    m_audioRenderer.volume = volume;
}

- (void)setMuted: (bool)muted
{
    m_audioRenderer.muted = muted;
}

-(void)dealloc {
    m_session = nil;
    [m_audioRenderer release];
    [m_audioBufferSynchronizer release];
    // NOTE(review): if setupWithCaptureSession: was never called the queue is
    // nil, and dispatch_release(NULL) is not valid — confirm the lifecycle
    // guarantees setup always runs before dealloc.
    dispatch_release(m_audioPreviewQueue);

    [super dealloc];
}

@end
diff --git a/src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate_p.h b/src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate_p.h
new file mode 100644
index 000000000..8fa06ef39
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate_p.h
@@ -0,0 +1,40 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFAUDIOPREVIEWDELEGATE_P_H
+#define AVFAUDIOPREVIEWDELEGATE_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qglobal.h>
+
+#include <AVFoundation/AVFoundation.h>
+
+QT_BEGIN_NAMESPACE
+
+class AVFCameraSession;
+
+QT_END_NAMESPACE
+
// Plays captured audio back to the user (audio monitoring) by feeding the
// capture session's audio sample buffers into an AVSampleBufferAudioRenderer.
@interface AVFAudioPreviewDelegate : NSObject<AVCaptureAudioDataOutputSampleBufferDelegate>

- (id)init;
// Must be called before any sample buffers arrive; creates the preview queue
// and registers self as the session's audio delegate.
- (void)setupWithCaptureSession: (AVFCameraSession*)session
                audioOutputDevice: (NSString*)deviceId;
- (void)renderAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (void)resetAudioPreviewDelegate;
- (void)setVolume: (float)volume;
- (void)setMuted: (bool)muted;

@end
+
+#endif // AVFAUDIOPREVIEWDELEGATE_P_H
diff --git a/src/plugins/multimedia/darwin/camera/avfcamera.mm b/src/plugins/multimedia/darwin/camera/avfcamera.mm
new file mode 100644
index 000000000..05cdbae17
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamera.mm
@@ -0,0 +1,89 @@
+// Copyright (C) 2022 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfcameradebug_p.h"
+#include "avfcamera_p.h"
+#include "avfcamerasession_p.h"
+#include "avfcameraservice_p.h"
+#include "avfcamerautility_p.h"
+#include "avfcamerarenderer_p.h"
+#include <qmediacapturesession.h>
+
+QT_USE_NAMESPACE
+
// AVFCamera forwards active-state, device and format changes to the
// AVFCameraSession owned by the capture service it is attached to.
AVFCamera::AVFCamera(QCamera *camera)
   : QAVFCameraBase(camera)
{
    Q_ASSERT(camera);
}

AVFCamera::~AVFCamera()
{
}
+
// Start or stop the camera. Starting is refused while no device is selected;
// the configuration is (re)applied after the session goes active, and the
// change is always announced via activeChanged().
void AVFCamera::setActive(bool active)
{
    if (active == m_active)
        return;
    if (active && m_cameraDevice.isNull())
        return;

    m_active = active;

    if (m_session)
        m_session->setActive(active);
    if (active)
        updateCameraConfiguration();

    Q_EMIT activeChanged(m_active);
}
+
// Select a different capture device. The session is informed and the format
// is reset so a suitable default is picked for the new device.
void AVFCamera::setCamera(const QCameraDevice &camera)
{
    if (camera == m_cameraDevice)
        return;

    m_cameraDevice = camera;

    if (m_session)
        m_session->setActiveCamera(camera);

    setCameraFormat({});
}
+
// Apply a capture format. A null format selects the device's best default;
// formats not offered by the current device are rejected.
bool AVFCamera::setCameraFormat(const QCameraFormat &format)
{
    const bool useDefault = format.isNull();
    if (!useDefault && !m_cameraDevice.videoFormats().contains(format))
        return false;

    m_cameraFormat = useDefault ? findBestCameraFormat(m_cameraDevice) : format;

    if (m_session)
        m_session->setCameraFormat(m_cameraFormat);

    return true;
}
+
// Attach this camera to a (possibly different) capture service. The previous
// session is fully detached first; the new session then receives the current
// device, format and active state in that order.
void AVFCamera::setCaptureSession(QPlatformMediaCaptureSession *session)
{
    AVFCameraService *captureSession = static_cast<AVFCameraService *>(session);
    if (m_service == captureSession)
        return;

    if (m_session) {
        // Drop signal connections and clear the old session's camera state.
        m_session->disconnect(this);
        m_session->setActiveCamera({});
        m_session->setCameraFormat({});
    }

    m_service = captureSession;
    if (!m_service) {
        m_session = nullptr;
        return;
    }

    m_session = m_service->session();
    Q_ASSERT(m_session);

    m_session->setActiveCamera(m_cameraDevice);
    m_session->setCameraFormat(m_cameraFormat);
    m_session->setActive(m_active);
}
+
+#include "moc_avfcamera_p.cpp"
diff --git a/src/plugins/multimedia/darwin/camera/avfcamera_p.h b/src/plugins/multimedia/darwin/camera/avfcamera_p.h
new file mode 100644
index 000000000..3c3e6da09
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamera_p.h
@@ -0,0 +1,48 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFCAMERA_H
+#define AVFCAMERA_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qavfcamerabase_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class AVFCameraSession;
+class AVFCameraService;
+class AVFCameraSession;
+
+class AVFCamera : public QAVFCameraBase
+{
+Q_OBJECT
+public:
+ AVFCamera(QCamera *camera);
+ ~AVFCamera();
+
+ void setActive(bool activce) override;
+
+ void setCamera(const QCameraDevice &camera) override;
+ bool setCameraFormat(const QCameraFormat &format) override;
+
+ void setCaptureSession(QPlatformMediaCaptureSession *) override;
+
+private:
+ friend class AVFCameraSession;
+ AVFCameraService *m_service = nullptr;
+ AVFCameraSession *m_session = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/camera/avfcameradebug_p.h b/src/plugins/multimedia/darwin/camera/avfcameradebug_p.h
new file mode 100644
index 000000000..f93c85142
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcameradebug_p.h
@@ -0,0 +1,26 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFDEBUG_H
+#define AVFDEBUG_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qtmultimediaglobal.h"
+
+#include <QtCore/qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+Q_DECLARE_LOGGING_CATEGORY(qLcCamera)
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/camera/avfcamerarenderer.mm b/src/plugins/multimedia/darwin/camera/avfcamerarenderer.mm
new file mode 100644
index 000000000..0c9ab3f2c
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamerarenderer.mm
@@ -0,0 +1,292 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qabstractvideobuffer.h"
+#include "private/qcameradevice_p.h"
+#include "private/qvideoframe_p.h"
+#include "avfcamerarenderer_p.h"
+#include "avfcamerasession_p.h"
+#include "avfcameraservice_p.h"
+#include "avfcameradebug_p.h"
+#include "avfcamera_p.h"
+#include <avfvideosink_p.h>
+#include <avfvideobuffer_p.h>
+#include "qvideosink.h"
+#include "qavfhelpers_p.h"
+
+#include <rhi/qrhi.h>
+
+#import <AVFoundation/AVFoundation.h>
+
+#ifdef Q_OS_IOS
+#include <QtGui/qopengl.h>
+#endif
+
+#include <QtMultimedia/qvideoframeformat.h>
+
+QT_USE_NAMESPACE
+
// Receives video sample buffers from the AVCaptureVideoDataOutput and turns
// them into QVideoFrames for the renderer. Runs on the capture dispatch queue.
@interface AVFCaptureFramesDelegate : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>

- (AVFCaptureFramesDelegate *) initWithRenderer:(AVFCameraRenderer*)renderer;

- (void) captureOutput:(AVCaptureOutput *)captureOutput
         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection;

@end

@implementation AVFCaptureFramesDelegate
{
@private
    AVFCameraRenderer *m_renderer; // not owned; renderer outlives the delegate use
}

- (AVFCaptureFramesDelegate *) initWithRenderer:(AVFCameraRenderer*)renderer
{
    if (!(self = [super init]))
        return nil;

    self->m_renderer = renderer;
    return self;
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
  fromConnection:(AVCaptureConnection *)connection
{
    Q_UNUSED(connection);
    Q_UNUSED(captureOutput);

    // NB: on iOS captureOutput/connection can be nil (when recording a video -
    // avfmediaassetwriter).

    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    auto buffer = std::make_unique<AVFVideoBuffer>(m_renderer, imageBuffer);
    auto format = buffer->videoFormat();
    // Unconvertible pixel formats are silently dropped.
    if (!format.isValid()) {
        return;
    }

    QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(buffer), format);
    m_renderer->syncHandleViewfinderFrame(frame);
}

@end
+
// Creates the frames delegate and tracks device-orientation changes so the
// capture connection's video orientation can follow the UI.
AVFCameraRenderer::AVFCameraRenderer(QObject *parent)
   : QObject(parent)
{
    m_viewfinderFramesDelegate = [[AVFCaptureFramesDelegate alloc] initWithRenderer:this];
    connect(&m_orientationHandler, &QVideoOutputOrientationHandler::orientationChanged,
            this, &AVFCameraRenderer::deviceOrientationChanged);
}
+
// Tears down the video data output and releases the delegate, queue and
// (iOS) texture cache.
AVFCameraRenderer::~AVFCameraRenderer()
{
    // configureAVCaptureSession() may never have been called, in which case
    // m_cameraSession is null — guard the dereference. (m_videoDataOutput is
    // nil then too; messaging nil below is a harmless no-op.)
    if (m_cameraSession)
        [m_cameraSession->captureSession() removeOutput:m_videoDataOutput];
    [m_viewfinderFramesDelegate release];
    [m_videoDataOutput release];

    if (m_delegateQueue)
        dispatch_release(m_delegateQueue);
#ifdef Q_OS_IOS
    if (m_textureCache)
        CFRelease(m_textureCache);
#endif
}
+
// Re-applies layer geometry and orientation after a sink/layer change.
void AVFCameraRenderer::reconfigure()
{
    QMutexLocker lock(&m_vfMutex);

    // ### This is a hack, need to use a reliable way to determine the size and not use the preview layer
    if (m_layer)
        m_sink->setNativeSize(QSize(m_layer.bounds.size.width, m_layer.bounds.size.height));
    nativeSizeChanged();
    deviceOrientationChanged();
}
+
// Chooses the pixel format for the video data output: prefer the session's
// configured camera format, otherwise fall back to the base-class defaults.
void AVFCameraRenderer::setOutputSettings()
{
    if (!m_videoDataOutput)
        return;

    if (m_cameraSession) {
        const auto format = m_cameraSession->cameraFormat();
        if (format.pixelFormat() != QVideoFrameFormat::Format_Invalid)
            setPixelFormat(format.pixelFormat(), QCameraFormatPrivate::getColorRange(format));
    }

    // If no output settings set from above,
    // it's most likely because the rhi is OpenGL
    // and the pixel format is not BGRA.
    // We force this in the base class implementation
    if (!m_outputSettings)
        AVFVideoSinkInterface::setOutputSettings();

    if (m_outputSettings)
        m_videoDataOutput.videoSettings = m_outputSettings;
}
+
// Hooks this renderer's video data output into the given capture session and
// starts delivering frames to the delegate on a dedicated serial queue.
void AVFCameraRenderer::configureAVCaptureSession(AVFCameraSession *cameraSession)
{
    m_cameraSession = cameraSession;
    // Use the pointer-to-member connect syntax (as the constructor already
    // does) so the connection is verified at compile time rather than at run
    // time via string lookup.
    connect(m_cameraSession, &AVFCameraSession::readyToConfigureConnections,
            this, &AVFCameraRenderer::updateCaptureConnection);

    m_needsHorizontalMirroring = false;

    m_videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];

    // Configure video output: frames are delivered to the delegate on this queue.
    m_delegateQueue = dispatch_queue_create("vf_queue", nullptr);
    [m_videoDataOutput
        setSampleBufferDelegate:m_viewfinderFramesDelegate
        queue:m_delegateQueue];

    [m_cameraSession->captureSession() addOutput:m_videoDataOutput];
}
+
// Called once the session is ready: configures mirroring for front cameras
// and applies the current device orientation to the capture connection.
void AVFCameraRenderer::updateCaptureConnection()
{
    AVCaptureConnection *connection = [m_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    if (connection == nil || !m_cameraSession->videoCaptureDevice())
        return;

    // Frames of front-facing cameras should be mirrored horizontally (it's the default when using
    // AVCaptureVideoPreviewLayer but not with AVCaptureVideoDataOutput)
    if (connection.isVideoMirroringSupported)
        connection.videoMirrored = m_cameraSession->videoCaptureDevice().position == AVCaptureDevicePositionFront;

    // If the connection doesn't support mirroring, we'll have to do it ourselves
    m_needsHorizontalMirroring = !connection.isVideoMirrored
            && m_cameraSession->videoCaptureDevice().position == AVCaptureDevicePositionFront;

    deviceOrientationChanged();
}
+
// Rotates the capture connection to match the device/UI orientation.
// angle < 0 (the default) means "query the orientation handler".
void AVFCameraRenderer::deviceOrientationChanged(int angle)
{
    AVCaptureConnection *connection = [m_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    if (connection == nil || !m_cameraSession->videoCaptureDevice())
        return;

    if (!connection.supportsVideoOrientation)
        return;

    const int effectiveAngle = angle < 0 ? m_orientationHandler.currentOrientation() : angle;

    // 180 degrees has no AVCaptureVideoOrientation counterpart here;
    // keep whatever orientation was set last.
    if (effectiveAngle == 180)
        return;

    AVCaptureVideoOrientation orientation;
    if (effectiveAngle == 90)
        orientation = AVCaptureVideoOrientationLandscapeRight;
    else if (effectiveAngle == 270)
        orientation = AVCaptureVideoOrientationLandscapeLeft;
    else
        orientation = AVCaptureVideoOrientationPortrait;

    connection.videoOrientation = orientation;
}
+
// Can be called from a non-main thread (the capture queue). Publishes the new
// frame and, if no frame is already pending, queues handleViewfinderFrame()
// on this object's thread; a pending frame is simply overwritten so delivery
// never falls behind capture.
void AVFCameraRenderer::syncHandleViewfinderFrame(const QVideoFrame &frame)
{
    Q_EMIT newViewfinderFrame(frame);

    QMutexLocker lock(&m_vfMutex);

    if (!m_lastViewfinderFrame.isValid()) {
        static QMetaMethod handleViewfinderFrameSlot = metaObject()->method(
                    metaObject()->indexOfMethod("handleViewfinderFrame()"));

        handleViewfinderFrameSlot.invoke(this, Qt::QueuedConnection);
    }

    m_lastViewfinderFrame = frame;
}
+
// Accessor for the AVCaptureVideoDataOutput created in configureAVCaptureSession().
AVCaptureVideoDataOutput *AVFCameraRenderer::videoDataOutput() const
{
    return m_videoDataOutput;
}

// Accessor for the sample-buffer delegate receiving viewfinder frames.
AVFCaptureFramesDelegate *AVFCameraRenderer::captureDelegate() const
{
    return m_viewfinderFramesDelegate;
}

// Re-registers the delegate on its queue (used after the encoder temporarily
// takes over the sample buffer delegate).
void AVFCameraRenderer::resetCaptureDelegate() const
{
    [m_videoDataOutput setSampleBufferDelegate:m_viewfinderFramesDelegate queue:m_delegateQueue];
}
+
// Runs on this object's thread: takes the pending frame under the lock and
// delivers it to the sink without holding the lock.
void AVFCameraRenderer::handleViewfinderFrame()
{
    QVideoFrame pending;
    {
        QMutexLocker locker(&m_vfMutex);
        pending = m_lastViewfinderFrame;
        m_lastViewfinderFrame = QVideoFrame();
    }

    if (pending.isValid() && m_sink) {
        // frame.setMirroed(m_needsHorizontalMirroring) ?
        m_sink->setVideoFrame(pending);
    }
}
+
// Builds m_outputSettings for the requested pixel format / color range,
// falling back to 32BGRA when the format has no CoreVideo equivalent and
// warning when the device's data output does not offer the format at all.
void AVFCameraRenderer::setPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat,
                                       QVideoFrameFormat::ColorRange colorRange)
{
    // With the OpenGL RHI backend the viewfinder only works with 32BGRA;
    // nothing is configured here — the base-class setOutputSettings() will
    // force BGRA later (see AVFCameraRenderer::setOutputSettings()).
    if (rhi() && rhi()->backend() == QRhi::OpenGLES2) {
        if (pixelFormat != QVideoFrameFormat::Format_BGRA8888)
            qWarning() << "OpenGL rhi backend only supports 32BGRA pixel format.";
        return;
    }

    // Default to 32BGRA pixel formats on the viewfinder, in case the requested
    // format can't be used (shouldn't happen unless the developer sets a wrong
    // camera format on the camera).
    auto cvPixelFormat = QAVFHelpers::toCVPixelFormat(pixelFormat, colorRange);
    if (cvPixelFormat == CvPixelFormatInvalid) {
        cvPixelFormat = kCVPixelFormatType_32BGRA;
        // Fixed: the message previously claimed "ARGB32" while the fallback
        // actually set is kCVPixelFormatType_32BGRA.
        qWarning() << "QCamera::setCameraFormat: couldn't convert requested pixel format, using 32BGRA";
    }

    bool isSupported = false;
    NSArray *supportedPixelFormats = m_videoDataOutput.availableVideoCVPixelFormatTypes;
    for (NSNumber *currentPixelFormat in supportedPixelFormats) {
        if ([currentPixelFormat unsignedIntValue] == cvPixelFormat) {
            isSupported = true;
            break;
        }
    }

    if (!isSupported) {
        qWarning() << "QCamera::setCameraFormat: requested pixel format not supported. Did you use a camera format from another camera?";
        return;
    }

    NSDictionary *outputSettings = @{
        (NSString *)
        kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithUnsignedInt:cvPixelFormat]
#ifndef Q_OS_IOS // On iOS this key generates a warning about 'unsupported key'.
        ,
        (NSString *)kCVPixelBufferMetalCompatibilityKey : @true
#endif // Q_OS_IOS
    };
    if (m_outputSettings)
        [m_outputSettings release];
    m_outputSettings = [[NSDictionary alloc] initWithDictionary:outputSettings];
}
+
+#include "moc_avfcamerarenderer_p.cpp"
+
diff --git a/src/plugins/multimedia/darwin/camera/avfcamerarenderer_p.h b/src/plugins/multimedia/darwin/camera/avfcamerarenderer_p.h
new file mode 100644
index 000000000..57f665cd6
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamerarenderer_p.h
@@ -0,0 +1,95 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFCAMERARENDERER_H
+#define AVFCAMERARENDERER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qobject.h>
+#include <QtMultimedia/qvideoframe.h>
+#include <QtCore/qmutex.h>
+#include <avfvideosink_p.h>
+#include <private/qvideooutputorientationhandler_p.h>
+
+#include <CoreVideo/CVBase.h>
+#include <CoreVideo/CVPixelBuffer.h>
+#include <CoreVideo/CVImageBuffer.h>
+#ifdef Q_OS_IOS
+#include <CoreVideo/CVOpenGLESTexture.h>
+#include <CoreVideo/CVOpenGLESTextureCache.h>
+#endif
+
+#include <dispatch/dispatch.h>
+
+Q_FORWARD_DECLARE_OBJC_CLASS(AVFCaptureFramesDelegate);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureVideoDataOutput);
+
+QT_BEGIN_NAMESPACE
+
+class AVFCameraSession;
+class AVFCameraService;
+class AVFCameraRenderer;
+class AVFVideoSink;
+
+class AVFCameraRenderer : public QObject, public AVFVideoSinkInterface
+{
+Q_OBJECT
+public:
+ AVFCameraRenderer(QObject *parent = nullptr);
+ ~AVFCameraRenderer();
+
+ void reconfigure() override;
+ void setOutputSettings() override;
+
+ void configureAVCaptureSession(AVFCameraSession *cameraSession);
+ void syncHandleViewfinderFrame(const QVideoFrame &frame);
+
+ AVCaptureVideoDataOutput *videoDataOutput() const;
+
+ AVFCaptureFramesDelegate *captureDelegate() const;
+ void resetCaptureDelegate() const;
+
+ void setPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat,
+ QVideoFrameFormat::ColorRange colorRange);
+
+Q_SIGNALS:
+ void newViewfinderFrame(const QVideoFrame &frame);
+
+private Q_SLOTS:
+ void handleViewfinderFrame();
+ void updateCaptureConnection();
+public Q_SLOTS:
+ void deviceOrientationChanged(int angle = -1);
+
+private:
+ AVFCaptureFramesDelegate *m_viewfinderFramesDelegate = nullptr;
+ AVFCameraSession *m_cameraSession = nullptr;
+ AVCaptureVideoDataOutput *m_videoDataOutput = nullptr;
+
+ bool m_needsHorizontalMirroring = false;
+
+#ifdef Q_OS_IOS
+ CVOpenGLESTextureCacheRef m_textureCache = nullptr;
+#endif
+
+ QVideoFrame m_lastViewfinderFrame;
+ QMutex m_vfMutex;
+ dispatch_queue_t m_delegateQueue;
+ QVideoOutputOrientationHandler m_orientationHandler;
+
+ friend class CVImageVideoBuffer;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/camera/avfcameraservice.mm b/src/plugins/multimedia/darwin/camera/avfcameraservice.mm
new file mode 100644
index 000000000..b25fb50a9
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcameraservice.mm
@@ -0,0 +1,169 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QtCore/qvariant.h>
+#include <QtCore/qdebug.h>
+
+#include "avfcameraservice_p.h"
+#include "avfcamera_p.h"
+#include "avfcamerasession_p.h"
+#include "avfimagecapture_p.h"
+#include "avfcamerarenderer_p.h"
+#include "avfimagecapture_p.h"
+#include "avfmediaencoder_p.h"
+#include <qmediadevices.h>
+#include <private/qplatformaudioinput_p.h>
+#include <private/qplatformaudiooutput_p.h>
+#include <qaudioinput.h>
+#include <qaudiooutput.h>
+
+QT_USE_NAMESPACE
+
+// Creates the capture session that all attached controls share.
+AVFCameraService::AVFCameraService()
+{
+    m_session = new AVFCameraSession(this);
+}
+
+AVFCameraService::~AVFCameraService()
+{
+    // delete on a null pointer is a no-op, so no null check is required.
+    delete m_session;
+}
+
+// Returns the currently attached camera control (may be null).
+QPlatformCamera *AVFCameraService::camera()
+{
+    return m_cameraControl;
+}
+
+// Attaches a new camera control, detaching the previous one first.
+void AVFCameraService::setCamera(QPlatformCamera *camera)
+{
+    AVFCamera *control = static_cast<AVFCamera *>(camera);
+    if (m_cameraControl == control)
+        return;
+
+    if (m_cameraControl) {
+        // Drop any camera -> encoder signal connections before the old
+        // control is detached.
+        if (m_encoder)
+            m_cameraControl->disconnect(m_encoder);
+        m_cameraControl->setCaptureSession(nullptr);
+    }
+
+    m_cameraControl = control;
+
+    if (m_cameraControl)
+        m_cameraControl->setCaptureSession(this);
+
+    emit cameraChanged();
+}
+
+// Returns the currently attached image-capture control (may be null).
+QPlatformImageCapture *AVFCameraService::imageCapture()
+{
+    return m_imageCaptureControl;
+}
+
+// Attaches a new image-capture control, detaching the previous one first.
+void AVFCameraService::setImageCapture(QPlatformImageCapture *imageCapture)
+{
+    AVFImageCapture *control = static_cast<AVFImageCapture *>(imageCapture);
+    if (m_imageCaptureControl == control)
+        return;
+
+    if (m_imageCaptureControl)
+        m_imageCaptureControl->setCaptureSession(nullptr);
+
+    m_imageCaptureControl = control;
+    if (m_imageCaptureControl)
+        m_imageCaptureControl->setCaptureSession(this);
+}
+
+// Returns the currently attached media recorder control (may be null).
+QPlatformMediaRecorder *AVFCameraService::mediaRecorder()
+{
+    return m_encoder;
+}
+
+// Swaps the active encoder control, detaching the previous one first.
+void AVFCameraService::setMediaRecorder(QPlatformMediaRecorder *recorder)
+{
+    auto *newEncoder = static_cast<AVFMediaEncoder *>(recorder);
+    if (newEncoder == m_encoder)
+        return;
+
+    if (m_encoder)
+        m_encoder->setCaptureSession(nullptr);
+
+    m_encoder = newEncoder;
+    if (newEncoder)
+        newEncoder->setCaptureSession(this);
+
+    emit encoderChanged();
+}
+
+// Selects the audio input endpoint and subscribes to the owning QAudioInput
+// so device, mute and volume changes are forwarded to the capture session.
+void AVFCameraService::setAudioInput(QPlatformAudioInput *input)
+{
+    if (m_audioInput == input)
+        return;
+    if (m_audioInput)
+        m_audioInput->q->disconnect(this);
+
+    m_audioInput = input;
+
+    if (input) {
+        connect(m_audioInput->q, &QAudioInput::destroyed, this, &AVFCameraService::audioInputDestroyed);
+        connect(m_audioInput->q, &QAudioInput::deviceChanged, this, &AVFCameraService::audioInputChanged);
+        connect(m_audioInput->q, &QAudioInput::mutedChanged, this, &AVFCameraService::setAudioInputMuted);
+        connect(m_audioInput->q, &QAudioInput::volumeChanged, this, &AVFCameraService::setAudioInputVolume);
+    }
+    // Also runs when input became null so the session drops its device.
+    audioInputChanged();
+}
+
+// Selects the audio output endpoint and subscribes to the owning
+// QAudioOutput, mirroring setAudioInput().
+void AVFCameraService::setAudioOutput(QPlatformAudioOutput *output)
+{
+    if (m_audioOutput == output)
+        return;
+    if (m_audioOutput)
+        m_audioOutput->q->disconnect(this);
+
+    m_audioOutput = output;
+
+    if (m_audioOutput) {
+        connect(m_audioOutput->q, &QAudioOutput::destroyed, this, &AVFCameraService::audioOutputDestroyed);
+        connect(m_audioOutput->q, &QAudioOutput::deviceChanged, this, &AVFCameraService::audioOutputChanged);
+        connect(m_audioOutput->q, &QAudioOutput::mutedChanged, this, &AVFCameraService::setAudioOutputMuted);
+        connect(m_audioOutput->q, &QAudioOutput::volumeChanged, this, &AVFCameraService::setAudioOutputVolume);
+    }
+    // Also runs when output became null so the session drops its device.
+    audioOutputChanged();
+}
+
+// The slots below simply forward to the capture session, which owns the
+// actual AVFoundation inputs/outputs.  m_session is created in the
+// constructor and only deleted in the destructor, so it is always valid.
+void AVFCameraService::audioInputChanged()
+{
+    m_session->updateAudioInput();
+}
+
+void AVFCameraService::audioOutputChanged()
+{
+    m_session->updateAudioOutput();
+}
+
+void AVFCameraService::setAudioInputMuted(bool muted)
+{
+    m_session->setAudioInputMuted(muted);
+}
+
+void AVFCameraService::setAudioInputVolume(float volume)
+{
+    m_session->setAudioInputVolume(volume);
+}
+
+void AVFCameraService::setAudioOutputMuted(bool muted)
+{
+    m_session->setAudioOutputMuted(muted);
+}
+
+void AVFCameraService::setAudioOutputVolume(float volume)
+{
+    m_session->setAudioOutputVolume(volume);
+}
+
+// Routes the viewfinder preview into the given sink.
+void AVFCameraService::setVideoPreview(QVideoSink *sink)
+{
+    m_session->setVideoSink(sink);
+}
+
+#include "moc_avfcameraservice_p.cpp"
diff --git a/src/plugins/multimedia/darwin/camera/avfcameraservice_p.h b/src/plugins/multimedia/darwin/camera/avfcameraservice_p.h
new file mode 100644
index 000000000..f3ef8d08e
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcameraservice_p.h
@@ -0,0 +1,84 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFCAMERASERVICE_H
+#define AVFCAMERASERVICE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qobject.h>
+#include <QtCore/qset.h>
+#include <private/qplatformmediacapture_p.h>
+
+Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureDevice);
+
+QT_BEGIN_NAMESPACE
+class QPlatformCamera;
+class QPlatformMediaRecorder;
+class AVFCamera;
+class AVFImageCapture;
+class AVFCameraSession;
+class AVFMediaEncoder;
+
+// Darwin implementation of QPlatformMediaCaptureSession: wires the camera,
+// image-capture and recorder controls plus the audio endpoints to a shared
+// AVFCameraSession (which owns the underlying AVCaptureSession).
+class AVFCameraService : public QPlatformMediaCaptureSession
+{
+    Q_OBJECT
+public:
+    AVFCameraService();
+    ~AVFCameraService();
+
+    QPlatformCamera *camera() override;
+    void setCamera(QPlatformCamera *camera) override;
+
+    QPlatformImageCapture *imageCapture() override;
+    void setImageCapture(QPlatformImageCapture *imageCapture) override;
+
+    QPlatformMediaRecorder *mediaRecorder() override;
+    void setMediaRecorder(QPlatformMediaRecorder *recorder) override;
+
+    void setAudioInput(QPlatformAudioInput *) override;
+    void setAudioOutput(QPlatformAudioOutput *) override;
+
+    void setVideoPreview(QVideoSink *sink) override;
+
+    // Typed accessors for the plugin-internal controls; all may be null
+    // except session(), which exists for the service's whole lifetime.
+    AVFCameraSession *session() const { return m_session; }
+    AVFCamera *avfCameraControl() const { return m_cameraControl; }
+    AVFMediaEncoder *recorderControl() const { return m_encoder; }
+    AVFImageCapture *avfImageCaptureControl() const { return m_imageCaptureControl; }
+
+    QPlatformAudioInput *audioInput() { return m_audioInput; }
+    QPlatformAudioOutput *audioOutput() { return m_audioOutput; }
+
+public Q_SLOTS:
+    // Endpoint-destroyed handlers detach so we never dereference a dead q.
+    void audioInputDestroyed() { setAudioInput(nullptr); }
+    void audioInputChanged();
+    void audioOutputDestroyed() { setAudioOutput(nullptr); }
+    void audioOutputChanged();
+
+    void setAudioInputMuted(bool muted);
+    void setAudioInputVolume(float volume);
+    void setAudioOutputMuted(bool muted);
+    void setAudioOutputVolume(float volume);
+
+private:
+    QPlatformAudioInput *m_audioInput = nullptr;
+    QPlatformAudioOutput *m_audioOutput = nullptr;
+
+    AVFCameraSession *m_session = nullptr;
+    AVFCamera *m_cameraControl = nullptr;
+    AVFMediaEncoder *m_encoder = nullptr;
+    AVFImageCapture *m_imageCaptureControl = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/camera/avfcamerasession.mm b/src/plugins/multimedia/darwin/camera/avfcamerasession.mm
new file mode 100644
index 000000000..52e2eadfa
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamerasession.mm
@@ -0,0 +1,513 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfcameradebug_p.h"
+#include "avfcamerasession_p.h"
+#include "avfcameraservice_p.h"
+#include "avfcamera_p.h"
+#include "avfcamerarenderer_p.h"
+#include "avfimagecapture_p.h"
+#include "avfmediaencoder_p.h"
+#include "avfcamerautility_p.h"
+#include <avfvideosink_p.h>
+
+#include <CoreFoundation/CoreFoundation.h>
+#include <Foundation/Foundation.h>
+
+#include <QtCore/qcoreapplication.h>
+#include <QtCore/qdatetime.h>
+#include <QtCore/qurl.h>
+#include <QtCore/qelapsedtimer.h>
+#include <QtCore/qpermissions.h>
+#include <QtCore/qpointer.h>
+
+#include <private/qplatformaudioinput_p.h>
+#include <private/qplatformaudiooutput_p.h>
+
+#include <QtCore/qdebug.h>
+
+QT_USE_NAMESPACE
+
+// Objective-C observer that relays AVCaptureSession notifications
+// (runtime error / started / stopped) to the AVFCameraSession object.
+@interface AVFCameraSessionObserver : NSObject
+
+- (AVFCameraSessionObserver *) initWithCameraSession:(AVFCameraSession*)session;
+- (void) processRuntimeError:(NSNotification *)notification;
+- (void) processSessionStarted:(NSNotification *)notification;
+- (void) processSessionStopped:(NSNotification *)notification;
+
+@end
+
+@implementation AVFCameraSessionObserver
+{
+@private
+    AVFCameraSession *m_session;
+    AVCaptureSession *m_captureSession;
+}
+
+- (AVFCameraSessionObserver *) initWithCameraSession:(AVFCameraSession*)session
+{
+    if (!(self = [super init]))
+        return nil;
+
+    self->m_session = session;
+    self->m_captureSession = session->captureSession();
+
+    // Keep the capture session alive while we observe it; the matching
+    // release is in dealloc.
+    [m_captureSession retain];
+    [[NSNotificationCenter defaultCenter] addObserver:self
+                                         selector:@selector(processRuntimeError:)
+                                         name:AVCaptureSessionRuntimeErrorNotification
+                                         object:m_captureSession];
+
+    [[NSNotificationCenter defaultCenter] addObserver:self
+                                         selector:@selector(processSessionStarted:)
+                                         name:AVCaptureSessionDidStartRunningNotification
+                                         object:m_captureSession];
+
+    [[NSNotificationCenter defaultCenter] addObserver:self
+                                         selector:@selector(processSessionStopped:)
+                                         name:AVCaptureSessionDidStopRunningNotification
+                                         object:m_captureSession];
+
+    return self;
+}
+
+- (void) dealloc
+{
+    // Unregister exactly the three notifications registered in init.
+    [[NSNotificationCenter defaultCenter] removeObserver:self
+                                         name:AVCaptureSessionRuntimeErrorNotification
+                                         object:m_captureSession];
+
+    [[NSNotificationCenter defaultCenter] removeObserver:self
+                                         name:AVCaptureSessionDidStartRunningNotification
+                                         object:m_captureSession];
+
+    [[NSNotificationCenter defaultCenter] removeObserver:self
+                                         name:AVCaptureSessionDidStopRunningNotification
+                                         object:m_captureSession];
+    [m_captureSession release];
+    [super dealloc];
+}
+
+// The callbacks below may run on an arbitrary thread; invokeMethod with
+// Qt::AutoConnection delivers directly on the same thread and queues the
+// call otherwise, so the slots always run on m_session's thread.
+- (void) processRuntimeError:(NSNotification *)notification
+{
+    Q_UNUSED(notification);
+    QMetaObject::invokeMethod(m_session, "processRuntimeError", Qt::AutoConnection);
+}
+
+- (void) processSessionStarted:(NSNotification *)notification
+{
+    Q_UNUSED(notification);
+    QMetaObject::invokeMethod(m_session, "processSessionStarted", Qt::AutoConnection);
+}
+
+- (void) processSessionStopped:(NSNotification *)notification
+{
+    Q_UNUSED(notification);
+    QMetaObject::invokeMethod(m_session, "processSessionStopped", Qt::AutoConnection);
+}
+
+@end
+
+AVFCameraSession::AVFCameraSession(AVFCameraService *service, QObject *parent)
+    : QObject(parent)
+    , m_service(service)
+    , m_defaultCodec(0)
+{
+    // The observer retains m_captureSession itself and relays its
+    // notifications back into this object's slots.
+    m_captureSession = [[AVCaptureSession alloc] init];
+    m_observer = [[AVFCameraSessionObserver alloc] initWithCameraSession:this];
+}
+
+AVFCameraSession::~AVFCameraSession()
+{
+    // Detach and release the AVFoundation inputs/outputs we retained before
+    // releasing the session itself.
+    if (m_videoInput) {
+        [m_captureSession removeInput:m_videoInput];
+        [m_videoInput release];
+    }
+
+    if (m_audioInput) {
+        [m_captureSession removeInput:m_audioInput];
+        [m_audioInput release];
+    }
+
+    if (m_audioOutput) {
+        [m_captureSession removeOutput:m_audioOutput];
+        [m_audioOutput release];
+    }
+
+    // The renderer is a QObject owned by this session (see setVideoOutput).
+    if (m_videoOutput)
+        delete m_videoOutput;
+
+    [m_observer release];
+    [m_captureSession release];
+}
+
+// Selects the camera device to capture from; no-op if unchanged.
+void AVFCameraSession::setActiveCamera(const QCameraDevice &info)
+{
+    if (m_activeCameraDevice == info)
+        return;
+
+    m_activeCameraDevice = info;
+
+    // Only rebuild the capture input when camera access is actually granted.
+    if (checkCameraPermission())
+        updateVideoInput();
+}
+
+// Applies a new camera format; no-op if the format is unchanged.
+void AVFCameraSession::setCameraFormat(const QCameraFormat &format)
+{
+    if (m_cameraFormat == format)
+        return;
+
+    updateCameraFormat(format);
+}
+
+QCameraFormat AVFCameraSession::cameraFormat() const
+{
+    return m_cameraFormat;
+}
+
+// Stores the requested format and, when a capture device is attached,
+// switches the device to the matching AVCaptureDeviceFormat (if any).
+void AVFCameraSession::updateCameraFormat(const QCameraFormat &format)
+{
+    m_cameraFormat = format;
+
+    AVCaptureDevice *captureDevice = videoCaptureDevice();
+    if (!captureDevice)
+        return;
+
+    AVCaptureDeviceFormat *newFormat = qt_convert_to_capture_device_format(captureDevice, format);
+    if (newFormat)
+        qt_set_active_format(captureDevice, newFormat, false);
+}
+
+// Takes ownership of the renderer; any previous renderer is destroyed.
+void AVFCameraSession::setVideoOutput(AVFCameraRenderer *output)
+{
+    if (m_videoOutput == output)
+        return;
+
+    delete m_videoOutput;
+    m_videoOutput = output;
+    if (output)
+        output->configureAVCaptureSession(this);
+}
+
+// Lazily creates the audio data output and adds it to the capture session.
+void AVFCameraSession::addAudioCapture()
+{
+    if (!m_audioOutput) {
+        m_audioOutput = [[AVCaptureAudioDataOutput alloc] init];
+        // NOTE(review): if canAddOutput fails, m_audioOutput stays non-null,
+        // so the output is never retried nor added — confirm this is intended.
+        if (m_audioOutput && [m_captureSession canAddOutput:m_audioOutput]) {
+            [m_captureSession addOutput:m_audioOutput];
+        } else {
+            qWarning() << Q_FUNC_INFO << "failed to add audio output";
+        }
+    }
+}
+
+// Device backing the current video input, or null when none is attached.
+AVCaptureDevice *AVFCameraSession::videoCaptureDevice() const
+{
+    return m_videoInput ? m_videoInput.device : nullptr;
+}
+
+// Device backing the current audio input, or null when none is attached.
+AVCaptureDevice *AVFCameraSession::audioCaptureDevice() const
+{
+    return m_audioInput ? m_audioInput.device : nullptr;
+}
+
+// Applies the capture (input) volume; an active mute overrides the level
+// but the requested volume is remembered for un-mute.
+void AVFCameraSession::setAudioInputVolume(float volume)
+{
+    m_inputVolume = volume;
+
+    if (m_inputMuted)
+        volume = 0.0;
+
+#ifdef Q_OS_MACOS
+    // Per-channel capture volume is only available on macOS.
+    AVCaptureConnection *audioInputConnection = [m_audioOutput connectionWithMediaType:AVMediaTypeAudio];
+    NSArray<AVCaptureAudioChannel *> *audioChannels = audioInputConnection.audioChannels;
+    if (audioChannels) {
+        for (AVCaptureAudioChannel *channel in audioChannels) {
+            channel.volume = volume;
+        }
+    }
+#endif
+}
+
+void AVFCameraSession::setAudioInputMuted(bool muted)
+{
+    m_inputMuted = muted;
+    // Re-apply the stored volume so the mute state takes effect at once.
+    setAudioInputVolume(m_inputVolume);
+}
+
+// Output (preview) volume and mute are delegated to the audio preview
+// delegate, when one exists (see updateAudioOutput()).
+void AVFCameraSession::setAudioOutputVolume(float volume)
+{
+    if (m_audioPreviewDelegate)
+        [m_audioPreviewDelegate setVolume:volume];
+}
+
+void AVFCameraSession::setAudioOutputMuted(bool muted)
+{
+    if (m_audioPreviewDelegate)
+        [m_audioPreviewDelegate setMuted:muted];
+}
+
+// Whether the capture session is currently considered running.
+bool AVFCameraSession::isActive() const
+{
+    return m_active;
+}
+
+// Starts or stops the AVCaptureSession.  activeChanged is emitted from the
+// session-started/stopped notification handlers, not here.
+void AVFCameraSession::setActive(bool active)
+{
+    if (m_active == active)
+        return;
+
+    // Log the transition *before* updating m_active, so "old -> new" is
+    // accurate (previously both sides printed the new value).
+    qCDebug(qLcCamera) << Q_FUNC_INFO << m_active << " -> " << active;
+
+    m_active = active;
+
+    if (active) {
+        if (!m_activeCameraDevice.isNull()) {
+            Q_EMIT readyToConfigureConnections();
+            m_defaultCodec = 0;
+            defaultCodec();
+        }
+
+        applyImageEncoderSettings();
+
+        // According to the doc, the capture device must be locked before
+        // startRunning to prevent the format we set to be overridden by the
+        // session preset.
+        [videoCaptureDevice() lockForConfiguration:nil];
+        [m_captureSession startRunning];
+        [videoCaptureDevice() unlockForConfiguration];
+    } else {
+        [m_captureSession stopRunning];
+    }
+}
+
+// Invoked (via the observer) on AVCaptureSessionRuntimeErrorNotification.
+void AVFCameraSession::processRuntimeError()
+{
+    qWarning() << tr("Runtime camera error");
+    // Announce the transition to inactive, consistent with
+    // processSessionStopped(); a bare assignment left observers stale.
+    if (m_active) {
+        m_active = false;
+        Q_EMIT activeChanged(m_active);
+    }
+    Q_EMIT error(QCamera::CameraError, tr("Runtime camera error"));
+}
+
+// Session lifecycle notifications (relayed by AVFCameraSessionObserver);
+// these keep m_active in sync with the real AVCaptureSession state and are
+// the only places that emit activeChanged on start/stop.
+void AVFCameraSession::processSessionStarted()
+{
+    qCDebug(qLcCamera) << Q_FUNC_INFO;
+    if (!m_active) {
+        m_active = true;
+        Q_EMIT activeChanged(m_active);
+    }
+}
+
+void AVFCameraSession::processSessionStopped()
+{
+    qCDebug(qLcCamera) << Q_FUNC_INFO;
+    if (m_active) {
+        m_active = false;
+        Q_EMIT activeChanged(m_active);
+    }
+}
+
+// Looks up the AVCaptureDevice matching the active camera's unique id, or
+// returns null when no camera is selected.
+AVCaptureDevice *AVFCameraSession::createVideoCaptureDevice()
+{
+    const QByteArray uniqueId = m_activeCameraDevice.id();
+    if (uniqueId.isEmpty())
+        return nullptr;
+
+    return [AVCaptureDevice deviceWithUniqueID:
+                [NSString stringWithUTF8String:uniqueId.constData()]];
+}
+
+// Looks up the AVCaptureDevice for the service's selected audio input, or
+// returns null when none is selected.
+AVCaptureDevice *AVFCameraSession::createAudioCaptureDevice()
+{
+    const auto *audioInput = m_service->audioInput();
+    const QByteArray uniqueId = audioInput ? audioInput->device.id() : QByteArray();
+    if (uniqueId.isEmpty())
+        return nullptr;
+
+    return [AVCaptureDevice deviceWithUniqueID:
+                [NSString stringWithUTF8String:uniqueId.constData()]];
+}
+
+// (Re)creates the video device input for the active camera and adds it to
+// the capture session; any previous input is removed and released first.
+void AVFCameraSession::attachVideoInputDevice()
+{
+    if (!checkCameraPermission())
+        return;
+
+    if (m_videoInput) {
+        [m_captureSession removeInput:m_videoInput];
+        [m_videoInput release];
+        m_videoInput = nullptr;
+    }
+
+    AVCaptureDevice *videoDevice = createVideoCaptureDevice();
+    if (!videoDevice)
+        return;
+
+    m_videoInput = [AVCaptureDeviceInput
+            deviceInputWithDevice:videoDevice
+            error:nil];
+    if (m_videoInput && [m_captureSession canAddInput:m_videoInput]) {
+        // deviceInputWithDevice: returns an autoreleased object; retain it
+        // since the pointer is kept beyond the current autorelease pool.
+        [m_videoInput retain];
+        [m_captureSession addInput:m_videoInput];
+    } else {
+        // Don't keep a pointer to the (non-retained) autoreleased input on
+        // failure — it would dangle once the autorelease pool drains.
+        m_videoInput = nullptr;
+        qWarning() << "Failed to create video device input";
+    }
+}
+
+// (Re)creates the audio device input for the selected microphone and adds
+// it to the capture session; any previous input is removed/released first.
+void AVFCameraSession::attachAudioInputDevice()
+{
+    if (m_audioInput) {
+        [m_captureSession removeInput:m_audioInput];
+        [m_audioInput release];
+        m_audioInput = nullptr;
+    }
+
+    AVCaptureDevice *audioDevice = createAudioCaptureDevice();
+    if (!audioDevice)
+        return;
+
+    m_audioInput = [AVCaptureDeviceInput
+            deviceInputWithDevice:audioDevice
+            error:nil];
+
+    if (m_audioInput && [m_captureSession canAddInput:m_audioInput]) {
+        // Retain: deviceInputWithDevice: returns an autoreleased object and
+        // we keep the pointer beyond the current autorelease pool.
+        [m_audioInput retain];
+        [m_captureSession addInput:m_audioInput];
+    } else {
+        // Don't keep a pointer to the (non-retained) autoreleased input on
+        // failure — it would dangle once the autorelease pool drains.
+        m_audioInput = nullptr;
+        qWarning() << "Failed to create audio device input";
+    }
+}
+
+// Forwards to the image-capture control so still-image settings are applied
+// before the session starts; returns false when no control is attached.
+bool AVFCameraSession::applyImageEncoderSettings()
+{
+    if (AVFImageCapture *control = m_service->avfImageCaptureControl())
+        return control->applySettings();
+
+    return false;
+}
+
+// Returns the codec of the device's active format; the value is cached in
+// m_defaultCodec until it is reset to 0 (done on activation in setActive).
+FourCharCode AVFCameraSession::defaultCodec()
+{
+    if (!m_defaultCodec) {
+        if (AVCaptureDevice *device = videoCaptureDevice()) {
+            AVCaptureDeviceFormat *format = device.activeFormat;
+            if (!format || !format.formatDescription)
+                return m_defaultCodec;
+            m_defaultCodec = CMVideoFormatDescriptionGetCodecType(format.formatDescription);
+        }
+    }
+    return m_defaultCodec;
+}
+
+// Remembers the platform sink backing the given QVideoSink (null allowed)
+// and pushes it to the renderer.
+void AVFCameraSession::setVideoSink(QVideoSink *sink)
+{
+    auto *videoSink = sink ? static_cast<AVFVideoSink *>(sink->platformVideoSink()) : nullptr;
+
+    if (m_videoSink == videoSink)
+        return;
+
+    m_videoSink = videoSink;
+
+    updateVideoOutput();
+}
+
+// Rebuilds the video input (and, on first use, the renderer output) for the
+// active camera.  An in-progress recording is paused around the change.
+void AVFCameraSession::updateVideoInput()
+{
+    auto recorder = m_service->recorderControl();
+    if (recorder && recorder->state() == QMediaRecorder::RecordingState)
+        recorder->toggleRecord(false);
+
+    // Batch all topology changes into a single atomic reconfiguration.
+    [m_captureSession beginConfiguration];
+
+    attachVideoInputDevice();
+    if (!m_activeCameraDevice.isNull() && !m_videoOutput) {
+        setVideoOutput(new AVFCameraRenderer(this));
+        connect(m_videoOutput, &AVFCameraRenderer::newViewfinderFrame,
+                    this, &AVFCameraSession::newViewfinderFrame);
+        updateVideoOutput();
+    }
+    if (m_videoOutput)
+        m_videoOutput->deviceOrientationChanged();
+
+    [m_captureSession commitConfiguration];
+
+    if (recorder && recorder->state() == QMediaRecorder::RecordingState)
+        recorder->toggleRecord(true);
+    Q_EMIT readyToConfigureConnections();
+}
+
+// Rebuilds the audio input to match the service's selected device.  An
+// in-progress recording is paused around the change.
+void AVFCameraSession::updateAudioInput()
+{
+    if (!checkMicrophonePermission())
+        return;
+
+    auto recorder = m_service->recorderControl();
+    if (recorder && recorder->state() == QMediaRecorder::RecordingState)
+        recorder->toggleRecord(false);
+
+    [m_captureSession beginConfiguration];
+    if (m_audioOutput) {
+        // Drop the old input->output connection before swapping the input.
+        AVCaptureConnection *lastConnection = [m_audioOutput connectionWithMediaType:AVMediaTypeAudio];
+        [m_captureSession removeConnection:lastConnection];
+    }
+    attachAudioInputDevice();
+    if (m_audioInput)
+        addAudioCapture();
+    [m_captureSession commitConfiguration];
+
+    if (recorder && recorder->state() == QMediaRecorder::RecordingState)
+        recorder->toggleRecord(true);
+}
+
+// Recreates the audio preview delegate for the service's selected output
+// device; with no device selected, preview is simply torn down.
+void AVFCameraSession::updateAudioOutput()
+{
+    QByteArray deviceId = m_service->audioOutput()
+                                ? m_service->audioOutput()->device.id()
+                                : QByteArray();
+
+    // Release any previous delegate before (possibly) creating a new one.
+    [m_audioPreviewDelegate release];
+    m_audioPreviewDelegate = nil;
+    if (!deviceId.isEmpty()) {
+        m_audioPreviewDelegate = [[AVFAudioPreviewDelegate alloc] init];
+        [m_audioPreviewDelegate setupWithCaptureSession:this
+                                audioOutputDevice:[NSString stringWithUTF8String:
+                                                    deviceId.constData()]];
+    }
+}
+
+// Pushes the current sink (possibly null) to the renderer, if one exists.
+void AVFCameraSession::updateVideoOutput()
+{
+    if (m_videoOutput)
+        m_videoOutput->setVideoSink(m_videoSink);
+}
+
+// Returns true when camera access has been granted via the Qt permission
+// API; logs a warning otherwise.  No permission request is made here.
+bool AVFCameraSession::checkCameraPermission()
+{
+    const QCameraPermission permission;
+    const bool granted = qApp->checkPermission(permission) == Qt::PermissionStatus::Granted;
+    if (!granted)
+        qWarning() << "Access to camera not granted";
+
+    return granted;
+}
+
+// Same check for microphone access.
+bool AVFCameraSession::checkMicrophonePermission()
+{
+    const QMicrophonePermission permission;
+    const bool granted = qApp->checkPermission(permission) == Qt::PermissionStatus::Granted;
+    if (!granted)
+        qWarning() << "Access to microphone not granted";
+
+    return granted;
+}
+
+#include "moc_avfcamerasession_p.cpp"
diff --git a/src/plugins/multimedia/darwin/camera/avfcamerasession_p.h b/src/plugins/multimedia/darwin/camera/avfcamerasession_p.h
new file mode 100644
index 000000000..76e31ab48
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamerasession_p.h
@@ -0,0 +1,132 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFCAMERASESSION_H
+#define AVFCAMERASESSION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qmutex.h>
+#include <QtMultimedia/qcamera.h>
+#include <QVideoFrame>
+#include <qcameradevice.h>
+#include "avfaudiopreviewdelegate_p.h"
+
+#import <AVFoundation/AVFoundation.h>
+
+@class AVFCameraSessionObserver;
+
+QT_BEGIN_NAMESPACE
+
+class AVFCamera;
+class AVFCameraService;
+class AVFCameraRenderer;
+class AVFVideoSink;
+class QVideoSink;
+
+// Owns the AVCaptureSession and all AVFoundation inputs/outputs for one
+// capture session; AVFCameraService forwards everything here.
+class AVFCameraSession : public QObject
+{
+    Q_OBJECT
+public:
+    AVFCameraSession(AVFCameraService *service, QObject *parent = nullptr);
+    ~AVFCameraSession();
+
+    // NOTE(review): lower-case "c" is a naming typo, but the accessor is
+    // public API within the plugin, so it cannot be renamed here.
+    QCameraDevice activecameraDevice() const { return m_activeCameraDevice; }
+    void setActiveCamera(const QCameraDevice &info);
+
+    void setCameraFormat(const QCameraFormat &format);
+    QCameraFormat cameraFormat() const;
+
+    AVFCameraRenderer *videoOutput() const { return m_videoOutput; }
+    AVCaptureAudioDataOutput *audioOutput() const { return m_audioOutput; }
+    AVFAudioPreviewDelegate *audioPreviewDelegate() const { return m_audioPreviewDelegate; }
+
+    AVCaptureSession *captureSession() const { return m_captureSession; }
+    AVCaptureDevice *videoCaptureDevice() const;
+    AVCaptureDevice *audioCaptureDevice() const;
+
+    bool isActive() const;
+
+    // Codec of the device's active format, cached after first query.
+    FourCharCode defaultCodec();
+
+    AVCaptureDeviceInput *videoInput() const { return m_videoInput; }
+    AVCaptureDeviceInput *audioInput() const { return m_audioInput; }
+
+    void setVideoSink(QVideoSink *sink);
+
+    // Rebuild AVFoundation inputs/outputs after a device selection change.
+    void updateVideoInput();
+
+    void updateAudioInput();
+    void updateAudioOutput();
+
+public Q_SLOTS:
+    void setActive(bool active);
+
+    void setAudioInputVolume(float volume);
+    void setAudioInputMuted(bool muted);
+    void setAudioOutputMuted(bool muted);
+    void setAudioOutputVolume(float volume);
+
+    // Relayed from AVFCameraSessionObserver (AVCaptureSession notifications).
+    void processRuntimeError();
+    void processSessionStarted();
+    void processSessionStopped();
+
+Q_SIGNALS:
+    void readyToConfigureConnections();
+    void activeChanged(bool);
+    void error(int error, const QString &errorString);
+    void newViewfinderFrame(const QVideoFrame &frame);
+
+private:
+    void updateCameraFormat(const QCameraFormat &format);
+
+    void setVideoOutput(AVFCameraRenderer *output);
+    void updateVideoOutput();
+
+    void addAudioCapture();
+
+    AVCaptureDevice *createVideoCaptureDevice();
+    AVCaptureDevice *createAudioCaptureDevice();
+    void attachVideoInputDevice();
+    void attachAudioInputDevice();
+    bool checkCameraPermission();
+    bool checkMicrophonePermission();
+
+    bool applyImageEncoderSettings();
+
+    QCameraDevice m_activeCameraDevice;
+    QCameraFormat m_cameraFormat;
+
+    AVFCameraService *m_service;
+    AVCaptureSession *m_captureSession;
+    AVFCameraSessionObserver *m_observer;
+
+    // Owned; deleted in the destructor / replaced in setVideoOutput().
+    AVFCameraRenderer *m_videoOutput = nullptr;
+    AVFVideoSink *m_videoSink = nullptr;
+
+    // Retained Objective-C objects; released in the destructor.
+    AVCaptureDeviceInput *m_videoInput = nullptr;
+    AVCaptureDeviceInput *m_audioInput = nullptr;
+
+    AVCaptureAudioDataOutput *m_audioOutput = nullptr;
+    AVFAudioPreviewDelegate *m_audioPreviewDelegate = nullptr;
+
+    bool m_active = false;
+
+    float m_inputVolume = 1.0;
+    bool m_inputMuted = false;
+
+    FourCharCode m_defaultCodec;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/camera/avfcamerautility.mm b/src/plugins/multimedia/darwin/camera/avfcamerautility.mm
new file mode 100644
index 000000000..1864eb0e8
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamerautility.mm
@@ -0,0 +1,730 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfcamerautility_p.h"
+#include "avfcameradebug_p.h"
+
+#include <QtCore/qvector.h>
+#include <QtCore/qpair.h>
+#include <private/qmultimediautils_p.h>
+#include <private/qcameradevice_p.h>
+#include "avfvideobuffer_p.h"
+#include "qavfhelpers_p.h"
+
+#include <functional>
+#include <algorithm>
+#include <limits>
+#include <tuple>
+
+QT_BEGIN_NAMESPACE
+
+Q_LOGGING_CATEGORY(qLcCamera, "qt.multimedia.camera")
+
+// Derives the (min, max) fps range supported by a connection from its
+// min/max frame durations; components keep their defaults when the
+// corresponding duration is unset/invalid.
+AVFPSRange qt_connection_framerates(AVCaptureConnection *videoConnection)
+{
+    Q_ASSERT(videoConnection);
+
+    AVFPSRange newRange;
+    // "The value in the videoMinFrameDuration is equivalent to the reciprocal
+    // of the maximum framerate, the value in the videoMaxFrameDuration is equivalent
+    // to the reciprocal of the minimum framerate."
+    if (videoConnection.supportsVideoMinFrameDuration) {
+        const CMTime cmMin = videoConnection.videoMinFrameDuration;
+        if (CMTimeCompare(cmMin, kCMTimeInvalid)) { // Has some non-default value:
+            if (const Float64 minSeconds = CMTimeGetSeconds(cmMin))
+                newRange.second = 1. / minSeconds;
+        }
+    }
+
+    if (videoConnection.supportsVideoMaxFrameDuration) {
+        const CMTime cmMax = videoConnection.videoMaxFrameDuration;
+        if (CMTimeCompare(cmMax, kCMTimeInvalid)) {
+            if (const Float64 maxSeconds = CMTimeGetSeconds(cmMax))
+                newRange.first = 1. / maxSeconds;
+        }
+    }
+
+    return newRange;
+}
+
+namespace {
+
+// True when width*height fits in an int (guards later area arithmetic).
+inline bool qt_area_sane(const QSize &size)
+{
+    return !size.isNull() && size.isValid()
+           && std::numeric_limits<int>::max() / size.width() >= size.height();
+}
+
+// Comparator ordering formats by (width, height) through the given
+// comparison template.
+template <template <typename...> class Comp> // std::less or std::greater (or std::equal_to)
+struct ByResolution
+{
+    bool operator() (AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2)const
+    {
+        Q_ASSERT(f1 && f2);
+        const QSize r1(qt_device_format_resolution(f1));
+        const QSize r2(qt_device_format_resolution(f2));
+        // use std::tuple for lexicograpical sorting:
+        const Comp<std::tuple<int, int>> op = {};
+        return op(std::make_tuple(r1.width(), r1.height()),
+                  std::make_tuple(r2.width(), r2.height()));
+    }
+};
+
+// Predicate: format advertises no supported frame-rate ranges.
+struct FormatHasNoFPSRange
+{
+    bool operator() (AVCaptureDeviceFormat *format) const
+    {
+        Q_ASSERT(format);
+        return !format.videoSupportedFrameRateRanges || !format.videoSupportedFrameRateRanges.count;
+    }
+};
+
+// Smallest |maxFrameRate - fps| over the format's supported ranges.
+// Precondition: at least one range exists (asserted).
+Float64 qt_find_min_framerate_distance(AVCaptureDeviceFormat *format, Float64 fps)
+{
+    Q_ASSERT(format && format.videoSupportedFrameRateRanges
+             && format.videoSupportedFrameRateRanges.count);
+
+    AVFrameRateRange *range = [format.videoSupportedFrameRateRanges objectAtIndex:0];
+    Float64 distance = qAbs(range.maxFrameRate - fps);
+    for (NSUInteger i = 1, e = format.videoSupportedFrameRateRanges.count; i < e; ++i) {
+        range = [format.videoSupportedFrameRateRanges objectAtIndex:i];
+        distance = qMin(distance, qAbs(range.maxFrameRate - fps));
+    }
+
+    return distance;
+}
+
+} // Unnamed namespace.
+
+// Finds the device format whose pixel format, resolution and frame-rate
+// range satisfy the given QCameraFormat; among candidates the one with the
+// highest matching max frame rate wins.  Returns nil when nothing matches.
+AVCaptureDeviceFormat *
+qt_convert_to_capture_device_format(AVCaptureDevice *captureDevice,
+                                    const QCameraFormat &cameraFormat,
+                                    const std::function<bool(uint32_t)> &cvFormatValidator)
+{
+    const auto cameraFormatPrivate = QCameraFormatPrivate::handle(cameraFormat);
+    if (!cameraFormatPrivate)
+        return nil;
+
+    const auto requiredCvPixFormat = QAVFHelpers::toCVPixelFormat(cameraFormatPrivate->pixelFormat,
+                                                                  cameraFormatPrivate->colorRange);
+
+    if (requiredCvPixFormat == CvPixelFormatInvalid)
+        return nil;
+
+    AVCaptureDeviceFormat *newFormat = nil;
+    Float64 newFormatMaxFrameRate = {};
+    NSArray<AVCaptureDeviceFormat *> *formats = captureDevice.formats;
+    for (AVCaptureDeviceFormat *format in formats) {
+        CMFormatDescriptionRef formatDesc = format.formatDescription;
+        CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(formatDesc);
+        // The media subtype is compared against the required CoreVideo pixel
+        // format code (they coincide for uncompressed video — TODO confirm
+        // for compressed formats).
+        FourCharCode cvPixFormat = CMVideoFormatDescriptionGetCodecType(formatDesc);
+
+        if (cvPixFormat != requiredCvPixFormat)
+            continue;
+
+        if (cameraFormatPrivate->resolution != QSize(dim.width, dim.height))
+            continue;
+
+        if (cvFormatValidator && !cvFormatValidator(cvPixFormat))
+            continue;
+
+        // Tolerate tiny fps rounding differences between Qt and AVFoundation.
+        const float epsilon = 0.001f;
+        for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) {
+            if (frameRateRange.minFrameRate >= cameraFormatPrivate->minFrameRate - epsilon
+                && frameRateRange.maxFrameRate <= cameraFormatPrivate->maxFrameRate + epsilon
+                && newFormatMaxFrameRate < frameRateRange.maxFrameRate) {
+                newFormat = format;
+                newFormatMaxFrameRate = frameRateRange.maxFrameRate;
+            }
+        }
+    }
+    return newFormat;
+}
+
+// Returns the device's formats with duplicate resolutions collapsed, sorted
+// by ascending resolution.  When two formats share a resolution, the one
+// whose codec equals 'filter' is kept.
+QVector<AVCaptureDeviceFormat *> qt_unique_device_formats(AVCaptureDevice *captureDevice, FourCharCode filter)
+{
+    // 'filter' is the format we prefer if we have duplicates.
+    Q_ASSERT(captureDevice);
+
+    QVector<AVCaptureDeviceFormat *> formats;
+
+    if (!captureDevice.formats || !captureDevice.formats.count)
+        return formats;
+
+    formats.reserve(captureDevice.formats.count);
+    for (AVCaptureDeviceFormat *format in captureDevice.formats) {
+        const QSize resolution(qt_device_format_resolution(format));
+        if (resolution.isNull() || !resolution.isValid())
+            continue;
+        formats << format;
+    }
+
+    if (!formats.size())
+        return formats;
+
+    std::sort(formats.begin(), formats.end(), ByResolution<std::less>());
+
+    // In-place dedup: 'last' is the index of the most recently kept format;
+    // 'size'/'codec' describe that kept format.
+    QSize size(qt_device_format_resolution(formats[0]));
+    FourCharCode codec = CMVideoFormatDescriptionGetCodecType(formats[0].formatDescription);
+    int last = 0;
+    for (int i = 1; i < formats.size(); ++i) {
+        const QSize nextSize(qt_device_format_resolution(formats[i]));
+        if (nextSize == size) {
+            // Same resolution: keep the existing entry if it already has the
+            // preferred codec, otherwise overwrite it with this format.
+            if (codec == filter)
+                continue;
+            formats[last] = formats[i];
+        } else {
+            ++last;
+            formats[last] = formats[i];
+            size = nextSize;
+        }
+        codec = CMVideoFormatDescriptionGetCodecType(formats[i].formatDescription);
+    }
+    formats.resize(last + 1);
+
+    return formats;
+}
+
+// Pixel dimensions of the format, or an invalid QSize when the format (or
+// its description) is missing.
+QSize qt_device_format_resolution(AVCaptureDeviceFormat *format)
+{
+    if (format && format.formatDescription) {
+        const CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+        return QSize(dim.width, dim.height);
+    }
+    return QSize();
+}
+
+// High-resolution still-image dimensions of the format on iOS; an empty
+// QSize on other platforms (the property is iOS-only).
+QSize qt_device_format_high_resolution(AVCaptureDeviceFormat *format)
+{
+    Q_ASSERT(format);
+    QSize res;
+#if defined(Q_OS_IOS)
+    const CMVideoDimensions hrDim(format.highResolutionStillImageDimensions);
+    res.setWidth(hrDim.width);
+    res.setHeight(hrDim.height);
+#endif
+    return res;
+}
+
+// Converts the format's supported AVFrameRateRanges into (min, max) fps
+// pairs; empty when none are advertised.
+QVector<AVFPSRange> qt_device_format_framerates(AVCaptureDeviceFormat *format)
+{
+    Q_ASSERT(format);
+
+    QVector<AVFPSRange> qtRanges;
+
+    if (!format.videoSupportedFrameRateRanges || !format.videoSupportedFrameRateRanges.count)
+        return qtRanges;
+
+    qtRanges.reserve(format.videoSupportedFrameRateRanges.count);
+    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges)
+        qtRanges << AVFPSRange(range.minFrameRate, range.maxFrameRate);
+
+    return qtRanges;
+}
+
+// Computes the pixel aspect ratio of the format as a reduced fraction, by
+// comparing the nominal width to the presentation width.  Returns (1, 1)
+// when they effectively agree and an invalid QSize on bad input.
+QSize qt_device_format_pixel_aspect_ratio(AVCaptureDeviceFormat *format)
+{
+    Q_ASSERT(format);
+
+    if (!format.formatDescription) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "no format description found";
+        return QSize();
+    }
+
+    const CMVideoDimensions res = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+    const CGSize resPAR = CMVideoFormatDescriptionGetPresentationDimensions(format.formatDescription, true, false);
+
+    if (qAbs(resPAR.width - res.width) < 1.) {
+        // "Pixel aspect ratio is used to adjust the width, leaving the height alone."
+        return QSize(1, 1);
+    }
+
+    if (!res.width || !resPAR.width)
+        return QSize();
+
+    // Always form a ratio <= 1 so qRealToFraction sees a value in (0, 1].
+    auto frac = qRealToFraction(resPAR.width > res.width ? res.width / qreal(resPAR.width)
+                                                         : resPAR.width / qreal(res.width));
+
+    return QSize(frac.numerator, frac.denominator);
+}
+
+// Returns the device format whose resolution best matches 'request'.
+// An exact match wins (including the iOS high-resolution still size when
+// 'stillImage' is set); otherwise the candidate closest in width/height and
+// area is chosen. Returns nullptr when no sane candidate exists.
+AVCaptureDeviceFormat *qt_find_best_resolution_match(AVCaptureDevice *captureDevice,
+                                                     const QSize &request,
+                                                     FourCharCode filter,
+                                                     bool stillImage)
+{
+    Q_ASSERT(captureDevice);
+    Q_ASSERT(!request.isNull() && request.isValid());
+
+    if (!captureDevice.formats || !captureDevice.formats.count)
+        return nullptr;
+
+    // De-duplicated formats, preferring the 'filter' codec per resolution.
+    QVector<AVCaptureDeviceFormat *> formats(qt_unique_device_formats(captureDevice, filter));
+
+    // First pass: look for an exact resolution match.
+    for (int i = 0; i < formats.size(); ++i) {
+        AVCaptureDeviceFormat *format = formats[i];
+        if (qt_device_format_resolution(format) == request)
+            return format;
+        // iOS only (still images).
+        if (stillImage && qt_device_format_high_resolution(format) == request)
+            return format;
+    }
+
+    if (!qt_area_sane(request))
+        return nullptr;
+
+    typedef QPair<QSize, AVCaptureDeviceFormat *> FormatPair;
+
+    // Second pass: collect all sane candidate sizes (and, for still capture,
+    // the high-resolution sizes) paired with their formats.
+    QVector<FormatPair> pairs; // default|HR sizes
+    pairs.reserve(formats.size());
+
+    for (int i = 0; i < formats.size(); ++i) {
+        AVCaptureDeviceFormat *format = formats[i];
+        const QSize res(qt_device_format_resolution(format));
+        if (!res.isNull() && res.isValid() && qt_area_sane(res))
+            pairs << FormatPair(res, format);
+        const QSize highRes(qt_device_format_high_resolution(format));
+        if (stillImage && !highRes.isNull() && highRes.isValid() && qt_area_sane(highRes))
+            pairs << FormatPair(highRes, format);
+    }
+
+    if (!pairs.size())
+        return nullptr;
+
+    // Pick the candidate minimizing the per-axis and area distance to the
+    // requested size; a strict improvement on both axes always wins.
+    AVCaptureDeviceFormat *best = pairs[0].second;
+    QSize next(pairs[0].first);
+    int wDiff = qAbs(request.width() - next.width());
+    int hDiff = qAbs(request.height() - next.height());
+    const int area = request.width() * request.height();
+    int areaDiff = qAbs(area - next.width() * next.height());
+    for (int i = 1; i < pairs.size(); ++i) {
+        next = pairs[i].first;
+        const int newWDiff = qAbs(next.width() - request.width());
+        const int newHDiff = qAbs(next.height() - request.height());
+        const int newAreaDiff = qAbs(area - next.width() * next.height());
+
+        if ((newWDiff < wDiff && newHDiff < hDiff)
+            || ((newWDiff <= wDiff || newHDiff <= hDiff) && newAreaDiff <= areaDiff)) {
+            wDiff = newWDiff;
+            hDiff = newHDiff;
+            best = pairs[i].second;
+            areaDiff = newAreaDiff;
+        }
+    }
+
+    return best;
+}
+
+// Returns a device format able to deliver 'fps'. Formats are scanned in
+// decreasing resolution order, so among supporting formats the largest wins;
+// if no range contains 'fps', the format with the smallest distance between
+// 'fps' and its closest supported rate is returned. nil if none qualify.
+AVCaptureDeviceFormat *qt_find_best_framerate_match(AVCaptureDevice *captureDevice,
+                                                    FourCharCode filter,
+                                                    Float64 fps)
+{
+    Q_ASSERT(captureDevice);
+    Q_ASSERT(fps > 0.);
+
+    const qreal epsilon = 0.1;
+
+    QVector<AVCaptureDeviceFormat *>sorted(qt_unique_device_formats(captureDevice, filter));
+    // Sort formats by their resolution in decreasing order:
+    std::sort(sorted.begin(), sorted.end(), ByResolution<std::greater>());
+    // We can use only formats with framerate ranges:
+    sorted.erase(std::remove_if(sorted.begin(), sorted.end(), FormatHasNoFPSRange()), sorted.end());
+
+    if (!sorted.size())
+        return nil;
+
+    for (int i = 0; i < sorted.size(); ++i) {
+        AVCaptureDeviceFormat *format = sorted[i];
+        for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
+            if (range.maxFrameRate - range.minFrameRate < epsilon) {
+                // On OS X ranges are points (built-in camera).
+                if (qAbs(fps - range.maxFrameRate) < epsilon)
+                    return format;
+            }
+
+            if (fps >= range.minFrameRate && fps <= range.maxFrameRate)
+                return format;
+        }
+    }
+
+    // No exact support: fall back to the format with the nearest rate.
+    Float64 distance = qt_find_min_framerate_distance(sorted[0], fps);
+    AVCaptureDeviceFormat *match = sorted[0];
+    for (int i = 1; i < sorted.size(); ++i) {
+        const Float64 newDistance = qt_find_min_framerate_distance(sorted[i], fps);
+        if (newDistance < distance) {
+            distance = newDistance;
+            match = sorted[i];
+        }
+    }
+
+    return match;
+}
+
+// Returns the frame-rate range of 'format' containing 'fps' (treating the
+// near-degenerate point ranges seen on OS X specially), falling back to the
+// range whose maximum rate is closest to 'fps'.
+AVFrameRateRange *qt_find_supported_framerate_range(AVCaptureDeviceFormat *format, Float64 fps)
+{
+    Q_ASSERT(format && format.videoSupportedFrameRateRanges
+             && format.videoSupportedFrameRateRanges.count);
+
+    const qreal epsilon = 0.1;
+
+    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
+        if (range.maxFrameRate - range.minFrameRate < epsilon) {
+            // On OS X ranges are points (built-in camera).
+            if (qAbs(fps - range.maxFrameRate) < epsilon)
+                return range;
+        }
+
+        if (fps >= range.minFrameRate && fps <= range.maxFrameRate)
+            return range;
+    }
+
+    // No containing range: pick the one with the nearest maximum rate.
+    AVFrameRateRange *match = [format.videoSupportedFrameRateRanges objectAtIndex:0];
+    Float64 distance = qAbs(match.maxFrameRate - fps);
+    for (NSUInteger i = 1, e = format.videoSupportedFrameRateRanges.count; i < e; ++i) {
+        AVFrameRateRange *range = [format.videoSupportedFrameRateRanges objectAtIndex:i];
+        const Float64 newDistance = qAbs(range.maxFrameRate - fps);
+        if (newDistance < distance) {
+            distance = newDistance;
+            match = range;
+        }
+    }
+
+    return match;
+}
+
+// Reports whether any of the format's advertised frame-rate ranges contains
+// 'fps', with a small tolerance for floating-point comparison.
+bool qt_format_supports_framerate(AVCaptureDeviceFormat *format, qreal fps)
+{
+    if (!format || fps <= qreal(0))
+        return false;
+
+    const qreal epsilon = 0.1;
+    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
+        if (fps >= range.minFrameRate - epsilon && fps <= range.maxFrameRate + epsilon)
+            return true;
+    }
+
+    return false;
+}
+
+// Two formats are considered equal when they are the same object, or when
+// both their media type and their media-specific descriptions match.
+bool qt_formats_are_equal(AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2)
+{
+    if (f1 == f2)
+        return true;
+
+    return [f1.mediaType isEqualToString:f2.mediaType]
+           && CMFormatDescriptionEqual(f1.formatDescription, f2.formatDescription);
+}
+
+// Makes 'format' the device's active format, optionally restoring the current
+// frame-rate limits afterwards (changing activeFormat resets them).
+// Returns true when the active format was (reportedly) changed.
+bool qt_set_active_format(AVCaptureDevice *captureDevice, AVCaptureDeviceFormat *format, bool preserveFps)
+{
+    // NOTE(review): this function-local static is shared across *all* capture
+    // devices and is not synchronized beyond its initialization - confirm
+    // single-threaded, single-device usage.
+    static bool firstSet = true;
+
+    if (!captureDevice || !format)
+        return false;
+
+    if (qt_formats_are_equal(captureDevice.activeFormat, format)) {
+        if (firstSet) {
+            // The capture device format is persistent. The first time we set a format, report that
+            // it changed even if the formats are the same.
+            // This prevents the session from resetting the format to the default value.
+            firstSet = false;
+            return true;
+        }
+        return false;
+    }
+
+    firstSet = false;
+
+    // lockForConfiguration is required before mutating device properties.
+    const AVFConfigurationLock lock(captureDevice);
+    if (!lock) {
+        qWarning("Failed to set active format (lock failed)");
+        return false;
+    }
+
+    // Changing the activeFormat resets the frame rate.
+    AVFPSRange fps;
+    if (preserveFps)
+        fps = qt_current_framerates(captureDevice, nil);
+
+    captureDevice.activeFormat = format;
+
+    if (preserveFps)
+        qt_set_framerate_limits(captureDevice, nil, fps.first, fps.second);
+
+    return true;
+}
+
+// Applies (minFPS, maxFPS) limits on a capture connection via its min/max
+// frame durations. A value of 0 leaves the corresponding bound at the
+// default (kCMTimeInvalid).
+void qt_set_framerate_limits(AVCaptureConnection *videoConnection, qreal minFPS, qreal maxFPS)
+{
+    Q_ASSERT(videoConnection);
+
+    if (minFPS < 0. || maxFPS < 0. || (maxFPS && maxFPS < minFPS)) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid framerates (min, max):"
+                           << minFPS << maxFPS;
+        return;
+    }
+
+    // Max FPS maps to the *minimum* frame duration (1/maxFPS), and vice versa.
+    CMTime minDuration = kCMTimeInvalid;
+    if (maxFPS > 0.) {
+        if (!videoConnection.supportsVideoMinFrameDuration)
+            qCDebug(qLcCamera) << Q_FUNC_INFO << "maximum framerate is not supported";
+        else
+            minDuration = CMTimeMake(1, maxFPS); // NOTE(review): timescale is int32; fractional rates (e.g. 29.97) are truncated - confirm intended.
+    }
+    if (videoConnection.supportsVideoMinFrameDuration)
+        videoConnection.videoMinFrameDuration = minDuration;
+
+    CMTime maxDuration = kCMTimeInvalid;
+    if (minFPS > 0.) {
+        if (!videoConnection.supportsVideoMaxFrameDuration)
+            qCDebug(qLcCamera) << Q_FUNC_INFO << "minimum framerate is not supported";
+        else
+            maxDuration = CMTimeMake(1, minFPS); // NOTE(review): same int32 truncation as above.
+    }
+    if (videoConnection.supportsVideoMaxFrameDuration)
+        videoConnection.videoMaxFrameDuration = maxDuration;
+}
+
+// Converts 'fps' into a frame duration clamped to 'range'. Degenerate
+// (point) ranges, as seen on OS X, always yield the range's own duration.
+CMTime qt_adjusted_frame_duration(AVFrameRateRange *range, qreal fps)
+{
+    Q_ASSERT(range);
+    Q_ASSERT(fps > 0.);
+
+    if (range.maxFrameRate - range.minFrameRate < 0.1) {
+        // Can happen on OS X.
+        return range.minFrameDuration;
+    }
+
+    // Clamp out-of-range rates to the nearest bound of the range.
+    if (fps <= range.minFrameRate)
+        return range.maxFrameDuration;
+    if (fps >= range.maxFrameRate)
+        return range.minFrameDuration;
+
+    // Express 1/fps as an integer fraction suitable for CMTime.
+    auto frac = qRealToFraction(1. / fps);
+    return CMTimeMake(frac.numerator, frac.denominator);
+}
+
+// Applies (minFPS, maxFPS) limits directly on the capture device by setting
+// its active min/max frame durations, chosen from a supported frame-rate
+// range of the active format. A value of 0 means "no limit".
+void qt_set_framerate_limits(AVCaptureDevice *captureDevice, qreal minFPS, qreal maxFPS)
+{
+    Q_ASSERT(captureDevice);
+    if (!captureDevice.activeFormat) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "no active capture device format";
+        return;
+    }
+
+    if (minFPS < 0. || maxFPS < 0. || (maxFPS && maxFPS < minFPS)) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid framerates (min, max):"
+                           << minFPS << maxFPS;
+        return;
+    }
+
+    // Max FPS maps to the *minimum* frame duration, and vice versa.
+    CMTime minFrameDuration = kCMTimeInvalid;
+    CMTime maxFrameDuration = kCMTimeInvalid;
+    if (maxFPS || minFPS) {
+        AVFrameRateRange *range = qt_find_supported_framerate_range(captureDevice.activeFormat,
+                                                                    maxFPS ? maxFPS : minFPS);
+        if (!range) {
+            qCDebug(qLcCamera) << Q_FUNC_INFO << "no framerate range found, (min, max):"
+                               << minFPS << maxFPS;
+            return;
+        }
+
+        if (maxFPS)
+            minFrameDuration = qt_adjusted_frame_duration(range, maxFPS);
+        if (minFPS)
+            maxFrameDuration = qt_adjusted_frame_duration(range, minFPS);
+    }
+
+    const AVFConfigurationLock lock(captureDevice);
+    if (!lock) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
+        return;
+    }
+
+    // While Apple's docs say kCMTimeInvalid will end in default
+    // settings for this format, kCMTimeInvalid on OS X ends with a runtime
+    // exception:
+    // "The activeVideoMinFrameDuration passed is not supported by the device."
+    // Instead, use the first item in the supported frame rates.
+#ifdef Q_OS_IOS
+    [captureDevice setActiveVideoMinFrameDuration:minFrameDuration];
+    [captureDevice setActiveVideoMaxFrameDuration:maxFrameDuration];
+#elif defined(Q_OS_MACOS)
+    if (CMTimeCompare(minFrameDuration, kCMTimeInvalid) == 0
+        && CMTimeCompare(maxFrameDuration, kCMTimeInvalid) == 0) {
+        AVFrameRateRange *range = captureDevice.activeFormat.videoSupportedFrameRateRanges.firstObject;
+        minFrameDuration = range.minFrameDuration;
+        maxFrameDuration = range.maxFrameDuration;
+    }
+
+    // Only set durations that are valid (CMTimeCompare != 0 means "differs
+    // from kCMTimeInvalid").
+    if (CMTimeCompare(minFrameDuration, kCMTimeInvalid))
+        [captureDevice setActiveVideoMinFrameDuration:minFrameDuration];
+
+    if (CMTimeCompare(maxFrameDuration, kCMTimeInvalid))
+        [captureDevice setActiveVideoMaxFrameDuration:maxFrameDuration];
+#endif // Q_OS_MACOS
+}
+
+// Compatibility overload: on Darwin the limits are applied on the capture
+// device itself; the connection parameter is ignored.
+void qt_set_framerate_limits(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection,
+                             qreal minFPS, qreal maxFPS)
+{
+    Q_ASSERT(captureDevice);
+    Q_UNUSED(videoConnection);
+
+    qt_set_framerate_limits(captureDevice, minFPS, maxFPS);
+}
+
+// Reads the device's current (min, max) FPS limits from its active frame
+// durations. Entries remain 0 where the corresponding duration is invalid
+// or zero-length. The connection parameter is unused on Darwin.
+AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection)
+{
+    Q_UNUSED(videoConnection);
+    Q_ASSERT(captureDevice);
+
+    AVFPSRange fps;
+    const CMTime minDuration = captureDevice.activeVideoMinFrameDuration;
+    if (CMTimeCompare(minDuration, kCMTimeInvalid)) {
+        if (const Float64 minSeconds = CMTimeGetSeconds(minDuration))
+            fps.second = 1. / minSeconds; // Max FPS = 1 / MinDuration.
+    }
+
+    const CMTime maxDuration = captureDevice.activeVideoMaxFrameDuration;
+    if (CMTimeCompare(maxDuration, kCMTimeInvalid)) {
+        if (const Float64 maxSeconds = CMTimeGetSeconds(maxDuration))
+            fps.first = 1. / maxSeconds; // Min FPS = 1 / MaxDuration.
+    }
+
+    return fps;
+}
+
+// Queries CoreAudio for the encodable sample-rate ranges of the given audio
+// format (codec id). Returns an empty list on failure or when the codec
+// advertises no rates.
+QList<AudioValueRange> qt_supported_sample_rates_for_format(int codecId)
+{
+    QList<AudioValueRange> result;
+    UInt32 format = codecId;
+    UInt32 size = 0;
+    OSStatus err = AudioFormatGetPropertyInfo(
+                kAudioFormatProperty_AvailableEncodeSampleRates,
+                sizeof(format),
+                &format,
+                &size);
+
+    if (err != noErr || !size)
+        return result;
+
+    // Read the property directly into the list's contiguous storage; the
+    // previous variable-length array was a non-standard C++ extension (and
+    // UB for a zero length).
+    result.resize(size / sizeof(AudioValueRange));
+
+    err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeSampleRates,
+                                 sizeof(format),
+                                 &format,
+                                 &size,
+                                 result.data());
+    if (err != noErr)
+        result.clear();
+
+    return result;
+}
+
+// Queries CoreAudio for the encodable bit-rate ranges of the given audio
+// format (codec id). Returns an empty list on failure or when the codec
+// advertises no rates.
+QList<AudioValueRange> qt_supported_bit_rates_for_format(int codecId)
+{
+    QList<AudioValueRange> result;
+    UInt32 format = codecId;
+    UInt32 size = 0;
+    OSStatus err = AudioFormatGetPropertyInfo(
+                kAudioFormatProperty_AvailableEncodeBitRates,
+                sizeof(format),
+                &format,
+                &size);
+
+    if (err != noErr || !size)
+        return result;
+
+    // Read the property directly into the list's contiguous storage; the
+    // previous variable-length array was a non-standard C++ extension (and
+    // UB for a zero length).
+    result.resize(size / sizeof(AudioValueRange));
+
+    err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeBitRates,
+                                 sizeof(format),
+                                 &format,
+                                 &size,
+                                 result.data());
+    if (err != noErr)
+        result.clear();
+
+    return result;
+}
+
+// Queries CoreAudio for the channel counts the codec can encode.
+// Returns std::nullopt when any channel count is allowed, an empty list on
+// failure, and the explicit counts otherwise.
+std::optional<QList<UInt32>> qt_supported_channel_counts_for_format(int codecId)
+{
+    QList<UInt32> result;
+    AudioStreamBasicDescription sf = {};
+    sf.mFormatID = codecId;
+    UInt32 size = 0;
+    OSStatus err = AudioFormatGetPropertyInfo(
+                kAudioFormatProperty_AvailableEncodeNumberChannels,
+                sizeof(sf),
+                &sf,
+                &size);
+
+    if (err != noErr)
+        return result;
+
+    // From Apple's docs:
+    // A value of 0xFFFFFFFF indicates that any number of channels may be encoded.
+    if (int(size) == -1)
+        return std::nullopt;
+
+    if (!size)
+        return result;
+
+    // Read the property directly into the list's contiguous storage; the
+    // previous variable-length array was a non-standard C++ extension (and
+    // UB for a zero length).
+    result.resize(size / sizeof(UInt32));
+
+    err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeNumberChannels,
+                                 sizeof(sf),
+                                 &sf,
+                                 &size,
+                                 result.data());
+    if (err != noErr)
+        result.clear();
+
+    return result;
+}
+
+// Queries CoreAudio for the channel-layout tags the codec supports for the
+// given channel count. Returns an empty list on failure or when no tags are
+// advertised. (AudioChannelLayoutTag is a UInt32, so the list's storage can
+// receive the property data directly.)
+QList<UInt32> qt_supported_channel_layout_tags_for_format(int codecId, int noChannels)
+{
+    QList<UInt32> result;
+    AudioStreamBasicDescription sf = {};
+    sf.mFormatID = codecId;
+    sf.mChannelsPerFrame = noChannels;
+    UInt32 size = 0;
+    OSStatus err = AudioFormatGetPropertyInfo(
+                kAudioFormatProperty_AvailableEncodeChannelLayoutTags,
+                sizeof(sf),
+                &sf,
+                &size);
+
+    if (err != noErr || !size)
+        return result;
+
+    // Read the property directly into the list's contiguous storage; the
+    // previous variable-length array was a non-standard C++ extension (and
+    // UB for a zero length).
+    result.resize(size / sizeof(UInt32));
+
+    err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeChannelLayoutTags,
+                                 sizeof(sf),
+                                 &sf,
+                                 &size,
+                                 result.data());
+    if (err != noErr)
+        result.clear();
+
+    return result;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/darwin/camera/avfcamerautility_p.h b/src/plugins/multimedia/darwin/camera/avfcamerautility_p.h
new file mode 100644
index 000000000..b5c9e9bda
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamerautility_p.h
@@ -0,0 +1,165 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFCAMERAUTILITY_H
+#define AVFCAMERAUTILITY_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qglobal.h>
+#include <QtCore/qdebug.h>
+#include <QtCore/qlist.h>
+#include <QtCore/qpair.h>
+#include <QtCore/qsize.h>
+
+#include "qcameradevice.h"
+
+#include <AVFoundation/AVFoundation.h>
+
+// In case we have SDK below 10.7/7.0:
+@class AVCaptureDeviceFormat;
+
+QT_BEGIN_NAMESPACE
+
+// RAII guard around -[AVCaptureDevice lockForConfiguration:]; unlocks in the
+// destructor if the lock was acquired. Test the object in a boolean context
+// to find out whether locking succeeded.
+class AVFConfigurationLock
+{
+public:
+    explicit AVFConfigurationLock(AVCaptureDevice *captureDevice)
+        : m_captureDevice(captureDevice),
+          m_locked(false)
+    {
+        Q_ASSERT(m_captureDevice);
+        NSError *error = nil;
+        // The error (if any) is intentionally discarded; callers only need
+        // the success/failure state.
+        m_locked = [m_captureDevice lockForConfiguration:&error];
+    }
+
+    ~AVFConfigurationLock()
+    {
+        if (m_locked)
+            [m_captureDevice unlockForConfiguration];
+    }
+
+    // True if the configuration lock is held.
+    operator bool() const
+    {
+        return m_locked;
+    }
+
+private:
+    Q_DISABLE_COPY(AVFConfigurationLock)
+
+    AVCaptureDevice *m_captureDevice;
+    bool m_locked;
+};
+
+// Deleter used by AVFScopedPointer: sends -release to the held
+// Objective-C object.
+struct AVFObjectDeleter {
+    void operator()(NSObject *obj)
+    {
+        // Defensive null check (messaging nil would also be a no-op).
+        if (obj)
+            [obj release];
+    }
+};
+
+// Owning smart pointer for Objective-C objects, releasing them via
+// AVFObjectDeleter. Stores the pointer as NSObject* in the unique_ptr base
+// and casts back to T* on access.
+template<class T>
+class AVFScopedPointer : public std::unique_ptr<NSObject, AVFObjectDeleter>
+{
+public:
+    AVFScopedPointer() {}
+    explicit AVFScopedPointer(T *ptr) : std::unique_ptr<NSObject, AVFObjectDeleter>(ptr) {}
+    operator T*() const
+    {
+        // Quite handy operator to enable Obj-C messages: [ptr someMethod];
+        return data();
+    }
+
+    // The held object, typed as T* (may be nil).
+    T *data() const
+    {
+        return static_cast<T *>(get());
+    }
+
+    // Relinquishes ownership without releasing the object.
+    T *take()
+    {
+        return static_cast<T *>(release());
+    }
+};
+
+// Specialization for dispatch queues, which are not NSObjects and must be
+// released with dispatch_release() instead of -release.
+template<>
+class AVFScopedPointer<dispatch_queue_t>
+{
+public:
+    AVFScopedPointer() : m_queue(nullptr) {}
+    explicit AVFScopedPointer(dispatch_queue_t q) : m_queue(q) {}
+
+    ~AVFScopedPointer()
+    {
+        if (m_queue)
+            dispatch_release(m_queue);
+    }
+
+    operator dispatch_queue_t() const
+    {
+        // Quite handy operator to enable Obj-C messages: [ptr someMethod];
+        return m_queue;
+    }
+
+    // The held queue (may be null).
+    dispatch_queue_t data() const
+    {
+        return m_queue;
+    }
+
+    // Releases the current queue (if any) and takes ownership of 'q'.
+    void reset(dispatch_queue_t q = nullptr)
+    {
+        if (m_queue)
+            dispatch_release(m_queue);
+        m_queue = q;
+    }
+
+private:
+    dispatch_queue_t m_queue;
+
+    Q_DISABLE_COPY(AVFScopedPointer)
+};
+
+typedef QPair<qreal, qreal> AVFPSRange;
+AVFPSRange qt_connection_framerates(AVCaptureConnection *videoConnection);
+
+AVCaptureDeviceFormat *qt_convert_to_capture_device_format(
+ AVCaptureDevice *captureDevice, const QCameraFormat &format,
+ const std::function<bool(uint32_t)> &cvFormatValidator = nullptr);
+QList<AVCaptureDeviceFormat *> qt_unique_device_formats(AVCaptureDevice *captureDevice,
+ FourCharCode preferredFormat);
+QSize qt_device_format_resolution(AVCaptureDeviceFormat *format);
+QSize qt_device_format_high_resolution(AVCaptureDeviceFormat *format);
+QSize qt_device_format_pixel_aspect_ratio(AVCaptureDeviceFormat *format);
+QList<AVFPSRange> qt_device_format_framerates(AVCaptureDeviceFormat *format);
+AVCaptureDeviceFormat *qt_find_best_resolution_match(AVCaptureDevice *captureDevice, const QSize &res,
+ FourCharCode preferredFormat, bool stillImage = true);
+AVCaptureDeviceFormat *qt_find_best_framerate_match(AVCaptureDevice *captureDevice,
+ FourCharCode preferredFormat,
+ Float64 fps);
+AVFrameRateRange *qt_find_supported_framerate_range(AVCaptureDeviceFormat *format, Float64 fps);
+bool qt_format_supports_framerate(AVCaptureDeviceFormat *format, qreal fps);
+
+bool qt_formats_are_equal(AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2);
+bool qt_set_active_format(AVCaptureDevice *captureDevice, AVCaptureDeviceFormat *format, bool preserveFps);
+
+AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection);
+void qt_set_framerate_limits(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection,
+ qreal minFPS, qreal maxFPS);
+
+QList<AudioValueRange> qt_supported_sample_rates_for_format(int codecId);
+QList<AudioValueRange> qt_supported_bit_rates_for_format(int codecId);
+std::optional<QList<UInt32>> qt_supported_channel_counts_for_format(int codecId);
+QList<UInt32> qt_supported_channel_layout_tags_for_format(int codecId, int noChannels);
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/camera/avfimagecapture.mm b/src/plugins/multimedia/darwin/camera/avfimagecapture.mm
new file mode 100644
index 000000000..2ee7b0597
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfimagecapture.mm
@@ -0,0 +1,385 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfcameradebug_p.h"
+#include "avfimagecapture_p.h"
+#include "avfcameraservice_p.h"
+#include "avfcamerautility_p.h"
+#include "avfcamera_p.h"
+#include "avfcamerasession_p.h"
+#include "avfcamerarenderer_p.h"
+#include "private/qmediastoragelocation_p.h"
+#include <private/qplatformimagecapture_p.h>
+#include <private/qmemoryvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
+
+#include <QtCore/qurl.h>
+#include <QtCore/qfile.h>
+#include <QtCore/qbuffer.h>
+#include <QtConcurrent/qtconcurrentrun.h>
+#include <QtGui/qimagereader.h>
+
+#import <AVFoundation/AVFoundation.h>
+
+QT_USE_NAMESPACE
+
+// Creates the owned still-image output, configured to produce JPEG data.
+// NOTE(review): AVCaptureStillImageOutput is deprecated in favour of
+// AVCapturePhotoOutput on recent Apple SDKs - confirm the deployment target.
+AVFImageCapture::AVFImageCapture(QImageCapture *parent)
+   : QPlatformImageCapture(parent)
+{
+    m_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
+
+    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:
+                                        AVVideoCodecTypeJPEG, AVVideoCodecKey, nil];
+
+    [m_stillImageOutput setOutputSettings:outputSettings];
+    // The dictionary is retained by the output; drop our reference.
+    [outputSettings release];
+}
+
+AVFImageCapture::~AVFImageCapture()
+{
+    // Balances the alloc/init in the constructor.
+    [m_stillImageOutput release];
+}
+
+// Capturing requires a camera control, a live video connection, and an
+// active camera.
+bool AVFImageCapture::isReadyForCapture() const
+{
+    if (!m_cameraControl || !m_videoConnection)
+        return false;
+    return m_cameraControl->isActive();
+}
+
+// Re-evaluates readiness and notifies listeners when it flips.
+void AVFImageCapture::updateReadyStatus()
+{
+    const bool ready = isReadyForCapture();
+    if (ready == m_ready)
+        return;
+
+    m_ready = ready;
+    qCDebug(qLcCamera) << "ReadyToCapture status changed:" << m_ready;
+    Q_EMIT readyForCaptureChanged(m_ready);
+}
+
+// Initiates an asynchronous still capture. Returns the new capture id, or -1
+// on immediate failure. An empty 'actualFileName' means the image is
+// delivered as a QVideoFrame buffer instead of being written to disk.
+int AVFImageCapture::doCapture(const QString &actualFileName)
+{
+    if (!m_session) {
+        QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
+                                  Q_ARG(int, m_lastCaptureId),
+                                  Q_ARG(int, QImageCapture::ResourceError),
+                                  Q_ARG(QString, QPlatformImageCapture::msgImageCaptureNotSet()));
+        return -1;
+    }
+    if (!isReadyForCapture()) {
+        QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
+                                  Q_ARG(int, m_lastCaptureId),
+                                  Q_ARG(int, QImageCapture::NotReadyError),
+                                  Q_ARG(QString, QPlatformImageCapture::msgCameraNotReady()));
+        return -1;
+    }
+    m_lastCaptureId++;
+
+    bool captureToBuffer = actualFileName.isEmpty();
+
+    // Queue the request so onNewViewfinderFrame() can pair a preview frame
+    // with it; the semaphore synchronizes preview generation and saving.
+    CaptureRequest request = { m_lastCaptureId, QSharedPointer<QSemaphore>::create()};
+    m_requestsMutex.lock();
+    m_captureRequests.enqueue(request);
+    m_requestsMutex.unlock();
+
+    QString fileName(actualFileName);
+
+    // The completion handler may run on an arbitrary (non-Qt) thread; all
+    // signals are therefore emitted via queued invokeMethod calls.
+    [m_stillImageOutput captureStillImageAsynchronouslyFromConnection:m_videoConnection
+                        completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
+
+        if (error) {
+            QStringList messageParts;
+            messageParts << QString::fromUtf8([[error localizedDescription] UTF8String]);
+            messageParts << QString::fromUtf8([[error localizedFailureReason] UTF8String]);
+            messageParts << QString::fromUtf8([[error localizedRecoverySuggestion] UTF8String]);
+
+            QString errorMessage = messageParts.join(QChar(u' '));
+            qCDebug(qLcCamera) << "Image capture failed:" << errorMessage;
+
+            QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
+                                      Q_ARG(int, request.captureId),
+                                      Q_ARG(int, QImageCapture::ResourceError),
+                                      Q_ARG(QString, errorMessage));
+            return;
+        }
+
+        // Wait for the preview to be generated before saving the JPEG (but only
+        // if we have AVFCameraRenderer attached).
+        // It is possible to stop camera immediately after trying to capture an
+        // image; this can result in a blocked callback's thread, waiting for a
+        // new viewfinder's frame to arrive/semaphore to be released. It is also
+        // unspecified on which thread this callback gets executed, (probably it's
+        // not the same thread that initiated a capture and stopped the camera),
+        // so we cannot reliably check the camera's status. Instead, we wait
+        // with a timeout and treat a failure to acquire a semaphore as an error.
+        if (!m_session->videoOutput() || request.previewReady->tryAcquire(1, 1000)) {
+            qCDebug(qLcCamera) << "Image capture completed";
+
+            NSData *nsJpgData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
+            QByteArray jpgData = QByteArray::fromRawData((const char *)[nsJpgData bytes], [nsJpgData length]);
+
+            if (captureToBuffer) {
+                // Deliver the JPEG as a QVideoFrame; a deep copy is taken
+                // since jpgData only wraps the NSData's bytes.
+                QBuffer data(&jpgData);
+                QImageReader reader(&data, "JPEG");
+                QSize size = reader.size();
+                auto buffer = std::make_unique<QMemoryVideoBuffer>(
+                        QByteArray(jpgData.constData(), jpgData.size()), -1);
+                QVideoFrame frame = QVideoFramePrivate::createFrame(
+                        std::move(buffer), QVideoFrameFormat(size, QVideoFrameFormat::Format_Jpeg));
+                QMetaObject::invokeMethod(this, "imageAvailable", Qt::QueuedConnection,
+                                          Q_ARG(int, request.captureId),
+                                          Q_ARG(QVideoFrame, frame));
+            } else {
+                QFile f(fileName);
+                if (f.open(QFile::WriteOnly)) {
+                    if (f.write(jpgData) != -1) {
+                        QMetaObject::invokeMethod(this, "imageSaved", Qt::QueuedConnection,
+                                                  Q_ARG(int, request.captureId),
+                                                  Q_ARG(QString, fileName));
+                    } else {
+                        QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
+                                                  Q_ARG(int, request.captureId),
+                                                  Q_ARG(int, QImageCapture::OutOfSpaceError),
+                                                  Q_ARG(QString, f.errorString()));
+                    }
+                } else {
+                    QString errorMessage = tr("Could not open destination file:\n%1").arg(fileName);
+                    QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
+                                              Q_ARG(int, request.captureId),
+                                              Q_ARG(int, QImageCapture::ResourceError),
+                                              Q_ARG(QString, errorMessage));
+                }
+            }
+        } else {
+            const QLatin1String errorMessage("Image capture failed: timed out waiting"
+                                             " for a preview frame.");
+            qCDebug(qLcCamera) << errorMessage;
+            QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
+                                      Q_ARG(int, request.captureId),
+                                      Q_ARG(int, QImageCapture::ResourceError),
+                                      Q_ARG(QString, errorMessage));
+        }
+    }];
+
+    return request.captureId;
+}
+
+// Resolves 'fileName' to a concrete destination (defaulting to the pictures
+// location with a .jpg suffix) and starts the capture.
+int AVFImageCapture::capture(const QString &fileName)
+{
+    const QString actualFileName = QMediaStorageLocation::generateFileName(
+            fileName, QStandardPaths::PicturesLocation, QLatin1String("jpg"));
+
+    qCDebug(qLcCamera) << "Capture image to" << actualFileName;
+    return doCapture(actualFileName);
+}
+
+// An empty file name tells doCapture() to deliver the image as a buffer.
+int AVFImageCapture::captureToBuffer()
+{
+    return doCapture(QString());
+}
+
+// Called for every viewfinder frame; if a capture request is pending, this
+// frame becomes its preview. The preview is generated on a worker thread so
+// viewfinder delivery is not blocked.
+void AVFImageCapture::onNewViewfinderFrame(const QVideoFrame &frame)
+{
+    QMutexLocker locker(&m_requestsMutex);
+
+    if (m_captureRequests.isEmpty())
+        return;
+
+    CaptureRequest request = m_captureRequests.dequeue();
+    Q_EMIT imageExposed(request.captureId);
+
+    (void) QtConcurrent::run(&AVFImageCapture::makeCapturePreview, this,
+                             request,
+                             frame,
+                             0 /* rotation */);
+}
+
+// Tracks the service's current camera control and keeps the ready state in
+// sync with its active state.
+void AVFImageCapture::onCameraChanged()
+{
+    auto camera = m_service ? static_cast<AVFCamera *>(m_service->camera()) : nullptr;
+
+    if (camera == m_cameraControl)
+        return;
+
+    m_cameraControl = camera;
+
+    // Use the compile-time-checked connect overload, consistent with the
+    // connections made in setCaptureSession().
+    if (m_cameraControl)
+        connect(m_cameraControl, &AVFCamera::activeChanged,
+                this, &AVFImageCapture::updateReadyStatus);
+    updateReadyStatus();
+}
+
+// Builds the (optionally rotated) preview image for a capture request and
+// releases the semaphore the completion handler is waiting on.
+void AVFImageCapture::makeCapturePreview(CaptureRequest request,
+                                         const QVideoFrame &frame,
+                                         int rotation)
+{
+    QTransform transform;
+    transform.rotate(rotation);
+    const QImage preview = frame.toImage().transformed(transform);
+
+    Q_EMIT imageCaptured(request.captureId, preview);
+
+    // Unblock the capture completion handler in doCapture().
+    request.previewReady->release();
+}
+
+// Attaches the still-image output to the capture session (if not already
+// attached) and caches the video connection used for captures.
+void AVFImageCapture::updateCaptureConnection()
+{
+    if (m_session && m_session->videoCaptureDevice()) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO;
+        AVCaptureSession *captureSession = m_session->captureSession();
+
+        if (![captureSession.outputs containsObject:m_stillImageOutput]) {
+            if ([captureSession canAddOutput:m_stillImageOutput]) {
+                [captureSession beginConfiguration];
+                // Lock the video capture device to make sure the active format is not reset
+                const AVFConfigurationLock lock(m_session->videoCaptureDevice());
+                [captureSession addOutput:m_stillImageOutput];
+                m_videoConnection = [m_stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
+                [captureSession commitConfiguration];
+                updateReadyStatus();
+            }
+        } else {
+            // Output already attached; just refresh the cached connection.
+            m_videoConnection = [m_stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
+        }
+    }
+}
+
+
+// Reports the effective capture settings: JPEG at the resolution of the
+// device's active format (or the high-resolution still size on iOS when
+// high-resolution output is enabled). Returns default settings when no valid
+// device/format is available.
+QImageEncoderSettings AVFImageCapture::imageSettings() const
+{
+    QImageEncoderSettings settings;
+
+    if (!videoCaptureDeviceIsValid())
+        return settings;
+
+    AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
+    if (!captureDevice.activeFormat) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "no active format";
+        return settings;
+    }
+
+    QSize res(qt_device_format_resolution(captureDevice.activeFormat));
+#ifdef Q_OS_IOS
+    if (!m_service->avfImageCaptureControl() || !m_service->avfImageCaptureControl()->stillImageOutput()) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "no still image output";
+        return settings;
+    }
+
+    AVCaptureStillImageOutput *stillImageOutput = m_service->avfImageCaptureControl()->stillImageOutput();
+    if (stillImageOutput.highResolutionStillImageOutputEnabled)
+        res = qt_device_format_high_resolution(captureDevice.activeFormat);
+#endif
+    if (res.isNull() || !res.isValid()) {
+        // Fixed typo in the log message ("exctract" -> "extract").
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to extract the image resolution";
+        return settings;
+    }
+
+    settings.setResolution(res);
+    settings.setFormat(QImageCapture::JPEG);
+
+    return settings;
+}
+
+// Stores the new encoder settings and pushes them to the device when they
+// actually differ from the current ones.
+void AVFImageCapture::setImageSettings(const QImageEncoderSettings &settings)
+{
+    if (settings == m_settings)
+        return;
+
+    m_settings = settings;
+    applySettings();
+}
+
+// Pushes m_settings to the capture device: validates the requested format
+// and resolution, switches the device to the best-matching active format,
+// and (on iOS) toggles high-resolution still output.
+// Returns true if the device's active format was changed.
+bool AVFImageCapture::applySettings()
+{
+    if (!videoCaptureDeviceIsValid())
+        return false;
+
+    AVFCameraSession *session = m_service->session();
+    if (!session)
+        return false;
+
+    if (!m_service->imageCapture()
+        || !m_service->avfImageCaptureControl()->stillImageOutput()) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "no still image output";
+        return false;
+    }
+
+    // Only JPEG (or "unspecified", which defaults to JPEG) is supported.
+    if (m_settings.format() != QImageCapture::UnspecifiedFormat && m_settings.format() != QImageCapture::JPEG) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "unsupported format:" << m_settings.format();
+        return false;
+    }
+
+    QSize res(m_settings.resolution());
+    if (res.isNull()) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid resolution:" << res;
+        return false;
+    }
+
+    if (!res.isValid()) {
+        // Invalid == default value.
+        // Here we could choose the best format available, but
+        // activeFormat is already equal to 'preset high' by default,
+        // which is good enough, otherwise we can end in some format with low framerates.
+        return false;
+    }
+
+    bool activeFormatChanged = false;
+
+    AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
+    AVCaptureDeviceFormat *match = qt_find_best_resolution_match(captureDevice, res,
+                                                                 m_service->session()->defaultCodec());
+
+    if (!match) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "unsupported resolution:" << res;
+        return false;
+    }
+
+    // Preserve the current FPS limits while switching formats.
+    activeFormatChanged = qt_set_active_format(captureDevice, match, true);
+
+#ifdef Q_OS_IOS
+    // Enable high-resolution output only when the requested size is the
+    // format's high-resolution still size.
+    AVCaptureStillImageOutput *imageOutput = m_service->avfImageCaptureControl()->stillImageOutput();
+    if (res == qt_device_format_high_resolution(captureDevice.activeFormat))
+        imageOutput.highResolutionStillImageOutputEnabled = YES;
+    else
+        imageOutput.highResolutionStillImageOutputEnabled = NO;
+#endif
+
+    return activeFormatChanged;
+}
+
+// Connects this control to a capture service (or detaches it when 'session'
+// is null), re-wiring session/camera signal connections and refreshing the
+// capture connection and ready state.
+void AVFImageCapture::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+    AVFCameraService *captureSession = static_cast<AVFCameraService *>(session);
+    if (m_service == captureSession)
+        return;
+
+    m_service = captureSession;
+    if (!m_service) {
+        // Detach: sever all connections made against the old service.
+        m_session->disconnect(this);
+        if (m_cameraControl)
+            m_cameraControl->disconnect(this);
+        m_session = nullptr;
+        m_cameraControl = nullptr;
+        m_videoConnection = nil;
+    } else {
+        m_session = m_service->session();
+        Q_ASSERT(m_session);
+
+        connect(m_service, &AVFCameraService::cameraChanged, this, &AVFImageCapture::onCameraChanged);
+        connect(m_session, SIGNAL(readyToConfigureConnections()), SLOT(updateCaptureConnection()));
+        connect(m_session, &AVFCameraSession::newViewfinderFrame,
+                this, &AVFImageCapture::onNewViewfinderFrame);
+    }
+
+    updateCaptureConnection();
+    onCameraChanged();
+    updateReadyStatus();
+}
+
+// True only when the service/session chain is complete and the capture
+// device exposes at least one format.
+bool AVFImageCapture::videoCaptureDeviceIsValid() const
+{
+    if (!m_service || !m_service->session())
+        return false;
+
+    AVCaptureDevice *device = m_service->session()->videoCaptureDevice();
+    return device && device.formats && device.formats.count;
+}
+
+#include "moc_avfimagecapture_p.cpp"
diff --git a/src/plugins/multimedia/darwin/camera/avfimagecapture_p.h b/src/plugins/multimedia/darwin/camera/avfimagecapture_p.h
new file mode 100644
index 000000000..0714fa3cc
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfimagecapture_p.h
@@ -0,0 +1,81 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFCAMERAIMAGECAPTURE_H
+#define AVFCAMERAIMAGECAPTURE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#import <AVFoundation/AVFoundation.h>
+
+#include <QtCore/qqueue.h>
+#include <QtCore/qsemaphore.h>
+#include <QtCore/qsharedpointer.h>
+#include <private/qplatformimagecapture_p.h>
+#include "avfcamerasession_p.h"
+
+QT_BEGIN_NAMESPACE
+
+// Platform backend for QImageCapture on Darwin, built on top of
+// AVCaptureStillImageOutput.
+class AVFImageCapture : public QPlatformImageCapture
+{
+Q_OBJECT
+public:
+    // One queued still-image capture; previewReady is signalled once the
+    // preview frame for this capture has been produced.
+    struct CaptureRequest {
+        int captureId;
+        QSharedPointer<QSemaphore> previewReady;
+    };
+
+    AVFImageCapture(QImageCapture *parent = nullptr);
+    ~AVFImageCapture();
+
+    bool isReadyForCapture() const override;
+
+    // Non-owning accessor for the underlying AVFoundation output object.
+    AVCaptureStillImageOutput *stillImageOutput() const {return m_stillImageOutput;}
+
+    // Performs the capture; returns the capture id handed back to the user.
+    int doCapture(const QString &fileName);
+    int capture(const QString &fileName) override;
+    int captureToBuffer() override;
+
+    QImageEncoderSettings imageSettings() const override;
+    void setImageSettings(const QImageEncoderSettings &settings) override;
+    bool applySettings();
+
+    void setCaptureSession(QPlatformMediaCaptureSession *session);
+
+private Q_SLOTS:
+    void updateCaptureConnection();
+    void updateReadyStatus();
+    void onNewViewfinderFrame(const QVideoFrame &frame);
+    void onCameraChanged();
+
+private:
+    void makeCapturePreview(CaptureRequest request, const QVideoFrame &frame, int rotation);
+    bool videoCaptureDeviceIsValid() const;
+
+    AVFCameraService *m_service = nullptr;
+    AVFCameraSession *m_session = nullptr;
+    AVFCamera *m_cameraControl = nullptr;
+    bool m_ready = false;
+    int m_lastCaptureId = 0;
+    AVCaptureStillImageOutput *m_stillImageOutput;
+    AVCaptureConnection *m_videoConnection = nullptr;
+
+    // Guards m_captureRequests: requests are queued from the capture call
+    // and consumed when viewfinder frames arrive.
+    QMutex m_requestsMutex;
+    QQueue<CaptureRequest> m_captureRequests;
+    QImageEncoderSettings m_settings;
+};
+
+Q_DECLARE_TYPEINFO(AVFImageCapture::CaptureRequest, Q_PRIMITIVE_TYPE);
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/camera/avfmediaassetwriter.mm b/src/plugins/multimedia/darwin/camera/avfmediaassetwriter.mm
new file mode 100644
index 000000000..37fc69926
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfmediaassetwriter.mm
@@ -0,0 +1,556 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfmediaencoder_p.h"
+#include "avfcamerarenderer_p.h"
+#include "avfmediaassetwriter_p.h"
+#include "avfcameraservice_p.h"
+#include "avfcamerasession_p.h"
+#include "avfcameradebug_p.h"
+#include <qdarwinformatsinfo_p.h>
+#include <avfmetadata_p.h>
+
+#include <QtCore/qmetaobject.h>
+#include <QtCore/qatomic.h>
+
+QT_USE_NAMESPACE
+
+namespace {
+
+// A session is usable for recording when it exists, has an underlying
+// AVCaptureSession, and provides at least one input (audio or video).
+bool qt_capture_session_isValid(AVFCameraService *service)
+{
+    if (!service)
+        return false;
+
+    AVFCameraSession * const session = service->session();
+    return session
+           && session->captureSession()
+           && (session->videoInput() || session->audioInput());
+}
+
+// Writer lifecycle states, stored in the atomic m_state so the delegate
+// queues and the control thread can coordinate without locks.
+enum WriterState
+{
+    WriterStateIdle,
+    WriterStateActive,
+    WriterStatePaused,
+    WriterStateAborted
+};
+
+// 64-bit atomic for the recorded duration (written on the writer queue,
+// read from the control thread).
+using AVFAtomicInt64 = QAtomicInteger<qint64>;
+
+} // unnamed namespace
+
+// Private helpers used only inside the implementation below.
+@interface QT_MANGLE_NAMESPACE(AVFMediaAssetWriter) (PrivateAPI)
+- (bool)addWriterInputs;
+- (void)setQueues;
+- (void)updateDuration:(CMTime)newTimeStamp;
+- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset;
+@end
+
+@implementation QT_MANGLE_NAMESPACE(AVFMediaAssetWriter)
+{
+@private
+    AVFCameraService *m_service;
+
+    // Asset-writer inputs for the camera (video) and audio tracks.
+    AVFScopedPointer<AVAssetWriterInput> m_cameraWriterInput;
+    AVFScopedPointer<AVAssetWriterInput> m_audioWriterInput;
+
+    // Queue to write sample buffers:
+    AVFScopedPointer<dispatch_queue_t> m_writerQueue;
+    // High priority serial queue for video output:
+    AVFScopedPointer<dispatch_queue_t> m_videoQueue;
+    // Serial queue for audio output:
+    AVFScopedPointer<dispatch_queue_t> m_audioQueue;
+
+    AVFScopedPointer<AVAssetWriter> m_assetWriter;
+
+    // Receives assetWriterStarted/Finished/Error notifications (queued).
+    AVFMediaEncoder *m_delegate;
+
+    // True until the first sample buffer establishes the session start time.
+    bool m_setStartTime;
+
+    // Holds a WriterState value; shared between queues and control thread.
+    QAtomicInt m_state;
+
+    // Audio buffers are dropped until the first video buffer was written,
+    // so audio never precedes video in the output file.
+    bool m_writeFirstAudioBuffer;
+
+    CMTime m_startTime;
+    CMTime m_lastTimeStamp;
+    CMTime m_lastVideoTimestamp;
+    CMTime m_lastAudioTimestamp;
+    // Accumulated pause duration subtracted from timestamps after resume.
+    CMTime m_timeOffset;
+    bool m_adjustTime;
+
+    NSDictionary *m_audioSettings;
+    NSDictionary *m_videoSettings;
+
+    AVFAtomicInt64 m_durationInMs;
+}
+
+// Designated initializer: stores the encoder delegate and resets all timing
+// and state bookkeeping to "idle, nothing written yet".
+- (id)initWithDelegate:(AVFMediaEncoder *)delegate
+{
+    Q_ASSERT(delegate);
+
+    if (self = [super init]) {
+        m_delegate = delegate;
+        m_setStartTime = true;
+        m_state.storeRelaxed(WriterStateIdle);
+        m_startTime = kCMTimeInvalid;
+        m_lastTimeStamp = kCMTimeInvalid;
+        m_lastAudioTimestamp = kCMTimeInvalid;
+        m_lastVideoTimestamp = kCMTimeInvalid;
+        m_timeOffset = kCMTimeInvalid;
+        m_adjustTime = false;
+        m_durationInMs.storeRelaxed(0);
+        m_audioSettings = nil;
+        m_videoSettings = nil;
+        m_writeFirstAudioBuffer = false;
+    }
+
+    return self;
+}
+
+// Prepares the asset writer for a recording: creates the dispatch queues,
+// the AVAssetWriter and its inputs. Returns false when recording is not
+// possible at all; audio-only failures degrade to video-only recording.
+- (bool)setupWithFileURL:(NSURL *)fileURL
+        cameraService:(AVFCameraService *)service
+        audioSettings:(NSDictionary *)audioSettings
+        videoSettings:(NSDictionary *)videoSettings
+        fileFormat:(QMediaFormat::FileFormat)fileFormat
+        transform:(CGAffineTransform)transform
+{
+    Q_ASSERT(fileURL);
+
+    if (!qt_capture_session_isValid(service)) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid capture session";
+        return false;
+    }
+
+    m_service = service;
+    m_audioSettings = audioSettings;
+    m_videoSettings = videoSettings;
+
+    AVFCameraSession *session = m_service->session();
+
+    m_writerQueue.reset(dispatch_queue_create("asset-writer-queue", DISPATCH_QUEUE_SERIAL));
+    if (!m_writerQueue) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create an asset writer's queue";
+        return false;
+    }
+
+    // Video queue exists only when a complete video pipeline is present.
+    m_videoQueue.reset();
+    if (session->videoInput() && session->videoOutput() && session->videoOutput()->videoDataOutput()) {
+        m_videoQueue.reset(dispatch_queue_create("video-output-queue", DISPATCH_QUEUE_SERIAL));
+        if (!m_videoQueue) {
+            qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create video queue";
+            return false;
+        }
+        // Prioritize video delivery over the default QoS.
+        dispatch_set_target_queue(m_videoQueue, dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0));
+    }
+
+    m_audioQueue.reset();
+    if (session->audioInput() && session->audioOutput()) {
+        m_audioQueue.reset(dispatch_queue_create("audio-output-queue", DISPATCH_QUEUE_SERIAL));
+        if (!m_audioQueue) {
+            qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create audio queue";
+            // Fatal only if there is no video pipeline either.
+            if (!m_videoQueue)
+                return false;
+            // But we still can write video!
+        }
+    }
+
+    auto fileType = QDarwinFormatInfo::avFileTypeForContainerFormat(fileFormat);
+    m_assetWriter.reset([[AVAssetWriter alloc] initWithURL:fileURL
+                         fileType:fileType
+                         error:nil]);
+    if (!m_assetWriter) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create asset writer";
+        return false;
+    }
+
+    // Audio-only recording: no video buffer will ever arrive, so do not
+    // gate audio on it (see m_writeFirstAudioBuffer).
+    if (!m_videoQueue)
+        m_writeFirstAudioBuffer = true;
+
+    if (![self addWriterInputs]) {
+        m_assetWriter.reset();
+        return false;
+    }
+
+    // Apply the device orientation/mirroring transform to the video track.
+    if (m_cameraWriterInput)
+        m_cameraWriterInput.data().transform = transform;
+
+    [self setMetaData:fileType];
+
+    // Ready to start ...
+    return true;
+}
+
+// Converts the encoder's QMediaMetaData into AVMetadataItems for this
+// container type and attaches them to the asset writer.
+- (void)setMetaData:(AVFileType)fileType
+{
+    m_assetWriter.data().metadata = AVFMetaData::toAVMetadataForFormat(m_delegate->metaData(), fileType);
+}
+
+// Begins a recording: hooks this object up as the capture outputs' sample
+// buffer delegate, marks the writer active and starts the capture session
+// if it is not already running.
+- (void)start
+{
+    [self setQueues];
+
+    // First incoming buffer will establish the session start time.
+    m_setStartTime = true;
+
+    m_state.storeRelease(WriterStateActive);
+
+    [m_assetWriter startWriting];
+    AVCaptureSession *session = m_service->session()->captureSession();
+    if (!session.running)
+        [session startRunning];
+}
+
+// Stops a recording cleanly. The ordering here is deliberate: m_state is
+// flipped to Idle first, then both serial queues are drained synchronously
+// so no buffer can be appended after finishWriting is invoked.
+- (void)stop
+{
+    if (m_state.loadAcquire() != WriterStateActive && m_state.loadAcquire() != WriterStatePaused)
+        return;
+
+    if ([m_assetWriter status] != AVAssetWriterStatusWriting
+        && [m_assetWriter status] != AVAssetWriterStatusFailed)
+        return;
+
+    // Do this here so that -
+    // 1. '-abort' should not try calling finishWriting again and
+    // 2. async block (see below) will know if recorder control was deleted
+    //    before the block's execution:
+    m_state.storeRelease(WriterStateIdle);
+    // Now, since we have to ensure no sample buffers are
+    // appended after a call to finishWriting, we must
+    // ensure writer's queue sees this change in m_state
+    // _before_ we call finishWriting:
+    dispatch_sync(m_writerQueue, ^{});
+    // Done, but now we also want to prevent video queue
+    // from updating our viewfinder:
+    if (m_videoQueue)
+        dispatch_sync(m_videoQueue, ^{});
+
+    // Now we're safe to stop:
+    [m_assetWriter finishWritingWithCompletionHandler:^{
+        // This block is async, so by the time it's executed,
+        // it's possible that render control was deleted already ...
+        if (m_state.loadAcquire() == WriterStateAborted)
+            return;
+
+        AVCaptureSession *session = m_service->session()->captureSession();
+        if (session.running)
+            [session stopRunning];
+        QMetaObject::invokeMethod(m_delegate, "assetWriterFinished", Qt::QueuedConnection);
+    }];
+}
+
+// Aborts a recording without notifying the (possibly already destroyed)
+// delegate; unlike -stop, the completion handler does nothing.
+- (void)abort
+{
+    // -abort is to be called from recorder control's dtor.
+
+    if (m_state.fetchAndStoreRelease(WriterStateAborted) != WriterStateActive) {
+        // Not recording, nothing to stop.
+        return;
+    }
+
+    // From Apple's docs:
+    // "To guarantee that all sample buffers are successfully written,
+    //  you must ensure that all calls to appendSampleBuffer: and
+    //  appendPixelBuffer:withPresentationTime: have returned before
+    //  invoking this method."
+    //
+    // The only way we can ensure this is:
+    dispatch_sync(m_writerQueue, ^{});
+    // At this point next block (if any) on the writer's queue
+    // will see m_state preventing it from any further processing.
+    if (m_videoQueue)
+        dispatch_sync(m_videoQueue, ^{});
+    // After this point video queue will not try to modify our
+    // viewfinder, so we're safe to delete now.
+
+    [m_assetWriter finishWritingWithCompletionHandler:^{
+    }];
+}
+
+// Suspends buffer appending; the next buffer written after -resume will
+// trigger a timestamp re-base (see m_adjustTime in the capture callback).
+- (void)pause
+{
+    const bool recording = m_state.loadAcquire() == WriterStateActive
+                           && [m_assetWriter status] == AVAssetWriterStatusWriting;
+    if (recording) {
+        m_state.storeRelease(WriterStatePaused);
+        m_adjustTime = true;
+    }
+}
+
+// Resumes appending after a -pause; only valid while the writer is still
+// in the Writing state.
+- (void)resume
+{
+    const bool paused = m_state.loadAcquire() == WriterStatePaused
+                        && [m_assetWriter status] == AVAssetWriterStatusWriting;
+    if (paused)
+        m_state.storeRelease(WriterStateActive);
+}
+
+// Establishes the recording's start time from the first sample buffer and
+// opens the asset writer's session at that timestamp.
+- (void)setStartTimeFrom:(CMSampleBufferRef)sampleBuffer
+{
+    // Writer's queue only.
+    Q_ASSERT(m_setStartTime);
+    Q_ASSERT(sampleBuffer);
+
+    if (m_state.loadAcquire() != WriterStateActive)
+        return;
+
+    QMetaObject::invokeMethod(m_delegate, "assetWriterStarted", Qt::QueuedConnection);
+
+    m_durationInMs.storeRelease(0);
+    m_startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+    m_lastTimeStamp = m_startTime;
+    [m_assetWriter startSessionAtSourceTime:m_startTime];
+    m_setStartTime = false;
+}
+
+// Returns a copy of 'sample' with every timing entry shifted back by
+// 'offset' (used to splice out paused intervals). The returned buffer is
+// retained (+1) and must be CFRelease'd by the caller. On any failure the
+// original buffer is retained and returned unmodified, so the caller's
+// release semantics always hold; the previous version returned an
+// *uninitialized* CMSampleBufferRef when the timed copy could not be made.
+- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset
+{
+    CMItemCount count = 0;
+    // First call with 0 entries only queries the required array size.
+    CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
+    CMSampleTimingInfo *timingInfo = (CMSampleTimingInfo *)malloc(sizeof(CMSampleTimingInfo) * count);
+    if (!timingInfo) {
+        CFRetain(sample);
+        return sample;
+    }
+    CMSampleBufferGetSampleTimingInfoArray(sample, count, timingInfo, &count);
+    for (CMItemCount i = 0; i < count; i++)
+    {
+        timingInfo[i].decodeTimeStamp = CMTimeSubtract(timingInfo[i].decodeTimeStamp, offset);
+        timingInfo[i].presentationTimeStamp = CMTimeSubtract(timingInfo[i].presentationTimeStamp, offset);
+    }
+    CMSampleBufferRef updatedBuffer = nullptr;
+    const OSStatus status = CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, sample, count, timingInfo, &updatedBuffer);
+    free(timingInfo);
+    if (status != noErr || !updatedBuffer) {
+        // Copy failed: hand back the original with a +1 retain instead of
+        // an indeterminate pointer.
+        CFRetain(sample);
+        return sample;
+    }
+    return updatedBuffer;
+}
+
+// Appends one video buffer to the camera writer input, first latching the
+// session start time if this is the very first buffer. Buffers are dropped
+// when the input is not ready (expected for real-time sources).
+- (void)writeVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
+{
+    // This code is executed only on a writer's queue.
+    Q_ASSERT(sampleBuffer);
+
+    if (m_state.loadAcquire() == WriterStateActive) {
+        if (m_setStartTime)
+            [self setStartTimeFrom:sampleBuffer];
+
+        if (m_cameraWriterInput.data().readyForMoreMediaData) {
+            [self updateDuration:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
+            [m_cameraWriterInput appendSampleBuffer:sampleBuffer];
+        }
+    }
+}
+
+// Appends one audio buffer to the audio writer input; mirrors
+// -writeVideoSampleBuffer: (start-time latch, drop when input not ready).
+- (void)writeAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer
+{
+    Q_ASSERT(sampleBuffer);
+
+    // This code is executed only on a writer's queue.
+    if (m_state.loadAcquire() == WriterStateActive) {
+        if (m_setStartTime)
+            [self setStartTimeFrom:sampleBuffer];
+
+        if (m_audioWriterInput.data().readyForMoreMediaData) {
+            [self updateDuration:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
+            [m_audioWriterInput appendSampleBuffer:sampleBuffer];
+        }
+    }
+}
+
+// Capture-output delegate callback (runs on m_videoQueue/m_audioQueue).
+// Forwards the buffer to the viewfinder/audio-preview delegates, re-bases
+// timestamps after a pause, and hands the (retained) buffer over to the
+// writer queue. Fix: the paused-offset path previously CFRelease'd
+// sampleBuffer *before* passing it to -adjustTime:, i.e. used the buffer
+// after dropping our retain; the re-timed copy is now created first.
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+        didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+        fromConnection:(AVCaptureConnection *)connection
+{
+    Q_UNUSED(connection);
+    Q_ASSERT(m_service && m_service->session());
+
+    if (m_state.loadAcquire() != WriterStateActive && m_state.loadAcquire() != WriterStatePaused)
+        return;
+
+    if ([m_assetWriter status] != AVAssetWriterStatusWriting) {
+        if ([m_assetWriter status] == AVAssetWriterStatusFailed) {
+            // Report the writer's failure reason (plus recovery suggestion,
+            // when present) to the encoder on its own thread.
+            NSError *error = [m_assetWriter error];
+            NSString *failureReason = error.localizedFailureReason;
+            NSString *suggestion = error.localizedRecoverySuggestion;
+            NSString *errorString = suggestion ? [failureReason stringByAppendingString:suggestion] : failureReason;
+            QMetaObject::invokeMethod(m_delegate, "assetWriterError",
+                                      Qt::QueuedConnection,
+                                      Q_ARG(QString, QString::fromNSString(errorString)));
+        }
+        return;
+    }
+
+    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
+        qWarning() << Q_FUNC_INFO << "sample buffer is not ready, skipping.";
+        return;
+    }
+
+    // Keep the buffer alive until the writer-queue block releases it.
+    CFRetain(sampleBuffer);
+
+    bool isVideoBuffer = true;
+    isVideoBuffer = (captureOutput != m_service->session()->audioOutput());
+    if (isVideoBuffer) {
+        // Find renderer control's delegate and invoke its method to
+        // show updated viewfinder's frame.
+        if (m_service->session()->videoOutput()) {
+            NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> *vfDelegate =
+                (NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> *)m_service->session()->videoOutput()->captureDelegate();
+            if (vfDelegate) {
+                AVCaptureOutput *output = nil;
+                AVCaptureConnection *connection = nil;
+                [vfDelegate captureOutput:output didOutputSampleBuffer:sampleBuffer fromConnection:connection];
+            }
+        }
+    } else {
+        if (m_service->session()->audioOutput()) {
+            NSObject<AVCaptureAudioDataOutputSampleBufferDelegate> *audioPreviewDelegate =
+                (NSObject<AVCaptureAudioDataOutputSampleBufferDelegate> *)m_service->session()->audioPreviewDelegate();
+            if (audioPreviewDelegate) {
+                AVCaptureOutput *output = nil;
+                AVCaptureConnection *connection = nil;
+                [audioPreviewDelegate captureOutput:output didOutputSampleBuffer:sampleBuffer fromConnection:connection];
+            }
+        }
+    }
+
+    // While paused we only feed the preview above; nothing is written.
+    if (m_state.loadAcquire() != WriterStateActive) {
+        CFRelease(sampleBuffer);
+        return;
+    }
+
+    if (m_adjustTime) {
+        // First buffer after a resume: grow m_timeOffset by the length of
+        // the pause so the output timeline has no gap.
+        CMTime currentTimestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+        CMTime lastTimestamp = isVideoBuffer ? m_lastVideoTimestamp : m_lastAudioTimestamp;
+
+        if (!CMTIME_IS_INVALID(lastTimestamp)) {
+            if (!CMTIME_IS_INVALID(m_timeOffset))
+                currentTimestamp = CMTimeSubtract(currentTimestamp, m_timeOffset);
+
+            CMTime pauseDuration = CMTimeSubtract(currentTimestamp, lastTimestamp);
+
+            if (m_timeOffset.value == 0)
+                m_timeOffset = pauseDuration;
+            else
+                m_timeOffset = CMTimeAdd(m_timeOffset, pauseDuration);
+        }
+        m_lastVideoTimestamp = kCMTimeInvalid;
+        m_adjustTime = false;
+    }
+
+    if (m_timeOffset.value > 0) {
+        // Create the re-timed copy *before* dropping our retain on the
+        // original buffer (the old order released the buffer and then
+        // dereferenced it inside -adjustTime:).
+        CMSampleBufferRef adjustedBuffer = [self adjustTime:sampleBuffer by:m_timeOffset];
+        CFRelease(sampleBuffer);
+        sampleBuffer = adjustedBuffer;
+    }
+
+    CMTime currentTimestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+    CMTime currentDuration = CMSampleBufferGetDuration(sampleBuffer);
+    if (currentDuration.value > 0)
+        currentTimestamp = CMTimeAdd(currentTimestamp, currentDuration);
+
+    if (isVideoBuffer)
+    {
+        m_lastVideoTimestamp = currentTimestamp;
+        dispatch_async(m_writerQueue, ^{
+            [self writeVideoSampleBuffer:sampleBuffer];
+            // Audio may be written from now on (video track started).
+            m_writeFirstAudioBuffer = true;
+            CFRelease(sampleBuffer);
+        });
+    } else if (m_writeFirstAudioBuffer) {
+        m_lastAudioTimestamp = currentTimestamp;
+        dispatch_async(m_writerQueue, ^{
+            [self writeAudioSampleBuffer:sampleBuffer];
+            CFRelease(sampleBuffer);
+        });
+    }
+}
+
+// Creates and attaches the asset-writer inputs. Video input failure is
+// fatal; audio input failure is tolerated as long as video can be written.
+- (bool)addWriterInputs
+{
+    Q_ASSERT(m_service && m_service->session());
+    Q_ASSERT(m_assetWriter.data());
+
+    AVFCameraSession *session = m_service->session();
+
+    m_cameraWriterInput.reset();
+    // m_videoQueue was created only if a full video pipeline exists.
+    if (m_videoQueue)
+    {
+        Q_ASSERT(session->videoCaptureDevice() && session->videoOutput() && session->videoOutput()->videoDataOutput());
+        m_cameraWriterInput.reset([[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
+                                   outputSettings:m_videoSettings
+                                   sourceFormatHint:session->videoCaptureDevice().activeFormat.formatDescription]);
+
+        if (m_cameraWriterInput && [m_assetWriter canAddInput:m_cameraWriterInput]) {
+            [m_assetWriter addInput:m_cameraWriterInput];
+        } else {
+            qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to add camera writer input";
+            m_cameraWriterInput.reset();
+            return false;
+        }
+
+        // Required for live (non file-based) sources.
+        m_cameraWriterInput.data().expectsMediaDataInRealTime = YES;
+    }
+
+    m_audioWriterInput.reset();
+    if (m_audioQueue) {
+        m_audioWriterInput.reset([[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio
+                                  outputSettings:m_audioSettings]);
+        if (!m_audioWriterInput) {
+            qWarning() << Q_FUNC_INFO << "failed to create audio writer input";
+            // But we still can record video.
+            if (!m_cameraWriterInput)
+                return false;
+        } else if ([m_assetWriter canAddInput:m_audioWriterInput]) {
+            [m_assetWriter addInput:m_audioWriterInput];
+            m_audioWriterInput.data().expectsMediaDataInRealTime = YES;
+        } else {
+            qWarning() << Q_FUNC_INFO << "failed to add audio writer input";
+            m_audioWriterInput.reset();
+            if (!m_cameraWriterInput)
+                return false;
+            // We can (still) write video though ...
+        }
+    }
+
+    return true;
+}
+
+// Registers self as the sample-buffer delegate of both capture outputs,
+// delivering callbacks on our private serial queues.
+- (void)setQueues
+{
+    Q_ASSERT(m_service && m_service->session());
+    AVFCameraSession *session = m_service->session();
+
+    if (m_videoQueue) {
+        Q_ASSERT(session->videoOutput() && session->videoOutput()->videoDataOutput());
+        [session->videoOutput()->videoDataOutput() setSampleBufferDelegate:self queue:m_videoQueue];
+    }
+
+    if (m_audioQueue) {
+        Q_ASSERT(session->audioOutput());
+        [session->audioOutput() setSampleBufferDelegate:self queue:m_audioQueue];
+    }
+}
+
+// Recomputes the recorded duration (ms) whenever a newer timestamp arrives
+// and forwards it to the encoder delegate. Writer's queue only.
+// NOTE(review): CMTimeCompare against kCMTimeInvalid is used here as a
+// "time is valid" check -- confirm this matches CMTIME_IS_VALID semantics.
+- (void)updateDuration:(CMTime)newTimeStamp
+{
+    Q_ASSERT(CMTimeCompare(m_startTime, kCMTimeInvalid));
+    Q_ASSERT(CMTimeCompare(m_lastTimeStamp, kCMTimeInvalid));
+    if (CMTimeCompare(newTimeStamp, m_lastTimeStamp) > 0) {
+
+        const CMTime duration = CMTimeSubtract(newTimeStamp, m_startTime);
+        if (!CMTimeCompare(duration, kCMTimeInvalid))
+            return;
+
+        m_durationInMs.storeRelease(CMTimeGetSeconds(duration) * 1000);
+        m_lastTimeStamp = newTimeStamp;
+
+        m_delegate->updateDuration([self durationInMs]);
+    }
+}
+
+// Thread-safe accessor for the current recording duration in milliseconds.
+- (qint64)durationInMs
+{
+    return m_durationInMs.loadAcquire();
+}
+
+@end
diff --git a/src/plugins/multimedia/darwin/camera/avfmediaassetwriter_p.h b/src/plugins/multimedia/darwin/camera/avfmediaassetwriter_p.h
new file mode 100644
index 000000000..8fe3e8522
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfmediaassetwriter_p.h
@@ -0,0 +1,54 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFMEDIAASSETWRITER_H
+#define AVFMEDIAASSETWRITER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "avfcamerautility_p.h"
+#include "qmediaformat.h"
+
+#include <QtCore/qglobal.h>
+
+#include <AVFoundation/AVFoundation.h>
+
+QT_BEGIN_NAMESPACE
+
+class AVFMediaEncoder;
+class AVFCameraService;
+
+QT_END_NAMESPACE
+
+// Asset writer that records the capture session's audio/video sample
+// buffers to a file; also acts as sample-buffer delegate for both outputs.
+@interface QT_MANGLE_NAMESPACE(AVFMediaAssetWriter) : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate,
+                                                      AVCaptureAudioDataOutputSampleBufferDelegate>
+- (id)initWithDelegate:(QT_PREPEND_NAMESPACE(AVFMediaEncoder) *)delegate;
+
+// Prepares queues, writer and inputs; returns false if recording is
+// impossible (audio-only failures fall back to video-only recording).
+- (bool)setupWithFileURL:(NSURL *)fileURL
+        cameraService:(QT_PREPEND_NAMESPACE(AVFCameraService) *)service
+        audioSettings:(NSDictionary *)audioSettings
+        videoSettings:(NSDictionary *)videoSettings
+        fileFormat:(QMediaFormat::FileFormat)fileFormat
+        transform:(CGAffineTransform)transform;
+
+// This to be called from the recorder control's thread:
+- (void)start;
+- (void)stop;
+- (void)pause;
+- (void)resume;
+// This to be called from the recorder control's dtor:
+- (void)abort;
+- (qint64)durationInMs;
+
+@end
+
+#endif // AVFMEDIAASSETWRITER_H
diff --git a/src/plugins/multimedia/darwin/camera/avfmediaencoder.mm b/src/plugins/multimedia/darwin/camera/avfmediaencoder.mm
new file mode 100644
index 000000000..3fbc57995
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfmediaencoder.mm
@@ -0,0 +1,664 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+
+#include "avfmediaencoder_p.h"
+#include "avfcamerarenderer_p.h"
+#include "avfcamerasession_p.h"
+#include "avfcamera_p.h"
+#include "avfcameraservice_p.h"
+#include "avfcameradebug_p.h"
+#include "avfcamerautility_p.h"
+#include "qaudiodevice.h"
+
+#include "qmediadevices.h"
+#include "private/qmediastoragelocation_p.h"
+#include "private/qmediarecorder_p.h"
+#include "qdarwinformatsinfo_p.h"
+#include "private/qplatformaudiooutput_p.h"
+#include <private/qplatformaudioinput_p.h>
+
+#include <QtCore/qmath.h>
+#include <QtCore/qdebug.h>
+#include <QtCore/qmimetype.h>
+
+#include <private/qcoreaudioutils_p.h>
+
+QT_USE_NAMESPACE
+
+namespace {
+
+// True if fileURL is a local file URL whose parent directory is writable
+// (the file itself need not exist yet).
+bool qt_is_writable_file_URL(NSURL *fileURL)
+{
+    Q_ASSERT(fileURL);
+
+    if (![fileURL isFileURL])
+        return false;
+
+    if (NSString *path = [[fileURL path] stringByExpandingTildeInPath]) {
+        // Check the containing directory, not the target file.
+        return [[NSFileManager defaultManager]
+                isWritableFileAtPath:[path stringByDeletingLastPathComponent]];
+    }
+
+    return false;
+}
+
+// True if the local file referenced by fileURL already exists.
+bool qt_file_exists(NSURL *fileURL)
+{
+    Q_ASSERT(fileURL);
+
+    NSString * const expandedPath = [[fileURL path] stringByExpandingTildeInPath];
+    if (!expandedPath)
+        return false;
+
+    return [[NSFileManager defaultManager] fileExistsAtPath:expandedPath];
+}
+
+}
+
+// Constructs the platform media recorder and its backing asset writer.
+// On writer-creation failure the encoder stays constructed but inert.
+AVFMediaEncoder::AVFMediaEncoder(QMediaRecorder *parent)
+    : QObject(parent)
+    , QPlatformMediaRecorder(parent)
+    , m_state(QMediaRecorder::StoppedState)
+    , m_duration(0)
+    , m_audioSettings(nil)
+    , m_videoSettings(nil)
+    //, m_restoreFPS(-1, -1)
+{
+    m_writer.reset([[QT_MANGLE_NAMESPACE(AVFMediaAssetWriter) alloc] initWithDelegate:this]);
+    if (!m_writer) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create an asset writer";
+        return;
+    }
+}
+
+// Aborts any in-flight recording (required by the writer's contract) and
+// releases the retained settings dictionaries.
+AVFMediaEncoder::~AVFMediaEncoder()
+{
+    [m_writer abort];
+
+    if (m_audioSettings)
+        [m_audioSettings release];
+    if (m_videoSettings)
+        [m_videoSettings release];
+}
+
+// Only local ("file"-scheme or scheme-less) URLs can be written.
+bool AVFMediaEncoder::isLocationWritable(const QUrl &location) const
+{
+    const QString scheme = location.scheme();
+    return scheme.isEmpty() || scheme == QLatin1String("file");
+}
+
+// Current recorder state (Stopped/Recording/Paused).
+QMediaRecorder::RecorderState AVFMediaEncoder::state() const
+{
+    return m_state;
+}
+
+// Last duration (ms) reported by the asset writer via updateDuration().
+qint64 AVFMediaEncoder::duration() const
+{
+    return m_duration;
+}
+
+// Invoked by the asset writer; caches the duration and notifies listeners.
+void AVFMediaEncoder::updateDuration(qint64 duration)
+{
+    m_duration = duration;
+    durationChanged(m_duration);
+}
+
+// Builds the AVAssetWriterInput audio output-settings dictionary from the
+// requested encoder settings, validating each value (bit rate, sample rate,
+// channel count/layout) against what the codec actually supports and
+// falling back to safe defaults otherwise. Returns an autoreleased dict.
+static NSDictionary *avfAudioSettings(const QMediaEncoderSettings &encoderSettings, const QAudioFormat &format)
+{
+    NSMutableDictionary *settings = [NSMutableDictionary dictionary];
+
+    // Codec
+    int codecId = QDarwinFormatInfo::audioFormatForCodec(encoderSettings.mediaFormat().audioCodec());
+    [settings setObject:[NSNumber numberWithInt:codecId] forKey:AVFormatIDKey];
+
+    // Setting AVEncoderQualityKey is not allowed when format ID is alac or lpcm
+    if (codecId != kAudioFormatAppleLossless && codecId != kAudioFormatLinearPCM
+        && encoderSettings.encodingMode() == QMediaRecorder::ConstantQualityEncoding) {
+        // AudioQuality
+        int quality;
+        switch (encoderSettings.quality()) {
+        case QMediaRecorder::VeryLowQuality:
+            quality = AVAudioQualityMin;
+            break;
+        case QMediaRecorder::LowQuality:
+            quality = AVAudioQualityLow;
+            break;
+        case QMediaRecorder::HighQuality:
+            quality = AVAudioQualityHigh;
+            break;
+        case QMediaRecorder::VeryHighQuality:
+            quality = AVAudioQualityMax;
+            break;
+        case QMediaRecorder::NormalQuality:
+        default:
+            quality = AVAudioQualityMedium;
+            break;
+        }
+        [settings setObject:[NSNumber numberWithInt:quality] forKey:AVEncoderAudioQualityKey];
+    } else {
+        // BitRate
+        bool isBitRateSupported = false;
+        int bitRate = encoderSettings.audioBitRate();
+        if (bitRate > 0) {
+            // Only apply the bit rate if it falls into a range the codec
+            // advertises; otherwise leave the encoder's default.
+            QList<AudioValueRange> bitRates = qt_supported_bit_rates_for_format(codecId);
+            for (int i = 0; i < bitRates.count(); i++) {
+                if (bitRate >= bitRates[i].mMinimum &&
+                    bitRate <= bitRates[i].mMaximum) {
+                    isBitRateSupported = true;
+                    break;
+                }
+            }
+            if (isBitRateSupported)
+                [settings setObject:[NSNumber numberWithInt:encoderSettings.audioBitRate()]
+                                     forKey:AVEncoderBitRateKey];
+        }
+    }
+
+    // SampleRate
+    int sampleRate = encoderSettings.audioSampleRate();
+    bool isSampleRateSupported = false;
+    if (sampleRate >= 8000 && sampleRate <= 192000) {
+        QList<AudioValueRange> sampleRates = qt_supported_sample_rates_for_format(codecId);
+        for (int i = 0; i < sampleRates.count(); i++) {
+            if (sampleRate >= sampleRates[i].mMinimum && sampleRate <= sampleRates[i].mMaximum) {
+                isSampleRateSupported = true;
+                break;
+            }
+        }
+    }
+    // 44.1 kHz is the universal fallback.
+    if (!isSampleRateSupported)
+        sampleRate = 44100;
+    [settings setObject:[NSNumber numberWithInt:sampleRate] forKey:AVSampleRateKey];
+
+    // Channels
+    int channelCount = encoderSettings.audioChannelCount();
+    bool isChannelCountSupported = false;
+    if (channelCount > 0) {
+        std::optional<QList<UInt32>> channelCounts = qt_supported_channel_counts_for_format(codecId);
+        // An std::nullopt result indicates that
+        // any number of channels can be encoded.
+        if (channelCounts == std::nullopt) {
+            isChannelCountSupported = true;
+        } else {
+            for (int i = 0; i < channelCounts.value().count(); i++) {
+                if ((UInt32)channelCount == channelCounts.value()[i]) {
+                    isChannelCountSupported = true;
+                    break;
+                }
+            }
+        }
+
+        // if channel count is provided and it's bigger than 2
+        // provide a supported channel layout
+        if (isChannelCountSupported && channelCount > 2) {
+            AudioChannelLayout channelLayout;
+            memset(&channelLayout, 0, sizeof(AudioChannelLayout));
+            auto channelLayoutTags = qt_supported_channel_layout_tags_for_format(codecId, channelCount);
+            if (channelLayoutTags.size()) {
+                channelLayout.mChannelLayoutTag = channelLayoutTags.first();
+                [settings setObject:[NSData dataWithBytes: &channelLayout length: sizeof(channelLayout)] forKey:AVChannelLayoutKey];
+            } else {
+                isChannelCountSupported = false;
+            }
+        }
+
+        if (isChannelCountSupported)
+            [settings setObject:[NSNumber numberWithInt:channelCount] forKey:AVNumberOfChannelsKey];
+    }
+
+    if (!isChannelCountSupported) {
+        // fallback to providing channel layout if channel count is not specified or supported
+        UInt32 size = 0;
+        if (format.isValid()) {
+            auto layout = CoreAudioUtils::toAudioChannelLayout(format, &size);
+            [settings setObject:[NSData dataWithBytes:layout.get() length:sizeof(AudioChannelLayout)] forKey:AVChannelLayoutKey];
+        } else {
+            // finally default to setting channel count to 1
+            [settings setObject:[NSNumber numberWithInt:1] forKey:AVNumberOfChannelsKey];
+        }
+    }
+
+    if (codecId == kAudioFormatAppleLossless)
+        [settings setObject:[NSNumber numberWithInt:24] forKey:AVEncoderBitDepthHintKey];
+
+    // Uncompressed PCM needs an explicit, fully specified sample format.
+    if (codecId == kAudioFormatLinearPCM) {
+        [settings setObject:[NSNumber numberWithInt:16] forKey:AVLinearPCMBitDepthKey];
+        [settings setObject:[NSNumber numberWithInt:NO] forKey:AVLinearPCMIsBigEndianKey];
+        [settings setObject:[NSNumber numberWithInt:NO] forKey:AVLinearPCMIsFloatKey];
+        [settings setObject:[NSNumber numberWithInt:NO] forKey:AVLinearPCMIsNonInterleaved];
+    }
+
+    return settings;
+}
+
+// Builds the AVAssetWriterInput video output-settings dictionary. Clamps
+// the requested resolution to the device's active format (matching its
+// aspect ratio), possibly switches the device's active format to satisfy
+// resolution/frame-rate requests, and writes the values actually used back
+// into 'encoderSettings'. Returns an autoreleased dictionary, or nil when
+// no device is available.
+// Fix: the ConstantQualityEncoding warning was inverted -- it fires when
+// the codec is NOT MotionJPEG (the only codec the quality key is applied
+// to below), but claimed quality encoding "is not supported for MotionJPEG".
+NSDictionary *avfVideoSettings(QMediaEncoderSettings &encoderSettings, AVCaptureDevice *device, AVCaptureConnection *connection, QSize nativeSize)
+{
+    if (!device)
+        return nil;
+
+
+    // ### re-add needFpsChange
+//    AVFPSRange currentFps = qt_current_framerates(device, connection);
+
+    NSMutableDictionary *videoSettings = [NSMutableDictionary dictionary];
+
+    // -- Codec
+
+    // AVVideoCodecKey is the only mandatory key
+    auto codec = encoderSettings.mediaFormat().videoCodec();
+    NSString *c = QDarwinFormatInfo::videoFormatForCodec(codec);
+    [videoSettings setObject:c forKey:AVVideoCodecKey];
+    // NOTE(review): assumes videoFormatForCodec returns a +1 retained
+    // string -- confirm against QDarwinFormatInfo's implementation.
+    [c release];
+
+    // -- Resolution
+
+    int w = encoderSettings.videoResolution().width();
+    int h = encoderSettings.videoResolution().height();
+
+    if (AVCaptureDeviceFormat *currentFormat = device.activeFormat) {
+        CMFormatDescriptionRef formatDesc = currentFormat.formatDescription;
+        CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(formatDesc);
+        FourCharCode formatCodec = CMVideoFormatDescriptionGetCodecType(formatDesc);
+
+        // We have to change the device's activeFormat in 3 cases:
+        // - the requested recording resolution is higher than the current device resolution
+        // - the requested recording resolution has a different aspect ratio than the current device aspect ratio
+        // - the requested frame rate is not available for the current device format
+        AVCaptureDeviceFormat *newFormat = nil;
+        if ((w <= 0 || h <= 0)
+                && encoderSettings.videoFrameRate() > 0
+                && !qt_format_supports_framerate(currentFormat, encoderSettings.videoFrameRate())) {
+
+            newFormat = qt_find_best_framerate_match(device,
+                                                     formatCodec,
+                                                     encoderSettings.videoFrameRate());
+
+        } else if (w > 0 && h > 0) {
+            AVCaptureDeviceFormat *f = qt_find_best_resolution_match(device,
+                                                                     encoderSettings.videoResolution(),
+                                                                     formatCodec);
+
+            if (f) {
+                CMVideoDimensions d = CMVideoFormatDescriptionGetDimensions(f.formatDescription);
+                qreal fAspectRatio = qreal(d.width) / d.height;
+
+                if (w > dim.width || h > dim.height
+                    || qAbs((qreal(dim.width) / dim.height) - fAspectRatio) > 0.01) {
+                    newFormat = f;
+                }
+            }
+        }
+
+        if (qt_set_active_format(device, newFormat, false /*### !needFpsChange*/)) {
+            formatDesc = newFormat.formatDescription;
+            dim = CMVideoFormatDescriptionGetDimensions(formatDesc);
+        }
+
+        // No resolution requested: record at the device's native size.
+        if (w < 0 || h < 0) {
+            w = dim.width;
+            h = dim.height;
+        }
+
+
+        if (w > 0 && h > 0) {
+            // Make sure the recording resolution has the same aspect ratio as the device's
+            // current resolution
+            qreal deviceAspectRatio = qreal(dim.width) / dim.height;
+            qreal recAspectRatio = qreal(w) / h;
+            if (qAbs(deviceAspectRatio - recAspectRatio) > 0.01) {
+                if (recAspectRatio > deviceAspectRatio)
+                    w = qRound(h * deviceAspectRatio);
+                else
+                    h = qRound(w / deviceAspectRatio);
+            }
+
+            // recording resolution can't be higher than the device's active resolution
+            w = qMin(w, dim.width);
+            h = qMin(h, dim.height);
+        }
+    }
+
+    if (w > 0 && h > 0) {
+        // Width and height must be divisible by 2
+        w += w & 1;
+        h += h & 1;
+
+        bool isPortrait = nativeSize.width() < nativeSize.height();
+        // Make sure the video has the right aspect ratio
+        if (isPortrait && h < w)
+            qSwap(w, h);
+        else if (!isPortrait && w < h)
+            qSwap(w, h);
+
+        encoderSettings.setVideoResolution(QSize(w, h));
+    } else {
+        w = nativeSize.width();
+        h = nativeSize.height();
+        encoderSettings.setVideoResolution(nativeSize);
+    }
+    [videoSettings setObject:[NSNumber numberWithInt:w] forKey:AVVideoWidthKey];
+    [videoSettings setObject:[NSNumber numberWithInt:h] forKey:AVVideoHeightKey];
+
+    // -- FPS
+
+    if (true /*needFpsChange*/) {
+        const qreal fps = encoderSettings.videoFrameRate();
+        qt_set_framerate_limits(device, connection, fps, fps);
+    }
+    encoderSettings.setVideoFrameRate(qt_current_framerates(device, connection).second);
+
+    // -- Codec Settings
+
+    NSMutableDictionary *codecProperties = [NSMutableDictionary dictionary];
+    int bitrate = -1;
+    float quality = -1.f;
+
+    if (encoderSettings.encodingMode() == QMediaRecorder::ConstantQualityEncoding) {
+        if (encoderSettings.quality() != QMediaRecorder::NormalQuality) {
+            if (codec != QMediaFormat::VideoCodec::MotionJPEG) {
+                // AVVideoQualityKey is honoured only by the (Motion)JPEG
+                // encoder, so warn for every *other* codec.
+                qWarning("ConstantQualityEncoding is only supported for MotionJPEG");
+            } else {
+                switch (encoderSettings.quality()) {
+                case QMediaRecorder::VeryLowQuality:
+                    quality = 0.f;
+                    break;
+                case QMediaRecorder::LowQuality:
+                    quality = 0.25f;
+                    break;
+                case QMediaRecorder::HighQuality:
+                    quality = 0.75f;
+                    break;
+                case QMediaRecorder::VeryHighQuality:
+                    quality = 1.f;
+                    break;
+                default:
+                    quality = -1.f; // NormalQuality, let the system decide
+                    break;
+                }
+            }
+        }
+    } else if (encoderSettings.encodingMode() == QMediaRecorder::AverageBitRateEncoding) {
+        if (codec != QMediaFormat::VideoCodec::H264 && codec != QMediaFormat::VideoCodec::H265)
+            qWarning() << "AverageBitRateEncoding is not supported for codec" << QMediaFormat::videoCodecName(codec);
+        else
+            bitrate = encoderSettings.videoBitRate();
+    } else {
+        qWarning("Encoding mode is not supported");
+    }
+
+    if (bitrate != -1)
+        [codecProperties setObject:[NSNumber numberWithInt:bitrate] forKey:AVVideoAverageBitRateKey];
+    if (quality != -1.f)
+        [codecProperties setObject:[NSNumber numberWithFloat:quality] forKey:AVVideoQualityKey];
+
+    [videoSettings setObject:codecProperties forKey:AVVideoCompressionPropertiesKey];
+
+    return videoSettings;
+}
+
// Build and retain the audio/video settings dictionaries the asset writer
// will use for the next recording. Previously applied settings are released
// first. Video settings are skipped when the session has no capture device
// (audio-only recording).
void AVFMediaEncoder::applySettings(QMediaEncoderSettings &settings)
{
    unapplySettings();

    AVFCameraSession *session = m_service->session();

    // audio settings
    const auto audioInput = m_service->audioInput();
    const QAudioFormat audioFormat = audioInput ? audioInput->device.preferredFormat() : QAudioFormat();
    m_audioSettings = avfAudioSettings(settings, audioFormat);
    if (m_audioSettings)
        [m_audioSettings retain]; // keep alive beyond the autorelease pool; released in unapplySettings()

    // video settings
    AVCaptureDevice *device = session->videoCaptureDevice();
    if (!device)
        return;
    const AVFConfigurationLock lock(device); // prevents activeFormat from being overridden
    AVCaptureConnection *conn = [session->videoOutput()->videoDataOutput() connectionWithMediaType:AVMediaTypeVideo];
    auto nativeSize = session->videoOutput()->nativeSize();
    m_videoSettings = avfVideoSettings(settings, device, conn, nativeSize);
    if (m_videoSettings)
        [m_videoSettings retain]; // released in unapplySettings()
}
+
// Release the retained audio/video settings dictionaries (if any) and
// reset the members to nil so a later applySettings() starts clean.
void AVFMediaEncoder::unapplySettings()
{
    const auto dropSettings = [](NSDictionary *&settings) {
        if (!settings)
            return;
        [settings release];
        settings = nil;
    };

    dropSettings(m_audioSettings);
    dropSettings(m_videoSettings);
}
+
// Store the metadata to be written into the next recording.
void AVFMediaEncoder::setMetaData(const QMediaMetaData &metaData)
{
    m_metaData = metaData;
}

// Return the metadata previously set via setMetaData().
QMediaMetaData AVFMediaEncoder::metaData() const
{
    return m_metaData;
}
+
// Attach this encoder to a capture session (or detach with nullptr).
// Any recording in progress on the previous service is stopped first;
// onCameraChanged() is invoked immediately to pick up the current camera.
void AVFMediaEncoder::setCaptureSession(QPlatformMediaCaptureSession *session)
{
    AVFCameraService *captureSession = static_cast<AVFCameraService *>(session);
    if (m_service == captureSession)
        return;

    if (m_service)
        stop();

    m_service = captureSession;
    if (!m_service)
        return;

    connect(m_service, &AVFCameraService::cameraChanged, this, &AVFMediaEncoder::onCameraChanged);
    onCameraChanged();
}
+
// Start a recording with the given encoder settings. Validates the service,
// writer, inputs and destination URL, applies the settings, then hands off
// to the asset writer. The capture session is stopped here and restarted by
// the writer (see assetWriterStarted/assetWriterFinished).
void AVFMediaEncoder::record(QMediaEncoderSettings &settings)
{
    if (!m_service || !m_service->session()) {
        qWarning() << Q_FUNC_INFO << "Encoder is not set to a capture session";
        return;
    }

    if (!m_writer) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "Invalid recorder";
        return;
    }

    if (QMediaRecorder::RecordingState == m_state)
        return; // already recording

    AVFCamera *cameraControl = m_service->avfCameraControl();
    auto audioInput = m_service->audioInput();

    if (!cameraControl && !audioInput) {
        qWarning() << Q_FUNC_INFO << "Cannot record without any inputs";
        updateError(QMediaRecorder::ResourceError, tr("No inputs specified"));
        return;
    }

    m_service->session()->setActive(true);
    // No video codec requested -> audio-only recording.
    const bool audioOnly = settings.videoCodec() == QMediaFormat::VideoCodec::Unspecified;
    AVCaptureSession *session = m_service->session()->captureSession();
    float rotation = 0; // NOTE(review): never updated, so the transform below is always identity — confirm intended

    if (!audioOnly) {
        if (!cameraControl || !cameraControl->isActive()) {
            qCDebug(qLcCamera) << Q_FUNC_INFO << "can not start record while camera is not active";
            updateError(QMediaRecorder::ResourceError,
                        QMediaRecorderPrivate::msgFailedStartRecording());
            return;
        }
    }

    // Resolve the destination into a unique local file URL with a suffix
    // matching the requested container format.
    const QString path(outputLocation().scheme() == QLatin1String("file") ?
                           outputLocation().path() : outputLocation().toString());
    const QUrl fileURL(QUrl::fromLocalFile(QMediaStorageLocation::generateFileName(path,
                    audioOnly ? QStandardPaths::MusicLocation : QStandardPaths::MoviesLocation,
                    settings.mimeType().preferredSuffix())));

    NSURL *nsFileURL = fileURL.toNSURL();
    if (!nsFileURL) {
        qWarning() << Q_FUNC_INFO << "invalid output URL:" << fileURL;
        updateError(QMediaRecorder::ResourceError, tr("Invalid output file URL"));
        return;
    }
    if (!qt_is_writable_file_URL(nsFileURL)) {
        qWarning() << Q_FUNC_INFO << "invalid output URL:" << fileURL
                   << "(the location is not writable)";
        updateError(QMediaRecorder::ResourceError, tr("Non-writeable file location"));
        return;
    }
    if (qt_file_exists(nsFileURL)) {
        // We test for/handle this error here since AVAssetWriter will raise an
        // Objective-C exception, which is not good at all.
        qWarning() << Q_FUNC_INFO << "invalid output URL:" << fileURL
                   << "(file already exists)";
        updateError(QMediaRecorder::ResourceError, tr("File already exists"));
        return;
    }

    applySettings(settings);

    QVideoOutputOrientationHandler::setIsRecording(true);

    // We stop session now so that no more frames for renderer's queue
    // generated, will restart in assetWriterStarted.
    [session stopRunning];

    if ([m_writer setupWithFileURL:nsFileURL
                     cameraService:m_service
                     audioSettings:m_audioSettings
                     videoSettings:m_videoSettings
                        fileFormat:settings.fileFormat()
                         transform:CGAffineTransformMakeRotation(qDegreesToRadians(rotation))]) {

        m_state = QMediaRecorder::RecordingState;

        Q_EMIT actualLocationChanged(fileURL);
        Q_EMIT stateChanged(m_state);

        // Apple recommends to call startRunning and do all
        // setup on a special queue, and that's what we had
        // initially (dispatch_async to writerQueue). Unfortunately,
        // writer's queue is not the only queue/thread that can
        // access/modify the session, and as a result we have
        // all possible data/race-conditions with Obj-C exceptions
        // at best and something worse in general.
        // Now we try to only modify session on the same thread.
        [m_writer start];
    } else {
        // Writer setup failed: resume the preview we stopped above.
        [session startRunning];
        updateError(QMediaRecorder::FormatError, QMediaRecorderPrivate::msgFailedStartRecording());
    }
}
+
// Pause an active recording; a no-op unless currently recording.
void AVFMediaEncoder::pause()
{
    const bool recording = m_service && m_service->session()
                           && state() == QMediaRecorder::RecordingState;
    if (!recording)
        return;

    toggleRecord(false);
    m_state = QMediaRecorder::PausedState;
    stateChanged(m_state);
}
+
// Resume a paused recording; a no-op unless currently paused.
void AVFMediaEncoder::resume()
{
    const bool paused = m_service && m_service->session()
                        && state() == QMediaRecorder::PausedState;
    if (!paused)
        return;

    toggleRecord(true);
    m_state = QMediaRecorder::RecordingState;
    stateChanged(m_state);
}
+
// Stop the recording (if one was started) and clear the global
// "recording" orientation flag unconditionally.
void AVFMediaEncoder::stop()
{
    const bool started = m_state != QMediaRecorder::StoppedState;
    // Do not check the camera status, we can stop if we started.
    if (started)
        stopWriter();

    QVideoOutputOrientationHandler::setIsRecording(false);
}
+
+
// Forward pause/resume to the asset writer: enable == true resumes,
// enable == false pauses. No-op without a valid service/session.
void AVFMediaEncoder::toggleRecord(bool enable)
{
    if (!m_service || !m_service->session())
        return;

    if (enable)
        [m_writer resume];
    else
        [m_writer pause];
}
+
// Writer callback for "writing started". Intentionally empty: the capture
// session restart is handled elsewhere (see the comment in record() —
// presumably by the writer itself; confirm against AVFMediaAssetWriter).
void AVFMediaEncoder::assetWriterStarted()
{
}
+
// Writer callback for "writing finished". Releases the applied settings,
// restores the capture delegates, restarts the capture session that
// record() stopped, and transitions back to StoppedState (emitting
// stateChanged only on an actual transition).
void AVFMediaEncoder::assetWriterFinished()
{
    const QMediaRecorder::RecorderState lastState = m_state;

    unapplySettings();

    if (m_service) {
        AVFCameraSession *session = m_service->session();

        if (session->videoOutput()) {
            session->videoOutput()->resetCaptureDelegate();
        }
        if (session->audioPreviewDelegate()) {
            [session->audioPreviewDelegate() resetAudioPreviewDelegate];
        }
        // Only restart the session if there is something to preview.
        if (session->videoOutput() || session->audioPreviewDelegate())
            [session->captureSession() startRunning];
    }

    m_state = QMediaRecorder::StoppedState;
    if (m_state != lastState)
        Q_EMIT stateChanged(m_state);
}
+
// Writer callback for a failure: surface a format error and, unless the
// encoder is already stopped, tear the writer down.
void AVFMediaEncoder::assetWriterError(QString err)
{
    updateError(QMediaRecorder::FormatError, err);

    const bool alreadyStopped = m_state == QMediaRecorder::StoppedState;
    if (!alreadyStopped)
        stopWriter();
}
+
// Re-wire this encoder to the service's current camera control so we can
// stop the writer when the camera deactivates.
void AVFMediaEncoder::onCameraChanged()
{
    if (m_service && m_service->avfCameraControl()) {
        AVFCamera *cameraControl = m_service->avfCameraControl();
        // Use the compile-time-checked connect syntax for consistency with
        // setCaptureSession(), and Qt::UniqueConnection so repeated camera
        // changes do not accumulate duplicate connections (the old
        // SIGNAL/SLOT form reconnected unconditionally on every change).
        connect(cameraControl, &AVFCamera::activeChanged,
                this, &AVFMediaEncoder::cameraActiveChanged,
                Qt::UniqueConnection);
    }
}
+
// Slot for AVFCamera::activeChanged: stop the writer when the camera goes
// inactive mid-recording. A service and camera control are preconditions
// (the connection is only made in onCameraChanged when both exist).
void AVFMediaEncoder::cameraActiveChanged(bool active)
{
    Q_ASSERT(m_service);
    // Assert directly on the expression; the original bound it to a local
    // used only inside Q_ASSERT, which triggers an unused-variable warning
    // in release builds where Q_ASSERT compiles away.
    Q_ASSERT(m_service->avfCameraControl());

    if (!active)
        stopWriter();
}
+
// Ask the asset writer to finish; completion is reported back through
// assetWriterFinished()/assetWriterError().
void AVFMediaEncoder::stopWriter()
{
    [m_writer stop];
}
+
+#include "moc_avfmediaencoder_p.cpp"
diff --git a/src/plugins/multimedia/darwin/camera/avfmediaencoder_p.h b/src/plugins/multimedia/darwin/camera/avfmediaencoder_p.h
new file mode 100644
index 000000000..23aced325
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfmediaencoder_p.h
@@ -0,0 +1,96 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFMEDIAENCODER_H
+#define AVFMEDIAENCODER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "avfmediaassetwriter_p.h"
+#include "avfcamerautility_p.h"
+#include "qaudiodevice.h"
+
+#include <private/qplatformmediarecorder_p.h>
+#include <private/qplatformmediacapture_p.h>
+#include <QtMultimedia/qmediametadata.h>
+
+#include <QtCore/qglobal.h>
+#include <QtCore/qurl.h>
+
+#include <AVFoundation/AVFoundation.h>
+
+QT_BEGIN_NAMESPACE
+
+class AVFCameraService;
+class QString;
+class QUrl;
+
+class AVFMediaEncoder : public QObject, public QPlatformMediaRecorder
+{
+ Q_OBJECT
+public:
+ AVFMediaEncoder(QMediaRecorder *parent);
+ ~AVFMediaEncoder() override;
+
+ bool isLocationWritable(const QUrl &location) const override;
+
+ QMediaRecorder::RecorderState state() const override;
+
+ qint64 duration() const override;
+
+ void record(QMediaEncoderSettings &settings) override;
+ void pause() override;
+ void resume() override;
+ void stop() override;
+
+ void setMetaData(const QMediaMetaData &) override;
+ QMediaMetaData metaData() const override;
+
+ AVFCameraService *cameraService() const { return m_service; }
+
+ void setCaptureSession(QPlatformMediaCaptureSession *session);
+
+ void updateDuration(qint64 duration);
+
+ void toggleRecord(bool enable);
+
+private:
+ void applySettings(QMediaEncoderSettings &settings);
+ void unapplySettings();
+
+ Q_INVOKABLE void assetWriterStarted();
+ Q_INVOKABLE void assetWriterFinished();
+ Q_INVOKABLE void assetWriterError(QString error);
+
+private Q_SLOTS:
+ void onCameraChanged();
+ void cameraActiveChanged(bool);
+
+private:
+ void stopWriter();
+
+ AVFCameraService *m_service = nullptr;
+ AVFScopedPointer<QT_MANGLE_NAMESPACE(AVFMediaAssetWriter)> m_writer;
+
+ QMediaRecorder::RecorderState m_state;
+
+ QMediaMetaData m_metaData;
+
+ qint64 m_duration;
+
+ NSDictionary *m_audioSettings;
+ NSDictionary *m_videoSettings;
+};
+
+QT_END_NAMESPACE
+
+#endif // AVFMEDIAENCODER_H
diff --git a/src/plugins/multimedia/darwin/camera/qavfcamerabase.mm b/src/plugins/multimedia/darwin/camera/qavfcamerabase.mm
new file mode 100644
index 000000000..9d99de0b9
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/qavfcamerabase.mm
@@ -0,0 +1,1084 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfcameradebug_p.h"
+#include "qavfcamerabase_p.h"
+#include "avfcamerautility_p.h"
+#include <private/qcameradevice_p.h>
+#include "qavfhelpers_p.h"
+#include <private/qplatformmediaintegration_p.h>
+#include <QtCore/qset.h>
+#include <QtCore/qsystemdetection.h>
+
+QT_USE_NAMESPACE
+
+namespace {
+
+// All these methods to work with exposure/ISO/SS in custom mode do not support macOS.
+
+#ifdef Q_OS_IOS
+
+// Misc. helpers to check values/ranges:
+
// Returns true when 'duration' lies within the active format's supported
// exposure range. CMTimeCompare(a, b) != -1 means a >= b.
bool qt_check_exposure_duration(AVCaptureDevice *captureDevice, CMTime duration)
{
    Q_ASSERT(captureDevice);

    AVCaptureDeviceFormat *activeFormat = captureDevice.activeFormat;
    if (!activeFormat) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to obtain capture device format";
        return false;
    }

    // duration >= minExposureDuration && maxExposureDuration >= duration
    return CMTimeCompare(duration, activeFormat.minExposureDuration) != -1
           && CMTimeCompare(activeFormat.maxExposureDuration, duration) != -1;
}
+
// Returns true when 'newISO' is inside the active format's supported
// ISO range; false when out of range or no active format is available.
bool qt_check_ISO_value(AVCaptureDevice *captureDevice, int newISO)
{
    Q_ASSERT(captureDevice);

    AVCaptureDeviceFormat *activeFormat = captureDevice.activeFormat;
    if (!activeFormat) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to obtain capture device format";
        return false;
    }

    return newISO >= activeFormat.minISO && newISO <= activeFormat.maxISO;
}
+
// True when 'qDuration' (seconds) equals the device's current exposure
// duration, compared at the device's own timescale.
bool qt_exposure_duration_equal(AVCaptureDevice *captureDevice, qreal qDuration)
{
    Q_ASSERT(captureDevice);
    const CMTime avDuration = CMTimeMakeWithSeconds(qDuration, captureDevice.exposureDuration.timescale);
    return !CMTimeCompare(avDuration, captureDevice.exposureDuration);
}

// True when 'iso' matches the device's current ISO (fuzzy float compare).
bool qt_iso_equal(AVCaptureDevice *captureDevice, int iso)
{
    Q_ASSERT(captureDevice);
    return qFuzzyCompare(float(iso), captureDevice.ISO);
}

// True when 'bias' matches the device's current exposure target bias.
bool qt_exposure_bias_equal(AVCaptureDevice *captureDevice, qreal bias)
{
    Q_ASSERT(captureDevice);
    return qFuzzyCompare(bias, qreal(captureDevice.exposureTargetBias));
}
+
+// Converters:
+
// Map a Qt exposure mode onto its AVFoundation counterpart, but only when
// the capture device actually supports it. Returns false (and leaves
// 'avMode' untouched) for unsupported or unmapped modes.
bool qt_convert_exposure_mode(AVCaptureDevice *captureDevice, QCamera::ExposureMode mode,
                              AVCaptureExposureMode &avMode)
{
    Q_ASSERT(captureDevice);

    switch (mode) {
    case QCamera::ExposureAuto:
        if ([captureDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
            avMode = AVCaptureExposureModeContinuousAutoExposure;
            return true;
        }
        break;
    case QCamera::ExposureManual:
        if ([captureDevice isExposureModeSupported:AVCaptureExposureModeCustom]) {
            avMode = AVCaptureExposureModeCustom;
            return true;
        }
        break;
    default:
        break;
    }

    return false;
}
+
+#endif // defined(Q_OS_IOS)
+
+} // Unnamed namespace.
+
+
// Subscribes to AVFoundation device connect/disconnect notifications
// (delivered on the main queue) so the camera list stays current, then
// performs an initial enumeration.
QAVFVideoDevices::QAVFVideoDevices(QPlatformMediaIntegration *integration)
    : QPlatformVideoDevices(integration)
{
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
    m_deviceConnectedObserver = [notificationCenter addObserverForName:AVCaptureDeviceWasConnectedNotification
                                                                object:nil
                                                                 queue:[NSOperationQueue mainQueue]
                                                            usingBlock:^(NSNotification *) {
                                                                this->updateCameraDevices();
                                                            }];

    m_deviceDisconnectedObserver = [notificationCenter addObserverForName:AVCaptureDeviceWasDisconnectedNotification
                                                                   object:nil
                                                                    queue:[NSOperationQueue mainQueue]
                                                               usingBlock:^(NSNotification *) {
                                                                   this->updateCameraDevices();
                                                               }];
    updateCameraDevices();
}
+
// Unregister the notification observers installed by the constructor.
QAVFVideoDevices::~QAVFVideoDevices()
{
    NSNotificationCenter* notificationCenter = [NSNotificationCenter defaultCenter];
    [notificationCenter removeObserver:(id)m_deviceConnectedObserver];
    [notificationCenter removeObserver:(id)m_deviceDisconnectedObserver];
}

// Return the cached camera list built by updateCameraDevices().
QList<QCameraDevice> QAVFVideoDevices::videoDevices() const
{
    return m_cameraDevices;
}
+
// Re-enumerate the system's cameras via AVCaptureDeviceDiscoverySession,
// build QCameraDevice entries (formats, frame rates, photo resolutions),
// and emit videoInputsChanged() only if the list actually changed.
void QAVFVideoDevices::updateCameraDevices()
{
#ifdef Q_OS_IOS
    // Cameras can't change dynamically on iOS. Update only once.
    if (!m_cameraDevices.isEmpty())
        return;
#endif

    QList<QCameraDevice> cameras;

    // List of all capture device types that we want to discover. Seems that this is the
    // only way to discover all types. This filter is mandatory and has no "unspecified"
    // option like AVCaptureDevicePosition(Unspecified) has. Order of the list is important
    // because discovered devices will be in the same order and we want the first one found
    // to be our default device.
    NSArray *discoveryDevices = @[
#ifdef Q_OS_IOS
        AVCaptureDeviceTypeBuiltInTripleCamera,    // We always prefer triple camera.
        AVCaptureDeviceTypeBuiltInDualCamera,      // If triple is not available, we prefer
                                                   // dual with wide + tele lens.
        AVCaptureDeviceTypeBuiltInDualWideCamera,  // Dual with wide and ultrawide is still
                                                   // better than single.
#endif
        AVCaptureDeviceTypeBuiltInWideAngleCamera, // This is the most common single camera type.
                                                   // We prefer that over tele and ultra-wide.
#ifdef Q_OS_IOS
        AVCaptureDeviceTypeBuiltInTelephotoCamera, // Cannot imagine how, but if only tele and
                                                   // ultrawide are available, we prefer tele.
        AVCaptureDeviceTypeBuiltInUltraWideCamera,
#endif
    ];

#if QT_DARWIN_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_14_0, __IPHONE_17_0, __TVOS_NA, __WATCHOS_NA)
    if (@available(macOS 14, iOS 17, *)) {
        discoveryDevices = [discoveryDevices arrayByAddingObjectsFromArray: @[
            AVCaptureDeviceTypeExternal,
            AVCaptureDeviceTypeContinuityCamera
        ]];
    } else
#endif
    {
#ifdef Q_OS_MACOS
        // Older SDKs: fall back to the deprecated "external unknown" type.
        QT_WARNING_PUSH
        QT_WARNING_DISABLE_DEPRECATED
        discoveryDevices = [discoveryDevices arrayByAddingObjectsFromArray: @[
            AVCaptureDeviceTypeExternalUnknown
        ]];
        QT_WARNING_POP
#endif
    }
    // Create discovery session to discover all possible camera types of the system.
    // Both "hard" and "soft" types.
    AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession
            discoverySessionWithDeviceTypes:discoveryDevices
                                  mediaType:AVMediaTypeVideo
                                   position:AVCaptureDevicePositionUnspecified];
    NSArray<AVCaptureDevice *> *videoDevices = discoverySession.devices;

    for (AVCaptureDevice *device in videoDevices) {
        auto info = std::make_unique<QCameraDevicePrivate>();
        // The first discovered device is the default (see the type ordering above).
        if ([videoDevices[0].uniqueID isEqualToString:device.uniqueID])
            info->isDefault = true;
        info->id = QByteArray([[device uniqueID] UTF8String]);
        info->description = QString::fromNSString([device localizedName]);

        qCDebug(qLcCamera) << "Handling camera info" << info->description
                           << (info->isDefault ? "(default)" : "");

        QSet<QSize> photoResolutions;
        QList<QCameraFormat> videoFormats;

        for (AVCaptureDeviceFormat *format in device.formats) {
            if (![format.mediaType isEqualToString:AVMediaTypeVideo])
                continue;

            auto dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
            QSize resolution(dimensions.width, dimensions.height);
            // NOTE(review): the resolution is recorded as a photo resolution even
            // when the format is skipped below for video — confirm this is intended.
            photoResolutions.insert(resolution);

            float maxFrameRate = 0;
            float minFrameRate = 1.e6;

            auto encoding = CMVideoFormatDescriptionGetCodecType(format.formatDescription);
            auto pixelFormat = QAVFHelpers::fromCVPixelFormat(encoding);
            auto colorRange = QAVFHelpers::colorRangeForCVPixelFormat(encoding);
            // Ignore pixel formats we can't handle
            if (pixelFormat == QVideoFrameFormat::Format_Invalid) {
                qCDebug(qLcCamera) << "ignore camera CV format" << encoding
                                   << "as no matching video format found";
                continue;
            }

            // Collapse all supported ranges into a single [min, max] interval.
            for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) {
                if (frameRateRange.minFrameRate < minFrameRate)
                    minFrameRate = frameRateRange.minFrameRate;
                if (frameRateRange.maxFrameRate > maxFrameRate)
                    maxFrameRate = frameRateRange.maxFrameRate;
            }

#ifdef Q_OS_IOS
            // From Apple's docs (iOS):
            // By default, AVCaptureStillImageOutput emits images with the same dimensions as
            // its source AVCaptureDevice instance’s activeFormat.formatDescription. However,
            // if you set this property to YES, the receiver emits still images at the capture
            // device’s highResolutionStillImageDimensions value.
            const QSize hrRes(qt_device_format_high_resolution(format));
            if (!hrRes.isNull() && hrRes.isValid())
                photoResolutions.insert(hrRes);
#endif

            qCDebug(qLcCamera) << "Add camera format. pixelFormat:" << pixelFormat
                               << "colorRange:" << colorRange << "cvPixelFormat" << encoding
                               << "resolution:" << resolution << "frameRate: [" << minFrameRate
                               << maxFrameRate << "]";

            auto *f = new QCameraFormatPrivate{ QSharedData(), pixelFormat, resolution,
                                                minFrameRate, maxFrameRate, colorRange };
            videoFormats << f->create();
        }
        if (videoFormats.isEmpty()) {
            // skip broken cameras without valid formats
            qCWarning(qLcCamera())
                    << "Skip camera" << info->description << "without supported formats";
            continue;
        }
        info->videoFormats = videoFormats;
        info->photoResolutions = photoResolutions.values();

        cameras.append(info.release()->create());
    }

    if (cameras != m_cameraDevices) {
        m_cameraDevices = cameras;
        emit videoInputsChanged();
    }
}
+
+
// Base class shared by the Darwin camera backends; requires a valid QCamera.
QAVFCameraBase::QAVFCameraBase(QCamera *camera)
    : QPlatformCamera(camera)
{
    Q_ASSERT(camera);
}

QAVFCameraBase::~QAVFCameraBase()
{
}

// Whether the camera is currently active (see setActive()).
bool QAVFCameraBase::isActive() const
{
    return m_active;
}
+
// Activate/deactivate the camera. Activation is refused while no camera
// device is selected; the device configuration is (re)applied on every
// activation before activeChanged is emitted.
void QAVFCameraBase::setActive(bool active)
{
    const bool unchanged = (m_active == active);
    const bool activatingWithoutDevice = active && m_cameraDevice.isNull();
    if (unchanged || activatingWithoutDevice)
        return;

    m_active = active;

    if (active)
        updateCameraConfiguration();
    Q_EMIT activeChanged(m_active);
}
+
// Select a camera device; resets the format so the best match for the new
// device is picked (see setCameraFormat with a null format).
void QAVFCameraBase::setCamera(const QCameraDevice &camera)
{
    if (m_cameraDevice == camera)
        return;
    m_cameraDevice = camera;
    setCameraFormat({});
}
+
// Select a capture format. A null format falls back to the best match for
// the current device; a format the device doesn't offer is rejected.
bool QAVFCameraBase::setCameraFormat(const QCameraFormat &format)
{
    if (format.isNull()) {
        m_cameraFormat = findBestCameraFormat(m_cameraDevice);
        return true;
    }

    if (!m_cameraDevice.videoFormats().contains(format))
        return false;

    m_cameraFormat = format;
    return true;
}
+
// Resolve the AVCaptureDevice backing the currently selected camera.
// Returns nil when no camera id is set or the device can't be found.
AVCaptureDevice *QAVFCameraBase::device() const
{
    const QByteArray deviceId = m_cameraDevice.id();
    if (deviceId.isEmpty())
        return nullptr;

    NSString *uniqueId = [NSString stringWithUTF8String:deviceId.constData()];
    return [AVCaptureDevice deviceWithUniqueID:uniqueId];
}
+
+#ifdef Q_OS_IOS
+namespace
+{
+
// True when the Qt focus mode has a counterpart in AVFoundation.
bool qt_focus_mode_supported(QCamera::FocusMode mode)
{
    // Check if QCamera::FocusMode has counterpart in AVFoundation.

    // AVFoundation has 'Manual', 'Auto' and 'Continuous',
    // where 'Manual' is actually 'Locked' + writable property 'lensPosition'.
    return mode == QCamera::FocusModeAuto
           || mode == QCamera::FocusModeManual;
}

// Map a Qt focus mode onto the closest AVFoundation focus mode:
// fixed-distance modes become Locked, everything else continuous auto-focus.
AVCaptureFocusMode avf_focus_mode(QCamera::FocusMode requestedMode)
{
    switch (requestedMode) {
        case QCamera::FocusModeHyperfocal:
        case QCamera::FocusModeInfinity:
        case QCamera::FocusModeManual:
            return AVCaptureFocusModeLocked;
        default:
            return AVCaptureFocusModeContinuousAutoFocus;
    }
}
+
+}
+#endif
+
// Apply a focus mode (iOS only; a no-op elsewhere). Without a capture
// device the mode is only recorded/reported so it can be applied once a
// device becomes available.
void QAVFCameraBase::setFocusMode(QCamera::FocusMode mode)
{
#ifdef Q_OS_IOS
    if (focusMode() == mode)
        return;

    AVCaptureDevice *captureDevice = device();
    if (!captureDevice) {
        // No device yet: just remember/report the mode if AVFoundation can
        // express it at all.
        if (qt_focus_mode_supported(mode)) {
            focusModeChanged(mode);
        } else {
            qCDebug(qLcCamera) << Q_FUNC_INFO
                               << "focus mode not supported";
        }
        return;
    }

    if (isFocusModeSupported(mode)) {
        const AVFConfigurationLock lock(captureDevice);
        if (!lock) {
            qCDebug(qLcCamera) << Q_FUNC_INFO
                               << "failed to lock for configuration";
            return;
        }

        captureDevice.focusMode = avf_focus_mode(mode);
    } else {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "focus mode not supported";
        return;
    }

    Q_EMIT focusModeChanged(mode);
#else
    Q_UNUSED(mode);
#endif
}
+
// Query the capture device for support of the given focus mode (iOS).
// Without a device (or on macOS) only FocusModeAuto is reported supported.
bool QAVFCameraBase::isFocusModeSupported(QCamera::FocusMode mode) const
{
#ifdef Q_OS_IOS
    AVCaptureDevice *captureDevice = device();
    if (captureDevice) {
        AVCaptureFocusMode avMode = avf_focus_mode(mode);
        switch (mode) {
        case QCamera::FocusModeAuto:
        case QCamera::FocusModeHyperfocal:
        case QCamera::FocusModeInfinity:
        case QCamera::FocusModeManual:
            return [captureDevice isFocusModeSupported:avMode];
        case QCamera::FocusModeAutoNear:
            Q_FALLTHROUGH();
        case QCamera::FocusModeAutoFar:
            // Near/far additionally require focus range restriction support.
            return captureDevice.autoFocusRangeRestrictionSupported
                   && [captureDevice isFocusModeSupported:avMode];
        }
    }
#endif
    return mode == QCamera::FocusModeAuto; // stupid builtin webcam doesn't do any focus handling, but hey it's usually focused :)
}
+
// Set the focus point of interest. The point is in normalized [0,1]x[0,1]
// coordinates; values outside that range are rejected. A one-shot autofocus
// is triggered unless the camera is in continuous auto mode.
void QAVFCameraBase::setCustomFocusPoint(const QPointF &point)
{
    if (customFocusPoint() == point)
        return;

    if (!QRectF(0.f, 0.f, 1.f, 1.f).contains(point)) {
        // ### release custom focus point, tell the camera to focus where it wants...
        qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid focus point (out of range)";
        return;
    }

    AVCaptureDevice *captureDevice = device();
    if (!captureDevice)
        return;

    if ([captureDevice isFocusPointOfInterestSupported]) {
        const AVFConfigurationLock lock(captureDevice);
        if (!lock) {
            qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
            return;
        }

        const CGPoint focusPOI = CGPointMake(point.x(), point.y());
        [captureDevice setFocusPointOfInterest:focusPOI];
        if (focusMode() != QCamera::FocusModeAuto)
            [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus];

        customFocusPointChanged(point);
    }
}
+
// Lock the focus at a fixed lens position d in [0, 1] (iOS only).
void QAVFCameraBase::setFocusDistance(float d)
{
#ifdef Q_OS_IOS
    AVCaptureDevice *captureDevice = device();
    if (!captureDevice)
        return;

    // Bail out when the device does NOT support locking focus with a custom
    // lens position. The original condition was inverted: it returned exactly
    // when the feature WAS supported, and then called
    // -setFocusModeLockedWithLensPosition: on unsupported devices, which
    // raises an Objective-C exception per the AVCaptureDevice documentation.
    if (!captureDevice.lockingFocusWithCustomLensPositionSupported) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "Setting custom focus distance not supported";
        return;
    }

    {
        AVFConfigurationLock lock(captureDevice);
        if (!lock) {
            qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
            return;
        }
        [captureDevice setFocusModeLockedWithLensPosition:d completionHandler:nil];
    }
    focusDistanceChanged(d);
#else
    Q_UNUSED(d);
#endif
}
+
// Push the cached Qt camera settings (focus point/mode, zoom, manual
// exposure/ISO/bias) down to the active AVCaptureDevice and (re)probe the
// flash/torch capabilities. Called when the camera becomes active.
void QAVFCameraBase::updateCameraConfiguration()
{
    AVCaptureDevice *captureDevice = device();
    if (!captureDevice) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "capture device is nil in 'active' state";
        return;
    }

    const AVFConfigurationLock lock(captureDevice);
    if (!lock) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
        return;
    }

    if ([captureDevice isFocusPointOfInterestSupported]) {
        auto point = customFocusPoint();
        const CGPoint focusPOI = CGPointMake(point.x(), point.y());
        [captureDevice setFocusPointOfInterest:focusPOI];
    }

#ifdef Q_OS_IOS
    if (focusMode() != QCamera::FocusModeAuto) {
        const AVCaptureFocusMode avMode = avf_focus_mode(focusMode());
        if (captureDevice.focusMode != avMode) {
            if ([captureDevice isFocusModeSupported:avMode]) {
                [captureDevice setFocusMode:avMode];
            } else {
                qCDebug(qLcCamera) << Q_FUNC_INFO << "focus mode not supported";
            }
        }
    }

    if (!captureDevice.activeFormat) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "camera state is active, but active format is nil";
        return;
    }

    minimumZoomFactorChanged(captureDevice.minAvailableVideoZoomFactor);
    maximumZoomFactorChanged(captureDevice.activeFormat.videoMaxZoomFactor);

    captureDevice.videoZoomFactor = zoomFactor();

    CMTime newDuration = AVCaptureExposureDurationCurrent;
    bool setCustomMode = false;

    // A manual exposure time differing from the device's current duration
    // forces custom exposure mode below (after a range check).
    float exposureTime = manualExposureTime();
    if (exposureTime > 0
        && !qt_exposure_duration_equal(captureDevice, exposureTime)) {
        newDuration = CMTimeMakeWithSeconds(exposureTime, captureDevice.exposureDuration.timescale);
        if (!qt_check_exposure_duration(captureDevice, newDuration)) {
            qCDebug(qLcCamera) << Q_FUNC_INFO << "requested exposure duration is out of range";
            return;
        }
        setCustomMode = true;
    }

    // Likewise for a manual ISO value.
    float newISO = AVCaptureISOCurrent;
    int iso = manualIsoSensitivity();
    if (iso > 0 && !qt_iso_equal(captureDevice, iso)) {
        newISO = iso;
        if (!qt_check_ISO_value(captureDevice, newISO)) {
            qCDebug(qLcCamera) << Q_FUNC_INFO << "requested ISO value is out of range";
            return;
        }
        setCustomMode = true;
    }

    float bias = exposureCompensation();
    if (bias != 0 && !qt_exposure_bias_equal(captureDevice, bias)) {
        // TODO: mixed fpns.
        if (bias < captureDevice.minExposureTargetBias || bias > captureDevice.maxExposureTargetBias) {
            qCDebug(qLcCamera) << Q_FUNC_INFO << "exposure compensation value is"
                               << "out of range";
            return;
        }
        [captureDevice setExposureTargetBias:bias completionHandler:nil];
    }

    // Setting shutter speed (exposure duration) or ISO values
    // also reset exposure mode into Custom. With this settings
    // we ignore any attempts to set exposure mode.

    if (setCustomMode) {
        [captureDevice setExposureModeCustomWithDuration:newDuration
                                                     ISO:newISO
                                       completionHandler:nil];
        return;
    }

    QCamera::ExposureMode qtMode = exposureMode();
    AVCaptureExposureMode avMode = AVCaptureExposureModeContinuousAutoExposure;
    if (!qt_convert_exposure_mode(captureDevice, qtMode, avMode)) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "requested exposure mode is not supported";
        return;
    }

    captureDevice.exposureMode = avMode;
#endif

    // Probe flash/torch capabilities; consumed by is{Flash,Torch}ModeSupported().
    isFlashSupported = isFlashAutoSupported = false;
    isTorchSupported = isTorchAutoSupported = false;

    if (captureDevice.hasFlash) {
        if ([captureDevice isFlashModeSupported:AVCaptureFlashModeOn])
            isFlashSupported = true;
        if ([captureDevice isFlashModeSupported:AVCaptureFlashModeAuto])
            isFlashAutoSupported = true;
    }

    if (captureDevice.hasTorch) {
        if ([captureDevice isTorchModeSupported:AVCaptureTorchModeOn])
            isTorchSupported = true;
        if ([captureDevice isTorchModeSupported:AVCaptureTorchModeAuto])
            isTorchAutoSupported = true;
    }

    applyFlashSettings();
    flashReadyChanged(isFlashSupported);
}
+
// Recompute and report the QCamera feature set for the current device:
// iOS always gets the manual-exposure family; focus-distance and custom
// focus point depend on device capabilities.
void QAVFCameraBase::updateCameraProperties()
{
    QCamera::Features features;
    AVCaptureDevice *captureDevice = device();

#ifdef Q_OS_IOS
    features = QCamera::Feature::ColorTemperature | QCamera::Feature::ExposureCompensation |
               QCamera::Feature::IsoSensitivity | QCamera::Feature::ManualExposureTime;

    if (captureDevice && [captureDevice isLockingFocusWithCustomLensPositionSupported])
        features |= QCamera::Feature::FocusDistance;
#endif

    if (captureDevice && [captureDevice isFocusPointOfInterestSupported])
        features |= QCamera::Feature::CustomFocusPoint;

    supportedFeaturesChanged(features);
}
+
// Zoom to 'factor' (iOS only; no-op elsewhere). A positive 'rate' ramps
// smoothly at that speed; rate <= 0 jumps immediately. The factor is
// clamped to the device's supported range.
void QAVFCameraBase::zoomTo(float factor, float rate)
{
    Q_UNUSED(factor);
    Q_UNUSED(rate);

#ifdef Q_OS_IOS
    if (zoomFactor() == factor)
        return;

    AVCaptureDevice *captureDevice = device();
    if (!captureDevice || !captureDevice.activeFormat)
        return;

    factor = qBound(captureDevice.minAvailableVideoZoomFactor, factor,
                    captureDevice.activeFormat.videoMaxZoomFactor);

    const AVFConfigurationLock lock(captureDevice);
    if (!lock) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
        return;
    }

    if (rate <= 0)
        captureDevice.videoZoomFactor = factor;
    else
        [captureDevice rampToVideoZoomFactor:factor withRate:rate];
#endif
}
+
// Change the flash mode. While inactive the mode is only recorded; it is
// applied to the device on the next activation (updateCameraConfiguration).
void QAVFCameraBase::setFlashMode(QCamera::FlashMode mode)
{
    if (flashMode() == mode)
        return;

    if (isActive() && !isFlashModeSupported(mode)) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "unsupported mode" << mode;
        return;
    }

    flashModeChanged(mode);

    if (!isActive())
        return;

    applyFlashSettings();
}
+
// FlashOff is always possible; On/Auto depend on the capabilities probed
// in updateCameraConfiguration().
bool QAVFCameraBase::isFlashModeSupported(QCamera::FlashMode mode) const
{
    switch (mode) {
    case QCamera::FlashOff:
        return true;
    case QCamera::FlashOn:
        return isFlashSupported;
    default: // QCamera::FlashAuto
        return isFlashAutoSupported;
    }
}
+
// Whether the flash can fire right now: requires an active camera, a device
// with flash hardware, a supported mode, and availability per AVFoundation.
bool QAVFCameraBase::isFlashReady() const
{
    if (!isActive())
        return false;

    AVCaptureDevice *captureDevice = device();
    if (!captureDevice)
        return false;

    if (!captureDevice.hasFlash)
        return false;

    if (!isFlashModeSupported(flashMode()))
        return false;

    // AVCaptureDevice's docs:
    // "The flash may become unavailable if, for example,
    // the device overheats and needs to cool off."
    return [captureDevice isFlashAvailable];
}
+
+// Selects the torch mode. Mirrors setFlashMode(): while inactive the
+// mode is only recorded and signalled; support is checked and the
+// device updated only when the camera is running.
+void QAVFCameraBase::setTorchMode(QCamera::TorchMode mode)
+{
+    if (torchMode() == mode)
+        return;
+
+    if (isActive() && !isTorchModeSupported(mode)) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "unsupported torch mode" << mode;
+        return;
+    }
+
+    torchModeChanged(mode);
+
+    if (!isActive())
+        return;
+
+    applyFlashSettings();
+}
+
+// Reports whether the given torch mode can be used with this camera.
+// TorchOff is always accepted; the active modes depend on the support
+// flags gathered from the capture device.
+bool QAVFCameraBase::isTorchModeSupported(QCamera::TorchMode mode) const
+{
+    switch (mode) {
+    case QCamera::TorchOn:
+        return isTorchSupported;
+    case QCamera::TorchAuto:
+        return isTorchAutoSupported;
+    default: // QCamera::TorchOff
+        return true;
+    }
+}
+
+// Switches between automatic and manual (custom) exposure. Only
+// implemented on iOS; any mode other than Auto/Manual is rejected.
+void QAVFCameraBase::setExposureMode(QCamera::ExposureMode qtMode)
+{
+#ifdef Q_OS_IOS
+    if (qtMode != QCamera::ExposureAuto && qtMode != QCamera::ExposureManual) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "exposure mode not supported";
+        return;
+    }
+
+    AVCaptureDevice *captureDevice = device();
+    if (!captureDevice) {
+        // No device yet: just record and signal the requested mode.
+        exposureModeChanged(qtMode);
+        return;
+    }
+
+    AVCaptureExposureMode avMode = AVCaptureExposureModeContinuousAutoExposure;
+    if (!qt_convert_exposure_mode(captureDevice, qtMode, avMode)) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "exposure mode not supported";
+        return;
+    }
+
+    const AVFConfigurationLock lock(captureDevice);
+    if (!lock) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock a capture device"
+                           << "for configuration";
+        return;
+    }
+
+    // Signal only after the device accepted the new mode.
+    [captureDevice setExposureMode:avMode];
+    exposureModeChanged(qtMode);
+#else
+    Q_UNUSED(qtMode);
+#endif
+}
+
+// ExposureAuto is always reported as supported. ExposureManual
+// additionally requires macOS 10.15+ and a device that supports
+// AVCaptureExposureModeCustom. All other modes are unsupported.
+bool QAVFCameraBase::isExposureModeSupported(QCamera::ExposureMode mode) const
+{
+    if (mode == QCamera::ExposureAuto)
+        return true;
+    if (mode != QCamera::ExposureManual)
+        return false;
+
+    if (@available(macOS 10.15, *)) {
+        AVCaptureDevice *captureDevice = device();
+        return captureDevice && [captureDevice isExposureModeSupported:AVCaptureExposureModeCustom];
+    }
+
+    return false;
+}
+
+// Pushes the currently selected flash and torch modes down to the
+// capture device. Must only be called while the camera is active.
+void QAVFCameraBase::applyFlashSettings()
+{
+    Q_ASSERT(isActive());
+
+    AVCaptureDevice *captureDevice = device();
+    if (!captureDevice) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "no capture device found";
+        return;
+    }
+
+    const AVFConfigurationLock lock(captureDevice);
+    if (!lock) { // consistent with the other setters: never touch the device unlocked
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
+        return;
+    }
+
+    if (captureDevice.hasFlash) {
+        const auto mode = flashMode();
+
+        auto setAvFlashModeSafe = [&captureDevice](AVCaptureFlashMode avFlashMode) {
+            // Note: hasFlash == true does not guarantee that a particular
+            // flash mode is supported, so check before assigning.
+            if ([captureDevice isFlashModeSupported:avFlashMode])
+                captureDevice.flashMode = avFlashMode;
+            else
+                qCDebug(qLcCamera) << "Attempt to setup unsupported flash mode " << avFlashMode;
+        };
+
+        if (mode == QCamera::FlashOff) {
+            setAvFlashModeSafe(AVCaptureFlashModeOff);
+        } else {
+            if ([captureDevice isFlashAvailable]) {
+                if (mode == QCamera::FlashOn)
+                    setAvFlashModeSafe(AVCaptureFlashModeOn);
+                else if (mode == QCamera::FlashAuto)
+                    setAvFlashModeSafe(AVCaptureFlashModeAuto);
+            } else {
+                qCDebug(qLcCamera) << Q_FUNC_INFO << "flash is not available at the moment";
+            }
+        }
+    }
+
+    if (captureDevice.hasTorch) {
+        const auto mode = torchMode();
+
+        auto setAvTorchModeSafe = [&captureDevice](AVCaptureTorchMode avTorchMode) {
+            if ([captureDevice isTorchModeSupported:avTorchMode])
+                captureDevice.torchMode = avTorchMode;
+            else
+                qCDebug(qLcCamera) << "Attempt to setup unsupported torch mode " << avTorchMode;
+        };
+
+        if (mode == QCamera::TorchOff) {
+            setAvTorchModeSafe(AVCaptureTorchModeOff);
+        } else {
+            if ([captureDevice isTorchAvailable]) {
+                if (mode == QCamera::TorchOn)
+                    setAvTorchModeSafe(AVCaptureTorchModeOn);
+                else if (mode == QCamera::TorchAuto)
+                    setAvTorchModeSafe(AVCaptureTorchModeAuto);
+            } else {
+                qCDebug(qLcCamera) << Q_FUNC_INFO << "torch is not available at the moment";
+            }
+        }
+    }
+}
+
+
+// Sets the exposure-compensation bias (EV). iOS only; the value is
+// clamped to the device's supported bias range before being applied.
+void QAVFCameraBase::setExposureCompensation(float bias)
+{
+#ifdef Q_OS_IOS
+    AVCaptureDevice *captureDevice = device();
+    if (!captureDevice) {
+        // No device yet: just record and signal the requested bias.
+        exposureCompensationChanged(bias);
+        return;
+    }
+
+    bias = qBound(captureDevice.minExposureTargetBias, bias, captureDevice.maxExposureTargetBias);
+
+    const AVFConfigurationLock lock(captureDevice);
+    if (!lock) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
+        return;
+    }
+
+    [captureDevice setExposureTargetBias:bias completionHandler:nil];
+    exposureCompensationChanged(bias);
+#else
+    Q_UNUSED(bias);
+#endif
+}
+
+// Sets a manual exposure duration (seconds); a negative value switches
+// back to automatic exposure. iOS only.
+void QAVFCameraBase::setManualExposureTime(float value)
+{
+#ifdef Q_OS_IOS
+    if (value < 0) {
+        setExposureMode(QCamera::ExposureAuto);
+        return;
+    }
+
+    AVCaptureDevice *captureDevice = device();
+    if (!captureDevice) {
+        // No device yet: just record and signal the requested time.
+        exposureTimeChanged(value);
+        return;
+    }
+
+    // Reuse the device's current timescale when building the CMTime.
+    const CMTime newDuration = CMTimeMakeWithSeconds(value, captureDevice.exposureDuration.timescale);
+    if (!qt_check_exposure_duration(captureDevice, newDuration)) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "shutter speed value is out of range";
+        return;
+    }
+
+    const AVFConfigurationLock lock(captureDevice);
+    if (!lock) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
+        return;
+    }
+
+    // Setting the shutter speed (exposure duration in Apple's terms,
+    // since there is no shutter actually) will also reset
+    // exposure mode into custom mode.
+    [captureDevice setExposureModeCustomWithDuration:newDuration
+                                                 ISO:AVCaptureISOCurrent
+                                   completionHandler:nil];
+
+    exposureTimeChanged(value);
+
+#else
+    Q_UNUSED(value);
+#endif
+}
+
+// Returns the device's current exposure duration in seconds, or -1
+// when it cannot be queried (no device, or not on iOS).
+float QAVFCameraBase::exposureTime() const
+{
+#ifdef Q_OS_IOS
+    AVCaptureDevice *captureDevice = device();
+    return captureDevice ? CMTimeGetSeconds(captureDevice.exposureDuration) : -1.;
+#else
+    return -1;
+#endif
+}
+
+#ifdef Q_OS_IOS
+namespace {
+
+// Maps a Qt white-balance mode onto the AVFoundation equivalent:
+// everything except continuous auto is expressed as a locked mode.
+void avf_convert_white_balance_mode(QCamera::WhiteBalanceMode qtMode,
+                                    AVCaptureWhiteBalanceMode &avMode)
+{
+    avMode = (qtMode == QCamera::WhiteBalanceAuto)
+                 ? AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance
+                 : AVCaptureWhiteBalanceModeLocked;
+}
+
+// Applies the given white-balance mode under a configuration lock.
+// Returns false when the device could not be locked.
+bool avf_set_white_balance_mode(AVCaptureDevice *captureDevice,
+                                AVCaptureWhiteBalanceMode avMode)
+{
+    Q_ASSERT(captureDevice);
+
+    const bool lock = [captureDevice lockForConfiguration:nil];
+    if (!lock) {
+        // Category logger, consistent with the rest of the file; qDebug
+        // appends its own newline, so no manual '\n'.
+        qCDebug(qLcCamera) << "Failed to lock a capture device for configuration";
+        return false;
+    }
+
+    captureDevice.whiteBalanceMode = avMode;
+    [captureDevice unlockForConfiguration];
+    return true;
+}
+
+// Converts a colour temperature/tint pair into device white-balance
+// gains. Returns true only when all three gains fall inside the range
+// [1.0, maxWhiteBalanceGain] accepted by the device.
+bool avf_convert_temp_and_tint_to_wb_gains(AVCaptureDevice *captureDevice,
+                                           float temp, float tint, AVCaptureWhiteBalanceGains &wbGains)
+{
+    Q_ASSERT(captureDevice);
+
+    const AVCaptureWhiteBalanceTemperatureAndTintValues wbTTValues = {
+        .temperature = temp,
+        .tint = tint
+    };
+    wbGains = [captureDevice deviceWhiteBalanceGainsForTemperatureAndTintValues:wbTTValues];
+
+    const float maxGain = captureDevice.maxWhiteBalanceGain;
+    return wbGains.redGain >= 1.0 && wbGains.redGain <= maxGain
+        && wbGains.greenGain >= 1.0 && wbGains.greenGain <= maxGain
+        && wbGains.blueGain >= 1.0 && wbGains.blueGain <= maxGain;
+}
+
+// Locks the white balance to the given RGB gains. Returns false when
+// the device could not be locked for configuration.
+bool avf_set_white_balance_gains(AVCaptureDevice *captureDevice,
+                                 AVCaptureWhiteBalanceGains wbGains)
+{
+    Q_ASSERT(captureDevice); // consistent with the sibling helpers above
+
+    const bool lock = [captureDevice lockForConfiguration:nil];
+    if (!lock) {
+        // Category logger, consistent with the rest of the file; qDebug
+        // appends its own newline, so no manual '\n'.
+        qCDebug(qLcCamera) << "Failed to lock a capture device for configuration";
+        return false;
+    }
+
+    [captureDevice setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:wbGains
+                                                      completionHandler:nil];
+    [captureDevice unlockForConfiguration];
+    return true;
+}
+
+}
+
+// Auto white balance is always offered; every other Qt mode is mapped
+// onto a locked mode, so it is supported exactly when the device
+// supports AVCaptureWhiteBalanceModeLocked.
+bool QAVFCameraBase::isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const
+{
+    if (mode == QCamera::WhiteBalanceAuto)
+        return true;
+
+    AVCaptureDevice *captureDevice = device();
+    return captureDevice
+        && [captureDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeLocked];
+}
+
+// Applies a white-balance mode. Auto and Manual only get the AV mode
+// set; the named presets additionally lock the gains derived from the
+// preset's colour temperature.
+void QAVFCameraBase::setWhiteBalanceMode(QCamera::WhiteBalanceMode mode)
+{
+    if (!isWhiteBalanceModeSupported(mode))
+        return;
+
+    AVCaptureDevice *captureDevice = device();
+    Q_ASSERT(captureDevice);
+
+    const AVFConfigurationLock lock(captureDevice);
+    if (!lock) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock a capture device"
+                           << "for configuration";
+        return;
+    }
+
+    AVCaptureWhiteBalanceMode avMode;
+    avf_convert_white_balance_mode(mode, avMode);
+    avf_set_white_balance_mode(captureDevice, avMode);
+
+    // Auto/Manual are done here; for Manual the gains are presumably
+    // applied via setColorTemperature() -- TODO confirm.
+    if (mode == QCamera::WhiteBalanceAuto || mode == QCamera::WhiteBalanceManual) {
+        whiteBalanceModeChanged(mode);
+        return;
+    }
+
+    // Preset mode: signal only once both the conversion and the device
+    // update succeed.
+    const int colorTemp = colorTemperatureForWhiteBalance(mode);
+    AVCaptureWhiteBalanceGains wbGains;
+    if (avf_convert_temp_and_tint_to_wb_gains(captureDevice, colorTemp, 0., wbGains)
+        && avf_set_white_balance_gains(captureDevice, wbGains))
+        whiteBalanceModeChanged(mode);
+}
+
+// Locks the white balance to gains derived from the given colour
+// temperature (Kelvin, tint 0). A value of 0 is only recorded and
+// signalled, without touching the device.
+void QAVFCameraBase::setColorTemperature(int colorTemp)
+{
+    if (colorTemp == 0) {
+        colorTemperatureChanged(colorTemp);
+        return;
+    }
+
+    AVCaptureDevice *captureDevice = device();
+    if (!captureDevice || ![captureDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeLocked])
+        return;
+
+    const AVFConfigurationLock lock(captureDevice);
+    if (!lock) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock a capture device"
+                           << "for configuration";
+        return;
+    }
+
+    // Signal only once both the conversion and the device update succeed.
+    AVCaptureWhiteBalanceGains wbGains;
+    if (avf_convert_temp_and_tint_to_wb_gains(captureDevice, colorTemp, 0., wbGains)
+        && avf_set_white_balance_gains(captureDevice, wbGains))
+        colorTemperatureChanged(colorTemp);
+}
+#endif
+
+// Sets a manual ISO value; a negative value switches back to automatic
+// exposure. iOS only.
+void QAVFCameraBase::setManualIsoSensitivity(int value)
+{
+#ifdef Q_OS_IOS
+    if (value < 0) {
+        setExposureMode(QCamera::ExposureAuto);
+        return;
+    }
+
+    AVCaptureDevice *captureDevice = device();
+    if (!captureDevice) {
+        // No device yet: just record and signal the requested value.
+        isoSensitivityChanged(value);
+        return;
+    }
+
+    if (!qt_check_ISO_value(captureDevice, value)) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "ISO value is out of range";
+        return;
+    }
+
+    const AVFConfigurationLock lock(captureDevice);
+    if (!lock) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock a capture device"
+                           << "for configuration";
+        return;
+    }
+
+    // Setting the ISO will also reset
+    // exposure mode to the custom mode.
+    [captureDevice setExposureModeCustomWithDuration:AVCaptureExposureDurationCurrent
+                                                 ISO:value
+                                   completionHandler:nil];
+
+    isoSensitivityChanged(value);
+#else
+    Q_UNUSED(value);
+#endif
+}
+
+// Returns the last manually requested ISO value (there is no separate
+// readback of the device's current ISO here).
+int QAVFCameraBase::isoSensitivity() const
+{
+    return manualIsoSensitivity();
+}
+
+
+#include "moc_qavfcamerabase_p.cpp"
diff --git a/src/plugins/multimedia/darwin/camera/qavfcamerabase_p.h b/src/plugins/multimedia/darwin/camera/qavfcamerabase_p.h
new file mode 100644
index 000000000..1ad3ba250
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/qavfcamerabase_p.h
@@ -0,0 +1,110 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QAVFCAMERABASE_H
+#define QAVFCAMERABASE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qobject.h>
+
+#include <private/qplatformcamera_p.h>
+#include <private/qplatformvideodevices_p.h>
+
+Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureDeviceFormat);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureConnection);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureDevice);
+
+QT_BEGIN_NAMESPACE
+class QPlatformMediaIntegration;
+
+// Enumerates the video input devices available on the system and keeps
+// the cached list up to date.
+class QAVFVideoDevices : public QPlatformVideoDevices
+{
+public:
+    QAVFVideoDevices(QPlatformMediaIntegration *integration);
+    ~QAVFVideoDevices();
+
+    QList<QCameraDevice> videoDevices() const override;
+
+private:
+    // Rebuilds m_cameraDevices from the current set of devices.
+    void updateCameraDevices();
+
+    // Observer tokens; presumably registered for device connect /
+    // disconnect notifications -- TODO confirm in the .mm file.
+    NSObject *m_deviceConnectedObserver;
+    NSObject *m_deviceDisconnectedObserver;
+
+    QList<QCameraDevice> m_cameraDevices;
+};
+
+
+// Shared base for the macOS/iOS camera backends: owns the selected
+// QCameraDevice and implements the QPlatformCamera controls (focus,
+// zoom, flash/torch, exposure, white balance) on top of AVFoundation.
+// Fixes vs. the previous revision: stray ';' after the opening brace,
+// misspelled parameter name ("activce"), redundant 'virtual' combined
+// with 'override'.
+class QAVFCameraBase : public QPlatformCamera
+{
+    Q_OBJECT
+public:
+    QAVFCameraBase(QCamera *camera);
+    ~QAVFCameraBase();
+
+    bool isActive() const override;
+    void setActive(bool active) override;
+
+    void setCamera(const QCameraDevice &camera) override;
+    bool setCameraFormat(const QCameraFormat &format) override;
+
+    void setFocusMode(QCamera::FocusMode mode) override;
+    bool isFocusModeSupported(QCamera::FocusMode mode) const override;
+
+    void setCustomFocusPoint(const QPointF &point) override;
+
+    void setFocusDistance(float d) override;
+    void zoomTo(float factor, float rate) override;
+
+    void setFlashMode(QCamera::FlashMode mode) override;
+    bool isFlashModeSupported(QCamera::FlashMode mode) const override;
+    bool isFlashReady() const override;
+
+    void setTorchMode(QCamera::TorchMode mode) override;
+    bool isTorchModeSupported(QCamera::TorchMode mode) const override;
+
+    void setExposureMode(QCamera::ExposureMode) override;
+    bool isExposureModeSupported(QCamera::ExposureMode mode) const override;
+
+    void setExposureCompensation(float bias) override;
+    void setManualIsoSensitivity(int value) override;
+    int isoSensitivity() const override;
+    void setManualExposureTime(float value) override;
+    float exposureTime() const override;
+
+#ifdef Q_OS_IOS
+    // not supported on macOS
+    bool isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const override;
+    void setWhiteBalanceMode(QCamera::WhiteBalanceMode /*mode*/) override;
+    void setColorTemperature(int /*temperature*/) override;
+#endif
+
+    // The AVFoundation capture device backing the current camera.
+    AVCaptureDevice *device() const;
+
+protected:
+    void updateCameraConfiguration();
+    void updateCameraProperties();
+    void applyFlashSettings();
+
+    QCameraDevice m_cameraDevice;
+    bool m_active = false;
+
+private:
+    // Capability flags refreshed from the capture device.
+    bool isFlashSupported = false;
+    bool isFlashAutoSupported = false;
+    bool isTorchSupported = false;
+    bool isTorchAutoSupported = false;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/common/avfmetadata.mm b/src/plugins/multimedia/darwin/common/avfmetadata.mm
new file mode 100644
index 000000000..da07f69c6
--- /dev/null
+++ b/src/plugins/multimedia/darwin/common/avfmetadata.mm
@@ -0,0 +1,382 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfmetadata_p.h"
+#include <qdarwinformatsinfo_p.h>
+#include <avfmediaplayer_p.h>
+
+#include <QtCore/qbuffer.h>
+#include <QtCore/qiodevice.h>
+#include <QtCore/qdatetime.h>
+#include <QtCore/qlocale.h>
+#include <QtCore/qurl.h>
+#include <QImage>
+#include <QtMultimedia/qvideoframe.h>
+
+#if __has_include(<AppKit/AppKit.h>)
+#include <AppKit/AppKit.h>
+#endif
+
+#include <CoreFoundation/CoreFoundation.h>
+
+QT_USE_NAMESPACE
+
+// For a single QMediaMetaData key: the matching AVFoundation metadata
+// identifier in each key space we care about (nil where the key space
+// has no equivalent).
+struct AVMetadataIDs {
+    AVMetadataIdentifier common;
+    AVMetadataIdentifier iTunes;
+    AVMetadataIdentifier quickTime;
+    AVMetadataIdentifier ID3;
+    AVMetadataIdentifier quickTimeUserData;
+    AVMetadataIdentifier isoUserData;
+};
+
+// One row per QMediaMetaData::Key, indexed by the key's enum value
+// (the row count is checked against Resolution + 1 by static_asserts
+// in toIdentifier()/toKey(), so the order must match the enum).
+const AVMetadataIDs keyToAVMetaDataID[] = {
+    // Title
+    { AVMetadataCommonIdentifierTitle, AVMetadataIdentifieriTunesMetadataSongName,
+      AVMetadataIdentifierQuickTimeMetadataTitle,
+      AVMetadataIdentifierID3MetadataTitleDescription,
+      nil, AVMetadata3GPUserDataKeyTitle },
+    // Author
+    { AVMetadataCommonIdentifierAuthor,AVMetadataIdentifieriTunesMetadataAuthor,
+      AVMetadataIdentifierQuickTimeMetadataAuthor, nil,
+      AVMetadataQuickTimeUserDataKeyAuthor, AVMetadata3GPUserDataKeyAuthor },
+    // Comment
+    { nil, AVMetadataIdentifieriTunesMetadataUserComment,
+      AVMetadataIdentifierQuickTimeMetadataComment, AVMetadataIdentifierID3MetadataComments,
+      AVMetadataQuickTimeUserDataKeyComment, nil },
+    // Description
+    { AVMetadataCommonIdentifierDescription,AVMetadataIdentifieriTunesMetadataDescription,
+      AVMetadataIdentifierQuickTimeMetadataDescription, nil,
+      AVMetadataQuickTimeUserDataKeyDescription, AVMetadata3GPUserDataKeyDescription },
+    // Genre
+    { nil, AVMetadataIdentifieriTunesMetadataUserGenre,
+      AVMetadataIdentifierQuickTimeMetadataGenre, nil,
+      AVMetadataQuickTimeUserDataKeyGenre, AVMetadata3GPUserDataKeyGenre },
+    // Date
+    { AVMetadataCommonIdentifierCreationDate, AVMetadataIdentifieriTunesMetadataReleaseDate,
+      AVMetadataIdentifierQuickTimeMetadataCreationDate, AVMetadataIdentifierID3MetadataDate,
+      AVMetadataQuickTimeUserDataKeyCreationDate, AVMetadataISOUserDataKeyDate },
+    // Language
+    { AVMetadataCommonIdentifierLanguage, nil, nil, AVMetadataIdentifierID3MetadataLanguage, nil, nil },
+    // Publisher
+    { AVMetadataCommonIdentifierPublisher, AVMetadataIdentifieriTunesMetadataPublisher,
+      AVMetadataIdentifierQuickTimeMetadataPublisher, AVMetadataIdentifierID3MetadataPublisher, nil, nil },
+    // Copyright
+    { AVMetadataCommonIdentifierCopyrights, AVMetadataIdentifieriTunesMetadataCopyright,
+      AVMetadataIdentifierQuickTimeMetadataCopyright, AVMetadataIdentifierID3MetadataCopyright,
+      AVMetadataQuickTimeUserDataKeyCopyright, AVMetadataISOUserDataKeyCopyright },
+    // Url
+    { nil, nil, nil, AVMetadataIdentifierID3MetadataOfficialAudioSourceWebpage, nil, nil },
+    // Duration
+    { nil, nil, nil, AVMetadataIdentifierID3MetadataLength, nil, nil },
+    // MediaType
+    { AVMetadataCommonIdentifierType, nil, nil, AVMetadataIdentifierID3MetadataContentType, nil, nil },
+    // FileFormat
+    { nil, nil, nil, AVMetadataIdentifierID3MetadataFileType, nil, nil },
+    // AudioBitRate
+    { nil, nil, nil, nil, nil, nil },
+    // AudioCodec
+    { nil, nil, nil, nil, nil, nil },
+    // VideoBitRate
+    { nil, nil, nil, nil, nil, nil },
+    // VideoCodec
+    { nil, nil, nil, nil, nil, nil },
+    // VideoFrameRate
+    { nil, nil, AVMetadataIdentifierQuickTimeMetadataCameraFrameReadoutTime, nil, nil, nil },
+    // AlbumTitle
+    { AVMetadataCommonIdentifierAlbumName, AVMetadataIdentifieriTunesMetadataAlbum,
+      AVMetadataIdentifierQuickTimeMetadataAlbum, AVMetadataIdentifierID3MetadataAlbumTitle,
+      AVMetadataQuickTimeUserDataKeyAlbum, AVMetadata3GPUserDataKeyAlbumAndTrack },
+    // AlbumArtist
+    { nil, AVMetadataIdentifieriTunesMetadataAlbumArtist, nil, nil,
+      AVMetadataQuickTimeUserDataKeyArtist, AVMetadata3GPUserDataKeyPerformer },
+    // ContributingArtist
+    { AVMetadataCommonIdentifierArtist, AVMetadataIdentifieriTunesMetadataArtist,
+      AVMetadataIdentifierQuickTimeMetadataArtist, nil, nil, nil },
+    // TrackNumber
+    { nil, AVMetadataIdentifieriTunesMetadataTrackNumber,
+      nil, AVMetadataIdentifierID3MetadataTrackNumber, nil, nil },
+    // Composer
+    { nil, AVMetadataIdentifieriTunesMetadataComposer,
+      AVMetadataIdentifierQuickTimeMetadataComposer, AVMetadataIdentifierID3MetadataComposer, nil, nil },
+    // LeadPerformer
+    { nil, AVMetadataIdentifieriTunesMetadataPerformer,
+      AVMetadataIdentifierQuickTimeMetadataPerformer, AVMetadataIdentifierID3MetadataLeadPerformer, nil, nil },
+    // ThumbnailImage
+    { nil, nil, nil, AVMetadataIdentifierID3MetadataAttachedPicture, nil, nil },
+    // CoverArtImage
+    { AVMetadataCommonIdentifierArtwork, AVMetadataIdentifieriTunesMetadataCoverArt,
+      AVMetadataIdentifierQuickTimeMetadataArtwork, nil, nil, nil },
+    // Orientation
+    { nil, nil, AVMetadataIdentifierQuickTimeMetadataVideoOrientation, nil, nil, nil },
+    // Resolution
+    { nil, nil, nil, nil, nil, nil }
+};
+
+// Returns the AVFoundation identifier of `key` within `keySpace`,
+// falling back to the format-independent common identifier; nil when
+// the key has no equivalent there.
+static AVMetadataIdentifier toIdentifier(QMediaMetaData::Key key, AVMetadataKeySpace keySpace)
+{
+    static_assert(sizeof(keyToAVMetaDataID)/sizeof(AVMetadataIDs) == QMediaMetaData::Key::Resolution + 1);
+
+    const AVMetadataIDs &ids = keyToAVMetaDataID[key];
+    if ([keySpace isEqualToString:AVMetadataKeySpaceiTunes])
+        return ids.iTunes;
+    if ([keySpace isEqualToString:AVMetadataKeySpaceID3])
+        return ids.ID3;
+    if ([keySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
+        return ids.quickTime;
+    return ids.common;
+}
+
+// Maps an AVMetadataItem back to a QMediaMetaData key. The item's
+// format-independent common key is consulted first; failing that, the
+// item's identifier is compared against the per-key-space table.
+// Returns std::nullopt for items we have no mapping for.
+static std::optional<QMediaMetaData::Key> toKey(AVMetadataItem *item)
+{
+    static_assert(sizeof(keyToAVMetaDataID)/sizeof(AVMetadataIDs) == QMediaMetaData::Key::Resolution + 1);
+
+    // The item identifier may be different than the ones we support,
+    // so check by common key first, as it will get the metadata
+    // irrespective of the format.
+    AVMetadataKey commonKey = item.commonKey;
+    if (commonKey.length != 0) {
+        if ([commonKey isEqualToString:AVMetadataCommonKeyTitle]) {
+            return QMediaMetaData::Title;
+        } else if ([commonKey isEqualToString:AVMetadataCommonKeyDescription]) {
+            return QMediaMetaData::Description;
+        } else if ([commonKey isEqualToString:AVMetadataCommonKeyPublisher]) {
+            return QMediaMetaData::Publisher;
+        } else if ([commonKey isEqualToString:AVMetadataCommonKeyCreationDate]) {
+            return QMediaMetaData::Date;
+        } else if ([commonKey isEqualToString:AVMetadataCommonKeyType]) {
+            return QMediaMetaData::MediaType;
+        } else if ([commonKey isEqualToString:AVMetadataCommonKeyLanguage]) {
+            return QMediaMetaData::Language;
+        } else if ([commonKey isEqualToString:AVMetadataCommonKeyCopyrights]) {
+            return QMediaMetaData::Copyright;
+        } else if ([commonKey isEqualToString:AVMetadataCommonKeyAlbumName]) {
+            return QMediaMetaData::AlbumTitle;
+        } else if ([commonKey isEqualToString:AVMetadataCommonKeyAuthor]) {
+            return QMediaMetaData::Author;
+        } else if ([commonKey isEqualToString:AVMetadataCommonKeyArtist]) {
+            return QMediaMetaData::ContributingArtist;
+        }
+    }
+
+    // Check by identifier if no common key found
+    // No need to check for the common keySpace since there's no common key
+    enum keySpaces { iTunes, QuickTime, QuickTimeUserData, IsoUserData, ID3, Other } itemKeySpace;
+    itemKeySpace = Other;
+    AVMetadataKeySpace keySpace = [item keySpace];
+    AVMetadataIdentifier identifier = [item identifier];
+
+    if ([keySpace isEqualToString:AVMetadataKeySpaceiTunes]) {
+        itemKeySpace = iTunes;
+    } else if ([keySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata]) {
+        itemKeySpace = QuickTime;
+    } else if ([keySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData]) {
+        itemKeySpace = QuickTimeUserData;
+    } else if ([keySpace isEqualToString:AVMetadataKeySpaceISOUserData]) {
+        itemKeySpace = IsoUserData;
+    } else if (([keySpace isEqualToString:AVMetadataKeySpaceID3])) {
+        itemKeySpace = ID3;
+    }
+
+    // Linear scan over the table, comparing the item's identifier with
+    // the identifier each key would have in the item's key space.
+    for (int key = 0; key < QMediaMetaData::Resolution + 1; key++) {
+        AVMetadataIdentifier idForKey = nil;
+        switch (itemKeySpace) {
+        case iTunes:
+            idForKey = keyToAVMetaDataID[key].iTunes;
+            break;
+        case QuickTime:
+            idForKey = keyToAVMetaDataID[key].quickTime;
+            break;
+        case ID3:
+            idForKey = keyToAVMetaDataID[key].ID3;
+            break;
+        case QuickTimeUserData:
+            idForKey = keyToAVMetaDataID[key].quickTimeUserData;
+            break;
+        case IsoUserData:
+            idForKey = keyToAVMetaDataID[key].isoUserData;
+            break;
+        default:
+            break;
+        }
+
+        if ([identifier isEqualToString:idForKey])
+            return QMediaMetaData::Key(key);
+    }
+
+    return std::nullopt;
+}
+
+// Converts a list of AVMetadataItems into QMediaMetaData; items with no
+// key mapping or no string value are skipped.
+static QMediaMetaData fromAVMetadata(NSArray *metadataItems)
+{
+    QMediaMetaData metadata;
+
+    for (AVMetadataItem *item in metadataItems) {
+        if (const auto key = toKey(item)) {
+            const QString value = QString::fromNSString([item stringValue]);
+            if (!value.isNull())
+                metadata.insert(*key, value);
+        }
+    }
+    return metadata;
+}
+
+// Reads the asset-level metadata and augments it with the asset's
+// duration, converted to milliseconds.
+QMediaMetaData AVFMetaData::fromAsset(AVAsset *asset)
+{
+#ifdef QT_DEBUG_AVF
+    qDebug() << Q_FUNC_INFO;
+#endif
+    QMediaMetaData metadata = fromAVMetadata([asset metadata]);
+
+    // add duration
+    const CMTime time = [asset duration];
+    const qint64 duration = static_cast<qint64>(float(time.value) / float(time.timescale) * 1000.0f);
+    metadata.insert(QMediaMetaData::Duration, duration);
+
+    return metadata;
+}
+
+// Reads track-level metadata. For audio tracks a missing Language entry
+// is filled in from the track's ISO 639-2 language code; for video
+// tracks a missing Orientation entry is queried via
+// AVFMediaPlayer::videoOrientationForAssetTrack().
+QMediaMetaData AVFMetaData::fromAssetTrack(AVAssetTrack *asset)
+{
+    QMediaMetaData metadata = fromAVMetadata([asset metadata]);
+    if ([asset.mediaType isEqualToString:AVMediaTypeAudio]) {
+        if (metadata.value(QMediaMetaData::Language).isNull()) {
+            auto *languageCode = asset.languageCode;
+            if (languageCode) {
+                // languageCode is encoded as ISO 639-2, which QLocale does not handle.
+                // Convert it to 639-1 first.
+                auto id = CFLocaleCreateCanonicalLanguageIdentifierFromString(kCFAllocatorDefault,
+                                                                             (__bridge CFStringRef)languageCode);
+                QString lang = QString::fromCFString(id);
+                CFRelease(id);
+                metadata.insert(QMediaMetaData::Language, QLocale::codeToLanguage(lang));
+            }
+        }
+    }
+    if ([asset.mediaType isEqualToString:AVMediaTypeVideo]) {
+        // add orientation
+        if (metadata.value(QMediaMetaData::Orientation).isNull()) {
+            QtVideo::Rotation angle = QtVideo::Rotation::None;
+            bool mirrored;
+            AVFMediaPlayer::videoOrientationForAssetTrack(asset, angle, mirrored);
+            Q_UNUSED(mirrored);
+            metadata.insert(QMediaMetaData::Orientation, int(angle));
+        }
+    }
+    return metadata;
+}
+
+// Builds an AVMutableMetadataItem for the given Qt metadata key and
+// value in the requested key space. Returns nil when the key has no
+// identifier in that key space. Image, file-format, language and
+// orientation keys get dedicated conversions; everything else is
+// converted based on the QVariant's runtime type.
+static AVMutableMetadataItem *setAVMetadataItemForKey(QMediaMetaData::Key key, const QVariant &value,
+                                                      AVMetadataKeySpace keySpace = AVMetadataKeySpaceCommon)
+{
+    AVMetadataIdentifier identifier = toIdentifier(key, keySpace);
+    if (!identifier.length)
+        return nil;
+
+    AVMutableMetadataItem *item = [AVMutableMetadataItem metadataItem];
+    item.keySpace = keySpace;
+    item.identifier = identifier;
+
+    switch (key) {
+    case QMediaMetaData::ThumbnailImage:
+    case QMediaMetaData::CoverArtImage: {
+#if defined(Q_OS_MACOS)
+        QImage img = value.value<QImage>();
+        if (!img.isNull()) {
+            QByteArray arr;
+            QBuffer buffer(&arr);
+            buffer.open(QIODevice::WriteOnly);
+            img.save(&buffer);
+            NSData *data = arr.toNSData();
+            NSImage *nsImg = [[NSImage alloc] initWithData:data];
+            item.value = nsImg;
+            [nsImg release];
+        }
+#endif
+        break;
+    }
+    case QMediaMetaData::FileFormat: {
+        QMediaFormat::FileFormat qtFormat = value.value<QMediaFormat::FileFormat>();
+        AVFileType avFormat = QDarwinFormatInfo::avFileTypeForContainerFormat(qtFormat);
+        item.value = avFormat;
+        break;
+    }
+    case QMediaMetaData::Language: {
+        QString lang = QLocale::languageToCode(value.value<QLocale::Language>());
+        if (!lang.isEmpty())
+            item.value = lang.toNSString();
+        break;
+    }
+    case QMediaMetaData::Orientation: {
+        bool ok;
+        int rotation = value.toInt(&ok);
+        if (ok)
+            item.value = [NSNumber numberWithInt:rotation];
+        break; // was missing: fell through into the generic conversion below
+    }
+    default: {
+        // Generic conversion driven by the variant's runtime type.
+        switch (value.typeId()) {
+        case QMetaType::QString: {
+            item.value = value.toString().toNSString();
+            break;
+        }
+        case QMetaType::Int: {
+            item.value = [NSNumber numberWithInt:value.toInt()];
+            break;
+        }
+        case QMetaType::LongLong: {
+            item.value = [NSNumber numberWithLongLong:value.toLongLong()];
+            break;
+        }
+        case QMetaType::Double: {
+            item.value = [NSNumber numberWithDouble:value.toDouble()];
+            break;
+        }
+        case QMetaType::QDate:
+        case QMetaType::QDateTime: {
+            item.value = value.toDateTime().toNSDate();
+            break;
+        }
+        case QMetaType::QUrl: {
+            item.value = value.toUrl().toNSURL();
+            break;
+        }
+        default:
+            break;
+        }
+    }
+    }
+
+    return item;
+}
+
+// Converts QMediaMetaData into an array of AVMetadataItems, emitting
+// each key once per metadata key space that the container format
+// understands.
+NSMutableArray<AVMetadataItem *> *AVFMetaData::toAVMetadataForFormat(QMediaMetaData metadata, AVFileType format)
+{
+    // Pick the key spaces appropriate for the container format.
+    NSMutableArray<AVMetadataKeySpace> *keySpaces = [NSMutableArray<AVMetadataKeySpace> array];
+    if (format == AVFileTypeAppleM4A) {
+        [keySpaces addObject:AVMetadataKeySpaceiTunes];
+    } else if (format == AVFileTypeMPEGLayer3) {
+        [keySpaces addObject:AVMetadataKeySpaceID3];
+        [keySpaces addObject:AVMetadataKeySpaceiTunes];
+    } else if (format == AVFileTypeQuickTimeMovie) {
+        [keySpaces addObject:AVMetadataKeySpaceQuickTimeMetadata];
+    } else {
+        [keySpaces addObject:AVMetadataKeySpaceCommon];
+    }
+
+    NSMutableArray<AVMetadataItem *> *avMetaDataArr = [NSMutableArray array];
+    for (const auto &key : metadata.keys()) {
+        // The value does not depend on the key space: look it up once
+        // per key instead of once per key-space iteration.
+        const QVariant value = metadata.value(key);
+        for (NSUInteger i = 0; i < [keySpaces count]; i++) {
+            // set format-specific metadata
+            AVMetadataItem *item = setAVMetadataItemForKey(key, value, keySpaces[i]);
+            if (item)
+                [avMetaDataArr addObject:item];
+        }
+    }
+    return avMetaDataArr;
+}
+
diff --git a/src/plugins/multimedia/darwin/common/avfmetadata_p.h b/src/plugins/multimedia/darwin/common/avfmetadata_p.h
new file mode 100644
index 000000000..d1cb2e7e8
--- /dev/null
+++ b/src/plugins/multimedia/darwin/common/avfmetadata_p.h
@@ -0,0 +1,37 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFMEDIAPLAYERMETADATACONTROL_H
+#define AVFMEDIAPLAYERMETADATACONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/QMediaMetaData>
+#include <QtCore/qvariant.h>
+
+#import <AVFoundation/AVFoundation.h>
+
+QT_BEGIN_NAMESPACE
+
+class AVFMediaPlayer;
+
+// Stateless conversion helpers between QMediaMetaData and the
+// AVFoundation metadata representation used by assets and writers.
+class AVFMetaData
+{
+public:
+    static QMediaMetaData fromAsset(AVAsset *asset);
+    static QMediaMetaData fromAssetTrack(AVAssetTrack *asset);
+    static NSMutableArray<AVMetadataItem *> *toAVMetadataForFormat(QMediaMetaData metaData, AVFileType format);
+};
+
+QT_END_NAMESPACE
+
+#endif // AVFMEDIAPLAYERMETADATACONTROL_H
diff --git a/src/plugins/multimedia/darwin/darwin.json b/src/plugins/multimedia/darwin/darwin.json
new file mode 100644
index 000000000..f72350b17
--- /dev/null
+++ b/src/plugins/multimedia/darwin/darwin.json
@@ -0,0 +1,3 @@
+{
+ "Keys": [ "darwin" ]
+}
diff --git a/src/plugins/multimedia/darwin/mediaplayer/avfdisplaylink.mm b/src/plugins/multimedia/darwin/mediaplayer/avfdisplaylink.mm
new file mode 100644
index 000000000..8c6561f37
--- /dev/null
+++ b/src/plugins/multimedia/darwin/mediaplayer/avfdisplaylink.mm
@@ -0,0 +1,207 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfdisplaylink_p.h"
+#include <QtCore/qcoreapplication.h>
+
+#ifdef QT_DEBUG_AVF
+#include <QtCore/qdebug.h>
+#endif
+
+#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+#import <QuartzCore/CADisplayLink.h>
+#import <Foundation/NSRunLoop.h>
+#define _m_displayLink static_cast<DisplayLinkObserver*>(m_displayLink)
+#else
+#endif
+
+QT_USE_NAMESPACE
+
+#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+@interface DisplayLinkObserver : NSObject
+
+- (void)start;
+- (void)stop;
+- (void)displayLinkNotification:(CADisplayLink *)sender;
+
+@end
+
+@implementation DisplayLinkObserver
+{
+ AVFDisplayLink *m_avfDisplayLink;
+ CADisplayLink *m_displayLink;
+}
+
+- (id)initWithAVFDisplayLink:(AVFDisplayLink *)link
+{
+ self = [super init];
+
+ if (self) {
+ m_avfDisplayLink = link;
+ m_displayLink = [[CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkNotification:)] retain];
+ }
+
+ return self;
+}
+
+- (void) dealloc
+{
+ if (m_displayLink) {
+ [m_displayLink release];
+ m_displayLink = nullptr;
+ }
+
+ [super dealloc];
+}
+
+- (void)start
+{
+ [m_displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
+}
+
+- (void)stop
+{
+ [m_displayLink removeFromRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
+}
+
+- (void)displayLinkNotification:(CADisplayLink *)sender
+{
+ Q_UNUSED(sender);
+ m_avfDisplayLink->displayLinkEvent(nullptr);
+}
+
+@end
+#else
+static CVReturn CVDisplayLinkCallback(CVDisplayLinkRef displayLink,
+ const CVTimeStamp *inNow,
+ const CVTimeStamp *inOutputTime,
+ CVOptionFlags flagsIn,
+ CVOptionFlags *flagsOut,
+ void *displayLinkContext)
+{
+ Q_UNUSED(displayLink);
+ Q_UNUSED(inNow);
+ Q_UNUSED(flagsIn);
+ Q_UNUSED(flagsOut);
+
+ AVFDisplayLink *link = (AVFDisplayLink *)displayLinkContext;
+
+ link->displayLinkEvent(inOutputTime);
+ return kCVReturnSuccess;
+}
+#endif
+
+AVFDisplayLink::AVFDisplayLink(QObject *parent)
+ : QObject(parent)
+ , m_displayLink(nullptr)
+ , m_pendingDisplayLinkEvent(false)
+ , m_isActive(false)
+{
+#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+ m_displayLink = [[DisplayLinkObserver alloc] initWithAVFDisplayLink:this];
+#else
+ // create display link for the main display
+ CVDisplayLinkCreateWithCGDisplay(kCGDirectMainDisplay, &m_displayLink);
+ if (m_displayLink) {
+ // set the current display of a display link.
+ CVDisplayLinkSetCurrentCGDisplay(m_displayLink, kCGDirectMainDisplay);
+
+ // set the renderer output callback function
+ CVDisplayLinkSetOutputCallback(m_displayLink, &CVDisplayLinkCallback, this);
+ }
+#endif
+}
+
+AVFDisplayLink::~AVFDisplayLink()
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO;
+#endif
+
+ if (m_displayLink) {
+ stop();
+#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+ [_m_displayLink release];
+#else
+ CVDisplayLinkRelease(m_displayLink);
+#endif
+ m_displayLink = nullptr;
+ }
+}
+
+bool AVFDisplayLink::isValid() const
+{
+ return m_displayLink != nullptr;
+}
+
+bool AVFDisplayLink::isActive() const
+{
+ return m_isActive;
+}
+
+void AVFDisplayLink::start()
+{
+ if (m_displayLink && !m_isActive) {
+#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+ [_m_displayLink start];
+#else
+ CVDisplayLinkStart(m_displayLink);
+#endif
+ m_isActive = true;
+ }
+}
+
+void AVFDisplayLink::stop()
+{
+ if (m_displayLink && m_isActive) {
+#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+ [_m_displayLink stop];
+#else
+ CVDisplayLinkStop(m_displayLink);
+#endif
+ m_isActive = false;
+ }
+}
+
+void AVFDisplayLink::displayLinkEvent(const CVTimeStamp *ts)
+{
+ // This function is called from a
+ // thread != gui thread. So we post the event.
+ // But we need to make sure that we don't post faster
+ // than the event loop can eat:
+ m_displayLinkMutex.lock();
+ bool pending = m_pendingDisplayLinkEvent;
+ m_pendingDisplayLinkEvent = true;
+#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+ Q_UNUSED(ts);
+ memset(&m_frameTimeStamp, 0, sizeof(CVTimeStamp));
+#else
+ m_frameTimeStamp = *ts;
+#endif
+ m_displayLinkMutex.unlock();
+
+ if (!pending)
+ qApp->postEvent(this, new QEvent(QEvent::User), Qt::HighEventPriority);
+}
+
+bool AVFDisplayLink::event(QEvent *event)
+{
+ switch (event->type()){
+ case QEvent::User: {
+ m_displayLinkMutex.lock();
+ m_pendingDisplayLinkEvent = false;
+ CVTimeStamp ts = m_frameTimeStamp;
+ m_displayLinkMutex.unlock();
+
+ Q_EMIT tick(ts);
+
+ return false;
+ }
+ break;
+ default:
+ break;
+ }
+ return QObject::event(event);
+}
+
+#include "moc_avfdisplaylink_p.cpp"
diff --git a/src/plugins/multimedia/darwin/mediaplayer/avfdisplaylink_p.h b/src/plugins/multimedia/darwin/mediaplayer/avfdisplaylink_p.h
new file mode 100644
index 000000000..c4eb504a5
--- /dev/null
+++ b/src/plugins/multimedia/darwin/mediaplayer/avfdisplaylink_p.h
@@ -0,0 +1,65 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFDISPLAYLINK_H
+#define AVFDISPLAYLINK_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qobject.h>
+#include <QtCore/qmutex.h>
+
+#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+#include <CoreVideo/CVBase.h>
+#else
+#include <QuartzCore/CVDisplayLink.h>
+#endif
+
+QT_BEGIN_NAMESPACE
+
+class AVFDisplayLink : public QObject
+{
+ Q_OBJECT
+public:
+ explicit AVFDisplayLink(QObject *parent = nullptr);
+ virtual ~AVFDisplayLink();
+ bool isValid() const;
+ bool isActive() const;
+
+public Q_SLOTS:
+ void start();
+ void stop();
+
+Q_SIGNALS:
+ void tick(const CVTimeStamp &ts);
+
+public:
+ void displayLinkEvent(const CVTimeStamp *);
+
+protected:
+ virtual bool event(QEvent *) override;
+
+private:
+#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+ void *m_displayLink;
+#else
+ CVDisplayLinkRef m_displayLink;
+#endif
+ QMutex m_displayLinkMutex;
+ bool m_pendingDisplayLinkEvent;
+ bool m_isActive;
+ CVTimeStamp m_frameTimeStamp;
+};
+
+QT_END_NAMESPACE
+
+#endif // AVFDISPLAYLINK_H
diff --git a/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer.mm b/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer.mm
new file mode 100644
index 000000000..964964a8e
--- /dev/null
+++ b/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer.mm
@@ -0,0 +1,1270 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfmediaplayer_p.h"
+#include "avfvideorenderercontrol_p.h"
+#include <avfvideosink_p.h>
+#include <avfmetadata_p.h>
+
+#include "qaudiooutput.h"
+#include "private/qplatformaudiooutput_p.h"
+
+#include <qpointer.h>
+#include <QFileInfo>
+#include <QtCore/qmath.h>
+#include <QtCore/qmutex.h>
+
+#import <AVFoundation/AVFoundation.h>
+
+QT_USE_NAMESPACE
+
+//AVAsset Keys
+static NSString* const AVF_TRACKS_KEY = @"tracks";
+static NSString* const AVF_PLAYABLE_KEY = @"playable";
+
+//AVPlayerItem keys
+static NSString* const AVF_STATUS_KEY = @"status";
+static NSString* const AVF_BUFFER_LIKELY_KEEP_UP_KEY = @"playbackLikelyToKeepUp";
+
+//AVPlayer keys
+static NSString* const AVF_RATE_KEY = @"rate";
+static NSString* const AVF_CURRENT_ITEM_KEY = @"currentItem";
+static NSString* const AVF_CURRENT_ITEM_DURATION_KEY = @"currentItem.duration";
+
+static void *AVFMediaPlayerObserverRateObservationContext = &AVFMediaPlayerObserverRateObservationContext;
+static void *AVFMediaPlayerObserverStatusObservationContext = &AVFMediaPlayerObserverStatusObservationContext;
+static void *AVFMediaPlayerObserverPresentationSizeContext = &AVFMediaPlayerObserverPresentationSizeContext;
+static void *AVFMediaPlayerObserverBufferLikelyToKeepUpContext = &AVFMediaPlayerObserverBufferLikelyToKeepUpContext;
+static void *AVFMediaPlayerObserverTracksContext = &AVFMediaPlayerObserverTracksContext;
+static void *AVFMediaPlayerObserverCurrentItemObservationContext = &AVFMediaPlayerObserverCurrentItemObservationContext;
+static void *AVFMediaPlayerObserverCurrentItemDurationObservationContext = &AVFMediaPlayerObserverCurrentItemDurationObservationContext;
+
+@interface AVFMediaPlayerObserver : NSObject<AVAssetResourceLoaderDelegate>
+
+@property (readonly, getter=player) AVPlayer* m_player;
+@property (readonly, getter=playerItem) AVPlayerItem* m_playerItem;
+@property (readonly, getter=playerLayer) AVPlayerLayer* m_playerLayer;
+@property (readonly, getter=session) AVFMediaPlayer* m_session;
+@property (retain) AVPlayerItemTrack *videoTrack;
+
+- (AVFMediaPlayerObserver *) initWithMediaPlayerSession:(AVFMediaPlayer *)session;
+- (void) setURL:(NSURL *)url mimeType:(NSString *)mimeType;
+- (void) unloadMedia;
+- (void) prepareToPlayAsset:(AVURLAsset *)asset withKeys:(NSArray *)requestedKeys;
+- (void) assetFailedToPrepareForPlayback:(NSError *)error;
+- (void) playerItemDidReachEnd:(NSNotification *)notification;
+- (void) observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object
+ change:(NSDictionary *)change context:(void *)context;
+- (void) detatchSession;
+- (void) dealloc;
+- (BOOL) resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
+@end
+
+#ifdef Q_OS_IOS
+// Alas, no such thing as 'class variable', hence globals:
+static unsigned sessionActivationCount;
+static QMutex sessionMutex;
+#endif // Q_OS_IOS
+
+@implementation AVFMediaPlayerObserver
+{
+@private
+ AVFMediaPlayer *m_session;
+ AVPlayer *m_player;
+ AVPlayerItem *m_playerItem;
+ AVPlayerLayer *m_playerLayer;
+ NSURL *m_URL;
+ BOOL m_bufferIsLikelyToKeepUp;
+ NSData *m_data;
+ NSString *m_mimeType;
+#ifdef Q_OS_IOS
+ BOOL m_activated;
+#endif
+}
+
+@synthesize m_player, m_playerItem, m_playerLayer, m_session;
+
+#ifdef Q_OS_IOS
+- (void)setSessionActive:(BOOL)active
+{
+ const QMutexLocker lock(&sessionMutex);
+ if (active) {
+ // Don't count the same player twice if already activated,
+ // unless it tried to deactivate first:
+ if (m_activated)
+ return;
+ if (!sessionActivationCount)
+ [AVAudioSession.sharedInstance setActive:YES error:nil];
+ ++sessionActivationCount;
+ m_activated = YES;
+ } else {
+ if (!sessionActivationCount || !m_activated) {
+ qWarning("Unbalanced audio session deactivation, ignoring.");
+ return;
+ }
+ --sessionActivationCount;
+ m_activated = NO;
+ if (!sessionActivationCount)
+ [AVAudioSession.sharedInstance setActive:NO error:nil];
+ }
+}
+#endif // Q_OS_IOS
+
+- (AVFMediaPlayerObserver *) initWithMediaPlayerSession:(AVFMediaPlayer *)session
+{
+ if (!(self = [super init]))
+ return nil;
+
+ m_session = session;
+ m_bufferIsLikelyToKeepUp = FALSE;
+
+ m_playerLayer = [AVPlayerLayer playerLayerWithPlayer:nil];
+ [m_playerLayer retain];
+ m_playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
+ m_playerLayer.anchorPoint = CGPointMake(0.0f, 0.0f);
+ return self;
+}
+
+- (void) setURL:(NSURL *)url mimeType:(NSString *)mimeType
+{
+ if (!m_session)
+ return;
+
+ [m_mimeType release];
+ m_mimeType = [mimeType retain];
+
+ if (m_URL != url)
+ {
+ [m_URL release];
+ m_URL = [url copy];
+
+ //Create an asset for inspection of a resource referenced by a given URL.
+ //Load the values for the asset keys "tracks", "playable".
+
+ // use __block to avoid maintaining strong references on variables captured by the
+ // following block callback
+#if defined(Q_OS_IOS)
+ BOOL isAccessing = [m_URL startAccessingSecurityScopedResource];
+#endif
+ __block AVURLAsset *asset = [[AVURLAsset URLAssetWithURL:m_URL options:nil] retain];
+ [asset.resourceLoader setDelegate:self queue:dispatch_get_main_queue()];
+
+ __block NSArray *requestedKeys = [[NSArray arrayWithObjects:AVF_TRACKS_KEY, AVF_PLAYABLE_KEY, nil] retain];
+
+ __block AVFMediaPlayerObserver *blockSelf = [self retain];
+
+ // Tells the asset to load the values of any of the specified keys that are not already loaded.
+ [asset loadValuesAsynchronouslyForKeys:requestedKeys completionHandler:
+ ^{
+ dispatch_async( dispatch_get_main_queue(),
+ ^{
+#if defined(Q_OS_IOS)
+ if (isAccessing)
+ [m_URL stopAccessingSecurityScopedResource];
+#endif
+ [blockSelf prepareToPlayAsset:asset withKeys:requestedKeys];
+ [asset release];
+ [requestedKeys release];
+ [blockSelf release];
+ });
+ }];
+ }
+}
+
+- (void) unloadMedia
+{
+ if (m_playerItem) {
+ [m_playerItem removeObserver:self forKeyPath:@"presentationSize"];
+ [m_playerItem removeObserver:self forKeyPath:AVF_STATUS_KEY];
+ [m_playerItem removeObserver:self forKeyPath:AVF_BUFFER_LIKELY_KEEP_UP_KEY];
+ [m_playerItem removeObserver:self forKeyPath:AVF_TRACKS_KEY];
+
+ [[NSNotificationCenter defaultCenter] removeObserver:self
+ name:AVPlayerItemDidPlayToEndTimeNotification
+ object:m_playerItem];
+ m_playerItem = 0;
+ }
+ if (m_player) {
+ [m_player setRate:0.0];
+ [m_player removeObserver:self forKeyPath:AVF_CURRENT_ITEM_DURATION_KEY];
+ [m_player removeObserver:self forKeyPath:AVF_CURRENT_ITEM_KEY];
+ [m_player removeObserver:self forKeyPath:AVF_RATE_KEY];
+ [m_player release];
+ m_player = 0;
+ }
+ if (m_playerLayer)
+ m_playerLayer.player = nil;
+#if defined(Q_OS_IOS)
+ [self setSessionActive:NO];
+#endif
+}
+
+- (void) prepareToPlayAsset:(AVURLAsset *)asset
+ withKeys:(NSArray *)requestedKeys
+{
+ if (!m_session)
+ return;
+
+ //Make sure that the value of each key has loaded successfully.
+ for (NSString *thisKey in requestedKeys)
+ {
+ NSError *error = nil;
+ AVKeyValueStatus keyStatus = [asset statusOfValueForKey:thisKey error:&error];
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO << [thisKey UTF8String] << " status: " << keyStatus;
+#endif
+ if (keyStatus == AVKeyValueStatusFailed)
+ {
+ [self assetFailedToPrepareForPlayback:error];
+ return;
+ }
+ }
+
+ //Use the AVAsset playable property to detect whether the asset can be played.
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO << "isPlayable: " << [asset isPlayable];
+#endif
+ if (!asset.playable)
+ qWarning() << "Asset reported to be not playable. Playback of this asset may not be possible.";
+
+ //At this point we're ready to set up for playback of the asset.
+ //Stop observing our prior AVPlayerItem, if we have one.
+ if (m_playerItem)
+ {
+ //Remove existing player item key value observers and notifications.
+ [self unloadMedia];
+ }
+
+ //Create a new instance of AVPlayerItem from the now successfully loaded AVAsset.
+ m_playerItem = [AVPlayerItem playerItemWithAsset:asset];
+ if (!m_playerItem) {
+ qWarning() << "Failed to create player item";
+ //Generate an error describing the failure.
+ NSString *localizedDescription = NSLocalizedString(@"Item cannot be played", @"Item cannot be played description");
+ NSString *localizedFailureReason = NSLocalizedString(@"The assets tracks were loaded, but couldn't create player item.", @"Item cannot be played failure reason");
+ NSDictionary *errorDict = [NSDictionary dictionaryWithObjectsAndKeys:
+ localizedDescription, NSLocalizedDescriptionKey,
+ localizedFailureReason, NSLocalizedFailureReasonErrorKey,
+ nil];
+ NSError *assetCannotBePlayedError = [NSError errorWithDomain:@"StitchedStreamPlayer" code:0 userInfo:errorDict];
+
+ [self assetFailedToPrepareForPlayback:assetCannotBePlayedError];
+ return;
+ }
+
+ //Observe the player item "status" key to determine when it is ready to play.
+ [m_playerItem addObserver:self
+ forKeyPath:AVF_STATUS_KEY
+ options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
+ context:AVFMediaPlayerObserverStatusObservationContext];
+
+ [m_playerItem addObserver:self
+ forKeyPath:@"presentationSize"
+ options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
+ context:AVFMediaPlayerObserverPresentationSizeContext];
+
+ [m_playerItem addObserver:self
+ forKeyPath:AVF_BUFFER_LIKELY_KEEP_UP_KEY
+ options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
+ context:AVFMediaPlayerObserverBufferLikelyToKeepUpContext];
+
+ [m_playerItem addObserver:self
+ forKeyPath:AVF_TRACKS_KEY
+ options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
+ context:AVFMediaPlayerObserverTracksContext];
+
+    //Subscribe to the end-of-playback notification so the session
+    //can be told (via playerItemDidReachEnd:) when the item finishes.
+ [[NSNotificationCenter defaultCenter] addObserver:self
+ selector:@selector(playerItemDidReachEnd:)
+ name:AVPlayerItemDidPlayToEndTimeNotification
+ object:m_playerItem];
+
+ //Get a new AVPlayer initialized to play the specified player item.
+ m_player = [AVPlayer playerWithPlayerItem:m_playerItem];
+ [m_player retain];
+
+ //Set the initial volume on new player object
+ if (self.session) {
+ auto *audioOutput = m_session->m_audioOutput;
+ m_player.volume = (audioOutput ? audioOutput->volume : 1.);
+ m_player.muted = (audioOutput ? audioOutput->muted : true);
+ }
+
+ //Assign the output layer to the new player
+ m_playerLayer.player = m_player;
+
+ //Observe the AVPlayer "currentItem" property to find out when any
+ //AVPlayer replaceCurrentItemWithPlayerItem: replacement will/did
+ //occur.
+ [m_player addObserver:self
+ forKeyPath:AVF_CURRENT_ITEM_KEY
+ options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
+ context:AVFMediaPlayerObserverCurrentItemObservationContext];
+
+    //Observe the AVPlayer "rate" property to track playback-rate changes.
+ [m_player addObserver:self
+ forKeyPath:AVF_RATE_KEY
+ options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
+ context:AVFMediaPlayerObserverRateObservationContext];
+
+    //Observe currentItem.duration to pick up duration changes for buffering state.
+ [m_player addObserver:self
+ forKeyPath:AVF_CURRENT_ITEM_DURATION_KEY
+ options:0
+ context:AVFMediaPlayerObserverCurrentItemDurationObservationContext];
+#if defined(Q_OS_IOS)
+ [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback withOptions:AVAudioSessionCategoryOptionMixWithOthers error:nil];
+ [self setSessionActive:YES];
+#endif
+}
+
+-(void) assetFailedToPrepareForPlayback:(NSError *)error
+{
+ Q_UNUSED(error);
+ QMetaObject::invokeMethod(m_session, "processMediaLoadError", Qt::AutoConnection);
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO;
+ qDebug() << [[error localizedDescription] UTF8String];
+ qDebug() << [[error localizedFailureReason] UTF8String];
+ qDebug() << [[error localizedRecoverySuggestion] UTF8String];
+#endif
+}
+
+- (void) playerItemDidReachEnd:(NSNotification *)notification
+{
+ Q_UNUSED(notification);
+ if (self.session)
+ QMetaObject::invokeMethod(m_session, "processEOS", Qt::AutoConnection);
+}
+
+- (void) observeValueForKeyPath:(NSString*) path
+ ofObject:(id)object
+ change:(NSDictionary*)change
+ context:(void*)context
+{
+ //AVPlayerItem "status" property value observer.
+ if (context == AVFMediaPlayerObserverStatusObservationContext)
+ {
+ AVPlayerStatus status = (AVPlayerStatus)[[change objectForKey:NSKeyValueChangeNewKey] integerValue];
+ switch (status)
+ {
+ //Indicates that the status of the player is not yet known because
+ //it has not tried to load new media resources for playback
+ case AVPlayerStatusUnknown:
+ {
+ //QMetaObject::invokeMethod(m_session, "processLoadStateChange", Qt::AutoConnection);
+ }
+ break;
+
+ case AVPlayerStatusReadyToPlay:
+ {
+ //Once the AVPlayerItem becomes ready to play, i.e.
+ //[playerItem status] == AVPlayerItemStatusReadyToPlay,
+ //its duration can be fetched from the item.
+ if (self.session)
+ QMetaObject::invokeMethod(m_session, "processLoadStateChange", Qt::AutoConnection);
+ }
+ break;
+
+ case AVPlayerStatusFailed:
+ {
+ AVPlayerItem *playerItem = static_cast<AVPlayerItem*>(object);
+ [self assetFailedToPrepareForPlayback:playerItem.error];
+
+ if (self.session)
+ QMetaObject::invokeMethod(m_session, "processLoadStateFailure", Qt::AutoConnection);
+ }
+ break;
+ }
+ } else if (context == AVFMediaPlayerObserverPresentationSizeContext) {
+ QSize size(m_playerItem.presentationSize.width, m_playerItem.presentationSize.height);
+ QMetaObject::invokeMethod(m_session, "nativeSizeChanged", Qt::AutoConnection, Q_ARG(QSize, size));
+ } else if (context == AVFMediaPlayerObserverBufferLikelyToKeepUpContext)
+ {
+ const bool isPlaybackLikelyToKeepUp = [m_playerItem isPlaybackLikelyToKeepUp];
+ if (isPlaybackLikelyToKeepUp != m_bufferIsLikelyToKeepUp) {
+ m_bufferIsLikelyToKeepUp = isPlaybackLikelyToKeepUp;
+ QMetaObject::invokeMethod(m_session, "processBufferStateChange", Qt::AutoConnection,
+ Q_ARG(int, isPlaybackLikelyToKeepUp ? 100 : 0));
+ }
+ }
+ else if (context == AVFMediaPlayerObserverTracksContext)
+ {
+ QMetaObject::invokeMethod(m_session, "updateTracks", Qt::AutoConnection);
+ }
+ //AVPlayer "rate" property value observer.
+ else if (context == AVFMediaPlayerObserverRateObservationContext)
+ {
+ //QMetaObject::invokeMethod(m_session, "setPlaybackRate", Qt::AutoConnection, Q_ARG(qreal, [m_player rate]));
+ }
+ //AVPlayer "currentItem" property observer.
+ //Called when the AVPlayer replaceCurrentItemWithPlayerItem:
+ //replacement will/did occur.
+ else if (context == AVFMediaPlayerObserverCurrentItemObservationContext)
+ {
+ AVPlayerItem *newPlayerItem = [change objectForKey:NSKeyValueChangeNewKey];
+ if (m_playerItem != newPlayerItem)
+ m_playerItem = newPlayerItem;
+ }
+ else if (context == AVFMediaPlayerObserverCurrentItemDurationObservationContext)
+ {
+ const CMTime time = [m_playerItem duration];
+ const qint64 duration = static_cast<qint64>(float(time.value) / float(time.timescale) * 1000.0f);
+ if (self.session)
+ QMetaObject::invokeMethod(m_session, "processDurationChange", Qt::AutoConnection, Q_ARG(qint64, duration));
+ }
+ else
+ {
+ [super observeValueForKeyPath:path ofObject:object change:change context:context];
+ }
+}
+
+- (void) detatchSession
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO;
+#endif
+ m_session = 0;
+}
+
+- (void) dealloc
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO;
+#endif
+ [self unloadMedia];
+
+ if (m_URL) {
+ [m_URL release];
+ }
+
+ [m_mimeType release];
+ [m_playerLayer release];
+ [super dealloc];
+}
+
+- (BOOL) resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
+{
+ Q_UNUSED(resourceLoader);
+
+ if (![loadingRequest.request.URL.scheme isEqualToString:@"iodevice"])
+ return NO;
+
+ QIODevice *device = m_session->mediaStream();
+ if (!device)
+ return NO;
+
+ device->seek(loadingRequest.dataRequest.requestedOffset);
+ if (loadingRequest.contentInformationRequest) {
+ loadingRequest.contentInformationRequest.contentType = m_mimeType;
+ loadingRequest.contentInformationRequest.contentLength = device->size();
+ loadingRequest.contentInformationRequest.byteRangeAccessSupported = YES;
+ }
+
+ if (loadingRequest.dataRequest) {
+ NSInteger requestedLength = loadingRequest.dataRequest.requestedLength;
+ int maxBytes = qMin(32 * 1064, int(requestedLength));
+ char buffer[maxBytes];
+ NSInteger submitted = 0;
+ while (submitted < requestedLength) {
+ qint64 len = device->read(buffer, maxBytes);
+ if (len < 1)
+ break;
+
+ [loadingRequest.dataRequest respondWithData:[NSData dataWithBytes:buffer length:len]];
+ submitted += len;
+ }
+
+ // Finish loading even if not all bytes submitted.
+ [loadingRequest finishLoading];
+ }
+
+ return YES;
+}
+@end
+
+AVFMediaPlayer::AVFMediaPlayer(QMediaPlayer *player)
+ : QObject(player),
+ QPlatformMediaPlayer(player),
+ m_state(QMediaPlayer::StoppedState),
+ m_mediaStatus(QMediaPlayer::NoMedia),
+ m_mediaStream(nullptr),
+ m_rate(1.0),
+ m_requestedPosition(-1),
+ m_duration(0),
+ m_bufferProgress(0),
+ m_videoAvailable(false),
+ m_audioAvailable(false),
+ m_seekable(false)
+{
+ m_observer = [[AVFMediaPlayerObserver alloc] initWithMediaPlayerSession:this];
+ connect(&m_playbackTimer, &QTimer::timeout, this, &AVFMediaPlayer::processPositionChange);
+ setVideoOutput(new AVFVideoRendererControl(this));
+}
+
+AVFMediaPlayer::~AVFMediaPlayer()
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO;
+#endif
+    //Detach the session from the sessionObserver (which could still be alive trying to communicate with this session).
+ [m_observer detatchSession];
+ [static_cast<AVFMediaPlayerObserver*>(m_observer) release];
+}
+
+void AVFMediaPlayer::setVideoSink(QVideoSink *sink)
+{
+ m_videoSink = sink ? static_cast<AVFVideoSink *>(sink->platformVideoSink()): nullptr;
+ m_videoOutput->setVideoSink(m_videoSink);
+}
+
+void AVFMediaPlayer::setVideoOutput(AVFVideoRendererControl *output)
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO << output;
+#endif
+
+ if (m_videoOutput == output)
+ return;
+
+ //Set the current output layer to null to stop rendering
+ if (m_videoOutput) {
+ m_videoOutput->setLayer(nullptr);
+ }
+
+ m_videoOutput = output;
+
+ if (m_videoOutput && m_state != QMediaPlayer::StoppedState)
+ m_videoOutput->setLayer([static_cast<AVFMediaPlayerObserver*>(m_observer) playerLayer]);
+}
+
+AVAsset *AVFMediaPlayer::currentAssetHandle()
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO;
+#endif
+ AVAsset *currentAsset = [[static_cast<AVFMediaPlayerObserver*>(m_observer) playerItem] asset];
+ return currentAsset;
+}
+
+QMediaPlayer::PlaybackState AVFMediaPlayer::state() const
+{
+ return m_state;
+}
+
+QMediaPlayer::MediaStatus AVFMediaPlayer::mediaStatus() const
+{
+ return m_mediaStatus;
+}
+
+QUrl AVFMediaPlayer::media() const
+{
+ return m_resources;
+}
+
+QIODevice *AVFMediaPlayer::mediaStream() const
+{
+ return m_mediaStream;
+}
+
+static void setURL(AVFMediaPlayerObserver *observer, const QByteArray &url, const QString &mimeType = QString())
+{
+ NSString *urlString = [NSString stringWithUTF8String:url.constData()];
+ NSURL *nsurl = [NSURL URLWithString:urlString];
+ [observer setURL:nsurl mimeType:[NSString stringWithUTF8String:mimeType.toLatin1().constData()]];
+}
+
+static void setStreamURL(AVFMediaPlayerObserver *observer, const QByteArray &url)
+{
+ setURL(observer, QByteArrayLiteral("iodevice://") + url, QFileInfo(QString::fromUtf8(url)).suffix());
+}
+
+void AVFMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO << content.request().url();
+#endif
+
+ [static_cast<AVFMediaPlayerObserver*>(m_observer) unloadMedia];
+
+ m_resources = content;
+ resetStream(stream);
+
+ setAudioAvailable(false);
+ setVideoAvailable(false);
+ setSeekable(false);
+ m_requestedPosition = -1;
+ orientationChanged(QtVideo::Rotation::None, false);
+ Q_EMIT positionChanged(position());
+ if (m_duration != 0) {
+ m_duration = 0;
+ Q_EMIT durationChanged(0);
+ }
+ if (!m_metaData.isEmpty()) {
+ m_metaData.clear();
+ metaDataChanged();
+ }
+ for (int i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i) {
+ tracks[i].clear();
+ nativeTracks[i].clear();
+ }
+ tracksChanged();
+
+ const QMediaPlayer::MediaStatus oldMediaStatus = m_mediaStatus;
+ const QMediaPlayer::PlaybackState oldState = m_state;
+
+ if (!m_mediaStream && content.isEmpty()) {
+ m_mediaStatus = QMediaPlayer::NoMedia;
+ if (m_mediaStatus != oldMediaStatus)
+ Q_EMIT mediaStatusChanged(m_mediaStatus);
+
+ m_state = QMediaPlayer::StoppedState;
+ if (m_state != oldState)
+ Q_EMIT stateChanged(m_state);
+
+ return;
+ }
+
+ m_mediaStatus = QMediaPlayer::LoadingMedia;
+ if (m_mediaStatus != oldMediaStatus)
+ Q_EMIT mediaStatusChanged(m_mediaStatus);
+
+ if (m_mediaStream) {
+ // If there is a data, try to load it,
+ // otherwise wait for readyRead.
+ if (m_mediaStream->size())
+ setStreamURL(m_observer, m_resources.toEncoded());
+ } else {
+ //Load AVURLAsset
+ //initialize asset using content's URL
+ setURL(m_observer, m_resources.toEncoded());
+ }
+
+ m_state = QMediaPlayer::StoppedState;
+ if (m_state != oldState)
+ Q_EMIT stateChanged(m_state);
+}
+
+qint64 AVFMediaPlayer::position() const
+{
+ AVPlayerItem *playerItem = [static_cast<AVFMediaPlayerObserver*>(m_observer) playerItem];
+
+ if (m_requestedPosition != -1)
+ return m_requestedPosition;
+
+ if (!playerItem)
+ return 0;
+
+ CMTime time = [playerItem currentTime];
+ return static_cast<quint64>(float(time.value) / float(time.timescale) * 1000.0f);
+}
+
+qint64 AVFMediaPlayer::duration() const
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO;
+#endif
+ return m_duration;
+}
+
+float AVFMediaPlayer::bufferProgress() const
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO;
+#endif
+ return m_bufferProgress/100.;
+}
+
+void AVFMediaPlayer::setAudioAvailable(bool available)
+{
+ if (m_audioAvailable == available)
+ return;
+
+ m_audioAvailable = available;
+ Q_EMIT audioAvailableChanged(available);
+}
+
+bool AVFMediaPlayer::isAudioAvailable() const
+{
+ return m_audioAvailable;
+}
+
+void AVFMediaPlayer::setVideoAvailable(bool available)
+{
+ if (m_videoAvailable == available)
+ return;
+
+ m_videoAvailable = available;
+ Q_EMIT videoAvailableChanged(available);
+}
+
+bool AVFMediaPlayer::isVideoAvailable() const
+{
+ return m_videoAvailable;
+}
+
+bool AVFMediaPlayer::isSeekable() const
+{
+ return m_seekable;
+}
+
+void AVFMediaPlayer::setSeekable(bool seekable)
+{
+ if (m_seekable == seekable)
+ return;
+
+ m_seekable = seekable;
+ Q_EMIT seekableChanged(seekable);
+}
+
+QMediaTimeRange AVFMediaPlayer::availablePlaybackRanges() const
+{
+ AVPlayerItem *playerItem = [static_cast<AVFMediaPlayerObserver*>(m_observer) playerItem];
+
+ if (playerItem) {
+ QMediaTimeRange timeRanges;
+
+ NSArray *ranges = [playerItem loadedTimeRanges];
+ for (NSValue *timeRange in ranges) {
+ CMTimeRange currentTimeRange = [timeRange CMTimeRangeValue];
+ qint64 startTime = qint64(float(currentTimeRange.start.value) / currentTimeRange.start.timescale * 1000.0);
+ timeRanges.addInterval(startTime, startTime + qint64(float(currentTimeRange.duration.value) / currentTimeRange.duration.timescale * 1000.0));
+ }
+ if (!timeRanges.isEmpty())
+ return timeRanges;
+ }
+ return QMediaTimeRange(0, duration());
+}
+
+qreal AVFMediaPlayer::playbackRate() const
+{
+ return m_rate;
+}
+
+void AVFMediaPlayer::setAudioOutput(QPlatformAudioOutput *output)
+{
+ if (m_audioOutput == output)
+ return;
+ if (m_audioOutput)
+ m_audioOutput->q->disconnect(this);
+ m_audioOutput = output;
+ if (m_audioOutput) {
+ connect(m_audioOutput->q, &QAudioOutput::deviceChanged, this, &AVFMediaPlayer::audioOutputChanged);
+ connect(m_audioOutput->q, &QAudioOutput::volumeChanged, this, &AVFMediaPlayer::setVolume);
+ connect(m_audioOutput->q, &QAudioOutput::mutedChanged, this, &AVFMediaPlayer::setMuted);
+ //connect(m_audioOutput->q, &QAudioOutput::audioRoleChanged, this, &AVFMediaPlayer::setAudioRole);
+ }
+ audioOutputChanged();
+ setMuted(m_audioOutput ? m_audioOutput->muted : true);
+ setVolume(m_audioOutput ? m_audioOutput->volume : 1.);
+}
+
+QMediaMetaData AVFMediaPlayer::metaData() const
+{
+ return m_metaData;
+}
+
+void AVFMediaPlayer::setPlaybackRate(qreal rate)
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO << rate;
+#endif
+
+ if (qFuzzyCompare(m_rate, rate))
+ return;
+
+ m_rate = rate;
+
+ AVPlayer *player = [static_cast<AVFMediaPlayerObserver*>(m_observer) player];
+ if (player && m_state == QMediaPlayer::PlayingState)
+ [player setRate:m_rate];
+
+ Q_EMIT playbackRateChanged(m_rate);
+}
+
+// Seeks to pos (milliseconds). If the player item does not exist yet the
+// position is remembered and applied later by processLoadStateChange(); if
+// the media is not seekable any pending request is dropped. A successful
+// seek out of EndOfMedia resets the status to Buffered/Loaded media.
+void AVFMediaPlayer::setPosition(qint64 pos)
+{
+#ifdef QT_DEBUG_AVF
+    qDebug() << Q_FUNC_INFO << pos;
+#endif
+
+    if (pos == position())
+        return;
+
+    AVPlayerItem *playerItem = [static_cast<AVFMediaPlayerObserver*>(m_observer) playerItem];
+    if (!playerItem) {
+        // No item yet: remember the request; it is applied once loading finishes.
+        m_requestedPosition = pos;
+        Q_EMIT positionChanged(m_requestedPosition);
+        return;
+    }
+
+    if (!isSeekable()) {
+        if (m_requestedPosition != -1) {
+            m_requestedPosition = -1;
+            Q_EMIT positionChanged(position());
+        }
+        return;
+    }
+
+    // Clamp into the valid [0, duration] range.
+    pos = qMax(qint64(0), pos);
+    if (duration() > 0)
+        pos = qMin(pos, duration());
+    m_requestedPosition = pos;
+
+    CMTime newTime = [playerItem currentTime];
+    // Convert ms -> timescale units in double precision. The previous float
+    // expression (pos / 1000.0f) lost millisecond precision for positions
+    // beyond ~4.6 hours, since float has only a 24-bit mantissa.
+    newTime.value = (pos / 1000.0) * newTime.timescale;
+    [playerItem seekToTime:newTime toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero
+                                  completionHandler:^(BOOL finished) {
+                                       if (finished)
+                                           m_requestedPosition = -1;
+                                  }];
+
+    Q_EMIT positionChanged(pos);
+
+    // Reset media status if the current status is EndOfMedia
+    if (m_mediaStatus == QMediaPlayer::EndOfMedia) {
+        QMediaPlayer::MediaStatus newMediaStatus = (m_state == QMediaPlayer::PausedState) ? QMediaPlayer::BufferedMedia
+                                                                                          : QMediaPlayer::LoadedMedia;
+        Q_EMIT mediaStatusChanged((m_mediaStatus = newMediaStatus));
+    }
+}
+
+// Starts (or resumes) playback. On AVFoundation, playback is started by
+// setting a non-zero rate on the AVPlayer rather than calling play.
+void AVFMediaPlayer::play()
+{
+#ifdef QT_DEBUG_AVF
+    qDebug() << Q_FUNC_INFO << "currently: " << m_state;
+#endif
+
+    if (m_mediaStatus == QMediaPlayer::NoMedia || m_mediaStatus == QMediaPlayer::InvalidMedia)
+        return;
+
+    if (m_state == QMediaPlayer::PlayingState)
+        return;
+
+    resetCurrentLoop();
+
+    // Re-attach the player layer to the video output (stop() detaches it).
+    if (m_videoOutput && m_videoSink)
+        m_videoOutput->setLayer([static_cast<AVFMediaPlayerObserver*>(m_observer) playerLayer]);
+
+    // Reset media status if the current status is EndOfMedia
+    if (m_mediaStatus == QMediaPlayer::EndOfMedia)
+        setPosition(0);
+
+    if (m_mediaStatus == QMediaPlayer::LoadedMedia || m_mediaStatus == QMediaPlayer::BufferedMedia) {
+        // Setting the rate starts playback
+        [[static_cast<AVFMediaPlayerObserver*>(m_observer) player] setRate:m_rate];
+    }
+
+    m_state = QMediaPlayer::PlayingState;
+    // Re-evaluate load state with the new target state (may flip to BufferedMedia).
+    processLoadStateChange();
+
+    Q_EMIT stateChanged(m_state);
+    m_playbackTimer.start(100); // drives periodic positionChanged emissions
+}
+
+// Pauses playback and stops the periodic position timer.
+void AVFMediaPlayer::pause()
+{
+#ifdef QT_DEBUG_AVF
+    qDebug() << Q_FUNC_INFO << "currently: " << m_state;
+#endif
+
+    if (m_mediaStatus == QMediaPlayer::NoMedia)
+        return;
+
+    if (m_state == QMediaPlayer::PausedState)
+        return;
+
+    m_state = QMediaPlayer::PausedState;
+
+    // Re-attach the player layer so a paused frame keeps rendering.
+    if (m_videoOutput && m_videoSink)
+        m_videoOutput->setLayer([static_cast<AVFMediaPlayerObserver*>(m_observer) playerLayer]);
+
+    [[static_cast<AVFMediaPlayerObserver*>(m_observer) player] pause];
+
+    // Reset media status if the current status is EndOfMedia
+    if (m_mediaStatus == QMediaPlayer::EndOfMedia)
+        setPosition(0);
+
+    Q_EMIT positionChanged(position());
+    Q_EMIT stateChanged(m_state);
+    m_playbackTimer.stop();
+}
+
+// Stops playback: pauses the AVPlayer, rewinds to 0, and detaches the
+// video layer so no further frames are produced.
+void AVFMediaPlayer::stop()
+{
+#ifdef QT_DEBUG_AVF
+    qDebug() << Q_FUNC_INFO << "currently: " << m_state;
+#endif
+
+    if (m_state == QMediaPlayer::StoppedState)
+        return;
+
+    // AVPlayer doesn't have stop(), only pause() and play().
+    [[static_cast<AVFMediaPlayerObserver*>(m_observer) player] pause];
+    setPosition(0);
+
+    if (m_videoOutput)
+        m_videoOutput->setLayer(nullptr);
+
+    // Drop back from Buffered to Loaded since we are no longer playing.
+    if (m_mediaStatus == QMediaPlayer::BufferedMedia)
+        Q_EMIT mediaStatusChanged((m_mediaStatus = QMediaPlayer::LoadedMedia));
+
+    Q_EMIT stateChanged((m_state = QMediaPlayer::StoppedState));
+    m_playbackTimer.stop();
+}
+
+// Forwards the requested volume straight to the underlying AVPlayer,
+// if one exists yet.
+void AVFMediaPlayer::setVolume(float volume)
+{
+#ifdef QT_DEBUG_AVF
+    qDebug() << Q_FUNC_INFO << volume;
+#endif
+
+    if (AVPlayer *player = [static_cast<AVFMediaPlayerObserver*>(m_observer) player])
+        player.volume = volume;
+}
+
+// Forwards the mute flag straight to the underlying AVPlayer, if one
+// exists yet.
+void AVFMediaPlayer::setMuted(bool muted)
+{
+#ifdef QT_DEBUG_AVF
+    qDebug() << Q_FUNC_INFO << muted;
+#endif
+
+    if (AVPlayer *player = [static_cast<AVFMediaPlayerObserver*>(m_observer) player])
+        player.muted = muted;
+}
+
+// Applies the selected audio output device to the AVPlayer.
+// Per-device routing (audioOutputDeviceUniqueID) only exists on macOS;
+// on iOS this function is a no-op.
+void AVFMediaPlayer::audioOutputChanged()
+{
+#ifdef Q_OS_MACOS
+    AVPlayer *player = [static_cast<AVFMediaPlayerObserver*>(m_observer) player];
+    if (!m_audioOutput || m_audioOutput->device.id().isEmpty()) {
+        player.audioOutputDeviceUniqueID = nil; // system default device
+        if (!m_audioOutput)
+            player.muted = true; // no output attached at all -> silence
+    } else {
+        NSString *str = QString::fromUtf8(m_audioOutput->device.id()).toNSString();
+        player.audioOutputDeviceUniqueID = str;
+    }
+#endif
+}
+
+// Handles end-of-stream from the observer: either restart for looping, or
+// transition to EndOfMedia/Stopped and detach the video layer.
+void AVFMediaPlayer::processEOS()
+{
+    if (doLoop()) {
+        // More loop iterations remain: rewind and restart playback
+        // (setting the rate resumes the AVPlayer).
+        setPosition(0);
+        [[static_cast<AVFMediaPlayerObserver*>(m_observer) player] setRate:m_rate];
+        return;
+    }
+
+    //AVPlayerItem has reached end of track/stream
+#ifdef QT_DEBUG_AVF
+    qDebug() << Q_FUNC_INFO;
+#endif
+    Q_EMIT positionChanged(position());
+    m_mediaStatus = QMediaPlayer::EndOfMedia;
+    m_state = QMediaPlayer::StoppedState;
+
+    if (m_videoOutput)
+        m_videoOutput->setLayer(nullptr);
+
+    Q_EMIT mediaStatusChanged(m_mediaStatus);
+    Q_EMIT stateChanged(m_state);
+}
+
+// Reacts to the AVPlayer's load state. Once the player is ReadyToPlay this
+// extracts metadata and tracks, determines seekability, sizes the player
+// layer, applies any seek requested before the item existed, and moves the
+// media status to Loaded/Buffered. If the target state is Playing it also
+// kicks off playback by setting the rate.
+void AVFMediaPlayer::processLoadStateChange(QMediaPlayer::PlaybackState newState)
+{
+    AVPlayerStatus currentStatus = [[static_cast<AVFMediaPlayerObserver*>(m_observer) player] status];
+
+#ifdef QT_DEBUG_AVF
+    qDebug() << Q_FUNC_INFO << currentStatus << ", " << m_mediaStatus << ", " << newState;
+#endif
+
+    if (m_mediaStatus == QMediaPlayer::NoMedia)
+        return;
+
+    if (currentStatus == AVPlayerStatusReadyToPlay) {
+
+        // NOTE(review): this initializer is dead — newStatus is assigned
+        // unconditionally below before it is read.
+        QMediaPlayer::MediaStatus newStatus = m_mediaStatus;
+
+        AVPlayerItem *playerItem = [m_observer playerItem];
+
+        // get the meta data
+        m_metaData = AVFMetaData::fromAsset(playerItem.asset);
+        Q_EMIT metaDataChanged();
+        updateTracks();
+
+        if (playerItem) {
+            setSeekable([[playerItem seekableTimeRanges] count] > 0);
+
+            // Get the native size of the video, and reset the bounds of the player layer
+            AVPlayerLayer *playerLayer = [m_observer playerLayer];
+            if (m_observer.videoTrack && playerLayer) {
+                if (!playerLayer.bounds.size.width || !playerLayer.bounds.size.height) {
+                    playerLayer.bounds = CGRectMake(0.0f, 0.0f,
+                                                    m_observer.videoTrack.assetTrack.naturalSize.width,
+                                                    m_observer.videoTrack.assetTrack.naturalSize.height);
+                }
+            }
+
+            // Apply a seek that was requested before the item was created.
+            if (m_requestedPosition != -1) {
+                setPosition(m_requestedPosition);
+                m_requestedPosition = -1;
+            }
+        }
+
+        newStatus = (newState != QMediaPlayer::StoppedState) ? QMediaPlayer::BufferedMedia
+                                                             : QMediaPlayer::LoadedMedia;
+
+        if (newStatus != m_mediaStatus)
+            Q_EMIT mediaStatusChanged((m_mediaStatus = newStatus));
+
+    }
+
+    if (newState == QMediaPlayer::PlayingState && [static_cast<AVFMediaPlayerObserver*>(m_observer) player]) {
+        // Setting the rate is enough to start playback, no need to call play()
+        [[static_cast<AVFMediaPlayerObserver*>(m_observer) player] setRate:m_rate];
+        m_playbackTimer.start();
+    }
+}
+
+
+// Convenience overload: re-evaluate the load state against the current
+// playback state.
+void AVFMediaPlayer::processLoadStateChange()
+{
+    processLoadStateChange(m_state);
+}
+
+
+// The AVPlayer failed to load: force the playback state back to Stopped.
+void AVFMediaPlayer::processLoadStateFailure()
+{
+    Q_EMIT stateChanged((m_state = QMediaPlayer::StoppedState));
+}
+
+// Tracks buffering progress (0-100). Dropping to 0 marks the media as
+// stalled; climbing back from stalled resumes playback (by setting the
+// rate) and marks the media buffered again.
+void AVFMediaPlayer::processBufferStateChange(int bufferProgress)
+{
+    if (bufferProgress == m_bufferProgress)
+        return;
+
+    auto status = m_mediaStatus;
+    // Buffered -> unbuffered.
+    if (!bufferProgress) {
+        status = QMediaPlayer::StalledMedia;
+    } else if (status == QMediaPlayer::StalledMedia) {
+        status = QMediaPlayer::BufferedMedia;
+        // Resume playback.
+        if (m_state == QMediaPlayer::PlayingState) {
+            [[static_cast<AVFMediaPlayerObserver*>(m_observer) player] setRate:m_rate];
+            m_playbackTimer.start();
+        }
+    }
+
+    if (m_mediaStatus != status)
+        Q_EMIT mediaStatusChanged(m_mediaStatus = status);
+
+    m_bufferProgress = bufferProgress;
+    Q_EMIT bufferProgressChanged(bufferProgress/100.); // signal expects a 0..1 fraction
+}
+
+// Caches the new duration (ms) and notifies listeners, ignoring no-op updates.
+void AVFMediaPlayer::processDurationChange(qint64 duration)
+{
+    if (duration == m_duration)
+        return;
+
+    m_duration = duration;
+    Q_EMIT durationChanged(duration);
+}
+
+// Periodic tick from m_playbackTimer: emit the current position unless
+// playback is stopped.
+void AVFMediaPlayer::processPositionChange()
+{
+    if (m_state == QMediaPlayer::StoppedState)
+        return;
+
+    Q_EMIT positionChanged(position());
+}
+
+// Loading the media failed: drop any pending seek, mark the media invalid,
+// and report a format error to the client.
+void AVFMediaPlayer::processMediaLoadError()
+{
+    if (m_requestedPosition != -1) {
+        m_requestedPosition = -1;
+        Q_EMIT positionChanged(position());
+    }
+
+    Q_EMIT mediaStatusChanged((m_mediaStatus = QMediaPlayer::InvalidMedia));
+
+    Q_EMIT error(QMediaPlayer::FormatError, tr("Failed to load media"));
+}
+
+// The source QIODevice has data available: hand the stream URL to the observer.
+void AVFMediaPlayer::streamReady()
+{
+    setStreamURL(m_observer, m_resources.toEncoded());
+}
+
+// The source QIODevice was destroyed: disconnect from it before it dangles.
+void AVFMediaPlayer::streamDestroyed()
+{
+    resetStream(nullptr);
+}
+
+// Rebuilds the per-type track lists (audio/video/subtitle) from the current
+// AVPlayerItem, updates audio/video availability and video orientation, and
+// disables subtitles on the first load.
+void AVFMediaPlayer::updateTracks()
+{
+    // firstLoad: true only if no track list had content before the rebuild.
+    bool firstLoad = true;
+    for (int i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i) {
+        if (tracks[i].count())
+            firstLoad = false;
+        tracks[i].clear();
+        nativeTracks[i].clear();
+    }
+    AVPlayerItem *playerItem = [m_observer playerItem];
+    if (playerItem) {
+        // Check each track for audio and video content
+        // NOTE: this local NSArray shadows the `tracks` member; the member is
+        // accessed as this->tracks below for that reason.
+        NSArray *tracks = playerItem.tracks;
+        for (AVPlayerItemTrack *track in tracks) {
+            AVAssetTrack *assetTrack = track.assetTrack;
+            if (assetTrack) {
+                int qtTrack = -1;
+                if ([assetTrack.mediaType isEqualToString:AVMediaTypeAudio]) {
+                    qtTrack = QPlatformMediaPlayer::AudioStream;
+                    setAudioAvailable(true);
+                } else if ([assetTrack.mediaType isEqualToString:AVMediaTypeVideo]) {
+                    qtTrack = QPlatformMediaPlayer::VideoStream;
+                    setVideoAvailable(true);
+                    // First time we see this video track: derive rotation and
+                    // mirroring from its preferred transform.
+                    if (m_observer.videoTrack != track) {
+                        m_observer.videoTrack = track;
+                        bool isMirrored = false;
+                        QtVideo::Rotation orientation = QtVideo::Rotation::None;
+                        videoOrientationForAssetTrack(assetTrack, orientation, isMirrored);
+                        orientationChanged(orientation, isMirrored);
+                    }
+                }
+                else if ([assetTrack.mediaType isEqualToString:AVMediaTypeSubtitle]) {
+                    qtTrack = QPlatformMediaPlayer::SubtitleStream;
+                }
+                if (qtTrack != -1) {
+                    QMediaMetaData metaData = AVFMetaData::fromAssetTrack(assetTrack);
+                    this->tracks[qtTrack].append(metaData);
+                    nativeTracks[qtTrack].append(track);
+                }
+            }
+        }
+        // subtitles are disabled by default
+        if (firstLoad)
+            setActiveTrack(SubtitleStream, -1);
+    }
+    Q_EMIT tracksChanged();
+}
+
+void AVFMediaPlayer::setActiveTrack(QPlatformMediaPlayer::TrackType type, int index)
+{
+ const auto &t = nativeTracks[type];
+ if (type == QPlatformMediaPlayer::SubtitleStream) {
+ // subtitle streams are not always automatically enabled on macOS/iOS.
+ // this hack ensures they get enables and we actually get the text
+ AVPlayerItem *playerItem = m_observer.m_playerItem;
+ if (playerItem) {
+ AVAsset *asset = playerItem.asset;
+ if (!asset)
+ return;
+ AVMediaSelectionGroup *group = [asset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
+ if (!group)
+ return;
+ auto *options = group.options;
+ if (options.count)
+ [playerItem selectMediaOption:options.firstObject inMediaSelectionGroup:group];
+ }
+ }
+ for (int i = 0; i < t.count(); ++i)
+ t.at(i).enabled = (i == index);
+ emit activeTracksChanged();
+}
+
+// Returns the index of the enabled native track of the given type, or -1
+// if none is enabled.
+int AVFMediaPlayer::activeTrack(QPlatformMediaPlayer::TrackType type)
+{
+    const auto &trackList = nativeTracks[type];
+    const int count = trackList.count();
+    for (int idx = 0; idx != count; ++idx) {
+        if (trackList.at(idx).enabled)
+            return idx;
+    }
+    return -1;
+}
+
+// Returns the number of tracks of the given type in the current media.
+int AVFMediaPlayer::trackCount(QPlatformMediaPlayer::TrackType type)
+{
+    return nativeTracks[type].count();
+}
+
+// Returns the metadata of the trackNumber-th track of the given type, or an
+// empty QMediaMetaData for an out-of-range index.
+QMediaMetaData AVFMediaPlayer::trackMetaData(QPlatformMediaPlayer::TrackType type, int trackNumber)
+{
+    const auto &metaDataList = tracks[type];
+    const bool inRange = trackNumber >= 0 && trackNumber < metaDataList.count();
+    return inRange ? metaDataList.at(trackNumber) : QMediaMetaData();
+}
+
+// Swaps the source QIODevice, moving the readyRead/destroyed signal
+// connections from the old stream (if any) to the new one (if any).
+void AVFMediaPlayer::resetStream(QIODevice *stream)
+{
+    const auto wire = [this](QIODevice *device, bool establish) {
+        if (!device)
+            return;
+        if (establish) {
+            connect(device, &QIODevice::readyRead, this, &AVFMediaPlayer::streamReady);
+            connect(device, &QIODevice::destroyed, this, &AVFMediaPlayer::streamDestroyed);
+        } else {
+            disconnect(device, &QIODevice::readyRead, this, &AVFMediaPlayer::streamReady);
+            disconnect(device, &QIODevice::destroyed, this, &AVFMediaPlayer::streamDestroyed);
+        }
+    };
+
+    wire(m_mediaStream, false);
+    m_mediaStream = stream;
+    wire(m_mediaStream, true);
+}
+
+// Propagates the movie's native pixel size to the attached video sink,
+// if there is one.
+void AVFMediaPlayer::nativeSizeChanged(QSize size)
+{
+    if (m_videoSink)
+        m_videoSink->setNativeSize(size);
+}
+
+// Pushes the rotation/mirroring derived from the asset track down to the
+// video output, if one is attached.
+void AVFMediaPlayer::orientationChanged(QtVideo::Rotation rotation, bool mirrored)
+{
+    if (m_videoOutput) {
+        m_videoOutput->setVideoRotation(rotation);
+        m_videoOutput->setVideoMirrored(mirrored);
+    }
+}
+
+// Derives the rotation angle and mirroring flag from a video track's
+// preferredTransform. An identity transform means no rotation/mirroring.
+// Both out-parameters are always written.
+void AVFMediaPlayer::videoOrientationForAssetTrack(AVAssetTrack *videoTrack,
+                                                   QtVideo::Rotation &angle,
+                                                   bool &mirrored)
+{
+    angle = QtVideo::Rotation::None;
+    // BUG FIX: previously `mirrored` was only assigned on the flipped path,
+    // leaving the caller's value untouched otherwise.
+    mirrored = false;
+    if (videoTrack) {
+        CGAffineTransform transform = videoTrack.preferredTransform;
+        if (CGAffineTransformIsIdentity(transform))
+            return;
+        qreal delta = transform.a * transform.d - transform.b * transform.c; // determinant; < 0 means a flip
+        qreal radians = qAtan2(transform.b, transform.a);
+        qreal degrees = qRadiansToDegrees(radians);
+        qreal scaleX = (transform.a/qAbs(transform.a)) * qSqrt(qPow(transform.a, 2) + qPow(transform.c, 2));
+        // BUG FIX: this line used the integer abs() on a qreal, which
+        // truncates (and divides by zero for |d| < 1); use qAbs like scaleX.
+        qreal scaleY = (transform.d/qAbs(transform.d)) * qSqrt(qPow(transform.b, 2) + qPow(transform.d, 2));
+
+        if (delta < 0.0) { // flipped
+            if (scaleX < 0.0) {
+                // vertical flip
+                degrees = -degrees;
+            } else if (scaleY < 0.0) {
+                // horizontal flip
+                degrees = (180 + (int)degrees) % 360;
+            }
+            mirrored = true;
+        }
+
+        if (qFuzzyCompare(degrees, qreal(90)) || qFuzzyCompare(degrees, qreal(-270))) {
+            angle = QtVideo::Rotation::Clockwise90;
+        } else if (qFuzzyCompare(degrees, qreal(-90)) || qFuzzyCompare(degrees, qreal(270))) {
+            angle = QtVideo::Rotation::Clockwise270;
+        } else if (qFuzzyCompare(degrees, qreal(180)) || qFuzzyCompare(degrees, qreal(-180))) {
+            angle = QtVideo::Rotation::Clockwise180;
+        }
+    }
+}
+
+#include "moc_avfmediaplayer_p.cpp"
diff --git a/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer_p.h b/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer_p.h
new file mode 100644
index 000000000..d04ab0818
--- /dev/null
+++ b/src/plugins/multimedia/darwin/mediaplayer/avfmediaplayer_p.h
@@ -0,0 +1,151 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFMEDIAPLAYER_H
+#define AVFMEDIAPLAYER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/QObject>
+#include <QtCore/QByteArray>
+#include <QtCore/QSet>
+#include <QtCore/QResource>
+#include <QtCore/QUrl>
+#include <QtCore/QTimer>
+
+#include <private/qplatformmediaplayer_p.h>
+#include <QtMultimedia/QMediaPlayer>
+#include <QtMultimedia/QVideoFrame>
+
+Q_FORWARD_DECLARE_OBJC_CLASS(AVAsset);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVPlayerItemTrack);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVFMediaPlayerObserver);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVAssetTrack);
+
+QT_BEGIN_NAMESPACE
+
+class AVFMediaPlayer;
+class AVFVideoRendererControl;
+class AVFVideoSink;
+
+// AVFoundation-backed implementation of QPlatformMediaPlayer. Wraps an
+// AVPlayer (held by an AVFMediaPlayerObserver Objective-C helper) and maps
+// its callbacks onto the Qt media-player state machine.
+class AVFMediaPlayer : public QObject, public QPlatformMediaPlayer
+{
+    Q_OBJECT
+public:
+    AVFMediaPlayer(QMediaPlayer *parent);
+    virtual ~AVFMediaPlayer();
+
+    void setVideoSink(QVideoSink *sink) override;
+    void setVideoOutput(AVFVideoRendererControl *output);
+    AVAsset *currentAssetHandle();
+
+    QMediaPlayer::PlaybackState state() const override;
+    QMediaPlayer::MediaStatus mediaStatus() const override;
+
+    QUrl media() const override;
+    QIODevice *mediaStream() const override;
+    void setMedia(const QUrl &content, QIODevice *stream) override;
+
+    qint64 position() const override;
+    qint64 duration() const override;
+
+    float bufferProgress() const override;
+
+    bool isAudioAvailable() const override;
+    bool isVideoAvailable() const override;
+
+    bool isSeekable() const override;
+    QMediaTimeRange availablePlaybackRanges() const override;
+
+    qreal playbackRate() const override;
+
+    void setAudioOutput(QPlatformAudioOutput *output) override;
+    QPlatformAudioOutput *m_audioOutput = nullptr; // not owned
+
+    QMediaMetaData metaData() const override;
+
+    // Extracts rotation and mirroring from a track's preferredTransform.
+    static void videoOrientationForAssetTrack(AVAssetTrack *track,
+                                              QtVideo::Rotation &angle,
+                                              bool &mirrored);
+
+public Q_SLOTS:
+    void setPlaybackRate(qreal rate) override;
+    void nativeSizeChanged(QSize size);
+
+    void setPosition(qint64 pos) override;
+
+    void play() override;
+    void pause() override;
+    void stop() override;
+
+    void setVolume(float volume);
+    void setMuted(bool muted);
+    void audioOutputChanged();
+
+    // Callbacks invoked by AVFMediaPlayerObserver on player/item events.
+    void processEOS();
+    void processLoadStateChange(QMediaPlayer::PlaybackState newState);
+    void processPositionChange();
+    void processMediaLoadError();
+
+    void processLoadStateChange();
+    void processLoadStateFailure();
+
+    void processBufferStateChange(int bufferProgress);
+
+    void processDurationChange(qint64 duration);
+
+    void streamReady();
+    void streamDestroyed();
+    void updateTracks();
+    void setActiveTrack(QPlatformMediaPlayer::TrackType type, int index) override;
+    int activeTrack(QPlatformMediaPlayer::TrackType type) override;
+    int trackCount(TrackType) override;
+    QMediaMetaData trackMetaData(TrackType type, int trackNumber) override;
+
+public:
+    // Per-type track metadata and the corresponding native AVPlayerItemTracks.
+    QList<QMediaMetaData> tracks[QPlatformMediaPlayer::NTrackTypes];
+    QList<AVPlayerItemTrack *> nativeTracks[QPlatformMediaPlayer::NTrackTypes];
+
+private:
+    void setAudioAvailable(bool available);
+    void setVideoAvailable(bool available);
+    void setSeekable(bool seekable);
+    void resetStream(QIODevice *stream = nullptr);
+
+    void orientationChanged(QtVideo::Rotation rotation, bool mirrored);
+
+    AVFVideoRendererControl *m_videoOutput = nullptr;
+    AVFVideoSink *m_videoSink = nullptr;
+
+    QMediaPlayer::PlaybackState m_state;
+    QMediaPlayer::MediaStatus m_mediaStatus;
+    QIODevice *m_mediaStream;      // optional stream source, not owned
+    QUrl m_resources;
+    QMediaMetaData m_metaData;
+
+    qreal m_rate;
+    qint64 m_requestedPosition;    // pending seek in ms, -1 if none
+
+    qint64 m_duration;             // cached duration in ms
+    int m_bufferProgress;          // 0-100
+    bool m_videoAvailable;
+    bool m_audioAvailable;
+    bool m_seekable;
+
+    AVFMediaPlayerObserver *m_observer; // Objective-C helper owning the AVPlayer
+
+    QTimer m_playbackTimer;        // drives periodic positionChanged emissions
+};
+
+QT_END_NAMESPACE
+
+#endif // AVFMEDIAPLAYER_H
diff --git a/src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol.mm b/src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol.mm
new file mode 100644
index 000000000..66687c931
--- /dev/null
+++ b/src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol.mm
@@ -0,0 +1,222 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfvideorenderercontrol_p.h"
+#include "avfdisplaylink_p.h"
+#include <avfvideobuffer_p.h>
+#include "qavfhelpers_p.h"
+#include "private/qvideoframe_p.h"
+
+#include <QtMultimedia/qvideoframeformat.h>
+
+#include <avfvideosink_p.h>
+#include <rhi/qrhi.h>
+
+#include <QtCore/qdebug.h>
+
+#import <AVFoundation/AVFoundation.h>
+#include <CoreVideo/CVPixelBuffer.h>
+#include <CoreVideo/CVImageBuffer.h>
+
+QT_USE_NAMESPACE
+
+// Receives legible (subtitle) output from an AVPlayerItemLegibleOutput and
+// forwards the combined text to the renderer control.
+@interface SubtitleDelegate : NSObject <AVPlayerItemLegibleOutputPushDelegate>
+{
+    AVFVideoRendererControl *m_renderer; // not owned
+}
+
+- (void)legibleOutput:(AVPlayerItemLegibleOutput *)output
+        didOutputAttributedStrings:(NSArray<NSAttributedString *> *)strings
+        nativeSampleBuffers:(NSArray *)nativeSamples
+        forItemTime:(CMTime)itemTime;
+
+@end
+
+@implementation SubtitleDelegate
+
+// Designated initializer: remembers the renderer to forward subtitles to.
+-(id)initWithRenderer: (AVFVideoRendererControl *)renderer
+{
+    if (!(self = [super init]))
+        return nil;
+
+    m_renderer = renderer;
+
+    return self;
+}
+
+// Joins all attributed strings of the current cue (one per line) and pushes
+// the plain text to the renderer; an empty array clears the subtitle.
+- (void)legibleOutput:(AVPlayerItemLegibleOutput *)output
+        didOutputAttributedStrings:(NSArray<NSAttributedString *> *)strings
+        nativeSampleBuffers:(NSArray *)nativeSamples
+        forItemTime:(CMTime)itemTime
+{
+    QString text;
+    for (NSAttributedString *s : strings) {
+        if (!text.isEmpty())
+            text += QChar::LineSeparator;
+        text += QString::fromNSString(s.string);
+    }
+    m_renderer->setSubtitleText(text);
+}
+
+@end
+
+
+// Creates the renderer and hooks the display link so each vsync tick pulls
+// a new frame via updateVideoFrame().
+AVFVideoRendererControl::AVFVideoRendererControl(QObject *parent)
+    : QObject(parent)
+{
+    m_displayLink = new AVFDisplayLink(this);
+    connect(m_displayLink, SIGNAL(tick(CVTimeStamp)), SLOT(updateVideoFrame(CVTimeStamp)));
+}
+
+// Stops the display link and releases the Objective-C outputs/delegate.
+AVFVideoRendererControl::~AVFVideoRendererControl()
+{
+#ifdef QT_DEBUG_AVF
+    qDebug() << Q_FUNC_INFO;
+#endif
+    m_displayLink->stop();
+    // Sending -release to nil is a no-op in Objective-C, so no null checks
+    // are needed here.
+    [m_videoOutput release];
+    [m_subtitleOutput release];
+    [m_subtitleDelegate release];
+}
+
+// Reacts to sink/layer configuration changes: stops frame delivery when no
+// layer is attached, otherwise (re)starts the display link and republishes
+// the native size.
+void AVFVideoRendererControl::reconfigure()
+{
+#ifdef QT_DEBUG_AVF
+    qDebug() << "reconfigure";
+#endif
+    if (!m_layer) {
+        m_displayLink->stop();
+        return;
+    }
+
+    // NOTE(review): the mutex is held for the rest of this function;
+    // presumably it guards against the display-link callback — confirm.
+    QMutexLocker locker(&m_mutex);
+
+    m_displayLink->start();
+
+    nativeSizeChanged();
+}
+
+// Switches to a new AVPlayerLayer (or detaches with nullptr). Removes our
+// video/subtitle outputs from the old layer's player item and clears the
+// sink's last frame when detaching, then delegates to the base class.
+void AVFVideoRendererControl::setLayer(CALayer *layer)
+{
+    if (m_layer == layer)
+        return;
+
+    AVPlayerLayer *plLayer = playerLayer();
+    if (plLayer) {
+        if (m_videoOutput)
+            [[[plLayer player] currentItem] removeOutput:m_videoOutput];
+
+        if (m_subtitleOutput)
+            [[[plLayer player] currentItem] removeOutput:m_subtitleOutput];
+    }
+
+    // Detaching: make sure the sink does not keep showing the last frame.
+    if (!layer && m_sink)
+        m_sink->setVideoFrame(QVideoFrame());
+
+    AVFVideoSinkInterface::setLayer(layer);
+}
+
+// Stores the rotation to stamp onto every frame delivered to the sink.
+void AVFVideoRendererControl::setVideoRotation(QtVideo::Rotation rotation)
+{
+    m_rotation = rotation;
+}
+
+// Stores the mirroring flag to stamp onto every frame delivered to the sink.
+void AVFVideoRendererControl::setVideoMirrored(bool mirrored)
+{
+    m_mirrored = mirrored;
+}
+
+// Display-link tick: pulls the current pixel buffer from the player layer,
+// wraps it in a QVideoFrame (with the cached rotation/mirroring) and hands
+// it to the sink. Bails out early when no sink/layer is attached or the
+// layer is not ready for display.
+void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
+{
+    Q_UNUSED(ts);
+
+    if (!m_sink)
+        return;
+
+    if (!m_layer)
+        return;
+
+    auto *layer = playerLayer();
+    if (!layer.readyForDisplay)
+        return;
+    nativeSizeChanged();
+
+    QVideoFrame frame;
+    size_t width, height;
+    CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(width, height);
+    if (!pixelBuffer)
+        return;
+    // AVFVideoBuffer retains the pixel buffer, so release our copy reference.
+    auto buffer = std::make_unique<AVFVideoBuffer>(this, pixelBuffer);
+    CVPixelBufferRelease(pixelBuffer);
+
+    // BUG FIX: the format must be read from `buffer` before it is moved into
+    // createFrame(). The old single-expression form evaluated
+    // buffer->videoFormat() and std::move(buffer) in unspecified order, so
+    // the unique_ptr could already be empty when videoFormat() was called.
+    const QVideoFrameFormat format = buffer->videoFormat();
+    frame = QVideoFramePrivate::createFrame(std::move(buffer), format);
+    frame.setRotation(m_rotation);
+    frame.setMirrored(m_mirrored);
+    m_sink->setVideoFrame(frame);
+}
+
+// Lazily installs the video and subtitle outputs on the current player item
+// and copies the pixel buffer for the current host time. Returns a +1
+// retained CVPixelBufferRef (caller must release), or nullptr if no new
+// frame is available.
+// NOTE(review): width/height are only written on the success path; callers
+// must not read them when nullptr is returned.
+CVPixelBufferRef AVFVideoRendererControl::copyPixelBufferFromLayer(size_t& width, size_t& height)
+{
+    AVPlayerLayer *layer = playerLayer();
+    //Is layer valid
+    if (!layer) {
+#ifdef QT_DEBUG_AVF
+        qWarning("copyPixelBufferFromLayer: invalid layer");
+#endif
+        return nullptr;
+    }
+
+    AVPlayerItem * item = [[layer player] currentItem];
+
+    if (!m_videoOutput) {
+        if (!m_outputSettings)
+            setOutputSettings();
+        m_videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:m_outputSettings];
+        [m_videoOutput setDelegate:nil queue:nil];
+    }
+    if (!m_subtitleOutput) {
+        m_subtitleOutput = [[AVPlayerItemLegibleOutput alloc] init];
+        m_subtitleDelegate = [[SubtitleDelegate alloc] initWithRenderer:this];
+        // Subtitle callbacks are delivered on the main queue.
+        [m_subtitleOutput setDelegate:m_subtitleDelegate queue:dispatch_get_main_queue()];
+    }
+    // (Re)attach the outputs — the current item may have changed.
+    if (![item.outputs containsObject:m_videoOutput])
+        [item addOutput:m_videoOutput];
+    if (![item.outputs containsObject:m_subtitleOutput])
+        [item addOutput:m_subtitleOutput];
+
+    CFTimeInterval currentCAFrameTime = CACurrentMediaTime();
+    CMTime currentCMFrameTime = [m_videoOutput itemTimeForHostTime:currentCAFrameTime];
+    // happens when buffering / loading
+    if (CMTimeCompare(currentCMFrameTime, kCMTimeZero) < 0) {
+        return nullptr;
+    }
+
+    if (![m_videoOutput hasNewPixelBufferForItemTime:currentCMFrameTime])
+        return nullptr;
+
+    CVPixelBufferRef pixelBuffer = [m_videoOutput copyPixelBufferForItemTime:currentCMFrameTime
+                                                          itemTimeForDisplay:nil];
+    if (!pixelBuffer) {
+#ifdef QT_DEBUG_AVF
+        qWarning("copyPixelBufferForItemTime returned nil");
+        CMTimeShow(currentCMFrameTime);
+#endif
+        return nullptr;
+    }
+
+    width = CVPixelBufferGetWidth(pixelBuffer);
+    height = CVPixelBufferGetHeight(pixelBuffer);
+//    auto f = CVPixelBufferGetPixelFormatType(pixelBuffer);
+//    char fmt[5];
+//    memcpy(fmt, &f, 4);
+//    fmt[4] = 0;
+//    qDebug() << "copyPixelBuffer" << f << fmt << width << height;
+    return pixelBuffer;
+}
+
+#include "moc_avfvideorenderercontrol_p.cpp"
diff --git a/src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol_p.h b/src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol_p.h
new file mode 100644
index 000000000..177114127
--- /dev/null
+++ b/src/plugins/multimedia/darwin/mediaplayer/avfvideorenderercontrol_p.h
@@ -0,0 +1,72 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFVIDEORENDERERCONTROL_H
+#define AVFVIDEORENDERERCONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/QObject>
+#include <QtCore/QMutex>
+#include <QtCore/QSize>
+
+#include <avfvideosink_p.h>
+
+#include <CoreVideo/CVBase.h>
+#include <CoreVideo/CVPixelBuffer.h>
+
+Q_FORWARD_DECLARE_OBJC_CLASS(CALayer);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVPlayerItemVideoOutput);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVPlayerItemLegibleOutput);
+Q_FORWARD_DECLARE_OBJC_CLASS(SubtitleDelegate);
+
+QT_BEGIN_NAMESPACE
+
+class AVFDisplayLink;
+
+// Pulls decoded frames from an AVPlayerLayer via AVPlayerItemVideoOutput on
+// display-link ticks and pushes them (plus subtitle text) to the Qt video sink.
+class AVFVideoRendererControl : public QObject, public AVFVideoSinkInterface
+{
+    Q_OBJECT
+public:
+    explicit AVFVideoRendererControl(QObject *parent = nullptr);
+    virtual ~AVFVideoRendererControl();
+
+    // AVFVideoSinkInterface
+    void reconfigure() override;
+    void setLayer(CALayer *layer) override;
+
+    void setVideoRotation(QtVideo::Rotation);
+    void setVideoMirrored(bool mirrored);
+
+    // Called from the subtitle delegate on the main queue. BUG FIX: the sink
+    // may have been detached since the legible output was installed, so guard
+    // against a null m_sink (all other m_sink uses are guarded too).
+    void setSubtitleText(const QString &subtitle)
+    {
+        if (m_sink)
+            m_sink->setSubtitleText(subtitle);
+    }
+private Q_SLOTS:
+    void updateVideoFrame(const CVTimeStamp &ts);
+
+private:
+    AVPlayerLayer *playerLayer() const { return static_cast<AVPlayerLayer *>(m_layer); }
+    // Returns a +1 retained pixel buffer for the current time, or nullptr.
+    CVPixelBufferRef copyPixelBufferFromLayer(size_t& width, size_t& height);
+
+    QMutex m_mutex;
+    AVFDisplayLink *m_displayLink = nullptr;
+    AVPlayerItemVideoOutput *m_videoOutput = nullptr;
+    AVPlayerItemLegibleOutput *m_subtitleOutput = nullptr;
+    SubtitleDelegate *m_subtitleDelegate = nullptr;
+    QtVideo::Rotation m_rotation = QtVideo::Rotation::None;
+    bool m_mirrored = false;
+};
+
+QT_END_NAMESPACE
+
+#endif // AVFVIDEORENDERERCONTROL_H
diff --git a/src/plugins/multimedia/darwin/qavfhelpers.mm b/src/plugins/multimedia/darwin/qavfhelpers.mm
new file mode 100644
index 000000000..51ae9eedc
--- /dev/null
+++ b/src/plugins/multimedia/darwin/qavfhelpers.mm
@@ -0,0 +1,143 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include <qavfhelpers_p.h>
+#include <CoreMedia/CMFormatDescription.h>
+#include <CoreVideo/CoreVideo.h>
+#include <qdebug.h>
+
+#import <CoreVideo/CoreVideo.h>
+
+namespace {
+
+using PixelFormat = QVideoFrameFormat::PixelFormat;
+using ColorRange = QVideoFrameFormat::ColorRange;
+
+// Bidirectional mapping between CoreVideo pixel formats and Qt pixel
+// formats, including the color range implied by the CV constant.
+// clang-format off
+constexpr std::tuple<CvPixelFormat, PixelFormat, ColorRange> PixelFormatMap[] = {
+    { kCVPixelFormatType_32ARGB, PixelFormat::Format_ARGB8888, ColorRange::ColorRange_Unknown },
+    { kCVPixelFormatType_32BGRA, PixelFormat::Format_BGRA8888, ColorRange::ColorRange_Unknown },
+    { kCVPixelFormatType_420YpCbCr8Planar, PixelFormat::Format_YUV420P, ColorRange::ColorRange_Unknown },
+    { kCVPixelFormatType_420YpCbCr8PlanarFullRange, PixelFormat::Format_YUV420P, ColorRange::ColorRange_Full },
+    { kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, PixelFormat::Format_NV12, ColorRange::ColorRange_Video },
+    { kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, PixelFormat::Format_NV12, ColorRange::ColorRange_Full },
+    { kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange, PixelFormat::Format_P010, ColorRange::ColorRange_Video },
+    { kCVPixelFormatType_420YpCbCr10BiPlanarFullRange, PixelFormat::Format_P010, ColorRange::ColorRange_Full },
+    { kCVPixelFormatType_422YpCbCr8, PixelFormat::Format_UYVY, ColorRange::ColorRange_Video },
+    { kCVPixelFormatType_422YpCbCr8_yuvs, PixelFormat::Format_YUYV, ColorRange::ColorRange_Video },
+    { kCVPixelFormatType_OneComponent8, PixelFormat::Format_Y8, ColorRange::ColorRange_Unknown },
+    { kCVPixelFormatType_OneComponent16, PixelFormat::Format_Y16, ColorRange::ColorRange_Unknown },
+
+    // The cases with kCMVideoCodecType_JPEG/kCMVideoCodecType_JPEG_OpenDML as cv pixel format should be investigated.
+    // Matching kCMVideoCodecType_JPEG_OpenDML to ColorRange_Full is a little hack to distinguish between
+    // kCMVideoCodecType_JPEG and kCMVideoCodecType_JPEG_OpenDML.
+    { kCMVideoCodecType_JPEG, PixelFormat::Format_Jpeg, ColorRange::ColorRange_Unknown },
+    { kCMVideoCodecType_JPEG_OpenDML, PixelFormat::Format_Jpeg, ColorRange::ColorRange_Full }
+};
+// clang-format on
+
+// Finds the first map row whose fields of types Args... all equal the given
+// args, and returns that row's field of type Type (or defaultValue if no row
+// matches). Lookup direction is thus chosen purely by the argument types.
+template<typename Type, typename... Args>
+Type findInPixelFormatMap(Type defaultValue, Args... args)
+{
+    auto checkElement = [&](const auto &element) {
+        return ((args == std::get<Args>(element)) && ...);
+    };
+
+    auto found = std::find_if(std::begin(PixelFormatMap), std::end(PixelFormatMap), checkElement);
+    return found == std::end(PixelFormatMap) ? defaultValue : std::get<Type>(*found);
+}
+
+}
+
+// Returns the color range implied by a CoreVideo pixel format constant
+// (Unknown when the format is not in the map).
+ColorRange QAVFHelpers::colorRangeForCVPixelFormat(CvPixelFormat cvPixelFormat)
+{
+    return findInPixelFormatMap(ColorRange::ColorRange_Unknown, cvPixelFormat);
+}
+
+// Maps a CoreVideo pixel format to the Qt pixel format
+// (Format_Invalid when unsupported).
+PixelFormat QAVFHelpers::fromCVPixelFormat(CvPixelFormat cvPixelFormat)
+{
+    return findInPixelFormatMap(PixelFormat::Format_Invalid, cvPixelFormat);
+}
+
+// Maps a Qt pixel format plus color range back to the CoreVideo constant
+// (CvPixelFormatInvalid when there is no exact match).
+CvPixelFormat QAVFHelpers::toCVPixelFormat(PixelFormat pixelFmt, ColorRange colorRange)
+{
+    return findInPixelFormatMap(CvPixelFormatInvalid, pixelFmt, colorRange);
+}
+
+// Builds a QVideoFrameFormat describing the given CVImageBuffer: pixel
+// format, size, color range, color space and transfer function (from the
+// buffer's CV attachments). When openGL is true only BGRA buffers are
+// supported and the format is forced to Format_SamplerRect.
+QVideoFrameFormat QAVFHelpers::videoFormatForImageBuffer(CVImageBufferRef buffer, bool openGL)
+{
+    auto cvPixelFormat = CVPixelBufferGetPixelFormatType(buffer);
+    auto pixelFormat = fromCVPixelFormat(cvPixelFormat);
+    if (openGL) {
+        if (cvPixelFormat == kCVPixelFormatType_32BGRA)
+            pixelFormat = QVideoFrameFormat::Format_SamplerRect;
+        else
+            qWarning() << "Accelerated macOS OpenGL video supports BGRA only, got CV pixel format"
+                       << cvPixelFormat;
+    }
+
+    size_t width = CVPixelBufferGetWidth(buffer);
+    size_t height = CVPixelBufferGetHeight(buffer);
+
+    QVideoFrameFormat format(QSize(width, height), pixelFormat);
+
+    auto colorSpace = QVideoFrameFormat::ColorSpace_Undefined;
+    auto colorTransfer = QVideoFrameFormat::ColorTransfer_Unknown;
+
+    if (CFStringRef cSpace = reinterpret_cast<CFStringRef>(
+                CVBufferGetAttachment(buffer, kCVImageBufferYCbCrMatrixKey, nullptr))) {
+        if (CFEqual(cSpace, kCVImageBufferYCbCrMatrix_ITU_R_709_2)) {
+            colorSpace = QVideoFrameFormat::ColorSpace_BT709;
+        } else if (CFEqual(cSpace, kCVImageBufferYCbCrMatrix_ITU_R_601_4)
+                   || CFEqual(cSpace, kCVImageBufferYCbCrMatrix_SMPTE_240M_1995)) {
+            colorSpace = QVideoFrameFormat::ColorSpace_BT601;
+        } else if (@available(macOS 10.11, iOS 9.0, *)) {
+            if (CFEqual(cSpace, kCVImageBufferYCbCrMatrix_ITU_R_2020)) {
+                colorSpace = QVideoFrameFormat::ColorSpace_BT2020;
+            }
+        }
+    }
+
+    if (CFStringRef cTransfer = reinterpret_cast<CFStringRef>(
+                CVBufferGetAttachment(buffer, kCVImageBufferTransferFunctionKey, nullptr))) {
+
+        if (CFEqual(cTransfer, kCVImageBufferTransferFunction_ITU_R_709_2)) {
+            colorTransfer = QVideoFrameFormat::ColorTransfer_BT709;
+        } else if (CFEqual(cTransfer, kCVImageBufferTransferFunction_SMPTE_240M_1995)) {
+            colorTransfer = QVideoFrameFormat::ColorTransfer_BT601;
+        } else if (CFEqual(cTransfer, kCVImageBufferTransferFunction_sRGB)) {
+            colorTransfer = QVideoFrameFormat::ColorTransfer_Gamma22;
+        } else if (CFEqual(cTransfer, kCVImageBufferTransferFunction_UseGamma)) {
+            auto gamma = reinterpret_cast<CFNumberRef>(
+                    CVBufferGetAttachment(buffer, kCVImageBufferGammaLevelKey, nullptr));
+            // BUG FIX: CFNumberGetValue requires valuePtr to match the given
+            // type. Previously kCFNumberFloat32Type was used with a double*,
+            // which fills only 4 of the 8 bytes and yields a garbage value.
+            // Also guard against a missing gamma attachment (NULL).
+            double g = 2.2; // common display gamma as fallback
+            if (gamma)
+                CFNumberGetValue(gamma, kCFNumberFloat64Type, &g);
+            // These are best fit values given what we have in our enum
+            if (g < 0.8)
+                ; // unknown
+            else if (g < 1.5)
+                colorTransfer = QVideoFrameFormat::ColorTransfer_Linear;
+            else if (g < 2.1)
+                colorTransfer = QVideoFrameFormat::ColorTransfer_BT709;
+            else if (g < 2.5)
+                colorTransfer = QVideoFrameFormat::ColorTransfer_Gamma22;
+            else if (g < 3.2)
+                colorTransfer = QVideoFrameFormat::ColorTransfer_Gamma28;
+        }
+        if (@available(macOS 10.12, iOS 11.0, *)) {
+            // ITU-R 2020 is mapped to the BT709 transfer enum value here.
+            if (CFEqual(cTransfer, kCVImageBufferTransferFunction_ITU_R_2020))
+                colorTransfer = QVideoFrameFormat::ColorTransfer_BT709;
+        }
+        if (@available(macOS 10.12, iOS 11.0, *)) {
+            if (CFEqual(cTransfer, kCVImageBufferTransferFunction_ITU_R_2100_HLG)) {
+                colorTransfer = QVideoFrameFormat::ColorTransfer_STD_B67;
+            } else if (CFEqual(cTransfer, kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ)) {
+                colorTransfer = QVideoFrameFormat::ColorTransfer_ST2084;
+            }
+        }
+    }
+
+    format.setColorRange(colorRangeForCVPixelFormat(cvPixelFormat));
+    format.setColorSpace(colorSpace);
+    format.setColorTransfer(colorTransfer);
+    return format;
+}
diff --git a/src/plugins/multimedia/darwin/qavfhelpers_p.h b/src/plugins/multimedia/darwin/qavfhelpers_p.h
new file mode 100644
index 000000000..8133d5500
--- /dev/null
+++ b/src/plugins/multimedia/darwin/qavfhelpers_p.h
@@ -0,0 +1,41 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QAVFHELPERS_H
+#define QAVFHELPERS_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtMultimedia/qvideoframe.h>
+#include <qvideoframeformat.h>
+
+#include <CoreVideo/CVBase.h>
+#include <CoreVideo/CVPixelBuffer.h>
+#include <CoreVideo/CVImageBuffer.h>
+
+QT_BEGIN_NAMESPACE
+
+using CvPixelFormat = unsigned;
+constexpr CvPixelFormat CvPixelFormatInvalid = 0;
+
+namespace QAVFHelpers
+{
+QVideoFrameFormat::ColorRange colorRangeForCVPixelFormat(CvPixelFormat cvPixelFormat);
+QVideoFrameFormat::PixelFormat fromCVPixelFormat(CvPixelFormat cvPixelFormat);
+CvPixelFormat toCVPixelFormat(QVideoFrameFormat::PixelFormat pixFmt,
+ QVideoFrameFormat::ColorRange colorRange);
+
+QVideoFrameFormat videoFormatForImageBuffer(CVImageBufferRef buffer, bool openGL = false);
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/qdarwinformatsinfo.mm b/src/plugins/multimedia/darwin/qdarwinformatsinfo.mm
new file mode 100644
index 000000000..582060a6c
--- /dev/null
+++ b/src/plugins/multimedia/darwin/qdarwinformatsinfo.mm
@@ -0,0 +1,211 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qdarwinformatsinfo_p.h"
+#include <AVFoundation/AVFoundation.h>
+#include <qdebug.h>
+
+QT_BEGIN_NAMESPACE
+
+static struct {
+ const char *name;
+ QMediaFormat::FileFormat value;
+} mediaContainerMap[] = {
+ { "video/x-ms-asf", QMediaFormat::WMV },
+ { "video/avi", QMediaFormat::AVI },
+ { "video/x-matroska", QMediaFormat::Matroska },
+ { "video/mp4", QMediaFormat::MPEG4 },
+ { "video/quicktime", QMediaFormat::QuickTime },
+ { "video/ogg", QMediaFormat::Ogg },
+ { "audio/mp3", QMediaFormat::MP3 },
+ { "audio/flac", QMediaFormat::FLAC },
+ { nullptr, QMediaFormat::UnspecifiedFormat }
+};
+
+static struct {
+ const char *name;
+ QMediaFormat::VideoCodec value;
+} videoCodecMap[] = {
+ // See CMVideoCodecType for the four character code names of codecs
+ { "; codecs=\"mp1v\"", QMediaFormat::VideoCodec::MPEG1 },
+ { "; codecs=\"mp2v\"", QMediaFormat::VideoCodec::MPEG2 },
+ { "; codecs=\"mp4v\"", QMediaFormat::VideoCodec::MPEG4 },
+ { "; codecs=\"avc1\"", QMediaFormat::VideoCodec::H264 },
+ { "; codecs=\"hvc1\"", QMediaFormat::VideoCodec::H265 },
+ { "; codecs=\"vp09\"", QMediaFormat::VideoCodec::VP9 },
+ { "; codecs=\"av01\"", QMediaFormat::VideoCodec::AV1 }, // ### unverified that AVFoundation accepts the av01 fourcc
+ { "; codecs=\"jpeg\"", QMediaFormat::VideoCodec::MotionJPEG },
+ { nullptr, QMediaFormat::VideoCodec::Unspecified }
+};
+
+static struct {
+ const char *name;
+ QMediaFormat::AudioCodec value;
+} audioCodecMap[] = {
+ // AudioFile.h
+ // ### The next two entries do not work, probably because they contain a leading period and a trailing space respectively, and AVFoundation doesn't like that
+ // We know they are supported on all Apple platforms, so we'll add them manually below
+// { "; codecs=\".mp3\"", QMediaFormat::AudioCodec::MP3 },
+// { "; codecs=\"aac \"", QMediaFormat::AudioCodec::AAC },
+ { "; codecs=\"ac-3\"", QMediaFormat::AudioCodec::AC3 },
+ { "; codecs=\"ec-3\"", QMediaFormat::AudioCodec::EAC3 },
+ { "; codecs=\"flac\"", QMediaFormat::AudioCodec::FLAC },
+ { "; codecs=\"alac\"", QMediaFormat::AudioCodec::ALAC },
+ { "; codecs=\"opus\"", QMediaFormat::AudioCodec::Opus },
+ { nullptr, QMediaFormat::AudioCodec::Unspecified },
+};
+
+QDarwinFormatInfo::QDarwinFormatInfo()
+{
+ auto avtypes = [AVURLAsset audiovisualMIMETypes];
+ for (AVFileType filetype in avtypes) {
+ auto *m = mediaContainerMap;
+ while (m->name) {
+ if (strcmp(filetype.UTF8String, m->name)) {
+ ++m;
+ continue;
+ }
+
+ QList<QMediaFormat::VideoCodec> video;
+ QList<QMediaFormat::AudioCodec> audio;
+
+ auto *v = videoCodecMap;
+ while (v->name) {
+ QByteArray extendedMimetype = m->name;
+ extendedMimetype += v->name;
+ if ([AVURLAsset isPlayableExtendedMIMEType:[NSString stringWithUTF8String:extendedMimetype.constData()]])
+ video << v->value;
+ ++v;
+ }
+
+ auto *a = audioCodecMap;
+ while (a->name) {
+ QByteArray extendedMimetype = m->name;
+ extendedMimetype += a->name;
+ if ([AVURLAsset isPlayableExtendedMIMEType:[NSString stringWithUTF8String:extendedMimetype.constData()]])
+ audio << a->value;
+ ++a;
+ }
+ // Added manually, see comment in the list above
+ if (m->value <= QMediaFormat::AAC)
+ audio << QMediaFormat::AudioCodec::AAC;
+ if (m->value < QMediaFormat::AAC || m->value == QMediaFormat::MP3)
+ audio << QMediaFormat::AudioCodec::MP3;
+
+ decoders << CodecMap{ m->value, audio, video };
+ ++m;
+ }
+ }
+
+ // seems AVFoundation only supports those for encoding
+ encoders = {
+ { QMediaFormat::MPEG4,
+ { QMediaFormat::AudioCodec::AAC, QMediaFormat::AudioCodec::ALAC },
+ { QMediaFormat::VideoCodec::H264, QMediaFormat::VideoCodec::H265, QMediaFormat::VideoCodec::MotionJPEG } },
+ { QMediaFormat::QuickTime,
+ { QMediaFormat::AudioCodec::AAC, QMediaFormat::AudioCodec::ALAC },
+ { QMediaFormat::VideoCodec::H264, QMediaFormat::VideoCodec::H265, QMediaFormat::VideoCodec::MotionJPEG } },
+ { QMediaFormat::Mpeg4Audio,
+ { QMediaFormat::AudioCodec::AAC },
+ {} },
+ { QMediaFormat::Wave,
+ { QMediaFormat::AudioCodec::Wave },
+ {} },
+ };
+
+ // ### only JPEG is exposed for now; extend when more image formats are supported
+ imageFormats << QImageCapture::JPEG;
+}
+
+QDarwinFormatInfo::~QDarwinFormatInfo()
+{
+}
+
+int QDarwinFormatInfo::audioFormatForCodec(QMediaFormat::AudioCodec codec)
+{
+ int codecId = kAudioFormatMPEG4AAC;
+ switch (codec) {
+ case QMediaFormat::AudioCodec::Unspecified:
+ case QMediaFormat::AudioCodec::DolbyTrueHD:
+ case QMediaFormat::AudioCodec::Vorbis:
+ case QMediaFormat::AudioCodec::WMA:
+ // Unsupported, shouldn't happen. Fall back to AAC
+ case QMediaFormat::AudioCodec::AAC:
+ codecId = kAudioFormatMPEG4AAC;
+ break;
+ case QMediaFormat::AudioCodec::MP3:
+ codecId = kAudioFormatMPEGLayer3;
+ break;
+ case QMediaFormat::AudioCodec::AC3:
+ codecId = kAudioFormatAC3;
+ break;
+ case QMediaFormat::AudioCodec::EAC3:
+ codecId = kAudioFormatEnhancedAC3;
+ break;
+ case QMediaFormat::AudioCodec::FLAC:
+ codecId = kAudioFormatFLAC;
+ break;
+ case QMediaFormat::AudioCodec::ALAC:
+ codecId = kAudioFormatAppleLossless;
+ break;
+ case QMediaFormat::AudioCodec::Opus:
+ codecId = kAudioFormatOpus;
+ break;
+ case QMediaFormat::AudioCodec::Wave:
+ codecId = kAudioFormatLinearPCM;
+ }
+ return codecId;
+}
+
+NSString *QDarwinFormatInfo::videoFormatForCodec(QMediaFormat::VideoCodec codec)
+{
+ const char *c = "hvc1"; // fallback is H265
+ switch (codec) {
+ case QMediaFormat::VideoCodec::Unspecified:
+ case QMediaFormat::VideoCodec::VP8:
+ case QMediaFormat::VideoCodec::H265:
+ case QMediaFormat::VideoCodec::AV1:
+ case QMediaFormat::VideoCodec::Theora:
+ case QMediaFormat::VideoCodec::WMV:
+ break;
+
+ case QMediaFormat::VideoCodec::MPEG1:
+ c = "mp1v";
+ break;
+ case QMediaFormat::VideoCodec::MPEG2:
+ c = "mp2v";
+ break;
+ case QMediaFormat::VideoCodec::MPEG4:
+ c = "mp4v";
+ break;
+ case QMediaFormat::VideoCodec::H264:
+ c = "avc1";
+ break;
+ case QMediaFormat::VideoCodec::VP9:
+ c = "vp09";
+ break;
+ case QMediaFormat::VideoCodec::MotionJPEG:
+ c = "jpeg";
+ }
+ return [NSString stringWithUTF8String:c];
+}
+
+NSString *QDarwinFormatInfo::avFileTypeForContainerFormat(QMediaFormat::FileFormat container)
+{
+ switch (container) {
+ case QMediaFormat::MPEG4:
+ return AVFileTypeMPEG4;
+ case QMediaFormat::QuickTime:
+ return AVFileTypeQuickTimeMovie;
+ case QMediaFormat::MP3:
+ return AVFileTypeMPEGLayer3;
+ case QMediaFormat::Mpeg4Audio:
+ return AVFileTypeAppleM4A;
+ case QMediaFormat::Wave:
+ return AVFileTypeWAVE;
+ default:
+ return AVFileTypeQuickTimeMovie;
+ }
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/darwin/qdarwinformatsinfo_p.h b/src/plugins/multimedia/darwin/qdarwinformatsinfo_p.h
new file mode 100644
index 000000000..e01486286
--- /dev/null
+++ b/src/plugins/multimedia/darwin/qdarwinformatsinfo_p.h
@@ -0,0 +1,38 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QDARWINFORMATINFO_H
+#define QDARWINFORMATINFO_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediaformatinfo_p.h>
+#include <qlist.h>
+
+QT_BEGIN_NAMESPACE
+
+class QDarwinMediaDevices;
+
+class QDarwinFormatInfo : public QPlatformMediaFormatInfo
+{
+public:
+ QDarwinFormatInfo();
+ ~QDarwinFormatInfo();
+
+ static int audioFormatForCodec(QMediaFormat::AudioCodec codec);
+ static NSString *videoFormatForCodec(QMediaFormat::VideoCodec codec);
+ static NSString *avFileTypeForContainerFormat(QMediaFormat::FileFormat fileType);
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/qdarwinintegration.mm b/src/plugins/multimedia/darwin/qdarwinintegration.mm
new file mode 100644
index 000000000..0e880447e
--- /dev/null
+++ b/src/plugins/multimedia/darwin/qdarwinintegration.mm
@@ -0,0 +1,93 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qdarwinintegration_p.h"
+#include <avfmediaplayer_p.h>
+#include <avfcameraservice_p.h>
+#include <avfcamera_p.h>
+#include <avfimagecapture_p.h>
+#include <avfmediaencoder_p.h>
+#include <qdarwinformatsinfo_p.h>
+#include <avfvideosink_p.h>
+#include <avfaudiodecoder_p.h>
+#include <VideoToolbox/VideoToolbox.h>
+#include <qdebug.h>
+#include <private/qplatformmediaplugin_p.h>
+#include <qavfcamerabase_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QDarwinMediaPlugin : public QPlatformMediaPlugin
+{
+ Q_OBJECT
+ Q_PLUGIN_METADATA(IID QPlatformMediaPlugin_iid FILE "darwin.json")
+
+public:
+ QDarwinMediaPlugin()
+ : QPlatformMediaPlugin()
+ {}
+
+ QPlatformMediaIntegration* create(const QString &name) override
+ {
+ if (name == u"darwin")
+ return new QDarwinIntegration;
+ return nullptr;
+ }
+};
+
+QDarwinIntegration::QDarwinIntegration() : QPlatformMediaIntegration(QLatin1String("darwin"))
+{
+#if defined(Q_OS_MACOS) && QT_MACOS_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_11_0)
+ if (__builtin_available(macOS 11.0, *))
+ VTRegisterSupplementalVideoDecoderIfAvailable(kCMVideoCodecType_VP9);
+#endif
+}
+
+QPlatformMediaFormatInfo *QDarwinIntegration::createFormatInfo()
+{
+ return new QDarwinFormatInfo();
+}
+
+QPlatformVideoDevices *QDarwinIntegration::createVideoDevices()
+{
+ return new QAVFVideoDevices(this);
+}
+
+QMaybe<QPlatformAudioDecoder *> QDarwinIntegration::createAudioDecoder(QAudioDecoder *decoder)
+{
+ return new AVFAudioDecoder(decoder);
+}
+
+QMaybe<QPlatformMediaCaptureSession *> QDarwinIntegration::createCaptureSession()
+{
+ return new AVFCameraService;
+}
+
+QMaybe<QPlatformMediaPlayer *> QDarwinIntegration::createPlayer(QMediaPlayer *player)
+{
+ return new AVFMediaPlayer(player);
+}
+
+QMaybe<QPlatformCamera *> QDarwinIntegration::createCamera(QCamera *camera)
+{
+ return new AVFCamera(camera);
+}
+
+QMaybe<QPlatformMediaRecorder *> QDarwinIntegration::createRecorder(QMediaRecorder *recorder)
+{
+ return new AVFMediaEncoder(recorder);
+}
+
+QMaybe<QPlatformImageCapture *> QDarwinIntegration::createImageCapture(QImageCapture *imageCapture)
+{
+ return new AVFImageCapture(imageCapture);
+}
+
+QMaybe<QPlatformVideoSink *> QDarwinIntegration::createVideoSink(QVideoSink *sink)
+{
+ return new AVFVideoSink(sink);
+}
+
+QT_END_NAMESPACE
+
+#include "qdarwinintegration.moc"
diff --git a/src/plugins/multimedia/darwin/qdarwinintegration_p.h b/src/plugins/multimedia/darwin/qdarwinintegration_p.h
new file mode 100644
index 000000000..8333de4ec
--- /dev/null
+++ b/src/plugins/multimedia/darwin/qdarwinintegration_p.h
@@ -0,0 +1,45 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QDARWININTEGRATION_H
+#define QDARWININTEGRATION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediaintegration_p.h>
+
+Q_FORWARD_DECLARE_OBJC_CLASS(NSObject);
+
+QT_BEGIN_NAMESPACE
+
+class QDarwinIntegration : public QPlatformMediaIntegration
+{
+public:
+ QDarwinIntegration();
+
+ QMaybe<QPlatformAudioDecoder *> createAudioDecoder(QAudioDecoder *) override;
+ QMaybe<QPlatformMediaCaptureSession *> createCaptureSession() override;
+ QMaybe<QPlatformMediaPlayer *> createPlayer(QMediaPlayer *player) override;
+ QMaybe<QPlatformCamera *> createCamera(QCamera *camera) override;
+ QMaybe<QPlatformMediaRecorder *> createRecorder(QMediaRecorder *) override;
+ QMaybe<QPlatformImageCapture *> createImageCapture(QImageCapture *) override;
+
+ QMaybe<QPlatformVideoSink *> createVideoSink(QVideoSink *) override;
+
+protected:
+ QPlatformMediaFormatInfo *createFormatInfo() override;
+ QPlatformVideoDevices *createVideoDevices() override;
+};
+
+QT_END_NAMESPACE
+
+#endif