summaryrefslogtreecommitdiffstats
path: root/src/plugins/multimedia/darwin/camera
diff options
context:
space:
mode:
Diffstat (limited to 'src/plugins/multimedia/darwin/camera')
-rw-r--r--src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate.mm98
-rw-r--r--src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate_p.h40
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcamera.mm89
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcamera_p.h48
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcameradebug_p.h26
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcamerarenderer.mm292
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcamerarenderer_p.h95
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcameraservice.mm169
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcameraservice_p.h84
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcamerasession.mm513
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcamerasession_p.h132
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcamerautility.mm730
-rw-r--r--src/plugins/multimedia/darwin/camera/avfcamerautility_p.h165
-rw-r--r--src/plugins/multimedia/darwin/camera/avfimagecapture.mm385
-rw-r--r--src/plugins/multimedia/darwin/camera/avfimagecapture_p.h81
-rw-r--r--src/plugins/multimedia/darwin/camera/avfmediaassetwriter.mm556
-rw-r--r--src/plugins/multimedia/darwin/camera/avfmediaassetwriter_p.h54
-rw-r--r--src/plugins/multimedia/darwin/camera/avfmediaencoder.mm664
-rw-r--r--src/plugins/multimedia/darwin/camera/avfmediaencoder_p.h96
-rw-r--r--src/plugins/multimedia/darwin/camera/qavfcamerabase.mm1080
-rw-r--r--src/plugins/multimedia/darwin/camera/qavfcamerabase_p.h110
21 files changed, 5507 insertions, 0 deletions
diff --git a/src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate.mm b/src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate.mm
new file mode 100644
index 000000000..1b2d4b15d
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate.mm
@@ -0,0 +1,98 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfcamerasession_p.h"
+#include "avfaudiopreviewdelegate_p.h"
+
+QT_USE_NAMESPACE
+
+@implementation AVFAudioPreviewDelegate
+{
+@private
+    AVSampleBufferAudioRenderer *m_audioRenderer;
+    AVFCameraSession *m_session;
+    AVSampleBufferRenderSynchronizer *m_audioBufferSynchronizer;
+    dispatch_queue_t m_audioPreviewQueue;
+}
+
+- (id)init
+{
+    if (self = [super init]) {
+        m_session = nil;
+        // The preview queue is created lazily in setupWithCaptureSession:.
+        m_audioPreviewQueue = nullptr;
+        m_audioBufferSynchronizer = [[AVSampleBufferRenderSynchronizer alloc] init];
+        m_audioRenderer = [[AVSampleBufferAudioRenderer alloc] init];
+        [m_audioBufferSynchronizer addRenderer:m_audioRenderer];
+        return self;
+    }
+    return nil;
+}
+
+// AVCaptureAudioDataOutputSampleBufferDelegate: forwards each captured
+// audio buffer to the preview queue for asynchronous rendering.
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ fromConnection:(AVCaptureConnection *)connection
+{
+    Q_UNUSED(connection);
+    Q_ASSERT(m_session);
+
+    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
+        qWarning() << Q_FUNC_INFO << "sample buffer is not ready, skipping.";
+        return;
+    }
+
+    // Keep the buffer alive across the async hop; released inside the block.
+    CFRetain(sampleBuffer);
+
+    dispatch_async(m_audioPreviewQueue, ^{
+        [self renderAudioSampleBuffer:sampleBuffer];
+        CFRelease(sampleBuffer);
+    });
+}
+
+// Enqueues one buffer on the renderer and starts the synchronizer clock
+// on the first buffer (rate 0 -> 1 at the buffer's presentation time).
+- (void)renderAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer
+{
+    Q_ASSERT(sampleBuffer);
+    Q_ASSERT(m_session);
+
+    if (m_audioBufferSynchronizer && m_audioRenderer) {
+        [m_audioRenderer enqueueSampleBuffer:sampleBuffer];
+        if (m_audioBufferSynchronizer.rate == 0)
+            [m_audioBufferSynchronizer setRate:1 time:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
+    }
+}
+
+// Re-registers self as the audio output's sample buffer delegate,
+// e.g. after the capture session was reconfigured.
+- (void)resetAudioPreviewDelegate
+{
+    [m_session->audioOutput() setSampleBufferDelegate:self queue:m_audioPreviewQueue];
+}
+
+// Attaches self to session's audio data output; on macOS rendered audio is
+// routed to the output device identified by deviceId.
+- (void)setupWithCaptureSession: (AVFCameraSession*)session
+ audioOutputDevice: (NSString*)deviceId
+{
+    m_session = session;
+
+    // Release any queue from a previous setup call to avoid leaking it.
+    if (m_audioPreviewQueue)
+        dispatch_release(m_audioPreviewQueue);
+    m_audioPreviewQueue = dispatch_queue_create("audio-preview-queue", nullptr);
+    [m_session->audioOutput() setSampleBufferDelegate:self queue:m_audioPreviewQueue];
+#ifdef Q_OS_MACOS
+    m_audioRenderer.audioOutputDeviceUniqueID = deviceId;
+#endif
+}
+
+- (void)setVolume: (float)volume
+{
+    m_audioRenderer.volume = volume;
+}
+
+- (void)setMuted: (bool)muted
+{
+    m_audioRenderer.muted = muted;
+}
+
+-(void)dealloc {
+    m_session = nil;
+    [m_audioRenderer release];
+    [m_audioBufferSynchronizer release];
+    // The queue only exists if setupWithCaptureSession: ran;
+    // dispatch_release(NULL) is undefined behavior, so guard it.
+    if (m_audioPreviewQueue)
+        dispatch_release(m_audioPreviewQueue);
+
+    [super dealloc];
+}
+
+@end
diff --git a/src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate_p.h b/src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate_p.h
new file mode 100644
index 000000000..8fa06ef39
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfaudiopreviewdelegate_p.h
@@ -0,0 +1,40 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFAUDIOPREVIEWDELEGATE_P_H
+#define AVFAUDIOPREVIEWDELEGATE_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qglobal.h>
+
+#include <AVFoundation/AVFoundation.h>
+
+QT_BEGIN_NAMESPACE
+
+class AVFCameraSession;
+
+QT_END_NAMESPACE
+
+/// Implements live audio preview for the camera capture session: sample
+/// buffers delivered by the session's audio data output are enqueued on an
+/// AVSampleBufferAudioRenderer driven by a render synchronizer.
+@interface AVFAudioPreviewDelegate : NSObject<AVCaptureAudioDataOutputSampleBufferDelegate>
+
+- (id)init;
+/// Attaches self to \a session's audio output; \a deviceId selects the
+/// audio output device on macOS (ignored on other platforms).
+- (void)setupWithCaptureSession: (AVFCameraSession*)session
+ audioOutputDevice: (NSString*)deviceId;
+/// Enqueues one buffer for rendering; runs on the preview queue.
+- (void)renderAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;
+/// Re-registers self as the audio output's sample buffer delegate.
+- (void)resetAudioPreviewDelegate;
+- (void)setVolume: (float)volume;
+- (void)setMuted: (bool)muted;
+
+@end
+
+#endif // AVFAUDIOPREVIEWDELEGATE_P_H
diff --git a/src/plugins/multimedia/darwin/camera/avfcamera.mm b/src/plugins/multimedia/darwin/camera/avfcamera.mm
new file mode 100644
index 000000000..05cdbae17
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamera.mm
@@ -0,0 +1,89 @@
+// Copyright (C) 2022 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfcameradebug_p.h"
+#include "avfcamera_p.h"
+#include "avfcamerasession_p.h"
+#include "avfcameraservice_p.h"
+#include "avfcamerautility_p.h"
+#include "avfcamerarenderer_p.h"
+#include <qmediacapturesession.h>
+
+QT_USE_NAMESPACE
+
+/// Creates the Darwin camera backend for \a camera. A non-null QCamera is
+/// required; the capture session is attached later via setCaptureSession().
+AVFCamera::AVFCamera(QCamera *camera)
+    : QAVFCameraBase(camera)
+{
+    Q_ASSERT(camera);
+}
+
+// Out-of-line so the vtable/moc anchors stay in this translation unit.
+AVFCamera::~AVFCamera() = default;
+
+/// Activates or deactivates the camera. Activation is refused while no
+/// camera device is selected; on activation the pending configuration is
+/// (re)applied before activeChanged() is emitted.
+void AVFCamera::setActive(bool active)
+{
+    if (m_active == active)
+        return;
+    // Cannot start streaming without a device.
+    if (m_cameraDevice.isNull() && active)
+        return;
+
+    m_active = active;
+    if (m_session)
+        m_session->setActive(active);
+
+    if (active)
+        updateCameraConfiguration();
+    Q_EMIT activeChanged(m_active);
+}
+
+/// Selects \a camera as the capture device, propagates it to the session,
+/// and resets the format to the best default for the new device.
+void AVFCamera::setCamera(const QCameraDevice &camera)
+{
+    if (m_cameraDevice == camera)
+        return;
+    m_cameraDevice = camera;
+    if (m_session)
+        m_session->setActiveCamera(camera);
+    // A null format picks the best match for the new device.
+    setCameraFormat({});
+}
+
+/// Applies \a format to the camera. A null format selects the best format
+/// for the current device; a non-null format must be one of the device's
+/// advertised formats. Returns false when the format is rejected.
+bool AVFCamera::setCameraFormat(const QCameraFormat &format)
+{
+    if (!format.isNull() && !m_cameraDevice.videoFormats().contains(format))
+        return false;
+
+    m_cameraFormat = format.isNull() ? findBestCameraFormat(m_cameraDevice) : format;
+
+    if (m_session)
+        m_session->setCameraFormat(m_cameraFormat);
+
+    return true;
+}
+
+/// Attaches this camera to \a session (an AVFCameraService) or detaches it
+/// when \a session is null. The previous session, if any, is disconnected
+/// and cleared before the new one receives the current device, format and
+/// active state.
+void AVFCamera::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+    AVFCameraService *captureSession = static_cast<AVFCameraService *>(session);
+    if (m_service == captureSession)
+        return;
+
+    // Detach from the old session first so it stops using this camera.
+    if (m_session) {
+        m_session->disconnect(this);
+        m_session->setActiveCamera({});
+        m_session->setCameraFormat({});
+    }
+
+    m_service = captureSession;
+    if (!m_service) {
+        m_session = nullptr;
+        return;
+    }
+
+    m_session = m_service->session();
+    Q_ASSERT(m_session);
+
+    // Push the current state into the newly attached session.
+    m_session->setActiveCamera(m_cameraDevice);
+    m_session->setCameraFormat(m_cameraFormat);
+    m_session->setActive(m_active);
+}
+
+#include "moc_avfcamera_p.cpp"
diff --git a/src/plugins/multimedia/darwin/camera/avfcamera_p.h b/src/plugins/multimedia/darwin/camera/avfcamera_p.h
new file mode 100644
index 000000000..3c3e6da09
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamera_p.h
@@ -0,0 +1,48 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFCAMERA_H
+#define AVFCAMERA_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qavfcamerabase_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class AVFCameraSession;
+class AVFCameraService;
+class AVFCameraSession;
+
+/// Darwin (AVFoundation) implementation of QPlatformCamera. Owns no AV
+/// objects itself; it forwards device/format/active state to the
+/// AVFCameraSession obtained from the attached AVFCameraService.
+class AVFCamera : public QAVFCameraBase
+{
+Q_OBJECT
+public:
+    AVFCamera(QCamera *camera);
+    ~AVFCamera();
+
+    // Fixed parameter-name typo: "activce" -> "active".
+    void setActive(bool active) override;
+
+    void setCamera(const QCameraDevice &camera) override;
+    bool setCameraFormat(const QCameraFormat &format) override;
+
+    void setCaptureSession(QPlatformMediaCaptureSession *) override;
+
+private:
+    friend class AVFCameraSession;
+    AVFCameraService *m_service = nullptr;
+    AVFCameraSession *m_session = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/camera/avfcameradebug_p.h b/src/plugins/multimedia/darwin/camera/avfcameradebug_p.h
new file mode 100644
index 000000000..f93c85142
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcameradebug_p.h
@@ -0,0 +1,26 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFDEBUG_H
+#define AVFDEBUG_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "qtmultimediaglobal.h"
+
+#include <QtCore/qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+Q_DECLARE_LOGGING_CATEGORY(qLcCamera)
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/camera/avfcamerarenderer.mm b/src/plugins/multimedia/darwin/camera/avfcamerarenderer.mm
new file mode 100644
index 000000000..0c9ab3f2c
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamerarenderer.mm
@@ -0,0 +1,292 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qabstractvideobuffer.h"
+#include "private/qcameradevice_p.h"
+#include "private/qvideoframe_p.h"
+#include "avfcamerarenderer_p.h"
+#include "avfcamerasession_p.h"
+#include "avfcameraservice_p.h"
+#include "avfcameradebug_p.h"
+#include "avfcamera_p.h"
+#include <avfvideosink_p.h>
+#include <avfvideobuffer_p.h>
+#include "qvideosink.h"
+#include "qavfhelpers_p.h"
+
+#include <rhi/qrhi.h>
+
+#import <AVFoundation/AVFoundation.h>
+
+#ifdef Q_OS_IOS
+#include <QtGui/qopengl.h>
+#endif
+
+#include <QtMultimedia/qvideoframeformat.h>
+
+QT_USE_NAMESPACE
+
+@interface AVFCaptureFramesDelegate : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
+
+- (AVFCaptureFramesDelegate *) initWithRenderer:(AVFCameraRenderer*)renderer;
+
+- (void) captureOutput:(AVCaptureOutput *)captureOutput
+ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ fromConnection:(AVCaptureConnection *)connection;
+
+@end
+
+/// Receives video frames from the AVCaptureVideoDataOutput and forwards
+/// them to the owning AVFCameraRenderer as QVideoFrames.
+@implementation AVFCaptureFramesDelegate
+{
+@private
+    AVFCameraRenderer *m_renderer;
+}
+
+- (AVFCaptureFramesDelegate *) initWithRenderer:(AVFCameraRenderer*)renderer
+{
+    if (!(self = [super init]))
+        return nil;
+
+    self->m_renderer = renderer;
+    return self;
+}
+
+// Runs on the renderer's delegate queue, not the main thread.
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ fromConnection:(AVCaptureConnection *)connection
+{
+    Q_UNUSED(connection);
+    Q_UNUSED(captureOutput);
+
+    // NB: on iOS captureOutput/connection can be nil (when recording a video -
+    // avfmediaassetwriter).
+
+    // NOTE(review): CMSampleBufferGetImageBuffer() can return NULL;
+    // presumably AVFVideoBuffer yields an invalid format then, which the
+    // check below rejects — confirm against AVFVideoBuffer.
+    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+    auto buffer = std::make_unique<AVFVideoBuffer>(m_renderer, imageBuffer);
+    auto format = buffer->videoFormat();
+    if (!format.isValid()) {
+        return;
+    }
+
+    QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(buffer), format);
+    m_renderer->syncHandleViewfinderFrame(frame);
+}
+
+@end
+
+/// Creates the renderer and its capture-frames delegate; re-applies the
+/// video orientation whenever the device orientation changes.
+AVFCameraRenderer::AVFCameraRenderer(QObject *parent)
+    : QObject(parent)
+{
+    m_viewfinderFramesDelegate = [[AVFCaptureFramesDelegate alloc] initWithRenderer:this];
+    connect(&m_orientationHandler, &QVideoOutputOrientationHandler::orientationChanged,
+            this, &AVFCameraRenderer::deviceOrientationChanged);
+}
+
+/// Detaches the video output from the capture session and releases all
+/// Objective-C/CF resources owned by the renderer.
+AVFCameraRenderer::~AVFCameraRenderer()
+{
+    // configureAVCaptureSession() may never have been called; calling a
+    // member through a null m_cameraSession is undefined behavior.
+    if (m_cameraSession)
+        [m_cameraSession->captureSession() removeOutput:m_videoDataOutput];
+    [m_viewfinderFramesDelegate release];
+    [m_videoDataOutput release];
+
+    if (m_delegateQueue)
+        dispatch_release(m_delegateQueue);
+#ifdef Q_OS_IOS
+    if (m_textureCache)
+        CFRelease(m_textureCache);
+#endif
+}
+
+/// Pushes the current native size and orientation to the sink after the
+/// session layout changed.
+void AVFCameraRenderer::reconfigure()
+{
+    QMutexLocker lock(&m_vfMutex);
+
+    // ### This is a hack, need to use a reliable way to determine the size and not use the preview layer
+    if (m_layer)
+        m_sink->setNativeSize(QSize(m_layer.bounds.size.width, m_layer.bounds.size.height));
+    nativeSizeChanged();
+    deviceOrientationChanged();
+}
+
+/// Derives the AVCaptureVideoDataOutput settings from the session's camera
+/// format, falling back to the base-class defaults when no settings were
+/// produced.
+void AVFCameraRenderer::setOutputSettings()
+{
+    if (!m_videoDataOutput)
+        return;
+
+    if (m_cameraSession) {
+        const auto format = m_cameraSession->cameraFormat();
+        if (format.pixelFormat() != QVideoFrameFormat::Format_Invalid)
+            setPixelFormat(format.pixelFormat(), QCameraFormatPrivate::getColorRange(format));
+    }
+
+    // If no output settings set from above,
+    // it's most likely because the rhi is OpenGL
+    // and the pixel format is not BGRA.
+    // We force this in the base class implementation
+    if (!m_outputSettings)
+        AVFVideoSinkInterface::setOutputSettings();
+
+    if (m_outputSettings)
+        m_videoDataOutput.videoSettings = m_outputSettings;
+}
+
+/// Hooks this renderer into \a cameraSession: creates the video data
+/// output, registers the frames delegate on a dedicated serial queue and
+/// adds the output to the underlying AVCaptureSession.
+void AVFCameraRenderer::configureAVCaptureSession(AVFCameraSession *cameraSession)
+{
+    m_cameraSession = cameraSession;
+    // Pointer-to-member connect is checked at compile time and matches the
+    // style already used in the constructor.
+    connect(m_cameraSession, &AVFCameraSession::readyToConfigureConnections,
+            this, &AVFCameraRenderer::updateCaptureConnection);
+
+    m_needsHorizontalMirroring = false;
+
+    m_videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
+
+    // Deliver frames to the delegate on a dedicated serial queue.
+    m_delegateQueue = dispatch_queue_create("vf_queue", nullptr);
+    [m_videoDataOutput
+        setSampleBufferDelegate:m_viewfinderFramesDelegate
+        queue:m_delegateQueue];
+
+    [m_cameraSession->captureSession() addOutput:m_videoDataOutput];
+}
+
+/// Applies mirroring for front-facing cameras once the session is ready to
+/// configure connections, then refreshes the video orientation.
+void AVFCameraRenderer::updateCaptureConnection()
+{
+    AVCaptureConnection *connection = [m_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
+    if (connection == nil || !m_cameraSession->videoCaptureDevice())
+        return;
+
+    // Frames of front-facing cameras should be mirrored horizontally (it's the default when using
+    // AVCaptureVideoPreviewLayer but not with AVCaptureVideoDataOutput)
+    if (connection.isVideoMirroringSupported)
+        connection.videoMirrored = m_cameraSession->videoCaptureDevice().position == AVCaptureDevicePositionFront;
+
+    // If the connection doesn't support mirroring, we'll have to do it ourselves
+    m_needsHorizontalMirroring = !connection.isVideoMirrored
+        && m_cameraSession->videoCaptureDevice().position == AVCaptureDevicePositionFront;
+
+    deviceOrientationChanged();
+}
+
+/// Maps the device rotation \a angle (degrees; -1 means "query the
+/// orientation handler") onto the capture connection's video orientation.
+void AVFCameraRenderer::deviceOrientationChanged(int angle)
+{
+    AVCaptureConnection *connection = [m_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
+    if (connection == nil || !m_cameraSession->videoCaptureDevice())
+        return;
+
+    if (!connection.supportsVideoOrientation)
+        return;
+
+    if (angle < 0)
+        angle = m_orientationHandler.currentOrientation();
+
+    AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
+    switch (angle) {
+    default:
+        break;
+    case 90:
+        orientation = AVCaptureVideoOrientationLandscapeRight;
+        break;
+    case 180:
+        // this keeps the last orientation, don't do anything
+        return;
+    case 270:
+        orientation = AVCaptureVideoOrientationLandscapeLeft;
+        break;
+    }
+
+    connection.videoOrientation = orientation;
+}
+
+// Can be called from a non-main thread (the capture delegate queue).
+// Stores the newest frame and queues at most one delivery to the main
+// thread: the queued invocation is only posted when no frame is pending,
+// so later frames simply overwrite m_lastViewfinderFrame (frame dropping).
+void AVFCameraRenderer::syncHandleViewfinderFrame(const QVideoFrame &frame)
+{
+    Q_EMIT newViewfinderFrame(frame);
+
+    QMutexLocker lock(&m_vfMutex);
+
+    if (!m_lastViewfinderFrame.isValid()) {
+        static QMetaMethod handleViewfinderFrameSlot = metaObject()->method(
+            metaObject()->indexOfMethod("handleViewfinderFrame()"));
+
+        handleViewfinderFrameSlot.invoke(this, Qt::QueuedConnection);
+    }
+
+    m_lastViewfinderFrame = frame;
+}
+
+/// Returns the AVCaptureVideoDataOutput this renderer feeds frames from.
+AVCaptureVideoDataOutput *AVFCameraRenderer::videoDataOutput() const
+{
+    return m_videoDataOutput;
+}
+
+/// Returns the Objective-C delegate receiving the captured frames.
+AVFCaptureFramesDelegate *AVFCameraRenderer::captureDelegate() const
+{
+    return m_viewfinderFramesDelegate;
+}
+
+/// Re-registers the frames delegate on the output, e.g. after another
+/// component (such as the asset writer) replaced it.
+void AVFCameraRenderer::resetCaptureDelegate() const
+{
+    [m_videoDataOutput setSampleBufferDelegate:m_viewfinderFramesDelegate queue:m_delegateQueue];
+}
+
+/// Main-thread slot: takes the latest pending frame (clearing the pending
+/// marker under the lock) and pushes it to the sink.
+void AVFCameraRenderer::handleViewfinderFrame()
+{
+    QVideoFrame frame;
+    {
+        QMutexLocker lock(&m_vfMutex);
+        frame = m_lastViewfinderFrame;
+        m_lastViewfinderFrame = QVideoFrame();
+    }
+
+    if (m_sink && frame.isValid()) {
+        // frame.setMirrored(m_needsHorizontalMirroring) ?
+        m_sink->setVideoFrame(frame);
+    }
+}
+
+/// Configures the video data output to deliver frames as \a pixelFormat /
+/// \a colorRange. Falls back to kCVPixelFormatType_32BGRA when the request
+/// cannot be mapped to a CoreVideo format, and leaves the settings
+/// untouched (with a warning) when the output does not support the result.
+void AVFCameraRenderer::setPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat,
+                                       QVideoFrameFormat::ColorRange colorRange)
+{
+    if (rhi() && rhi()->backend() == QRhi::OpenGLES2) {
+        // The GL import path only handles BGRA; the base class forces BGRA
+        // output settings later (see setOutputSettings()).
+        if (pixelFormat != QVideoFrameFormat::Format_BGRA8888)
+            qWarning() << "OpenGL rhi backend only supports 32BGRA pixel format.";
+        return;
+    }
+
+    // Default to 32BGRA pixel formats on the viewfinder, in case the requested
+    // format can't be used (shouldn't happen unless the developers sets a wrong camera
+    // format on the camera).
+    auto cvPixelFormat = QAVFHelpers::toCVPixelFormat(pixelFormat, colorRange);
+    if (cvPixelFormat == CvPixelFormatInvalid) {
+        cvPixelFormat = kCVPixelFormatType_32BGRA;
+        // Fixed diagnostic: the fallback actually used is 32BGRA, not ARGB32.
+        qWarning() << "QCamera::setCameraFormat: couldn't convert requested pixel format, using 32BGRA";
+    }
+
+    bool isSupported = false;
+    NSArray *supportedPixelFormats = m_videoDataOutput.availableVideoCVPixelFormatTypes;
+    for (NSNumber *currentPixelFormat in supportedPixelFormats)
+    {
+        if ([currentPixelFormat unsignedIntValue] == cvPixelFormat) {
+            isSupported = true;
+            break;
+        }
+    }
+
+    if (isSupported) {
+        NSDictionary *outputSettings = @{
+            (NSString *)
+            kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithUnsignedInt:cvPixelFormat]
+#ifndef Q_OS_IOS // On iOS this key generates a warning about 'unsupported key'.
+            ,
+            (NSString *)kCVPixelBufferMetalCompatibilityKey : @true
+#endif // Q_OS_IOS
+        };
+        if (m_outputSettings)
+            [m_outputSettings release];
+        m_outputSettings = [[NSDictionary alloc] initWithDictionary:outputSettings];
+    } else {
+        qWarning() << "QCamera::setCameraFormat: requested pixel format not supported. Did you use a camera format from another camera?";
+    }
+}
+
+#include "moc_avfcamerarenderer_p.cpp"
+
diff --git a/src/plugins/multimedia/darwin/camera/avfcamerarenderer_p.h b/src/plugins/multimedia/darwin/camera/avfcamerarenderer_p.h
new file mode 100644
index 000000000..57f665cd6
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamerarenderer_p.h
@@ -0,0 +1,95 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFCAMERARENDERER_H
+#define AVFCAMERARENDERER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qobject.h>
+#include <QtMultimedia/qvideoframe.h>
+#include <QtCore/qmutex.h>
+#include <avfvideosink_p.h>
+#include <private/qvideooutputorientationhandler_p.h>
+
+#include <CoreVideo/CVBase.h>
+#include <CoreVideo/CVPixelBuffer.h>
+#include <CoreVideo/CVImageBuffer.h>
+#ifdef Q_OS_IOS
+#include <CoreVideo/CVOpenGLESTexture.h>
+#include <CoreVideo/CVOpenGLESTextureCache.h>
+#endif
+
+#include <dispatch/dispatch.h>
+
+Q_FORWARD_DECLARE_OBJC_CLASS(AVFCaptureFramesDelegate);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureVideoDataOutput);
+
+QT_BEGIN_NAMESPACE
+
+class AVFCameraSession;
+class AVFCameraService;
+class AVFCameraRenderer;
+class AVFVideoSink;
+
+/// Bridges the AVCaptureSession video output to a Qt video sink: owns the
+/// AVCaptureVideoDataOutput, its delegate queue and the frames delegate,
+/// and forwards captured frames to the sink on the main thread.
+class AVFCameraRenderer : public QObject, public AVFVideoSinkInterface
+{
+Q_OBJECT
+public:
+    AVFCameraRenderer(QObject *parent = nullptr);
+    ~AVFCameraRenderer();
+
+    void reconfigure() override;
+    void setOutputSettings() override;
+
+    void configureAVCaptureSession(AVFCameraSession *cameraSession);
+    // May be called from the capture delegate queue (non-main thread).
+    void syncHandleViewfinderFrame(const QVideoFrame &frame);
+
+    AVCaptureVideoDataOutput *videoDataOutput() const;
+
+    AVFCaptureFramesDelegate *captureDelegate() const;
+    void resetCaptureDelegate() const;
+
+    void setPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat,
+                        QVideoFrameFormat::ColorRange colorRange);
+
+Q_SIGNALS:
+    void newViewfinderFrame(const QVideoFrame &frame);
+
+private Q_SLOTS:
+    void handleViewfinderFrame();
+    void updateCaptureConnection();
+public Q_SLOTS:
+    void deviceOrientationChanged(int angle = -1);
+
+private:
+    AVFCaptureFramesDelegate *m_viewfinderFramesDelegate = nullptr;
+    AVFCameraSession *m_cameraSession = nullptr;
+    AVCaptureVideoDataOutput *m_videoDataOutput = nullptr;
+
+    bool m_needsHorizontalMirroring = false;
+
+#ifdef Q_OS_IOS
+    CVOpenGLESTextureCacheRef m_textureCache = nullptr;
+#endif
+
+    QVideoFrame m_lastViewfinderFrame;
+    QMutex m_vfMutex;
+    // Initialized to null: the destructor tests this member, and reading it
+    // uninitialized (when configureAVCaptureSession() never ran) is UB.
+    dispatch_queue_t m_delegateQueue = nullptr;
+    QVideoOutputOrientationHandler m_orientationHandler;
+
+    friend class CVImageVideoBuffer;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/camera/avfcameraservice.mm b/src/plugins/multimedia/darwin/camera/avfcameraservice.mm
new file mode 100644
index 000000000..b25fb50a9
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcameraservice.mm
@@ -0,0 +1,169 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include <QtCore/qvariant.h>
+#include <QtCore/qdebug.h>
+
+#include "avfcameraservice_p.h"
+#include "avfcamera_p.h"
+#include "avfcamerasession_p.h"
+#include "avfimagecapture_p.h"
+#include "avfcamerarenderer_p.h"
+#include "avfimagecapture_p.h"
+#include "avfmediaencoder_p.h"
+#include <qmediadevices.h>
+#include <private/qplatformaudioinput_p.h>
+#include <private/qplatformaudiooutput_p.h>
+#include <qaudioinput.h>
+#include <qaudiooutput.h>
+
+QT_USE_NAMESPACE
+
+/// Creates the capture service and its session; m_session is non-null for
+/// the whole lifetime of the service.
+AVFCameraService::AVFCameraService()
+{
+    m_session = new AVFCameraSession(this);
+}
+
+AVFCameraService::~AVFCameraService()
+{
+    // delete on a null pointer is a no-op, no guard required
+    delete m_session;
+}
+
+/// Returns the currently attached camera control (may be null).
+QPlatformCamera *AVFCameraService::camera()
+{
+    return m_cameraControl;
+}
+
+/// Attaches \a camera to this service, detaching any previous camera
+/// (and its encoder connections) first. Emits cameraChanged().
+void AVFCameraService::setCamera(QPlatformCamera *camera)
+{
+    AVFCamera *control = static_cast<AVFCamera *>(camera);
+    if (m_cameraControl == control)
+        return;
+
+    if (m_cameraControl) {
+        if (m_encoder)
+            m_cameraControl->disconnect(m_encoder);
+        m_cameraControl->setCaptureSession(nullptr);
+    }
+
+    m_cameraControl = control;
+
+    if (m_cameraControl)
+        m_cameraControl->setCaptureSession(this);
+
+    emit cameraChanged();
+}
+
+/// Returns the currently attached image-capture control (may be null).
+QPlatformImageCapture *AVFCameraService::imageCapture()
+{
+    return m_imageCaptureControl;
+}
+
+/// Attaches \a imageCapture to this service, detaching any previous one.
+void AVFCameraService::setImageCapture(QPlatformImageCapture *imageCapture)
+{
+    AVFImageCapture *control = static_cast<AVFImageCapture *>(imageCapture);
+    if (m_imageCaptureControl == control)
+        return;
+
+    if (m_imageCaptureControl)
+        m_imageCaptureControl->setCaptureSession(nullptr);
+
+    m_imageCaptureControl = control;
+    if (m_imageCaptureControl)
+        m_imageCaptureControl->setCaptureSession(this);
+}
+
+/// Returns the currently attached media encoder (may be null).
+QPlatformMediaRecorder *AVFCameraService::mediaRecorder()
+{
+    return m_encoder;
+}
+
+/// Attaches \a recorder to this service, detaching any previous encoder.
+/// Emits encoderChanged().
+void AVFCameraService::setMediaRecorder(QPlatformMediaRecorder *recorder)
+{
+    AVFMediaEncoder *control = static_cast<AVFMediaEncoder *>(recorder);
+    if (m_encoder == control)
+        return;
+
+    if (m_encoder)
+        m_encoder->setCaptureSession(nullptr);
+
+    m_encoder = control;
+    if (m_encoder)
+        m_encoder->setCaptureSession(this);
+
+    emit encoderChanged();
+}
+
+/// Sets the audio input (null detaches). Connections track the QAudioInput's
+/// lifetime, device, mute and volume; the session is updated in all cases.
+void AVFCameraService::setAudioInput(QPlatformAudioInput *input)
+{
+    if (m_audioInput == input)
+        return;
+    if (m_audioInput)
+        m_audioInput->q->disconnect(this);
+
+    m_audioInput = input;
+
+    if (input) {
+        connect(m_audioInput->q, &QAudioInput::destroyed, this, &AVFCameraService::audioInputDestroyed);
+        connect(m_audioInput->q, &QAudioInput::deviceChanged, this, &AVFCameraService::audioInputChanged);
+        connect(m_audioInput->q, &QAudioInput::mutedChanged, this, &AVFCameraService::setAudioInputMuted);
+        connect(m_audioInput->q, &QAudioInput::volumeChanged, this, &AVFCameraService::setAudioInputVolume);
+    }
+    // Notify the session even when detaching (input == nullptr).
+    audioInputChanged();
+}
+
+/// Sets the audio output (null detaches); mirrors setAudioInput().
+void AVFCameraService::setAudioOutput(QPlatformAudioOutput *output)
+{
+    if (m_audioOutput == output)
+        return;
+    if (m_audioOutput)
+        m_audioOutput->q->disconnect(this);
+
+    m_audioOutput = output;
+
+    if (m_audioOutput) {
+        connect(m_audioOutput->q, &QAudioOutput::destroyed, this, &AVFCameraService::audioOutputDestroyed);
+        connect(m_audioOutput->q, &QAudioOutput::deviceChanged, this, &AVFCameraService::audioOutputChanged);
+        connect(m_audioOutput->q, &QAudioOutput::mutedChanged, this, &AVFCameraService::setAudioOutputMuted);
+        connect(m_audioOutput->q, &QAudioOutput::volumeChanged, this, &AVFCameraService::setAudioOutputVolume);
+    }
+    // Notify the session even when detaching (output == nullptr).
+    audioOutputChanged();
+}
+
+// The following slots simply forward audio/video state changes to the
+// session (m_session is created in the constructor and always valid).
+
+void AVFCameraService::audioInputChanged()
+{
+    m_session->updateAudioInput();
+}
+
+void AVFCameraService::audioOutputChanged()
+{
+    m_session->updateAudioOutput();
+}
+
+void AVFCameraService::setAudioInputMuted(bool muted)
+{
+    m_session->setAudioInputMuted(muted);
+}
+
+void AVFCameraService::setAudioInputVolume(float volume)
+{
+    m_session->setAudioInputVolume(volume);
+}
+
+void AVFCameraService::setAudioOutputMuted(bool muted)
+{
+    m_session->setAudioOutputMuted(muted);
+}
+
+void AVFCameraService::setAudioOutputVolume(float volume)
+{
+    m_session->setAudioOutputVolume(volume);
+}
+
+/// Routes the camera preview to \a sink.
+void AVFCameraService::setVideoPreview(QVideoSink *sink)
+{
+    m_session->setVideoSink(sink);
+}
+
+#include "moc_avfcameraservice_p.cpp"
diff --git a/src/plugins/multimedia/darwin/camera/avfcameraservice_p.h b/src/plugins/multimedia/darwin/camera/avfcameraservice_p.h
new file mode 100644
index 000000000..f3ef8d08e
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcameraservice_p.h
@@ -0,0 +1,84 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFCAMERASERVICE_H
+#define AVFCAMERASERVICE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qobject.h>
+#include <QtCore/qset.h>
+#include <private/qplatformmediacapture_p.h>
+
+Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureDevice);
+
+QT_BEGIN_NAMESPACE
+class QPlatformCamera;
+class QPlatformMediaRecorder;
+class AVFCamera;
+class AVFImageCapture;
+class AVFCameraSession;
+class AVFMediaEncoder;
+
+/// Darwin media capture session: owns the AVFCameraSession and wires the
+/// camera, image capture, recorder and audio input/output controls to it.
+class AVFCameraService : public QPlatformMediaCaptureSession
+{
+    Q_OBJECT
+public:
+    AVFCameraService();
+    ~AVFCameraService();
+
+    QPlatformCamera *camera() override;
+    void setCamera(QPlatformCamera *camera) override;
+
+    QPlatformImageCapture *imageCapture() override;
+    void setImageCapture(QPlatformImageCapture *imageCapture) override;
+
+    QPlatformMediaRecorder *mediaRecorder() override;
+    void setMediaRecorder(QPlatformMediaRecorder *recorder) override;
+
+    void setAudioInput(QPlatformAudioInput *) override;
+    void setAudioOutput(QPlatformAudioOutput *) override;
+
+    void setVideoPreview(QVideoSink *sink) override;
+
+    // Always non-null: created in the constructor, owned by this service.
+    AVFCameraSession *session() const { return m_session; }
+    AVFCamera *avfCameraControl() const { return m_cameraControl; }
+    AVFMediaEncoder *recorderControl() const { return m_encoder; }
+    AVFImageCapture *avfImageCaptureControl() const { return m_imageCaptureControl; }
+
+    QPlatformAudioInput *audioInput() { return m_audioInput; }
+    QPlatformAudioOutput *audioOutput() { return m_audioOutput; }
+
+public Q_SLOTS:
+    // Detach automatically when the audio endpoints are destroyed.
+    void audioInputDestroyed() { setAudioInput(nullptr); }
+    void audioInputChanged();
+    void audioOutputDestroyed() { setAudioOutput(nullptr); }
+    void audioOutputChanged();
+
+    void setAudioInputMuted(bool muted);
+    void setAudioInputVolume(float volume);
+    void setAudioOutputMuted(bool muted);
+    void setAudioOutputVolume(float volume);
+
+private:
+    QPlatformAudioInput *m_audioInput = nullptr;
+    QPlatformAudioOutput *m_audioOutput = nullptr;
+
+    AVFCameraSession *m_session = nullptr;
+    AVFCamera *m_cameraControl = nullptr;
+    AVFMediaEncoder *m_encoder = nullptr;
+    AVFImageCapture *m_imageCaptureControl = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/camera/avfcamerasession.mm b/src/plugins/multimedia/darwin/camera/avfcamerasession.mm
new file mode 100644
index 000000000..52e2eadfa
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamerasession.mm
@@ -0,0 +1,513 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfcameradebug_p.h"
+#include "avfcamerasession_p.h"
+#include "avfcameraservice_p.h"
+#include "avfcamera_p.h"
+#include "avfcamerarenderer_p.h"
+#include "avfimagecapture_p.h"
+#include "avfmediaencoder_p.h"
+#include "avfcamerautility_p.h"
+#include <avfvideosink_p.h>
+
+#include <CoreFoundation/CoreFoundation.h>
+#include <Foundation/Foundation.h>
+
+#include <QtCore/qcoreapplication.h>
+#include <QtCore/qdatetime.h>
+#include <QtCore/qurl.h>
+#include <QtCore/qelapsedtimer.h>
+#include <QtCore/qpermissions.h>
+#include <QtCore/qpointer.h>
+
+#include <private/qplatformaudioinput_p.h>
+#include <private/qplatformaudiooutput_p.h>
+
+#include <QtCore/qdebug.h>
+
+QT_USE_NAMESPACE
+
+// Bridges AVCaptureSession NSNotifications to the AVFCameraSession QObject:
+// each notification is forwarded as an invokeMethod call so the Qt-side
+// handlers run with Qt::AutoConnection semantics.
+@interface AVFCameraSessionObserver : NSObject
+
+- (AVFCameraSessionObserver *) initWithCameraSession:(AVFCameraSession*)session;
+- (void) processRuntimeError:(NSNotification *)notification;
+- (void) processSessionStarted:(NSNotification *)notification;
+- (void) processSessionStopped:(NSNotification *)notification;
+
+@end
+
+@implementation AVFCameraSessionObserver
+{
+@private
+    AVFCameraSession *m_session;
+    AVCaptureSession *m_captureSession;
+}
+
+// Retains the capture session and subscribes to its runtime-error,
+// did-start-running and did-stop-running notifications.
+- (AVFCameraSessionObserver *) initWithCameraSession:(AVFCameraSession*)session
+{
+    if (!(self = [super init]))
+        return nil;
+
+    self->m_session = session;
+    self->m_captureSession = session->captureSession();
+
+    // Manual retain/release: this file is compiled without ARC (see dealloc).
+    [m_captureSession retain];
+    [[NSNotificationCenter defaultCenter] addObserver:self
+                                             selector:@selector(processRuntimeError:)
+                                                 name:AVCaptureSessionRuntimeErrorNotification
+                                               object:m_captureSession];
+
+    [[NSNotificationCenter defaultCenter] addObserver:self
+                                             selector:@selector(processSessionStarted:)
+                                                 name:AVCaptureSessionDidStartRunningNotification
+                                               object:m_captureSession];
+
+    [[NSNotificationCenter defaultCenter] addObserver:self
+                                             selector:@selector(processSessionStopped:)
+                                                 name:AVCaptureSessionDidStopRunningNotification
+                                               object:m_captureSession];
+
+    return self;
+}
+
+// Unsubscribes from all three notifications before releasing the retained
+// capture session.
+- (void) dealloc
+{
+    [[NSNotificationCenter defaultCenter] removeObserver:self
+                                                    name:AVCaptureSessionRuntimeErrorNotification
+                                                  object:m_captureSession];
+
+    [[NSNotificationCenter defaultCenter] removeObserver:self
+                                                    name:AVCaptureSessionDidStartRunningNotification
+                                                  object:m_captureSession];
+
+    [[NSNotificationCenter defaultCenter] removeObserver:self
+                                                    name:AVCaptureSessionDidStopRunningNotification
+                                                  object:m_captureSession];
+    [m_captureSession release];
+    [super dealloc];
+}
+
+// Forwards to AVFCameraSession::processRuntimeError() on the Qt side.
+- (void) processRuntimeError:(NSNotification *)notification
+{
+    Q_UNUSED(notification);
+    QMetaObject::invokeMethod(m_session, "processRuntimeError", Qt::AutoConnection);
+}
+
+// Forwards to AVFCameraSession::processSessionStarted().
+- (void) processSessionStarted:(NSNotification *)notification
+{
+    Q_UNUSED(notification);
+    QMetaObject::invokeMethod(m_session, "processSessionStarted", Qt::AutoConnection);
+}
+
+// Forwards to AVFCameraSession::processSessionStopped().
+- (void) processSessionStopped:(NSNotification *)notification
+{
+    Q_UNUSED(notification);
+    QMetaObject::invokeMethod(m_session, "processSessionStopped", Qt::AutoConnection);
+}
+
+@end
+
+// Creates the underlying AVCaptureSession plus the observer that forwards
+// its notifications back to this QObject.
+AVFCameraSession::AVFCameraSession(AVFCameraService *service, QObject *parent)
+    : QObject(parent)
+    , m_service(service)
+    , m_defaultCodec(0)
+{
+    m_captureSession = [[AVCaptureSession alloc] init];
+    m_observer = [[AVFCameraSessionObserver alloc] initWithCameraSession:this];
+}
+
+// Detaches and releases every input/output this session owns, then the
+// observer and the capture session itself (manual retain/release, no ARC).
+AVFCameraSession::~AVFCameraSession()
+{
+    if (m_videoInput) {
+        [m_captureSession removeInput:m_videoInput];
+        [m_videoInput release];
+    }
+
+    if (m_audioInput) {
+        [m_captureSession removeInput:m_audioInput];
+        [m_audioInput release];
+    }
+
+    if (m_audioOutput) {
+        [m_captureSession removeOutput:m_audioOutput];
+        [m_audioOutput release];
+    }
+
+    // The renderer is a plain C++ object owned by this session.
+    if (m_videoOutput)
+        delete m_videoOutput;
+
+    [m_observer release];
+    [m_captureSession release];
+}
+
+// Switches the session to a different camera device; re-attaches the video
+// input only when the device actually changed and camera access is granted.
+void AVFCameraSession::setActiveCamera(const QCameraDevice &info)
+{
+    if (m_activeCameraDevice == info)
+        return;
+
+    m_activeCameraDevice = info;
+    if (checkCameraPermission())
+        updateVideoInput();
+}
+
+// Applies a new camera format; a no-op when the format is unchanged.
+void AVFCameraSession::setCameraFormat(const QCameraFormat &format)
+{
+    if (format != m_cameraFormat)
+        updateCameraFormat(format);
+}
+
+// The format most recently passed to setCameraFormat()/updateCameraFormat().
+QCameraFormat AVFCameraSession::cameraFormat() const
+{
+    return m_cameraFormat;
+}
+
+// Stores the requested format and, when a capture device is attached, maps
+// it to a native AVCaptureDeviceFormat and activates it (without preserving
+// the current frame-rate limits).
+void AVFCameraSession::updateCameraFormat(const QCameraFormat &format)
+{
+    m_cameraFormat = format;
+
+    AVCaptureDevice *captureDevice = videoCaptureDevice();
+    if (!captureDevice)
+        return;
+
+    AVCaptureDeviceFormat *newFormat = qt_convert_to_capture_device_format(captureDevice, format);
+    if (newFormat)
+        qt_set_active_format(captureDevice, newFormat, false);
+}
+
+// Replaces the renderer output: deletes the previous one, takes ownership of
+// the new one and lets it configure the capture session.
+void AVFCameraSession::setVideoOutput(AVFCameraRenderer *output)
+{
+    if (output == m_videoOutput)
+        return;
+
+    delete m_videoOutput;
+    m_videoOutput = output;
+    if (m_videoOutput)
+        m_videoOutput->configureAVCaptureSession(this);
+}
+
+// Lazily creates the audio data output and adds it to the capture session;
+// does nothing when an audio output already exists.
+void AVFCameraSession::addAudioCapture()
+{
+    if (m_audioOutput)
+        return;
+
+    m_audioOutput = [[AVCaptureAudioDataOutput alloc] init];
+    if (m_audioOutput && [m_captureSession canAddOutput:m_audioOutput])
+        [m_captureSession addOutput:m_audioOutput];
+    else
+        qWarning() << Q_FUNC_INFO << "failed to add audio output";
+}
+
+// The AVCaptureDevice backing the video input, or nullptr when none is attached.
+AVCaptureDevice *AVFCameraSession::videoCaptureDevice() const
+{
+    return m_videoInput ? m_videoInput.device : nullptr;
+}
+
+// The AVCaptureDevice backing the audio input, or nullptr when none is attached.
+AVCaptureDevice *AVFCameraSession::audioCaptureDevice() const
+{
+    return m_audioInput ? m_audioInput.device : nullptr;
+}
+
+// Remembers the requested capture volume and applies it to every audio
+// channel of the audio-output connection. While muted, 0 is applied instead,
+// but m_inputVolume keeps the requested value so unmuting restores it.
+void AVFCameraSession::setAudioInputVolume(float volume)
+{
+    m_inputVolume = volume;
+
+    if (m_inputMuted)
+        volume = 0.0;
+
+#ifdef Q_OS_MACOS
+    // Per-channel capture volume is only set on macOS here.
+    AVCaptureConnection *audioInputConnection = [m_audioOutput connectionWithMediaType:AVMediaTypeAudio];
+    NSArray<AVCaptureAudioChannel *> *audioChannels = audioInputConnection.audioChannels;
+    if (audioChannels) {
+        for (AVCaptureAudioChannel *channel in audioChannels) {
+            channel.volume = volume;
+        }
+    }
+#endif
+}
+
+// Toggles capture mute by re-applying the stored volume (which becomes 0
+// while m_inputMuted is set).
+void AVFCameraSession::setAudioInputMuted(bool muted)
+{
+    m_inputMuted = muted;
+    setAudioInputVolume(m_inputVolume);
+}
+
+// Forwards the preview playback volume to the audio preview delegate, if any.
+void AVFCameraSession::setAudioOutputVolume(float volume)
+{
+    if (m_audioPreviewDelegate)
+        [m_audioPreviewDelegate setVolume:volume];
+}
+
+// Forwards the preview mute state to the audio preview delegate, if any.
+void AVFCameraSession::setAudioOutputMuted(bool muted)
+{
+    if (m_audioPreviewDelegate)
+        [m_audioPreviewDelegate setMuted:muted];
+}
+
+// Whether the capture session is active (requested via setActive() or
+// confirmed by the session's start/stop notifications).
+bool AVFCameraSession::isActive() const
+{
+    return m_active;
+}
+
+// Starts or stops the AVCaptureSession. activeChanged() is not emitted here;
+// it is emitted from processSessionStarted()/processSessionStopped() once
+// the session confirms the transition via its notifications.
+void AVFCameraSession::setActive(bool active)
+{
+    if (m_active == active)
+        return;
+
+    // Log the transition before updating m_active; logging after the
+    // assignment (as before) printed the new value on both sides of "->".
+    qCDebug(qLcCamera) << Q_FUNC_INFO << m_active << " -> " << active;
+
+    m_active = active;
+
+    if (active) {
+        if (!m_activeCameraDevice.isNull()) {
+            Q_EMIT readyToConfigureConnections();
+            // Invalidate the cached codec so it is recomputed for the
+            // (possibly new) active device format.
+            m_defaultCodec = 0;
+            defaultCodec();
+        }
+
+        applyImageEncoderSettings();
+
+        // According to the doc, the capture device must be locked before
+        // startRunning to prevent the format we set to be overridden by the
+        // session preset.
+        [videoCaptureDevice() lockForConfiguration:nil];
+        [m_captureSession startRunning];
+        [videoCaptureDevice() unlockForConfiguration];
+    } else {
+        [m_captureSession stopRunning];
+    }
+}
+
+// Invoked (via the observer) on AVCaptureSessionRuntimeErrorNotification:
+// marks the session inactive and reports a QCamera::CameraError.
+// NOTE(review): m_active is cleared without emitting activeChanged() —
+// presumably the did-stop-running notification follows and emits it; verify.
+void AVFCameraSession::processRuntimeError()
+{
+    qWarning() << tr("Runtime camera error");
+    m_active = false;
+    Q_EMIT error(QCamera::CameraError, tr("Runtime camera error"));
+}
+
+// Invoked when the capture session reports it has started running; syncs
+// m_active and notifies listeners exactly once per transition.
+void AVFCameraSession::processSessionStarted()
+{
+    qCDebug(qLcCamera) << Q_FUNC_INFO;
+    if (m_active)
+        return;
+
+    m_active = true;
+    Q_EMIT activeChanged(m_active);
+}
+
+// Invoked when the capture session reports it has stopped running; syncs
+// m_active and notifies listeners exactly once per transition.
+void AVFCameraSession::processSessionStopped()
+{
+    qCDebug(qLcCamera) << Q_FUNC_INFO;
+    if (!m_active)
+        return;
+
+    m_active = false;
+    Q_EMIT activeChanged(m_active);
+}
+
+// Looks up the AVCaptureDevice matching the active camera's unique id.
+// Returns nullptr when no device id is set or no device matches.
+AVCaptureDevice *AVFCameraSession::createVideoCaptureDevice()
+{
+    const QByteArray deviceId = m_activeCameraDevice.id();
+    if (deviceId.isEmpty())
+        return nullptr;
+
+    NSString *uniqueId = [NSString stringWithUTF8String:deviceId.constData()];
+    return [AVCaptureDevice deviceWithUniqueID:uniqueId];
+}
+
+// Looks up the AVCaptureDevice matching the service's selected audio input.
+// Returns nullptr when no audio input is configured or no device matches.
+AVCaptureDevice *AVFCameraSession::createAudioCaptureDevice()
+{
+    if (!m_service->audioInput())
+        return nullptr;
+
+    const QByteArray deviceId = m_service->audioInput()->device.id();
+    if (deviceId.isEmpty())
+        return nullptr;
+
+    return [AVCaptureDevice deviceWithUniqueID:[NSString stringWithUTF8String:deviceId.constData()]];
+}
+
+// (Re)creates the video device input for the current camera and adds it to
+// the capture session, removing and releasing any previous input first.
+// No-op when camera permission is missing or no device can be resolved.
+void AVFCameraSession::attachVideoInputDevice()
+{
+    if (!checkCameraPermission())
+        return;
+
+    if (m_videoInput) {
+        [m_captureSession removeInput:m_videoInput];
+        [m_videoInput release];
+        m_videoInput = nullptr;
+    }
+
+    AVCaptureDevice *videoDevice = createVideoCaptureDevice();
+    if (!videoDevice)
+        return;
+
+    // deviceInputWithDevice: returns an autoreleased object — retain it
+    // explicitly since this file does not use ARC.
+    m_videoInput = [AVCaptureDeviceInput
+                    deviceInputWithDevice:videoDevice
+                    error:nil];
+    if (m_videoInput && [m_captureSession canAddInput:m_videoInput]) {
+        [m_videoInput retain];
+        [m_captureSession addInput:m_videoInput];
+    } else {
+        qWarning() << "Failed to create video device input";
+    }
+}
+
+// (Re)creates the audio device input for the service's selected audio input
+// and adds it to the capture session, removing and releasing any previous
+// input first. No-op when no audio device can be resolved.
+void AVFCameraSession::attachAudioInputDevice()
+{
+    if (m_audioInput) {
+        [m_captureSession removeInput:m_audioInput];
+        [m_audioInput release];
+        m_audioInput = nullptr;
+    }
+
+    AVCaptureDevice *audioDevice = createAudioCaptureDevice();
+    if (!audioDevice)
+        return;
+
+    // deviceInputWithDevice: returns an autoreleased object — retain it
+    // explicitly since this file does not use ARC.
+    m_audioInput = [AVCaptureDeviceInput
+                    deviceInputWithDevice:audioDevice
+                    error:nil];
+
+    if (m_audioInput && [m_captureSession canAddInput:m_audioInput]) {
+        [m_audioInput retain];
+        [m_captureSession addInput:m_audioInput];
+    } else {
+        qWarning() << "Failed to create audio device input";
+    }
+}
+
+// Asks the image-capture control to apply its settings; returns false when
+// there is no image-capture control.
+bool AVFCameraSession::applyImageEncoderSettings()
+{
+    AVFImageCapture *control = m_service->avfImageCaptureControl();
+    return control ? control->applySettings() : false;
+}
+
+// Lazily determines the codec (FourCC) of the active device format and
+// caches it until reset to 0. Returns 0 when no device or no usable format
+// description is available.
+FourCharCode AVFCameraSession::defaultCodec()
+{
+    if (!m_defaultCodec) {
+        if (AVCaptureDevice *device = videoCaptureDevice()) {
+            AVCaptureDeviceFormat *format = device.activeFormat;
+            if (!format || !format.formatDescription)
+                return m_defaultCodec;
+            m_defaultCodec = CMVideoFormatDescriptionGetCodecType(format.formatDescription);
+        }
+    }
+    return m_defaultCodec;
+}
+
+// Resolves the platform sink behind the given QVideoSink and pushes it to
+// the renderer when it actually changed.
+void AVFCameraSession::setVideoSink(QVideoSink *sink)
+{
+    AVFVideoSink *platformSink =
+        sink ? static_cast<AVFVideoSink *>(sink->platformVideoSink()) : nullptr;
+
+    if (platformSink == m_videoSink)
+        return;
+
+    m_videoSink = platformSink;
+    updateVideoOutput();
+}
+
+// Re-attaches the video input for the current camera inside a
+// begin/commitConfiguration pair, creating the renderer output on first use.
+// An active recording is paused for the duration of the reconfiguration and
+// resumed afterwards.
+void AVFCameraSession::updateVideoInput()
+{
+    auto recorder = m_service->recorderControl();
+    if (recorder && recorder->state() == QMediaRecorder::RecordingState)
+        recorder->toggleRecord(false);
+
+    [m_captureSession beginConfiguration];
+
+    attachVideoInputDevice();
+    if (!m_activeCameraDevice.isNull() && !m_videoOutput) {
+        // First camera attached: create the renderer and hook up frame delivery.
+        setVideoOutput(new AVFCameraRenderer(this));
+        connect(m_videoOutput, &AVFCameraRenderer::newViewfinderFrame,
+                this, &AVFCameraSession::newViewfinderFrame);
+        updateVideoOutput();
+    }
+    if (m_videoOutput)
+        m_videoOutput->deviceOrientationChanged();
+
+    [m_captureSession commitConfiguration];
+
+    if (recorder && recorder->state() == QMediaRecorder::RecordingState)
+        recorder->toggleRecord(true);
+    Q_EMIT readyToConfigureConnections();
+}
+
+// Re-attaches the audio input (and its capture output) inside a
+// begin/commitConfiguration pair. Requires microphone permission; an active
+// recording is paused for the duration of the reconfiguration.
+void AVFCameraSession::updateAudioInput()
+{
+    if (!checkMicrophonePermission())
+        return;
+
+    auto recorder = m_service->recorderControl();
+    if (recorder && recorder->state() == QMediaRecorder::RecordingState)
+        recorder->toggleRecord(false);
+
+    [m_captureSession beginConfiguration];
+    if (m_audioOutput) {
+        // Drop the stale connection so the new input gets a fresh one.
+        AVCaptureConnection *lastConnection = [m_audioOutput connectionWithMediaType:AVMediaTypeAudio];
+        [m_captureSession removeConnection:lastConnection];
+    }
+    attachAudioInputDevice();
+    if (m_audioInput)
+        addAudioCapture();
+    [m_captureSession commitConfiguration];
+
+    if (recorder && recorder->state() == QMediaRecorder::RecordingState)
+        recorder->toggleRecord(true);
+}
+
+// Recreates the audio preview delegate for the currently selected audio
+// output device; any previous delegate is released first. With no output
+// device selected, the delegate stays nil.
+void AVFCameraSession::updateAudioOutput()
+{
+    QByteArray deviceId = m_service->audioOutput()
+                        ? m_service->audioOutput()->device.id()
+                        : QByteArray();
+
+    [m_audioPreviewDelegate release];
+    m_audioPreviewDelegate = nil;
+    if (!deviceId.isEmpty()) {
+        m_audioPreviewDelegate = [[AVFAudioPreviewDelegate alloc] init];
+        [m_audioPreviewDelegate setupWithCaptureSession:this
+                                      audioOutputDevice:[NSString stringWithUTF8String:
+                                                         deviceId.constData()]];
+    }
+}
+
+// Pushes the current sink to the renderer, if a renderer exists.
+void AVFCameraSession::updateVideoOutput()
+{
+    if (m_videoOutput)
+        m_videoOutput->setVideoSink(m_videoSink);
+}
+
+// Returns true when the application holds the camera permission; warns once
+// per call otherwise.
+bool AVFCameraSession::checkCameraPermission()
+{
+    const QCameraPermission permission;
+    if (qApp->checkPermission(permission) == Qt::PermissionStatus::Granted)
+        return true;
+
+    qWarning() << "Access to camera not granted";
+    return false;
+}
+
+// Returns true when the application holds the microphone permission; warns
+// once per call otherwise.
+bool AVFCameraSession::checkMicrophonePermission()
+{
+    const QMicrophonePermission permission;
+    if (qApp->checkPermission(permission) == Qt::PermissionStatus::Granted)
+        return true;
+
+    qWarning() << "Access to microphone not granted";
+    return false;
+}
+
+#include "moc_avfcamerasession_p.cpp"
diff --git a/src/plugins/multimedia/darwin/camera/avfcamerasession_p.h b/src/plugins/multimedia/darwin/camera/avfcamerasession_p.h
new file mode 100644
index 000000000..76e31ab48
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamerasession_p.h
@@ -0,0 +1,132 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFCAMERASESSION_H
+#define AVFCAMERASESSION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qmutex.h>
+#include <QtMultimedia/qcamera.h>
+#include <QVideoFrame>
+#include <qcameradevice.h>
+#include "avfaudiopreviewdelegate_p.h"
+
+#import <AVFoundation/AVFoundation.h>
+
+@class AVFCameraSessionObserver;
+
+QT_BEGIN_NAMESPACE
+
+class AVFCamera;
+class AVFCameraService;
+class AVFCameraRenderer;
+class AVFVideoSink;
+class QVideoSink;
+
+// Central owner of the AVCaptureSession for the Darwin camera backend:
+// manages the video/audio device inputs, the renderer output, the audio
+// preview delegate, formats and permission checks.
+class AVFCameraSession : public QObject
+{
+    Q_OBJECT
+public:
+    AVFCameraSession(AVFCameraService *service, QObject *parent = nullptr);
+    ~AVFCameraSession();
+
+    // NOTE(review): lower-case 'c' in the name is kept as-is; renaming would
+    // break existing callers.
+    QCameraDevice activecameraDevice() const { return m_activeCameraDevice; }
+    void setActiveCamera(const QCameraDevice &info);
+
+    void setCameraFormat(const QCameraFormat &format);
+    QCameraFormat cameraFormat() const;
+
+    // Outputs owned by the session (may be null before configuration).
+    AVFCameraRenderer *videoOutput() const { return m_videoOutput; }
+    AVCaptureAudioDataOutput *audioOutput() const { return m_audioOutput; }
+    AVFAudioPreviewDelegate *audioPreviewDelegate() const { return m_audioPreviewDelegate; }
+
+    // Native session/device accessors.
+    AVCaptureSession *captureSession() const { return m_captureSession; }
+    AVCaptureDevice *videoCaptureDevice() const;
+    AVCaptureDevice *audioCaptureDevice() const;
+
+    bool isActive() const;
+
+    // FourCC of the active device format (cached; 0 when unknown).
+    FourCharCode defaultCodec();
+
+    AVCaptureDeviceInput *videoInput() const { return m_videoInput; }
+    AVCaptureDeviceInput *audioInput() const { return m_audioInput; }
+
+    void setVideoSink(QVideoSink *sink);
+
+    // Reconfigure the capture session after device/endpoint changes.
+    void updateVideoInput();
+
+    void updateAudioInput();
+    void updateAudioOutput();
+
+public Q_SLOTS:
+    void setActive(bool active);
+
+    void setAudioInputVolume(float volume);
+    void setAudioInputMuted(bool muted);
+    void setAudioOutputMuted(bool muted);
+    void setAudioOutputVolume(float volume);
+
+    // Handlers for AVCaptureSession notifications, invoked via the observer.
+    void processRuntimeError();
+    void processSessionStarted();
+    void processSessionStopped();
+
+Q_SIGNALS:
+    void readyToConfigureConnections();
+    void activeChanged(bool);
+    void error(int error, const QString &errorString);
+    void newViewfinderFrame(const QVideoFrame &frame);
+
+private:
+    void updateCameraFormat(const QCameraFormat &format);
+
+    void setVideoOutput(AVFCameraRenderer *output);
+    void updateVideoOutput();
+
+    void addAudioCapture();
+
+    AVCaptureDevice *createVideoCaptureDevice();
+    AVCaptureDevice *createAudioCaptureDevice();
+    void attachVideoInputDevice();
+    void attachAudioInputDevice();
+    bool checkCameraPermission();
+    bool checkMicrophonePermission();
+
+    bool applyImageEncoderSettings();
+
+    QCameraDevice m_activeCameraDevice;
+    QCameraFormat m_cameraFormat;
+
+    AVFCameraService *m_service;
+    AVCaptureSession *m_captureSession;
+    AVFCameraSessionObserver *m_observer;
+
+    AVFCameraRenderer *m_videoOutput = nullptr;
+    AVFVideoSink *m_videoSink = nullptr;
+
+    AVCaptureDeviceInput *m_videoInput = nullptr;
+    AVCaptureDeviceInput *m_audioInput = nullptr;
+
+    AVCaptureAudioDataOutput *m_audioOutput = nullptr;
+    AVFAudioPreviewDelegate *m_audioPreviewDelegate = nullptr;
+
+    // Requested state; confirmed by processSessionStarted/Stopped.
+    bool m_active = false;
+
+    // Requested capture volume is kept so unmuting can restore it.
+    float m_inputVolume = 1.0;
+    bool m_inputMuted = false;
+
+    FourCharCode m_defaultCodec;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/camera/avfcamerautility.mm b/src/plugins/multimedia/darwin/camera/avfcamerautility.mm
new file mode 100644
index 000000000..1864eb0e8
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamerautility.mm
@@ -0,0 +1,730 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfcamerautility_p.h"
+#include "avfcameradebug_p.h"
+
+#include <QtCore/qvector.h>
+#include <QtCore/qpair.h>
+#include <private/qmultimediautils_p.h>
+#include <private/qcameradevice_p.h>
+#include "avfvideobuffer_p.h"
+#include "qavfhelpers_p.h"
+
+#include <functional>
+#include <algorithm>
+#include <limits>
+#include <tuple>
+
+QT_BEGIN_NAMESPACE
+
+Q_LOGGING_CATEGORY(qLcCamera, "qt.multimedia.camera")
+
+// Converts a connection's frame-duration limits into a (min fps, max fps)
+// pair. A component keeps its default value when the connection reports
+// kCMTimeInvalid (no explicit limit) or a zero-second duration.
+AVFPSRange qt_connection_framerates(AVCaptureConnection *videoConnection)
+{
+    Q_ASSERT(videoConnection);
+
+    AVFPSRange newRange;
+    // "The value in the videoMinFrameDuration is equivalent to the reciprocal
+    // of the maximum framerate, the value in the videoMaxFrameDuration is equivalent
+    // to the reciprocal of the minimum framerate."
+    if (videoConnection.supportsVideoMinFrameDuration) {
+        const CMTime cmMin = videoConnection.videoMinFrameDuration;
+        if (CMTimeCompare(cmMin, kCMTimeInvalid)) { // Has some non-default value:
+            if (const Float64 minSeconds = CMTimeGetSeconds(cmMin))
+                newRange.second = 1. / minSeconds;
+        }
+    }
+
+    if (videoConnection.supportsVideoMaxFrameDuration) {
+        const CMTime cmMax = videoConnection.videoMaxFrameDuration;
+        if (CMTimeCompare(cmMax, kCMTimeInvalid)) {
+            if (const Float64 maxSeconds = CMTimeGetSeconds(cmMax))
+                newRange.first = 1. / maxSeconds;
+        }
+    }
+
+    return newRange;
+}
+
+namespace {
+
+// True when the size is non-empty, valid, strictly positive in both
+// dimensions, and width * height cannot overflow int.
+// Fix: the original divided by size.width() without excluding a zero width
+// (QSize(0, h) with h > 0 is non-null and valid), which is undefined behavior.
+inline bool qt_area_sane(const QSize &size)
+{
+    if (size.isNull() || !size.isValid() || size.width() <= 0 || size.height() <= 0)
+        return false;
+    return std::numeric_limits<int>::max() / size.width() >= size.height();
+}
+
+// Comparator ordering AVCaptureDeviceFormats by resolution, using Comp
+// (std::less for ascending, std::greater for descending) over the
+// (width, height) tuple.
+template <template <typename...> class Comp> // std::less or std::greater (or std::equal_to)
+struct ByResolution
+{
+    bool operator() (AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2)const
+    {
+        Q_ASSERT(f1 && f2);
+        const QSize r1(qt_device_format_resolution(f1));
+        const QSize r2(qt_device_format_resolution(f2));
+        // use std::tuple for lexicographical sorting:
+        const Comp<std::tuple<int, int>> op = {};
+        return op(std::make_tuple(r1.width(), r1.height()),
+                  std::make_tuple(r2.width(), r2.height()));
+    }
+};
+
+// Predicate: true when a format advertises no supported frame-rate ranges.
+struct FormatHasNoFPSRange
+{
+    bool operator() (AVCaptureDeviceFormat *format) const
+    {
+        Q_ASSERT(format);
+        return !format.videoSupportedFrameRateRanges || !format.videoSupportedFrameRateRanges.count;
+    }
+};
+
+// Smallest |maxFrameRate - fps| over the format's supported frame-rate
+// ranges (the format must have at least one range).
+Float64 qt_find_min_framerate_distance(AVCaptureDeviceFormat *format, Float64 fps)
+{
+    Q_ASSERT(format && format.videoSupportedFrameRateRanges
+             && format.videoSupportedFrameRateRanges.count);
+
+    Float64 best = std::numeric_limits<Float64>::max();
+    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges)
+        best = qMin(best, qAbs(range.maxFrameRate - fps));
+
+    return best;
+}
+
+} // Unnamed namespace.
+
+// Maps a QCameraFormat onto one of the capture device's native formats.
+// A native format matches when its pixel format and resolution equal the
+// requested ones, it passes the optional cvFormatValidator, and it offers a
+// frame-rate range contained in the requested [min, max] interval; among
+// matches, the one with the highest max frame rate wins. Returns nil when
+// nothing matches or the pixel format has no CV representation.
+AVCaptureDeviceFormat *
+qt_convert_to_capture_device_format(AVCaptureDevice *captureDevice,
+                                    const QCameraFormat &cameraFormat,
+                                    const std::function<bool(uint32_t)> &cvFormatValidator)
+{
+    const auto cameraFormatPrivate = QCameraFormatPrivate::handle(cameraFormat);
+    if (!cameraFormatPrivate)
+        return nil;
+
+    const auto requiredCvPixFormat = QAVFHelpers::toCVPixelFormat(cameraFormatPrivate->pixelFormat,
+                                                                  cameraFormatPrivate->colorRange);
+
+    if (requiredCvPixFormat == CvPixelFormatInvalid)
+        return nil;
+
+    AVCaptureDeviceFormat *newFormat = nil;
+    Float64 newFormatMaxFrameRate = {};
+    NSArray<AVCaptureDeviceFormat *> *formats = captureDevice.formats;
+    for (AVCaptureDeviceFormat *format in formats) {
+        CMFormatDescriptionRef formatDesc = format.formatDescription;
+        CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(formatDesc);
+        // For uncompressed video formats the media subtype is the CV pixel format.
+        FourCharCode cvPixFormat = CMVideoFormatDescriptionGetCodecType(formatDesc);
+
+        if (cvPixFormat != requiredCvPixFormat)
+            continue;
+
+        if (cameraFormatPrivate->resolution != QSize(dim.width, dim.height))
+            continue;
+
+        if (cvFormatValidator && !cvFormatValidator(cvPixFormat))
+            continue;
+
+        // Tolerate tiny floating-point differences in the advertised rates.
+        const float epsilon = 0.001f;
+        for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) {
+            if (frameRateRange.minFrameRate >= cameraFormatPrivate->minFrameRate - epsilon
+                && frameRateRange.maxFrameRate <= cameraFormatPrivate->maxFrameRate + epsilon
+                && newFormatMaxFrameRate < frameRateRange.maxFrameRate) {
+                newFormat = format;
+                newFormatMaxFrameRate = frameRateRange.maxFrameRate;
+            }
+        }
+    }
+    return newFormat;
+}
+
+// Collects the device's formats, dropping entries with invalid resolutions
+// and collapsing formats that share a resolution. Among duplicates, a format
+// whose codec matches 'filter' is preferred. The result is sorted by
+// resolution in ascending order.
+QVector<AVCaptureDeviceFormat *> qt_unique_device_formats(AVCaptureDevice *captureDevice, FourCharCode filter)
+{
+    // 'filter' is the format we prefer if we have duplicates.
+    Q_ASSERT(captureDevice);
+
+    QVector<AVCaptureDeviceFormat *> formats;
+
+    if (!captureDevice.formats || !captureDevice.formats.count)
+        return formats;
+
+    formats.reserve(captureDevice.formats.count);
+    for (AVCaptureDeviceFormat *format in captureDevice.formats) {
+        const QSize resolution(qt_device_format_resolution(format));
+        if (resolution.isNull() || !resolution.isValid())
+            continue;
+        formats << format;
+    }
+
+    if (!formats.size())
+        return formats;
+
+    // Sort so duplicate resolutions become adjacent, then compact in place.
+    std::sort(formats.begin(), formats.end(), ByResolution<std::less>());
+
+    QSize size(qt_device_format_resolution(formats[0]));
+    FourCharCode codec = CMVideoFormatDescriptionGetCodecType(formats[0].formatDescription);
+    int last = 0;
+    for (int i = 1; i < formats.size(); ++i) {
+        const QSize nextSize(qt_device_format_resolution(formats[i]));
+        if (nextSize == size) {
+            // Same resolution: keep the kept entry only if it already has
+            // the preferred codec; otherwise replace it.
+            if (codec == filter)
+                continue;
+            formats[last] = formats[i];
+        } else {
+            ++last;
+            formats[last] = formats[i];
+            size = nextSize;
+        }
+        codec = CMVideoFormatDescriptionGetCodecType(formats[i].formatDescription);
+    }
+    formats.resize(last + 1);
+
+    return formats;
+}
+
+// Encoded video dimensions of the format; an invalid QSize when the format
+// or its description is missing.
+QSize qt_device_format_resolution(AVCaptureDeviceFormat *format)
+{
+    if (!format || !format.formatDescription)
+        return QSize();
+
+    const CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+    return QSize(dim.width, dim.height);
+}
+
+// High-resolution still-image dimensions of the format. Only populated on
+// iOS; returns an empty QSize on other platforms.
+QSize qt_device_format_high_resolution(AVCaptureDeviceFormat *format)
+{
+    Q_ASSERT(format);
+    QSize res;
+#if defined(Q_OS_IOS)
+    const CMVideoDimensions hrDim(format.highResolutionStillImageDimensions);
+    res.setWidth(hrDim.width);
+    res.setHeight(hrDim.height);
+#endif
+    return res;
+}
+
+// All supported frame-rate ranges of the format as (min, max) pairs; empty
+// when the format advertises none.
+QVector<AVFPSRange> qt_device_format_framerates(AVCaptureDeviceFormat *format)
+{
+    Q_ASSERT(format);
+
+    QVector<AVFPSRange> result;
+
+    NSArray<AVFrameRateRange *> *ranges = format.videoSupportedFrameRateRanges;
+    if (!ranges || !ranges.count)
+        return result;
+
+    result.reserve(ranges.count);
+    for (AVFrameRateRange *range in ranges)
+        result.append(AVFPSRange(range.minFrameRate, range.maxFrameRate));
+
+    return result;
+}
+
+// Derives the pixel aspect ratio from the difference between the encoded
+// and presentation widths of the format. Returns (1, 1) for square pixels
+// and an invalid QSize when it cannot be determined.
+QSize qt_device_format_pixel_aspect_ratio(AVCaptureDeviceFormat *format)
+{
+    Q_ASSERT(format);
+
+    if (!format.formatDescription) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "no format description found";
+        return QSize();
+    }
+
+    const CMVideoDimensions res = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+    const CGSize resPAR = CMVideoFormatDescriptionGetPresentationDimensions(format.formatDescription, true, false);
+
+    if (qAbs(resPAR.width - res.width) < 1.) {
+        // "Pixel aspect ratio is used to adjust the width, leaving the height alone."
+        return QSize(1, 1);
+    }
+
+    if (!res.width || !resPAR.width)
+        return QSize();
+
+    // Express the width ratio as a small integer fraction.
+    auto frac = qRealToFraction(resPAR.width > res.width ? res.width / qreal(resPAR.width)
+                                                         : resPAR.width / qreal(res.width));
+
+    return QSize(frac.numerator, frac.denominator);
+}
+
+// Picks the device format whose resolution best matches 'request'.
+// An exact match (including iOS high-resolution still-image sizes when
+// stillImage is set) wins outright; otherwise the candidate minimizing the
+// width/height/area differences to the request is chosen. Returns nullptr
+// when the device has no formats or the request area would overflow.
+AVCaptureDeviceFormat *qt_find_best_resolution_match(AVCaptureDevice *captureDevice,
+                                                     const QSize &request,
+                                                     FourCharCode filter,
+                                                     bool stillImage)
+{
+    Q_ASSERT(captureDevice);
+    Q_ASSERT(!request.isNull() && request.isValid());
+
+    if (!captureDevice.formats || !captureDevice.formats.count)
+        return nullptr;
+
+    QVector<AVCaptureDeviceFormat *> formats(qt_unique_device_formats(captureDevice, filter));
+
+    // Pass 1: exact resolution match.
+    for (int i = 0; i < formats.size(); ++i) {
+        AVCaptureDeviceFormat *format = formats[i];
+        if (qt_device_format_resolution(format) == request)
+            return format;
+        // iOS only (still images).
+        if (stillImage && qt_device_format_high_resolution(format) == request)
+            return format;
+    }
+
+    if (!qt_area_sane(request))
+        return nullptr;
+
+    typedef QPair<QSize, AVCaptureDeviceFormat *> FormatPair;
+
+    // Pass 2: gather all sane candidate sizes (each format can contribute
+    // both its default and, for still images, its high-resolution size).
+    QVector<FormatPair> pairs; // default|HR sizes
+    pairs.reserve(formats.size());
+
+    for (int i = 0; i < formats.size(); ++i) {
+        AVCaptureDeviceFormat *format = formats[i];
+        const QSize res(qt_device_format_resolution(format));
+        if (!res.isNull() && res.isValid() && qt_area_sane(res))
+            pairs << FormatPair(res, format);
+        const QSize highRes(qt_device_format_high_resolution(format));
+        if (stillImage && !highRes.isNull() && highRes.isValid() && qt_area_sane(highRes))
+            pairs << FormatPair(highRes, format);
+    }
+
+    if (!pairs.size())
+        return nullptr;
+
+    // Pass 3: keep the candidate with the smallest width/height/area
+    // deviation from the request.
+    AVCaptureDeviceFormat *best = pairs[0].second;
+    QSize next(pairs[0].first);
+    int wDiff = qAbs(request.width() - next.width());
+    int hDiff = qAbs(request.height() - next.height());
+    const int area = request.width() * request.height();
+    int areaDiff = qAbs(area - next.width() * next.height());
+    for (int i = 1; i < pairs.size(); ++i) {
+        next = pairs[i].first;
+        const int newWDiff = qAbs(next.width() - request.width());
+        const int newHDiff = qAbs(next.height() - request.height());
+        const int newAreaDiff = qAbs(area - next.width() * next.height());
+
+        if ((newWDiff < wDiff && newHDiff < hDiff)
+            || ((newWDiff <= wDiff || newHDiff <= hDiff) && newAreaDiff <= areaDiff)) {
+            wDiff = newWDiff;
+            hDiff = newHDiff;
+            best = pairs[i].second;
+            areaDiff = newAreaDiff;
+        }
+    }
+
+    return best;
+}
+
+// Picks the device format (preferring higher resolutions) that supports the
+// requested frame rate: first a format whose frame-rate range contains fps
+// (or matches a point range within epsilon), otherwise the format whose max
+// frame rate is closest to fps. Returns nil when no format has rate ranges.
+AVCaptureDeviceFormat *qt_find_best_framerate_match(AVCaptureDevice *captureDevice,
+                                                    FourCharCode filter,
+                                                    Float64 fps)
+{
+    Q_ASSERT(captureDevice);
+    Q_ASSERT(fps > 0.);
+
+    const qreal epsilon = 0.1;
+
+    QVector<AVCaptureDeviceFormat *>sorted(qt_unique_device_formats(captureDevice, filter));
+    // Sort formats by their resolution in decreasing order:
+    std::sort(sorted.begin(), sorted.end(), ByResolution<std::greater>());
+    // We can use only formats with framerate ranges:
+    sorted.erase(std::remove_if(sorted.begin(), sorted.end(), FormatHasNoFPSRange()), sorted.end());
+
+    if (!sorted.size())
+        return nil;
+
+    // Pass 1: a format whose supported range actually contains fps.
+    for (int i = 0; i < sorted.size(); ++i) {
+        AVCaptureDeviceFormat *format = sorted[i];
+        for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
+            if (range.maxFrameRate - range.minFrameRate < epsilon) {
+                // On OS X ranges are points (built-in camera).
+                if (qAbs(fps - range.maxFrameRate) < epsilon)
+                    return format;
+            }
+
+            if (fps >= range.minFrameRate && fps <= range.maxFrameRate)
+                return format;
+        }
+    }
+
+    // Pass 2: fall back to the format with the nearest max frame rate.
+    Float64 distance = qt_find_min_framerate_distance(sorted[0], fps);
+    AVCaptureDeviceFormat *match = sorted[0];
+    for (int i = 1; i < sorted.size(); ++i) {
+        const Float64 newDistance = qt_find_min_framerate_distance(sorted[i], fps);
+        if (newDistance < distance) {
+            distance = newDistance;
+            match = sorted[i];
+        }
+    }
+
+    return match;
+}
+
+// Finds the frame-rate range of 'format' best suited for fps: a range
+// containing fps (or a point range within epsilon) if one exists, otherwise
+// the range whose max frame rate is closest to fps. The format must have at
+// least one range.
+AVFrameRateRange *qt_find_supported_framerate_range(AVCaptureDeviceFormat *format, Float64 fps)
+{
+    Q_ASSERT(format && format.videoSupportedFrameRateRanges
+             && format.videoSupportedFrameRateRanges.count);
+
+    const qreal epsilon = 0.1;
+
+    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
+        if (range.maxFrameRate - range.minFrameRate < epsilon) {
+            // On OS X ranges are points (built-in camera).
+            if (qAbs(fps - range.maxFrameRate) < epsilon)
+                return range;
+        }
+
+        if (fps >= range.minFrameRate && fps <= range.maxFrameRate)
+            return range;
+    }
+
+    // Fallback: nearest max frame rate (same metric as
+    // qt_find_min_framerate_distance, but returning the range itself).
+    AVFrameRateRange *match = [format.videoSupportedFrameRateRanges objectAtIndex:0];
+    Float64 distance = qAbs(match.maxFrameRate - fps);
+    for (NSUInteger i = 1, e = format.videoSupportedFrameRateRanges.count; i < e; ++i) {
+        AVFrameRateRange *range = [format.videoSupportedFrameRateRanges objectAtIndex:i];
+        const Float64 newDistance = qAbs(range.maxFrameRate - fps);
+        if (newDistance < distance) {
+            distance = newDistance;
+            match = range;
+        }
+    }
+
+    return match;
+}
+
+// True when any of the format's supported ranges covers fps (with a small
+// tolerance); false for a null format or non-positive fps.
+bool qt_format_supports_framerate(AVCaptureDeviceFormat *format, qreal fps)
+{
+    if (!format || fps <= qreal(0))
+        return false;
+
+    const qreal epsilon = 0.1;
+    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
+        if (fps >= range.minFrameRate - epsilon && fps <= range.maxFrameRate + epsilon)
+            return true;
+    }
+
+    return false;
+}
+
+// Two formats are considered equal when they are the same object, or share
+// both media type and an equal format description.
+bool qt_formats_are_equal(AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2)
+{
+    return f1 == f2
+        || ([f1.mediaType isEqualToString:f2.mediaType]
+            && CMFormatDescriptionEqual(f1.formatDescription, f2.formatDescription));
+}
+
+// Activates 'format' on the device, optionally preserving the current frame
+// rate limits (changing activeFormat resets them). Returns true when the
+// format was (considered) changed, false otherwise.
+// NOTE(review): 'firstSet' is a function-local static shared process-wide,
+// so the first-call special case applies across all capture devices —
+// confirm this is intended for multi-camera use.
+bool qt_set_active_format(AVCaptureDevice *captureDevice, AVCaptureDeviceFormat *format, bool preserveFps)
+{
+    static bool firstSet = true;
+
+    if (!captureDevice || !format)
+        return false;
+
+    if (qt_formats_are_equal(captureDevice.activeFormat, format)) {
+        if (firstSet) {
+            // The capture device format is persistent. The first time we set a format, report that
+            // it changed even if the formats are the same.
+            // This prevents the session from resetting the format to the default value.
+            firstSet = false;
+            return true;
+        }
+        return false;
+    }
+
+    firstSet = false;
+
+    // Device must be locked for configuration before mutating activeFormat.
+    const AVFConfigurationLock lock(captureDevice);
+    if (!lock) {
+        qWarning("Failed to set active format (lock failed)");
+        return false;
+    }
+
+    // Changing the activeFormat resets the frame rate.
+    AVFPSRange fps;
+    if (preserveFps)
+        fps = qt_current_framerates(captureDevice, nil);
+
+    captureDevice.activeFormat = format;
+
+    if (preserveFps)
+        qt_set_framerate_limits(captureDevice, nil, fps.first, fps.second);
+
+    return true;
+}
+
+// Applies [minFPS, maxFPS] limits to a capture connection by setting the
+// reciprocal frame durations; a zero bound resets the corresponding
+// duration to kCMTimeInvalid (device default).
+// NOTE(review): CMTimeMake's timescale parameter is integral, so a
+// fractional fps (e.g. 29.97) is truncated here — confirm whether rounding
+// or CMTimeMakeWithSeconds would be more appropriate.
+void qt_set_framerate_limits(AVCaptureConnection *videoConnection, qreal minFPS, qreal maxFPS)
+{
+    Q_ASSERT(videoConnection);
+
+    // Reject negative bounds and inverted (max < min) intervals.
+    if (minFPS < 0. || maxFPS < 0. || (maxFPS && maxFPS < minFPS)) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid framerates (min, max):"
+                           << minFPS << maxFPS;
+        return;
+    }
+
+    CMTime minDuration = kCMTimeInvalid;
+    if (maxFPS > 0.) {
+        if (!videoConnection.supportsVideoMinFrameDuration)
+            qCDebug(qLcCamera) << Q_FUNC_INFO << "maximum framerate is not supported";
+        else
+            minDuration = CMTimeMake(1, maxFPS);
+    }
+    if (videoConnection.supportsVideoMinFrameDuration)
+        videoConnection.videoMinFrameDuration = minDuration;
+
+    CMTime maxDuration = kCMTimeInvalid;
+    if (minFPS > 0.) {
+        if (!videoConnection.supportsVideoMaxFrameDuration)
+            qCDebug(qLcCamera) << Q_FUNC_INFO << "minimum framerate is not supported";
+        else
+            maxDuration = CMTimeMake(1, minFPS);
+    }
+    if (videoConnection.supportsVideoMaxFrameDuration)
+        videoConnection.videoMaxFrameDuration = maxDuration;
+}
+
+CMTime qt_adjusted_frame_duration(AVFrameRateRange *range, qreal fps)
+{
+    Q_ASSERT(range);
+    Q_ASSERT(fps > 0.);
+
+    // Degenerate (essentially single-rate) ranges can happen on OS X.
+    const qreal rangeWidth = range.maxFrameRate - range.minFrameRate;
+    if (rangeWidth < 0.1)
+        return range.minFrameDuration;
+
+    // Clamp to the range limits; rate and duration are inverses, so the
+    // minimum rate corresponds to the maximum duration and vice versa.
+    if (fps <= range.minFrameRate)
+        return range.maxFrameDuration;
+    if (fps >= range.maxFrameRate)
+        return range.minFrameDuration;
+
+    // In-range: express the duration 1/fps as an exact rational CMTime.
+    const auto fraction = qRealToFraction(1. / fps);
+    return CMTimeMake(fraction.numerator, fraction.denominator);
+}
+
+void qt_set_framerate_limits(AVCaptureDevice *captureDevice, qreal minFPS, qreal maxFPS)
+{
+    // Applies [minFPS, maxFPS] limits directly on the capture device by
+    // converting them into active min/max frame durations. 0 means "no limit"
+    // for either bound.
+    Q_ASSERT(captureDevice);
+    if (!captureDevice.activeFormat) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "no active capture device format";
+        return;
+    }
+
+    // Negative rates and an inverted (max < min) range are rejected outright.
+    if (minFPS < 0. || maxFPS < 0. || (maxFPS && maxFPS < minFPS)) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid framerates (min, max):"
+                           << minFPS << maxFPS;
+        return;
+    }
+
+    // Find a supported range covering the requested rate(s) and convert the
+    // FPS values into frame durations (duration = 1 / fps).
+    CMTime minFrameDuration = kCMTimeInvalid;
+    CMTime maxFrameDuration = kCMTimeInvalid;
+    if (maxFPS || minFPS) {
+        AVFrameRateRange *range = qt_find_supported_framerate_range(captureDevice.activeFormat,
+                                                                    maxFPS ? maxFPS : minFPS);
+        if (!range) {
+            qCDebug(qLcCamera) << Q_FUNC_INFO << "no framerate range found, (min, max):"
+                               << minFPS << maxFPS;
+            return;
+        }
+
+        if (maxFPS)
+            minFrameDuration = qt_adjusted_frame_duration(range, maxFPS);
+        if (minFPS)
+            maxFrameDuration = qt_adjusted_frame_duration(range, minFPS);
+    }
+
+    // The device must be locked for configuration before mutating durations.
+    const AVFConfigurationLock lock(captureDevice);
+    if (!lock) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
+        return;
+    }
+
+    // While Apple's docs say kCMTimeInvalid will end in default
+    // settings for this format, kCMTimeInvalid on OS X ends with a runtime
+    // exception:
+    // "The activeVideoMinFrameDuration passed is not supported by the device."
+    // Instead, use the first item in the supported frame rates.
+#ifdef Q_OS_IOS
+    [captureDevice setActiveVideoMinFrameDuration:minFrameDuration];
+    [captureDevice setActiveVideoMaxFrameDuration:maxFrameDuration];
+#elif defined(Q_OS_MACOS)
+    // If neither bound was requested, fall back to the first supported range.
+    if (CMTimeCompare(minFrameDuration, kCMTimeInvalid) == 0
+        && CMTimeCompare(maxFrameDuration, kCMTimeInvalid) == 0) {
+        AVFrameRateRange *range = captureDevice.activeFormat.videoSupportedFrameRateRanges.firstObject;
+        minFrameDuration = range.minFrameDuration;
+        maxFrameDuration = range.maxFrameDuration;
+    }
+
+    // Only set durations that are valid; see the note above.
+    if (CMTimeCompare(minFrameDuration, kCMTimeInvalid))
+        [captureDevice setActiveVideoMinFrameDuration:minFrameDuration];
+
+    if (CMTimeCompare(maxFrameDuration, kCMTimeInvalid))
+        [captureDevice setActiveVideoMaxFrameDuration:maxFrameDuration];
+#endif // Q_OS_MACOS
+}
+
+void qt_set_framerate_limits(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection,
+                             qreal minFPS, qreal maxFPS)
+{
+    // Convenience overload: the connection parameter is ignored on Darwin;
+    // limits are applied directly on the capture device.
+    Q_UNUSED(videoConnection);
+    Q_ASSERT(captureDevice);
+    qt_set_framerate_limits(captureDevice, minFPS, maxFPS);
+}
+
+AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection)
+{
+    Q_UNUSED(videoConnection);
+    Q_ASSERT(captureDevice);
+
+    // Frame rate and frame duration are inverses: the active *min* duration
+    // yields the *max* FPS and the active *max* duration yields the *min* FPS.
+    AVFPSRange fps;
+
+    const CMTime minDuration = captureDevice.activeVideoMinFrameDuration;
+    if (CMTimeCompare(minDuration, kCMTimeInvalid) != 0) {
+        const Float64 minSeconds = CMTimeGetSeconds(minDuration);
+        if (minSeconds)
+            fps.second = 1. / minSeconds; // Max FPS = 1 / MinDuration.
+    }
+
+    const CMTime maxDuration = captureDevice.activeVideoMaxFrameDuration;
+    if (CMTimeCompare(maxDuration, kCMTimeInvalid) != 0) {
+        const Float64 maxSeconds = CMTimeGetSeconds(maxDuration);
+        if (maxSeconds)
+            fps.first = 1. / maxSeconds; // Min FPS = 1 / MaxDuration.
+    }
+
+    return fps;
+}
+
+QList<AudioValueRange> qt_supported_sample_rates_for_format(int codecId)
+{
+    // Returns the sample-rate ranges the encoder for 'codecId' supports, or
+    // an empty list when the codec is unknown or the query fails.
+    QList<AudioValueRange> result;
+    UInt32 format = codecId;
+    UInt32 size = 0;
+    OSStatus err = AudioFormatGetPropertyInfo(
+                kAudioFormatProperty_AvailableEncodeSampleRates,
+                sizeof(format),
+                &format,
+                &size);
+
+    // Also bail out on a zero-sized result to avoid a zero-length read below.
+    if (err != noErr || !size)
+        return result;
+
+    // Read directly into the list's storage instead of a variable-length
+    // array on the stack (VLAs are a non-standard extension in C++).
+    result.resize(size / sizeof(AudioValueRange));
+    err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeSampleRates,
+                                 sizeof(format),
+                                 &format,
+                                 &size,
+                                 result.data());
+    if (err != noErr) {
+        result.clear();
+        return result;
+    }
+
+    // The property call may legitimately return fewer bytes than requested.
+    result.resize(size / sizeof(AudioValueRange));
+    return result;
+}
+
+QList<AudioValueRange> qt_supported_bit_rates_for_format(int codecId)
+{
+    // Returns the bit-rate ranges the encoder for 'codecId' supports, or an
+    // empty list when the codec is unknown or the query fails.
+    QList<AudioValueRange> result;
+    UInt32 format = codecId;
+    UInt32 size = 0;
+    OSStatus err = AudioFormatGetPropertyInfo(
+                kAudioFormatProperty_AvailableEncodeBitRates,
+                sizeof(format),
+                &format,
+                &size);
+
+    // Also bail out on a zero-sized result to avoid a zero-length read below.
+    if (err != noErr || !size)
+        return result;
+
+    // Read directly into the list's storage instead of a variable-length
+    // array on the stack (VLAs are a non-standard extension in C++).
+    result.resize(size / sizeof(AudioValueRange));
+    err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeBitRates,
+                                 sizeof(format),
+                                 &format,
+                                 &size,
+                                 result.data());
+    if (err != noErr) {
+        result.clear();
+        return result;
+    }
+
+    // The property call may legitimately return fewer bytes than requested.
+    result.resize(size / sizeof(AudioValueRange));
+    return result;
+}
+
+std::optional<QList<UInt32>> qt_supported_channel_counts_for_format(int codecId)
+{
+    // Returns the channel counts the encoder for 'codecId' supports, an empty
+    // list when the query fails, or std::nullopt when any number of channels
+    // may be encoded.
+    QList<UInt32> result;
+    AudioStreamBasicDescription sf = {};
+    sf.mFormatID = codecId;
+    UInt32 size = 0;
+    OSStatus err = AudioFormatGetPropertyInfo(
+                kAudioFormatProperty_AvailableEncodeNumberChannels,
+                sizeof(sf),
+                &sf,
+                &size);
+
+    if (err != noErr)
+        return result;
+
+    // From Apple's docs:
+    // A value of 0xFFFFFFFF indicates that any number of channels may be encoded.
+    if (int(size) == -1)
+        return std::nullopt;
+
+    // Avoid a zero-length read below.
+    if (!size)
+        return result;
+
+    // Read directly into the list's storage instead of a variable-length
+    // array on the stack (VLAs are a non-standard extension in C++).
+    result.resize(size / sizeof(UInt32));
+    err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeNumberChannels,
+                                 sizeof(sf),
+                                 &sf,
+                                 &size,
+                                 result.data());
+    if (err != noErr) {
+        result.clear();
+        return result;
+    }
+
+    // The property call may legitimately return fewer bytes than requested.
+    result.resize(size / sizeof(UInt32));
+    return result;
+}
+
+QList<UInt32> qt_supported_channel_layout_tags_for_format(int codecId, int noChannels)
+{
+    // Returns the channel-layout tags the encoder for 'codecId' supports for
+    // a given channel count, or an empty list on failure.
+    QList<UInt32> result;
+    AudioStreamBasicDescription sf = {};
+    sf.mFormatID = codecId;
+    sf.mChannelsPerFrame = noChannels;
+    UInt32 size = 0;
+    OSStatus err = AudioFormatGetPropertyInfo(
+                kAudioFormatProperty_AvailableEncodeChannelLayoutTags,
+                sizeof(sf),
+                &sf,
+                &size);
+
+    // Also bail out on a zero-sized result to avoid a zero-length read below.
+    if (err != noErr || !size)
+        return result;
+
+    // Read directly into the list's storage instead of a variable-length
+    // array on the stack (VLAs are a non-standard extension in C++);
+    // AudioChannelLayoutTag is a UInt32, so the element types match.
+    result.resize(size / sizeof(UInt32));
+    err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeChannelLayoutTags,
+                                 sizeof(sf),
+                                 &sf,
+                                 &size,
+                                 result.data());
+    if (err != noErr) {
+        result.clear();
+        return result;
+    }
+
+    // The property call may legitimately return fewer bytes than requested.
+    result.resize(size / sizeof(UInt32));
+    return result;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/darwin/camera/avfcamerautility_p.h b/src/plugins/multimedia/darwin/camera/avfcamerautility_p.h
new file mode 100644
index 000000000..b5c9e9bda
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfcamerautility_p.h
@@ -0,0 +1,165 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFCAMERAUTILITY_H
+#define AVFCAMERAUTILITY_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qglobal.h>
+#include <QtCore/qdebug.h>
+#include <QtCore/qlist.h>
+#include <QtCore/qpair.h>
+#include <QtCore/qsize.h>
+
+#include <functional>
+#include <memory>
+#include <optional>
+
+#include "qcameradevice.h"
+
+#include <AVFoundation/AVFoundation.h>
+
+// In case we have SDK below 10.7/7.0:
+@class AVCaptureDeviceFormat;
+
+QT_BEGIN_NAMESPACE
+
+// RAII guard around -[AVCaptureDevice lockForConfiguration:]: acquires the
+// device configuration lock in the constructor and releases it (if acquired)
+// in the destructor. Evaluate the object in a boolean context to check
+// whether the lock was obtained.
+class AVFConfigurationLock
+{
+public:
+    explicit AVFConfigurationLock(AVCaptureDevice *captureDevice)
+        : m_captureDevice(captureDevice),
+          m_locked(false)
+    {
+        Q_ASSERT(m_captureDevice);
+        NSError *error = nil;
+        m_locked = [m_captureDevice lockForConfiguration:&error];
+    }
+
+    ~AVFConfigurationLock()
+    {
+        // Only unlock when the constructor actually acquired the lock.
+        if (m_locked)
+            [m_captureDevice unlockForConfiguration];
+    }
+
+    // True if the configuration lock is held.
+    operator bool() const
+    {
+        return m_locked;
+    }
+
+private:
+    Q_DISABLE_COPY(AVFConfigurationLock)
+
+    AVCaptureDevice *m_captureDevice;
+    bool m_locked;
+};
+
+struct AVFObjectDeleter {
+    // Deleter used by AVFScopedPointer: sends -release to the owned object.
+    // Messaging nil is a no-op in Objective-C, so no null check is needed.
+    void operator()(NSObject *obj)
+    {
+        [obj release];
+    }
+};
+
+// Owning smart pointer for manually retained Objective-C objects: sends
+// -release (via AVFObjectDeleter) when the pointer goes out of scope.
+template<class T>
+class AVFScopedPointer : public std::unique_ptr<NSObject, AVFObjectDeleter>
+{
+public:
+    AVFScopedPointer() {}
+    explicit AVFScopedPointer(T *ptr) : std::unique_ptr<NSObject, AVFObjectDeleter>(ptr) {}
+    operator T*() const
+    {
+        // Quite handy operator to enable Obj-C messages: [ptr someMethod];
+        return data();
+    }
+
+    // The managed pointer, downcast to the concrete type; may be nil.
+    T *data() const
+    {
+        return static_cast<T *>(get());
+    }
+
+    // Relinquishes ownership without sending -release; the caller becomes
+    // responsible for releasing the returned reference.
+    T *take()
+    {
+        return static_cast<T *>(release());
+    }
+};
+
+// Specialization for dispatch queues: dispatch objects are not NSObjects
+// under manual retain/release, so they need dispatch_release() instead of
+// the Objective-C -release message.
+template<>
+class AVFScopedPointer<dispatch_queue_t>
+{
+public:
+    AVFScopedPointer() : m_queue(nullptr) {}
+    explicit AVFScopedPointer(dispatch_queue_t q) : m_queue(q) {}
+
+    ~AVFScopedPointer()
+    {
+        if (m_queue)
+            dispatch_release(m_queue);
+    }
+
+    operator dispatch_queue_t() const
+    {
+        // Quite handy operator to enable Obj-C messages: [ptr someMethod];
+        return m_queue;
+    }
+
+    // The managed queue; may be null.
+    dispatch_queue_t data() const
+    {
+        return m_queue;
+    }
+
+    // Releases the current queue (if any) and takes ownership of 'q'.
+    void reset(dispatch_queue_t q = nullptr)
+    {
+        // Guard against self-reset: releasing m_queue first and then storing
+        // the same pointer back would leave a dangling reference to a
+        // destroyed queue.
+        if (m_queue == q)
+            return;
+        if (m_queue)
+            dispatch_release(m_queue);
+        m_queue = q;
+    }
+
+private:
+    dispatch_queue_t m_queue;
+
+    Q_DISABLE_COPY(AVFScopedPointer)
+};
+
+// (minimum FPS, maximum FPS) pair describing a frame-rate interval.
+typedef QPair<qreal, qreal> AVFPSRange;
+AVFPSRange qt_connection_framerates(AVCaptureConnection *videoConnection);
+
+// Capture-device format discovery and inspection helpers.
+AVCaptureDeviceFormat *qt_convert_to_capture_device_format(
+        AVCaptureDevice *captureDevice, const QCameraFormat &format,
+        const std::function<bool(uint32_t)> &cvFormatValidator = nullptr);
+QList<AVCaptureDeviceFormat *> qt_unique_device_formats(AVCaptureDevice *captureDevice,
+                                                        FourCharCode preferredFormat);
+QSize qt_device_format_resolution(AVCaptureDeviceFormat *format);
+QSize qt_device_format_high_resolution(AVCaptureDeviceFormat *format);
+QSize qt_device_format_pixel_aspect_ratio(AVCaptureDeviceFormat *format);
+QList<AVFPSRange> qt_device_format_framerates(AVCaptureDeviceFormat *format);
+AVCaptureDeviceFormat *qt_find_best_resolution_match(AVCaptureDevice *captureDevice, const QSize &res,
+                                                     FourCharCode preferredFormat, bool stillImage = true);
+AVCaptureDeviceFormat *qt_find_best_framerate_match(AVCaptureDevice *captureDevice,
+                                                    FourCharCode preferredFormat,
+                                                    Float64 fps);
+AVFrameRateRange *qt_find_supported_framerate_range(AVCaptureDeviceFormat *format, Float64 fps);
+bool qt_format_supports_framerate(AVCaptureDeviceFormat *format, qreal fps);
+
+// Format comparison and activation.
+bool qt_formats_are_equal(AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2);
+bool qt_set_active_format(AVCaptureDevice *captureDevice, AVCaptureDeviceFormat *format, bool preserveFps);
+
+// Frame-rate querying and limiting.
+AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection);
+void qt_set_framerate_limits(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection,
+                             qreal minFPS, qreal maxFPS);
+
+// Audio encoder capability queries (Audio Toolbox).
+QList<AudioValueRange> qt_supported_sample_rates_for_format(int codecId);
+QList<AudioValueRange> qt_supported_bit_rates_for_format(int codecId);
+std::optional<QList<UInt32>> qt_supported_channel_counts_for_format(int codecId);
+QList<UInt32> qt_supported_channel_layout_tags_for_format(int codecId, int noChannels);
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/camera/avfimagecapture.mm b/src/plugins/multimedia/darwin/camera/avfimagecapture.mm
new file mode 100644
index 000000000..2ee7b0597
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfimagecapture.mm
@@ -0,0 +1,385 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfcameradebug_p.h"
+#include "avfimagecapture_p.h"
+#include "avfcameraservice_p.h"
+#include "avfcamerautility_p.h"
+#include "avfcamera_p.h"
+#include "avfcamerasession_p.h"
+#include "avfcamerarenderer_p.h"
+#include "private/qmediastoragelocation_p.h"
+#include <private/qplatformimagecapture_p.h>
+#include <private/qmemoryvideobuffer_p.h>
+#include <private/qvideoframe_p.h>
+
+#include <QtCore/qurl.h>
+#include <QtCore/qfile.h>
+#include <QtCore/qbuffer.h>
+#include <QtConcurrent/qtconcurrentrun.h>
+#include <QtGui/qimagereader.h>
+
+#import <AVFoundation/AVFoundation.h>
+
+QT_USE_NAMESPACE
+
+AVFImageCapture::AVFImageCapture(QImageCapture *parent)
+    : QPlatformImageCapture(parent)
+{
+    // Create a still-image output configured to deliver JPEG-encoded frames.
+    m_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
+
+    NSDictionary *outputSettings = @{AVVideoCodecKey: AVVideoCodecTypeJPEG};
+    [m_stillImageOutput setOutputSettings:outputSettings];
+}
+
+AVFImageCapture::~AVFImageCapture()
+{
+    // Balance the alloc in the constructor.
+    [m_stillImageOutput release];
+}
+
+bool AVFImageCapture::isReadyForCapture() const
+{
+    // Capturing requires a camera control with an established video
+    // connection, and the camera must currently be active.
+    if (!m_cameraControl || !m_videoConnection)
+        return false;
+    return m_cameraControl->isActive();
+}
+
+void AVFImageCapture::updateReadyStatus()
+{
+    // Re-evaluate readiness and notify listeners only on an actual change.
+    const bool nowReady = isReadyForCapture();
+    if (nowReady == m_ready)
+        return;
+
+    m_ready = nowReady;
+    qCDebug(qLcCamera) << "ReadyToCapture status changed:" << m_ready;
+    Q_EMIT readyForCaptureChanged(m_ready);
+}
+
+int AVFImageCapture::doCapture(const QString &actualFileName)
+{
+    // Initiates an asynchronous still-image capture. An empty 'actualFileName'
+    // means "deliver the image as a QVideoFrame" (captureToBuffer); otherwise
+    // the JPEG is written to the given path. Returns the capture id, or -1 on
+    // immediate failure (errors are reported via queued 'error' signals).
+    if (!m_session) {
+        QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
+                                  Q_ARG(int, m_lastCaptureId),
+                                  Q_ARG(int, QImageCapture::ResourceError),
+                                  Q_ARG(QString, QPlatformImageCapture::msgImageCaptureNotSet()));
+        return -1;
+    }
+    if (!isReadyForCapture()) {
+        QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
+                                  Q_ARG(int, m_lastCaptureId),
+                                  Q_ARG(int, QImageCapture::NotReadyError),
+                                  Q_ARG(QString, QPlatformImageCapture::msgCameraNotReady()));
+        return -1;
+    }
+    m_lastCaptureId++;
+
+    bool captureToBuffer = actualFileName.isEmpty();
+
+    // Queue the request; onNewViewfinderFrame() pairs it with a preview frame
+    // and releases 'previewReady' once the preview has been emitted.
+    CaptureRequest request = { m_lastCaptureId, QSharedPointer<QSemaphore>::create()};
+    m_requestsMutex.lock();
+    m_captureRequests.enqueue(request);
+    m_requestsMutex.unlock();
+
+    QString fileName(actualFileName);
+
+    [m_stillImageOutput captureStillImageAsynchronouslyFromConnection:m_videoConnection
+                        completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
+
+        if (error) {
+            // Collect all available error detail into one message.
+            QStringList messageParts;
+            messageParts << QString::fromUtf8([[error localizedDescription] UTF8String]);
+            messageParts << QString::fromUtf8([[error localizedFailureReason] UTF8String]);
+            messageParts << QString::fromUtf8([[error localizedRecoverySuggestion] UTF8String]);
+
+            QString errorMessage = messageParts.join(QChar(u' '));
+            qCDebug(qLcCamera) << "Image capture failed:" << errorMessage;
+
+            QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
+                                      Q_ARG(int, request.captureId),
+                                      Q_ARG(int, QImageCapture::ResourceError),
+                                      Q_ARG(QString, errorMessage));
+            return;
+        }
+
+        // Wait for the preview to be generated before saving the JPEG (but only
+        // if we have AVFCameraRenderer attached).
+        // It is possible to stop camera immediately after trying to capture an
+        // image; this can result in a blocked callback's thread, waiting for a
+        // new viewfinder's frame to arrive/semaphore to be released. It is also
+        // unspecified on which thread this callback gets executed, (probably it's
+        // not the same thread that initiated a capture and stopped the camera),
+        // so we cannot reliably check the camera's status. Instead, we wait
+        // with a timeout and treat a failure to acquire a semaphore as an error.
+        if (!m_session->videoOutput() || request.previewReady->tryAcquire(1, 1000)) {
+            qCDebug(qLcCamera) << "Image capture completed";
+
+            NSData *nsJpgData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
+            QByteArray jpgData = QByteArray::fromRawData((const char *)[nsJpgData bytes], [nsJpgData length]);
+
+            if (captureToBuffer) {
+                // Wrap the JPEG bytes in a video frame; a deep copy is taken
+                // because jpgData only wraps the NSData's raw bytes.
+                QBuffer data(&jpgData);
+                QImageReader reader(&data, "JPEG");
+                QSize size = reader.size();
+                auto buffer = std::make_unique<QMemoryVideoBuffer>(
+                        QByteArray(jpgData.constData(), jpgData.size()), -1);
+                QVideoFrame frame = QVideoFramePrivate::createFrame(
+                        std::move(buffer), QVideoFrameFormat(size, QVideoFrameFormat::Format_Jpeg));
+                QMetaObject::invokeMethod(this, "imageAvailable", Qt::QueuedConnection,
+                                          Q_ARG(int, request.captureId),
+                                          Q_ARG(QVideoFrame, frame));
+            } else {
+                // Write the JPEG to disk, reporting success or the write error.
+                QFile f(fileName);
+                if (f.open(QFile::WriteOnly)) {
+                    if (f.write(jpgData) != -1) {
+                        QMetaObject::invokeMethod(this, "imageSaved", Qt::QueuedConnection,
+                                                  Q_ARG(int, request.captureId),
+                                                  Q_ARG(QString, fileName));
+                    } else {
+                        QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
+                                                  Q_ARG(int, request.captureId),
+                                                  Q_ARG(int, QImageCapture::OutOfSpaceError),
+                                                  Q_ARG(QString, f.errorString()));
+                    }
+                } else {
+                    QString errorMessage = tr("Could not open destination file:\n%1").arg(fileName);
+                    QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
+                                              Q_ARG(int, request.captureId),
+                                              Q_ARG(int, QImageCapture::ResourceError),
+                                              Q_ARG(QString, errorMessage));
+                }
+            }
+        } else {
+            const QLatin1String errorMessage("Image capture failed: timed out waiting"
+                                             " for a preview frame.");
+            qCDebug(qLcCamera) << errorMessage;
+            QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
+                                      Q_ARG(int, request.captureId),
+                                      Q_ARG(int, QImageCapture::ResourceError),
+                                      Q_ARG(QString, errorMessage));
+        }
+    }];
+
+    return request.captureId;
+}
+
+int AVFImageCapture::capture(const QString &fileName)
+{
+    // Resolve the requested name into a writable path in the pictures
+    // location, appending a .jpg suffix when needed.
+    const QString actualFileName = QMediaStorageLocation::generateFileName(
+            fileName, QStandardPaths::PicturesLocation, QLatin1String("jpg"));
+
+    qCDebug(qLcCamera) << "Capture image to" << actualFileName;
+    return doCapture(actualFileName);
+}
+
+int AVFImageCapture::captureToBuffer()
+{
+    // An empty file name tells doCapture() to deliver the image as a
+    // QVideoFrame instead of writing it to disk.
+    return doCapture(QString());
+}
+
+void AVFImageCapture::onNewViewfinderFrame(const QVideoFrame &frame)
+{
+    QMutexLocker locker(&m_requestsMutex);
+    if (m_captureRequests.isEmpty())
+        return;
+
+    // Pair the oldest pending capture with this viewfinder frame.
+    const CaptureRequest request = m_captureRequests.dequeue();
+    Q_EMIT imageExposed(request.captureId);
+
+    // Generate the preview off the calling thread; makeCapturePreview()
+    // releases the request's semaphore when the preview has been emitted.
+    (void) QtConcurrent::run(&AVFImageCapture::makeCapturePreview, this, request, frame,
+                             0 /* rotation */);
+}
+
+void AVFImageCapture::onCameraChanged()
+{
+    // Tracks the service's current camera control and keeps the ready state
+    // in sync with the camera's active state.
+    auto camera = m_service ? static_cast<AVFCamera *>(m_service->camera()) : nullptr;
+
+    if (camera == m_cameraControl)
+        return;
+
+    m_cameraControl = camera;
+
+    // Use the compile-time-checked connect syntax for consistency with the
+    // other connections made in setCaptureSession().
+    if (m_cameraControl)
+        connect(m_cameraControl, &AVFCamera::activeChanged,
+                this, &AVFImageCapture::updateReadyStatus);
+    updateReadyStatus();
+}
+
+void AVFImageCapture::makeCapturePreview(CaptureRequest request,
+                                         const QVideoFrame &frame,
+                                         int rotation)
+{
+    // Render the viewfinder frame into a preview image with the requested
+    // rotation applied, then notify listeners.
+    QTransform transform;
+    transform.rotate(rotation);
+    const QImage preview = frame.toImage().transformed(transform);
+
+    Q_EMIT imageCaptured(request.captureId, preview);
+
+    // Unblock the completion handler waiting in doCapture().
+    request.previewReady->release();
+}
+
+void AVFImageCapture::updateCaptureConnection()
+{
+    // (Re)attaches the still-image output to the current capture session and
+    // refreshes the video connection used for captures.
+    if (m_session && m_session->videoCaptureDevice()) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO;
+        AVCaptureSession *captureSession = m_session->captureSession();
+
+        if (![captureSession.outputs containsObject:m_stillImageOutput]) {
+            if ([captureSession canAddOutput:m_stillImageOutput]) {
+                [captureSession beginConfiguration];
+                // Lock the video capture device to make sure the active format is not reset
+                const AVFConfigurationLock lock(m_session->videoCaptureDevice());
+                [captureSession addOutput:m_stillImageOutput];
+                m_videoConnection = [m_stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
+                [captureSession commitConfiguration];
+                updateReadyStatus();
+            }
+        } else {
+            // Output already attached; just refresh the cached connection.
+            m_videoConnection = [m_stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
+        }
+    }
+}
+
+
+QImageEncoderSettings AVFImageCapture::imageSettings() const
+{
+    // Reports the effective capture settings: the resolution derived from
+    // the device's active format and JPEG as the codec. Returns default
+    // (empty) settings when no valid device/format is available.
+    QImageEncoderSettings settings;
+
+    if (!videoCaptureDeviceIsValid())
+        return settings;
+
+    AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
+    if (!captureDevice.activeFormat) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "no active format";
+        return settings;
+    }
+
+    QSize res(qt_device_format_resolution(captureDevice.activeFormat));
+#ifdef Q_OS_IOS
+    if (!m_service->avfImageCaptureControl() || !m_service->avfImageCaptureControl()->stillImageOutput()) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "no still image output";
+        return settings;
+    }
+
+    // On iOS the high-resolution still size may differ from the viewfinder
+    // resolution when high-resolution output is enabled.
+    AVCaptureStillImageOutput *stillImageOutput = m_service->avfImageCaptureControl()->stillImageOutput();
+    if (stillImageOutput.highResolutionStillImageOutputEnabled)
+        res = qt_device_format_high_resolution(captureDevice.activeFormat);
+#endif
+    if (res.isNull() || !res.isValid()) {
+        // Fixed the typo "exctract" in the debug message.
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to extract the image resolution";
+        return settings;
+    }
+
+    settings.setResolution(res);
+    settings.setFormat(QImageCapture::JPEG);
+
+    return settings;
+}
+
+void AVFImageCapture::setImageSettings(const QImageEncoderSettings &settings)
+{
+    // Ignore no-op updates; otherwise store and apply immediately.
+    if (settings == m_settings)
+        return;
+
+    m_settings = settings;
+    applySettings();
+}
+
+bool AVFImageCapture::applySettings()
+{
+    // Applies m_settings to the capture device: validates the format and
+    // resolution, then switches the device's active format to the best
+    // match. Returns true only if the active format actually changed.
+    if (!videoCaptureDeviceIsValid())
+        return false;
+
+    AVFCameraSession *session = m_service->session();
+    if (!session)
+        return false;
+
+    if (!m_service->imageCapture()
+        || !m_service->avfImageCaptureControl()->stillImageOutput()) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "no still image output";
+        return false;
+    }
+
+    // Only JPEG output is supported by this backend.
+    if (m_settings.format() != QImageCapture::UnspecifiedFormat && m_settings.format() != QImageCapture::JPEG) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "unsupported format:" << m_settings.format();
+        return false;
+    }
+
+    QSize res(m_settings.resolution());
+    if (res.isNull()) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid resolution:" << res;
+        return false;
+    }
+
+    if (!res.isValid()) {
+        // Invalid == default value.
+        // Here we could choose the best format available, but
+        // activeFormat is already equal to 'preset high' by default,
+        // which is good enough, otherwise we can end in some format with low framerates.
+        return false;
+    }
+
+    bool activeFormatChanged = false;
+
+    AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
+    AVCaptureDeviceFormat *match = qt_find_best_resolution_match(captureDevice, res,
+                                                                 m_service->session()->defaultCodec());
+
+    if (!match) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "unsupported resolution:" << res;
+        return false;
+    }
+
+    // Preserve the current frame-rate limits across the format change.
+    activeFormatChanged = qt_set_active_format(captureDevice, match, true);
+
+#ifdef Q_OS_IOS
+    // Enable high-resolution stills only when the requested resolution
+    // matches the active format's high-resolution size.
+    AVCaptureStillImageOutput *imageOutput = m_service->avfImageCaptureControl()->stillImageOutput();
+    if (res == qt_device_format_high_resolution(captureDevice.activeFormat))
+        imageOutput.highResolutionStillImageOutputEnabled = YES;
+    else
+        imageOutput.highResolutionStillImageOutputEnabled = NO;
+#endif
+
+    return activeFormatChanged;
+}
+
+void AVFImageCapture::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+    // Attaches this image capture to a (possibly null) capture service,
+    // rewiring all signal connections and the still-image output.
+    AVFCameraService *captureSession = static_cast<AVFCameraService *>(session);
+    if (m_service == captureSession)
+        return;
+
+    m_service = captureSession;
+    if (!m_service) {
+        // Detaching: drop all connections and cached pointers.
+        m_session->disconnect(this);
+        if (m_cameraControl)
+            m_cameraControl->disconnect(this);
+        m_session = nullptr;
+        m_cameraControl = nullptr;
+        m_videoConnection = nil;
+    } else {
+        m_session = m_service->session();
+        Q_ASSERT(m_session);
+
+        connect(m_service, &AVFCameraService::cameraChanged, this, &AVFImageCapture::onCameraChanged);
+        connect(m_session, SIGNAL(readyToConfigureConnections()), SLOT(updateCaptureConnection()));
+        connect(m_session, &AVFCameraSession::newViewfinderFrame,
+                this, &AVFImageCapture::onNewViewfinderFrame);
+    }
+
+    // Refresh state for the new (or cleared) service.
+    updateCaptureConnection();
+    onCameraChanged();
+    updateReadyStatus();
+}
+
+bool AVFImageCapture::videoCaptureDeviceIsValid() const
+{
+    // The full service/session/device chain must be present.
+    if (!m_service || !m_service->session() || !m_service->session()->videoCaptureDevice())
+        return false;
+
+    // The device is only usable when it advertises at least one format.
+    AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
+    return captureDevice.formats != nil && captureDevice.formats.count > 0;
+}
+
+#include "moc_avfimagecapture_p.cpp"
diff --git a/src/plugins/multimedia/darwin/camera/avfimagecapture_p.h b/src/plugins/multimedia/darwin/camera/avfimagecapture_p.h
new file mode 100644
index 000000000..0714fa3cc
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfimagecapture_p.h
@@ -0,0 +1,81 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFCAMERAIMAGECAPTURE_H
+#define AVFCAMERAIMAGECAPTURE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#import <AVFoundation/AVFoundation.h>
+
+#include <QtCore/qqueue.h>
+#include <QtCore/qsemaphore.h>
+#include <QtCore/qsharedpointer.h>
+#include <private/qplatformimagecapture_p.h>
+#include "avfcamerasession_p.h"
+
+QT_BEGIN_NAMESPACE
+
+// Darwin still-image capture backend built on AVCaptureStillImageOutput.
+class AVFImageCapture : public QPlatformImageCapture
+{
+Q_OBJECT
+public:
+    // A pending still-image capture: the id reported to the client plus a
+    // semaphore used to wait for the preview frame before saving the JPEG.
+    struct CaptureRequest {
+        int captureId;
+        QSharedPointer<QSemaphore> previewReady;
+    };
+
+    AVFImageCapture(QImageCapture *parent = nullptr);
+    ~AVFImageCapture();
+
+    bool isReadyForCapture() const override;
+
+    AVCaptureStillImageOutput *stillImageOutput() const {return m_stillImageOutput;}
+
+    // Shared implementation for capture() and captureToBuffer(); an empty
+    // file name requests delivery as a QVideoFrame instead of a file.
+    int doCapture(const QString &fileName);
+    int capture(const QString &fileName) override;
+    int captureToBuffer() override;
+
+    QImageEncoderSettings imageSettings() const override;
+    void setImageSettings(const QImageEncoderSettings &settings) override;
+    bool applySettings();
+
+    void setCaptureSession(QPlatformMediaCaptureSession *session);
+
+private Q_SLOTS:
+    void updateCaptureConnection();
+    void updateReadyStatus();
+    void onNewViewfinderFrame(const QVideoFrame &frame);
+    void onCameraChanged();
+
+private:
+    // Renders a preview image for 'request' and releases its semaphore.
+    void makeCapturePreview(CaptureRequest request, const QVideoFrame &frame, int rotation);
+    bool videoCaptureDeviceIsValid() const;
+
+    AVFCameraService *m_service = nullptr;
+    AVFCameraSession *m_session = nullptr;
+    AVFCamera *m_cameraControl = nullptr;
+    bool m_ready = false;     // last value reported via readyForCaptureChanged()
+    int m_lastCaptureId = 0;
+    AVCaptureStillImageOutput *m_stillImageOutput;
+    AVCaptureConnection *m_videoConnection = nullptr;
+
+    // Guards m_captureRequests: the capture completion handler runs on an
+    // unspecified thread.
+    QMutex m_requestsMutex;
+    QQueue<CaptureRequest> m_captureRequests;
+    QImageEncoderSettings m_settings;
+};
+
+Q_DECLARE_TYPEINFO(AVFImageCapture::CaptureRequest, Q_PRIMITIVE_TYPE);
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/darwin/camera/avfmediaassetwriter.mm b/src/plugins/multimedia/darwin/camera/avfmediaassetwriter.mm
new file mode 100644
index 000000000..37fc69926
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfmediaassetwriter.mm
@@ -0,0 +1,556 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfmediaencoder_p.h"
+#include "avfcamerarenderer_p.h"
+#include "avfmediaassetwriter_p.h"
+#include "avfcameraservice_p.h"
+#include "avfcamerasession_p.h"
+#include "avfcameradebug_p.h"
+#include <qdarwinformatsinfo_p.h>
+#include <avfmetadata_p.h>
+
+#include <QtCore/qmetaobject.h>
+#include <QtCore/qatomic.h>
+
+QT_USE_NAMESPACE
+
+namespace {
+
+// A service can be recorded from only when it owns a session with a native
+// AVCaptureSession and at least one configured input (video or audio).
+bool qt_capture_session_isValid(AVFCameraService *service)
+{
+    AVFCameraSession * const session = service ? service->session() : nullptr;
+    if (!session || !session->captureSession())
+        return false;
+
+    return session->videoInput() || session->audioInput();
+}
+
+// Lifecycle states of the asset writer; kept in an atomic int so the
+// capture queues and the recorder control thread can read it without locks.
+enum WriterState
+{
+    WriterStateIdle,
+    WriterStateActive,
+    WriterStatePaused,
+    WriterStateAborted
+};
+
+// 64-bit atomic used for the recording duration (written on the writer
+// queue, read from the control thread).
+using AVFAtomicInt64 = QAtomicInteger<qint64>;
+
+} // unnamed namespace
+
+@interface QT_MANGLE_NAMESPACE(AVFMediaAssetWriter) (PrivateAPI)
+- (bool)addWriterInputs;
+- (void)setQueues;
+- (void)updateDuration:(CMTime)newTimeStamp;
+- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset;
+@end
+
+@implementation QT_MANGLE_NAMESPACE(AVFMediaAssetWriter)
+{
+@private
+    AVFCameraService *m_service;
+
+    // Writer inputs wrapping the camera (video) and microphone (audio) tracks.
+    AVFScopedPointer<AVAssetWriterInput> m_cameraWriterInput;
+    AVFScopedPointer<AVAssetWriterInput> m_audioWriterInput;
+
+    // Queue to write sample buffers:
+    AVFScopedPointer<dispatch_queue_t> m_writerQueue;
+    // High priority serial queue for video output:
+    AVFScopedPointer<dispatch_queue_t> m_videoQueue;
+    // Serial queue for audio output:
+    AVFScopedPointer<dispatch_queue_t> m_audioQueue;
+
+    AVFScopedPointer<AVAssetWriter> m_assetWriter;
+
+    // Owning encoder control; notified via queued invokeMethod calls.
+    AVFMediaEncoder *m_delegate;
+
+    // True until the first sample buffer sets the writer's session start time.
+    bool m_setStartTime;
+
+    // Holds one of the WriterState values; shared between queues.
+    QAtomicInt m_state;
+
+    // Audio buffers are dropped until the first video buffer has been
+    // written, so a recording with video never starts with sound only.
+    bool m_writeFirstAudioBuffer;
+
+    CMTime m_startTime;
+    CMTime m_lastTimeStamp;
+    CMTime m_lastVideoTimestamp;
+    CMTime m_lastAudioTimestamp;
+    // Accumulated pause duration subtracted from buffer timestamps.
+    CMTime m_timeOffset;
+    bool m_adjustTime;
+
+    NSDictionary *m_audioSettings;
+    NSDictionary *m_videoSettings;
+
+    AVFAtomicInt64 m_durationInMs;
+}
+
+- (id)initWithDelegate:(AVFMediaEncoder *)delegate
+{
+    // Designated initializer: remembers the owning encoder and resets all
+    // timing/state; the AVAssetWriter itself is created later in
+    // -setupWithFileURL:cameraService:...
+    Q_ASSERT(delegate);
+
+    if (self = [super init]) {
+        m_delegate = delegate;
+        m_setStartTime = true;
+        m_state.storeRelaxed(WriterStateIdle);
+        m_startTime = kCMTimeInvalid;
+        m_lastTimeStamp = kCMTimeInvalid;
+        m_lastAudioTimestamp = kCMTimeInvalid;
+        m_lastVideoTimestamp = kCMTimeInvalid;
+        m_timeOffset = kCMTimeInvalid;
+        m_adjustTime = false;
+        m_durationInMs.storeRelaxed(0);
+        m_audioSettings = nil;
+        m_videoSettings = nil;
+        m_writeFirstAudioBuffer = false;
+    }
+
+    return self;
+}
+
+// Creates the dispatch queues, the AVAssetWriter and its inputs for the
+// given destination URL and settings. Returns false when nothing can be
+// recorded at all; audio-only and video-only configurations are allowed.
+- (bool)setupWithFileURL:(NSURL *)fileURL
+        cameraService:(AVFCameraService *)service
+        audioSettings:(NSDictionary *)audioSettings
+        videoSettings:(NSDictionary *)videoSettings
+        fileFormat:(QMediaFormat::FileFormat)fileFormat
+        transform:(CGAffineTransform)transform
+{
+    Q_ASSERT(fileURL);
+
+    if (!qt_capture_session_isValid(service)) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid capture session";
+        return false;
+    }
+
+    m_service = service;
+    m_audioSettings = audioSettings;
+    m_videoSettings = videoSettings;
+
+    AVFCameraSession *session = m_service->session();
+
+    // Serial queue all appendSampleBuffer: calls are funnelled through.
+    m_writerQueue.reset(dispatch_queue_create("asset-writer-queue", DISPATCH_QUEUE_SERIAL));
+    if (!m_writerQueue) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create an asset writer's queue";
+        return false;
+    }
+
+    // Video delivery queue, only when a complete video path exists.
+    m_videoQueue.reset();
+    if (session->videoInput() && session->videoOutput() && session->videoOutput()->videoDataOutput()) {
+        m_videoQueue.reset(dispatch_queue_create("video-output-queue", DISPATCH_QUEUE_SERIAL));
+        if (!m_videoQueue) {
+            qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create video queue";
+            return false;
+        }
+        // Video is latency sensitive; target a user-initiated QoS queue.
+        dispatch_set_target_queue(m_videoQueue, dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0));
+    }
+
+    // Audio delivery queue; failure here is not fatal if video works.
+    m_audioQueue.reset();
+    if (session->audioInput() && session->audioOutput()) {
+        m_audioQueue.reset(dispatch_queue_create("audio-output-queue", DISPATCH_QUEUE_SERIAL));
+        if (!m_audioQueue) {
+            qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create audio queue";
+            if (!m_videoQueue)
+                return false;
+            // But we still can write video!
+        }
+    }
+
+    auto fileType = QDarwinFormatInfo::avFileTypeForContainerFormat(fileFormat);
+    m_assetWriter.reset([[AVAssetWriter alloc] initWithURL:fileURL
+                                                  fileType:fileType
+                                                     error:nil]);
+    if (!m_assetWriter) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create asset writer";
+        return false;
+    }
+
+    // Audio-only recording: no need to wait for a first video frame.
+    if (!m_videoQueue)
+        m_writeFirstAudioBuffer = true;
+
+    if (![self addWriterInputs]) {
+        m_assetWriter.reset();
+        return false;
+    }
+
+    // Apply the device orientation/mirroring transform to the video track.
+    if (m_cameraWriterInput)
+        m_cameraWriterInput.data().transform = transform;
+
+    [self setMetaData:fileType];
+
+    // Ready to start ...
+    return true;
+}
+
+- (void)setMetaData:(AVFileType)fileType
+{
+    // Converts the encoder's QMediaMetaData into AVMetadataItems suitable
+    // for this container type and attaches them to the writer.
+    m_assetWriter.data().metadata = AVFMetaData::toAVMetadataForFormat(m_delegate->metaData(), fileType);
+}
+
+- (void)start
+{
+    // Attach self as sample-buffer delegate on the capture outputs, mark
+    // the writer active, and make sure the capture session is running.
+    // The writer's session start time is taken from the first buffer
+    // (see -setStartTimeFrom:).
+    [self setQueues];
+
+    m_setStartTime = true;
+
+    m_state.storeRelease(WriterStateActive);
+
+    [m_assetWriter startWriting];
+    AVCaptureSession *session = m_service->session()->captureSession();
+    if (!session.running)
+        [session startRunning];
+}
+
+- (void)stop
+{
+    // Finishes the recording cleanly. State is flipped to Idle *before*
+    // draining the queues so no buffer can be appended after finishWriting.
+    if (m_state.loadAcquire() != WriterStateActive && m_state.loadAcquire() != WriterStatePaused)
+        return;
+
+    if ([m_assetWriter status] != AVAssetWriterStatusWriting
+        && [m_assetWriter status] != AVAssetWriterStatusFailed)
+        return;
+
+    // Do this here so that -
+    // 1. '-abort' should not try calling finishWriting again and
+    // 2. async block (see below) will know if recorder control was deleted
+    //    before the block's execution:
+    m_state.storeRelease(WriterStateIdle);
+    // Now, since we have to ensure no sample buffers are
+    // appended after a call to finishWriting, we must
+    // ensure writer's queue sees this change in m_state
+    // _before_ we call finishWriting:
+    dispatch_sync(m_writerQueue, ^{});
+    // Done, but now we also want to prevent video queue
+    // from updating our viewfinder:
+    if (m_videoQueue)
+        dispatch_sync(m_videoQueue, ^{});
+
+    // Now we're safe to stop:
+    [m_assetWriter finishWritingWithCompletionHandler:^{
+        // This block is async, so by the time it's executed,
+        // it's possible that render control was deleted already ...
+        if (m_state.loadAcquire() == WriterStateAborted)
+            return;
+
+        AVCaptureSession *session = m_service->session()->captureSession();
+        if (session.running)
+            [session stopRunning];
+        QMetaObject::invokeMethod(m_delegate, "assetWriterFinished", Qt::QueuedConnection);
+    }];
+}
+
+- (void)abort
+{
+    // -abort is to be called from recorder control's dtor.
+    // Unlike -stop it does not notify the (dying) delegate.
+
+    // Atomically mark Aborted; only a previously Active writer needs work.
+    if (m_state.fetchAndStoreRelease(WriterStateAborted) != WriterStateActive) {
+        // Not recording, nothing to stop.
+        return;
+    }
+
+    // From Apple's docs:
+    // "To guarantee that all sample buffers are successfully written,
+    //  you must ensure that all calls to appendSampleBuffer: and
+    //  appendPixelBuffer:withPresentationTime: have returned before
+    //  invoking this method."
+    //
+    // The only way we can ensure this is:
+    dispatch_sync(m_writerQueue, ^{});
+    // At this point next block (if any) on the writer's queue
+    // will see m_state preventing it from any further processing.
+    if (m_videoQueue)
+        dispatch_sync(m_videoQueue, ^{});
+    // After this point video queue will not try to modify our
+    // viewfider, so we're safe to delete now.
+
+    [m_assetWriter finishWritingWithCompletionHandler:^{
+    }];
+}
+
+- (void)pause
+{
+    // Active -> Paused; only meaningful while the writer is writing.
+    const bool canPause = m_state.loadAcquire() == WriterStateActive
+                          && [m_assetWriter status] == AVAssetWriterStatusWriting;
+    if (!canPause)
+        return;
+
+    m_state.storeRelease(WriterStatePaused);
+    // The next written buffer will fold the pause gap into m_timeOffset.
+    m_adjustTime = true;
+}
+
+- (void)resume
+{
+    // Paused -> Active; only meaningful while the writer is writing.
+    const bool canResume = m_state.loadAcquire() == WriterStatePaused
+                           && [m_assetWriter status] == AVAssetWriterStatusWriting;
+    if (!canResume)
+        return;
+
+    m_state.storeRelease(WriterStateActive);
+}
+
+- (void)setStartTimeFrom:(CMSampleBufferRef)sampleBuffer
+{
+    // Writer's queue only.
+    // Starts the writer's session at the first buffer's presentation time
+    // and notifies the delegate that recording has really begun.
+    Q_ASSERT(m_setStartTime);
+    Q_ASSERT(sampleBuffer);
+
+    if (m_state.loadAcquire() != WriterStateActive)
+        return;
+
+    QMetaObject::invokeMethod(m_delegate, "assetWriterStarted", Qt::QueuedConnection);
+
+    m_durationInMs.storeRelease(0);
+    m_startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+    m_lastTimeStamp = m_startTime;
+    [m_assetWriter startSessionAtSourceTime:m_startTime];
+    m_setStartTime = false;
+}
+
+- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset
+{
+    // Returns a copy of 'sample' (retained; the caller must CFRelease it)
+    // whose timing entries are shifted back by 'offset', or nil on failure.
+    CMItemCount count = 0;
+    CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
+    CMSampleTimingInfo *timingInfo = (CMSampleTimingInfo *) malloc(sizeof(CMSampleTimingInfo) * count);
+    // BUGFIX: the allocation was previously used unchecked.
+    if (!timingInfo)
+        return nil;
+
+    CMSampleBufferGetSampleTimingInfoArray(sample, count, timingInfo, &count);
+    for (CMItemCount i = 0; i < count; i++)
+    {
+        timingInfo[i].decodeTimeStamp = CMTimeSubtract(timingInfo[i].decodeTimeStamp, offset);
+        timingInfo[i].presentationTimeStamp = CMTimeSubtract(timingInfo[i].presentationTimeStamp, offset);
+    }
+
+    // BUGFIX: initialize the out-parameter; the old code returned an
+    // uninitialized pointer when CMSampleBufferCreateCopyWithNewTiming failed.
+    CMSampleBufferRef updatedBuffer = nil;
+    CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, sample, count, timingInfo, &updatedBuffer);
+    free(timingInfo);
+    return updatedBuffer;
+}
+
+- (void)writeVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
+{
+    // Runs on the writer's queue only. Appends one video buffer, updating
+    // the session start time and the reported duration as needed.
+    Q_ASSERT(sampleBuffer);
+
+    if (m_state.loadAcquire() != WriterStateActive)
+        return;
+
+    if (m_setStartTime)
+        [self setStartTimeFrom:sampleBuffer];
+
+    if (m_cameraWriterInput.data().readyForMoreMediaData) {
+        [self updateDuration:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
+        [m_cameraWriterInput appendSampleBuffer:sampleBuffer];
+    }
+}
+
+- (void)writeAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer
+{
+    // Runs on the writer's queue only. Appends one audio buffer, updating
+    // the session start time and the reported duration as needed.
+    Q_ASSERT(sampleBuffer);
+
+    if (m_state.loadAcquire() != WriterStateActive)
+        return;
+
+    if (m_setStartTime)
+        [self setStartTimeFrom:sampleBuffer];
+
+    if (m_audioWriterInput.data().readyForMoreMediaData) {
+        [self updateDuration:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
+        [m_audioWriterInput appendSampleBuffer:sampleBuffer];
+    }
+}
+
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+  didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+  fromConnection:(AVCaptureConnection *)connection
+{
+    // Called on m_videoQueue / m_audioQueue. Forwards the buffer to the
+    // viewfinder / audio-preview delegates, applies the accumulated pause
+    // offset, then hands the buffer to the writer's serial queue.
+    Q_UNUSED(connection);
+    Q_ASSERT(m_service && m_service->session());
+
+    if (m_state.loadAcquire() != WriterStateActive && m_state.loadAcquire() != WriterStatePaused)
+        return;
+
+    if ([m_assetWriter status] != AVAssetWriterStatusWriting) {
+        if ([m_assetWriter status] == AVAssetWriterStatusFailed) {
+            NSError *error = [m_assetWriter error];
+            NSString *failureReason = error.localizedFailureReason;
+            NSString *suggestion = error.localizedRecoverySuggestion;
+            NSString *errorString = suggestion ? [failureReason stringByAppendingString:suggestion] : failureReason;
+            QMetaObject::invokeMethod(m_delegate, "assetWriterError",
+                                      Qt::QueuedConnection,
+                                      Q_ARG(QString, QString::fromNSString(errorString)));
+        }
+        return;
+    }
+
+    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
+        qWarning() << Q_FUNC_INFO << "sample buffer is not ready, skipping.";
+        return;
+    }
+
+    // Keep the buffer alive until the async write block (or an early-out
+    // below) releases it.
+    CFRetain(sampleBuffer);
+
+    bool isVideoBuffer = true;
+    isVideoBuffer = (captureOutput != m_service->session()->audioOutput());
+    if (isVideoBuffer) {
+        // Find renderercontrol's delegate and invoke its method to
+        // show updated viewfinder's frame.
+        if (m_service->session()->videoOutput()) {
+            NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> *vfDelegate =
+                (NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> *)m_service->session()->videoOutput()->captureDelegate();
+            if (vfDelegate) {
+                AVCaptureOutput *output = nil;
+                AVCaptureConnection *connection = nil;
+                [vfDelegate captureOutput:output didOutputSampleBuffer:sampleBuffer fromConnection:connection];
+            }
+        }
+    } else {
+        if (m_service->session()->audioOutput()) {
+            NSObject<AVCaptureAudioDataOutputSampleBufferDelegate> *audioPreviewDelegate =
+                (NSObject<AVCaptureAudioDataOutputSampleBufferDelegate> *)m_service->session()->audioPreviewDelegate();
+            if (audioPreviewDelegate) {
+                AVCaptureOutput *output = nil;
+                AVCaptureConnection *connection = nil;
+                [audioPreviewDelegate captureOutput:output didOutputSampleBuffer:sampleBuffer fromConnection:connection];
+            }
+        }
+    }
+
+    // Paused (or racing with stop): preview above still updates, but
+    // nothing is written.
+    if (m_state.loadAcquire() != WriterStateActive) {
+        CFRelease(sampleBuffer);
+        return;
+    }
+
+    if (m_adjustTime) {
+        // First buffer after resume: extend m_timeOffset by the pause gap.
+        CMTime currentTimestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+        CMTime lastTimestamp = isVideoBuffer ? m_lastVideoTimestamp : m_lastAudioTimestamp;
+
+        if (!CMTIME_IS_INVALID(lastTimestamp)) {
+            if (!CMTIME_IS_INVALID(m_timeOffset))
+                currentTimestamp = CMTimeSubtract(currentTimestamp, m_timeOffset);
+
+            CMTime pauseDuration = CMTimeSubtract(currentTimestamp, lastTimestamp);
+
+            if (m_timeOffset.value == 0)
+                m_timeOffset = pauseDuration;
+            else
+                m_timeOffset = CMTimeAdd(m_timeOffset, pauseDuration);
+        }
+        m_lastVideoTimestamp = kCMTimeInvalid;
+        m_adjustTime = false;
+    }
+
+    if (m_timeOffset.value > 0) {
+        // BUGFIX: create the re-stamped copy *before* releasing the original
+        // buffer. The previous code called CFRelease first and then passed
+        // the freed buffer into -adjustTime:by: (use-after-free).
+        CMSampleBufferRef adjustedBuffer = [self adjustTime:sampleBuffer by:m_timeOffset];
+        CFRelease(sampleBuffer);
+        sampleBuffer = adjustedBuffer;
+        if (!sampleBuffer)
+            return;
+    }
+
+    CMTime currentTimestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+    CMTime currentDuration = CMSampleBufferGetDuration(sampleBuffer);
+    if (currentDuration.value > 0)
+        currentTimestamp = CMTimeAdd(currentTimestamp, currentDuration);
+
+    if (isVideoBuffer)
+    {
+        m_lastVideoTimestamp = currentTimestamp;
+        dispatch_async(m_writerQueue, ^{
+            [self writeVideoSampleBuffer:sampleBuffer];
+            m_writeFirstAudioBuffer = true;
+            CFRelease(sampleBuffer);
+        });
+    } else if (m_writeFirstAudioBuffer) {
+        m_lastAudioTimestamp = currentTimestamp;
+        dispatch_async(m_writerQueue, ^{
+            [self writeAudioSampleBuffer:sampleBuffer];
+            CFRelease(sampleBuffer);
+        });
+    } else {
+        // BUGFIX: audio arriving before the first video frame is dropped;
+        // balance the CFRetain above instead of leaking the buffer.
+        CFRelease(sampleBuffer);
+    }
+}
+
+- (bool)addWriterInputs
+{
+    // Creates and attaches the video and/or audio AVAssetWriterInputs,
+    // matching the queues set up in -setupWithFileURL:. Audio failures are
+    // tolerated as long as video can still be recorded (and vice versa).
+    Q_ASSERT(m_service && m_service->session());
+    Q_ASSERT(m_assetWriter.data());
+
+    AVFCameraSession *session = m_service->session();
+
+    m_cameraWriterInput.reset();
+    if (m_videoQueue)
+    {
+        Q_ASSERT(session->videoCaptureDevice() && session->videoOutput() && session->videoOutput()->videoDataOutput());
+        // The source format hint lets the writer pre-configure for the
+        // device's active capture format.
+        m_cameraWriterInput.reset([[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
+                                                     outputSettings:m_videoSettings
+                                                     sourceFormatHint:session->videoCaptureDevice().activeFormat.formatDescription]);
+
+        if (m_cameraWriterInput && [m_assetWriter canAddInput:m_cameraWriterInput]) {
+            [m_assetWriter addInput:m_cameraWriterInput];
+        } else {
+            qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to add camera writer input";
+            m_cameraWriterInput.reset();
+            return false;
+        }
+
+        // Live capture: the input must not buffer indefinitely.
+        m_cameraWriterInput.data().expectsMediaDataInRealTime = YES;
+    }
+
+    m_audioWriterInput.reset();
+    if (m_audioQueue) {
+        m_audioWriterInput.reset([[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio
+                                                                outputSettings:m_audioSettings]);
+        if (!m_audioWriterInput) {
+            qWarning() << Q_FUNC_INFO << "failed to create audio writer input";
+            // But we still can record video.
+            if (!m_cameraWriterInput)
+                return false;
+        } else if ([m_assetWriter canAddInput:m_audioWriterInput]) {
+            [m_assetWriter addInput:m_audioWriterInput];
+            m_audioWriterInput.data().expectsMediaDataInRealTime = YES;
+        } else {
+            qWarning() << Q_FUNC_INFO << "failed to add audio writer input";
+            m_audioWriterInput.reset();
+            if (!m_cameraWriterInput)
+                return false;
+            // We can (still) write video though ...
+        }
+    }
+
+    return true;
+}
+
+- (void)setQueues
+{
+    // Registers self as the sample-buffer delegate of the session's video
+    // and audio data outputs, delivering on our private serial queues.
+    Q_ASSERT(m_service && m_service->session());
+    AVFCameraSession *session = m_service->session();
+
+    if (m_videoQueue) {
+        Q_ASSERT(session->videoOutput() && session->videoOutput()->videoDataOutput());
+        [session->videoOutput()->videoDataOutput() setSampleBufferDelegate:self queue:m_videoQueue];
+    }
+
+    if (m_audioQueue) {
+        Q_ASSERT(session->audioOutput());
+        [session->audioOutput() setSampleBufferDelegate:self queue:m_audioQueue];
+    }
+}
+
+- (void)updateDuration:(CMTime)newTimeStamp
+{
+    // Writer's queue only: recomputes the elapsed recording duration from
+    // the session start time and pushes it to the encoder control.
+    // NOTE(review): CMTimeCompare(x, kCMTimeInvalid) is not the canonical
+    // way to test validity (CMTIME_IS_INVALID is) — confirm these
+    // comparisons behave as intended for invalid times.
+    Q_ASSERT(CMTimeCompare(m_startTime, kCMTimeInvalid));
+    Q_ASSERT(CMTimeCompare(m_lastTimeStamp, kCMTimeInvalid));
+    if (CMTimeCompare(newTimeStamp, m_lastTimeStamp) > 0) {
+
+        const CMTime duration = CMTimeSubtract(newTimeStamp, m_startTime);
+        if (!CMTimeCompare(duration, kCMTimeInvalid))
+            return;
+
+        m_durationInMs.storeRelease(CMTimeGetSeconds(duration) * 1000);
+        m_lastTimeStamp = newTimeStamp;
+
+        m_delegate->updateDuration([self durationInMs]);
+    }
+}
+
+- (qint64)durationInMs
+{
+    // Thread-safe read of the duration maintained by -updateDuration:.
+    return m_durationInMs.loadAcquire();
+}
+
+@end
diff --git a/src/plugins/multimedia/darwin/camera/avfmediaassetwriter_p.h b/src/plugins/multimedia/darwin/camera/avfmediaassetwriter_p.h
new file mode 100644
index 000000000..8fe3e8522
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfmediaassetwriter_p.h
@@ -0,0 +1,54 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFMEDIAASSETWRITER_H
+#define AVFMEDIAASSETWRITER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "avfcamerautility_p.h"
+#include "qmediaformat.h"
+
+#include <QtCore/qglobal.h>
+
+#include <AVFoundation/AVFoundation.h>
+
+QT_BEGIN_NAMESPACE
+
+class AVFMediaEncoder;
+class AVFCameraService;
+
+QT_END_NAMESPACE
+
+// Receives audio/video sample buffers from the capture session's data
+// outputs and writes them into an AVAssetWriter. Owned and driven by
+// AVFMediaEncoder.
+@interface QT_MANGLE_NAMESPACE(AVFMediaAssetWriter) : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate,
+                                                               AVCaptureAudioDataOutputSampleBufferDelegate>
+- (id)initWithDelegate:(QT_PREPEND_NAMESPACE(AVFMediaEncoder) *)delegate;
+
+// Prepares queues, writer and writer inputs; returns false when recording
+// is impossible for this configuration.
+- (bool)setupWithFileURL:(NSURL *)fileURL
+           cameraService:(QT_PREPEND_NAMESPACE(AVFCameraService) *)service
+           audioSettings:(NSDictionary *)audioSettings
+           videoSettings:(NSDictionary *)videoSettings
+           fileFormat:(QMediaFormat::FileFormat)fileFormat
+           transform:(CGAffineTransform)transform;
+
+// This to be called from the recorder control's thread:
+- (void)start;
+- (void)stop;
+- (void)pause;
+- (void)resume;
+// This to be called from the recorder control's dtor:
+- (void)abort;
+- (qint64)durationInMs;
+
+@end
+
+#endif // AVFMEDIAASSETWRITER_H
diff --git a/src/plugins/multimedia/darwin/camera/avfmediaencoder.mm b/src/plugins/multimedia/darwin/camera/avfmediaencoder.mm
new file mode 100644
index 000000000..3fbc57995
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfmediaencoder.mm
@@ -0,0 +1,664 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+
+#include "avfmediaencoder_p.h"
+#include "avfcamerarenderer_p.h"
+#include "avfcamerasession_p.h"
+#include "avfcamera_p.h"
+#include "avfcameraservice_p.h"
+#include "avfcameradebug_p.h"
+#include "avfcamerautility_p.h"
+#include "qaudiodevice.h"
+
+#include "qmediadevices.h"
+#include "private/qmediastoragelocation_p.h"
+#include "private/qmediarecorder_p.h"
+#include "qdarwinformatsinfo_p.h"
+#include "private/qplatformaudiooutput_p.h"
+#include <private/qplatformaudioinput_p.h>
+
+#include <QtCore/qmath.h>
+#include <QtCore/qdebug.h>
+#include <QtCore/qmimetype.h>
+
+#include <private/qcoreaudioutils_p.h>
+
+QT_USE_NAMESPACE
+
+namespace {
+
+// A URL is writable for our purposes when it is a file URL and the parent
+// directory of its (tilde-expanded) path is writable per NSFileManager.
+bool qt_is_writable_file_URL(NSURL *fileURL)
+{
+    Q_ASSERT(fileURL);
+
+    if (![fileURL isFileURL])
+        return false;
+
+    NSString *path = [[fileURL path] stringByExpandingTildeInPath];
+    if (!path)
+        return false;
+
+    NSString *directory = [path stringByDeletingLastPathComponent];
+    return [[NSFileManager defaultManager] isWritableFileAtPath:directory];
+}
+
+// True when the tilde-expanded file-system path behind the URL exists.
+bool qt_file_exists(NSURL *fileURL)
+{
+    Q_ASSERT(fileURL);
+
+    NSString *path = [[fileURL path] stringByExpandingTildeInPath];
+    return path ? bool([[NSFileManager defaultManager] fileExistsAtPath:path]) : false;
+}
+
+}
+
+AVFMediaEncoder::AVFMediaEncoder(QMediaRecorder *parent)
+    : QObject(parent)
+    , QPlatformMediaRecorder(parent)
+    , m_state(QMediaRecorder::StoppedState)
+    , m_duration(0)
+    , m_audioSettings(nil)
+    , m_videoSettings(nil)
+    //, m_restoreFPS(-1, -1)
+{
+    // Create the Objective-C asset writer that does the actual recording;
+    // all further calls are no-ops if this fails.
+    m_writer.reset([[QT_MANGLE_NAMESPACE(AVFMediaAssetWriter) alloc] initWithDelegate:this]);
+    if (!m_writer) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create an asset writer";
+        return;
+    }
+}
+
+AVFMediaEncoder::~AVFMediaEncoder()
+{
+    // Abort (not stop): the writer must not call back into this dying object.
+    [m_writer abort];
+
+    // Release the retained Cocoa settings dictionaries.
+    if (m_audioSettings)
+        [m_audioSettings release];
+    if (m_videoSettings)
+        [m_videoSettings release];
+}
+
+bool AVFMediaEncoder::isLocationWritable(const QUrl &location) const
+{
+    // Only local files are supported; an empty scheme is treated as "file".
+    const QString scheme = location.scheme();
+    return scheme.isEmpty() || scheme == QLatin1String("file");
+}
+
+QMediaRecorder::RecorderState AVFMediaEncoder::state() const
+{
+    // Recorder state as last set by record()/pause()/stop().
+    return m_state;
+}
+
+qint64 AVFMediaEncoder::duration() const
+{
+    // Duration of the current recording in milliseconds.
+    return m_duration;
+}
+
+void AVFMediaEncoder::updateDuration(qint64 duration)
+{
+    // Invoked (queued) by the asset writer as the written duration grows.
+    m_duration = duration;
+    durationChanged(m_duration);
+}
+
+// Builds the AVAssetWriterInput audio settings dictionary from the Qt
+// encoder settings, validating bit rate / sample rate / channel count
+// against what the chosen codec actually supports and falling back to
+// safe defaults otherwise.
+static NSDictionary *avfAudioSettings(const QMediaEncoderSettings &encoderSettings, const QAudioFormat &format)
+{
+    NSMutableDictionary *settings = [NSMutableDictionary dictionary];
+
+    // Codec
+    int codecId = QDarwinFormatInfo::audioFormatForCodec(encoderSettings.mediaFormat().audioCodec());
+    [settings setObject:[NSNumber numberWithInt:codecId] forKey:AVFormatIDKey];
+
+    // Setting AVEncoderQualityKey is not allowed when format ID is alac or lpcm
+    if (codecId != kAudioFormatAppleLossless && codecId != kAudioFormatLinearPCM
+        && encoderSettings.encodingMode() == QMediaRecorder::ConstantQualityEncoding) {
+        // AudioQuality
+        int quality;
+        switch (encoderSettings.quality()) {
+        case QMediaRecorder::VeryLowQuality:
+            quality = AVAudioQualityMin;
+            break;
+        case QMediaRecorder::LowQuality:
+            quality = AVAudioQualityLow;
+            break;
+        case QMediaRecorder::HighQuality:
+            quality = AVAudioQualityHigh;
+            break;
+        case QMediaRecorder::VeryHighQuality:
+            quality = AVAudioQualityMax;
+            break;
+        case QMediaRecorder::NormalQuality:
+        default:
+            quality = AVAudioQualityMedium;
+            break;
+        }
+        [settings setObject:[NSNumber numberWithInt:quality] forKey:AVEncoderAudioQualityKey];
+    } else {
+        // BitRate — only applied when inside a range the codec supports.
+        bool isBitRateSupported = false;
+        int bitRate = encoderSettings.audioBitRate();
+        if (bitRate > 0) {
+            QList<AudioValueRange> bitRates = qt_supported_bit_rates_for_format(codecId);
+            for (int i = 0; i < bitRates.count(); i++) {
+                if (bitRate >= bitRates[i].mMinimum &&
+                    bitRate <= bitRates[i].mMaximum) {
+                    isBitRateSupported = true;
+                    break;
+                }
+            }
+            if (isBitRateSupported)
+                [settings setObject:[NSNumber numberWithInt:encoderSettings.audioBitRate()]
+                                    forKey:AVEncoderBitRateKey];
+        }
+    }
+
+    // SampleRate — clamp to a codec-supported rate, else default to 44100.
+    int sampleRate = encoderSettings.audioSampleRate();
+    bool isSampleRateSupported = false;
+    if (sampleRate >= 8000 && sampleRate <= 192000) {
+        QList<AudioValueRange> sampleRates = qt_supported_sample_rates_for_format(codecId);
+        for (int i = 0; i < sampleRates.count(); i++) {
+            if (sampleRate >= sampleRates[i].mMinimum && sampleRate <= sampleRates[i].mMaximum) {
+                isSampleRateSupported = true;
+                break;
+            }
+        }
+    }
+    if (!isSampleRateSupported)
+        sampleRate = 44100;
+    [settings setObject:[NSNumber numberWithInt:sampleRate] forKey:AVSampleRateKey];
+
+    // Channels
+    int channelCount = encoderSettings.audioChannelCount();
+    bool isChannelCountSupported = false;
+    if (channelCount > 0) {
+        std::optional<QList<UInt32>> channelCounts = qt_supported_channel_counts_for_format(codecId);
+        // An std::nullopt result indicates that
+        // any number of channels can be encoded.
+        if (channelCounts == std::nullopt) {
+            isChannelCountSupported = true;
+        } else {
+            for (int i = 0; i < channelCounts.value().count(); i++) {
+                if ((UInt32)channelCount == channelCounts.value()[i]) {
+                    isChannelCountSupported = true;
+                    break;
+                }
+            }
+        }
+
+        // if channel count is provided and it's bigger than 2
+        // provide a supported channel layout
+        if (isChannelCountSupported && channelCount > 2) {
+            AudioChannelLayout channelLayout;
+            memset(&channelLayout, 0, sizeof(AudioChannelLayout));
+            auto channelLayoutTags = qt_supported_channel_layout_tags_for_format(codecId, channelCount);
+            if (channelLayoutTags.size()) {
+                channelLayout.mChannelLayoutTag = channelLayoutTags.first();
+                [settings setObject:[NSData dataWithBytes: &channelLayout length: sizeof(channelLayout)] forKey:AVChannelLayoutKey];
+            } else {
+                // No layout exists for this count: treat it as unsupported.
+                isChannelCountSupported = false;
+            }
+        }
+
+        if (isChannelCountSupported)
+            [settings setObject:[NSNumber numberWithInt:channelCount] forKey:AVNumberOfChannelsKey];
+    }
+
+    if (!isChannelCountSupported) {
+        // fallback to providing channel layout if channel count is not specified or supported
+        UInt32 size = 0;
+        if (format.isValid()) {
+            auto layout = CoreAudioUtils::toAudioChannelLayout(format, &size);
+            [settings setObject:[NSData dataWithBytes:layout.get() length:sizeof(AudioChannelLayout)] forKey:AVChannelLayoutKey];
+        } else {
+            // finally default to setting channel count to 1
+            [settings setObject:[NSNumber numberWithInt:1] forKey:AVNumberOfChannelsKey];
+        }
+    }
+
+    if (codecId == kAudioFormatAppleLossless)
+        [settings setObject:[NSNumber numberWithInt:24] forKey:AVEncoderBitDepthHintKey];
+
+    if (codecId == kAudioFormatLinearPCM) {
+        [settings setObject:[NSNumber numberWithInt:16] forKey:AVLinearPCMBitDepthKey];
+        // numberWithInt:NO yields @0, which AVFoundation accepts;
+        // numberWithBool: would be the idiomatic spelling.
+        [settings setObject:[NSNumber numberWithInt:NO] forKey:AVLinearPCMIsBigEndianKey];
+        [settings setObject:[NSNumber numberWithInt:NO] forKey:AVLinearPCMIsFloatKey];
+        [settings setObject:[NSNumber numberWithInt:NO] forKey:AVLinearPCMIsNonInterleaved];
+    }
+
+    return settings;
+}
+
+// Builds the AVAssetWriterInput video settings dictionary (codec,
+// resolution, fps, compression properties). Mutates encoderSettings so it
+// reflects the values actually applied, and may change the device's
+// activeFormat to honour the requested resolution / frame rate.
+NSDictionary *avfVideoSettings(QMediaEncoderSettings &encoderSettings, AVCaptureDevice *device, AVCaptureConnection *connection, QSize nativeSize)
+{
+    if (!device)
+        return nil;
+
+
+    // ### re-add needFpsChange
+//    AVFPSRange currentFps = qt_current_framerates(device, connection);
+
+    NSMutableDictionary *videoSettings = [NSMutableDictionary dictionary];
+
+    // -- Codec
+
+    // AVVideoCodecKey is the only mandatory key
+    auto codec = encoderSettings.mediaFormat().videoCodec();
+    NSString *c = QDarwinFormatInfo::videoFormatForCodec(codec);
+    [videoSettings setObject:c forKey:AVVideoCodecKey];
+    [c release];
+
+    // -- Resolution
+
+    int w = encoderSettings.videoResolution().width();
+    int h = encoderSettings.videoResolution().height();
+
+    if (AVCaptureDeviceFormat *currentFormat = device.activeFormat) {
+        CMFormatDescriptionRef formatDesc = currentFormat.formatDescription;
+        CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(formatDesc);
+        FourCharCode formatCodec = CMVideoFormatDescriptionGetCodecType(formatDesc);
+
+        // We have to change the device's activeFormat in 3 cases:
+        // - the requested recording resolution is higher than the current device resolution
+        // - the requested recording resolution has a different aspect ratio than the current device aspect ratio
+        // - the requested frame rate is not available for the current device format
+        AVCaptureDeviceFormat *newFormat = nil;
+        if ((w <= 0 || h <= 0)
+                && encoderSettings.videoFrameRate() > 0
+                && !qt_format_supports_framerate(currentFormat, encoderSettings.videoFrameRate())) {
+
+            newFormat = qt_find_best_framerate_match(device,
+                                                     formatCodec,
+                                                     encoderSettings.videoFrameRate());
+
+        } else if (w > 0 && h > 0) {
+            AVCaptureDeviceFormat *f = qt_find_best_resolution_match(device,
+                                                                     encoderSettings.videoResolution(),
+                                                                     formatCodec);
+
+            if (f) {
+                CMVideoDimensions d = CMVideoFormatDescriptionGetDimensions(f.formatDescription);
+                qreal fAspectRatio = qreal(d.width) / d.height;
+
+                if (w > dim.width || h > dim.height
+                    || qAbs((qreal(dim.width) / dim.height) - fAspectRatio) > 0.01) {
+                    newFormat = f;
+                }
+            }
+        }
+
+        if (qt_set_active_format(device, newFormat, false /*### !needFpsChange*/)) {
+            formatDesc = newFormat.formatDescription;
+            dim = CMVideoFormatDescriptionGetDimensions(formatDesc);
+        }
+
+        if (w < 0 || h < 0) {
+            w = dim.width;
+            h = dim.height;
+        }
+
+
+        if (w > 0 && h > 0) {
+            // Make sure the recording resolution has the same aspect ratio as the device's
+            // current resolution
+            qreal deviceAspectRatio = qreal(dim.width) / dim.height;
+            qreal recAspectRatio = qreal(w) / h;
+            if (qAbs(deviceAspectRatio - recAspectRatio) > 0.01) {
+                if (recAspectRatio > deviceAspectRatio)
+                    w = qRound(h * deviceAspectRatio);
+                else
+                    h = qRound(w / deviceAspectRatio);
+            }
+
+            // recording resolution can't be higher than the device's active resolution
+            w = qMin(w, dim.width);
+            h = qMin(h, dim.height);
+        }
+    }
+
+    if (w > 0 && h > 0) {
+        // Width and height must be divisible by 2
+        w += w & 1;
+        h += h & 1;
+
+        bool isPortrait = nativeSize.width() < nativeSize.height();
+        // Make sure the video has the right aspect ratio
+        if (isPortrait && h < w)
+            qSwap(w, h);
+        else if (!isPortrait && w < h)
+            qSwap(w, h);
+
+        encoderSettings.setVideoResolution(QSize(w, h));
+    } else {
+        w = nativeSize.width();
+        h = nativeSize.height();
+        encoderSettings.setVideoResolution(nativeSize);
+    }
+    [videoSettings setObject:[NSNumber numberWithInt:w] forKey:AVVideoWidthKey];
+    [videoSettings setObject:[NSNumber numberWithInt:h] forKey:AVVideoHeightKey];
+
+    // -- FPS
+
+    if (true /*needFpsChange*/) {
+        const qreal fps = encoderSettings.videoFrameRate();
+        qt_set_framerate_limits(device, connection, fps, fps);
+    }
+    encoderSettings.setVideoFrameRate(qt_current_framerates(device, connection).second);
+
+    // -- Codec Settings
+
+    NSMutableDictionary *codecProperties = [NSMutableDictionary dictionary];
+    int bitrate = -1;
+    float quality = -1.f;
+
+    if (encoderSettings.encodingMode() == QMediaRecorder::ConstantQualityEncoding) {
+        if (encoderSettings.quality() != QMediaRecorder::NormalQuality) {
+            if (codec != QMediaFormat::VideoCodec::MotionJPEG) {
+                // BUGFIX: the old warning claimed MotionJPEG was unsupported,
+                // but MotionJPEG is the only codec AVVideoQualityKey works
+                // with here; every *other* codec is rejected.
+                qWarning() << "ConstantQualityEncoding is only supported for MotionJPEG, not for codec"
+                           << QMediaFormat::videoCodecName(codec);
+            } else {
+                switch (encoderSettings.quality()) {
+                case QMediaRecorder::VeryLowQuality:
+                    quality = 0.f;
+                    break;
+                case QMediaRecorder::LowQuality:
+                    quality = 0.25f;
+                    break;
+                case QMediaRecorder::HighQuality:
+                    quality = 0.75f;
+                    break;
+                case QMediaRecorder::VeryHighQuality:
+                    quality = 1.f;
+                    break;
+                default:
+                    quality = -1.f; // NormalQuality, let the system decide
+                    break;
+                }
+            }
+        }
+    } else if (encoderSettings.encodingMode() == QMediaRecorder::AverageBitRateEncoding){
+        if (codec != QMediaFormat::VideoCodec::H264 && codec != QMediaFormat::VideoCodec::H265)
+            qWarning() << "AverageBitRateEncoding is not supported for codec" << QMediaFormat::videoCodecName(codec);
+        else
+            bitrate = encoderSettings.videoBitRate();
+    } else {
+        qWarning("Encoding mode is not supported");
+    }
+
+    if (bitrate != -1)
+        [codecProperties setObject:[NSNumber numberWithInt:bitrate] forKey:AVVideoAverageBitRateKey];
+    if (quality != -1.f)
+        [codecProperties setObject:[NSNumber numberWithFloat:quality] forKey:AVVideoQualityKey];
+
+    [videoSettings setObject:codecProperties forKey:AVVideoCompressionPropertiesKey];
+
+    return videoSettings;
+}
+
+void AVFMediaEncoder::applySettings(QMediaEncoderSettings &settings)
+{
+    // Translate the Qt encoder settings into retained Cocoa dictionaries
+    // consumed by the asset writer; any previous dictionaries are dropped.
+    unapplySettings();
+
+    AVFCameraSession *session = m_service->session();
+
+    // audio settings
+    const auto audioInput = m_service->audioInput();
+    const QAudioFormat audioFormat = audioInput ? audioInput->device.preferredFormat() : QAudioFormat();
+    m_audioSettings = avfAudioSettings(settings, audioFormat);
+    if (m_audioSettings)
+        [m_audioSettings retain];
+
+    // video settings
+    AVCaptureDevice *device = session->videoCaptureDevice();
+    if (!device)
+        return;
+    const AVFConfigurationLock lock(device); // prevents activeFormat from being overridden
+    // NOTE(review): session->videoOutput() is dereferenced without a null
+    // check below — confirm a non-null videoCaptureDevice() guarantees a
+    // video output here.
+    AVCaptureConnection *conn = [session->videoOutput()->videoDataOutput() connectionWithMediaType:AVMediaTypeVideo];
+    auto nativeSize = session->videoOutput()->nativeSize();
+    m_videoSettings = avfVideoSettings(settings, device, conn, nativeSize);
+    if (m_videoSettings)
+        [m_videoSettings retain];
+}
+
+// Releases the retained audio/video settings dictionaries created by
+// applySettings() and clears the members so a subsequent recording starts
+// from a clean slate.
+void AVFMediaEncoder::unapplySettings()
+{
+    const auto releaseAndClear = [](NSDictionary *&settings) {
+        if (settings) {
+            [settings release];
+            settings = nil;
+        }
+    };
+
+    releaseAndClear(m_audioSettings);
+    releaseAndClear(m_videoSettings);
+}
+
+// Stores the metadata to be associated with the recording.
+void AVFMediaEncoder::setMetaData(const QMediaMetaData &metaData)
+{
+    m_metaData = metaData;
+}
+
+// Returns the metadata previously set via setMetaData().
+QMediaMetaData AVFMediaEncoder::metaData() const
+{
+    return m_metaData;
+}
+
+// Attaches this encoder to a (possibly null) capture session. Any ongoing
+// recording on the previous session is stopped first, and the camera-change
+// tracking is (re)wired for the new service.
+// NOTE(review): the cameraChanged connection to a previous non-null service
+// is not explicitly disconnected here - presumably the old service's
+// destruction tears it down; verify against AVFCameraService's lifetime.
+void AVFMediaEncoder::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+    AVFCameraService *captureSession = static_cast<AVFCameraService *>(session);
+    if (m_service == captureSession)
+        return;
+
+    if (m_service)
+        stop();
+
+    m_service = captureSession;
+    if (!m_service)
+        return;
+
+    connect(m_service, &AVFCameraService::cameraChanged, this, &AVFMediaEncoder::onCameraChanged);
+    // Sync up with the camera the new service already has, if any.
+    onCameraChanged();
+}
+
+// Starts a new recording with the given settings. Validates the session,
+// writer and inputs, resolves and sanity-checks the output file URL, applies
+// the encoder settings, then hands everything to the asset writer. On
+// success transitions to RecordingState and emits actualLocationChanged();
+// on writer-setup failure restarts the capture session and reports a
+// FormatError.
+void AVFMediaEncoder::record(QMediaEncoderSettings &settings)
+{
+    if (!m_service || !m_service->session()) {
+        qWarning() << Q_FUNC_INFO << "Encoder is not set to a capture session";
+        return;
+    }
+
+    if (!m_writer) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "Invalid recorder";
+        return;
+    }
+
+    // Already recording: nothing to do.
+    if (QMediaRecorder::RecordingState == m_state)
+        return;
+
+    AVFCamera *cameraControl = m_service->avfCameraControl();
+    auto audioInput = m_service->audioInput();
+
+    if (!cameraControl && !audioInput) {
+        qWarning() << Q_FUNC_INFO << "Cannot record without any inputs";
+        updateError(QMediaRecorder::ResourceError, tr("No inputs specified"));
+        return;
+    }
+
+    m_service->session()->setActive(true);
+    const bool audioOnly = settings.videoCodec() == QMediaFormat::VideoCodec::Unspecified;
+    AVCaptureSession *session = m_service->session()->captureSession();
+    // NOTE(review): rotation is never updated after this point, so the
+    // writer always receives an identity transform - confirm whether
+    // orientation handling was intentionally dropped here.
+    float rotation = 0;
+
+    if (!audioOnly) {
+        if (!cameraControl || !cameraControl->isActive()) {
+            qCDebug(qLcCamera) << Q_FUNC_INFO << "can not start record while camera is not active";
+            updateError(QMediaRecorder::ResourceError,
+                        QMediaRecorderPrivate::msgFailedStartRecording());
+            return;
+        }
+    }
+
+    const QString path(outputLocation().scheme() == QLatin1String("file") ?
+                           outputLocation().path() : outputLocation().toString());
+    const QUrl fileURL(QUrl::fromLocalFile(QMediaStorageLocation::generateFileName(path,
+                    audioOnly ? QStandardPaths::MusicLocation : QStandardPaths::MoviesLocation,
+                    settings.mimeType().preferredSuffix())));
+
+    NSURL *nsFileURL = fileURL.toNSURL();
+    if (!nsFileURL) {
+        qWarning() << Q_FUNC_INFO << "invalid output URL:" << fileURL;
+        updateError(QMediaRecorder::ResourceError, tr("Invalid output file URL"));
+        return;
+    }
+    if (!qt_is_writable_file_URL(nsFileURL)) {
+        qWarning() << Q_FUNC_INFO << "invalid output URL:" << fileURL
+                   << "(the location is not writable)";
+        updateError(QMediaRecorder::ResourceError, tr("Non-writeable file location"));
+        return;
+    }
+    if (qt_file_exists(nsFileURL)) {
+        // We test for/handle this error here since AWAssetWriter will raise an
+        // Objective-C exception, which is not good at all.
+        qWarning() << Q_FUNC_INFO << "invalid output URL:" << fileURL
+                   << "(file already exists)";
+        updateError(QMediaRecorder::ResourceError, tr("File already exists"));
+        return;
+    }
+
+    applySettings(settings);
+
+    QVideoOutputOrientationHandler::setIsRecording(true);
+
+    // We stop session now so that no more frames for renderer's queue
+    // generated, will restart in assetWriterStarted.
+    [session stopRunning];
+
+    if ([m_writer setupWithFileURL:nsFileURL
+                     cameraService:m_service
+                     audioSettings:m_audioSettings
+                     videoSettings:m_videoSettings
+                        fileFormat:settings.fileFormat()
+                         transform:CGAffineTransformMakeRotation(qDegreesToRadians(rotation))]) {
+
+        m_state = QMediaRecorder::RecordingState;
+
+        Q_EMIT actualLocationChanged(fileURL);
+        Q_EMIT stateChanged(m_state);
+
+        // Apple recommends to call startRunning and do all
+        // setup on a special queue, and that's what we had
+        // initially (dispatch_async to writerQueue). Unfortunately,
+        // writer's queue is not the only queue/thread that can
+        // access/modify the session, and as a result we have
+        // all possible data/race-conditions with Obj-C exceptions
+        // at best and something worse in general.
+        // Now we try to only modify session on the same thread.
+        [m_writer start];
+    } else {
+        [session startRunning];
+        updateError(QMediaRecorder::FormatError, QMediaRecorderPrivate::msgFailedStartRecording());
+    }
+}
+
+// Pauses an active recording by suspending the asset writer. No-op unless
+// we are currently recording on a valid capture session.
+void AVFMediaEncoder::pause()
+{
+    const bool canPause =
+            m_service && m_service->session() && state() == QMediaRecorder::RecordingState;
+    if (!canPause)
+        return;
+
+    toggleRecord(false);
+    m_state = QMediaRecorder::PausedState;
+    Q_EMIT stateChanged(m_state);
+}
+
+// Resumes a previously paused recording by re-enabling the asset writer.
+// No-op unless we are currently paused on a valid capture session.
+void AVFMediaEncoder::resume()
+{
+    const bool canResume =
+            m_service && m_service->session() && state() == QMediaRecorder::PausedState;
+    if (!canResume)
+        return;
+
+    toggleRecord(true);
+    m_state = QMediaRecorder::RecordingState;
+    Q_EMIT stateChanged(m_state);
+}
+
+// Stops the recording. The actual state transition to StoppedState (and the
+// corresponding signal) happens later, in assetWriterFinished(), once the
+// writer has finalized the file.
+void AVFMediaEncoder::stop()
+{
+    if (m_state != QMediaRecorder::StoppedState) {
+        // Do not check the camera status, we can stop if we started.
+        stopWriter();
+    }
+    QVideoOutputOrientationHandler::setIsRecording(false);
+}
+
+
+// Forwards pause/resume requests to the asset writer: enable == true resumes
+// writing, enable == false pauses it. Requires a valid capture session.
+void AVFMediaEncoder::toggleRecord(bool enable)
+{
+    if (m_service && m_service->session()) {
+        if (enable)
+            [m_writer resume];
+        else
+            [m_writer pause];
+    }
+}
+
+// Invoked (via Q_INVOKABLE) by the asset writer once it has started.
+// Intentionally empty: nothing needs to happen on this side at start time.
+void AVFMediaEncoder::assetWriterStarted()
+{
+}
+
+// Invoked (via Q_INVOKABLE) by the asset writer when it has finished
+// writing. Releases the applied settings, restores the capture delegates
+// that were redirected into the writer, restarts the capture session for
+// live preview, and finally transitions to StoppedState.
+void AVFMediaEncoder::assetWriterFinished()
+{
+
+    const QMediaRecorder::RecorderState lastState = m_state;
+
+    unapplySettings();
+
+    if (m_service) {
+        AVFCameraSession *session = m_service->session();
+
+        if (session->videoOutput()) {
+            session->videoOutput()->resetCaptureDelegate();
+        }
+        if (session->audioPreviewDelegate()) {
+            [session->audioPreviewDelegate() resetAudioPreviewDelegate];
+        }
+        // Only restart the session if there is something left to feed.
+        if (session->videoOutput() || session->audioPreviewDelegate())
+            [session->captureSession() startRunning];
+    }
+
+    m_state = QMediaRecorder::StoppedState;
+    if (m_state != lastState)
+        Q_EMIT stateChanged(m_state);
+}
+
+// Invoked (via Q_INVOKABLE) by the asset writer on failure: reports the
+// error to the recorder and, if a recording is still in flight, stops the
+// writer so it can finalize/clean up.
+void AVFMediaEncoder::assetWriterError(QString err)
+{
+    updateError(QMediaRecorder::FormatError, err);
+    if (m_state != QMediaRecorder::StoppedState)
+        stopWriter();
+}
+
+// Re-establishes the tracking of the current camera control's active state
+// whenever the service's camera changes. Uses the pointer-to-member connect
+// syntax for compile-time checking, consistent with the connect in
+// setCaptureSession(), and Qt::UniqueConnection so that repeated camera
+// change notifications for the same camera control do not stack duplicate
+// connections (which would invoke cameraActiveChanged() multiple times per
+// signal emission).
+void AVFMediaEncoder::onCameraChanged()
+{
+    if (m_service && m_service->avfCameraControl()) {
+        AVFCamera *cameraControl = m_service->avfCameraControl();
+        connect(cameraControl, &AVFCamera::activeChanged,
+                this, &AVFMediaEncoder::cameraActiveChanged, Qt::UniqueConnection);
+    }
+}
+
+// Reacts to the camera being switched on/off. When the camera becomes
+// inactive while a recording may be in flight, stop the asset writer so the
+// output file is finalized instead of being left dangling.
+void AVFMediaEncoder::cameraActiveChanged(bool active)
+{
+    Q_ASSERT(m_service);
+    Q_ASSERT(m_service->avfCameraControl());
+
+    if (!active)
+        stopWriter();
+}
+
+// Asks the asset writer to stop; completion is reported back asynchronously
+// through assetWriterFinished().
+void AVFMediaEncoder::stopWriter()
+{
+    [m_writer stop];
+}
+
+#include "moc_avfmediaencoder_p.cpp"
diff --git a/src/plugins/multimedia/darwin/camera/avfmediaencoder_p.h b/src/plugins/multimedia/darwin/camera/avfmediaencoder_p.h
new file mode 100644
index 000000000..23aced325
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/avfmediaencoder_p.h
@@ -0,0 +1,96 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef AVFMEDIAENCODER_H
+#define AVFMEDIAENCODER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include "avfmediaassetwriter_p.h"
+#include "avfcamerautility_p.h"
+#include "qaudiodevice.h"
+
+#include <private/qplatformmediarecorder_p.h>
+#include <private/qplatformmediacapture_p.h>
+#include <QtMultimedia/qmediametadata.h>
+
+#include <QtCore/qglobal.h>
+#include <QtCore/qurl.h>
+
+#include <AVFoundation/AVFoundation.h>
+
+QT_BEGIN_NAMESPACE
+
+class AVFCameraService;
+class QString;
+class QUrl;
+
+// Darwin media recorder backend: drives an AVFMediaAssetWriter to encode the
+// capture session's audio/video streams into a file.
+class AVFMediaEncoder : public QObject, public QPlatformMediaRecorder
+{
+    Q_OBJECT
+public:
+    AVFMediaEncoder(QMediaRecorder *parent);
+    ~AVFMediaEncoder() override;
+
+    bool isLocationWritable(const QUrl &location) const override;
+
+    QMediaRecorder::RecorderState state() const override;
+
+    qint64 duration() const override;
+
+    void record(QMediaEncoderSettings &settings) override;
+    void pause() override;
+    void resume() override;
+    void stop() override;
+
+    void setMetaData(const QMediaMetaData &) override;
+    QMediaMetaData metaData() const override;
+
+    AVFCameraService *cameraService() const { return m_service; }
+
+    void setCaptureSession(QPlatformMediaCaptureSession *session);
+
+    void updateDuration(qint64 duration);
+
+    void toggleRecord(bool enable);
+
+private:
+    void applySettings(QMediaEncoderSettings &settings);
+    void unapplySettings();
+
+    // Called back from the asset writer (possibly cross-thread, hence
+    // Q_INVOKABLE).
+    Q_INVOKABLE void assetWriterStarted();
+    Q_INVOKABLE void assetWriterFinished();
+    Q_INVOKABLE void assetWriterError(QString error);
+
+private Q_SLOTS:
+    void onCameraChanged();
+    void cameraActiveChanged(bool);
+
+private:
+    void stopWriter();
+
+    AVFCameraService *m_service = nullptr;
+    AVFScopedPointer<QT_MANGLE_NAMESPACE(AVFMediaAssetWriter)> m_writer;
+
+    // In-class defaults added for consistency with m_service: these members
+    // previously carried no initializer at the declaration level.
+    QMediaRecorder::RecorderState m_state = QMediaRecorder::StoppedState;
+
+    QMediaMetaData m_metaData;
+
+    qint64 m_duration = 0;
+
+    // Retained settings dictionaries, owned via applySettings()/
+    // unapplySettings().
+    NSDictionary *m_audioSettings = nil;
+    NSDictionary *m_videoSettings = nil;
+};
+
+QT_END_NAMESPACE
+
+#endif // AVFMEDIAENCODER_H
diff --git a/src/plugins/multimedia/darwin/camera/qavfcamerabase.mm b/src/plugins/multimedia/darwin/camera/qavfcamerabase.mm
new file mode 100644
index 000000000..11dfa99a8
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/qavfcamerabase.mm
@@ -0,0 +1,1080 @@
+// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "avfcameradebug_p.h"
+#include "qavfcamerabase_p.h"
+#include "avfcamerautility_p.h"
+#include <private/qcameradevice_p.h>
+#include "qavfhelpers_p.h"
+#include <private/qplatformmediaintegration_p.h>
+#include <QtCore/qset.h>
+#include <QtCore/qsystemdetection.h>
+
+QT_USE_NAMESPACE
+
+namespace {
+
+// All these methods to work with exposure/ISO/SS in custom mode do not support macOS.
+
+#ifdef Q_OS_IOS
+
+// Misc. helpers to check values/ranges:
+
+// Returns true when 'duration' lies within the active format's supported
+// exposure-duration range. CMTimeCompare(a, b) returns -1 iff a < b, so
+// "!= -1" reads as duration >= min and max >= duration respectively.
+bool qt_check_exposure_duration(AVCaptureDevice *captureDevice, CMTime duration)
+{
+    Q_ASSERT(captureDevice);
+
+    AVCaptureDeviceFormat *activeFormat = captureDevice.activeFormat;
+    if (!activeFormat) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to obtain capture device format";
+        return false;
+    }
+
+    return CMTimeCompare(duration, activeFormat.minExposureDuration) != -1
+           && CMTimeCompare(activeFormat.maxExposureDuration, duration) != -1;
+}
+
+// Returns true when newISO lies within the [minISO, maxISO] range of the
+// device's active format; false when no active format is available.
+bool qt_check_ISO_value(AVCaptureDevice *captureDevice, int newISO)
+{
+    Q_ASSERT(captureDevice);
+
+    AVCaptureDeviceFormat *format = captureDevice.activeFormat;
+    if (!format) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to obtain capture device format";
+        return false;
+    }
+
+    return newISO >= format.minISO && newISO <= format.maxISO;
+}
+
+// Compares the requested duration (seconds) with the device's current
+// exposure duration, using the device's own timescale so the comparison is
+// exact. CMTimeCompare returns 0 on equality.
+bool qt_exposure_duration_equal(AVCaptureDevice *captureDevice, qreal qDuration)
+{
+    Q_ASSERT(captureDevice);
+    const CMTime avDuration = CMTimeMakeWithSeconds(qDuration, captureDevice.exposureDuration.timescale);
+    return !CMTimeCompare(avDuration, captureDevice.exposureDuration);
+}
+
+// Fuzzy float comparison between the requested ISO and the device's current
+// ISO (the device reports ISO as a float).
+bool qt_iso_equal(AVCaptureDevice *captureDevice, int iso)
+{
+    Q_ASSERT(captureDevice);
+    return qFuzzyCompare(float(iso), captureDevice.ISO);
+}
+
+// Fuzzy comparison between the requested exposure bias and the device's
+// current exposure target bias.
+bool qt_exposure_bias_equal(AVCaptureDevice *captureDevice, qreal bias)
+{
+    Q_ASSERT(captureDevice);
+    return qFuzzyCompare(bias, qreal(captureDevice.exposureTargetBias));
+}
+
+// Converters:
+
+// Maps a QCamera::ExposureMode onto its AVFoundation counterpart, but only
+// when the capture device actually supports that AV mode. Returns true and
+// fills avMode on success; leaves avMode untouched otherwise.
+bool qt_convert_exposure_mode(AVCaptureDevice *captureDevice, QCamera::ExposureMode mode,
+                              AVCaptureExposureMode &avMode)
+{
+    Q_ASSERT(captureDevice);
+
+    AVCaptureExposureMode candidate;
+    switch (mode) {
+    case QCamera::ExposureAuto:
+        candidate = AVCaptureExposureModeContinuousAutoExposure;
+        break;
+    case QCamera::ExposureManual:
+        candidate = AVCaptureExposureModeCustom;
+        break;
+    default:
+        return false;
+    }
+
+    if (![captureDevice isExposureModeSupported:candidate])
+        return false;
+
+    avMode = candidate;
+    return true;
+}
+
+#endif // defined(Q_OS_IOS)
+
+} // Unnamed namespace.
+
+
+// Registers for AVFoundation device (dis)connection notifications on the
+// main queue and performs an initial device scan. The blocks capture 'this';
+// the matching removeObserver calls in the destructor keep the blocks from
+// firing after destruction.
+QAVFVideoDevices::QAVFVideoDevices(QPlatformMediaIntegration *integration)
+    : QPlatformVideoDevices(integration)
+{
+    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
+    m_deviceConnectedObserver = [notificationCenter addObserverForName:AVCaptureDeviceWasConnectedNotification
+                                                                object:nil
+                                                                 queue:[NSOperationQueue mainQueue]
+                                                            usingBlock:^(NSNotification *) {
+                                                                this->updateCameraDevices();
+                                                            }];
+
+    m_deviceDisconnectedObserver = [notificationCenter addObserverForName:AVCaptureDeviceWasDisconnectedNotification
+                                                                   object:nil
+                                                                    queue:[NSOperationQueue mainQueue]
+                                                               usingBlock:^(NSNotification *) {
+                                                                   this->updateCameraDevices();
+                                                               }];
+    updateCameraDevices();
+}
+
+// Unregisters both notification observers so their blocks (which capture
+// 'this') can no longer be invoked after destruction.
+QAVFVideoDevices::~QAVFVideoDevices()
+{
+    NSNotificationCenter* notificationCenter = [NSNotificationCenter defaultCenter];
+    [notificationCenter removeObserver:(id)m_deviceConnectedObserver];
+    [notificationCenter removeObserver:(id)m_deviceDisconnectedObserver];
+}
+
+// Returns the camera list produced by the most recent updateCameraDevices()
+// scan.
+QList<QCameraDevice> QAVFVideoDevices::videoDevices() const
+{
+    return m_cameraDevices;
+}
+
+// Rescans the system's cameras through an AVCaptureDeviceDiscoverySession,
+// converts each device and its video formats into QCameraDevice entries
+// (skipping pixel formats Qt cannot handle and cameras with no usable
+// format), and emits videoInputsChanged() when the resulting list differs
+// from the cached one. The first discovered device becomes the default.
+void QAVFVideoDevices::updateCameraDevices()
+{
+#ifdef Q_OS_IOS
+    // Cameras can't change dynamically on iOS. Update only once.
+    if (!m_cameraDevices.isEmpty())
+        return;
+#endif
+
+    QList<QCameraDevice> cameras;
+
+    // List of all capture device types that we want to discover. Seems that this is the
+    // only way to discover all types. This filter is mandatory and has no "unspecified"
+    // option like AVCaptureDevicePosition(Unspecified) has. Order of the list is important
+    // because discovered devices will be in the same order and we want the first one found
+    // to be our default device.
+    NSArray *discoveryDevices = @[
+#ifdef Q_OS_IOS
+        AVCaptureDeviceTypeBuiltInTripleCamera,    // We always prefer triple camera.
+        AVCaptureDeviceTypeBuiltInDualCamera,      // If triple is not available, we prefer
+                                                   // dual with wide + tele lens.
+        AVCaptureDeviceTypeBuiltInDualWideCamera,  // Dual with wide and ultrawide is still
+                                                   // better than single.
+#endif
+        AVCaptureDeviceTypeBuiltInWideAngleCamera, // This is the most common single camera type.
+                                                   // We prefer that over tele and ultra-wide.
+#ifdef Q_OS_IOS
+        AVCaptureDeviceTypeBuiltInTelephotoCamera, // Cannot imagine how, but if only tele and
+                                                   // ultrawide are available, we prefer tele.
+        AVCaptureDeviceTypeBuiltInUltraWideCamera,
+#endif
+    ];
+
+#if QT_DARWIN_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_14_0, __IPHONE_17_0, __TVOS_NA, __WATCHOS_NA)
+    if (@available(macOS 14, iOS 17, *)) {
+        discoveryDevices = [discoveryDevices arrayByAddingObjectsFromArray: @[
+                            AVCaptureDeviceTypeExternal,
+                            AVCaptureDeviceTypeContinuityCamera
+        ]];
+    } else
+#endif
+    {
+#ifdef Q_OS_MACOS
+        QT_WARNING_PUSH
+        QT_WARNING_DISABLE_DEPRECATED
+        discoveryDevices = [discoveryDevices arrayByAddingObjectsFromArray: @[
+                            AVCaptureDeviceTypeExternalUnknown
+        ]];
+        QT_WARNING_POP
+#endif
+    }
+    // Create discovery session to discover all possible camera types of the system.
+    // Both "hard" and "soft" types.
+    AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession
+                                                         discoverySessionWithDeviceTypes:discoveryDevices
+                                                         mediaType:AVMediaTypeVideo
+                                                         position:AVCaptureDevicePositionUnspecified];
+    NSArray<AVCaptureDevice *> *videoDevices = discoverySession.devices;
+
+    for (AVCaptureDevice *device in videoDevices) {
+        auto info = std::make_unique<QCameraDevicePrivate>();
+        if ([videoDevices[0].uniqueID isEqualToString:device.uniqueID])
+            info->isDefault = true;
+        info->id = QByteArray([[device uniqueID] UTF8String]);
+        info->description = QString::fromNSString([device localizedName]);
+
+        qCDebug(qLcCamera) << "Handling camera info" << info->description
+                           << (info->isDefault ? "(default)" : "");
+
+        QSet<QSize> photoResolutions;
+        QList<QCameraFormat> videoFormats;
+
+        for (AVCaptureDeviceFormat *format in device.formats) {
+            if (![format.mediaType isEqualToString:AVMediaTypeVideo])
+                continue;
+
+            auto dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+            QSize resolution(dimensions.width, dimensions.height);
+            photoResolutions.insert(resolution);
+
+            float maxFrameRate = 0;
+            float minFrameRate = 1.e6;
+
+            auto encoding = CMVideoFormatDescriptionGetCodecType(format.formatDescription);
+            auto pixelFormat = QAVFHelpers::fromCVPixelFormat(encoding);
+            auto colorRange = QAVFHelpers::colorRangeForCVPixelFormat(encoding);
+            // Ignore pixel formats we can't handle
+            if (pixelFormat == QVideoFrameFormat::Format_Invalid) {
+                qCDebug(qLcCamera) << "ignore camera CV format" << encoding
+                                   << "as no matching video format found";
+                continue;
+            }
+
+            for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) {
+                if (frameRateRange.minFrameRate < minFrameRate)
+                    minFrameRate = frameRateRange.minFrameRate;
+                if (frameRateRange.maxFrameRate > maxFrameRate)
+                    maxFrameRate = frameRateRange.maxFrameRate;
+            }
+
+#ifdef Q_OS_IOS
+            // From Apple's docs (iOS):
+            // By default, AVCaptureStillImageOutput emits images with the same dimensions as
+            // its source AVCaptureDevice instance’s activeFormat.formatDescription. However,
+            // if you set this property to YES, the receiver emits still images at the capture
+            // device’s highResolutionStillImageDimensions value.
+            const QSize hrRes(qt_device_format_high_resolution(format));
+            if (!hrRes.isNull() && hrRes.isValid())
+                photoResolutions.insert(hrRes);
+#endif
+
+            qCDebug(qLcCamera) << "Add camera format. pixelFormat:" << pixelFormat
+                               << "colorRange:" << colorRange << "cvPixelFormat" << encoding
+                               << "resolution:" << resolution << "frameRate: [" << minFrameRate
+                               << maxFrameRate << "]";
+
+            auto *f = new QCameraFormatPrivate{ QSharedData(), pixelFormat, resolution,
+                                                minFrameRate, maxFrameRate, colorRange };
+            videoFormats << f->create();
+        }
+        if (videoFormats.isEmpty()) {
+            // skip broken cameras without valid formats
+            qCWarning(qLcCamera())
+                    << "Skip camera" << info->description << "without supported formats";
+            continue;
+        }
+        info->videoFormats = videoFormats;
+        info->photoResolutions = photoResolutions.values();
+
+        cameras.append(info.release()->create());
+    }
+
+    if (cameras != m_cameraDevices) {
+        m_cameraDevices = cameras;
+        emit videoInputsChanged();
+    }
+}
+
+
+// Constructs the platform camera wrapper; a non-null owning QCamera is
+// required.
+QAVFCameraBase::QAVFCameraBase(QCamera *camera)
+   : QPlatformCamera(camera)
+{
+    Q_ASSERT(camera);
+}
+
+// Out-of-line destructor (no explicit cleanup required here).
+QAVFCameraBase::~QAVFCameraBase()
+{
+}
+
+// Reports whether the camera is currently active (see setActive()).
+bool QAVFCameraBase::isActive() const
+{
+    return m_active;
+}
+
+// Activates or deactivates the camera. Activation is refused while no
+// camera device has been selected; the device configuration is (re)applied
+// on every activation.
+void QAVFCameraBase::setActive(bool active)
+{
+    const bool unchanged = (m_active == active);
+    const bool cannotActivate = active && m_cameraDevice.isNull();
+    if (unchanged || cannotActivate)
+        return;
+
+    m_active = active;
+    if (m_active)
+        updateCameraConfiguration();
+
+    Q_EMIT activeChanged(m_active);
+}
+
+// Selects a different camera device and resets the format selection so a
+// best match for the new device is picked (see setCameraFormat()).
+void QAVFCameraBase::setCamera(const QCameraDevice &camera)
+{
+    if (m_cameraDevice == camera)
+        return;
+    m_cameraDevice = camera;
+    setCameraFormat({});
+}
+
+// Selects the capture format. A null format requests the best automatic
+// choice for the current device; a non-null format must be one the device
+// actually advertises. Returns false when the format is not supported.
+bool QAVFCameraBase::setCameraFormat(const QCameraFormat &format)
+{
+    if (format.isNull()) {
+        m_cameraFormat = findBestCameraFormat(m_cameraDevice);
+        return true;
+    }
+
+    if (!m_cameraDevice.videoFormats().contains(format))
+        return false;
+
+    m_cameraFormat = format;
+    return true;
+}
+
+// Resolves the currently selected QCameraDevice to its AVCaptureDevice via
+// the unique-ID lookup. Returns nullptr when no device is selected (and nil
+// when the ID no longer resolves, e.g. the camera was unplugged).
+AVCaptureDevice *QAVFCameraBase::device() const
+{
+    const QByteArray uniqueId = m_cameraDevice.id();
+    if (uniqueId.isEmpty())
+        return nullptr;
+
+    NSString *nsId = [NSString stringWithUTF8String:uniqueId.constData()];
+    return [AVCaptureDevice deviceWithUniqueID:nsId];
+}
+
+#ifdef Q_OS_IOS
+namespace
+{
+
+// Tells whether the given Qt focus mode has a usable AVFoundation
+// counterpart: AVFoundation offers 'Manual', 'Auto' and 'Continuous', where
+// 'Manual' is actually 'Locked' plus a writable 'lensPosition' property.
+bool qt_focus_mode_supported(QCamera::FocusMode mode)
+{
+    switch (mode) {
+    case QCamera::FocusModeAuto:
+    case QCamera::FocusModeManual:
+        return true;
+    default:
+        return false;
+    }
+}
+
+// Maps a Qt focus mode onto AVFoundation: modes with a fixed focal target
+// (hyperfocal, infinity, manual) become 'Locked'; everything else falls back
+// to continuous auto-focus.
+AVCaptureFocusMode avf_focus_mode(QCamera::FocusMode requestedMode)
+{
+    const bool locked = requestedMode == QCamera::FocusModeHyperfocal
+            || requestedMode == QCamera::FocusModeInfinity
+            || requestedMode == QCamera::FocusModeManual;
+    return locked ? AVCaptureFocusModeLocked : AVCaptureFocusModeContinuousAutoFocus;
+}
+
+}
+#endif
+
+// Applies the requested focus mode to the capture device (iOS only; no-op
+// elsewhere). When no device is present yet, supported modes are still
+// recorded via focusModeChanged() so they take effect once a device exists.
+void QAVFCameraBase::setFocusMode(QCamera::FocusMode mode)
+{
+#ifdef Q_OS_IOS
+    if (focusMode() == mode)
+        return;
+
+    AVCaptureDevice *captureDevice = device();
+    if (!captureDevice) {
+        // No device yet: just remember the mode if it is representable.
+        if (qt_focus_mode_supported(mode)) {
+            focusModeChanged(mode);
+        } else {
+            qCDebug(qLcCamera) << Q_FUNC_INFO
+                               << "focus mode not supported";
+        }
+        return;
+    }
+
+    if (isFocusModeSupported(mode)) {
+        const AVFConfigurationLock lock(captureDevice);
+        if (!lock) {
+            qCDebug(qLcCamera) << Q_FUNC_INFO
+                               << "failed to lock for configuration";
+            return;
+        }
+
+        captureDevice.focusMode = avf_focus_mode(mode);
+    } else {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "focus mode not supported";
+        return;
+    }
+
+    Q_EMIT focusModeChanged(mode);
+#else
+    Q_UNUSED(mode);
+#endif
+}
+
+// Queries the capture device (iOS only) for support of the AV equivalent of
+// 'mode'; near/far auto modes additionally require range-restriction
+// support. Without a device (and on macOS) only FocusModeAuto is claimed.
+bool QAVFCameraBase::isFocusModeSupported(QCamera::FocusMode mode) const
+{
+#ifdef Q_OS_IOS
+    AVCaptureDevice *captureDevice = device();
+    if (captureDevice) {
+        AVCaptureFocusMode avMode = avf_focus_mode(mode);
+        switch (mode) {
+        case QCamera::FocusModeAuto:
+        case QCamera::FocusModeHyperfocal:
+        case QCamera::FocusModeInfinity:
+        case QCamera::FocusModeManual:
+            return [captureDevice isFocusModeSupported:avMode];
+        case QCamera::FocusModeAutoNear:
+            Q_FALLTHROUGH();
+        case QCamera::FocusModeAutoFar:
+            return captureDevice.autoFocusRangeRestrictionSupported
+                   && [captureDevice isFocusModeSupported:avMode];
+        }
+    }
+#endif
+    return mode == QCamera::FocusModeAuto; // stupid builtin webcam doesn't do any focus handling, but hey it's usually focused :)
+}
+
+// Sets the focus point of interest. 'point' must lie in normalized
+// [0,1]x[0,1] coordinates; out-of-range points are rejected. Setting a POI
+// triggers a one-shot autofocus unless the mode is already FocusModeAuto.
+// customFocusPointChanged() is only emitted when a device is present and
+// supports a focus POI.
+void QAVFCameraBase::setCustomFocusPoint(const QPointF &point)
+{
+    if (customFocusPoint() == point)
+        return;
+
+    if (!QRectF(0.f, 0.f, 1.f, 1.f).contains(point)) {
+        // ### release custom focus point, tell the camera to focus where it wants...
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid focus point (out of range)";
+        return;
+    }
+
+    AVCaptureDevice *captureDevice = device();
+    if (!captureDevice)
+        return;
+
+    if ([captureDevice isFocusPointOfInterestSupported]) {
+        const AVFConfigurationLock lock(captureDevice);
+        if (!lock) {
+            qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
+            return;
+        }
+
+        const CGPoint focusPOI = CGPointMake(point.x(), point.y());
+        [captureDevice setFocusPointOfInterest:focusPOI];
+        if (focusMode() != QCamera::FocusModeAuto)
+            [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus];
+
+        customFocusPointChanged(point);
+    }
+}
+
+// Sets a manual focus distance by locking the lens at position 'd' (passed
+// straight through as AVFoundation's lensPosition, documented range
+// 0.0-1.0). iOS only; no-op elsewhere.
+void QAVFCameraBase::setFocusDistance(float d)
+{
+#ifdef Q_OS_IOS
+    AVCaptureDevice *captureDevice = device();
+    if (!captureDevice)
+        return;
+
+    // BUG FIX: this support check was inverted - the old code bailed out
+    // with "not supported" precisely when the device *does* support locking
+    // focus with a custom lens position, and attempted the call otherwise.
+    if (!captureDevice.lockingFocusWithCustomLensPositionSupported) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "Setting custom focus distance not supported\n";
+        return;
+    }
+
+    {
+        AVFConfigurationLock lock(captureDevice);
+        if (!lock) {
+            qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
+            return;
+        }
+        [captureDevice setFocusModeLockedWithLensPosition:d completionHandler:nil];
+    }
+    focusDistanceChanged(d);
+#else
+    Q_UNUSED(d);
+#endif
+}
+
+// Pushes the full set of cached camera parameters (focus POI/mode, zoom,
+// exposure duration/ISO/bias, flash and torch capabilities) onto the
+// capture device. Called from setActive(true). The whole method runs under
+// a single configuration lock; the iOS-only section handles exposure, and
+// the trailing part re-probes flash/torch support before applying them.
+void QAVFCameraBase::updateCameraConfiguration()
+{
+    AVCaptureDevice *captureDevice = device();
+    if (!captureDevice) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "capture device is nil in 'active' state";
+        return;
+    }
+
+    const AVFConfigurationLock lock(captureDevice);
+    if (!lock) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
+        return;
+    }
+
+    if ([captureDevice isFocusPointOfInterestSupported]) {
+        auto point = customFocusPoint();
+        const CGPoint focusPOI = CGPointMake(point.x(), point.y());
+        [captureDevice setFocusPointOfInterest:focusPOI];
+    }
+
+#ifdef Q_OS_IOS
+    if (focusMode() != QCamera::FocusModeAuto) {
+        const AVCaptureFocusMode avMode = avf_focus_mode(focusMode());
+        if (captureDevice.focusMode != avMode) {
+            if ([captureDevice isFocusModeSupported:avMode]) {
+                [captureDevice setFocusMode:avMode];
+            } else {
+                qCDebug(qLcCamera) << Q_FUNC_INFO << "focus mode not supported";
+            }
+        }
+    }
+
+    if (!captureDevice.activeFormat) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "camera state is active, but active format is nil";
+        return;
+    }
+
+    minimumZoomFactorChanged(captureDevice.minAvailableVideoZoomFactor);
+    maximumZoomFactorChanged(captureDevice.activeFormat.videoMaxZoomFactor);
+
+    captureDevice.videoZoomFactor = zoomFactor();
+
+    CMTime newDuration = AVCaptureExposureDurationCurrent;
+    bool setCustomMode = false;
+
+    float exposureTime = manualExposureTime();
+    if (exposureTime > 0
+        && !qt_exposure_duration_equal(captureDevice, exposureTime)) {
+        newDuration = CMTimeMakeWithSeconds(exposureTime, captureDevice.exposureDuration.timescale);
+        if (!qt_check_exposure_duration(captureDevice, newDuration)) {
+            qCDebug(qLcCamera) << Q_FUNC_INFO << "requested exposure duration is out of range";
+            return;
+        }
+        setCustomMode = true;
+    }
+
+    float newISO = AVCaptureISOCurrent;
+    int iso = manualIsoSensitivity();
+    if (iso > 0 && !qt_iso_equal(captureDevice, iso)) {
+        newISO = iso;
+        if (!qt_check_ISO_value(captureDevice, newISO)) {
+            qCDebug(qLcCamera) << Q_FUNC_INFO << "requested ISO value is out of range";
+            return;
+        }
+        setCustomMode = true;
+    }
+
+    float bias = exposureCompensation();
+    if (bias != 0 && !qt_exposure_bias_equal(captureDevice, bias)) {
+        // TODO: mixed fpns.
+        if (bias < captureDevice.minExposureTargetBias || bias > captureDevice.maxExposureTargetBias) {
+            qCDebug(qLcCamera) << Q_FUNC_INFO << "exposure compensation value is"
+                               << "out of range";
+            return;
+        }
+        [captureDevice setExposureTargetBias:bias completionHandler:nil];
+    }
+
+    // Setting shutter speed (exposure duration) or ISO values
+    // also reset exposure mode into Custom. With this settings
+    // we ignore any attempts to set exposure mode.
+
+    if (setCustomMode) {
+        [captureDevice setExposureModeCustomWithDuration:newDuration
+                                                     ISO:newISO
+                                       completionHandler:nil];
+        return;
+    }
+
+    QCamera::ExposureMode qtMode = exposureMode();
+    AVCaptureExposureMode avMode = AVCaptureExposureModeContinuousAutoExposure;
+    if (!qt_convert_exposure_mode(captureDevice, qtMode, avMode)) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "requested exposure mode is not supported";
+        return;
+    }
+
+    captureDevice.exposureMode = avMode;
+#endif
+
+    isFlashSupported = isFlashAutoSupported = false;
+    isTorchSupported = isTorchAutoSupported = false;
+
+    if (captureDevice.hasFlash) {
+        if ([captureDevice isFlashModeSupported:AVCaptureFlashModeOn])
+            isFlashSupported = true;
+        if ([captureDevice isFlashModeSupported:AVCaptureFlashModeAuto])
+            isFlashAutoSupported = true;
+    }
+
+    if (captureDevice.hasTorch) {
+        if ([captureDevice isTorchModeSupported:AVCaptureTorchModeOn])
+            isTorchSupported = true;
+        if ([captureDevice isTorchModeSupported:AVCaptureTorchModeAuto])
+            isTorchAutoSupported = true;
+    }
+
+    applyFlashSettings();
+    flashReadyChanged(isFlashSupported);
+}
+
+// Recomputes and publishes the QCamera feature set for the current device:
+// exposure-related features are unconditionally claimed on iOS, while focus
+// distance and custom focus point depend on device capabilities.
+void QAVFCameraBase::updateCameraProperties()
+{
+    QCamera::Features features;
+    AVCaptureDevice *captureDevice = device();
+
+#ifdef Q_OS_IOS
+    features = QCamera::Feature::ColorTemperature | QCamera::Feature::ExposureCompensation |
+                          QCamera::Feature::IsoSensitivity | QCamera::Feature::ManualExposureTime;
+
+    if (captureDevice && [captureDevice isLockingFocusWithCustomLensPositionSupported])
+        features |= QCamera::Feature::FocusDistance;
+#endif
+
+    if (captureDevice && [captureDevice isFocusPointOfInterestSupported])
+        features |= QCamera::Feature::CustomFocusPoint;
+
+    supportedFeaturesChanged(features);
+}
+
+// Zooms to 'factor', clamped to the device's supported range. A positive
+// 'rate' requests a smooth ramp; rate <= 0 jumps immediately. iOS only - on
+// other platforms both parameters are intentionally unused (hence the
+// unconditional Q_UNUSED markers, which also silence macOS builds).
+void QAVFCameraBase::zoomTo(float factor, float rate)
+{
+    Q_UNUSED(factor);
+    Q_UNUSED(rate);
+
+#ifdef Q_OS_IOS
+    if (zoomFactor() == factor)
+        return;
+
+    AVCaptureDevice *captureDevice = device();
+    if (!captureDevice || !captureDevice.activeFormat)
+        return;
+
+    factor = qBound(captureDevice.minAvailableVideoZoomFactor, factor,
+                    captureDevice.activeFormat.videoMaxZoomFactor);
+
+    const AVFConfigurationLock lock(captureDevice);
+    if (!lock) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
+        return;
+    }
+
+    if (rate <= 0)
+        captureDevice.videoZoomFactor = factor;
+    else
+        [captureDevice rampToVideoZoomFactor:factor withRate:rate];
+#endif
+}
+
+// Updates the requested flash mode. While the camera is active, unsupported
+// modes are rejected and supported ones are applied immediately; while
+// inactive the mode is merely stored for later activation.
+void QAVFCameraBase::setFlashMode(QCamera::FlashMode mode)
+{
+    if (flashMode() == mode)
+        return;
+
+    const bool active = isActive();
+    if (active && !isFlashModeSupported(mode)) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "unsupported mode" << mode;
+        return;
+    }
+
+    flashModeChanged(mode);
+    if (active)
+        applyFlashSettings();
+}
+
+// FlashOff is always possible; On/Auto reflect the device capabilities
+// probed in updateCameraConfiguration().
+bool QAVFCameraBase::isFlashModeSupported(QCamera::FlashMode mode) const
+{
+    switch (mode) {
+    case QCamera::FlashOff:
+        return true;
+    case QCamera::FlashOn:
+        return isFlashSupported;
+    default: // QCamera::FlashAuto
+        return isFlashAutoSupported;
+    }
+}
+
+// The flash counts as "ready" only when the camera is active, the device
+// exists and has a flash, the selected mode is supported, and AVFoundation
+// reports the flash as currently available. Per AVCaptureDevice's docs:
+// "The flash may become unavailable if, for example, the device overheats
+// and needs to cool off."
+bool QAVFCameraBase::isFlashReady() const
+{
+    if (!isActive())
+        return false;
+
+    AVCaptureDevice *captureDevice = device();
+    const bool usable = captureDevice && captureDevice.hasFlash
+            && isFlashModeSupported(flashMode());
+
+    return usable && [captureDevice isFlashAvailable];
+}
+
+// Updates the requested torch mode. While the camera is active, unsupported
+// modes are rejected and supported ones are applied immediately; while
+// inactive the mode is merely stored for later activation.
+void QAVFCameraBase::setTorchMode(QCamera::TorchMode mode)
+{
+    if (torchMode() == mode)
+        return;
+
+    const bool active = isActive();
+    if (active && !isTorchModeSupported(mode)) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "unsupported torch mode" << mode;
+        return;
+    }
+
+    torchModeChanged(mode);
+    if (active)
+        applyFlashSettings();
+}
+
+// TorchOff is always possible; On/Auto reflect the device capabilities
+// probed in updateCameraConfiguration().
+bool QAVFCameraBase::isTorchModeSupported(QCamera::TorchMode mode) const
+{
+    switch (mode) {
+    case QCamera::TorchOff:
+        return true;
+    case QCamera::TorchOn:
+        return isTorchSupported;
+    default: // QCamera::TorchAuto
+        return isTorchAutoSupported;
+    }
+}
+
+// Applies an exposure mode (iOS only; no-op elsewhere). Only Auto and
+// Manual are accepted; without a device the mode is recorded and will be
+// applied on configuration. With a device, the mode must convert to a
+// supported AVFoundation mode before being set under a configuration lock.
+void QAVFCameraBase::setExposureMode(QCamera::ExposureMode qtMode)
+{
+#ifdef Q_OS_IOS
+    if (qtMode != QCamera::ExposureAuto && qtMode != QCamera::ExposureManual) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "exposure mode not supported";
+        return;
+    }
+
+    AVCaptureDevice *captureDevice = device();
+    if (!captureDevice) {
+        exposureModeChanged(qtMode);
+        return;
+    }
+
+    AVCaptureExposureMode avMode = AVCaptureExposureModeContinuousAutoExposure;
+    if (!qt_convert_exposure_mode(captureDevice, qtMode, avMode)) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "exposure mode not supported";
+        return;
+    }
+
+    const AVFConfigurationLock lock(captureDevice);
+    if (!lock) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock a capture device"
+                           << "for configuration";
+        return;
+    }
+
+    [captureDevice setExposureMode:avMode];
+    exposureModeChanged(qtMode);
+#else
+    Q_UNUSED(qtMode);
+#endif
+}
+
+// Auto exposure is always claimed; manual exposure additionally requires a
+// device supporting AVFoundation's custom exposure mode; any other mode is
+// unsupported.
+bool QAVFCameraBase::isExposureModeSupported(QCamera::ExposureMode mode) const
+{
+    switch (mode) {
+    case QCamera::ExposureAuto:
+        return true;
+    case QCamera::ExposureManual: {
+        AVCaptureDevice *captureDevice = device();
+        return captureDevice && [captureDevice isExposureModeSupported:AVCaptureExposureModeCustom];
+    }
+    default:
+        return false;
+    }
+}
+
+// Pushes the cached flash and torch modes onto the capture device. Must
+// only run while active (asserted). Each AV mode is set through a guarded
+// helper because hardware capability flags can disagree with the per-mode
+// support queries; On/Auto are additionally gated on current availability.
+void QAVFCameraBase::applyFlashSettings()
+{
+    Q_ASSERT(isActive());
+
+    AVCaptureDevice *captureDevice = device();
+    if (!captureDevice) {
+        qCDebug(qLcCamera) << Q_FUNC_INFO << "no capture device found";
+        return;
+    }
+
+    const AVFConfigurationLock lock(captureDevice);
+
+    if (captureDevice.hasFlash) {
+        const auto mode = flashMode();
+
+        auto setAvFlashModeSafe = [&captureDevice](AVCaptureFlashMode avFlashMode) {
+            // Note, in some cases captureDevice.hasFlash == false even though
+            // no there're no supported flash modes.
+            if ([captureDevice isFlashModeSupported:avFlashMode])
+                captureDevice.flashMode = avFlashMode;
+            else
+                qCDebug(qLcCamera) << "Attempt to setup unsupported flash mode " << avFlashMode;
+        };
+
+        if (mode == QCamera::FlashOff) {
+            setAvFlashModeSafe(AVCaptureFlashModeOff);
+        } else {
+            if ([captureDevice isFlashAvailable]) {
+                if (mode == QCamera::FlashOn)
+                    setAvFlashModeSafe(AVCaptureFlashModeOn);
+                else if (mode == QCamera::FlashAuto)
+                    setAvFlashModeSafe(AVCaptureFlashModeAuto);
+            } else {
+                qCDebug(qLcCamera) << Q_FUNC_INFO << "flash is not available at the moment";
+            }
+        }
+    }
+
+    if (captureDevice.hasTorch) {
+        const auto mode = torchMode();
+
+        auto setAvTorchModeSafe = [&captureDevice](AVCaptureTorchMode avTorchMode) {
+            if ([captureDevice isTorchModeSupported:avTorchMode])
+                captureDevice.torchMode = avTorchMode;
+            else
+                qCDebug(qLcCamera) << "Attempt to setup unsupported torch mode " << avTorchMode;
+        };
+
+        if (mode == QCamera::TorchOff) {
+            setAvTorchModeSafe(AVCaptureTorchModeOff);
+        } else {
+            if ([captureDevice isTorchAvailable]) {
+                if (mode == QCamera::TorchOn)
+                    setAvTorchModeSafe(AVCaptureTorchModeOn);
+                else if (mode == QCamera::TorchAuto)
+                    setAvTorchModeSafe(AVCaptureTorchModeAuto);
+            } else {
+                qCDebug(qLcCamera) << Q_FUNC_INFO << "torch is not available at the moment";
+            }
+        }
+    }
+}
+
+
void QAVFCameraBase::setExposureCompensation(float bias)
{
#ifdef Q_OS_IOS
    AVCaptureDevice *captureDevice = device();
    if (!captureDevice) {
        // No device yet - only report the requested value.
        exposureCompensationChanged(bias);
        return;
    }

    // Clamp the requested bias to the range the device supports.
    const float clampedBias = qBound(captureDevice.minExposureTargetBias, bias,
                                     captureDevice.maxExposureTargetBias);

    const AVFConfigurationLock lock(captureDevice);
    if (!lock) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
        return;
    }

    [captureDevice setExposureTargetBias:clampedBias completionHandler:nil];
    exposureCompensationChanged(clampedBias);
#else
    Q_UNUSED(bias);
#endif
}
+
void QAVFCameraBase::setManualExposureTime(float value)
{
#ifdef Q_OS_IOS
    // A negative value means "give up manual control".
    if (value < 0) {
        setExposureMode(QCamera::ExposureAuto);
        return;
    }

    AVCaptureDevice *captureDevice = device();
    if (!captureDevice) {
        // No device yet - only report the requested value.
        exposureTimeChanged(value);
        return;
    }

    const CMTime newDuration = CMTimeMakeWithSeconds(value,
                                                     captureDevice.exposureDuration.timescale);
    if (!qt_check_exposure_duration(captureDevice, newDuration)) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "shutter speed value is out of range";
        return;
    }

    const AVFConfigurationLock lock(captureDevice);
    if (!lock) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
        return;
    }

    // There is no physical shutter; setting the exposure duration (Apple's
    // term for shutter speed) also switches the device into custom mode.
    [captureDevice setExposureModeCustomWithDuration:newDuration
                                                 ISO:AVCaptureISOCurrent
                                   completionHandler:nil];

    exposureTimeChanged(value);
#else
    Q_UNUSED(value);
#endif
}
+
float QAVFCameraBase::exposureTime() const
{
#ifdef Q_OS_IOS
    // Current exposure duration in seconds, or -1 without a device.
    if (AVCaptureDevice *captureDevice = device())
        return CMTimeGetSeconds(captureDevice.exposureDuration);
    return -1.;
#else
    return -1;
#endif
}
+
+#ifdef Q_OS_IOS
namespace {

// Maps a Qt white balance mode to its AVFoundation counterpart. Anything
// other than 'auto' is realized as a locked white balance combined with
// explicitly computed gains (see avf_convert_temp_and_tint_to_wb_gains()).
void avf_convert_white_balance_mode(QCamera::WhiteBalanceMode qtMode,
                                    AVCaptureWhiteBalanceMode &avMode)
{
    if (qtMode == QCamera::WhiteBalanceAuto)
        avMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
    else
        avMode = AVCaptureWhiteBalanceModeLocked;
}

// Applies the given white balance mode while holding the device's
// configuration lock. Returns false if the device could not be locked.
bool avf_set_white_balance_mode(AVCaptureDevice *captureDevice,
                                AVCaptureWhiteBalanceMode avMode)
{
    Q_ASSERT(captureDevice);

    if (![captureDevice lockForConfiguration:nil]) {
        // Use the file-wide logging category (qDebug with a trailing "\n"
        // was inconsistent with the rest of this file and redundant, since
        // qDebug appends a newline itself).
        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock a capture device for configuration";
        return false;
    }

    captureDevice.whiteBalanceMode = avMode;
    [captureDevice unlockForConfiguration];
    return true;
}

// Converts a temperature/tint pair into device white balance gains.
// Returns true only when every resulting gain lies in the valid range
// [1.0, maxWhiteBalanceGain] for this device.
bool avf_convert_temp_and_tint_to_wb_gains(AVCaptureDevice *captureDevice,
                                           float temp, float tint,
                                           AVCaptureWhiteBalanceGains &wbGains)
{
    Q_ASSERT(captureDevice);

    const AVCaptureWhiteBalanceTemperatureAndTintValues wbTTValues = {
        .temperature = temp,
        .tint = tint
    };
    wbGains = [captureDevice deviceWhiteBalanceGainsForTemperatureAndTintValues:wbTTValues];

    const float maxGain = captureDevice.maxWhiteBalanceGain;
    return wbGains.redGain >= 1.0 && wbGains.redGain <= maxGain
           && wbGains.greenGain >= 1.0 && wbGains.greenGain <= maxGain
           && wbGains.blueGain >= 1.0 && wbGains.blueGain <= maxGain;
}

// Locks the white balance to the given gains while holding the device's
// configuration lock. Returns false if the device could not be locked.
bool avf_set_white_balance_gains(AVCaptureDevice *captureDevice,
                                 AVCaptureWhiteBalanceGains wbGains)
{
    Q_ASSERT(captureDevice); // consistent with the sibling helpers above

    if (![captureDevice lockForConfiguration:nil]) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock a capture device for configuration";
        return false;
    }

    [captureDevice setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:wbGains
                                                      completionHandler:nil];
    [captureDevice unlockForConfiguration];
    return true;
}

} // namespace
+
bool QAVFCameraBase::isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const
{
    // Auto is always available; every other mode requires a device that
    // supports locking its white balance.
    if (mode == QCamera::WhiteBalanceAuto)
        return true;

    AVCaptureDevice *captureDevice = device();
    return captureDevice
           && [captureDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeLocked];
}
+
void QAVFCameraBase::setWhiteBalanceMode(QCamera::WhiteBalanceMode mode)
{
    if (!isWhiteBalanceModeSupported(mode))
        return;

    AVCaptureDevice *captureDevice = device();
    Q_ASSERT(captureDevice);

    // NOTE(review): the avf_* helpers below take lockForConfiguration
    // themselves, so the device ends up locked twice while this lock is
    // held - confirm this nesting is intended.
    const AVFConfigurationLock lock(captureDevice);
    if (!lock) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock a capture device"
                           << "for configuration";
        return;
    }

    AVCaptureWhiteBalanceMode avMode;
    avf_convert_white_balance_mode(mode, avMode);
    avf_set_white_balance_mode(captureDevice, avMode);

    // Only the color-temperature presets need explicit gains.
    const bool needsGains = mode != QCamera::WhiteBalanceAuto
                            && mode != QCamera::WhiteBalanceManual;
    if (!needsGains) {
        whiteBalanceModeChanged(mode);
        return;
    }

    const int colorTemp = colorTemperatureForWhiteBalance(mode);
    AVCaptureWhiteBalanceGains wbGains;
    if (avf_convert_temp_and_tint_to_wb_gains(captureDevice, colorTemp, 0., wbGains)
        && avf_set_white_balance_gains(captureDevice, wbGains))
        whiteBalanceModeChanged(mode);
}
+
void QAVFCameraBase::setColorTemperature(int colorTemp)
{
    // A zero temperature only acknowledges the reset.
    if (colorTemp == 0) {
        colorTemperatureChanged(colorTemp);
        return;
    }

    AVCaptureDevice *captureDevice = device();
    const bool canLockWhiteBalance = captureDevice
            && [captureDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeLocked];
    if (!canLockWhiteBalance)
        return;

    const AVFConfigurationLock lock(captureDevice);
    if (!lock) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock a capture device"
                           << "for configuration";
        return;
    }

    AVCaptureWhiteBalanceGains wbGains;
    if (avf_convert_temp_and_tint_to_wb_gains(captureDevice, colorTemp, 0., wbGains)
        && avf_set_white_balance_gains(captureDevice, wbGains))
        colorTemperatureChanged(colorTemp);
}
+#endif
+
void QAVFCameraBase::setManualIsoSensitivity(int value)
{
#ifdef Q_OS_IOS
    // A negative ISO means "go back to automatic exposure".
    if (value < 0) {
        setExposureMode(QCamera::ExposureAuto);
        return;
    }

    AVCaptureDevice *captureDevice = device();
    if (!captureDevice) {
        // No device yet - only report the requested value.
        isoSensitivityChanged(value);
        return;
    }

    if (!qt_check_ISO_value(captureDevice, value)) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "ISO value is out of range";
        return;
    }

    const AVFConfigurationLock lock(captureDevice);
    if (!lock) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock a capture device"
                           << "for configuration";
        return;
    }

    // Setting an explicit ISO also switches the device into
    // the custom exposure mode.
    [captureDevice setExposureModeCustomWithDuration:AVCaptureExposureDurationCurrent
                                                 ISO:value
                                   completionHandler:nil];

    isoSensitivityChanged(value);
#else
    Q_UNUSED(value);
#endif
}
+
// Returns the last manually requested ISO value; the device's current
// automatically chosen ISO is not queried here.
int QAVFCameraBase::isoSensitivity() const
{
    return manualIsoSensitivity();
}
+
+
+#include "moc_qavfcamerabase_p.cpp"
diff --git a/src/plugins/multimedia/darwin/camera/qavfcamerabase_p.h b/src/plugins/multimedia/darwin/camera/qavfcamerabase_p.h
new file mode 100644
index 000000000..1ad3ba250
--- /dev/null
+++ b/src/plugins/multimedia/darwin/camera/qavfcamerabase_p.h
@@ -0,0 +1,110 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QAVFCAMERABASE_H
+#define QAVFCAMERABASE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qobject.h>
+
+#include <private/qplatformcamera_p.h>
+#include <private/qplatformvideodevices_p.h>
+
+Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureDeviceFormat);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureConnection);
+Q_FORWARD_DECLARE_OBJC_CLASS(AVCaptureDevice);
+
+QT_BEGIN_NAMESPACE
+class QPlatformMediaIntegration;
+
// Provides the list of video capture devices for the Darwin backends.
class QAVFVideoDevices : public QPlatformVideoDevices
{
public:
    QAVFVideoDevices(QPlatformMediaIntegration *integration);
    ~QAVFVideoDevices();

    // Returns the cached list of camera devices.
    QList<QCameraDevice> videoDevices() const override;

private:
    // Rebuilds m_cameraDevices; presumably driven by the connect/disconnect
    // observers below - confirm in the .mm implementation.
    void updateCameraDevices();

    // Notification-center observer tokens for device connection and
    // disconnection events (judging by their names - verify in the .mm).
    NSObject *m_deviceConnectedObserver;
    NSObject *m_deviceDisconnectedObserver;

    QList<QCameraDevice> m_cameraDevices;
};
+
+
// Common base class for the macOS/iOS camera control backends: tracks the
// active state, the selected camera device and the capability flags used
// for flash/torch handling.
class QAVFCameraBase : public QPlatformCamera
{
    // Fixed: stray ';' after the opening brace and misindented Q_OBJECT.
    Q_OBJECT
public:
    QAVFCameraBase(QCamera *camera);
    ~QAVFCameraBase();

    bool isActive() const override;
    void setActive(bool active) override; // fixed parameter-name typo 'activce'

    void setCamera(const QCameraDevice &camera) override;
    bool setCameraFormat(const QCameraFormat &format) override;

    void setFocusMode(QCamera::FocusMode mode) override;
    bool isFocusModeSupported(QCamera::FocusMode mode) const override;

    void setCustomFocusPoint(const QPointF &point) override;

    void setFocusDistance(float d) override;
    void zoomTo(float factor, float rate) override;

    void setFlashMode(QCamera::FlashMode mode) override;
    bool isFlashModeSupported(QCamera::FlashMode mode) const override;
    bool isFlashReady() const override;

    void setTorchMode(QCamera::TorchMode mode) override;
    bool isTorchModeSupported(QCamera::TorchMode mode) const override;

    void setExposureMode(QCamera::ExposureMode) override;
    bool isExposureModeSupported(QCamera::ExposureMode mode) const override;

    void setExposureCompensation(float bias) override;
    void setManualIsoSensitivity(int value) override;
    // 'virtual' dropped on the two accessors below: redundant with
    // 'override' and inconsistent with the other declarations here.
    int isoSensitivity() const override;
    void setManualExposureTime(float value) override;
    float exposureTime() const override;

#ifdef Q_OS_IOS
    // not supported on macOS
    bool isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const override;
    void setWhiteBalanceMode(QCamera::WhiteBalanceMode /*mode*/) override;
    void setColorTemperature(int /*temperature*/) override;
#endif

    // The AVFoundation device backing m_cameraDevice (may be nil).
    AVCaptureDevice *device() const;

protected:
    void updateCameraConfiguration();
    void updateCameraProperties();
    void applyFlashSettings();

    QCameraDevice m_cameraDevice;
    bool m_active = false;

private:
    // Capability flags cached from the capture device.
    bool isFlashSupported = false;
    bool isFlashAutoSupported = false;
    bool isTorchSupported = false;
    bool isTorchAutoSupported = false;
};
+
+QT_END_NAMESPACE
+
+#endif