/**************************************************************************** ** ** Copyright (C) 2016 The Qt Company Ltd. ** Contact: https://www.qt.io/licensing/ ** ** This file is part of the Qt Toolkit. ** ** $QT_BEGIN_LICENSE:LGPL$ ** Commercial License Usage ** Licensees holding valid commercial Qt licenses may use this file in ** accordance with the commercial license agreement provided with the ** Software or, alternatively, in accordance with the terms contained in ** a written agreement between you and The Qt Company. For licensing terms ** and conditions see https://www.qt.io/terms-conditions. For further ** information use the contact form at https://www.qt.io/contact-us. ** ** GNU Lesser General Public License Usage ** Alternatively, this file may be used under the terms of the GNU Lesser ** General Public License version 3 as published by the Free Software ** Foundation and appearing in the file LICENSE.LGPL3 included in the ** packaging of this file. Please review the following information to ** ensure the GNU Lesser General Public License version 3 requirements ** will be met: https://www.gnu.org/licenses/lgpl-3.0.html. ** ** GNU General Public License Usage ** Alternatively, this file may be used under the terms of the GNU ** General Public License version 2.0 or (at your option) the GNU General ** Public license version 3 or any later version approved by the KDE Free ** Qt Foundation. The licenses are as published by the Free Software ** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3 ** included in the packaging of this file. Please review the following ** information to ensure the GNU General Public License requirements will ** be met: https://www.gnu.org/licenses/gpl-2.0.html and ** https://www.gnu.org/licenses/gpl-3.0.html. 
**
** $QT_END_LICENSE$
**
****************************************************************************/

#include "avfmediaencoder_p.h"
#include "avfcamerarenderer_p.h"
#include "avfmediaassetwriter_p.h"
#include "avfcameraservice_p.h"
#include "avfcamerasession_p.h"
#include "avfcameradebug_p.h"

// NOTE(review): the four includes below lost their <...> targets during text
// extraction. Restored from the names this file actually uses
// (QDarwinFormatInfo, QMetaObject::invokeMethod, QAtomicInt/QAtomicInteger,
// qWarning) — confirm against the upstream Qt sources.
#include <private/qdarwinformatsinfo_p.h>
#include <QtCore/qmetaobject.h>
#include <QtCore/qatomic.h>
#include <QtCore/qdebug.h>

QT_USE_NAMESPACE

namespace {

// Returns true only if the service, its camera session and the underlying
// AVCaptureSession are all non-null — the minimum required before any
// writer setup can proceed.
bool qt_capture_session_isValid(AVFCameraService *service)
{
    if (!service || !service->session())
        return false;
    AVFCameraSession *session = service->session();
    if (!session->captureSession())
        return false;
    return true;
}

// Writer lifecycle. Stored in a QAtomicInt so the GCD capture callbacks and
// the (Qt) control thread observe a consistent state:
//   Idle    -> not recording (initial, or after a clean -stop)
//   Active  -> recording, buffers are appended
//   Aborted -> recorder control destroyed mid-recording (-abort)
enum WriterState
{
    WriterStateIdle,
    WriterStateActive,
    WriterStateAborted
};

// NOTE(review): template argument stripped in extraction; qint64 matches the
// -durationInMs return type — confirm against upstream.
using AVFAtomicInt64 = QAtomicInteger<qint64>;

} // unnamed namespace

@interface QT_MANGLE_NAMESPACE(AVFMediaAssetWriter) (PrivateAPI)
- (bool)addWriterInputs;
- (void)setQueues;
- (void)updateDuration:(CMTime)newTimeStamp;
@end

@implementation QT_MANGLE_NAMESPACE(AVFMediaAssetWriter)
{
@private
    AVFCameraService *m_service;

    // NOTE(review): AVFScopedPointer template arguments below were stripped
    // in extraction; restored from how each member is used (writer inputs,
    // dispatch queues, the asset writer) — confirm against upstream.
    AVFScopedPointer<AVAssetWriterInput> m_cameraWriterInput;
    AVFScopedPointer<AVAssetWriterInput> m_audioWriterInput;

    // Queue to write sample buffers:
    AVFScopedPointer<dispatch_queue_t> m_writerQueue;
    // High priority serial queue for video output:
    AVFScopedPointer<dispatch_queue_t> m_videoQueue;
    // Serial queue for audio output:
    AVFScopedPointer<dispatch_queue_t> m_audioQueue;

    AVFScopedPointer<AVAssetWriter> m_assetWriter;

    AVFMediaEncoder *m_delegate;

    bool m_setStartTime;

    QAtomicInt m_state;

    // When recording video, audio buffers are dropped until the first video
    // buffer arrives, so the session does not start on an audio timestamp:
    bool writeFirstAudioBuffer;

    CMTime m_startTime;
    CMTime m_lastTimeStamp;

    NSDictionary *m_audioSettings;
    NSDictionary *m_videoSettings;

    AVFAtomicInt64 m_durationInMs;
}

// Designated initializer. 'delegate' (the media encoder) must outlive this
// writer; it receives assetWriterStarted/assetWriterFinished notifications.
- (id)initWithDelegate:(AVFMediaEncoder *)delegate
{
    Q_ASSERT(delegate);

    if (self = [super init]) {
        m_delegate = delegate;
        m_setStartTime = true;
        m_state.storeRelaxed(WriterStateIdle);
        m_startTime = kCMTimeInvalid;
        m_lastTimeStamp = kCMTimeInvalid;
        m_durationInMs.storeRelaxed(0);
        m_audioSettings = nil;
        m_videoSettings = nil;
        writeFirstAudioBuffer = false;
    }

    return self;
}

// Prepares the asset writer for recording into 'fileURL': creates the
// writer/video/audio dispatch queues, the AVAssetWriter, and its inputs.
// Returns false if nothing can be recorded; a missing audio path is not
// fatal as long as video can still be written (and vice versa).
- (bool)setupWithFileURL:(NSURL *)fileURL
           cameraService:(AVFCameraService *)service
           audioSettings:(NSDictionary *)audioSettings
           videoSettings:(NSDictionary *)videoSettings
               transform:(CGAffineTransform)transform
{
    Q_ASSERT(fileURL);

    if (!qt_capture_session_isValid(service)) {
        qDebugCamera() << Q_FUNC_INFO << "invalid capture session";
        return false;
    }

    m_service = service;
    m_audioSettings = audioSettings;
    m_videoSettings = videoSettings;

    AVFCameraSession *session = m_service->session();

    m_writerQueue.reset(dispatch_queue_create("asset-writer-queue", DISPATCH_QUEUE_SERIAL));
    if (!m_writerQueue) {
        qDebugCamera() << Q_FUNC_INFO << "failed to create an asset writer's queue";
        return false;
    }

    if (session->videoOutput() && session->videoOutput()->videoDataOutput()) {
        m_videoQueue.reset(dispatch_queue_create("video-output-queue", DISPATCH_QUEUE_SERIAL));
        if (!m_videoQueue) {
            qDebugCamera() << Q_FUNC_INFO << "failed to create video queue";
            return false;
        }
        // Frames must not be dropped while recording — raise the video
        // queue's priority above the default:
        dispatch_set_target_queue(m_videoQueue, dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0));
    }

    m_audioQueue.reset(dispatch_queue_create("audio-output-queue", DISPATCH_QUEUE_SERIAL));
    if (!m_audioQueue) {
        qDebugCamera() << Q_FUNC_INFO << "failed to create audio queue";
        if (!m_videoQueue)
            return false;
        // But we still can write video!
    }

    auto settings = m_service->recorderControl()->encoderSettings();
    auto fileType = QDarwinFormatInfo::avFileTypeForContainerFormat(settings.fileFormat());

    m_assetWriter.reset([[AVAssetWriter alloc] initWithURL:fileURL fileType:fileType error:nil]);
    if (!m_assetWriter) {
        qDebugCamera() << Q_FUNC_INFO << "failed to create asset writer";
        return false;
    }

    // NOTE(review): audioCaptureOn is computed but never read below — dead
    // local, possibly the remnant of logic lost in extraction; verify upstream.
    bool audioCaptureOn = false;
    if (m_audioQueue)
        audioCaptureOn = session->audioOutput() != nil;

    // Audio-only recording: no video buffer will ever arrive, so do not
    // wait for one before accepting audio:
    if (!m_videoQueue)
        writeFirstAudioBuffer = true;

    if (![self addWriterInputs]) {
        m_assetWriter.reset();
        return false;
    }

    if (m_cameraWriterInput)
        m_cameraWriterInput.data().transform = transform;

    [self setMetaData:fileType];

    // Ready to start ...
    return true;
}

// Attaches the encoder's meta-data to the asset writer, converted for the
// given container format.
- (void)setMetaData:(AVFileType)fileType
{
    m_assetWriter.data().metadata = AVFMetaData::toAVMetadataForFormat(m_delegate->metaData(), fileType);
}

// Begins recording: installs self as the sample-buffer delegate on the
// capture outputs, marks the writer active, and starts the capture session
// if it is not already running.
- (void)start
{
    [self setQueues];

    m_setStartTime = true;

    m_state.storeRelease(WriterStateActive);

    [m_assetWriter startWriting];

    AVCaptureSession *session = m_service->session()->captureSession();
    if (!session.running)
        [session startRunning];
}

// Stops an active recording cleanly and asynchronously notifies the
// delegate (assetWriterFinished) when the file has been finalized.
- (void)stop
{
    if (m_state.loadAcquire() != WriterStateActive)
        return;

    if ([m_assetWriter status] != AVAssetWriterStatusWriting)
        return;

    // Do this here so that -
    // 1. '-abort' should not try calling finishWriting again and
    // 2. async block (see below) will know if recorder control was deleted
    //    before the block's execution:
    m_state.storeRelease(WriterStateIdle);

    // Now, since we have to ensure no sample buffers are
    // appended after a call to finishWriting, we must
    // ensure writer's queue sees this change in m_state
    // _before_ we call finishWriting:
    dispatch_sync(m_writerQueue, ^{});

    // Done, but now we also want to prevent video queue
    // from updating our viewfinder:
    if (m_videoQueue)
        dispatch_sync(m_videoQueue, ^{});

    // Now we're safe to stop:
    [m_assetWriter finishWritingWithCompletionHandler:^{
        // This block is async, so by the time it's executed,
        // it's possible that render control was deleted already ...
        if (m_state.loadAcquire() == WriterStateAborted)
            return;

        AVCaptureSession *session = m_service->session()->captureSession();
        if (session.running)
            [session stopRunning];
        QMetaObject::invokeMethod(m_delegate, "assetWriterFinished", Qt::QueuedConnection);
    }];
}

// Emergency teardown: stops writing without notifying the (possibly already
// destroyed) delegate.
- (void)abort
{
    // -abort is to be called from recorder control's dtor.

    if (m_state.fetchAndStoreRelease(WriterStateAborted) != WriterStateActive) {
        // Not recording, nothing to stop.
        return;
    }

    // From Apple's docs:
    // "To guarantee that all sample buffers are successfully written,
    // you must ensure that all calls to appendSampleBuffer: and
    // appendPixelBuffer:withPresentationTime: have returned before
    // invoking this method."
    //
    // The only way we can ensure this is:
    dispatch_sync(m_writerQueue, ^{});
    // At this point next block (if any) on the writer's queue
    // will see m_state preventing it from any further processing.
    if (m_videoQueue)
        dispatch_sync(m_videoQueue, ^{});
    // After this point video queue will not try to modify our
    // viewfider, so we're safe to delete now.

    [m_assetWriter finishWritingWithCompletionHandler:^{
    }];
}

// Records the first buffer's presentation timestamp as the session start
// time and notifies the delegate that writing has begun.
- (void)setStartTimeFrom:(CMSampleBufferRef)sampleBuffer
{
    // Writer's queue only.
    Q_ASSERT(m_setStartTime);
    Q_ASSERT(sampleBuffer);

    if (m_state.loadAcquire() != WriterStateActive)
        return;

    QMetaObject::invokeMethod(m_delegate, "assetWriterStarted", Qt::QueuedConnection);

    m_durationInMs.storeRelease(0);
    m_startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    m_lastTimeStamp = m_startTime;
    [m_assetWriter startSessionAtSourceTime:m_startTime];
    m_setStartTime = false;
}

// Appends a (retained) video buffer to the camera writer input and releases
// it. Balances the CFRetain done in the capture callback.
- (void)writeVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // This code is executed only on a writer's queue.
    Q_ASSERT(sampleBuffer);

    if (m_state.loadAcquire() == WriterStateActive) {
        if (m_setStartTime)
            [self setStartTimeFrom:sampleBuffer];

        if (m_cameraWriterInput.data().readyForMoreMediaData) {
            [self updateDuration:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
            [m_cameraWriterInput appendSampleBuffer:sampleBuffer];
        }
    }

    CFRelease(sampleBuffer);
}

// Appends a (retained) audio buffer to the audio writer input and releases
// it. Balances the CFRetain done in the capture callback.
- (void)writeAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    Q_ASSERT(sampleBuffer);

    // This code is executed only on a writer's queue.
    if (m_state.loadAcquire() == WriterStateActive) {
        if (m_setStartTime)
            [self setStartTimeFrom:sampleBuffer];

        if (m_audioWriterInput.data().readyForMoreMediaData) {
            [self updateDuration:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
            [m_audioWriterInput appendSampleBuffer:sampleBuffer];
        }
    }

    CFRelease(sampleBuffer);
}

// AVCapture{Video,Audio}DataOutputSampleBufferDelegate. Runs on m_videoQueue
// or m_audioQueue; retains the buffer, forwards video frames to the
// viewfinder delegate, and hands the buffer to the serial writer queue
// (where the matching CFRelease happens).
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    Q_UNUSED(connection);

    Q_ASSERT(m_service && m_service->session());

    if (m_state.loadAcquire() != WriterStateActive)
        return;

    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
        qWarning() << Q_FUNC_INFO << "sample buffer is not ready, skipping.";
        return;
    }

    CFRetain(sampleBuffer);

    if (captureOutput != m_service->session()->audioOutput()) {
        if (m_state.loadRelaxed() != WriterStateActive) {
            CFRelease(sampleBuffer);
            return;
        }

        // First video frame seen — audio buffers may now be written:
        writeFirstAudioBuffer = true;

        // Find renderercontrol's delegate and invoke its method to
        // show updated viewfinder's frame.
        if (m_service->session()->videoOutput()) {
            // NOTE(review): the protocol qualification on this cast was
            // stripped in extraction; restored so the captureOutput: message
            // send below type-checks — confirm against upstream.
            NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> *vfDelegate =
                (NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> *)m_service->session()->videoOutput()->captureDelegate();
            if (vfDelegate)
                [vfDelegate captureOutput:nil didOutputSampleBuffer:sampleBuffer fromConnection:nil];
        }

        dispatch_async(m_writerQueue, ^{
            [self writeVideoSampleBuffer:sampleBuffer];
        });
    } else if (writeFirstAudioBuffer) {
        dispatch_async(m_writerQueue, ^{
            [self writeAudioSampleBuffer:sampleBuffer];
        });
    }
}

// Creates and attaches the AVAssetWriterInputs for video (when a video
// queue exists) and audio (when the session has an audio output). Failure
// on the audio side is tolerated as long as video can still be written.
- (bool)addWriterInputs
{
    Q_ASSERT(m_service && m_service->session());
    Q_ASSERT(m_assetWriter.data());

    AVFCameraSession *session = m_service->session();

    if (m_videoQueue) {
        Q_ASSERT(session->videoOutput() && session->videoOutput()->videoDataOutput());
        m_cameraWriterInput.reset(
            [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
                                           outputSettings:m_videoSettings
                                         sourceFormatHint:session->videoCaptureDevice().activeFormat.formatDescription]);
        if (!m_cameraWriterInput) {
            qDebugCamera() << Q_FUNC_INFO << "failed to create camera writer input";
            return false;
        }

        if ([m_assetWriter canAddInput:m_cameraWriterInput]) {
            [m_assetWriter addInput:m_cameraWriterInput];
        } else {
            qDebugCamera() << Q_FUNC_INFO << "failed to add camera writer input";
            m_cameraWriterInput.reset();
            return false;
        }

        m_cameraWriterInput.data().expectsMediaDataInRealTime = YES;
    }

    if (session->audioOutput()) {
        CMFormatDescriptionRef sourceFormat = session->audioCaptureDevice()
            ? session->audioCaptureDevice().activeFormat.formatDescription
            : 0;
        m_audioWriterInput.reset(
            [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio
                                           outputSettings:m_audioSettings
                                         sourceFormatHint:sourceFormat]);
        if (!m_audioWriterInput) {
            qWarning() << Q_FUNC_INFO << "failed to create audio writer input";
            // But we still can record video.
            if (!m_cameraWriterInput)
                return false;
        } else if ([m_assetWriter canAddInput:m_audioWriterInput]) {
            [m_assetWriter addInput:m_audioWriterInput];
            m_audioWriterInput.data().expectsMediaDataInRealTime = YES;
        } else {
            qWarning() << Q_FUNC_INFO << "failed to add audio writer input";
            m_audioWriterInput.reset();
            if (!m_cameraWriterInput)
                return false;
            // We can (still) write video though ...
        }
    }

    return true;
}

// Installs self as sample-buffer delegate on the video and audio data
// outputs, each dispatching onto its dedicated serial queue.
- (void)setQueues
{
    Q_ASSERT(m_service && m_service->session());

    if (m_videoQueue) {
        Q_ASSERT(m_service->session()->videoOutput() && m_service->session()->videoOutput()->videoDataOutput());
        [m_service->session()->videoOutput()->videoDataOutput() setSampleBufferDelegate:self queue:m_videoQueue];
    }

    if (m_service->session()->audioOutput()) {
        Q_ASSERT(m_audioQueue);
        [m_service->session()->audioOutput() setSampleBufferDelegate:self queue:m_audioQueue];
    }
}

// Publishes the elapsed recording time (ms) whenever a buffer with a newer
// presentation timestamp arrives. Writer's queue only.
- (void)updateDuration:(CMTime)newTimeStamp
{
    Q_ASSERT(CMTimeCompare(m_startTime, kCMTimeInvalid));
    Q_ASSERT(CMTimeCompare(m_lastTimeStamp, kCMTimeInvalid));
    if (CMTimeCompare(newTimeStamp, m_lastTimeStamp) > 0) {
        const CMTime duration = CMTimeSubtract(newTimeStamp, m_startTime);
        if (!CMTimeCompare(duration, kCMTimeInvalid))
            return;

        m_durationInMs.storeRelease(CMTimeGetSeconds(duration) * 1000);
        m_lastTimeStamp = newTimeStamp;
    }
}

// Thread-safe accessor for the current recording duration in milliseconds.
- (qint64)durationInMs
{
    return m_durationInMs.loadAcquire();
}

@end