diff options
author | Timur Pocheptsov <Timur.Pocheptsov@digia.com> | 2015-03-20 18:33:28 +0100 |
---|---|---|
committer | Yoann Lopes <yoann.lopes@theqtcompany.com> | 2015-05-29 16:29:39 +0000 |
commit | 1508f775acfd7aad18e71dde35c3ff0c9b073fc1 (patch) | |
tree | 81e6ab4c4762e28a2a035db358456afc006f9435 /src/plugins/avfoundation/camera/avfcamerarenderercontrol.mm | |
parent | aeb79d4a8bcb291822d74d923f7e68fb02ce96fe (diff) |
Video asset writer for iOS
AVFoundation on iOS lacks the ability to use AVCaptureVideoDataOutput and
AVCaptureMovieFileOutput simultaneously. Right now viewfinder stops working
as soon as we add movie file output. The only workaround
we have now is to write video/audio 'manually' - creating asset writer
and feeding it with audio/video samples.
Change-Id: I33a63546783279c545f0433b5051287269825d3f
Task-number: QTBUG-37655
Reviewed-by: Yoann Lopes <yoann.lopes@theqtcompany.com>
Diffstat (limited to 'src/plugins/avfoundation/camera/avfcamerarenderercontrol.mm')
-rw-r--r-- | src/plugins/avfoundation/camera/avfcamerarenderercontrol.mm | 26 |
1 files changed, 23 insertions, 3 deletions
diff --git a/src/plugins/avfoundation/camera/avfcamerarenderercontrol.mm b/src/plugins/avfoundation/camera/avfcamerarenderercontrol.mm index 1fa1df99e..dd838d9b5 100644 --- a/src/plugins/avfoundation/camera/avfcamerarenderercontrol.mm +++ b/src/plugins/avfoundation/camera/avfcamerarenderercontrol.mm @@ -143,6 +143,7 @@ private: - (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection; + @end @implementation AVFCaptureFramesDelegate @@ -163,6 +164,9 @@ private: Q_UNUSED(connection); Q_UNUSED(captureOutput); + // NB: on iOS captureOutput/connection can be nil (when recording a video - + // avfmediaassetwriter). + CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); int width = CVPixelBufferGetWidth(imageBuffer); @@ -176,6 +180,7 @@ private: QVideoFrame frame(new CVPixelBufferVideoBuffer(imageBuffer), QSize(width, height), format); m_renderer->syncHandleViewfinderFrame(frame); } + @end @@ -191,6 +196,8 @@ AVFCameraRendererControl::~AVFCameraRendererControl() { [m_cameraSession->captureSession() removeOutput:m_videoDataOutput]; [m_viewfinderFramesDelegate release]; + if (m_delegateQueue) + dispatch_release(m_delegateQueue); } QAbstractVideoSurface *AVFCameraRendererControl::surface() const @@ -217,11 +224,10 @@ void AVFCameraRendererControl::configureAVCaptureSession(AVFCameraSession *camer m_videoDataOutput = [[[AVCaptureVideoDataOutput alloc] init] autorelease]; // Configure video output - dispatch_queue_t queue = dispatch_queue_create("vf_queue", NULL); + m_delegateQueue = dispatch_queue_create("vf_queue", NULL); [m_videoDataOutput setSampleBufferDelegate:m_viewfinderFramesDelegate - queue:queue]; - dispatch_release(queue); + queue:m_delegateQueue]; [m_cameraSession->captureSession() addOutput:m_videoDataOutput]; } @@ -279,6 +285,20 @@ AVCaptureVideoDataOutput *AVFCameraRendererControl::videoDataOutput() const return m_videoDataOutput; } 
+#ifdef Q_OS_IOS + +AVFCaptureFramesDelegate *AVFCameraRendererControl::captureDelegate() const +{ + return m_viewfinderFramesDelegate; +} + +void AVFCameraRendererControl::resetCaptureDelegate() const +{ + [m_videoDataOutput setSampleBufferDelegate:m_viewfinderFramesDelegate queue:m_delegateQueue]; +} + +#endif + void AVFCameraRendererControl::handleViewfinderFrame() { QVideoFrame frame; |