Diffstat (limited to 'src')
 src/plugins/avfoundation/camera/avfimagecapturecontrol.h    |  4
 src/plugins/avfoundation/camera/avfimagecapturecontrol.mm   | 30
 src/plugins/common/evr/evrd3dpresentengine.cpp               |  2
 src/plugins/common/evr/evrhelpers.cpp                        |  2
 src/plugins/common/evr/evrhelpers.h                          |  2
 src/plugins/directshow/player/directshowmetadatacontrol.cpp |  4
 src/plugins/directshow/player/directshowplayerservice.cpp   | 13
 7 files changed, 42 insertions, 15 deletions
diff --git a/src/plugins/avfoundation/camera/avfimagecapturecontrol.h b/src/plugins/avfoundation/camera/avfimagecapturecontrol.h
index a0f3cf8ba..2cdf0e5f3 100644
--- a/src/plugins/avfoundation/camera/avfimagecapturecontrol.h
+++ b/src/plugins/avfoundation/camera/avfimagecapturecontrol.h
@@ -44,6 +44,7 @@
#include <QtCore/qqueue.h>
#include <QtCore/qsemaphore.h>
+#include <QtCore/qsharedpointer.h>
#include <QtMultimedia/qcameraimagecapturecontrol.h>
#include "avfcamerasession.h"
#include "avfstoragelocation.h"
@@ -56,7 +57,7 @@ Q_OBJECT
public:
struct CaptureRequest {
int captureId;
- QSemaphore *previewReady;
+ QSharedPointer<QSemaphore> previewReady;
};
AVFImageCaptureControl(AVFCameraService *service, QObject *parent = 0);
@@ -79,6 +80,7 @@ private Q_SLOTS:
private:
void makeCapturePreview(CaptureRequest request, const QVideoFrame &frame, int rotation);
+ AVFCameraService *m_service;
AVFCameraSession *m_session;
AVFCameraControl *m_cameraControl;
bool m_ready;
diff --git a/src/plugins/avfoundation/camera/avfimagecapturecontrol.mm b/src/plugins/avfoundation/camera/avfimagecapturecontrol.mm
index b59aa7bfd..bad1b362b 100644
--- a/src/plugins/avfoundation/camera/avfimagecapturecontrol.mm
+++ b/src/plugins/avfoundation/camera/avfimagecapturecontrol.mm
@@ -54,6 +54,7 @@ QT_USE_NAMESPACE
AVFImageCaptureControl::AVFImageCaptureControl(AVFCameraService *service, QObject *parent)
: QCameraImageCaptureControl(parent)
+ , m_service(service)
, m_session(service->session())
, m_cameraControl(service->cameraControl())
, m_ready(false)
@@ -68,7 +69,6 @@ AVFImageCaptureControl::AVFImageCaptureControl(AVFCameraService *service, QObjec
[m_stillImageOutput setOutputSettings:outputSettings];
[outputSettings release];
-
connect(m_cameraControl, SIGNAL(captureModeChanged(QCamera::CaptureModes)), SLOT(updateReadyStatus()));
connect(m_cameraControl, SIGNAL(statusChanged(QCamera::Status)), SLOT(updateReadyStatus()));
@@ -119,7 +119,7 @@ int AVFImageCaptureControl::capture(const QString &fileName)
qDebugCamera() << "Capture image to" << actualFileName;
- CaptureRequest request = { m_lastCaptureId, new QSemaphore };
+ CaptureRequest request = { m_lastCaptureId, QSharedPointer<QSemaphore>::create()};
m_requestsMutex.lock();
m_captureRequests.enqueue(request);
m_requestsMutex.unlock();
@@ -127,10 +127,6 @@ int AVFImageCaptureControl::capture(const QString &fileName)
[m_stillImageOutput captureStillImageAsynchronouslyFromConnection:m_videoConnection
completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
- // Wait for the preview to be generated before saving the JPEG
- request.previewReady->acquire();
- delete request.previewReady;
-
if (error) {
QStringList messageParts;
messageParts << QString::fromUtf8([[error localizedDescription] UTF8String]);
@@ -144,7 +140,19 @@ int AVFImageCaptureControl::capture(const QString &fileName)
Q_ARG(int, request.captureId),
Q_ARG(int, QCameraImageCapture::ResourceError),
Q_ARG(QString, errorMessage));
- } else {
+ return;
+ }
+
+ // Wait for the preview to be generated before saving the JPEG (but only
+ // if we have AVFCameraRendererControl attached).
+ // It is possible to stop camera immediately after trying to capture an
+ // image; this can result in a blocked callback's thread, waiting for a
+ // new viewfinder's frame to arrive/semaphore to be released. It is also
+ // unspecified on which thread this callback gets executed, (probably it's
+ // not the same thread that initiated a capture and stopped the camera),
+ // so we cannot reliably check the camera's status. Instead, we wait
+ // with a timeout and treat a failure to acquire a semaphore as an error.
+ if (!m_service->videoOutput() || request.previewReady->tryAcquire(1, 1000)) {
qDebugCamera() << "Image capture completed:" << actualFileName;
NSData *nsJpgData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
@@ -169,6 +177,14 @@ int AVFImageCaptureControl::capture(const QString &fileName)
Q_ARG(int, QCameraImageCapture::ResourceError),
Q_ARG(QString, errorMessage));
}
+ } else {
+ const QLatin1String errorMessage("Image capture failed: timed out waiting"
+ " for a preview frame.");
+ qDebugCamera() << errorMessage;
+ QMetaObject::invokeMethod(this, "error", Qt::QueuedConnection,
+ Q_ARG(int, request.captureId),
+ Q_ARG(int, QCameraImageCapture::ResourceError),
+ Q_ARG(QString, errorMessage));
}
}];
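
The substantive change in this file replaces the raw QSemaphore pointer in CaptureRequest with a QSharedPointer and swaps the blocking acquire() in the completion handler for tryAcquire() with a one-second timeout. Shared ownership removes the manual delete that used to live in the completion block, and the timeout keeps the handler from blocking forever if the camera is stopped before a preview frame arrives. A minimal sketch of that pattern, with hypothetical names rather than the plugin code:

    #include <QDebug>
    #include <QSemaphore>
    #include <QSharedPointer>
    #include <thread>

    struct CaptureRequest
    {
        int captureId;
        QSharedPointer<QSemaphore> previewReady;
    };

    int main()
    {
        CaptureRequest request{ 1, QSharedPointer<QSemaphore>::create() };

        // Asynchronous completion handler: waits at most one second for the
        // preview instead of blocking indefinitely, then either proceeds or
        // reports a timeout.
        std::thread handler([request]() {
            if (request.previewReady->tryAcquire(1, 1000))
                qDebug() << "preview ready, saving capture" << request.captureId;
            else
                qDebug() << "timed out waiting for preview of capture" << request.captureId;
        });

        // Preview producer: releases the semaphore once the preview frame has
        // been generated. Both sides hold a QSharedPointer, so the semaphore
        // stays alive until the last reference goes away, whichever side
        // finishes first.
        request.previewReady->release();
        handler.join();
        return 0;
    }
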
diff --git a/src/plugins/common/evr/evrd3dpresentengine.cpp b/src/plugins/common/evr/evrd3dpresentengine.cpp
index 4bc2bac83..043d0ad73 100644
--- a/src/plugins/common/evr/evrd3dpresentengine.cpp
+++ b/src/plugins/common/evr/evrd3dpresentengine.cpp
@@ -515,7 +515,7 @@ done:
if (SUCCEEDED(hr)) {
m_surfaceFormat = QVideoSurfaceFormat(QSize(width, height),
m_useTextureRendering ? QVideoFrame::Format_RGB32
- : qt_evr_pixelFormatFromD3DFormat((D3DFORMAT)d3dFormat),
+ : qt_evr_pixelFormatFromD3DFormat(d3dFormat),
m_useTextureRendering ? QAbstractVideoBuffer::GLTextureHandle
: QAbstractVideoBuffer::NoHandle);
} else {
diff --git a/src/plugins/common/evr/evrhelpers.cpp b/src/plugins/common/evr/evrhelpers.cpp
index f4710b1a0..96b61e2eb 100644
--- a/src/plugins/common/evr/evrhelpers.cpp
+++ b/src/plugins/common/evr/evrhelpers.cpp
@@ -117,7 +117,7 @@ bool qt_evr_isSampleTimePassed(IMFClock *clock, IMFSample *sample)
return false;
}
-QVideoFrame::PixelFormat qt_evr_pixelFormatFromD3DFormat(D3DFORMAT format)
+QVideoFrame::PixelFormat qt_evr_pixelFormatFromD3DFormat(DWORD format)
{
switch (format) {
case D3DFMT_R8G8B8:
diff --git a/src/plugins/common/evr/evrhelpers.h b/src/plugins/common/evr/evrhelpers.h
index d2fdfdcae..527612c45 100644
--- a/src/plugins/common/evr/evrhelpers.h
+++ b/src/plugins/common/evr/evrhelpers.h
@@ -90,7 +90,7 @@ inline HRESULT qt_evr_getFrameRate(IMFMediaType *pType, MFRatio *pRatio)
return MFGetAttributeRatio(pType, MF_MT_FRAME_RATE, (UINT32*)&pRatio->Numerator, (UINT32*)&pRatio->Denominator);
}
-QVideoFrame::PixelFormat qt_evr_pixelFormatFromD3DFormat(D3DFORMAT format);
+QVideoFrame::PixelFormat qt_evr_pixelFormatFromD3DFormat(DWORD format);
D3DFORMAT qt_evr_D3DFormatFromPixelFormat(QVideoFrame::PixelFormat format);
QT_END_NAMESPACE
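
Widening qt_evr_pixelFormatFromD3DFormat() to take a DWORD lets the caller in evrd3dpresentengine.cpp pass the raw format value it already holds, which is why the explicit (D3DFORMAT) cast could be dropped above. A rough sketch of such a mapping function, assuming the Direct3D 9 headers and only a couple of the formats the real helper handles:

    #include <d3d9.h>
    #include <QtMultimedia/qvideoframe.h>

    // Hypothetical stand-in for the helper; the real function covers many
    // more formats.
    QVideoFrame::PixelFormat pixelFormatFromD3DFormat(DWORD format)
    {
        switch (format) {
        case D3DFMT_A8R8G8B8:
            return QVideoFrame::Format_ARGB32;
        case D3DFMT_X8R8G8B8:
            return QVideoFrame::Format_RGB32;
        default:
            // FourCC formats (e.g. MAKEFOURCC('N', 'V', '1', '2')) also arrive
            // as plain DWORD values, another reason not to force callers to
            // cast to the D3DFORMAT enum.
            return QVideoFrame::Format_Invalid;
        }
    }
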
diff --git a/src/plugins/directshow/player/directshowmetadatacontrol.cpp b/src/plugins/directshow/player/directshowmetadatacontrol.cpp
index 1f75e3340..84f990830 100644
--- a/src/plugins/directshow/player/directshowmetadatacontrol.cpp
+++ b/src/plugins/directshow/player/directshowmetadatacontrol.cpp
@@ -624,9 +624,7 @@ void DirectShowMetaDataControl::updateMetadata(IFilterGraph2 *graph, IBaseFilter
#endif
#if QT_CONFIG(wmsdk)
- IWMHeaderInfo *info = com_cast<IWMHeaderInfo>(source, IID_IWMHeaderInfo);
-
- if (info) {
+ if (IWMHeaderInfo *info = com_cast<IWMHeaderInfo>(source, IID_IWMHeaderInfo)) {
const auto keys = *qt_wmMetaDataKeys();
for (const QWMMetaDataKey &key : keys) {
QVariant var = getValue(info, key.wmName);
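
The metadata change only folds the com_cast result into the if condition, scoping the interface pointer to the block where it is known to be non-null (com_cast evidently returns null on failure, given the check it replaces). A generic sketch of the same idiom with a plain QueryInterface wrapper, assuming the Windows Media Format SDK headers; the helper name is hypothetical and unrelated to the plugin's com_cast:

    #include <unknwn.h>
    #include <wmsdkidl.h>

    // Stand-in helper, assumed to return nullptr when the interface is not
    // available.
    template <typename T>
    T *comCast(IUnknown *unknown, REFIID iid)
    {
        T *result = nullptr;
        if (!unknown || FAILED(unknown->QueryInterface(iid, reinterpret_cast<void **>(&result))))
            return nullptr;
        return result;
    }

    void readHeaderInfo(IUnknown *source)
    {
        if (IWMHeaderInfo *info = comCast<IWMHeaderInfo>(source, IID_IWMHeaderInfo)) {
            // ... read metadata attributes through info ...
            info->Release();
        }
        // info is out of scope here, so it cannot be touched after Release().
    }
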
diff --git a/src/plugins/directshow/player/directshowplayerservice.cpp b/src/plugins/directshow/player/directshowplayerservice.cpp
index de427f781..2f959b232 100644
--- a/src/plugins/directshow/player/directshowplayerservice.cpp
+++ b/src/plugins/directshow/player/directshowplayerservice.cpp
@@ -674,6 +674,9 @@ void DirectShowPlayerService::doReleaseGraph(QMutexLocker *locker)
m_loop->wake();
}
+QT_WARNING_PUSH
+QT_WARNING_DISABLE_GCC("-Wmissing-field-initializers")
+
void DirectShowPlayerService::doSetVideoProbe(QMutexLocker *locker)
{
Q_UNUSED(locker);
@@ -721,9 +724,10 @@ void DirectShowPlayerService::doSetVideoProbe(QMutexLocker *locker)
for (int i = 0; i != items; ++i) {
mediaType->subtype = subtypes[i];
m_videoSampleGrabber->setMediaType(&mediaType);
- if (SUCCEEDED(DirectShowUtils::connectFilters(m_graph, m_source, m_videoSampleGrabber->filter(), true)))
+ if (SUCCEEDED(DirectShowUtils::connectFilters(m_graph, m_source, m_videoSampleGrabber->filter(), true))) {
connected = true;
break;
+ }
}
if (!connected) {
@@ -765,6 +769,8 @@ void DirectShowPlayerService::doSetAudioProbe(QMutexLocker *locker)
m_audioSampleGrabber->start(DirectShowSampleGrabber::CallbackMethod::BufferCB);
}
+QT_WARNING_POP
+
void DirectShowPlayerService::doReleaseVideoProbe(QMutexLocker *locker)
{
Q_UNUSED(locker);
@@ -1444,6 +1450,9 @@ void DirectShowPlayerService::videoOutputChanged()
setVideoOutput(m_videoRendererControl->filter());
}
+QT_WARNING_PUSH
+QT_WARNING_DISABLE_GCC("-Wmissing-field-initializers")
+
void DirectShowPlayerService::onAudioBufferAvailable(double time, quint8 *buffer, long len)
{
QMutexLocker locker(&m_mutex);
@@ -1535,6 +1544,8 @@ void DirectShowPlayerService::onVideoBufferAvailable(double time, quint8 *buffer
Q_EMIT m_videoProbeControl->videoFrameProbed(frame);
}
+QT_WARNING_POP
+
void DirectShowPlayerService::graphEvent(QMutexLocker *locker)
{
Q_UNUSED(locker)
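
The QT_WARNING_PUSH / QT_WARNING_DISABLE_GCC("-Wmissing-field-initializers") / QT_WARNING_POP pairs added in this file silence GCC's warning only around the functions they bracket; that warning is typically triggered by aggregate initializers that name just the first members of a large struct. A minimal sketch of the mechanism, with a hypothetical struct rather than the DirectShow types used above:

    #include <QtGlobal>

    struct Sample
    {
        int first;
        int second;
        int third;
    };

    QT_WARNING_PUSH
    QT_WARNING_DISABLE_GCC("-Wmissing-field-initializers")

    // With -Wextra, GCC would warn that 'second' and 'third' are not named in
    // the initializer below; the pragma pair silences that warning for this
    // region only, and QT_WARNING_POP restores the previous diagnostic state.
    Sample makeSample()
    {
        Sample s = { 1 };   // remaining members are value-initialized to 0
        return s;
    }

    QT_WARNING_POP
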