author    Frederik Gladhorn <frederik.gladhorn@digia.com>  2014-07-22 20:23:12 +0200
committer Frederik Gladhorn <frederik.gladhorn@digia.com>  2014-07-22 20:23:12 +0200
commit    86d5c7e4dc7258c3fea26f34b2076a52e292e81c (patch)
tree      b0e96bb12880fb6070272995d9f9843149f6ee23
parent    39c269441421e1e61fbfc698b65f769db82cad56 (diff)
parent    2bc7a39a0d01498567aef958908b4bdba4071c8f (diff)
Merge remote-tracking branch 'origin/5.3' into dev
-rw-r--r--  src/plugins/android/src/mediacapture/qandroidcameraexposurecontrol.cpp        |   88
-rw-r--r--  src/plugins/android/src/mediacapture/qandroidcameraflashcontrol.cpp           |   14
-rw-r--r--  src/plugins/android/src/mediacapture/qandroidcamerafocuscontrol.cpp           |  118
-rw-r--r--  src/plugins/android/src/mediacapture/qandroidcamerafocuscontrol.h             |   16
-rw-r--r--  src/plugins/android/src/mediacapture/qandroidcameraimageprocessingcontrol.cpp |   45
-rw-r--r--  src/plugins/android/src/mediacapture/qandroidcameraimageprocessingcontrol.h   |    6
-rw-r--r--  src/plugins/android/src/mediacapture/qandroidcameralockscontrol.cpp           |    6
-rw-r--r--  src/plugins/android/src/mediacapture/qandroidcamerasession.cpp                |    7
-rw-r--r--  src/plugins/android/src/mediacapture/qandroidcamerazoomcontrol.cpp            |   32
-rw-r--r--  src/plugins/android/src/wrappers/jni/androidcamera.cpp                        |   80
-rw-r--r--  src/plugins/android/src/wrappers/jni/androidcamera.h                          |    8
-rw-r--r--  src/plugins/avfoundation/camera/avfmediarecordercontrol.mm                    |    6
-rw-r--r--  src/plugins/coreaudio/coreaudiosessionmanager.mm                              |    5
-rw-r--r--  src/plugins/directshow/camera/camera.pri                                      |    5
-rw-r--r--  src/plugins/directshow/camera/dscameracontrol.cpp                             |   53
-rw-r--r--  src/plugins/directshow/camera/dscameracontrol.h                               |   15
-rw-r--r--  src/plugins/directshow/camera/dscameraservice.cpp                             |   41
-rw-r--r--  src/plugins/directshow/camera/dscameraservice.h                               |    4
-rw-r--r--  src/plugins/directshow/camera/dscamerasession.cpp                             | 1281
-rw-r--r--  src/plugins/directshow/camera/dscamerasession.h                               |  162
-rw-r--r--  src/plugins/directshow/camera/dsimagecapturecontrol.cpp                       |   23
-rw-r--r--  src/plugins/directshow/camera/dsimagecapturecontrol.h                         |   13
-rw-r--r--  src/plugins/directshow/camera/dsvideodevicecontrol.cpp                        |    2
-rw-r--r--  src/plugins/directshow/camera/dsvideowidgetcontrol.cpp                        |  253
-rw-r--r--  src/plugins/directshow/camera/dsvideowidgetcontrol.h                          |  150
-rw-r--r--  src/plugins/directshow/directshow.pro                                         |    2
-rw-r--r--  src/plugins/directshow/dsserviceplugin.cpp                                    |   26
-rw-r--r--  src/plugins/directshow/player/directshowplayerservice.cpp                     |    2
-rw-r--r--  src/plugins/pulseaudio/qaudioinput_pulse.cpp                                  |  222
-rw-r--r--  src/plugins/pulseaudio/qaudioinput_pulse.h                                    |    6
-rw-r--r--  src/plugins/pulseaudio/qaudiooutput_pulse.cpp                                 |  194
-rw-r--r--  src/plugins/pulseaudio/qaudiooutput_pulse.h                                   |    6
-rw-r--r--  src/plugins/pulseaudio/qpulseaudioengine.cpp                                  |  139
-rw-r--r--  src/plugins/pulseaudio/qpulseaudioengine.h                                    |   33
34 files changed, 1215 insertions(+), 1848 deletions(-)
diff --git a/src/plugins/android/src/mediacapture/qandroidcameraexposurecontrol.cpp b/src/plugins/android/src/mediacapture/qandroidcameraexposurecontrol.cpp
index 57057ebd7..351e6ba40 100644
--- a/src/plugins/android/src/mediacapture/qandroidcameraexposurecontrol.cpp
+++ b/src/plugins/android/src/mediacapture/qandroidcameraexposurecontrol.cpp
@@ -63,6 +63,9 @@ QAndroidCameraExposureControl::QAndroidCameraExposureControl(QAndroidCameraSessi
bool QAndroidCameraExposureControl::isParameterSupported(ExposureParameter parameter) const
{
+ if (!m_session->camera())
+ return false;
+
switch (parameter) {
case QCameraExposureControl::ISO:
return false;
@@ -71,7 +74,7 @@ bool QAndroidCameraExposureControl::isParameterSupported(ExposureParameter param
case QCameraExposureControl::ShutterSpeed:
return false;
case QCameraExposureControl::ExposureCompensation:
- return true;
+ return !m_supportedExposureCompensations.isEmpty();
case QCameraExposureControl::FlashPower:
return false;
case QCameraExposureControl::FlashCompensation:
@@ -81,7 +84,7 @@ bool QAndroidCameraExposureControl::isParameterSupported(ExposureParameter param
case QCameraExposureControl::SpotMeteringPoint:
return false;
case QCameraExposureControl::ExposureMode:
- return true;
+ return !m_supportedExposureModes.isEmpty();
case QCameraExposureControl::MeteringMode:
return false;
default:
@@ -127,27 +130,41 @@ QVariant QAndroidCameraExposureControl::actualValue(ExposureParameter parameter)
bool QAndroidCameraExposureControl::setValue(ExposureParameter parameter, const QVariant& value)
{
- if (!m_session->camera() || !value.isValid())
+ if (!value.isValid())
return false;
if (parameter == QCameraExposureControl::ExposureCompensation) {
- m_requestedExposureCompensation = value.toReal();
- emit requestedValueChanged(QCameraExposureControl::ExposureCompensation);
+ qreal expComp = value.toReal();
+ if (!qFuzzyCompare(m_requestedExposureCompensation, expComp)) {
+ m_requestedExposureCompensation = expComp;
+ emit requestedValueChanged(QCameraExposureControl::ExposureCompensation);
+ }
+
+ if (!m_session->camera())
+ return true;
int expCompIndex = qRound(m_requestedExposureCompensation / m_exposureCompensationStep);
if (expCompIndex >= m_minExposureCompensationIndex
&& expCompIndex <= m_maxExposureCompensationIndex) {
+ qreal comp = expCompIndex * m_exposureCompensationStep;
m_session->camera()->setExposureCompensation(expCompIndex);
-
- m_actualExposureCompensation = expCompIndex * m_exposureCompensationStep;
- emit actualValueChanged(QCameraExposureControl::ExposureCompensation);
+ if (!qFuzzyCompare(m_actualExposureCompensation, comp)) {
+ m_actualExposureCompensation = expCompIndex * m_exposureCompensationStep;
+ emit actualValueChanged(QCameraExposureControl::ExposureCompensation);
+ }
return true;
}
} else if (parameter == QCameraExposureControl::ExposureMode) {
- m_requestedExposureMode = value.value<QCameraExposure::ExposureMode>();
- emit requestedValueChanged(QCameraExposureControl::ExposureMode);
+ QCameraExposure::ExposureMode expMode = value.value<QCameraExposure::ExposureMode>();
+ if (m_requestedExposureMode != expMode) {
+ m_requestedExposureMode = expMode;
+ emit requestedValueChanged(QCameraExposureControl::ExposureMode);
+ }
+
+ if (!m_session->camera())
+ return true;
if (!m_supportedExposureModes.isEmpty()) {
m_actualExposureMode = m_requestedExposureMode;
@@ -190,38 +207,39 @@ bool QAndroidCameraExposureControl::setValue(ExposureParameter parameter, const
void QAndroidCameraExposureControl::onCameraOpened()
{
- m_requestedExposureCompensation = m_actualExposureCompensation = 0.0;
- m_requestedExposureMode = m_actualExposureMode = QCameraExposure::ExposureAuto;
- emit requestedValueChanged(QCameraExposureControl::ExposureCompensation);
- emit actualValueChanged(QCameraExposureControl::ExposureCompensation);
- emit requestedValueChanged(QCameraExposureControl::ExposureMode);
- emit actualValueChanged(QCameraExposureControl::ExposureMode);
-
+ m_supportedExposureCompensations.clear();
m_minExposureCompensationIndex = m_session->camera()->getMinExposureCompensation();
m_maxExposureCompensationIndex = m_session->camera()->getMaxExposureCompensation();
m_exposureCompensationStep = m_session->camera()->getExposureCompensationStep();
- for (int i = m_minExposureCompensationIndex; i <= m_maxExposureCompensationIndex; ++i)
- m_supportedExposureCompensations.append(i * m_exposureCompensationStep);
- emit parameterRangeChanged(QCameraExposureControl::ExposureCompensation);
+ if (m_minExposureCompensationIndex != 0 || m_maxExposureCompensationIndex != 0) {
+ for (int i = m_minExposureCompensationIndex; i <= m_maxExposureCompensationIndex; ++i)
+ m_supportedExposureCompensations.append(i * m_exposureCompensationStep);
+ emit parameterRangeChanged(QCameraExposureControl::ExposureCompensation);
+ }
m_supportedExposureModes.clear();
QStringList sceneModes = m_session->camera()->getSupportedSceneModes();
- for (int i = 0; i < sceneModes.size(); ++i) {
- const QString &sceneMode = sceneModes.at(i);
- if (sceneMode == QLatin1String("auto"))
- m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposureAuto);
- else if (sceneMode == QLatin1String("beach"))
- m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposureBeach);
- else if (sceneMode == QLatin1String("night"))
- m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposureNight);
- else if (sceneMode == QLatin1String("portrait"))
- m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposurePortrait);
- else if (sceneMode == QLatin1String("snow"))
- m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposureSnow);
- else if (sceneMode == QLatin1String("sports"))
- m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposureSports);
+ if (!sceneModes.isEmpty()) {
+ for (int i = 0; i < sceneModes.size(); ++i) {
+ const QString &sceneMode = sceneModes.at(i);
+ if (sceneMode == QLatin1String("auto"))
+ m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposureAuto);
+ else if (sceneMode == QLatin1String("beach"))
+ m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposureBeach);
+ else if (sceneMode == QLatin1String("night"))
+ m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposureNight);
+ else if (sceneMode == QLatin1String("portrait"))
+ m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposurePortrait);
+ else if (sceneMode == QLatin1String("snow"))
+ m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposureSnow);
+ else if (sceneMode == QLatin1String("sports"))
+ m_supportedExposureModes << QVariant::fromValue(QCameraExposure::ExposureSports);
+ }
+ emit parameterRangeChanged(QCameraExposureControl::ExposureMode);
}
- emit parameterRangeChanged(QCameraExposureControl::ExposureMode);
+
+ setValue(QCameraExposureControl::ExposureCompensation, QVariant::fromValue(m_requestedExposureCompensation));
+ setValue(QCameraExposureControl::ExposureMode, QVariant::fromValue(m_requestedExposureMode));
}
QT_END_NAMESPACE
diff --git a/src/plugins/android/src/mediacapture/qandroidcameraflashcontrol.cpp b/src/plugins/android/src/mediacapture/qandroidcameraflashcontrol.cpp
index de8e521ee..20aece378 100644
--- a/src/plugins/android/src/mediacapture/qandroidcameraflashcontrol.cpp
+++ b/src/plugins/android/src/mediacapture/qandroidcameraflashcontrol.cpp
@@ -62,7 +62,12 @@ QCameraExposure::FlashModes QAndroidCameraFlashControl::flashMode() const
void QAndroidCameraFlashControl::setFlashMode(QCameraExposure::FlashModes mode)
{
- if (m_flashMode == mode || !m_session->camera() || !isFlashModeSupported(mode))
+ if (!m_session->camera()) {
+ m_flashMode = mode;
+ return;
+ }
+
+ if (!isFlashModeSupported(mode))
return;
// if torch was enabled, it first needs to be turned off before setting another mode
@@ -88,7 +93,7 @@ void QAndroidCameraFlashControl::setFlashMode(QCameraExposure::FlashModes mode)
bool QAndroidCameraFlashControl::isFlashModeSupported(QCameraExposure::FlashModes mode) const
{
- return m_supportedFlashModes.contains(mode);
+ return m_session->camera() ? m_supportedFlashModes.contains(mode) : false;
}
bool QAndroidCameraFlashControl::isFlashReady() const
@@ -115,6 +120,11 @@ void QAndroidCameraFlashControl::onCameraOpened()
else if (flashMode == QLatin1String("torch"))
m_supportedFlashModes << QCameraExposure::FlashVideoLight;
}
+
+ if (!m_supportedFlashModes.contains(m_flashMode))
+ m_flashMode = QCameraExposure::FlashOff;
+
+ setFlashMode(m_flashMode);
}
QT_END_NAMESPACE
diff --git a/src/plugins/android/src/mediacapture/qandroidcamerafocuscontrol.cpp b/src/plugins/android/src/mediacapture/qandroidcamerafocuscontrol.cpp
index c4e0ea1a8..dc636ccaf 100644
--- a/src/plugins/android/src/mediacapture/qandroidcamerafocuscontrol.cpp
+++ b/src/plugins/android/src/mediacapture/qandroidcamerafocuscontrol.cpp
@@ -80,42 +80,45 @@ QCameraFocus::FocusModes QAndroidCameraFocusControl::focusMode() const
void QAndroidCameraFocusControl::setFocusMode(QCameraFocus::FocusModes mode)
{
- if (m_focusMode == mode || !m_session->camera() || !isFocusModeSupported(mode))
+ if (!m_session->camera()) {
+ setFocusModeHelper(mode);
return;
+ }
- QString focusMode = QLatin1String("fixed");
-
- if (mode.testFlag(QCameraFocus::HyperfocalFocus)) {
- focusMode = QLatin1String("edof");
- } else if (mode.testFlag(QCameraFocus::ManualFocus)) {
- focusMode = QLatin1String("fixed");
- } else if (mode.testFlag(QCameraFocus::AutoFocus)) {
- focusMode = QLatin1String("auto");
- } else if (mode.testFlag(QCameraFocus::MacroFocus)) {
- focusMode = QLatin1String("macro");
- } else if (mode.testFlag(QCameraFocus::ContinuousFocus)) {
- if ((m_session->captureMode().testFlag(QCamera::CaptureVideo) && m_continuousVideoFocusSupported)
- || !m_continuousPictureFocusSupported) {
- focusMode = QLatin1String("continuous-video");
- } else {
- focusMode = QLatin1String("continuous-picture");
+ if (isFocusModeSupported(mode)) {
+ QString focusMode = QLatin1String("fixed");
+
+ if (mode.testFlag(QCameraFocus::HyperfocalFocus)) {
+ focusMode = QLatin1String("edof");
+ } else if (mode.testFlag(QCameraFocus::ManualFocus)) {
+ focusMode = QLatin1String("fixed");
+ } else if (mode.testFlag(QCameraFocus::AutoFocus)) {
+ focusMode = QLatin1String("auto");
+ } else if (mode.testFlag(QCameraFocus::MacroFocus)) {
+ focusMode = QLatin1String("macro");
+ } else if (mode.testFlag(QCameraFocus::ContinuousFocus)) {
+ if ((m_session->captureMode().testFlag(QCamera::CaptureVideo) && m_continuousVideoFocusSupported)
+ || !m_continuousPictureFocusSupported) {
+ focusMode = QLatin1String("continuous-video");
+ } else {
+ focusMode = QLatin1String("continuous-picture");
+ }
+ } else if (mode.testFlag(QCameraFocus::InfinityFocus)) {
+ focusMode = QLatin1String("infinity");
}
- } else if (mode.testFlag(QCameraFocus::InfinityFocus)) {
- focusMode = QLatin1String("infinity");
- }
- m_session->camera()->setFocusMode(focusMode);
+ m_session->camera()->setFocusMode(focusMode);
- // reset focus position
- m_session->camera()->cancelAutoFocus();
+ // reset focus position
+ m_session->camera()->cancelAutoFocus();
- m_focusMode = mode;
- emit focusModeChanged(m_focusMode);
+ setFocusModeHelper(mode);
+ }
}
bool QAndroidCameraFocusControl::isFocusModeSupported(QCameraFocus::FocusModes mode) const
{
- return m_supportedFocusModes.contains(mode);
+ return m_session->camera() ? m_supportedFocusModes.contains(mode) : false;
}
QCameraFocus::FocusPointMode QAndroidCameraFocusControl::focusPointMode() const
@@ -125,29 +128,31 @@ QCameraFocus::FocusPointMode QAndroidCameraFocusControl::focusPointMode() const
void QAndroidCameraFocusControl::setFocusPointMode(QCameraFocus::FocusPointMode mode)
{
- if (!m_session->camera() || m_focusPointMode == mode || !isFocusPointModeSupported(mode))
+ if (!m_session->camera()) {
+ setFocusPointModeHelper(mode);
return;
-
- m_focusPointMode = mode;
-
- if (mode == QCameraFocus::FocusPointCustom) {
- m_actualFocusPoint = m_customFocusPoint;
- } else {
- // FocusPointAuto | FocusPointCenter
- // note: there is no way to know the actual focus point in FocusPointAuto mode,
- // so just report the focus point to be at the center of the frame
- m_actualFocusPoint = QPointF(0.5, 0.5);
}
- updateFocusZones();
- setCameraFocusArea();
+ if (isFocusPointModeSupported(mode)) {
+ if (mode == QCameraFocus::FocusPointCustom) {
+ m_actualFocusPoint = m_customFocusPoint;
+ } else {
+ // FocusPointAuto | FocusPointCenter
+ // note: there is no way to know the actual focus point in FocusPointAuto mode,
+ // so just report the focus point to be at the center of the frame
+ m_actualFocusPoint = QPointF(0.5, 0.5);
+ }
- emit focusPointModeChanged(mode);
+ setFocusPointModeHelper(mode);
+
+ updateFocusZones();
+ setCameraFocusArea();
+ }
}
bool QAndroidCameraFocusControl::isFocusPointModeSupported(QCameraFocus::FocusPointMode mode) const
{
- return m_supportedFocusPointModes.contains(mode);
+ return m_session->camera() ? m_supportedFocusPointModes.contains(mode) : false;
}
QPointF QAndroidCameraFocusControl::customFocusPoint() const
@@ -157,13 +162,12 @@ QPointF QAndroidCameraFocusControl::customFocusPoint() const
void QAndroidCameraFocusControl::setCustomFocusPoint(const QPointF &point)
{
- if (m_customFocusPoint == point)
- return;
-
- m_customFocusPoint = point;
- emit customFocusPointChanged(m_customFocusPoint);
+ if (m_customFocusPoint != point) {
+ m_customFocusPoint = point;
+ emit customFocusPointChanged(m_customFocusPoint);
+ }
- if (m_focusPointMode == QCameraFocus::FocusPointCustom) {
+ if (m_session->camera() && m_focusPointMode == QCameraFocus::FocusPointCustom) {
m_actualFocusPoint = m_customFocusPoint;
updateFocusZones();
setCameraFocusArea();
@@ -187,12 +191,7 @@ void QAndroidCameraFocusControl::onCameraOpened()
m_supportedFocusModes.clear();
m_continuousPictureFocusSupported = false;
m_continuousVideoFocusSupported = false;
-
- m_focusPointMode = QCameraFocus::FocusPointAuto;
- m_actualFocusPoint = QPointF(0.5, 0.5);
- m_customFocusPoint = QPointF();
m_supportedFocusPointModes.clear();
- m_focusZones.clear();
QStringList focusModes = m_session->camera()->getSupportedFocusModes();
for (int i = 0; i < focusModes.size(); ++i) {
@@ -220,10 +219,14 @@ void QAndroidCameraFocusControl::onCameraOpened()
if (m_session->camera()->getMaxNumFocusAreas() > 0)
m_supportedFocusPointModes << QCameraFocus::FocusPointCenter << QCameraFocus::FocusPointCustom;
- emit focusModeChanged(focusMode());
- emit focusPointModeChanged(m_focusPointMode);
- emit customFocusPointChanged(m_customFocusPoint);
- emit focusZonesChanged();
+ if (!m_supportedFocusModes.contains(m_focusMode))
+ setFocusModeHelper(QCameraFocus::AutoFocus);
+ if (!m_supportedFocusPointModes.contains(m_focusPointMode))
+ setFocusPointModeHelper(QCameraFocus::FocusPointAuto);
+
+ setFocusMode(m_focusMode);
+ setCustomFocusPoint(m_customFocusPoint);
+ setFocusPointMode(m_focusPointMode);
}
void QAndroidCameraFocusControl::updateFocusZones(QCameraFocusZone::FocusZoneStatus status)
@@ -276,11 +279,12 @@ void QAndroidCameraFocusControl::onViewportSizeChanged()
if (!m_focusZones.isEmpty())
status = m_focusZones.at(0).status();
updateFocusZones(status);
+ setCameraFocusArea();
}
void QAndroidCameraFocusControl::onCameraCaptureModeChanged()
{
- if (m_focusMode == QCameraFocus::ContinuousFocus) {
+ if (m_session->camera() && m_focusMode == QCameraFocus::ContinuousFocus) {
QString focusMode;
if ((m_session->captureMode().testFlag(QCamera::CaptureVideo) && m_continuousVideoFocusSupported)
|| !m_continuousPictureFocusSupported) {
diff --git a/src/plugins/android/src/mediacapture/qandroidcamerafocuscontrol.h b/src/plugins/android/src/mediacapture/qandroidcamerafocuscontrol.h
index 4311e78ba..997a313af 100644
--- a/src/plugins/android/src/mediacapture/qandroidcamerafocuscontrol.h
+++ b/src/plugins/android/src/mediacapture/qandroidcamerafocuscontrol.h
@@ -72,6 +72,22 @@ private Q_SLOTS:
void onAutoFocusComplete(bool success);
private:
+ inline void setFocusModeHelper(QCameraFocus::FocusModes mode)
+ {
+ if (m_focusMode != mode) {
+ m_focusMode = mode;
+ emit focusModeChanged(mode);
+ }
+ }
+
+ inline void setFocusPointModeHelper(QCameraFocus::FocusPointMode mode)
+ {
+ if (m_focusPointMode != mode) {
+ m_focusPointMode = mode;
+ emit focusPointModeChanged(mode);
+ }
+ }
+
void updateFocusZones(QCameraFocusZone::FocusZoneStatus status = QCameraFocusZone::Selected);
void setCameraFocusArea();
diff --git a/src/plugins/android/src/mediacapture/qandroidcameraimageprocessingcontrol.cpp b/src/plugins/android/src/mediacapture/qandroidcameraimageprocessingcontrol.cpp
index 4e4a416dd..7d0650420 100644
--- a/src/plugins/android/src/mediacapture/qandroidcameraimageprocessingcontrol.cpp
+++ b/src/plugins/android/src/mediacapture/qandroidcameraimageprocessingcontrol.cpp
@@ -49,6 +49,7 @@ QT_BEGIN_NAMESPACE
QAndroidCameraImageProcessingControl::QAndroidCameraImageProcessingControl(QAndroidCameraSession *session)
: QCameraImageProcessingControl()
, m_session(session)
+ , m_whiteBalanceMode(QCameraImageProcessing::WhiteBalanceAuto)
{
connect(m_session, SIGNAL(opened()),
this, SLOT(onCameraOpened()));
@@ -56,19 +57,17 @@ QAndroidCameraImageProcessingControl::QAndroidCameraImageProcessingControl(QAndr
bool QAndroidCameraImageProcessingControl::isParameterSupported(ProcessingParameter parameter) const
{
- return (parameter == QCameraImageProcessingControl::WhiteBalancePreset);
+ return parameter == QCameraImageProcessingControl::WhiteBalancePreset
+ && m_session->camera()
+ && !m_supportedWhiteBalanceModes.isEmpty();
}
bool QAndroidCameraImageProcessingControl::isParameterValueSupported(ProcessingParameter parameter,
const QVariant &value) const
{
- if (parameter != QCameraImageProcessingControl::WhiteBalancePreset)
- return false;
-
- if (!m_session->camera())
- return false;
-
- return m_supportedWhiteBalanceModes.contains(value.value<QCameraImageProcessing::WhiteBalanceMode>());
+ return parameter == QCameraImageProcessingControl::WhiteBalancePreset
+ && m_session->camera()
+ && m_supportedWhiteBalanceModes.contains(value.value<QCameraImageProcessing::WhiteBalanceMode>());
}
QVariant QAndroidCameraImageProcessingControl::parameter(ProcessingParameter parameter) const
@@ -76,13 +75,7 @@ QVariant QAndroidCameraImageProcessingControl::parameter(ProcessingParameter par
if (parameter != QCameraImageProcessingControl::WhiteBalancePreset)
return QVariant();
- if (!m_session->camera())
- return QVariant();
-
- QString wb = m_session->camera()->getWhiteBalance();
- QCameraImageProcessing::WhiteBalanceMode mode = m_supportedWhiteBalanceModes.key(wb, QCameraImageProcessing::WhiteBalanceAuto);
-
- return QVariant::fromValue(mode);
+ return QVariant::fromValue(m_whiteBalanceMode);
}
void QAndroidCameraImageProcessingControl::setParameter(ProcessingParameter parameter, const QVariant &value)
@@ -90,12 +83,21 @@ void QAndroidCameraImageProcessingControl::setParameter(ProcessingParameter para
if (parameter != QCameraImageProcessingControl::WhiteBalancePreset)
return;
- if (!m_session->camera())
- return;
+ QCameraImageProcessing::WhiteBalanceMode mode = value.value<QCameraImageProcessing::WhiteBalanceMode>();
- QString wb = m_supportedWhiteBalanceModes.value(value.value<QCameraImageProcessing::WhiteBalanceMode>(), QString());
- if (!wb.isEmpty())
+ if (m_session->camera())
+ setWhiteBalanceModeHelper(mode);
+ else
+ m_whiteBalanceMode = mode;
+}
+
+void QAndroidCameraImageProcessingControl::setWhiteBalanceModeHelper(QCameraImageProcessing::WhiteBalanceMode mode)
+{
+ QString wb = m_supportedWhiteBalanceModes.value(mode, QString());
+ if (!wb.isEmpty()) {
m_session->camera()->setWhiteBalance(wb);
+ m_whiteBalanceMode = mode;
+ }
}
void QAndroidCameraImageProcessingControl::onCameraOpened()
@@ -130,6 +132,11 @@ void QAndroidCameraImageProcessingControl::onCameraOpened()
QStringLiteral("warm-fluorescent"));
}
}
+
+ if (!m_supportedWhiteBalanceModes.contains(m_whiteBalanceMode))
+ m_whiteBalanceMode = QCameraImageProcessing::WhiteBalanceAuto;
+
+ setWhiteBalanceModeHelper(m_whiteBalanceMode);
}
QT_END_NAMESPACE
diff --git a/src/plugins/android/src/mediacapture/qandroidcameraimageprocessingcontrol.h b/src/plugins/android/src/mediacapture/qandroidcameraimageprocessingcontrol.h
index 3ef3c3144..0554dbf46 100644
--- a/src/plugins/android/src/mediacapture/qandroidcameraimageprocessingcontrol.h
+++ b/src/plugins/android/src/mediacapture/qandroidcameraimageprocessingcontrol.h
@@ -63,9 +63,13 @@ private Q_SLOTS:
void onCameraOpened();
private:
+ void setWhiteBalanceModeHelper(QCameraImageProcessing::WhiteBalanceMode mode);
+
QAndroidCameraSession *m_session;
- QHash<QCameraImageProcessing::WhiteBalanceMode, QString> m_supportedWhiteBalanceModes;
+ QCameraImageProcessing::WhiteBalanceMode m_whiteBalanceMode;
+
+ QMap<QCameraImageProcessing::WhiteBalanceMode, QString> m_supportedWhiteBalanceModes;
};
QT_END_NAMESPACE
diff --git a/src/plugins/android/src/mediacapture/qandroidcameralockscontrol.cpp b/src/plugins/android/src/mediacapture/qandroidcameralockscontrol.cpp
index bfb48d06e..3cf4209eb 100644
--- a/src/plugins/android/src/mediacapture/qandroidcameralockscontrol.cpp
+++ b/src/plugins/android/src/mediacapture/qandroidcameralockscontrol.cpp
@@ -66,13 +66,13 @@ QAndroidCameraLocksControl::QAndroidCameraLocksControl(QAndroidCameraSession *se
QCamera::LockTypes QAndroidCameraLocksControl::supportedLocks() const
{
- return (QCamera::LockExposure | QCamera::LockWhiteBalance | QCamera::LockFocus);
+ return m_supportedLocks;
}
QCamera::LockStatus QAndroidCameraLocksControl::lockStatus(QCamera::LockType lock) const
{
if (!m_supportedLocks.testFlag(lock) || !m_session->camera())
- return QCamera::Locked;
+ return QCamera::Unlocked;
if (lock == QCamera::LockFocus)
return m_focusLockStatus;
@@ -83,7 +83,7 @@ QCamera::LockStatus QAndroidCameraLocksControl::lockStatus(QCamera::LockType loc
if (lock == QCamera::LockWhiteBalance)
return m_whiteBalanceLockStatus;
- return QCamera::Locked;
+ return QCamera::Unlocked;
}
void QAndroidCameraLocksControl::searchAndLock(QCamera::LockTypes locks)
diff --git a/src/plugins/android/src/mediacapture/qandroidcamerasession.cpp b/src/plugins/android/src/mediacapture/qandroidcamerasession.cpp
index 963952294..90d223c34 100644
--- a/src/plugins/android/src/mediacapture/qandroidcamerasession.cpp
+++ b/src/plugins/android/src/mediacapture/qandroidcamerasession.cpp
@@ -331,11 +331,12 @@ bool QAndroidCameraSession::startPreview()
if (m_previewStarted)
return true;
- if (m_videoOutput->isReady())
- m_camera->setPreviewTexture(m_videoOutput->surfaceTexture());
- else
+ if (!m_videoOutput->isReady())
return true; // delay starting until the video output is ready
+ if (!m_camera->setPreviewTexture(m_videoOutput->surfaceTexture()))
+ return false;
+
m_status = QCamera::StartingStatus;
emit statusChanged(m_status);
diff --git a/src/plugins/android/src/mediacapture/qandroidcamerazoomcontrol.cpp b/src/plugins/android/src/mediacapture/qandroidcamerazoomcontrol.cpp
index 385cd942a..3b6645fa8 100644
--- a/src/plugins/android/src/mediacapture/qandroidcamerazoomcontrol.cpp
+++ b/src/plugins/android/src/mediacapture/qandroidcamerazoomcontrol.cpp
@@ -96,32 +96,25 @@ void QAndroidCameraZoomControl::zoomTo(qreal optical, qreal digital)
{
Q_UNUSED(optical);
- if (!m_cameraSession->camera() ||
- qFuzzyCompare(m_requestedZoom, digital) ||
- qFuzzyCompare(m_maximumZoom, qreal(1))) {
- return;
+ if (!qFuzzyCompare(m_requestedZoom, digital)) {
+ m_requestedZoom = digital;
+ emit requestedDigitalZoomChanged(m_requestedZoom);
}
- m_requestedZoom = digital;
- emit requestedDigitalZoomChanged(m_requestedZoom);
-
- digital = qBound(qreal(1), digital, m_maximumZoom);
- int validZoomIndex = qt_findClosestValue(m_zoomRatios, qRound(digital * 100));
- qreal newZoom = m_zoomRatios.at(validZoomIndex) / qreal(100);
- if (!qFuzzyCompare(m_currentZoom, newZoom)) {
- m_cameraSession->camera()->setZoom(validZoomIndex);
- m_currentZoom = newZoom;
- emit currentDigitalZoomChanged(m_currentZoom);
+ if (m_cameraSession->camera()) {
+ digital = qBound(qreal(1), digital, m_maximumZoom);
+ int validZoomIndex = qt_findClosestValue(m_zoomRatios, qRound(digital * 100));
+ qreal newZoom = m_zoomRatios.at(validZoomIndex) / qreal(100);
+ if (!qFuzzyCompare(m_currentZoom, newZoom)) {
+ m_cameraSession->camera()->setZoom(validZoomIndex);
+ m_currentZoom = newZoom;
+ emit currentDigitalZoomChanged(m_currentZoom);
+ }
}
}
void QAndroidCameraZoomControl::onCameraOpened()
{
- m_requestedZoom = 1.0;
- m_currentZoom = 1.0;
- emit requestedDigitalZoomChanged(m_requestedZoom);
- emit currentDigitalZoomChanged(m_currentZoom);
-
if (m_cameraSession->camera()->isZoomSupported()) {
m_zoomRatios = m_cameraSession->camera()->getZoomRatios();
qreal maxZoom = m_zoomRatios.last() / qreal(100);
@@ -129,6 +122,7 @@ void QAndroidCameraZoomControl::onCameraOpened()
m_maximumZoom = maxZoom;
emit maximumDigitalZoomChanged(m_maximumZoom);
}
+ zoomTo(1, m_requestedZoom);
} else {
m_zoomRatios.clear();
if (!qFuzzyCompare(m_maximumZoom, qreal(1))) {
diff --git a/src/plugins/android/src/wrappers/jni/androidcamera.cpp b/src/plugins/android/src/wrappers/jni/androidcamera.cpp
index c917bf2b7..32fc4bbaf 100644
--- a/src/plugins/android/src/wrappers/jni/androidcamera.cpp
+++ b/src/plugins/android/src/wrappers/jni/androidcamera.cpp
@@ -56,6 +56,19 @@ static QMutex g_cameraMapMutex;
typedef QMap<int, AndroidCamera *> CameraMap;
Q_GLOBAL_STATIC(CameraMap, g_cameraMap)
+static inline bool exceptionCheckAndClear(JNIEnv *env)
+{
+ if (Q_UNLIKELY(env->ExceptionCheck())) {
+#ifdef QT_DEBUG
+ env->ExceptionDescribe();
+#endif // QT_DEBUG
+ env->ExceptionClear();
+ return true;
+ }
+
+ return false;
+}
+
static QRect areaToRect(jobject areaObj)
{
QJNIObjectPrivate area(areaObj);
@@ -132,9 +145,9 @@ public:
Q_INVOKABLE bool init(int cameraId);
Q_INVOKABLE void release();
- Q_INVOKABLE void lock();
- Q_INVOKABLE void unlock();
- Q_INVOKABLE void reconnect();
+ Q_INVOKABLE bool lock();
+ Q_INVOKABLE bool unlock();
+ Q_INVOKABLE bool reconnect();
Q_INVOKABLE AndroidCamera::CameraFacing getFacing();
Q_INVOKABLE int getNativeOrientation();
@@ -147,7 +160,7 @@ public:
Q_INVOKABLE QSize previewSize() const { return m_previewSize; }
Q_INVOKABLE void updatePreviewSize();
- Q_INVOKABLE void setPreviewTexture(void *surfaceTexture);
+ Q_INVOKABLE bool setPreviewTexture(void *surfaceTexture);
Q_INVOKABLE bool isZoomSupported();
Q_INVOKABLE int getMaxZoom();
@@ -266,7 +279,7 @@ AndroidCamera *AndroidCamera::open(int cameraId)
worker->start();
d->moveToThread(worker);
connect(worker, &QThread::finished, d, &AndroidCameraPrivate::deleteLater);
- bool ok = false;
+ bool ok = true;
QMetaObject::invokeMethod(d, "init", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, ok), Q_ARG(int, cameraId));
if (!ok) {
worker->quit();
@@ -289,22 +302,28 @@ int AndroidCamera::cameraId() const
return d->m_cameraId;
}
-void AndroidCamera::lock()
+bool AndroidCamera::lock()
{
Q_D(AndroidCamera);
- QMetaObject::invokeMethod(d, "lock", Qt::BlockingQueuedConnection);
+ bool ok = true;
+ QMetaObject::invokeMethod(d, "lock", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, ok));
+ return ok;
}
-void AndroidCamera::unlock()
+bool AndroidCamera::unlock()
{
Q_D(AndroidCamera);
- QMetaObject::invokeMethod(d, "unlock", Qt::BlockingQueuedConnection);
+ bool ok = true;
+ QMetaObject::invokeMethod(d, "unlock", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, ok));
+ return ok;
}
-void AndroidCamera::reconnect()
+bool AndroidCamera::reconnect()
{
Q_D(AndroidCamera);
- QMetaObject::invokeMethod(d, "reconnect");
+ bool ok = true;
+ QMetaObject::invokeMethod(d, "reconnect", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, ok));
+ return ok;
}
void AndroidCamera::release()
@@ -368,13 +387,16 @@ void AndroidCamera::setPreviewSize(const QSize &size)
QMetaObject::invokeMethod(d, "updatePreviewSize");
}
-void AndroidCamera::setPreviewTexture(AndroidSurfaceTexture *surfaceTexture)
+bool AndroidCamera::setPreviewTexture(AndroidSurfaceTexture *surfaceTexture)
{
Q_D(AndroidCamera);
+ bool ok = true;
QMetaObject::invokeMethod(d,
"setPreviewTexture",
Qt::BlockingQueuedConnection,
+ Q_RETURN_ARG(bool, ok),
Q_ARG(void *, surfaceTexture ? surfaceTexture->surfaceTexture() : 0));
+ return ok;
}
bool AndroidCamera::isZoomSupported()
@@ -698,12 +720,12 @@ AndroidCameraPrivate::~AndroidCameraPrivate()
bool AndroidCameraPrivate::init(int cameraId)
{
m_cameraId = cameraId;
+ QJNIEnvironmentPrivate env;
m_camera = QJNIObjectPrivate::callStaticObjectMethod("android/hardware/Camera",
"open",
"(I)Landroid/hardware/Camera;",
cameraId);
-
- if (!m_camera.isValid())
+ if (exceptionCheckAndClear(env) || !m_camera.isValid())
return false;
m_cameraListener = QJNIObjectPrivate(g_qtCameraListenerClass, "(I)V", m_cameraId);
@@ -731,26 +753,25 @@ void AndroidCameraPrivate::release()
m_camera.callMethod<void>("release");
}
-void AndroidCameraPrivate::lock()
+bool AndroidCameraPrivate::lock()
{
+ QJNIEnvironmentPrivate env;
m_camera.callMethod<void>("lock");
+ return !exceptionCheckAndClear(env);
}
-void AndroidCameraPrivate::unlock()
+bool AndroidCameraPrivate::unlock()
{
+ QJNIEnvironmentPrivate env;
m_camera.callMethod<void>("unlock");
+ return !exceptionCheckAndClear(env);
}
-void AndroidCameraPrivate::reconnect()
+bool AndroidCameraPrivate::reconnect()
{
QJNIEnvironmentPrivate env;
m_camera.callMethod<void>("reconnect");
- if (env->ExceptionCheck()) {
-#ifdef QT_DEBUG
- env->ExceptionDescribe();
-#endif // QT_DEBUG
- env->ExceptionDescribe();
- }
+ return !exceptionCheckAndClear(env);
}
AndroidCamera::CameraFacing AndroidCameraPrivate::getFacing()
@@ -832,11 +853,13 @@ void AndroidCameraPrivate::updatePreviewSize()
emit previewSizeChanged();
}
-void AndroidCameraPrivate::setPreviewTexture(void *surfaceTexture)
+bool AndroidCameraPrivate::setPreviewTexture(void *surfaceTexture)
{
+ QJNIEnvironmentPrivate env;
m_camera.callMethod<void>("setPreviewTexture",
"(Landroid/graphics/SurfaceTexture;)V",
static_cast<jobject>(surfaceTexture));
+ return !exceptionCheckAndClear(env);
}
bool AndroidCameraPrivate::isZoomSupported()
@@ -1020,8 +1043,7 @@ void AndroidCameraPrivate::setFocusAreas(const QList<QRect> &areas)
arrayList.callMethod<jboolean>("add",
"(Ljava/lang/Object;)Z",
rectToArea(areas.at(i)).object());
- if (env->ExceptionCheck())
- env->ExceptionClear();
+ exceptionCheckAndClear(env);
}
list = arrayList;
}
@@ -1347,9 +1369,11 @@ void AndroidCameraPrivate::fetchLastPreviewFrame()
void AndroidCameraPrivate::applyParameters()
{
+ QJNIEnvironmentPrivate env;
m_camera.callMethod<void>("setParameters",
"(Landroid/hardware/Camera$Parameters;)V",
m_parameters.object());
+ exceptionCheckAndClear(env);
}
QStringList AndroidCameraPrivate::callParametersStringListMethod(const QByteArray &methodName)
@@ -1386,10 +1410,8 @@ static JNINativeMethod methods[] = {
bool AndroidCamera::initJNI(JNIEnv *env)
{
jclass clazz = env->FindClass("org/qtproject/qt5/android/multimedia/QtCameraListener");
- if (env->ExceptionCheck())
- env->ExceptionClear();
- if (clazz) {
+ if (!exceptionCheckAndClear(env) && clazz) {
g_qtCameraListenerClass = static_cast<jclass>(env->NewGlobalRef(clazz));
if (env->RegisterNatives(g_qtCameraListenerClass,
methods,
diff --git a/src/plugins/android/src/wrappers/jni/androidcamera.h b/src/plugins/android/src/wrappers/jni/androidcamera.h
index 2ea69b7e3..010f089fb 100644
--- a/src/plugins/android/src/wrappers/jni/androidcamera.h
+++ b/src/plugins/android/src/wrappers/jni/androidcamera.h
@@ -90,9 +90,9 @@ public:
int cameraId() const;
- void lock();
- void unlock();
- void reconnect();
+ bool lock();
+ bool unlock();
+ bool reconnect();
void release();
CameraFacing getFacing();
@@ -106,7 +106,7 @@ public:
QSize previewSize() const;
void setPreviewSize(const QSize &size);
- void setPreviewTexture(AndroidSurfaceTexture *surfaceTexture);
+ bool setPreviewTexture(AndroidSurfaceTexture *surfaceTexture);
bool isZoomSupported();
int getMaxZoom();
diff --git a/src/plugins/avfoundation/camera/avfmediarecordercontrol.mm b/src/plugins/avfoundation/camera/avfmediarecordercontrol.mm
index 4e82ada1b..d65d238d9 100644
--- a/src/plugins/avfoundation/camera/avfmediarecordercontrol.mm
+++ b/src/plugins/avfoundation/camera/avfmediarecordercontrol.mm
@@ -250,10 +250,8 @@ void AVFMediaRecorderControl::setState(QMediaRecorder::State state)
qDebugCamera() << "Video capture location:" << actualLocation.toString();
- NSString *urlString = [NSString stringWithUTF8String:actualLocation.toString().toUtf8().constData()];
- NSURL *fileURL = [NSURL URLWithString:urlString];
-
- [m_movieOutput startRecordingToOutputFileURL:fileURL recordingDelegate:m_recorderDelagate];
+ [m_movieOutput startRecordingToOutputFileURL:actualLocation.toNSURL()
+ recordingDelegate:m_recorderDelagate];
Q_EMIT actualLocationChanged(actualLocation);
} else {
diff --git a/src/plugins/coreaudio/coreaudiosessionmanager.mm b/src/plugins/coreaudio/coreaudiosessionmanager.mm
index 04c8b6ed4..6a8e97f4a 100644
--- a/src/plugins/coreaudio/coreaudiosessionmanager.mm
+++ b/src/plugins/coreaudio/coreaudiosessionmanager.mm
@@ -216,8 +216,9 @@ CoreAudioSessionManager::CoreAudioSessionManager() :
{
m_sessionObserver = [[CoreAudioSessionObserver alloc] initWithAudioSessionManager:this];
setActive(true);
- //set default category to just Playback and only switch if we need more permissions
- setCategory(CoreAudioSessionManager::Playback, CoreAudioSessionManager::MixWithOthers);
+ // Set default category to Ambient (implies MixWithOthers). This makes sure audio stops playing
+ // if the screen is locked or if the Silent switch is toggled.
+ setCategory(CoreAudioSessionManager::Ambient, CoreAudioSessionManager::None);
}
CoreAudioSessionManager::~CoreAudioSessionManager()
diff --git a/src/plugins/directshow/camera/camera.pri b/src/plugins/directshow/camera/camera.pri
index 2c0fe55b1..75fca4aad 100644
--- a/src/plugins/directshow/camera/camera.pri
+++ b/src/plugins/directshow/camera/camera.pri
@@ -23,10 +23,5 @@ SOURCES += \
$$PWD/dsimagecapturecontrol.cpp \
$$PWD/dscamerasession.cpp
-qtHaveModule(widgets) {
- HEADERS += $$PWD/dsvideowidgetcontrol.h
- SOURCES += $$PWD/dsvideowidgetcontrol.cpp
-}
-
*-msvc*:INCLUDEPATH += $$(DXSDK_DIR)/include
LIBS += -lstrmiids -ldmoguids -luuid -lmsdmo -lole32 -loleaut32
diff --git a/src/plugins/directshow/camera/dscameracontrol.cpp b/src/plugins/directshow/camera/dscameracontrol.cpp
index 07035f5dd..15c669f41 100644
--- a/src/plugins/directshow/camera/dscameracontrol.cpp
+++ b/src/plugins/directshow/camera/dscameracontrol.cpp
@@ -48,10 +48,13 @@
QT_BEGIN_NAMESPACE
DSCameraControl::DSCameraControl(QObject *parent)
- :QCameraControl(parent), m_captureMode(QCamera::CaptureStillImage)
+ : QCameraControl(parent)
+ , m_state(QCamera::UnloadedState)
+ , m_captureMode(QCamera::CaptureStillImage)
{
m_session = qobject_cast<DSCameraSession*>(parent);
- connect(m_session, SIGNAL(stateChanged(QCamera::State)),this, SIGNAL(stateChanged(QCamera::State)));
+ connect(m_session, SIGNAL(statusChanged(QCamera::Status)),
+ this, SIGNAL(statusChanged(QCamera::Status)));
}
DSCameraControl::~DSCameraControl()
@@ -60,14 +63,30 @@ DSCameraControl::~DSCameraControl()
void DSCameraControl::setState(QCamera::State state)
{
+ if (m_state == state)
+ return;
+
+ bool succeeded = false;
switch (state) {
- case QCamera::ActiveState:
- start();
- break;
- case QCamera::UnloadedState: /* fall through */
- case QCamera::LoadedState:
- stop();
- break;
+ case QCamera::UnloadedState:
+ succeeded = m_session->unload();
+ break;
+ case QCamera::LoadedState:
+ case QCamera::ActiveState:
+ if (m_state == QCamera::UnloadedState && !m_session->load())
+ return;
+
+ if (state == QCamera::ActiveState)
+ succeeded = m_session->startPreview();
+ else
+ succeeded = m_session->stopPreview();
+
+ break;
+ }
+
+ if (succeeded) {
+ m_state = state;
+ emit stateChanged(m_state);
}
}
@@ -85,19 +104,17 @@ bool DSCameraControl::isCaptureModeSupported(QCamera::CaptureModes mode) const
return bCaptureSupported;
}
-void DSCameraControl::start()
+void DSCameraControl::setCaptureMode(QCamera::CaptureModes mode)
{
- m_session->record();
-}
-
-void DSCameraControl::stop()
-{
- m_session->stop();
+ if (m_captureMode != mode && isCaptureModeSupported(mode)) {
+ m_captureMode = mode;
+ emit captureModeChanged(mode);
+ }
}
-QCamera::State DSCameraControl::state() const
+QCamera::Status DSCameraControl::status() const
{
- return (QCamera::State)m_session->state();
+ return m_session->status();
}
QT_END_NAMESPACE
diff --git a/src/plugins/directshow/camera/dscameracontrol.h b/src/plugins/directshow/camera/dscameracontrol.h
index 5c7661643..36ab515bf 100644
--- a/src/plugins/directshow/camera/dscameracontrol.h
+++ b/src/plugins/directshow/camera/dscameracontrol.h
@@ -58,28 +58,21 @@ public:
DSCameraControl(QObject *parent = 0);
~DSCameraControl();
- void start();
- void stop();
- QCamera::State state() const;
+ QCamera::State state() const { return m_state; }
QCamera::CaptureModes captureMode() const { return m_captureMode; }
- void setCaptureMode(QCamera::CaptureModes mode)
- {
- if (m_captureMode != mode) {
- m_captureMode = mode;
- emit captureModeChanged(mode);
- }
- }
+ void setCaptureMode(QCamera::CaptureModes mode);
void setState(QCamera::State state);
- QCamera::Status status() const { return QCamera::UnavailableStatus; }
+ QCamera::Status status() const;
bool isCaptureModeSupported(QCamera::CaptureModes mode) const;
bool canChangeProperty(PropertyChangeType /* changeType */, QCamera::Status /* status */) const {return false; }
private:
DSCameraSession *m_session;
DSCameraService *m_service;
+ QCamera::State m_state;
QCamera::CaptureModes m_captureMode;
};
diff --git a/src/plugins/directshow/camera/dscameraservice.cpp b/src/plugins/directshow/camera/dscameraservice.cpp
index 9d99c6dd6..b8a9b4aa7 100644
--- a/src/plugins/directshow/camera/dscameraservice.cpp
+++ b/src/plugins/directshow/camera/dscameraservice.cpp
@@ -42,11 +42,6 @@
#include <QtCore/qvariant.h>
#include <QtCore/qdebug.h>
-#if defined(HAVE_WIDGETS)
-#include <QtWidgets/qwidget.h>
-#include <QVideoWidgetControl>
-#endif
-
#include "dscameraservice.h"
#include "dscameracontrol.h"
#include "dscamerasession.h"
@@ -54,28 +49,16 @@
#include "dsvideodevicecontrol.h"
#include "dsimagecapturecontrol.h"
-#if defined(HAVE_WIDGETS)
-#include "dsvideowidgetcontrol.h"
-#endif
-
QT_BEGIN_NAMESPACE
DSCameraService::DSCameraService(QObject *parent):
QMediaService(parent)
-#if defined(HAVE_WIDGETS)
- , m_viewFinderWidget(0)
- #endif
, m_videoRenderer(0)
{
m_session = new DSCameraSession(this);
-
m_control = new DSCameraControl(m_session);
-
m_videoDevice = new DSVideoDeviceControl(m_session);
-
m_imageCapture = new DSImageCaptureControl(m_session);
-
- m_device = QByteArray("default");
}
DSCameraService::~DSCameraService()
@@ -84,9 +67,6 @@ DSCameraService::~DSCameraService()
delete m_videoDevice;
delete m_videoRenderer;
delete m_imageCapture;
-#if defined(HAVE_WIDGETS)
- delete m_viewFinderWidget;
-#endif
delete m_session;
}
@@ -98,21 +78,8 @@ QMediaControl* DSCameraService::requestControl(const char *name)
if (qstrcmp(name, QCameraImageCaptureControl_iid) == 0)
return m_imageCapture;
-#if defined(HAVE_WIDGETS)
- if (qstrcmp(name, QVideoWidgetControl_iid) == 0) {
- if (!m_viewFinderWidget && !m_videoRenderer) {
- m_viewFinderWidget = new DSVideoWidgetControl(m_session);
- return m_viewFinderWidget;
- }
- }
-#endif
-
if (qstrcmp(name,QVideoRendererControl_iid) == 0) {
-#if defined(HAVE_WIDGETS)
- if (!m_videoRenderer && !m_viewFinderWidget) {
-#else
if (!m_videoRenderer) {
-#endif
m_videoRenderer = new DSVideoRendererControl(m_session, this);
return m_videoRenderer;
}
@@ -131,14 +98,6 @@ void DSCameraService::releaseControl(QMediaControl *control)
m_videoRenderer = 0;
return;
}
-
-#if defined(HAVE_WIDGETS)
- if (control == m_viewFinderWidget) {
- delete m_viewFinderWidget;
- m_viewFinderWidget = 0;
- return;
- }
-#endif
}
QT_END_NAMESPACE
diff --git a/src/plugins/directshow/camera/dscameraservice.h b/src/plugins/directshow/camera/dscameraservice.h
index fb8b7886a..ceb786414 100644
--- a/src/plugins/directshow/camera/dscameraservice.h
+++ b/src/plugins/directshow/camera/dscameraservice.h
@@ -70,13 +70,9 @@ private:
DSCameraControl *m_control;
DSCameraSession *m_session;
DSVideoOutputControl *m_videoOutput;
-#if defined(HAVE_WIDGETS)
- QMediaControl *m_viewFinderWidget;
-#endif
DSVideoDeviceControl *m_videoDevice;
QMediaControl *m_videoRenderer;
DSImageCaptureControl *m_imageCapture;
- QByteArray m_device;
};
QT_END_NAMESPACE
diff --git a/src/plugins/directshow/camera/dscamerasession.cpp b/src/plugins/directshow/camera/dscamerasession.cpp
index 1ecc368e7..a8c85e5c6 100644
--- a/src/plugins/directshow/camera/dscamerasession.cpp
+++ b/src/plugins/directshow/camera/dscamerasession.cpp
@@ -1,6 +1,6 @@
/****************************************************************************
**
-** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
+** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
@@ -42,8 +42,11 @@
#include <QtCore/qdebug.h>
#include <QWidget>
#include <QFile>
+#include <QtConcurrent/QtConcurrentRun>
#include <QtMultimedia/qabstractvideobuffer.h>
#include <QtMultimedia/qvideosurfaceformat.h>
+#include <QtMultimedia/qcameraimagecapture.h>
+#include <private/qmemoryvideobuffer_p.h>
#include "dscamerasession.h"
#include "dsvideorenderer.h"
@@ -51,12 +54,23 @@
QT_BEGIN_NAMESPACE
-// If frames come in quicker than we display them, we allow the queue to build
-// up to this number before we start dropping them.
-const int LIMIT_FRAME = 5;
namespace {
// DirectShow helper implementation
+void _CopyMediaType(AM_MEDIA_TYPE *pmtTarget, const AM_MEDIA_TYPE *pmtSource)
+{
+ *pmtTarget = *pmtSource;
+ if (pmtTarget->cbFormat != 0) {
+ pmtTarget->pbFormat = reinterpret_cast<BYTE *>(CoTaskMemAlloc(pmtTarget->cbFormat));
+ if (pmtTarget->pbFormat)
+ memcpy(pmtTarget->pbFormat, pmtSource->pbFormat, pmtTarget->cbFormat);
+ }
+ if (pmtTarget->pUnk != NULL) {
+ // pUnk should not be used.
+ pmtTarget->pUnk->AddRef();
+ }
+}
+
void _FreeMediaType(AM_MEDIA_TYPE& mt)
{
if (mt.cbFormat != 0) {
@@ -70,14 +84,36 @@ void _FreeMediaType(AM_MEDIA_TYPE& mt)
mt.pUnk = NULL;
}
}
-
} // end namespace
+typedef QList<QSize> SizeList;
+Q_GLOBAL_STATIC(SizeList, commonPreviewResolutions)
+
+static HRESULT getPin(IBaseFilter *filter, PIN_DIRECTION pinDir, IPin **pin);
+
+
class SampleGrabberCallbackPrivate : public ISampleGrabberCB
{
public:
- STDMETHODIMP_(ULONG) AddRef() { return 1; }
- STDMETHODIMP_(ULONG) Release() { return 2; }
+ explicit SampleGrabberCallbackPrivate(DSCameraSession *session)
+ : m_ref(1)
+ , m_session(session)
+ { }
+
+ virtual ~SampleGrabberCallbackPrivate() { }
+
+ STDMETHODIMP_(ULONG) AddRef()
+ {
+ return InterlockedIncrement(&m_ref);
+ }
+
+ STDMETHODIMP_(ULONG) Release()
+ {
+ ULONG ref = InterlockedDecrement(&m_ref);
+ if (ref == 0)
+ delete this;
+ return ref;
+ }
STDMETHODIMP QueryInterface(REFIID riid, void **ppvObject)
{
@@ -101,143 +137,53 @@ public:
return E_NOTIMPL;
}
- STDMETHODIMP BufferCB(double Time, BYTE *pBuffer, long BufferLen)
+ STDMETHODIMP BufferCB(double time, BYTE *pBuffer, long bufferLen)
{
- if (!cs || active) {
- return S_OK;
- }
-
- if ((cs->StillMediaType.majortype != MEDIATYPE_Video) ||
- (cs->StillMediaType.formattype != FORMAT_VideoInfo) ||
- (cs->StillMediaType.cbFormat < sizeof(VIDEOINFOHEADER))) {
- return VFW_E_INVALIDMEDIATYPE;
- }
-
- active = true;
-
- if(toggle == true) {
- toggle = false;
- }
- else {
- toggle = true;
- }
-
- if(toggle) {
- active = false;
- return S_OK;
- }
-
- bool check = false;
- cs->mutex.lock();
+ // We display frames as they arrive, the presentation time is
+ // irrelevant
+ Q_UNUSED(time);
- if (cs->frames.size() > LIMIT_FRAME) {
- check = true;
+ if (m_session) {
+ m_session->onFrameAvailable(reinterpret_cast<const char *>(pBuffer),
+ bufferLen);
}
- if (check) {
- cs->mutex.unlock();
- // Frames building up. We're going to drop some here
- Sleep(100);
- active = false;
- return S_OK;
- }
- cs->mutex.unlock();
-
- unsigned char* vidData = new unsigned char[BufferLen];
- memcpy(vidData, pBuffer, BufferLen);
-
- cs->mutex.lock();
-
- video_buffer* buf = new video_buffer;
- buf->buffer = vidData;
- buf->length = BufferLen;
- buf->time = (qint64)Time;
-
- cs->frames.append(buf);
-
- cs->mutex.unlock();
-
- QMetaObject::invokeMethod(cs, "captureFrame", Qt::QueuedConnection);
-
- active = false;
-
return S_OK;
}
- DSCameraSession* cs;
- bool active;
- bool toggle;
+private:
+ ULONG m_ref;
+ DSCameraSession *m_session;
};
DSCameraSession::DSCameraSession(QObject *parent)
: QObject(parent)
- ,m_currentImageId(0)
- , needsHorizontalMirroring(false)
- , needsVerticalMirroring(true)
-{
- pBuild = NULL;
- pGraph = NULL;
- pCap = NULL;
- pSG_Filter = NULL;
- pSG = NULL;
-
- opened = false;
- available = false;
- resolutions.clear();
- m_state = QCamera::UnloadedState;
- m_device = "default";
-
- StillCapCB = new SampleGrabberCallbackPrivate;
- StillCapCB->cs = this;
- StillCapCB->active = false;
- StillCapCB->toggle = false;
-
- m_output = 0;
- m_surface = 0;
- pixelF = QVideoFrame::Format_Invalid;
-
- graph = false;
- active = false;
-
- ::CoInitialize(NULL);
+ , m_graphBuilder(Q_NULLPTR)
+ , m_filterGraph(Q_NULLPTR)
+ , m_sourceDeviceName(QLatin1String("default"))
+ , m_sourceFilter(Q_NULLPTR)
+ , m_needsHorizontalMirroring(false)
+ , m_previewFilter(Q_NULLPTR)
+ , m_previewSampleGrabber(Q_NULLPTR)
+ , m_nullRendererFilter(Q_NULLPTR)
+ , m_previewStarted(false)
+ , m_surface(Q_NULLPTR)
+ , m_previewPixelFormat(QVideoFrame::Format_Invalid)
+ , m_readyForCapture(false)
+ , m_imageIdCounter(0)
+ , m_currentImageId(-1)
+ , m_status(QCamera::UnloadedStatus)
+{
+ ZeroMemory(&m_sourcePreferredFormat, sizeof(m_sourcePreferredFormat));
+
+ connect(this, SIGNAL(statusChanged(QCamera::Status)),
+ this, SLOT(updateReadyForCapture()));
}
DSCameraSession::~DSCameraSession()
{
- if (opened) {
- closeStream();
- }
-
- CoUninitialize();
-
- SAFE_RELEASE(pCap);
- SAFE_RELEASE(pSG_Filter);
- SAFE_RELEASE(pGraph);
- SAFE_RELEASE(pBuild);
-
- if (StillCapCB) {
- delete StillCapCB;
- }
-}
-
-int DSCameraSession::captureImage(const QString &fileName)
-{
- emit readyForCaptureChanged(false);
-
- // We're going to do this in one big synchronous call
- m_currentImageId++;
- if (fileName.isEmpty()) {
- m_snapshot = "img.jpg";
- } else {
- m_snapshot = fileName;
- }
-
- if (!active) {
- startStream();
- }
-
- return m_currentImageId;
+ unload();
}
void DSCameraSession::setSurface(QAbstractVideoSurface* surface)
@@ -245,415 +191,260 @@ void DSCameraSession::setSurface(QAbstractVideoSurface* surface)
m_surface = surface;
}
-bool DSCameraSession::deviceReady()
-{
- return available;
-}
-
-bool DSCameraSession::pictureInProgress()
+void DSCameraSession::setDevice(const QString &device)
{
- return m_snapshot.isEmpty();
+ m_sourceDeviceName = device;
}
-int DSCameraSession::framerate() const
+bool DSCameraSession::load()
{
- return -1;
-}
+ unload();
-void DSCameraSession::setFrameRate(int rate)
-{
- Q_UNUSED(rate)
-}
+ setStatus(QCamera::LoadingStatus);
-int DSCameraSession::brightness() const
-{
- return -1;
-}
+ bool succeeded = createFilterGraph();
+ if (succeeded)
+ setStatus(QCamera::LoadedStatus);
+ else
+ setStatus(QCamera::UnavailableStatus);
-void DSCameraSession::setBrightness(int b)
-{
- Q_UNUSED(b)
+ return succeeded;
}
-int DSCameraSession::contrast() const
+bool DSCameraSession::unload()
{
- return -1;
-}
-
-void DSCameraSession::setContrast(int c)
-{
- Q_UNUSED(c)
-}
+ if (!m_graphBuilder)
+ return false;
-int DSCameraSession::saturation() const
-{
- return -1;
-}
+ if (!stopPreview())
+ return false;
-void DSCameraSession::setSaturation(int s)
-{
- Q_UNUSED(s)
-}
+ setStatus(QCamera::UnloadingStatus);
-int DSCameraSession::hue() const
-{
- return -1;
-}
+ m_needsHorizontalMirroring = false;
+ m_sourcePreferredResolution = QSize();
+ _FreeMediaType(m_sourcePreferredFormat);
+ ZeroMemory(&m_sourcePreferredFormat, sizeof(m_sourcePreferredFormat));
+ SAFE_RELEASE(m_sourceFilter);
+ SAFE_RELEASE(m_previewSampleGrabber);
+ SAFE_RELEASE(m_previewFilter);
+ SAFE_RELEASE(m_nullRendererFilter);
+ SAFE_RELEASE(m_filterGraph);
+ SAFE_RELEASE(m_graphBuilder);
-void DSCameraSession::setHue(int h)
-{
- Q_UNUSED(h)
-}
+ setStatus(QCamera::UnloadedStatus);
-int DSCameraSession::sharpness() const
-{
- return -1;
-}
-
-void DSCameraSession::setSharpness(int s)
-{
- Q_UNUSED(s)
+ return true;
}
-int DSCameraSession::zoom() const
+bool DSCameraSession::startPreview()
{
- return -1;
-}
+ if (m_previewStarted)
+ return true;
-void DSCameraSession::setZoom(int z)
-{
- Q_UNUSED(z)
-}
+ if (!m_graphBuilder)
+ return false;
-bool DSCameraSession::backlightCompensation() const
-{
- return false;
-}
+ setStatus(QCamera::StartingStatus);
-void DSCameraSession::setBacklightCompensation(bool b)
-{
- Q_UNUSED(b)
-}
+ HRESULT hr = S_OK;
+ IMediaControl* pControl = 0;
-int DSCameraSession::whitelevel() const
-{
- return -1;
-}
+ if (!configurePreviewFormat()) {
+ qWarning() << "Failed to configure preview format";
+ goto failed;
+ }
-void DSCameraSession::setWhitelevel(int w)
-{
- Q_UNUSED(w)
-}
+ if (!connectGraph())
+ goto failed;
-int DSCameraSession::rotation() const
-{
- return 0;
-}
+ if (m_surface)
+ m_surface->start(m_previewSurfaceFormat);
-void DSCameraSession::setRotation(int r)
-{
- Q_UNUSED(r)
-}
+ hr = m_filterGraph->QueryInterface(IID_IMediaControl, (void**)&pControl);
+ if (FAILED(hr)) {
+ qWarning() << "failed to get stream control";
+ goto failed;
+ }
+ hr = pControl->Run();
+ pControl->Release();
-bool DSCameraSession::flash() const
-{
- return false;
-}
+ if (FAILED(hr)) {
+ qWarning() << "failed to start";
+ goto failed;
+ }
-void DSCameraSession::setFlash(bool f)
-{
- Q_UNUSED(f)
-}
+ setStatus(QCamera::ActiveStatus);
+ m_previewStarted = true;
+ return true;
-bool DSCameraSession::autofocus() const
-{
+failed:
+ // go back to a clean state
+ if (m_surface && m_surface->isActive())
+ m_surface->stop();
+ disconnectGraph();
+ setStatus(QCamera::LoadedStatus);
return false;
}
-void DSCameraSession::setAutofocus(bool f)
+bool DSCameraSession::stopPreview()
{
- Q_UNUSED(f)
-}
-
-QSize DSCameraSession::frameSize() const
-{
- return m_windowSize;
-}
-
-void DSCameraSession::setFrameSize(const QSize& s)
-{
- if (supportedResolutions(pixelF).contains(s))
- m_windowSize = s;
- else
- qWarning() << "frame size if not supported for current pixel format, no change";
-}
-
-void DSCameraSession::setDevice(const QString &device)
-{
- if(opened)
- stopStream();
-
- if(graph) {
- SAFE_RELEASE(pCap);
- SAFE_RELEASE(pSG_Filter);
- SAFE_RELEASE(pGraph);
- SAFE_RELEASE(pBuild);
- }
-
- available = false;
- m_state = QCamera::LoadedState;
-
- CoInitialize(NULL);
-
- ICreateDevEnum* pDevEnum = NULL;
- IEnumMoniker* pEnum = NULL;
+ if (!m_previewStarted)
+ return true;
- // Create the System device enumerator
- HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL,
- CLSCTX_INPROC_SERVER, IID_ICreateDevEnum,
- reinterpret_cast<void**>(&pDevEnum));
- if(SUCCEEDED(hr)) {
- // Create the enumerator for the video capture category
- hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnum, 0);
- if (S_OK == hr) {
- pEnum->Reset();
- // go through and find all video capture devices
- IMoniker* pMoniker = NULL;
- IMalloc *mallocInterface = 0;
- CoGetMalloc(1, (LPMALLOC*)&mallocInterface);
- while(pEnum->Next(1, &pMoniker, NULL) == S_OK) {
+ setStatus(QCamera::StoppingStatus);
- BSTR strName = 0;
- hr = pMoniker->GetDisplayName(NULL, NULL, &strName);
- if (SUCCEEDED(hr)) {
- QString temp(QString::fromWCharArray(strName));
- mallocInterface->Free(strName);
- if(temp.contains(device)) {
- available = true;
- }
- }
-
- pMoniker->Release();
- }
- mallocInterface->Release();
- pEnum->Release();
- }
- pDevEnum->Release();
+ IMediaControl* pControl = 0;
+ HRESULT hr = m_filterGraph->QueryInterface(IID_IMediaControl, (void**)&pControl);
+ if (FAILED(hr)) {
+ qWarning() << "failed to get stream control";
+ goto failed;
}
- CoUninitialize();
- if(available) {
- m_device = QByteArray(device.toUtf8().constData());
- graph = createFilterGraph();
- if(!graph)
- available = false;
+ hr = pControl->Stop();
+ pControl->Release();
+ if (FAILED(hr)) {
+ qWarning() << "failed to stop";
+ goto failed;
}
-}
-QList<QVideoFrame::PixelFormat> DSCameraSession::supportedPixelFormats()
-{
- return types;
-}
-
-QVideoFrame::PixelFormat DSCameraSession::pixelFormat() const
-{
- return pixelF;
-}
+ disconnectGraph();
-void DSCameraSession::setPixelFormat(QVideoFrame::PixelFormat fmt)
-{
- pixelF = fmt;
-}
+ m_previewStarted = false;
+ setStatus(QCamera::LoadedStatus);
+ return true;
-QList<QSize> DSCameraSession::supportedResolutions(QVideoFrame::PixelFormat format)
-{
- if (!resolutions.contains(format))
- return QList<QSize>();
- return resolutions.value(format);
+failed:
+ setStatus(QCamera::ActiveStatus);
+ return false;
}
-bool DSCameraSession::setOutputLocation(const QUrl &sink)
+void DSCameraSession::setStatus(QCamera::Status status)
{
- m_sink = sink;
+ if (m_status == status)
+ return;
- return true;
+ m_status = status;
+ emit statusChanged(m_status);
}
-QUrl DSCameraSession::outputLocation() const
+bool DSCameraSession::isReadyForCapture()
{
- return m_sink;
+ return m_readyForCapture;
}
-qint64 DSCameraSession::position() const
+void DSCameraSession::updateReadyForCapture()
{
- return timeStamp.elapsed();
+ bool isReady = (m_status == QCamera::ActiveStatus && m_imageCaptureFileName.isEmpty());
+ if (isReady != m_readyForCapture) {
+ m_readyForCapture = isReady;
+ emit readyForCaptureChanged(isReady);
+ }
}
-int DSCameraSession::state() const
+int DSCameraSession::captureImage(const QString &fileName)
{
- return int(m_state);
-}
+ ++m_imageIdCounter;
-void DSCameraSession::record()
-{
- if(opened) {
- return;
+ if (!m_readyForCapture) {
+ emit captureError(m_imageIdCounter, QCameraImageCapture::NotReadyError,
+ tr("Camera not ready for capture"));
+ return m_imageIdCounter;
}
- if(m_surface) {
+ m_imageCaptureFileName = m_fileNameGenerator.generateFileName(fileName,
+ QMediaStorageLocation::Pictures,
+ QLatin1String("IMG_"),
+ QLatin1String("jpg"));
- if (!graph)
- graph = createFilterGraph();
+ updateReadyForCapture();
- if (types.isEmpty()) {
- if (pixelF == QVideoFrame::Format_Invalid)
- pixelF = QVideoFrame::Format_RGB32;
- if (!m_windowSize.isValid())
- m_windowSize = QSize(320, 240);
- }
- actualFormat = QVideoSurfaceFormat(m_windowSize, pixelF);
-
- if (!m_surface->isFormatSupported(actualFormat) && !types.isEmpty()) {
- // enumerate through camera formats
- QList<QVideoFrame::PixelFormat> fmts = m_surface->supportedPixelFormats();
- foreach(QVideoFrame::PixelFormat f, types) {
- if (fmts.contains(f)) {
- pixelF = f;
- if (!resolutions[pixelF].contains(m_windowSize)) {
- Q_ASSERT(!resolutions[pixelF].isEmpty());
- m_windowSize = resolutions[pixelF].first();
- }
- actualFormat = QVideoSurfaceFormat(m_windowSize, pixelF);
- break;
- }
- }
- }
+ m_captureMutex.lock();
+ m_currentImageId = m_imageIdCounter;
+ m_captureMutex.unlock();
- if (m_surface->isFormatSupported(actualFormat)) {
- m_surface->start(actualFormat);
- m_state = QCamera::ActiveState;
- emit stateChanged(QCamera::ActiveState);
- } else {
- qWarning() << "surface doesn't support camera format, cant start";
- m_state = QCamera::LoadedState;
- emit stateChanged(QCamera::LoadedState);
- return;
- }
- } else {
- qWarning() << "no video surface, cant start";
- m_state = QCamera::LoadedState;
- emit stateChanged(QCamera::LoadedState);
- return;
- }
-
- opened = startStream();
-
- if (!opened) {
- qWarning() << "Stream did not open";
- m_state = QCamera::LoadedState;
- emit stateChanged(QCamera::LoadedState);
- }
+ return m_imageIdCounter;
}
-void DSCameraSession::pause()
+void DSCameraSession::onFrameAvailable(const char *frameData, long len)
{
- suspendStream();
-}
+ // !!! Not called on the main thread
-void DSCameraSession::stop()
-{
- if(!opened) {
- return;
- }
+    // Deep copy: the data might be modified or freed after the callback returns
+ QByteArray data(frameData, len);
- stopStream();
- opened = false;
- m_state = QCamera::LoadedState;
- emit stateChanged(QCamera::LoadedState);
-}
+ m_presentMutex.lock();
-void DSCameraSession::captureFrame()
-{
- if(m_surface && frames.count() > 0) {
+ // (We should be getting only RGB32 data)
+ int stride = m_previewSize.width() * 4;
- QImage image;
+ // In case the source produces frames faster than we can display them,
+ // only keep the most recent one
+ m_currentFrame = QVideoFrame(new QMemoryVideoBuffer(data, stride),
+ m_previewSize,
+ m_previewPixelFormat);
- if(pixelF == QVideoFrame::Format_RGB24) {
+ m_presentMutex.unlock();
- mutex.lock();
+ // Image capture
+ QMutexLocker locker(&m_captureMutex);
+ if (m_currentImageId != -1 && !m_capturedFrame.isValid()) {
+ m_capturedFrame = m_currentFrame;
+ emit imageExposed(m_currentImageId);
+ }
- image = QImage(frames.at(0)->buffer,m_windowSize.width(),m_windowSize.height(),
- QImage::Format_RGB888).rgbSwapped().mirrored(needsHorizontalMirroring, needsVerticalMirroring);
+ QMetaObject::invokeMethod(this, "presentFrame", Qt::QueuedConnection);
+}
- QVideoFrame frame(image);
- frame.setStartTime(frames.at(0)->time);
+void DSCameraSession::presentFrame()
+{
+ m_presentMutex.lock();
- mutex.unlock();
+ if (m_currentFrame.isValid() && m_surface) {
+ m_surface->present(m_currentFrame);
+ m_currentFrame = QVideoFrame();
+ }
- m_surface->present(frame);
+ m_presentMutex.unlock();
- } else if (pixelF == QVideoFrame::Format_RGB32) {
+ m_captureMutex.lock();
- mutex.lock();
+ if (m_capturedFrame.isValid()) {
+ Q_ASSERT(m_previewPixelFormat == QVideoFrame::Format_RGB32);
- image = QImage(frames.at(0)->buffer,m_windowSize.width(),m_windowSize.height(),
- QImage::Format_RGB32).mirrored(needsHorizontalMirroring, needsVerticalMirroring);
+ m_capturedFrame.map(QAbstractVideoBuffer::ReadOnly);
- QVideoFrame frame(image);
- frame.setStartTime(frames.at(0)->time);
+ QImage image = QImage(m_capturedFrame.bits(),
+ m_previewSize.width(), m_previewSize.height(),
+ QImage::Format_RGB32);
- mutex.unlock();
+ image = image.mirrored(m_needsHorizontalMirroring); // also causes a deep copy of the data
- m_surface->present(frame);
+ m_capturedFrame.unmap();
- } else {
- qWarning() << "TODO:captureFrame() format =" << pixelF;
- }
+ emit imageCaptured(m_currentImageId, image);
- if (m_snapshot.length() > 0) {
- emit imageCaptured(m_currentImageId, image);
- image.save(m_snapshot,"JPG");
- emit imageSaved(m_currentImageId, m_snapshot);
- m_snapshot.clear();
- emit readyForCaptureChanged(true);
- }
+ QtConcurrent::run(this, &DSCameraSession::saveCapturedImage,
+ m_currentImageId, image, m_imageCaptureFileName);
- mutex.lock();
- if (frames.isEmpty()) {
- qWarning() << "Frames over-run";
- }
+ m_imageCaptureFileName.clear();
+ m_currentImageId = -1;
+ updateReadyForCapture();
- video_buffer* buf = frames.takeFirst();
- delete buf->buffer;
- delete buf;
- mutex.unlock();
+ m_capturedFrame = QVideoFrame();
}
+
+ m_captureMutex.unlock();
}
-HRESULT DSCameraSession::getPin(IBaseFilter *pFilter, PIN_DIRECTION PinDir, IPin **ppPin)
+void DSCameraSession::saveCapturedImage(int id, const QImage &image, const QString &path)
{
- *ppPin = 0;
- IEnumPins *pEnum = 0;
- IPin *pPin = 0;
-
- HRESULT hr = pFilter->EnumPins(&pEnum);
- if(FAILED(hr)) {
- return hr;
- }
-
- pEnum->Reset();
- while(pEnum->Next(1, &pPin, NULL) == S_OK) {
- PIN_DIRECTION ThisPinDir;
- pPin->QueryDirection(&ThisPinDir);
- if(ThisPinDir == PinDir) {
- pEnum->Release();
- *ppPin = pPin;
- return S_OK;
- }
- pEnum->Release();
+ if (image.save(path, "JPG")) {
+ emit imageSaved(id, path);
+ } else {
+ emit captureError(id, QCameraImageCapture::ResourceError,
+ tr("Could not save image to file."));
}
- pEnum->Release();
- return E_FAIL;
}
bool DSCameraSession::createFilterGraph()
@@ -661,35 +452,34 @@ bool DSCameraSession::createFilterGraph()
    // Previously contained in <qedit.h>.
static const IID iID_ISampleGrabber = { 0x6B652FFF, 0x11FE, 0x4fce, { 0x92, 0xAD, 0x02, 0x66, 0xB5, 0xD7, 0xC7, 0x8F } };
static const CLSID cLSID_SampleGrabber = { 0xC1F400A0, 0x3F08, 0x11d3, { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
+ static const CLSID cLSID_NullRenderer = { 0xC1F400A4, 0x3F08, 0x11d3, { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
HRESULT hr;
IMoniker* pMoniker = NULL;
ICreateDevEnum* pDevEnum = NULL;
IEnumMoniker* pEnum = NULL;
- CoInitialize(NULL);
-
// Create the filter graph
hr = CoCreateInstance(CLSID_FilterGraph,NULL,CLSCTX_INPROC,
- IID_IGraphBuilder, (void**)&pGraph);
+ IID_IGraphBuilder, (void**)&m_filterGraph);
if (FAILED(hr)) {
- qWarning()<<"failed to create filter graph";
- return false;
+ qWarning() << "failed to create filter graph";
+ goto failed;
}
// Create the capture graph builder
hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC,
- IID_ICaptureGraphBuilder2, (void**)&pBuild);
+ IID_ICaptureGraphBuilder2, (void**)&m_graphBuilder);
if (FAILED(hr)) {
- qWarning()<<"failed to create graph builder";
- return false;
+ qWarning() << "failed to create graph builder";
+ goto failed;
}
// Attach the filter graph to the capture graph
- hr = pBuild->SetFiltergraph(pGraph);
+ hr = m_graphBuilder->SetFiltergraph(m_filterGraph);
if (FAILED(hr)) {
- qWarning()<<"failed to connect capture graph and filter graph";
- return false;
+ qWarning() << "failed to connect capture graph and filter graph";
+ goto failed;
}
// Find the Capture device
@@ -712,8 +502,8 @@ bool DSCameraSession::createFilterGraph()
if (SUCCEEDED(hr)) {
QString output = QString::fromWCharArray(strName);
mallocInterface->Free(strName);
- if (m_device.contains(output.toUtf8().constData())) {
- hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pCap);
+ if (m_sourceDeviceName.contains(output)) {
+ hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&m_sourceFilter);
if (SUCCEEDED(hr)) {
pMoniker->Release();
break;
@@ -723,9 +513,9 @@ bool DSCameraSession::createFilterGraph()
pMoniker->Release();
}
mallocInterface->Release();
- if (NULL == pCap)
+ if (NULL == m_sourceFilter)
{
- if (m_device.contains("default"))
+ if (m_sourceDeviceName.contains(QLatin1String("default")))
{
pEnum->Reset();
// still have to loop to discard bind to storage failure case
@@ -740,7 +530,7 @@ bool DSCameraSession::createFilterGraph()
// No need to get the description, just grab it
- hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pCap);
+ hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&m_sourceFilter);
pPropBag->Release();
pMoniker->Release();
if (SUCCEEDED(hr)) {
@@ -757,443 +547,314 @@ bool DSCameraSession::createFilterGraph()
}
}
+ if (!m_sourceFilter) {
+ qWarning() << "No capture device found";
+ goto failed;
+ }
+
// Sample grabber filter
hr = CoCreateInstance(cLSID_SampleGrabber, NULL,CLSCTX_INPROC,
- IID_IBaseFilter, (void**)&pSG_Filter);
+ IID_IBaseFilter, (void**)&m_previewFilter);
if (FAILED(hr)) {
qWarning() << "failed to create sample grabber";
- return false;
+ goto failed;
}
- hr = pSG_Filter->QueryInterface(iID_ISampleGrabber, (void**)&pSG);
+ hr = m_previewFilter->QueryInterface(iID_ISampleGrabber, (void**)&m_previewSampleGrabber);
if (FAILED(hr)) {
qWarning() << "failed to get sample grabber";
- return false;
+ goto failed;
}
- pSG->SetOneShot(FALSE);
- pSG->SetBufferSamples(TRUE);
- pSG->SetCallback(StillCapCB, 1);
-
- updateProperties();
- CoUninitialize();
- return true;
-}
-
-void DSCameraSession::updateProperties()
-{
- HRESULT hr;
- AM_MEDIA_TYPE *pmt = NULL;
- VIDEOINFOHEADER *pvi = NULL;
- VIDEO_STREAM_CONFIG_CAPS scc;
- IAMStreamConfig* pConfig = 0;
- hr = pBuild->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,pCap,
- IID_IAMStreamConfig, (void**)&pConfig);
- if (FAILED(hr)) {
- qWarning()<<"failed to get config on capture device";
- return;
+ {
+ SampleGrabberCallbackPrivate *callback = new SampleGrabberCallbackPrivate(this);
+ m_previewSampleGrabber->SetCallback(callback, 1);
+ m_previewSampleGrabber->SetOneShot(FALSE);
+ m_previewSampleGrabber->SetBufferSamples(FALSE);
+ callback->Release();
}
- int iCount;
- int iSize;
- hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize);
+    // Null renderer. Its input is connected to the sample grabber's output;
+    // it simply discards the samples it receives.
+ hr = CoCreateInstance(cLSID_NullRenderer, NULL, CLSCTX_INPROC,
+ IID_IBaseFilter, (void**)&m_nullRendererFilter);
if (FAILED(hr)) {
- qWarning()<<"failed to get capabilities";
- return;
+ qWarning() << "failed to create null renderer";
+ goto failed;
}
- QList<QSize> sizes;
- QVideoFrame::PixelFormat f = QVideoFrame::Format_Invalid;
+ updateSourceCapabilities();
- types.clear();
- resolutions.clear();
- IAMVideoControl *pVideoControl = 0;
- hr = pBuild->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,pCap,
- IID_IAMVideoControl, (void**)&pVideoControl);
- if (FAILED(hr)) {
- qWarning() << "Failed to get the video control";
- } else {
- IPin *pPin = 0;
- if (pCap) {
- hr = getPin(pCap, PINDIR_OUTPUT, &pPin);
- if (FAILED(hr)) {
- qWarning() << "Failed to get the pin for the video control";
- } else {
- long supportedModes;
- hr = pVideoControl->GetCaps(pPin, &supportedModes);
- if (FAILED(hr)) {
- qWarning() << "Failed to get the supported modes of the video control";
- } else if (supportedModes & VideoControlFlag_FlipHorizontal || supportedModes & VideoControlFlag_FlipVertical) {
- long mode;
- hr = pVideoControl->GetMode(pPin, &mode);
- if (FAILED(hr)) {
- qWarning() << "Failed to get the mode of the video control";
- } else {
- if (supportedModes & VideoControlFlag_FlipHorizontal)
- needsHorizontalMirroring = (mode & VideoControlFlag_FlipHorizontal);
- if (supportedModes & VideoControlFlag_FlipVertical)
- needsVerticalMirroring = (mode & VideoControlFlag_FlipVertical);
- }
- }
- pPin->Release();
- }
- }
- pVideoControl->Release();
- }
- for (int iIndex = 0; iIndex < iCount; iIndex++) {
- hr = pConfig->GetStreamCaps(iIndex, &pmt, reinterpret_cast<BYTE*>(&scc));
- if (hr == S_OK) {
- pvi = (VIDEOINFOHEADER*)pmt->pbFormat;
- if ((pmt->majortype == MEDIATYPE_Video) &&
- (pmt->formattype == FORMAT_VideoInfo)) {
- // Add types
- if (pmt->subtype == MEDIASUBTYPE_RGB24) {
- if (!types.contains(QVideoFrame::Format_RGB24)) {
- types.append(QVideoFrame::Format_RGB24);
- f = QVideoFrame::Format_RGB24;
- }
- } else if (pmt->subtype == MEDIASUBTYPE_RGB32) {
- if (!types.contains(QVideoFrame::Format_RGB32)) {
- types.append(QVideoFrame::Format_RGB32);
- f = QVideoFrame::Format_RGB32;
- }
- } else if (pmt->subtype == MEDIASUBTYPE_YUY2) {
- if (!types.contains(QVideoFrame::Format_YUYV)) {
- types.append(QVideoFrame::Format_YUYV);
- f = QVideoFrame::Format_YUYV;
- }
- } else if (pmt->subtype == MEDIASUBTYPE_MJPG) {
- } else if (pmt->subtype == MEDIASUBTYPE_I420) {
- if (!types.contains(QVideoFrame::Format_YUV420P)) {
- types.append(QVideoFrame::Format_YUV420P);
- f = QVideoFrame::Format_YUV420P;
- }
- } else if (pmt->subtype == MEDIASUBTYPE_RGB555) {
- if (!types.contains(QVideoFrame::Format_RGB555)) {
- types.append(QVideoFrame::Format_RGB555);
- f = QVideoFrame::Format_RGB555;
- }
- } else if (pmt->subtype == MEDIASUBTYPE_YVU9) {
- } else if (pmt->subtype == MEDIASUBTYPE_UYVY) {
- if (!types.contains(QVideoFrame::Format_UYVY)) {
- types.append(QVideoFrame::Format_UYVY);
- f = QVideoFrame::Format_UYVY;
- }
- } else {
- qWarning() << "UNKNOWN FORMAT: " << pmt->subtype.Data1;
- }
- // Add resolutions
- QSize res(pvi->bmiHeader.biWidth, pvi->bmiHeader.biHeight);
- if (!resolutions.contains(f)) {
- sizes.clear();
- resolutions.insert(f,sizes);
- }
- resolutions[f].append(res);
- }
- }
- }
- pConfig->Release();
+ return true;
- if (!types.isEmpty()) {
- // Add RGB formats and let directshow do color space conversion if required.
- if (!types.contains(QVideoFrame::Format_RGB24)) {
- types.append(QVideoFrame::Format_RGB24);
- resolutions.insert(QVideoFrame::Format_RGB24, resolutions[types.first()]);
- }
- if (!types.contains(QVideoFrame::Format_RGB32)) {
- types.append(QVideoFrame::Format_RGB32);
- resolutions.insert(QVideoFrame::Format_RGB32, resolutions[types.first()]);
- }
- }
+failed:
+ m_needsHorizontalMirroring = false;
+ m_sourcePreferredResolution = QSize();
+ _FreeMediaType(m_sourcePreferredFormat);
+ ZeroMemory(&m_sourcePreferredFormat, sizeof(m_sourcePreferredFormat));
+ SAFE_RELEASE(m_sourceFilter);
+ SAFE_RELEASE(m_previewSampleGrabber);
+ SAFE_RELEASE(m_previewFilter);
+ SAFE_RELEASE(m_nullRendererFilter);
+ SAFE_RELEASE(m_filterGraph);
+ SAFE_RELEASE(m_graphBuilder);
+
+ return false;
}
-bool DSCameraSession::setProperties()
+bool DSCameraSession::configurePreviewFormat()
{
- CoInitialize(NULL);
+    // We only support RGB32; if the capture source doesn't support
+    // that format, the graph builder will automatically insert a
+    // converter.
- HRESULT hr;
- AM_MEDIA_TYPE am_media_type;
- AM_MEDIA_TYPE *pmt = NULL;
- VIDEOINFOHEADER *pvi = NULL;
- VIDEO_STREAM_CONFIG_CAPS scc;
-
- IAMStreamConfig* pConfig = 0;
- hr = pBuild->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, pCap,
- IID_IAMStreamConfig, (void**)&pConfig);
- if(FAILED(hr)) {
- qWarning()<<"failed to get config on capture device";
+ if (m_surface && !m_surface->supportedPixelFormats(QAbstractVideoBuffer::NoHandle)
+ .contains(QVideoFrame::Format_RGB32)) {
+ qWarning() << "Video surface needs to support RGB32 pixel format";
return false;
}
- int iCount;
- int iSize;
- hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize);
- if(FAILED(hr)) {
- qWarning()<<"failed to get capabilities";
+ m_previewPixelFormat = QVideoFrame::Format_RGB32;
+ m_previewSize = m_sourcePreferredResolution;
+ m_previewSurfaceFormat = QVideoSurfaceFormat(m_previewSize,
+ m_previewPixelFormat,
+ QAbstractVideoBuffer::NoHandle);
+ m_previewSurfaceFormat.setScanLineDirection(QVideoSurfaceFormat::BottomToTop);
+
+ HRESULT hr;
+ IAMStreamConfig* pConfig = 0;
+ hr = m_graphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE,
+ &MEDIATYPE_Video,
+ m_sourceFilter,
+ IID_IAMStreamConfig, (void**)&pConfig);
+ if (FAILED(hr)) {
+ qWarning() << "Failed to get config for capture device";
return false;
}
- bool setFormatOK = false;
- for (int iIndex = 0; iIndex < iCount; iIndex++) {
- hr = pConfig->GetStreamCaps(iIndex, &pmt, reinterpret_cast<BYTE*>(&scc));
- if (hr == S_OK) {
- pvi = (VIDEOINFOHEADER*)pmt->pbFormat;
+ hr = pConfig->SetFormat(&m_sourcePreferredFormat);
- if ((pmt->majortype == MEDIATYPE_Video) &&
- (pmt->formattype == FORMAT_VideoInfo)) {
- if ((actualFormat.frameWidth() == pvi->bmiHeader.biWidth) &&
- (actualFormat.frameHeight() == pvi->bmiHeader.biHeight)) {
- hr = pConfig->SetFormat(pmt);
- _FreeMediaType(*pmt);
- if(FAILED(hr)) {
- qWarning()<<"failed to set format:" << hr;
- qWarning()<<"but going to continue";
- continue; // We going to continue
- } else {
- setFormatOK = true;
- break;
- }
- }
- }
- }
- }
pConfig->Release();
- if (!setFormatOK) {
- qWarning() << "unable to set any format for camera";
- return false;
- }
-
- // Set Sample Grabber config to match capture
- ZeroMemory(&am_media_type, sizeof(am_media_type));
- am_media_type.majortype = MEDIATYPE_Video;
-
- if (actualFormat.pixelFormat() == QVideoFrame::Format_RGB32)
- am_media_type.subtype = MEDIASUBTYPE_RGB32;
- else if (actualFormat.pixelFormat() == QVideoFrame::Format_RGB24)
- am_media_type.subtype = MEDIASUBTYPE_RGB24;
- else if (actualFormat.pixelFormat() == QVideoFrame::Format_YUYV)
- am_media_type.subtype = MEDIASUBTYPE_YUY2;
- else if (actualFormat.pixelFormat() == QVideoFrame::Format_YUV420P)
- am_media_type.subtype = MEDIASUBTYPE_I420;
- else if (actualFormat.pixelFormat() == QVideoFrame::Format_RGB555)
- am_media_type.subtype = MEDIASUBTYPE_RGB555;
- else if (actualFormat.pixelFormat() == QVideoFrame::Format_UYVY)
- am_media_type.subtype = MEDIASUBTYPE_UYVY;
- else {
- qWarning()<<"unknown format? for SG";
+ if (FAILED(hr)) {
+ qWarning() << "Unable to set video format on capture device";
return false;
}
- am_media_type.formattype = FORMAT_VideoInfo;
- hr = pSG->SetMediaType(&am_media_type);
+ // Set sample grabber format (always RGB32)
+ AM_MEDIA_TYPE grabberFormat;
+ ZeroMemory(&grabberFormat, sizeof(grabberFormat));
+ grabberFormat.majortype = MEDIATYPE_Video;
+ grabberFormat.subtype = MEDIASUBTYPE_RGB32;
+ grabberFormat.formattype = FORMAT_VideoInfo;
+ hr = m_previewSampleGrabber->SetMediaType(&grabberFormat);
if (FAILED(hr)) {
- qWarning()<<"failed to set video format on grabber";
+ qWarning() << "Failed to set video format on grabber";
return false;
}
- pSG->GetConnectedMediaType(&StillMediaType);
-
- CoUninitialize();
-
return true;
}
-bool DSCameraSession::openStream()
+bool DSCameraSession::connectGraph()
{
- //Opens the stream for reading and allocates any necessary resources needed
- //Return true if success, false otherwise
-
- if (opened) {
- return true;
- }
-
- if (!graph) {
- graph = createFilterGraph();
- if(!graph) {
- qWarning()<<"failed to create filter graph in openStream";
- return false;
- }
+ HRESULT hr = m_filterGraph->AddFilter(m_sourceFilter, L"Capture Filter");
+ if (FAILED(hr)) {
+ qWarning() << "failed to add capture filter to graph";
+ return false;
}
- CoInitialize(NULL);
-
- HRESULT hr;
-
- hr = pGraph->AddFilter(pCap, L"Capture Filter");
+ hr = m_filterGraph->AddFilter(m_previewFilter, L"Sample Grabber");
if (FAILED(hr)) {
- qWarning()<<"failed to create capture filter";
+ qWarning() << "failed to add sample grabber to graph";
return false;
}
- hr = pGraph->AddFilter(pSG_Filter, L"Sample Grabber");
+ hr = m_filterGraph->AddFilter(m_nullRendererFilter, L"Null Renderer");
if (FAILED(hr)) {
- qWarning()<<"failed to add sample grabber";
+ qWarning() << "failed to add null renderer to graph";
return false;
}
- hr = pBuild->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
- pCap, NULL, pSG_Filter);
+ hr = m_graphBuilder->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
+ m_sourceFilter,
+ m_previewFilter,
+ m_nullRendererFilter);
if (FAILED(hr)) {
- qWarning() << "failed to renderstream" << hr;
+ qWarning() << "Graph failed to connect filters" << hr;
return false;
}
- pSG->GetConnectedMediaType(&StillMediaType);
- pSG_Filter->Release();
-
- CoUninitialize();
return true;
}
-void DSCameraSession::closeStream()
+void DSCameraSession::disconnectGraph()
{
- // Closes the stream and internally frees any resources used
- HRESULT hr;
- IMediaControl* pControl = 0;
-
- hr = pGraph->QueryInterface(IID_IMediaControl,(void**)&pControl);
- if (FAILED(hr)) {
- qWarning()<<"failed to get stream control";
- return;
+ IPin *pPin = 0;
+ HRESULT hr = getPin(m_sourceFilter, PINDIR_OUTPUT, &pPin);
+ if (SUCCEEDED(hr)) {
+ m_filterGraph->Disconnect(pPin);
+ pPin->Release();
+ pPin = NULL;
}
- hr = pControl->StopWhenReady();
- if (FAILED(hr)) {
- qWarning()<<"failed to stop";
- pControl->Release();
- return;
+ hr = getPin(m_previewFilter, PINDIR_INPUT, &pPin);
+ if (SUCCEEDED(hr)) {
+ m_filterGraph->Disconnect(pPin);
+ pPin->Release();
+ pPin = NULL;
}
- pControl->Release();
-
- opened = false;
- IPin *pPin = 0;
-
- if (pCap)
- {
- hr = getPin(pCap, PINDIR_OUTPUT, &pPin);
- if(FAILED(hr)) {
- qWarning()<<"failed to disconnect capture filter";
- return;
- }
+ hr = getPin(m_previewFilter, PINDIR_OUTPUT, &pPin);
+ if (SUCCEEDED(hr)) {
+ m_filterGraph->Disconnect(pPin);
+ pPin->Release();
+ pPin = NULL;
}
- pGraph->Disconnect(pPin);
- if (FAILED(hr)) {
- qWarning()<<"failed to disconnect grabber filter";
- return;
+ hr = getPin(m_nullRendererFilter, PINDIR_INPUT, &pPin);
+ if (SUCCEEDED(hr)) {
+ m_filterGraph->Disconnect(pPin);
+ pPin->Release();
+ pPin = NULL;
}
- hr = getPin(pSG_Filter,PINDIR_INPUT,&pPin);
- pGraph->Disconnect(pPin);
- pGraph->RemoveFilter(pSG_Filter);
- pGraph->RemoveFilter(pCap);
-
- SAFE_RELEASE(pCap);
- SAFE_RELEASE(pSG_Filter);
- SAFE_RELEASE(pGraph);
- SAFE_RELEASE(pBuild);
-
- graph = false;
+ m_filterGraph->RemoveFilter(m_nullRendererFilter);
+ m_filterGraph->RemoveFilter(m_previewFilter);
+ m_filterGraph->RemoveFilter(m_sourceFilter);
}
-bool DSCameraSession::startStream()
+void DSCameraSession::updateSourceCapabilities()
{
- // Starts the stream, by emitting either QVideoPackets
- // or QvideoFrames, depending on Format chosen
- if (!graph)
- graph = createFilterGraph();
-
- if (!setProperties()) {
- qWarning() << "Couldn't set properties (retrying)";
- closeStream();
- if (!openStream()) {
- qWarning() << "Retry to open strean failed";
- return false;
- }
- }
-
- if (!opened) {
- opened = openStream();
- if (!opened) {
- qWarning() << "failed to openStream()";
- return false;
- }
- }
-
HRESULT hr;
- IMediaControl* pControl = 0;
-
- hr = pGraph->QueryInterface(IID_IMediaControl, (void**)&pControl);
- if (FAILED(hr)) {
- qWarning() << "failed to get stream control";
- return false;
- }
+ AM_MEDIA_TYPE *pmt = NULL;
+ VIDEOINFOHEADER *pvi = NULL;
+ VIDEO_STREAM_CONFIG_CAPS scc;
+ IAMStreamConfig* pConfig = 0;
- hr = pControl->Run();
- pControl->Release();
+ m_needsHorizontalMirroring = false;
+ m_sourcePreferredResolution = QSize();
+ _FreeMediaType(m_sourcePreferredFormat);
+ ZeroMemory(&m_sourcePreferredFormat, sizeof(m_sourcePreferredFormat));
+ IAMVideoControl *pVideoControl = 0;
+ hr = m_graphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
+ m_sourceFilter,
+ IID_IAMVideoControl, (void**)&pVideoControl);
if (FAILED(hr)) {
- qWarning() << "failed to start";
- return false;
+ qWarning() << "Failed to get the video control";
+ } else {
+ IPin *pPin = 0;
+ hr = getPin(m_sourceFilter, PINDIR_OUTPUT, &pPin);
+ if (FAILED(hr)) {
+ qWarning() << "Failed to get the pin for the video control";
+ } else {
+ long supportedModes;
+ hr = pVideoControl->GetCaps(pPin, &supportedModes);
+ if (FAILED(hr)) {
+ qWarning() << "Failed to get the supported modes of the video control";
+ } else if (supportedModes & VideoControlFlag_FlipHorizontal) {
+ long mode;
+ hr = pVideoControl->GetMode(pPin, &mode);
+ if (FAILED(hr))
+ qWarning() << "Failed to get the mode of the video control";
+ else if (supportedModes & VideoControlFlag_FlipHorizontal)
+ m_needsHorizontalMirroring = (mode & VideoControlFlag_FlipHorizontal);
+ }
+ pPin->Release();
+ }
+ pVideoControl->Release();
}
- active = true;
- return true;
-}
-void DSCameraSession::stopStream()
-{
- // Stops the stream from emitting packets
- HRESULT hr;
-
- IMediaControl* pControl = 0;
- hr = pGraph->QueryInterface(IID_IMediaControl, (void**)&pControl);
+ hr = m_graphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
+ m_sourceFilter,
+ IID_IAMStreamConfig, (void**)&pConfig);
if (FAILED(hr)) {
- qWarning() << "failed to get stream control";
+ qWarning() << "failed to get config on capture device";
return;
}
- hr = pControl->Stop();
- pControl->Release();
+ int iCount;
+ int iSize;
+ hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize);
if (FAILED(hr)) {
- qWarning() << "failed to stop";
+ qWarning() << "failed to get capabilities";
return;
}
- active = false;
- if (opened) {
- closeStream();
+    // Use the preferred pixel format (the first in the list), then pick the
+    // highest available resolution among the typical resolutions used for
+    // camera preview.
+ if (commonPreviewResolutions->isEmpty())
+ populateCommonResolutions();
+
+ long maxPixelCount = 0;
+ for (int iIndex = 0; iIndex < iCount; ++iIndex) {
+ hr = pConfig->GetStreamCaps(iIndex, &pmt, reinterpret_cast<BYTE*>(&scc));
+ if (hr == S_OK) {
+ if ((pmt->majortype == MEDIATYPE_Video) &&
+ (pmt->formattype == FORMAT_VideoInfo) &&
+ (!m_sourcePreferredFormat.cbFormat ||
+ m_sourcePreferredFormat.subtype == pmt->subtype)) {
+
+ pvi = reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat);
+
+ QSize resolution(pvi->bmiHeader.biWidth, pvi->bmiHeader.biHeight);
+ long pixelCount = resolution.width() * resolution.height();
+
+ if (!m_sourcePreferredFormat.cbFormat ||
+ (pixelCount > maxPixelCount && commonPreviewResolutions->contains(resolution))) {
+ _FreeMediaType(m_sourcePreferredFormat);
+ _CopyMediaType(&m_sourcePreferredFormat, pmt);
+ m_sourcePreferredResolution = resolution;
+ maxPixelCount = pixelCount;
+ }
+ }
+ _FreeMediaType(*pmt);
+ }
}
+
+ pConfig->Release();
+
+ if (!m_sourcePreferredResolution.isValid())
+ m_sourcePreferredResolution = QSize(640, 480);
}
-void DSCameraSession::suspendStream()
+void DSCameraSession::populateCommonResolutions()
{
- // Pauses the stream
- HRESULT hr;
+ commonPreviewResolutions->append(QSize(1920, 1080)); // 1080p
+ commonPreviewResolutions->append(QSize(1280, 720)); // 720p
+ commonPreviewResolutions->append(QSize(1024, 576)); // WSVGA
+ commonPreviewResolutions->append(QSize(720, 480)); // 480p (16:9)
+ commonPreviewResolutions->append(QSize(640, 480)); // 480p (4:3)
+ commonPreviewResolutions->append(QSize(352, 288)); // CIF
+ commonPreviewResolutions->append(QSize(320, 240)); // QVGA
+}
- IMediaControl* pControl = 0;
- hr = pGraph->QueryInterface(IID_IMediaControl, (void**)&pControl);
- if (FAILED(hr)) {
- qWarning() << "failed to get stream control";
- return;
- }
+HRESULT getPin(IBaseFilter *pFilter, PIN_DIRECTION PinDir, IPin **ppPin)
+{
+ *ppPin = 0;
+ IEnumPins *pEnum = 0;
+ IPin *pPin = 0;
- hr = pControl->Pause();
- pControl->Release();
+ HRESULT hr = pFilter->EnumPins(&pEnum);
if (FAILED(hr)) {
- qWarning() << "failed to pause";
- return;
+ return hr;
}
- active = false;
-}
-
-void DSCameraSession::resumeStream()
-{
- // Resumes a paused stream
- startStream();
+ pEnum->Reset();
+ while (pEnum->Next(1, &pPin, NULL) == S_OK) {
+ PIN_DIRECTION ThisPinDir;
+ pPin->QueryDirection(&ThisPinDir);
+ if (ThisPinDir == PinDir) {
+ pEnum->Release();
+ *ppPin = pPin;
+ return S_OK;
+ }
+ pPin->Release();
+ }
+ pEnum->Release();
+ return E_FAIL;
}
QT_END_NAMESPACE
-
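The rewritten dscamerasession.cpp above hands frames from the DirectShow callback thread to the GUI thread by storing only the most recent frame under a mutex and then scheduling presentFrame() through a queued invocation. A minimal, self-contained sketch of that handoff pattern follows; the class, member and signal names are illustrative, not the plugin's actual types.

#include <QMutex>
#include <QMutexLocker>
#include <QObject>
#include <QVideoFrame>

// Illustrative "latest frame wins" handoff between the capture thread and the GUI thread.
class FrameRelay : public QObject
{
    Q_OBJECT
public:
    // Called on the capture (DirectShow callback) thread.
    void onFrameAvailable(const QVideoFrame &frame)
    {
        {
            QMutexLocker locker(&m_mutex);
            m_pendingFrame = frame; // overwrite any frame that was never presented
        }
        // Run presentFrame() on the thread that owns this object (the GUI thread).
        QMetaObject::invokeMethod(this, "presentFrame", Qt::QueuedConnection);
    }

Q_SIGNALS:
    void frameReady(const QVideoFrame &frame);

private Q_SLOTS:
    // Runs on the GUI thread.
    void presentFrame()
    {
        QVideoFrame frame;
        {
            QMutexLocker locker(&m_mutex);
            frame = m_pendingFrame;
            m_pendingFrame = QVideoFrame(); // mark as consumed
        }
        if (frame.isValid())
            emit frameReady(frame); // e.g. forward to QAbstractVideoSurface::present()
    }

private:
    QMutex m_mutex;
    QVideoFrame m_pendingFrame;
};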
diff --git a/src/plugins/directshow/camera/dscamerasession.h b/src/plugins/directshow/camera/dscamerasession.h
index 0fe12dec3..2ba9c6109 100644
--- a/src/plugins/directshow/camera/dscamerasession.h
+++ b/src/plugins/directshow/camera/dscamerasession.h
@@ -1,6 +1,6 @@
/****************************************************************************
**
-** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
+** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
@@ -51,6 +51,7 @@
#include <QtMultimedia/qvideoframe.h>
#include <QtMultimedia/qabstractvideosurface.h>
#include <QtMultimedia/qvideosurfaceformat.h>
+#include <private/qmediastoragelocation_p.h>
#include <tchar.h>
#include <dshow.h>
@@ -75,18 +76,8 @@ struct ISampleGrabber;
QT_BEGIN_NAMESPACE
-class DSVideoRenderer;
class SampleGrabberCallbackPrivate;
-
-struct video_buffer {
- unsigned char* buffer;
- int length;
- qint64 time;
-};
-
-typedef QMap<unsigned int, QList<QSize> > FormatResolutionMap;
-
class DSCameraSession : public QObject
{
Q_OBJECT
@@ -94,113 +85,82 @@ public:
DSCameraSession(QObject *parent = 0);
~DSCameraSession();
- bool deviceReady();
- bool pictureInProgress();
-
- // camera controls
-
- int framerate() const;
- void setFrameRate(int rate);
- int brightness() const;
- void setBrightness(int b);
- int contrast() const;
- void setContrast(int c);
- int saturation() const;
- void setSaturation(int s);
- int hue() const;
- void setHue(int h);
- int sharpness() const;
- void setSharpness(int s);
- int zoom() const;
- void setZoom(int z);
- bool backlightCompensation() const;
- void setBacklightCompensation(bool);
- int whitelevel() const;
- void setWhitelevel(int w);
- int rotation() const;
- void setRotation(int r);
- bool flash() const;
- void setFlash(bool f);
- bool autofocus() const;
- void setAutofocus(bool f);
-
- QSize frameSize() const;
- void setFrameSize(const QSize& s);
- void setDevice(const QString &device);
- QList<QVideoFrame::PixelFormat> supportedPixelFormats();
- QVideoFrame::PixelFormat pixelFormat() const;
- void setPixelFormat(QVideoFrame::PixelFormat fmt);
- QList<QSize> supportedResolutions(QVideoFrame::PixelFormat format);
+ QCamera::Status status() const { return m_status; }
- // media control
-
- bool setOutputLocation(const QUrl &sink);
- QUrl outputLocation() const;
- qint64 position() const;
- int state() const;
- void record();
- void pause();
- void stop();
+ void setDevice(const QString &device);
- void setSurface(QAbstractVideoSurface* surface);
+ bool load();
+ bool unload();
+ bool startPreview();
+ bool stopPreview();
+ bool isReadyForCapture();
int captureImage(const QString &fileName);
- AM_MEDIA_TYPE StillMediaType;
- QList<video_buffer*> frames;
- SampleGrabberCallbackPrivate* StillCapCB;
-
- QMutex mutex;
+ void setSurface(QAbstractVideoSurface* surface);
Q_SIGNALS:
- void stateChanged(QCamera::State);
+ void statusChanged(QCamera::Status);
+ void imageExposed(int id);
void imageCaptured(int id, const QImage &preview);
void imageSaved(int id, const QString &fileName);
void readyForCaptureChanged(bool);
+ void captureError(int id, int error, const QString &errorString);
private Q_SLOTS:
- void captureFrame();
+ void presentFrame();
+ void updateReadyForCapture();
private:
- QVideoSurfaceFormat actualFormat;
- QList<QVideoFrame::PixelFormat> types;
-
- QTime timeStamp;
- bool graph;
- bool active;
- bool opened;
- bool available;
- QCamera::State m_state;
- QByteArray m_device;
- QUrl m_sink;
- DSVideoRenderer* m_output;
- QAbstractVideoSurface* m_surface;
- QVideoFrame::PixelFormat pixelF;
- QSize m_windowSize;
- FormatResolutionMap resolutions;
+ void setStatus(QCamera::Status status);
+ void populateCommonResolutions();
- ICaptureGraphBuilder2* pBuild;
- IGraphBuilder* pGraph;
- IBaseFilter* pCap;
- IBaseFilter* pSG_Filter;
- ISampleGrabber *pSG;
+ void onFrameAvailable(const char *frameData, long len);
+ void saveCapturedImage(int id, const QImage &image, const QString &path);
-
- QString m_snapshot;
- int m_currentImageId;
- bool needsHorizontalMirroring;
- bool needsVerticalMirroring;
-protected:
- HRESULT getPin(IBaseFilter *pFilter, PIN_DIRECTION PinDir, IPin **ppPin);
bool createFilterGraph();
- void updateProperties();
- bool setProperties();
- bool openStream();
- void closeStream();
- bool startStream();
- void stopStream();
- void suspendStream();
- void resumeStream();
+ bool connectGraph();
+ void disconnectGraph();
+ void updateSourceCapabilities();
+ bool configurePreviewFormat();
+
+ QMutex m_presentMutex;
+ QMutex m_captureMutex;
+
+ // Capture Graph
+ ICaptureGraphBuilder2* m_graphBuilder;
+ IGraphBuilder* m_filterGraph;
+
+ // Source (camera)
+ QString m_sourceDeviceName;
+ IBaseFilter* m_sourceFilter;
+ AM_MEDIA_TYPE m_sourcePreferredFormat;
+ QSize m_sourcePreferredResolution;
+ bool m_needsHorizontalMirroring;
+
+ // Preview
+ IBaseFilter *m_previewFilter;
+ ISampleGrabber *m_previewSampleGrabber;
+ IBaseFilter *m_nullRendererFilter;
+ QVideoFrame m_currentFrame;
+ bool m_previewStarted;
+ QAbstractVideoSurface* m_surface;
+ QVideoSurfaceFormat m_previewSurfaceFormat;
+ QVideoFrame::PixelFormat m_previewPixelFormat;
+ QSize m_previewSize;
+
+ // Image capture
+ QString m_imageCaptureFileName;
+ QMediaStorageLocation m_fileNameGenerator;
+ bool m_readyForCapture;
+ int m_imageIdCounter;
+ int m_currentImageId;
+ QVideoFrame m_capturedFrame;
+
+ // Internal state
+ QCamera::Status m_status;
+
+ friend class SampleGrabberCallbackPrivate;
};
QT_END_NAMESPACE
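The header above declares saveCapturedImage(), which the session runs on a thread-pool thread via QtConcurrent::run() so the JPEG encode and disk write stay off the GUI thread, with the result reported back through signals. A stand-alone sketch of the same idea, assuming an illustrative ImageSaver class that is not part of the patch:

#include <QImage>
#include <QObject>
#include <QString>
#include <QtConcurrent>

// Illustrative helper: encode and write the image on a thread-pool thread.
class ImageSaver : public QObject
{
    Q_OBJECT
public:
    // Called on the GUI thread; returns immediately.
    void saveAsync(int id, const QImage &image, const QString &path)
    {
        QtConcurrent::run(this, &ImageSaver::doSave, id, image, path);
    }

Q_SIGNALS:
    void imageSaved(int id, const QString &path);
    void saveFailed(int id, const QString &path);

private:
    // Runs on a worker thread; signals emitted here reach receivers in other
    // threads through queued connections.
    void doSave(int id, const QImage &image, const QString &path)
    {
        if (image.save(path, "JPG"))
            emit imageSaved(id, path);
        else
            emit saveFailed(id, path);
    }
};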
diff --git a/src/plugins/directshow/camera/dsimagecapturecontrol.cpp b/src/plugins/directshow/camera/dsimagecapturecontrol.cpp
index e689b13d1..329f6d63a 100644
--- a/src/plugins/directshow/camera/dsimagecapturecontrol.cpp
+++ b/src/plugins/directshow/camera/dsimagecapturecontrol.cpp
@@ -46,15 +46,19 @@
QT_BEGIN_NAMESPACE
DSImageCaptureControl::DSImageCaptureControl(DSCameraSession *session)
- :QCameraImageCaptureControl(session), m_session(session), m_ready(false)
+ : QCameraImageCaptureControl(session)
+ , m_session(session)
{
- connect(m_session, SIGNAL(stateChanged(QCamera::State)), SLOT(updateState()));
+ connect(m_session, SIGNAL(imageExposed(int)),
+ this, SIGNAL(imageExposed(int)));
connect(m_session, SIGNAL(imageCaptured(int,QImage)),
this, SIGNAL(imageCaptured(int,QImage)));
connect(m_session, SIGNAL(imageSaved(int,QString)),
this, SIGNAL(imageSaved(int,QString)));
connect(m_session, SIGNAL(readyForCaptureChanged(bool)),
this, SIGNAL(readyForCaptureChanged(bool)));
+ connect(m_session, SIGNAL(captureError(int,int,QString)),
+ this, SIGNAL(error(int,int,QString)));
}
DSImageCaptureControl::~DSImageCaptureControl()
@@ -63,7 +67,7 @@ DSImageCaptureControl::~DSImageCaptureControl()
bool DSImageCaptureControl::isReadyForCapture() const
{
- return m_ready;
+ return m_session->isReadyForCapture();
}
int DSImageCaptureControl::capture(const QString &fileName)
@@ -71,12 +75,15 @@ int DSImageCaptureControl::capture(const QString &fileName)
return m_session->captureImage(fileName);
}
-void DSImageCaptureControl::updateState()
+QCameraImageCapture::DriveMode DSImageCaptureControl::driveMode() const
{
- bool ready = (m_session->state() == QCamera::ActiveState) &&
- !m_session->pictureInProgress();
- if(m_ready != ready)
- emit readyForCaptureChanged(m_ready = ready);
+ return QCameraImageCapture::SingleImageCapture;
+}
+
+void DSImageCaptureControl::setDriveMode(QCameraImageCapture::DriveMode mode)
+{
+ if (mode != QCameraImageCapture::SingleImageCapture)
+ qWarning("Drive mode not supported.");
}
QT_END_NAMESPACE
diff --git a/src/plugins/directshow/camera/dsimagecapturecontrol.h b/src/plugins/directshow/camera/dsimagecapturecontrol.h
index e39539191..af8765943 100644
--- a/src/plugins/directshow/camera/dsimagecapturecontrol.h
+++ b/src/plugins/directshow/camera/dsimagecapturecontrol.h
@@ -52,23 +52,18 @@ class DSImageCaptureControl : public QCameraImageCaptureControl
Q_OBJECT
public:
DSImageCaptureControl(DSCameraSession *session);
- virtual ~DSImageCaptureControl();
+ ~DSImageCaptureControl();
bool isReadyForCapture() const;
int capture(const QString &fileName);
- virtual QCameraImageCapture::DriveMode driveMode() const { return QCameraImageCapture::SingleImageCapture; }
- virtual void setDriveMode(QCameraImageCapture::DriveMode mode) { Q_UNUSED(mode) }
-
- virtual void cancelCapture() {}
-
-private slots:
- void updateState();
+ QCameraImageCapture::DriveMode driveMode() const;
+ void setDriveMode(QCameraImageCapture::DriveMode mode);
+ void cancelCapture() {}
private:
DSCameraSession *m_session;
- bool m_ready;
};
QT_END_NAMESPACE
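For context, the signals the control now forwards map onto the public QCameraImageCapture API. Below is a minimal application-side sketch of how they are typically consumed; the setup, the lambdas and the test.jpg file name are illustrative and not taken from the patch.

#include <QCamera>
#include <QCameraImageCapture>
#include <QDebug>
#include <QImage>
#include <QObject>
#include <memory>

// Illustrative consumer of the forwarded capture signals.
void setupCapture(QObject *parent)
{
    QCamera *camera = new QCamera(parent);
    QCameraImageCapture *imageCapture = new QCameraImageCapture(camera, parent);

    // Capture a single image the first time the backend reports it is ready.
    std::shared_ptr<bool> triggered(new bool(false));
    QObject::connect(imageCapture, &QCameraImageCapture::readyForCaptureChanged,
                     [imageCapture, triggered](bool ready) {
        if (ready && !*triggered) {
            *triggered = true;
            imageCapture->capture(QStringLiteral("test.jpg")); // returns the request id
        }
    });
    QObject::connect(imageCapture, &QCameraImageCapture::imageCaptured,
                     [](int id, const QImage &preview) {
        qDebug() << "captured" << id << preview.size();
    });
    QObject::connect(imageCapture, &QCameraImageCapture::imageSaved,
                     [](int id, const QString &fileName) {
        qDebug() << "saved" << id << "to" << fileName;
    });

    camera->setCaptureMode(QCamera::CaptureStillImage);
    camera->start();
}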
diff --git a/src/plugins/directshow/camera/dsvideodevicecontrol.cpp b/src/plugins/directshow/camera/dsvideodevicecontrol.cpp
index ead1060d4..28d4956dd 100644
--- a/src/plugins/directshow/camera/dsvideodevicecontrol.cpp
+++ b/src/plugins/directshow/camera/dsvideodevicecontrol.cpp
@@ -102,7 +102,6 @@ void DSVideoDeviceControl::enumerateDevices(QList<QByteArray> *devices, QStringL
devices->clear();
descriptions->clear();
- CoInitialize(NULL);
ICreateDevEnum* pDevEnum = NULL;
IEnumMoniker* pEnum = NULL;
// Create the System device enumerator
@@ -148,7 +147,6 @@ void DSVideoDeviceControl::enumerateDevices(QList<QByteArray> *devices, QStringL
}
pDevEnum->Release();
}
- CoUninitialize();
}
void DSVideoDeviceControl::setSelectedDevice(int index)
diff --git a/src/plugins/directshow/camera/dsvideowidgetcontrol.cpp b/src/plugins/directshow/camera/dsvideowidgetcontrol.cpp
deleted file mode 100644
index 35b795546..000000000
--- a/src/plugins/directshow/camera/dsvideowidgetcontrol.cpp
+++ /dev/null
@@ -1,253 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
-** Contact: http://www.qt-project.org/legal
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and Digia. For licensing terms and
-** conditions see http://qt.digia.com/licensing. For further information
-** use the contact form at http://qt.digia.com/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 2.1 as published by the Free Software
-** Foundation and appearing in the file LICENSE.LGPL included in the
-** packaging of this file. Please review the following information to
-** ensure the GNU Lesser General Public License version 2.1 requirements
-** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
-**
-** In addition, as a special exception, Digia gives you certain additional
-** rights. These rights are described in the Digia Qt LGPL Exception
-** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
-**
-** GNU General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU
-** General Public License version 3.0 as published by the Free Software
-** Foundation and appearing in the file LICENSE.GPL included in the
-** packaging of this file. Please review the following information to
-** ensure the GNU General Public License version 3.0 requirements will be
-** met: http://www.gnu.org/copyleft/gpl.html.
-**
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-#include <QtCore/qcoreevent.h>
-#include <QtCore/qtimer.h>
-
-#include "dsvideowidgetcontrol.h"
-#include "dscamerasession.h"
-
-QT_BEGIN_NAMESPACE
-
-DSVideoWidgetSurface::DSVideoWidgetSurface(QLabel *pWidget, QObject *parent)
- : QAbstractVideoSurface(parent)
-{
- widget = pWidget;
- myPixmap = 0;
-}
-
-QList<QVideoFrame::PixelFormat> DSVideoWidgetSurface::supportedPixelFormats(
- QAbstractVideoBuffer::HandleType handleType) const
-{
- if (handleType == QAbstractVideoBuffer::NoHandle) {
- return QList<QVideoFrame::PixelFormat>()
- << QVideoFrame::Format_RGB32
- << QVideoFrame::Format_RGB24;
- } else {
- return QList<QVideoFrame::PixelFormat>();
- }
-}
-
-
-bool DSVideoWidgetSurface::present(const QVideoFrame &frame)
-{
- QVideoFrame myFrame = frame;
- myFrame.map(QAbstractVideoBuffer::ReadOnly);
- QImage image(
- frame.bits(),
- frame.width(),
- frame.height(),
- frame.bytesPerLine(),
- imageFormat);
- if (image.isNull())
- {
- // Try to adapt
- QImage image2(
- frame.bits(),
- frame.width(),
- frame.height(),
- frame.bytesPerLine(),
- QImage::Format_RGB888);
- image = image2;
- }
- myFrame.unmap();
- delete myPixmap;
- myPixmap = new QPixmap(QPixmap::fromImage(image).scaled(widget->size()));
- widget->setPixmap(*myPixmap);
- widget->repaint();
- return true;
-}
-
-void DSVideoWidgetSurface::setImageFormat(QImage::Format fmt)
-{
- imageFormat = fmt;
-}
-
-void DSVideoWidgetSurface::updateVideoRect()
-{
-}
-
-void DSVideoWidgetSurface::paint(QPainter *painter)
-{
- Q_UNUSED(painter)
-}
-
-
-DSVideoWidgetControl::DSVideoWidgetControl(DSCameraSession* session, QObject *parent) :
- QVideoWidgetControl(parent),
- m_session(session),
- m_widget(new QLabel()),
- m_fullScreen(false)
-{
- m_widget->setSizePolicy(QSizePolicy::MinimumExpanding, QSizePolicy::MinimumExpanding);
- m_widget->setAlignment(Qt::AlignCenter);
- m_widget->setAttribute(Qt::WA_NoSystemBackground, true);
-
- surface = new DSVideoWidgetSurface(m_widget);
-
- QPalette palette;
- palette.setColor(QPalette::Background, Qt::black);
- m_widget->setPalette(palette);
- m_widget->setAutoFillBackground( true );
-
- // Request QEvents
- m_widget->installEventFilter(this);
- m_windowId = m_widget->effectiveWinId();
-
- surface->setImageFormat(QImage::Format_RGB888);
- session->setSurface(surface);
-}
-
-DSVideoWidgetControl::~DSVideoWidgetControl()
-{
- delete m_widget;
-}
-
-bool DSVideoWidgetControl::eventFilter(QObject *object, QEvent *e)
-{
- if (object == m_widget) {
- switch (e->type()) {
- case QEvent::ParentChange:
- case QEvent::WinIdChange:
- case QEvent::Show:
- m_windowId = m_widget->effectiveWinId();
- emit widgetUpdated();
- break;
- case QEvent::Resize:
- emit widgetResized(m_widget->size());
- break;
- case QEvent::PolishRequest:
- m_widget->ensurePolished();
- break;
-
- default:
- // Do nothing
- break;
- }
- }
- return false;
-}
-
-QWidget *DSVideoWidgetControl::videoWidget()
-{
- return m_widget;
-}
-
-Qt::AspectRatioMode DSVideoWidgetControl::aspectRatioMode() const
-{
- return m_aspectRatioMode;
-}
-
-void DSVideoWidgetControl::setAspectRatioMode(Qt::AspectRatioMode ratio)
-{
- if (m_aspectRatioMode==ratio) {
- return;
- }
- m_aspectRatioMode = ratio;
-
- if (m_aspectRatioMode == Qt::KeepAspectRatio)
- m_widget->setScaledContents(false);
- else {
- m_widget->setScaledContents(true);
- }
-}
-
-bool DSVideoWidgetControl::isFullScreen() const
-{
- return m_fullScreen;
-}
-
-void DSVideoWidgetControl::setFullScreen(bool fullScreen)
-{
- if (m_widget && !fullScreen && m_fullScreen) {
- m_widget->showNormal();
- m_fullScreen = false;
- } else if (m_widget && fullScreen) {
- m_widget->showFullScreen();
- m_fullScreen = true;
- }
-
- emit fullScreenChanged(fullScreen);
-}
-
-int DSVideoWidgetControl::brightness() const
-{
- return 0;
-}
-
-void DSVideoWidgetControl::setBrightness(int brightness)
-{
- Q_UNUSED(brightness);
-}
-
-int DSVideoWidgetControl::contrast() const
-{
- return 0;
-}
-
-void DSVideoWidgetControl::setContrast(int contrast)
-{
- Q_UNUSED(contrast);
-}
-
-int DSVideoWidgetControl::hue() const
-{
- return 0;
-}
-
-void DSVideoWidgetControl::setHue(int hue)
-{
- Q_UNUSED(hue);
-}
-
-int DSVideoWidgetControl::saturation() const
-{
- return 0;
-}
-
-void DSVideoWidgetControl::setSaturation(int saturation)
-{
- Q_UNUSED(saturation);
-}
-
-QT_END_NAMESPACE
-
-// End of file
diff --git a/src/plugins/directshow/camera/dsvideowidgetcontrol.h b/src/plugins/directshow/camera/dsvideowidgetcontrol.h
deleted file mode 100644
index 2a8775aee..000000000
--- a/src/plugins/directshow/camera/dsvideowidgetcontrol.h
+++ /dev/null
@@ -1,150 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
-** Contact: http://www.qt-project.org/legal
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and Digia. For licensing terms and
-** conditions see http://qt.digia.com/licensing. For further information
-** use the contact form at http://qt.digia.com/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 2.1 as published by the Free Software
-** Foundation and appearing in the file LICENSE.LGPL included in the
-** packaging of this file. Please review the following information to
-** ensure the GNU Lesser General Public License version 2.1 requirements
-** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
-**
-** In addition, as a special exception, Digia gives you certain additional
-** rights. These rights are described in the Digia Qt LGPL Exception
-** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
-**
-** GNU General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU
-** General Public License version 3.0 as published by the Free Software
-** Foundation and appearing in the file LICENSE.GPL included in the
-** packaging of this file. Please review the following information to
-** ensure the GNU General Public License version 3.0 requirements will be
-** met: http://www.gnu.org/copyleft/gpl.html.
-**
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-#ifndef DSVIDEOWIDGETCONTROL_H
-#define DSVIDEOWIDGETCONTROL_H
-
-#include <QtCore/qobject.h>
-#include <QtWidgets>
-#include <QtMultimedia/qvideoframe.h>
-#include <QtMultimedia/qabstractvideosurface.h>
-#include <QtMultimedia/qvideosurfaceformat.h>
-
-#include <qvideowidgetcontrol.h>
-#include "dscameracontrol.h"
-
-QT_BEGIN_NAMESPACE
-
-class DSVideoWidgetSurface : public QAbstractVideoSurface
-{
- Q_OBJECT
- public:
- DSVideoWidgetSurface(QLabel *pWidget, QObject *parent = 0);
-
- QList<QVideoFrame::PixelFormat> supportedPixelFormats(
- QAbstractVideoBuffer::HandleType handleType = QAbstractVideoBuffer::NoHandle) const;
-
- bool present(const QVideoFrame &frame);
-
- QRect videoRect() const { return targetRect; }
- void updateVideoRect();
-
- void paint(QPainter *painter);
- void setImageFormat(QImage::Format fmt);
-
- private:
- QLabel *widget;
- QImage::Format imageFormat;
- QRect targetRect;
- QSize imageSize;
- QRect sourceRect;
- QPixmap* myPixmap;
- };
-
-class DSVideoWidgetControl : public QVideoWidgetControl
-{
- Q_OBJECT
-
- DSVideoWidgetSurface* surface;
-public: // Constructor & Destructor
-
- DSVideoWidgetControl(DSCameraSession* session, QObject *parent = 0);
- virtual ~DSVideoWidgetControl();
-
-public: // QVideoWidgetControl
-
- QWidget *videoWidget();
-
- // Aspect Ratio
- Qt::AspectRatioMode aspectRatioMode() const;
- void setAspectRatioMode(Qt::AspectRatioMode ratio);
-
- // Full Screen
- bool isFullScreen() const;
- void setFullScreen(bool fullScreen);
-
- // Brightness
- int brightness() const;
- void setBrightness(int brightness);
-
- // Contrast
- int contrast() const;
- void setContrast(int contrast);
-
- // Hue
- int hue() const;
- void setHue(int hue);
-
- // Saturation
- int saturation() const;
- void setSaturation(int saturation);
-
-public: // Internal
-
- bool eventFilter(QObject *object, QEvent *event);
-
-/*
-Q_SIGNALS: // QVideoWidgetControl
-
- void fullScreenChanged(bool fullScreen);
- void brightnessChanged(int brightness);
- void contrastChanged(int contrast);
- void hueChanged(int hue);
- void saturationChanged(int saturation);
-*/
-
-Q_SIGNALS: // Internal Signals
-
- void widgetResized(QSize size);
- void widgetUpdated();
-
-private: // Data
-
- DSCameraSession* m_session;
- QLabel *m_widget;
- WId m_windowId;
- Qt::AspectRatioMode m_aspectRatioMode;
- bool m_fullScreen;
-};
-
-QT_END_NAMESPACE
-
-#endif // DSVideoWidgetControl_H
diff --git a/src/plugins/directshow/directshow.pro b/src/plugins/directshow/directshow.pro
index 4dad97113..982741a11 100644
--- a/src/plugins/directshow/directshow.pro
+++ b/src/plugins/directshow/directshow.pro
@@ -4,7 +4,7 @@ PLUGIN_TYPE=mediaservice
PLUGIN_CLASS_NAME = DSServicePlugin
load(qt_plugin)
-QT += multimedia
+QT += multimedia-private
HEADERS += dsserviceplugin.h
SOURCES += dsserviceplugin.cpp
diff --git a/src/plugins/directshow/dsserviceplugin.cpp b/src/plugins/directshow/dsserviceplugin.cpp
index d262febdd..954fab22c 100644
--- a/src/plugins/directshow/dsserviceplugin.cpp
+++ b/src/plugins/directshow/dsserviceplugin.cpp
@@ -79,15 +79,32 @@ extern const CLSID CLSID_VideoInputDeviceCategory;
QT_USE_NAMESPACE
+static int g_refCount = 0;
+void addRefCount()
+{
+ if (++g_refCount == 1)
+ CoInitialize(NULL);
+}
+
+void releaseRefCount()
+{
+ if (--g_refCount == 0)
+ CoUninitialize();
+}
+
QMediaService* DSServicePlugin::create(QString const& key)
{
#ifdef QMEDIA_DIRECTSHOW_CAMERA
- if (key == QLatin1String(Q_MEDIASERVICE_CAMERA))
+ if (key == QLatin1String(Q_MEDIASERVICE_CAMERA)) {
+ addRefCount();
return new DSCameraService;
+ }
#endif
#ifdef QMEDIA_DIRECTSHOW_PLAYER
- if (key == QLatin1String(Q_MEDIASERVICE_MEDIAPLAYER))
+ if (key == QLatin1String(Q_MEDIASERVICE_MEDIAPLAYER)) {
+ addRefCount();
return new DirectShowPlayerService;
+ }
#endif
return 0;
@@ -96,6 +113,7 @@ QMediaService* DSServicePlugin::create(QString const& key)
void DSServicePlugin::release(QMediaService *service)
{
delete service;
+ releaseRefCount();
}
QMediaServiceProviderHint::Features DSServicePlugin::supportedFeatures(
@@ -154,6 +172,8 @@ QString DSServicePlugin::deviceDescription(const QByteArray &service, const QByt
void DSServicePlugin::updateDevices() const
{
+ addRefCount();
+
m_defaultCameraDevice.clear();
DSVideoDeviceControl::enumerateDevices(&m_cameraDevices, &m_cameraDescriptions);
@@ -162,6 +182,8 @@ void DSServicePlugin::updateDevices() const
} else {
m_defaultCameraDevice = m_cameraDevices.first();
}
+
+ releaseRefCount();
}
#endif
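The plugin now pairs CoInitialize()/CoUninitialize() with a global reference count tied to service creation and release. For purely scoped uses such as updateDevices(), an RAII guard is a common alternative; the following sketch is hypothetical and not what the patch implements.

#include <objbase.h>

// Hypothetical RAII alternative: keep COM initialized for the lifetime of a scope.
class ScopedComInitializer
{
public:
    ScopedComInitializer() : m_ok(SUCCEEDED(CoInitialize(NULL))) {}
    ~ScopedComInitializer() { if (m_ok) CoUninitialize(); }

    bool isOk() const { return m_ok; }

private:
    // not copyable
    ScopedComInitializer(const ScopedComInitializer &);
    ScopedComInitializer &operator=(const ScopedComInitializer &);

    bool m_ok;
};

// Possible scoped use (not what the patch does):
// void DSServicePlugin::updateDevices() const
// {
//     ScopedComInitializer com;
//     DSVideoDeviceControl::enumerateDevices(&m_cameraDevices, &m_cameraDescriptions);
//     ...
// }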
diff --git a/src/plugins/directshow/player/directshowplayerservice.cpp b/src/plugins/directshow/player/directshowplayerservice.cpp
index 0afeacb85..1ac7669cd 100644
--- a/src/plugins/directshow/player/directshowplayerservice.cpp
+++ b/src/plugins/directshow/player/directshowplayerservice.cpp
@@ -111,7 +111,6 @@ DirectShowPlayerService::DirectShowPlayerService(QObject *parent)
, m_seekable(false)
, m_atEnd(false)
{
- CoInitialize(NULL);
m_playerControl = new DirectShowPlayerControl(this);
m_metaDataControl = new DirectShowMetaDataControl(this);
m_audioEndpointControl = new DirectShowAudioEndpointControl(this);
@@ -153,7 +152,6 @@ DirectShowPlayerService::~DirectShowPlayerService()
#endif
::CloseHandle(m_taskHandle);
- CoUninitialize();
}
QMediaControl *DirectShowPlayerService::requestControl(const char *name)
diff --git a/src/plugins/pulseaudio/qaudioinput_pulse.cpp b/src/plugins/pulseaudio/qaudioinput_pulse.cpp
index 83075beda..89dc08612 100644
--- a/src/plugins/pulseaudio/qaudioinput_pulse.cpp
+++ b/src/plugins/pulseaudio/qaudioinput_pulse.cpp
@@ -1,6 +1,6 @@
/****************************************************************************
**
-** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
+** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
@@ -54,10 +54,6 @@ QT_BEGIN_NAMESPACE
const int PeriodTimeMs = 50;
-// Map from void* (for userdata) to QPulseAudioInput instance
-// protected by pulse mainloop lock
-QMap<void *, QPulseAudioInput*> QPulseAudioInput::s_inputsMap;
-
static void inputStreamReadCallback(pa_stream *stream, size_t length, void *userdata)
{
Q_UNUSED(userdata);
@@ -136,8 +132,8 @@ void QPulseAudioInput::sourceInfoCallback(pa_context *context, const pa_source_i
Q_UNUSED(eol);
Q_ASSERT(userdata);
- QPulseAudioInput *that = QPulseAudioInput::s_inputsMap.value(userdata);
- if (that && i) {
+ if (i) {
+ QPulseAudioInput *that = reinterpret_cast<QPulseAudioInput*>(userdata);
that->m_volume = pa_sw_volume_to_linear(pa_cvolume_avg(&i->volume));
}
}
@@ -149,12 +145,11 @@ void QPulseAudioInput::inputVolumeCallback(pa_context *context, int success, voi
if (!success)
qWarning() << "QAudioInput: failed to set input volume";
- QPulseAudioInput *that = QPulseAudioInput::s_inputsMap.value(userdata);
+ QPulseAudioInput *that = reinterpret_cast<QPulseAudioInput*>(userdata);
// Regardless of success or failure, we update the volume property
- if (that && that->m_stream) {
+ if (that->m_stream)
pa_context_get_source_info_by_index(context, pa_stream_get_device_index(that->m_stream), sourceInfoCallback, userdata);
- }
}
QPulseAudioInput::QPulseAudioInput(const QByteArray &device)
@@ -175,31 +170,39 @@ QPulseAudioInput::QPulseAudioInput(const QByteArray &device)
{
m_timer = new QTimer(this);
connect(m_timer, SIGNAL(timeout()), SLOT(userFeed()));
-
- QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
- pa_threaded_mainloop_lock(pulseEngine->mainloop());
- s_inputsMap.insert(this, this);
- pa_threaded_mainloop_unlock(pulseEngine->mainloop());
}
QPulseAudioInput::~QPulseAudioInput()
{
- QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
- pa_threaded_mainloop_lock(pulseEngine->mainloop());
- s_inputsMap.remove(this);
- pa_threaded_mainloop_unlock(pulseEngine->mainloop());
-
close();
disconnect(m_timer, SIGNAL(timeout()));
QCoreApplication::processEvents();
delete m_timer;
}
+void QPulseAudioInput::setError(QAudio::Error error)
+{
+ if (m_errorState == error)
+ return;
+
+ m_errorState = error;
+ emit errorChanged(error);
+}
+
QAudio::Error QPulseAudioInput::error() const
{
return m_errorState;
}
+void QPulseAudioInput::setState(QAudio::State state)
+{
+ if (m_deviceState == state)
+ return;
+
+ m_deviceState = state;
+ emit stateChanged(state);
+}
+
QAudio::State QPulseAudioInput::state() const
{
return m_deviceState;
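The setError()/setState() helpers added above follow the usual guarded-setter idiom: update the member only when the value actually changes and emit the corresponding change signal exactly once. In isolation the pattern looks roughly like this (illustrative class, not the plugin code):

#include <QObject>
#include <QtMultimedia/qaudio.h>

// Illustrative guarded setter: emit the change signal only on an actual transition.
class AudioStateHolder : public QObject
{
    Q_OBJECT
public:
    explicit AudioStateHolder(QObject *parent = 0)
        : QObject(parent), m_state(QAudio::StoppedState) {}

    QAudio::State state() const { return m_state; }

    void setState(QAudio::State state)
    {
        if (m_state == state)
            return; // no-op: avoid redundant emissions
        m_state = state;
        emit stateChanged(m_state);
    }

Q_SIGNALS:
    void stateChanged(QAudio::State state);

private:
    QAudio::State m_state;
};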
@@ -218,41 +221,45 @@ QAudioFormat QPulseAudioInput::format() const
void QPulseAudioInput::start(QIODevice *device)
{
- if (m_deviceState != QAudio::StoppedState)
- close();
+ setState(QAudio::StoppedState);
+ setError(QAudio::NoError);
- if (!m_pullMode && m_audioSource)
+ if (!m_pullMode && m_audioSource) {
delete m_audioSource;
+ m_audioSource = 0;
+ }
- m_pullMode = true;
- m_audioSource = device;
-
- m_deviceState = QAudio::ActiveState;
+ close();
if (!open())
return;
- emit stateChanged(m_deviceState);
+ m_pullMode = true;
+ m_audioSource = device;
+
+ setState(QAudio::ActiveState);
}
QIODevice *QPulseAudioInput::start()
{
- if (m_deviceState != QAudio::StoppedState)
- close();
+ setState(QAudio::StoppedState);
+ setError(QAudio::NoError);
- if (!m_pullMode && m_audioSource)
+ if (!m_pullMode && m_audioSource) {
delete m_audioSource;
+ m_audioSource = 0;
+ }
+
+ close();
+
+ if (!open())
+ return Q_NULLPTR;
m_pullMode = false;
m_audioSource = new InputPrivate(this);
m_audioSource->open(QIODevice::ReadOnly | QIODevice::Unbuffered);
- m_deviceState = QAudio::IdleState;
-
- if (!open())
- return 0;
-
- emit stateChanged(m_deviceState);
+ setState(QAudio::IdleState);
return m_audioSource;
}
@@ -262,56 +269,57 @@ void QPulseAudioInput::stop()
if (m_deviceState == QAudio::StoppedState)
return;
- m_errorState = QAudio::NoError;
- m_deviceState = QAudio::StoppedState;
-
close();
- emit stateChanged(m_deviceState);
+
+ setError(QAudio::NoError);
+ setState(QAudio::StoppedState);
}
bool QPulseAudioInput::open()
{
if (m_opened)
- return false;
+ return true;
-#ifdef DEBUG_PULSE
-// QTime now(QTime::currentTime());
-// qDebug()<<now.second()<<"s "<<now.msec()<<"ms :open()";
-#endif
- m_clockStamp.restart();
- m_timeStamp.restart();
- m_elapsedTimeOffset = 0;
+ QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
- if (m_streamName.isNull())
- m_streamName = QString(QLatin1String("QtmPulseStream-%1-%2")).arg(::getpid()).arg(quintptr(this)).toUtf8();
+ if (!pulseEngine->context() || pa_context_get_state(pulseEngine->context()) != PA_CONTEXT_READY) {
+ setError(QAudio::FatalError);
+ setState(QAudio::StoppedState);
+ return false;
+ }
pa_sample_spec spec = QPulseAudioInternal::audioFormatToSampleSpec(m_format);
if (!pa_sample_spec_valid(&spec)) {
- m_errorState = QAudio::OpenError;
- m_deviceState = QAudio::StoppedState;
- emit stateChanged(m_deviceState);
+ setError(QAudio::OpenError);
+ setState(QAudio::StoppedState);
return false;
}
m_spec = spec;
#ifdef DEBUG_PULSE
+// QTime now(QTime::currentTime());
+// qDebug()<<now.second()<<"s "<<now.msec()<<"ms :open()";
+#endif
+
+ if (m_streamName.isNull())
+ m_streamName = QString(QLatin1String("QtmPulseStream-%1-%2")).arg(::getpid()).arg(quintptr(this)).toUtf8();
+
+#ifdef DEBUG_PULSE
qDebug() << "Format: " << QPulseAudioInternal::sampleFormatToQString(spec.format);
qDebug() << "Rate: " << spec.rate;
qDebug() << "Channels: " << spec.channels;
qDebug() << "Frame size: " << pa_frame_size(&spec);
#endif
- QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
- pa_threaded_mainloop_lock(pulseEngine->mainloop());
+ pulseEngine->lock();
pa_channel_map channel_map;
pa_channel_map_init_extend(&channel_map, spec.channels, PA_CHANNEL_MAP_DEFAULT);
- if (!pa_channel_map_compatible(&channel_map, &spec)) {
+ if (!pa_channel_map_compatible(&channel_map, &spec))
qWarning() << "Channel map doesn't match sample specification!";
- }
m_stream = pa_stream_new(pulseEngine->context(), m_streamName.constData(), &spec, &channel_map);
@@ -338,13 +346,16 @@ bool QPulseAudioInput::open()
if (pa_stream_connect_record(m_stream, m_device.data(), &buffer_attr, (pa_stream_flags_t)flags) < 0) {
qWarning() << "pa_stream_connect_record() failed!";
- m_errorState = QAudio::FatalError;
+ pa_stream_unref(m_stream);
+ m_stream = 0;
+ pulseEngine->unlock();
+ setError(QAudio::OpenError);
+ setState(QAudio::StoppedState);
return false;
}
- while (pa_stream_get_state(m_stream) != PA_STREAM_READY) {
+ while (pa_stream_get_state(m_stream) != PA_STREAM_READY)
pa_threaded_mainloop_wait(pulseEngine->mainloop());
- }
const pa_buffer_attr *actualBufferAttr = pa_stream_get_buffer_attr(m_stream);
m_periodSize = actualBufferAttr->fragsize;
@@ -354,12 +365,16 @@ bool QPulseAudioInput::open()
setPulseVolume();
- pa_threaded_mainloop_unlock(pulseEngine->mainloop());
+ pulseEngine->unlock();
+
+ connect(pulseEngine, &QPulseAudioEngine::contextFailed, this, &QPulseAudioInput::onPulseContextFailed);
m_opened = true;
m_timer->start(m_periodTime);
- m_errorState = QAudio::NoError;
+ m_clockStamp.restart();
+ m_timeStamp.restart();
+ m_elapsedTimeOffset = 0;
m_totalTimeValue = 0;
return true;
@@ -367,21 +382,30 @@ bool QPulseAudioInput::open()
void QPulseAudioInput::close()
{
+ if (!m_opened)
+ return;
+
m_timer->stop();
+ QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
+
if (m_stream) {
- QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
- pa_threaded_mainloop_lock(pulseEngine->mainloop());
+ pulseEngine->lock();
+ pa_stream_set_state_callback(m_stream, 0, 0);
pa_stream_set_read_callback(m_stream, 0, 0);
+ pa_stream_set_underflow_callback(m_stream, 0, 0);
+ pa_stream_set_overflow_callback(m_stream, 0, 0);
pa_stream_disconnect(m_stream);
pa_stream_unref(m_stream);
m_stream = 0;
- pa_threaded_mainloop_unlock(pulseEngine->mainloop());
+ pulseEngine->unlock();
}
+ disconnect(pulseEngine, &QPulseAudioEngine::contextFailed, this, &QPulseAudioInput::onPulseContextFailed);
+
if (!m_pullMode && m_audioSource) {
delete m_audioSource;
m_audioSource = 0;
@@ -393,6 +417,7 @@ void QPulseAudioInput::close()
void QPulseAudioInput::setPulseVolume()
{
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
+ Q_ASSERT(pulseEngine->context() != 0);
pa_cvolume cvolume;
@@ -434,11 +459,8 @@ qint64 QPulseAudioInput::read(char *data, qint64 len)
{
m_bytesAvailable = checkBytesReady();
- if (m_deviceState != QAudio::ActiveState) {
- m_errorState = QAudio::NoError;
- m_deviceState = QAudio::ActiveState;
- emit stateChanged(m_deviceState);
- }
+ setError(QAudio::NoError);
+ setState(QAudio::ActiveState);
int readBytes = 0;
@@ -463,7 +485,8 @@ qint64 QPulseAudioInput::read(char *data, qint64 len)
#endif
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
- pa_threaded_mainloop_lock(pulseEngine->mainloop());
+ pulseEngine->lock();
+
const void *audioBuffer;
// Second and third parameters (audioBuffer and length) to pa_stream_peek are output parameters,
@@ -471,7 +494,7 @@ qint64 QPulseAudioInput::read(char *data, qint64 len)
// and the length is set to the length of this data.
if (pa_stream_peek(m_stream, &audioBuffer, &readLength) < 0) {
qWarning() << QString("pa_stream_peek() failed: %1").arg(pa_strerror(pa_context_errno(pa_stream_get_context(m_stream))));
- pa_threaded_mainloop_unlock(pulseEngine->mainloop());
+ pulseEngine->unlock();
return 0;
}
@@ -480,11 +503,10 @@ qint64 QPulseAudioInput::read(char *data, qint64 len)
actualLength = m_audioSource->write(static_cast<const char *>(audioBuffer), readLength);
if (actualLength < qint64(readLength)) {
- pa_threaded_mainloop_unlock(pulseEngine->mainloop());
+ pulseEngine->unlock();
- m_errorState = QAudio::UnderrunError;
- m_deviceState = QAudio::IdleState;
- emit stateChanged(m_deviceState);
+ setError(QAudio::UnderrunError);
+ setState(QAudio::IdleState);
return actualLength;
}
@@ -509,7 +531,7 @@ qint64 QPulseAudioInput::read(char *data, qint64 len)
readBytes += actualLength;
pa_stream_drop(m_stream);
- pa_threaded_mainloop_unlock(pulseEngine->mainloop());
+ pulseEngine->unlock();
if (!m_pullMode && readBytes >= len)
break;
@@ -534,22 +556,18 @@ void QPulseAudioInput::resume()
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_operation *operation;
- pa_threaded_mainloop_lock(pulseEngine->mainloop());
+ pulseEngine->lock();
operation = pa_stream_cork(m_stream, 0, inputStreamSuccessCallback, 0);
-
- while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
- pa_threaded_mainloop_wait(pulseEngine->mainloop());
-
+ pulseEngine->wait(operation);
pa_operation_unref(operation);
- pa_threaded_mainloop_unlock(pulseEngine->mainloop());
+ pulseEngine->unlock();
m_timer->start(m_periodTime);
- m_deviceState = QAudio::ActiveState;
-
- emit stateChanged(m_deviceState);
+ setState(QAudio::ActiveState);
+ setError(QAudio::NoError);
}
}
@@ -557,23 +575,23 @@ void QPulseAudioInput::setVolume(qreal vol)
{
if (vol >= 0.0 && vol <= 1.0) {
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
- pa_threaded_mainloop_lock(pulseEngine->mainloop());
+ pulseEngine->lock();
if (!qFuzzyCompare(m_volume, vol)) {
m_volume = vol;
if (m_opened) {
setPulseVolume();
}
}
- pa_threaded_mainloop_unlock(pulseEngine->mainloop());
+ pulseEngine->unlock();
}
}
qreal QPulseAudioInput::volume() const
{
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
- pa_threaded_mainloop_lock(pulseEngine->mainloop());
+ pulseEngine->lock();
qreal vol = m_volume;
- pa_threaded_mainloop_unlock(pulseEngine->mainloop());
+ pulseEngine->unlock();
return vol;
}
@@ -614,23 +632,21 @@ qint64 QPulseAudioInput::processedUSecs() const
void QPulseAudioInput::suspend()
{
if (m_deviceState == QAudio::ActiveState) {
+ setError(QAudio::NoError);
+ setState(QAudio::SuspendedState);
+
m_timer->stop();
- m_deviceState = QAudio::SuspendedState;
- emit stateChanged(m_deviceState);
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_operation *operation;
- pa_threaded_mainloop_lock(pulseEngine->mainloop());
+ pulseEngine->lock();
operation = pa_stream_cork(m_stream, 1, inputStreamSuccessCallback, 0);
-
- while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
- pa_threaded_mainloop_wait(pulseEngine->mainloop());
-
+ pulseEngine->wait(operation);
pa_operation_unref(operation);
- pa_threaded_mainloop_unlock(pulseEngine->mainloop());
+ pulseEngine->unlock();
}
}
@@ -685,6 +701,14 @@ void QPulseAudioInput::reset()
m_bytesAvailable = 0;
}
+void QPulseAudioInput::onPulseContextFailed()
+{
+ close();
+
+ setError(QAudio::FatalError);
+ setState(QAudio::StoppedState);
+}
+
InputPrivate::InputPrivate(QPulseAudioInput *audio)
{
m_audioDevice = qobject_cast<QPulseAudioInput*>(audio);
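
The two rewritten start() overloads above back the pull and push capture modes of the public Qt 5 QAudioInput API: start(QIODevice*) has the backend write into a caller-supplied device, while start() hands back the InputPrivate wrapper for the caller to read from. A minimal usage sketch of what each path corresponds to on the client side (not part of this patch; the format values and file name are illustrative):

#include <QAudioFormat>
#include <QAudioInput>
#include <QFile>

void captureSketch()
{
    QAudioFormat format;                      // illustrative settings; a real client should
    format.setSampleRate(44100);              // verify them with QAudioDeviceInfo::isFormatSupported()
    format.setChannelCount(1);
    format.setSampleSize(16);
    format.setCodec("audio/pcm");
    format.setSampleType(QAudioFormat::SignedInt);

    QAudioInput input(format);

    // Pull mode: maps to QPulseAudioInput::start(QIODevice*) above (m_pullMode == true);
    // the backend writes captured samples into the device we pass in.
    QFile sink("capture.raw");
    sink.open(QIODevice::WriteOnly);
    input.start(&sink);

    // Push mode: maps to QPulseAudioInput::start() above (m_pullMode == false); restarting
    // the same input here just for illustration. The returned QIODevice is the InputPrivate
    // wrapper, normally read from a slot connected to its readyRead() signal.
    QIODevice *source = input.start();
    QByteArray chunk = source->read(input.periodSize());
    Q_UNUSED(chunk);
}
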
diff --git a/src/plugins/pulseaudio/qaudioinput_pulse.h b/src/plugins/pulseaudio/qaudioinput_pulse.h
index bb72628f8..e2c46c8e6 100644
--- a/src/plugins/pulseaudio/qaudioinput_pulse.h
+++ b/src/plugins/pulseaudio/qaudioinput_pulse.h
@@ -1,6 +1,6 @@
/****************************************************************************
**
-** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
+** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
@@ -112,8 +112,12 @@ public:
private slots:
void userFeed();
bool deviceReady();
+ void onPulseContextFailed();
private:
+ void setState(QAudio::State state);
+ void setError(QAudio::Error error);
+
int checkBytesReady();
bool open();
void close();
diff --git a/src/plugins/pulseaudio/qaudiooutput_pulse.cpp b/src/plugins/pulseaudio/qaudiooutput_pulse.cpp
index 2b482ba40..64a080663 100644
--- a/src/plugins/pulseaudio/qaudiooutput_pulse.cpp
+++ b/src/plugins/pulseaudio/qaudiooutput_pulse.cpp
@@ -1,6 +1,6 @@
/****************************************************************************
**
-** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
+** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
@@ -170,11 +170,29 @@ QPulseAudioOutput::~QPulseAudioOutput()
QCoreApplication::processEvents();
}
+void QPulseAudioOutput::setError(QAudio::Error error)
+{
+ if (m_errorState == error)
+ return;
+
+ m_errorState = error;
+ emit errorChanged(error);
+}
+
QAudio::Error QPulseAudioOutput::error() const
{
return m_errorState;
}
+void QPulseAudioOutput::setState(QAudio::State state)
+{
+ if (m_deviceState == state)
+ return;
+
+ m_deviceState = state;
+ emit stateChanged(state);
+}
+
QAudio::State QPulseAudioOutput::state() const
{
return m_deviceState;
@@ -183,19 +201,15 @@ QAudio::State QPulseAudioOutput::state() const
void QPulseAudioOutput::streamUnderflowCallback()
{
if (m_deviceState != QAudio::IdleState && !m_resuming) {
- m_errorState = QAudio::UnderrunError;
- emit errorChanged(m_errorState);
- m_deviceState = QAudio::IdleState;
- emit stateChanged(m_deviceState);
+ setError(QAudio::UnderrunError);
+ setState(QAudio::IdleState);
}
}
void QPulseAudioOutput::start(QIODevice *device)
{
- if (m_deviceState != QAudio::StoppedState)
- m_deviceState = QAudio::StoppedState;
-
- m_errorState = QAudio::NoError;
+ setState(QAudio::StoppedState);
+ setError(QAudio::NoError);
// Handle change of mode
if (m_audioSource && !m_pullMode) {
@@ -205,22 +219,19 @@ void QPulseAudioOutput::start(QIODevice *device)
close();
+ if (!open())
+ return;
+
m_pullMode = true;
m_audioSource = device;
- m_deviceState = QAudio::ActiveState;
-
- open();
-
- emit stateChanged(m_deviceState);
+ setState(QAudio::ActiveState);
}
QIODevice *QPulseAudioOutput::start()
{
- if (m_deviceState != QAudio::StoppedState)
- m_deviceState = QAudio::StoppedState;
-
- m_errorState = QAudio::NoError;
+ setState(QAudio::StoppedState);
+ setError(QAudio::NoError);
// Handle change of mode
if (m_audioSource && !m_pullMode) {
@@ -230,15 +241,14 @@ QIODevice *QPulseAudioOutput::start()
close();
+ if (!open())
+ return Q_NULLPTR;
+
m_audioSource = new OutputPrivate(this);
m_audioSource->open(QIODevice::WriteOnly|QIODevice::Unbuffered);
m_pullMode = false;
- m_deviceState = QAudio::IdleState;
-
- open();
-
- emit stateChanged(m_deviceState);
+ setState(QAudio::IdleState);
return m_audioSource;
}
@@ -246,33 +256,38 @@ QIODevice *QPulseAudioOutput::start()
bool QPulseAudioOutput::open()
{
if (m_opened)
+ return true;
+
+ QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
+
+ if (!pulseEngine->context() || pa_context_get_state(pulseEngine->context()) != PA_CONTEXT_READY) {
+ setError(QAudio::FatalError);
+ setState(QAudio::StoppedState);
return false;
+ }
pa_sample_spec spec = QPulseAudioInternal::audioFormatToSampleSpec(m_format);
if (!pa_sample_spec_valid(&spec)) {
- m_errorState = QAudio::OpenError;
- m_deviceState = QAudio::StoppedState;
+ setError(QAudio::OpenError);
+ setState(QAudio::StoppedState);
return false;
}
m_spec = spec;
m_totalTimeValue = 0;
- m_elapsedTimeOffset = 0;
- m_timeStamp.restart();
if (m_streamName.isNull())
m_streamName = QString(QLatin1String("QtmPulseStream-%1-%2")).arg(::getpid()).arg(quintptr(this)).toUtf8();
#ifdef DEBUG_PULSE
- qDebug() << "Format: " << QPulseAudioInternal::sampleFormatToQString(spec.format);
- qDebug() << "Rate: " << spec.rate;
- qDebug() << "Channels: " << spec.channels;
- qDebug() << "Frame size: " << pa_frame_size(&spec);
+ qDebug() << "Format: " << QPulseAudioInternal::sampleFormatToQString(spec.format);
+ qDebug() << "Rate: " << spec.rate;
+ qDebug() << "Channels: " << spec.channels;
+ qDebug() << "Frame size: " << pa_frame_size(&spec);
#endif
- QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
- pa_threaded_mainloop_lock(pulseEngine->mainloop());
+ pulseEngine->lock();
qint64 bytesPerSecond = m_format.sampleRate() * m_format.channelCount() * m_format.sampleSize() / 8;
@@ -280,7 +295,7 @@ bool QPulseAudioOutput::open()
if (!m_category.isNull())
pa_proplist_sets(propList, PA_PROP_MEDIA_ROLE, m_category.toLatin1().constData());
- m_stream = pa_stream_new_with_proplist(pulseEngine->context(), m_streamName.constData(), &spec, 0, propList);
+ m_stream = pa_stream_new_with_proplist(pulseEngine->context(), m_streamName.constData(), &m_spec, 0, propList);
pa_proplist_free(propList);
pa_stream_set_state_callback(m_stream, outputStreamStateCallback, this);
@@ -312,15 +327,20 @@ bool QPulseAudioOutput::open()
if (pa_stream_connect_playback(m_stream, m_device.data(), (m_bufferSize > 0) ? &requestedBuffer : NULL, (pa_stream_flags_t)0, &m_chVolume, NULL) < 0) {
qWarning() << "pa_stream_connect_playback() failed!";
+ pa_stream_unref(m_stream);
+ m_stream = 0;
+ pulseEngine->unlock();
+ setError(QAudio::OpenError);
+ setState(QAudio::StoppedState);
return false;
}
- while (pa_stream_get_state(m_stream) != PA_STREAM_READY) {
+ while (pa_stream_get_state(m_stream) != PA_STREAM_READY)
pa_threaded_mainloop_wait(pulseEngine->mainloop());
- }
+
const pa_buffer_attr *buffer = pa_stream_get_buffer_attr(m_stream);
m_periodTime = (m_category == LOW_LATENCY_CATEGORY_NAME) ? LowLatencyPeriodTimeMs : PeriodTimeMs;
- m_periodSize = pa_usec_to_bytes(m_periodTime*1000, &spec);
+ m_periodSize = pa_usec_to_bytes(m_periodTime*1000, &m_spec);
m_bufferSize = buffer->tlength;
m_maxBufferSize = buffer->maxlength;
m_audioBuffer = new char[m_maxBufferSize];
@@ -333,9 +353,12 @@ bool QPulseAudioOutput::open()
qDebug() << "\tFragment size: " << buffer->fragsize;
#endif
- pa_threaded_mainloop_unlock(pulseEngine->mainloop());
+ pulseEngine->unlock();
+
+ connect(pulseEngine, &QPulseAudioEngine::contextFailed, this, &QPulseAudioOutput::onPulseContextFailed);
m_opened = true;
+
m_tickTimer->start(m_periodTime);
m_elapsedTimeOffset = 0;
@@ -347,28 +370,35 @@ bool QPulseAudioOutput::open()
void QPulseAudioOutput::close()
{
+ if (!m_opened)
+ return;
+
m_tickTimer->stop();
+ QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
+
if (m_stream) {
- QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
- pa_threaded_mainloop_lock(pulseEngine->mainloop());
+ pulseEngine->lock();
- pa_stream_set_write_callback(m_stream, NULL, NULL);
+ pa_stream_set_state_callback(m_stream, 0, 0);
+ pa_stream_set_write_callback(m_stream, 0, 0);
+ pa_stream_set_underflow_callback(m_stream, 0, 0);
+ pa_stream_set_overflow_callback(m_stream, 0, 0);
+ pa_stream_set_latency_update_callback(m_stream, 0, 0);
pa_operation *o = pa_stream_drain(m_stream, outputStreamDrainComplete, NULL);
- if (!o) {
- qWarning() << QString("pa_stream_drain(): %1").arg(pa_strerror(pa_context_errno(pa_stream_get_context(m_stream))));
- } else {
+ if (o)
pa_operation_unref(o);
- }
pa_stream_disconnect(m_stream);
pa_stream_unref(m_stream);
m_stream = NULL;
- pa_threaded_mainloop_unlock(pulseEngine->mainloop());
+ pulseEngine->unlock();
}
+ disconnect(pulseEngine, &QPulseAudioEngine::contextFailed, this, &QPulseAudioOutput::onPulseContextFailed);
+
if (!m_pullMode && m_audioSource) {
delete m_audioSource;
m_audioSource = 0;
@@ -430,17 +460,14 @@ qint64 QPulseAudioOutput::write(const char *data, qint64 len)
{
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
- pa_threaded_mainloop_lock(pulseEngine->mainloop());
+ pulseEngine->lock();
len = qMin(len, static_cast<qint64>(pa_stream_writable_size(m_stream)));
pa_stream_write(m_stream, data, len, 0, 0, PA_SEEK_RELATIVE);
- pa_threaded_mainloop_unlock(pulseEngine->mainloop());
+ pulseEngine->unlock();
m_totalTimeValue += len;
- m_errorState = QAudio::NoError;
- if (m_deviceState != QAudio::ActiveState) {
- m_deviceState = QAudio::ActiveState;
- emit stateChanged(m_deviceState);
- }
+ setError(QAudio::NoError);
+ setState(QAudio::ActiveState);
return len;
}
@@ -450,10 +477,10 @@ void QPulseAudioOutput::stop()
if (m_deviceState == QAudio::StoppedState)
return;
- m_errorState = QAudio::NoError;
- m_deviceState = QAudio::StoppedState;
close();
- emit stateChanged(m_deviceState);
+
+ setError(QAudio::NoError);
+ setState(QAudio::StoppedState);
}
int QPulseAudioOutput::bytesFree() const
@@ -462,9 +489,9 @@ int QPulseAudioOutput::bytesFree() const
return 0;
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
- pa_threaded_mainloop_lock(pulseEngine->mainloop());
+ pulseEngine->lock();
int writableSize = pa_stream_writable_size(m_stream);
- pa_threaded_mainloop_unlock(pulseEngine->mainloop());
+ pulseEngine->unlock();
return writableSize;
}
@@ -509,30 +536,22 @@ void QPulseAudioOutput::resume()
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
- pa_threaded_mainloop_lock(pulseEngine->mainloop());
+ pulseEngine->lock();
pa_operation *operation = pa_stream_cork(m_stream, 0, outputStreamSuccessCallback, NULL);
-
- while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
- pa_threaded_mainloop_wait(pulseEngine->mainloop());
-
+ pulseEngine->wait(operation);
pa_operation_unref(operation);
operation = pa_stream_trigger(m_stream, outputStreamSuccessCallback, NULL);
-
- while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
- pa_threaded_mainloop_wait(pulseEngine->mainloop());
-
+ pulseEngine->wait(operation);
pa_operation_unref(operation);
- pa_threaded_mainloop_unlock(pulseEngine->mainloop());
+ pulseEngine->unlock();
- m_deviceState = QAudio::ActiveState;
-
- m_errorState = QAudio::NoError;
m_tickTimer->start(m_periodTime);
- emit stateChanged(m_deviceState);
+ setState(QAudio::ActiveState);
+ setError(QAudio::NoError);
}
}
@@ -549,24 +568,21 @@ QAudioFormat QPulseAudioOutput::format() const
void QPulseAudioOutput::suspend()
{
if (m_deviceState == QAudio::ActiveState || m_deviceState == QAudio::IdleState) {
+ setError(QAudio::NoError);
+ setState(QAudio::SuspendedState);
+
m_tickTimer->stop();
- m_deviceState = QAudio::SuspendedState;
- m_errorState = QAudio::NoError;
- emit stateChanged(m_deviceState);
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
pa_operation *operation;
- pa_threaded_mainloop_lock(pulseEngine->mainloop());
+ pulseEngine->lock();
operation = pa_stream_cork(m_stream, 1, outputStreamSuccessCallback, NULL);
-
- while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
- pa_threaded_mainloop_wait(pulseEngine->mainloop());
-
+ pulseEngine->wait(operation);
pa_operation_unref(operation);
- pa_threaded_mainloop_unlock(pulseEngine->mainloop());
+ pulseEngine->unlock();
}
}
@@ -601,8 +617,8 @@ qint64 OutputPrivate::writeData(const char *data, qint64 len)
int retry = 0;
qint64 written = 0;
- if ((m_audioDevice->m_deviceState == QAudio::ActiveState)
- ||(m_audioDevice->m_deviceState == QAudio::IdleState)) {
+ if ((m_audioDevice->m_deviceState == QAudio::ActiveState
+ || m_audioDevice->m_deviceState == QAudio::IdleState)) {
while(written < len) {
int chunk = m_audioDevice->write(data+written, (len-written));
if (chunk <= 0)
@@ -623,7 +639,7 @@ void QPulseAudioOutput::setVolume(qreal vol)
m_volume = vol;
if (m_opened) {
QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();
- pa_threaded_mainloop_lock(pulseEngine->mainloop());
+ pulseEngine->lock();
pa_volume_t paVolume;
if (qFuzzyCompare(vol, 0.0)) {
pa_cvolume_mute(&m_chVolume, m_spec.channels);
@@ -641,7 +657,7 @@ void QPulseAudioOutput::setVolume(qreal vol)
qWarning()<<"QAudioOutput: Failed to set volume";
else
pa_operation_unref(op);
- pa_threaded_mainloop_unlock(pulseEngine->mainloop());
+ pulseEngine->unlock();
}
}
}
@@ -664,6 +680,14 @@ QString QPulseAudioOutput::category() const
return m_category;
}
+void QPulseAudioOutput::onPulseContextFailed()
+{
+ close();
+
+ setError(QAudio::FatalError);
+ setState(QAudio::StoppedState);
+}
+
QT_END_NAMESPACE
#include "moc_qaudiooutput_pulse.cpp"
diff --git a/src/plugins/pulseaudio/qaudiooutput_pulse.h b/src/plugins/pulseaudio/qaudiooutput_pulse.h
index 5954b8975..fea151ba2 100644
--- a/src/plugins/pulseaudio/qaudiooutput_pulse.h
+++ b/src/plugins/pulseaudio/qaudiooutput_pulse.h
@@ -1,6 +1,6 @@
/****************************************************************************
**
-** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
+** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
@@ -105,12 +105,16 @@ public:
void streamUnderflowCallback();
private:
+ void setState(QAudio::State state);
+ void setError(QAudio::Error error);
+
bool open();
void close();
qint64 write(const char *data, qint64 len);
private Q_SLOTS:
void userFeed();
+ void onPulseContextFailed();
private:
QByteArray m_device;
diff --git a/src/plugins/pulseaudio/qpulseaudioengine.cpp b/src/plugins/pulseaudio/qpulseaudioengine.cpp
index b7a3e66b7..05c8be89e 100644
--- a/src/plugins/pulseaudio/qpulseaudioengine.cpp
+++ b/src/plugins/pulseaudio/qpulseaudioengine.cpp
@@ -1,6 +1,6 @@
/****************************************************************************
**
-** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
+** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
@@ -170,15 +170,17 @@ static void contextStateCallbackInit(pa_context *context, void *userdata)
pa_threaded_mainloop_signal(pulseEngine->mainloop(), 0);
}
-static void contextStateCallback(pa_context *context, void *userdata)
+static void contextStateCallback(pa_context *c, void *userdata)
{
- Q_UNUSED(userdata);
- Q_UNUSED(context);
+ QPulseAudioEngine *self = reinterpret_cast<QPulseAudioEngine*>(userdata);
+ pa_context_state_t state = pa_context_get_state(c);
#ifdef DEBUG_PULSE
- pa_context_state_t state = pa_context_get_state(context);
qDebug() << QPulseAudioInternal::stateToQString(state);
#endif
+
+ if (state == PA_CONTEXT_FAILED)
+ QMetaObject::invokeMethod(self, "onContextFailed", Qt::QueuedConnection);
}
Q_GLOBAL_STATIC(QPulseAudioEngine, pulseEngine);
@@ -187,40 +189,59 @@ QPulseAudioEngine::QPulseAudioEngine(QObject *parent)
: QObject(parent)
, m_mainLoopApi(0)
, m_context(0)
+ , m_prepared(false)
+{
+ prepare();
+}
+
+QPulseAudioEngine::~QPulseAudioEngine()
+{
+ if (m_prepared)
+ release();
+}
+void QPulseAudioEngine::prepare()
{
bool keepGoing = true;
bool ok = true;
m_mainLoop = pa_threaded_mainloop_new();
if (m_mainLoop == 0) {
- qWarning("Unable to create pulseaudio mainloop");
+ qWarning("PulseAudioService: unable to create pulseaudio mainloop");
return;
}
if (pa_threaded_mainloop_start(m_mainLoop) != 0) {
- qWarning("Unable to start pulseaudio mainloop");
+ qWarning("PulseAudioService: unable to start pulseaudio mainloop");
pa_threaded_mainloop_free(m_mainLoop);
+ m_mainLoop = 0;
return;
}
m_mainLoopApi = pa_threaded_mainloop_get_api(m_mainLoop);
- pa_threaded_mainloop_lock(m_mainLoop);
+ lock();
- m_context = pa_context_new(m_mainLoopApi, QString(QLatin1String("QtmPulseContext:%1")).arg(::getpid()).toLatin1().constData());
- pa_context_set_state_callback(m_context, contextStateCallbackInit, this);
+ m_context = pa_context_new(m_mainLoopApi, QString(QLatin1String("QtPulseAudio:%1")).arg(::getpid()).toLatin1().constData());
- if (!m_context) {
- qWarning("Unable to create new pulseaudio context");
+ if (m_context == 0) {
+ qWarning("PulseAudioService: Unable to create new pulseaudio context");
+ pa_threaded_mainloop_unlock(m_mainLoop);
pa_threaded_mainloop_free(m_mainLoop);
+ m_mainLoop = 0;
+ onContextFailed();
return;
}
- if (pa_context_connect(m_context, NULL, (pa_context_flags_t)0, NULL) < 0) {
- qWarning("Unable to create a connection to the pulseaudio context");
+ pa_context_set_state_callback(m_context, contextStateCallbackInit, this);
+
+ if (pa_context_connect(m_context, 0, (pa_context_flags_t)0, 0) < 0) {
+ qWarning("PulseAudioService: pa_context_connect() failed");
pa_context_unref(m_context);
+ pa_threaded_mainloop_unlock(m_mainLoop);
pa_threaded_mainloop_free(m_mainLoop);
+ m_mainLoop = 0;
+ m_context = 0;
return;
}
@@ -241,47 +262,49 @@ QPulseAudioEngine::QPulseAudioEngine(QObject *parent)
break;
case PA_CONTEXT_TERMINATED:
- qCritical("Context terminated.");
+ qCritical("PulseAudioService: Context terminated.");
keepGoing = false;
ok = false;
break;
case PA_CONTEXT_FAILED:
default:
- qCritical() << QString("Connection failure: %1").arg(pa_strerror(pa_context_errno(m_context)));
+ qCritical() << QString("PulseAudioService: Connection failure: %1").arg(pa_strerror(pa_context_errno(m_context)));
keepGoing = false;
ok = false;
}
- if (keepGoing) {
+ if (keepGoing)
pa_threaded_mainloop_wait(m_mainLoop);
- }
}
if (ok) {
pa_context_set_state_callback(m_context, contextStateCallback, this);
} else {
- if (m_context) {
- pa_context_unref(m_context);
- m_context = 0;
- }
+ pa_context_unref(m_context);
+ m_context = 0;
}
- pa_threaded_mainloop_unlock(m_mainLoop);
+ unlock();
if (ok) {
- serverInfo();
- sinks();
- sources();
+ updateDevices();
+ m_prepared = true;
+ } else {
+ pa_threaded_mainloop_free(m_mainLoop);
+ m_mainLoop = 0;
+ onContextFailed();
}
}
-QPulseAudioEngine::~QPulseAudioEngine()
+void QPulseAudioEngine::release()
{
+ if (!m_prepared)
+ return;
+
if (m_context) {
- pa_threaded_mainloop_lock(m_mainLoop);
pa_context_disconnect(m_context);
- pa_threaded_mainloop_unlock(m_mainLoop);
+ pa_context_unref(m_context);
m_context = 0;
}
@@ -290,62 +313,52 @@ QPulseAudioEngine::~QPulseAudioEngine()
pa_threaded_mainloop_free(m_mainLoop);
m_mainLoop = 0;
}
+
+ m_prepared = false;
}
-void QPulseAudioEngine::serverInfo()
+void QPulseAudioEngine::updateDevices()
{
- pa_operation *operation;
-
- pa_threaded_mainloop_lock(m_mainLoop);
-
- operation = pa_context_get_server_info(m_context, serverInfoCallback, this);
+ lock();
+ // Get default input and output devices
+ pa_operation *operation = pa_context_get_server_info(m_context, serverInfoCallback, this);
while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
pa_threaded_mainloop_wait(m_mainLoop);
-
pa_operation_unref(operation);
- pa_threaded_mainloop_unlock(m_mainLoop);
-}
-
-void QPulseAudioEngine::sinks()
-{
- pa_operation *operation;
-
- pa_threaded_mainloop_lock(m_mainLoop);
-
+ // Get output devices
operation = pa_context_get_sink_info_list(m_context, sinkInfoCallback, this);
-
while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
pa_threaded_mainloop_wait(m_mainLoop);
+ pa_operation_unref(operation);
+ // Get input devices
+ operation = pa_context_get_source_info_list(m_context, sourceInfoCallback, this);
+ while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
+ pa_threaded_mainloop_wait(m_mainLoop);
pa_operation_unref(operation);
- pa_threaded_mainloop_unlock(m_mainLoop);
+ unlock();
- // Swap the default sink to index 0
+ // Swap the default output to index 0
m_sinks.removeOne(m_defaultSink);
m_sinks.prepend(m_defaultSink);
+
+ // Swap the default input to index 0
+ m_sources.removeOne(m_defaultSource);
+ m_sources.prepend(m_defaultSource);
}
-void QPulseAudioEngine::sources()
+void QPulseAudioEngine::onContextFailed()
{
- pa_operation *operation;
-
- pa_threaded_mainloop_lock(m_mainLoop);
-
- operation = pa_context_get_source_info_list(m_context, sourceInfoCallback, this);
+ // Give a chance to the connected slots to still use the Pulse main loop before releasing it.
+ emit contextFailed();
- while (pa_operation_get_state(operation) == PA_OPERATION_RUNNING)
- pa_threaded_mainloop_wait(m_mainLoop);
-
- pa_operation_unref(operation);
+ release();
- pa_threaded_mainloop_unlock(m_mainLoop);
-
- // Swap the default source to index 0
- m_sources.removeOne(m_defaultSource);
- m_sources.prepend(m_defaultSource);
+ // Try to reconnect later
+ QTimer::singleShot(3000, this, SLOT(prepare()));
}
QPulseAudioEngine *QPulseAudioEngine::instance()
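
The recovery path starts in the reworked contextStateCallback() above: libpulse invokes it on the threaded mainloop's own thread, so the failure is forwarded to the engine's thread with a queued slot invocation instead of being handled inline. onContextFailed() then emits contextFailed() so every open input/output can close its stream first, releases the context and mainloop, and schedules prepare() again three seconds later. A stripped-down sketch of the thread hop (the callback name is illustrative; the calls mirror the patch):

#include <pulse/pulseaudio.h>
#include <QMetaObject>
#include "qpulseaudioengine.h"   // the plugin header patched below

static void contextStateCallbackSketch(pa_context *c, void *userdata)
{
    // Runs on the PulseAudio mainloop thread: do not touch engine state here,
    // queue a call to the onContextFailed() slot on the engine's Qt thread instead.
    QPulseAudioEngine *self = reinterpret_cast<QPulseAudioEngine *>(userdata);
    if (pa_context_get_state(c) == PA_CONTEXT_FAILED)
        QMetaObject::invokeMethod(self, "onContextFailed", Qt::QueuedConnection);
}
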
diff --git a/src/plugins/pulseaudio/qpulseaudioengine.h b/src/plugins/pulseaudio/qpulseaudioengine.h
index 1c6c2aaf4..04591d035 100644
--- a/src/plugins/pulseaudio/qpulseaudioengine.h
+++ b/src/plugins/pulseaudio/qpulseaudioengine.h
@@ -1,6 +1,6 @@
/****************************************************************************
**
-** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
+** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Toolkit.
@@ -74,12 +74,36 @@ public:
pa_threaded_mainloop *mainloop() { return m_mainLoop; }
pa_context *context() { return m_context; }
+ inline void lock()
+ {
+ if (m_mainLoop)
+ pa_threaded_mainloop_lock(m_mainLoop);
+ }
+
+ inline void unlock()
+ {
+ if (m_mainLoop)
+ pa_threaded_mainloop_unlock(m_mainLoop);
+ }
+
+ inline void wait(pa_operation *op)
+ {
+ while (m_mainLoop && pa_operation_get_state(op) == PA_OPERATION_RUNNING)
+ pa_threaded_mainloop_wait(m_mainLoop);
+ }
+
QList<QByteArray> availableDevices(QAudio::Mode mode) const;
+Q_SIGNALS:
+ void contextFailed();
+
+private Q_SLOTS:
+ void prepare();
+ void onContextFailed();
+
private:
- void serverInfo();
- void sinks();
- void sources();
+ void updateDevices();
+ void release();
public:
QList<QByteArray> m_sinks;
@@ -93,6 +117,7 @@ private:
pa_mainloop_api *m_mainLoopApi;
pa_threaded_mainloop *m_mainLoop;
pa_context *m_context;
+ bool m_prepared;
};
QT_END_NAMESPACE
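
The inline lock(), unlock() and wait() helpers added here null-check m_mainLoop, so the stream classes converted throughout this patch remain safe to call even after the mainloop has been released by a context failure. The caller sequence they replace (lock, start an operation, wait for it, unref, unlock) looks like the sketch below; the helper function and the success callback are illustrative stand-ins for the inputStreamSuccessCallback/outputStreamSuccessCallback used in the patch, which are assumed to signal the mainloop on completion:

#include <pulse/pulseaudio.h>
#include "qpulseaudioengine.h"

static void corkSuccessSketch(pa_stream *, int /* success */, void *)
{
    // Wake the thread blocked in wait() / pa_threaded_mainloop_wait().
    pa_threaded_mainloop_signal(QPulseAudioEngine::instance()->mainloop(), 0);
}

static void corkStreamSketch(pa_stream *stream, bool suspendIt)
{
    QPulseAudioEngine *engine = QPulseAudioEngine::instance();

    engine->lock();                 // pa_threaded_mainloop_lock(), only if the loop still exists
    pa_operation *op = pa_stream_cork(stream, suspendIt ? 1 : 0, corkSuccessSketch, 0);
    engine->wait(op);               // loops on pa_threaded_mainloop_wait() until the operation finishes
    pa_operation_unref(op);
    engine->unlock();
}
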