author     Yoann Lopes <yoann.lopes@digia.com>    2014-03-26 19:15:39 +0100
committer  The Qt Project <gerrit-noreply@qt-project.org>    2014-03-26 19:15:39 +0100
commit     609d5c681b1ec3f6a504e2fdd5680115b54e37f5 (patch)
tree       deae69abe3ac8e7cc42259d37735e509aa03112a
parent     c5c3ce6f9bcdb560e30438ffca5647580f2f00c1 (diff)
parent     2618b6c77ddf7fd8e7a54dd80891b724c8fc035f (diff)
Merge "Merge remote-tracking branch 'origin/stable' into dev" into refs/staging/dev
-rw-r--r--  examples/multimedia/audiorecorder/audiorecorder.cpp | 1
-rw-r--r--  examples/multimedia/declarative-camera/PhotoCaptureControls.qml | 1
-rw-r--r--  examples/multimedia/declarative-camera/ZoomControl.qml | 39
-rw-r--r--  src/gsttools/qgstappsrc.cpp | 4
-rw-r--r--  src/gsttools/qgstcodecsinfo.cpp | 6
-rw-r--r--  src/gsttools/qgstreamergltexturerenderer.cpp | 1
-rw-r--r--  src/gsttools/qgstreamervideorenderer.cpp | 4
-rw-r--r--  src/gsttools/qgstreamervideowidget.cpp | 4
-rw-r--r--  src/gsttools/qgstreamervideowindow.cpp | 6
-rw-r--r--  src/gsttools/qgstutils.cpp | 18
-rw-r--r--  src/imports/audioengine/qdeclarative_attenuationmodel_p.cpp | 14
-rw-r--r--  src/imports/audioengine/qdeclarative_audiocategory_p.cpp | 14
-rw-r--r--  src/imports/audioengine/qdeclarative_audioengine_p.cpp | 24
-rw-r--r--  src/imports/audioengine/qdeclarative_audiolistener_p.cpp | 38
-rw-r--r--  src/imports/audioengine/qdeclarative_audiosample_p.cpp | 25
-rw-r--r--  src/imports/audioengine/qdeclarative_playvariation_p.cpp | 12
-rw-r--r--  src/imports/audioengine/qdeclarative_sound_p.cpp | 36
-rw-r--r--  src/imports/audioengine/qdeclarative_soundinstance_p.cpp | 64
-rw-r--r--  src/imports/multimedia/Video.qml | 6
-rw-r--r--  src/imports/multimedia/qdeclarativeaudio.cpp | 46
-rw-r--r--  src/imports/multimedia/qdeclarativecamera.cpp | 40
-rw-r--r--  src/imports/multimedia/qdeclarativecameracapture.cpp | 34
-rw-r--r--  src/imports/multimedia/qdeclarativecameraexposure.cpp | 14
-rw-r--r--  src/imports/multimedia/qdeclarativecameraexposure_p.h | 39
-rw-r--r--  src/imports/multimedia/qdeclarativecameraflash.cpp | 10
-rw-r--r--  src/imports/multimedia/qdeclarativecameraflash_p.h | 23
-rw-r--r--  src/imports/multimedia/qdeclarativecamerafocus.cpp | 16
-rw-r--r--  src/imports/multimedia/qdeclarativecamerafocus_p.h | 39
-rw-r--r--  src/imports/multimedia/qdeclarativecameraimageprocessing.cpp | 4
-rw-r--r--  src/imports/multimedia/qdeclarativeradio.cpp | 2
-rw-r--r--  src/multimedia/camera/qcamerainfo.cpp | 2
-rw-r--r--  src/multimedia/gsttools_headers/qgstutils_p.h | 2
-rw-r--r--  src/plugins/android/jar/src/org/qtproject/qt5/android/multimedia/QtAndroidMediaPlayer.java | 309
-rw-r--r--  src/plugins/android/src/common/qandroidvideooutput.h | 1
-rw-r--r--  src/plugins/android/src/common/qandroidvideorendercontrol.cpp | 39
-rw-r--r--  src/plugins/android/src/common/qandroidvideorendercontrol.h | 2
-rw-r--r--  src/plugins/android/src/mediacapture/qandroidcapturesession.cpp | 110
-rw-r--r--  src/plugins/android/src/mediacapture/qandroidcapturesession.h | 5
-rw-r--r--  src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp | 397
-rw-r--r--  src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.h | 11
-rw-r--r--  src/plugins/android/src/wrappers/jcamera.cpp | 8
-rw-r--r--  src/plugins/android/src/wrappers/jmediaplayer.cpp | 146
-rw-r--r--  src/plugins/android/src/wrappers/jmediaplayer.h | 46
-rw-r--r--  src/plugins/android/src/wrappers/jmediarecorder.cpp | 66
-rw-r--r--  src/plugins/avfoundation/camera/avfaudioinputselectorcontrol.mm | 5
-rw-r--r--  src/plugins/avfoundation/mediaplayer/avfmediaplayerservice.mm | 8
-rw-r--r--  src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm | 4
-rw-r--r--  src/plugins/avfoundation/mediaplayer/avfvideowindowcontrol.h | 120
-rw-r--r--  src/plugins/avfoundation/mediaplayer/avfvideowindowcontrol.mm | 246
-rw-r--r--  src/plugins/avfoundation/mediaplayer/mediaplayer.pro | 6
-rw-r--r--  src/plugins/gstreamer/audiodecoder/qgstreameraudiodecoderserviceplugin.cpp | 1
-rw-r--r--  src/plugins/gstreamer/audiodecoder/qgstreameraudiodecodersession.cpp | 5
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinaudioencoder.cpp | 14
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabincontainer.cpp | 14
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinrecorder.cpp | 12
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinsession.cpp | 22
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinvideoencoder.cpp | 2
-rw-r--r--  src/plugins/gstreamer/mediacapture/qgstreameraudioencode.cpp | 2
-rw-r--r--  src/plugins/gstreamer/mediacapture/qgstreamercaptureserviceplugin.cpp | 1
-rw-r--r--  src/plugins/gstreamer/mediacapture/qgstreamercapturesession.cpp | 39
-rw-r--r--  src/plugins/gstreamer/mediacapture/qgstreamervideoencode.cpp | 2
-rw-r--r--  src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.cpp | 20
-rw-r--r--  src/plugins/opensles/qopenslesengine.cpp | 14
-rw-r--r--  src/plugins/opensles/qopenslesengine.h | 1
-rw-r--r--  src/plugins/qnx/mediaplayer/bpsmediaplayercontrol.cpp | 10
-rw-r--r--  src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.cpp | 33
-rw-r--r--  src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.h | 3
-rw-r--r--  src/plugins/qnx/mediaplayer/ppsmediaplayercontrol.cpp | 7
-rw-r--r--  src/plugins/wmf/player/mfvideorenderercontrol.cpp | 49
-rw-r--r--  src/qtmultimediaquicktools/qdeclarativevideooutput.cpp | 2
-rw-r--r--  tests/auto/integration/qcamerabackend/qcamerabackend.pro | 1
71 files changed, 1692 insertions, 679 deletions
diff --git a/examples/multimedia/audiorecorder/audiorecorder.cpp b/examples/multimedia/audiorecorder/audiorecorder.cpp
index 18ced4d5f..c77396b0f 100644
--- a/examples/multimedia/audiorecorder/audiorecorder.cpp
+++ b/examples/multimedia/audiorecorder/audiorecorder.cpp
@@ -167,6 +167,7 @@ void AudioRecorder::updateStatus(QMediaRecorder::Status status)
statusMessage = tr("Paused");
break;
case QMediaRecorder::UnloadedStatus:
+ case QMediaRecorder::LoadedStatus:
clearAudioLevels();
ui->recordButton->setText(tr("Record"));
ui->pauseButton->setText(tr("Pause"));
diff --git a/examples/multimedia/declarative-camera/PhotoCaptureControls.qml b/examples/multimedia/declarative-camera/PhotoCaptureControls.qml
index 991a46e14..2cd39b8cb 100644
--- a/examples/multimedia/declarative-camera/PhotoCaptureControls.qml
+++ b/examples/multimedia/declarative-camera/PhotoCaptureControls.qml
@@ -110,6 +110,7 @@ FocusScope {
text: "Fluorescent"
}
}
+ onValueChanged: captureControls.camera.imageProcessing.whiteBalanceMode = wbModesButton.value
}
CameraButton {
diff --git a/examples/multimedia/declarative-camera/ZoomControl.qml b/examples/multimedia/declarative-camera/ZoomControl.qml
index 1bfda7500..48199b58d 100644
--- a/examples/multimedia/declarative-camera/ZoomControl.qml
+++ b/examples/multimedia/declarative-camera/ZoomControl.qml
@@ -47,6 +47,8 @@ Item {
property real maximumZoom : 1
signal zoomTo(real value)
+ visible: zoomControl.maximumZoom > 1
+
MouseArea {
id : mouseArea
anchors.fill: parent
@@ -61,7 +63,7 @@ Item {
onPositionChanged: {
if (pressed) {
- var target = initialZoom * Math.pow(2, (initialPos-mouseY)/zoomControl.height);
+ var target = initialZoom * Math.pow(5, (initialPos-mouseY)/zoomControl.height);
target = Math.max(1, Math.min(target, zoomControl.maximumZoom))
zoomControl.zoomTo(target)
}
@@ -74,45 +76,42 @@ Item {
y : parent.height/4
width : 24
height : parent.height/2
- opacity : 0
Rectangle {
anchors.fill: parent
smooth: true
radius: 8
- border.color: "black"
+ border.color: "white"
border.width: 2
- color: "white"
+ color: "black"
opacity: 0.3
}
Rectangle {
+ id: groove
x : 0
y : parent.height * (1.0 - (zoomControl.currentZoom-1.0) / (zoomControl.maximumZoom-1.0))
width: parent.width
height: parent.height - y
smooth: true
radius: 8
- color: "black"
+ color: "white"
opacity: 0.5
}
- states: State {
- name: "ShowBar"
- when: mouseArea.pressed || zoomControl.currentZoom > 1.0
- PropertyChanges { target: bar; opacity: 1 }
- }
-
- transitions: [
- Transition {
- to : "ShowBar"
- NumberAnimation { properties: "opacity"; duration: 100 }
- },
- Transition {
- from : "ShowBar"
- NumberAnimation { properties: "opacity"; duration: 500 }
+ Text {
+ id: zoomText
+ anchors {
+ left: bar.right; leftMargin: 16
}
- ]
+ y: Math.min(parent.height - height, Math.max(0, groove.y - height / 2))
+ text: "x" + Math.round(zoomControl.currentZoom * 100) / 100
+ font.bold: true
+ color: "white"
+ style: Text.Raised; styleColor: "black"
+ opacity: 0.85
+ font.pixelSize: 18
+ }
}
}
diff --git a/src/gsttools/qgstappsrc.cpp b/src/gsttools/qgstappsrc.cpp
index 8917bda85..2c9f64c34 100644
--- a/src/gsttools/qgstappsrc.cpp
+++ b/src/gsttools/qgstappsrc.cpp
@@ -72,7 +72,11 @@ bool QGstAppSrc::setup(GstElement* appsrc)
if (m_setup || m_stream == 0 || appsrc == 0)
return false;
+ if (m_appSrc)
+ gst_object_unref(G_OBJECT(m_appSrc));
+
m_appSrc = GST_APP_SRC(appsrc);
+ gst_object_ref(G_OBJECT(m_appSrc));
gst_app_src_set_callbacks(m_appSrc, (GstAppSrcCallbacks*)&m_callbacks, this, (GDestroyNotify)&QGstAppSrc::destroy_notify);
g_object_get(G_OBJECT(m_appSrc), "max-bytes", &m_maxBytes, NULL);
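The qgstappsrc change above makes the wrapper hold its own reference to the appsrc element so the pointer cannot dangle if the pipeline drops the element first. A minimal sketch of that ownership rule, using an invented holder class rather than the patch's QGstAppSrc:

    #include <gst/gst.h>

    // Illustrative holder (not part of the patch) applying the same rule as
    // the hunk above: keep exactly one reference of our own to an element we
    // did not create, and drop it when the element is replaced or destroyed.
    class ElementHolder
    {
    public:
        ElementHolder() : m_element(0) {}

        ~ElementHolder()
        {
            if (m_element)
                gst_object_unref(GST_OBJECT(m_element)); // release our reference
        }

        void setElement(GstElement *element)
        {
            if (m_element)
                gst_object_unref(GST_OBJECT(m_element)); // drop the old reference
            m_element = element;
            if (m_element)
                gst_object_ref(GST_OBJECT(m_element));   // keep the new one alive
        }

    private:
        GstElement *m_element;
    };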
diff --git a/src/gsttools/qgstcodecsinfo.cpp b/src/gsttools/qgstcodecsinfo.cpp
index ade79fd44..6db9b016a 100644
--- a/src/gsttools/qgstcodecsinfo.cpp
+++ b/src/gsttools/qgstcodecsinfo.cpp
@@ -93,6 +93,9 @@ QGstCodecsInfo::QGstCodecsInfo(QGstCodecsInfo::ElementType elementType)
gst_caps_remove_structure(caps, 0);
}
+
+ gst_caps_unref(caps);
+ gst_caps_unref(allCaps);
#else
Q_UNUSED(elementType);
#endif // GST_CHECK_VERSION(0,10,31)
@@ -143,7 +146,7 @@ GstCaps* QGstCodecsInfo::supportedElementCaps(GstElementFactoryListType elementT
padTemplates = padTemplates->next;
if (padTemplate->direction == padDirection) {
- const GstCaps *caps = gst_static_caps_get(&padTemplate->static_caps);
+ GstCaps *caps = gst_static_caps_get(&padTemplate->static_caps);
for (uint i=0; i<gst_caps_get_size(caps); i++) {
const GstStructure *structure = gst_caps_get_structure(caps, i);
@@ -173,6 +176,7 @@ GstCaps* QGstCodecsInfo::supportedElementCaps(GstElementFactoryListType elementT
gst_caps_merge_structure(res, newStructure);
}
+ gst_caps_unref(caps);
}
}
}
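The qgstcodecsinfo hunks are leak fixes: gst_static_caps_get() hands back caps that the caller owns, so the pointer can be non-const and must be released with gst_caps_unref(). A small illustrative helper (not taken from the patch) showing the same acquire-and-release pattern:

    #include <gst/gst.h>

    // Sketch: count the structures advertised by a static pad template.
    // gst_static_caps_get() returns a reference owned by the caller, so it
    // has to be balanced with gst_caps_unref() once we are done with it.
    static guint countTemplateStructures(GstStaticPadTemplate *padTemplate)
    {
        GstCaps *caps = gst_static_caps_get(&padTemplate->static_caps);
        guint count = gst_caps_get_size(caps);
        gst_caps_unref(caps); // balance the reference taken above
        return count;
    }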
diff --git a/src/gsttools/qgstreamergltexturerenderer.cpp b/src/gsttools/qgstreamergltexturerenderer.cpp
index dda04792e..85199e00e 100644
--- a/src/gsttools/qgstreamergltexturerenderer.cpp
+++ b/src/gsttools/qgstreamergltexturerenderer.cpp
@@ -216,6 +216,7 @@ GstElement *QGstreamerGLTextureRenderer::videoSink()
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this);
+ gst_object_unref(GST_OBJECT(pad));
}
}
diff --git a/src/gsttools/qgstreamervideorenderer.cpp b/src/gsttools/qgstreamervideorenderer.cpp
index 5b0b0d7dd..36c9f78d0 100644
--- a/src/gsttools/qgstreamervideorenderer.cpp
+++ b/src/gsttools/qgstreamervideorenderer.cpp
@@ -41,6 +41,7 @@
#include "qgstreamervideorenderer_p.h"
#include <private/qvideosurfacegstsink_p.h>
+#include <private/qgstutils_p.h>
#include <qabstractvideosurface.h>
#include <QDebug>
@@ -62,8 +63,7 @@ GstElement *QGstreamerVideoRenderer::videoSink()
{
if (!m_videoSink && m_surface) {
m_videoSink = QVideoSurfaceGstSink::createSink(m_surface);
- gst_object_ref(GST_OBJECT(m_videoSink)); //Take ownership
- gst_object_sink(GST_OBJECT(m_videoSink));
+ qt_gst_object_ref_sink(GST_OBJECT(m_videoSink)); //Take ownership
}
return reinterpret_cast<GstElement*>(m_videoSink);
diff --git a/src/gsttools/qgstreamervideowidget.cpp b/src/gsttools/qgstreamervideowidget.cpp
index 8297040c1..7e11bfb21 100644
--- a/src/gsttools/qgstreamervideowidget.cpp
+++ b/src/gsttools/qgstreamervideowidget.cpp
@@ -137,8 +137,7 @@ void QGstreamerVideoWidgetControl::createVideoWidget()
if (!m_videoSink)
m_videoSink = gst_element_factory_make ("ximagesink", NULL);
- gst_object_ref (GST_OBJECT (m_videoSink)); //Take ownership
- gst_object_sink (GST_OBJECT (m_videoSink));
+ qt_gst_object_ref_sink(GST_OBJECT (m_videoSink)); //Take ownership
}
@@ -219,6 +218,7 @@ void QGstreamerVideoWidgetControl::updateNativeVideoSize()
//find video native size to update video widget size hint
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
GstCaps *caps = gst_pad_get_negotiated_caps(pad);
+ gst_object_unref(GST_OBJECT(pad));
if (caps) {
m_widget->setNativeSize(QGstUtils::capsCorrectedResolution(caps));
diff --git a/src/gsttools/qgstreamervideowindow.cpp b/src/gsttools/qgstreamervideowindow.cpp
index 70d321575..2dc351065 100644
--- a/src/gsttools/qgstreamervideowindow.cpp
+++ b/src/gsttools/qgstreamervideowindow.cpp
@@ -63,11 +63,11 @@ QGstreamerVideoWindow::QGstreamerVideoWindow(QObject *parent, const char *elemen
m_videoSink = gst_element_factory_make("xvimagesink", NULL);
if (m_videoSink) {
- gst_object_ref(GST_OBJECT(m_videoSink)); //Take ownership
- gst_object_sink(GST_OBJECT(m_videoSink));
+ qt_gst_object_ref_sink(GST_OBJECT(m_videoSink)); //Take ownership
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this);
+ gst_object_unref(GST_OBJECT(pad));
}
}
@@ -114,6 +114,7 @@ bool QGstreamerVideoWindow::processSyncMessage(const QGstreamerMessage &message)
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this);
+ gst_object_unref(GST_OBJECT(pad));
return true;
}
@@ -319,6 +320,7 @@ void QGstreamerVideoWindow::updateNativeVideoSize()
//find video native size to update video widget size hint
GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
GstCaps *caps = gst_pad_get_negotiated_caps(pad);
+ gst_object_unref(GST_OBJECT(pad));
if (caps) {
m_nativeSize = QGstUtils::capsCorrectedResolution(caps);
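The renderer, widget and window hunks above all apply one rule: gst_element_get_static_pad() returns a new pad reference, so it has to be dropped with gst_object_unref() once the pad has been used. A compact sketch of that pattern against the GStreamer 0.10 API (the sink name and the printing are illustrative, not from the patch):

    #include <gst/gst.h>

    // Sketch: read the negotiated caps of a sink pad without leaking the pad.
    // The pad reference is dropped as soon as it is no longer needed; the
    // caps are a separate reference and are released on their own.
    static void printNegotiatedCaps(GstElement *videoSink)
    {
        GstPad *pad = gst_element_get_static_pad(videoSink, "sink");
        if (!pad)
            return;

        GstCaps *caps = gst_pad_get_negotiated_caps(pad); // 0.10 API
        gst_object_unref(GST_OBJECT(pad)); // drop the pad reference right away

        if (caps) {
            gchar *description = gst_caps_to_string(caps);
            g_print("negotiated caps: %s\n", description);
            g_free(description);
            gst_caps_unref(caps);
        }
    }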
diff --git a/src/gsttools/qgstutils.cpp b/src/gsttools/qgstutils.cpp
index e93b46753..41bd005bd 100644
--- a/src/gsttools/qgstutils.cpp
+++ b/src/gsttools/qgstutils.cpp
@@ -401,4 +401,22 @@ QMultimedia::SupportEstimate QGstUtils::hasSupport(const QString &mimeType,
return QMultimedia::MaybeSupported;
}
+void qt_gst_object_ref_sink(gpointer object)
+{
+#if (GST_VERSION_MAJOR >= 0) && (GST_VERSION_MINOR >= 10) && (GST_VERSION_MICRO >= 24)
+ gst_object_ref_sink(object);
+#else
+ g_return_if_fail (GST_IS_OBJECT(object));
+
+ GST_OBJECT_LOCK(object);
+ if (G_LIKELY(GST_OBJECT_IS_FLOATING(object))) {
+ GST_OBJECT_FLAG_UNSET(object, GST_OBJECT_FLOATING);
+ GST_OBJECT_UNLOCK(object);
+ } else {
+ GST_OBJECT_UNLOCK(object);
+ gst_object_ref(object);
+ }
+#endif
+}
+
QT_END_NAMESPACE
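The new qt_gst_object_ref_sink() helper centralises GStreamer's floating-reference handling: sinking converts a freshly created element's floating reference into one owned by the caller, and on versions without gst_object_ref_sink() the fallback above emulates it. A hedged usage sketch, assuming the declaration lives in qgstutils_p.h as the include added to qgstreamervideorenderer.cpp suggests (the element name is arbitrary):

    #include <gst/gst.h>
    #include <private/qgstutils_p.h> // assumed to declare qt_gst_object_ref_sink()

    // Sketch: take ownership of a newly created sink element.  The element
    // starts with a floating reference; qt_gst_object_ref_sink() either sinks
    // that floating reference or, if the object was already sunk, adds a
    // normal one, so the caller owns exactly one reference either way and
    // later releases it with gst_object_unref().
    static GstElement *createOwnedSink()
    {
        GstElement *sink = gst_element_factory_make("fakesink", NULL);
        if (!sink)
            return NULL;

        qt_gst_object_ref_sink(GST_OBJECT(sink));
        return sink;
    }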
diff --git a/src/imports/audioengine/qdeclarative_attenuationmodel_p.cpp b/src/imports/audioengine/qdeclarative_attenuationmodel_p.cpp
index 4456f175b..3e8cecefa 100644
--- a/src/imports/audioengine/qdeclarative_attenuationmodel_p.cpp
+++ b/src/imports/audioengine/qdeclarative_attenuationmodel_p.cpp
@@ -140,7 +140,7 @@ void QDeclarativeAttenuationModel::setName(const QString& name)
*/
/*!
- \qmlproperty string QtAudioEngine1::AttenuationModelLinear::name
+ \qmlproperty string QtAudioEngine::AttenuationModelLinear::name
This property holds the name of AttenuationModelLinear, must be unique among all attenuation
models and only defined once.
@@ -162,7 +162,7 @@ void QDeclarativeAttenuationModelLinear::componentComplete()
}
/*!
- \qmlproperty real QtAudioEngine1::AttenuationModelLinear::start
+ \qmlproperty real QtAudioEngine::AttenuationModelLinear::start
This property holds the start distance. There will be no attenuation if the distance from sound
to listener is within this range.
@@ -187,7 +187,7 @@ void QDeclarativeAttenuationModelLinear::setStartDistance(qreal startDist)
}
/*!
- \qmlproperty real QtAudioEngine1::AttenuationModelLinear::end
+ \qmlproperty real QtAudioEngine::AttenuationModelLinear::end
This property holds the end distance. There will be no sound hearable if the distance from sound
to listener is larger than this.
@@ -280,14 +280,14 @@ qreal QDeclarativeAttenuationModelLinear::calculateGain(const QVector3D &listene
*/
/*!
- \qmlproperty string QtAudioEngine1::AttenuationModelInverse::name
+ \qmlproperty string QtAudioEngine::AttenuationModelInverse::name
This property holds the name of AttenuationModelInverse, must be unique among all attenuation
models and only defined once.
*/
/*!
- \qmlproperty real QtAudioEngine1::AttenuationModelInverse::start
+ \qmlproperty real QtAudioEngine::AttenuationModelInverse::start
This property holds the start distance. There will be no attenuation if the distance from sound
to listener is within this range.
@@ -295,7 +295,7 @@ qreal QDeclarativeAttenuationModelLinear::calculateGain(const QVector3D &listene
*/
/*!
- \qmlproperty real QtAudioEngine1::AttenuationModelInverse::end
+ \qmlproperty real QtAudioEngine::AttenuationModelInverse::end
This property holds the end distance. There will be no further attenuation if the distance from
sound to listener is larger than this.
@@ -303,7 +303,7 @@ qreal QDeclarativeAttenuationModelLinear::calculateGain(const QVector3D &listene
*/
/*!
- \qmlproperty real QtAudioEngine1::AttenuationModelInverse::rolloff
+ \qmlproperty real QtAudioEngine::AttenuationModelInverse::rolloff
This property holds the rolloff factor. The bigger the value is, the faster the sound attenuates.
The default value is 1.
diff --git a/src/imports/audioengine/qdeclarative_audiocategory_p.cpp b/src/imports/audioengine/qdeclarative_audiocategory_p.cpp
index a115c0f12..c3e5e4ed0 100644
--- a/src/imports/audioengine/qdeclarative_audiocategory_p.cpp
+++ b/src/imports/audioengine/qdeclarative_audiocategory_p.cpp
@@ -58,8 +58,8 @@ QT_USE_NAMESPACE
This type is part of the \b{QtAudioEngine 1.0} module.
- An instance of AudioCategory can be accessed through QtAudioEngine1::AudioEngine::categories with its
- unique name and must be defined inside AudioEngine.
+ An instance of AudioCategory can be accessed through \l {QtAudioEngine::AudioEngine::categories}
+ {AudioEngine.categories} with its unique name and must be defined inside AudioEngine.
\qml
import QtQuick 2.0
@@ -138,7 +138,7 @@ void QDeclarativeAudioCategory::componentComplete()
}
/*!
- \qmlproperty real QtAudioEngine1::AudioCategory::volume
+ \qmlproperty real QtAudioEngine::AudioCategory::volume
This property holds the volume of the category and will modulate all audio output from the
instances which belong to this category.
@@ -160,7 +160,7 @@ void QDeclarativeAudioCategory::setVolume(qreal volume)
}
/*!
- \qmlproperty string QtAudioEngine1::AudioCategory::name
+ \qmlproperty string QtAudioEngine::AudioCategory::name
This property holds the name of AudioCategory. The name must be unique among all categories and only
defined once. The name cannot be changed after the instance has been initialized.
@@ -180,7 +180,7 @@ QString QDeclarativeAudioCategory::name() const
}
/*!
- \qmlmethod QtAudioEngine1::AudioCategory::stop()
+ \qmlmethod QtAudioEngine::AudioCategory::stop()
Stops all active sound instances which belong to this category.
*/
@@ -190,7 +190,7 @@ void QDeclarativeAudioCategory::stop()
}
/*!
- \qmlmethod QtAudioEngine1::AudioCategory::pause()
+ \qmlmethod QtAudioEngine::AudioCategory::pause()
Pauses all active sound instances which belong to this category.
*/
@@ -200,7 +200,7 @@ void QDeclarativeAudioCategory::pause()
}
/*!
- \qmlmethod QtAudioEngine1::AudioCategory::pause()
+ \qmlmethod QtAudioEngine::AudioCategory::pause()
Resumes all active sound instances from paused state which belong to this category.
*/
diff --git a/src/imports/audioengine/qdeclarative_audioengine_p.cpp b/src/imports/audioengine/qdeclarative_audioengine_p.cpp
index 7f0a8e747..e0c1ea082 100644
--- a/src/imports/audioengine/qdeclarative_audioengine_p.cpp
+++ b/src/imports/audioengine/qdeclarative_audioengine_p.cpp
@@ -195,7 +195,7 @@ void QDeclarativeAudioEngine::releaseManagedDeclarativeSoundInstance(QDeclarativ
}
/*!
- \qmlproperty int QtAudioEngine1::AudioEngine::liveInstances
+ \qmlproperty int QtAudioEngine::AudioEngine::liveInstances
This property indicates how many live sound instances there are at the moment.
*/
@@ -408,7 +408,7 @@ QQmlListProperty<QObject> QDeclarativeAudioEngine::bank()
}
/*!
- \qmlproperty map QtAudioEngine1::AudioEngine::categories
+ \qmlproperty map QtAudioEngine::AudioEngine::categories
Container of all AudioCategory instances.
*/
@@ -418,7 +418,7 @@ QObject* QDeclarativeAudioEngine::categories()
}
/*!
- \qmlproperty map QtAudioEngine1::AudioEngine::samples
+ \qmlproperty map QtAudioEngine::AudioEngine::samples
Container of all AudioSample instances.
*/
@@ -428,7 +428,7 @@ QObject* QDeclarativeAudioEngine::samples()
}
/*!
- \qmlproperty map QtAudioEngine1::AudioEngine::sounds
+ \qmlproperty map QtAudioEngine::AudioEngine::sounds
Container of all Sound instances.
*/
@@ -438,7 +438,7 @@ QObject* QDeclarativeAudioEngine::sounds()
}
/*!
- \qmlproperty QtAudioEngine1::AudioListener QtAudioEngine1::AudioEngine::listener
+ \qmlproperty QtAudioEngine::AudioListener QtAudioEngine::AudioEngine::listener
This property holds the listener object. You can change various
properties to affect the 3D positioning of sounds.
@@ -451,7 +451,7 @@ QDeclarativeAudioListener* QDeclarativeAudioEngine::listener() const
}
/*!
- \qmlproperty real QtAudioEngine1::AudioEngine::dopplerFactor
+ \qmlproperty real QtAudioEngine::AudioEngine::dopplerFactor
This property holds a simple scaling for the effect of doppler shift.
*/
@@ -466,7 +466,7 @@ void QDeclarativeAudioEngine::setDopplerFactor(qreal dopplerFactor)
}
/*!
- \qmlproperty real QtAudioEngine1::AudioEngine::speedOfSound
+ \qmlproperty real QtAudioEngine::AudioEngine::speedOfSound
This property holds the reference value of the sound speed (in meters per second)
which will be used in doppler shift calculation. The doppler shift calculation is
@@ -485,7 +485,7 @@ void QDeclarativeAudioEngine::setSpeedOfSound(qreal speedOfSound)
}
/*!
- \qmlproperty bool QtAudioEngine1::AudioEngine::loading
+ \qmlproperty bool QtAudioEngine::AudioEngine::loading
This property indicates if the audio engine is loading any audio sample at the moment. This may
be useful if you specified the preloaded property in AudioSample and would like to show a loading screen
@@ -505,7 +505,7 @@ void QDeclarativeAudioEngine::handleLoadingChanged()
}
/*!
- \qmlsignal QtAudioEngine1::AudioEngine::finishedLoading()
+ \qmlsignal QtAudioEngine::AudioEngine::finishedLoading()
This signal is emitted when \l loading has completed.
@@ -513,7 +513,7 @@ void QDeclarativeAudioEngine::handleLoadingChanged()
*/
/*!
- \qmlsignal QtAudioEngine1::AudioEngine::ready()
+ \qmlsignal QtAudioEngine::AudioEngine::ready()
This signal is emitted when the AudioEngine is ready to use.
@@ -521,7 +521,7 @@ void QDeclarativeAudioEngine::handleLoadingChanged()
*/
/*!
- \qmlsignal QtAudioEngine1::AudioEngine::liveInstanceCountChanged()
+ \qmlsignal QtAudioEngine::AudioEngine::liveInstanceCountChanged()
This signal is emitted when the number of live instances managed by the
AudioEngine is changed.
@@ -530,7 +530,7 @@ void QDeclarativeAudioEngine::handleLoadingChanged()
*/
/*!
- \qmlsignal QtAudioEngine1::AudioEngine::isLoadingChanged()
+ \qmlsignal QtAudioEngine::AudioEngine::isLoadingChanged()
This signal is emitted when the \l loading property changes.
diff --git a/src/imports/audioengine/qdeclarative_audiolistener_p.cpp b/src/imports/audioengine/qdeclarative_audiolistener_p.cpp
index 9e0be0d2b..7795ce8a3 100644
--- a/src/imports/audioengine/qdeclarative_audiolistener_p.cpp
+++ b/src/imports/audioengine/qdeclarative_audiolistener_p.cpp
@@ -88,11 +88,10 @@ QT_USE_NAMESPACE
loops: Animation.Infinite
running: true
NumberAnimation {
- duration: 8000
- from: 0
- to: 1
+ duration: 8000
+ from: 0
+ to: 1
}
-
}
}
}
@@ -130,17 +129,16 @@ QT_USE_NAMESPACE
loops: Animation.Infinite
running: true
NumberAnimation {
- duration: 8000
- from: 0
- to: 1
+ duration: 8000
+ from: 0
+ to: 1
}
-
}
}
}
\endqml
- This separate AudioListener definition is allowed to make qml bindings easier in some case.
+ This separate AudioListener definition is allowed to make QML bindings easier in some cases.
*/
QDeclarativeAudioListener::QDeclarativeAudioListener(QObject *parent)
@@ -155,9 +153,9 @@ QDeclarativeAudioListener::~QDeclarativeAudioListener()
}
/*!
- \qmlproperty QtAudioEngine1::AudioEngine QtAudioEngine1::AudioListener::engine
+ \qmlproperty QtAudioEngine::AudioEngine QtAudioEngine::AudioListener::engine
- This property holds the reference to AudioEngine, must be set only once.
+ This property holds the reference to AudioEngine, and must only be set once.
*/
QDeclarativeAudioEngine* QDeclarativeAudioListener::engine() const
{
@@ -171,9 +169,9 @@ void QDeclarativeAudioListener::setEngine(QDeclarativeAudioEngine *engine)
}
/*!
- \qmlproperty vector3d QtAudioEngine1::AudioListener::position
+ \qmlproperty vector3d QtAudioEngine::AudioListener::position
- This property holds the 3d position of the listener.
+ This property holds the 3D position of the listener.
*/
QVector3D QDeclarativeAudioListener::position() const
{
@@ -190,9 +188,9 @@ void QDeclarativeAudioListener::setPosition(const QVector3D &position)
}
/*!
- \qmlproperty vector3d QtAudioEngine1::AudioListener::direction
+ \qmlproperty vector3d QtAudioEngine::AudioListener::direction
- This property holds the normalized 3d direction vector of the listener.
+ This property holds the normalized 3D direction vector of the listener.
*/
QVector3D QDeclarativeAudioListener::direction() const
{
@@ -209,9 +207,9 @@ void QDeclarativeAudioListener::setDirection(const QVector3D &direction)
}
/*!
- \qmlproperty vector3d QtAudioEngine1::AudioListener::velocity
+ \qmlproperty vector3d QtAudioEngine::AudioListener::velocity
- This property holds the 3d velocity vector of the listener.
+ This property holds the 3D velocity vector of the listener.
*/
QVector3D QDeclarativeAudioListener::velocity() const
{
@@ -228,9 +226,9 @@ void QDeclarativeAudioListener::setVelocity(const QVector3D &velocity)
}
/*!
- \qmlproperty vector3d QtAudioEngine1::AudioListener::up
+ \qmlproperty vector3d QtAudioEngine::AudioListener::up
- This property holds the normalized 3d up vector of the listener.
+ This property holds the normalized 3D up vector of the listener.
*/
QVector3D QDeclarativeAudioListener::up() const
{
@@ -247,7 +245,7 @@ void QDeclarativeAudioListener::setUp(const QVector3D &up)
}
/*!
- \qmlproperty real QtAudioEngine1::AudioListener::gain
+ \qmlproperty real QtAudioEngine::AudioListener::gain
This property will modulate all audio output from audio engine instances.
*/
diff --git a/src/imports/audioengine/qdeclarative_audiosample_p.cpp b/src/imports/audioengine/qdeclarative_audiosample_p.cpp
index 0a6acb2f8..a29f0f46e 100644
--- a/src/imports/audioengine/qdeclarative_audiosample_p.cpp
+++ b/src/imports/audioengine/qdeclarative_audiosample_p.cpp
@@ -61,7 +61,7 @@ QT_USE_NAMESPACE
\c AudioSample is part of the \b{QtAudioEngine 1.0} module.
- It can be accessed through QtAudioEngine1::AudioEngine::samples with its unique
+ It can be accessed through QtAudioEngine::AudioEngine::samples with its unique
name and must be defined inside AudioEngine.
\qml
@@ -134,11 +134,12 @@ bool QDeclarativeAudioSample::isStreaming() const
}
/*!
- \qmlproperty bool QtAudioEngine1::AudioSample::preloaded
+ \qmlproperty bool QtAudioEngine::AudioSample::preloaded
This property indicates whether this sample needs to be preloaded or not.
- If true, the audio engine will start loading the sample file immediately when the app started,
- otherwise the sample will not be loaded until explicitly requested.
+ If \c true, the audio engine will start loading the sample file immediately
+ when the application starts, otherwise the sample will not be loaded until
+ explicitly requested.
*/
bool QDeclarativeAudioSample::isPreloaded() const
@@ -147,7 +148,7 @@ bool QDeclarativeAudioSample::isPreloaded() const
}
/*!
- \qmlproperty bool QtAudioEngine1::AudioSample::loaded
+ \qmlproperty bool QtAudioEngine::AudioSample::loaded
This property indicates whether this sample has been loaded into memory or not.
*/
@@ -159,7 +160,7 @@ bool QDeclarativeAudioSample::isLoaded() const
}
/*!
- \qmlmethod void QtAudioEngine1::AudioSample::load()
+ \qmlmethod void QtAudioEngine::AudioSample::load()
Starts loading the sample into memory if not loaded.
*/
@@ -193,10 +194,10 @@ void QDeclarativeAudioSample::setStreaming(bool streaming)
}
/*!
- \qmlproperty string QtAudioEngine1::AudioSample::name
+ \qmlproperty string QtAudioEngine::AudioSample::name
- This property holds the name of AudioSample, must be unique among all samples and only
- defined once.
+ This property holds the name of the sample, which must be unique among all
+ samples and only defined once.
*/
QString QDeclarativeAudioSample::name() const
{
@@ -237,9 +238,11 @@ QSoundBuffer* QDeclarativeAudioSample::soundBuffer() const
}
/*!
- \qmlsignal QtAudioEngine1::AudioSample::onLoadedChanged()
+ \qmlsignal QtAudioEngine::AudioSample::loadedChanged()
- This handler is called when \l loaded is changed
+ This signal is emitted when \l loaded is changed.
+
+ The corresponding handler is \c onLoadedChanged.
*/
diff --git a/src/imports/audioengine/qdeclarative_playvariation_p.cpp b/src/imports/audioengine/qdeclarative_playvariation_p.cpp
index 204ffb23a..b9a0a3ab8 100644
--- a/src/imports/audioengine/qdeclarative_playvariation_p.cpp
+++ b/src/imports/audioengine/qdeclarative_playvariation_p.cpp
@@ -143,7 +143,7 @@ void QDeclarativePlayVariation::componentComplete()
}
/*!
- \qmlproperty string QtAudioEngine1::PlayVariation::sample
+ \qmlproperty string QtAudioEngine::PlayVariation::sample
This property specifies which \l AudioSample this variation will use.
*/
@@ -162,7 +162,7 @@ void QDeclarativePlayVariation::setSample(const QString& sample)
}
/*!
- \qmlproperty bool QtAudioEngine1::PlayVariation::looping
+ \qmlproperty bool QtAudioEngine::PlayVariation::looping
This property indicates whether the playback will be looped or not.
*/
@@ -181,7 +181,7 @@ void QDeclarativePlayVariation::setLooping(bool looping)
}
/*!
- \qmlproperty real QtAudioEngine1::PlayVariation::maxGain
+ \qmlproperty real QtAudioEngine::PlayVariation::maxGain
This property specifies the maximum gain adjustment that can be applied in any playback.
*/
@@ -204,7 +204,7 @@ void QDeclarativePlayVariation::setMaxGain(qreal maxGain)
}
/*!
- \qmlproperty real QtAudioEngine1::PlayVariation::minGain
+ \qmlproperty real QtAudioEngine::PlayVariation::minGain
This property specifies the minimum gain adjustment that can be applied in any playback.
*/
@@ -227,7 +227,7 @@ void QDeclarativePlayVariation::setMinGain(qreal minGain)
}
/*!
- \qmlproperty real QtAudioEngine1::PlayVariation::maxPitch
+ \qmlproperty real QtAudioEngine::PlayVariation::maxPitch
This property specifies the maximum pitch adjustment that can be applied in any playback.
*/
@@ -250,7 +250,7 @@ void QDeclarativePlayVariation::setMaxPitch(qreal maxPitch)
}
/*!
- \qmlproperty real QtAudioEngine1::PlayVariation::minPitch
+ \qmlproperty real QtAudioEngine::PlayVariation::minPitch
This property specifies the minimum pitch adjustment that can be applied in any playback.
*/
diff --git a/src/imports/audioengine/qdeclarative_sound_p.cpp b/src/imports/audioengine/qdeclarative_sound_p.cpp
index 5b22d43ba..e6c690cec 100644
--- a/src/imports/audioengine/qdeclarative_sound_p.cpp
+++ b/src/imports/audioengine/qdeclarative_sound_p.cpp
@@ -150,7 +150,7 @@ void QDeclarativeSoundCone::componentComplete()
This type is part of the \b{QtAudioEngine 1.0} module.
- Sound can be accessed through QtAudioEngine1::AudioEngine::sounds with its unique name
+ Sound can be accessed through QtAudioEngine::AudioEngine::sounds with its unique name
and must be defined inside AudioEngine.
\qml
@@ -228,7 +228,7 @@ void QDeclarativeSound::componentComplete()
}
/*!
- \qmlproperty enumeration QtAudioEngine1::Sound::playType
+ \qmlproperty enumeration QtAudioEngine::Sound::playType
This property holds the playType. It can be one of:
@@ -254,7 +254,7 @@ void QDeclarativeSound::setPlayType(PlayType playType)
}
/*!
- \qmlproperty string QtAudioEngine1::Sound::category
+ \qmlproperty string QtAudioEngine::Sound::category
This property specifies which AudioCategory this sound belongs to.
*/
@@ -273,7 +273,7 @@ void QDeclarativeSound::setCategory(const QString& category)
}
/*!
- \qmlproperty string QtAudioEngine1::Sound::name
+ \qmlproperty string QtAudioEngine::Sound::name
This property holds the name of Sound, must be unique among all sounds and only
defined once.
@@ -293,7 +293,7 @@ void QDeclarativeSound::setName(const QString& name)
}
/*!
- \qmlproperty string QtAudioEngine1::Sound::attenuationModel
+ \qmlproperty string QtAudioEngine::Sound::attenuationModel
This property specifies which attenuation model this sound will apply.
*/
@@ -382,7 +382,7 @@ void QDeclarativeSound::appendFunction(QQmlListProperty<QDeclarativePlayVariatio
}
/*!
- \qmlmethod QtAudioEngine1::Sound::play()
+ \qmlmethod QtAudioEngine::Sound::play()
Creates a new \l SoundInstance and starts playing.
Position, direction and velocity are all set to \c "0,0,0".
@@ -393,7 +393,7 @@ void QDeclarativeSound::play()
}
/*!
- \qmlmethod QtAudioEngine1::Sound::play(gain)
+ \qmlmethod QtAudioEngine::Sound::play(gain)
Creates a new SoundInstance and starts playing with the adjusted \a gain.
Position, direction and velocity are all set to \c "0,0,0".
@@ -404,7 +404,7 @@ void QDeclarativeSound::play(qreal gain)
}
/*!
- \qmlmethod QtAudioEngine1::Sound::play(gain, pitch)
+ \qmlmethod QtAudioEngine::Sound::play(gain, pitch)
Creates a new SoundInstance and starts playing with the adjusted \a gain and \a pitch.
Position, direction and velocity are all set to \c "0,0,0".
@@ -415,7 +415,7 @@ void QDeclarativeSound::play(qreal gain, qreal pitch)
}
/*!
- \qmlmethod QtAudioEngine1::Sound::play(position)
+ \qmlmethod QtAudioEngine::Sound::play(position)
Creates a new SoundInstance and starts playing with specified \a position.
Direction and velocity are all set to \c "0,0,0".
@@ -426,7 +426,7 @@ void QDeclarativeSound::play(const QVector3D& position)
}
/*!
- \qmlmethod QtAudioEngine1::Sound::play(position, velocity)
+ \qmlmethod QtAudioEngine::Sound::play(position, velocity)
Creates a new SoundInstance and starts playing with specified \a position and \a velocity.
Direction is set to \c "0,0,0".
@@ -437,7 +437,7 @@ void QDeclarativeSound::play(const QVector3D& position, const QVector3D& velocit
}
/*!
- \qmlmethod QtAudioEngine1::Sound::play(position, velocity, direction)
+ \qmlmethod QtAudioEngine::Sound::play(position, velocity, direction)
Creates a new SoundInstance and starts playing with specified \a position, \a velocity and
\a direction.
@@ -449,7 +449,7 @@ void QDeclarativeSound::play(const QVector3D& position, const QVector3D& velocit
}
/*!
- \qmlmethod QtAudioEngine1::Sound::play(position, gain)
+ \qmlmethod QtAudioEngine::Sound::play(position, gain)
Creates a new SoundInstance and starts playing with specified \a position and adjusted \a gain.
Direction and velocity are all set to \c "0,0,0".
@@ -460,7 +460,7 @@ void QDeclarativeSound::play(const QVector3D& position, qreal gain)
}
/*!
- \qmlmethod QtAudioEngine1::Sound::play(position, velocity, gain)
+ \qmlmethod QtAudioEngine::Sound::play(position, velocity, gain)
Creates a new SoundInstance and starts playing with specified \a position, \a velocity and
adjusted \a gain.
@@ -472,7 +472,7 @@ void QDeclarativeSound::play(const QVector3D& position, const QVector3D& velocit
}
/*!
- \qmlmethod QtAudioEngine1::Sound::play(position, velocity, direction, gain)
+ \qmlmethod QtAudioEngine::Sound::play(position, velocity, direction, gain)
Creates a new SoundInstance and starts playing with specified \a position, \a velocity,
\a direction and adjusted \a gain.
@@ -483,7 +483,7 @@ void QDeclarativeSound::play(const QVector3D& position, const QVector3D& velocit
}
/*!
- \qmlmethod QtAudioEngine1::Sound::play(position, gain, pitch)
+ \qmlmethod QtAudioEngine::Sound::play(position, gain, pitch)
Creates a new SoundInstance and starts playing with specified \a position, adjusted \a gain and
\a pitch.
@@ -495,7 +495,7 @@ void QDeclarativeSound::play(const QVector3D& position, qreal gain, qreal pitch)
}
/*!
- \qmlmethod QtAudioEngine1::Sound::play(position, velocity, gain, pitch)
+ \qmlmethod QtAudioEngine::Sound::play(position, velocity, gain, pitch)
Creates a new SoundInstance and starts playing with specified \a position, \a velocity,
adjusted \a gain and \a pitch.
@@ -507,7 +507,7 @@ void QDeclarativeSound::play(const QVector3D& position, const QVector3D& velocit
}
/*!
- \qmlmethod QtAudioEngine1::Sound::play(position, velocity, direction, gain, pitch)
+ \qmlmethod QtAudioEngine::Sound::play(position, velocity, direction, gain, pitch)
Creates a new SoundInstance and starts playing with specified \a position, \a velocity,
\a direction, adjusted \a gain and \a pitch.
@@ -542,7 +542,7 @@ void QDeclarativeSound::play(const QVector3D& position, const QVector3D& velocit
}
/*!
- \qmlmethod QtAudioEngine1::SoundInstance QtAudioEngine1::Sound::newInstance()
+ \qmlmethod QtAudioEngine::SoundInstance QtAudioEngine::Sound::newInstance()
Returns a new \l SoundInstance.
*/
diff --git a/src/imports/audioengine/qdeclarative_soundinstance_p.cpp b/src/imports/audioengine/qdeclarative_soundinstance_p.cpp
index f2576a7fa..9f3a0d35d 100644
--- a/src/imports/audioengine/qdeclarative_soundinstance_p.cpp
+++ b/src/imports/audioengine/qdeclarative_soundinstance_p.cpp
@@ -185,7 +185,7 @@ QDeclarativeSoundInstance::QDeclarativeSoundInstance(QObject *parent)
}
/*!
- \qmlproperty QtAudioEngine1::AudioEngine QtAudioEngine1::SoundInstance::engine
+ \qmlproperty QtAudioEngine::AudioEngine QtAudioEngine::SoundInstance::engine
This property holds the reference to AudioEngine, must be set only once.
*/
@@ -234,7 +234,7 @@ QDeclarativeSoundInstance::~QDeclarativeSoundInstance()
}
/*!
- \qmlproperty string QtAudioEngine1::SoundInstance::sound
+ \qmlproperty string QtAudioEngine::SoundInstance::sound
This property specifies which Sound this SoundInstance will use. Unlike some properties in
other types, this property can be changed dynamically.
@@ -294,7 +294,7 @@ void QDeclarativeSoundInstance::dropInstance()
}
/*!
- \qmlproperty enumeration QtAudioEngine1::SoundInstance::state
+ \qmlproperty enumeration QtAudioEngine::SoundInstance::state
This property holds the current playback state. It can be one of:
@@ -318,7 +318,7 @@ QDeclarativeSoundInstance::State QDeclarativeSoundInstance::state() const
}
/*!
- \qmlmethod QtAudioEngine1::SoundInstance::play()
+ \qmlmethod QtAudioEngine::SoundInstance::play()
Starts playback.
*/
@@ -335,7 +335,7 @@ void QDeclarativeSoundInstance::play()
}
/*!
- \qmlmethod QtAudioEngine1::SoundInstance::play()
+ \qmlmethod QtAudioEngine::SoundInstance::play()
Stops current playback.
*/
@@ -351,7 +351,7 @@ void QDeclarativeSoundInstance::stop()
}
/*!
- \qmlmethod QtAudioEngine1::SoundInstance::play()
+ \qmlmethod QtAudioEngine::SoundInstance::play()
Pauses current playback.
*/
@@ -375,7 +375,7 @@ void QDeclarativeSoundInstance::updatePosition(qreal deltaTime)
}
/*!
- \qmlproperty vector3d QtAudioEngine1::SoundInstance::position
+ \qmlproperty vector3d QtAudioEngine::SoundInstance::position
This property holds the current 3d position.
*/
@@ -397,7 +397,7 @@ void QDeclarativeSoundInstance::setPosition(const QVector3D& position)
}
/*!
- \qmlproperty vector3d QtAudioEngine1::SoundInstance::direction
+ \qmlproperty vector3d QtAudioEngine::SoundInstance::direction
This property holds the current 3d direction.
*/
@@ -419,7 +419,7 @@ void QDeclarativeSoundInstance::setDirection(const QVector3D& direction)
}
/*!
- \qmlproperty vector3d QtAudioEngine1::SoundInstance::velocity
+ \qmlproperty vector3d QtAudioEngine::SoundInstance::velocity
This property holds the current 3d velocity.
*/
@@ -440,7 +440,7 @@ void QDeclarativeSoundInstance::setVelocity(const QVector3D& velocity)
}
/*!
- \qmlproperty vector3d QtAudioEngine1::SoundInstance::gain
+ \qmlproperty vector3d QtAudioEngine::SoundInstance::gain
This property holds the gain adjustment which will be used to modulate the audio output level
from this SoundInstance.
@@ -466,7 +466,7 @@ void QDeclarativeSoundInstance::setGain(qreal gain)
}
/*!
- \qmlproperty vector3d QtAudioEngine1::SoundInstance::pitch
+ \qmlproperty vector3d QtAudioEngine::SoundInstance::pitch
This property holds the pitch adjustment which will be used to modulate the audio pitch
from this SoundInstance.
@@ -527,43 +527,57 @@ void QDeclarativeSoundInstance::handleStateChanged()
}
/*!
- \qmlsignal QtAudioEngine1::SoundInstance::onStateChanged(state)
+ \qmlsignal QtAudioEngine::SoundInstance::stateChanged(state)
- This handler is called when \l state is changed
+ This signal is emitted when \l state is changed
+
+ The corresponding handler is \c onStateChanged.
*/
/*!
- \qmlsignal QtAudioEngine1::SoundInstance::onPositionChanged()
+ \qmlsignal QtAudioEngine::SoundInstance::positionChanged()
+
+ This signal is emitted when \l position is changed
- This handler is called when \l position is changed
+ The corresponding handler is \c onPositionChanged.
*/
/*!
- \qmlsignal QtAudioEngine1::SoundInstance::onDirectionChanged()
+ \qmlsignal QtAudioEngine::SoundInstance::directionChanged()
- This handler is called when \l direction is changed
+ This signal is emitted when \l direction is changed
+
+ The corresponding handler is \c onDirectionChanged.
*/
/*!
- \qmlsignal QtAudioEngine1::SoundInstance::onVelocityChanged()
+ \qmlsignal QtAudioEngine::SoundInstance::velocityChanged()
+
+ This signal is emitted when \l velocity is changed
- This handler is called when \l velocity is changed
+ The corresponding handler is \c onVelocityChanged.
*/
/*!
- \qmlsignal QtAudioEngine1::SoundInstance::onGainChanged()
+ \qmlsignal QtAudioEngine::SoundInstance::gainChanged()
- This handler is called when \l gain is changed
+ This signal is emitted when \l gain is changed
+
+ The corresponding handler is \c onGainChanged.
*/
/*!
- \qmlsignal QtAudioEngine1::SoundInstance::onPitchChanged()
+ \qmlsignal QtAudioEngine::SoundInstance::pitchChanged()
+
+ This signal is emitted when \l pitch is changed
- This handler is called when \l pitch is changed
+ The corresponding handler is \c onPitchChanged.
*/
/*!
- \qmlsignal QtAudioEngine1::SoundInstance::onSoundChanged()
+ \qmlsignal QtAudioEngine::SoundInstance::soundChanged()
+
+ This signal is emitted when \l sound is changed
- This handler is called when \l sound is changed
+ The corresponding handler is \c onSoundChanged.
*/
diff --git a/src/imports/multimedia/Video.qml b/src/imports/multimedia/Video.qml
index e52d303f8..0b8dc1118 100644
--- a/src/imports/multimedia/Video.qml
+++ b/src/imports/multimedia/Video.qml
@@ -328,6 +328,8 @@ Item {
\qmlsignal Video::paused()
This signal is emitted when playback is paused.
+
+ The corresponding handler is \c onPaused.
*/
signal paused
@@ -335,6 +337,8 @@ Item {
\qmlsignal Video::stopped()
This signal is emitted when playback is stopped.
+
+ The corresponding handler is \c onStopped.
*/
signal stopped
@@ -342,6 +346,8 @@ Item {
\qmlsignal Video::playing()
This signal is emitted when playback is started or continued.
+
+ The corresponding handler is \c onPlaying.
*/
signal playing
diff --git a/src/imports/multimedia/qdeclarativeaudio.cpp b/src/imports/multimedia/qdeclarativeaudio.cpp
index 4f741616f..589c45dae 100644
--- a/src/imports/multimedia/qdeclarativeaudio.cpp
+++ b/src/imports/multimedia/qdeclarativeaudio.cpp
@@ -457,32 +457,40 @@ void QDeclarativeAudio::seek(int position)
This property indicates if loading of media should begin immediately.
- Defaults to true, if false media will not be loaded until playback is started.
+ Defaults to \c true. If \c false, the media will not be loaded until playback is started.
*/
/*!
\qmlsignal QtMultimedia::Audio::playbackStateChanged()
- This handler is called when the \l playbackState property is altered.
+ This signal is emitted when the \l playbackState property is altered.
+
+ The corresponding handler is \c onPlaybackStateChanged.
*/
/*!
\qmlsignal QtMultimedia::Audio::paused()
- This handler is called when playback is paused.
+ This signal is emitted when playback is paused.
+
+ The corresponding handler is \c onPaused.
*/
/*!
\qmlsignal QtMultimedia::Audio::stopped()
- This handler is called when playback is stopped.
+ This signal is emitted when playback is stopped.
+
+ The corresponding handler is \c onStopped.
*/
/*!
\qmlsignal QtMultimedia::Audio::playing()
- This handler is called when playback is started or resumed.
+ This signal is emitted when playback is started or resumed.
+
+ The corresponding handler is \c onPlaying.
*/
/*!
@@ -531,7 +539,7 @@ QDeclarativeAudio::PlaybackState QDeclarativeAudio::playbackState() const
This property controls whether the media will begin to play on start up.
- Defaults to false, if set true the value of autoLoad will be overwritten to true.
+ Defaults to \c false. If set to \c true, the value of autoLoad will be overwritten to \c true.
*/
/*!
@@ -760,9 +768,11 @@ void QDeclarativeAudio::_q_statusChanged()
/*!
\qmlsignal QtMultimedia::Audio::error(error, errorString)
- This handler is called when an \l {QMediaPlayer::Error}{error} has
+ This signal is emitted when an \l {QMediaPlayer::Error}{error} has
occurred. The errorString parameter may contain more detailed
information about the error.
+
+ The corresponding handler is \c onError.
*/
/*!
@@ -1267,26 +1277,34 @@ void QDeclarativeAudio::_q_statusChanged()
/*!
\qmlsignal QtMultimedia::MediaPlayer::playbackStateChanged()
- This handler is called when the \l playbackState property is altered.
+ This signal is emitted when the \l playbackState property is altered.
+
+ The corresponding handler is \c onPlaybackStateChanged.
*/
/*!
\qmlsignal QtMultimedia::MediaPlayer::paused()
- This handler is called when playback is paused.
+ This signal is emitted when playback is paused.
+
+ The corresponding handler is \c onPaused.
*/
/*!
\qmlsignal QtMultimedia::MediaPlayer::stopped()
- This handler is called when playback is stopped.
+ This signal is emitted when playback is stopped.
+
+ The corresponding handler is \c onStopped.
*/
/*!
\qmlsignal QtMultimedia::MediaPlayer::playing()
- This handler is called when playback is started or resumed.
+ This signal is emitted when playback is started or resumed.
+
+ The corresponding handler is \c onPlaying.
*/
/*!
@@ -1324,7 +1342,7 @@ void QDeclarativeAudio::_q_statusChanged()
This property controls whether the media will begin to play on start up.
- Defaults to false, if set true the value of autoLoad will be overwritten to true.
+ Defaults to \c false. If set to \c true, the value of autoLoad will be overwritten to \c true.
*/
/*!
@@ -1440,9 +1458,11 @@ void QDeclarativeAudio::_q_statusChanged()
/*!
\qmlsignal QtMultimedia::MediaPlayer::error(error, errorString)
- This handler is called when an \l {QMediaPlayer::Error}{error} has
+ This signal is emitted when an \l {QMediaPlayer::Error}{error} has
occurred. The errorString parameter may contain more detailed
information about the error.
+
+ The corresponding handler is \c onError.
*/
/*!
diff --git a/src/imports/multimedia/qdeclarativecamera.cpp b/src/imports/multimedia/qdeclarativecamera.cpp
index 35feecfe2..db5ba81a0 100644
--- a/src/imports/multimedia/qdeclarativecamera.cpp
+++ b/src/imports/multimedia/qdeclarativecamera.cpp
@@ -227,7 +227,7 @@ QDeclarativeCamera::Error QDeclarativeCamera::errorCode() const
This property holds the last error string, if any.
- \sa QtMultimedia::Camera::onError
+ \sa error, errorCode
*/
QString QDeclarativeCamera::errorString() const
{
@@ -594,13 +594,13 @@ void QDeclarativeCamera::setDigitalZoom(qreal value)
This property holds the last error code.
- \sa QtMultimedia::Camera::onError
+ \sa error, errorString
*/
/*!
- \qmlsignal QtMultimedia::Camera::onError(errorCode, errorString)
+ \qmlsignal QtMultimedia::Camera::error(errorCode, errorString)
- This handler is called when an error occurs. The enumeration value
+ This signal is emitted when an error occurs. The enumeration value
\a errorCode is one of the values defined below, and a descriptive string
value is available in \a errorString.
@@ -612,6 +612,10 @@ void QDeclarativeCamera::setDigitalZoom(qreal value)
\row \li ServiceMissingError \li No camera service available.
\row \li NotSupportedFeatureError \li The feature is not supported.
\endtable
+
+ The corresponding handler is \c onError.
+
+ \sa errorCode, errorString
*/
/*!
@@ -619,6 +623,8 @@ void QDeclarativeCamera::setDigitalZoom(qreal value)
This signal is emitted when the lock status (focus, exposure etc) changes.
This can happen when locking (e.g. autofocusing) is complete or has failed.
+
+ The corresponding handler is \c onLockStatusChanged.
*/
/*!
@@ -627,34 +633,44 @@ void QDeclarativeCamera::setDigitalZoom(qreal value)
This signal is emitted when the camera state has changed to \a state. Since the
state changes may take some time to occur this signal may arrive sometime
after the state change has been requested.
+
+ The corresponding handler is \c onStateChanged.
*/
/*!
\qmlsignal Camera::opticalZoomChanged(zoom)
- The optical zoom setting has changed to \a zoom.
+ This signal is emitted when the optical zoom setting has changed to \a zoom.
+
+ The corresponding handler is \c onOpticalZoomChanged.
*/
/*!
\qmlsignal Camera::digitalZoomChanged(zoom)
- The digital zoom setting has changed to \a zoom.
+ This signal is emitted when the digital zoom setting has changed to \a zoom.
+
+ The corresponding handler is \c onDigitalZoomChanged.
*/
/*!
\qmlsignal Camera::maximumOpticalZoomChanged(zoom)
- The maximum optical zoom setting has changed to \a zoom. This
- can occur when you change between video and still image capture
- modes, or the capture settings are changed.
+ This signal is emitted when the maximum optical zoom setting has
+ changed to \a zoom. This can occur when you change between video
+ and still image capture modes, or the capture settings are changed.
+
+ The corresponding handler is \c onMaximumOpticalZoomChanged.
*/
/*!
\qmlsignal Camera::maximumDigitalZoomChanged(zoom)
- The maximum digital zoom setting has changed to \a zoom. This
- can occur when you change between video and still image capture
- modes, or the capture settings are changed.
+ This signal is emitted when the maximum digital zoom setting has
+ changed to \a zoom. This can occur when you change between video
+ and still image capture modes, or the capture settings are changed.
+
+ The corresponding handler is \c onMaximumDigitalZoomChanged.
*/
QT_END_NAMESPACE
diff --git a/src/imports/multimedia/qdeclarativecameracapture.cpp b/src/imports/multimedia/qdeclarativecameracapture.cpp
index 7e30867c8..6a198cdc4 100644
--- a/src/imports/multimedia/qdeclarativecameracapture.cpp
+++ b/src/imports/multimedia/qdeclarativecameracapture.cpp
@@ -156,7 +156,7 @@ bool QDeclarativeCameraCapture::isReadyForCapture() const
/*!
\qmlmethod QtMultimedia::CameraCapture::capture()
- Start image capture. The \l onImageCaptured() and \l onImageSaved() signals will
+ Start image capture. The \l imageCaptured and \l imageSaved signals will
be emitted when the capture is complete.
The image will be captured to the default system location, typically
@@ -179,7 +179,7 @@ int QDeclarativeCameraCapture::capture()
/*!
\qmlmethod QtMultimedia::CameraCapture::captureToLocation(location)
- Start image capture to specified \a location. The \l onImageCaptured() and \l onImageSaved() signals will
+ Start image capture to specified \a location. The \l imageCaptured and \l imageSaved signals will
be emitted when the capture is complete.
CameraCapture::captureToLocation returns the capture requestId parameter, used with
@@ -310,39 +310,47 @@ void QDeclarativeCameraCapture::setMetadata(const QString &key, const QVariant &
}
/*!
- \qmlsignal QtMultimedia::CameraCapture::onCaptureFailed(requestId, message)
+ \qmlsignal QtMultimedia::CameraCapture::captureFailed(requestId, message)
- This handler is called when an error occurs during capture with \a requestId.
+ This signal is emitted when an error occurs during capture with \a requestId.
A descriptive message is available in \a message.
+
+ The corresponding handler is \c onCaptureFailed.
*/
/*!
- \qmlsignal QtMultimedia::CameraCapture::onImageCaptured(requestId, preview)
+ \qmlsignal QtMultimedia::CameraCapture::imageCaptured(requestId, preview)
- This handler is called when an image with \a requestId has been captured
+ This signal is emitted when an image with \a requestId has been captured
but not yet saved to the filesystem. The \a preview
parameter can be used as the URL supplied to an \l Image.
- \sa onImageSaved
+ The corresponding handler is \c onImageCaptured.
+
+ \sa imageSaved
*/
/*!
- \qmlsignal QtMultimedia::CameraCapture::onImageSaved(requestId, path)
+ \qmlsignal QtMultimedia::CameraCapture::imageSaved(requestId, path)
- This handler is called after the image with \a requestId has been written to the filesystem.
+ This signal is emitted after the image with \a requestId has been written to the filesystem.
The \a path is a local file path, not a URL.
- \sa onImageCaptured
+ The corresponding handler is \c onImageSaved.
+
+ \sa imageCaptured
*/
/*!
- \qmlsignal QtMultimedia::CameraCapture::onImageMetadataAvailable(requestId, key, value)
+ \qmlsignal QtMultimedia::CameraCapture::imageMetadataAvailable(requestId, key, value)
- This handler is called when the image with \a requestId has new metadata
+ This signal is emitted when the image with \a requestId has new metadata
available with the key \a key and value \a value.
- \sa onImageCaptured
+ The corresponding handler is \c onImageMetadataAvailable.
+
+ \sa imageCaptured
*/
diff --git a/src/imports/multimedia/qdeclarativecameraexposure.cpp b/src/imports/multimedia/qdeclarativecameraexposure.cpp
index ea1b92f99..8c0ed86e4 100644
--- a/src/imports/multimedia/qdeclarativecameraexposure.cpp
+++ b/src/imports/multimedia/qdeclarativecameraexposure.cpp
@@ -353,12 +353,12 @@ void QDeclarativeCameraExposure::setAutoIsoSensitivity()
\endtable
*/
-QDeclarativeCamera::ExposureMode QDeclarativeCameraExposure::exposureMode() const
+QDeclarativeCameraExposure::ExposureMode QDeclarativeCameraExposure::exposureMode() const
{
- return QDeclarativeCamera::ExposureMode(m_exposure->exposureMode());
+ return QDeclarativeCameraExposure::ExposureMode(m_exposure->exposureMode());
}
-void QDeclarativeCameraExposure::setExposureMode(QDeclarativeCamera::ExposureMode mode)
+void QDeclarativeCameraExposure::setExposureMode(QDeclarativeCameraExposure::ExposureMode mode)
{
if (exposureMode() != mode) {
m_exposure->setExposureMode(QCameraExposure::ExposureMode(mode));
@@ -413,14 +413,14 @@ void QDeclarativeCameraExposure::setSpotMeteringPoint(const QPointF &point)
\row \li Camera.MeteringSpot \li A specific location (\l spotMeteringPoint) is used to measure exposure.
\endtable
*/
-QDeclarativeCamera::MeteringMode QDeclarativeCameraExposure::meteringMode() const
+QDeclarativeCameraExposure::MeteringMode QDeclarativeCameraExposure::meteringMode() const
{
- return QDeclarativeCamera::MeteringMode(m_exposure->meteringMode());
+ return QDeclarativeCameraExposure::MeteringMode(m_exposure->meteringMode());
}
-void QDeclarativeCameraExposure::setMeteringMode(QDeclarativeCamera::MeteringMode mode)
+void QDeclarativeCameraExposure::setMeteringMode(QDeclarativeCameraExposure::MeteringMode mode)
{
- QDeclarativeCamera::MeteringMode oldMode = meteringMode();
+ QDeclarativeCameraExposure::MeteringMode oldMode = meteringMode();
m_exposure->setMeteringMode(QCameraExposure::MeteringMode(mode));
if (oldMode != meteringMode())
emit meteringModeChanged(meteringMode());
diff --git a/src/imports/multimedia/qdeclarativecameraexposure_p.h b/src/imports/multimedia/qdeclarativecameraexposure_p.h
index 6908a965c..a0b3dea1e 100644
--- a/src/imports/multimedia/qdeclarativecameraexposure_p.h
+++ b/src/imports/multimedia/qdeclarativecameraexposure_p.h
@@ -74,15 +74,38 @@ class QDeclarativeCameraExposure : public QObject
Q_PROPERTY(qreal manualAperture READ manualAperture WRITE setManualAperture NOTIFY manualApertureChanged)
Q_PROPERTY(qreal manualIso READ manualIsoSensitivity WRITE setManualIsoSensitivity NOTIFY manualIsoSensitivityChanged)
- Q_PROPERTY(QDeclarativeCamera::ExposureMode exposureMode READ exposureMode WRITE setExposureMode NOTIFY exposureModeChanged)
+ Q_PROPERTY(ExposureMode exposureMode READ exposureMode WRITE setExposureMode NOTIFY exposureModeChanged)
Q_PROPERTY(QPointF spotMeteringPoint READ spotMeteringPoint WRITE setSpotMeteringPoint NOTIFY spotMeteringPointChanged)
- Q_PROPERTY(QDeclarativeCamera::MeteringMode meteringMode READ meteringMode WRITE setMeteringMode NOTIFY meteringModeChanged)
+ Q_PROPERTY(MeteringMode meteringMode READ meteringMode WRITE setMeteringMode NOTIFY meteringModeChanged)
+ Q_ENUMS(ExposureMode)
+ Q_ENUMS(MeteringMode)
public:
+ enum ExposureMode {
+ ExposureAuto = QCameraExposure::ExposureAuto,
+ ExposureManual = QCameraExposure::ExposureManual,
+ ExposurePortrait = QCameraExposure::ExposurePortrait,
+ ExposureNight = QCameraExposure::ExposureNight,
+ ExposureBacklight = QCameraExposure::ExposureBacklight,
+ ExposureSpotlight = QCameraExposure::ExposureSpotlight,
+ ExposureSports = QCameraExposure::ExposureSports,
+ ExposureSnow = QCameraExposure::ExposureSnow,
+ ExposureBeach = QCameraExposure::ExposureBeach,
+ ExposureLargeAperture = QCameraExposure::ExposureLargeAperture,
+ ExposureSmallAperture = QCameraExposure::ExposureSmallAperture,
+ ExposureModeVendor = QCameraExposure::ExposureModeVendor
+ };
+
+ enum MeteringMode {
+ MeteringMatrix = QCameraExposure::MeteringMatrix,
+ MeteringAverage = QCameraExposure::MeteringAverage,
+ MeteringSpot = QCameraExposure::MeteringSpot
+ };
+
~QDeclarativeCameraExposure();
- QDeclarativeCamera::ExposureMode exposureMode() const;
+ ExposureMode exposureMode() const;
qreal exposureCompensation() const;
int isoSensitivity() const;
@@ -96,11 +119,11 @@ public:
QPointF spotMeteringPoint() const;
void setSpotMeteringPoint(const QPointF &point);
- QDeclarativeCamera::MeteringMode meteringMode() const;
- void setMeteringMode(QDeclarativeCamera::MeteringMode mode);
+ MeteringMode meteringMode() const;
+ void setMeteringMode(MeteringMode mode);
public Q_SLOTS:
- void setExposureMode(QDeclarativeCamera::ExposureMode);
+ void setExposureMode(ExposureMode);
void setExposureCompensation(qreal ev);
void setManualAperture(qreal);
@@ -121,9 +144,9 @@ Q_SIGNALS:
void manualShutterSpeedChanged(qreal);
void exposureCompensationChanged(qreal);
- void exposureModeChanged(QDeclarativeCamera::ExposureMode);
+ void exposureModeChanged(ExposureMode);
- void meteringModeChanged(QDeclarativeCamera::MeteringMode);
+ void meteringModeChanged(MeteringMode);
void spotMeteringPointChanged(QPointF);
private:
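
The header changes above give QDeclarativeCameraExposure (and, further below, the flash and focus classes) their own copies of the backend enums, registered with Q_ENUMS, so the enum-typed properties, slots and signals no longer reference QDeclarativeCamera. A reduced sketch of that pattern, using hypothetical names rather than the real Qt classes:

    #include <QObject>

    // Stand-in for the backend enum being mirrored (e.g. QCameraExposure::ExposureMode).
    namespace Backend {
        enum ExposureMode { Auto = 0, Manual = 1, Night = 2 };
    }

    class DeclarativeExposure : public QObject
    {
        Q_OBJECT
        // The property and its notify signal are typed with the class's own enum...
        Q_PROPERTY(ExposureMode exposureMode READ exposureMode WRITE setExposureMode NOTIFY exposureModeChanged)
        Q_ENUMS(ExposureMode)   // ...and Q_ENUMS exposes the values to QML.
    public:
        enum ExposureMode {
            ExposureAuto   = Backend::Auto,    // values stay identical to the backend's,
            ExposureManual = Backend::Manual,  // so casting between the two enums is safe
            ExposureNight  = Backend::Night
        };

        explicit DeclarativeExposure(QObject *parent = 0)
            : QObject(parent), m_mode(ExposureAuto) { }

        ExposureMode exposureMode() const { return m_mode; }

    public Q_SLOTS:
        void setExposureMode(ExposureMode mode)
        {
            if (m_mode == mode)
                return;
            m_mode = mode;                     // a real control forwards to the backend here
            emit exposureModeChanged(m_mode);
        }

    Q_SIGNALS:
        void exposureModeChanged(ExposureMode mode);

    private:
        ExposureMode m_mode;
    };
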
diff --git a/src/imports/multimedia/qdeclarativecameraflash.cpp b/src/imports/multimedia/qdeclarativecameraflash.cpp
index 726465d1f..a7b31767e 100644
--- a/src/imports/multimedia/qdeclarativecameraflash.cpp
+++ b/src/imports/multimedia/qdeclarativecameraflash.cpp
@@ -131,14 +131,14 @@ bool QDeclarativeCameraFlash::isFlashReady() const
\endtable
*/
-int QDeclarativeCameraFlash::flashMode() const
+QDeclarativeCameraFlash::FlashMode QDeclarativeCameraFlash::flashMode() const
{
- return m_exposure->flashMode();
+ return QDeclarativeCameraFlash::FlashMode(int(m_exposure->flashMode()));
}
-void QDeclarativeCameraFlash::setFlashMode(int mode)
+void QDeclarativeCameraFlash::setFlashMode(QDeclarativeCameraFlash::FlashMode mode)
{
- if (m_exposure->flashMode() != mode) {
+ if (flashMode() != mode) {
m_exposure->setFlashMode(QCameraExposure::FlashModes(mode));
emit flashModeChanged(mode);
}
@@ -154,7 +154,7 @@ void QDeclarativeCameraFlash::setFlashMode(int mode)
\qmlsignal QtMultimedia::CameraFlash::flashReady(bool)
This signal is emitted when QCameraExposure indicates that
the flash is ready to use.
- The corresponsing handler is \c onFlashReadyChanged.
+ The corresponding handler is \c onFlashReadyChanged.
*/
QT_END_NAMESPACE
diff --git a/src/imports/multimedia/qdeclarativecameraflash_p.h b/src/imports/multimedia/qdeclarativecameraflash_p.h
index e3919a41c..7ff2a637a 100644
--- a/src/imports/multimedia/qdeclarativecameraflash_p.h
+++ b/src/imports/multimedia/qdeclarativecameraflash_p.h
@@ -64,19 +64,34 @@ class QDeclarativeCameraFlash : public QObject
{
Q_OBJECT
Q_PROPERTY(bool ready READ isFlashReady NOTIFY flashReady)
- Q_PROPERTY(int mode READ flashMode WRITE setFlashMode NOTIFY flashModeChanged)
+ Q_PROPERTY(FlashMode mode READ flashMode WRITE setFlashMode NOTIFY flashModeChanged)
+
+ Q_ENUMS(FlashMode)
public:
+ enum FlashMode {
+ FlashAuto = QCameraExposure::FlashAuto,
+ FlashOff = QCameraExposure::FlashOff,
+ FlashOn = QCameraExposure::FlashOn,
+ FlashRedEyeReduction = QCameraExposure::FlashRedEyeReduction,
+ FlashFill = QCameraExposure::FlashFill,
+ FlashTorch = QCameraExposure::FlashTorch,
+ FlashVideoLight = QCameraExposure::FlashVideoLight,
+ FlashSlowSyncFrontCurtain = QCameraExposure::FlashSlowSyncFrontCurtain,
+ FlashSlowSyncRearCurtain = QCameraExposure::FlashSlowSyncRearCurtain,
+ FlashManual = QCameraExposure::FlashManual
+ };
+
~QDeclarativeCameraFlash();
- int flashMode() const;
+ FlashMode flashMode() const;
bool isFlashReady() const;
public Q_SLOTS:
- void setFlashMode(int);
+ void setFlashMode(FlashMode);
Q_SIGNALS:
void flashReady(bool status);
- void flashModeChanged(int);
+ void flashModeChanged(FlashMode);
private:
friend class QDeclarativeCamera;
diff --git a/src/imports/multimedia/qdeclarativecamerafocus.cpp b/src/imports/multimedia/qdeclarativecamerafocus.cpp
index 7f0133b6a..b474599b5 100644
--- a/src/imports/multimedia/qdeclarativecamerafocus.cpp
+++ b/src/imports/multimedia/qdeclarativecamerafocus.cpp
@@ -161,9 +161,9 @@ QDeclarativeCameraFocus::~QDeclarativeCameraFocus()
and \l focusZones property provide information and control
over how automatic focusing is performed.
*/
-QDeclarativeCamera::FocusMode QDeclarativeCameraFocus::focusMode() const
+QDeclarativeCameraFocus::FocusMode QDeclarativeCameraFocus::focusMode() const
{
- return QDeclarativeCamera::FocusMode(int(m_focus->focusMode()));
+ return QDeclarativeCameraFocus::FocusMode(int(m_focus->focusMode()));
}
/*!
@@ -172,12 +172,12 @@ QDeclarativeCamera::FocusMode QDeclarativeCameraFocus::focusMode() const
Returns true if the supplied \a mode is a supported focus mode, and
false otherwise.
*/
-bool QDeclarativeCameraFocus::isFocusModeSupported(QDeclarativeCamera::FocusMode mode) const
+bool QDeclarativeCameraFocus::isFocusModeSupported(QDeclarativeCameraFocus::FocusMode mode) const
{
return m_focus->isFocusModeSupported(QCameraFocus::FocusModes(int(mode)));
}
-void QDeclarativeCameraFocus::setFocusMode(QDeclarativeCamera::FocusMode mode)
+void QDeclarativeCameraFocus::setFocusMode(QDeclarativeCameraFocus::FocusMode mode)
{
if (mode != focusMode()) {
m_focus->setFocusMode(QCameraFocus::FocusModes(int(mode)));
@@ -221,12 +221,12 @@ void QDeclarativeCameraFocus::setFocusMode(QDeclarativeCamera::FocusMode mode)
\li Focus to the custom point, defined by the customFocusPoint property.
\endtable
*/
-QDeclarativeCamera::FocusPointMode QDeclarativeCameraFocus::focusPointMode() const
+QDeclarativeCameraFocus::FocusPointMode QDeclarativeCameraFocus::focusPointMode() const
{
- return QDeclarativeCamera::FocusPointMode(m_focus->focusPointMode());
+ return QDeclarativeCameraFocus::FocusPointMode(m_focus->focusPointMode());
}
-void QDeclarativeCameraFocus::setFocusPointMode(QDeclarativeCamera::FocusPointMode mode)
+void QDeclarativeCameraFocus::setFocusPointMode(QDeclarativeCameraFocus::FocusPointMode mode)
{
if (mode != focusPointMode()) {
m_focus->setFocusPointMode(QCameraFocus::FocusPointMode(mode));
@@ -240,7 +240,7 @@ void QDeclarativeCameraFocus::setFocusPointMode(QDeclarativeCamera::FocusPointMo
Returns true if the supplied \a mode is a supported focus point mode, and
false otherwise.
*/
-bool QDeclarativeCameraFocus::isFocusPointModeSupported(QDeclarativeCamera::FocusPointMode mode) const
+bool QDeclarativeCameraFocus::isFocusPointModeSupported(QDeclarativeCameraFocus::FocusPointMode mode) const
{
return m_focus->isFocusPointModeSupported(QCameraFocus::FocusPointMode(mode));
}
diff --git a/src/imports/multimedia/qdeclarativecamerafocus_p.h b/src/imports/multimedia/qdeclarativecamerafocus_p.h
index 3287b9309..f9b79879b 100644
--- a/src/imports/multimedia/qdeclarativecamerafocus_p.h
+++ b/src/imports/multimedia/qdeclarativecamerafocus_p.h
@@ -66,30 +66,49 @@ class QDeclarativeCamera;
class QDeclarativeCameraFocus : public QObject
{
Q_OBJECT
- Q_PROPERTY(QDeclarativeCamera::FocusMode focusMode READ focusMode WRITE setFocusMode NOTIFY focusModeChanged)
- Q_PROPERTY(QDeclarativeCamera::FocusPointMode focusPointMode READ focusPointMode WRITE setFocusPointMode NOTIFY focusPointModeChanged)
+ Q_PROPERTY(FocusMode focusMode READ focusMode WRITE setFocusMode NOTIFY focusModeChanged)
+ Q_PROPERTY(FocusPointMode focusPointMode READ focusPointMode WRITE setFocusPointMode NOTIFY focusPointModeChanged)
Q_PROPERTY(QPointF customFocusPoint READ customFocusPoint WRITE setCustomFocusPoint NOTIFY customFocusPointChanged)
Q_PROPERTY(QObject *focusZones READ focusZones CONSTANT)
+
+ Q_ENUMS(FocusMode)
+ Q_ENUMS(FocusPointMode)
public:
+ enum FocusMode {
+ FocusManual = QCameraFocus::ManualFocus,
+ FocusHyperfocal = QCameraFocus::HyperfocalFocus,
+ FocusInfinity = QCameraFocus::InfinityFocus,
+ FocusAuto = QCameraFocus::AutoFocus,
+ FocusContinuous = QCameraFocus::ContinuousFocus,
+ FocusMacro = QCameraFocus::MacroFocus
+ };
+
+ enum FocusPointMode {
+ FocusPointAuto = QCameraFocus::FocusPointAuto,
+ FocusPointCenter = QCameraFocus::FocusPointCenter,
+ FocusPointFaceDetection = QCameraFocus::FocusPointFaceDetection,
+ FocusPointCustom = QCameraFocus::FocusPointCustom
+ };
+
~QDeclarativeCameraFocus();
- QDeclarativeCamera::FocusMode focusMode() const;
- QDeclarativeCamera::FocusPointMode focusPointMode() const;
+ FocusMode focusMode() const;
+ FocusPointMode focusPointMode() const;
QPointF customFocusPoint() const;
QAbstractListModel *focusZones() const;
- Q_INVOKABLE bool isFocusModeSupported(QDeclarativeCamera::FocusMode mode) const;
- Q_INVOKABLE bool isFocusPointModeSupported(QDeclarativeCamera::FocusPointMode mode) const;
+ Q_INVOKABLE bool isFocusModeSupported(FocusMode mode) const;
+ Q_INVOKABLE bool isFocusPointModeSupported(FocusPointMode mode) const;
public Q_SLOTS:
- void setFocusMode(QDeclarativeCamera::FocusMode);
- void setFocusPointMode(QDeclarativeCamera::FocusPointMode mode);
+ void setFocusMode(FocusMode);
+ void setFocusPointMode(FocusPointMode mode);
void setCustomFocusPoint(const QPointF &point);
Q_SIGNALS:
- void focusModeChanged(QDeclarativeCamera::FocusMode);
- void focusPointModeChanged(QDeclarativeCamera::FocusPointMode);
+ void focusModeChanged(FocusMode);
+ void focusPointModeChanged(FocusPointMode);
void customFocusPointChanged(const QPointF &);
private Q_SLOTS:
diff --git a/src/imports/multimedia/qdeclarativecameraimageprocessing.cpp b/src/imports/multimedia/qdeclarativecameraimageprocessing.cpp
index 5860e028d..88c73e57c 100644
--- a/src/imports/multimedia/qdeclarativecameraimageprocessing.cpp
+++ b/src/imports/multimedia/qdeclarativecameraimageprocessing.cpp
@@ -230,11 +230,15 @@ void QDeclarativeCameraImageProcessing::setDenoisingLevel(qreal value)
/*!
\qmlsignal QtMultimedia::Camera::whiteBalanceModeChanged(Camera::WhiteBalanceMode)
This signal is emitted when the \c whiteBalanceMode property is changed.
+
+ The corresponding handler is \c onWhiteBalanceModeChanged.
*/
/*!
\qmlsignal QtMultimedia::Camera::manualWhiteBalanceChanged(qreal)
This signal is emitted when the \c manualWhiteBalance property is changed.
+
+ The corresponding handler is \c onManualWhiteBalanceChanged.
*/
QT_END_NAMESPACE
diff --git a/src/imports/multimedia/qdeclarativeradio.cpp b/src/imports/multimedia/qdeclarativeradio.cpp
index b3a7b3f6e..00731b451 100644
--- a/src/imports/multimedia/qdeclarativeradio.cpp
+++ b/src/imports/multimedia/qdeclarativeradio.cpp
@@ -540,6 +540,8 @@ void QDeclarativeRadio::_q_availabilityChanged(QMultimedia::AvailabilityStatus a
The \a frequency is returned in Hertz, and the \a stationId corresponds to the station Id
in the \l RadioData for this radio station.
+
+ The corresponding handler is \c onStationFound.
*/
QT_END_NAMESPACE
diff --git a/src/multimedia/camera/qcamerainfo.cpp b/src/multimedia/camera/qcamerainfo.cpp
index 218219c7e..230b71222 100644
--- a/src/multimedia/camera/qcamerainfo.cpp
+++ b/src/multimedia/camera/qcamerainfo.cpp
@@ -104,7 +104,7 @@ QCameraInfo::QCameraInfo(const QCamera &camera)
: d(new QCameraInfoPrivate)
{
const QVideoDeviceSelectorControl *deviceControl = camera.d_func()->deviceControl;
- if (deviceControl) {
+ if (deviceControl && deviceControl->deviceCount() > 0) {
const int selectedDevice = deviceControl->selectedDevice();
d->deviceName = deviceControl->deviceName(selectedDevice);
d->description = deviceControl->deviceDescription(selectedDevice);
diff --git a/src/multimedia/gsttools_headers/qgstutils_p.h b/src/multimedia/gsttools_headers/qgstutils_p.h
index 568580264..eea1e15d9 100644
--- a/src/multimedia/gsttools_headers/qgstutils_p.h
+++ b/src/multimedia/gsttools_headers/qgstutils_p.h
@@ -78,6 +78,8 @@ namespace QGstUtils {
const QSet<QString> &supportedMimeTypeSet);
}
+void qt_gst_object_ref_sink(gpointer object);
+
QT_END_NAMESPACE
#endif
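
The header gains a declaration of qt_gst_object_ref_sink(); its implementation is not part of this hunk. If, as the name suggests, the helper exists to take ownership of floating GStreamer references across both supported GStreamer versions, a plausible sketch could look like the following — an assumption, not the code from qgstutils.cpp:

    #include <gst/gst.h>

    // Hypothetical implementation sketch; the real body may differ.
    void qt_gst_object_ref_sink(gpointer object)
    {
    #if GST_CHECK_VERSION(1, 0, 0)
        gst_object_ref_sink(object);             // 1.0: one call refs and sinks the floating flag
    #else
        gst_object_ref(object);                  // 0.10: take a reference first...
        gst_object_sink(GST_OBJECT(object));     // ...then clear the floating flag
    #endif
    }
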
diff --git a/src/plugins/android/jar/src/org/qtproject/qt5/android/multimedia/QtAndroidMediaPlayer.java b/src/plugins/android/jar/src/org/qtproject/qt5/android/multimedia/QtAndroidMediaPlayer.java
index cd79c979c..86ec30a5f 100644
--- a/src/plugins/android/jar/src/org/qtproject/qt5/android/multimedia/QtAndroidMediaPlayer.java
+++ b/src/plugins/android/jar/src/org/qtproject/qt5/android/multimedia/QtAndroidMediaPlayer.java
@@ -53,59 +53,56 @@ import android.util.Log;
import java.io.FileDescriptor;
import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager;
+import android.view.SurfaceHolder;
-public class QtAndroidMediaPlayer extends MediaPlayer
+public class QtAndroidMediaPlayer
{
// Native callback functions for MediaPlayer
native public void onErrorNative(int what, int extra, long id);
native public void onBufferingUpdateNative(int percent, long id);
+ native public void onProgressUpdateNative(int progress, long id);
+ native public void onDurationChangedNative(int duration, long id);
native public void onInfoNative(int what, int extra, long id);
- native public void onMediaPlayerInfoNative(int what, int extra, long id);
native public void onVideoSizeChangedNative(int width, int height, long id);
+ native public void onStateChangedNative(int state, long id);
+ private MediaPlayer mMediaPlayer = null;
private Uri mUri = null;
private final long mID;
+ private final Activity mActivity;
private boolean mMuted = false;
- private boolean mPreparing = false;
- private boolean mInitialized = false;
private int mVolume = 100;
private static final String TAG = "Qt MediaPlayer";
- private static Context mApplicationContext = null;
-
- final int MEDIA_PLAYER_INVALID_STATE = 1;
- final int MEDIA_PLAYER_PREPARING = 2;
- final int MEDIA_PLAYER_READY = 3;
- final int MEDIA_PLAYER_DURATION = 4;
- final int MEDIA_PLAYER_PROGRESS = 5;
- final int MEDIA_PLAYER_FINISHED = 6;
-
- // Activity set by Qt on load.
- static public void setActivity(final Activity activity)
- {
- try {
- mApplicationContext = activity.getApplicationContext();
- } catch(final Exception e) {
- Log.d(TAG, "" + e.getMessage());
- }
+ private SurfaceHolder mSurfaceHolder = null;
+
+ private class State {
+ final static int Uninitialized = 0x1 /* End */;
+ final static int Idle = 0x2;
+ final static int Preparing = 0x4;
+ final static int Prepared = 0x8;
+ final static int Initialized = 0x10;
+ final static int Started = 0x20;
+ final static int Stopped = 0x40;
+ final static int Paused = 0x80;
+ final static int PlaybackCompleted = 0x100;
+ final static int Error = 0x200;
}
- private class ProgressWatcher implements Runnable
+ private volatile int mState = State.Uninitialized;
+
+ private class ProgressWatcher
+ implements Runnable
{
@Override
public void run()
{
- final int duratation = getDuration();
- int currentPosition = getCurrentPosition();
-
try {
- while (duratation >= currentPosition && isPlaying()) {
- onMediaPlayerInfoNative(MEDIA_PLAYER_PROGRESS, currentPosition, mID);
+ while ((mState & (State.Started)) != 0) {
+ onProgressUpdateNative(getCurrentPosition(), mID);
Thread.sleep(1000);
- currentPosition = getCurrentPosition();
}
} catch (final InterruptedException e) {
- Log.d(TAG, "" + e.getMessage());
- return;
+ // Ignore
}
}
}
@@ -121,7 +118,7 @@ public class QtAndroidMediaPlayer extends MediaPlayer
final int what,
final int extra)
{
- reset();
+ setState(State.Error);
onErrorNative(what, extra, mID);
return true;
}
@@ -158,7 +155,7 @@ public class QtAndroidMediaPlayer extends MediaPlayer
@Override
public void onCompletion(final MediaPlayer mp)
{
- onMediaPlayerInfoNative(MEDIA_PLAYER_FINISHED, 0, mID);
+ setState(State.PlaybackCompleted);
}
}
@@ -190,9 +187,8 @@ public class QtAndroidMediaPlayer extends MediaPlayer
@Override
public void onPrepared(final MediaPlayer mp)
{
- mPreparing = false;
- onMediaPlayerInfoNative(MEDIA_PLAYER_READY, 0, mID);
- onMediaPlayerInfoNative(MEDIA_PLAYER_DURATION, getDuration(), mID);
+ setState(State.Prepared);
+ onDurationChangedNative(getDuration(), mID);
}
}
@@ -207,7 +203,7 @@ public class QtAndroidMediaPlayer extends MediaPlayer
@Override
public void onSeekComplete(final MediaPlayer mp)
{
- onMediaPlayerInfoNative(MEDIA_PLAYER_PROGRESS, getCurrentPosition(), mID);
+ onProgressUpdateNative(getCurrentPosition(), mID);
}
}
@@ -229,98 +225,117 @@ public class QtAndroidMediaPlayer extends MediaPlayer
}
- public QtAndroidMediaPlayer(final long id)
+ public QtAndroidMediaPlayer(final Activity activity, final long id)
{
- super();
mID = id;
- setOnBufferingUpdateListener(new MediaPlayerBufferingListener());
- setOnCompletionListener(new MediaPlayerCompletionListener());
- setOnInfoListener(new MediaPlayerInfoListener());
- setOnSeekCompleteListener(new MediaPlayerSeekCompleteListener());
- setOnVideoSizeChangedListener(new MediaPlayerVideoSizeChangedListener());
- setOnErrorListener(new MediaPlayerErrorListener());
+ mActivity = activity;
}
- @Override
- public void start()
+ private void setState(int state)
{
- if (!mInitialized) {
- onMediaPlayerInfoNative(MEDIA_PLAYER_INVALID_STATE, 0, mID);
+ if (mState == state)
return;
- }
- if (mApplicationContext == null)
- return;
+ mState = state;
- if (mPreparing)
- return;
+ onStateChangedNative(mState, mID);
+ }
+
+
+ private void init()
+ {
+ if (mMediaPlayer == null) {
+ mMediaPlayer = new MediaPlayer();
+ setState(State.Idle);
+ }
+ }
- if (isPlaying())
+ public void start()
+ {
+ if ((mState & (State.Prepared
+ | State.Started
+ | State.Paused
+ | State.PlaybackCompleted)) == 0) {
return;
+ }
try {
- super.start();
+ mMediaPlayer.start();
+ setState(State.Started);
Thread progressThread = new Thread(new ProgressWatcher());
progressThread.start();
} catch (final IllegalStateException e) {
- reset();
Log.d(TAG, "" + e.getMessage());
}
}
- @Override
+
public void pause()
{
- if (!isPlaying())
+ if ((mState & (State.Started | State.Paused | State.PlaybackCompleted)) == 0)
return;
try {
- super.pause();
+ mMediaPlayer.pause();
+ setState(State.Paused);
} catch (final IllegalStateException e) {
- reset();
Log.d(TAG, "" + e.getMessage());
}
}
- @Override
+
public void stop()
{
- if (!mInitialized)
+ if ((mState & (State.Prepared
+ | State.Started
+ | State.Stopped
+ | State.Paused
+ | State.PlaybackCompleted)) == 0) {
return;
+ }
try {
- super.stop();
+ mMediaPlayer.stop();
+ setState(State.Stopped);
} catch (final IllegalStateException e) {
Log.d(TAG, "" + e.getMessage());
- } finally {
- reset();
}
}
- @Override
+
public void seekTo(final int msec)
{
- if (!mInitialized)
+ if ((mState & (State.Prepared
+ | State.Started
+ | State.Paused
+ | State.PlaybackCompleted)) == 0) {
return;
+ }
try {
- super.seekTo(msec);
- onMediaPlayerInfoNative(MEDIA_PLAYER_PROGRESS, msec, mID);
+ mMediaPlayer.seekTo(msec);
+ onProgressUpdateNative(msec, mID);
} catch (final IllegalStateException e) {
Log.d(TAG, "" + e.getMessage());
}
}
- @Override
+
public boolean isPlaying()
{
boolean playing = false;
-
- if (!mInitialized)
+ if ((mState & (State.Idle
+ | State.Initialized
+ | State.Prepared
+ | State.Started
+ | State.Paused
+ | State.Stopped
+ | State.PlaybackCompleted)) == 0) {
return playing;
+ }
try {
- playing = super.isPlaying();
+ playing = mMediaPlayer.isPlaying();
} catch (final IllegalStateException e) {
Log.d(TAG, "" + e.getMessage());
}
@@ -328,34 +343,56 @@ public class QtAndroidMediaPlayer extends MediaPlayer
return playing;
}
- public void setMediaPath(final String path)
+ public void prepareAsync()
+ {
+ if ((mState & (State.Initialized | State.Stopped)) == 0)
+ return;
+
+ try {
+ mMediaPlayer.prepareAsync();
+ setState(State.Preparing);
+ } catch (final IllegalStateException e) {
+ Log.d(TAG, "" + e.getMessage());
+ }
+ }
+
+ public void setDataSource(final String path)
{
- if (mInitialized)
- reset();
+ if ((mState & State.Uninitialized) != 0)
+ init();
+
+ if ((mState & State.Idle) == 0)
+ return;
+
+ mMediaPlayer.setOnBufferingUpdateListener(new MediaPlayerBufferingListener());
+ mMediaPlayer.setOnCompletionListener(new MediaPlayerCompletionListener());
+ mMediaPlayer.setOnInfoListener(new MediaPlayerInfoListener());
+ mMediaPlayer.setOnSeekCompleteListener(new MediaPlayerSeekCompleteListener());
+ mMediaPlayer.setOnVideoSizeChangedListener(new MediaPlayerVideoSizeChangedListener());
+ mMediaPlayer.setOnErrorListener(new MediaPlayerErrorListener());
+ mMediaPlayer.setOnPreparedListener(new MediaPlayerPreparedListener());
+ if (mSurfaceHolder != null)
+ mMediaPlayer.setDisplay(mSurfaceHolder);
+
+ AssetFileDescriptor afd = null;
try {
- mPreparing = true;
- onMediaPlayerInfoNative(MEDIA_PLAYER_PREPARING, 0, mID);
mUri = Uri.parse(path);
- if (mUri.getScheme().compareTo("assets") == 0) {
+ final boolean inAssets = (mUri.getScheme().compareTo("assets") == 0);
+ if (inAssets) {
final String asset = mUri.getPath().substring(1 /* Remove first '/' */);
- final AssetManager am = mApplicationContext.getAssets();
- final AssetFileDescriptor afd = am.openFd(asset);
+ final AssetManager am = mActivity.getAssets();
+ afd = am.openFd(asset);
final long offset = afd.getStartOffset();
final long length = afd.getLength();
FileDescriptor fd = afd.getFileDescriptor();
- setDataSource(fd, offset, length);
+ mMediaPlayer.setDataSource(fd, offset, length);
} else {
- setDataSource(mApplicationContext, mUri);
+ mMediaPlayer.setDataSource(mActivity, mUri);
}
- mInitialized = true;
- setOnPreparedListener(new MediaPlayerPreparedListener());
- prepareAsync();
+ setState(State.Initialized);
} catch (final IOException e) {
- mPreparing = false;
- onErrorNative(MEDIA_ERROR_UNKNOWN,
- /* MEDIA_ERROR_UNSUPPORTED= */ -1010,
- mID);
+ Log.d(TAG, "" + e.getMessage());
} catch (final IllegalArgumentException e) {
Log.d(TAG, "" + e.getMessage());
} catch (final SecurityException e) {
@@ -364,19 +401,36 @@ public class QtAndroidMediaPlayer extends MediaPlayer
Log.d(TAG, "" + e.getMessage());
} catch (final NullPointerException e) {
Log.d(TAG, "" + e.getMessage());
+ } finally {
+ if (afd !=null) {
+ try { afd.close(); } catch (final IOException ioe) { /* Ignore... */ }
+ }
+ if ((mState & State.Initialized) == 0) {
+ setState(State.Error);
+ onErrorNative(MediaPlayer.MEDIA_ERROR_UNKNOWN,
+ -1004 /*MEDIA_ERROR_IO*/,
+ mID);
+ return;
+ }
}
}
- @Override
+
public int getCurrentPosition()
{
int currentPosition = 0;
-
- if (!mInitialized)
+ if ((mState & (State.Idle
+ | State.Initialized
+ | State.Prepared
+ | State.Started
+ | State.Paused
+ | State.Stopped
+ | State.PlaybackCompleted)) == 0) {
return currentPosition;
+ }
try {
- currentPosition = super.getCurrentPosition();
+ currentPosition = mMediaPlayer.getCurrentPosition();
} catch (final IllegalStateException e) {
Log.d(TAG, "" + e.getMessage());
}
@@ -384,16 +438,20 @@ public class QtAndroidMediaPlayer extends MediaPlayer
return currentPosition;
}
- @Override
+
public int getDuration()
{
int duration = 0;
-
- if (!mInitialized)
+ if ((mState & (State.Prepared
+ | State.Started
+ | State.Paused
+ | State.Stopped
+ | State.PlaybackCompleted)) == 0) {
return duration;
+ }
try {
- duration = super.getDuration();
+ duration = mMediaPlayer.getDuration();
} catch (final IllegalStateException e) {
Log.d(TAG, "" + e.getMessage());
}
@@ -414,6 +472,16 @@ public class QtAndroidMediaPlayer extends MediaPlayer
public void setVolume(int volume)
{
+ if ((mState & (State.Idle
+ | State.Initialized
+ | State.Stopped
+ | State.Prepared
+ | State.Started
+ | State.Paused
+ | State.PlaybackCompleted)) == 0) {
+ return;
+ }
+
if (volume < 0)
volume = 0;
@@ -423,7 +491,7 @@ public class QtAndroidMediaPlayer extends MediaPlayer
float newVolume = adjustVolume(volume);
try {
- super.setVolume(newVolume, newVolume);
+ mMediaPlayer.setVolume(newVolume, newVolume);
if (!mMuted)
mVolume = volume;
} catch (final IllegalStateException e) {
@@ -431,6 +499,22 @@ public class QtAndroidMediaPlayer extends MediaPlayer
}
}
+ public SurfaceHolder display()
+ {
+ return mSurfaceHolder;
+ }
+
+ public void setDisplay(SurfaceHolder sh)
+ {
+ mSurfaceHolder = sh;
+
+ if ((mState & State.Uninitialized) != 0)
+ return;
+
+ mMediaPlayer.setDisplay(mSurfaceHolder);
+ }
+
+
public int getVolume()
{
return mVolume;
@@ -447,11 +531,32 @@ public class QtAndroidMediaPlayer extends MediaPlayer
return mMuted;
}
- @Override
+
public void reset()
{
- mInitialized = false;
- super.reset();
+ if ((mState & (State.Idle
+ | State.Initialized
+ | State.Prepared
+ | State.Started
+ | State.Paused
+ | State.Stopped
+ | State.PlaybackCompleted
+ | State.Error)) == 0) {
+ return;
+ }
+
+ mMediaPlayer.reset();
+ setState(State.Idle);
}
+ public void release()
+ {
+ if (mMediaPlayer != null) {
+ mMediaPlayer.reset();
+ mMediaPlayer.release();
+ mMediaPlayer = null;
+ }
+
+ setState(State.Uninitialized);
+ }
}
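
The Java player above replaces the mPreparing/mInitialized booleans with a single bitmask state (State.Idle, State.Started, ...), and the C++ control further below mirrors the same flags through JMediaPlayer. Because each state is a power of two, "is the player in any of these states?" becomes a single AND. An illustrative C++ sketch of that test, not code from the patch:

    // Power-of-two state flags, matching the layout used in this patch.
    enum PlayerState {
        Uninitialized     = 0x001,
        Idle              = 0x002,
        Preparing         = 0x004,
        Prepared          = 0x008,
        Initialized       = 0x010,
        Started           = 0x020,
        Stopped           = 0x040,
        Paused            = 0x080,
        PlaybackCompleted = 0x100,
        Error             = 0x200
    };

    // True if the current state is any of the states listed in 'allowed'.
    static bool inAnyState(int current, int allowed)
    {
        return (current & allowed) != 0;
    }

    // Usage: seeking is only legal once the media has been prepared.
    // if (!inAnyState(mState, Prepared | Started | Paused | PlaybackCompleted))
    //     return;
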
diff --git a/src/plugins/android/src/common/qandroidvideooutput.h b/src/plugins/android/src/common/qandroidvideooutput.h
index 6e4a32e3f..8bf6be6cb 100644
--- a/src/plugins/android/src/common/qandroidvideooutput.h
+++ b/src/plugins/android/src/common/qandroidvideooutput.h
@@ -60,6 +60,7 @@ public:
virtual void setVideoSize(const QSize &) { }
virtual void stop() { }
+ virtual void reset() { }
// signals:
// void readyChanged(bool);
diff --git a/src/plugins/android/src/common/qandroidvideorendercontrol.cpp b/src/plugins/android/src/common/qandroidvideorendercontrol.cpp
index 55f71d735..b737e8a42 100644
--- a/src/plugins/android/src/common/qandroidvideorendercontrol.cpp
+++ b/src/plugins/android/src/common/qandroidvideorendercontrol.cpp
@@ -122,20 +122,8 @@ QAndroidVideoRendererControl::QAndroidVideoRendererControl(QObject *parent)
QAndroidVideoRendererControl::~QAndroidVideoRendererControl()
{
- if (m_surfaceTexture) {
- m_surfaceTexture->callMethod<void>("release");
- delete m_surfaceTexture;
- m_surfaceTexture = 0;
- }
- if (m_androidSurface) {
- m_androidSurface->callMethod<void>("release");
- delete m_androidSurface;
- m_androidSurface = 0;
- }
- if (m_surfaceHolder) {
- delete m_surfaceHolder;
- m_surfaceHolder = 0;
- }
+ clearSurfaceTexture();
+
if (m_glDeleter)
m_glDeleter->deleteLater();
}
@@ -202,6 +190,24 @@ bool QAndroidVideoRendererControl::initSurfaceTexture()
return m_surfaceTexture != 0;
}
+void QAndroidVideoRendererControl::clearSurfaceTexture()
+{
+ if (m_surfaceTexture) {
+ m_surfaceTexture->callMethod<void>("release");
+ delete m_surfaceTexture;
+ m_surfaceTexture = 0;
+ }
+ if (m_androidSurface) {
+ m_androidSurface->callMethod<void>("release");
+ delete m_androidSurface;
+ m_androidSurface = 0;
+ }
+ if (m_surfaceHolder) {
+ delete m_surfaceHolder;
+ m_surfaceHolder = 0;
+ }
+}
+
jobject QAndroidVideoRendererControl::surfaceHolder()
{
if (!initSurfaceTexture())
@@ -245,6 +251,11 @@ void QAndroidVideoRendererControl::stop()
m_nativeSize = QSize();
}
+void QAndroidVideoRendererControl::reset()
+{
+ clearSurfaceTexture();
+}
+
void QAndroidVideoRendererControl::onFrameAvailable()
{
if (!m_nativeSize.isValid() || !m_surface)
diff --git a/src/plugins/android/src/common/qandroidvideorendercontrol.h b/src/plugins/android/src/common/qandroidvideorendercontrol.h
index 6ce1e2dd4..56407d5de 100644
--- a/src/plugins/android/src/common/qandroidvideorendercontrol.h
+++ b/src/plugins/android/src/common/qandroidvideorendercontrol.h
@@ -92,6 +92,7 @@ public:
bool isReady() Q_DECL_OVERRIDE;
void setVideoSize(const QSize &size) Q_DECL_OVERRIDE;
void stop() Q_DECL_OVERRIDE;
+ void reset() Q_DECL_OVERRIDE;
void customEvent(QEvent *) Q_DECL_OVERRIDE;
@@ -107,6 +108,7 @@ private:
void createGLResources();
QMutex m_mutex;
+ void clearSurfaceTexture();
QAbstractVideoSurface *m_surface;
QSize m_nativeSize;
diff --git a/src/plugins/android/src/mediacapture/qandroidcapturesession.cpp b/src/plugins/android/src/mediacapture/qandroidcapturesession.cpp
index f40ec2498..008ebc7d5 100644
--- a/src/plugins/android/src/mediacapture/qandroidcapturesession.cpp
+++ b/src/plugins/android/src/mediacapture/qandroidcapturesession.cpp
@@ -65,12 +65,16 @@ QAndroidCaptureSession::QAndroidCaptureSession(QAndroidCameraSession *cameraSess
, m_audioEncoder(JMediaRecorder::DefaultAudioEncoder)
, m_videoEncoder(JMediaRecorder::DefaultVideoEncoder)
{
+ connect(this, SIGNAL(stateChanged(QMediaRecorder::State)), this, SLOT(updateStatus()));
+
if (cameraSession) {
connect(cameraSession, SIGNAL(opened()), this, SLOT(onCameraOpened()));
- connect(cameraSession, SIGNAL(statusChanged(QCamera::Status)),
- this, SLOT(onCameraStatusChanged(QCamera::Status)));
+ connect(cameraSession, SIGNAL(statusChanged(QCamera::Status)), this, SLOT(updateStatus()));
connect(cameraSession, SIGNAL(captureModeChanged(QCamera::CaptureModes)),
- this, SLOT(onCameraCaptureModeChanged(QCamera::CaptureModes)));
+ this, SLOT(updateStatus()));
+ connect(cameraSession, SIGNAL(readyForCaptureChanged(bool)), this, SLOT(updateStatus()));
+ } else {
+ updateStatus();
}
m_notifyTimer.setInterval(1000);
@@ -126,7 +130,6 @@ bool QAndroidCaptureSession::setOutputLocation(const QUrl &location)
if (m_requestedOutputLocation.isValid()
&& (m_requestedOutputLocation.isLocalFile() || m_requestedOutputLocation.isRelative())) {
- emit actualLocationChanged(m_requestedOutputLocation);
return true;
}
@@ -164,10 +167,10 @@ void QAndroidCaptureSession::setState(QMediaRecorder::State state)
bool QAndroidCaptureSession::start()
{
- if (m_state == QMediaRecorder::RecordingState)
+ if (m_state == QMediaRecorder::RecordingState || m_status != QMediaRecorder::LoadedStatus)
return false;
- setStatus(QMediaRecorder::LoadingStatus);
+ setStatus(QMediaRecorder::StartingStatus);
if (m_mediaRecorder) {
m_mediaRecorder->release();
@@ -179,17 +182,11 @@ bool QAndroidCaptureSession::start()
// Set audio/video sources
if (m_cameraSession) {
- if (m_cameraSession->status() != QCamera::ActiveStatus) {
- emit error(QMediaRecorder::ResourceError, QLatin1String("Camera must be active to record it."));
- setStatus(QMediaRecorder::UnloadedStatus);
- return false;
- } else {
- updateViewfinder();
- m_cameraSession->camera()->unlock();
- m_mediaRecorder->setCamera(m_cameraSession->camera());
- m_mediaRecorder->setAudioSource(JMediaRecorder::Camcorder);
- m_mediaRecorder->setVideoSource(JMediaRecorder::Camera);
- }
+ updateViewfinder();
+ m_cameraSession->camera()->unlock();
+ m_mediaRecorder->setCamera(m_cameraSession->camera());
+ m_mediaRecorder->setAudioSource(JMediaRecorder::Camcorder);
+ m_mediaRecorder->setVideoSource(JMediaRecorder::Camera);
} else {
m_mediaRecorder->setAudioSource(m_audioSource);
}
@@ -213,7 +210,6 @@ bool QAndroidCaptureSession::start()
m_mediaRecorder->setOrientationHint(m_cameraSession->currentCameraRotation());
}
-
// Set output file
QString filePath = m_mediaStorageLocation.generateFileName(
m_requestedOutputLocation.isLocalFile() ? m_requestedOutputLocation.toLocalFile()
@@ -224,29 +220,21 @@ bool QAndroidCaptureSession::start()
: QLatin1String("REC_"),
m_containerFormat);
- m_actualOutputLocation = QUrl::fromLocalFile(filePath);
- if (m_actualOutputLocation != m_requestedOutputLocation)
- emit actualLocationChanged(m_actualOutputLocation);
-
+ m_usedOutputLocation = QUrl::fromLocalFile(filePath);
m_mediaRecorder->setOutputFile(filePath);
if (!m_mediaRecorder->prepare()) {
emit error(QMediaRecorder::FormatError, QLatin1String("Unable to prepare the media recorder."));
- setStatus(QMediaRecorder::UnloadedStatus);
+ restartViewfinder();
return false;
}
- setStatus(QMediaRecorder::LoadedStatus);
- setStatus(QMediaRecorder::StartingStatus);
-
if (!m_mediaRecorder->start()) {
emit error(QMediaRecorder::FormatError, QLatin1String("Unable to start the media recorder."));
- setStatus(QMediaRecorder::UnloadedStatus);
+ restartViewfinder();
return false;
}
- setStatus(QMediaRecorder::RecordingStatus);
-
m_elapsedTime.start();
m_notifyTimer.start();
updateDuration();
@@ -270,17 +258,15 @@ void QAndroidCaptureSession::stop(bool error)
updateDuration();
m_elapsedTime.invalidate();
- if (m_cameraSession) {
- m_cameraSession->camera()->reconnect();
- // Viewport needs to be restarted
- m_cameraSession->camera()->startPreview();
- m_cameraSession->setReadyForCapture(true);
- }
-
m_mediaRecorder->release();
delete m_mediaRecorder;
m_mediaRecorder = 0;
+ if (m_cameraSession) {
+ // Viewport needs to be restarted after recording
+ restartViewfinder();
+ }
+
if (!error) {
// if the media is saved into the standard media location, register it
// with the Android media scanner so it appears immediately in apps
@@ -290,9 +276,10 @@ void QAndroidCaptureSession::stop(bool error)
: JMultimediaUtils::getDefaultMediaDirectory(JMultimediaUtils::Sounds);
if (mediaPath.startsWith(standardLoc))
JMultimediaUtils::registerMediaFile(mediaPath);
- }
- setStatus(QMediaRecorder::UnloadedStatus);
+ m_actualOutputLocation = m_usedOutputLocation;
+ emit actualLocationChanged(m_actualOutputLocation);
+ }
}
void QAndroidCaptureSession::setStatus(QMediaRecorder::Status status)
@@ -437,6 +424,13 @@ void QAndroidCaptureSession::updateViewfinder()
m_resolutionDirty = false;
}
+void QAndroidCaptureSession::restartViewfinder()
+{
+ m_cameraSession->camera()->reconnect();
+ m_cameraSession->camera()->startPreview();
+ m_cameraSession->setReadyForCapture(true);
+}
+
void QAndroidCaptureSession::updateDuration()
{
if (m_elapsedTime.isValid())
@@ -513,16 +507,40 @@ QAndroidCaptureSession::CaptureProfile QAndroidCaptureSession::getProfile(int id
return profile;
}
-void QAndroidCaptureSession::onCameraStatusChanged(QCamera::Status status)
+void QAndroidCaptureSession::updateStatus()
{
- if (status == QCamera::StoppingStatus)
- setState(QMediaRecorder::StoppedState);
-}
+ if (m_cameraSession) {
+ // Video recording
-void QAndroidCaptureSession::onCameraCaptureModeChanged(QCamera::CaptureModes mode)
-{
- if (!mode.testFlag(QCamera::CaptureVideo))
- setState(QMediaRecorder::StoppedState);
+ // stop recording when stopping the camera
+ if (m_cameraSession->status() == QCamera::StoppingStatus
+ || !m_cameraSession->captureMode().testFlag(QCamera::CaptureVideo)) {
+ setState(QMediaRecorder::StoppedState);
+ }
+
+ if (m_state == QMediaRecorder::RecordingState) {
+ setStatus(QMediaRecorder::RecordingStatus);
+ } else if (m_cameraSession->status() == QCamera::UnavailableStatus) {
+ setStatus(QMediaRecorder::UnavailableStatus);
+ } else if (m_cameraSession->captureMode().testFlag(QCamera::CaptureVideo)
+ && m_cameraSession->isReadyForCapture()) {
+ if (m_cameraSession->status() == QCamera::StartingStatus)
+ setStatus(QMediaRecorder::LoadingStatus);
+ else if (m_cameraSession->status() == QCamera::ActiveStatus)
+ setStatus(QMediaRecorder::LoadedStatus);
+ else
+ setStatus(QMediaRecorder::UnloadedStatus);
+ } else {
+ setStatus(QMediaRecorder::UnloadedStatus);
+ }
+
+ } else {
+ // Audio-only recording
+ if (m_state == QMediaRecorder::RecordingState)
+ setStatus(QMediaRecorder::RecordingStatus);
+ else
+ setStatus(QMediaRecorder::LoadedStatus);
+ }
}
void QAndroidCaptureSession::onError(int what, int extra)
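
Instead of reacting to individual camera signals, the capture session now funnels recorder state, camera status, capture mode and capture readiness into a single updateStatus() slot that recomputes the recorder status from scratch. A reduced sketch of that shape, with hypothetical names rather than the Qt enums:

    // Illustrative only; the real decision tree is QAndroidCaptureSession::updateStatus().
    enum RecorderStatus { Unavailable, Unloaded, Loading, Loaded, Recording };

    RecorderStatus deriveStatus(bool recording, bool cameraUnavailable,
                                bool videoMode, bool readyForCapture, bool cameraActive)
    {
        if (recording)
            return Recording;
        if (cameraUnavailable)
            return Unavailable;
        if (videoMode && readyForCapture)
            return cameraActive ? Loaded : Loading;   // Loaded only once the camera is active
        return Unloaded;
    }
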
diff --git a/src/plugins/android/src/mediacapture/qandroidcapturesession.h b/src/plugins/android/src/mediacapture/qandroidcapturesession.h
index fcd87cd02..32ca9d1ae 100644
--- a/src/plugins/android/src/mediacapture/qandroidcapturesession.h
+++ b/src/plugins/android/src/mediacapture/qandroidcapturesession.h
@@ -99,8 +99,7 @@ Q_SIGNALS:
private Q_SLOTS:
void updateDuration();
void onCameraOpened();
- void onCameraStatusChanged(QCamera::Status);
- void onCameraCaptureModeChanged(QCamera::CaptureModes mode);
+ void updateStatus();
void onError(int what, int extra);
void onInfo(int what, int extra);
@@ -145,6 +144,7 @@ private:
void setStatus(QMediaRecorder::Status status);
void updateViewfinder();
+ void restartViewfinder();
JMediaRecorder *m_mediaRecorder;
QAndroidCameraSession *m_cameraSession;
@@ -161,6 +161,7 @@ private:
QMediaRecorder::State m_state;
QMediaRecorder::Status m_status;
QUrl m_requestedOutputLocation;
+ QUrl m_usedOutputLocation;
QUrl m_actualOutputLocation;
CaptureProfile m_defaultSettings;
diff --git a/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp b/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp
index 7b0c58277..4b836ddf4 100644
--- a/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp
+++ b/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp
@@ -57,25 +57,31 @@ QAndroidMediaPlayerControl::QAndroidMediaPlayerControl(QObject *parent)
mAudioAvailable(false),
mVideoAvailable(false),
mBuffering(false),
- mMediaPlayerReady(false),
+ mState(JMediaPlayer::Uninitialized),
+ mPendingState(-1),
mPendingPosition(-1),
- mPendingSetMedia(false)
-{
- connect(mMediaPlayer, SIGNAL(bufferingUpdate(qint32)),
- this, SLOT(onBufferChanged(qint32)));
- connect(mMediaPlayer, SIGNAL(info(qint32,qint32)),
- this, SLOT(onInfo(qint32,qint32)));
- connect(mMediaPlayer, SIGNAL(error(qint32,qint32)),
- this, SLOT(onError(qint32,qint32)));
- connect(mMediaPlayer, SIGNAL(mediaPlayerInfo(qint32,qint32)),
- this, SLOT(onMediaPlayerInfo(qint32,qint32)));
- connect(mMediaPlayer, SIGNAL(videoSizeChanged(qint32,qint32)),
- this, SLOT(onVideoSizeChanged(qint32,qint32)));
+ mPendingSetMedia(false),
+ mPendingVolume(-1),
+ mPendingMute(-1)
+{
+ connect(mMediaPlayer,SIGNAL(bufferingChanged(qint32)),
+ this,SLOT(onBufferingChanged(qint32)));
+ connect(mMediaPlayer,SIGNAL(info(qint32,qint32)),
+ this,SLOT(onInfo(qint32,qint32)));
+ connect(mMediaPlayer,SIGNAL(error(qint32,qint32)),
+ this,SLOT(onError(qint32,qint32)));
+ connect(mMediaPlayer,SIGNAL(stateChanged(qint32)),
+ this,SLOT(onStateChanged(qint32)));
+ connect(mMediaPlayer,SIGNAL(videoSizeChanged(qint32,qint32)),
+ this,SLOT(onVideoSizeChanged(qint32,qint32)));
+ connect(mMediaPlayer,SIGNAL(progressChanged(qint64)),
+ this,SIGNAL(positionChanged(qint64)));
+ connect(mMediaPlayer,SIGNAL(durationChanged(qint64)),
+ this,SIGNAL(durationChanged(qint64)));
}
QAndroidMediaPlayerControl::~QAndroidMediaPlayerControl()
{
- mMediaPlayer->stop();
mMediaPlayer->release();
delete mMediaPlayer;
}
@@ -92,18 +98,33 @@ QMediaPlayer::MediaStatus QAndroidMediaPlayerControl::mediaStatus() const
qint64 QAndroidMediaPlayerControl::duration() const
{
- return (mCurrentMediaStatus == QMediaPlayer::InvalidMedia
- || mCurrentMediaStatus == QMediaPlayer::NoMedia
- || !mMediaPlayerReady) ? 0
- : mMediaPlayer->getDuration();
+ if ((mState & (JMediaPlayer::Prepared
+ | JMediaPlayer::Started
+ | JMediaPlayer::Paused
+ | JMediaPlayer::Stopped
+ | JMediaPlayer::PlaybackCompleted)) == 0) {
+ return 0;
+ }
+
+ return mMediaPlayer->getDuration();
}
qint64 QAndroidMediaPlayerControl::position() const
{
- if (!mMediaPlayerReady)
- return mPendingPosition < 0 ? 0 : mPendingPosition;
+ if (mCurrentMediaStatus == QMediaPlayer::EndOfMedia)
+ return duration();
+
+ if ((mState & (JMediaPlayer::Idle
+ | JMediaPlayer::Initialized
+ | JMediaPlayer::Prepared
+ | JMediaPlayer::Started
+ | JMediaPlayer::Paused
+ | JMediaPlayer::Stopped
+ | JMediaPlayer::PlaybackCompleted)) == 0) {
+ return (mPendingPosition == -1) ? 0 : mPendingPosition;
+ }
- return mMediaPlayer->getCurrentPosition();
+ return (mCurrentState == QMediaPlayer::StoppedState) ? 0 : mMediaPlayer->getCurrentPosition();
}
void QAndroidMediaPlayerControl::setPosition(qint64 position)
@@ -113,35 +134,88 @@ void QAndroidMediaPlayerControl::setPosition(qint64 position)
const int seekPosition = (position > INT_MAX) ? INT_MAX : position;
- if (!mMediaPlayerReady) {
- mPendingPosition = seekPosition;
- Q_EMIT positionChanged(seekPosition);
+ if ((mState & (JMediaPlayer::Prepared
+ | JMediaPlayer::Started
+ | JMediaPlayer::Paused
+ | JMediaPlayer::PlaybackCompleted)) == 0) {
+ if (mPendingPosition != seekPosition) {
+ mPendingPosition = seekPosition;
+ Q_EMIT positionChanged(seekPosition);
+ }
return;
}
+ if (mCurrentMediaStatus == QMediaPlayer::EndOfMedia)
+ setMediaStatus(QMediaPlayer::LoadedMedia);
+
mMediaPlayer->seekTo(seekPosition);
- mPendingPosition = -1;
+
+ if (mPendingPosition != -1) {
+ mPendingPosition = -1;
+ }
+
+ Q_EMIT positionChanged(seekPosition);
}
int QAndroidMediaPlayerControl::volume() const
{
- return mMediaPlayer->volume();
+ return (mPendingVolume == -1) ? mMediaPlayer->volume() : mPendingVolume;
}
void QAndroidMediaPlayerControl::setVolume(int volume)
{
+ if ((mState & (JMediaPlayer::Idle
+ | JMediaPlayer::Initialized
+ | JMediaPlayer::Stopped
+ | JMediaPlayer::Prepared
+ | JMediaPlayer::Started
+ | JMediaPlayer::Paused
+ | JMediaPlayer::PlaybackCompleted)) == 0) {
+ if (mPendingVolume != volume) {
+ mPendingVolume = volume;
+ Q_EMIT volumeChanged(volume);
+ }
+ return;
+ }
+
mMediaPlayer->setVolume(volume);
+
+ if (mPendingVolume != -1) {
+ mPendingVolume = -1;
+ return;
+ }
+
Q_EMIT volumeChanged(volume);
}
bool QAndroidMediaPlayerControl::isMuted() const
{
- return mMediaPlayer->isMuted();
+ return (mPendingMute == -1) ? mMediaPlayer->isMuted() : (mPendingMute == 1);
}
void QAndroidMediaPlayerControl::setMuted(bool muted)
{
+ if ((mState & (JMediaPlayer::Idle
+ | JMediaPlayer::Initialized
+ | JMediaPlayer::Stopped
+ | JMediaPlayer::Prepared
+ | JMediaPlayer::Started
+ | JMediaPlayer::Paused
+ | JMediaPlayer::PlaybackCompleted)) == 0) {
+ if (mPendingMute != muted) {
+ mPendingMute = muted;
+ Q_EMIT mutedChanged(muted);
+ }
+ return;
+ }
+
mMediaPlayer->setMuted(muted);
+
+ if (mPendingMute != -1) {
+ mPendingMute = -1;
+ return;
+ }
+
Q_EMIT mutedChanged(muted);
}
@@ -208,10 +282,21 @@ const QIODevice *QAndroidMediaPlayerControl::mediaStream() const
void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent,
QIODevice *stream)
{
- mMediaContent = mediaContent;
- mMediaStream = stream;
+ const bool reloading = (mMediaContent == mediaContent);
+
+ if (!reloading) {
+ mMediaContent = mediaContent;
+ mMediaStream = stream;
+ }
- if (mVideoOutput && !mMediaPlayer->display()) {
+ mMediaPlayer->release();
+
+ if (mediaContent.isNull()) {
+ setMediaStatus(QMediaPlayer::NoMedia);
+ return;
+ }
+
+ if (mVideoOutput && !mVideoOutput->isReady()) {
// if a video output is set but the video texture is not ready, delay loading the media
// since it can cause problems on some hardware
mPendingSetMedia = true;
@@ -229,68 +314,88 @@ void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent,
mediaPath = url.toString();
}
- if (!mediaPath.isEmpty())
- mMediaPlayer->setDataSource(mediaPath);
- else
- setMediaStatus(QMediaPlayer::NoMedia);
+ if (mVideoSize.isValid() && mVideoOutput)
+ mVideoOutput->setVideoSize(mVideoSize);
+
+ if (!mMediaPlayer->display() && mVideoOutput)
+ mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
+ mMediaPlayer->setDataSource(mediaPath);
+ mMediaPlayer->prepareAsync();
- Q_EMIT mediaChanged(mMediaContent);
+ if (!reloading)
+ Q_EMIT mediaChanged(mMediaContent);
resetBufferingProgress();
-
- // reset some properties
- setAudioAvailable(false);
- setVideoAvailable(false);
- setSeekable(true);
}
void QAndroidMediaPlayerControl::setVideoOutput(QObject *videoOutput)
{
- if (mVideoOutput)
+ if (mVideoOutput) {
+ mMediaPlayer->setDisplay(0);
mVideoOutput->stop();
+ mVideoOutput->reset();
+ }
mVideoOutput = qobject_cast<QAndroidVideoOutput *>(videoOutput);
- if (mVideoOutput && !mMediaPlayer->display()) {
- if (mVideoOutput->isReady())
- mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
- else
- connect(videoOutput, SIGNAL(readyChanged(bool)), this, SLOT(onVideoOutputReady(bool)));
- }
+ if (!mVideoOutput)
+ return;
+
+ if (mVideoOutput->isReady())
+ mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
+
+ connect(videoOutput, SIGNAL(readyChanged(bool)), this, SLOT(onVideoOutputReady(bool)));
}
void QAndroidMediaPlayerControl::play()
{
- if (!mMediaPlayerReady) {
+ // We need to prepare the mediaplayer again.
+ if ((mState & JMediaPlayer::Stopped) && !mMediaContent.isNull()) {
+ setMedia(mMediaContent, mMediaStream);
+ }
+
+ setState(QMediaPlayer::PlayingState);
+
+ if ((mState & (JMediaPlayer::Prepared
+ | JMediaPlayer::Started
+ | JMediaPlayer::Paused
+ | JMediaPlayer::PlaybackCompleted)) == 0) {
mPendingState = QMediaPlayer::PlayingState;
- if (mCurrentState == QMediaPlayer::StoppedState
- && !mMediaContent.isNull()
- && mCurrentMediaStatus != QMediaPlayer::LoadingMedia
- && !mPendingSetMedia) {
- setMedia(mMediaContent, 0);
- }
return;
}
mMediaPlayer->play();
- setState(QMediaPlayer::PlayingState);
}
void QAndroidMediaPlayerControl::pause()
{
- if (!mMediaPlayerReady) {
+ setState(QMediaPlayer::PausedState);
+
+ if ((mState & (JMediaPlayer::Started
+ | JMediaPlayer::Paused
+ | JMediaPlayer::PlaybackCompleted)) == 0) {
mPendingState = QMediaPlayer::PausedState;
return;
}
mMediaPlayer->pause();
- setState(QMediaPlayer::PausedState);
}
void QAndroidMediaPlayerControl::stop()
{
- mMediaPlayer->stop();
setState(QMediaPlayer::StoppedState);
+
+ if ((mState & (JMediaPlayer::Prepared
+ | JMediaPlayer::Started
+ | JMediaPlayer::Stopped
+ | JMediaPlayer::Paused
+ | JMediaPlayer::PlaybackCompleted)) == 0) {
+ if ((mState & (JMediaPlayer::Idle | JMediaPlayer::Uninitialized | JMediaPlayer::Error)) == 0)
+ mPendingState = QMediaPlayer::StoppedState;
+ return;
+ }
+
+ mMediaPlayer->stop();
}
void QAndroidMediaPlayerControl::onInfo(qint32 what, qint32 extra)
@@ -310,8 +415,8 @@ void QAndroidMediaPlayerControl::onInfo(qint32 what, qint32 extra)
setMediaStatus(QMediaPlayer::StalledMedia);
break;
case JMediaPlayer::MEDIA_INFO_BUFFERING_END:
- setMediaStatus(mBufferPercent == 100 ? QMediaPlayer::BufferedMedia : QMediaPlayer::BufferingMedia);
- flushPendingStates();
+ if (mCurrentState != QMediaPlayer::StoppedState)
+ flushPendingStates();
break;
case JMediaPlayer::MEDIA_INFO_BAD_INTERLEAVING:
break;
@@ -324,41 +429,6 @@ void QAndroidMediaPlayerControl::onInfo(qint32 what, qint32 extra)
}
}
-void QAndroidMediaPlayerControl::onMediaPlayerInfo(qint32 what, qint32 extra)
-{
- switch (what) {
- case JMediaPlayer::MEDIA_PLAYER_INVALID_STATE:
- setError(what, QStringLiteral("Error: Invalid state"));
- break;
- case JMediaPlayer::MEDIA_PLAYER_PREPARING:
- setMediaStatus(QMediaPlayer::LoadingMedia);
- setState(QMediaPlayer::StoppedState);
- break;
- case JMediaPlayer::MEDIA_PLAYER_READY:
- setMediaStatus(QMediaPlayer::LoadedMedia);
- if (mBuffering) {
- setMediaStatus(mBufferPercent == 100 ? QMediaPlayer::BufferedMedia
- : QMediaPlayer::BufferingMedia);
- } else {
- onBufferChanged(100);
- }
- setAudioAvailable(true);
- mMediaPlayerReady = true;
- flushPendingStates();
- break;
- case JMediaPlayer::MEDIA_PLAYER_DURATION:
- Q_EMIT durationChanged(extra);
- break;
- case JMediaPlayer::MEDIA_PLAYER_PROGRESS:
- Q_EMIT positionChanged(extra);
- break;
- case JMediaPlayer::MEDIA_PLAYER_FINISHED:
- stop();
- setMediaStatus(QMediaPlayer::EndOfMedia);
- break;
- }
-}
-
void QAndroidMediaPlayerControl::onError(qint32 what, qint32 extra)
{
QString errorString;
@@ -372,6 +442,10 @@ void QAndroidMediaPlayerControl::onError(qint32 what, qint32 extra)
errorString = QLatin1String("Error: Server died");
error = QMediaPlayer::ServiceMissingError;
break;
+ case JMediaPlayer::MEDIA_ERROR_INVALID_STATE:
+ errorString = QLatin1String("Error: Invalid state");
+ error = QMediaPlayer::ServiceMissingError;
+ break;
}
switch (extra) {
@@ -398,12 +472,16 @@ void QAndroidMediaPlayerControl::onError(qint32 what, qint32 extra)
error = QMediaPlayer::FormatError;
setMediaStatus(QMediaPlayer::InvalidMedia);
break;
+ case JMediaPlayer::MEDIA_ERROR_BAD_THINGS_ARE_GOING_TO_HAPPEN:
+ errorString += QLatin1String(" (Unknown error/Insufficient resources)");
+ error = QMediaPlayer::ServiceMissingError;
+ break;
}
- setError(error, errorString);
+ Q_EMIT QMediaPlayerControl::error(error, errorString);
}
-void QAndroidMediaPlayerControl::onBufferChanged(qint32 percent)
+void QAndroidMediaPlayerControl::onBufferingChanged(qint32 percent)
{
mBuffering = percent != 100;
mBufferPercent = percent;
@@ -411,8 +489,8 @@ void QAndroidMediaPlayerControl::onBufferChanged(qint32 percent)
updateAvailablePlaybackRanges();
- if (mBufferPercent == 100)
- setMediaStatus(QMediaPlayer::BufferedMedia);
+ if (mCurrentState != QMediaPlayer::StoppedState)
+ setMediaStatus(mBuffering ? QMediaPlayer::BufferingMedia : QMediaPlayer::BufferedMedia);
}
void QAndroidMediaPlayerControl::onVideoSizeChanged(qint32 width, qint32 height)
@@ -429,27 +507,98 @@ void QAndroidMediaPlayerControl::onVideoSizeChanged(qint32 width, qint32 height)
mVideoOutput->setVideoSize(mVideoSize);
}
-void QAndroidMediaPlayerControl::onVideoOutputReady(bool ready)
+void QAndroidMediaPlayerControl::onStateChanged(qint32 state)
{
- if (!mMediaPlayer->display() && mVideoOutput && ready) {
- mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
+ // If reloading, don't report state changes unless the new state is Prepared or Error.
+ if ((mState & JMediaPlayer::Stopped) && !(state & (JMediaPlayer::Prepared | JMediaPlayer::Error)))
+ return;
+
+ mState = state;
+ switch (mState) {
+ case JMediaPlayer::Idle:
+ break;
+ case JMediaPlayer::Initialized:
+ break;
+ case JMediaPlayer::Preparing:
+ setMediaStatus(QMediaPlayer::LoadingMedia);
+ break;
+ case JMediaPlayer::Prepared:
+ setMediaStatus(QMediaPlayer::LoadedMedia);
+ if (mBuffering) {
+ setMediaStatus(mBufferPercent == 100 ? QMediaPlayer::BufferedMedia
+ : QMediaPlayer::BufferingMedia);
+ } else {
+ onBufferingChanged(100);
+ }
+ setAudioAvailable(true);
flushPendingStates();
+ break;
+ case JMediaPlayer::Started:
+ setState(QMediaPlayer::PlayingState);
+ if (mBuffering) {
+ setMediaStatus(mBufferPercent == 100 ? QMediaPlayer::BufferedMedia
+ : QMediaPlayer::BufferingMedia);
+ } else {
+ setMediaStatus(QMediaPlayer::BufferedMedia);
+ }
+ break;
+ case JMediaPlayer::Paused:
+ setState(QMediaPlayer::PausedState);
+ break;
+ case JMediaPlayer::Error:
+ setState(QMediaPlayer::StoppedState);
+ setMediaStatus(QMediaPlayer::UnknownMediaStatus);
+ mMediaPlayer->release();
+ break;
+ case JMediaPlayer::Stopped:
+ setState(QMediaPlayer::StoppedState);
+ setMediaStatus(QMediaPlayer::LoadedMedia);
+ setPosition(0);
+ break;
+ case JMediaPlayer::PlaybackCompleted:
+ setState(QMediaPlayer::StoppedState);
+ setPosition(0);
+ setMediaStatus(QMediaPlayer::EndOfMedia);
+ break;
+ case JMediaPlayer::Uninitialized:
+ // reset some properties
+ resetBufferingProgress();
+ mPendingPosition = -1;
+ mPendingSetMedia = false;
+ mPendingState = -1;
+
+ setAudioAvailable(false);
+ setVideoAvailable(false);
+ setSeekable(true);
+ break;
+ default:
+ break;
+ }
+
+ if ((mState & (JMediaPlayer::Stopped | JMediaPlayer::Uninitialized)) != 0) {
+ mMediaPlayer->setDisplay(0);
+ if (mVideoOutput) {
+ mVideoOutput->stop();
+ mVideoOutput->reset();
+ }
}
}
+void QAndroidMediaPlayerControl::onVideoOutputReady(bool ready)
+{
+ if (!mMediaPlayer->display() && mVideoOutput && ready)
+ mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
+
+ flushPendingStates();
+}
+
void QAndroidMediaPlayerControl::setState(QMediaPlayer::State state)
{
if (mCurrentState == state)
return;
- if (state == QMediaPlayer::StoppedState) {
- if (mVideoOutput)
- mVideoOutput->stop();
- resetBufferingProgress();
- mMediaPlayerReady = false;
- mPendingPosition = -1;
- Q_EMIT positionChanged(0);
- }
+ if (mCurrentState == QMediaPlayer::StoppedState && state == QMediaPlayer::PausedState)
+ return;
mCurrentState = state;
Q_EMIT stateChanged(mCurrentState);
@@ -463,17 +612,13 @@ void QAndroidMediaPlayerControl::setMediaStatus(QMediaPlayer::MediaStatus status
if (status == QMediaPlayer::NoMedia || status == QMediaPlayer::InvalidMedia)
Q_EMIT durationChanged(0);
+ if (status == QMediaPlayer::EndOfMedia)
+ Q_EMIT durationChanged(duration());
+
mCurrentMediaStatus = status;
Q_EMIT mediaStatusChanged(mCurrentMediaStatus);
}
-void QAndroidMediaPlayerControl::setError(int error,
- const QString &errorString)
-{
- setState(QMediaPlayer::StoppedState);
- Q_EMIT QMediaPlayerControl::error(error, errorString);
-}
-
void QAndroidMediaPlayerControl::setSeekable(bool seekable)
{
if (mSeekable == seekable)
@@ -515,15 +660,23 @@ void QAndroidMediaPlayerControl::resetBufferingProgress()
void QAndroidMediaPlayerControl::flushPendingStates()
{
if (mPendingSetMedia) {
- setMedia(mMediaContent, 0);
mPendingSetMedia = false;
+ setMedia(mMediaContent, 0);
return;
}
- switch (mPendingState) {
+ const int newState = mPendingState;
+ mPendingState = -1;
+
+ if (mPendingPosition != -1)
+ setPosition(mPendingPosition);
+ if (mPendingVolume != -1)
+ setVolume(mPendingVolume);
+ if (mPendingMute != -1)
+ setMuted((mPendingMute == 1));
+
+ switch (newState) {
case QMediaPlayer::PlayingState:
- if (mPendingPosition > -1)
- setPosition(mPendingPosition);
play();
break;
case QMediaPlayer::PausedState:
@@ -532,6 +685,8 @@ void QAndroidMediaPlayerControl::flushPendingStates()
case QMediaPlayer::StoppedState:
stop();
break;
+ default:
+ break;
}
}
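
The control now remembers volume, mute and seek requests that arrive while the Android player is not in a state where the call is legal (mPendingVolume, mPendingMute, mPendingPosition) and replays them from flushPendingStates() once the player is prepared. The same pattern, reduced to a single property with stand-in types that are not the patch's classes:

    // 'Backend' stands in for the JNI-backed player.
    struct Backend {
        bool ready;
        int volume;
    };

    struct VolumeControl {
        Backend *backend;
        int pendingVolume;                    // -1 means "nothing pending"

        void setVolume(int volume)
        {
            if (!backend->ready) {
                pendingVolume = volume;       // queue the request for later
                return;
            }
            backend->volume = volume;         // legal state: apply immediately
            pendingVolume = -1;
        }

        void flushPending()                   // call when the backend becomes ready
        {
            if (pendingVolume != -1) {
                backend->volume = pendingVolume;
                pendingVolume = -1;
            }
        }
    };

    // Usage: VolumeControl vc = { &backend, -1 }; vc.setVolume(50); ... vc.flushPending();
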
diff --git a/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.h b/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.h
index fadac3c19..1be3b4428 100644
--- a/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.h
+++ b/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.h
@@ -93,9 +93,9 @@ private Q_SLOTS:
void onVideoOutputReady(bool ready);
void onError(qint32 what, qint32 extra);
void onInfo(qint32 what, qint32 extra);
- void onMediaPlayerInfo(qint32 what, qint32 extra);
- void onBufferChanged(qint32 percent);
+ void onBufferingChanged(qint32 percent);
void onVideoSizeChanged(qint32 width, qint32 height);
+ void onStateChanged(qint32 state);
private:
JMediaPlayer *mMediaPlayer;
@@ -111,15 +111,16 @@ private:
QSize mVideoSize;
bool mBuffering;
QMediaTimeRange mAvailablePlaybackRange;
- bool mMediaPlayerReady;
- QMediaPlayer::State mPendingState;
+ int mState;
+ int mPendingState;
qint64 mPendingPosition;
bool mPendingSetMedia;
+ int mPendingVolume;
+ int mPendingMute;
QScopedPointer<QTemporaryFile> mTempFile;
void setState(QMediaPlayer::State state);
void setMediaStatus(QMediaPlayer::MediaStatus status);
- void setError(int error, const QString &errorString);
void setSeekable(bool seekable);
void setAudioAvailable(bool available);
void setVideoAvailable(bool available);
diff --git a/src/plugins/android/src/wrappers/jcamera.cpp b/src/plugins/android/src/wrappers/jcamera.cpp
index 23f3e14b4..50a65a779 100644
--- a/src/plugins/android/src/wrappers/jcamera.cpp
+++ b/src/plugins/android/src/wrappers/jcamera.cpp
@@ -306,12 +306,16 @@ int JCamera::cameraId() const
void JCamera::lock()
{
- QMetaObject::invokeMethod(d, "callVoidMethod", Q_ARG(QByteArray, "lock"));
+ QMetaObject::invokeMethod(d, "callVoidMethod",
+ Qt::BlockingQueuedConnection,
+ Q_ARG(QByteArray, "lock"));
}
void JCamera::unlock()
{
- QMetaObject::invokeMethod(d, "callVoidMethod", Q_ARG(QByteArray, "unlock"));
+ QMetaObject::invokeMethod(d, "callVoidMethod",
+ Qt::BlockingQueuedConnection,
+ Q_ARG(QByteArray, "unlock"));
}
void JCamera::reconnect()
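
lock() and unlock() now use Qt::BlockingQueuedConnection, so the call does not return until the Java method has actually run on the thread that owns the camera object — which matters when the camera must really be unlocked before it is handed to the MediaRecorder. An illustrative fragment showing the difference, with a hypothetical worker object:

    #include <QMetaObject>
    #include <QObject>

    // 'worker' is a QObject living in another thread; "doWork" is an invokable slot on it.
    void pokeWorker(QObject *worker)
    {
        // Queued: posts the call and returns immediately; doWork() runs later.
        QMetaObject::invokeMethod(worker, "doWork", Qt::QueuedConnection);

        // Blocking queued: returns only after doWork() has finished in the worker's
        // thread. Never use it when caller and worker share a thread -- it deadlocks.
        QMetaObject::invokeMethod(worker, "doWork", Qt::BlockingQueuedConnection);
    }
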
diff --git a/src/plugins/android/src/wrappers/jmediaplayer.cpp b/src/plugins/android/src/wrappers/jmediaplayer.cpp
index 3d7f7f9d1..de86cd041 100644
--- a/src/plugins/android/src/wrappers/jmediaplayer.cpp
+++ b/src/plugins/android/src/wrappers/jmediaplayer.cpp
@@ -46,135 +46,113 @@
#include <QtCore/private/qjnihelpers_p.h>
#include <QMap>
-namespace {
-
-jclass mediaPlayerClass = 0;
-
-QMap<jlong, JMediaPlayer *> mplayers;
-
-}
+static jclass mediaPlayerClass = Q_NULLPTR;
+typedef QMap<jlong, JMediaPlayer *> MediaPlayerMap;
+Q_GLOBAL_STATIC(MediaPlayerMap, mediaPlayers)
QT_BEGIN_NAMESPACE
-bool JMediaPlayer::mActivitySet = false;
-
JMediaPlayer::JMediaPlayer()
: QObject()
- , QJNIObjectPrivate(mediaPlayerClass, "(J)V", reinterpret_cast<jlong>(this))
- , mId(reinterpret_cast<jlong>(this))
- , mDisplay(0)
{
- mplayers.insert(mId, this);
-
- if (!mActivitySet) {
- QJNIObjectPrivate::callStaticMethod<void>(mediaPlayerClass,
- "setActivity",
- "(Landroid/app/Activity;)V",
- QtAndroidPrivate::activity());
- mActivitySet = true;
- }
+
+ const jlong id = reinterpret_cast<jlong>(this);
+ mMediaPlayer = QJNIObjectPrivate(mediaPlayerClass,
+ "(Landroid/app/Activity;J)V",
+ QtAndroidPrivate::activity(),
+ id);
+ (*mediaPlayers)[id] = this;
}
JMediaPlayer::~JMediaPlayer()
{
- mplayers.remove(mId);
+ mediaPlayers->remove(reinterpret_cast<jlong>(this));
}
void JMediaPlayer::release()
{
- callMethod<void>("release");
+ mMediaPlayer.callMethod<void>("release");
}
-void JMediaPlayer::onError(qint32 what, qint32 extra)
+void JMediaPlayer::reset()
{
- Q_EMIT error(what, extra);
-}
-
-void JMediaPlayer::onBufferingUpdate(qint32 percent)
-{
- Q_EMIT bufferingUpdate(percent);
-}
-
-void JMediaPlayer::onInfo(qint32 what, qint32 extra)
-{
- Q_EMIT info(what, extra);
-}
-
-void JMediaPlayer::onMediaPlayerInfo(qint32 what, qint32 extra)
-{
- Q_EMIT mediaPlayerInfo(what, extra);
-}
-
-void JMediaPlayer::onVideoSizeChanged(qint32 width, qint32 height)
-{
- Q_EMIT videoSizeChanged(width, height);
+ mMediaPlayer.callMethod<void>("reset");
}
int JMediaPlayer::getCurrentPosition()
{
- return callMethod<jint>("getCurrentPosition");
+ return mMediaPlayer.callMethod<jint>("getCurrentPosition");
}
int JMediaPlayer::getDuration()
{
- return callMethod<jint>("getDuration");
+ return mMediaPlayer.callMethod<jint>("getDuration");
}
bool JMediaPlayer::isPlaying()
{
- return callMethod<jboolean>("isPlaying");
+ return mMediaPlayer.callMethod<jboolean>("isPlaying");
}
int JMediaPlayer::volume()
{
- return callMethod<jint>("getVolume");
+ return mMediaPlayer.callMethod<jint>("getVolume");
}
bool JMediaPlayer::isMuted()
{
- return callMethod<jboolean>("isMuted");
+ return mMediaPlayer.callMethod<jboolean>("isMuted");
+}
+
+jobject JMediaPlayer::display()
+{
+ return mMediaPlayer.callObjectMethod("display", "()Landroid/view/SurfaceHolder;").object();
}
void JMediaPlayer::play()
{
- callMethod<void>("start");
+ mMediaPlayer.callMethod<void>("start");
}
void JMediaPlayer::pause()
{
- callMethod<void>("pause");
+ mMediaPlayer.callMethod<void>("pause");
}
void JMediaPlayer::stop()
{
- callMethod<void>("stop");
+ mMediaPlayer.callMethod<void>("stop");
}
void JMediaPlayer::seekTo(qint32 msec)
{
- callMethod<void>("seekTo", "(I)V", jint(msec));
+ mMediaPlayer.callMethod<void>("seekTo", "(I)V", jint(msec));
}
void JMediaPlayer::setMuted(bool mute)
{
- callMethod<void>("mute", "(Z)V", jboolean(mute));
+ mMediaPlayer.callMethod<void>("mute", "(Z)V", jboolean(mute));
}
void JMediaPlayer::setDataSource(const QString &path)
{
QJNIObjectPrivate string = QJNIObjectPrivate::fromString(path);
- callMethod<void>("setMediaPath", "(Ljava/lang/String;)V", string.object());
+ mMediaPlayer.callMethod<void>("setDataSource", "(Ljava/lang/String;)V", string.object());
+}
+
+void JMediaPlayer::prepareAsync()
+{
+ mMediaPlayer.callMethod<void>("prepareAsync");
}
void JMediaPlayer::setVolume(int volume)
{
- callMethod<void>("setVolume", "(I)V", jint(volume));
+ mMediaPlayer.callMethod<void>("setVolume", "(I)V", jint(volume));
}
void JMediaPlayer::setDisplay(jobject surfaceHolder)
{
- mDisplay = surfaceHolder;
- callMethod<void>("setDisplay", "(Landroid/view/SurfaceHolder;)V", mDisplay);
+ mMediaPlayer.callMethod<void>("setDisplay", "(Landroid/view/SurfaceHolder;)V", surfaceHolder);
}
QT_END_NAMESPACE
@@ -183,44 +161,66 @@ static void onErrorNative(JNIEnv *env, jobject thiz, jint what, jint extra, jlon
{
Q_UNUSED(env);
Q_UNUSED(thiz);
- JMediaPlayer *const mp = mplayers[id];
+ JMediaPlayer *const mp = (*mediaPlayers)[id];
if (!mp)
return;
- mp->onError(what, extra);
+ Q_EMIT mp->error(what, extra);
}
static void onBufferingUpdateNative(JNIEnv *env, jobject thiz, jint percent, jlong id)
{
Q_UNUSED(env);
Q_UNUSED(thiz);
- JMediaPlayer *const mp = mplayers[id];
+ JMediaPlayer *const mp = (*mediaPlayers)[id];
+ if (!mp)
+ return;
+
+ Q_EMIT mp->bufferingChanged(percent);
+}
+
+static void onProgressUpdateNative(JNIEnv *env, jobject thiz, jint progress, jlong id)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(thiz);
+ JMediaPlayer *const mp = (*mediaPlayers)[id];
+ if (!mp)
+ return;
+
+ Q_EMIT mp->progressChanged(progress);
+}
+
+static void onDurationChangedNative(JNIEnv *env, jobject thiz, jint duration, jlong id)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(thiz);
+ JMediaPlayer *const mp = (*mediaPlayers)[id];
if (!mp)
return;
- mp->onBufferingUpdate(percent);
+ Q_EMIT mp->durationChanged(duration);
}
static void onInfoNative(JNIEnv *env, jobject thiz, jint what, jint extra, jlong id)
{
Q_UNUSED(env);
Q_UNUSED(thiz);
- JMediaPlayer *const mp = mplayers[id];
+ JMediaPlayer *const mp = (*mediaPlayers)[id];
if (!mp)
return;
- mp->onInfo(what, extra);
+ Q_EMIT mp->info(what, extra);
}
-static void onMediaPlayerInfoNative(JNIEnv *env, jobject thiz, jint what, jint extra, jlong id)
+static void onStateChangedNative(JNIEnv *env, jobject thiz, jint state, jlong id)
{
Q_UNUSED(env);
Q_UNUSED(thiz);
- JMediaPlayer *const mp = mplayers[id];
+ JMediaPlayer *const mp = (*mediaPlayers)[id];
if (!mp)
return;
- mp->onMediaPlayerInfo(what, extra);
+ Q_EMIT mp->stateChanged(state);
}
static void onVideoSizeChangedNative(JNIEnv *env,
@@ -231,11 +231,11 @@ static void onVideoSizeChangedNative(JNIEnv *env,
{
Q_UNUSED(env);
Q_UNUSED(thiz);
- JMediaPlayer *const mp = mplayers[id];
+ JMediaPlayer *const mp = (*mediaPlayers)[id];
if (!mp)
return;
- mp->onVideoSizeChanged(width, height);
+ Q_EMIT mp->videoSizeChanged(width, height);
}
QT_BEGIN_NAMESPACE
@@ -250,9 +250,11 @@ bool JMediaPlayer::initJNI(JNIEnv *env)
JNINativeMethod methods[] = {
{"onErrorNative", "(IIJ)V", reinterpret_cast<void *>(onErrorNative)},
{"onBufferingUpdateNative", "(IJ)V", reinterpret_cast<void *>(onBufferingUpdateNative)},
+ {"onProgressUpdateNative", "(IJ)V", reinterpret_cast<void *>(onProgressUpdateNative)},
+ {"onDurationChangedNative", "(IJ)V", reinterpret_cast<void *>(onDurationChangedNative)},
{"onInfoNative", "(IIJ)V", reinterpret_cast<void *>(onInfoNative)},
- {"onMediaPlayerInfoNative", "(IIJ)V", reinterpret_cast<void *>(onMediaPlayerInfoNative)},
- {"onVideoSizeChangedNative", "(IIJ)V", reinterpret_cast<void *>(onVideoSizeChangedNative)}
+ {"onVideoSizeChangedNative", "(IIJ)V", reinterpret_cast<void *>(onVideoSizeChangedNative)},
+ {"onStateChangedNative", "(IJ)V", reinterpret_cast<void *>(onStateChangedNative)}
};
if (env->RegisterNatives(mediaPlayerClass,
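
[Note] The native callbacks above resolve the C++ wrapper through a process-wide map keyed by the jlong id that was passed to the Java object at construction. A reduced sketch of that routing (names are illustrative, not the patch's exact code):

    #include <jni.h>
    #include <QtCore/QGlobalStatic>
    #include <QtCore/QMap>

    class PlayerWrapper;  // stands in for JMediaPlayer

    typedef QMap<jlong, PlayerWrapper *> PlayerMap;
    Q_GLOBAL_STATIC(PlayerMap, players)   // created on first use

    // The wrapper registers itself in its constructor and removes itself in
    // its destructor:
    //   (*players)[reinterpret_cast<jlong>(this)] = this;
    //   players->remove(reinterpret_cast<jlong>(this));

    // A JNI callback only carries the id, so it looks the instance up and
    // forwards the notification; a failed lookup means the wrapper is gone.
    static void onSomethingNative(JNIEnv *, jobject, jint value, jlong id)
    {
        PlayerWrapper *const mp = players->value(id, 0);
        if (!mp)
            return;
        // forward 'value' to mp, e.g. by emitting a signal
        (void)value;
    }
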
diff --git a/src/plugins/android/src/wrappers/jmediaplayer.h b/src/plugins/android/src/wrappers/jmediaplayer.h
index c737cfa26..cd469e677 100644
--- a/src/plugins/android/src/wrappers/jmediaplayer.h
+++ b/src/plugins/android/src/wrappers/jmediaplayer.h
@@ -47,7 +47,7 @@
QT_BEGIN_NAMESPACE
-class JMediaPlayer : public QObject, public QJNIObjectPrivate
+class JMediaPlayer : public QObject
{
Q_OBJECT
public:
@@ -59,12 +59,14 @@ public:
// What
MEDIA_ERROR_UNKNOWN = 1,
MEDIA_ERROR_SERVER_DIED = 100,
+ MEDIA_ERROR_INVALID_STATE = -38, // Undocumented
// Extra
MEDIA_ERROR_IO = -1004,
MEDIA_ERROR_MALFORMED = -1007,
MEDIA_ERROR_UNSUPPORTED = -1010,
MEDIA_ERROR_TIMED_OUT = -110,
- MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK = 200
+ MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK = 200,
+ MEDIA_ERROR_BAD_THINGS_ARE_GOING_TO_HAPPEN = -2147483648 // Undocumented
};
enum MediaInfo
@@ -79,24 +81,29 @@ public:
MEDIA_INFO_METADATA_UPDATE = 802
};
- enum MediaPlayerInfo
+ enum MediaPlayerState
{
- MEDIA_PLAYER_INVALID_STATE = 1,
- MEDIA_PLAYER_PREPARING = 2,
- MEDIA_PLAYER_READY = 3,
- MEDIA_PLAYER_DURATION = 4,
- MEDIA_PLAYER_PROGRESS = 5,
- MEDIA_PLAYER_FINISHED = 6
+ Uninitialized = 0x1, /* End */
+ Idle = 0x2,
+ Preparing = 0x4,
+ Prepared = 0x8,
+ Initialized = 0x10,
+ Started = 0x20,
+ Stopped = 0x40,
+ Paused = 0x80,
+ PlaybackCompleted = 0x100,
+ Error = 0x200
};
void release();
+ void reset();
int getCurrentPosition();
int getDuration();
bool isPlaying();
int volume();
bool isMuted();
- jobject display() { return mDisplay; }
+ jobject display();
void play();
void pause();
@@ -104,30 +111,23 @@ public:
void seekTo(qint32 msec);
void setMuted(bool mute);
void setDataSource(const QString &path);
+ void prepareAsync();
void setVolume(int volume);
void setDisplay(jobject surfaceHolder);
- void onError(qint32 what, qint32 extra);
- void onBufferingUpdate(qint32 percent);
- void onInfo(qint32 what, qint32 extra);
- void onMediaPlayerInfo(qint32 what, qint32 extra);
- void onVideoSizeChanged(qint32 width, qint32 height);
-
static bool initJNI(JNIEnv *env);
Q_SIGNALS:
void error(qint32 what, qint32 extra);
- void bufferingUpdate(qint32 percent);
- void completion();
+ void bufferingChanged(qint32 percent);
+ void durationChanged(qint64 duration);
+ void progressChanged(qint64 progress);
+ void stateChanged(qint32 state);
void info(qint32 what, qint32 extra);
- void mediaPlayerInfo(qint32 what, qint32 extra);
void videoSizeChanged(qint32 width, qint32 height);
private:
- jlong mId;
- jobject mDisplay;
-
- static bool mActivitySet;
+ QJNIObjectPrivate mMediaPlayer;
};
QT_END_NAMESPACE
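
[Note] The new MediaPlayerState values are distinct powers of two, so the control side can test the current state against a mask of acceptable states with a single bitwise AND. A small illustration; which states actually permit start() follows the Android MediaPlayer state diagram and is an assumption here, not something shown in this hunk:

    // Hypothetical mask of states in which calling start() is legal.
    const int CanStartStates = JMediaPlayer::Prepared
                             | JMediaPlayer::Started
                             | JMediaPlayer::Paused
                             | JMediaPlayer::PlaybackCompleted;

    bool canStart(int currentState)
    {
        // Flag-style states make multi-state checks a single AND.
        return (currentState & CanStartStates) != 0;
    }
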
diff --git a/src/plugins/android/src/wrappers/jmediarecorder.cpp b/src/plugins/android/src/wrappers/jmediarecorder.cpp
index b2b93f893..2c3eaeab7 100644
--- a/src/plugins/android/src/wrappers/jmediarecorder.cpp
+++ b/src/plugins/android/src/wrappers/jmediarecorder.cpp
@@ -88,6 +88,9 @@ bool JMediaRecorder::prepare()
QJNIEnvironmentPrivate env;
callMethod<void>("prepare");
if (env->ExceptionCheck()) {
+#ifdef QT_DEBUG
+ env->ExceptionDescribe();
+#endif
env->ExceptionClear();
return false;
}
@@ -104,6 +107,9 @@ bool JMediaRecorder::start()
QJNIEnvironmentPrivate env;
callMethod<void>("start");
if (env->ExceptionCheck()) {
+#ifdef QT_DEBUG
+ env->ExceptionDescribe();
+#endif
env->ExceptionClear();
return false;
}
@@ -114,8 +120,12 @@ void JMediaRecorder::stop()
{
QJNIEnvironmentPrivate env;
callMethod<void>("stop");
- if (env->ExceptionCheck())
+ if (env->ExceptionCheck()) {
+#ifdef QT_DEBUG
+ env->ExceptionDescribe();
+#endif
env->ExceptionClear();
+ }
}
void JMediaRecorder::setAudioChannels(int numChannels)
@@ -127,8 +137,12 @@ void JMediaRecorder::setAudioEncoder(AudioEncoder encoder)
{
QJNIEnvironmentPrivate env;
callMethod<void>("setAudioEncoder", "(I)V", int(encoder));
- if (env->ExceptionCheck())
+ if (env->ExceptionCheck()) {
+#ifdef QT_DEBUG
+ env->ExceptionDescribe();
+#endif
env->ExceptionClear();
+ }
}
void JMediaRecorder::setAudioEncodingBitRate(int bitRate)
@@ -145,8 +159,12 @@ void JMediaRecorder::setAudioSource(AudioSource source)
{
QJNIEnvironmentPrivate env;
callMethod<void>("setAudioSource", "(I)V", int(source));
- if (env->ExceptionCheck())
+ if (env->ExceptionCheck()) {
+#ifdef QT_DEBUG
+ env->ExceptionDescribe();
+#endif
env->ExceptionClear();
+ }
}
void JMediaRecorder::setCamera(JCamera *camera)
@@ -159,8 +177,12 @@ void JMediaRecorder::setVideoEncoder(VideoEncoder encoder)
{
QJNIEnvironmentPrivate env;
callMethod<void>("setVideoEncoder", "(I)V", int(encoder));
- if (env->ExceptionCheck())
+ if (env->ExceptionCheck()) {
+#ifdef QT_DEBUG
+ env->ExceptionDescribe();
+#endif
env->ExceptionClear();
+ }
}
void JMediaRecorder::setVideoEncodingBitRate(int bitRate)
@@ -172,40 +194,60 @@ void JMediaRecorder::setVideoFrameRate(int rate)
{
QJNIEnvironmentPrivate env;
callMethod<void>("setVideoFrameRate", "(I)V", rate);
- if (env->ExceptionCheck())
+ if (env->ExceptionCheck()) {
+#ifdef QT_DEBUG
+ env->ExceptionDescribe();
+#endif
env->ExceptionClear();
+ }
}
void JMediaRecorder::setVideoSize(const QSize &size)
{
QJNIEnvironmentPrivate env;
callMethod<void>("setVideoSize", "(II)V", size.width(), size.height());
- if (env->ExceptionCheck())
+ if (env->ExceptionCheck()) {
+#ifdef QT_DEBUG
+ env->ExceptionDescribe();
+#endif
env->ExceptionClear();
+ }
}
void JMediaRecorder::setVideoSource(VideoSource source)
{
QJNIEnvironmentPrivate env;
callMethod<void>("setVideoSource", "(I)V", int(source));
- if (env->ExceptionCheck())
+ if (env->ExceptionCheck()) {
+#ifdef QT_DEBUG
+ env->ExceptionDescribe();
+#endif
env->ExceptionClear();
+ }
}
void JMediaRecorder::setOrientationHint(int degrees)
{
QJNIEnvironmentPrivate env;
callMethod<void>("setOrientationHint", "(I)V", degrees);
- if (env->ExceptionCheck())
+ if (env->ExceptionCheck()) {
+#ifdef QT_DEBUG
+ env->ExceptionDescribe();
+#endif
env->ExceptionClear();
+ }
}
void JMediaRecorder::setOutputFormat(OutputFormat format)
{
QJNIEnvironmentPrivate env;
callMethod<void>("setOutputFormat", "(I)V", int(format));
- if (env->ExceptionCheck())
+ if (env->ExceptionCheck()) {
+#ifdef QT_DEBUG
+ env->ExceptionDescribe();
+#endif
env->ExceptionClear();
+ }
}
void JMediaRecorder::setOutputFile(const QString &path)
@@ -214,8 +256,12 @@ void JMediaRecorder::setOutputFile(const QString &path)
callMethod<void>("setOutputFile",
"(Ljava/lang/String;)V",
QJNIObjectPrivate::fromString(path).object());
- if (env->ExceptionCheck())
+ if (env->ExceptionCheck()) {
+#ifdef QT_DEBUG
+ env->ExceptionDescribe();
+#endif
env->ExceptionClear();
+ }
}
static JNINativeMethod methods[] = {
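
[Note] Every recorder call now describes the pending Java exception in debug builds before clearing it. The repeated block could equally be expressed as a small helper; a sketch of the same pattern, not part of the patch:

    #include <jni.h>

    // Returns true if a Java exception was pending (and has been cleared).
    // In debug builds the exception and its backtrace are printed first.
    static bool clearPendingException(JNIEnv *env)
    {
        if (!env->ExceptionCheck())
            return false;
    #ifdef QT_DEBUG
        env->ExceptionDescribe();
    #endif
        env->ExceptionClear();
        return true;
    }

    // Usage inside a wrapper method:
    //   callMethod<void>("prepare");
    //   if (clearPendingException(env))
    //       return false;
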
diff --git a/src/plugins/avfoundation/camera/avfaudioinputselectorcontrol.mm b/src/plugins/avfoundation/camera/avfaudioinputselectorcontrol.mm
index 6c6df5164..4a2f068f5 100644
--- a/src/plugins/avfoundation/camera/avfaudioinputselectorcontrol.mm
+++ b/src/plugins/avfoundation/camera/avfaudioinputselectorcontrol.mm
@@ -60,7 +60,8 @@ AVFAudioInputSelectorControl::AVFAudioInputSelectorControl(AVFCameraService *ser
QString::fromUtf8([[device localizedName] UTF8String]));
}
- m_activeInput = m_devices.first();
+ if (m_devices.size() > 0)
+ m_activeInput = m_devices.first();
}
AVFAudioInputSelectorControl::~AVFAudioInputSelectorControl()
@@ -79,7 +80,7 @@ QString AVFAudioInputSelectorControl::inputDescription(const QString &name) cons
QString AVFAudioInputSelectorControl::defaultInput() const
{
- return m_devices.first();
+ return m_devices.size() > 0 ? m_devices.first() : QString();
}
QString AVFAudioInputSelectorControl::activeInput() const
diff --git a/src/plugins/avfoundation/mediaplayer/avfmediaplayerservice.mm b/src/plugins/avfoundation/mediaplayer/avfmediaplayerservice.mm
index 398f00e2f..e5549803f 100644
--- a/src/plugins/avfoundation/mediaplayer/avfmediaplayerservice.mm
+++ b/src/plugins/avfoundation/mediaplayer/avfmediaplayerservice.mm
@@ -50,6 +50,7 @@
#ifndef QT_NO_WIDGETS
# include "avfvideowidgetcontrol.h"
#endif
+#include "avfvideowindowcontrol.h"
QT_USE_NAMESPACE
@@ -102,6 +103,13 @@ QMediaControl *AVFMediaPlayerService::requestControl(const char *name)
return m_videoOutput;
}
#endif
+ if (qstrcmp(name, QVideoWindowControl_iid) == 0) {
+ if (!m_videoOutput)
+ m_videoOutput = new AVFVideoWindowControl(this);
+
+ m_session->setVideoOutput(qobject_cast<AVFVideoOutput*>(m_videoOutput));
+ return m_videoOutput;
+ }
return 0;
}
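
[Note] requestControl() compares the requested interface id string and lazily creates the matching video output. From the client side the same id is used to ask for the control; a minimal sketch, assuming a valid media service instance:

    #include <QMediaControl>
    #include <QMediaService>
    #include <QVideoWindowControl>

    QVideoWindowControl *windowControl(QMediaService *service)
    {
        // requestControl() returns the control matching the interface id,
        // or 0 if the backend does not provide one.
        QMediaControl *control = service->requestControl(QVideoWindowControl_iid);
        return qobject_cast<QVideoWindowControl *>(control);
        // The caller is expected to hand it back with releaseControl() later.
    }
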
diff --git a/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm b/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
index 9e0ac4f42..a73974ccd 100644
--- a/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
+++ b/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
@@ -404,6 +404,8 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
#ifdef QT_DEBUG_AVF
qDebug() << Q_FUNC_INFO;
#endif
+ [self unloadMedia];
+
if (m_player) {
[m_player removeObserver:self forKeyPath:AVF_CURRENT_ITEM_KEY];
[m_player removeObserver:self forKeyPath:AVF_RATE_KEY];
@@ -416,8 +418,6 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
m_playerLayer = 0;
}
- [self unloadMedia];
-
if (m_URL) {
[m_URL release];
}
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideowindowcontrol.h b/src/plugins/avfoundation/mediaplayer/avfvideowindowcontrol.h
new file mode 100644
index 000000000..9ea87058d
--- /dev/null
+++ b/src/plugins/avfoundation/mediaplayer/avfvideowindowcontrol.h
@@ -0,0 +1,120 @@
+/****************************************************************************
+**
+** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/legal
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and Digia. For licensing terms and
+** conditions see http://qt.digia.com/licensing. For further information
+** use the contact form at http://qt.digia.com/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 2.1 as published by the Free Software
+** Foundation and appearing in the file LICENSE.LGPL included in the
+** packaging of this file. Please review the following information to
+** ensure the GNU Lesser General Public License version 2.1 requirements
+** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Digia gives you certain additional
+** rights. These rights are described in the Digia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU
+** General Public License version 3.0 as published by the Free Software
+** Foundation and appearing in the file LICENSE.GPL included in the
+** packaging of this file. Please review the following information to
+** ensure the GNU General Public License version 3.0 requirements will be
+** met: http://www.gnu.org/copyleft/gpl.html.
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#ifndef AVFVIDEOWINDOWCONTROL_H
+#define AVFVIDEOWINDOWCONTROL_H
+
+#include <QVideoWindowControl>
+
+@class AVPlayerLayer;
+#if defined(Q_OS_OSX)
+@class NSView;
+typedef NSView NativeView;
+#else
+@class UIView;
+typedef UIView NativeView;
+#endif
+
+#include "avfvideooutput.h"
+
+QT_BEGIN_NAMESPACE
+
+class AVFVideoWindowControl : public QVideoWindowControl, public AVFVideoOutput
+{
+ Q_OBJECT
+ Q_INTERFACES(AVFVideoOutput)
+
+public:
+ AVFVideoWindowControl(QObject *parent = 0);
+ virtual ~AVFVideoWindowControl();
+
+ // QVideoWindowControl interface
+public:
+ WId winId() const;
+ void setWinId(WId id);
+
+ QRect displayRect() const;
+ void setDisplayRect(const QRect &rect);
+
+ bool isFullScreen() const;
+ void setFullScreen(bool fullScreen);
+
+ void repaint();
+ QSize nativeSize() const;
+
+ Qt::AspectRatioMode aspectRatioMode() const;
+ void setAspectRatioMode(Qt::AspectRatioMode mode);
+
+ int brightness() const;
+ void setBrightness(int brightness);
+
+ int contrast() const;
+ void setContrast(int contrast);
+
+ int hue() const;
+ void setHue(int hue);
+
+ int saturation() const;
+ void setSaturation(int saturation);
+
+ // AVFVideoOutput interface
+ void setLayer(void *playerLayer);
+
+private:
+ void updateAspectRatio();
+ void updatePlayerLayerBounds();
+
+ WId m_winId;
+ QRect m_displayRect;
+ bool m_fullscreen;
+ int m_brightness;
+ int m_contrast;
+ int m_hue;
+ int m_saturation;
+ Qt::AspectRatioMode m_aspectRatioMode;
+ QSize m_nativeSize;
+ AVPlayerLayer *m_playerLayer;
+ NativeView *m_nativeView;
+};
+
+QT_END_NAMESPACE
+
+#endif // AVFVIDEOWINDOWCONTROL_H
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideowindowcontrol.mm b/src/plugins/avfoundation/mediaplayer/avfvideowindowcontrol.mm
new file mode 100644
index 000000000..17fc94de7
--- /dev/null
+++ b/src/plugins/avfoundation/mediaplayer/avfvideowindowcontrol.mm
@@ -0,0 +1,246 @@
+/****************************************************************************
+**
+** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/legal
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and Digia. For licensing terms and
+** conditions see http://qt.digia.com/licensing. For further information
+** use the contact form at http://qt.digia.com/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 2.1 as published by the Free Software
+** Foundation and appearing in the file LICENSE.LGPL included in the
+** packaging of this file. Please review the following information to
+** ensure the GNU Lesser General Public License version 2.1 requirements
+** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Digia gives you certain additional
+** rights. These rights are described in the Digia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU
+** General Public License version 3.0 as published by the Free Software
+** Foundation and appearing in the file LICENSE.GPL included in the
+** packaging of this file. Please review the following information to
+** ensure the GNU General Public License version 3.0 requirements will be
+** met: http://www.gnu.org/copyleft/gpl.html.
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include "avfvideowindowcontrol.h"
+
+#include <AVFoundation/AVFoundation.h>
+
+QT_USE_NAMESPACE
+
+AVFVideoWindowControl::AVFVideoWindowControl(QObject *parent)
+ : QVideoWindowControl(parent)
+ , m_winId(0)
+ , m_fullscreen(false)
+ , m_brightness(0)
+ , m_contrast(0)
+ , m_hue(0)
+ , m_saturation(0)
+ , m_aspectRatioMode(Qt::IgnoreAspectRatio)
+ , m_playerLayer(0)
+ , m_nativeView(0)
+{
+}
+
+AVFVideoWindowControl::~AVFVideoWindowControl()
+{
+ if (m_playerLayer)
+ [m_playerLayer release];
+}
+
+WId AVFVideoWindowControl::winId() const
+{
+ return m_winId;
+}
+
+void AVFVideoWindowControl::setWinId(WId id)
+{
+ m_winId = id;
+ m_nativeView = (NativeView*)m_winId;
+}
+
+QRect AVFVideoWindowControl::displayRect() const
+{
+ return m_displayRect;
+}
+
+void AVFVideoWindowControl::setDisplayRect(const QRect &rect)
+{
+ if (m_displayRect != rect) {
+ m_displayRect = rect;
+ updatePlayerLayerBounds();
+ }
+}
+
+bool AVFVideoWindowControl::isFullScreen() const
+{
+ return m_fullscreen;
+}
+
+void AVFVideoWindowControl::setFullScreen(bool fullScreen)
+{
+ if (m_fullscreen != fullScreen) {
+ m_fullscreen = fullScreen;
+ Q_EMIT QVideoWindowControl::fullScreenChanged(fullScreen);
+ }
+}
+
+void AVFVideoWindowControl::repaint()
+{
+ if (m_playerLayer)
+ [m_playerLayer setNeedsDisplay];
+}
+
+QSize AVFVideoWindowControl::nativeSize() const
+{
+ return m_nativeSize;
+}
+
+Qt::AspectRatioMode AVFVideoWindowControl::aspectRatioMode() const
+{
+ return m_aspectRatioMode;
+}
+
+void AVFVideoWindowControl::setAspectRatioMode(Qt::AspectRatioMode mode)
+{
+ if (m_aspectRatioMode != mode) {
+ m_aspectRatioMode = mode;
+ updateAspectRatio();
+ }
+}
+
+int AVFVideoWindowControl::brightness() const
+{
+ return m_brightness;
+}
+
+void AVFVideoWindowControl::setBrightness(int brightness)
+{
+ if (m_brightness != brightness) {
+ m_brightness = brightness;
+ Q_EMIT QVideoWindowControl::brightnessChanged(brightness);
+ }
+}
+
+int AVFVideoWindowControl::contrast() const
+{
+ return m_contrast;
+}
+
+void AVFVideoWindowControl::setContrast(int contrast)
+{
+ if (m_contrast != contrast) {
+ m_contrast = contrast;
+ Q_EMIT QVideoWindowControl::contrastChanged(contrast);
+ }
+}
+
+int AVFVideoWindowControl::hue() const
+{
+ return m_hue;
+}
+
+void AVFVideoWindowControl::setHue(int hue)
+{
+ if (m_hue != hue) {
+ m_hue = hue;
+ Q_EMIT QVideoWindowControl::hueChanged(hue);
+ }
+}
+
+int AVFVideoWindowControl::saturation() const
+{
+ return m_saturation;
+}
+
+void AVFVideoWindowControl::setSaturation(int saturation)
+{
+ if (m_saturation != saturation) {
+ m_saturation = saturation;
+ Q_EMIT QVideoWindowControl::saturationChanged(saturation);
+ }
+}
+
+void AVFVideoWindowControl::setLayer(void *playerLayer)
+{
+ AVPlayerLayer *layer = (AVPlayerLayer*)playerLayer;
+ if (m_playerLayer == layer)
+ return;
+
+ if (!m_winId) {
+ qDebug("AVFVideoWindowControl: No video window");
+ return;
+ }
+
+#if defined(Q_OS_OSX)
+ [m_nativeView setWantsLayer:YES];
+#endif
+
+ if (m_playerLayer) {
+ [m_playerLayer removeFromSuperlayer];
+ [m_playerLayer release];
+ }
+
+ m_playerLayer = layer;
+
+ CALayer *nativeLayer = [m_nativeView layer];
+
+ if (layer) {
+ [layer retain];
+
+ m_nativeSize = QSize(m_playerLayer.bounds.size.width,
+ m_playerLayer.bounds.size.height);
+
+ updateAspectRatio();
+ [nativeLayer addSublayer:m_playerLayer];
+ updatePlayerLayerBounds();
+ }
+}
+
+void AVFVideoWindowControl::updateAspectRatio()
+{
+ if (m_playerLayer) {
+ switch (m_aspectRatioMode) {
+ case Qt::IgnoreAspectRatio:
+ [m_playerLayer setVideoGravity:AVLayerVideoGravityResize];
+ break;
+ case Qt::KeepAspectRatio:
+ [m_playerLayer setVideoGravity:AVLayerVideoGravityResizeAspect];
+ break;
+ case Qt::KeepAspectRatioByExpanding:
+ [m_playerLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
+ break;
+ default:
+ break;
+ }
+ }
+}
+
+void AVFVideoWindowControl::updatePlayerLayerBounds()
+{
+ if (m_playerLayer) {
+ CGRect newBounds = CGRectMake(0, 0,
+ m_displayRect.width(), m_displayRect.height());
+ m_playerLayer.bounds = newBounds;
+ m_playerLayer.position = CGPointMake(m_displayRect.x(), m_displayRect.y());
+ }
+}
+
+#include "moc_avfvideowindowcontrol.cpp"
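
[Note] Once the plugin exposes a QVideoWindowControl, the platform-independent layer drives it through a native window id and a display rectangle. A compressed sketch of that interaction; the widget here is hypothetical and not part of the patch:

    #include <QVideoWindowControl>
    #include <QWidget>

    void attachToWidget(QVideoWindowControl *control, QWidget *surface)
    {
        // Render into the native window behind the widget...
        control->setWinId(surface->winId());
        // ...clipped and scaled to this rectangle, in window coordinates.
        control->setDisplayRect(surface->rect());
        control->setAspectRatioMode(Qt::KeepAspectRatio);
    }
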
diff --git a/src/plugins/avfoundation/mediaplayer/mediaplayer.pro b/src/plugins/avfoundation/mediaplayer/mediaplayer.pro
index e5bccd150..b5193b73d 100644
--- a/src/plugins/avfoundation/mediaplayer/mediaplayer.pro
+++ b/src/plugins/avfoundation/mediaplayer/mediaplayer.pro
@@ -21,7 +21,8 @@ HEADERS += \
avfmediaplayerservice.h \
avfmediaplayersession.h \
avfmediaplayerserviceplugin.h \
- avfvideooutput.h
+ avfvideooutput.h \
+ avfvideowindowcontrol.h
OBJECTIVE_SOURCES += \
avfmediaplayercontrol.mm \
@@ -29,7 +30,8 @@ OBJECTIVE_SOURCES += \
avfmediaplayerservice.mm \
avfmediaplayerserviceplugin.mm \
avfmediaplayersession.mm \
- avfvideooutput.mm
+ avfvideooutput.mm \
+ avfvideowindowcontrol.mm
qtHaveModule(widgets) {
QT += multimediawidgets-private
diff --git a/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecoderserviceplugin.cpp b/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecoderserviceplugin.cpp
index d34c10e10..3085d1391 100644
--- a/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecoderserviceplugin.cpp
+++ b/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecoderserviceplugin.cpp
@@ -147,6 +147,7 @@ void QGstreamerAudioDecoderServicePlugin::updateSupportedMimeTypes() const
}
}
}
+ gst_caps_unref(caps);
}
}
gst_object_unref (factory);
diff --git a/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecodersession.cpp b/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecodersession.cpp
index 5bcf1aa54..9f1a765ec 100644
--- a/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecodersession.cpp
+++ b/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecodersession.cpp
@@ -158,6 +158,8 @@ void QGstreamerAudioDecoderSession::configureAppSrcElement(GObject* object, GObj
if (!self->appsrc()->setup(appsrc))
qWarning()<<"Could not setup appsrc element";
+
+ g_object_unref(G_OBJECT(appsrc));
}
#endif
@@ -372,7 +374,8 @@ void QGstreamerAudioDecoderSession::start()
if (mFormat.isValid()) {
setAudioFlags(false);
GstCaps *caps = QGstUtils::capsForAudioFormat(mFormat);
- gst_app_sink_set_caps(m_appSink, caps); // appsink unrefs caps
+ gst_app_sink_set_caps(m_appSink, caps);
+ gst_caps_unref(caps);
} else {
// We want whatever the native audio format is
setAudioFlags(true);
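
[Note] gst_app_sink_set_caps() takes its own reference to the caps rather than adopting the caller's, so the reference created by the caller still has to be released; the old "appsink unrefs caps" comment was misleading. The general pattern, with an illustrative caps string:

    #include <gst/gst.h>
    #include <gst/app/gstappsink.h>

    void setSinkCaps(GstAppSink *sink)
    {
        // gst_caps_from_string() returns caps owned by the caller.
        GstCaps *caps = gst_caps_from_string("audio/x-raw-int, rate=(int)44100");
        // The sink keeps its own reference ("transfer none")...
        gst_app_sink_set_caps(sink, caps);
        // ...so the caller's reference must still be dropped here.
        gst_caps_unref(caps);
    }
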
diff --git a/src/plugins/gstreamer/camerabin/camerabinaudioencoder.cpp b/src/plugins/gstreamer/camerabin/camerabinaudioencoder.cpp
index 0fa854cc5..332891a47 100644
--- a/src/plugins/gstreamer/camerabin/camerabinaudioencoder.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinaudioencoder.cpp
@@ -114,11 +114,15 @@ GstEncodingProfile *CameraBinAudioEncoder::createProfile()
else
caps = gst_caps_from_string(codec.toLatin1());
- return (GstEncodingProfile *)gst_encoding_audio_profile_new(
- caps,
- !preset.isEmpty() ? preset.toLatin1().constData() : NULL, //preset
- NULL, //restriction
- 0); //presence
+ GstEncodingProfile *profile = (GstEncodingProfile *)gst_encoding_audio_profile_new(
+ caps,
+ !preset.isEmpty() ? preset.toLatin1().constData() : NULL, //preset
+ NULL, //restriction
+ 0); //presence
+
+ gst_caps_unref(caps);
+
+ return profile;
}
QT_END_NAMESPACE
diff --git a/src/plugins/gstreamer/camerabin/camerabincontainer.cpp b/src/plugins/gstreamer/camerabin/camerabincontainer.cpp
index 4c2a3bda9..44eb36818 100644
--- a/src/plugins/gstreamer/camerabin/camerabincontainer.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabincontainer.cpp
@@ -124,11 +124,15 @@ GstEncodingContainerProfile *CameraBinContainer::createProfile()
caps = gst_caps_from_string(format.toLatin1());
}
- return (GstEncodingContainerProfile *)gst_encoding_container_profile_new(
- "camerabin2_profile",
- (gchar *)"custom camera profile",
- caps,
- NULL); //preset
+ GstEncodingContainerProfile *profile = (GstEncodingContainerProfile *)gst_encoding_container_profile_new(
+ "camerabin2_profile",
+ (gchar *)"custom camera profile",
+ caps,
+ NULL); //preset
+
+ gst_caps_unref(caps);
+
+ return profile;
}
/*!
diff --git a/src/plugins/gstreamer/camerabin/camerabinrecorder.cpp b/src/plugins/gstreamer/camerabin/camerabinrecorder.cpp
index 4ac0d942e..7cef82a69 100644
--- a/src/plugins/gstreamer/camerabin/camerabinrecorder.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinrecorder.cpp
@@ -191,10 +191,14 @@ GstEncodingContainerProfile *CameraBinRecorder::videoProfile()
GstEncodingProfile *audioProfile = m_session->audioEncodeControl()->createProfile();
GstEncodingProfile *videoProfile = m_session->videoEncodeControl()->createProfile();
- if (audioProfile)
- gst_encoding_container_profile_add_profile(containerProfile, audioProfile);
- if (videoProfile)
- gst_encoding_container_profile_add_profile(containerProfile, videoProfile);
+ if (audioProfile) {
+ if (!gst_encoding_container_profile_add_profile(containerProfile, audioProfile))
+ gst_encoding_profile_unref(audioProfile);
+ }
+ if (videoProfile) {
+ if (!gst_encoding_container_profile_add_profile(containerProfile, videoProfile))
+ gst_encoding_profile_unref(videoProfile);
+ }
}
return containerProfile;
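
[Note] gst_encoding_container_profile_add_profile() takes ownership of the child profile when it succeeds; only when adding fails does the caller still hold the reference, which is the leak the new branches plug. Condensed into a helper-style sketch:

    #include <gst/pbutils/encoding-profile.h>

    void addChildProfile(GstEncodingContainerProfile *container,
                         GstEncodingProfile *child)
    {
        if (!child)
            return;
        // On success the container owns 'child'; on failure the caller's
        // reference would otherwise leak, so drop it explicitly.
        if (!gst_encoding_container_profile_add_profile(container, child))
            gst_encoding_profile_unref(child);
    }
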
diff --git a/src/plugins/gstreamer/camerabin/camerabinsession.cpp b/src/plugins/gstreamer/camerabin/camerabinsession.cpp
index 8ca6bfd83..6e3448ffe 100644
--- a/src/plugins/gstreamer/camerabin/camerabinsession.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinsession.cpp
@@ -61,6 +61,7 @@
#include "camerabincapturebufferformat.h"
#include <private/qgstreamerbushelper_p.h>
#include <private/qgstreamervideorendererinterface_p.h>
+#include <private/qgstutils_p.h>
#include <qmediarecorder.h>
#ifdef HAVE_GST_PHOTOGRAPHY
@@ -108,9 +109,6 @@
#define CAMERABIN_IMAGE_MODE 1
#define CAMERABIN_VIDEO_MODE 2
-#define gstRef(element) { gst_object_ref(GST_OBJECT(element)); gst_object_sink(GST_OBJECT(element)); }
-#define gstUnref(element) { if (element) { gst_object_unref(GST_OBJECT(element)); element = 0; } }
-
#define PREVIEW_CAPS_4_3 \
"video/x-raw-rgb, width = (int) 640, height = (int) 480"
@@ -146,7 +144,7 @@ CameraBinSession::CameraBinSession(QObject *parent)
{
m_camerabin = gst_element_factory_make("camerabin2", "camerabin2");
g_signal_connect(G_OBJECT(m_camerabin), "notify::idle", G_CALLBACK(updateBusyStatus), this);
- gstRef(m_camerabin);
+ qt_gst_object_ref_sink(m_camerabin);
m_bus = gst_element_get_bus(m_camerabin);
@@ -192,9 +190,11 @@ CameraBinSession::~CameraBinSession()
gst_element_set_state(m_camerabin, GST_STATE_NULL);
gst_element_get_state(m_camerabin, NULL, NULL, GST_CLOCK_TIME_NONE);
- gstUnref(m_camerabin);
- gstUnref(m_viewfinderElement);
+ gst_object_unref(GST_OBJECT(m_bus));
+ gst_object_unref(GST_OBJECT(m_camerabin));
}
+ if (m_viewfinderElement)
+ gst_object_unref(GST_OBJECT(m_viewfinderElement));
}
#ifdef HAVE_GST_PHOTOGRAPHY
@@ -239,7 +239,7 @@ bool CameraBinSession::setupCameraBin()
qWarning() << "Starting camera without viewfinder available";
m_viewfinderElement = gst_element_factory_make("fakesink", NULL);
}
- gst_object_ref(GST_OBJECT(m_viewfinderElement));
+ qt_gst_object_ref_sink(GST_OBJECT(m_viewfinderElement));
gst_element_set_state(m_camerabin, GST_STATE_NULL);
g_object_set(G_OBJECT(m_camerabin), VIEWFINDER_SINK_PROPERTY, m_viewfinderElement, NULL);
}
@@ -438,6 +438,9 @@ GstElement *CameraBinSession::buildCameraSource()
if (m_videoSrc != videoSrc)
g_object_set(G_OBJECT(m_camerabin), CAMERA_SOURCE_PROPERTY, m_videoSrc, NULL);
+ if (videoSrc)
+ gst_object_unref(GST_OBJECT(videoSrc));
+
return m_videoSrc;
}
@@ -680,10 +683,12 @@ void CameraBinSession::setState(QCamera::State newState)
m_recorderControl->applySettings();
+ GstEncodingContainerProfile *profile = m_recorderControl->videoProfile();
g_object_set (G_OBJECT(m_camerabin),
"video-profile",
- m_recorderControl->videoProfile(),
+ profile,
NULL);
+ gst_encoding_profile_unref(profile);
setAudioCaptureCaps();
@@ -803,6 +808,7 @@ void CameraBinSession::setMetaData(const QMap<QByteArray, QVariant> &data)
}
}
}
+ gst_iterator_free(elements);
}
}
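
[Note] The ad-hoc gstRef/gstUnref macros are replaced by qt_gst_object_ref_sink(), presumably the shared helper pulled in via qgstutils_p.h, which converts a newly created element's floating reference into an ordinary one owned by the session. The underlying idea, sketched with plain GStreamer 0.10 calls (the Qt helper mainly hides the 0.10/1.0 spelling difference):

    #include <gst/gst.h>

    GstElement *makeOwnedElement(const char *factory, const char *name)
    {
        // A freshly created element carries a "floating" reference that the
        // first bin it is added to would normally adopt. If the element might
        // never get a parent, sink the floating ref so this code clearly owns it.
        GstElement *element = gst_element_factory_make(factory, name);
        if (!element)
            return NULL;
        gst_object_ref(GST_OBJECT(element));
        gst_object_sink(GST_OBJECT(element));   // gst_object_ref_sink() in 1.x
        return element;
        // Release later with exactly one gst_object_unref().
    }
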
diff --git a/src/plugins/gstreamer/camerabin/camerabinvideoencoder.cpp b/src/plugins/gstreamer/camerabin/camerabinvideoencoder.cpp
index cb479d8df..146e150ec 100644
--- a/src/plugins/gstreamer/camerabin/camerabinvideoencoder.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinvideoencoder.cpp
@@ -175,6 +175,8 @@ GstEncodingProfile *CameraBinVideoEncoder::createProfile()
NULL, //restriction
1); //presence
+ gst_caps_unref(caps);
+
gst_encoding_video_profile_set_pass(profile, 0);
gst_encoding_video_profile_set_variableframerate(profile, TRUE);
diff --git a/src/plugins/gstreamer/mediacapture/qgstreameraudioencode.cpp b/src/plugins/gstreamer/mediacapture/qgstreameraudioencode.cpp
index 88c86b3b5..e735566dd 100644
--- a/src/plugins/gstreamer/mediacapture/qgstreameraudioencode.cpp
+++ b/src/plugins/gstreamer/mediacapture/qgstreameraudioencode.cpp
@@ -196,6 +196,8 @@ GstElement *QGstreamerAudioEncode::createEncoder()
//qDebug() << "set caps filter:" << gst_caps_to_string(caps);
g_object_set(G_OBJECT(capsFilter), "caps", caps, NULL);
+
+ gst_caps_unref(caps);
}
if (encoderElement) {
diff --git a/src/plugins/gstreamer/mediacapture/qgstreamercaptureserviceplugin.cpp b/src/plugins/gstreamer/mediacapture/qgstreamercaptureserviceplugin.cpp
index de07d2707..657b9806f 100644
--- a/src/plugins/gstreamer/mediacapture/qgstreamercaptureserviceplugin.cpp
+++ b/src/plugins/gstreamer/mediacapture/qgstreamercaptureserviceplugin.cpp
@@ -261,6 +261,7 @@ void QGstreamerCaptureServicePlugin::updateSupportedMimeTypes() const
}
}
}
+ gst_caps_unref(caps);
}
}
gst_object_unref (factory);
diff --git a/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.cpp b/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.cpp
index d7473327f..518a66bc0 100644
--- a/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.cpp
+++ b/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.cpp
@@ -49,6 +49,7 @@
#include <private/qgstreamervideorendererinterface_p.h>
#include <private/qgstreameraudioprobecontrol_p.h>
#include <private/qgstreamerbushelper_p.h>
+#include <private/qgstutils_p.h>
#include <gst/gsttagsetter.h>
#include <gst/gstversion.h>
@@ -64,9 +65,6 @@
QT_BEGIN_NAMESPACE
-#define gstRef(element) { gst_object_ref(GST_OBJECT(element)); gst_object_sink(GST_OBJECT(element)); }
-#define gstUnref(element) { if (element) { gst_object_unref(GST_OBJECT(element)); element = 0; } }
-
QGstreamerCaptureSession::QGstreamerCaptureSession(QGstreamerCaptureSession::CaptureMode captureMode, QObject *parent)
:QObject(parent),
m_state(StoppedState),
@@ -97,7 +95,7 @@ QGstreamerCaptureSession::QGstreamerCaptureSession(QGstreamerCaptureSession::Cap
m_passPrerollImage(false)
{
m_pipeline = gst_pipeline_new("media-capture-pipeline");
- gstRef(m_pipeline);
+ qt_gst_object_ref_sink(m_pipeline);
m_bus = gst_element_get_bus(m_pipeline);
m_busHelper = new QGstreamerBusHelper(m_bus, this);
@@ -116,6 +114,7 @@ QGstreamerCaptureSession::~QGstreamerCaptureSession()
{
setState(StoppedState);
gst_element_set_state(m_pipeline, GST_STATE_NULL);
+ gst_object_unref(GST_OBJECT(m_bus));
gst_object_unref(GST_OBJECT(m_pipeline));
}
@@ -160,6 +159,7 @@ GstElement *QGstreamerCaptureSession::buildEncodeBin()
gst_bin_add(GST_BIN(encodeBin), audioEncoder);
if (!gst_element_link_many(audioConvert, audioQueue, m_audioVolume, audioEncoder, muxer, NULL)) {
+ m_audioVolume = 0;
gst_object_unref(encodeBin);
return 0;
}
@@ -333,6 +333,7 @@ GstElement *QGstreamerCaptureSession::buildVideoPreview()
g_object_set(G_OBJECT(capsFilter), "caps", caps, NULL);
+ gst_caps_unref(caps);
}
// add ghostpads
@@ -501,6 +502,7 @@ GstElement *QGstreamerCaptureSession::buildImageCapture()
GstPad *pad = gst_element_get_static_pad(queue, "src");
Q_ASSERT(pad);
gst_pad_add_buffer_probe(pad, G_CALLBACK(passImageFilter), this);
+ gst_object_unref(GST_OBJECT(pad));
g_object_set(G_OBJECT(sink), "signal-handoffs", TRUE, NULL);
g_signal_connect(G_OBJECT(sink), "handoff",
@@ -531,6 +533,7 @@ void QGstreamerCaptureSession::captureImage(int requestId, const QString &fileNa
#define REMOVE_ELEMENT(element) { if (element) {gst_bin_remove(GST_BIN(m_pipeline), element); element = 0;} }
+#define UNREF_ELEMENT(element) { if (element) { gst_object_unref(GST_OBJECT(element)); element = 0; } }
bool QGstreamerCaptureSession::rebuildGraph(QGstreamerCaptureSession::PipelineMode newMode)
{
@@ -562,6 +565,9 @@ bool QGstreamerCaptureSession::rebuildGraph(QGstreamerCaptureSession::PipelineMo
if (ok) {
gst_bin_add_many(GST_BIN(m_pipeline), m_audioSrc, m_audioPreview, NULL);
ok &= gst_element_link(m_audioSrc, m_audioPreview);
+ } else {
+ UNREF_ELEMENT(m_audioSrc);
+ UNREF_ELEMENT(m_audioPreview);
}
}
if (m_captureMode & Video || m_captureMode & Image) {
@@ -582,6 +588,12 @@ bool QGstreamerCaptureSession::rebuildGraph(QGstreamerCaptureSession::PipelineMo
ok &= gst_element_link(m_videoTee, m_videoPreviewQueue);
ok &= gst_element_link(m_videoPreviewQueue, m_videoPreview);
ok &= gst_element_link(m_videoTee, m_imageCaptureBin);
+ } else {
+ UNREF_ELEMENT(m_videoSrc);
+ UNREF_ELEMENT(m_videoTee);
+ UNREF_ELEMENT(m_videoPreviewQueue);
+ UNREF_ELEMENT(m_videoPreview);
+ UNREF_ELEMENT(m_imageCaptureBin);
}
}
break;
@@ -631,6 +643,11 @@ bool QGstreamerCaptureSession::rebuildGraph(QGstreamerCaptureSession::PipelineMo
ok &= gst_element_link(m_audioTee, m_audioPreviewQueue);
ok &= gst_element_link(m_audioPreviewQueue, m_audioPreview);
ok &= gst_element_link(m_audioTee, m_encodeBin);
+ } else {
+ UNREF_ELEMENT(m_audioSrc);
+ UNREF_ELEMENT(m_audioPreview);
+ UNREF_ELEMENT(m_audioTee);
+ UNREF_ELEMENT(m_audioPreviewQueue);
}
}
@@ -648,6 +665,11 @@ bool QGstreamerCaptureSession::rebuildGraph(QGstreamerCaptureSession::PipelineMo
ok &= gst_element_link(m_videoSrc, m_videoTee);
ok &= gst_element_link(m_videoTee, m_videoPreviewQueue);
ok &= gst_element_link(m_videoPreviewQueue, m_videoPreview);
+ } else {
+ UNREF_ELEMENT(m_videoSrc);
+ UNREF_ELEMENT(m_videoTee);
+ UNREF_ELEMENT(m_videoPreviewQueue);
+ UNREF_ELEMENT(m_videoPreview);
}
if (ok && (m_captureMode & Video))
@@ -917,6 +939,7 @@ void QGstreamerCaptureSession::setMetaData(const QMap<QByteArray, QVariant> &dat
}
}
+ gst_iterator_free(elements);
}
}
@@ -1096,8 +1119,10 @@ void QGstreamerCaptureSession::removeAudioBufferProbe()
return;
GstPad *pad = getAudioProbePad();
- if (pad)
+ if (pad) {
gst_pad_remove_buffer_probe(pad, m_audioBufferProbeId);
+ gst_object_unref(G_OBJECT(pad));
+ }
m_audioBufferProbeId = -1;
}
@@ -1107,8 +1132,10 @@ void QGstreamerCaptureSession::addAudioBufferProbe()
Q_ASSERT(m_audioBufferProbeId == -1);
GstPad *pad = getAudioProbePad();
- if (pad)
+ if (pad) {
m_audioBufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padAudioBufferProbe), this);
+ gst_object_unref(G_OBJECT(pad));
+ }
}
QT_END_NAMESPACE
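
[Note] gst_element_get_static_pad() returns a new reference, so the pad has to be unreffed once the probe has been added or removed, which is what the capture-session changes above add. In isolation:

    #include <gst/gst.h>

    void probeSourcePad(GstElement *element, GCallback probe, gpointer user_data)
    {
        // The returned pad is a new reference owned by the caller.
        GstPad *pad = gst_element_get_static_pad(element, "src");
        if (!pad)
            return;
        // GStreamer 0.10 probe API; keep the returned id if the probe
        // needs to be removed later.
        gst_pad_add_buffer_probe(pad, probe, user_data);
        gst_object_unref(GST_OBJECT(pad));
    }
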
diff --git a/src/plugins/gstreamer/mediacapture/qgstreamervideoencode.cpp b/src/plugins/gstreamer/mediacapture/qgstreamervideoencode.cpp
index 73112491f..c29735037 100644
--- a/src/plugins/gstreamer/mediacapture/qgstreamervideoencode.cpp
+++ b/src/plugins/gstreamer/mediacapture/qgstreamervideoencode.cpp
@@ -286,6 +286,8 @@ GstElement *QGstreamerVideoEncode::createEncoder()
//qDebug() << "set video caps filter:" << gst_caps_to_string(caps);
g_object_set(G_OBJECT(capsFilter), "caps", caps, NULL);
+
+ gst_caps_unref(caps);
}
return GST_ELEMENT(encoderBin);
diff --git a/src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.cpp b/src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.cpp
index da6120181..f4ac59420 100644
--- a/src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.cpp
+++ b/src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.cpp
@@ -48,6 +48,7 @@
#include <private/gstvideoconnector_p.h>
#include <private/qgstutils_p.h>
#include <private/playlistfileparser_p.h>
+#include <private/qgstutils_p.h>
#include <gst/gstvalue.h>
#include <gst/base/gstbasesrc.h>
@@ -157,17 +158,20 @@ QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
}
}
- m_videoOutputBin = gst_bin_new("video-output-bin");
- gst_object_ref(GST_OBJECT(m_videoOutputBin));
-
- m_videoIdentity = GST_ELEMENT(g_object_new(gst_video_connector_get_type(), 0));
+ m_videoIdentity = GST_ELEMENT(g_object_new(gst_video_connector_get_type(), 0)); // floating ref
g_signal_connect(G_OBJECT(m_videoIdentity), "connection-failed", G_CALLBACK(insertColorSpaceElement), (gpointer)this);
+
m_colorSpace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-vo");
- gst_object_ref(GST_OBJECT(m_colorSpace));
+ // might not get a parent, take ownership to avoid leak
+ qt_gst_object_ref_sink(GST_OBJECT(m_colorSpace));
m_nullVideoSink = gst_element_factory_make("fakesink", NULL);
g_object_set(G_OBJECT(m_nullVideoSink), "sync", true, NULL);
gst_object_ref(GST_OBJECT(m_nullVideoSink));
+
+ m_videoOutputBin = gst_bin_new("video-output-bin");
+ // might not get a parent, take ownership to avoid leak
+ qt_gst_object_ref_sink(GST_OBJECT(m_videoOutputBin));
gst_bin_add_many(GST_BIN(m_videoOutputBin), m_videoIdentity, m_nullVideoSink, NULL);
gst_element_link(m_videoIdentity, m_nullVideoSink);
@@ -238,6 +242,8 @@ void QGstreamerPlayerSession::configureAppSrcElement(GObject* object, GObject *o
if (!self->appsrc()->setup(appsrc))
qWarning()<<"Could not setup appsrc element";
+
+ g_object_unref(G_OBJECT(appsrc));
}
#endif
@@ -327,7 +333,7 @@ void QGstreamerPlayerSession::setPlaybackRate(qreal rate)
#endif
if (!qFuzzyCompare(m_playbackRate, rate)) {
m_playbackRate = rate;
- if (m_playbin) {
+ if (m_playbin && m_seekable) {
gst_element_seek(m_playbin, rate, GST_FORMAT_TIME,
GstSeekFlags(GST_SEEK_FLAG_FLUSH),
GST_SEEK_TYPE_NONE,0,
@@ -877,7 +883,7 @@ bool QGstreamerPlayerSession::seek(qint64 ms)
qDebug() << Q_FUNC_INFO << ms;
#endif
//seek locks when the video output sink is changing and pad is blocked
- if (m_playbin && !m_pendingVideoSink && m_state != QMediaPlayer::StoppedState) {
+ if (m_playbin && !m_pendingVideoSink && m_state != QMediaPlayer::StoppedState && m_seekable) {
ms = qMax(ms,qint64(0));
gint64 position = ms * 1000000;
bool isSeeking = gst_element_seek(m_playbin,
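
[Note] Both the rate change and seek() are now gated on m_seekable, which the session learns from the pipeline. Seekability can also be asked for explicitly with a seeking query; a minimal sketch:

    #include <gst/gst.h>

    gboolean pipelineIsSeekable(GstElement *pipeline)
    {
        gboolean seekable = FALSE;
        GstQuery *query = gst_query_new_seeking(GST_FORMAT_TIME);
        if (gst_element_query(pipeline, query))
            gst_query_parse_seeking(query, NULL, &seekable, NULL, NULL);
        gst_query_unref(query);
        return seekable;
    }
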
diff --git a/src/plugins/opensles/qopenslesengine.cpp b/src/plugins/opensles/qopenslesengine.cpp
index 056b51e26..68c324f67 100644
--- a/src/plugins/opensles/qopenslesengine.cpp
+++ b/src/plugins/opensles/qopenslesengine.cpp
@@ -55,6 +55,7 @@ Q_GLOBAL_STATIC(QOpenSLESEngine, openslesEngine);
QOpenSLESEngine::QOpenSLESEngine()
: m_engineObject(0)
, m_engine(0)
+ , m_checkedInputFormats(false)
{
SLresult result;
@@ -66,8 +67,6 @@ QOpenSLESEngine::QOpenSLESEngine()
result = (*m_engineObject)->GetInterface(m_engineObject, SL_IID_ENGINE, &m_engine);
CheckError("Failed to get engine interface");
-
- checkSupportedInputFormats();
}
QOpenSLESEngine::~QOpenSLESEngine()
@@ -118,15 +117,20 @@ QList<QByteArray> QOpenSLESEngine::availableDevices(QAudio::Mode mode) const
QList<int> QOpenSLESEngine::supportedChannelCounts(QAudio::Mode mode) const
{
- if (mode == QAudio::AudioInput)
+ if (mode == QAudio::AudioInput) {
+ if (!m_checkedInputFormats)
+ const_cast<QOpenSLESEngine *>(this)->checkSupportedInputFormats();
return m_supportedInputChannelCounts;
- else
+ } else {
return QList<int>() << 1 << 2;
+ }
}
QList<int> QOpenSLESEngine::supportedSampleRates(QAudio::Mode mode) const
{
if (mode == QAudio::AudioInput) {
+ if (!m_checkedInputFormats)
+ const_cast<QOpenSLESEngine *>(this)->checkSupportedInputFormats();
return m_supportedInputSampleRates;
} else {
return QList<int>() << 8000 << 11025 << 12000 << 16000 << 22050
@@ -177,6 +181,8 @@ void QOpenSLESEngine::checkSupportedInputFormats()
if (inputFormatIsSupported(format))
m_supportedInputChannelCounts.append(2);
}
+
+ m_checkedInputFormats = true;
}
bool QOpenSLESEngine::inputFormatIsSupported(SLDataFormat_PCM format)
diff --git a/src/plugins/opensles/qopenslesengine.h b/src/plugins/opensles/qopenslesengine.h
index 9f12ac65d..81d1f665b 100644
--- a/src/plugins/opensles/qopenslesengine.h
+++ b/src/plugins/opensles/qopenslesengine.h
@@ -75,6 +75,7 @@ private:
QList<int> m_supportedInputChannelCounts;
QList<int> m_supportedInputSampleRates;
+ bool m_checkedInputFormats;
};
QT_END_NAMESPACE
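
[Note] Probing the supported input formats is now deferred from the engine constructor to the first query, presumably because the probe has to talk to the audio device. The shape of that lazy initialisation, reduced to its essentials with illustrative names:

    #include <QList>

    class EngineLike
    {
    public:
        EngineLike() : m_checkedInputFormats(false) { }

        QList<int> supportedInputSampleRates() const
        {
            // The first call pays for the probe; later calls hit the cache.
            if (!m_checkedInputFormats)
                const_cast<EngineLike *>(this)->checkSupportedInputFormats();
            return m_supportedInputSampleRates;
        }

    private:
        void checkSupportedInputFormats()
        {
            // ...expensive device probing would go here...
            m_supportedInputSampleRates << 44100 << 48000;  // placeholder values
            m_checkedInputFormats = true;
        }

        QList<int> m_supportedInputSampleRates;
        bool m_checkedInputFormats;
    };
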
diff --git a/src/plugins/qnx/mediaplayer/bpsmediaplayercontrol.cpp b/src/plugins/qnx/mediaplayer/bpsmediaplayercontrol.cpp
index dde03ad59..2774a4c88 100644
--- a/src/plugins/qnx/mediaplayer/bpsmediaplayercontrol.cpp
+++ b/src/plugins/qnx/mediaplayer/bpsmediaplayercontrol.cpp
@@ -103,11 +103,15 @@ bool BpsMediaPlayerControl::nativeEventFilter(const QByteArray &eventType, void
}
if (bps_event_get_code(event) == MMRENDERER_STATUS_UPDATE) {
- const qint64 newPosition = QString::fromLatin1(mmrenderer_event_get_position(event)).toLongLong();
+ const qint64 newPosition = QString::fromLatin1(mmrenderer_event_get_position(event)).
+ toLongLong();
handleMmStatusUpdate(newPosition);
- const QString bufferStatus = QString::fromLatin1(mmrenderer_event_get_bufferlevel(event));
- setMmBufferStatus(bufferStatus);
+ const QString status = QString::fromLatin1(mmrenderer_event_get_bufferstatus(event));
+ setMmBufferStatus(status);
+
+ const QString level = QString::fromLatin1(mmrenderer_event_get_bufferlevel(event));
+ setMmBufferLevel(level);
}
}
diff --git a/src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.cpp b/src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.cpp
index ba3cbfdff..004eca36d 100644
--- a/src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.cpp
+++ b/src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.cpp
@@ -73,7 +73,7 @@ MmRendererMediaPlayerControl::MmRendererMediaPlayerControl(QObject *parent)
m_playAfterMediaLoaded(false),
m_inputAttached(false),
m_stopEventsToIgnore(0),
- m_bufferStatus(0)
+ m_bufferLevel(0)
{
m_loadingTimer.setSingleShot(true);
m_loadingTimer.setInterval(0);
@@ -234,8 +234,11 @@ void MmRendererMediaPlayerControl::attach()
m_inputAttached = true;
setMediaStatus(QMediaPlayer::LoadedMedia);
- m_bufferStatus = 0;
- emit bufferStatusChanged(m_bufferStatus);
+
+ // mm-renderer has buffer properties "status" and "level"
+ // QMediaPlayer's buffer status maps to mm-renderer's buffer level
+ m_bufferLevel = 0;
+ emit bufferStatusChanged(m_bufferLevel);
}
void MmRendererMediaPlayerControl::detach()
@@ -406,7 +409,9 @@ void MmRendererMediaPlayerControl::setMuted(bool muted)
int MmRendererMediaPlayerControl::bufferStatus() const
{
- return m_bufferStatus;
+ // mm-renderer has buffer properties "status" and "level"
+ // QMediaPlayer's buffer status maps to mm-renderer's buffer level
+ return m_bufferLevel;
}
bool MmRendererMediaPlayerControl::isAudioAvailable() const
@@ -585,13 +590,23 @@ void MmRendererMediaPlayerControl::setMmPosition(qint64 newPosition)
void MmRendererMediaPlayerControl::setMmBufferStatus(const QString &bufferStatus)
{
- const int slashPos = bufferStatus.indexOf('/');
+ if (bufferStatus == QLatin1String("buffering"))
+ setMediaStatus(QMediaPlayer::BufferingMedia);
+ else if (bufferStatus == QLatin1String("playing"))
+ setMediaStatus(QMediaPlayer::BufferedMedia);
+ // ignore "idle" buffer status
+}
+
+void MmRendererMediaPlayerControl::setMmBufferLevel(const QString &bufferLevel)
+{
+ // buffer level has format level/capacity, e.g. "91319/124402"
+ const int slashPos = bufferLevel.indexOf('/');
if (slashPos != -1) {
- const int fill = bufferStatus.leftRef(slashPos).toInt();
- const int capacity = bufferStatus.midRef(slashPos + 1).toInt();
+ const int fill = bufferLevel.leftRef(slashPos).toInt();
+ const int capacity = bufferLevel.midRef(slashPos + 1).toInt();
if (capacity != 0) {
- m_bufferStatus = fill / static_cast<float>(capacity) * 100.0f;
- emit bufferStatusChanged(m_bufferStatus);
+ m_bufferLevel = fill / static_cast<float>(capacity) * 100.0f;
+ emit bufferStatusChanged(m_bufferLevel);
}
}
}
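
[Note] mm-renderer reports the buffer level as a "fill/capacity" string, which the control converts into the percentage QMediaPlayer expects. The conversion in isolation, kept close to the patch but simplified into a free function:

    #include <QString>

    // "91319/124402" -> 73 (percent); returns -1 if the string is malformed.
    int bufferLevelPercent(const QString &bufferLevel)
    {
        const int slashPos = bufferLevel.indexOf(QLatin1Char('/'));
        if (slashPos == -1)
            return -1;
        const int fill = bufferLevel.leftRef(slashPos).toInt();
        const int capacity = bufferLevel.midRef(slashPos + 1).toInt();
        if (capacity == 0)
            return -1;
        return int(fill / static_cast<float>(capacity) * 100.0f);
    }
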
diff --git a/src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.h b/src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.h
index a22e71bfc..ffa8ae4fb 100644
--- a/src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.h
+++ b/src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.h
@@ -115,6 +115,7 @@ protected:
void emitPError(const QString &msg);
void setMmPosition(qint64 newPosition);
void setMmBufferStatus(const QString &bufferStatus);
+ void setMmBufferLevel(const QString &bufferLevel);
void handleMmStopped();
void handleMmStatusUpdate(qint64 position);
@@ -162,7 +163,7 @@ private:
bool m_playAfterMediaLoaded;
bool m_inputAttached;
int m_stopEventsToIgnore;
- int m_bufferStatus;
+ int m_bufferLevel;
QString m_tempMediaFileName;
QTimer m_loadingTimer;
};
diff --git a/src/plugins/qnx/mediaplayer/ppsmediaplayercontrol.cpp b/src/plugins/qnx/mediaplayer/ppsmediaplayercontrol.cpp
index b54c7963f..de209c49b 100644
--- a/src/plugins/qnx/mediaplayer/ppsmediaplayercontrol.cpp
+++ b/src/plugins/qnx/mediaplayer/ppsmediaplayercontrol.cpp
@@ -171,9 +171,12 @@ void PpsMediaPlayerControl::ppsReadyRead(int fd)
pps_decoder_push(&decoder, 0);
const char *value = 0;
- if (pps_decoder_get_string(&decoder, "bufferlevel", &value) == PPS_DECODER_OK) {
+
+ if (pps_decoder_get_string(&decoder, "bufferstatus", &value) == PPS_DECODER_OK)
setMmBufferStatus(QString::fromLatin1(value));
- }
+
+ if (pps_decoder_get_string(&decoder, "bufferlevel", &value) == PPS_DECODER_OK)
+ setMmBufferLevel(QString::fromLatin1(value));
if (pps_decoder_get_string(&decoder, "state", &value) == PPS_DECODER_OK) {
const QByteArray state = value;
diff --git a/src/plugins/wmf/player/mfvideorenderercontrol.cpp b/src/plugins/wmf/player/mfvideorenderercontrol.cpp
index e10cebb26..1705b2a48 100644
--- a/src/plugins/wmf/player/mfvideorenderercontrol.cpp
+++ b/src/plugins/wmf/player/mfvideorenderercontrol.cpp
@@ -257,6 +257,7 @@ namespace
, m_bufferStartTime(-1)
, m_bufferDuration(-1)
, m_presentationClock(0)
+ , m_sampleRequested(false)
, m_currentMediaType(0)
, m_prerolling(false)
, m_prerollTargetTime(0)
@@ -854,6 +855,15 @@ namespace
schedulePresentation(true);
}
+ void clearScheduledFrame()
+ {
+ QMutexLocker locker(&m_mutex);
+ if (m_scheduledBuffer) {
+ m_scheduledBuffer->Release();
+ m_scheduledBuffer = NULL;
+ }
+ }
+
enum
{
StartSurface = QEvent::User,
@@ -871,7 +881,7 @@ namespace
{
}
- int targetTime()
+ MFTIME targetTime()
{
return m_time;
}
@@ -1317,6 +1327,8 @@ namespace
HRESULT processSampleData(IMFSample *pSample)
{
+ m_sampleRequested = false;
+
LONGLONG time, duration = -1;
HRESULT hr = pSample->GetSampleTime(&time);
if (SUCCEEDED(hr))
@@ -1392,7 +1404,9 @@ namespace
m_bufferCache.pop_front();
if (timeOK && currentTime > sb.m_time) {
sb.m_buffer->Release();
+#ifdef DEBUG_MEDIAFOUNDATION
qDebug() << "currentPresentTime =" << float(currentTime / 10000) * 0.001f << " and sampleTime is" << float(sb.m_time / 10000) * 0.001f;
+#endif
continue;
}
m_scheduledBuffer = sb.m_buffer;
@@ -1404,13 +1418,16 @@ namespace
break;
}
}
- if (requestSample && m_bufferCache.size() < BUFFER_CACHE_SIZE)
+ if (requestSample && !m_sampleRequested && m_bufferCache.size() < BUFFER_CACHE_SIZE) {
+ m_sampleRequested = true;
queueEvent(MEStreamSinkRequestSample, GUID_NULL, S_OK, NULL);
+ }
}
IMFMediaBuffer *m_scheduledBuffer;
MFTIME m_bufferStartTime;
MFTIME m_bufferDuration;
IMFPresentationClock *m_presentationClock;
+ bool m_sampleRequested;
float m_rate;
};
@@ -1478,6 +1495,14 @@ namespace
m_stream->present();
}
+ void clearScheduledFrame()
+ {
+ QMutexLocker locker(&m_mutex);
+ if (m_shutdown)
+ return;
+ m_stream->clearScheduledFrame();
+ }
+
MFTIME getTime()
{
QMutexLocker locker(&m_mutex);
@@ -2064,6 +2089,14 @@ namespace
m_sink->present();
}
+ void clearScheduledFrame()
+ {
+ QMutexLocker locker(&m_mutex);
+ if (!m_sink)
+ return;
+ m_sink->clearScheduledFrame();
+ }
+
MFTIME getTime()
{
if (m_sink)
@@ -2168,10 +2201,16 @@ void MFVideoRendererControl::customEvent(QEvent *event)
MFTIME targetTime = static_cast<MediaStream::PresentEvent*>(event)->targetTime();
MFTIME currentTime = static_cast<VideoRendererActivate*>(m_currentActivate)->getTime();
float playRate = static_cast<VideoRendererActivate*>(m_currentActivate)->getPlayRate();
- if (playRate > 0.0001f && targetTime > currentTime)
- QTimer::singleShot(int((float)((targetTime - currentTime) / 10000) / playRate), this, SLOT(present()));
- else
+ if (!qFuzzyIsNull(playRate)) {
+ // If the scheduled frame is too late or too much in advance, skip it
+ const int diff = (targetTime - currentTime) / 10000;
+ if (diff < 0 || diff > 500)
+ static_cast<VideoRendererActivate*>(m_currentActivate)->clearScheduledFrame();
+ else
+ QTimer::singleShot(diff / playRate, this, SLOT(present()));
+ } else {
present();
+ }
return;
}
if (event->type() >= MediaStream::StartSurface) {
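
[Note] Instead of always arming a single-shot timer, the renderer now drops frames whose target time is already in the past or more than half a second ahead, which avoids presenting a stale frame after a seek or pause. The timing arithmetic spelled out; the parameters stand for MFTIME values, i.e. 100-nanosecond units:

    #include <QtGlobal>

    // Returns the presentation delay in milliseconds, or -1 if the frame
    // should be discarded instead of scheduled.
    int presentationDelayMs(qint64 target, qint64 now, float playRate)
    {
        if (qFuzzyIsNull(playRate))
            return 0;                                    // clock stopped: present now
        const int diffMs = int((target - now) / 10000);  // 100 ns -> ms
        if (diffMs < 0 || diffMs > 500)
            return -1;                                   // stale or too early: drop it
        return int(diffMs / playRate);
    }
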
diff --git a/src/qtmultimediaquicktools/qdeclarativevideooutput.cpp b/src/qtmultimediaquicktools/qdeclarativevideooutput.cpp
index 5d2c57dbf..b13665f66 100644
--- a/src/qtmultimediaquicktools/qdeclarativevideooutput.cpp
+++ b/src/qtmultimediaquicktools/qdeclarativevideooutput.cpp
@@ -464,7 +464,7 @@ void QDeclarativeVideoOutput::setOrientation(int orientation)
}
/*!
- \qmlproperty int QtMultimedia::VideoOutput::autoOrientation
+ \qmlproperty bool QtMultimedia::VideoOutput::autoOrientation
This property allows you to enable and disable auto orientation
of the video stream, so that its orientation always matches
diff --git a/tests/auto/integration/qcamerabackend/qcamerabackend.pro b/tests/auto/integration/qcamerabackend/qcamerabackend.pro
index c39ce9c41..b19c52abe 100644
--- a/tests/auto/integration/qcamerabackend/qcamerabackend.pro
+++ b/tests/auto/integration/qcamerabackend/qcamerabackend.pro
@@ -9,4 +9,3 @@ CONFIG += testcase
SOURCES += tst_qcamerabackend.cpp
DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0
-mac: CONFIG += insignificant_test # QTBUG-36040