author      Frederik Gladhorn <frederik.gladhorn@digia.com>   2014-04-11 14:21:32 +0200
committer   Frederik Gladhorn <frederik.gladhorn@digia.com>   2014-04-11 14:22:22 +0200
commit      00c072bf0eb25104086eb43e860e038af5f5d340 (patch)
tree        ead5b949d0a8314bdfd62dfe6fe900e27025c685 /src
parent      a2303617ea8d2cb118d0edc5e4b743d7c4dc8dfc (diff)
parent      8a047ef087cebba5953ad3ed7b2d99df7966b695 (diff)

Merge remote-tracking branch 'origin/stable' into dev

Conflicts:
	tests/auto/unit/qaudioformat/tst_qaudioformat.cpp

Change-Id: I8499473569df9eac8c7069160e42ed477dacad4d
Diffstat (limited to 'src')
-rw-r--r--  src/gsttools/gsttools.pro | 3
-rw-r--r--  src/gsttools/qgstreamervideoinputdevicecontrol.cpp | 15
-rw-r--r--  src/imports/multimedia/qdeclarativeaudio.cpp | 44
-rw-r--r--  src/multimedia/audio/qwavedecoder_p.cpp | 20
-rw-r--r--  src/multimedia/audio/qwavedecoder_p.h | 2
-rw-r--r--  src/multimedia/doc/qtmultimedia.qdocconf | 2
-rw-r--r--  src/multimedia/multimedia.pro | 11
-rw-r--r--  src/plugins/android/jar/jar.pri | 2
-rw-r--r--  src/plugins/android/jar/src/org/qtproject/qt5/android/multimedia/QtMediaRecorderListener.java (renamed from src/plugins/android/jar/src/org/qtproject/qt5/android/multimedia/QtMediaRecorder.java) | 7
-rw-r--r--  src/plugins/android/src/wrappers/jcamera.cpp | 117
-rw-r--r--  src/plugins/android/src/wrappers/jmediarecorder.cpp | 73
-rw-r--r--  src/plugins/android/src/wrappers/jmediarecorder.h | 3
-rw-r--r--  src/plugins/android/videonode/videonode.pro | 1
-rw-r--r--  src/plugins/coreaudio/coreaudiodeviceinfo.mm | 36
-rw-r--r--  src/plugins/coreaudio/coreaudiosessionmanager.h | 3
-rw-r--r--  src/plugins/coreaudio/coreaudiosessionmanager.mm | 10
-rw-r--r--  src/plugins/directshow/player/directshowplayercontrol.cpp | 13
-rw-r--r--  src/plugins/directshow/player/directshowplayercontrol.h | 1
-rw-r--r--  src/plugins/gstreamer/audiodecoder/qgstreameraudiodecoderserviceplugin.cpp | 13
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabincontrol.cpp | 12
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinserviceplugin.cpp | 16
-rw-r--r--  src/plugins/gstreamer/common.pri | 2
-rw-r--r--  src/plugins/gstreamer/mediacapture/qgstreamercaptureserviceplugin.cpp | 16
-rw-r--r--  src/plugins/gstreamer/mediacapture/qgstreamerv4l2input.cpp | 11
-rw-r--r--  src/plugins/gstreamer/mediaplayer/qgstreamermetadataprovider.cpp | 19
-rw-r--r--  src/plugins/gstreamer/mediaplayer/qgstreamerplayerserviceplugin.cpp | 14
-rw-r--r--  src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.cpp | 76
-rw-r--r--  src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.h | 1
-rw-r--r--  src/plugins/qnx/camera/bbcameraorientationhandler.cpp | 18
-rw-r--r--  src/plugins/qnx/camera/bbcameraorientationhandler.h | 2
-rw-r--r--  src/plugins/qnx/camera/bbcamerasession.cpp | 13
-rw-r--r--  src/plugins/qnx/common/windowgrabber.cpp | 173
-rw-r--r--  src/plugins/qnx/common/windowgrabber.h | 29
-rw-r--r--  src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.cpp | 7
-rw-r--r--  src/plugins/qnx/mediaplayer/mmrendererplayervideorenderercontrol.cpp | 81
-rw-r--r--  src/plugins/qnx/mediaplayer/mmrendererplayervideorenderercontrol.h | 4
-rw-r--r--  src/plugins/videonode/imx6/imx6.pro | 1
-rw-r--r--  src/plugins/wmf/mfactivate.cpp | 2
-rw-r--r--  src/plugins/wmf/player/mfplayersession.cpp | 53
-rw-r--r--  src/plugins/wmf/player/mfplayersession.h | 1
-rw-r--r--  src/plugins/wmf/player/mfvideorenderercontrol.cpp | 139
-rw-r--r--  src/plugins/wmf/samplegrabber.h | 3
-rw-r--r--  src/plugins/wmf/sourceresolver.cpp | 2
-rw-r--r--  src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp | 10
-rw-r--r--  src/qtmultimediaquicktools/qsgvideonode_p.cpp | 1
45 files changed, 684 insertions, 398 deletions
diff --git a/src/gsttools/gsttools.pro b/src/gsttools/gsttools.pro
index 46184965c..15edd04d2 100644
--- a/src/gsttools/gsttools.pro
+++ b/src/gsttools/gsttools.pro
@@ -2,9 +2,10 @@ TEMPLATE = lib
TARGET = qgsttools_p
QPRO_PWD = $$PWD
-QT = core multimedia-private gui-private
+QT = core-private multimedia-private gui-private
!static:DEFINES += QT_MAKEDLL
+DEFINES += GLIB_VERSION_MIN_REQUIRED=GLIB_VERSION_2_26
unix:!maemo*:contains(QT_CONFIG, alsa) {
DEFINES += HAVE_ALSA
diff --git a/src/gsttools/qgstreamervideoinputdevicecontrol.cpp b/src/gsttools/qgstreamervideoinputdevicecontrol.cpp
index ad61aefad..e4e202caf 100644
--- a/src/gsttools/qgstreamervideoinputdevicecontrol.cpp
+++ b/src/gsttools/qgstreamervideoinputdevicecontrol.cpp
@@ -44,16 +44,7 @@
#include <QtCore/QDir>
#include <QtCore/QDebug>
-#include <linux/types.h>
-#include <sys/time.h>
-#include <sys/ioctl.h>
-#include <sys/poll.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <errno.h>
-#include <string.h>
-#include <stdlib.h>
-#include <sys/mman.h>
+#include <private/qcore_unix_p.h>
#include <linux/videodev2.h>
QGstreamerVideoInputDeviceControl::QGstreamerVideoInputDeviceControl(QObject *parent)
@@ -135,7 +126,7 @@ void QGstreamerVideoInputDeviceControl::update()
foreach( const QFileInfo &entryInfo, entries ) {
//qDebug() << "Try" << entryInfo.filePath();
- int fd = ::open(entryInfo.filePath().toLatin1().constData(), O_RDWR );
+ int fd = qt_safe_open(entryInfo.filePath().toLatin1().constData(), O_RDWR );
if (fd == -1)
continue;
@@ -165,6 +156,6 @@ void QGstreamerVideoInputDeviceControl::update()
m_names.append(entryInfo.filePath());
m_descriptions.append(name);
}
- ::close(fd);
+ qt_safe_close(fd);
}
}
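
The raw ::open()/::close() calls above are swapped for the qt_safe_* wrappers from <private/qcore_unix_p.h>, which retry on EINTR and open the descriptor close-on-exec. A minimal stand-alone sketch of what such wrappers provide, written against plain POSIX rather than Qt's private header (the actual Qt implementation may differ in detail):

    #include <fcntl.h>
    #include <unistd.h>
    #include <cerrno>

    // Illustrative stand-ins for qt_safe_open()/qt_safe_close(): retry on EINTR
    // and open the descriptor close-on-exec so child processes do not inherit it.
    static int safe_open(const char *path, int flags, mode_t mode = 0777)
    {
        int fd;
        do {
            fd = ::open(path, flags | O_CLOEXEC, mode);
        } while (fd == -1 && errno == EINTR);
        return fd;
    }

    static int safe_close(int fd)
    {
        int ret;
        do {
            ret = ::close(fd);
        } while (ret == -1 && errno == EINTR);
        return ret;
    }
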
diff --git a/src/imports/multimedia/qdeclarativeaudio.cpp b/src/imports/multimedia/qdeclarativeaudio.cpp
index 16828a4d1..37509b17e 100644
--- a/src/imports/multimedia/qdeclarativeaudio.cpp
+++ b/src/imports/multimedia/qdeclarativeaudio.cpp
@@ -290,15 +290,15 @@ void QDeclarativeAudio::setVolume(qreal volume)
return;
}
- if (m_vol == volume)
+ if (this->volume() == volume)
return;
- m_vol = volume;
-
- if (m_complete)
+ if (m_complete) {
m_player->setVolume(qRound(volume * 100));
- else
+ } else {
+ m_vol = volume;
emit volumeChanged();
+ }
}
bool QDeclarativeAudio::isMuted() const
@@ -308,15 +308,15 @@ bool QDeclarativeAudio::isMuted() const
void QDeclarativeAudio::setMuted(bool muted)
{
- if (m_muted == muted)
+ if (isMuted() == muted)
return;
- m_muted = muted;
-
- if (m_complete)
+ if (m_complete) {
m_player->setMuted(muted);
- else
+ } else {
+ m_muted = muted;
emit mutedChanged();
+ }
}
qreal QDeclarativeAudio::bufferProgress() const
@@ -331,20 +331,20 @@ bool QDeclarativeAudio::isSeekable() const
qreal QDeclarativeAudio::playbackRate() const
{
- return m_playbackRate;
+ return m_complete ? m_player->playbackRate() : m_playbackRate;
}
void QDeclarativeAudio::setPlaybackRate(qreal rate)
{
- if (m_playbackRate == rate)
+ if (playbackRate() == rate)
return;
- m_playbackRate = rate;
-
- if (m_complete)
- m_player->setPlaybackRate(m_playbackRate);
- else
+ if (m_complete) {
+ m_player->setPlaybackRate(rate);
+ } else {
+ m_playbackRate = rate;
emit playbackRateChanged();
+ }
}
QString QDeclarativeAudio::errorString() const
@@ -426,12 +426,12 @@ void QDeclarativeAudio::seek(int position)
if (this->position() == position)
return;
- m_position = position;
-
- if (m_complete)
- m_player->setPosition(m_position);
- else
+ if (m_complete) {
+ m_player->setPosition(position);
+ } else {
+ m_position = position;
emit positionChanged();
+ }
}
/*!
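
All of the setter changes in qdeclarativeaudio.cpp above follow one pattern: before the QML component is complete, the value is cached and the change signal is emitted locally; once complete, the value goes straight to the backend player, which emits its own notification. A condensed sketch of that pattern (the class, the Backend type and the member names are simplified stand-ins, not the real QDeclarativeAudio declaration):

    #include <QtCore/QObject>

    struct Backend {                           // stand-in for the real QMediaPlayer
        qreal playbackRate() const { return m_rate; }
        void setPlaybackRate(qreal rate) { m_rate = rate; /* real backend notifies */ }
        qreal m_rate = 1.0;
    };

    // Simplified illustration of the "cache until the component is complete"
    // setter pattern applied throughout this patch.
    class MediaElement : public QObject
    {
        Q_OBJECT
    public:
        qreal playbackRate() const
        {
            // Prefer the backend's live value once it exists.
            return m_complete ? m_backend->playbackRate() : m_playbackRate;
        }

        void setPlaybackRate(qreal rate)
        {
            if (playbackRate() == rate)
                return;
            if (m_complete) {
                m_backend->setPlaybackRate(rate);   // backend emits the change
            } else {
                m_playbackRate = rate;              // picked up on componentComplete()
                emit playbackRateChanged();
            }
        }

    signals:
        void playbackRateChanged();

    private:
        Backend *m_backend = nullptr;
        qreal m_playbackRate = 1.0;
        bool m_complete = false;
    };
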
diff --git a/src/multimedia/audio/qwavedecoder_p.cpp b/src/multimedia/audio/qwavedecoder_p.cpp
index 497a146df..974a8f509 100644
--- a/src/multimedia/audio/qwavedecoder_p.cpp
+++ b/src/multimedia/audio/qwavedecoder_p.cpp
@@ -166,6 +166,8 @@ void QWaveDecoder::handleData()
// Swizzle this
if (bigEndian) {
wave.audioFormat = qFromBigEndian<quint16>(wave.audioFormat);
+ } else {
+ wave.audioFormat = qFromLittleEndian<quint16>(wave.audioFormat);
}
if (wave.audioFormat != 0 && wave.audioFormat != 1) {
@@ -207,6 +209,8 @@ void QWaveDecoder::handleData()
source->read(reinterpret_cast<char *>(&descriptor), sizeof(chunk));
if (bigEndian)
descriptor.size = qFromBigEndian<quint32>(descriptor.size);
+ else
+ descriptor.size = qFromLittleEndian<quint32>(descriptor.size);
dataSize = descriptor.size;
@@ -227,13 +231,15 @@ void QWaveDecoder::handleData()
bool QWaveDecoder::enoughDataAvailable()
{
chunk descriptor;
- if (!peekChunk(&descriptor))
+ if (!peekChunk(&descriptor, false))
return false;
// This is only called for the RIFF/RIFX header, before bigEndian is set,
// so we have to manually swizzle
if (qstrncmp(descriptor.id, "RIFX", 4) == 0)
descriptor.size = qFromBigEndian<quint32>(descriptor.size);
+ if (qstrncmp(descriptor.id, "RIFF", 4) == 0)
+ descriptor.size = qFromLittleEndian<quint32>(descriptor.size);
if (source->bytesAvailable() < qint64(sizeof(chunk) + descriptor.size))
return false;
@@ -270,16 +276,18 @@ bool QWaveDecoder::findChunk(const char *chunkId)
return false;
}
-// Handles endianness
-bool QWaveDecoder::peekChunk(chunk *pChunk)
+bool QWaveDecoder::peekChunk(chunk *pChunk, bool handleEndianness)
{
if (source->bytesAvailable() < qint64(sizeof(chunk)))
return false;
source->peek(reinterpret_cast<char *>(pChunk), sizeof(chunk));
- if (bigEndian)
- pChunk->size = qFromBigEndian<quint32>(pChunk->size);
-
+ if (handleEndianness) {
+ if (bigEndian)
+ pChunk->size = qFromBigEndian<quint32>(pChunk->size);
+ else
+ pChunk->size = qFromLittleEndian<quint32>(pChunk->size);
+ }
return true;
}
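
The decoder now swizzles chunk sizes explicitly for both byte orders: RIFX containers store them big-endian, plain RIFF stores them little-endian, and peekChunk() gained a flag so the RIFF/RIFX header itself can be inspected before the container's endianness is known. A small sketch of that swizzling with QtEndian (the chunk struct mirrors qwavedecoder_p.h; the helper function is made up for illustration):

    #include <QtCore/QtEndian>
    #include <QtCore/QByteArray>
    #include <cstring>

    struct chunk
    {
        char    id[4];   // "RIFF", "RIFX", "fmt ", "data", ...
        quint32 size;    // stored in the container's byte order
    };

    // Hypothetical helper, not part of QWaveDecoder: peek a chunk header from raw
    // bytes and return its payload size in host byte order.
    static quint32 chunkPayloadSize(const QByteArray &raw, bool bigEndianContainer)
    {
        if (raw.size() < int(sizeof(chunk)))
            return 0;

        chunk c;
        std::memcpy(&c, raw.constData(), sizeof(chunk));
        return bigEndianContainer ? qFromBigEndian<quint32>(c.size)      // RIFX
                                  : qFromLittleEndian<quint32>(c.size);  // RIFF
    }
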
diff --git a/src/multimedia/audio/qwavedecoder_p.h b/src/multimedia/audio/qwavedecoder_p.h
index c21d8cb5b..24cdb7885 100644
--- a/src/multimedia/audio/qwavedecoder_p.h
+++ b/src/multimedia/audio/qwavedecoder_p.h
@@ -103,7 +103,7 @@ private:
char id[4];
quint32 size;
};
- bool peekChunk(chunk* pChunk);
+ bool peekChunk(chunk* pChunk, bool handleEndianness = true);
struct RIFFHeader
{
diff --git a/src/multimedia/doc/qtmultimedia.qdocconf b/src/multimedia/doc/qtmultimedia.qdocconf
index 83abf5b49..d6ff6822e 100644
--- a/src/multimedia/doc/qtmultimedia.qdocconf
+++ b/src/multimedia/doc/qtmultimedia.qdocconf
@@ -43,7 +43,7 @@ sourcedirs += ../..
excludedirs += ../../multimediawidgets
-depends += qtcore qtdoc qtquick qtqml qtmultimediawidgets
+depends += qtcore qtdoc qtgui qtquick qtqml qtmultimediawidgets qtwidgets
navigation.landingpage = "Qt Multimedia"
navigation.cppclassespage = "Qt Multimedia C++ Classes"
diff --git a/src/multimedia/multimedia.pro b/src/multimedia/multimedia.pro
index 5ed0cefc0..23cca2537 100644
--- a/src/multimedia/multimedia.pro
+++ b/src/multimedia/multimedia.pro
@@ -1,6 +1,12 @@
TARGET = QtMultimedia
QT = core-private network gui-private
+MODULE_PLUGIN_TYPES = \
+ mediaservice \
+ audio \
+ video/videonode \
+ playlistformats
+
QMAKE_DOCS = $$PWD/doc/qtmultimedia.qdocconf
load(qt_module)
@@ -67,11 +73,6 @@ ANDROID_FEATURES += \
android.hardware.camera \
android.hardware.camera.autofocus \
android.hardware.microphone
-MODULE_PLUGIN_TYPES = \
- mediaservice \
- audio \
- video/videonode \
- playlistformats
win32: LIBS_PRIVATE += -luuid
diff --git a/src/plugins/android/jar/jar.pri b/src/plugins/android/jar/jar.pri
index e56e3d966..d8bc59a72 100644
--- a/src/plugins/android/jar/jar.pri
+++ b/src/plugins/android/jar/jar.pri
@@ -10,7 +10,7 @@ JAVASOURCES += $$PWD/src/org/qtproject/qt5/android/multimedia/QtAndroidMediaPlay
$$PWD/src/org/qtproject/qt5/android/multimedia/QtSurfaceTextureListener.java \
$$PWD/src/org/qtproject/qt5/android/multimedia/QtSurfaceTextureHolder.java \
$$PWD/src/org/qtproject/qt5/android/multimedia/QtMultimediaUtils.java \
- $$PWD/src/org/qtproject/qt5/android/multimedia/QtMediaRecorder.java
+ $$PWD/src/org/qtproject/qt5/android/multimedia/QtMediaRecorderListener.java
# install
target.path = $$[QT_INSTALL_PREFIX]/jar
diff --git a/src/plugins/android/jar/src/org/qtproject/qt5/android/multimedia/QtMediaRecorder.java b/src/plugins/android/jar/src/org/qtproject/qt5/android/multimedia/QtMediaRecorderListener.java
index d76cd2221..5c8557e88 100644
--- a/src/plugins/android/jar/src/org/qtproject/qt5/android/multimedia/QtMediaRecorder.java
+++ b/src/plugins/android/jar/src/org/qtproject/qt5/android/multimedia/QtMediaRecorderListener.java
@@ -43,16 +43,13 @@ package org.qtproject.qt5.android.multimedia;
import android.media.MediaRecorder;
-public class QtMediaRecorder extends MediaRecorder implements MediaRecorder.OnErrorListener, MediaRecorder.OnInfoListener
+public class QtMediaRecorderListener implements MediaRecorder.OnErrorListener, MediaRecorder.OnInfoListener
{
private long m_id = -1;
- public QtMediaRecorder(long id)
+ public QtMediaRecorderListener(long id)
{
- super();
m_id = id;
- setOnErrorListener(this);
- setOnInfoListener(this);
}
@Override
diff --git a/src/plugins/android/src/wrappers/jcamera.cpp b/src/plugins/android/src/wrappers/jcamera.cpp
index 50a65a779..b0e9f89f3 100644
--- a/src/plugins/android/src/wrappers/jcamera.cpp
+++ b/src/plugins/android/src/wrappers/jcamera.cpp
@@ -47,6 +47,7 @@
#include "qandroidmultimediautils.h"
#include <qthread.h>
#include <qmutex.h>
+#include <QtCore/private/qjnihelpers_p.h>
QT_BEGIN_NAMESPACE
@@ -145,7 +146,7 @@ private:
QJNIObjectPrivate lastCamera;
};
-class JCameraWorker : public QObject, public QJNIObjectPrivate
+class JCameraWorker : public QObject
{
Q_OBJECT
friend class JCamera;
@@ -230,12 +231,11 @@ class JCameraWorker : public QObject, public QJNIObjectPrivate
QSize m_previewSize;
int m_rotation;
- bool m_hasAPI14;
-
JCamera *q;
QThread *m_workerThread;
QMutex m_parametersMutex;
+ QJNIObjectPrivate m_camera;
Q_SIGNALS:
void previewSizeChanged();
@@ -269,7 +269,7 @@ JCamera::JCamera(int cameraId, jobject cam, QThread *workerThread)
JCamera::~JCamera()
{
- if (d->isValid()) {
+ if (d->m_camera.isValid()) {
g_objectMapMutex.lock();
g_objectMap.remove(d->m_cameraId);
g_objectMapMutex.unlock();
@@ -595,7 +595,7 @@ void JCamera::fetchLastPreviewFrame()
QJNIObjectPrivate JCamera::getCameraObject()
{
- return d->getObjectField("m_camera", "Landroid/hardware/Camera;");
+ return d->m_camera.getObjectField("m_camera", "Landroid/hardware/Camera;");
}
void JCamera::startPreview()
@@ -613,47 +613,29 @@ void JCamera::stopPreview()
JCameraWorker::JCameraWorker(JCamera *camera, int cameraId, jobject cam, QThread *workerThread)
: QObject(0)
- , QJNIObjectPrivate(cam)
, m_cameraId(cameraId)
, m_rotation(0)
- , m_hasAPI14(false)
, m_parametersMutex(QMutex::Recursive)
+ , m_camera(cam)
{
q = camera;
m_workerThread = workerThread;
moveToThread(m_workerThread);
- if (isValid()) {
+ if (m_camera.isValid()) {
g_objectMapMutex.lock();
g_objectMap.insert(cameraId, q);
g_objectMapMutex.unlock();
m_info = QJNIObjectPrivate("android/hardware/Camera$CameraInfo");
- callStaticMethod<void>("android/hardware/Camera",
- "getCameraInfo",
- "(ILandroid/hardware/Camera$CameraInfo;)V",
- cameraId, m_info.object());
+ m_camera.callStaticMethod<void>("android/hardware/Camera",
+ "getCameraInfo",
+ "(ILandroid/hardware/Camera$CameraInfo;)V",
+ cameraId, m_info.object());
- QJNIObjectPrivate params = callObjectMethod("getParameters",
- "()Landroid/hardware/Camera$Parameters;");
+ QJNIObjectPrivate params = m_camera.callObjectMethod("getParameters",
+ "()Landroid/hardware/Camera$Parameters;");
m_parameters = QJNIObjectPrivate(params);
-
- // Check if API 14 is available
- QJNIEnvironmentPrivate env;
- jclass clazz = env->FindClass("android/hardware/Camera");
- if (env->ExceptionCheck()) {
- clazz = 0;
- env->ExceptionClear();
- }
- if (clazz) {
- // startFaceDetection() was added in API 14
- jmethodID id = env->GetMethodID(clazz, "startFaceDetection", "()V");
- if (env->ExceptionCheck()) {
- id = 0;
- env->ExceptionClear();
- }
- m_hasAPI14 = bool(id);
- }
}
}
@@ -668,7 +650,7 @@ void JCameraWorker::release()
m_parametersMutex.lock();
m_parameters = QJNIObjectPrivate();
m_parametersMutex.unlock();
- callMethod<void>("release");
+ m_camera.callMethod<void>("release");
}
JCamera::CameraFacing JCameraWorker::getFacing()
@@ -752,7 +734,9 @@ void JCameraWorker::updatePreviewSize()
void JCameraWorker::setPreviewTexture(void *surfaceTexture)
{
- callMethod<void>("setPreviewTexture", "(Landroid/graphics/SurfaceTexture;)V", static_cast<jobject>(surfaceTexture));
+ m_camera.callMethod<void>("setPreviewTexture",
+ "(Landroid/graphics/SurfaceTexture;)V",
+ static_cast<jobject>(surfaceTexture));
}
bool JCameraWorker::isZoomSupported()
@@ -878,9 +862,12 @@ void JCameraWorker::setFocusMode(const QString &value)
int JCameraWorker::getMaxNumFocusAreas()
{
+ if (QtAndroidPrivate::androidSdkVersion() < 14)
+ return 0;
+
QMutexLocker parametersLocker(&m_parametersMutex);
- if (!m_hasAPI14 || !m_parameters.isValid())
+ if (!m_parameters.isValid())
return 0;
return m_parameters.callMethod<jint>("getMaxNumFocusAreas");
@@ -888,11 +875,14 @@ int JCameraWorker::getMaxNumFocusAreas()
QList<QRect> JCameraWorker::getFocusAreas()
{
- QMutexLocker parametersLocker(&m_parametersMutex);
-
QList<QRect> areas;
- if (m_hasAPI14 && m_parameters.isValid()) {
+ if (QtAndroidPrivate::androidSdkVersion() < 14)
+ return areas;
+
+ QMutexLocker parametersLocker(&m_parametersMutex);
+
+ if (m_parameters.isValid()) {
QJNIObjectPrivate list = m_parameters.callObjectMethod("getFocusAreas",
"()Ljava/util/List;");
@@ -913,9 +903,12 @@ QList<QRect> JCameraWorker::getFocusAreas()
void JCameraWorker::setFocusAreas(const QList<QRect> &areas)
{
+ if (QtAndroidPrivate::androidSdkVersion() < 14)
+ return;
+
QMutexLocker parametersLocker(&m_parametersMutex);
- if (!m_hasAPI14 || !m_parameters.isValid())
+ if (!m_parameters.isValid())
return;
QJNIObjectPrivate list;
@@ -940,15 +933,18 @@ void JCameraWorker::setFocusAreas(const QList<QRect> &areas)
void JCameraWorker::autoFocus()
{
- callMethod<void>("autoFocus");
+ m_camera.callMethod<void>("autoFocus");
emit autoFocusStarted();
}
bool JCameraWorker::isAutoExposureLockSupported()
{
+ if (QtAndroidPrivate::androidSdkVersion() < 14)
+ return false;
+
QMutexLocker parametersLocker(&m_parametersMutex);
- if (!m_hasAPI14 || !m_parameters.isValid())
+ if (!m_parameters.isValid())
return false;
return m_parameters.callMethod<jboolean>("isAutoExposureLockSupported");
@@ -956,9 +952,12 @@ bool JCameraWorker::isAutoExposureLockSupported()
bool JCameraWorker::getAutoExposureLock()
{
+ if (QtAndroidPrivate::androidSdkVersion() < 14)
+ return false;
+
QMutexLocker parametersLocker(&m_parametersMutex);
- if (!m_hasAPI14 || !m_parameters.isValid())
+ if (!m_parameters.isValid())
return false;
return m_parameters.callMethod<jboolean>("getAutoExposureLock");
@@ -966,9 +965,12 @@ bool JCameraWorker::getAutoExposureLock()
void JCameraWorker::setAutoExposureLock(bool toggle)
{
+ if (QtAndroidPrivate::androidSdkVersion() < 14)
+ return;
+
QMutexLocker parametersLocker(&m_parametersMutex);
- if (!m_hasAPI14 || !m_parameters.isValid())
+ if (!m_parameters.isValid())
return;
m_parameters.callMethod<void>("setAutoExposureLock", "(Z)V", toggle);
@@ -977,9 +979,12 @@ void JCameraWorker::setAutoExposureLock(bool toggle)
bool JCameraWorker::isAutoWhiteBalanceLockSupported()
{
+ if (QtAndroidPrivate::androidSdkVersion() < 14)
+ return false;
+
QMutexLocker parametersLocker(&m_parametersMutex);
- if (!m_hasAPI14 || !m_parameters.isValid())
+ if (!m_parameters.isValid())
return false;
return m_parameters.callMethod<jboolean>("isAutoWhiteBalanceLockSupported");
@@ -987,9 +992,12 @@ bool JCameraWorker::isAutoWhiteBalanceLockSupported()
bool JCameraWorker::getAutoWhiteBalanceLock()
{
+ if (QtAndroidPrivate::androidSdkVersion() < 14)
+ return false;
+
QMutexLocker parametersLocker(&m_parametersMutex);
- if (!m_hasAPI14 || !m_parameters.isValid())
+ if (!m_parameters.isValid())
return false;
return m_parameters.callMethod<jboolean>("getAutoWhiteBalanceLock");
@@ -997,9 +1005,12 @@ bool JCameraWorker::getAutoWhiteBalanceLock()
void JCameraWorker::setAutoWhiteBalanceLock(bool toggle)
{
+ if (QtAndroidPrivate::androidSdkVersion() < 14)
+ return;
+
QMutexLocker parametersLocker(&m_parametersMutex);
- if (!m_hasAPI14 || !m_parameters.isValid())
+ if (!m_parameters.isValid())
return;
m_parameters.callMethod<void>("setAutoWhiteBalanceLock", "(Z)V", toggle);
@@ -1184,15 +1195,15 @@ void JCameraWorker::stopPreview()
void JCameraWorker::fetchEachFrame(bool fetch)
{
- callMethod<void>("fetchEachFrame", "(Z)V", fetch);
+ m_camera.callMethod<void>("fetchEachFrame", "(Z)V", fetch);
}
void JCameraWorker::fetchLastPreviewFrame()
{
QJNIEnvironmentPrivate env;
- QJNIObjectPrivate dataObj = callObjectMethod("lockAndFetchPreviewBuffer", "()[B");
+ QJNIObjectPrivate dataObj = m_camera.callObjectMethod("lockAndFetchPreviewBuffer", "()[B");
if (!dataObj.object()) {
- callMethod<void>("unlockPreviewBuffer");
+ m_camera.callMethod<void>("unlockPreviewBuffer");
return;
}
jbyteArray data = static_cast<jbyteArray>(dataObj.object());
@@ -1200,16 +1211,16 @@ void JCameraWorker::fetchLastPreviewFrame()
int arrayLength = env->GetArrayLength(data);
bytes.resize(arrayLength);
env->GetByteArrayRegion(data, 0, arrayLength, (jbyte*)bytes.data());
- callMethod<void>("unlockPreviewBuffer");
+ m_camera.callMethod<void>("unlockPreviewBuffer");
emit previewFetched(bytes);
}
void JCameraWorker::applyParameters()
{
- callMethod<void>("setParameters",
- "(Landroid/hardware/Camera$Parameters;)V",
- m_parameters.object());
+ m_camera.callMethod<void>("setParameters",
+ "(Landroid/hardware/Camera$Parameters;)V",
+ m_parameters.object());
}
QStringList JCameraWorker::callParametersStringListMethod(const QByteArray &methodName)
@@ -1239,7 +1250,7 @@ QStringList JCameraWorker::callParametersStringListMethod(const QByteArray &meth
void JCameraWorker::callVoidMethod(const QByteArray &methodName)
{
- callMethod<void>(methodName.constData());
+ m_camera.callMethod<void>(methodName.constData());
}
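
The reflection probe for API 14 (looking up startFaceDetection with GetMethodID) is replaced by a direct runtime check against QtAndroidPrivate::androidSdkVersion() from QtCore's private JNI helpers. A trimmed sketch of the guard now used by the focus, exposure-lock and white-balance-lock accessors, written as a free function rather than the JCameraWorker member (header paths as in Qt 5's private API):

    #include <QtCore/private/qjnihelpers_p.h>   // QtAndroidPrivate::androidSdkVersion()
    #include <QtCore/private/qjni_p.h>          // QJNIObjectPrivate

    // Camera.Parameters.getMaxNumFocusAreas() only exists from Android API level 14,
    // so bail out early on older devices instead of probing for the method via JNI.
    static int maxNumFocusAreas(QJNIObjectPrivate &parameters)
    {
        if (QtAndroidPrivate::androidSdkVersion() < 14)
            return 0;

        if (!parameters.isValid())
            return 0;

        return parameters.callMethod<jint>("getMaxNumFocusAreas");
    }
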
diff --git a/src/plugins/android/src/wrappers/jmediarecorder.cpp b/src/plugins/android/src/wrappers/jmediarecorder.cpp
index 2c3eaeab7..0b1498d99 100644
--- a/src/plugins/android/src/wrappers/jmediarecorder.cpp
+++ b/src/plugins/android/src/wrappers/jmediarecorder.cpp
@@ -47,46 +47,55 @@
QT_BEGIN_NAMESPACE
-static jclass g_qtMediaRecorderClass = 0;
-static QMap<jlong, JMediaRecorder*> g_objectMap;
+static jclass g_qtMediaRecorderListenerClass = 0;
+typedef QMap<jlong, JMediaRecorder*> MediaRecorderMap;
+Q_GLOBAL_STATIC(MediaRecorderMap, mediaRecorders)
static void notifyError(JNIEnv* , jobject, jlong id, jint what, jint extra)
{
- JMediaRecorder *obj = g_objectMap.value(id, 0);
+ JMediaRecorder *obj = mediaRecorders->value(id, 0);
if (obj)
emit obj->error(what, extra);
}
static void notifyInfo(JNIEnv* , jobject, jlong id, jint what, jint extra)
{
- JMediaRecorder *obj = g_objectMap.value(id, 0);
+ JMediaRecorder *obj = mediaRecorders->value(id, 0);
if (obj)
emit obj->info(what, extra);
}
JMediaRecorder::JMediaRecorder()
: QObject()
- , QJNIObjectPrivate(g_qtMediaRecorderClass, "(J)V", reinterpret_cast<jlong>(this))
, m_id(reinterpret_cast<jlong>(this))
{
- if (isValid())
- g_objectMap.insert(m_id, this);
+ m_mediaRecorder = QJNIObjectPrivate("android/media/MediaRecorder");
+ if (m_mediaRecorder.isValid()) {
+ QJNIObjectPrivate listener(g_qtMediaRecorderListenerClass, "(J)V", m_id);
+ m_mediaRecorder.callMethod<void>("setOnErrorListener",
+ "(Landroid/media/MediaRecorder$OnErrorListener;)V",
+ listener.object());
+ m_mediaRecorder.callMethod<void>("setOnInfoListener",
+ "(Landroid/media/MediaRecorder$OnInfoListener;)V",
+ listener.object());
+ mediaRecorders->insert(m_id, this);
+ }
}
JMediaRecorder::~JMediaRecorder()
{
- g_objectMap.remove(m_id);
+ mediaRecorders->remove(m_id);
}
void JMediaRecorder::release()
{
- callMethod<void>("release");
+ m_mediaRecorder.callMethod<void>("release");
}
bool JMediaRecorder::prepare()
{
QJNIEnvironmentPrivate env;
- callMethod<void>("prepare");
+ m_mediaRecorder.callMethod<void>("prepare");
if (env->ExceptionCheck()) {
#ifdef QT_DEBUG
env->ExceptionDescribe();
@@ -99,13 +108,13 @@ bool JMediaRecorder::prepare()
void JMediaRecorder::reset()
{
- callMethod<void>("reset");
+ m_mediaRecorder.callMethod<void>("reset");
}
bool JMediaRecorder::start()
{
QJNIEnvironmentPrivate env;
- callMethod<void>("start");
+ m_mediaRecorder.callMethod<void>("start");
if (env->ExceptionCheck()) {
#ifdef QT_DEBUG
env->ExceptionDescribe();
@@ -119,7 +128,7 @@ bool JMediaRecorder::start()
void JMediaRecorder::stop()
{
QJNIEnvironmentPrivate env;
- callMethod<void>("stop");
+ m_mediaRecorder.callMethod<void>("stop");
if (env->ExceptionCheck()) {
#ifdef QT_DEBUG
env->ExceptionDescribe();
@@ -130,13 +139,13 @@ void JMediaRecorder::stop()
void JMediaRecorder::setAudioChannels(int numChannels)
{
- callMethod<void>("setAudioChannels", "(I)V", numChannels);
+ m_mediaRecorder.callMethod<void>("setAudioChannels", "(I)V", numChannels);
}
void JMediaRecorder::setAudioEncoder(AudioEncoder encoder)
{
QJNIEnvironmentPrivate env;
- callMethod<void>("setAudioEncoder", "(I)V", int(encoder));
+ m_mediaRecorder.callMethod<void>("setAudioEncoder", "(I)V", int(encoder));
if (env->ExceptionCheck()) {
#ifdef QT_DEBUG
env->ExceptionDescribe();
@@ -147,18 +156,18 @@ void JMediaRecorder::setAudioEncoder(AudioEncoder encoder)
void JMediaRecorder::setAudioEncodingBitRate(int bitRate)
{
- callMethod<void>("setAudioEncodingBitRate", "(I)V", bitRate);
+ m_mediaRecorder.callMethod<void>("setAudioEncodingBitRate", "(I)V", bitRate);
}
void JMediaRecorder::setAudioSamplingRate(int samplingRate)
{
- callMethod<void>("setAudioSamplingRate", "(I)V", samplingRate);
+ m_mediaRecorder.callMethod<void>("setAudioSamplingRate", "(I)V", samplingRate);
}
void JMediaRecorder::setAudioSource(AudioSource source)
{
QJNIEnvironmentPrivate env;
- callMethod<void>("setAudioSource", "(I)V", int(source));
+ m_mediaRecorder.callMethod<void>("setAudioSource", "(I)V", int(source));
if (env->ExceptionCheck()) {
#ifdef QT_DEBUG
env->ExceptionDescribe();
@@ -170,13 +179,13 @@ void JMediaRecorder::setAudioSource(AudioSource source)
void JMediaRecorder::setCamera(JCamera *camera)
{
QJNIObjectPrivate cam = camera->getCameraObject();
- callMethod<void>("setCamera", "(Landroid/hardware/Camera;)V", cam.object());
+ m_mediaRecorder.callMethod<void>("setCamera", "(Landroid/hardware/Camera;)V", cam.object());
}
void JMediaRecorder::setVideoEncoder(VideoEncoder encoder)
{
QJNIEnvironmentPrivate env;
- callMethod<void>("setVideoEncoder", "(I)V", int(encoder));
+ m_mediaRecorder.callMethod<void>("setVideoEncoder", "(I)V", int(encoder));
if (env->ExceptionCheck()) {
#ifdef QT_DEBUG
env->ExceptionDescribe();
@@ -187,13 +196,13 @@ void JMediaRecorder::setVideoEncoder(VideoEncoder encoder)
void JMediaRecorder::setVideoEncodingBitRate(int bitRate)
{
- callMethod<void>("setVideoEncodingBitRate", "(I)V", bitRate);
+ m_mediaRecorder.callMethod<void>("setVideoEncodingBitRate", "(I)V", bitRate);
}
void JMediaRecorder::setVideoFrameRate(int rate)
{
QJNIEnvironmentPrivate env;
- callMethod<void>("setVideoFrameRate", "(I)V", rate);
+ m_mediaRecorder.callMethod<void>("setVideoFrameRate", "(I)V", rate);
if (env->ExceptionCheck()) {
#ifdef QT_DEBUG
env->ExceptionDescribe();
@@ -205,7 +214,7 @@ void JMediaRecorder::setVideoFrameRate(int rate)
void JMediaRecorder::setVideoSize(const QSize &size)
{
QJNIEnvironmentPrivate env;
- callMethod<void>("setVideoSize", "(II)V", size.width(), size.height());
+ m_mediaRecorder.callMethod<void>("setVideoSize", "(II)V", size.width(), size.height());
if (env->ExceptionCheck()) {
#ifdef QT_DEBUG
env->ExceptionDescribe();
@@ -217,7 +226,7 @@ void JMediaRecorder::setVideoSize(const QSize &size)
void JMediaRecorder::setVideoSource(VideoSource source)
{
QJNIEnvironmentPrivate env;
- callMethod<void>("setVideoSource", "(I)V", int(source));
+ m_mediaRecorder.callMethod<void>("setVideoSource", "(I)V", int(source));
if (env->ExceptionCheck()) {
#ifdef QT_DEBUG
env->ExceptionDescribe();
@@ -229,7 +238,7 @@ void JMediaRecorder::setVideoSource(VideoSource source)
void JMediaRecorder::setOrientationHint(int degrees)
{
QJNIEnvironmentPrivate env;
- callMethod<void>("setOrientationHint", "(I)V", degrees);
+ m_mediaRecorder.callMethod<void>("setOrientationHint", "(I)V", degrees);
if (env->ExceptionCheck()) {
#ifdef QT_DEBUG
env->ExceptionDescribe();
@@ -241,7 +250,7 @@ void JMediaRecorder::setOrientationHint(int degrees)
void JMediaRecorder::setOutputFormat(OutputFormat format)
{
QJNIEnvironmentPrivate env;
- callMethod<void>("setOutputFormat", "(I)V", int(format));
+ m_mediaRecorder.callMethod<void>("setOutputFormat", "(I)V", int(format));
if (env->ExceptionCheck()) {
#ifdef QT_DEBUG
env->ExceptionDescribe();
@@ -253,9 +262,9 @@ void JMediaRecorder::setOutputFormat(OutputFormat format)
void JMediaRecorder::setOutputFile(const QString &path)
{
QJNIEnvironmentPrivate env;
- callMethod<void>("setOutputFile",
- "(Ljava/lang/String;)V",
- QJNIObjectPrivate::fromString(path).object());
+ m_mediaRecorder.callMethod<void>("setOutputFile",
+ "(Ljava/lang/String;)V",
+ QJNIObjectPrivate::fromString(path).object());
if (env->ExceptionCheck()) {
#ifdef QT_DEBUG
env->ExceptionDescribe();
@@ -271,13 +280,13 @@ static JNINativeMethod methods[] = {
bool JMediaRecorder::initJNI(JNIEnv *env)
{
- jclass clazz = env->FindClass("org/qtproject/qt5/android/multimedia/QtMediaRecorder");
+ jclass clazz = env->FindClass("org/qtproject/qt5/android/multimedia/QtMediaRecorderListener");
if (env->ExceptionCheck())
env->ExceptionClear();
if (clazz) {
- g_qtMediaRecorderClass = static_cast<jclass>(env->NewGlobalRef(clazz));
- if (env->RegisterNatives(g_qtMediaRecorderClass,
+ g_qtMediaRecorderListenerClass = static_cast<jclass>(env->NewGlobalRef(clazz));
+ if (env->RegisterNatives(g_qtMediaRecorderListenerClass,
methods,
sizeof(methods) / sizeof(methods[0])) < 0) {
return false;
diff --git a/src/plugins/android/src/wrappers/jmediarecorder.h b/src/plugins/android/src/wrappers/jmediarecorder.h
index 2bdb06b9f..3a83e7e16 100644
--- a/src/plugins/android/src/wrappers/jmediarecorder.h
+++ b/src/plugins/android/src/wrappers/jmediarecorder.h
@@ -50,7 +50,7 @@ QT_BEGIN_NAMESPACE
class JCamera;
-class JMediaRecorder : public QObject, public QJNIObjectPrivate
+class JMediaRecorder : public QObject
{
Q_OBJECT
public:
@@ -127,6 +127,7 @@ Q_SIGNALS:
private:
jlong m_id;
+ QJNIObjectPrivate m_mediaRecorder;
};
QT_END_NAMESPACE
diff --git a/src/plugins/android/videonode/videonode.pro b/src/plugins/android/videonode/videonode.pro
index 4ae2dc36e..661e36436 100644
--- a/src/plugins/android/videonode/videonode.pro
+++ b/src/plugins/android/videonode/videonode.pro
@@ -2,6 +2,7 @@ TARGET = qtsgvideonode_android
QT += quick multimedia-private qtmultimediaquicktools-private
PLUGIN_TYPE = video/videonode
+PLUGIN_EXTENDS = quick
PLUGIN_CLASS_NAME = QAndroidSGVideoNodeFactoryPlugin
load(qt_plugin)
diff --git a/src/plugins/coreaudio/coreaudiodeviceinfo.mm b/src/plugins/coreaudio/coreaudiodeviceinfo.mm
index 56765cafd..ac41a310c 100644
--- a/src/plugins/coreaudio/coreaudiodeviceinfo.mm
+++ b/src/plugins/coreaudio/coreaudiodeviceinfo.mm
@@ -196,38 +196,14 @@ QList<int> CoreAudioDeviceInfo::supportedSampleRates()
QList<int> CoreAudioDeviceInfo::supportedChannelCounts()
{
- QList<int> supportedChannels;
- int maxChannels = 0;
+ static QList<int> supportedChannels;
-#if defined(Q_OS_OSX)
- UInt32 propSize = 0;
- AudioObjectPropertyScope scope = m_mode == QAudio::AudioInput ? kAudioDevicePropertyScopeInput : kAudioDevicePropertyScopeOutput;
- AudioObjectPropertyAddress streamConfigurationPropertyAddress = { kAudioDevicePropertyStreamConfiguration,
- scope,
- kAudioObjectPropertyElementMaster };
-
- if (AudioObjectGetPropertyDataSize(m_deviceId, &streamConfigurationPropertyAddress, 0, NULL, &propSize) == noErr) {
- AudioBufferList* audioBufferList = static_cast<AudioBufferList*>(malloc(propSize));
-
- if (audioBufferList != 0) {
- if (AudioObjectGetPropertyData(m_deviceId, &streamConfigurationPropertyAddress, 0, NULL, &propSize, audioBufferList) == noErr) {
- for (int i = 0; i < int(audioBufferList->mNumberBuffers); ++i)
- maxChannels += audioBufferList->mBuffers[i].mNumberChannels;
- }
-
- free(audioBufferList);
- }
+ if (supportedChannels.isEmpty()) {
+ // If the number of channels is not supported by an audio device, Core Audio will
+ // automatically convert the audio data.
+ for (int i = 1; i <= 16; ++i)
+ supportedChannels.append(i);
}
-#else //iOS
- if (m_mode == QAudio::AudioInput)
- maxChannels = CoreAudioSessionManager::instance().inputChannelCount();
- else if (m_mode == QAudio::AudioOutput)
- maxChannels = CoreAudioSessionManager::instance().outputChannelCount();
-#endif
-
- // Assume all channel configurations are supported up to the maximum number of channels
- for (int i = 1; i <= maxChannels; ++i)
- supportedChannels.append(i);
return supportedChannels;
}
diff --git a/src/plugins/coreaudio/coreaudiosessionmanager.h b/src/plugins/coreaudio/coreaudiosessionmanager.h
index 61d8967b1..26f8fee09 100644
--- a/src/plugins/coreaudio/coreaudiosessionmanager.h
+++ b/src/plugins/coreaudio/coreaudiosessionmanager.h
@@ -92,9 +92,6 @@ public:
QList<QByteArray> inputDevices();
QList<QByteArray> outputDevices();
- int inputChannelCount();
- int outputChannelCount();
-
float currentIOBufferDuration();
float preferredSampleRate();
diff --git a/src/plugins/coreaudio/coreaudiosessionmanager.mm b/src/plugins/coreaudio/coreaudiosessionmanager.mm
index 0e795e786..04c8b6ed4 100644
--- a/src/plugins/coreaudio/coreaudiosessionmanager.mm
+++ b/src/plugins/coreaudio/coreaudiosessionmanager.mm
@@ -377,16 +377,6 @@ QList<QByteArray> CoreAudioSessionManager::outputDevices()
return outputDevices;
}
-int CoreAudioSessionManager::inputChannelCount()
-{
- return [[m_sessionObserver audioSession] inputNumberOfChannels];
-}
-
-int CoreAudioSessionManager::outputChannelCount()
-{
- return [[m_sessionObserver audioSession] outputNumberOfChannels];
-}
-
float CoreAudioSessionManager::currentIOBufferDuration()
{
return [[m_sessionObserver audioSession] IOBufferDuration];
diff --git a/src/plugins/directshow/player/directshowplayercontrol.cpp b/src/plugins/directshow/player/directshowplayercontrol.cpp
index 553ccb43b..179f635ec 100644
--- a/src/plugins/directshow/player/directshowplayercontrol.cpp
+++ b/src/plugins/directshow/player/directshowplayercontrol.cpp
@@ -83,6 +83,7 @@ DirectShowPlayerControl::DirectShowPlayerControl(DirectShowPlayerService *servic
, m_streamTypes(0)
, m_muteVolume(-1)
, m_position(0)
+ , m_pendingPosition(-1)
, m_duration(0)
, m_playbackRate(0)
, m_seekable(false)
@@ -112,12 +113,22 @@ qint64 DirectShowPlayerControl::duration() const
qint64 DirectShowPlayerControl::position() const
{
+ if (m_pendingPosition != -1)
+ return m_pendingPosition;
+
return const_cast<qint64 &>(m_position) = m_service->position();
}
void DirectShowPlayerControl::setPosition(qint64 position)
{
+ if (m_state == QMediaPlayer::StoppedState && m_pendingPosition != position) {
+ m_pendingPosition = position;
+ emit positionChanged(m_pendingPosition);
+ return;
+ }
+
m_service->seek(position);
+ m_pendingPosition = -1;
}
int DirectShowPlayerControl::volume() const
@@ -253,6 +264,8 @@ void DirectShowPlayerControl::play()
return;
}
m_service->play();
+ if (m_pendingPosition != -1)
+ setPosition(m_pendingPosition);
emit stateChanged(m_state = QMediaPlayer::PlayingState);
}
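
Seeking while the graph is stopped is now deferred: the requested position is parked in m_pendingPosition, reported back from position(), and only handed to the service once play() starts. A reduced model of that logic outside the full control class (Service stands in for DirectShowPlayerService; state handling is boiled down to a single flag):

    #include <QtCore/QtGlobal>

    struct Service {                          // stand-in for DirectShowPlayerService
        void seek(qint64 position) { lastSeek = position; }
        qint64 lastSeek = 0;
    };

    // Reduced model of the pending-position handling added in this patch.
    class PlayerControl
    {
    public:
        explicit PlayerControl(Service *service) : m_service(service) {}

        qint64 position() const
        {
            // While stopped, report the position the user asked for.
            return m_pendingPosition != -1 ? m_pendingPosition : m_position;
        }

        void setPosition(qint64 position)
        {
            if (m_stopped && m_pendingPosition != position) {
                m_pendingPosition = position;   // apply it when playback starts
                return;
            }
            m_service->seek(position);
            m_pendingPosition = -1;
        }

        void play()
        {
            m_stopped = false;
            if (m_pendingPosition != -1)
                setPosition(m_pendingPosition); // flush the deferred seek
        }

    private:
        Service *m_service;
        qint64 m_position = 0;
        qint64 m_pendingPosition = -1;
        bool m_stopped = true;
    };
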
diff --git a/src/plugins/directshow/player/directshowplayercontrol.h b/src/plugins/directshow/player/directshowplayercontrol.h
index 478389de6..19dcb5a88 100644
--- a/src/plugins/directshow/player/directshowplayercontrol.h
+++ b/src/plugins/directshow/player/directshowplayercontrol.h
@@ -135,6 +135,7 @@ private:
int m_streamTypes;
int m_muteVolume;
qint64 m_position;
+ qint64 m_pendingPosition;
qint64 m_duration;
qreal m_playbackRate;
bool m_seekable;
diff --git a/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecoderserviceplugin.cpp b/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecoderserviceplugin.cpp
index 3085d1391..e6d24216f 100644
--- a/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecoderserviceplugin.cpp
+++ b/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecoderserviceplugin.cpp
@@ -49,19 +49,6 @@
#include <QtCore/QDir>
#include <QtCore/QDebug>
-#include <linux/types.h>
-#include <sys/time.h>
-#include <sys/ioctl.h>
-#include <sys/poll.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <errno.h>
-#include <string.h>
-#include <stdlib.h>
-#include <sys/mman.h>
-#include <linux/videodev2.h>
-#include <gst/gst.h>
-
// #define QT_SUPPORTEDMIMETYPES_DEBUG
QMediaService* QGstreamerAudioDecoderServicePlugin::create(const QString &key)
diff --git a/src/plugins/gstreamer/camerabin/camerabincontrol.cpp b/src/plugins/gstreamer/camerabin/camerabincontrol.cpp
index c84ebc41a..2ba9b07e8 100644
--- a/src/plugins/gstreamer/camerabin/camerabincontrol.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabincontrol.cpp
@@ -50,18 +50,6 @@
#include <QtCore/qfile.h>
#include <QtCore/qmetaobject.h>
-#include <linux/types.h>
-#include <sys/time.h>
-#include <sys/ioctl.h>
-#include <sys/poll.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <errno.h>
-#include <string.h>
-#include <stdlib.h>
-#include <sys/mman.h>
-#include <linux/videodev2.h>
-
QT_BEGIN_NAMESPACE
//#define CAMEABIN_DEBUG 1
diff --git a/src/plugins/gstreamer/camerabin/camerabinserviceplugin.cpp b/src/plugins/gstreamer/camerabin/camerabinserviceplugin.cpp
index 37641b001..3decd6070 100644
--- a/src/plugins/gstreamer/camerabin/camerabinserviceplugin.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinserviceplugin.cpp
@@ -50,18 +50,8 @@
#include "camerabinservice.h"
#include <private/qgstutils_p.h>
-#include <linux/types.h>
-#include <sys/time.h>
-#include <sys/ioctl.h>
-#include <sys/poll.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <errno.h>
-#include <string.h>
-#include <stdlib.h>
-#include <sys/mman.h>
+#include <private/qcore_unix_p.h>
#include <linux/videodev2.h>
-#include <gst/gst.h>
QT_BEGIN_NAMESPACE
@@ -148,7 +138,7 @@ void CameraBinServicePlugin::updateDevices() const
QFileInfoList entries = devDir.entryInfoList(QStringList() << "video*");
foreach (const QFileInfo &entryInfo, entries) {
- int fd = ::open(entryInfo.filePath().toLatin1().constData(), O_RDWR );
+ int fd = qt_safe_open(entryInfo.filePath().toLatin1().constData(), O_RDWR );
if (fd == -1)
continue;
@@ -178,7 +168,7 @@ void CameraBinServicePlugin::updateDevices() const
m_cameraDevices.append(entryInfo.filePath().toLocal8Bit());
m_cameraDescriptions.append(name);
}
- ::close(fd);
+ qt_safe_close(fd);
}
if (!m_cameraDevices.isEmpty())
diff --git a/src/plugins/gstreamer/common.pri b/src/plugins/gstreamer/common.pri
index 98e427d71..8b421b8d1 100644
--- a/src/plugins/gstreamer/common.pri
+++ b/src/plugins/gstreamer/common.pri
@@ -1,5 +1,5 @@
-QT += multimedia-private network
+QT += core-private multimedia-private network
CONFIG += no_private_qt_headers_warning
qtHaveModule(widgets) {
diff --git a/src/plugins/gstreamer/mediacapture/qgstreamercaptureserviceplugin.cpp b/src/plugins/gstreamer/mediacapture/qgstreamercaptureserviceplugin.cpp
index 657b9806f..8b88fbb71 100644
--- a/src/plugins/gstreamer/mediacapture/qgstreamercaptureserviceplugin.cpp
+++ b/src/plugins/gstreamer/mediacapture/qgstreamercaptureserviceplugin.cpp
@@ -51,18 +51,8 @@
#include "qgstreamercaptureservice.h"
#include <private/qgstutils_p.h>
-#include <linux/types.h>
-#include <sys/time.h>
-#include <sys/ioctl.h>
-#include <sys/poll.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <errno.h>
-#include <string.h>
-#include <stdlib.h>
-#include <sys/mman.h>
+#include <private/qcore_unix_p.h>
#include <linux/videodev2.h>
-#include <gst/gst.h>
QMediaService* QGstreamerCaptureServicePlugin::create(const QString &key)
{
@@ -155,7 +145,7 @@ void QGstreamerCaptureServicePlugin::updateDevices() const
foreach( const QFileInfo &entryInfo, entries ) {
//qDebug() << "Try" << entryInfo.filePath();
- int fd = ::open(entryInfo.filePath().toLatin1().constData(), O_RDWR );
+ int fd = qt_safe_open(entryInfo.filePath().toLatin1().constData(), O_RDWR );
if (fd == -1)
continue;
@@ -185,7 +175,7 @@ void QGstreamerCaptureServicePlugin::updateDevices() const
m_cameraDevices.append(entryInfo.filePath().toLocal8Bit());
m_cameraDescriptions.append(name);
}
- ::close(fd);
+ qt_safe_close(fd);
}
if (!m_cameraDevices.isEmpty())
diff --git a/src/plugins/gstreamer/mediacapture/qgstreamerv4l2input.cpp b/src/plugins/gstreamer/mediacapture/qgstreamerv4l2input.cpp
index b618ab997..450bc51ea 100644
--- a/src/plugins/gstreamer/mediacapture/qgstreamerv4l2input.cpp
+++ b/src/plugins/gstreamer/mediacapture/qgstreamerv4l2input.cpp
@@ -44,16 +44,7 @@
#include <QtCore/qdebug.h>
#include <QtCore/qfile.h>
-#include <linux/types.h>
-#include <sys/time.h>
-#include <sys/ioctl.h>
-#include <sys/poll.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <errno.h>
-#include <string.h>
-#include <stdlib.h>
-#include <sys/mman.h>
+#include <private/qcore_unix_p.h>
#include <linux/videodev2.h>
QT_BEGIN_NAMESPACE
diff --git a/src/plugins/gstreamer/mediaplayer/qgstreamermetadataprovider.cpp b/src/plugins/gstreamer/mediaplayer/qgstreamermetadataprovider.cpp
index 22f203665..fa837d3f4 100644
--- a/src/plugins/gstreamer/mediaplayer/qgstreamermetadataprovider.cpp
+++ b/src/plugins/gstreamer/mediaplayer/qgstreamermetadataprovider.cpp
@@ -162,25 +162,18 @@ void QGstreamerMetaDataProvider::updateTags()
{
QVariantMap oldTags = m_tags;
m_tags.clear();
-
- QSet<QString> allTags = QSet<QString>::fromList(m_tags.keys());
+ bool changed = false;
QMapIterator<QByteArray ,QVariant> i(m_session->tags());
while (i.hasNext()) {
i.next();
//use gstreamer native keys for elements not in m_keysMap
QString key = m_keysMap.value(i.key(), i.key());
- m_tags[key] = i.value();
- allTags.insert(key);
- }
-
- bool changed = false;
- foreach (const QString &key, allTags) {
- const QVariant value = m_tags.value(key);
- if (value != oldTags.value(key)) {
- changed = true;
- emit metaDataChanged(key, value);
- }
+ m_tags.insert(key, i.value());
+ if (i.value() != oldTags.value(key)) {
+ changed = true;
+ emit metaDataChanged(key, i.value());
+ }
}
if (changed)
diff --git a/src/plugins/gstreamer/mediaplayer/qgstreamerplayerserviceplugin.cpp b/src/plugins/gstreamer/mediaplayer/qgstreamerplayerserviceplugin.cpp
index b2ad24899..a9052ca4b 100644
--- a/src/plugins/gstreamer/mediaplayer/qgstreamerplayerserviceplugin.cpp
+++ b/src/plugins/gstreamer/mediaplayer/qgstreamerplayerserviceplugin.cpp
@@ -51,20 +51,6 @@
#include "qgstreamerplayerservice.h"
#include <private/qgstutils_p.h>
-#include <linux/types.h>
-#include <sys/time.h>
-#include <sys/ioctl.h>
-#include <sys/poll.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <errno.h>
-#include <string.h>
-#include <stdlib.h>
-#include <sys/mman.h>
-#include <linux/videodev2.h>
-#include <gst/gst.h>
-
-
QMediaService* QGstreamerPlayerServicePlugin::create(const QString &key)
{
QGstUtils::initializeGst();
diff --git a/src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.cpp b/src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.cpp
index f4ac59420..27446e07d 100644
--- a/src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.cpp
+++ b/src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.cpp
@@ -67,6 +67,20 @@
QT_BEGIN_NAMESPACE
+static bool usePlaybinVolume()
+{
+ static enum { Yes, No, Unknown } status = Unknown;
+ if (status == Unknown) {
+ QByteArray v = qgetenv("QT_GSTREAMER_USE_PLAYBIN_VOLUME");
+ bool value = !v.isEmpty() && v != "0" && v != "false";
+ if (value)
+ status = Yes;
+ else
+ status = No;
+ }
+ return status == Yes;
+}
+
typedef enum {
GST_PLAY_FLAG_VIDEO = 0x00000001,
GST_PLAY_FLAG_AUDIO = 0x00000002,
@@ -104,6 +118,7 @@ QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
m_pendingVideoSink(0),
m_nullVideoSink(0),
m_audioSink(0),
+ m_volumeElement(0),
m_bus(0),
m_videoOutput(0),
m_renderer(0),
@@ -151,8 +166,28 @@ QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
#endif
g_object_set(G_OBJECT(m_playbin), "flags", flags, NULL);
- m_audioSink = gst_element_factory_make("autoaudiosink", "audiosink");
- if (m_audioSink) {
+ GstElement *audioSink = gst_element_factory_make("autoaudiosink", "audiosink");
+ if (audioSink) {
+ if (usePlaybinVolume()) {
+ m_audioSink = audioSink;
+ m_volumeElement = m_playbin;
+ } else {
+ m_volumeElement = gst_element_factory_make("volume", "volumeelement");
+ if (m_volumeElement) {
+ m_audioSink = gst_bin_new("audio-output-bin");
+
+ gst_bin_add_many(GST_BIN(m_audioSink), m_volumeElement, audioSink, NULL);
+ gst_element_link(m_volumeElement, audioSink);
+
+ GstPad *pad = gst_element_get_static_pad(m_volumeElement, "sink");
+ gst_element_add_pad(GST_ELEMENT(m_audioSink), gst_ghost_pad_new("sink", pad));
+ gst_object_unref(GST_OBJECT(pad));
+ } else {
+ m_audioSink = audioSink;
+ m_volumeElement = m_playbin;
+ }
+ }
+
g_object_set(G_OBJECT(m_playbin), "audio-sink", m_audioSink, NULL);
addAudioBufferProbe();
}
@@ -193,12 +228,12 @@ QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
g_signal_connect(G_OBJECT(m_playbin), "notify::source", G_CALLBACK(playbinNotifySource), this);
g_signal_connect(G_OBJECT(m_playbin), "element-added", G_CALLBACK(handleElementAdded), this);
- // Init volume and mute state
- g_object_set(G_OBJECT(m_playbin), "volume", 1.0, NULL);
- g_object_set(G_OBJECT(m_playbin), "mute", FALSE, NULL);
-
- g_signal_connect(G_OBJECT(m_playbin), "notify::volume", G_CALLBACK(handleVolumeChange), this);
- g_signal_connect(G_OBJECT(m_playbin), "notify::mute", G_CALLBACK(handleMutedChange), this);
+ if (usePlaybinVolume()) {
+ updateVolume();
+ updateMuted();
+ g_signal_connect(G_OBJECT(m_playbin), "notify::volume", G_CALLBACK(handleVolumeChange), this);
+ g_signal_connect(G_OBJECT(m_playbin), "notify::mute", G_CALLBACK(handleMutedChange), this);
+ }
g_signal_connect(G_OBJECT(m_playbin), "video-changed", G_CALLBACK(handleStreamsChange), this);
g_signal_connect(G_OBJECT(m_playbin), "audio-changed", G_CALLBACK(handleStreamsChange), this);
@@ -912,10 +947,8 @@ void QGstreamerPlayerSession::setVolume(int volume)
if (m_volume != volume) {
m_volume = volume;
- if (m_playbin) {
- //playbin2 allows to set volume and muted independently,
- g_object_set(G_OBJECT(m_playbin), "volume", m_volume/100.0, NULL);
- }
+ if (m_volumeElement)
+ g_object_set(G_OBJECT(m_volumeElement), "volume", m_volume / 100.0, NULL);
emit volumeChanged(m_volume);
}
@@ -929,7 +962,9 @@ void QGstreamerPlayerSession::setMuted(bool muted)
if (m_muted != muted) {
m_muted = muted;
- g_object_set(G_OBJECT(m_playbin), "mute", m_muted ? TRUE : FALSE, NULL);
+ if (m_volumeElement)
+ g_object_set(G_OBJECT(m_volumeElement), "mute", m_muted ? TRUE : FALSE, NULL);
+
emit mutedStateChanged(m_muted);
}
}
@@ -952,13 +987,15 @@ bool QGstreamerPlayerSession::processBusMessage(const QGstreamerMessage &message
if (gm) {
//tag message comes from elements inside playbin, not from playbin itself
if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_TAG) {
- //qDebug() << "tag message";
GstTagList *tag_list;
gst_message_parse_tag(gm, &tag_list);
- m_tags.unite(QGstUtils::gstTagListToMap(tag_list));
+
+ QMap<QByteArray, QVariant> newTags = QGstUtils::gstTagListToMap(tag_list);
+ QMap<QByteArray, QVariant>::const_iterator it = newTags.constBegin();
+ for ( ; it != newTags.constEnd(); ++it)
+ m_tags.insert(it.key(), it.value()); // overwrite existing tags
gst_tag_list_free(tag_list);
- //qDebug() << m_tags;
emit tagsChanged();
} else if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_DURATION) {
@@ -1424,13 +1461,6 @@ void QGstreamerPlayerSession::playbinNotifySource(GObject *o, GParamSpec *p, gpo
qDebug() << "Playbin source added:" << G_OBJECT_CLASS_NAME(G_OBJECT_GET_CLASS(source));
#endif
- // Turn off icecast metadata request, will be re-set if in QNetworkRequest
- // (souphttpsrc docs say is false by default, but header appears in request
- // @version 0.10.21)
- if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), "iradio-mode") != 0)
- g_object_set(G_OBJECT(source), "iradio-mode", FALSE, NULL);
-
-
// Set Headers
const QByteArray userAgentString("User-Agent");
diff --git a/src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.h b/src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.h
index 94a84e5dd..23e70315b 100644
--- a/src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.h
+++ b/src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.h
@@ -211,6 +211,7 @@ private:
GstElement* m_nullVideoSink;
GstElement* m_audioSink;
+ GstElement* m_volumeElement;
GstBus* m_bus;
QObject *m_videoOutput;
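
Unless QT_GSTREAMER_USE_PLAYBIN_VOLUME is set, volume and mute are now applied to a dedicated "volume" element placed in front of the real sink instead of to playbin itself. The element lives in a small bin whose sink pad is exposed through a ghost pad; a condensed, self-contained version of that construction, using the same GStreamer calls as qgstreamerplayersession.cpp above (the function name is illustrative only):

    #include <gst/gst.h>

    // Build an audio-output bin with a "volume" element in front of the given sink,
    // so volume/mute can be set on the element rather than on playbin.
    static GstElement *createAudioOutputBin(GstElement *audioSink)
    {
        GstElement *volume = gst_element_factory_make("volume", "volumeelement");
        if (!volume)
            return audioSink;               // fall back to the plain sink

        GstElement *bin = gst_bin_new("audio-output-bin");
        gst_bin_add_many(GST_BIN(bin), volume, audioSink, NULL);
        gst_element_link(volume, audioSink);

        // Expose the volume element's sink pad as the bin's own sink pad.
        GstPad *pad = gst_element_get_static_pad(volume, "sink");
        gst_element_add_pad(bin, gst_ghost_pad_new("sink", pad));
        gst_object_unref(GST_OBJECT(pad));

        return bin;   // then: g_object_set(playbin, "audio-sink", bin, NULL)
    }
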
diff --git a/src/plugins/qnx/camera/bbcameraorientationhandler.cpp b/src/plugins/qnx/camera/bbcameraorientationhandler.cpp
index b715249f9..393c9e6d8 100644
--- a/src/plugins/qnx/camera/bbcameraorientationhandler.cpp
+++ b/src/plugins/qnx/camera/bbcameraorientationhandler.cpp
@@ -41,7 +41,8 @@
#include "bbcameraorientationhandler.h"
#include <QAbstractEventDispatcher>
-#include <QCoreApplication>
+#include <QGuiApplication>
+#include <QScreen>
#include <QDebug>
#include <bps/orientation.h>
@@ -87,6 +88,10 @@ bool BbCameraOrientationHandler::nativeEventFilter(const QByteArray&, void *mess
const int angle = orientation_event_get_angle(event);
if (angle != m_orientation) {
+#ifndef Q_OS_BLACKBERRY_TABLET
+ if (angle == 180) // The screen does not rotate at 180 degrees
+ return false;
+#endif
m_orientation = angle;
emit orientationChanged(m_orientation);
}
@@ -94,6 +99,17 @@ bool BbCameraOrientationHandler::nativeEventFilter(const QByteArray&, void *mess
return false; // do not drop the event
}
+int BbCameraOrientationHandler::viewfinderOrientation() const
+{
+ // On a keyboard device we do not rotate the screen at all
+ if (qGuiApp->primaryScreen()->nativeOrientation()
+ != qGuiApp->primaryScreen()->primaryOrientation()) {
+ return m_orientation;
+ }
+
+ return 0;
+}
+
int BbCameraOrientationHandler::orientation() const
{
return m_orientation;
diff --git a/src/plugins/qnx/camera/bbcameraorientationhandler.h b/src/plugins/qnx/camera/bbcameraorientationhandler.h
index 3d236b884..42d68915b 100644
--- a/src/plugins/qnx/camera/bbcameraorientationhandler.h
+++ b/src/plugins/qnx/camera/bbcameraorientationhandler.h
@@ -57,6 +57,8 @@ public:
int orientation() const;
+ int viewfinderOrientation() const;
+
Q_SIGNALS:
void orientationChanged(int degree);
diff --git a/src/plugins/qnx/camera/bbcamerasession.cpp b/src/plugins/qnx/camera/bbcamerasession.cpp
index 77ba71493..c5309b045 100644
--- a/src/plugins/qnx/camera/bbcamerasession.cpp
+++ b/src/plugins/qnx/camera/bbcamerasession.cpp
@@ -140,7 +140,7 @@ BbCameraSession::BbCameraSession(QObject *parent)
connect(this, SIGNAL(captureModeChanged(QCamera::CaptureModes)), SLOT(updateReadyForCapture()));
connect(m_orientationHandler, SIGNAL(orientationChanged(int)), SLOT(deviceOrientationChanged(int)));
- connect(m_windowGrabber, SIGNAL(frameGrabbed(QImage)), SLOT(viewfinderFrameGrabbed(QImage)));
+ connect(m_windowGrabber, SIGNAL(frameGrabbed(QImage, int)), SLOT(viewfinderFrameGrabbed(QImage)));
}
BbCameraSession::~BbCameraSession()
@@ -772,11 +772,16 @@ void BbCameraSession::viewfinderFrameGrabbed(const QImage &image)
{
QTransform transform;
+ // subtract out the native rotation
transform.rotate(m_nativeCameraOrientation);
+ // subtract out the current device orientation
+ if (m_device == cameraIdentifierRear())
+ transform.rotate(360 - m_orientationHandler->viewfinderOrientation());
+ else
+ transform.rotate(m_orientationHandler->viewfinderOrientation());
+
QImage frame = image.copy().transformed(transform);
- if (m_device == cameraIdentifierFront())
- frame = frame.mirrored(true, false);
QMutexLocker locker(&m_surfaceMutex);
if (m_surface) {
@@ -896,7 +901,7 @@ bool BbCameraSession::startViewFinder()
return false;
}
- const int angle = m_orientationHandler->orientation();
+ const int angle = m_orientationHandler->viewfinderOrientation();
const QSize rotatedSize = ((angle == 0 || angle == 180) ? viewfinderResolution
: viewfinderResolution.transposed());
diff --git a/src/plugins/qnx/common/windowgrabber.cpp b/src/plugins/qnx/common/windowgrabber.cpp
index 5ed54b87e..3f6eaca41 100644
--- a/src/plugins/qnx/common/windowgrabber.cpp
+++ b/src/plugins/qnx/common/windowgrabber.cpp
@@ -47,6 +47,8 @@
#include <QImage>
#include <qpa/qplatformnativeinterface.h>
+#include <QOpenGLContext>
+
#ifdef Q_OS_BLACKBERRY
#include <bps/event.h>
#include <bps/screen.h>
@@ -57,13 +59,15 @@ QT_BEGIN_NAMESPACE
WindowGrabber::WindowGrabber(QObject *parent)
: QObject(parent),
- m_screenBuffer(0),
m_screenBufferWidth(-1),
m_screenBufferHeight(-1),
m_active(false),
m_screenContextInitialized(false),
- m_screenPixmapInitialized(false),
- m_screenPixmapBufferInitialized(false)
+ m_screenPixmapBuffersInitialized(false),
+ m_currentFrame(0),
+ m_eglImageSupported(false),
+ m_eglImagesInitialized(false),
+ m_eglImageCheck(false)
{
// grab the window frame with 60 frames per second
m_timer.setInterval(1000/60);
@@ -76,6 +80,11 @@ WindowGrabber::WindowGrabber(QObject *parent)
WindowGrabber::~WindowGrabber()
{
QCoreApplication::eventDispatcher()->removeNativeEventFilter(this);
+ if (eglImagesInitialized()) {
+ glDeleteTextures(2, imgTextures);
+ eglDestroyImageKHR(eglGetDisplay(EGL_DEFAULT_DISPLAY), img[0]);
+ eglDestroyImageKHR(eglGetDisplay(EGL_DEFAULT_DISPLAY), img[1]);
+ }
}
void WindowGrabber::setFrameRate(int frameRate)
@@ -83,6 +92,37 @@ void WindowGrabber::setFrameRate(int frameRate)
m_timer.setInterval(1000/frameRate);
}
+void WindowGrabber::createEglImages()
+{
+ // Do nothing if either egl images are not supported, the screen context is not valid
+ // or the images are already created
+ if (!eglImageSupported() || !m_screenContextInitialized || eglImagesInitialized())
+ return;
+
+ glGenTextures(2, imgTextures);
+ glBindTexture(GL_TEXTURE_2D, imgTextures[0]);
+ img[0] = eglCreateImageKHR(eglGetDisplay(EGL_DEFAULT_DISPLAY), EGL_NO_CONTEXT,
+ EGL_NATIVE_PIXMAP_KHR,
+ m_screenPixmaps[0],
+ 0);
+ glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, img[0]);
+
+ glBindTexture(GL_TEXTURE_2D, imgTextures[1]);
+ img[1] = eglCreateImageKHR(eglGetDisplay(EGL_DEFAULT_DISPLAY), EGL_NO_CONTEXT,
+ EGL_NATIVE_PIXMAP_KHR,
+ m_screenPixmaps[1],
+ 0);
+
+ glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, img[1]);
+
+ if (img[0] == 0 || img[1] == 0) {
+ qWarning() << "Failed to create KHR images" << img[0] << img[1] << strerror(errno) << errno;
+ m_eglImageSupported = false;
+ } else {
+ m_eglImagesInitialized = true;
+ }
+}
+
void WindowGrabber::setWindowId(const QByteArray &windowId)
{
m_windowId = windowId;
@@ -90,6 +130,9 @@ void WindowGrabber::setWindowId(const QByteArray &windowId)
void WindowGrabber::start()
{
+ if (m_active)
+ return;
+
int result = 0;
#ifdef Q_OS_BLACKBERRY_TABLET
@@ -124,30 +167,21 @@ void WindowGrabber::start()
m_screenContextInitialized = true;
}
- result = screen_create_pixmap(&m_screenPixmap, m_screenContext);
+ result = screen_create_pixmap(&m_screenPixmaps[0], m_screenContext);
+ result = screen_create_pixmap(&m_screenPixmaps[1], m_screenContext);
if (result != 0) {
cleanup();
- qWarning() << "WindowGrabber: cannot create pixmap:" << strerror(errno);
+ qWarning() << "WindowGrabber: cannot create pixmaps:" << strerror(errno);
return;
- } else {
- m_screenPixmapInitialized = true;
}
- const int usage = SCREEN_USAGE_READ | SCREEN_USAGE_NATIVE;
- result = screen_set_pixmap_property_iv(m_screenPixmap, SCREEN_PROPERTY_USAGE, &usage);
- if (result != 0) {
- cleanup();
- qWarning() << "WindowGrabber: cannot set pixmap usage:" << strerror(errno);
- return;
- }
+ const int usage = SCREEN_USAGE_NATIVE;
+ result = screen_set_pixmap_property_iv(m_screenPixmaps[0], SCREEN_PROPERTY_USAGE, &usage);
+ result |= screen_set_pixmap_property_iv(m_screenPixmaps[1], SCREEN_PROPERTY_USAGE, &usage);
- const int format = SCREEN_FORMAT_RGBA8888;
- result = screen_set_pixmap_property_iv(m_screenPixmap, SCREEN_PROPERTY_FORMAT, &format);
- if (result != 0) {
- cleanup();
- qWarning() << "WindowGrabber: cannot set pixmap format:" << strerror(errno);
- return;
- }
+ const int format = SCREEN_FORMAT_RGBX8888;
+ screen_set_pixmap_property_iv(m_screenPixmaps[0], SCREEN_PROPERTY_FORMAT, &format);
+ screen_set_pixmap_property_iv(m_screenPixmaps[1], SCREEN_PROPERTY_FORMAT, &format);
int size[2] = { 0, 0 };
@@ -172,37 +206,51 @@ void WindowGrabber::updateFrameSize()
{
int size[2] = { m_screenBufferWidth, m_screenBufferHeight };
- int result = screen_set_pixmap_property_iv(m_screenPixmap, SCREEN_PROPERTY_BUFFER_SIZE, size);
- if (result != 0) {
- cleanup();
- qWarning() << "WindowGrabber: cannot set pixmap size:" << strerror(errno);
- return;
- }
+ screen_set_pixmap_property_iv(m_screenPixmaps[0], SCREEN_PROPERTY_BUFFER_SIZE, size);
+ if (eglImageSupported())
+ screen_set_pixmap_property_iv(m_screenPixmaps[1], SCREEN_PROPERTY_BUFFER_SIZE, size);
+
+ int result = screen_create_pixmap_buffer(m_screenPixmaps[0]);
+ if (eglImageSupported())
+ result |= screen_create_pixmap_buffer(m_screenPixmaps[1]);
- result = screen_create_pixmap_buffer(m_screenPixmap);
if (result != 0) {
cleanup();
qWarning() << "WindowGrabber: cannot create pixmap buffer:" << strerror(errno);
return;
+ } else {
+ m_screenPixmapBuffersInitialized = true;
+ }
+
+ result = screen_get_pixmap_property_pv(m_screenPixmaps[0], SCREEN_PROPERTY_RENDER_BUFFERS,
+ (void**)&m_screenPixmapBuffers[0]);
+ if (eglImageSupported()) {
+ result |= screen_get_pixmap_property_pv(m_screenPixmaps[1], SCREEN_PROPERTY_RENDER_BUFFERS,
+ (void**)&m_screenPixmapBuffers[1]);
}
- result = screen_get_pixmap_property_pv(m_screenPixmap, SCREEN_PROPERTY_RENDER_BUFFERS, (void**)&m_screenPixmapBuffer);
if (result != 0) {
cleanup();
qWarning() << "WindowGrabber: cannot get pixmap buffer:" << strerror(errno);
return;
- } else {
- m_screenPixmapBufferInitialized = true;
}
- result = screen_get_buffer_property_pv(m_screenPixmapBuffer, SCREEN_PROPERTY_POINTER, (void**)&m_screenBuffer);
+ result = screen_get_buffer_property_pv(m_screenPixmapBuffers[0], SCREEN_PROPERTY_POINTER,
+ (void**)&m_screenBuffers[0]);
+ if (eglImageSupported()) {
+ result |= screen_get_buffer_property_pv(m_screenPixmapBuffers[1], SCREEN_PROPERTY_POINTER,
+ (void**)&m_screenBuffers[1]);
+ }
+
if (result != 0) {
cleanup();
qWarning() << "WindowGrabber: cannot get pixmap buffer pointer:" << strerror(errno);
return;
}
- result = screen_get_buffer_property_iv(m_screenPixmapBuffer, SCREEN_PROPERTY_STRIDE, &m_screenBufferStride);
+ result = screen_get_buffer_property_iv(m_screenPixmapBuffers[0], SCREEN_PROPERTY_STRIDE,
+ &m_screenBufferStride);
+
if (result != 0) {
cleanup();
qWarning() << "WindowGrabber: cannot get pixmap buffer stride:" << strerror(errno);
@@ -310,8 +358,40 @@ QByteArray WindowGrabber::windowGroupId() const
return QByteArray(groupIdData);
}
+bool WindowGrabber::eglImageSupported()
+{
+ return m_eglImageSupported;
+}
+
+void WindowGrabber::checkForEglImageExtension()
+{
+ QOpenGLContext *context = QOpenGLContext::currentContext();
+ if (!context) // Should not happen, because we are called from the render thread
+ return;
+
+ QByteArray eglExtensions = QByteArray(eglQueryString(eglGetDisplay(EGL_DEFAULT_DISPLAY),
+ EGL_EXTENSIONS));
+ m_eglImageSupported = context->hasExtension(QByteArrayLiteral("GL_OES_EGL_image"))
+ && eglExtensions.contains(QByteArrayLiteral("EGL_KHR_image"));
+
+ m_eglImageCheck = true;
+}
+
+bool WindowGrabber::eglImagesInitialized()
+{
+ return m_eglImagesInitialized;
+}
+
void WindowGrabber::grab()
{
+ if (!m_eglImageCheck) // We have not checked for EGL image support yet
+ return;
+
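+ // With EGL image support we alternate between two pixmaps so the texture that is
+ // currently displayed is not overwritten while the next frame is grabbed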
+ if (eglImageSupported())
+ m_currentFrame = (m_currentFrame + 1) % 2;
+ else
+ m_currentFrame = 0;
+
int size[2] = { 0, 0 };
int result = screen_get_window_property_iv(m_window, SCREEN_PROPERTY_SOURCE_SIZE, size);
@@ -324,40 +404,33 @@ void WindowGrabber::grab()
if (m_screenBufferWidth != size[0] || m_screenBufferHeight != size[1]) {
// The source viewport size changed, so we have to adapt our buffers
- if (m_screenPixmapBufferInitialized) {
- screen_destroy_pixmap_buffer(m_screenPixmap);
- m_screenPixmapBufferInitialized = false;
+ if (m_screenPixmapBuffersInitialized) {
+ screen_destroy_pixmap_buffer(m_screenPixmaps[0]);
+ if (eglImageSupported())
+ screen_destroy_pixmap_buffer(m_screenPixmaps[1]);
}
m_screenBufferWidth = size[0];
m_screenBufferHeight = size[1];
updateFrameSize();
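+ // The EGL images are tied to the old buffer size and have to be recreated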
+ m_eglImagesInitialized = false;
}
const int rect[] = { 0, 0, m_screenBufferWidth, m_screenBufferHeight };
- result = screen_read_window(m_window, m_screenPixmapBuffer, 1, rect, 0);
+ result = screen_read_window(m_window, m_screenPixmapBuffers[m_currentFrame], 1, rect, 0);
if (result != 0)
return;
- const QImage frame((unsigned char*)m_screenBuffer, m_screenBufferWidth, m_screenBufferHeight,
- m_screenBufferStride, QImage::Format_ARGB32);
+ const QImage frame((unsigned char*)m_screenBuffers[m_currentFrame], m_screenBufferWidth,
+ m_screenBufferHeight, m_screenBufferStride, QImage::Format_ARGB32);
- emit frameGrabbed(frame);
+ emit frameGrabbed(frame, imgTextures[m_currentFrame]);
}
void WindowGrabber::cleanup()
{
- if (m_screenPixmapBufferInitialized) {
- screen_destroy_buffer(m_screenPixmapBuffer);
- m_screenPixmapBufferInitialized = false;
- }
-
- if (m_screenPixmapInitialized) {
- screen_destroy_pixmap(m_screenPixmap);
- m_screenPixmapInitialized = false;
- }
-
+ // We only need to destroy the context; destroying it frees all resources associated with it
if (m_screenContextInitialized) {
screen_destroy_context(m_screenContext);
m_screenContextInitialized = false;
diff --git a/src/plugins/qnx/common/windowgrabber.h b/src/plugins/qnx/common/windowgrabber.h
index 7ec4202a2..40351ef25 100644
--- a/src/plugins/qnx/common/windowgrabber.h
+++ b/src/plugins/qnx/common/windowgrabber.h
@@ -41,6 +41,12 @@
#ifndef WINDOWGRABBER_H
#define WINDOWGRABBER_H
+#define EGL_EGLEXT_PROTOTYPES 1
+#define GL_GLEXT_PROTOTYPES 1
+#include <EGL/egl.h>
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+#include <EGL/eglext.h>
#include <QAbstractNativeEventFilter>
#include <QObject>
#include <QTimer>
@@ -59,6 +65,8 @@ public:
void setFrameRate(int frameRate);
+ void createEglImages();
+
void setWindowId(const QByteArray &windowId);
void start();
@@ -73,8 +81,12 @@ public:
QByteArray windowGroupId() const;
+ bool eglImageSupported();
+ void checkForEglImageExtension();
+ bool eglImagesInitialized();
+
signals:
- void frameGrabbed(const QImage &frame);
+ void frameGrabbed(const QImage &frame, int);
private slots:
void grab();
@@ -89,10 +101,10 @@ private:
screen_window_t m_window;
screen_context_t m_screenContext;
- screen_pixmap_t m_screenPixmap;
- screen_buffer_t m_screenPixmapBuffer;
+ screen_pixmap_t m_screenPixmaps[2];
+ screen_buffer_t m_screenPixmapBuffers[2];
- char* m_screenBuffer;
+ char *m_screenBuffers[2];
int m_screenBufferWidth;
int m_screenBufferHeight;
@@ -100,8 +112,13 @@ private:
bool m_active : 1;
bool m_screenContextInitialized : 1;
- bool m_screenPixmapInitialized : 1;
- bool m_screenPixmapBufferInitialized : 1;
+ bool m_screenPixmapBuffersInitialized : 1;
+ int m_currentFrame;
+ EGLImageKHR img[2];
+ GLuint imgTextures[2];
+ bool m_eglImageSupported : 1;
+ bool m_eglImagesInitialized : 1;
+ bool m_eglImageCheck : 1; // We must not send a grabbed frame before this is true
};
QT_END_NAMESPACE
diff --git a/src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.cpp b/src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.cpp
index 004eca36d..2555b2876 100644
--- a/src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.cpp
+++ b/src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.cpp
@@ -351,10 +351,11 @@ void MmRendererMediaPlayerControl::setState(QMediaPlayer::State state)
{
if (m_state != state) {
if (m_videoRendererControl) {
- if (state == QMediaPlayer::PausedState)
+ if (state == QMediaPlayer::PausedState || state == QMediaPlayer::StoppedState) {
m_videoRendererControl->pause();
- else if ((state == QMediaPlayer::PlayingState)
- && (m_state == QMediaPlayer::PausedState)) {
+ } else if ((state == QMediaPlayer::PlayingState)
+ && (m_state == QMediaPlayer::PausedState
+ || m_state == QMediaPlayer::StoppedState)) {
m_videoRendererControl->resume();
}
}
diff --git a/src/plugins/qnx/mediaplayer/mmrendererplayervideorenderercontrol.cpp b/src/plugins/qnx/mediaplayer/mmrendererplayervideorenderercontrol.cpp
index 0abdfec49..b9fe95026 100644
--- a/src/plugins/qnx/mediaplayer/mmrendererplayervideorenderercontrol.cpp
+++ b/src/plugins/qnx/mediaplayer/mmrendererplayervideorenderercontrol.cpp
@@ -46,6 +46,7 @@
#include <QCoreApplication>
#include <QDebug>
#include <QVideoSurfaceFormat>
+#include <QOpenGLContext>
#include <mm/renderer.h>
@@ -59,7 +60,7 @@ MmRendererPlayerVideoRendererControl::MmRendererPlayerVideoRendererControl(QObje
, m_context(0)
, m_videoId(-1)
{
- connect(m_windowGrabber, SIGNAL(frameGrabbed(QImage)), SLOT(frameGrabbed(QImage)));
+ connect(m_windowGrabber, SIGNAL(frameGrabbed(QImage, int)), SLOT(frameGrabbed(QImage, int)));
}
MmRendererPlayerVideoRendererControl::~MmRendererPlayerVideoRendererControl()
@@ -75,6 +76,10 @@ QAbstractVideoSurface *MmRendererPlayerVideoRendererControl::surface() const
void MmRendererPlayerVideoRendererControl::setSurface(QAbstractVideoSurface *surface)
{
m_surface = QPointer<QAbstractVideoSurface>(surface);
+ if (QOpenGLContext::currentContext())
+ m_windowGrabber->checkForEglImageExtension();
+ else if (m_surface)
+ m_surface->setProperty("_q_GLThreadCallback", QVariant::fromValue<QObject*>(this));
}
void MmRendererPlayerVideoRendererControl::attachDisplay(mmr_context_t *context)
@@ -139,20 +144,86 @@ void MmRendererPlayerVideoRendererControl::resume()
m_windowGrabber->resume();
}
-void MmRendererPlayerVideoRendererControl::frameGrabbed(const QImage &frame)
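+// Wraps the GL texture created from an EGL image so it can be handed to the video surface
+// as a GLTextureHandle frame without copying the image data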
+class BBTextureBuffer : public QAbstractVideoBuffer
+{
+public:
+ BBTextureBuffer(int handle) :
+ QAbstractVideoBuffer(QAbstractVideoBuffer::GLTextureHandle),
+ m_handle(handle)
+ {
+ }
+ MapMode mapMode() const {
+ return QAbstractVideoBuffer::ReadWrite;
+ }
+ void unmap() {
+ }
+ uchar *map(MapMode mode, int *numBytes, int *bytesPerLine) {
+ Q_UNUSED(mode);
+ Q_UNUSED(numBytes);
+ Q_UNUSED(bytesPerLine);
+ return 0;
+ }
+ QVariant handle() const {
+ return m_handle;
+ }
+private:
+ int m_handle;
+};
+
+void MmRendererPlayerVideoRendererControl::frameGrabbed(const QImage &frame, int handle)
{
if (m_surface) {
if (!m_surface->isActive()) {
- m_surface->start(QVideoSurfaceFormat(frame.size(), QVideoFrame::Format_ARGB32));
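+ // With EGL image support, start the surface in GL texture handle mode so frames can be
+ // passed as textures instead of image copies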
+ if (m_windowGrabber->eglImageSupported()) {
+ if (QOpenGLContext::currentContext())
+ m_windowGrabber->createEglImages();
+ else
+ m_surface->setProperty("_q_GLThreadCallback", QVariant::fromValue<QObject*>(this));
+
+ m_surface->start(QVideoSurfaceFormat(frame.size(), QVideoFrame::Format_BGR32,
+ QAbstractVideoBuffer::GLTextureHandle));
+ } else {
+ m_surface->start(QVideoSurfaceFormat(frame.size(), QVideoFrame::Format_ARGB32));
+ }
} else {
if (m_surface->surfaceFormat().frameSize() != frame.size()) {
+ QAbstractVideoBuffer::HandleType type = m_surface->surfaceFormat().handleType();
m_surface->stop();
- m_surface->start(QVideoSurfaceFormat(frame.size(), QVideoFrame::Format_ARGB32));
+ if (type != QAbstractVideoBuffer::NoHandle) {
+ m_surface->setProperty("_q_GLThreadCallback", QVariant::fromValue<QObject*>(this));
+ m_surface->start(QVideoSurfaceFormat(frame.size(), QVideoFrame::Format_BGR32,
+ QAbstractVideoBuffer::GLTextureHandle));
+ } else {
+ m_surface->start(QVideoSurfaceFormat(frame.size(), QVideoFrame::Format_ARGB32));
+ }
}
}
- m_surface->present(frame.copy());
+ // Depending on the support of EGL images on the current platform we either pass a texture
+ // handle or a copy of the image data
+ if (m_surface->surfaceFormat().handleType() != QAbstractVideoBuffer::NoHandle) {
+ if (m_windowGrabber->eglImagesInitialized() &&
+ m_surface->property("_q_GLThreadCallback") != 0)
+ m_surface->setProperty("_q_GLThreadCallback", 0);
+
+
+ BBTextureBuffer *textBuffer = new BBTextureBuffer(handle);
+ QVideoFrame actualFrame(textBuffer, frame.size(), QVideoFrame::Format_BGR32);
+ m_surface->present(actualFrame);
+ } else {
+ m_surface->present(frame.copy());
+ }
}
}
+void MmRendererPlayerVideoRendererControl::customEvent(QEvent *e)
+{
+ // This is running in the render thread (OpenGL enabled)
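+ // QEvent::User asks us to check for the EGL image extensions,
+ // QEvent::User + 1 asks us to create the EGL images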
+ if (e->type() == QEvent::User)
+ m_windowGrabber->checkForEglImageExtension();
+ else if (e->type() == QEvent::User + 1)
+ m_windowGrabber->createEglImages();
+}
+
QT_END_NAMESPACE
diff --git a/src/plugins/qnx/mediaplayer/mmrendererplayervideorenderercontrol.h b/src/plugins/qnx/mediaplayer/mmrendererplayervideorenderercontrol.h
index 4e271ad5d..5624b464e 100644
--- a/src/plugins/qnx/mediaplayer/mmrendererplayervideorenderercontrol.h
+++ b/src/plugins/qnx/mediaplayer/mmrendererplayervideorenderercontrol.h
@@ -67,8 +67,10 @@ public:
void pause();
void resume();
+ void customEvent(QEvent *) Q_DECL_OVERRIDE;
+
private Q_SLOTS:
- void frameGrabbed(const QImage &frame);
+ void frameGrabbed(const QImage &frame, int);
private:
QPointer<QAbstractVideoSurface> m_surface;
diff --git a/src/plugins/videonode/imx6/imx6.pro b/src/plugins/videonode/imx6/imx6.pro
index a2e6798e3..36e25e86b 100644
--- a/src/plugins/videonode/imx6/imx6.pro
+++ b/src/plugins/videonode/imx6/imx6.pro
@@ -2,6 +2,7 @@ TARGET = imx6vivantevideonode
QT += multimedia-private qtmultimediaquicktools-private
PLUGIN_TYPE=video/videonode
+PLUGIN_EXTENDS = quick
PLUGIN_CLASS_NAME = QSGVivanteVideoNodeFactory
load(qt_plugin)
diff --git a/src/plugins/wmf/mfactivate.cpp b/src/plugins/wmf/mfactivate.cpp
index cf64adc80..faa678ddf 100644
--- a/src/plugins/wmf/mfactivate.cpp
+++ b/src/plugins/wmf/mfactivate.cpp
@@ -85,5 +85,5 @@ ULONG MFAbstractActivate::Release(void)
ULONG cRef = InterlockedDecrement(&m_cRef);
if (cRef == 0)
delete this;
- return m_cRef;
+ return cRef;
}
diff --git a/src/plugins/wmf/player/mfplayersession.cpp b/src/plugins/wmf/player/mfplayersession.cpp
index 69958061b..f61f7aba2 100644
--- a/src/plugins/wmf/player/mfplayersession.cpp
+++ b/src/plugins/wmf/player/mfplayersession.cpp
@@ -1157,8 +1157,8 @@ void MFPlayerSession::setPositionInternal(qint64 position, Command requestCmd)
qreal MFPlayerSession::playbackRate() const
{
- if (m_pendingState != NoPending)
- return m_request.rate;
+ if (m_scrubbing)
+ return m_restoreRate;
return m_state.rate;
}
@@ -1166,6 +1166,7 @@ void MFPlayerSession::setPlaybackRate(qreal rate)
{
if (m_scrubbing) {
m_restoreRate = rate;
+ emit playbackRateChanged(rate);
return;
}
setPlaybackRateInternal(rate);
@@ -1194,6 +1195,8 @@ void MFPlayerSession::setPlaybackRateInternal(qreal rate)
isThin = TRUE;
if (FAILED(m_rateSupport->IsRateSupported(isThin, rate, NULL))) {
qWarning() << "unable to set playbackrate = " << rate;
+ m_pendingRate = m_request.rate = m_state.rate;
+ return;
}
}
if (m_pendingState != NoPending) {
@@ -1219,6 +1222,7 @@ void MFPlayerSession::commitRateChange(qreal rate, BOOL isThin)
MFTIME hnsSystemTime = 0;
MFTIME hnsClockTime = 0;
Command cmdNow = m_state.command;
+ bool resetPosition = false;
// Allowed rate transitions:
// Positive <-> negative: Stopped
// Negative <-> zero: Stopped
@@ -1229,7 +1233,12 @@ void MFPlayerSession::commitRateChange(qreal rate, BOOL isThin)
m_presentationClock->GetCorrelatedTime(0, &hnsClockTime, &hnsSystemTime);
Q_ASSERT(hnsSystemTime != 0);
- m_request.setCommand(rate < 0 || m_state.rate < 0 ? CmdSeekResume : CmdStart);
+ if (rate < 0 || m_state.rate < 0)
+ m_request.setCommand(CmdSeekResume);
+ else if (isThin || m_state.isThin)
+ m_request.setCommand(CmdStartAndSeek);
+ else
+ m_request.setCommand(CmdStart);
// We need to stop only when dealing with negative rates
if (rate >= 0 && m_state.rate >= 0)
@@ -1247,7 +1256,9 @@ void MFPlayerSession::commitRateChange(qreal rate, BOOL isThin)
// session cannot transition back from stopped to paused.
// Therefore, this rate transition is not supported while paused.
qWarning() << "Unable to change rate from positive to negative or vice versa in paused state";
- return;
+ rate = m_state.rate;
+ isThin = m_state.isThin;
+ goto done;
}
// This happens when resuming playback after scrubbing in pause mode.
@@ -1279,17 +1290,42 @@ void MFPlayerSession::commitRateChange(qreal rate, BOOL isThin)
// Resume to the current position (stop() will reset the position to 0)
m_request.start = hnsClockTime / 10000;
+ } else if (!isThin && m_state.isThin) {
+ if (cmdNow == CmdStart) {
+ // When thinning, only key frames are read and presented. Going back
+ // to normal playback requires resetting the current position to force
+ // the pipeline to decode the actual frame at the current position
+ // (which might be earlier than the last decoded key frame).
+ resetPosition = true;
+ } else if (cmdNow == CmdPause) {
+ // If paused, don't reset the position until we resume, otherwise
+ // a new frame will be rendered.
+ m_presentationClock->GetCorrelatedTime(0, &hnsClockTime, &hnsSystemTime);
+ m_request.setCommand(CmdSeekResume);
+ m_request.start = hnsClockTime / 10000;
+ }
}
// Set the rate.
if (FAILED(m_rateControl->SetRate(isThin, rate))) {
qWarning() << "failed to set playbackrate = " << rate;
- return;
+ rate = m_state.rate;
+ isThin = m_state.isThin;
+ goto done;
}
+ if (resetPosition) {
+ m_presentationClock->GetCorrelatedTime(0, &hnsClockTime, &hnsSystemTime);
+ setPosition(hnsClockTime / 10000);
+ }
+
+done:
// Adjust our current rate and requested rate.
m_pendingRate = m_request.rate = m_state.rate = rate;
-
+ if (rate != 0)
+ m_state.isThin = isThin;
+ emit playbackRateChanged(rate);
}
void MFPlayerSession::scrub(bool enableScrub)
@@ -1705,6 +1741,11 @@ void MFPlayerSession::updatePendingCommands(Command command)
case CmdSeek:
case CmdSeekResume:
setPositionInternal(m_request.start, m_request.command);
+ break;
+ case CmdStartAndSeek:
+ start();
+ setPositionInternal(m_request.start, m_request.command);
+ break;
}
m_request.setCommand(CmdNone);
}
diff --git a/src/plugins/wmf/player/mfplayersession.h b/src/plugins/wmf/player/mfplayersession.h
index 3ba43ce58..2c87f3cc6 100644
--- a/src/plugins/wmf/player/mfplayersession.h
+++ b/src/plugins/wmf/player/mfplayersession.h
@@ -165,6 +165,7 @@ private:
CmdPause,
CmdSeek,
CmdSeekResume,
+ CmdStartAndSeek
};
void clear();
diff --git a/src/plugins/wmf/player/mfvideorenderercontrol.cpp b/src/plugins/wmf/player/mfvideorenderercontrol.cpp
index 1705b2a48..040682965 100644
--- a/src/plugins/wmf/player/mfvideorenderercontrol.cpp
+++ b/src/plugins/wmf/player/mfvideorenderercontrol.cpp
@@ -655,11 +655,6 @@ namespace
m_presentationClock = NULL;
}
- if (m_scheduledBuffer) {
- m_scheduledBuffer->Release();
- m_scheduledBuffer = NULL;
- }
-
clearMediaTypes();
clearSampleQueue();
clearBufferCache();
@@ -677,6 +672,7 @@ namespace
QMutexLocker locker(&m_mutex);
HRESULT hr = validateOperation(OpPreroll);
if (SUCCEEDED(hr)) {
+ m_state = State_Prerolling;
m_prerollTargetTime = hnsUpcomingStartTime;
hr = queueAsyncOperation(OpPreroll);
}
@@ -772,9 +768,12 @@ namespace
qDebug() << "MediaStream::setRate" << rate;
#endif
QMutexLocker locker(&m_mutex);
- m_rate = rate;
- queueEvent(MEStreamSinkRateChanged, GUID_NULL, S_OK, NULL);
- return S_OK;
+ HRESULT hr = validateOperation(OpSetRate);
+ if (SUCCEEDED(hr)) {
+ m_rate = rate;
+ hr = queueAsyncOperation(OpSetRate);
+ }
+ return hr;
}
void supportedFormatsChanged()
@@ -861,6 +860,7 @@ namespace
if (m_scheduledBuffer) {
m_scheduledBuffer->Release();
m_scheduledBuffer = NULL;
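+ // Move on to the next cached buffer (and request a new sample) now that the scheduled one was dropped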
+ schedulePresentation(true);
}
}
@@ -933,6 +933,7 @@ namespace
{
State_TypeNotSet = 0, // No media type is set
State_Ready, // Media type is set, Start has never been called.
+ State_Prerolling, // Preroll requested, Start has not been called yet
State_Started,
State_Paused,
State_Stopped,
@@ -1121,6 +1122,9 @@ namespace
break;
}
}
+
+ if (m_state == State_Started)
+ schedulePresentation(true);
case OpRestart:
endPreroll(S_FALSE);
if (SUCCEEDED(hr)) {
@@ -1139,10 +1143,7 @@ namespace
case OpStop:
// Drop samples from queue.
hr = processSamplesFromQueue(DropSamples);
- if (m_scheduledBuffer) {
- m_scheduledBuffer->Release();
- m_scheduledBuffer = NULL;
- }
+ clearBufferCache();
// Send the event even if the previous call failed.
hr = queueEvent(MEStreamSinkStopped, GUID_NULL, hr, NULL);
if (m_surface->isActive()) {
@@ -1158,7 +1159,7 @@ namespace
hr = queueEvent(MEStreamSinkPaused, GUID_NULL, hr, NULL);
break;
case OpSetRate:
- //TODO:
+ hr = queueEvent(MEStreamSinkRateChanged, GUID_NULL, S_OK, NULL);
break;
case OpProcessSample:
case OpPlaceMarker:
@@ -1335,7 +1336,7 @@ namespace
pSample->GetSampleDuration(&duration);
if (m_prerolling) {
- if (SUCCEEDED(hr) && time >= m_prerollTargetTime) {
+ if (SUCCEEDED(hr) && ((time - m_prerollTargetTime) * m_rate) >= 0) {
IMFMediaBuffer *pBuffer = NULL;
hr = pSample->ConvertToContiguousBuffer(&pBuffer);
if (SUCCEEDED(hr)) {
@@ -1352,7 +1353,7 @@ namespace
} else {
bool requestSample = true;
// If the time stamp is too early, just discard this sample.
- if (SUCCEEDED(hr) && time >= m_startTime) {
+ if (SUCCEEDED(hr) && ((time - m_startTime) * m_rate) >= 0) {
IMFMediaBuffer *pBuffer = NULL;
hr = pSample->ConvertToContiguousBuffer(&pBuffer);
if (SUCCEEDED(hr)) {
@@ -1385,11 +1386,16 @@ namespace
foreach (SampleBuffer sb, m_bufferCache)
sb.m_buffer->Release();
m_bufferCache.clear();
+
+ if (m_scheduledBuffer) {
+ m_scheduledBuffer->Release();
+ m_scheduledBuffer = NULL;
+ }
}
void schedulePresentation(bool requestSample)
{
- if (m_state == State_Paused)
+ if (m_state == State_Paused || m_state == State_Prerolling)
return;
if (!m_scheduledBuffer) {
//get time from presentation time
@@ -1400,9 +1406,8 @@ namespace
timeOK = false;
}
while (!m_bufferCache.isEmpty()) {
- SampleBuffer sb = m_bufferCache.first();
- m_bufferCache.pop_front();
- if (timeOK && currentTime > sb.m_time) {
+ SampleBuffer sb = m_bufferCache.takeFirst();
+ if (timeOK && ((sb.m_time - currentTime) * m_rate) < 0) {
sb.m_buffer->Release();
#ifdef DEBUG_MEDIAFOUNDATION
qDebug() << "currentPresentTime =" << float(currentTime / 10000) * 0.001f << " and sampleTime is" << float(sb.m_time / 10000) * 0.001f;
@@ -1439,6 +1444,8 @@ namespace
/* Ready */ TRUE, TRUE, TRUE, FALSE, TRUE, TRUE, TRUE, FALSE, TRUE, TRUE,
+ /* Prerolling */ TRUE, TRUE, FALSE, FALSE, TRUE, TRUE, TRUE, TRUE, TRUE, TRUE,
+
/* Start */ FALSE, TRUE, TRUE, FALSE, TRUE, TRUE, TRUE, TRUE, TRUE, TRUE,
/* Pause */ FALSE, TRUE, TRUE, TRUE, TRUE, TRUE, TRUE, TRUE, TRUE, TRUE,
@@ -1454,7 +1461,11 @@ namespace
// 2. While paused, the sink accepts samples but does not process them.
};
- class MediaSink : public IMFFinalizableMediaSink, public IMFClockStateSink, public IMFMediaSinkPreroll
+ class MediaSink : public IMFFinalizableMediaSink,
+ public IMFClockStateSink,
+ public IMFMediaSinkPreroll,
+ public IMFGetService,
+ public IMFRateSupport
{
public:
MediaSink(MFVideoRendererControl *rendererControl)
@@ -1526,10 +1537,14 @@ namespace
return E_POINTER;
if (riid == IID_IMFMediaSink) {
*ppvObject = static_cast<IMFMediaSink*>(this);
+ } else if (riid == IID_IMFGetService) {
+ *ppvObject = static_cast<IMFGetService*>(this);
} else if (riid == IID_IMFMediaSinkPreroll) {
*ppvObject = static_cast<IMFMediaSinkPreroll*>(this);
} else if (riid == IID_IMFClockStateSink) {
*ppvObject = static_cast<IMFClockStateSink*>(this);
+ } else if (riid == IID_IMFRateSupport) {
+ *ppvObject = static_cast<IMFRateSupport*>(this);
} else if (riid == IID_IUnknown) {
*ppvObject = static_cast<IUnknown*>(static_cast<IMFFinalizableMediaSink*>(this));
} else {
@@ -1554,7 +1569,19 @@ namespace
return cRef;
}
+ // IMFGetService methods
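+ // Exposes MF_RATE_CONTROL_SERVICE so the media session can query the rate interfaces this sink implements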
+ STDMETHODIMP GetService(const GUID &guidService,
+ const IID &riid,
+ LPVOID *ppvObject)
+ {
+ if (!ppvObject)
+ return E_POINTER;
+
+ if (guidService != MF_RATE_CONTROL_SERVICE)
+ return MF_E_UNSUPPORTED_SERVICE;
+ return QueryInterface(riid, ppvObject);
+ }
//IMFMediaSinkPreroll
STDMETHODIMP NotifyPreroll(MFTIME hnsUpcomingStartTime)
@@ -1749,6 +1776,68 @@ namespace
return m_stream->setRate(flRate);
}
+ // IMFRateSupport methods
+ STDMETHODIMP GetFastestRate(MFRATE_DIRECTION eDirection,
+ BOOL fThin,
+ float *pflRate)
+ {
+ if (!pflRate)
+ return E_POINTER;
+
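+ // Up to 2x in normal playback and 8x when thinning (key frames only); negative values indicate reverse playback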
+ *pflRate = (fThin ? 8.0f : 2.0f) * (eDirection == MFRATE_FORWARD ? 1 : -1);
+
+ return S_OK;
+ }
+
+ STDMETHODIMP GetSlowestRate(MFRATE_DIRECTION eDirection,
+ BOOL fThin,
+ float *pflRate)
+ {
+ Q_UNUSED(eDirection);
+ Q_UNUSED(fThin);
+
+ if (!pflRate)
+ return E_POINTER;
+
+ // we support any rate
+ *pflRate = 0.f;
+
+ return S_OK;
+ }
+
+ STDMETHODIMP IsRateSupported(BOOL fThin,
+ float flRate,
+ float *pflNearestSupportedRate)
+ {
+ HRESULT hr = S_OK;
+
+ if (!qFuzzyIsNull(flRate)) {
+ MFRATE_DIRECTION direction = flRate > 0.f ? MFRATE_FORWARD
+ : MFRATE_REVERSE;
+
+ float fastestRate = 0.f;
+ float slowestRate = 0.f;
+ GetFastestRate(direction, fThin, &fastestRate);
+ GetSlowestRate(direction, fThin, &slowestRate);
+
+ if (direction == MFRATE_REVERSE)
+ qSwap(fastestRate, slowestRate);
+
+ if (flRate < slowestRate || flRate > fastestRate) {
+ hr = MF_E_UNSUPPORTED_RATE;
+ if (pflNearestSupportedRate) {
+ *pflNearestSupportedRate = qBound(slowestRate,
+ flRate,
+ fastestRate);
+ }
+ }
+ } else if (pflNearestSupportedRate) {
+ *pflNearestSupportedRate = flRate;
+ }
+
+ return hr;
+ }
+
private:
long m_cRef;
QMutex m_mutex;
@@ -2201,13 +2290,13 @@ void MFVideoRendererControl::customEvent(QEvent *event)
MFTIME targetTime = static_cast<MediaStream::PresentEvent*>(event)->targetTime();
MFTIME currentTime = static_cast<VideoRendererActivate*>(m_currentActivate)->getTime();
float playRate = static_cast<VideoRendererActivate*>(m_currentActivate)->getPlayRate();
- if (!qFuzzyIsNull(playRate)) {
- // If the scheduled frame is too late or too much in advance, skip it
- const int diff = (targetTime - currentTime) / 10000;
- if (diff < 0 || diff > 500)
+ if (!qFuzzyIsNull(playRate) && targetTime != currentTime) {
+ // If the scheduled frame is too late, skip it
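+ // Dividing by the playback rate converts the media-time difference into a wall-clock delay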
+ const int interval = ((targetTime - currentTime) / 10000) / playRate;
+ if (interval < 0)
static_cast<VideoRendererActivate*>(m_currentActivate)->clearScheduledFrame();
else
- QTimer::singleShot(diff / playRate, this, SLOT(present()));
+ QTimer::singleShot(interval, this, SLOT(present()));
} else {
present();
}
diff --git a/src/plugins/wmf/samplegrabber.h b/src/plugins/wmf/samplegrabber.h
index 7a5c2260b..63cb5fa62 100644
--- a/src/plugins/wmf/samplegrabber.h
+++ b/src/plugins/wmf/samplegrabber.h
@@ -72,6 +72,9 @@ public:
protected:
SampleGrabberCallback() : m_cRef(1) {}
+public:
+ virtual ~SampleGrabberCallback() {}
+
private:
long m_cRef;
};
diff --git a/src/plugins/wmf/sourceresolver.cpp b/src/plugins/wmf/sourceresolver.cpp
index 9ac126bda..57da12282 100644
--- a/src/plugins/wmf/sourceresolver.cpp
+++ b/src/plugins/wmf/sourceresolver.cpp
@@ -278,7 +278,7 @@ IMFMediaSource* SourceResolver::mediaSource() const
/////////////////////////////////////////////////////////////////////////////////
SourceResolver::State::State(IMFSourceResolver *sourceResolver, bool fromStream)
- : m_cRef(1)
+ : m_cRef(0)
, m_sourceResolver(sourceResolver)
, m_fromStream(fromStream)
{
diff --git a/src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp b/src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp
index cd03cd6b8..647732485 100644
--- a/src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp
+++ b/src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp
@@ -201,6 +201,16 @@ QSGNode *QDeclarativeVideoRendererBackend::updatePaintNode(QSGNode *oldNode,
obj->event(&ev);
}
}
+#if defined(Q_OS_QNX) // On QNX we need to be called back again to create the EGL images
+ else {
+ // Internal mechanism to call back the surface renderer from the QtQuick render thread
+ QObject *obj = m_surface->property("_q_GLThreadCallback").value<QObject*>();
+ if (obj) {
+ QEvent ev(static_cast<QEvent::Type>(QEvent::User + 1));
+ obj->event(&ev);
+ }
+ }
+#endif
if (m_frameChanged) {
if (videoNode && videoNode->pixelFormat() != m_frame.pixelFormat()) {
diff --git a/src/qtmultimediaquicktools/qsgvideonode_p.cpp b/src/qtmultimediaquicktools/qsgvideonode_p.cpp
index b958180a0..6fbc1443b 100644
--- a/src/qtmultimediaquicktools/qsgvideonode_p.cpp
+++ b/src/qtmultimediaquicktools/qsgvideonode_p.cpp
@@ -46,6 +46,7 @@ QT_BEGIN_NAMESPACE
QSGVideoNode::QSGVideoNode()
: m_orientation(-1)
{
+ setFlag(QSGNode::OwnsGeometry);
}
/* Helpers */