summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--examples/multimedia/declarative-camera/declarative-camera.pro3
-rw-r--r--examples/multimedia/spectrum/3rdparty/fftreal/fftreal.pro1
-rw-r--r--examples/multimedia/spectrum/app/app.pro2
-rw-r--r--examples/multimedia/spectrum/spectrum.pro3
-rw-r--r--examples/multimedia/video/qmlvideo/qmlvideo.pro3
-rw-r--r--examples/multimedia/video/qmlvideofx/qmlvideofx.pro4
-rw-r--r--examples/multimedia/video/video.pro3
-rw-r--r--examples/multimediawidgets/videowidget/main.cpp25
-rw-r--r--examples/multimediawidgets/videowidget/videoplayer.cpp53
-rw-r--r--examples/multimediawidgets/videowidget/videoplayer.h3
-rw-r--r--src/imports/audioengine/qdeclarative_attenuationmodel_p.h10
-rw-r--r--src/multimediawidgets/qpaintervideosurface.cpp19
-rw-r--r--src/plugins/alsa/qalsaaudioinput.cpp1
-rw-r--r--src/plugins/alsa/qalsaaudiooutput.cpp1
-rw-r--r--src/plugins/android/src/common/qandroidvideooutput.cpp55
-rw-r--r--src/plugins/android/src/common/qandroidvideooutput.h2
-rw-r--r--src/plugins/android/src/wrappers/jni/androidsurfacetexture.cpp16
-rw-r--r--src/plugins/android/src/wrappers/jni/androidsurfacetexture.h3
-rw-r--r--src/plugins/avfoundation/camera/avfcamerarenderercontrol.mm7
-rw-r--r--src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm7
-rw-r--r--src/plugins/avfoundation/camera/avfimagecapturecontrol.mm3
-rw-r--r--src/plugins/avfoundation/camera/avfmediaassetwriter.h26
-rw-r--r--src/plugins/avfoundation/camera/avfmediaassetwriter.mm136
-rw-r--r--src/plugins/avfoundation/camera/avfmediarecordercontrol_ios.h9
-rw-r--r--src/plugins/avfoundation/camera/avfmediarecordercontrol_ios.mm11
-rw-r--r--src/plugins/coreaudio/coreaudioinput.mm33
-rw-r--r--src/plugins/coreaudio/coreaudiooutput.mm32
-rw-r--r--src/plugins/directshow/player/directshowplayerservice.cpp5
-rw-r--r--src/plugins/gstreamer/camerabin/camerabinlocks.cpp21
-rw-r--r--src/plugins/gstreamer/camerabin/camerabinsession.cpp3
-rw-r--r--src/plugins/gstreamer/camerabin/camerabinsession.h2
-rw-r--r--src/plugins/qnx/common/windowgrabber.cpp362
-rw-r--r--src/plugins/qnx/common/windowgrabber.h61
-rw-r--r--src/plugins/qnx/mediaplayer/mmrendererplayervideorenderercontrol.cpp53
-rw-r--r--src/plugins/qnx/mediaplayer/mmrendererplayervideorenderercontrol.h2
-rw-r--r--src/plugins/v4l/radio/v4lradiocontrol.h4
-rw-r--r--src/plugins/winrt/qwinrtabstractvideorenderercontrol.h5
-rw-r--r--src/plugins/winrt/qwinrtcameracontrol.cpp376
-rw-r--r--src/plugins/winrt/qwinrtcameracontrol.h7
-rw-r--r--src/plugins/winrt/qwinrtcameraimagecapturecontrol.cpp58
-rw-r--r--src/plugins/winrt/qwinrtcameravideorenderercontrol.cpp6
-rw-r--r--src/plugins/winrt/qwinrtvideodeviceselectorcontrol.cpp5
-rw-r--r--src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp10
-rw-r--r--tests/auto/integration/qaudiodecoderbackend/BLACKLIST41
-rw-r--r--tests/auto/integration/qaudiodecoderbackend/tst_qaudiodecoderbackend.cpp8
-rw-r--r--tests/auto/unit/qaudiobuffer/qaudiobuffer.pro6
46 files changed, 787 insertions, 719 deletions
diff --git a/examples/multimedia/declarative-camera/declarative-camera.pro b/examples/multimedia/declarative-camera/declarative-camera.pro
index f977e1cb3..71d4f68b0 100644
--- a/examples/multimedia/declarative-camera/declarative-camera.pro
+++ b/examples/multimedia/declarative-camera/declarative-camera.pro
@@ -9,6 +9,3 @@ RESOURCES += declarative-camera.qrc
target.path = $$[QT_INSTALL_EXAMPLES]/multimedia/declarative-camera
INSTALLS += target
-winrt {
- WINRT_MANIFEST.capabilities_device += webcam microphone
-}
diff --git a/examples/multimedia/spectrum/3rdparty/fftreal/fftreal.pro b/examples/multimedia/spectrum/3rdparty/fftreal/fftreal.pro
index f6abeeb15..b2c96f96c 100644
--- a/examples/multimedia/spectrum/3rdparty/fftreal/fftreal.pro
+++ b/examples/multimedia/spectrum/3rdparty/fftreal/fftreal.pro
@@ -42,3 +42,4 @@ EXAMPLE_FILES = bwins/fftreal.def eabi/fftreal.def readme.txt license.txt
target.path = $$[QT_INSTALL_EXAMPLES]/multimedia/spectrum
INSTALLS += target
+CONFIG += install_ok # Do not cargo-cult this!
diff --git a/examples/multimedia/spectrum/app/app.pro b/examples/multimedia/spectrum/app/app.pro
index 8262372c4..76aa02cdf 100644
--- a/examples/multimedia/spectrum/app/app.pro
+++ b/examples/multimedia/spectrum/app/app.pro
@@ -57,6 +57,8 @@ RESOURCES = spectrum.qrc
target.path = $$[QT_INSTALL_EXAMPLES]/multimedia/spectrum
INSTALLS += target
+CONFIG += install_ok # Do not cargo-cult this!
+
# Deployment
DESTDIR = ..$${spectrum_build_dir}
diff --git a/examples/multimedia/spectrum/spectrum.pro b/examples/multimedia/spectrum/spectrum.pro
index 81006a24c..0ca2ee554 100644
--- a/examples/multimedia/spectrum/spectrum.pro
+++ b/examples/multimedia/spectrum/spectrum.pro
@@ -10,3 +10,6 @@ SUBDIRS += app
TARGET = spectrum
+EXAMPLE_FILES += \
+ README.txt \
+ TODO.txt
diff --git a/examples/multimedia/video/qmlvideo/qmlvideo.pro b/examples/multimedia/video/qmlvideo/qmlvideo.pro
index 26865c59a..022835f12 100644
--- a/examples/multimedia/video/qmlvideo/qmlvideo.pro
+++ b/examples/multimedia/video/qmlvideo/qmlvideo.pro
@@ -16,3 +16,6 @@ include($$SNIPPETS_PATH/performancemonitor/performancemonitordeclarative.pri)
target.path = $$[QT_INSTALL_EXAMPLES]/multimedia/video/qmlvideo
INSTALLS += target
+EXAMPLE_FILES += \
+ qmlvideo.png \
+ qmlvideo.svg
diff --git a/examples/multimedia/video/qmlvideofx/qmlvideofx.pro b/examples/multimedia/video/qmlvideofx/qmlvideofx.pro
index 678ba4c97..097ad7516 100644
--- a/examples/multimedia/video/qmlvideofx/qmlvideofx.pro
+++ b/examples/multimedia/video/qmlvideofx/qmlvideofx.pro
@@ -14,3 +14,7 @@ target.path = $$[QT_INSTALL_EXAMPLES]/multimedia/video/qmlvideofx
INSTALLS += target
QMAKE_INFO_PLIST = Info.plist
+
+EXAMPLE_FILES += \
+ qmlvideofx.png \
+ qmlvideofx.svg
diff --git a/examples/multimedia/video/video.pro b/examples/multimedia/video/video.pro
index 3127a4e49..f38cbc124 100644
--- a/examples/multimedia/video/video.pro
+++ b/examples/multimedia/video/video.pro
@@ -2,3 +2,6 @@ TEMPLATE = subdirs
SUBDIRS += qmlvideo qmlvideofx
+EXAMPLE_FILES += \
+ qmlvideofilter_opencl \ # FIXME: this one should use a configure check instead
+ snippets
diff --git a/examples/multimediawidgets/videowidget/main.cpp b/examples/multimediawidgets/videowidget/main.cpp
index c9940e10e..fd726884b 100644
--- a/examples/multimediawidgets/videowidget/main.cpp
+++ b/examples/multimediawidgets/videowidget/main.cpp
@@ -41,13 +41,36 @@
#include "videoplayer.h"
#include <QtWidgets/QApplication>
+#include <QtWidgets/QDesktopWidget>
+#include <QtCore/QCommandLineParser>
+#include <QtCore/QCommandLineOption>
+#include <QtCore/QDir>
int main(int argc, char *argv[])
{
QApplication app(argc, argv);
+ QCoreApplication::setApplicationName("Video Widget Example");
+ QCoreApplication::setOrganizationName("QtProject");
+ QGuiApplication::setApplicationDisplayName(QCoreApplication::applicationName());
+ QCoreApplication::setApplicationVersion(QT_VERSION_STR);
+ QCommandLineParser parser;
+ parser.setApplicationDescription("Qt Video Widget Example");
+ parser.addHelpOption();
+ parser.addVersionOption();
+ parser.addPositionalArgument("url", "The URL to open.");
+ parser.process(app);
+
VideoPlayer player;
- player.resize(320, 240);
+ if (!parser.positionalArguments().isEmpty()) {
+ const QUrl url =
+ QUrl::fromUserInput(parser.positionalArguments().constFirst(),
+ QDir::currentPath(), QUrl::AssumeLocalFile);
+ player.setUrl(url);
+ }
+
+ const QRect availableGeometry = QApplication::desktop()->availableGeometry(&player);
+ player.resize(availableGeometry.width() / 6, availableGeometry.height() / 4);
player.show();
return app.exec();
diff --git a/examples/multimediawidgets/videowidget/videoplayer.cpp b/examples/multimediawidgets/videowidget/videoplayer.cpp
index c3554ff04..8504746c4 100644
--- a/examples/multimediawidgets/videowidget/videoplayer.cpp
+++ b/examples/multimediawidgets/videowidget/videoplayer.cpp
@@ -54,20 +54,20 @@ VideoPlayer::VideoPlayer(QWidget *parent)
QVideoWidget *videoWidget = new QVideoWidget;
QAbstractButton *openButton = new QPushButton(tr("Open..."));
- connect(openButton, SIGNAL(clicked()), this, SLOT(openFile()));
+ connect(openButton, &QAbstractButton::clicked, this, &VideoPlayer::openFile);
playButton = new QPushButton;
playButton->setEnabled(false);
playButton->setIcon(style()->standardIcon(QStyle::SP_MediaPlay));
- connect(playButton, SIGNAL(clicked()),
- this, SLOT(play()));
+ connect(playButton, &QAbstractButton::clicked,
+ this, &VideoPlayer::play);
positionSlider = new QSlider(Qt::Horizontal);
positionSlider->setRange(0, 0);
- connect(positionSlider, SIGNAL(sliderMoved(int)),
- this, SLOT(setPosition(int)));
+ connect(positionSlider, &QAbstractSlider::sliderMoved,
+ this, &VideoPlayer::setPosition);
errorLabel = new QLabel;
errorLabel->setSizePolicy(QSizePolicy::Preferred, QSizePolicy::Maximum);
@@ -86,11 +86,13 @@ VideoPlayer::VideoPlayer(QWidget *parent)
setLayout(layout);
mediaPlayer.setVideoOutput(videoWidget);
- connect(&mediaPlayer, SIGNAL(stateChanged(QMediaPlayer::State)),
- this, SLOT(mediaStateChanged(QMediaPlayer::State)));
- connect(&mediaPlayer, SIGNAL(positionChanged(qint64)), this, SLOT(positionChanged(qint64)));
- connect(&mediaPlayer, SIGNAL(durationChanged(qint64)), this, SLOT(durationChanged(qint64)));
- connect(&mediaPlayer, SIGNAL(error(QMediaPlayer::Error)), this, SLOT(handleError()));
+ connect(&mediaPlayer, &QMediaPlayer::stateChanged,
+ this, &VideoPlayer::mediaStateChanged);
+ connect(&mediaPlayer, &QMediaPlayer::positionChanged, this, &VideoPlayer::positionChanged);
+ connect(&mediaPlayer, &QMediaPlayer::durationChanged, this, &VideoPlayer::durationChanged);
+ typedef void (QMediaPlayer::*ErrorSignal)(QMediaPlayer::Error);
+ connect(&mediaPlayer, static_cast<ErrorSignal>(&QMediaPlayer::error),
+ this, &VideoPlayer::handleError);
}
VideoPlayer::~VideoPlayer()
@@ -99,14 +101,23 @@ VideoPlayer::~VideoPlayer()
void VideoPlayer::openFile()
{
- errorLabel->setText("");
-
- QString fileName = QFileDialog::getOpenFileName(this, tr("Open Movie"),QDir::homePath());
+ QFileDialog fileDialog(this);
+ fileDialog.setAcceptMode(QFileDialog::AcceptOpen);
+ fileDialog.setWindowTitle(tr("Open Movie"));
+ QStringList supportedMimeTypes = mediaPlayer.supportedMimeTypes();
+ if (!supportedMimeTypes.isEmpty())
+ fileDialog.setMimeTypeFilters(supportedMimeTypes);
+ fileDialog.setDirectory(QStandardPaths::standardLocations(QStandardPaths::MoviesLocation).value(0, QDir::homePath()));
+ if (fileDialog.exec() == QDialog::Accepted)
+ setUrl(fileDialog.selectedUrls().constFirst());
+}
- if (!fileName.isEmpty()) {
- mediaPlayer.setMedia(QUrl::fromLocalFile(fileName));
- playButton->setEnabled(true);
- }
+void VideoPlayer::setUrl(const QUrl &url)
+{
+ errorLabel->setText(QString());
+ setWindowFilePath(url.isLocalFile() ? url.toLocalFile() : QString());
+ mediaPlayer.setMedia(url);
+ playButton->setEnabled(true);
}
void VideoPlayer::play()
@@ -151,5 +162,11 @@ void VideoPlayer::setPosition(int position)
void VideoPlayer::handleError()
{
playButton->setEnabled(false);
- errorLabel->setText("Error: " + mediaPlayer.errorString());
+ const QString errorString = mediaPlayer.errorString();
+ QString message = "Error: ";
+ if (errorString.isEmpty())
+ message += " #" + QString::number(int(mediaPlayer.error()));
+ else
+ message += errorString;
+ errorLabel->setText(message);
}
diff --git a/examples/multimediawidgets/videowidget/videoplayer.h b/examples/multimediawidgets/videowidget/videoplayer.h
index 24589f542..f9f3b692b 100644
--- a/examples/multimediawidgets/videowidget/videoplayer.h
+++ b/examples/multimediawidgets/videowidget/videoplayer.h
@@ -50,6 +50,7 @@ QT_BEGIN_NAMESPACE
class QAbstractButton;
class QSlider;
class QLabel;
+class QUrl;
QT_END_NAMESPACE
class VideoPlayer : public QWidget
@@ -59,6 +60,8 @@ public:
VideoPlayer(QWidget *parent = 0);
~VideoPlayer();
+ void setUrl(const QUrl &url);
+
public slots:
void openFile();
void play();
diff --git a/src/imports/audioengine/qdeclarative_attenuationmodel_p.h b/src/imports/audioengine/qdeclarative_attenuationmodel_p.h
index ae0db05f0..b50ed2ff5 100644
--- a/src/imports/audioengine/qdeclarative_attenuationmodel_p.h
+++ b/src/imports/audioengine/qdeclarative_attenuationmodel_p.h
@@ -85,8 +85,8 @@ private:
class QDeclarativeAttenuationModelLinear : public QDeclarativeAttenuationModel
{
Q_OBJECT
- Q_PROPERTY(qreal start READ startDistance WRITE setStartDistance CONSTANT)
- Q_PROPERTY(qreal end READ endDistance WRITE setEndDistance CONSTANT)
+ Q_PROPERTY(qreal start READ startDistance WRITE setStartDistance)
+ Q_PROPERTY(qreal end READ endDistance WRITE setEndDistance)
public:
QDeclarativeAttenuationModelLinear(QObject *parent = 0);
@@ -110,9 +110,9 @@ private:
class QDeclarativeAttenuationModelInverse : public QDeclarativeAttenuationModel
{
Q_OBJECT
- Q_PROPERTY(qreal start READ referenceDistance WRITE setReferenceDistance CONSTANT)
- Q_PROPERTY(qreal end READ maxDistance WRITE setMaxDistance CONSTANT)
- Q_PROPERTY(qreal rolloff READ rolloffFactor WRITE setRolloffFactor CONSTANT)
+ Q_PROPERTY(qreal start READ referenceDistance WRITE setReferenceDistance)
+ Q_PROPERTY(qreal end READ maxDistance WRITE setMaxDistance)
+ Q_PROPERTY(qreal rolloff READ rolloffFactor WRITE setRolloffFactor)
public:
QDeclarativeAttenuationModelInverse(QObject *parent = 0);
diff --git a/src/multimediawidgets/qpaintervideosurface.cpp b/src/multimediawidgets/qpaintervideosurface.cpp
index d64cdbecb..0d060dbce 100644
--- a/src/multimediawidgets/qpaintervideosurface.cpp
+++ b/src/multimediawidgets/qpaintervideosurface.cpp
@@ -224,18 +224,13 @@ void QVideoSurfaceGenericPainter::updateColors(int, int, int, int)
#if !defined(QT_NO_OPENGL) && !defined(QT_OPENGL_ES_1_CL) && !defined(QT_OPENGL_ES_1)
-#ifndef Q_OS_MAC
-# ifndef APIENTRYP
-# ifdef APIENTRY
-# define APIENTRYP APIENTRY *
-# else
-# define APIENTRY
-# define APIENTRYP *
-# endif
-# endif
-#else
-# define APIENTRY
-# define APIENTRYP *
+#ifndef APIENTRYP
+# ifdef APIENTRY
+# define APIENTRYP APIENTRY *
+# else
+# define APIENTRY
+# define APIENTRYP *
+# endif
#endif
#ifndef GL_TEXTURE0
diff --git a/src/plugins/alsa/qalsaaudioinput.cpp b/src/plugins/alsa/qalsaaudioinput.cpp
index 5f83631ab..6ad9a6c5b 100644
--- a/src/plugins/alsa/qalsaaudioinput.cpp
+++ b/src/plugins/alsa/qalsaaudioinput.cpp
@@ -707,6 +707,7 @@ qint64 QAlsaAudioInput::processedUSecs() const
void QAlsaAudioInput::suspend()
{
if(deviceState == QAudio::ActiveState||resuming) {
+ snd_pcm_drain(handle);
timer->stop();
deviceState = QAudio::SuspendedState;
emit stateChanged(deviceState);
diff --git a/src/plugins/alsa/qalsaaudiooutput.cpp b/src/plugins/alsa/qalsaaudiooutput.cpp
index df050ecd7..d59e2b740 100644
--- a/src/plugins/alsa/qalsaaudiooutput.cpp
+++ b/src/plugins/alsa/qalsaaudiooutput.cpp
@@ -679,6 +679,7 @@ QAudioFormat QAlsaAudioOutput::format() const
void QAlsaAudioOutput::suspend()
{
if(deviceState == QAudio::ActiveState || deviceState == QAudio::IdleState || resuming) {
+ snd_pcm_drain(handle);
timer->stop();
deviceState = QAudio::SuspendedState;
errorState = QAudio::NoError;
diff --git a/src/plugins/android/src/common/qandroidvideooutput.cpp b/src/plugins/android/src/common/qandroidvideooutput.cpp
index 9f3fe4dc3..4e96377d8 100644
--- a/src/plugins/android/src/common/qandroidvideooutput.cpp
+++ b/src/plugins/android/src/common/qandroidvideooutput.cpp
@@ -48,6 +48,7 @@
#include <qopenglfunctions.h>
#include <qopenglshaderprogram.h>
#include <qopenglframebufferobject.h>
+#include <QtCore/private/qjnihelpers_p.h>
QT_BEGIN_NAMESPACE
@@ -165,6 +166,7 @@ QAndroidTextureVideoOutput::QAndroidTextureVideoOutput(QObject *parent)
, m_fbo(0)
, m_program(0)
, m_glDeleter(0)
+ , m_surfaceTextureCanAttachToContext(QtAndroidPrivate::androidSdkVersion() >= 16)
{
}
@@ -190,12 +192,14 @@ void QAndroidTextureVideoOutput::setSurface(QAbstractVideoSurface *surface)
if (m_surface) {
if (m_surface->isActive())
m_surface->stop();
- m_surface->setProperty("_q_GLThreadCallback", QVariant());
+
+ if (!m_surfaceTextureCanAttachToContext)
+ m_surface->setProperty("_q_GLThreadCallback", QVariant());
}
m_surface = surface;
- if (m_surface) {
+ if (m_surface && !m_surfaceTextureCanAttachToContext) {
m_surface->setProperty("_q_GLThreadCallback",
QVariant::fromValue<QObject*>(this));
}
@@ -203,7 +207,7 @@ void QAndroidTextureVideoOutput::setSurface(QAbstractVideoSurface *surface)
bool QAndroidTextureVideoOutput::isReady()
{
- return QOpenGLContext::currentContext() || m_externalTex;
+ return m_surfaceTextureCanAttachToContext || QOpenGLContext::currentContext() || m_externalTex;
}
bool QAndroidTextureVideoOutput::initSurfaceTexture()
@@ -214,16 +218,20 @@ bool QAndroidTextureVideoOutput::initSurfaceTexture()
if (!m_surface)
return false;
- // if we have an OpenGL context in the current thread, create a texture. Otherwise, wait
- // for the GL render thread to call us back to do it.
- if (QOpenGLContext::currentContext()) {
- glGenTextures(1, &m_externalTex);
- m_glDeleter = new OpenGLResourcesDeleter;
- m_glDeleter->setTexture(m_externalTex);
- } else if (!m_externalTex) {
- return false;
+ if (!m_surfaceTextureCanAttachToContext) {
+ // if we have an OpenGL context in the current thread, create a texture. Otherwise, wait
+ // for the GL render thread to call us back to do it.
+ if (QOpenGLContext::currentContext()) {
+ glGenTextures(1, &m_externalTex);
+ m_glDeleter = new OpenGLResourcesDeleter;
+ m_glDeleter->setTexture(m_externalTex);
+ } else if (!m_externalTex) {
+ return false;
+ }
}
+ QMutexLocker locker(&m_mutex);
+
m_surfaceTexture = new AndroidSurfaceTexture(m_externalTex);
if (m_surfaceTexture->surfaceTexture() != 0) {
@@ -231,7 +239,8 @@ bool QAndroidTextureVideoOutput::initSurfaceTexture()
} else {
delete m_surfaceTexture;
m_surfaceTexture = 0;
- m_glDeleter->deleteLater();
+ if (m_glDeleter)
+ m_glDeleter->deleteLater();
m_externalTex = 0;
m_glDeleter = 0;
}
@@ -241,10 +250,15 @@ bool QAndroidTextureVideoOutput::initSurfaceTexture()
void QAndroidTextureVideoOutput::clearSurfaceTexture()
{
+ QMutexLocker locker(&m_mutex);
if (m_surfaceTexture) {
delete m_surfaceTexture;
m_surfaceTexture = 0;
}
+
+ // Also reset the attached OpenGL texture
+ if (m_surfaceTextureCanAttachToContext)
+ m_externalTex = 0;
}
AndroidSurfaceTexture *QAndroidTextureVideoOutput::surfaceTexture()
@@ -307,6 +321,9 @@ void QAndroidTextureVideoOutput::renderFrameToFbo()
{
QMutexLocker locker(&m_mutex);
+ if (!m_nativeSize.isValid() || !m_surfaceTexture)
+ return;
+
createGLResources();
m_surfaceTexture->updateTexImage();
@@ -364,6 +381,18 @@ void QAndroidTextureVideoOutput::renderFrameToFbo()
void QAndroidTextureVideoOutput::createGLResources()
{
+ Q_ASSERT(QOpenGLContext::currentContext() != NULL);
+
+ if (!m_glDeleter)
+ m_glDeleter = new OpenGLResourcesDeleter;
+
+ if (m_surfaceTextureCanAttachToContext && !m_externalTex) {
+ m_surfaceTexture->detachFromGLContext();
+ glGenTextures(1, &m_externalTex);
+ m_surfaceTexture->attachToGLContext(m_externalTex);
+ m_glDeleter->setTexture(m_externalTex);
+ }
+
if (!m_fbo || m_fbo->size() != m_nativeSize) {
delete m_fbo;
m_fbo = new QOpenGLFramebufferObject(m_nativeSize);
@@ -407,7 +436,7 @@ void QAndroidTextureVideoOutput::customEvent(QEvent *e)
{
if (e->type() == QEvent::User) {
// This is running in the render thread (OpenGL enabled)
- if (!m_externalTex) {
+ if (!m_surfaceTextureCanAttachToContext && !m_externalTex) {
glGenTextures(1, &m_externalTex);
m_glDeleter = new OpenGLResourcesDeleter; // will cleanup GL resources in the correct thread
m_glDeleter->setTexture(m_externalTex);
diff --git a/src/plugins/android/src/common/qandroidvideooutput.h b/src/plugins/android/src/common/qandroidvideooutput.h
index 624012d57..67bac7052 100644
--- a/src/plugins/android/src/common/qandroidvideooutput.h
+++ b/src/plugins/android/src/common/qandroidvideooutput.h
@@ -116,6 +116,8 @@ private:
QOpenGLShaderProgram *m_program;
OpenGLResourcesDeleter *m_glDeleter;
+ bool m_surfaceTextureCanAttachToContext;
+
friend class AndroidTextureVideoBuffer;
};
diff --git a/src/plugins/android/src/wrappers/jni/androidsurfacetexture.cpp b/src/plugins/android/src/wrappers/jni/androidsurfacetexture.cpp
index 70cbb9b30..82a8bffe3 100644
--- a/src/plugins/android/src/wrappers/jni/androidsurfacetexture.cpp
+++ b/src/plugins/android/src/wrappers/jni/androidsurfacetexture.cpp
@@ -152,6 +152,22 @@ jobject AndroidSurfaceTexture::surfaceHolder()
return m_surfaceHolder.object();
}
+void AndroidSurfaceTexture::attachToGLContext(int texName)
+{
+ if (QtAndroidPrivate::androidSdkVersion() < 16 || !m_surfaceTexture.isValid())
+ return;
+
+ m_surfaceTexture.callMethod<void>("attachToGLContext", "(I)V", texName);
+}
+
+void AndroidSurfaceTexture::detachFromGLContext()
+{
+ if (QtAndroidPrivate::androidSdkVersion() < 16 || !m_surfaceTexture.isValid())
+ return;
+
+ m_surfaceTexture.callMethod<void>("detachFromGLContext");
+}
+
bool AndroidSurfaceTexture::initJNI(JNIEnv *env)
{
// SurfaceTexture is available since API 11.
diff --git a/src/plugins/android/src/wrappers/jni/androidsurfacetexture.h b/src/plugins/android/src/wrappers/jni/androidsurfacetexture.h
index 3b143a9a8..b45ee384c 100644
--- a/src/plugins/android/src/wrappers/jni/androidsurfacetexture.h
+++ b/src/plugins/android/src/wrappers/jni/androidsurfacetexture.h
@@ -64,6 +64,9 @@ public:
void release(); // API level 14
void updateTexImage();
+ void attachToGLContext(int texName); // API level 16
+ void detachFromGLContext(); // API level 16
+
static bool initJNI(JNIEnv *env);
Q_SIGNALS:
diff --git a/src/plugins/avfoundation/camera/avfcamerarenderercontrol.mm b/src/plugins/avfoundation/camera/avfcamerarenderercontrol.mm
index fb02a7d09..e4f234b7e 100644
--- a/src/plugins/avfoundation/camera/avfcamerarenderercontrol.mm
+++ b/src/plugins/avfoundation/camera/avfcamerarenderercontrol.mm
@@ -66,11 +66,14 @@ public:
&& CVPixelBufferGetPixelFormatType(buffer) == kCVPixelFormatType_32BGRA
? GLTextureHandle : NoHandle)
, m_texture(0)
+ , m_renderer(renderer)
#endif
, m_buffer(buffer)
- , m_renderer(renderer)
, m_mode(NotMapped)
{
+#ifndef Q_OS_IOS
+ Q_UNUSED(renderer)
+#endif // Q_OS_IOS
CVPixelBufferRetain(m_buffer);
}
@@ -200,9 +203,9 @@ public:
private:
#ifdef Q_OS_IOS
mutable CVOpenGLESTextureRef m_texture;
+ AVFCameraRendererControl *m_renderer;
#endif
CVImageBufferRef m_buffer;
- AVFCameraRendererControl *m_renderer;
MapMode m_mode;
};
diff --git a/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm b/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm
index d2ae2af05..924b0d76a 100644
--- a/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm
+++ b/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm
@@ -345,7 +345,7 @@ QList<QCameraViewfinderSettings> AVFCameraViewfinderSettingsControl2::supportedV
QCameraViewfinderSettings AVFCameraViewfinderSettingsControl2::viewfinderSettings() const
{
- QCameraViewfinderSettings settings;
+ QCameraViewfinderSettings settings = m_settings;
AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice();
if (!captureDevice) {
@@ -353,6 +353,11 @@ QCameraViewfinderSettings AVFCameraViewfinderSettingsControl2::viewfinderSetting
return settings;
}
+ if (m_service->session()->state() != QCamera::LoadedState &&
+ m_service->session()->state() != QCamera::ActiveState) {
+ return settings;
+ }
+
#if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0)
if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) {
if (!captureDevice.activeFormat) {
diff --git a/src/plugins/avfoundation/camera/avfimagecapturecontrol.mm b/src/plugins/avfoundation/camera/avfimagecapturecontrol.mm
index dea407e3b..6465e69e3 100644
--- a/src/plugins/avfoundation/camera/avfimagecapturecontrol.mm
+++ b/src/plugins/avfoundation/camera/avfimagecapturecontrol.mm
@@ -40,6 +40,7 @@
#include "avfcameradebug.h"
#include "avfimagecapturecontrol.h"
#include "avfcameraservice.h"
+#include "avfcamerautility.h"
#include "avfcameracontrol.h"
#include <QtCore/qurl.h>
@@ -214,6 +215,8 @@ void AVFImageCaptureControl::updateCaptureConnection()
if (![captureSession.outputs containsObject:m_stillImageOutput]) {
if ([captureSession canAddOutput:m_stillImageOutput]) {
+ // Lock the video capture device to make sure the active format is not reset
+ const AVFConfigurationLock lock(m_session->videoCaptureDevice());
[captureSession addOutput:m_stillImageOutput];
m_videoConnection = [m_stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
updateReadyStatus();
diff --git a/src/plugins/avfoundation/camera/avfmediaassetwriter.h b/src/plugins/avfoundation/camera/avfmediaassetwriter.h
index 7cb6f9e44..c70deea10 100644
--- a/src/plugins/avfoundation/camera/avfmediaassetwriter.h
+++ b/src/plugins/avfoundation/camera/avfmediaassetwriter.h
@@ -50,19 +50,9 @@
QT_BEGIN_NAMESPACE
+class AVFMediaRecorderControlIOS;
class AVFCameraService;
-class AVFMediaAssetWriterDelegate
-{
-public:
- virtual ~AVFMediaAssetWriterDelegate();
-
- virtual void assetWriterStarted() = 0;
- virtual void assetWriterFailedToStart() = 0;
- virtual void assetWriterFailedToStop() = 0;
- virtual void assetWriterFinished() = 0;
-};
-
typedef QAtomicInteger<bool> AVFAtomicBool;
typedef QAtomicInteger<qint64> AVFAtomicInt64;
@@ -86,18 +76,15 @@ QT_END_NAMESPACE
// Serial queue for audio output:
QT_PREPEND_NAMESPACE(AVFScopedPointer)<dispatch_queue_t> m_audioQueue;
// Queue to write sample buffers:
- __weak dispatch_queue_t m_writerQueue;
+ QT_PREPEND_NAMESPACE(AVFScopedPointer)<dispatch_queue_t> m_writerQueue;
QT_PREPEND_NAMESPACE(AVFScopedPointer)<AVAssetWriter> m_assetWriter;
- // Delegate's queue.
- __weak dispatch_queue_t m_delegateQueue;
- // TODO: QPointer??
- QT_PREPEND_NAMESPACE(AVFMediaAssetWriterDelegate) *m_delegate;
+
+ QT_PREPEND_NAMESPACE(AVFMediaRecorderControlIOS) *m_delegate;
bool m_setStartTime;
QT_PREPEND_NAMESPACE(AVFAtomicBool) m_stopped;
- bool m_stoppedInternal;
- bool m_aborted;
+ QT_PREPEND_NAMESPACE(AVFAtomicBool) m_aborted;
QT_PREPEND_NAMESPACE(QMutex) m_writerMutex;
@public
@@ -108,8 +95,7 @@ QT_END_NAMESPACE
}
- (id)initWithQueue:(dispatch_queue_t)writerQueue
- delegate:(QT_PREPEND_NAMESPACE(AVFMediaAssetWriterDelegate) *)delegate
- delegateQueue:(dispatch_queue_t)delegateQueue;
+ delegate:(QT_PREPEND_NAMESPACE(AVFMediaRecorderControlIOS) *)delegate;
- (bool)setupWithFileURL:(NSURL *)fileURL
cameraService:(QT_PREPEND_NAMESPACE(AVFCameraService) *)service;
diff --git a/src/plugins/avfoundation/camera/avfmediaassetwriter.mm b/src/plugins/avfoundation/camera/avfmediaassetwriter.mm
index 88c4914aa..1b8c253e2 100644
--- a/src/plugins/avfoundation/camera/avfmediaassetwriter.mm
+++ b/src/plugins/avfoundation/camera/avfmediaassetwriter.mm
@@ -38,6 +38,7 @@
****************************************************************************/
#include "avfaudioinputselectorcontrol.h"
+#include "avfmediarecordercontrol_ios.h"
#include "avfcamerarenderercontrol.h"
#include "avfmediaassetwriter.h"
#include "avfcameraservice.h"
@@ -45,6 +46,7 @@
#include "avfcameradebug.h"
//#include <QtCore/qmutexlocker.h>
+#include <QtCore/qmetaobject.h>
#include <QtCore/qsysinfo.h>
QT_USE_NAMESPACE
@@ -71,11 +73,7 @@ bool qt_camera_service_isValid(AVFCameraService *service)
return true;
}
-}
-
-AVFMediaAssetWriterDelegate::~AVFMediaAssetWriterDelegate()
-{
-}
+} // unnamed namespace
@interface QT_MANGLE_NAMESPACE(AVFMediaAssetWriter) (PrivateAPI)
- (bool)addAudioCapture;
@@ -89,21 +87,20 @@ AVFMediaAssetWriterDelegate::~AVFMediaAssetWriterDelegate()
@implementation QT_MANGLE_NAMESPACE(AVFMediaAssetWriter)
- (id)initWithQueue:(dispatch_queue_t)writerQueue
- delegate:(AVFMediaAssetWriterDelegate *)delegate
- delegateQueue:(dispatch_queue_t)delegateQueue
+ delegate:(AVFMediaRecorderControlIOS *)delegate
{
Q_ASSERT(writerQueue);
Q_ASSERT(delegate);
- Q_ASSERT(delegateQueue);
if (self = [super init]) {
- m_writerQueue = writerQueue;
+ // "Shared" queue:
+ dispatch_retain(writerQueue);
+ m_writerQueue.reset(writerQueue);
+
m_delegate = delegate;
- m_delegateQueue = delegateQueue;
m_setStartTime = true;
m_stopped.store(true);
- m_stoppedInternal = false;
- m_aborted = false;
+ m_aborted.store(false);
m_startTime = kCMTimeInvalid;
m_lastTimeStamp = kCMTimeInvalid;
m_durationInMs.store(0);
@@ -166,14 +163,13 @@ AVFMediaAssetWriterDelegate::~AVFMediaAssetWriterDelegate()
{
// To be executed on a writer's queue.
const QMutexLocker lock(&m_writerMutex);
- if (m_aborted)
+ if (m_aborted.load())
return;
[self setQueues];
m_setStartTime = true;
m_stopped.store(false);
- m_stoppedInternal = false;
[m_assetWriter startWriting];
AVCaptureSession *session = m_service->session()->captureSession();
if (!session.running)
@@ -183,40 +179,41 @@ AVFMediaAssetWriterDelegate::~AVFMediaAssetWriterDelegate()
- (void)stop
{
// To be executed on a writer's queue.
+ {
const QMutexLocker lock(&m_writerMutex);
- if (m_aborted)
+ if (m_aborted.load())
return;
- if (m_stopped.load()) {
- // Should never happen, but ...
- // if something went wrong in a recorder control
- // and we set state stopped without starting first ...
- // m_stoppedIntenal will be false, but m_stopped - true.
+ if (m_stopped.load())
return;
- }
m_stopped.store(true);
- m_stoppedInternal = true;
+ }
+
[m_assetWriter finishWritingWithCompletionHandler:^{
- // TODO: make sure the session exist and we can call stop/remove on it.
+ // This block is async, so by the time it's executed,
+ // it's possible that render control was deleted already ...
+ const QMutexLocker lock(&m_writerMutex);
+ if (m_aborted.load())
+ return;
+
AVCaptureSession *session = m_service->session()->captureSession();
[session stopRunning];
[session removeOutput:m_audioOutput];
[session removeInput:m_audioInput];
- dispatch_async(m_delegateQueue, ^{
- m_delegate->assetWriterFinished();
- });
+ QMetaObject::invokeMethod(m_delegate, "assetWriterFinished", Qt::QueuedConnection);
}];
}
- (void)abort
{
- // To be executed on any thread, prevents writer from
- // accessing any external object (probably deleted by this time)
+ // To be executed on any thread (presumably, it's the main thread),
+ // prevents writer from accessing any shared object.
const QMutexLocker lock(&m_writerMutex);
- m_aborted = true;
+ m_aborted.store(true);
if (m_stopped.load())
return;
+
[m_assetWriter finishWritingWithCompletionHandler:^{
}];
}
@@ -227,9 +224,11 @@ AVFMediaAssetWriterDelegate::~AVFMediaAssetWriterDelegate()
Q_ASSERT(m_setStartTime);
Q_ASSERT(sampleBuffer);
- dispatch_async(m_delegateQueue, ^{
- m_delegate->assetWriterStarted();
- });
+ const QMutexLocker lock(&m_writerMutex);
+ if (m_aborted.load() || m_stopped.load())
+ return;
+
+ QMetaObject::invokeMethod(m_delegate, "assetWriterStarted", Qt::QueuedConnection);
m_durationInMs.store(0);
m_startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
@@ -242,22 +241,18 @@ AVFMediaAssetWriterDelegate::~AVFMediaAssetWriterDelegate()
{
Q_ASSERT(sampleBuffer);
- // This code is executed only on a writer's queue, but
- // it can access potentially deleted objects, so we
- // need a lock and m_aborted flag test.
- {
- const QMutexLocker lock(&m_writerMutex);
- if (!m_aborted && !m_stoppedInternal) {
- if (m_setStartTime)
- [self setStartTimeFrom:sampleBuffer];
-
- if (m_cameraWriterInput.data().readyForMoreMediaData) {
- [self updateDuration:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
- [m_cameraWriterInput appendSampleBuffer:sampleBuffer];
- }
+ // This code is executed only on a writer's queue.
+ if (!m_aborted.load() && !m_stopped.load()) {
+ if (m_setStartTime)
+ [self setStartTimeFrom:sampleBuffer];
+
+ if (m_cameraWriterInput.data().readyForMoreMediaData) {
+ [self updateDuration:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
+ [m_cameraWriterInput appendSampleBuffer:sampleBuffer];
}
}
+
CFRelease(sampleBuffer);
}
@@ -267,16 +262,13 @@ AVFMediaAssetWriterDelegate::~AVFMediaAssetWriterDelegate()
// it does not touch any shared/external data.
Q_ASSERT(sampleBuffer);
- {
- const QMutexLocker lock(&m_writerMutex);
- if (!m_aborted && !m_stoppedInternal) {
- if (m_setStartTime)
- [self setStartTimeFrom:sampleBuffer];
-
- if (m_audioWriterInput.data().readyForMoreMediaData) {
- [self updateDuration:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
- [m_audioWriterInput appendSampleBuffer:sampleBuffer];
- }
+ if (!m_aborted.load() && !m_stopped.load()) {
+ if (m_setStartTime)
+ [self setStartTimeFrom:sampleBuffer];
+
+ if (m_audioWriterInput.data().readyForMoreMediaData) {
+ [self updateDuration:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
+ [m_audioWriterInput appendSampleBuffer:sampleBuffer];
}
}
@@ -289,13 +281,12 @@ AVFMediaAssetWriterDelegate::~AVFMediaAssetWriterDelegate()
{
Q_UNUSED(connection)
- // This method can be called on either video or audio queue, never on a writer's
- // queue - it does not access any shared data except this atomic flag below.
+ // This method can be called on either video or audio queue,
+ // never on a writer's queue, it needs access to a shared data, so
+ // lock is required.
if (m_stopped.load())
return;
- // Even if we are stopped now, we still do not access any data.
-
if (!CMSampleBufferDataIsReady(sampleBuffer)) {
qDebugCamera() << Q_FUNC_INFO << "sample buffer is not ready, skipping.";
return;
@@ -304,21 +295,18 @@ AVFMediaAssetWriterDelegate::~AVFMediaAssetWriterDelegate()
CFRetain(sampleBuffer);
if (captureOutput != m_audioOutput.data()) {
- {
- const QMutexLocker lock(&m_writerMutex);
- if (m_aborted || m_stoppedInternal) {
- CFRelease(sampleBuffer);
- return;
- }
-
- // Find renderercontrol's delegate and invoke its method to
- // show updated viewfinder's frame.
- if (m_service && m_service->videoOutput()) {
- NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> *vfDelegate =
- (NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> *)m_service->videoOutput()->captureDelegate();
- if (vfDelegate)
- [vfDelegate captureOutput:nil didOutputSampleBuffer:sampleBuffer fromConnection:nil];
- }
+ const QMutexLocker lock(&m_writerMutex);
+ if (m_aborted.load() || m_stopped.load()) {
+ CFRelease(sampleBuffer);
+ return;
+ }
+ // Find renderercontrol's delegate and invoke its method to
+ // show updated viewfinder's frame.
+ if (m_service && m_service->videoOutput()) {
+ NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> *vfDelegate =
+ (NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> *)m_service->videoOutput()->captureDelegate();
+ if (vfDelegate)
+ [vfDelegate captureOutput:nil didOutputSampleBuffer:sampleBuffer fromConnection:nil];
}
dispatch_async(m_writerQueue, ^{
diff --git a/src/plugins/avfoundation/camera/avfmediarecordercontrol_ios.h b/src/plugins/avfoundation/camera/avfmediarecordercontrol_ios.h
index 59af1d4bf..c3fe02c44 100644
--- a/src/plugins/avfoundation/camera/avfmediarecordercontrol_ios.h
+++ b/src/plugins/avfoundation/camera/avfmediarecordercontrol_ios.h
@@ -57,7 +57,7 @@ class AVFCameraService;
class QString;
class QUrl;
-class AVFMediaRecorderControlIOS : public QMediaRecorderControl, public AVFMediaAssetWriterDelegate
+class AVFMediaRecorderControlIOS : public QMediaRecorderControl
{
Q_OBJECT
public:
@@ -82,13 +82,10 @@ public Q_SLOTS:
void setMuted(bool muted) Q_DECL_OVERRIDE;
void setVolume(qreal volume) Q_DECL_OVERRIDE;
- // Writer delegate:
private:
- void assetWriterStarted() Q_DECL_OVERRIDE;
- void assetWriterFailedToStart() Q_DECL_OVERRIDE;
- void assetWriterFailedToStop() Q_DECL_OVERRIDE;
- void assetWriterFinished() Q_DECL_OVERRIDE;
+ Q_INVOKABLE void assetWriterStarted();
+ Q_INVOKABLE void assetWriterFinished();
private Q_SLOTS:
void captureModeChanged(QCamera::CaptureModes);
diff --git a/src/plugins/avfoundation/camera/avfmediarecordercontrol_ios.mm b/src/plugins/avfoundation/camera/avfmediarecordercontrol_ios.mm
index 470cc8034..72386eeda 100644
--- a/src/plugins/avfoundation/camera/avfmediarecordercontrol_ios.mm
+++ b/src/plugins/avfoundation/camera/avfmediarecordercontrol_ios.mm
@@ -92,8 +92,7 @@ AVFMediaRecorderControlIOS::AVFMediaRecorderControlIOS(AVFCameraService *service
return;
}
- m_writer.reset([[QT_MANGLE_NAMESPACE(AVFMediaAssetWriter) alloc] initWithQueue:m_writerQueue
- delegate:this delegateQueue:dispatch_get_main_queue()]);
+ m_writer.reset([[QT_MANGLE_NAMESPACE(AVFMediaAssetWriter) alloc] initWithQueue:m_writerQueue delegate:this]);
if (!m_writer) {
qDebugCamera() << Q_FUNC_INFO << "failed to create an asset writer";
return;
@@ -265,14 +264,6 @@ void AVFMediaRecorderControlIOS::assetWriterStarted()
Q_EMIT statusChanged(QMediaRecorder::RecordingStatus);
}
-void AVFMediaRecorderControlIOS::assetWriterFailedToStart()
-{
-}
-
-void AVFMediaRecorderControlIOS::assetWriterFailedToStop()
-{
-}
-
void AVFMediaRecorderControlIOS::assetWriterFinished()
{
AVFCameraControl *cameraControl = m_service->cameraControl();
diff --git a/src/plugins/coreaudio/coreaudioinput.mm b/src/plugins/coreaudio/coreaudioinput.mm
index 79e6742c2..f7d511d27 100644
--- a/src/plugins/coreaudio/coreaudioinput.mm
+++ b/src/plugins/coreaudio/coreaudioinput.mm
@@ -42,7 +42,7 @@
#include "coreaudioutils.h"
#if defined(Q_OS_OSX)
-# include <CoreServices/CoreServices.h>
+# include <AudioUnit/AudioComponent.h>
#endif
#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
@@ -486,31 +486,15 @@ bool CoreAudioInput::open()
if (m_isOpen)
return true;
-#if defined(Q_OS_OSX)
UInt32 size = 0;
- ComponentDescription componentDescription;
- componentDescription.componentType = kAudioUnitType_Output;
- componentDescription.componentSubType = kAudioUnitSubType_HALOutput;
- componentDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
- componentDescription.componentFlags = 0;
- componentDescription.componentFlagsMask = 0;
-
- // Open
- Component component = FindNextComponent(NULL, &componentDescription);
- if (component == 0) {
- qWarning() << "QAudioInput: Failed to find HAL Output component";
- return false;
- }
-
- if (OpenAComponent(component, &m_audioUnit) != noErr) {
- qWarning() << "QAudioInput: Unable to Open Output Component";
- return false;
- }
-#else //iOS
AudioComponentDescription componentDescription;
componentDescription.componentType = kAudioUnitType_Output;
+#if defined(Q_OS_OSX)
+ componentDescription.componentSubType = kAudioUnitSubType_HALOutput;
+#else
componentDescription.componentSubType = kAudioUnitSubType_RemoteIO;
+#endif
componentDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
componentDescription.componentFlags = 0;
componentDescription.componentFlagsMask = 0;
@@ -525,7 +509,7 @@ bool CoreAudioInput::open()
qWarning() << "QAudioInput: Unable to Open Output Component";
return false;
}
-#endif
+
// Set mode
// switch to input mode
UInt32 enable = 1;
@@ -693,12 +677,7 @@ void CoreAudioInput::close()
if (m_audioUnit != 0) {
AudioOutputUnitStop(m_audioUnit);
AudioUnitUninitialize(m_audioUnit);
-#if defined(Q_OS_OSX)
- CloseComponent(m_audioUnit);
-#else //iOS
AudioComponentInstanceDispose(m_audioUnit);
-#endif
-
}
delete m_audioBuffer;
diff --git a/src/plugins/coreaudio/coreaudiooutput.mm b/src/plugins/coreaudio/coreaudiooutput.mm
index dfac44c2d..900e34e16 100644
--- a/src/plugins/coreaudio/coreaudiooutput.mm
+++ b/src/plugins/coreaudio/coreaudiooutput.mm
@@ -48,7 +48,7 @@
#include <AudioUnit/AudioUnit.h>
#include <AudioToolbox/AudioToolbox.h>
#if defined(Q_OS_OSX)
-# include <CoreServices/CoreServices.h>
+# include <AudioUnit/AudioComponent.h>
#endif
#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
@@ -549,30 +549,13 @@ bool CoreAudioOutput::open()
if (m_isOpen)
return true;
-#if defined(Q_OS_OSX)
- ComponentDescription componentDescription;
- componentDescription.componentType = kAudioUnitType_Output;
- componentDescription.componentSubType = kAudioUnitSubType_HALOutput;
- componentDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
- componentDescription.componentFlags = 0;
- componentDescription.componentFlagsMask = 0;
-
- // Open
- Component component = FindNextComponent(NULL, &componentDescription);
- if (component == 0) {
- qWarning() << "QAudioOutput: Failed to find HAL Output component";
- return false;
- }
-
- if (OpenAComponent(component, &m_audioUnit) != noErr) {
- qWarning() << "QAudioOutput: Unable to Open Output Component";
- return false;
- }
-#else //iOS
-
AudioComponentDescription componentDescription;
componentDescription.componentType = kAudioUnitType_Output;
+#if defined(Q_OS_OSX)
+ componentDescription.componentSubType = kAudioUnitSubType_HALOutput;
+#else
componentDescription.componentSubType = kAudioUnitSubType_RemoteIO;
+#endif
componentDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
componentDescription.componentFlags = 0;
componentDescription.componentFlagsMask = 0;
@@ -587,7 +570,6 @@ bool CoreAudioOutput::open()
qWarning() << "QAudioOutput: Unable to Open Output Component";
return false;
}
-#endif
// register callback
AURenderCallbackStruct callback;
@@ -679,11 +661,7 @@ void CoreAudioOutput::close()
if (m_audioUnit != 0) {
AudioOutputUnitStop(m_audioUnit);
AudioUnitUninitialize(m_audioUnit);
-#if defined(Q_OS_OSX)
- CloseComponent(m_audioUnit);
-#else //iOS
AudioComponentInstanceDispose(m_audioUnit);
-#endif
}
delete m_audioBuffer;
diff --git a/src/plugins/directshow/player/directshowplayerservice.cpp b/src/plugins/directshow/player/directshowplayerservice.cpp
index 05ea3e68f..fa5ce3d55 100644
--- a/src/plugins/directshow/player/directshowplayerservice.cpp
+++ b/src/plugins/directshow/player/directshowplayerservice.cpp
@@ -69,6 +69,7 @@
#include <QtCore/qcoreapplication.h>
#include <QtCore/qdatetime.h>
+#include <QtCore/qdir.h>
#include <QtCore/qthread.h>
#include <QtCore/qvarlengtharray.h>
@@ -353,8 +354,10 @@ void DirectShowPlayerService::doSetUrlSource(QMutexLocker *locker)
if (!SUCCEEDED(hr)) {
locker->unlock();
+ const QString urlString = m_url.isLocalFile()
+ ? QDir::toNativeSeparators(m_url.toLocalFile()) : m_url.toString();
hr = m_graph->AddSourceFilter(
- reinterpret_cast<const OLECHAR *>(m_url.toString().utf16()), L"Source", &source);
+ reinterpret_cast<const OLECHAR *>(urlString.utf16()), L"Source", &source);
locker->relock();
}
diff --git a/src/plugins/gstreamer/camerabin/camerabinlocks.cpp b/src/plugins/gstreamer/camerabin/camerabinlocks.cpp
index 9aac3ef5a..2ccc1b0bc 100644
--- a/src/plugins/gstreamer/camerabin/camerabinlocks.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinlocks.cpp
@@ -71,9 +71,12 @@ QCamera::LockTypes CameraBinLocks::supportedLocks() const
if (GstPhotography *photography = m_session->photography()) {
if (gst_photography_get_capabilities(photography) & GST_PHOTOGRAPHY_CAPS_WB_MODE)
locks |= QCamera::LockWhiteBalance;
- if (g_object_class_find_property(
- G_OBJECT_GET_CLASS(m_session->cameraSource()), "exposure-mode")) {
- locks |= QCamera::LockExposure;
+
+ if (GstElement *source = m_session->cameraSource()) {
+ if (g_object_class_find_property(
+ G_OBJECT_GET_CLASS(source), "exposure-mode")) {
+ locks |= QCamera::LockExposure;
+ }
}
}
#endif
@@ -201,9 +204,13 @@ bool CameraBinLocks::isExposureLocked() const
void CameraBinLocks::lockExposure(QCamera::LockChangeReason reason)
{
+ GstElement *source = m_session->cameraSource();
+ if (!source)
+ return;
+
m_pendingLocks &= ~QCamera::LockExposure;
g_object_set(
- G_OBJECT(m_session->cameraSource()),
+ G_OBJECT(source),
"exposure-mode",
GST_PHOTOGRAPHY_EXPOSURE_MODE_MANUAL,
NULL);
@@ -212,8 +219,12 @@ void CameraBinLocks::lockExposure(QCamera::LockChangeReason reason)
void CameraBinLocks::unlockExposure(QCamera::LockStatus status, QCamera::LockChangeReason reason)
{
+ GstElement *source = m_session->cameraSource();
+ if (!source)
+ return;
+
g_object_set(
- G_OBJECT(m_session->cameraSource()),
+ G_OBJECT(source),
"exposure-mode",
GST_PHOTOGRAPHY_EXPOSURE_MODE_AUTO,
NULL);
diff --git a/src/plugins/gstreamer/camerabin/camerabinsession.cpp b/src/plugins/gstreamer/camerabin/camerabinsession.cpp
index 3e5182ff2..3cb4c6161 100644
--- a/src/plugins/gstreamer/camerabin/camerabinsession.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinsession.cpp
@@ -520,7 +520,8 @@ GstElement *CameraBinSession::buildCameraSource()
if (!m_videoSrc)
m_videoSrc = gst_element_factory_make("v4l2src", "camera_source");
- g_object_set(G_OBJECT(m_cameraSrc), "video-source", m_videoSrc, NULL);
+ if (m_videoSrc)
+ g_object_set(G_OBJECT(m_cameraSrc), "video-source", m_videoSrc, NULL);
}
if (m_videoSrc)
diff --git a/src/plugins/gstreamer/camerabin/camerabinsession.h b/src/plugins/gstreamer/camerabin/camerabinsession.h
index e8f41f4db..44faaf701 100644
--- a/src/plugins/gstreamer/camerabin/camerabinsession.h
+++ b/src/plugins/gstreamer/camerabin/camerabinsession.h
@@ -97,7 +97,7 @@ public:
GstPhotography *photography();
#endif
GstElement *cameraBin() { return m_camerabin; }
- GstElement *cameraSource() { return m_videoSrc; }
+ GstElement *cameraSource() { return m_cameraSrc; }
QGstreamerBusHelper *bus() { return m_busHelper; }
QList< QPair<int,int> > supportedFrameRates(const QSize &frameSize, bool *continuous) const;
diff --git a/src/plugins/qnx/common/windowgrabber.cpp b/src/plugins/qnx/common/windowgrabber.cpp
index ce5b45298..ca7224a4f 100644
--- a/src/plugins/qnx/common/windowgrabber.cpp
+++ b/src/plugins/qnx/common/windowgrabber.cpp
@@ -43,47 +43,50 @@
#include <QDebug>
#include <QGuiApplication>
#include <QImage>
+#include <QThread>
#include <qpa/qplatformnativeinterface.h>
#include <QOpenGLContext>
#include <QOpenGLFunctions>
-#ifdef Q_OS_BLACKBERRY
-#include <bps/event.h>
-#include <bps/screen.h>
-#endif
#include <errno.h>
QT_BEGIN_NAMESPACE
+static PFNEGLCREATEIMAGEKHRPROC s_eglCreateImageKHR;
+static PFNEGLDESTROYIMAGEKHRPROC s_eglDestroyImageKHR;
+
WindowGrabber::WindowGrabber(QObject *parent)
: QObject(parent),
- m_screenBufferWidth(-1),
- m_screenBufferHeight(-1),
+ m_screenContext(0),
m_active(false),
- m_screenContextInitialized(false),
- m_screenPixmapBuffersInitialized(false),
m_currentFrame(0),
m_eglImageSupported(false),
- m_eglImagesInitialized(false),
m_eglImageCheck(false)
{
// grab the window frame with 60 frames per second
m_timer.setInterval(1000/60);
- connect(&m_timer, SIGNAL(timeout()), SLOT(grab()));
+ connect(&m_timer, SIGNAL(timeout()), SLOT(triggerUpdate()));
QCoreApplication::eventDispatcher()->installNativeEventFilter(this);
+
+ for ( int i = 0; i < 2; ++i )
+ m_images[i] = 0;
+
+ // Use of EGL images can be disabled by setting QQNX_MM_DISABLE_EGLIMAGE_SUPPORT to something
+ // non-zero. This is probably useful only to test that this path still works since it results
+ // in a high CPU load.
+ if (!s_eglCreateImageKHR && qgetenv("QQNX_MM_DISABLE_EGLIMAGE_SUPPORT").toInt() == 0) {
+ s_eglCreateImageKHR = reinterpret_cast<PFNEGLCREATEIMAGEKHRPROC>(eglGetProcAddress("eglCreateImageKHR"));
+ s_eglDestroyImageKHR = reinterpret_cast<PFNEGLDESTROYIMAGEKHRPROC>(eglGetProcAddress("eglDestroyImageKHR"));
+ }
}
WindowGrabber::~WindowGrabber()
{
QCoreApplication::eventDispatcher()->removeNativeEventFilter(this);
- if (eglImagesInitialized()) {
- glDeleteTextures(2, imgTextures);
- eglDestroyImageKHR(eglGetDisplay(EGL_DEFAULT_DISPLAY), img[0]);
- eglDestroyImageKHR(eglGetDisplay(EGL_DEFAULT_DISPLAY), img[1]);
- }
+ cleanup();
}
void WindowGrabber::setFrameRate(int frameRate)
@@ -91,36 +94,6 @@ void WindowGrabber::setFrameRate(int frameRate)
m_timer.setInterval(1000/frameRate);
}
-void WindowGrabber::createEglImages()
-{
- // Do nothing if either egl images are not supported, the screen context is not valid
- // or the images are already created
- if (!eglImageSupported() || !m_screenContextInitialized || eglImagesInitialized())
- return;
-
- glGenTextures(2, imgTextures);
- glBindTexture(GL_TEXTURE_2D, imgTextures[0]);
- img[0] = eglCreateImageKHR(eglGetDisplay(EGL_DEFAULT_DISPLAY), EGL_NO_CONTEXT,
- EGL_NATIVE_PIXMAP_KHR,
- m_screenPixmaps[0],
- 0);
- glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, img[0]);
-
- glBindTexture(GL_TEXTURE_2D, imgTextures[1]);
- img[1] = eglCreateImageKHR(eglGetDisplay(EGL_DEFAULT_DISPLAY), EGL_NO_CONTEXT,
- EGL_NATIVE_PIXMAP_KHR,
- m_screenPixmaps[1],
- 0);
-
- glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, img[1]);
-
- if (img[0] == 0 || img[1] == 0) {
- qWarning() << "Failed to create KHR images" << img[0] << img[1] << strerror(errno) << errno;
- m_eglImageSupported = false;
- } else {
- m_eglImagesInitialized = true;
- }
-}
void WindowGrabber::setWindowId(const QByteArray &windowId)
{
@@ -132,107 +105,14 @@ void WindowGrabber::start()
if (m_active)
return;
- int result = 0;
-
- result = screen_create_context(&m_screenContext, SCREEN_APPLICATION_CONTEXT);
- if (result != 0) {
- qWarning() << "WindowGrabber: cannot create screen context:" << strerror(errno);
- return;
- } else {
- m_screenContextInitialized = true;
- }
-
- result = screen_create_pixmap(&m_screenPixmaps[0], m_screenContext);
- result = screen_create_pixmap(&m_screenPixmaps[1], m_screenContext);
- if (result != 0) {
- cleanup();
- qWarning() << "WindowGrabber: cannot create pixmaps:" << strerror(errno);
- return;
- }
-
- const int usage = SCREEN_USAGE_NATIVE;
- result = screen_set_pixmap_property_iv(m_screenPixmaps[0], SCREEN_PROPERTY_USAGE, &usage);
- result |= screen_set_pixmap_property_iv(m_screenPixmaps[1], SCREEN_PROPERTY_USAGE, &usage);
-
- const int format = SCREEN_FORMAT_RGBX8888;
- screen_set_pixmap_property_iv(m_screenPixmaps[0], SCREEN_PROPERTY_FORMAT, &format);
- screen_set_pixmap_property_iv(m_screenPixmaps[1], SCREEN_PROPERTY_FORMAT, &format);
-
- int size[2] = { 0, 0 };
-
- result = screen_get_window_property_iv(m_window, SCREEN_PROPERTY_SOURCE_SIZE, size);
- if (result != 0) {
- cleanup();
- qWarning() << "WindowGrabber: cannot get window size:" << strerror(errno);
- return;
- }
-
- m_screenBufferWidth = size[0];
- m_screenBufferHeight = size[1];
-
- updateFrameSize();
+ if (!m_screenContext)
+ screen_get_window_property_pv(m_window, SCREEN_PROPERTY_CONTEXT, reinterpret_cast<void**>(&m_screenContext));
m_timer.start();
m_active = true;
}
-void WindowGrabber::updateFrameSize()
-{
- int size[2] = { m_screenBufferWidth, m_screenBufferHeight };
-
- screen_set_pixmap_property_iv(m_screenPixmaps[0], SCREEN_PROPERTY_BUFFER_SIZE, size);
- if (eglImageSupported())
- screen_set_pixmap_property_iv(m_screenPixmaps[1], SCREEN_PROPERTY_BUFFER_SIZE, size);
-
- int result = screen_create_pixmap_buffer(m_screenPixmaps[0]);
- if (eglImageSupported())
- result |= screen_create_pixmap_buffer(m_screenPixmaps[1]);
-
- if (result != 0) {
- cleanup();
- qWarning() << "WindowGrabber: cannot create pixmap buffer:" << strerror(errno);
- return;
- } else {
- m_screenPixmapBuffersInitialized = true;
- }
-
- result = screen_get_pixmap_property_pv(m_screenPixmaps[0], SCREEN_PROPERTY_RENDER_BUFFERS,
- (void**)&m_screenPixmapBuffers[0]);
- if (eglImageSupported()) {
- result |= screen_get_pixmap_property_pv(m_screenPixmaps[1], SCREEN_PROPERTY_RENDER_BUFFERS,
- (void**)&m_screenPixmapBuffers[1]);
- }
-
- if (result != 0) {
- cleanup();
- qWarning() << "WindowGrabber: cannot get pixmap buffer:" << strerror(errno);
- return;
- }
-
- result = screen_get_buffer_property_pv(m_screenPixmapBuffers[0], SCREEN_PROPERTY_POINTER,
- (void**)&m_screenBuffers[0]);
- if (eglImageSupported()) {
- result |= screen_get_buffer_property_pv(m_screenPixmapBuffers[1], SCREEN_PROPERTY_POINTER,
- (void**)&m_screenBuffers[1]);
- }
-
- if (result != 0) {
- cleanup();
- qWarning() << "WindowGrabber: cannot get pixmap buffer pointer:" << strerror(errno);
- return;
- }
-
- result = screen_get_buffer_property_iv(m_screenPixmapBuffers[0], SCREEN_PROPERTY_STRIDE,
- &m_screenBufferStride);
-
- if (result != 0) {
- cleanup();
- qWarning() << "WindowGrabber: cannot get pixmap buffer stride:" << strerror(errno);
- return;
- }
-}
-
void WindowGrabber::stop()
{
if (!m_active)
@@ -294,20 +174,10 @@ bool WindowGrabber::handleScreenEvent(screen_event_t screen_event)
bool WindowGrabber::nativeEventFilter(const QByteArray &eventType, void *message, long*)
{
-#ifdef Q_OS_BLACKBERRY
- Q_UNUSED(eventType)
- bps_event_t * const event = static_cast<bps_event_t *>(message);
-
- if (event && bps_event_get_domain(event) == screen_get_domain()) {
- const screen_event_t screen_event = screen_event_get_event(event);
- return handleScreenEvent(screen_event);
- }
-#else
if (eventType == "screen_event_t") {
const screen_event_t event = static_cast<screen_event_t>(message);
return handleScreenEvent(event);
}
-#endif
return false;
}
@@ -348,7 +218,8 @@ void WindowGrabber::checkForEglImageExtension()
QByteArray eglExtensions = QByteArray(eglQueryString(eglGetDisplay(EGL_DEFAULT_DISPLAY),
EGL_EXTENSIONS));
m_eglImageSupported = m_context->hasExtension(QByteArrayLiteral("GL_OES_EGL_image"))
- && eglExtensions.contains(QByteArrayLiteral("EGL_KHR_image"));
+ && eglExtensions.contains(QByteArrayLiteral("EGL_KHR_image"))
+ && s_eglCreateImageKHR && s_eglDestroyImageKHR;
if (strstr(reinterpret_cast<const char*>(glGetString(GL_VENDOR)), "VMware"))
m_eglImageSupported = false;
@@ -356,21 +227,11 @@ void WindowGrabber::checkForEglImageExtension()
m_eglImageCheck = true;
}
-bool WindowGrabber::eglImagesInitialized()
-{
- return m_eglImagesInitialized;
-}
-
-void WindowGrabber::grab()
+void WindowGrabber::triggerUpdate()
{
if (!m_eglImageCheck) // We did not check for egl images yet
return;
- if (eglImageSupported())
- m_currentFrame = (m_currentFrame + 1) % 2;
- else
- m_currentFrame = 0;
-
int size[2] = { 0, 0 };
int result = screen_get_window_property_iv(m_window, SCREEN_PROPERTY_SOURCE_SIZE, size);
@@ -380,40 +241,173 @@ void WindowGrabber::grab()
return;
}
- if (m_screenBufferWidth != size[0] || m_screenBufferHeight != size[1]) {
- // The source viewport size changed, so we have to adapt our buffers
+ if (m_size.width() != size[0] || m_size.height() != size[1])
+ m_size = QSize(size[0], size[1]);
- if (m_screenPixmapBuffersInitialized) {
- screen_destroy_pixmap_buffer(m_screenPixmaps[0]);
- if (eglImageSupported())
- screen_destroy_pixmap_buffer(m_screenPixmaps[1]);
- }
+ emit updateScene(m_size);
+}
- m_screenBufferWidth = size[0];
- m_screenBufferHeight = size[1];
+bool WindowGrabber::selectBuffer()
+{
+ // If we're using egl images we need to double buffer since the gpu may still be using the last
+ // video frame. If we're not, it doesn't matter since the data is immediately copied.
+ if (eglImageSupported())
+ m_currentFrame = (m_currentFrame + 1) % 2;
- updateFrameSize();
- m_eglImagesInitialized = false;
+ if (!m_images[m_currentFrame]) {
+ m_images[m_currentFrame] = new WindowGrabberImage();
+ if (!m_images[m_currentFrame]->initialize(m_screenContext)) {
+ delete m_images[m_currentFrame];
+ m_images[m_currentFrame] = 0;
+ return false;
+ }
}
+ return true;
+}
- const int rect[] = { 0, 0, m_screenBufferWidth, m_screenBufferHeight };
- result = screen_read_window(m_window, m_screenPixmapBuffers[m_currentFrame], 1, rect, 0);
- if (result != 0)
- return;
+int WindowGrabber::getNextTextureId()
+{
+ if (!selectBuffer())
+ return 0;
+ return m_images[m_currentFrame]->getTexture(m_window, m_size);
+}
- const QImage frame((unsigned char*)m_screenBuffers[m_currentFrame], m_screenBufferWidth,
- m_screenBufferHeight, m_screenBufferStride, QImage::Format_ARGB32);
+QImage WindowGrabber::getNextImage()
+{
+ if (!selectBuffer())
+ return QImage();
- emit frameGrabbed(frame, imgTextures[m_currentFrame]);
+ return m_images[m_currentFrame]->getImage(m_window, m_size);
}
void WindowGrabber::cleanup()
{
- //We only need to destroy the context as it frees all resources associated with it
- if (m_screenContextInitialized) {
- screen_destroy_context(m_screenContext);
- m_screenContextInitialized = false;
+ for ( int i = 0; i < 2; ++i ) {
+ if (m_images[i]) {
+ m_images[i]->destroy();
+ m_images[i] = 0;
+ }
+ }
+}
+
+
+WindowGrabberImage::WindowGrabberImage()
+ : m_pixmap(0), m_pixmapBuffer(0), m_eglImage(0), m_glTexture(0), m_bufferAddress(0), m_bufferStride(0)
+{
+}
+
+WindowGrabberImage::~WindowGrabberImage()
+{
+ if (m_glTexture)
+ glDeleteTextures(1, &m_glTexture);
+ if (m_eglImage)
+ s_eglDestroyImageKHR(eglGetDisplay(EGL_DEFAULT_DISPLAY), m_eglImage);
+ if (m_pixmap)
+ screen_destroy_pixmap(m_pixmap);
+}
+
+bool
+WindowGrabberImage::initialize(screen_context_t screenContext)
+{
+ if (screen_create_pixmap(&m_pixmap, screenContext) != 0) {
+ qWarning() << "WindowGrabber: cannot create pixmap:" << strerror(errno);
+ return false;
+ }
+ const int usage = SCREEN_USAGE_WRITE | SCREEN_USAGE_READ | SCREEN_USAGE_NATIVE;
+ screen_set_pixmap_property_iv(m_pixmap, SCREEN_PROPERTY_USAGE, &usage);
+
+ const int format = SCREEN_FORMAT_RGBX8888;
+ screen_set_pixmap_property_iv(m_pixmap, SCREEN_PROPERTY_FORMAT, &format);
+
+ return true;
+}
+
+void
+WindowGrabberImage::destroy()
+{
+ // We want to delete in the thread we were created in since we need the thread that
+ // has called eglMakeCurrent on the right EGL context. This doesn't actually guarantee
+ // this but that would be hard to achieve and in practice it works.
+ if (QThread::currentThread() == thread())
+ delete this;
+ else
+ deleteLater();
+}
+
+bool
+WindowGrabberImage::resize(const QSize &newSize)
+{
+ if (m_pixmapBuffer) {
+ screen_destroy_pixmap_buffer(m_pixmap);
+ m_pixmapBuffer = 0;
+ m_bufferAddress = 0;
+ m_bufferStride = 0;
+ }
+
+ int size[2] = { newSize.width(), newSize.height() };
+
+ screen_set_pixmap_property_iv(m_pixmap, SCREEN_PROPERTY_BUFFER_SIZE, size);
+
+ if (screen_create_pixmap_buffer(m_pixmap) == 0) {
+ screen_get_pixmap_property_pv(m_pixmap, SCREEN_PROPERTY_RENDER_BUFFERS,
+ reinterpret_cast<void**>(&m_pixmapBuffer));
+ screen_get_buffer_property_pv(m_pixmapBuffer, SCREEN_PROPERTY_POINTER,
+ reinterpret_cast<void**>(&m_bufferAddress));
+ screen_get_buffer_property_iv(m_pixmapBuffer, SCREEN_PROPERTY_STRIDE, &m_bufferStride);
+ m_size = newSize;
+
+ return true;
+ } else {
+ m_size = QSize();
+ return false;
}
}
+bool
+WindowGrabberImage::grab(screen_window_t window)
+{
+ const int rect[] = { 0, 0, m_size.width(), m_size.height() };
+ return screen_read_window(window, m_pixmapBuffer, 1, rect, 0) == 0;
+}
+
+QImage
+WindowGrabberImage::getImage(screen_window_t window, const QSize &size)
+{
+ if (size != m_size) {
+ if (!resize(size))
+ return QImage();
+ }
+ if (!m_bufferAddress || !grab(window))
+ return QImage();
+
+ return QImage(m_bufferAddress, m_size.width(), m_size.height(), m_bufferStride, QImage::Format_ARGB32);
+}
+
+GLuint
+WindowGrabberImage::getTexture(screen_window_t window, const QSize &size)
+{
+ if (size != m_size) {
+ if (!m_glTexture)
+ glGenTextures(1, &m_glTexture);
+ glBindTexture(GL_TEXTURE_2D, m_glTexture);
+ if (m_eglImage) {
+ glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, 0);
+ s_eglDestroyImageKHR(eglGetDisplay(EGL_DEFAULT_DISPLAY), m_eglImage);
+ m_eglImage = 0;
+ }
+ if (!resize(size))
+ return 0;
+ m_eglImage = s_eglCreateImageKHR(eglGetDisplay(EGL_DEFAULT_DISPLAY), EGL_NO_CONTEXT,
+ EGL_NATIVE_PIXMAP_KHR, m_pixmap, 0);
+ glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, m_eglImage);
+ }
+
+ if (!m_pixmap || !grab(window))
+ return 0;
+
+ return m_glTexture;
+}
+
+
+
QT_END_NAMESPACE
diff --git a/src/plugins/qnx/common/windowgrabber.h b/src/plugins/qnx/common/windowgrabber.h
index 424fa35fe..b2d43a27b 100644
--- a/src/plugins/qnx/common/windowgrabber.h
+++ b/src/plugins/qnx/common/windowgrabber.h
@@ -47,12 +47,41 @@
#include <EGL/eglext.h>
#include <QAbstractNativeEventFilter>
#include <QObject>
+#include <QSize>
#include <QTimer>
#include <screen/screen.h>
QT_BEGIN_NAMESPACE
+class WindowGrabberImage : public QObject
+{
+ Q_OBJECT
+
+public:
+ WindowGrabberImage();
+ ~WindowGrabberImage();
+
+ bool initialize(screen_context_t screenContext);
+
+ void destroy();
+
+ QImage getImage(screen_window_t window, const QSize &size);
+ GLuint getTexture(screen_window_t window, const QSize &size);
+
+private:
+ bool grab(screen_window_t window);
+ bool resize(const QSize &size);
+
+ QSize m_size;
+ screen_pixmap_t m_pixmap;
+ screen_buffer_t m_pixmapBuffer;
+ EGLImageKHR m_eglImage;
+ GLuint m_glTexture;
+ unsigned char *m_bufferAddress;
+ int m_bufferStride;
+};
+
class WindowGrabber : public QObject, public QAbstractNativeEventFilter
{
Q_OBJECT
@@ -63,8 +92,6 @@ public:
void setFrameRate(int frameRate);
- void createEglImages();
-
void setWindowId(const QByteArray &windowId);
void start();
@@ -81,17 +108,19 @@ public:
bool eglImageSupported();
void checkForEglImageExtension();
- bool eglImagesInitialized();
+
+ int getNextTextureId();
+ QImage getNextImage();
signals:
- void frameGrabbed(const QImage &frame, int);
+ void updateScene(const QSize &size);
private slots:
- void grab();
+ void triggerUpdate();
private:
+ bool selectBuffer();
void cleanup();
- void updateFrameSize();
QTimer m_timer;
@@ -99,24 +128,14 @@ private:
screen_window_t m_window;
screen_context_t m_screenContext;
- screen_pixmap_t m_screenPixmaps[2];
- screen_buffer_t m_screenPixmapBuffers[2];
-
- char *m_screenBuffers[2];
- int m_screenBufferWidth;
- int m_screenBufferHeight;
- int m_screenBufferStride;
+ WindowGrabberImage *m_images[2];
+ QSize m_size;
- bool m_active : 1;
- bool m_screenContextInitialized : 1;
- bool m_screenPixmapBuffersInitialized : 1;
+ bool m_active;
int m_currentFrame;
- EGLImageKHR img[2];
- GLuint imgTextures[2];
- bool m_eglImageSupported : 1;
- bool m_eglImagesInitialized : 1;
- bool m_eglImageCheck : 1; // We must not send a grabed frame before this is true
+ bool m_eglImageSupported;
+ bool m_eglImageCheck; // We must not send a grabbed frame before this is true
};
QT_END_NAMESPACE
diff --git a/src/plugins/qnx/mediaplayer/mmrendererplayervideorenderercontrol.cpp b/src/plugins/qnx/mediaplayer/mmrendererplayervideorenderercontrol.cpp
index 7bb3c284d..3b5715157 100644
--- a/src/plugins/qnx/mediaplayer/mmrendererplayervideorenderercontrol.cpp
+++ b/src/plugins/qnx/mediaplayer/mmrendererplayervideorenderercontrol.cpp
@@ -58,7 +58,7 @@ MmRendererPlayerVideoRendererControl::MmRendererPlayerVideoRendererControl(QObje
, m_context(0)
, m_videoId(-1)
{
- connect(m_windowGrabber, SIGNAL(frameGrabbed(QImage, int)), SLOT(frameGrabbed(QImage, int)));
+ connect(m_windowGrabber, SIGNAL(updateScene(const QSize &)), SLOT(updateScene(const QSize &)));
}
MmRendererPlayerVideoRendererControl::~MmRendererPlayerVideoRendererControl()
@@ -142,13 +142,14 @@ void MmRendererPlayerVideoRendererControl::resume()
m_windowGrabber->resume();
}
-class BBTextureBuffer : public QAbstractVideoBuffer
+class QnxTextureBuffer : public QAbstractVideoBuffer
{
public:
- BBTextureBuffer(int handle) :
+ QnxTextureBuffer(WindowGrabber *windowGrabber) :
QAbstractVideoBuffer(QAbstractVideoBuffer::GLTextureHandle)
{
- m_handle = handle;
+ m_windowGrabber = windowGrabber;
+ m_handle = 0;
}
MapMode mapMode() const {
return QAbstractVideoBuffer::ReadWrite;
@@ -163,54 +164,46 @@ public:
return 0;
}
QVariant handle() const {
+ if (!m_handle) {
+ const_cast<QnxTextureBuffer*>(this)->m_handle = m_windowGrabber->getNextTextureId();
+ }
return m_handle;
}
private:
+ WindowGrabber *m_windowGrabber;
int m_handle;
};
-void MmRendererPlayerVideoRendererControl::frameGrabbed(const QImage &frame, int handle)
+void MmRendererPlayerVideoRendererControl::updateScene(const QSize &size)
{
if (m_surface) {
if (!m_surface->isActive()) {
if (m_windowGrabber->eglImageSupported()) {
- if (QOpenGLContext::currentContext())
- m_windowGrabber->createEglImages();
- else
- m_surface->setProperty("_q_GLThreadCallback", QVariant::fromValue<QObject*>(this));
-
- m_surface->start(QVideoSurfaceFormat(frame.size(), QVideoFrame::Format_BGR32,
- QAbstractVideoBuffer::GLTextureHandle));
+ m_surface->start(QVideoSurfaceFormat(size, QVideoFrame::Format_BGR32,
+ QAbstractVideoBuffer::GLTextureHandle));
} else {
- m_surface->start(QVideoSurfaceFormat(frame.size(), QVideoFrame::Format_ARGB32));
+ m_surface->start(QVideoSurfaceFormat(size, QVideoFrame::Format_ARGB32));
}
} else {
- if (m_surface->surfaceFormat().frameSize() != frame.size()) {
- QAbstractVideoBuffer::HandleType type = m_surface->surfaceFormat().handleType();
+ if (m_surface->surfaceFormat().frameSize() != size) {
m_surface->stop();
- if (type != QAbstractVideoBuffer::NoHandle) {
- m_surface->setProperty("_q_GLThreadCallback", QVariant::fromValue<QObject*>(this));
- m_surface->start(QVideoSurfaceFormat(frame.size(), QVideoFrame::Format_BGR32,
- QAbstractVideoBuffer::GLTextureHandle));
+ if (m_windowGrabber->eglImageSupported()) {
+ m_surface->start(QVideoSurfaceFormat(size, QVideoFrame::Format_BGR32,
+ QAbstractVideoBuffer::GLTextureHandle));
} else {
- m_surface->start(QVideoSurfaceFormat(frame.size(), QVideoFrame::Format_ARGB32));
+ m_surface->start(QVideoSurfaceFormat(size, QVideoFrame::Format_ARGB32));
}
}
}
// Depending on the support of EGL images on the current platform we either pass a texture
// handle or a copy of the image data
- if (m_surface->surfaceFormat().handleType() != QAbstractVideoBuffer::NoHandle) {
- if (m_windowGrabber->eglImagesInitialized() &&
- m_surface->property("_q_GLThreadCallback") != 0)
- m_surface->setProperty("_q_GLThreadCallback", 0);
-
-
- BBTextureBuffer *textBuffer = new BBTextureBuffer(handle);
- QVideoFrame actualFrame(textBuffer, frame.size(), QVideoFrame::Format_BGR32);
+ if (m_windowGrabber->eglImageSupported()) {
+ QnxTextureBuffer *textBuffer = new QnxTextureBuffer(m_windowGrabber);
+ QVideoFrame actualFrame(textBuffer, size, QVideoFrame::Format_BGR32);
m_surface->present(actualFrame);
} else {
- m_surface->present(frame.copy());
+ m_surface->present(m_windowGrabber->getNextImage().copy());
}
}
}
@@ -220,8 +213,6 @@ void MmRendererPlayerVideoRendererControl::customEvent(QEvent *e)
// This is running in the render thread (OpenGL enabled)
if (e->type() == QEvent::User)
m_windowGrabber->checkForEglImageExtension();
- else if (e->type() == QEvent::User + 1)
- m_windowGrabber->createEglImages();
}
QT_END_NAMESPACE
diff --git a/src/plugins/qnx/mediaplayer/mmrendererplayervideorenderercontrol.h b/src/plugins/qnx/mediaplayer/mmrendererplayervideorenderercontrol.h
index 2d1559260..878aa4bb0 100644
--- a/src/plugins/qnx/mediaplayer/mmrendererplayervideorenderercontrol.h
+++ b/src/plugins/qnx/mediaplayer/mmrendererplayervideorenderercontrol.h
@@ -68,7 +68,7 @@ public:
void customEvent(QEvent *) Q_DECL_OVERRIDE;
private Q_SLOTS:
- void frameGrabbed(const QImage &frame, int);
+ void updateScene(const QSize &size);
private:
QPointer<QAbstractVideoSurface> m_surface;
diff --git a/src/plugins/v4l/radio/v4lradiocontrol.h b/src/plugins/v4l/radio/v4lradiocontrol.h
index c19fc9c9c..ce68ebeec 100644
--- a/src/plugins/v4l/radio/v4lradiocontrol.h
+++ b/src/plugins/v4l/radio/v4lradiocontrol.h
@@ -46,7 +46,11 @@
#include <qradiotunercontrol.h>
+#if defined(Q_OS_FREEBSD)
+#include <sys/types.h>
+#else
#include <linux/types.h>
+#endif
#include <sys/time.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>
diff --git a/src/plugins/winrt/qwinrtabstractvideorenderercontrol.h b/src/plugins/winrt/qwinrtabstractvideorenderercontrol.h
index 70227c53c..bfef55fc7 100644
--- a/src/plugins/winrt/qwinrtabstractvideorenderercontrol.h
+++ b/src/plugins/winrt/qwinrtabstractvideorenderercontrol.h
@@ -68,8 +68,6 @@ public:
void setScanLineDirection(QVideoSurfaceFormat::Direction direction);
- void setActive(bool active);
-
BlitMode blitMode() const;
void setBlitMode(BlitMode mode);
@@ -78,6 +76,9 @@ public:
static ID3D11Device *d3dDevice();
+public slots:
+ void setActive(bool active);
+
protected:
void shutdown();
diff --git a/src/plugins/winrt/qwinrtcameracontrol.cpp b/src/plugins/winrt/qwinrtcameracontrol.cpp
index d209d9394..40946270a 100644
--- a/src/plugins/winrt/qwinrtcameracontrol.cpp
+++ b/src/plugins/winrt/qwinrtcameracontrol.cpp
@@ -45,7 +45,9 @@
#include <QtCore/qfunctions_winrt.h>
#include <QtCore/QPointer>
#include <QtGui/QGuiApplication>
+#include <private/qeventdispatcher_winrt_p.h>
+#include <functional>
#include <mfapi.h>
#include <mferror.h>
#include <mfidl.h>
@@ -81,6 +83,8 @@ QT_BEGIN_NAMESPACE
#define FOCUS_RECT_POSITION_MAX 0.995f // FOCUS_RECT_BOUNDARY - FOCUS_RECT_HALF_SIZE
#define ASPECTRATIO_EPSILON 0.01f
+Q_LOGGING_CATEGORY(lcMMCamera, "qt.mm.camera")
+
HRESULT getMediaStreamResolutions(IMediaDeviceController *device,
MediaStreamType type,
IVectorView<IMediaEncodingProperties *> **propertiesList,
@@ -444,7 +448,7 @@ public:
HRESULT __stdcall Shutdown() Q_DECL_OVERRIDE
{
m_stream->Flush();
- m_videoRenderer->setActive(false);
+ scheduleSetActive(false);
return m_presentationClock ? m_presentationClock->Stop() : S_OK;
}
@@ -453,7 +457,7 @@ public:
Q_UNUSED(systemTime);
Q_UNUSED(clockStartOffset);
- m_videoRenderer->setActive(true);
+ scheduleSetActive(true);
return S_OK;
}
@@ -462,7 +466,7 @@ public:
{
Q_UNUSED(systemTime);
- m_videoRenderer->setActive(false);
+ scheduleSetActive(false);
return m_stream->QueueEvent(MEStreamSinkStopped, GUID_NULL, S_OK, Q_NULLPTR);
}
@@ -471,7 +475,7 @@ public:
{
Q_UNUSED(systemTime);
- m_videoRenderer->setActive(false);
+ scheduleSetActive(false);
return m_stream->QueueEvent(MEStreamSinkPaused, GUID_NULL, S_OK, Q_NULLPTR);
}
@@ -480,7 +484,7 @@ public:
{
Q_UNUSED(systemTime);
- m_videoRenderer->setActive(true);
+ scheduleSetActive(true);
return m_stream->QueueEvent(MEStreamSinkStarted, GUID_NULL, S_OK, Q_NULLPTR);
}
@@ -493,6 +497,12 @@ public:
}
private:
+
+ inline void scheduleSetActive(bool active)
+ {
+ QMetaObject::invokeMethod(m_videoRenderer, "setActive", Qt::QueuedConnection, Q_ARG(bool, active));
+ }
+
ComPtr<MediaStream> m_stream;
ComPtr<IMFPresentationClock> m_presentationClock;
@@ -532,6 +542,7 @@ public:
QWinRTCameraControl::QWinRTCameraControl(QObject *parent)
: QCameraControl(parent), d_ptr(new QWinRTCameraControlPrivate)
{
+ qCDebug(lcMMCamera) << __FUNCTION__ << parent;
Q_D(QWinRTCameraControl);
d->delayClose = nullptr;
@@ -568,6 +579,7 @@ QCamera::State QWinRTCameraControl::state() const
void QWinRTCameraControl::setState(QCamera::State state)
{
+ qCDebug(lcMMCamera) << __FUNCTION__ << state;
Q_D(QWinRTCameraControl);
if (d->state == state)
@@ -583,16 +595,21 @@ void QWinRTCameraControl::setState(QCamera::State state)
}
Q_ASSERT(d->state == QCamera::LoadedState);
- d->mediaSink = Make<MediaSink>(d->encodingProfile.Get(), d->videoRenderer);
ComPtr<IAsyncAction> op;
- hr = d->capturePreview->StartPreviewToCustomSinkAsync(d->encodingProfile.Get(), d->mediaSink.Get(), &op);
- RETURN_VOID_AND_EMIT_ERROR("Failed to initiate capture");
+ hr = QEventDispatcherWinRT::runOnXamlThread([d, &op]() {
+ d->mediaSink = Make<MediaSink>(d->encodingProfile.Get(), d->videoRenderer);
+ HRESULT hr = d->capturePreview->StartPreviewToCustomSinkAsync(d->encodingProfile.Get(), d->mediaSink.Get(), &op);
+ return hr;
+ });
+ RETURN_VOID_AND_EMIT_ERROR("Failed to initiate capture.");
if (d->status != QCamera::StartingStatus) {
d->status = QCamera::StartingStatus;
emit statusChanged(d->status);
}
- hr = QWinRTFunctions::await(op);
+ hr = QEventDispatcherWinRT::runOnXamlThread([&op]() {
+ return QWinRTFunctions::await(op);
+ });
if (FAILED(hr)) {
emit error(QCamera::CameraError, qt_error_string(hr));
setState(QCamera::UnloadedState); // Unload everything, as initialize() will need be called again
@@ -607,7 +624,7 @@ void QWinRTCameraControl::setState(QCamera::State state)
emit stateChanged(d->state);
d->status = QCamera::ActiveStatus;
emit statusChanged(d->status);
- d->mediaSink->RequestSample();
+ QEventDispatcherWinRT::runOnXamlThread([d]() { d->mediaSink->RequestSample(); return S_OK;});
break;
}
case QCamera::LoadedState: {
@@ -635,20 +652,28 @@ void QWinRTCameraControl::setState(QCamera::State state)
}
ComPtr<IAsyncAction> op;
- hr = d->capturePreview->StopPreviewAsync(&op);
+ hr = QEventDispatcherWinRT::runOnXamlThread([d, &op]() {
+ HRESULT hr = d->capturePreview->StopPreviewAsync(&op);
+ return hr;
+ });
RETURN_VOID_AND_EMIT_ERROR("Failed to stop camera preview");
if (d->status != QCamera::StoppingStatus) {
d->status = QCamera::StoppingStatus;
emit statusChanged(d->status);
}
Q_ASSERT_SUCCEEDED(hr);
- hr = QWinRTFunctions::await(op); // Synchronize unloading
+ hr = QEventDispatcherWinRT::runOnXamlThread([&op]() {
+ return QWinRTFunctions::await(op); // Synchronize unloading
+ });
if (FAILED(hr))
emit error(QCamera::InvalidRequestError, qt_error_string(hr));
if (d->mediaSink) {
+ hr = QEventDispatcherWinRT::runOnXamlThread([d]() {
d->mediaSink->Shutdown();
d->mediaSink.Reset();
+ return S_OK;
+ });
}
d->state = QCamera::LoadedState;
@@ -667,22 +692,27 @@ void QWinRTCameraControl::setState(QCamera::State state)
emit statusChanged(d->status);
}
- if (d->capture && d->captureFailedCookie.value) {
- hr = d->capture->remove_Failed(d->captureFailedCookie);
+ hr = QEventDispatcherWinRT::runOnXamlThread([d]() {
+ HRESULT hr;
+ if (d->capture && d->captureFailedCookie.value) {
+ hr = d->capture->remove_Failed(d->captureFailedCookie);
+ Q_ASSERT_SUCCEEDED(hr);
+ d->captureFailedCookie.value = 0;
+ }
+ if (d->capture && d->recordLimitationCookie.value) {
+ d->capture->remove_RecordLimitationExceeded(d->recordLimitationCookie);
+ Q_ASSERT_SUCCEEDED(hr);
+ d->recordLimitationCookie.value = 0;
+ }
+ ComPtr<IClosable> capture;
+ hr = d->capture.As(&capture);
Q_ASSERT_SUCCEEDED(hr);
- d->captureFailedCookie.value = 0;
- }
- if (d->capture && d->recordLimitationCookie.value) {
- d->capture->remove_RecordLimitationExceeded(d->recordLimitationCookie);
- Q_ASSERT_SUCCEEDED(hr);
- d->recordLimitationCookie.value = 0;
- }
- ComPtr<IClosable> capture;
- hr = d->capture.As(&capture);
- Q_ASSERT_SUCCEEDED(hr);
- hr = capture->Close();
+ hr = capture->Close();
+ RETURN_HR_IF_FAILED("");
+ d->capture.Reset();
+ return hr;
+ });
RETURN_VOID_AND_EMIT_ERROR("Failed to close the capture manger");
- d->capture.Reset();
if (d->state != QCamera::UnloadedState) {
d->state = QCamera::UnloadedState;
emit stateChanged(d->state);
@@ -713,6 +743,7 @@ QCamera::CaptureModes QWinRTCameraControl::captureMode() const
void QWinRTCameraControl::setCaptureMode(QCamera::CaptureModes mode)
{
+ qCDebug(lcMMCamera) << __FUNCTION__ << mode;
Q_D(QWinRTCameraControl);
if (d->captureMode == mode)
@@ -775,10 +806,10 @@ QCameraLocksControl *QWinRTCameraControl::cameraLocksControl() const
return d->cameraLocksControl;
}
-IMediaCapture *QWinRTCameraControl::handle() const
+Microsoft::WRL::ComPtr<ABI::Windows::Media::Capture::IMediaCapture> QWinRTCameraControl::handle() const
{
Q_D(const QWinRTCameraControl);
- return d->capture.Get();
+ return d->capture;
}
void QWinRTCameraControl::onBufferRequested()
@@ -791,6 +822,10 @@ void QWinRTCameraControl::onBufferRequested()
void QWinRTCameraControl::onApplicationStateChanged(Qt::ApplicationState state)
{
+ qCDebug(lcMMCamera) << __FUNCTION__ << state;
+#ifdef _DEBUG
+ return;
+#else // !_DEBUG
Q_D(QWinRTCameraControl);
static QCamera::State savedState = d->state;
switch (state) {
@@ -806,10 +841,12 @@ void QWinRTCameraControl::onApplicationStateChanged(Qt::ApplicationState state)
default:
break;
}
+#endif // _DEBUG
}
HRESULT QWinRTCameraControl::initialize()
{
+ qCDebug(lcMMCamera) << __FUNCTION__;
Q_D(QWinRTCameraControl);
if (d->status != QCamera::LoadingStatus) {
@@ -817,154 +854,158 @@ HRESULT QWinRTCameraControl::initialize()
emit statusChanged(d->status);
}
- HRESULT hr;
- ComPtr<IInspectable> capture;
- hr = RoActivateInstance(Wrappers::HString::MakeReference(RuntimeClass_Windows_Media_Capture_MediaCapture).Get(),
- &capture);
- Q_ASSERT_SUCCEEDED(hr);
- hr = capture.As(&d->capture);
- Q_ASSERT_SUCCEEDED(hr);
- hr = d->capture.As(&d->capturePreview);
- Q_ASSERT_SUCCEEDED(hr);
- hr = d->capture->add_Failed(Callback<IMediaCaptureFailedEventHandler>(this, &QWinRTCameraControl::onCaptureFailed).Get(),
- &d->captureFailedCookie);
- Q_ASSERT_SUCCEEDED(hr);
- hr = d->capture->add_RecordLimitationExceeded(Callback<IRecordLimitationExceededEventHandler>(this, &QWinRTCameraControl::onRecordLimitationExceeded).Get(),
- &d->recordLimitationCookie);
- Q_ASSERT_SUCCEEDED(hr);
- hr = RoGetActivationFactory(HString::MakeReference(RuntimeClass_Windows_Media_MediaProperties_MediaEncodingProfile).Get(),
- IID_PPV_ARGS(&d->encodingProfileFactory));
- Q_ASSERT_SUCCEEDED(hr);
+ HRESULT hr = QEventDispatcherWinRT::runOnXamlThread([this, d]() {
+ HRESULT hr;
+ ComPtr<IInspectable> capture;
+ hr = RoActivateInstance(Wrappers::HString::MakeReference(RuntimeClass_Windows_Media_Capture_MediaCapture).Get(),
+ &capture);
+ Q_ASSERT_SUCCEEDED(hr);
+ hr = capture.As(&d->capture);
+ Q_ASSERT_SUCCEEDED(hr);
+ hr = d->capture.As(&d->capturePreview);
+ Q_ASSERT_SUCCEEDED(hr);
+ hr = d->capture->add_Failed(Callback<IMediaCaptureFailedEventHandler>(this, &QWinRTCameraControl::onCaptureFailed).Get(),
+ &d->captureFailedCookie);
+ Q_ASSERT_SUCCEEDED(hr);
+ hr = d->capture->add_RecordLimitationExceeded(Callback<IRecordLimitationExceededEventHandler>(this, &QWinRTCameraControl::onRecordLimitationExceeded).Get(),
+ &d->recordLimitationCookie);
+ Q_ASSERT_SUCCEEDED(hr);
+ hr = RoGetActivationFactory(HString::MakeReference(RuntimeClass_Windows_Media_MediaProperties_MediaEncodingProfile).Get(),
+ IID_PPV_ARGS(&d->encodingProfileFactory));
+ Q_ASSERT_SUCCEEDED(hr);
- int deviceIndex = d->videoDeviceSelector->selectedDevice();
- if (deviceIndex < 0)
- deviceIndex = d->videoDeviceSelector->defaultDevice();
+ int deviceIndex = d->videoDeviceSelector->selectedDevice();
+ if (deviceIndex < 0)
+ deviceIndex = d->videoDeviceSelector->defaultDevice();
- const QString deviceName = d->videoDeviceSelector->deviceName(deviceIndex);
- if (deviceName.isEmpty()) {
- qWarning("No video device available or selected.");
- return E_FAIL;
- }
+ const QString deviceName = d->videoDeviceSelector->deviceName(deviceIndex);
+ if (deviceName.isEmpty()) {
+ qWarning("No video device available or selected.");
+ return E_FAIL;
+ }
- const QCamera::Position position = d->videoDeviceSelector->cameraPosition(deviceName);
- d->videoRenderer->setScanLineDirection(position == QCamera::BackFace ? QVideoSurfaceFormat::TopToBottom
- : QVideoSurfaceFormat::BottomToTop);
- ComPtr<IMediaCaptureInitializationSettings> settings;
- hr = RoActivateInstance(HString::MakeReference(RuntimeClass_Windows_Media_Capture_MediaCaptureInitializationSettings).Get(),
- &settings);
- Q_ASSERT_SUCCEEDED(hr);
- HStringReference deviceId(reinterpret_cast<LPCWSTR>(deviceName.utf16()), deviceName.length());
- hr = settings->put_VideoDeviceId(deviceId.Get());
- Q_ASSERT_SUCCEEDED(hr);
+ const QCamera::Position position = d->videoDeviceSelector->cameraPosition(deviceName);
+ d->videoRenderer->setScanLineDirection(position == QCamera::BackFace ? QVideoSurfaceFormat::TopToBottom
+ : QVideoSurfaceFormat::BottomToTop);
+ ComPtr<IMediaCaptureInitializationSettings> settings;
+ hr = RoActivateInstance(HString::MakeReference(RuntimeClass_Windows_Media_Capture_MediaCaptureInitializationSettings).Get(),
+ &settings);
+ Q_ASSERT_SUCCEEDED(hr);
+ HStringReference deviceId(reinterpret_cast<LPCWSTR>(deviceName.utf16()), deviceName.length());
+ hr = settings->put_VideoDeviceId(deviceId.Get());
+ Q_ASSERT_SUCCEEDED(hr);
- hr = settings->put_StreamingCaptureMode(StreamingCaptureMode_Video);
- Q_ASSERT_SUCCEEDED(hr);
+ hr = settings->put_StreamingCaptureMode(StreamingCaptureMode_Video);
+ Q_ASSERT_SUCCEEDED(hr);
- hr = settings->put_PhotoCaptureSource(PhotoCaptureSource_Auto);
- Q_ASSERT_SUCCEEDED(hr);
+ hr = settings->put_PhotoCaptureSource(PhotoCaptureSource_Auto);
+ Q_ASSERT_SUCCEEDED(hr);
- ComPtr<IAsyncAction> op;
- hr = d->capture->InitializeWithSettingsAsync(settings.Get(), &op);
- RETURN_HR_IF_FAILED("Failed to begin initialization of media capture manager");
- hr = QWinRTFunctions::await(op, QWinRTFunctions::ProcessThreadEvents);
- if (hr == E_ACCESSDENIED) {
- qWarning("Access denied when initializing the media capture manager. "
- "Check your manifest settings for microphone and webcam access.");
- }
- RETURN_HR_IF_FAILED("Failed to initialize media capture manager");
-
- ComPtr<IVideoDeviceController> videoDeviceController;
- hr = d->capture->get_VideoDeviceController(&videoDeviceController);
- ComPtr<IAdvancedVideoCaptureDeviceController2> advancedVideoDeviceController;
- hr = videoDeviceController.As(&advancedVideoDeviceController);
- Q_ASSERT_SUCCEEDED(hr);
- hr = advancedVideoDeviceController->get_FocusControl(&d->focusControl);
- Q_ASSERT_SUCCEEDED(hr);
+ ComPtr<IAsyncAction> op;
+ hr = d->capture->InitializeWithSettingsAsync(settings.Get(), &op);
+ RETURN_HR_IF_FAILED("Failed to begin initialization of media capture manager");
+ hr = QWinRTFunctions::await(op, QWinRTFunctions::ProcessThreadEvents);
+ if (hr == E_ACCESSDENIED) {
+ qWarning("Access denied when initializing the media capture manager. "
+ "Check your manifest settings for microphone and webcam access.");
+ }
+ RETURN_HR_IF_FAILED("Failed to initialize media capture manager");
- boolean isFocusSupported;
- hr = d->focusControl->get_Supported(&isFocusSupported);
- Q_ASSERT_SUCCEEDED(hr);
- if (isFocusSupported) {
- hr = advancedVideoDeviceController->get_RegionsOfInterestControl(&d->regionsOfInterestControl);
+ ComPtr<IVideoDeviceController> videoDeviceController;
+ hr = d->capture->get_VideoDeviceController(&videoDeviceController);
+ ComPtr<IAdvancedVideoCaptureDeviceController2> advancedVideoDeviceController;
+ hr = videoDeviceController.As(&advancedVideoDeviceController);
Q_ASSERT_SUCCEEDED(hr);
- hr = initializeFocus();
+ hr = advancedVideoDeviceController->get_FocusControl(&d->focusControl);
Q_ASSERT_SUCCEEDED(hr);
- } else {
- d->cameraFocusControl->setSupportedFocusMode(0);
- d->cameraFocusControl->setSupportedFocusPointMode(QSet<QCameraFocus::FocusPointMode>());
- }
- d->cameraLocksControl->initialize();
- Q_ASSERT_SUCCEEDED(hr);
- ComPtr<IMediaDeviceController> deviceController;
- hr = videoDeviceController.As(&deviceController);
- Q_ASSERT_SUCCEEDED(hr);
+ boolean isFocusSupported;
+ hr = d->focusControl->get_Supported(&isFocusSupported);
+ Q_ASSERT_SUCCEEDED(hr);
+ if (isFocusSupported) {
+ hr = advancedVideoDeviceController->get_RegionsOfInterestControl(&d->regionsOfInterestControl);
+ Q_ASSERT_SUCCEEDED(hr);
+ hr = initializeFocus();
+ Q_ASSERT_SUCCEEDED(hr);
+ } else {
+ d->cameraFocusControl->setSupportedFocusMode(0);
+ d->cameraFocusControl->setSupportedFocusPointMode(QSet<QCameraFocus::FocusPointMode>());
+ }
+ d->cameraLocksControl->initialize();
- // Get preview stream properties.
- ComPtr<IVectorView<IMediaEncodingProperties *>> previewPropertiesList;
- QVector<QSize> previewResolutions;
- hr = getMediaStreamResolutions(deviceController.Get(),
- MediaStreamType_VideoPreview,
- &previewPropertiesList,
- &previewResolutions);
- RETURN_HR_IF_FAILED("Failed to find a suitable video format");
-
- MediaStreamType mediaStreamType =
- d->captureMode == QCamera::CaptureVideo ? MediaStreamType_VideoRecord : MediaStreamType_Photo;
-
- // Get capture stream properties.
- ComPtr<IVectorView<IMediaEncodingProperties *>> capturePropertiesList;
- QVector<QSize> captureResolutions;
- hr = getMediaStreamResolutions(deviceController.Get(),
- mediaStreamType,
- &capturePropertiesList,
- &captureResolutions);
- RETURN_HR_IF_FAILED("Failed to find a suitable video format");
-
- // Set capture resolutions.
- d->imageEncoderControl->setSupportedResolutionsList(captureResolutions.toList());
- const QSize captureResolution = d->imageEncoderControl->imageSettings().resolution();
- const quint32 captureResolutionIndex = captureResolutions.indexOf(captureResolution);
- ComPtr<IMediaEncodingProperties> captureProperties;
- hr = capturePropertiesList->GetAt(captureResolutionIndex, &captureProperties);
- Q_ASSERT_SUCCEEDED(hr);
- hr = deviceController->SetMediaStreamPropertiesAsync(mediaStreamType, captureProperties.Get(), &op);
- Q_ASSERT_SUCCEEDED(hr);
- hr = QWinRTFunctions::await(op);
- Q_ASSERT_SUCCEEDED(hr);
+ Q_ASSERT_SUCCEEDED(hr);
+ ComPtr<IMediaDeviceController> deviceController;
+ hr = videoDeviceController.As(&deviceController);
+ Q_ASSERT_SUCCEEDED(hr);
- // Set preview resolution.
- QVector<QSize> filtered;
- const float captureAspectRatio = float(captureResolution.width()) / captureResolution.height();
- for (const QSize &resolution : qAsConst(previewResolutions)) {
- const float aspectRatio = float(resolution.width()) / resolution.height();
- if (qAbs(aspectRatio - captureAspectRatio) <= ASPECTRATIO_EPSILON)
- filtered.append(resolution);
- }
- qSort(filtered.begin(),
- filtered.end(),
- [](QSize size1, QSize size2) { return size1.width() * size1.height() < size2.width() * size2.height(); });
-
- const QSize &viewfinderResolution = filtered.first();
- const quint32 viewfinderResolutionIndex = previewResolutions.indexOf(viewfinderResolution);
- hr = RoActivateInstance(HString::MakeReference(RuntimeClass_Windows_Media_MediaProperties_MediaEncodingProfile).Get(),
- &d->encodingProfile);
- Q_ASSERT_SUCCEEDED(hr);
- ComPtr<IMediaEncodingProperties> previewProperties;
- hr = previewPropertiesList->GetAt(viewfinderResolutionIndex, &previewProperties);
- Q_ASSERT_SUCCEEDED(hr);
- hr = deviceController->SetMediaStreamPropertiesAsync(MediaStreamType_VideoPreview, previewProperties.Get(), &op);
- Q_ASSERT_SUCCEEDED(hr);
- hr = QWinRTFunctions::await(op);
- Q_ASSERT_SUCCEEDED(hr);
- ComPtr<IVideoEncodingProperties> videoPreviewProperties;
- hr = previewProperties.As(&videoPreviewProperties);
- Q_ASSERT_SUCCEEDED(hr);
- hr = d->encodingProfile->put_Video(videoPreviewProperties.Get());
- Q_ASSERT_SUCCEEDED(hr);
+ // Get preview stream properties.
+ ComPtr<IVectorView<IMediaEncodingProperties *>> previewPropertiesList;
+ QVector<QSize> previewResolutions;
+ hr = getMediaStreamResolutions(deviceController.Get(),
+ MediaStreamType_VideoPreview,
+ &previewPropertiesList,
+ &previewResolutions);
+ RETURN_HR_IF_FAILED("Failed to find a suitable video format");
+
+ MediaStreamType mediaStreamType =
+ d->captureMode == QCamera::CaptureVideo ? MediaStreamType_VideoRecord : MediaStreamType_Photo;
+
+ // Get capture stream properties.
+ ComPtr<IVectorView<IMediaEncodingProperties *>> capturePropertiesList;
+ QVector<QSize> captureResolutions;
+ hr = getMediaStreamResolutions(deviceController.Get(),
+ mediaStreamType,
+ &capturePropertiesList,
+ &captureResolutions);
+ RETURN_HR_IF_FAILED("Failed to find a suitable video format");
+
+ // Set capture resolutions.
+ d->imageEncoderControl->setSupportedResolutionsList(captureResolutions.toList());
+ const QSize captureResolution = d->imageEncoderControl->imageSettings().resolution();
+ const quint32 captureResolutionIndex = captureResolutions.indexOf(captureResolution);
+ ComPtr<IMediaEncodingProperties> captureProperties;
+ hr = capturePropertiesList->GetAt(captureResolutionIndex, &captureProperties);
+ Q_ASSERT_SUCCEEDED(hr);
+ hr = deviceController->SetMediaStreamPropertiesAsync(mediaStreamType, captureProperties.Get(), &op);
+ Q_ASSERT_SUCCEEDED(hr);
+ hr = QWinRTFunctions::await(op);
+ Q_ASSERT_SUCCEEDED(hr);
- if (d->videoRenderer)
- d->videoRenderer->setSize(viewfinderResolution);
+ // Set preview resolution.
+ QVector<QSize> filtered;
+ const float captureAspectRatio = float(captureResolution.width()) / captureResolution.height();
+ for (const QSize &resolution : qAsConst(previewResolutions)) {
+ const float aspectRatio = float(resolution.width()) / resolution.height();
+ if (qAbs(aspectRatio - captureAspectRatio) <= ASPECTRATIO_EPSILON)
+ filtered.append(resolution);
+ }
+ qSort(filtered.begin(),
+ filtered.end(),
+ [](QSize size1, QSize size2) { return size1.width() * size1.height() < size2.width() * size2.height(); });
+
+ const QSize &viewfinderResolution = filtered.first();
+ const quint32 viewfinderResolutionIndex = previewResolutions.indexOf(viewfinderResolution);
+ hr = RoActivateInstance(HString::MakeReference(RuntimeClass_Windows_Media_MediaProperties_MediaEncodingProfile).Get(),
+ &d->encodingProfile);
+ Q_ASSERT_SUCCEEDED(hr);
+ ComPtr<IMediaEncodingProperties> previewProperties;
+ hr = previewPropertiesList->GetAt(viewfinderResolutionIndex, &previewProperties);
+ Q_ASSERT_SUCCEEDED(hr);
+ hr = deviceController->SetMediaStreamPropertiesAsync(MediaStreamType_VideoPreview, previewProperties.Get(), &op);
+ Q_ASSERT_SUCCEEDED(hr);
+ hr = QWinRTFunctions::await(op);
+ Q_ASSERT_SUCCEEDED(hr);
+ ComPtr<IVideoEncodingProperties> videoPreviewProperties;
+ hr = previewProperties.As(&videoPreviewProperties);
+ Q_ASSERT_SUCCEEDED(hr);
+ hr = d->encodingProfile->put_Video(videoPreviewProperties.Get());
+ Q_ASSERT_SUCCEEDED(hr);
+
+ if (d->videoRenderer)
+ d->videoRenderer->setSize(viewfinderResolution);
+
+ return S_OK;
+ });
if (SUCCEEDED(hr) && d->state != QCamera::LoadedState) {
d->state = QCamera::LoadedState;
@@ -977,7 +1018,7 @@ HRESULT QWinRTCameraControl::initialize()
return hr;
}
-#ifdef Q_OS_WINPHONE
+#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_PHONE_APP)
HRESULT QWinRTCameraControl::initializeFocus()
{
@@ -1224,7 +1265,7 @@ bool QWinRTCameraControl::unlockFocus()
return QWinRTFunctions::await(op) == S_OK;
}
-#else // Q_OS_WINPHONE
+#else // !WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_PHONE_APP)
HRESULT QWinRTCameraControl::initializeFocus()
{
@@ -1265,7 +1306,7 @@ bool QWinRTCameraControl::unlockFocus()
return false;
}
-#endif // !Q_OS_WINPHONE
+#endif // !WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_PHONE_APP)
void QWinRTCameraControl::frameMapped()
{
@@ -1284,6 +1325,7 @@ void QWinRTCameraControl::frameUnmapped()
HRESULT QWinRTCameraControl::onCaptureFailed(IMediaCapture *, IMediaCaptureFailedEventArgs *args)
{
+ qCDebug(lcMMCamera) << __FUNCTION__ << args;
HRESULT hr;
UINT32 code;
hr = args->get_Code(&code);
@@ -1300,6 +1342,7 @@ HRESULT QWinRTCameraControl::onCaptureFailed(IMediaCapture *, IMediaCaptureFaile
HRESULT QWinRTCameraControl::onRecordLimitationExceeded(IMediaCapture *)
{
+ qCDebug(lcMMCamera) << __FUNCTION__;
emit error(QCamera::CameraError, QStringLiteral("Recording limit exceeded."));
setState(QCamera::LoadedState);
return S_OK;
@@ -1307,6 +1350,7 @@ HRESULT QWinRTCameraControl::onRecordLimitationExceeded(IMediaCapture *)
void QWinRTCameraControl::emitError(int errorCode, const QString &errorString)
{
+ qCDebug(lcMMCamera) << __FUNCTION__ << errorString << errorCode;
emit error(errorCode, errorString);
}
diff --git a/src/plugins/winrt/qwinrtcameracontrol.h b/src/plugins/winrt/qwinrtcameracontrol.h
index b3c86adf9..c657d5935 100644
--- a/src/plugins/winrt/qwinrtcameracontrol.h
+++ b/src/plugins/winrt/qwinrtcameracontrol.h
@@ -38,8 +38,11 @@
#define QWINRTCAMERACONTROL_H
#include <QtMultimedia/QCameraControl>
+#include <QtCore/QLoggingCategory>
#include <QtCore/qt_windows.h>
+#include <wrl.h>
+
namespace ABI {
namespace Windows {
namespace Media {
@@ -57,6 +60,8 @@ namespace ABI {
QT_BEGIN_NAMESPACE
+Q_DECLARE_LOGGING_CATEGORY(lcMMCamera)
+
class QVideoRendererControl;
class QVideoDeviceSelectorControl;
class QCameraImageCaptureControl;
@@ -90,7 +95,7 @@ public:
QCameraFocusControl *cameraFocusControl() const;
QCameraLocksControl *cameraLocksControl() const;
- ABI::Windows::Media::Capture::IMediaCapture *handle() const;
+ Microsoft::WRL::ComPtr<ABI::Windows::Media::Capture::IMediaCapture> handle() const;
bool setFocus(QCameraFocus::FocusModes mode);
bool setFocusPoint(const QPointF &point);
diff --git a/src/plugins/winrt/qwinrtcameraimagecapturecontrol.cpp b/src/plugins/winrt/qwinrtcameraimagecapturecontrol.cpp
index ae67e33f4..07ec0b40d 100644
--- a/src/plugins/winrt/qwinrtcameraimagecapturecontrol.cpp
+++ b/src/plugins/winrt/qwinrtcameraimagecapturecontrol.cpp
@@ -46,8 +46,10 @@
#include <QtCore/QStandardPaths>
#include <QtCore/QVector>
#include <QtCore/qfunctions_winrt.h>
+#include <QtCore/private/qeventdispatcher_winrt_p.h>
#include <QtMultimedia/private/qmediastoragelocation_p.h>
+#include <functional>
#include <wrl.h>
#include <windows.media.capture.h>
#include <windows.media.devices.h>
@@ -118,6 +120,7 @@ public:
QWinRTCameraImageCaptureControl::QWinRTCameraImageCaptureControl(QWinRTCameraControl *parent)
: QCameraImageCaptureControl(parent), d_ptr(new QWinRTCameraImageCaptureControlPrivate)
{
+ qCDebug(lcMMCamera) << __FUNCTION__ << parent;
Q_D(QWinRTCameraImageCaptureControl);
d->cameraControl = parent;
@@ -144,10 +147,11 @@ void QWinRTCameraImageCaptureControl::setDriveMode(QCameraImageCapture::DriveMod
int QWinRTCameraImageCaptureControl::capture(const QString &fileName)
{
+ qCDebug(lcMMCamera) << __FUNCTION__ << fileName;
Q_D(QWinRTCameraImageCaptureControl);
++d->currentCaptureId;
- IMediaCapture *capture = d->cameraControl->handle();
+ ComPtr<IMediaCapture> capture = d->cameraControl->handle();
if (!capture) {
emit error(d->currentCaptureId, QCameraImageCapture::NotReadyError, tr("Camera not ready"));
return -1;
@@ -159,38 +163,43 @@ int QWinRTCameraImageCaptureControl::capture(const QString &fileName)
fileName.isEmpty() ? QStringLiteral("jpg") : QFileInfo(fileName).suffix())
};
- HRESULT hr;
- hr = RoActivateInstance(HString::MakeReference(RuntimeClass_Windows_Storage_Streams_InMemoryRandomAccessStream).Get(),
- &request.stream);
- Q_ASSERT_SUCCEEDED(hr);
+ HRESULT hr = QEventDispatcherWinRT::runOnXamlThread([this, d, capture, &request]() {
+ HRESULT hr;
+ hr = RoActivateInstance(HString::MakeReference(RuntimeClass_Windows_Storage_Streams_InMemoryRandomAccessStream).Get(),
+ &request.stream);
+ Q_ASSERT_SUCCEEDED(hr);
- hr = g->encodingPropertiesFactory->CreateBmp(&request.imageFormat);
- Q_ASSERT_SUCCEEDED(hr);
+ hr = g->encodingPropertiesFactory->CreateBmp(&request.imageFormat);
+ Q_ASSERT_SUCCEEDED(hr);
- const QSize imageSize = static_cast<QWinRTImageEncoderControl*>(d->cameraControl->imageEncoderControl())->imageSettings().resolution();
- hr = request.imageFormat->put_Width(imageSize.width());
- Q_ASSERT_SUCCEEDED(hr);
- hr = request.imageFormat->put_Height(imageSize.height());
- Q_ASSERT_SUCCEEDED(hr);
+ const QSize imageSize = static_cast<QWinRTImageEncoderControl*>(d->cameraControl->imageEncoderControl())->imageSettings().resolution();
+ hr = request.imageFormat->put_Width(imageSize.width());
+ Q_ASSERT_SUCCEEDED(hr);
+ hr = request.imageFormat->put_Height(imageSize.height());
+ Q_ASSERT_SUCCEEDED(hr);
- hr = capture->CapturePhotoToStreamAsync(request.imageFormat.Get(), request.stream.Get(), &request.op);
- Q_ASSERT_SUCCEEDED(hr);
- if (!request.op) {
- qErrnoWarning("Camera photo capture failed.");
+ hr = capture->CapturePhotoToStreamAsync(request.imageFormat.Get(), request.stream.Get(), &request.op);
+ Q_ASSERT_SUCCEEDED(hr);
+ if (!request.op) {
+ qErrnoWarning("Camera photo capture failed.");
+ return E_FAIL;
+ }
+ emit captureQueueChanged(false);
+ d->requests.insert(request.op.Get(), request);
+
+ hr = request.op->put_Completed(Callback<IAsyncActionCompletedHandler>(
+ this, &QWinRTCameraImageCaptureControl::onCaptureCompleted).Get());
+ Q_ASSERT_SUCCEEDED(hr);
+ return hr;
+ });
+ if (FAILED(hr))
return -1;
- }
- emit captureQueueChanged(false);
- d->requests.insert(request.op.Get(), request);
-
- hr = request.op->put_Completed(Callback<IAsyncActionCompletedHandler>(
- this, &QWinRTCameraImageCaptureControl::onCaptureCompleted).Get());
- Q_ASSERT_SUCCEEDED(hr);
-
return request.id;
}
void QWinRTCameraImageCaptureControl::cancelCapture()
{
+ qCDebug(lcMMCamera) << __FUNCTION__;
Q_D(QWinRTCameraImageCaptureControl);
QHash<IAsyncAction *, CaptureRequest>::iterator it = d->requests.begin();
@@ -205,6 +214,7 @@ void QWinRTCameraImageCaptureControl::cancelCapture()
HRESULT QWinRTCameraImageCaptureControl::onCaptureCompleted(IAsyncAction *asyncInfo, AsyncStatus status)
{
+ qCDebug(lcMMCamera) << __FUNCTION__;
Q_D(QWinRTCameraImageCaptureControl);
if (status == Canceled || !d->requests.contains(asyncInfo))
diff --git a/src/plugins/winrt/qwinrtcameravideorenderercontrol.cpp b/src/plugins/winrt/qwinrtcameravideorenderercontrol.cpp
index fe07581c9..796a36f55 100644
--- a/src/plugins/winrt/qwinrtcameravideorenderercontrol.cpp
+++ b/src/plugins/winrt/qwinrtcameravideorenderercontrol.cpp
@@ -48,7 +48,7 @@
#include "qwinrtcameracontrol.h"
-#ifdef Q_OS_WINPHONE
+#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_PHONE_APP)
#include <Windows.Security.ExchangeActiveSyncProvisioning.h>
using namespace ABI::Windows::Security::ExchangeActiveSyncProvisioning;
#endif
@@ -58,7 +58,7 @@ using namespace Microsoft::WRL::Wrappers;
QT_BEGIN_NAMESPACE
-#ifdef Q_OS_WINPHONE
+#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_PHONE_APP)
template <int n>
static bool blacklisted(const wchar_t (&blackListName)[n], const HString &deviceModel)
{
@@ -282,7 +282,7 @@ QWinRTCameraVideoRendererControl::QWinRTCameraVideoRendererControl(const QSize &
d->cameraSampleformat = QVideoFrame::Format_User;
d->videoProbesCounter = 0;
-#ifdef Q_OS_WINPHONE
+#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_PHONE_APP)
// Workaround for certain devices which fail to blit.
ComPtr<IEasClientDeviceInformation> deviceInfo;
HRESULT hr = RoActivateInstance(HString::MakeReference(RuntimeClass_Windows_Security_ExchangeActiveSyncProvisioning_EasClientDeviceInformation).Get(),
diff --git a/src/plugins/winrt/qwinrtvideodeviceselectorcontrol.cpp b/src/plugins/winrt/qwinrtvideodeviceselectorcontrol.cpp
index 07b63be9a..d3348e719 100644
--- a/src/plugins/winrt/qwinrtvideodeviceselectorcontrol.cpp
+++ b/src/plugins/winrt/qwinrtvideodeviceselectorcontrol.cpp
@@ -204,7 +204,8 @@ public:
DeviceWatcherStatus status;
hr = deviceWatcher->get_Status(&status);
Q_ASSERT_SUCCEEDED(hr);
- if (status != DeviceWatcherStatus_Started) {
+ if (status != DeviceWatcherStatus_Started &&
+ status != DeviceWatcherStatus_EnumerationCompleted) {
// We can't immediately Start() if we have just called Stop()
while (status == DeviceWatcherStatus_Stopping) {
QThread::yieldCurrentThread();
@@ -332,7 +333,7 @@ QCamera::Position QWinRTVideoDeviceSelectorControl::cameraPosition(const QString
int QWinRTVideoDeviceSelectorControl::cameraOrientation(const QString &deviceName)
{
-#ifdef Q_OS_WINPHONE
+#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_PHONE_APP)
switch (cameraPosition(deviceName)) {
case QCamera::FrontFace:
case QCamera::BackFace:
diff --git a/src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp b/src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp
index 96290aed0..412694cc3 100644
--- a/src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp
+++ b/src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp
@@ -289,16 +289,6 @@ QSGNode *QDeclarativeVideoRendererBackend::updatePaintNode(QSGNode *oldNode,
obj->event(&ev);
}
}
-#if defined (Q_OS_QNX) // On QNX we need to be called back again for creating the egl images
- else {
- // Internal mechanism to call back the surface renderer from the QtQuick render thread
- QObject *obj = m_surface->property("_q_GLThreadCallback").value<QObject*>();
- if (obj) {
- QEvent ev(static_cast<QEvent::Type>(QEvent::User + 1));
- obj->event(&ev);
- }
- }
-#endif
bool isFrameModified = false;
if (m_frameChanged) {
diff --git a/tests/auto/integration/qaudiodecoderbackend/BLACKLIST b/tests/auto/integration/qaudiodecoderbackend/BLACKLIST
deleted file mode 100644
index 038b89022..000000000
--- a/tests/auto/integration/qaudiodecoderbackend/BLACKLIST
+++ /dev/null
@@ -1,41 +0,0 @@
-# QTBUG-46331
-
-[fileTest]
-opensuse-13.1 64bit
-redhatenterpriselinuxworkstation-6.6
-osx-10.8
-osx-10.9
-osx-10.10
-osx-10.11
-windows 32bit developer-build
-windows 64bit developer-build
-
-[unsupportedFileTest]
-opensuse-13.1 64bit
-redhatenterpriselinuxworkstation-6.6
-osx-10.8
-osx-10.9
-osx-10.10
-osx-10.11
-windows 32bit developer-build
-windows 64bit developer-build
-
-[corruptedFileTest]
-opensuse-13.1 64bit
-redhatenterpriselinuxworkstation-6.6
-osx-10.8
-osx-10.9
-osx-10.10
-osx-10.11
-windows 32bit developer-build
-windows 64bit developer-build
-
-[deviceTest]
-opensuse-13.1 64bit
-redhatenterpriselinuxworkstation-6.6
-osx-10.8
-osx-10.9
-osx-10.10
-osx-10.11
-windows 32bit developer-build
-windows 64bit developer-build
diff --git a/tests/auto/integration/qaudiodecoderbackend/tst_qaudiodecoderbackend.cpp b/tests/auto/integration/qaudiodecoderbackend/tst_qaudiodecoderbackend.cpp
index 576da50e3..2af06b46c 100644
--- a/tests/auto/integration/qaudiodecoderbackend/tst_qaudiodecoderbackend.cpp
+++ b/tests/auto/integration/qaudiodecoderbackend/tst_qaudiodecoderbackend.cpp
@@ -76,6 +76,8 @@ void tst_QAudioDecoderBackend::cleanup()
void tst_QAudioDecoderBackend::fileTest()
{
QAudioDecoder d;
+ if (d.error() == QAudioDecoder::ServiceMissingError)
+ QSKIP("There is no audio decoding support on this platform.");
QAudioBuffer buffer;
quint64 duration = 0;
int byteCount = 0;
@@ -258,6 +260,8 @@ void tst_QAudioDecoderBackend::fileTest()
void tst_QAudioDecoderBackend::unsupportedFileTest()
{
QAudioDecoder d;
+ if (d.error() == QAudioDecoder::ServiceMissingError)
+ QSKIP("There is no audio decoding support on this platform.");
QAudioBuffer buffer;
QVERIFY(d.state() == QAudioDecoder::StoppedState);
@@ -334,6 +338,8 @@ void tst_QAudioDecoderBackend::unsupportedFileTest()
void tst_QAudioDecoderBackend::corruptedFileTest()
{
QAudioDecoder d;
+ if (d.error() == QAudioDecoder::ServiceMissingError)
+ QSKIP("There is no audio decoding support on this platform.");
QAudioBuffer buffer;
QVERIFY(d.state() == QAudioDecoder::StoppedState);
@@ -406,6 +412,8 @@ void tst_QAudioDecoderBackend::corruptedFileTest()
void tst_QAudioDecoderBackend::deviceTest()
{
QAudioDecoder d;
+ if (d.error() == QAudioDecoder::ServiceMissingError)
+ QSKIP("There is no audio decoding support on this platform.");
QAudioBuffer buffer;
quint64 duration = 0;
int sampleCount = 0;
diff --git a/tests/auto/unit/qaudiobuffer/qaudiobuffer.pro b/tests/auto/unit/qaudiobuffer/qaudiobuffer.pro
index 2da74b8c7..8f8f54ac3 100644
--- a/tests/auto/unit/qaudiobuffer/qaudiobuffer.pro
+++ b/tests/auto/unit/qaudiobuffer/qaudiobuffer.pro
@@ -1,9 +1,3 @@
-#-------------------------------------------------
-#
-# Project created by QtCreator 2012-02-02T23:40:38
-#
-#-------------------------------------------------
-
QT += multimedia testlib
QT -= gui