diff options
Diffstat (limited to 'src/plugins')
97 files changed, 4555 insertions, 1794 deletions
diff --git a/src/plugins/alsa/qalsaaudiodeviceinfo.cpp b/src/plugins/alsa/qalsaaudiodeviceinfo.cpp index be90ca666..3d310871f 100644 --- a/src/plugins/alsa/qalsaaudiodeviceinfo.cpp +++ b/src/plugins/alsa/qalsaaudiodeviceinfo.cpp @@ -141,7 +141,7 @@ bool QAlsaAudioDeviceInfo::open() QList<QByteArray> devices = availableDevices(mode); if(dev.compare(QLatin1String("default")) == 0) { -#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14) +#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14)) if (devices.size() > 0) dev = QLatin1String(devices.first().constData()); else @@ -150,7 +150,7 @@ bool QAlsaAudioDeviceInfo::open() dev = QLatin1String("hw:0,0"); #endif } else { -#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14) +#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14)) dev = device; #else int idx = 0; @@ -194,7 +194,7 @@ bool QAlsaAudioDeviceInfo::testSettings(const QAudioFormat& format) const snd_pcm_hw_params_t *params; QString dev; -#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14) +#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14)) dev = device; if (dev.compare(QLatin1String("default")) == 0) { QList<QByteArray> devices = availableDevices(QAudio::AudioOutput); @@ -335,7 +335,7 @@ QList<QByteArray> QAlsaAudioDeviceInfo::availableDevices(QAudio::Mode mode) QList<QByteArray> devices; QByteArray filter; -#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14) +#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14)) // Create a list of all current audio devices that support mode void **hints, **n; char *name, *descr, *io; diff --git a/src/plugins/alsa/qalsaaudioinput.cpp b/src/plugins/alsa/qalsaaudioinput.cpp index 4a8dd80da..d6d8adcff 100644 --- a/src/plugins/alsa/qalsaaudioinput.cpp +++ b/src/plugins/alsa/qalsaaudioinput.cpp @@ -303,7 +303,7 @@ bool QAlsaAudioInput::open() QString dev = 
QString(QLatin1String(m_device.constData())); QList<QByteArray> devices = QAlsaAudioDeviceInfo::availableDevices(QAudio::AudioInput); if(dev.compare(QLatin1String("default")) == 0) { -#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14) +#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14)) if (devices.size() > 0) dev = QLatin1String(devices.first()); else @@ -312,7 +312,7 @@ bool QAlsaAudioInput::open() dev = QLatin1String("hw:0,0"); #endif } else { -#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14) +#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14)) dev = QLatin1String(m_device); #else int idx = 0; diff --git a/src/plugins/alsa/qalsaaudiooutput.cpp b/src/plugins/alsa/qalsaaudiooutput.cpp index 7b7da50a4..f8f0f58e8 100644 --- a/src/plugins/alsa/qalsaaudiooutput.cpp +++ b/src/plugins/alsa/qalsaaudiooutput.cpp @@ -306,7 +306,7 @@ bool QAlsaAudioOutput::open() QString dev = QString(QLatin1String(m_device.constData())); QList<QByteArray> devices = QAlsaAudioDeviceInfo::availableDevices(QAudio::AudioOutput); if(dev.compare(QLatin1String("default")) == 0) { -#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14) +#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14)) if (devices.size() > 0) dev = QLatin1String(devices.first()); else @@ -315,7 +315,7 @@ bool QAlsaAudioOutput::open() dev = QLatin1String("hw:0,0"); #endif } else { -#if(SND_LIB_MAJOR == 1 && SND_LIB_MINOR == 0 && SND_LIB_SUBMINOR >= 14) +#if (SND_LIB_MAJOR == 1 && (SND_LIB_MINOR > 0 || SND_LIB_SUBMINOR >= 14)) dev = QLatin1String(m_device); #else int idx = 0; diff --git a/src/plugins/android/jar/jar.pri b/src/plugins/android/jar/jar.pri index d31839c61..713123baf 100644 --- a/src/plugins/android/jar/jar.pri +++ b/src/plugins/android/jar/jar.pri @@ -10,7 +10,8 @@ JAVASOURCES += $$PWD/src/org/qtproject/qt5/android/multimedia/QtAndroidMediaPlay 
$$PWD/src/org/qtproject/qt5/android/multimedia/QtSurfaceTextureListener.java \ $$PWD/src/org/qtproject/qt5/android/multimedia/QtSurfaceTextureHolder.java \ $$PWD/src/org/qtproject/qt5/android/multimedia/QtMultimediaUtils.java \ - $$PWD/src/org/qtproject/qt5/android/multimedia/QtMediaRecorderListener.java + $$PWD/src/org/qtproject/qt5/android/multimedia/QtMediaRecorderListener.java \ + $$PWD/src/org/qtproject/qt5/android/multimedia/QtSurfaceHolderCallback.java # install target.path = $$[QT_INSTALL_PREFIX]/jar diff --git a/src/plugins/android/jar/src/org/qtproject/qt5/android/multimedia/QtCameraListener.java b/src/plugins/android/jar/src/org/qtproject/qt5/android/multimedia/QtCameraListener.java index 974489c19..8724eeba4 100644 --- a/src/plugins/android/jar/src/org/qtproject/qt5/android/multimedia/QtCameraListener.java +++ b/src/plugins/android/jar/src/org/qtproject/qt5/android/multimedia/QtCameraListener.java @@ -54,6 +54,8 @@ public class QtCameraListener implements Camera.ShutterCallback, private byte[][] m_previewBuffers = null; private byte[] m_lastPreviewBuffer = null; private Camera.Size m_previewSize = null; + private int m_previewFormat = ImageFormat.NV21; // Default preview format on all devices + private int m_previewBytesPerLine = -1; private QtCameraListener(int id) { @@ -86,6 +88,16 @@ public class QtCameraListener implements Camera.ShutterCallback, return m_previewSize.height; } + public int previewFormat() + { + return m_previewFormat; + } + + public int previewBytesPerLine() + { + return m_previewBytesPerLine; + } + public void setupPreviewCallback(Camera camera) { // Clear previous callback (also clears added buffers) @@ -94,8 +106,37 @@ public class QtCameraListener implements Camera.ShutterCallback, final Camera.Parameters params = camera.getParameters(); m_previewSize = params.getPreviewSize(); - double bytesPerPixel = ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8.0; - int bufferSizeNeeded = (int) Math.ceil(bytesPerPixel * 
m_previewSize.width * m_previewSize.height); + m_previewFormat = params.getPreviewFormat(); + + int bufferSizeNeeded = 0; + if (m_previewFormat == ImageFormat.YV12) { + // For YV12, bytes per line must be a multiple of 16 + final int yStride = (int) Math.ceil(m_previewSize.width / 16.0) * 16; + final int uvStride = (int) Math.ceil((yStride / 2) / 16.0) * 16; + final int ySize = yStride * m_previewSize.height; + final int uvSize = uvStride * m_previewSize.height / 2; + bufferSizeNeeded = ySize + uvSize * 2; + + m_previewBytesPerLine = yStride; + + } else { + double bytesPerPixel = ImageFormat.getBitsPerPixel(m_previewFormat) / 8.0; + bufferSizeNeeded = (int) Math.ceil(bytesPerPixel * m_previewSize.width * m_previewSize.height); + + // bytes per line are calculated only for the first plane + switch (m_previewFormat) { + case ImageFormat.NV21: + m_previewBytesPerLine = m_previewSize.width; // 1 byte per sample and tightly packed + break; + case ImageFormat.RGB_565: + case ImageFormat.YUY2: + m_previewBytesPerLine = m_previewSize.width * 2; // 2 bytes per pixel + break; + default: + m_previewBytesPerLine = -1; + break; + } + } // We could keep the same buffers when they are already bigger than the required size // but the Android doc says the size must match, so in doubt just replace them. 
@@ -117,8 +158,12 @@ public class QtCameraListener implements Camera.ShutterCallback, m_lastPreviewBuffer = data; - if (data != null && m_notifyNewFrames) - notifyNewPreviewFrame(m_cameraId, data, m_previewSize.width, m_previewSize.height); + if (data != null && m_notifyNewFrames) { + notifyNewPreviewFrame(m_cameraId, data, + m_previewSize.width, m_previewSize.height, + m_previewFormat, + m_previewBytesPerLine); + } } @Override @@ -142,5 +187,6 @@ public class QtCameraListener implements Camera.ShutterCallback, private static native void notifyAutoFocusComplete(int id, boolean success); private static native void notifyPictureExposed(int id); private static native void notifyPictureCaptured(int id, byte[] data); - private static native void notifyNewPreviewFrame(int id, byte[] data, int width, int height); + private static native void notifyNewPreviewFrame(int id, byte[] data, int width, int height, + int pixelFormat, int bytesPerLine); } diff --git a/src/plugins/android/jar/src/org/qtproject/qt5/android/multimedia/QtSurfaceHolderCallback.java b/src/plugins/android/jar/src/org/qtproject/qt5/android/multimedia/QtSurfaceHolderCallback.java new file mode 100644 index 000000000..266d8a150 --- /dev/null +++ b/src/plugins/android/jar/src/org/qtproject/qt5/android/multimedia/QtSurfaceHolderCallback.java @@ -0,0 +1,67 @@ +/**************************************************************************** +** +** Copyright (C) 2015 The Qt Company Ltd. +** Contact: http://www.qt.io/licensing/ +** +** This file is part of the QtMultimedia of the Qt Toolkit. +** +** $QT_BEGIN_LICENSE:LGPL21$ +** Commercial License Usage +** Licensees holding valid commercial Qt licenses may use this file in +** accordance with the commercial license agreement provided with the +** Software or, alternatively, in accordance with the terms contained in +** a written agreement between you and The Qt Company. For licensing terms +** and conditions see http://www.qt.io/terms-conditions. 
For further +** information use the contact form at http://www.qt.io/contact-us. +** +** GNU Lesser General Public License Usage +** Alternatively, this file may be used under the terms of the GNU Lesser +** General Public License version 2.1 or version 3 as published by the Free +** Software Foundation and appearing in the file LICENSE.LGPLv21 and +** LICENSE.LGPLv3 included in the packaging of this file. Please review the +** following information to ensure the GNU Lesser General Public License +** requirements will be met: https://www.gnu.org/licenses/lgpl.html and +** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. +** +** As a special exception, The Qt Company gives you certain additional +** rights. These rights are described in The Qt Company LGPL Exception +** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. +** +** $QT_END_LICENSE$ +** +****************************************************************************/ + +package org.qtproject.qt5.android.multimedia; + +import android.view.SurfaceHolder; + +public class QtSurfaceHolderCallback implements SurfaceHolder.Callback +{ + private long m_id = -1; + + public QtSurfaceHolderCallback(long id) + { + m_id = id; + } + + @Override + public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) + { + } + + @Override + public void surfaceCreated(SurfaceHolder holder) + { + notifySurfaceCreated(m_id); + } + + @Override + public void surfaceDestroyed(SurfaceHolder holder) + { + notifySurfaceDestroyed(m_id); + } + + + private static native void notifySurfaceCreated(long id); + private static native void notifySurfaceDestroyed(long id); +} diff --git a/src/plugins/android/src/common/common.pri b/src/plugins/android/src/common/common.pri index f99dad507..9c741bd94 100644 --- a/src/plugins/android/src/common/common.pri +++ b/src/plugins/android/src/common/common.pri @@ -2,9 +2,8 @@ INCLUDEPATH += $$PWD HEADERS += \ $$PWD/qandroidvideooutput.h \ - 
$$PWD/qandroidvideorendercontrol.h \ $$PWD/qandroidmultimediautils.h SOURCES += \ - $$PWD/qandroidvideorendercontrol.cpp \ + $$PWD/qandroidvideooutput.cpp \ $$PWD/qandroidmultimediautils.cpp diff --git a/src/plugins/android/src/common/qandroidmultimediautils.cpp b/src/plugins/android/src/common/qandroidmultimediautils.cpp index 9255db549..6b6ca3255 100644 --- a/src/plugins/android/src/common/qandroidmultimediautils.cpp +++ b/src/plugins/android/src/common/qandroidmultimediautils.cpp @@ -68,5 +68,40 @@ bool qt_sizeLessThan(const QSize &s1, const QSize &s2) return s1.width() * s1.height() < s2.width() * s2.height(); } +QVideoFrame::PixelFormat qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat f) +{ + switch (f) { + case AndroidCamera::NV21: + return QVideoFrame::Format_NV21; + case AndroidCamera::YV12: + return QVideoFrame::Format_YV12; + case AndroidCamera::RGB565: + return QVideoFrame::Format_RGB565; + case AndroidCamera::YUY2: + return QVideoFrame::Format_YUYV; + case AndroidCamera::JPEG: + return QVideoFrame::Format_Jpeg; + default: + return QVideoFrame::Format_Invalid; + } +} + +AndroidCamera::ImageFormat qt_androidImageFormatFromPixelFormat(QVideoFrame::PixelFormat f) +{ + switch (f) { + case QVideoFrame::Format_NV21: + return AndroidCamera::NV21; + case QVideoFrame::Format_YV12: + return AndroidCamera::YV12; + case QVideoFrame::Format_RGB565: + return AndroidCamera::RGB565; + case QVideoFrame::Format_YUYV: + return AndroidCamera::YUY2; + case QVideoFrame::Format_Jpeg: + return AndroidCamera::JPEG; + default: + return AndroidCamera::UnknownImageFormat; + } +} QT_END_NAMESPACE diff --git a/src/plugins/android/src/common/qandroidmultimediautils.h b/src/plugins/android/src/common/qandroidmultimediautils.h index 6955c49e9..622f343f5 100644 --- a/src/plugins/android/src/common/qandroidmultimediautils.h +++ b/src/plugins/android/src/common/qandroidmultimediautils.h @@ -36,6 +36,7 @@ #include <qglobal.h> #include <qsize.h> +#include "androidcamera.h" 
QT_BEGIN_NAMESPACE @@ -45,6 +46,8 @@ int qt_findClosestValue(const QList<int> &list, int value); bool qt_sizeLessThan(const QSize &s1, const QSize &s2); +QVideoFrame::PixelFormat qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat f); +AndroidCamera::ImageFormat qt_androidImageFormatFromPixelFormat(QVideoFrame::PixelFormat f); QT_END_NAMESPACE diff --git a/src/plugins/android/src/common/qandroidvideorendercontrol.cpp b/src/plugins/android/src/common/qandroidvideooutput.cpp index cd9c9d1d6..82c27035d 100644 --- a/src/plugins/android/src/common/qandroidvideorendercontrol.cpp +++ b/src/plugins/android/src/common/qandroidvideooutput.cpp @@ -31,9 +31,9 @@ ** ****************************************************************************/ -#include "qandroidvideorendercontrol.h" -#include "androidsurfacetexture.h" +#include "qandroidvideooutput.h" +#include "androidsurfacetexture.h" #include <QAbstractVideoSurface> #include <QVideoSurfaceFormat> #include <qevent.h> @@ -59,19 +59,13 @@ static const GLfloat g_texture_data[] = { 0.f, 1.f }; -OpenGLResourcesDeleter::~OpenGLResourcesDeleter() -{ - glDeleteTextures(1, &m_textureID); - delete m_fbo; - delete m_program; -} class AndroidTextureVideoBuffer : public QAbstractVideoBuffer { public: - AndroidTextureVideoBuffer(QAndroidVideoRendererControl *control) + AndroidTextureVideoBuffer(QAndroidTextureVideoOutput *output) : QAbstractVideoBuffer(GLTextureHandle) - , m_control(control) + , m_output(output) , m_textureUpdated(false) , m_mapMode(NotMapped) { @@ -86,7 +80,7 @@ public: if (m_mapMode == NotMapped && mode == ReadOnly) { updateFrame(); m_mapMode = mode; - m_image = m_control->m_fbo->toImage(); + m_image = m_output->m_fbo->toImage(); if (numBytes) *numBytes = m_image.byteCount(); @@ -110,7 +104,7 @@ public: { AndroidTextureVideoBuffer *that = const_cast<AndroidTextureVideoBuffer*>(this); that->updateFrame(); - return m_control->m_fbo->texture(); + return m_output->m_fbo->texture(); } private: @@ -118,19 +112,47 
@@ private: { if (!m_textureUpdated) { // update the video texture (called from the render thread) - m_control->renderFrameToFbo(); + m_output->renderFrameToFbo(); m_textureUpdated = true; } } - QAndroidVideoRendererControl *m_control; + QAndroidTextureVideoOutput *m_output; bool m_textureUpdated; MapMode m_mapMode; QImage m_image; }; -QAndroidVideoRendererControl::QAndroidVideoRendererControl(QObject *parent) - : QVideoRendererControl(parent) + +class OpenGLResourcesDeleter : public QObject +{ +public: + OpenGLResourcesDeleter() + : m_textureID(0) + , m_fbo(0) + , m_program(0) + { } + + ~OpenGLResourcesDeleter() + { + glDeleteTextures(1, &m_textureID); + delete m_fbo; + delete m_program; + } + + void setTexture(quint32 id) { m_textureID = id; } + void setFbo(QOpenGLFramebufferObject *fbo) { m_fbo = fbo; } + void setShaderProgram(QOpenGLShaderProgram *prog) { m_program = prog; } + +private: + quint32 m_textureID; + QOpenGLFramebufferObject *m_fbo; + QOpenGLShaderProgram *m_program; +}; + + +QAndroidTextureVideoOutput::QAndroidTextureVideoOutput(QObject *parent) + : QAndroidVideoOutput(parent) , m_surface(0) , m_surfaceTexture(0) , m_externalTex(0) @@ -138,9 +160,10 @@ QAndroidVideoRendererControl::QAndroidVideoRendererControl(QObject *parent) , m_program(0) , m_glDeleter(0) { + } -QAndroidVideoRendererControl::~QAndroidVideoRendererControl() +QAndroidTextureVideoOutput::~QAndroidTextureVideoOutput() { clearSurfaceTexture(); @@ -148,12 +171,12 @@ QAndroidVideoRendererControl::~QAndroidVideoRendererControl() m_glDeleter->deleteLater(); } -QAbstractVideoSurface *QAndroidVideoRendererControl::surface() const +QAbstractVideoSurface *QAndroidTextureVideoOutput::surface() const { return m_surface; } -void QAndroidVideoRendererControl::setSurface(QAbstractVideoSurface *surface) +void QAndroidTextureVideoOutput::setSurface(QAbstractVideoSurface *surface) { if (surface == m_surface) return; @@ -172,12 +195,12 @@ void 
QAndroidVideoRendererControl::setSurface(QAbstractVideoSurface *surface) } } -bool QAndroidVideoRendererControl::isReady() +bool QAndroidTextureVideoOutput::isReady() { return QOpenGLContext::currentContext() || m_externalTex; } -bool QAndroidVideoRendererControl::initSurfaceTexture() +bool QAndroidTextureVideoOutput::initSurfaceTexture() { if (m_surfaceTexture) return true; @@ -210,7 +233,7 @@ bool QAndroidVideoRendererControl::initSurfaceTexture() return m_surfaceTexture != 0; } -void QAndroidVideoRendererControl::clearSurfaceTexture() +void QAndroidTextureVideoOutput::clearSurfaceTexture() { if (m_surfaceTexture) { delete m_surfaceTexture; @@ -218,7 +241,7 @@ void QAndroidVideoRendererControl::clearSurfaceTexture() } } -AndroidSurfaceTexture *QAndroidVideoRendererControl::surfaceTexture() +AndroidSurfaceTexture *QAndroidTextureVideoOutput::surfaceTexture() { if (!initSurfaceTexture()) return 0; @@ -226,7 +249,7 @@ AndroidSurfaceTexture *QAndroidVideoRendererControl::surfaceTexture() return m_surfaceTexture; } -void QAndroidVideoRendererControl::setVideoSize(const QSize &size) +void QAndroidTextureVideoOutput::setVideoSize(const QSize &size) { QMutexLocker locker(&m_mutex); @@ -238,19 +261,19 @@ void QAndroidVideoRendererControl::setVideoSize(const QSize &size) m_nativeSize = size; } -void QAndroidVideoRendererControl::stop() +void QAndroidTextureVideoOutput::stop() { if (m_surface && m_surface->isActive()) m_surface->stop(); m_nativeSize = QSize(); } -void QAndroidVideoRendererControl::reset() +void QAndroidTextureVideoOutput::reset() { clearSurfaceTexture(); } -void QAndroidVideoRendererControl::onFrameAvailable() +void QAndroidTextureVideoOutput::onFrameAvailable() { if (!m_nativeSize.isValid() || !m_surface) return; @@ -274,7 +297,7 @@ void QAndroidVideoRendererControl::onFrameAvailable() m_surface->present(frame); } -void QAndroidVideoRendererControl::renderFrameToFbo() +void QAndroidTextureVideoOutput::renderFrameToFbo() { QMutexLocker locker(&m_mutex); @@ 
-333,7 +356,7 @@ void QAndroidVideoRendererControl::renderFrameToFbo() glEnable(GL_BLEND); } -void QAndroidVideoRendererControl::createGLResources() +void QAndroidTextureVideoOutput::createGLResources() { if (!m_fbo || m_fbo->size() != m_nativeSize) { delete m_fbo; @@ -374,7 +397,7 @@ void QAndroidVideoRendererControl::createGLResources() } } -void QAndroidVideoRendererControl::customEvent(QEvent *e) +void QAndroidTextureVideoOutput::customEvent(QEvent *e) { if (e->type() == QEvent::User) { // This is running in the render thread (OpenGL enabled) diff --git a/src/plugins/android/src/common/qandroidvideooutput.h b/src/plugins/android/src/common/qandroidvideooutput.h index d45779d12..f4401fa1d 100644 --- a/src/plugins/android/src/common/qandroidvideooutput.h +++ b/src/plugins/android/src/common/qandroidvideooutput.h @@ -34,19 +34,27 @@ #ifndef QANDROIDVIDEOOUTPUT_H #define QANDROIDVIDEOOUTPUT_H -#include <qglobal.h> +#include <qobject.h> #include <qsize.h> +#include <qmutex.h> QT_BEGIN_NAMESPACE class AndroidSurfaceTexture; +class AndroidSurfaceHolder; +class QOpenGLFramebufferObject; +class QOpenGLShaderProgram; +class OpenGLResourcesDeleter; +class QAbstractVideoSurface; -class QAndroidVideoOutput +class QAndroidVideoOutput : public QObject { + Q_OBJECT public: virtual ~QAndroidVideoOutput() { } virtual AndroidSurfaceTexture *surfaceTexture() { return 0; } + virtual AndroidSurfaceHolder *surfaceHolder() { return 0; } virtual bool isReady() { return true; } @@ -54,12 +62,56 @@ public: virtual void stop() { } virtual void reset() { } - // signals: - // void readyChanged(bool); +Q_SIGNALS: + void readyChanged(bool); + +protected: + QAndroidVideoOutput(QObject *parent) : QObject(parent) { } }; -#define QAndroidVideoOutput_iid "org.qt-project.qt.qandroidvideooutput/5.0" -Q_DECLARE_INTERFACE(QAndroidVideoOutput, QAndroidVideoOutput_iid) + +class QAndroidTextureVideoOutput : public QAndroidVideoOutput +{ + Q_OBJECT +public: + explicit QAndroidTextureVideoOutput(QObject 
*parent = 0); + ~QAndroidTextureVideoOutput() Q_DECL_OVERRIDE; + + QAbstractVideoSurface *surface() const; + void setSurface(QAbstractVideoSurface *surface); + + AndroidSurfaceTexture *surfaceTexture() Q_DECL_OVERRIDE; + + bool isReady() Q_DECL_OVERRIDE; + void setVideoSize(const QSize &) Q_DECL_OVERRIDE; + void stop() Q_DECL_OVERRIDE; + void reset() Q_DECL_OVERRIDE; + + void customEvent(QEvent *) Q_DECL_OVERRIDE; + +private Q_SLOTS: + void onFrameAvailable(); + +private: + bool initSurfaceTexture(); + void renderFrameToFbo(); + void createGLResources(); + + QMutex m_mutex; + void clearSurfaceTexture(); + + QAbstractVideoSurface *m_surface; + QSize m_nativeSize; + + AndroidSurfaceTexture *m_surfaceTexture; + + quint32 m_externalTex; + QOpenGLFramebufferObject *m_fbo; + QOpenGLShaderProgram *m_program; + OpenGLResourcesDeleter *m_glDeleter; + + friend class AndroidTextureVideoBuffer; +}; QT_END_NAMESPACE diff --git a/src/plugins/android/src/mediacapture/mediacapture.pri b/src/plugins/android/src/mediacapture/mediacapture.pri index fde0e3d6f..2811f0371 100644 --- a/src/plugins/android/src/mediacapture/mediacapture.pri +++ b/src/plugins/android/src/mediacapture/mediacapture.pri @@ -22,7 +22,8 @@ SOURCES += \ $$PWD/qandroidvideoencodersettingscontrol.cpp \ $$PWD/qandroidaudioinputselectorcontrol.cpp \ $$PWD/qandroidmediavideoprobecontrol.cpp \ - $$PWD/qandroidcamerainfocontrol.cpp + $$PWD/qandroidcamerainfocontrol.cpp \ + $$PWD/qandroidcameravideorenderercontrol.cpp HEADERS += \ $$PWD/qandroidcaptureservice.h \ @@ -46,4 +47,5 @@ HEADERS += \ $$PWD/qandroidvideoencodersettingscontrol.h \ $$PWD/qandroidaudioinputselectorcontrol.h \ $$PWD/qandroidmediavideoprobecontrol.h \ - $$PWD/qandroidcamerainfocontrol.h + $$PWD/qandroidcamerainfocontrol.h \ + $$PWD/qandroidcameravideorenderercontrol.h diff --git a/src/plugins/android/src/mediacapture/qandroidcamerasession.cpp b/src/plugins/android/src/mediacapture/qandroidcamerasession.cpp index 3623ce05a..eec31e65e 100644 --- 
a/src/plugins/android/src/mediacapture/qandroidcamerasession.cpp +++ b/src/plugins/android/src/mediacapture/qandroidcamerasession.cpp @@ -48,42 +48,6 @@ QT_BEGIN_NAMESPACE -class DataVideoBuffer : public QAbstractVideoBuffer -{ -public: - DataVideoBuffer(const QByteArray &d, int bpl = -1) - : QAbstractVideoBuffer(NoHandle) - , data(d) - , mode(NotMapped) - , bytesPerLine(bpl) - { } - - MapMode mapMode() const { return mode; } - - uchar *map(MapMode m, int *numBytes, int *bpl) - { - if (mode != NotMapped || m == NotMapped) - return 0; - - mode = m; - - if (numBytes) - *numBytes = data.size(); - - if (bpl) - *bpl = bytesPerLine; - - return reinterpret_cast<uchar *>(data.data()); - } - - void unmap() { mode = NotMapped; } - -private: - QByteArray data; - MapMode mode; - int bytesPerLine; -}; - Q_GLOBAL_STATIC(QList<AndroidCameraInfo>, g_availableCameras) QAndroidCameraSession::QAndroidCameraSession(QObject *parent) @@ -104,6 +68,7 @@ QAndroidCameraSession::QAndroidCameraSession(QObject *parent) , m_readyForCapture(false) , m_captureCanceled(false) , m_currentImageCaptureId(-1) + , m_previewCallback(0) { m_mediaStorageLocation.addStorageLocation( QMediaStorageLocation::Pictures, @@ -208,14 +173,17 @@ bool QAndroidCameraSession::open() if (m_camera) { connect(m_camera, SIGNAL(pictureExposed()), this, SLOT(onCameraPictureExposed())); - connect(m_camera, SIGNAL(lastPreviewFrameFetched(QByteArray,int,int)), - this, SLOT(onLastPreviewFrameFetched(QByteArray,int,int))); - connect(m_camera, SIGNAL(newPreviewFrame(QByteArray,int,int)), - this, SLOT(onNewPreviewFrame(QByteArray,int,int)), + connect(m_camera, SIGNAL(lastPreviewFrameFetched(QVideoFrame)), + this, SLOT(onLastPreviewFrameFetched(QVideoFrame)), + Qt::DirectConnection); + connect(m_camera, SIGNAL(newPreviewFrame(QVideoFrame)), + this, SLOT(onNewPreviewFrame(QVideoFrame)), Qt::DirectConnection); connect(m_camera, SIGNAL(pictureCaptured(QByteArray)), this, SLOT(onCameraPictureCaptured(QByteArray))); connect(m_camera, 
SIGNAL(previewStarted()), this, SLOT(onCameraPreviewStarted())); connect(m_camera, SIGNAL(previewStopped()), this, SLOT(onCameraPreviewStopped())); + connect(m_camera, &AndroidCamera::previewFailedToStart, this, &QAndroidCameraSession::onCameraPreviewFailedToStart); + connect(m_camera, &AndroidCamera::takePictureFailed, this, &QAndroidCameraSession::onCameraTakePictureFailed); m_nativeOrientation = m_camera->getNativeOrientation(); @@ -224,7 +192,7 @@ bool QAndroidCameraSession::open() if (m_camera->getPreviewFormat() != AndroidCamera::NV21) m_camera->setPreviewFormat(AndroidCamera::NV21); - m_camera->notifyNewFrames(m_videoProbes.count()); + m_camera->notifyNewFrames(m_videoProbes.count() || m_previewCallback); emit opened(); } else { @@ -259,16 +227,19 @@ void QAndroidCameraSession::close() emit statusChanged(m_status); } -void QAndroidCameraSession::setVideoPreview(QObject *videoOutput) +void QAndroidCameraSession::setVideoOutput(QAndroidVideoOutput *output) { if (m_videoOutput) { m_videoOutput->stop(); m_videoOutput->reset(); } - if (videoOutput) { - connect(videoOutput, SIGNAL(readyChanged(bool)), this, SLOT(onVideoOutputReady(bool))); - m_videoOutput = qobject_cast<QAndroidVideoOutput *>(videoOutput); + if (output) { + m_videoOutput = output; + if (m_videoOutput->isReady()) + onVideoOutputReady(true); + else + connect(m_videoOutput, SIGNAL(readyChanged(bool)), this, SLOT(onVideoOutputReady(bool))); } else { m_videoOutput = 0; } @@ -336,7 +307,10 @@ bool QAndroidCameraSession::startPreview() if (!m_videoOutput->isReady()) return true; // delay starting until the video output is ready - if (!m_camera->setPreviewTexture(m_videoOutput->surfaceTexture())) + Q_ASSERT(m_videoOutput->surfaceTexture() || m_videoOutput->surfaceHolder()); + + if ((m_videoOutput->surfaceTexture() && !m_camera->setPreviewTexture(m_videoOutput->surfaceTexture())) + || (m_videoOutput->surfaceHolder() && !m_camera->setPreviewDisplay(m_videoOutput->surfaceHolder()))) return false; m_status = 
QCamera::StartingStatus; @@ -366,6 +340,7 @@ void QAndroidCameraSession::stopPreview() m_camera->stopPreview(); m_camera->setPreviewSize(QSize()); m_camera->setPreviewTexture(0); + m_camera->setPreviewDisplay(0); if (m_videoOutput) { m_videoOutput->stop(); @@ -413,7 +388,7 @@ void QAndroidCameraSession::addProbe(QAndroidMediaVideoProbeControl *probe) if (probe) m_videoProbes << probe; if (m_camera) - m_camera->notifyNewFrames(m_videoProbes.count()); + m_camera->notifyNewFrames(m_videoProbes.count() || m_previewCallback); m_videoProbesMutex.unlock(); } @@ -422,7 +397,24 @@ void QAndroidCameraSession::removeProbe(QAndroidMediaVideoProbeControl *probe) m_videoProbesMutex.lock(); m_videoProbes.remove(probe); if (m_camera) - m_camera->notifyNewFrames(m_videoProbes.count()); + m_camera->notifyNewFrames(m_videoProbes.count() || m_previewCallback); + m_videoProbesMutex.unlock(); +} + +void QAndroidCameraSession::setPreviewFormat(AndroidCamera::ImageFormat format) +{ + if (format == AndroidCamera::UnknownImageFormat) + return; + + m_camera->setPreviewFormat(format); +} + +void QAndroidCameraSession::setPreviewCallback(PreviewCallback *callback) +{ + m_videoProbesMutex.lock(); + m_previewCallback = callback; + if (m_camera) + m_camera->notifyNewFrames(m_videoProbes.count() || m_previewCallback); m_videoProbesMutex.unlock(); } @@ -556,6 +548,12 @@ void QAndroidCameraSession::cancelCapture() m_captureCanceled = true; } +void QAndroidCameraSession::onCameraTakePictureFailed() +{ + emit imageCaptureError(m_currentImageCaptureId, QCameraImageCapture::ResourceError, + tr("Failed to capture image")); +} + void QAndroidCameraSession::onCameraPictureExposed() { if (m_captureCanceled) @@ -565,57 +563,37 @@ void QAndroidCameraSession::onCameraPictureExposed() m_camera->fetchLastPreviewFrame(); } -void QAndroidCameraSession::onLastPreviewFrameFetched(const QByteArray &preview, int width, int height) +void QAndroidCameraSession::onLastPreviewFrameFetched(const QVideoFrame &frame) { - if 
(preview.size()) { - QtConcurrent::run(this, &QAndroidCameraSession::processPreviewImage, - m_currentImageCaptureId, - preview, - width, - height, - m_camera->getRotation()); - } + QtConcurrent::run(this, &QAndroidCameraSession::processPreviewImage, + m_currentImageCaptureId, + frame, + m_camera->getRotation()); } -void QAndroidCameraSession::processPreviewImage(int id, const QByteArray &data, int width, int height, int rotation) +void QAndroidCameraSession::processPreviewImage(int id, const QVideoFrame &frame, int rotation) { - emit imageCaptured(id, prepareImageFromPreviewData(data, width, height, rotation)); -} - -QImage QAndroidCameraSession::prepareImageFromPreviewData(const QByteArray &data, int width, int height, int rotation) -{ - QVideoFrame frame(new QMemoryVideoBuffer(data, width), - QSize(width, height), QVideoFrame::Format_NV21); - - QImage result = qt_imageFromVideoFrame(frame); - - QTransform transform; - // Preview display of front-facing cameras is flipped horizontally, but the frame data // we get here is not. Flip it ourselves if the camera is front-facing to match what the user // sees on the viewfinder. + QTransform transform; if (m_camera->getFacing() == AndroidCamera::CameraFacingFront) transform.scale(-1, 1); - transform.rotate(rotation); - result = result.transformed(transform); - - return result; + emit imageCaptured(id, qt_imageFromVideoFrame(frame).transformed(transform)); } -void QAndroidCameraSession::onNewPreviewFrame(const QByteArray &frame, int width, int height) +void QAndroidCameraSession::onNewPreviewFrame(const QVideoFrame &frame) { m_videoProbesMutex.lock(); - if (frame.size() && m_videoProbes.count()) { - // Bytes per line should be only for the first plane. 
For NV21, the Y plane has 8 bits - // per sample, so bpl == width - QVideoFrame videoFrame(new DataVideoBuffer(frame, width), - QSize(width, height), - QVideoFrame::Format_NV21); - for (QAndroidMediaVideoProbeControl *probe : qAsConst(m_videoProbes)) - probe->newFrameProbed(videoFrame); - } + + for (QAndroidMediaVideoProbeControl *probe : qAsConst(m_videoProbes)) + probe->newFrameProbed(frame); + + if (m_previewCallback) + m_previewCallback->onFrameAvailable(frame); + m_videoProbesMutex.unlock(); } @@ -647,6 +625,27 @@ void QAndroidCameraSession::onCameraPreviewStarted() setReadyForCapture(true); } +void QAndroidCameraSession::onCameraPreviewFailedToStart() +{ + if (m_status == QCamera::StartingStatus) { + Q_EMIT error(QCamera::CameraError, tr("Camera preview failed to start.")); + + AndroidMultimediaUtils::enableOrientationListener(false); + m_camera->setPreviewSize(QSize()); + m_camera->setPreviewTexture(0); + if (m_videoOutput) { + m_videoOutput->stop(); + m_videoOutput->reset(); + } + m_previewStarted = false; + + m_status = QCamera::LoadedStatus; + emit statusChanged(m_status); + + setReadyForCapture(false); + } +} + void QAndroidCameraSession::onCameraPreviewStopped() { if (m_status == QCamera::StoppingStatus) { @@ -692,7 +691,7 @@ void QAndroidCameraSession::processCapturedImage(int id, } if (dest & QCameraImageCapture::CaptureToBuffer) { - QVideoFrame frame(new DataVideoBuffer(data), resolution, QVideoFrame::Format_Jpeg); + QVideoFrame frame(new QMemoryVideoBuffer(data, -1), resolution, QVideoFrame::Format_Jpeg); emit imageAvailable(id, frame); } } diff --git a/src/plugins/android/src/mediacapture/qandroidcamerasession.h b/src/plugins/android/src/mediacapture/qandroidcamerasession.h index a56721bcd..d15509fe8 100644 --- a/src/plugins/android/src/mediacapture/qandroidcamerasession.h +++ b/src/plugins/android/src/mediacapture/qandroidcamerasession.h @@ -68,7 +68,8 @@ public: void setCaptureMode(QCamera::CaptureModes mode); bool 
isCaptureModeSupported(QCamera::CaptureModes mode) const; - void setVideoPreview(QObject *videoOutput); + QAndroidVideoOutput *videoOutput() const { return m_videoOutput; } + void setVideoOutput(QAndroidVideoOutput *output); void adjustViewfinderSize(const QSize &captureSize, bool restartPreview = true); QImageEncoderSettings imageSettings() const { return m_imageSettings; } @@ -90,6 +91,14 @@ public: void addProbe(QAndroidMediaVideoProbeControl *probe); void removeProbe(QAndroidMediaVideoProbeControl *probe); + void setPreviewFormat(AndroidCamera::ImageFormat format); + + struct PreviewCallback + { + virtual void onFrameAvailable(const QVideoFrame &frame) = 0; + }; + void setPreviewCallback(PreviewCallback *callback); + Q_SIGNALS: void statusChanged(QCamera::Status status); void stateChanged(QCamera::State); @@ -112,11 +121,13 @@ private Q_SLOTS: void onApplicationStateChanged(Qt::ApplicationState state); + void onCameraTakePictureFailed(); void onCameraPictureExposed(); void onCameraPictureCaptured(const QByteArray &data); - void onLastPreviewFrameFetched(const QByteArray &preview, int width, int height); - void onNewPreviewFrame(const QByteArray &frame, int width, int height); + void onLastPreviewFrameFetched(const QVideoFrame &frame); + void onNewPreviewFrame(const QVideoFrame &frame); void onCameraPreviewStarted(); + void onCameraPreviewFailedToStart(); void onCameraPreviewStopped(); private: @@ -129,8 +140,8 @@ private: void stopPreview(); void applyImageSettings(); - void processPreviewImage(int id, const QByteArray &data, int width, int height, int rotation); - QImage prepareImageFromPreviewData(const QByteArray &data, int width, int height, int rotation); + + void processPreviewImage(int id, const QVideoFrame &frame, int rotation); void processCapturedImage(int id, const QByteArray &data, const QSize &resolution, @@ -162,6 +173,7 @@ private: QSet<QAndroidMediaVideoProbeControl *> m_videoProbes; QMutex m_videoProbesMutex; + PreviewCallback 
*m_previewCallback; }; QT_END_NAMESPACE diff --git a/src/plugins/android/src/mediacapture/qandroidcameravideorenderercontrol.cpp b/src/plugins/android/src/mediacapture/qandroidcameravideorenderercontrol.cpp new file mode 100644 index 000000000..1d5b521b8 --- /dev/null +++ b/src/plugins/android/src/mediacapture/qandroidcameravideorenderercontrol.cpp @@ -0,0 +1,275 @@ +/**************************************************************************** +** +** Copyright (C) 2015 The Qt Company Ltd. +** Contact: http://www.qt.io/licensing/ +** +** This file is part of the Qt Toolkit. +** +** $QT_BEGIN_LICENSE:LGPL21$ +** Commercial License Usage +** Licensees holding valid commercial Qt licenses may use this file in +** accordance with the commercial license agreement provided with the +** Software or, alternatively, in accordance with the terms contained in +** a written agreement between you and The Qt Company. For licensing terms +** and conditions see http://www.qt.io/terms-conditions. For further +** information use the contact form at http://www.qt.io/contact-us. +** +** GNU Lesser General Public License Usage +** Alternatively, this file may be used under the terms of the GNU Lesser +** General Public License version 2.1 or version 3 as published by the Free +** Software Foundation and appearing in the file LICENSE.LGPLv21 and +** LICENSE.LGPLv3 included in the packaging of this file. Please review the +** following information to ensure the GNU Lesser General Public License +** requirements will be met: https://www.gnu.org/licenses/lgpl.html and +** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. +** +** As a special exception, The Qt Company gives you certain additional +** rights. These rights are described in The Qt Company LGPL Exception +** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. 
+** +** $QT_END_LICENSE$ +** +****************************************************************************/ + +#include "qandroidcameravideorenderercontrol.h" + +#include "qandroidcamerasession.h" +#include "qandroidvideooutput.h" +#include "androidsurfaceview.h" +#include "qandroidmultimediautils.h" +#include <qabstractvideosurface.h> +#include <qvideosurfaceformat.h> +#include <qcoreapplication.h> +#include <qthread.h> + +QT_BEGIN_NAMESPACE + +class QAndroidCameraDataVideoOutput : public QAndroidVideoOutput + , public QAndroidCameraSession::PreviewCallback +{ + Q_OBJECT +public: + explicit QAndroidCameraDataVideoOutput(QAndroidCameraVideoRendererControl *control); + ~QAndroidCameraDataVideoOutput() Q_DECL_OVERRIDE; + + AndroidSurfaceHolder *surfaceHolder() Q_DECL_OVERRIDE; + + bool isReady() Q_DECL_OVERRIDE; + + void stop() Q_DECL_OVERRIDE; + +private Q_SLOTS: + void onSurfaceCreated(); + void configureFormat(); + +private: + void onFrameAvailable(const QVideoFrame &frame); + void presentFrame(); + bool event(QEvent *); + + QAndroidCameraVideoRendererControl *m_control; + AndroidSurfaceView *m_surfaceView; + QMutex m_mutex; + QVideoFrame::PixelFormat m_pixelFormat; + QVideoFrame m_lastFrame; +}; + +QAndroidCameraDataVideoOutput::QAndroidCameraDataVideoOutput(QAndroidCameraVideoRendererControl *control) + : QAndroidVideoOutput(control) + , m_control(control) + , m_pixelFormat(QVideoFrame::Format_Invalid) +{ + // The camera preview cannot be started unless we set a SurfaceTexture or a + // SurfaceHolder. In this case we don't actually care about either of these, but since + // we need to, we setup an offscreen dummy SurfaceView in order to be able to start + // the camera preview. We'll then be able to use setPreviewCallbackWithBuffer() to + // get the raw data. 
+ + m_surfaceView = new AndroidSurfaceView; + + connect(m_surfaceView, &AndroidSurfaceView::surfaceCreated, + this, &QAndroidCameraDataVideoOutput::onSurfaceCreated); + + m_surfaceView->setGeometry(-1, -1, 1, 1); + m_surfaceView->setVisible(true); + + connect(m_control->cameraSession(), &QAndroidCameraSession::opened, + this, &QAndroidCameraDataVideoOutput::configureFormat); + connect(m_control->surface(), &QAbstractVideoSurface::supportedFormatsChanged, + this, &QAndroidCameraDataVideoOutput::configureFormat); + configureFormat(); +} + +QAndroidCameraDataVideoOutput::~QAndroidCameraDataVideoOutput() +{ + m_control->cameraSession()->setPreviewCallback(Q_NULLPTR); + delete m_surfaceView; +} + +AndroidSurfaceHolder *QAndroidCameraDataVideoOutput::surfaceHolder() +{ + return m_surfaceView->holder(); +} + +bool QAndroidCameraDataVideoOutput::isReady() +{ + return m_surfaceView->holder() && m_surfaceView->holder()->isSurfaceCreated(); +} + +void QAndroidCameraDataVideoOutput::onSurfaceCreated() +{ + emit readyChanged(true); +} + +void QAndroidCameraDataVideoOutput::configureFormat() +{ + m_pixelFormat = QVideoFrame::Format_Invalid; + + if (!m_control->cameraSession()->camera()) + return; + + QList<QVideoFrame::PixelFormat> surfaceFormats = m_control->surface()->supportedPixelFormats(); + QList<AndroidCamera::ImageFormat> previewFormats = m_control->cameraSession()->camera()->getSupportedPreviewFormats(); + for (int i = 0; i < surfaceFormats.size(); ++i) { + QVideoFrame::PixelFormat pixFormat = surfaceFormats.at(i); + AndroidCamera::ImageFormat f = qt_androidImageFormatFromPixelFormat(pixFormat); + if (previewFormats.contains(f)) { + m_pixelFormat = pixFormat; + break; + } + } + + if (m_pixelFormat == QVideoFrame::Format_Invalid) { + m_control->cameraSession()->setPreviewCallback(Q_NULLPTR); + qWarning("The video surface is not compatible with any format supported by the camera"); + } else { + m_control->cameraSession()->setPreviewCallback(this); + + if 
(m_control->cameraSession()->status() > QCamera::LoadedStatus) + m_control->cameraSession()->camera()->stopPreview(); + + m_control->cameraSession()->setPreviewFormat(qt_androidImageFormatFromPixelFormat(m_pixelFormat)); + + if (m_control->cameraSession()->status() > QCamera::LoadedStatus) + m_control->cameraSession()->camera()->startPreview(); + } +} + +void QAndroidCameraDataVideoOutput::stop() +{ + m_mutex.lock(); + m_lastFrame = QVideoFrame(); + m_mutex.unlock(); + + if (m_control->surface() && m_control->surface()->isActive()) + m_control->surface()->stop(); +} + +void QAndroidCameraDataVideoOutput::onFrameAvailable(const QVideoFrame &frame) +{ + m_mutex.lock(); + m_lastFrame = frame; + m_mutex.unlock(); + + if (thread() == QThread::currentThread()) + presentFrame(); + else + QCoreApplication::postEvent(this, new QEvent(QEvent::User), Qt::HighEventPriority); +} + +bool QAndroidCameraDataVideoOutput::event(QEvent *e) +{ + if (e->type() == QEvent::User) { + presentFrame(); + return true; + } + + return QObject::event(e); +} + +void QAndroidCameraDataVideoOutput::presentFrame() +{ + Q_ASSERT(thread() == QThread::currentThread()); + + QMutexLocker locker(&m_mutex); + + if (m_control->surface() && m_lastFrame.isValid() && m_lastFrame.pixelFormat() == m_pixelFormat) { + + if (m_control->surface()->isActive() && (m_control->surface()->surfaceFormat().pixelFormat() != m_lastFrame.pixelFormat() + || m_control->surface()->surfaceFormat().frameSize() != m_lastFrame.size())) { + m_control->surface()->stop(); + } + + if (!m_control->surface()->isActive()) { + QVideoSurfaceFormat format(m_lastFrame.size(), m_lastFrame.pixelFormat(), m_lastFrame.handleType()); + // Front camera frames are automatically mirrored when using SurfaceTexture or SurfaceView, + // but the buffers we get from the data callback are not. Tell the QAbstractVideoSurface + // that it needs to mirror the frames. 
+ if (m_control->cameraSession()->camera()->getFacing() == AndroidCamera::CameraFacingFront) + format.setProperty("mirrored", true); + + m_control->surface()->start(format); + } + + if (m_control->surface()->isActive()) + m_control->surface()->present(m_lastFrame); + } + + m_lastFrame = QVideoFrame(); +} + + +QAndroidCameraVideoRendererControl::QAndroidCameraVideoRendererControl(QAndroidCameraSession *session, QObject *parent) + : QVideoRendererControl(parent) + , m_cameraSession(session) + , m_surface(0) + , m_textureOutput(0) + , m_dataOutput(0) +{ +} + +QAndroidCameraVideoRendererControl::~QAndroidCameraVideoRendererControl() +{ + m_cameraSession->setVideoOutput(0); +} + +QAbstractVideoSurface *QAndroidCameraVideoRendererControl::surface() const +{ + return m_surface; +} + +void QAndroidCameraVideoRendererControl::setSurface(QAbstractVideoSurface *surface) +{ + if (m_surface == surface) + return; + + m_surface = surface; + QAndroidVideoOutput *oldOutput = m_textureOutput ? static_cast<QAndroidVideoOutput*>(m_textureOutput) + : static_cast<QAndroidVideoOutput*>(m_dataOutput); + QAndroidVideoOutput *newOutput = 0; + + if (m_surface) { + if (!m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle).isEmpty()) { + if (!m_textureOutput) { + m_dataOutput = 0; + newOutput = m_textureOutput = new QAndroidTextureVideoOutput(this); + } + } else if (!m_dataOutput) { + m_textureOutput = 0; + newOutput = m_dataOutput = new QAndroidCameraDataVideoOutput(this); + } + + if (m_textureOutput) + m_textureOutput->setSurface(m_surface); + } + + if (newOutput != oldOutput) { + m_cameraSession->setVideoOutput(newOutput); + delete oldOutput; + } +} + +QT_END_NAMESPACE + +#include "qandroidcameravideorenderercontrol.moc" + diff --git a/src/plugins/android/src/common/qandroidvideorendercontrol.h b/src/plugins/android/src/mediacapture/qandroidcameravideorenderercontrol.h index c660758fb..4b6428ba0 100644 --- a/src/plugins/android/src/common/qandroidvideorendercontrol.h +++ 
b/src/plugins/android/src/mediacapture/qandroidcameravideorenderercontrol.h @@ -31,88 +31,36 @@ ** ****************************************************************************/ -#ifndef QANDROIDVIDEORENDERCONTROL_H -#define QANDROIDVIDEORENDERCONTROL_H +#ifndef QANDROIDCAMERAVIDEORENDERERCONTROL_H +#define QANDROIDCAMERAVIDEORENDERERCONTROL_H #include <qvideorenderercontrol.h> -#include <qmutex.h> -#include "qandroidvideooutput.h" QT_BEGIN_NAMESPACE -class QOpenGLTexture; -class QOpenGLFramebufferObject; -class QOpenGLShaderProgram; -class AndroidSurfaceTexture; +class QAndroidCameraSession; +class QAndroidTextureVideoOutput; +class QAndroidCameraDataVideoOutput; -class OpenGLResourcesDeleter : public QObject +class QAndroidCameraVideoRendererControl : public QVideoRendererControl { Q_OBJECT public: - OpenGLResourcesDeleter() - : m_textureID(0) - , m_fbo(0) - , m_program(0) - { } - - ~OpenGLResourcesDeleter(); - - void setTexture(quint32 id) { m_textureID = id; } - void setFbo(QOpenGLFramebufferObject *fbo) { m_fbo = fbo; } - void setShaderProgram(QOpenGLShaderProgram *prog) { m_program = prog; } - -private: - quint32 m_textureID; - QOpenGLFramebufferObject *m_fbo; - QOpenGLShaderProgram *m_program; -}; - -class QAndroidVideoRendererControl : public QVideoRendererControl, public QAndroidVideoOutput -{ - Q_OBJECT - Q_INTERFACES(QAndroidVideoOutput) -public: - explicit QAndroidVideoRendererControl(QObject *parent = 0); - ~QAndroidVideoRendererControl() Q_DECL_OVERRIDE; + QAndroidCameraVideoRendererControl(QAndroidCameraSession *session, QObject *parent = 0); + ~QAndroidCameraVideoRendererControl() Q_DECL_OVERRIDE; QAbstractVideoSurface *surface() const Q_DECL_OVERRIDE; void setSurface(QAbstractVideoSurface *surface) Q_DECL_OVERRIDE; - AndroidSurfaceTexture *surfaceTexture() Q_DECL_OVERRIDE; - bool isReady() Q_DECL_OVERRIDE; - void setVideoSize(const QSize &size) Q_DECL_OVERRIDE; - void stop() Q_DECL_OVERRIDE; - void reset() Q_DECL_OVERRIDE; - - void 
customEvent(QEvent *) Q_DECL_OVERRIDE; - -Q_SIGNALS: - void readyChanged(bool); - -private Q_SLOTS: - void onFrameAvailable(); + QAndroidCameraSession *cameraSession() const { return m_cameraSession; } private: - bool initSurfaceTexture(); - void renderFrameToFbo(); - void createGLResources(); - - QMutex m_mutex; - void clearSurfaceTexture(); - + QAndroidCameraSession *m_cameraSession; QAbstractVideoSurface *m_surface; - QSize m_nativeSize; - - AndroidSurfaceTexture *m_surfaceTexture; - - quint32 m_externalTex; - QOpenGLFramebufferObject *m_fbo; - QOpenGLShaderProgram *m_program; - OpenGLResourcesDeleter *m_glDeleter; - - friend class AndroidTextureVideoBuffer; + QAndroidTextureVideoOutput *m_textureOutput; + QAndroidCameraDataVideoOutput *m_dataOutput; }; QT_END_NAMESPACE -#endif // QANDROIDVIDEORENDERCONTROL_H +#endif // QANDROIDCAMERAVIDEORENDERERCONTROL_H diff --git a/src/plugins/android/src/mediacapture/qandroidcaptureservice.cpp b/src/plugins/android/src/mediacapture/qandroidcaptureservice.cpp index e9cdb1e78..d2107e8a5 100644 --- a/src/plugins/android/src/mediacapture/qandroidcaptureservice.cpp +++ b/src/plugins/android/src/mediacapture/qandroidcaptureservice.cpp @@ -40,7 +40,7 @@ #include "qandroidvideodeviceselectorcontrol.h" #include "qandroidaudioinputselectorcontrol.h" #include "qandroidcamerasession.h" -#include "qandroidvideorendercontrol.h" +#include "qandroidcameravideorenderercontrol.h" #include "qandroidcamerazoomcontrol.h" #include "qandroidcameraexposurecontrol.h" #include "qandroidcameraflashcontrol.h" @@ -196,8 +196,7 @@ QMediaControl *QAndroidCaptureService::requestControl(const char *name) if (qstrcmp(name, QVideoRendererControl_iid) == 0 && m_service == QLatin1String(Q_MEDIASERVICE_CAMERA) && !m_videoRendererControl) { - m_videoRendererControl = new QAndroidVideoRendererControl; - m_cameraSession->setVideoPreview(m_videoRendererControl); + m_videoRendererControl = new QAndroidCameraVideoRendererControl(m_cameraSession); return 
m_videoRendererControl; } @@ -217,7 +216,6 @@ void QAndroidCaptureService::releaseControl(QMediaControl *control) { if (control) { if (control == m_videoRendererControl) { - m_cameraSession->setVideoPreview(0); delete m_videoRendererControl; m_videoRendererControl = 0; return; diff --git a/src/plugins/android/src/mediacapture/qandroidcaptureservice.h b/src/plugins/android/src/mediacapture/qandroidcaptureservice.h index fc84ac124..02f063444 100644 --- a/src/plugins/android/src/mediacapture/qandroidcaptureservice.h +++ b/src/plugins/android/src/mediacapture/qandroidcaptureservice.h @@ -46,7 +46,7 @@ class QAndroidCameraInfoControl; class QAndroidVideoDeviceSelectorControl; class QAndroidAudioInputSelectorControl; class QAndroidCameraSession; -class QAndroidVideoRendererControl; +class QAndroidCameraVideoRendererControl; class QAndroidCameraZoomControl; class QAndroidCameraExposureControl; class QAndroidCameraFlashControl; @@ -82,7 +82,7 @@ private: QAndroidVideoDeviceSelectorControl *m_videoInputControl; QAndroidAudioInputSelectorControl *m_audioInputControl; QAndroidCameraSession *m_cameraSession; - QMediaControl *m_videoRendererControl; + QAndroidCameraVideoRendererControl *m_videoRendererControl; QAndroidCameraZoomControl *m_cameraZoomControl; QAndroidCameraExposureControl *m_cameraExposureControl; QAndroidCameraFlashControl *m_cameraFlashControl; diff --git a/src/plugins/android/src/mediacapture/qandroidcapturesession.cpp b/src/plugins/android/src/mediacapture/qandroidcapturesession.cpp index f2ea1b9d7..f02016654 100644 --- a/src/plugins/android/src/mediacapture/qandroidcapturesession.cpp +++ b/src/plugins/android/src/mediacapture/qandroidcapturesession.cpp @@ -37,6 +37,7 @@ #include "qandroidcamerasession.h" #include "androidmultimediautils.h" #include "qandroidmultimediautils.h" +#include "qandroidvideooutput.h" QT_BEGIN_NAMESPACE @@ -217,15 +218,31 @@ void QAndroidCaptureSession::start() m_usedOutputLocation = QUrl::fromLocalFile(filePath); 
m_mediaRecorder->setOutputFile(filePath); + // Even though the Android doc explicitly says that calling MediaRecorder.setPreviewDisplay() + // is not necessary when the Camera already has a Surface, it doesn't actually work on some + // devices. For example on the Samsung Galaxy Tab 2, the camera server dies after prepare() + // and start() if MediaRecorder.setPreviewDisplay() is not called. + if (m_cameraSession) { + // When using a SurfaceTexture, we need to pass a new one to the MediaRecorder, not the same + // one that is set on the Camera or it will crash, hence the reset(). + m_cameraSession->videoOutput()->reset(); + if (m_cameraSession->videoOutput()->surfaceTexture()) + m_mediaRecorder->setSurfaceTexture(m_cameraSession->videoOutput()->surfaceTexture()); + else if (m_cameraSession->videoOutput()->surfaceHolder()) + m_mediaRecorder->setSurfaceHolder(m_cameraSession->videoOutput()->surfaceHolder()); + } + if (!m_mediaRecorder->prepare()) { emit error(QMediaRecorder::FormatError, QLatin1String("Unable to prepare the media recorder.")); - restartViewfinder(); + if (m_cameraSession) + restartViewfinder(); return; } if (!m_mediaRecorder->start()) { emit error(QMediaRecorder::FormatError, QLatin1String("Unable to start the media recorder.")); - restartViewfinder(); + if (m_cameraSession) + restartViewfinder(); return; } @@ -412,13 +429,26 @@ void QAndroidCaptureSession::applySettings() void QAndroidCaptureSession::updateViewfinder() { - m_cameraSession->camera()->stopPreview(); + m_cameraSession->camera()->stopPreviewSynchronous(); m_cameraSession->adjustViewfinderSize(m_videoSettings.resolution(), false); } void QAndroidCaptureSession::restartViewfinder() { + if (!m_cameraSession) + return; + m_cameraSession->camera()->reconnect(); + + // This is not necessary on most devices, but it crashes on some if we don't stop the + // preview and reset the preview display on the camera when recording is over.
+ m_cameraSession->camera()->stopPreviewSynchronous(); + m_cameraSession->videoOutput()->reset(); + if (m_cameraSession->videoOutput()->surfaceTexture()) + m_cameraSession->camera()->setPreviewTexture(m_cameraSession->videoOutput()->surfaceTexture()); + else if (m_cameraSession->videoOutput()->surfaceHolder()) + m_cameraSession->camera()->setPreviewDisplay(m_cameraSession->videoOutput()->surfaceHolder()); + m_cameraSession->camera()->startPreview(); m_cameraSession->setReadyForCapture(true); } diff --git a/src/plugins/android/src/mediaplayer/mediaplayer.pri b/src/plugins/android/src/mediaplayer/mediaplayer.pri index c386d996b..9f758a993 100644 --- a/src/plugins/android/src/mediaplayer/mediaplayer.pri +++ b/src/plugins/android/src/mediaplayer/mediaplayer.pri @@ -3,9 +3,11 @@ INCLUDEPATH += $$PWD HEADERS += \ $$PWD/qandroidmediaplayercontrol.h \ $$PWD/qandroidmediaservice.h \ - $$PWD/qandroidmetadatareadercontrol.h + $$PWD/qandroidmetadatareadercontrol.h \ + $$PWD/qandroidmediaplayervideorenderercontrol.h SOURCES += \ $$PWD/qandroidmediaplayercontrol.cpp \ $$PWD/qandroidmediaservice.cpp \ - $$PWD/qandroidmetadatareadercontrol.cpp + $$PWD/qandroidmetadatareadercontrol.cpp \ + $$PWD/qandroidmediaplayervideorenderercontrol.cpp diff --git a/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp b/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp index 9a050e7ad..a6258a74d 100644 --- a/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp +++ b/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp @@ -345,7 +345,7 @@ void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent, mReloadingMedia = false; } -void QAndroidMediaPlayerControl::setVideoOutput(QObject *videoOutput) +void QAndroidMediaPlayerControl::setVideoOutput(QAndroidVideoOutput *videoOutput) { if (mVideoOutput) { mMediaPlayer->setDisplay(0); @@ -353,7 +353,7 @@ void QAndroidMediaPlayerControl::setVideoOutput(QObject *videoOutput) 
mVideoOutput->reset(); } - mVideoOutput = qobject_cast<QAndroidVideoOutput *>(videoOutput); + mVideoOutput = videoOutput; if (!mVideoOutput) return; @@ -563,6 +563,7 @@ void QAndroidMediaPlayerControl::onStateChanged(qint32 state) } else { onBufferingChanged(100); } + Q_EMIT metaDataUpdated(); setAudioAvailable(true); flushPendingStates(); break; diff --git a/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.h b/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.h index 3f92d809c..a015a6809 100644 --- a/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.h +++ b/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.h @@ -67,7 +67,7 @@ public: const QIODevice *mediaStream() const Q_DECL_OVERRIDE; void setMedia(const QMediaContent &mediaContent, QIODevice *stream) Q_DECL_OVERRIDE; - void setVideoOutput(QObject *videoOutput); + void setVideoOutput(QAndroidVideoOutput *videoOutput); Q_SIGNALS: void metaDataUpdated(); diff --git a/src/plugins/android/src/mediaplayer/qandroidmediaplayervideorenderercontrol.cpp b/src/plugins/android/src/mediaplayer/qandroidmediaplayervideorenderercontrol.cpp new file mode 100644 index 000000000..5dd51c395 --- /dev/null +++ b/src/plugins/android/src/mediaplayer/qandroidmediaplayervideorenderercontrol.cpp @@ -0,0 +1,70 @@ +/**************************************************************************** +** +** Copyright (C) 2015 The Qt Company Ltd. +** Contact: http://www.qt.io/licensing/ +** +** This file is part of the Qt Toolkit. +** +** $QT_BEGIN_LICENSE:LGPL21$ +** Commercial License Usage +** Licensees holding valid commercial Qt licenses may use this file in +** accordance with the commercial license agreement provided with the +** Software or, alternatively, in accordance with the terms contained in +** a written agreement between you and The Qt Company. For licensing terms +** and conditions see http://www.qt.io/terms-conditions. 
For further +** information use the contact form at http://www.qt.io/contact-us. +** +** GNU Lesser General Public License Usage +** Alternatively, this file may be used under the terms of the GNU Lesser +** General Public License version 2.1 or version 3 as published by the Free +** Software Foundation and appearing in the file LICENSE.LGPLv21 and +** LICENSE.LGPLv3 included in the packaging of this file. Please review the +** following information to ensure the GNU Lesser General Public License +** requirements will be met: https://www.gnu.org/licenses/lgpl.html and +** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. +** +** As a special exception, The Qt Company gives you certain additional +** rights. These rights are described in The Qt Company LGPL Exception +** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. +** +** $QT_END_LICENSE$ +** +****************************************************************************/ + +#include "qandroidmediaplayervideorenderercontrol.h" + +#include "qandroidmediaplayercontrol.h" +#include "qandroidvideooutput.h" +#include <qabstractvideosurface.h> + +QT_BEGIN_NAMESPACE + +QAndroidMediaPlayerVideoRendererControl::QAndroidMediaPlayerVideoRendererControl(QAndroidMediaPlayerControl *mediaPlayer, QObject *parent) + : QVideoRendererControl(parent) + , m_mediaPlayerControl(mediaPlayer) + , m_surface(0) + , m_textureOutput(new QAndroidTextureVideoOutput(this)) +{ + m_mediaPlayerControl->setVideoOutput(m_textureOutput); +} + +QAndroidMediaPlayerVideoRendererControl::~QAndroidMediaPlayerVideoRendererControl() +{ + m_mediaPlayerControl->setVideoOutput(0); +} + +QAbstractVideoSurface *QAndroidMediaPlayerVideoRendererControl::surface() const +{ + return m_surface; +} + +void QAndroidMediaPlayerVideoRendererControl::setSurface(QAbstractVideoSurface *surface) +{ + if (m_surface == surface) + return; + + m_surface = surface; + m_textureOutput->setSurface(m_surface); +} + +QT_END_NAMESPACE diff --git 
a/src/plugins/android/src/mediaplayer/qandroidmediaplayervideorenderercontrol.h b/src/plugins/android/src/mediaplayer/qandroidmediaplayervideorenderercontrol.h new file mode 100644 index 000000000..cfa41980d --- /dev/null +++ b/src/plugins/android/src/mediaplayer/qandroidmediaplayervideorenderercontrol.h @@ -0,0 +1,62 @@ +/**************************************************************************** +** +** Copyright (C) 2015 The Qt Company Ltd. +** Contact: http://www.qt.io/licensing/ +** +** This file is part of the Qt Toolkit. +** +** $QT_BEGIN_LICENSE:LGPL21$ +** Commercial License Usage +** Licensees holding valid commercial Qt licenses may use this file in +** accordance with the commercial license agreement provided with the +** Software or, alternatively, in accordance with the terms contained in +** a written agreement between you and The Qt Company. For licensing terms +** and conditions see http://www.qt.io/terms-conditions. For further +** information use the contact form at http://www.qt.io/contact-us. +** +** GNU Lesser General Public License Usage +** Alternatively, this file may be used under the terms of the GNU Lesser +** General Public License version 2.1 or version 3 as published by the Free +** Software Foundation and appearing in the file LICENSE.LGPLv21 and +** LICENSE.LGPLv3 included in the packaging of this file. Please review the +** following information to ensure the GNU Lesser General Public License +** requirements will be met: https://www.gnu.org/licenses/lgpl.html and +** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. +** +** As a special exception, The Qt Company gives you certain additional +** rights. These rights are described in The Qt Company LGPL Exception +** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. 
+** +** $QT_END_LICENSE$ +** +****************************************************************************/ + +#ifndef QANDROIDMEDIAPLAYERVIDEORENDERERCONTROL_H +#define QANDROIDMEDIAPLAYERVIDEORENDERERCONTROL_H + +#include <qvideorenderercontrol.h> + +QT_BEGIN_NAMESPACE + +class QAndroidMediaPlayerControl; +class QAndroidTextureVideoOutput; + +class QAndroidMediaPlayerVideoRendererControl : public QVideoRendererControl +{ + Q_OBJECT +public: + QAndroidMediaPlayerVideoRendererControl(QAndroidMediaPlayerControl *mediaPlayer, QObject *parent = 0); + ~QAndroidMediaPlayerVideoRendererControl() Q_DECL_OVERRIDE; + + QAbstractVideoSurface *surface() const Q_DECL_OVERRIDE; + void setSurface(QAbstractVideoSurface *surface) Q_DECL_OVERRIDE; + +private: + QAndroidMediaPlayerControl *m_mediaPlayerControl; + QAbstractVideoSurface *m_surface; + QAndroidTextureVideoOutput *m_textureOutput; +}; + +QT_END_NAMESPACE + +#endif // QANDROIDMEDIAPLAYERVIDEORENDERERCONTROL_H diff --git a/src/plugins/android/src/mediaplayer/qandroidmediaservice.cpp b/src/plugins/android/src/mediaplayer/qandroidmediaservice.cpp index 74943ca64..992bcead2 100644 --- a/src/plugins/android/src/mediaplayer/qandroidmediaservice.cpp +++ b/src/plugins/android/src/mediaplayer/qandroidmediaservice.cpp @@ -35,7 +35,7 @@ #include "qandroidmediaplayercontrol.h" #include "qandroidmetadatareadercontrol.h" -#include "qandroidvideorendercontrol.h" +#include "qandroidmediaplayervideorenderercontrol.h" QT_BEGIN_NAMESPACE @@ -53,9 +53,9 @@ QAndroidMediaService::QAndroidMediaService(QObject *parent) QAndroidMediaService::~QAndroidMediaService() { - delete mMediaControl; - delete mMetadataControl; delete mVideoRendererControl; + delete mMetadataControl; + delete mMediaControl; } QMediaControl *QAndroidMediaService::requestControl(const char *name) @@ -68,8 +68,7 @@ QMediaControl *QAndroidMediaService::requestControl(const char *name) if (qstrcmp(name, QVideoRendererControl_iid) == 0) { if (!mVideoRendererControl) { - 
mVideoRendererControl = new QAndroidVideoRendererControl; - mMediaControl->setVideoOutput(mVideoRendererControl); + mVideoRendererControl = new QAndroidMediaPlayerVideoRendererControl(mMediaControl); return mVideoRendererControl; } } @@ -80,7 +79,6 @@ QMediaControl *QAndroidMediaService::requestControl(const char *name) void QAndroidMediaService::releaseControl(QMediaControl *control) { if (control == mVideoRendererControl) { - mMediaControl->setVideoOutput(0); delete mVideoRendererControl; mVideoRendererControl = 0; } diff --git a/src/plugins/android/src/mediaplayer/qandroidmediaservice.h b/src/plugins/android/src/mediaplayer/qandroidmediaservice.h index 6babbb15f..798d6ef39 100644 --- a/src/plugins/android/src/mediaplayer/qandroidmediaservice.h +++ b/src/plugins/android/src/mediaplayer/qandroidmediaservice.h @@ -40,6 +40,7 @@ QT_BEGIN_NAMESPACE class QAndroidMediaPlayerControl; class QAndroidMetaDataReaderControl; +class QAndroidMediaPlayerVideoRendererControl; class QAndroidMediaService : public QMediaService { @@ -54,7 +55,7 @@ public: private: QAndroidMediaPlayerControl *mMediaControl; QAndroidMetaDataReaderControl *mMetadataControl; - QMediaControl *mVideoRendererControl; + QAndroidMediaPlayerVideoRendererControl *mVideoRendererControl; }; QT_END_NAMESPACE diff --git a/src/plugins/android/src/mediaplayer/qandroidmetadatareadercontrol.cpp b/src/plugins/android/src/mediaplayer/qandroidmetadatareadercontrol.cpp index d09a7734f..b0f027ac3 100644 --- a/src/plugins/android/src/mediaplayer/qandroidmetadatareadercontrol.cpp +++ b/src/plugins/android/src/mediaplayer/qandroidmetadatareadercontrol.cpp @@ -37,6 +37,8 @@ #include <QtMultimedia/qmediametadata.h> #include <qsize.h> #include <QDate> +#include <QtConcurrent/qtconcurrentrun.h> +#include <QtCore/qvector.h> QT_BEGIN_NAMESPACE @@ -63,147 +65,178 @@ static const char* qt_ID3GenreNames[] = "Euro-House", "Dance Hall" }; +typedef QVector<QAndroidMetaDataReaderControl *> AndroidMetaDataReaders; 
+Q_GLOBAL_STATIC(AndroidMetaDataReaders, g_metaDataReaders) +Q_GLOBAL_STATIC(QMutex, g_metaDataReadersMtx) + QAndroidMetaDataReaderControl::QAndroidMetaDataReaderControl(QObject *parent) : QMetaDataReaderControl(parent) , m_available(false) - , m_retriever(new AndroidMediaMetadataRetriever) { } QAndroidMetaDataReaderControl::~QAndroidMetaDataReaderControl() { - if (m_retriever) { - m_retriever->release(); - delete m_retriever; - } + QMutexLocker l(g_metaDataReadersMtx); + const int idx = g_metaDataReaders->indexOf(this); + if (idx != -1) + g_metaDataReaders->remove(idx); } bool QAndroidMetaDataReaderControl::isMetaDataAvailable() const { - return m_available; + const QMutexLocker l(&m_mtx); + return m_available && !m_metadata.isEmpty(); } QVariant QAndroidMetaDataReaderControl::metaData(const QString &key) const { + const QMutexLocker l(&m_mtx); return m_metadata.value(key); } QStringList QAndroidMetaDataReaderControl::availableMetaData() const { + const QMutexLocker l(&m_mtx); return m_metadata.keys(); } void QAndroidMetaDataReaderControl::onMediaChanged(const QMediaContent &media) { - if (!m_retriever) - return; - + const QMutexLocker l(&m_mtx); + m_metadata.clear(); m_mediaContent = media; - updateData(); } void QAndroidMetaDataReaderControl::onUpdateMetaData() { - if (!m_retriever || m_mediaContent.isNull()) + { + const QMutexLocker l(g_metaDataReadersMtx); + if (!g_metaDataReaders->contains(this)) + g_metaDataReaders->append(this); + } + + const QMutexLocker ml(&m_mtx); + if (m_mediaContent.isNull()) return; - updateData(); + const QUrl &url = m_mediaContent.canonicalUrl(); + QtConcurrent::run(&extractMetadata, this, url); } -void QAndroidMetaDataReaderControl::updateData() +void QAndroidMetaDataReaderControl::updateData(const QVariantMap &metadata, const QUrl &url) { - m_metadata.clear(); + const QMutexLocker l(&m_mtx); - if (!m_mediaContent.isNull()) { - if (m_retriever->setDataSource(m_mediaContent.canonicalUrl())) { - QString mimeType = 
m_retriever->extractMetadata(AndroidMediaMetadataRetriever::MimeType); - if (!mimeType.isNull()) - m_metadata.insert(QMediaMetaData::MediaType, mimeType); + if (m_mediaContent.canonicalUrl() != url) + return; - bool isVideo = !m_retriever->extractMetadata(AndroidMediaMetadataRetriever::HasVideo).isNull() - || mimeType.startsWith(QStringLiteral("video")); + const bool oldAvailable = m_available; + m_metadata = metadata; + m_available = !m_metadata.isEmpty(); - QString string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Album); - if (!string.isNull()) - m_metadata.insert(QMediaMetaData::AlbumTitle, string); + if (m_available != oldAvailable) + Q_EMIT metaDataAvailableChanged(m_available); - string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::AlbumArtist); - if (!string.isNull()) - m_metadata.insert(QMediaMetaData::AlbumArtist, string); + Q_EMIT metaDataChanged(); +} - string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Artist); - if (!string.isNull()) { - m_metadata.insert(isVideo ? QMediaMetaData::LeadPerformer - : QMediaMetaData::ContributingArtist, - string.split('/', QString::SkipEmptyParts)); - } +void QAndroidMetaDataReaderControl::extractMetadata(QAndroidMetaDataReaderControl *caller, + const QUrl &url) +{ + QVariantMap metadata; - string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Author); - if (!string.isNull()) - m_metadata.insert(QMediaMetaData::Author, string.split('/', QString::SkipEmptyParts)); + if (!url.isEmpty()) { + AndroidMediaMetadataRetriever retriever; + if (!retriever.setDataSource(url)) + return; - string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Bitrate); - if (!string.isNull()) { - m_metadata.insert(isVideo ? 
QMediaMetaData::VideoBitRate - : QMediaMetaData::AudioBitRate, - string.toInt()); - } + QString mimeType = retriever.extractMetadata(AndroidMediaMetadataRetriever::MimeType); + if (!mimeType.isNull()) + metadata.insert(QMediaMetaData::MediaType, mimeType); - string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::CDTrackNumber); - if (!string.isNull()) - m_metadata.insert(QMediaMetaData::TrackNumber, string.toInt()); - - string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Composer); - if (!string.isNull()) - m_metadata.insert(QMediaMetaData::Composer, string.split('/', QString::SkipEmptyParts)); - - string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Date); - if (!string.isNull()) - m_metadata.insert(QMediaMetaData::Date, QDateTime::fromString(string, QStringLiteral("yyyyMMddTHHmmss.zzzZ")).date()); - - string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Duration); - if (!string.isNull()) - m_metadata.insert(QMediaMetaData::Duration, string.toLongLong()); - - string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Genre); - if (!string.isNull()) { - // The genre can be returned as an ID3v2 id, get the name for it in that case - if (string.startsWith('(') && string.endsWith(')')) { - bool ok = false; - int genreId = string.midRef(1, string.length() - 2).toInt(&ok); - if (ok && genreId >= 0 && genreId <= 125) - string = QLatin1String(qt_ID3GenreNames[genreId]); - } - m_metadata.insert(QMediaMetaData::Genre, string); - } + bool isVideo = !retriever.extractMetadata(AndroidMediaMetadataRetriever::HasVideo).isNull() + || mimeType.startsWith(QStringLiteral("video")); + + QString string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Album); + if (!string.isNull()) + metadata.insert(QMediaMetaData::AlbumTitle, string); + + string = retriever.extractMetadata(AndroidMediaMetadataRetriever::AlbumArtist); + if (!string.isNull()) + metadata.insert(QMediaMetaData::AlbumArtist, string); - 
string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Title); - if (!string.isNull()) - m_metadata.insert(QMediaMetaData::Title, string); + string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Artist); + if (!string.isNull()) { + metadata.insert(isVideo ? QMediaMetaData::LeadPerformer + : QMediaMetaData::ContributingArtist, + string.split('/', QString::SkipEmptyParts)); + } + + string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Author); + if (!string.isNull()) + metadata.insert(QMediaMetaData::Author, string.split('/', QString::SkipEmptyParts)); + + string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Bitrate); + if (!string.isNull()) { + metadata.insert(isVideo ? QMediaMetaData::VideoBitRate + : QMediaMetaData::AudioBitRate, + string.toInt()); + } - string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::VideoHeight); - if (!string.isNull()) { - int height = string.toInt(); - int width = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::VideoWidth).toInt(); - m_metadata.insert(QMediaMetaData::Resolution, QSize(width, height)); + string = retriever.extractMetadata(AndroidMediaMetadataRetriever::CDTrackNumber); + if (!string.isNull()) + metadata.insert(QMediaMetaData::TrackNumber, string.toInt()); + + string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Composer); + if (!string.isNull()) + metadata.insert(QMediaMetaData::Composer, string.split('/', QString::SkipEmptyParts)); + + string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Date); + if (!string.isNull()) + metadata.insert(QMediaMetaData::Date, QDateTime::fromString(string, QStringLiteral("yyyyMMddTHHmmss.zzzZ")).date()); + + string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Duration); + if (!string.isNull()) + metadata.insert(QMediaMetaData::Duration, string.toLongLong()); + + string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Genre); + if (!string.isNull()) { + // The genre 
can be returned as an ID3v2 id, get the name for it in that case + if (string.startsWith('(') && string.endsWith(')')) { + bool ok = false; + const int genreId = string.midRef(1, string.length() - 2).toInt(&ok); + if (ok && genreId >= 0 && genreId <= 125) + string = QLatin1String(qt_ID3GenreNames[genreId]); } + metadata.insert(QMediaMetaData::Genre, string); + } - string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Writer); - if (!string.isNull()) - m_metadata.insert(QMediaMetaData::Writer, string.split('/', QString::SkipEmptyParts)); + string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Title); + if (!string.isNull()) + metadata.insert(QMediaMetaData::Title, string); - string = m_retriever->extractMetadata(AndroidMediaMetadataRetriever::Year); - if (!string.isNull()) - m_metadata.insert(QMediaMetaData::Year, string.toInt()); + string = retriever.extractMetadata(AndroidMediaMetadataRetriever::VideoHeight); + if (!string.isNull()) { + const int height = string.toInt(); + const int width = retriever.extractMetadata(AndroidMediaMetadataRetriever::VideoWidth).toInt(); + metadata.insert(QMediaMetaData::Resolution, QSize(width, height)); } + + string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Writer); + if (!string.isNull()) + metadata.insert(QMediaMetaData::Writer, string.split('/', QString::SkipEmptyParts)); + + string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Year); + if (!string.isNull()) + metadata.insert(QMediaMetaData::Year, string.toInt()); } - bool oldAvailable = m_available; - m_available = !m_metadata.isEmpty(); - if (m_available != oldAvailable) - Q_EMIT metaDataAvailableChanged(m_available); + const QMutexLocker lock(g_metaDataReadersMtx); + if (!g_metaDataReaders->contains(caller)) + return; - Q_EMIT metaDataChanged(); + caller->updateData(metadata, url); } QT_END_NAMESPACE diff --git a/src/plugins/android/src/mediaplayer/qandroidmetadatareadercontrol.h 
b/src/plugins/android/src/mediaplayer/qandroidmetadatareadercontrol.h index 14fb01ea0..e2e668d5c 100644 --- a/src/plugins/android/src/mediaplayer/qandroidmetadatareadercontrol.h +++ b/src/plugins/android/src/mediaplayer/qandroidmetadatareadercontrol.h @@ -36,6 +36,7 @@ #include <QMetaDataReaderControl> #include <qmediacontent.h> +#include <QMutex> QT_BEGIN_NAMESPACE @@ -58,13 +59,13 @@ public Q_SLOTS: void onUpdateMetaData(); private: - void updateData(); + void updateData(const QVariantMap &metadata, const QUrl &url); + static void extractMetadata(QAndroidMetaDataReaderControl *caller, const QUrl &url); + mutable QMutex m_mtx; QMediaContent m_mediaContent; bool m_available; QVariantMap m_metadata; - - AndroidMediaMetadataRetriever *m_retriever; }; QT_END_NAMESPACE diff --git a/src/plugins/android/src/qandroidmediaserviceplugin.cpp b/src/plugins/android/src/qandroidmediaserviceplugin.cpp index 5d35ddf51..bf89badb3 100644 --- a/src/plugins/android/src/qandroidmediaserviceplugin.cpp +++ b/src/plugins/android/src/qandroidmediaserviceplugin.cpp @@ -43,6 +43,7 @@ #include "androidcamera.h" #include "androidmultimediautils.h" #include "androidmediarecorder.h" +#include "androidsurfaceview.h" #include <qdebug.h> QT_BEGIN_NAMESPACE @@ -160,7 +161,8 @@ Q_DECL_EXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void * /*reserved*/) if (!AndroidMediaPlayer::initJNI(jniEnv) || !AndroidCamera::initJNI(jniEnv) || - !AndroidMediaRecorder::initJNI(jniEnv)) { + !AndroidMediaRecorder::initJNI(jniEnv) || + !AndroidSurfaceHolder::initJNI(jniEnv)) { return JNI_ERR; } diff --git a/src/plugins/android/src/wrappers/jni/androidcamera.cpp b/src/plugins/android/src/wrappers/jni/androidcamera.cpp index a4acbd8f9..23200462e 100644 --- a/src/plugins/android/src/wrappers/jni/androidcamera.cpp +++ b/src/plugins/android/src/wrappers/jni/androidcamera.cpp @@ -33,20 +33,24 @@ #include "androidcamera.h" #include "androidsurfacetexture.h" +#include "androidsurfaceview.h" #include "qandroidmultimediautils.h" 
#include <qstringlist.h> #include <qdebug.h> -#include <qmutex.h> #include <QtCore/private/qjnihelpers_p.h> #include <QtCore/qthread.h> +#include <QtCore/qreadwritelock.h> +#include <QtCore/qmutex.h> +#include <QtMultimedia/private/qmemoryvideobuffer_p.h> QT_BEGIN_NAMESPACE static const char QtCameraListenerClassName[] = "org/qtproject/qt5/android/multimedia/QtCameraListener"; -static QMutex g_cameraMapMutex; -typedef QMap<int, AndroidCamera *> CameraMap; -Q_GLOBAL_STATIC(CameraMap, g_cameraMap) + +typedef QHash<int, AndroidCamera *> CameraMap; +Q_GLOBAL_STATIC(CameraMap, cameras) +Q_GLOBAL_STATIC(QReadWriteLock, rwLock) static inline bool exceptionCheckAndClear(JNIEnv *env) { @@ -88,43 +92,57 @@ static QJNIObjectPrivate rectToArea(const QRect &rect) // native method for QtCameraLisener.java static void notifyAutoFocusComplete(JNIEnv* , jobject, int id, jboolean success) { - QMutexLocker locker(&g_cameraMapMutex); - AndroidCamera *obj = g_cameraMap->value(id, 0); - if (obj) - Q_EMIT obj->autoFocusComplete(success); + QReadLocker locker(rwLock); + const auto it = cameras->constFind(id); + if (Q_UNLIKELY(it == cameras->cend())) + return; + + Q_EMIT (*it)->autoFocusComplete(success); } static void notifyPictureExposed(JNIEnv* , jobject, int id) { - QMutexLocker locker(&g_cameraMapMutex); - AndroidCamera *obj = g_cameraMap->value(id, 0); - if (obj) - Q_EMIT obj->pictureExposed(); + QReadLocker locker(rwLock); + const auto it = cameras->constFind(id); + if (Q_UNLIKELY(it == cameras->cend())) + return; + + Q_EMIT (*it)->pictureExposed(); } static void notifyPictureCaptured(JNIEnv *env, jobject, int id, jbyteArray data) { - QMutexLocker locker(&g_cameraMapMutex); - AndroidCamera *obj = g_cameraMap->value(id, 0); - if (obj) { - const int arrayLength = env->GetArrayLength(data); - QByteArray bytes(arrayLength, Qt::Uninitialized); - env->GetByteArrayRegion(data, 0, arrayLength, (jbyte*)bytes.data()); - Q_EMIT obj->pictureCaptured(bytes); - } + QReadLocker locker(rwLock); + 
const auto it = cameras->constFind(id); + if (Q_UNLIKELY(it == cameras->cend())) + return; + + const int arrayLength = env->GetArrayLength(data); + QByteArray bytes(arrayLength, Qt::Uninitialized); + env->GetByteArrayRegion(data, 0, arrayLength, (jbyte*)bytes.data()); + Q_EMIT (*it)->pictureCaptured(bytes); } -static void notifyNewPreviewFrame(JNIEnv *env, jobject, int id, jbyteArray data, int width, int height) +static void notifyNewPreviewFrame(JNIEnv *env, jobject, int id, jbyteArray data, + int width, int height, int format, int bpl) { - QMutexLocker locker(&g_cameraMapMutex); - AndroidCamera *obj = g_cameraMap->value(id, 0); - if (obj) { - const int arrayLength = env->GetArrayLength(data); - QByteArray bytes(arrayLength, Qt::Uninitialized); - env->GetByteArrayRegion(data, 0, arrayLength, (jbyte*)bytes.data()); + QReadLocker locker(rwLock); + const auto it = cameras->constFind(id); + if (Q_UNLIKELY(it == cameras->cend())) + return; - Q_EMIT obj->newPreviewFrame(bytes, width, height); - } + const int arrayLength = env->GetArrayLength(data); + if (arrayLength == 0) + return; + + QByteArray bytes(arrayLength, Qt::Uninitialized); + env->GetByteArrayRegion(data, 0, arrayLength, (jbyte*)bytes.data()); + + QVideoFrame frame(new QMemoryVideoBuffer(bytes, bpl), + QSize(width, height), + qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat(format))); + + Q_EMIT (*it)->newPreviewFrame(frame); } class AndroidCameraPrivate : public QObject @@ -149,10 +167,12 @@ public: Q_INVOKABLE AndroidCamera::ImageFormat getPreviewFormat(); Q_INVOKABLE void setPreviewFormat(AndroidCamera::ImageFormat fmt); + Q_INVOKABLE QList<AndroidCamera::ImageFormat> getSupportedPreviewFormats(); Q_INVOKABLE QSize previewSize() const { return m_previewSize; } Q_INVOKABLE void updatePreviewSize(); Q_INVOKABLE bool setPreviewTexture(void *surfaceTexture); + Q_INVOKABLE bool setPreviewDisplay(void *surfaceHolder); Q_INVOKABLE bool isZoomSupported(); Q_INVOKABLE int getMaxZoom(); @@ -224,13 
+244,16 @@ public: Q_SIGNALS: void previewSizeChanged(); void previewStarted(); + void previewFailedToStart(); void previewStopped(); void autoFocusStarted(); void whiteBalanceChanged(); - void lastPreviewFrameFetched(const QByteArray &preview, int width, int height); + void takePictureFailed(); + + void lastPreviewFrameFetched(const QVideoFrame &frame); }; AndroidCamera::AndroidCamera(AndroidCameraPrivate *d, QThread *worker) @@ -242,12 +265,15 @@ AndroidCamera::AndroidCamera(AndroidCameraPrivate *d, QThread *worker) qRegisterMetaType<QList<int> >(); qRegisterMetaType<QList<QSize> >(); qRegisterMetaType<QList<QRect> >(); + qRegisterMetaType<ImageFormat>(); connect(d, &AndroidCameraPrivate::previewSizeChanged, this, &AndroidCamera::previewSizeChanged); connect(d, &AndroidCameraPrivate::previewStarted, this, &AndroidCamera::previewStarted); + connect(d, &AndroidCameraPrivate::previewFailedToStart, this, &AndroidCamera::previewFailedToStart); connect(d, &AndroidCameraPrivate::previewStopped, this, &AndroidCamera::previewStopped); connect(d, &AndroidCameraPrivate::autoFocusStarted, this, &AndroidCamera::autoFocusStarted); connect(d, &AndroidCameraPrivate::whiteBalanceChanged, this, &AndroidCamera::whiteBalanceChanged); + connect(d, &AndroidCameraPrivate::takePictureFailed, this, &AndroidCamera::takePictureFailed); connect(d, &AndroidCameraPrivate::lastPreviewFrameFetched, this, &AndroidCamera::lastPreviewFrameFetched); } @@ -255,12 +281,11 @@ AndroidCamera::~AndroidCamera() { Q_D(AndroidCamera); if (d->m_camera.isValid()) { - g_cameraMapMutex.lock(); - g_cameraMap->remove(d->m_cameraId); - g_cameraMapMutex.unlock(); + release(); + QWriteLocker locker(rwLock); + cameras->remove(cameraId()); } - release(); m_worker->exit(); m_worker->wait(5000); } @@ -283,9 +308,9 @@ AndroidCamera *AndroidCamera::open(int cameraId) } AndroidCamera *q = new AndroidCamera(d, worker); - g_cameraMapMutex.lock(); - g_cameraMap->insert(cameraId, q); - g_cameraMapMutex.unlock(); + QWriteLocker 
locker(rwLock); + cameras->insert(cameraId, q); + return q; } @@ -361,6 +386,12 @@ void AndroidCamera::setPreviewFormat(ImageFormat fmt) QMetaObject::invokeMethod(d, "setPreviewFormat", Q_ARG(AndroidCamera::ImageFormat, fmt)); } +QList<AndroidCamera::ImageFormat> AndroidCamera::getSupportedPreviewFormats() +{ + Q_D(AndroidCamera); + return d->getSupportedPreviewFormats(); +} + QSize AndroidCamera::previewSize() const { Q_D(const AndroidCamera); @@ -392,6 +423,18 @@ bool AndroidCamera::setPreviewTexture(AndroidSurfaceTexture *surfaceTexture) return ok; } +bool AndroidCamera::setPreviewDisplay(AndroidSurfaceHolder *surfaceHolder) +{ + Q_D(AndroidCamera); + bool ok = true; + QMetaObject::invokeMethod(d, + "setPreviewDisplay", + Qt::BlockingQueuedConnection, + Q_RETURN_ARG(bool, ok), + Q_ARG(void *, surfaceHolder ? surfaceHolder->surfaceHolder() : 0)); + return ok; +} + bool AndroidCamera::isZoomSupported() { Q_D(AndroidCamera); @@ -706,6 +749,12 @@ void AndroidCamera::stopPreview() QMetaObject::invokeMethod(d, "stopPreview"); } +void AndroidCamera::stopPreviewSynchronous() +{ + Q_D(AndroidCamera); + QMetaObject::invokeMethod(d, "stopPreview", Qt::BlockingQueuedConnection); +} + AndroidCameraPrivate::AndroidCameraPrivate() : QObject(), m_parametersMutex(QMutex::Recursive) @@ -827,7 +876,7 @@ AndroidCamera::ImageFormat AndroidCameraPrivate::getPreviewFormat() QMutexLocker parametersLocker(&m_parametersMutex); if (!m_parameters.isValid()) - return AndroidCamera::Unknown; + return AndroidCamera::UnknownImageFormat; return AndroidCamera::ImageFormat(m_parameters.callMethod<jint>("getPreviewFormat")); } @@ -843,6 +892,27 @@ void AndroidCameraPrivate::setPreviewFormat(AndroidCamera::ImageFormat fmt) applyParameters(); } +QList<AndroidCamera::ImageFormat> AndroidCameraPrivate::getSupportedPreviewFormats() +{ + QList<AndroidCamera::ImageFormat> list; + + QMutexLocker parametersLocker(&m_parametersMutex); + + if (m_parameters.isValid()) { + QJNIObjectPrivate formatList = 
m_parameters.callObjectMethod("getSupportedPreviewFormats", + "()Ljava/util/List;"); + int count = formatList.callMethod<jint>("size"); + for (int i = 0; i < count; ++i) { + QJNIObjectPrivate format = formatList.callObjectMethod("get", + "(I)Ljava/lang/Object;", + i); + list.append(AndroidCamera::ImageFormat(format.callMethod<jint>("intValue"))); + } + } + + return list; +} + void AndroidCameraPrivate::updatePreviewSize() { QMutexLocker parametersLocker(&m_parametersMutex); @@ -864,6 +934,15 @@ bool AndroidCameraPrivate::setPreviewTexture(void *surfaceTexture) return !exceptionCheckAndClear(env); } +bool AndroidCameraPrivate::setPreviewDisplay(void *surfaceHolder) +{ + QJNIEnvironmentPrivate env; + m_camera.callMethod<void>("setPreviewDisplay", + "(Landroid/view/SurfaceHolder;)V", + static_cast<jobject>(surfaceHolder)); + return !exceptionCheckAndClear(env); +} + bool AndroidCameraPrivate::isZoomSupported() { QMutexLocker parametersLocker(&m_parametersMutex); @@ -1057,15 +1136,21 @@ void AndroidCameraPrivate::setFocusAreas(const QList<QRect> &areas) void AndroidCameraPrivate::autoFocus() { + QJNIEnvironmentPrivate env; + m_camera.callMethod<void>("autoFocus", "(Landroid/hardware/Camera$AutoFocusCallback;)V", m_cameraListener.object()); - emit autoFocusStarted(); + + if (!exceptionCheckAndClear(env)) + emit autoFocusStarted(); } void AndroidCameraPrivate::cancelAutoFocus() { + QJNIEnvironmentPrivate env; m_camera.callMethod<void>("cancelAutoFocus"); + exceptionCheckAndClear(env); } bool AndroidCameraPrivate::isAutoExposureLockSupported() @@ -1314,25 +1399,40 @@ void AndroidCameraPrivate::setJpegQuality(int quality) void AndroidCameraPrivate::startPreview() { + QJNIEnvironmentPrivate env; + setupPreviewFrameCallback(); m_camera.callMethod<void>("startPreview"); - emit previewStarted(); + + if (exceptionCheckAndClear(env)) + emit previewFailedToStart(); + else + emit previewStarted(); } void AndroidCameraPrivate::stopPreview() { + QJNIEnvironmentPrivate env; + 
m_camera.callMethod<void>("stopPreview"); + + exceptionCheckAndClear(env); emit previewStopped(); } void AndroidCameraPrivate::takePicture() { + QJNIEnvironmentPrivate env; + m_camera.callMethod<void>("takePicture", "(Landroid/hardware/Camera$ShutterCallback;" "Landroid/hardware/Camera$PictureCallback;" "Landroid/hardware/Camera$PictureCallback;)V", m_cameraListener.object(), jobject(0), m_cameraListener.object()); + + if (exceptionCheckAndClear(env)) + emit takePictureFailed(); } void AndroidCameraPrivate::setupPreviewFrameCallback() @@ -1354,15 +1454,25 @@ void AndroidCameraPrivate::fetchLastPreviewFrame() return; const int arrayLength = env->GetArrayLength(static_cast<jbyteArray>(data.object())); + if (arrayLength == 0) + return; + QByteArray bytes(arrayLength, Qt::Uninitialized); env->GetByteArrayRegion(static_cast<jbyteArray>(data.object()), 0, arrayLength, reinterpret_cast<jbyte *>(bytes.data())); - emit lastPreviewFrameFetched(bytes, - m_cameraListener.callMethod<jint>("previewWidth"), - m_cameraListener.callMethod<jint>("previewHeight")); + const int width = m_cameraListener.callMethod<jint>("previewWidth"); + const int height = m_cameraListener.callMethod<jint>("previewHeight"); + const int format = m_cameraListener.callMethod<jint>("previewFormat"); + const int bpl = m_cameraListener.callMethod<jint>("previewBytesPerLine"); + + QVideoFrame frame(new QMemoryVideoBuffer(bytes, bpl), + QSize(width, height), + qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat(format))); + + emit lastPreviewFrameFetched(frame); } void AndroidCameraPrivate::applyParameters() @@ -1407,7 +1517,7 @@ bool AndroidCamera::initJNI(JNIEnv *env) {"notifyAutoFocusComplete", "(IZ)V", (void *)notifyAutoFocusComplete}, {"notifyPictureExposed", "(I)V", (void *)notifyPictureExposed}, {"notifyPictureCaptured", "(I[B)V", (void *)notifyPictureCaptured}, - {"notifyNewPreviewFrame", "(I[BII)V", (void *)notifyNewPreviewFrame} + {"notifyNewPreviewFrame", "(I[BIIII)V", (void 
*)notifyNewPreviewFrame} }; if (clazz && env->RegisterNatives(clazz, diff --git a/src/plugins/android/src/wrappers/jni/androidcamera.h b/src/plugins/android/src/wrappers/jni/androidcamera.h index 7a8ae8b23..a5e0294c0 100644 --- a/src/plugins/android/src/wrappers/jni/androidcamera.h +++ b/src/plugins/android/src/wrappers/jni/androidcamera.h @@ -46,6 +46,7 @@ class QThread; class AndroidCameraPrivate; class AndroidSurfaceTexture; +class AndroidSurfaceHolder; struct AndroidCameraInfo { @@ -67,7 +68,7 @@ public: }; enum ImageFormat { // same values as in android.graphics.ImageFormat Java class - Unknown = 0, + UnknownImageFormat = 0, RGB565 = 4, NV16 = 16, NV21 = 17, @@ -95,10 +96,12 @@ public: ImageFormat getPreviewFormat(); void setPreviewFormat(ImageFormat fmt); + QList<ImageFormat> getSupportedPreviewFormats(); QSize previewSize() const; void setPreviewSize(const QSize &size); bool setPreviewTexture(AndroidSurfaceTexture *surfaceTexture); + bool setPreviewDisplay(AndroidSurfaceHolder *surfaceHolder); bool isZoomSupported(); int getMaxZoom(); @@ -152,6 +155,7 @@ public: void startPreview(); void stopPreview(); + void stopPreviewSynchronous(); void takePicture(); @@ -168,6 +172,7 @@ public: Q_SIGNALS: void previewSizeChanged(); void previewStarted(); + void previewFailedToStart(); void previewStopped(); void autoFocusStarted(); @@ -175,10 +180,11 @@ Q_SIGNALS: void whiteBalanceChanged(); + void takePictureFailed(); void pictureExposed(); void pictureCaptured(const QByteArray &data); - void lastPreviewFrameFetched(const QByteArray &preview, int width, int height); - void newPreviewFrame(const QByteArray &frame, int width, int height); + void lastPreviewFrameFetched(const QVideoFrame &frame); + void newPreviewFrame(const QVideoFrame &frame); private: AndroidCamera(AndroidCameraPrivate *d, QThread *worker); @@ -188,6 +194,8 @@ private: QScopedPointer<QThread> m_worker; }; +Q_DECLARE_METATYPE(AndroidCamera::ImageFormat) + QT_END_NAMESPACE #endif // ANDROIDCAMERA_H diff 
--git a/src/plugins/android/src/wrappers/jni/androidmediametadataretriever.cpp b/src/plugins/android/src/wrappers/jni/androidmediametadataretriever.cpp index 56ac0e0ac..f67428b6e 100644 --- a/src/plugins/android/src/wrappers/jni/androidmediametadataretriever.cpp +++ b/src/plugins/android/src/wrappers/jni/androidmediametadataretriever.cpp @@ -60,6 +60,7 @@ AndroidMediaMetadataRetriever::AndroidMediaMetadataRetriever() AndroidMediaMetadataRetriever::~AndroidMediaMetadataRetriever() { + release(); } QString AndroidMediaMetadataRetriever::extractMetadata(MetadataKey key) diff --git a/src/plugins/android/src/wrappers/jni/androidmediametadataretriever.h b/src/plugins/android/src/wrappers/jni/androidmediametadataretriever.h index 01a98490b..1b4a09bb7 100644 --- a/src/plugins/android/src/wrappers/jni/androidmediametadataretriever.h +++ b/src/plugins/android/src/wrappers/jni/androidmediametadataretriever.h @@ -71,10 +71,10 @@ public: ~AndroidMediaMetadataRetriever(); QString extractMetadata(MetadataKey key); - void release(); bool setDataSource(const QUrl &url); private: + void release(); QJNIObjectPrivate m_metadataRetriever; }; diff --git a/src/plugins/android/src/wrappers/jni/androidmediaplayer.cpp b/src/plugins/android/src/wrappers/jni/androidmediaplayer.cpp index 8fbecbc73..3267d838b 100644 --- a/src/plugins/android/src/wrappers/jni/androidmediaplayer.cpp +++ b/src/plugins/android/src/wrappers/jni/androidmediaplayer.cpp @@ -37,29 +37,34 @@ #include <QtCore/private/qjni_p.h> #include <QtCore/private/qjnihelpers_p.h> #include "androidsurfacetexture.h" -#include <QMap> +#include <QVector> +#include <QReadWriteLock> static const char QtAndroidMediaPlayerClassName[] = "org/qtproject/qt5/android/multimedia/QtAndroidMediaPlayer"; -typedef QMap<jlong, AndroidMediaPlayer *> MediaPlayerMap; -Q_GLOBAL_STATIC(MediaPlayerMap, mediaPlayers) +typedef QVector<AndroidMediaPlayer *> MediaPlayerList; +Q_GLOBAL_STATIC(MediaPlayerList, mediaPlayers) +Q_GLOBAL_STATIC(QReadWriteLock, rwLock) 
QT_BEGIN_NAMESPACE AndroidMediaPlayer::AndroidMediaPlayer() : QObject() { - + QWriteLocker locker(rwLock); const jlong id = reinterpret_cast<jlong>(this); mMediaPlayer = QJNIObjectPrivate(QtAndroidMediaPlayerClassName, "(Landroid/app/Activity;J)V", QtAndroidPrivate::activity(), id); - (*mediaPlayers)[id] = this; + mediaPlayers->append(this); } AndroidMediaPlayer::~AndroidMediaPlayer() { - mediaPlayers->remove(reinterpret_cast<jlong>(this)); + QWriteLocker locker(rwLock); + const int i = mediaPlayers->indexOf(this); + Q_ASSERT(i != -1); + mediaPlayers->remove(i); } void AndroidMediaPlayer::release() @@ -154,66 +159,72 @@ static void onErrorNative(JNIEnv *env, jobject thiz, jint what, jint extra, jlon { Q_UNUSED(env); Q_UNUSED(thiz); - AndroidMediaPlayer *const mp = (*mediaPlayers)[id]; - if (!mp) + QReadLocker locker(rwLock); + const int i = mediaPlayers->indexOf(reinterpret_cast<AndroidMediaPlayer *>(id)); + if (Q_UNLIKELY(i == -1)) return; - Q_EMIT mp->error(what, extra); + Q_EMIT (*mediaPlayers)[i]->error(what, extra); } static void onBufferingUpdateNative(JNIEnv *env, jobject thiz, jint percent, jlong id) { Q_UNUSED(env); Q_UNUSED(thiz); - AndroidMediaPlayer *const mp = (*mediaPlayers)[id]; - if (!mp) + QReadLocker locker(rwLock); + const int i = mediaPlayers->indexOf(reinterpret_cast<AndroidMediaPlayer *>(id)); + if (Q_UNLIKELY(i == -1)) return; - Q_EMIT mp->bufferingChanged(percent); + Q_EMIT (*mediaPlayers)[i]->bufferingChanged(percent); } static void onProgressUpdateNative(JNIEnv *env, jobject thiz, jint progress, jlong id) { Q_UNUSED(env); Q_UNUSED(thiz); - AndroidMediaPlayer *const mp = (*mediaPlayers)[id]; - if (!mp) + QReadLocker locker(rwLock); + const int i = mediaPlayers->indexOf(reinterpret_cast<AndroidMediaPlayer *>(id)); + if (Q_UNLIKELY(i == -1)) return; - Q_EMIT mp->progressChanged(progress); + Q_EMIT (*mediaPlayers)[i]->progressChanged(progress); } static void onDurationChangedNative(JNIEnv *env, jobject thiz, jint duration, jlong id) { 
Q_UNUSED(env); Q_UNUSED(thiz); - AndroidMediaPlayer *const mp = (*mediaPlayers)[id]; - if (!mp) + QReadLocker locker(rwLock); + const int i = mediaPlayers->indexOf(reinterpret_cast<AndroidMediaPlayer *>(id)); + if (Q_UNLIKELY(i == -1)) return; - Q_EMIT mp->durationChanged(duration); + Q_EMIT (*mediaPlayers)[i]->durationChanged(duration); } static void onInfoNative(JNIEnv *env, jobject thiz, jint what, jint extra, jlong id) { Q_UNUSED(env); Q_UNUSED(thiz); - AndroidMediaPlayer *const mp = (*mediaPlayers)[id]; - if (!mp) + QReadLocker locker(rwLock); + const int i = mediaPlayers->indexOf(reinterpret_cast<AndroidMediaPlayer *>(id)); + if (Q_UNLIKELY(i == -1)) return; - Q_EMIT mp->info(what, extra); + Q_EMIT (*mediaPlayers)[i]->info(what, extra); } static void onStateChangedNative(JNIEnv *env, jobject thiz, jint state, jlong id) { Q_UNUSED(env); Q_UNUSED(thiz); - AndroidMediaPlayer *const mp = (*mediaPlayers)[id]; - if (!mp) + QReadLocker locker(rwLock); + const int i = mediaPlayers->indexOf(reinterpret_cast<AndroidMediaPlayer *>(id)); + if (Q_UNLIKELY(i == -1)) return; - Q_EMIT mp->stateChanged(state); + Q_EMIT (*mediaPlayers)[i]->stateChanged(state); } static void onVideoSizeChangedNative(JNIEnv *env, @@ -224,11 +235,12 @@ static void onVideoSizeChangedNative(JNIEnv *env, { Q_UNUSED(env); Q_UNUSED(thiz); - AndroidMediaPlayer *const mp = (*mediaPlayers)[id]; - if (!mp) + QReadLocker locker(rwLock); + const int i = mediaPlayers->indexOf(reinterpret_cast<AndroidMediaPlayer *>(id)); + if (Q_UNLIKELY(i == -1)) return; - Q_EMIT mp->videoSizeChanged(width, height); + Q_EMIT (*mediaPlayers)[i]->videoSizeChanged(width, height); } bool AndroidMediaPlayer::initJNI(JNIEnv *env) diff --git a/src/plugins/android/src/wrappers/jni/androidmediarecorder.cpp b/src/plugins/android/src/wrappers/jni/androidmediarecorder.cpp index fa32f31ef..34063056f 100644 --- a/src/plugins/android/src/wrappers/jni/androidmediarecorder.cpp +++ 
b/src/plugins/android/src/wrappers/jni/androidmediarecorder.cpp @@ -34,6 +34,8 @@ #include "androidmediarecorder.h" #include "androidcamera.h" +#include "androidsurfacetexture.h" +#include "androidsurfaceview.h" #include <QtCore/private/qjni_p.h> #include <qmap.h> @@ -339,6 +341,41 @@ void AndroidMediaRecorder::setOutputFile(const QString &path) } } +void AndroidMediaRecorder::setSurfaceTexture(AndroidSurfaceTexture *texture) +{ + QJNIEnvironmentPrivate env; + m_mediaRecorder.callMethod<void>("setPreviewDisplay", + "(Landroid/view/Surface;)V", + texture->surface()); + if (env->ExceptionCheck()) { +#ifdef QT_DEBUG + env->ExceptionDescribe(); +#endif + env->ExceptionClear(); + } +} + +void AndroidMediaRecorder::setSurfaceHolder(AndroidSurfaceHolder *holder) +{ + QJNIEnvironmentPrivate env; + QJNIObjectPrivate surfaceHolder(holder->surfaceHolder()); + QJNIObjectPrivate surface = surfaceHolder.callObjectMethod("getSurface", + "()Landroid/view/Surface;"); + if (!surface.isValid()) + return; + + m_mediaRecorder.callMethod<void>("setPreviewDisplay", + "(Landroid/view/Surface;)V", + surface.object()); + if (env->ExceptionCheck()) { +#ifdef QT_DEBUG + env->ExceptionDescribe(); +#endif + env->ExceptionClear(); + } +} + + bool AndroidMediaRecorder::initJNI(JNIEnv *env) { jclass clazz = QJNIEnvironmentPrivate::findClass(QtMediaRecorderListenerClassName, diff --git a/src/plugins/android/src/wrappers/jni/androidmediarecorder.h b/src/plugins/android/src/wrappers/jni/androidmediarecorder.h index 1aa83a201..95b48ed47 100644 --- a/src/plugins/android/src/wrappers/jni/androidmediarecorder.h +++ b/src/plugins/android/src/wrappers/jni/androidmediarecorder.h @@ -41,6 +41,8 @@ QT_BEGIN_NAMESPACE class AndroidCamera; +class AndroidSurfaceTexture; +class AndroidSurfaceHolder; class AndroidCamcorderProfile { @@ -149,6 +151,9 @@ public: void setOutputFormat(OutputFormat format); void setOutputFile(const QString &path); + void setSurfaceTexture(AndroidSurfaceTexture *texture); + void 
setSurfaceHolder(AndroidSurfaceHolder *holder); + static bool initJNI(JNIEnv *env); Q_SIGNALS: diff --git a/src/plugins/android/src/wrappers/jni/androidsurfacetexture.cpp b/src/plugins/android/src/wrappers/jni/androidsurfacetexture.cpp index ffa37d7d4..9a25b7e28 100644 --- a/src/plugins/android/src/wrappers/jni/androidsurfacetexture.cpp +++ b/src/plugins/android/src/wrappers/jni/androidsurfacetexture.cpp @@ -78,8 +78,8 @@ AndroidSurfaceTexture::AndroidSurfaceTexture(unsigned int texName) AndroidSurfaceTexture::~AndroidSurfaceTexture() { - if (QtAndroidPrivate::androidSdkVersion() > 13 && m_surfaceView.isValid()) - m_surfaceView.callMethod<void>("release"); + if (QtAndroidPrivate::androidSdkVersion() > 13 && m_surface.isValid()) + m_surface.callMethod<void>("release"); if (m_surfaceTexture.isValid()) { release(); @@ -124,21 +124,23 @@ jobject AndroidSurfaceTexture::surfaceTexture() return m_surfaceTexture.object(); } -jobject AndroidSurfaceTexture::surfaceView() +jobject AndroidSurfaceTexture::surface() { - return m_surfaceView.object(); + if (!m_surface.isValid()) { + m_surface = QJNIObjectPrivate("android/view/Surface", + "(Landroid/graphics/SurfaceTexture;)V", + m_surfaceTexture.object()); + } + + return m_surface.object(); } jobject AndroidSurfaceTexture::surfaceHolder() { if (!m_surfaceHolder.isValid()) { - m_surfaceView = QJNIObjectPrivate("android/view/Surface", - "(Landroid/graphics/SurfaceTexture;)V", - m_surfaceTexture.object()); - m_surfaceHolder = QJNIObjectPrivate("org/qtproject/qt5/android/multimedia/QtSurfaceTextureHolder", "(Landroid/view/Surface;)V", - m_surfaceView.object()); + surface()); } return m_surfaceHolder.object(); diff --git a/src/plugins/android/src/wrappers/jni/androidsurfacetexture.h b/src/plugins/android/src/wrappers/jni/androidsurfacetexture.h index 2618ed6c9..ac2af694e 100644 --- a/src/plugins/android/src/wrappers/jni/androidsurfacetexture.h +++ b/src/plugins/android/src/wrappers/jni/androidsurfacetexture.h @@ -50,7 +50,7 @@ public: 
int textureID() const { return m_texID; } jobject surfaceTexture(); - jobject surfaceView(); + jobject surface(); jobject surfaceHolder(); inline bool isValid() const { return m_surfaceTexture.isValid(); } @@ -66,7 +66,7 @@ Q_SIGNALS: private: int m_texID; QJNIObjectPrivate m_surfaceTexture; - QJNIObjectPrivate m_surfaceView; + QJNIObjectPrivate m_surface; QJNIObjectPrivate m_surfaceHolder; }; diff --git a/src/plugins/android/src/wrappers/jni/androidsurfaceview.cpp b/src/plugins/android/src/wrappers/jni/androidsurfaceview.cpp new file mode 100644 index 000000000..67560baf4 --- /dev/null +++ b/src/plugins/android/src/wrappers/jni/androidsurfaceview.cpp @@ -0,0 +1,204 @@ +/**************************************************************************** +** +** Copyright (C) 2015 The Qt Company Ltd. +** Contact: http://www.qt.io/licensing/ +** +** This file is part of the Qt Toolkit. +** +** $QT_BEGIN_LICENSE:LGPL21$ +** Commercial License Usage +** Licensees holding valid commercial Qt licenses may use this file in +** accordance with the commercial license agreement provided with the +** Software or, alternatively, in accordance with the terms contained in +** a written agreement between you and The Qt Company. For licensing terms +** and conditions see http://www.qt.io/terms-conditions. For further +** information use the contact form at http://www.qt.io/contact-us. +** +** GNU Lesser General Public License Usage +** Alternatively, this file may be used under the terms of the GNU Lesser +** General Public License version 2.1 or version 3 as published by the Free +** Software Foundation and appearing in the file LICENSE.LGPLv21 and +** LICENSE.LGPLv3 included in the packaging of this file. Please review the +** following information to ensure the GNU Lesser General Public License +** requirements will be met: https://www.gnu.org/licenses/lgpl.html and +** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. 
+** +** As a special exception, The Qt Company gives you certain additional +** rights. These rights are described in The Qt Company LGPL Exception +** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. +** +** $QT_END_LICENSE$ +** +****************************************************************************/ + +#include "androidsurfaceview.h" + +#include <QtCore/private/qjnihelpers_p.h> +#include <QtCore/qcoreapplication.h> +#include <QtCore/qvector.h> +#include <QtCore/qdebug.h> +#include <QtCore/qmutex.h> +#include <QtGui/qwindow.h> + +QT_BEGIN_NAMESPACE + +static const char QtSurfaceHolderCallbackClassName[] = "org/qtproject/qt5/android/multimedia/QtSurfaceHolderCallback"; +typedef QVector<AndroidSurfaceHolder *> SurfaceHolders; +Q_GLOBAL_STATIC(SurfaceHolders, surfaceHolders) +Q_GLOBAL_STATIC(QMutex, shLock) + +AndroidSurfaceHolder::AndroidSurfaceHolder(QJNIObjectPrivate object) + : m_surfaceHolder(object) + , m_surfaceCreated(false) +{ + if (!m_surfaceHolder.isValid()) + return; + + { + QMutexLocker locker(shLock); + surfaceHolders->append(this); + } + + QJNIObjectPrivate callback(QtSurfaceHolderCallbackClassName, "(J)V", reinterpret_cast<jlong>(this)); + m_surfaceHolder.callMethod<void>("addCallback", + "(Landroid/view/SurfaceHolder$Callback;)V", + callback.object()); +} + +AndroidSurfaceHolder::~AndroidSurfaceHolder() +{ + QMutexLocker locker(shLock); + const int i = surfaceHolders->indexOf(this); + if (Q_UNLIKELY(i == -1)) + return; + + surfaceHolders->remove(i); +} + +jobject AndroidSurfaceHolder::surfaceHolder() const +{ + return m_surfaceHolder.object(); +} + +bool AndroidSurfaceHolder::isSurfaceCreated() const +{ + QMutexLocker locker(shLock); + return m_surfaceCreated; +} + +void AndroidSurfaceHolder::handleSurfaceCreated(JNIEnv*, jobject, jlong id) +{ + QMutexLocker locker(shLock); + const int i = surfaceHolders->indexOf(reinterpret_cast<AndroidSurfaceHolder *>(id)); + if (Q_UNLIKELY(i == -1)) + return; + + 
(*surfaceHolders)[i]->m_surfaceCreated = true; + Q_EMIT (*surfaceHolders)[i]->surfaceCreated(); +} + +void AndroidSurfaceHolder::handleSurfaceDestroyed(JNIEnv*, jobject, jlong id) +{ + QMutexLocker locker(shLock); + const int i = surfaceHolders->indexOf(reinterpret_cast<AndroidSurfaceHolder *>(id)); + if (Q_UNLIKELY(i == -1)) + return; + + (*surfaceHolders)[i]->m_surfaceCreated = false; +} + +bool AndroidSurfaceHolder::initJNI(JNIEnv *env) +{ + jclass clazz = QJNIEnvironmentPrivate::findClass(QtSurfaceHolderCallbackClassName, + env); + + static const JNINativeMethod methods[] = { + {"notifySurfaceCreated", "(J)V", (void *)AndroidSurfaceHolder::handleSurfaceCreated}, + {"notifySurfaceDestroyed", "(J)V", (void *)AndroidSurfaceHolder::handleSurfaceDestroyed} + }; + + if (clazz && env->RegisterNatives(clazz, + methods, + sizeof(methods) / sizeof(methods[0])) != JNI_OK) { + return false; + } + + return true; +} + +AndroidSurfaceView::AndroidSurfaceView() + : m_window(0) + , m_surfaceHolder(0) + , m_pendingVisible(-1) +{ + setAutoDelete(false); + QtAndroidPrivate::runOnUiThread(this, QJNIEnvironmentPrivate()); +} + +AndroidSurfaceView::~AndroidSurfaceView() +{ + delete m_surfaceHolder; + delete m_window; +} + +AndroidSurfaceHolder *AndroidSurfaceView::holder() const +{ + return m_surfaceHolder; +} + +void AndroidSurfaceView::setVisible(bool v) +{ + if (m_window) + m_window->setVisible(v); + else + m_pendingVisible = int(v); +} + +void AndroidSurfaceView::setGeometry(int x, int y, int width, int height) +{ + if (m_window) + m_window->setGeometry(x, y, width, height); + else + m_pendingGeometry = QRect(x, y, width, height); +} + +bool AndroidSurfaceView::event(QEvent *e) +{ + if (e->type() == QEvent::User) { + Q_ASSERT(m_surfaceView.isValid()); + + QJNIObjectPrivate holder = m_surfaceView.callObjectMethod("getHolder", + "()Landroid/view/SurfaceHolder;"); + if (!holder.isValid()) { + m_surfaceView = QJNIObjectPrivate(); + } else { + m_surfaceHolder = new 
AndroidSurfaceHolder(holder); + connect(m_surfaceHolder, &AndroidSurfaceHolder::surfaceCreated, + this, &AndroidSurfaceView::surfaceCreated); + { // Lock now to avoid a race with handleSurfaceCreated() + QMutexLocker locker(shLock); + m_window = QWindow::fromWinId(WId(m_surfaceView.object())); + + if (m_pendingVisible != -1) + m_window->setVisible(m_pendingVisible); + if (m_pendingGeometry.isValid()) + m_window->setGeometry(m_pendingGeometry); + } + } + + return true; + } + + return QObject::event(e); +} + +// Called on the Android UI thread. +void AndroidSurfaceView::run() +{ + m_surfaceView = QJNIObjectPrivate("android/view/SurfaceView", + "(Landroid/content/Context;)V", + QtAndroidPrivate::activity()); + QCoreApplication::postEvent(this, new QEvent(QEvent::User)); +} + +QT_END_NAMESPACE diff --git a/src/plugins/android/src/wrappers/jni/androidsurfaceview.h b/src/plugins/android/src/wrappers/jni/androidsurfaceview.h new file mode 100644 index 000000000..661a5959f --- /dev/null +++ b/src/plugins/android/src/wrappers/jni/androidsurfaceview.h @@ -0,0 +1,101 @@ +/**************************************************************************** +** +** Copyright (C) 2015 The Qt Company Ltd. +** Contact: http://www.qt.io/licensing/ +** +** This file is part of the Qt Toolkit. +** +** $QT_BEGIN_LICENSE:LGPL21$ +** Commercial License Usage +** Licensees holding valid commercial Qt licenses may use this file in +** accordance with the commercial license agreement provided with the +** Software or, alternatively, in accordance with the terms contained in +** a written agreement between you and The Qt Company. For licensing terms +** and conditions see http://www.qt.io/terms-conditions. For further +** information use the contact form at http://www.qt.io/contact-us. 
+** +** GNU Lesser General Public License Usage +** Alternatively, this file may be used under the terms of the GNU Lesser +** General Public License version 2.1 or version 3 as published by the Free +** Software Foundation and appearing in the file LICENSE.LGPLv21 and +** LICENSE.LGPLv3 included in the packaging of this file. Please review the +** following information to ensure the GNU Lesser General Public License +** requirements will be met: https://www.gnu.org/licenses/lgpl.html and +** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. +** +** As a special exception, The Qt Company gives you certain additional +** rights. These rights are described in The Qt Company LGPL Exception +** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. +** +** $QT_END_LICENSE$ +** +****************************************************************************/ + +#ifndef ANDROIDSURFACEVIEW_H +#define ANDROIDSURFACEVIEW_H + +#include <QtCore/private/qjni_p.h> +#include <qrect.h> +#include <QtCore/qrunnable.h> + +QT_BEGIN_NAMESPACE + +class QWindow; + +class AndroidSurfaceHolder : public QObject +{ + Q_OBJECT +public: + ~AndroidSurfaceHolder(); + + jobject surfaceHolder() const; + bool isSurfaceCreated() const; + + static bool initJNI(JNIEnv *env); + +Q_SIGNALS: + void surfaceCreated(); + +private: + AndroidSurfaceHolder(QJNIObjectPrivate object); + + static void handleSurfaceCreated(JNIEnv*, jobject, jlong id); + static void handleSurfaceDestroyed(JNIEnv*, jobject, jlong id); + + QJNIObjectPrivate m_surfaceHolder; + bool m_surfaceCreated; + + friend class AndroidSurfaceView; +}; + +class AndroidSurfaceView : public QObject, public QRunnable +{ + Q_OBJECT +public: + AndroidSurfaceView(); + ~AndroidSurfaceView(); + + AndroidSurfaceHolder *holder() const; + + void setVisible(bool v); + void setGeometry(int x, int y, int width, int height); + + bool event(QEvent *); + +Q_SIGNALS: + void surfaceCreated(); + +protected: + void run() override; + +private: + 
QJNIObjectPrivate m_surfaceView; + QWindow *m_window; + AndroidSurfaceHolder *m_surfaceHolder; + int m_pendingVisible; + QRect m_pendingGeometry; +}; + +QT_END_NAMESPACE + +#endif // ANDROIDSURFACEVIEW_H diff --git a/src/plugins/android/src/wrappers/jni/jni.pri b/src/plugins/android/src/wrappers/jni/jni.pri index e96baff1c..930d7e922 100644 --- a/src/plugins/android/src/wrappers/jni/jni.pri +++ b/src/plugins/android/src/wrappers/jni/jni.pri @@ -8,7 +8,8 @@ HEADERS += \ $$PWD/androidmediametadataretriever.h \ $$PWD/androidcamera.h \ $$PWD/androidmultimediautils.h \ - $$PWD/androidmediarecorder.h + $$PWD/androidmediarecorder.h \ + $$PWD/androidsurfaceview.h SOURCES += \ $$PWD/androidmediaplayer.cpp \ @@ -16,4 +17,5 @@ SOURCES += \ $$PWD/androidmediametadataretriever.cpp \ $$PWD/androidcamera.cpp \ $$PWD/androidmultimediautils.cpp \ - $$PWD/androidmediarecorder.cpp + $$PWD/androidmediarecorder.cpp \ + $$PWD/androidsurfaceview.cpp diff --git a/src/plugins/avfoundation/camera/avfcamerasession.h b/src/plugins/avfoundation/camera/avfcamerasession.h index 838234522..13a8a35c5 100644 --- a/src/plugins/avfoundation/camera/avfcamerasession.h +++ b/src/plugins/avfoundation/camera/avfcamerasession.h @@ -105,8 +105,8 @@ Q_SIGNALS: private: static void updateCameraDevices(); void attachVideoInputDevice(); - void applyImageEncoderSettings(); - void applyViewfinderSettings(); + bool applyImageEncoderSettings(); + bool applyViewfinderSettings(); static int m_defaultCameraIndex; static QList<AVFCameraInfo> m_cameraDevices; diff --git a/src/plugins/avfoundation/camera/avfcamerasession.mm b/src/plugins/avfoundation/camera/avfcamerasession.mm index 43ab1d149..e0f18c114 100644 --- a/src/plugins/avfoundation/camera/avfcamerasession.mm +++ b/src/plugins/avfoundation/camera/avfcamerasession.mm @@ -285,10 +285,23 @@ void AVFCameraSession::setState(QCamera::State newState) Q_EMIT readyToConfigureConnections(); m_defaultCodec = 0; defaultCodec(); - applyImageEncoderSettings(); - 
applyViewfinderSettings(); + + bool activeFormatSet = applyImageEncoderSettings(); + activeFormatSet |= applyViewfinderSettings(); + [m_captureSession commitConfiguration]; + + if (activeFormatSet) { + // According to the doc, the capture device must be locked before + // startRunning to prevent the format we set to be overriden by the + // session preset. + [videoCaptureDevice() lockForConfiguration:nil]; + } + [m_captureSession startRunning]; + + if (activeFormatSet) + [videoCaptureDevice() unlockForConfiguration]; } if (oldState == QCamera::ActiveState) { @@ -357,27 +370,32 @@ void AVFCameraSession::attachVideoInputDevice() } } -void AVFCameraSession::applyImageEncoderSettings() +bool AVFCameraSession::applyImageEncoderSettings() { if (AVFImageEncoderControl *control = m_service->imageEncoderControl()) - control->applySettings(); + return control->applySettings(); + + return false; } -void AVFCameraSession::applyViewfinderSettings() +bool AVFCameraSession::applyViewfinderSettings() { if (AVFCameraViewfinderSettingsControl2 *vfControl = m_service->viewfinderSettingsControl2()) { QCameraViewfinderSettings vfSettings(vfControl->requestedSettings()); + // Viewfinder and image capture solutions must be the same, if an image capture + // resolution is set, it takes precedence over the viewfinder resolution. 
if (AVFImageEncoderControl *imControl = m_service->imageEncoderControl()) { - const QSize imageResolution(imControl->imageSettings().resolution()); + const QSize imageResolution(imControl->requestedSettings().resolution()); if (!imageResolution.isNull() && imageResolution.isValid()) { vfSettings.setResolution(imageResolution); vfControl->setViewfinderSettings(vfSettings); - return; } } - vfControl->applySettings(); + return vfControl->applySettings(); } + + return false; } void AVFCameraSession::addProbe(AVFMediaVideoProbeControl *probe) diff --git a/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.h b/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.h index cf2f512a7..9a5bbd5de 100644 --- a/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.h +++ b/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.h @@ -76,17 +76,13 @@ private: AVCaptureDeviceFormat *findBestFormatMatch(const QCameraViewfinderSettings &settings) const; QVector<QVideoFrame::PixelFormat> viewfinderPixelFormats() const; bool convertPixelFormatIfSupported(QVideoFrame::PixelFormat format, unsigned &avfFormat) const; - void applySettings(); + bool applySettings(); QCameraViewfinderSettings requestedSettings() const; - // Aux. function to extract things like captureDevice, videoOutput, etc. 
- bool updateAVFoundationObjects() const; + + AVCaptureConnection *videoConnection() const; AVFCameraService *m_service; - mutable AVFCameraSession *m_session; QCameraViewfinderSettings m_settings; - mutable AVCaptureDevice *m_captureDevice; - mutable AVCaptureVideoDataOutput *m_videoOutput; - mutable AVCaptureConnection *m_videoConnection; }; class AVFCameraViewfinderSettingsControl : public QCameraViewfinderSettingsControl diff --git a/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm b/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm index c5da1c343..3c20801e5 100644 --- a/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm +++ b/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm @@ -206,7 +206,6 @@ void qt_set_framerate_limits(AVCaptureDevice *captureDevice, AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection) { Q_ASSERT(captureDevice); - Q_ASSERT(videoConnection); AVFPSRange fps; #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0) @@ -234,7 +233,8 @@ AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnec #else // OSX < 10.7 or iOS < 7.0 { #endif // QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0) - fps = qt_connection_framerates(videoConnection); + if (videoConnection) + fps = qt_connection_framerates(videoConnection); } return fps; @@ -244,24 +244,20 @@ void qt_set_framerate_limits(AVCaptureDevice *captureDevice, AVCaptureConnection const QCameraViewfinderSettings &settings) { Q_ASSERT(captureDevice); - Q_ASSERT(videoConnection); #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0) if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_9, QSysInfo::MV_IOS_7_0)) qt_set_framerate_limits(captureDevice, settings); else - qt_set_framerate_limits(videoConnection, settings); -#else - qt_set_framerate_limits(videoConnection, settings); #endif + if (videoConnection) + 
qt_set_framerate_limits(videoConnection, settings); + } } // Unnamed namespace. AVFCameraViewfinderSettingsControl2::AVFCameraViewfinderSettingsControl2(AVFCameraService *service) - : m_service(service), - m_captureDevice(0), - m_videoOutput(0), - m_videoConnection(0) + : m_service(service) { Q_ASSERT(service); } @@ -270,8 +266,9 @@ QList<QCameraViewfinderSettings> AVFCameraViewfinderSettingsControl2::supportedV { QList<QCameraViewfinderSettings> supportedSettings; - if (!updateAVFoundationObjects()) { - qDebugCamera() << Q_FUNC_INFO << "no capture device or video output found"; + AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice(); + if (!captureDevice) { + qDebugCamera() << Q_FUNC_INFO << "no capture device found"; return supportedSettings; } @@ -281,15 +278,16 @@ QList<QCameraViewfinderSettings> AVFCameraViewfinderSettingsControl2::supportedV if (!pixelFormats.size()) pixelFormats << QVideoFrame::Format_Invalid; // The default value. + #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0) if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) { - if (!m_captureDevice.formats || !m_captureDevice.formats.count) { + if (!captureDevice.formats || !captureDevice.formats.count) { qDebugCamera() << Q_FUNC_INFO << "no capture device formats found"; return supportedSettings; } - const QVector<AVCaptureDeviceFormat *> formats(qt_unique_device_formats(m_captureDevice, - m_session->defaultCodec())); + const QVector<AVCaptureDeviceFormat *> formats(qt_unique_device_formats(captureDevice, + m_service->session()->defaultCodec())); for (int i = 0; i < formats.size(); ++i) { AVCaptureDeviceFormat *format = formats[i]; @@ -320,15 +318,18 @@ QList<QCameraViewfinderSettings> AVFCameraViewfinderSettingsControl2::supportedV #else { #endif - // TODO: resolution and PAR. 
- framerates << qt_connection_framerates(m_videoConnection); - for (int i = 0; i < pixelFormats.size(); ++i) { - for (int j = 0; j < framerates.size(); ++j) { - QCameraViewfinderSettings newSet; - newSet.setPixelFormat(pixelFormats[i]); - newSet.setMinimumFrameRate(framerates[j].first); - newSet.setMaximumFrameRate(framerates[j].second); - supportedSettings << newSet; + AVCaptureConnection *connection = videoConnection(); + if (connection) { + // TODO: resolution and PAR. + framerates << qt_connection_framerates(connection); + for (int i = 0; i < pixelFormats.size(); ++i) { + for (int j = 0; j < framerates.size(); ++j) { + QCameraViewfinderSettings newSet; + newSet.setPixelFormat(pixelFormats[i]); + newSet.setMinimumFrameRate(framerates[j].first); + newSet.setMaximumFrameRate(framerates[j].second); + supportedSettings << newSet; + } } } } @@ -340,20 +341,21 @@ QCameraViewfinderSettings AVFCameraViewfinderSettingsControl2::viewfinderSetting { QCameraViewfinderSettings settings; - if (!updateAVFoundationObjects()) { - qDebugCamera() << Q_FUNC_INFO << "no capture device or video output found"; + AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice(); + if (!captureDevice) { + qDebugCamera() << Q_FUNC_INFO << "no capture device found"; return settings; } #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0) if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) { - if (!m_captureDevice.activeFormat) { + if (!captureDevice.activeFormat) { qDebugCamera() << Q_FUNC_INFO << "no active capture device format"; return settings; } - const QSize res(qt_device_format_resolution(m_captureDevice.activeFormat)); - const QSize par(qt_device_format_pixel_aspect_ratio(m_captureDevice.activeFormat)); + const QSize res(qt_device_format_resolution(captureDevice.activeFormat)); + const QSize par(qt_device_format_pixel_aspect_ratio(captureDevice.activeFormat)); if (res.isNull() || !res.isValid() || par.isNull() || 
!par.isValid()) { qDebugCamera() << Q_FUNC_INFO << "failed to obtain resolution/pixel aspect ratio"; return settings; @@ -364,12 +366,14 @@ QCameraViewfinderSettings AVFCameraViewfinderSettingsControl2::viewfinderSetting } #endif // TODO: resolution and PAR before 7.0. - const AVFPSRange fps = qt_current_framerates(m_captureDevice, m_videoConnection); + const AVFPSRange fps = qt_current_framerates(captureDevice, videoConnection()); settings.setMinimumFrameRate(fps.first); settings.setMaximumFrameRate(fps.second); - if (NSObject *obj = [m_videoOutput.videoSettings objectForKey:(id)kCVPixelBufferPixelFormatTypeKey]) { - if ([obj isKindOfClass:[NSNumber class]]) { + AVCaptureVideoDataOutput *videoOutput = m_service->videoOutput() ? m_service->videoOutput()->videoDataOutput() : 0; + if (videoOutput) { + NSObject *obj = [videoOutput.videoSettings objectForKey:(id)kCVPixelBufferPixelFormatTypeKey]; + if (obj && [obj isKindOfClass:[NSNumber class]]) { NSNumber *nsNum = static_cast<NSNumber *>(obj); settings.setPixelFormat(QtPixelFormatFromCVFormat([nsNum unsignedIntValue])); } @@ -380,11 +384,6 @@ QCameraViewfinderSettings AVFCameraViewfinderSettingsControl2::viewfinderSetting void AVFCameraViewfinderSettingsControl2::setViewfinderSettings(const QCameraViewfinderSettings &settings) { - if (settings.isNull()) { - qDebugCamera() << Q_FUNC_INFO << "empty viewfinder settings"; - return; - } - if (m_settings == settings) return; @@ -449,17 +448,19 @@ bool AVFCameraViewfinderSettingsControl2::CVPixelFormatFromQtFormat(QVideoFrame: AVCaptureDeviceFormat *AVFCameraViewfinderSettingsControl2::findBestFormatMatch(const QCameraViewfinderSettings &settings) const { + AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice(); + if (!captureDevice || settings.isNull()) + return nil; + #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0) if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) { - Q_ASSERT(m_captureDevice); - 
Q_ASSERT(m_session); const QSize &resolution = settings.resolution(); if (!resolution.isNull() && resolution.isValid()) { // Either the exact match (including high resolution for images on iOS) // or a format with a resolution close to the requested one. - return qt_find_best_resolution_match(m_captureDevice, resolution, - m_session->defaultCodec()); + return qt_find_best_resolution_match(captureDevice, resolution, + m_service->session()->defaultCodec()); } // No resolution requested, what about framerates? @@ -472,22 +473,28 @@ AVCaptureDeviceFormat *AVFCameraViewfinderSettingsControl2::findBestFormatMatch( const qreal minFPS(settings.minimumFrameRate()); const qreal maxFPS(settings.maximumFrameRate()); if (minFPS || maxFPS) - return qt_find_best_framerate_match(m_captureDevice, maxFPS ? maxFPS : minFPS, - m_session->defaultCodec()); + return qt_find_best_framerate_match(captureDevice, maxFPS ? maxFPS : minFPS, + m_service->session()->defaultCodec()); // Ignore PAR for the moment (PAR without resolution can // pick a format with really bad resolution). // No need to test pixel format, just return settings. } #endif + return nil; } QVector<QVideoFrame::PixelFormat> AVFCameraViewfinderSettingsControl2::viewfinderPixelFormats() const { - Q_ASSERT(m_videoOutput); - QVector<QVideoFrame::PixelFormat> qtFormats; - NSArray *pixelFormats = [m_videoOutput availableVideoCVPixelFormatTypes]; + + AVCaptureVideoDataOutput *videoOutput = m_service->videoOutput() ? 
m_service->videoOutput()->videoDataOutput() : 0; + if (!videoOutput) { + qDebugCamera() << Q_FUNC_INFO << "no video output found"; + return qtFormats; + } + + NSArray *pixelFormats = [videoOutput availableVideoCVPixelFormatTypes]; for (NSObject *obj in pixelFormats) { if (![obj isKindOfClass:[NSNumber class]]) @@ -508,17 +515,19 @@ QVector<QVideoFrame::PixelFormat> AVFCameraViewfinderSettingsControl2::viewfinde bool AVFCameraViewfinderSettingsControl2::convertPixelFormatIfSupported(QVideoFrame::PixelFormat qtFormat, unsigned &avfFormat)const { - Q_ASSERT(m_videoOutput); + AVCaptureVideoDataOutput *videoOutput = m_service->videoOutput() ? m_service->videoOutput()->videoDataOutput() : 0; + if (!videoOutput) + return false; unsigned conv = 0; if (!CVPixelFormatFromQtFormat(qtFormat, conv)) return false; - NSArray *formats = [m_videoOutput availableVideoCVPixelFormatTypes]; + NSArray *formats = [videoOutput availableVideoCVPixelFormatTypes]; if (!formats || !formats.count) return false; - if (m_service->videoOutput() && m_service->videoOutput()->surface()) { + if (m_service->videoOutput()->surface()) { const QAbstractVideoSurface *surface = m_service->videoOutput()->surface(); if (!surface->supportedPixelFormats().contains(qtFormat)) return false; @@ -539,31 +548,30 @@ bool AVFCameraViewfinderSettingsControl2::convertPixelFormatIfSupported(QVideoFr return found; } -void AVFCameraViewfinderSettingsControl2::applySettings() +bool AVFCameraViewfinderSettingsControl2::applySettings() { - if (m_settings.isNull()) - return; + if (m_service->session()->state() != QCamera::LoadedState && + m_service->session()->state() != QCamera::ActiveState) { + return false; + } - if (!updateAVFoundationObjects()) - return; + AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice(); + if (!captureDevice) + return false; - if (m_session->state() != QCamera::LoadedState && - m_session->state() != QCamera::ActiveState) { - return; - } + bool activeFormatChanged = false; - 
NSMutableDictionary *videoSettings = [NSMutableDictionary dictionaryWithCapacity:1]; #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0) AVCaptureDeviceFormat *match = findBestFormatMatch(m_settings); if (match) { - if (match != m_captureDevice.activeFormat) { - const AVFConfigurationLock lock(m_captureDevice); - if (!lock) { + if (match != captureDevice.activeFormat) { + const AVFConfigurationLock lock(captureDevice); + if (lock) { + captureDevice.activeFormat = match; + activeFormatChanged = true; + } else { qDebugCamera() << Q_FUNC_INFO << "failed to lock for configuration"; - return; } - - m_captureDevice.activeFormat = match; } } else { qDebugCamera() << Q_FUNC_INFO << "matching device format not found"; @@ -571,43 +579,48 @@ void AVFCameraViewfinderSettingsControl2::applySettings() } #endif - unsigned avfPixelFormat = 0; - if (!convertPixelFormatIfSupported(m_settings.pixelFormat(), avfPixelFormat)) { - // If the the pixel format is not specified or invalid, pick the preferred video surface - // format, or if no surface is set, the preferred capture device format - - const QVector<QVideoFrame::PixelFormat> deviceFormats = viewfinderPixelFormats(); - QVideoFrame::PixelFormat pickedFormat = deviceFormats.first(); - - QAbstractVideoSurface *surface = m_service->videoOutput() ? m_service->videoOutput()->surface() - : 0; - if (surface) { - if (m_service->videoOutput()->supportsTextures()) { - pickedFormat = QVideoFrame::Format_ARGB32; - } else { - QList<QVideoFrame::PixelFormat> surfaceFormats = m_service->videoOutput()->surface()->supportedPixelFormats(); - - for (int i = 0; i < surfaceFormats.count(); ++i) { - const QVideoFrame::PixelFormat surfaceFormat = surfaceFormats.at(i); - if (deviceFormats.contains(surfaceFormat)) { - pickedFormat = surfaceFormat; - break; + AVCaptureVideoDataOutput *videoOutput = m_service->videoOutput() ? 
m_service->videoOutput()->videoDataOutput() : 0; + if (videoOutput) { + unsigned avfPixelFormat = 0; + if (!convertPixelFormatIfSupported(m_settings.pixelFormat(), avfPixelFormat)) { + // If the the pixel format is not specified or invalid, pick the preferred video surface + // format, or if no surface is set, the preferred capture device format + + const QVector<QVideoFrame::PixelFormat> deviceFormats = viewfinderPixelFormats(); + QVideoFrame::PixelFormat pickedFormat = deviceFormats.first(); + + QAbstractVideoSurface *surface = m_service->videoOutput()->surface(); + if (surface) { + if (m_service->videoOutput()->supportsTextures()) { + pickedFormat = QVideoFrame::Format_ARGB32; + } else { + QList<QVideoFrame::PixelFormat> surfaceFormats = surface->supportedPixelFormats(); + + for (int i = 0; i < surfaceFormats.count(); ++i) { + const QVideoFrame::PixelFormat surfaceFormat = surfaceFormats.at(i); + if (deviceFormats.contains(surfaceFormat)) { + pickedFormat = surfaceFormat; + break; + } } } } - } - CVPixelFormatFromQtFormat(pickedFormat, avfPixelFormat); - } + CVPixelFormatFromQtFormat(pickedFormat, avfPixelFormat); + } - if (avfPixelFormat != 0) { - [videoSettings setObject:[NSNumber numberWithUnsignedInt:avfPixelFormat] - forKey:(id)kCVPixelBufferPixelFormatTypeKey]; + if (avfPixelFormat != 0) { + NSMutableDictionary *videoSettings = [NSMutableDictionary dictionaryWithCapacity:1]; + [videoSettings setObject:[NSNumber numberWithUnsignedInt:avfPixelFormat] + forKey:(id)kCVPixelBufferPixelFormatTypeKey]; - m_videoOutput.videoSettings = videoSettings; + videoOutput.videoSettings = videoSettings; + } } - qt_set_framerate_limits(m_captureDevice, m_videoConnection, m_settings); + qt_set_framerate_limits(captureDevice, videoConnection(), m_settings); + + return activeFormatChanged; } QCameraViewfinderSettings AVFCameraViewfinderSettingsControl2::requestedSettings() const @@ -615,33 +628,12 @@ QCameraViewfinderSettings 
AVFCameraViewfinderSettingsControl2::requestedSettings return m_settings; } -bool AVFCameraViewfinderSettingsControl2::updateAVFoundationObjects() const +AVCaptureConnection *AVFCameraViewfinderSettingsControl2::videoConnection() const { - m_session = 0; - m_captureDevice = 0; - m_videoOutput = 0; - m_videoConnection = 0; - - if (!m_service->session()) - return false; - - if (!m_service->session()->videoCaptureDevice()) - return false; - if (!m_service->videoOutput() || !m_service->videoOutput()->videoDataOutput()) - return false; - - AVCaptureVideoDataOutput *output = m_service->videoOutput()->videoDataOutput(); - AVCaptureConnection *connection = [output connectionWithMediaType:AVMediaTypeVideo]; - if (!connection) - return false; - - m_session = m_service->session(); - m_captureDevice = m_session->videoCaptureDevice(); - m_videoOutput = output; - m_videoConnection = connection; + return nil; - return true; + return [m_service->videoOutput()->videoDataOutput() connectionWithMediaType:AVMediaTypeVideo]; } AVFCameraViewfinderSettingsControl::AVFCameraViewfinderSettingsControl(AVFCameraService *service) diff --git a/src/plugins/avfoundation/camera/avfimageencodercontrol.h b/src/plugins/avfoundation/camera/avfimageencodercontrol.h index fcb665a03..d3af77ffd 100644 --- a/src/plugins/avfoundation/camera/avfimageencodercontrol.h +++ b/src/plugins/avfoundation/camera/avfimageencodercontrol.h @@ -62,11 +62,13 @@ public: QImageEncoderSettings imageSettings() const Q_DECL_OVERRIDE; void setImageSettings(const QImageEncoderSettings &settings) Q_DECL_OVERRIDE; + QImageEncoderSettings requestedSettings() const; + private: AVFCameraService *m_service; QImageEncoderSettings m_settings; - void applySettings(); + bool applySettings(); bool videoCaptureDeviceIsValid() const; }; diff --git a/src/plugins/avfoundation/camera/avfimageencodercontrol.mm b/src/plugins/avfoundation/camera/avfimageencodercontrol.mm index 36050c3a2..e2eb0bd01 100644 --- 
a/src/plugins/avfoundation/camera/avfimageencodercontrol.mm +++ b/src/plugins/avfoundation/camera/avfimageencodercontrol.mm @@ -115,6 +115,11 @@ QList<QSize> AVFImageEncoderControl::supportedResolutions(const QImageEncoderSet return resolutions; } +QImageEncoderSettings AVFImageEncoderControl::requestedSettings() const +{ + return m_settings; +} + QImageEncoderSettings AVFImageEncoderControl::imageSettings() const { QImageEncoderSettings settings; @@ -163,40 +168,40 @@ QImageEncoderSettings AVFImageEncoderControl::imageSettings() const void AVFImageEncoderControl::setImageSettings(const QImageEncoderSettings &settings) { - if (m_settings == settings || settings.isNull()) + if (m_settings == settings) return; m_settings = settings; applySettings(); } -void AVFImageEncoderControl::applySettings() +bool AVFImageEncoderControl::applySettings() { if (!videoCaptureDeviceIsValid()) - return; + return false; AVFCameraSession *session = m_service->session(); if (!session || (session->state() != QCamera::ActiveState && session->state() != QCamera::LoadedState)) { - return; + return false; } if (!m_service->imageCaptureControl() || !m_service->imageCaptureControl()->stillImageOutput()) { qDebugCamera() << Q_FUNC_INFO << "no still image output"; - return; + return false; } if (m_settings.codec().size() && m_settings.codec() != QLatin1String("jpeg")) { qDebugCamera() << Q_FUNC_INFO << "unsupported codec:" << m_settings.codec(); - return; + return false; } QSize res(m_settings.resolution()); if (res.isNull()) { qDebugCamera() << Q_FUNC_INFO << "invalid resolution:" << res; - return; + return false; } if (!res.isValid()) { @@ -204,9 +209,11 @@ void AVFImageEncoderControl::applySettings() // Here we could choose the best format available, but // activeFormat is already equal to 'preset high' by default, // which is good enough, otherwise we can end in some format with low framerates. 
- return; + return false; } + bool activeFormatChanged = false; + #if QT_MAC_PLATFORM_SDK_EQUAL_OR_ABOVE(__MAC_10_7, __IPHONE_7_0) if (QSysInfo::MacintoshVersion >= qt_OS_limit(QSysInfo::MV_10_7, QSysInfo::MV_IOS_7_0)) { AVCaptureDevice *captureDevice = m_service->session()->videoCaptureDevice(); @@ -215,16 +222,17 @@ void AVFImageEncoderControl::applySettings() if (!match) { qDebugCamera() << Q_FUNC_INFO << "unsupported resolution:" << res; - return; + return false; } if (match != captureDevice.activeFormat) { const AVFConfigurationLock lock(captureDevice); if (!lock) { qDebugCamera() << Q_FUNC_INFO << "failed to lock for configuration"; - return; + return false; } captureDevice.activeFormat = match; + activeFormatChanged = true; } #if defined(Q_OS_IOS) && QT_IOS_PLATFORM_SDK_EQUAL_OR_ABOVE(__IPHONE_8_0) @@ -242,6 +250,8 @@ void AVFImageEncoderControl::applySettings() #endif // TODO: resolution without capture device format ... } + + return activeFormatChanged; } bool AVFImageEncoderControl::videoCaptureDeviceIsValid() const diff --git a/src/plugins/avfoundation/camera/avfmediarecordercontrol.mm b/src/plugins/avfoundation/camera/avfmediarecordercontrol.mm index 412dab76c..1b6e23ee5 100644 --- a/src/plugins/avfoundation/camera/avfmediarecordercontrol.mm +++ b/src/plugins/avfoundation/camera/avfmediarecordercontrol.mm @@ -37,6 +37,7 @@ #include "avfcameraservice.h" #include "avfcameracontrol.h" #include "avfaudioinputselectorcontrol.h" +#include "avfcamerautility.h" #include <QtCore/qurl.h> #include <QtCore/qfileinfo.h> @@ -330,6 +331,9 @@ void AVFMediaRecorderControl::setupSessionForCapture() && m_cameraControl->captureMode().testFlag(QCamera::CaptureVideo) && m_session->state() != QCamera::UnloadedState) { + // Lock the video capture device to make sure the active format is not reset + const AVFConfigurationLock lock(m_session->videoCaptureDevice()); + // Add audio input // Allow recording even if something wrong happens with the audio input initialization 
AVCaptureDevice *audioDevice = m_audioInputControl->createCaptureDevice(); @@ -359,7 +363,10 @@ void AVFMediaRecorderControl::setupSessionForCapture() } } else if (m_connected && (!m_cameraControl->captureMode().testFlag(QCamera::CaptureVideo) - || m_session->state() != QCamera::ActiveState)) { + || m_session->state() == QCamera::UnloadedState)) { + + // Lock the video capture device to make sure the active format is not reset + const AVFConfigurationLock lock(m_session->videoCaptureDevice()); [captureSession removeOutput:m_movieOutput]; diff --git a/src/plugins/common/evr.pri b/src/plugins/common/evr.pri index f951f6730..2a1b383df 100644 --- a/src/plugins/common/evr.pri +++ b/src/plugins/common/evr.pri @@ -1,8 +1,20 @@ INCLUDEPATH += $$PWD/evr qtHaveModule(widgets): QT += widgets +QT += gui-private -HEADERS += $$PWD/evr/evrvideowindowcontrol.h \ - $$PWD/evr/evrdefs.h +LIBS += -lmf -lmfplat -lmfuuid -ld3d9 -ldxva2 -lwinmm -levr -SOURCES += $$PWD/evr/evrvideowindowcontrol.cpp +HEADERS += \ + $$PWD/evr/evrvideowindowcontrol.h \ + $$PWD/evr/evrcustompresenter.h \ + $$PWD/evr/evrd3dpresentengine.h \ + $$PWD/evr/evrhelpers.h \ + $$PWD/evr/evrdefs.h + +SOURCES += \ + $$PWD/evr/evrvideowindowcontrol.cpp \ + $$PWD/evr/evrcustompresenter.cpp \ + $$PWD/evr/evrd3dpresentengine.cpp \ + $$PWD/evr/evrhelpers.cpp \ + $$PWD/evr/evrdefs.cpp diff --git a/src/plugins/wmf/evrcustompresenter.cpp b/src/plugins/common/evr/evrcustompresenter.cpp index 967095b20..73d032aa6 100644 --- a/src/plugins/wmf/evrcustompresenter.cpp +++ b/src/plugins/common/evr/evrcustompresenter.cpp @@ -33,8 +33,8 @@ #include "evrcustompresenter.h" -#include "mfglobal.h" #include "evrd3dpresentengine.h" +#include "evrhelpers.h" #include <QtCore/qmutex.h> #include <QtCore/qvarlengtharray.h> @@ -44,10 +44,8 @@ #include <qcoreapplication.h> #include <qmath.h> #include <QtCore/qdebug.h> -#include <d3d9.h> -#include <dshow.h> - -QT_USE_NAMESPACE +#include <float.h> +#include <evcode.h> const static MFRatio 
g_DefaultFrameRate = { 30, 1 }; static const DWORD SCHEDULER_TIMEOUT = 5000; @@ -58,7 +56,6 @@ static const LONG ONE_MSEC = 1000; static HRESULT setDesiredSampleTime(IMFSample *sample, const LONGLONG& hnsSampleTime, const LONGLONG& hnsDuration); static HRESULT clearDesiredSampleTime(IMFSample *sample); static HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect& nrcSource); -static DWORD getFourCCFromPixelFormat(QVideoFrame::PixelFormat pixelFormat); static QVideoFrame::PixelFormat pixelFormatFromMediaType(IMFMediaType *type); static inline LONG MFTimeToMsec(const LONGLONG& time) @@ -66,24 +63,62 @@ static inline LONG MFTimeToMsec(const LONGLONG& time) return (LONG)(time / (ONE_SECOND / ONE_MSEC)); } +bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter) +{ + if (!evr || !presenter) + return false; + + HRESULT result = E_FAIL; + + IMFVideoRenderer *renderer = NULL; + if (SUCCEEDED(evr->QueryInterface(IID_PPV_ARGS(&renderer)))) { + result = renderer->InitializeRenderer(NULL, presenter); + renderer->Release(); + } + + return result == S_OK; +} + +class PresentSampleEvent : public QEvent +{ +public: + PresentSampleEvent(IMFSample *sample) + : QEvent(QEvent::Type(EVRCustomPresenter::PresentSample)) + , m_sample(sample) + { + if (m_sample) + m_sample->AddRef(); + } -Scheduler::Scheduler() - : m_CB(NULL) + ~PresentSampleEvent() + { + if (m_sample) + m_sample->Release(); + } + + IMFSample *sample() const { return m_sample; } + +private: + IMFSample *m_sample; +}; + +Scheduler::Scheduler(EVRCustomPresenter *presenter) + : m_presenter(presenter) , m_clock(NULL) , m_threadID(0) , m_schedulerThread(0) , m_threadReadyEvent(0) , m_flushEvent(0) , m_playbackRate(1.0f) - , m_lastSampleTime(0) , m_perFrameInterval(0) , m_perFrame_1_4th(0) + , m_lastSampleTime(0) { } Scheduler::~Scheduler() { - qt_wmf_safeRelease(&m_clock); + qt_evr_safe_release(&m_clock); for (int i = 0; i < m_scheduledSamples.size(); ++i) 
m_scheduledSamples[i]->Release(); m_scheduledSamples.clear(); @@ -109,8 +144,14 @@ HRESULT Scheduler::startScheduler(IMFClock *clock) HRESULT hr = S_OK; DWORD dwID = 0; + HANDLE hObjects[2]; + DWORD dwWait = 0; - qt_wmf_copyComPointer(m_clock, clock); + if (m_clock) + m_clock->Release(); + m_clock = clock; + if (m_clock) + m_clock->AddRef(); // Set a high the timer resolution (ie, short timer period). timeBeginPeriod(1); @@ -136,10 +177,9 @@ HRESULT Scheduler::startScheduler(IMFClock *clock) goto done; } - HANDLE hObjects[] = { m_threadReadyEvent, m_schedulerThread }; - DWORD dwWait = 0; - // Wait for the thread to signal the "thread ready" event. + hObjects[0] = m_threadReadyEvent; + hObjects[1] = m_schedulerThread; dwWait = WaitForMultipleObjects(2, hObjects, FALSE, INFINITE); // Wait for EITHER of these handles. if (WAIT_OBJECT_0 != dwWait) { // The thread terminated early for some reason. This is an error condition. @@ -210,9 +250,6 @@ HRESULT Scheduler::flush() HRESULT Scheduler::scheduleSample(IMFSample *sample, bool presentNow) { - if (!m_CB) - return MF_E_NOT_INITIALIZED; - if (!m_schedulerThread) return MF_E_NOT_INITIALIZED; @@ -224,13 +261,7 @@ HRESULT Scheduler::scheduleSample(IMFSample *sample, bool presentNow) return E_FAIL; if (presentNow || !m_clock) { - // Present the sample immediately. - sample->AddRef(); - QMetaObject::invokeMethod(m_CB, - "presentSample", - Qt::QueuedConnection, - Q_ARG(void*, sample), - Q_ARG(qint64, 0)); + m_presenter->presentSample(sample); } else { // Queue the sample and ask the scheduler thread to wake up. m_mutex.lock(); @@ -262,7 +293,7 @@ HRESULT Scheduler::processSamplesInQueue(LONG *nextSleep) // means the scheduler should sleep for that amount of time. 
hr = processSample(sample, &wait); - qt_wmf_safeRelease(&sample); + qt_evr_safe_release(&sample); if (FAILED(hr) || wait > 0) break; @@ -326,12 +357,7 @@ HRESULT Scheduler::processSample(IMFSample *sample, LONG *pNextSleep) } if (presentNow) { - sample->AddRef(); - QMetaObject::invokeMethod(m_CB, - "presentSample", - Qt::QueuedConnection, - Q_ARG(void*, sample), - Q_ARG(qint64, hnsPresentationTime)); + m_presenter->presentSample(sample); } else { // The sample is not ready yet. Return it to the queue. m_mutex.lock(); @@ -401,7 +427,7 @@ DWORD Scheduler::schedulerThreadProcPrivate() hr = processSamplesInQueue(&wait); if (FAILED(hr)) exitThread = true; - processSamples = (wait != INFINITE); + processSamples = (wait != (LONG)INFINITE); } break; } @@ -520,24 +546,27 @@ HRESULT SamplePool::clear() } -EVRCustomPresenter::EVRCustomPresenter() +EVRCustomPresenter::EVRCustomPresenter(QAbstractVideoSurface *surface) : QObject() , m_sampleFreeCB(this, &EVRCustomPresenter::onSampleFree) , m_refCount(1) , m_renderState(RenderShutdown) , m_mutex(QMutex::Recursive) + , m_scheduler(this) , m_tokenCounter(0) , m_sampleNotify(false) , m_repaint(false) , m_prerolled(false) , m_endStreaming(false) , m_playbackRate(1.0f) - , m_D3DPresentEngine(0) + , m_presentEngine(new D3DPresentEngine) , m_clock(0) , m_mixer(0) , m_mediaEventSink(0) , m_mediaType(0) , m_surface(0) + , m_canRenderToSurface(false) + , m_sampleToPresent(0) { // Initial source rectangle = (0,0,1,1) m_sourceRect.top = 0; @@ -545,18 +574,21 @@ EVRCustomPresenter::EVRCustomPresenter() m_sourceRect.bottom = 1; m_sourceRect.right = 1; - m_D3DPresentEngine = new D3DPresentEngine; - m_scheduler.setCallback(m_D3DPresentEngine); + setSurface(surface); } EVRCustomPresenter::~EVRCustomPresenter() { - qt_wmf_safeRelease(&m_clock); - qt_wmf_safeRelease(&m_mixer); - qt_wmf_safeRelease(&m_mediaEventSink); - qt_wmf_safeRelease(&m_mediaType); + m_scheduler.flush(); + m_scheduler.stopScheduler(); + m_samplePool.clear(); + + 
qt_evr_safe_release(&m_clock); + qt_evr_safe_release(&m_mixer); + qt_evr_safe_release(&m_mediaEventSink); + qt_evr_safe_release(&m_mediaType); - m_D3DPresentEngine->deleteLater(); + delete m_presentEngine; } HRESULT EVRCustomPresenter::QueryInterface(REFIID riid, void ** ppvObject) @@ -606,11 +638,11 @@ HRESULT EVRCustomPresenter::GetService(REFGUID guidService, REFIID riid, LPVOID return E_POINTER; // The only service GUID that we support is MR_VIDEO_RENDER_SERVICE. - if (guidService != MR_VIDEO_RENDER_SERVICE) + if (guidService != mr_VIDEO_RENDER_SERVICE) return MF_E_UNSUPPORTED_SERVICE; // First try to get the service interface from the D3DPresentEngine object. - hr = m_D3DPresentEngine->getService(guidService, riid, ppvObject); + hr = m_presentEngine->getService(guidService, riid, ppvObject); if (FAILED(hr)) // Next, check if this object supports the interface. hr = QueryInterface(riid, ppvObject); @@ -623,7 +655,7 @@ HRESULT EVRCustomPresenter::GetDeviceID(IID* deviceID) if (!deviceID) return E_POINTER; - *deviceID = IID_IDirect3DDevice9; + *deviceID = iid_IDirect3DDevice9; return S_OK; } @@ -642,15 +674,15 @@ HRESULT EVRCustomPresenter::InitServicePointers(IMFTopologyServiceLookup *lookup if (isActive()) return MF_E_INVALIDREQUEST; - qt_wmf_safeRelease(&m_clock); - qt_wmf_safeRelease(&m_mixer); - qt_wmf_safeRelease(&m_mediaEventSink); + qt_evr_safe_release(&m_clock); + qt_evr_safe_release(&m_mixer); + qt_evr_safe_release(&m_mediaEventSink); // Ask for the clock. Optional, because the EVR might not have a clock. 
objectCount = 1; lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0, - MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_clock), + mr_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_clock), &objectCount ); @@ -658,7 +690,7 @@ HRESULT EVRCustomPresenter::InitServicePointers(IMFTopologyServiceLookup *lookup objectCount = 1; hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0, - MR_VIDEO_MIXER_SERVICE, IID_PPV_ARGS(&m_mixer), + mr_VIDEO_MIXER_SERVICE, IID_PPV_ARGS(&m_mixer), &objectCount ); @@ -674,7 +706,7 @@ HRESULT EVRCustomPresenter::InitServicePointers(IMFTopologyServiceLookup *lookup objectCount = 1; hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0, - MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_mediaEventSink), + mr_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_mediaEventSink), &objectCount ); @@ -700,13 +732,18 @@ HRESULT EVRCustomPresenter::ReleaseServicePointers() setMediaType(NULL); // Release all services that were acquired from InitServicePointers. - qt_wmf_safeRelease(&m_clock); - qt_wmf_safeRelease(&m_mixer); - qt_wmf_safeRelease(&m_mediaEventSink); + qt_evr_safe_release(&m_clock); + qt_evr_safe_release(&m_mixer); + qt_evr_safe_release(&m_mediaEventSink); return S_OK; } +bool EVRCustomPresenter::isValid() const +{ + return m_presentEngine->isValid() && m_canRenderToSurface; +} + HRESULT EVRCustomPresenter::ProcessMessage(MFVP_MESSAGE_TYPE message, ULONG_PTR param) { HRESULT hr = S_OK; @@ -819,11 +856,7 @@ HRESULT EVRCustomPresenter::OnClockStart(MFTIME, LONGLONG clockStartOffset) return hr; } - // Start the video surface in the main thread - if (thread() == QThread::currentThread()) - startSurface(); - else - QMetaObject::invokeMethod(this, "startSurface", Qt::QueuedConnection); + startSurface(); // Now try to get new output samples from the mixer. 
processOutputLoop(); @@ -872,11 +905,7 @@ HRESULT EVRCustomPresenter::OnClockStop(MFTIME) cancelFrameStep(); } - // Stop the video surface in the main thread - if (thread() == QThread::currentThread()) - stopSurface(); - else - QMetaObject::invokeMethod(this, "stopSurface", Qt::QueuedConnection); + stopSurface(); return S_OK; } @@ -1003,26 +1032,44 @@ void EVRCustomPresenter::supportedFormatsChanged() { QMutexLocker locker(&m_mutex); - m_supportedGLFormats.clear(); - if (!m_surface) - return; + m_canRenderToSurface = false; + m_presentEngine->setHint(D3DPresentEngine::RenderToTexture, false); - QList<QVideoFrame::PixelFormat> formats = m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle); - for (int i = 0; i < formats.size(); ++i) { - DWORD fourCC = getFourCCFromPixelFormat(formats.at(i)); - if (fourCC) - m_supportedGLFormats.append(fourCC); + // check if we can render to the surface (compatible formats) + if (m_surface) { + QList<QVideoFrame::PixelFormat> formats = m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle); + if (m_presentEngine->supportsTextureRendering() && formats.contains(QVideoFrame::Format_RGB32)) { + m_presentEngine->setHint(D3DPresentEngine::RenderToTexture, true); + m_canRenderToSurface = true; + } else { + formats = m_surface->supportedPixelFormats(QAbstractVideoBuffer::NoHandle); + Q_FOREACH (QVideoFrame::PixelFormat format, formats) { + if (SUCCEEDED(m_presentEngine->checkFormat(qt_evr_D3DFormatFromPixelFormat(format)))) { + m_canRenderToSurface = true; + break; + } + } + } } + + // TODO: if media type already set, renegotiate? 
} void EVRCustomPresenter::setSurface(QAbstractVideoSurface *surface) { m_mutex.lock(); + if (m_surface) { + disconnect(m_surface, &QAbstractVideoSurface::supportedFormatsChanged, + this, &EVRCustomPresenter::supportedFormatsChanged); + } + m_surface = surface; - if (m_D3DPresentEngine) - m_D3DPresentEngine->setSurface(surface); + if (m_surface) { + connect(m_surface, &QAbstractVideoSurface::supportedFormatsChanged, + this, &EVRCustomPresenter::supportedFormatsChanged); + } m_mutex.unlock(); @@ -1049,8 +1096,8 @@ HRESULT EVRCustomPresenter::renegotiateMediaType() // Loop through all of the mixer's proposed output types. DWORD typeIndex = 0; while (!foundMediaType && (hr != MF_E_NO_MORE_TYPES)) { - qt_wmf_safeRelease(&mixerType); - qt_wmf_safeRelease(&optimalType); + qt_evr_safe_release(&mixerType); + qt_evr_safe_release(&optimalType); // Step 1. Get the next media type supported by mixer. hr = m_mixer->GetOutputAvailableType(0, typeIndex++, &mixerType); @@ -1089,8 +1136,8 @@ HRESULT EVRCustomPresenter::renegotiateMediaType() foundMediaType = true; } - qt_wmf_safeRelease(&mixerType); - qt_wmf_safeRelease(&optimalType); + qt_evr_safe_release(&mixerType); + qt_evr_safe_release(&optimalType); return hr; } @@ -1112,11 +1159,7 @@ HRESULT EVRCustomPresenter::flush() if (m_renderState == RenderStopped) { // Repaint with black. 
- QMetaObject::invokeMethod(m_D3DPresentEngine, - "presentSample", - Qt::QueuedConnection, - Q_ARG(void*, 0), - Q_ARG(qint64, 0)); + presentSample(NULL); } return S_OK; @@ -1218,7 +1261,7 @@ HRESULT EVRCustomPresenter::startFrameStep() if (FAILED(hr)) goto done; - qt_wmf_safeRelease(&sample); + qt_evr_safe_release(&sample); // We break from this loop when: // (a) the frame-step queue is empty, or @@ -1234,12 +1277,12 @@ HRESULT EVRCustomPresenter::startFrameStep() if (FAILED(hr)) goto done; - qt_wmf_safeRelease(&sample); + qt_evr_safe_release(&sample); } } done: - qt_wmf_safeRelease(&sample); + qt_evr_safe_release(&sample); return hr; } @@ -1251,7 +1294,7 @@ HRESULT EVRCustomPresenter::completeFrameStep(IMFSample *sample) // Update our state. m_frameStep.state = FrameStepComplete; - m_frameStep.sampleNoRef = NULL; + m_frameStep.sampleNoRef = 0; // Notify the EVR that the frame-step is complete. notifyEvent(EC_STEP_COMPLETE, FALSE, 0); // FALSE = completed (not cancelled) @@ -1279,7 +1322,7 @@ HRESULT EVRCustomPresenter::cancelFrameStep() m_frameStep.state = FrameStepNone; m_frameStep.steps = 0; - m_frameStep.sampleNoRef = NULL; + m_frameStep.sampleNoRef = 0; // Don't clear the frame-step queue yet, because we might frame step again. if (oldState > FrameStepNone && oldState < FrameStepComplete) { @@ -1302,6 +1345,10 @@ HRESULT EVRCustomPresenter::createOptimalVideoType(IMFMediaType *proposedType, I IMFMediaType *mtOptimal = NULL; + UINT64 size; + int width; + int height; + // Clone the proposed type. hr = MFCreateMediaType(&mtOptimal); @@ -1315,21 +1362,22 @@ HRESULT EVRCustomPresenter::createOptimalVideoType(IMFMediaType *proposedType, I // Modify the new type. // Set the pixel aspect ratio (PAR) to 1:1 (see assumption #1, above) - hr = MFSetAttributeRatio(mtOptimal, MF_MT_PIXEL_ASPECT_RATIO, 1, 1); + // The ratio is packed in a single UINT64. A helper function is normally available for + // that (MFSetAttributeRatio) but it's not correctly defined in MinGW 4.9.1. 
+ hr = mtOptimal->SetUINT64(MF_MT_PIXEL_ASPECT_RATIO, (((UINT64) 1) << 32) | ((UINT64) 1)); if (FAILED(hr)) goto done; - UINT64 size; hr = proposedType->GetUINT64(MF_MT_FRAME_SIZE, &size); - int width = int(HI32(size)); - int height = int(LO32(size)); + width = int(HI32(size)); + height = int(LO32(size)); rcOutput.left = 0; rcOutput.top = 0; rcOutput.right = width; rcOutput.bottom = height; // Set the geometric aperture, and disable pan/scan. - displayArea = qt_wmf_makeMFArea(0, 0, rcOutput.right, rcOutput.bottom); + displayArea = qt_evr_makeMFArea(0, 0, rcOutput.right, rcOutput.bottom); hr = mtOptimal->SetUINT32(MF_MT_PAN_SCAN_ENABLED, FALSE); if (FAILED(hr)) @@ -1355,7 +1403,7 @@ HRESULT EVRCustomPresenter::createOptimalVideoType(IMFMediaType *proposedType, I (*optimalType)->AddRef(); done: - qt_wmf_safeRelease(&mtOptimal); + qt_evr_safe_release(&mtOptimal); return hr; } @@ -1366,9 +1414,8 @@ HRESULT EVRCustomPresenter::setMediaType(IMFMediaType *mediaType) // Clearing the media type is allowed in any state (including shutdown). if (!mediaType) { - qt_wmf_safeRelease(&m_mediaType); + qt_evr_safe_release(&m_mediaType); releaseResources(); - m_D3DPresentEngine->setSurfaceFormat(QVideoSurfaceFormat()); return S_OK; } @@ -1377,8 +1424,6 @@ HRESULT EVRCustomPresenter::setMediaType(IMFMediaType *mediaType) IMFSample *sample = NULL; - QVideoSurfaceFormat surfaceFormat; - // Cannot set the media type after shutdown. HRESULT hr = checkShutdown(); if (FAILED(hr)) @@ -1386,17 +1431,17 @@ HRESULT EVRCustomPresenter::setMediaType(IMFMediaType *mediaType) // Check if the new type is actually different. // Note: This function safely handles NULL input parameters. - if (qt_wmf_areMediaTypesEqual(m_mediaType, mediaType)) + if (qt_evr_areMediaTypesEqual(m_mediaType, mediaType)) goto done; // Nothing more to do. // We're really changing the type. First get rid of the old type. 
- qt_wmf_safeRelease(&m_mediaType); + qt_evr_safe_release(&m_mediaType); releaseResources(); // Initialize the presenter engine with the new media type. // The presenter engine allocates the samples. - hr = m_D3DPresentEngine->createVideoSamples(mediaType, sampleQueue); + hr = m_presentEngine->createVideoSamples(mediaType, sampleQueue); if (FAILED(hr)) goto done; @@ -1416,7 +1461,7 @@ HRESULT EVRCustomPresenter::setMediaType(IMFMediaType *mediaType) goto done; // Set the frame rate on the scheduler. - if (SUCCEEDED(qt_wmf_getFrameRate(mediaType, &fps)) && (fps.Numerator != 0) && (fps.Denominator != 0)) { + if (SUCCEEDED(qt_evr_getFrameRate(mediaType, &fps)) && (fps.Numerator != 0) && (fps.Denominator != 0)) { m_scheduler.setFrameRate(fps); } else { // NOTE: The mixer's proposed type might not have a frame rate, in which case @@ -1429,16 +1474,6 @@ HRESULT EVRCustomPresenter::setMediaType(IMFMediaType *mediaType) m_mediaType = mediaType; m_mediaType->AddRef(); - // Create the surface format - UINT64 size; - hr = m_mediaType->GetUINT64(MF_MT_FRAME_SIZE, &size); - int width = int(HI32(size)); - int height = int(LO32(size)); - surfaceFormat = QVideoSurfaceFormat(QSize(width, height), - pixelFormatFromMediaType(m_mediaType), - QAbstractVideoBuffer::GLTextureHandle); - m_D3DPresentEngine->setSurfaceFormat(surfaceFormat); - done: if (FAILED(hr)) releaseResources(); @@ -1454,14 +1489,21 @@ HRESULT EVRCustomPresenter::isMediaTypeSupported(IMFMediaType *proposed) UINT32 width = 0, height = 0; // Validate the format. 
- HRESULT hr = qt_wmf_getFourCC(proposed, (DWORD*)&d3dFormat); + HRESULT hr = qt_evr_getFourCC(proposed, (DWORD*)&d3dFormat); if (FAILED(hr)) return hr; - // Only accept pixel formats supported by the video surface - if (!m_supportedGLFormats.contains((DWORD)d3dFormat)) + QVideoFrame::PixelFormat pixelFormat = pixelFormatFromMediaType(proposed); + if (pixelFormat == QVideoFrame::Format_Invalid) return MF_E_INVALIDMEDIATYPE; + // When not rendering to texture, only accept pixel formats supported by the video surface + if (!m_presentEngine->isTextureRenderingEnabled() + && m_surface + && !m_surface->supportedPixelFormats().contains(pixelFormat)) { + return MF_E_INVALIDMEDIATYPE; + } + // Reject compressed media types. hr = proposed->IsCompressedFormat(&compressed); if (FAILED(hr)) @@ -1470,9 +1512,8 @@ HRESULT EVRCustomPresenter::isMediaTypeSupported(IMFMediaType *proposed) if (compressed) return MF_E_INVALIDMEDIATYPE; - // The D3DPresentEngine checks whether the format can be used as - // the back-buffer format for the swap chains. - hr = m_D3DPresentEngine->checkFormat(d3dFormat); + // The D3DPresentEngine checks whether surfaces can be created using this format + hr = m_presentEngine->checkFormat(d3dFormat); if (FAILED(hr)) return hr; @@ -1493,13 +1534,13 @@ HRESULT EVRCustomPresenter::isMediaTypeSupported(IMFMediaType *proposed) // we ignore it. We just want to reject invalid apertures. 
if (SUCCEEDED(proposed->GetBlob(MF_MT_PAN_SCAN_APERTURE, (UINT8*)&videoCropArea, sizeof(videoCropArea), NULL))) - hr = qt_wmf_validateVideoArea(videoCropArea, width, height); + hr = qt_evr_validateVideoArea(videoCropArea, width, height); if (SUCCEEDED(proposed->GetBlob(MF_MT_GEOMETRIC_APERTURE, (UINT8*)&videoCropArea, sizeof(videoCropArea), NULL))) - hr = qt_wmf_validateVideoArea(videoCropArea, width, height); + hr = qt_evr_validateVideoArea(videoCropArea, width, height); if (SUCCEEDED(proposed->GetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE, (UINT8*)&videoCropArea, sizeof(videoCropArea), NULL))) - hr = qt_wmf_validateVideoArea(videoCropArea, width, height); + hr = qt_evr_validateVideoArea(videoCropArea, width, height); return hr; } @@ -1640,10 +1681,10 @@ HRESULT EVRCustomPresenter::processOutput() } done: - qt_wmf_safeRelease(&sample); + qt_evr_safe_release(&sample); // Important: Release any events returned from the ProcessOutput method. - qt_wmf_safeRelease(&dataBuffer.pEvents); + qt_evr_safe_release(&dataBuffer.pEvents); return hr; } @@ -1673,7 +1714,7 @@ HRESULT EVRCustomPresenter::deliverFrameStepSample(IMFSample *sample) IUnknown *unk = NULL; // For rate 0, discard any sample that ends earlier than the clock time. - if (isScrubbing() && m_clock && qt_wmf_isSampleTimePassed(m_clock, sample)) { + if (isScrubbing() && m_clock && qt_evr_isSampleTimePassed(m_clock, sample)) { // Discard this sample. } else if (m_frameStep.state >= FrameStepScheduled) { // A frame was already submitted. 
Put this sample on the frame-step queue, @@ -1719,7 +1760,7 @@ HRESULT EVRCustomPresenter::deliverFrameStepSample(IMFSample *sample) } } done: - qt_wmf_safeRelease(&unk); + qt_evr_safe_release(&unk); return hr; } @@ -1732,7 +1773,7 @@ HRESULT EVRCustomPresenter::trackSample(IMFSample *sample) if (SUCCEEDED(hr)) hr = tracked->SetAllocator(&m_sampleFreeCB, NULL); - qt_wmf_safeRelease(&tracked); + qt_evr_safe_release(&tracked); return hr; } @@ -1753,7 +1794,7 @@ void EVRCustomPresenter::releaseResources() m_samplePool.clear(); - m_D3DPresentEngine->releaseResources(); + m_presentEngine->releaseResources(); } HRESULT EVRCustomPresenter::onSampleFree(IMFAsyncResult *result) @@ -1761,6 +1802,7 @@ HRESULT EVRCustomPresenter::onSampleFree(IMFAsyncResult *result) IUnknown *object = NULL; IMFSample *sample = NULL; IUnknown *unk = NULL; + UINT32 token; // Get the sample from the async result object. HRESULT hr = result->GetObject(&object); @@ -1795,7 +1837,7 @@ HRESULT EVRCustomPresenter::onSampleFree(IMFAsyncResult *result) m_mutex.lock(); - UINT32 token = MFGetAttributeUINT32(sample, MFSamplePresenter_SampleCounter, (UINT32)-1); + token = MFGetAttributeUINT32(sample, MFSamplePresenter_SampleCounter, (UINT32)-1); if (token == m_tokenCounter) { // Return the sample to the sample pool. 
@@ -1811,24 +1853,12 @@ HRESULT EVRCustomPresenter::onSampleFree(IMFAsyncResult *result) done: if (FAILED(hr)) notifyEvent(EC_ERRORABORT, hr, 0); - qt_wmf_safeRelease(&object); - qt_wmf_safeRelease(&sample); - qt_wmf_safeRelease(&unk); + qt_evr_safe_release(&object); + qt_evr_safe_release(&sample); + qt_evr_safe_release(&unk); return hr; } -void EVRCustomPresenter::startSurface() -{ - if (m_D3DPresentEngine) - m_D3DPresentEngine->start(); -} - -void EVRCustomPresenter::stopSurface() -{ - if (m_D3DPresentEngine) - m_D3DPresentEngine->stop(); -} - float EVRCustomPresenter::getMaxRate(bool thin) { // Non-thinned: @@ -1843,8 +1873,8 @@ float EVRCustomPresenter::getMaxRate(bool thin) UINT monitorRateHz = 0; if (!thin && m_mediaType) { - qt_wmf_getFrameRate(m_mediaType, &fps); - monitorRateHz = m_D3DPresentEngine->refreshRate(); + qt_evr_getFrameRate(m_mediaType, &fps); + monitorRateHz = m_presentEngine->refreshRate(); if (fps.Denominator && fps.Numerator && monitorRateHz) { // Max Rate = Refresh Rate / Frame Rate @@ -1855,6 +1885,74 @@ float EVRCustomPresenter::getMaxRate(bool thin) return maxRate; } +bool EVRCustomPresenter::event(QEvent *e) +{ + if (e->type() == StartSurface) { + startSurface(); + return true; + } else if (e->type() == StopSurface) { + stopSurface(); + return true; + } else if (e->type() == PresentSample) { + PresentSampleEvent *ev = static_cast<PresentSampleEvent *>(e); + presentSample(ev->sample()); + return true; + } + + return QObject::event(e); +} + +void EVRCustomPresenter::startSurface() +{ + if (thread() != QThread::currentThread()) { + QCoreApplication::postEvent(this, new QEvent(QEvent::Type(StartSurface))); + return; + } + + if (!m_surface || m_surface->isActive()) + return; + + QVideoSurfaceFormat format = m_presentEngine->videoSurfaceFormat(); + if (!format.isValid()) + return; + + m_surface->start(format); +} + +void EVRCustomPresenter::stopSurface() +{ + if (thread() != QThread::currentThread()) { + QCoreApplication::postEvent(this, new 
QEvent(QEvent::Type(StopSurface))); + return; + } + + if (!m_surface || !m_surface->isActive()) + return; + + m_surface->stop(); +} + +void EVRCustomPresenter::presentSample(IMFSample *sample) +{ + if (thread() != QThread::currentThread()) { + QCoreApplication::postEvent(this, new PresentSampleEvent(sample)); + return; + } + + if (!m_surface || !m_surface->isActive() || !m_presentEngine->videoSurfaceFormat().isValid()) + return; + + QVideoFrame frame = m_presentEngine->makeVideoFrame(sample); + + if (m_surface->isActive() && m_surface->surfaceFormat() != m_presentEngine->videoSurfaceFormat()) { + m_surface->stop(); + if (!m_surface->start(m_presentEngine->videoSurfaceFormat())) + return; + } + + m_surface->present(frame); +} + HRESULT setDesiredSampleTime(IMFSample *sample, const LONGLONG &sampleTime, const LONGLONG &duration) { if (!sample) @@ -1867,7 +1965,7 @@ HRESULT setDesiredSampleTime(IMFSample *sample, const LONGLONG &sampleTime, cons if (SUCCEEDED(hr)) desired->SetDesiredSampleTimeAndDuration(sampleTime, duration); - qt_wmf_safeRelease(&desired); + qt_evr_safe_release(&desired); return hr; } @@ -1889,8 +1987,6 @@ HRESULT clearDesiredSampleTime(IMFSample *sample) UINT32 counter = MFGetAttributeUINT32(sample, MFSamplePresenter_SampleCounter, (UINT32)-1); - sample->GetUnknown(MFSamplePresenter_SampleSwapChain, IID_IUnknown, (void**)&unkSwapChain); - hr = sample->QueryInterface(IID_PPV_ARGS(&desired)); if (SUCCEEDED(hr)) { desired->Clear(); @@ -1898,17 +1994,11 @@ HRESULT clearDesiredSampleTime(IMFSample *sample) hr = sample->SetUINT32(MFSamplePresenter_SampleCounter, counter); if (FAILED(hr)) goto done; - - if (unkSwapChain) { - hr = sample->SetUnknown(MFSamplePresenter_SampleSwapChain, unkSwapChain); - if (FAILED(hr)) - goto done; - } } done: - qt_wmf_safeRelease(&unkSwapChain); - qt_wmf_safeRelease(&desired); + qt_evr_safe_release(&unkSwapChain); + qt_evr_safe_release(&desired); return hr; } @@ -1921,54 +2011,12 @@ HRESULT setMixerSourceRect(IMFTransform 
*mixer, const MFVideoNormalizedRect &sou HRESULT hr = mixer->GetAttributes(&attributes); if (SUCCEEDED(hr)) { - hr = attributes->SetBlob(VIDEO_ZOOM_RECT, (const UINT8*)&sourceRect, sizeof(sourceRect)); + hr = attributes->SetBlob(video_ZOOM_RECT, (const UINT8*)&sourceRect, sizeof(sourceRect)); attributes->Release(); } return hr; } -DWORD getFourCCFromPixelFormat(QVideoFrame::PixelFormat pixelFormat) -{ - DWORD fourCC = 0; - switch (pixelFormat) { - case QVideoFrame::Format_ARGB32: - case QVideoFrame::Format_ARGB32_Premultiplied: - fourCC = MFVideoFormat_ARGB32.Data1; - break; - case QVideoFrame::Format_RGB32: - fourCC = MFVideoFormat_RGB32.Data1; - break; - case QVideoFrame::Format_RGB24: - fourCC = MFVideoFormat_RGB24.Data1; - break; - case QVideoFrame::Format_RGB565: - fourCC = MFVideoFormat_RGB565.Data1; - break; - case QVideoFrame::Format_RGB555: - fourCC = MFVideoFormat_RGB555.Data1; - break; - case QVideoFrame::Format_AYUV444: - case QVideoFrame::Format_AYUV444_Premultiplied: - fourCC = MFVideoFormat_AYUV.Data1; - break; - case QVideoFrame::Format_YUV420P: - fourCC = MFVideoFormat_I420.Data1; - break; - case QVideoFrame::Format_UYVY: - fourCC = MFVideoFormat_UYVY.Data1; - break; - case QVideoFrame::Format_YV12: - fourCC = MFVideoFormat_YV12.Data1; - break; - case QVideoFrame::Format_NV12: - fourCC = MFVideoFormat_NV12.Data1; - break; - default: - break; - } - return fourCC; -} - static QVideoFrame::PixelFormat pixelFormatFromMediaType(IMFMediaType *type) { GUID majorType; @@ -1977,69 +2025,30 @@ static QVideoFrame::PixelFormat pixelFormatFromMediaType(IMFMediaType *type) if (majorType != MFMediaType_Video) return QVideoFrame::Format_Invalid; - GUID subType; - if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &subType))) + GUID subtype; + if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &subtype))) return QVideoFrame::Format_Invalid; - if (subType == MFVideoFormat_RGB32) + if (subtype == MFVideoFormat_RGB32) return QVideoFrame::Format_RGB32; + else if (subtype == 
MFVideoFormat_ARGB32) + return QVideoFrame::Format_ARGB32; + else if (subtype == MFVideoFormat_RGB24) + return QVideoFrame::Format_RGB24; + else if (subtype == MFVideoFormat_RGB565) + return QVideoFrame::Format_RGB565; + else if (subtype == MFVideoFormat_RGB555) + return QVideoFrame::Format_RGB555; + else if (subtype == MFVideoFormat_AYUV) + return QVideoFrame::Format_AYUV444; + else if (subtype == MFVideoFormat_I420) + return QVideoFrame::Format_YUV420P; + else if (subtype == MFVideoFormat_UYVY) + return QVideoFrame::Format_UYVY; + else if (subtype == MFVideoFormat_YV12) + return QVideoFrame::Format_YV12; + else if (subtype == MFVideoFormat_NV12) + return QVideoFrame::Format_NV12; return QVideoFrame::Format_Invalid; } - - -EVRCustomPresenterActivate::EVRCustomPresenterActivate() - : MFAbstractActivate() - , m_presenter(0) - , m_surface(0) -{ } - -HRESULT EVRCustomPresenterActivate::ActivateObject(REFIID riid, void **ppv) -{ - if (!ppv) - return E_INVALIDARG; - QMutexLocker locker(&m_mutex); - if (!m_presenter) { - m_presenter = new EVRCustomPresenter; - if (m_surface) - m_presenter->setSurface(m_surface); - } - return m_presenter->QueryInterface(riid, ppv); -} - -HRESULT EVRCustomPresenterActivate::ShutdownObject() -{ - // The presenter does not implement IMFShutdown so - // this function is the same as DetachObject() - return DetachObject(); -} - -HRESULT EVRCustomPresenterActivate::DetachObject() -{ - QMutexLocker locker(&m_mutex); - if (m_presenter) { - m_presenter->Release(); - m_presenter = 0; - } - return S_OK; -} - -void EVRCustomPresenterActivate::setSurface(QAbstractVideoSurface *surface) -{ - QMutexLocker locker(&m_mutex); - if (m_surface == surface) - return; - - m_surface = surface; - - if (m_presenter) - m_presenter->setSurface(surface); -} - -void EVRCustomPresenterActivate::supportedFormatsChanged() -{ - QMutexLocker locker(&m_mutex); - - if (m_presenter) - m_presenter->supportedFormatsChanged(); -} diff --git a/src/plugins/wmf/evrcustompresenter.h 
b/src/plugins/common/evr/evrcustompresenter.h index 1d24feaa5..e25780140 100644 --- a/src/plugins/wmf/evrcustompresenter.h +++ b/src/plugins/common/evr/evrcustompresenter.h @@ -37,13 +37,72 @@ #include <QObject> #include <qmutex.h> #include <qqueue.h> -#include <evr.h> -#include "mfactivate.h" +#include <qevent.h> +#include <qvideosurfaceformat.h> + +#include "evrdefs.h" QT_BEGIN_NAMESPACE +class QAbstractVideoSurface; +QT_END_NAMESPACE + +QT_USE_NAMESPACE +class EVRCustomPresenter; class D3DPresentEngine; -class QAbstractVideoSurface; + +template<class T> +class AsyncCallback : public IMFAsyncCallback +{ +public: + typedef HRESULT (T::*InvokeFn)(IMFAsyncResult *asyncResult); + + AsyncCallback(T *parent, InvokeFn fn) : m_parent(parent), m_invokeFn(fn) + { + } + + // IUnknown + STDMETHODIMP QueryInterface(REFIID iid, void** ppv) + { + if (!ppv) + return E_POINTER; + + if (iid == __uuidof(IUnknown)) { + *ppv = static_cast<IUnknown*>(static_cast<IMFAsyncCallback*>(this)); + } else if (iid == __uuidof(IMFAsyncCallback)) { + *ppv = static_cast<IMFAsyncCallback*>(this); + } else { + *ppv = NULL; + return E_NOINTERFACE; + } + AddRef(); + return S_OK; + } + + STDMETHODIMP_(ULONG) AddRef() { + // Delegate to parent class. + return m_parent->AddRef(); + } + STDMETHODIMP_(ULONG) Release() { + // Delegate to parent class. + return m_parent->Release(); + } + + // IMFAsyncCallback methods + STDMETHODIMP GetParameters(DWORD*, DWORD*) + { + // Implementation of this method is optional. 
+ return E_NOTIMPL; + } + + STDMETHODIMP Invoke(IMFAsyncResult* asyncResult) + { + return (m_parent->*m_invokeFn)(asyncResult); + } + + T *m_parent; + InvokeFn m_invokeFn; +}; class Scheduler { @@ -55,13 +114,9 @@ public: Flush = WM_USER + 2 }; - Scheduler(); + Scheduler(EVRCustomPresenter *presenter); ~Scheduler(); - void setCallback(QObject *cb) { - m_CB = cb; - } - void setFrameRate(const MFRatio &fps); void setClockRate(float rate) { m_playbackRate = rate; } @@ -82,10 +137,11 @@ public: private: DWORD schedulerThreadProcPrivate(); + EVRCustomPresenter *m_presenter; + QQueue<IMFSample*> m_scheduledSamples; // Samples waiting to be presented. IMFClock *m_clock; // Presentation clock. Can be NULL. - QObject *m_CB; // Weak reference; do not delete. DWORD m_threadID; HANDLE m_schedulerThread; @@ -128,8 +184,6 @@ class EVRCustomPresenter , public IMFGetService , public IMFTopologyServiceLookupClient { - Q_OBJECT - public: // Defines the state of the presenter. enum RenderState @@ -150,9 +204,18 @@ public: FrameStepComplete // Sample was rendered. }; - EVRCustomPresenter(); + enum PresenterEvents + { + StartSurface = QEvent::User, + StopSurface = QEvent::User + 1, + PresentSample = QEvent::User + 2 + }; + + EVRCustomPresenter(QAbstractVideoSurface *surface = 0); ~EVRCustomPresenter(); + bool isValid() const; + // IUnknown methods STDMETHODIMP QueryInterface(REFIID riid, void ** ppv); STDMETHODIMP_(ULONG) AddRef(); @@ -187,9 +250,11 @@ public: void supportedFormatsChanged(); void setSurface(QAbstractVideoSurface *surface); -private Q_SLOTS: void startSurface(); void stopSurface(); + void presentSample(IMFSample *sample); + + bool event(QEvent *); private: HRESULT checkShutdown() const @@ -250,7 +315,7 @@ private: // Callback when a video sample is released. HRESULT onSampleFree(IMFAsyncResult *result); - AsyncCallback<EVRCustomPresenter> m_sampleFreeCB; + AsyncCallback<EVRCustomPresenter> m_sampleFreeCB; // Holds information related to frame-stepping. 
struct FrameStep @@ -258,7 +323,7 @@ private: FrameStep() : state(FrameStepNone) , steps(0) - , sampleNoRef(NULL) + , sampleNoRef(0) { } @@ -289,7 +354,7 @@ private: MFVideoNormalizedRect m_sourceRect; float m_playbackRate; - D3DPresentEngine *m_D3DPresentEngine; // Rendering engine. (Never null if the constructor succeeds.) + D3DPresentEngine *m_presentEngine; // Rendering engine. (Never null if the constructor succeeds.) IMFClock *m_clock; // The EVR's clock. IMFTransform *m_mixer; // The EVR's mixer. @@ -297,29 +362,11 @@ private: IMFMediaType *m_mediaType; // Output media type QAbstractVideoSurface *m_surface; - QList<DWORD> m_supportedGLFormats; -}; + bool m_canRenderToSurface; -class EVRCustomPresenterActivate : public MFAbstractActivate -{ -public: - EVRCustomPresenterActivate(); - ~EVRCustomPresenterActivate() - { } - - STDMETHODIMP ActivateObject(REFIID riid, void **ppv); - STDMETHODIMP ShutdownObject(); - STDMETHODIMP DetachObject(); - - void setSurface(QAbstractVideoSurface *surface); - void supportedFormatsChanged(); - -private: - EVRCustomPresenter *m_presenter; - QAbstractVideoSurface *m_surface; - QMutex m_mutex; + IMFSample *m_sampleToPresent; }; -QT_END_NAMESPACE +bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter); #endif // EVRCUSTOMPRESENTER_H diff --git a/src/plugins/wmf/evrd3dpresentengine.cpp b/src/plugins/common/evr/evrd3dpresentengine.cpp index d66918a99..77cd7e0c8 100644 --- a/src/plugins/wmf/evrd3dpresentengine.cpp +++ b/src/plugins/common/evr/evrd3dpresentengine.cpp @@ -33,52 +33,25 @@ #include "evrd3dpresentengine.h" -#include "mfglobal.h" +#include "evrhelpers.h" -#include <qtgui/qguiapplication.h> -#include <qpa/qplatformnativeinterface.h> -#include <qtgui/qopenglcontext.h> #include <qabstractvideobuffer.h> #include <QAbstractVideoSurface> #include <qvideoframe.h> #include <QDebug> -#include <qopenglcontext.h> -#include <qopenglfunctions.h> -#include <qwindow.h> - -#include <EGL/egl.h> -#include <EGL/eglext.h> 
-#include <d3d9.h> -#include <dxva2api.h> -#include <WinUser.h> -#include <evr.h> - -QT_USE_NAMESPACE - -static const DWORD PRESENTER_BUFFER_COUNT = 3; - -class TextureVideoBuffer : public QAbstractVideoBuffer -{ -public: - TextureVideoBuffer(GLuint textureId) - : QAbstractVideoBuffer(GLTextureHandle) - , m_textureId(textureId) - {} - - ~TextureVideoBuffer() {} - - MapMode mapMode() const { return NotMapped; } - uchar *map(MapMode, int*, int*) { return 0; } - void unmap() {} +#include <qthread.h> +#include <private/qmediaopenglhelper_p.h> + +#ifdef MAYBE_ANGLE +# include <qtgui/qguiapplication.h> +# include <qpa/qplatformnativeinterface.h> +# include <qopenglfunctions.h> +# include <EGL/eglext.h> +#endif - QVariant handle() const - { - return QVariant::fromValue<unsigned int>(m_textureId); - } +static const int PRESENTER_BUFFER_COUNT = 3; -private: - GLuint m_textureId; -}; +#ifdef MAYBE_ANGLE EGLWrapper::EGLWrapper() { @@ -142,22 +115,160 @@ EGLBoolean EGLWrapper::releaseTexImage(EGLDisplay dpy, EGLSurface surface, EGLin return m_eglReleaseTexImage(dpy, surface, buffer); } + +class OpenGLResources : public QObject +{ +public: + OpenGLResources() + : egl(new EGLWrapper) + , eglDisplay(0) + , eglSurface(0) + , glTexture(0) + {} + + void release() + { + if (thread() == QThread::currentThread()) + delete this; + else + deleteLater(); + } + + EGLWrapper *egl; + EGLDisplay *eglDisplay; + EGLSurface eglSurface; + unsigned int glTexture; + +private: + ~OpenGLResources() + { + Q_ASSERT(QOpenGLContext::currentContext() != NULL); + + if (eglSurface && egl) { + egl->releaseTexImage(eglDisplay, eglSurface, EGL_BACK_BUFFER); + egl->destroySurface(eglDisplay, eglSurface); + } + if (glTexture) + QOpenGLContext::currentContext()->functions()->glDeleteTextures(1, &glTexture); + + delete egl; + } +}; + +#endif // MAYBE_ANGLE + + +class IMFSampleVideoBuffer: public QAbstractVideoBuffer +{ +public: + IMFSampleVideoBuffer(D3DPresentEngine *engine, IMFSample *sample, 
QAbstractVideoBuffer::HandleType handleType) + : QAbstractVideoBuffer(handleType) + , m_engine(engine) + , m_sample(sample) + , m_surface(0) + , m_mapMode(NotMapped) + , m_textureUpdated(false) + { + if (m_sample) { + m_sample->AddRef(); + + IMFMediaBuffer *buffer; + if (SUCCEEDED(m_sample->GetBufferByIndex(0, &buffer))) { + MFGetService(buffer, + mr_BUFFER_SERVICE, + iid_IDirect3DSurface9, + reinterpret_cast<void **>(&m_surface)); + buffer->Release(); + } + } + } + + ~IMFSampleVideoBuffer() + { + if (m_surface) { + if (m_mapMode != NotMapped) + m_surface->UnlockRect(); + m_surface->Release(); + } + if (m_sample) + m_sample->Release(); + } + + QVariant handle() const; + + MapMode mapMode() const { return m_mapMode; } + uchar *map(MapMode, int*, int*); + void unmap(); + +private: + mutable D3DPresentEngine *m_engine; + IMFSample *m_sample; + IDirect3DSurface9 *m_surface; + MapMode m_mapMode; + mutable bool m_textureUpdated; +}; + +uchar *IMFSampleVideoBuffer::map(MapMode mode, int *numBytes, int *bytesPerLine) +{ + if (!m_surface || m_mapMode != NotMapped) + return 0; + + D3DSURFACE_DESC desc; + if (FAILED(m_surface->GetDesc(&desc))) + return 0; + + D3DLOCKED_RECT rect; + if (FAILED(m_surface->LockRect(&rect, NULL, mode == ReadOnly ? 
D3DLOCK_READONLY : 0))) + return 0; + + m_mapMode = mode; + + if (numBytes) + *numBytes = (int)(rect.Pitch * desc.Height); + + if (bytesPerLine) + *bytesPerLine = (int)rect.Pitch; + + return reinterpret_cast<uchar *>(rect.pBits); +} + +void IMFSampleVideoBuffer::unmap() +{ + if (m_mapMode == NotMapped) + return; + + m_mapMode = NotMapped; + m_surface->UnlockRect(); +} + +QVariant IMFSampleVideoBuffer::handle() const +{ + QVariant handle; + +#ifdef MAYBE_ANGLE + if (handleType() != GLTextureHandle) + return handle; + + if (m_textureUpdated || m_engine->updateTexture(m_surface)) { + m_textureUpdated = true; + handle = QVariant::fromValue<unsigned int>(m_engine->m_glResources->glTexture); + } +#endif + + return handle; +} + + D3DPresentEngine::D3DPresentEngine() - : QObject() - , m_mutex(QMutex::Recursive) - , m_deviceResetToken(0) + : m_deviceResetToken(0) , m_D3D9(0) , m_device(0) , m_deviceManager(0) - , m_surface(0) - , m_glContext(0) - , m_offscreenSurface(0) - , m_eglDisplay(0) - , m_eglConfig(0) - , m_eglSurface(0) - , m_glTexture(0) + , m_useTextureRendering(false) +#ifdef MAYBE_ANGLE + , m_glResources(0) , m_texture(0) - , m_egl(0) +#endif { ZeroMemory(&m_displayMode, sizeof(m_displayMode)); @@ -174,47 +285,110 @@ D3DPresentEngine::D3DPresentEngine() D3DPresentEngine::~D3DPresentEngine() { - qt_wmf_safeRelease(&m_texture); - qt_wmf_safeRelease(&m_device); - qt_wmf_safeRelease(&m_deviceManager); - qt_wmf_safeRelease(&m_D3D9); - - if (m_eglSurface) { - m_egl->releaseTexImage(m_eglDisplay, m_eglSurface, EGL_BACK_BUFFER); - m_egl->destroySurface(m_eglDisplay, m_eglSurface); - m_eglSurface = NULL; - } - if (m_glTexture) { - if (QOpenGLContext *current = QOpenGLContext::currentContext()) - current->functions()->glDeleteTextures(1, &m_glTexture); - else - qWarning() << "D3DPresentEngine: Cannot obtain GL context, unable to delete textures"; - } + releaseResources(); - delete m_glContext; - delete m_offscreenSurface; - delete m_egl; + qt_evr_safe_release(&m_device); 
+ qt_evr_safe_release(&m_deviceManager); + qt_evr_safe_release(&m_D3D9); } -void D3DPresentEngine::start() +HRESULT D3DPresentEngine::initializeD3D() { - QMutexLocker locker(&m_mutex); + HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, &m_D3D9); - if (!m_surfaceFormat.isValid()) - return; + if (SUCCEEDED(hr)) + hr = DXVA2CreateDirect3DDeviceManager9(&m_deviceResetToken, &m_deviceManager); + + return hr; +} - if (!m_texture) - createOffscreenTexture(); +HRESULT D3DPresentEngine::createD3DDevice() +{ + HRESULT hr = S_OK; + HWND hwnd = NULL; + UINT uAdapterID = D3DADAPTER_DEFAULT; + DWORD vp = 0; + + D3DCAPS9 ddCaps; + ZeroMemory(&ddCaps, sizeof(ddCaps)); + + IDirect3DDevice9Ex* device = NULL; + + if (!m_D3D9 || !m_deviceManager) + return MF_E_NOT_INITIALIZED; + + hwnd = ::GetShellWindow(); + + D3DPRESENT_PARAMETERS pp; + ZeroMemory(&pp, sizeof(pp)); + + pp.BackBufferWidth = 1; + pp.BackBufferHeight = 1; + pp.BackBufferFormat = D3DFMT_UNKNOWN; + pp.BackBufferCount = 1; + pp.Windowed = TRUE; + pp.SwapEffect = D3DSWAPEFFECT_DISCARD; + pp.BackBufferFormat = D3DFMT_UNKNOWN; + pp.hDeviceWindow = hwnd; + pp.Flags = D3DPRESENTFLAG_VIDEO; + pp.PresentationInterval = D3DPRESENT_INTERVAL_DEFAULT; + + hr = m_D3D9->GetDeviceCaps(uAdapterID, D3DDEVTYPE_HAL, &ddCaps); + if (FAILED(hr)) + goto done; + + if (ddCaps.DevCaps & D3DDEVCAPS_HWTRANSFORMANDLIGHT) + vp = D3DCREATE_HARDWARE_VERTEXPROCESSING; + else + vp = D3DCREATE_SOFTWARE_VERTEXPROCESSING; + + hr = m_D3D9->CreateDeviceEx( + uAdapterID, + D3DDEVTYPE_HAL, + pp.hDeviceWindow, + vp | D3DCREATE_NOWINDOWCHANGES | D3DCREATE_MULTITHREADED | D3DCREATE_FPU_PRESERVE, + &pp, + NULL, + &device + ); + if (FAILED(hr)) + goto done; + + hr = m_D3D9->GetAdapterDisplayMode(uAdapterID, &m_displayMode); + if (FAILED(hr)) + goto done; + + hr = m_deviceManager->ResetDevice(device, m_deviceResetToken); + if (FAILED(hr)) + goto done; + + qt_evr_safe_release(&m_device); - if (m_surface && !m_surface->isActive()) - m_surface->start(m_surfaceFormat); + 
m_device = device; + m_device->AddRef(); + +done: + qt_evr_safe_release(&device); + return hr; } -void D3DPresentEngine::stop() +bool D3DPresentEngine::isValid() const { - QMutexLocker locker(&m_mutex); - if (m_surface && m_surface->isActive()) - m_surface->stop(); + return m_device != NULL; +} + +void D3DPresentEngine::releaseResources() +{ + m_surfaceFormat = QVideoSurfaceFormat(); + +#ifdef MAYBE_ANGLE + qt_evr_safe_release(&m_texture); + + if (m_glResources) { + m_glResources->release(); // deleted in GL thread + m_glResources = NULL; + } +#endif } HRESULT D3DPresentEngine::getService(REFGUID, REFIID riid, void** ppv) @@ -237,33 +411,52 @@ HRESULT D3DPresentEngine::getService(REFGUID, REFIID riid, void** ppv) HRESULT D3DPresentEngine::checkFormat(D3DFORMAT format) { - HRESULT hr = S_OK; + if (!m_D3D9 || !m_device) + return E_FAIL; - UINT uAdapter = D3DADAPTER_DEFAULT; - D3DDEVTYPE type = D3DDEVTYPE_HAL; + HRESULT hr = S_OK; D3DDISPLAYMODE mode; D3DDEVICE_CREATION_PARAMETERS params; - // Our shared D3D/EGL surface only supports RGB32, - // reject all other formats - if (format != D3DFMT_X8R8G8B8) - return MF_E_INVALIDMEDIATYPE; - - if (m_device) { - hr = m_device->GetCreationParameters(¶ms); - if (FAILED(hr)) - return hr; + hr = m_device->GetCreationParameters(¶ms); + if (FAILED(hr)) + return hr; - uAdapter = params.AdapterOrdinal; - type = params.DeviceType; - } + UINT uAdapter = params.AdapterOrdinal; + D3DDEVTYPE type = params.DeviceType; hr = m_D3D9->GetAdapterDisplayMode(uAdapter, &mode); if (FAILED(hr)) return hr; - return m_D3D9->CheckDeviceType(uAdapter, type, mode.Format, format, TRUE); + hr = m_D3D9->CheckDeviceFormat(uAdapter, type, mode.Format, + D3DUSAGE_RENDERTARGET, + D3DRTYPE_SURFACE, + format); + + if (m_useTextureRendering && format != D3DFMT_X8R8G8B8 && format != D3DFMT_A8R8G8B8) { + // The texture is always in RGB32 so the d3d driver must support conversion from the + // requested format to RGB32. 
+ hr = m_D3D9->CheckDeviceFormatConversion(uAdapter, type, format, D3DFMT_X8R8G8B8); + } + + return hr; +} + +bool D3DPresentEngine::supportsTextureRendering() const +{ +#ifdef MAYBE_ANGLE + return QMediaOpenGLHelper::isANGLE(); +#else + return false; +#endif +} + +void D3DPresentEngine::setHint(Hint hint, bool enable) +{ + if (hint == RenderToTexture) + m_useTextureRendering = enable && supportsTextureRendering(); } HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format, QList<IMFSample*> &videoSampleQueue) @@ -272,211 +465,148 @@ HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format, QList<IMFSamp return MF_E_UNEXPECTED; HRESULT hr = S_OK; - D3DPRESENT_PARAMETERS pp; - IDirect3DSwapChain9 *swapChain = NULL; + IDirect3DSurface9 *surface = NULL; IMFSample *videoSample = NULL; - QMutexLocker locker(&m_mutex); - releaseResources(); - // Get the swap chain parameters from the media type. - hr = getSwapChainPresentParameters(format, &pp); + UINT32 width = 0, height = 0; + hr = MFGetAttributeSize(format, MF_MT_FRAME_SIZE, &width, &height); if (FAILED(hr)) - goto done; + return hr; + + DWORD d3dFormat = 0; + hr = qt_evr_getFourCC(format, &d3dFormat); + if (FAILED(hr)) + return hr; // Create the video samples. for (int i = 0; i < PRESENTER_BUFFER_COUNT; i++) { - // Create a new swap chain. - hr = m_device->CreateAdditionalSwapChain(&pp, &swapChain); + hr = m_device->CreateRenderTarget(width, height, + (D3DFORMAT)d3dFormat, + D3DMULTISAMPLE_NONE, + 0, + TRUE, + &surface, NULL); if (FAILED(hr)) goto done; - // Create the video sample from the swap chain. - hr = createD3DSample(swapChain, &videoSample); + hr = MFCreateVideoSampleFromSurface(surface, &videoSample); if (FAILED(hr)) goto done; - // Add it to the list. videoSample->AddRef(); videoSampleQueue.append(videoSample); - // Set the swap chain pointer as a custom attribute on the sample. 
This keeps - // a reference count on the swap chain, so that the swap chain is kept alive - // for the duration of the sample's lifetime. - hr = videoSample->SetUnknown(MFSamplePresenter_SampleSwapChain, swapChain); - if (FAILED(hr)) - goto done; - - qt_wmf_safeRelease(&videoSample); - qt_wmf_safeRelease(&swapChain); + qt_evr_safe_release(&videoSample); + qt_evr_safe_release(&surface); } done: - if (FAILED(hr)) + if (SUCCEEDED(hr)) { + m_surfaceFormat = QVideoSurfaceFormat(QSize(width, height), + m_useTextureRendering ? QVideoFrame::Format_RGB32 + : qt_evr_pixelFormatFromD3DFormat((D3DFORMAT)d3dFormat), + m_useTextureRendering ? QAbstractVideoBuffer::GLTextureHandle + : QAbstractVideoBuffer::NoHandle); + } else { releaseResources(); + } - qt_wmf_safeRelease(&swapChain); - qt_wmf_safeRelease(&videoSample); + qt_evr_safe_release(&videoSample); + qt_evr_safe_release(&surface); return hr; } -void D3DPresentEngine::releaseResources() +QVideoFrame D3DPresentEngine::makeVideoFrame(IMFSample *sample) { -} + if (!sample) + return QVideoFrame(); -void D3DPresentEngine::presentSample(void *opaque, qint64) -{ - HRESULT hr = S_OK; + QVideoFrame frame(new IMFSampleVideoBuffer(this, sample, m_surfaceFormat.handleType()), + m_surfaceFormat.frameSize(), + m_surfaceFormat.pixelFormat()); - IMFSample *sample = reinterpret_cast<IMFSample*>(opaque); - IMFMediaBuffer* buffer = NULL; - IDirect3DSurface9* surface = NULL; - - if (m_surface && m_surface->isActive()) { - if (sample) { - // Get the buffer from the sample. - hr = sample->GetBufferByIndex(0, &buffer); - if (FAILED(hr)) - goto done; - - // Get the surface from the buffer. 
- hr = MFGetService(buffer, MR_BUFFER_SERVICE, IID_PPV_ARGS(&surface)); - if (FAILED(hr)) - goto done; - } + // WMF uses 100-nanosecond units, Qt uses microseconds + LONGLONG startTime = -1; + if (SUCCEEDED(sample->GetSampleTime(&startTime))) { + frame.setStartTime(startTime * 0.1); - if (surface && updateTexture(surface)) { - QVideoFrame frame = QVideoFrame(new TextureVideoBuffer(m_glTexture), - m_surfaceFormat.frameSize(), - m_surfaceFormat.pixelFormat()); - - // WMF uses 100-nanosecond units, Qt uses microseconds - LONGLONG startTime = -1; - if (SUCCEEDED(sample->GetSampleTime(&startTime))) { - frame.setStartTime(startTime * 0.1); - - LONGLONG duration = -1; - if (SUCCEEDED(sample->GetSampleDuration(&duration))) - frame.setEndTime((startTime + duration) * 0.1); - } - - m_surface->present(frame); - } + LONGLONG duration = -1; + if (SUCCEEDED(sample->GetSampleDuration(&duration))) + frame.setEndTime((startTime + duration) * 0.1); } -done: - qt_wmf_safeRelease(&surface); - qt_wmf_safeRelease(&buffer); - qt_wmf_safeRelease(&sample); + return frame; } -void D3DPresentEngine::setSurface(QAbstractVideoSurface *surface) -{ - QMutexLocker locker(&m_mutex); - m_surface = surface; -} - -void D3DPresentEngine::setSurfaceFormat(const QVideoSurfaceFormat &format) -{ - QMutexLocker locker(&m_mutex); - m_surfaceFormat = format; -} +#ifdef MAYBE_ANGLE -void D3DPresentEngine::createOffscreenTexture() +bool D3DPresentEngine::createRenderTexture() { - // First, check if we have a context on this thread - QOpenGLContext *currentContext = QOpenGLContext::currentContext(); + if (m_texture) + return true; - if (!currentContext) { - //Create OpenGL context and set share context from surface - QOpenGLContext *shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>()); - if (!shareContext) - return; - - m_offscreenSurface = new QWindow; - m_offscreenSurface->setSurfaceType(QWindow::OpenGLSurface); - //Needs geometry to be a valid surface, but size is 
not important - m_offscreenSurface->setGeometry(-1, -1, 1, 1); - m_offscreenSurface->create(); - - m_glContext = new QOpenGLContext; - m_glContext->setFormat(m_offscreenSurface->requestedFormat()); - m_glContext->setShareContext(shareContext); - - if (!m_glContext->create()) { - delete m_glContext; - delete m_offscreenSurface; - m_glContext = 0; - m_offscreenSurface = 0; - return; - } - - currentContext = m_glContext; - } + Q_ASSERT(QOpenGLContext::currentContext() != NULL); - if (m_glContext) - m_glContext->makeCurrent(m_offscreenSurface); + if (!m_glResources) + m_glResources = new OpenGLResources; - if (!m_egl) - m_egl = new EGLWrapper; + QOpenGLContext *currentContext = QOpenGLContext::currentContext(); + if (!currentContext) + return false; QPlatformNativeInterface *nativeInterface = QGuiApplication::platformNativeInterface(); - m_eglDisplay = static_cast<EGLDisplay*>( + m_glResources->eglDisplay = static_cast<EGLDisplay*>( nativeInterface->nativeResourceForContext("eglDisplay", currentContext)); - m_eglConfig = static_cast<EGLConfig*>( + EGLConfig *eglConfig = static_cast<EGLConfig*>( nativeInterface->nativeResourceForContext("eglConfig", currentContext)); - currentContext->functions()->glGenTextures(1, &m_glTexture); + currentContext->functions()->glGenTextures(1, &m_glResources->glTexture); - int w = m_surfaceFormat.frameWidth(); - int h = m_surfaceFormat.frameHeight(); bool hasAlpha = currentContext->format().hasAlpha(); EGLint attribs[] = { - EGL_WIDTH, w, - EGL_HEIGHT, h, + EGL_WIDTH, m_surfaceFormat.frameWidth(), + EGL_HEIGHT, m_surfaceFormat.frameHeight(), EGL_TEXTURE_FORMAT, hasAlpha ? 
EGL_TEXTURE_RGBA : EGL_TEXTURE_RGB, EGL_TEXTURE_TARGET, EGL_TEXTURE_2D, EGL_NONE }; - EGLSurface pbuffer = m_egl->createPbufferSurface(m_eglDisplay, m_eglConfig, attribs); + EGLSurface pbuffer = m_glResources->egl->createPbufferSurface(m_glResources->eglDisplay, eglConfig, attribs); HANDLE share_handle = 0; PFNEGLQUERYSURFACEPOINTERANGLEPROC eglQuerySurfacePointerANGLE = - reinterpret_cast<PFNEGLQUERYSURFACEPOINTERANGLEPROC>(m_egl->getProcAddress("eglQuerySurfacePointerANGLE")); + reinterpret_cast<PFNEGLQUERYSURFACEPOINTERANGLEPROC>(m_glResources->egl->getProcAddress("eglQuerySurfacePointerANGLE")); Q_ASSERT(eglQuerySurfacePointerANGLE); eglQuerySurfacePointerANGLE( - m_eglDisplay, + m_glResources->eglDisplay, pbuffer, EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE, &share_handle); - m_device->CreateTexture(w, h, 1, + m_device->CreateTexture(m_surfaceFormat.frameWidth(), m_surfaceFormat.frameHeight(), 1, D3DUSAGE_RENDERTARGET, hasAlpha ? D3DFMT_A8R8G8B8 : D3DFMT_X8R8G8B8, D3DPOOL_DEFAULT, &m_texture, &share_handle); - m_eglSurface = pbuffer; + m_glResources->eglSurface = pbuffer; + + QOpenGLContext::currentContext()->functions()->glBindTexture(GL_TEXTURE_2D, m_glResources->glTexture); + m_glResources->egl->bindTexImage(m_glResources->eglDisplay, m_glResources->eglSurface, EGL_BACK_BUFFER); - if (m_glContext) - m_glContext->doneCurrent(); + return m_texture != NULL; } bool D3DPresentEngine::updateTexture(IDirect3DSurface9 *src) { - if (!m_texture) + if (!m_texture && !createRenderTexture()) return false; - if (m_glContext) - m_glContext->makeCurrent(m_offscreenSurface); - - QOpenGLContext::currentContext()->functions()->glBindTexture(GL_TEXTURE_2D, m_glTexture); - IDirect3DSurface9 *dest = NULL; // Copy the sample surface to the shared D3D/EGL surface @@ -485,173 +615,24 @@ bool D3DPresentEngine::updateTexture(IDirect3DSurface9 *src) goto done; hr = m_device->StretchRect(src, NULL, dest, NULL, D3DTEXF_NONE); - if (FAILED(hr)) + if (FAILED(hr)) { qWarning("Failed to copy D3D 
surface"); - - if (hr == S_OK) - m_egl->bindTexImage(m_eglDisplay, m_eglSurface, EGL_BACK_BUFFER); + } else { + // Shared surfaces are not synchronized, there's no guarantee that + // StretchRect is complete when the texture is later rendered by Qt. + // To make sure the next rendered frame is up to date, flush the command pipeline + // using an event query. + IDirect3DQuery9 *eventQuery = NULL; + m_device->CreateQuery(D3DQUERYTYPE_EVENT, &eventQuery); + eventQuery->Issue(D3DISSUE_END); + while (eventQuery->GetData(NULL, 0, D3DGETDATA_FLUSH) == S_FALSE); + eventQuery->Release(); + } done: - qt_wmf_safeRelease(&dest); - - if (m_glContext) - m_glContext->doneCurrent(); + qt_evr_safe_release(&dest); return SUCCEEDED(hr); } -HRESULT D3DPresentEngine::initializeD3D() -{ - HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, &m_D3D9); - - if (SUCCEEDED(hr)) - hr = DXVA2CreateDirect3DDeviceManager9(&m_deviceResetToken, &m_deviceManager); - - return hr; -} - -HRESULT D3DPresentEngine::createD3DDevice() -{ - HRESULT hr = S_OK; - HWND hwnd = NULL; - UINT uAdapterID = D3DADAPTER_DEFAULT; - DWORD vp = 0; - - D3DCAPS9 ddCaps; - ZeroMemory(&ddCaps, sizeof(ddCaps)); - - IDirect3DDevice9Ex* device = NULL; - - // Hold the lock because we might be discarding an existing device. - QMutexLocker locker(&m_mutex); - - if (!m_D3D9 || !m_deviceManager) - return MF_E_NOT_INITIALIZED; - - hwnd = ::GetShellWindow(); - - // Note: The presenter creates additional swap chains to present the - // video frames. Therefore, it does not use the device's implicit - // swap chain, so the size of the back buffer here is 1 x 1. 
- - D3DPRESENT_PARAMETERS pp; - ZeroMemory(&pp, sizeof(pp)); - - pp.BackBufferWidth = 1; - pp.BackBufferHeight = 1; - pp.BackBufferFormat = D3DFMT_UNKNOWN; - pp.BackBufferCount = 1; - pp.Windowed = TRUE; - pp.SwapEffect = D3DSWAPEFFECT_DISCARD; - pp.BackBufferFormat = D3DFMT_UNKNOWN; - pp.hDeviceWindow = hwnd; - pp.Flags = D3DPRESENTFLAG_VIDEO; - pp.PresentationInterval = D3DPRESENT_INTERVAL_DEFAULT; - - hr = m_D3D9->GetDeviceCaps(uAdapterID, D3DDEVTYPE_HAL, &ddCaps); - if (FAILED(hr)) - goto done; - - if (ddCaps.DevCaps & D3DDEVCAPS_HWTRANSFORMANDLIGHT) - vp = D3DCREATE_HARDWARE_VERTEXPROCESSING; - else - vp = D3DCREATE_SOFTWARE_VERTEXPROCESSING; - - hr = m_D3D9->CreateDeviceEx( - uAdapterID, - D3DDEVTYPE_HAL, - pp.hDeviceWindow, - vp | D3DCREATE_NOWINDOWCHANGES | D3DCREATE_MULTITHREADED | D3DCREATE_FPU_PRESERVE, - &pp, - NULL, - &device - ); - if (FAILED(hr)) - goto done; - - hr = m_D3D9->GetAdapterDisplayMode(uAdapterID, &m_displayMode); - if (FAILED(hr)) - goto done; - - hr = m_deviceManager->ResetDevice(device, m_deviceResetToken); - if (FAILED(hr)) - goto done; - - qt_wmf_safeRelease(&m_device); - - m_device = device; - m_device->AddRef(); - -done: - qt_wmf_safeRelease(&device); - return hr; -} - -HRESULT D3DPresentEngine::createD3DSample(IDirect3DSwapChain9 *swapChain, IMFSample **videoSample) -{ - D3DCOLOR clrBlack = D3DCOLOR_ARGB(0xFF, 0x00, 0x00, 0x00); - - IDirect3DSurface9* surface = NULL; - IMFSample* sample = NULL; - - // Get the back buffer surface. - HRESULT hr = swapChain->GetBackBuffer(0, D3DBACKBUFFER_TYPE_MONO, &surface); - if (FAILED(hr)) - goto done; - - // Fill it with black. 
- hr = m_device->ColorFill(surface, NULL, clrBlack); - if (FAILED(hr)) - goto done; - - hr = MFCreateVideoSampleFromSurface(surface, &sample); - if (FAILED(hr)) - goto done; - - *videoSample = sample; - (*videoSample)->AddRef(); - -done: - qt_wmf_safeRelease(&surface); - qt_wmf_safeRelease(&sample); - return hr; -} - -HRESULT D3DPresentEngine::getSwapChainPresentParameters(IMFMediaType *type, D3DPRESENT_PARAMETERS* pp) -{ - ZeroMemory(pp, sizeof(D3DPRESENT_PARAMETERS)); - - // Get some information about the video format. - - UINT32 width = 0, height = 0; - - HRESULT hr = MFGetAttributeSize(type, MF_MT_FRAME_SIZE, &width, &height); - if (FAILED(hr)) - return hr; - - DWORD d3dFormat = 0; - - hr = qt_wmf_getFourCC(type, &d3dFormat); - if (FAILED(hr)) - return hr; - - ZeroMemory(pp, sizeof(D3DPRESENT_PARAMETERS)); - pp->BackBufferWidth = width; - pp->BackBufferHeight = height; - pp->Windowed = TRUE; - pp->SwapEffect = D3DSWAPEFFECT_DISCARD; - pp->BackBufferFormat = (D3DFORMAT)d3dFormat; - pp->hDeviceWindow = ::GetShellWindow(); - pp->Flags = D3DPRESENTFLAG_VIDEO; - pp->PresentationInterval = D3DPRESENT_INTERVAL_DEFAULT; - - D3DDEVICE_CREATION_PARAMETERS params; - hr = m_device->GetCreationParameters(¶ms); - if (FAILED(hr)) - return hr; - - if (params.DeviceType != D3DDEVTYPE_HAL) - pp->Flags |= D3DPRESENTFLAG_LOCKABLE_BACKBUFFER; - - return S_OK; -} +#endif // MAYBE_ANGLE diff --git a/src/plugins/wmf/evrd3dpresentengine.h b/src/plugins/common/evr/evrd3dpresentengine.h index 7a88ee555..f3bbb8b3e 100644 --- a/src/plugins/wmf/evrd3dpresentengine.h +++ b/src/plugins/common/evr/evrd3dpresentengine.h @@ -34,33 +34,36 @@ #ifndef EVRD3DPRESENTENGINE_H #define EVRD3DPRESENTENGINE_H -#include <QObject> #include <EGL/egl.h> #include <QMutex> #include <d3d9types.h> #include <QVideoSurfaceFormat> +#if defined(QT_OPENGL_ES_2) || defined(QT_OPENGL_DYNAMIC) +#define MAYBE_ANGLE +#endif + +QT_BEGIN_NAMESPACE +class QAbstractVideoSurface; +QT_END_NAMESPACE + struct IDirect3D9Ex; -struct 
IDirect3DDevice9; struct IDirect3DDevice9Ex; struct IDirect3DDeviceManager9; struct IDirect3DSurface9; struct IDirect3DTexture9; struct IMFSample; struct IMFMediaType; -struct IDirect3DSwapChain9; // Randomly generated GUIDs static const GUID MFSamplePresenter_SampleCounter = { 0xb0bb83cc, 0xf10f, 0x4e2e, { 0xaa, 0x2b, 0x29, 0xea, 0x5e, 0x92, 0xef, 0x85 } }; -static const GUID MFSamplePresenter_SampleSwapChain = -{ 0xad885bd1, 0x7def, 0x414a, { 0xb5, 0xb0, 0xd3, 0xd2, 0x63, 0xd6, 0xe9, 0x6d } }; +QT_USE_NAMESPACE -QT_BEGIN_NAMESPACE +#ifdef MAYBE_ANGLE -class QAbstractVideoSurface; -class QOpenGLContext; +class OpenGLResources; class EGLWrapper { @@ -87,40 +90,39 @@ private: EglReleaseTexImage m_eglReleaseTexImage; }; -class D3DPresentEngine : public QObject +#endif // MAYBE_ANGLE + +class D3DPresentEngine { - Q_OBJECT public: + enum Hint + { + RenderToTexture + }; + D3DPresentEngine(); virtual ~D3DPresentEngine(); - void start(); - void stop(); + bool isValid() const; + void setHint(Hint hint, bool enable = true); HRESULT getService(REFGUID guidService, REFIID riid, void** ppv); HRESULT checkFormat(D3DFORMAT format); - - HRESULT createVideoSamples(IMFMediaType *format, QList<IMFSample*>& videoSampleQueue); - void releaseResources(); - UINT refreshRate() const { return m_displayMode.RefreshRate; } - void setSurface(QAbstractVideoSurface *surface); - void setSurfaceFormat(const QVideoSurfaceFormat &format); + bool supportsTextureRendering() const; + bool isTextureRenderingEnabled() const { return m_useTextureRendering; } - void createOffscreenTexture(); - bool updateTexture(IDirect3DSurface9 *src); + HRESULT createVideoSamples(IMFMediaType *format, QList<IMFSample*>& videoSampleQueue); + QVideoSurfaceFormat videoSurfaceFormat() const { return m_surfaceFormat; } + QVideoFrame makeVideoFrame(IMFSample* sample); -public Q_SLOTS: - void presentSample(void* sample, qint64 llTarget); + void releaseResources(); private: HRESULT initializeD3D(); - HRESULT 
getSwapChainPresentParameters(IMFMediaType *type, D3DPRESENT_PARAMETERS *pp); HRESULT createD3DDevice(); - HRESULT createD3DSample(IDirect3DSwapChain9 *swapChain, IMFSample **videoSample); - QMutex m_mutex; UINT m_deviceResetToken; D3DDISPLAYMODE m_displayMode; @@ -130,19 +132,18 @@ private: IDirect3DDeviceManager9 *m_deviceManager; QVideoSurfaceFormat m_surfaceFormat; - QAbstractVideoSurface *m_surface; - QOpenGLContext *m_glContext; - QWindow *m_offscreenSurface; + bool m_useTextureRendering; - EGLDisplay *m_eglDisplay; - EGLConfig *m_eglConfig; - EGLSurface m_eglSurface; - unsigned int m_glTexture; +#ifdef MAYBE_ANGLE + bool createRenderTexture(); + bool updateTexture(IDirect3DSurface9 *src); + + OpenGLResources *m_glResources; IDirect3DTexture9 *m_texture; - EGLWrapper *m_egl; -}; +#endif -QT_END_NAMESPACE + friend class IMFSampleVideoBuffer; +}; #endif // EVRD3DPRESENTENGINE_H diff --git a/src/plugins/common/evr/evrdefs.cpp b/src/plugins/common/evr/evrdefs.cpp new file mode 100644 index 000000000..07d1c11eb --- /dev/null +++ b/src/plugins/common/evr/evrdefs.cpp @@ -0,0 +1,42 @@ +/**************************************************************************** +** +** Copyright (C) 2015 The Qt Company Ltd. +** Contact: http://www.qt.io/licensing/ +** +** This file is part of the Qt Toolkit. +** +** $QT_BEGIN_LICENSE:LGPL21$ +** Commercial License Usage +** Licensees holding valid commercial Qt licenses may use this file in +** accordance with the commercial license agreement provided with the +** Software or, alternatively, in accordance with the terms contained in +** a written agreement between you and The Qt Company. For licensing terms +** and conditions see http://www.qt.io/terms-conditions. For further +** information use the contact form at http://www.qt.io/contact-us. 
+** +** GNU Lesser General Public License Usage +** Alternatively, this file may be used under the terms of the GNU Lesser +** General Public License version 2.1 or version 3 as published by the Free +** Software Foundation and appearing in the file LICENSE.LGPLv21 and +** LICENSE.LGPLv3 included in the packaging of this file. Please review the +** following information to ensure the GNU Lesser General Public License +** requirements will be met: https://www.gnu.org/licenses/lgpl.html and +** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. +** +** As a special exception, The Qt Company gives you certain additional +** rights. These rights are described in The Qt Company LGPL Exception +** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. +** +** $QT_END_LICENSE$ +** +****************************************************************************/ + +#include "evrdefs.h" + +const CLSID clsid_EnhancedVideoRenderer = { 0xfa10746c, 0x9b63, 0x4b6c, {0xbc, 0x49, 0xfc, 0x30, 0xe, 0xa5, 0xf2, 0x56} }; +const GUID mr_VIDEO_RENDER_SERVICE = { 0x1092a86c, 0xab1a, 0x459a, {0xa3, 0x36, 0x83, 0x1f, 0xbc, 0x4d, 0x11, 0xff} }; +const GUID mr_VIDEO_MIXER_SERVICE = { 0x73cd2fc, 0x6cf4, 0x40b7, {0x88, 0x59, 0xe8, 0x95, 0x52, 0xc8, 0x41, 0xf8} }; +const GUID mr_BUFFER_SERVICE = { 0xa562248c, 0x9ac6, 0x4ffc, {0x9f, 0xba, 0x3a, 0xf8, 0xf8, 0xad, 0x1a, 0x4d} }; +const GUID video_ZOOM_RECT = { 0x7aaa1638, 0x1b7f, 0x4c93, {0xbd, 0x89, 0x5b, 0x9c, 0x9f, 0xb6, 0xfc, 0xf0} }; +const GUID iid_IDirect3DDevice9 = { 0xd0223b96, 0xbf7a, 0x43fd, {0x92, 0xbd, 0xa4, 0x3b, 0xd, 0x82, 0xb9, 0xeb} }; +const GUID iid_IDirect3DSurface9 = { 0xcfbaf3a, 0x9ff6, 0x429a, {0x99, 0xb3, 0xa2, 0x79, 0x6a, 0xf8, 0xb8, 0x9b} }; diff --git a/src/plugins/common/evr/evrdefs.h b/src/plugins/common/evr/evrdefs.h index ce6ca6584..3b2c2530a 100644 --- a/src/plugins/common/evr/evrdefs.h +++ b/src/plugins/common/evr/evrdefs.h @@ -36,10 +36,47 @@ #include <d3d9.h> #include <Evr9.h> +#include <evr.h> 
#include <dxva2api.h> +#include <mfapi.h> +#include <mfidl.h> +#include <Mferror.h> + +extern const CLSID clsid_EnhancedVideoRenderer; +extern const GUID mr_VIDEO_RENDER_SERVICE; +extern const GUID mr_VIDEO_MIXER_SERVICE; +extern const GUID mr_BUFFER_SERVICE; +extern const GUID video_ZOOM_RECT; +extern const GUID iid_IDirect3DDevice9; +extern const GUID iid_IDirect3DSurface9; // The following is required to compile with MinGW +extern "C" { +HRESULT WINAPI MFCreateVideoSampleFromSurface(IUnknown *pUnkSurface, IMFSample **ppSample); +HRESULT WINAPI Direct3DCreate9Ex(UINT SDKVersion, IDirect3D9Ex**); +} + +#ifndef PRESENTATION_CURRENT_POSITION +#define PRESENTATION_CURRENT_POSITION 0x7fffffffffffffff +#endif + +#ifndef MF_E_SHUTDOWN +#define MF_E_SHUTDOWN ((HRESULT)0xC00D3E85L) +#endif + +#ifndef MF_E_SAMPLEALLOCATOR_EMPTY +#define MF_E_SAMPLEALLOCATOR_EMPTY ((HRESULT)0xC00D4A3EL) +#endif + +#ifndef MF_E_TRANSFORM_STREAM_CHANGE +#define MF_E_TRANSFORM_STREAM_CHANGE ((HRESULT)0xC00D6D61L) +#endif + +#ifndef MF_E_TRANSFORM_NEED_MORE_INPUT +#define MF_E_TRANSFORM_NEED_MORE_INPUT ((HRESULT)0xC00D6D72L) +#endif + #ifdef __GNUC__ typedef struct MFVideoNormalizedRect { float left; @@ -49,6 +86,8 @@ typedef struct MFVideoNormalizedRect { } MFVideoNormalizedRect; #endif +#include <initguid.h> + #ifndef __IMFGetService_INTERFACE_DEFINED__ #define __IMFGetService_INTERFACE_DEFINED__ DEFINE_GUID(IID_IMFGetService, 0xfa993888, 0x4383, 0x415a, 0xa9,0x30, 0xdd,0x47,0x2a,0x8c,0xf6,0xf7); @@ -123,5 +162,185 @@ __CRT_UUID_DECL(IMFVideoProcessor, 0x6AB0000C, 0xFECE, 0x4d1f, 0xA2,0xAC, 0xA9,0 #endif #endif // __IMFVideoProcessor_INTERFACE_DEFINED__ +#ifndef __IMFVideoDeviceID_INTERFACE_DEFINED__ +#define __IMFVideoDeviceID_INTERFACE_DEFINED__ +DEFINE_GUID(IID_IMFVideoDeviceID, 0xA38D9567, 0x5A9C, 0x4f3c, 0xB2,0x93, 0x8E,0xB4,0x15,0xB2,0x79,0xBA); +MIDL_INTERFACE("A38D9567-5A9C-4f3c-B293-8EB415B279BA") +IMFVideoDeviceID : public IUnknown +{ +public: + virtual HRESULT STDMETHODCALLTYPE 
GetDeviceID(IID *pDeviceID) = 0; +}; +#ifdef __CRT_UUID_DECL +__CRT_UUID_DECL(IMFVideoDeviceID, 0xA38D9567, 0x5A9C, 0x4f3c, 0xB2,0x93, 0x8E,0xB4,0x15,0xB2,0x79,0xBA) +#endif +#endif // __IMFVideoDeviceID_INTERFACE_DEFINED__ + +#ifndef __IMFClockStateSink_INTERFACE_DEFINED__ +#define __IMFClockStateSink_INTERFACE_DEFINED__ +DEFINE_GUID(IID_IMFClockStateSink, 0xF6696E82, 0x74F7, 0x4f3d, 0xA1,0x78, 0x8A,0x5E,0x09,0xC3,0x65,0x9F); +MIDL_INTERFACE("F6696E82-74F7-4f3d-A178-8A5E09C3659F") +IMFClockStateSink : public IUnknown +{ +public: + virtual HRESULT STDMETHODCALLTYPE OnClockStart(MFTIME hnsSystemTime, LONGLONG llClockStartOffset) = 0; + virtual HRESULT STDMETHODCALLTYPE OnClockStop(MFTIME hnsSystemTime) = 0; + virtual HRESULT STDMETHODCALLTYPE OnClockPause(MFTIME hnsSystemTime) = 0; + virtual HRESULT STDMETHODCALLTYPE OnClockRestart(MFTIME hnsSystemTime) = 0; + virtual HRESULT STDMETHODCALLTYPE OnClockSetRate(MFTIME hnsSystemTime, float flRate) = 0; +}; +#ifdef __CRT_UUID_DECL +__CRT_UUID_DECL(IMFClockStateSink, 0xF6696E82, 0x74F7, 0x4f3d, 0xA1,0x78, 0x8A,0x5E,0x09,0xC3,0x65,0x9F) +#endif +#endif // __IMFClockStateSink_INTERFACE_DEFINED__ + +#ifndef __IMFVideoPresenter_INTERFACE_DEFINED__ +#define __IMFVideoPresenter_INTERFACE_DEFINED__ +typedef enum MFVP_MESSAGE_TYPE +{ + MFVP_MESSAGE_FLUSH = 0, + MFVP_MESSAGE_INVALIDATEMEDIATYPE = 0x1, + MFVP_MESSAGE_PROCESSINPUTNOTIFY = 0x2, + MFVP_MESSAGE_BEGINSTREAMING = 0x3, + MFVP_MESSAGE_ENDSTREAMING = 0x4, + MFVP_MESSAGE_ENDOFSTREAM = 0x5, + MFVP_MESSAGE_STEP = 0x6, + MFVP_MESSAGE_CANCELSTEP = 0x7 +} MFVP_MESSAGE_TYPE; + +DEFINE_GUID(IID_IMFVideoPresenter, 0x29AFF080, 0x182A, 0x4a5d, 0xAF,0x3B, 0x44,0x8F,0x3A,0x63,0x46,0xCB); +MIDL_INTERFACE("29AFF080-182A-4a5d-AF3B-448F3A6346CB") +IMFVideoPresenter : public IMFClockStateSink +{ +public: + virtual HRESULT STDMETHODCALLTYPE ProcessMessage(MFVP_MESSAGE_TYPE eMessage, ULONG_PTR ulParam) = 0; + virtual HRESULT STDMETHODCALLTYPE GetCurrentMediaType(IMFVideoMediaType 
**ppMediaType) = 0; +}; +#ifdef __CRT_UUID_DECL +__CRT_UUID_DECL(IMFVideoPresenter, 0x29AFF080, 0x182A, 0x4a5d, 0xAF,0x3B, 0x44,0x8F,0x3A,0x63,0x46,0xCB) +#endif +#endif // __IMFVideoPresenter_INTERFACE_DEFINED__ + +#ifndef __IMFRateSupport_INTERFACE_DEFINED__ +#define __IMFRateSupport_INTERFACE_DEFINED__ +DEFINE_GUID(IID_IMFRateSupport, 0x0a9ccdbc, 0xd797, 0x4563, 0x96,0x67, 0x94,0xec,0x5d,0x79,0x29,0x2d); +MIDL_INTERFACE("0a9ccdbc-d797-4563-9667-94ec5d79292d") +IMFRateSupport : public IUnknown +{ +public: + virtual HRESULT STDMETHODCALLTYPE GetSlowestRate(MFRATE_DIRECTION eDirection, BOOL fThin, float *pflRate) = 0; + virtual HRESULT STDMETHODCALLTYPE GetFastestRate(MFRATE_DIRECTION eDirection, BOOL fThin, float *pflRate) = 0; + virtual HRESULT STDMETHODCALLTYPE IsRateSupported(BOOL fThin, float flRate, float *pflNearestSupportedRate) = 0; +}; +#ifdef __CRT_UUID_DECL +__CRT_UUID_DECL(IMFRateSupport, 0x0a9ccdbc, 0xd797, 0x4563, 0x96,0x67, 0x94,0xec,0x5d,0x79,0x29,0x2d) +#endif +#endif // __IMFRateSupport_INTERFACE_DEFINED__ + +#ifndef __IMFTopologyServiceLookup_INTERFACE_DEFINED__ +#define __IMFTopologyServiceLookup_INTERFACE_DEFINED__ +typedef enum _MF_SERVICE_LOOKUP_TYPE +{ + MF_SERVICE_LOOKUP_UPSTREAM = 0, + MF_SERVICE_LOOKUP_UPSTREAM_DIRECT = (MF_SERVICE_LOOKUP_UPSTREAM + 1), + MF_SERVICE_LOOKUP_DOWNSTREAM = (MF_SERVICE_LOOKUP_UPSTREAM_DIRECT + 1), + MF_SERVICE_LOOKUP_DOWNSTREAM_DIRECT = (MF_SERVICE_LOOKUP_DOWNSTREAM + 1), + MF_SERVICE_LOOKUP_ALL = (MF_SERVICE_LOOKUP_DOWNSTREAM_DIRECT + 1), + MF_SERVICE_LOOKUP_GLOBAL = (MF_SERVICE_LOOKUP_ALL + 1) +} MF_SERVICE_LOOKUP_TYPE; + +DEFINE_GUID(IID_IMFTopologyServiceLookup, 0xfa993889, 0x4383, 0x415a, 0xa9,0x30, 0xdd,0x47,0x2a,0x8c,0xf6,0xf7); +MIDL_INTERFACE("fa993889-4383-415a-a930-dd472a8cf6f7") +IMFTopologyServiceLookup : public IUnknown +{ +public: + virtual HRESULT STDMETHODCALLTYPE LookupService(MF_SERVICE_LOOKUP_TYPE Type, + DWORD dwIndex, + REFGUID guidService, + REFIID riid, + LPVOID *ppvObjects, + DWORD 
*pnObjects) = 0; +}; +#ifdef __CRT_UUID_DECL +__CRT_UUID_DECL(IMFTopologyServiceLookup, 0xfa993889, 0x4383, 0x415a, 0xa9,0x30, 0xdd,0x47,0x2a,0x8c,0xf6,0xf7) +#endif +#endif // __IMFTopologyServiceLookup_INTERFACE_DEFINED__ + +#ifndef __IMFTopologyServiceLookupClient_INTERFACE_DEFINED__ +#define __IMFTopologyServiceLookupClient_INTERFACE_DEFINED__ +DEFINE_GUID(IID_IMFTopologyServiceLookupClient, 0xfa99388a, 0x4383, 0x415a, 0xa9,0x30, 0xdd,0x47,0x2a,0x8c,0xf6,0xf7); +MIDL_INTERFACE("fa99388a-4383-415a-a930-dd472a8cf6f7") +IMFTopologyServiceLookupClient : public IUnknown +{ +public: + virtual HRESULT STDMETHODCALLTYPE InitServicePointers(IMFTopologyServiceLookup *pLookup) = 0; + virtual HRESULT STDMETHODCALLTYPE ReleaseServicePointers(void) = 0; +}; +#ifdef __CRT_UUID_DECL +__CRT_UUID_DECL(IMFTopologyServiceLookupClient, 0xfa99388a, 0x4383, 0x415a, 0xa9,0x30, 0xdd,0x47,0x2a,0x8c,0xf6,0xf7) +#endif +#endif // __IMFTopologyServiceLookupClient_INTERFACE_DEFINED__ + +#ifndef __IMediaEventSink_INTERFACE_DEFINED__ +#define __IMediaEventSink_INTERFACE_DEFINED__ +DEFINE_GUID(IID_IMediaEventSink, 0x56a868a2, 0x0ad4, 0x11ce, 0xb0,0x3a, 0x00,0x20,0xaf,0x0b,0xa7,0x70); +MIDL_INTERFACE("56a868a2-0ad4-11ce-b03a-0020af0ba770") +IMediaEventSink : public IUnknown +{ +public: + virtual HRESULT STDMETHODCALLTYPE Notify(long EventCode, LONG_PTR EventParam1, LONG_PTR EventParam2) = 0; +}; +#ifdef __CRT_UUID_DECL +__CRT_UUID_DECL(IMediaEventSink, 0x56a868a2, 0x0ad4, 0x11ce, 0xb0,0x3a, 0x00,0x20,0xaf,0x0b,0xa7,0x70) +#endif +#endif // __IMediaEventSink_INTERFACE_DEFINED__ + +#ifndef __IMFVideoRenderer_INTERFACE_DEFINED__ +#define __IMFVideoRenderer_INTERFACE_DEFINED__ +DEFINE_GUID(IID_IMFVideoRenderer, 0xDFDFD197, 0xA9CA, 0x43d8, 0xB3,0x41, 0x6A,0xF3,0x50,0x37,0x92,0xCD); +MIDL_INTERFACE("DFDFD197-A9CA-43d8-B341-6AF3503792CD") +IMFVideoRenderer : public IUnknown +{ +public: + virtual HRESULT STDMETHODCALLTYPE InitializeRenderer(IMFTransform *pVideoMixer, + IMFVideoPresenter 
*pVideoPresenter) = 0; +}; +#ifdef __CRT_UUID_DECL +__CRT_UUID_DECL(IMFVideoRenderer, 0xDFDFD197, 0xA9CA, 0x43d8, 0xB3,0x41, 0x6A,0xF3,0x50,0x37,0x92,0xCD) +#endif +#endif // __IMFVideoRenderer_INTERFACE_DEFINED__ + +#ifndef __IMFTrackedSample_INTERFACE_DEFINED__ +#define __IMFTrackedSample_INTERFACE_DEFINED__ +DEFINE_GUID(IID_IMFTrackedSample, 0x245BF8E9, 0x0755, 0x40f7, 0x88,0xA5, 0xAE,0x0F,0x18,0xD5,0x5E,0x17); +MIDL_INTERFACE("245BF8E9-0755-40f7-88A5-AE0F18D55E17") +IMFTrackedSample : public IUnknown +{ +public: + virtual HRESULT STDMETHODCALLTYPE SetAllocator(IMFAsyncCallback *pSampleAllocator, IUnknown *pUnkState) = 0; +}; +#ifdef __CRT_UUID_DECL +__CRT_UUID_DECL(IMFTrackedSample, 0x245BF8E9, 0x0755, 0x40f7, 0x88,0xA5, 0xAE,0x0F,0x18,0xD5,0x5E,0x17) +#endif +#endif // __IMFTrackedSample_INTERFACE_DEFINED__ + +#ifndef __IMFDesiredSample_INTERFACE_DEFINED__ +#define __IMFDesiredSample_INTERFACE_DEFINED__ +DEFINE_GUID(IID_IMFDesiredSample, 0x56C294D0, 0x753E, 0x4260, 0x8D,0x61, 0xA3,0xD8,0x82,0x0B,0x1D,0x54); +MIDL_INTERFACE("56C294D0-753E-4260-8D61-A3D8820B1D54") +IMFDesiredSample : public IUnknown +{ +public: + virtual HRESULT STDMETHODCALLTYPE GetDesiredSampleTimeAndDuration(LONGLONG *phnsSampleTime, + LONGLONG *phnsSampleDuration) = 0; + virtual void STDMETHODCALLTYPE SetDesiredSampleTimeAndDuration(LONGLONG hnsSampleTime, + LONGLONG hnsSampleDuration) = 0; + virtual void STDMETHODCALLTYPE Clear( void) = 0; +}; +#ifdef __CRT_UUID_DECL +__CRT_UUID_DECL(IMFDesiredSample, 0x56C294D0, 0x753E, 0x4260, 0x8D,0x61, 0xA3,0xD8,0x82,0x0B,0x1D,0x54) +#endif +#endif + #endif // EVRDEFS_H diff --git a/src/plugins/wmf/mfglobal.cpp b/src/plugins/common/evr/evrhelpers.cpp index 55f2882db..2338d40fc 100644 --- a/src/plugins/wmf/mfglobal.cpp +++ b/src/plugins/common/evr/evrhelpers.cpp @@ -31,9 +31,16 @@ ** ****************************************************************************/ -#include "mfglobal.h" +#include "evrhelpers.h" -HRESULT qt_wmf_getFourCC(IMFMediaType *type, 
DWORD *fourCC) +#ifndef D3DFMT_YV12 +#define D3DFMT_YV12 (D3DFORMAT)MAKEFOURCC ('Y', 'V', '1', '2') +#endif +#ifndef D3DFMT_NV12 +#define D3DFMT_NV12 (D3DFORMAT)MAKEFOURCC ('N', 'V', '1', '2') +#endif + +HRESULT qt_evr_getFourCC(IMFMediaType *type, DWORD *fourCC) { if (!fourCC) return E_POINTER; @@ -50,20 +57,7 @@ HRESULT qt_wmf_getFourCC(IMFMediaType *type, DWORD *fourCC) return hr; } -MFRatio qt_wmf_getPixelAspectRatio(IMFMediaType *type) -{ - MFRatio ratio = { 0, 0 }; - HRESULT hr = S_OK; - - hr = MFGetAttributeRatio(type, MF_MT_PIXEL_ASPECT_RATIO, (UINT32*)&ratio.Numerator, (UINT32*)&ratio.Denominator); - if (FAILED(hr)) { - ratio.Numerator = 1; - ratio.Denominator = 1; - } - return ratio; -} - -bool qt_wmf_areMediaTypesEqual(IMFMediaType *type1, IMFMediaType *type2) +bool qt_evr_areMediaTypesEqual(IMFMediaType *type1, IMFMediaType *type2) { if (!type1 && !type2) return true; @@ -76,10 +70,10 @@ bool qt_wmf_areMediaTypesEqual(IMFMediaType *type1, IMFMediaType *type2) return (hr == S_OK); } -HRESULT qt_wmf_validateVideoArea(const MFVideoArea& area, UINT32 width, UINT32 height) +HRESULT qt_evr_validateVideoArea(const MFVideoArea& area, UINT32 width, UINT32 height) { - float fOffsetX = qt_wmf_MFOffsetToFloat(area.OffsetX); - float fOffsetY = qt_wmf_MFOffsetToFloat(area.OffsetY); + float fOffsetX = qt_evr_MFOffsetToFloat(area.OffsetX); + float fOffsetY = qt_evr_MFOffsetToFloat(area.OffsetY); if ( ((LONG)fOffsetX + area.Area.cx > (LONG)width) || ((LONG)fOffsetY + area.Area.cy > (LONG)height) ) @@ -88,7 +82,7 @@ HRESULT qt_wmf_validateVideoArea(const MFVideoArea& area, UINT32 width, UINT32 h return S_OK; } -bool qt_wmf_isSampleTimePassed(IMFClock *clock, IMFSample *sample) +bool qt_evr_isSampleTimePassed(IMFClock *clock, IMFSample *sample) { if (!sample || !clock) return false; @@ -114,3 +108,69 @@ bool qt_wmf_isSampleTimePassed(IMFClock *clock, IMFSample *sample) return false; } + +QVideoFrame::PixelFormat qt_evr_pixelFormatFromD3DFormat(D3DFORMAT format) +{ + 
switch (format) { + case D3DFMT_R8G8B8: + return QVideoFrame::Format_RGB24; + case D3DFMT_A8R8G8B8: + return QVideoFrame::Format_ARGB32; + case D3DFMT_X8R8G8B8: + return QVideoFrame::Format_RGB32; + case D3DFMT_R5G6B5: + return QVideoFrame::Format_RGB565; + case D3DFMT_X1R5G5B5: + return QVideoFrame::Format_RGB555; + case D3DFMT_A8: + return QVideoFrame::Format_Y8; + case D3DFMT_A8B8G8R8: + return QVideoFrame::Format_BGRA32; + case D3DFMT_X8B8G8R8: + return QVideoFrame::Format_BGR32; + case D3DFMT_UYVY: + return QVideoFrame::Format_UYVY; + case D3DFMT_YUY2: + return QVideoFrame::Format_YUYV; + case D3DFMT_NV12: + return QVideoFrame::Format_NV12; + case D3DFMT_YV12: + return QVideoFrame::Format_YV12; + case D3DFMT_UNKNOWN: + default: + return QVideoFrame::Format_Invalid; + } +} + +D3DFORMAT qt_evr_D3DFormatFromPixelFormat(QVideoFrame::PixelFormat format) +{ + switch (format) { + case QVideoFrame::Format_RGB24: + return D3DFMT_R8G8B8; + case QVideoFrame::Format_ARGB32: + return D3DFMT_A8R8G8B8; + case QVideoFrame::Format_RGB32: + return D3DFMT_X8R8G8B8; + case QVideoFrame::Format_RGB565: + return D3DFMT_R5G6B5; + case QVideoFrame::Format_RGB555: + return D3DFMT_X1R5G5B5; + case QVideoFrame::Format_Y8: + return D3DFMT_A8; + case QVideoFrame::Format_BGRA32: + return D3DFMT_A8B8G8R8; + case QVideoFrame::Format_BGR32: + return D3DFMT_X8B8G8R8; + case QVideoFrame::Format_UYVY: + return D3DFMT_UYVY; + case QVideoFrame::Format_YUYV: + return D3DFMT_YUY2; + case QVideoFrame::Format_NV12: + return D3DFMT_NV12; + case QVideoFrame::Format_YV12: + return D3DFMT_YV12; + case QVideoFrame::Format_Invalid: + default: + return D3DFMT_UNKNOWN; + } +} diff --git a/src/plugins/common/evr/evrhelpers.h b/src/plugins/common/evr/evrhelpers.h new file mode 100644 index 000000000..1b1f64194 --- /dev/null +++ b/src/plugins/common/evr/evrhelpers.h @@ -0,0 +1,91 @@ +/**************************************************************************** +** +** Copyright (C) 2015 The Qt Company Ltd. 
+** Contact: http://www.qt.io/licensing/ +** +** This file is part of the Qt Toolkit. +** +** $QT_BEGIN_LICENSE:LGPL21$ +** Commercial License Usage +** Licensees holding valid commercial Qt licenses may use this file in +** accordance with the commercial license agreement provided with the +** Software or, alternatively, in accordance with the terms contained in +** a written agreement between you and The Qt Company. For licensing terms +** and conditions see http://www.qt.io/terms-conditions. For further +** information use the contact form at http://www.qt.io/contact-us. +** +** GNU Lesser General Public License Usage +** Alternatively, this file may be used under the terms of the GNU Lesser +** General Public License version 2.1 or version 3 as published by the Free +** Software Foundation and appearing in the file LICENSE.LGPLv21 and +** LICENSE.LGPLv3 included in the packaging of this file. Please review the +** following information to ensure the GNU Lesser General Public License +** requirements will be met: https://www.gnu.org/licenses/lgpl.html and +** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. +** +** As a special exception, The Qt Company gives you certain additional +** rights. These rights are described in The Qt Company LGPL Exception +** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. 
+** +** $QT_END_LICENSE$ +** +****************************************************************************/ + +#ifndef EVRHELPERS_H +#define EVRHELPERS_H + +#include "evrdefs.h" +#include <qvideoframe.h> + +QT_USE_NAMESPACE + +template<class T> +static inline void qt_evr_safe_release(T **unk) +{ + if (*unk) { + (*unk)->Release(); + *unk = NULL; + } +} + +HRESULT qt_evr_getFourCC(IMFMediaType *type, DWORD *fourCC); + +bool qt_evr_areMediaTypesEqual(IMFMediaType *type1, IMFMediaType *type2); + +HRESULT qt_evr_validateVideoArea(const MFVideoArea& area, UINT32 width, UINT32 height); + +bool qt_evr_isSampleTimePassed(IMFClock *clock, IMFSample *sample); + +inline float qt_evr_MFOffsetToFloat(const MFOffset& offset) +{ + return offset.value + (float(offset.fract) / 65536); +} + +inline MFOffset qt_evr_makeMFOffset(float v) +{ + MFOffset offset; + offset.value = short(v); + offset.fract = WORD(65536 * (v-offset.value)); + return offset; +} + +inline MFVideoArea qt_evr_makeMFArea(float x, float y, DWORD width, DWORD height) +{ + MFVideoArea area; + area.OffsetX = qt_evr_makeMFOffset(x); + area.OffsetY = qt_evr_makeMFOffset(y); + area.Area.cx = width; + area.Area.cy = height; + return area; +} + +inline HRESULT qt_evr_getFrameRate(IMFMediaType *pType, MFRatio *pRatio) +{ + return MFGetAttributeRatio(pType, MF_MT_FRAME_RATE, (UINT32*)&pRatio->Numerator, (UINT32*)&pRatio->Denominator); +} + +QVideoFrame::PixelFormat qt_evr_pixelFormatFromD3DFormat(D3DFORMAT format); +D3DFORMAT qt_evr_D3DFormatFromPixelFormat(QVideoFrame::PixelFormat format); + +#endif // EVRHELPERS_H + diff --git a/src/plugins/common/evr/evrvideowindowcontrol.cpp b/src/plugins/common/evr/evrvideowindowcontrol.cpp index faa23d6e5..dae6583ff 100644 --- a/src/plugins/common/evr/evrvideowindowcontrol.cpp +++ b/src/plugins/common/evr/evrvideowindowcontrol.cpp @@ -65,8 +65,6 @@ bool EvrVideoWindowControl::setEvr(IUnknown *evr) if (!evr) return true; - static const GUID mr_VIDEO_RENDER_SERVICE = { 0x1092a86c, 
0xab1a, 0x459a, {0xa3, 0x36, 0x83, 0x1f, 0xbc, 0x4d, 0x11, 0xff} }; - static const GUID mr_VIDEO_MIXER_SERVICE = { 0x73cd2fc, 0x6cf4, 0x40b7, {0x88, 0x59, 0xe8, 0x95, 0x52, 0xc8, 0x41, 0xf8} }; IMFGetService *service = NULL; if (SUCCEEDED(evr->QueryInterface(IID_PPV_ARGS(&service))) diff --git a/src/plugins/directshow/camera/camera.pri b/src/plugins/directshow/camera/camera.pri index 3a532f472..c6b16da59 100644 --- a/src/plugins/directshow/camera/camera.pri +++ b/src/plugins/directshow/camera/camera.pri @@ -14,7 +14,8 @@ HEADERS += \ $$PWD/dsimagecapturecontrol.h \ $$PWD/dscamerasession.h \ $$PWD/directshowglobal.h \ - $$PWD/dscameraviewfindersettingscontrol.h + $$PWD/dscameraviewfindersettingscontrol.h \ + $$PWD/dscameraimageprocessingcontrol.h SOURCES += \ $$PWD/dscameraservice.cpp \ @@ -23,7 +24,8 @@ SOURCES += \ $$PWD/dsvideodevicecontrol.cpp \ $$PWD/dsimagecapturecontrol.cpp \ $$PWD/dscamerasession.cpp \ - $$PWD/dscameraviewfindersettingscontrol.cpp + $$PWD/dscameraviewfindersettingscontrol.cpp \ + $$PWD/dscameraimageprocessingcontrol.cpp *-msvc*:INCLUDEPATH += $$(DXSDK_DIR)/include LIBS += -lstrmiids -ldmoguids -luuid -lmsdmo -lole32 -loleaut32 diff --git a/src/plugins/directshow/camera/dscameraimageprocessingcontrol.cpp b/src/plugins/directshow/camera/dscameraimageprocessingcontrol.cpp new file mode 100644 index 000000000..39fa471ec --- /dev/null +++ b/src/plugins/directshow/camera/dscameraimageprocessingcontrol.cpp @@ -0,0 +1,74 @@ +/**************************************************************************** +** +** Copyright (C) 2015 Denis Shienkov <denis.shienkov@gmail.com> +** Contact: http://www.qt.io/licensing/ +** +** This file is part of the Qt Toolkit. 
+** +** $QT_BEGIN_LICENSE:LGPL21$ +** Commercial License Usage +** Licensees holding valid commercial Qt licenses may use this file in +** accordance with the commercial license agreement provided with the +** Software or, alternatively, in accordance with the terms contained in +** a written agreement between you and The Qt Company. For licensing terms +** and conditions see http://www.qt.io/terms-conditions. For further +** information use the contact form at http://www.qt.io/contact-us. +** +** GNU Lesser General Public License Usage +** Alternatively, this file may be used under the terms of the GNU Lesser +** General Public License version 2.1 or version 3 as published by the Free +** Software Foundation and appearing in the file LICENSE.LGPLv21 and +** LICENSE.LGPLv3 included in the packaging of this file. Please review the +** following information to ensure the GNU Lesser General Public License +** requirements will be met: https://www.gnu.org/licenses/lgpl.html and +** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. +** +** As a special exception, The Qt Company gives you certain additional +** rights. These rights are described in The Qt Company LGPL Exception +** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. 
+** +** $QT_END_LICENSE$ +** +****************************************************************************/ + +#include "dscameraimageprocessingcontrol.h" +#include "dscamerasession.h" + +QT_BEGIN_NAMESPACE + +DSCameraImageProcessingControl::DSCameraImageProcessingControl(DSCameraSession *session) + : QCameraImageProcessingControl(session) + , m_session(session) +{ +} + +DSCameraImageProcessingControl::~DSCameraImageProcessingControl() +{ +} + +bool DSCameraImageProcessingControl::isParameterSupported( + QCameraImageProcessingControl::ProcessingParameter parameter) const +{ + return m_session->isImageProcessingParameterSupported(parameter); +} + +bool DSCameraImageProcessingControl::isParameterValueSupported( + QCameraImageProcessingControl::ProcessingParameter parameter, + const QVariant &value) const +{ + return m_session->isImageProcessingParameterValueSupported(parameter, value); +} + +QVariant DSCameraImageProcessingControl::parameter( + QCameraImageProcessingControl::ProcessingParameter parameter) const +{ + return m_session->imageProcessingParameter(parameter); +} + +void DSCameraImageProcessingControl::setParameter(QCameraImageProcessingControl::ProcessingParameter parameter, + const QVariant &value) +{ + m_session->setImageProcessingParameter(parameter, value); +} + +QT_END_NAMESPACE diff --git a/src/plugins/directshow/camera/dscameraimageprocessingcontrol.h b/src/plugins/directshow/camera/dscameraimageprocessingcontrol.h new file mode 100644 index 000000000..2e50fe14d --- /dev/null +++ b/src/plugins/directshow/camera/dscameraimageprocessingcontrol.h @@ -0,0 +1,63 @@ +/**************************************************************************** +** +** Copyright (C) 2015 Denis Shienkov <denis.shienkov@gmail.com> +** Contact: http://www.qt.io/licensing/ +** +** This file is part of the Qt Toolkit. 
+** +** $QT_BEGIN_LICENSE:LGPL21$ +** Commercial License Usage +** Licensees holding valid commercial Qt licenses may use this file in +** accordance with the commercial license agreement provided with the +** Software or, alternatively, in accordance with the terms contained in +** a written agreement between you and The Qt Company. For licensing terms +** and conditions see http://www.qt.io/terms-conditions. For further +** information use the contact form at http://www.qt.io/contact-us. +** +** GNU Lesser General Public License Usage +** Alternatively, this file may be used under the terms of the GNU Lesser +** General Public License version 2.1 or version 3 as published by the Free +** Software Foundation and appearing in the file LICENSE.LGPLv21 and +** LICENSE.LGPLv3 included in the packaging of this file. Please review the +** following information to ensure the GNU Lesser General Public License +** requirements will be met: https://www.gnu.org/licenses/lgpl.html and +** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. +** +** As a special exception, The Qt Company gives you certain additional +** rights. These rights are described in The Qt Company LGPL Exception +** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. 
+** +** $QT_END_LICENSE$ +** +****************************************************************************/ + +#ifndef DSCAMERAIMAGEPROCESSINGCONTROL_H +#define DSCAMERAIMAGEPROCESSINGCONTROL_H + +#include <qcamera.h> +#include <qcameraimageprocessingcontrol.h> + +QT_BEGIN_NAMESPACE + +class DSCameraSession; + +class DSCameraImageProcessingControl : public QCameraImageProcessingControl +{ + Q_OBJECT + +public: + DSCameraImageProcessingControl(DSCameraSession *session); + virtual ~DSCameraImageProcessingControl(); + + bool isParameterSupported(ProcessingParameter) const; + bool isParameterValueSupported(ProcessingParameter parameter, const QVariant &value) const; + QVariant parameter(ProcessingParameter parameter) const; + void setParameter(ProcessingParameter parameter, const QVariant &value); + +private: + DSCameraSession *m_session; +}; + +QT_END_NAMESPACE + +#endif // DSCAMERAIMAGEPROCESSINGCONTROL_H diff --git a/src/plugins/directshow/camera/dscameraservice.cpp b/src/plugins/directshow/camera/dscameraservice.cpp index 9fcd4de70..6c92df04b 100644 --- a/src/plugins/directshow/camera/dscameraservice.cpp +++ b/src/plugins/directshow/camera/dscameraservice.cpp @@ -41,6 +41,7 @@ #include "dsvideodevicecontrol.h" #include "dsimagecapturecontrol.h" #include "dscameraviewfindersettingscontrol.h" +#include "dscameraimageprocessingcontrol.h" QT_BEGIN_NAMESPACE @@ -53,12 +54,14 @@ DSCameraService::DSCameraService(QObject *parent): m_videoDevice = new DSVideoDeviceControl(m_session); m_imageCapture = new DSImageCaptureControl(m_session); m_viewfinderSettings = new DSCameraViewfinderSettingsControl(m_session); + m_imageProcessingControl = new DSCameraImageProcessingControl(m_session); } DSCameraService::~DSCameraService() { delete m_control; delete m_viewfinderSettings; + delete m_imageProcessingControl; delete m_videoDevice; delete m_videoRenderer; delete m_imageCapture; @@ -86,6 +89,9 @@ QMediaControl* DSCameraService::requestControl(const char *name) if (qstrcmp(name, 
QCameraViewfinderSettingsControl2_iid) == 0) return m_viewfinderSettings; + if (qstrcmp(name, QCameraImageProcessingControl_iid) == 0) + return m_imageProcessingControl; + return 0; } diff --git a/src/plugins/directshow/camera/dscameraservice.h b/src/plugins/directshow/camera/dscameraservice.h index c3c881d0e..05222ebc4 100644 --- a/src/plugins/directshow/camera/dscameraservice.h +++ b/src/plugins/directshow/camera/dscameraservice.h @@ -46,6 +46,7 @@ class DSVideoOutputControl; class DSVideoDeviceControl; class DSImageCaptureControl; class DSCameraViewfinderSettingsControl; +class DSCameraImageProcessingControl; class DSCameraService : public QMediaService { @@ -66,6 +67,7 @@ private: QMediaControl *m_videoRenderer; DSImageCaptureControl *m_imageCapture; DSCameraViewfinderSettingsControl *m_viewfinderSettings; + DSCameraImageProcessingControl *m_imageProcessingControl; }; QT_END_NAMESPACE diff --git a/src/plugins/directshow/camera/dscamerasession.cpp b/src/plugins/directshow/camera/dscamerasession.cpp index 24dad94c5..ede5e4763 100644 --- a/src/plugins/directshow/camera/dscamerasession.cpp +++ b/src/plugins/directshow/camera/dscamerasession.cpp @@ -230,6 +230,240 @@ void DSCameraSession::setViewfinderSettings(const QCameraViewfinderSettings &set m_viewfinderSettings = settings; } +qreal DSCameraSession::scaledImageProcessingParameterValue( + const ImageProcessingParameterInfo &sourceValueInfo) +{ + if (sourceValueInfo.currentValue == sourceValueInfo.defaultValue) { + return 0.0f; + } else if (sourceValueInfo.currentValue < sourceValueInfo.defaultValue) { + return ((sourceValueInfo.currentValue - sourceValueInfo.minimumValue) + / qreal(sourceValueInfo.defaultValue - sourceValueInfo.minimumValue)) + + (-1.0f); + } else { + return ((sourceValueInfo.currentValue - sourceValueInfo.defaultValue) + / qreal(sourceValueInfo.maximumValue - sourceValueInfo.defaultValue)); + } +} + +qint32 DSCameraSession::sourceImageProcessingParameterValue( + qreal scaledValue, const 
ImageProcessingParameterInfo &valueRange) +{ + if (qFuzzyIsNull(scaledValue)) { + return valueRange.defaultValue; + } else if (scaledValue < 0.0f) { + return ((scaledValue - (-1.0f)) * (valueRange.defaultValue - valueRange.minimumValue)) + + valueRange.minimumValue; + } else { + return (scaledValue * (valueRange.maximumValue - valueRange.defaultValue)) + + valueRange.defaultValue; + } +} + +static QCameraImageProcessingControl::ProcessingParameter searchRelatedResultingParameter( + QCameraImageProcessingControl::ProcessingParameter sourceParameter) +{ + if (sourceParameter == QCameraImageProcessingControl::WhiteBalancePreset) + return QCameraImageProcessingControl::ColorTemperature; + return sourceParameter; +} + +bool DSCameraSession::isImageProcessingParameterSupported( + QCameraImageProcessingControl::ProcessingParameter parameter) const +{ + const QCameraImageProcessingControl::ProcessingParameter resultingParameter = + searchRelatedResultingParameter(parameter); + + return m_imageProcessingParametersInfos.contains(resultingParameter); +} + +bool DSCameraSession::isImageProcessingParameterValueSupported( + QCameraImageProcessingControl::ProcessingParameter parameter, + const QVariant &value) const +{ + const QCameraImageProcessingControl::ProcessingParameter resultingParameter = + searchRelatedResultingParameter(parameter); + + QMap<QCameraImageProcessingControl::ProcessingParameter, + ImageProcessingParameterInfo>::const_iterator sourceValueInfo = + m_imageProcessingParametersInfos.constFind(resultingParameter); + + if (sourceValueInfo == m_imageProcessingParametersInfos.constEnd()) + return false; + + switch (parameter) { + + case QCameraImageProcessingControl::WhiteBalancePreset: { + const QCameraImageProcessing::WhiteBalanceMode checkedValue = + value.value<QCameraImageProcessing::WhiteBalanceMode>(); + // Supports only the Manual and the Auto values + if (checkedValue != QCameraImageProcessing::WhiteBalanceManual + && checkedValue != 
QCameraImageProcessing::WhiteBalanceAuto) { + return false; + } + } + break; + + case QCameraImageProcessingControl::ColorTemperature: { + const qint32 checkedValue = value.toInt(); + if (checkedValue < (*sourceValueInfo).minimumValue + || checkedValue > (*sourceValueInfo).maximumValue) { + return false; + } + } + break; + + case QCameraImageProcessingControl::ContrastAdjustment: // falling back + case QCameraImageProcessingControl::SaturationAdjustment: // falling back + case QCameraImageProcessingControl::BrightnessAdjustment: // falling back + case QCameraImageProcessingControl::SharpeningAdjustment: { + const qint32 sourceValue = sourceImageProcessingParameterValue( + value.toReal(), (*sourceValueInfo)); + if (sourceValue < (*sourceValueInfo).minimumValue + || sourceValue > (*sourceValueInfo).maximumValue) + return false; + } + break; + + default: + return false; + } + + return true; +} + +QVariant DSCameraSession::imageProcessingParameter( + QCameraImageProcessingControl::ProcessingParameter parameter) const +{ + if (!m_graphBuilder) { + qWarning() << "failed to access to the graph builder"; + return QVariant(); + } + + const QCameraImageProcessingControl::ProcessingParameter resultingParameter = + searchRelatedResultingParameter(parameter); + + QMap<QCameraImageProcessingControl::ProcessingParameter, + ImageProcessingParameterInfo>::const_iterator sourceValueInfo = + m_imageProcessingParametersInfos.constFind(resultingParameter); + + if (sourceValueInfo == m_imageProcessingParametersInfos.constEnd()) + return QVariant(); + + switch (parameter) { + + case QCameraImageProcessingControl::WhiteBalancePreset: + return QVariant::fromValue<QCameraImageProcessing::WhiteBalanceMode>( + (*sourceValueInfo).capsFlags == VideoProcAmp_Flags_Auto + ? 
QCameraImageProcessing::WhiteBalanceAuto + : QCameraImageProcessing::WhiteBalanceManual); + + case QCameraImageProcessingControl::ColorTemperature: + return QVariant::fromValue<qint32>((*sourceValueInfo).currentValue); + + case QCameraImageProcessingControl::ContrastAdjustment: // falling back + case QCameraImageProcessingControl::SaturationAdjustment: // falling back + case QCameraImageProcessingControl::BrightnessAdjustment: // falling back + case QCameraImageProcessingControl::SharpeningAdjustment: + return scaledImageProcessingParameterValue((*sourceValueInfo)); + + default: + return QVariant(); + } +} + +void DSCameraSession::setImageProcessingParameter( + QCameraImageProcessingControl::ProcessingParameter parameter, + const QVariant &value) +{ + if (!m_graphBuilder) { + qWarning() << "failed to access to the graph builder"; + return; + } + + const QCameraImageProcessingControl::ProcessingParameter resultingParameter = + searchRelatedResultingParameter(parameter); + + QMap<QCameraImageProcessingControl::ProcessingParameter, + ImageProcessingParameterInfo>::iterator sourceValueInfo = + m_imageProcessingParametersInfos.find(resultingParameter); + + if (sourceValueInfo == m_imageProcessingParametersInfos.constEnd()) + return; + + LONG sourceValue = 0; + LONG capsFlags = VideoProcAmp_Flags_Manual; + + switch (parameter) { + + case QCameraImageProcessingControl::WhiteBalancePreset: { + const QCameraImageProcessing::WhiteBalanceMode checkedValue = + value.value<QCameraImageProcessing::WhiteBalanceMode>(); + // Supports only the Manual and the Auto values + if (checkedValue == QCameraImageProcessing::WhiteBalanceManual) + capsFlags = VideoProcAmp_Flags_Manual; + else if (checkedValue == QCameraImageProcessing::WhiteBalanceAuto) + capsFlags = VideoProcAmp_Flags_Auto; + else + return; + + sourceValue = ((*sourceValueInfo).hasBeenExplicitlySet) + ? 
(*sourceValueInfo).currentValue + : (*sourceValueInfo).defaultValue; + } + break; + + case QCameraImageProcessingControl::ColorTemperature: + sourceValue = value.isValid() ? + value.value<qint32>() : (*sourceValueInfo).defaultValue; + capsFlags = (*sourceValueInfo).capsFlags; + break; + + case QCameraImageProcessingControl::ContrastAdjustment: // falling back + case QCameraImageProcessingControl::SaturationAdjustment: // falling back + case QCameraImageProcessingControl::BrightnessAdjustment: // falling back + case QCameraImageProcessingControl::SharpeningAdjustment: + if (value.isValid()) { + sourceValue = sourceImageProcessingParameterValue( + value.toReal(), (*sourceValueInfo)); + } else { + sourceValue = (*sourceValueInfo).defaultValue; + } + break; + + default: + return; + } + + IAMVideoProcAmp *pVideoProcAmp = NULL; + HRESULT hr = m_graphBuilder->FindInterface( + NULL, + NULL, + m_sourceFilter, + IID_IAMVideoProcAmp, + reinterpret_cast<void**>(&pVideoProcAmp) + ); + + if (FAILED(hr) || !pVideoProcAmp) { + qWarning() << "failed to find the video proc amp"; + return; + } + + hr = pVideoProcAmp->Set( + (*sourceValueInfo).videoProcAmpProperty, + sourceValue, + capsFlags); + + pVideoProcAmp->Release(); + + if (FAILED(hr)) { + qWarning() << "failed to set the parameter value"; + } else { + (*sourceValueInfo).capsFlags = capsFlags; + (*sourceValueInfo).hasBeenExplicitlySet = true; + (*sourceValueInfo).currentValue = sourceValue; + } +} + bool DSCameraSession::load() { unload(); @@ -720,6 +954,81 @@ bool DSCameraSession::configurePreviewFormat() return true; } +void DSCameraSession::updateImageProcessingParametersInfos() +{ + if (!m_graphBuilder) { + qWarning() << "failed to access to the graph builder"; + return; + } + + IAMVideoProcAmp *pVideoProcAmp = NULL; + const HRESULT hr = m_graphBuilder->FindInterface( + NULL, + NULL, + m_sourceFilter, + IID_IAMVideoProcAmp, + reinterpret_cast<void**>(&pVideoProcAmp) + ); + + if (FAILED(hr) || !pVideoProcAmp) { + qWarning() 
<< "failed to find the video proc amp"; + return; + } + + for (int property = VideoProcAmp_Brightness; property <= VideoProcAmp_Gain; ++property) { + + QCameraImageProcessingControl::ProcessingParameter processingParameter; // not initialized + + switch (property) { + case VideoProcAmp_Brightness: + processingParameter = QCameraImageProcessingControl::BrightnessAdjustment; + break; + case VideoProcAmp_Contrast: + processingParameter = QCameraImageProcessingControl::ContrastAdjustment; + break; + case VideoProcAmp_Saturation: + processingParameter = QCameraImageProcessingControl::SaturationAdjustment; + break; + case VideoProcAmp_Sharpness: + processingParameter = QCameraImageProcessingControl::SharpeningAdjustment; + break; + case VideoProcAmp_WhiteBalance: + processingParameter = QCameraImageProcessingControl::ColorTemperature; + break; + default: // unsupported or not implemented yet parameter + continue; + } + + ImageProcessingParameterInfo sourceValueInfo; + LONG steppingDelta = 0; + + HRESULT hr = pVideoProcAmp->GetRange( + property, + &sourceValueInfo.minimumValue, + &sourceValueInfo.maximumValue, + &steppingDelta, + &sourceValueInfo.defaultValue, + &sourceValueInfo.capsFlags); + + if (FAILED(hr)) + continue; + + hr = pVideoProcAmp->Get( + property, + &sourceValueInfo.currentValue, + &sourceValueInfo.capsFlags); + + if (FAILED(hr)) + continue; + + sourceValueInfo.videoProcAmpProperty = static_cast<VideoProcAmpProperty>(property); + + m_imageProcessingParametersInfos.insert(processingParameter, sourceValueInfo); + } + + pVideoProcAmp->Release(); +} + bool DSCameraSession::connectGraph() { HRESULT hr = m_filterGraph->AddFilter(m_sourceFilter, L"Capture Filter"); @@ -806,6 +1115,7 @@ void DSCameraSession::updateSourceCapabilities() for (AM_MEDIA_TYPE f : qAsConst(m_supportedFormats)) _FreeMediaType(f); m_supportedFormats.clear(); + m_imageProcessingParametersInfos.clear(); IAMVideoControl *pVideoControl = 0; hr = 
m_graphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, @@ -915,6 +1225,8 @@ void DSCameraSession::updateSourceCapabilities() } pConfig->Release(); + + updateImageProcessingParametersInfos(); } HRESULT getPin(IBaseFilter *pFilter, PIN_DIRECTION PinDir, IPin **ppPin) diff --git a/src/plugins/directshow/camera/dscamerasession.h b/src/plugins/directshow/camera/dscamerasession.h index 9ac121463..768e3583a 100644 --- a/src/plugins/directshow/camera/dscamerasession.h +++ b/src/plugins/directshow/camera/dscamerasession.h @@ -43,6 +43,7 @@ #include <QtMultimedia/qvideoframe.h> #include <QtMultimedia/qabstractvideosurface.h> #include <QtMultimedia/qvideosurfaceformat.h> +#include <QtMultimedia/qcameraimageprocessingcontrol.h> #include <private/qmediastoragelocation_p.h> #include <tchar.h> @@ -97,6 +98,20 @@ public: QList<QCameraViewfinderSettings> supportedViewfinderSettings() const { return m_supportedViewfinderSettings; } + bool isImageProcessingParameterSupported( + QCameraImageProcessingControl::ProcessingParameter) const; + + bool isImageProcessingParameterValueSupported( + QCameraImageProcessingControl::ProcessingParameter, + const QVariant &) const; + + QVariant imageProcessingParameter( + QCameraImageProcessingControl::ProcessingParameter) const; + + void setImageProcessingParameter( + QCameraImageProcessingControl::ProcessingParameter, + const QVariant &); + Q_SIGNALS: void statusChanged(QCamera::Status); void imageExposed(int id); @@ -110,6 +125,27 @@ private Q_SLOTS: void updateReadyForCapture(); private: + struct ImageProcessingParameterInfo { + ImageProcessingParameterInfo() + : minimumValue(0) + , maximumValue(0) + , defaultValue(0) + , currentValue(0) + , capsFlags(0) + , hasBeenExplicitlySet(false) + , videoProcAmpProperty(VideoProcAmp_Brightness) + { + } + + LONG minimumValue; + LONG maximumValue; + LONG defaultValue; + LONG currentValue; + LONG capsFlags; + bool hasBeenExplicitlySet; + VideoProcAmpProperty videoProcAmpProperty; + }; + void 
setStatus(QCamera::Status status); void onFrameAvailable(const char *frameData, long len); @@ -120,6 +156,14 @@ private: void disconnectGraph(); void updateSourceCapabilities(); bool configurePreviewFormat(); + void updateImageProcessingParametersInfos(); + + // These static functions are used for scaling of adjustable parameters, + // which have the ranges from -1.0 to +1.0 in the QCameraImageProcessing API. + static qreal scaledImageProcessingParameterValue( + const ImageProcessingParameterInfo &sourceValueInfo); + static qint32 sourceImageProcessingParameterValue( + qreal scaledValue, const ImageProcessingParameterInfo &sourceValueInfo); QMutex m_presentMutex; QMutex m_captureMutex; @@ -135,6 +179,7 @@ private: QList<AM_MEDIA_TYPE> m_supportedFormats; QList<QCameraViewfinderSettings> m_supportedViewfinderSettings; AM_MEDIA_TYPE m_sourceFormat; + QMap<QCameraImageProcessingControl::ProcessingParameter, ImageProcessingParameterInfo> m_imageProcessingParametersInfos; // Preview IBaseFilter *m_previewFilter; diff --git a/src/plugins/directshow/player/directshowevrvideowindowcontrol.cpp b/src/plugins/directshow/player/directshowevrvideowindowcontrol.cpp index 7bffe47d2..22771bd4c 100644 --- a/src/plugins/directshow/player/directshowevrvideowindowcontrol.cpp +++ b/src/plugins/directshow/player/directshowevrvideowindowcontrol.cpp @@ -49,10 +49,8 @@ DirectShowEvrVideoWindowControl::~DirectShowEvrVideoWindowControl() IBaseFilter *DirectShowEvrVideoWindowControl::filter() { - static const GUID clsid_EnhancendVideoRenderer = { 0xfa10746c, 0x9b63, 0x4b6c, {0xbc, 0x49, 0xfc, 0x30, 0xe, 0xa5, 0xf2, 0x56} }; - if (!m_evrFilter) { - m_evrFilter = com_new<IBaseFilter>(clsid_EnhancendVideoRenderer); + m_evrFilter = com_new<IBaseFilter>(clsid_EnhancedVideoRenderer); if (!setEvr(m_evrFilter)) { m_evrFilter->Release(); m_evrFilter = NULL; diff --git a/src/plugins/directshow/player/directshowmetadatacontrol.cpp b/src/plugins/directshow/player/directshowmetadatacontrol.cpp index 
3da02aa45..f1581e129 100644 --- a/src/plugins/directshow/player/directshowmetadatacontrol.cpp +++ b/src/plugins/directshow/player/directshowmetadatacontrol.cpp @@ -362,7 +362,18 @@ static QString convertBSTR(BSTR *string) return value; } -void DirectShowMetaDataControl::updateGraph(IFilterGraph2 *graph, IBaseFilter *source, const QString &fileSrc) +void DirectShowMetaDataControl::reset() +{ + bool hadMetadata = !m_metadata.isEmpty(); + m_metadata.clear(); + + setMetadataAvailable(false); + + if (hadMetadata) + emit metaDataChanged(); +} + +void DirectShowMetaDataControl::updateMetadata(IFilterGraph2 *graph, IBaseFilter *source, const QString &fileSrc) { m_metadata.clear(); @@ -569,13 +580,19 @@ void DirectShowMetaDataControl::customEvent(QEvent *event) if (event->type() == QEvent::Type(MetaDataChanged)) { event->accept(); - bool oldAvailable = m_available; - m_available = !m_metadata.isEmpty(); - if (m_available != oldAvailable) - emit metaDataAvailableChanged(m_available); + setMetadataAvailable(!m_metadata.isEmpty()); emit metaDataChanged(); } else { QMetaDataReaderControl::customEvent(event); } } + +void DirectShowMetaDataControl::setMetadataAvailable(bool available) +{ + if (m_available == available) + return; + + m_available = available; + emit metaDataAvailableChanged(m_available); +} diff --git a/src/plugins/directshow/player/directshowmetadatacontrol.h b/src/plugins/directshow/player/directshowmetadatacontrol.h index d32ae8508..55504ba4b 100644 --- a/src/plugins/directshow/player/directshowmetadatacontrol.h +++ b/src/plugins/directshow/player/directshowmetadatacontrol.h @@ -56,13 +56,16 @@ public: QVariant metaData(const QString &key) const; QStringList availableMetaData() const; - void updateGraph(IFilterGraph2 *graph, IBaseFilter *source, - const QString &fileSrc = QString()); + void reset(); + void updateMetadata(IFilterGraph2 *graph, IBaseFilter *source, + const QString &fileSrc = QString()); protected: void customEvent(QEvent *event); private: + void 
setMetadataAvailable(bool available); + enum Event { MetaDataChanged = QEvent::User diff --git a/src/plugins/directshow/player/directshowplayercontrol.cpp b/src/plugins/directshow/player/directshowplayercontrol.cpp index e988cbdd3..3449c9270 100644 --- a/src/plugins/directshow/player/directshowplayercontrol.cpp +++ b/src/plugins/directshow/player/directshowplayercontrol.cpp @@ -53,17 +53,6 @@ static int volumeToDecibels(int volume) } } -static int decibelsToVolume(int dB) -{ - if (dB == -10000) { - return 0; - } else if (dB == 0) { - return 100; - } else { - return qRound(100 * qPow(10, qreal(dB) / 5000)); - } -} - DirectShowPlayerControl::DirectShowPlayerControl(DirectShowPlayerService *service, QObject *parent) : QMediaPlayerControl(parent) , m_service(service) @@ -75,7 +64,7 @@ DirectShowPlayerControl::DirectShowPlayerControl(DirectShowPlayerService *servic , m_streamTypes(0) , m_volume(100) , m_muted(false) - , m_position(0) + , m_emitPosition(-1) , m_pendingPosition(-1) , m_duration(0) , m_playbackRate(0) @@ -109,7 +98,7 @@ qint64 DirectShowPlayerControl::position() const if (m_pendingPosition != -1) return m_pendingPosition; - return const_cast<qint64 &>(m_position) = m_service->position(); + return m_service->position(); } void DirectShowPlayerControl::setPosition(qint64 position) @@ -226,6 +215,7 @@ const QIODevice *DirectShowPlayerControl::mediaStream() const void DirectShowPlayerControl::setMedia(const QMediaContent &media, QIODevice *stream) { m_pendingPosition = -1; + m_emitPosition = -1; m_media = media; m_stream = stream; @@ -258,6 +248,7 @@ void DirectShowPlayerControl::playOrPause(QMediaPlayer::State state) return; } + m_emitPosition = -1; m_state = state; if (m_pendingPosition != -1) @@ -273,6 +264,7 @@ void DirectShowPlayerControl::playOrPause(QMediaPlayer::State state) void DirectShowPlayerControl::stop() { + m_emitPosition = -1; m_service->stop(); emit stateChanged(m_state = QMediaPlayer::StoppedState); } @@ -304,8 +296,8 @@ void 
DirectShowPlayerControl::emitPropertyChanges() emit videoAvailableChanged(m_streamTypes & DirectShowPlayerService::VideoStream); } - if (properties & PositionProperty) - emit positionChanged(m_position); + if (properties & PositionProperty && m_emitPosition != -1) + emit positionChanged(m_emitPosition); if (properties & DurationProperty) emit durationChanged(m_duration); @@ -400,8 +392,8 @@ void DirectShowPlayerControl::updateError(QMediaPlayer::Error error, const QStri void DirectShowPlayerControl::updatePosition(qint64 position) { - if (m_position != position) { - m_position = position; + if (m_emitPosition != position) { + m_emitPosition = position; scheduleUpdate(PositionProperty); } diff --git a/src/plugins/directshow/player/directshowplayercontrol.h b/src/plugins/directshow/player/directshowplayercontrol.h index ab842f511..f67c4108b 100644 --- a/src/plugins/directshow/player/directshowplayercontrol.h +++ b/src/plugins/directshow/player/directshowplayercontrol.h @@ -130,7 +130,7 @@ private: int m_streamTypes; int m_volume; bool m_muted; - qint64 m_position; + qint64 m_emitPosition; qint64 m_pendingPosition; qint64 m_duration; qreal m_playbackRate; diff --git a/src/plugins/directshow/player/directshowplayerservice.cpp b/src/plugins/directshow/player/directshowplayerservice.cpp index 07427583d..8e9e50cbf 100644 --- a/src/plugins/directshow/player/directshowplayerservice.cpp +++ b/src/plugins/directshow/player/directshowplayerservice.cpp @@ -103,6 +103,7 @@ DirectShowPlayerService::DirectShowPlayerService(QObject *parent) , m_buffering(false) , m_seekable(false) , m_atEnd(false) + , m_dontCacheNextSeekResult(false) { m_playerControl = new DirectShowPlayerControl(this); m_metaDataControl = new DirectShowMetaDataControl(this); @@ -229,7 +230,8 @@ void DirectShowPlayerService::load(const QMediaContent &media, QIODevice *stream m_buffering = false; m_seekable = false; m_atEnd = false; - m_metaDataControl->updateGraph(0, 0); + m_dontCacheNextSeekResult = false; + 
m_metaDataControl->reset(); if (m_resources.isEmpty() && !stream) { m_pendingTasks = 0; @@ -568,9 +570,6 @@ void DirectShowPlayerService::doReleaseGraph(QMutexLocker *locker) control->Release(); } - //release m_headerInfo -> decrease ref counter of m_source - m_metaDataControl->updateGraph(0, 0); - if (m_source) { m_source->Release(); m_source = 0; @@ -672,7 +671,9 @@ void DirectShowPlayerService::play() if (m_executedTasks & Stop) { m_atEnd = false; if (m_seekPosition == -1) { + m_dontCacheNextSeekResult = true; m_seekPosition = 0; + m_position = 0; m_pendingTasks |= Seek; } m_executedTasks ^= Stop; @@ -718,7 +719,9 @@ void DirectShowPlayerService::pause() if (m_executedTasks & Stop) { m_atEnd = false; if (m_seekPosition == -1) { + m_dontCacheNextSeekResult = true; m_seekPosition = 0; + m_position = 0; m_pendingTasks |= Seek; } m_executedTasks ^= Stop; @@ -791,6 +794,8 @@ void DirectShowPlayerService::doStop(QMutexLocker *locker) } m_seekPosition = 0; + m_position = 0; + m_dontCacheNextSeekResult = true; m_pendingTasks |= Seek; m_executedTasks &= ~(Play | Pause); @@ -913,8 +918,10 @@ void DirectShowPlayerService::doSeek(QMutexLocker *locker) // Cache current values as we can't query IMediaSeeking during a seek due to the // possibility of a deadlock when flushing the VideoSurfaceFilter. 
LONGLONG currentPosition = 0; - seeking->GetCurrentPosition(¤tPosition); - m_position = currentPosition / qt_directShowTimeScale; + if (!m_dontCacheNextSeekResult) { + seeking->GetCurrentPosition(¤tPosition); + m_position = currentPosition / qt_directShowTimeScale; + } LONGLONG minimum = 0; LONGLONG maximum = 0; @@ -928,8 +935,10 @@ void DirectShowPlayerService::doSeek(QMutexLocker *locker) &seekPosition, AM_SEEKING_AbsolutePositioning, 0, AM_SEEKING_NoPositioning); locker->relock(); - seeking->GetCurrentPosition(¤tPosition); - m_position = currentPosition / qt_directShowTimeScale; + if (!m_dontCacheNextSeekResult) { + seeking->GetCurrentPosition(¤tPosition); + m_position = currentPosition / qt_directShowTimeScale; + } seeking->Release(); @@ -937,6 +946,7 @@ void DirectShowPlayerService::doSeek(QMutexLocker *locker) } m_seekPosition = -1; + m_dontCacheNextSeekResult = false; } int DirectShowPlayerService::bufferStatus() const @@ -1125,7 +1135,7 @@ void DirectShowPlayerService::customEvent(QEvent *event) QMutexLocker locker(&m_mutex); m_playerControl->updateMediaInfo(m_duration, m_streamTypes, m_seekable); - m_metaDataControl->updateGraph(m_graph, m_source, m_url.toString()); + m_metaDataControl->updateMetadata(m_graph, m_source, m_url.toString()); updateStatus(); } else if (event->type() == QEvent::Type(Error)) { diff --git a/src/plugins/directshow/player/directshowplayerservice.h b/src/plugins/directshow/player/directshowplayerservice.h index edfde105e..4d3762f74 100644 --- a/src/plugins/directshow/player/directshowplayerservice.h +++ b/src/plugins/directshow/player/directshowplayerservice.h @@ -195,6 +195,7 @@ private: bool m_buffering; bool m_seekable; bool m_atEnd; + bool m_dontCacheNextSeekResult; QMediaTimeRange m_playbackRange; QUrl m_url; QMediaResourceList m_resources; diff --git a/src/plugins/directshow/player/directshowvideorenderercontrol.cpp b/src/plugins/directshow/player/directshowvideorenderercontrol.cpp index 6a1580ea5..027d2ce55 100644 --- 
a/src/plugins/directshow/player/directshowvideorenderercontrol.cpp +++ b/src/plugins/directshow/player/directshowvideorenderercontrol.cpp @@ -35,6 +35,12 @@ #include "videosurfacefilter.h" +#ifdef HAVE_EVR +#include "evrcustompresenter.h" +#endif + +#include <qabstractvideosurface.h> + DirectShowVideoRendererControl::DirectShowVideoRendererControl(DirectShowEventLoop *loop, QObject *parent) : QVideoRendererControl(parent) , m_loop(loop) @@ -56,22 +62,34 @@ QAbstractVideoSurface *DirectShowVideoRendererControl::surface() const void DirectShowVideoRendererControl::setSurface(QAbstractVideoSurface *surface) { - if (surface != m_surface) { - m_surface = surface; + if (m_surface == surface) + return; + + if (m_filter) { + m_filter->Release(); + m_filter = 0; + } - VideoSurfaceFilter *existingFilter = m_filter; + m_surface = surface; - if (surface) { - m_filter = new VideoSurfaceFilter(surface, m_loop); - } else { + if (m_surface) { +#ifdef HAVE_EVR + m_filter = com_new<IBaseFilter>(clsid_EnhancedVideoRenderer); + EVRCustomPresenter *evrPresenter = new EVRCustomPresenter(m_surface); + if (!evrPresenter->isValid() || !qt_evr_setCustomPresenter(m_filter, evrPresenter)) { + m_filter->Release(); m_filter = 0; } + evrPresenter->Release(); - emit filterChanged(); - - if (existingFilter) - existingFilter->Release(); + if (!m_filter) +#endif + { + m_filter = new VideoSurfaceFilter(m_surface, m_loop); + } } + + emit filterChanged(); } IBaseFilter *DirectShowVideoRendererControl::filter() diff --git a/src/plugins/directshow/player/directshowvideorenderercontrol.h b/src/plugins/directshow/player/directshowvideorenderercontrol.h index b4828d1b0..484fda263 100644 --- a/src/plugins/directshow/player/directshowvideorenderercontrol.h +++ b/src/plugins/directshow/player/directshowvideorenderercontrol.h @@ -39,7 +39,6 @@ #include <dshow.h> class DirectShowEventLoop; -class VideoSurfaceFilter; QT_USE_NAMESPACE @@ -61,7 +60,7 @@ Q_SIGNALS: private: DirectShowEventLoop *m_loop; 
QAbstractVideoSurface *m_surface; - VideoSurfaceFilter *m_filter; + IBaseFilter *m_filter; }; #endif diff --git a/src/plugins/directshow/player/player.pri b/src/plugins/directshow/player/player.pri index 8586ea5da..5ecb912b2 100644 --- a/src/plugins/directshow/player/player.pri +++ b/src/plugins/directshow/player/player.pri @@ -46,8 +46,11 @@ config_evr { include($$PWD/../../common/evr.pri) - HEADERS += $$PWD/directshowevrvideowindowcontrol.h - SOURCES += $$PWD/directshowevrvideowindowcontrol.cpp + HEADERS += \ + $$PWD/directshowevrvideowindowcontrol.h + + SOURCES += \ + $$PWD/directshowevrvideowindowcontrol.cpp } config_wshellitem { diff --git a/src/plugins/gstreamer/camerabin/camerabin.pro b/src/plugins/gstreamer/camerabin/camerabin.pro index 80d992960..b807071f2 100644 --- a/src/plugins/gstreamer/camerabin/camerabin.pro +++ b/src/plugins/gstreamer/camerabin/camerabin.pro @@ -89,6 +89,15 @@ config_gstreamer_encodingprofiles { DEFINES += HAVE_GST_ENCODING_PROFILES } +config_linux_v4l: { + DEFINES += USE_V4L + + HEADERS += \ + $$PWD/camerabinv4limageprocessing.h + + SOURCES += \ + $$PWD/camerabinv4limageprocessing.cpp +} + OTHER_FILES += \ camerabin.json - diff --git a/src/plugins/gstreamer/camerabin/camerabinimageprocessing.cpp b/src/plugins/gstreamer/camerabin/camerabinimageprocessing.cpp index 633662c70..2d1659900 100644 --- a/src/plugins/gstreamer/camerabin/camerabinimageprocessing.cpp +++ b/src/plugins/gstreamer/camerabin/camerabinimageprocessing.cpp @@ -34,6 +34,10 @@ #include "camerabinimageprocessing.h" #include "camerabinsession.h" +#ifdef USE_V4L +#include "camerabinv4limageprocessing.h" +#endif + #if GST_CHECK_VERSION(1,0,0) # include <gst/video/colorbalance.h> #else @@ -43,9 +47,12 @@ QT_BEGIN_NAMESPACE CameraBinImageProcessing::CameraBinImageProcessing(CameraBinSession *session) - :QCameraImageProcessingControl(session), - m_session(session), - m_whiteBalanceMode(QCameraImageProcessing::WhiteBalanceAuto) + : QCameraImageProcessingControl(session) + , 
m_session(session) + , m_whiteBalanceMode(QCameraImageProcessing::WhiteBalanceAuto) +#ifdef USE_V4L + , m_v4lImageControl(Q_NULLPTR) +#endif { #ifdef HAVE_GST_PHOTOGRAPHY if (m_session->photography()) { @@ -83,6 +90,12 @@ CameraBinImageProcessing::CameraBinImageProcessing(CameraBinSession *session) #endif #endif +#ifdef USE_V4L + m_v4lImageControl = new CameraBinV4LImageProcessing(m_session); + connect(m_session, &CameraBinSession::statusChanged, + m_v4lImageControl, &CameraBinV4LImageProcessing::updateParametersInfo); +#endif + updateColorBalanceValues(); } @@ -160,7 +173,7 @@ QCameraImageProcessing::WhiteBalanceMode CameraBinImageProcessing::whiteBalanceM return m_whiteBalanceMode; } -void CameraBinImageProcessing::setWhiteBalanceMode(QCameraImageProcessing::WhiteBalanceMode mode) +bool CameraBinImageProcessing::setWhiteBalanceMode(QCameraImageProcessing::WhiteBalanceMode mode) { #ifdef HAVE_GST_PHOTOGRAPHY if (isWhiteBalanceModeSupported(mode)) { @@ -172,11 +185,13 @@ void CameraBinImageProcessing::setWhiteBalanceMode(QCameraImageProcessing::White #endif { unlockWhiteBalance(); + return true; } } #else Q_UNUSED(mode); #endif + return false; } bool CameraBinImageProcessing::isWhiteBalanceModeSupported(QCameraImageProcessing::WhiteBalanceMode mode) const @@ -184,7 +199,8 @@ bool CameraBinImageProcessing::isWhiteBalanceModeSupported(QCameraImageProcessin #ifdef HAVE_GST_PHOTOGRAPHY return m_mappedWbValues.values().contains(mode); #else - return mode == QCameraImageProcessing::WhiteBalanceAuto; + Q_UNUSED(mode); + return false; #endif } @@ -192,16 +208,24 @@ bool CameraBinImageProcessing::isParameterSupported(QCameraImageProcessingContro { #ifdef HAVE_GST_PHOTOGRAPHY if (parameter == QCameraImageProcessingControl::WhiteBalancePreset - || parameter == QCameraImageProcessingControl::ColorFilter) - return m_session->photography(); + || parameter == QCameraImageProcessingControl::ColorFilter) { + if (m_session->photography()) + return true; + } #endif if (parameter == 
QCameraImageProcessingControl::Contrast || parameter == QCameraImageProcessingControl::Brightness || parameter == QCameraImageProcessingControl::Saturation) { - return GST_IS_COLOR_BALANCE(m_session->cameraBin()); + if (GST_IS_COLOR_BALANCE(m_session->cameraBin())) + return true; } +#ifdef USE_V4L + if (m_v4lImageControl->isParameterSupported(parameter)) + return true; +#endif + return false; } @@ -210,10 +234,39 @@ bool CameraBinImageProcessing::isParameterValueSupported(QCameraImageProcessingC switch (parameter) { case ContrastAdjustment: case BrightnessAdjustment: - case SaturationAdjustment: - return GST_IS_COLOR_BALANCE(m_session->cameraBin()) && qAbs(value.toReal()) <= 1.0; - case WhiteBalancePreset: - return isWhiteBalanceModeSupported(value.value<QCameraImageProcessing::WhiteBalanceMode>()); + case SaturationAdjustment: { + const bool isGstColorBalanceValueSupported = GST_IS_COLOR_BALANCE(m_session->cameraBin()) + && qAbs(value.toReal()) <= 1.0; +#ifdef USE_V4L + if (!isGstColorBalanceValueSupported) + return m_v4lImageControl->isParameterValueSupported(parameter, value); +#endif + return isGstColorBalanceValueSupported; + } + case SharpeningAdjustment: { +#ifdef USE_V4L + return m_v4lImageControl->isParameterValueSupported(parameter, value); +#else + return false; +#endif + } + case WhiteBalancePreset: { + const QCameraImageProcessing::WhiteBalanceMode mode = + value.value<QCameraImageProcessing::WhiteBalanceMode>(); + const bool isPhotographyWhiteBalanceSupported = isWhiteBalanceModeSupported(mode); +#ifdef USE_V4L + if (!isPhotographyWhiteBalanceSupported) + return m_v4lImageControl->isParameterValueSupported(parameter, value); +#endif + return isPhotographyWhiteBalanceSupported; + } + case ColorTemperature: { +#ifdef USE_V4L + return m_v4lImageControl->isParameterValueSupported(parameter, value); +#else + return false; +#endif + } case ColorFilter: { const QCameraImageProcessing::ColorFilter filter = value.value<QCameraImageProcessing::ColorFilter>(); 
#ifdef HAVE_GST_PHOTOGRAPHY @@ -233,8 +286,23 @@ QVariant CameraBinImageProcessing::parameter( QCameraImageProcessingControl::ProcessingParameter parameter) const { switch (parameter) { - case QCameraImageProcessingControl::WhiteBalancePreset: - return QVariant::fromValue<QCameraImageProcessing::WhiteBalanceMode>(whiteBalanceMode()); + case QCameraImageProcessingControl::WhiteBalancePreset: { + const QCameraImageProcessing::WhiteBalanceMode mode = whiteBalanceMode(); +#ifdef USE_V4L + if (mode == QCameraImageProcessing::WhiteBalanceAuto + || mode == QCameraImageProcessing::WhiteBalanceManual) { + return m_v4lImageControl->parameter(parameter); + } +#endif + return QVariant::fromValue<QCameraImageProcessing::WhiteBalanceMode>(mode); + } + case QCameraImageProcessingControl::ColorTemperature: { +#ifdef USE_V4L + return m_v4lImageControl->parameter(parameter); +#else + return QVariant(); +#endif + } case QCameraImageProcessingControl::ColorFilter: #ifdef HAVE_GST_PHOTOGRAPHY if (GstPhotography *photography = m_session->photography()) { @@ -249,29 +317,79 @@ QVariant CameraBinImageProcessing::parameter( } #endif return QVariant::fromValue(QCameraImageProcessing::ColorFilterNone); - default: - return m_values.contains(parameter) + default: { + const bool isGstParameterSupported = m_values.contains(parameter); +#ifdef USE_V4L + if (!isGstParameterSupported) { + if (parameter == QCameraImageProcessingControl::BrightnessAdjustment + || parameter == QCameraImageProcessingControl::ContrastAdjustment + || parameter == QCameraImageProcessingControl::SaturationAdjustment + || parameter == QCameraImageProcessingControl::SharpeningAdjustment) { + return m_v4lImageControl->parameter(parameter); + } + } +#endif + return isGstParameterSupported ? 
QVariant(m_values.value(parameter)) : QVariant(); } + } } void CameraBinImageProcessing::setParameter(QCameraImageProcessingControl::ProcessingParameter parameter, const QVariant &value) { switch (parameter) { - case ContrastAdjustment: - setColorBalanceValue("contrast", value.toReal()); + case ContrastAdjustment: { + if (!setColorBalanceValue("contrast", value.toReal())) { +#ifdef USE_V4L + m_v4lImageControl->setParameter(parameter, value); +#endif + } + } break; - case BrightnessAdjustment: - setColorBalanceValue("brightness", value.toReal()); + case BrightnessAdjustment: { + if (!setColorBalanceValue("brightness", value.toReal())) { +#ifdef USE_V4L + m_v4lImageControl->setParameter(parameter, value); +#endif + } + } + break; + case SaturationAdjustment: { + if (!setColorBalanceValue("saturation", value.toReal())) { +#ifdef USE_V4L + m_v4lImageControl->setParameter(parameter, value); +#endif + } + } + break; + case SharpeningAdjustment: { +#ifdef USE_V4L + m_v4lImageControl->setParameter(parameter, value); +#endif + } break; - case SaturationAdjustment: - setColorBalanceValue("saturation", value.toReal()); + case WhiteBalancePreset: { + if (!setWhiteBalanceMode(value.value<QCameraImageProcessing::WhiteBalanceMode>())) { +#ifdef USE_V4L + const QCameraImageProcessing::WhiteBalanceMode mode = + value.value<QCameraImageProcessing::WhiteBalanceMode>(); + if (mode == QCameraImageProcessing::WhiteBalanceAuto + || mode == QCameraImageProcessing::WhiteBalanceManual) { + m_v4lImageControl->setParameter(parameter, value); + return; + } +#endif + } + } break; - case WhiteBalancePreset: - setWhiteBalanceMode(value.value<QCameraImageProcessing::WhiteBalanceMode>()); + case QCameraImageProcessingControl::ColorTemperature: { +#ifdef USE_V4L + m_v4lImageControl->setParameter(parameter, value); +#endif break; + } case QCameraImageProcessingControl::ColorFilter: #ifdef HAVE_GST_PHOTOGRAPHY if (GstPhotography *photography = m_session->photography()) { diff --git 
a/src/plugins/gstreamer/camerabin/camerabinimageprocessing.h b/src/plugins/gstreamer/camerabin/camerabinimageprocessing.h index 9f280c485..6b2a114ec 100644 --- a/src/plugins/gstreamer/camerabin/camerabinimageprocessing.h +++ b/src/plugins/gstreamer/camerabin/camerabinimageprocessing.h @@ -50,6 +50,10 @@ typedef GstColourToneMode GstPhotographyColorToneMode; QT_BEGIN_NAMESPACE +#ifdef USE_V4L +class CameraBinV4LImageProcessing; +#endif + class CameraBinSession; class CameraBinImageProcessing : public QCameraImageProcessingControl @@ -61,7 +65,7 @@ public: virtual ~CameraBinImageProcessing(); QCameraImageProcessing::WhiteBalanceMode whiteBalanceMode() const; - void setWhiteBalanceMode(QCameraImageProcessing::WhiteBalanceMode mode); + bool setWhiteBalanceMode(QCameraImageProcessing::WhiteBalanceMode mode); bool isWhiteBalanceModeSupported(QCameraImageProcessing::WhiteBalanceMode mode) const; bool isParameterSupported(ProcessingParameter) const; @@ -86,6 +90,10 @@ private: QMap<QCameraImageProcessing::ColorFilter, GstPhotographyColorToneMode> m_filterMap; #endif QCameraImageProcessing::WhiteBalanceMode m_whiteBalanceMode; + +#ifdef USE_V4L + CameraBinV4LImageProcessing *m_v4lImageControl; +#endif }; QT_END_NAMESPACE diff --git a/src/plugins/gstreamer/camerabin/camerabinsession.h b/src/plugins/gstreamer/camerabin/camerabinsession.h index 1c5c9899d..dda900a8b 100644 --- a/src/plugins/gstreamer/camerabin/camerabinsession.h +++ b/src/plugins/gstreamer/camerabin/camerabinsession.h @@ -153,6 +153,8 @@ public: bool isMuted() const; + QString device() const { return m_inputDevice; } + bool processSyncMessage(const QGstreamerMessage &message); bool processBusMessage(const QGstreamerMessage &message); diff --git a/src/plugins/gstreamer/camerabin/camerabinv4limageprocessing.cpp b/src/plugins/gstreamer/camerabin/camerabinv4limageprocessing.cpp new file mode 100644 index 000000000..bf51cbfd0 --- /dev/null +++ b/src/plugins/gstreamer/camerabin/camerabinv4limageprocessing.cpp @@ -0,0 
+1,304 @@ +/**************************************************************************** +** +** Copyright (C) 2015 Denis Shienkov <denis.shienkov@gmail.com> +** Contact: http://www.qt.io/licensing/ +** +** This file is part of the Qt Toolkit. +** +** $QT_BEGIN_LICENSE:LGPL21$ +** Commercial License Usage +** Licensees holding valid commercial Qt licenses may use this file in +** accordance with the commercial license agreement provided with the +** Software or, alternatively, in accordance with the terms contained in +** a written agreement between you and The Qt Company. For licensing terms +** and conditions see http://www.qt.io/terms-conditions. For further +** information use the contact form at http://www.qt.io/contact-us. +** +** GNU Lesser General Public License Usage +** Alternatively, this file may be used under the terms of the GNU Lesser +** General Public License version 2.1 or version 3 as published by the Free +** Software Foundation and appearing in the file LICENSE.LGPLv21 and +** LICENSE.LGPLv3 included in the packaging of this file. Please review the +** following information to ensure the GNU Lesser General Public License +** requirements will be met: https://www.gnu.org/licenses/lgpl.html and +** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. +** +** As a special exception, The Qt Company gives you certain additional +** rights. These rights are described in The Qt Company LGPL Exception +** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. 
+** +** $QT_END_LICENSE$ +** +****************************************************************************/ + +#include "camerabinv4limageprocessing.h" +#include "camerabinsession.h" + +#include <QDebug> + +#include <private/qcore_unix_p.h> +#include <linux/videodev2.h> + +QT_BEGIN_NAMESPACE + +CameraBinV4LImageProcessing::CameraBinV4LImageProcessing(CameraBinSession *session) + : QCameraImageProcessingControl(session) + , m_session(session) +{ +} + +CameraBinV4LImageProcessing::~CameraBinV4LImageProcessing() +{ +} + +bool CameraBinV4LImageProcessing::isParameterSupported( + ProcessingParameter parameter) const +{ + return m_parametersInfo.contains(parameter); +} + +bool CameraBinV4LImageProcessing::isParameterValueSupported( + ProcessingParameter parameter, const QVariant &value) const +{ + QMap<ProcessingParameter, SourceParameterValueInfo>::const_iterator sourceValueInfo = + m_parametersInfo.constFind(parameter); + if (sourceValueInfo == m_parametersInfo.constEnd()) + return false; + + switch (parameter) { + + case QCameraImageProcessingControl::WhiteBalancePreset: { + const QCameraImageProcessing::WhiteBalanceMode checkedValue = + value.value<QCameraImageProcessing::WhiteBalanceMode>(); + const QCameraImageProcessing::WhiteBalanceMode firstAllowedValue = + (*sourceValueInfo).minimumValue ? QCameraImageProcessing::WhiteBalanceAuto + : QCameraImageProcessing::WhiteBalanceManual; + const QCameraImageProcessing::WhiteBalanceMode secondAllowedValue = + (*sourceValueInfo).maximumValue ? 
QCameraImageProcessing::WhiteBalanceAuto + : QCameraImageProcessing::WhiteBalanceManual; + if (checkedValue != firstAllowedValue + && checkedValue != secondAllowedValue) { + return false; + } + } + break; + + case QCameraImageProcessingControl::ColorTemperature: { + const qint32 checkedValue = value.toInt(); + if (checkedValue < (*sourceValueInfo).minimumValue + || checkedValue > (*sourceValueInfo).maximumValue) { + return false; + } + } + break; + + case QCameraImageProcessingControl::ContrastAdjustment: // falling back + case QCameraImageProcessingControl::SaturationAdjustment: // falling back + case QCameraImageProcessingControl::BrightnessAdjustment: // falling back + case QCameraImageProcessingControl::SharpeningAdjustment: { + const qint32 sourceValue = sourceImageProcessingParameterValue( + value.toReal(), (*sourceValueInfo)); + if (sourceValue < (*sourceValueInfo).minimumValue + || sourceValue > (*sourceValueInfo).maximumValue) { + return false; + } + } + break; + + default: + return false; + } + + return true; +} + +QVariant CameraBinV4LImageProcessing::parameter( + ProcessingParameter parameter) const +{ + QMap<ProcessingParameter, SourceParameterValueInfo>::const_iterator sourceValueInfo = + m_parametersInfo.constFind(parameter); + if (sourceValueInfo == m_parametersInfo.constEnd()) { + qWarning() << "Unable to get the parameter value: the parameter is not supported."; + return QVariant(); + } + + const QString deviceName = m_session->device(); + const int fd = qt_safe_open(deviceName.toLocal8Bit().constData(), O_RDONLY); + if (fd == -1) { + qWarning() << "Unable to open the camera" << deviceName + << "for read to get the parameter value:" << qt_error_string(errno); + return QVariant(); + } + + struct v4l2_control control; + ::memset(&control, 0, sizeof(control)); + control.id = (*sourceValueInfo).cid; + + const bool ret = (::ioctl(fd, VIDIOC_G_CTRL, &control) == 0); + + qt_safe_close(fd); + + if (!ret) { + qWarning() << "Unable to get the parameter 
value:" << qt_error_string(errno); + return QVariant(); + } + + switch (parameter) { + + case QCameraImageProcessingControl::WhiteBalancePreset: + return QVariant::fromValue<QCameraImageProcessing::WhiteBalanceMode>( + control.value ? QCameraImageProcessing::WhiteBalanceAuto + : QCameraImageProcessing::WhiteBalanceManual); + + case QCameraImageProcessingControl::ColorTemperature: + return QVariant::fromValue<qint32>(control.value); + + case QCameraImageProcessingControl::ContrastAdjustment: // falling back + case QCameraImageProcessingControl::SaturationAdjustment: // falling back + case QCameraImageProcessingControl::BrightnessAdjustment: // falling back + case QCameraImageProcessingControl::SharpeningAdjustment: { + return scaledImageProcessingParameterValue( + control.value, (*sourceValueInfo)); + } + + default: + return QVariant(); + } +} + +void CameraBinV4LImageProcessing::setParameter( + ProcessingParameter parameter, const QVariant &value) +{ + QMap<ProcessingParameter, SourceParameterValueInfo>::const_iterator sourceValueInfo = + m_parametersInfo.constFind(parameter); + if (sourceValueInfo == m_parametersInfo.constEnd()) { + qWarning() << "Unable to set the parameter value: the parameter is not supported."; + return; + } + + const QString deviceName = m_session->device(); + const int fd = qt_safe_open(deviceName.toLocal8Bit().constData(), O_WRONLY); + if (fd == -1) { + qWarning() << "Unable to open the camera" << deviceName + << "for write to set the parameter value:" << qt_error_string(errno); + return; + } + + struct v4l2_control control; + ::memset(&control, 0, sizeof(control)); + control.id = (*sourceValueInfo).cid; + + switch (parameter) { + + case QCameraImageProcessingControl::WhiteBalancePreset: { + const QCameraImageProcessing::WhiteBalanceMode m = + value.value<QCameraImageProcessing::WhiteBalanceMode>(); + if (m != QCameraImageProcessing::WhiteBalanceAuto + && m != QCameraImageProcessing::WhiteBalanceManual) + return; + + control.value = (m == 
QCameraImageProcessing::WhiteBalanceAuto) ? true : false; + } + break; + + case QCameraImageProcessingControl::ColorTemperature: + control.value = value.toInt(); + break; + + case QCameraImageProcessingControl::ContrastAdjustment: // falling back + case QCameraImageProcessingControl::SaturationAdjustment: // falling back + case QCameraImageProcessingControl::BrightnessAdjustment: // falling back + case QCameraImageProcessingControl::SharpeningAdjustment: + control.value = sourceImageProcessingParameterValue( + value.toReal(), (*sourceValueInfo)); + break; + + default: + return; + } + + if (::ioctl(fd, VIDIOC_S_CTRL, &control) != 0) + qWarning() << "Unable to set the parameter value:" << qt_error_string(errno); + + qt_safe_close(fd); +} + +void CameraBinV4LImageProcessing::updateParametersInfo( + QCamera::Status cameraStatus) +{ + if (cameraStatus == QCamera::UnloadedStatus) + m_parametersInfo.clear(); + else if (cameraStatus == QCamera::LoadedStatus) { + const QString deviceName = m_session->device(); + const int fd = qt_safe_open(deviceName.toLocal8Bit().constData(), O_RDONLY); + if (fd == -1) { + qWarning() << "Unable to open the camera" << deviceName + << "for read to query the parameter info:" << qt_error_string(errno); + return; + } + + static const struct SupportedParameterEntry { + quint32 cid; + QCameraImageProcessingControl::ProcessingParameter parameter; + } supportedParametersEntries[] = { + { V4L2_CID_AUTO_WHITE_BALANCE, QCameraImageProcessingControl::WhiteBalancePreset }, + { V4L2_CID_WHITE_BALANCE_TEMPERATURE, QCameraImageProcessingControl::ColorTemperature }, + { V4L2_CID_CONTRAST, QCameraImageProcessingControl::ContrastAdjustment }, + { V4L2_CID_SATURATION, QCameraImageProcessingControl::SaturationAdjustment }, + { V4L2_CID_BRIGHTNESS, QCameraImageProcessingControl::BrightnessAdjustment }, + { V4L2_CID_SHARPNESS, QCameraImageProcessingControl::SharpeningAdjustment } + }; + + for (int i = 0; i < int(sizeof(supportedParametersEntries) / 
sizeof(SupportedParameterEntry)); ++i) { + struct v4l2_queryctrl queryControl; + ::memset(&queryControl, 0, sizeof(queryControl)); + queryControl.id = supportedParametersEntries[i].cid; + + if (::ioctl(fd, VIDIOC_QUERYCTRL, &queryControl) != 0) { + qWarning() << "Unable to query the parameter info:" << qt_error_string(errno); + continue; + } + + SourceParameterValueInfo sourceValueInfo; + sourceValueInfo.cid = queryControl.id; + sourceValueInfo.defaultValue = queryControl.default_value; + sourceValueInfo.maximumValue = queryControl.maximum; + sourceValueInfo.minimumValue = queryControl.minimum; + + m_parametersInfo.insert(supportedParametersEntries[i].parameter, sourceValueInfo); + } + + qt_safe_close(fd); + } +} + +qreal CameraBinV4LImageProcessing::scaledImageProcessingParameterValue( + qint32 sourceValue, const SourceParameterValueInfo &sourceValueInfo) +{ + if (sourceValue == sourceValueInfo.defaultValue) { + return 0.0f; + } else if (sourceValue < sourceValueInfo.defaultValue) { + return ((sourceValue - sourceValueInfo.minimumValue) + / qreal(sourceValueInfo.defaultValue - sourceValueInfo.minimumValue)) + + (-1.0f); + } else { + return ((sourceValue - sourceValueInfo.defaultValue) + / qreal(sourceValueInfo.maximumValue - sourceValueInfo.defaultValue)); + } +} + +qint32 CameraBinV4LImageProcessing::sourceImageProcessingParameterValue( + qreal scaledValue, const SourceParameterValueInfo &valueRange) +{ + if (qFuzzyIsNull(scaledValue)) { + return valueRange.defaultValue; + } else if (scaledValue < 0.0f) { + return ((scaledValue - (-1.0f)) * (valueRange.defaultValue - valueRange.minimumValue)) + + valueRange.minimumValue; + } else { + return (scaledValue * (valueRange.maximumValue - valueRange.defaultValue)) + + valueRange.defaultValue; + } +} + +QT_END_NAMESPACE diff --git a/src/plugins/gstreamer/camerabin/camerabinv4limageprocessing.h b/src/plugins/gstreamer/camerabin/camerabinv4limageprocessing.h new file mode 100644 index 000000000..7961d6c0d --- /dev/null +++ 
b/src/plugins/gstreamer/camerabin/camerabinv4limageprocessing.h @@ -0,0 +1,84 @@ +/**************************************************************************** +** +** Copyright (C) 2015 Denis Shienkov <denis.shienkov@gmail.com> +** Contact: http://www.qt.io/licensing/ +** +** This file is part of the Qt Toolkit. +** +** $QT_BEGIN_LICENSE:LGPL21$ +** Commercial License Usage +** Licensees holding valid commercial Qt licenses may use this file in +** accordance with the commercial license agreement provided with the +** Software or, alternatively, in accordance with the terms contained in +** a written agreement between you and The Qt Company. For licensing terms +** and conditions see http://www.qt.io/terms-conditions. For further +** information use the contact form at http://www.qt.io/contact-us. +** +** GNU Lesser General Public License Usage +** Alternatively, this file may be used under the terms of the GNU Lesser +** General Public License version 2.1 or version 3 as published by the Free +** Software Foundation and appearing in the file LICENSE.LGPLv21 and +** LICENSE.LGPLv3 included in the packaging of this file. Please review the +** following information to ensure the GNU Lesser General Public License +** requirements will be met: https://www.gnu.org/licenses/lgpl.html and +** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. +** +** As a special exception, The Qt Company gives you certain additional +** rights. These rights are described in The Qt Company LGPL Exception +** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. 
+** +** $QT_END_LICENSE$ +** +****************************************************************************/ + +#ifndef CAMERABINV4LIMAGEPROCESSINGCONTROL_H +#define CAMERABINV4LIMAGEPROCESSINGCONTROL_H + +#include <qcamera.h> +#include <qcameraimageprocessingcontrol.h> + +QT_BEGIN_NAMESPACE + +class CameraBinSession; + +class CameraBinV4LImageProcessing : public QCameraImageProcessingControl +{ + Q_OBJECT + +public: + CameraBinV4LImageProcessing(CameraBinSession *session); + virtual ~CameraBinV4LImageProcessing(); + + bool isParameterSupported(ProcessingParameter) const; + bool isParameterValueSupported(ProcessingParameter parameter, const QVariant &value) const; + QVariant parameter(ProcessingParameter parameter) const; + void setParameter(ProcessingParameter parameter, const QVariant &value); + +public slots: + void updateParametersInfo(QCamera::Status cameraStatus); + +private: + struct SourceParameterValueInfo { + SourceParameterValueInfo() + : cid(0) + { + } + + qint32 defaultValue; + qint32 minimumValue; + qint32 maximumValue; + quint32 cid; // V4L control id + }; + + static qreal scaledImageProcessingParameterValue( + qint32 sourceValue, const SourceParameterValueInfo &sourceValueInfo); + static qint32 sourceImageProcessingParameterValue( + qreal scaledValue, const SourceParameterValueInfo &valueRange); +private: + CameraBinSession *m_session; + QMap<ProcessingParameter, SourceParameterValueInfo> m_parametersInfo; +}; + +QT_END_NAMESPACE + +#endif // CAMERABINV4LIMAGEPROCESSINGCONTROL_H diff --git a/src/plugins/opensles/opensles.pro b/src/plugins/opensles/opensles.pro index 53c5a120b..aa8e05444 100644 --- a/src/plugins/opensles/opensles.pro +++ b/src/plugins/opensles/opensles.pro @@ -1,5 +1,5 @@ TARGET = qtaudio_opensles -QT += multimedia-private +QT += multimedia-private core-private PLUGIN_TYPE = audio PLUGIN_CLASS_NAME = QOpenSLESPlugin diff --git a/src/plugins/opensles/qopenslesaudiooutput.cpp b/src/plugins/opensles/qopenslesaudiooutput.cpp index 
d17363d20..3af4e8bb2 100644 --- a/src/plugins/opensles/qopenslesaudiooutput.cpp +++ b/src/plugins/opensles/qopenslesaudiooutput.cpp @@ -42,13 +42,23 @@ #endif // ANDROID #define BUFFER_COUNT 2 -#define DEFAULT_PERIOD_TIME_MS 50 -#define MINIMUM_PERIOD_TIME_MS 5 #define EBASE 2.302585093 #define LOG10(x) qLn(x)/qreal(EBASE) QT_BEGIN_NAMESPACE +static inline void openSlDebugInfo() +{ + const QAudioFormat &format = QAudioDeviceInfo::defaultOutputDevice().preferredFormat(); + qDebug() << "======= OpenSL ES Device info =======" + << "\nSupports low-latency playback: " << (QOpenSLESEngine::supportsLowLatency() ? "YES" : "NO") + << "\nPreferred sample rate: " << QOpenSLESEngine::getOutputValue(QOpenSLESEngine::SampleRate, -1) + << "\nFrames per buffer: " << QOpenSLESEngine::getOutputValue(QOpenSLESEngine::FramesPerBuffer, -1) + << "\nPreferred Format: " << format + << "\nLow-latency buffer size: " << QOpenSLESEngine::getLowLatencyBufferSize(format) + << "\nDefault buffer size: " << QOpenSLESEngine::getDefaultBufferSize(format); +} + QMap<QString, qint32> QOpenSLESAudioOutput::m_categories; QOpenSLESAudioOutput::QOpenSLESAudioOutput(const QByteArray &device) @@ -531,13 +541,17 @@ bool QOpenSLESAudioOutput::preparePlayer() setVolume(m_volume); + const int lowLatencyBufferSize = QOpenSLESEngine::getLowLatencyBufferSize(m_format); + const int defaultBufferSize = QOpenSLESEngine::getDefaultBufferSize(m_format); + // Buffer size if (m_bufferSize <= 0) { - m_bufferSize = m_format.bytesForDuration(DEFAULT_PERIOD_TIME_MS * 1000); - } else { - const int minimumBufSize = m_format.bytesForDuration(MINIMUM_PERIOD_TIME_MS * 1000); - if (m_bufferSize < minimumBufSize) - m_bufferSize = minimumBufSize; + m_bufferSize = defaultBufferSize; + } else if (QOpenSLESEngine::supportsLowLatency()) { + if (m_bufferSize < lowLatencyBufferSize) + m_bufferSize = lowLatencyBufferSize; + } else if (m_bufferSize < defaultBufferSize) { + m_bufferSize = defaultBufferSize; } m_periodSize = m_bufferSize; @@ 
-598,6 +612,9 @@ void QOpenSLESAudioOutput::stopPlayer() void QOpenSLESAudioOutput::startPlayer() { + if (QOpenSLESEngine::printDebugInfo()) + openSlDebugInfo(); + if (SL_RESULT_SUCCESS != (*m_playItf)->SetPlayState(m_playItf, SL_PLAYSTATE_PLAYING)) { setError(QAudio::FatalError); destroyPlayer(); diff --git a/src/plugins/opensles/qopenslesdeviceinfo.cpp b/src/plugins/opensles/qopenslesdeviceinfo.cpp index c93a66e1e..0dd8183ef 100644 --- a/src/plugins/opensles/qopenslesdeviceinfo.cpp +++ b/src/plugins/opensles/qopenslesdeviceinfo.cpp @@ -61,7 +61,7 @@ QAudioFormat QOpenSLESDeviceInfo::preferredFormat() const format.setCodec(QStringLiteral("audio/pcm")); format.setSampleSize(16); format.setSampleType(QAudioFormat::SignedInt); - format.setSampleRate(44100); + format.setSampleRate(QOpenSLESEngine::getOutputValue(QOpenSLESEngine::SampleRate, 48000)); format.setChannelCount(m_mode == QAudio::AudioInput ? 1 : 2); return format; } diff --git a/src/plugins/opensles/qopenslesengine.cpp b/src/plugins/opensles/qopenslesengine.cpp index 7689533e6..a30c90d84 100644 --- a/src/plugins/opensles/qopenslesengine.cpp +++ b/src/plugins/opensles/qopenslesengine.cpp @@ -38,8 +38,13 @@ #ifdef ANDROID #include <SLES/OpenSLES_Android.h> +#include <QtCore/private/qjnihelpers_p.h> +#include <QtCore/private/qjni_p.h> #endif +#define MINIMUM_PERIOD_TIME_MS 5 +#define DEFAULT_PERIOD_TIME_MS 50 + #define CheckError(message) if (result != SL_RESULT_SUCCESS) { qWarning(message); return; } Q_GLOBAL_STATIC(QOpenSLESEngine, openslesEngine); @@ -130,6 +135,151 @@ QList<int> QOpenSLESEngine::supportedSampleRates(QAudio::Mode mode) const } } +int QOpenSLESEngine::getOutputValue(QOpenSLESEngine::OutputValue type, int defaultValue) +{ +#if defined(Q_OS_ANDROID) && !defined(Q_OS_ANDROID_NO_SDK) + static int sampleRate = 0; + static int framesPerBuffer = 0; + static const int sdkVersion = QtAndroidPrivate::androidSdkVersion(); + + if (sdkVersion < 17) // getProperty() was added in API level 17... 
+ return defaultValue; + + if (type == FramesPerBuffer && framesPerBuffer != 0) + return framesPerBuffer; + + if (type == SampleRate && sampleRate != 0) + return sampleRate; + + QJNIObjectPrivate ctx(QtAndroidPrivate::activity()); + if (!ctx.isValid()) + return defaultValue; + + + QJNIObjectPrivate audioServiceString = ctx.getStaticObjectField("android/content/Context", + "AUDIO_SERVICE", + "Ljava/lang/String;"); + QJNIObjectPrivate am = ctx.callObjectMethod("getSystemService", + "(Ljava/lang/String;)Ljava/lang/Object;", + audioServiceString.object()); + if (!am.isValid()) + return defaultValue; + + QJNIObjectPrivate sampleRateField = QJNIObjectPrivate::getStaticObjectField("android/media/AudioManager", + "PROPERTY_OUTPUT_SAMPLE_RATE", + "Ljava/lang/String;"); + QJNIObjectPrivate framesPerBufferField = QJNIObjectPrivate::getStaticObjectField("android/media/AudioManager", + "PROPERTY_OUTPUT_FRAMES_PER_BUFFER", + "Ljava/lang/String;"); + + QJNIObjectPrivate sampleRateString = am.callObjectMethod("getProperty", + "(Ljava/lang/String;)Ljava/lang/String;", + sampleRateField.object()); + QJNIObjectPrivate framesPerBufferString = am.callObjectMethod("getProperty", + "(Ljava/lang/String;)Ljava/lang/String;", + framesPerBufferField.object()); + + if (!sampleRateString.isValid() || !framesPerBufferString.isValid()) + return defaultValue; + + framesPerBuffer = framesPerBufferString.toString().toInt(); + sampleRate = sampleRateString.toString().toInt(); + + if (type == FramesPerBuffer) + return framesPerBuffer; + + if (type == SampleRate) + return sampleRate; + +#endif // Q_OS_ANDROID + + return defaultValue; +} + +int QOpenSLESEngine::getDefaultBufferSize(const QAudioFormat &format) +{ +#if defined(Q_OS_ANDROID) && !defined(Q_OS_ANDROID_NO_SDK) + if (!format.isValid()) + return 0; + + const int channelConfig = [&format]() -> int + { + if (format.channelCount() == 1) + return 4; /* MONO */ + else if (format.channelCount() == 2) + return 12; /* STEREO */ + else if 
(format.channelCount() > 2) + return 1052; /* SURROUND */ + else + return 1; /* DEFAULT */ + }(); + + const int audioFormat = [&format]() -> int + { + if (format.sampleType() == QAudioFormat::Float && QtAndroidPrivate::androidSdkVersion() >= 21) + return 4; /* PCM_FLOAT */ + else if (format.sampleSize() == 8) + return 3; /* PCM_8BIT */ + else if (format.sampleSize() == 16) + return 2; /* PCM_16BIT*/ + else + return 1; /* DEFAULT */ + }(); + + const int sampleRate = format.sampleRate(); + return QJNIObjectPrivate::callStaticMethod<jint>("android/media/AudioTrack", + "getMinBufferSize", + "(III)I", + sampleRate, + channelConfig, + audioFormat); +#else + return format.bytesForDuration(DEFAULT_PERIOD_TIME_MS); +#endif // Q_OS_ANDROID +} + +int QOpenSLESEngine::getLowLatencyBufferSize(const QAudioFormat &format) +{ + return format.bytesForFrames(QOpenSLESEngine::getOutputValue(QOpenSLESEngine::FramesPerBuffer, + format.framesForDuration(MINIMUM_PERIOD_TIME_MS))); +} + +bool QOpenSLESEngine::supportsLowLatency() +{ +#if defined(Q_OS_ANDROID) && !defined(Q_OS_ANDROID_NO_SDK) + static int isSupported = -1; + + if (isSupported != -1) + return (isSupported == 1); + + QJNIObjectPrivate ctx(QtAndroidPrivate::activity()); + if (!ctx.isValid()) + return false; + + QJNIObjectPrivate pm = ctx.callObjectMethod("getPackageManager", "()Landroid/content/pm/PackageManager;"); + if (!pm.isValid()) + return false; + + QJNIObjectPrivate audioFeatureField = QJNIObjectPrivate::getStaticObjectField("android/content/pm/PackageManager", + "FEATURE_AUDIO_LOW_LATENCY", + "Ljava/lang/String;"); + if (!audioFeatureField.isValid()) + return false; + + isSupported = pm.callMethod<jboolean>("hasSystemFeature", + "(Ljava/lang/String;)Z", + audioFeatureField.object()); + return (isSupported == 1); +#else + return true; +#endif // Q_OS_ANDROID +} + +bool QOpenSLESEngine::printDebugInfo() +{ + return qEnvironmentVariableIsSet("QT_OPENSL_INFO"); +} + void QOpenSLESEngine::checkSupportedInputFormats() { 
m_supportedInputChannelCounts = QList<int>() << 1; diff --git a/src/plugins/opensles/qopenslesengine.h b/src/plugins/opensles/qopenslesengine.h index 0c2042f50..cbc3ca115 100644 --- a/src/plugins/opensles/qopenslesengine.h +++ b/src/plugins/opensles/qopenslesengine.h @@ -45,6 +45,8 @@ QT_BEGIN_NAMESPACE class QOpenSLESEngine { public: + enum OutputValue { FramesPerBuffer, SampleRate }; + QOpenSLESEngine(); ~QOpenSLESEngine(); @@ -58,6 +60,12 @@ public: QList<int> supportedChannelCounts(QAudio::Mode mode) const; QList<int> supportedSampleRates(QAudio::Mode mode) const; + static int getOutputValue(OutputValue type, int defaultValue = 0); + static int getDefaultBufferSize(const QAudioFormat &format); + static int getLowLatencyBufferSize(const QAudioFormat &format); + static bool supportsLowLatency(); + static bool printDebugInfo(); + private: void checkSupportedInputFormats(); bool inputFormatIsSupported(SLDataFormat_PCM format); diff --git a/src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.cpp b/src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.cpp index 3ba640cd6..1cccbfa01 100644 --- a/src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.cpp +++ b/src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.cpp @@ -508,6 +508,9 @@ void MmRendererMediaPlayerControl::play() return; } + if (m_mediaStatus == QMediaPlayer::EndOfMedia) + m_position = 0; + setPositionInternal(m_position); setVolumeInternal(m_muted ? 
0 : m_volume); setPlaybackRateInternal(m_rate); diff --git a/src/plugins/winrt/qwinrtcameravideorenderercontrol.cpp b/src/plugins/winrt/qwinrtcameravideorenderercontrol.cpp index 7313ae7a2..fe07581c9 100644 --- a/src/plugins/winrt/qwinrtcameravideorenderercontrol.cpp +++ b/src/plugins/winrt/qwinrtcameravideorenderercontrol.cpp @@ -291,8 +291,9 @@ QWinRTCameraVideoRendererControl::QWinRTCameraVideoRendererControl(const QSize & HString deviceModel; hr = deviceInfo->get_SystemProductName(deviceModel.GetAddressOf()); Q_ASSERT_SUCCEEDED(hr); - // Blacklist Lumia 1520 - setBlitMode(blacklisted(L"RM-937", deviceModel) ? MediaFoundation : DirectVideo); + const bool blacklist = blacklisted(L"RM-1045", deviceModel) // Lumia 930 + || blacklisted(L"RM-937", deviceModel); // Lumia 1520 + setBlitMode(blacklist ? MediaFoundation : DirectVideo); #endif } diff --git a/src/plugins/wmf/mfactivate.h b/src/plugins/wmf/mfactivate.h index 878e30d4d..8b8e51b56 100644 --- a/src/plugins/wmf/mfactivate.h +++ b/src/plugins/wmf/mfactivate.h @@ -34,8 +34,6 @@ #ifndef MFACTIVATE_H #define MFACTIVATE_H -#include "mfglobal.h" - #include <mfidl.h> class MFAbstractActivate : public IMFActivate diff --git a/src/plugins/wmf/mfglobal.h b/src/plugins/wmf/mfglobal.h deleted file mode 100644 index 073f959f7..000000000 --- a/src/plugins/wmf/mfglobal.h +++ /dev/null @@ -1,149 +0,0 @@ -/**************************************************************************** -** -** Copyright (C) 2015 The Qt Company Ltd. -** Contact: http://www.qt.io/licensing/ -** -** This file is part of the Qt Toolkit. -** -** $QT_BEGIN_LICENSE:LGPL21$ -** Commercial License Usage -** Licensees holding valid commercial Qt licenses may use this file in -** accordance with the commercial license agreement provided with the -** Software or, alternatively, in accordance with the terms contained in -** a written agreement between you and The Qt Company. For licensing terms -** and conditions see http://www.qt.io/terms-conditions. 
For further -** information use the contact form at http://www.qt.io/contact-us. -** -** GNU Lesser General Public License Usage -** Alternatively, this file may be used under the terms of the GNU Lesser -** General Public License version 2.1 or version 3 as published by the Free -** Software Foundation and appearing in the file LICENSE.LGPLv21 and -** LICENSE.LGPLv3 included in the packaging of this file. Please review the -** following information to ensure the GNU Lesser General Public License -** requirements will be met: https://www.gnu.org/licenses/lgpl.html and -** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. -** -** As a special exception, The Qt Company gives you certain additional -** rights. These rights are described in The Qt Company LGPL Exception -** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. -** -** $QT_END_LICENSE$ -** -****************************************************************************/ - -#ifndef MFGLOBAL_H -#define MFGLOBAL_H - -#include <mfapi.h> -#include <mfidl.h> -#include <Mferror.h> - - -template<class T> -class AsyncCallback : public IMFAsyncCallback -{ -public: - typedef HRESULT (T::*InvokeFn)(IMFAsyncResult *asyncResult); - - AsyncCallback(T *parent, InvokeFn fn) : m_parent(parent), m_invokeFn(fn) - { - } - - // IUnknown - STDMETHODIMP QueryInterface(REFIID iid, void** ppv) - { - if (!ppv) - return E_POINTER; - - if (iid == __uuidof(IUnknown)) { - *ppv = static_cast<IUnknown*>(static_cast<IMFAsyncCallback*>(this)); - } else if (iid == __uuidof(IMFAsyncCallback)) { - *ppv = static_cast<IMFAsyncCallback*>(this); - } else { - *ppv = NULL; - return E_NOINTERFACE; - } - AddRef(); - return S_OK; - } - - STDMETHODIMP_(ULONG) AddRef() { - // Delegate to parent class. - return m_parent->AddRef(); - } - STDMETHODIMP_(ULONG) Release() { - // Delegate to parent class. 
- return m_parent->Release(); - } - - - // IMFAsyncCallback methods - STDMETHODIMP GetParameters(DWORD*, DWORD*) - { - // Implementation of this method is optional. - return E_NOTIMPL; - } - - STDMETHODIMP Invoke(IMFAsyncResult* asyncResult) - { - return (m_parent->*m_invokeFn)(asyncResult); - } - - T *m_parent; - InvokeFn m_invokeFn; -}; - -template <class T> void qt_wmf_safeRelease(T **ppT) -{ - if (*ppT) { - (*ppT)->Release(); - *ppT = NULL; - } -} - -template <class T> -void qt_wmf_copyComPointer(T* &dest, T *src) -{ - if (dest) - dest->Release(); - dest = src; - if (dest) - dest->AddRef(); -} - -HRESULT qt_wmf_getFourCC(IMFMediaType *type, DWORD *fourCC); -MFRatio qt_wmf_getPixelAspectRatio(IMFMediaType *type); -bool qt_wmf_areMediaTypesEqual(IMFMediaType *type1, IMFMediaType *type2); -HRESULT qt_wmf_validateVideoArea(const MFVideoArea& area, UINT32 width, UINT32 height); -bool qt_wmf_isSampleTimePassed(IMFClock *clock, IMFSample *sample); - -inline float qt_wmf_MFOffsetToFloat(const MFOffset& offset) -{ - return offset.value + (float(offset.fract) / 65536); -} - -inline MFOffset qt_wmf_makeMFOffset(float v) -{ - MFOffset offset; - offset.value = short(v); - offset.fract = WORD(65536 * (v-offset.value)); - return offset; -} - -inline MFVideoArea qt_wmf_makeMFArea(float x, float y, DWORD width, DWORD height) -{ - MFVideoArea area; - area.OffsetX = qt_wmf_makeMFOffset(x); - area.OffsetY = qt_wmf_makeMFOffset(y); - area.Area.cx = width; - area.Area.cy = height; - return area; -} - -inline HRESULT qt_wmf_getFrameRate(IMFMediaType *pType, MFRatio *pRatio) -{ - return MFGetAttributeRatio(pType, MF_MT_FRAME_RATE, (UINT32*)&pRatio->Numerator, (UINT32*)&pRatio->Denominator); -} - - -#endif // MFGLOBAL_H diff --git a/src/plugins/wmf/mftvideo.cpp b/src/plugins/wmf/mftvideo.cpp index 6faa8604c..b7a416213 100644 --- a/src/plugins/wmf/mftvideo.cpp +++ b/src/plugins/wmf/mftvideo.cpp @@ -52,6 +52,7 @@ MFTransform::MFTransform(): m_inputType(0), m_outputType(0), m_sample(0), + 
m_videoSinkTypeHandler(0), m_bytesPerLine(0) { } @@ -64,8 +65,8 @@ MFTransform::~MFTransform() if (m_outputType) m_outputType->Release(); - for (int i = 0; i < m_mediaTypes.size(); ++i) - m_mediaTypes[i]->Release(); + if (m_videoSinkTypeHandler) + m_videoSinkTypeHandler->Release(); } void MFTransform::addProbe(MFVideoProbeControl *probe) @@ -84,12 +85,18 @@ void MFTransform::removeProbe(MFVideoProbeControl *probe) m_videoProbes.removeOne(probe); } -void MFTransform::addSupportedMediaType(IMFMediaType *type) +void MFTransform::setVideoSink(IUnknown *videoSink) { - if (!type) - return; - QMutexLocker locker(&m_mutex); - m_mediaTypes.append(type); + // This transform supports the same input types as the video sink. + // Store its type handler interface in order to report the correct supported types. + + if (m_videoSinkTypeHandler) { + m_videoSinkTypeHandler->Release(); + m_videoSinkTypeHandler = NULL; + } + + if (videoSink) + videoSink->QueryInterface(IID_PPV_ARGS(&m_videoSinkTypeHandler)); } STDMETHODIMP MFTransform::QueryInterface(REFIID riid, void** ppv) @@ -165,9 +172,12 @@ STDMETHODIMP MFTransform::GetInputStreamInfo(DWORD dwInputStreamID, MFT_INPUT_ST pStreamInfo->cbSize = 0; pStreamInfo->hnsMaxLatency = 0; - pStreamInfo->dwFlags = MFT_INPUT_STREAM_WHOLE_SAMPLES | MFT_INPUT_STREAM_SINGLE_SAMPLE_PER_BUFFER; pStreamInfo->cbMaxLookahead = 0; pStreamInfo->cbAlignment = 0; + pStreamInfo->dwFlags = MFT_INPUT_STREAM_WHOLE_SAMPLES + | MFT_INPUT_STREAM_SINGLE_SAMPLE_PER_BUFFER + | MFT_INPUT_STREAM_PROCESSES_IN_PLACE; + return S_OK; } @@ -182,8 +192,11 @@ STDMETHODIMP MFTransform::GetOutputStreamInfo(DWORD dwOutputStreamID, MFT_OUTPUT return E_POINTER; pStreamInfo->cbSize = 0; - pStreamInfo->dwFlags = MFT_OUTPUT_STREAM_WHOLE_SAMPLES | MFT_OUTPUT_STREAM_SINGLE_SAMPLE_PER_BUFFER; pStreamInfo->cbAlignment = 0; + pStreamInfo->dwFlags = MFT_OUTPUT_STREAM_WHOLE_SAMPLES + | MFT_OUTPUT_STREAM_SINGLE_SAMPLE_PER_BUFFER + | MFT_OUTPUT_STREAM_PROVIDES_SAMPLES + | 
MFT_OUTPUT_STREAM_DISCARDABLE; return S_OK; } @@ -228,20 +241,42 @@ STDMETHODIMP MFTransform::AddInputStreams(DWORD cStreams, DWORD *adwStreamIDs) STDMETHODIMP MFTransform::GetInputAvailableType(DWORD dwInputStreamID, DWORD dwTypeIndex, IMFMediaType **ppType) { - // This MFT does not have a list of preferred input types - Q_UNUSED(dwInputStreamID); - Q_UNUSED(dwTypeIndex); - Q_UNUSED(ppType); - return E_NOTIMPL; + // We support the same input types as the video sink + if (!m_videoSinkTypeHandler) + return E_NOTIMPL; + + if (dwInputStreamID > 0) + return MF_E_INVALIDSTREAMNUMBER; + + if (!ppType) + return E_POINTER; + + return m_videoSinkTypeHandler->GetMediaTypeByIndex(dwTypeIndex, ppType); } -STDMETHODIMP MFTransform::GetOutputAvailableType(DWORD dwOutputStreamID,DWORD dwTypeIndex, IMFMediaType **ppType) +STDMETHODIMP MFTransform::GetOutputAvailableType(DWORD dwOutputStreamID, DWORD dwTypeIndex, IMFMediaType **ppType) { - // This MFT does not have a list of preferred output types - Q_UNUSED(dwOutputStreamID); - Q_UNUSED(dwTypeIndex); - Q_UNUSED(ppType); - return E_NOTIMPL; + // Since we don't modify the samples, the output type must be the same as the input type. + // Report our input type as the only available output type. 
+ + if (dwOutputStreamID > 0) + return MF_E_INVALIDSTREAMNUMBER; + + if (!ppType) + return E_POINTER; + + // Input type must be set first + if (!m_inputType) + return MF_E_TRANSFORM_TYPE_NOT_SET; + + if (dwTypeIndex > 0) + return MF_E_NO_MORE_TYPES; + + // Return a copy to make sure our type is not modified + if (FAILED(MFCreateMediaType(ppType))) + return E_OUTOFMEMORY; + + return m_inputType->CopyAllItems(*ppType); } STDMETHODIMP MFTransform::SetInputType(DWORD dwInputStreamID, IMFMediaType *pType, DWORD dwFlags) @@ -257,17 +292,14 @@ STDMETHODIMP MFTransform::SetInputType(DWORD dwInputStreamID, IMFMediaType *pTyp if (!isMediaTypeSupported(pType)) return MF_E_INVALIDMEDIATYPE; - DWORD flags = 0; - if (pType && !m_inputType && m_outputType && m_outputType->IsEqual(pType, &flags) != S_OK) - return MF_E_INVALIDMEDIATYPE; - if (dwFlags == MFT_SET_TYPE_TEST_ONLY) return pType ? S_OK : E_POINTER; if (m_inputType) { m_inputType->Release(); // Input type has changed, discard output type (if it's set) so it's reset later on - if (m_outputType && m_outputType->IsEqual(pType, &flags) != S_OK) { + DWORD flags = 0; + if (m_outputType && m_outputType->IsEqual(pType, &flags) != S_OK) { m_outputType->Release(); m_outputType = 0; } @@ -286,29 +318,27 @@ STDMETHODIMP MFTransform::SetOutputType(DWORD dwOutputStreamID, IMFMediaType *pT if (dwOutputStreamID > 0) return MF_E_INVALIDSTREAMNUMBER; + if (dwFlags == MFT_SET_TYPE_TEST_ONLY && !pType) + return E_POINTER; + QMutexLocker locker(&m_mutex); + // Input type must be set first + if (!m_inputType) + return MF_E_TRANSFORM_TYPE_NOT_SET; + if (m_sample) return MF_E_TRANSFORM_CANNOT_CHANGE_MEDIATYPE_WHILE_PROCESSING; - if (!isMediaTypeSupported(pType)) - return MF_E_INVALIDMEDIATYPE; - DWORD flags = 0; - if (pType && !m_outputType && m_inputType && m_inputType->IsEqual(pType, &flags) != S_OK) + if (pType && m_inputType->IsEqual(pType, &flags) != S_OK) return MF_E_INVALIDMEDIATYPE; if (dwFlags == MFT_SET_TYPE_TEST_ONLY) return pType ? 
S_OK : E_POINTER; - if (m_outputType) { + if (m_outputType) m_outputType->Release(); - // Output type has changed, discard input type (if it's set) so it's reset later on - if (m_inputType && m_inputType->IsEqual(pType, &flags) != S_OK) { - m_inputType->Release(); - m_inputType = 0; - } - } m_outputType = pType; @@ -333,10 +363,11 @@ STDMETHODIMP MFTransform::GetInputCurrentType(DWORD dwInputStreamID, IMFMediaTyp if (!m_inputType) return MF_E_TRANSFORM_TYPE_NOT_SET; - *ppType = m_inputType; - (*ppType)->AddRef(); + // Return a copy to make sure our type is not modified + if (FAILED(MFCreateMediaType(ppType))) + return E_OUTOFMEMORY; - return S_OK; + return m_inputType->CopyAllItems(*ppType); } STDMETHODIMP MFTransform::GetOutputCurrentType(DWORD dwOutputStreamID, IMFMediaType **ppType) @@ -349,19 +380,14 @@ STDMETHODIMP MFTransform::GetOutputCurrentType(DWORD dwOutputStreamID, IMFMediaT QMutexLocker locker(&m_mutex); - if (!m_outputType) { - if (m_inputType) { - *ppType = m_inputType; - (*ppType)->AddRef(); - return S_OK; - } + if (!m_outputType) return MF_E_TRANSFORM_TYPE_NOT_SET; - } - *ppType = m_outputType; - (*ppType)->AddRef(); + // Return a copy to make sure our type is not modified + if (FAILED(MFCreateMediaType(ppType))) + return E_OUTOFMEMORY; - return S_OK; + return m_outputType->CopyAllItems(*ppType); } STDMETHODIMP MFTransform::GetInputStatus(DWORD dwInputStreamID, DWORD *pdwFlags) @@ -374,7 +400,7 @@ STDMETHODIMP MFTransform::GetInputStatus(DWORD dwInputStreamID, DWORD *pdwFlags) QMutexLocker locker(&m_mutex); - if (!m_inputType) + if (!m_inputType || !m_outputType) return MF_E_TRANSFORM_TYPE_NOT_SET; if (m_sample) @@ -392,7 +418,7 @@ STDMETHODIMP MFTransform::GetOutputStatus(DWORD *pdwFlags) QMutexLocker locker(&m_mutex); - if (!m_outputType) + if (!m_inputType || !m_outputType) return MF_E_TRANSFORM_TYPE_NOT_SET; if (m_sample) @@ -464,7 +490,7 @@ STDMETHODIMP MFTransform::ProcessInput(DWORD dwInputStreamID, IMFSample *pSample QMutexLocker 
locker(&m_mutex); - if (!m_inputType || !m_outputType) + if (!m_inputType) return MF_E_TRANSFORM_TYPE_NOT_SET; if (m_sample) @@ -499,9 +525,6 @@ STDMETHODIMP MFTransform::ProcessInput(DWORD dwInputStreamID, IMFSample *pSample STDMETHODIMP MFTransform::ProcessOutput(DWORD dwFlags, DWORD cOutputBufferCount, MFT_OUTPUT_DATA_BUFFER *pOutputSamples, DWORD *pdwStatus) { - if (dwFlags != 0) - return E_INVALIDARG; - if (pOutputSamples == NULL || pdwStatus == NULL) return E_POINTER; @@ -510,57 +533,44 @@ STDMETHODIMP MFTransform::ProcessOutput(DWORD dwFlags, DWORD cOutputBufferCount, QMutexLocker locker(&m_mutex); - if (!m_sample) - return MF_E_TRANSFORM_NEED_MORE_INPUT; + if (!m_inputType) + return MF_E_TRANSFORM_TYPE_NOT_SET; + + if (!m_outputType) { + pOutputSamples[0].dwStatus = MFT_OUTPUT_DATA_BUFFER_FORMAT_CHANGE; + return MF_E_TRANSFORM_STREAM_CHANGE; + } IMFMediaBuffer *input = NULL; IMFMediaBuffer *output = NULL; - DWORD sampleLength = 0; - m_sample->GetTotalLength(&sampleLength); - - // If the sample length is null, it means we're getting DXVA buffers. - // In that case just pass on the sample we got as input. - // Otherwise we need to copy the input buffer into the buffer the sink - // is giving us. 
- if (pOutputSamples[0].pSample && sampleLength > 0) { - - if (FAILED(m_sample->ConvertToContiguousBuffer(&input))) - goto done; - - if (FAILED(pOutputSamples[0].pSample->ConvertToContiguousBuffer(&output))) - goto done; - - DWORD inputLength = 0; - DWORD outputLength = 0; - input->GetMaxLength(&inputLength); - output->GetMaxLength(&outputLength); + if (dwFlags == MFT_PROCESS_OUTPUT_DISCARD_WHEN_NO_BUFFER) + goto done; + else if (dwFlags != 0) + return E_INVALIDARG; - if (outputLength < inputLength) { - pOutputSamples[0].pSample->RemoveAllBuffers(); - output->Release(); - output = NULL; - if (SUCCEEDED(MFCreateMemoryBuffer(inputLength, &output))) - pOutputSamples[0].pSample->AddBuffer(output); - } + if (!m_sample) + return MF_E_TRANSFORM_NEED_MORE_INPUT; - if (output) - m_sample->CopyToBuffer(output); + // Since the MFT_OUTPUT_STREAM_PROVIDES_SAMPLES flag is set, the client + // should not be providing samples here + if (pOutputSamples[0].pSample != NULL) + return E_INVALIDARG; - LONGLONG hnsDuration = 0; - LONGLONG hnsTime = 0; - if (SUCCEEDED(m_sample->GetSampleDuration(&hnsDuration))) - pOutputSamples[0].pSample->SetSampleDuration(hnsDuration); - if (SUCCEEDED(m_sample->GetSampleTime(&hnsTime))) - pOutputSamples[0].pSample->SetSampleTime(hnsTime); + pOutputSamples[0].pSample = m_sample; + pOutputSamples[0].pSample->AddRef(); + // Send video frame to probes + // We do it here (instead of inside ProcessInput) to make sure samples discarded by the renderer + // are not sent. 
+ m_videoProbeMutex.lock(); + if (!m_videoProbes.isEmpty()) { + QVideoFrame frame = makeVideoFrame(); - } else { - if (pOutputSamples[0].pSample) - pOutputSamples[0].pSample->Release(); - pOutputSamples[0].pSample = m_sample; - pOutputSamples[0].pSample->AddRef(); + foreach (MFVideoProbeControl* probe, m_videoProbes) + probe->bufferProbed(frame); } + m_videoProbeMutex.unlock(); done: pOutputSamples[0].dwStatus = 0; @@ -728,16 +738,10 @@ QByteArray MFTransform::dataFromBuffer(IMFMediaBuffer *buffer, int height, int * bool MFTransform::isMediaTypeSupported(IMFMediaType *type) { - // if the list is empty, it supports all formats - if (!type || m_mediaTypes.isEmpty()) + // If we don't have the video sink's type handler, + // assume it supports anything... + if (!m_videoSinkTypeHandler || !type) return true; - for (int i = 0; i < m_mediaTypes.size(); ++i) { - DWORD flags = 0; - m_mediaTypes.at(i)->IsEqual(type, &flags); - if (flags & MF_MEDIATYPE_EQUAL_FORMAT_TYPES) - return true; - } - - return false; + return m_videoSinkTypeHandler->IsMediaTypeSupported(type, NULL) == S_OK; } diff --git a/src/plugins/wmf/mftvideo.h b/src/plugins/wmf/mftvideo.h index 1a188c4db..c37c8f700 100644 --- a/src/plugins/wmf/mftvideo.h +++ b/src/plugins/wmf/mftvideo.h @@ -53,7 +53,7 @@ public: void addProbe(MFVideoProbeControl* probe); void removeProbe(MFVideoProbeControl* probe); - void addSupportedMediaType(IMFMediaType *type); + void setVideoSink(IUnknown *videoSink); // IUnknown methods STDMETHODIMP QueryInterface(REFIID iid, void** ppv); @@ -99,7 +99,7 @@ private: IMFSample *m_sample; QMutex m_mutex; - QList<IMFMediaType*> m_mediaTypes; + IMFMediaTypeHandler *m_videoSinkTypeHandler; QList<MFVideoProbeControl*> m_videoProbes; QMutex m_videoProbeMutex; diff --git a/src/plugins/wmf/player/mfplayersession.cpp b/src/plugins/wmf/player/mfplayersession.cpp index e4c498b76..0ac1c3d66 100644 --- a/src/plugins/wmf/player/mfplayersession.cpp +++ b/src/plugins/wmf/player/mfplayersession.cpp @@ -266,6 
+266,25 @@ void MFPlayerSession::handleMediaSourceReady() } } +MFPlayerSession::MediaType MFPlayerSession::getStreamType(IMFStreamDescriptor *stream) const +{ + if (!stream) + return Unknown; + + IMFMediaTypeHandler *typeHandler = NULL; + if (SUCCEEDED(stream->GetMediaTypeHandler(&typeHandler))) { + GUID guidMajorType; + if (SUCCEEDED(typeHandler->GetMajorType(&guidMajorType))) { + if (guidMajorType == MFMediaType_Audio) + return Audio; + else if (guidMajorType == MFMediaType_Video) + return Video; + } + } + + return Unknown; +} + void MFPlayerSession::setupPlaybackTopology(IMFMediaSource *source, IMFPresentationDescriptor *sourcePD) { HRESULT hr = S_OK; @@ -294,45 +313,58 @@ void MFPlayerSession::setupPlaybackTopology(IMFMediaSource *source, IMFPresentat for (DWORD i = 0; i < cSourceStreams; i++) { BOOL fSelected = FALSE; + bool streamAdded = false; IMFStreamDescriptor *streamDesc = NULL; HRESULT hr = sourcePD->GetStreamDescriptorByIndex(i, &fSelected, &streamDesc); if (SUCCEEDED(hr)) { - MediaType mediaType = Unknown; - IMFTopologyNode *sourceNode = addSourceNode(topology, source, sourcePD, streamDesc); - if (sourceNode) { - IMFTopologyNode *outputNode = addOutputNode(streamDesc, mediaType, topology, 0); - if (outputNode) { - bool connected = false; - if (mediaType == Audio) { - if (!m_audioSampleGrabberNode) - connected = setupAudioSampleGrabber(topology, sourceNode, outputNode); - } else if (mediaType == Video && outputNodeId == -1) { - // Remember video output node ID. - outputNode->GetTopoNodeID(&outputNodeId); - } + // The media might have multiple audio and video streams, + // only use one of each kind, and only if it is selected by default. 
+ MediaType mediaType = getStreamType(streamDesc); + if (mediaType != Unknown + && ((m_mediaTypes & mediaType) == 0) // Check if this type isn't already added + && fSelected) { + + IMFTopologyNode *sourceNode = addSourceNode(topology, source, sourcePD, streamDesc); + if (sourceNode) { + IMFTopologyNode *outputNode = addOutputNode(mediaType, topology, 0); + if (outputNode) { + bool connected = false; + if (mediaType == Audio) { + if (!m_audioSampleGrabberNode) + connected = setupAudioSampleGrabber(topology, sourceNode, outputNode); + } else if (mediaType == Video && outputNodeId == -1) { + // Remember video output node ID. + outputNode->GetTopoNodeID(&outputNodeId); + } - if (!connected) - hr = sourceNode->ConnectOutput(0, outputNode, 0); - if (FAILED(hr)) { - emit error(QMediaPlayer::FormatError, tr("Unable to play any stream."), false); - } - else { - succeededCount++; - m_mediaTypes |= mediaType; - switch (mediaType) { - case Audio: - emit audioAvailable(); - break; - case Video: - emit videoAvailable(); - break; + if (!connected) + hr = sourceNode->ConnectOutput(0, outputNode, 0); + + if (FAILED(hr)) { + emit error(QMediaPlayer::FormatError, tr("Unable to play any stream."), false); + } else { + streamAdded = true; + succeededCount++; + m_mediaTypes |= mediaType; + switch (mediaType) { + case Audio: + emit audioAvailable(); + break; + case Video: + emit videoAvailable(); + break; + } } + outputNode->Release(); } - outputNode->Release(); + sourceNode->Release(); } - sourceNode->Release(); } + + if (fSelected && !streamAdded) + sourcePD->DeselectStream(i); + streamDesc->Release(); } } @@ -377,56 +409,38 @@ IMFTopologyNode* MFPlayerSession::addSourceNode(IMFTopology* topology, IMFMediaS return NULL; } -IMFTopologyNode* MFPlayerSession::addOutputNode(IMFStreamDescriptor *streamDesc, MediaType& mediaType, IMFTopology* topology, DWORD sinkID) +IMFTopologyNode* MFPlayerSession::addOutputNode(MediaType mediaType, IMFTopology* topology, DWORD sinkID) { IMFTopologyNode 
*node = NULL; - HRESULT hr = MFCreateTopologyNode(MF_TOPOLOGY_OUTPUT_NODE, &node); - if (FAILED(hr)) + if (FAILED(MFCreateTopologyNode(MF_TOPOLOGY_OUTPUT_NODE, &node))) return NULL; - node->SetUINT32(MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE); - mediaType = Unknown; - IMFMediaTypeHandler *handler = NULL; - hr = streamDesc->GetMediaTypeHandler(&handler); - if (SUCCEEDED(hr)) { - GUID guidMajorType; - hr = handler->GetMajorType(&guidMajorType); - if (SUCCEEDED(hr)) { - IMFActivate *activate = NULL; - if (MFMediaType_Audio == guidMajorType) { - mediaType = Audio; - activate = m_playerService->audioEndpointControl()->createActivate(); - } else if (MFMediaType_Video == guidMajorType) { - mediaType = Video; - if (m_playerService->videoRendererControl()) { - activate = m_playerService->videoRendererControl()->createActivate(); - } else if (m_playerService->videoWindowControl()) { - activate = m_playerService->videoWindowControl()->createActivate(); - } else { - qWarning() << "no videoWindowControl or videoRendererControl, unable to add output node for video data"; - } - } else { - // Unknown stream type. 
- emit error(QMediaPlayer::FormatError, tr("Unknown stream type."), false); - } - - if (activate) { - hr = node->SetObject(activate); - if (SUCCEEDED(hr)) { - hr = node->SetUINT32(MF_TOPONODE_STREAMID, sinkID); - if (SUCCEEDED(hr)) { - if (SUCCEEDED(topology->AddNode(node))) { - handler->Release(); - return node; - } - } - } - } + IMFActivate *activate = NULL; + if (mediaType == Audio) { + activate = m_playerService->audioEndpointControl()->createActivate(); + } else if (mediaType == Video) { + if (m_playerService->videoRendererControl()) { + activate = m_playerService->videoRendererControl()->createActivate(); + } else if (m_playerService->videoWindowControl()) { + activate = m_playerService->videoWindowControl()->createActivate(); + } else { + qWarning() << "no videoWindowControl or videoRendererControl, unable to add output node for video data"; } - handler->Release(); + } else { + // Unknown stream type. + emit error(QMediaPlayer::FormatError, tr("Unknown stream type."), false); } - node->Release(); - return NULL; + + if (!activate + || FAILED(node->SetObject(activate)) + || FAILED(node->SetUINT32(MF_TOPONODE_STREAMID, sinkID)) + || FAILED(node->SetUINT32(MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE)) + || FAILED(topology->AddNode(node))) { + node->Release(); + node = NULL; + } + + return node; } bool MFPlayerSession::addAudioSampleGrabberNode(IMFTopology *topology) @@ -692,7 +706,6 @@ IMFTopology *MFPlayerSession::insertMFT(IMFTopology *topology, TOPOID outputNode IUnknown *element = 0; IMFTopologyNode *node = 0; IUnknown *outputObject = 0; - IMFMediaTypeHandler *videoSink = 0; IMFTopologyNode *inputNode = 0; IMFTopologyNode *mftNode = 0; bool mftAdded = false; @@ -711,22 +724,10 @@ IMFTopology *MFPlayerSession::insertMFT(IMFTopology *topology, TOPOID outputNode if (id != outputNodeId) break; - // Use output supported media types for the MFT if (FAILED(node->GetObject(&outputObject))) break; - if (FAILED(outputObject->QueryInterface(IID_IMFMediaTypeHandler, 
(void**)&videoSink))) - break; - - DWORD mtCount; - if (FAILED(videoSink->GetMediaTypeCount(&mtCount))) - break; - - for (DWORD i = 0; i < mtCount; ++i) { - IMFMediaType *type = 0; - if (SUCCEEDED(videoSink->GetMediaTypeByIndex(i, &type))) - m_videoProbeMFT->addSupportedMediaType(type); - } + m_videoProbeMFT->setVideoSink(outputObject); // Insert MFT between the output node and the node connected to it. DWORD outputIndex = 0; @@ -760,13 +761,13 @@ IMFTopology *MFPlayerSession::insertMFT(IMFTopology *topology, TOPOID outputNode node->Release(); if (element) element->Release(); - if (videoSink) - videoSink->Release(); if (outputObject) outputObject->Release(); if (mftAdded) break; + else + m_videoProbeMFT->setVideoSink(NULL); } } while (false); diff --git a/src/plugins/wmf/player/mfplayersession.h b/src/plugins/wmf/player/mfplayersession.h index 1d136ba55..5bbf8e212 100644 --- a/src/plugins/wmf/player/mfplayersession.h +++ b/src/plugins/wmf/player/mfplayersession.h @@ -215,9 +215,10 @@ private: void createSession(); void setupPlaybackTopology(IMFMediaSource *source, IMFPresentationDescriptor *sourcePD); + MediaType getStreamType(IMFStreamDescriptor *stream) const; IMFTopologyNode* addSourceNode(IMFTopology* topology, IMFMediaSource* source, IMFPresentationDescriptor* presentationDesc, IMFStreamDescriptor *streamDesc); - IMFTopologyNode* addOutputNode(IMFStreamDescriptor *streamDesc, MediaType& mediaType, IMFTopology* topology, DWORD sinkID); + IMFTopologyNode* addOutputNode(MediaType mediaType, IMFTopology* topology, DWORD sinkID); bool addAudioSampleGrabberNode(IMFTopology* topology); bool setupAudioSampleGrabber(IMFTopology *topology, IMFTopologyNode *sourceNode, IMFTopologyNode *outputNode); diff --git a/src/plugins/wmf/player/mfvideorenderercontrol.cpp b/src/plugins/wmf/player/mfvideorenderercontrol.cpp index 683dd4b71..222d74ef2 100644 --- a/src/plugins/wmf/player/mfvideorenderercontrol.cpp +++ b/src/plugins/wmf/player/mfvideorenderercontrol.cpp @@ -32,15 +32,9 
@@ ****************************************************************************/ #include "mfvideorenderercontrol.h" -#include "mfglobal.h" +#include "mfactivate.h" -#if defined(QT_OPENGL_ES_2) || defined(QT_OPENGL_DYNAMIC) -#define MAYBE_ANGLE -#endif - -#ifdef MAYBE_ANGLE #include "evrcustompresenter.h" -#endif #include <qabstractvideosurface.h> #include <qvideosurfaceformat.h> @@ -2226,6 +2220,27 @@ namespace }; } + +class EVRCustomPresenterActivate : public MFAbstractActivate +{ +public: + EVRCustomPresenterActivate(); + ~EVRCustomPresenterActivate() + { } + + STDMETHODIMP ActivateObject(REFIID riid, void **ppv); + STDMETHODIMP ShutdownObject(); + STDMETHODIMP DetachObject(); + + void setSurface(QAbstractVideoSurface *surface); + +private: + EVRCustomPresenter *m_presenter; + QAbstractVideoSurface *m_surface; + QMutex m_mutex; +}; + + MFVideoRendererControl::MFVideoRendererControl(QObject *parent) : QVideoRendererControl(parent) , m_surface(0) @@ -2245,13 +2260,11 @@ void MFVideoRendererControl::clear() if (m_surface) m_surface->stop(); -#ifdef MAYBE_ANGLE if (m_presenterActivate) { m_presenterActivate->ShutdownObject(); m_presenterActivate->Release(); m_presenterActivate = NULL; } -#endif if (m_currentActivate) { m_currentActivate->ShutdownObject(); @@ -2280,12 +2293,9 @@ void MFVideoRendererControl::setSurface(QAbstractVideoSurface *surface) connect(m_surface, SIGNAL(supportedFormatsChanged()), this, SLOT(supportedFormatsChanged())); } -#ifdef MAYBE_ANGLE if (m_presenterActivate) m_presenterActivate->setSurface(m_surface); - else -#endif - if (m_currentActivate) + else if (m_currentActivate) static_cast<VideoRendererActivate*>(m_currentActivate)->setSurface(m_surface); } @@ -2323,11 +2333,9 @@ void MFVideoRendererControl::customEvent(QEvent *event) void MFVideoRendererControl::supportedFormatsChanged() { -#ifdef MAYBE_ANGLE if (m_presenterActivate) - m_presenterActivate->supportedFormatsChanged(); - else -#endif + return; + if (m_currentActivate) 
static_cast<VideoRendererActivate*>(m_currentActivate)->supportedFormatsChanged(); } @@ -2347,26 +2355,67 @@ IMFActivate* MFVideoRendererControl::createActivate() clear(); -#ifdef MAYBE_ANGLE - // We can use the EVR with our custom presenter only if the surface supports OpenGL - // texture handles. We also require ANGLE (due to the D3D interop). - if (!m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle).isEmpty() - && QMediaOpenGLHelper::isANGLE()) { - // Create the EVR media sink, but replace the presenter with our own - if (SUCCEEDED(MFCreateVideoRendererActivate(::GetShellWindow(), &m_currentActivate))) { - m_presenterActivate = new EVRCustomPresenterActivate; - m_currentActivate->SetUnknown(MF_ACTIVATE_CUSTOM_VIDEO_PRESENTER_ACTIVATE, m_presenterActivate); - } - } -#endif - - if (!m_currentActivate) + // Create the EVR media sink, but replace the presenter with our own + if (SUCCEEDED(MFCreateVideoRendererActivate(::GetShellWindow(), &m_currentActivate))) { + m_presenterActivate = new EVRCustomPresenterActivate; + m_currentActivate->SetUnknown(MF_ACTIVATE_CUSTOM_VIDEO_PRESENTER_ACTIVATE, m_presenterActivate); + } else { m_currentActivate = new VideoRendererActivate(this); + } setSurface(m_surface); return m_currentActivate; } + +EVRCustomPresenterActivate::EVRCustomPresenterActivate() + : MFAbstractActivate() + , m_presenter(0) + , m_surface(0) +{ } + +HRESULT EVRCustomPresenterActivate::ActivateObject(REFIID riid, void **ppv) +{ + if (!ppv) + return E_INVALIDARG; + QMutexLocker locker(&m_mutex); + if (!m_presenter) { + m_presenter = new EVRCustomPresenter; + if (m_surface) + m_presenter->setSurface(m_surface); + } + return m_presenter->QueryInterface(riid, ppv); +} + +HRESULT EVRCustomPresenterActivate::ShutdownObject() +{ + // The presenter does not implement IMFShutdown so + // this function is the same as DetachObject() + return DetachObject(); +} + +HRESULT EVRCustomPresenterActivate::DetachObject() +{ + QMutexLocker locker(&m_mutex); + 
if (m_presenter) { + m_presenter->Release(); + m_presenter = 0; + } + return S_OK; +} + +void EVRCustomPresenterActivate::setSurface(QAbstractVideoSurface *surface) +{ + QMutexLocker locker(&m_mutex); + if (m_surface == surface) + return; + + m_surface = surface; + + if (m_presenter) + m_presenter->setSurface(surface); +} + #include "moc_mfvideorenderercontrol.cpp" #include "mfvideorenderercontrol.moc" diff --git a/src/plugins/wmf/player/mfvideorenderercontrol.h b/src/plugins/wmf/player/mfvideorenderercontrol.h index ca3b95d10..224fcea51 100644 --- a/src/plugins/wmf/player/mfvideorenderercontrol.h +++ b/src/plugins/wmf/player/mfvideorenderercontrol.h @@ -38,14 +38,10 @@ #include <mfapi.h> #include <mfidl.h> -QT_BEGIN_NAMESPACE +QT_USE_NAMESPACE class EVRCustomPresenterActivate; -QT_END_NAMESPACE - -QT_USE_NAMESPACE - class MFVideoRendererControl : public QVideoRendererControl { Q_OBJECT diff --git a/src/plugins/wmf/wmf.pro b/src/plugins/wmf/wmf.pro index 68a777f37..1f43bb128 100644 --- a/src/plugins/wmf/wmf.pro +++ b/src/plugins/wmf/wmf.pro @@ -17,7 +17,6 @@ HEADERS += \ sourceresolver.h \ samplegrabber.h \ mftvideo.h \ - mfglobal.h \ mfactivate.h SOURCES += \ @@ -26,21 +25,7 @@ SOURCES += \ sourceresolver.cpp \ samplegrabber.cpp \ mftvideo.cpp \ - mfactivate.cpp \ - mfglobal.cpp - -contains(QT_CONFIG, angle)|contains(QT_CONFIG, dynamicgl) { - LIBS += -ld3d9 -ldxva2 -lwinmm -levr - QT += gui-private - - HEADERS += \ - $$PWD/evrcustompresenter.h \ - $$PWD/evrd3dpresentengine.h - - SOURCES += \ - $$PWD/evrcustompresenter.cpp \ - $$PWD/evrd3dpresentengine.cpp -} + mfactivate.cpp include (player/player.pri) include (decoder/decoder.pri) |