Diffstat (limited to 'src')
-rw-r--r--  src/gsttools/gsttools.pro | 9
-rw-r--r--  src/gsttools/qgstreamermirtexturerenderer.cpp | 357
-rw-r--r--  src/gsttools/qgstreamermirtexturerenderer_p.h | 119
-rw-r--r--  src/gsttools/qgstreamerplayercontrol.cpp | 8
-rw-r--r--  src/gsttools/qgstreamerplayersession.cpp | 15
-rw-r--r--  src/gsttools/qgstreamervideooverlay.cpp | 9
-rw-r--r--  src/gsttools/qgstreamervideowidget.cpp | 2
-rw-r--r--  src/gsttools/qgstutils.cpp | 10
-rw-r--r--  src/gsttools/qgstutils_p.h | 2
-rw-r--r--  src/gsttools/qgstvideorenderersink.cpp | 185
-rw-r--r--  src/gsttools/qgstvideorenderersink_p.h | 14
-rw-r--r--  src/imports/multimedia/qdeclarativeplaylist.cpp | 6
-rw-r--r--  src/multimedia/configure.json | 16
-rw-r--r--  src/multimedia/playback/qmediacontent.cpp | 18
-rw-r--r--  src/multimedia/playback/qmediacontent.h | 5
-rw-r--r--  src/multimedia/playback/qmediaplayer.cpp | 15
-rw-r--r--  src/multimedia/playback/qplaylistfileparser.cpp | 2
-rw-r--r--  src/multimedia/video/qvideoframe.cpp | 17
-rw-r--r--  src/multimedia/video/qvideoframe.h | 1
-rw-r--r--  src/multimedia/video/qvideosurfaceformat.cpp | 12
-rw-r--r--  src/multimediawidgets/qvideowidget.cpp | 4
-rw-r--r--  src/multimediawidgets/qvideowidget.h | 4
-rw-r--r--  src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp | 2
-rw-r--r--  src/plugins/android/src/mediaplayer/qandroidmetadatareadercontrol.cpp | 4
-rw-r--r--  src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm | 6
-rw-r--r--  src/plugins/directshow/player/directshowplayerservice.cpp | 2
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabincontrol.cpp | 2
-rw-r--r--  src/plugins/gstreamer/mediaplayer/qgstreamerplayerservice.cpp | 10
-rw-r--r--  src/plugins/m3u/qm3uhandler.cpp | 2
-rw-r--r--  src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.cpp | 2
-rw-r--r--  src/plugins/winrt/qwinrtmediaplayercontrol.cpp | 2
-rw-r--r--  src/plugins/wmf/player/mfplayersession.cpp | 2
-rw-r--r--  src/qtmultimediaquicktools/qdeclarativevideooutput_backend_p.h (renamed from src/multimedia/qtmultimediaquicktools_headers/qdeclarativevideooutput_backend_p.h) | 0
-rw-r--r--  src/qtmultimediaquicktools/qdeclarativevideooutput_p.h (renamed from src/multimedia/qtmultimediaquicktools_headers/qdeclarativevideooutput_p.h) | 0
-rw-r--r--  src/qtmultimediaquicktools/qsgvideonode_p.h (renamed from src/multimedia/qtmultimediaquicktools_headers/qsgvideonode_p.h) | 0
-rw-r--r--  src/qtmultimediaquicktools/qsgvideonode_texture.cpp | 1
-rw-r--r--  src/qtmultimediaquicktools/qsgvideonode_yuv.cpp | 19
-rw-r--r--  src/qtmultimediaquicktools/qtmultimediaquickdefs_p.h (renamed from src/multimedia/qtmultimediaquicktools_headers/qtmultimediaquickdefs_p.h) | 0
-rw-r--r--  src/qtmultimediaquicktools/qtmultimediaquicktools.pro | 5
39 files changed, 334 insertions(+), 555 deletions(-)
diff --git a/src/gsttools/gsttools.pro b/src/gsttools/gsttools.pro
index b13479ce7..fff039b3b 100644
--- a/src/gsttools/gsttools.pro
+++ b/src/gsttools/gsttools.pro
@@ -82,14 +82,7 @@ qtConfig(gstreamer_0_10) {
qgstvideorenderersink.cpp
}
-qtConfig(mirclient): {
- qtConfig(opengles2):qtHaveModule(widgets) {
- PRIVATE_HEADERS += qgstreamermirtexturerenderer_p.h
- SOURCES += qgstreamermirtexturerenderer.cpp
- QT += opengl quick
- LIBS += -lEGL
- }
-}
+qtConfig(gstreamer_gl): QMAKE_USE += gstreamer_gl
qtConfig(gstreamer_app) {
QMAKE_USE += gstreamer_app
diff --git a/src/gsttools/qgstreamermirtexturerenderer.cpp b/src/gsttools/qgstreamermirtexturerenderer.cpp
deleted file mode 100644
index 35050db03..000000000
--- a/src/gsttools/qgstreamermirtexturerenderer.cpp
+++ /dev/null
@@ -1,357 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2016 Canonical Ltd.
-** Contact: https://www.qt.io/licensing/
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see https://www.qt.io/terms-conditions. For further
-** information use the contact form at https://www.qt.io/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 3 as published by the Free Software
-** Foundation and appearing in the file LICENSE.LGPL3 included in the
-** packaging of this file. Please review the following information to
-** ensure the GNU Lesser General Public License version 3 requirements
-** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
-**
-** GNU General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU
-** General Public License version 2.0 or (at your option) the GNU General
-** Public license version 3 or any later version approved by the KDE Free
-** Qt Foundation. The licenses are as published by the Free Software
-** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
-** included in the packaging of this file. Please review the following
-** information to ensure the GNU General Public License requirements will
-** be met: https://www.gnu.org/licenses/gpl-2.0.html and
-** https://www.gnu.org/licenses/gpl-3.0.html.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-#include "qgstreamermirtexturerenderer_p.h"
-
-#include <qgstreamerplayersession.h>
-#include <private/qvideosurfacegstsink_p.h>
-#include <private/qgstutils_p.h>
-#include <qabstractvideosurface.h>
-
-#include <QAbstractVideoBuffer>
-#include <QGuiApplication>
-#include <QDebug>
-#include <QtQuick/QQuickWindow>
-#include <QOpenGLContext>
-#include <QGLContext>
-#include <QGuiApplication>
-#include <qgl.h>
-
-#include <gst/gst.h>
-
-static QGstreamerMirTextureRenderer *rendererInstance = NULL;
-
-class QGstreamerMirTextureBuffer : public QAbstractVideoBuffer
-{
-public:
- QGstreamerMirTextureBuffer(GLuint textureId) :
- QAbstractVideoBuffer(QAbstractVideoBuffer::GLTextureHandle),
- m_textureId(textureId)
- {
- }
-
- MapMode mapMode() const { return NotMapped; }
-
- uchar *map(MapMode mode, int *numBytes, int *bytesPerLine)
- {
- qDebug() << Q_FUNC_INFO;
- Q_UNUSED(mode);
- Q_UNUSED(numBytes);
- Q_UNUSED(bytesPerLine);
-
- return NULL;
- }
-
- void unmap() { qDebug() << Q_FUNC_INFO; }
-
- QVariant handle() const { return QVariant::fromValue<unsigned int>(m_textureId); }
-
- GLuint textureId() { return m_textureId; }
-
-private:
- GLuint m_textureId;
-};
-
-QGstreamerMirTextureRenderer::QGstreamerMirTextureRenderer(QObject *parent
- , const QGstreamerPlayerSession *playerSession)
- : QVideoRendererControl(0), m_videoSink(0), m_surface(0),
- m_glSurface(0),
- m_context(0),
- m_glContext(0),
- m_textureId(0),
- m_offscreenSurface(0),
- m_textureBuffer(0)
-{
- Q_UNUSED(parent);
- setPlayerSession(playerSession);
-}
-
-QGstreamerMirTextureRenderer::~QGstreamerMirTextureRenderer()
-{
- if (m_videoSink)
- gst_object_unref(GST_OBJECT(m_videoSink));
-
- delete m_glContext;
- delete m_offscreenSurface;
-}
-
-GstElement *QGstreamerMirTextureRenderer::videoSink()
-{
- qDebug() << Q_FUNC_INFO;
-
- // FIXME: Ugly hack until I figure out why passing this segfaults in the g_signal handler
- rendererInstance = const_cast<QGstreamerMirTextureRenderer*>(this);
-
- if (!m_videoSink && m_surface) {
- qDebug() << Q_FUNC_INFO << ": using mirsink, (this: " << this << ")";
-
- m_videoSink = gst_element_factory_make("mirsink", "video-output");
-
- connect(QGuiApplication::instance(), SIGNAL(focusWindowChanged(QWindow*)),
- this, SLOT(handleFocusWindowChanged(QWindow*)), Qt::QueuedConnection);
-
- g_signal_connect(G_OBJECT(m_videoSink), "frame-ready", G_CALLBACK(handleFrameReady),
- (gpointer)this);
- }
-
- if (m_videoSink) {
- gst_object_ref_sink(GST_OBJECT(m_videoSink));
-
- GstPad *pad = gst_element_get_static_pad(m_videoSink, "sink");
- gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER,
- padBufferProbe, this, NULL);
- }
-
- return m_videoSink;
-}
-
-QWindow *QGstreamerMirTextureRenderer::createOffscreenWindow(const QSurfaceFormat &format)
-{
- QWindow *w = new QWindow();
- w->setSurfaceType(QWindow::OpenGLSurface);
- w->setFormat(format);
- w->setGeometry(0, 0, 1, 1);
- w->setFlags(w->flags() | Qt::WindowTransparentForInput);
- w->create();
-
- return w;
-}
-
-void QGstreamerMirTextureRenderer::handleFrameReady(gpointer userData)
-{
- QGstreamerMirTextureRenderer *renderer = reinterpret_cast<QGstreamerMirTextureRenderer*>(userData);
-#if 1
- QMutexLocker locker(&rendererInstance->m_mutex);
- QMetaObject::invokeMethod(rendererInstance, "renderFrame", Qt::QueuedConnection);
-#else
- // FIXME!
- //QMutexLocker locker(&renderer->m_mutex);
- QMetaObject::invokeMethod(renderer, "renderFrame", Qt::QueuedConnection);
-#endif
-}
-
-void QGstreamerMirTextureRenderer::renderFrame()
-{
- //qDebug() << Q_FUNC_INFO;
-
- if (m_context)
- m_context->makeCurrent();
-
- GstState pendingState = GST_STATE_NULL;
- GstState newState = GST_STATE_NULL;
- // Don't block and return immediately:
- GstStateChangeReturn ret = gst_element_get_state(m_videoSink, &newState,
- &pendingState, 0);
- if (ret == GST_STATE_CHANGE_FAILURE || newState == GST_STATE_NULL||
- pendingState == GST_STATE_NULL) {
- qWarning() << "Invalid state change for renderer, aborting";
- stopRenderer();
- return;
- }
-
- if (!m_surface->isActive()) {
- qDebug() << "m_surface is not active";
- GstPad *pad = gst_element_get_static_pad(m_videoSink, "sink");
- GstCaps *caps = gst_pad_get_current_caps(pad);
-
- if (caps) {
- // Get the native video size from the video sink
- QSize newNativeSize = QGstUtils::capsCorrectedResolution(caps);
- if (m_nativeSize != newNativeSize) {
- m_nativeSize = newNativeSize;
- emit nativeSizeChanged();
- }
- gst_caps_unref(caps);
- }
-
- // Start the surface
- QVideoSurfaceFormat format(m_nativeSize, QVideoFrame::Format_RGB32, QAbstractVideoBuffer::GLTextureHandle);
- qDebug() << "m_nativeSize: " << m_nativeSize;
- qDebug() << "format: " << format;
- if (!m_surface->start(format)) {
- qWarning() << Q_FUNC_INFO << ": failed to start the video surface " << format;
- return;
- }
- }
-
- QGstreamerMirTextureBuffer *buffer = new QGstreamerMirTextureBuffer(m_textureId);
- //qDebug() << "frameSize: " << m_surface->surfaceFormat().frameSize();
- QVideoFrame frame(buffer, m_surface->surfaceFormat().frameSize(),
- m_surface->surfaceFormat().pixelFormat());
-
- frame.setMetaData("TextureId", m_textureId);
-
- // Display the video frame on the surface:
- m_surface->present(frame);
-}
-
-GstPadProbeReturn QGstreamerMirTextureRenderer::padBufferProbe(GstPad *pad, GstPadProbeInfo *info, gpointer userData)
-{
- Q_UNUSED(pad);
- Q_UNUSED(info);
-
- QGstreamerMirTextureRenderer *control = reinterpret_cast<QGstreamerMirTextureRenderer*>(userData);
- QMetaObject::invokeMethod(control, "updateNativeVideoSize", Qt::QueuedConnection);
-
- return GST_PAD_PROBE_REMOVE;
-}
-
-void QGstreamerMirTextureRenderer::stopRenderer()
-{
- if (m_surface)
- m_surface->stop();
-}
-
-QAbstractVideoSurface *QGstreamerMirTextureRenderer::surface() const
-{
- return m_surface;
-}
-
-void QGstreamerMirTextureRenderer::setSurface(QAbstractVideoSurface *surface)
-{
- qDebug() << Q_FUNC_INFO;
-
- if (m_surface != surface) {
- qDebug() << "Saving current QGLContext";
- m_context = const_cast<QGLContext*>(QGLContext::currentContext());
-
- if (m_videoSink)
- gst_object_unref(GST_OBJECT(m_videoSink));
-
- m_videoSink = 0;
-
- if (m_surface) {
- disconnect(m_surface.data(), SIGNAL(supportedFormatsChanged()),
- this, SLOT(handleFormatChange()));
- }
-
- bool wasReady = isReady();
-
- m_surface = surface;
-
- if (m_surface) {
- connect(m_surface.data(), SIGNAL(supportedFormatsChanged()),
- this, SLOT(handleFormatChange()));
- }
-
- if (wasReady != isReady())
- emit readyChanged(isReady());
-
- emit sinkChanged();
- }
-}
-
-void QGstreamerMirTextureRenderer::setPlayerSession(const QGstreamerPlayerSession *playerSession)
-{
- m_playerSession = const_cast<QGstreamerPlayerSession*>(playerSession);
-}
-
-void QGstreamerMirTextureRenderer::handleFormatChange()
-{
- qDebug() << "Supported formats list has changed, reload video output";
-
- if (m_videoSink)
- gst_object_unref(GST_OBJECT(m_videoSink));
-
- m_videoSink = 0;
- emit sinkChanged();
-}
-
-void QGstreamerMirTextureRenderer::updateNativeVideoSize()
-{
- //qDebug() << Q_FUNC_INFO;
- const QSize oldSize = m_nativeSize;
-
- if (m_videoSink) {
- // Find video native size to update video widget size hint
- GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
- GstCaps *caps = gst_pad_get_current_caps(pad);
-
- if (caps) {
- m_nativeSize = QGstUtils::capsCorrectedResolution(caps);
- gst_caps_unref(caps);
- }
- } else {
- m_nativeSize = QSize();
- }
- qDebug() << Q_FUNC_INFO << oldSize << m_nativeSize << m_videoSink;
-
- if (m_nativeSize != oldSize)
- emit nativeSizeChanged();
-}
-
-void QGstreamerMirTextureRenderer::handleFocusWindowChanged(QWindow *window)
-{
- qDebug() << Q_FUNC_INFO;
-
- QOpenGLContext *currContext = QOpenGLContext::currentContext();
-
- QQuickWindow *w = dynamic_cast<QQuickWindow*>(window);
- // If we don't have a GL context in the current thread, create one and share it
- // with the render thread GL context
- if (!currContext && !m_glContext) {
- // This emulates the new QOffscreenWindow class with Qt5.1
- m_offscreenSurface = createOffscreenWindow(w->openglContext()->surface()->format());
- m_offscreenSurface->setParent(window);
-
- QOpenGLContext *shareContext = 0;
- if (m_surface)
- shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>());
- m_glContext = new QOpenGLContext;
- m_glContext->setFormat(m_offscreenSurface->requestedFormat());
-
- if (shareContext)
- m_glContext->setShareContext(shareContext);
-
- if (!m_glContext->create())
- {
- qWarning() << "Failed to create new shared context.";
- return;
- }
- }
-
- if (m_glContext)
- m_glContext->makeCurrent(m_offscreenSurface);
-
- if (m_textureId == 0) {
- glGenTextures(1, &m_textureId);
- qDebug() << "texture_id (handleFocusWindowChanged): " << m_textureId << endl;
- g_object_set(G_OBJECT(m_videoSink), "texture-id", m_textureId, (char*)NULL);
- }
-}
diff --git a/src/gsttools/qgstreamermirtexturerenderer_p.h b/src/gsttools/qgstreamermirtexturerenderer_p.h
deleted file mode 100644
index 62150f7e1..000000000
--- a/src/gsttools/qgstreamermirtexturerenderer_p.h
+++ /dev/null
@@ -1,119 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2016 Canonical Ltd.
-** Contact: https://www.qt.io/licensing/
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see https://www.qt.io/terms-conditions. For further
-** information use the contact form at https://www.qt.io/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 3 as published by the Free Software
-** Foundation and appearing in the file LICENSE.LGPL3 included in the
-** packaging of this file. Please review the following information to
-** ensure the GNU Lesser General Public License version 3 requirements
-** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
-**
-** GNU General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU
-** General Public License version 2.0 or (at your option) the GNU General
-** Public license version 3 or any later version approved by the KDE Free
-** Qt Foundation. The licenses are as published by the Free Software
-** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
-** included in the packaging of this file. Please review the following
-** information to ensure the GNU General Public License requirements will
-** be met: https://www.gnu.org/licenses/gpl-2.0.html and
-** https://www.gnu.org/licenses/gpl-3.0.html.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-#ifndef QGSTREAMERMIRTEXTURERENDERER_H
-#define QGSTREAMERMIRTEXTURERENDERER_H
-
-//
-// W A R N I N G
-// -------------
-//
-// This file is not part of the Qt API. It exists purely as an
-// implementation detail. This header file may change from version to
-// version without notice, or even be removed.
-//
-// We mean it.
-//
-
-#include <qmediaplayer.h>
-#include <qvideorenderercontrol.h>
-#include <private/qvideosurfacegstsink_p.h>
-#include <qabstractvideosurface.h>
-
-#include "qgstreamervideorendererinterface_p.h"
-
-QT_BEGIN_NAMESPACE
-
-class QGstreamerMirTextureBuffer;
-class QGstreamerPlayerSession;
-class QGLContext;
-class QOpenGLContext;
-class QSurfaceFormat;
-
-class QGstreamerMirTextureRenderer : public QVideoRendererControl, public QGstreamerVideoRendererInterface
-{
- Q_OBJECT
- Q_INTERFACES(QGstreamerVideoRendererInterface)
-public:
- QGstreamerMirTextureRenderer(QObject *parent = 0, const QGstreamerPlayerSession *playerSession = 0);
- virtual ~QGstreamerMirTextureRenderer();
-
- QAbstractVideoSurface *surface() const;
- void setSurface(QAbstractVideoSurface *surface);
-
- void setPlayerSession(const QGstreamerPlayerSession *playerSession);
-
- GstElement *videoSink();
-
- void stopRenderer();
- bool isReady() const { return m_surface != 0; }
-
-signals:
- void sinkChanged();
- void readyChanged(bool);
- void nativeSizeChanged();
-
-private slots:
- void handleFormatChange();
- void updateNativeVideoSize();
- void handleFocusWindowChanged(QWindow *window);
- void renderFrame();
-
-private:
- QWindow *createOffscreenWindow(const QSurfaceFormat &format);
- static void handleFrameReady(gpointer userData);
- static GstPadProbeReturn padBufferProbe(GstPad *pad, GstPadProbeInfo *info, gpointer userData);
-
- GstElement *m_videoSink;
- QPointer<QAbstractVideoSurface> m_surface;
- QPointer<QAbstractVideoSurface> m_glSurface;
- QGLContext *m_context;
- QOpenGLContext *m_glContext;
- unsigned int m_textureId;
- QWindow *m_offscreenSurface;
- QGstreamerPlayerSession *m_playerSession;
- QGstreamerMirTextureBuffer *m_textureBuffer;
- QSize m_nativeSize;
-
- QMutex m_mutex;
-};
-
-QT_END_NAMESPACE
-
-#endif // QGSTREAMERMIRTEXTURERENDRER_H
diff --git a/src/gsttools/qgstreamerplayercontrol.cpp b/src/gsttools/qgstreamerplayercontrol.cpp
index 73f43f0b9..03350a432 100644
--- a/src/gsttools/qgstreamerplayercontrol.cpp
+++ b/src/gsttools/qgstreamerplayercontrol.cpp
@@ -377,14 +377,10 @@ void QGstreamerPlayerControl::setMedia(const QMediaContent &content, QIODevice *
m_currentResource = content;
m_stream = stream;
- QNetworkRequest request;
+ QNetworkRequest request = content.request();
- if (m_stream) {
+ if (m_stream)
userStreamValid = stream->isOpen() && m_stream->isReadable();
- request = content.canonicalRequest();
- } else if (!content.isNull()) {
- request = content.canonicalRequest();
- }
#if !QT_CONFIG(gstreamer_app)
m_session->loadFromUri(request);
diff --git a/src/gsttools/qgstreamerplayersession.cpp b/src/gsttools/qgstreamerplayersession.cpp
index 5ede8a1c9..2f89d2a1a 100644
--- a/src/gsttools/qgstreamerplayersession.cpp
+++ b/src/gsttools/qgstreamerplayersession.cpp
@@ -206,13 +206,26 @@ QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
m_videoOutputBin = gst_bin_new("video-output-bin");
// might not get a parent, take ownership to avoid leak
qt_gst_object_ref_sink(GST_OBJECT(m_videoOutputBin));
+
+ GstElement *videoOutputSink = m_videoIdentity;
+#if QT_CONFIG(gstreamer_gl)
+ if (QGstUtils::useOpenGL()) {
+ videoOutputSink = gst_element_factory_make("glupload", NULL);
+ GstElement *colorConvert = gst_element_factory_make("glcolorconvert", NULL);
+ gst_bin_add_many(GST_BIN(m_videoOutputBin), videoOutputSink, colorConvert, m_videoIdentity, m_nullVideoSink, NULL);
+ gst_element_link_many(videoOutputSink, colorConvert, m_videoIdentity, NULL);
+ } else {
+ gst_bin_add_many(GST_BIN(m_videoOutputBin), m_videoIdentity, m_nullVideoSink, NULL);
+ }
+#else
gst_bin_add_many(GST_BIN(m_videoOutputBin), m_videoIdentity, m_nullVideoSink, NULL);
+#endif
gst_element_link(m_videoIdentity, m_nullVideoSink);
m_videoSink = m_nullVideoSink;
// add ghostpads
- GstPad *pad = gst_element_get_static_pad(m_videoIdentity,"sink");
+ GstPad *pad = gst_element_get_static_pad(videoOutputSink, "sink");
gst_element_add_pad(GST_ELEMENT(m_videoOutputBin), gst_ghost_pad_new("sink", pad));
gst_object_unref(GST_OBJECT(pad));
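For reference, the GL branch above assembles a video-output-bin equivalent to glupload ! glcolorconvert ! identity ! sink, with the ghost pad now attached to glupload instead of the identity element. Below is a minimal standalone sketch of that element chain (illustrative only, not part of this patch), using videotestsrc and glimagesink in place of the session's source and application-provided sink:

    #include <gst/gst.h>

    /* Illustrative only: approximates the GL video-output-bin built above
     * (glupload ! glcolorconvert ! ... ! sink). */
    int main(int argc, char *argv[])
    {
        gst_init(&argc, &argv);

        GError *error = NULL;
        GstElement *pipeline = gst_parse_launch(
            "videotestsrc ! glupload ! glcolorconvert ! glimagesink", &error);
        if (!pipeline) {
            g_printerr("Failed to build pipeline: %s\n", error->message);
            g_clear_error(&error);
            return 1;
        }

        gst_element_set_state(pipeline, GST_STATE_PLAYING);
        g_usleep(3 * G_USEC_PER_SEC); /* let it run briefly */
        gst_element_set_state(pipeline, GST_STATE_NULL);
        gst_object_unref(pipeline);
        return 0;
    }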
diff --git a/src/gsttools/qgstreamervideooverlay.cpp b/src/gsttools/qgstreamervideooverlay.cpp
index 1f3e28549..6b862e475 100644
--- a/src/gsttools/qgstreamervideooverlay.cpp
+++ b/src/gsttools/qgstreamervideooverlay.cpp
@@ -48,6 +48,8 @@
#include <gst/video/videooverlay.h>
#endif
+#include <QtMultimedia/private/qtmultimediaglobal_p.h>
+
QT_BEGIN_NAMESPACE
struct ElementMap
@@ -59,6 +61,9 @@ struct ElementMap
// Ordered by descending priority
static const ElementMap elementMap[] =
{
+#if QT_CONFIG(gstreamer_gl)
+ { "xcb", "glimagesink" },
+#endif
{ "xcb", "vaapisink" },
{ "xcb", "xvimagesink" },
{ "xcb", "ximagesink" }
@@ -340,6 +345,10 @@ static GstElement *findBestVideoSink()
// First, try some known video sinks, depending on the Qt platform plugin in use.
for (quint32 i = 0; i < (sizeof(elementMap) / sizeof(ElementMap)); ++i) {
+#if QT_CONFIG(gstreamer_gl)
+ if (!QGstUtils::useOpenGL() && qstrcmp(elementMap[i].gstreamerElement, "glimagesink") == 0)
+ continue;
+#endif
if (platform == QLatin1String(elementMap[i].qtPlatform)
&& (choice = gst_element_factory_make(elementMap[i].gstreamerElement, NULL))) {
diff --git a/src/gsttools/qgstreamervideowidget.cpp b/src/gsttools/qgstreamervideowidget.cpp
index bb85e1eb7..288a9c9c0 100644
--- a/src/gsttools/qgstreamervideowidget.cpp
+++ b/src/gsttools/qgstreamervideowidget.cpp
@@ -173,6 +173,8 @@ bool QGstreamerVideoWidgetControl::eventFilter(QObject *object, QEvent *e)
}
if (e->type() == QEvent::Paint) {
+ // Update the overlay with the new size, if any.
+ m_videoOverlay.setRenderRectangle(QRect(0, 0, m_widget->width(), m_widget->height()));
if (m_videoOverlay.isActive())
m_videoOverlay.expose(); // triggers a repaint of the last frame
else
diff --git a/src/gsttools/qgstutils.cpp b/src/gsttools/qgstutils.cpp
index 06f277766..82af0f1e2 100644
--- a/src/gsttools/qgstutils.cpp
+++ b/src/gsttools/qgstutils.cpp
@@ -1034,6 +1034,7 @@ struct VideoFormat
static const VideoFormat qt_videoFormatLookup[] =
{
{ QVideoFrame::Format_YUV420P, GST_VIDEO_FORMAT_I420 },
+ { QVideoFrame::Format_YUV422P, GST_VIDEO_FORMAT_Y42B },
{ QVideoFrame::Format_YV12 , GST_VIDEO_FORMAT_YV12 },
{ QVideoFrame::Format_UYVY , GST_VIDEO_FORMAT_UYVY },
{ QVideoFrame::Format_YUYV , GST_VIDEO_FORMAT_YUY2 },
@@ -1044,11 +1045,13 @@ static const VideoFormat qt_videoFormatLookup[] =
{ QVideoFrame::Format_RGB32 , GST_VIDEO_FORMAT_BGRx },
{ QVideoFrame::Format_BGR32 , GST_VIDEO_FORMAT_RGBx },
{ QVideoFrame::Format_ARGB32, GST_VIDEO_FORMAT_BGRA },
+ { QVideoFrame::Format_ABGR32, GST_VIDEO_FORMAT_RGBA },
{ QVideoFrame::Format_BGRA32, GST_VIDEO_FORMAT_ARGB },
#else
{ QVideoFrame::Format_RGB32 , GST_VIDEO_FORMAT_xRGB },
{ QVideoFrame::Format_BGR32 , GST_VIDEO_FORMAT_xBGR },
{ QVideoFrame::Format_ARGB32, GST_VIDEO_FORMAT_ARGB },
+ { QVideoFrame::Format_ABGR32, GST_VIDEO_FORMAT_ABGR },
{ QVideoFrame::Format_BGRA32, GST_VIDEO_FORMAT_BGRA },
#endif
{ QVideoFrame::Format_RGB24 , GST_VIDEO_FORMAT_RGB },
@@ -1086,6 +1089,7 @@ struct YuvFormat
static const YuvFormat qt_yuvColorLookup[] =
{
{ QVideoFrame::Format_YUV420P, GST_MAKE_FOURCC('I','4','2','0'), 8 },
+ { QVideoFrame::Format_YUV422P, GST_MAKE_FOURCC('Y','4','2','B'), 8 },
{ QVideoFrame::Format_YV12, GST_MAKE_FOURCC('Y','V','1','2'), 8 },
{ QVideoFrame::Format_UYVY, GST_MAKE_FOURCC('U','Y','V','Y'), 16 },
{ QVideoFrame::Format_YUYV, GST_MAKE_FOURCC('Y','U','Y','2'), 16 },
@@ -1566,6 +1570,12 @@ QVariant QGstUtils::toGStreamerOrientation(const QVariant &value)
}
#endif
+bool QGstUtils::useOpenGL()
+{
+ static bool result = qEnvironmentVariableIntValue("QT_GSTREAMER_USE_OPENGL_PLUGIN");
+ return result;
+}
+
void qt_gst_object_ref_sink(gpointer object)
{
#if GST_CHECK_VERSION(0,10,24)
diff --git a/src/gsttools/qgstutils_p.h b/src/gsttools/qgstutils_p.h
index 387a2e27a..5a2feec17 100644
--- a/src/gsttools/qgstutils_p.h
+++ b/src/gsttools/qgstutils_p.h
@@ -153,6 +153,8 @@ namespace QGstUtils {
Q_GSTTOOLS_EXPORT QVariant fromGStreamerOrientation(const QVariant &value);
Q_GSTTOOLS_EXPORT QVariant toGStreamerOrientation(const QVariant &value);
#endif
+
+ Q_GSTTOOLS_EXPORT bool useOpenGL();
}
Q_GSTTOOLS_EXPORT void qt_gst_object_ref_sink(gpointer object);
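The new useOpenGL() helper simply caches QT_GSTREAMER_USE_OPENGL_PLUGIN, so the GL code paths added throughout this patch are opt-in. A minimal sketch of how an application might enable them before the backend is created (only the environment variable name comes from the hunk above; the rest is illustrative):

    #include <QGuiApplication>
    #include <QtGlobal>

    int main(int argc, char *argv[])
    {
        // A non-zero value makes QGstUtils::useOpenGL() return true, so the
        // backend prefers glupload/glcolorconvert and glimagesink where available.
        qputenv("QT_GSTREAMER_USE_OPENGL_PLUGIN", "1");

        QGuiApplication app(argc, argv);
        // ... set up QMediaPlayer / video output as usual ...
        return app.exec();
    }

Alternatively, the variable can be exported from the shell that launches the application; the helper only checks it once.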
diff --git a/src/gsttools/qgstvideorenderersink.cpp b/src/gsttools/qgstvideorenderersink.cpp
index c3a7a5988..5f71d342c 100644
--- a/src/gsttools/qgstvideorenderersink.cpp
+++ b/src/gsttools/qgstvideorenderersink.cpp
@@ -54,12 +54,32 @@
#include "qgstutils_p.h"
+#if QT_CONFIG(gstreamer_gl)
+#include <QOpenGLContext>
+#include <QGuiApplication>
+#include <QWindow>
+#include <qpa/qplatformnativeinterface.h>
+
+#include <gst/gl/gstglconfig.h>
+
+#if GST_GL_HAVE_WINDOW_X11
+# include <gst/gl/x11/gstgldisplay_x11.h>
+#endif
+#if GST_GL_HAVE_PLATFORM_EGL
+# include <gst/gl/egl/gstgldisplay_egl.h>
+#endif
+#if GST_CHECK_VERSION(1,11,1) && GST_GL_HAVE_WINDOW_WAYLAND
+# include <gst/gl/wayland/gstgldisplay_wayland.h>
+#endif
+#endif // #if QT_CONFIG(gstreamer_gl)
+
//#define DEBUG_VIDEO_SURFACE_SINK
QT_BEGIN_NAMESPACE
QGstDefaultVideoRenderer::QGstDefaultVideoRenderer()
: m_flushed(true)
+ , m_handleType(QAbstractVideoBuffer::NoHandle)
{
}
@@ -69,13 +89,33 @@ QGstDefaultVideoRenderer::~QGstDefaultVideoRenderer()
GstCaps *QGstDefaultVideoRenderer::getCaps(QAbstractVideoSurface *surface)
{
- return QGstUtils::capsForFormats(surface->supportedPixelFormats());
+#if QT_CONFIG(gstreamer_gl)
+ if (QGstUtils::useOpenGL()) {
+ m_handleType = QAbstractVideoBuffer::GLTextureHandle;
+ auto formats = surface->supportedPixelFormats(m_handleType);
+ // Even if the surface does not support gl textures,
+ // glupload will be added to the pipeline and GLMemory will be requested.
+ // This causes the data to be uploaded to GL textures
+ // and downloaded again when the buffer is used for rendering.
+ if (formats.isEmpty()) {
+ m_handleType = QAbstractVideoBuffer::NoHandle;
+ formats = surface->supportedPixelFormats(m_handleType);
+ }
+
+ GstCaps *caps = QGstUtils::capsForFormats(formats);
+ for (guint i = 0; i < gst_caps_get_size(caps); ++i)
+ gst_caps_set_features(caps, i, gst_caps_features_from_string("memory:GLMemory"));
+
+ return caps;
+ }
+#endif
+ return QGstUtils::capsForFormats(surface->supportedPixelFormats(QAbstractVideoBuffer::NoHandle));
}
bool QGstDefaultVideoRenderer::start(QAbstractVideoSurface *surface, GstCaps *caps)
{
m_flushed = true;
- m_format = QGstUtils::formatForCaps(caps, &m_videoInfo);
+ m_format = QGstUtils::formatForCaps(caps, &m_videoInfo, m_handleType);
return m_format.isValid() && surface->start(m_format);
}
@@ -90,8 +130,21 @@ void QGstDefaultVideoRenderer::stop(QAbstractVideoSurface *surface)
bool QGstDefaultVideoRenderer::present(QAbstractVideoSurface *surface, GstBuffer *buffer)
{
m_flushed = false;
+
+ QGstVideoBuffer *videoBuffer = nullptr;
+#if QT_CONFIG(gstreamer_gl)
+ if (m_format.handleType() == QAbstractVideoBuffer::GLTextureHandle) {
+ GstGLMemory *glmem = GST_GL_MEMORY_CAST(gst_buffer_peek_memory(buffer, 0));
+ guint textureId = gst_gl_memory_get_texture_id(glmem);
+ videoBuffer = new QGstVideoBuffer(buffer, m_videoInfo, m_format.handleType(), textureId);
+ }
+#endif
+
+ if (!videoBuffer)
+ videoBuffer = new QGstVideoBuffer(buffer, m_videoInfo);
+
QVideoFrame frame(
- new QGstVideoBuffer(buffer, m_videoInfo),
+ videoBuffer,
m_format.frameSize(),
m_format.pixelFormat());
QGstUtils::setFrameTimeStamps(&frame, buffer);
@@ -145,6 +198,10 @@ QVideoSurfaceGstDelegate::~QVideoSurfaceGstDelegate()
gst_caps_unref(m_surfaceCaps);
if (m_startCaps)
gst_caps_unref(m_startCaps);
+#if QT_CONFIG(gstreamer_gl)
+ if (m_gstGLDisplayContext)
+ gst_object_unref(m_gstGLDisplayContext);
+#endif
}
GstCaps *QVideoSurfaceGstDelegate::caps()
@@ -254,6 +311,118 @@ GstFlowReturn QVideoSurfaceGstDelegate::render(GstBuffer *buffer)
return m_renderReturn;
}
+#if QT_CONFIG(gstreamer_gl)
+static GstGLContext *gstGLDisplayContext(QAbstractVideoSurface *surface)
+{
+ QOpenGLContext *glContext = qobject_cast<QOpenGLContext*>(surface->property("GLContext").value<QObject*>());
+ // Context is not ready yet.
+ if (!glContext)
+ return nullptr;
+
+ GstGLDisplay *display = nullptr;
+ const QString platform = QGuiApplication::platformName();
+ const char *contextName = "eglcontext";
+ GstGLPlatform glPlatform = GST_GL_PLATFORM_EGL;
+ QPlatformNativeInterface *pni = QGuiApplication::platformNativeInterface();
+
+#if GST_GL_HAVE_WINDOW_X11
+ if (platform == QLatin1String("xcb")) {
+ if (QOpenGLContext::openGLModuleType() == QOpenGLContext::LibGL) {
+ contextName = "glxcontext";
+ glPlatform = GST_GL_PLATFORM_GLX;
+ }
+
+ display = (GstGLDisplay *)gst_gl_display_x11_new_with_display(
+ (Display *)pni->nativeResourceForIntegration("display"));
+ }
+#endif
+
+#if GST_GL_HAVE_PLATFORM_EGL
+ if (!display && platform == QLatin1String("eglfs")) {
+ display = (GstGLDisplay *)gst_gl_display_egl_new_with_egl_display(
+ pni->nativeResourceForIntegration("egldisplay"));
+ }
+#endif
+
+#if GST_CHECK_VERSION(1,11,1)
+#if GST_GL_HAVE_WINDOW_WAYLAND
+ if (!display && platform.startsWith(QLatin1String("wayland"))) {
+ const char *displayName = (platform == QLatin1String("wayland"))
+ ? "display" : "egldisplay";
+
+ display = (GstGLDisplay *)gst_gl_display_wayland_new_with_display(
+ (struct wl_display *)pni->nativeResourceForIntegration(displayName));
+ }
+#endif
+#endif
+
+ if (!display) {
+ qWarning() << "Could not create GstGLDisplay";
+ return nullptr;
+ }
+
+ void *nativeContext = pni->nativeResourceForContext(contextName, glContext);
+ if (!nativeContext)
+ qWarning() << "Could not find resource for" << contextName;
+
+ GstGLContext *appContext = gst_gl_context_new_wrapped(display, (guintptr)nativeContext, glPlatform, GST_GL_API_ANY);
+ if (!appContext)
+ qWarning() << "Could not create wrapped context for platform:" << glPlatform;
+
+ GstGLContext *displayContext = nullptr;
+ GError *error = NULL;
+ gst_gl_display_create_context(display, appContext, &displayContext, &error);
+ if (error) {
+ qWarning() << "Could not create display context:" << error->message;
+ g_clear_error(&error);
+ }
+
+ if (appContext)
+ gst_object_unref(appContext);
+
+ gst_object_unref(display);
+
+ return displayContext;
+}
+#endif // #if QT_CONFIG(gstreamer_gl)
+
+bool QVideoSurfaceGstDelegate::query(GstQuery *query)
+{
+#if QT_CONFIG(gstreamer_gl)
+ if (GST_QUERY_TYPE(query) == GST_QUERY_CONTEXT) {
+ const gchar *type;
+ gst_query_parse_context_type(query, &type);
+
+ if (strcmp(type, "gst.gl.local_context") != 0)
+ return false;
+
+ if (!m_gstGLDisplayContext)
+ m_gstGLDisplayContext = gstGLDisplayContext(m_surface);
+
+ // No context yet.
+ if (!m_gstGLDisplayContext)
+ return false;
+
+ GstContext *context = NULL;
+ gst_query_parse_context(query, &context);
+ context = context ? gst_context_copy(context) : gst_context_new(type, FALSE);
+ GstStructure *structure = gst_context_writable_structure(context);
+#if GST_CHECK_VERSION(1,11,1)
+ gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, m_gstGLDisplayContext, NULL);
+#else
+ gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, m_gstGLDisplayContext, NULL);
+#endif
+ gst_query_set_context(query, context);
+ gst_context_unref(context);
+
+ return m_gstGLDisplayContext;
+ }
+#else
+ Q_UNUSED(query);
+#endif
+ return false;
+}
+
bool QVideoSurfaceGstDelegate::event(QEvent *event)
{
if (event->type() == QEvent::UpdateRequest) {
@@ -460,6 +629,7 @@ void QGstVideoRendererSink::class_init(gpointer g_class, gpointer class_data)
base_sink_class->propose_allocation = QGstVideoRendererSink::propose_allocation;
base_sink_class->stop = QGstVideoRendererSink::stop;
base_sink_class->unlock = QGstVideoRendererSink::unlock;
+ base_sink_class->query = QGstVideoRendererSink::query;
GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class);
element_class->change_state = QGstVideoRendererSink::change_state;
@@ -603,4 +773,13 @@ GstFlowReturn QGstVideoRendererSink::show_frame(GstVideoSink *base, GstBuffer *b
return sink->delegate->render(buffer);
}
+gboolean QGstVideoRendererSink::query(GstBaseSink *base, GstQuery *query)
+{
+ VO_SINK(base);
+ if (sink->delegate->query(query))
+ return TRUE;
+
+ return GST_BASE_SINK_CLASS(sink_parent_class)->query(base, query);
+}
+
QT_END_NAMESPACE
diff --git a/src/gsttools/qgstvideorenderersink_p.h b/src/gsttools/qgstvideorenderersink_p.h
index d2417a7c9..38854291a 100644
--- a/src/gsttools/qgstvideorenderersink_p.h
+++ b/src/gsttools/qgstvideorenderersink_p.h
@@ -51,6 +51,7 @@
// We mean it.
//
+#include <QtMultimedia/private/qtmultimediaglobal_p.h>
#include <gst/video/gstvideosink.h>
#include <gst/video/video.h>
@@ -67,6 +68,13 @@
#include "qgstvideorendererplugin_p.h"
+#if QT_CONFIG(gstreamer_gl)
+#ifndef GST_USE_UNSTABLE_API
+#define GST_USE_UNSTABLE_API
+#endif
+#include <gst/gl/gl.h>
+#endif
+
QT_BEGIN_NAMESPACE
class QAbstractVideoSurface;
@@ -89,6 +97,7 @@ private:
QVideoSurfaceFormat m_format;
GstVideoInfo m_videoInfo;
bool m_flushed;
+ QAbstractVideoBuffer::HandleType m_handleType;
};
class QVideoSurfaceGstDelegate : public QObject
@@ -110,6 +119,7 @@ public:
GstFlowReturn render(GstBuffer *buffer);
bool event(QEvent *event) override;
+ bool query(GstQuery *query);
private slots:
bool handleEvent(QMutexLocker *locker);
@@ -132,6 +142,9 @@ private:
GstCaps *m_surfaceCaps;
GstCaps *m_startCaps;
GstBuffer *m_renderBuffer;
+#if QT_CONFIG(gstreamer_gl)
+ GstGLContext *m_gstGLDisplayContext = nullptr;
+#endif
bool m_notified;
bool m_stop;
@@ -168,6 +181,7 @@ private:
static gboolean unlock(GstBaseSink *sink);
static GstFlowReturn show_frame(GstVideoSink *sink, GstBuffer *buffer);
+ static gboolean query(GstBaseSink *element, GstQuery *query);
private:
QVideoSurfaceGstDelegate *delegate;
diff --git a/src/imports/multimedia/qdeclarativeplaylist.cpp b/src/imports/multimedia/qdeclarativeplaylist.cpp
index b768f99e3..400e23467 100644
--- a/src/imports/multimedia/qdeclarativeplaylist.cpp
+++ b/src/imports/multimedia/qdeclarativeplaylist.cpp
@@ -232,7 +232,7 @@ void QDeclarativePlaylist::setPlaybackMode(PlaybackMode mode)
*/
QUrl QDeclarativePlaylist::currentItemSource() const
{
- return m_playlist->currentMedia().canonicalUrl();
+ return m_playlist->currentMedia().request().url();
}
/*!
@@ -316,7 +316,7 @@ QString QDeclarativePlaylist::errorString() const
*/
QUrl QDeclarativePlaylist::itemSource(int index)
{
- return m_playlist->media(index).canonicalUrl();
+ return m_playlist->media(index).request().url();
}
/*!
@@ -548,7 +548,7 @@ QVariant QDeclarativePlaylist::data(const QModelIndex &index, int role) const
if (!index.isValid())
return QVariant();
- return m_playlist->media(index.row()).canonicalUrl();
+ return m_playlist->media(index.row()).request().url();
}
QHash<int, QByteArray> QDeclarativePlaylist::roleNames() const
diff --git a/src/multimedia/configure.json b/src/multimedia/configure.json
index 6d56af5ed..ca2839cea 100644
--- a/src/multimedia/configure.json
+++ b/src/multimedia/configure.json
@@ -95,6 +95,17 @@
{ "libs": "-lgstphotography-1.0" }
]
},
+ "gstreamer_gl_1_0": {
+ "label": "GStreamer OpenGL 1.0",
+ "export": "gstreamer_gl",
+ "test": {
+ "include": "gst/gl/gl.h"
+ },
+ "use": "gstreamer_1_0",
+ "sources": [
+ { "type": "pkgConfig", "args": "gstreamer-gl-1.0" }
+ ]
+ },
"libresourceqt5": {
"label": "libresourceqt5",
"test": "resourcepolicy",
@@ -229,6 +240,11 @@
"condition": "(features.gstreamer_1_0 && libs.gstreamer_photography_1_0) || (features.gstreamer_0_10 && libs.gstreamer_photography_0_10)",
"output": [ "privateFeature" ]
},
+ "gstreamer_gl": {
+ "label": "GStreamer OpenGL",
+ "condition": "features.gstreamer_1_0 && libs.gstreamer_gl_1_0",
+ "output": [ "privateFeature" ]
+ },
"gpu_vivante": {
"label": "Vivante GPU",
"condition": "features.gui && features.opengles2 && tests.gpu_vivante",
diff --git a/src/multimedia/playback/qmediacontent.cpp b/src/multimedia/playback/qmediacontent.cpp
index 95116d02f..f61511f1c 100644
--- a/src/multimedia/playback/qmediacontent.cpp
+++ b/src/multimedia/playback/qmediacontent.cpp
@@ -123,8 +123,7 @@ private:
which provides the URL of the content.
A non-null QMediaContent will always have a reference to
- the content available through the canonicalUrl() or canonicalRequest()
- methods.
+ the content available through the request() method.
Alternatively QMediaContent can represent a playlist and contain a pointer to a
valid QMediaPlaylist object. In this case URL is optional and can either be empty
@@ -258,25 +257,34 @@ bool QMediaContent::isNull() const
return d.constData() == nullptr;
}
+QNetworkRequest QMediaContent::request() const
+{
+ return (d && !d->requests.isEmpty()) ? d->requests.first() : QNetworkRequest();
+}
+
+#if QT_DEPRECATED_SINCE(6, 0)
/*!
+ \obsolete
+
Returns a QUrl that represents that canonical resource for this media content.
*/
QUrl QMediaContent::canonicalUrl() const
{
- return canonicalRequest().url();
+ return request().url();
}
/*!
+ \obsolete
+
Returns a QNetworkRequest that represents that canonical resource for this media content.
*/
QNetworkRequest QMediaContent::canonicalRequest() const
{
- return (d && !d->requests.isEmpty()) ? d->requests.first() : QNetworkRequest();
+ return request();
}
-#if QT_DEPRECATED_SINCE(6, 0)
/*!
\obsolete
diff --git a/src/multimedia/playback/qmediacontent.h b/src/multimedia/playback/qmediacontent.h
index 5193a1fcc..244715b41 100644
--- a/src/multimedia/playback/qmediacontent.h
+++ b/src/multimedia/playback/qmediacontent.h
@@ -72,10 +72,11 @@ public:
bool operator!=(const QMediaContent &other) const;
bool isNull() const;
+ QNetworkRequest request() const;
- QUrl canonicalUrl() const;
- QNetworkRequest canonicalRequest() const;
#if QT_DEPRECATED_SINCE(6, 0)
+ QT_DEPRECATED_X("Use QMediaContent::request().url()") QUrl canonicalUrl() const;
+ QT_DEPRECATED_X("Use QMediaContent::request()") QNetworkRequest canonicalRequest() const;
QT_DEPRECATED QMediaResource canonicalResource() const;
QT_DEPRECATED QMediaResourceList resources() const;
#endif
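The caller-side migration implied by the deprecation notes above is mechanical; a minimal sketch (illustrative, not part of this patch):

    #include <QMediaContent>
    #include <QNetworkRequest>
    #include <QUrl>

    // Before this patch callers used canonicalUrl()/canonicalRequest();
    // both now map onto the single request() accessor.
    static QUrl mediaUrl(const QMediaContent &content)
    {
        // old: return content.canonicalUrl();
        return content.request().url();
    }

    static QNetworkRequest mediaRequest(const QMediaContent &content)
    {
        // old: return content.canonicalRequest();
        return content.request();
    }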
diff --git a/src/multimedia/playback/qmediaplayer.cpp b/src/multimedia/playback/qmediaplayer.cpp
index 81d525ea8..6ff3fc7df 100644
--- a/src/multimedia/playback/qmediaplayer.cpp
+++ b/src/multimedia/playback/qmediaplayer.cpp
@@ -183,7 +183,7 @@ bool QMediaPlayerPrivate::isInChain(const QUrl &url)
// Check whether a URL is already in the chain of playlists.
// Also see a comment in parentPlaylist().
for (QMediaPlaylist *current = rootMedia.playlist(); current && current != playlist; current = current->currentMedia().playlist())
- if (current->currentMedia().canonicalUrl() == url) {
+ if (current->currentMedia().request().url() == url) {
return true;
}
return false;
@@ -345,10 +345,10 @@ void QMediaPlayerPrivate::setMedia(const QMediaContent &media, QIODevice *stream
// Backends can't play qrc files directly.
// If the backend supports StreamPlayback, we pass a QFile for that resource.
// If it doesn't, we copy the data to a temporary file and pass its path.
- if (!media.isNull() && !stream && media.canonicalUrl().scheme() == QLatin1String("qrc")) {
+ if (!media.isNull() && !stream && media.request().url().scheme() == QLatin1String("qrc")) {
qrcMedia = media;
- file.reset(new QFile(QLatin1Char(':') + media.canonicalUrl().path()));
+ file.reset(new QFile(QLatin1Char(':') + media.request().url().path()));
if (!file->open(QFile::ReadOnly)) {
QMetaObject::invokeMethod(q, "_q_error", Qt::QueuedConnection,
Q_ARG(int, QMediaPlayer::ResourceError),
@@ -464,11 +464,14 @@ void QMediaPlayerPrivate::loadPlaylist()
// Do not load a playlist if there are more than MAX_NESTED_PLAYLISTS in the chain already,
// or if the playlist URL is already in the chain, i.e. do not allow recursive playlists and loops.
- if (nestedPlaylists < MAX_NESTED_PLAYLISTS && !q->currentMedia().canonicalUrl().isEmpty() && !isInChain(q->currentMedia().canonicalUrl())) {
- pendingPlaylist = QMediaContent(new QMediaPlaylist, q->currentMedia().canonicalUrl(), true);
+ if (nestedPlaylists < MAX_NESTED_PLAYLISTS
+ && !q->currentMedia().request().url().isEmpty()
+ && !isInChain(q->currentMedia().request().url()))
+ {
+ pendingPlaylist = QMediaContent(new QMediaPlaylist, q->currentMedia().request().url(), true);
QObject::connect(pendingPlaylist.playlist(), SIGNAL(loaded()), q, SLOT(_q_handlePlaylistLoaded()));
QObject::connect(pendingPlaylist.playlist(), SIGNAL(loadFailed()), q, SLOT(_q_handlePlaylistLoadFailed()));
- pendingPlaylist.playlist()->load(pendingPlaylist.canonicalRequest());
+ pendingPlaylist.playlist()->load(pendingPlaylist.request());
} else if (playlist) {
playlist->next();
}
diff --git a/src/multimedia/playback/qplaylistfileparser.cpp b/src/multimedia/playback/qplaylistfileparser.cpp
index 92e7f97e8..9af447032 100644
--- a/src/multimedia/playback/qplaylistfileparser.cpp
+++ b/src/multimedia/playback/qplaylistfileparser.cpp
@@ -504,7 +504,7 @@ void QPlaylistFileParser::start(const QMediaContent &media, QIODevice *stream, c
if (stream)
start(stream, mimeType);
else
- start(media.canonicalRequest(), mimeType);
+ start(media.request(), mimeType);
}
void QPlaylistFileParser::start(QIODevice *stream, const QString &mimeType)
diff --git a/src/multimedia/video/qvideoframe.cpp b/src/multimedia/video/qvideoframe.cpp
index e94b838f9..dfc6ad3ee 100644
--- a/src/multimedia/video/qvideoframe.cpp
+++ b/src/multimedia/video/qvideoframe.cpp
@@ -227,6 +227,11 @@ private:
horizontally and vertically sub-sampled, i.e. the height and width of the U and V planes are
half that of the Y plane.
+ \value Format_YUV422P
+ The frame is stored using an 8-bit per component planar YUV format with the U and V planes
+ horizontally sub-sampled, i.e. the width of the U and V planes is
+ half that of the Y plane, while their height is the same as that of the Y plane.
+
\value Format_YV12
The frame is stored using an 8-bit per component planar YVU format with the V and U planes
horizontally and vertically sub-sampled, i.e. the height and width of the V and U planes are
@@ -638,6 +643,7 @@ bool QVideoFrame::map(QAbstractVideoBuffer::MapMode mode)
// Single plane or opaque format.
break;
case Format_YUV420P:
+ case Format_YUV422P:
case Format_YV12: {
// The UV stride is usually half the Y stride and is 32-bit aligned.
// However it's not always the case, at least on Windows where the
@@ -646,13 +652,14 @@ bool QVideoFrame::map(QAbstractVideoBuffer::MapMode mode)
// have a correct stride.
const int height = d->size.height();
const int yStride = d->bytesPerLine[0];
- const int uvStride = (d->mappedBytes - (yStride * height)) / height;
+ const int uvHeight = d->pixelFormat == Format_YUV422P ? height : height / 2;
+ const int uvStride = (d->mappedBytes - (yStride * height)) / uvHeight / 2;
- // Three planes, the second and third vertically and horizontally subsampled.
+ // Three planes, the second and third horizontally subsampled (and also vertically for formats other than Format_YUV422P).
d->planeCount = 3;
d->bytesPerLine[2] = d->bytesPerLine[1] = uvStride;
d->data[1] = d->data[0] + (yStride * height);
- d->data[2] = d->data[1] + (uvStride * height / 2);
+ d->data[2] = d->data[1] + (uvStride * uvHeight);
break;
}
case Format_NV12:
@@ -1001,6 +1008,7 @@ QImage::Format QVideoFrame::imageFormatFromPixelFormat(PixelFormat format)
case Format_AYUV444_Premultiplied:
case Format_YUV444:
case Format_YUV420P:
+ case Format_YUV422P:
case Format_YV12:
case Format_UYVY:
case Format_YUYV:
@@ -1058,6 +1066,7 @@ static VideoFrameConvertFunc qConvertFuncs[QVideoFrame::NPixelFormats] = {
/* Format_AYUV444_Premultiplied */ nullptr,
/* Format_YUV444 */ qt_convert_YUV444_to_ARGB32,
/* Format_YUV420P */ qt_convert_YUV420P_to_ARGB32,
+ /* Format_YUV422P */ nullptr,
/* Format_YV12 */ qt_convert_YV12_to_ARGB32,
/* Format_UYVY */ qt_convert_UYVY_to_ARGB32,
/* Format_YUYV */ qt_convert_YUYV_to_ARGB32,
@@ -1191,6 +1200,8 @@ QDebug operator<<(QDebug dbg, QVideoFrame::PixelFormat pf)
return dbg << "Format_YUV444";
case QVideoFrame::Format_YUV420P:
return dbg << "Format_YUV420P";
+ case QVideoFrame::Format_YUV422P:
+ return dbg << "Format_YUV422P";
case QVideoFrame::Format_YV12:
return dbg << "Format_YV12";
case QVideoFrame::Format_UYVY:
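A worked example of the plane layout the new Format_YUV422P branch in map() produces, under assumed dimensions (the arithmetic mirrors the uvHeight/uvStride computation above; the concrete numbers are illustrative):

    #include <cstdio>

    int main()
    {
        // Assumed 640x480 frame with no row padding (real frames may pad strides).
        const int width = 640, height = 480;
        const int yStride  = width;        // luma: full resolution
        const int uvStride = width / 2;    // chroma: half width for YUV422P...
        const int uvHeight = height;       // ...but full height, unlike YUV420P/YV12

        const int yBytes  = yStride * height;
        const int uvBytes = uvStride * uvHeight;

        // map() lays the planes out back to back:
        // data[0] = Y, data[1] = data[0] + yBytes, data[2] = data[1] + uvBytes
        std::printf("Y: %d bytes, U: %d bytes, V: %d bytes, total: %d\n",
                    yBytes, uvBytes, uvBytes, yBytes + 2 * uvBytes);
        return 0;
    }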
diff --git a/src/multimedia/video/qvideoframe.h b/src/multimedia/video/qvideoframe.h
index 375f80dac..b0710cd27 100644
--- a/src/multimedia/video/qvideoframe.h
+++ b/src/multimedia/video/qvideoframe.h
@@ -86,6 +86,7 @@ public:
Format_AYUV444_Premultiplied,
Format_YUV444,
Format_YUV420P,
+ Format_YUV422P,
Format_YV12,
Format_UYVY,
Format_YUYV,
diff --git a/src/multimedia/video/qvideosurfaceformat.cpp b/src/multimedia/video/qvideosurfaceformat.cpp
index 7a703c260..77ea276c7 100644
--- a/src/multimedia/video/qvideosurfaceformat.cpp
+++ b/src/multimedia/video/qvideosurfaceformat.cpp
@@ -519,9 +519,9 @@ QList<QByteArray> QVideoSurfaceFormat::propertyNames() const
QVariant QVideoSurfaceFormat::property(const char *name) const
{
if (qstrcmp(name, "handleType") == 0) {
- return qVariantFromValue(d->handleType);
+ return QVariant::fromValue(d->handleType);
} else if (qstrcmp(name, "pixelFormat") == 0) {
- return qVariantFromValue(d->pixelFormat);
+ return QVariant::fromValue(d->pixelFormat);
} else if (qstrcmp(name, "frameSize") == 0) {
return d->frameSize;
} else if (qstrcmp(name, "frameWidth") == 0) {
@@ -531,15 +531,15 @@ QVariant QVideoSurfaceFormat::property(const char *name) const
} else if (qstrcmp(name, "viewport") == 0) {
return d->viewport;
} else if (qstrcmp(name, "scanLineDirection") == 0) {
- return qVariantFromValue(d->scanLineDirection);
+ return QVariant::fromValue(d->scanLineDirection);
} else if (qstrcmp(name, "frameRate") == 0) {
- return qVariantFromValue(d->frameRate);
+ return QVariant::fromValue(d->frameRate);
} else if (qstrcmp(name, "pixelAspectRatio") == 0) {
- return qVariantFromValue(d->pixelAspectRatio);
+ return QVariant::fromValue(d->pixelAspectRatio);
} else if (qstrcmp(name, "sizeHint") == 0) {
return sizeHint();
} else if (qstrcmp(name, "yCbCrColorSpace") == 0) {
- return qVariantFromValue(d->ycbcrColorSpace);
+ return QVariant::fromValue(d->ycbcrColorSpace);
} else if (qstrcmp(name, "mirrored") == 0) {
return d->mirrored;
} else {
diff --git a/src/multimediawidgets/qvideowidget.cpp b/src/multimediawidgets/qvideowidget.cpp
index 7d26a8534..991901097 100644
--- a/src/multimediawidgets/qvideowidget.cpp
+++ b/src/multimediawidgets/qvideowidget.cpp
@@ -1007,7 +1007,11 @@ void QVideoWidget::paintEvent(QPaintEvent *event)
}
#if defined(Q_OS_WIN)
+# if QT_VERSION >= QT_VERSION_CHECK(6, 0, 0)
+bool QVideoWidget::nativeEvent(const QByteArray &eventType, void *message, qintptr *result)
+# else
bool QVideoWidget::nativeEvent(const QByteArray &eventType, void *message, long *result)
+# endif
{
Q_D(QVideoWidget);
Q_UNUSED(eventType);
diff --git a/src/multimediawidgets/qvideowidget.h b/src/multimediawidgets/qvideowidget.h
index 2a08b6fbd..fff1153ca 100644
--- a/src/multimediawidgets/qvideowidget.h
+++ b/src/multimediawidgets/qvideowidget.h
@@ -82,7 +82,11 @@ public:
QSize sizeHint() const override;
#if defined(Q_OS_WIN)
+# if QT_VERSION >= QT_VERSION_CHECK(6, 0, 0)
+ bool nativeEvent(const QByteArray &eventType, void *message, qintptr *result) override;
+# else
bool nativeEvent(const QByteArray &eventType, void *message, long *result) override;
+# endif
#endif
public Q_SLOTS:
diff --git a/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp b/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp
index 13a8cdbbb..82250b654 100644
--- a/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp
+++ b/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp
@@ -384,7 +384,7 @@ void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent,
if ((mMediaPlayer->display() == 0) && mVideoOutput)
mMediaPlayer->setDisplay(mVideoOutput->surfaceTexture());
- mMediaPlayer->setDataSource(mediaContent.canonicalRequest());
+ mMediaPlayer->setDataSource(mediaContent.request());
mMediaPlayer->prepareAsync();
}
diff --git a/src/plugins/android/src/mediaplayer/qandroidmetadatareadercontrol.cpp b/src/plugins/android/src/mediaplayer/qandroidmetadatareadercontrol.cpp
index 1185e63dc..ef86af896 100644
--- a/src/plugins/android/src/mediaplayer/qandroidmetadatareadercontrol.cpp
+++ b/src/plugins/android/src/mediaplayer/qandroidmetadatareadercontrol.cpp
@@ -126,7 +126,7 @@ void QAndroidMetaDataReaderControl::onUpdateMetaData()
if (m_mediaContent.isNull())
return;
- const QUrl &url = m_mediaContent.canonicalUrl();
+ const QUrl &url = m_mediaContent.request().url();
QtConcurrent::run(&extractMetadata, this, url);
}
@@ -134,7 +134,7 @@ void QAndroidMetaDataReaderControl::updateData(const QVariantMap &metadata, cons
{
const QMutexLocker l(&m_mtx);
- if (m_mediaContent.canonicalUrl() != url)
+ if (m_mediaContent.request().url() != url)
return;
const bool oldAvailable = m_available;
diff --git a/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm b/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
index 2309221c2..3b3f47a43 100644
--- a/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
+++ b/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
@@ -491,7 +491,7 @@ const QIODevice *AVFMediaPlayerSession::mediaStream() const
void AVFMediaPlayerSession::setMedia(const QMediaContent &content, QIODevice *stream)
{
#ifdef QT_DEBUG_AVF
- qDebug() << Q_FUNC_INFO << content.canonicalUrl();
+ qDebug() << Q_FUNC_INFO << content.request().url();
#endif
[static_cast<AVFMediaPlayerSessionObserver*>(m_observer) unloadMedia];
@@ -508,7 +508,7 @@ void AVFMediaPlayerSession::setMedia(const QMediaContent &content, QIODevice *st
const QMediaPlayer::MediaStatus oldMediaStatus = m_mediaStatus;
const QMediaPlayer::State oldState = m_state;
- if (content.isNull() || content.canonicalUrl().isEmpty()) {
+ if (content.isNull() || content.request().url().isEmpty()) {
m_mediaStatus = QMediaPlayer::NoMedia;
if (m_mediaStatus != oldMediaStatus)
Q_EMIT mediaStatusChanged(m_mediaStatus);
@@ -526,7 +526,7 @@ void AVFMediaPlayerSession::setMedia(const QMediaContent &content, QIODevice *st
//Load AVURLAsset
//initialize asset using content's URL
- NSString *urlString = [NSString stringWithUTF8String:content.canonicalUrl().toEncoded().constData()];
+ NSString *urlString = [NSString stringWithUTF8String:content.request().url().toEncoded().constData()];
NSURL *url = [NSURL URLWithString:urlString];
[static_cast<AVFMediaPlayerSessionObserver*>(m_observer) setURL:url];
diff --git a/src/plugins/directshow/player/directshowplayerservice.cpp b/src/plugins/directshow/player/directshowplayerservice.cpp
index 5f7c65ad0..d5b8bef26 100644
--- a/src/plugins/directshow/player/directshowplayerservice.cpp
+++ b/src/plugins/directshow/player/directshowplayerservice.cpp
@@ -298,7 +298,7 @@ void DirectShowPlayerService::load(const QMediaContent &media, QIODevice *stream
if (m_graph)
releaseGraph();
- m_url = media.canonicalUrl();
+ m_url = media.request().url();
m_stream = stream;
m_error = QMediaPlayer::NoError;
m_errorString = QString();
diff --git a/src/plugins/gstreamer/camerabin/camerabincontrol.cpp b/src/plugins/gstreamer/camerabin/camerabincontrol.cpp
index a34315b8a..fdf3ff4ac 100644
--- a/src/plugins/gstreamer/camerabin/camerabincontrol.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabincontrol.cpp
@@ -121,7 +121,7 @@ void CameraBinControl::setState(QCamera::State state)
//special case for stopping the camera while it's busy,
//it should be delayed until the camera is idle
- if (state == QCamera::LoadedState &&
+ if ((state == QCamera::LoadedState || state == QCamera::UnloadedState) &&
m_session->status() == QCamera::ActiveStatus &&
m_session->isBusy()) {
#ifdef CAMEABIN_DEBUG
diff --git a/src/plugins/gstreamer/mediaplayer/qgstreamerplayerservice.cpp b/src/plugins/gstreamer/mediaplayer/qgstreamerplayerservice.cpp
index 0712f6e6c..a28968145 100644
--- a/src/plugins/gstreamer/mediaplayer/qgstreamerplayerservice.cpp
+++ b/src/plugins/gstreamer/mediaplayer/qgstreamerplayerservice.cpp
@@ -55,10 +55,6 @@
#include <private/qgstreamervideowindow_p.h>
#include <private/qgstreamervideorenderer_p.h>
-#if QT_CONFIG(mirclient) && defined (__arm__)
-#include "private/qgstreamermirtexturerenderer_p.h"
-#endif
-
#include "qgstreamerstreamscontrol.h"
#include <private/qgstreameraudioprobecontrol_p.h>
#include <private/qgstreamervideoprobecontrol_p.h>
@@ -88,13 +84,7 @@ QGstreamerPlayerService::QGstreamerPlayerService(QObject *parent):
m_metaData = new QGstreamerMetaDataProvider(m_session, this);
m_streamsControl = new QGstreamerStreamsControl(m_session,this);
m_availabilityControl = new QGStreamerAvailabilityControl(m_control->resources(), this);
-
-#if QT_CONFIG(mirclient) && defined (__arm__)
- m_videoRenderer = new QGstreamerMirTextureRenderer(this, m_session);
-#else
m_videoRenderer = new QGstreamerVideoRenderer(this);
-#endif
-
m_videoWindow = new QGstreamerVideoWindow(this);
// If the GStreamer video sink is not available, don't provide the video window control since
// it won't work anyway.
diff --git a/src/plugins/m3u/qm3uhandler.cpp b/src/plugins/m3u/qm3uhandler.cpp
index bc5366bf6..017c32d92 100644
--- a/src/plugins/m3u/qm3uhandler.cpp
+++ b/src/plugins/m3u/qm3uhandler.cpp
@@ -163,7 +163,7 @@ public:
virtual bool writeItem(const QMediaContent& item)
{
- *m_textStream << item.canonicalUrl().toString() << endl;
+ *m_textStream << item.request().url().toString() << endl;
return true;
}
diff --git a/src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.cpp b/src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.cpp
index c66ac937d..fc48ed818 100644
--- a/src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.cpp
+++ b/src/plugins/qnx/mediaplayer/mmrenderermediaplayercontrol.cpp
@@ -230,7 +230,7 @@ void MmRendererMediaPlayerControl::attach()
}
}
- const QByteArray resourcePath = resourcePathForUrl(m_media.canonicalUrl());
+ const QByteArray resourcePath = resourcePathForUrl(m_media.request().url());
if (resourcePath.isEmpty()) {
detach();
return;
diff --git a/src/plugins/winrt/qwinrtmediaplayercontrol.cpp b/src/plugins/winrt/qwinrtmediaplayercontrol.cpp
index 7991f435b..374ff2831 100644
--- a/src/plugins/winrt/qwinrtmediaplayercontrol.cpp
+++ b/src/plugins/winrt/qwinrtmediaplayercontrol.cpp
@@ -759,7 +759,7 @@ void QWinRTMediaPlayerControl::setMedia(const QMediaContent &media, QIODevice *s
}
emit mediaChanged(media);
- QString urlString = media.canonicalUrl().toString();
+ QString urlString = media.request().url().toString();
if (!d->stream) {
// If we can read the file via Qt, use the byte stream approach
const auto resources = media.resources();
diff --git a/src/plugins/wmf/player/mfplayersession.cpp b/src/plugins/wmf/player/mfplayersession.cpp
index 9f909252d..10ba25998 100644
--- a/src/plugins/wmf/player/mfplayersession.cpp
+++ b/src/plugins/wmf/player/mfplayersession.cpp
@@ -197,7 +197,7 @@ void MFPlayerSession::load(const QMediaContent &media, QIODevice *stream)
qDebug() << "load";
#endif
clear();
- QUrl url = media.canonicalUrl();
+ QUrl url = media.request().url();
if (m_status == QMediaPlayer::LoadingMedia && m_sourceResolver)
m_sourceResolver->cancel();
diff --git a/src/multimedia/qtmultimediaquicktools_headers/qdeclarativevideooutput_backend_p.h b/src/qtmultimediaquicktools/qdeclarativevideooutput_backend_p.h
index da99b387a..da99b387a 100644
--- a/src/multimedia/qtmultimediaquicktools_headers/qdeclarativevideooutput_backend_p.h
+++ b/src/qtmultimediaquicktools/qdeclarativevideooutput_backend_p.h
diff --git a/src/multimedia/qtmultimediaquicktools_headers/qdeclarativevideooutput_p.h b/src/qtmultimediaquicktools/qdeclarativevideooutput_p.h
index 8ea0dc338..8ea0dc338 100644
--- a/src/multimedia/qtmultimediaquicktools_headers/qdeclarativevideooutput_p.h
+++ b/src/qtmultimediaquicktools/qdeclarativevideooutput_p.h
diff --git a/src/multimedia/qtmultimediaquicktools_headers/qsgvideonode_p.h b/src/qtmultimediaquicktools/qsgvideonode_p.h
index c094bed1f..c094bed1f 100644
--- a/src/multimedia/qtmultimediaquicktools_headers/qsgvideonode_p.h
+++ b/src/qtmultimediaquicktools/qsgvideonode_p.h
diff --git a/src/qtmultimediaquicktools/qsgvideonode_texture.cpp b/src/qtmultimediaquicktools/qsgvideonode_texture.cpp
index f5545afc7..318e4cef5 100644
--- a/src/qtmultimediaquicktools/qsgvideonode_texture.cpp
+++ b/src/qtmultimediaquicktools/qsgvideonode_texture.cpp
@@ -58,6 +58,7 @@ QList<QVideoFrame::PixelFormat> QSGVideoNodeFactory_Texture::supportedPixelForma
pixelFormats.append(QVideoFrame::Format_ARGB32);
pixelFormats.append(QVideoFrame::Format_BGR32);
pixelFormats.append(QVideoFrame::Format_BGRA32);
+ pixelFormats.append(QVideoFrame::Format_ABGR32);
}
return pixelFormats;
diff --git a/src/qtmultimediaquicktools/qsgvideonode_yuv.cpp b/src/qtmultimediaquicktools/qsgvideonode_yuv.cpp
index 8e4ea01a1..f07362bf1 100644
--- a/src/qtmultimediaquicktools/qsgvideonode_yuv.cpp
+++ b/src/qtmultimediaquicktools/qsgvideonode_yuv.cpp
@@ -74,7 +74,7 @@ QList<QVideoFrame::PixelFormat> QSGVideoNodeFactory_YUV::supportedPixelFormats(
QList<QVideoFrame::PixelFormat> formats;
if (handleType == QAbstractVideoBuffer::NoHandle) {
- formats << QVideoFrame::Format_YUV420P << QVideoFrame::Format_YV12
+ formats << QVideoFrame::Format_YUV420P << QVideoFrame::Format_YV12 << QVideoFrame::Format_YUV422P
<< QVideoFrame::Format_NV12 << QVideoFrame::Format_NV21
<< QVideoFrame::Format_UYVY << QVideoFrame::Format_YUYV;
}
@@ -235,7 +235,7 @@ public:
return &uyvyType;
case QVideoFrame::Format_YUYV:
return &yuyvType;
- default: // Currently: YUV420P and YV12
+ default: // Currently: YUV420P, YUV422P and YV12
return &triPlanarType;
}
}
@@ -250,7 +250,7 @@ public:
return new QSGVideoMaterialShader_UYVY;
case QVideoFrame::Format_YUYV:
return new QSGVideoMaterialShader_YUYV;
- default: // Currently: YUV420P and YV12
+ default: // Currently: YUV420P, YUV422P and YV12
return new QSGVideoMaterialShader_YUV_TriPlanar;
}
}
@@ -308,6 +308,7 @@ QSGVideoMaterial_YUV::QSGVideoMaterial_YUV(const QVideoSurfaceFormat &format) :
break;
case QVideoFrame::Format_YUV420P:
case QVideoFrame::Format_YV12:
+ case QVideoFrame::Format_YUV422P:
m_planeCount = 3;
break;
case QVideoFrame::Format_UYVY:
@@ -408,18 +409,20 @@ void QSGVideoMaterial_YUV::bind()
functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit
bindTexture(m_textureIds[0], m_frame.bytesPerLine(y), fh, m_frame.bits(y), texFormat1);
- } else { // YUV420P || YV12
+ } else { // YUV420P || YV12 || YUV422P
const int y = 0;
- const int u = m_frame.pixelFormat() == QVideoFrame::Format_YUV420P ? 1 : 2;
- const int v = m_frame.pixelFormat() == QVideoFrame::Format_YUV420P ? 2 : 1;
+ const int u = m_frame.pixelFormat() == QVideoFrame::Format_YV12 ? 2 : 1;
+ const int v = m_frame.pixelFormat() == QVideoFrame::Format_YV12 ? 1 : 2;
m_planeWidth[0] = qreal(fw) / m_frame.bytesPerLine(y);
m_planeWidth[1] = m_planeWidth[2] = qreal(fw) / (2 * m_frame.bytesPerLine(u));
+ const int uvHeight = m_frame.pixelFormat() == QVideoFrame::Format_YUV422P ? fh : fh / 2;
+
functions->glActiveTexture(GL_TEXTURE1);
- bindTexture(m_textureIds[1], m_frame.bytesPerLine(u), fh / 2, m_frame.bits(u), texFormat1);
+ bindTexture(m_textureIds[1], m_frame.bytesPerLine(u), uvHeight, m_frame.bits(u), texFormat1);
functions->glActiveTexture(GL_TEXTURE2);
- bindTexture(m_textureIds[2], m_frame.bytesPerLine(v), fh / 2, m_frame.bits(v), texFormat1);
+ bindTexture(m_textureIds[2], m_frame.bytesPerLine(v), uvHeight, m_frame.bits(v), texFormat1);
functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit
bindTexture(m_textureIds[0], m_frame.bytesPerLine(y), fh, m_frame.bits(y), texFormat1);
}
diff --git a/src/multimedia/qtmultimediaquicktools_headers/qtmultimediaquickdefs_p.h b/src/qtmultimediaquicktools/qtmultimediaquickdefs_p.h
index 20188739c..20188739c 100644
--- a/src/multimedia/qtmultimediaquicktools_headers/qtmultimediaquickdefs_p.h
+++ b/src/qtmultimediaquicktools/qtmultimediaquickdefs_p.h
diff --git a/src/qtmultimediaquicktools/qtmultimediaquicktools.pro b/src/qtmultimediaquicktools/qtmultimediaquicktools.pro
index bffdc6ec2..7c8d07647 100644
--- a/src/qtmultimediaquicktools/qtmultimediaquicktools.pro
+++ b/src/qtmultimediaquicktools/qtmultimediaquicktools.pro
@@ -3,11 +3,6 @@ TARGET = QtMultimediaQuick
QT = core quick multimedia-private
CONFIG += internal_module
-# Header files must go inside source directory of a module
-# to be installed by syncqt.
-INCLUDEPATH += ../multimedia/qtmultimediaquicktools_headers/
-VPATH += ../multimedia/qtmultimediaquicktools_headers/
-
PRIVATE_HEADERS += \
qdeclarativevideooutput_p.h \
qdeclarativevideooutput_backend_p.h \