summaryrefslogtreecommitdiffstats
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/imports/multimedia/multimedia.cpp2
-rw-r--r--src/imports/multimedia/plugins.qmltypes1
-rw-r--r--src/multimedia/qtmultimediaquicktools_headers/qdeclarativevideooutput_backend_p.h7
-rw-r--r--src/multimedia/qtmultimediaquicktools_headers/qdeclarativevideooutput_p.h13
-rw-r--r--src/multimedia/qtmultimediaquicktools_headers/qsgvideonode_p.h10
-rw-r--r--src/multimedia/video/qabstractvideofilter.cpp319
-rw-r--r--src/multimedia/video/qabstractvideofilter.h85
-rw-r--r--src/multimedia/video/qvideoframe.cpp17
-rw-r--r--src/multimedia/video/qvideoframe.h2
-rw-r--r--src/multimedia/video/video.pri10
-rw-r--r--src/plugins/android/videonode/qandroidsgvideonode.cpp2
-rw-r--r--src/plugins/android/videonode/qandroidsgvideonode.h3
-rw-r--r--src/plugins/videonode/egl/qsgvideonode_egl.cpp7
-rw-r--r--src/plugins/videonode/egl/qsgvideonode_egl.h3
-rw-r--r--src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp134
-rw-r--r--src/plugins/videonode/imx6/qsgvivantevideomaterial.h9
-rw-r--r--src/plugins/videonode/imx6/qsgvivantevideonode.cpp4
-rw-r--r--src/plugins/videonode/imx6/qsgvivantevideonode.h5
-rw-r--r--src/qtmultimediaquicktools/qdeclarativevideooutput.cpp62
-rw-r--r--src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp114
-rw-r--r--src/qtmultimediaquicktools/qdeclarativevideooutput_render_p.h17
-rw-r--r--src/qtmultimediaquicktools/qsgvideonode_i420.cpp2
-rw-r--r--src/qtmultimediaquicktools/qsgvideonode_i420.h5
-rw-r--r--src/qtmultimediaquicktools/qsgvideonode_rgb.cpp2
-rw-r--r--src/qtmultimediaquicktools/qsgvideonode_rgb.h5
-rw-r--r--src/qtmultimediaquicktools/qsgvideonode_texture.cpp2
-rw-r--r--src/qtmultimediaquicktools/qsgvideonode_texture.h5
27 files changed, 777 insertions, 70 deletions
diff --git a/src/imports/multimedia/multimedia.cpp b/src/imports/multimedia/multimedia.cpp
index 8b2287ce4..de0290756 100644
--- a/src/imports/multimedia/multimedia.cpp
+++ b/src/imports/multimedia/multimedia.cpp
@@ -38,6 +38,7 @@
#include "qsoundeffect.h"
#include <private/qdeclarativevideooutput_p.h>
+#include "qabstractvideofilter.h"
#include "qdeclarativemultimediaglobal_p.h"
#include "qdeclarativemediametadata_p.h"
@@ -108,6 +109,7 @@ public:
qmlRegisterRevision<QDeclarativeCamera, 2>(uri, 5, 5);
qmlRegisterType<QDeclarativeMediaMetaData>();
+ qmlRegisterType<QAbstractVideoFilter>();
}
void initializeEngine(QQmlEngine *engine, const char *uri)
diff --git a/src/imports/multimedia/plugins.qmltypes b/src/imports/multimedia/plugins.qmltypes
index 12d7ea85d..0cc3a6a24 100644
--- a/src/imports/multimedia/plugins.qmltypes
+++ b/src/imports/multimedia/plugins.qmltypes
@@ -1225,6 +1225,7 @@ Module {
Property { name: "orientation"; type: "int" }
Property { name: "sourceRect"; type: "QRectF"; isReadonly: true }
Property { name: "contentRect"; type: "QRectF"; isReadonly: true }
+ Property { name: "filters"; isList: true; isReadonly: true }
Signal {
name: "fillModeChanged"
Parameter { type: "QDeclarativeVideoOutput::FillMode" }
diff --git a/src/multimedia/qtmultimediaquicktools_headers/qdeclarativevideooutput_backend_p.h b/src/multimedia/qtmultimediaquicktools_headers/qdeclarativevideooutput_backend_p.h
index 5996af773..1d267df82 100644
--- a/src/multimedia/qtmultimediaquicktools_headers/qdeclarativevideooutput_backend_p.h
+++ b/src/multimedia/qtmultimediaquicktools_headers/qdeclarativevideooutput_backend_p.h
@@ -46,6 +46,7 @@ QT_BEGIN_NAMESPACE
class QAbstractVideoSurface;
class QDeclarativeVideoOutput;
class QMediaService;
+class QAbstractVideoFilter;
class Q_MULTIMEDIAQUICK_EXPORT QDeclarativeVideoBackend
{
@@ -70,6 +71,12 @@ public:
// The viewport, adjusted for the pixel aspect ratio
virtual QRectF adjustedViewport() const = 0;
+ virtual void appendFilter(QAbstractVideoFilter *filter) { Q_UNUSED(filter); }
+ virtual void clearFilters() { }
+
+ virtual void releaseResources() { }
+ virtual void invalidateSceneGraph() { }
+
protected:
QDeclarativeVideoOutput *q;
QPointer<QMediaService> m_service;
diff --git a/src/multimedia/qtmultimediaquicktools_headers/qdeclarativevideooutput_p.h b/src/multimedia/qtmultimediaquicktools_headers/qdeclarativevideooutput_p.h
index e1dbf5646..f022c0e3e 100644
--- a/src/multimedia/qtmultimediaquicktools_headers/qdeclarativevideooutput_p.h
+++ b/src/multimedia/qtmultimediaquicktools_headers/qdeclarativevideooutput_p.h
@@ -40,6 +40,7 @@
#include <QtQuick/qquickitem.h>
#include <QtCore/qpointer.h>
#include <QtMultimedia/qcamerainfo.h>
+#include <QtMultimedia/qabstractvideofilter.h>
#include <private/qtmultimediaquickdefs_p.h>
@@ -60,6 +61,7 @@ class Q_MULTIMEDIAQUICK_EXPORT QDeclarativeVideoOutput : public QQuickItem
Q_PROPERTY(bool autoOrientation READ autoOrientation WRITE setAutoOrientation NOTIFY autoOrientationChanged REVISION 2)
Q_PROPERTY(QRectF sourceRect READ sourceRect NOTIFY sourceRectChanged)
Q_PROPERTY(QRectF contentRect READ contentRect NOTIFY contentRectChanged)
+ Q_PROPERTY(QQmlListProperty<QAbstractVideoFilter> filters READ filters);
Q_ENUMS(FillMode)
public:
@@ -104,6 +106,8 @@ public:
};
SourceType sourceType() const;
+ QQmlListProperty<QAbstractVideoFilter> filters();
+
Q_SIGNALS:
void sourceChanged();
void fillModeChanged(QDeclarativeVideoOutput::FillMode);
@@ -116,6 +120,7 @@ protected:
QSGNode *updatePaintNode(QSGNode *, UpdatePaintNodeData *);
void itemChange(ItemChange change, const ItemChangeData &changeData);
void geometryChanged(const QRectF &newGeometry, const QRectF &oldGeometry);
+ void releaseResources();
private Q_SLOTS:
void _q_updateMediaObject();
@@ -123,10 +128,16 @@ private Q_SLOTS:
void _q_updateNativeSize();
void _q_updateGeometry();
void _q_screenOrientationChanged(int);
+ void _q_invalidateSceneGraph();
private:
bool createBackend(QMediaService *service);
+ static void filter_append(QQmlListProperty<QAbstractVideoFilter> *property, QAbstractVideoFilter *value);
+ static int filter_count(QQmlListProperty<QAbstractVideoFilter> *property);
+ static QAbstractVideoFilter *filter_at(QQmlListProperty<QAbstractVideoFilter> *property, int index);
+ static void filter_clear(QQmlListProperty<QAbstractVideoFilter> *property);
+
SourceType m_sourceType;
QPointer<QObject> m_source;
@@ -145,6 +156,8 @@ private:
QVideoOutputOrientationHandler *m_screenOrientationHandler;
QScopedPointer<QDeclarativeVideoBackend> m_backend;
+
+ QList<QAbstractVideoFilter *> m_filters;
};
QT_END_NAMESPACE
diff --git a/src/multimedia/qtmultimediaquicktools_headers/qsgvideonode_p.h b/src/multimedia/qtmultimediaquicktools_headers/qsgvideonode_p.h
index 8be77ff07..9a15f4afa 100644
--- a/src/multimedia/qtmultimediaquicktools_headers/qsgvideonode_p.h
+++ b/src/multimedia/qtmultimediaquicktools_headers/qsgvideonode_p.h
@@ -46,10 +46,16 @@ QT_BEGIN_NAMESPACE
class Q_MULTIMEDIAQUICK_EXPORT QSGVideoNode : public QSGGeometryNode
{
public:
+ enum FrameFlag {
+ FrameFiltered = 0x01
+ };
+ Q_DECLARE_FLAGS(FrameFlags, FrameFlag)
+
QSGVideoNode();
- virtual void setCurrentFrame(const QVideoFrame &frame) = 0;
+ virtual void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags) = 0;
virtual QVideoFrame::PixelFormat pixelFormat() const = 0;
+ virtual QAbstractVideoBuffer::HandleType handleType() const = 0;
void setTexturedRectGeometry(const QRectF &boundingRect, const QRectF &textureRect, int orientation);
@@ -59,6 +65,8 @@ private:
int m_orientation;
};
+Q_DECLARE_OPERATORS_FOR_FLAGS(QSGVideoNode::FrameFlags)
+
class Q_MULTIMEDIAQUICK_EXPORT QSGVideoNodeFactoryInterface
{
public:
diff --git a/src/multimedia/video/qabstractvideofilter.cpp b/src/multimedia/video/qabstractvideofilter.cpp
new file mode 100644
index 000000000..e04cbf1df
--- /dev/null
+++ b/src/multimedia/video/qabstractvideofilter.cpp
@@ -0,0 +1,319 @@
+/****************************************************************************
+**
+** Copyright (C) 2015 Digia Plc and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/legal
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL21$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and Digia. For licensing terms and
+** conditions see http://qt.digia.com/licensing. For further information
+** use the contact form at http://qt.digia.com/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 2.1 or version 3 as published by the Free
+** Software Foundation and appearing in the file LICENSE.LGPLv21 and
+** LICENSE.LGPLv3 included in the packaging of this file. Please review the
+** following information to ensure the GNU Lesser General Public License
+** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
+** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Digia gives you certain additional
+** rights. These rights are described in the Digia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include "qabstractvideofilter.h"
+
+QT_BEGIN_NAMESPACE
+
+/*!
+ \class QAbstractVideoFilter
+ \since 5.5
+ \brief The QAbstractVideoFilter class represents a filter that is applied to the video frames
+ received by a VideoOutput type.
+ \inmodule QtMultimedia
+
+ \ingroup multimedia
+ \ingroup multimedia_video
+
+ QAbstractVideoFilter provides a convenient way for applications to run image
+ processing, computer vision algorithms or any generic transformation or
+ calculation on the output of a VideoOutput type, regardless of the source
+ (video or camera). By providing a simple interface it allows applications and
+ third parties to easily develop QML types that provide image processing
+ algorithms using popular frameworks like \l{http://opencv.org}{OpenCV}. Due to
+ the close integration with the final stages of the Qt Multimedia video
+ pipeline, accelerated and possibly zero-copy solutions are feasible too: for
+ instance, a plugin providing OpenCL-based algorithms can use OpenCL's OpenGL
+ interop to use the OpenGL textures created by a hardware accelerated video
+ decoder, without additional readbacks and copies.
+
+ \note QAbstractVideoFilter is not always the best choice. To apply effects or
+ transformations using OpenGL shaders to the image shown on screen, the
+ standard Qt Quick approach of using ShaderEffect items in combination with
+ VideoOutput should be used. VideoFilter is not a replacement for this. It is
+ rather targeted for performing computations (that do not necessarily change
+ the image shown on screen) and computer vision algorithms provided by
+ external frameworks.
+
+ QAbstractVideoFilter is meant to be subclassed. The subclasses are then registered to
+ the QML engine, so they can be used as a QML type. The list of filters are
+ assigned to a VideoOutput type via its \l{QtMultimedia::VideoOutput::filters}{filters}
+ property.
+
+ A single filter represents one transformation or processing step on
+ a video frame. The output is a modified video frame, some arbitrary data or
+ both. For example, image transformations will result in a different image,
+ whereas an algorithm for detecting objects on an image will likely provide
+ a list of rectangles.
+
+ Arbitrary data can be represented as properties on the QAbstractVideoFilter subclass
+ and on the QObject or QJSValue instances passed to its signals. What exactly
+ these properties and signals are, is up to the individual video
+ filters. Completion of the operations can be indicated by
+ signals. Computations that do not result in a modified image will pass the
+ input image through so that subsequent filters can be placed after them.
+
+ Properties set on QAbstractVideoFilter serve as input to the computation, similarly
+ to how uniform values are specified in ShaderEffect types. The changed
+ property values are taken into use when the next video frame is processed.
+
+ The typical usage is to subclass QAbstractVideoFilter and QVideoFilterRunnable:
+
+ \badcode
+ class MyFilterRunnable : public QVideoFilterRunnable {
+ public:
+ QVideoFrame run(QVideoFrame *input, const QVideoSurfaceFormat &surfaceFormat, RunFlags flags) { ... }
+ };
+
+ class MyFilter : public QAbstractVideoFilter {
+ public:
+ QVideoFilterRunnable *createFilterRunnable() { return new MyFilterRunnable; }
+ signals:
+ void finished(QObject *result);
+ };
+
+ int main(int argc, char **argv) {
+ ...
+ qmlRegisterType<MyFilter>("my.uri", 1, 0, "MyFilter");
+ ...
+ }
+ \endcode
+
+ MyFilter is thus accessible from QML:
+
+ \badcode
+ import my.uri 1.0
+
+ Camera {
+ id: camera
+ }
+ MyFilter {
+ id: filter
+ // set properties, they can also be animated
+ onFinished: console.log("results of the computation: " + result)
+ }
+ VideoOutput {
+ source: camera
+ filters: [ filter ]
+ anchors.fill: parent
+ }
+ \endcode
+
+ This also allows providing filters in QML plugins, separately from the application.
+
+ \sa VideoOutput, Camera, MediaPlayer, QVideoFilterRunnable
+*/
+
+/*!
+ \class QVideoFilterRunnable
+ \since 5.5
+ \brief The QVideoFilterRunnable class represents the implementation of a filter
+ that owns all graphics and computational resources, and performs the actual filtering
+ or calculations.
+ \inmodule QtMultimedia
+
+ \ingroup multimedia
+ \ingroup multimedia_video
+
+ Video filters are split into QAbstractVideoFilter and corresponding QVideoFilterRunnable
+ instances, similar to QQuickItem and QSGNode. This is necessary to support
+ threaded rendering scenarios. When using the threaded render loop of the Qt
+ Quick scene graph, all rendering happens on a dedicated thread.
+ QVideoFilterRunnable instances always live on this thread and all its functions,
+ run(), the constructor, and the destructor, are guaranteed to be invoked on
+ that thread with the OpenGL context bound. QAbstractVideoFilter instances live on
+ the main (GUI) thread, like any other QObject and QQuickItem instances
+ created from QML.
+
+ Once created, QVideoFilterRunnable instances are managed by Qt Multimedia and
+ will be automatically destroyed and recreated when necessary, for example
+ when the scene graph is invalidated or the QQuickWindow changes or is closed.
+ Creation happens via the QAbstractVideoFilter::createFilterRunnable() factory function.
+
+ \sa QAbstractVideoFilter
+ */
+
+/*!
+ \fn QVideoFrame QVideoFilterRunnable::run(QVideoFrame *input, const QVideoSurfaceFormat &surfaceFormat, RunFlags flags)
+
+ Reimplement this function to perform filtering or computation on the \a
+ input video frame. Like the constructor and destructor, this function is
+ always called on the render thread with the OpenGL context bound.
+
+ Implementations that do not modify the video frame can simply return \a input.
+
+ It is safe to access properties of the associated QAbstractVideoFilter instance from
+ this function.
+
+ \a input will not be mapped, it is up to this function to call QVideoFrame::map()
+ and QVideoFrame::unmap() as necessary.
+
+ \a surfaceFormat provides additional information, for example it can be used
+ to determine which way is up in the input image as that is important for
+ filters to operate on multiple platforms with multiple cameras.
+
+ \a flags contains additional information about the filter's invocation. For
+ example the LastInChain flag indicates that the filter is the last in a
+ VideoOutput's associated filter list. This can be very useful in cases where
+ multiple filters are chained together and the work is performed on image data
+ in some custom format (for example a format specific to some computer vision
+ framework). To avoid conversion on every filter in the chain, all
+ intermediate filters can return a QVideoFrame hosting data in the custom
+ format. Only the last, where the flag is set, returns a QVideoFrame in a
+ format compatible with Qt.
+
+ Filters that want to expose the results of their computation to JavaScript
+ code in QML can declare their own custom signals in the QAbstractVideoFilter
+ subclass to indicate the completion of the operation. For filters that only
+ calculate some results and do not modify the video frame, it is also possible
+ to operate asynchronously. They can queue the necessary operations using the
+ compute API and return from this function without emitting any signals. The
+ signal indicating the completion is then emitted only when the compute API
+ indicates that the operations were done and the results are available. Note
+ that it is strongly recommended to represent the filter's output data as a
+ separate instance of QJSValue or a QObject-derived class which is passed as a
+ parameter to the signal and becomes exposed to the JavaScript engine. In case
+ of QObject the ownership of this object is controlled by the standard QML
+ rules: if it has no parent, ownership is transferred to the JavaScript engine,
+ otherwise it stays with the emitter. Note that the signal connection may be
+ queued, for example when using the threaded render loop of Qt Quick, and so the
+ object must stay valid for a longer time, destroying it right after calling
+ this function is not safe. Using a dedicated results object is guaranteed to
+ be safe even when using threaded rendering. The same is not necessarily true
+ for properties on the QAbstractVideoFilter instance itself: properties can
+ safely be read in run() since the gui thread is blocked during that time but
+ writing may become problematic.
+
+ \note Avoid time consuming operations in this function as they block the
+ entire rendering of the application.
+
+ \note The handleType() and pixelFormat() of \a input is completely up to the
+ video decoding backend on the platform in use. On some platforms different
+ forms of input are used depending on the graphics stack. For example, when
+ playing back videos on Windows with the WMF backend, QVideoFrame contains
+ OpenGL-wrapped Direct3D textures in case of using ANGLE, but regular pixel
+ data when using desktop OpenGL (opengl32.dll). Similarly, the video file
+ format will often decide if the data is RGB or YUV, but this may also depend
+ on the decoder and the configuration in use. The returned video frame does
+ not have to be in the same format as the input, for example a filter with an
+ input of a QVideoFrame backed by system memory can output a QVideoFrame with
+ an OpenGL texture handle.
+
+ \sa QVideoFrame, QVideoSurfaceFormat
+ */
+
+/*!
+ \enum QVideoFilterRunnable::RunFlag
+
+ \value LastInChain Indicates that the filter runnable's associated QAbstractVideoFilter
+ is the last in the corresponding VideoOutput type's filters list, meaning
+ that the returned frame is the one that is going to be presented to the scene
+ graph without invoking any further filters.
+ */
+
+class QAbstractVideoFilterPrivate
+{
+public:
+ QAbstractVideoFilterPrivate() :
+ active(true)
+ { }
+
+ bool active;
+};
+
+/*!
+ \internal
+ */
+QVideoFilterRunnable::~QVideoFilterRunnable()
+{
+}
+
+/*!
+ Constructs a new QAbstractVideoFilter instance.
+ */
+QAbstractVideoFilter::QAbstractVideoFilter(QObject *parent) :
+ QObject(parent),
+ d_ptr(new QAbstractVideoFilterPrivate)
+{
+}
+
+/*!
+ \internal
+ */
+QAbstractVideoFilter::~QAbstractVideoFilter()
+{
+ delete d_ptr;
+}
+
+/*!
+ Returns \c true if the filter is active.
+
+ By default filters are active. When set to \c false, the filter will be
+ ignored by the VideoOutput type.
+ */
+bool QAbstractVideoFilter::isActive() const
+{
+ Q_D(const QAbstractVideoFilter);
+ return d->active;
+}
+
+/*!
+ \internal
+ */
+void QAbstractVideoFilter::setActive(bool v)
+{
+ Q_D(QAbstractVideoFilter);
+ if (d->active != v) {
+ d->active = v;
+ emit activeChanged();
+ }
+}
+
+/*!
+ \fn QVideoFilterRunnable *QAbstractVideoFilter::createFilterRunnable()
+
+ Factory function to create a new instance of a QVideoFilterRunnable subclass
+ corresponding to this filter.
+
+ This function is called on the thread on which the Qt Quick scene graph
+ performs rendering, with the OpenGL context bound. Ownership of the returned
+ instance is transferred: the returned instance will live on the render thread
+ and will be destroyed automatically when necessary.
+
+ Typically, implementations of the function will simply construct a new
+ QVideoFilterRunnable instance, passing \c this to the constructor as the
+ filter runnables must know their associated QAbstractVideoFilter instance to
+ access dynamic properties and optionally emit signals.
+ */
+
+QT_END_NAMESPACE
diff --git a/src/multimedia/video/qabstractvideofilter.h b/src/multimedia/video/qabstractvideofilter.h
new file mode 100644
index 000000000..01e39fb8a
--- /dev/null
+++ b/src/multimedia/video/qabstractvideofilter.h
@@ -0,0 +1,85 @@
+/****************************************************************************
+**
+** Copyright (C) 2015 Digia Plc and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/legal
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL21$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and Digia. For licensing terms and
+** conditions see http://qt.digia.com/licensing. For further information
+** use the contact form at http://qt.digia.com/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 2.1 or version 3 as published by the Free
+** Software Foundation and appearing in the file LICENSE.LGPLv21 and
+** LICENSE.LGPLv3 included in the packaging of this file. Please review the
+** following information to ensure the GNU Lesser General Public License
+** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
+** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Digia gives you certain additional
+** rights. These rights are described in the Digia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#ifndef QABSTRACTVIDEOFILTER_H
+#define QABSTRACTVIDEOFILTER_H
+
+#include <QtCore/qobject.h>
+#include <QtMultimedia/qvideoframe.h>
+#include <QtMultimedia/qvideosurfaceformat.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAbstractVideoFilterPrivate;
+
+class Q_MULTIMEDIA_EXPORT QVideoFilterRunnable
+{
+public:
+ enum RunFlag {
+ LastInChain = 0x01
+ };
+ Q_DECLARE_FLAGS(RunFlags, RunFlag)
+
+ virtual ~QVideoFilterRunnable();
+ virtual QVideoFrame run(QVideoFrame *input, const QVideoSurfaceFormat &surfaceFormat, RunFlags flags) = 0;
+};
+
+Q_DECLARE_OPERATORS_FOR_FLAGS(QVideoFilterRunnable::RunFlags)
+
+class Q_MULTIMEDIA_EXPORT QAbstractVideoFilter : public QObject
+{
+ Q_OBJECT
+ Q_PROPERTY(bool active READ isActive WRITE setActive NOTIFY activeChanged)
+
+public:
+ QAbstractVideoFilter(QObject *parent = 0);
+ ~QAbstractVideoFilter();
+
+ bool isActive() const;
+ void setActive(bool v);
+
+ virtual QVideoFilterRunnable *createFilterRunnable() = 0;
+
+Q_SIGNALS:
+ void activeChanged();
+
+private:
+ Q_DECLARE_PRIVATE(QAbstractVideoFilter)
+ Q_DISABLE_COPY(QAbstractVideoFilter)
+
+ QAbstractVideoFilterPrivate *d_ptr;
+};
+
+QT_END_NAMESPACE
+
+#endif // QABSTRACTVIDEOFILTER_H
diff --git a/src/multimedia/video/qvideoframe.cpp b/src/multimedia/video/qvideoframe.cpp
index 8dd23d2f5..869972a5c 100644
--- a/src/multimedia/video/qvideoframe.cpp
+++ b/src/multimedia/video/qvideoframe.cpp
@@ -369,6 +369,23 @@ QVideoFrame &QVideoFrame::operator =(const QVideoFrame &other)
}
/*!
+ Returns \c true if this QVideoFrame and \a other reflect the same frame.
+ */
+bool QVideoFrame::operator==(const QVideoFrame &other) const
+{
+ // Due to explicit sharing we just compare the QSharedData which in turn compares the pointers.
+ return d == other.d;
+}
+
+/*!
+ Returns \c true if this QVideoFrame and \a other do not reflect the same frame.
+ */
+bool QVideoFrame::operator!=(const QVideoFrame &other) const
+{
+ return d != other.d;
+}
+
+/*!
Destroys a video frame.
*/
QVideoFrame::~QVideoFrame()
diff --git a/src/multimedia/video/qvideoframe.h b/src/multimedia/video/qvideoframe.h
index d44219d36..89cf2fe32 100644
--- a/src/multimedia/video/qvideoframe.h
+++ b/src/multimedia/video/qvideoframe.h
@@ -107,6 +107,8 @@ public:
~QVideoFrame();
QVideoFrame &operator =(const QVideoFrame &other);
+ bool operator==(const QVideoFrame &other) const;
+ bool operator!=(const QVideoFrame &other) const;
bool isValid() const;
diff --git a/src/multimedia/video/video.pri b/src/multimedia/video/video.pri
index 161163783..3e39d9d18 100644
--- a/src/multimedia/video/video.pri
+++ b/src/multimedia/video/video.pri
@@ -6,7 +6,8 @@ PUBLIC_HEADERS += \
video/qabstractvideosurface.h \
video/qvideoframe.h \
video/qvideosurfaceformat.h \
- video/qvideoprobe.h
+ video/qvideoprobe.h \
+ video/qabstractvideofilter.h
PRIVATE_HEADERS += \
video/qabstractvideobuffer_p.h \
@@ -24,8 +25,5 @@ SOURCES += \
video/qvideooutputorientationhandler.cpp \
video/qvideosurfaceformat.cpp \
video/qvideosurfaceoutput.cpp \
- video/qvideoprobe.cpp
-
-
-
-
+ video/qvideoprobe.cpp \
+ video/qabstractvideofilter.cpp
diff --git a/src/plugins/android/videonode/qandroidsgvideonode.cpp b/src/plugins/android/videonode/qandroidsgvideonode.cpp
index 4e1b2a89b..f094c0d8d 100644
--- a/src/plugins/android/videonode/qandroidsgvideonode.cpp
+++ b/src/plugins/android/videonode/qandroidsgvideonode.cpp
@@ -165,7 +165,7 @@ QAndroidSGVideoNode::~QAndroidSGVideoNode()
m_frame = QVideoFrame();
}
-void QAndroidSGVideoNode::setCurrentFrame(const QVideoFrame &frame)
+void QAndroidSGVideoNode::setCurrentFrame(const QVideoFrame &frame, FrameFlags)
{
QMutexLocker lock(&m_frameMutex);
m_frame = frame;
diff --git a/src/plugins/android/videonode/qandroidsgvideonode.h b/src/plugins/android/videonode/qandroidsgvideonode.h
index f3f838331..0c50d8cf9 100644
--- a/src/plugins/android/videonode/qandroidsgvideonode.h
+++ b/src/plugins/android/videonode/qandroidsgvideonode.h
@@ -47,8 +47,9 @@ public:
QAndroidSGVideoNode(const QVideoSurfaceFormat &format);
~QAndroidSGVideoNode();
- void setCurrentFrame(const QVideoFrame &frame);
+ void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags);
QVideoFrame::PixelFormat pixelFormat() const { return m_format.pixelFormat(); }
+ QAbstractVideoBuffer::HandleType handleType() const { return QAbstractVideoBuffer::GLTextureHandle; }
void preprocess();
diff --git a/src/plugins/videonode/egl/qsgvideonode_egl.cpp b/src/plugins/videonode/egl/qsgvideonode_egl.cpp
index 4e63c0dba..15af8b5e7 100644
--- a/src/plugins/videonode/egl/qsgvideonode_egl.cpp
+++ b/src/plugins/videonode/egl/qsgvideonode_egl.cpp
@@ -187,7 +187,7 @@ QSGVideoNode_EGL::~QSGVideoNode_EGL()
{
}
-void QSGVideoNode_EGL::setCurrentFrame(const QVideoFrame &frame)
+void QSGVideoNode_EGL::setCurrentFrame(const QVideoFrame &frame, FrameFlags)
{
EGLImageKHR image = frame.handle().value<void *>();
m_material.setImage(image);
@@ -199,6 +199,11 @@ QVideoFrame::PixelFormat QSGVideoNode_EGL::pixelFormat() const
return m_pixelFormat;
}
+QAbstractVideoBuffer::HandleType QSGVideoNode_EGL::handleType() const
+{
+ return QAbstractVideoBuffer::EGLImageHandle;
+}
+
static bool isExtensionSupported()
{
static const bool supported = eglGetProcAddress("glEGLImageTargetTexture2DOES");
diff --git a/src/plugins/videonode/egl/qsgvideonode_egl.h b/src/plugins/videonode/egl/qsgvideonode_egl.h
index b6f70abfb..7e9cfe871 100644
--- a/src/plugins/videonode/egl/qsgvideonode_egl.h
+++ b/src/plugins/videonode/egl/qsgvideonode_egl.h
@@ -74,8 +74,9 @@ public:
QSGVideoNode_EGL(const QVideoSurfaceFormat &format);
~QSGVideoNode_EGL();
- void setCurrentFrame(const QVideoFrame &frame);
+ void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags);
QVideoFrame::PixelFormat pixelFormat() const;
+ QAbstractVideoBuffer::HandleType handleType() const;
private:
QSGVideoMaterial_EGL m_material;
diff --git a/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp b/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp
index 948e9f9c8..073ffb34e 100644
--- a/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp
+++ b/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp
@@ -51,7 +51,9 @@ QSGVivanteVideoMaterial::QSGVivanteVideoMaterial() :
mWidth(0),
mHeight(0),
mFormat(QVideoFrame::Format_Invalid),
- mCurrentTexture(0)
+ mCurrentTexture(0),
+ mMappable(true),
+ mTexDirectTexture(0)
{
#ifdef QT_VIVANTE_VIDEO_DEBUG
qDebug() << Q_FUNC_INFO;
@@ -62,12 +64,7 @@ QSGVivanteVideoMaterial::QSGVivanteVideoMaterial() :
QSGVivanteVideoMaterial::~QSGVivanteVideoMaterial()
{
- Q_FOREACH (GLuint id, mBitsToTextureMap.values()) {
-#ifdef QT_VIVANTE_VIDEO_DEBUG
- qDebug() << "delete texture: " << id;
-#endif
- glDeleteTextures(1, &id);
- }
+ clearTextures();
}
QSGMaterialType *QSGVivanteVideoMaterial::type() const {
@@ -94,9 +91,11 @@ void QSGVivanteVideoMaterial::updateBlending() {
setFlag(Blending, qFuzzyCompare(mOpacity, qreal(1.0)) ? false : true);
}
-void QSGVivanteVideoMaterial::setCurrentFrame(const QVideoFrame &frame) {
+void QSGVivanteVideoMaterial::setCurrentFrame(const QVideoFrame &frame, QSGVideoNode::FrameFlags flags)
+{
QMutexLocker lock(&mFrameMutex);
mNextFrame = frame;
+ mMappable = !flags.testFlag(QSGVideoNode::FrameFiltered);
#ifdef QT_VIVANTE_VIDEO_DEBUG
qDebug() << Q_FUNC_INFO << " new frame: " << frame;
@@ -122,6 +121,22 @@ void QSGVivanteVideoMaterial::bind()
glBindTexture(GL_TEXTURE_2D, mCurrentTexture);
}
+void QSGVivanteVideoMaterial::clearTextures()
+{
+ Q_FOREACH (GLuint id, mBitsToTextureMap.values()) {
+#ifdef QT_VIVANTE_VIDEO_DEBUG
+ qDebug() << "delete texture: " << id;
+#endif
+ glDeleteTextures(1, &id);
+ }
+ mBitsToTextureMap.clear();
+
+ if (mTexDirectTexture) {
+ glDeleteTextures(1, &mTexDirectTexture);
+ mTexDirectTexture = 0;
+ }
+}
+
GLuint QSGVivanteVideoMaterial::vivanteMapping(QVideoFrame vF)
{
QOpenGLContext *glcontext = QOpenGLContext::currentContext();
@@ -130,14 +145,16 @@ GLuint QSGVivanteVideoMaterial::vivanteMapping(QVideoFrame vF)
return 0;
}
+ static PFNGLTEXDIRECTVIVPROC glTexDirectVIV_LOCAL = 0;
static PFNGLTEXDIRECTVIVMAPPROC glTexDirectVIVMap_LOCAL = 0;
static PFNGLTEXDIRECTINVALIDATEVIVPROC glTexDirectInvalidateVIV_LOCAL = 0;
- if (glTexDirectVIVMap_LOCAL == 0 || glTexDirectInvalidateVIV_LOCAL == 0) {
+ if (glTexDirectVIV_LOCAL == 0 || glTexDirectVIVMap_LOCAL == 0 || glTexDirectInvalidateVIV_LOCAL == 0) {
+ glTexDirectVIV_LOCAL = reinterpret_cast<PFNGLTEXDIRECTVIVPROC>(glcontext->getProcAddress("glTexDirectVIV"));
glTexDirectVIVMap_LOCAL = reinterpret_cast<PFNGLTEXDIRECTVIVMAPPROC>(glcontext->getProcAddress("glTexDirectVIVMap"));
glTexDirectInvalidateVIV_LOCAL = reinterpret_cast<PFNGLTEXDIRECTINVALIDATEVIVPROC>(glcontext->getProcAddress("glTexDirectInvalidateVIV"));
}
- if (glTexDirectVIVMap_LOCAL == 0 || glTexDirectInvalidateVIV_LOCAL == 0) {
+ if (glTexDirectVIV_LOCAL == 0 || glTexDirectVIVMap_LOCAL == 0 || glTexDirectInvalidateVIV_LOCAL == 0) {
qWarning() << Q_FUNC_INFO << "couldn't find \"glTexDirectVIVMap\" and/or \"glTexDirectInvalidateVIV\" => do nothing and return";
return 0;
}
@@ -146,49 +163,80 @@ GLuint QSGVivanteVideoMaterial::vivanteMapping(QVideoFrame vF)
mWidth = vF.width();
mHeight = vF.height();
mFormat = vF.pixelFormat();
- Q_FOREACH (GLuint id, mBitsToTextureMap.values()) {
-#ifdef QT_VIVANTE_VIDEO_DEBUG
- qDebug() << "delete texture: " << id;
-#endif
- glDeleteTextures(1, &id);
- }
- mBitsToTextureMap.clear();
+ clearTextures();
}
if (vF.map(QAbstractVideoBuffer::ReadOnly)) {
- if (!mBitsToTextureMap.contains(vF.bits())) {
- GLuint tmpTexId;
- glGenTextures(1, &tmpTexId);
- mBitsToTextureMap.insert(vF.bits(), tmpTexId);
+ if (mMappable) {
+ if (!mBitsToTextureMap.contains(vF.bits())) {
+ // Haven't yet seen this logical address: map to texture.
+ GLuint tmpTexId;
+ glGenTextures(1, &tmpTexId);
+ mBitsToTextureMap.insert(vF.bits(), tmpTexId);
- const uchar *constBits = vF.bits();
- void *bits = (void*)constBits;
+ const uchar *constBits = vF.bits();
+ void *bits = (void*)constBits;
#ifdef QT_VIVANTE_VIDEO_DEBUG
- qDebug() << Q_FUNC_INFO << "new texture, texId: " << tmpTexId << "; constBits: " << constBits;
+ qDebug() << Q_FUNC_INFO << "new texture, texId: " << tmpTexId << "; constBits: " << constBits;
#endif
- GLuint physical = ~0U;
-
- glBindTexture(GL_TEXTURE_2D, tmpTexId);
- glTexDirectVIVMap_LOCAL(GL_TEXTURE_2D,
- vF.width(), vF.height(),
- QSGVivanteVideoNode::getVideoFormat2GLFormatMap().value(vF.pixelFormat()),
- &bits, &physical);
-
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
- glTexDirectInvalidateVIV_LOCAL(GL_TEXTURE_2D);
-
- return tmpTexId;
- }
- else {
- glBindTexture(GL_TEXTURE_2D, mBitsToTextureMap.value(vF.bits()));
+ GLuint physical = ~0U;
+
+ glBindTexture(GL_TEXTURE_2D, tmpTexId);
+ glTexDirectVIVMap_LOCAL(GL_TEXTURE_2D,
+ vF.width(), vF.height(),
+ QSGVivanteVideoNode::getVideoFormat2GLFormatMap().value(vF.pixelFormat()),
+ &bits, &physical);
+
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ glTexDirectInvalidateVIV_LOCAL(GL_TEXTURE_2D);
+
+ return tmpTexId;
+ } else {
+ // Fastest path: already seen this logical address. Just
+ // indicate that the data belonging to the texture has changed.
+ glBindTexture(GL_TEXTURE_2D, mBitsToTextureMap.value(vF.bits()));
+ glTexDirectInvalidateVIV_LOCAL(GL_TEXTURE_2D);
+ return mBitsToTextureMap.value(vF.bits());
+ }
+ } else {
+ // Cannot map. So copy.
+ if (!mTexDirectTexture) {
+ glGenTextures(1, &mTexDirectTexture);
+ glBindTexture(GL_TEXTURE_2D, mTexDirectTexture);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ glTexDirectVIV_LOCAL(GL_TEXTURE_2D, mCurrentFrame.width(), mCurrentFrame.height(),
+ QSGVivanteVideoNode::getVideoFormat2GLFormatMap().value(mCurrentFrame.pixelFormat()),
+ (GLvoid **) &mTexDirectPlanes);
+ } else {
+ glBindTexture(GL_TEXTURE_2D, mTexDirectTexture);
+ }
+ switch (mCurrentFrame.pixelFormat()) {
+ case QVideoFrame::Format_YUV420P:
+ case QVideoFrame::Format_YV12:
+            memcpy(mTexDirectPlanes[0], mCurrentFrame.bits(0), mCurrentFrame.height() * mCurrentFrame.bytesPerLine(0));
+            memcpy(mTexDirectPlanes[1], mCurrentFrame.bits(1), mCurrentFrame.height() / 2 * mCurrentFrame.bytesPerLine(1));
+            memcpy(mTexDirectPlanes[2], mCurrentFrame.bits(2), mCurrentFrame.height() / 2 * mCurrentFrame.bytesPerLine(2));
+ break;
+ case QVideoFrame::Format_NV12:
+ case QVideoFrame::Format_NV21:
+ memcpy(mTexDirectPlanes[0], mCurrentFrame.bits(0), mCurrentFrame.height() * mCurrentFrame.bytesPerLine(0));
+ memcpy(mTexDirectPlanes[1], mCurrentFrame.bits(1), mCurrentFrame.height() / 2 * mCurrentFrame.bytesPerLine(1));
+ break;
+ default:
+ memcpy(mTexDirectPlanes[0], mCurrentFrame.bits(), mCurrentFrame.height() * mCurrentFrame.bytesPerLine());
+ break;
+ }
glTexDirectInvalidateVIV_LOCAL(GL_TEXTURE_2D);
- return mBitsToTextureMap.value(vF.bits());
+ return mTexDirectTexture;
}
}
else {
diff --git a/src/plugins/videonode/imx6/qsgvivantevideomaterial.h b/src/plugins/videonode/imx6/qsgvivantevideomaterial.h
index 227bc633c..910595836 100644
--- a/src/plugins/videonode/imx6/qsgvivantevideomaterial.h
+++ b/src/plugins/videonode/imx6/qsgvivantevideomaterial.h
@@ -41,6 +41,7 @@
#include <QVideoFrame>
#include <QMutex>
+#include <private/qsgvideonode_p.h>
class QSGVivanteVideoMaterial : public QSGMaterial
{
@@ -52,7 +53,7 @@ public:
virtual QSGMaterialShader *createShader() const;
virtual int compare(const QSGMaterial *other) const;
void updateBlending();
- void setCurrentFrame(const QVideoFrame &frame);
+ void setCurrentFrame(const QVideoFrame &frame, QSGVideoNode::FrameFlags flags);
void bind();
GLuint vivanteMapping(QVideoFrame texIdVideoFramePair);
@@ -60,6 +61,8 @@ public:
void setOpacity(float o) { mOpacity = o; }
private:
+ void clearTextures();
+
qreal mOpacity;
int mWidth;
@@ -69,8 +72,12 @@ private:
QMap<const uchar*, GLuint> mBitsToTextureMap;
QVideoFrame mCurrentFrame, mNextFrame;
GLuint mCurrentTexture;
+ bool mMappable;
QMutex mFrameMutex;
+
+ GLuint mTexDirectTexture;
+ GLvoid *mTexDirectPlanes[3];
};
#endif // QSGVIDEOMATERIAL_VIVMAP_H
diff --git a/src/plugins/videonode/imx6/qsgvivantevideonode.cpp b/src/plugins/videonode/imx6/qsgvivantevideonode.cpp
index 1c1c1008b..e24ab3962 100644
--- a/src/plugins/videonode/imx6/qsgvivantevideonode.cpp
+++ b/src/plugins/videonode/imx6/qsgvivantevideonode.cpp
@@ -52,9 +52,9 @@ QSGVivanteVideoNode::~QSGVivanteVideoNode()
{
}
-void QSGVivanteVideoNode::setCurrentFrame(const QVideoFrame &frame)
+void QSGVivanteVideoNode::setCurrentFrame(const QVideoFrame &frame, FrameFlags flags)
{
- mMaterial->setCurrentFrame(frame);
+ mMaterial->setCurrentFrame(frame, flags);
markDirty(DirtyMaterial);
}
diff --git a/src/plugins/videonode/imx6/qsgvivantevideonode.h b/src/plugins/videonode/imx6/qsgvivantevideonode.h
index 5830cc3b4..79b6e9e57 100644
--- a/src/plugins/videonode/imx6/qsgvivantevideonode.h
+++ b/src/plugins/videonode/imx6/qsgvivantevideonode.h
@@ -44,8 +44,9 @@ public:
QSGVivanteVideoNode(const QVideoSurfaceFormat &format);
~QSGVivanteVideoNode();
- virtual QVideoFrame::PixelFormat pixelFormat() const { return mFormat.pixelFormat(); }
- void setCurrentFrame(const QVideoFrame &frame);
+ QVideoFrame::PixelFormat pixelFormat() const { return mFormat.pixelFormat(); }
+ QAbstractVideoBuffer::HandleType handleType() const { return QAbstractVideoBuffer::NoHandle; }
+ void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags);
static const QMap<QVideoFrame::PixelFormat, GLenum>& getVideoFormat2GLFormatMap();
diff --git a/src/qtmultimediaquicktools/qdeclarativevideooutput.cpp b/src/qtmultimediaquicktools/qdeclarativevideooutput.cpp
index 9a9256868..c36fca67d 100644
--- a/src/qtmultimediaquicktools/qdeclarativevideooutput.cpp
+++ b/src/qtmultimediaquicktools/qdeclarativevideooutput.cpp
@@ -262,6 +262,12 @@ bool QDeclarativeVideoOutput::createBackend(QMediaService *service)
m_backend->updateGeometry();
}
+ if (m_backend) {
+ m_backend->clearFilters();
+ for (int i = 0; i < m_filters.count(); ++i)
+ m_backend->appendFilter(m_filters[i]);
+ }
+
return backendAvailable;
}
@@ -795,6 +801,12 @@ void QDeclarativeVideoOutput::itemChange(QQuickItem::ItemChange change,
m_backend->itemChange(change, changeData);
}
+void QDeclarativeVideoOutput::releaseResources()
+{
+ if (m_backend)
+ m_backend->releaseResources();
+}
+
void QDeclarativeVideoOutput::geometryChanged(const QRectF &newGeometry, const QRectF &oldGeometry)
{
Q_UNUSED(newGeometry);
@@ -809,4 +821,54 @@ void QDeclarativeVideoOutput::geometryChanged(const QRectF &newGeometry, const Q
_q_updateGeometry();
}
+/*!
+ \qmlproperty list<object> QtMultimedia::VideoOutput::filters
+
+ This property holds the list of video filters that are run on the video
+ frames. The order of the filters in the list matches the order in which
+ they will be invoked on the video frames. The objects in the list must be
+ instances of a subclass of QAbstractVideoFilter.
+
+ \sa QAbstractVideoFilter
+*/
+
+QQmlListProperty<QAbstractVideoFilter> QDeclarativeVideoOutput::filters()
+{
+ return QQmlListProperty<QAbstractVideoFilter>(this, 0, filter_append, filter_count, filter_at, filter_clear);
+}
+
+void QDeclarativeVideoOutput::filter_append(QQmlListProperty<QAbstractVideoFilter> *property, QAbstractVideoFilter *value)
+{
+ QDeclarativeVideoOutput *self = static_cast<QDeclarativeVideoOutput *>(property->object);
+ self->m_filters.append(value);
+ if (self->m_backend)
+ self->m_backend->appendFilter(value);
+}
+
+int QDeclarativeVideoOutput::filter_count(QQmlListProperty<QAbstractVideoFilter> *property)
+{
+ QDeclarativeVideoOutput *self = static_cast<QDeclarativeVideoOutput *>(property->object);
+ return self->m_filters.count();
+}
+
+QAbstractVideoFilter *QDeclarativeVideoOutput::filter_at(QQmlListProperty<QAbstractVideoFilter> *property, int index)
+{
+ QDeclarativeVideoOutput *self = static_cast<QDeclarativeVideoOutput *>(property->object);
+ return self->m_filters.at(index);
+}
+
+void QDeclarativeVideoOutput::filter_clear(QQmlListProperty<QAbstractVideoFilter> *property)
+{
+ QDeclarativeVideoOutput *self = static_cast<QDeclarativeVideoOutput *>(property->object);
+ self->m_filters.clear();
+ if (self->m_backend)
+ self->m_backend->clearFilters();
+}
+
+void QDeclarativeVideoOutput::_q_invalidateSceneGraph()
+{
+ if (m_backend)
+ m_backend->invalidateSceneGraph();
+}
+
QT_END_NAMESPACE
diff --git a/src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp b/src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp
index 657ef1767..bde416f44 100644
--- a/src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp
+++ b/src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp
@@ -34,6 +34,7 @@
#include "qdeclarativevideooutput_render_p.h"
#include "qdeclarativevideooutput_p.h"
+#include <QtMultimedia/qabstractvideofilter.h>
#include <QtMultimedia/qvideorenderercontrol.h>
#include <QtMultimedia/qmediaservice.h>
#include <QtCore/qloggingcategory.h>
@@ -41,6 +42,8 @@
#include <private/qsgvideonode_p.h>
#include <QtGui/QOpenGLContext>
+#include <QtQuick/QQuickWindow>
+#include <QtCore/QRunnable>
QT_BEGIN_NAMESPACE
@@ -103,11 +106,79 @@ bool QDeclarativeVideoRendererBackend::init(QMediaService *service)
return false;
}
+void QDeclarativeVideoRendererBackend::appendFilter(QAbstractVideoFilter *filter)
+{
+ QMutexLocker lock(&m_frameMutex);
+ m_filters.append(Filter(filter));
+}
+
+void QDeclarativeVideoRendererBackend::clearFilters()
+{
+ QMutexLocker lock(&m_frameMutex);
+ scheduleDeleteFilterResources();
+ m_filters.clear();
+}
+
+class FilterRunnableDeleter : public QRunnable
+{
+public:
+ FilterRunnableDeleter(const QList<QVideoFilterRunnable *> &runnables) : m_runnables(runnables) { }
+ void run() Q_DECL_OVERRIDE {
+ foreach (QVideoFilterRunnable *runnable, m_runnables)
+ delete runnable;
+ }
+private:
+ QList<QVideoFilterRunnable *> m_runnables;
+};
+
+void QDeclarativeVideoRendererBackend::scheduleDeleteFilterResources()
+{
+ if (!q->window())
+ return;
+
+ QList<QVideoFilterRunnable *> runnables;
+ for (int i = 0; i < m_filters.count(); ++i) {
+ if (m_filters[i].runnable) {
+ runnables.append(m_filters[i].runnable);
+ m_filters[i].runnable = 0;
+ }
+ }
+
+ if (!runnables.isEmpty()) {
+ // Request the scenegraph to run our cleanup job on the render thread.
+ // The execution of our QRunnable may happen after the QML tree including the QAbstractVideoFilter instance is
+ // destroyed on the main thread so no references to it must be used during cleanup.
+ q->window()->scheduleRenderJob(new FilterRunnableDeleter(runnables), QQuickWindow::BeforeSynchronizingStage);
+ }
+}
+
+void QDeclarativeVideoRendererBackend::releaseResources()
+{
+ // Called on the gui thread when the window is closed or changed.
+ QMutexLocker lock(&m_frameMutex);
+ scheduleDeleteFilterResources();
+}
+
+void QDeclarativeVideoRendererBackend::invalidateSceneGraph()
+{
+ // Called on the render thread, e.g. when the context is lost.
+ QMutexLocker lock(&m_frameMutex);
+ for (int i = 0; i < m_filters.count(); ++i) {
+ if (m_filters[i].runnable) {
+ delete m_filters[i].runnable;
+ m_filters[i].runnable = 0;
+ }
+ }
+}
+
void QDeclarativeVideoRendererBackend::itemChange(QQuickItem::ItemChange change,
const QQuickItem::ItemChangeData &changeData)
{
- Q_UNUSED(change);
- Q_UNUSED(changeData);
+ if (change == QQuickItem::ItemSceneChange) {
+ if (changeData.window)
+ QObject::connect(changeData.window, SIGNAL(sceneGraphInvalidated()),
+ q, SLOT(_q_invalidateSceneGraph()), Qt::DirectConnection);
+ }
}
void QDeclarativeVideoRendererBackend::releaseSource()
@@ -216,8 +287,36 @@ QSGNode *QDeclarativeVideoRendererBackend::updatePaintNode(QSGNode *oldNode,
}
#endif
+ bool isFrameModified = false;
if (m_frameChanged) {
- if (videoNode && videoNode->pixelFormat() != m_frame.pixelFormat()) {
+ // Run the VideoFilter if there is one. This must be done before potentially changing the videonode below.
+ if (m_frame.isValid() && !m_filters.isEmpty()) {
+ const QVideoSurfaceFormat surfaceFormat = videoSurface()->surfaceFormat();
+ for (int i = 0; i < m_filters.count(); ++i) {
+ QAbstractVideoFilter *filter = m_filters[i].filter;
+ QVideoFilterRunnable *&runnable = m_filters[i].runnable;
+ if (filter && filter->isActive()) {
+ // Create the filter runnable if not yet done. Ownership is taken and is tied to this thread, on which rendering happens.
+ if (!runnable)
+ runnable = filter->createFilterRunnable();
+ if (!runnable)
+ continue;
+
+ QVideoFilterRunnable::RunFlags flags = 0;
+ if (i == m_filters.count() - 1)
+ flags |= QVideoFilterRunnable::LastInChain;
+
+ QVideoFrame newFrame = runnable->run(&m_frame, surfaceFormat, flags);
+
+ if (newFrame.isValid() && newFrame != m_frame) {
+ isFrameModified = true;
+ m_frame = newFrame;
+ }
+ }
+ }
+ }
+
+ if (videoNode && (videoNode->pixelFormat() != m_frame.pixelFormat() || videoNode->handleType() != m_frame.handleType())) {
qCDebug(qLcVideo) << "updatePaintNode: deleting old video node because frame format changed";
delete videoNode;
videoNode = 0;
@@ -231,7 +330,9 @@ QSGNode *QDeclarativeVideoRendererBackend::updatePaintNode(QSGNode *oldNode,
if (!videoNode) {
foreach (QSGVideoNodeFactoryInterface* factory, m_videoNodeFactories) {
- videoNode = factory->createNode(m_surface->surfaceFormat());
+ // Get a node that supports our frame. The surface is irrelevant, our
+ // QSGVideoItemSurface supports (logically) anything.
+ videoNode = factory->createNode(QVideoSurfaceFormat(m_frame.size(), m_frame.pixelFormat(), m_frame.handleType()));
if (videoNode) {
qCDebug(qLcVideo) << "updatePaintNode: Video node created. Handle type:" << m_frame.handleType()
<< " Supported formats for the handle by this node:"
@@ -252,7 +353,10 @@ QSGNode *QDeclarativeVideoRendererBackend::updatePaintNode(QSGNode *oldNode,
videoNode->setTexturedRectGeometry(m_renderedRect, m_sourceTextureRect,
qNormalizedOrientation(q->orientation()));
if (m_frameChanged) {
- videoNode->setCurrentFrame(m_frame);
+ QSGVideoNode::FrameFlags flags = 0;
+ if (isFrameModified)
+ flags |= QSGVideoNode::FrameFiltered;
+ videoNode->setCurrentFrame(m_frame, flags);
//don't keep the frame for more than really necessary
m_frameChanged = false;
m_frame = QVideoFrame();
diff --git a/src/qtmultimediaquicktools/qdeclarativevideooutput_render_p.h b/src/qtmultimediaquicktools/qdeclarativevideooutput_render_p.h
index dd8449fa8..a0fed3d00 100644
--- a/src/qtmultimediaquicktools/qdeclarativevideooutput_render_p.h
+++ b/src/qtmultimediaquicktools/qdeclarativevideooutput_render_p.h
@@ -48,6 +48,8 @@ QT_BEGIN_NAMESPACE
class QSGVideoItemSurface;
class QVideoRendererControl;
class QOpenGLContext;
+class QAbstractVideoFilter;
+class QVideoFilterRunnable;
class QDeclarativeVideoRendererBackend : public QDeclarativeVideoBackend
{
@@ -70,7 +72,14 @@ public:
void present(const QVideoFrame &frame);
void stop();
+ void appendFilter(QAbstractVideoFilter *filter) Q_DECL_OVERRIDE;
+ void clearFilters() Q_DECL_OVERRIDE;
+ void releaseResources() Q_DECL_OVERRIDE;
+ void invalidateSceneGraph() Q_DECL_OVERRIDE;
+
private:
+ void scheduleDeleteFilterResources();
+
QPointer<QVideoRendererControl> m_rendererControl;
QList<QSGVideoNodeFactoryInterface*> m_videoNodeFactories;
QSGVideoItemSurface *m_surface;
@@ -83,6 +92,14 @@ private:
QMutex m_frameMutex;
QRectF m_renderedRect; // Destination pixel coordinates, clipped
QRectF m_sourceTextureRect; // Source texture coordinates
+
+ struct Filter {
+ Filter() : filter(0), runnable(0) { }
+ Filter(QAbstractVideoFilter *filter) : filter(filter), runnable(0) { }
+ QAbstractVideoFilter *filter;
+ QVideoFilterRunnable *runnable;
+ };
+ QList<Filter> m_filters;
};
class QSGVideoItemSurface : public QAbstractVideoSurface
diff --git a/src/qtmultimediaquicktools/qsgvideonode_i420.cpp b/src/qtmultimediaquicktools/qsgvideonode_i420.cpp
index 17b4924d0..3ff1f5bd9 100644
--- a/src/qtmultimediaquicktools/qsgvideonode_i420.cpp
+++ b/src/qtmultimediaquicktools/qsgvideonode_i420.cpp
@@ -311,7 +311,7 @@ QSGVideoNode_I420::~QSGVideoNode_I420()
{
}
-void QSGVideoNode_I420::setCurrentFrame(const QVideoFrame &frame)
+void QSGVideoNode_I420::setCurrentFrame(const QVideoFrame &frame, FrameFlags)
{
m_material->setCurrentFrame(frame);
markDirty(DirtyMaterial);
diff --git a/src/qtmultimediaquicktools/qsgvideonode_i420.h b/src/qtmultimediaquicktools/qsgvideonode_i420.h
index 06d302d80..71dacf157 100644
--- a/src/qtmultimediaquicktools/qsgvideonode_i420.h
+++ b/src/qtmultimediaquicktools/qsgvideonode_i420.h
@@ -49,7 +49,10 @@ public:
virtual QVideoFrame::PixelFormat pixelFormat() const {
return m_format.pixelFormat();
}
- void setCurrentFrame(const QVideoFrame &frame);
+ QAbstractVideoBuffer::HandleType handleType() const {
+ return QAbstractVideoBuffer::NoHandle;
+ }
+ void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags);
private:
void bindTexture(int id, int unit, int w, int h, const uchar *bits);
diff --git a/src/qtmultimediaquicktools/qsgvideonode_rgb.cpp b/src/qtmultimediaquicktools/qsgvideonode_rgb.cpp
index 72cbc9614..486632914 100644
--- a/src/qtmultimediaquicktools/qsgvideonode_rgb.cpp
+++ b/src/qtmultimediaquicktools/qsgvideonode_rgb.cpp
@@ -278,7 +278,7 @@ QSGVideoNode_RGB::~QSGVideoNode_RGB()
{
}
-void QSGVideoNode_RGB::setCurrentFrame(const QVideoFrame &frame)
+void QSGVideoNode_RGB::setCurrentFrame(const QVideoFrame &frame, FrameFlags)
{
m_material->setVideoFrame(frame);
markDirty(DirtyMaterial);
diff --git a/src/qtmultimediaquicktools/qsgvideonode_rgb.h b/src/qtmultimediaquicktools/qsgvideonode_rgb.h
index e6800ce9d..6fcca9bf4 100644
--- a/src/qtmultimediaquicktools/qsgvideonode_rgb.h
+++ b/src/qtmultimediaquicktools/qsgvideonode_rgb.h
@@ -50,7 +50,10 @@ public:
virtual QVideoFrame::PixelFormat pixelFormat() const {
return m_format.pixelFormat();
}
- void setCurrentFrame(const QVideoFrame &frame);
+ QAbstractVideoBuffer::HandleType handleType() const {
+ return QAbstractVideoBuffer::NoHandle;
+ }
+ void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags);
private:
QVideoSurfaceFormat m_format;
diff --git a/src/qtmultimediaquicktools/qsgvideonode_texture.cpp b/src/qtmultimediaquicktools/qsgvideonode_texture.cpp
index 3af14c0b8..b88327d0d 100644
--- a/src/qtmultimediaquicktools/qsgvideonode_texture.cpp
+++ b/src/qtmultimediaquicktools/qsgvideonode_texture.cpp
@@ -235,7 +235,7 @@ QSGVideoNode_Texture::~QSGVideoNode_Texture()
{
}
-void QSGVideoNode_Texture::setCurrentFrame(const QVideoFrame &frame)
+void QSGVideoNode_Texture::setCurrentFrame(const QVideoFrame &frame, FrameFlags)
{
m_material->setVideoFrame(frame);
markDirty(DirtyMaterial);
diff --git a/src/qtmultimediaquicktools/qsgvideonode_texture.h b/src/qtmultimediaquicktools/qsgvideonode_texture.h
index c5b84c53d..1389d4f3d 100644
--- a/src/qtmultimediaquicktools/qsgvideonode_texture.h
+++ b/src/qtmultimediaquicktools/qsgvideonode_texture.h
@@ -50,7 +50,10 @@ public:
virtual QVideoFrame::PixelFormat pixelFormat() const {
return m_format.pixelFormat();
}
- void setCurrentFrame(const QVideoFrame &frame);
+ QAbstractVideoBuffer::HandleType handleType() const {
+ return QAbstractVideoBuffer::GLTextureHandle;
+ }
+ void setCurrentFrame(const QVideoFrame &frame, FrameFlags flags);
private:
QVideoSurfaceFormat m_format;