path: root/examples/multimedia
author    Laszlo Agocs <laszlo.agocs@theqtcompany.com>    2015-01-08 14:32:41 +0100
committer Yoann Lopes <yoann.lopes@theqtcompany.com>      2015-02-03 14:41:39 +0000
commit    3e94b7ce2d8166767ec47425d2cefbc77cb5fde2 (patch)
tree      c27061139c94b04d8237d4fabba05801a39f9369 /examples/multimedia
parent    2f494446388e314286506eae335439b7bbc75736 (diff)
Add video filtering support to VideoOutput
Add the QAbstractVideoFilter base class and integrate it with VideoOutput. It can be used to perform arbitrary filtering or image processing on the frames of a video stream of a VideoOutput element, right before the OpenGL texture is handed to the scene graph by the video node. This opens up the possibility of integrating computer vision frameworks or accelerated image processing with Qt Quick applications that display video streams using Qt Multimedia.

Conceptually it is somewhat similar to QVideoProbe; this approach, however, allows modifying the frame in real time with tight integration into the scene graph node, and it targets Qt Quick, meaning that setting up the filter and processing the results of the computations happen entirely in QML.

[ChangeLog] Added QAbstractVideoFilter that serves as a base class for QML video filtering elements that integrate compute, vision, and image processing frameworks with VideoOutput.

Change-Id: Ice1483f8c2daec5a43536978627a7bbb64549480
Reviewed-by: Yoann Lopes <yoann.lopes@theqtcompany.com>
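For orientation, the pattern introduced here is a subclass pair: a QAbstractVideoFilter that lives as a QML element, and a QVideoFilterRunnable created by it whose run() is invoked for every video frame (with the OpenGL context current, which is what the example below relies on for its CL-GL work). A minimal, hypothetical pass-through sketch follows; it is not part of the commit and the class names are made up for illustration.

#include <QAbstractVideoFilter>
#include <QVideoFrame>
#include <QVideoSurfaceFormat>

// Hypothetical runnable: called once per frame; it may inspect or modify the
// frame, or return an entirely new one.
class PassThroughRunnable : public QVideoFilterRunnable
{
public:
    QVideoFrame run(QVideoFrame *input, const QVideoSurfaceFormat &surfaceFormat,
                    RunFlags flags) Q_DECL_OVERRIDE
    {
        Q_UNUSED(surfaceFormat);
        Q_UNUSED(flags);
        // No-op: hand the frame back unchanged.
        return *input;
    }
};

// Hypothetical filter element: instantiated in QML, it only has to hand out a
// runnable. Heavy per-frame work belongs in the runnable, not here.
class PassThroughFilter : public QAbstractVideoFilter
{
    Q_OBJECT
public:
    QVideoFilterRunnable *createFilterRunnable() Q_DECL_OVERRIDE
    { return new PassThroughRunnable; }
};

// Registered for QML with qmlRegisterType<PassThroughFilter>(...) and attached
// to a VideoOutput element through its 'filters' list property.

In QML the element is then listed in the VideoOutput's filters property, exactly as main.qml below does with CLFilter and InfoFilter.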
Diffstat (limited to 'examples/multimedia')
-rw-r--r--  examples/multimedia/video/qmlvideofilter_opencl/README                       18
-rw-r--r--  examples/multimedia/video/qmlvideofilter_opencl/main.cpp                    481
-rw-r--r--  examples/multimedia/video/qmlvideofilter_opencl/main.qml                    114
-rw-r--r--  examples/multimedia/video/qmlvideofilter_opencl/qmlvideofilter_opencl.pro    22
-rw-r--r--  examples/multimedia/video/qmlvideofilter_opencl/qmlvideofilter_opencl.qrc     5
-rw-r--r--  examples/multimedia/video/qmlvideofilter_opencl/rgbframehelper.h            124
6 files changed, 764 insertions, 0 deletions
diff --git a/examples/multimedia/video/qmlvideofilter_opencl/README b/examples/multimedia/video/qmlvideofilter_opencl/README
new file mode 100644
index 000000000..c239bed2f
--- /dev/null
+++ b/examples/multimedia/video/qmlvideofilter_opencl/README
@@ -0,0 +1,18 @@
+This example performs a simple OpenCL operation on camera or video input, which is
+assumed to be provided in RGB format. The OpenCL kernel operates on an OpenGL
+texture using CL-GL interop, without any further readbacks or copies (except for
+the initial texture upload, when necessary).
+
+Currently only OS X and Windows with desktop OpenGL (opengl32.dll) are supported.
+On Windows you may need to edit qmlvideofilter_opencl.pro to specify the location of
+the OpenCL headers and libraries.
+
+Note that an OpenCL implementation with GPU support is required.
+The platform and device selection logic supports NVIDIA and Intel.
+Porting to other OpenCL platforms should be straightforward; see the use of
+clCreateContextFromType in main.cpp and the sketch after this README. Note however that
+YUV formats, which are common for camera input on some platforms, are not supported here.
+
+Pass the name of a video file as a command-line argument to play it back, or
+launch without arguments to use the camera.
+
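As referenced in the README, extending the vendor/platform matching beyond NVIDIA and Intel could look roughly like the following standalone helper. This is illustrative only: pickPlatformForGLVendor is a made-up name, and the AMD/ATI string checks are untested assumptions, not something the example ships.

#include <QByteArray>
#include <QVector>
#include <cstring>
#ifdef Q_OS_OSX
#include <OpenCL/opencl.h>
#else
#include <CL/opencl.h>
#endif

// Hypothetical helper: pick the OpenCL platform that matches the vendor string
// reported by OpenGL (GL_VENDOR), so that CL-GL interop targets the same GPU.
// Falls back to the first platform when nothing matches.
static cl_platform_id pickPlatformForGLVendor(const char *glVendor)
{
    cl_uint n = 0;
    if (clGetPlatformIDs(0, 0, &n) != CL_SUCCESS || n == 0)
        return 0;
    QVector<cl_platform_id> ids(n);
    clGetPlatformIDs(n, ids.data(), 0);
    cl_platform_id chosen = ids[0];
    for (cl_uint i = 0; i < n; ++i) {
        QByteArray name(1024, '\0');
        clGetPlatformInfo(ids[i], CL_PLATFORM_NAME, name.size(), name.data(), 0);
        if (glVendor && strstr(glVendor, "NVIDIA") && name.contains("NVIDIA"))
            chosen = ids[i];
        else if (glVendor && strstr(glVendor, "Intel") && name.contains("Intel"))
            chosen = ids[i];
        else if (glVendor && (strstr(glVendor, "AMD") || strstr(glVendor, "ATI"))
                 && name.contains("AMD")) // assumption: AMD platforms report "AMD ..."
            chosen = ids[i];
    }
    return chosen;
}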
diff --git a/examples/multimedia/video/qmlvideofilter_opencl/main.cpp b/examples/multimedia/video/qmlvideofilter_opencl/main.cpp
new file mode 100644
index 000000000..7e2796395
--- /dev/null
+++ b/examples/multimedia/video/qmlvideofilter_opencl/main.cpp
@@ -0,0 +1,481 @@
+/****************************************************************************
+**
+** Copyright (C) 2015 Digia Plc and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/legal
+**
+** This file is part of the examples of the Qt Multimedia module.
+**
+** $QT_BEGIN_LICENSE:LGPL21$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and Digia. For licensing terms and
+** conditions see http://qt.digia.com/licensing. For further information
+** use the contact form at http://qt.digia.com/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 2.1 or version 3 as published by the Free
+** Software Foundation and appearing in the file LICENSE.LGPLv21 and
+** LICENSE.LGPLv3 included in the packaging of this file. Please review the
+** following information to ensure the GNU Lesser General Public License
+** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
+** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Digia gives you certain additional
+** rights. These rights are described in the Digia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include <QGuiApplication>
+#include <QQuickView>
+#include <QOpenGLContext>
+#include <QOpenGLFunctions>
+#include <QAbstractVideoFilter>
+#include <QQmlContext>
+#include <QFileInfo>
+
+#ifdef Q_OS_OSX
+#include <OpenCL/opencl.h>
+#include <OpenGL/OpenGL.h>
+#else
+#include <CL/opencl.h>
+#endif
+
+#include "rgbframehelper.h"
+
+static const char *openclSrc =
+ "__constant sampler_t sampler = CLK_NORMALIZED_COORDS_FALSE | CLK_ADDRESS_CLAMP_TO_EDGE | CLK_FILTER_NEAREST;\n"
+ "__kernel void Emboss(__read_only image2d_t imgIn, __write_only image2d_t imgOut, float factor) {\n"
+ " const int2 pos = { get_global_id(0), get_global_id(1) };\n"
+ " float4 diff = read_imagef(imgIn, sampler, pos + (int2)(1,1)) - read_imagef(imgIn, sampler, pos - (int2)(1,1));\n"
+ " float color = (diff.x + diff.y + diff.z) / factor + 0.5f;\n"
+ " write_imagef(imgOut, pos, (float4)(color, color, color, 1.0f));\n"
+ "}\n";
+
+class CLFilter : public QAbstractVideoFilter
+{
+ Q_OBJECT
+ Q_PROPERTY(qreal factor READ factor WRITE setFactor NOTIFY factorChanged)
+
+public:
+ CLFilter() : m_factor(1) { }
+ qreal factor() const { return m_factor; }
+ void setFactor(qreal v);
+
+ QVideoFilterRunnable *createFilterRunnable() Q_DECL_OVERRIDE;
+
+signals:
+ void factorChanged();
+
+private:
+ qreal m_factor;
+};
+
+class CLFilterRunnable : public QVideoFilterRunnable
+{
+public:
+ CLFilterRunnable(CLFilter *filter);
+ ~CLFilterRunnable();
+ QVideoFrame run(QVideoFrame *input, const QVideoSurfaceFormat &surfaceFormat, RunFlags flags) Q_DECL_OVERRIDE;
+
+private:
+ void releaseTextures();
+ uint newTexture();
+
+ CLFilter *m_filter;
+ QSize m_size;
+ uint m_tempTexture;
+ uint m_outTexture;
+ uint m_lastInputTexture;
+ cl_context m_clContext;
+ cl_device_id m_clDeviceId;
+ cl_mem m_clImage[2];
+ cl_command_queue m_clQueue;
+ cl_program m_clProgram;
+ cl_kernel m_clKernel;
+};
+
+QVideoFilterRunnable *CLFilter::createFilterRunnable()
+{
+ return new CLFilterRunnable(this);
+}
+
+CLFilterRunnable::CLFilterRunnable(CLFilter *filter) :
+ m_filter(filter),
+ m_tempTexture(0),
+ m_outTexture(0),
+ m_lastInputTexture(0),
+ m_clContext(0),
+ m_clQueue(0),
+ m_clProgram(0),
+ m_clKernel(0)
+{
+ m_clImage[0] = m_clImage[1] = 0;
+
+ // Set up OpenCL.
+ QOpenGLFunctions *f = QOpenGLContext::currentContext()->functions();
+ cl_int err;
+ cl_uint n;
+ if (clGetPlatformIDs(0, 0, &n) != CL_SUCCESS) {
+ qWarning("Failed to get platform ID count");
+ return;
+ }
+ if (n == 0) {
+ qWarning("No OpenCL platform found");
+ return;
+ }
+ QVector<cl_platform_id> platformIds;
+ platformIds.resize(n);
+ if (clGetPlatformIDs(n, platformIds.data(), 0) != CL_SUCCESS) {
+ qWarning("Failed to get platform IDs");
+ return;
+ }
+ cl_platform_id platform = platformIds[0];
+ const char *vendor = (const char *) f->glGetString(GL_VENDOR);
+ qDebug("GL_VENDOR: %s", vendor);
+ const bool isNV = vendor && strstr(vendor, "NVIDIA");
+ const bool isIntel = vendor && strstr(vendor, "Intel");
+ qDebug("Found %u OpenCL platforms:", n);
+ for (cl_uint i = 0; i < n; ++i) {
+ QByteArray name;
+ name.resize(1024);
+ clGetPlatformInfo(platformIds[i], CL_PLATFORM_NAME, name.size(), name.data(), 0);
+ qDebug("Platform %p: %s", platformIds[i], name.constData());
+ // Running with an OpenCL platform without GPU support is not going
+ // to cut it. In practice we want the platform for the GPU which we
+ // are using with OpenGL.
+ if (isNV && name.contains(QByteArrayLiteral("NVIDIA")))
+ platform = platformIds[i];
+ else if (isIntel && name.contains(QByteArrayLiteral("Intel")))
+ platform = platformIds[i];
+ }
+ qDebug("Using platform %p", platform);
+
+ // Set up the context with OpenCL/OpenGL interop.
+#if defined (Q_OS_OSX)
+ cl_context_properties contextProps[] = { CL_CONTEXT_PROPERTY_USE_CGL_SHAREGROUP_APPLE,
+ (cl_context_properties) CGLGetShareGroup(CGLGetCurrentContext()),
+ 0 };
+#elif defined(Q_OS_WIN)
+ cl_context_properties contextProps[] = { CL_CONTEXT_PLATFORM, (cl_context_properties) platform,
+ CL_GL_CONTEXT_KHR, (cl_context_properties) wglGetCurrentContext(),
+ CL_WGL_HDC_KHR, (cl_context_properties) wglGetCurrentDC(),
+ 0 };
+#endif
+
+ m_clContext = clCreateContextFromType(contextProps, CL_DEVICE_TYPE_GPU, 0, 0, &err);
+ if (!m_clContext) {
+ qWarning("Failed to create OpenCL context: %d", err);
+ return;
+ }
+
+ // Get the GPU device id
+#if defined(Q_OS_OSX)
+ // On OS X, get the "online" device/GPU. This is required for OpenCL/OpenGL context sharing.
+ if (clGetGLContextInfoAPPLE(m_clContext, CGLGetCurrentContext(),
+ CL_CGL_DEVICE_FOR_CURRENT_VIRTUAL_SCREEN_APPLE,
+ sizeof(cl_device_id), &m_clDeviceId, NULL) != CL_SUCCESS) {
+        qWarning("Failed to get OpenCL device for current screen");
+ return;
+ }
+#else
+ if (clGetDeviceIDs(platform, CL_DEVICE_TYPE_GPU, 1, &m_clDeviceId, 0) != CL_SUCCESS) {
+ qWarning("Failed to get OpenCL device");
+ return;
+ }
+#endif
+
+ m_clQueue = clCreateCommandQueue(m_clContext, m_clDeviceId, 0, &err);
+ if (!m_clQueue) {
+ qWarning("Failed to create OpenCL command queue: %d", err);
+ return;
+ }
+ // Build the program.
+ m_clProgram = clCreateProgramWithSource(m_clContext, 1, &openclSrc, 0, &err);
+ if (!m_clProgram) {
+ qWarning("Failed to create OpenCL program: %d", err);
+ return;
+ }
+ if (clBuildProgram(m_clProgram, 1, &m_clDeviceId, 0, 0, 0) != CL_SUCCESS) {
+ qWarning("Failed to build OpenCL program");
+ QByteArray log;
+ log.resize(2048);
+ clGetProgramBuildInfo(m_clProgram, m_clDeviceId, CL_PROGRAM_BUILD_LOG, log.size(), log.data(), 0);
+ qDebug("Build log: %s", log.constData());
+ return;
+ }
+ m_clKernel = clCreateKernel(m_clProgram, "Emboss", &err);
+ if (!m_clKernel) {
+ qWarning("Failed to create emboss OpenCL kernel: %d", err);
+ return;
+ }
+}
+
+CLFilterRunnable::~CLFilterRunnable()
+{
+ releaseTextures();
+ if (m_clKernel)
+ clReleaseKernel(m_clKernel);
+ if (m_clProgram)
+ clReleaseProgram(m_clProgram);
+ if (m_clQueue)
+ clReleaseCommandQueue(m_clQueue);
+ if (m_clContext)
+ clReleaseContext(m_clContext);
+}
+
+void CLFilterRunnable::releaseTextures()
+{
+ QOpenGLFunctions *f = QOpenGLContext::currentContext()->functions();
+ if (m_tempTexture)
+ f->glDeleteTextures(1, &m_tempTexture);
+ if (m_outTexture)
+ f->glDeleteTextures(1, &m_outTexture);
+ m_tempTexture = m_outTexture = m_lastInputTexture = 0;
+ if (m_clImage[0])
+ clReleaseMemObject(m_clImage[0]);
+ if (m_clImage[1])
+ clReleaseMemObject(m_clImage[1]);
+ m_clImage[0] = m_clImage[1] = 0;
+}
+
+uint CLFilterRunnable::newTexture()
+{
+ QOpenGLFunctions *f = QOpenGLContext::currentContext()->functions();
+ GLuint texture;
+ f->glGenTextures(1, &texture);
+ f->glBindTexture(GL_TEXTURE_2D, texture);
+ f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+ f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
+ f->glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_size.width(), m_size.height(),
+ 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
+ return texture;
+}
+
+QVideoFrame CLFilterRunnable::run(QVideoFrame *input, const QVideoSurfaceFormat &surfaceFormat, RunFlags flags)
+{
+ Q_UNUSED(surfaceFormat);
+ Q_UNUSED(flags);
+
+ // This example supports RGB data only, either in system memory (typical with cameras on all
+ // platforms) or as an OpenGL texture (e.g. video playback on OS X or on Windows with ANGLE).
+    // The latter is the fast path where everything happens on the GPU. The former involves a texture upload.
+
+ // ANGLE is not compatible with this example since we only do CL-GL interop, not D3D9/11.
+ if (QOpenGLContext::openGLModuleType() == QOpenGLContext::LibGLES) {
+ qWarning("ANGLE is not supported");
+ return *input;
+ }
+
+ if (!input->isValid()
+ || (input->handleType() != QAbstractVideoBuffer::NoHandle
+ && input->handleType() != QAbstractVideoBuffer::GLTextureHandle)) {
+ qWarning("Invalid input format");
+ return *input;
+ }
+
+ if (input->pixelFormat() == QVideoFrame::Format_YUV420P
+ || input->pixelFormat() == QVideoFrame::Format_YV12) {
+ qWarning("YUV data is not supported");
+ return *input;
+ }
+
+ if (m_size != input->size()) {
+ releaseTextures();
+ m_size = input->size();
+ }
+
+ // Create a texture from the image data.
+ QOpenGLFunctions *f = QOpenGLContext::currentContext()->functions();
+ GLuint texture;
+ if (input->handleType() == QAbstractVideoBuffer::NoHandle) {
+ // Upload.
+ if (m_tempTexture)
+ f->glBindTexture(GL_TEXTURE_2D, m_tempTexture);
+ else
+ m_tempTexture = newTexture();
+ input->map(QAbstractVideoBuffer::ReadOnly);
+        // glTexImage2D was called only once (in newTexture()); use glTexSubImage2D from then on.
+        // This avoids the need to recreate the CL image object on every frame.
+ f->glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, m_size.width(), m_size.height(),
+ GL_RGBA, GL_UNSIGNED_BYTE, input->bits());
+ input->unmap();
+ texture = m_tempTexture;
+ } else {
+ // Already an OpenGL texture.
+ texture = input->handle().toUInt();
+ f->glBindTexture(GL_TEXTURE_2D, texture);
+ // Unlike on the other branch, the input texture may change, so m_clImage[0] may need to be recreated.
+ if (m_lastInputTexture && m_lastInputTexture != texture && m_clImage[0]) {
+ clReleaseMemObject(m_clImage[0]);
+ m_clImage[0] = 0;
+ }
+ m_lastInputTexture = texture;
+ }
+
+ // OpenCL image objects cannot be read and written at the same time. So use
+ // a separate texture for the result.
+ if (!m_outTexture)
+ m_outTexture = newTexture();
+
+ // Create the image objects if not yet done.
+ cl_int err;
+ if (!m_clImage[0]) {
+ m_clImage[0] = clCreateFromGLTexture2D(m_clContext, CL_MEM_READ_ONLY, GL_TEXTURE_2D, 0, texture, &err);
+ if (!m_clImage[0]) {
+            qWarning("Failed to create OpenCL image object from OpenGL texture: %d", err);
+ return *input;
+ }
+ cl_image_format fmt;
+ if (clGetImageInfo(m_clImage[0], CL_IMAGE_FORMAT, sizeof(fmt), &fmt, 0) != CL_SUCCESS) {
+ qWarning("Failed to query image format");
+ return *input;
+ }
+ if (fmt.image_channel_order != CL_RGBA)
+ qWarning("OpenCL image is not RGBA, expect errors");
+ }
+ if (!m_clImage[1]) {
+ m_clImage[1] = clCreateFromGLTexture2D(m_clContext, CL_MEM_WRITE_ONLY, GL_TEXTURE_2D, 0, m_outTexture, &err);
+ if (!m_clImage[1]) {
+            qWarning("Failed to create output OpenCL image object from OpenGL texture: %d", err);
+ return *input;
+ }
+ }
+
+ // We are all set. Queue acquiring the image objects.
+ f->glFinish();
+ clEnqueueAcquireGLObjects(m_clQueue, 2, m_clImage, 0, 0, 0);
+
+ // Set up the kernel arguments.
+ clSetKernelArg(m_clKernel, 0, sizeof(cl_mem), &m_clImage[0]);
+ clSetKernelArg(m_clKernel, 1, sizeof(cl_mem), &m_clImage[1]);
+ // Accessing dynamic properties on the filter element is simple:
+ cl_float factor = m_filter->factor();
+ clSetKernelArg(m_clKernel, 2, sizeof(cl_float), &factor);
+
+ // And queue the kernel.
+ const size_t workSize[] = { size_t(m_size.width()), size_t(m_size.height()) };
+ err = clEnqueueNDRangeKernel(m_clQueue, m_clKernel, 2, 0, workSize, 0, 0, 0, 0);
+ if (err != CL_SUCCESS)
+ qWarning("Failed to enqueue kernel: %d", err);
+
+ // Return the texture from our output image object.
+ // We return a texture even when the original video frame had pixel data in system memory.
+ // Qt Multimedia is smart enough to handle this. Once the data is on the GPU, it stays there. No readbacks, no copies.
+ clEnqueueReleaseGLObjects(m_clQueue, 2, m_clImage, 0, 0, 0);
+ clFinish(m_clQueue);
+ return frameFromTexture(m_outTexture, m_size, input->pixelFormat());
+}
+
+// InfoFilter will just provide some information about the video frame, to demonstrate
+// passing arbitrary data to QML via its finished() signal.
+class InfoFilter : public QAbstractVideoFilter
+{
+ Q_OBJECT
+
+public:
+ QVideoFilterRunnable *createFilterRunnable() Q_DECL_OVERRIDE;
+
+signals:
+ void finished(QObject *result);
+
+private:
+ friend class InfoFilterRunnable;
+};
+
+class InfoFilterRunnable : public QVideoFilterRunnable
+{
+public:
+ InfoFilterRunnable(InfoFilter *filter) : m_filter(filter) { }
+ QVideoFrame run(QVideoFrame *input, const QVideoSurfaceFormat &surfaceFormat, RunFlags flags) Q_DECL_OVERRIDE;
+
+private:
+ InfoFilter *m_filter;
+};
+
+class InfoFilterResult : public QObject
+{
+ Q_OBJECT
+ Q_PROPERTY(QSize frameResolution READ frameResolution)
+ Q_PROPERTY(QString handleType READ handleType)
+ Q_PROPERTY(int pixelFormat READ pixelFormat)
+
+public:
+ InfoFilterResult() : m_pixelFormat(0) { }
+ QSize frameResolution() const { return m_frameResolution; }
+ QString handleType() const { return m_handleType; }
+ int pixelFormat() const { return m_pixelFormat; }
+
+private:
+ QSize m_frameResolution;
+ QString m_handleType;
+ int m_pixelFormat;
+ friend class InfoFilterRunnable;
+};
+
+void CLFilter::setFactor(qreal v)
+{
+ if (m_factor != v) {
+ m_factor = v;
+ emit factorChanged();
+ }
+}
+
+QVideoFilterRunnable *InfoFilter::createFilterRunnable()
+{
+ return new InfoFilterRunnable(this);
+}
+
+QVideoFrame InfoFilterRunnable::run(QVideoFrame *input, const QVideoSurfaceFormat &surfaceFormat, RunFlags flags)
+{
+ Q_UNUSED(surfaceFormat);
+ Q_UNUSED(flags);
+ InfoFilterResult *result = new InfoFilterResult;
+ result->m_frameResolution = input->size();
+ switch (input->handleType()) {
+ case QAbstractVideoBuffer::NoHandle:
+ result->m_handleType = QLatin1String("pixel data");
+ result->m_pixelFormat = input->pixelFormat();
+ break;
+ case QAbstractVideoBuffer::GLTextureHandle:
+ result->m_handleType = QLatin1String("OpenGL texture");
+ break;
+ default:
+ result->m_handleType = QLatin1String("unknown");
+ break;
+ }
+    emit m_filter->finished(result); // parent-less QObject -> ownership transferred to the JS engine; see the note after this file
+ return *input;
+}
+
+int main(int argc, char **argv)
+{
+ QGuiApplication app(argc, argv);
+
+ qmlRegisterType<CLFilter>("qmlvideofilter.cl.test", 1, 0, "CLFilter");
+ qmlRegisterType<InfoFilter>("qmlvideofilter.cl.test", 1, 0, "InfoFilter");
+
+ QQuickView view;
+ QString fn;
+ if (argc > 1) {
+ fn = QUrl::fromLocalFile(QFileInfo(QString::fromUtf8(argv[1])).absoluteFilePath()).toString();
+ qDebug("Playing video %s", qPrintable(fn));
+ } else {
+ qDebug("No video file specified, using camera instead.");
+ }
+ view.rootContext()->setContextProperty("videoFilename", fn);
+ view.setSource(QUrl("qrc:///main.qml"));
+
+ view.show();
+
+ return app.exec();
+}
+
+#include "main.moc"
diff --git a/examples/multimedia/video/qmlvideofilter_opencl/main.qml b/examples/multimedia/video/qmlvideofilter_opencl/main.qml
new file mode 100644
index 000000000..636df568c
--- /dev/null
+++ b/examples/multimedia/video/qmlvideofilter_opencl/main.qml
@@ -0,0 +1,114 @@
+/****************************************************************************
+**
+** Copyright (C) 2015 Digia Plc and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/legal
+**
+** This file is part of the examples of the Qt Multimedia module.
+**
+** $QT_BEGIN_LICENSE:LGPL21$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and Digia. For licensing terms and
+** conditions see http://qt.digia.com/licensing. For further information
+** use the contact form at http://qt.digia.com/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 2.1 or version 3 as published by the Free
+** Software Foundation and appearing in the file LICENSE.LGPLv21 and
+** LICENSE.LGPLv3 included in the packaging of this file. Please review the
+** following information to ensure the GNU Lesser General Public License
+** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
+** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Digia gives you certain additional
+** rights. These rights are described in the Digia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+import QtQuick 2.0
+import QtMultimedia 5.5
+import qmlvideofilter.cl.test 1.0
+
+Item {
+ width: 1024
+ height: 768
+
+ Camera {
+ id: camera
+ }
+
+ MediaPlayer {
+ id: player
+ autoPlay: true
+ source: videoFilename
+ }
+
+ VideoOutput {
+ id: output
+ source: videoFilename !== "" ? player : camera
+ filters: [ infofilter, clfilter ]
+ anchors.fill: parent
+ }
+
+ CLFilter {
+ id: clfilter
+ // Animate a property which is passed to the OpenCL kernel.
+ SequentialAnimation on factor {
+ loops: Animation.Infinite
+ NumberAnimation {
+ from: 1
+ to: 20
+ duration: 6000
+ }
+ NumberAnimation {
+ from: 20
+ to: 1
+ duration: 3000
+ }
+ }
+ }
+
+ InfoFilter {
+ // This filter does not change the image. Instead, it provides some results calculated from the frame.
+ id: infofilter
+ onFinished: {
+ info.res = result.frameResolution.width + "x" + result.frameResolution.height;
+ info.type = result.handleType;
+ info.fmt = result.pixelFormat;
+ }
+ }
+
+ Column {
+ Text {
+ font.pointSize: 20
+ color: "green"
+ text: "Transformed with OpenCL on GPU\nClick to disable and enable the emboss filter"
+ }
+ Text {
+ font.pointSize: 12
+ color: "green"
+ text: "Emboss factor " + Math.round(clfilter.factor)
+ visible: clfilter.active
+ }
+ Text {
+ id: info
+ font.pointSize: 12
+ color: "green"
+ property string res
+ property string type
+ property int fmt
+ text: "Input resolution: " + res + " Input frame type: " + type + (fmt ? " Pixel format: " + fmt : "")
+ }
+ }
+
+ MouseArea {
+ anchors.fill: parent
+ onClicked: clfilter.active = !clfilter.active
+ }
+}
diff --git a/examples/multimedia/video/qmlvideofilter_opencl/qmlvideofilter_opencl.pro b/examples/multimedia/video/qmlvideofilter_opencl/qmlvideofilter_opencl.pro
new file mode 100644
index 000000000..b391bb8d7
--- /dev/null
+++ b/examples/multimedia/video/qmlvideofilter_opencl/qmlvideofilter_opencl.pro
@@ -0,0 +1,22 @@
+TEMPLATE = app
+TARGET = qmlvideofilter_opencl
+
+QT += quick multimedia
+
+SOURCES = main.cpp
+HEADERS = rgbframehelper.h
+
+RESOURCES = qmlvideofilter_opencl.qrc
+OTHER_FILES = main.qml
+
+target.path = $$[QT_INSTALL_EXAMPLES]/multimedia/video/qmlvideofilter_opencl
+INSTALLS += target
+
+# Edit these as necessary to match the OpenCL SDK installation on your system
+osx {
+ LIBS += -framework OpenCL
+} else {
+ INCLUDEPATH += c:/cuda/include
+ LIBPATH += c:/cuda/lib/x64
+ LIBS += -lopengl32 -lOpenCL
+}
diff --git a/examples/multimedia/video/qmlvideofilter_opencl/qmlvideofilter_opencl.qrc b/examples/multimedia/video/qmlvideofilter_opencl/qmlvideofilter_opencl.qrc
new file mode 100644
index 000000000..5f6483ac3
--- /dev/null
+++ b/examples/multimedia/video/qmlvideofilter_opencl/qmlvideofilter_opencl.qrc
@@ -0,0 +1,5 @@
+<RCC>
+ <qresource prefix="/">
+ <file>main.qml</file>
+ </qresource>
+</RCC>
diff --git a/examples/multimedia/video/qmlvideofilter_opencl/rgbframehelper.h b/examples/multimedia/video/qmlvideofilter_opencl/rgbframehelper.h
new file mode 100644
index 000000000..080c3fe2b
--- /dev/null
+++ b/examples/multimedia/video/qmlvideofilter_opencl/rgbframehelper.h
@@ -0,0 +1,124 @@
+/****************************************************************************
+**
+** Copyright (C) 2015 Digia Plc and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/legal
+**
+** This file is part of the examples of the Qt Multimedia module.
+**
+** $QT_BEGIN_LICENSE:LGPL21$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and Digia. For licensing terms and
+** conditions see http://qt.digia.com/licensing. For further information
+** use the contact form at http://qt.digia.com/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 2.1 or version 3 as published by the Free
+** Software Foundation and appearing in the file LICENSE.LGPLv21 and
+** LICENSE.LGPLv3 included in the packaging of this file. Please review the
+** following information to ensure the GNU Lesser General Public License
+** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
+** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Digia gives you certain additional
+** rights. These rights are described in the Digia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#ifndef RGBFRAMEHELPER_H
+#define RGBFRAMEHELPER_H
+
+#include <QImage>
+#include <QAbstractVideoBuffer>
+#include <QOpenGLContext>
+#include <QOpenGLFunctions>
+#include <QOpenGLFramebufferObject>
+
+/*
+ Returns a QImage that wraps the given video frame.
+
+ This is suitable only for QAbstractVideoBuffer::NoHandle frames with RGB (or BGR)
+ data. YUV is not supported here.
+
+ The QVideoFrame must be mapped and kept mapped as long as the wrapping QImage
+ exists.
+
+  As a convenience the function also supports frames with a handle type of
+  QAbstractVideoBuffer::GLTextureHandle. This allows obtaining a system memory backed
+  QImage containing the data of an OpenGL texture. However, the readback is a slow
+  operation that may stall the GPU pipeline and should be avoided in production code.
+*/
+QImage imageWrapper(const QVideoFrame &frame)
+{
+#ifndef QT_NO_OPENGL
+ if (frame.handleType() == QAbstractVideoBuffer::GLTextureHandle) {
+ // Slow and inefficient path. Ideally what's on the GPU should remain on the GPU, instead of readbacks like this.
+ QImage img(frame.width(), frame.height(), QImage::Format_RGBA8888);
+ GLuint textureId = frame.handle().toUInt();
+ QOpenGLContext *ctx = QOpenGLContext::currentContext();
+ QOpenGLFunctions *f = ctx->functions();
+ GLuint fbo;
+ f->glGenFramebuffers(1, &fbo);
+ GLuint prevFbo;
+ f->glGetIntegerv(GL_FRAMEBUFFER_BINDING, (GLint *) &prevFbo);
+ f->glBindFramebuffer(GL_FRAMEBUFFER, fbo);
+ f->glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, textureId, 0);
+        f->glReadPixels(0, 0, frame.width(), frame.height(), GL_RGBA, GL_UNSIGNED_BYTE, img.bits());
+        f->glBindFramebuffer(GL_FRAMEBUFFER, prevFbo);
+        f->glDeleteFramebuffers(1, &fbo); // release the temporary FBO to avoid leaking it
+ return img;
+ } else
+#endif // QT_NO_OPENGL
+ {
+ if (!frame.isReadable()) {
+            qWarning("imageWrapper: No mapped image data available for read");
+ return QImage();
+ }
+
+ QImage::Format fmt = QVideoFrame::imageFormatFromPixelFormat(frame.pixelFormat());
+ if (fmt != QImage::Format_Invalid)
+ return QImage(frame.bits(), frame.width(), frame.height(), fmt);
+
+        qWarning("imageWrapper: No matching QImage format");
+ }
+
+ return QImage();
+}
+
+#ifndef QT_NO_OPENGL
+class TextureBuffer : public QAbstractVideoBuffer
+{
+public:
+ TextureBuffer(uint id) : QAbstractVideoBuffer(GLTextureHandle), m_id(id) { }
+ MapMode mapMode() const { return NotMapped; }
+ uchar *map(MapMode, int *, int *) { return 0; }
+ void unmap() { }
+ QVariant handle() const { return QVariant::fromValue<uint>(m_id); }
+
+private:
+ GLuint m_id;
+};
+#endif // QT_NO_OPENGL
+
+/*
+ Creates and returns a new video frame wrapping the OpenGL texture textureId. The size
+ must be passed in size, together with the format of the underlying image data in
+ format. When the texture originates from a QImage, use
+  QVideoFrame::pixelFormatFromImageFormat() to get a suitable format. Ownership is not
+ altered, the new QVideoFrame will not destroy the texture.
+*/
+QVideoFrame frameFromTexture(uint textureId, const QSize &size, QVideoFrame::PixelFormat format)
+{
+#ifndef QT_NO_OPENGL
+ return QVideoFrame(new TextureBuffer(textureId), size, format);
+#else
+ return QVideoFrame();
+#endif // QT_NO_OPENGL
+}
+
+#endif
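For completeness, a sketch of how the two helpers above are typically combined. wrapTexture and debugSnapshot are made-up names, error handling is omitted, and a current OpenGL context is assumed whenever a texture-backed frame is involved.

#include "rgbframehelper.h"

// Wrap an existing OpenGL texture in a QVideoFrame without copying its data.
static QVideoFrame wrapTexture(uint textureId, const QSize &size)
{
    return frameFromTexture(textureId, size, QVideoFrame::Format_RGB32);
}

// Debug-only helper: obtain a deep CPU-side copy of a frame's pixels.
// For texture-backed frames this triggers the slow glReadPixels path above;
// for system-memory frames it maps the frame, wraps it and copies it.
static QImage debugSnapshot(QVideoFrame frame)
{
    const bool mapped = frame.map(QAbstractVideoBuffer::ReadOnly);
    QImage img = imageWrapper(frame).copy(); // deep copy before unmapping
    if (mapped)
        frame.unmap();
    return img;
}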