author    Jonas Rabbe <jonas.rabbe@nokia.com>    2012-03-22 11:04:03 +1000
committer Qt by Nokia <qt-info@nokia.com>        2012-04-27 09:22:09 +0200
commit    a3b6eabd45b452b98c5b7c45df8a332ad018b8f1 (patch)
tree      9f3a3fd7bfd6c514a472ad0103037476da4c6e0f /src/gsttools
parent    e44bcf0a384c15e7df593ec3f3cee30775f3f1ef (diff)
Split gstreamer plugin into smaller plugins providing fewer services
The gstreamer blob has been split into four plugins: audiodecoder, camerabin, mediacapture, and mediaplayer.

Note: camerabin is still disabled because it is an untested camerabin2 implementation. A new qmake configuration, use_gstreamer_camera, has been introduced and is needed for the mediacapture plugin to expose the camera service; it is disabled by default.

Shared functionality has been moved to the internal gsttools library.

Change-Id: Ifb2604f440cfa97513d39f5d7978766c88eaec45
Reviewed-by: Michael Goddard <michael.goddard@nokia.com>
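For context (not part of the patch): the scope name use_gstreamer_camera comes from the message above, but the command line and the guarded .pro block below are an assumed, illustrative sketch of standard qmake usage rather than anything taken from this change.

    # hypothetical invocation: opt into the camera service at configure time
    qmake "CONFIG+=use_gstreamer_camera" qtmultimedia.pro

    # a .pro file can then gate camera-specific build rules on that scope
    use_gstreamer_camera {
        DEFINES += USE_GSTREAMER_CAMERA    # hypothetical define, for illustration only
    }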
Diffstat (limited to 'src/gsttools')
-rw-r--r--  src/gsttools/gsttools.pro                             |  54
-rw-r--r--  src/gsttools/gstvideoconnector.c                      | 474
-rw-r--r--  src/gsttools/qgstappsrc.cpp                           | 231
-rw-r--r--  src/gsttools/qgstcodecsinfo.cpp                       | 183
-rw-r--r--  src/gsttools/qgstreameraudioinputendpointselector.cpp | 166
-rw-r--r--  src/gsttools/qgstreameraudioprobecontrol.cpp          |  86
-rw-r--r--  src/gsttools/qgstreamergltexturerenderer.cpp          | 583
-rw-r--r--  src/gsttools/qgstreamervideoinputdevicecontrol.cpp    | 155
-rw-r--r--  src/gsttools/qgstreamervideooverlay.cpp               | 228
-rw-r--r--  src/gsttools/qgstreamervideoprobecontrol.cpp          | 117
-rw-r--r--  src/gsttools/qgstreamervideorenderer.cpp              | 117
-rw-r--r--  src/gsttools/qgstreamervideorendererinterface.cpp     |  46
-rw-r--r--  src/gsttools/qgstreamervideowidget.cpp                | 352
-rw-r--r--  src/gsttools/qgstreamervideowindow.cpp                | 346
-rw-r--r--  src/gsttools/qgstutils.cpp                            |  88
-rw-r--r--  src/gsttools/qx11videosurface.cpp                     | 534
16 files changed, 3759 insertions(+), 1 deletion(-)
diff --git a/src/gsttools/gsttools.pro b/src/gsttools/gsttools.pro
index fef65aebf..9d9971ed8 100644
--- a/src/gsttools/gsttools.pro
+++ b/src/gsttools/gsttools.pro
@@ -23,7 +23,6 @@ PKGCONFIG += \
gstreamer-pbutils-0.10
maemo*:PKGCONFIG +=gstreamer-plugins-bad-0.10
-contains(config_test_gstreamer_appsrc, yes): PKGCONFIG += gstreamer-app-0.10
contains(config_test_resourcepolicy, yes) {
DEFINES += HAVE_RESOURCE_POLICY
@@ -42,6 +41,14 @@ PRIVATE_HEADERS += \
qgstutils_p.h \
qgstvideobuffer_p.h \
qvideosurfacegstsink_p.h \
+ qgstreamervideorendererinterface_p.h \
+ qgstreameraudioinputendpointselector_p.h \
+ qgstreamervideorenderer_p.h \
+ qgstreamervideoinputdevicecontrol_p.h \
+ gstvideoconnector_p.h \
+ qgstcodecsinfo_p.h \
+ qgstreamervideoprobecontrol_p.h \
+ qgstreameraudioprobecontrol_p.h \
SOURCES += \
qgstbufferpoolinterface.cpp \
@@ -50,6 +57,14 @@ SOURCES += \
qgstutils.cpp \
qgstvideobuffer.cpp \
qvideosurfacegstsink.cpp \
+ qgstreamervideorendererinterface.cpp \
+ qgstreameraudioinputendpointselector.cpp \
+ qgstreamervideorenderer.cpp \
+ qgstreamervideoinputdevicecontrol.cpp \
+ qgstcodecsinfo.cpp \
+ gstvideoconnector.c \
+ qgstreamervideoprobecontrol.cpp \
+ qgstreameraudioprobecontrol.cpp \
contains(config_test_xvideo, yes) {
DEFINES += HAVE_XVIDEO
@@ -61,6 +76,43 @@ contains(config_test_xvideo, yes) {
SOURCES += \
qgstxvimagebuffer.cpp \
+
+ !isEmpty(QT.widgets.name) {
+ QT += multimediawidgets
+
+ PRIVATE_HEADERS += \
+ qgstreamervideooverlay_p.h \
+ qgstreamervideowindow_p.h \
+ qgstreamervideowidget_p.h \
+ qx11videosurface_p.h \
+
+ SOURCES += \
+ qgstreamervideooverlay.cpp \
+ qgstreamervideowindow.cpp \
+ qgstreamervideowidget.cpp \
+ qx11videosurface.cpp \
+ }
+}
+
+maemo6 {
+ PKGCONFIG += qmsystem2
+
+ contains(QT_CONFIG, opengles2):!isEmpty(QT.widgets.name) {
+ PRIVATE_HEADERS += qgstreamergltexturerenderer_p.h
+ SOURCES += qgstreamergltexturerenderer.cpp
+ QT += opengl
+ LIBS += -lEGL -lgstmeegointerfaces-0.10
+ }
+}
+
+contains(config_test_gstreamer_appsrc, yes) {
+ PKGCONFIG += gstreamer-app-0.10
+ PRIVATE_HEADERS += qgstappsrc_p.h
+ SOURCES += qgstappsrc.cpp
+
+ DEFINES += HAVE_GST_APPSRC
+
+ LIBS += -lgstapp-0.10
}
HEADERS += $$PRIVATE_HEADERS
diff --git a/src/gsttools/gstvideoconnector.c b/src/gsttools/gstvideoconnector.c
new file mode 100644
index 000000000..c3cb2f430
--- /dev/null
+++ b/src/gsttools/gstvideoconnector.c
@@ -0,0 +1,474 @@
+/****************************************************************************
+**
+** Copyright (C) 2012 Nokia Corporation and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** GNU Lesser General Public License Usage
+** This file may be used under the terms of the GNU Lesser General Public
+** License version 2.1 as published by the Free Software Foundation and
+** appearing in the file LICENSE.LGPL included in the packaging of this
+** file. Please review the following information to ensure the GNU Lesser
+** General Public License version 2.1 requirements will be met:
+** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Nokia gives you certain additional
+** rights. These rights are described in the Nokia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU General
+** Public License version 3.0 as published by the Free Software Foundation
+** and appearing in the file LICENSE.GPL included in the packaging of this
+** file. Please review the following information to ensure the GNU General
+** Public License version 3.0 requirements will be met:
+** http://www.gnu.org/copyleft/gpl.html.
+**
+** Other Usage
+** Alternatively, this file may be used in accordance with the terms and
+** conditions contained in a signed written agreement between you and Nokia.
+**
+**
+**
+**
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include "gstvideoconnector_p.h"
+#include <unistd.h>
+
+/* signals */
+enum
+{
+ SIGNAL_RESEND_NEW_SEGMENT,
+ SIGNAL_CONNECTION_FAILED,
+ LAST_SIGNAL
+};
+static guint gst_video_connector_signals[LAST_SIGNAL] = { 0 };
+
+
+GST_DEBUG_CATEGORY_STATIC (video_connector_debug);
+#define GST_CAT_DEFAULT video_connector_debug
+
+static GstStaticPadTemplate gst_video_connector_sink_factory =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+static GstStaticPadTemplate gst_video_connector_src_factory =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+#define _do_init(bla) \
+ GST_DEBUG_CATEGORY_INIT (video_connector_debug, \
+ "video-connector", 0, "An identity like element for reconnecting video stream");
+
+GST_BOILERPLATE_FULL (GstVideoConnector, gst_video_connector, GstElement,
+ GST_TYPE_ELEMENT, _do_init);
+
+static void gst_video_connector_dispose (GObject * object);
+static GstFlowReturn gst_video_connector_chain (GstPad * pad, GstBuffer * buf);
+static GstFlowReturn gst_video_connector_buffer_alloc (GstPad * pad,
+ guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf);
+static GstStateChangeReturn gst_video_connector_change_state (GstElement *
+ element, GstStateChange transition);
+static gboolean gst_video_connector_handle_sink_event (GstPad * pad,
+ GstEvent * event);
+static gboolean gst_video_connector_new_buffer_probe(GstObject *pad, GstBuffer *buffer, guint * object);
+static void gst_video_connector_resend_new_segment(GstElement * element, gboolean emitFailedSignal);
+static gboolean gst_video_connector_setcaps (GstPad *pad, GstCaps *caps);
+static GstCaps *gst_video_connector_getcaps (GstPad * pad);
+static gboolean gst_video_connector_acceptcaps (GstPad * pad, GstCaps * caps);
+
+static void
+gst_video_connector_base_init (gpointer g_class)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+
+ gst_element_class_set_details_simple (element_class, "Video Connector",
+ "Generic",
+ "An identity like element used for reconnecting video stream",
+ "Dmytro Poplavskiy <dmytro.poplavskiy@nokia.com>");
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_video_connector_sink_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_video_connector_src_factory));
+}
+
+static void
+gst_video_connector_class_init (GstVideoConnectorClass * klass)
+{
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class->dispose = gst_video_connector_dispose;
+ gstelement_class->change_state = gst_video_connector_change_state;
+ klass->resend_new_segment = gst_video_connector_resend_new_segment;
+
+ gst_video_connector_signals[SIGNAL_RESEND_NEW_SEGMENT] =
+ g_signal_new ("resend-new-segment", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_STRUCT_OFFSET (GstVideoConnectorClass, resend_new_segment), NULL, NULL,
+ g_cclosure_marshal_VOID__BOOLEAN, G_TYPE_NONE, 1, G_TYPE_BOOLEAN);
+
+ gst_video_connector_signals[SIGNAL_CONNECTION_FAILED] =
+ g_signal_new ("connection-failed", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST,
+ 0, NULL, NULL,
+ g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0);
+}
+
+static void
+gst_video_connector_init (GstVideoConnector *element,
+ GstVideoConnectorClass *g_class)
+{
+ (void) g_class;
+ element->sinkpad =
+ gst_pad_new_from_static_template (&gst_video_connector_sink_factory,
+ "sink");
+ gst_pad_set_chain_function(element->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_video_connector_chain));
+ gst_pad_set_event_function(element->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_video_connector_handle_sink_event));
+ gst_pad_set_bufferalloc_function(element->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_video_connector_buffer_alloc));
+ gst_pad_set_setcaps_function(element->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_video_connector_setcaps));
+ gst_pad_set_getcaps_function(element->sinkpad,
+ GST_DEBUG_FUNCPTR(gst_video_connector_getcaps));
+ gst_pad_set_acceptcaps_function(element->sinkpad,
+ GST_DEBUG_FUNCPTR(gst_video_connector_acceptcaps));
+
+ gst_element_add_pad (GST_ELEMENT (element), element->sinkpad);
+
+ element->srcpad =
+ gst_pad_new_from_static_template (&gst_video_connector_src_factory,
+ "src");
+ gst_pad_add_buffer_probe(element->srcpad,
+ G_CALLBACK(gst_video_connector_new_buffer_probe), element);
+ gst_element_add_pad (GST_ELEMENT (element), element->srcpad);
+
+ element->relinked = FALSE;
+ element->failedSignalEmited = FALSE;
+ gst_segment_init (&element->segment, GST_FORMAT_TIME);
+ element->latest_buffer = NULL;
+}
+
+static void
+gst_video_connector_reset (GstVideoConnector * element)
+{
+ element->relinked = FALSE;
+ element->failedSignalEmited = FALSE;
+ if (element->latest_buffer != NULL) {
+ gst_buffer_unref (element->latest_buffer);
+ element->latest_buffer = NULL;
+ }
+ gst_segment_init (&element->segment, GST_FORMAT_UNDEFINED);
+}
+
+static void
+gst_video_connector_dispose (GObject * object)
+{
+ GstVideoConnector *element = GST_VIDEO_CONNECTOR (object);
+
+ gst_video_connector_reset (element);
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+// "When this function returns anything else than GST_FLOW_OK,
+// the buffer allocation failed and buf does not contain valid data."
+static GstFlowReturn
+gst_video_connector_buffer_alloc (GstPad * pad, guint64 offset, guint size,
+ GstCaps * caps, GstBuffer ** buf)
+{
+ GstVideoConnector *element;
+ GstFlowReturn res = GST_FLOW_OK;
+ element = GST_VIDEO_CONNECTOR (GST_PAD_PARENT (pad));
+
+ if (!buf)
+ return GST_FLOW_ERROR;
+ *buf = NULL;
+
+ gboolean isFailed = FALSE;
+ while (1) {
+ GST_OBJECT_LOCK (element);
+ gst_object_ref(element->srcpad);
+ GST_OBJECT_UNLOCK (element);
+
+ // Check if downstream element is in NULL state
+ // and wait for up to 1 second for it to switch.
+ GstPad *peerPad = gst_pad_get_peer(element->srcpad);
+ if (peerPad) {
+ GstElement *parent = gst_pad_get_parent_element(peerPad);
+ gst_object_unref (peerPad);
+ if (parent) {
+ GstState state;
+ GstState pending;
+ int totalTimeout = 0;
+ // This seems to sleep for about 10ms usually.
+ while (totalTimeout < 1000000) {
+ gst_element_get_state(parent, &state, &pending, 0);
+ if (state != GST_STATE_NULL)
+ break;
+ usleep(5000);
+ totalTimeout += 5000;
+ }
+
+ gst_object_unref (parent);
+ if (state == GST_STATE_NULL) {
+ GST_DEBUG_OBJECT (element, "Downstream element is in NULL state");
+ // Downstream filter seems to be in the wrong state
+ return GST_FLOW_UNEXPECTED;
+ }
+ }
+ }
+
+ res = gst_pad_alloc_buffer(element->srcpad, offset, size, caps, buf);
+ gst_object_unref (element->srcpad);
+
+ GST_DEBUG_OBJECT (element, "buffer alloc finished: %s", gst_flow_get_name (res));
+
+ if (res == GST_FLOW_WRONG_STATE) {
+ // Just in case downstream filter is still somehow in the wrong state.
+ // Pipeline stalls if we report GST_FLOW_WRONG_STATE.
+ return GST_FLOW_UNEXPECTED;
+ }
+
+ if (res >= GST_FLOW_OK || isFailed == TRUE)
+ break;
+
+ //if gst_pad_alloc_buffer failed, emit "connection-failed" signal
+ //so colorspace transformation element can be inserted
+ GST_INFO_OBJECT(element, "gst_video_connector_buffer_alloc failed, emit connection-failed signal");
+ g_signal_emit(G_OBJECT(element), gst_video_connector_signals[SIGNAL_CONNECTION_FAILED], 0);
+ isFailed = TRUE;
+ }
+
+ return res;
+}
+
+static gboolean
+gst_video_connector_setcaps (GstPad *pad, GstCaps *caps)
+{
+ GstVideoConnector *element;
+ element = GST_VIDEO_CONNECTOR (GST_PAD_PARENT (pad));
+
+ /* forward-negotiate */
+ gboolean res = gst_pad_set_caps(element->srcpad, caps);
+
+ GST_DEBUG_OBJECT(element, "gst_video_connector_setcaps %s %i", gst_caps_to_string(caps), res);
+
+ if (!res) {
+ //if set_caps failed, emit "connection-failed" signal
+ //so colorspace transformation element can be inserted
+ GST_INFO_OBJECT(element, "gst_video_connector_setcaps failed, emit connection-failed signal");
+ g_signal_emit(G_OBJECT(element), gst_video_connector_signals[SIGNAL_CONNECTION_FAILED], 0);
+
+ return gst_pad_set_caps(element->srcpad, caps);
+ }
+
+ return TRUE;
+}
+
+static GstCaps *gst_video_connector_getcaps (GstPad * pad)
+{
+ GstVideoConnector *element;
+ element = GST_VIDEO_CONNECTOR (GST_PAD_PARENT (pad));
+
+#if (GST_VERSION_MICRO > 25)
+ GstCaps *caps = gst_pad_peer_get_caps_reffed(element->srcpad);
+#else
+ GstCaps *caps = gst_pad_peer_get_caps(element->srcpad);
+#endif
+
+ if (!caps)
+ caps = gst_caps_new_any();
+
+ return caps;
+}
+
+static gboolean gst_video_connector_acceptcaps (GstPad * pad, GstCaps * caps)
+{
+ GstVideoConnector *element;
+ element = GST_VIDEO_CONNECTOR (GST_PAD_PARENT (pad));
+
+ return gst_pad_peer_accept_caps(element->srcpad, caps);
+}
+
+static void
+gst_video_connector_resend_new_segment(GstElement * element, gboolean emitFailedSignal)
+{
+ GST_INFO_OBJECT(element, "New segment requested, failed signal enabled: %i", emitFailedSignal);
+ GstVideoConnector *connector = GST_VIDEO_CONNECTOR(element);
+ connector->relinked = TRUE;
+ if (emitFailedSignal)
+ connector->failedSignalEmited = FALSE;
+}
+
+
+static gboolean gst_video_connector_new_buffer_probe(GstObject *pad, GstBuffer *buffer, guint * object)
+{
+ (void) pad;
+ (void) buffer;
+
+ GstVideoConnector *element = GST_VIDEO_CONNECTOR (object);
+
+ /*
+ If relinking is requested, the current buffer should be rejected and
+ the new segment + previous buffer should be pushed first
+ */
+
+ if (element->relinked)
+ GST_LOG_OBJECT(element, "rejected buffer because of new segment request");
+
+ return !element->relinked;
+}
+
+
+static GstFlowReturn
+gst_video_connector_chain (GstPad * pad, GstBuffer * buf)
+{
+ GstFlowReturn res;
+ GstVideoConnector *element;
+
+ element = GST_VIDEO_CONNECTOR (gst_pad_get_parent (pad));
+
+ do {
+ /*
+ Resend the segment message and last buffer to preroll the new sink.
+ Sinks can be changed multiple times while paused,
+ while loop allows to send the segment message and preroll
+ all of them with the same buffer.
+ */
+ while (element->relinked) {
+ element->relinked = FALSE;
+
+ gint64 pos = element->segment.last_stop;
+
+ if (element->latest_buffer && GST_BUFFER_TIMESTAMP_IS_VALID(element->latest_buffer)) {
+ pos = GST_BUFFER_TIMESTAMP (element->latest_buffer);
+ }
+
+ //push a new segment and last buffer
+ GstEvent *ev = gst_event_new_new_segment (TRUE,
+ element->segment.rate,
+ element->segment.format,
+ pos, //start
+ element->segment.stop,
+ pos);
+
+ GST_DEBUG_OBJECT (element, "Pushing new segment event");
+ if (!gst_pad_push_event (element->srcpad, ev)) {
+ GST_WARNING_OBJECT (element,
+ "Newsegment handling failed in %" GST_PTR_FORMAT,
+ element->srcpad);
+ }
+
+ if (element->latest_buffer) {
+ GST_DEBUG_OBJECT (element, "Pushing latest buffer...");
+ gst_buffer_ref(element->latest_buffer);
+ gst_pad_push(element->srcpad, element->latest_buffer);
+ }
+ }
+
+ gst_buffer_ref(buf);
+
+ //it's possible video sink is changed during gst_pad_push blocked by
+ //pad lock, in this case ( element->relinked == TRUE )
+ //the buffer should be rejected by the buffer probe and
+ //the new segment + prev buffer should be sent before
+
+ GST_LOG_OBJECT (element, "Pushing buffer...");
+ res = gst_pad_push (element->srcpad, buf);
+ GST_LOG_OBJECT (element, "Pushed buffer: %s", gst_flow_get_name (res));
+
+ //if gst_pad_push failed give the service another chance,
+ //it may still work with the colorspace element added
+ if (!element->failedSignalEmited && res == GST_FLOW_NOT_NEGOTIATED) {
+ element->failedSignalEmited = TRUE;
+ GST_INFO_OBJECT(element, "gst_pad_push failed, emit connection-failed signal");
+ g_signal_emit(G_OBJECT(element), gst_video_connector_signals[SIGNAL_CONNECTION_FAILED], 0);
+ }
+
+ } while (element->relinked);
+
+
+ if (element->latest_buffer) {
+ gst_buffer_unref (element->latest_buffer);
+ element->latest_buffer = NULL;
+ }
+
+ //don't save the last video buffer on maemo6 because of buffers shortage
+ //with omapxvsink
+#ifndef Q_WS_MAEMO_6
+ element->latest_buffer = gst_buffer_ref(buf);
+#endif
+
+ gst_buffer_unref(buf);
+ gst_object_unref (element);
+
+ return res;
+}
+
+static GstStateChangeReturn
+gst_video_connector_change_state (GstElement * element,
+ GstStateChange transition)
+{
+ GstVideoConnector *connector;
+ GstStateChangeReturn result;
+
+ connector = GST_VIDEO_CONNECTOR(element);
+ result = GST_ELEMENT_CLASS (parent_class)->change_state(element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_video_connector_reset (connector);
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ connector->relinked = FALSE;
+ break;
+ default:
+ break;
+ }
+
+ return result;
+}
+
+static gboolean
+gst_video_connector_handle_sink_event (GstPad * pad, GstEvent * event)
+{
+ if (GST_EVENT_TYPE (event) == GST_EVENT_NEWSEGMENT) {
+ GstVideoConnector *element = GST_VIDEO_CONNECTOR (gst_pad_get_parent (pad));
+
+ gboolean update;
+ GstFormat format;
+ gdouble rate, arate;
+ gint64 start, stop, time;
+
+ gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
+ &start, &stop, &time);
+
+ GST_LOG_OBJECT (element,
+ "NEWSEGMENT update %d, rate %lf, applied rate %lf, "
+ "format %d, " "%" G_GINT64_FORMAT " -- %" G_GINT64_FORMAT ", time %"
+ G_GINT64_FORMAT, update, rate, arate, format, start, stop, time);
+
+ gst_segment_set_newsegment_full (&element->segment, update,
+ rate, arate, format, start, stop, time);
+
+ gst_object_unref (element);
+ }
+
+ return gst_pad_event_default (pad, event);
+}
diff --git a/src/gsttools/qgstappsrc.cpp b/src/gsttools/qgstappsrc.cpp
new file mode 100644
index 000000000..937db3e8e
--- /dev/null
+++ b/src/gsttools/qgstappsrc.cpp
@@ -0,0 +1,231 @@
+/****************************************************************************
+**
+** Copyright (C) 2012 Nokia Corporation and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** GNU Lesser General Public License Usage
+** This file may be used under the terms of the GNU Lesser General Public
+** License version 2.1 as published by the Free Software Foundation and
+** appearing in the file LICENSE.LGPL included in the packaging of this
+** file. Please review the following information to ensure the GNU Lesser
+** General Public License version 2.1 requirements will be met:
+** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Nokia gives you certain additional
+** rights. These rights are described in the Nokia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU General
+** Public License version 3.0 as published by the Free Software Foundation
+** and appearing in the file LICENSE.GPL included in the packaging of this
+** file. Please review the following information to ensure the GNU General
+** Public License version 3.0 requirements will be met:
+** http://www.gnu.org/copyleft/gpl.html.
+**
+** Other Usage
+** Alternatively, this file may be used in accordance with the terms and
+** conditions contained in a signed written agreement between you and Nokia.
+**
+**
+**
+**
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include <QDebug>
+
+#include "qgstappsrc_p.h"
+#include <QtNetwork>
+
+QGstAppSrc::QGstAppSrc(QObject *parent)
+ :QObject(parent)
+ ,m_stream(0)
+ ,m_appSrc(0)
+ ,m_sequential(false)
+ ,m_maxBytes(0)
+ ,m_setup(false)
+ ,m_dataRequestSize(-1)
+ ,m_dataRequested(false)
+ ,m_enoughData(false)
+ ,m_forceData(false)
+{
+ m_callbacks.need_data = &QGstAppSrc::on_need_data;
+ m_callbacks.enough_data = &QGstAppSrc::on_enough_data;
+ m_callbacks.seek_data = &QGstAppSrc::on_seek_data;
+}
+
+QGstAppSrc::~QGstAppSrc()
+{
+ if (m_appSrc)
+ gst_object_unref(G_OBJECT(m_appSrc));
+}
+
+bool QGstAppSrc::setup(GstElement* appsrc)
+{
+ if (m_setup || m_stream == 0 || appsrc == 0)
+ return false;
+
+ m_appSrc = GST_APP_SRC(appsrc);
+ gst_app_src_set_callbacks(m_appSrc, (GstAppSrcCallbacks*)&m_callbacks, this, (GDestroyNotify)&QGstAppSrc::destroy_notify);
+
+ g_object_get(G_OBJECT(m_appSrc), "max-bytes", &m_maxBytes, NULL);
+
+ if (m_sequential)
+ m_streamType = GST_APP_STREAM_TYPE_STREAM;
+ else
+ m_streamType = GST_APP_STREAM_TYPE_RANDOM_ACCESS;
+ gst_app_src_set_stream_type(m_appSrc, m_streamType);
+ gst_app_src_set_size(m_appSrc, (m_sequential) ? -1 : m_stream->size());
+
+ return m_setup = true;
+}
+
+void QGstAppSrc::setStream(QIODevice *stream)
+{
+ if (stream == 0)
+ return;
+ if (m_stream) {
+ disconnect(m_stream, SIGNAL(readyRead()), this, SLOT(onDataReady()));
+ disconnect(m_stream, SIGNAL(destroyed()), this, SLOT(streamDestroyed()));
+ }
+ if (m_appSrc)
+ gst_object_unref(G_OBJECT(m_appSrc));
+
+ m_dataRequestSize = -1;
+ m_dataRequested = false;
+ m_enoughData = false;
+ m_forceData = false;
+ m_maxBytes = 0;
+
+ m_appSrc = 0;
+ m_stream = stream;
+ connect(m_stream, SIGNAL(destroyed()), SLOT(streamDestroyed()));
+ connect(m_stream, SIGNAL(readyRead()), this, SLOT(onDataReady()));
+ m_sequential = m_stream->isSequential();
+ m_setup = false;
+}
+
+QIODevice *QGstAppSrc::stream() const
+{
+ return m_stream;
+}
+
+GstAppSrc *QGstAppSrc::element()
+{
+ return m_appSrc;
+}
+
+void QGstAppSrc::onDataReady()
+{
+ if (!m_enoughData) {
+ m_dataRequested = true;
+ pushDataToAppSrc();
+ }
+}
+
+void QGstAppSrc::streamDestroyed()
+{
+ if (sender() == m_stream) {
+ m_stream = 0;
+ sendEOS();
+ }
+}
+
+void QGstAppSrc::pushDataToAppSrc()
+{
+ if (!isStreamValid() || !m_setup)
+ return;
+
+ if (m_dataRequested && !m_enoughData) {
+ qint64 size;
+ if (m_dataRequestSize == (unsigned int)-1)
+ size = qMin(m_stream->bytesAvailable(), queueSize());
+ else
+ size = qMin(m_stream->bytesAvailable(), (qint64)m_dataRequestSize);
+ void *data = g_malloc(size);
+ GstBuffer* buffer = gst_app_buffer_new(data, size, g_free, data);
+ buffer->offset = m_stream->pos();
+ qint64 bytesRead = m_stream->read((char*)GST_BUFFER_DATA(buffer), size);
+ buffer->offset_end = buffer->offset + bytesRead - 1;
+
+ if (bytesRead > 0) {
+ m_dataRequested = false;
+ m_enoughData = false;
+ GstFlowReturn ret = gst_app_src_push_buffer (GST_APP_SRC (element()), buffer);
+ if (ret == GST_FLOW_ERROR) {
+ qWarning()<<"appsrc: push buffer error";
+ } else if (ret == GST_FLOW_WRONG_STATE) {
+ qWarning()<<"appsrc: push buffer wrong state";
+ } else if (ret == GST_FLOW_RESEND) {
+ qWarning()<<"appsrc: push buffer resend";
+ }
+ }
+
+ // After reading we might be all done
+ if (m_stream->atEnd())
+ sendEOS();
+ } else if (m_stream->atEnd()) {
+ sendEOS();
+ }
+}
+
+bool QGstAppSrc::doSeek(qint64 value)
+{
+ if (isStreamValid())
+ return stream()->seek(value);
+ return false;
+}
+
+
+gboolean QGstAppSrc::on_seek_data(GstAppSrc *element, guint64 arg0, gpointer userdata)
+{
+ Q_UNUSED(element);
+ QGstAppSrc *self = reinterpret_cast<QGstAppSrc*>(userdata);
+ if (self && self->isStreamValid()) {
+ if (!self->stream()->isSequential())
+ QMetaObject::invokeMethod(self, "doSeek", Qt::AutoConnection, Q_ARG(qint64, arg0));
+ }
+ else
+ return false;
+
+ return true;
+}
+
+void QGstAppSrc::on_enough_data(GstAppSrc *element, gpointer userdata)
+{
+ Q_UNUSED(element);
+ QGstAppSrc *self = reinterpret_cast<QGstAppSrc*>(userdata);
+ if (self)
+ self->enoughData() = true;
+}
+
+void QGstAppSrc::on_need_data(GstAppSrc *element, guint arg0, gpointer userdata)
+{
+ Q_UNUSED(element);
+ QGstAppSrc *self = reinterpret_cast<QGstAppSrc*>(userdata);
+ if (self) {
+ self->dataRequested() = true;
+ self->enoughData() = false;
+ self->dataRequestSize()= arg0;
+ QMetaObject::invokeMethod(self, "pushDataToAppSrc", Qt::AutoConnection);
+ }
+}
+
+void QGstAppSrc::destroy_notify(gpointer data)
+{
+ Q_UNUSED(data);
+}
+
+void QGstAppSrc::sendEOS()
+{
+ gst_app_src_end_of_stream(GST_APP_SRC(m_appSrc));
+ if (isStreamValid() && !stream()->isSequential())
+ stream()->reset();
+}
diff --git a/src/gsttools/qgstcodecsinfo.cpp b/src/gsttools/qgstcodecsinfo.cpp
new file mode 100644
index 000000000..2d39977a3
--- /dev/null
+++ b/src/gsttools/qgstcodecsinfo.cpp
@@ -0,0 +1,183 @@
+/****************************************************************************
+**
+** Copyright (C) 2012 Nokia Corporation and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** GNU Lesser General Public License Usage
+** This file may be used under the terms of the GNU Lesser General Public
+** License version 2.1 as published by the Free Software Foundation and
+** appearing in the file LICENSE.LGPL included in the packaging of this
+** file. Please review the following information to ensure the GNU Lesser
+** General Public License version 2.1 requirements will be met:
+** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Nokia gives you certain additional
+** rights. These rights are described in the Nokia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU General
+** Public License version 3.0 as published by the Free Software Foundation
+** and appearing in the file LICENSE.GPL included in the packaging of this
+** file. Please review the following information to ensure the GNU General
+** Public License version 3.0 requirements will be met:
+** http://www.gnu.org/copyleft/gpl.html.
+**
+** Other Usage
+** Alternatively, this file may be used in accordance with the terms and
+** conditions contained in a signed written agreement between you and Nokia.
+**
+**
+**
+**
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include "qgstcodecsinfo_p.h"
+
+#include <QtCore/qset.h>
+
+#ifdef QMEDIA_GSTREAMER_CAMERABIN
+#include <gst/pbutils/pbutils.h>
+#include <gst/pbutils/encoding-profile.h>
+#endif
+
+
+QGstCodecsInfo::QGstCodecsInfo(QGstCodecsInfo::ElementType elementType)
+{
+
+#if GST_CHECK_VERSION(0,10,31)
+
+ GstElementFactoryListType gstElementType = 0;
+ switch (elementType) {
+ case AudioEncoder:
+ gstElementType = GST_ELEMENT_FACTORY_TYPE_AUDIO_ENCODER;
+ break;
+ case VideoEncoder:
+ gstElementType = GST_ELEMENT_FACTORY_TYPE_VIDEO_ENCODER;
+ break;
+ case Muxer:
+ gstElementType = GST_ELEMENT_FACTORY_TYPE_MUXER;
+ break;
+ }
+
+ GstCaps *allCaps = supportedElementCaps(gstElementType);
+ GstCaps *caps = gst_caps_new_empty();
+
+ uint codecsCount = gst_caps_get_size(allCaps);
+ for (uint i=0; i<codecsCount; i++) {
+ gst_caps_append_structure(caps, gst_caps_steal_structure(allCaps, 0));
+ gchar * capsString = gst_caps_to_string(caps);
+
+ QString codec = QLatin1String(capsString);
+ m_codecs.append(codec);
+
+#ifdef QMEDIA_GSTREAMER_CAMERABIN
+ gchar *description = gst_pb_utils_get_codec_description(caps);
+ m_codecDescriptions.insert(codec, QString::fromUtf8(description));
+
+ if (description)
+ g_free(description);
+#else
+ m_codecDescriptions.insert(codec, codec);
+#endif
+
+ if (capsString)
+ g_free(capsString);
+
+ gst_caps_remove_structure(caps, 0);
+ }
+#else
+ Q_UNUSED(elementType);
+#endif // GST_CHECK_VERSION(0,10,31)
+}
+
+QStringList QGstCodecsInfo::supportedCodecs() const
+{
+ return m_codecs;
+}
+
+QString QGstCodecsInfo::codecDescription(const QString &codec) const
+{
+ return m_codecDescriptions.value(codec);
+}
+
+#if GST_CHECK_VERSION(0,10,31)
+
+/*!
+ List all supported caps for all installed elements of type \a elementType.
+
+ Caps are simplified to mime type and a few field necessary to distinguish
+ different codecs like mpegversion or layer.
+ */
+GstCaps* QGstCodecsInfo::supportedElementCaps(GstElementFactoryListType elementType,
+ GstRank minimumRank,
+ GstPadDirection padDirection)
+{
+ GList *elements = gst_element_factory_list_get_elements(elementType, minimumRank);
+ GstCaps *res = gst_caps_new_empty();
+
+ QSet<QByteArray> fakeEncoderMimeTypes;
+ fakeEncoderMimeTypes << "unknown/unknown"
+ << "audio/x-raw-int" << "audio/x-raw-float"
+ << "video/x-raw-yuv" << "video/x-raw-rgb";
+
+ QSet<QByteArray> fieldsToAdd;
+ fieldsToAdd << "mpegversion" << "layer" << "layout" << "raversion"
+ << "wmaversion" << "wmvversion" << "variant";
+
+ GList *element = elements;
+ while (element) {
+ GstElementFactory *factory = (GstElementFactory *)element->data;
+ element = element->next;
+
+ const GList *padTemplates = gst_element_factory_get_static_pad_templates(factory);
+ while (padTemplates) {
+ GstStaticPadTemplate *padTemplate = (GstStaticPadTemplate *)padTemplates->data;
+ padTemplates = padTemplates->next;
+
+ if (padTemplate->direction == padDirection) {
+ const GstCaps *caps = gst_static_caps_get(&padTemplate->static_caps);
+ for (uint i=0; i<gst_caps_get_size(caps); i++) {
+ const GstStructure *structure = gst_caps_get_structure(caps, i);
+
+ //skip "fake" encoders
+ if (fakeEncoderMimeTypes.contains(gst_structure_get_name(structure)))
+ continue;
+
+ GstStructure *newStructure = gst_structure_new(gst_structure_get_name(structure), NULL);
+
+ //add structure fields to distinguish between formats with similar mime types,
+ //like audio/mpeg
+ for (int j=0; j<gst_structure_n_fields(structure); j++) {
+ const gchar* fieldName = gst_structure_nth_field_name(structure, j);
+ if (fieldsToAdd.contains(fieldName)) {
+ const GValue *value = gst_structure_get_value(structure, fieldName);
+ GType valueType = G_VALUE_TYPE(value);
+
+ //don't add values of range type,
+ //gst_pb_utils_get_codec_description complains about not fixed caps
+
+ if (valueType != GST_TYPE_INT_RANGE && valueType != GST_TYPE_DOUBLE_RANGE &&
+ valueType != GST_TYPE_FRACTION_RANGE && valueType != GST_TYPE_LIST &&
+ valueType != GST_TYPE_ARRAY)
+ gst_structure_set_value(newStructure, fieldName, value);
+ }
+ }
+
+ gst_caps_merge_structure(res, newStructure);
+ }
+ }
+ }
+ }
+ gst_plugin_feature_list_free(elements);
+
+ return res;
+}
+#endif //GST_CHECK_VERSION(0,10,31)
diff --git a/src/gsttools/qgstreameraudioinputendpointselector.cpp b/src/gsttools/qgstreameraudioinputendpointselector.cpp
new file mode 100644
index 000000000..cdbc67d23
--- /dev/null
+++ b/src/gsttools/qgstreameraudioinputendpointselector.cpp
@@ -0,0 +1,166 @@
+/****************************************************************************
+**
+** Copyright (C) 2012 Nokia Corporation and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** GNU Lesser General Public License Usage
+** This file may be used under the terms of the GNU Lesser General Public
+** License version 2.1 as published by the Free Software Foundation and
+** appearing in the file LICENSE.LGPL included in the packaging of this
+** file. Please review the following information to ensure the GNU Lesser
+** General Public License version 2.1 requirements will be met:
+** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Nokia gives you certain additional
+** rights. These rights are described in the Nokia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU General
+** Public License version 3.0 as published by the Free Software Foundation
+** and appearing in the file LICENSE.GPL included in the packaging of this
+** file. Please review the following information to ensure the GNU General
+** Public License version 3.0 requirements will be met:
+** http://www.gnu.org/copyleft/gpl.html.
+**
+** Other Usage
+** Alternatively, this file may be used in accordance with the terms and
+** conditions contained in a signed written agreement between you and Nokia.
+**
+**
+**
+**
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include "qgstreameraudioinputendpointselector_p.h"
+
+#include <QtCore/QDir>
+#include <QtCore/QDebug>
+
+#include <gst/gst.h>
+
+#ifdef HAVE_ALSA
+#include <alsa/asoundlib.h>
+#endif
+
+QGstreamerAudioInputEndpointSelector::QGstreamerAudioInputEndpointSelector(QObject *parent)
+ :QAudioEndpointSelector(parent)
+{
+ update();
+}
+
+QGstreamerAudioInputEndpointSelector::~QGstreamerAudioInputEndpointSelector()
+{
+}
+
+QList<QString> QGstreamerAudioInputEndpointSelector::availableEndpoints() const
+{
+ return m_names;
+}
+
+QString QGstreamerAudioInputEndpointSelector::endpointDescription(const QString& name) const
+{
+ QString desc;
+
+ for (int i = 0; i < m_names.size(); i++) {
+ if (m_names.at(i).compare(name) == 0) {
+ desc = m_descriptions.at(i);
+ break;
+ }
+ }
+ return desc;
+}
+
+QString QGstreamerAudioInputEndpointSelector::defaultEndpoint() const
+{
+ if (m_names.size() > 0)
+ return m_names.at(0);
+
+ return QString();
+}
+
+QString QGstreamerAudioInputEndpointSelector::activeEndpoint() const
+{
+ return m_audioInput;
+}
+
+void QGstreamerAudioInputEndpointSelector::setActiveEndpoint(const QString& name)
+{
+ if (m_audioInput.compare(name) != 0) {
+ m_audioInput = name;
+ emit activeEndpointChanged(name);
+ }
+}
+
+void QGstreamerAudioInputEndpointSelector::update()
+{
+ m_names.clear();
+ m_descriptions.clear();
+ updateAlsaDevices();
+ updateOssDevices();
+ updatePulseDevices();
+ if (m_names.size() > 0)
+ m_audioInput = m_names.at(0);
+}
+
+void QGstreamerAudioInputEndpointSelector::updateAlsaDevices()
+{
+#ifdef HAVE_ALSA
+ void **hints, **n;
+ if (snd_device_name_hint(-1, "pcm", &hints) < 0) {
+ qWarning()<<"no alsa devices available";
+ return;
+ }
+ n = hints;
+
+ while (*n != NULL) {
+ char *name = snd_device_name_get_hint(*n, "NAME");
+ char *descr = snd_device_name_get_hint(*n, "DESC");
+ char *io = snd_device_name_get_hint(*n, "IOID");
+
+ if ((name != NULL) && (descr != NULL)) {
+ if ( io == NULL || qstrcmp(io,"Input") == 0 ) {
+ m_names.append(QLatin1String("alsa:")+QString::fromUtf8(name));
+ m_descriptions.append(QString::fromUtf8(descr));
+ }
+ }
+
+ if (name != NULL)
+ free(name);
+ if (descr != NULL)
+ free(descr);
+ if (io != NULL)
+ free(io);
+ n++;
+ }
+ snd_device_name_free_hint(hints);
+#endif
+}
+
+void QGstreamerAudioInputEndpointSelector::updateOssDevices()
+{
+ QDir devDir("/dev");
+ devDir.setFilter(QDir::System);
+ QFileInfoList entries = devDir.entryInfoList(QStringList() << "dsp*");
+ foreach(const QFileInfo& entryInfo, entries) {
+ m_names.append(QLatin1String("oss:")+entryInfo.filePath());
+ m_descriptions.append(QString("OSS device %1").arg(entryInfo.fileName()));
+ }
+}
+
+void QGstreamerAudioInputEndpointSelector::updatePulseDevices()
+{
+ GstElementFactory *factory = gst_element_factory_find("pulsesrc");
+ if (factory) {
+ m_names.append("pulseaudio:");
+ m_descriptions.append("PulseAudio device.");
+ gst_object_unref(GST_OBJECT(factory));
+ }
+}
diff --git a/src/gsttools/qgstreameraudioprobecontrol.cpp b/src/gsttools/qgstreameraudioprobecontrol.cpp
new file mode 100644
index 000000000..e1efe9606
--- /dev/null
+++ b/src/gsttools/qgstreameraudioprobecontrol.cpp
@@ -0,0 +1,86 @@
+/****************************************************************************
+**
+** Copyright (C) 2012 Nokia Corporation and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** GNU Lesser General Public License Usage
+** This file may be used under the terms of the GNU Lesser General Public
+** License version 2.1 as published by the Free Software Foundation and
+** appearing in the file LICENSE.LGPL included in the packaging of this
+** file. Please review the following information to ensure the GNU Lesser
+** General Public License version 2.1 requirements will be met:
+** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Nokia gives you certain additional
+** rights. These rights are described in the Nokia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU General
+** Public License version 3.0 as published by the Free Software Foundation
+** and appearing in the file LICENSE.GPL included in the packaging of this
+** file. Please review the following information to ensure the GNU General
+** Public License version 3.0 requirements will be met:
+** http://www.gnu.org/copyleft/gpl.html.
+**
+** Other Usage
+** Alternatively, this file may be used in accordance with the terms and
+** conditions contained in a signed written agreement between you and Nokia.
+**
+**
+**
+**
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include "qgstreameraudioprobecontrol_p.h"
+#include <private/qgstutils_p.h>
+
+QGstreamerAudioProbeControl::QGstreamerAudioProbeControl(QObject *parent)
+ : QMediaAudioProbeControl(parent)
+{
+
+}
+
+QGstreamerAudioProbeControl::~QGstreamerAudioProbeControl()
+{
+
+}
+
+void QGstreamerAudioProbeControl::bufferProbed(GstBuffer* buffer)
+{
+ GstCaps* caps = gst_buffer_get_caps(buffer);
+ if (!caps)
+ return;
+
+ QAudioFormat format = QGstUtils::audioFormatForCaps(caps);
+ gst_caps_unref(caps);
+ if (!format.isValid())
+ return;
+
+ QAudioBuffer audioBuffer = QAudioBuffer(QByteArray((const char*)buffer->data, buffer->size), format);
+
+ {
+ QMutexLocker locker(&m_bufferMutex);
+ m_pendingBuffer = audioBuffer;
+ QMetaObject::invokeMethod(this, "bufferProbed", Qt::QueuedConnection);
+ }
+}
+
+void QGstreamerAudioProbeControl::bufferProbed()
+{
+ QAudioBuffer audioBuffer;
+ {
+ QMutexLocker locker(&m_bufferMutex);
+ if (!m_pendingBuffer.isValid())
+ return;
+ audioBuffer = m_pendingBuffer;
+ }
+ emit audioBufferProbed(audioBuffer);
+}
diff --git a/src/gsttools/qgstreamergltexturerenderer.cpp b/src/gsttools/qgstreamergltexturerenderer.cpp
new file mode 100644
index 000000000..f5cd2f432
--- /dev/null
+++ b/src/gsttools/qgstreamergltexturerenderer.cpp
@@ -0,0 +1,583 @@
+/****************************************************************************
+**
+** Copyright (C) 2012 Nokia Corporation and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** GNU Lesser General Public License Usage
+** This file may be used under the terms of the GNU Lesser General Public
+** License version 2.1 as published by the Free Software Foundation and
+** appearing in the file LICENSE.LGPL included in the packaging of this
+** file. Please review the following information to ensure the GNU Lesser
+** General Public License version 2.1 requirements will be met:
+** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Nokia gives you certain additional
+** rights. These rights are described in the Nokia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU General
+** Public License version 3.0 as published by the Free Software Foundation
+** and appearing in the file LICENSE.GPL included in the packaging of this
+** file. Please review the following information to ensure the GNU General
+** Public License version 3.0 requirements will be met:
+** http://www.gnu.org/copyleft/gpl.html.
+**
+** Other Usage
+** Alternatively, this file may be used in accordance with the terms and
+** conditions contained in a signed written agreement between you and Nokia.
+**
+**
+**
+**
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include <private/qvideosurfacegstsink_p.h>
+#include <qabstractvideosurface.h>
+#include <private/qgstutils_p.h>
+
+#include <QtGui/qevent.h>
+#include <QtWidgets/qapplication.h>
+#include <QtWidgets/qx11info_x11.h>
+#include <QtCore/qdebug.h>
+#include <QtCore/qthread.h>
+
+#include <QtOpenGL/qgl.h>
+
+#include <gst/gst.h>
+#include <gst/interfaces/xoverlay.h>
+#include <gst/interfaces/propertyprobe.h>
+#include <gst/interfaces/meegovideotexture.h>
+#include <gst/interfaces/meegovideorenderswitch.h>
+
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+
+#include "qgstreamergltexturerenderer_p.h"
+
+//#define GL_TEXTURE_SINK_DEBUG 1
+
+//from extdefs.h
+typedef void *EGLSyncKHR;
+typedef khronos_utime_nanoseconds_t EGLTimeKHR;
+
+#define GL_TEXTURE_EXTERNAL_OES 0x8D65
+#define EGL_SYNC_FENCE_KHR 0x30F9
+
+typedef EGLSyncKHR (EGLAPIENTRYP _PFNEGLCREATESYNCKHRPROC) (EGLDisplay dpy,
+ EGLenum type, const EGLint * attrib_list);
+typedef EGLBoolean (EGLAPIENTRYP _PFNEGLDESTROYSYNCKHRPROC) (EGLDisplay dpy,
+ EGLSyncKHR sync);
+
+
+const QAbstractVideoBuffer::HandleType EGLImageTextureHandle =
+ QAbstractVideoBuffer::HandleType(QAbstractVideoBuffer::UserHandle+3434);
+
+// EGLSync functions
+_PFNEGLCREATESYNCKHRPROC eglCreateSyncKHR;
+_PFNEGLDESTROYSYNCKHRPROC eglDestroySyncKHR;
+
+class QGStreamerGLTextureBuffer : public QAbstractVideoBuffer
+{
+public:
+ QGStreamerGLTextureBuffer(MeegoGstVideoTexture *textureSink, int frameNumber) :
+ QAbstractVideoBuffer(EGLImageTextureHandle),
+ m_textureSink(MEEGO_GST_VIDEO_TEXTURE(textureSink)),
+ m_frameNumber(frameNumber)
+ {
+ }
+
+ ~QGStreamerGLTextureBuffer()
+ {
+ }
+
+
+ MapMode mapMode() const { return NotMapped; }
+ uchar *map(MapMode mode, int *numBytes, int *bytesPerLine)
+ {
+ Q_UNUSED(mode);
+ Q_UNUSED(numBytes);
+ Q_UNUSED(bytesPerLine);
+
+ //acquire_frame should really be called at buffer construction time
+ //but it conflicts with id-less implementation of gst texture sink.
+#if defined(GL_TEXTURE_SINK_DEBUG) && GL_TEXTURE_SINK_DEBUG > 1
+ qDebug() << "acquire frame" << m_frameNumber;
+#endif
+ if (!meego_gst_video_texture_acquire_frame(m_textureSink,m_frameNumber))
+ qWarning() << Q_FUNC_INFO << "acquire-frame failed" << m_frameNumber;
+
+
+#if defined(GL_TEXTURE_SINK_DEBUG) && GL_TEXTURE_SINK_DEBUG > 1
+ qDebug() << "map frame" << m_frameNumber;
+#endif
+
+ gboolean bind_status = meego_gst_video_texture_bind_frame(m_textureSink, GL_TEXTURE_EXTERNAL_OES, m_frameNumber);
+ if (!bind_status)
+ qWarning() << Q_FUNC_INFO << "bind-frame failed";
+
+ return (uchar*)1;
+ }
+
+ void unmap()
+ {
+ gboolean bind_status = meego_gst_video_texture_bind_frame(m_textureSink, GL_TEXTURE_EXTERNAL_OES, -1);
+
+#if defined(GL_TEXTURE_SINK_DEBUG) && GL_TEXTURE_SINK_DEBUG > 1
+ qDebug() << "unmap frame" << m_frameNumber;
+#endif
+
+ if (!bind_status)
+ qWarning() << Q_FUNC_INFO << "unbind-frame failed";
+
+ //release_frame should really be called in destructor
+ //but this conflicts with id-less implementation of gst texture sink.
+#if defined(GL_TEXTURE_SINK_DEBUG) && GL_TEXTURE_SINK_DEBUG > 1
+ qDebug() << "release frame" << m_frameNumber;
+#endif
+ EGLSyncKHR sync = eglCreateSyncKHR(eglGetDisplay((EGLNativeDisplayType)QX11Info::display()), EGL_SYNC_FENCE_KHR, NULL);
+ meego_gst_video_texture_release_frame(m_textureSink, m_frameNumber, sync);
+ }
+
+ QVariant handle() const
+ {
+ return m_frameNumber;
+ }
+
+private:
+ MeegoGstVideoTexture *m_textureSink;
+ int m_frameNumber;
+};
+
+
+QGstreamerGLTextureRenderer::QGstreamerGLTextureRenderer(QObject *parent) :
+ QVideoRendererControl(parent),
+ m_videoSink(0),
+ m_surface(0),
+ m_context(0),
+ m_winId(0),
+ m_colorKey(49,0,49),
+ m_overlayEnabled(false),
+ m_bufferProbeId(-1)
+{
+ eglCreateSyncKHR =
+ (_PFNEGLCREATESYNCKHRPROC)eglGetProcAddress("eglCreateSyncKHR");
+ eglDestroySyncKHR =
+ (_PFNEGLDESTROYSYNCKHRPROC)eglGetProcAddress("eglDestroySyncKHR");
+}
+
+QGstreamerGLTextureRenderer::~QGstreamerGLTextureRenderer()
+{
+ if (m_surface && m_surface->isActive())
+ m_surface->stop();
+
+ if (m_videoSink)
+ gst_object_unref(GST_OBJECT(m_videoSink));
+}
+
+GstElement *QGstreamerGLTextureRenderer::videoSink()
+{
+ if (!m_videoSink && isReady()) {
+ if (m_context && !m_surface->supportedPixelFormats(EGLImageTextureHandle).isEmpty()) {
+#ifdef GL_TEXTURE_SINK_DEBUG
+ qDebug() << Q_FUNC_INFO << ": using gltexture sink";
+#endif
+ if (m_context)
+ m_context->makeCurrent();
+ m_videoSink = gst_element_factory_make("gltexturesink", "egl-texture-sink");
+ g_object_set(G_OBJECT(m_videoSink),
+ "x-display", QX11Info::display(),
+ "egl-display", eglGetDisplay((EGLNativeDisplayType)QX11Info::display()),
+ "egl-context", eglGetCurrentContext(),
+ "colorkey", m_colorKey.rgb(),
+ "autopaint-colorkey", false,
+ "use-framebuffer-memory", true,
+ "render-mode", m_overlayEnabled ? VIDEO_RENDERSWITCH_XOVERLAY_MODE
+ : VIDEO_RENDERSWITCH_TEXTURE_STREAMING_MODE,
+ (char*)NULL);
+
+ g_signal_connect(G_OBJECT(m_videoSink), "frame-ready", G_CALLBACK(handleFrameReady), (gpointer)this);
+ } else {
+ qWarning() << Q_FUNC_INFO << ": Fallback to QVideoSurfaceGstSink since EGLImageTextureHandle is not supported";
+ m_videoSink = reinterpret_cast<GstElement*>(QVideoSurfaceGstSink::createSink(m_surface));
+ }
+
+ if (m_videoSink) {
+ gst_object_ref(GST_OBJECT(m_videoSink)); //Take ownership
+ gst_object_sink(GST_OBJECT(m_videoSink));
+
+ GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
+ m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this);
+ }
+ }
+
+ return m_videoSink;
+}
+
+QAbstractVideoSurface *QGstreamerGLTextureRenderer::surface() const
+{
+ return m_surface;
+}
+
+void QGstreamerGLTextureRenderer::setSurface(QAbstractVideoSurface *surface)
+{
+ if (m_surface != surface) {
+#ifdef GL_TEXTURE_SINK_DEBUG
+ qDebug() << Q_FUNC_INFO << surface;
+#endif
+
+ bool oldReady = isReady();
+
+ m_context = const_cast<QGLContext*>(QGLContext::currentContext());
+
+ if (m_videoSink)
+ gst_object_unref(GST_OBJECT(m_videoSink));
+
+ m_videoSink = 0;
+
+ if (m_surface) {
+ disconnect(m_surface, SIGNAL(supportedFormatsChanged()),
+ this, SLOT(handleFormatChange()));
+ }
+
+ m_surface = surface;
+
+ if (oldReady != isReady())
+ emit readyChanged(!oldReady);
+
+ if (m_surface) {
+ connect(m_surface, SIGNAL(supportedFormatsChanged()),
+ this, SLOT(handleFormatChange()));
+ }
+
+ emit sinkChanged();
+ }
+}
+
+void QGstreamerGLTextureRenderer::handleFormatChange()
+{
+ if (m_videoSink)
+ gst_object_unref(GST_OBJECT(m_videoSink));
+
+ m_videoSink = 0;
+ emit sinkChanged();
+}
+
+void QGstreamerGLTextureRenderer::handleFrameReady(GstElement *sink, gint frame, gpointer data)
+{
+ Q_UNUSED(sink);
+ QGstreamerGLTextureRenderer* renderer = reinterpret_cast<QGstreamerGLTextureRenderer*>(data);
+
+ QMutexLocker locker(&renderer->m_mutex);
+ QMetaObject::invokeMethod(renderer, "renderGLFrame",
+ Qt::QueuedConnection,
+ Q_ARG(int, frame));
+
+ //we have to wait to ensure the frame is not reused,
+ //timeout is added to avoid deadlocks when the main thread is
+ //waiting for rendering to complete, this is possible for example during state chages.
+ //If frame is not rendered during 60ms (~1-2 frames interval) it's better to unblock and drop it if necessary
+ renderer->m_renderCondition.wait(&renderer->m_mutex, 60);
+}
+
+void QGstreamerGLTextureRenderer::renderGLFrame(int frame)
+{
+#if defined(GL_TEXTURE_SINK_DEBUG) && GL_TEXTURE_SINK_DEBUG > 1
+ qDebug() << Q_FUNC_INFO << "frame:" << frame << "surface active:" << m_surface->isActive();
+#endif
+ QMutexLocker locker(&m_mutex);
+
+ if (!m_surface) {
+ m_renderCondition.wakeAll();
+ return;
+ }
+
+ MeegoGstVideoTexture *textureSink = MEEGO_GST_VIDEO_TEXTURE(m_videoSink);
+
+ if (m_context)
+ m_context->makeCurrent();
+
+ //don't try to render the frame if state is changed to NULL or READY
+ GstState pendingState = GST_STATE_NULL;
+ GstState newState = GST_STATE_NULL;
+ GstStateChangeReturn res = gst_element_get_state(m_videoSink,
+ &newState,
+ &pendingState,
+ 0);//don't block and return immediately
+
+ if (res == GST_STATE_CHANGE_FAILURE ||
+ newState == GST_STATE_NULL ||
+ pendingState == GST_STATE_NULL) {
+ stopRenderer();
+ m_renderCondition.wakeAll();
+ return;
+ }
+
+ if (!m_surface->isActive()) {
+ //find the native video size
+ GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
+ GstCaps *caps = gst_pad_get_negotiated_caps(pad);
+
+ if (caps) {
+ QSize newNativeSize = QGstUtils::capsCorrectedResolution(caps);
+ if (m_nativeSize != newNativeSize) {
+ m_nativeSize = newNativeSize;
+ emit nativeSizeChanged();
+ }
+ gst_caps_unref(caps);
+ }
+
+ //start the surface...
+ QVideoSurfaceFormat format(m_nativeSize, QVideoFrame::Format_RGB32, EGLImageTextureHandle);
+ if (!m_surface->start(format)) {
+ qWarning() << Q_FUNC_INFO << "failed to start video surface" << format;
+ m_renderCondition.wakeAll();
+ return;
+ }
+ }
+
+ QGStreamerGLTextureBuffer *buffer = new QGStreamerGLTextureBuffer(textureSink, frame);
+ QVideoFrame videoFrame(buffer,
+ m_surface->surfaceFormat().frameSize(),
+ m_surface->surfaceFormat().pixelFormat());
+ m_surface->present(videoFrame);
+ m_renderCondition.wakeAll();
+}
+
+bool QGstreamerGLTextureRenderer::isReady() const
+{
+ if (!m_surface)
+ return false;
+
+ if (m_winId > 0)
+ return true;
+
+ //winId is required only for EGLImageTextureHandle compatible surfaces
+ return m_surface->supportedPixelFormats(EGLImageTextureHandle).isEmpty();
+}
+
+bool QGstreamerGLTextureRenderer::processBusMessage(const QGstreamerMessage &message)
+{
+ GstMessage* gm = message.rawMessage();
+
+#ifdef GL_TEXTURE_SINK_DEBUG
+ qDebug() << Q_FUNC_INFO << GST_MESSAGE_TYPE_NAME(gm);
+#endif
+
+ if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_STATE_CHANGED &&
+ GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_videoSink)) {
+ GstState oldState;
+ GstState newState;
+ gst_message_parse_state_changed(gm, &oldState, &newState, 0);
+
+#ifdef GL_TEXTURE_SINK_DEBUG
+ qDebug() << Q_FUNC_INFO << "State changed:" << oldState << newState;
+#endif
+
+ if (newState == GST_STATE_READY || newState == GST_STATE_NULL) {
+ stopRenderer();
+ }
+
+ if (oldState == GST_STATE_READY && newState == GST_STATE_PAUSED) {
+ updateNativeVideoSize();
+ }
+ }
+
+ return false;
+}
+
+bool QGstreamerGLTextureRenderer::processSyncMessage(const QGstreamerMessage &message)
+{
+ GstMessage* gm = message.rawMessage();
+
+ if ((GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) &&
+ gst_structure_has_name(gm->structure, "prepare-xwindow-id") &&
+ m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
+#ifdef GL_TEXTURE_SINK_DEBUG
+ qDebug() << Q_FUNC_INFO;
+#endif
+ GstXOverlay *overlay = GST_X_OVERLAY(m_videoSink);
+
+ gst_x_overlay_set_xwindow_id(overlay, m_winId);
+
+ if (!m_displayRect.isEmpty()) {
+ gst_x_overlay_set_render_rectangle(overlay,
+ m_displayRect.x(),
+ m_displayRect.y(),
+ m_displayRect.width(),
+ m_displayRect.height());
+ }
+
+ GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
+ m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this);
+
+ return true;
+ }
+
+ return false;
+}
+
+void QGstreamerGLTextureRenderer::stopRenderer()
+{
+#ifdef GL_TEXTURE_SINK_DEBUG
+ qDebug() << Q_FUNC_INFO;
+#endif
+
+ if (m_surface && m_surface->isActive())
+ m_surface->stop();
+
+ if (!m_nativeSize.isEmpty()) {
+ m_nativeSize = QSize();
+ emit nativeSizeChanged();
+ }
+}
+
+bool QGstreamerGLTextureRenderer::overlayEnabled() const
+{
+ return m_overlayEnabled;
+}
+
+void QGstreamerGLTextureRenderer::setOverlayEnabled(bool enabled)
+{
+
+ if (m_videoSink && (m_overlayEnabled != enabled)) {
+ qDebug() << Q_FUNC_INFO << enabled;
+ g_object_set(G_OBJECT(m_videoSink),
+ "render-mode",
+ enabled ? VIDEO_RENDERSWITCH_XOVERLAY_MODE : VIDEO_RENDERSWITCH_TEXTURE_STREAMING_MODE,
+ (char *)NULL);
+ }
+
+ m_overlayEnabled = enabled;
+}
+
+
+WId QGstreamerGLTextureRenderer::winId() const
+{
+ return m_winId;
+}
+
+void QGstreamerGLTextureRenderer::setWinId(WId id)
+{
+#ifdef GL_TEXTURE_SINK_DEBUG
+ qDebug() << Q_FUNC_INFO << id;
+#endif
+
+ if (m_winId == id)
+ return;
+
+ bool oldReady = isReady();
+
+ m_winId = id;
+
+ if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
+ //don't set winId in NULL state,
+ //texture sink opens xvideo port on set_xwindow_id,
+ //this fails if video resource is not granted by resource policy yet.
+ //state is changed to READY/PAUSED/PLAYING only after resource is granted.
+ GstState pendingState = GST_STATE_NULL;
+ GstState newState = GST_STATE_NULL;
+ GstStateChangeReturn res = gst_element_get_state(m_videoSink,
+ &newState,
+ &pendingState,
+ 0);//don't block and return immediately
+
+ if (res != GST_STATE_CHANGE_FAILURE &&
+ newState != GST_STATE_NULL &&
+ pendingState != GST_STATE_NULL)
+ gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(m_videoSink), m_winId);
+ }
+
+ if (oldReady != isReady())
+ emit readyChanged(!oldReady);
+}
+
+QRect QGstreamerGLTextureRenderer::overlayGeometry() const
+{
+ return m_displayRect;
+}
+
+void QGstreamerGLTextureRenderer::setOverlayGeometry(const QRect &geometry)
+{
+ if (m_displayRect != geometry) {
+#ifdef GL_TEXTURE_SINK_DEBUG
+ qDebug() << Q_FUNC_INFO << geometry;
+#endif
+ m_displayRect = geometry;
+
+ if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
+ if (m_displayRect.isEmpty())
+ gst_x_overlay_set_render_rectangle(GST_X_OVERLAY(m_videoSink), -1, -1, -1, -1);
+ else
+ gst_x_overlay_set_render_rectangle(GST_X_OVERLAY(m_videoSink),
+ m_displayRect.x(),
+ m_displayRect.y(),
+ m_displayRect.width(),
+ m_displayRect.height());
+ repaintOverlay();
+ }
+ }
+}
+
+QColor QGstreamerGLTextureRenderer::colorKey() const
+{
+ return m_colorKey;
+}
+
+void QGstreamerGLTextureRenderer::repaintOverlay()
+{
+ if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
+ //don't call gst_x_overlay_expose if the sink is in null state
+ GstState state = GST_STATE_NULL;
+ GstStateChangeReturn res = gst_element_get_state(m_videoSink, &state, NULL, 1000000);
+ if (res != GST_STATE_CHANGE_FAILURE && state != GST_STATE_NULL) {
+ gst_x_overlay_expose(GST_X_OVERLAY(m_videoSink));
+ }
+ }
+}
+
+QSize QGstreamerGLTextureRenderer::nativeSize() const
+{
+ return m_nativeSize;
+}
+
+gboolean QGstreamerGLTextureRenderer::padBufferProbe(GstPad *pad, GstBuffer *buffer, gpointer user_data)
+{
+ QGstreamerGLTextureRenderer *control = reinterpret_cast<QGstreamerGLTextureRenderer*>(user_data);
+ QMetaObject::invokeMethod(control, "updateNativeVideoSize", Qt::QueuedConnection);
+ gst_pad_remove_buffer_probe(pad, control->m_bufferProbeId);
+
+ return TRUE;
+}
+
+void QGstreamerGLTextureRenderer::updateNativeVideoSize()
+{
+ const QSize oldSize = m_nativeSize;
+
+ if (m_videoSink) {
+ //find video native size to update video widget size hint
+ GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
+ GstCaps *caps = gst_pad_get_negotiated_caps(pad);
+
+ if (caps) {
+ m_nativeSize = QGstUtils::capsCorrectedResolution(caps);
+ gst_caps_unref(caps);
+ }
+ } else {
+ m_nativeSize = QSize();
+ }
+#ifdef GL_TEXTURE_SINK_DEBUG
+ qDebug() << Q_FUNC_INFO << oldSize << m_nativeSize << m_videoSink;
+#endif
+
+ if (m_nativeSize != oldSize)
+ emit nativeSizeChanged();
+}
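
The buffer probe used above removes itself from inside the callback, so the negotiated caps are read exactly once, when the first buffer reaches the sink pad, and the result is forwarded to the GUI thread with a queued invocation. A minimal sketch of that one-shot probe pattern, assuming GStreamer 0.10 and a sink element with a static "sink" pad (the names below are illustrative and not part of this patch):

    #include <gst/gst.h>

    static gulong probe_id = 0;

    // Runs on the streaming thread for the first buffer only.
    static gboolean first_buffer_probe(GstPad *pad, GstBuffer *buffer, gpointer user_data)
    {
        (void)buffer;
        (void)user_data;

        GstCaps *caps = gst_pad_get_negotiated_caps(pad);
        if (caps) {
            // read the frame size here, e.g. with QGstUtils::capsCorrectedResolution()
            gst_caps_unref(caps);
        }

        gst_pad_remove_buffer_probe(pad, probe_id); // fire only once
        return TRUE;                                // keep the buffer flowing downstream
    }

    static void install_first_buffer_probe(GstElement *sink)
    {
        GstPad *pad = gst_element_get_static_pad(sink, "sink");
        probe_id = gst_pad_add_buffer_probe(pad, G_CALLBACK(first_buffer_probe), NULL);
        gst_object_unref(pad);
    }
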
diff --git a/src/gsttools/qgstreamervideoinputdevicecontrol.cpp b/src/gsttools/qgstreamervideoinputdevicecontrol.cpp
new file mode 100644
index 000000000..5fe9ca705
--- /dev/null
+++ b/src/gsttools/qgstreamervideoinputdevicecontrol.cpp
@@ -0,0 +1,155 @@
+/****************************************************************************
+**
+** Copyright (C) 2012 Nokia Corporation and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** GNU Lesser General Public License Usage
+** This file may be used under the terms of the GNU Lesser General Public
+** License version 2.1 as published by the Free Software Foundation and
+** appearing in the file LICENSE.LGPL included in the packaging of this
+** file. Please review the following information to ensure the GNU Lesser
+** General Public License version 2.1 requirements will be met:
+** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Nokia gives you certain additional
+** rights. These rights are described in the Nokia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU General
+** Public License version 3.0 as published by the Free Software Foundation
+** and appearing in the file LICENSE.GPL included in the packaging of this
+** file. Please review the following information to ensure the GNU General
+** Public License version 3.0 requirements will be met:
+** http://www.gnu.org/copyleft/gpl.html.
+**
+** Other Usage
+** Alternatively, this file may be used in accordance with the terms and
+** conditions contained in a signed written agreement between you and Nokia.
+**
+**
+**
+**
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include "qgstreamervideoinputdevicecontrol_p.h"
+
+#include <QtCore/QDir>
+#include <QtCore/QDebug>
+
+#include <linux/types.h>
+#include <sys/time.h>
+#include <sys/ioctl.h>
+#include <sys/poll.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <errno.h>
+#include <string.h>
+#include <stdlib.h>
+#include <sys/mman.h>
+#include <linux/videodev2.h>
+
+QGstreamerVideoInputDeviceControl::QGstreamerVideoInputDeviceControl(QObject *parent)
+ :QVideoDeviceControl(parent), m_selectedDevice(0)
+{
+ update();
+}
+
+QGstreamerVideoInputDeviceControl::~QGstreamerVideoInputDeviceControl()
+{
+}
+
+int QGstreamerVideoInputDeviceControl::deviceCount() const
+{
+ return m_names.size();
+}
+
+QString QGstreamerVideoInputDeviceControl::deviceName(int index) const
+{
+ return m_names[index];
+}
+
+QString QGstreamerVideoInputDeviceControl::deviceDescription(int index) const
+{
+ return m_descriptions[index];
+}
+
+int QGstreamerVideoInputDeviceControl::defaultDevice() const
+{
+ return 0;
+}
+
+int QGstreamerVideoInputDeviceControl::selectedDevice() const
+{
+ return m_selectedDevice;
+}
+
+
+void QGstreamerVideoInputDeviceControl::setSelectedDevice(int index)
+{
+ if (index != m_selectedDevice) {
+ m_selectedDevice = index;
+ emit selectedDeviceChanged(index);
+ emit selectedDeviceChanged(deviceName(index));
+ }
+}
+
+
+void QGstreamerVideoInputDeviceControl::update()
+{
+ m_names.clear();
+ m_descriptions.clear();
+
+#ifdef Q_WS_MAEMO_6
+ m_names << QLatin1String("primary") << QLatin1String("secondary");
+ m_descriptions << tr("Main camera") << tr("Front camera");
+#else
+ QDir devDir("/dev");
+ devDir.setFilter(QDir::System);
+
+ QFileInfoList entries = devDir.entryInfoList(QStringList() << "video*");
+
+ foreach( const QFileInfo &entryInfo, entries ) {
+ //qDebug() << "Try" << entryInfo.filePath();
+
+ int fd = ::open(entryInfo.filePath().toLatin1().constData(), O_RDWR );
+ if (fd == -1)
+ continue;
+
+ bool isCamera = false;
+
+ v4l2_input input;
+ memset(&input, 0, sizeof(input));
+ for (; ::ioctl(fd, VIDIOC_ENUMINPUT, &input) >= 0; ++input.index) {
+ if(input.type == V4L2_INPUT_TYPE_CAMERA || input.type == 0) {
+ isCamera = ::ioctl(fd, VIDIOC_S_INPUT, input.index) != 0;
+ break;
+ }
+ }
+
+ if (isCamera) {
+ // find out its driver "name"
+ QString name;
+ struct v4l2_capability vcap;
+ memset(&vcap, 0, sizeof(struct v4l2_capability));
+
+ if (ioctl(fd, VIDIOC_QUERYCAP, &vcap) != 0)
+ name = entryInfo.fileName();
+ else
+ name = QString((const char*)vcap.card);
+ //qDebug() << "found camera: " << name;
+
+ m_names.append(entryInfo.filePath());
+ m_descriptions.append(name);
+ }
+ ::close(fd);
+ }
+#endif
+}
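
On non-Maemo systems, update() above finds cameras by opening every /dev/video* node and walking its inputs with VIDIOC_ENUMINPUT. A shorter, coarser variant is to check the capture capability flag from VIDIOC_QUERYCAP instead; the sketch below does that and is only an illustration, not the behaviour of this patch:

    #include <fcntl.h>
    #include <string.h>
    #include <unistd.h>
    #include <sys/ioctl.h>
    #include <linux/videodev2.h>

    #include <QDir>
    #include <QFileInfo>
    #include <QStringList>

    // Lists the card names of /dev/video* nodes that advertise video capture.
    QStringList listCaptureDevices()
    {
        QStringList names;
        QDir devDir("/dev");
        devDir.setFilter(QDir::System);

        foreach (const QFileInfo &node, devDir.entryInfoList(QStringList() << "video*")) {
            const int fd = ::open(node.filePath().toLocal8Bit().constData(), O_RDWR);
            if (fd == -1)
                continue;

            v4l2_capability cap;
            memset(&cap, 0, sizeof(cap));
            if (::ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0
                    && (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
                names.append(QString::fromLatin1(reinterpret_cast<const char *>(cap.card)));

            ::close(fd);
        }
        return names;
    }
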
diff --git a/src/gsttools/qgstreamervideooverlay.cpp b/src/gsttools/qgstreamervideooverlay.cpp
new file mode 100644
index 000000000..5b8030479
--- /dev/null
+++ b/src/gsttools/qgstreamervideooverlay.cpp
@@ -0,0 +1,228 @@
+/****************************************************************************
+**
+** Copyright (C) 2012 Nokia Corporation and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** GNU Lesser General Public License Usage
+** This file may be used under the terms of the GNU Lesser General Public
+** License version 2.1 as published by the Free Software Foundation and
+** appearing in the file LICENSE.LGPL included in the packaging of this
+** file. Please review the following information to ensure the GNU Lesser
+** General Public License version 2.1 requirements will be met:
+** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Nokia gives you certain additional
+** rights. These rights are described in the Nokia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU General
+** Public License version 3.0 as published by the Free Software Foundation
+** and appearing in the file LICENSE.GPL included in the packaging of this
+** file. Please review the following information to ensure the GNU General
+** Public License version 3.0 requirements will be met:
+** http://www.gnu.org/copyleft/gpl.html.
+**
+** Other Usage
+** Alternatively, this file may be used in accordance with the terms and
+** conditions contained in a signed written agreement between you and Nokia.
+**
+**
+**
+**
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include "qgstreamervideooverlay_p.h"
+#include <private/qvideosurfacegstsink_p.h>
+
+#include <qvideosurfaceformat.h>
+
+#include <qx11videosurface_p.h>
+
+QGstreamerVideoOverlay::QGstreamerVideoOverlay(QObject *parent)
+ : QVideoWindowControl(parent)
+ , m_surface(new QX11VideoSurface)
+ , m_videoSink(reinterpret_cast<GstElement*>(QVideoSurfaceGstSink::createSink(m_surface)))
+ , m_aspectRatioMode(Qt::KeepAspectRatio)
+ , m_fullScreen(false)
+{
+ if (m_videoSink) {
+ gst_object_ref(GST_OBJECT(m_videoSink)); //Take ownership
+ gst_object_sink(GST_OBJECT(m_videoSink));
+ }
+
+ connect(m_surface, SIGNAL(surfaceFormatChanged(QVideoSurfaceFormat)),
+ this, SLOT(surfaceFormatChanged()));
+}
+
+QGstreamerVideoOverlay::~QGstreamerVideoOverlay()
+{
+ if (m_videoSink)
+ gst_object_unref(GST_OBJECT(m_videoSink));
+
+ delete m_surface;
+}
+
+WId QGstreamerVideoOverlay::winId() const
+{
+ return m_surface->winId();
+}
+
+void QGstreamerVideoOverlay::setWinId(WId id)
+{
+ bool wasReady = isReady();
+ m_surface->setWinId(id);
+
+ if (isReady() != wasReady)
+ emit readyChanged(!wasReady);
+}
+
+QRect QGstreamerVideoOverlay::displayRect() const
+{
+ return m_displayRect;
+}
+
+void QGstreamerVideoOverlay::setDisplayRect(const QRect &rect)
+{
+ m_displayRect = rect;
+
+ setScaledDisplayRect();
+}
+
+Qt::AspectRatioMode QGstreamerVideoOverlay::aspectRatioMode() const
+{
+ return m_aspectRatioMode;
+}
+
+void QGstreamerVideoOverlay::setAspectRatioMode(Qt::AspectRatioMode mode)
+{
+ m_aspectRatioMode = mode;
+
+ setScaledDisplayRect();
+}
+
+void QGstreamerVideoOverlay::repaint()
+{
+}
+
+int QGstreamerVideoOverlay::brightness() const
+{
+ return m_surface->brightness();
+}
+
+void QGstreamerVideoOverlay::setBrightness(int brightness)
+{
+ m_surface->setBrightness(brightness);
+
+ emit brightnessChanged(m_surface->brightness());
+}
+
+int QGstreamerVideoOverlay::contrast() const
+{
+ return m_surface->contrast();
+}
+
+void QGstreamerVideoOverlay::setContrast(int contrast)
+{
+ m_surface->setContrast(contrast);
+
+ emit contrastChanged(m_surface->contrast());
+}
+
+int QGstreamerVideoOverlay::hue() const
+{
+ return m_surface->hue();
+}
+
+void QGstreamerVideoOverlay::setHue(int hue)
+{
+ m_surface->setHue(hue);
+
+ emit hueChanged(m_surface->hue());
+}
+
+int QGstreamerVideoOverlay::saturation() const
+{
+ return m_surface->saturation();
+}
+
+void QGstreamerVideoOverlay::setSaturation(int saturation)
+{
+ m_surface->setSaturation(saturation);
+
+ emit saturationChanged(m_surface->saturation());
+}
+
+bool QGstreamerVideoOverlay::isFullScreen() const
+{
+ return m_fullScreen;
+}
+
+void QGstreamerVideoOverlay::setFullScreen(bool fullScreen)
+{
+ emit fullScreenChanged(m_fullScreen = fullScreen);
+}
+
+QSize QGstreamerVideoOverlay::nativeSize() const
+{
+ return m_surface->surfaceFormat().sizeHint();
+}
+
+QAbstractVideoSurface *QGstreamerVideoOverlay::surface() const
+{
+ return m_surface;
+}
+
+GstElement *QGstreamerVideoOverlay::videoSink()
+{
+ return m_videoSink;
+}
+
+void QGstreamerVideoOverlay::surfaceFormatChanged()
+{
+ setScaledDisplayRect();
+
+ emit nativeSizeChanged();
+}
+
+void QGstreamerVideoOverlay::setScaledDisplayRect()
+{
+ QRect formatViewport = m_surface->surfaceFormat().viewport();
+
+ switch (m_aspectRatioMode) {
+ case Qt::KeepAspectRatio:
+ {
+ QSize size = m_surface->surfaceFormat().sizeHint();
+ size.scale(m_displayRect.size(), Qt::KeepAspectRatio);
+
+ QRect rect(QPoint(0, 0), size);
+ rect.moveCenter(m_displayRect.center());
+
+ m_surface->setDisplayRect(rect);
+ m_surface->setViewport(formatViewport);
+ }
+ break;
+ case Qt::IgnoreAspectRatio:
+ m_surface->setDisplayRect(m_displayRect);
+ m_surface->setViewport(formatViewport);
+ break;
+ case Qt::KeepAspectRatioByExpanding:
+ {
+ QSize size = m_displayRect.size();
+ size.scale(m_surface->surfaceFormat().sizeHint(), Qt::KeepAspectRatio);
+
+ QRect viewport(QPoint(0, 0), size);
+ viewport.moveCenter(formatViewport.center());
+ m_surface->setDisplayRect(m_displayRect);
+ m_surface->setViewport(viewport);
+ }
+ break;
+ };
+}
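
setScaledDisplayRect() above recomputes the X11 surface's display rect and viewport whenever the geometry, the aspect-ratio mode, or the surface format changes. The Qt::KeepAspectRatio branch boils down to scaling the native frame size into the target rect and centring it; a minimal, self-contained sketch of that computation (illustrative names, not part of this patch):

    #include <QRect>
    #include <QSize>

    // Returns the largest rect with nativeSize's aspect ratio that fits
    // inside displayRect, centred within it.
    QRect keepAspectRatioRect(const QSize &nativeSize, const QRect &displayRect)
    {
        QSize scaled = nativeSize;
        scaled.scale(displayRect.size(), Qt::KeepAspectRatio);

        QRect target(QPoint(0, 0), scaled);
        target.moveCenter(displayRect.center());
        return target;
    }
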
diff --git a/src/gsttools/qgstreamervideoprobecontrol.cpp b/src/gsttools/qgstreamervideoprobecontrol.cpp
new file mode 100644
index 000000000..8e4e74fad
--- /dev/null
+++ b/src/gsttools/qgstreamervideoprobecontrol.cpp
@@ -0,0 +1,117 @@
+/****************************************************************************
+**
+** Copyright (C) 2012 Nokia Corporation and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** GNU Lesser General Public License Usage
+** This file may be used under the terms of the GNU Lesser General Public
+** License version 2.1 as published by the Free Software Foundation and
+** appearing in the file LICENSE.LGPL included in the packaging of this
+** file. Please review the following information to ensure the GNU Lesser
+** General Public License version 2.1 requirements will be met:
+** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Nokia gives you certain additional
+** rights. These rights are described in the Nokia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU General
+** Public License version 3.0 as published by the Free Software Foundation
+** and appearing in the file LICENSE.GPL included in the packaging of this
+** file. Please review the following information to ensure the GNU General
+** Public License version 3.0 requirements will be met:
+** http://www.gnu.org/copyleft/gpl.html.
+**
+** Other Usage
+** Alternatively, this file may be used in accordance with the terms and
+** conditions contained in a signed written agreement between you and Nokia.
+**
+**
+**
+**
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include "qgstreamervideoprobecontrol_p.h"
+#include <private/qvideosurfacegstsink_p.h>
+#include <private/qgstvideobuffer_p.h>
+
+QGstreamerVideoProbeControl::QGstreamerVideoProbeControl(QObject *parent)
+ : QMediaVideoProbeControl(parent)
+ , m_flushing(false)
+ , m_frameProbed(false)
+{
+
+}
+
+QGstreamerVideoProbeControl::~QGstreamerVideoProbeControl()
+{
+
+}
+
+void QGstreamerVideoProbeControl::startFlushing()
+{
+ m_flushing = true;
+
+ {
+ QMutexLocker locker(&m_frameMutex);
+ m_pendingFrame = QVideoFrame();
+ }
+
+ // only emit flush if at least one frame was probed
+ if (m_frameProbed)
+ emit flush();
+}
+
+void QGstreamerVideoProbeControl::stopFlushing()
+{
+ m_flushing = false;
+}
+
+void QGstreamerVideoProbeControl::bufferProbed(GstBuffer* buffer)
+{
+ if (m_flushing)
+ return;
+
+ GstCaps* caps = gst_buffer_get_caps(buffer);
+ if (!caps)
+ return;
+
+ int bytesPerLine = 0;
+ QVideoSurfaceFormat format = QVideoSurfaceGstSink::formatForCaps(caps, &bytesPerLine);
+ gst_caps_unref(caps);
+ if (!format.isValid() || !bytesPerLine)
+ return;
+
+ QVideoFrame frame = QVideoFrame(new QGstVideoBuffer(buffer, bytesPerLine),
+ format.frameSize(), format.pixelFormat());
+
+ QVideoSurfaceGstSink::setFrameTimeStamps(&frame, buffer);
+
+ m_frameProbed = true;
+
+ {
+ QMutexLocker locker(&m_frameMutex);
+ m_pendingFrame = frame;
+ QMetaObject::invokeMethod(this, "frameProbed", Qt::QueuedConnection);
+ }
+}
+
+void QGstreamerVideoProbeControl::frameProbed()
+{
+ QVideoFrame frame;
+ {
+ QMutexLocker locker(&m_frameMutex);
+ if (!m_pendingFrame.isValid())
+ return;
+ frame = m_pendingFrame;
+ }
+ emit videoFrameProbed(frame);
+}
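
bufferProbed() above runs on GStreamer's streaming thread, so the probe never emits directly: it stores the newest frame under a mutex and queues an invocation of frameProbed(), which re-reads the frame on the control's own thread before emitting videoFrameProbed(). A minimal sketch of that hand-off pattern, with illustrative names and no GStreamer dependency:

    #include <QMutex>
    #include <QMutexLocker>
    #include <QObject>
    #include <QVideoFrame>

    class FrameRelay : public QObject
    {
        Q_OBJECT
    public:
        // Called from the streaming thread; keeps only the newest frame.
        void push(const QVideoFrame &frame)
        {
            QMutexLocker locker(&m_mutex);
            m_pending = frame;
            QMetaObject::invokeMethod(this, "deliver", Qt::QueuedConnection);
        }

    signals:
        void frameReady(const QVideoFrame &frame);

    private slots:
        // Runs on the object's own thread via the queued invocation.
        void deliver()
        {
            QVideoFrame frame;
            {
                QMutexLocker locker(&m_mutex);
                if (!m_pending.isValid())
                    return;
                frame = m_pending;
            }
            emit frameReady(frame);
        }

    private:
        QMutex m_mutex;
        QVideoFrame m_pending;
    };
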
diff --git a/src/gsttools/qgstreamervideorenderer.cpp b/src/gsttools/qgstreamervideorenderer.cpp
new file mode 100644
index 000000000..bd2a0b12e
--- /dev/null
+++ b/src/gsttools/qgstreamervideorenderer.cpp
@@ -0,0 +1,117 @@
+/****************************************************************************
+**
+** Copyright (C) 2012 Nokia Corporation and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** GNU Lesser General Public License Usage
+** This file may be used under the terms of the GNU Lesser General Public
+** License version 2.1 as published by the Free Software Foundation and
+** appearing in the file LICENSE.LGPL included in the packaging of this
+** file. Please review the following information to ensure the GNU Lesser
+** General Public License version 2.1 requirements will be met:
+** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Nokia gives you certain additional
+** rights. These rights are described in the Nokia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU General
+** Public License version 3.0 as published by the Free Software Foundation
+** and appearing in the file LICENSE.GPL included in the packaging of this
+** file. Please review the following information to ensure the GNU General
+** Public License version 3.0 requirements will be met:
+** http://www.gnu.org/copyleft/gpl.html.
+**
+** Other Usage
+** Alternatively, this file may be used in accordance with the terms and
+** conditions contained in a signed written agreement between you and Nokia.
+**
+**
+**
+**
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include "qgstreamervideorenderer_p.h"
+#include <private/qvideosurfacegstsink_p.h>
+#include <qabstractvideosurface.h>
+
+#include <QDebug>
+
+#include <gst/gst.h>
+
+QGstreamerVideoRenderer::QGstreamerVideoRenderer(QObject *parent)
+ :QVideoRendererControl(parent),m_videoSink(0), m_surface(0)
+{
+}
+
+QGstreamerVideoRenderer::~QGstreamerVideoRenderer()
+{
+ if (m_videoSink)
+ gst_object_unref(GST_OBJECT(m_videoSink));
+}
+
+GstElement *QGstreamerVideoRenderer::videoSink()
+{
+ if (!m_videoSink && m_surface) {
+ m_videoSink = QVideoSurfaceGstSink::createSink(m_surface);
+ gst_object_ref(GST_OBJECT(m_videoSink)); //Take ownership
+ gst_object_sink(GST_OBJECT(m_videoSink));
+ }
+
+ return reinterpret_cast<GstElement*>(m_videoSink);
+}
+
+
+QAbstractVideoSurface *QGstreamerVideoRenderer::surface() const
+{
+ return m_surface;
+}
+
+void QGstreamerVideoRenderer::setSurface(QAbstractVideoSurface *surface)
+{
+ if (m_surface != surface) {
+ //qDebug() << Q_FUNC_INFO << surface;
+ if (m_videoSink)
+ gst_object_unref(GST_OBJECT(m_videoSink));
+
+ m_videoSink = 0;
+
+ if (m_surface) {
+ disconnect(m_surface, SIGNAL(supportedFormatsChanged()),
+ this, SLOT(handleFormatChange()));
+ }
+
+ bool wasReady = isReady();
+
+ m_surface = surface;
+
+ if (m_surface) {
+ connect(m_surface, SIGNAL(supportedFormatsChanged()),
+ this, SLOT(handleFormatChange()));
+ }
+
+ if (wasReady != isReady())
+ emit readyChanged(isReady());
+
+ emit sinkChanged();
+ }
+}
+
+void QGstreamerVideoRenderer::handleFormatChange()
+{
+ //qDebug() << "Supported formats list has changed, reload video output";
+
+ if (m_videoSink)
+ gst_object_unref(GST_OBJECT(m_videoSink));
+
+ m_videoSink = 0;
+ emit sinkChanged();
+}
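
videoSink() above creates the sink lazily and immediately calls gst_object_ref() followed by gst_object_sink(): newly created GStreamer 0.10 elements carry a floating reference, and sinking it after taking a strong reference makes the control the owner, so the matching gst_object_unref() in the destructor releases it. A minimal sketch of that ownership pattern (the element name is only an example):

    #include <gst/gst.h>

    GstElement *createOwnedElement()
    {
        GstElement *element = gst_element_factory_make("fakesink", NULL);
        if (element) {
            gst_object_ref(GST_OBJECT(element));   // take a strong reference
            gst_object_sink(GST_OBJECT(element));  // clear the floating flag (GStreamer 0.10)
        }
        return element; // the caller releases it later with gst_object_unref()
    }
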
diff --git a/src/gsttools/qgstreamervideorendererinterface.cpp b/src/gsttools/qgstreamervideorendererinterface.cpp
new file mode 100644
index 000000000..5a7a85fcc
--- /dev/null
+++ b/src/gsttools/qgstreamervideorendererinterface.cpp
@@ -0,0 +1,46 @@
+/****************************************************************************
+**
+** Copyright (C) 2012 Nokia Corporation and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** GNU Lesser General Public License Usage
+** This file may be used under the terms of the GNU Lesser General Public
+** License version 2.1 as published by the Free Software Foundation and
+** appearing in the file LICENSE.LGPL included in the packaging of this
+** file. Please review the following information to ensure the GNU Lesser
+** General Public License version 2.1 requirements will be met:
+** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Nokia gives you certain additional
+** rights. These rights are described in the Nokia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU General
+** Public License version 3.0 as published by the Free Software Foundation
+** and appearing in the file LICENSE.GPL included in the packaging of this
+** file. Please review the following information to ensure the GNU General
+** Public License version 3.0 requirements will be met:
+** http://www.gnu.org/copyleft/gpl.html.
+**
+** Other Usage
+** Alternatively, this file may be used in accordance with the terms and
+** conditions contained in a signed written agreement between you and Nokia.
+**
+**
+**
+**
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include "qgstreamervideorendererinterface_p.h"
+
+QGstreamerVideoRendererInterface::~QGstreamerVideoRendererInterface()
+{
+}
diff --git a/src/gsttools/qgstreamervideowidget.cpp b/src/gsttools/qgstreamervideowidget.cpp
new file mode 100644
index 000000000..9c56a06de
--- /dev/null
+++ b/src/gsttools/qgstreamervideowidget.cpp
@@ -0,0 +1,352 @@
+/****************************************************************************
+**
+** Copyright (C) 2012 Nokia Corporation and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** GNU Lesser General Public License Usage
+** This file may be used under the terms of the GNU Lesser General Public
+** License version 2.1 as published by the Free Software Foundation and
+** appearing in the file LICENSE.LGPL included in the packaging of this
+** file. Please review the following information to ensure the GNU Lesser
+** General Public License version 2.1 requirements will be met:
+** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Nokia gives you certain additional
+** rights. These rights are described in the Nokia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU General
+** Public License version 3.0 as published by the Free Software Foundation
+** and appearing in the file LICENSE.GPL included in the packaging of this
+** file. Please review the following information to ensure the GNU General
+** Public License version 3.0 requirements will be met:
+** http://www.gnu.org/copyleft/gpl.html.
+**
+** Other Usage
+** Alternatively, this file may be used in accordance with the terms and
+** conditions contained in a signed written agreement between you and Nokia.
+**
+**
+**
+**
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include "qgstreamervideowidget_p.h"
+#include <private/qgstutils_p.h>
+
+#include <QtCore/qcoreevent.h>
+#include <QtCore/qdebug.h>
+#include <QtWidgets/qapplication.h>
+#include <QtGui/qpainter.h>
+
+#ifdef Q_WS_X11
+# include <X11/Xlib.h>
+#endif
+#include <gst/gst.h>
+#include <gst/interfaces/xoverlay.h>
+#include <gst/interfaces/propertyprobe.h>
+
+class QGstreamerVideoWidget : public QWidget
+{
+public:
+ QGstreamerVideoWidget(QWidget *parent = 0)
+ :QWidget(parent)
+ {
+ setSizePolicy(QSizePolicy::Expanding, QSizePolicy::Expanding);
+ QPalette palette;
+ palette.setColor(QPalette::Background, Qt::black);
+ setPalette(palette);
+ }
+
+ virtual ~QGstreamerVideoWidget() {}
+
+ QSize sizeHint() const
+ {
+ return m_nativeSize;
+ }
+
+ void setNativeSize( const QSize &size)
+ {
+ if (size != m_nativeSize) {
+ m_nativeSize = size;
+ if (size.isEmpty())
+ setMinimumSize(0,0);
+ else
+ setMinimumSize(160,120);
+
+ updateGeometry();
+ }
+ }
+
+protected:
+ void paintEvent(QPaintEvent *)
+ {
+ QPainter painter(this);
+ painter.fillRect(rect(), palette().background());
+ }
+
+ QSize m_nativeSize;
+};
+
+QGstreamerVideoWidgetControl::QGstreamerVideoWidgetControl(QObject *parent)
+ : QVideoWidgetControl(parent)
+ , m_videoSink(0)
+ , m_widget(0)
+ , m_fullScreen(false)
+{
+}
+
+QGstreamerVideoWidgetControl::~QGstreamerVideoWidgetControl()
+{
+ if (m_videoSink)
+ gst_object_unref(GST_OBJECT(m_videoSink));
+
+ delete m_widget;
+}
+
+void QGstreamerVideoWidgetControl::createVideoWidget()
+{
+ if (m_widget)
+ return;
+
+ m_widget = new QGstreamerVideoWidget;
+
+ m_widget->installEventFilter(this);
+ m_windowId = m_widget->winId();
+
+ m_videoSink = gst_element_factory_make ("xvimagesink", NULL);
+ if (m_videoSink) {
+ // Check if the xv sink is usable
+ if (gst_element_set_state(m_videoSink, GST_STATE_READY) != GST_STATE_CHANGE_SUCCESS) {
+ gst_object_unref(GST_OBJECT(m_videoSink));
+ m_videoSink = 0;
+ } else {
+ gst_element_set_state(m_videoSink, GST_STATE_NULL);
+
+ g_object_set(G_OBJECT(m_videoSink), "force-aspect-ratio", 1, (const char*)NULL);
+ }
+ }
+
+ if (!m_videoSink)
+ m_videoSink = gst_element_factory_make ("ximagesink", NULL);
+
+ gst_object_ref (GST_OBJECT (m_videoSink)); //Take ownership
+ gst_object_sink (GST_OBJECT (m_videoSink));
+
+
+}
+
+GstElement *QGstreamerVideoWidgetControl::videoSink()
+{
+ createVideoWidget();
+ return m_videoSink;
+}
+
+bool QGstreamerVideoWidgetControl::eventFilter(QObject *object, QEvent *e)
+{
+ if (m_widget && object == m_widget) {
+ if (e->type() == QEvent::ParentChange || e->type() == QEvent::Show) {
+ WId newWId = m_widget->winId();
+ if (newWId != m_windowId) {
+ m_windowId = newWId;
+ // Even if we have created a winId at this point, other X applications
+ // need to be aware of it.
+ QApplication::syncX();
+ setOverlay();
+ }
+ }
+
+ if (e->type() == QEvent::Show) {
+            // Setting these attributes ensures smooth resizing since they
+            // prevent the system from clearing the background.
+ m_widget->setAttribute(Qt::WA_NoSystemBackground, true);
+ m_widget->setAttribute(Qt::WA_PaintOnScreen, true);
+ } else if (e->type() == QEvent::Resize) {
+ // This is a workaround for missing background repaints
+ // when reducing window size
+ windowExposed();
+ }
+ }
+
+ return false;
+}
+
+bool QGstreamerVideoWidgetControl::processSyncMessage(const QGstreamerMessage &message)
+{
+ GstMessage* gm = message.rawMessage();
+
+ if (gm && (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) &&
+ gst_structure_has_name(gm->structure, "prepare-xwindow-id")) {
+
+ setOverlay();
+ QMetaObject::invokeMethod(this, "updateNativeVideoSize", Qt::QueuedConnection);
+ return true;
+ }
+
+ return false;
+}
+
+bool QGstreamerVideoWidgetControl::processBusMessage(const QGstreamerMessage &message)
+{
+ GstMessage* gm = message.rawMessage();
+
+ if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_STATE_CHANGED &&
+ GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_videoSink)) {
+ GstState oldState;
+ GstState newState;
+ gst_message_parse_state_changed(gm, &oldState, &newState, 0);
+
+ if (oldState == GST_STATE_READY && newState == GST_STATE_PAUSED)
+ updateNativeVideoSize();
+ }
+
+ return false;
+}
+
+void QGstreamerVideoWidgetControl::setOverlay()
+{
+ if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
+ gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(m_videoSink), m_windowId);
+ }
+}
+
+void QGstreamerVideoWidgetControl::updateNativeVideoSize()
+{
+ if (m_videoSink) {
+ //find video native size to update video widget size hint
+ GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
+ GstCaps *caps = gst_pad_get_negotiated_caps(pad);
+
+ if (caps) {
+ m_widget->setNativeSize(QGstUtils::capsCorrectedResolution(caps));
+ gst_caps_unref(caps);
+ }
+ } else {
+ if (m_widget)
+ m_widget->setNativeSize(QSize());
+ }
+}
+
+
+void QGstreamerVideoWidgetControl::windowExposed()
+{
+ if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink))
+ gst_x_overlay_expose(GST_X_OVERLAY(m_videoSink));
+}
+
+QWidget *QGstreamerVideoWidgetControl::videoWidget()
+{
+ createVideoWidget();
+ return m_widget;
+}
+
+Qt::AspectRatioMode QGstreamerVideoWidgetControl::aspectRatioMode() const
+{
+ return m_aspectRatioMode;
+}
+
+void QGstreamerVideoWidgetControl::setAspectRatioMode(Qt::AspectRatioMode mode)
+{
+ if (m_videoSink) {
+ g_object_set(G_OBJECT(m_videoSink),
+ "force-aspect-ratio",
+ (mode == Qt::KeepAspectRatio),
+ (const char*)NULL);
+ }
+
+ m_aspectRatioMode = mode;
+}
+
+bool QGstreamerVideoWidgetControl::isFullScreen() const
+{
+ return m_fullScreen;
+}
+
+void QGstreamerVideoWidgetControl::setFullScreen(bool fullScreen)
+{
+ emit fullScreenChanged(m_fullScreen = fullScreen);
+}
+
+int QGstreamerVideoWidgetControl::brightness() const
+{
+ int brightness = 0;
+
+ if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "brightness"))
+ g_object_get(G_OBJECT(m_videoSink), "brightness", &brightness, NULL);
+
+ return brightness / 10;
+}
+
+void QGstreamerVideoWidgetControl::setBrightness(int brightness)
+{
+ if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "brightness")) {
+ g_object_set(G_OBJECT(m_videoSink), "brightness", brightness * 10, NULL);
+
+ emit brightnessChanged(brightness);
+ }
+}
+
+int QGstreamerVideoWidgetControl::contrast() const
+{
+ int contrast = 0;
+
+ if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "contrast"))
+ g_object_get(G_OBJECT(m_videoSink), "contrast", &contrast, NULL);
+
+ return contrast / 10;
+}
+
+void QGstreamerVideoWidgetControl::setContrast(int contrast)
+{
+ if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "contrast")) {
+ g_object_set(G_OBJECT(m_videoSink), "contrast", contrast * 10, NULL);
+
+ emit contrastChanged(contrast);
+ }
+}
+
+int QGstreamerVideoWidgetControl::hue() const
+{
+ int hue = 0;
+
+ if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "hue"))
+ g_object_get(G_OBJECT(m_videoSink), "hue", &hue, NULL);
+
+ return hue / 10;
+}
+
+void QGstreamerVideoWidgetControl::setHue(int hue)
+{
+ if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "hue")) {
+ g_object_set(G_OBJECT(m_videoSink), "hue", hue * 10, NULL);
+
+ emit hueChanged(hue);
+ }
+}
+
+int QGstreamerVideoWidgetControl::saturation() const
+{
+ int saturation = 0;
+
+ if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "saturation"))
+ g_object_get(G_OBJECT(m_videoSink), "saturation", &saturation, NULL);
+
+ return saturation / 10;
+}
+
+void QGstreamerVideoWidgetControl::setSaturation(int saturation)
+{
+ if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "saturation")) {
+ g_object_set(G_OBJECT(m_videoSink), "saturation", saturation * 10, NULL);
+
+ emit saturationChanged(saturation);
+ }
+}
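
The colour-balance accessors above multiply or divide by ten, which assumes the sink's brightness/contrast/hue/saturation properties span a range ten times wider than the -100..100 used by the Qt controls (xvimagesink's properties are typically -1000..1000). A minimal sketch of one setter under that assumption, guarded the same way as the code above so sinks without the property are left untouched:

    #include <gst/gst.h>

    // Maps a Qt-range brightness (-100..100) onto the sink's property range.
    void setSinkBrightness(GstElement *sink, int qtBrightness)
    {
        if (sink && g_object_class_find_property(G_OBJECT_GET_CLASS(sink), "brightness"))
            g_object_set(G_OBJECT(sink), "brightness", qtBrightness * 10, NULL);
    }
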
diff --git a/src/gsttools/qgstreamervideowindow.cpp b/src/gsttools/qgstreamervideowindow.cpp
new file mode 100644
index 000000000..077060fd2
--- /dev/null
+++ b/src/gsttools/qgstreamervideowindow.cpp
@@ -0,0 +1,346 @@
+/****************************************************************************
+**
+** Copyright (C) 2012 Nokia Corporation and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** GNU Lesser General Public License Usage
+** This file may be used under the terms of the GNU Lesser General Public
+** License version 2.1 as published by the Free Software Foundation and
+** appearing in the file LICENSE.LGPL included in the packaging of this
+** file. Please review the following information to ensure the GNU Lesser
+** General Public License version 2.1 requirements will be met:
+** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Nokia gives you certain additional
+** rights. These rights are described in the Nokia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU General
+** Public License version 3.0 as published by the Free Software Foundation
+** and appearing in the file LICENSE.GPL included in the packaging of this
+** file. Please review the following information to ensure the GNU General
+** Public License version 3.0 requirements will be met:
+** http://www.gnu.org/copyleft/gpl.html.
+**
+** Other Usage
+** Alternatively, this file may be used in accordance with the terms and
+** conditions contained in a signed written agreement between you and Nokia.
+**
+**
+**
+**
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include "qgstreamervideowindow_p.h"
+#include <private/qgstutils_p.h>
+
+#include <QtCore/qdebug.h>
+
+#include <gst/gst.h>
+#include <gst/interfaces/xoverlay.h>
+#include <gst/interfaces/propertyprobe.h>
+
+/*
+    QGstreamerVideoWindow is similar to QGstreamerVideoOverlay,
+    but uses a GStreamer element such as xvimagesink instead of QX11VideoSurface.
+
+    This makes it possible to use accelerated elements when they are available
+    on the target platform, but requires GStreamer 0.10.29 or later, which
+    provides gst_x_overlay_set_render_rectangle for setting the display rect.
+*/
+
+QGstreamerVideoWindow::QGstreamerVideoWindow(QObject *parent, const char *elementName)
+ : QVideoWindowControl(parent)
+ , m_videoSink(0)
+ , m_windowId(0)
+ , m_aspectRatioMode(Qt::KeepAspectRatio)
+ , m_fullScreen(false)
+ , m_colorKey(QColor::Invalid)
+{
+ if (elementName)
+ m_videoSink = gst_element_factory_make(elementName, NULL);
+ else
+ m_videoSink = gst_element_factory_make("xvimagesink", NULL);
+
+ if (m_videoSink) {
+ gst_object_ref(GST_OBJECT(m_videoSink)); //Take ownership
+ gst_object_sink(GST_OBJECT(m_videoSink));
+
+ GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
+ m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this);
+ }
+}
+
+QGstreamerVideoWindow::~QGstreamerVideoWindow()
+{
+ if (m_videoSink)
+ gst_object_unref(GST_OBJECT(m_videoSink));
+}
+
+WId QGstreamerVideoWindow::winId() const
+{
+ return m_windowId;
+}
+
+void QGstreamerVideoWindow::setWinId(WId id)
+{
+ if (m_windowId == id)
+ return;
+
+ qDebug() << Q_FUNC_INFO << id;
+
+ WId oldId = m_windowId;
+
+ m_windowId = id;
+
+ if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
+ gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(m_videoSink), m_windowId);
+ }
+
+ if (!oldId)
+ emit readyChanged(true);
+
+ if (!id)
+ emit readyChanged(false);
+}
+
+bool QGstreamerVideoWindow::processSyncMessage(const QGstreamerMessage &message)
+{
+ GstMessage* gm = message.rawMessage();
+
+ if ((GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) &&
+ gst_structure_has_name(gm->structure, "prepare-xwindow-id") &&
+ m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
+
+ gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(m_videoSink), m_windowId);
+
+ GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
+ m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this);
+
+ return true;
+ }
+
+ return false;
+}
+
+QRect QGstreamerVideoWindow::displayRect() const
+{
+ return m_displayRect;
+}
+
+void QGstreamerVideoWindow::setDisplayRect(const QRect &rect)
+{
+ m_displayRect = rect;
+
+ if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
+#if GST_VERSION_MICRO >= 29
+ if (m_displayRect.isEmpty())
+ gst_x_overlay_set_render_rectangle(GST_X_OVERLAY(m_videoSink), -1, -1, -1, -1);
+ else
+ gst_x_overlay_set_render_rectangle(GST_X_OVERLAY(m_videoSink),
+ m_displayRect.x(),
+ m_displayRect.y(),
+ m_displayRect.width(),
+ m_displayRect.height());
+ repaint();
+#endif
+ }
+}
+
+Qt::AspectRatioMode QGstreamerVideoWindow::aspectRatioMode() const
+{
+ return m_aspectRatioMode;
+}
+
+void QGstreamerVideoWindow::setAspectRatioMode(Qt::AspectRatioMode mode)
+{
+ m_aspectRatioMode = mode;
+
+ if (m_videoSink) {
+ g_object_set(G_OBJECT(m_videoSink),
+ "force-aspect-ratio",
+ (m_aspectRatioMode == Qt::KeepAspectRatio),
+ (const char*)NULL);
+ }
+}
+
+void QGstreamerVideoWindow::repaint()
+{
+ if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
+ //don't call gst_x_overlay_expose if the sink is in null state
+ GstState state = GST_STATE_NULL;
+ GstStateChangeReturn res = gst_element_get_state(m_videoSink, &state, NULL, 1000000);
+ if (res != GST_STATE_CHANGE_FAILURE && state != GST_STATE_NULL) {
+ gst_x_overlay_expose(GST_X_OVERLAY(m_videoSink));
+ }
+ }
+}
+
+QColor QGstreamerVideoWindow::colorKey() const
+{
+ if (!m_colorKey.isValid()) {
+ gint colorkey = 0;
+ if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "colorkey"))
+ g_object_get(G_OBJECT(m_videoSink), "colorkey", &colorkey, NULL);
+
+ if (colorkey > 0)
+ m_colorKey.setRgb(colorkey);
+ }
+
+ return m_colorKey;
+}
+
+void QGstreamerVideoWindow::setColorKey(const QColor &color)
+{
+ m_colorKey = color;
+
+ if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "colorkey"))
+ g_object_set(G_OBJECT(m_videoSink), "colorkey", color.rgba(), NULL);
+}
+
+bool QGstreamerVideoWindow::autopaintColorKey() const
+{
+ bool enabled = true;
+
+ if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "autopaint-colorkey"))
+ g_object_get(G_OBJECT(m_videoSink), "autopaint-colorkey", &enabled, NULL);
+
+ return enabled;
+}
+
+void QGstreamerVideoWindow::setAutopaintColorKey(bool enabled)
+{
+ if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "autopaint-colorkey"))
+ g_object_set(G_OBJECT(m_videoSink), "autopaint-colorkey", enabled, NULL);
+}
+
+int QGstreamerVideoWindow::brightness() const
+{
+ int brightness = 0;
+
+ if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "brightness"))
+ g_object_get(G_OBJECT(m_videoSink), "brightness", &brightness, NULL);
+
+ return brightness / 10;
+}
+
+void QGstreamerVideoWindow::setBrightness(int brightness)
+{
+ if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "brightness")) {
+ g_object_set(G_OBJECT(m_videoSink), "brightness", brightness * 10, NULL);
+
+ emit brightnessChanged(brightness);
+ }
+}
+
+int QGstreamerVideoWindow::contrast() const
+{
+ int contrast = 0;
+
+ if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "contrast"))
+ g_object_get(G_OBJECT(m_videoSink), "contrast", &contrast, NULL);
+
+ return contrast / 10;
+}
+
+void QGstreamerVideoWindow::setContrast(int contrast)
+{
+ if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "contrast")) {
+ g_object_set(G_OBJECT(m_videoSink), "contrast", contrast * 10, NULL);
+
+ emit contrastChanged(contrast);
+ }
+}
+
+int QGstreamerVideoWindow::hue() const
+{
+ int hue = 0;
+
+ if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "hue"))
+ g_object_get(G_OBJECT(m_videoSink), "hue", &hue, NULL);
+
+ return hue / 10;
+}
+
+void QGstreamerVideoWindow::setHue(int hue)
+{
+ if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "hue")) {
+ g_object_set(G_OBJECT(m_videoSink), "hue", hue * 10, NULL);
+
+ emit hueChanged(hue);
+ }
+}
+
+int QGstreamerVideoWindow::saturation() const
+{
+ int saturation = 0;
+
+ if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "saturation"))
+ g_object_get(G_OBJECT(m_videoSink), "saturation", &saturation, NULL);
+
+ return saturation / 10;
+}
+
+void QGstreamerVideoWindow::setSaturation(int saturation)
+{
+ if (m_videoSink && g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "saturation")) {
+ g_object_set(G_OBJECT(m_videoSink), "saturation", saturation * 10, NULL);
+
+ emit saturationChanged(saturation);
+ }
+}
+
+bool QGstreamerVideoWindow::isFullScreen() const
+{
+ return m_fullScreen;
+}
+
+void QGstreamerVideoWindow::setFullScreen(bool fullScreen)
+{
+ emit fullScreenChanged(m_fullScreen = fullScreen);
+}
+
+QSize QGstreamerVideoWindow::nativeSize() const
+{
+ return m_nativeSize;
+}
+
+void QGstreamerVideoWindow::padBufferProbe(GstPad *pad, GstBuffer * /* buffer */, gpointer user_data)
+{
+ QGstreamerVideoWindow *control = reinterpret_cast<QGstreamerVideoWindow*>(user_data);
+ QMetaObject::invokeMethod(control, "updateNativeVideoSize", Qt::QueuedConnection);
+ gst_pad_remove_buffer_probe(pad, control->m_bufferProbeId);
+}
+
+void QGstreamerVideoWindow::updateNativeVideoSize()
+{
+ const QSize oldSize = m_nativeSize;
+ m_nativeSize = QSize();
+
+ if (m_videoSink) {
+ //find video native size to update video widget size hint
+ GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
+ GstCaps *caps = gst_pad_get_negotiated_caps(pad);
+
+ if (caps) {
+ m_nativeSize = QGstUtils::capsCorrectedResolution(caps);
+ gst_caps_unref(caps);
+ }
+ }
+
+ if (m_nativeSize != oldSize)
+ emit nativeSizeChanged();
+}
+
+GstElement *QGstreamerVideoWindow::videoSink()
+{
+ return m_videoSink;
+}
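
processSyncMessage() above expects to be called for the "prepare-xwindow-id" element message, which an xoverlay-capable sink posts synchronously from the streaming thread just before it would create its own window; answering it with gst_x_overlay_set_xwindow_id() makes the sink render into the application's window instead. A minimal sketch of the bus sync handler that typically feeds such a method, assuming GStreamer 0.10 and a window id supplied by the caller (illustrative, not part of this patch):

    #include <gst/gst.h>
    #include <gst/interfaces/xoverlay.h>

    static GstBusSyncReply busSyncHandler(GstBus *bus, GstMessage *msg, gpointer data)
    {
        (void)bus;

        if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ELEMENT
                && gst_structure_has_name(msg->structure, "prepare-xwindow-id")) {
            const gulong windowId = *reinterpret_cast<gulong *>(data);
            gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(GST_MESSAGE_SRC(msg)), windowId);
            gst_message_unref(msg);
            return GST_BUS_DROP; // handled, don't forward to the async bus watch
        }
        return GST_BUS_PASS;
    }

    // installed once on the pipeline's bus:
    //   gst_bus_set_sync_handler(gst_element_get_bus(pipeline), busSyncHandler, &windowId);
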
diff --git a/src/gsttools/qgstutils.cpp b/src/gsttools/qgstutils.cpp
index 03c31fbdf..b29ca367c 100644
--- a/src/gsttools/qgstutils.cpp
+++ b/src/gsttools/qgstutils.cpp
@@ -45,6 +45,8 @@
#include <QtCore/qbytearray.h>
#include <QtCore/qvariant.h>
#include <QtCore/qsize.h>
+#include <QtCore/qset.h>
+#include <QtCore/qstringlist.h>
#include <qaudioformat.h>
QT_BEGIN_NAMESPACE
@@ -313,4 +315,90 @@ GstCaps *QGstUtils::capsForAudioFormat(QAudioFormat format)
return caps;
}
+void QGstUtils::initializeGst()
+{
+ static bool initialized = false;
+ if (!initialized) {
+ initialized = true;
+ gst_init(NULL, NULL);
+ }
+}
+
+namespace {
+ const char* getCodecAlias(const QString &codec)
+ {
+ if (codec.startsWith("avc1."))
+ return "video/x-h264";
+
+ if (codec.startsWith("mp4a."))
+ return "audio/mpeg4";
+
+ if (codec.startsWith("mp4v.20."))
+ return "video/mpeg4";
+
+ if (codec == "samr")
+ return "audio/amr";
+
+ return 0;
+ }
+
+ const char* getMimeTypeAlias(const QString &mimeType)
+ {
+ if (mimeType == "video/mp4")
+ return "video/mpeg4";
+
+ if (mimeType == "audio/mp4")
+ return "audio/mpeg4";
+
+ if (mimeType == "video/ogg"
+ || mimeType == "audio/ogg")
+ return "application/ogg";
+
+ return 0;
+ }
+}
+
+QtMultimedia::SupportEstimate QGstUtils::hasSupport(const QString &mimeType,
+ const QStringList &codecs,
+ const QSet<QString> &supportedMimeTypeSet)
+{
+ if (supportedMimeTypeSet.isEmpty())
+ return QtMultimedia::NotSupported;
+
+ QString mimeTypeLowcase = mimeType.toLower();
+ bool containsMimeType = supportedMimeTypeSet.contains(mimeTypeLowcase);
+ if (!containsMimeType) {
+ const char* mimeTypeAlias = getMimeTypeAlias(mimeTypeLowcase);
+ containsMimeType = supportedMimeTypeSet.contains(mimeTypeAlias);
+ if (!containsMimeType) {
+ containsMimeType = supportedMimeTypeSet.contains("video/" + mimeTypeLowcase)
+ || supportedMimeTypeSet.contains("video/x-" + mimeTypeLowcase)
+ || supportedMimeTypeSet.contains("audio/" + mimeTypeLowcase)
+ || supportedMimeTypeSet.contains("audio/x-" + mimeTypeLowcase);
+ }
+ }
+
+ int supportedCodecCount = 0;
+ foreach (const QString &codec, codecs) {
+ QString codecLowcase = codec.toLower();
+ const char* codecAlias = getCodecAlias(codecLowcase);
+ if (codecAlias) {
+ if (supportedMimeTypeSet.contains(codecAlias))
+ supportedCodecCount++;
+ } else if (supportedMimeTypeSet.contains("video/" + codecLowcase)
+ || supportedMimeTypeSet.contains("video/x-" + codecLowcase)
+ || supportedMimeTypeSet.contains("audio/" + codecLowcase)
+ || supportedMimeTypeSet.contains("audio/x-" + codecLowcase)) {
+ supportedCodecCount++;
+ }
+ }
+ if (supportedCodecCount > 0 && supportedCodecCount == codecs.size())
+ return QtMultimedia::ProbablySupported;
+
+ if (supportedCodecCount == 0 && !containsMimeType)
+ return QtMultimedia::NotSupported;
+
+ return QtMultimedia::MaybeSupported;
+}
+
QT_END_NAMESPACE
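
hasSupport() first matches the container MIME type (directly, via the alias table, or with the video/, video/x-, audio/ and audio/x- prefixes) and then counts how many of the requested codecs map onto the supported set; all codecs matching gives ProbablySupported, a matching container with unrecognised codecs gives MaybeSupported, and nothing matching gives NotSupported. A usage sketch, where the supported set is illustrative and would normally come from probing the installed GStreamer decoders (e.g. via QGstCodecsInfo):

    #include <QSet>
    #include <QString>
    #include <QStringList>

    QSet<QString> supported;
    supported << "video/mpeg4" << "video/x-h264" << "audio/mpeg4";

    // "video/mp4" is aliased to "video/mpeg4", "avc1.*" to "video/x-h264" and
    // "mp4a.*" to "audio/mpeg4", so both codecs match and the estimate is
    // QtMultimedia::ProbablySupported.
    QtMultimedia::SupportEstimate estimate =
            QGstUtils::hasSupport("video/mp4",
                                  QStringList() << "avc1.42E01E" << "mp4a.40.2",
                                  supported);
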
diff --git a/src/gsttools/qx11videosurface.cpp b/src/gsttools/qx11videosurface.cpp
new file mode 100644
index 000000000..232041b1d
--- /dev/null
+++ b/src/gsttools/qx11videosurface.cpp
@@ -0,0 +1,534 @@
+/****************************************************************************
+**
+** Copyright (C) 2012 Nokia Corporation and/or its subsidiary(-ies).
+** Contact: http://www.qt-project.org/
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** GNU Lesser General Public License Usage
+** This file may be used under the terms of the GNU Lesser General Public
+** License version 2.1 as published by the Free Software Foundation and
+** appearing in the file LICENSE.LGPL included in the packaging of this
+** file. Please review the following information to ensure the GNU Lesser
+** General Public License version 2.1 requirements will be met:
+** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
+**
+** In addition, as a special exception, Nokia gives you certain additional
+** rights. These rights are described in the Nokia Qt LGPL Exception
+** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU General
+** Public License version 3.0 as published by the Free Software Foundation
+** and appearing in the file LICENSE.GPL included in the packaging of this
+** file. Please review the following information to ensure the GNU General
+** Public License version 3.0 requirements will be met:
+** http://www.gnu.org/copyleft/gpl.html.
+**
+** Other Usage
+** Alternatively, this file may be used in accordance with the terms and
+** conditions contained in a signed written agreement between you and Nokia.
+**
+**
+**
+**
+**
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include <QtCore/qvariant.h>
+#include <QtCore/qdebug.h>
+#include <QtGui/qguiapplication.h>
+#include <QtGui/qplatformnativeinterface_qpa.h>
+#include <qvideosurfaceformat.h>
+
+#include "qx11videosurface_p.h"
+
+Q_DECLARE_METATYPE(XvImage*);
+
+struct XvFormatRgb
+{
+ QVideoFrame::PixelFormat pixelFormat;
+ int bits_per_pixel;
+ int format;
+ int num_planes;
+
+ int depth;
+ unsigned int red_mask;
+ unsigned int green_mask;
+ unsigned int blue_mask;
+
+};
+
+bool operator ==(const XvImageFormatValues &format, const XvFormatRgb &rgb)
+{
+ return format.type == XvRGB
+ && format.bits_per_pixel == rgb.bits_per_pixel
+ && format.format == rgb.format
+ && format.num_planes == rgb.num_planes
+ && format.depth == rgb.depth
+ && format.red_mask == rgb.red_mask
+ && format.blue_mask == rgb.blue_mask;
+}
+
+static const XvFormatRgb qt_xvRgbLookup[] =
+{
+ { QVideoFrame::Format_ARGB32, 32, XvPacked, 1, 32, 0x00FF0000, 0x0000FF00, 0x000000FF },
+ { QVideoFrame::Format_RGB32 , 32, XvPacked, 1, 24, 0x00FF0000, 0x0000FF00, 0x000000FF },
+ { QVideoFrame::Format_RGB24 , 24, XvPacked, 1, 24, 0x00FF0000, 0x0000FF00, 0x000000FF },
+ { QVideoFrame::Format_RGB565, 16, XvPacked, 1, 16, 0x0000F800, 0x000007E0, 0x0000001F },
+ { QVideoFrame::Format_BGRA32, 32, XvPacked, 1, 32, 0xFF000000, 0x00FF0000, 0x0000FF00 },
+ { QVideoFrame::Format_BGR32 , 32, XvPacked, 1, 24, 0x00FF0000, 0x0000FF00, 0x000000FF },
+ { QVideoFrame::Format_BGR24 , 24, XvPacked, 1, 24, 0x00FF0000, 0x0000FF00, 0x000000FF },
+ { QVideoFrame::Format_BGR565, 16, XvPacked, 1, 16, 0x0000F800, 0x000007E0, 0x0000001F }
+};
+
+struct XvFormatYuv
+{
+ QVideoFrame::PixelFormat pixelFormat;
+ int bits_per_pixel;
+ int format;
+ int num_planes;
+
+ unsigned int y_sample_bits;
+ unsigned int u_sample_bits;
+ unsigned int v_sample_bits;
+ unsigned int horz_y_period;
+ unsigned int horz_u_period;
+ unsigned int horz_v_period;
+ unsigned int vert_y_period;
+ unsigned int vert_u_period;
+ unsigned int vert_v_period;
+ char component_order[32];
+};
+
+bool operator ==(const XvImageFormatValues &format, const XvFormatYuv &yuv)
+{
+ return format.type == XvYUV
+ && format.bits_per_pixel == yuv.bits_per_pixel
+ && format.format == yuv.format
+ && format.num_planes == yuv.num_planes
+ && format.y_sample_bits == yuv.y_sample_bits
+ && format.u_sample_bits == yuv.u_sample_bits
+ && format.v_sample_bits == yuv.v_sample_bits
+ && format.horz_y_period == yuv.horz_y_period
+ && format.horz_u_period == yuv.horz_u_period
+ && format.horz_v_period == yuv.horz_v_period
+ && format.horz_y_period == yuv.vert_y_period
+ && format.vert_u_period == yuv.vert_u_period
+ && format.vert_v_period == yuv.vert_v_period
+ && qstrncmp(format.component_order, yuv.component_order, 32) == 0;
+}
+
+static const XvFormatYuv qt_xvYuvLookup[] =
+{
+ { QVideoFrame::Format_YUV444 , 24, XvPacked, 1, 8, 8, 8, 1, 1, 1, 1, 1, 1, "YUV" },
+ { QVideoFrame::Format_YUV420P, 12, XvPlanar, 3, 8, 8, 8, 1, 2, 2, 1, 2, 2, "YUV" },
+ { QVideoFrame::Format_YV12 , 12, XvPlanar, 3, 8, 8, 8, 1, 2, 2, 1, 2, 2, "YVU" },
+ { QVideoFrame::Format_UYVY , 16, XvPacked, 1, 8, 8, 8, 1, 2, 2, 1, 1, 1, "UYVY" },
+ { QVideoFrame::Format_YUYV , 16, XvPacked, 1, 8, 8, 8, 1, 2, 2, 1, 1, 1, "YUY2" },
+ { QVideoFrame::Format_YUYV , 16, XvPacked, 1, 8, 8, 8, 1, 2, 2, 1, 1, 1, "YUYV" },
+ { QVideoFrame::Format_NV12 , 12, XvPlanar, 2, 8, 8, 8, 1, 2, 2, 1, 2, 2, "YUV" },
+ { QVideoFrame::Format_NV12 , 12, XvPlanar, 2, 8, 8, 8, 1, 2, 2, 1, 2, 2, "YVU" },
+ { QVideoFrame::Format_Y8 , 8 , XvPlanar, 1, 8, 0, 0, 1, 0, 0, 1, 0, 0, "Y" }
+};
+
+QX11VideoSurface::QX11VideoSurface(QObject *parent)
+ : QAbstractVideoSurface(parent)
+ , m_winId(0)
+ , m_portId(0)
+ , m_gc(0)
+ , m_image(0)
+{
+}
+
+QX11VideoSurface::~QX11VideoSurface()
+{
+ if (m_gc)
+ XFreeGC(display(), m_gc);
+
+ if (m_portId != 0)
+ XvUngrabPort(display(), m_portId, 0);
+}
+
+WId QX11VideoSurface::winId() const
+{
+ return m_winId;
+}
+
+void QX11VideoSurface::setWinId(WId id)
+{
+ //qDebug() << "setWinID:" << id;
+
+ if (id == m_winId)
+ return;
+
+ if (m_image)
+ XFree(m_image);
+
+ if (m_gc) {
+ XFreeGC(display(), m_gc);
+ m_gc = 0;
+ }
+
+ if (m_portId != 0)
+ XvUngrabPort(display(), m_portId, 0);
+
+ m_supportedPixelFormats.clear();
+ m_formatIds.clear();
+
+ m_winId = id;
+
+ if (m_winId && findPort()) {
+ querySupportedFormats();
+
+ m_gc = XCreateGC(display(), m_winId, 0, 0);
+
+ if (m_image) {
+ m_image = 0;
+
+ if (!start(surfaceFormat())) {
+ QAbstractVideoSurface::stop();
+ qWarning() << "Failed to start video surface with format" << surfaceFormat();
+ }
+ }
+ } else {
+ qWarning() << "Failed to find XVideo port";
+ if (m_image) {
+ m_image = 0;
+
+ QAbstractVideoSurface::stop();
+ }
+ }
+
+ emit supportedFormatsChanged();
+}
+
+QRect QX11VideoSurface::displayRect() const
+{
+ return m_displayRect;
+}
+
+void QX11VideoSurface::setDisplayRect(const QRect &rect)
+{
+ m_displayRect = rect;
+}
+
+QRect QX11VideoSurface::viewport() const
+{
+ return m_viewport;
+}
+
+void QX11VideoSurface::setViewport(const QRect &rect)
+{
+ m_viewport = rect;
+}
+
+int QX11VideoSurface::brightness() const
+{
+ return getAttribute("XV_BRIGHTNESS", m_brightnessRange.first, m_brightnessRange.second);
+}
+
+void QX11VideoSurface::setBrightness(int brightness)
+{
+ setAttribute("XV_BRIGHTNESS", brightness, m_brightnessRange.first, m_brightnessRange.second);
+}
+
+int QX11VideoSurface::contrast() const
+{
+ return getAttribute("XV_CONTRAST", m_contrastRange.first, m_contrastRange.second);
+}
+
+void QX11VideoSurface::setContrast(int contrast)
+{
+ setAttribute("XV_CONTRAST", contrast, m_contrastRange.first, m_contrastRange.second);
+}
+
+int QX11VideoSurface::hue() const
+{
+ return getAttribute("XV_HUE", m_hueRange.first, m_hueRange.second);
+}
+
+void QX11VideoSurface::setHue(int hue)
+{
+ setAttribute("XV_HUE", hue, m_hueRange.first, m_hueRange.second);
+}
+
+int QX11VideoSurface::saturation() const
+{
+ return getAttribute("XV_SATURATION", m_saturationRange.first, m_saturationRange.second);
+}
+
+void QX11VideoSurface::setSaturation(int saturation)
+{
+ setAttribute("XV_SATURATION", saturation, m_saturationRange.first, m_saturationRange.second);
+}
+
+int QX11VideoSurface::getAttribute(const char *attribute, int minimum, int maximum) const
+{
+ if (m_portId != 0) {
+ Display *disp = display();
+
+ Atom atom = XInternAtom(disp, attribute, True);
+
+ int value = 0;
+
+ XvGetPortAttribute(disp, m_portId, atom, &value);
+
+ return redistribute(value, minimum, maximum, -100, 100);
+ } else {
+ return 0;
+ }
+}
+
+void QX11VideoSurface::setAttribute(const char *attribute, int value, int minimum, int maximum)
+{
+ if (m_portId != 0) {
+ Display *disp = display();
+
+ Atom atom = XInternAtom(disp, attribute, True);
+
+ XvSetPortAttribute(
+ disp, m_portId, atom, redistribute(value, -100, 100, minimum, maximum));
+ }
+}
+
+int QX11VideoSurface::redistribute(
+ int value, int fromLower, int fromUpper, int toLower, int toUpper)
+{
+ return fromUpper != fromLower
+ ? ((value - fromLower) * (toUpper - toLower) / (fromUpper - fromLower)) + toLower
+ : 0;
+}
+
+QList<QVideoFrame::PixelFormat> QX11VideoSurface::supportedPixelFormats(
+ QAbstractVideoBuffer::HandleType handleType) const
+{
+ return handleType == QAbstractVideoBuffer::NoHandle || handleType == QAbstractVideoBuffer::XvShmImageHandle
+ ? m_supportedPixelFormats
+ : QList<QVideoFrame::PixelFormat>();
+}
+
+bool QX11VideoSurface::start(const QVideoSurfaceFormat &format)
+{
+ if (m_image)
+ XFree(m_image);
+
+ int xvFormatId = 0;
+ for (int i = 0; i < m_supportedPixelFormats.count(); ++i) {
+ if (m_supportedPixelFormats.at(i) == format.pixelFormat()) {
+ xvFormatId = m_formatIds.at(i);
+ break;
+ }
+ }
+
+ if (xvFormatId == 0) {
+ setError(UnsupportedFormatError);
+ } else {
+ XvImage *image = XvCreateImage(
+ display(),
+ m_portId,
+ xvFormatId,
+ 0,
+ format.frameWidth(),
+ format.frameHeight());
+
+ if (!image) {
+ setError(ResourceError);
+ } else {
+ m_viewport = format.viewport();
+ m_image = image;
+
+ QVideoSurfaceFormat newFormat = format;
+ newFormat.setProperty("portId", QVariant(quint64(m_portId)));
+ newFormat.setProperty("xvFormatId", xvFormatId);
+ newFormat.setProperty("dataSize", image->data_size);
+
+ return QAbstractVideoSurface::start(newFormat);
+ }
+ }
+
+ if (m_image) {
+ m_image = 0;
+
+ QAbstractVideoSurface::stop();
+ }
+
+ return false;
+}
+
+void QX11VideoSurface::stop()
+{
+ if (m_image) {
+ XFree(m_image);
+ m_image = 0;
+
+ QAbstractVideoSurface::stop();
+ }
+}
+
+bool QX11VideoSurface::present(const QVideoFrame &frame)
+{
+ if (!m_image) {
+ setError(StoppedError);
+ return false;
+ } else if (m_image->width != frame.width() || m_image->height != frame.height()) {
+ setError(IncorrectFormatError);
+ return false;
+ } else {
+ QVideoFrame frameCopy(frame);
+
+ if (!frameCopy.map(QAbstractVideoBuffer::ReadOnly)) {
+ setError(IncorrectFormatError);
+ return false;
+ } else {
+ bool presented = false;
+
+ if (frame.handleType() != QAbstractVideoBuffer::XvShmImageHandle &&
+ m_image->data_size > frame.mappedBytes()) {
+ qWarning("Insufficient frame buffer size");
+ setError(IncorrectFormatError);
+ } else if (frame.handleType() != QAbstractVideoBuffer::XvShmImageHandle &&
+ m_image->num_planes > 0 &&
+ m_image->pitches[0] != frame.bytesPerLine()) {
+ qWarning("Incompatible frame pitches");
+ setError(IncorrectFormatError);
+ } else {
+ if (frame.handleType() != QAbstractVideoBuffer::XvShmImageHandle) {
+ m_image->data = reinterpret_cast<char *>(frameCopy.bits());
+
+ //qDebug() << "copy frame";
+ XvPutImage(
+ display(),
+ m_portId,
+ m_winId,
+ m_gc,
+ m_image,
+ m_viewport.x(),
+ m_viewport.y(),
+ m_viewport.width(),
+ m_viewport.height(),
+ m_displayRect.x(),
+ m_displayRect.y(),
+ m_displayRect.width(),
+ m_displayRect.height());
+
+ m_image->data = 0;
+ } else {
+ XvImage *img = frame.handle().value<XvImage*>();
+
+ //qDebug() << "render directly";
+ if (img)
+ XvShmPutImage(
+ display(),
+ m_portId,
+ m_winId,
+ m_gc,
+ img,
+ m_viewport.x(),
+ m_viewport.y(),
+ m_viewport.width(),
+ m_viewport.height(),
+ m_displayRect.x(),
+ m_displayRect.y(),
+ m_displayRect.width(),
+ m_displayRect.height(),
+ false);
+ }
+
+ presented = true;
+ }
+
+ frameCopy.unmap();
+
+ return presented;
+ }
+ }
+}
+
+Display *QX11VideoSurface::display() const
+{
+ QWindow *window = QGuiApplication::focusWindow();
+ Display *display = (Display *)QGuiApplication::platformNativeInterface()->nativeResourceForWindow("Display", window);
+
+ return display;
+}
+
+bool QX11VideoSurface::findPort()
+{
+ unsigned int count = 0;
+ XvAdaptorInfo *adaptors = 0;
+ bool portFound = false;
+
+ if (XvQueryAdaptors(display(), m_winId, &count, &adaptors) == Success) {
+ for (unsigned int i = 0; i < count && !portFound; ++i) {
+ if (adaptors[i].type & XvImageMask) {
+ m_portId = adaptors[i].base_id;
+
+ for (unsigned int j = 0; j < adaptors[i].num_ports && !portFound; ++j, ++m_portId)
+ portFound = XvGrabPort(display(), m_portId, 0) == Success;
+ }
+ }
+ XvFreeAdaptorInfo(adaptors);
+ }
+
+ return portFound;
+}
+
+void QX11VideoSurface::querySupportedFormats()
+{
+ int count = 0;
+ if (XvImageFormatValues *imageFormats = XvListImageFormats(
+ display(), m_portId, &count)) {
+ const int rgbCount = sizeof(qt_xvRgbLookup) / sizeof(XvFormatRgb);
+ const int yuvCount = sizeof(qt_xvYuvLookup) / sizeof(XvFormatYuv);
+
+ for (int i = 0; i < count; ++i) {
+ switch (imageFormats[i].type) {
+ case XvRGB:
+ for (int j = 0; j < rgbCount; ++j) {
+ if (imageFormats[i] == qt_xvRgbLookup[j]) {
+ m_supportedPixelFormats.append(qt_xvRgbLookup[j].pixelFormat);
+ m_formatIds.append(imageFormats[i].id);
+ break;
+ }
+ }
+ break;
+ case XvYUV:
+ for (int j = 0; j < yuvCount; ++j) {
+ if (imageFormats[i] == qt_xvYuvLookup[j]) {
+ m_supportedPixelFormats.append(qt_xvYuvLookup[j].pixelFormat);
+ m_formatIds.append(imageFormats[i].id);
+ break;
+ }
+ }
+ break;
+ }
+ }
+ XFree(imageFormats);
+ }
+
+ m_brightnessRange = qMakePair(0, 0);
+ m_contrastRange = qMakePair(0, 0);
+ m_hueRange = qMakePair(0, 0);
+ m_saturationRange = qMakePair(0, 0);
+
+ if (XvAttribute *attributes = XvQueryPortAttributes(display(), m_portId, &count)) {
+ for (int i = 0; i < count; ++i) {
+ if (qstrcmp(attributes[i].name, "XV_BRIGHTNESS") == 0)
+ m_brightnessRange = qMakePair(attributes[i].min_value, attributes[i].max_value);
+ else if (qstrcmp(attributes[i].name, "XV_CONTRAST") == 0)
+ m_contrastRange = qMakePair(attributes[i].min_value, attributes[i].max_value);
+ else if (qstrcmp(attributes[i].name, "XV_HUE") == 0)
+ m_hueRange = qMakePair(attributes[i].min_value, attributes[i].max_value);
+ else if (qstrcmp(attributes[i].name, "XV_SATURATION") == 0)
+ m_saturationRange = qMakePair(attributes[i].min_value, attributes[i].max_value);
+ }
+
+ XFree(attributes);
+ }
+}
+
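
The attribute getters and setters above funnel through redistribute(), which linearly rescales between the Xv port's native attribute range and the -100..100 range exposed by the Qt colour controls. A worked example with an illustrative port range (the real bounds are whatever XvQueryPortAttributes() reports):

    // redistribute(value, fromLower, fromUpper, toLower, toUpper)
    //   = (value - fromLower) * (toUpper - toLower) / (fromUpper - fromLower) + toLower
    //
    // With an XV_BRIGHTNESS range of [-1000, 1000]:
    //
    //   redistribute(500, -1000, 1000, -100, 100)
    //     = (500 - (-1000)) * (100 - (-100)) / (1000 - (-1000)) + (-100)
    //     = 1500 * 200 / 2000 - 100
    //     = 50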