-rw-r--r--  src/gsttools/gsttools.pro | 28
-rw-r--r--  src/gsttools/gstvideoconnector.c | 471
-rw-r--r--  src/gsttools/gstvideoconnector_p.h | 98
-rw-r--r--  src/gsttools/qgstappsrc.cpp | 17
-rw-r--r--  src/gsttools/qgstbufferpoolinterface.cpp | 51
-rw-r--r--  src/gsttools/qgstbufferpoolinterface_p.h | 117
-rw-r--r--  src/gsttools/qgstcodecsinfo.cpp | 44
-rw-r--r--  src/gsttools/qgstreameraudioprobecontrol.cpp | 4
-rw-r--r--  src/gsttools/qgstreamerbufferprobe.cpp | 47
-rw-r--r--  src/gsttools/qgstreamerbufferprobe_p.h | 5
-rw-r--r--  src/gsttools/qgstreamerbushelper.cpp | 12
-rw-r--r--  src/gsttools/qgstreamerplayersession.cpp | 251
-rw-r--r--  src/gsttools/qgstreamerplayersession_p.h | 7
-rw-r--r--  src/gsttools/qgstreamervideooverlay.cpp | 36
-rw-r--r--  src/gsttools/qgstreamervideoprobecontrol.cpp | 12
-rw-r--r--  src/gsttools/qgstreamervideoprobecontrol_p.h | 4
-rw-r--r--  src/gsttools/qgstreamervideorenderer.cpp | 4
-rw-r--r--  src/gsttools/qgstreamervideorenderer_p.h | 2
-rw-r--r--  src/gsttools/qgstutils.cpp | 518
-rw-r--r--  src/gsttools/qgstutils_p.h | 24
-rw-r--r--  src/gsttools/qgstvideobuffer.cpp | 38
-rw-r--r--  src/gsttools/qgstvideobuffer_p.h | 18
-rw-r--r--  src/gsttools/qgstvideorenderersink.cpp | 8
-rw-r--r--  src/gsttools/qvideosurfacegstsink.cpp | 712
-rw-r--r--  src/gsttools/qvideosurfacegstsink_p.h | 192
-rw-r--r--  src/multimedia/configure.json | 47
-rw-r--r--  src/plugins/gstreamer/audiodecoder/qgstreameraudiodecoderserviceplugin.cpp | 6
-rw-r--r--  src/plugins/gstreamer/audiodecoder/qgstreameraudiodecodersession.cpp | 15
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinexposure.cpp | 8
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinflash.cpp | 4
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinfocus.cpp | 24
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinfocus.h | 10
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinimagecapture.cpp | 49
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinimagecapture.h | 10
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinimageprocessing.cpp | 34
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinimageprocessing.h | 4
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinlocks.cpp | 15
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinlocks.h | 4
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinmetadata.cpp | 11
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinsession.cpp | 54
-rw-r--r--  src/plugins/gstreamer/mediacapture/qgstreamercaptureserviceplugin.cpp | 7
-rw-r--r--  src/plugins/gstreamer/mediacapture/qgstreamercapturesession.cpp | 12
-rw-r--r--  src/plugins/gstreamer/mediacapture/qgstreamercapturesession.h | 2
-rw-r--r--  src/plugins/gstreamer/mediaplayer/qgstreamermetadataprovider.cpp | 6
-rw-r--r--  src/plugins/gstreamer/mediaplayer/qgstreamerplayerserviceplugin.cpp | 7
-rw-r--r--  tests/auto/unit/qmediaplaylist/tst_qmediaplaylist.cpp | 1
46 files changed, 32 insertions(+), 3018 deletions(-)
diff --git a/src/gsttools/gsttools.pro b/src/gsttools/gsttools.pro
index fea2edca9..5271c396f 100644
--- a/src/gsttools/gsttools.pro
+++ b/src/gsttools/gsttools.pro
@@ -29,7 +29,9 @@ PRIVATE_HEADERS += \
qgstreamervideooverlay_p.h \
qgsttools_global_p.h \
qgstreamerplayersession_p.h \
- qgstreamerplayercontrol_p.h
+ qgstreamerplayercontrol_p.h \
+ qgstvideorendererplugin_p.h \
+ qgstvideorenderersink_p.h
SOURCES += \
qgstreamerbushelper.cpp \
@@ -47,7 +49,9 @@ SOURCES += \
qgstreamervideowindow.cpp \
qgstreamervideooverlay.cpp \
qgstreamerplayersession.cpp \
- qgstreamerplayercontrol.cpp
+ qgstreamerplayercontrol.cpp \
+ qgstvideorendererplugin.cpp \
+ qgstvideorenderersink.cpp
qtHaveModule(widgets) {
QT += multimediawidgets
@@ -59,26 +63,6 @@ qtHaveModule(widgets) {
qgstreamervideowidget.cpp
}
-qtConfig(gstreamer_0_10) {
- PRIVATE_HEADERS += \
- qgstbufferpoolinterface_p.h \
- qvideosurfacegstsink_p.h \
- gstvideoconnector_p.h
-
- SOURCES += \
- qgstbufferpoolinterface.cpp \
- qvideosurfacegstsink.cpp \
- gstvideoconnector.c
-} else {
- PRIVATE_HEADERS += \
- qgstvideorendererplugin_p.h \
- qgstvideorenderersink_p.h
-
- SOURCES += \
- qgstvideorendererplugin.cpp \
- qgstvideorenderersink.cpp
-}
-
qtConfig(gstreamer_gl): QMAKE_USE += gstreamer_gl
qtConfig(gstreamer_app) {
diff --git a/src/gsttools/gstvideoconnector.c b/src/gsttools/gstvideoconnector.c
deleted file mode 100644
index b85f5bdbe..000000000
--- a/src/gsttools/gstvideoconnector.c
+++ /dev/null
@@ -1,471 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2016 The Qt Company Ltd.
-** Contact: https://www.qt.io/licensing/
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see https://www.qt.io/terms-conditions. For further
-** information use the contact form at https://www.qt.io/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 3 as published by the Free Software
-** Foundation and appearing in the file LICENSE.LGPL3 included in the
-** packaging of this file. Please review the following information to
-** ensure the GNU Lesser General Public License version 3 requirements
-** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
-**
-** GNU General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU
-** General Public License version 2.0 or (at your option) the GNU General
-** Public license version 3 or any later version approved by the KDE Free
-** Qt Foundation. The licenses are as published by the Free Software
-** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
-** included in the packaging of this file. Please review the following
-** information to ensure the GNU General Public License requirements will
-** be met: https://www.gnu.org/licenses/gpl-2.0.html and
-** https://www.gnu.org/licenses/gpl-3.0.html.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-#include "gstvideoconnector_p.h"
-#include <unistd.h>
-
-/* signals */
-enum
-{
- SIGNAL_RESEND_NEW_SEGMENT,
- SIGNAL_CONNECTION_FAILED,
- LAST_SIGNAL
-};
-static guint gst_video_connector_signals[LAST_SIGNAL] = { 0 };
-
-
-GST_DEBUG_CATEGORY_STATIC (video_connector_debug);
-#define GST_CAT_DEFAULT video_connector_debug
-
-static GstStaticPadTemplate gst_video_connector_sink_factory =
-GST_STATIC_PAD_TEMPLATE ("sink",
- GST_PAD_SINK,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS_ANY);
-
-static GstStaticPadTemplate gst_video_connector_src_factory =
-GST_STATIC_PAD_TEMPLATE ("src",
- GST_PAD_SRC,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS_ANY);
-
-#define _do_init(bla) \
- GST_DEBUG_CATEGORY_INIT (video_connector_debug, \
- "video-connector", 0, "An identity like element for reconnecting video stream");
-
-GST_BOILERPLATE_FULL (GstVideoConnector, gst_video_connector, GstElement,
- GST_TYPE_ELEMENT, _do_init);
-
-static void gst_video_connector_dispose (GObject * object);
-static GstFlowReturn gst_video_connector_chain (GstPad * pad, GstBuffer * buf);
-static GstFlowReturn gst_video_connector_buffer_alloc (GstPad * pad,
- guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf);
-static GstStateChangeReturn gst_video_connector_change_state (GstElement *
- element, GstStateChange transition);
-static gboolean gst_video_connector_handle_sink_event (GstPad * pad,
- GstEvent * event);
-static gboolean gst_video_connector_new_buffer_probe(GstObject *pad, GstBuffer *buffer, guint * object);
-static void gst_video_connector_resend_new_segment(GstElement * element, gboolean emitFailedSignal);
-static gboolean gst_video_connector_setcaps (GstPad *pad, GstCaps *caps);
-static GstCaps *gst_video_connector_getcaps (GstPad * pad);
-static gboolean gst_video_connector_acceptcaps (GstPad * pad, GstCaps * caps);
-
-static void
-gst_video_connector_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_set_details_simple (element_class, "Video Connector",
- "Generic",
- "An identity like element used for reconnecting video stream",
- "Dmytro Poplavskiy <dmytro.poplavskiy@nokia.com>");
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_video_connector_sink_factory));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_video_connector_src_factory));
-}
-
-static void
-gst_video_connector_class_init (GstVideoConnectorClass * klass)
-{
- GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
-
- parent_class = g_type_class_peek_parent (klass);
-
- gobject_class->dispose = gst_video_connector_dispose;
- gstelement_class->change_state = gst_video_connector_change_state;
- klass->resend_new_segment = gst_video_connector_resend_new_segment;
-
- gst_video_connector_signals[SIGNAL_RESEND_NEW_SEGMENT] =
- g_signal_new ("resend-new-segment", G_TYPE_FROM_CLASS (klass),
- G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
- G_STRUCT_OFFSET (GstVideoConnectorClass, resend_new_segment), NULL, NULL,
- g_cclosure_marshal_VOID__BOOLEAN, G_TYPE_NONE, 1, G_TYPE_BOOLEAN);
-
- gst_video_connector_signals[SIGNAL_CONNECTION_FAILED] =
- g_signal_new ("connection-failed", G_TYPE_FROM_CLASS (klass),
- G_SIGNAL_RUN_LAST,
- 0, NULL, NULL,
- g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0);
-}
-
-static void
-gst_video_connector_init (GstVideoConnector *element,
- GstVideoConnectorClass *g_class)
-{
- (void) g_class;
- element->sinkpad =
- gst_pad_new_from_static_template (&gst_video_connector_sink_factory,
- "sink");
- gst_pad_set_chain_function(element->sinkpad,
- GST_DEBUG_FUNCPTR (gst_video_connector_chain));
- gst_pad_set_event_function(element->sinkpad,
- GST_DEBUG_FUNCPTR (gst_video_connector_handle_sink_event));
- gst_pad_set_bufferalloc_function(element->sinkpad,
- GST_DEBUG_FUNCPTR (gst_video_connector_buffer_alloc));
- gst_pad_set_setcaps_function(element->sinkpad,
- GST_DEBUG_FUNCPTR (gst_video_connector_setcaps));
- gst_pad_set_getcaps_function(element->sinkpad,
- GST_DEBUG_FUNCPTR(gst_video_connector_getcaps));
- gst_pad_set_acceptcaps_function(element->sinkpad,
- GST_DEBUG_FUNCPTR(gst_video_connector_acceptcaps));
-
- gst_element_add_pad (GST_ELEMENT (element), element->sinkpad);
-
- element->srcpad =
- gst_pad_new_from_static_template (&gst_video_connector_src_factory,
- "src");
- gst_pad_add_buffer_probe(element->srcpad,
- G_CALLBACK(gst_video_connector_new_buffer_probe), element);
- gst_element_add_pad (GST_ELEMENT (element), element->srcpad);
-
- element->relinked = FALSE;
- element->failedSignalEmited = FALSE;
- gst_segment_init (&element->segment, GST_FORMAT_TIME);
- element->latest_buffer = NULL;
-}
-
-static void
-gst_video_connector_reset (GstVideoConnector * element)
-{
- element->relinked = FALSE;
- element->failedSignalEmited = FALSE;
- if (element->latest_buffer != NULL) {
- gst_buffer_unref (element->latest_buffer);
- element->latest_buffer = NULL;
- }
- gst_segment_init (&element->segment, GST_FORMAT_UNDEFINED);
-}
-
-static void
-gst_video_connector_dispose (GObject * object)
-{
- GstVideoConnector *element = GST_VIDEO_CONNECTOR (object);
-
- gst_video_connector_reset (element);
-
- G_OBJECT_CLASS (parent_class)->dispose (object);
-}
-
-// "When this function returns anything else than GST_FLOW_OK,
-// the buffer allocation failed and buf does not contain valid data."
-static GstFlowReturn
-gst_video_connector_buffer_alloc (GstPad * pad, guint64 offset, guint size,
- GstCaps * caps, GstBuffer ** buf)
-{
- GstVideoConnector *element;
- GstFlowReturn res = GST_FLOW_OK;
- element = GST_VIDEO_CONNECTOR (GST_PAD_PARENT (pad));
-
- if (!buf)
- return GST_FLOW_ERROR;
- *buf = NULL;
-
- gboolean isFailed = FALSE;
- while (1) {
- GST_OBJECT_LOCK (element);
- gst_object_ref(element->srcpad);
- GST_OBJECT_UNLOCK (element);
-
- // Check if downstream element is in NULL state
- // and wait for up to 1 second for it to switch.
- GstPad *peerPad = gst_pad_get_peer(element->srcpad);
- if (peerPad) {
- GstElement *parent = gst_pad_get_parent_element(peerPad);
- gst_object_unref (peerPad);
- if (parent) {
- GstState state;
- GstState pending;
- int totalTimeout = 0;
- // This seems to sleep for about 10ms usually.
- while (totalTimeout < 1000000) {
- gst_element_get_state(parent, &state, &pending, 0);
- if (state != GST_STATE_NULL)
- break;
- usleep(5000);
- totalTimeout += 5000;
- }
-
- gst_object_unref (parent);
- if (state == GST_STATE_NULL) {
- GST_DEBUG_OBJECT (element, "Downstream element is in NULL state");
- // Downstream filter seems to be in the wrong state
- return GST_FLOW_UNEXPECTED;
- }
- }
- }
-
- res = gst_pad_alloc_buffer(element->srcpad, offset, size, caps, buf);
- gst_object_unref (element->srcpad);
-
- GST_DEBUG_OBJECT (element, "buffer alloc finished: %s", gst_flow_get_name (res));
-
- if (res == GST_FLOW_WRONG_STATE) {
- // Just in case downstream filter is still somehow in the wrong state.
- // Pipeline stalls if we report GST_FLOW_WRONG_STATE.
- return GST_FLOW_UNEXPECTED;
- }
-
- if (res >= GST_FLOW_OK || isFailed == TRUE)
- break;
-
- //if gst_pad_alloc_buffer failed, emit "connection-failed" signal
- //so colorspace transformation element can be inserted
- GST_INFO_OBJECT(element, "gst_video_connector_buffer_alloc failed, emit connection-failed signal");
- g_signal_emit(G_OBJECT(element), gst_video_connector_signals[SIGNAL_CONNECTION_FAILED], 0);
- isFailed = TRUE;
- }
-
- return res;
-}
-
-static gboolean
-gst_video_connector_setcaps (GstPad *pad, GstCaps *caps)
-{
- GstVideoConnector *element;
- element = GST_VIDEO_CONNECTOR (GST_PAD_PARENT (pad));
-
- /* forward-negotiate */
- gboolean res = gst_pad_set_caps(element->srcpad, caps);
-
- gchar * debugmsg = NULL;
- GST_DEBUG_OBJECT(element, "gst_video_connector_setcaps %s %i", debugmsg = gst_caps_to_string(caps), res);
- if (debugmsg)
- g_free(debugmsg);
-
- if (!res) {
- //if set_caps failed, emit "connection-failed" signal
- //so colorspace transformation element can be inserted
- GST_INFO_OBJECT(element, "gst_video_connector_setcaps failed, emit connection-failed signal");
- g_signal_emit(G_OBJECT(element), gst_video_connector_signals[SIGNAL_CONNECTION_FAILED], 0);
-
- return gst_pad_set_caps(element->srcpad, caps);
- }
-
- return TRUE;
-}
-
-static GstCaps *gst_video_connector_getcaps (GstPad * pad)
-{
- GstVideoConnector *element;
- element = GST_VIDEO_CONNECTOR (GST_PAD_PARENT (pad));
-
-#if (GST_VERSION_MICRO > 25)
- GstCaps *caps = gst_pad_peer_get_caps_reffed(element->srcpad);
-#else
- GstCaps *caps = gst_pad_peer_get_caps(element->srcpad);
-#endif
-
- if (!caps)
- caps = gst_caps_new_any();
-
- return caps;
-}
-
-static gboolean gst_video_connector_acceptcaps (GstPad * pad, GstCaps * caps)
-{
- GstVideoConnector *element;
- element = GST_VIDEO_CONNECTOR (GST_PAD_PARENT (pad));
-
- return gst_pad_peer_accept_caps(element->srcpad, caps);
-}
-
-static void
-gst_video_connector_resend_new_segment(GstElement * element, gboolean emitFailedSignal)
-{
- GST_INFO_OBJECT(element, "New segment requested, failed signal enabled: %i", emitFailedSignal);
- GstVideoConnector *connector = GST_VIDEO_CONNECTOR(element);
- connector->relinked = TRUE;
- if (emitFailedSignal)
- connector->failedSignalEmited = FALSE;
-}
-
-
-static gboolean gst_video_connector_new_buffer_probe(GstObject *pad, GstBuffer *buffer, guint * object)
-{
- (void) pad;
- (void) buffer;
-
- GstVideoConnector *element = GST_VIDEO_CONNECTOR (object);
-
- /*
- If relinking is requested, the current buffer should be rejected and
- the new segment + previous buffer should be pushed first
- */
-
- if (element->relinked)
- GST_LOG_OBJECT(element, "rejected buffer because of new segment request");
-
- return !element->relinked;
-}
-
-
-static GstFlowReturn
-gst_video_connector_chain (GstPad * pad, GstBuffer * buf)
-{
- GstFlowReturn res;
- GstVideoConnector *element;
-
- element = GST_VIDEO_CONNECTOR (gst_pad_get_parent (pad));
-
- do {
- /*
- Resend the segment message and last buffer to preroll the new sink.
- Sinks can be changed multiple times while paused,
- while loop allows to send the segment message and preroll
- all of them with the same buffer.
- */
- while (element->relinked) {
- element->relinked = FALSE;
-
- gint64 pos = element->segment.last_stop;
-
- if (element->latest_buffer && GST_BUFFER_TIMESTAMP_IS_VALID(element->latest_buffer)) {
- pos = GST_BUFFER_TIMESTAMP (element->latest_buffer);
- }
-
- //push a new segment and last buffer
- GstEvent *ev = gst_event_new_new_segment (TRUE,
- element->segment.rate,
- element->segment.format,
- pos, //start
- element->segment.stop,
- pos);
-
- GST_DEBUG_OBJECT (element, "Pushing new segment event");
- if (!gst_pad_push_event (element->srcpad, ev)) {
- GST_WARNING_OBJECT (element,
- "Newsegment handling failed in %" GST_PTR_FORMAT,
- element->srcpad);
- }
-
- if (element->latest_buffer) {
- GST_DEBUG_OBJECT (element, "Pushing latest buffer...");
- gst_buffer_ref(element->latest_buffer);
- gst_pad_push(element->srcpad, element->latest_buffer);
- }
- }
-
- gst_buffer_ref(buf);
-
- //it's possible video sink is changed during gst_pad_push blocked by
- //pad lock, in this case ( element->relinked == TRUE )
- //the buffer should be rejected by the buffer probe and
- //the new segment + prev buffer should be sent before
-
- GST_LOG_OBJECT (element, "Pushing buffer...");
- res = gst_pad_push (element->srcpad, buf);
- GST_LOG_OBJECT (element, "Pushed buffer: %s", gst_flow_get_name (res));
-
- //if gst_pad_push failed give the service another chance,
- //it may still work with the colorspace element added
- if (!element->failedSignalEmited && res == GST_FLOW_NOT_NEGOTIATED) {
- element->failedSignalEmited = TRUE;
- GST_INFO_OBJECT(element, "gst_pad_push failed, emit connection-failed signal");
- g_signal_emit(G_OBJECT(element), gst_video_connector_signals[SIGNAL_CONNECTION_FAILED], 0);
- }
-
- } while (element->relinked);
-
-
- if (element->latest_buffer) {
- gst_buffer_unref (element->latest_buffer);
- element->latest_buffer = NULL;
- }
-
- element->latest_buffer = gst_buffer_ref(buf);
-
- gst_buffer_unref(buf);
- gst_object_unref (element);
-
- return res;
-}
-
-static GstStateChangeReturn
-gst_video_connector_change_state (GstElement * element,
- GstStateChange transition)
-{
- GstVideoConnector *connector;
- GstStateChangeReturn result;
-
- connector = GST_VIDEO_CONNECTOR(element);
- result = GST_ELEMENT_CLASS (parent_class)->change_state(element, transition);
-
- switch (transition) {
- case GST_STATE_CHANGE_PAUSED_TO_READY:
- gst_video_connector_reset (connector);
- break;
- case GST_STATE_CHANGE_READY_TO_PAUSED:
- connector->relinked = FALSE;
- break;
- default:
- break;
- }
-
- return result;
-}
-
-static gboolean
-gst_video_connector_handle_sink_event (GstPad * pad, GstEvent * event)
-{
- if (GST_EVENT_TYPE (event) == GST_EVENT_NEWSEGMENT) {
- GstVideoConnector *element = GST_VIDEO_CONNECTOR (gst_pad_get_parent (pad));
-
- gboolean update;
- GstFormat format;
- gdouble rate, arate;
- gint64 start, stop, time;
-
- gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
- &start, &stop, &time);
-
- GST_LOG_OBJECT (element,
- "NEWSEGMENT update %d, rate %lf, applied rate %lf, "
- "format %d, " "%" G_GINT64_FORMAT " -- %" G_GINT64_FORMAT ", time %"
- G_GINT64_FORMAT, update, rate, arate, format, start, stop, time);
-
- gst_segment_set_newsegment_full (&element->segment, update,
- rate, arate, format, start, stop, time);
-
- gst_object_unref (element);
- }
-
- return gst_pad_event_default (pad, event);
-}
diff --git a/src/gsttools/gstvideoconnector_p.h b/src/gsttools/gstvideoconnector_p.h
deleted file mode 100644
index a38ca2e65..000000000
--- a/src/gsttools/gstvideoconnector_p.h
+++ /dev/null
@@ -1,98 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2016 The Qt Company Ltd.
-** Contact: https://www.qt.io/licensing/
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see https://www.qt.io/terms-conditions. For further
-** information use the contact form at https://www.qt.io/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 3 as published by the Free Software
-** Foundation and appearing in the file LICENSE.LGPL3 included in the
-** packaging of this file. Please review the following information to
-** ensure the GNU Lesser General Public License version 3 requirements
-** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
-**
-** GNU General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU
-** General Public License version 2.0 or (at your option) the GNU General
-** Public license version 3 or any later version approved by the KDE Free
-** Qt Foundation. The licenses are as published by the Free Software
-** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
-** included in the packaging of this file. Please review the following
-** information to ensure the GNU General Public License requirements will
-** be met: https://www.gnu.org/licenses/gpl-2.0.html and
-** https://www.gnu.org/licenses/gpl-3.0.html.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-#ifndef QGSTVIDEOCONNECTOR_H
-#define QGSTVIDEOCONNECTOR_H
-
-//
-// W A R N I N G
-// -------------
-//
-// This file is not part of the Qt API. It exists purely as an
-// implementation detail. This header file may change from version to
-// version without notice, or even be removed.
-//
-// We mean it.
-//
-
-#include <private/qgsttools_global_p.h>
-
-#include <gst/gst.h>
-
-G_BEGIN_DECLS
-
-#define GST_TYPE_VIDEO_CONNECTOR \
- (gst_video_connector_get_type())
-#define GST_VIDEO_CONNECTOR(obj) \
- (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VIDEO_CONNECTOR, GstVideoConnector))
-#define GST_VIDEO_CONNECTOR_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_VIDEO_CONNECTOR, GstVideoConnectorClass))
-#define GST_IS_VIDEO_CONNECTOR(obj) \
- (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VIDEO_CONNECTOR))
-#define GST_IS_VIDEO_CONNECTOR_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_VIDEO_CONNECTOR))
-
-typedef struct _GstVideoConnector GstVideoConnector;
-typedef struct _GstVideoConnectorClass GstVideoConnectorClass;
-
-struct Q_GSTTOOLS_EXPORT _GstVideoConnector {
- GstElement element;
-
- GstPad *srcpad;
- GstPad *sinkpad;
-
- gboolean relinked;
- gboolean failedSignalEmited;
- GstSegment segment;
- GstBuffer *latest_buffer;
-};
-
-struct Q_GSTTOOLS_EXPORT _GstVideoConnectorClass {
- GstElementClass parent_class;
-
- /* action signal to resend new segment */
- void (*resend_new_segment) (GstElement * element, gboolean emitFailedSignal);
-};
-
-GType Q_GSTTOOLS_EXPORT gst_video_connector_get_type (void);
-
-G_END_DECLS
-
-#endif
-
diff --git a/src/gsttools/qgstappsrc.cpp b/src/gsttools/qgstappsrc.cpp
index f6ecd48be..e8ec63bb9 100644
--- a/src/gsttools/qgstappsrc.cpp
+++ b/src/gsttools/qgstappsrc.cpp
@@ -150,21 +150,15 @@ void QGstAppSrc::pushDataToAppSrc()
if (size) {
GstBuffer* buffer = gst_buffer_new_and_alloc(size);
-#if GST_CHECK_VERSION(1,0,0)
GstMapInfo mapInfo;
gst_buffer_map(buffer, &mapInfo, GST_MAP_WRITE);
void* bufferData = mapInfo.data;
-#else
- void* bufferData = GST_BUFFER_DATA(buffer);
-#endif
buffer->offset = m_stream->pos();
qint64 bytesRead = m_stream->read((char*)bufferData, size);
buffer->offset_end = buffer->offset + bytesRead - 1;
-#if GST_CHECK_VERSION(1,0,0)
gst_buffer_unmap(buffer, &mapInfo);
-#endif
if (bytesRead > 0) {
m_dataRequested = false;
@@ -172,20 +166,9 @@ void QGstAppSrc::pushDataToAppSrc()
GstFlowReturn ret = gst_app_src_push_buffer (GST_APP_SRC (element()), buffer);
if (ret == GST_FLOW_ERROR) {
qWarning()<<"appsrc: push buffer error";
-#if GST_CHECK_VERSION(1,0,0)
} else if (ret == GST_FLOW_FLUSHING) {
qWarning()<<"appsrc: push buffer wrong state";
}
-#else
- } else if (ret == GST_FLOW_WRONG_STATE) {
- qWarning()<<"appsrc: push buffer wrong state";
- }
-#endif
-#if GST_VERSION_MAJOR < 1
- else if (ret == GST_FLOW_RESEND) {
- qWarning()<<"appsrc: push buffer resend";
- }
-#endif
}
} else if (!m_sequential) {
sendEOS();
diff --git a/src/gsttools/qgstbufferpoolinterface.cpp b/src/gsttools/qgstbufferpoolinterface.cpp
deleted file mode 100644
index be8a2e116..000000000
--- a/src/gsttools/qgstbufferpoolinterface.cpp
+++ /dev/null
@@ -1,51 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2016 The Qt Company Ltd.
-** Contact: https://www.qt.io/licensing/
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see https://www.qt.io/terms-conditions. For further
-** information use the contact form at https://www.qt.io/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 3 as published by the Free Software
-** Foundation and appearing in the file LICENSE.LGPL3 included in the
-** packaging of this file. Please review the following information to
-** ensure the GNU Lesser General Public License version 3 requirements
-** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
-**
-** GNU General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU
-** General Public License version 2.0 or (at your option) the GNU General
-** Public license version 3 or any later version approved by the KDE Free
-** Qt Foundation. The licenses are as published by the Free Software
-** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
-** included in the packaging of this file. Please review the following
-** information to ensure the GNU General Public License requirements will
-** be met: https://www.gnu.org/licenses/gpl-2.0.html and
-** https://www.gnu.org/licenses/gpl-3.0.html.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-#include "qgstbufferpoolinterface_p.h"
-
-QT_BEGIN_NAMESPACE
-
-QGstBufferPoolPlugin::QGstBufferPoolPlugin(QObject *parent) :
- QObject(parent)
-{
-}
-
-QT_END_NAMESPACE
-
-#include "moc_qgstbufferpoolinterface_p.cpp"
diff --git a/src/gsttools/qgstbufferpoolinterface_p.h b/src/gsttools/qgstbufferpoolinterface_p.h
deleted file mode 100644
index f5cbc35aa..000000000
--- a/src/gsttools/qgstbufferpoolinterface_p.h
+++ /dev/null
@@ -1,117 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2016 The Qt Company Ltd.
-** Contact: https://www.qt.io/licensing/
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see https://www.qt.io/terms-conditions. For further
-** information use the contact form at https://www.qt.io/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 3 as published by the Free Software
-** Foundation and appearing in the file LICENSE.LGPL3 included in the
-** packaging of this file. Please review the following information to
-** ensure the GNU Lesser General Public License version 3 requirements
-** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
-**
-** GNU General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU
-** General Public License version 2.0 or (at your option) the GNU General
-** Public license version 3 or any later version approved by the KDE Free
-** Qt Foundation. The licenses are as published by the Free Software
-** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
-** included in the packaging of this file. Please review the following
-** information to ensure the GNU General Public License requirements will
-** be met: https://www.gnu.org/licenses/gpl-2.0.html and
-** https://www.gnu.org/licenses/gpl-3.0.html.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-#ifndef QGSTBUFFERPOOLINTERFACE_P_H
-#define QGSTBUFFERPOOLINTERFACE_P_H
-
-//
-// W A R N I N G
-// -------------
-//
-// This file is not part of the Qt API. It exists purely as an
-// implementation detail. This header file may change from version to
-// version without notice, or even be removed.
-//
-// We mean it.
-//
-
-#include <private/qgsttools_global_p.h>
-#include <qabstractvideobuffer.h>
-#include <qvideosurfaceformat.h>
-#include <QtCore/qobject.h>
-#include <QtCore/qplugin.h>
-
-#include <gst/gst.h>
-
-QT_BEGIN_NAMESPACE
-
-const QLatin1String QGstBufferPoolPluginKey("bufferpool");
-
-/*!
- Abstract interface for video buffers allocation.
-*/
-class Q_GSTTOOLS_EXPORT QGstBufferPoolInterface
-{
-public:
- virtual ~QGstBufferPoolInterface() {}
-
- virtual bool isFormatSupported(const QVideoSurfaceFormat &format) const = 0;
- virtual GstBuffer *takeBuffer(const QVideoSurfaceFormat &format, GstCaps *caps) = 0;
- virtual void clear() = 0;
-
- virtual QAbstractVideoBuffer::HandleType handleType() const = 0;
-
- /*!
- Build an QAbstractVideoBuffer instance from GstBuffer.
- Returns nullptr if GstBuffer is not compatible with this buffer pool.
-
- This method is called from gstreamer video sink thread.
- */
- virtual QAbstractVideoBuffer *prepareVideoBuffer(GstBuffer *buffer, int bytesPerLine) = 0;
-};
-
-#define QGstBufferPoolInterface_iid "org.qt-project.qt.gstbufferpool/5.0"
-Q_DECLARE_INTERFACE(QGstBufferPoolInterface, QGstBufferPoolInterface_iid)
-
-class QGstBufferPoolPlugin : public QObject, public QGstBufferPoolInterface
-{
- Q_OBJECT
- Q_INTERFACES(QGstBufferPoolInterface)
-public:
- explicit QGstBufferPoolPlugin(QObject *parent = 0);
- virtual ~QGstBufferPoolPlugin() {}
-
- bool isFormatSupported(const QVideoSurfaceFormat &format) const override = 0;
- GstBuffer *takeBuffer(const QVideoSurfaceFormat &format, GstCaps *caps) override = 0;
- void clear() override = 0;
-
- QAbstractVideoBuffer::HandleType handleType() const override = 0;
-
- /*!
- Build an QAbstractVideoBuffer instance from compatible GstBuffer.
- Returns nullptr if GstBuffer is not compatible with this buffer pool.
-
- This method is called from gstreamer video sink thread.
- */
- QAbstractVideoBuffer *prepareVideoBuffer(GstBuffer *buffer, int bytesPerLine) override = 0;
-};
-
-QT_END_NAMESPACE
-
-#endif
diff --git a/src/gsttools/qgstcodecsinfo.cpp b/src/gsttools/qgstcodecsinfo.cpp
index 2522ee19c..bbf78124d 100644
--- a/src/gsttools/qgstcodecsinfo.cpp
+++ b/src/gsttools/qgstcodecsinfo.cpp
@@ -211,45 +211,8 @@ void QGstCodecsInfo::updateCodecs(ElementType elementType)
gst_plugin_feature_list_free(elements);
}
-#if !GST_CHECK_VERSION(0, 10, 31)
-static gboolean element_filter(GstPluginFeature *feature, gpointer user_data)
-{
- if (Q_UNLIKELY(!GST_IS_ELEMENT_FACTORY(feature)))
- return FALSE;
-
- const QGstCodecsInfo::ElementType type = *reinterpret_cast<QGstCodecsInfo::ElementType *>(user_data);
-
- const gchar *klass = gst_element_factory_get_klass(GST_ELEMENT_FACTORY(feature));
- if (type == QGstCodecsInfo::AudioEncoder && !(strstr(klass, "Encoder") && strstr(klass, "Audio")))
- return FALSE;
- if (type == QGstCodecsInfo::VideoEncoder && !(strstr(klass, "Encoder") && strstr(klass, "Video")))
- return FALSE;
- if (type == QGstCodecsInfo::Muxer && !strstr(klass, "Muxer"))
- return FALSE;
-
- guint rank = gst_plugin_feature_get_rank(feature);
- if (rank < GST_RANK_MARGINAL)
- return FALSE;
-
- return TRUE;
-}
-
-static gint compare_plugin_func(const void *item1, const void *item2)
-{
- GstPluginFeature *f1 = reinterpret_cast<GstPluginFeature *>(const_cast<void *>(item1));
- GstPluginFeature *f2 = reinterpret_cast<GstPluginFeature *>(const_cast<void *>(item2));
-
- gint diff = gst_plugin_feature_get_rank(f2) - gst_plugin_feature_get_rank(f1);
- if (diff != 0)
- return diff;
-
- return strcmp(gst_plugin_feature_get_name(f1), gst_plugin_feature_get_name (f2));
-}
-#endif
-
GList *QGstCodecsInfo::elementFactories(ElementType elementType) const
{
-#if GST_CHECK_VERSION(0,10,31)
GstElementFactoryListType gstElementType = 0;
switch (elementType) {
case AudioEncoder:
@@ -273,13 +236,6 @@ GList *QGstCodecsInfo::elementFactories(ElementType elementType) const
}
return list;
-#else
- GList *result = gst_registry_feature_filter(gst_registry_get_default(),
- element_filter,
- FALSE, &elementType);
- result = g_list_sort(result, compare_plugin_func);
- return result;
-#endif
}
QSet<QString> QGstCodecsInfo::supportedStreamTypes(const QString &codec) const
diff --git a/src/gsttools/qgstreameraudioprobecontrol.cpp b/src/gsttools/qgstreameraudioprobecontrol.cpp
index d40b39939..8b0415bde 100644
--- a/src/gsttools/qgstreameraudioprobecontrol.cpp
+++ b/src/gsttools/qgstreameraudioprobecontrol.cpp
@@ -65,7 +65,6 @@ bool QGstreamerAudioProbeControl::probeBuffer(GstBuffer *buffer)
: -1;
QByteArray data;
-#if GST_CHECK_VERSION(1,0,0)
GstMapInfo info;
if (gst_buffer_map(buffer, &info, GST_MAP_READ)) {
data = QByteArray(reinterpret_cast<const char *>(info.data), info.size);
@@ -73,9 +72,6 @@ bool QGstreamerAudioProbeControl::probeBuffer(GstBuffer *buffer)
} else {
return true;
}
-#else
- data = QByteArray(reinterpret_cast<const char *>(buffer->data), buffer->size);
-#endif
QMutexLocker locker(&m_bufferMutex);
if (m_format.isValid()) {
diff --git a/src/gsttools/qgstreamerbufferprobe.cpp b/src/gsttools/qgstreamerbufferprobe.cpp
index e2956eadd..230807466 100644
--- a/src/gsttools/qgstreamerbufferprobe.cpp
+++ b/src/gsttools/qgstreamerbufferprobe.cpp
@@ -49,10 +49,6 @@ QGstreamerBufferProbe::QGstreamerBufferProbe(Flags flags)
QGstreamerBufferProbe::~QGstreamerBufferProbe()
{
-#if !GST_CHECK_VERSION(1,0,0)
- if (m_caps)
- gst_caps_unref(m_caps);
-#endif
}
void QGstreamerBufferProbe::addProbeToPad(GstPad *pad, bool downstream)
@@ -61,7 +57,6 @@ void QGstreamerBufferProbe::addProbeToPad(GstPad *pad, bool downstream)
probeCaps(caps);
gst_caps_unref(caps);
}
-#if GST_CHECK_VERSION(1,0,0)
if (m_flags & ProbeCaps) {
m_capsProbeId = gst_pad_add_probe(
pad,
@@ -76,16 +71,10 @@ void QGstreamerBufferProbe::addProbeToPad(GstPad *pad, bool downstream)
m_bufferProbeId = gst_pad_add_probe(
pad, GST_PAD_PROBE_TYPE_BUFFER, bufferProbe, this, nullptr);
}
-#else
- Q_UNUSED(downstream);
-
- m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(bufferProbe), this);
-#endif
}
void QGstreamerBufferProbe::removeProbeFromPad(GstPad *pad)
{
-#if GST_CHECK_VERSION(1,0,0)
if (m_capsProbeId != -1) {
gst_pad_remove_probe(pad, m_capsProbeId);
m_capsProbeId = -1;
@@ -94,16 +83,6 @@ void QGstreamerBufferProbe::removeProbeFromPad(GstPad *pad)
gst_pad_remove_probe(pad, m_bufferProbeId);
m_bufferProbeId = -1;
}
-#else
- if (m_bufferProbeId != -1) {
- gst_pad_remove_buffer_probe(pad, m_bufferProbeId);
- m_bufferProbeId = -1;
- if (m_caps) {
- gst_caps_unref(m_caps);
- m_caps = 0;
- }
- }
-#endif
}
void QGstreamerBufferProbe::probeCaps(GstCaps *)
@@ -115,9 +94,7 @@ bool QGstreamerBufferProbe::probeBuffer(GstBuffer *)
return true;
}
-#if GST_CHECK_VERSION(1,0,0)
-GstPadProbeReturn QGstreamerBufferProbe::capsProbe(
- GstPad *, GstPadProbeInfo *info, gpointer user_data)
+GstPadProbeReturn QGstreamerBufferProbe::capsProbe(GstPad *, GstPadProbeInfo *info, gpointer user_data)
{
QGstreamerBufferProbe * const control = static_cast<QGstreamerBufferProbe *>(user_data);
@@ -140,27 +117,5 @@ GstPadProbeReturn QGstreamerBufferProbe::bufferProbe(
return control->probeBuffer(buffer) ? GST_PAD_PROBE_OK : GST_PAD_PROBE_DROP;
return GST_PAD_PROBE_OK;
}
-#else
-gboolean QGstreamerBufferProbe::bufferProbe(GstElement *, GstBuffer *buffer, gpointer user_data)
-{
- QGstreamerBufferProbe * const control = static_cast<QGstreamerBufferProbe *>(user_data);
-
- if (control->m_flags & ProbeCaps) {
- GstCaps *caps = gst_buffer_get_caps(buffer);
- if (caps && (!control->m_caps || !gst_caps_is_equal(control->m_caps, caps))) {
- qSwap(caps, control->m_caps);
- control->probeCaps(control->m_caps);
- }
- if (caps)
- gst_caps_unref(caps);
- }
-
- if (control->m_flags & ProbeBuffers) {
- return control->probeBuffer(buffer) ? TRUE : FALSE;
- } else {
- return TRUE;
- }
-}
-#endif
QT_END_NAMESPACE
diff --git a/src/gsttools/qgstreamerbufferprobe_p.h b/src/gsttools/qgstreamerbufferprobe_p.h
index 2dda73e40..46067325c 100644
--- a/src/gsttools/qgstreamerbufferprobe_p.h
+++ b/src/gsttools/qgstreamerbufferprobe_p.h
@@ -80,14 +80,9 @@ protected:
virtual bool probeBuffer(GstBuffer *buffer);
private:
-#if GST_CHECK_VERSION(1,0,0)
static GstPadProbeReturn capsProbe(GstPad *pad, GstPadProbeInfo *info, gpointer user_data);
static GstPadProbeReturn bufferProbe(GstPad *pad, GstPadProbeInfo *info, gpointer user_data);
int m_capsProbeId = -1;
-#else
- static gboolean bufferProbe(GstElement *element, GstBuffer *buffer, gpointer user_data);
- GstCaps *m_caps = nullptr;
-#endif
int m_bufferProbeId = -1;
const Flags m_flags;
};
diff --git a/src/gsttools/qgstreamerbushelper.cpp b/src/gsttools/qgstreamerbushelper.cpp
index 1a4034eee..2eb038dfa 100644
--- a/src/gsttools/qgstreamerbushelper.cpp
+++ b/src/gsttools/qgstreamerbushelper.cpp
@@ -79,11 +79,7 @@ public:
delete m_intervalTimer;
if (m_tag)
-#if GST_CHECK_VERSION(1, 6, 0)
gst_bus_remove_watch(m_bus);
-#else
- g_source_remove(m_tag);
-#endif
}
GstBus* bus() const { return m_bus; }
@@ -166,21 +162,13 @@ QGstreamerBusHelper::QGstreamerBusHelper(GstBus* bus, QObject* parent):
QObject(parent)
{
d = new QGstreamerBusHelperPrivate(this, bus);
-#if GST_CHECK_VERSION(1,0,0)
gst_bus_set_sync_handler(bus, (GstBusSyncHandler)syncGstBusFilter, d, 0);
-#else
- gst_bus_set_sync_handler(bus, (GstBusSyncHandler)syncGstBusFilter, d);
-#endif
gst_object_ref(GST_OBJECT(bus));
}
QGstreamerBusHelper::~QGstreamerBusHelper()
{
-#if GST_CHECK_VERSION(1,0,0)
gst_bus_set_sync_handler(d->bus(), 0, 0, 0);
-#else
- gst_bus_set_sync_handler(d->bus(),0,0);
-#endif
gst_object_unref(GST_OBJECT(d->bus()));
}
diff --git a/src/gsttools/qgstreamerplayersession.cpp b/src/gsttools/qgstreamerplayersession.cpp
index adf11b022..4e21846e1 100644
--- a/src/gsttools/qgstreamerplayersession.cpp
+++ b/src/gsttools/qgstreamerplayersession.cpp
@@ -43,11 +43,8 @@
#include <private/qgstreameraudioprobecontrol_p.h>
#include <private/qgstreamervideoprobecontrol_p.h>
#include <private/qgstreamervideorendererinterface_p.h>
-#if !GST_CHECK_VERSION(1,0,0)
-#include <private/gstvideoconnector_p.h>
-#endif
#include <private/qgstutils_p.h>
-#include <private/qvideosurfacegstsink_p.h>
+#include <private/qgstvideorenderersink_p.h>
#include <gst/gstvalue.h>
#include <gst/base/gstbasesrc.h>
@@ -93,23 +90,6 @@ typedef enum {
GST_PLAY_FLAG_BUFFERING = 0x000000100
} GstPlayFlags;
-#if !GST_CHECK_VERSION(1,0,0)
-#define DEFAULT_RAW_CAPS \
- "video/x-raw-yuv; " \
- "video/x-raw-rgb; " \
- "video/x-raw-gray; " \
- "video/x-surface; " \
- "video/x-android-buffer; " \
- "audio/x-raw-int; " \
- "audio/x-raw-float; " \
- "text/plain; " \
- "text/x-pango-markup; " \
- "video/x-dvd-subpicture; " \
- "subpicture/x-pgs"
-
-static GstStaticCaps static_RawCaps = GST_STATIC_CAPS(DEFAULT_RAW_CAPS);
-#endif
-
QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
: QObject(parent)
{
@@ -126,10 +106,6 @@ void QGstreamerPlayerSession::initPlaybin()
QByteArray envFlags = qgetenv("QT_GSTREAMER_PLAYBIN_FLAGS");
if (!envFlags.isEmpty()) {
flags |= envFlags.toInt();
-#if !GST_CHECK_VERSION(1,0,0)
- } else {
- flags |= GST_PLAY_FLAG_NATIVE_VIDEO;
-#endif
}
g_object_set(G_OBJECT(m_playbin), "flags", flags, nullptr);
@@ -161,7 +137,6 @@ void QGstreamerPlayerSession::initPlaybin()
}
}
-#if GST_CHECK_VERSION(1,0,0)
static const auto convDesc = qEnvironmentVariable("QT_GSTREAMER_PLAYBIN_CONVERT");
GError *err = nullptr;
auto convPipeline = !convDesc.isEmpty() ? convDesc.toLatin1().constData() : "identity";
@@ -171,14 +146,6 @@ void QGstreamerPlayerSession::initPlaybin()
g_clear_error(&err);
}
m_videoIdentity = convElement;
-#else
- m_videoIdentity = GST_ELEMENT(g_object_new(gst_video_connector_get_type(), 0)); // floating ref
- g_signal_connect(G_OBJECT(m_videoIdentity), "connection-failed", G_CALLBACK(insertColorSpaceElement), (gpointer)this);
- m_colorSpace = gst_element_factory_make(QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME, "ffmpegcolorspace-vo");
-
- // might not get a parent, take ownership to avoid leak
- qt_gst_object_ref_sink(GST_OBJECT(m_colorSpace));
-#endif
m_nullVideoSink = gst_element_factory_make("fakesink", nullptr);
g_object_set(G_OBJECT(m_nullVideoSink), "sync", true, nullptr);
@@ -267,9 +234,6 @@ void QGstreamerPlayerSession::resetElements()
setBus(nullptr);
resetGstObject(m_playbin);
resetGstObject(m_pipeline);
-#if !GST_CHECK_VERSION(1,0,0)
- resetGstObject(m_colorSpace);
-#endif
resetGstObject(m_nullVideoSink);
resetGstObject(m_videoOutputBin);
@@ -379,7 +343,7 @@ bool QGstreamerPlayerSession::parsePipeline()
// Set current surface to video sink before creating a pipeline.
auto renderer = qobject_cast<QVideoRendererControl *>(m_videoOutput);
if (renderer)
- QVideoSurfaceGstSink::setSurface(renderer->surface());
+ QGstVideoRendererSink::setSurface(renderer->surface());
QString url = m_request.url().toString(QUrl::RemoveScheme);
QString desc = QUrl::fromPercentEncoding(url.toLatin1().constData());
@@ -397,22 +361,15 @@ bool QGstreamerPlayerSession::parsePipeline()
static void gst_foreach(GstIterator *it, const std::function<bool(GstElement *)> &cmp)
{
-#if GST_CHECK_VERSION(1,0,0)
GValue value = G_VALUE_INIT;
while (gst_iterator_next (it, &value) == GST_ITERATOR_OK) {
auto child = static_cast<GstElement*>(g_value_get_object(&value));
-#else
- GstElement *child = nullptr;
- while (gst_iterator_next(it, reinterpret_cast<gpointer *>(&child)) == GST_ITERATOR_OK) {
-#endif
if (cmp(child))
break;
}
gst_iterator_free(it);
-#if GST_CHECK_VERSION(1,0,0)
g_value_unset(&value);
-#endif
}
bool QGstreamerPlayerSession::setPipeline(GstElement *pipeline)
@@ -520,7 +477,6 @@ QMediaTimeRange QGstreamerPlayerSession::availablePlaybackRanges() const
if (duration() <= 0)
return ranges;
-#if GST_CHECK_VERSION(0, 10, 31)
//GST_FORMAT_TIME would be more appropriate, but unfortunately it's not supported.
//with GST_FORMAT_PERCENT media is treated as encoded with constant bitrate.
GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);
@@ -539,7 +495,6 @@ QMediaTimeRange QGstreamerPlayerSession::availablePlaybackRanges() const
}
gst_query_unref(query);
-#endif
if (ranges.isEmpty() && !isLiveSource() && isSeekable())
ranges.addInterval(0, duration());
@@ -617,26 +572,12 @@ bool QGstreamerPlayerSession::isAudioAvailable() const
return m_audioAvailable;
}
-#if GST_CHECK_VERSION(1,0,0)
static GstPadProbeReturn block_pad_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
-#else
-static void block_pad_cb(GstPad *pad, gboolean blocked, gpointer user_data)
-#endif
{
Q_UNUSED(pad);
-#if GST_CHECK_VERSION(1,0,0)
Q_UNUSED(info);
Q_UNUSED(user_data);
return GST_PAD_PROBE_OK;
-#else
-#ifdef DEBUG_PLAYBIN
- qDebug() << "block_pad_cb, blocked:" << blocked;
-#endif
- if (blocked && user_data) {
- QGstreamerPlayerSession *session = reinterpret_cast<QGstreamerPlayerSession*>(user_data);
- QMetaObject::invokeMethod(session, "finishVideoOutputChange", Qt::QueuedConnection);
- }
-#endif
}
void QGstreamerPlayerSession::updateVideoRenderer()
@@ -719,15 +660,6 @@ void QGstreamerPlayerSession::setVideoRenderer(QObject *videoOutput)
gst_element_set_state(m_videoSink, GST_STATE_NULL);
gst_element_set_state(m_playbin, GST_STATE_NULL);
-#if !GST_CHECK_VERSION(1,0,0)
- if (m_usingColorspaceElement) {
- gst_element_unlink(m_colorSpace, m_videoSink);
- gst_bin_remove(GST_BIN(m_videoOutputBin), m_colorSpace);
- } else {
- gst_element_unlink(m_videoIdentity, m_videoSink);
- }
-#endif
-
removeVideoBufferProbe();
gst_bin_remove(GST_BIN(m_videoOutputBin), m_videoSink);
@@ -737,18 +669,6 @@ void QGstreamerPlayerSession::setVideoRenderer(QObject *videoOutput)
gst_bin_add(GST_BIN(m_videoOutputBin), m_videoSink);
bool linked = gst_element_link(m_videoIdentity, m_videoSink);
-#if !GST_CHECK_VERSION(1,0,0)
- m_usingColorspaceElement = false;
- if (!linked) {
- m_usingColorspaceElement = true;
-#ifdef DEBUG_PLAYBIN
- qDebug() << "Failed to connect video output, inserting the colorspace element.";
-#endif
- gst_bin_add(GST_BIN(m_videoOutputBin), m_colorSpace);
- linked = gst_element_link_many(m_videoIdentity, m_colorSpace, m_videoSink, nullptr);
- }
-#endif
-
if (!linked)
qWarning() << "Linking video output element failed";
@@ -789,11 +709,7 @@ void QGstreamerPlayerSession::setVideoRenderer(QObject *videoOutput)
//block pads, async to avoid locking in paused state
GstPad *srcPad = gst_element_get_static_pad(m_videoIdentity, "src");
-#if GST_CHECK_VERSION(1,0,0)
this->pad_probe_id = gst_pad_add_probe(srcPad, (GstPadProbeType)(GST_PAD_PROBE_TYPE_BUFFER | GST_PAD_PROBE_TYPE_BLOCKING), block_pad_cb, this, nullptr);
-#else
- gst_pad_set_blocked_async(srcPad, true, &block_pad_cb, this);
-#endif
gst_object_unref(GST_OBJECT(srcPad));
//Unpause the sink to avoid waiting until the buffer is processed
@@ -835,31 +751,15 @@ void QGstreamerPlayerSession::finishVideoOutputChange()
//video output was change back to the current one,
//no need to torment the pipeline, just unblock the pad
if (gst_pad_is_blocked(srcPad))
-#if GST_CHECK_VERSION(1,0,0)
gst_pad_remove_probe(srcPad, this->pad_probe_id);
-#else
- gst_pad_set_blocked_async(srcPad, false, &block_pad_cb, 0);
-#endif
m_pendingVideoSink = 0;
gst_object_unref(GST_OBJECT(srcPad));
return;
}
-#if !GST_CHECK_VERSION(1,0,0)
- if (m_usingColorspaceElement) {
- gst_element_set_state(m_colorSpace, GST_STATE_NULL);
- gst_element_set_state(m_videoSink, GST_STATE_NULL);
-
- gst_element_unlink(m_colorSpace, m_videoSink);
- gst_bin_remove(GST_BIN(m_videoOutputBin), m_colorSpace);
- } else {
-#else
- {
-#endif
- gst_element_set_state(m_videoSink, GST_STATE_NULL);
- gst_element_unlink(m_videoIdentity, m_videoSink);
- }
+ gst_element_set_state(m_videoSink, GST_STATE_NULL);
+ gst_element_unlink(m_videoIdentity, m_videoSink);
removeVideoBufferProbe();
@@ -873,17 +773,6 @@ void QGstreamerPlayerSession::finishVideoOutputChange()
addVideoBufferProbe();
bool linked = gst_element_link(m_videoIdentity, m_videoSink);
-#if !GST_CHECK_VERSION(1,0,0)
- m_usingColorspaceElement = false;
- if (!linked) {
- m_usingColorspaceElement = true;
-#ifdef DEBUG_PLAYBIN
- qDebug() << "Failed to connect video output, inserting the colorspace element.";
-#endif
- gst_bin_add(GST_BIN(m_videoOutputBin), m_colorSpace);
- linked = gst_element_link_many(m_videoIdentity, m_colorSpace, m_videoSink, nullptr);
- }
-#endif
if (!linked)
qWarning() << "Linking video output element failed";
@@ -892,16 +781,6 @@ void QGstreamerPlayerSession::finishVideoOutputChange()
qDebug() << "notify the video connector it has to emit a new segment message...";
#endif
-#if !GST_CHECK_VERSION(1,0,0)
- //it's necessary to send a new segment event just before
- //the first buffer pushed to the new sink
- g_signal_emit_by_name(m_videoIdentity,
- "resend-new-segment",
- true //emit connection-failed signal
- //to have a chance to insert colorspace element
- );
-#endif
-
GstState state = GST_STATE_VOID_PENDING;
switch (m_pendingState) {
@@ -916,11 +795,6 @@ void QGstreamerPlayerSession::finishVideoOutputChange()
break;
}
-#if !GST_CHECK_VERSION(1,0,0)
- if (m_usingColorspaceElement)
- gst_element_set_state(m_colorSpace, state);
-#endif
-
gst_element_set_state(m_videoSink, state);
if (state == GST_STATE_NULL)
@@ -935,64 +809,12 @@ void QGstreamerPlayerSession::finishVideoOutputChange()
//don't have to wait here, it will unblock eventually
if (gst_pad_is_blocked(srcPad))
-#if GST_CHECK_VERSION(1,0,0)
- gst_pad_remove_probe(srcPad, this->pad_probe_id);
-#else
- gst_pad_set_blocked_async(srcPad, false, &block_pad_cb, 0);
-#endif
+ gst_pad_remove_probe(srcPad, this->pad_probe_id);
gst_object_unref(GST_OBJECT(srcPad));
}
-#if !GST_CHECK_VERSION(1,0,0)
-
-void QGstreamerPlayerSession::insertColorSpaceElement(GstElement *element, gpointer data)
-{
-#ifdef DEBUG_PLAYBIN
- qDebug() << Q_FUNC_INFO;
-#endif
- Q_UNUSED(element);
- QGstreamerPlayerSession* session = reinterpret_cast<QGstreamerPlayerSession*>(data);
-
- if (session->m_usingColorspaceElement)
- return;
- session->m_usingColorspaceElement = true;
-
-#ifdef DEBUG_PLAYBIN
- qDebug() << "Failed to connect video output, inserting the colorspace elemnt.";
- qDebug() << "notify the video connector it has to emit a new segment message...";
-#endif
- //it's necessary to send a new segment event just before
- //the first buffer pushed to the new sink
- g_signal_emit_by_name(session->m_videoIdentity,
- "resend-new-segment",
- false // don't emit connection-failed signal
- );
-
- gst_element_unlink(session->m_videoIdentity, session->m_videoSink);
- gst_bin_add(GST_BIN(session->m_videoOutputBin), session->m_colorSpace);
- gst_element_link_many(session->m_videoIdentity, session->m_colorSpace, session->m_videoSink, nullptr);
-
- GstState state = GST_STATE_VOID_PENDING;
-
- switch (session->m_pendingState) {
- case QMediaPlayer::StoppedState:
- state = GST_STATE_NULL;
- break;
- case QMediaPlayer::PausedState:
- state = GST_STATE_PAUSED;
- break;
- case QMediaPlayer::PlayingState:
- state = GST_STATE_PLAYING;
- break;
- }
-
- gst_element_set_state(session->m_colorSpace, state);
-}
-
-#endif
-
bool QGstreamerPlayerSession::isVideoAvailable() const
{
return m_videoAvailable;
@@ -1005,11 +827,9 @@ bool QGstreamerPlayerSession::isSeekable() const
bool QGstreamerPlayerSession::play()
{
-#if GST_CHECK_VERSION(1,0,0)
static bool dumpDot = qEnvironmentVariableIsSet("GST_DEBUG_DUMP_DOT_DIR");
if (dumpDot)
gst_debug_bin_to_dot_file_with_ts(GST_BIN(m_pipeline), GstDebugGraphDetails(GST_DEBUG_GRAPH_SHOW_ALL), "gst.play");
-#endif
#ifdef DEBUG_PLAYBIN
qDebug() << Q_FUNC_INFO;
#endif
@@ -1337,7 +1157,6 @@ bool QGstreamerPlayerSession::processBusMessage(const QGstreamerMessage &message
case GST_MESSAGE_SEGMENT_DONE:
break;
case GST_MESSAGE_LATENCY:
-#if GST_CHECK_VERSION(0,10,13)
case GST_MESSAGE_ASYNC_START:
break;
case GST_MESSAGE_ASYNC_DONE:
@@ -1350,10 +1169,7 @@ bool QGstreamerPlayerSession::processBusMessage(const QGstreamerMessage &message
}
break;
}
-#if GST_CHECK_VERSION(0,10,23)
case GST_MESSAGE_REQUEST_STATE:
-#endif
-#endif
case GST_MESSAGE_ANY:
break;
default:
@@ -1501,11 +1317,7 @@ void QGstreamerPlayerSession::getStreamsInfo()
default:
break;
}
-#if GST_CHECK_VERSION(1,0,0)
if (tags && GST_IS_TAG_LIST(tags)) {
-#else
- if (tags && gst_is_tag_list(tags)) {
-#endif
gchar *languageCode = 0;
if (gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &languageCode))
streamProperties[QMediaMetaData::Language] = QString::fromUtf8(languageCode);
@@ -1674,13 +1486,8 @@ void QGstreamerPlayerSession::playbinNotifySource(GObject *o, GParamSpec *p, gpo
const int timeout = 30;
if (qstrcmp(G_OBJECT_CLASS_NAME(G_OBJECT_GET_CLASS(source)), "GstUDPSrc") == 0) {
quint64 convertedTimeout = timeout;
-#if GST_CHECK_VERSION(1,0,0)
// Gst 1.x -> nanosecond
convertedTimeout *= 1000000000;
-#else
- // Gst 0.10 -> microsecond
- convertedTimeout *= 1000000;
-#endif
g_object_set(G_OBJECT(source), "timeout", convertedTimeout, nullptr);
self->m_sourceType = UDPSrc;
//The udpsrc is always a live source.
@@ -1776,35 +1583,6 @@ void QGstreamerPlayerSession::updateMuted()
}
}
-#if !GST_CHECK_VERSION(0, 10, 33)
-static gboolean factory_can_src_any_caps (GstElementFactory *factory, const GstCaps *caps)
-{
- GList *templates;
-
- g_return_val_if_fail(factory != nullptr, FALSE);
- g_return_val_if_fail(caps != nullptr, FALSE);
-
- templates = factory->staticpadtemplates;
-
- while (templates) {
- GstStaticPadTemplate *templ = (GstStaticPadTemplate *)templates->data;
-
- if (templ->direction == GST_PAD_SRC) {
- GstCaps *templcaps = gst_static_caps_get(&templ->static_caps);
-
- if (qt_gst_caps_can_intersect(caps, templcaps)) {
- gst_caps_unref(templcaps);
- return TRUE;
- }
- gst_caps_unref(templcaps);
- }
- templates = g_list_next(templates);
- }
-
- return FALSE;
-}
-#endif
-
GstAutoplugSelectResult QGstreamerPlayerSession::handleAutoplugSelect(GstBin *bin, GstPad *pad, GstCaps *caps, GstElementFactory *factory, QGstreamerPlayerSession *session)
{
Q_UNUSED(bin);
@@ -1818,17 +1596,9 @@ GstAutoplugSelectResult QGstreamerPlayerSession::handleAutoplugSelect(GstBin *bi
const gchar *factoryName = gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(factory));
if (g_str_has_prefix(factoryName, "vaapi")) {
GstPad *sinkPad = gst_element_get_static_pad(session->m_videoSink, "sink");
-#if GST_CHECK_VERSION(1,0,0)
GstCaps *sinkCaps = gst_pad_query_caps(sinkPad, nullptr);
-#else
- GstCaps *sinkCaps = gst_pad_get_caps(sinkPad);
-#endif
-#if !GST_CHECK_VERSION(0, 10, 33)
- if (!factory_can_src_any_caps(factory, sinkCaps))
-#else
if (!gst_element_factory_can_src_any_caps(factory, sinkCaps))
-#endif
res = GST_AUTOPLUG_SELECT_SKIP;
gst_object_unref(sinkPad);
@@ -1851,18 +1621,7 @@ void QGstreamerPlayerSession::handleElementAdded(GstBin *bin, GstElement *elemen
// Disable on-disk buffering.
g_object_set(G_OBJECT(element), "temp-template", nullptr, nullptr);
} else if (g_str_has_prefix(elementName, "uridecodebin") ||
-#if GST_CHECK_VERSION(1,0,0)
g_str_has_prefix(elementName, "decodebin")) {
-#else
- g_str_has_prefix(elementName, "decodebin2")) {
- if (g_str_has_prefix(elementName, "uridecodebin")) {
- // Add video/x-surface (VAAPI) to default raw formats
- g_object_set(G_OBJECT(element), "caps", gst_static_caps_get(&static_RawCaps), nullptr);
- // listen for uridecodebin autoplug-select to skip VAAPI usage when the current
- // video sink doesn't support it
- g_signal_connect(element, "autoplug-select", G_CALLBACK(handleAutoplugSelect), session);
- }
-#endif
//listen for queue2 element added to uridecodebin/decodebin2 as well.
//Don't touch other bins since they may have unrelated queues
g_signal_connect(element, "element-added",
diff --git a/src/gsttools/qgstreamerplayersession_p.h b/src/gsttools/qgstreamerplayersession_p.h
index 797229e69..6ce773d7a 100644
--- a/src/gsttools/qgstreamerplayersession_p.h
+++ b/src/gsttools/qgstreamerplayersession_p.h
@@ -193,9 +193,6 @@ private:
static void playbinNotifySource(GObject *o, GParamSpec *p, gpointer d);
static void handleVolumeChange(GObject *o, GParamSpec *p, gpointer d);
static void handleMutedChange(GObject *o, GParamSpec *p, gpointer d);
-#if !GST_CHECK_VERSION(1,0,0)
- static void insertColorSpaceElement(GstElement *element, gpointer data);
-#endif
static void handleElementAdded(GstBin *bin, GstElement *element, QGstreamerPlayerSession *session);
static void handleStreamsChange(GstBin *bin, gpointer user_data);
static GstAutoplugSelectResult handleAutoplugSelect(GstBin *bin, GstPad *pad, GstCaps *caps, GstElementFactory *factory, QGstreamerPlayerSession *session);
@@ -225,10 +222,6 @@ private:
GstElement *m_videoOutputBin = nullptr;
GstElement *m_videoIdentity = nullptr;
-#if !GST_CHECK_VERSION(1,0,0)
- GstElement *m_colorSpace = nullptr;
- bool m_usingColorspaceElement = false;
-#endif
GstElement *m_pendingVideoSink = nullptr;
GstElement *m_nullVideoSink = nullptr;
diff --git a/src/gsttools/qgstreamervideooverlay.cpp b/src/gsttools/qgstreamervideooverlay.cpp
index ea8149442..3cf8c61d6 100644
--- a/src/gsttools/qgstreamervideooverlay.cpp
+++ b/src/gsttools/qgstreamervideooverlay.cpp
@@ -42,11 +42,7 @@
#include <QtGui/qguiapplication.h>
#include "qgstutils_p.h"
-#if !GST_CHECK_VERSION(1,0,0)
-#include <gst/interfaces/xoverlay.h>
-#else
#include <gst/video/videooverlay.h>
-#endif
#include <QtMultimedia/private/qtmultimediaglobal_p.h>
@@ -453,17 +449,8 @@ void QGstreamerVideoOverlay::setWindowHandle(WId id)
void QGstreamerVideoOverlay::setWindowHandle_helper(WId id)
{
-#if GST_CHECK_VERSION(1,0,0)
if (m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink)) {
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(m_videoSink), id);
-#else
- if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
-# if GST_CHECK_VERSION(0,10,31)
- gst_x_overlay_set_window_handle(GST_X_OVERLAY(m_videoSink), id);
-# else
- gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(m_videoSink), id);
-# endif
-#endif
// Properties need to be reset when changing the winId.
m_sinkProperties->reset();
@@ -475,14 +462,8 @@ void QGstreamerVideoOverlay::expose()
if (!isActive())
return;
-#if !GST_CHECK_VERSION(1,0,0)
- if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink))
- gst_x_overlay_expose(GST_X_OVERLAY(m_videoSink));
-#else
- if (m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink)) {
+ if (m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink))
gst_video_overlay_expose(GST_VIDEO_OVERLAY(m_videoSink));
- }
-#endif
}
void QGstreamerVideoOverlay::setRenderRectangle(const QRect &rect)
@@ -499,31 +480,16 @@ void QGstreamerVideoOverlay::setRenderRectangle(const QRect &rect)
h = rect.height();
}
-#if GST_CHECK_VERSION(1,0,0)
if (m_videoSink && GST_IS_VIDEO_OVERLAY(m_videoSink))
gst_video_overlay_set_render_rectangle(GST_VIDEO_OVERLAY(m_videoSink), x, y, w, h);
-#elif GST_CHECK_VERSION(0, 10, 29)
- if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink))
- gst_x_overlay_set_render_rectangle(GST_X_OVERLAY(m_videoSink), x, y , w , h);
-#else
- Q_UNUSED(x);
- Q_UNUSED(y);
- Q_UNUSED(w);
- Q_UNUSED(h);
-#endif
}
bool QGstreamerVideoOverlay::processSyncMessage(const QGstreamerMessage &message)
{
GstMessage* gm = message.rawMessage();
-#if !GST_CHECK_VERSION(1,0,0)
- if (gm && (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) &&
- gst_structure_has_name(gm->structure, "prepare-xwindow-id")) {
-#else
if (gm && (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) &&
gst_structure_has_name(gst_message_get_structure(gm), "prepare-window-handle")) {
-#endif
setWindowHandle_helper(m_windowId);
return true;
}
diff --git a/src/gsttools/qgstreamervideoprobecontrol.cpp b/src/gsttools/qgstreamervideoprobecontrol.cpp
index f9ce4e412..3d587eb2c 100644
--- a/src/gsttools/qgstreamervideoprobecontrol.cpp
+++ b/src/gsttools/qgstreamervideoprobecontrol.cpp
@@ -72,19 +72,11 @@ void QGstreamerVideoProbeControl::stopFlushing()
void QGstreamerVideoProbeControl::probeCaps(GstCaps *caps)
{
-#if GST_CHECK_VERSION(1,0,0)
GstVideoInfo videoInfo;
QVideoSurfaceFormat format = QGstUtils::formatForCaps(caps, &videoInfo);
QMutexLocker locker(&m_frameMutex);
m_videoInfo = videoInfo;
-#else
- int bytesPerLine = 0;
- QVideoSurfaceFormat format = QGstUtils::formatForCaps(caps, &bytesPerLine);
-
- QMutexLocker locker(&m_frameMutex);
- m_bytesPerLine = bytesPerLine;
-#endif
m_format = format;
}
@@ -96,11 +88,7 @@ bool QGstreamerVideoProbeControl::probeBuffer(GstBuffer *buffer)
return true;
QVideoFrame frame(
-#if GST_CHECK_VERSION(1,0,0)
new QGstVideoBuffer(buffer, m_videoInfo),
-#else
- new QGstVideoBuffer(buffer, m_bytesPerLine),
-#endif
m_format.frameSize(),
m_format.pixelFormat());
diff --git a/src/gsttools/qgstreamervideoprobecontrol_p.h b/src/gsttools/qgstreamervideoprobecontrol_p.h
index 8f2101d74..2876f3124 100644
--- a/src/gsttools/qgstreamervideoprobecontrol_p.h
+++ b/src/gsttools/qgstreamervideoprobecontrol_p.h
@@ -86,11 +86,7 @@ private:
QVideoSurfaceFormat m_format;
QVideoFrame m_pendingFrame;
QMutex m_frameMutex;
-#if GST_CHECK_VERSION(1,0,0)
GstVideoInfo m_videoInfo;
-#else
- int m_bytesPerLine = 0;
-#endif
bool m_flushing = false;
bool m_frameProbed = false; // true if at least one frame was probed
};
diff --git a/src/gsttools/qgstreamervideorenderer.cpp b/src/gsttools/qgstreamervideorenderer.cpp
index c2226d658..c6ca935a4 100644
--- a/src/gsttools/qgstreamervideorenderer.cpp
+++ b/src/gsttools/qgstreamervideorenderer.cpp
@@ -38,7 +38,7 @@
****************************************************************************/
#include "qgstreamervideorenderer_p.h"
-#include <private/qvideosurfacegstsink_p.h>
+#include <private/qgstvideorenderersink_p.h>
#include <private/qgstutils_p.h>
#include <qabstractvideosurface.h>
#include <QtCore/qdebug.h>
@@ -78,7 +78,7 @@ void QGstreamerVideoRenderer::setVideoSink(GstElement *sink)
GstElement *QGstreamerVideoRenderer::videoSink()
{
if (!m_videoSink && m_surface) {
- auto sink = reinterpret_cast<GstElement *>(QVideoSurfaceGstSink::createSink(m_surface));
+ auto sink = reinterpret_cast<GstElement *>(QGstVideoRendererSink::createSink(m_surface));
resetSink(m_videoSink, sink);
}
diff --git a/src/gsttools/qgstreamervideorenderer_p.h b/src/gsttools/qgstreamervideorenderer_p.h
index d87bfcb8f..df8c58da2 100644
--- a/src/gsttools/qgstreamervideorenderer_p.h
+++ b/src/gsttools/qgstreamervideorenderer_p.h
@@ -52,8 +52,8 @@
//
#include <private/qgsttools_global_p.h>
+#include <private/qgstvideorenderersink_p.h>
#include <qvideorenderercontrol.h>
-#include <private/qvideosurfacegstsink_p.h>
#include <qabstractvideosurface.h>
#include "qgstreamervideorendererinterface_p.h"
diff --git a/src/gsttools/qgstutils.cpp b/src/gsttools/qgstutils.cpp
index 5c8d4c90c..c741ecb87 100644
--- a/src/gsttools/qgstutils.cpp
+++ b/src/gsttools/qgstutils.cpp
@@ -110,13 +110,8 @@ static void addTagToMap(const GstTagList *list,
break;
default:
// GST_TYPE_DATE is a function, not a constant, so pull it out of the switch
-#if GST_CHECK_VERSION(1,0,0)
if (G_VALUE_TYPE(&val) == G_TYPE_DATE) {
const GDate *date = (const GDate *)g_value_get_boxed(&val);
-#else
- if (G_VALUE_TYPE(&val) == GST_TYPE_DATE) {
- const GDate *date = gst_value_get_date(&val);
-#endif
if (g_date_valid(date)) {
int year = g_date_get_year(date);
int month = g_date_get_month(date);
@@ -125,7 +120,6 @@ static void addTagToMap(const GstTagList *list,
if (!map->contains("year"))
map->insert("year", year);
}
-#if GST_CHECK_VERSION(1,0,0)
} else if (G_VALUE_TYPE(&val) == GST_TYPE_DATE_TIME) {
const GstDateTime *dateTime = (const GstDateTime *)g_value_get_boxed(&val);
int year = gst_date_time_has_year(dateTime) ? gst_date_time_get_year(dateTime) : 0;
@@ -160,7 +154,6 @@ static void addTagToMap(const GstTagList *list,
}
}
}
-#endif
} else if (G_VALUE_TYPE(&val) == GST_TYPE_FRACTION) {
int nom = gst_value_get_fraction_numerator(&val);
int denom = gst_value_get_fraction_denominator(&val);
@@ -230,7 +223,6 @@ QSize QGstUtils::capsCorrectedResolution(const GstCaps *caps)
}
-#if GST_CHECK_VERSION(1,0,0)
namespace {
struct AudioFormat
@@ -263,7 +255,6 @@ static const AudioFormat qt_audioLookup[] =
};
}
-#endif
/*!
Returns audio format for caps.
@@ -273,7 +264,6 @@ static const AudioFormat qt_audioLookup[] =
QAudioFormat QGstUtils::audioFormatForCaps(const GstCaps *caps)
{
QAudioFormat format;
-#if GST_CHECK_VERSION(1,0,0)
GstAudioInfo info;
if (gst_audio_info_from_caps(&info, caps)) {
for (int i = 0; i < lengthOf(qt_audioLookup); ++i) {
@@ -290,83 +280,10 @@ QAudioFormat QGstUtils::audioFormatForCaps(const GstCaps *caps)
return format;
}
}
-#else
- const GstStructure *structure = gst_caps_get_structure(caps, 0);
-
- if (qstrcmp(gst_structure_get_name(structure), "audio/x-raw-int") == 0) {
-
- format.setCodec("audio/pcm");
-
- int endianness = 0;
- gst_structure_get_int(structure, "endianness", &endianness);
- if (endianness == 1234)
- format.setByteOrder(QAudioFormat::LittleEndian);
- else if (endianness == 4321)
- format.setByteOrder(QAudioFormat::BigEndian);
-
- gboolean isSigned = FALSE;
- gst_structure_get_boolean(structure, "signed", &isSigned);
- if (isSigned)
- format.setSampleType(QAudioFormat::SignedInt);
- else
- format.setSampleType(QAudioFormat::UnSignedInt);
-
- // Number of bits allocated per sample.
- int width = 0;
- gst_structure_get_int(structure, "width", &width);
-
- // The number of bits used per sample. This must be less than or equal to the width.
- int depth = 0;
- gst_structure_get_int(structure, "depth", &depth);
-
- if (width != depth) {
- // Unsupported sample layout.
- return QAudioFormat();
- }
- format.setSampleSize(width);
-
- int rate = 0;
- gst_structure_get_int(structure, "rate", &rate);
- format.setSampleRate(rate);
-
- int channels = 0;
- gst_structure_get_int(structure, "channels", &channels);
- format.setChannelCount(channels);
-
- } else if (qstrcmp(gst_structure_get_name(structure), "audio/x-raw-float") == 0) {
-
- format.setCodec("audio/pcm");
-
- int endianness = 0;
- gst_structure_get_int(structure, "endianness", &endianness);
- if (endianness == 1234)
- format.setByteOrder(QAudioFormat::LittleEndian);
- else if (endianness == 4321)
- format.setByteOrder(QAudioFormat::BigEndian);
-
- format.setSampleType(QAudioFormat::Float);
- int width = 0;
- gst_structure_get_int(structure, "width", &width);
-
- format.setSampleSize(width);
-
- int rate = 0;
- gst_structure_get_int(structure, "rate", &rate);
- format.setSampleRate(rate);
-
- int channels = 0;
- gst_structure_get_int(structure, "channels", &channels);
- format.setChannelCount(channels);
-
- } else {
- return QAudioFormat();
- }
-#endif
return format;
}
-#if GST_CHECK_VERSION(1,0,0)
/*
Returns audio format for a sample.
If the buffer doesn't have a valid audio format, an empty QAudioFormat is returned.
@@ -379,22 +296,6 @@ QAudioFormat QGstUtils::audioFormatForSample(GstSample *sample)
return QGstUtils::audioFormatForCaps(caps);
}
-#else
-/*!
- Returns audio format for a buffer.
- If the buffer doesn't have a valid audio format, an empty QAudioFormat is returned.
-*/
-QAudioFormat QGstUtils::audioFormatForBuffer(GstBuffer *buffer)
-{
- GstCaps* caps = gst_buffer_get_caps(buffer);
- if (!caps)
- return QAudioFormat();
-
- QAudioFormat format = QGstUtils::audioFormatForCaps(caps);
- gst_caps_unref(caps);
- return format;
-}
-#endif
/*!
Builds GstCaps for an audio format.
@@ -407,7 +308,6 @@ GstCaps *QGstUtils::capsForAudioFormat(const QAudioFormat &format)
if (!format.isValid())
return 0;
-#if GST_CHECK_VERSION(1,0,0)
const QAudioFormat::SampleType sampleType = format.sampleType();
const QAudioFormat::Endian byteOrder = format.byteOrder();
const int sampleSize = format.sampleSize();
@@ -427,42 +327,6 @@ GstCaps *QGstUtils::capsForAudioFormat(const QAudioFormat &format)
nullptr);
}
return 0;
-#else
- GstStructure *structure = 0;
-
- if (format.isValid()) {
- if (format.sampleType() == QAudioFormat::SignedInt || format.sampleType() == QAudioFormat::UnSignedInt) {
- structure = gst_structure_new("audio/x-raw-int", nullptr);
- } else if (format.sampleType() == QAudioFormat::Float) {
- structure = gst_structure_new("audio/x-raw-float", nullptr);
- }
- }
-
- GstCaps *caps = 0;
-
- if (structure) {
- gst_structure_set(structure, "rate", G_TYPE_INT, format.sampleRate(), nullptr);
- gst_structure_set(structure, "channels", G_TYPE_INT, format.channelCount(), nullptr);
- gst_structure_set(structure, "width", G_TYPE_INT, format.sampleSize(), nullptr);
- gst_structure_set(structure, "depth", G_TYPE_INT, format.sampleSize(), nullptr);
-
- if (format.byteOrder() == QAudioFormat::LittleEndian)
- gst_structure_set(structure, "endianness", G_TYPE_INT, 1234, nullptr);
- else if (format.byteOrder() == QAudioFormat::BigEndian)
- gst_structure_set(structure, "endianness", G_TYPE_INT, 4321, nullptr);
-
- if (format.sampleType() == QAudioFormat::SignedInt)
- gst_structure_set(structure, "signed", G_TYPE_BOOLEAN, TRUE, nullptr);
- else if (format.sampleType() == QAudioFormat::UnSignedInt)
- gst_structure_set(structure, "signed", G_TYPE_BOOLEAN, FALSE, nullptr);
-
- caps = gst_caps_new_empty();
- Q_ASSERT(caps);
- gst_caps_append_structure(caps, structure);
- }
-
- return caps;
-#endif
}
void QGstUtils::initializeGst()
@@ -686,7 +550,7 @@ QList<QGstUtils::CameraInfo> QGstUtils::enumerateCameras(GstElementFactory *fact
camerasCacheAgeTimer.restart();
#endif // linux_v4l
-#if GST_CHECK_VERSION(1,4,0) && (defined(Q_OS_WIN) || defined(Q_OS_MACOS))
+#if defined(Q_OS_WIN) || defined(Q_OS_MACOS)
if (!devices.isEmpty())
return devices;
@@ -745,7 +609,7 @@ QList<QGstUtils::CameraInfo> QGstUtils::enumerateCameras(GstElementFactory *fact
devs = g_list_delete_link(devs, devs);
}
gst_object_unref(monitor);
-#endif // GST_CHECK_VERSION(1,4,0) && (defined(Q_OS_WIN) || defined(Q_OS_MACOS))
+#endif // (defined(Q_OS_WIN) || defined(Q_OS_MACOS))
return devices;
}
@@ -809,22 +673,12 @@ QSet<QString> QGstUtils::supportedMimeTypes(bool (*isValidFactory)(GstElementFac
//enumerate supported mime types
gst_init(nullptr, nullptr);
-#if GST_CHECK_VERSION(1,0,0)
GstRegistry *registry = gst_registry_get();
GList *orig_plugins = gst_registry_get_plugin_list(registry);
-#else
- GstRegistry *registry = gst_registry_get_default();
- GList *orig_plugins = gst_default_registry_get_plugin_list ();
-#endif
for (GList *plugins = orig_plugins; plugins; plugins = g_list_next(plugins)) {
GstPlugin *plugin = (GstPlugin *) (plugins->data);
-#if GST_CHECK_VERSION(1,0,0)
if (GST_OBJECT_FLAG_IS_SET(GST_OBJECT(plugin), GST_PLUGIN_FLAG_BLACKLISTED))
continue;
-#else
- if (plugin->flags & (1<<1)) //GST_PLUGIN_FLAG_BLACKLISTED
- continue;
-#endif
GList *orig_features = gst_registry_get_feature_list_by_plugin(
registry, gst_plugin_get_name(plugin));
@@ -892,7 +746,6 @@ QSet<QString> QGstUtils::supportedMimeTypes(bool (*isValidFactory)(GstElementFac
return supportedMimeTypes;
}
-#if GST_CHECK_VERSION(1, 0, 0)
namespace {
struct ColorFormat { QImage::Format imageFormat; GstVideoFormat gstFormat; };
@@ -905,42 +758,16 @@ static const ColorFormat qt_colorLookup[] =
};
}
-#endif
-#if GST_CHECK_VERSION(1,0,0)
QImage QGstUtils::bufferToImage(GstBuffer *buffer, const GstVideoInfo &videoInfo)
-#else
-QImage QGstUtils::bufferToImage(GstBuffer *buffer)
-#endif
{
QImage img;
-#if GST_CHECK_VERSION(1,0,0)
GstVideoInfo info = videoInfo;
GstVideoFrame frame;
if (!gst_video_frame_map(&frame, &info, buffer, GST_MAP_READ))
return img;
-#else
- GstCaps *caps = gst_buffer_get_caps(buffer);
- if (!caps)
- return img;
-
- GstStructure *structure = gst_caps_get_structure (caps, 0);
- gint width = 0;
- gint height = 0;
- if (!structure
- || !gst_structure_get_int(structure, "width", &width)
- || !gst_structure_get_int(structure, "height", &height)
- || width <= 0
- || height <= 0) {
- gst_caps_unref(caps);
- return img;
- }
- gst_caps_unref(caps);
-#endif
-
-#if GST_CHECK_VERSION(1,0,0)
if (videoInfo.finfo->format == GST_VIDEO_FORMAT_I420) {
const int width = videoInfo.width;
const int height = videoInfo.height;
@@ -951,15 +778,6 @@ QImage QGstUtils::bufferToImage(GstBuffer *buffer)
static_cast<const uchar *>(frame.data[1]),
static_cast<const uchar *>(frame.data[2])
};
-#else
- if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-yuv") == 0) {
- const int stride[] = { width, width / 2, width / 2 };
- const uchar *data[] = {
- (const uchar *)buffer->data,
- (const uchar *)buffer->data + width * height,
- (const uchar *)buffer->data + width * height * 5 / 4
- };
-#endif
img = QImage(width/2, height/2, QImage::Format_RGB32);
for (int y=0; y<height; y+=2) {
@@ -979,7 +797,6 @@ QImage QGstUtils::bufferToImage(GstBuffer *buffer)
img.setPixel(x/2,y/2,qRgb(r,g,b));
}
}
-#if GST_CHECK_VERSION(1,0,0)
} else for (int i = 0; i < lengthOf(qt_colorLookup); ++i) {
if (qt_colorLookup[i].gstFormat != videoInfo.finfo->format)
continue;
@@ -997,34 +814,13 @@ QImage QGstUtils::bufferToImage(GstBuffer *buffer)
}
gst_video_frame_unmap(&frame);
-#else
- } else if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb") == 0) {
- QImage::Format format = QImage::Format_Invalid;
- int bpp = 0;
- gst_structure_get_int(structure, "bpp", &bpp);
-
- if (bpp == 24)
- format = QImage::Format_RGB888;
- else if (bpp == 32)
- format = QImage::Format_RGB32;
-
- if (format != QImage::Format_Invalid) {
- img = QImage((const uchar *)buffer->data,
- width,
- height,
- format);
- img.bits(); //detach
- }
- }
-#endif
+
return img;
}
namespace {
-#if GST_CHECK_VERSION(1,0,0)
-
struct VideoFormat
{
QVideoFrame::PixelFormat pixelFormat;
@@ -1077,98 +873,8 @@ static int indexOfVideoFormat(GstVideoFormat format)
return -1;
}
-#else
-
-struct YuvFormat
-{
- QVideoFrame::PixelFormat pixelFormat;
- guint32 fourcc;
- int bitsPerPixel;
-};
-
-static const YuvFormat qt_yuvColorLookup[] =
-{
- { QVideoFrame::Format_YUV420P, GST_MAKE_FOURCC('I','4','2','0'), 8 },
- { QVideoFrame::Format_YUV422P, GST_MAKE_FOURCC('Y','4','2','B'), 8 },
- { QVideoFrame::Format_YV12, GST_MAKE_FOURCC('Y','V','1','2'), 8 },
- { QVideoFrame::Format_UYVY, GST_MAKE_FOURCC('U','Y','V','Y'), 16 },
- { QVideoFrame::Format_YUYV, GST_MAKE_FOURCC('Y','U','Y','2'), 16 },
- { QVideoFrame::Format_NV12, GST_MAKE_FOURCC('N','V','1','2'), 8 },
- { QVideoFrame::Format_NV21, GST_MAKE_FOURCC('N','V','2','1'), 8 },
- { QVideoFrame::Format_AYUV444, GST_MAKE_FOURCC('A','Y','U','V'), 32 }
-};
-
-static int indexOfYuvColor(QVideoFrame::PixelFormat format)
-{
- const int count = sizeof(qt_yuvColorLookup) / sizeof(YuvFormat);
-
- for (int i = 0; i < count; ++i)
- if (qt_yuvColorLookup[i].pixelFormat == format)
- return i;
-
- return -1;
-}
-
-static int indexOfYuvColor(guint32 fourcc)
-{
- const int count = sizeof(qt_yuvColorLookup) / sizeof(YuvFormat);
-
- for (int i = 0; i < count; ++i)
- if (qt_yuvColorLookup[i].fourcc == fourcc)
- return i;
-
- return -1;
}
-struct RgbFormat
-{
- QVideoFrame::PixelFormat pixelFormat;
- int bitsPerPixel;
- int depth;
- int endianness;
- int red;
- int green;
- int blue;
- int alpha;
-};
-
-static const RgbFormat qt_rgbColorLookup[] =
-{
- { QVideoFrame::Format_RGB32 , 32, 24, 4321, 0x0000FF00, 0x00FF0000, int(0xFF000000), 0x00000000 },
- { QVideoFrame::Format_RGB32 , 32, 24, 1234, 0x00FF0000, 0x0000FF00, 0x000000FF, 0x00000000 },
- { QVideoFrame::Format_BGR32 , 32, 24, 4321, int(0xFF000000), 0x00FF0000, 0x0000FF00, 0x00000000 },
- { QVideoFrame::Format_BGR32 , 32, 24, 1234, 0x000000FF, 0x0000FF00, 0x00FF0000, 0x00000000 },
- { QVideoFrame::Format_ARGB32, 32, 24, 4321, 0x0000FF00, 0x00FF0000, int(0xFF000000), 0x000000FF },
- { QVideoFrame::Format_ARGB32, 32, 24, 1234, 0x00FF0000, 0x0000FF00, 0x000000FF, int(0xFF000000) },
- { QVideoFrame::Format_RGB24 , 24, 24, 4321, 0x00FF0000, 0x0000FF00, 0x000000FF, 0x00000000 },
- { QVideoFrame::Format_BGR24 , 24, 24, 4321, 0x000000FF, 0x0000FF00, 0x00FF0000, 0x00000000 },
- { QVideoFrame::Format_RGB565, 16, 16, 1234, 0x0000F800, 0x000007E0, 0x0000001F, 0x00000000 }
-};
-
-static int indexOfRgbColor(
- int bits, int depth, int endianness, int red, int green, int blue, int alpha)
-{
- const int count = sizeof(qt_rgbColorLookup) / sizeof(RgbFormat);
-
- for (int i = 0; i < count; ++i) {
- if (qt_rgbColorLookup[i].bitsPerPixel == bits
- && qt_rgbColorLookup[i].depth == depth
- && qt_rgbColorLookup[i].endianness == endianness
- && qt_rgbColorLookup[i].red == red
- && qt_rgbColorLookup[i].green == green
- && qt_rgbColorLookup[i].blue == blue
- && qt_rgbColorLookup[i].alpha == alpha) {
- return i;
- }
- }
- return -1;
-}
-#endif
-
-}
-
-#if GST_CHECK_VERSION(1,0,0)
-
QVideoSurfaceFormat QGstUtils::formatForCaps(
GstCaps *caps, GstVideoInfo *info, QAbstractVideoBuffer::HandleType handleType)
{
@@ -1196,41 +902,10 @@ QVideoSurfaceFormat QGstUtils::formatForCaps(
return QVideoSurfaceFormat();
}
-#else
-
-QVideoSurfaceFormat QGstUtils::formatForCaps(
- GstCaps *caps, int *bytesPerLine, QAbstractVideoBuffer::HandleType handleType)
-{
- const GstStructure *structure = gst_caps_get_structure(caps, 0);
-
- int bitsPerPixel = 0;
- QSize size = structureResolution(structure);
- QVideoFrame::PixelFormat pixelFormat = structurePixelFormat(structure, &bitsPerPixel);
-
- if (pixelFormat != QVideoFrame::Format_Invalid) {
- QVideoSurfaceFormat format(size, pixelFormat, handleType);
-
- QPair<qreal, qreal> rate = structureFrameRateRange(structure);
- if (rate.second)
- format.setFrameRate(rate.second);
-
- format.setPixelAspectRatio(structurePixelAspectRatio(structure));
-
- if (bytesPerLine)
- *bytesPerLine = ((size.width() * bitsPerPixel / 8) + 3) & ~3;
-
- return format;
- }
- return QVideoSurfaceFormat();
-}
-
-#endif
-
GstCaps *QGstUtils::capsForFormats(const QList<QVideoFrame::PixelFormat> &formats)
{
GstCaps *caps = gst_caps_new_empty();
-#if GST_CHECK_VERSION(1,0,0)
for (QVideoFrame::PixelFormat format : formats) {
int index = indexOfVideoFormat(format);
@@ -1241,41 +916,6 @@ GstCaps *QGstUtils::capsForFormats(const QList<QVideoFrame::PixelFormat> &format
nullptr));
}
}
-#else
- for (QVideoFrame::PixelFormat format : formats) {
- int index = indexOfYuvColor(format);
-
- if (index != -1) {
- gst_caps_append_structure(caps, gst_structure_new(
- "video/x-raw-yuv",
- "format", GST_TYPE_FOURCC, qt_yuvColorLookup[index].fourcc,
- nullptr));
- continue;
- }
-
- const int count = sizeof(qt_rgbColorLookup) / sizeof(RgbFormat);
-
- for (int i = 0; i < count; ++i) {
- if (qt_rgbColorLookup[i].pixelFormat == format) {
- GstStructure *structure = gst_structure_new(
- "video/x-raw-rgb",
- "bpp" , G_TYPE_INT, qt_rgbColorLookup[i].bitsPerPixel,
- "depth" , G_TYPE_INT, qt_rgbColorLookup[i].depth,
- "endianness", G_TYPE_INT, qt_rgbColorLookup[i].endianness,
- "red_mask" , G_TYPE_INT, qt_rgbColorLookup[i].red,
- "green_mask", G_TYPE_INT, qt_rgbColorLookup[i].green,
- "blue_mask" , G_TYPE_INT, qt_rgbColorLookup[i].blue,
- nullptr);
-
- if (qt_rgbColorLookup[i].alpha != 0) {
- gst_structure_set(
- structure, "alpha_mask", G_TYPE_INT, qt_rgbColorLookup[i].alpha, nullptr);
- }
- gst_caps_append_structure(caps, structure);
- }
- }
- }
-#endif
gst_caps_set_simple(
caps,
@@ -1334,7 +974,6 @@ void QGstUtils::setMetaData(GstElement *element, const QMap<QByteArray, QVariant
tagValue.toDouble(),
nullptr);
break;
-#if GST_CHECK_VERSION(0, 10, 31)
case QVariant::DateTime: {
QDateTime date = tagValue.toDateTime().toLocalTime();
gst_tag_setter_add_tags(GST_TAG_SETTER(element),
@@ -1346,7 +985,6 @@ void QGstUtils::setMetaData(GstElement *element, const QMap<QByteArray, QVariant
nullptr);
break;
}
-#endif
default:
break;
}
@@ -1356,14 +994,9 @@ void QGstUtils::setMetaData(GstElement *element, const QMap<QByteArray, QVariant
void QGstUtils::setMetaData(GstBin *bin, const QMap<QByteArray, QVariant> &data)
{
GstIterator *elements = gst_bin_iterate_all_by_interface(bin, GST_TYPE_TAG_SETTER);
-#if GST_CHECK_VERSION(1,0,0)
GValue item = G_VALUE_INIT;
while (gst_iterator_next(elements, &item) == GST_ITERATOR_OK) {
GstElement * const element = GST_ELEMENT(g_value_get_object(&item));
-#else
- GstElement *element = 0;
- while (gst_iterator_next(elements, (void**)&element) == GST_ITERATOR_OK) {
-#endif
setMetaData(element, data);
}
gst_iterator_free(elements);
@@ -1373,16 +1006,7 @@ void QGstUtils::setMetaData(GstBin *bin, const QMap<QByteArray, QVariant> &data)
GstCaps *QGstUtils::videoFilterCaps()
{
const char *caps =
-#if GST_CHECK_VERSION(1,2,0)
"video/x-raw(ANY);"
-#elif GST_CHECK_VERSION(1,0,0)
- "video/x-raw;"
-#else
- "video/x-raw-yuv;"
- "video/x-raw-rgb;"
- "video/x-raw-data;"
- "video/x-android-buffer;"
-#endif
"image/jpeg;"
"video/x-h264";
static GstStaticCaps staticCaps = GST_STATIC_CAPS(caps);
@@ -1403,16 +1027,13 @@ QSize QGstUtils::structureResolution(const GstStructure *s)
return size;
}
-QVideoFrame::PixelFormat QGstUtils::structurePixelFormat(const GstStructure *structure, int *bpp)
+QVideoFrame::PixelFormat QGstUtils::structurePixelFormat(const GstStructure *structure)
{
QVideoFrame::PixelFormat pixelFormat = QVideoFrame::Format_Invalid;
if (!structure)
return pixelFormat;
-#if GST_CHECK_VERSION(1,0,0)
- Q_UNUSED(bpp);
-
if (gst_structure_has_name(structure, "video/x-raw")) {
const gchar *s = gst_structure_get_string(structure, "format");
if (s) {
@@ -1423,43 +1044,6 @@ QVideoFrame::PixelFormat QGstUtils::structurePixelFormat(const GstStructure *str
pixelFormat = qt_videoFormatLookup[index].pixelFormat;
}
}
-#else
- if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-yuv") == 0) {
- guint32 fourcc = 0;
- gst_structure_get_fourcc(structure, "format", &fourcc);
-
- int index = indexOfYuvColor(fourcc);
- if (index != -1) {
- pixelFormat = qt_yuvColorLookup[index].pixelFormat;
- if (bpp)
- *bpp = qt_yuvColorLookup[index].bitsPerPixel;
- }
- } else if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb") == 0) {
- int bitsPerPixel = 0;
- int depth = 0;
- int endianness = 0;
- int red = 0;
- int green = 0;
- int blue = 0;
- int alpha = 0;
-
- gst_structure_get_int(structure, "bpp", &bitsPerPixel);
- gst_structure_get_int(structure, "depth", &depth);
- gst_structure_get_int(structure, "endianness", &endianness);
- gst_structure_get_int(structure, "red_mask", &red);
- gst_structure_get_int(structure, "green_mask", &green);
- gst_structure_get_int(structure, "blue_mask", &blue);
- gst_structure_get_int(structure, "alpha_mask", &alpha);
-
- int index = indexOfRgbColor(bitsPerPixel, depth, endianness, red, green, blue, alpha);
-
- if (index != -1) {
- pixelFormat = qt_rgbColorLookup[index].pixelFormat;
- if (bpp)
- *bpp = qt_rgbColorLookup[index].bitsPerPixel;
- }
- }
-#endif
return pixelFormat;
}
@@ -1537,7 +1121,6 @@ QString QGstUtils::fileExtensionForMimeType(const QString &mimeType)
return extension;
}
-#if GST_CHECK_VERSION(0,10,30)
QVariant QGstUtils::fromGStreamerOrientation(const QVariant &value)
{
// Note gstreamer tokens either describe the counter clockwise rotation of the
@@ -1566,7 +1149,6 @@ QVariant QGstUtils::toGStreamerOrientation(const QVariant &value)
return QStringLiteral("rotate-0");
}
}
-#endif
bool QGstUtils::useOpenGL()
{
@@ -1576,80 +1158,38 @@ bool QGstUtils::useOpenGL()
void qt_gst_object_ref_sink(gpointer object)
{
-#if GST_CHECK_VERSION(0,10,24)
gst_object_ref_sink(object);
-#else
- g_return_if_fail (GST_IS_OBJECT(object));
-
- GST_OBJECT_LOCK(object);
- if (G_LIKELY(GST_OBJECT_IS_FLOATING(object))) {
- GST_OBJECT_FLAG_UNSET(object, GST_OBJECT_FLOATING);
- GST_OBJECT_UNLOCK(object);
- } else {
- GST_OBJECT_UNLOCK(object);
- gst_object_ref(object);
- }
-#endif
}
GstCaps *qt_gst_pad_get_current_caps(GstPad *pad)
{
-#if GST_CHECK_VERSION(1,0,0)
return gst_pad_get_current_caps(pad);
-#else
- return gst_pad_get_negotiated_caps(pad);
-#endif
}
GstCaps *qt_gst_pad_get_caps(GstPad *pad)
{
-#if GST_CHECK_VERSION(1,0,0)
return gst_pad_query_caps(pad, nullptr);
-#elif GST_CHECK_VERSION(0, 10, 26)
- return gst_pad_get_caps_reffed(pad);
-#else
- return gst_pad_get_caps(pad);
-#endif
}
GstStructure *qt_gst_structure_new_empty(const char *name)
{
-#if GST_CHECK_VERSION(1,0,0)
return gst_structure_new_empty(name);
-#else
- return gst_structure_new(name, nullptr);
-#endif
}
gboolean qt_gst_element_query_position(GstElement *element, GstFormat format, gint64 *cur)
{
-#if GST_CHECK_VERSION(1,0,0)
return gst_element_query_position(element, format, cur);
-#else
- return gst_element_query_position(element, &format, cur);
-#endif
}
gboolean qt_gst_element_query_duration(GstElement *element, GstFormat format, gint64 *cur)
{
-#if GST_CHECK_VERSION(1,0,0)
return gst_element_query_duration(element, format, cur);
-#else
- return gst_element_query_duration(element, &format, cur);
-#endif
}
GstCaps *qt_gst_caps_normalize(GstCaps *caps)
{
-#if GST_CHECK_VERSION(1,0,0)
// gst_caps_normalize() takes ownership of the argument in 1.0
return gst_caps_normalize(caps);
-#else
- // in 0.10, it doesn't. Unref the argument to mimic the 1.0 behavior
- GstCaps *res = gst_caps_normalize(caps);
- gst_caps_unref(caps);
- return res;
-#endif
}
const gchar *qt_gst_element_get_factory_name(GstElement *element)
@@ -1665,72 +1205,22 @@ const gchar *qt_gst_element_get_factory_name(GstElement *element)
gboolean qt_gst_caps_can_intersect(const GstCaps * caps1, const GstCaps * caps2)
{
-#if GST_CHECK_VERSION(0, 10, 25)
return gst_caps_can_intersect(caps1, caps2);
-#else
- GstCaps *intersection = gst_caps_intersect(caps1, caps2);
- gboolean res = !gst_caps_is_empty(intersection);
- gst_caps_unref(intersection);
- return res;
-#endif
}
-#if !GST_CHECK_VERSION(0, 10, 31)
-static gboolean qt_gst_videosink_factory_filter(GstPluginFeature *feature, gpointer)
-{
- guint rank;
- const gchar *klass;
-
- if (!GST_IS_ELEMENT_FACTORY(feature))
- return FALSE;
-
- klass = gst_element_factory_get_klass(GST_ELEMENT_FACTORY(feature));
- if (!(strstr(klass, "Sink") && strstr(klass, "Video")))
- return FALSE;
-
- rank = gst_plugin_feature_get_rank(feature);
- if (rank < GST_RANK_MARGINAL)
- return FALSE;
-
- return TRUE;
-}
-
-static gint qt_gst_compare_ranks(GstPluginFeature *f1, GstPluginFeature *f2)
-{
- gint diff;
-
- diff = gst_plugin_feature_get_rank(f2) - gst_plugin_feature_get_rank(f1);
- if (diff != 0)
- return diff;
-
- return strcmp(gst_plugin_feature_get_name(f2), gst_plugin_feature_get_name (f1));
-}
-#endif
-
GList *qt_gst_video_sinks()
{
GList *list = nullptr;
-#if GST_CHECK_VERSION(0, 10, 31)
list = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_SINK | GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO,
GST_RANK_MARGINAL);
-#else
- list = gst_registry_feature_filter(gst_registry_get_default(),
- (GstPluginFeatureFilter)qt_gst_videosink_factory_filter,
- FALSE, nullptr);
- list = g_list_sort(list, (GCompareFunc)qt_gst_compare_ranks);
-#endif
return list;
}
void qt_gst_util_double_to_fraction(gdouble src, gint *dest_n, gint *dest_d)
{
-#if GST_CHECK_VERSION(0, 10, 26)
gst_util_double_to_fraction(src, dest_n, dest_d);
-#else
- qt_real_to_fraction(src, dest_n, dest_d);
-#endif
}
QDebug operator <<(QDebug debug, GstCaps *caps)
diff --git a/src/gsttools/qgstutils_p.h b/src/gsttools/qgstutils_p.h
index 9ecf2e92c..d2c6b1dbd 100644
--- a/src/gsttools/qgstutils_p.h
+++ b/src/gsttools/qgstutils_p.h
@@ -63,19 +63,11 @@
#include <qvideoframe.h>
#include <QDebug>
-#if GST_CHECK_VERSION(1,0,0)
# define QT_GSTREAMER_PLAYBIN_ELEMENT_NAME "playbin"
# define QT_GSTREAMER_CAMERABIN_ELEMENT_NAME "camerabin"
# define QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME "videoconvert"
# define QT_GSTREAMER_RAW_AUDIO_MIME "audio/x-raw"
# define QT_GSTREAMER_VIDEOOVERLAY_INTERFACE_NAME "GstVideoOverlay"
-#else
-# define QT_GSTREAMER_PLAYBIN_ELEMENT_NAME "playbin2"
-# define QT_GSTREAMER_CAMERABIN_ELEMENT_NAME "camerabin2"
-# define QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME "ffmpegcolorspace"
-# define QT_GSTREAMER_RAW_AUDIO_MIME "audio/x-raw-int"
-# define QT_GSTREAMER_VIDEOOVERLAY_INTERFACE_NAME "GstXOverlay"
-#endif
QT_BEGIN_NAMESPACE
@@ -100,11 +92,7 @@ namespace QGstUtils {
Q_GSTTOOLS_EXPORT QSize capsResolution(const GstCaps *caps);
Q_GSTTOOLS_EXPORT QSize capsCorrectedResolution(const GstCaps *caps);
Q_GSTTOOLS_EXPORT QAudioFormat audioFormatForCaps(const GstCaps *caps);
-#if GST_CHECK_VERSION(1,0,0)
Q_GSTTOOLS_EXPORT QAudioFormat audioFormatForSample(GstSample *sample);
-#else
- Q_GSTTOOLS_EXPORT QAudioFormat audioFormatForBuffer(GstBuffer *buffer);
-#endif
Q_GSTTOOLS_EXPORT GstCaps *capsForAudioFormat(const QAudioFormat &format);
Q_GSTTOOLS_EXPORT void initializeGst();
Q_GSTTOOLS_EXPORT QMultimedia::SupportEstimate hasSupport(const QString &mimeType,
@@ -120,19 +108,11 @@ namespace QGstUtils {
Q_GSTTOOLS_EXPORT QSet<QString> supportedMimeTypes(bool (*isValidFactory)(GstElementFactory *factory));
-#if GST_CHECK_VERSION(1,0,0)
Q_GSTTOOLS_EXPORT QImage bufferToImage(GstBuffer *buffer, const GstVideoInfo &info);
Q_GSTTOOLS_EXPORT QVideoSurfaceFormat formatForCaps(
GstCaps *caps,
GstVideoInfo *info = 0,
QAbstractVideoBuffer::HandleType handleType = QAbstractVideoBuffer::NoHandle);
-#else
- Q_GSTTOOLS_EXPORT QImage bufferToImage(GstBuffer *buffer);
- Q_GSTTOOLS_EXPORT QVideoSurfaceFormat formatForCaps(
- GstCaps *caps,
- int *bytesPerLine = 0,
- QAbstractVideoBuffer::HandleType handleType = QAbstractVideoBuffer::NoHandle);
-#endif
Q_GSTTOOLS_EXPORT GstCaps *capsForFormats(const QList<QVideoFrame::PixelFormat> &formats);
void setFrameTimeStamps(QVideoFrame *frame, GstBuffer *buffer);
@@ -143,16 +123,14 @@ namespace QGstUtils {
Q_GSTTOOLS_EXPORT GstCaps *videoFilterCaps();
Q_GSTTOOLS_EXPORT QSize structureResolution(const GstStructure *s);
- Q_GSTTOOLS_EXPORT QVideoFrame::PixelFormat structurePixelFormat(const GstStructure *s, int *bpp = 0);
+ Q_GSTTOOLS_EXPORT QVideoFrame::PixelFormat structurePixelFormat(const GstStructure *s);
Q_GSTTOOLS_EXPORT QSize structurePixelAspectRatio(const GstStructure *s);
Q_GSTTOOLS_EXPORT QPair<qreal, qreal> structureFrameRateRange(const GstStructure *s);
Q_GSTTOOLS_EXPORT QString fileExtensionForMimeType(const QString &mimeType);
-#if GST_CHECK_VERSION(0,10,30)
Q_GSTTOOLS_EXPORT QVariant fromGStreamerOrientation(const QVariant &value);
Q_GSTTOOLS_EXPORT QVariant toGStreamerOrientation(const QVariant &value);
-#endif
Q_GSTTOOLS_EXPORT bool useOpenGL();
}
diff --git a/src/gsttools/qgstvideobuffer.cpp b/src/gsttools/qgstvideobuffer.cpp
index 245b7e024..699fcb348 100644
--- a/src/gsttools/qgstvideobuffer.cpp
+++ b/src/gsttools/qgstvideobuffer.cpp
@@ -41,33 +41,19 @@
QT_BEGIN_NAMESPACE
-#if GST_CHECK_VERSION(1,0,0)
QGstVideoBuffer::QGstVideoBuffer(GstBuffer *buffer, const GstVideoInfo &info)
: QAbstractPlanarVideoBuffer(NoHandle)
, m_videoInfo(info)
-#else
-QGstVideoBuffer::QGstVideoBuffer(GstBuffer *buffer, int bytesPerLine)
- : QAbstractVideoBuffer(NoHandle)
- , m_bytesPerLine(bytesPerLine)
-#endif
, m_buffer(buffer)
{
gst_buffer_ref(m_buffer);
}
-#if GST_CHECK_VERSION(1,0,0)
QGstVideoBuffer::QGstVideoBuffer(GstBuffer *buffer, const GstVideoInfo &info,
QGstVideoBuffer::HandleType handleType,
const QVariant &handle)
: QAbstractPlanarVideoBuffer(handleType)
, m_videoInfo(info)
-#else
-QGstVideoBuffer::QGstVideoBuffer(GstBuffer *buffer, int bytesPerLine,
- QGstVideoBuffer::HandleType handleType,
- const QVariant &handle)
- : QAbstractVideoBuffer(handleType)
- , m_bytesPerLine(bytesPerLine)
-#endif
, m_buffer(buffer)
, m_handle(handle)
{
@@ -87,8 +73,6 @@ QAbstractVideoBuffer::MapMode QGstVideoBuffer::mapMode() const
return m_mode;
}
-#if GST_CHECK_VERSION(1,0,0)
-
int QGstVideoBuffer::map(MapMode mode, int *numBytes, int bytesPerLine[4], uchar *data[4])
{
const GstMapFlags flags = GstMapFlags(((mode & ReadOnly) ? GST_MAP_READ : 0)
@@ -123,36 +107,14 @@ int QGstVideoBuffer::map(MapMode mode, int *numBytes, int bytesPerLine[4], uchar
return 0;
}
-#else
-
-uchar *QGstVideoBuffer::map(MapMode mode, int *numBytes, int *bytesPerLine)
-{
- if (mode != NotMapped && m_mode == NotMapped) {
- if (numBytes)
- *numBytes = m_buffer->size;
- if (bytesPerLine)
- *bytesPerLine = m_bytesPerLine;
-
- m_mode = mode;
-
- return m_buffer->data;
- } else {
- return 0;
- }
-}
-
-#endif
-
void QGstVideoBuffer::unmap()
{
-#if GST_CHECK_VERSION(1,0,0)
if (m_mode != NotMapped) {
if (m_videoInfo.finfo->n_planes == 0)
gst_buffer_unmap(m_buffer, &m_frame.map[0]);
else
gst_video_frame_unmap(&m_frame);
}
-#endif
m_mode = NotMapped;
}
diff --git a/src/gsttools/qgstvideobuffer_p.h b/src/gsttools/qgstvideobuffer_p.h
index b7de17e19..5f026ebc5 100644
--- a/src/gsttools/qgstvideobuffer_p.h
+++ b/src/gsttools/qgstvideobuffer_p.h
@@ -60,43 +60,25 @@
QT_BEGIN_NAMESPACE
-#if GST_CHECK_VERSION(1,0,0)
class Q_GSTTOOLS_EXPORT QGstVideoBuffer : public QAbstractPlanarVideoBuffer
{
public:
QGstVideoBuffer(GstBuffer *buffer, const GstVideoInfo &info);
QGstVideoBuffer(GstBuffer *buffer, const GstVideoInfo &info,
HandleType handleType, const QVariant &handle);
-#else
-class Q_GSTTOOLS_EXPORT QGstVideoBuffer : public QAbstractVideoBuffer
-{
-public:
- QGstVideoBuffer(GstBuffer *buffer, int bytesPerLine);
- QGstVideoBuffer(GstBuffer *buffer, int bytesPerLine,
- HandleType handleType, const QVariant &handle);
-#endif
~QGstVideoBuffer();
GstBuffer *buffer() const { return m_buffer; }
MapMode mapMode() const override;
-#if GST_CHECK_VERSION(1,0,0)
int map(MapMode mode, int *numBytes, int bytesPerLine[4], uchar *data[4]) override;
-#else
- uchar *map(MapMode mode, int *numBytes, int *bytesPerLine) override;
-#endif
-
void unmap() override;
QVariant handle() const override { return m_handle; }
private:
-#if GST_CHECK_VERSION(1,0,0)
GstVideoInfo m_videoInfo;
GstVideoFrame m_frame;
-#else
- int m_bytesPerLine = 0;
-#endif
GstBuffer *m_buffer = nullptr;
MapMode m_mode = NotMapped;
QVariant m_handle;
diff --git a/src/gsttools/qgstvideorenderersink.cpp b/src/gsttools/qgstvideorenderersink.cpp
index 8f4f59358..5257a1849 100644
--- a/src/gsttools/qgstvideorenderersink.cpp
+++ b/src/gsttools/qgstvideorenderersink.cpp
@@ -68,7 +68,7 @@
#if GST_GL_HAVE_PLATFORM_EGL
# include <gst/gl/egl/gstgldisplay_egl.h>
#endif
-#if GST_CHECK_VERSION(1,11,1) && GST_GL_HAVE_WINDOW_WAYLAND
+#if GST_GL_HAVE_WINDOW_WAYLAND
# include <gst/gl/wayland/gstgldisplay_wayland.h>
#endif
#endif // #if QT_CONFIG(gstreamer_gl)
@@ -347,7 +347,6 @@ static GstGLContext *gstGLDisplayContext(QAbstractVideoSurface *surface)
}
#endif
-#if GST_CHECK_VERSION(1,11,1)
#if GST_GL_HAVE_WINDOW_WAYLAND
if (!display && platform.startsWith(QLatin1String("wayland"))) {
const char *displayName = (platform == QLatin1String("wayland"))
@@ -357,7 +356,6 @@ static GstGLContext *gstGLDisplayContext(QAbstractVideoSurface *surface)
(struct wl_display *)pni->nativeResourceForIntegration(displayName));
}
#endif
-#endif
if (!display) {
qWarning() << "Could not create GstGLDisplay";
@@ -410,11 +408,7 @@ bool QVideoSurfaceGstDelegate::query(GstQuery *query)
gst_query_parse_context(query, &context);
context = context ? gst_context_copy(context) : gst_context_new(type, FALSE);
GstStructure *structure = gst_context_writable_structure(context);
-#if GST_CHECK_VERSION(1,11,1)
gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, m_gstGLDisplayContext, nullptr);
-#else
- gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, m_gstGLDisplayContext, nullptr);
-#endif
gst_query_set_context(query, context);
gst_context_unref(context);
diff --git a/src/gsttools/qvideosurfacegstsink.cpp b/src/gsttools/qvideosurfacegstsink.cpp
deleted file mode 100644
index c6951bdef..000000000
--- a/src/gsttools/qvideosurfacegstsink.cpp
+++ /dev/null
@@ -1,712 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2016 The Qt Company Ltd.
-** Contact: https://www.qt.io/licensing/
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see https://www.qt.io/terms-conditions. For further
-** information use the contact form at https://www.qt.io/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 3 as published by the Free Software
-** Foundation and appearing in the file LICENSE.LGPL3 included in the
-** packaging of this file. Please review the following information to
-** ensure the GNU Lesser General Public License version 3 requirements
-** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
-**
-** GNU General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU
-** General Public License version 2.0 or (at your option) the GNU General
-** Public license version 3 or any later version approved by the KDE Free
-** Qt Foundation. The licenses are as published by the Free Software
-** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
-** included in the packaging of this file. Please review the following
-** information to ensure the GNU General Public License requirements will
-** be met: https://www.gnu.org/licenses/gpl-2.0.html and
-** https://www.gnu.org/licenses/gpl-3.0.html.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-#include <qabstractvideosurface.h>
-#include <qvideoframe.h>
-#include <QDebug>
-#include <QMap>
-#include <QDebug>
-#include <QThread>
-
-#include <private/qmediapluginloader_p.h>
-#include "qgstvideobuffer_p.h"
-
-#include "qgstutils_p.h"
-#include "qvideosurfacegstsink_p.h"
-
-#if GST_VERSION_MAJOR >=1
-#include <gst/video/video.h>
-#endif
-
-//#define DEBUG_VIDEO_SURFACE_SINK
-
-QT_BEGIN_NAMESPACE
-
-Q_GLOBAL_STATIC_WITH_ARGS(QMediaPluginLoader, bufferPoolLoader,
- (QGstBufferPoolInterface_iid, QLatin1String("video/bufferpool"), Qt::CaseInsensitive))
-
-
-QVideoSurfaceGstDelegate::QVideoSurfaceGstDelegate(
- QAbstractVideoSurface *surface)
- : m_surface(surface)
-{
- if (m_surface) {
- const auto instances = bufferPoolLoader()->instances(QGstBufferPoolPluginKey);
- for (QObject *instance : instances) {
- auto plugin = qobject_cast<QGstBufferPoolInterface*>(instance);
-
- if (plugin) {
- m_pools.append(plugin);
- }
- }
-
- updateSupportedFormats();
- connect(m_surface, SIGNAL(supportedFormatsChanged()), this, SLOT(updateSupportedFormats()));
- }
-}
-
-QVideoSurfaceGstDelegate::~QVideoSurfaceGstDelegate()
-{
-}
-
-QList<QVideoFrame::PixelFormat> QVideoSurfaceGstDelegate::supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const
-{
- QMutexLocker locker(const_cast<QMutex *>(&m_mutex));
-
- if (!m_surface)
- return QList<QVideoFrame::PixelFormat>();
- else if (handleType == QAbstractVideoBuffer::NoHandle)
- return m_supportedPixelFormats;
- else if (handleType == m_pool->handleType())
- return m_supportedPoolPixelFormats;
- else
- return m_surface->supportedPixelFormats(handleType);
-}
-
-QVideoSurfaceFormat QVideoSurfaceGstDelegate::surfaceFormat() const
-{
- QMutexLocker locker(const_cast<QMutex *>(&m_mutex));
- return m_format;
-}
-
-bool QVideoSurfaceGstDelegate::start(const QVideoSurfaceFormat &format, int bytesPerLine)
-{
- if (!m_surface)
- return false;
-
- QMutexLocker locker(&m_mutex);
-
- m_format = format;
- m_bytesPerLine = bytesPerLine;
-
- if (QThread::currentThread() == thread()) {
- m_started = !m_surface.isNull() ? m_surface->start(m_format) : false;
- } else {
- m_started = false;
- m_startCanceled = false;
- QMetaObject::invokeMethod(this, "queuedStart", Qt::QueuedConnection);
-
- /*
- Waiting for start() to be invoked in the main thread may block
- if gstreamer blocks the main thread until this call is finished.
- This situation is rare and usually caused by setState(Null)
- while pipeline is being prerolled.
-
- The proper solution to this involves controlling gstreamer pipeline from
- other thread than video surface.
-
- Currently start() fails if wait() timed out.
- */
- if (!m_setupCondition.wait(&m_mutex, 1000)) {
- qWarning() << "Failed to start video surface due to main thread blocked.";
- m_startCanceled = true;
- }
- }
-
- m_format = m_surface->surfaceFormat();
-
- return m_started;
-}
-
-void QVideoSurfaceGstDelegate::stop()
-{
- if (!m_surface)
- return;
-
- QMutexLocker locker(&m_mutex);
-
- if (QThread::currentThread() == thread()) {
- if (!m_surface.isNull())
- m_surface->stop();
- } else {
- QMetaObject::invokeMethod(this, "queuedStop", Qt::QueuedConnection);
-
- // Waiting for stop() to be invoked in the main thread may block
- // if gstreamer blocks the main thread until this call is finished.
- m_setupCondition.wait(&m_mutex, 500);
- }
-
- m_started = false;
-}
-
-void QVideoSurfaceGstDelegate::unlock()
-{
- QMutexLocker locker(&m_mutex);
-
- m_startCanceled = true;
- m_setupCondition.wakeAll();
- m_renderCondition.wakeAll();
-}
-
-bool QVideoSurfaceGstDelegate::isActive()
-{
- QMutexLocker locker(&m_mutex);
- return !m_surface.isNull() && m_surface->isActive();
-}
-
-void QVideoSurfaceGstDelegate::clearPoolBuffers()
-{
- QMutexLocker locker(&m_poolMutex);
- if (m_pool)
- m_pool->clear();
-}
-
-void QVideoSurfaceGstDelegate::flush()
-{
- QMutexLocker locker(&m_mutex);
-
- m_frame = QVideoFrame();
- m_renderCondition.wakeAll();
-
- if (QThread::currentThread() == thread()) {
- if (!m_surface.isNull())
- m_surface->present(m_frame);
- } else {
- QMetaObject::invokeMethod(this, "queuedFlush", Qt::QueuedConnection);
- }
-}
-
-GstFlowReturn QVideoSurfaceGstDelegate::render(GstBuffer *buffer)
-{
- if (!m_surface) {
- qWarning() << "Rendering video frame to deleted surface, skip.";
- //return GST_FLOW_NOT_NEGOTIATED;
- return GST_FLOW_OK;
- }
-
- QMutexLocker locker(&m_mutex);
-
- QAbstractVideoBuffer *videoBuffer = 0;
-
- if (m_pool)
- videoBuffer = m_pool->prepareVideoBuffer(buffer, m_bytesPerLine);
-
- if (!videoBuffer)
- videoBuffer = new QGstVideoBuffer(buffer, m_bytesPerLine);
-
- m_frame = QVideoFrame(
- videoBuffer,
- m_format.frameSize(),
- m_format.pixelFormat());
-
- QGstUtils::setFrameTimeStamps(&m_frame, buffer);
-
- m_renderReturn = GST_FLOW_OK;
-
- if (QThread::currentThread() == thread()) {
- if (!m_surface.isNull())
- m_surface->present(m_frame);
- else
- qWarning() << "m_surface.isNull().";
- } else {
- QMetaObject::invokeMethod(this, "queuedRender", Qt::QueuedConnection);
- m_renderCondition.wait(&m_mutex, 300);
- }
-
- m_frame = QVideoFrame();
- return m_renderReturn;
-}
-
-void QVideoSurfaceGstDelegate::queuedStart()
-{
- QMutexLocker locker(&m_mutex);
-
- if (!m_startCanceled) {
- m_started = m_surface->start(m_format);
- m_setupCondition.wakeAll();
- }
-}
-
-void QVideoSurfaceGstDelegate::queuedStop()
-{
- QMutexLocker locker(&m_mutex);
-
- m_surface->stop();
-
- m_setupCondition.wakeAll();
-}
-
-void QVideoSurfaceGstDelegate::queuedFlush()
-{
- QMutexLocker locker(&m_mutex);
-
- if (!m_surface.isNull())
- m_surface->present(QVideoFrame());
-}
-
-void QVideoSurfaceGstDelegate::queuedRender()
-{
- QMutexLocker locker(&m_mutex);
-
- if (!m_frame.isValid())
- return;
-
- if (m_surface.isNull()) {
- qWarning() << "Rendering video frame to deleted surface, skip the frame";
- m_renderReturn = GST_FLOW_OK;
- } else if (m_surface->present(m_frame)) {
- m_renderReturn = GST_FLOW_OK;
- } else {
- switch (m_surface->error()) {
- case QAbstractVideoSurface::NoError:
- m_renderReturn = GST_FLOW_OK;
- break;
- case QAbstractVideoSurface::StoppedError:
- //It's likely we are in process of changing video output
- //and the surface is already stopped, ignore the frame
- m_renderReturn = GST_FLOW_OK;
- break;
- default:
- qWarning() << "Failed to render video frame:" << m_surface->error();
- m_renderReturn = GST_FLOW_OK;
- break;
- }
- }
-
- m_renderCondition.wakeAll();
-}
-
-void QVideoSurfaceGstDelegate::updateSupportedFormats()
-{
- QGstBufferPoolInterface *newPool = 0;
- for (QGstBufferPoolInterface *pool : qAsConst(m_pools)) {
- if (!m_surface->supportedPixelFormats(pool->handleType()).isEmpty()) {
- newPool = pool;
- break;
- }
- }
-
- if (newPool != m_pool) {
- QMutexLocker lock(&m_poolMutex);
-
- if (m_pool)
- m_pool->clear();
- m_pool = newPool;
- }
-
- QMutexLocker locker(&m_mutex);
-
- m_supportedPixelFormats.clear();
- m_supportedPoolPixelFormats.clear();
- if (m_surface) {
- m_supportedPixelFormats = m_surface->supportedPixelFormats();
- if (m_pool)
- m_supportedPoolPixelFormats = m_surface->supportedPixelFormats(m_pool->handleType());
- }
-}
-
-static GstVideoSinkClass *sink_parent_class;
-
-#define VO_SINK(s) QVideoSurfaceGstSink *sink(reinterpret_cast<QVideoSurfaceGstSink *>(s))
-
-QVideoSurfaceGstSink *QVideoSurfaceGstSink::createSink(QAbstractVideoSurface *surface)
-{
- QVideoSurfaceGstSink *sink = reinterpret_cast<QVideoSurfaceGstSink *>(
- g_object_new(QVideoSurfaceGstSink::get_type(), 0));
-
- sink->delegate = new QVideoSurfaceGstDelegate(surface);
-
- g_signal_connect(G_OBJECT(sink), "notify::show-preroll-frame", G_CALLBACK(handleShowPrerollChange), sink);
-
- return sink;
-}
-
-GType QVideoSurfaceGstSink::get_type()
-{
- static GType type = 0;
-
- if (type == 0) {
- static const GTypeInfo info =
- {
- sizeof(QVideoSurfaceGstSinkClass), // class_size
- base_init, // base_init
- nullptr, // base_finalize
- class_init, // class_init
- nullptr, // class_finalize
- nullptr, // class_data
- sizeof(QVideoSurfaceGstSink), // instance_size
- 0, // n_preallocs
- instance_init, // instance_init
- 0 // value_table
- };
-
- type = g_type_register_static(
- GST_TYPE_VIDEO_SINK, "QVideoSurfaceGstSink", &info, GTypeFlags(0));
- }
-
- return type;
-}
-
-void QVideoSurfaceGstSink::class_init(gpointer g_class, gpointer class_data)
-{
- Q_UNUSED(class_data);
-
- sink_parent_class = reinterpret_cast<GstVideoSinkClass *>(g_type_class_peek_parent(g_class));
-
- GstBaseSinkClass *base_sink_class = reinterpret_cast<GstBaseSinkClass *>(g_class);
- base_sink_class->get_caps = QVideoSurfaceGstSink::get_caps;
- base_sink_class->set_caps = QVideoSurfaceGstSink::set_caps;
- base_sink_class->buffer_alloc = QVideoSurfaceGstSink::buffer_alloc;
- base_sink_class->start = QVideoSurfaceGstSink::start;
- base_sink_class->stop = QVideoSurfaceGstSink::stop;
- base_sink_class->unlock = QVideoSurfaceGstSink::unlock;
-
-#if GST_CHECK_VERSION(0, 10, 25)
- GstVideoSinkClass *video_sink_class = reinterpret_cast<GstVideoSinkClass *>(g_class);
- video_sink_class->show_frame = QVideoSurfaceGstSink::show_frame;
-#else
- base_sink_class->preroll = QVideoSurfaceGstSink::preroll;
- base_sink_class->render = QVideoSurfaceGstSink::render;
-#endif
-
- GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class);
- element_class->change_state = QVideoSurfaceGstSink::change_state;
-
- GObjectClass *object_class = reinterpret_cast<GObjectClass *>(g_class);
- object_class->finalize = QVideoSurfaceGstSink::finalize;
-}
-
-void QVideoSurfaceGstSink::base_init(gpointer g_class)
-{
- static GstStaticPadTemplate sink_pad_template = GST_STATIC_PAD_TEMPLATE(
- "sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS(
- "video/x-raw-rgb, "
- "framerate = (fraction) [ 0, MAX ], "
- "width = (int) [ 1, MAX ], "
- "height = (int) [ 1, MAX ]; "
- "video/x-raw-yuv, "
- "framerate = (fraction) [ 0, MAX ], "
- "width = (int) [ 1, MAX ], "
- "height = (int) [ 1, MAX ]"));
-
- gst_element_class_add_pad_template(
- GST_ELEMENT_CLASS(g_class), gst_static_pad_template_get(&sink_pad_template));
-}
-
-void QVideoSurfaceGstSink::instance_init(GTypeInstance *instance, gpointer g_class)
-{
- VO_SINK(instance);
-
- Q_UNUSED(g_class);
-
- sink->delegate = 0;
-
- sink->lastRequestedCaps = 0;
- sink->lastBufferCaps = 0;
- sink->lastSurfaceFormat = new QVideoSurfaceFormat;
-}
-
-void QVideoSurfaceGstSink::finalize(GObject *object)
-{
- VO_SINK(object);
-
- delete sink->lastSurfaceFormat;
- sink->lastSurfaceFormat = 0;
-
- if (sink->lastBufferCaps)
- gst_caps_unref(sink->lastBufferCaps);
- sink->lastBufferCaps = 0;
-
- if (sink->lastRequestedCaps)
- gst_caps_unref(sink->lastRequestedCaps);
- sink->lastRequestedCaps = 0;
-
- delete sink->delegate;
-
- // Chain up
- G_OBJECT_CLASS(sink_parent_class)->finalize(object);
-}
-
-void QVideoSurfaceGstSink::handleShowPrerollChange(GObject *o, GParamSpec *p, gpointer d)
-{
- Q_UNUSED(o);
- Q_UNUSED(p);
- QVideoSurfaceGstSink *sink = reinterpret_cast<QVideoSurfaceGstSink *>(d);
-
- gboolean showPrerollFrame = true; // "show-preroll-frame" property is true by default
- g_object_get(G_OBJECT(sink), "show-preroll-frame", &showPrerollFrame, nullptr);
-
- if (!showPrerollFrame) {
- GstState state = GST_STATE_VOID_PENDING;
- gst_element_get_state(GST_ELEMENT(sink), &state, nullptr, GST_CLOCK_TIME_NONE);
- // show-preroll-frame being set to 'false' while in GST_STATE_PAUSED means
- // the QMediaPlayer was stopped from the paused state.
- // We need to flush the current frame.
- if (state == GST_STATE_PAUSED)
- sink->delegate->flush();
- }
-}
-
-GstStateChangeReturn QVideoSurfaceGstSink::change_state(GstElement *element, GstStateChange transition)
-{
- QVideoSurfaceGstSink *sink = reinterpret_cast<QVideoSurfaceGstSink *>(element);
-
- gboolean showPrerollFrame = true; // "show-preroll-frame" property is true by default
- g_object_get(G_OBJECT(element), "show-preroll-frame", &showPrerollFrame, nullptr);
-
- // If show-preroll-frame is 'false' when transitioning from GST_STATE_PLAYING to
- // GST_STATE_PAUSED, it means the QMediaPlayer was stopped.
- // We need to flush the current frame.
- if (transition == GST_STATE_CHANGE_PLAYING_TO_PAUSED && !showPrerollFrame)
- sink->delegate->flush();
-
- return GST_ELEMENT_CLASS(sink_parent_class)->change_state(element, transition);
-}
-
-GstCaps *QVideoSurfaceGstSink::get_caps(GstBaseSink *base)
-{
- VO_SINK(base);
-
- // Find the supported pixel formats
- // with buffer pool specific formats listed first
- QList<QVideoFrame::PixelFormat> supportedFormats;
-
- QList<QVideoFrame::PixelFormat> poolHandleFormats;
- sink->delegate->poolMutex()->lock();
- QGstBufferPoolInterface *pool = sink->delegate->pool();
-
- if (pool)
- poolHandleFormats = sink->delegate->supportedPixelFormats(pool->handleType());
- sink->delegate->poolMutex()->unlock();
-
- supportedFormats = poolHandleFormats;
- const auto supportedPixelFormats = sink->delegate->supportedPixelFormats();
- for (QVideoFrame::PixelFormat format : supportedPixelFormats) {
- if (!poolHandleFormats.contains(format))
- supportedFormats.append(format);
- }
-
- return QGstUtils::capsForFormats(supportedFormats);
-}
-
-gboolean QVideoSurfaceGstSink::set_caps(GstBaseSink *base, GstCaps *caps)
-{
- VO_SINK(base);
-
-#ifdef DEBUG_VIDEO_SURFACE_SINK
- qDebug() << "set_caps:";
- qDebug() << gst_caps_to_string(caps);
-#endif
-
- if (!caps) {
- sink->delegate->stop();
-
- return TRUE;
- } else {
- int bytesPerLine = 0;
- QGstBufferPoolInterface *pool = sink->delegate->pool();
- QAbstractVideoBuffer::HandleType handleType =
- pool ? pool->handleType() : QAbstractVideoBuffer::NoHandle;
-
- QVideoSurfaceFormat format = QGstUtils::formatForCaps(caps, &bytesPerLine, handleType);
-
- if (sink->delegate->isActive()) {
- QVideoSurfaceFormat surfaceFormst = sink->delegate->surfaceFormat();
-
- if (format.pixelFormat() == surfaceFormst.pixelFormat() &&
- format.frameSize() == surfaceFormst.frameSize())
- return TRUE;
- else
- sink->delegate->stop();
- }
-
- if (sink->lastRequestedCaps)
- gst_caps_unref(sink->lastRequestedCaps);
- sink->lastRequestedCaps = 0;
-
-#ifdef DEBUG_VIDEO_SURFACE_SINK
- qDebug() << "Starting video surface, format:";
- qDebug() << format;
- qDebug() << "bytesPerLine:" << bytesPerLine;
-#endif
-
- if (sink->delegate->start(format, bytesPerLine))
- return TRUE;
- else
- qWarning() << "Failed to start video surface";
- }
-
- return FALSE;
-}
-
-GstFlowReturn QVideoSurfaceGstSink::buffer_alloc(
- GstBaseSink *base, guint64 offset, guint size, GstCaps *caps, GstBuffer **buffer)
-{
- VO_SINK(base);
-
- Q_UNUSED(offset);
- Q_UNUSED(size);
-
- if (!buffer)
- return GST_FLOW_ERROR;
-
- *buffer = nullptr;
-
- if (!sink->delegate->pool())
- return GST_FLOW_OK;
-
- QMutexLocker poolLock(sink->delegate->poolMutex());
- QGstBufferPoolInterface *pool = sink->delegate->pool();
-
- if (!pool)
- return GST_FLOW_OK;
-
- if (sink->lastRequestedCaps && gst_caps_is_equal(sink->lastRequestedCaps, caps)) {
- //qDebug() << "reusing last caps";
- *buffer = GST_BUFFER(pool->takeBuffer(*sink->lastSurfaceFormat, sink->lastBufferCaps));
- return GST_FLOW_OK;
- }
-
- if (sink->delegate->supportedPixelFormats(pool->handleType()).isEmpty()) {
- //qDebug() << "sink doesn't support native pool buffers, skip buffers allocation";
- return GST_FLOW_OK;
- }
-
- poolLock.unlock();
-
- GstCaps *intersection = gst_caps_intersect(get_caps(GST_BASE_SINK(sink)), caps);
-
- if (gst_caps_is_empty (intersection)) {
- gst_caps_unref(intersection);
- return GST_FLOW_NOT_NEGOTIATED;
- }
-
- if (sink->delegate->isActive()) {
- //if format was changed, restart the surface
- QVideoSurfaceFormat format = QGstUtils::formatForCaps(intersection);
- QVideoSurfaceFormat surfaceFormat = sink->delegate->surfaceFormat();
-
- if (format.pixelFormat() != surfaceFormat.pixelFormat() ||
- format.frameSize() != surfaceFormat.frameSize()) {
-#ifdef DEBUG_VIDEO_SURFACE_SINK
- qDebug() << "new format requested, restart video surface";
-#endif
- sink->delegate->stop();
- }
- }
-
- if (!sink->delegate->isActive()) {
- int bytesPerLine = 0;
- QGstBufferPoolInterface *pool = sink->delegate->pool();
- QAbstractVideoBuffer::HandleType handleType =
- pool ? pool->handleType() : QAbstractVideoBuffer::NoHandle;
-
- QVideoSurfaceFormat format = QGstUtils::formatForCaps(intersection, &bytesPerLine, handleType);
-
- if (!sink->delegate->start(format, bytesPerLine)) {
- qWarning() << "failed to start video surface";
- return GST_FLOW_NOT_NEGOTIATED;
- }
- }
-
- poolLock.relock();
- pool = sink->delegate->pool();
-
- QVideoSurfaceFormat surfaceFormat = sink->delegate->surfaceFormat();
-
- if (!pool->isFormatSupported(surfaceFormat)) {
- qDebug() << "sink doesn't support native pool format, skip custom buffers allocation";
- return GST_FLOW_OK;
- }
-
- if (sink->lastRequestedCaps)
- gst_caps_unref(sink->lastRequestedCaps);
- sink->lastRequestedCaps = caps;
- gst_caps_ref(sink->lastRequestedCaps);
-
- if (sink->lastBufferCaps)
- gst_caps_unref(sink->lastBufferCaps);
- sink->lastBufferCaps = intersection;
- gst_caps_ref(sink->lastBufferCaps);
-
- *sink->lastSurfaceFormat = surfaceFormat;
-
- *buffer = GST_BUFFER(pool->takeBuffer(surfaceFormat, intersection));
-
- return GST_FLOW_OK;
-}
-
-gboolean QVideoSurfaceGstSink::start(GstBaseSink *base)
-{
- Q_UNUSED(base);
- return TRUE;
-}
-
-gboolean QVideoSurfaceGstSink::stop(GstBaseSink *base)
-{
- VO_SINK(base);
- sink->delegate->clearPoolBuffers();
-
- return TRUE;
-}
-
-gboolean QVideoSurfaceGstSink::unlock(GstBaseSink *base)
-{
- VO_SINK(base);
- sink->delegate->unlock();
- return TRUE;
-}
-
-#if GST_CHECK_VERSION(0, 10, 25)
-GstFlowReturn QVideoSurfaceGstSink::show_frame(GstVideoSink *base, GstBuffer *buffer)
-{
- VO_SINK(base);
- return sink->delegate->render(buffer);
-}
-#else
-GstFlowReturn QVideoSurfaceGstSink::preroll(GstBaseSink *base, GstBuffer *buffer)
-{
- VO_SINK(base);
- gboolean showPrerollFrame = true;
- g_object_get(G_OBJECT(sink), "show-preroll-frame", &showPrerollFrame, nullptr);
-
- if (showPrerollFrame)
- return sink->delegate->render(buffer);
-
- return GST_FLOW_OK;
-}
-
-GstFlowReturn QVideoSurfaceGstSink::render(GstBaseSink *base, GstBuffer *buffer)
-{
- VO_SINK(base);
- return sink->delegate->render(buffer);
-}
-#endif
-
-QT_END_NAMESPACE
diff --git a/src/gsttools/qvideosurfacegstsink_p.h b/src/gsttools/qvideosurfacegstsink_p.h
deleted file mode 100644
index dd758ff82..000000000
--- a/src/gsttools/qvideosurfacegstsink_p.h
+++ /dev/null
@@ -1,192 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2016 The Qt Company Ltd.
-** Contact: https://www.qt.io/licensing/
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see https://www.qt.io/terms-conditions. For further
-** information use the contact form at https://www.qt.io/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 3 as published by the Free Software
-** Foundation and appearing in the file LICENSE.LGPL3 included in the
-** packaging of this file. Please review the following information to
-** ensure the GNU Lesser General Public License version 3 requirements
-** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
-**
-** GNU General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU
-** General Public License version 2.0 or (at your option) the GNU General
-** Public license version 3 or any later version approved by the KDE Free
-** Qt Foundation. The licenses are as published by the Free Software
-** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
-** included in the packaging of this file. Please review the following
-** information to ensure the GNU General Public License requirements will
-** be met: https://www.gnu.org/licenses/gpl-2.0.html and
-** https://www.gnu.org/licenses/gpl-3.0.html.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-#ifndef VIDEOSURFACEGSTSINK_P_H
-#define VIDEOSURFACEGSTSINK_P_H
-
-//
-// W A R N I N G
-// -------------
-//
-// This file is not part of the Qt API. It exists purely as an
-// implementation detail. This header file may change from version to
-// version without notice, or even be removed.
-//
-// We mean it.
-//
-
-#include <gst/gst.h>
-
-#if GST_CHECK_VERSION(1,0,0)
-
-#include "qgstvideorenderersink_p.h"
-
-QT_BEGIN_NAMESPACE
-typedef QGstVideoRendererSink QVideoSurfaceGstSink;
-QT_END_NAMESPACE
-
-#else
-
-#include <gst/video/gstvideosink.h>
-
-#include <QtCore/qlist.h>
-#include <QtCore/qmutex.h>
-#include <QtCore/qqueue.h>
-#include <QtCore/qpointer.h>
-#include <QtCore/qwaitcondition.h>
-#include <qvideosurfaceformat.h>
-#include <qvideoframe.h>
-#include <qabstractvideobuffer.h>
-
-#include "qgstbufferpoolinterface_p.h"
-
-QT_BEGIN_NAMESPACE
-class QAbstractVideoSurface;
-
-class QVideoSurfaceGstDelegate : public QObject
-{
- Q_OBJECT
-public:
- QVideoSurfaceGstDelegate(QAbstractVideoSurface *surface);
- ~QVideoSurfaceGstDelegate();
-
- QList<QVideoFrame::PixelFormat> supportedPixelFormats(
- QAbstractVideoBuffer::HandleType handleType = QAbstractVideoBuffer::NoHandle) const;
-
- QVideoSurfaceFormat surfaceFormat() const;
-
- bool start(const QVideoSurfaceFormat &format, int bytesPerLine);
- void stop();
-
- void unlock();
-
- bool isActive();
-
- QGstBufferPoolInterface *pool() { return m_pool; }
- QMutex *poolMutex() { return &m_poolMutex; }
- void clearPoolBuffers();
-
- void flush();
-
- GstFlowReturn render(GstBuffer *buffer);
-
-private slots:
- void queuedStart();
- void queuedStop();
- void queuedFlush();
- void queuedRender();
-
- void updateSupportedFormats();
-
-private:
- QPointer<QAbstractVideoSurface> m_surface;
- QList<QVideoFrame::PixelFormat> m_supportedPixelFormats;
- //pixel formats of buffers pool native type
- QList<QVideoFrame::PixelFormat> m_supportedPoolPixelFormats;
- QGstBufferPoolInterface *m_pool = nullptr;
- QList<QGstBufferPoolInterface *> m_pools;
- QMutex m_poolMutex;
- QMutex m_mutex;
- QWaitCondition m_setupCondition;
- QWaitCondition m_renderCondition;
- QVideoSurfaceFormat m_format;
- QVideoFrame m_frame;
- GstFlowReturn m_renderReturn = GST_FLOW_ERROR;
- int m_bytesPerLine = 0;
- bool m_started = false;
- bool m_startCanceled = false;
-};
-
-class QVideoSurfaceGstSink
-{
-public:
- GstVideoSink parent;
-
- static QVideoSurfaceGstSink *createSink(QAbstractVideoSurface *surface);
- static void setSurface(QAbstractVideoSurface *surface) { Q_UNUSED(surface); }
-
-private:
- static GType get_type();
- static void class_init(gpointer g_class, gpointer class_data);
- static void base_init(gpointer g_class);
- static void instance_init(GTypeInstance *instance, gpointer g_class);
-
- static void finalize(GObject *object);
-
- static void handleShowPrerollChange(GObject *o, GParamSpec *p, gpointer d);
-
- static GstStateChangeReturn change_state(GstElement *element, GstStateChange transition);
-
- static GstCaps *get_caps(GstBaseSink *sink);
- static gboolean set_caps(GstBaseSink *sink, GstCaps *caps);
-
- static GstFlowReturn buffer_alloc(
- GstBaseSink *sink, guint64 offset, guint size, GstCaps *caps, GstBuffer **buffer);
-
- static gboolean start(GstBaseSink *sink);
- static gboolean stop(GstBaseSink *sink);
-
- static gboolean unlock(GstBaseSink *sink);
-
-#if GST_CHECK_VERSION(0, 10, 25)
- static GstFlowReturn show_frame(GstVideoSink *sink, GstBuffer *buffer);
-#else
- static GstFlowReturn preroll(GstBaseSink *sink, GstBuffer *buffer);
- static GstFlowReturn render(GstBaseSink *sink, GstBuffer *buffer);
-#endif
-
-private:
- QVideoSurfaceGstDelegate *delegate = nullptr;
-
- GstCaps *lastRequestedCaps = nullptr;
- GstCaps *lastBufferCaps = nullptr;
- QVideoSurfaceFormat *lastSurfaceFormat = nullptr;
-};
-
-class QVideoSurfaceGstSinkClass
-{
-public:
- GstVideoSinkClass parent_class;
-};
-
-QT_END_NAMESPACE
-
-#endif
-
-#endif
diff --git a/src/multimedia/configure.json b/src/multimedia/configure.json
index 56004152d..8d5cce826 100644
--- a/src/multimedia/configure.json
+++ b/src/multimedia/configure.json
@@ -12,7 +12,7 @@
"evr": { "type": "boolean" },
"directshow": { "type": "boolean" },
"wmf": { "type": "boolean" },
- "gstreamer": { "type": "optionalString", "values": [ "no", "yes", "0.10", "1.0" ] },
+ "gstreamer": { "type": "optionalString", "values": [ "no", "yes" ] },
"pulseaudio": "boolean"
}
},
@@ -39,15 +39,6 @@
{ "libs": "-lstrmiids -ldmoguids -luuid -lmsdmo -lole32 -loleaut32" }
]
},
- "gstreamer_0_10": {
- "label": "GStreamer 0.10",
- "export": "gstreamer",
- "test": "gstreamer",
- "sources": [
- { "type": "pkgConfig",
- "args": "gstreamer-0.10 gstreamer-base-0.10 gstreamer-audio-0.10 gstreamer-video-0.10 gstreamer-pbutils-0.10 gstreamer-interfaces-0.10" }
- ]
- },
"gstreamer_1_0": {
"label": "GStreamer 1.0",
"export": "gstreamer",
@@ -60,15 +51,6 @@
{ "libs": "", "condition": "config.android && input.gstreamer != ''" }
]
},
- "gstreamer_app_0_10": {
- "label": "GStreamer App 0.10",
- "export": "gstreamer_app",
- "test": "gstreamer_appsrc",
- "use": "gstreamer_0_10",
- "sources": [
- { "type": "pkgConfig", "args": "gstreamer-app-0.10" }
- ]
- },
"gstreamer_app_1_0": {
"label": "GStreamer App 1.0",
"export": "gstreamer_app",
@@ -80,15 +62,6 @@
{ "libs": "", "condition": "config.android && input.gstreamer != ''" }
]
},
- "gstreamer_photography_0_10": {
- "label": "GStreamer Photography 0.10",
- "export": "gstreamer_photography",
- "test": "gstreamer_photography",
- "use": "gstreamer_0_10",
- "sources": [
- { "libs": "-lgstphotography-0.10" }
- ]
- },
"gstreamer_photography_1_0": {
"label": "GStreamer Photography 1.0",
"export": "gstreamer_photography",
@@ -188,27 +161,20 @@
"condition": "config.win32 && tests.evr",
"output": [ "feature", "privateFeature" ]
},
- "gstreamer_0_10": {
- "label": "GStreamer 0.10",
- "disable": "input.gstreamer == '1.0' || input.gstreamer == 'no'",
- "enable": "input.gstreamer == '0.10'",
- "condition": "!features.gstreamer_1_0 && libs.gstreamer_0_10",
- "output": [ "privateFeature" ]
- },
"gstreamer_1_0": {
"label": "GStreamer 1.0",
- "disable": "input.gstreamer == '0.10' || input.gstreamer == 'no'",
- "enable": "input.gstreamer == '1.0'",
+ "disable": "input.gstreamer == 'no'",
+ "enable": "input.gstreamer == 'yes'",
"condition": "libs.gstreamer_1_0",
"output": [ "privateFeature" ]
},
"gstreamer": {
- "condition": "features.gstreamer_1_0 || features.gstreamer_0_10",
+ "condition": "features.gstreamer_1_0",
"output": [ "privateFeature" ]
},
"gstreamer_app": {
"label": "GStreamer App",
- "condition": "(features.gstreamer_1_0 && libs.gstreamer_app_1_0) || (features.gstreamer_0_10 && libs.gstreamer_app_0_10)",
+ "condition": "(features.gstreamer_1_0 && libs.gstreamer_app_1_0)",
"output": [ "privateFeature" ]
},
"gstreamer_encodingprofiles": {
@@ -218,7 +184,7 @@
},
"gstreamer_photography": {
"label": "GStreamer Photography",
- "condition": "(features.gstreamer_1_0 && libs.gstreamer_photography_1_0) || (features.gstreamer_0_10 && libs.gstreamer_photography_0_10)",
+ "condition": "(features.gstreamer_1_0 && libs.gstreamer_photography_1_0)",
"output": [ "privateFeature" ]
},
"gstreamer_gl": {
@@ -274,7 +240,6 @@
"entries": [
"alsa",
"gstreamer_1_0",
- "gstreamer_0_10",
"linux_v4l",
"pulseaudio",
"mmrenderer",
diff --git a/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecoderserviceplugin.cpp b/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecoderserviceplugin.cpp
index 463ed2d92..d9e6a2a38 100644
--- a/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecoderserviceplugin.cpp
+++ b/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecoderserviceplugin.cpp
@@ -76,15 +76,9 @@ QMultimedia::SupportEstimate QGstreamerAudioDecoderServicePlugin::hasSupport(con
static bool isDecoderOrDemuxer(GstElementFactory *factory)
{
-#if GST_CHECK_VERSION(0, 10, 31)
return gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_DEMUXER)
|| gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_DECODER
| GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO);
-#else
- return (factory
- && (qstrcmp(factory->details.klass, "Codec/Decoder/Audio") == 0
- || qstrcmp(factory->details.klass, "Codec/Demux") == 0));
-#endif
}
void QGstreamerAudioDecoderServicePlugin::updateSupportedMimeTypes() const
diff --git a/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecodersession.cpp b/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecodersession.cpp
index d6b8ad759..185494f65 100644
--- a/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecodersession.cpp
+++ b/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecodersession.cpp
@@ -451,7 +451,6 @@ QAudioBuffer QGstreamerAudioDecoderSession::read()
const char* bufferData = 0;
int bufferSize = 0;
-#if GST_CHECK_VERSION(1,0,0)
GstSample *sample = gst_app_sink_pull_sample(m_appSink);
GstBuffer *buffer = gst_sample_get_buffer(sample);
GstMapInfo mapInfo;
@@ -459,12 +458,6 @@ QAudioBuffer QGstreamerAudioDecoderSession::read()
bufferData = (const char*)mapInfo.data;
bufferSize = mapInfo.size;
QAudioFormat format = QGstUtils::audioFormatForSample(sample);
-#else
- GstBuffer *buffer = gst_app_sink_pull_buffer(m_appSink);
- bufferData = (const char*)buffer->data;
- bufferSize = buffer->size;
- QAudioFormat format = QGstUtils::audioFormatForBuffer(buffer);
-#endif
if (format.isValid()) {
// XXX At the moment we have to copy data from GstBuffer into QAudioBuffer.
@@ -477,12 +470,8 @@ QAudioBuffer QGstreamerAudioDecoderSession::read()
emit positionChanged(m_position);
}
}
-#if GST_CHECK_VERSION(1,0,0)
gst_buffer_unmap(buffer, &mapInfo);
gst_sample_unref(sample);
-#else
- gst_buffer_unref(buffer);
-#endif
}
return audioBuffer;
@@ -553,11 +542,7 @@ void QGstreamerAudioDecoderSession::addAppSink()
GstAppSinkCallbacks callbacks;
memset(&callbacks, 0, sizeof(callbacks));
-#if GST_CHECK_VERSION(1,0,0)
callbacks.new_sample = &new_sample;
-#else
- callbacks.new_buffer = &new_sample;
-#endif
gst_app_sink_set_callbacks(m_appSink, &callbacks, this, NULL);
gst_app_sink_set_max_buffers(m_appSink, MAX_BUFFERS_IN_QUEUE);
gst_base_sink_set_sync(GST_BASE_SINK(m_appSink), FALSE);
diff --git a/src/plugins/gstreamer/camerabin/camerabinexposure.cpp b/src/plugins/gstreamer/camerabin/camerabinexposure.cpp
index 35d3e7c8f..65a8539d5 100644
--- a/src/plugins/gstreamer/camerabin/camerabinexposure.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinexposure.cpp
@@ -43,10 +43,6 @@
#include <QDebug>
-#if !GST_CHECK_VERSION(1,0,0)
-typedef GstSceneMode GstPhotographySceneMode;
-#endif
-
QT_BEGIN_NAMESPACE
CameraBinExposure::CameraBinExposure(CameraBinSession *session)
@@ -143,7 +139,6 @@ QVariant CameraBinExposure::actualValue(ExposureParameter parameter) const
return QVariant::fromValue(QCameraExposure::ExposureManual);
case GST_PHOTOGRAPHY_SCENE_MODE_LANDSCAPE:
return QVariant::fromValue(QCameraExposure::ExposureLandscape);
-#if GST_CHECK_VERSION(1, 2, 0)
case GST_PHOTOGRAPHY_SCENE_MODE_SNOW:
return QVariant::fromValue(QCameraExposure::ExposureSnow);
case GST_PHOTOGRAPHY_SCENE_MODE_BEACH:
@@ -166,7 +161,6 @@ QVariant CameraBinExposure::actualValue(ExposureParameter parameter) const
return QVariant::fromValue(QCameraExposure::ExposureCandlelight);
case GST_PHOTOGRAPHY_SCENE_MODE_BARCODE:
return QVariant::fromValue(QCameraExposure::ExposureBarcode);
-#endif
//no direct mapping available so mapping to auto mode
case GST_PHOTOGRAPHY_SCENE_MODE_CLOSEUP:
case GST_PHOTOGRAPHY_SCENE_MODE_AUTO:
@@ -224,7 +218,6 @@ bool CameraBinExposure::setValue(ExposureParameter parameter, const QVariant& va
case QCameraExposure::ExposureLandscape:
sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_LANDSCAPE;
break;
-#if GST_CHECK_VERSION(1, 2, 0)
case QCameraExposure::ExposureSnow:
sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_SNOW;
break;
@@ -258,7 +251,6 @@ bool CameraBinExposure::setValue(ExposureParameter parameter, const QVariant& va
case QCameraExposure::ExposureBarcode:
sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_BARCODE;
break;
-#endif
default:
break;
}
diff --git a/src/plugins/gstreamer/camerabin/camerabinflash.cpp b/src/plugins/gstreamer/camerabin/camerabinflash.cpp
index 2bf7a2776..d5c32ffdb 100644
--- a/src/plugins/gstreamer/camerabin/camerabinflash.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinflash.cpp
@@ -43,10 +43,6 @@
#include <QDebug>
-#if !GST_CHECK_VERSION(1,0,0)
-typedef GstFlashMode GstPhotographyFlashMode;
-#endif
-
QT_BEGIN_NAMESPACE
CameraBinFlash::CameraBinFlash(CameraBinSession *session)
diff --git a/src/plugins/gstreamer/camerabin/camerabinfocus.cpp b/src/plugins/gstreamer/camerabin/camerabinfocus.cpp
index 33ac7e3d7..d60a643f6 100644
--- a/src/plugins/gstreamer/camerabin/camerabinfocus.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinfocus.cpp
@@ -48,19 +48,13 @@
#include <private/qgstutils_p.h>
-#if !GST_CHECK_VERSION(1,0,0)
-typedef GstFocusMode GstPhotographyFocusMode;
-#endif
-
//#define CAMERABIN_DEBUG 1
QT_BEGIN_NAMESPACE
CameraBinFocus::CameraBinFocus(CameraBinSession *session)
:QCameraFocusControl(session),
-#if GST_CHECK_VERSION(1,0,0)
QGstreamerBufferProbe(ProbeBuffers),
-#endif
m_session(session),
m_cameraStatus(QCamera::UnloadedStatus),
m_focusMode(QCameraFocus::AutoFocus),
@@ -146,7 +140,6 @@ void CameraBinFocus::setFocusPointMode(QCameraFocus::FocusPointMode mode)
if (m_focusPointMode == mode || !source)
return;
-#if GST_CHECK_VERSION(1,0,0)
if (m_focusPointMode == QCameraFocus::FocusPointFaceDetection) {
g_object_set (G_OBJECT(source), "detect-faces", FALSE, NULL);
@@ -161,7 +154,6 @@ void CameraBinFocus::setFocusPointMode(QCameraFocus::FocusPointMode mode)
QMutexLocker locker(&m_mutex);
m_faces.clear();
}
-#endif
if (m_focusPointMode != QCameraFocus::FocusPointAuto)
resetFocusPoint();
@@ -170,7 +162,6 @@ void CameraBinFocus::setFocusPointMode(QCameraFocus::FocusPointMode mode)
case QCameraFocus::FocusPointAuto:
case QCameraFocus::FocusPointCustom:
break;
-#if GST_CHECK_VERSION(1,0,0)
case QCameraFocus::FocusPointFaceDetection:
if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), "detect-faces")) {
if (GstPad *pad = gst_element_get_static_pad(source, "vfsrc")) {
@@ -180,7 +171,6 @@ void CameraBinFocus::setFocusPointMode(QCameraFocus::FocusPointMode mode)
}
}
return;
-#endif
default:
return;
}
@@ -196,12 +186,10 @@ bool CameraBinFocus::isFocusPointModeSupported(QCameraFocus::FocusPointMode mode
case QCameraFocus::FocusPointAuto:
case QCameraFocus::FocusPointCustom:
return true;
-#if GST_CHECK_VERSION(1,0,0)
case QCameraFocus::FocusPointFaceDetection:
if (GstElement *source = m_session->cameraSource())
return g_object_class_find_property(G_OBJECT_GET_CLASS(source), "detect-faces");
return false;
-#endif
default:
return false;
}
@@ -242,7 +230,6 @@ QCameraFocusZoneList CameraBinFocus::focusZones() const
if (m_focusPointMode != QCameraFocus::FocusPointFaceDetection) {
zones.append(QCameraFocusZone(m_focusRect, m_focusZoneStatus));
-#if GST_CHECK_VERSION(1,0,0)
} else for (const QRect &face : qAsConst(m_faceFocusRects)) {
const QRectF normalizedRect(
face.x() / qreal(m_viewfinderResolution.width()),
@@ -250,7 +237,6 @@ QCameraFocusZoneList CameraBinFocus::focusZones() const
face.width() / qreal(m_viewfinderResolution.width()),
face.height() / qreal(m_viewfinderResolution.height()));
zones.append(QCameraFocusZone(normalizedRect, m_focusZoneStatus));
-#endif
}
return zones;
}
@@ -312,12 +298,10 @@ void CameraBinFocus::_q_setFocusStatus(QCamera::LockStatus status, QCamera::Lock
emit focusZonesChanged();
}
-#if GST_CHECK_VERSION(1,0,0)
if (m_focusPointMode == QCameraFocus::FocusPointFaceDetection
&& m_focusStatus == QCamera::Unlocked) {
_q_updateFaces();
}
-#endif
emit _q_focusStatusChanged(m_focusStatus, reason);
}
@@ -462,8 +446,6 @@ void CameraBinFocus::updateRegionOfInterest(const QList<QRect> &rectangles)
gst_element_send_event(cameraSource, event);
}
-#if GST_CHECK_VERSION(1,0,0)
-
void CameraBinFocus::_q_updateFaces()
{
if (m_focusPointMode != QCameraFocus::FocusPointFaceDetection
@@ -507,7 +489,6 @@ bool CameraBinFocus::probeBuffer(GstBuffer *buffer)
{
QList<QRect> faces;
-#if GST_CHECK_VERSION(1,1,3)
gpointer state = NULL;
const GstMetaInfo *info = GST_VIDEO_REGION_OF_INTEREST_META_INFO;
@@ -519,9 +500,6 @@ bool CameraBinFocus::probeBuffer(GstBuffer *buffer)
faces.append(QRect(region->x, region->y, region->w, region->h));
}
-#else
- Q_UNUSED(buffer);
-#endif
QMutexLocker locker(&m_mutex);
@@ -535,6 +513,4 @@ bool CameraBinFocus::probeBuffer(GstBuffer *buffer)
return true;
}
-#endif
-
QT_END_NAMESPACE
diff --git a/src/plugins/gstreamer/camerabin/camerabinfocus.h b/src/plugins/gstreamer/camerabin/camerabinfocus.h
index 0fca02c35..40168c183 100644
--- a/src/plugins/gstreamer/camerabin/camerabinfocus.h
+++ b/src/plugins/gstreamer/camerabin/camerabinfocus.h
@@ -58,9 +58,7 @@ class CameraBinSession;
class CameraBinFocus
: public QCameraFocusControl
-#if GST_CHECK_VERSION(1,0,0)
, QGstreamerBufferProbe
-#endif
{
Q_OBJECT
@@ -92,27 +90,19 @@ public Q_SLOTS:
void setViewfinderResolution(const QSize &resolution);
-#if GST_CHECK_VERSION(1,0,0)
protected:
void timerEvent(QTimerEvent *event) override;
-#endif
private Q_SLOTS:
void _q_setFocusStatus(QCamera::LockStatus status, QCamera::LockChangeReason reason);
void _q_handleCameraStatusChange(QCamera::Status status);
-
-#if GST_CHECK_VERSION(1,0,0)
void _q_updateFaces();
-#endif
private:
void resetFocusPoint();
void updateRegionOfInterest(const QRectF &rectangle);
void updateRegionOfInterest(const QList<QRect> &rectangles);
-
-#if GST_CHECK_VERSION(1,0,0)
bool probeBuffer(GstBuffer *buffer) override;
-#endif
CameraBinSession *m_session;
QCamera::Status m_cameraStatus;
diff --git a/src/plugins/gstreamer/camerabin/camerabinimagecapture.cpp b/src/plugins/gstreamer/camerabin/camerabinimagecapture.cpp
index aac5c2c94..d0b34a61d 100644
--- a/src/plugins/gstreamer/camerabin/camerabinimagecapture.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinimagecapture.cpp
@@ -43,7 +43,6 @@
#include "camerabincapturebufferformat.h"
#include "camerabinsession.h"
#include <private/qgstvideobuffer_p.h>
-#include <private/qvideosurfacegstsink_p.h>
#include <private/qgstutils_p.h>
#include <QtMultimedia/qmediametadata.h>
#include <QtCore/qdebug.h>
@@ -113,16 +112,10 @@ void CameraBinImageCapture::updateState()
}
}
-#if GST_CHECK_VERSION(1,0,0)
GstPadProbeReturn CameraBinImageCapture::encoderEventProbe(
GstPad *, GstPadProbeInfo *info, gpointer user_data)
{
GstEvent * const event = gst_pad_probe_info_get_event(info);
-#else
-gboolean CameraBinImageCapture::encoderEventProbe(
- GstElement *, GstEvent *event, gpointer user_data)
-{
-#endif
CameraBinImageCapture * const self = static_cast<CameraBinImageCapture *>(user_data);
if (event && GST_EVENT_TYPE(event) == GST_EVENT_TAG) {
GstTagList *gstTags;
@@ -156,23 +149,12 @@ gboolean CameraBinImageCapture::encoderEventProbe(
}
}
}
-#if GST_CHECK_VERSION(1,0,0)
return GST_PAD_PROBE_OK;
-#else
- return TRUE;
-#endif
}
void CameraBinImageCapture::EncoderProbe::probeCaps(GstCaps *caps)
{
-#if GST_CHECK_VERSION(1,0,0)
capture->m_bufferFormat = QGstUtils::formatForCaps(caps, &capture->m_videoInfo);
-#else
- int bytesPerLine = 0;
- QVideoSurfaceFormat format = QGstUtils::formatForCaps(caps, &bytesPerLine);
- capture->m_bytesPerLine = bytesPerLine;
- capture->m_bufferFormat = format;
-#endif
}
bool CameraBinImageCapture::EncoderProbe::probeBuffer(GstBuffer *buffer)
@@ -192,11 +174,7 @@ bool CameraBinImageCapture::EncoderProbe::probeBuffer(GstBuffer *buffer)
#ifdef DEBUG_CAPTURE
qDebug() << "imageAvailable(uncompressed):" << format;
#endif
-#if GST_CHECK_VERSION(1,0,0)
QGstVideoBuffer *videoBuffer = new QGstVideoBuffer(buffer, capture->m_videoInfo);
-#else
- QGstVideoBuffer *videoBuffer = new QGstVideoBuffer(buffer, capture->m_bytesPerLine);
-#endif
QVideoFrame frame(
videoBuffer,
@@ -235,7 +213,6 @@ bool CameraBinImageCapture::MuxerProbe::probeBuffer(GstBuffer *buffer)
QSize resolution = capture->m_jpegResolution;
//if resolution is not presented in caps, try to find it from encoded jpeg data:
-#if GST_CHECK_VERSION(1,0,0)
GstMapInfo mapInfo;
if (resolution.isEmpty() && gst_buffer_map(buffer, &mapInfo, GST_MAP_READ)) {
QBuffer data;
@@ -251,18 +228,6 @@ bool CameraBinImageCapture::MuxerProbe::probeBuffer(GstBuffer *buffer)
gst_video_info_set_format(
&info, GST_VIDEO_FORMAT_ENCODED, resolution.width(), resolution.height());
QGstVideoBuffer *videoBuffer = new QGstVideoBuffer(buffer, info);
-#else
- if (resolution.isEmpty()) {
- QBuffer data;
- data.setData(reinterpret_cast<const char*>(GST_BUFFER_DATA(buffer)), GST_BUFFER_SIZE(buffer));
- QImageReader reader(&data, "JPEG");
- resolution = reader.size();
- }
-
- QGstVideoBuffer *videoBuffer = new QGstVideoBuffer(buffer,
- -1); //bytesPerLine is not available for jpegs
-#endif
-
QVideoFrame frame(videoBuffer,
resolution,
@@ -303,10 +268,6 @@ bool CameraBinImageCapture::processBusMessage(const QGstreamerMessage &message)
gchar *name = gst_element_get_name(element);
QString elementName = QString::fromLatin1(name);
g_free(name);
-#if !GST_CHECK_VERSION(1,0,0)
- GstElementClass *elementClass = GST_ELEMENT_GET_CLASS(element);
- QString elementLongName = elementClass->details.longname;
-#endif
if (elementName.contains("jpegenc") && element != m_jpegEncoderElement) {
m_jpegEncoderElement = element;
GstPad *sinkpad = gst_element_get_static_pad(element, "sink");
@@ -316,23 +277,15 @@ bool CameraBinImageCapture::processBusMessage(const QGstreamerMessage &message)
#ifdef DEBUG_CAPTURE
qDebug() << "install metadata probe";
#endif
-#if GST_CHECK_VERSION(1,0,0)
gst_pad_add_probe(
sinkpad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, encoderEventProbe, this, NULL);
-#else
- gst_pad_add_event_probe(sinkpad, G_CALLBACK(encoderEventProbe), this);
-#endif
#ifdef DEBUG_CAPTURE
qDebug() << "install uncompressed buffer probe";
#endif
m_encoderProbe.addProbeToPad(sinkpad, true);
gst_object_unref(sinkpad);
- } else if ((elementName.contains("jifmux")
-#if !GST_CHECK_VERSION(1,0,0)
- || elementLongName == QLatin1String("JPEG stream muxer")
-#endif
- || elementName.startsWith("metadatamux"))
+ } else if ((elementName.contains("jifmux") || elementName.startsWith("metadatamux"))
&& element != m_metadataMuxerElement) {
//Jpeg encoded buffer probe is added after jifmux/metadatamux
//element to ensure the resulting jpeg buffer contains capture metadata
diff --git a/src/plugins/gstreamer/camerabin/camerabinimagecapture.h b/src/plugins/gstreamer/camerabin/camerabinimagecapture.h
index 5e00ab6d5..e86adc4fd 100644
--- a/src/plugins/gstreamer/camerabin/camerabinimagecapture.h
+++ b/src/plugins/gstreamer/camerabin/camerabinimagecapture.h
@@ -48,9 +48,7 @@
#include <private/qgstreamerbufferprobe_p.h>
-#if GST_CHECK_VERSION(1,0,0)
#include <gst/video/video.h>
-#endif
QT_BEGIN_NAMESPACE
@@ -75,11 +73,7 @@ private slots:
void updateState();
private:
-#if GST_CHECK_VERSION(1,0,0)
static GstPadProbeReturn encoderEventProbe(GstPad *, GstPadProbeInfo *info, gpointer user_data);
-#else
- static gboolean encoderEventProbe(GstElement *, GstEvent *event, gpointer user_data);
-#endif
class EncoderProbe : public QGstreamerBufferProbe
{
@@ -109,11 +103,7 @@ private:
CameraBinSession *m_session;
GstElement *m_jpegEncoderElement;
GstElement *m_metadataMuxerElement;
-#if GST_CHECK_VERSION(1,0,0)
GstVideoInfo m_videoInfo;
-#else
- int m_bytesPerLine;
-#endif
int m_requestId;
bool m_ready;
};
diff --git a/src/plugins/gstreamer/camerabin/camerabinimageprocessing.cpp b/src/plugins/gstreamer/camerabin/camerabinimageprocessing.cpp
index 9d281e10c..3629a1336 100644
--- a/src/plugins/gstreamer/camerabin/camerabinimageprocessing.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinimageprocessing.cpp
@@ -45,11 +45,7 @@
#include "camerabinv4limageprocessing.h"
#endif
-#if GST_CHECK_VERSION(1,0,0)
# include <gst/video/colorbalance.h>
-#else
-# include <gst/interfaces/colorbalance.h>
-#endif
QT_BEGIN_NAMESPACE
@@ -72,30 +68,18 @@ CameraBinImageProcessing::CameraBinImageProcessing(CameraBinSession *session)
unlockWhiteBalance();
}
-#if GST_CHECK_VERSION(1, 0, 0)
m_filterMap.insert(QCameraImageProcessing::ColorFilterNone, GST_PHOTOGRAPHY_COLOR_TONE_MODE_NORMAL);
if (m_session->photography()) {
m_filterMap.insert(QCameraImageProcessing::ColorFilterSepia, GST_PHOTOGRAPHY_COLOR_TONE_MODE_SEPIA);
m_filterMap.insert(QCameraImageProcessing::ColorFilterGrayscale, GST_PHOTOGRAPHY_COLOR_TONE_MODE_GRAYSCALE);
m_filterMap.insert(QCameraImageProcessing::ColorFilterNegative, GST_PHOTOGRAPHY_COLOR_TONE_MODE_NEGATIVE);
m_filterMap.insert(QCameraImageProcessing::ColorFilterSolarize, GST_PHOTOGRAPHY_COLOR_TONE_MODE_SOLARIZE);
-#if GST_CHECK_VERSION(1, 2, 0)
m_filterMap.insert(QCameraImageProcessing::ColorFilterPosterize, GST_PHOTOGRAPHY_COLOR_TONE_MODE_POSTERIZE);
m_filterMap.insert(QCameraImageProcessing::ColorFilterWhiteboard, GST_PHOTOGRAPHY_COLOR_TONE_MODE_WHITEBOARD);
m_filterMap.insert(QCameraImageProcessing::ColorFilterBlackboard, GST_PHOTOGRAPHY_COLOR_TONE_MODE_BLACKBOARD);
m_filterMap.insert(QCameraImageProcessing::ColorFilterAqua, GST_PHOTOGRAPHY_COLOR_TONE_MODE_AQUA);
-#endif
- }
-#else
- m_filterMap.insert(QCameraImageProcessing::ColorFilterNone, GST_PHOTOGRAPHY_COLOUR_TONE_MODE_NORMAL);
- if (m_session->photography()) {
- m_filterMap.insert(QCameraImageProcessing::ColorFilterSepia, GST_PHOTOGRAPHY_COLOUR_TONE_MODE_SEPIA);
- m_filterMap.insert(QCameraImageProcessing::ColorFilterGrayscale, GST_PHOTOGRAPHY_COLOUR_TONE_MODE_GRAYSCALE);
- m_filterMap.insert(QCameraImageProcessing::ColorFilterNegative, GST_PHOTOGRAPHY_COLOUR_TONE_MODE_NEGATIVE);
- m_filterMap.insert(QCameraImageProcessing::ColorFilterSolarize, GST_PHOTOGRAPHY_COLOUR_TONE_MODE_SOLARIZE);
}
#endif
-#endif
#if QT_CONFIG(linux_v4l)
m_v4lImageControl = new CameraBinV4LImageProcessing(m_session);
@@ -185,11 +169,9 @@ bool CameraBinImageProcessing::setWhiteBalanceMode(QCameraImageProcessing::White
#if QT_CONFIG(gstreamer_photography)
if (isWhiteBalanceModeSupported(mode)) {
m_whiteBalanceMode = mode;
-#if GST_CHECK_VERSION(1, 2, 0)
GstPhotographyWhiteBalanceMode currentMode;
if (gst_photography_get_white_balance_mode(m_session->photography(), &currentMode)
&& currentMode != GST_PHOTOGRAPHY_WB_MODE_MANUAL)
-#endif
{
unlockWhiteBalance();
return true;
@@ -313,13 +295,8 @@ QVariant CameraBinImageProcessing::parameter(
case QCameraImageProcessingControl::ColorFilter:
#if QT_CONFIG(gstreamer_photography)
if (GstPhotography *photography = m_session->photography()) {
-#if GST_CHECK_VERSION(1, 0, 0)
GstPhotographyColorToneMode mode = GST_PHOTOGRAPHY_COLOR_TONE_MODE_NORMAL;
gst_photography_get_color_tone_mode(photography, &mode);
-#else
- GstColourToneMode mode = GST_PHOTOGRAPHY_COLOUR_TONE_MODE_NORMAL;
- gst_photography_get_colour_tone_mode(photography, &mode);
-#endif
return QVariant::fromValue(m_filterMap.key(mode, QCameraImageProcessing::ColorFilterNone));
}
#endif
@@ -400,15 +377,9 @@ void CameraBinImageProcessing::setParameter(QCameraImageProcessingControl::Proce
case QCameraImageProcessingControl::ColorFilter:
#if QT_CONFIG(gstreamer_photography)
if (GstPhotography *photography = m_session->photography()) {
-#if GST_CHECK_VERSION(1, 0, 0)
gst_photography_set_color_tone_mode(photography, m_filterMap.value(
value.value<QCameraImageProcessing::ColorFilter>(),
GST_PHOTOGRAPHY_COLOR_TONE_MODE_NORMAL));
-#else
- gst_photography_set_colour_tone_mode(photography, m_filterMap.value(
- value.value<QCameraImageProcessing::ColorFilter>(),
- GST_PHOTOGRAPHY_COLOUR_TONE_MODE_NORMAL));
-#endif
}
#endif
break;
@@ -422,11 +393,8 @@ void CameraBinImageProcessing::setParameter(QCameraImageProcessingControl::Proce
#if QT_CONFIG(gstreamer_photography)
void CameraBinImageProcessing::lockWhiteBalance()
{
-#if GST_CHECK_VERSION(1, 2, 0)
- if (GstPhotography *photography = m_session->photography()) {
+ if (GstPhotography *photography = m_session->photography())
gst_photography_set_white_balance_mode(photography, GST_PHOTOGRAPHY_WB_MODE_MANUAL);
- }
-#endif
}
void CameraBinImageProcessing::unlockWhiteBalance()
diff --git a/src/plugins/gstreamer/camerabin/camerabinimageprocessing.h b/src/plugins/gstreamer/camerabin/camerabinimageprocessing.h
index 259138e91..51e6f7c82 100644
--- a/src/plugins/gstreamer/camerabin/camerabinimageprocessing.h
+++ b/src/plugins/gstreamer/camerabin/camerabinimageprocessing.h
@@ -49,10 +49,6 @@
#if QT_CONFIG(gstreamer_photography)
# include <gst/interfaces/photography.h>
-# if !GST_CHECK_VERSION(1,0,0)
-typedef GstWhiteBalanceMode GstPhotographyWhiteBalanceMode;
-typedef GstColourToneMode GstPhotographyColorToneMode;
-# endif
#endif
QT_BEGIN_NAMESPACE
diff --git a/src/plugins/gstreamer/camerabin/camerabinlocks.cpp b/src/plugins/gstreamer/camerabin/camerabinlocks.cpp
index 89be2ac5d..9891ace84 100644
--- a/src/plugins/gstreamer/camerabin/camerabinlocks.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinlocks.cpp
@@ -67,7 +67,6 @@ QCamera::LockTypes CameraBinLocks::supportedLocks() const
{
QCamera::LockTypes locks = QCamera::LockFocus;
-#if GST_CHECK_VERSION(1, 2, 0)
if (GstPhotography *photography = m_session->photography()) {
if (gst_photography_get_capabilities(photography) & GST_PHOTOGRAPHY_CAPS_WB_MODE)
locks |= QCamera::LockWhiteBalance;
@@ -79,7 +78,6 @@ QCamera::LockTypes CameraBinLocks::supportedLocks() const
}
}
}
-#endif
return locks;
}
@@ -89,7 +87,6 @@ QCamera::LockStatus CameraBinLocks::lockStatus(QCamera::LockType lock) const
switch (lock) {
case QCamera::LockFocus:
return m_focus->focusStatus();
-#if GST_CHECK_VERSION(1, 2, 0)
case QCamera::LockExposure:
if (m_pendingLocks & QCamera::LockExposure)
return QCamera::Searching;
@@ -98,7 +95,6 @@ QCamera::LockStatus CameraBinLocks::lockStatus(QCamera::LockType lock) const
if (m_pendingLocks & QCamera::LockWhiteBalance)
return QCamera::Searching;
return isWhiteBalanceLocked() ? QCamera::Locked : QCamera::Unlocked;
-#endif
default:
return QCamera::Unlocked;
}
@@ -112,7 +108,6 @@ void CameraBinLocks::searchAndLock(QCamera::LockTypes locks)
m_pendingLocks |= QCamera::LockFocus;
m_focus->_q_startFocusing();
}
-#if GST_CHECK_VERSION(1, 2, 0)
if (!m_pendingLocks)
m_lockTimer.stop();
@@ -134,8 +129,6 @@ void CameraBinLocks::searchAndLock(QCamera::LockTypes locks)
lockWhiteBalance(QCamera::UserRequest);
}
}
-#endif
-
}
void CameraBinLocks::unlock(QCamera::LockTypes locks)
@@ -145,7 +138,6 @@ void CameraBinLocks::unlock(QCamera::LockTypes locks)
if (locks & QCamera::LockFocus)
m_focus->_q_stopFocusing();
-#if GST_CHECK_VERSION(1, 2, 0)
if (!m_pendingLocks)
m_lockTimer.stop();
@@ -153,7 +145,6 @@ void CameraBinLocks::unlock(QCamera::LockTypes locks)
unlockExposure(QCamera::Unlocked, QCamera::UserRequest);
if (locks & QCamera::LockWhiteBalance)
unlockWhiteBalance(QCamera::Unlocked, QCamera::UserRequest);
-#endif
}
void CameraBinLocks::updateFocusStatus(QCamera::LockStatus status, QCamera::LockChangeReason reason)
@@ -161,19 +152,15 @@ void CameraBinLocks::updateFocusStatus(QCamera::LockStatus status, QCamera::Lock
if (status != QCamera::Searching)
m_pendingLocks &= ~QCamera::LockFocus;
-#if GST_CHECK_VERSION(1, 2, 0)
if (status == QCamera::Locked && !m_lockTimer.isActive()) {
if (m_pendingLocks & QCamera::LockExposure)
lockExposure(QCamera::LockAcquired);
if (m_pendingLocks & QCamera::LockWhiteBalance)
lockWhiteBalance(QCamera::LockAcquired);
}
-#endif
emit lockStatusChanged(QCamera::LockFocus, status, reason);
}
-#if GST_CHECK_VERSION(1, 2, 0)
-
void CameraBinLocks::timerEvent(QTimerEvent *event)
{
if (event->timerId() != m_lockTimer.timerId())
@@ -254,6 +241,4 @@ void CameraBinLocks::unlockWhiteBalance(
emit lockStatusChanged(QCamera::LockWhiteBalance, status, reason);
}
-#endif
-
QT_END_NAMESPACE
diff --git a/src/plugins/gstreamer/camerabin/camerabinlocks.h b/src/plugins/gstreamer/camerabin/camerabinlocks.h
index cd592ffd2..dcc960e8e 100644
--- a/src/plugins/gstreamer/camerabin/camerabinlocks.h
+++ b/src/plugins/gstreamer/camerabin/camerabinlocks.h
@@ -69,15 +69,12 @@ public:
void unlock(QCamera::LockTypes locks) override;
protected:
-#if GST_CHECK_VERSION(1, 2, 0)
void timerEvent(QTimerEvent *event) override;
-#endif
private slots:
void updateFocusStatus(QCamera::LockStatus status, QCamera::LockChangeReason reason);
private:
-#if GST_CHECK_VERSION(1, 2, 0)
bool isExposureLocked() const;
void lockExposure(QCamera::LockChangeReason reason);
void unlockExposure(QCamera::LockStatus status, QCamera::LockChangeReason reason);
@@ -85,7 +82,6 @@ private:
bool isWhiteBalanceLocked() const;
void lockWhiteBalance(QCamera::LockChangeReason reason);
void unlockWhiteBalance(QCamera::LockStatus status, QCamera::LockChangeReason reason);
-#endif
CameraBinSession *m_session;
CameraBinFocus *m_focus;
diff --git a/src/plugins/gstreamer/camerabin/camerabinmetadata.cpp b/src/plugins/gstreamer/camerabin/camerabinmetadata.cpp
index afda2346d..fbdc4c440 100644
--- a/src/plugins/gstreamer/camerabin/camerabinmetadata.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinmetadata.cpp
@@ -74,9 +74,7 @@ static const QGStreamerMetaDataKeys *qt_gstreamerMetaDataKeys()
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::SubTitle, 0, QVariant::String));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Author, 0, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Comment, GST_TAG_COMMENT, QVariant::String));
-#if GST_CHECK_VERSION(0,10,31)
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Date, GST_TAG_DATE_TIME, QVariant::DateTime));
-#endif
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Description, GST_TAG_DESCRIPTION, QVariant::String));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Category, 0, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Genre, GST_TAG_GENRE, QVariant::String));
@@ -105,9 +103,7 @@ static const QGStreamerMetaDataKeys *qt_gstreamerMetaDataKeys()
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::AlbumTitle, GST_TAG_ALBUM, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::AlbumArtist, GST_TAG_ARTIST, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::ContributingArtist, GST_TAG_PERFORMER, QVariant::String));
-#if GST_CHECK_VERSION(0,10,19)
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Composer, GST_TAG_COMPOSER, QVariant::String));
-#endif
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Conductor, 0, QVariant::String));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Lyrics, 0, QVariant::String));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Mood, 0, QVariant::String));
@@ -132,7 +128,7 @@ static const QGStreamerMetaDataKeys *qt_gstreamerMetaDataKeys()
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Director, 0, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::LeadPerformer, GST_TAG_PERFORMER, QVariant::String));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Writer, 0, QVariant::String));
-#if GST_CHECK_VERSION(0,10,30)
+
// Photos
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::CameraManufacturer, GST_TAG_DEVICE_MANUFACTURER, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::CameraModel, GST_TAG_DEVICE_MODEL, QVariant::String));
@@ -148,7 +144,6 @@ static const QGStreamerMetaDataKeys *qt_gstreamerMetaDataKeys()
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::GPSTrack, GST_TAG_GEO_LOCATION_MOVEMENT_DIRECTION, QVariant::Double));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::GPSSpeed, GST_TAG_GEO_LOCATION_MOVEMENT_SPEED, QVariant::Double));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::GPSImgDirection, GST_TAG_GEO_LOCATION_CAPTURE_DIRECTION, QVariant::Double));
-#endif
}
return metadataKeys;
@@ -161,14 +156,12 @@ CameraBinMetaData::CameraBinMetaData(QObject *parent)
QVariant CameraBinMetaData::metaData(const QString &key) const
{
-#if GST_CHECK_VERSION(0,10,30)
if (key == QMediaMetaData::Orientation) {
return QGstUtils::fromGStreamerOrientation(m_values.value(QByteArray(GST_TAG_IMAGE_ORIENTATION)));
} else if (key == QMediaMetaData::GPSSpeed) {
const double metersPerSec = m_values.value(QByteArray(GST_TAG_GEO_LOCATION_MOVEMENT_SPEED)).toDouble();
return (metersPerSec * 3600) / 1000;
}
-#endif
const auto keys = *qt_gstreamerMetaDataKeys();
for (const QGStreamerMetaDataKey &metadataKey : keys) {
@@ -181,7 +174,6 @@ QVariant CameraBinMetaData::metaData(const QString &key) const
void CameraBinMetaData::setMetaData(const QString &key, const QVariant &value)
{
QVariant correctedValue = value;
-#if GST_CHECK_VERSION(0,10,30)
if (value.isValid()) {
if (key == QMediaMetaData::Orientation) {
correctedValue = QGstUtils::toGStreamerOrientation(value);
@@ -190,7 +182,6 @@ void CameraBinMetaData::setMetaData(const QString &key, const QVariant &value)
correctedValue = (value.toDouble() * 1000) / 3600;
}
}
-#endif
const auto keys = *qt_gstreamerMetaDataKeys();
for (const QGStreamerMetaDataKey &metadataKey : keys) {
diff --git a/src/plugins/gstreamer/camerabin/camerabinsession.cpp b/src/plugins/gstreamer/camerabin/camerabinsession.cpp
index 22f5fb116..355f2d247 100644
--- a/src/plugins/gstreamer/camerabin/camerabinsession.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinsession.cpp
@@ -185,14 +185,10 @@ CameraBinSession::CameraBinSession(GstElementFactory *sourceFactory, QObject *pa
//post image preview in RGB format
g_object_set(G_OBJECT(m_camerabin), POST_PREVIEWS_PROPERTY, TRUE, NULL);
-#if GST_CHECK_VERSION(1,0,0)
GstCaps *previewCaps = gst_caps_new_simple(
"video/x-raw",
"format", G_TYPE_STRING, "RGBx",
NULL);
-#else
- GstCaps *previewCaps = gst_caps_from_string("video/x-raw-rgb");
-#endif
g_object_set(G_OBJECT(m_camerabin), PREVIEW_CAPS_PROPERTY, previewCaps, NULL);
gst_caps_unref(previewCaps);
@@ -441,17 +437,7 @@ void CameraBinSession::setAudioCaptureCaps()
if (sampleRate <= 0 && channelCount <=0)
return;
-#if GST_CHECK_VERSION(1,0,0)
GstStructure *structure = gst_structure_new_empty(QT_GSTREAMER_RAW_AUDIO_MIME);
-#else
- GstStructure *structure = gst_structure_new(
- QT_GSTREAMER_RAW_AUDIO_MIME,
- "endianness", G_TYPE_INT, 1234,
- "signed", G_TYPE_BOOLEAN, TRUE,
- "width", G_TYPE_INT, 16,
- "depth", G_TYPE_INT, 16,
- NULL);
-#endif
if (sampleRate > 0)
gst_structure_set(structure, "rate", G_TYPE_INT, sampleRate, NULL);
if (channelCount > 0)
@@ -997,27 +983,16 @@ bool CameraBinSession::processSyncMessage(const QGstreamerMessage &message)
const GValue *sampleValue = 0;
if (m_captureMode == QCamera::CaptureStillImage
&& gst_structure_has_name(st, "preview-image")
-#if GST_CHECK_VERSION(1,0,0)
&& gst_structure_has_field_typed(st, "sample", GST_TYPE_SAMPLE)
&& (sampleValue = gst_structure_get_value(st, "sample"))) {
GstSample * const sample = gst_value_get_sample(sampleValue);
GstCaps * const previewCaps = gst_sample_get_caps(sample);
GstBuffer * const buffer = gst_sample_get_buffer(sample);
-#else
- && gst_structure_has_field_typed(st, "buffer", GST_TYPE_BUFFER)
- && (sampleValue = gst_structure_get_value(st, "buffer"))) {
- GstBuffer * const buffer = gst_value_get_buffer(sampleValue);
-#endif
QImage image;
-#if GST_CHECK_VERSION(1,0,0)
GstVideoInfo previewInfo;
if (gst_video_info_from_caps(&previewInfo, previewCaps))
image = QGstUtils::bufferToImage(buffer, previewInfo);
-#else
- image = QGstUtils::bufferToImage(buffer);
- gst_buffer_unref(buffer);
-#endif
if (!image.isNull()) {
static QMetaMethod exposedSignal = QMetaMethod::fromSignal(&CameraBinSession::imageExposed);
exposedSignal.invoke(this,
@@ -1314,9 +1289,7 @@ QList< QPair<int,int> > CameraBinSession::supportedFrameRates(const QSize &frame
for (uint i=0; i<gst_caps_get_size(caps); i++) {
GstStructure *structure = gst_caps_get_structure(caps, i);
gst_structure_set_name(structure, "video/x-raw");
-#if GST_CHECK_VERSION(1,2,0)
gst_caps_set_features(caps, i, NULL);
-#endif
const GValue *oldRate = gst_structure_get_value(structure, "framerate");
if (!oldRate)
continue;
@@ -1329,11 +1302,7 @@ QList< QPair<int,int> > CameraBinSession::supportedFrameRates(const QSize &frame
gst_structure_set_value(structure, "framerate", &rate);
g_value_unset(&rate);
}
-#if GST_CHECK_VERSION(1,0,0)
caps = gst_caps_simplify(caps);
-#else
- gst_caps_do_simplify(caps);
-#endif
for (uint i=0; i<gst_caps_get_size(caps); i++) {
GstStructure *structure = gst_caps_get_structure(caps, i);
@@ -1430,9 +1399,7 @@ QList<QSize> CameraBinSession::supportedResolutions(QPair<int,int> rate,
for (uint i=0; i<gst_caps_get_size(caps); i++) {
GstStructure *structure = gst_caps_get_structure(caps, i);
gst_structure_set_name(structure, "video/x-raw");
-#if GST_CHECK_VERSION(1,2,0)
gst_caps_set_features(caps, i, NULL);
-#endif
const GValue *oldW = gst_structure_get_value(structure, "width");
const GValue *oldH = gst_structure_get_value(structure, "height");
if (!oldW || !oldH)
@@ -1453,12 +1420,7 @@ QList<QSize> CameraBinSession::supportedResolutions(QPair<int,int> rate,
g_value_unset(&h);
}
-#if GST_CHECK_VERSION(1,0,0)
caps = gst_caps_simplify(caps);
-#else
- gst_caps_do_simplify(caps);
-#endif
-
for (uint i=0; i<gst_caps_get_size(caps); i++) {
GstStructure *structure = gst_caps_get_structure(caps, i);
@@ -1549,28 +1511,14 @@ void CameraBinSession::elementAdded(GstBin *, GstElement *element, CameraBinSess
g_signal_connect(G_OBJECT(element), "element-removed", G_CALLBACK(elementRemoved), session);
} else if (!factory) {
// no-op
-#if GST_CHECK_VERSION(0,10,31)
} else if (gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_AUDIO_ENCODER)) {
-#else
- } else if (strstr(gst_element_factory_get_klass(factory), "Encoder/Audio") != NULL) {
-#endif
session->m_audioEncoder = element;
-
session->m_audioEncodeControl->applySettings(element);
-#if GST_CHECK_VERSION(0,10,31)
} else if (gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_VIDEO_ENCODER)) {
-#else
- } else if (strstr(gst_element_factory_get_klass(factory), "Encoder/Video") != NULL) {
-#endif
session->m_videoEncoder = element;
-
session->m_videoEncodeControl->applySettings(element);
-#if GST_CHECK_VERSION(0,10,31)
} else if (gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_MUXER)) {
-#else
- } else if (strstr(gst_element_factory_get_klass(factory), "Muxer") != NULL) {
-#endif
- session->m_muxer = element;
+ session->m_muxer = element;
}
}
diff --git a/src/plugins/gstreamer/mediacapture/qgstreamercaptureserviceplugin.cpp b/src/plugins/gstreamer/mediacapture/qgstreamercaptureserviceplugin.cpp
index 2a2dec60a..65f885d80 100644
--- a/src/plugins/gstreamer/mediacapture/qgstreamercaptureserviceplugin.cpp
+++ b/src/plugins/gstreamer/mediacapture/qgstreamercaptureserviceplugin.cpp
@@ -119,15 +119,8 @@ QMultimedia::SupportEstimate QGstreamerCaptureServicePlugin::hasSupport(const QS
static bool isEncoderOrMuxer(GstElementFactory *factory)
{
-#if GST_CHECK_VERSION(0, 10, 31)
return gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_MUXER)
|| gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_ENCODER);
-#else
- return (factory
- && (qstrcmp(factory->details.klass, "Codec/Encoder/Audio") == 0
- || qstrcmp(factory->details.klass, "Codec/Encoder/Video") == 0
- || qstrcmp(factory->details.klass, "Codec/Muxer") == 0 ));
-#endif
}
void QGstreamerCaptureServicePlugin::updateSupportedMimeTypes() const
diff --git a/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.cpp b/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.cpp
index 4363f6d3b..26d0999db 100644
--- a/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.cpp
+++ b/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.cpp
@@ -358,11 +358,7 @@ GstElement *QGstreamerCaptureSession::buildVideoPreview()
void QGstreamerCaptureSession::probeCaps(GstCaps *caps)
{
-#if GST_CHECK_VERSION(1,0,0)
gst_video_info_from_caps(&m_previewInfo, caps);
-#else
- Q_UNUSED(caps);
-#endif
}
bool QGstreamerCaptureSession::probeBuffer(GstBuffer *buffer)
@@ -378,11 +374,7 @@ bool QGstreamerCaptureSession::probeBuffer(GstBuffer *buffer)
m_passImage = false;
-#if GST_CHECK_VERSION(1,0,0)
QImage img = QGstUtils::bufferToImage(buffer, m_previewInfo);
-#else
- QImage img = QGstUtils::bufferToImage(buffer);
-#endif
if (img.isNull())
return true;
@@ -415,15 +407,11 @@ static gboolean saveImageFilter(GstElement *element,
if (!fileName.isEmpty()) {
QFile f(fileName);
if (f.open(QFile::WriteOnly)) {
-#if GST_CHECK_VERSION(1,0,0)
GstMapInfo info;
if (gst_buffer_map(buffer, &info, GST_MAP_READ)) {
f.write(reinterpret_cast<const char *>(info.data), info.size);
gst_buffer_unmap(buffer, &info);
}
-#else
- f.write(reinterpret_cast<const char *>(buffer->data), buffer->size);
-#endif
f.close();
static QMetaMethod savedSignal = QMetaMethod::fromSignal(&QGstreamerCaptureSession::imageSaved);
diff --git a/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.h b/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.h
index e0c9107a7..4e54ecc15 100644
--- a/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.h
+++ b/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.h
@@ -228,9 +228,7 @@ private:
GstElement *m_encodeBin;
-#if GST_CHECK_VERSION(1,0,0)
GstVideoInfo m_previewInfo;
-#endif
public:
bool m_passImage;
diff --git a/src/plugins/gstreamer/mediaplayer/qgstreamermetadataprovider.cpp b/src/plugins/gstreamer/mediaplayer/qgstreamermetadataprovider.cpp
index bd503d3a1..191878c9a 100644
--- a/src/plugins/gstreamer/mediaplayer/qgstreamermetadataprovider.cpp
+++ b/src/plugins/gstreamer/mediaplayer/qgstreamermetadataprovider.cpp
@@ -83,9 +83,7 @@ static const QGstreamerMetaDataKeyLookup *qt_gstreamerMetaDataKeys()
// Music
metadataKeys->insert(GST_TAG_ALBUM, QMediaMetaData::AlbumTitle);
-#if GST_CHECK_VERSION(0, 10, 25)
metadataKeys->insert(GST_TAG_ALBUM_ARTIST, QMediaMetaData::AlbumArtist);
-#endif
metadataKeys->insert(GST_TAG_ARTIST, QMediaMetaData::ContributingArtist);
//metadataKeys->insert(0, QMediaMetaData::Conductor);
//metadataKeys->insert(0, QMediaMetaData::Lyrics);
@@ -100,9 +98,7 @@ static const QGstreamerMetaDataKeyLookup *qt_gstreamerMetaDataKeys()
// Image/Video
metadataKeys->insert("resolution", QMediaMetaData::Resolution);
metadataKeys->insert("pixel-aspect-ratio", QMediaMetaData::PixelAspectRatio);
-#if GST_CHECK_VERSION(0,10,30)
metadataKeys->insert(GST_TAG_IMAGE_ORIENTATION, QMediaMetaData::Orientation);
-#endif
// Video
//metadataKeys->insert(0, QMediaMetaData::VideoFrameRate);
@@ -149,10 +145,8 @@ bool QGstreamerMetaDataProvider::isWritable() const
QVariant QGstreamerMetaDataProvider::metaData(const QString &key) const
{
-#if GST_CHECK_VERSION(0,10,30)
if (key == QMediaMetaData::Orientation)
return QGstUtils::fromGStreamerOrientation(m_tags.value(key));
-#endif
return m_tags.value(key);
}
diff --git a/src/plugins/gstreamer/mediaplayer/qgstreamerplayerserviceplugin.cpp b/src/plugins/gstreamer/mediaplayer/qgstreamerplayerserviceplugin.cpp
index db266a10e..59302f1d2 100644
--- a/src/plugins/gstreamer/mediaplayer/qgstreamerplayerserviceplugin.cpp
+++ b/src/plugins/gstreamer/mediaplayer/qgstreamerplayerserviceplugin.cpp
@@ -90,15 +90,8 @@ QMultimedia::SupportEstimate QGstreamerPlayerServicePlugin::hasSupport(const QSt
static bool isDecoderOrDemuxer(GstElementFactory *factory)
{
-#if GST_CHECK_VERSION(0, 10, 31)
return gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_DEMUXER)
|| gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_DECODER);
-#else
- return (factory
- && (qstrcmp(factory->details.klass, "Codec/Decoder/Audio") == 0
- || qstrcmp(factory->details.klass, "Codec/Decoder/Video") == 0
- || qstrcmp(factory->details.klass, "Codec/Demux") == 0 ));
-#endif
}
void QGstreamerPlayerServicePlugin::updateSupportedMimeTypes() const
diff --git a/tests/auto/unit/qmediaplaylist/tst_qmediaplaylist.cpp b/tests/auto/unit/qmediaplaylist/tst_qmediaplaylist.cpp
index 9b1385dac..e843374fb 100644
--- a/tests/auto/unit/qmediaplaylist/tst_qmediaplaylist.cpp
+++ b/tests/auto/unit/qmediaplaylist/tst_qmediaplaylist.cpp
@@ -41,7 +41,6 @@
#include "mockplaylistservice.h"
#include "mockmediaplaylistcontrol.h"
-#include "mockmediaplaylistsourcecontrol.h"
#include "mockreadonlyplaylistprovider.h"
QT_USE_NAMESPACE