author     Yoann Lopes <yoann.lopes@theqtcompany.com>    2014-11-20 17:54:18 +0100
committer  Andrew den Exter <andrew.den.exter@qinetic.com.au>    2014-11-27 23:30:05 +0100
commit     108dda7a90bd0f0337358b0db47ae55acd16dea6 (patch)
tree       e74c44c004b257fb99fdc96063641c76ac0426cf /src/plugins
parent     7e3d69668e3f04110a651dee1850a1d0c885947b (diff)
GStreamer: port to 1.0.
0.10 is still used by default. To enable GStreamer 1.0, pass GST_VERSION=1.0 to qmake for qtmultimedia.pro.

Contributions from:
Andrew den Exter <andrew.den.exter@qinetic.com.au>
Ilya Smelykh <ilya@videoexpertsgroup.com>
Jim Hodapp <jim.hodapp@canonical.com>
Sergio Schvezov <sergio.schvezov@canonical.com>

Change-Id: I72a46d1170a8794a149bdb5e20767afcc5b7587c
Reviewed-by: Andrew den Exter <andrew.den.exter@qinetic.com.au>
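For orientation, the recurring pattern in the hunks below is a compile-time switch on GST_CHECK_VERSION(1,0,0): under 1.0 an appsink delivers a GstSample whose buffer must be explicitly mapped, while under 0.10 the bytes are plain GstBuffer struct members. The following sketch is illustrative only (it is not part of the patch) and assumes a configured GstAppSink* named sink:

// Illustrative sketch of the dual 0.10/1.0 appsink read path used throughout this port.
#include <gst/gst.h>
#include <gst/app/gstappsink.h>

static void readOneBuffer(GstAppSink *sink)
{
#if GST_CHECK_VERSION(1,0,0)
    GstSample *sample = gst_app_sink_pull_sample(sink);
    if (!sample)
        return;
    GstBuffer *buffer = gst_sample_get_buffer(sample);
    GstMapInfo mapInfo;
    if (gst_buffer_map(buffer, &mapInfo, GST_MAP_READ)) {
        // ... consume mapInfo.data / mapInfo.size ...
        gst_buffer_unmap(buffer, &mapInfo);
    }
    gst_sample_unref(sample);
#else
    GstBuffer *buffer = gst_app_sink_pull_buffer(sink);
    if (!buffer)
        return;
    // ... consume buffer->data / buffer->size ...
    gst_buffer_unref(buffer);
#endif
}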
Diffstat (limited to 'src/plugins')
-rw-r--r--  src/plugins/gstreamer/audiodecoder/qgstreameraudiodecoderserviceplugin.cpp | 89
-rw-r--r--  src/plugins/gstreamer/audiodecoder/qgstreameraudiodecodersession.cpp | 36
-rw-r--r--  src/plugins/gstreamer/audiodecoder/qgstreameraudiodecodersession.h | 2
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabin.pro | 2
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabincontainer.cpp | 2
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabincontrol.cpp | 7
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinexposure.cpp | 8
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinflash.cpp | 8
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinfocus.cpp | 15
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinimagecapture.cpp | 146
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinimagecapture.h | 50
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinimageencoder.cpp | 1
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinimageprocessing.cpp | 8
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinimageprocessing.h | 7
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinmetadata.cpp | 5
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinrecorder.cpp | 9
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinservice.cpp | 7
-rw-r--r--  src/plugins/gstreamer/camerabin/camerabinsession.cpp | 339
-rw-r--r--  src/plugins/gstreamer/common.pri | 22
-rw-r--r--  src/plugins/gstreamer/gstreamer.pro | 4
-rw-r--r--  src/plugins/gstreamer/mediacapture/mediacapturecamera.json | 2
-rw-r--r--  src/plugins/gstreamer/mediacapture/qgstreameraudioencode.cpp | 3
-rw-r--r--  src/plugins/gstreamer/mediacapture/qgstreamercaptureservice.cpp | 58
-rw-r--r--  src/plugins/gstreamer/mediacapture/qgstreamercaptureservice.h | 3
-rw-r--r--  src/plugins/gstreamer/mediacapture/qgstreamercaptureserviceplugin.cpp | 90
-rw-r--r--  src/plugins/gstreamer/mediacapture/qgstreamercapturesession.cpp | 285
-rw-r--r--  src/plugins/gstreamer/mediacapture/qgstreamercapturesession.h | 19
-rw-r--r--  src/plugins/gstreamer/mediacapture/qgstreamervideoencode.cpp | 39
-rw-r--r--  src/plugins/gstreamer/mediaplayer/mediaplayer.pro | 1
-rw-r--r--  src/plugins/gstreamer/mediaplayer/qgstreamerplayercontrol.cpp | 1
-rw-r--r--  src/plugins/gstreamer/mediaplayer/qgstreamerplayerservice.cpp | 65
-rw-r--r--  src/plugins/gstreamer/mediaplayer/qgstreamerplayerservice.h | 5
-rw-r--r--  src/plugins/gstreamer/mediaplayer/qgstreamerplayerserviceplugin.cpp | 88
-rw-r--r--  src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.cpp | 238
-rw-r--r--  src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.h | 19
35 files changed, 697 insertions, 986 deletions
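A second change repeated across the session and capture files below is the raw-video caps naming: 0.10 encodes the colour space in the media type itself ("video/x-raw-rgb", "video/x-raw-yuv"), while 1.0 uses a single "video/x-raw" type plus a "format" field. A minimal illustrative helper (not from the patch) mirroring the preview-caps hunk in camerabinsession.cpp:

// Illustrative only: build RGB preview caps for either GStreamer major version.
#include <gst/gst.h>

static GstCaps *rgbPreviewCaps()
{
#if GST_CHECK_VERSION(1,0,0)
    return gst_caps_new_simple("video/x-raw",
                               "format", G_TYPE_STRING, "RGBx",
                               NULL);
#else
    return gst_caps_from_string("video/x-raw-rgb");
#endif
}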
diff --git a/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecoderserviceplugin.cpp b/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecoderserviceplugin.cpp
index 3098aab9d..befbb9ae2 100644
--- a/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecoderserviceplugin.cpp
+++ b/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecoderserviceplugin.cpp
@@ -68,89 +68,16 @@ QMultimedia::SupportEstimate QGstreamerAudioDecoderServicePlugin::hasSupport(con
return QGstUtils::hasSupport(mimeType, codecs, m_supportedMimeTypeSet);
}
-void QGstreamerAudioDecoderServicePlugin::updateSupportedMimeTypes() const
+static bool isDecoderOrDemuxer(GstElementFactory *factory)
{
- //enumerate supported mime types
- gst_init(NULL, NULL);
-
- GList *plugins, *orig_plugins;
- orig_plugins = plugins = gst_default_registry_get_plugin_list ();
-
- while (plugins) {
- GList *features, *orig_features;
-
- GstPlugin *plugin = (GstPlugin *) (plugins->data);
- plugins = g_list_next (plugins);
-
- if (plugin->flags & (1<<1)) //GST_PLUGIN_FLAG_BLACKLISTED
- continue;
-
- orig_features = features = gst_registry_get_feature_list_by_plugin(gst_registry_get_default (),
- plugin->desc.name);
- while (features) {
- if (!G_UNLIKELY(features->data == NULL)) {
- GstPluginFeature *feature = GST_PLUGIN_FEATURE(features->data);
- if (GST_IS_ELEMENT_FACTORY (feature)) {
- GstElementFactory *factory = GST_ELEMENT_FACTORY(gst_plugin_feature_load(feature));
- if (factory
- && factory->numpadtemplates > 0
- && (qstrcmp(factory->details.klass, "Codec/Decoder/Audio") == 0
- || qstrcmp(factory->details.klass, "Codec/Demux") == 0 )) {
- const GList *pads = factory->staticpadtemplates;
- while (pads) {
- GstStaticPadTemplate *padtemplate = (GstStaticPadTemplate*)(pads->data);
- pads = g_list_next (pads);
- if (padtemplate->direction != GST_PAD_SINK)
- continue;
- if (padtemplate->static_caps.string) {
- GstCaps *caps = gst_static_caps_get(&padtemplate->static_caps);
- if (!gst_caps_is_any (caps) && ! gst_caps_is_empty (caps)) {
- for (guint i = 0; i < gst_caps_get_size(caps); i++) {
- GstStructure *structure = gst_caps_get_structure(caps, i);
- QString nameLowcase = QString(gst_structure_get_name (structure)).toLower();
-
- m_supportedMimeTypeSet.insert(nameLowcase);
- if (nameLowcase.contains("mpeg")) {
- //Because mpeg version number is only included in the detail
- //description, it is necessary to manually extract this information
- //in order to match the mime type of mpeg4.
- const GValue *value = gst_structure_get_value(structure, "mpegversion");
- if (value) {
- gchar *str = gst_value_serialize (value);
- QString versions(str);
- QStringList elements = versions.split(QRegExp("\\D+"), QString::SkipEmptyParts);
- foreach (const QString &e, elements)
- m_supportedMimeTypeSet.insert(nameLowcase + e);
- g_free (str);
- }
- }
- }
- }
- gst_caps_unref(caps);
- }
- }
- gst_object_unref (factory);
- }
- } else if (GST_IS_TYPE_FIND_FACTORY(feature)) {
- QString name(gst_plugin_feature_get_name(feature));
- if (name.contains('/')) //filter out any string without '/' which is obviously not a mime type
- m_supportedMimeTypeSet.insert(name.toLower());
- }
- }
- features = g_list_next (features);
- }
- gst_plugin_feature_list_free (orig_features);
- }
- gst_plugin_list_free (orig_plugins);
+ return gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_DEMUXER)
+ || gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_DECODER
+ | GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO);
+}
-#if defined QT_SUPPORTEDMIMETYPES_DEBUG
- QStringList list = m_supportedMimeTypeSet.toList();
- list.sort();
- if (qgetenv("QT_DEBUG_PLUGINS").toInt() > 0) {
- foreach (const QString &type, list)
- qDebug() << type;
- }
-#endif
+void QGstreamerAudioDecoderServicePlugin::updateSupportedMimeTypes() const
+{
+ m_supportedMimeTypeSet = QGstUtils::supportedMimeTypes(isDecoderOrDemuxer);
}
QStringList QGstreamerAudioDecoderServicePlugin::supportedMimeTypes() const
diff --git a/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecodersession.cpp b/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecodersession.cpp
index f944a60ac..69876b963 100644
--- a/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecodersession.cpp
+++ b/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecodersession.cpp
@@ -85,7 +85,7 @@ QGstreamerAudioDecoderSession::QGstreamerAudioDecoderSession(QObject *parent)
m_durationQueries(0)
{
// Create pipeline here
- m_playbin = gst_element_factory_make("playbin2", NULL);
+ m_playbin = gst_element_factory_make(QT_GSTREAMER_PLAYBIN_ELEMENT_NAME, NULL);
if (m_playbin != 0) {
// Sort out messages
@@ -446,21 +446,40 @@ QAudioBuffer QGstreamerAudioDecoderSession::read()
if (buffersAvailable == 1)
emit bufferAvailableChanged(false);
+ const char* bufferData = 0;
+ int bufferSize = 0;
+
+#if GST_CHECK_VERSION(1,0,0)
+ GstSample *sample = gst_app_sink_pull_sample(m_appSink);
+ GstBuffer *buffer = gst_sample_get_buffer(sample);
+ GstMapInfo mapInfo;
+ gst_buffer_map(buffer, &mapInfo, GST_MAP_READ);
+ bufferData = (const char*)mapInfo.data;
+ bufferSize = mapInfo.size;
+ QAudioFormat format = QGstUtils::audioFormatForSample(sample);
+#else
GstBuffer *buffer = gst_app_sink_pull_buffer(m_appSink);
-
+ bufferData = (const char*)buffer->data;
+ bufferSize = buffer->size;
QAudioFormat format = QGstUtils::audioFormatForBuffer(buffer);
+#endif
+
if (format.isValid()) {
// XXX At the moment we have to copy data from GstBuffer into QAudioBuffer.
// We could improve performance by implementing QAbstractAudioBuffer for GstBuffer.
qint64 position = getPositionFromBuffer(buffer);
- audioBuffer = QAudioBuffer(QByteArray((const char*)buffer->data, buffer->size), format, position);
+ audioBuffer = QAudioBuffer(QByteArray((const char*)bufferData, bufferSize), format, position);
position /= 1000; // convert to milliseconds
if (position != m_position) {
m_position = position;
emit positionChanged(m_position);
}
}
+#if GST_CHECK_VERSION(1,0,0)
+ gst_sample_unref(sample);
+#else
gst_buffer_unref(buffer);
+#endif
}
return audioBuffer;
@@ -488,7 +507,7 @@ void QGstreamerAudioDecoderSession::processInvalidMedia(QAudioDecoder::Error err
emit error(int(errorCode), errorString);
}
-GstFlowReturn QGstreamerAudioDecoderSession::new_buffer(GstAppSink *, gpointer user_data)
+GstFlowReturn QGstreamerAudioDecoderSession::new_sample(GstAppSink *, gpointer user_data)
{
// "Note that the preroll buffer will also be returned as the first buffer when calling gst_app_sink_pull_buffer()."
QGstreamerAudioDecoderSession *session = reinterpret_cast<QGstreamerAudioDecoderSession*>(user_data);
@@ -531,7 +550,11 @@ void QGstreamerAudioDecoderSession::addAppSink()
GstAppSinkCallbacks callbacks;
memset(&callbacks, 0, sizeof(callbacks));
- callbacks.new_buffer = &new_buffer;
+#if GST_CHECK_VERSION(1,0,0)
+ callbacks.new_sample = &new_sample;
+#else
+ callbacks.new_buffer = &new_sample;
+#endif
gst_app_sink_set_callbacks(m_appSink, &callbacks, this, NULL);
gst_app_sink_set_max_buffers(m_appSink, MAX_BUFFERS_IN_QUEUE);
gst_base_sink_set_sync(GST_BASE_SINK(m_appSink), FALSE);
@@ -553,11 +576,10 @@ void QGstreamerAudioDecoderSession::removeAppSink()
void QGstreamerAudioDecoderSession::updateDuration()
{
- GstFormat format = GST_FORMAT_TIME;
gint64 gstDuration = 0;
int duration = -1;
- if (m_playbin && gst_element_query_duration(m_playbin, &format, &gstDuration))
+ if (m_playbin && qt_gst_element_query_duration(m_playbin, GST_FORMAT_TIME, &gstDuration))
duration = gstDuration / 1000000;
if (m_duration != duration) {
diff --git a/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecodersession.h b/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecodersession.h
index 091219666..068221c93 100644
--- a/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecodersession.h
+++ b/src/plugins/gstreamer/audiodecoder/qgstreameraudiodecodersession.h
@@ -92,7 +92,7 @@ public:
qint64 position() const;
qint64 duration() const;
- static GstFlowReturn new_buffer(GstAppSink *sink, gpointer user_data);
+ static GstFlowReturn new_sample(GstAppSink *sink, gpointer user_data);
signals:
void stateChanged(QAudioDecoder::State newState);
diff --git a/src/plugins/gstreamer/camerabin/camerabin.pro b/src/plugins/gstreamer/camerabin/camerabin.pro
index bba797f5e..64fee3e41 100644
--- a/src/plugins/gstreamer/camerabin/camerabin.pro
+++ b/src/plugins/gstreamer/camerabin/camerabin.pro
@@ -79,7 +79,7 @@ config_gstreamer_photography {
$$PWD/camerabinlocks.cpp \
$$PWD/camerabinzoom.cpp
- LIBS += -lgstphotography-0.10
+ LIBS += -lgstphotography-$$GST_VERSION
DEFINES += GST_USE_UNSTABLE_API #prevents warnings because of unstable photography API
}
diff --git a/src/plugins/gstreamer/camerabin/camerabincontainer.cpp b/src/plugins/gstreamer/camerabin/camerabincontainer.cpp
index ebb914b60..9531f0130 100644
--- a/src/plugins/gstreamer/camerabin/camerabincontainer.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabincontainer.cpp
@@ -96,7 +96,7 @@ GstEncodingContainerProfile *CameraBinContainer::createProfile()
GstCaps *caps;
if (m_actualFormat.isEmpty()) {
- caps = gst_caps_new_any();
+ return 0;
} else {
QString format = m_actualFormat;
QStringList supportedFormats = m_supportedContainers.supportedCodecs();
diff --git a/src/plugins/gstreamer/camerabin/camerabincontrol.cpp b/src/plugins/gstreamer/camerabin/camerabincontrol.cpp
index 3ec992791..8c6b8b0ea 100644
--- a/src/plugins/gstreamer/camerabin/camerabincontrol.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabincontrol.cpp
@@ -95,11 +95,6 @@ void CameraBinControl::setCaptureMode(QCamera::CaptureModes mode)
captureMode() == QCamera::CaptureStillImage ?
CamerabinResourcePolicy::ImageCaptureResources :
CamerabinResourcePolicy::VideoCaptureResources);
-#if (GST_VERSION_MAJOR == 0) && ((GST_VERSION_MINOR < 10) || (GST_VERSION_MICRO < 23))
- //due to bug in v4l2src, it's necessary to reload camera on video caps changes
- //https://bugzilla.gnome.org/show_bug.cgi?id=649832
- reloadLater();
-#endif
}
emit captureModeChanged(mode);
}
@@ -299,6 +294,8 @@ bool CameraBinControl::canChangeProperty(PropertyChangeType changeType, QCamera:
switch (changeType) {
case QCameraControl::CaptureMode:
+ return status != QCamera::ActiveStatus;
+ break;
case QCameraControl::ImageEncodingSettings:
case QCameraControl::VideoEncodingSettings:
case QCameraControl::Viewfinder:
diff --git a/src/plugins/gstreamer/camerabin/camerabinexposure.cpp b/src/plugins/gstreamer/camerabin/camerabinexposure.cpp
index a235de2f2..795fd4275 100644
--- a/src/plugins/gstreamer/camerabin/camerabinexposure.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinexposure.cpp
@@ -37,6 +37,10 @@
#include <QDebug>
+#if !GST_CHECK_VERSION(1,0,0)
+typedef GstSceneMode GstPhotographySceneMode;
+#endif
+
QT_BEGIN_NAMESPACE
CameraBinExposure::CameraBinExposure(CameraBinSession *session)
@@ -119,7 +123,7 @@ QVariant CameraBinExposure::actualValue(ExposureParameter parameter) const
}
case QCameraExposureControl::ExposureMode:
{
- GstSceneMode sceneMode;
+ GstPhotographySceneMode sceneMode;
gst_photography_get_scene_mode(m_session->photography(), &sceneMode);
switch (sceneMode) {
@@ -167,7 +171,7 @@ bool CameraBinExposure::setValue(ExposureParameter parameter, const QVariant& va
case QCameraExposureControl::ExposureMode:
{
QCameraExposure::ExposureMode mode = QCameraExposure::ExposureMode(value.toInt());
- GstSceneMode sceneMode;
+ GstPhotographySceneMode sceneMode;
gst_photography_get_scene_mode(m_session->photography(), &sceneMode);
switch (mode) {
diff --git a/src/plugins/gstreamer/camerabin/camerabinflash.cpp b/src/plugins/gstreamer/camerabin/camerabinflash.cpp
index 2140f6684..51bb9a27d 100644
--- a/src/plugins/gstreamer/camerabin/camerabinflash.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinflash.cpp
@@ -37,6 +37,10 @@
#include <QDebug>
+#if !GST_CHECK_VERSION(1,0,0)
+typedef GstFlashMode GstPhotographyFlashMode;
+#endif
+
QT_BEGIN_NAMESPACE
CameraBinFlash::CameraBinFlash(CameraBinSession *session)
@@ -51,7 +55,7 @@ CameraBinFlash::~CameraBinFlash()
QCameraExposure::FlashModes CameraBinFlash::flashMode() const
{
- GstFlashMode flashMode;
+ GstPhotographyFlashMode flashMode;
gst_photography_get_flash_mode(m_session->photography(), &flashMode);
QCameraExposure::FlashModes modes;
@@ -70,7 +74,7 @@ QCameraExposure::FlashModes CameraBinFlash::flashMode() const
void CameraBinFlash::setFlashMode(QCameraExposure::FlashModes mode)
{
- GstFlashMode flashMode;
+ GstPhotographyFlashMode flashMode;
gst_photography_get_flash_mode(m_session->photography(), &flashMode);
if (mode.testFlag(QCameraExposure::FlashAuto)) flashMode = GST_PHOTOGRAPHY_FLASH_MODE_AUTO;
diff --git a/src/plugins/gstreamer/camerabin/camerabinfocus.cpp b/src/plugins/gstreamer/camerabin/camerabinfocus.cpp
index 665e20443..061c68044 100644
--- a/src/plugins/gstreamer/camerabin/camerabinfocus.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinfocus.cpp
@@ -39,6 +39,12 @@
#include <QDebug>
#include <QtCore/qmetaobject.h>
+#include <private/qgstutils_p.h>
+
+#if !GST_CHECK_VERSION(1,0,0)
+typedef GstFocusMode GstPhotographyFocusMode;
+#endif
+
//#define CAMERABIN_DEBUG 1
QT_BEGIN_NAMESPACE
@@ -73,7 +79,7 @@ QCameraFocus::FocusModes CameraBinFocus::focusMode() const
void CameraBinFocus::setFocusMode(QCameraFocus::FocusModes mode)
{
- GstFocusMode photographyMode;
+ GstPhotographyFocusMode photographyMode;
switch (mode) {
case QCameraFocus::AutoFocus:
@@ -181,9 +187,10 @@ QCameraFocusZoneList CameraBinFocus::focusZones() const
void CameraBinFocus::handleFocusMessage(GstMessage *gm)
{
//it's a sync message, so it's called from non main thread
- if (gst_structure_has_name(gm->structure, GST_PHOTOGRAPHY_AUTOFOCUS_DONE)) {
+ const GstStructure *structure = gst_message_get_structure(gm);
+ if (gst_structure_has_name(structure, GST_PHOTOGRAPHY_AUTOFOCUS_DONE)) {
gint status = GST_PHOTOGRAPHY_FOCUS_STATUS_NONE;
- gst_structure_get_int (gm->structure, "status", &status);
+ gst_structure_get_int (structure, "status", &status);
QCamera::LockStatus focusStatus = m_focusStatus;
QCamera::LockChangeReason reason = QCamera::UserRequest;
@@ -243,7 +250,7 @@ void CameraBinFocus::_q_handleCameraStateChange(QCamera::State state)
m_cameraState = state;
if (state == QCamera::ActiveState) {
if (GstPad *pad = gst_element_get_static_pad(m_session->cameraSource(), "vfsrc")) {
- if (GstCaps *caps = gst_pad_get_negotiated_caps(pad)) {
+ if (GstCaps *caps = qt_gst_pad_get_current_caps(pad)) {
if (GstStructure *structure = gst_caps_get_structure(caps, 0)) {
int width = 0;
int height = 0;
diff --git a/src/plugins/gstreamer/camerabin/camerabinimagecapture.cpp b/src/plugins/gstreamer/camerabin/camerabinimagecapture.cpp
index 695215592..8b5130641 100644
--- a/src/plugins/gstreamer/camerabin/camerabinimagecapture.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinimagecapture.cpp
@@ -53,11 +53,13 @@ QT_BEGIN_NAMESPACE
CameraBinImageCapture::CameraBinImageCapture(CameraBinSession *session)
:QCameraImageCaptureControl(session)
+ , m_encoderProbe(this)
+ , m_muxerProbe(this)
, m_session(session)
- , m_ready(false)
- , m_requestId(0)
, m_jpegEncoderElement(0)
, m_metadataMuxerElement(0)
+ , m_requestId(0)
+ , m_ready(false)
{
connect(m_session, SIGNAL(stateChanged(QCamera::State)), SLOT(updateState()));
connect(m_session, SIGNAL(imageExposed(int)), this, SIGNAL(imageExposed(int)));
@@ -108,11 +110,18 @@ void CameraBinImageCapture::updateState()
}
}
-gboolean CameraBinImageCapture::metadataEventProbe(GstPad *pad, GstEvent *event, CameraBinImageCapture *self)
+#if GST_CHECK_VERSION(1,0,0)
+GstPadProbeReturn CameraBinImageCapture::encoderEventProbe(
+ GstPad *, GstPadProbeInfo *info, gpointer user_data)
{
- Q_UNUSED(pad);
-
- if (GST_EVENT_TYPE(event) == GST_EVENT_TAG) {
+ GstEvent * const event = gst_pad_probe_info_get_event(info);
+#else
+gboolean CameraBinImageCapture::encoderEventProbe(
+ GstElement *, GstEvent *event, gpointer user_data)
+{
+#endif
+ CameraBinImageCapture * const self = static_cast<CameraBinImageCapture *>(user_data);
+ if (event && GST_EVENT_TYPE(event) == GST_EVENT_TAG) {
GstTagList *gstTags;
gst_event_parse_tag(event, &gstTags);
QMap<QByteArray, QVariant> extendedTags = QGstUtils::gstTagListToMap(gstTags);
@@ -146,17 +155,31 @@ gboolean CameraBinImageCapture::metadataEventProbe(GstPad *pad, GstEvent *event,
}
}
}
+#if GST_CHECK_VERSION(1,0,0)
+ return GST_PAD_PROBE_OK;
+#else
+ return TRUE;
+#endif
+}
- return true;
+void CameraBinImageCapture::EncoderProbe::probeCaps(GstCaps *caps)
+{
+#if GST_CHECK_VERSION(1,0,0)
+ capture->m_bufferFormat = QGstUtils::formatForCaps(caps, &capture->m_videoInfo);
+#else
+ int bytesPerLine = 0;
+ QVideoSurfaceFormat format = QGstUtils::formatForCaps(caps, &bytesPerLine);
+ capture->m_bytesPerLine = bytesPerLine;
+ capture->m_bufferFormat = format;
+#endif
}
-gboolean CameraBinImageCapture::uncompressedBufferProbe(GstPad *pad, GstBuffer *buffer, CameraBinImageCapture *self)
+bool CameraBinImageCapture::EncoderProbe::probeBuffer(GstBuffer *buffer)
{
- Q_UNUSED(pad);
- CameraBinSession *session = self->m_session;
+ CameraBinSession * const session = capture->m_session;
#ifdef DEBUG_CAPTURE
- qDebug() << "Uncompressed buffer probe" << gst_caps_to_string(GST_BUFFER_CAPS(buffer));
+ qDebug() << "Uncompressed buffer probe";
#endif
QCameraImageCapture::CaptureDestinations destination =
@@ -165,21 +188,23 @@ gboolean CameraBinImageCapture::uncompressedBufferProbe(GstPad *pad, GstBuffer *
if (destination & QCameraImageCapture::CaptureToBuffer) {
if (format != QVideoFrame::Format_Jpeg) {
- GstCaps *caps = GST_BUFFER_CAPS(buffer);
- int bytesPerLine = -1;
- QVideoSurfaceFormat format = QVideoSurfaceGstSink::formatForCaps(caps, &bytesPerLine);
#ifdef DEBUG_CAPTURE
qDebug() << "imageAvailable(uncompressed):" << format;
#endif
- QGstVideoBuffer *videoBuffer = new QGstVideoBuffer(buffer, bytesPerLine);
+#if GST_CHECK_VERSION(1,0,0)
+ QGstVideoBuffer *videoBuffer = new QGstVideoBuffer(buffer, capture->m_videoInfo);
+#else
+ QGstVideoBuffer *videoBuffer = new QGstVideoBuffer(buffer, capture->m_bytesPerLine);
+#endif
- QVideoFrame frame(videoBuffer,
- format.frameSize(),
- format.pixelFormat());
+ QVideoFrame frame(
+ videoBuffer,
+ capture->m_bufferFormat.frameSize(),
+ capture->m_bufferFormat.pixelFormat());
- QMetaObject::invokeMethod(self, "imageAvailable",
+ QMetaObject::invokeMethod(capture, "imageAvailable",
Qt::QueuedConnection,
- Q_ARG(int, self->m_requestId),
+ Q_ARG(int, capture->m_requestId),
Q_ARG(QVideoFrame, frame));
}
}
@@ -192,25 +217,40 @@ gboolean CameraBinImageCapture::uncompressedBufferProbe(GstPad *pad, GstBuffer *
return keepBuffer;
}
-gboolean CameraBinImageCapture::jpegBufferProbe(GstPad *pad, GstBuffer *buffer, CameraBinImageCapture *self)
+void CameraBinImageCapture::MuxerProbe::probeCaps(GstCaps *caps)
{
- Q_UNUSED(pad);
- CameraBinSession *session = self->m_session;
+ capture->m_jpegResolution = QGstUtils::capsCorrectedResolution(caps);
+}
-#ifdef DEBUG_CAPTURE
- qDebug() << "Jpeg buffer probe" << gst_caps_to_string(GST_BUFFER_CAPS(buffer));
-#endif
+bool CameraBinImageCapture::MuxerProbe::probeBuffer(GstBuffer *buffer)
+{
+ CameraBinSession * const session = capture->m_session;
QCameraImageCapture::CaptureDestinations destination =
session->captureDestinationControl()->captureDestination();
if ((destination & QCameraImageCapture::CaptureToBuffer) &&
session->captureBufferFormatControl()->bufferFormat() == QVideoFrame::Format_Jpeg) {
- QGstVideoBuffer *videoBuffer = new QGstVideoBuffer(buffer,
- -1); //bytesPerLine is not available for jpegs
- QSize resolution = QGstUtils::capsCorrectedResolution(GST_BUFFER_CAPS(buffer));
+ QSize resolution = capture->m_jpegResolution;
//if resolution is not presented in caps, try to find it from encoded jpeg data:
+#if GST_CHECK_VERSION(1,0,0)
+ GstMapInfo mapInfo;
+ if (resolution.isEmpty() && gst_buffer_map(buffer, &mapInfo, GST_MAP_READ)) {
+ QBuffer data;
+ data.setData(reinterpret_cast<const char*>(mapInfo.data), mapInfo.size);
+
+ QImageReader reader(&data, "JPEG");
+ resolution = reader.size();
+
+ gst_buffer_unmap(buffer, &mapInfo);
+ }
+
+ GstVideoInfo info;
+ gst_video_info_set_format(
+ &info, GST_VIDEO_FORMAT_ENCODED, resolution.width(), resolution.height());
+ QGstVideoBuffer *videoBuffer = new QGstVideoBuffer(buffer, info);
+#else
if (resolution.isEmpty()) {
QBuffer data;
data.setData(reinterpret_cast<const char*>(GST_BUFFER_DATA(buffer)), GST_BUFFER_SIZE(buffer));
@@ -218,20 +258,28 @@ gboolean CameraBinImageCapture::jpegBufferProbe(GstPad *pad, GstBuffer *buffer,
resolution = reader.size();
}
+ QGstVideoBuffer *videoBuffer = new QGstVideoBuffer(buffer,
+ -1); //bytesPerLine is not available for jpegs
+#endif
+
+
QVideoFrame frame(videoBuffer,
resolution,
QVideoFrame::Format_Jpeg);
-
- QMetaObject::invokeMethod(self, "imageAvailable",
+ QMetaObject::invokeMethod(capture, "imageAvailable",
Qt::QueuedConnection,
- Q_ARG(int, self->m_requestId),
+ Q_ARG(int, capture->m_requestId),
Q_ARG(QVideoFrame, frame));
}
- //drop the buffer if capture to file was disabled
- return destination & QCameraImageCapture::CaptureToFile;
+
+ // Theoretically we could drop the buffer here when don't want to capture to file but that
+ // prevents camerabin from recognizing that capture has been completed and returning
+ // to its idle state.
+ return true;
}
+
bool CameraBinImageCapture::processBusMessage(const QGstreamerMessage &message)
{
//Install metadata event and buffer probes
@@ -252,9 +300,10 @@ bool CameraBinImageCapture::processBusMessage(const QGstreamerMessage &message)
return false;
QString elementName = QString::fromLatin1(gst_element_get_name(element));
+#if !GST_CHECK_VERSION(1,0,0)
GstElementClass *elementClass = GST_ELEMENT_GET_CLASS(element);
QString elementLongName = elementClass->details.longname;
-
+#endif
if (elementName.contains("jpegenc") && element != m_jpegEncoderElement) {
m_jpegEncoderElement = element;
GstPad *sinkpad = gst_element_get_static_pad(element, "sink");
@@ -264,21 +313,23 @@ bool CameraBinImageCapture::processBusMessage(const QGstreamerMessage &message)
#ifdef DEBUG_CAPTURE
qDebug() << "install metadata probe";
#endif
- gst_pad_add_event_probe(sinkpad,
- G_CALLBACK(CameraBinImageCapture::metadataEventProbe),
- this);
-
+#if GST_CHECK_VERSION(1,0,0)
+ gst_pad_add_probe(
+ sinkpad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, encoderEventProbe, this, NULL);
+#else
+ gst_pad_add_event_probe(sinkpad, G_CALLBACK(encoderEventProbe), this);
+#endif
#ifdef DEBUG_CAPTURE
qDebug() << "install uncompressed buffer probe";
#endif
- gst_pad_add_buffer_probe(sinkpad,
- G_CALLBACK(CameraBinImageCapture::uncompressedBufferProbe),
- this);
+ m_encoderProbe.addProbeToPad(sinkpad, true);
gst_object_unref(sinkpad);
- } else if ((elementName.contains("jifmux") ||
- elementName.startsWith("metadatamux") ||
- elementLongName == QLatin1String("JPEG stream muxer"))
+ } else if ((elementName.contains("jifmux")
+#if !GST_CHECK_VERSION(1,0,0)
+ || elementLongName == QLatin1String("JPEG stream muxer")
+#endif
+ || elementName.startsWith("metadatamux"))
&& element != m_metadataMuxerElement) {
//Jpeg encoded buffer probe is added after jifmux/metadatamux
//element to ensure the resulting jpeg buffer contains capture metadata
@@ -288,9 +339,8 @@ bool CameraBinImageCapture::processBusMessage(const QGstreamerMessage &message)
#ifdef DEBUG_CAPTURE
qDebug() << "install jpeg buffer probe";
#endif
- gst_pad_add_buffer_probe(srcpad,
- G_CALLBACK(CameraBinImageCapture::jpegBufferProbe),
- this);
+ m_muxerProbe.addProbeToPad(srcpad);
+
gst_object_unref(srcpad);
}
}
diff --git a/src/plugins/gstreamer/camerabin/camerabinimagecapture.h b/src/plugins/gstreamer/camerabin/camerabinimagecapture.h
index c2e26f51c..9a52dd9da 100644
--- a/src/plugins/gstreamer/camerabin/camerabinimagecapture.h
+++ b/src/plugins/gstreamer/camerabin/camerabinimagecapture.h
@@ -38,6 +38,14 @@
#include <qcameraimagecapturecontrol.h>
#include "camerabinsession.h"
+#include <qvideosurfaceformat.h>
+
+#include <private/qgstreamerbufferprobe_p.h>
+
+#if GST_CHECK_VERSION(1,0,0)
+#include <gst/video/video.h>
+#endif
+
QT_BEGIN_NAMESPACE
class CameraBinImageCapture : public QCameraImageCaptureControl, public QGstreamerBusMessageFilter
@@ -61,15 +69,47 @@ private slots:
void updateState();
private:
- static gboolean metadataEventProbe(GstPad *pad, GstEvent *event, CameraBinImageCapture *);
- static gboolean uncompressedBufferProbe(GstPad *pad, GstBuffer *buffer, CameraBinImageCapture *);
- static gboolean jpegBufferProbe(GstPad *pad, GstBuffer *buffer, CameraBinImageCapture *);
+#if GST_CHECK_VERSION(1,0,0)
+ static GstPadProbeReturn encoderEventProbe(GstPad *, GstPadProbeInfo *info, gpointer user_data);
+#else
+ static gboolean encoderEventProbe(GstElement *, GstEvent *event, gpointer user_data);
+#endif
+
+ class EncoderProbe : public QGstreamerBufferProbe
+ {
+ public:
+ EncoderProbe(CameraBinImageCapture *capture) : capture(capture) {}
+ void probeCaps(GstCaps *caps);
+ bool probeBuffer(GstBuffer *buffer);
+
+ private:
+ CameraBinImageCapture * const capture;
+ } m_encoderProbe;
+
+ class MuxerProbe : public QGstreamerBufferProbe
+ {
+ public:
+ MuxerProbe(CameraBinImageCapture *capture) : capture(capture) {}
+ void probeCaps(GstCaps *caps);
+ bool probeBuffer(GstBuffer *buffer);
+ private:
+ CameraBinImageCapture * const capture;
+
+ } m_muxerProbe;
+
+ QVideoSurfaceFormat m_bufferFormat;
+ QSize m_jpegResolution;
CameraBinSession *m_session;
- bool m_ready;
- int m_requestId;
GstElement *m_jpegEncoderElement;
GstElement *m_metadataMuxerElement;
+#if GST_CHECK_VERSION(1,0,0)
+ GstVideoInfo m_videoInfo;
+#else
+ int m_bytesPerLine;
+#endif
+ int m_requestId;
+ bool m_ready;
};
QT_END_NAMESPACE
diff --git a/src/plugins/gstreamer/camerabin/camerabinimageencoder.cpp b/src/plugins/gstreamer/camerabin/camerabinimageencoder.cpp
index 824f9964f..739364f89 100644
--- a/src/plugins/gstreamer/camerabin/camerabinimageencoder.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinimageencoder.cpp
@@ -49,7 +49,6 @@ CameraBinImageEncoder::~CameraBinImageEncoder()
QList<QSize> CameraBinImageEncoder::supportedResolutions(const QImageEncoderSettings &, bool *continuous) const
{
- qDebug() << "CameraBinImageEncoder::supportedResolutions()";
if (continuous)
*continuous = false;
diff --git a/src/plugins/gstreamer/camerabin/camerabinimageprocessing.cpp b/src/plugins/gstreamer/camerabin/camerabinimageprocessing.cpp
index ebfb08741..811225f68 100644
--- a/src/plugins/gstreamer/camerabin/camerabinimageprocessing.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinimageprocessing.cpp
@@ -34,7 +34,11 @@
#include "camerabinimageprocessing.h"
#include "camerabinsession.h"
-#include <gst/interfaces/colorbalance.h>
+#if GST_CHECK_VERSION(1,0,0)
+# include <gst/video/colorbalance.h>
+#else
+# include <gst/interfaces/colorbalance.h>
+#endif
QT_BEGIN_NAMESPACE
@@ -126,7 +130,7 @@ bool CameraBinImageProcessing::setColorBalanceValue(const QString& channel, qrea
QCameraImageProcessing::WhiteBalanceMode CameraBinImageProcessing::whiteBalanceMode() const
{
#ifdef HAVE_GST_PHOTOGRAPHY
- GstWhiteBalanceMode wbMode;
+ GstPhotographyWhiteBalanceMode wbMode;
gst_photography_get_white_balance_mode(m_session->photography(), &wbMode);
return m_mappedWbValues[wbMode];
#else
diff --git a/src/plugins/gstreamer/camerabin/camerabinimageprocessing.h b/src/plugins/gstreamer/camerabin/camerabinimageprocessing.h
index dcefcd048..2c6347f03 100644
--- a/src/plugins/gstreamer/camerabin/camerabinimageprocessing.h
+++ b/src/plugins/gstreamer/camerabin/camerabinimageprocessing.h
@@ -41,7 +41,10 @@
#include <glib.h>
#ifdef HAVE_GST_PHOTOGRAPHY
-#include <gst/interfaces/photography.h>
+# include <gst/interfaces/photography.h>
+# if !GST_CHECK_VERSION(1,0,0)
+typedef GstWhiteBalanceMode GstPhotographyWhiteBalanceMode;
+# endif
#endif
QT_BEGIN_NAMESPACE
@@ -73,7 +76,7 @@ private:
CameraBinSession *m_session;
QMap<QCameraImageProcessingControl::ProcessingParameter, int> m_values;
#ifdef HAVE_GST_PHOTOGRAPHY
- QMap<GstWhiteBalanceMode, QCameraImageProcessing::WhiteBalanceMode> m_mappedWbValues;
+ QMap<GstPhotographyWhiteBalanceMode, QCameraImageProcessing::WhiteBalanceMode> m_mappedWbValues;
#endif
};
diff --git a/src/plugins/gstreamer/camerabin/camerabinmetadata.cpp b/src/plugins/gstreamer/camerabin/camerabinmetadata.cpp
index 514813539..bc1b260a8 100644
--- a/src/plugins/gstreamer/camerabin/camerabinmetadata.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinmetadata.cpp
@@ -126,7 +126,7 @@ static const QGStreamerMetaDataKeys *qt_gstreamerMetaDataKeys()
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::AlbumTitle, GST_TAG_ALBUM, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::AlbumArtist, GST_TAG_ARTIST, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::ContributingArtist, GST_TAG_PERFORMER, QVariant::String));
-#if (GST_VERSION_MAJOR >= 0) && (GST_VERSION_MINOR >= 10) && (GST_VERSION_MICRO >= 19)
+#if GST_CHECK_VERSION(0,10,19)
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Composer, GST_TAG_COMPOSER, QVariant::String));
#endif
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Conductor, 0, QVariant::String));
@@ -153,8 +153,7 @@ static const QGStreamerMetaDataKeys *qt_gstreamerMetaDataKeys()
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Director, 0, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::LeadPerformer, GST_TAG_PERFORMER, QVariant::String));
//metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::Writer, 0, QVariant::String));
-
-#if (GST_VERSION_MAJOR >= 0) && (GST_VERSION_MINOR >= 10) && (GST_VERSION_MICRO >= 30)
+#if GST_CHECK_VERSION(0,10,30)
// Photos
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::CameraManufacturer, GST_TAG_DEVICE_MANUFACTURER, QVariant::String));
metadataKeys->append(QGStreamerMetaDataKey(QMediaMetaData::CameraModel, GST_TAG_DEVICE_MODEL, QVariant::String));
diff --git a/src/plugins/gstreamer/camerabin/camerabinrecorder.cpp b/src/plugins/gstreamer/camerabin/camerabinrecorder.cpp
index 3a04c2f47..801c7ab4e 100644
--- a/src/plugins/gstreamer/camerabin/camerabinrecorder.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinrecorder.cpp
@@ -110,9 +110,10 @@ void CameraBinRecorder::updateStatus()
m_state = QMediaRecorder::StoppedState;
m_session->stopVideoRecording();
}
- m_status = m_session->pendingState() == QCamera::ActiveState ?
- QMediaRecorder::LoadingStatus :
- QMediaRecorder::UnloadedStatus;
+ m_status = m_session->pendingState() == QCamera::ActiveState
+ && m_session->captureMode().testFlag(QCamera::CaptureVideo)
+ ? QMediaRecorder::LoadingStatus
+ : QMediaRecorder::UnloadedStatus;
}
if (m_state != oldState)
@@ -161,8 +162,6 @@ void CameraBinRecorder::applySettings()
QVideoEncoderSettings videoSettings = videoEncoderControl->videoSettings();
videoSettings.setCodec(candidate[1]);
- if (videoSettings.resolution().isEmpty())
- videoSettings.setResolution(640, 480);
videoEncoderControl->setActualVideoSettings(videoSettings);
QAudioEncoderSettings audioSettings = audioEncoderControl->audioSettings();
diff --git a/src/plugins/gstreamer/camerabin/camerabinservice.cpp b/src/plugins/gstreamer/camerabin/camerabinservice.cpp
index 969955f8a..388f2fdd5 100644
--- a/src/plugins/gstreamer/camerabin/camerabinservice.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinservice.cpp
@@ -56,11 +56,11 @@
#include "camerabincapturedestination.h"
#include "camerabinviewfindersettings.h"
#include <private/qgstreamerbushelper_p.h>
+#include <private/qgstutils_p.h>
#include <private/qgstreameraudioinputselector_p.h>
#include <private/qgstreamervideoinputdevicecontrol_p.h>
-
#if defined(HAVE_WIDGETS)
#include <private/qgstreamervideowidget_p.h>
#endif
@@ -121,7 +121,6 @@ CameraBinService::CameraBinService(GstElementFactory *sourceFactory, QObject *pa
#else
m_videoWindow = new QGstreamerVideoWindow(this);
#endif
-
#if defined(HAVE_WIDGETS)
m_videoWidgetControl = new QGstreamerVideoWidgetControl(this);
#endif
@@ -150,8 +149,6 @@ QMediaControl *CameraBinService::requestControl(const char *name)
if (!m_captureSession)
return 0;
- //qDebug() << "Request control" << name;
-
if (!m_videoOutput) {
if (qstrcmp(name, QVideoRendererControl_iid) == 0) {
m_videoOutput = m_videoRenderer;
@@ -249,7 +246,7 @@ void CameraBinService::releaseControl(QMediaControl *control)
bool CameraBinService::isCameraBinAvailable()
{
- GstElementFactory *factory = gst_element_factory_find("camerabin2");
+ GstElementFactory *factory = gst_element_factory_find(QT_GSTREAMER_CAMERABIN_ELEMENT_NAME);
if (factory) {
gst_object_unref(GST_OBJECT(factory));
return true;
diff --git a/src/plugins/gstreamer/camerabin/camerabinsession.cpp b/src/plugins/gstreamer/camerabin/camerabinsession.cpp
index a4038c589..f916b58c7 100644
--- a/src/plugins/gstreamer/camerabin/camerabinsession.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinsession.cpp
@@ -140,8 +140,8 @@ CameraBinSession::CameraBinSession(GstElementFactory *sourceFactory, QObject *pa
{
if (m_sourceFactory)
gst_object_ref(GST_OBJECT(m_sourceFactory));
+ m_camerabin = gst_element_factory_make(QT_GSTREAMER_CAMERABIN_ELEMENT_NAME, "camerabin");
- m_camerabin = gst_element_factory_make("camerabin2", "camerabin2");
g_signal_connect(G_OBJECT(m_camerabin), "notify::idle", G_CALLBACK(updateBusyStatus), this);
g_signal_connect(G_OBJECT(m_camerabin), "element-added", G_CALLBACK(elementAdded), this);
g_signal_connect(G_OBJECT(m_camerabin), "element-removed", G_CALLBACK(elementRemoved), this);
@@ -178,7 +178,15 @@ CameraBinSession::CameraBinSession(GstElementFactory *sourceFactory, QObject *pa
//post image preview in RGB format
g_object_set(G_OBJECT(m_camerabin), POST_PREVIEWS_PROPERTY, TRUE, NULL);
+#if GST_CHECK_VERSION(1,0,0)
+ GstCaps *previewCaps = gst_caps_new_simple(
+ "video/x-raw",
+ "format", G_TYPE_STRING, "RGBx",
+ NULL);
+#else
GstCaps *previewCaps = gst_caps_from_string("video/x-raw-rgb");
+#endif
+
g_object_set(G_OBJECT(m_camerabin), PREVIEW_CAPS_PROPERTY, previewCaps, NULL);
gst_caps_unref(previewCaps);
}
@@ -243,6 +251,7 @@ bool CameraBinSession::setupCameraBin()
qWarning() << "Staring camera without viewfinder available";
m_viewfinderElement = gst_element_factory_make("fakesink", NULL);
}
+ g_object_set(G_OBJECT(m_viewfinderElement), "sync", FALSE, NULL);
qt_gst_object_ref_sink(GST_OBJECT(m_viewfinderElement));
gst_element_set_state(m_camerabin, GST_STATE_NULL);
g_object_set(G_OBJECT(m_camerabin), VIEWFINDER_SINK_PROPERTY, m_viewfinderElement, NULL);
@@ -251,61 +260,27 @@ bool CameraBinSession::setupCameraBin()
return true;
}
-static GstCaps *resolutionToCaps(const QSize &resolution, const QPair<int, int> &rate = qMakePair<int,int>(0,0))
+static GstCaps *resolutionToCaps(const QSize &resolution, qreal frameRate = 0.0)
{
- if (resolution.isEmpty())
- return gst_caps_new_any();
+ GstCaps *caps = QGstUtils::videoFilterCaps();
- GstCaps *caps = 0;
- if (rate.second > 0) {
- caps = gst_caps_new_full(gst_structure_new("video/x-raw-yuv",
- "width", G_TYPE_INT, resolution.width(),
- "height", G_TYPE_INT, resolution.height(),
- "framerate", GST_TYPE_FRACTION, rate.first, rate.second,
- NULL),
- gst_structure_new("video/x-raw-rgb",
- "width", G_TYPE_INT, resolution.width(),
- "height", G_TYPE_INT, resolution.height(),
- "framerate", GST_TYPE_FRACTION, rate.first, rate.second,
- NULL),
- gst_structure_new("video/x-raw-data",
- "width", G_TYPE_INT, resolution.width(),
- "height", G_TYPE_INT, resolution.height(),
- "framerate", GST_TYPE_FRACTION, rate.first, rate.second,
- NULL),
- gst_structure_new("video/x-android-buffer",
- "width", G_TYPE_INT, resolution.width(),
- "height", G_TYPE_INT, resolution.height(),
- "framerate", GST_TYPE_FRACTION, rate.first, rate.second,
- NULL),
- gst_structure_new("image/jpeg",
- "width", G_TYPE_INT, resolution.width(),
- "height", G_TYPE_INT, resolution.height(),
- "framerate", GST_TYPE_FRACTION, rate.first, rate.second,
- NULL),
- NULL);
- } else {
- caps = gst_caps_new_full (gst_structure_new ("video/x-raw-yuv",
- "width", G_TYPE_INT, resolution.width(),
- "height", G_TYPE_INT, resolution.height(),
- NULL),
- gst_structure_new ("video/x-raw-rgb",
- "width", G_TYPE_INT, resolution.width(),
- "height", G_TYPE_INT, resolution.height(),
- NULL),
- gst_structure_new("video/x-raw-data",
- "width", G_TYPE_INT, resolution.width(),
- "height", G_TYPE_INT, resolution.height(),
- NULL),
- gst_structure_new ("video/x-android-buffer",
- "width", G_TYPE_INT, resolution.width(),
- "height", G_TYPE_INT, resolution.height(),
- NULL),
- gst_structure_new ("image/jpeg",
- "width", G_TYPE_INT, resolution.width(),
- "height", G_TYPE_INT, resolution.height(),
- NULL),
- NULL);
+ if (!resolution.isEmpty()) {
+ gst_caps_set_simple(
+ caps,
+ "width", G_TYPE_INT, resolution.width(),
+ "height", G_TYPE_INT, resolution.height(),
+ NULL);
+ }
+
+ if (frameRate > 0.0) {
+ gint numerator;
+ gint denominator;
+ gst_util_double_to_fraction(frameRate, &numerator, &denominator);
+
+ gst_caps_set_simple(
+ caps,
+ "framerate", GST_TYPE_FRACTION, numerator, denominator,
+ NULL);
}
return caps;
@@ -314,40 +289,40 @@ static GstCaps *resolutionToCaps(const QSize &resolution, const QPair<int, int>
void CameraBinSession::setupCaptureResolution()
{
QSize resolution = m_imageEncodeControl->imageSettings().resolution();
- if (!resolution.isEmpty()) {
+ {
GstCaps *caps = resolutionToCaps(resolution);
#if CAMERABIN_DEBUG
- qDebug() << Q_FUNC_INFO << "set image resolution" << resolution << gst_caps_to_string(caps);
+ qDebug() << Q_FUNC_INFO << "set image resolution" << resolution << caps;
#endif
g_object_set(m_camerabin, IMAGE_CAPTURE_CAPS_PROPERTY, caps, NULL);
- gst_caps_unref(caps);
- } else {
- g_object_set(m_camerabin, IMAGE_CAPTURE_CAPS_PROPERTY, NULL, NULL);
+ if (caps)
+ gst_caps_unref(caps);
}
+ const QSize viewfinderResolution = m_viewfinderSettingsControl->resolution();
resolution = m_videoEncodeControl->actualVideoSettings().resolution();
- //qreal framerate = m_videoEncodeControl->videoSettings().frameRate();
- if (!resolution.isEmpty()) {
- GstCaps *caps = resolutionToCaps(resolution /*, framerate*/); //convert to rational
+ qreal framerate = m_videoEncodeControl->videoSettings().frameRate();
+ {
+ GstCaps *caps = resolutionToCaps(
+ !resolution.isEmpty() ? resolution : viewfinderResolution, framerate);
#if CAMERABIN_DEBUG
- qDebug() << Q_FUNC_INFO << "set video resolution" << resolution << gst_caps_to_string(caps);
+ qDebug() << Q_FUNC_INFO << "set video resolution" << resolution << caps;
#endif
g_object_set(m_camerabin, VIDEO_CAPTURE_CAPS_PROPERTY, caps, NULL);
- gst_caps_unref(caps);
- } else {
- g_object_set(m_camerabin, VIDEO_CAPTURE_CAPS_PROPERTY, NULL, NULL);
+ if (caps)
+ gst_caps_unref(caps);
}
- resolution = m_viewfinderSettingsControl->resolution();
- if (!resolution.isEmpty()) {
+ if (!viewfinderResolution.isEmpty())
+ resolution = viewfinderResolution;
+ {
GstCaps *caps = resolutionToCaps(resolution);
#if CAMERABIN_DEBUG
- qDebug() << Q_FUNC_INFO << "set viewfinder resolution" << resolution << gst_caps_to_string(caps);
+ qDebug() << Q_FUNC_INFO << "set viewfinder resolution" << resolution << caps;
#endif
g_object_set(m_camerabin, VIEWFINDER_CAPS_PROPERTY, caps, NULL);
- gst_caps_unref(caps);
- } else {
- g_object_set(m_camerabin, VIEWFINDER_CAPS_PROPERTY, NULL, NULL);
+ if (caps)
+ gst_caps_unref(caps);
}
if (m_videoEncoder)
@@ -363,13 +338,17 @@ void CameraBinSession::setAudioCaptureCaps()
if (sampleRate == -1 && channelCount == -1)
return;
+#if GST_CHECK_VERSION(1,0,0)
+ GstStructure *structure = gst_structure_new_empty(QT_GSTREAMER_RAW_AUDIO_MIME);
+#else
GstStructure *structure = gst_structure_new(
- "audio/x-raw-int",
+ QT_GSTREAMER_RAW_AUDIO_MIME,
"endianness", G_TYPE_INT, 1234,
"signed", G_TYPE_BOOLEAN, TRUE,
"width", G_TYPE_INT, 16,
"depth", G_TYPE_INT, 16,
NULL);
+#endif
if (sampleRate != -1)
gst_structure_set(structure, "rate", G_TYPE_INT, sampleRate, NULL);
if (channelCount != -1)
@@ -760,7 +739,7 @@ qint64 CameraBinSession::duration() const
if (fileSink) {
GstFormat format = GST_FORMAT_TIME;
gint64 duration = 0;
- bool ret = gst_element_query_position(fileSink, &format, &duration);
+ bool ret = qt_gst_element_query_position(fileSink, format, &duration);
gst_object_unref(GST_OBJECT(fileSink));
if (ret)
return duration / 1000000;
@@ -795,129 +774,57 @@ void CameraBinSession::setMetaData(const QMap<QByteArray, QVariant> &data)
{
m_metaData = data;
- if (m_camerabin) {
- GstIterator *elements = gst_bin_iterate_all_by_interface(GST_BIN(m_camerabin), GST_TYPE_TAG_SETTER);
- GstElement *element = 0;
- while (gst_iterator_next(elements, (void**)&element) == GST_ITERATOR_OK) {
- gst_tag_setter_reset_tags(GST_TAG_SETTER(element));
-
- QMapIterator<QByteArray, QVariant> it(data);
- while (it.hasNext()) {
- it.next();
- const QString tagName = it.key();
- const QVariant tagValue = it.value();
-
- switch(tagValue.type()) {
- case QVariant::String:
- gst_tag_setter_add_tags(GST_TAG_SETTER(element),
- GST_TAG_MERGE_REPLACE,
- tagName.toUtf8().constData(),
- tagValue.toString().toUtf8().constData(),
- NULL);
- break;
- case QVariant::Int:
- case QVariant::LongLong:
- gst_tag_setter_add_tags(GST_TAG_SETTER(element),
- GST_TAG_MERGE_REPLACE,
- tagName.toUtf8().constData(),
- tagValue.toInt(),
- NULL);
- break;
- case QVariant::Double:
- gst_tag_setter_add_tags(GST_TAG_SETTER(element),
- GST_TAG_MERGE_REPLACE,
- tagName.toUtf8().constData(),
- tagValue.toDouble(),
- NULL);
- break;
- case QVariant::DateTime: {
- QDateTime date = tagValue.toDateTime().toLocalTime();
- gst_tag_setter_add_tags(GST_TAG_SETTER(element),
- GST_TAG_MERGE_REPLACE,
- tagName.toUtf8().constData(),
- gst_date_time_new_local_time(
- date.date().year(), date.date().month(), date.date().day(),
- date.time().hour(), date.time().minute(), date.time().second()),
- NULL);
- break;
- }
- default:
- break;
- }
- }
- }
- gst_iterator_free(elements);
- }
+ if (m_camerabin)
+ QGstUtils::setMetaData(m_camerabin, data);
}
bool CameraBinSession::processSyncMessage(const QGstreamerMessage &message)
{
GstMessage* gm = message.rawMessage();
- const GstStructure *st;
- const GValue *image;
- GstBuffer *buffer = NULL;
if (gm && GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) {
- if (m_captureMode == QCamera::CaptureStillImage &&
- gst_structure_has_name(gm->structure, "preview-image")) {
- st = gst_message_get_structure(gm);
-
- if (gst_structure_has_field_typed(st, "buffer", GST_TYPE_BUFFER)) {
- image = gst_structure_get_value(st, "buffer");
- if (image) {
- buffer = gst_value_get_buffer(image);
-
- QImage img;
-
- GstCaps *caps = gst_buffer_get_caps(buffer);
- if (caps) {
- GstStructure *structure = gst_caps_get_structure(caps, 0);
- gint width = 0;
- gint height = 0;
-#if CAMERABIN_DEBUG
- qDebug() << "Preview caps:" << gst_structure_to_string(structure);
+ const GstStructure *st = gst_message_get_structure(gm);
+ const GValue *sampleValue = 0;
+ if (m_captureMode == QCamera::CaptureStillImage
+ && gst_structure_has_name(st, "preview-image")
+#if GST_CHECK_VERSION(1,0,0)
+ && gst_structure_has_field_typed(st, "sample", GST_TYPE_SAMPLE)
+ && (sampleValue = gst_structure_get_value(st, "sample"))) {
+ GstSample * const sample = gst_value_get_sample(sampleValue);
+ GstCaps * const previewCaps = gst_sample_get_caps(sample);
+ GstBuffer * const buffer = gst_sample_get_buffer(sample);
+#else
+ && gst_structure_has_field_typed(st, "buffer", GST_TYPE_BUFFER)
+ && (sampleValue = gst_structure_get_value(st, "buffer"))) {
+ GstBuffer * const buffer = gst_value_get_buffer(sampleValue);
#endif
- if (structure &&
- gst_structure_get_int(structure, "width", &width) &&
- gst_structure_get_int(structure, "height", &height) &&
- width > 0 && height > 0) {
- if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb") == 0) {
- QImage::Format format = QImage::Format_Invalid;
- int bpp = 0;
- gst_structure_get_int(structure, "bpp", &bpp);
-
- if (bpp == 24)
- format = QImage::Format_RGB888;
- else if (bpp == 32)
- format = QImage::Format_RGB32;
-
- if (format != QImage::Format_Invalid) {
- img = QImage((const uchar *)buffer->data, width, height, format);
- img.bits(); //detach
- }
- }
- }
- gst_caps_unref(caps);
-
- static QMetaMethod exposedSignal = QMetaMethod::fromSignal(&CameraBinSession::imageExposed);
- exposedSignal.invoke(this,
- Qt::QueuedConnection,
- Q_ARG(int,m_requestId));
-
- static QMetaMethod capturedSignal = QMetaMethod::fromSignal(&CameraBinSession::imageCaptured);
- capturedSignal.invoke(this,
- Qt::QueuedConnection,
- Q_ARG(int,m_requestId),
- Q_ARG(QImage,img));
- }
-
- }
- return true;
+ QImage image;
+#if GST_CHECK_VERSION(1,0,0)
+ GstVideoInfo previewInfo;
+ if (gst_video_info_from_caps(&previewInfo, previewCaps))
+ image = QGstUtils::bufferToImage(buffer, previewInfo);
+ gst_sample_unref(sample);
+#else
+ image = QGstUtils::bufferToImage(buffer);
+ gst_buffer_unref(buffer);
+#endif
+ if (!image.isNull()) {
+ static QMetaMethod exposedSignal = QMetaMethod::fromSignal(&CameraBinSession::imageExposed);
+ exposedSignal.invoke(this,
+ Qt::QueuedConnection,
+ Q_ARG(int,m_requestId));
+
+ static QMetaMethod capturedSignal = QMetaMethod::fromSignal(&CameraBinSession::imageCaptured);
+ capturedSignal.invoke(this,
+ Qt::QueuedConnection,
+ Q_ARG(int,m_requestId),
+ Q_ARG(QImage,image));
}
+ return true;
}
#ifdef HAVE_GST_PHOTOGRAPHY
- if (gst_structure_has_name(gm->structure, GST_PHOTOGRAPHY_AUTOFOCUS_DONE))
+ if (gst_structure_has_name(st, GST_PHOTOGRAPHY_AUTOFOCUS_DONE))
m_cameraFocusControl->handleFocusMessage(gm);
#endif
}
@@ -1109,20 +1016,12 @@ QList< QPair<int,int> > CameraBinSession::supportedFrameRates(const QSize &frame
if (frameSize.isEmpty()) {
caps = gst_caps_copy(supportedCaps);
} else {
- GstCaps *filter = gst_caps_new_full(
- gst_structure_new(
- "video/x-raw-rgb",
- "width" , G_TYPE_INT , frameSize.width(),
- "height" , G_TYPE_INT, frameSize.height(), NULL),
- gst_structure_new(
- "video/x-raw-yuv",
- "width" , G_TYPE_INT, frameSize.width(),
- "height" , G_TYPE_INT, frameSize.height(), NULL),
- gst_structure_new(
- "image/jpeg",
- "width" , G_TYPE_INT, frameSize.width(),
- "height" , G_TYPE_INT, frameSize.height(), NULL),
- NULL);
+ GstCaps *filter = QGstUtils::videoFilterCaps();
+ gst_caps_set_simple(
+ filter,
+ "width", G_TYPE_INT, frameSize.width(),
+ "height", G_TYPE_INT, frameSize.height(),
+ NULL);
caps = gst_caps_intersect(supportedCaps, filter);
gst_caps_unref(filter);
@@ -1133,7 +1032,7 @@ QList< QPair<int,int> > CameraBinSession::supportedFrameRates(const QSize &frame
caps = gst_caps_make_writable(caps);
for (uint i=0; i<gst_caps_get_size(caps); i++) {
GstStructure *structure = gst_caps_get_structure(caps, i);
- gst_structure_set_name(structure, "video/x-raw-yuv");
+ gst_structure_set_name(structure, "video/x-raw");
const GValue *oldRate = gst_structure_get_value(structure, "framerate");
GValue rate;
memset(&rate, 0, sizeof(rate));
@@ -1142,8 +1041,11 @@ QList< QPair<int,int> > CameraBinSession::supportedFrameRates(const QSize &frame
gst_structure_remove_all_fields(structure);
gst_structure_set_value(structure, "framerate", &rate);
}
+#if GST_CHECK_VERSION(1,0,0)
+ caps = gst_caps_simplify(caps);
+#else
gst_caps_do_simplify(caps);
-
+#endif
for (uint i=0; i<gst_caps_get_size(caps); i++) {
GstStructure *structure = gst_caps_get_structure(caps, i);
@@ -1154,7 +1056,7 @@ QList< QPair<int,int> > CameraBinSession::supportedFrameRates(const QSize &frame
qSort(res.begin(), res.end(), rateLessThan);
#if CAMERABIN_DEBUG
- qDebug() << "Supported rates:" << gst_caps_to_string(caps);
+ qDebug() << "Supported rates:" << caps;
qDebug() << res;
#endif
@@ -1213,31 +1115,24 @@ QList<QSize> CameraBinSession::supportedResolutions(QPair<int,int> rate,
SUPPORTED_IMAGE_CAPTURE_CAPS_PROPERTY : SUPPORTED_VIDEO_CAPTURE_CAPS_PROPERTY,
&supportedCaps, NULL);
- if (!supportedCaps)
- return res;
-
#if CAMERABIN_DEBUG
- qDebug() << "Source caps:" << gst_caps_to_string(supportedCaps);
+ qDebug() << "Source caps:" << supportedCaps;
#endif
+ if (!supportedCaps)
+ return res;
+
GstCaps *caps = 0;
bool isContinuous = false;
if (rate.first <= 0 || rate.second <= 0) {
caps = gst_caps_copy(supportedCaps);
} else {
- GstCaps *filter = gst_caps_new_full(
- gst_structure_new(
- "video/x-raw-rgb",
- "framerate" , GST_TYPE_FRACTION , rate.first, rate.second, NULL),
- gst_structure_new(
- "video/x-raw-yuv",
- "framerate" , GST_TYPE_FRACTION , rate.first, rate.second, NULL),
- gst_structure_new(
- "image/jpeg",
- "framerate" , GST_TYPE_FRACTION , rate.first, rate.second, NULL),
- NULL);
-
+ GstCaps *filter = QGstUtils::videoFilterCaps();
+ gst_caps_set_simple(
+ filter,
+ "framerate" , GST_TYPE_FRACTION , rate.first, rate.second,
+ NULL);
caps = gst_caps_intersect(supportedCaps, filter);
gst_caps_unref(filter);
}
@@ -1247,7 +1142,7 @@ QList<QSize> CameraBinSession::supportedResolutions(QPair<int,int> rate,
caps = gst_caps_make_writable(caps);
for (uint i=0; i<gst_caps_get_size(caps); i++) {
GstStructure *structure = gst_caps_get_structure(caps, i);
- gst_structure_set_name(structure, "video/x-raw-yuv");
+ gst_structure_set_name(structure, "video/x-raw");
const GValue *oldW = gst_structure_get_value(structure, "width");
const GValue *oldH = gst_structure_get_value(structure, "height");
GValue w;
@@ -1262,7 +1157,13 @@ QList<QSize> CameraBinSession::supportedResolutions(QPair<int,int> rate,
gst_structure_set_value(structure, "width", &w);
gst_structure_set_value(structure, "height", &h);
}
+
+#if GST_CHECK_VERSION(1,0,0)
+ caps = gst_caps_simplify(caps);
+#else
gst_caps_do_simplify(caps);
+#endif
+
for (uint i=0; i<gst_caps_get_size(caps); i++) {
GstStructure *structure = gst_caps_get_structure(caps, i);
diff --git a/src/plugins/gstreamer/common.pri b/src/plugins/gstreamer/common.pri
index 8b421b8d1..eb6a29987 100644
--- a/src/plugins/gstreamer/common.pri
+++ b/src/plugins/gstreamer/common.pri
@@ -12,14 +12,18 @@ LIBS += -lqgsttools_p
CONFIG += link_pkgconfig
PKGCONFIG += \
- gstreamer-0.10 \
- gstreamer-base-0.10 \
- gstreamer-interfaces-0.10 \
- gstreamer-audio-0.10 \
- gstreamer-video-0.10 \
- gstreamer-pbutils-0.10
+ gstreamer-$$GST_VERSION \
+ gstreamer-base-$$GST_VERSION \
+ gstreamer-audio-$$GST_VERSION \
+ gstreamer-video-$$GST_VERSION \
+ gstreamer-pbutils-$$GST_VERSION
+
+maemo*:PKGCONFIG +=gstreamer-plugins-bad-$$GST_VERSION
+
+mir: {
+ DEFINES += HAVE_MIR
+}
-maemo*:PKGCONFIG +=gstreamer-plugins-bad-0.10
config_resourcepolicy {
DEFINES += HAVE_RESOURCE_POLICY
@@ -27,8 +31,8 @@ config_resourcepolicy {
}
config_gstreamer_appsrc {
- PKGCONFIG += gstreamer-app-0.10
+ PKGCONFIG += gstreamer-app-$$GST_VERSION
DEFINES += HAVE_GST_APPSRC
- LIBS += -lgstapp-0.10
+ LIBS += -lgstapp-$$GST_VERSION
}
diff --git a/src/plugins/gstreamer/gstreamer.pro b/src/plugins/gstreamer/gstreamer.pro
index 76490100d..0ff35101e 100644
--- a/src/plugins/gstreamer/gstreamer.pro
+++ b/src/plugins/gstreamer/gstreamer.pro
@@ -2,8 +2,8 @@ TEMPLATE = subdirs
SUBDIRS += \
audiodecoder \
- mediacapture \
- mediaplayer
+ mediaplayer \
+ mediacapture
config_gstreamer_encodingprofiles {
SUBDIRS += camerabin
diff --git a/src/plugins/gstreamer/mediacapture/mediacapturecamera.json b/src/plugins/gstreamer/mediacapture/mediacapturecamera.json
index af9f3575f..f5fba17e6 100644
--- a/src/plugins/gstreamer/mediacapture/mediacapturecamera.json
+++ b/src/plugins/gstreamer/mediacapture/mediacapturecamera.json
@@ -1,4 +1,4 @@
{
- "Keys": ["gstreamermediacapture"]
+ "Keys": ["gstreamermediacapture"],
"Services": ["org.qt-project.qt.audiosource", "org.qt-project.qt.camera"]
}
diff --git a/src/plugins/gstreamer/mediacapture/qgstreameraudioencode.cpp b/src/plugins/gstreamer/mediacapture/qgstreameraudioencode.cpp
index d05df4997..8881445f8 100644
--- a/src/plugins/gstreamer/mediacapture/qgstreameraudioencode.cpp
+++ b/src/plugins/gstreamer/mediacapture/qgstreameraudioencode.cpp
@@ -34,6 +34,7 @@
#include "qgstreameraudioencode.h"
#include "qgstreamercapturesession.h"
#include "qgstreamermediacontainercontrol.h"
+#include <private/qgstutils_p.h>
#include <QtCore/qdebug.h>
@@ -175,7 +176,7 @@ GstElement *QGstreamerAudioEncode::createEncoder()
if (m_audioSettings.sampleRate() > 0 || m_audioSettings.channelCount() > 0) {
GstCaps *caps = gst_caps_new_empty();
- GstStructure *structure = gst_structure_new("audio/x-raw-int", NULL);
+ GstStructure *structure = qt_gst_structure_new_empty(QT_GSTREAMER_RAW_AUDIO_MIME);
if (m_audioSettings.sampleRate() > 0)
gst_structure_set(structure, "rate", G_TYPE_INT, m_audioSettings.sampleRate(), NULL );
diff --git a/src/plugins/gstreamer/mediacapture/qgstreamercaptureservice.cpp b/src/plugins/gstreamer/mediacapture/qgstreamercaptureservice.cpp
index 97a165dca..1ab98cd4a 100644
--- a/src/plugins/gstreamer/mediacapture/qgstreamercaptureservice.cpp
+++ b/src/plugins/gstreamer/mediacapture/qgstreamercaptureservice.cpp
@@ -62,27 +62,25 @@
QT_BEGIN_NAMESPACE
-QGstreamerCaptureService::QGstreamerCaptureService(const QString &service, QObject *parent):
- QMediaService(parent)
-{
- m_captureSession = 0;
- m_cameraControl = 0;
- m_metaDataControl = 0;
-
+QGstreamerCaptureService::QGstreamerCaptureService(const QString &service, QObject *parent)
+ : QMediaService(parent)
+ , m_captureSession(0)
+ , m_cameraControl(0)
#if defined(USE_GSTREAMER_CAMERA)
- m_videoInput = 0;
+ , m_videoInput(0)
#endif
- m_audioInputSelector = 0;
- m_videoInputDevice = 0;
-
- m_videoOutput = 0;
- m_videoRenderer = 0;
- m_videoWindow = 0;
+ , m_metaDataControl(0)
+ , m_audioInputSelector(0)
+ , m_videoInputDevice(0)
+ , m_videoOutput(0)
+ , m_videoRenderer(0)
+ , m_videoWindow(0)
#if defined(HAVE_WIDGETS)
- m_videoWidgetControl = 0;
+ , m_videoWidgetControl(0)
#endif
- m_imageCaptureControl = 0;
-
+ , m_imageCaptureControl(0)
+ , m_audioProbeControl(0)
+{
if (service == Q_MEDIASERVICE_AUDIOSOURCE) {
m_captureSession = new QGstreamerCaptureSession(QGstreamerCaptureSession::Audio, this);
}
@@ -163,12 +161,12 @@ QMediaControl *QGstreamerCaptureService::requestControl(const char *name)
return m_imageCaptureControl;
if (qstrcmp(name,QMediaAudioProbeControl_iid) == 0) {
- if (m_captureSession) {
- QGstreamerAudioProbeControl *probe = new QGstreamerAudioProbeControl(this);
- m_captureSession->addProbe(probe);
- return probe;
+ if (!m_audioProbeControl) {
+ m_audioProbeControl = new QGstreamerAudioProbeControl(this);
+ m_captureSession->addProbe(m_audioProbeControl);
}
- return 0;
+ m_audioProbeControl->ref.ref();
+ return m_audioProbeControl;
}
if (!m_videoOutput) {
@@ -194,17 +192,15 @@ QMediaControl *QGstreamerCaptureService::requestControl(const char *name)
void QGstreamerCaptureService::releaseControl(QMediaControl *control)
{
- if (control && control == m_videoOutput) {
+ if (!control) {
+ return;
+ } else if (control == m_videoOutput) {
m_videoOutput = 0;
m_captureSession->setVideoPreview(0);
- }
-
- QGstreamerAudioProbeControl* audioProbe = qobject_cast<QGstreamerAudioProbeControl*>(control);
- if (audioProbe) {
- if (m_captureSession)
- m_captureSession->removeProbe(audioProbe);
- delete audioProbe;
- return;
+ } else if (control == m_audioProbeControl && !m_audioProbeControl->ref.deref()) {
+ m_captureSession->removeProbe(m_audioProbeControl);
+ delete m_audioProbeControl;
+ m_audioProbeControl = 0;
}
}
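
Note on requestControl()/releaseControl() above: instead of allocating a throw-away probe per request, a single QGstreamerAudioProbeControl is created lazily, attached to the session once, and shared; its ref counter tracks how many requesters still hold it. Condensed, the pattern the hunk implements is the following (this assumes, as the hunk itself does, that the control exposes an atomic counter named ref):

    // Request: create and attach the shared probe on first use, then count the reference.
    if (!m_audioProbeControl) {
        m_audioProbeControl = new QGstreamerAudioProbeControl(this);
        m_captureSession->addProbe(m_audioProbeControl);
    }
    m_audioProbeControl->ref.ref();
    return m_audioProbeControl;

    // Release: detach and delete only when the last requester lets go.
    if (control == m_audioProbeControl && !m_audioProbeControl->ref.deref()) {
        m_captureSession->removeProbe(m_audioProbeControl);
        delete m_audioProbeControl;
        m_audioProbeControl = 0;
    }
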
diff --git a/src/plugins/gstreamer/mediacapture/qgstreamercaptureservice.h b/src/plugins/gstreamer/mediacapture/qgstreamercaptureservice.h
index 7ff8ce253..e0cf4ee42 100644
--- a/src/plugins/gstreamer/mediacapture/qgstreamercaptureservice.h
+++ b/src/plugins/gstreamer/mediacapture/qgstreamercaptureservice.h
@@ -43,6 +43,7 @@ QT_BEGIN_NAMESPACE
class QAudioInputSelectorControl;
class QVideoDeviceSelectorControl;
+class QGstreamerAudioProbeControl;
class QGstreamerCaptureSession;
class QGstreamerCameraControl;
class QGstreamerMessage;
@@ -86,6 +87,8 @@ private:
QMediaControl *m_videoWidgetControl;
#endif
QGstreamerImageCaptureControl *m_imageCaptureControl;
+
+ QGstreamerAudioProbeControl *m_audioProbeControl;
};
QT_END_NAMESPACE
diff --git a/src/plugins/gstreamer/mediacapture/qgstreamercaptureserviceplugin.cpp b/src/plugins/gstreamer/mediacapture/qgstreamercaptureserviceplugin.cpp
index 0ac34ee72..85ed687d3 100644
--- a/src/plugins/gstreamer/mediacapture/qgstreamercaptureserviceplugin.cpp
+++ b/src/plugins/gstreamer/mediacapture/qgstreamercaptureserviceplugin.cpp
@@ -110,90 +110,16 @@ QMultimedia::SupportEstimate QGstreamerCaptureServicePlugin::hasSupport(const QS
return QGstUtils::hasSupport(mimeType, codecs, m_supportedMimeTypeSet);
}
+
+static bool isEncoderOrMuxer(GstElementFactory *factory)
+{
+ return gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_MUXER)
+ || gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_ENCODER);
+}
+
void QGstreamerCaptureServicePlugin::updateSupportedMimeTypes() const
{
- //enumerate supported mime types
- gst_init(NULL, NULL);
-
- GList *plugins, *orig_plugins;
- orig_plugins = plugins = gst_default_registry_get_plugin_list ();
-
- while (plugins) {
- GList *features, *orig_features;
-
- GstPlugin *plugin = (GstPlugin *) (plugins->data);
- plugins = g_list_next (plugins);
-
- if (plugin->flags & (1<<1)) //GST_PLUGIN_FLAG_BLACKLISTED
- continue;
-
- orig_features = features = gst_registry_get_feature_list_by_plugin(gst_registry_get_default (),
- plugin->desc.name);
- while (features) {
- if (!G_UNLIKELY(features->data == NULL)) {
- GstPluginFeature *feature = GST_PLUGIN_FEATURE(features->data);
- if (GST_IS_ELEMENT_FACTORY (feature)) {
- GstElementFactory *factory = GST_ELEMENT_FACTORY(gst_plugin_feature_load(feature));
- if (factory
- && factory->numpadtemplates > 0
- && (qstrcmp(factory->details.klass, "Codec/Decoder/Audio") == 0
- || qstrcmp(factory->details.klass, "Codec/Decoder/Video") == 0
- || qstrcmp(factory->details.klass, "Codec/Demux") == 0 )) {
- const GList *pads = factory->staticpadtemplates;
- while (pads) {
- GstStaticPadTemplate *padtemplate = (GstStaticPadTemplate*)(pads->data);
- pads = g_list_next (pads);
- if (padtemplate->direction != GST_PAD_SINK)
- continue;
- if (padtemplate->static_caps.string) {
- GstCaps *caps = gst_static_caps_get(&padtemplate->static_caps);
- if (!gst_caps_is_any (caps) && ! gst_caps_is_empty (caps)) {
- for (guint i = 0; i < gst_caps_get_size(caps); i++) {
- GstStructure *structure = gst_caps_get_structure(caps, i);
- QString nameLowcase = QString(gst_structure_get_name (structure)).toLower();
-
- m_supportedMimeTypeSet.insert(nameLowcase);
- if (nameLowcase.contains("mpeg")) {
- //Because mpeg version number is only included in the detail
- //description, it is necessary to manually extract this information
- //in order to match the mime type of mpeg4.
- const GValue *value = gst_structure_get_value(structure, "mpegversion");
- if (value) {
- gchar *str = gst_value_serialize (value);
- QString versions(str);
- QStringList elements = versions.split(QRegExp("\\D+"), QString::SkipEmptyParts);
- foreach (const QString &e, elements)
- m_supportedMimeTypeSet.insert(nameLowcase + e);
- g_free (str);
- }
- }
- }
- }
- gst_caps_unref(caps);
- }
- }
- gst_object_unref (factory);
- }
- } else if (GST_IS_TYPE_FIND_FACTORY(feature)) {
- QString name(gst_plugin_feature_get_name(feature));
- if (name.contains('/')) //filter out any string without '/' which is obviously not a mime type
- m_supportedMimeTypeSet.insert(name.toLower());
- }
- }
- features = g_list_next (features);
- }
- gst_plugin_feature_list_free (orig_features);
- }
- gst_plugin_list_free (orig_plugins);
-
-#if defined QT_SUPPORTEDMIMETYPES_DEBUG
- QStringList list = m_supportedMimeTypeSet.toList();
- list.sort();
- if (qgetenv("QT_DEBUG_PLUGINS").toInt() > 0) {
- foreach (const QString &type, list)
- qDebug() << type;
- }
-#endif
+ m_supportedMimeTypeSet = QGstUtils::supportedMimeTypes(isEncoderOrMuxer);
}
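
Note on the hunk above: the hand-rolled walk over gst_default_registry_get_plugin_list() (which poked at 0.10-only struct fields such as factory->details.klass) is replaced by QGstUtils::supportedMimeTypes() driven by the isEncoderOrMuxer() predicate; gst_element_factory_list_is_type() is available in late 0.10 as well as 1.0, so the predicate needs no version guard. A rough sketch, not the QGstUtils implementation, of feeding such a predicate with factories from the registry:

    #include <gst/gst.h>

    static void printMatchingFactories(bool (*predicate)(GstElementFactory *))
    {
        GList *factories = gst_element_factory_list_get_elements(
                GST_ELEMENT_FACTORY_TYPE_ANY, GST_RANK_NONE);
        for (GList *it = factories; it; it = it->next) {
            GstElementFactory *factory = GST_ELEMENT_FACTORY(it->data);
            if (predicate(factory))
                g_print("%s\n", gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(factory)));
        }
        gst_plugin_feature_list_free(factories);
    }
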
QStringList QGstreamerCaptureServicePlugin::supportedMimeTypes() const
diff --git a/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.cpp b/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.cpp
index a2bd80de8..af5b339e5 100644
--- a/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.cpp
+++ b/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.cpp
@@ -45,6 +45,7 @@
#include <gst/gsttagsetter.h>
#include <gst/gstversion.h>
+#include <gst/video/video.h>
#include <QtCore/qdebug.h>
#include <QtCore/qurl.h>
@@ -52,7 +53,6 @@
#include <QCoreApplication>
#include <QtCore/qmetaobject.h>
#include <QtCore/qfile.h>
-
#include <QtGui/qimage.h>
QT_BEGIN_NAMESPACE
@@ -64,7 +64,7 @@ QGstreamerCaptureSession::QGstreamerCaptureSession(QGstreamerCaptureSession::Cap
m_waitingForEos(false),
m_pipelineMode(EmptyPipeline),
m_captureMode(captureMode),
- m_audioBufferProbeId(-1),
+ m_audioProbe(0),
m_audioInputFactory(0),
m_audioPreviewFactory(0),
m_videoInputFactory(0),
@@ -169,7 +169,7 @@ GstElement *QGstreamerCaptureSession::buildEncodeBin()
if (m_captureMode & Video) {
GstElement *videoQueue = gst_element_factory_make("queue", "video-encode-queue");
- GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-encoder");
+ GstElement *colorspace = gst_element_factory_make(QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME, "videoconvert-encoder");
GstElement *videoscale = gst_element_factory_make("videoscale","videoscale-encoder");
gst_bin_add_many(GST_BIN(encodeBin), videoQueue, colorspace, videoscale, NULL);
@@ -280,7 +280,7 @@ GstElement *QGstreamerCaptureSession::buildVideoPreview()
if (m_viewfinderInterface) {
GstElement *bin = gst_bin_new("video-preview-bin");
- GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-preview");
+ GstElement *colorspace = gst_element_factory_make(QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME, "videoconvert-preview");
GstElement *capsFilter = gst_element_factory_make("capsfilter", "capsfilter-video-preview");
GstElement *preview = m_viewfinderInterface->videoSink();
@@ -299,36 +299,25 @@ GstElement *QGstreamerCaptureSession::buildVideoPreview()
resolution = m_imageEncodeControl->imageSettings().resolution();
}
- if (!resolution.isEmpty() || frameRate > 0.001) {
- GstCaps *caps = gst_caps_new_empty();
- QStringList structureTypes;
- structureTypes << "video/x-raw-yuv" << "video/x-raw-rgb";
-
- foreach(const QString &structureType, structureTypes) {
- GstStructure *structure = gst_structure_new(structureType.toLatin1().constData(), NULL);
-
- if (!resolution.isEmpty()) {
- gst_structure_set(structure, "width", G_TYPE_INT, resolution.width(), NULL);
- gst_structure_set(structure, "height", G_TYPE_INT, resolution.height(), NULL);
- }
-
- if (frameRate > 0.001) {
- QPair<int,int> rate = m_videoEncodeControl->rateAsRational();
+ GstCaps *caps = QGstUtils::videoFilterCaps();
- //qDebug() << "frame rate:" << num << denum;
+ if (!resolution.isEmpty()) {
+ gst_caps_set_simple(caps, "width", G_TYPE_INT, resolution.width(), NULL);
+ gst_caps_set_simple(caps, "height", G_TYPE_INT, resolution.height(), NULL);
+ }
+ if (frameRate > 0.001) {
+ QPair<int,int> rate = m_videoEncodeControl->rateAsRational();
- gst_structure_set(structure, "framerate", GST_TYPE_FRACTION, rate.first, rate.second, NULL);
- }
+ //qDebug() << "frame rate:" << num << denum;
- gst_caps_append_structure(caps,structure);
- }
+ gst_caps_set_simple(caps, "framerate", GST_TYPE_FRACTION, rate.first, rate.second, NULL);
+ }
- //qDebug() << "set video preview caps filter:" << gst_caps_to_string(caps);
+ //qDebug() << "set video preview caps filter:" << gst_caps_to_string(caps);
- g_object_set(G_OBJECT(capsFilter), "caps", caps, NULL);
+ g_object_set(G_OBJECT(capsFilter), "caps", caps, NULL);
- gst_caps_unref(caps);
- }
+ gst_caps_unref(caps);
// add ghostpads
GstPad *pad = gst_element_get_static_pad(colorspace, "sink");
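
Note on the caps-filter rewrite above: the loop that built one structure each for "video/x-raw-yuv" and "video/x-raw-rgb" is gone; QGstUtils::videoFilterCaps() supplies version-appropriate raw video caps, and gst_caps_set_simple() stamps width, height and framerate onto the structures in one call, which is what the removed foreach loop did by hand. A sketch of the same shape; the caps-from-string stand-in for the Qt helper is an assumption:

    #include <gst/gst.h>

    static GstCaps *constrainPreviewCaps(int width, int height, int fpsNum, int fpsDen)
    {
    #if GST_CHECK_VERSION(1,0,0)
        GstCaps *caps = gst_caps_from_string("video/x-raw");
    #else
        GstCaps *caps = gst_caps_from_string("video/x-raw-yuv; video/x-raw-rgb");
    #endif
        if (width > 0 && height > 0)
            gst_caps_set_simple(caps, "width", G_TYPE_INT, width,
                                      "height", G_TYPE_INT, height, NULL);
        if (fpsNum > 0)
            gst_caps_set_simple(caps, "framerate", GST_TYPE_FRACTION, fpsNum, fpsDen, NULL);
        return caps;   // caller applies it to the capsfilter and unrefs it
    }
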
@@ -342,7 +331,7 @@ GstElement *QGstreamerCaptureSession::buildVideoPreview()
previewElement = gst_element_factory_make("fakesink", "video-preview");
#else
GstElement *bin = gst_bin_new("video-preview-bin");
- GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-preview");
+ GstElement *colorspace = gst_element_factory_make(QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME, "videoconvert-preview");
GstElement *preview = gst_element_factory_make("ximagesink", "video-preview");
gst_bin_add_many(GST_BIN(bin), colorspace, preview, NULL);
gst_element_link(colorspace,preview);
@@ -360,101 +349,49 @@ GstElement *QGstreamerCaptureSession::buildVideoPreview()
return previewElement;
}
-
-static gboolean passImageFilter(GstElement *element,
- GstBuffer *buffer,
- void *appdata)
+void QGstreamerCaptureSession::probeCaps(GstCaps *caps)
{
- Q_UNUSED(element);
- Q_UNUSED(buffer);
-
- QGstreamerCaptureSession *session = (QGstreamerCaptureSession *)appdata;
- if (session->m_passImage || session->m_passPrerollImage) {
- session->m_passImage = false;
-
- if (session->m_passPrerollImage) {
- session->m_passPrerollImage = false;
- return TRUE;
- }
- session->m_passPrerollImage = false;
-
- QImage img;
-
- GstCaps *caps = gst_buffer_get_caps(buffer);
- if (caps) {
- GstStructure *structure = gst_caps_get_structure (caps, 0);
- gint width = 0;
- gint height = 0;
-
- if (structure &&
- gst_structure_get_int(structure, "width", &width) &&
- gst_structure_get_int(structure, "height", &height) &&
- width > 0 && height > 0) {
- if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-yuv") == 0) {
- guint32 fourcc = 0;
- gst_structure_get_fourcc(structure, "format", &fourcc);
-
- if (fourcc == GST_MAKE_FOURCC('I','4','2','0')) {
- img = QImage(width/2, height/2, QImage::Format_RGB32);
-
- const uchar *data = (const uchar *)buffer->data;
+#if GST_CHECK_VERSION(1,0,0)
+ gst_video_info_from_caps(&m_previewInfo, caps);
+#else
+ Q_UNUSED(caps);
+#endif
+}
- for (int y=0; y<height; y+=2) {
- const uchar *yLine = data + y*width;
- const uchar *uLine = data + width*height + y*width/4;
- const uchar *vLine = data + width*height*5/4 + y*width/4;
+bool QGstreamerCaptureSession::probeBuffer(GstBuffer *buffer)
+{
+ if (m_passPrerollImage) {
+ m_passImage = false;
+ m_passPrerollImage = false;
- for (int x=0; x<width; x+=2) {
- const qreal Y = 1.164*(yLine[x]-16);
- const int U = uLine[x/2]-128;
- const int V = vLine[x/2]-128;
+ return true;
+ } else if (!m_passImage) {
+ return false;
+ }
- int b = qBound(0, int(Y + 2.018*U), 255);
- int g = qBound(0, int(Y - 0.813*V - 0.391*U), 255);
- int r = qBound(0, int(Y + 1.596*V), 255);
+ m_passImage = false;
- img.setPixel(x/2,y/2,qRgb(r,g,b));
- }
- }
- }
+#if GST_CHECK_VERSION(1,0,0)
+ QImage img = QGstUtils::bufferToImage(buffer, m_previewInfo);
+#else
+ QImage img = QGstUtils::bufferToImage(buffer);
+#endif
- } else if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb") == 0) {
- QImage::Format format = QImage::Format_Invalid;
- int bpp = 0;
- gst_structure_get_int(structure, "bpp", &bpp);
-
- if (bpp == 24)
- format = QImage::Format_RGB888;
- else if (bpp == 32)
- format = QImage::Format_RGB32;
-
- if (format != QImage::Format_Invalid) {
- img = QImage((const uchar *)buffer->data,
- width,
- height,
- format);
- img.bits(); //detach
- }
- }
- }
- gst_caps_unref(caps);
- }
+ if (img.isNull())
+ return true;
- static QMetaMethod exposedSignal = QMetaMethod::fromSignal(&QGstreamerCaptureSession::imageExposed);
- exposedSignal.invoke(session,
- Qt::QueuedConnection,
- Q_ARG(int,session->m_imageRequestId));
+ static QMetaMethod exposedSignal = QMetaMethod::fromSignal(&QGstreamerCaptureSession::imageExposed);
+ exposedSignal.invoke(this,
+ Qt::QueuedConnection,
+ Q_ARG(int,m_imageRequestId));
- static QMetaMethod capturedSignal = QMetaMethod::fromSignal(&QGstreamerCaptureSession::imageCaptured);
- capturedSignal.invoke(session,
- Qt::QueuedConnection,
- Q_ARG(int,session->m_imageRequestId),
- Q_ARG(QImage,img));
+ static QMetaMethod capturedSignal = QMetaMethod::fromSignal(&QGstreamerCaptureSession::imageCaptured);
+ capturedSignal.invoke(this,
+ Qt::QueuedConnection,
+ Q_ARG(int,m_imageRequestId),
+ Q_ARG(QImage,img));
- return TRUE;
- } else {
- return FALSE;
- }
+ return true;
}
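
Note on the hunk above: the old pad callback that hand-decoded I420 and RGB buffers is replaced by the QGstreamerBufferProbe hooks. probeCaps() caches a GstVideoInfo (1.0 only) via gst_video_info_from_caps(), and probeBuffer() hands the buffer plus that info to QGstUtils::bufferToImage(). A rough, 1.0-only sketch of what such a conversion involves; this is not the Qt implementation, which also handles YUV formats:

    #include <QImage>
    #include <gst/gst.h>
    #include <gst/video/video.h>

    #if GST_CHECK_VERSION(1,0,0)
    static QImage frameToImage(GstBuffer *buffer, GstVideoInfo *info)
    {
        GstVideoFrame frame;
        if (!gst_video_frame_map(&frame, info, buffer, GST_MAP_READ))
            return QImage();

        QImage img;
        if (GST_VIDEO_INFO_FORMAT(info) == GST_VIDEO_FORMAT_RGB) {
            img = QImage(static_cast<const uchar *>(GST_VIDEO_FRAME_PLANE_DATA(&frame, 0)),
                         GST_VIDEO_INFO_WIDTH(info),
                         GST_VIDEO_INFO_HEIGHT(info),
                         GST_VIDEO_FRAME_PLANE_STRIDE(&frame, 0),
                         QImage::Format_RGB888).copy();   // deep copy before unmapping
        }
        gst_video_frame_unmap(&frame);
        return img;
    }
    #endif
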
static gboolean saveImageFilter(GstElement *element,
@@ -471,7 +408,15 @@ static gboolean saveImageFilter(GstElement *element,
if (!fileName.isEmpty()) {
QFile f(fileName);
if (f.open(QFile::WriteOnly)) {
- f.write((const char *)buffer->data, buffer->size);
+#if GST_CHECK_VERSION(1,0,0)
+ GstMapInfo info;
+ if (gst_buffer_map(buffer, &info, GST_MAP_READ)) {
+ f.write(reinterpret_cast<const char *>(info.data), info.size);
+ gst_buffer_unmap(buffer, &info);
+ }
+#else
+ f.write(reinterpret_cast<const char *>(buffer->data), buffer->size);
+#endif
f.close();
static QMetaMethod savedSignal = QMetaMethod::fromSignal(&QGstreamerCaptureSession::imageSaved);
@@ -489,18 +434,19 @@ GstElement *QGstreamerCaptureSession::buildImageCapture()
{
GstElement *bin = gst_bin_new("image-capture-bin");
GstElement *queue = gst_element_factory_make("queue", "queue-image-capture");
- GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-image-capture");
+ GstElement *colorspace = gst_element_factory_make(QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME, "videoconvert-image-capture");
GstElement *encoder = gst_element_factory_make("jpegenc", "image-encoder");
GstElement *sink = gst_element_factory_make("fakesink","sink-image-capture");
GstPad *pad = gst_element_get_static_pad(queue, "src");
Q_ASSERT(pad);
- gst_pad_add_buffer_probe(pad, G_CALLBACK(passImageFilter), this);
+
+ addProbeToPad(pad, false);
+
gst_object_unref(GST_OBJECT(pad));
g_object_set(G_OBJECT(sink), "signal-handoffs", TRUE, NULL);
- g_signal_connect(G_OBJECT(sink), "handoff",
- G_CALLBACK(saveImageFilter), this);
+ g_signal_connect(G_OBJECT(sink), "handoff", G_CALLBACK(saveImageFilter), this);
gst_bin_add_many(GST_BIN(bin), queue, colorspace, encoder, sink, NULL);
gst_element_link_many(queue, colorspace, encoder, sink, NULL);
@@ -715,6 +661,8 @@ void QGstreamerCaptureSession::dumpGraph(const QString &fileName)
_gst_debug_bin_to_dot_file(GST_BIN(m_pipeline),
GstDebugGraphDetails(/*GST_DEBUG_GRAPH_SHOW_ALL |*/ GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES),
fileName.toLatin1());
+#else
+ Q_UNUSED(fileName);
#endif
}
@@ -877,10 +825,8 @@ void QGstreamerCaptureSession::setState(QGstreamerCaptureSession::State newState
qint64 QGstreamerCaptureSession::duration() const
{
- GstFormat format = GST_FORMAT_TIME;
- gint64 duration = 0;
-
- if ( m_encodeBin && gst_element_query_position(m_encodeBin, &format, &duration))
+ gint64 duration = 0;
+ if (m_encodeBin && qt_gst_element_query_position(m_encodeBin, GST_FORMAT_TIME, &duration))
return duration / 1000000;
else
return 0;
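
Note on the duration() hunk above: gst_element_query_position() takes the format by pointer (and may rewrite it) in 0.10, but by value in 1.0; qt_gst_element_query_position() presumably shims that difference. A sketch of such a shim; the helper name below is illustrative and the real one lives in qgstutils:

    #include <gst/gst.h>

    static gboolean query_position_compat(GstElement *element, GstFormat format, gint64 *position)
    {
    #if GST_CHECK_VERSION(1,0,0)
        return gst_element_query_position(element, format, position);
    #else
        // 0.10 passes the format by pointer and may write the negotiated format back.
        return gst_element_query_position(element, &format, position);
    #endif
    }
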
@@ -896,50 +842,8 @@ void QGstreamerCaptureSession::setMetaData(const QMap<QByteArray, QVariant> &dat
//qDebug() << "QGstreamerCaptureSession::setMetaData" << data;
m_metaData = data;
- if (m_encodeBin) {
- GstIterator *elements = gst_bin_iterate_all_by_interface(GST_BIN(m_encodeBin), GST_TYPE_TAG_SETTER);
- GstElement *element = 0;
- while (gst_iterator_next(elements, (void**)&element) == GST_ITERATOR_OK) {
- //qDebug() << "found element with tag setter interface:" << gst_element_get_name(element);
- QMapIterator<QByteArray, QVariant> it(data);
- while (it.hasNext()) {
- it.next();
- const QString tagName = it.key();
- const QVariant tagValue = it.value();
-
-
- switch(tagValue.type()) {
- case QVariant::String:
- gst_tag_setter_add_tags(GST_TAG_SETTER(element),
- GST_TAG_MERGE_REPLACE_ALL,
- tagName.toUtf8().constData(),
- tagValue.toString().toUtf8().constData(),
- NULL);
- break;
- case QVariant::Int:
- case QVariant::LongLong:
- gst_tag_setter_add_tags(GST_TAG_SETTER(element),
- GST_TAG_MERGE_REPLACE_ALL,
- tagName.toUtf8().constData(),
- tagValue.toInt(),
- NULL);
- break;
- case QVariant::Double:
- gst_tag_setter_add_tags(GST_TAG_SETTER(element),
- GST_TAG_MERGE_REPLACE_ALL,
- tagName.toUtf8().constData(),
- tagValue.toDouble(),
- NULL);
- break;
- default:
- break;
- }
-
- }
-
- }
- gst_iterator_free(elements);
- }
+ if (m_encodeBin)
+ QGstUtils::setMetaData(GST_BIN(m_encodeBin), data);
}
bool QGstreamerCaptureSession::processBusMessage(const QGstreamerMessage &message)
@@ -1058,34 +962,16 @@ void QGstreamerCaptureSession::setVolume(qreal volume)
void QGstreamerCaptureSession::addProbe(QGstreamerAudioProbeControl* probe)
{
- QMutexLocker locker(&m_audioProbeMutex);
-
- if (m_audioProbes.contains(probe))
- return;
-
- m_audioProbes.append(probe);
+ Q_ASSERT(!m_audioProbe);
+ m_audioProbe = probe;
+ addAudioBufferProbe();
}
void QGstreamerCaptureSession::removeProbe(QGstreamerAudioProbeControl* probe)
{
- QMutexLocker locker(&m_audioProbeMutex);
- m_audioProbes.removeOne(probe);
-}
-
-gboolean QGstreamerCaptureSession::padAudioBufferProbe(GstPad *pad, GstBuffer *buffer, gpointer user_data)
-{
- Q_UNUSED(pad);
-
- QGstreamerCaptureSession *session = reinterpret_cast<QGstreamerCaptureSession*>(user_data);
- QMutexLocker locker(&session->m_audioProbeMutex);
-
- if (session->m_audioProbes.isEmpty())
- return TRUE;
-
- foreach (QGstreamerAudioProbeControl* probe, session->m_audioProbes)
- probe->bufferProbed(buffer);
-
- return TRUE;
+ Q_ASSERT(m_audioProbe == probe);
+ removeAudioBufferProbe();
+ m_audioProbe = 0;
}
GstPad *QGstreamerCaptureSession::getAudioProbePad()
@@ -1114,26 +1000,25 @@ GstPad *QGstreamerCaptureSession::getAudioProbePad()
void QGstreamerCaptureSession::removeAudioBufferProbe()
{
- if (m_audioBufferProbeId == -1)
+ if (!m_audioProbe)
return;
GstPad *pad = getAudioProbePad();
if (pad) {
- gst_pad_remove_buffer_probe(pad, m_audioBufferProbeId);
- gst_object_unref(G_OBJECT(pad));
+ m_audioProbe->removeProbeFromPad(pad);
+ gst_object_unref(GST_OBJECT(pad));
}
-
- m_audioBufferProbeId = -1;
}
void QGstreamerCaptureSession::addAudioBufferProbe()
{
- Q_ASSERT(m_audioBufferProbeId == -1);
+ if (!m_audioProbe)
+ return;
GstPad *pad = getAudioProbePad();
if (pad) {
- m_audioBufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padAudioBufferProbe), this);
- gst_object_unref(G_OBJECT(pad));
+ m_audioProbe->addProbeToPad(pad);
+ gst_object_unref(GST_OBJECT(pad));
}
}
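
Note on the probe changes in this file: the raw 0.10 calls gst_pad_add_buffer_probe()/gst_pad_remove_buffer_probe() are gone from the session; the probe controls now install themselves via addProbeToPad()/removeProbeFromPad(), which presumably wrap the underlying API change shown below. Illustrative sketch only, the Qt helper classes are not reproduced here:

    #include <gst/gst.h>

    #if GST_CHECK_VERSION(1,0,0)
    static GstPadProbeReturn on_buffer(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
    {
        GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER(info);
        (void)pad; (void)buffer; (void)user_data;   // ... inspect buffer here ...
        return GST_PAD_PROBE_OK;
    }

    static gulong add_buffer_probe(GstPad *pad, gpointer user_data)
    {
        return gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, on_buffer, user_data, NULL);
        // removal: gst_pad_remove_probe(pad, id);
    }
    #else
    static gboolean on_buffer(GstPad *pad, GstBuffer *buffer, gpointer user_data)
    {
        (void)pad; (void)buffer; (void)user_data;   // ... inspect buffer here ...
        return TRUE;   // keep the buffer flowing
    }

    static gulong add_buffer_probe(GstPad *pad, gpointer user_data)
    {
        return gst_pad_add_buffer_probe(pad, G_CALLBACK(on_buffer), user_data);
        // removal: gst_pad_remove_buffer_probe(pad, id);
    }
    #endif
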
diff --git a/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.h b/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.h
index a759f22e5..ad26327e7 100644
--- a/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.h
+++ b/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.h
@@ -41,8 +41,10 @@
#include <QtCore/qurl.h>
#include <gst/gst.h>
+#include <gst/video/video.h>
#include <private/qgstreamerbushelper_p.h>
+#include <private/qgstreamerbufferprobe_p.h>
QT_BEGIN_NAMESPACE
@@ -70,7 +72,10 @@ public:
virtual QList<QSize> supportedResolutions(qreal frameRate = -1) const = 0;
};
-class QGstreamerCaptureSession : public QObject, public QGstreamerBusMessageFilter
+class QGstreamerCaptureSession
+ : public QObject
+ , public QGstreamerBusMessageFilter
+ , private QGstreamerBufferProbe
{
Q_OBJECT
Q_PROPERTY(qint64 duration READ duration NOTIFY durationChanged)
@@ -131,7 +136,6 @@ public:
void addProbe(QGstreamerAudioProbeControl* probe);
void removeProbe(QGstreamerAudioProbeControl* probe);
- static gboolean padAudioBufferProbe(GstPad *pad, GstBuffer *buffer, gpointer user_data);
signals:
void stateChanged(QGstreamerCaptureSession::State state);
@@ -156,6 +160,9 @@ public slots:
void setVolume(qreal volume);
private:
+ void probeCaps(GstCaps *caps);
+ bool probeBuffer(GstBuffer *buffer);
+
enum PipelineMode { EmptyPipeline, PreviewPipeline, RecordingPipeline, PreviewAndRecordingPipeline };
GstElement *buildEncodeBin();
@@ -180,9 +187,7 @@ private:
QGstreamerCaptureSession::CaptureMode m_captureMode;
QMap<QByteArray, QVariant> m_metaData;
- QList<QGstreamerAudioProbeControl*> m_audioProbes;
- QMutex m_audioProbeMutex;
- int m_audioBufferProbeId;
+ QGstreamerAudioProbeControl *m_audioProbe;
QGstreamerElementFactory *m_audioInputFactory;
QGstreamerElementFactory *m_audioPreviewFactory;
@@ -217,6 +222,10 @@ private:
GstElement *m_encodeBin;
+#if GST_CHECK_VERSION(1,0,0)
+ GstVideoInfo m_previewInfo;
+#endif
+
public:
bool m_passImage;
bool m_passPrerollImage;
diff --git a/src/plugins/gstreamer/mediacapture/qgstreamervideoencode.cpp b/src/plugins/gstreamer/mediacapture/qgstreamervideoencode.cpp
index 2f0d0ee76..81b85d7b0 100644
--- a/src/plugins/gstreamer/mediacapture/qgstreamervideoencode.cpp
+++ b/src/plugins/gstreamer/mediacapture/qgstreamervideoencode.cpp
@@ -34,7 +34,7 @@
#include "qgstreamervideoencode.h"
#include "qgstreamercapturesession.h"
#include "qgstreamermediacontainercontrol.h"
-
+#include <private/qgstutils_p.h>
#include <QtCore/qdebug.h>
#include <math.h>
@@ -147,7 +147,7 @@ GstElement *QGstreamerVideoEncode::createEncoder()
GstElement *capsFilter = gst_element_factory_make("capsfilter", "capsfilter-video");
gst_bin_add(encoderBin, capsFilter);
- GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", NULL);
+ GstElement *colorspace = gst_element_factory_make(QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME, NULL);
gst_bin_add(encoderBin, colorspace);
gst_bin_add(encoderBin, encoderElement);
@@ -252,27 +252,22 @@ GstElement *QGstreamerVideoEncode::createEncoder()
}
if (!m_videoSettings.resolution().isEmpty() || m_videoSettings.frameRate() > 0.001) {
- GstCaps *caps = gst_caps_new_empty();
- QStringList structureTypes;
- structureTypes << "video/x-raw-yuv" << "video/x-raw-rgb";
-
- foreach(const QString &structureType, structureTypes) {
- GstStructure *structure = gst_structure_new(structureType.toLatin1().constData(), NULL);
-
- if (!m_videoSettings.resolution().isEmpty()) {
- gst_structure_set(structure, "width", G_TYPE_INT, m_videoSettings.resolution().width(), NULL);
- gst_structure_set(structure, "height", G_TYPE_INT, m_videoSettings.resolution().height(), NULL);
- }
-
- if (m_videoSettings.frameRate() > 0.001) {
- QPair<int,int> rate = rateAsRational();
-
- //qDebug() << "frame rate:" << num << denum;
-
- gst_structure_set(structure, "framerate", GST_TYPE_FRACTION, rate.first, rate.second, NULL);
- }
+ GstCaps *caps = QGstUtils::videoFilterCaps();
+
+ if (!m_videoSettings.resolution().isEmpty()) {
+ gst_caps_set_simple(
+ caps,
+ "width", G_TYPE_INT, m_videoSettings.resolution().width(),
+ "height", G_TYPE_INT, m_videoSettings.resolution().height(),
+ NULL);
+ }
- gst_caps_append_structure(caps,structure);
+ if (m_videoSettings.frameRate() > 0.001) {
+ QPair<int,int> rate = rateAsRational();
+ gst_caps_set_simple(
+ caps,
+ "framerate", GST_TYPE_FRACTION, rate.first, rate.second,
+ NULL);
}
//qDebug() << "set video caps filter:" << gst_caps_to_string(caps);
diff --git a/src/plugins/gstreamer/mediaplayer/mediaplayer.pro b/src/plugins/gstreamer/mediaplayer/mediaplayer.pro
index 2ca9377db..b986fc787 100644
--- a/src/plugins/gstreamer/mediaplayer/mediaplayer.pro
+++ b/src/plugins/gstreamer/mediaplayer/mediaplayer.pro
@@ -28,4 +28,3 @@ SOURCES += \
OTHER_FILES += \
mediaplayer.json
-
diff --git a/src/plugins/gstreamer/mediaplayer/qgstreamerplayercontrol.cpp b/src/plugins/gstreamer/mediaplayer/qgstreamerplayercontrol.cpp
index fed756ac9..c1fb64acd 100644
--- a/src/plugins/gstreamer/mediaplayer/qgstreamerplayercontrol.cpp
+++ b/src/plugins/gstreamer/mediaplayer/qgstreamerplayercontrol.cpp
@@ -425,7 +425,6 @@ void QGstreamerPlayerControl::setMedia(const QMediaContent &content, QIODevice *
m_session->loadFromUri(request);
#endif
-
#if defined(HAVE_GST_APPSRC)
if (!request.url().isEmpty() || userStreamValid) {
#else
diff --git a/src/plugins/gstreamer/mediaplayer/qgstreamerplayerservice.cpp b/src/plugins/gstreamer/mediaplayer/qgstreamerplayerservice.cpp
index ce267d737..84805b67a 100644
--- a/src/plugins/gstreamer/mediaplayer/qgstreamerplayerservice.cpp
+++ b/src/plugins/gstreamer/mediaplayer/qgstreamerplayerservice.cpp
@@ -51,7 +51,11 @@
#include <private/qgstreamervideorenderer_p.h>
#if defined(Q_WS_MAEMO_6) && defined(__arm__)
-#include "qgstreamergltexturerenderer.h"
+#include "private/qgstreamergltexturerenderer.h"
+#endif
+
+#if defined(HAVE_MIR) && defined (__arm__)
+#include "private/qgstreamermirtexturerenderer_p.h"
#endif
#include "qgstreamerstreamscontrol.h"
@@ -66,6 +70,8 @@ QT_BEGIN_NAMESPACE
QGstreamerPlayerService::QGstreamerPlayerService(QObject *parent):
QMediaService(parent)
+ , m_audioProbeControl(0)
+ , m_videoProbeControl(0)
, m_videoOutput(0)
, m_videoRenderer(0)
, m_videoWindow(0)
@@ -82,6 +88,8 @@ QGstreamerPlayerService::QGstreamerPlayerService(QObject *parent):
#if defined(Q_WS_MAEMO_6) && defined(__arm__)
m_videoRenderer = new QGstreamerGLTextureRenderer(this);
+#elif defined(HAVE_MIR) && defined (__arm__)
+ m_videoRenderer = new QGstreamerMirTextureRenderer(this, m_session);
#else
m_videoRenderer = new QGstreamerVideoRenderer(this);
#endif
@@ -115,23 +123,23 @@ QMediaControl *QGstreamerPlayerService::requestControl(const char *name)
if (qstrcmp(name, QMediaAvailabilityControl_iid) == 0)
return m_availabilityControl;
- if (qstrcmp(name,QMediaVideoProbeControl_iid) == 0) {
- if (m_session) {
- QGstreamerVideoProbeControl *probe = new QGstreamerVideoProbeControl(this);
+ if (qstrcmp(name, QMediaVideoProbeControl_iid) == 0) {
+ if (!m_videoProbeControl) {
increaseVideoRef();
- m_session->addProbe(probe);
- return probe;
+ m_videoProbeControl = new QGstreamerVideoProbeControl(this);
+ m_session->addProbe(m_videoProbeControl);
}
- return 0;
+ m_videoProbeControl->ref.ref();
+ return m_videoProbeControl;
}
- if (qstrcmp(name,QMediaAudioProbeControl_iid) == 0) {
- if (m_session) {
- QGstreamerAudioProbeControl *probe = new QGstreamerAudioProbeControl(this);
- m_session->addProbe(probe);
- return probe;
+ if (qstrcmp(name, QMediaAudioProbeControl_iid) == 0) {
+ if (!m_audioProbeControl) {
+ m_audioProbeControl = new QGstreamerAudioProbeControl(this);
+ m_session->addProbe(m_audioProbeControl);
}
- return 0;
+ m_audioProbeControl->ref.ref();
+ return m_audioProbeControl;
}
if (!m_videoOutput) {
@@ -156,28 +164,21 @@ QMediaControl *QGstreamerPlayerService::requestControl(const char *name)
void QGstreamerPlayerService::releaseControl(QMediaControl *control)
{
- if (control == m_videoOutput) {
+ if (!control) {
+ return;
+ } else if (control == m_videoOutput) {
m_videoOutput = 0;
m_control->setVideoOutput(0);
decreaseVideoRef();
- }
-
- QGstreamerVideoProbeControl* videoProbe = qobject_cast<QGstreamerVideoProbeControl*>(control);
- if (videoProbe) {
- if (m_session) {
- m_session->removeProbe(videoProbe);
- decreaseVideoRef();
- }
- delete videoProbe;
- return;
- }
-
- QGstreamerAudioProbeControl* audioProbe = qobject_cast<QGstreamerAudioProbeControl*>(control);
- if (audioProbe) {
- if (m_session)
- m_session->removeProbe(audioProbe);
- delete audioProbe;
- return;
+ } else if (control == m_videoProbeControl && !m_videoProbeControl->ref.deref()) {
+ m_session->removeProbe(m_videoProbeControl);
+ delete m_videoProbeControl;
+ m_videoProbeControl = 0;
+ decreaseVideoRef();
+ } else if (control == m_audioProbeControl && !m_audioProbeControl->ref.deref()) {
+ m_session->removeProbe(m_audioProbeControl);
+ delete m_audioProbeControl;
+ m_audioProbeControl = 0;
}
}
diff --git a/src/plugins/gstreamer/mediaplayer/qgstreamerplayerservice.h b/src/plugins/gstreamer/mediaplayer/qgstreamerplayerservice.h
index f3081e986..22be262ab 100644
--- a/src/plugins/gstreamer/mediaplayer/qgstreamerplayerservice.h
+++ b/src/plugins/gstreamer/mediaplayer/qgstreamerplayerservice.h
@@ -52,6 +52,8 @@ class QGstreamerStreamsControl;
class QGstreamerVideoRenderer;
class QGstreamerVideoWidgetControl;
class QGStreamerAvailabilityControl;
+class QGstreamerAudioProbeControl;
+class QGstreamerVideoProbeControl;
class QGstreamerPlayerService : public QMediaService
{
@@ -70,6 +72,9 @@ private:
QGstreamerStreamsControl *m_streamsControl;
QGStreamerAvailabilityControl *m_availabilityControl;
+ QGstreamerAudioProbeControl *m_audioProbeControl;
+ QGstreamerVideoProbeControl *m_videoProbeControl;
+
QMediaControl *m_videoOutput;
QMediaControl *m_videoRenderer;
QMediaControl *m_videoWindow;
diff --git a/src/plugins/gstreamer/mediaplayer/qgstreamerplayerserviceplugin.cpp b/src/plugins/gstreamer/mediaplayer/qgstreamerplayerserviceplugin.cpp
index 7d20b6d92..f1fd4211c 100644
--- a/src/plugins/gstreamer/mediaplayer/qgstreamerplayerserviceplugin.cpp
+++ b/src/plugins/gstreamer/mediaplayer/qgstreamerplayerserviceplugin.cpp
@@ -81,89 +81,15 @@ QMultimedia::SupportEstimate QGstreamerPlayerServicePlugin::hasSupport(const QSt
return QGstUtils::hasSupport(mimeType, codecs, m_supportedMimeTypeSet);
}
-void QGstreamerPlayerServicePlugin::updateSupportedMimeTypes() const
+static bool isDecoderOrDemuxer(GstElementFactory *factory)
{
- //enumerate supported mime types
- gst_init(NULL, NULL);
-
- GList *plugins, *orig_plugins;
- orig_plugins = plugins = gst_default_registry_get_plugin_list ();
-
- while (plugins) {
- GList *features, *orig_features;
-
- GstPlugin *plugin = (GstPlugin *) (plugins->data);
- plugins = g_list_next (plugins);
-
- if (plugin->flags & (1<<1)) //GST_PLUGIN_FLAG_BLACKLISTED
- continue;
-
- orig_features = features = gst_registry_get_feature_list_by_plugin(gst_registry_get_default (),
- plugin->desc.name);
- while (features) {
- if (!G_UNLIKELY(features->data == NULL)) {
- GstPluginFeature *feature = GST_PLUGIN_FEATURE(features->data);
- if (GST_IS_ELEMENT_FACTORY (feature)) {
- GstElementFactory *factory = GST_ELEMENT_FACTORY(gst_plugin_feature_load(feature));
- if (factory
- && factory->numpadtemplates > 0
- && (qstrcmp(factory->details.klass, "Codec/Decoder/Audio") == 0
- || qstrcmp(factory->details.klass, "Codec/Decoder/Video") == 0
- || qstrcmp(factory->details.klass, "Codec/Demux") == 0 )) {
- const GList *pads = factory->staticpadtemplates;
- while (pads) {
- GstStaticPadTemplate *padtemplate = (GstStaticPadTemplate*)(pads->data);
- pads = g_list_next (pads);
- if (padtemplate->direction != GST_PAD_SINK)
- continue;
- if (padtemplate->static_caps.string) {
- GstCaps *caps = gst_static_caps_get(&padtemplate->static_caps);
- if (!gst_caps_is_any (caps) && ! gst_caps_is_empty (caps)) {
- for (guint i = 0; i < gst_caps_get_size(caps); i++) {
- GstStructure *structure = gst_caps_get_structure(caps, i);
- QString nameLowcase = QString(gst_structure_get_name (structure)).toLower();
-
- m_supportedMimeTypeSet.insert(nameLowcase);
- if (nameLowcase.contains("mpeg")) {
- //Because mpeg version number is only included in the detail
- //description, it is necessary to manually extract this information
- //in order to match the mime type of mpeg4.
- const GValue *value = gst_structure_get_value(structure, "mpegversion");
- if (value) {
- gchar *str = gst_value_serialize (value);
- QString versions(str);
- QStringList elements = versions.split(QRegExp("\\D+"), QString::SkipEmptyParts);
- foreach (const QString &e, elements)
- m_supportedMimeTypeSet.insert(nameLowcase + e);
- g_free (str);
- }
- }
- }
- }
- }
- }
- gst_object_unref (factory);
- }
- } else if (GST_IS_TYPE_FIND_FACTORY(feature)) {
- QString name(gst_plugin_feature_get_name(feature));
- if (name.contains('/')) //filter out any string without '/' which is obviously not a mime type
- m_supportedMimeTypeSet.insert(name.toLower());
- }
- }
- features = g_list_next (features);
- }
- gst_plugin_feature_list_free (orig_features);
- }
- gst_plugin_list_free (orig_plugins);
+ return gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_DEMUXER)
+ || gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_DECODER);
+}
-#if defined QT_SUPPORTEDMIMETYPES_DEBUG
- QStringList list = m_supportedMimeTypeSet.toList();
- list.sort();
- if (qgetenv("QT_DEBUG_PLUGINS").toInt() > 0) {
- foreach (const QString &type, list)
- qDebug() << type;
- }
-#endif
+void QGstreamerPlayerServicePlugin::updateSupportedMimeTypes() const
+{
+ m_supportedMimeTypeSet = QGstUtils::supportedMimeTypes(isDecoderOrDemuxer);
}
QStringList QGstreamerPlayerServicePlugin::supportedMimeTypes() const
diff --git a/src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.cpp b/src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.cpp
index 15924a68b..b5c354dff 100644
--- a/src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.cpp
+++ b/src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.cpp
@@ -37,7 +37,9 @@
#include <private/qgstreameraudioprobecontrol_p.h>
#include <private/qgstreamervideoprobecontrol_p.h>
#include <private/qgstreamervideorendererinterface_p.h>
+#if !GST_CHECK_VERSION(1,0,0)
#include <private/gstvideoconnector_p.h>
+#endif
#include <private/qgstutils_p.h>
#include <private/playlistfileparser_p.h>
#include <private/qgstutils_p.h>
@@ -85,6 +87,7 @@ typedef enum {
GST_PLAY_FLAG_BUFFERING = 0x000000100
} GstPlayFlags;
+#if !GST_CHECK_VERSION(1,0,0)
#define DEFAULT_RAW_CAPS \
"video/x-raw-yuv; " \
"video/x-raw-rgb; " \
@@ -97,7 +100,9 @@ typedef enum {
"text/x-pango-markup; " \
"video/x-dvd-subpicture; " \
"subpicture/x-pgs"
+
static GstStaticCaps static_RawCaps = GST_STATIC_CAPS(DEFAULT_RAW_CAPS);
+#endif
QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
:QObject(parent),
@@ -105,7 +110,9 @@ QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
m_pendingState(QMediaPlayer::StoppedState),
m_busHelper(0),
m_playbin(0),
+#if !GST_CHECK_VERSION(1,0,0)
m_usingColorspaceElement(false),
+#endif
m_videoSink(0),
m_pendingVideoSink(0),
m_nullVideoSink(0),
@@ -117,8 +124,8 @@ QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
#if defined(HAVE_GST_APPSRC)
m_appSrc(0),
#endif
- m_videoBufferProbeId(-1),
- m_audioBufferProbeId(-1),
+ m_videoProbe(0),
+ m_audioProbe(0),
m_volume(100),
m_playbackRate(1.0),
m_muted(false),
@@ -138,8 +145,7 @@ QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
Q_ASSERT(result == TRUE);
Q_UNUSED(result);
- m_playbin = gst_element_factory_make("playbin2", NULL);
-
+ m_playbin = gst_element_factory_make(QT_GSTREAMER_PLAYBIN_ELEMENT_NAME, NULL);
if (m_playbin) {
//GST_PLAY_FLAG_NATIVE_VIDEO omits configuration of ffmpegcolorspace and videoscale,
//since those elements are included in the video output bin when necessary.
@@ -147,13 +153,14 @@ QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
int flags = GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO |
GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_NATIVE_AUDIO;
#else
- int flags = 0;
- g_object_get(G_OBJECT(m_playbin), "flags", &flags, NULL);
+ int flags = GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO;
QByteArray envFlags = qgetenv("QT_GSTREAMER_PLAYBIN_FLAGS");
if (!envFlags.isEmpty()) {
flags |= envFlags.toInt();
+#if !GST_CHECK_VERSION(1,0,0)
} else {
flags |= GST_PLAY_FLAG_NATIVE_VIDEO;
+#endif
}
#endif
g_object_set(G_OBJECT(m_playbin), "flags", flags, NULL);
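
Note on the two hunks above: the pipeline element was renamed between the APIs, so the session now asks for QT_GSTREAMER_PLAYBIN_ELEMENT_NAME instead of hard-coding "playbin2", and the flags are seeded with GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO rather than read back from the element, with GST_PLAY_FLAG_NATIVE_VIDEO forced only on 0.10. A sketch of what the element-name macro presumably resolves to; illustrative, not from the patch:

    #include <gst/gst.h>

    static GstElement *make_playbin()
    {
    #if GST_CHECK_VERSION(1,0,0)
        return gst_element_factory_make("playbin", NULL);    // 1.0 name
    #else
        return gst_element_factory_make("playbin2", NULL);   // 0.10 name
    #endif
    }
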
@@ -185,12 +192,16 @@ QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
}
}
+#if GST_CHECK_VERSION(1,0,0)
+ m_videoIdentity = gst_element_factory_make("identity", NULL); // floating ref
+#else
m_videoIdentity = GST_ELEMENT(g_object_new(gst_video_connector_get_type(), 0)); // floating ref
g_signal_connect(G_OBJECT(m_videoIdentity), "connection-failed", G_CALLBACK(insertColorSpaceElement), (gpointer)this);
+ m_colorSpace = gst_element_factory_make(QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME, "ffmpegcolorspace-vo");
- m_colorSpace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-vo");
// might not get a parent, take ownership to avoid leak
qt_gst_object_ref_sink(GST_OBJECT(m_colorSpace));
+#endif
m_nullVideoSink = gst_element_factory_make("fakesink", NULL);
g_object_set(G_OBJECT(m_nullVideoSink), "sync", true, NULL);
@@ -206,7 +217,7 @@ QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
// add ghostpads
GstPad *pad = gst_element_get_static_pad(m_videoIdentity,"sink");
- gst_element_add_pad(GST_ELEMENT(m_videoOutputBin), gst_ghost_pad_new("videosink", pad));
+ gst_element_add_pad(GST_ELEMENT(m_videoOutputBin), gst_ghost_pad_new("sink", pad));
gst_object_unref(GST_OBJECT(pad));
if (m_playbin != 0) {
@@ -244,7 +255,9 @@ QGstreamerPlayerSession::~QGstreamerPlayerSession()
delete m_busHelper;
gst_object_unref(GST_OBJECT(m_bus));
gst_object_unref(GST_OBJECT(m_playbin));
+#if !GST_CHECK_VERSION(1,0,0)
gst_object_unref(GST_OBJECT(m_colorSpace));
+#endif
gst_object_unref(GST_OBJECT(m_nullVideoSink));
gst_object_unref(GST_OBJECT(m_videoOutputBin));
}
@@ -339,12 +352,10 @@ qint64 QGstreamerPlayerSession::duration() const
qint64 QGstreamerPlayerSession::position() const
{
- GstFormat format = GST_FORMAT_TIME;
gint64 position = 0;
- if ( m_playbin && gst_element_query_position(m_playbin, &format, &position))
+ if (m_playbin && qt_gst_element_query_position(m_playbin, GST_FORMAT_TIME, &position))
m_lastPosition = position / 1000000;
-
return m_lastPosition;
}
@@ -474,17 +485,26 @@ bool QGstreamerPlayerSession::isAudioAvailable() const
return m_audioAvailable;
}
+#if GST_CHECK_VERSION(1,0,0)
+static GstPadProbeReturn block_pad_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
+#else
static void block_pad_cb(GstPad *pad, gboolean blocked, gpointer user_data)
+#endif
{
Q_UNUSED(pad);
+#if GST_CHECK_VERSION(1,0,0)
+ Q_UNUSED(info);
+ Q_UNUSED(user_data);
+ return GST_PAD_PROBE_OK;
+#else
#ifdef DEBUG_PLAYBIN
qDebug() << "block_pad_cb, blocked:" << blocked;
#endif
-
if (blocked && user_data) {
QGstreamerPlayerSession *session = reinterpret_cast<QGstreamerPlayerSession*>(user_data);
QMetaObject::invokeMethod(session, "finishVideoOutputChange", Qt::QueuedConnection);
}
+#endif
}
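
Note on block_pad_cb above: pad blocking moved from gst_pad_set_blocked_async() in 0.10 to a blocking pad probe in 1.0; the session stores the id returned by gst_pad_add_probe() in pad_probe_id and unblocks by removing the probe. A sketch of that idiom, separate from the session code:

    #include <gst/gst.h>

    #if GST_CHECK_VERSION(1,0,0)
    static GstPadProbeReturn block_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
    {
        (void)pad; (void)info; (void)user_data;
        return GST_PAD_PROBE_OK;   // dataflow stays blocked until the probe is removed
    }

    static gulong block_pad(GstPad *pad, gpointer user_data)
    {
        return gst_pad_add_probe(pad,
                (GstPadProbeType)(GST_PAD_PROBE_TYPE_BUFFER | GST_PAD_PROBE_TYPE_BLOCKING),
                block_cb, user_data, NULL);
    }

    static void unblock_pad(GstPad *pad, gulong probe_id)
    {
        gst_pad_remove_probe(pad, probe_id);
    }
    #else
    // 0.10: gst_pad_set_blocked_async(pad, TRUE, callback, data) blocks the pad and fires
    // the callback once it is actually blocked; passing FALSE unblocks it again.
    #endif
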
void QGstreamerPlayerSession::updateVideoRenderer()
@@ -529,7 +549,7 @@ void QGstreamerPlayerSession::setVideoRenderer(QObject *videoOutput)
m_renderer = renderer;
#ifdef DEBUG_VO_BIN_DUMP
- _gst_debug_bin_to_dot_file_with_ts(GST_BIN(m_playbin),
+ gst_debug_bin_to_dot_file_with_ts(GST_BIN(m_playbin),
GstDebugGraphDetails(GST_DEBUG_GRAPH_SHOW_ALL /* GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES*/),
"playbin_set");
#endif
@@ -570,12 +590,14 @@ void QGstreamerPlayerSession::setVideoRenderer(QObject *videoOutput)
gst_element_set_state(m_videoSink, GST_STATE_NULL);
gst_element_set_state(m_playbin, GST_STATE_NULL);
+#if !GST_CHECK_VERSION(1,0,0)
if (m_usingColorspaceElement) {
gst_element_unlink(m_colorSpace, m_videoSink);
gst_bin_remove(GST_BIN(m_videoOutputBin), m_colorSpace);
} else {
gst_element_unlink(m_videoIdentity, m_videoSink);
}
+#endif
removeVideoBufferProbe();
@@ -585,8 +607,9 @@ void QGstreamerPlayerSession::setVideoRenderer(QObject *videoOutput)
gst_bin_add(GST_BIN(m_videoOutputBin), m_videoSink);
- m_usingColorspaceElement = false;
bool linked = gst_element_link(m_videoIdentity, m_videoSink);
+#if !GST_CHECK_VERSION(1,0,0)
+ m_usingColorspaceElement = false;
if (!linked) {
m_usingColorspaceElement = true;
#ifdef DEBUG_PLAYBIN
@@ -595,6 +618,10 @@ void QGstreamerPlayerSession::setVideoRenderer(QObject *videoOutput)
gst_bin_add(GST_BIN(m_videoOutputBin), m_colorSpace);
linked = gst_element_link_many(m_videoIdentity, m_colorSpace, m_videoSink, NULL);
}
+#endif
+
+ if (!linked)
+ qWarning() << "Linking video output element failed";
if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "show-preroll-frame") != 0) {
gboolean value = m_displayPrerolledFrame;
@@ -633,7 +660,11 @@ void QGstreamerPlayerSession::setVideoRenderer(QObject *videoOutput)
//block pads, async to avoid locking in paused state
GstPad *srcPad = gst_element_get_static_pad(m_videoIdentity, "src");
+#if GST_CHECK_VERSION(1,0,0)
+ this->pad_probe_id = gst_pad_add_probe(srcPad, (GstPadProbeType)(GST_PAD_PROBE_TYPE_BUFFER | GST_PAD_PROBE_TYPE_BLOCKING), block_pad_cb, this, NULL);
+#else
gst_pad_set_blocked_async(srcPad, true, &block_pad_cb, this);
+#endif
gst_object_unref(GST_OBJECT(srcPad));
//Unpause the sink to avoid waiting until the buffer is processed
@@ -671,16 +702,22 @@ void QGstreamerPlayerSession::finishVideoOutputChange()
}
if (m_pendingVideoSink == m_videoSink) {
+ qDebug() << "Abort, no change";
//video output was change back to the current one,
//no need to torment the pipeline, just unblock the pad
if (gst_pad_is_blocked(srcPad))
+#if GST_CHECK_VERSION(1,0,0)
+ gst_pad_remove_probe(srcPad, this->pad_probe_id);
+#else
gst_pad_set_blocked_async(srcPad, false, &block_pad_cb, 0);
+#endif
m_pendingVideoSink = 0;
gst_object_unref(GST_OBJECT(srcPad));
return;
}
+#if !GST_CHECK_VERSION(1,0,0)
if (m_usingColorspaceElement) {
gst_element_set_state(m_colorSpace, GST_STATE_NULL);
gst_element_set_state(m_videoSink, GST_STATE_NULL);
@@ -688,6 +725,9 @@ void QGstreamerPlayerSession::finishVideoOutputChange()
gst_element_unlink(m_colorSpace, m_videoSink);
gst_bin_remove(GST_BIN(m_videoOutputBin), m_colorSpace);
} else {
+#else
+ {
+#endif
gst_element_set_state(m_videoSink, GST_STATE_NULL);
gst_element_unlink(m_videoIdentity, m_videoSink);
}
@@ -703,8 +743,9 @@ void QGstreamerPlayerSession::finishVideoOutputChange()
addVideoBufferProbe();
- m_usingColorspaceElement = false;
bool linked = gst_element_link(m_videoIdentity, m_videoSink);
+#if !GST_CHECK_VERSION(1,0,0)
+ m_usingColorspaceElement = false;
if (!linked) {
m_usingColorspaceElement = true;
#ifdef DEBUG_PLAYBIN
@@ -713,6 +754,7 @@ void QGstreamerPlayerSession::finishVideoOutputChange()
gst_bin_add(GST_BIN(m_videoOutputBin), m_colorSpace);
linked = gst_element_link_many(m_videoIdentity, m_colorSpace, m_videoSink, NULL);
}
+#endif
if (!linked)
qWarning() << "Linking video output element failed";
@@ -720,6 +762,8 @@ void QGstreamerPlayerSession::finishVideoOutputChange()
#ifdef DEBUG_PLAYBIN
qDebug() << "notify the video connector it has to emit a new segment message...";
#endif
+
+#if !GST_CHECK_VERSION(1,0,0)
//it's necessary to send a new segment event just before
//the first buffer pushed to the new sink
g_signal_emit_by_name(m_videoIdentity,
@@ -727,7 +771,7 @@ void QGstreamerPlayerSession::finishVideoOutputChange()
true //emit connection-failed signal
//to have a chance to insert colorspace element
);
-
+#endif
GstState state = GST_STATE_VOID_PENDING;
@@ -743,8 +787,10 @@ void QGstreamerPlayerSession::finishVideoOutputChange()
break;
}
+#if !GST_CHECK_VERSION(1,0,0)
if (m_usingColorspaceElement)
gst_element_set_state(m_colorSpace, state);
+#endif
gst_element_set_state(m_videoSink, state);
@@ -760,16 +806,23 @@ void QGstreamerPlayerSession::finishVideoOutputChange()
//don't have to wait here, it will unblock eventually
if (gst_pad_is_blocked(srcPad))
- gst_pad_set_blocked_async(srcPad, false, &block_pad_cb, 0);
+#if GST_CHECK_VERSION(1,0,0)
+ gst_pad_remove_probe(srcPad, this->pad_probe_id);
+#else
+ gst_pad_set_blocked_async(srcPad, false, &block_pad_cb, 0);
+#endif
+
gst_object_unref(GST_OBJECT(srcPad));
#ifdef DEBUG_VO_BIN_DUMP
- _gst_debug_bin_to_dot_file_with_ts(GST_BIN(m_playbin),
- GstDebugGraphDetails(GST_DEBUG_GRAPH_SHOW_ALL /* GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES*/),
+ gst_debug_bin_to_dot_file_with_ts(GST_BIN(m_playbin),
+ GstDebugGraphDetails(GST_DEBUG_GRAPH_SHOW_ALL /* | GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES */),
"playbin_finish");
#endif
}
+#if !GST_CHECK_VERSION(1,0,0)
+
void QGstreamerPlayerSession::insertColorSpaceElement(GstElement *element, gpointer data)
{
#ifdef DEBUG_PLAYBIN
@@ -814,6 +867,7 @@ void QGstreamerPlayerSession::insertColorSpaceElement(GstElement *element, gpoin
gst_element_set_state(session->m_colorSpace, state);
}
+#endif
bool QGstreamerPlayerSession::isVideoAvailable() const
{
@@ -830,6 +884,7 @@ bool QGstreamerPlayerSession::play()
#ifdef DEBUG_PLAYBIN
qDebug() << Q_FUNC_INFO;
#endif
+
m_everPlayed = false;
if (m_playbin) {
m_pendingState = QMediaPlayer::PlayingState;
@@ -1161,21 +1216,20 @@ bool QGstreamerPlayerSession::processBusMessage(const QGstreamerMessage &message
case GST_MESSAGE_SEGMENT_DONE:
break;
case GST_MESSAGE_LATENCY:
-#if (GST_VERSION_MAJOR >= 0) && (GST_VERSION_MINOR >= 10) && (GST_VERSION_MICRO >= 13)
+#if GST_CHECK_VERSION(0,10,13)
case GST_MESSAGE_ASYNC_START:
break;
case GST_MESSAGE_ASYNC_DONE:
{
- GstFormat format = GST_FORMAT_TIME;
gint64 position = 0;
- if (gst_element_query_position(m_playbin, &format, &position)) {
+ if (qt_gst_element_query_position(m_playbin, GST_FORMAT_TIME, &position)) {
position /= 1000000;
m_lastPosition = position;
emit positionChanged(position);
}
break;
}
-#if GST_VERSION_MICRO >= 23
+#if GST_CHECK_VERSION(0,10,23)
case GST_MESSAGE_REQUEST_STATE:
#endif
#endif
@@ -1327,8 +1381,11 @@ void QGstreamerPlayerSession::getStreamsInfo()
default:
break;
}
-
+#if GST_CHECK_VERSION(1,0,0)
+ if (tags && GST_IS_TAG_LIST(tags)) {
+#else
if (tags && gst_is_tag_list(tags)) {
+#endif
gchar *languageCode = 0;
if (gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &languageCode))
streamProperties[QMediaMetaData::Language] = QString::fromUtf8(languageCode);
@@ -1365,9 +1422,8 @@ void QGstreamerPlayerSession::updateVideoResolutionTag()
#endif
QSize size;
QSize aspectRatio;
-
GstPad *pad = gst_element_get_static_pad(m_videoIdentity, "src");
- GstCaps *caps = gst_pad_get_negotiated_caps(pad);
+ GstCaps *caps = qt_gst_pad_get_current_caps(pad);
if (caps) {
const GstStructure *structure = gst_caps_get_structure(caps, 0);
@@ -1407,11 +1463,10 @@ void QGstreamerPlayerSession::updateVideoResolutionTag()
void QGstreamerPlayerSession::updateDuration()
{
- GstFormat format = GST_FORMAT_TIME;
gint64 gstDuration = 0;
int duration = -1;
- if (m_playbin && gst_element_query_duration(m_playbin, &format, &gstDuration))
+ if (m_playbin && qt_gst_element_query_duration(m_playbin, GST_FORMAT_TIME, &gstDuration))
duration = gstDuration / 1000000;
if (m_duration != duration) {
@@ -1467,7 +1522,7 @@ void QGstreamerPlayerSession::playbinNotifySource(GObject *o, GParamSpec *p, gpo
// The rest
if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), "extra-headers") != 0) {
- GstStructure *extras = gst_structure_empty_new("extras");
+ GstStructure *extras = qt_gst_structure_new_empty("extras");
foreach (const QByteArray &rawHeader, self->m_request.rawHeaderList()) {
if (rawHeader == userAgentString) // Filter User-Agent
@@ -1528,7 +1583,8 @@ void QGstreamerPlayerSession::playbinNotifySource(GObject *o, GParamSpec *p, gpo
qDebug() << "Current source is a non-live source";
#endif
- g_object_set(G_OBJECT(self->m_videoSink), "sync", !self->m_isLiveSource, NULL);
+ if (self->m_videoSink)
+ g_object_set(G_OBJECT(self->m_videoSink), "sync", !self->m_isLiveSource, NULL);
gst_object_unref(source);
}
@@ -1623,7 +1679,11 @@ GstAutoplugSelectResult QGstreamerPlayerSession::handleAutoplugSelect(GstBin *bi
const gchar *factoryName = gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(factory));
if (g_str_has_prefix(factoryName, "vaapi")) {
GstPad *sinkPad = gst_element_get_static_pad(session->m_videoSink, "sink");
+#if GST_CHECK_VERSION(1,0,0)
+ GstCaps *sinkCaps = gst_pad_query_caps(sinkPad, NULL);
+#else
GstCaps *sinkCaps = gst_pad_get_caps(sinkPad);
+#endif
#if (GST_VERSION_MAJOR == 0) && ((GST_VERSION_MINOR < 10) || (GST_VERSION_MICRO < 33))
if (!factory_can_src_any_caps(factory, sinkCaps))
@@ -1652,8 +1712,10 @@ void QGstreamerPlayerSession::handleElementAdded(GstBin *bin, GstElement *elemen
// Disable on-disk buffering.
g_object_set(G_OBJECT(element), "temp-template", NULL, NULL);
} else if (g_str_has_prefix(elementName, "uridecodebin") ||
- g_str_has_prefix(elementName, "decodebin2")) {
-
+#if GST_CHECK_VERSION(1,0,0)
+ g_str_has_prefix(elementName, "decodebin")) {
+#else
+ g_str_has_prefix(elementName, "decodebin2")) {
if (g_str_has_prefix(elementName, "uridecodebin")) {
// Add video/x-surface (VAAPI) to default raw formats
g_object_set(G_OBJECT(element), "caps", gst_static_caps_get(&static_RawCaps), NULL);
@@ -1661,7 +1723,7 @@ void QGstreamerPlayerSession::handleElementAdded(GstBin *bin, GstElement *elemen
// video sink doesn't support it
g_signal_connect(element, "autoplug-select", G_CALLBACK(handleAutoplugSelect), session);
}
-
+#endif
//listen for queue2 element added to uridecodebin/decodebin2 as well.
//Don't touch other bins since they may have unrelated queues
g_signal_connect(element, "element-added",
@@ -1711,68 +1773,30 @@ void QGstreamerPlayerSession::showPrerollFrames(bool enabled)
void QGstreamerPlayerSession::addProbe(QGstreamerVideoProbeControl* probe)
{
- QMutexLocker locker(&m_videoProbeMutex);
-
- if (m_videoProbes.contains(probe))
- return;
-
- m_videoProbes.append(probe);
+ Q_ASSERT(!m_videoProbe);
+ m_videoProbe = probe;
+ addVideoBufferProbe();
}
void QGstreamerPlayerSession::removeProbe(QGstreamerVideoProbeControl* probe)
{
- QMutexLocker locker(&m_videoProbeMutex);
- m_videoProbes.removeOne(probe);
- // Do not emit flush signal in this case.
- // Assume user releases any outstanding references to video frames.
-}
-
-gboolean QGstreamerPlayerSession::padVideoBufferProbe(GstPad *pad, GstBuffer *buffer, gpointer user_data)
-{
- Q_UNUSED(pad);
-
- QGstreamerPlayerSession *session = reinterpret_cast<QGstreamerPlayerSession*>(user_data);
- QMutexLocker locker(&session->m_videoProbeMutex);
-
- if (session->m_videoProbes.isEmpty())
- return TRUE;
-
- foreach (QGstreamerVideoProbeControl* probe, session->m_videoProbes)
- probe->bufferProbed(buffer);
-
- return TRUE;
+ Q_ASSERT(m_videoProbe == probe);
+ removeVideoBufferProbe();
+ m_videoProbe = 0;
}
void QGstreamerPlayerSession::addProbe(QGstreamerAudioProbeControl* probe)
{
- QMutexLocker locker(&m_audioProbeMutex);
-
- if (m_audioProbes.contains(probe))
- return;
-
- m_audioProbes.append(probe);
+ Q_ASSERT(!m_audioProbe);
+ m_audioProbe = probe;
+ addAudioBufferProbe();
}
void QGstreamerPlayerSession::removeProbe(QGstreamerAudioProbeControl* probe)
{
- QMutexLocker locker(&m_audioProbeMutex);
- m_audioProbes.removeOne(probe);
-}
-
-gboolean QGstreamerPlayerSession::padAudioBufferProbe(GstPad *pad, GstBuffer *buffer, gpointer user_data)
-{
- Q_UNUSED(pad);
-
- QGstreamerPlayerSession *session = reinterpret_cast<QGstreamerPlayerSession*>(user_data);
- QMutexLocker locker(&session->m_audioProbeMutex);
-
- if (session->m_audioProbes.isEmpty())
- return TRUE;
-
- foreach (QGstreamerAudioProbeControl* probe, session->m_audioProbes)
- probe->bufferProbed(buffer);
-
- return TRUE;
+ Q_ASSERT(m_audioProbe == probe);
+ removeAudioBufferProbe();
+ m_audioProbe = 0;
}
// This function is similar to stop(),
@@ -1797,80 +1821,62 @@ void QGstreamerPlayerSession::endOfMediaReset()
void QGstreamerPlayerSession::removeVideoBufferProbe()
{
- if (m_videoBufferProbeId == -1)
+ if (!m_videoProbe)
return;
- if (!m_videoSink) {
- m_videoBufferProbeId = -1;
- return;
- }
-
GstPad *pad = gst_element_get_static_pad(m_videoSink, "sink");
if (pad) {
- gst_pad_remove_buffer_probe(pad, m_videoBufferProbeId);
+ m_videoProbe->removeProbeFromPad(pad);
gst_object_unref(GST_OBJECT(pad));
}
-
- m_videoBufferProbeId = -1;
}
void QGstreamerPlayerSession::addVideoBufferProbe()
{
- Q_ASSERT(m_videoBufferProbeId == -1);
- if (!m_videoSink)
+ if (!m_videoProbe)
return;
GstPad *pad = gst_element_get_static_pad(m_videoSink, "sink");
if (pad) {
- m_videoBufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padVideoBufferProbe), this);
+ m_videoProbe->addProbeToPad(pad);
gst_object_unref(GST_OBJECT(pad));
}
}
void QGstreamerPlayerSession::removeAudioBufferProbe()
{
- if (m_audioBufferProbeId == -1)
- return;
-
- if (!m_audioSink) {
- m_audioBufferProbeId = -1;
+ if (!m_audioProbe)
return;
- }
GstPad *pad = gst_element_get_static_pad(m_audioSink, "sink");
if (pad) {
- gst_pad_remove_buffer_probe(pad, m_audioBufferProbeId);
+ m_audioProbe->removeProbeFromPad(pad);
gst_object_unref(GST_OBJECT(pad));
}
-
- m_audioBufferProbeId = -1;
}
void QGstreamerPlayerSession::addAudioBufferProbe()
{
- Q_ASSERT(m_audioBufferProbeId == -1);
- if (!m_audioSink)
+ if (!m_audioProbe)
return;
GstPad *pad = gst_element_get_static_pad(m_audioSink, "sink");
if (pad) {
- m_audioBufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padAudioBufferProbe), this);
+ m_audioProbe->addProbeToPad(pad);
gst_object_unref(GST_OBJECT(pad));
}
}
void QGstreamerPlayerSession::flushVideoProbes()
{
- QMutexLocker locker(&m_videoProbeMutex);
- foreach (QGstreamerVideoProbeControl* probe, m_videoProbes)
- probe->startFlushing();
+ if (m_videoProbe)
+ m_videoProbe->startFlushing();
}
void QGstreamerPlayerSession::resumeVideoProbes()
{
- QMutexLocker locker(&m_videoProbeMutex);
- foreach (QGstreamerVideoProbeControl* probe, m_videoProbes)
- probe->stopFlushing();
+ if (m_videoProbe)
+ m_videoProbe->stopFlushing();
}
void QGstreamerPlayerSession::playlistTypeFindFunction(GstTypeFind *find, gpointer userData)
@@ -1878,7 +1884,11 @@ void QGstreamerPlayerSession::playlistTypeFindFunction(GstTypeFind *find, gpoint
QGstreamerPlayerSession* session = (QGstreamerPlayerSession*)userData;
const gchar *uri = 0;
+#if GST_CHECK_VERSION(1,0,0)
+ g_object_get(G_OBJECT(session->m_playbin), "current-uri", &uri, NULL);
+#else
g_object_get(G_OBJECT(session->m_playbin), "uri", &uri, NULL);
+#endif
guint64 length = gst_type_find_get_length(find);
if (!length)
@@ -1887,7 +1897,7 @@ void QGstreamerPlayerSession::playlistTypeFindFunction(GstTypeFind *find, gpoint
length = qMin(length, guint64(1024));
while (length > 0) {
- guint8 *data = gst_type_find_peek(find, 0, length);
+ const guint8 *data = gst_type_find_peek(find, 0, length);
if (data) {
session->m_isPlaylist = (QPlaylistFileParser::findPlaylistType(QString::fromUtf8(uri), 0, data, length) != QPlaylistFileParser::UNKNOWN);
return;
diff --git a/src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.h b/src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.h
index f2e760ae9..92b4a0cfc 100644
--- a/src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.h
+++ b/src/plugins/gstreamer/mediaplayer/qgstreamerplayersession.h
@@ -119,11 +119,9 @@ public:
void addProbe(QGstreamerVideoProbeControl* probe);
void removeProbe(QGstreamerVideoProbeControl* probe);
- static gboolean padVideoBufferProbe(GstPad *pad, GstBuffer *buffer, gpointer user_data);
void addProbe(QGstreamerAudioProbeControl* probe);
void removeProbe(QGstreamerAudioProbeControl* probe);
- static gboolean padAudioBufferProbe(GstPad *pad, GstBuffer *buffer, gpointer user_data);
void endOfMediaReset();
@@ -172,7 +170,9 @@ private:
static void playbinNotifySource(GObject *o, GParamSpec *p, gpointer d);
static void handleVolumeChange(GObject *o, GParamSpec *p, gpointer d);
static void handleMutedChange(GObject *o, GParamSpec *p, gpointer d);
+#if !GST_CHECK_VERSION(1,0,0)
static void insertColorSpaceElement(GstElement *element, gpointer data);
+#endif
static void handleElementAdded(GstBin *bin, GstElement *element, QGstreamerPlayerSession *session);
static void handleStreamsChange(GstBin *bin, gpointer user_data);
static GstAutoplugSelectResult handleAutoplugSelect(GstBin *bin, GstPad *pad, GstCaps *caps, GstElementFactory *factory, QGstreamerPlayerSession *session);
@@ -194,11 +194,14 @@ private:
QGstreamerBusHelper* m_busHelper;
GstElement* m_playbin;
+ GstElement* m_videoSink;
+
GstElement* m_videoOutputBin;
GstElement* m_videoIdentity;
+#if !GST_CHECK_VERSION(1,0,0)
GstElement* m_colorSpace;
bool m_usingColorspaceElement;
- GstElement* m_videoSink;
+#endif
GstElement* m_pendingVideoSink;
GstElement* m_nullVideoSink;
@@ -218,13 +221,8 @@ private:
QList<QMediaStreamsControl::StreamType> m_streamTypes;
QMap<QMediaStreamsControl::StreamType, int> m_playbin2StreamOffset;
- QList<QGstreamerVideoProbeControl*> m_videoProbes;
- QMutex m_videoProbeMutex;
- int m_videoBufferProbeId;
-
- QList<QGstreamerAudioProbeControl*> m_audioProbes;
- QMutex m_audioProbeMutex;
- int m_audioBufferProbeId;
+ QGstreamerVideoProbeControl *m_videoProbe;
+ QGstreamerAudioProbeControl *m_audioProbe;
int m_volume;
qreal m_playbackRate;
@@ -252,6 +250,7 @@ private:
bool m_isLiveSource;
bool m_isPlaylist;
+ gulong pad_probe_id;
};
QT_END_NAMESPACE