author     Piotr Srebrny <piotr.srebrny@qt.io>    2021-08-26 11:15:07 +0200
committer  Lars Knoll <lars.knoll@qt.io>          2021-09-13 11:57:47 +0200
commit     81a3ea8ef3868818bdca2fb3fb7a76553f9b35ec (patch)
tree       70d42333b097092043272f1564b8e2cec761c87b /src/multimedia
parent     1c0dc886841338c8128fe1b3f1fac2438944db70 (diff)
GStreamer: enable dynamic pipeline modification for capture session
Work in progress...

Change-Id: I4423745a9d229e509aa161cd6a0a28647f5579e9
Reviewed-by: Lars Knoll <lars.knoll@qt.io>
Diffstat (limited to 'src/multimedia')
-rw-r--r--  src/multimedia/platform/gstreamer/common/qgst_p.h                          |  23
-rw-r--r--  src/multimedia/platform/gstreamer/common/qgstreameraudioinput.cpp          |  15
-rw-r--r--  src/multimedia/platform/gstreamer/common/qgstreameraudioinput_p.h          |   3
-rw-r--r--  src/multimedia/platform/gstreamer/common/qgstreameraudiooutput.cpp         |  19
-rw-r--r--  src/multimedia/platform/gstreamer/common/qgstvideobuffer.cpp               |  10
-rw-r--r--  src/multimedia/platform/gstreamer/mediacapture/qgstreamercamera.cpp        | 124
-rw-r--r--  src/multimedia/platform/gstreamer/mediacapture/qgstreamercamera_p.h        |   8
-rw-r--r--  src/multimedia/platform/gstreamer/mediacapture/qgstreamerimagecapture.cpp  |  58
-rw-r--r--  src/multimedia/platform/gstreamer/mediacapture/qgstreamerimagecapture_p.h  |   5
-rw-r--r--  src/multimedia/platform/gstreamer/mediacapture/qgstreamermediacapture.cpp  | 241
-rw-r--r--  src/multimedia/platform/gstreamer/mediacapture/qgstreamermediacapture_p.h  |  17
-rw-r--r--  src/multimedia/platform/gstreamer/mediacapture/qgstreamermediaencoder.cpp  |  79
-rw-r--r--  src/multimedia/platform/gstreamer/mediacapture/qgstreamermediaencoder_p.h  |   4
13 files changed, 331 insertions, 275 deletions
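
The patch drops the beginConfig()/endConfig() pipeline reconfiguration and instead relies on GStreamer idle pad probes, so elements can be relinked while the pipeline stays in the PLAYING state. A minimal illustrative sketch of that pattern (not part of the patch; all names below are placeholders):

#include <gst/gst.h>

// Swap the element downstream of "upstream" while the pipeline keeps running.
// GST_PAD_PROBE_TYPE_IDLE guarantees the callback runs while the pad is not
// streaming, so unlinking cannot race with buffers in flight.
struct SwapContext {
    GstElement *bin;       // bin/pipeline owning the elements
    GstElement *upstream;  // element whose "src" pad feeds the sink
    GstElement *oldSink;
    GstElement *newSink;
};

static GstPadProbeReturn swapWhenIdle(GstPad *, GstPadProbeInfo *, gpointer data)
{
    auto *ctx = static_cast<SwapContext *>(data);

    gst_element_unlink(ctx->upstream, ctx->oldSink);
    gst_element_set_state(ctx->oldSink, GST_STATE_NULL);
    gst_bin_remove(GST_BIN(ctx->bin), ctx->oldSink);

    gst_bin_add(GST_BIN(ctx->bin), ctx->newSink);
    gst_element_link(ctx->upstream, ctx->newSink);
    gst_element_sync_state_with_parent(ctx->newSink);

    return GST_PAD_PROBE_REMOVE;   // one-shot probe
}

static void swapSink(SwapContext *ctx)
{
    GstPad *srcPad = gst_element_get_static_pad(ctx->upstream, "src");
    // If the pad is already idle the callback is invoked synchronously,
    // otherwise as soon as the current buffer has been pushed.
    gst_pad_add_probe(srcPad, GST_PAD_PROBE_TYPE_IDLE, swapWhenIdle, ctx, nullptr);
    gst_object_unref(srcPad);
}
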
diff --git a/src/multimedia/platform/gstreamer/common/qgst_p.h b/src/multimedia/platform/gstreamer/common/qgst_p.h
index 12eb123c0..0130ca00f 100644
--- a/src/multimedia/platform/gstreamer/common/qgst_p.h
+++ b/src/multimedia/platform/gstreamer/common/qgst_p.h
@@ -53,6 +53,7 @@
#include <private/qtmultimediaglobal_p.h>
+#include <QSemaphore>
#include <QtCore/qlist.h>
#include <QtMultimedia/qaudioformat.h>
@@ -61,6 +62,8 @@
#include <gst/gst.h>
#include <gst/video/video-info.h>
+#include <functional>
+
#if QT_CONFIG(gstreamer_photography)
#define GST_USE_UNSTABLE_API
#include <gst/interfaces/photography.h>
@@ -391,6 +394,24 @@ public:
gst_pad_add_probe (pad(), type, Impl::callback, instance, nullptr);
}
+ void doInIdleProbe(std::function<void()> work) {
+ struct CallbackData {
+ QSemaphore waitDone;
+ std::function<void()> work;
+ } cd;
+ cd.work = work;
+
+ auto callback = [](GstPad *, GstPadProbeInfo *, gpointer p) {
+ auto cd = reinterpret_cast<CallbackData*>(p);
+ cd->work();
+ cd->waitDone.release();
+ return GST_PAD_PROBE_REMOVE;
+ };
+
+ gst_pad_add_probe(pad(), GST_PAD_PROBE_TYPE_IDLE, callback, &cd, nullptr);
+ cd.waitDone.acquire();
+ }
+
template<auto Member, typename T>
void addEosProbe(T *instance) {
struct Impl {
@@ -455,6 +476,8 @@ public:
{ gst_element_unlink(element(), next.element()); }
QGstPad staticPad(const char *name) const { return QGstPad(gst_element_get_static_pad(element(), name), HasRef); }
+ QGstPad src() const { return staticPad("src"); }
+ QGstPad sink() const { return staticPad("sink"); }
QGstPad getRequestPad(const char *name) const { return QGstPad(gst_element_get_request_pad(element(), name), HasRef); }
void releaseRequestPad(const QGstPad &pad) const { return gst_element_release_request_pad(element(), pad.pad()); }
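
A hypothetical usage sketch of the new QGstPad::doInIdleProbe() together with the src()/sink() helpers, assuming the private QGstElement/QGstPad/QGstBin wrappers declared in this header (the function name and parameters are placeholders); it mirrors the pattern the audio input/output code below uses:

#include <private/qgst_p.h>

// Replace the element downstream of "upstream" while the pipeline keeps
// playing. doInIdleProbe() blocks until the lambda has run on an idle pad,
// so the unlink cannot race with the streaming thread.
static void swapDownstream(QGstBin bin, QGstElement upstream,
                           QGstElement oldSink, QGstElement newSink)
{
    upstream.src().doInIdleProbe([&]() {
        upstream.unlink(oldSink);
    });

    oldSink.setStateSync(GST_STATE_NULL);
    bin.remove(oldSink);

    bin.add(newSink);
    upstream.link(newSink);
    newSink.syncStateWithParent();
}
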
diff --git a/src/multimedia/platform/gstreamer/common/qgstreameraudioinput.cpp b/src/multimedia/platform/gstreamer/common/qgstreameraudioinput.cpp
index 2249b8b34..cd1233c1d 100644
--- a/src/multimedia/platform/gstreamer/common/qgstreameraudioinput.cpp
+++ b/src/multimedia/platform/gstreamer/common/qgstreameraudioinput.cpp
@@ -100,11 +100,6 @@ void QGstreamerAudioInput::setMuted(bool muted)
emit mutedChanged(muted);
}
-void QGstreamerAudioInput::setPipeline(const QGstPipeline &pipeline)
-{
- gstPipeline = pipeline;
-}
-
void QGstreamerAudioInput::setAudioDevice(const QAudioDevice &device)
{
if (device == m_audioDevice)
@@ -112,24 +107,24 @@ void QGstreamerAudioInput::setAudioDevice(const QAudioDevice &device)
qCDebug(qLcMediaAudioInput) << "setAudioInput" << device.description() << device.isNull();
m_audioDevice = device;
- gstPipeline.beginConfig();
-
QGstElement newSrc;
auto *deviceInfo = static_cast<const QGStreamerAudioDeviceInfo *>(m_audioDevice.handle());
if (deviceInfo && deviceInfo->gstDevice)
- newSrc = gst_device_create_element(deviceInfo->gstDevice , "audiosrc");
+ newSrc = gst_device_create_element(deviceInfo->gstDevice, "audiosrc");
if (newSrc.isNull())
newSrc = QGstElement("autoaudiosrc", "audiosrc");
+ // FIXME: the source can most probably be disconnected outside of the idle probe
+ audioSrc.staticPad("src").doInIdleProbe([&](){
+ audioSrc.unlink(audioVolume);
+ });
audioSrc.setStateSync(GST_STATE_NULL);
gstAudioInput.remove(audioSrc);
audioSrc = newSrc;
gstAudioInput.add(audioSrc);
audioSrc.link(audioVolume);
audioSrc.syncStateWithParent();
-
- gstPipeline.endConfig();
}
QAudioDevice QGstreamerAudioInput::audioInput() const
diff --git a/src/multimedia/platform/gstreamer/common/qgstreameraudioinput_p.h b/src/multimedia/platform/gstreamer/common/qgstreameraudioinput_p.h
index b45f96b6c..a0e7d96ab 100644
--- a/src/multimedia/platform/gstreamer/common/qgstreameraudioinput_p.h
+++ b/src/multimedia/platform/gstreamer/common/qgstreameraudioinput_p.h
@@ -83,8 +83,6 @@ public:
void setVolume(float volume) override;
void setMuted(bool muted) override;
- void setPipeline(const QGstPipeline &pipeline);
-
QGstElement gstElement() const { return gstAudioInput; }
Q_SIGNALS:
@@ -98,7 +96,6 @@ private:
QAudioDevice m_audioDevice;
// Gst elements
- QGstPipeline gstPipeline;
QGstBin gstAudioInput;
QGstElement audioSrc;
diff --git a/src/multimedia/platform/gstreamer/common/qgstreameraudiooutput.cpp b/src/multimedia/platform/gstreamer/common/qgstreameraudiooutput.cpp
index 5b7235735..6338c1ee0 100644
--- a/src/multimedia/platform/gstreamer/common/qgstreameraudiooutput.cpp
+++ b/src/multimedia/platform/gstreamer/common/qgstreameraudiooutput.cpp
@@ -97,11 +97,6 @@ void QGstreamerAudioOutput::setAudioDevice(const QAudioDevice &info)
qCDebug(qLcMediaAudioOutput) << "setAudioOutput" << info.description() << info.isNull();
m_audioOutput = info;
- gstPipeline.beginConfig();
-
- audioSink.setStateSync(GST_STATE_NULL);
- gstAudioOutput.remove(audioSink);
-
QGstElement newSink;
auto *deviceInfo = static_cast<const QGStreamerAudioDeviceInfo *>(m_audioOutput.handle());
if (deviceInfo && deviceInfo->gstDevice)
@@ -110,12 +105,16 @@ void QGstreamerAudioOutput::setAudioDevice(const QAudioDevice &info)
if (newSink.isNull())
newSink = QGstElement("autoaudiosink", "audiosink");
- audioSink = newSink;
- gstAudioOutput.add(audioSink);
- audioVolume.link(audioSink);
- audioSink.syncStateWithParent();
+ audioVolume.staticPad("src").doInIdleProbe([&](){
+ audioVolume.unlink(audioSink);
+ gstAudioOutput.remove(audioSink);
+ gstAudioOutput.add(newSink);
+ newSink.syncStateWithParent();
+ audioVolume.link(newSink);
+ });
- gstPipeline.endConfig();
+ audioSink.setStateSync(GST_STATE_NULL);
+ audioSink = newSink;
}
QT_END_NAMESPACE
diff --git a/src/multimedia/platform/gstreamer/common/qgstvideobuffer.cpp b/src/multimedia/platform/gstreamer/common/qgstvideobuffer.cpp
index b93cb9eac..878b4d410 100644
--- a/src/multimedia/platform/gstreamer/common/qgstvideobuffer.cpp
+++ b/src/multimedia/platform/gstreamer/common/qgstvideobuffer.cpp
@@ -88,16 +88,18 @@ QT_BEGIN_NAMESPACE
QGstVideoBuffer::QGstVideoBuffer(GstBuffer *buffer, const GstVideoInfo &info, QGstreamerVideoSink *sink,
const QVideoFrameFormat &frameFormat,
QGstCaps::MemoryFormat format)
- : QAbstractVideoBuffer((sink->rhi() && format != QGstCaps::CpuMemory) ?
- QVideoFrame::RhiTextureHandle : QVideoFrame::NoHandle, sink->rhi())
+ : QAbstractVideoBuffer((sink && sink->rhi() && format != QGstCaps::CpuMemory) ?
+ QVideoFrame::RhiTextureHandle : QVideoFrame::NoHandle, sink ? sink->rhi() : nullptr)
, memoryFormat(format)
, m_frameFormat(frameFormat)
, m_videoInfo(info)
, m_buffer(buffer)
{
gst_buffer_ref(m_buffer);
- eglDisplay = sink->eglDisplay();
- eglImageTargetTexture2D = sink->eglImageTargetTexture2D();
+ if (sink) {
+ eglDisplay = sink->eglDisplay();
+ eglImageTargetTexture2D = sink->eglImageTargetTexture2D();
+ }
}
QGstVideoBuffer::~QGstVideoBuffer()
diff --git a/src/multimedia/platform/gstreamer/mediacapture/qgstreamercamera.cpp b/src/multimedia/platform/gstreamer/mediacapture/qgstreamercamera.cpp
index b5ca6c881..1ee0244ef 100644
--- a/src/multimedia/platform/gstreamer/mediacapture/qgstreamercamera.cpp
+++ b/src/multimedia/platform/gstreamer/mediacapture/qgstreamercamera.cpp
@@ -53,16 +53,16 @@
#include <QtCore/qdebug.h>
QGstreamerCamera::QGstreamerCamera(QCamera *camera)
- : QPlatformCamera(camera),
- gstCameraBin("camerabin")
+ : QPlatformCamera(camera)
{
gstCamera = QGstElement("videotestsrc");
+ gstCapsFilter = QGstElement("capsfilter", "videoCapsFilter");
gstDecode = QGstElement("identity");
gstVideoConvert = QGstElement("videoconvert", "videoConvert");
gstVideoScale = QGstElement("videoscale", "videoScale");
- gstCameraBin.add(gstCamera, gstDecode, gstVideoConvert, gstVideoScale);
- gstCamera.link(gstDecode, gstVideoConvert, gstVideoScale);
-
+ gstCameraBin = QGstBin("camerabin");
+ gstCameraBin.add(gstCamera, gstCapsFilter, gstDecode, gstVideoConvert, gstVideoScale);
+ gstCamera.link(gstCapsFilter, gstDecode, gstVideoConvert, gstVideoScale);
gstCameraBin.addGhostPad(gstVideoScale, "src");
}
@@ -97,88 +97,94 @@ void QGstreamerCamera::setCamera(const QCameraDevice &camera)
{
if (m_cameraDevice == camera)
return;
-// qDebug() << "setCamera" << camera;
+ qDebug() << "setCamera" << camera;
m_cameraDevice = camera;
- gstPipeline.beginConfig();
-
- Q_ASSERT(!gstCamera.isNull());
-
- gstCamera.setStateSync(GST_STATE_NULL);
- gstCameraBin.remove(gstCamera);
-
+ QGstElement gstNewCamera;
if (camera.isNull()) {
- gstCamera = QGstElement("videotestsrc");
+ gstNewCamera = QGstElement("videotestsrc");
} else {
auto *devices = static_cast<QGstreamerMediaDevices *>(QGstreamerIntegration::instance()->devices());
auto *device = devices->videoDevice(camera.id());
- gstCamera = gst_device_create_element(device, "camerasrc");
+ gstNewCamera = gst_device_create_element(device, "camerasrc");
QGstStructure properties = gst_device_get_properties(device);
if (properties.name() == "v4l2deviceprovider")
m_v4l2Device = QString::fromUtf8(properties["device.path"].toString());
}
- gstCameraBin.add(gstCamera);
- // set the camera up with a decent format
- setCameraFormatInternal({});
+ QCameraFormat f = findBestCameraFormat(camera);
+ auto caps = QGstMutableCaps::fromCameraFormat(f);
+ auto gstNewDecode = QGstElement(f.pixelFormat() == QVideoFrameFormat::Format_Jpeg ? "jpegdec" : "identity");
+
+ gstCamera.setStateSync(GST_STATE_NULL);
+ gstDecode.setStateSync(GST_STATE_NULL);
- gstCamera.setState(GST_STATE_PAUSED);
+ gstCamera.unlink(gstCapsFilter);
+ gstCapsFilter.unlink(gstDecode);
+ gstDecode.unlink(gstVideoConvert);
- gstPipeline.endConfig();
- gstPipeline.dumpGraph("setCamera");
+ gstCameraBin.remove(gstCamera);
+ gstCameraBin.remove(gstDecode);
+
+ gstCapsFilter.set("caps", caps);
+
+ gstCameraBin.add(gstNewCamera, gstNewDecode);
+
+ gstNewDecode.link(gstVideoConvert);
+ gstCapsFilter.link(gstNewDecode);
+
+ if (!gstNewCamera.link(gstCapsFilter))
+ qWarning() << "linking camera failed" << gstCamera.name() << caps.toString();
+
+ // Start sending frames once pipeline is linked
+ // FIXME: put the camera into the READY state before linking it to the decoder, as in the NULL state it does not know its true caps
+ gstCapsFilter.syncStateWithParent();
+ gstNewDecode.syncStateWithParent();
+ gstNewCamera.syncStateWithParent();
+
+ gstCamera = gstNewCamera;
+ gstDecode = gstNewDecode;
updateCameraProperties();
}
-void QGstreamerCamera::setCameraFormatInternal(const QCameraFormat &format)
+bool QGstreamerCamera::setCameraFormat(const QCameraFormat &format)
{
+ if (!format.isNull() && !m_cameraDevice.videoFormats().contains(format))
+ return false;
+
+ qDebug() << "Set camera format";
+
QCameraFormat f = format;
if (f.isNull())
f = findBestCameraFormat(m_cameraDevice);
- // add jpeg decoder where required
- gstDecode.setStateSync(GST_STATE_NULL);
- gstCameraBin.remove(gstDecode);
+ auto caps = QGstMutableCaps::fromCameraFormat(f);
- if (f.pixelFormat() == QVideoFrameFormat::Format_Jpeg) {
-// qDebug() << " enabling jpeg decoder";
- gstDecode = QGstElement("jpegdec");
- } else {
-// qDebug() << " camera delivers raw video";
- gstDecode = QGstElement("identity");
- }
- gstCameraBin.add(gstDecode);
- gstDecode.link(gstVideoConvert);
+ auto newGstDecode = QGstElement(f.pixelFormat() == QVideoFrameFormat::Format_Jpeg ? "jpegdec" : "identity");
+ gstCameraBin.add(newGstDecode);
+ newGstDecode.syncStateWithParent();
- auto caps = QGstMutableCaps::fromCameraFormat(f);
- if (!caps.isNull()) {
- if (!gstCamera.linkFiltered(gstDecode, caps))
- qWarning() << "linking filtered camera to decoder failed" << gstCamera.name() << gstDecode.name() << caps.toString();
- } else {
- if (!gstCamera.link(gstDecode))
- qWarning() << "linking camera to decoder failed" << gstCamera.name() << gstDecode.name();
- }
-}
+ gstCamera.staticPad("src").doInIdleProbe([&](){
+ gstCamera.unlink(gstCapsFilter);
+ gstCapsFilter.unlink(gstDecode);
+ gstDecode.unlink(gstVideoConvert);
-bool QGstreamerCamera::setCameraFormat(const QCameraFormat &format)
-{
- if (!format.isNull() && !m_cameraDevice.videoFormats().contains(format))
- return false;
- gstPipeline.beginConfig();
- setCameraFormatInternal(format);
- gstPipeline.endConfig();
- return true;
-}
+ gstCapsFilter.set("caps", caps);
-void QGstreamerCamera::setCaptureSession(QPlatformMediaCaptureSession *session)
-{
- QGstreamerMediaCapture *captureSession = static_cast<QGstreamerMediaCapture *>(session);
- if (m_session == captureSession)
- return;
+ newGstDecode.link(gstVideoConvert);
+ gstCapsFilter.link(newGstDecode);
+ if (!gstCamera.link(gstCapsFilter))
+ qWarning() << "linking filtered camera to decoder failed" << gstCamera.name() << caps.toString();
+ });
- m_session = captureSession;
- gstPipeline = m_session ? m_session->pipeline() : QGstPipeline();
+ gstDecode.setStateSync(GST_STATE_NULL);
+ gstCameraBin.remove(gstDecode);
+
+ gstDecode = newGstDecode;
+
+ return true;
}
void QGstreamerCamera::updateCameraProperties()
diff --git a/src/multimedia/platform/gstreamer/mediacapture/qgstreamercamera_p.h b/src/multimedia/platform/gstreamer/mediacapture/qgstreamercamera_p.h
index 51037119e..94f2f8678 100644
--- a/src/multimedia/platform/gstreamer/mediacapture/qgstreamercamera_p.h
+++ b/src/multimedia/platform/gstreamer/mediacapture/qgstreamercamera_p.h
@@ -71,9 +71,6 @@ public:
void setCamera(const QCameraDevice &camera) override;
bool setCameraFormat(const QCameraFormat &format) override;
- void setCameraFormatInternal(const QCameraFormat &format);
-
- void setCaptureSession(QPlatformMediaCaptureSession *session) override;
QGstElement gstElement() const { return gstCameraBin.element(); }
#if QT_CONFIG(gstreamer_photography)
@@ -123,14 +120,11 @@ private:
int v4l2FileDescriptor = -1;
#endif
- QGstreamerMediaCapture *m_session = nullptr;
-
QCameraDevice m_cameraDevice;
- QGstPipeline gstPipeline;
-
QGstBin gstCameraBin;
QGstElement gstCamera;
+ QGstElement gstCapsFilter;
QGstElement gstDecode;
QGstElement gstVideoConvert;
QGstElement gstVideoScale;
diff --git a/src/multimedia/platform/gstreamer/mediacapture/qgstreamerimagecapture.cpp b/src/multimedia/platform/gstreamer/mediacapture/qgstreamerimagecapture.cpp
index f4434e549..2d943def2 100644
--- a/src/multimedia/platform/gstreamer/mediacapture/qgstreamerimagecapture.cpp
+++ b/src/multimedia/platform/gstreamer/mediacapture/qgstreamerimagecapture.cpp
@@ -74,10 +74,13 @@ QGstreamerImageCapture::QGstreamerImageCapture(QImageCapture *parent)
encoder = QGstElement("jpegenc", "jpegEncoder");
muxer = QGstElement("jifmux", "jpegMuxer");
sink = QGstElement("fakesink","imageCaptureSink");
+ // imageCaptureSink does not wait for a preroll buffer when going READY -> PAUSED
+ // as no buffer will arrive until capture() is called
+ sink.set("async", false);
+
bin.add(queue, videoConvert, encoder, muxer, sink);
queue.link(videoConvert, encoder, muxer, sink);
bin.addGhostPad(queue, "sink");
- bin.lockState(true);
addProbeToPad(queue.staticPad("src").pad(), false);
@@ -87,8 +90,7 @@ QGstreamerImageCapture::QGstreamerImageCapture(QImageCapture *parent)
QGstreamerImageCapture::~QGstreamerImageCapture()
{
- if (m_session)
- m_session->releaseVideoPad(videoSrcPad);
+ bin.setStateSync(GST_STATE_NULL);
}
bool QGstreamerImageCapture::isReadyForCapture() const
@@ -149,10 +151,6 @@ int QGstreamerImageCapture::doCapture(const QString &fileName)
// let one image pass the pipeline
passImage = true;
- link();
-
- gstPipeline.dumpGraph("captureImage");
-
emit readyForCaptureChanged(false);
return m_lastId;
}
@@ -217,22 +215,12 @@ void QGstreamerImageCapture::setCaptureSession(QPlatformMediaCaptureSession *ses
pendingImages.clear();
passImage = false;
cameraActive = false;
- gstPipeline.beginConfig();
- bin.setStateSync(GST_STATE_NULL);
- gstPipeline.remove(bin);
- gstPipeline.endConfig();
- gstPipeline = {};
}
m_session = captureSession;
if (!m_session)
return;
- gstPipeline = captureSession->pipeline();
- gstPipeline.beginConfig();
- gstPipeline.add(bin);
- bin.setStateSync(GST_STATE_READY);
- gstPipeline.endConfig();
connect(m_session, &QPlatformMediaCaptureSession::cameraChanged, this, &QGstreamerImageCapture::onCameraChanged);
onCameraChanged();
}
@@ -265,14 +253,14 @@ gboolean QGstreamerImageCapture::saveImageFilter(GstElement *element,
Q_UNUSED(pad);
QGstreamerImageCapture *capture = static_cast<QGstreamerImageCapture *>(appdata);
+ capture->passImage = false;
+
if (capture->pendingImages.isEmpty()) {
- capture->unlink();
return true;
}
auto imageData = capture->pendingImages.dequeue();
if (imageData.filename.isEmpty()) {
- capture->unlink();
return true;
}
@@ -294,41 +282,9 @@ gboolean QGstreamerImageCapture::saveImageFilter(GstElement *element,
Q_ARG(QString, imageData.filename));
}
- capture->unlink();
-
return TRUE;
}
-void QGstreamerImageCapture::unlink()
-{
- return;
- if (passImage)
- return;
- if (gstPipeline.isNull())
- return;
- gstPipeline.beginConfig();
- videoSrcPad.unlinkPeer();
- m_session->releaseVideoPad(videoSrcPad);
- videoSrcPad = {};
- bin.setStateSync(GST_STATE_READY);
- bin.lockState(true);
- gstPipeline.endConfig();
-}
-
-void QGstreamerImageCapture::link()
-{
- if (!(m_session && m_session->camera()))
- return;
- if (!bin.staticPad("sink").peer().isNull() || gstPipeline.isNull())
- return;
- gstPipeline.beginConfig();
- videoSrcPad = m_session->getVideoPad();
- videoSrcPad.link(bin.staticPad("sink"));
- bin.lockState(false);
- bin.setState(GST_STATE_PAUSED);
- gstPipeline.endConfig();
-}
-
QImageEncoderSettings QGstreamerImageCapture::imageSettings() const
{
return m_settings;
diff --git a/src/multimedia/platform/gstreamer/mediacapture/qgstreamerimagecapture_p.h b/src/multimedia/platform/gstreamer/mediacapture/qgstreamerimagecapture_p.h
index 50d1d9a7b..a0f5d1a0b 100644
--- a/src/multimedia/platform/gstreamer/mediacapture/qgstreamerimagecapture_p.h
+++ b/src/multimedia/platform/gstreamer/mediacapture/qgstreamerimagecapture_p.h
@@ -68,7 +68,7 @@ class QGstreamerImageCapture : public QPlatformImageCapture, private QGstreamerB
{
Q_OBJECT
public:
- QGstreamerImageCapture(QImageCapture *parent/*, const QGstPipeline &pipeline*/);
+ QGstreamerImageCapture(QImageCapture *parent);
virtual ~QGstreamerImageCapture();
bool isReadyForCapture() const override;
@@ -91,8 +91,6 @@ public Q_SLOTS:
private:
int doCapture(const QString &fileName);
static gboolean saveImageFilter(GstElement *element, GstBuffer *buffer, GstPad *pad, void *appdata);
- void link();
- void unlink();
QGstreamerMediaCapture *m_session = nullptr;
int m_lastId = 0;
@@ -106,7 +104,6 @@ private:
QQueue<PendingImage> pendingImages;
- QGstPipeline gstPipeline;
QGstBin bin;
QGstElement queue;
QGstElement videoConvert;
diff --git a/src/multimedia/platform/gstreamer/mediacapture/qgstreamermediacapture.cpp b/src/multimedia/platform/gstreamer/mediacapture/qgstreamermediacapture.cpp
index ad043ba51..478eb1c0a 100644
--- a/src/multimedia/platform/gstreamer/mediacapture/qgstreamermediacapture.cpp
+++ b/src/multimedia/platform/gstreamer/mediacapture/qgstreamermediacapture.cpp
@@ -53,6 +53,28 @@ QT_BEGIN_NAMESPACE
Q_LOGGING_CATEGORY(qLcMediaCapture, "qt.multimedia.capture")
+
+static void linkTeeToPad(QGstElement tee, QGstPad sink)
+{
+ if (tee.isNull() || sink.isNull())
+ return;
+
+ auto source = tee.getRequestPad("src_%u");
+ source.link(sink);
+}
+
+static void unlinkTeeFromPad(QGstElement tee, QGstPad sink)
+{
+ if (tee.isNull() || sink.isNull())
+ return;
+
+ auto source = sink.peer();
+ source.unlink(sink);
+
+ tee.releaseRequestPad(source);
+}
+
+
QGstreamerMediaCapture::QGstreamerMediaCapture()
: gstPipeline("pipeline")
{
@@ -60,8 +82,48 @@ QGstreamerMediaCapture::QGstreamerMediaCapture()
gstVideoOutput->setIsPreview();
gstVideoOutput->setPipeline(gstPipeline);
+ // Use system clock to drive all elements in the pipeline. Otherwise,
+ // the clock is sourced from the elements (e.g. from an audio source).
+ // Since the elements are added and removed dynamically, the clock would
+ // also change, causing loss of synchronization in the pipeline.
+ gst_pipeline_use_clock(gstPipeline.pipeline(), gst_system_clock_obtain());
+
+ // This is the recording pipeline with only live sources, thus the pipeline
+ // will always be in the playing state.
gstPipeline.setState(GST_STATE_PLAYING);
+ // TODO: remove this debug before final commit
+ // Four basic elements of the capture session showing the current position and state.
+ // All presented positions should show similar progress.
+ heartbeat.setInterval(1000);
+ heartbeat.start();
+ QObject::connect(&heartbeat, &QTimer::timeout, [this]() {
+ if (!gstPipeline.isNull()) {
+ gint64 current = -1;
+ gst_element_query_position(gstPipeline.element(),GST_FORMAT_TIME, &current);
+ qDebug() << "Pipeline " << current / 1000000 << gstPipeline.state();
+// auto name = QString::number(current).toLocal8Bit().data();
+// gstPipeline.dumpGraph(name);
+ }
+ if (gstAudioInput && !gstAudioInput->gstElement().isNull()) {
+ gint64 current = -1;
+ auto element = gstAudioInput->gstElement().element();
+ gst_element_query_position(element,GST_FORMAT_TIME, &current);
+ qDebug() << "Audio " << current / 1000000 << gstAudioInput->gstElement().state();
+ }
+ if (gstCamera && !gstCamera->gstElement().isNull()) {
+ gint64 current = -1;
+ gst_element_query_position(gstCamera->gstElement().element(),GST_FORMAT_TIME, &current);
+ qDebug() << "Camera " << current / 1000000 << gstCamera->gstElement().state();
+ }
+ auto encoder = !m_mediaEncoder ? QGstElement{} : m_mediaEncoder->getEncoder();
+ if (!encoder.isNull()) {
+ gint64 current = -1;
+ gst_element_query_position(encoder.element(),GST_FORMAT_TIME, &current);
+ qDebug() << "Encoder " << current / 1000000 << encoder.state();
+ }
+ });
+
gstPipeline.dumpGraph("initial");
}
@@ -84,10 +146,10 @@ void QGstreamerMediaCapture::setCamera(QPlatformCamera *camera)
if (gstCamera == control)
return;
- gstPipeline.beginConfig();
-
if (gstCamera) {
- gstCamera->setCaptureSession(nullptr);
+ unlinkTeeFromPad(gstVideoTee, encoderVideoSink);
+ unlinkTeeFromPad(gstVideoTee, imageCaptureSink);
+
auto camera = gstCamera->gstElement();
camera.setStateSync(GST_STATE_NULL);
gstVideoTee.setStateSync(GST_STATE_NULL);
@@ -98,26 +160,31 @@ void QGstreamerMediaCapture::setCamera(QPlatformCamera *camera)
gstPipeline.remove(gstVideoOutput->gstElement());
gstVideoTee = {};
+ gstCamera->setCaptureSession(nullptr);
}
gstCamera = control;
if (gstCamera) {
QGstElement camera = gstCamera->gstElement();
gstVideoTee = QGstElement("tee", "videotee");
+ gstVideoTee.set("allow-not-linked", true);
+
+ gstPipeline.add(gstVideoOutput->gstElement(), camera, gstVideoTee);
- gstPipeline.add(camera, gstVideoTee, gstVideoOutput->gstElement());
- auto pad = gstVideoTee.getRequestPad("src_%u");
- pad.link(gstVideoOutput->gstElement().staticPad("sink"));
- gstCamera->gstElement().link(gstVideoTee);
- gstCamera->setCaptureSession(this);
+ linkTeeToPad(gstVideoTee, encoderVideoSink);
+ linkTeeToPad(gstVideoTee, gstVideoOutput->gstElement().staticPad("sink"));
+ linkTeeToPad(gstVideoTee, imageCaptureSink);
- camera.setState(GST_STATE_PAUSED);
+ camera.link(gstVideoTee);
+
+ gstVideoOutput->gstElement().setState(GST_STATE_PLAYING);
+ gstVideoTee.setState(GST_STATE_PLAYING);
+ camera.setState(GST_STATE_PLAYING);
}
- gstPipeline.endConfig();
+ gstPipeline.dumpGraph("camera");
emit cameraChanged();
- gstPipeline.dumpGraph("camera");
}
QPlatformImageCapture *QGstreamerMediaCapture::imageCapture()
@@ -131,16 +198,25 @@ void QGstreamerMediaCapture::setImageCapture(QPlatformImageCapture *imageCapture
if (m_imageCapture == control)
return;
- gstPipeline.beginConfig();
-
- if (m_imageCapture)
+ if (m_imageCapture) {
+ unlinkTeeFromPad(gstVideoTee, imageCaptureSink);
+ m_imageCapture->gstElement().setStateSync(GST_STATE_NULL);
+ gstPipeline.remove(m_imageCapture->gstElement());
+ imageCaptureSink = {};
m_imageCapture->setCaptureSession(nullptr);
+ }
m_imageCapture = control;
- if (m_imageCapture)
+ if (m_imageCapture) {
+ imageCaptureSink = m_imageCapture->gstElement().staticPad("sink");
+ m_imageCapture->gstElement().setState(GST_STATE_PLAYING);
+ gstPipeline.add(m_imageCapture->gstElement());
+ linkTeeToPad(gstVideoTee, imageCaptureSink);
m_imageCapture->setCaptureSession(this);
+ }
+
+ gstPipeline.dumpGraph("imageCapture");
- gstPipeline.endConfig();
emit imageCaptureChanged();
}
@@ -150,16 +226,12 @@ void QGstreamerMediaCapture::setMediaEncoder(QPlatformMediaEncoder *encoder)
if (m_mediaEncoder == control)
return;
- gstPipeline.setStateSync(GST_STATE_PAUSED);
-
if (m_mediaEncoder)
m_mediaEncoder->setCaptureSession(nullptr);
m_mediaEncoder = control;
if (m_mediaEncoder)
m_mediaEncoder->setCaptureSession(this);
- gstPipeline.setState(GST_STATE_PLAYING);
-
emit encoderChanged();
gstPipeline.dumpGraph("encoder");
}
@@ -169,31 +241,82 @@ QPlatformMediaEncoder *QGstreamerMediaCapture::mediaEncoder()
return m_mediaEncoder;
}
+void QGstreamerMediaCapture::linkEncoder(QGstPad audioSink, QGstPad videoSink)
+{
+ if (!gstVideoTee.isNull() && !videoSink.isNull()) {
+ auto caps = gst_pad_get_current_caps(gstVideoTee.sink().pad());
+
+ encoderVideoCapsFilter = QGstElement("capsfilter", "encoderVideoCapsFilter");
+ encoderVideoCapsFilter.set("caps", QGstMutableCaps(caps));
+
+ gstPipeline.add(encoderVideoCapsFilter);
+
+ encoderVideoCapsFilter.src().link(videoSink);
+ linkTeeToPad(gstVideoTee, encoderVideoCapsFilter.sink());
+ encoderVideoCapsFilter.setState(GST_STATE_PLAYING);
+ encoderVideoSink = encoderVideoCapsFilter.sink();
+ }
+
+ if (!gstAudioTee.isNull() && !audioSink.isNull()) {
+ auto caps = gst_pad_get_current_caps(gstAudioTee.sink().pad());
+
+ encoderAudioCapsFilter = QGstElement("capsfilter", "encoderAudioCapsFilter");
+ encoderAudioCapsFilter.set("caps", QGstMutableCaps(caps));
+
+ gstPipeline.add(encoderAudioCapsFilter);
+
+ encoderAudioCapsFilter.src().link(audioSink);
+ linkTeeToPad(gstAudioTee, encoderAudioCapsFilter.sink());
+ encoderAudioCapsFilter.setState(GST_STATE_PLAYING);
+ encoderAudioSink = encoderAudioCapsFilter.sink();
+ }
+}
+
+void QGstreamerMediaCapture::unlinkEncoder()
+{
+ if (!encoderVideoCapsFilter.isNull()) {
+ encoderVideoCapsFilter.src().unlinkPeer();
+ unlinkTeeFromPad(gstVideoTee, encoderVideoCapsFilter.sink());
+ encoderVideoCapsFilter.setStateSync(GST_STATE_NULL);
+ gstPipeline.remove(encoderVideoCapsFilter);
+ encoderVideoCapsFilter = {};
+ }
+
+ if (!encoderAudioCapsFilter.isNull()) {
+ encoderAudioCapsFilter.src().unlinkPeer();
+ unlinkTeeFromPad(gstAudioTee, encoderAudioCapsFilter.sink());
+ encoderAudioCapsFilter.setStateSync(GST_STATE_NULL);
+ gstPipeline.remove(encoderAudioCapsFilter);
+ encoderAudioCapsFilter = {};
+ }
+
+ encoderAudioSink = {};
+ encoderVideoSink = {};
+}
+
void QGstreamerMediaCapture::setAudioInput(QPlatformAudioInput *input)
{
if (gstAudioInput == input)
return;
- gstPipeline.beginConfig();
if (gstAudioInput) {
- gstAudioInput->gstElement().setStateSync(GST_STATE_NULL);
- gstPipeline.remove(gstAudioInput->gstElement());
- gstAudioInput->setPipeline({});
- gstAudioTee.setStateSync(GST_STATE_NULL);
+ unlinkTeeFromPad(gstAudioTee, encoderAudioSink);
+
if (gstAudioOutput) {
- gstAudioOutputPad.unlinkPeer();
- gstAudioTee.releaseRequestPad(gstAudioOutputPad);
- gstAudioOutputPad = {};
+ unlinkTeeFromPad(gstAudioTee, gstAudioOutput->gstElement().staticPad("sink"));
gstAudioOutput->gstElement().setStateSync(GST_STATE_NULL);
gstPipeline.remove(gstAudioOutput->gstElement());
}
+
+ gstAudioInput->gstElement().setStateSync(GST_STATE_NULL);
+ gstPipeline.remove(gstAudioInput->gstElement());
+ gstAudioTee.setStateSync(GST_STATE_NULL);
gstPipeline.remove(gstAudioTee);
gstAudioTee = {};
}
gstAudioInput = static_cast<QGstreamerAudioInput *>(input);
if (gstAudioInput) {
- gstAudioInput->setPipeline(gstPipeline);
Q_ASSERT(gstAudioTee.isNull());
gstAudioTee = QGstElement("tee", "audiotee");
gstAudioTee.set("allow-not-linked", true);
@@ -202,12 +325,15 @@ void QGstreamerMediaCapture::setAudioInput(QPlatformAudioInput *input)
if (gstAudioOutput) {
gstPipeline.add(gstAudioOutput->gstElement());
- gstAudioOutputPad = gstAudioTee.getRequestPad("src_%u");
- gstAudioOutputPad.link(gstAudioOutput->gstElement().staticPad("sink"));
+ gstAudioOutput->gstElement().setState(GST_STATE_PLAYING);
+ linkTeeToPad(gstAudioTee, gstAudioOutput->gstElement().staticPad("sink"));
}
- }
- gstPipeline.endConfig();
+ gstAudioTee.setState(GST_STATE_PLAYING);
+ gstAudioInput->gstElement().setStateSync(GST_STATE_PLAYING);
+
+ linkTeeToPad(gstAudioTee, encoderAudioSink);
+ }
}
void QGstreamerMediaCapture::setVideoPreview(QVideoSink *sink)
@@ -219,53 +345,20 @@ void QGstreamerMediaCapture::setAudioOutput(QPlatformAudioOutput *output)
{
if (gstAudioOutput == output)
return;
- gstPipeline.beginConfig();
- if (gstAudioOutput) {
+ if (gstAudioOutput && gstAudioInput) {
+ // If audio input is set, the output is in the pipeline
+ unlinkTeeFromPad(gstAudioTee, gstAudioOutput->gstElement().staticPad("sink"));
gstAudioOutput->gstElement().setStateSync(GST_STATE_NULL);
- if (!gstAudioTee.isNull()) {
- gstAudioOutputPad.unlinkPeer();
- gstAudioTee.releaseRequestPad(gstAudioOutputPad);
- gstAudioOutputPad = {};
- gstPipeline.remove(gstAudioOutput->gstElement());
- }
- gstAudioOutput->setPipeline({});
+ gstPipeline.remove(gstAudioOutput->gstElement());
}
gstAudioOutput = static_cast<QGstreamerAudioOutput *>(output);
- if (gstAudioOutput) {
- gstAudioOutput->setPipeline(gstPipeline);
- if (!gstAudioTee.isNull()) {
- gstPipeline.add(gstAudioOutput->gstElement());
- gstAudioOutputPad = gstAudioTee.getRequestPad("src_%u");
- gstAudioOutputPad.link(gstAudioOutput->gstElement().staticPad("sink"));
- }
+ if (gstAudioOutput && gstAudioInput) {
+ gstPipeline.add(gstAudioOutput->gstElement());
+ gstAudioOutput->gstElement().setState(GST_STATE_PLAYING);
+ linkTeeToPad(gstAudioTee, gstAudioOutput->gstElement().staticPad("sink"));
}
-
- gstPipeline.endConfig();
-}
-
-
-QGstPad QGstreamerMediaCapture::getAudioPad() const
-{
- return gstAudioTee.isNull() ? QGstPad() : gstAudioTee.getRequestPad("src_%u");
-}
-
-QGstPad QGstreamerMediaCapture::getVideoPad() const
-{
- return gstVideoTee.isNull() ? QGstPad() : gstVideoTee.getRequestPad("src_%u");
-}
-
-void QGstreamerMediaCapture::releaseAudioPad(const QGstPad &pad) const
-{
- if (!pad.isNull())
- gstAudioTee.releaseRequestPad(pad);
-}
-
-void QGstreamerMediaCapture::releaseVideoPad(const QGstPad &pad) const
-{
- if (!pad.isNull())
- gstVideoTee.releaseRequestPad(pad);
}
QGstreamerVideoSink *QGstreamerMediaCapture::gstreamerVideoSink() const
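
For context, the new linkTeeToPad()/unlinkTeeFromPad() helpers and the clock setup above essentially wrap the standard GStreamer tee/request-pad pattern. A hypothetical raw-API sketch (placeholder names, not part of the patch):

#include <gst/gst.h>

// A tee with "allow-not-linked" keeps running even when no branch is
// attached, so consumers (preview, encoder, image capture) can be added
// and removed at any time via request pads.
static GstPad *attachBranch(GstElement *tee, GstElement *branchSink)
{
    GstPad *src = gst_element_get_request_pad(tee, "src_%u");
    GstPad *sink = gst_element_get_static_pad(branchSink, "sink");
    gst_pad_link(src, sink);
    gst_object_unref(sink);
    return src;                      // keep it to release the pad later
}

static void detachBranch(GstElement *tee, GstPad *teeSrc)
{
    GstPad *peer = gst_pad_get_peer(teeSrc);
    if (peer) {
        gst_pad_unlink(teeSrc, peer);
        gst_object_unref(peer);
    }
    gst_element_release_request_pad(tee, teeSrc);
    gst_object_unref(teeSrc);
}

static GstElement *makeCaptureTee(GstElement *pipeline, const char *name)
{
    // Drive the pipeline from the system clock so adding/removing live
    // sources does not change the pipeline clock mid-stream.
    GstClock *clock = gst_system_clock_obtain();
    gst_pipeline_use_clock(GST_PIPELINE(pipeline), clock);
    gst_object_unref(clock);

    GstElement *tee = gst_element_factory_make("tee", name);
    g_object_set(tee, "allow-not-linked", TRUE, NULL);
    gst_bin_add(GST_BIN(pipeline), tee);
    return tee;
}
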
diff --git a/src/multimedia/platform/gstreamer/mediacapture/qgstreamermediacapture_p.h b/src/multimedia/platform/gstreamer/mediacapture/qgstreamermediacapture_p.h
index 542f82531..ed75a27d1 100644
--- a/src/multimedia/platform/gstreamer/mediacapture/qgstreamermediacapture_p.h
+++ b/src/multimedia/platform/gstreamer/mediacapture/qgstreamermediacapture_p.h
@@ -57,6 +57,8 @@
#include <private/qgst_p.h>
#include <private/qgstpipeline_p.h>
+#include <qtimer.h>
+
QT_BEGIN_NAMESPACE
class QGstreamerCamera;
@@ -85,14 +87,13 @@ public:
void setMediaEncoder(QPlatformMediaEncoder *encoder) override;
void setAudioInput(QPlatformAudioInput *input) override;
+ QGstreamerAudioInput *audioInput() { return gstAudioInput; }
void setVideoPreview(QVideoSink *sink) override;
void setAudioOutput(QPlatformAudioOutput *output) override;
- QGstPad getAudioPad() const;
- QGstPad getVideoPad() const;
- void releaseAudioPad(const QGstPad &pad) const;
- void releaseVideoPad(const QGstPad &pad) const;
+ void linkEncoder(QGstPad audioSink, QGstPad videoSink);
+ void unlinkEncoder();
QGstPipeline pipeline() const { return gstPipeline; }
@@ -102,14 +103,20 @@ private:
friend QGstreamerMediaEncoder;
// Gst elements
QGstPipeline gstPipeline;
+ QTimer heartbeat;
QGstreamerAudioInput *gstAudioInput = nullptr;
QGstreamerCamera *gstCamera = nullptr;
QGstElement gstAudioTee;
QGstElement gstVideoTee;
+ QGstElement encoderVideoCapsFilter;
+ QGstElement encoderAudioCapsFilter;
+
+ QGstPad encoderAudioSink;
+ QGstPad encoderVideoSink;
+ QGstPad imageCaptureSink;
- QGstPad gstAudioOutputPad;
QGstreamerAudioOutput *gstAudioOutput = nullptr;
QGstreamerVideoOutput *gstVideoOutput = nullptr;
diff --git a/src/multimedia/platform/gstreamer/mediacapture/qgstreamermediaencoder.cpp b/src/multimedia/platform/gstreamer/mediacapture/qgstreamermediaencoder.cpp
index 7fba45341..54caca0d6 100644
--- a/src/multimedia/platform/gstreamer/mediacapture/qgstreamermediaencoder.cpp
+++ b/src/multimedia/platform/gstreamer/mediacapture/qgstreamermediaencoder.cpp
@@ -42,6 +42,7 @@
#include "private/qgstreamerformatinfo_p.h"
#include "private/qgstpipeline_p.h"
#include "private/qgstreamermessage_p.h"
+#include "private/qplatformcamera_p.h"
#include "qaudiodevice.h"
#include "qmediastoragelocation_p.h"
@@ -123,6 +124,7 @@ bool QGstreamerMediaEncoder::processBusMessage(const QGstreamerMessage &message)
error(QMediaRecorder::ResourceError, QString::fromUtf8(err->message));
g_error_free(err);
g_free(debug);
+ finalize();
}
if (GST_MESSAGE_SRC(gm) == gstEncoder.object()) {
@@ -283,6 +285,14 @@ void QGstreamerMediaEncoder::record(QMediaEncoderSettings &settings)
if (!m_session || state() != QMediaRecorder::StoppedState)
return;
+ const auto hasVideo = m_session->camera() && m_session->camera()->isActive();
+ const auto hasAudio = m_session->audioInput() != nullptr;
+
+ if (!hasVideo && !hasAudio) {
+ error(QMediaRecorder::ResourceError, QMediaRecorder::tr("No camera or audio input"));
+ return;
+ }
+
const auto audioOnly = settings.videoCodec() == QMediaFormat::VideoCodec::Unspecified;
auto primaryLocation = audioOnly ? QStandardPaths::MusicLocation : QStandardPaths::MoviesLocation;
@@ -294,10 +304,6 @@ void QGstreamerMediaEncoder::record(QMediaEncoderSettings &settings)
Q_ASSERT(!actualSink.isEmpty());
- gstPipeline.dumpGraph("before-recording");
-
- gstPipeline.beginConfig();
-
gstEncoder = QGstElement("encodebin", "encodebin");
auto *encodingProfile = createEncodingProfile(settings);
g_object_set (gstEncoder.object(), "profile", encodingProfile, nullptr);
@@ -305,30 +311,33 @@ void QGstreamerMediaEncoder::record(QMediaEncoderSettings &settings)
gstFileSink = QGstElement("filesink", "filesink");
gstFileSink.set("location", QFile::encodeName(actualSink.toLocalFile()).constData());
+ gstFileSink.set("async", false);
- gstPipeline.add(gstEncoder, gstFileSink);
- gstEncoder.link(gstFileSink);
-
- gstEncoder.setState(GST_STATE_PAUSED);
- gstFileSink.setState(GST_STATE_PAUSED);
+ QGstPad audioSink = {};
+ QGstPad videoSink = {};
- audioSrcPad = m_session->getAudioPad();
- if (!audioSrcPad.isNull()) {
- QGstPad audioPad = gstEncoder.getRequestPad("audio_%u");
- audioPauseControl.installOn(audioPad);
- audioSrcPad.link(audioPad);
+ if (hasAudio) {
+ audioSink = gstEncoder.getRequestPad("audio_%u");
+ if (audioSink.isNull())
+ qWarning() << "Unsupported audio codec";
+ else
+ audioPauseControl.installOn(audioSink);
}
- if (settings.videoCodec() != QMediaFormat::VideoCodec::Unspecified) {
- videoSrcPad = m_session->getVideoPad();
- if (!videoSrcPad.isNull()) {
- QGstPad videoPad = gstEncoder.getRequestPad("video_%u");
- videoPauseControl.installOn(videoPad);
- videoSrcPad.link(videoPad);
- }
+ if (hasVideo) {
+ videoSink = gstEncoder.getRequestPad("video_%u");
+ if (videoSink.isNull())
+ qWarning() << "Unsupported video codec";
+ else
+ videoPauseControl.installOn(videoSink);
}
- gstPipeline.endConfig();
+ gstPipeline.add(gstEncoder, gstFileSink);
+ gstEncoder.link(gstFileSink);
+ m_session->linkEncoder(audioSink, videoSink);
+
+ gstEncoder.syncStateWithParent();
+ gstFileSink.syncStateWithParent();
m_duration.start();
heartbeat.start();
@@ -344,8 +353,6 @@ void QGstreamerMediaEncoder::pause()
return;
heartbeat.stop();
gstPipeline.dumpGraph("before-pause");
- gstEncoder.setState(GST_STATE_PAUSED);
-
stateChanged(QMediaRecorder::PausedState);
}
@@ -355,7 +362,6 @@ void QGstreamerMediaEncoder::resume()
if (!m_session || state() != QMediaRecorder::PausedState)
return;
heartbeat.start();
- gstEncoder.setState(GST_STATE_PLAYING);
stateChanged(QMediaRecorder::RecordingState);
}
@@ -365,46 +371,29 @@ void QGstreamerMediaEncoder::stop()
return;
qCDebug(qLcMediaEncoder) << "stop";
- gstPipeline.beginConfig();
-
- if (!audioSrcPad.isNull()) {
- audioSrcPad.unlinkPeer();
- m_session->releaseAudioPad(audioSrcPad);
- audioSrcPad = {};
- }
- if (!videoSrcPad.isNull()) {
- videoSrcPad.unlinkPeer();
- m_session->releaseVideoPad(videoSrcPad);
- videoSrcPad = {};
- }
-
- gstPipeline.endConfig();
+ m_session->unlinkEncoder();
//with live sources it's necessary to send EOS even to pipeline
//before going to STOPPED state
qCDebug(qLcMediaEncoder) << ">>>>>>>>>>>>> sending EOS";
-
gstEncoder.sendEos();
- stateChanged(QMediaRecorder::StoppedState);
}
void QGstreamerMediaEncoder::finalize()
{
if (!m_session || gstEncoder.isNull())
return;
- qCDebug(qLcMediaEncoder) << "finalize";
+ qCDebug(qLcMediaEncoder) << "finalize";
heartbeat.stop();
- // The filesink can only be used once, replace it with a new one
- gstPipeline.beginConfig();
gstEncoder.setState(GST_STATE_NULL);
gstFileSink.setState(GST_STATE_NULL);
gstPipeline.remove(gstEncoder);
gstPipeline.remove(gstFileSink);
gstFileSink = {};
gstEncoder = {};
- gstPipeline.endConfig();
+ stateChanged(QMediaRecorder::StoppedState);
}
void QGstreamerMediaEncoder::setMetaData(const QMediaMetaData &metaData)
diff --git a/src/multimedia/platform/gstreamer/mediacapture/qgstreamermediaencoder_p.h b/src/multimedia/platform/gstreamer/mediacapture/qgstreamermediaencoder_p.h
index 7570a939b..2c9b480ce 100644
--- a/src/multimedia/platform/gstreamer/mediacapture/qgstreamermediaencoder_p.h
+++ b/src/multimedia/platform/gstreamer/mediacapture/qgstreamermediaencoder_p.h
@@ -86,6 +86,7 @@ public:
void setCaptureSession(QPlatformMediaCaptureSession *session);
+ QGstElement getEncoder() { return gstEncoder; }
private:
bool processBusMessage(const QGstreamerMessage& message) override;
public:
@@ -117,9 +118,6 @@ private:
QGstPipeline gstPipeline;
QGstBin gstEncoder;
QGstElement gstFileSink;
-
- QGstPad audioSrcPad;
- QGstPad videoSrcPad;
};
QT_END_NAMESPACE