Diffstat (limited to 'src/plugins/gstreamer/mediacapture/qgstreamercapturesession.cpp')
-rw-r--r--  src/plugins/gstreamer/mediacapture/qgstreamercapturesession.cpp | 1024
1 file changed, 0 insertions(+), 1024 deletions(-)
diff --git a/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.cpp b/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.cpp
deleted file mode 100644
index 8d87bb3..0000000
--- a/src/plugins/gstreamer/mediacapture/qgstreamercapturesession.cpp
+++ /dev/null
@@ -1,1024 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
-** All rights reserved.
-** Contact: Nokia Corporation (qt-info@nokia.com)
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL$
-** GNU Lesser General Public License Usage
-** This file may be used under the terms of the GNU Lesser General Public
-** License version 2.1 as published by the Free Software Foundation and
-** appearing in the file LICENSE.LGPL included in the packaging of this
-** file. Please review the following information to ensure the GNU Lesser
-** General Public License version 2.1 requirements will be met:
-** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
-**
-** In addition, as a special exception, Nokia gives you certain additional
-** rights. These rights are described in the Nokia Qt LGPL Exception
-** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
-**
-** GNU General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU General
-** Public License version 3.0 as published by the Free Software Foundation
-** and appearing in the file LICENSE.GPL included in the packaging of this
-** file. Please review the following information to ensure the GNU General
-** Public License version 3.0 requirements will be met:
-** http://www.gnu.org/copyleft/gpl.html.
-**
-** Other Usage
-** Alternatively, this file may be used in accordance with the terms and
-** conditions contained in a signed written agreement between you and Nokia.
-**
-**
-**
-**
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-#include "qgstreamercapturesession.h"
-#include "qgstreamerrecordercontrol.h"
-#include "qgstreamermediacontainercontrol.h"
-#include "qgstreamervideorendererinterface.h"
-#include "qgstreameraudioencode.h"
-#include "qgstreamervideoencode.h"
-#include "qgstreamerimageencode.h"
-#include <qmediarecorder.h>
-#include <private/qgstreamerbushelper_p.h>
-
-#include <gst/gsttagsetter.h>
-#include <gst/gstversion.h>
-
-#include <QtCore/qdebug.h>
-#include <QtCore/qurl.h>
-#include <QtCore/qset.h>
-#include <QCoreApplication>
-#include <QtCore/qmetaobject.h>
-#include <QtCore/qfile.h>
-
-#include <QtGui/qimage.h>
-
-#define gstRef(element) { gst_object_ref(GST_OBJECT(element)); gst_object_sink(GST_OBJECT(element)); }
-#define gstUnref(element) { if (element) { gst_object_unref(GST_OBJECT(element)); element = 0; } }
-
-QGstreamerCaptureSession::QGstreamerCaptureSession(QGstreamerCaptureSession::CaptureMode captureMode, QObject *parent)
- :QObject(parent),
- m_state(StoppedState),
- m_pendingState(StoppedState),
- m_waitingForEos(false),
- m_pipelineMode(EmptyPipeline),
- m_captureMode(captureMode),
- m_audioInputFactory(0),
- m_audioPreviewFactory(0),
- m_videoInputFactory(0),
- m_viewfinder(0),
- m_viewfinderInterface(0),
- m_audioSrc(0),
- m_audioTee(0),
- m_audioPreviewQueue(0),
- m_audioPreview(0),
- m_audioVolume(0),
- m_muted(false),
- m_videoSrc(0),
- m_videoTee(0),
- m_videoPreviewQueue(0),
- m_videoPreview(0),
- m_imageCaptureBin(0),
- m_encodeBin(0),
- m_passImage(false),
- m_passPrerollImage(false)
-{
- m_pipeline = gst_pipeline_new("media-capture-pipeline");
- gstRef(m_pipeline);
-
- m_bus = gst_element_get_bus(m_pipeline);
- m_busHelper = new QGstreamerBusHelper(m_bus, this);
- m_busHelper->installMessageFilter(this);
-
- m_audioEncodeControl = new QGstreamerAudioEncode(this);
- m_videoEncodeControl = new QGstreamerVideoEncode(this);
- m_imageEncodeControl = new QGstreamerImageEncode(this);
- m_recorderControl = new QGstreamerRecorderControl(this);
- m_mediaContainerControl = new QGstreamerMediaContainerControl(this);
-
- setState(StoppedState);
-}
-
-QGstreamerCaptureSession::~QGstreamerCaptureSession()
-{
- setState(StoppedState);
- gst_element_set_state(m_pipeline, GST_STATE_NULL);
- gst_object_unref(GST_OBJECT(m_pipeline));
-}
-
-void QGstreamerCaptureSession::setCaptureMode(CaptureMode mode)
-{
- m_captureMode = mode;
-}
-
-GstElement *QGstreamerCaptureSession::buildEncodeBin()
-{
- GstElement *encodeBin = gst_bin_new("encode-bin");
-
- GstElement *muxer = gst_element_factory_make( m_mediaContainerControl->formatElementName().constData(), "muxer");
- if (!muxer) {
- qWarning() << "Could not create a media muxer element:" << m_mediaContainerControl->formatElementName();
- gst_object_unref(encodeBin);
- return 0;
- }
-
- GstElement *fileSink = gst_element_factory_make("filesink", "filesink");
- g_object_set(G_OBJECT(fileSink), "location", m_sink.toString().toLocal8Bit().constData(), NULL);
- gst_bin_add_many(GST_BIN(encodeBin), muxer, fileSink, NULL);
-
- if (!gst_element_link(muxer, fileSink)) {
- gst_object_unref(encodeBin);
- return 0;
- }
-
- if (m_captureMode & Audio) {
- GstElement *audioConvert = gst_element_factory_make("audioconvert", "audioconvert");
- GstElement *audioQueue = gst_element_factory_make("queue", "audio-encode-queue");
- m_audioVolume = gst_element_factory_make("volume", "volume");
- gst_bin_add_many(GST_BIN(encodeBin), audioConvert, audioQueue, m_audioVolume, NULL);
-
- GstElement *audioEncoder = m_audioEncodeControl->createEncoder();
- if (!audioEncoder) {
- gst_object_unref(encodeBin);
- qWarning() << "Could not create an audio encoder element:" << m_audioEncodeControl->audioSettings().codec();
- return 0;
- }
-
- gst_bin_add(GST_BIN(encodeBin), audioEncoder);
-
- if (!gst_element_link_many(audioConvert, audioQueue, m_audioVolume, audioEncoder, muxer, NULL)) {
- gst_object_unref(encodeBin);
- return 0;
- }
-
- g_object_set(G_OBJECT(m_audioVolume), "volume", (m_muted ? 0.0 : 1.0), NULL);
-
- // add ghostpads
- GstPad *pad = gst_element_get_static_pad(audioConvert, "sink");
- gst_element_add_pad(GST_ELEMENT(encodeBin), gst_ghost_pad_new("audiosink", pad));
- gst_object_unref(GST_OBJECT(pad));
- }
-
- if (m_captureMode & Video) {
- GstElement *videoQueue = gst_element_factory_make("queue", "video-encode-queue");
- GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-encoder");
- GstElement *videoscale = gst_element_factory_make("videoscale","videoscale-encoder");
- gst_bin_add_many(GST_BIN(encodeBin), videoQueue, colorspace, videoscale, NULL);
-
- GstElement *videoEncoder = m_videoEncodeControl->createEncoder();
- if (!videoEncoder) {
- gst_object_unref(encodeBin);
- qWarning() << "Could not create a video encoder element:" << m_videoEncodeControl->videoSettings().codec();
- return 0;
- }
-
- gst_bin_add(GST_BIN(encodeBin), videoEncoder);
-
- if (!gst_element_link_many(videoQueue, colorspace, videoscale, videoEncoder, muxer, NULL)) {
- gst_object_unref(encodeBin);
- return 0;
- }
-
- // add ghostpads
- GstPad *pad = gst_element_get_static_pad(videoQueue, "sink");
- gst_element_add_pad(GST_ELEMENT(encodeBin), gst_ghost_pad_new("videosink", pad));
- gst_object_unref(GST_OBJECT(pad));
- }
-
- return encodeBin;
-}
-
-GstElement *QGstreamerCaptureSession::buildAudioSrc()
-{
- GstElement *audioSrc = 0;
- if (m_audioInputFactory)
- audioSrc = m_audioInputFactory->buildElement();
- else {
- audioSrc = gst_element_factory_make("pulsesrc", "audio_src");
- QString elementName = "alsasrc";
- QString device;
-
- if (m_captureDevice.startsWith("alsa:")) {
- device = m_captureDevice.mid(QString("alsa:").length());
- } else if (m_captureDevice.startsWith("oss:")) {
- elementName = "osssrc";
- device = m_captureDevice.mid(QString("oss:").length());
- } else if (m_captureDevice.startsWith("pulseaudio:")) {
- elementName = "pulsesrc";
- } else {
- elementName = "autoaudiosrc";
- }
-
- audioSrc = gst_element_factory_make(elementName.toAscii().constData(), "audio_src");
- if (audioSrc && !device.isEmpty())
- g_object_set(G_OBJECT(audioSrc), "device", device.toLocal8Bit().constData(), NULL);
- }
-
- if (!audioSrc) {
- emit error(int(QMediaRecorder::ResourceError), tr("Could not create an audio source element"));
- audioSrc = gst_element_factory_make("fakesrc", NULL);
- }
-
- return audioSrc;
-}
-
-GstElement *QGstreamerCaptureSession::buildAudioPreview()
-{
- GstElement *previewElement = 0;
-
- if (m_audioPreviewFactory) {
- previewElement = m_audioPreviewFactory->buildElement();
- } else {
-
-
-#if 1
- previewElement = gst_element_factory_make("fakesink", "audio-preview");
-#else
- GstElement *bin = gst_bin_new("audio-preview-bin");
- GstElement *visual = gst_element_factory_make("libvisual_lv_scope", "audio-preview");
- GstElement *sink = gst_element_factory_make("ximagesink", NULL);
- gst_bin_add_many(GST_BIN(bin), visual, sink, NULL);
- gst_element_link_many(visual,sink, NULL);
-
-
- // add ghostpads
- GstPad *pad = gst_element_get_static_pad(visual, "sink");
- Q_ASSERT(pad);
- gst_element_add_pad(GST_ELEMENT(bin), gst_ghost_pad_new("audiosink", pad));
- gst_object_unref(GST_OBJECT(pad));
-
- previewElement = bin;
-#endif
- }
-
- return previewElement;
-}
-
-GstElement *QGstreamerCaptureSession::buildVideoSrc()
-{
- GstElement *videoSrc = 0;
- if (m_videoInputFactory) {
- videoSrc = m_videoInputFactory->buildElement();
- } else {
- videoSrc = gst_element_factory_make("videotestsrc", "video_test_src");
- //videoSrc = gst_element_factory_make("v4l2src", "video_test_src");
- }
-
- return videoSrc;
-}
-
-GstElement *QGstreamerCaptureSession::buildVideoPreview()
-{
- GstElement *previewElement = 0;
-
- if (m_viewfinderInterface) {
- GstElement *bin = gst_bin_new("video-preview-bin");
- GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-preview");
- GstElement *capsFilter = gst_element_factory_make("capsfilter", "capsfilter-video-preview");
- GstElement *preview = m_viewfinderInterface->videoSink();
-
- gst_bin_add_many(GST_BIN(bin), colorspace, capsFilter, preview, NULL);
- gst_element_link(colorspace,capsFilter);
- gst_element_link(capsFilter,preview);
-
- QSize resolution;
- qreal frameRate = 0;
-
- if (m_captureMode & Video) {
- QVideoEncoderSettings videoSettings = m_videoEncodeControl->videoSettings();
- resolution = videoSettings.resolution();
- frameRate = videoSettings.frameRate();
- } else if (m_captureMode & Image) {
- resolution = m_imageEncodeControl->imageSettings().resolution();
- }
-
- if (!resolution.isEmpty() || frameRate > 0.001) {
- GstCaps *caps = gst_caps_new_empty();
- QStringList structureTypes;
- structureTypes << "video/x-raw-yuv" << "video/x-raw-rgb";
-
- foreach(const QString &structureType, structureTypes) {
- GstStructure *structure = gst_structure_new(structureType.toAscii().constData(), NULL);
-
- if (!resolution.isEmpty()) {
- gst_structure_set(structure, "width", G_TYPE_INT, resolution.width(), NULL);
- gst_structure_set(structure, "height", G_TYPE_INT, resolution.height(), NULL);
- }
-
- if (frameRate > 0.001) {
- QPair<int,int> rate = m_videoEncodeControl->rateAsRational();
-
-                    //qDebug() << "frame rate:" << rate.first << rate.second;
-
- gst_structure_set(structure, "framerate", GST_TYPE_FRACTION, rate.first, rate.second, NULL);
- }
-
- gst_caps_append_structure(caps,structure);
- }
-
- //qDebug() << "set video preview caps filter:" << gst_caps_to_string(caps);
-
- g_object_set(G_OBJECT(capsFilter), "caps", caps, NULL);
-
- }
-
- // add ghostpads
- GstPad *pad = gst_element_get_static_pad(colorspace, "sink");
- Q_ASSERT(pad);
- gst_element_add_pad(GST_ELEMENT(bin), gst_ghost_pad_new("videosink", pad));
- gst_object_unref(GST_OBJECT(pad));
-
- previewElement = bin;
- } else {
-#if 1
- previewElement = gst_element_factory_make("fakesink", "video-preview");
-#else
- GstElement *bin = gst_bin_new("video-preview-bin");
- GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-preview");
- GstElement *preview = gst_element_factory_make("ximagesink", "video-preview");
- gst_bin_add_many(GST_BIN(bin), colorspace, preview, NULL);
- gst_element_link(colorspace,preview);
-
- // add ghostpads
- GstPad *pad = gst_element_get_static_pad(colorspace, "sink");
- Q_ASSERT(pad);
- gst_element_add_pad(GST_ELEMENT(bin), gst_ghost_pad_new("videosink", pad));
- gst_object_unref(GST_OBJECT(pad));
-
- previewElement = bin;
-#endif
- }
-
- return previewElement;
-}
-
-
-static gboolean passImageFilter(GstElement *element,
- GstBuffer *buffer,
- void *appdata)
-{
- Q_UNUSED(element);
- Q_UNUSED(buffer);
-
- QGstreamerCaptureSession *session = (QGstreamerCaptureSession *)appdata;
- if (session->m_passImage || session->m_passPrerollImage) {
- session->m_passImage = false;
-
- if (session->m_passPrerollImage) {
- session->m_passPrerollImage = false;
- return TRUE;
- }
- session->m_passPrerollImage = false;
-
- QImage img;
-
- GstCaps *caps = gst_buffer_get_caps(buffer);
- if (caps) {
- GstStructure *structure = gst_caps_get_structure (caps, 0);
- gint width = 0;
- gint height = 0;
-
- if (structure &&
- gst_structure_get_int(structure, "width", &width) &&
- gst_structure_get_int(structure, "height", &height) &&
- width > 0 && height > 0) {
- if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-yuv") == 0) {
- guint32 fourcc = 0;
- gst_structure_get_fourcc(structure, "format", &fourcc);
-
- if (fourcc == GST_MAKE_FOURCC('I','4','2','0')) {
- img = QImage(width/2, height/2, QImage::Format_RGB32);
-
- const uchar *data = (const uchar *)buffer->data;
-
- for (int y=0; y<height; y+=2) {
- const uchar *yLine = data + y*width;
- const uchar *uLine = data + width*height + y*width/4;
- const uchar *vLine = data + width*height*5/4 + y*width/4;
-
- for (int x=0; x<width; x+=2) {
- const qreal Y = 1.164*(yLine[x]-16);
- const int U = uLine[x/2]-128;
- const int V = vLine[x/2]-128;
-
- int b = qBound(0, int(Y + 2.018*U), 255);
- int g = qBound(0, int(Y - 0.813*V - 0.391*U), 255);
- int r = qBound(0, int(Y + 1.596*V), 255);
-
- img.setPixel(x/2,y/2,qRgb(r,g,b));
- }
- }
- }
-
- } else if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb") == 0) {
- QImage::Format format = QImage::Format_Invalid;
- int bpp = 0;
- gst_structure_get_int(structure, "bpp", &bpp);
-
- if (bpp == 24)
- format = QImage::Format_RGB888;
- else if (bpp == 32)
- format = QImage::Format_RGB32;
-
- if (format != QImage::Format_Invalid) {
- img = QImage((const uchar *)buffer->data,
- width,
- height,
- format);
- img.bits(); //detach
- }
- }
- }
- gst_caps_unref(caps);
- }
-
- static int exposedSignalIndex = session->metaObject()->indexOfSignal("imageExposed(int)");
- session->metaObject()->method(exposedSignalIndex).invoke(session,
- Qt::QueuedConnection,
- Q_ARG(int,session->m_imageRequestId));
-
- static int capturedSignalIndex = session->metaObject()->indexOfSignal("imageCaptured(int,QImage)");
- session->metaObject()->method(capturedSignalIndex).invoke(session,
- Qt::QueuedConnection,
- Q_ARG(int,session->m_imageRequestId),
- Q_ARG(QImage,img));
-
- return TRUE;
- } else {
- return FALSE;
- }
-}
-
-static gboolean saveImageFilter(GstElement *element,
- GstBuffer *buffer,
- GstPad *pad,
- void *appdata)
-{
- Q_UNUSED(element);
- Q_UNUSED(pad);
- QGstreamerCaptureSession *session = (QGstreamerCaptureSession *)appdata;
-
- QString fileName = session->m_imageFileName;
-
- if (!fileName.isEmpty()) {
- QFile f(fileName);
- if (f.open(QFile::WriteOnly)) {
- f.write((const char *)buffer->data, buffer->size);
- f.close();
-
- static int signalIndex = session->metaObject()->indexOfSignal("imageSaved(int,QString)");
- session->metaObject()->method(signalIndex).invoke(session,
- Qt::QueuedConnection,
- Q_ARG(int,session->m_imageRequestId),
- Q_ARG(QString,fileName));
- }
- }
-
- return TRUE;
-}
-
-GstElement *QGstreamerCaptureSession::buildImageCapture()
-{
- GstElement *bin = gst_bin_new("image-capture-bin");
- GstElement *queue = gst_element_factory_make("queue", "queue-image-capture");
- GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-image-capture");
- GstElement *encoder = gst_element_factory_make("jpegenc", "image-encoder");
- GstElement *sink = gst_element_factory_make("fakesink","sink-image-capture");
-
- GstPad *pad = gst_element_get_static_pad(queue, "src");
- Q_ASSERT(pad);
- gst_pad_add_buffer_probe(pad, G_CALLBACK(passImageFilter), this);
-
- g_object_set(G_OBJECT(sink), "signal-handoffs", TRUE, NULL);
- g_signal_connect(G_OBJECT(sink), "handoff",
- G_CALLBACK(saveImageFilter), this);
-
- gst_bin_add_many(GST_BIN(bin), queue, colorspace, encoder, sink, NULL);
- gst_element_link_many(queue, colorspace, encoder, sink, NULL);
-
- // add ghostpads
- pad = gst_element_get_static_pad(queue, "sink");
- Q_ASSERT(pad);
- gst_element_add_pad(GST_ELEMENT(bin), gst_ghost_pad_new("imagesink", pad));
- gst_object_unref(GST_OBJECT(pad));
-
- m_passImage = false;
- m_passPrerollImage = true;
- m_imageFileName = QString();
-
- return bin;
-}
-
-void QGstreamerCaptureSession::captureImage(int requestId, const QString &fileName)
-{
- m_imageRequestId = requestId;
- m_imageFileName = fileName;
- m_passImage = true;
-}
-
-
-#define REMOVE_ELEMENT(element) { if (element) {gst_bin_remove(GST_BIN(m_pipeline), element); element = 0;} }
-
-bool QGstreamerCaptureSession::rebuildGraph(QGstreamerCaptureSession::PipelineMode newMode)
-{
- REMOVE_ELEMENT(m_audioSrc);
- REMOVE_ELEMENT(m_audioPreview);
- REMOVE_ELEMENT(m_audioPreviewQueue);
- REMOVE_ELEMENT(m_audioTee);
- REMOVE_ELEMENT(m_videoSrc);
- REMOVE_ELEMENT(m_videoPreview);
- REMOVE_ELEMENT(m_videoPreviewQueue);
- REMOVE_ELEMENT(m_videoTee);
- REMOVE_ELEMENT(m_encodeBin);
- REMOVE_ELEMENT(m_imageCaptureBin);
- m_audioVolume = 0;
-
- bool ok = true;
-
- switch (newMode) {
- case EmptyPipeline:
- break;
- case PreviewPipeline:
- if (m_captureMode & Audio) {
- m_audioSrc = buildAudioSrc();
- m_audioPreview = buildAudioPreview();
-
- ok &= m_audioSrc && m_audioPreview;
-
- if (ok) {
- gst_bin_add_many(GST_BIN(m_pipeline), m_audioSrc, m_audioPreview, NULL);
- ok &= gst_element_link(m_audioSrc, m_audioPreview);
- }
- }
- if (m_captureMode & Video || m_captureMode & Image) {
- m_videoSrc = buildVideoSrc();
- m_videoTee = gst_element_factory_make("tee", "video-preview-tee");
- m_videoPreviewQueue = gst_element_factory_make("queue", "video-preview-queue");
- m_videoPreview = buildVideoPreview();
- m_imageCaptureBin = buildImageCapture();
-
- ok &= m_videoSrc && m_videoTee && m_videoPreviewQueue && m_videoPreview && m_imageCaptureBin;
-
- if (ok) {
- gst_bin_add_many(GST_BIN(m_pipeline), m_videoSrc, m_videoTee,
- m_videoPreviewQueue, m_videoPreview,
- m_imageCaptureBin, NULL);
-
- ok &= gst_element_link(m_videoSrc, m_videoTee);
- ok &= gst_element_link(m_videoTee, m_videoPreviewQueue);
- ok &= gst_element_link(m_videoPreviewQueue, m_videoPreview);
- ok &= gst_element_link(m_videoTee, m_imageCaptureBin);
- }
- }
- break;
- case RecordingPipeline:
- m_encodeBin = buildEncodeBin();
- gst_bin_add(GST_BIN(m_pipeline), m_encodeBin);
-
- if (m_captureMode & Audio) {
- m_audioSrc = buildAudioSrc();
- ok &= m_audioSrc != 0;
-
- gst_bin_add(GST_BIN(m_pipeline), m_audioSrc);
- ok &= gst_element_link(m_audioSrc, m_encodeBin);
- }
-
- if (m_captureMode & Video) {
- m_videoSrc = buildVideoSrc();
- ok &= m_videoSrc != 0;
-
- gst_bin_add(GST_BIN(m_pipeline), m_videoSrc);
- ok &= gst_element_link(m_videoSrc, m_encodeBin);
- }
-
- if (!m_metaData.isEmpty())
- setMetaData(m_metaData);
-
- break;
- case PreviewAndRecordingPipeline:
- m_encodeBin = buildEncodeBin();
- if (m_encodeBin)
- gst_bin_add(GST_BIN(m_pipeline), m_encodeBin);
-
- ok &= m_encodeBin != 0;
-
- if (ok && m_captureMode & Audio) {
- m_audioSrc = buildAudioSrc();
- m_audioPreview = buildAudioPreview();
- m_audioTee = gst_element_factory_make("tee", NULL);
- m_audioPreviewQueue = gst_element_factory_make("queue", NULL);
-
- ok &= m_audioSrc && m_audioPreview && m_audioTee && m_audioPreviewQueue;
-
- if (ok) {
- gst_bin_add_many(GST_BIN(m_pipeline), m_audioSrc, m_audioTee,
- m_audioPreviewQueue, m_audioPreview, NULL);
- ok &= gst_element_link(m_audioSrc, m_audioTee);
- ok &= gst_element_link(m_audioTee, m_audioPreviewQueue);
- ok &= gst_element_link(m_audioPreviewQueue, m_audioPreview);
- ok &= gst_element_link(m_audioTee, m_encodeBin);
- }
- }
-
- if (ok && (m_captureMode & Video || m_captureMode & Image)) {
- m_videoSrc = buildVideoSrc();
- m_videoPreview = buildVideoPreview();
- m_videoTee = gst_element_factory_make("tee", NULL);
- m_videoPreviewQueue = gst_element_factory_make("queue", NULL);
-
- ok &= m_videoSrc && m_videoPreview && m_videoTee && m_videoPreviewQueue;
-
- if (ok) {
- gst_bin_add_many(GST_BIN(m_pipeline), m_videoSrc, m_videoTee,
- m_videoPreviewQueue, m_videoPreview, NULL);
- ok &= gst_element_link(m_videoSrc, m_videoTee);
- ok &= gst_element_link(m_videoTee, m_videoPreviewQueue);
- ok &= gst_element_link(m_videoPreviewQueue, m_videoPreview);
- }
-
- if (ok && (m_captureMode & Video))
- ok &= gst_element_link(m_videoTee, m_encodeBin);
- }
-
- if (!m_metaData.isEmpty())
- setMetaData(m_metaData);
-
-
- break;
- }
-
- if (!ok) {
- emit error(int(QMediaRecorder::FormatError),tr("Failed to build media capture pipeline."));
- }
-
- dumpGraph( QString("rebuild_graph_%1_%2").arg(m_pipelineMode).arg(newMode) );
- if (m_encodeBin) {
- QString fileName = QString("rebuild_graph_encode_%1_%2").arg(m_pipelineMode).arg(newMode);
-#if !(GST_DISABLE_GST_DEBUG) && (GST_VERSION_MAJOR >= 0) && (GST_VERSION_MINOR >= 10) && (GST_VERSION_MICRO >= 19)
- _gst_debug_bin_to_dot_file(GST_BIN(m_encodeBin), GST_DEBUG_GRAPH_SHOW_ALL, fileName.toAscii());
-#endif
- }
-
- if (ok) {
- m_pipelineMode = newMode;
- } else {
- m_pipelineMode = EmptyPipeline;
-
- REMOVE_ELEMENT(m_audioSrc);
- REMOVE_ELEMENT(m_audioPreview);
- REMOVE_ELEMENT(m_audioPreviewQueue);
- REMOVE_ELEMENT(m_audioTee);
- REMOVE_ELEMENT(m_videoSrc);
- REMOVE_ELEMENT(m_videoPreview);
- REMOVE_ELEMENT(m_videoPreviewQueue);
- REMOVE_ELEMENT(m_videoTee);
- REMOVE_ELEMENT(m_encodeBin);
- }
-
- return ok;
-}
-
-void QGstreamerCaptureSession::dumpGraph(const QString &fileName)
-{
-#if !(GST_DISABLE_GST_DEBUG) && (GST_VERSION_MAJOR >= 0) && (GST_VERSION_MINOR >= 10) && (GST_VERSION_MICRO >= 19)
- _gst_debug_bin_to_dot_file(GST_BIN(m_pipeline),
- GstDebugGraphDetails(/*GST_DEBUG_GRAPH_SHOW_ALL |*/ GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES),
- fileName.toAscii());
-#endif
-}
-
-QUrl QGstreamerCaptureSession::outputLocation() const
-{
- return m_sink;
-}
-
-bool QGstreamerCaptureSession::setOutputLocation(const QUrl& sink)
-{
- m_sink = sink;
- return true;
-}
-
-void QGstreamerCaptureSession::setAudioInput(QGstreamerElementFactory *audioInput)
-{
- m_audioInputFactory = audioInput;
-}
-
-void QGstreamerCaptureSession::setAudioPreview(QGstreamerElementFactory *audioPreview)
-{
- m_audioPreviewFactory = audioPreview;
-}
-
-void QGstreamerCaptureSession::setVideoInput(QGstreamerVideoInput *videoInput)
-{
- m_videoInputFactory = videoInput;
-}
-
-void QGstreamerCaptureSession::setVideoPreview(QObject *viewfinder)
-{
- m_viewfinderInterface = qobject_cast<QGstreamerVideoRendererInterface*>(viewfinder);
- if (!m_viewfinderInterface)
- viewfinder = 0;
-
- if (m_viewfinder != viewfinder) {
- bool oldReady = isReady();
-
- if (m_viewfinder) {
- disconnect(m_viewfinder, SIGNAL(sinkChanged()),
- this, SIGNAL(viewfinderChanged()));
- disconnect(m_viewfinder, SIGNAL(readyChanged(bool)),
- this, SIGNAL(readyChanged(bool)));
-
- m_busHelper->removeMessageFilter(m_viewfinder);
- }
-
- m_viewfinder = viewfinder;
- //m_viewfinderHasChanged = true;
-
- if (m_viewfinder) {
- connect(m_viewfinder, SIGNAL(sinkChanged()),
- this, SIGNAL(viewfinderChanged()));
- connect(m_viewfinder, SIGNAL(readyChanged(bool)),
- this, SIGNAL(readyChanged(bool)));
-
- m_busHelper->installMessageFilter(m_viewfinder);
- }
-
- emit viewfinderChanged();
- if (oldReady != isReady())
- emit readyChanged(isReady());
- }
-}
-
-bool QGstreamerCaptureSession::isReady() const
-{
- return m_viewfinderInterface != 0 && m_viewfinderInterface->isReady();
-}
-
-QGstreamerCaptureSession::State QGstreamerCaptureSession::state() const
-{
- return m_state;
-}
-
-void QGstreamerCaptureSession::waitForStopped()
-{
- GstState state = GST_STATE_PLAYING;
- gst_element_get_state(m_pipeline, &state, 0, 0);
-
- while (state != GST_STATE_NULL) {
- qApp->processEvents();
- gst_element_get_state(m_pipeline, &state, 0, 0);
- }
-}
-
-void QGstreamerCaptureSession::setState(QGstreamerCaptureSession::State newState)
-{
- if (newState == m_pendingState && !m_waitingForEos)
- return;
-
- m_pendingState = newState;
-
- PipelineMode newMode = EmptyPipeline;
-
- switch (newState) {
- case PausedState:
- case RecordingState:
- newMode = PreviewAndRecordingPipeline;
- break;
- case PreviewState:
- newMode = PreviewPipeline;
- break;
- case StoppedState:
- newMode = EmptyPipeline;
- break;
- }
-
- if (newMode != m_pipelineMode) {
- if (m_pipelineMode == PreviewAndRecordingPipeline) {
- if (!m_waitingForEos) {
- m_waitingForEos = true;
- //qDebug() << "Waiting for EOS";
-                //With live sources it's necessary to send an EOS event to the pipeline
-                //before going to the STOPPED state
- gst_element_send_event(m_pipeline, gst_event_new_eos());
-                // Unless GStreamer is in GST_STATE_PLAYING, our EOS message will not be received.
- gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
-
- return;
- } else {
- m_waitingForEos = false;
- //qDebug() << "EOS received";
- }
- }
-
- //select suitable default codecs/containers, if necessary
- m_recorderControl->applySettings();
-
- gst_element_set_state(m_pipeline, GST_STATE_NULL);
-
-        //It would be better to do this asynchronously, but
-        //GStreamer doesn't notify when the pipeline has reached the NULL state
- waitForStopped();
- if (!rebuildGraph(newMode)) {
- m_pendingState = StoppedState;
- m_state = StoppedState;
- emit stateChanged(StoppedState);
-
- return;
- }
- }
-
- switch (newState) {
- case PausedState:
- gst_element_set_state(m_pipeline, GST_STATE_PAUSED);
- break;
- case RecordingState:
- case PreviewState:
- gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
- break;
- case StoppedState:
- gst_element_set_state(m_pipeline, GST_STATE_NULL);
- }
-
-    //We have to do it here, since GStreamer will not emit bus messages any more
- if (newState == StoppedState) {
- m_state = StoppedState;
- emit stateChanged(StoppedState);
- }
-}
-
-
-qint64 QGstreamerCaptureSession::duration() const
-{
- GstFormat format = GST_FORMAT_TIME;
- gint64 duration = 0;
-
- if ( m_encodeBin && gst_element_query_position(m_encodeBin, &format, &duration))
- return duration / 1000000;
- else
- return 0;
-}
-
-void QGstreamerCaptureSession::setCaptureDevice(const QString &deviceName)
-{
- m_captureDevice = deviceName;
-}
-
-void QGstreamerCaptureSession::setMetaData(const QMap<QByteArray, QVariant> &data)
-{
- //qDebug() << "QGstreamerCaptureSession::setMetaData" << data;
- m_metaData = data;
-
- if (m_encodeBin) {
- GstIterator *elements = gst_bin_iterate_all_by_interface(GST_BIN(m_encodeBin), GST_TYPE_TAG_SETTER);
- GstElement *element = 0;
- while (gst_iterator_next(elements, (void**)&element) == GST_ITERATOR_OK) {
- //qDebug() << "found element with tag setter interface:" << gst_element_get_name(element);
- QMapIterator<QByteArray, QVariant> it(data);
- while (it.hasNext()) {
- it.next();
- const QString tagName = it.key();
- const QVariant tagValue = it.value();
-
-
- switch(tagValue.type()) {
- case QVariant::String:
- gst_tag_setter_add_tags(GST_TAG_SETTER(element),
- GST_TAG_MERGE_REPLACE_ALL,
- tagName.toUtf8().constData(),
- tagValue.toString().toUtf8().constData(),
- NULL);
- break;
- case QVariant::Int:
- case QVariant::LongLong:
- gst_tag_setter_add_tags(GST_TAG_SETTER(element),
- GST_TAG_MERGE_REPLACE_ALL,
- tagName.toUtf8().constData(),
- tagValue.toInt(),
- NULL);
- break;
- case QVariant::Double:
- gst_tag_setter_add_tags(GST_TAG_SETTER(element),
- GST_TAG_MERGE_REPLACE_ALL,
- tagName.toUtf8().constData(),
- tagValue.toDouble(),
- NULL);
- break;
- default:
- break;
- }
-
- }
-
- }
- }
-}
-
-bool QGstreamerCaptureSession::processBusMessage(const QGstreamerMessage &message)
-{
- GstMessage* gm = message.rawMessage();
-
- if (gm) {
- if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ERROR) {
- GError *err;
- gchar *debug;
- gst_message_parse_error (gm, &err, &debug);
- emit error(int(QMediaRecorder::ResourceError),QString::fromUtf8(err->message));
- g_error_free (err);
- g_free (debug);
- }
-
- if (GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_pipeline)) {
- switch (GST_MESSAGE_TYPE(gm)) {
- case GST_MESSAGE_DURATION:
- break;
-
- case GST_MESSAGE_EOS:
- if (m_waitingForEos)
- setState(m_pendingState);
- break;
-
- case GST_MESSAGE_STATE_CHANGED:
- {
-
- GstState oldState;
- GstState newState;
- GstState pending;
-
- gst_message_parse_state_changed(gm, &oldState, &newState, &pending);
-
- QStringList states;
- states << "GST_STATE_VOID_PENDING" << "GST_STATE_NULL" << "GST_STATE_READY" << "GST_STATE_PAUSED" << "GST_STATE_PLAYING";
-
- /*
- qDebug() << QString("state changed: old: %1 new: %2 pending: %3") \
- .arg(states[oldState]) \
- .arg(states[newState]) \
- .arg(states[pending]);
-
- #define ENUM_NAME(c,e,v) (c::staticMetaObject.enumerator(c::staticMetaObject.indexOfEnumerator(e)).valueToKey((v)))
-
- qDebug() << "Current session state:" << ENUM_NAME(QGstreamerCaptureSession,"State",m_state);
- qDebug() << "Pending session state:" << ENUM_NAME(QGstreamerCaptureSession,"State",m_pendingState);
- */
-
- switch (newState) {
- case GST_STATE_VOID_PENDING:
- case GST_STATE_NULL:
- case GST_STATE_READY:
- if (m_state != StoppedState && m_pendingState == StoppedState) {
- emit stateChanged(m_state = StoppedState);
- dumpGraph("stopped");
- }
- break;
- case GST_STATE_PAUSED:
- if (m_state != PausedState && m_pendingState == PausedState)
- emit stateChanged(m_state = PausedState);
- dumpGraph("paused");
-
- if (m_pipelineMode == RecordingPipeline && !m_metaData.isEmpty())
- setMetaData(m_metaData);
- break;
- case GST_STATE_PLAYING:
- {
- if ((m_pendingState == PreviewState || m_pendingState == RecordingState) &&
- m_state != m_pendingState)
- {
- m_state = m_pendingState;
- emit stateChanged(m_state);
- }
-
- if (m_pipelineMode == PreviewPipeline)
- dumpGraph("preview");
- else
- dumpGraph("recording");
- }
- break;
- }
- }
- break;
- default:
- break;
- }
- //qDebug() << "New session state:" << ENUM_NAME(QGstreamerCaptureSession,"State",m_state);
- }
- }
- return false;
-}
-
-void QGstreamerCaptureSession::setMuted(bool muted)
-{
- if (m_muted != muted) {
- m_muted = muted;
- if (m_audioVolume)
- g_object_set(G_OBJECT(m_audioVolume), "volume", (m_muted ? 0.0 : 1.0), NULL);
- emit mutedChanged(muted);
- }
-}