diff options
Diffstat (limited to 'src/plugins/multimedia/gstreamer')
56 files changed, 13342 insertions, 0 deletions
diff --git a/src/plugins/multimedia/gstreamer/CMakeLists.txt b/src/plugins/multimedia/gstreamer/CMakeLists.txt new file mode 100644 index 000000000..1ef1f9a36 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/CMakeLists.txt @@ -0,0 +1,73 @@ +# Copyright (C) 2022 The Qt Company Ltd. +# SPDX-License-Identifier: BSD-3-Clause + +qt_find_package(EGL) + +qt_internal_add_module(QGstreamerMediaPluginPrivate + STATIC + INTERNAL_MODULE + SOURCES + audio/qgstreameraudiodevice.cpp audio/qgstreameraudiodevice_p.h + audio/qgstreameraudiodecoder.cpp audio/qgstreameraudiodecoder_p.h + common/qglist_helper_p.h + common/qgst.cpp common/qgst_p.h + common/qgst_debug.cpp common/qgst_debug_p.h + common/qgst_handle_types_p.h + common/qgstappsource.cpp common/qgstappsource_p.h + common/qgstreameraudioinput.cpp common/qgstreameraudioinput_p.h + common/qgstreameraudiooutput.cpp common/qgstreameraudiooutput_p.h + common/qgstreamerbufferprobe.cpp common/qgstreamerbufferprobe_p.h + common/qgstreamermetadata.cpp common/qgstreamermetadata_p.h + common/qgstreamermessage_p.h + common/qgstreamermediaplayer.cpp common/qgstreamermediaplayer_p.h + common/qgstreamervideooutput.cpp common/qgstreamervideooutput_p.h + common/qgstreamervideooverlay.cpp common/qgstreamervideooverlay_p.h + common/qgstreamervideosink.cpp common/qgstreamervideosink_p.h + common/qgstpipeline.cpp common/qgstpipeline_p.h + common/qgstutils.cpp common/qgstutils_p.h + common/qgstvideobuffer.cpp common/qgstvideobuffer_p.h + common/qgstvideorenderersink.cpp common/qgstvideorenderersink_p.h + common/qgstsubtitlesink.cpp common/qgstsubtitlesink_p.h + qgstreamerintegration.cpp qgstreamerintegration_p.h + qgstreamerformatinfo.cpp qgstreamerformatinfo_p.h + qgstreamervideodevices.cpp qgstreamervideodevices_p.h + mediacapture/qgstreamercamera.cpp mediacapture/qgstreamercamera_p.h + mediacapture/qgstreamerimagecapture.cpp mediacapture/qgstreamerimagecapture_p.h + mediacapture/qgstreamermediacapture.cpp 
mediacapture/qgstreamermediacapture_p.h + mediacapture/qgstreamermediaencoder.cpp mediacapture/qgstreamermediaencoder_p.h + NO_UNITY_BUILD_SOURCES + # Conflicts with macros defined in X11.h, and Xlib.h + common/qgstvideobuffer.cpp + common/qgstreamervideosink.cpp + NO_GENERATE_CPP_EXPORTS + DEFINES + GLIB_VERSION_MIN_REQUIRED=GLIB_VERSION_2_26 + PUBLIC_LIBRARIES + Qt::MultimediaPrivate + Qt::CorePrivate + GStreamer::GStreamer + GStreamer::App +) + +qt_internal_extend_target(QGstreamerMediaPluginPrivate CONDITION QT_FEATURE_gstreamer_photography + PUBLIC_LIBRARIES + GStreamer::Photography +) + +qt_internal_extend_target(QGstreamerMediaPluginPrivate CONDITION QT_FEATURE_gstreamer_gl + PUBLIC_LIBRARIES + GStreamer::Gl + LIBRARIES + EGL::EGL +) + +qt_internal_add_plugin(QGstreamerMediaPlugin + OUTPUT_NAME gstreamermediaplugin + PLUGIN_TYPE multimedia + SOURCES + qgstreamerplugin.cpp + gstreamer.json + LIBRARIES + Qt::QGstreamerMediaPluginPrivate + Qt::MultimediaPrivate +) diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder.cpp b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder.cpp new file mode 100644 index 000000000..280b43cdb --- /dev/null +++ b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder.cpp @@ -0,0 +1,531 @@ +// Copyright (C) 2020 The Qt Company Ltd. 
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only +//#define DEBUG_DECODER + +#include <audio/qgstreameraudiodecoder_p.h> + +#include <common/qgstreamermessage_p.h> +#include <common/qgst_debug_p.h> +#include <common/qgstutils_p.h> + +#include <gst/gstvalue.h> +#include <gst/base/gstbasesrc.h> + +#include <QtCore/qdatetime.h> +#include <QtCore/qdebug.h> +#include <QtCore/qsize.h> +#include <QtCore/qtimer.h> +#include <QtCore/qdebug.h> +#include <QtCore/qdir.h> +#include <QtCore/qstandardpaths.h> +#include <QtCore/qurl.h> +#include <QtCore/qloggingcategory.h> + +QT_BEGIN_NAMESPACE + +static Q_LOGGING_CATEGORY(qLcGstreamerAudioDecoder, "qt.multimedia.gstreameraudiodecoder"); + +typedef enum { + GST_PLAY_FLAG_VIDEO = 0x00000001, + GST_PLAY_FLAG_AUDIO = 0x00000002, + GST_PLAY_FLAG_TEXT = 0x00000004, + GST_PLAY_FLAG_VIS = 0x00000008, + GST_PLAY_FLAG_SOFT_VOLUME = 0x00000010, + GST_PLAY_FLAG_NATIVE_AUDIO = 0x00000020, + GST_PLAY_FLAG_NATIVE_VIDEO = 0x00000040, + GST_PLAY_FLAG_DOWNLOAD = 0x00000080, + GST_PLAY_FLAG_BUFFERING = 0x000000100 +} GstPlayFlags; + + +QMaybe<QPlatformAudioDecoder *> QGstreamerAudioDecoder::create(QAudioDecoder *parent) +{ + static const auto error = qGstErrorMessageIfElementsNotAvailable("audioconvert", "playbin"); + if (error) + return *error; + + return new QGstreamerAudioDecoder(parent); +} + +QGstreamerAudioDecoder::QGstreamerAudioDecoder(QAudioDecoder *parent) + : QPlatformAudioDecoder(parent), + m_playbin{ + QGstPipeline::adopt(GST_PIPELINE_CAST( + QGstElement::createFromFactory("playbin", "playbin").element())), + }, + m_audioConvert{ + QGstElement::createFromFactory("audioconvert", "audioconvert"), + } +{ + // Sort out messages + m_playbin.installMessageFilter(this); + + // Set the rest of the pipeline up + setAudioFlags(true); + + m_outputBin = QGstBin::create("audio-output-bin"); + m_outputBin.add(m_audioConvert); + + // add ghostpad + m_outputBin.addGhostPad(m_audioConvert, "sink"); 
+ + g_object_set(m_playbin.object(), "audio-sink", m_outputBin.element(), NULL); + + m_deepNotifySourceConnection = m_playbin.connect( + "deep-notify::source", (GCallback)&configureAppSrcElement, (gpointer)this); + + // Set volume to 100% + gdouble volume = 1.0; + m_playbin.set("volume", volume); +} + +QGstreamerAudioDecoder::~QGstreamerAudioDecoder() +{ + stop(); + + m_playbin.removeMessageFilter(this); + + delete m_appSrc; +} + +void QGstreamerAudioDecoder::configureAppSrcElement([[maybe_unused]] GObject *object, GObject *orig, + [[maybe_unused]] GParamSpec *pspec, + QGstreamerAudioDecoder *self) +{ + // In case we switch from appsrc to not + if (!self->m_appSrc) + return; + + QGstElementHandle appsrc; + g_object_get(orig, "source", &appsrc, NULL); + + auto *qAppSrc = self->m_appSrc; + qAppSrc->setExternalAppSrc(QGstAppSrc{ + qGstSafeCast<GstAppSrc>(appsrc.get()), + QGstAppSrc::NeedsRef, // CHECK: can we `release()`? + }); + qAppSrc->setup(self->mDevice); +} + +bool QGstreamerAudioDecoder::processBusMessage(const QGstreamerMessage &message) +{ + qCDebug(qLcGstreamerAudioDecoder) << "received bus message:" << message; + + GstMessage *gm = message.message(); + + switch (message.type()) { + case GST_MESSAGE_DURATION: { + updateDuration(); + return false; + } + + case GST_MESSAGE_ERROR: { + qCDebug(qLcGstreamerAudioDecoder) << " error" << QCompactGstMessageAdaptor(message); + + QUniqueGErrorHandle err; + QGString debug; + gst_message_parse_error(gm, &err, &debug); + + if (message.source() == m_playbin) { + if (err.get()->domain == GST_STREAM_ERROR + && err.get()->code == GST_STREAM_ERROR_CODEC_NOT_FOUND) + processInvalidMedia(QAudioDecoder::FormatError, + tr("Cannot play stream of type: <unknown>")); + else + processInvalidMedia(QAudioDecoder::ResourceError, + QString::fromUtf8(err.get()->message)); + } else { + QAudioDecoder::Error qerror = QAudioDecoder::ResourceError; + if (err.get()->domain == GST_STREAM_ERROR) { + switch (err.get()->code) { + case 
GST_STREAM_ERROR_DECRYPT: + case GST_STREAM_ERROR_DECRYPT_NOKEY: + qerror = QAudioDecoder::AccessDeniedError; + break; + case GST_STREAM_ERROR_FORMAT: + case GST_STREAM_ERROR_DEMUX: + case GST_STREAM_ERROR_DECODE: + case GST_STREAM_ERROR_WRONG_TYPE: + case GST_STREAM_ERROR_TYPE_NOT_FOUND: + case GST_STREAM_ERROR_CODEC_NOT_FOUND: + qerror = QAudioDecoder::FormatError; + break; + default: + break; + } + } else if (err.get()->domain == GST_CORE_ERROR) { + switch (err.get()->code) { + case GST_CORE_ERROR_MISSING_PLUGIN: + qerror = QAudioDecoder::FormatError; + break; + default: + break; + } + } + + processInvalidMedia(qerror, QString::fromUtf8(err.get()->message)); + } + break; + } + + default: + if (message.source() == m_playbin) + return handlePlaybinMessage(message); + } + + return false; +} + +bool QGstreamerAudioDecoder::handlePlaybinMessage(const QGstreamerMessage &message) +{ + GstMessage *gm = message.message(); + + switch (GST_MESSAGE_TYPE(gm)) { + case GST_MESSAGE_STATE_CHANGED: { + GstState oldState; + GstState newState; + GstState pending; + + gst_message_parse_state_changed(gm, &oldState, &newState, &pending); + + bool isDecoding = false; + switch (newState) { + case GST_STATE_VOID_PENDING: + case GST_STATE_NULL: + case GST_STATE_READY: + break; + case GST_STATE_PLAYING: + isDecoding = true; + break; + case GST_STATE_PAUSED: + isDecoding = true; + + // gstreamer doesn't give a reliable indication the duration + // information is ready, GST_MESSAGE_DURATION is not sent by most elements + // the duration is queried up to 5 times with increasing delay + m_durationQueries = 5; + updateDuration(); + break; + } + + setIsDecoding(isDecoding); + break; + }; + + case GST_MESSAGE_EOS: + m_playbin.setState(GST_STATE_NULL); + finished(); + break; + + case GST_MESSAGE_ERROR: + Q_UNREACHABLE_RETURN(false); // handled in processBusMessage + + case GST_MESSAGE_WARNING: + qCWarning(qLcGstreamerAudioDecoder) << "Warning:" << QCompactGstMessageAdaptor(message); + break; + + 
case GST_MESSAGE_INFO: { + if (qLcGstreamerAudioDecoder().isDebugEnabled()) + qCWarning(qLcGstreamerAudioDecoder) << "Info:" << QCompactGstMessageAdaptor(message); + break; + } + default: + break; + } + + return false; +} + +QUrl QGstreamerAudioDecoder::source() const +{ + return mSource; +} + +void QGstreamerAudioDecoder::setSource(const QUrl &fileName) +{ + stop(); + mDevice = nullptr; + delete m_appSrc; + m_appSrc = nullptr; + + bool isSignalRequired = (mSource != fileName); + mSource = fileName; + if (isSignalRequired) + sourceChanged(); +} + +QIODevice *QGstreamerAudioDecoder::sourceDevice() const +{ + return mDevice; +} + +void QGstreamerAudioDecoder::setSourceDevice(QIODevice *device) +{ + stop(); + mSource.clear(); + bool isSignalRequired = (mDevice != device); + mDevice = device; + if (isSignalRequired) + sourceChanged(); +} + +void QGstreamerAudioDecoder::start() +{ + addAppSink(); + + if (!mSource.isEmpty()) { + m_playbin.set("uri", mSource.toEncoded().constData()); + } else if (mDevice) { + // make sure we can read from device + if (!mDevice->isOpen() || !mDevice->isReadable()) { + processInvalidMedia(QAudioDecoder::ResourceError, QLatin1String("Unable to read from specified device")); + return; + } + + if (!m_appSrc) { + auto maybeAppSrc = QGstAppSource::create(this); + if (maybeAppSrc) { + m_appSrc = maybeAppSrc.value(); + } else { + processInvalidMedia(QAudioDecoder::ResourceError, maybeAppSrc.error()); + return; + } + } + + m_playbin.set("uri", "appsrc://"); + } else { + return; + } + + // Set audio format + if (m_appSink) { + if (mFormat.isValid()) { + setAudioFlags(false); + auto caps = QGstUtils::capsForAudioFormat(mFormat); + m_appSink.setCaps(caps); + } else { + // We want whatever the native audio format is + setAudioFlags(true); + m_appSink.setCaps({}); + } + } + + if (m_playbin.setState(GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) { + qWarning() << "GStreamer; Unable to start decoding process"; + m_playbin.dumpGraph("failed"); + return; + 
} +} + +void QGstreamerAudioDecoder::stop() +{ + m_playbin.setState(GST_STATE_NULL); + m_currentSessionId += 1; + removeAppSink(); + + // GStreamer thread is stopped. Can safely access m_buffersAvailable + if (m_buffersAvailable != 0) { + m_buffersAvailable = 0; + bufferAvailableChanged(false); + } + + if (m_position != invalidPosition) { + m_position = invalidPosition; + positionChanged(m_position.count()); + } + + if (m_duration != invalidDuration) { + m_duration = invalidDuration; + durationChanged(m_duration.count()); + } + + setIsDecoding(false); +} + +QAudioFormat QGstreamerAudioDecoder::audioFormat() const +{ + return mFormat; +} + +void QGstreamerAudioDecoder::setAudioFormat(const QAudioFormat &format) +{ + if (mFormat != format) { + mFormat = format; + formatChanged(mFormat); + } +} + +QAudioBuffer QGstreamerAudioDecoder::read() +{ + using namespace std::chrono; + + QAudioBuffer audioBuffer; + + if (m_buffersAvailable == 0) + return audioBuffer; + + m_buffersAvailable -= 1; + + if (m_buffersAvailable == 0) + bufferAvailableChanged(false); + + QGstSampleHandle sample = m_appSink.pullSample(); + GstBuffer *buffer = gst_sample_get_buffer(sample.get()); + GstMapInfo mapInfo; + gst_buffer_map(buffer, &mapInfo, GST_MAP_READ); + const char *bufferData = (const char *)mapInfo.data; + int bufferSize = mapInfo.size; + QAudioFormat format = QGstUtils::audioFormatForSample(sample.get()); + + if (format.isValid()) { + // XXX At the moment we have to copy data from GstBuffer into QAudioBuffer. + // We could improve performance by implementing QAbstractAudioBuffer for GstBuffer. 
+ nanoseconds position = getPositionFromBuffer(buffer); + audioBuffer = QAudioBuffer{ + QByteArray(bufferData, bufferSize), + format, + round<microseconds>(position).count(), + }; + milliseconds positionInMs = round<milliseconds>(position); + if (position != m_position) { + m_position = positionInMs; + positionChanged(m_position.count()); + } + } + gst_buffer_unmap(buffer, &mapInfo); + + return audioBuffer; +} + +qint64 QGstreamerAudioDecoder::position() const +{ + return m_position.count(); +} + +qint64 QGstreamerAudioDecoder::duration() const +{ + return m_duration.count(); +} + +void QGstreamerAudioDecoder::processInvalidMedia(QAudioDecoder::Error errorCode, const QString& errorString) +{ + stop(); + error(int(errorCode), errorString); +} + +GstFlowReturn QGstreamerAudioDecoder::newSample(GstAppSink *) +{ + // "Note that the preroll buffer will also be returned as the first buffer when calling + // gst_app_sink_pull_buffer()." + + QMetaObject::invokeMethod(this, [this, sessionId = m_currentSessionId] { + if (sessionId != m_currentSessionId) + return; // stop()ed before message is executed + + m_buffersAvailable += 1; + bufferAvailableChanged(true); + bufferReady(); + }); + + return GST_FLOW_OK; +} + +GstFlowReturn QGstreamerAudioDecoder::new_sample(GstAppSink *sink, gpointer user_data) +{ + QGstreamerAudioDecoder *decoder = reinterpret_cast<QGstreamerAudioDecoder *>(user_data); + qCDebug(qLcGstreamerAudioDecoder) << "QGstreamerAudioDecoder::new_sample"; + return decoder->newSample(sink); +} + +void QGstreamerAudioDecoder::setAudioFlags(bool wantNativeAudio) +{ + int flags = m_playbin.getInt("flags"); + // make sure not to use GST_PLAY_FLAG_NATIVE_AUDIO unless desired + // it prevents audio format conversion + flags &= ~(GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_TEXT | GST_PLAY_FLAG_VIS | GST_PLAY_FLAG_NATIVE_AUDIO); + flags |= GST_PLAY_FLAG_AUDIO; + if (wantNativeAudio) + flags |= GST_PLAY_FLAG_NATIVE_AUDIO; + m_playbin.set("flags", 
flags); +} + +void QGstreamerAudioDecoder::addAppSink() +{ + using namespace std::chrono_literals; + + if (m_appSink) + return; + + qCDebug(qLcGstreamerAudioDecoder) << "QGstreamerAudioDecoder::addAppSink"; + m_appSink = QGstAppSink::create("decoderAppSink"); + GstAppSinkCallbacks callbacks{}; + callbacks.new_sample = new_sample; + m_appSink.setCallbacks(callbacks, this, nullptr); + +#if GST_CHECK_VERSION(1, 24, 0) + static constexpr auto maxBufferTime = 500ms; + m_appSink.setMaxBufferTime(maxBufferTime); +#else + static constexpr int maxBuffers = 16; + m_appSink.setMaxBuffers(maxBuffers); +#endif + + static constexpr bool sync = false; + m_appSink.setSync(sync); + + QGstPipeline::modifyPipelineWhileNotRunning(m_playbin.getPipeline(), [&] { + m_outputBin.add(m_appSink); + qLinkGstElements(m_audioConvert, m_appSink); + }); +} + +void QGstreamerAudioDecoder::removeAppSink() +{ + if (!m_appSink) + return; + + qCDebug(qLcGstreamerAudioDecoder) << "QGstreamerAudioDecoder::removeAppSink"; + + QGstPipeline::modifyPipelineWhileNotRunning(m_playbin.getPipeline(), [&] { + qUnlinkGstElements(m_audioConvert, m_appSink); + m_outputBin.stopAndRemoveElements(m_appSink); + }); + m_appSink = {}; +} + +void QGstreamerAudioDecoder::updateDuration() +{ + std::optional<std::chrono::milliseconds> duration = m_playbin.durationInMs(); + if (!duration) + duration = invalidDuration; + + if (m_duration != duration) { + m_duration = *duration; + durationChanged(m_duration.count()); + } + + if (m_duration.count() > 0) + m_durationQueries = 0; + + if (m_durationQueries > 0) { + //increase delay between duration requests + int delay = 25 << (5 - m_durationQueries); + QTimer::singleShot(delay, this, &QGstreamerAudioDecoder::updateDuration); + m_durationQueries--; + } +} + +std::chrono::nanoseconds QGstreamerAudioDecoder::getPositionFromBuffer(GstBuffer *buffer) +{ + using namespace std::chrono; + using namespace std::chrono_literals; + nanoseconds position{ GST_BUFFER_TIMESTAMP(buffer) }; + if 
(position >= 0ns) + return position; + else + return invalidPosition; +} + +QT_END_NAMESPACE + +#include "moc_qgstreameraudiodecoder_p.cpp" diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder_p.h b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder_p.h new file mode 100644 index 000000000..d2d259dde --- /dev/null +++ b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodecoder_p.h @@ -0,0 +1,111 @@ +// Copyright (C) 2016 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#ifndef QGSTREAMERAUDIODECODERCONTROL_H +#define QGSTREAMERAUDIODECODERCONTROL_H + +// +// W A R N I N G +// ------------- +// +// This file is not part of the Qt API. It exists purely as an +// implementation detail. This header file may change from version to +// version without notice, or even be removed. +// +// We mean it. +// + +#include <QtMultimedia/private/qmultimediautils_p.h> +#include <QtMultimedia/private/qplatformaudiodecoder_p.h> +#include <QtMultimedia/private/qtmultimediaglobal_p.h> +#include <QtMultimedia/qaudiodecoder.h> +#include <QtCore/qobject.h> +#include <QtCore/qmutex.h> +#include <QtCore/qurl.h> + +#include <common/qgst_p.h> +#include <common/qgstappsource_p.h> +#include <common/qgstpipeline_p.h> + +#include <gst/app/gstappsink.h> + +QT_BEGIN_NAMESPACE + +class QGstreamerMessage; + +class QGstreamerAudioDecoder final : public QPlatformAudioDecoder, public QGstreamerBusMessageFilter +{ + Q_OBJECT + +public: + static QMaybe<QPlatformAudioDecoder *> create(QAudioDecoder *parent); + virtual ~QGstreamerAudioDecoder(); + + QUrl source() const override; + void setSource(const QUrl &fileName) override; + + QIODevice *sourceDevice() const override; + void setSourceDevice(QIODevice *device) override; + + void start() override; + void stop() override; + + QAudioFormat audioFormat() const override; + void setAudioFormat(const QAudioFormat &format) override; + + QAudioBuffer 
read() override; + + qint64 position() const override; + qint64 duration() const override; + + // GStreamerBusMessageFilter interface + bool processBusMessage(const QGstreamerMessage &message) override; + +private slots: + void updateDuration(); + +private: + explicit QGstreamerAudioDecoder(QAudioDecoder *parent); + + static GstFlowReturn new_sample(GstAppSink *sink, gpointer user_data); + GstFlowReturn newSample(GstAppSink *sink); + + static void configureAppSrcElement(GObject *, GObject *, GParamSpec *, + QGstreamerAudioDecoder *_this); + + void setAudioFlags(bool wantNativeAudio); + void addAppSink(); + void removeAppSink(); + + bool handlePlaybinMessage(const QGstreamerMessage &); + + void processInvalidMedia(QAudioDecoder::Error errorCode, const QString &errorString); + static std::chrono::nanoseconds getPositionFromBuffer(GstBuffer *buffer); + + QGstPipeline m_playbin; + QGstBin m_outputBin; + QGstElement m_audioConvert; + QGstAppSink m_appSink; + QGstAppSource *m_appSrc = nullptr; + + QUrl mSource; + QIODevice *mDevice = nullptr; + QAudioFormat mFormat; + + int m_buffersAvailable = 0; + + static constexpr auto invalidDuration = std::chrono::milliseconds{ -1 }; + static constexpr auto invalidPosition = std::chrono::milliseconds{ -1 }; + std::chrono::milliseconds m_position{ invalidPosition }; + std::chrono::milliseconds m_duration{ invalidDuration }; + + int m_durationQueries = 0; + + qint32 m_currentSessionId{}; + + QGObjectHandlerScopedConnection m_deepNotifySourceConnection; +}; + +QT_END_NAMESPACE + +#endif // QGSTREAMERPLAYERSESSION_H diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice.cpp b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice.cpp new file mode 100644 index 000000000..b22e40118 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice.cpp @@ -0,0 +1,87 @@ +// Copyright (C) 2016 The Qt Company Ltd. 
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#include "qgstreameraudiodevice_p.h" + +#include <common/qgst_p.h> +#include <common/qgstutils_p.h> +#include <private/qplatformmediaintegration_p.h> + +QT_BEGIN_NAMESPACE + +QGStreamerAudioDeviceInfo::QGStreamerAudioDeviceInfo(GstDevice *d, const QByteArray &device, + QAudioDevice::Mode mode) + : QAudioDevicePrivate(device, mode), + gstDevice{ + d, + QGstDeviceHandle::NeedsRef, + } +{ + QGString name{ + gst_device_get_display_name(gstDevice.get()), + }; + description = name.toQString(); + + auto caps = QGstCaps(gst_device_get_caps(gstDevice.get()), QGstCaps::HasRef); + int size = caps.size(); + for (int i = 0; i < size; ++i) { + auto c = caps.at(i); + if (c.name() == "audio/x-raw") { + auto rate = c["rate"].toIntRange(); + if (rate) { + minimumSampleRate = rate->min; + maximumSampleRate = rate->max; + } + auto channels = c["channels"].toIntRange(); + if (channels) { + minimumChannelCount = channels->min; + maximumChannelCount = channels->max; + } + supportedSampleFormats = c["format"].getSampleFormats(); + } + } + + preferredFormat.setChannelCount(qBound(minimumChannelCount, 2, maximumChannelCount)); + preferredFormat.setSampleRate(qBound(minimumSampleRate, 48000, maximumSampleRate)); + QAudioFormat::SampleFormat f = QAudioFormat::Int16; + if (!supportedSampleFormats.contains(f)) + f = supportedSampleFormats.value(0, QAudioFormat::Unknown); + preferredFormat.setSampleFormat(f); +} + +QGStreamerCustomAudioDeviceInfo::QGStreamerCustomAudioDeviceInfo( + const QByteArray &gstreamerPipeline, QAudioDevice::Mode mode) + : QAudioDevicePrivate{ + gstreamerPipeline, + mode, + } +{ +} + +QAudioDevice qMakeCustomGStreamerAudioInput(const QByteArray &gstreamerPipeline) +{ + auto deviceInfo = std::make_unique<QGStreamerCustomAudioDeviceInfo>(gstreamerPipeline, + QAudioDevice::Mode::Input); + + return deviceInfo.release()->create(); +} + +QAudioDevice 
qMakeCustomGStreamerAudioOutput(const QByteArray &gstreamerPipeline) +{ + auto deviceInfo = std::make_unique<QGStreamerCustomAudioDeviceInfo>(gstreamerPipeline, + QAudioDevice::Mode::Output); + + return deviceInfo.release()->create(); +} + +bool isCustomAudioDevice(const QAudioDevicePrivate *device) +{ + return dynamic_cast<const QGStreamerCustomAudioDeviceInfo *>(device); +} + +bool isCustomAudioDevice(const QAudioDevice &device) +{ + return isCustomAudioDevice(device.handle()); +} + +QT_END_NAMESPACE diff --git a/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice_p.h b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice_p.h new file mode 100644 index 000000000..403fd5e74 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/audio/qgstreameraudiodevice_p.h @@ -0,0 +1,55 @@ +// Copyright (C) 2016 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#ifndef QGSTREAMERAUDIODEVICEINFO_H +#define QGSTREAMERAUDIODEVICEINFO_H + +// +// W A R N I N G +// ------------- +// +// This file is not part of the Qt API. It exists purely as an +// implementation detail. This header file may change from version to +// version without notice, or even be removed. +// +// We mean it. 
+// + +#include <QtCore/qbytearray.h> +#include <QtCore/qstringlist.h> +#include <QtCore/qlist.h> + +#include <QtMultimedia/qaudio.h> +#include <QtMultimedia/qaudiodevice.h> +#include <QtMultimedia/private/qaudiodevice_p.h> + +#include <QtQGstreamerMediaPlugin/private/qgst_handle_types_p.h> + +#include <gst/gst.h> + +QT_BEGIN_NAMESPACE + +class QGStreamerAudioDeviceInfo : public QAudioDevicePrivate +{ +public: + QGStreamerAudioDeviceInfo(GstDevice *gstDevice, const QByteArray &device, QAudioDevice::Mode mode); + + QGstDeviceHandle gstDevice; +}; + +class QGStreamerCustomAudioDeviceInfo : public QAudioDevicePrivate +{ +public: + QGStreamerCustomAudioDeviceInfo(const QByteArray &gstreamerPipeline, QAudioDevice::Mode mode); +}; + +bool isCustomAudioDevice(const QAudioDevicePrivate *device); +bool isCustomAudioDevice(const QAudioDevice &device); + +QAudioDevice qMakeCustomGStreamerAudioInput(const QByteArray &gstreamerPipeline); +QAudioDevice qMakeCustomGStreamerAudioOutput(const QByteArray &gstreamerPipeline); + +QT_END_NAMESPACE + +#endif + diff --git a/src/plugins/multimedia/gstreamer/common/qglist_helper_p.h b/src/plugins/multimedia/gstreamer/common/qglist_helper_p.h new file mode 100644 index 000000000..54108e1c3 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qglist_helper_p.h @@ -0,0 +1,82 @@ +// Copyright (C) 2024 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#ifndef QGLIST_HELPER_P_H +#define QGLIST_HELPER_P_H + +// +// W A R N I N G +// ------------- +// +// This file is not part of the Qt API. It exists purely as an +// implementation detail. This header file may change from version to +// version without notice, or even be removed. +// +// We mean it. 
+// + +#include <QtCore/qtconfigmacros.h> + +#include <glib.h> +#include <iterator> + +QT_BEGIN_NAMESPACE + +namespace QGstUtils { + +template <typename ListType> +struct GListIterator +{ + explicit GListIterator(const GList *element = nullptr) : element(element) { } + + const ListType &operator*() const noexcept { return *operator->(); } + const ListType *operator->() const noexcept + { + return reinterpret_cast<const ListType *>(&element->data); + } + + GListIterator &operator++() noexcept + { + if (element) + element = element->next; + + return *this; + } + GListIterator operator++(int n) noexcept + { + for (int i = 0; i != n; ++i) + operator++(); + + return *this; + } + + bool operator==(const GListIterator &r) const noexcept { return element == r.element; } + bool operator!=(const GListIterator &r) const noexcept { return element != r.element; } + + using difference_type = std::ptrdiff_t; + using value_type = ListType; + using pointer = value_type *; + using reference = value_type &; + using iterator_category = std::input_iterator_tag; + + const GList *element = nullptr; +}; + +template <typename ListType> +struct GListRangeAdaptor +{ + static_assert(std::is_pointer_v<ListType>); + + explicit GListRangeAdaptor(const GList *list) : head(list) { } + + auto begin() { return GListIterator<ListType>(head); } + auto end() { return GListIterator<ListType>(nullptr); } + + const GList *head; +}; + +} // namespace QGstUtils + +QT_END_NAMESPACE + +#endif diff --git a/src/plugins/multimedia/gstreamer/common/qgst.cpp b/src/plugins/multimedia/gstreamer/common/qgst.cpp new file mode 100644 index 000000000..cb1f38495 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qgst.cpp @@ -0,0 +1,1404 @@ +// Copyright (C) 2024 The Qt Company Ltd. 
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#include <common/qgst_p.h> +#include <common/qgst_debug_p.h> +#include <common/qgstpipeline_p.h> +#include <common/qgstreamermessage_p.h> + +#include <QtCore/qdebug.h> +#include <QtMultimedia/qcameradevice.h> + +#include <array> + +QT_BEGIN_NAMESPACE + +namespace { + +struct VideoFormat +{ + QVideoFrameFormat::PixelFormat pixelFormat; + GstVideoFormat gstFormat; +}; + +constexpr std::array<VideoFormat, 19> qt_videoFormatLookup{ { + { QVideoFrameFormat::Format_YUV420P, GST_VIDEO_FORMAT_I420 }, + { QVideoFrameFormat::Format_YUV422P, GST_VIDEO_FORMAT_Y42B }, + { QVideoFrameFormat::Format_YV12, GST_VIDEO_FORMAT_YV12 }, + { QVideoFrameFormat::Format_UYVY, GST_VIDEO_FORMAT_UYVY }, + { QVideoFrameFormat::Format_YUYV, GST_VIDEO_FORMAT_YUY2 }, + { QVideoFrameFormat::Format_NV12, GST_VIDEO_FORMAT_NV12 }, + { QVideoFrameFormat::Format_NV21, GST_VIDEO_FORMAT_NV21 }, + { QVideoFrameFormat::Format_AYUV, GST_VIDEO_FORMAT_AYUV }, + { QVideoFrameFormat::Format_Y8, GST_VIDEO_FORMAT_GRAY8 }, + { QVideoFrameFormat::Format_XRGB8888, GST_VIDEO_FORMAT_xRGB }, + { QVideoFrameFormat::Format_XBGR8888, GST_VIDEO_FORMAT_xBGR }, + { QVideoFrameFormat::Format_RGBX8888, GST_VIDEO_FORMAT_RGBx }, + { QVideoFrameFormat::Format_BGRX8888, GST_VIDEO_FORMAT_BGRx }, + { QVideoFrameFormat::Format_ARGB8888, GST_VIDEO_FORMAT_ARGB }, + { QVideoFrameFormat::Format_ABGR8888, GST_VIDEO_FORMAT_ABGR }, + { QVideoFrameFormat::Format_RGBA8888, GST_VIDEO_FORMAT_RGBA }, + { QVideoFrameFormat::Format_BGRA8888, GST_VIDEO_FORMAT_BGRA }, +#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN + { QVideoFrameFormat::Format_Y16, GST_VIDEO_FORMAT_GRAY16_LE }, + { QVideoFrameFormat::Format_P010, GST_VIDEO_FORMAT_P010_10LE }, +#else + { QVideoFrameFormat::Format_Y16, GST_VIDEO_FORMAT_GRAY16_BE }, + { QVideoFrameFormat::Format_P010, GST_VIDEO_FORMAT_P010_10BE }, +#endif +} }; + +int indexOfVideoFormat(QVideoFrameFormat::PixelFormat format) 
+{ + for (size_t i = 0; i < qt_videoFormatLookup.size(); ++i) + if (qt_videoFormatLookup[i].pixelFormat == format) + return int(i); + + return -1; +} + +int indexOfVideoFormat(GstVideoFormat format) +{ + for (size_t i = 0; i < qt_videoFormatLookup.size(); ++i) + if (qt_videoFormatLookup[i].gstFormat == format) + return int(i); + + return -1; +} + +} // namespace + +// QGValue + +QGValue::QGValue(const GValue *v) : value(v) { } + +bool QGValue::isNull() const +{ + return !value; +} + +std::optional<bool> QGValue::toBool() const +{ + if (!G_VALUE_HOLDS_BOOLEAN(value)) + return std::nullopt; + return g_value_get_boolean(value); +} + +std::optional<int> QGValue::toInt() const +{ + if (!G_VALUE_HOLDS_INT(value)) + return std::nullopt; + return g_value_get_int(value); +} + +std::optional<int> QGValue::toInt64() const +{ + if (!G_VALUE_HOLDS_INT64(value)) + return std::nullopt; + return g_value_get_int64(value); +} + +const char *QGValue::toString() const +{ + return value ? g_value_get_string(value) : nullptr; +} + +std::optional<float> QGValue::getFraction() const +{ + if (!GST_VALUE_HOLDS_FRACTION(value)) + return std::nullopt; + return (float)gst_value_get_fraction_numerator(value) + / (float)gst_value_get_fraction_denominator(value); +} + +std::optional<QGRange<float>> QGValue::getFractionRange() const +{ + if (!GST_VALUE_HOLDS_FRACTION_RANGE(value)) + return std::nullopt; + QGValue min = QGValue{ gst_value_get_fraction_range_min(value) }; + QGValue max = QGValue{ gst_value_get_fraction_range_max(value) }; + return QGRange<float>{ *min.getFraction(), *max.getFraction() }; +} + +std::optional<QGRange<int>> QGValue::toIntRange() const +{ + if (!GST_VALUE_HOLDS_INT_RANGE(value)) + return std::nullopt; + return QGRange<int>{ gst_value_get_int_range_min(value), gst_value_get_int_range_max(value) }; +} + +QGstStructureView QGValue::toStructure() const +{ + if (!value || !GST_VALUE_HOLDS_STRUCTURE(value)) + return QGstStructureView(nullptr); + return 
QGstStructureView(gst_value_get_structure(value)); +} + +QGstCaps QGValue::toCaps() const +{ + if (!value || !GST_VALUE_HOLDS_CAPS(value)) + return {}; + return QGstCaps(gst_caps_copy(gst_value_get_caps(value)), QGstCaps::HasRef); +} + +bool QGValue::isList() const +{ + return value && GST_VALUE_HOLDS_LIST(value); +} + +int QGValue::listSize() const +{ + return gst_value_list_get_size(value); +} + +QGValue QGValue::at(int index) const +{ + return QGValue{ gst_value_list_get_value(value, index) }; +} + +// QGstStructureView + +QGstStructureView::QGstStructureView(const GstStructure *s) : structure(s) { } + +QGstStructureView::QGstStructureView(const QUniqueGstStructureHandle &handle) + : QGstStructureView{ handle.get() } +{ +} + +QUniqueGstStructureHandle QGstStructureView::clone() const +{ + return QUniqueGstStructureHandle{ gst_structure_copy(structure) }; +} + +bool QGstStructureView::isNull() const +{ + return !structure; +} + +QByteArrayView QGstStructureView::name() const +{ + return gst_structure_get_name(structure); +} + +QGValue QGstStructureView::operator[](const char *fieldname) const +{ + return QGValue{ gst_structure_get_value(structure, fieldname) }; +} + +QGstCaps QGstStructureView::caps() const +{ + return operator[]("caps").toCaps(); +} + +QGstTagListHandle QGstStructureView::tags() const +{ + QGValue tags = operator[]("tags"); + if (tags.isNull()) + return {}; + + QGstTagListHandle tagList; + gst_structure_get(structure, "tags", GST_TYPE_TAG_LIST, &tagList, nullptr); + return tagList; +} + +QSize QGstStructureView::resolution() const +{ + QSize size; + + int w, h; + if (structure && gst_structure_get_int(structure, "width", &w) + && gst_structure_get_int(structure, "height", &h)) { + size.rwidth() = w; + size.rheight() = h; + } + + return size; +} + +QVideoFrameFormat::PixelFormat QGstStructureView::pixelFormat() const +{ + QVideoFrameFormat::PixelFormat pixelFormat = QVideoFrameFormat::Format_Invalid; + + if (!structure) + return pixelFormat; + + 
if (gst_structure_has_name(structure, "video/x-raw")) { + const gchar *s = gst_structure_get_string(structure, "format"); + if (s) { + GstVideoFormat format = gst_video_format_from_string(s); + int index = indexOfVideoFormat(format); + + if (index != -1) + pixelFormat = qt_videoFormatLookup[index].pixelFormat; + } + } else if (gst_structure_has_name(structure, "image/jpeg")) { + pixelFormat = QVideoFrameFormat::Format_Jpeg; + } + + return pixelFormat; +} + +QGRange<float> QGstStructureView::frameRateRange() const +{ + float minRate = 0.; + float maxRate = 0.; + + if (!structure) + return { 0.f, 0.f }; + + auto extractFraction = [](const GValue *v) -> float { + return (float)gst_value_get_fraction_numerator(v) + / (float)gst_value_get_fraction_denominator(v); + }; + auto extractFrameRate = [&](const GValue *v) { + auto insert = [&](float min, float max) { + if (max > maxRate) + maxRate = max; + if (min < minRate) + minRate = min; + }; + + if (GST_VALUE_HOLDS_FRACTION(v)) { + float rate = extractFraction(v); + insert(rate, rate); + } else if (GST_VALUE_HOLDS_FRACTION_RANGE(v)) { + auto *min = gst_value_get_fraction_range_max(v); + auto *max = gst_value_get_fraction_range_max(v); + insert(extractFraction(min), extractFraction(max)); + } + }; + + const GValue *gstFrameRates = gst_structure_get_value(structure, "framerate"); + if (gstFrameRates) { + if (GST_VALUE_HOLDS_LIST(gstFrameRates)) { + guint nFrameRates = gst_value_list_get_size(gstFrameRates); + for (guint f = 0; f < nFrameRates; ++f) { + extractFrameRate(gst_value_list_get_value(gstFrameRates, f)); + } + } else { + extractFrameRate(gstFrameRates); + } + } else { + const GValue *min = gst_structure_get_value(structure, "min-framerate"); + const GValue *max = gst_structure_get_value(structure, "max-framerate"); + if (min && max) { + minRate = extractFraction(min); + maxRate = extractFraction(max); + } + } + + return { minRate, maxRate }; +} + +QGstreamerMessage QGstStructureView::getMessage() +{ + GstMessage 
*message = nullptr; + gst_structure_get(structure, "message", GST_TYPE_MESSAGE, &message, nullptr); + return QGstreamerMessage(message, QGstreamerMessage::HasRef); +} + +std::optional<Fraction> QGstStructureView::pixelAspectRatio() const +{ + gint numerator; + gint denominator; + if (gst_structure_get_fraction(structure, "pixel-aspect-ratio", &numerator, &denominator)) { + return Fraction{ + numerator, + denominator, + }; + } + + return std::nullopt; +} + +// QTBUG-125249: gstreamer tries "to keep the input height (because of interlacing)". Can we align +// the behavior between gstreamer and ffmpeg? +static QSize qCalculateFrameSizeGStreamer(QSize resolution, Fraction par) +{ + if (par.numerator == par.denominator || par.numerator < 1 || par.denominator < 1) + return resolution; + + return QSize{ + resolution.width() * par.numerator / par.denominator, + resolution.height(), + }; +} + +QSize QGstStructureView::nativeSize() const +{ + QSize size = resolution(); + if (!size.isValid()) { + qWarning() << Q_FUNC_INFO << "invalid resolution when querying nativeSize"; + return size; + } + + std::optional<Fraction> par = pixelAspectRatio(); + if (par) + size = qCalculateFrameSizeGStreamer(size, *par); + return size; +} + +// QGstCaps + +std::optional<std::pair<QVideoFrameFormat, GstVideoInfo>> QGstCaps::formatAndVideoInfo() const +{ + GstVideoInfo vidInfo; + + bool success = gst_video_info_from_caps(&vidInfo, get()); + if (!success) + return std::nullopt; + + int index = indexOfVideoFormat(vidInfo.finfo->format); + if (index == -1) + return std::nullopt; + + QVideoFrameFormat format(QSize(vidInfo.width, vidInfo.height), + qt_videoFormatLookup[index].pixelFormat); + + if (vidInfo.fps_d > 0) + format.setStreamFrameRate(qreal(vidInfo.fps_n) / vidInfo.fps_d); + + QVideoFrameFormat::ColorRange range = QVideoFrameFormat::ColorRange_Unknown; + switch (vidInfo.colorimetry.range) { + case GST_VIDEO_COLOR_RANGE_UNKNOWN: + break; + case GST_VIDEO_COLOR_RANGE_0_255: + range = 
QVideoFrameFormat::ColorRange_Full; + break; + case GST_VIDEO_COLOR_RANGE_16_235: + range = QVideoFrameFormat::ColorRange_Video; + break; + } + format.setColorRange(range); + + QVideoFrameFormat::ColorSpace colorSpace = QVideoFrameFormat::ColorSpace_Undefined; + switch (vidInfo.colorimetry.matrix) { + case GST_VIDEO_COLOR_MATRIX_UNKNOWN: + case GST_VIDEO_COLOR_MATRIX_RGB: + case GST_VIDEO_COLOR_MATRIX_FCC: + break; + case GST_VIDEO_COLOR_MATRIX_BT709: + colorSpace = QVideoFrameFormat::ColorSpace_BT709; + break; + case GST_VIDEO_COLOR_MATRIX_BT601: + colorSpace = QVideoFrameFormat::ColorSpace_BT601; + break; + case GST_VIDEO_COLOR_MATRIX_SMPTE240M: + colorSpace = QVideoFrameFormat::ColorSpace_AdobeRgb; + break; + case GST_VIDEO_COLOR_MATRIX_BT2020: + colorSpace = QVideoFrameFormat::ColorSpace_BT2020; + break; + } + format.setColorSpace(colorSpace); + + QVideoFrameFormat::ColorTransfer transfer = QVideoFrameFormat::ColorTransfer_Unknown; + switch (vidInfo.colorimetry.transfer) { + case GST_VIDEO_TRANSFER_UNKNOWN: + break; + case GST_VIDEO_TRANSFER_GAMMA10: + transfer = QVideoFrameFormat::ColorTransfer_Linear; + break; + case GST_VIDEO_TRANSFER_GAMMA22: + case GST_VIDEO_TRANSFER_SMPTE240M: + case GST_VIDEO_TRANSFER_SRGB: + case GST_VIDEO_TRANSFER_ADOBERGB: + transfer = QVideoFrameFormat::ColorTransfer_Gamma22; + break; + case GST_VIDEO_TRANSFER_GAMMA18: + case GST_VIDEO_TRANSFER_GAMMA20: + // not quite, but best fit + case GST_VIDEO_TRANSFER_BT709: + case GST_VIDEO_TRANSFER_BT2020_12: + transfer = QVideoFrameFormat::ColorTransfer_BT709; + break; + case GST_VIDEO_TRANSFER_GAMMA28: + transfer = QVideoFrameFormat::ColorTransfer_Gamma28; + break; + case GST_VIDEO_TRANSFER_LOG100: + case GST_VIDEO_TRANSFER_LOG316: + break; +#if GST_CHECK_VERSION(1, 18, 0) + case GST_VIDEO_TRANSFER_SMPTE2084: + transfer = QVideoFrameFormat::ColorTransfer_ST2084; + break; + case GST_VIDEO_TRANSFER_ARIB_STD_B67: + transfer = QVideoFrameFormat::ColorTransfer_STD_B67; + break; + case 
GST_VIDEO_TRANSFER_BT2020_10: + transfer = QVideoFrameFormat::ColorTransfer_BT709; + break; + case GST_VIDEO_TRANSFER_BT601: + transfer = QVideoFrameFormat::ColorTransfer_BT601; + break; +#endif + } + format.setColorTransfer(transfer); + + return std::pair{ + std::move(format), + vidInfo, + }; +} + +void QGstCaps::addPixelFormats(const QList<QVideoFrameFormat::PixelFormat> &formats, + const char *modifier) +{ + if (!gst_caps_is_writable(get())) + *this = QGstCaps(gst_caps_make_writable(release()), QGstCaps::RefMode::HasRef); + + GValue list = {}; + g_value_init(&list, GST_TYPE_LIST); + + for (QVideoFrameFormat::PixelFormat format : formats) { + int index = indexOfVideoFormat(format); + if (index == -1) + continue; + GValue item = {}; + + g_value_init(&item, G_TYPE_STRING); + g_value_set_string(&item, + gst_video_format_to_string(qt_videoFormatLookup[index].gstFormat)); + gst_value_list_append_value(&list, &item); + g_value_unset(&item); + } + + auto *structure = gst_structure_new("video/x-raw", "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, + INT_MAX, 1, "width", GST_TYPE_INT_RANGE, 1, INT_MAX, + "height", GST_TYPE_INT_RANGE, 1, INT_MAX, nullptr); + gst_structure_set_value(structure, "format", &list); + gst_caps_append_structure(get(), structure); + g_value_unset(&list); + + if (modifier) + gst_caps_set_features(get(), size() - 1, gst_caps_features_from_string(modifier)); +} + +void QGstCaps::setResolution(QSize resolution) +{ + Q_ASSERT(resolution.isValid()); + GValue width{}; + g_value_init(&width, G_TYPE_INT); + g_value_set_int(&width, resolution.width()); + GValue height{}; + g_value_init(&height, G_TYPE_INT); + g_value_set_int(&height, resolution.height()); + + gst_caps_set_value(caps(), "width", &width); + gst_caps_set_value(caps(), "height", &height); +} + +QGstCaps QGstCaps::fromCameraFormat(const QCameraFormat &format) +{ + QSize size = format.resolution(); + GstStructure *structure = nullptr; + if (format.pixelFormat() == QVideoFrameFormat::Format_Jpeg) { + 
structure = gst_structure_new("image/jpeg", "width", G_TYPE_INT, size.width(), "height", + G_TYPE_INT, size.height(), nullptr); + } else { + int index = indexOfVideoFormat(format.pixelFormat()); + if (index < 0) + return {}; + auto gstFormat = qt_videoFormatLookup[index].gstFormat; + structure = gst_structure_new("video/x-raw", "format", G_TYPE_STRING, + gst_video_format_to_string(gstFormat), "width", G_TYPE_INT, + size.width(), "height", G_TYPE_INT, size.height(), nullptr); + } + auto caps = QGstCaps::create(); + gst_caps_append_structure(caps.get(), structure); + return caps; +} + +QGstCaps QGstCaps::copy() const +{ + return QGstCaps{ + gst_caps_copy(caps()), + QGstCaps::HasRef, + }; +} + +QGstCaps::MemoryFormat QGstCaps::memoryFormat() const +{ + auto *features = gst_caps_get_features(get(), 0); + if (gst_caps_features_contains(features, "memory:GLMemory")) + return GLTexture; + if (gst_caps_features_contains(features, "memory:DMABuf")) + return DMABuf; + return CpuMemory; +} + +int QGstCaps::size() const +{ + return int(gst_caps_get_size(get())); +} + +QGstStructureView QGstCaps::at(int index) const +{ + return QGstStructureView{ + gst_caps_get_structure(get(), index), + }; +} + +GstCaps *QGstCaps::caps() const +{ + return get(); +} + +QGstCaps QGstCaps::create() +{ + return QGstCaps(gst_caps_new_empty(), HasRef); +} + +// QGstObject + +void QGstObject::set(const char *property, const char *str) +{ + g_object_set(get(), property, str, nullptr); +} + +void QGstObject::set(const char *property, bool b) +{ + g_object_set(get(), property, gboolean(b), nullptr); +} + +void QGstObject::set(const char *property, uint i) +{ + g_object_set(get(), property, guint(i), nullptr); +} + +void QGstObject::set(const char *property, int i) +{ + g_object_set(get(), property, gint(i), nullptr); +} + +void QGstObject::set(const char *property, qint64 i) +{ + g_object_set(get(), property, gint64(i), nullptr); +} + +void QGstObject::set(const char *property, quint64 i) +{ + 
g_object_set(get(), property, guint64(i), nullptr); +} + +void QGstObject::set(const char *property, double d) +{ + g_object_set(get(), property, gdouble(d), nullptr); +} + +void QGstObject::set(const char *property, const QGstObject &o) +{ + g_object_set(get(), property, o.object(), nullptr); +} + +void QGstObject::set(const char *property, const QGstCaps &c) +{ + g_object_set(get(), property, c.caps(), nullptr); +} + +QGString QGstObject::getString(const char *property) const +{ + char *s = nullptr; + g_object_get(get(), property, &s, nullptr); + return QGString(s); +} + +QGstStructureView QGstObject::getStructure(const char *property) const +{ + GstStructure *s = nullptr; + g_object_get(get(), property, &s, nullptr); + return QGstStructureView(s); +} + +bool QGstObject::getBool(const char *property) const +{ + gboolean b = false; + g_object_get(get(), property, &b, nullptr); + return b; +} + +uint QGstObject::getUInt(const char *property) const +{ + guint i = 0; + g_object_get(get(), property, &i, nullptr); + return i; +} + +int QGstObject::getInt(const char *property) const +{ + gint i = 0; + g_object_get(get(), property, &i, nullptr); + return i; +} + +quint64 QGstObject::getUInt64(const char *property) const +{ + guint64 i = 0; + g_object_get(get(), property, &i, nullptr); + return i; +} + +qint64 QGstObject::getInt64(const char *property) const +{ + gint64 i = 0; + g_object_get(get(), property, &i, nullptr); + return i; +} + +float QGstObject::getFloat(const char *property) const +{ + gfloat d = 0; + g_object_get(get(), property, &d, nullptr); + return d; +} + +double QGstObject::getDouble(const char *property) const +{ + gdouble d = 0; + g_object_get(get(), property, &d, nullptr); + return d; +} + +QGstObject QGstObject::getObject(const char *property) const +{ + GstObject *o = nullptr; + g_object_get(get(), property, &o, nullptr); + return QGstObject(o, HasRef); +} + +QGObjectHandlerConnection QGstObject::connect(const char *name, GCallback callback, + 
gpointer userData) +{ + return QGObjectHandlerConnection{ + *this, + g_signal_connect(get(), name, callback, userData), + }; +} + +void QGstObject::disconnect(gulong handlerId) +{ + g_signal_handler_disconnect(get(), handlerId); +} + +GType QGstObject::type() const +{ + return G_OBJECT_TYPE(get()); +} + +QLatin1StringView QGstObject::typeName() const +{ + return QLatin1StringView{ + g_type_name(type()), + }; +} + +GstObject *QGstObject::object() const +{ + return get(); +} + +QLatin1StringView QGstObject::name() const +{ + using namespace Qt::StringLiterals; + + return get() ? QLatin1StringView{ GST_OBJECT_NAME(get()) } : "(null)"_L1; +} + +// QGObjectHandlerConnection + +QGObjectHandlerConnection::QGObjectHandlerConnection(QGstObject object, gulong handlerId) + : object{ std::move(object) }, handlerId{ handlerId } +{ +} + +void QGObjectHandlerConnection::disconnect() +{ + if (!object) + return; + + object.disconnect(handlerId); + object = {}; + handlerId = invalidHandlerId; +} + +// QGObjectHandlerScopedConnection + +QGObjectHandlerScopedConnection::QGObjectHandlerScopedConnection( + QGObjectHandlerConnection connection) + : connection{ + std::move(connection), + } +{ +} + +QGObjectHandlerScopedConnection::~QGObjectHandlerScopedConnection() +{ + connection.disconnect(); +} + +void QGObjectHandlerScopedConnection::disconnect() +{ + connection.disconnect(); +} + +// QGstPad + +QGstPad::QGstPad(const QGstObject &o) + : QGstPad{ + qGstSafeCast<GstPad>(o.object()), + QGstElement::NeedsRef, + } +{ +} + +QGstPad::QGstPad(GstPad *pad, RefMode mode) + : QGstObject{ + qGstCheckedCast<GstObject>(pad), + mode, + } +{ +} + +QGstCaps QGstPad::currentCaps() const +{ + return QGstCaps(gst_pad_get_current_caps(pad()), QGstCaps::HasRef); +} + +QGstCaps QGstPad::queryCaps() const +{ + return QGstCaps(gst_pad_query_caps(pad(), nullptr), QGstCaps::HasRef); +} + +QGstTagListHandle QGstPad::tags() const +{ + QGstTagListHandle tagList; + g_object_get(object(), "tags", &tagList, nullptr); 
+ return tagList; +} + +std::optional<QPlatformMediaPlayer::TrackType> QGstPad::inferTrackTypeFromName() const +{ + using namespace Qt::Literals; + QLatin1StringView padName = name(); + + if (padName.startsWith("video_"_L1)) + return QPlatformMediaPlayer::TrackType::VideoStream; + if (padName.startsWith("audio_"_L1)) + return QPlatformMediaPlayer::TrackType::AudioStream; + if (padName.startsWith("text_"_L1)) + return QPlatformMediaPlayer::TrackType::SubtitleStream; + + return std::nullopt; +} + +bool QGstPad::isLinked() const +{ + return gst_pad_is_linked(pad()); +} + +bool QGstPad::link(const QGstPad &sink) const +{ + return gst_pad_link(pad(), sink.pad()) == GST_PAD_LINK_OK; +} + +bool QGstPad::unlink(const QGstPad &sink) const +{ + return gst_pad_unlink(pad(), sink.pad()); +} + +bool QGstPad::unlinkPeer() const +{ + return unlink(peer()); +} + +QGstPad QGstPad::peer() const +{ + return QGstPad(gst_pad_get_peer(pad()), HasRef); +} + +QGstElement QGstPad::parent() const +{ + return QGstElement(gst_pad_get_parent_element(pad()), HasRef); +} + +GstPad *QGstPad::pad() const +{ + return qGstCheckedCast<GstPad>(object()); +} + +GstEvent *QGstPad::stickyEvent(GstEventType type) +{ + return gst_pad_get_sticky_event(pad(), type, 0); +} + +bool QGstPad::sendEvent(GstEvent *event) +{ + return gst_pad_send_event(pad(), event); +} + +// QGstClock + +QGstClock::QGstClock(const QGstObject &o) + : QGstClock{ + qGstSafeCast<GstClock>(o.object()), + QGstElement::NeedsRef, + } +{ +} + +QGstClock::QGstClock(GstClock *clock, RefMode mode) + : QGstObject{ + qGstCheckedCast<GstObject>(clock), + mode, + } +{ +} + +GstClock *QGstClock::clock() const +{ + return qGstCheckedCast<GstClock>(object()); +} + +GstClockTime QGstClock::time() const +{ + return gst_clock_get_time(clock()); +} + +// QGstElement + +QGstElement::QGstElement(GstElement *element, RefMode mode) + : QGstObject{ + qGstCheckedCast<GstObject>(element), + mode, + } +{ +} + +QGstElement QGstElement::createFromFactory(const 
char *factory, const char *name) +{ + GstElement *element = gst_element_factory_make(factory, name); + +#ifndef QT_NO_DEBUG + if (!element) { + qWarning() << "Failed to make element" << name << "from factory" << factory; + return QGstElement{}; + } +#endif + + return QGstElement{ + element, + NeedsRef, + }; +} + +QGstElement QGstElement::createFromFactory(GstElementFactory *factory, const char *name) +{ + return QGstElement{ + gst_element_factory_create(factory, name), + NeedsRef, + }; +} + +QGstElement QGstElement::createFromFactory(const QGstElementFactoryHandle &factory, + const char *name) +{ + return createFromFactory(factory.get(), name); +} + +QGstElement QGstElement::createFromDevice(const QGstDeviceHandle &device, const char *name) +{ + return createFromDevice(device.get(), name); +} + +QGstElement QGstElement::createFromDevice(GstDevice *device, const char *name) +{ + return QGstElement{ + gst_device_create_element(device, name), + QGstElement::NeedsRef, + }; +} + +QGstElement QGstElement::createFromPipelineDescription(const char *str) +{ + QUniqueGErrorHandle error; + QGstElement element{ + gst_parse_launch(str, &error), + QGstElement::NeedsRef, + }; + + if (error) // error does not mean that the element could not be constructed + qWarning() << "gst_parse_launch error:" << error; + + return element; +} + +QGstElement QGstElement::createFromPipelineDescription(const QByteArray &str) +{ + return createFromPipelineDescription(str.constData()); +} + +QGstElementFactoryHandle QGstElement::findFactory(const char *name) +{ + return QGstElementFactoryHandle{ + gst_element_factory_find(name), + QGstElementFactoryHandle::HasRef, + }; +} + +QGstElementFactoryHandle QGstElement::findFactory(const QByteArray &name) +{ + return findFactory(name.constData()); +} + +QGstPad QGstElement::staticPad(const char *name) const +{ + return QGstPad(gst_element_get_static_pad(element(), name), HasRef); +} + +QGstPad QGstElement::src() const +{ + return staticPad("src"); +} + 
+QGstPad QGstElement::sink() const +{ + return staticPad("sink"); +} + +QGstPad QGstElement::getRequestPad(const char *name) const +{ +#if GST_CHECK_VERSION(1, 19, 1) + return QGstPad(gst_element_request_pad_simple(element(), name), HasRef); +#else + return QGstPad(gst_element_get_request_pad(element(), name), HasRef); +#endif +} + +void QGstElement::releaseRequestPad(const QGstPad &pad) const +{ + return gst_element_release_request_pad(element(), pad.pad()); +} + +GstState QGstElement::state(std::chrono::nanoseconds timeout) const +{ + using namespace std::chrono_literals; + + GstState state; + GstStateChangeReturn change = + gst_element_get_state(element(), &state, nullptr, timeout.count()); + + if (Q_UNLIKELY(change == GST_STATE_CHANGE_ASYNC)) + qWarning() << "QGstElement::state detected an asynchronous state change. Return value not " + "reliable"; + + return state; +} + +GstStateChangeReturn QGstElement::setState(GstState state) +{ + return gst_element_set_state(element(), state); +} + +bool QGstElement::setStateSync(GstState state, std::chrono::nanoseconds timeout) +{ + if (state == GST_STATE_NULL) { + // QTBUG-125251: when changing pipeline state too quickly between NULL->PAUSED->NULL there + // may be a pending task to activate pads while we try to switch to NULL. This can cause an + // assertion failure in gstreamer. we therefore finish the state change when called on a bin + // or pipeline. 
+ if (qIsGstObjectOfType<GstBin>(element())) + finishStateChange(); + } + + GstStateChangeReturn change = gst_element_set_state(element(), state); + if (change == GST_STATE_CHANGE_ASYNC) + change = gst_element_get_state(element(), nullptr, &state, timeout.count()); + + if (change != GST_STATE_CHANGE_SUCCESS && change != GST_STATE_CHANGE_NO_PREROLL) { + qWarning() << "Could not change state of" << name() << "to" << state << change; + dumpPipelineGraph("setStateSyncFailure"); + } + return change == GST_STATE_CHANGE_SUCCESS; +} + +bool QGstElement::syncStateWithParent() +{ + Q_ASSERT(element()); + return gst_element_sync_state_with_parent(element()) == TRUE; +} + +bool QGstElement::finishStateChange(std::chrono::nanoseconds timeout) +{ + GstState state, pending; + GstStateChangeReturn change = + gst_element_get_state(element(), &state, &pending, timeout.count()); + + if (change != GST_STATE_CHANGE_SUCCESS && change != GST_STATE_CHANGE_NO_PREROLL) { + qWarning() << "Could not finish change state of" << name() << change << state << pending; + dumpPipelineGraph("finishStateChangeFailure"); + } + return change == GST_STATE_CHANGE_SUCCESS; +} + +void QGstElement::lockState(bool locked) +{ + gst_element_set_locked_state(element(), locked); +} + +bool QGstElement::isStateLocked() const +{ + return gst_element_is_locked_state(element()); +} + +void QGstElement::sendEvent(GstEvent *event) const +{ + gst_element_send_event(element(), event); +} + +void QGstElement::sendEos() const +{ + sendEvent(gst_event_new_eos()); +} + +std::optional<std::chrono::nanoseconds> QGstElement::duration() const +{ + gint64 d; + if (!gst_element_query_duration(element(), GST_FORMAT_TIME, &d)) { + qDebug() << "QGstElement: failed to query duration"; + return std::nullopt; + } + return std::chrono::nanoseconds{ d }; +} + +std::optional<std::chrono::milliseconds> QGstElement::durationInMs() const +{ + using namespace std::chrono; + auto dur = duration(); + if (dur) + return round<milliseconds>(*dur); 
+ return std::nullopt; +} + +std::optional<std::chrono::nanoseconds> QGstElement::position() const +{ + QGstQueryHandle &query = positionQuery(); + + gint64 pos; + if (gst_element_query(element(), query.get())) { + gst_query_parse_position(query.get(), nullptr, &pos); + return std::chrono::nanoseconds{ pos }; + } + + qDebug() << "QGstElement: failed to query position"; + return std::nullopt; +} + +std::optional<std::chrono::milliseconds> QGstElement::positionInMs() const +{ + using namespace std::chrono; + auto pos = position(); + if (pos) + return round<milliseconds>(*pos); + return std::nullopt; +} + +std::optional<bool> QGstElement::canSeek() const +{ + QGstQueryHandle query{ + gst_query_new_seeking(GST_FORMAT_TIME), + QGstQueryHandle::HasRef, + }; + gboolean canSeek = false; + gst_query_parse_seeking(query.get(), nullptr, &canSeek, nullptr, nullptr); + + if (gst_element_query(element(), query.get())) { + gst_query_parse_seeking(query.get(), nullptr, &canSeek, nullptr, nullptr); + return canSeek; + } + return std::nullopt; +} + +GstClockTime QGstElement::baseTime() const +{ + return gst_element_get_base_time(element()); +} + +void QGstElement::setBaseTime(GstClockTime time) const +{ + gst_element_set_base_time(element(), time); +} + +GstElement *QGstElement::element() const +{ + return GST_ELEMENT_CAST(get()); +} + +QGstElement QGstElement::getParent() const +{ + return QGstElement{ + qGstCheckedCast<GstElement>(gst_element_get_parent(object())), + QGstElement::HasRef, + }; +} + +QGstPipeline QGstElement::getPipeline() const +{ + QGstElement ancestor = *this; + for (;;) { + QGstElement greatAncestor = ancestor.getParent(); + if (greatAncestor) { + ancestor = std::move(greatAncestor); + continue; + } + + return QGstPipeline{ + qGstSafeCast<GstPipeline>(ancestor.element()), + QGstPipeline::NeedsRef, + }; + } +} + +void QGstElement::dumpPipelineGraph(const char *filename) const +{ + static const bool dumpEnabled = 
qEnvironmentVariableIsSet("GST_DEBUG_DUMP_DOT_DIR"); + if (dumpEnabled) { + QGstPipeline pipeline = getPipeline(); + if (pipeline) + pipeline.dumpGraph(filename); + } +} + +QGstQueryHandle &QGstElement::positionQuery() const +{ + if (Q_UNLIKELY(!m_positionQuery)) + m_positionQuery = QGstQueryHandle{ + gst_query_new_position(GST_FORMAT_TIME), + QGstQueryHandle::HasRef, + }; + + return m_positionQuery; +} + +// QGstBin + +QGstBin QGstBin::create(const char *name) +{ + return QGstBin(gst_bin_new(name), NeedsRef); +} + +QGstBin QGstBin::createFromFactory(const char *factory, const char *name) +{ + QGstElement element = QGstElement::createFromFactory(factory, name); + Q_ASSERT(GST_IS_BIN(element.element())); + return QGstBin{ + GST_BIN(element.release()), + RefMode::HasRef, + }; +} + +QGstBin QGstBin::createFromPipelineDescription(const QByteArray &pipelineDescription, + const char *name, bool ghostUnlinkedPads) +{ + return createFromPipelineDescription(pipelineDescription.constData(), name, ghostUnlinkedPads); +} + +QGstBin QGstBin::createFromPipelineDescription(const char *pipelineDescription, const char *name, + bool ghostUnlinkedPads) +{ + QUniqueGErrorHandle error; + + GstElement *element = + gst_parse_bin_from_description_full(pipelineDescription, ghostUnlinkedPads, + /*context=*/nullptr, GST_PARSE_FLAG_NONE, &error); + + if (!element) { + qWarning() << "Failed to make element from pipeline description" << pipelineDescription + << error; + return QGstBin{}; + } + + if (name) + gst_element_set_name(element, name); + + return QGstBin{ + element, + NeedsRef, + }; +} + +QGstBin::QGstBin(GstBin *bin, RefMode mode) + : QGstElement{ + qGstCheckedCast<GstElement>(bin), + mode, + } +{ +} + +GstBin *QGstBin::bin() const +{ + return qGstCheckedCast<GstBin>(object()); +} + +void QGstBin::addGhostPad(const QGstElement &child, const char *name) +{ + addGhostPad(name, child.staticPad(name)); +} + +void QGstBin::addGhostPad(const char *name, const QGstPad &pad) +{ + 
gst_element_add_pad(element(), gst_ghost_pad_new(name, pad.pad())); +} + +bool QGstBin::syncChildrenState() +{ + return gst_bin_sync_children_states(bin()); +} + +void QGstBin::dumpGraph(const char *fileNamePrefix) const +{ + if (isNull()) + return; + + GST_DEBUG_BIN_TO_DOT_FILE(bin(), GST_DEBUG_GRAPH_SHOW_VERBOSE, fileNamePrefix); +} + +QGstElement QGstBin::findByName(const char *name) +{ + return QGstElement{ + gst_bin_get_by_name(bin(), name), + QGstElement::NeedsRef, + }; +} + +// QGstBaseSink + +QGstBaseSink::QGstBaseSink(GstBaseSink *element, RefMode mode) + : QGstElement{ + qGstCheckedCast<GstElement>(element), + mode, + } +{ +} + +void QGstBaseSink::setSync(bool arg) +{ + gst_base_sink_set_sync(baseSink(), arg ? TRUE : FALSE); +} + +GstBaseSink *QGstBaseSink::baseSink() const +{ + return qGstCheckedCast<GstBaseSink>(element()); +} + +// QGstBaseSrc + +QGstBaseSrc::QGstBaseSrc(GstBaseSrc *element, RefMode mode) + : QGstElement{ + qGstCheckedCast<GstElement>(element), + mode, + } +{ +} + +GstBaseSrc *QGstBaseSrc::baseSrc() const +{ + return qGstCheckedCast<GstBaseSrc>(element()); +} + +// QGstAppSink + +QGstAppSink::QGstAppSink(GstAppSink *element, RefMode mode) + : QGstBaseSink{ + qGstCheckedCast<GstBaseSink>(element), + mode, + } +{ +} + +QGstAppSink QGstAppSink::create(const char *name) +{ + QGstElement created = QGstElement::createFromFactory("appsink", name); + return QGstAppSink{ + qGstCheckedCast<GstAppSink>(created.element()), + QGstAppSink::NeedsRef, + }; +} + +GstAppSink *QGstAppSink::appSink() const +{ + return qGstCheckedCast<GstAppSink>(element()); +} + +# if GST_CHECK_VERSION(1, 24, 0) +void QGstAppSink::setMaxBufferTime(std::chrono::nanoseconds ns) +{ + gst_app_sink_set_max_time(appSink(), qGstClockTimeFromChrono(ns)); +} +# endif + +void QGstAppSink::setMaxBuffers(int n) +{ + gst_app_sink_set_max_buffers(appSink(), n); +} + +void QGstAppSink::setCaps(const QGstCaps &caps) +{ + gst_app_sink_set_caps(appSink(), caps.caps()); +} + +void 
QGstAppSink::setCallbacks(GstAppSinkCallbacks &callbacks, gpointer user_data, + GDestroyNotify notify) +{ + gst_app_sink_set_callbacks(appSink(), &callbacks, user_data, notify); +} + +QGstSampleHandle QGstAppSink::pullSample() +{ + return QGstSampleHandle{ + gst_app_sink_pull_sample(appSink()), + QGstSampleHandle::HasRef, + }; +} + +// QGstAppSrc + +QGstAppSrc::QGstAppSrc(GstAppSrc *element, RefMode mode) + : QGstBaseSrc{ + qGstCheckedCast<GstBaseSrc>(element), + mode, + } +{ +} + +QGstAppSrc QGstAppSrc::create(const char *name) +{ + QGstElement created = QGstElement::createFromFactory("appsrc", name); + return QGstAppSrc{ + qGstCheckedCast<GstAppSrc>(created.element()), + QGstAppSrc::NeedsRef, + }; +} + +GstAppSrc *QGstAppSrc::appSrc() const +{ + return qGstCheckedCast<GstAppSrc>(element()); +} + +void QGstAppSrc::setCallbacks(GstAppSrcCallbacks &callbacks, gpointer user_data, + GDestroyNotify notify) +{ + gst_app_src_set_callbacks(appSrc(), &callbacks, user_data, notify); +} + +GstFlowReturn QGstAppSrc::pushBuffer(GstBuffer *buffer) +{ + return gst_app_src_push_buffer(appSrc(), buffer); +} + +QString qGstErrorMessageCannotFindElement(std::string_view element) +{ + return QStringLiteral("Could not find the %1 GStreamer element") + .arg(QLatin1StringView(element)); +} + +QT_END_NAMESPACE diff --git a/src/plugins/multimedia/gstreamer/common/qgst_debug.cpp b/src/plugins/multimedia/gstreamer/common/qgst_debug.cpp new file mode 100644 index 000000000..e47515d2d --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qgst_debug.cpp @@ -0,0 +1,573 @@ +// Copyright (C) 2024 The Qt Company Ltd. 
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#include "qgst_debug_p.h" +#include "qgstreamermessage_p.h" + +#include <gst/gstclock.h> + +QT_BEGIN_NAMESPACE + +// NOLINTBEGIN(performance-unnecessary-value-param) + +QDebug operator<<(QDebug dbg, const QGString &str) +{ + return dbg << str.get(); +} + +QDebug operator<<(QDebug dbg, const QGstCaps &caps) +{ + return dbg << caps.caps(); +} + +QDebug operator<<(QDebug dbg, const QGstStructureView &structure) +{ + return dbg << structure.structure; +} + +QDebug operator<<(QDebug dbg, const QGValue &value) +{ + return dbg << value.value; +} + +QDebug operator<<(QDebug dbg, const QGstreamerMessage &msg) +{ + return dbg << msg.message(); +} + +QDebug operator<<(QDebug dbg, const QUniqueGErrorHandle &handle) +{ + return dbg << handle.get(); +} + +QDebug operator<<(QDebug dbg, const QUniqueGStringHandle &handle) +{ + return dbg << handle.get(); +} + +QDebug operator<<(QDebug dbg, const QGstStreamCollectionHandle &handle) +{ + return dbg << handle.get(); +} + +QDebug operator<<(QDebug dbg, const QGstStreamHandle &handle) +{ + return dbg << handle.get(); +} + +QDebug operator<<(QDebug dbg, const QGstTagListHandle &handle) +{ + return dbg << handle.get(); +} + +QDebug operator<<(QDebug dbg, const QGstElement &element) +{ + return dbg << element.element(); +} + +QDebug operator<<(QDebug dbg, const QGstPad &pad) +{ + return dbg << pad.pad(); +} + +QDebug operator<<(QDebug dbg, const GstCaps *caps) +{ + if (caps) + return dbg << QGString(gst_caps_to_string(caps)); + else + return dbg << "null"; +} + +QDebug operator<<(QDebug dbg, const GstVideoInfo *info) +{ +#if GST_CHECK_VERSION(1, 20, 0) + return dbg << QGstCaps{ + gst_video_info_to_caps(info), + QGstCaps::NeedsRef, + }; +#else + return dbg << QGstCaps{ + gst_video_info_to_caps(const_cast<GstVideoInfo *>(info)), + QGstCaps::NeedsRef, + }; +#endif +} + +QDebug operator<<(QDebug dbg, const GstStructure *structure) +{ + if 
(structure) + return dbg << QGString(gst_structure_to_string(structure)); + else + return dbg << "null"; +} + +QDebug operator<<(QDebug dbg, const GstObject *object) +{ + dbg << QGString{gst_object_get_name(const_cast<GstObject*>(object))}; + + { + QDebugStateSaver saver(dbg); + dbg.nospace(); + + dbg << "{"; + + guint numProperties; + GParamSpec **properties = g_object_class_list_properties(G_OBJECT_GET_CLASS(object), &numProperties); + + for (guint i = 0; i < numProperties; i++) { + GParamSpec *param = properties[i]; + + const gchar *name = g_param_spec_get_name(param); + constexpr bool trace_blurb = false; + if constexpr (trace_blurb) { + const gchar *blurb = g_param_spec_get_blurb(param); + dbg << name << " (" << blurb << "): "; + } else + dbg << name << ": "; + + bool readable = bool(param->flags & G_PARAM_READABLE); + if (!readable) { + dbg << "(not readable)"; + } else if (QLatin1StringView(name) == QLatin1StringView("parent")) { + if (object->parent) + dbg << QGString{ gst_object_get_name(object->parent) }; + else + dbg << "(none)"; + } else { + GValue value = {}; + g_object_get_property(&const_cast<GstObject *>(object)->object, param->name, + &value); + dbg << &value; + } + if (i != numProperties - 1) + dbg << ", "; + } + + dbg << "}"; + + g_free(properties); + } + return dbg; +} + +QDebug operator<<(QDebug dbg, const GstElement *element) +{ + return dbg << GST_OBJECT_CAST(element); // LATER: output other members? +} + +QDebug operator<<(QDebug dbg, const GstPad *pad) +{ + return dbg << GST_OBJECT_CAST(pad); // LATER: output other members? 
+} + +QDebug operator<<(QDebug dbg, const GstDevice *device) +{ + GstDevice *d = const_cast<GstDevice *>(device); + QDebugStateSaver saver(dbg); + dbg.nospace(); + + dbg << gst_device_get_display_name(d) << "(" << gst_device_get_device_class(d) << ") "; + dbg << "Caps: " << QGstCaps{ gst_device_get_caps(d), QGstCaps::NeedsRef, } << ", "; + dbg << "Properties: " << QUniqueGstStructureHandle{ gst_device_get_properties(d) }.get(); + return dbg; +} + +namespace { + +struct Timepoint +{ + explicit Timepoint(guint64 us) : ts{ us } { } + guint64 ts; +}; + +QDebug operator<<(QDebug dbg, Timepoint ts) +{ + char buffer[128]; + snprintf(buffer, sizeof(buffer), "%" GST_TIME_FORMAT, GST_TIME_ARGS(ts.ts)); + dbg << buffer; + return dbg; +} + +} // namespace + +QDebug operator<<(QDebug dbg, const GstMessage *msg) +{ + QDebugStateSaver saver(dbg); + dbg.nospace(); + + dbg << GST_MESSAGE_TYPE_NAME(msg) << ", Source: " << GST_MESSAGE_SRC_NAME(msg); + if (GST_MESSAGE_TIMESTAMP(msg) != 0xFFFFFFFFFFFFFFFF) + dbg << ", Timestamp: " << GST_MESSAGE_TIMESTAMP(msg); + + switch (msg->type) { + case GST_MESSAGE_ERROR: { + QUniqueGErrorHandle err; + QGString debug; + gst_message_parse_error(const_cast<GstMessage *>(msg), &err, &debug); + + dbg << ", Error: " << err << " (" << debug << ")"; + break; + } + + case GST_MESSAGE_WARNING: { + QUniqueGErrorHandle err; + QGString debug; + gst_message_parse_warning(const_cast<GstMessage *>(msg), &err, &debug); + + dbg << ", Warning: " << err << " (" << debug << ")"; + break; + } + + case GST_MESSAGE_INFO: { + QUniqueGErrorHandle err; + QGString debug; + gst_message_parse_info(const_cast<GstMessage *>(msg), &err, &debug); + + dbg << ", Info: " << err << " (" << debug << ")"; + break; + } + + case GST_MESSAGE_TAG: { + QGstTagListHandle tagList; + gst_message_parse_tag(const_cast<GstMessage *>(msg), &tagList); + + dbg << ", Tags: " << tagList; + break; + } + + case GST_MESSAGE_QOS: { + gboolean live; + guint64 running_time; + guint64 stream_time; + guint64 
timestamp; + guint64 duration; + + gst_message_parse_qos(const_cast<GstMessage *>(msg), &live, &running_time, &stream_time, + ×tamp, &duration); + + dbg << ", Live: " << bool(live) << ", Running time: " << Timepoint{ running_time } + << ", Stream time: " << Timepoint{ stream_time } + << ", Timestamp: " << Timepoint{ timestamp } << ", Duration: " << Timepoint{ duration }; + break; + } + + case GST_MESSAGE_STATE_CHANGED: { + GstState oldState; + GstState newState; + GstState pending; + + gst_message_parse_state_changed(const_cast<GstMessage *>(msg), &oldState, &newState, + &pending); + + dbg << ", Transition: " << oldState << "->" << newState; + + if (pending != GST_STATE_VOID_PENDING) + dbg << ", Pending State: " << pending; + break; + } + + case GST_MESSAGE_STREAM_COLLECTION: { + QGstStreamCollectionHandle collection; + gst_message_parse_stream_collection(const_cast<GstMessage *>(msg), &collection); + + dbg << ", " << collection; + break; + } + + case GST_MESSAGE_STREAMS_SELECTED: { + QGstStreamCollectionHandle collection; + gst_message_parse_streams_selected(const_cast<GstMessage *>(msg), &collection); + + dbg << ", " << collection; + break; + } + + case GST_MESSAGE_STREAM_STATUS: { + GstStreamStatusType streamStatus; + gst_message_parse_stream_status(const_cast<GstMessage *>(msg), &streamStatus, nullptr); + + dbg << ", Stream Status: " << streamStatus; + break; + } + + case GST_MESSAGE_BUFFERING: { + int progress = 0; + gst_message_parse_buffering(const_cast<GstMessage *>(msg), &progress); + + dbg << ", Buffering: " << progress << "%"; + break; + } + + default: + break; + } + return dbg; +} + +QDebug operator<<(QDebug dbg, const GstTagList *tagList) +{ + dbg << QGString{ gst_tag_list_to_string(tagList) }; + return dbg; +} + +QDebug operator<<(QDebug dbg, const GstQuery *query) +{ + dbg << GST_QUERY_TYPE_NAME(query); + return dbg; +} + +QDebug operator<<(QDebug dbg, const GstEvent *event) +{ + dbg << GST_EVENT_TYPE_NAME(event); + return dbg; +} + +QDebug 
operator<<(QDebug dbg, const GstPadTemplate *padTemplate) +{ + QGstCaps caps = padTemplate + ? QGstCaps{ gst_pad_template_get_caps(const_cast<GstPadTemplate *>(padTemplate)), QGstCaps::HasRef, } + : QGstCaps{}; + + dbg << caps; + return dbg; +} + +QDebug operator<<(QDebug dbg, const GstStreamCollection *streamCollection) +{ + GstStreamCollection *collection = const_cast<GstStreamCollection *>(streamCollection); + guint size = gst_stream_collection_get_size(collection); + + dbg << "Stream Collection: {"; + for (guint index = 0; index != size; ++index) { + dbg << gst_stream_collection_get_stream(collection, index); + if (index + 1 != size) + dbg << ", "; + } + + dbg << "}"; + return dbg; +} + +QDebug operator<<(QDebug dbg, const GstStream *cstream) +{ + GstStream *stream = const_cast<GstStream *>(cstream); + + dbg << "GstStream { "; + dbg << "Type: " << gst_stream_type_get_name(gst_stream_get_stream_type(stream)); + + QGstTagListHandle tagList{ + gst_stream_get_tags(stream), + QGstTagListHandle::HasRef, + }; + + if (tagList) + dbg << ", Tags: " << tagList; + + QGstCaps caps{ + gst_stream_get_caps(stream), + QGstCaps::HasRef, + }; + + if (caps) + dbg << ", Caps: " << caps; + + dbg << "}"; + + return dbg; +} + +QDebug operator<<(QDebug dbg, GstState state) +{ + return dbg << gst_element_state_get_name(state); +} + +QDebug operator<<(QDebug dbg, GstStateChange transition) +{ + return dbg << gst_state_change_get_name(transition); +} + +QDebug operator<<(QDebug dbg, GstStateChangeReturn stateChangeReturn) +{ + return dbg << gst_element_state_change_return_get_name(stateChangeReturn); +} + +QDebug operator<<(QDebug dbg, GstMessageType type) +{ + return dbg << gst_message_type_get_name(type); +} + +#define ADD_ENUM_SWITCH(value) \ + case value: \ + return dbg << #value; \ + static_assert(true, "enforce semicolon") + +QDebug operator<<(QDebug dbg, GstPadDirection direction) +{ + switch (direction) { + ADD_ENUM_SWITCH(GST_PAD_UNKNOWN); + ADD_ENUM_SWITCH(GST_PAD_SRC); + 
ADD_ENUM_SWITCH(GST_PAD_SINK); + default: + Q_UNREACHABLE_RETURN(dbg); + } +} + +QDebug operator<<(QDebug dbg, GstStreamStatusType type) +{ + switch (type) { + ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_CREATE); + ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_ENTER); + ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_LEAVE); + ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_DESTROY); + ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_START); + ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_PAUSE); + ADD_ENUM_SWITCH(GST_STREAM_STATUS_TYPE_STOP); + default: + Q_UNREACHABLE_RETURN(dbg); + } + return dbg; +} + +#undef ADD_ENUM_SWITCH + +QDebug operator<<(QDebug dbg, const GValue *value) +{ + switch (G_VALUE_TYPE(value)) { + case G_TYPE_STRING: + return dbg << g_value_get_string(value); + case G_TYPE_BOOLEAN: + return dbg << g_value_get_boolean(value); + case G_TYPE_ULONG: + return dbg << g_value_get_ulong(value); + case G_TYPE_LONG: + return dbg << g_value_get_long(value); + case G_TYPE_UINT: + return dbg << g_value_get_uint(value); + case G_TYPE_INT: + return dbg << g_value_get_int(value); + case G_TYPE_UINT64: + return dbg << g_value_get_uint64(value); + case G_TYPE_INT64: + return dbg << g_value_get_int64(value); + case G_TYPE_FLOAT: + return dbg << g_value_get_float(value); + case G_TYPE_DOUBLE: + return dbg << g_value_get_double(value); + default: + break; + } + + if (GST_VALUE_HOLDS_BITMASK(value)) { + QDebugStateSaver saver(dbg); + return dbg << Qt::hex << gst_value_get_bitmask(value); + } + + if (GST_VALUE_HOLDS_FRACTION(value)) + return dbg << gst_value_get_fraction_numerator(value) << "/" + << gst_value_get_fraction_denominator(value); + + if (GST_VALUE_HOLDS_CAPS(value)) + return dbg << gst_value_get_caps(value); + + if (GST_VALUE_HOLDS_STRUCTURE(value)) + return dbg << gst_value_get_structure(value); + + if (GST_VALUE_HOLDS_ARRAY(value)) { + const guint size = gst_value_array_get_size(value); + const guint last = size - 1; + dbg << "["; + for (guint index = 0; index != size; ++index) { + dbg << 
gst_value_array_get_value(value, index); + if (index != last) + dbg << ", "; + } + dbg << "}"; + return dbg; + } + + if (G_VALUE_TYPE(value) == GST_TYPE_PAD_DIRECTION) { + GstPadDirection direction = static_cast<GstPadDirection>(g_value_get_enum(value)); + return dbg << direction; + } + + if (G_VALUE_TYPE(value) == GST_TYPE_PAD_TEMPLATE) { + GstPadTemplate *padTemplate = static_cast<GstPadTemplate *>(g_value_get_object(value)); + return dbg << padTemplate; + } + + dbg << "(not implemented: " << G_VALUE_TYPE_NAME(value) << ")"; + + return dbg; +} + +QDebug operator<<(QDebug dbg, const GError *error) +{ + return dbg << error->message; +} + +QCompactGstMessageAdaptor::QCompactGstMessageAdaptor(const QGstreamerMessage &m) + : QCompactGstMessageAdaptor{ + m.message(), + } +{ +} + +QCompactGstMessageAdaptor::QCompactGstMessageAdaptor(GstMessage *m) + : msg{ + m, + } +{ +} + +QDebug operator<<(QDebug dbg, const QCompactGstMessageAdaptor &m) +{ + std::optional<QDebugStateSaver> saver(dbg); + dbg.nospace(); + + switch (GST_MESSAGE_TYPE(m.msg)) { + case GST_MESSAGE_ERROR: { + QUniqueGErrorHandle err; + QGString debug; + gst_message_parse_error(m.msg, &err, &debug); + dbg << err << " (" << debug << ")"; + return dbg; + } + + case GST_MESSAGE_WARNING: { + QUniqueGErrorHandle err; + QGString debug; + gst_message_parse_warning(m.msg, &err, &debug); + dbg << err << " (" << debug << ")"; + return dbg; + } + + case GST_MESSAGE_INFO: { + QUniqueGErrorHandle err; + QGString debug; + gst_message_parse_info(m.msg, &err, &debug); + + dbg << err << " (" << debug << ")"; + return dbg; + } + + case GST_MESSAGE_STATE_CHANGED: { + GstState oldState; + GstState newState; + GstState pending; + + gst_message_parse_state_changed(m.msg, &oldState, &newState, &pending); + + dbg << oldState << " -> " << newState; + if (pending != GST_STATE_VOID_PENDING) + dbg << " (pending: " << pending << ")"; + return dbg; + } + + default: { + saver.reset(); + return dbg << m.msg; + } + } +} + +QT_END_NAMESPACE 
diff --git a/src/plugins/multimedia/gstreamer/common/qgst_debug_p.h b/src/plugins/multimedia/gstreamer/common/qgst_debug_p.h new file mode 100644 index 000000000..df13c6c13 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qgst_debug_p.h @@ -0,0 +1,74 @@ +// Copyright (C) 2024 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#ifndef QGST_DEBUG_P_H +#define QGST_DEBUG_P_H + +// +// W A R N I N G +// ------------- +// +// This file is not part of the Qt API. It exists purely as an +// implementation detail. This header file may change from version to +// version without notice, or even be removed. +// +// We mean it. +// + +#include "qgst_p.h" +#include <qdebug.h> + +QT_BEGIN_NAMESPACE + +class QGstreamerMessage; + +QDebug operator<<(QDebug, const QGstCaps &); +QDebug operator<<(QDebug, const QGstStructureView &); +QDebug operator<<(QDebug, const QGstElement &); +QDebug operator<<(QDebug, const QGstPad &); +QDebug operator<<(QDebug, const QGString &); +QDebug operator<<(QDebug, const QGValue &); +QDebug operator<<(QDebug, const QGstreamerMessage &); +QDebug operator<<(QDebug, const QUniqueGErrorHandle &); +QDebug operator<<(QDebug, const QUniqueGStringHandle &); +QDebug operator<<(QDebug, const QGstStreamCollectionHandle &); +QDebug operator<<(QDebug, const QGstStreamHandle &); +QDebug operator<<(QDebug, const QGstTagListHandle &); + +QDebug operator<<(QDebug, const GstCaps *); +QDebug operator<<(QDebug, const GstVideoInfo *); +QDebug operator<<(QDebug, const GstStructure *); +QDebug operator<<(QDebug, const GstObject *); +QDebug operator<<(QDebug, const GstElement *); +QDebug operator<<(QDebug, const GstPad *); +QDebug operator<<(QDebug, const GstDevice *); +QDebug operator<<(QDebug, const GstMessage *); +QDebug operator<<(QDebug, const GstTagList *); +QDebug operator<<(QDebug, const GstQuery *); +QDebug operator<<(QDebug, const GstEvent *); +QDebug operator<<(QDebug, const 
GstPadTemplate *); +QDebug operator<<(QDebug, const GstStreamCollection *); +QDebug operator<<(QDebug, const GstStream *); + +QDebug operator<<(QDebug, GstState); +QDebug operator<<(QDebug, GstStateChange); +QDebug operator<<(QDebug, GstStateChangeReturn); +QDebug operator<<(QDebug, GstMessageType); +QDebug operator<<(QDebug, GstPadDirection); +QDebug operator<<(QDebug, GstStreamStatusType); + +QDebug operator<<(QDebug, const GValue *); +QDebug operator<<(QDebug, const GError *); + +struct QCompactGstMessageAdaptor +{ + explicit QCompactGstMessageAdaptor(const QGstreamerMessage &m); + explicit QCompactGstMessageAdaptor(GstMessage *m); + GstMessage *msg; +}; + +QDebug operator<<(QDebug, const QCompactGstMessageAdaptor &); + +QT_END_NAMESPACE + +#endif diff --git a/src/plugins/multimedia/gstreamer/common/qgst_handle_types_p.h b/src/plugins/multimedia/gstreamer/common/qgst_handle_types_p.h new file mode 100644 index 000000000..e813f4181 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qgst_handle_types_p.h @@ -0,0 +1,270 @@ +// Copyright (C) 2024 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#ifndef QGST_HANDLE_TYPES_P_H +#define QGST_HANDLE_TYPES_P_H + +// +// W A R N I N G +// ------------- +// +// This file is not part of the Qt API. It exists purely as an +// implementation detail. This header file may change from version to +// version without notice, or even be removed. +// +// We mean it. 
+// + +#include <QtCore/private/qcore_unix_p.h> +#include <QtCore/private/quniquehandle_p.h> +#include <QtCore/qtconfigmacros.h> + +#include <QtMultimedia/private/qtmultimedia-config_p.h> + +#include <gst/gst.h> + +#if QT_CONFIG(gstreamer_gl) +# include <gst/gl/gstglcontext.h> +#endif + +QT_BEGIN_NAMESPACE + +namespace QGstImpl { + +template <typename HandleTraits> +struct QSharedHandle : private QUniqueHandle<HandleTraits> +{ + using BaseClass = QUniqueHandle<HandleTraits>; + + enum RefMode { HasRef, NeedsRef }; + + QSharedHandle() = default; + + explicit QSharedHandle(typename HandleTraits::Type object, RefMode mode) + : BaseClass{ mode == NeedsRef ? HandleTraits::ref(object) : object } + { + } + + QSharedHandle(const QSharedHandle &o) + : BaseClass{ + HandleTraits::ref(o.get()), + } + { + } + + QSharedHandle(QSharedHandle &&) noexcept = default; + + QSharedHandle &operator=(const QSharedHandle &o) // NOLINT: bugprone-unhandled-self-assign + { + if (BaseClass::get() != o.get()) + reset(HandleTraits::ref(o.get())); + return *this; + }; + + QSharedHandle &operator=(QSharedHandle &&) noexcept = default; + + [[nodiscard]] friend bool operator==(const QSharedHandle &lhs, + const QSharedHandle &rhs) noexcept + { + return lhs.get() == rhs.get(); + } + + [[nodiscard]] friend bool operator!=(const QSharedHandle &lhs, + const QSharedHandle &rhs) noexcept + { + return lhs.get() != rhs.get(); + } + + [[nodiscard]] friend bool operator<(const QSharedHandle &lhs, const QSharedHandle &rhs) noexcept + { + return lhs.get() < rhs.get(); + } + + [[nodiscard]] friend bool operator<=(const QSharedHandle &lhs, + const QSharedHandle &rhs) noexcept + { + return lhs.get() <= rhs.get(); + } + + [[nodiscard]] friend bool operator>(const QSharedHandle &lhs, const QSharedHandle &rhs) noexcept + { + return lhs.get() > rhs.get(); + } + + [[nodiscard]] friend bool operator>=(const QSharedHandle &lhs, + const QSharedHandle &rhs) noexcept + { + return lhs.get() >= rhs.get(); + } + + using 
BaseClass::get; + using BaseClass::isValid; + using BaseClass::operator bool; + using BaseClass::release; + using BaseClass::reset; + using BaseClass::operator&; + using BaseClass::close; +}; + +struct QGstTagListHandleTraits +{ + using Type = GstTagList *; + static constexpr Type invalidValue() noexcept { return nullptr; } + static bool close(Type handle) noexcept + { + gst_tag_list_unref(handle); + return true; + } + static Type ref(Type handle) noexcept { return gst_tag_list_ref(handle); } +}; + +struct QGstSampleHandleTraits +{ + using Type = GstSample *; + static constexpr Type invalidValue() noexcept { return nullptr; } + static bool close(Type handle) noexcept + { + gst_sample_unref(handle); + return true; + } + static Type ref(Type handle) noexcept { return gst_sample_ref(handle); } +}; + +struct QUniqueGstStructureHandleTraits +{ + using Type = GstStructure *; + static constexpr Type invalidValue() noexcept { return nullptr; } + static bool close(Type handle) noexcept + { + gst_structure_free(handle); + return true; + } +}; + +struct QUniqueGStringHandleTraits +{ + using Type = gchar *; + static constexpr Type invalidValue() noexcept { return nullptr; } + static bool close(Type handle) noexcept + { + g_free(handle); + return true; + } +}; + +struct QUniqueGErrorHandleTraits +{ + using Type = GError *; + static constexpr Type invalidValue() noexcept { return nullptr; } + static bool close(Type handle) noexcept + { + g_error_free(handle); + return true; + } +}; + + +struct QUniqueGstDateTimeHandleTraits +{ + using Type = GstDateTime *; + static constexpr Type invalidValue() noexcept { return nullptr; } + static bool close(Type handle) noexcept + { + gst_date_time_unref(handle); + return true; + } +}; + +struct QFileDescriptorHandleTraits +{ + using Type = int; + static constexpr Type invalidValue() noexcept { return -1; } + static bool close(Type fd) noexcept + { + int closeResult = qt_safe_close(fd); + return closeResult == 0; + } +}; + +template <typename 
GstType> +struct QGstHandleHelper +{ + struct QGstSafeObjectHandleTraits + { + using Type = GstType *; + static constexpr Type invalidValue() noexcept { return nullptr; } + static bool close(Type handle) noexcept + { + gst_object_unref(G_OBJECT(handle)); + return true; + } + + static Type ref(Type handle) noexcept + { + gst_object_ref_sink(G_OBJECT(handle)); + return handle; + } + }; + + using SharedHandle = QSharedHandle<QGstSafeObjectHandleTraits>; + using UniqueHandle = QUniqueHandle<QGstSafeObjectHandleTraits>; +}; + +template <typename GstType> +struct QGstMiniObjectHandleHelper +{ + struct Traits + { + using Type = GstType *; + static constexpr Type invalidValue() noexcept { return nullptr; } + static bool close(Type handle) noexcept + { + gst_mini_object_unref(GST_MINI_OBJECT_CAST(handle)); + return true; + } + + static Type ref(Type handle) noexcept + { + if (GST_MINI_OBJECT_CAST(handle)) + gst_mini_object_ref(GST_MINI_OBJECT_CAST(handle)); + return handle; + } + }; + + using SharedHandle = QSharedHandle<Traits>; + using UniqueHandle = QUniqueHandle<Traits>; +}; + +} // namespace QGstImpl + +using QGstClockHandle = QGstImpl::QGstHandleHelper<GstClock>::UniqueHandle; +using QGstElementHandle = QGstImpl::QGstHandleHelper<GstElement>::UniqueHandle; +using QGstElementFactoryHandle = QGstImpl::QGstHandleHelper<GstElementFactory>::SharedHandle; +using QGstDeviceHandle = QGstImpl::QGstHandleHelper<GstDevice>::SharedHandle; +using QGstDeviceMonitorHandle = QGstImpl::QGstHandleHelper<GstDeviceMonitor>::UniqueHandle; +using QGstBusHandle = QGstImpl::QGstHandleHelper<GstBus>::UniqueHandle; +using QGstStreamCollectionHandle = QGstImpl::QGstHandleHelper<GstStreamCollection>::SharedHandle; +using QGstStreamHandle = QGstImpl::QGstHandleHelper<GstStream>::SharedHandle; + +using QGstTagListHandle = QGstImpl::QSharedHandle<QGstImpl::QGstTagListHandleTraits>; +using QGstSampleHandle = QGstImpl::QSharedHandle<QGstImpl::QGstSampleHandleTraits>; + +using 
QUniqueGstStructureHandle = QUniqueHandle<QGstImpl::QUniqueGstStructureHandleTraits>; +using QUniqueGStringHandle = QUniqueHandle<QGstImpl::QUniqueGStringHandleTraits>; +using QUniqueGErrorHandle = QUniqueHandle<QGstImpl::QUniqueGErrorHandleTraits>; +using QUniqueGstDateTimeHandle = QUniqueHandle<QGstImpl::QUniqueGstDateTimeHandleTraits>; +using QFileDescriptorHandle = QUniqueHandle<QGstImpl::QFileDescriptorHandleTraits>; +using QGstBufferHandle = QGstImpl::QGstMiniObjectHandleHelper<GstBuffer>::SharedHandle; +using QGstContextHandle = QGstImpl::QGstMiniObjectHandleHelper<GstContext>::UniqueHandle; +using QGstGstDateTimeHandle = QGstImpl::QGstMiniObjectHandleHelper<GstDateTime>::SharedHandle; +using QGstPluginFeatureHandle = QGstImpl::QGstHandleHelper<GstPluginFeature>::SharedHandle; +using QGstQueryHandle = QGstImpl::QGstMiniObjectHandleHelper<GstQuery>::SharedHandle; + +#if QT_CONFIG(gstreamer_gl) +using QGstGLContextHandle = QGstImpl::QGstHandleHelper<GstGLContext>::UniqueHandle; +using QGstGLDisplayHandle = QGstImpl::QGstHandleHelper<GstGLDisplay>::UniqueHandle; +#endif + +QT_END_NAMESPACE + +#endif diff --git a/src/plugins/multimedia/gstreamer/common/qgst_p.h b/src/plugins/multimedia/gstreamer/common/qgst_p.h new file mode 100644 index 000000000..bf5290d5d --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qgst_p.h @@ -0,0 +1,847 @@ +// Copyright (C) 2016 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#ifndef QGST_P_H +#define QGST_P_H + +// +// W A R N I N G +// ------------- +// +// This file is not part of the Qt API. It exists purely as an +// implementation detail. This header file may change from version to +// version without notice, or even be removed. +// +// We mean it. 
+// + +#include <QtCore/qdebug.h> +#include <QtCore/qlist.h> +#include <QtCore/qsemaphore.h> + +#include <QtMultimedia/qaudioformat.h> +#include <QtMultimedia/qvideoframe.h> +#include <QtMultimedia/private/qtmultimediaglobal_p.h> +#include <QtMultimedia/private/qmultimediautils_p.h> +#include <QtMultimedia/private/qplatformmediaplayer_p.h> + +#include <gst/gst.h> +#include <gst/app/gstappsink.h> +#include <gst/app/gstappsrc.h> +#include <gst/video/video-info.h> + +#include "qgst_handle_types_p.h" + +#include <type_traits> + +#if QT_CONFIG(gstreamer_photography) +# define GST_USE_UNSTABLE_API +# include <gst/interfaces/photography.h> +# undef GST_USE_UNSTABLE_API +#endif + + +QT_BEGIN_NAMESPACE + +namespace QGstImpl { + +template <typename T> +struct GstObjectTraits +{ + // using Type = T; + // template <typename U> + // static bool isObjectOfType(U *); + // template <typename U> + // static T *cast(U *); +}; + +#define QGST_DEFINE_CAST_TRAITS(ClassName, MACRO_LABEL) \ + template <> \ + struct GstObjectTraits<ClassName> \ + { \ + using Type = ClassName; \ + template <typename U> \ + static bool isObjectOfType(U *arg) \ + { \ + return GST_IS_##MACRO_LABEL(arg); \ + } \ + template <typename U> \ + static Type *cast(U *arg) \ + { \ + return GST_##MACRO_LABEL##_CAST(arg); \ + } \ + template <typename U> \ + static Type *checked_cast(U *arg) \ + { \ + return GST_##MACRO_LABEL(arg); \ + } \ + }; \ + static_assert(true, "ensure semicolon") + +#define QGST_DEFINE_CAST_TRAITS_FOR_INTERFACE(ClassName, MACRO_LABEL) \ + template <> \ + struct GstObjectTraits<ClassName> \ + { \ + using Type = ClassName; \ + template <typename U> \ + static bool isObjectOfType(U *arg) \ + { \ + return GST_IS_##MACRO_LABEL(arg); \ + } \ + template <typename U> \ + static Type *cast(U *arg) \ + { \ + return checked_cast(arg); \ + } \ + template <typename U> \ + static Type *checked_cast(U *arg) \ + { \ + return GST_##MACRO_LABEL(arg); \ + } \ + }; \ + static_assert(true, "ensure semicolon") + 
+QGST_DEFINE_CAST_TRAITS(GstBin, BIN); +QGST_DEFINE_CAST_TRAITS(GstClock, CLOCK); +QGST_DEFINE_CAST_TRAITS(GstElement, ELEMENT); +QGST_DEFINE_CAST_TRAITS(GstObject, OBJECT); +QGST_DEFINE_CAST_TRAITS(GstPad, PAD); +QGST_DEFINE_CAST_TRAITS(GstPipeline, PIPELINE); +QGST_DEFINE_CAST_TRAITS(GstBaseSink, BASE_SINK); +QGST_DEFINE_CAST_TRAITS(GstBaseSrc, BASE_SRC); +QGST_DEFINE_CAST_TRAITS(GstAppSink, APP_SINK); +QGST_DEFINE_CAST_TRAITS(GstAppSrc, APP_SRC); + +QGST_DEFINE_CAST_TRAITS_FOR_INTERFACE(GstTagSetter, TAG_SETTER); + + +template <> +struct GstObjectTraits<GObject> +{ + using Type = GObject; + template <typename U> + static bool isObjectOfType(U *arg) + { + return G_IS_OBJECT(arg); + } + template <typename U> + static Type *cast(U *arg) + { + return G_OBJECT(arg); + } + template <typename U> + static Type *checked_cast(U *arg) + { + return G_OBJECT(arg); + } +}; + +#undef QGST_DEFINE_CAST_TRAITS +#undef QGST_DEFINE_CAST_TRAITS_FOR_INTERFACE + +} // namespace QGstImpl + +template <typename DestinationType, typename SourceType> +bool qIsGstObjectOfType(SourceType *arg) +{ + using Traits = QGstImpl::GstObjectTraits<DestinationType>; + return arg && Traits::isObjectOfType(arg); +} + +template <typename DestinationType, typename SourceType> +DestinationType *qGstSafeCast(SourceType *arg) +{ + using Traits = QGstImpl::GstObjectTraits<DestinationType>; + if (arg && Traits::isObjectOfType(arg)) + return Traits::cast(arg); + return nullptr; +} + +template <typename DestinationType, typename SourceType> +DestinationType *qGstCheckedCast(SourceType *arg) +{ + using Traits = QGstImpl::GstObjectTraits<DestinationType>; + if (arg) + Q_ASSERT(Traits::isObjectOfType(arg)); + return Traits::cast(arg); +} + +class QSize; +class QGstStructureView; +class QGstCaps; +class QGstPipelinePrivate; +class QCameraFormat; + +template <typename T> struct QGRange +{ + T min; + T max; +}; + +struct QGString : QUniqueGStringHandle +{ + using QUniqueGStringHandle::QUniqueGStringHandle; + + 
QLatin1StringView asStringView() const { return QLatin1StringView{ get() }; } + QString toQString() const { return QString::fromUtf8(get()); } +}; + +class QGValue +{ +public: + explicit QGValue(const GValue *v); + const GValue *value; + + bool isNull() const; + + std::optional<bool> toBool() const; + std::optional<int> toInt() const; + std::optional<int> toInt64() const; + template<typename T> + T *getPointer() const + { + return value ? static_cast<T *>(g_value_get_pointer(value)) : nullptr; + } + + const char *toString() const; + std::optional<float> getFraction() const; + std::optional<QGRange<float>> getFractionRange() const; + std::optional<QGRange<int>> toIntRange() const; + + QGstStructureView toStructure() const; + QGstCaps toCaps() const; + + bool isList() const; + int listSize() const; + QGValue at(int index) const; + + QList<QAudioFormat::SampleFormat> getSampleFormats() const; +}; + +namespace QGstPointerImpl { + +template <typename RefcountedObject> +struct QGstRefcountingAdaptor; + +template <typename GstType> +class QGstObjectWrapper +{ + using Adaptor = QGstRefcountingAdaptor<GstType>; + + GstType *m_object = nullptr; + +public: + enum RefMode { HasRef, NeedsRef }; + + constexpr QGstObjectWrapper() = default; + + explicit QGstObjectWrapper(GstType *object, RefMode mode) : m_object(object) + { + if (m_object && mode == NeedsRef) + Adaptor::ref(m_object); + } + + QGstObjectWrapper(const QGstObjectWrapper &other) : m_object(other.m_object) + { + if (m_object) + Adaptor::ref(m_object); + } + + ~QGstObjectWrapper() + { + if (m_object) + Adaptor::unref(m_object); + } + + QGstObjectWrapper(QGstObjectWrapper &&other) noexcept + : m_object(std::exchange(other.m_object, nullptr)) + { + } + + QGstObjectWrapper & + operator=(const QGstObjectWrapper &other) // NOLINT: bugprone-unhandled-self-assign + { + if (m_object != other.m_object) { + GstType *originalObject = m_object; + + m_object = other.m_object; + if (m_object) + Adaptor::ref(m_object); + if 
(originalObject) + Adaptor::unref(originalObject); + } + return *this; + } + + QGstObjectWrapper &operator=(QGstObjectWrapper &&other) noexcept + { + if (this != &other) { + GstType *originalObject = m_object; + m_object = std::exchange(other.m_object, nullptr); + + if (originalObject) + Adaptor::unref(originalObject); + } + return *this; + } + + friend bool operator==(const QGstObjectWrapper &a, const QGstObjectWrapper &b) + { + return a.m_object == b.m_object; + } + friend bool operator!=(const QGstObjectWrapper &a, const QGstObjectWrapper &b) + { + return a.m_object != b.m_object; + } + + explicit operator bool() const { return bool(m_object); } + bool isNull() const { return !m_object; } + GstType *release() { return std::exchange(m_object, nullptr); } + +protected: + GstType *get() const { return m_object; } +}; + +} // namespace QGstPointerImpl + +class QGstreamerMessage; + +class QGstStructureView +{ +public: + const GstStructure *structure = nullptr; + explicit QGstStructureView(const GstStructure *); + explicit QGstStructureView(const QUniqueGstStructureHandle &); + + QUniqueGstStructureHandle clone() const; + + bool isNull() const; + QByteArrayView name() const; + QGValue operator[](const char *fieldname) const; + + QGstCaps caps() const; + QGstTagListHandle tags() const; + + QSize resolution() const; + QVideoFrameFormat::PixelFormat pixelFormat() const; + QGRange<float> frameRateRange() const; + QGstreamerMessage getMessage(); + std::optional<Fraction> pixelAspectRatio() const; + QSize nativeSize() const; +}; + +template <> +struct QGstPointerImpl::QGstRefcountingAdaptor<GstCaps> +{ + static void ref(GstCaps *arg) noexcept { gst_caps_ref(arg); } + static void unref(GstCaps *arg) noexcept { gst_caps_unref(arg); } +}; + +class QGstCaps : public QGstPointerImpl::QGstObjectWrapper<GstCaps> +{ + using BaseClass = QGstPointerImpl::QGstObjectWrapper<GstCaps>; + +public: + using BaseClass::BaseClass; + QGstCaps(const QGstCaps &) = default; + QGstCaps(QGstCaps 
&&) noexcept = default; + QGstCaps &operator=(const QGstCaps &) = default; + QGstCaps &operator=(QGstCaps &&) noexcept = default; + + enum MemoryFormat { CpuMemory, GLTexture, DMABuf }; + + int size() const; + QGstStructureView at(int index) const; + GstCaps *caps() const; + + MemoryFormat memoryFormat() const; + std::optional<std::pair<QVideoFrameFormat, GstVideoInfo>> formatAndVideoInfo() const; + + void addPixelFormats(const QList<QVideoFrameFormat::PixelFormat> &formats, const char *modifier = nullptr); + void setResolution(QSize); + + static QGstCaps create(); + + static QGstCaps fromCameraFormat(const QCameraFormat &format); + + QGstCaps copy() const; +}; + +template <> +struct QGstPointerImpl::QGstRefcountingAdaptor<GstObject> +{ + static void ref(GstObject *arg) noexcept { gst_object_ref_sink(arg); } + static void unref(GstObject *arg) noexcept { gst_object_unref(arg); } +}; + +class QGObjectHandlerConnection; + +class QGstObject : public QGstPointerImpl::QGstObjectWrapper<GstObject> +{ + using BaseClass = QGstPointerImpl::QGstObjectWrapper<GstObject>; + +public: + using BaseClass::BaseClass; + QGstObject(const QGstObject &) = default; + QGstObject(QGstObject &&) noexcept = default; + + QGstObject &operator=(const QGstObject &) = default; + QGstObject &operator=(QGstObject &&) noexcept = default; + + void set(const char *property, const char *str); + void set(const char *property, bool b); + void set(const char *property, uint i); + void set(const char *property, int i); + void set(const char *property, qint64 i); + void set(const char *property, quint64 i); + void set(const char *property, double d); + void set(const char *property, const QGstObject &o); + void set(const char *property, const QGstCaps &c); + + QGString getString(const char *property) const; + QGstStructureView getStructure(const char *property) const; + bool getBool(const char *property) const; + uint getUInt(const char *property) const; + int getInt(const char *property) const; + quint64 
getUInt64(const char *property) const;
    qint64 getInt64(const char *property) const;
    float getFloat(const char *property) const;
    double getDouble(const char *property) const;
    QGstObject getObject(const char *property) const;

    // Connects a GObject signal; the returned handle can later be used to
    // disconnect the handler again.
    QGObjectHandlerConnection connect(const char *name, GCallback callback, gpointer userData);
    void disconnect(gulong handlerId);

    GType type() const;
    QLatin1StringView typeName() const;
    GstObject *object() const;
    QLatin1StringView name() const;
};

// Value type describing one GObject signal connection so that it can be
// disconnected later. Copyable; copies refer to the same underlying handler.
class QGObjectHandlerConnection
{
public:
    QGObjectHandlerConnection(QGstObject object, gulong handler);

    QGObjectHandlerConnection() = default;
    QGObjectHandlerConnection(const QGObjectHandlerConnection &) = default;
    QGObjectHandlerConnection(QGObjectHandlerConnection &&) = default;
    QGObjectHandlerConnection &operator=(const QGObjectHandlerConnection &) = default;
    QGObjectHandlerConnection &operator=(QGObjectHandlerConnection &&) = default;

    // Disconnects the handler; safe to call on a default-constructed
    // (invalid) connection.
    void disconnect();

private:
    // Sentinel marking a default-constructed or already-disconnected handle.
    static constexpr gulong invalidHandlerId = std::numeric_limits<gulong>::max();

    QGstObject object;
    gulong handlerId = invalidHandlerId;
};

// disconnects in dtor
class QGObjectHandlerScopedConnection
{
public:
    QGObjectHandlerScopedConnection(QGObjectHandlerConnection connection);

    QGObjectHandlerScopedConnection() = default;
    QGObjectHandlerScopedConnection(const QGObjectHandlerScopedConnection &) = delete;
    QGObjectHandlerScopedConnection &operator=(const QGObjectHandlerScopedConnection &) = delete;
    QGObjectHandlerScopedConnection(QGObjectHandlerScopedConnection &&) = default;
    QGObjectHandlerScopedConnection &operator=(QGObjectHandlerScopedConnection &&) = default;

    ~QGObjectHandlerScopedConnection();

    void disconnect();

private:
    QGObjectHandlerConnection connection;
};

class QGstElement;

// Thin RAII/value wrapper around GstPad.
class QGstPad : public QGstObject
{
public:
    using QGstObject::QGstObject;
    QGstPad(const QGstPad &) = default;
    QGstPad(QGstPad &&) noexcept = default;

    explicit QGstPad(const QGstObject &o);
    explicit QGstPad(GstPad *pad, RefMode mode);

    QGstPad &operator=(const QGstPad &) = default;
    QGstPad &operator=(QGstPad &&) noexcept = default;

    QGstCaps currentCaps() const;
    QGstCaps queryCaps() const;

    QGstTagListHandle tags() const;

    std::optional<QPlatformMediaPlayer::TrackType>
    inferTrackTypeFromName() const; // for decodebin3 etc

    bool isLinked() const;
    bool link(const QGstPad &sink) const;
    bool unlink(const QGstPad &sink) const;
    bool unlinkPeer() const;
    QGstPad peer() const;
    QGstElement parent() const;

    GstPad *pad() const;

    GstEvent *stickyEvent(GstEventType type);
    bool sendEvent(GstEvent *event);

    // Installs a pad probe that forwards to member function `Member` of
    // `instance`. The probe stays installed until removed by the callback's
    // return value (no destroy notify is registered).
    template<auto Member, typename T>
    void addProbe(T *instance, GstPadProbeType type) {
        auto callback = [](GstPad *pad, GstPadProbeInfo *info, gpointer userData) {
            return (static_cast<T *>(userData)->*Member)(QGstPad(pad, NeedsRef), info);
        };

        gst_pad_add_probe(pad(), type, callback, instance, nullptr);
    }

    // Runs `work` from an IDLE pad probe (i.e. while the pad is not pushing
    // data) and blocks the calling thread until the probe has executed once.
    // NOTE(review): `cd` lives on this stack frame; correctness relies on the
    // probe firing exactly once before returning — confirm against the
    // GST_PAD_PROBE_TYPE_IDLE contract.
    template <typename Functor>
    void doInIdleProbe(Functor &&work)
    {
        struct CallbackData {
            QSemaphore waitDone;
            Functor work;
        };

        CallbackData cd{
            .waitDone = QSemaphore{},
            .work = std::forward<Functor>(work),
        };

        auto callback= [](GstPad *, GstPadProbeInfo *, gpointer p) {
            auto cd = reinterpret_cast<CallbackData*>(p);
            cd->work();
            cd->waitDone.release();
            return GST_PAD_PROBE_REMOVE;
        };

        gst_pad_add_probe(pad(), GST_PAD_PROBE_TYPE_IDLE, callback, &cd, nullptr);
        cd.waitDone.acquire();
    }

    // One-shot probe: invokes `Member` on `instance` when an EOS event
    // reaches this pad, then removes itself. Non-EOS events pass through.
    template<auto Member, typename T>
    void addEosProbe(T *instance) {
        auto callback = [](GstPad *, GstPadProbeInfo *info, gpointer userData) {
            if (GST_EVENT_TYPE(GST_PAD_PROBE_INFO_DATA(info)) != GST_EVENT_EOS)
                return GST_PAD_PROBE_PASS;
            (static_cast<T *>(userData)->*Member)();
            return GST_PAD_PROBE_REMOVE;
        };

        gst_pad_add_probe(pad(), GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, callback, instance, nullptr);
    }
};

// Thin wrapper around GstClock.
class QGstClock : public QGstObject
{
public:
    QGstClock() = default;
    explicit QGstClock(const QGstObject &o);
    explicit QGstClock(GstClock *clock, RefMode mode);

    GstClock *clock() const;
    GstClockTime time() const;
};

class QGstPipeline;

// Thin wrapper around GstElement: factory helpers, pad access, state
// management and signal-connection helpers.
class QGstElement : public QGstObject
{
public:
    using QGstObject::QGstObject;

    QGstElement(const QGstElement &) = default;
    QGstElement(QGstElement &&) noexcept = default;
    QGstElement &operator=(const QGstElement &) = default;
    QGstElement &operator=(QGstElement &&) noexcept = default;

    explicit QGstElement(GstElement *element, RefMode mode);
    static QGstElement createFromFactory(const char *factory, const char *name = nullptr);
    static QGstElement createFromFactory(GstElementFactory *, const char *name = nullptr);
    static QGstElement createFromFactory(const QGstElementFactoryHandle &,
                                         const char *name = nullptr);
    static QGstElement createFromDevice(const QGstDeviceHandle &, const char *name = nullptr);
    static QGstElement createFromDevice(GstDevice *, const char *name = nullptr);
    static QGstElement createFromPipelineDescription(const char *);
    static QGstElement createFromPipelineDescription(const QByteArray &);

    static QGstElementFactoryHandle findFactory(const char *);
    static QGstElementFactoryHandle findFactory(const QByteArray &name);

    QGstPad staticPad(const char *name) const;
    QGstPad src() const;
    QGstPad sink() const;
    QGstPad getRequestPad(const char *name) const;
    void releaseRequestPad(const QGstPad &pad) const;

    GstState state(std::chrono::nanoseconds timeout = std::chrono::seconds(0)) const;
    GstStateChangeReturn setState(GstState state);
    // Sets the state and waits (up to `timeout`) for the transition to
    // complete; returns whether it succeeded.
    bool setStateSync(GstState state, std::chrono::nanoseconds timeout = std::chrono::seconds(1));
    bool syncStateWithParent();
    bool finishStateChange(std::chrono::nanoseconds timeout = std::chrono::seconds(5));

    void lockState(bool locked);
    bool isStateLocked() const;

    void sendEvent(GstEvent *event) const;
    void sendEos() const;

    // Duration/position queries; nullopt when the query fails.
    std::optional<std::chrono::nanoseconds> duration() const;
    std::optional<std::chrono::milliseconds> durationInMs() const;
    std::optional<std::chrono::nanoseconds> position() const;
    std::optional<std::chrono::milliseconds> positionInMs() const;
    std::optional<bool> canSeek() const;

    // Signal-connection helpers; the member function receives the emitting
    // element (and pad, where applicable) as wrapped types.
    template <auto Member, typename T>
    QGObjectHandlerConnection onPadAdded(T *instance)
    {
        struct Impl
        {
            static void callback(GstElement *e, GstPad *pad, gpointer userData)
            {
                (static_cast<T *>(userData)->*Member)(QGstElement(e, NeedsRef),
                                                      QGstPad(pad, NeedsRef));
            };
        };

        return connect("pad-added", G_CALLBACK(Impl::callback), instance);
    }
    template <auto Member, typename T>
    QGObjectHandlerConnection onPadRemoved(T *instance)
    {
        struct Impl
        {
            static void callback(GstElement *e, GstPad *pad, gpointer userData)
            {
                (static_cast<T *>(userData)->*Member)(QGstElement(e, NeedsRef),
                                                      QGstPad(pad, NeedsRef));
            };
        };

        return connect("pad-removed", G_CALLBACK(Impl::callback), instance);
    }
    template <auto Member, typename T>
    QGObjectHandlerConnection onNoMorePads(T *instance)
    {
        struct Impl
        {
            static void callback(GstElement *e, gpointer userData)
            {
                (static_cast<T *>(userData)->*Member)(QGstElement(e, NeedsRef));
            };
        };

        return connect("no-more-pads", G_CALLBACK(Impl::callback), instance);
    }

    GstClockTime baseTime() const;
    void setBaseTime(GstClockTime time) const;

    GstElement *element() const;

    QGstElement getParent() const;
    QGstPipeline getPipeline() const;
    void dumpPipelineGraph(const char *filename) const;

private:
    // Cached position query object, reused across position() calls.
    QGstQueryHandle &positionQuery() const;
    mutable QGstQueryHandle m_positionQuery;
};

// Links the given elements in order (2 or more); logs a warning with the
// element names if linking fails.
template <typename... Ts>
std::enable_if_t<(std::is_base_of_v<QGstElement, Ts> && ...), void>
qLinkGstElements(const Ts &...ts)
{
    bool link_success = [&] {
        if constexpr (sizeof...(Ts) == 2)
            return gst_element_link(ts.element()...);
        else
            return gst_element_link_many(ts.element()..., nullptr);
    }();

    if (Q_UNLIKELY(!link_success)) {
        qWarning() << "qLinkGstElements: could not link elements: "
                   << std::initializer_list<const char *>{
                          (GST_ELEMENT_NAME(ts.element()))...,
                      };
    }
}

// Unlinks the given elements (2 or more) in order.
template <typename... Ts>
std::enable_if_t<(std::is_base_of_v<QGstElement, Ts> && ...), void>
qUnlinkGstElements(const Ts &...ts)
{
    if constexpr (sizeof...(Ts) == 2)
        gst_element_unlink(ts.element()...);
    else
        gst_element_unlink_many(ts.element()..., nullptr);
}

// Thin wrapper around GstBin: element containment and ghost-pad helpers.
class QGstBin : public QGstElement
{
public:
    using QGstElement::QGstElement;
    QGstBin(const QGstBin &) = default;
    QGstBin(QGstBin &&) noexcept = default;
    QGstBin &operator=(const QGstBin &) = default;
    QGstBin &operator=(QGstBin &&) noexcept = default;

    explicit QGstBin(GstBin *bin, RefMode mode = NeedsRef);
    static QGstBin create(const char *name);
    static QGstBin createFromFactory(const char *factory, const char *name);
    static QGstBin createFromPipelineDescription(const QByteArray &pipelineDescription,
                                                 const char *name = nullptr,
                                                 bool ghostUnlinkedPads = false);
    static QGstBin createFromPipelineDescription(const char *pipelineDescription,
                                                 const char *name = nullptr,
                                                 bool ghostUnlinkedPads = false);

    template <typename... Ts>
    std::enable_if_t<(std::is_base_of_v<QGstElement, Ts> && ...), void> add(const Ts &...ts)
    {
        if constexpr (sizeof...(Ts) == 1)
            gst_bin_add(bin(), ts.element()...);
        else
            gst_bin_add_many(bin(), ts.element()..., nullptr);
    }

    template <typename... Ts>
    std::enable_if_t<(std::is_base_of_v<QGstElement, Ts> && ...), void> remove(const Ts &...ts)
    {
        if constexpr (sizeof...(Ts) == 1)
            gst_bin_remove(bin(), ts.element()...);
        else
            gst_bin_remove_many(bin(), ts.element()..., nullptr);
    }

    // Brings the elements to GST_STATE_NULL before removing them from the
    // bin, as required before disposing running elements.
    template <typename... Ts>
    std::enable_if_t<(std::is_base_of_v<QGstElement, Ts> && ...), void>
    stopAndRemoveElements(Ts... ts)
    {
        bool stateChangeSuccessful = (ts.setStateSync(GST_STATE_NULL) && ...);
        Q_ASSERT(stateChangeSuccessful);
        remove(ts...);
    }

    GstBin *bin() const;

    void addGhostPad(const QGstElement &child, const char *name);
    void addGhostPad(const char *name, const QGstPad &pad);

    bool syncChildrenState();

    void dumpGraph(const char *fileNamePrefix) const;

    QGstElement findByName(const char *);
};

// Wrapper around GstBaseSink.
class QGstBaseSink : public QGstElement
{
public:
    using QGstElement::QGstElement;

    explicit QGstBaseSink(GstBaseSink *, RefMode);

    QGstBaseSink(const QGstBaseSink &) = default;
    QGstBaseSink(QGstBaseSink &&) noexcept = default;
    QGstBaseSink &operator=(const QGstBaseSink &) = default;
    QGstBaseSink &operator=(QGstBaseSink &&) noexcept = default;

    void setSync(bool);

    GstBaseSink *baseSink() const;
};

// Wrapper around GstBaseSrc.
class QGstBaseSrc : public QGstElement
{
public:
    using QGstElement::QGstElement;

    explicit QGstBaseSrc(GstBaseSrc *, RefMode);

    QGstBaseSrc(const QGstBaseSrc &) = default;
    QGstBaseSrc(QGstBaseSrc &&) noexcept = default;
    QGstBaseSrc &operator=(const QGstBaseSrc &) = default;
    QGstBaseSrc &operator=(QGstBaseSrc &&) noexcept = default;

    GstBaseSrc *baseSrc() const;
};

// Wrapper around GstAppSink (application-side consumption of samples).
class QGstAppSink : public QGstBaseSink
{
public:
    using QGstBaseSink::QGstBaseSink;

    explicit QGstAppSink(GstAppSink *, RefMode);

    QGstAppSink(const QGstAppSink &) = default;
    QGstAppSink(QGstAppSink &&) noexcept = default;
    QGstAppSink &operator=(const QGstAppSink &) = default;
    QGstAppSink &operator=(QGstAppSink &&) noexcept = default;

    static QGstAppSink create(const char *name);

    GstAppSink *appSink() const;

    void setMaxBuffers(int);
# if GST_CHECK_VERSION(1, 24, 0)
    // "max-time" appsink property is only available from GStreamer 1.24.
    void setMaxBufferTime(std::chrono::nanoseconds);
# endif

    void setCaps(const QGstCaps &caps);
    void setCallbacks(GstAppSinkCallbacks &callbacks, gpointer user_data, GDestroyNotify notify);

    QGstSampleHandle pullSample();
};

// Wrapper around GstAppSrc (application-side injection of buffers).
class QGstAppSrc : public QGstBaseSrc
{
public:
    using QGstBaseSrc::QGstBaseSrc;

    explicit QGstAppSrc(GstAppSrc *, RefMode);

    QGstAppSrc(const QGstAppSrc &) = default;
    QGstAppSrc(QGstAppSrc &&) noexcept = default;
    QGstAppSrc &operator=(const QGstAppSrc &) = default;
    QGstAppSrc &operator=(QGstAppSrc &&) noexcept = default;

    static QGstAppSrc create(const char *name);

    GstAppSrc *appSrc() const;

    void setCallbacks(GstAppSrcCallbacks &callbacks, gpointer user_data, GDestroyNotify notify);

    GstFlowReturn pushBuffer(GstBuffer *); // take ownership
};

// GstClockTime is an unsigned 64-bit nanosecond count.
inline GstClockTime qGstClockTimeFromChrono(std::chrono::nanoseconds ns)
{
    return ns.count();
}

QString qGstErrorMessageCannotFindElement(std::string_view element);

// Returns an error message for the first of the given element factory names
// that cannot be found; nullopt when all of them are available.
template <typename Arg, typename... Args>
std::optional<QString> qGstErrorMessageIfElementsNotAvailable(const Arg &arg, Args... args)
{
    QGstElementFactoryHandle factory = QGstElement::findFactory(arg);
    if (!factory)
        return qGstErrorMessageCannotFindElement(arg);

    if constexpr (sizeof...(args) != 0)
        return qGstErrorMessageIfElementsNotAvailable(args...);
    else
        return std::nullopt;
}

QT_END_NAMESPACE

#endif
diff --git a/src/plugins/multimedia/gstreamer/common/qgstappsource.cpp b/src/plugins/multimedia/gstreamer/common/qgstappsource.cpp
new file mode 100644
index 000000000..5779ba8b1
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstappsource.cpp
@@ -0,0 +1,240 @@
// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#include "qgstappsource_p.h" + +#include <QtCore/qdebug.h> +#include <QtCore/qloggingcategory.h> + +#include <common/qgstutils_p.h> + +static Q_LOGGING_CATEGORY(qLcAppSrc, "qt.multimedia.appsrc") + +QT_BEGIN_NAMESPACE + +QMaybe<QGstAppSource *> QGstAppSource::create(QObject *parent) +{ + QGstAppSrc appsrc = QGstAppSrc::create("appsrc"); + if (!appsrc) + return qGstErrorMessageCannotFindElement("appsrc"); + + return new QGstAppSource(appsrc, parent); +} + +QGstAppSource::QGstAppSource(QGstAppSrc appsrc, QObject *parent) + : QObject(parent), m_appSrc(std::move(appsrc)) +{ + m_appSrc.set("emit-signals", false); +} + +QGstAppSource::~QGstAppSource() +{ + m_appSrc.setStateSync(GST_STATE_NULL); + streamDestroyed(); + qCDebug(qLcAppSrc) << "~QGstAppSrc"; +} + +bool QGstAppSource::setup(QIODevice *stream, qint64 offset) +{ + QMutexLocker locker(&m_mutex); + + if (m_appSrc.isNull()) + return false; + + if (!setStream(stream, offset)) + return false; + + GstAppSrcCallbacks callbacks{}; + callbacks.need_data = QGstAppSource::on_need_data; + callbacks.enough_data = QGstAppSource::on_enough_data; + callbacks.seek_data = QGstAppSource::on_seek_data; + + m_appSrc.setCallbacks(callbacks, this, nullptr); + + GstAppSrc *appSrc = m_appSrc.appSrc(); + m_maxBytes = gst_app_src_get_max_bytes(appSrc); + + if (m_sequential) + m_streamType = GST_APP_STREAM_TYPE_STREAM; + else + m_streamType = GST_APP_STREAM_TYPE_RANDOM_ACCESS; + gst_app_src_set_stream_type(appSrc, m_streamType); + gst_app_src_set_size(appSrc, m_sequential ? 
-1 : m_stream->size() - m_offset); + + return true; +} + +void QGstAppSource::setExternalAppSrc(QGstAppSrc appsrc) +{ + QMutexLocker locker(&m_mutex); + m_appSrc = std::move(appsrc); +} + +bool QGstAppSource::setStream(QIODevice *stream, qint64 offset) +{ + if (m_stream) { + disconnect(m_stream, &QIODevice::readyRead, this, &QGstAppSource::onDataReady); + disconnect(m_stream, &QIODevice::destroyed, this, &QGstAppSource::streamDestroyed); + m_stream = nullptr; + } + + m_dataRequestSize = 0; + m_sequential = true; + m_maxBytes = 0; + + if (stream) { + if (!stream->isOpen() && !stream->open(QIODevice::ReadOnly)) + return false; + m_stream = stream; + connect(m_stream, &QIODevice::destroyed, this, &QGstAppSource::streamDestroyed); + connect(m_stream, &QIODevice::readyRead, this, &QGstAppSource::onDataReady); + m_sequential = m_stream->isSequential(); + m_offset = offset; + } + return true; +} + +bool QGstAppSource::isStreamValid() const +{ + return m_stream != nullptr && m_stream->isOpen(); +} + +QGstElement QGstAppSource::element() const +{ + return m_appSrc; +} + +void QGstAppSource::onDataReady() +{ + qCDebug(qLcAppSrc) << "onDataReady" << m_stream->bytesAvailable() << m_stream->size(); + pushData(); +} + +void QGstAppSource::streamDestroyed() +{ + qCDebug(qLcAppSrc) << "stream destroyed"; + m_stream = nullptr; + m_dataRequestSize = 0; + sendEOS(); +} + +void QGstAppSource::pushData() +{ + if (m_appSrc.isNull() || !m_dataRequestSize) { + qCDebug(qLcAppSrc) << "push data: return immediately" << m_appSrc.isNull() + << m_dataRequestSize; + return; + } + + Q_ASSERT(m_stream); + + qCDebug(qLcAppSrc) << "pushData" << m_stream; + if ((m_stream && m_stream->atEnd())) { + sendEOS(); + qCDebug(qLcAppSrc) << "end pushData" << m_stream; + return; + } + + qint64 size = m_stream->bytesAvailable(); + + if (!m_dataRequestSize) + m_dataRequestSize = m_maxBytes; + size = qMin(size, (qint64)m_dataRequestSize); + qCDebug(qLcAppSrc) << " reading" << size << "bytes" << size << 
m_dataRequestSize; + + GstBuffer* buffer = gst_buffer_new_and_alloc(size); + + if (m_sequential) + buffer->offset = bytesReadSoFar; + else + buffer->offset = m_stream->pos(); + + GstMapInfo mapInfo; + gst_buffer_map(buffer, &mapInfo, GST_MAP_WRITE); + void* bufferData = mapInfo.data; + + qint64 bytesRead; + bytesRead = m_stream->read((char *)bufferData, size); + + buffer->offset_end = buffer->offset + bytesRead - 1; + bytesReadSoFar += bytesRead; + + gst_buffer_unmap(buffer, &mapInfo); + qCDebug(qLcAppSrc) << "pushing bytes into gstreamer" << buffer->offset << bytesRead; + if (bytesRead == 0) { + gst_buffer_unref(buffer); + sendEOS(); + qCDebug(qLcAppSrc) << "end pushData" << m_stream; + return; + } + + GstFlowReturn ret = m_appSrc.pushBuffer(buffer); + switch (ret) { + case GST_FLOW_OK: + break; + + default: + qWarning() << "QGstAppSrc: push buffer error -" << gst_flow_get_name(ret); + break; + } + + qCDebug(qLcAppSrc) << "end pushData" << m_stream; +} + +bool QGstAppSource::doSeek(qint64 value) +{ + if (isStreamValid()) + return m_stream->seek(value + m_offset); + return false; +} + +gboolean QGstAppSource::on_seek_data(GstAppSrc *, guint64 arg0, gpointer userdata) +{ + // we do get some spurious seeks to INT_MAX, ignore those + if (arg0 == std::numeric_limits<quint64>::max()) + return true; + + QGstAppSource *self = reinterpret_cast<QGstAppSource *>(userdata); + Q_ASSERT(self); + + QMutexLocker locker(&self->m_mutex); + + if (self->m_sequential) + return false; + + self->doSeek(arg0); + return true; +} + +void QGstAppSource::on_enough_data(GstAppSrc *, gpointer userdata) +{ + qCDebug(qLcAppSrc) << "on_enough_data"; + QGstAppSource *self = static_cast<QGstAppSource *>(userdata); + Q_ASSERT(self); + QMutexLocker locker(&self->m_mutex); + self->m_dataRequestSize = 0; +} + +void QGstAppSource::on_need_data(GstAppSrc *, guint arg0, gpointer userdata) +{ + qCDebug(qLcAppSrc) << "on_need_data requesting bytes" << arg0; + QGstAppSource *self = static_cast<QGstAppSource 
*>(userdata); + Q_ASSERT(self); + QMutexLocker locker(&self->m_mutex); + self->m_dataRequestSize = arg0; + self->pushData(); + qCDebug(qLcAppSrc) << "done on_need_data"; +} + +void QGstAppSource::sendEOS() +{ + qCDebug(qLcAppSrc) << "sending EOS"; + if (m_appSrc.isNull()) + return; + + gst_app_src_end_of_stream(GST_APP_SRC(m_appSrc.element())); +} + +QT_END_NAMESPACE + +#include "moc_qgstappsource_p.cpp" diff --git a/src/plugins/multimedia/gstreamer/common/qgstappsource_p.h b/src/plugins/multimedia/gstreamer/common/qgstappsource_p.h new file mode 100644 index 000000000..b181212d2 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qgstappsource_p.h @@ -0,0 +1,77 @@ +// Copyright (C) 2016 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#ifndef QGSTAPPSRC_H +#define QGSTAPPSRC_H + +// +// W A R N I N G +// ------------- +// +// This file is not part of the Qt API. It exists purely as an +// implementation detail. This header file may change from version to +// version without notice, or even be removed. +// +// We mean it. 
+// + + +#include <QtCore/qobject.h> +#include <QtCore/qiodevice.h> +#include <QtCore/qatomic.h> +#include <QtCore/qmutex.h> + +#include <QtMultimedia/private/qtmultimediaglobal_p.h> + +#include <common/qgst_p.h> +#include <gst/app/gstappsrc.h> + +QT_BEGIN_NAMESPACE + +class QGstAppSource : public QObject +{ + Q_OBJECT +public: + static QMaybe<QGstAppSource *> create(QObject *parent = nullptr); + ~QGstAppSource(); + + bool setup(QIODevice *stream = nullptr, qint64 offset = 0); + + void setExternalAppSrc(QGstAppSrc); + QGstElement element() const; + +private Q_SLOTS: + void onDataReady(); + void streamDestroyed(); + +private: + bool doSeek(qint64); + void pushData(); + + QGstAppSource(QGstAppSrc appsrc, QObject *parent); + + bool setStream(QIODevice *, qint64 offset); + bool isStreamValid() const; + + static gboolean on_seek_data(GstAppSrc *element, guint64 arg0, gpointer userdata); + static void on_enough_data(GstAppSrc *element, gpointer userdata); + static void on_need_data(GstAppSrc *element, uint arg0, gpointer userdata); + + void sendEOS(); + + mutable QMutex m_mutex; + + QIODevice *m_stream = nullptr; + + QGstAppSrc m_appSrc; + bool m_sequential = true; + GstAppStreamType m_streamType = GST_APP_STREAM_TYPE_RANDOM_ACCESS; + qint64 m_offset = 0; + qint64 m_maxBytes = 0; + qint64 bytesReadSoFar = 0; + QAtomicInteger<unsigned int> m_dataRequestSize = 0; +}; + +QT_END_NAMESPACE + +#endif diff --git a/src/plugins/multimedia/gstreamer/common/qgstpipeline.cpp b/src/plugins/multimedia/gstreamer/common/qgstpipeline.cpp new file mode 100644 index 000000000..8898d84a9 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qgstpipeline.cpp @@ -0,0 +1,402 @@ +// Copyright (C) 2016 The Qt Company Ltd. 
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include <QtCore/qabstracteventdispatcher.h>
#include <QtCore/qcoreapplication.h>
#include <QtCore/qlist.h>
#include <QtCore/qloggingcategory.h>
#include <QtCore/qmap.h>
#include <QtCore/qmutex.h>
#include <QtCore/qproperty.h>
#include <QtCore/qtimer.h>

#include "qgstpipeline_p.h"
#include "qgstreamermessage_p.h"

QT_BEGIN_NAMESPACE

static Q_LOGGING_CATEGORY(qLcGstPipeline, "qt.multimedia.gstpipeline");

// Instant rate changes (no flush) are only available from GStreamer 1.18.
static constexpr GstSeekFlags rateChangeSeekFlags =
#if GST_CHECK_VERSION(1, 18, 0)
        GST_SEEK_FLAG_INSTANT_RATE_CHANGE;
#else
        GST_SEEK_FLAG_FLUSH;
#endif

// Per-pipeline state attached to the GstPipeline as a GObject data slot (see
// QGstPipeline::adopt). Owns the bus watch/poll timer and the message
// filters.
class QGstPipelinePrivate
{
public:
    guint m_eventSourceID = 0;          // bus watch id (glib dispatcher path)
    GstBus *m_bus = nullptr;            // owned; unreffed in dtor
    std::unique_ptr<QTimer> m_intervalTimer; // poll fallback without glib
    QMutex filterMutex;                 // guards syncFilters
    QList<QGstreamerSyncMessageFilter*> syncFilters;
    QList<QGstreamerBusMessageFilter*> busFilters;
    bool inStoppedState = true;
    mutable std::chrono::nanoseconds m_position{}; // last known position cache
    double m_rate = 1.;

    int m_configCounter = 0;            // nesting depth of begin/endConfig
    GstState m_savedState = GST_STATE_NULL; // state to restore in endConfig

    explicit QGstPipelinePrivate(GstBus *bus);
    ~QGstPipelinePrivate();

    void installMessageFilter(QGstreamerSyncMessageFilter *filter);
    void removeMessageFilter(QGstreamerSyncMessageFilter *filter);
    void installMessageFilter(QGstreamerBusMessageFilter *filter);
    void removeMessageFilter(QGstreamerBusMessageFilter *filter);

    // Offers the message to each bus filter until one consumes it.
    void processMessage(const QGstreamerMessage &msg)
    {
        for (QGstreamerBusMessageFilter *filter : std::as_const(busFilters)) {
            if (filter->processBusMessage(msg))
                break;
        }
    }

private:
    // Bus sync handler; NOTE(review): presumably invoked on the GStreamer
    // streaming thread, hence the filterMutex locking. When a filter
    // consumes the message we must unref it ourselves before GST_BUS_DROP.
    static GstBusSyncReply syncGstBusFilter(GstBus *bus, GstMessage *message,
                                            QGstPipelinePrivate *d)
    {
        if (!message)
            return GST_BUS_PASS;

        Q_UNUSED(bus);
        QMutexLocker lock(&d->filterMutex);

        for (QGstreamerSyncMessageFilter *filter : std::as_const(d->syncFilters)) {
            if (filter->processSyncMessage(
                        QGstreamerMessage{ message, QGstreamerMessage::NeedsRef })) {
                gst_message_unref(message);
                return GST_BUS_DROP;
            }
        }

        return GST_BUS_PASS;
    }

    // Wraps a raw bus message (without taking over its reference) and
    // forwards it to the bus filters.
    void processMessage(GstMessage *message)
    {
        if (!message)
            return;

        QGstreamerMessage msg{
            message,
            QGstreamerMessage::NeedsRef,
        };

        processMessage(msg);
    }

    static gboolean busCallback(GstBus *, GstMessage *message, gpointer data)
    {
        static_cast<QGstPipelinePrivate *>(data)->processMessage(message);
        return TRUE;
    }
};

QGstPipelinePrivate::QGstPipelinePrivate(GstBus *bus) : m_bus(bus)
{
    // glib event loop can be disabled either by env variable or QT_NO_GLIB define, so check the dispatcher
    QAbstractEventDispatcher *dispatcher = QCoreApplication::eventDispatcher();
    const bool hasGlib = dispatcher && dispatcher->inherits("QEventDispatcherGlib");
    if (!hasGlib) {
        // No glib main loop available: poll the bus every 250ms instead of
        // installing a bus watch.
        m_intervalTimer = std::make_unique<QTimer>();
        m_intervalTimer->setInterval(250);
        QObject::connect(m_intervalTimer.get(), &QTimer::timeout, m_intervalTimer.get(), [this] {
            GstMessage *message;
            while ((message = gst_bus_poll(m_bus, GST_MESSAGE_ANY, 0)) != nullptr) {
                processMessage(message);
                gst_message_unref(message);
            }
        });
        m_intervalTimer->start();
    } else {
        m_eventSourceID =
                gst_bus_add_watch_full(bus, G_PRIORITY_DEFAULT, busCallback, this, nullptr);
    }

    gst_bus_set_sync_handler(bus, (GstBusSyncHandler)syncGstBusFilter, this, nullptr);
}

QGstPipelinePrivate::~QGstPipelinePrivate()
{
    m_intervalTimer.reset();

    if (m_eventSourceID)
        gst_bus_remove_watch(m_bus);

    gst_bus_set_sync_handler(m_bus, nullptr, nullptr, nullptr);
    gst_object_unref(GST_OBJECT(m_bus));
}

void QGstPipelinePrivate::installMessageFilter(QGstreamerSyncMessageFilter *filter)
{
    if (filter) {
        QMutexLocker lock(&filterMutex);
        if (!syncFilters.contains(filter))
            syncFilters.append(filter);
    }
}

void QGstPipelinePrivate::removeMessageFilter(QGstreamerSyncMessageFilter *filter)
{
    if (filter) {
        QMutexLocker lock(&filterMutex);
        syncFilters.removeAll(filter);
    }
}

// Bus filters are not mutex-protected — NOTE(review): presumably only
// touched from the thread that dispatches bus messages; confirm.
void QGstPipelinePrivate::installMessageFilter(QGstreamerBusMessageFilter *filter)
{
    if (filter && !busFilters.contains(filter))
        busFilters.append(filter);
}

void QGstPipelinePrivate::removeMessageFilter(QGstreamerBusMessageFilter *filter)
{
    if (filter)
        busFilters.removeAll(filter);
}

QGstPipeline QGstPipeline::create(const char *name)
{
    GstPipeline *pipeline = qGstCheckedCast<GstPipeline>(gst_pipeline_new(name));
    return adopt(pipeline);
}

// Attaches a freshly created QGstPipelinePrivate to the GstPipeline as the
// "pipeline-private" data slot; the slot's destroy-notify deletes it when
// the pipeline is disposed.
QGstPipeline QGstPipeline::adopt(GstPipeline *pipeline)
{
    QGstPipelinePrivate *d = new QGstPipelinePrivate(gst_pipeline_get_bus(pipeline));
    g_object_set_data_full(qGstCheckedCast<GObject>(pipeline), "pipeline-private", d,
                           [](gpointer ptr) {
                               delete reinterpret_cast<QGstPipelinePrivate *>(ptr);
                               return;
                           });

    return QGstPipeline{
        pipeline,
        QGstPipeline::NeedsRef,
    };
}

QGstPipeline::QGstPipeline(GstPipeline *p, RefMode mode) : QGstBin(qGstCheckedCast<GstBin>(p), mode)
{
}

QGstPipeline::~QGstPipeline() = default;

bool QGstPipeline::inStoppedState() const
{
    QGstPipelinePrivate *d = getPrivate();
    return d->inStoppedState;
}

void QGstPipeline::setInStoppedState(bool stopped)
{
    QGstPipelinePrivate *d = getPrivate();
    d->inStoppedState = stopped;
}

void QGstPipeline::installMessageFilter(QGstreamerSyncMessageFilter *filter)
{
    QGstPipelinePrivate *d = getPrivate();
    d->installMessageFilter(filter);
}

void QGstPipeline::removeMessageFilter(QGstreamerSyncMessageFilter *filter)
{
    QGstPipelinePrivate *d = getPrivate();
    d->removeMessageFilter(filter);
}

void QGstPipeline::installMessageFilter(QGstreamerBusMessageFilter *filter)
{
    QGstPipelinePrivate *d = getPrivate();
    d->installMessageFilter(filter);
}

void QGstPipeline::removeMessageFilter(QGstreamerBusMessageFilter *filter)
{
    QGstPipelinePrivate *d = getPrivate();
    d->removeMessageFilter(filter);
}

GstStateChangeReturn QGstPipeline::setState(GstState state)
{
    return gst_element_set_state(element(), state);
}

// Synchronously pops one pending bus message of the given type(s) and runs
// it through the bus filters.
void QGstPipeline::processMessages(GstMessageType types)
{
    QGstPipelinePrivate *d = getPrivate();
    QGstreamerMessage message{
        gst_bus_pop_filtered(d->m_bus, types),
        QGstreamerMessage::HasRef,
    };
    d->processMessage(message);
}

// Pauses a playing pipeline so it can be safely modified; reentrant via
// m_configCounter. The pre-config state is restored by endConfig().
void QGstPipeline::beginConfig()
{
    QGstPipelinePrivate *d = getPrivate();
    Q_ASSERT(!isNull());

    ++d->m_configCounter;
    if (d->m_configCounter > 1)
        return;

    GstState state;
    GstState pending;
    GstStateChangeReturn stateChangeReturn = gst_element_get_state(element(), &state, &pending, 0);
    switch (stateChangeReturn) {
    case GST_STATE_CHANGE_ASYNC: {
        if (state == GST_STATE_PLAYING) {
            // playing->paused transition in progress. wait for it to finish
            bool stateChangeSuccessful = this->finishStateChange();
            if (!stateChangeSuccessful)
                qWarning() << "QGstPipeline::beginConfig: timeout when waiting for state change";
        }

        // An async transition is under way: treat the pending state as the
        // state to restore later.
        state = pending;
        break;
    }
    case GST_STATE_CHANGE_FAILURE: {
        qDebug() << "QGstPipeline::beginConfig: state change failure";
        dumpGraph("beginConfigFailure");
        break;
    }

    case GST_STATE_CHANGE_NO_PREROLL:
    case GST_STATE_CHANGE_SUCCESS:
        break;
    }

    d->m_savedState = state;
    if (d->m_savedState == GST_STATE_PLAYING)
        setStateSync(GST_STATE_PAUSED);
}

// Counterpart of beginConfig(): once the outermost config scope closes,
// resume playback if the pipeline was playing before.
void QGstPipeline::endConfig()
{
    QGstPipelinePrivate *d = getPrivate();
    Q_ASSERT(!isNull());

    --d->m_configCounter;
    if (d->m_configCounter)
        return;

    if (d->m_savedState == GST_STATE_PLAYING)
        setState(GST_STATE_PLAYING);
    d->m_savedState = GST_STATE_NULL;
}

// Re-seeks to the current position, flushing any queued data.
void QGstPipeline::flush()
{
    seek(position());
}

// Flushing seek to `pos` at `rate`. For negative rates the segment is
// reversed: it runs from 0 to `pos`.
void QGstPipeline::seek(std::chrono::nanoseconds pos, double rate)
{
    using namespace std::chrono_literals;

    QGstPipelinePrivate *d = getPrivate();
    // always adjust the rate, so it can be set before playback starts
    // setting position needs a loaded media file that's seekable

    qCDebug(qLcGstPipeline) << "QGstPipeline::seek to" << pos << "rate:" << rate;

    bool success = (rate > 0)
            ? gst_element_seek(element(), rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
                               GST_SEEK_TYPE_SET, pos.count(), GST_SEEK_TYPE_END, 0)
            : gst_element_seek(element(), rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
                               GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET, pos.count());

    if (!success) {
        qDebug() << "seek: gst_element_seek failed" << pos;
        return;
    }

    d->m_position = pos;
}

void QGstPipeline::seek(std::chrono::nanoseconds pos)
{
    qCDebug(qLcGstPipeline) << "QGstPipeline::seek to" << pos;
    seek(pos, getPrivate()->m_rate);
}

void QGstPipeline::setPlaybackRate(double rate)
{
    QGstPipelinePrivate *d = getPrivate();
    if (rate == d->m_rate)
        return;

    d->m_rate = rate;

    qCDebug(qLcGstPipeline) << "QGstPipeline::setPlaybackRate to" << rate;

    applyPlaybackRate(/*instantRateChange =*/true);
}

double QGstPipeline::playbackRate() const
{
    QGstPipelinePrivate *d = getPrivate();
    return d->m_rate;
}

void QGstPipeline::applyPlaybackRate(bool instantRateChange)
{
    QGstPipelinePrivate *d = getPrivate();

    // do not GST_SEEK_FLAG_FLUSH with GST_SEEK_TYPE_NONE
    // https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/3604
    if (instantRateChange && GST_CHECK_VERSION(1, 18, 0)) {
        qCDebug(qLcGstPipeline) << "QGstPipeline::applyPlaybackRate instantly";
        bool success = gst_element_seek(
                element(), d->m_rate, GST_FORMAT_UNDEFINED, rateChangeSeekFlags, GST_SEEK_TYPE_NONE,
                GST_CLOCK_TIME_NONE, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
        if (!success)
            qDebug() << "setPlaybackRate: gst_element_seek failed";
    } else {
        seek(position(), d->m_rate);
    }
}

void QGstPipeline::setPosition(std::chrono::nanoseconds pos)
{
    seek(pos);
}

// Queries the pipeline position; caches the value in the private so the
// last known position can be returned when the query fails.
std::chrono::nanoseconds QGstPipeline::position() const
{
    QGstPipelinePrivate *d = getPrivate();
    std::optional<std::chrono::nanoseconds> pos = QGstElement::position();
    if (pos) {
        d->m_position = *pos;
        qCDebug(qLcGstPipeline) << "QGstPipeline::position:"
                                << std::chrono::round<std::chrono::milliseconds>(*pos);
    } else {
        qDebug() << "QGstPipeline: failed to query position, using previous position";
    }

    return d->m_position;
}

std::chrono::milliseconds QGstPipeline::positionInMs() const
{
    using namespace std::chrono;
    return round<milliseconds>(position());
}

// Fetches the QGstPipelinePrivate installed by adopt(); asserts that it
// exists (i.e. the pipeline was created via create()/adopt()).
QGstPipelinePrivate *QGstPipeline::getPrivate() const
{
    gpointer p = g_object_get_data(qGstCheckedCast<GObject>(object()), "pipeline-private");
    auto *d = reinterpret_cast<QGstPipelinePrivate *>(p);
    Q_ASSERT(d);
    return d;
}

QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/common/qgstpipeline_p.h b/src/plugins/multimedia/gstreamer/common/qgstpipeline_p.h
new file mode 100644
index 000000000..559e7b382
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstpipeline_p.h
@@ -0,0 +1,115 @@
// Copyright (C) 2016 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#ifndef qgstpipeline_p_H
#define qgstpipeline_p_H

//
// W A R N I N G
// -------------
//
// This file is not part of the Qt API. It exists purely as an
// implementation detail. This header file may change from version to
// version without notice, or even be removed.
//
// We mean it.
//

#include <QtMultimedia/private/qtmultimediaglobal_p.h>
#include <QtCore/qobject.h>

#include "qgst_p.h"

QT_BEGIN_NAMESPACE

class QGstreamerMessage;

// Interface for filters invoked synchronously from the bus sync handler
// (i.e. before the message is posted to the application thread).
class QGstreamerSyncMessageFilter {
public:
    //returns true if message was processed and should be dropped, false otherwise
    virtual bool processSyncMessage(const QGstreamerMessage &message) = 0;
};


// Interface for filters invoked from the normal bus message dispatch.
class QGstreamerBusMessageFilter {
public:
    //returns true if message was processed and should be dropped, false otherwise
    virtual bool processBusMessage(const QGstreamerMessage &message) = 0;
};

class QGstPipelinePrivate;

// Wrapper around GstPipeline. Shared state (bus watch, message filters,
// playback rate, cached position) lives in a QGstPipelinePrivate attached to
// the GstPipeline as the "pipeline-private" GObject data slot.
class QGstPipeline : public QGstBin
{
public:
    constexpr QGstPipeline() = default;
    QGstPipeline(const QGstPipeline &) = default;
    QGstPipeline(QGstPipeline &&) = default;
    QGstPipeline &operator=(const QGstPipeline &) = default;
    QGstPipeline &operator=(QGstPipeline &&) noexcept = default;
    QGstPipeline(GstPipeline *, RefMode mode);
    ~QGstPipeline();

    // installs QGstPipelinePrivate as "pipeline-private" gobject property
    static QGstPipeline create(const char *name);
    static QGstPipeline adopt(GstPipeline *);

    // This is needed to help us avoid sending QVideoFrames or audio buffers to the
    // application while we're prerolling the pipeline.
    // QMediaPlayer is still in a stopped state, while we put the gstreamer pipeline into a
    // Paused state so that we can get the required metadata of the stream and also have a fast
    // transition to play.
    bool inStoppedState() const;
    void setInStoppedState(bool stopped);

    void installMessageFilter(QGstreamerSyncMessageFilter *filter);
    void removeMessageFilter(QGstreamerSyncMessageFilter *filter);
    void installMessageFilter(QGstreamerBusMessageFilter *filter);
    void removeMessageFilter(QGstreamerBusMessageFilter *filter);

    GstStateChangeReturn setState(GstState state);

    GstPipeline *pipeline() const { return GST_PIPELINE_CAST(get()); }

    // Synchronously dispatches pending bus messages of the given type(s).
    void processMessages(GstMessageType = GST_MESSAGE_ANY);

    // Runs `fn` with the pipeline paused (see beginConfig/endConfig in the
    // implementation); nestable.
    template <typename Functor>
    void modifyPipelineWhileNotRunning(Functor &&fn)
    {
        beginConfig();
        fn();
        endConfig();
    }

    // Convenience overload: when `pipeline` is null, just runs `fn`.
    template <typename Functor>
    static void modifyPipelineWhileNotRunning(QGstPipeline &&pipeline, Functor &&fn)
    {
        if (pipeline)
            pipeline.modifyPipelineWhileNotRunning(fn);
        else
            fn();
    }

    void flush();

    void setPlaybackRate(double rate);
    double playbackRate() const;
    void applyPlaybackRate(bool instantRateChange);

    void setPosition(std::chrono::nanoseconds pos);
    std::chrono::nanoseconds position() const;
    std::chrono::milliseconds positionInMs() const;

private:
    void seek(std::chrono::nanoseconds pos, double rate);
    void seek(std::chrono::nanoseconds pos);

    QGstPipelinePrivate *getPrivate() const;

    void beginConfig();
    void endConfig();
};

QT_END_NAMESPACE

#endif
diff --git a/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput.cpp b/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput.cpp
new file mode 100644
index 000000000..a2f60eaa1
--- /dev/null
+++ b/src/plugins/multimedia/gstreamer/common/qgstreameraudioinput.cpp
@@ -0,0 +1,156 @@
// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include <common/qgstreameraudioinput_p.h>

#include <QtCore/qloggingcategory.h>
#include <QtMultimedia/qaudiodevice.h>
#include <QtMultimedia/qaudioinput.h>

#include <audio/qgstreameraudiodevice_p.h>
#include <common/qgstpipeline_p.h>


QT_BEGIN_NAMESPACE

namespace {

Q_LOGGING_CATEGORY(qLcMediaAudioInput, "qt.multimedia.audioinput")

// Preferred source factory, chosen at compile time from the enabled audio
// backends; falls back to GStreamer's automatic source.
constexpr QLatin1String defaultSrcName = [] {
    using namespace Qt::Literals;

    if constexpr (QT_CONFIG(pulseaudio))
        return "pulsesrc"_L1;
    else if constexpr (QT_CONFIG(alsa))
        return "alsasrc"_L1;
    else
        return "autoaudiosrc"_L1;
}();

// True if the element supports switching its "device" property while the
// pipeline is live (only the pulse source qualifies here).
bool hasDeviceProperty(const QGstElement &element)
{
    using namespace Qt::Literals;
    QLatin1String elementType = element.typeName();

    if constexpr (QT_CONFIG(pulseaudio))
        return elementType == "GstPulseSrc"_L1;

    if constexpr (0 && QT_CONFIG(alsa)) // alsasrc has a "device" property, but it cannot be changed
                                        // during playback
        return elementType == "GstAlsaSrc"_L1;

    return false;
}

} // namespace

// Factory: fails up front (with a translatable error) if the required
// GStreamer elements are not installed.
QMaybe<QPlatformAudioInput *> QGstreamerAudioInput::create(QAudioInput *parent)
{
    static const auto error = qGstErrorMessageIfElementsNotAvailable("autoaudiosrc", "volume");
    if (error)
        return *error;

    return new QGstreamerAudioInput(parent);
}

// Builds the capture bin: [src] -> [volume], with the volume element's src pad
// ghosted as the bin's output.
QGstreamerAudioInput::QGstreamerAudioInput(QAudioInput *parent)
    : QObject(parent),
      QPlatformAudioInput(parent),
      m_audioInputBin(QGstBin::create("audioInput")),
      m_audioSrc{
          QGstElement::createFromFactory(defaultSrcName.constData(), "autoaudiosrc"),
      },
      m_audioVolume{
          QGstElement::createFromFactory("volume", "volume"),
      }
{
    m_audioInputBin.add(m_audioSrc, m_audioVolume);
    qLinkGstElements(m_audioSrc, m_audioVolume);

    m_audioInputBin.addGhostPad(m_audioVolume, "src");
}

// Creates a source element for the currently selected m_audioDevice.
// Resolution order: custom pipeline-description device, backend-specific
// factory with "device" set, GstDevice-based creation, then autoaudiosrc as a
// last resort.
QGstElement QGstreamerAudioInput::createGstElement()
{
    const auto *customDeviceInfo =
            dynamic_cast<const QGStreamerCustomAudioDeviceInfo *>(m_audioDevice.handle());

    if (customDeviceInfo) {
        qCDebug(qLcMediaAudioInput)
                << "requesting custom audio src element: " << customDeviceInfo->id;

        // The device id is a gst-launch style description, e.g. "pipewiresrc".
        QGstElement element = QGstBin::createFromPipelineDescription(customDeviceInfo->id,
                                                                     /*name=*/nullptr,
                                                                     /*ghostUnlinkedPads=*/true);
        if (element)
            return element;

        qCWarning(qLcMediaAudioInput)
                << "Cannot create audio source element:" << customDeviceInfo->id;
    }

    const QByteArray &id = m_audioDevice.id();
    if constexpr (QT_CONFIG(pulseaudio) || QT_CONFIG(alsa)) {
        QGstElement newSrc = QGstElement::createFromFactory(defaultSrcName.constData(), "audiosrc");
        if (newSrc) {
            newSrc.set("device", id.constData());
            return newSrc;
        }

        qWarning() << "Cannot create" << defaultSrcName;

    } else {
        auto *deviceInfo = dynamic_cast<const QGStreamerAudioDeviceInfo *>(m_audioDevice.handle());
        if (deviceInfo && deviceInfo->gstDevice) {
            QGstElement element = QGstElement::createFromDevice(deviceInfo->gstDevice, "audiosrc");
            if (element)
                return element;
        }
    }
    qCWarning(qLcMediaAudioInput) << "Invalid audio device";
    qCWarning(qLcMediaAudioInput)
            << "Failed to create a gst element for the audio device, using a default audio source";
    return QGstElement::createFromFactory("autoaudiosrc", "audiosrc");
}

QGstreamerAudioInput::~QGstreamerAudioInput()
{
    // Shut the bin down synchronously before the elements are released.
    m_audioInputBin.setStateSync(GST_STATE_NULL);
}

void QGstreamerAudioInput::setVolume(float volume)
{
    m_audioVolume.set("volume", volume);
}

void QGstreamerAudioInput::setMuted(bool muted)
{
    m_audioVolume.set("mute", muted);
}

// Switches the capture device. Fast path: update the "device" property on a
// live element when supported. Slow path: swap the source element while the
// owning pipeline is held in a non-running configuration window.
void QGstreamerAudioInput::setAudioDevice(const QAudioDevice &device)
{
    if (device == m_audioDevice)
        return;
    qCDebug(qLcMediaAudioInput) << "setAudioDevice" << device.description() << device.isNull();
    m_audioDevice = device;

    if (hasDeviceProperty(m_audioSrc) && !isCustomAudioDevice(m_audioDevice)) {
        m_audioSrc.set("device", m_audioDevice.id().constData());
        return;
    }

    QGstElement newSrc = createGstElement();

    // Order matters: unlink, stop+remove old src, install the new one, relink,
    // then sync the new element's state with the bin.
    QGstPipeline::modifyPipelineWhileNotRunning(m_audioInputBin.getPipeline(), [&] {
        qUnlinkGstElements(m_audioSrc, m_audioVolume);
        m_audioInputBin.stopAndRemoveElements(m_audioSrc);
        m_audioSrc = std::move(newSrc);
        m_audioInputBin.add(m_audioSrc);
        qLinkGstElements(m_audioSrc, m_audioVolume);
        m_audioSrc.syncStateWithParent();
    });
}

QT_END_NAMESPACE

// ==== diff boundary: new file src/plugins/multimedia/gstreamer/common/qgstreameraudioinput_p.h ====
// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#ifndef QGSTREAMERAUDIOINPUT_P_H
#define QGSTREAMERAUDIOINPUT_P_H

//
// W A R N I N G
// -------------
//
// This file is not part of the Qt API. It exists purely as an
// implementation detail. This header file may change from version to
// version without notice, or even be removed.
//
// We mean it.
+// + +#include <QtCore/qobject.h> +#include <QtMultimedia/private/qplatformaudioinput_p.h> + +#include <common/qgst_p.h> + +QT_BEGIN_NAMESPACE + +class QAudioDevice; + +class QGstreamerAudioInput : public QObject, public QPlatformAudioInput +{ +public: + static QMaybe<QPlatformAudioInput *> create(QAudioInput *parent); + ~QGstreamerAudioInput(); + + void setAudioDevice(const QAudioDevice &) override; + void setVolume(float) override; + void setMuted(bool) override; + + QGstElement gstElement() const { return m_audioInputBin; } + +private: + explicit QGstreamerAudioInput(QAudioInput *parent); + + QGstElement createGstElement(); + + QAudioDevice m_audioDevice; + + // Gst elements + QGstBin m_audioInputBin; + + QGstElement m_audioSrc; + QGstElement m_audioVolume; +}; + +QT_END_NAMESPACE + +#endif diff --git a/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput.cpp b/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput.cpp new file mode 100644 index 000000000..1a8c6976c --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput.cpp @@ -0,0 +1,170 @@ +// Copyright (C) 2016 The Qt Company Ltd. 
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include <common/qgstreameraudiooutput_p.h>

#include <QtCore/qloggingcategory.h>
#include <QtMultimedia/qaudiodevice.h>
#include <QtMultimedia/qaudiooutput.h>

#include <common/qgstpipeline_p.h>
#include <audio/qgstreameraudiodevice_p.h>


QT_BEGIN_NAMESPACE

namespace {

Q_LOGGING_CATEGORY(qLcMediaAudioOutput, "qt.multimedia.audiooutput")

// Preferred sink factory, chosen at compile time from the enabled audio
// backends; falls back to GStreamer's automatic sink.
constexpr QLatin1String defaultSinkName = [] {
    using namespace Qt::Literals;

    if constexpr (QT_CONFIG(pulseaudio))
        return "pulsesink"_L1;
    else if constexpr (QT_CONFIG(alsa))
        return "alsasink"_L1;
    else
        return "autoaudiosink"_L1;
}();

// True if the element supports switching its "device" property while the
// pipeline is live (only the pulse sink qualifies here).
bool hasDeviceProperty(const QGstElement &element)
{
    using namespace Qt::Literals;
    QLatin1String elementType = element.typeName();

    if constexpr (QT_CONFIG(pulseaudio))
        return elementType == "GstPulseSink"_L1;
    if constexpr (0 && QT_CONFIG(alsa)) // alsasink has a "device" property, but it cannot be changed
                                        // during playback
        return elementType == "GstAlsaSink"_L1;

    return false;
}

} // namespace

// Factory: fails up front (with a translatable error) if the required
// GStreamer elements are not installed.
QMaybe<QPlatformAudioOutput *> QGstreamerAudioOutput::create(QAudioOutput *parent)
{
    static const auto error = qGstErrorMessageIfElementsNotAvailable(
            "audioconvert", "audioresample", "volume", "autoaudiosink");
    if (error)
        return *error;

    return new QGstreamerAudioOutput(parent);
}

// Builds the playback bin:
// [queue] -> [audioconvert] -> [audioresample] -> [volume] -> [sink],
// with the queue's sink pad ghosted as the bin's input.
QGstreamerAudioOutput::QGstreamerAudioOutput(QAudioOutput *parent)
    : QObject(parent),
      QPlatformAudioOutput(parent),
      m_audioOutputBin(QGstBin::create("audioOutput")),
      m_audioQueue{
          QGstElement::createFromFactory("queue", "audioQueue"),
      },
      m_audioConvert{
          QGstElement::createFromFactory("audioconvert", "audioConvert"),
      },
      m_audioResample{
          QGstElement::createFromFactory("audioresample", "audioResample"),
      },
      m_audioVolume{
          QGstElement::createFromFactory("volume", "volume"),
      },
      m_audioSink{
          QGstElement::createFromFactory(defaultSinkName.constData(), "audiosink"),
      }
{
    m_audioOutputBin.add(m_audioQueue, m_audioConvert, m_audioResample, m_audioVolume, m_audioSink);
    qLinkGstElements(m_audioQueue, m_audioConvert, m_audioResample, m_audioVolume, m_audioSink);

    m_audioOutputBin.addGhostPad(m_audioQueue, "sink");
}

// Creates a sink element for the currently selected m_audioDevice.
// Resolution order: custom pipeline-description device, backend-specific
// factory with "device" set, GstDevice-based creation, then autoaudiosink as a
// last resort.
QGstElement QGstreamerAudioOutput::createGstElement()
{
    const auto *customDeviceInfo =
            dynamic_cast<const QGStreamerCustomAudioDeviceInfo *>(m_audioDevice.handle());

    if (customDeviceInfo) {
        qCDebug(qLcMediaAudioOutput)
                << "requesting custom audio sink element: " << customDeviceInfo->id;

        // The device id is a gst-launch style description.
        QGstElement element =
                QGstBin::createFromPipelineDescription(customDeviceInfo->id, /*name=*/nullptr,
                                                       /*ghostUnlinkedPads=*/true);
        if (element)
            return element;

        qCWarning(qLcMediaAudioOutput)
                << "Cannot create audio sink element:" << customDeviceInfo->id;
    }

    const QByteArray &id = m_audioDevice.id();
    if constexpr (QT_CONFIG(pulseaudio) || QT_CONFIG(alsa)) {
        QGstElement newSink =
                QGstElement::createFromFactory(defaultSinkName.constData(), "audiosink");
        if (newSink) {
            newSink.set("device", id.constData());
            return newSink;
        }

        qWarning() << "Cannot create" << defaultSinkName;
    } else {
        auto *deviceInfo = dynamic_cast<const QGStreamerAudioDeviceInfo *>(m_audioDevice.handle());
        if (deviceInfo && deviceInfo->gstDevice) {
            QGstElement element = QGstElement::createFromDevice(deviceInfo->gstDevice, "audiosink");
            if (element)
                return element;
        }
    }
    qCWarning(qLcMediaAudioOutput) << "Invalid audio device:" << m_audioDevice.id();
    qCWarning(qLcMediaAudioOutput)
            << "Failed to create a gst element for the audio device, using a default audio sink";
    return QGstElement::createFromFactory("autoaudiosink", "audiosink");
}

QGstreamerAudioOutput::~QGstreamerAudioOutput()
{
    // Shut the bin down synchronously before the elements are released.
    m_audioOutputBin.setStateSync(GST_STATE_NULL);
}

void QGstreamerAudioOutput::setVolume(float volume)
{
    m_audioVolume.set("volume", volume);
}

void QGstreamerAudioOutput::setMuted(bool muted)
{
    m_audioVolume.set("mute", muted);
}

// Switches the playback device. Fast path: update the "device" property on a
// live element when supported. Slow path: swap the sink element while the
// owning pipeline is held in a non-running configuration window.
void QGstreamerAudioOutput::setAudioDevice(const QAudioDevice &device)
{
    if (device == m_audioDevice)
        return;
    qCDebug(qLcMediaAudioOutput) << "setAudioDevice" << device.description() << device.isNull();

    m_audioDevice = device;

    if (hasDeviceProperty(m_audioSink) && !isCustomAudioDevice(m_audioDevice)) {
        m_audioSink.set("device", m_audioDevice.id().constData());
        return;
    }

    QGstElement newSink = createGstElement();

    QGstPipeline::modifyPipelineWhileNotRunning(m_audioOutputBin.getPipeline(), [&] {
        qUnlinkGstElements(m_audioVolume, m_audioSink);
        m_audioOutputBin.stopAndRemoveElements(m_audioSink);
        m_audioSink = std::move(newSink);
        m_audioOutputBin.add(m_audioSink);
        m_audioSink.syncStateWithParent();
        qLinkGstElements(m_audioVolume, m_audioSink);
    });

    // we need to flush the pipeline, otherwise, the new sink doesn't always reach the new state
    if (m_audioOutputBin.getPipeline())
        m_audioOutputBin.getPipeline().flush();
}

QT_END_NAMESPACE

// ==== diff boundary: new file src/plugins/multimedia/gstreamer/common/qgstreameraudiooutput_p.h ====
// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#ifndef QGSTREAMERAUDIOOUTPUT_P_H
#define QGSTREAMERAUDIOOUTPUT_P_H

//
// W A R N I N G
// -------------
//
// This file is not part of the Qt API. It exists purely as an
// implementation detail. This header file may change from version to
// version without notice, or even be removed.
//
// We mean it.
//

#include <QtCore/qobject.h>
#include <QtMultimedia/private/qplatformaudiooutput_p.h>

#include <common/qgst_p.h>

QT_BEGIN_NAMESPACE

class QAudioDevice;

// GStreamer backend for QAudioOutput: owns a playback bin
// (queue -> convert -> resample -> volume -> sink) whose ghost sink pad is
// exposed via gstElement() for pipelines to link against.
class QGstreamerAudioOutput : public QObject, public QPlatformAudioOutput
{
public:
    static QMaybe<QPlatformAudioOutput *> create(QAudioOutput *parent);
    ~QGstreamerAudioOutput();

    void setAudioDevice(const QAudioDevice &) override;
    void setVolume(float) override;
    void setMuted(bool) override;

    // The bin consumers embed into their pipeline.
    QGstElement gstElement() const { return m_audioOutputBin; }

private:
    explicit QGstreamerAudioOutput(QAudioOutput *parent);

    // Builds a sink element for the current m_audioDevice.
    QGstElement createGstElement();

    QAudioDevice m_audioDevice;

    // Gst elements
    QGstBin m_audioOutputBin;

    QGstElement m_audioQueue;
    QGstElement m_audioConvert;
    QGstElement m_audioResample;
    QGstElement m_audioVolume;
    QGstElement m_audioSink;
};

QT_END_NAMESPACE

#endif

// ==== diff boundary: new file src/plugins/multimedia/gstreamer/common/qgstreamerbufferprobe.cpp ====
// Copyright (C) 2016 Jolla Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include <common/qgstreamerbufferprobe_p.h>

#include <common/qgst_p.h>

QT_BEGIN_NAMESPACE

QGstreamerBufferProbe::QGstreamerBufferProbe(Flags flags)
    : m_flags(flags)
{
}

QGstreamerBufferProbe::~QGstreamerBufferProbe() = default;

// Attaches the configured probes to the pad. If the pad already has
// negotiated caps, probeCaps() is invoked immediately with them; CAPS events
// arriving later are caught by the event probe.
void QGstreamerBufferProbe::addProbeToPad(GstPad *pad, bool downstream)
{
    QGstCaps caps{
        gst_pad_get_current_caps(pad),
        QGstCaps::HasRef,
    };

    if (caps)
        probeCaps(caps.caps());

    if (m_flags & ProbeCaps) {
        m_capsProbeId = gst_pad_add_probe(
                    pad,
                    downstream
                        ? GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM
                        : GST_PAD_PROBE_TYPE_EVENT_UPSTREAM,
                    capsProbe,
                    this,
                    nullptr);
    }
    if (m_flags & ProbeBuffers) {
        m_bufferProbeId = gst_pad_add_probe(
                    pad, GST_PAD_PROBE_TYPE_BUFFER, bufferProbe, this, nullptr);
    }
}

// Detaches whichever probes were installed; -1 marks "not installed".
void QGstreamerBufferProbe::removeProbeFromPad(GstPad *pad)
{
    if (m_capsProbeId != -1) {
        gst_pad_remove_probe(pad, m_capsProbeId);
        m_capsProbeId = -1;
    }
    if (m_bufferProbeId != -1) {
        gst_pad_remove_probe(pad, m_bufferProbeId);
        m_bufferProbeId = -1;
    }
}

// Default no-op; subclasses override to inspect negotiated caps.
void QGstreamerBufferProbe::probeCaps(GstCaps *)
{
}

// Default: keep every buffer (returning false from an override drops it).
bool QGstreamerBufferProbe::probeBuffer(GstBuffer *)
{
    return true;
}

// C callback trampoline for the event probe: forwards CAPS events to the
// virtual probeCaps(). Always lets the event pass.
GstPadProbeReturn QGstreamerBufferProbe::capsProbe(GstPad *, GstPadProbeInfo *info, gpointer user_data)
{
    QGstreamerBufferProbe * const control = static_cast<QGstreamerBufferProbe *>(user_data);

    if (GstEvent * const event = gst_pad_probe_info_get_event(info)) {
        if (GST_EVENT_TYPE(event) == GST_EVENT_CAPS) {
            GstCaps *caps;
            gst_event_parse_caps(event, &caps);

            control->probeCaps(caps);
        }
    }
    return GST_PAD_PROBE_OK;
}

// C callback trampoline for the buffer probe: probeBuffer() decides whether
// the buffer passes (OK) or is dropped (DROP).
GstPadProbeReturn QGstreamerBufferProbe::bufferProbe(
        GstPad *, GstPadProbeInfo *info, gpointer user_data)
{
    QGstreamerBufferProbe * const control = static_cast<QGstreamerBufferProbe *>(user_data);
    if (GstBuffer * const buffer = gst_pad_probe_info_get_buffer(info))
        return control->probeBuffer(buffer) ? GST_PAD_PROBE_OK : GST_PAD_PROBE_DROP;
    return GST_PAD_PROBE_OK;
}

QT_END_NAMESPACE

// ==== diff boundary: new file src/plugins/multimedia/gstreamer/common/qgstreamerbufferprobe_p.h ====
// Copyright (C) 2016 Jolla Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#ifndef QGSTREAMERBUFFERPROBE_H +#define QGSTREAMERBUFFERPROBE_H + +// +// W A R N I N G +// ------------- +// +// This file is not part of the Qt API. It exists purely as an +// implementation detail. This header file may change from version to +// version without notice, or even be removed. +// +// We mean it. +// + +#include <private/qtmultimediaglobal_p.h> +#include <gst/gst.h> + +#include <QtCore/qglobal.h> + + +QT_BEGIN_NAMESPACE + +class QGstreamerBufferProbe +{ +public: + enum Flags + { + ProbeCaps = 0x01, + ProbeBuffers = 0x02, + ProbeAll = ProbeCaps | ProbeBuffers + }; + + explicit QGstreamerBufferProbe(Flags flags = ProbeAll); + virtual ~QGstreamerBufferProbe(); + + void addProbeToPad(GstPad *pad, bool downstream = true); + void removeProbeFromPad(GstPad *pad); + +protected: + virtual void probeCaps(GstCaps *caps); + virtual bool probeBuffer(GstBuffer *buffer); + +private: + static GstPadProbeReturn capsProbe(GstPad *pad, GstPadProbeInfo *info, gpointer user_data); + static GstPadProbeReturn bufferProbe(GstPad *pad, GstPadProbeInfo *info, gpointer user_data); + int m_capsProbeId = -1; + int m_bufferProbeId = -1; + const Flags m_flags; +}; + +QT_END_NAMESPACE + +#endif // QGSTREAMERBUFFERPROBE_H diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer.cpp b/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer.cpp new file mode 100644 index 000000000..014bbe77d --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer.cpp @@ -0,0 +1,1115 @@ +// Copyright (C) 2016 The Qt Company Ltd. 
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include <common/qgstreamermediaplayer_p.h>

#include <audio/qgstreameraudiodevice_p.h>
#include <common/qgst_debug_p.h>
#include <common/qgstappsource_p.h>
#include <common/qgstpipeline_p.h>
#include <common/qgstreameraudiooutput_p.h>
#include <common/qgstreamermessage_p.h>
#include <common/qgstreamermetadata_p.h>
#include <common/qgstreamervideooutput_p.h>
#include <common/qgstreamervideosink_p.h>
#include <qgstreamerformatinfo_p.h>

#include <QtMultimedia/qaudiodevice.h>
#include <QtCore/qdir.h>
#include <QtCore/qsocketnotifier.h>
#include <QtCore/qurl.h>
#include <QtCore/qdebug.h>
#include <QtCore/qloggingcategory.h>
#include <QtCore/private/quniquehandle_p.h>

#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>

#if QT_CONFIG(gstreamer_gl)
#  include <gst/gl/gl.h>
#endif

static Q_LOGGING_CATEGORY(qLcMediaPlayer, "qt.multimedia.player")

QT_BEGIN_NAMESPACE

// Wraps one "input-selector" element used to multiplex the tracks of a given
// stream type (video/audio/subtitle) into a single output.
QGstreamerMediaPlayer::TrackSelector::TrackSelector(TrackType type, QGstElement selector)
    : selector(selector), type(type)
{
    selector.set("sync-streams", true);
    selector.set("sync-mode", 1 /*clock*/);

    if (type == SubtitleStream)
        selector.set("cache-buffers", true);
}

// Requests a new sink pad on the selector and records it as a track.
QGstPad QGstreamerMediaPlayer::TrackSelector::createInputPad()
{
    auto pad = selector.getRequestPad("sink_%u");
    tracks.append(pad);
    return pad;
}

void QGstreamerMediaPlayer::TrackSelector::removeAllInputPads()
{
    for (auto &pad : tracks)
        selector.releaseRequestPad(pad);
    tracks.clear();
}

void QGstreamerMediaPlayer::TrackSelector::removeInputPad(QGstPad pad)
{
    selector.releaseRequestPad(pad);
    tracks.removeOne(pad);
}

// Returns the pad of the track at index, or a null pad if out of range.
QGstPad QGstreamerMediaPlayer::TrackSelector::inputPad(int index)
{
    if (index >= 0 && index < tracks.count())
        return tracks[index];
    return {};
}

// Indexed by TrackType; the assert guards against the array getting out of
// order relative to the enum.
QGstreamerMediaPlayer::TrackSelector &QGstreamerMediaPlayer::trackSelector(TrackType type)
{
    auto &ts = trackSelectors[type];
    Q_ASSERT(ts.type == type);
    return ts;
}

// Forwards the status change and cancels the pending stalled-media timer when
// the status moves away from StalledMedia.
void QGstreamerMediaPlayer::mediaStatusChanged(QMediaPlayer::MediaStatus status)
{
    if (status != QMediaPlayer::StalledMedia)
        m_stalledMediaNotifier.stop();

    QPlatformMediaPlayer::mediaStatusChanged(status);
}

// Emits bufferProgressChanged only on a real change (fuzzy float compare).
void QGstreamerMediaPlayer::updateBufferProgress(float newProgress)
{
    if (qFuzzyIsNull(newProgress - m_bufferProgress))
        return;

    m_bufferProgress = newProgress;
    bufferProgressChanged(m_bufferProgress);
}

// Drops all GObject signal connections made on the decoder elements and
// resets the decodebin queue counter.
void QGstreamerMediaPlayer::disconnectDecoderHandlers()
{
    auto handlers = std::initializer_list<QGObjectHandlerScopedConnection *>{
        &padAdded, &padRemoved, &sourceSetup, &uridecodebinElementAdded,
        &unknownType, &elementAdded, &elementRemoved,
    };
    for (QGObjectHandlerScopedConnection *handler : handlers)
        handler->disconnect();

    decodeBinQueues = 0;
}

// Factory: fails if the video output or any required decode element is
// unavailable.
QMaybe<QPlatformMediaPlayer *> QGstreamerMediaPlayer::create(QMediaPlayer *parent)
{
    auto videoOutput = QGstreamerVideoOutput::create();
    if (!videoOutput)
        return videoOutput.error();

    static const auto error =
            qGstErrorMessageIfElementsNotAvailable("input-selector", "decodebin", "uridecodebin");
    if (error)
        return *error;

    return new QGstreamerMediaPlayer(videoOutput.value(), parent);
}

// Sets up the player pipeline with one input-selector per track type,
// registers this object as both bus and sync message filter, pins the
// pipeline to the system clock, and wires the position/stall timers.
QGstreamerMediaPlayer::QGstreamerMediaPlayer(QGstreamerVideoOutput *videoOutput,
                                             QMediaPlayer *parent)
    : QObject(parent),
      QPlatformMediaPlayer(parent),
      trackSelectors{ {
              { VideoStream,
                QGstElement::createFromFactory("input-selector", "videoInputSelector") },
              { AudioStream,
                QGstElement::createFromFactory("input-selector", "audioInputSelector") },
              { SubtitleStream,
                QGstElement::createFromFactory("input-selector", "subTitleInputSelector") },
      } },
      playerPipeline(QGstPipeline::create("playerPipeline")),
      gstVideoOutput(videoOutput)
{
    gstVideoOutput->setParent(this);
    gstVideoOutput->setPipeline(playerPipeline);

    for (auto &ts : trackSelectors)
        playerPipeline.add(ts.selector);

    playerPipeline.installMessageFilter(static_cast<QGstreamerBusMessageFilter *>(this));
    playerPipeline.installMessageFilter(static_cast<QGstreamerSyncMessageFilter *>(this));

    QGstClockHandle systemClock{
        gst_system_clock_obtain(),
    };

    gst_pipeline_use_clock(playerPipeline.pipeline(), systemClock.get());

    connect(&positionUpdateTimer, &QTimer::timeout, this, [this] {
        updatePositionFromPipeline();
    });

    m_stalledMediaNotifier.setSingleShot(true);
    connect(&m_stalledMediaNotifier, &QTimer::timeout, this, [this] {
        mediaStatusChanged(QMediaPlayer::StalledMedia);
    });
}

QGstreamerMediaPlayer::~QGstreamerMediaPlayer()
{
    // Remove the filters before tearing the pipeline down so no callback can
    // reach a half-destroyed object.
    playerPipeline.removeMessageFilter(static_cast<QGstreamerBusMessageFilter *>(this));
    playerPipeline.removeMessageFilter(static_cast<QGstreamerSyncMessageFilter *>(this));
    playerPipeline.setStateSync(GST_STATE_NULL);
}

// Current pipeline position; zero when no media is set.
std::chrono::nanoseconds QGstreamerMediaPlayer::pipelinePosition() const
{
    if (!hasMedia())
        return {};

    Q_ASSERT(playerPipeline);
    return playerPipeline.position();
}

void QGstreamerMediaPlayer::updatePositionFromPipeline()
{
    using namespace std::chrono;

    positionChanged(round<milliseconds>(pipelinePosition()));
}

// Queries the pipeline duration; -1 ms is used as the "unknown" sentinel.
void QGstreamerMediaPlayer::updateDurationFromPipeline()
{
    std::optional<std::chrono::milliseconds> duration = playerPipeline.durationInMs();
    if (!duration)
        duration = std::chrono::milliseconds{ -1 };

    if (duration != m_duration) {
        qCDebug(qLcMediaPlayer) << "updateDurationFromPipeline" << *duration;
        m_duration = *duration;
        durationChanged(m_duration);
    }
}

qint64 QGstreamerMediaPlayer::duration() const
{
    return m_duration.count();
}

float QGstreamerMediaPlayer::bufferProgress() const
{
    return m_bufferProgress;
}

// Not implemented: reports an empty range set.
QMediaTimeRange QGstreamerMediaPlayer::availablePlaybackRanges() const
{
    return QMediaTimeRange();
}

qreal QGstreamerMediaPlayer::playbackRate() const
{
    return playerPipeline.playbackRate();
}

void QGstreamerMediaPlayer::setPlaybackRate(qreal rate)
{
    if (rate == m_rate)
        return;

    m_rate = rate;

    playerPipeline.setPlaybackRate(rate);
    playbackRateChanged(rate);
}

void QGstreamerMediaPlayer::setPosition(qint64 pos)
{
    std::chrono::milliseconds posInMs{ pos };
    setPosition(posInMs);
}

// Seeks the pipeline; leaving EndOfMedia implies a transition back to
// LoadedMedia before reporting the new position.
void QGstreamerMediaPlayer::setPosition(std::chrono::milliseconds pos)
{
    if (pos == playerPipeline.position())
        return;
    playerPipeline.finishStateChange();
    playerPipeline.setPosition(pos);
    qCDebug(qLcMediaPlayer) << Q_FUNC_INFO << pos << playerPipeline.positionInMs();
    if (mediaStatus() == QMediaPlayer::EndOfMedia)
        mediaStatusChanged(QMediaPlayer::LoadedMedia);
    positionChanged(pos);
}

void QGstreamerMediaPlayer::play()
{
    QMediaPlayer::PlaybackState currentState = state();
    if (currentState == QMediaPlayer::PlayingState || !hasMedia())
        return;

    if (currentState != QMediaPlayer::PausedState)
        resetCurrentLoop();

    playerPipeline.setInStoppedState(false);
    // Restart from the beginning when playing again after end-of-media.
    if (mediaStatus() == QMediaPlayer::EndOfMedia) {
        playerPipeline.setPosition({});
        positionChanged(0);
    }

    qCDebug(qLcMediaPlayer) << "play().";
    int ret = playerPipeline.setState(GST_STATE_PLAYING);
    if (m_requiresSeekOnPlay) {
        // Flushing the pipeline is required to get track changes immediately, when they happen
        // while paused.
        playerPipeline.flush();
        m_requiresSeekOnPlay = false;
    } else {
        if (currentState == QMediaPlayer::StoppedState) {
            // we get an assertion failure during instant playback rate changes
            // https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/3545
            constexpr bool performInstantRateChange = false;
            playerPipeline.applyPlaybackRate(/*instantRateChange=*/performInstantRateChange);
        }
    }
    if (ret == GST_STATE_CHANGE_FAILURE)
        qCDebug(qLcMediaPlayer) << "Unable to set the pipeline to the playing state.";

    // Position is polled every 100 ms while playing.
    positionUpdateTimer.start(100);
    stateChanged(QMediaPlayer::PlayingState);
}

void QGstreamerMediaPlayer::pause()
{
    if (state() == QMediaPlayer::PausedState || !hasMedia()
        || m_resourceErrorState != ResourceErrorState::NoError)
        return;

    positionUpdateTimer.stop();
    // Leaving the logical "stopped" state requires a flush so stale data from
    // the preroll does not leak out.
    if (playerPipeline.inStoppedState()) {
        playerPipeline.setInStoppedState(false);
        playerPipeline.flush();
    }
    int ret = playerPipeline.setStateSync(GST_STATE_PAUSED);
    if (ret == GST_STATE_CHANGE_FAILURE)
        qCDebug(qLcMediaPlayer) << "Unable to set the pipeline to the paused state.";
    if (mediaStatus() == QMediaPlayer::EndOfMedia) {
        playerPipeline.setPosition({});
        positionChanged(0);
    } else {
        updatePositionFromPipeline();
    }
    stateChanged(QMediaPlayer::PausedState);

    if (m_bufferProgress > 0 || !canTrackProgress())
        mediaStatusChanged(QMediaPlayer::BufferedMedia);
    else
        mediaStatusChanged(QMediaPlayer::BufferingMedia);
}

void QGstreamerMediaPlayer::stop()
{
    using namespace std::chrono_literals;
    if (state() == QMediaPlayer::StoppedState) {
        // Already stopped: just rewind if needed.
        if (position() != 0) {
            playerPipeline.setPosition({});
            positionChanged(0ms);
            mediaStatusChanged(QMediaPlayer::LoadedMedia);
        }
        return;
    }
    stopOrEOS(false);
}

const QGstPipeline &QGstreamerMediaPlayer::pipeline() const
{
    return playerPipeline;
}

// Shared implementation of stop() and end-of-stream handling; eos selects
// whether the position is kept (EOS) or rewound (stop), and which media
// status is reported.
void QGstreamerMediaPlayer::stopOrEOS(bool eos)
{
    using namespace std::chrono_literals;

    positionUpdateTimer.stop();
    playerPipeline.setInStoppedState(true);
    bool ret = playerPipeline.setStateSync(GST_STATE_PAUSED);
    if (!ret)
        qCDebug(qLcMediaPlayer) << "Unable to set the pipeline to the stopped state.";
    if (!eos) {
        playerPipeline.setPosition(0ms);
        positionChanged(0ms);
    }
    stateChanged(QMediaPlayer::StoppedState);
    if (eos)
        mediaStatusChanged(QMediaPlayer::EndOfMedia);
    else
        mediaStatusChanged(QMediaPlayer::LoadedMedia);
    m_initialBufferProgressSent = false;
    bufferProgressChanged(0.f);
}

// Queries the pipeline for seekability; a failed query is reported as
// non-seekable.
void QGstreamerMediaPlayer::detectPipelineIsSeekable()
{
    std::optional<bool> canSeek = playerPipeline.canSeek();
    if (canSeek) {
        qCDebug(qLcMediaPlayer) << "detectPipelineIsSeekable: pipeline is seekable:" << *canSeek;
        seekableChanged(*canSeek);
    } else {
        qCWarning(qLcMediaPlayer) << "detectPipelineIsSeekable: query for seekable failed.";
        seekableChanged(false);
    }
}

// Maps a track type to the output element it should be rendered into; null
// element when no corresponding output is set.
QGstElement QGstreamerMediaPlayer::getSinkElementForTrackType(TrackType trackType)
{
    switch (trackType) {
    case AudioStream:
        return gstAudioOutput ? gstAudioOutput->gstElement() : QGstElement{};
    case VideoStream:
        return gstVideoOutput ? gstVideoOutput->gstElement() : QGstElement{};
    case SubtitleStream:
        return gstVideoOutput ? gstVideoOutput->gstSubtitleElement() : QGstElement{};
        break;
    default:
        Q_UNREACHABLE_RETURN(QGstElement{});
    }
}

bool QGstreamerMediaPlayer::hasMedia() const
{
    return !m_url.isEmpty() || m_stream;
}

// Central bus-message dispatcher: drives metadata, duration, buffering,
// preroll completion, error reporting and seekability detection from the
// GStreamer bus. Always returns false so other filters also see the message.
bool QGstreamerMediaPlayer::processBusMessage(const QGstreamerMessage &message)
{
    qCDebug(qLcMediaPlayer) << "received bus message:" << message;

    GstMessage* gm = message.message();
    switch (message.type()) {
    case GST_MESSAGE_TAG: {
        // #### This isn't ideal. We shouldn't catch stream specific tags here, rather the global ones
        QGstTagListHandle tagList;
        gst_message_parse_tag(gm, &tagList);

        qCDebug(qLcMediaPlayer) << "    Got tags: " << tagList.get();

        QMediaMetaData originalMetaData = m_metaData;
        extendMetaDataFromTagList(m_metaData, tagList);
        if (originalMetaData != m_metaData)
            metaDataChanged();

        if (gstVideoOutput) {
            QVariant rotation = m_metaData.value(QMediaMetaData::Orientation);
            gstVideoOutput->setRotation(rotation.value<QtVideo::Rotation>());
        }
        break;
    }
    case GST_MESSAGE_DURATION_CHANGED: {
        // During preroll the duration is picked up when preroll completes.
        if (!prerolling)
            updateDurationFromPipeline();

        return false;
    }
    case GST_MESSAGE_EOS: {
        positionChanged(m_duration);
        if (doLoop()) {
            setPosition(0);
            break;
        }
        stopOrEOS(true);
        break;
    }
    case GST_MESSAGE_BUFFERING: {
        int progress = 0;
        gst_message_parse_buffering(gm, &progress);

        if (state() != QMediaPlayer::StoppedState && !prerolling) {
            if (!m_initialBufferProgressSent) {
                mediaStatusChanged(QMediaPlayer::BufferingMedia);
                m_initialBufferProgressSent = true;
            }

            // A drop back to 0% after having buffered data starts the
            // debounced stalled-media notification.
            if (m_bufferProgress > 0 && progress == 0) {
                m_stalledMediaNotifier.start(stalledMediaDebouncePeriod);
            } else if (progress >= 50)
                // QTBUG-124517: rethink buffering
                mediaStatusChanged(QMediaPlayer::BufferedMedia);
            else
                mediaStatusChanged(QMediaPlayer::BufferingMedia);
        }

        updateBufferProgress(progress * 0.01);
        break;
    }
    case GST_MESSAGE_STATE_CHANGED: {
        // Only state changes of the top-level pipeline are relevant here.
        if (message.source() != playerPipeline)
            return false;

        GstState    oldState;
        GstState    newState;
        GstState    pending;

        gst_message_parse_state_changed(gm, &oldState, &newState, &pending);
        qCDebug(qLcMediaPlayer) << "    state changed message from"
                                << QCompactGstMessageAdaptor(message);

        switch (newState) {
        case GST_STATE_VOID_PENDING:
        case GST_STATE_NULL:
        case GST_STATE_READY:
            break;
        case GST_STATE_PAUSED: {
            if (prerolling) {
                // First transition to PAUSED completes the preroll: publish
                // duration, metadata and tracks, then report LoadedMedia.
                qCDebug(qLcMediaPlayer) << "Preroll done, setting status to Loaded";
                playerPipeline.dumpGraph("playerPipelinePrerollDone");

                prerolling = false;

                updateDurationFromPipeline();

                m_metaData.insert(QMediaMetaData::Duration, duration());
                if (!m_url.isEmpty())
                    m_metaData.insert(QMediaMetaData::Url, m_url);
                parseStreamsAndMetadata();
                metaDataChanged();

                tracksChanged();
                mediaStatusChanged(QMediaPlayer::LoadedMedia);

                if (!playerPipeline.inStoppedState()) {
                    Q_ASSERT(!m_initialBufferProgressSent);

                    bool immediatelySendBuffered = !canTrackProgress() || m_bufferProgress > 0;
                    mediaStatusChanged(QMediaPlayer::BufferingMedia);
                    m_initialBufferProgressSent = true;
                    if (immediatelySendBuffered)
                        mediaStatusChanged(QMediaPlayer::BufferedMedia);
                }
            }

            break;
        }
        case GST_STATE_PLAYING: {
            if (!m_initialBufferProgressSent) {
                bool immediatelySendBuffered = !canTrackProgress() || m_bufferProgress > 0;
                mediaStatusChanged(QMediaPlayer::BufferingMedia);
                m_initialBufferProgressSent = true;
                if (immediatelySendBuffered)
                    mediaStatusChanged(QMediaPlayer::BufferedMedia);
            }
            break;
        }
        }
        break;
    }
    case GST_MESSAGE_ERROR: {
        qCDebug(qLcMediaPlayer) << "    error" << QCompactGstMessageAdaptor(message);

        QUniqueGErrorHandle err;
        QUniqueGStringHandle debug;
        gst_message_parse_error(gm, &err, &debug);
        GQuark errorDomain = err.get()->domain;
        gint errorCode = err.get()->code;

        if (errorDomain == GST_STREAM_ERROR) {
            if (errorCode == GST_STREAM_ERROR_CODEC_NOT_FOUND)
                error(QMediaPlayer::FormatError, tr("Cannot play stream of type: <unknown>"));
            else {
                error(QMediaPlayer::FormatError, QString::fromUtf8(err.get()->message));
            }
        } else if (errorDomain == GST_RESOURCE_ERROR) {
            if (errorCode == GST_RESOURCE_ERROR_NOT_FOUND) {
                if (m_resourceErrorState != ResourceErrorState::ErrorReported) {
                    // gstreamer seems to deliver multiple GST_RESOURCE_ERROR_NOT_FOUND events
                    error(QMediaPlayer::ResourceError, QString::fromUtf8(err.get()->message));
                    m_resourceErrorState = ResourceErrorState::ErrorReported;
                    m_url.clear();
                    m_stream = nullptr;
                }
            } else {
                error(QMediaPlayer::ResourceError, QString::fromUtf8(err.get()->message));
            }
        } else {
            playerPipeline.dumpGraph("error");
        }
        mediaStatusChanged(QMediaPlayer::InvalidMedia);
        break;
    }

    case GST_MESSAGE_WARNING:
        qCWarning(qLcMediaPlayer) << "Warning:" << QCompactGstMessageAdaptor(message);
        playerPipeline.dumpGraph("warning");
        break;

    case GST_MESSAGE_INFO:
        if (qLcMediaPlayer().isDebugEnabled())
            qCDebug(qLcMediaPlayer) << "Info:" << QCompactGstMessageAdaptor(message);
        break;

    case GST_MESSAGE_SEGMENT_START: {
        qCDebug(qLcMediaPlayer) << "    segment start message, updating position";
        QGstStructureView structure(gst_message_get_structure(gm));
        auto p = structure["position"].toInt64();
        if (p) {
            // Segment position is in nanoseconds; convert to milliseconds.
            std::chrono::milliseconds position{
                (*p) / 1000000,
            };
            positionChanged(position);
        }
        break;
    }
    case GST_MESSAGE_ELEMENT: {
        QGstStructureView structure(gst_message_get_structure(gm));
        auto type = structure.name();
        if (type == "stream-topology")
            topology = structure.clone();

        break;
    }

    case GST_MESSAGE_ASYNC_DONE: {
        if (playerPipeline.state() >= GST_STATE_PAUSED)
            detectPipelineIsSeekable();
        break;
    }

    default:
//        qCDebug(qLcMediaPlayer) << "    default message handler, doing nothing";

        break;
    }

    return false;
}

// Sync (streaming-thread) handler: answers NEED_CONTEXT queries for the GL
// display context so video sinks can share the application's GL display.
// Returns true (message consumed) only when a context was provided.
bool QGstreamerMediaPlayer::processSyncMessage(const QGstreamerMessage &message)
{
#if QT_CONFIG(gstreamer_gl)
    if (message.type() != GST_MESSAGE_NEED_CONTEXT)
        return false;
    const gchar *type = nullptr;
    gst_message_parse_context_type (message.message(), &type);
    if (strcmp(type, GST_GL_DISPLAY_CONTEXT_TYPE))
        return false;
    if (!gstVideoOutput || !gstVideoOutput->gstreamerVideoSink())
        return false;
    auto *context = gstVideoOutput->gstreamerVideoSink()->gstGlDisplayContext();
    if (!context)
        return false;
    gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message.message())), context);
    playerPipeline.dumpGraph("need_context");
    return true;
#else
    Q_UNUSED(message);
    return false;
#endif
}

QUrl QGstreamerMediaPlayer::media() const
{
    return m_url;
}

const QIODevice *QGstreamerMediaPlayer::mediaStream() const
{
    return m_stream;
}

// Called when the decoder exposes a new source pad: classifies the stream by
// its caps, links it into the matching input-selector, and activates the
// first track of each type.
void QGstreamerMediaPlayer::decoderPadAdded(const QGstElement &src, const QGstPad &pad)
{
    if (src != decoder)
        return;

    auto caps = pad.currentCaps();
    auto type = caps.at(0).name();
    qCDebug(qLcMediaPlayer) << "Received new pad" << pad.name() << "from" << src.name() << "type" << type;
    qCDebug(qLcMediaPlayer) << "    " << caps;

    TrackType streamType = NTrackTypes;
    if (type.startsWith("video/x-raw")) {
        streamType = VideoStream;
    } else if (type.startsWith("audio/x-raw")) {
        streamType = AudioStream;
    } else if (type.startsWith("text/")) {
        streamType = SubtitleStream;
    } else {
        qCWarning(qLcMediaPlayer) << "Ignoring unknown media stream:" << pad.name() << type;
        return;
    }

    auto &ts = trackSelector(streamType);
    QGstPad sinkPad = ts.createInputPad();
    if (!pad.link(sinkPad)) {
        qCWarning(qLcMediaPlayer) << "Failed to add track, cannot link pads";
        return;
    }
    qCDebug(qLcMediaPlayer) << "Adding track";

    // The first track of a type becomes the active one and enables the
    // corresponding output.
    if (ts.trackCount() == 1) {
        if (streamType == VideoStream) {
            connectOutput(ts);
            ts.setActiveInputPad(sinkPad);
            videoAvailableChanged(true);
        }
        else if (streamType == AudioStream) {
            connectOutput(ts);
            ts.setActiveInputPad(sinkPad);
            audioAvailableChanged(true);
        }
    }

    if (!prerolling)
        tracksChanged();

    // Remember the decoder pad -> selector pad mapping for later removal.
    decoderOutputMap.emplace(pad, sinkPad);
}

void QGstreamerMediaPlayer::decoderPadRemoved(const QGstElement &src, const QGstPad &pad)
{
    if (src != decoder)
        return;

    qCDebug(qLcMediaPlayer) << "Removed pad" << pad.name() << "from" << src.name();

    auto it = decoderOutputMap.find(pad);
    if (it == decoderOutputMap.end())
        return;
    QGstPad track = it->second;

    auto ts = std::find_if(std::begin(trackSelectors),
std::end(trackSelectors), + [&](TrackSelector &ts){ return ts.selector == track.parent(); }); + if (ts == std::end(trackSelectors)) + return; + + qCDebug(qLcMediaPlayer) << " was linked to pad" << track.name() << "from" << ts->selector.name(); + ts->removeInputPad(track); + + if (ts->trackCount() == 0) { + removeOutput(*ts); + if (ts->type == AudioStream) + audioAvailableChanged(false); + else if (ts->type == VideoStream) + videoAvailableChanged(false); + } + + if (!prerolling) + tracksChanged(); +} + +void QGstreamerMediaPlayer::removeAllOutputs() +{ + for (auto &ts : trackSelectors) { + removeOutput(ts); + ts.removeAllInputPads(); + } + audioAvailableChanged(false); + videoAvailableChanged(false); +} + +void QGstreamerMediaPlayer::connectOutput(TrackSelector &ts) +{ + if (ts.isConnected) + return; + + QGstElement e = getSinkElementForTrackType(ts.type); + if (e) { + qCDebug(qLcMediaPlayer) << "connecting output for track type" << ts.type; + playerPipeline.add(e); + qLinkGstElements(ts.selector, e); + e.syncStateWithParent(); + } + + ts.isConnected = true; +} + +void QGstreamerMediaPlayer::removeOutput(TrackSelector &ts) +{ + if (!ts.isConnected) + return; + + QGstElement e = getSinkElementForTrackType(ts.type); + if (e) { + qCDebug(qLcMediaPlayer) << "removing output for track type" << ts.type; + playerPipeline.stopAndRemoveElements(e); + } + + ts.isConnected = false; +} + +void QGstreamerMediaPlayer::removeDynamicPipelineElements() +{ + for (QGstElement *element : { &src, &decoder }) { + if (element->isNull()) + continue; + + element->setStateSync(GstState::GST_STATE_NULL); + playerPipeline.remove(*element); + *element = QGstElement{}; + } +} + +void QGstreamerMediaPlayer::uridecodebinElementAddedCallback(GstElement * /*uridecodebin*/, + GstElement *child, + QGstreamerMediaPlayer *) +{ + QGstElement c(child, QGstElement::NeedsRef); + qCDebug(qLcMediaPlayer) << "New element added to uridecodebin:" << c.name(); + + static const GType decodeBinType = [] { + 
QGstElementFactoryHandle factory = QGstElement::findFactory("decodebin"); + return gst_element_factory_get_element_type(factory.get()); + }(); + + if (c.type() == decodeBinType) { + qCDebug(qLcMediaPlayer) << " -> setting post-stream-topology property"; + c.set("post-stream-topology", true); + } +} + +void QGstreamerMediaPlayer::sourceSetupCallback(GstElement *uridecodebin, GstElement *source, QGstreamerMediaPlayer *that) +{ + Q_UNUSED(uridecodebin) + Q_UNUSED(that) + + qCDebug(qLcMediaPlayer) << "Setting up source:" << g_type_name_from_instance((GTypeInstance*)source); + + if (std::string_view("GstRTSPSrc") == g_type_name_from_instance((GTypeInstance *)source)) { + QGstElement s(source, QGstElement::NeedsRef); + int latency{40}; + bool ok{false}; + int v = qEnvironmentVariableIntValue("QT_MEDIA_RTSP_LATENCY", &ok); + if (ok) + latency = v; + qCDebug(qLcMediaPlayer) << " -> setting source latency to:" << latency << "ms"; + s.set("latency", latency); + + bool drop{true}; + v = qEnvironmentVariableIntValue("QT_MEDIA_RTSP_DROP_ON_LATENCY", &ok); + if (ok && v == 0) + drop = false; + qCDebug(qLcMediaPlayer) << " -> setting drop-on-latency to:" << drop; + s.set("drop-on-latency", drop); + + bool retrans{false}; + v = qEnvironmentVariableIntValue("QT_MEDIA_RTSP_DO_RETRANSMISSION", &ok); + if (ok && v != 0) + retrans = true; + qCDebug(qLcMediaPlayer) << " -> setting do-retransmission to:" << retrans; + s.set("do-retransmission", retrans); + } +} + +void QGstreamerMediaPlayer::unknownTypeCallback(GstElement *decodebin, GstPad *pad, GstCaps *caps, + QGstreamerMediaPlayer *self) +{ + Q_UNUSED(decodebin) + Q_UNUSED(pad) + Q_UNUSED(self) + qCDebug(qLcMediaPlayer) << "Unknown type:" << caps; + + QMetaObject::invokeMethod(self, [self] { + self->stop(); + }); +} + +static bool isQueue(const QGstElement &element) +{ + static const GType queueType = [] { + QGstElementFactoryHandle factory = QGstElement::findFactory("queue"); + return 
gst_element_factory_get_element_type(factory.get()); + }(); + + static const GType multiQueueType = [] { + QGstElementFactoryHandle factory = QGstElement::findFactory("multiqueue"); + return gst_element_factory_get_element_type(factory.get()); + }(); + + return element.type() == queueType || element.type() == multiQueueType; +} + +void QGstreamerMediaPlayer::decodebinElementAddedCallback(GstBin * /*decodebin*/, + GstBin * /*sub_bin*/, GstElement *child, + QGstreamerMediaPlayer *self) +{ + QGstElement c(child, QGstElement::NeedsRef); + if (isQueue(c)) + self->decodeBinQueues += 1; +} + +void QGstreamerMediaPlayer::decodebinElementRemovedCallback(GstBin * /*decodebin*/, + GstBin * /*sub_bin*/, GstElement *child, + QGstreamerMediaPlayer *self) +{ + QGstElement c(child, QGstElement::NeedsRef); + if (isQueue(c)) + self->decodeBinQueues -= 1; +} + +void QGstreamerMediaPlayer::setMedia(const QUrl &content, QIODevice *stream) +{ + using namespace std::chrono_literals; + + qCDebug(qLcMediaPlayer) << Q_FUNC_INFO << "setting location to" << content; + + prerolling = true; + m_requiresSeekOnPlay = true; + m_resourceErrorState = ResourceErrorState::NoError; + + bool ret = playerPipeline.setStateSync(GST_STATE_NULL); + if (!ret) + qCDebug(qLcMediaPlayer) << "Unable to set the pipeline to the stopped state."; + + m_url = content; + m_stream = stream; + + removeDynamicPipelineElements(); + disconnectDecoderHandlers(); + removeAllOutputs(); + seekableChanged(false); + Q_ASSERT(playerPipeline.inStoppedState()); + + if (m_duration != 0ms) { + m_duration = 0ms; + durationChanged(0ms); + } + stateChanged(QMediaPlayer::StoppedState); + if (position() != 0) + positionChanged(0ms); + if (!m_metaData.isEmpty()) { + m_metaData.clear(); + metaDataChanged(); + } + + if (content.isEmpty() && !stream) { + mediaStatusChanged(QMediaPlayer::NoMedia); + return; + } + + if (m_stream) { + if (!m_appSrc) { + auto maybeAppSrc = QGstAppSource::create(this); + if (maybeAppSrc) { + m_appSrc = 
maybeAppSrc.value(); + } else { + error(QMediaPlayer::ResourceError, maybeAppSrc.error()); + return; + } + } + src = m_appSrc->element(); + decoder = QGstElement::createFromFactory("decodebin", "decoder"); + if (!decoder) { + error(QMediaPlayer::ResourceError, qGstErrorMessageCannotFindElement("decodebin")); + return; + } + decoder.set("post-stream-topology", true); + decoder.set("use-buffering", true); + unknownType = decoder.connect("unknown-type", GCallback(unknownTypeCallback), this); + elementAdded = decoder.connect("deep-element-added", + GCallback(decodebinElementAddedCallback), this); + elementRemoved = decoder.connect("deep-element-removed", + GCallback(decodebinElementAddedCallback), this); + + playerPipeline.add(src, decoder); + qLinkGstElements(src, decoder); + + m_appSrc->setup(m_stream); + seekableChanged(!stream->isSequential()); + } else { + // use uridecodebin + decoder = QGstElement::createFromFactory("uridecodebin", "decoder"); + if (!decoder) { + error(QMediaPlayer::ResourceError, qGstErrorMessageCannotFindElement("uridecodebin")); + return; + } + playerPipeline.add(decoder); + + constexpr bool hasPostStreamTopology = GST_CHECK_VERSION(1, 22, 0); + if constexpr (hasPostStreamTopology) { + decoder.set("post-stream-topology", true); + } else { + // can't set post-stream-topology to true, as uridecodebin doesn't have the property. 
+ // Use a hack + uridecodebinElementAdded = decoder.connect( + "element-added", GCallback(uridecodebinElementAddedCallback), this); + } + + sourceSetup = decoder.connect("source-setup", GCallback(sourceSetupCallback), this); + unknownType = decoder.connect("unknown-type", GCallback(unknownTypeCallback), this); + + decoder.set("uri", content.toEncoded().constData()); + decoder.set("use-buffering", true); + + constexpr int mb = 1024 * 1024; + decoder.set("ring-buffer-max-size", 2 * mb); + + updateBufferProgress(0.f); + + elementAdded = decoder.connect("deep-element-added", + GCallback(decodebinElementAddedCallback), this); + elementRemoved = decoder.connect("deep-element-removed", + GCallback(decodebinElementAddedCallback), this); + } + padAdded = decoder.onPadAdded<&QGstreamerMediaPlayer::decoderPadAdded>(this); + padRemoved = decoder.onPadRemoved<&QGstreamerMediaPlayer::decoderPadRemoved>(this); + + mediaStatusChanged(QMediaPlayer::LoadingMedia); + if (!playerPipeline.setStateSync(GST_STATE_PAUSED)) { + qCWarning(qLcMediaPlayer) << "Unable to set the pipeline to the paused state."; + // Note: no further error handling: errors will be delivered via a GstMessage + return; + } + + playerPipeline.setPosition(0ms); + positionChanged(0ms); +} + +void QGstreamerMediaPlayer::setAudioOutput(QPlatformAudioOutput *output) +{ + if (gstAudioOutput == output) + return; + + auto &ts = trackSelector(AudioStream); + + playerPipeline.modifyPipelineWhileNotRunning([&] { + if (gstAudioOutput) + removeOutput(ts); + + gstAudioOutput = static_cast<QGstreamerAudioOutput *>(output); + if (gstAudioOutput) + connectOutput(ts); + }); +} + +QMediaMetaData QGstreamerMediaPlayer::metaData() const +{ + return m_metaData; +} + +void QGstreamerMediaPlayer::setVideoSink(QVideoSink *sink) +{ + gstVideoOutput->setVideoSink(sink); +} + +static QGstStructureView endOfChain(const QGstStructureView &s) +{ + QGstStructureView e = s; + while (1) { + auto next = e["next"].toStructure(); + if 
(!next.isNull()) + e = next; + else + break; + } + return e; +} + +void QGstreamerMediaPlayer::parseStreamsAndMetadata() +{ + qCDebug(qLcMediaPlayer) << "============== parse topology ============"; + + if (!topology) { + qCDebug(qLcMediaPlayer) << " null topology"; + return; + } + + QGstStructureView topologyView{ topology }; + + QGstCaps caps = topologyView.caps(); + extendMetaDataFromCaps(m_metaData, caps); + + QGstTagListHandle tagList = QGstStructureView{ topology }.tags(); + if (tagList) + extendMetaDataFromTagList(m_metaData, tagList); + + QGstStructureView demux = endOfChain(topologyView); + QGValue next = demux["next"]; + if (!next.isList()) { + qCDebug(qLcMediaPlayer) << " no additional streams"; + metaDataChanged(); + return; + } + + // collect stream info + int size = next.listSize(); + for (int i = 0; i < size; ++i) { + auto val = next.at(i); + caps = val.toStructure().caps(); + + extendMetaDataFromCaps(m_metaData, caps); + + QGstStructureView structure = caps.at(0); + + if (structure.name().startsWith("video/")) { + QSize nativeSize = structure.nativeSize(); + gstVideoOutput->setNativeSize(nativeSize); + } + } + + auto sinkPad = trackSelector(VideoStream).activeInputPad(); + if (sinkPad) { + QGstTagListHandle tagList = sinkPad.tags(); + if (tagList) + qCDebug(qLcMediaPlayer) << " tags=" << tagList.get(); + else + qCDebug(qLcMediaPlayer) << " tags=(null)"; + } + + qCDebug(qLcMediaPlayer) << "============== end parse topology ============"; + playerPipeline.dumpGraph("playback"); +} + +int QGstreamerMediaPlayer::trackCount(QPlatformMediaPlayer::TrackType type) +{ + return trackSelector(type).trackCount(); +} + +QMediaMetaData QGstreamerMediaPlayer::trackMetaData(QPlatformMediaPlayer::TrackType type, int index) +{ + auto track = trackSelector(type).inputPad(index); + if (!track) + return {}; + + QGstTagListHandle tagList = track.tags(); + return taglistToMetaData(tagList); +} + +int QGstreamerMediaPlayer::activeTrack(TrackType type) +{ + return 
trackSelector(type).activeInputIndex(); +} + +void QGstreamerMediaPlayer::setActiveTrack(TrackType type, int index) +{ + auto &ts = trackSelector(type); + auto track = ts.inputPad(index); + if (track.isNull() && index != -1) { + qCWarning(qLcMediaPlayer) << "Attempt to set an incorrect index" << index + << "for the track type" << type; + return; + } + + qCDebug(qLcMediaPlayer) << "Setting the index" << index << "for the track type" << type; + if (type == QPlatformMediaPlayer::SubtitleStream) + gstVideoOutput->flushSubtitles(); + + playerPipeline.modifyPipelineWhileNotRunning([&] { + if (track.isNull()) { + removeOutput(ts); + } else { + ts.setActiveInputPad(track); + connectOutput(ts); + } + }); + + // seek to force an immediate change of the stream + if (playerPipeline.state() == GST_STATE_PLAYING) + playerPipeline.flush(); + else + m_requiresSeekOnPlay = true; +} + +QT_END_NAMESPACE diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer_p.h b/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer_p.h new file mode 100644 index 000000000..f634d32a1 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qgstreamermediaplayer_p.h @@ -0,0 +1,206 @@ +// Copyright (C) 2016 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#ifndef QGSTREAMERMEDIAPLAYER_P_H +#define QGSTREAMERMEDIAPLAYER_P_H + +// +// W A R N I N G +// ------------- +// +// This file is not part of the Qt API. It exists purely as an +// implementation detail. This header file may change from version to +// version without notice, or even be removed. +// +// We mean it. 
+// + +#include <QtCore/qstack.h> +#include <private/qplatformmediaplayer_p.h> +#include <private/qtmultimediaglobal_p.h> +#include <private/qmultimediautils_p.h> +#include <qurl.h> +#include <common/qgst_p.h> +#include <common/qgstpipeline_p.h> + +#include <QtCore/qtimer.h> + +#include <array> + +QT_BEGIN_NAMESPACE + +class QNetworkAccessManager; +class QGstreamerMessage; +class QGstAppSource; +class QGstreamerAudioOutput; +class QGstreamerVideoOutput; + +class QGstreamerMediaPlayer : public QObject, + public QPlatformMediaPlayer, + public QGstreamerBusMessageFilter, + public QGstreamerSyncMessageFilter +{ +public: + static QMaybe<QPlatformMediaPlayer *> create(QMediaPlayer *parent = nullptr); + ~QGstreamerMediaPlayer(); + + qint64 duration() const override; + + float bufferProgress() const override; + + QMediaTimeRange availablePlaybackRanges() const override; + + qreal playbackRate() const override; + void setPlaybackRate(qreal rate) override; + + QUrl media() const override; + const QIODevice *mediaStream() const override; + void setMedia(const QUrl &, QIODevice *) override; + + bool streamPlaybackSupported() const override { return true; } + + void setAudioOutput(QPlatformAudioOutput *output) override; + + QMediaMetaData metaData() const override; + + void setVideoSink(QVideoSink *sink) override; + + int trackCount(TrackType) override; + QMediaMetaData trackMetaData(TrackType /*type*/, int /*streamNumber*/) override; + int activeTrack(TrackType) override; + void setActiveTrack(TrackType, int /*streamNumber*/) override; + + void setPosition(qint64 pos) override; + void setPosition(std::chrono::milliseconds pos); + + void play() override; + void pause() override; + void stop() override; + + const QGstPipeline &pipeline() const; + + bool processBusMessage(const QGstreamerMessage& message) override; + bool processSyncMessage(const QGstreamerMessage& message) override; + +private: + QGstreamerMediaPlayer(QGstreamerVideoOutput *videoOutput, QMediaPlayer *parent); + 
+ struct TrackSelector + { + TrackSelector(TrackType, QGstElement selector); + QGstPad createInputPad(); + void removeInputPad(QGstPad pad); + void removeAllInputPads(); + QGstPad inputPad(int index); + int activeInputIndex() const { return isConnected ? tracks.indexOf(activeInputPad()) : -1; } + QGstPad activeInputPad() const + { + return isConnected ? QGstPad{ selector.getObject("active-pad") } : QGstPad{}; + } + void setActiveInputPad(QGstPad input) { selector.set("active-pad", input); } + int trackCount() const { return tracks.count(); } + + QGstElement selector; + TrackType type; + QList<QGstPad> tracks; + bool isConnected = false; + }; + + friend class QGstreamerStreamsControl; + void decoderPadAdded(const QGstElement &src, const QGstPad &pad); + void decoderPadRemoved(const QGstElement &src, const QGstPad &pad); + void disconnectDecoderHandlers(); + static void uridecodebinElementAddedCallback(GstElement *uridecodebin, GstElement *child, + QGstreamerMediaPlayer *that); + static void sourceSetupCallback(GstElement *uridecodebin, GstElement *source, + QGstreamerMediaPlayer *that); + static void unknownTypeCallback(GstElement *decodebin, GstPad *pad, GstCaps *caps, + QGstreamerMediaPlayer *self); + static void decodebinElementAddedCallback(GstBin *decodebin, GstBin *sub_bin, + GstElement *element, QGstreamerMediaPlayer *self); + static void decodebinElementRemovedCallback(GstBin *decodebin, GstBin *sub_bin, + GstElement *element, QGstreamerMediaPlayer *self); + + void parseStreamsAndMetadata(); + void connectOutput(TrackSelector &ts); + void removeOutput(TrackSelector &ts); + void removeDynamicPipelineElements(); + void removeAllOutputs(); + void stopOrEOS(bool eos); + bool canTrackProgress() const { return decodeBinQueues > 0; } + void detectPipelineIsSeekable(); + bool hasMedia() const; + + std::chrono::nanoseconds pipelinePosition() const; + void updatePositionFromPipeline(); + void updateDurationFromPipeline(); + void updateBufferProgress(float); + + 
QGstElement getSinkElementForTrackType(TrackType); + + std::array<TrackSelector, NTrackTypes> trackSelectors; + TrackSelector &trackSelector(TrackType type); + + QMediaMetaData m_metaData; + + QUrl m_url; + QIODevice *m_stream = nullptr; + + enum class ResourceErrorState : uint8_t { + NoError, + ErrorOccurred, + ErrorReported, + }; + + bool prerolling = false; + bool m_requiresSeekOnPlay = true; + bool m_initialBufferProgressSent = false; + ResourceErrorState m_resourceErrorState = ResourceErrorState::NoError; + float m_rate = 1.f; + float m_bufferProgress = 0.f; + std::chrono::milliseconds m_duration{}; + QTimer positionUpdateTimer; + + QGstAppSource *m_appSrc = nullptr; + + QUniqueGstStructureHandle topology; + + // Gst elements + QGstPipeline playerPipeline; + QGstElement src; + QGstElement decoder; + + QGstreamerAudioOutput *gstAudioOutput = nullptr; + QGstreamerVideoOutput *gstVideoOutput = nullptr; + + // QGstElement streamSynchronizer; + + struct QGstPadLess + { + bool operator()(const QGstPad &lhs, const QGstPad &rhs) const + { + return lhs.pad() < rhs.pad(); + } + }; + + std::map<QGstPad, QGstPad, QGstPadLess> decoderOutputMap; + + // decoder connections + QGObjectHandlerScopedConnection padAdded; + QGObjectHandlerScopedConnection padRemoved; + QGObjectHandlerScopedConnection sourceSetup; + QGObjectHandlerScopedConnection uridecodebinElementAdded; + QGObjectHandlerScopedConnection unknownType; + QGObjectHandlerScopedConnection elementAdded; + QGObjectHandlerScopedConnection elementRemoved; + + int decodeBinQueues = 0; + + void mediaStatusChanged(QMediaPlayer::MediaStatus status); + static constexpr auto stalledMediaDebouncePeriod = std::chrono::milliseconds{ 500 }; + QTimer m_stalledMediaNotifier; +}; + +QT_END_NAMESPACE + +#endif diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamermessage_p.h b/src/plugins/multimedia/gstreamer/common/qgstreamermessage_p.h new file mode 100644 index 000000000..9836bd0cb --- /dev/null +++ 
b/src/plugins/multimedia/gstreamer/common/qgstreamermessage_p.h @@ -0,0 +1,55 @@ +// Copyright (C) 2016 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#ifndef QGSTREAMERMESSAGE_P_H +#define QGSTREAMERMESSAGE_P_H + +// +// W A R N I N G +// ------------- +// +// This file is not part of the Qt API. It exists purely as an +// implementation detail. This header file may change from version to +// version without notice, or even be removed. +// +// We mean it. +// + +#include <private/qtmultimediaglobal_p.h> +#include <common/qgst_p.h> + +QT_BEGIN_NAMESPACE + +// Required for QDoc workaround +class QString; + +template <> +struct QGstPointerImpl::QGstRefcountingAdaptor<GstMessage> +{ + static void ref(GstMessage *arg) noexcept { gst_message_ref(arg); } + static void unref(GstMessage *arg) noexcept { gst_message_unref(arg); } +}; + +class QGstreamerMessage : public QGstPointerImpl::QGstObjectWrapper<GstMessage> +{ + using BaseClass = QGstPointerImpl::QGstObjectWrapper<GstMessage>; + +public: + using BaseClass::BaseClass; + QGstreamerMessage(const QGstreamerMessage &) = default; + QGstreamerMessage(QGstreamerMessage &&) noexcept = default; + QGstreamerMessage &operator=(const QGstreamerMessage &) = default; + QGstreamerMessage &operator=(QGstreamerMessage &&) noexcept = default; + + GstMessageType type() const { return GST_MESSAGE_TYPE(get()); } + QGstObject source() const { return QGstObject(GST_MESSAGE_SRC(get()), QGstObject::NeedsRef); } + QGstStructureView structure() const { return QGstStructureView(gst_message_get_structure(get())); } + + GstMessage *message() const { return get(); } +}; + +QT_END_NAMESPACE + +Q_DECLARE_METATYPE(QGstreamerMessage); + +#endif diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamermetadata.cpp b/src/plugins/multimedia/gstreamer/common/qgstreamermetadata.cpp new file mode 100644 index 000000000..9aa9406b9 --- /dev/null +++ 
b/src/plugins/multimedia/gstreamer/common/qgstreamermetadata.cpp @@ -0,0 +1,489 @@ +// Copyright (C) 2016 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#include "qgstreamermetadata_p.h" +#include <QtMultimedia/qmediametadata.h> +#include <QtMultimedia/qtvideo.h> +#include <QtCore/qdebug.h> +#include <QtCore/qdatetime.h> +#include <QtCore/qlocale.h> +#include <QtCore/qtimezone.h> +#include <QtGui/qimage.h> + +#include <gst/gstversion.h> +#include <common/qgst_handle_types_p.h> +#include <common/qgstutils_p.h> +#include <qgstreamerformatinfo_p.h> + +QT_BEGIN_NAMESPACE + +namespace { + +namespace MetadataLookupImpl { + +#ifdef __cpp_lib_constexpr_algorithms +# define constexpr_lookup constexpr +#else +# define constexpr_lookup /*constexpr*/ +#endif + +struct MetadataKeyValuePair +{ + const char *tag; + QMediaMetaData::Key key; +}; + +constexpr const char *toTag(const char *t) +{ + return t; +} +constexpr const char *toTag(const MetadataKeyValuePair &kv) +{ + return kv.tag; +} + +constexpr QMediaMetaData::Key toKey(QMediaMetaData::Key k) +{ + return k; +} +constexpr QMediaMetaData::Key toKey(const MetadataKeyValuePair &kv) +{ + return kv.key; +} + +constexpr auto compareByKey = [](const auto &lhs, const auto &rhs) { + return toKey(lhs) < toKey(rhs); +}; + +constexpr auto compareByTag = [](const auto &lhs, const auto &rhs) { + return std::strcmp(toTag(lhs), toTag(rhs)) < 0; +}; + +constexpr_lookup auto makeLookupTable() +{ + std::array<MetadataKeyValuePair, 22> lookupTable{ { + { GST_TAG_TITLE, QMediaMetaData::Title }, + { GST_TAG_COMMENT, QMediaMetaData::Comment }, + { GST_TAG_DESCRIPTION, QMediaMetaData::Description }, + { GST_TAG_GENRE, QMediaMetaData::Genre }, + { GST_TAG_DATE_TIME, QMediaMetaData::Date }, + { GST_TAG_DATE, QMediaMetaData::Date }, + + { GST_TAG_LANGUAGE_CODE, QMediaMetaData::Language }, + + { GST_TAG_ORGANIZATION, QMediaMetaData::Publisher }, + { GST_TAG_COPYRIGHT, 
QMediaMetaData::Copyright }, + + // Media + { GST_TAG_DURATION, QMediaMetaData::Duration }, + + // Audio + { GST_TAG_BITRATE, QMediaMetaData::AudioBitRate }, + { GST_TAG_AUDIO_CODEC, QMediaMetaData::AudioCodec }, + + // Music + { GST_TAG_ALBUM, QMediaMetaData::AlbumTitle }, + { GST_TAG_ALBUM_ARTIST, QMediaMetaData::AlbumArtist }, + { GST_TAG_ARTIST, QMediaMetaData::ContributingArtist }, + { GST_TAG_TRACK_NUMBER, QMediaMetaData::TrackNumber }, + + { GST_TAG_PREVIEW_IMAGE, QMediaMetaData::ThumbnailImage }, + { GST_TAG_IMAGE, QMediaMetaData::CoverArtImage }, + + // Image/Video + { "resolution", QMediaMetaData::Resolution }, + { GST_TAG_IMAGE_ORIENTATION, QMediaMetaData::Orientation }, + + // Video + { GST_TAG_VIDEO_CODEC, QMediaMetaData::VideoCodec }, + + // Movie + { GST_TAG_PERFORMER, QMediaMetaData::LeadPerformer }, + } }; + + std::sort(lookupTable.begin(), lookupTable.end(), + [](const MetadataKeyValuePair &lhs, const MetadataKeyValuePair &rhs) { + return std::string_view(lhs.tag) < std::string_view(rhs.tag); + }); + return lookupTable; +} + +constexpr_lookup auto gstTagToMetaDataKey = makeLookupTable(); +constexpr_lookup auto metaDataKeyToGstTag = [] { + auto array = gstTagToMetaDataKey; + std::sort(array.begin(), array.end(), compareByKey); + return array; +}(); + +} // namespace MetadataLookupImpl + +QMediaMetaData::Key tagToKey(const char *tag) +{ + if (tag == nullptr) + return QMediaMetaData::Key(-1); + + using namespace MetadataLookupImpl; + auto foundIterator = std::lower_bound(gstTagToMetaDataKey.begin(), gstTagToMetaDataKey.end(), + tag, compareByTag); + if (std::strcmp(foundIterator->tag, tag) == 0) + return foundIterator->key; + + return QMediaMetaData::Key(-1); +} + +const char *keyToTag(QMediaMetaData::Key key) +{ + using namespace MetadataLookupImpl; + auto foundIterator = std::lower_bound(metaDataKeyToGstTag.begin(), metaDataKeyToGstTag.end(), + key, compareByKey); + if (foundIterator->key == key) + return foundIterator->tag; + + return nullptr; +} 
+ +#undef constexpr_lookup + +QtVideo::Rotation parseRotationTag(const char *string) +{ + using namespace std::string_view_literals; + + if (string == "rotate-90"sv) + return QtVideo::Rotation::Clockwise90; + if (string == "rotate-180"sv) + return QtVideo::Rotation::Clockwise180; + if (string == "rotate-270"sv) + return QtVideo::Rotation::Clockwise270; + if (string == "rotate-0"sv) + return QtVideo::Rotation::None; + + qCritical() << "cannot parse orientation: {}" << string; + return QtVideo::Rotation::None; +} + +QDateTime parseDate(const GValue &val) +{ + Q_ASSERT(G_VALUE_TYPE(&val) == G_TYPE_DATE); + + const GDate *date = (const GDate *)g_value_get_boxed(&val); + if (!g_date_valid(date)) + return {}; + + int year = g_date_get_year(date); + int month = g_date_get_month(date); + int day = g_date_get_day(date); + return QDateTime(QDate(year, month, day), QTime()); +} + +QDateTime parseDateTime(const GValue &val) +{ + Q_ASSERT(G_VALUE_TYPE(&val) == GST_TYPE_DATE_TIME); + + const GstDateTime *dateTime = (const GstDateTime *)g_value_get_boxed(&val); + int year = gst_date_time_has_year(dateTime) ? gst_date_time_get_year(dateTime) : 0; + int month = gst_date_time_has_month(dateTime) ? gst_date_time_get_month(dateTime) : 0; + int day = gst_date_time_has_day(dateTime) ? 
gst_date_time_get_day(dateTime) : 0; + int hour = 0; + int minute = 0; + int second = 0; + float tz = 0; + if (gst_date_time_has_time(dateTime)) { + hour = gst_date_time_get_hour(dateTime); + minute = gst_date_time_get_minute(dateTime); + second = gst_date_time_get_second(dateTime); + tz = gst_date_time_get_time_zone_offset(dateTime); + } + return QDateTime{ + QDate(year, month, day), + QTime(hour, minute, second), + QTimeZone(tz * 60 * 60), + }; +} + +QImage parseImage(const GValue &val) +{ + Q_ASSERT(G_VALUE_TYPE(&val) == GST_TYPE_SAMPLE); + + GstSample *sample = (GstSample *)g_value_get_boxed(&val); + GstCaps *caps = gst_sample_get_caps(sample); + if (caps && !gst_caps_is_empty(caps)) { + GstStructure *structure = gst_caps_get_structure(caps, 0); + const gchar *name = gst_structure_get_name(structure); + if (QByteArray(name).startsWith("image/")) { + GstBuffer *buffer = gst_sample_get_buffer(sample); + if (buffer) { + GstMapInfo info; + gst_buffer_map(buffer, &info, GST_MAP_READ); + QImage image = QImage::fromData(info.data, info.size, name); + gst_buffer_unmap(buffer, &info); + return image; + } + } + } + + return {}; +} + +std::optional<double> parseFractionAsDouble(const GValue &val) +{ + Q_ASSERT(G_VALUE_TYPE(&val) == GST_TYPE_FRACTION); + + int nom = gst_value_get_fraction_numerator(&val); + int denom = gst_value_get_fraction_denominator(&val); + if (denom == 0) + return std::nullopt; + return double(nom) / double(denom); +} + +constexpr std::string_view extendedComment{ GST_TAG_EXTENDED_COMMENT }; + +void addTagsFromExtendedComment(const GstTagList *list, const gchar *tag, QMediaMetaData &metadata) +{ + using namespace Qt::Literals; + assert(tag == extendedComment); + + int entryCount = gst_tag_list_get_tag_size(list, tag); + for (int i = 0; i != entryCount; ++i) { + const GValue *value = gst_tag_list_get_value_index(list, tag, i); + + const QLatin1StringView strValue{ g_value_get_string(value) }; + + auto equalIndex = 
strValue.indexOf(QLatin1StringView("=")); + if (equalIndex == -1) { + qDebug() << "Cannot parse GST_TAG_EXTENDED_COMMENT entry: " << value; + continue; + } + + const QLatin1StringView key = strValue.first(equalIndex); + const QLatin1StringView valueString = strValue.last(strValue.size() - equalIndex - 1); + + if (key == "DURATION"_L1) { + QUniqueGstDateTimeHandle duration{ + gst_date_time_new_from_iso8601_string(valueString.data()), + }; + + if (duration) { + using namespace std::chrono; + + auto chronoDuration = hours(gst_date_time_get_hour(duration.get())) + + minutes(gst_date_time_get_minute(duration.get())) + + seconds(gst_date_time_get_second(duration.get())) + + microseconds(gst_date_time_get_microsecond(duration.get())); + + metadata.insert(QMediaMetaData::Duration, + QVariant::fromValue(round<milliseconds>(chronoDuration).count())); + } + } + } +} + +void addTagToMetaData(const GstTagList *list, const gchar *tag, void *userdata) +{ + QMediaMetaData &metadata = *reinterpret_cast<QMediaMetaData *>(userdata); + + QMediaMetaData::Key key = tagToKey(tag); + if (key == QMediaMetaData::Key(-1)) { + if (tag == extendedComment) + addTagsFromExtendedComment(list, tag, metadata); + + return; + } + + GValue val{}; + gst_tag_list_copy_value(&val, list, tag); + + GType type = G_VALUE_TYPE(&val); + + if (auto entryCount = gst_tag_list_get_tag_size(list, tag) != 0; entryCount != 1) + qWarning() << "addTagToMetaData: invaled entry count for" << tag << "-" << entryCount; + + if (type == G_TYPE_STRING) { + const gchar *str_value = g_value_get_string(&val); + + switch (key) { + case QMediaMetaData::Language: { + metadata.insert(key, + QVariant::fromValue(QLocale::codeToLanguage( + QString::fromUtf8(str_value), QLocale::AnyLanguageCode))); + break; + } + case QMediaMetaData::Orientation: { + metadata.insert(key, QVariant::fromValue(parseRotationTag(str_value))); + break; + } + default: + metadata.insert(key, QString::fromUtf8(str_value)); + break; + }; + } else if (type == 
G_TYPE_INT) { + metadata.insert(key, g_value_get_int(&val)); + } else if (type == G_TYPE_UINT) { + metadata.insert(key, g_value_get_uint(&val)); + } else if (type == G_TYPE_LONG) { + metadata.insert(key, qint64(g_value_get_long(&val))); + } else if (type == G_TYPE_BOOLEAN) { + metadata.insert(key, g_value_get_boolean(&val)); + } else if (type == G_TYPE_CHAR) { + metadata.insert(key, g_value_get_schar(&val)); + } else if (type == G_TYPE_DOUBLE) { + metadata.insert(key, g_value_get_double(&val)); + } else if (type == G_TYPE_DATE) { + if (!metadata.keys().contains(key)) { + QDateTime date = parseDate(val); + if (date.isValid()) + metadata.insert(key, date); + } + } else if (type == GST_TYPE_DATE_TIME) { + metadata.insert(key, parseDateTime(val)); + } else if (type == GST_TYPE_SAMPLE) { + QImage image = parseImage(val); + if (!image.isNull()) + metadata.insert(key, image); + } else if (type == GST_TYPE_FRACTION) { + std::optional<double> fraction = parseFractionAsDouble(val); + + if (fraction) + metadata.insert(key, *fraction); + } + + g_value_unset(&val); +} + +} // namespace + +QMediaMetaData taglistToMetaData(const QGstTagListHandle &handle) +{ + QMediaMetaData m; + extendMetaDataFromTagList(m, handle); + return m; +} + +void extendMetaDataFromTagList(QMediaMetaData &metadata, const QGstTagListHandle &handle) +{ + if (handle) + gst_tag_list_foreach(handle.get(), reinterpret_cast<GstTagForeachFunc>(&addTagToMetaData), + &metadata); +} + +static void applyMetaDataToTagSetter(const QMediaMetaData &metadata, GstTagSetter *element) +{ + gst_tag_setter_reset_tags(element); + + for (QMediaMetaData::Key key : metadata.keys()) { + const char *tagName = keyToTag(key); + if (!tagName) + continue; + const QVariant &tagValue = metadata.value(key); + + auto setTag = [&](const auto &value) { + gst_tag_setter_add_tags(element, GST_TAG_MERGE_REPLACE, tagName, value, nullptr); + }; + + switch (tagValue.typeId()) { + case QMetaType::QString: + 
setTag(tagValue.toString().toUtf8().constData()); + break; + case QMetaType::Int: + case QMetaType::LongLong: + setTag(tagValue.toInt()); + break; + case QMetaType::Double: + setTag(tagValue.toDouble()); + break; + case QMetaType::QDate: + case QMetaType::QDateTime: { + QDateTime date = tagValue.toDateTime(); + + QGstGstDateTimeHandle dateTime{ + gst_date_time_new(date.offsetFromUtc() / 60. / 60., date.date().year(), + date.date().month(), date.date().day(), date.time().hour(), + date.time().minute(), date.time().second()), + QGstGstDateTimeHandle::HasRef, + }; + + setTag(dateTime.get()); + break; + } + default: { + if (tagValue.typeId() == qMetaTypeId<QLocale::Language>()) { + QByteArray language = QLocale::languageToCode(tagValue.value<QLocale::Language>(), + QLocale::ISO639Part2) + .toUtf8(); + setTag(language.constData()); + } + + break; + } + } + } +} + +void applyMetaDataToTagSetter(const QMediaMetaData &metadata, const QGstElement &element) +{ + GstTagSetter *tagSetter = qGstSafeCast<GstTagSetter>(element.element()); + if (tagSetter) + applyMetaDataToTagSetter(metadata, tagSetter); + else + qWarning() << "applyMetaDataToTagSetter failed: element not a GstTagSetter" + << element.name(); +} + +void applyMetaDataToTagSetter(const QMediaMetaData &metadata, const QGstBin &bin) +{ + GstIterator *elements = gst_bin_iterate_all_by_interface(bin.bin(), GST_TYPE_TAG_SETTER); + GValue item = {}; + + while (gst_iterator_next(elements, &item) == GST_ITERATOR_OK) { + GstElement *element = static_cast<GstElement *>(g_value_get_object(&item)); + if (!element) + continue; + + GstTagSetter *tagSetter = qGstSafeCast<GstTagSetter>(element); + + if (tagSetter) + applyMetaDataToTagSetter(metadata, tagSetter); + } + + gst_iterator_free(elements); +} + +void extendMetaDataFromCaps(QMediaMetaData &metadata, const QGstCaps &caps) +{ + QGstStructureView structure = caps.at(0); + + QMediaFormat::FileFormat fileFormat = QGstreamerFormatInfo::fileFormatForCaps(structure); + if 
(fileFormat != QMediaFormat::FileFormat::UnspecifiedFormat) { + // Container caps + metadata.insert(QMediaMetaData::FileFormat, fileFormat); + return; + } + + QMediaFormat::AudioCodec audioCodec = QGstreamerFormatInfo::audioCodecForCaps(structure); + if (audioCodec != QMediaFormat::AudioCodec::Unspecified) { + // Audio stream caps + metadata.insert(QMediaMetaData::AudioCodec, QVariant::fromValue(audioCodec)); + return; + } + + QMediaFormat::VideoCodec videoCodec = QGstreamerFormatInfo::videoCodecForCaps(structure); + if (videoCodec != QMediaFormat::VideoCodec::Unspecified) { + // Video stream caps + metadata.insert(QMediaMetaData::VideoCodec, QVariant::fromValue(videoCodec)); + std::optional<float> framerate = structure["framerate"].getFraction(); + if (framerate) + metadata.insert(QMediaMetaData::VideoFrameRate, *framerate); + + QSize resolution = structure.resolution(); + if (resolution.isValid()) + metadata.insert(QMediaMetaData::Resolution, resolution); + } +} + +QMediaMetaData capsToMetaData(const QGstCaps &caps) +{ + QMediaMetaData metadata; + extendMetaDataFromCaps(metadata, caps); + return metadata; +} + +QT_END_NAMESPACE diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamermetadata_p.h b/src/plugins/multimedia/gstreamer/common/qgstreamermetadata_p.h new file mode 100644 index 000000000..f04a9aba9 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qgstreamermetadata_p.h @@ -0,0 +1,35 @@ +// Copyright (C) 2016 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#ifndef QGSTREAMERMETADATA_H +#define QGSTREAMERMETADATA_H + +// +// W A R N I N G +// ------------- +// +// This file is not part of the Qt API. It exists purely as an +// implementation detail. This header file may change from version to +// version without notice, or even be removed. +// +// We mean it. 
+// + +#include <qmediametadata.h> + +#include "qgst_p.h" + +QT_BEGIN_NAMESPACE + +QMediaMetaData taglistToMetaData(const QGstTagListHandle &); +void extendMetaDataFromTagList(QMediaMetaData &, const QGstTagListHandle &); + +QMediaMetaData capsToMetaData(const QGstCaps &); +void extendMetaDataFromCaps(QMediaMetaData &, const QGstCaps &); + +void applyMetaDataToTagSetter(const QMediaMetaData &metadata, const QGstBin &); +void applyMetaDataToTagSetter(const QMediaMetaData &metadata, const QGstElement &); + +QT_END_NAMESPACE + +#endif // QGSTREAMERMETADATA_H diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput.cpp b/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput.cpp new file mode 100644 index 000000000..3d20a4b87 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput.cpp @@ -0,0 +1,198 @@ +// Copyright (C) 2021 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#include <QtMultimedia/qvideosink.h> + +#include <QtCore/qloggingcategory.h> +#include <QtCore/qthread.h> + +#include <common/qgstreamervideooutput_p.h> +#include <common/qgstreamervideosink_p.h> +#include <common/qgstsubtitlesink_p.h> + +static Q_LOGGING_CATEGORY(qLcMediaVideoOutput, "qt.multimedia.videooutput") + +QT_BEGIN_NAMESPACE + +static QGstElement makeVideoConvertScale(const char *name) +{ + QGstElementFactoryHandle factory = QGstElement::findFactory("videoconvertscale"); + if (factory) // videoconvertscale is only available in gstreamer 1.20 + return QGstElement::createFromFactory(factory, name); + + return QGstBin::createFromPipelineDescription("videoconvert ! 
videoscale", name, + /*ghostUnlinkedPads=*/true); +} + +QMaybe<QGstreamerVideoOutput *> QGstreamerVideoOutput::create(QObject *parent) +{ + QGstElementFactoryHandle factory = QGstElement::findFactory("videoconvertscale"); + + static std::optional<QString> elementCheck = []() -> std::optional<QString> { + std::optional<QString> error = qGstErrorMessageIfElementsNotAvailable("fakesink", "queue"); + if (error) + return error; + + QGstElementFactoryHandle factory = QGstElement::findFactory("videoconvertscale"); + if (factory) + return std::nullopt; + + return qGstErrorMessageIfElementsNotAvailable("videoconvert", "videoscale"); + }(); + + if (elementCheck) + return *elementCheck; + + return new QGstreamerVideoOutput(parent); +} + +QGstreamerVideoOutput::QGstreamerVideoOutput(QObject *parent) + : QObject(parent), + m_outputBin{ + QGstBin::create("videoOutput"), + }, + m_videoQueue{ + QGstElement::createFromFactory("queue", "videoQueue"), + }, + m_videoConvertScale{ + makeVideoConvertScale("videoConvertScale"), + }, + m_videoSink{ + QGstElement::createFromFactory("fakesink", "fakeVideoSink"), + } +{ + m_videoSink.set("sync", true); + m_videoSink.set("async", false); // no asynchronous state changes + + m_outputBin.add(m_videoQueue, m_videoConvertScale, m_videoSink); + qLinkGstElements(m_videoQueue, m_videoConvertScale, m_videoSink); + + m_subtitleSink = QGstSubtitleSink::createSink(this); + + m_outputBin.addGhostPad(m_videoQueue, "sink"); +} + +QGstreamerVideoOutput::~QGstreamerVideoOutput() +{ + QObject::disconnect(m_subtitleConnection); + m_outputBin.setStateSync(GST_STATE_NULL); +} + +void QGstreamerVideoOutput::setVideoSink(QVideoSink *sink) +{ + auto *gstVideoSink = sink ? 
static_cast<QGstreamerVideoSink *>(sink->platformVideoSink()) : nullptr; + if (gstVideoSink == m_platformVideoSink) + return; + + if (m_platformVideoSink) + m_platformVideoSink->setPipeline({}); + + m_platformVideoSink = gstVideoSink; + if (m_platformVideoSink) { + m_platformVideoSink->setPipeline(m_pipeline); + if (m_nativeSize.isValid()) + m_platformVideoSink->setNativeSize(m_nativeSize); + } + QGstElement videoSink; + if (m_platformVideoSink) { + videoSink = m_platformVideoSink->gstSink(); + } else { + videoSink = QGstElement::createFromFactory("fakesink", "fakevideosink"); + Q_ASSERT(videoSink); + videoSink.set("sync", true); + videoSink.set("async", false); // no asynchronous state changes + } + + QObject::disconnect(m_subtitleConnection); + if (sink) { + m_subtitleConnection = QObject::connect(this, &QGstreamerVideoOutput::subtitleChanged, sink, + [sink](const QString &subtitle) { + sink->setSubtitleText(subtitle); + }); + sink->setSubtitleText(m_lastSubtitleString); + } + + if (m_videoSink == videoSink) + return; + + m_pipeline.modifyPipelineWhileNotRunning([&] { + if (!m_videoSink.isNull()) + m_outputBin.stopAndRemoveElements(m_videoSink); + + m_videoSink = videoSink; + m_outputBin.add(m_videoSink); + + qLinkGstElements(m_videoConvertScale, m_videoSink); + + GstEvent *event = gst_event_new_reconfigure(); + gst_element_send_event(m_videoSink.element(), event); + m_videoSink.syncStateWithParent(); + }); + + qCDebug(qLcMediaVideoOutput) << "sinkChanged" << videoSink.name(); + + m_pipeline.dumpGraph(m_videoSink.name().constData()); +} + +void QGstreamerVideoOutput::setPipeline(const QGstPipeline &pipeline) +{ + m_pipeline = pipeline; + if (m_platformVideoSink) + m_platformVideoSink->setPipeline(m_pipeline); +} + +void QGstreamerVideoOutput::updateNativeSize() +{ + if (!m_platformVideoSink) + return; + + m_platformVideoSink->setNativeSize(qRotatedFrameSize(m_nativeSize, m_rotation)); +} + +void QGstreamerVideoOutput::setIsPreview() +{ + // configures the queue 
to be fast and lightweight for camera preview + // also avoids blocking the queue in case we have an encodebin attached to the tee as well + m_videoQueue.set("leaky", 2 /*downstream*/); + m_videoQueue.set("silent", true); + m_videoQueue.set("max-size-buffers", uint(1)); + m_videoQueue.set("max-size-bytes", uint(0)); + m_videoQueue.set("max-size-time", quint64(0)); +} + +void QGstreamerVideoOutput::flushSubtitles() +{ + if (!m_subtitleSink.isNull()) { + auto pad = m_subtitleSink.staticPad("sink"); + auto *event = gst_event_new_flush_start(); + pad.sendEvent(event); + event = gst_event_new_flush_stop(false); + pad.sendEvent(event); + } +} + +void QGstreamerVideoOutput::setNativeSize(QSize sz) +{ + m_nativeSize = sz; + updateNativeSize(); +} + +void QGstreamerVideoOutput::setRotation(QtVideo::Rotation rot) +{ + m_rotation = rot; + updateNativeSize(); +} + +void QGstreamerVideoOutput::updateSubtitle(QString string) +{ + // GStreamer thread + + QMetaObject::invokeMethod(this, [this, string = std::move(string)]() mutable { + m_lastSubtitleString = string; + Q_EMIT subtitleChanged(std::move(string)); + }); +} + +QT_END_NAMESPACE + +#include "moc_qgstreamervideooutput_p.cpp" diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput_p.h b/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput_p.h new file mode 100644 index 000000000..a74f058f0 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qgstreamervideooutput_p.h @@ -0,0 +1,85 @@ +// Copyright (C) 2021 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#ifndef QGSTREAMERVIDEOOUTPUT_P_H +#define QGSTREAMERVIDEOOUTPUT_P_H + +// +// W A R N I N G +// ------------- +// +// This file is not part of the Qt API. It exists purely as an +// implementation detail. This header file may change from version to +// version without notice, or even be removed. +// +// We mean it. 
+// + +#include <QtCore/qobject.h> +#include <private/qtmultimediaglobal_p.h> +#include <private/qmultimediautils_p.h> +#include <common/qgst_p.h> +#include <common/qgstpipeline_p.h> +#include <common/qgstreamervideosink_p.h> +#include <common/qgstsubtitlesink_p.h> +#include <qwaitcondition.h> +#include <qmutex.h> +#include <qpointer.h> + +QT_BEGIN_NAMESPACE + +class QVideoSink; + +class QGstreamerVideoOutput : public QObject, QAbstractSubtitleObserver +{ + Q_OBJECT + +public: + static QMaybe<QGstreamerVideoOutput *> create(QObject *parent = nullptr); + ~QGstreamerVideoOutput(); + + void setVideoSink(QVideoSink *sink); + QGstreamerVideoSink *gstreamerVideoSink() const { return m_platformVideoSink; } + + void setPipeline(const QGstPipeline &pipeline); + + QGstElement gstElement() const { return m_outputBin; } + QGstElement gstSubtitleElement() const { return m_subtitleSink; } + + void setIsPreview(); + void flushSubtitles(); + + void setNativeSize(QSize); + void setRotation(QtVideo::Rotation); + + void updateSubtitle(QString) override; + +signals: + void subtitleChanged(QString); + +private: + explicit QGstreamerVideoOutput(QObject *parent); + + void updateNativeSize(); + + QPointer<QGstreamerVideoSink> m_platformVideoSink; + + // Gst elements + QGstPipeline m_pipeline; + + QGstBin m_outputBin; + QGstElement m_videoQueue; + QGstElement m_videoConvertScale; + QGstElement m_videoSink; + + QGstElement m_subtitleSink; + QMetaObject::Connection m_subtitleConnection; + QString m_lastSubtitleString; + + QSize m_nativeSize; + QtVideo::Rotation m_rotation{}; +}; + +QT_END_NAMESPACE + +#endif diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamervideooverlay.cpp b/src/plugins/multimedia/gstreamer/common/qgstreamervideooverlay.cpp new file mode 100644 index 000000000..6ca23006b --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qgstreamervideooverlay.cpp @@ -0,0 +1,218 @@ +// Copyright (C) 2016 The Qt Company Ltd. 
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include "qgstreamervideooverlay_p.h"

#include <QtGui/qguiapplication.h>
#include <QtMultimedia/private/qtmultimediaglobal_p.h>

#include <common/qglist_helper_p.h>
#include <common/qgst_p.h>
#include <common/qgstreamermessage_p.h>
#include <common/qgstreamervideosink_p.h>
#include <common/qgstutils_p.h>

#include <gst/video/videooverlay.h>

QT_BEGIN_NAMESPACE

// Maps a Qt platform plugin name to a preferred windowed video sink element.
struct ElementMap
{
    QStringView qtPlatform;
    const char *gstreamerElement = nullptr;
};

// Ordered by descending priority
static constexpr ElementMap elementMap[] = {
    { u"xcb", "xvimagesink" },
    { u"xcb", "ximagesink" },

    // wayland
    { u"wayland", "waylandsink" },
};

// Probe an element by bringing it to READY: sinks whose backend is unusable
// (e.g. no X server) fail the state change.
static bool qt_gst_element_is_functioning(QGstElement element)
{
    GstStateChangeReturn ret = element.setState(GST_STATE_READY);
    if (ret == GST_STATE_CHANGE_SUCCESS) {
        element.setState(GST_STATE_NULL);
        return true;
    }

    return false;
}

// Pick a working windowed video sink for the current platform: first the
// known-good candidates from elementMap, then any registered sink that
// implements GstVideoOverlay.
static QGstElement findBestVideoSink()
{
    using namespace Qt::StringLiterals;
    QString platform = QGuiApplication::platformName();

    // First, try some known video sinks, depending on the Qt platform plugin in use.
    for (const auto &i : elementMap) {
        if (platform != i.qtPlatform)
            continue;
        QGstElement choice = QGstElement::createFromFactory(i.gstreamerElement, i.gstreamerElement);
        if (choice.isNull())
            continue;

        if (qt_gst_element_is_functioning(choice))
            return choice;
    }

    // We need a native window ID to use the GstVideoOverlay interface.
    // Bail out if the Qt platform plugin in use cannot provide a sensible WId.
    if (platform != QStringView{ u"xcb" } && platform != QStringView{ u"wayland" })
        return {};

    QGstElement choice;
    // If none of the known video sinks are available, try to find one that implements the
    // GstVideoOverlay interface and has autoplugging rank.
    GList *list = qt_gst_video_sinks();
    for (GstElementFactory *f : QGstUtils::GListRangeAdaptor<GstElementFactory *>(list)) {
        if (!gst_element_factory_has_interface(f, "GstVideoOverlay"))
            continue;

        choice = QGstElement::createFromFactory(f, nullptr);
        if (choice.isNull())
            continue;

        if (qt_gst_element_is_functioning(choice))
            break;
        choice = {};
    }

    gst_plugin_feature_list_free(list);
    if (choice.isNull())
        qWarning() << "Could not find a valid windowed video sink";

    return choice;
}

// elementName overrides sink discovery; empty means auto-detect.
QGstreamerVideoOverlay::QGstreamerVideoOverlay(QGstreamerVideoSink *parent, const QByteArray &elementName)
    : QObject(parent)
    , QGstreamerBufferProbe(QGstreamerBufferProbe::ProbeCaps)
    , m_gstreamerVideoSink(parent)
{
    QGstElement sink;
    if (!elementName.isEmpty())
        sink = QGstElement::createFromFactory(elementName.constData());
    else
        sink = findBestVideoSink();

    setVideoSink(sink);
}

QGstreamerVideoOverlay::~QGstreamerVideoOverlay()
{
    // Detach the caps probe installed by setVideoSink().
    if (!m_videoSink.isNull()) {
        QGstPad pad = m_videoSink.staticPad("sink");
        removeProbeFromPad(pad.pad());
    }
}

QGstElement QGstreamerVideoOverlay::videoSink() const
{
    return m_videoSink;
}

// Adopt a sink, watch its caps for size changes, and cache which optional
// properties ("force-aspect-ratio", "fullscreen") it supports.
void QGstreamerVideoOverlay::setVideoSink(QGstElement sink)
{
    if (sink.isNull())
        return;

    m_videoSink = std::move(sink);

    QGstPad pad = m_videoSink.staticPad("sink");
    addProbeToPad(pad.pad());

    auto *klass = G_OBJECT_GET_CLASS(m_videoSink.object());
    m_hasForceAspectRatio = g_object_class_find_property(klass, "force-aspect-ratio");
    m_hasFullscreen = g_object_class_find_property(klass, "fullscreen");
}

QSize QGstreamerVideoOverlay::nativeVideoSize() const
{
    return m_nativeVideoSize;
}
+ setAspectRatioMode(m_aspectRatioMode); + setFullScreen(m_fullScreen); + applyRenderRect(); + } +} + +void QGstreamerVideoOverlay::setRenderRectangle(const QRect &rect) +{ + renderRect = rect; + applyRenderRect(); +} + +void QGstreamerVideoOverlay::applyRenderRect() +{ + if (!m_windowId) + return; + + int x = -1; + int y = -1; + int w = -1; + int h = -1; + + if (!renderRect.isEmpty()) { + x = renderRect.x(); + y = renderRect.y(); + w = renderRect.width(); + h = renderRect.height(); + QSize scaledVideo = m_nativeVideoSize.scaled(w, h, m_aspectRatioMode); + x += (w - scaledVideo.width())/2; + y += (h - scaledVideo.height())/2; + w = scaledVideo.width(); + h = scaledVideo.height(); + } + + if (!m_videoSink.isNull() && GST_IS_VIDEO_OVERLAY(m_videoSink.object())) + gst_video_overlay_set_render_rectangle(GST_VIDEO_OVERLAY(m_videoSink.object()), x, y, w, h); +} + +void QGstreamerVideoOverlay::probeCaps(GstCaps *caps) +{ + QSize size = QGstCaps(caps, QGstCaps::NeedsRef).at(0).resolution(); + if (size != m_nativeVideoSize) { + m_nativeVideoSize = size; + m_gstreamerVideoSink->setNativeSize(m_nativeVideoSize); + applyRenderRect(); + } +} + +void QGstreamerVideoOverlay::setAspectRatioMode(Qt::AspectRatioMode mode) +{ + m_aspectRatioMode = mode; + if (m_hasForceAspectRatio) + m_videoSink.set("force-aspect-ratio", (mode == Qt::KeepAspectRatio)); +} + +void QGstreamerVideoOverlay::setFullScreen(bool fullscreen) +{ + m_fullScreen = fullscreen; + if (m_hasFullscreen) + m_videoSink.set("fullscreen", fullscreen); +} + +bool QGstreamerVideoOverlay::processSyncMessage(const QGstreamerMessage &message) +{ + if (!gst_is_video_overlay_prepare_window_handle_message(message.message())) + return false; + gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(m_videoSink.object()), m_windowId); + return true; +} + +QT_END_NAMESPACE + +#include "moc_qgstreamervideooverlay_p.cpp" diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamervideooverlay_p.h 
// Drives a windowed (GstVideoOverlay) video sink: finds a suitable sink for
// the current platform, feeds it a native window id, and keeps geometry,
// aspect-ratio and fullscreen state in sync.
class QGstreamerVideoOverlay : public QObject,
                               public QGstreamerSyncMessageFilter,
                               private QGstreamerBufferProbe
{
    Q_OBJECT
public:
    // elementName overrides automatic sink discovery when non-empty.
    explicit QGstreamerVideoOverlay(QGstreamerVideoSink *parent = nullptr,
                                    const QByteArray &elementName = QByteArray());
    virtual ~QGstreamerVideoOverlay();

    QGstElement videoSink() const;
    void setVideoSink(QGstElement);
    QSize nativeVideoSize() const;

    void setWindowHandle(WId id);
    void setRenderRectangle(const QRect &rect);

    void setAspectRatioMode(Qt::AspectRatioMode mode);
    void setFullScreen(bool fullscreen);

    // Handles prepare-window-handle messages on the sync bus handler.
    bool processSyncMessage(const QGstreamerMessage &message) override;

    // True when no usable windowed sink could be found/created.
    bool isNull() const { return m_videoSink.isNull(); }

Q_SIGNALS:
    void nativeVideoSizeChanged();
    void activeChanged();

private:
    // Caps probe: tracks the stream's native resolution.
    void probeCaps(GstCaps *caps) override;
    void applyRenderRect();

    QGstreamerVideoSink *m_gstreamerVideoSink = nullptr;
    QGstElement m_videoSink;
    QSize m_nativeVideoSize;

    // Which optional sink properties exist (queried once in setVideoSink()).
    bool m_hasForceAspectRatio = false;
    bool m_hasFullscreen = false;
    Qt::AspectRatioMode m_aspectRatioMode = Qt::KeepAspectRatio;
    bool m_fullScreen = false;

    WId m_windowId = 0;
    QRect renderRect;
};

QT_END_NAMESPACE

#endif // QGSTREAMERVIDEOOVERLAY_P_H
// Builds the fixed front part of the sink bin: [preprocess ->] capsfilter.
// The actual rendering sink is attached later by updateSinkElement().
QGstreamerVideoSink::QGstreamerVideoSink(QVideoSink *parent)
    : QPlatformVideoSink{
          parent,
      },
      m_sinkBin{
          QGstBin::create("videoSinkBin"),
      }
{
    // Some iMX and NVidia platforms emit a proprietary format from the decoder
    // (even though it may be marked as plain video/x-raw) and need a special
    // conversion element in front of the video sink.
    QGstElementFactoryHandle factory;

    // QT_GSTREAMER_OVERRIDE_VIDEO_CONVERSION_ELEMENT allows users to override the
    // conversion element. Ideally we construct the element programatically, though.
    QByteArray preprocessOverride = qgetenv("QT_GSTREAMER_OVERRIDE_VIDEO_CONVERSION_ELEMENT");
    if (!preprocessOverride.isEmpty()) {
        qCDebug(qLcGstVideoSink) << "requesting conversion element from environment:"
                                 << preprocessOverride;

        m_gstPreprocess = QGstBin::createFromPipelineDescription(preprocessOverride, nullptr,
                                                                 /*ghostUnlinkedPads=*/true);
        if (!m_gstPreprocess)
            qCWarning(qLcGstVideoSink) << "Cannot create conversion element:" << preprocessOverride;
    }

    if (!m_gstPreprocess) {
        // No override: probe for the known platform-specific converters.
        static constexpr auto decodersToTest = {
            "imxvideoconvert_g2d",
            "nvvidconv",
        };

        for (const char *decoder : decodersToTest) {
            factory = QGstElement::findFactory(decoder);
            if (factory)
                break;
        }

        if (factory) {
            qCDebug(qLcGstVideoSink)
                    << "instantiating conversion element:"
                    << g_type_name(gst_element_factory_get_element_type(factory.get()));

            m_gstPreprocess = QGstElement::createFromFactory(factory, "preprocess");
        }
    }

    bool disablePixelAspectRatio =
            qEnvironmentVariableIsSet("QT_GSTREAMER_DISABLE_PIXEL_ASPECT_RATIO");
    if (disablePixelAspectRatio) {
        // Enabling the pixel aspect ratio may expose a gstreamer bug on cameras that don't expose a
        // pixel-aspect-ratio via `VIDIOC_CROPCAP`. This can cause the caps negotiation to fail.
        // Using the QT_GSTREAMER_DISABLE_PIXEL_ASPECT_RATIO environment variable, one can disable
        // pixel-aspect-ratio handling
        //
        // compare: https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/6242
        m_gstCapsFilter =
                QGstElement::createFromFactory("identity", "nullPixelAspectRatioCapsFilter");
    } else {
        // Force square pixels so downstream consumers see a 1:1 aspect ratio.
        m_gstCapsFilter =
                QGstElement::createFromFactory("capsfilter", "pixelAspectRatioCapsFilter");
        QGstCaps capsFilterCaps{
            gst_caps_new_simple("video/x-raw", "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1, NULL),
            QGstCaps::HasRef,
        };
        g_object_set(m_gstCapsFilter.element(), "caps", capsFilterCaps.caps(), NULL);
    }

    if (m_gstPreprocess) {
        m_sinkBin.add(m_gstPreprocess, m_gstCapsFilter);
        qLinkGstElements(m_gstPreprocess, m_gstCapsFilter);
        m_sinkBin.addGhostPad(m_gstPreprocess, "sink");
    } else {
        m_sinkBin.add(m_gstCapsFilter);
        m_sinkBin.addGhostPad(m_gstCapsFilter, "sink");
    }
}

QGstreamerVideoSink::~QGstreamerVideoSink()
{
    emit aboutToBeDestroyed();

    unrefGstContexts();

    setPipeline(QGstPipeline());
}

// Returns the complete sink bin, lazily attaching the rendering sink.
QGstElement QGstreamerVideoSink::gstSink()
{
    updateSinkElement();
    return m_sinkBin;
}
QGstreamerVideoSink::setPipeline(QGstPipeline pipeline) +{ + m_pipeline = std::move(pipeline); +} + +bool QGstreamerVideoSink::inStoppedState() const +{ + if (m_pipeline.isNull()) + return true; + return m_pipeline.inStoppedState(); +} + +void QGstreamerVideoSink::setRhi(QRhi *rhi) +{ + if (rhi && rhi->backend() != QRhi::OpenGLES2) + rhi = nullptr; + if (m_rhi == rhi) + return; + + m_rhi = rhi; + updateGstContexts(); + if (!m_gstQtSink.isNull()) { + // force creation of a new sink with proper caps + createQtSink(); + updateSinkElement(); + } +} + +void QGstreamerVideoSink::createQtSink() +{ + if (m_gstQtSink) + m_gstQtSink.setStateSync(GST_STATE_NULL); + + m_gstQtSink = + QGstElement(reinterpret_cast<GstElement *>(QGstVideoRendererSink::createSink(this)), + QGstElement::NeedsRef); +} + +void QGstreamerVideoSink::updateSinkElement() +{ + QGstElement newSink; + if (m_gstQtSink.isNull()) + createQtSink(); + newSink = m_gstQtSink; + + if (newSink == m_gstVideoSink) + return; + + m_pipeline.modifyPipelineWhileNotRunning([&] { + if (!m_gstVideoSink.isNull()) + m_sinkBin.stopAndRemoveElements(m_gstVideoSink); + + newSink.set("async", false); // no asynchronous state changes + + m_gstVideoSink = newSink; + m_sinkBin.add(m_gstVideoSink); + qLinkGstElements(m_gstCapsFilter, m_gstVideoSink); + m_gstVideoSink.setState(GST_STATE_PAUSED); + }); + + m_pipeline.dumpGraph("updateVideoSink"); +} + +void QGstreamerVideoSink::unrefGstContexts() +{ + m_gstGlDisplayContext.close(); + m_gstGlLocalContext.close(); + m_eglDisplay = nullptr; + m_eglImageTargetTexture2D = nullptr; +} + +void QGstreamerVideoSink::updateGstContexts() +{ + using namespace Qt::Literals; + + unrefGstContexts(); + +#if QT_CONFIG(gstreamer_gl) + if (!m_rhi || m_rhi->backend() != QRhi::OpenGLES2) + return; + + auto *nativeHandles = static_cast<const QRhiGles2NativeHandles *>(m_rhi->nativeHandles()); + auto glContext = nativeHandles->context; + Q_ASSERT(glContext); + + const QString platform = 
QGuiApplication::platformName(); + QPlatformNativeInterface *pni = QGuiApplication::platformNativeInterface(); + m_eglDisplay = pni->nativeResourceForIntegration("egldisplay"_ba); +// qDebug() << "platform is" << platform << m_eglDisplay; + + QGstGLDisplayHandle gstGlDisplay; + + QByteArray contextName = "eglcontext"_ba; + GstGLPlatform glPlatform = GST_GL_PLATFORM_EGL; + // use the egl display if we have one + if (m_eglDisplay) { +#if GST_GL_HAVE_PLATFORM_EGL + gstGlDisplay.reset( + GST_GL_DISPLAY_CAST(gst_gl_display_egl_new_with_egl_display(m_eglDisplay))); + m_eglImageTargetTexture2D = eglGetProcAddress("glEGLImageTargetTexture2DOES"); +#endif + } else { + auto display = pni->nativeResourceForIntegration("display"_ba); + + if (display) { +#if GST_GL_HAVE_WINDOW_X11 && __has_include("X11/Xlib-xcb.h") + if (platform == QLatin1String("xcb")) { + contextName = "glxcontext"_ba; + glPlatform = GST_GL_PLATFORM_GLX; + + gstGlDisplay.reset(GST_GL_DISPLAY_CAST( + gst_gl_display_x11_new_with_display(reinterpret_cast<Display *>(display)))); + } +#endif +#if GST_GL_HAVE_WINDOW_WAYLAND && __has_include("wayland-client.h") + if (platform.startsWith(QLatin1String("wayland"))) { + Q_ASSERT(!gstGlDisplay); + gstGlDisplay.reset(GST_GL_DISPLAY_CAST(gst_gl_display_wayland_new_with_display( + reinterpret_cast<struct wl_display *>(display)))); + } +#endif + } + } + + if (!gstGlDisplay) { + qWarning() << "Could not create GstGLDisplay"; + return; + } + + void *nativeContext = pni->nativeResourceForContext(contextName, glContext); + if (!nativeContext) + qWarning() << "Could not find resource for" << contextName; + + GstGLAPI glApi = QOpenGLContext::openGLModuleType() == QOpenGLContext::LibGL ? 
GST_GL_API_OPENGL : GST_GL_API_GLES2; + QGstGLContextHandle appContext{ + gst_gl_context_new_wrapped(gstGlDisplay.get(), guintptr(nativeContext), glPlatform, glApi), + }; + if (!appContext) + qWarning() << "Could not create wrappped context for platform:" << glPlatform; + + gst_gl_context_activate(appContext.get(), true); + + QUniqueGErrorHandle error; + gst_gl_context_fill_info(appContext.get(), &error); + if (error) { + qWarning() << "Could not fill context info:" << error; + error = {}; + } + + QGstGLContextHandle displayContext; + gst_gl_display_create_context(gstGlDisplay.get(), appContext.get(), &displayContext, &error); + if (error) + qWarning() << "Could not create display context:" << error; + + appContext.close(); + + m_gstGlDisplayContext.reset(gst_context_new(GST_GL_DISPLAY_CONTEXT_TYPE, false)); + gst_context_set_gl_display(m_gstGlDisplayContext.get(), gstGlDisplay.get()); + + m_gstGlLocalContext.reset(gst_context_new("gst.gl.local_context", false)); + GstStructure *structure = gst_context_writable_structure(m_gstGlLocalContext.get()); + gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, displayContext.get(), nullptr); + displayContext.close(); + + if (m_pipeline) + gst_element_set_context(m_pipeline.element(), m_gstGlLocalContext.get()); +#endif // #if QT_CONFIG(gstreamer_gl) +} + +QT_END_NAMESPACE + +#include "moc_qgstreamervideosink_p.cpp" diff --git a/src/plugins/multimedia/gstreamer/common/qgstreamervideosink_p.h b/src/plugins/multimedia/gstreamer/common/qgstreamervideosink_p.h new file mode 100644 index 000000000..d940485f4 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qgstreamervideosink_p.h @@ -0,0 +1,74 @@ +// Copyright (C) 2016 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#ifndef QGSTREAMERVIDEOSINK_H +#define QGSTREAMERVIDEOSINK_H + +// +// W A R N I N G +// ------------- +// +// This file is not part of the Qt API. 
It exists purely as an +// implementation detail. This header file may change from version to +// version without notice, or even be removed. +// +// We mean it. +// + +#include <QtMultimedia/qvideosink.h> +#include <QtMultimedia/private/qplatformvideosink_p.h> + +#include <common/qgstpipeline_p.h> + +QT_BEGIN_NAMESPACE + +class QGstreamerVideoSink : public QPlatformVideoSink +{ + Q_OBJECT + +public: + explicit QGstreamerVideoSink(QVideoSink *parent = nullptr); + ~QGstreamerVideoSink(); + + void setRhi(QRhi *rhi) override; + QRhi *rhi() const { return m_rhi; } + + QGstElement gstSink(); + + void setPipeline(QGstPipeline pipeline); + bool inStoppedState() const; + + GstContext *gstGlDisplayContext() const { return m_gstGlDisplayContext.get(); } + GstContext *gstGlLocalContext() const { return m_gstGlLocalContext.get(); } + Qt::HANDLE eglDisplay() const { return m_eglDisplay; } + QFunctionPointer eglImageTargetTexture2D() const { return m_eglImageTargetTexture2D; } + +Q_SIGNALS: + void aboutToBeDestroyed(); + +private: + void createQtSink(); + void updateSinkElement(); + + void unrefGstContexts(); + void updateGstContexts(); + + QGstPipeline m_pipeline; + QGstBin m_sinkBin; + QGstElement m_gstPreprocess; + QGstElement m_gstCapsFilter; + QGstElement m_gstVideoSink; + QGstElement m_gstQtSink; + + QRhi *m_rhi = nullptr; + + Qt::HANDLE m_eglDisplay = nullptr; + QFunctionPointer m_eglImageTargetTexture2D = nullptr; + + QGstContextHandle m_gstGlLocalContext; + QGstContextHandle m_gstGlDisplayContext; +}; + +QT_END_NAMESPACE + +#endif diff --git a/src/plugins/multimedia/gstreamer/common/qgstsubtitlesink.cpp b/src/plugins/multimedia/gstreamer/common/qgstsubtitlesink.cpp new file mode 100644 index 000000000..58b5c3f53 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qgstsubtitlesink.cpp @@ -0,0 +1,161 @@ +// Copyright (C) 2021 The Qt Company +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#include 
"qgstsubtitlesink_p.h" +#include "qgst_debug_p.h" + +#include <QtCore/qdebug.h> + +QT_BEGIN_NAMESPACE + +namespace { +GstBaseSinkClass *gst_sink_parent_class; +thread_local QAbstractSubtitleObserver *gst_current_observer; + +class QGstSubtitleSinkClass +{ +public: + GstBaseSinkClass parent_class; +}; + +} // namespace + +#define ST_SINK(s) QGstSubtitleSink *sink(reinterpret_cast<QGstSubtitleSink *>(s)) + +QGstElement QGstSubtitleSink::createSink(QAbstractSubtitleObserver *observer) +{ + gst_current_observer = observer; + + QGstSubtitleSink *gstSink = reinterpret_cast<QGstSubtitleSink *>( + g_object_new(QGstSubtitleSink::get_type(), nullptr)); + g_object_set(gstSink, "async", false, nullptr); + + return QGstElement{ + qGstCheckedCast<GstElement>(gstSink), + QGstElement::NeedsRef, + }; +} + +GType QGstSubtitleSink::get_type() +{ + // clang-format off + static constexpr GTypeInfo info = + { + sizeof(QGstSubtitleSinkClass), // class_size + base_init, // base_init + nullptr, // base_finalize + class_init, // class_init + nullptr, // class_finalize + nullptr, // class_data + sizeof(QGstSubtitleSink), // instance_size + 0, // n_preallocs + instance_init, // instance_init + nullptr // value_table + }; + // clang-format on + + static const GType type = []() { + const auto result = g_type_register_static( + GST_TYPE_BASE_SINK, "QGstSubtitleSink", &info, GTypeFlags(0)); + return result; + }(); + + return type; +} + +void QGstSubtitleSink::class_init(gpointer g_class, gpointer class_data) +{ + Q_UNUSED(class_data); + + gst_sink_parent_class = reinterpret_cast<GstBaseSinkClass *>(g_type_class_peek_parent(g_class)); + + GstBaseSinkClass *base_sink_class = reinterpret_cast<GstBaseSinkClass *>(g_class); + base_sink_class->render = QGstSubtitleSink::render; + base_sink_class->get_caps = QGstSubtitleSink::get_caps; + base_sink_class->set_caps = QGstSubtitleSink::set_caps; + base_sink_class->propose_allocation = QGstSubtitleSink::propose_allocation; + base_sink_class->wait_event = 
QGstSubtitleSink::wait_event; + + GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class); + element_class->change_state = QGstSubtitleSink::change_state; + gst_element_class_set_metadata(element_class, + "Qt built-in subtitle sink", + "Sink/Subtitle", + "Qt default built-in subtitle sink", + "The Qt Company"); + + GObjectClass *object_class = reinterpret_cast<GObjectClass *>(g_class); + object_class->finalize = QGstSubtitleSink::finalize; +} + +void QGstSubtitleSink::base_init(gpointer g_class) +{ + static GstStaticPadTemplate sink_pad_template = + GST_STATIC_PAD_TEMPLATE("sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS("ANY")); + + gst_element_class_add_pad_template( + GST_ELEMENT_CLASS(g_class), gst_static_pad_template_get(&sink_pad_template)); +} + +void QGstSubtitleSink::instance_init(GTypeInstance *instance, gpointer /*g_class*/) +{ + ST_SINK(instance); + + Q_ASSERT(gst_current_observer); + sink->observer = gst_current_observer; + gst_current_observer = nullptr; +} + +void QGstSubtitleSink::finalize(GObject *object) +{ + // Chain up + G_OBJECT_CLASS(gst_sink_parent_class)->finalize(object); +} + +GstStateChangeReturn QGstSubtitleSink::change_state(GstElement *element, GstStateChange transition) +{ + return GST_ELEMENT_CLASS(gst_sink_parent_class)->change_state(element, transition); +} + +GstCaps *QGstSubtitleSink::get_caps(GstBaseSink *base, GstCaps *filter) +{ + return gst_sink_parent_class->get_caps(base, filter); +} + +gboolean QGstSubtitleSink::set_caps(GstBaseSink *base, GstCaps *caps) +{ + qDebug() << "set_caps:" << caps; + return gst_sink_parent_class->set_caps(base, caps); +} + +gboolean QGstSubtitleSink::propose_allocation(GstBaseSink *base, GstQuery *query) +{ + return gst_sink_parent_class->propose_allocation(base, query); +} + +GstFlowReturn QGstSubtitleSink::wait_event(GstBaseSink *base, GstEvent *event) +{ + GstFlowReturn retval = gst_sink_parent_class->wait_event(base, event); + ST_SINK(base); + if (event->type == 
GST_EVENT_GAP) { + // qDebug() << "gap, clearing subtitle"; + sink->observer->updateSubtitle(QString()); + } + return retval; +} + +GstFlowReturn QGstSubtitleSink::render(GstBaseSink *base, GstBuffer *buffer) +{ + ST_SINK(base); + GstMemory *mem = gst_buffer_get_memory(buffer, 0); + GstMapInfo info; + QString subtitle; + if (gst_memory_map(mem, &info, GST_MAP_READ)) + subtitle = QString::fromUtf8(reinterpret_cast<const char *>(info.data)); + gst_memory_unmap(mem, &info); +// qDebug() << "render" << buffer << subtitle; + sink->observer->updateSubtitle(subtitle); + return GST_FLOW_OK; +} + +QT_END_NAMESPACE diff --git a/src/plugins/multimedia/gstreamer/common/qgstsubtitlesink_p.h b/src/plugins/multimedia/gstreamer/common/qgstsubtitlesink_p.h new file mode 100644 index 000000000..1970ac48b --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qgstsubtitlesink_p.h @@ -0,0 +1,64 @@ +// Copyright (C) 2021 The Qt Company +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#ifndef QGSTSUBTITLESINK_P_H +#define QGSTSUBTITLESINK_P_H + +// +// W A R N I N G +// ------------- +// +// This file is not part of the Qt API. It exists purely as an +// implementation detail. This header file may change from version to +// version without notice, or even be removed. +// +// We mean it. 
+// + +#include <QtMultimedia/private/qtmultimediaglobal_p.h> + +#include <QtCore/qstring.h> +#include <common/qgst_p.h> +#include <gst/base/gstbasesink.h> + +QT_BEGIN_NAMESPACE + +class QAbstractSubtitleObserver +{ +public: + virtual ~QAbstractSubtitleObserver() = default; + virtual void updateSubtitle(QString) = 0; +}; + +class QGstSubtitleSink +{ +public: + GstBaseSink parent{}; + + static QGstElement createSink(QAbstractSubtitleObserver *observer); + +private: + static GType get_type(); + static void class_init(gpointer g_class, gpointer class_data); + static void base_init(gpointer g_class); + static void instance_init(GTypeInstance *instance, gpointer g_class); + + static void finalize(GObject *object); + + static GstStateChangeReturn change_state(GstElement *element, GstStateChange transition); + + static GstCaps *get_caps(GstBaseSink *sink, GstCaps *filter); + static gboolean set_caps(GstBaseSink *sink, GstCaps *caps); + + static gboolean propose_allocation(GstBaseSink *sink, GstQuery *query); + + static GstFlowReturn wait_event(GstBaseSink * sink, GstEvent * event); + static GstFlowReturn render(GstBaseSink *sink, GstBuffer *buffer); + +private: + QAbstractSubtitleObserver *observer = nullptr; +}; + +QT_END_NAMESPACE + +#endif diff --git a/src/plugins/multimedia/gstreamer/common/qgstutils.cpp b/src/plugins/multimedia/gstreamer/common/qgstutils.cpp new file mode 100644 index 000000000..8ec2bde3c --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qgstutils.cpp @@ -0,0 +1,141 @@ +// Copyright (C) 2016 The Qt Company Ltd. 
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#include <common/qgstutils_p.h> +#include <common/qgst_p.h> + +#include <QtMultimedia/qaudioformat.h> + +#include <chrono> + +QT_BEGIN_NAMESPACE + +namespace { + +const char *audioSampleFormatNames[QAudioFormat::NSampleFormats] = { + nullptr, +#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN + "U8", + "S16LE", + "S32LE", + "F32LE" +#else + "U8", + "S16BE", + "S32BE", + "F32BE" +#endif +}; + +QAudioFormat::SampleFormat gstSampleFormatToSampleFormat(const char *fmt) +{ + if (fmt) { + for (int i = 1; i < QAudioFormat::NSampleFormats; ++i) { + if (strcmp(fmt, audioSampleFormatNames[i])) + continue; + return QAudioFormat::SampleFormat(i); + } + } + return QAudioFormat::Unknown; +} + +} // namespace + +/* + Returns audio format for a sample \a sample. + If the buffer doesn't have a valid audio format, an empty QAudioFormat is returned. +*/ +QAudioFormat QGstUtils::audioFormatForSample(GstSample *sample) +{ + auto caps = QGstCaps(gst_sample_get_caps(sample), QGstCaps::NeedsRef); + if (caps.isNull()) + return {}; + return audioFormatForCaps(caps); +} + +QAudioFormat QGstUtils::audioFormatForCaps(const QGstCaps &caps) +{ + QAudioFormat format; + QGstStructureView s = caps.at(0); + if (s.name() != "audio/x-raw") + return format; + + auto rate = s["rate"].toInt(); + auto channels = s["channels"].toInt(); + QAudioFormat::SampleFormat fmt = gstSampleFormatToSampleFormat(s["format"].toString()); + if (!rate || !channels || fmt == QAudioFormat::Unknown) + return format; + + format.setSampleRate(*rate); + format.setChannelCount(*channels); + format.setSampleFormat(fmt); + + return format; +} + +/* + Builds GstCaps for an audio format \a format. + Returns 0 if the audio format is not valid. + + \note Caller must unreference GstCaps. 
+*/ + +QGstCaps QGstUtils::capsForAudioFormat(const QAudioFormat &format) +{ + if (!format.isValid()) + return {}; + + auto sampleFormat = format.sampleFormat(); + auto caps = gst_caps_new_simple( + "audio/x-raw", + "format" , G_TYPE_STRING, audioSampleFormatNames[sampleFormat], + "rate" , G_TYPE_INT , format.sampleRate(), + "channels", G_TYPE_INT , format.channelCount(), + "layout" , G_TYPE_STRING, "interleaved", + nullptr); + + return QGstCaps(caps, QGstCaps::HasRef); +} + +QList<QAudioFormat::SampleFormat> QGValue::getSampleFormats() const +{ + if (!GST_VALUE_HOLDS_LIST(value)) + return {}; + + QList<QAudioFormat::SampleFormat> formats; + guint nFormats = gst_value_list_get_size(value); + for (guint f = 0; f < nFormats; ++f) { + QGValue v = QGValue{ gst_value_list_get_value(value, f) }; + auto *name = v.toString(); + QAudioFormat::SampleFormat fmt = gstSampleFormatToSampleFormat(name); + if (fmt == QAudioFormat::Unknown) + continue; + formats.append(fmt); + } + return formats; +} + +void QGstUtils::setFrameTimeStampsFromBuffer(QVideoFrame *frame, GstBuffer *buffer) +{ + using namespace std::chrono; + using namespace std::chrono_literals; + + // GStreamer uses nanoseconds, Qt uses microseconds + nanoseconds startTime{ GST_BUFFER_TIMESTAMP(buffer) }; + if (startTime >= 0ns) { + frame->setStartTime(floor<microseconds>(startTime).count()); + + nanoseconds duration{ GST_BUFFER_DURATION(buffer) }; + if (duration >= 0ns) + frame->setEndTime(floor<microseconds>(startTime + duration).count()); + } +} + +GList *qt_gst_video_sinks() +{ + return gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_SINK + | GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO, + GST_RANK_MARGINAL); +} + +QT_END_NAMESPACE diff --git a/src/plugins/multimedia/gstreamer/common/qgstutils_p.h b/src/plugins/multimedia/gstreamer/common/qgstutils_p.h new file mode 100644 index 000000000..c65fcf090 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qgstutils_p.h @@ -0,0 +1,41 @@ +// Copyright (C) 
2016 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#ifndef QGSTUTILS_P_H +#define QGSTUTILS_P_H + +// +// W A R N I N G +// ------------- +// +// This file is not part of the Qt API. It exists purely as an +// implementation detail. This header file may change from version to +// version without notice, or even be removed. +// +// We mean it. +// + +#include <gst/gstsample.h> +#include <gst/gstbuffer.h> + +#include <QtCore/qglobal.h> + +QT_BEGIN_NAMESPACE + +class QAudioFormat; +class QGstCaps; +class QVideoFrame; + +namespace QGstUtils { +QAudioFormat audioFormatForSample(GstSample *sample); +QAudioFormat audioFormatForCaps(const QGstCaps &caps); +QGstCaps capsForAudioFormat(const QAudioFormat &format); + +void setFrameTimeStampsFromBuffer(QVideoFrame *frame, GstBuffer *buffer); +} // namespace QGstUtils + +GList *qt_gst_video_sinks(); + +QT_END_NAMESPACE + +#endif diff --git a/src/plugins/multimedia/gstreamer/common/qgstvideobuffer.cpp b/src/plugins/multimedia/gstreamer/common/qgstvideobuffer.cpp new file mode 100644 index 000000000..df3fb3d69 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qgstvideobuffer.cpp @@ -0,0 +1,393 @@ +// Copyright (C) 2016 The Qt Company Ltd. 
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#include "qgstvideobuffer_p.h" +#include "qgstreamervideosink_p.h" +#include <private/qvideotexturehelper_p.h> +#include <qpa/qplatformnativeinterface.h> +#include <qguiapplication.h> + +#include <gst/video/video.h> +#include <gst/video/video-frame.h> +#include <gst/video/gstvideometa.h> +#include <gst/pbutils/gstpluginsbaseversion.h> + +#include <common/qgstutils_p.h> + +#if QT_CONFIG(gstreamer_gl) +# include <QtGui/rhi/qrhi.h> +# include <QtGui/qopenglcontext.h> +# include <QtGui/qopenglfunctions.h> +# include <QtGui/qopengl.h> + +# include <gst/gl/gstglconfig.h> +# include <gst/gl/gstglmemory.h> +# include <gst/gl/gstglsyncmeta.h> + +# include <EGL/egl.h> +# include <EGL/eglext.h> + +# if QT_CONFIG(linux_dmabuf) +# include <gst/allocators/gstdmabuf.h> +# endif +#endif + +QT_BEGIN_NAMESPACE + +// keep things building without drm_fourcc.h +#define fourcc_code(a, b, c, d) ((uint32_t)(a) | ((uint32_t)(b) << 8) | \ + ((uint32_t)(c) << 16) | ((uint32_t)(d) << 24)) + +#define DRM_FORMAT_RGBA8888 fourcc_code('R', 'A', '2', '4') /* [31:0] R:G:B:A 8:8:8:8 little endian */ +#define DRM_FORMAT_RGB888 fourcc_code('R', 'G', '2', '4') /* [23:0] R:G:B little endian */ +#define DRM_FORMAT_RG88 fourcc_code('R', 'G', '8', '8') /* [15:0] R:G 8:8 little endian */ +#define DRM_FORMAT_ABGR8888 fourcc_code('A', 'B', '2', '4') /* [31:0] A:B:G:R 8:8:8:8 little endian */ +#define DRM_FORMAT_BGR888 fourcc_code('B', 'G', '2', '4') /* [23:0] B:G:R little endian */ +#define DRM_FORMAT_GR88 fourcc_code('G', 'R', '8', '8') /* [15:0] G:R 8:8 little endian */ +#define DRM_FORMAT_R8 fourcc_code('R', '8', ' ', ' ') /* [7:0] R */ +#define DRM_FORMAT_R16 fourcc_code('R', '1', '6', ' ') /* [15:0] R little endian */ +#define DRM_FORMAT_RGB565 fourcc_code('R', 'G', '1', '6') /* [15:0] R:G:B 5:6:5 little endian */ +#define DRM_FORMAT_RG1616 fourcc_code('R', 'G', '3', '2') /* [31:0] R:G 16:16 little 
endian */ +#define DRM_FORMAT_GR1616 fourcc_code('G', 'R', '3', '2') /* [31:0] G:R 16:16 little endian */ +#define DRM_FORMAT_BGRA1010102 fourcc_code('B', 'A', '3', '0') /* [31:0] B:G:R:A 10:10:10:2 little endian */ + +QGstVideoBuffer::QGstVideoBuffer(QGstBufferHandle buffer, const GstVideoInfo &info, + QGstreamerVideoSink *sink, const QVideoFrameFormat &frameFormat, + QGstCaps::MemoryFormat format) + : QHwVideoBuffer((sink && sink->rhi() && format != QGstCaps::CpuMemory) + ? QVideoFrame::RhiTextureHandle + : QVideoFrame::NoHandle, + sink ? sink->rhi() : nullptr), + memoryFormat(format), + m_frameFormat(frameFormat), + m_rhi(sink ? sink->rhi() : nullptr), + m_videoInfo(info), + m_buffer(std::move(buffer)) +{ + if (sink) { + eglDisplay = sink->eglDisplay(); + eglImageTargetTexture2D = sink->eglImageTargetTexture2D(); + } + +#if !QT_CONFIG(gstreamer_gl) + Q_UNUSED(memoryFormat); +#endif +} + +QGstVideoBuffer::~QGstVideoBuffer() +{ + Q_ASSERT(m_mode == QtVideo::MapMode::NotMapped); +} + +QAbstractVideoBuffer::MapData QGstVideoBuffer::map(QtVideo::MapMode mode) +{ + const GstMapFlags flags = GstMapFlags( + ((mode & QtVideo::MapMode::ReadOnly ) == QtVideo::MapMode::NotMapped ? 0 : GST_MAP_READ) + | ((mode & QtVideo::MapMode::WriteOnly) == QtVideo::MapMode::NotMapped ? 
0 : GST_MAP_WRITE)); + + MapData mapData; + if (mode == QtVideo::MapMode::NotMapped || m_mode != QtVideo::MapMode::NotMapped) + return mapData; + + if (m_videoInfo.finfo->n_planes == 0) { // Encoded + if (gst_buffer_map(m_buffer.get(), &m_frame.map[0], flags)) { + mapData.planeCount = 1; + mapData.bytesPerLine[0] = -1; + mapData.dataSize[0] = m_frame.map[0].size; + mapData.data[0] = static_cast<uchar *>(m_frame.map[0].data); + + m_mode = mode; + } + } else if (gst_video_frame_map(&m_frame, &m_videoInfo, m_buffer.get(), flags)) { + mapData.planeCount = GST_VIDEO_FRAME_N_PLANES(&m_frame); + + for (guint i = 0; i < GST_VIDEO_FRAME_N_PLANES(&m_frame); ++i) { + mapData.bytesPerLine[i] = GST_VIDEO_FRAME_PLANE_STRIDE(&m_frame, i); + mapData.data[i] = static_cast<uchar *>(GST_VIDEO_FRAME_PLANE_DATA(&m_frame, i)); + mapData.dataSize[i] = mapData.bytesPerLine[i]*GST_VIDEO_FRAME_COMP_HEIGHT(&m_frame, i); + } + + m_mode = mode; + } + return mapData; +} + +void QGstVideoBuffer::unmap() +{ + if (m_mode != QtVideo::MapMode::NotMapped) { + if (m_videoInfo.finfo->n_planes == 0) + gst_buffer_unmap(m_buffer.get(), &m_frame.map[0]); + else + gst_video_frame_unmap(&m_frame); + } + m_mode = QtVideo::MapMode::NotMapped; +} + +#if QT_CONFIG(gstreamer_gl) && QT_CONFIG(linux_dmabuf) +static int +fourccFromVideoInfo(const GstVideoInfo * info, int plane) +{ + GstVideoFormat format = GST_VIDEO_INFO_FORMAT (info); +#if G_BYTE_ORDER == G_LITTLE_ENDIAN + const gint rgba_fourcc = DRM_FORMAT_ABGR8888; + const gint rgb_fourcc = DRM_FORMAT_BGR888; + const gint rg_fourcc = DRM_FORMAT_GR88; +#else + const gint rgba_fourcc = DRM_FORMAT_RGBA8888; + const gint rgb_fourcc = DRM_FORMAT_RGB888; + const gint rg_fourcc = DRM_FORMAT_RG88; +#endif + + GST_DEBUG ("Getting DRM fourcc for %s plane %i", + gst_video_format_to_string (format), plane); + + switch (format) { + case GST_VIDEO_FORMAT_RGB16: + case GST_VIDEO_FORMAT_BGR16: + return DRM_FORMAT_RGB565; + + case GST_VIDEO_FORMAT_RGB: + case 
GST_VIDEO_FORMAT_BGR: + return rgb_fourcc; + + case GST_VIDEO_FORMAT_RGBA: + case GST_VIDEO_FORMAT_RGBx: + case GST_VIDEO_FORMAT_BGRA: + case GST_VIDEO_FORMAT_BGRx: + case GST_VIDEO_FORMAT_ARGB: + case GST_VIDEO_FORMAT_xRGB: + case GST_VIDEO_FORMAT_ABGR: + case GST_VIDEO_FORMAT_xBGR: + case GST_VIDEO_FORMAT_AYUV: +#if GST_CHECK_PLUGINS_BASE_VERSION(1,16,0) + case GST_VIDEO_FORMAT_VUYA: +#endif + return rgba_fourcc; + + case GST_VIDEO_FORMAT_GRAY8: + return DRM_FORMAT_R8; + + case GST_VIDEO_FORMAT_YUY2: + case GST_VIDEO_FORMAT_UYVY: + case GST_VIDEO_FORMAT_GRAY16_LE: + case GST_VIDEO_FORMAT_GRAY16_BE: + return rg_fourcc; + + case GST_VIDEO_FORMAT_NV12: + case GST_VIDEO_FORMAT_NV21: + return plane == 0 ? DRM_FORMAT_R8 : rg_fourcc; + + case GST_VIDEO_FORMAT_I420: + case GST_VIDEO_FORMAT_YV12: + case GST_VIDEO_FORMAT_Y41B: + case GST_VIDEO_FORMAT_Y42B: + case GST_VIDEO_FORMAT_Y444: + return DRM_FORMAT_R8; + +#if GST_CHECK_PLUGINS_BASE_VERSION(1,16,0) + case GST_VIDEO_FORMAT_BGR10A2_LE: + return DRM_FORMAT_BGRA1010102; +#endif + +// case GST_VIDEO_FORMAT_RGB10A2_LE: +// return DRM_FORMAT_RGBA1010102; + + case GST_VIDEO_FORMAT_P010_10LE: +// case GST_VIDEO_FORMAT_P012_LE: +// case GST_VIDEO_FORMAT_P016_LE: + return plane == 0 ? DRM_FORMAT_R16 : DRM_FORMAT_GR1616; + + case GST_VIDEO_FORMAT_P010_10BE: +// case GST_VIDEO_FORMAT_P012_BE: +// case GST_VIDEO_FORMAT_P016_BE: + return plane == 0 ? 
DRM_FORMAT_R16 : DRM_FORMAT_RG1616; + + default: + GST_ERROR ("Unsupported format for DMABuf."); + return -1; + } +} +#endif + +#if QT_CONFIG(gstreamer_gl) +struct GlTextures +{ + uint count = 0; + bool owned = false; + std::array<guint32, QVideoTextureHelper::TextureDescription::maxPlanes> names{}; +}; + +class QGstQVideoFrameTextures : public QVideoFrameTextures +{ +public: + QGstQVideoFrameTextures(QRhi *rhi, QSize size, QVideoFrameFormat::PixelFormat format, GlTextures &textures) + : m_rhi(rhi) + , m_glTextures(textures) + { + auto desc = QVideoTextureHelper::textureDescription(format); + for (uint i = 0; i < textures.count; ++i) { + QSize planeSize(desc->widthForPlane(size.width(), int(i)), + desc->heightForPlane(size.height(), int(i))); + m_textures[i].reset(rhi->newTexture(desc->textureFormat[i], planeSize, 1, {})); + m_textures[i]->createFrom({textures.names[i], 0}); + } + } + + ~QGstQVideoFrameTextures() + { + m_rhi->makeThreadLocalNativeContextCurrent(); + auto ctx = QOpenGLContext::currentContext(); + if (m_glTextures.owned && ctx) + ctx->functions()->glDeleteTextures(int(m_glTextures.count), m_glTextures.names.data()); + } + + QRhiTexture *texture(uint plane) const override + { + return plane < m_glTextures.count ? 
m_textures[plane].get() : nullptr; + } + +private: + QRhi *m_rhi = nullptr; + GlTextures m_glTextures; + std::unique_ptr<QRhiTexture> m_textures[QVideoTextureHelper::TextureDescription::maxPlanes]; +}; + +static GlTextures mapFromGlTexture(const QGstBufferHandle &bufferHandle, GstVideoFrame &frame, + GstVideoInfo &videoInfo) +{ + GstBuffer *buffer = bufferHandle.get(); + auto *mem = GST_GL_BASE_MEMORY_CAST(gst_buffer_peek_memory(buffer, 0)); + if (!mem) + return {}; + + if (!gst_video_frame_map(&frame, &videoInfo, buffer, GstMapFlags(GST_MAP_READ|GST_MAP_GL))) { + qWarning() << "Could not map GL textures"; + return {}; + } + + auto *sync_meta = gst_buffer_get_gl_sync_meta(buffer); + GstBuffer *sync_buffer = nullptr; + if (!sync_meta) { + sync_buffer = gst_buffer_new(); + sync_meta = gst_buffer_add_gl_sync_meta(mem->context, sync_buffer); + } + gst_gl_sync_meta_set_sync_point (sync_meta, mem->context); + gst_gl_sync_meta_wait (sync_meta, mem->context); + if (sync_buffer) + gst_buffer_unref(sync_buffer); + + GlTextures textures; + textures.count = frame.info.finfo->n_planes; + + for (uint i = 0; i < textures.count; ++i) + textures.names[i] = *(guint32 *)frame.data[i]; + + gst_video_frame_unmap(&frame); + + return textures; +} + +#if GST_GL_HAVE_PLATFORM_EGL && QT_CONFIG(linux_dmabuf) +static GlTextures mapFromDmaBuffer(QRhi *rhi, const QGstBufferHandle &bufferHandle, + GstVideoFrame &frame, GstVideoInfo &videoInfo, + Qt::HANDLE eglDisplay, QFunctionPointer eglImageTargetTexture2D) +{ + GstBuffer *buffer = bufferHandle.get(); + + Q_ASSERT(gst_is_dmabuf_memory(gst_buffer_peek_memory(buffer, 0))); + Q_ASSERT(eglDisplay); + Q_ASSERT(eglImageTargetTexture2D); + + auto *nativeHandles = static_cast<const QRhiGles2NativeHandles *>(rhi->nativeHandles()); + auto glContext = nativeHandles->context; + if (!glContext) { + qWarning() << "no GL context"; + return {}; + } + + if (!gst_video_frame_map(&frame, &videoInfo, buffer, GstMapFlags(GST_MAP_READ))) { + qDebug() << "Couldn't 
map DMA video frame"; + return {}; + } + + GlTextures textures = {}; + textures.owned = true; + textures.count = GST_VIDEO_FRAME_N_PLANES(&frame); + // int width = GST_VIDEO_FRAME_WIDTH(&frame); + // int height = GST_VIDEO_FRAME_HEIGHT(&frame); + Q_ASSERT(GST_VIDEO_FRAME_N_PLANES(&frame) == gst_buffer_n_memory(buffer)); + + QOpenGLFunctions functions(glContext); + functions.glGenTextures(int(textures.count), textures.names.data()); + + // qDebug() << Qt::hex << "glGenTextures: glerror" << glGetError() << "egl error" << eglGetError(); + // qDebug() << "converting DMA buffer nPlanes=" << nPlanes << m_textures[0] << m_textures[1] << m_textures[2]; + + for (int i = 0; i < int(textures.count); ++i) { + auto offset = GST_VIDEO_FRAME_PLANE_OFFSET(&frame, i); + auto stride = GST_VIDEO_FRAME_PLANE_STRIDE(&frame, i); + int planeWidth = GST_VIDEO_FRAME_COMP_WIDTH(&frame, i); + int planeHeight = GST_VIDEO_FRAME_COMP_HEIGHT(&frame, i); + auto mem = gst_buffer_peek_memory(buffer, i); + int fd = gst_dmabuf_memory_get_fd(mem); + + // qDebug() << " plane" << i << "size" << width << height << "stride" << stride << "offset" << offset << "fd=" << fd; + // ### do we need to open/close the fd? + // ### can we convert several planes at once? 
+ // Get the correct DRM_FORMATs from the texture format in the description + EGLAttrib const attribute_list[] = { + EGL_WIDTH, planeWidth, + EGL_HEIGHT, planeHeight, + EGL_LINUX_DRM_FOURCC_EXT, fourccFromVideoInfo(&videoInfo, i), + EGL_DMA_BUF_PLANE0_FD_EXT, fd, + EGL_DMA_BUF_PLANE0_OFFSET_EXT, (EGLAttrib)offset, + EGL_DMA_BUF_PLANE0_PITCH_EXT, stride, + EGL_NONE + }; + EGLImage image = eglCreateImage(eglDisplay, + EGL_NO_CONTEXT, + EGL_LINUX_DMA_BUF_EXT, + nullptr, + attribute_list); + if (image == EGL_NO_IMAGE_KHR) { + qWarning() << "could not create EGL image for plane" << i << Qt::hex << eglGetError(); + } + // qDebug() << Qt::hex << "eglCreateImage: glerror" << glGetError() << "egl error" << eglGetError(); + functions.glBindTexture(GL_TEXTURE_2D, textures.names[i]); + // qDebug() << Qt::hex << "bind texture: glerror" << glGetError() << "egl error" << eglGetError(); + auto EGLImageTargetTexture2D = (PFNGLEGLIMAGETARGETTEXTURE2DOESPROC)eglImageTargetTexture2D; + EGLImageTargetTexture2D(GL_TEXTURE_2D, image); + // qDebug() << Qt::hex << "glerror" << glGetError() << "egl error" << eglGetError(); + eglDestroyImage(eglDisplay, image); + } + gst_video_frame_unmap(&frame); + + return textures; +} +#endif +#endif + +std::unique_ptr<QVideoFrameTextures> QGstVideoBuffer::mapTextures(QRhi *rhi) +{ + if (!rhi) + return {}; + +#if QT_CONFIG(gstreamer_gl) + GlTextures textures = {}; + if (memoryFormat == QGstCaps::GLTexture) + textures = mapFromGlTexture(m_buffer, m_frame, m_videoInfo); + +# if GST_GL_HAVE_PLATFORM_EGL && QT_CONFIG(linux_dmabuf) + else if (memoryFormat == QGstCaps::DMABuf) + textures = mapFromDmaBuffer(m_rhi, m_buffer, m_frame, m_videoInfo, eglDisplay, + eglImageTargetTexture2D); + +# endif + if (textures.count > 0) + return std::make_unique<QGstQVideoFrameTextures>(rhi, QSize{m_videoInfo.width, m_videoInfo.height}, + m_frameFormat.pixelFormat(), textures); +#endif + return {}; +} + +QT_END_NAMESPACE diff --git 
a/src/plugins/multimedia/gstreamer/common/qgstvideobuffer_p.h b/src/plugins/multimedia/gstreamer/common/qgstvideobuffer_p.h new file mode 100644 index 000000000..573a4662c --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qgstvideobuffer_p.h @@ -0,0 +1,55 @@ +// Copyright (C) 2016 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#ifndef QGSTVIDEOBUFFER_P_H +#define QGSTVIDEOBUFFER_P_H + +// +// W A R N I N G +// ------------- +// +// This file is not part of the Qt API. It exists purely as an +// implementation detail. This header file may change from version to +// version without notice, or even be removed. +// +// We mean it. +// + +#include <private/qhwvideobuffer_p.h> +#include <QtCore/qvariant.h> + +#include <common/qgst_p.h> +#include <gst/video/video.h> + +QT_BEGIN_NAMESPACE +class QVideoFrameFormat; +class QGstreamerVideoSink; +class QOpenGLContext; + +class QGstVideoBuffer final : public QHwVideoBuffer +{ +public: + QGstVideoBuffer(QGstBufferHandle buffer, const GstVideoInfo &info, QGstreamerVideoSink *sink, + const QVideoFrameFormat &frameFormat, QGstCaps::MemoryFormat format); + ~QGstVideoBuffer(); + + MapData map(QtVideo::MapMode mode) override; + void unmap() override; + + std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *) override; + +private: + const QGstCaps::MemoryFormat memoryFormat = QGstCaps::CpuMemory; + const QVideoFrameFormat m_frameFormat; + QRhi *m_rhi = nullptr; + mutable GstVideoInfo m_videoInfo; + mutable GstVideoFrame m_frame{}; + const QGstBufferHandle m_buffer; + QtVideo::MapMode m_mode = QtVideo::MapMode::NotMapped; + Qt::HANDLE eglDisplay = nullptr; + QFunctionPointer eglImageTargetTexture2D = nullptr; +}; + +QT_END_NAMESPACE + +#endif diff --git a/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink.cpp b/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink.cpp new file mode 100644 index 000000000..f9c936ea6 --- /dev/null 
+++ b/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink.cpp @@ -0,0 +1,499 @@ +// Copyright (C) 2016 Jolla Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#include "qgstvideorenderersink_p.h" + +#include <QtMultimedia/qvideoframe.h> +#include <QtMultimedia/qvideosink.h> +#include <QtCore/private/qfactoryloader_p.h> +#include <QtCore/private/quniquehandle_p.h> +#include <QtCore/qcoreapplication.h> +#include <QtCore/qdebug.h> +#include <QtCore/qdebug.h> +#include <QtCore/qloggingcategory.h> +#include <QtCore/qmap.h> +#include <QtCore/qthread.h> +#include <QtGui/qevent.h> + +#include <common/qgstvideobuffer_p.h> +#include <common/qgstreamervideosink_p.h> +#include <common/qgst_debug_p.h> +#include <common/qgstutils_p.h> + +#include <private/qvideoframe_p.h> + +#include <gst/video/video.h> +#include <gst/video/gstvideometa.h> + + +#include <rhi/qrhi.h> +#if QT_CONFIG(gstreamer_gl) +#include <gst/gl/gl.h> +#endif // #if QT_CONFIG(gstreamer_gl) + +// DMA support +#if QT_CONFIG(linux_dmabuf) +#include <gst/allocators/gstdmabuf.h> +#endif + +static Q_LOGGING_CATEGORY(qLcGstVideoRenderer, "qt.multimedia.gstvideorenderer") + +QT_BEGIN_NAMESPACE + +QGstVideoRenderer::QGstVideoRenderer(QGstreamerVideoSink *sink) + : m_sink(sink), m_surfaceCaps(createSurfaceCaps(sink)) +{ + QObject::connect( + sink, &QGstreamerVideoSink::aboutToBeDestroyed, this, + [this] { + QMutexLocker locker(&m_sinkMutex); + m_sink = nullptr; + }, + Qt::DirectConnection); +} + +QGstVideoRenderer::~QGstVideoRenderer() = default; + +QGstCaps QGstVideoRenderer::createSurfaceCaps([[maybe_unused]] QGstreamerVideoSink *sink) +{ + QGstCaps caps = QGstCaps::create(); + + // All the formats that both we and gstreamer support + auto formats = QList<QVideoFrameFormat::PixelFormat>() + << QVideoFrameFormat::Format_YUV420P + << QVideoFrameFormat::Format_YUV422P + << QVideoFrameFormat::Format_YV12 + << QVideoFrameFormat::Format_UYVY + << 
QVideoFrameFormat::Format_YUYV + << QVideoFrameFormat::Format_NV12 + << QVideoFrameFormat::Format_NV21 + << QVideoFrameFormat::Format_AYUV + << QVideoFrameFormat::Format_P010 + << QVideoFrameFormat::Format_XRGB8888 + << QVideoFrameFormat::Format_XBGR8888 + << QVideoFrameFormat::Format_RGBX8888 + << QVideoFrameFormat::Format_BGRX8888 + << QVideoFrameFormat::Format_ARGB8888 + << QVideoFrameFormat::Format_ABGR8888 + << QVideoFrameFormat::Format_RGBA8888 + << QVideoFrameFormat::Format_BGRA8888 + << QVideoFrameFormat::Format_Y8 + << QVideoFrameFormat::Format_Y16 + ; +#if QT_CONFIG(gstreamer_gl) + QRhi *rhi = sink->rhi(); + if (rhi && rhi->backend() == QRhi::OpenGLES2) { + caps.addPixelFormats(formats, GST_CAPS_FEATURE_MEMORY_GL_MEMORY); +#if QT_CONFIG(linux_dmabuf) + if (sink->eglDisplay() && sink->eglImageTargetTexture2D()) { + // We currently do not handle planar DMA buffers, as it's somewhat unclear how to + // convert the planar EGLImage into something we can use from OpenGL + auto singlePlaneFormats = QList<QVideoFrameFormat::PixelFormat>() + << QVideoFrameFormat::Format_UYVY + << QVideoFrameFormat::Format_YUYV + << QVideoFrameFormat::Format_AYUV + << QVideoFrameFormat::Format_XRGB8888 + << QVideoFrameFormat::Format_XBGR8888 + << QVideoFrameFormat::Format_RGBX8888 + << QVideoFrameFormat::Format_BGRX8888 + << QVideoFrameFormat::Format_ARGB8888 + << QVideoFrameFormat::Format_ABGR8888 + << QVideoFrameFormat::Format_RGBA8888 + << QVideoFrameFormat::Format_BGRA8888 + << QVideoFrameFormat::Format_Y8 + << QVideoFrameFormat::Format_Y16 + ; + caps.addPixelFormats(singlePlaneFormats, GST_CAPS_FEATURE_MEMORY_DMABUF); + } +#endif + } +#endif + caps.addPixelFormats(formats); + return caps; +} + +const QGstCaps &QGstVideoRenderer::caps() +{ + return m_surfaceCaps; +} + +bool QGstVideoRenderer::start(const QGstCaps& caps) +{ + qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::start" << caps; + + { + m_frameRotationAngle = QtVideo::Rotation::None; + auto 
optionalFormatAndVideoInfo = caps.formatAndVideoInfo(); + if (optionalFormatAndVideoInfo) { + std::tie(m_format, m_videoInfo) = std::move(*optionalFormatAndVideoInfo); + } else { + m_format = {}; + m_videoInfo = {}; + } + m_memoryFormat = caps.memoryFormat(); + } + + return true; +} + +void QGstVideoRenderer::stop() +{ + qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::stop"; + + QMetaObject::invokeMethod(this, [this] { + m_currentState.buffer = {}; + m_sink->setVideoFrame(QVideoFrame{}); + return; + }); +} + +void QGstVideoRenderer::unlock() +{ + qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::unlock"; +} + +bool QGstVideoRenderer::proposeAllocation(GstQuery *) +{ + qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::proposeAllocation"; + return true; +} + +GstFlowReturn QGstVideoRenderer::render(GstBuffer *buffer) +{ + qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::render"; + + GstVideoCropMeta *meta = gst_buffer_get_video_crop_meta(buffer); + if (meta) { + QRect vp(meta->x, meta->y, meta->width, meta->height); + if (m_format.viewport() != vp) { + qCDebug(qLcGstVideoRenderer) + << Q_FUNC_INFO << " Update viewport on Metadata: [" << meta->height << "x" + << meta->width << " | " << meta->x << "x" << meta->y << "]"; + // Update viewport if data is not the same + m_format.setViewport(vp); + } + } + + RenderBufferState state{ + .buffer = QGstBufferHandle{ buffer, QGstBufferHandle::NeedsRef }, + .format = m_format, + .memoryFormat = m_memoryFormat, + .mirrored = m_frameMirrored, + .rotationAngle = m_frameRotationAngle, + }; + + qCDebug(qLcGstVideoRenderer) << " sending video frame"; + + QMetaObject::invokeMethod(this, [this, state = std::move(state)]() mutable { + if (state == m_currentState) { + // same buffer received twice + if (!m_sink || !m_sink->inStoppedState()) + return; + + qCDebug(qLcGstVideoRenderer) << " showing empty video frame"; + m_currentVideoFrame = {}; + m_sink->setVideoFrame(m_currentVideoFrame); + m_currentState = {}; + return; + } + + 
auto videoBuffer = std::make_unique<QGstVideoBuffer>(state.buffer, m_videoInfo, m_sink, + state.format, state.memoryFormat); + QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(videoBuffer), state.format); + QGstUtils::setFrameTimeStampsFromBuffer(&frame, state.buffer.get()); + frame.setMirrored(state.mirrored); + frame.setRotation(state.rotationAngle); + m_currentVideoFrame = std::move(frame); + m_currentState = std::move(state); + + if (!m_sink) + return; + + if (m_sink->inStoppedState()) { + qCDebug(qLcGstVideoRenderer) << " showing empty video frame"; + m_currentVideoFrame = {}; + } + + m_sink->setVideoFrame(m_currentVideoFrame); + }); + + return GST_FLOW_OK; +} + +bool QGstVideoRenderer::query(GstQuery *query) +{ +#if QT_CONFIG(gstreamer_gl) + if (GST_QUERY_TYPE(query) == GST_QUERY_CONTEXT) { + const gchar *type; + gst_query_parse_context_type(query, &type); + + if (strcmp(type, "gst.gl.local_context") != 0) + return false; + + QMutexLocker locker(&m_sinkMutex); + if (!m_sink) + return false; + + auto *gstGlContext = m_sink->gstGlLocalContext(); + if (!gstGlContext) + return false; + + gst_query_set_context(query, gstGlContext); + + return true; + } +#else + Q_UNUSED(query); +#endif + return false; +} + +void QGstVideoRenderer::gstEvent(GstEvent *event) +{ + switch (GST_EVENT_TYPE(event)) { + case GST_EVENT_TAG: + qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::gstEvent: Tag"; + return gstEventHandleTag(event); + case GST_EVENT_EOS: + qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::gstEvent: EOS"; + return gstEventHandleEOS(event); + + default: + qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::gstEvent: unhandled event - " << event; + return; + } +} + +void QGstVideoRenderer::gstEventHandleTag(GstEvent *event) +{ + GstTagList *taglist = nullptr; + gst_event_parse_tag(event, &taglist); + if (!taglist) + return; + + QGString value; + if (!gst_tag_list_get_string(taglist, GST_TAG_IMAGE_ORIENTATION, &value)) + return; + + constexpr const 
char rotate[] = "rotate-"; + constexpr const char flipRotate[] = "flip-rotate-"; + constexpr size_t rotateLen = sizeof(rotate) - 1; + constexpr size_t flipRotateLen = sizeof(flipRotate) - 1; + + bool mirrored = false; + int rotationAngle = 0; + + if (!strncmp(rotate, value.get(), rotateLen)) { + rotationAngle = atoi(value.get() + rotateLen); + } else if (!strncmp(flipRotate, value.get(), flipRotateLen)) { + // To flip by horizontal axis is the same as to mirror by vertical axis + // and rotate by 180 degrees. + mirrored = true; + rotationAngle = (180 + atoi(value.get() + flipRotateLen)) % 360; + } + + m_frameMirrored = mirrored; + switch (rotationAngle) { + case 0: + m_frameRotationAngle = QtVideo::Rotation::None; + break; + case 90: + m_frameRotationAngle = QtVideo::Rotation::Clockwise90; + break; + case 180: + m_frameRotationAngle = QtVideo::Rotation::Clockwise180; + break; + case 270: + m_frameRotationAngle = QtVideo::Rotation::Clockwise270; + break; + default: + m_frameRotationAngle = QtVideo::Rotation::None; + } +} + +void QGstVideoRenderer::gstEventHandleEOS(GstEvent *) +{ + stop(); +} + +static GstVideoSinkClass *gvrs_sink_parent_class; +static thread_local QGstreamerVideoSink *gvrs_current_sink; + +#define VO_SINK(s) QGstVideoRendererSink *sink(reinterpret_cast<QGstVideoRendererSink *>(s)) + +QGstVideoRendererSink *QGstVideoRendererSink::createSink(QGstreamerVideoSink *sink) +{ + setSink(sink); + QGstVideoRendererSink *gstSink = reinterpret_cast<QGstVideoRendererSink *>( + g_object_new(QGstVideoRendererSink::get_type(), nullptr)); + + return gstSink; +} + +void QGstVideoRendererSink::setSink(QGstreamerVideoSink *sink) +{ + gvrs_current_sink = sink; +} + +GType QGstVideoRendererSink::get_type() +{ + static const GTypeInfo info = + { + sizeof(QGstVideoRendererSinkClass), // class_size + base_init, // base_init + nullptr, // base_finalize + class_init, // class_init + nullptr, // class_finalize + nullptr, // class_data + sizeof(QGstVideoRendererSink), // 
instance_size + 0, // n_preallocs + instance_init, // instance_init + nullptr // value_table + }; + + static const GType type = g_type_register_static(GST_TYPE_VIDEO_SINK, "QGstVideoRendererSink", + &info, GTypeFlags(0)); + + return type; +} + +void QGstVideoRendererSink::class_init(gpointer g_class, gpointer class_data) +{ + Q_UNUSED(class_data); + + gvrs_sink_parent_class = reinterpret_cast<GstVideoSinkClass *>(g_type_class_peek_parent(g_class)); + + GstVideoSinkClass *video_sink_class = reinterpret_cast<GstVideoSinkClass *>(g_class); + video_sink_class->show_frame = QGstVideoRendererSink::show_frame; + + GstBaseSinkClass *base_sink_class = reinterpret_cast<GstBaseSinkClass *>(g_class); + base_sink_class->get_caps = QGstVideoRendererSink::get_caps; + base_sink_class->set_caps = QGstVideoRendererSink::set_caps; + base_sink_class->propose_allocation = QGstVideoRendererSink::propose_allocation; + base_sink_class->stop = QGstVideoRendererSink::stop; + base_sink_class->unlock = QGstVideoRendererSink::unlock; + base_sink_class->query = QGstVideoRendererSink::query; + base_sink_class->event = QGstVideoRendererSink::event; + + GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class); + element_class->change_state = QGstVideoRendererSink::change_state; + gst_element_class_set_metadata(element_class, + "Qt built-in video renderer sink", + "Sink/Video", + "Qt default built-in video renderer sink", + "The Qt Company"); + + GObjectClass *object_class = reinterpret_cast<GObjectClass *>(g_class); + object_class->finalize = QGstVideoRendererSink::finalize; +} + +void QGstVideoRendererSink::base_init(gpointer g_class) +{ + static GstStaticPadTemplate sink_pad_template = GST_STATIC_PAD_TEMPLATE( + "sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS( + "video/x-raw, " + "framerate = (fraction) [ 0, MAX ], " + "width = (int) [ 1, MAX ], " + "height = (int) [ 1, MAX ]")); + + gst_element_class_add_pad_template( + GST_ELEMENT_CLASS(g_class), 
gst_static_pad_template_get(&sink_pad_template)); +} + +void QGstVideoRendererSink::instance_init(GTypeInstance *instance, gpointer g_class) +{ + Q_UNUSED(g_class); + VO_SINK(instance); + + Q_ASSERT(gvrs_current_sink); + + sink->renderer = new QGstVideoRenderer(gvrs_current_sink); + sink->renderer->moveToThread(gvrs_current_sink->thread()); + gvrs_current_sink = nullptr; +} + +void QGstVideoRendererSink::finalize(GObject *object) +{ + VO_SINK(object); + + delete sink->renderer; + + // Chain up + G_OBJECT_CLASS(gvrs_sink_parent_class)->finalize(object); +} + +GstStateChangeReturn QGstVideoRendererSink::change_state( + GstElement *element, GstStateChange transition) +{ + return GST_ELEMENT_CLASS(gvrs_sink_parent_class)->change_state(element, transition); +} + +GstCaps *QGstVideoRendererSink::get_caps(GstBaseSink *base, GstCaps *filter) +{ + VO_SINK(base); + + QGstCaps caps = sink->renderer->caps(); + if (filter) + caps = QGstCaps(gst_caps_intersect(caps.caps(), filter), QGstCaps::HasRef); + + return caps.release(); +} + +gboolean QGstVideoRendererSink::set_caps(GstBaseSink *base, GstCaps *gcaps) +{ + VO_SINK(base); + auto caps = QGstCaps(gcaps, QGstCaps::NeedsRef); + + qCDebug(qLcGstVideoRenderer) << "set_caps:" << caps; + + if (caps.isNull()) { + sink->renderer->stop(); + return TRUE; + } + + return sink->renderer->start(caps); +} + +gboolean QGstVideoRendererSink::propose_allocation(GstBaseSink *base, GstQuery *query) +{ + VO_SINK(base); + return sink->renderer->proposeAllocation(query); +} + +gboolean QGstVideoRendererSink::stop(GstBaseSink *base) +{ + VO_SINK(base); + sink->renderer->stop(); + return TRUE; +} + +gboolean QGstVideoRendererSink::unlock(GstBaseSink *base) +{ + VO_SINK(base); + sink->renderer->unlock(); + return TRUE; +} + +GstFlowReturn QGstVideoRendererSink::show_frame(GstVideoSink *base, GstBuffer *buffer) +{ + VO_SINK(base); + return sink->renderer->render(buffer); +} + +gboolean QGstVideoRendererSink::query(GstBaseSink *base, GstQuery *query) +{ 
+ VO_SINK(base); + if (sink->renderer->query(query)) + return TRUE; + + return GST_BASE_SINK_CLASS(gvrs_sink_parent_class)->query(base, query); +} + +gboolean QGstVideoRendererSink::event(GstBaseSink *base, GstEvent * event) +{ + VO_SINK(base); + sink->renderer->gstEvent(event); + return GST_BASE_SINK_CLASS(gvrs_sink_parent_class)->event(base, event); +} + +QT_END_NAMESPACE diff --git a/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink_p.h b/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink_p.h new file mode 100644 index 000000000..d9e3db462 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink_p.h @@ -0,0 +1,138 @@ +// Copyright (C) 2016 Jolla Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#ifndef QGSTVIDEORENDERERSINK_P_H +#define QGSTVIDEORENDERERSINK_P_H + +// +// W A R N I N G +// ------------- +// +// This file is not part of the Qt API. It exists purely as an +// implementation detail. This header file may change from version to +// version without notice, or even be removed. +// +// We mean it. 
+// + +#include <QtMultimedia/qvideoframeformat.h> +#include <QtMultimedia/qvideoframe.h> +#include <QtMultimedia/private/qtmultimediaglobal_p.h> +#include <QtCore/qmutex.h> + +#include <gst/video/gstvideosink.h> +#include <gst/video/video.h> + +#include <QtCore/qlist.h> +#include <QtCore/qmutex.h> +#include <QtCore/qqueue.h> +#include <QtCore/qpointer.h> +#include <QtCore/qwaitcondition.h> +#include <qvideoframeformat.h> +#include <qvideoframe.h> +#include <common/qgstvideobuffer_p.h> +#include <common/qgst_p.h> + +QT_BEGIN_NAMESPACE + +class QGstVideoRenderer : public QObject +{ +public: + explicit QGstVideoRenderer(QGstreamerVideoSink *); + ~QGstVideoRenderer(); + + const QGstCaps &caps(); + + bool start(const QGstCaps &); + void stop(); + void unlock(); + bool proposeAllocation(GstQuery *); + GstFlowReturn render(GstBuffer *); + bool query(GstQuery *); + void gstEvent(GstEvent *); + +private: + void notify(); + static QGstCaps createSurfaceCaps(QGstreamerVideoSink *); + + void gstEventHandleTag(GstEvent *); + void gstEventHandleEOS(GstEvent *); + + QMutex m_sinkMutex; + QGstreamerVideoSink *m_sink = nullptr; // written only from qt thread. 
so only readers on + // worker threads need to acquire the lock + + // --- only accessed from gstreamer thread + const QGstCaps m_surfaceCaps; + QVideoFrameFormat m_format; + GstVideoInfo m_videoInfo{}; + QGstCaps::MemoryFormat m_memoryFormat = QGstCaps::CpuMemory; + bool m_frameMirrored = false; + QtVideo::Rotation m_frameRotationAngle = QtVideo::Rotation::None; + + // --- only accessed from qt thread + QVideoFrame m_currentVideoFrame; + + struct RenderBufferState + { + QGstBufferHandle buffer; + QVideoFrameFormat format; + QGstCaps::MemoryFormat memoryFormat; + bool mirrored; + QtVideo::Rotation rotationAngle; + + bool operator==(const RenderBufferState &rhs) const + { + return std::tie(buffer, format, memoryFormat, mirrored, rotationAngle) + == std::tie(rhs.buffer, rhs.format, rhs.memoryFormat, rhs.mirrored, + rhs.rotationAngle); + } + }; + RenderBufferState m_currentState; +}; + +class QGstVideoRendererSink +{ +public: + GstVideoSink parent{}; + + static QGstVideoRendererSink *createSink(QGstreamerVideoSink *surface); + static void setSink(QGstreamerVideoSink *surface); + +private: + static GType get_type(); + static void class_init(gpointer g_class, gpointer class_data); + static void base_init(gpointer g_class); + static void instance_init(GTypeInstance *instance, gpointer g_class); + + static void finalize(GObject *object); + + static GstStateChangeReturn change_state(GstElement *element, GstStateChange transition); + + static GstCaps *get_caps(GstBaseSink *sink, GstCaps *filter); + static gboolean set_caps(GstBaseSink *sink, GstCaps *caps); + + static gboolean propose_allocation(GstBaseSink *sink, GstQuery *query); + + static gboolean stop(GstBaseSink *sink); + + static gboolean unlock(GstBaseSink *sink); + + static GstFlowReturn show_frame(GstVideoSink *sink, GstBuffer *buffer); + static gboolean query(GstBaseSink *element, GstQuery *query); + static gboolean event(GstBaseSink *element, GstEvent * event); + +private: + QGstVideoRenderer *renderer = 
nullptr; +}; + + +class QGstVideoRendererSinkClass +{ +public: + GstVideoSinkClass parent_class; +}; + +QT_END_NAMESPACE + +#endif diff --git a/src/plugins/multimedia/gstreamer/gstreamer.json b/src/plugins/multimedia/gstreamer/gstreamer.json new file mode 100644 index 000000000..6a709d9f4 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/gstreamer.json @@ -0,0 +1,3 @@ +{ + "Keys": [ "gstreamer" ] +} diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera.cpp b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera.cpp new file mode 100644 index 000000000..c54e8b74b --- /dev/null +++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera.cpp @@ -0,0 +1,771 @@ +// Copyright (C) 2016 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#include <mediacapture/qgstreamercamera_p.h> + +#include <QtMultimedia/qcameradevice.h> +#include <QtMultimedia/qmediacapturesession.h> +#include <QtMultimedia/private/qcameradevice_p.h> +#include <QtCore/qdebug.h> + +#include <common/qgst_debug_p.h> +#include <qgstreamervideodevices_p.h> +#include <qgstreamerintegration_p.h> + +#if QT_CONFIG(linux_v4l) +#include <linux/videodev2.h> +#include <private/qcore_unix_p.h> +#endif + + +QT_BEGIN_NAMESPACE + +QMaybe<QPlatformCamera *> QGstreamerCamera::create(QCamera *camera) +{ + static const auto error = qGstErrorMessageIfElementsNotAvailable( + "videotestsrc", "capsfilter", "videoconvert", "videoscale", "identity"); + if (error) + return *error; + + return new QGstreamerCamera(camera); +} + +QGstreamerCamera::QGstreamerCamera(QCamera *camera) + : QGstreamerCameraBase(camera), + gstCameraBin{ + QGstBin::create("camerabin"), + }, + gstCamera{ + QGstElement::createFromFactory("videotestsrc"), + }, + gstCapsFilter{ + QGstElement::createFromFactory("capsfilter", "videoCapsFilter"), + }, + gstDecode{ + QGstElement::createFromFactory("identity"), + }, + gstVideoConvert{ + 
QGstElement::createFromFactory("videoconvert", "videoConvert"), + }, + gstVideoScale{ + QGstElement::createFromFactory("videoscale", "videoScale"), + } +{ + gstCameraBin.add(gstCamera, gstCapsFilter, gstDecode, gstVideoConvert, gstVideoScale); + qLinkGstElements(gstCamera, gstCapsFilter, gstDecode, gstVideoConvert, gstVideoScale); + gstCameraBin.addGhostPad(gstVideoScale, "src"); +} + +QGstreamerCamera::~QGstreamerCamera() +{ + gstCameraBin.setStateSync(GST_STATE_NULL); +} + +bool QGstreamerCamera::isActive() const +{ + return m_active; +} + +void QGstreamerCamera::setActive(bool active) +{ + if (m_active == active) + return; + if (m_cameraDevice.isNull() && active) + return; + + m_active = active; + + emit activeChanged(active); +} + +void QGstreamerCamera::setCamera(const QCameraDevice &camera) +{ + using namespace Qt::Literals; + + if (m_cameraDevice == camera) + return; + + m_cameraDevice = camera; + + QGstElement gstNewCamera; + if (camera.isNull()) { + gstNewCamera = QGstElement::createFromFactory("videotestsrc"); + } else { + auto *integration = static_cast<QGstreamerIntegration *>(QGstreamerIntegration::instance()); + GstDevice *device = integration->videoDevice(camera.id()); + + if (!device) { + updateError(QCamera::Error::CameraError, + u"Failed to create GstDevice for camera: "_s + + QString::fromUtf8(camera.id())); + return; + } + + gstNewCamera = QGstElement::createFromDevice(device, "camerasrc"); + QUniqueGstStructureHandle properties{ + gst_device_get_properties(device), + }; + + if (properties) { + QGstStructureView propertiesView{ properties }; + if (propertiesView.name() == "v4l2deviceprovider") + m_v4l2DevicePath = QString::fromUtf8(propertiesView["device.path"].toString()); + } + } + + QCameraFormat f = findBestCameraFormat(camera); + auto caps = QGstCaps::fromCameraFormat(f); + auto gstNewDecode = QGstElement::createFromFactory( + f.pixelFormat() == QVideoFrameFormat::Format_Jpeg ? 
"jpegdec" : "identity"); + + QGstPipeline::modifyPipelineWhileNotRunning(gstCamera.getPipeline(), [&] { + gstCamera.setStateSync(GST_STATE_READY); // stop camera, as it may have active tasks + + qUnlinkGstElements(gstCamera, gstCapsFilter, gstDecode, gstVideoConvert); + gstCameraBin.stopAndRemoveElements(gstCamera, gstDecode); + + gstCapsFilter.set("caps", caps); + + gstCamera = std::move(gstNewCamera); + gstDecode = std::move(gstNewDecode); + + gstCameraBin.add(gstCamera, gstDecode); + qLinkGstElements(gstCamera, gstCapsFilter, gstDecode, gstVideoConvert); + + gstCameraBin.syncChildrenState(); + }); + + updateCameraProperties(); +} + +bool QGstreamerCamera::setCameraFormat(const QCameraFormat &format) +{ + if (!format.isNull() && !m_cameraDevice.videoFormats().contains(format)) + return false; + + QCameraFormat f = format; + if (f.isNull()) + f = findBestCameraFormat(m_cameraDevice); + + auto caps = QGstCaps::fromCameraFormat(f); + + auto newGstDecode = QGstElement::createFromFactory( + f.pixelFormat() == QVideoFrameFormat::Format_Jpeg ? 
"jpegdec" : "identity"); + + QGstPipeline::modifyPipelineWhileNotRunning(gstCamera.getPipeline(), [&] { + gstCamera.setStateSync(GST_STATE_READY); // stop camera, as it may have active tasks + + qUnlinkGstElements(gstCamera, gstCapsFilter, gstDecode, gstVideoConvert); + gstCameraBin.stopAndRemoveElements(gstDecode); + + gstCapsFilter.set("caps", caps); + + gstDecode = std::move(newGstDecode); + + gstCameraBin.add(gstDecode); + qLinkGstElements(gstCamera, gstCapsFilter, gstDecode, gstVideoConvert); + gstCameraBin.syncChildrenState(); + }); + + return true; +} + +void QGstreamerCamera::updateCameraProperties() +{ +#if QT_CONFIG(linux_v4l) + if (isV4L2Camera()) { + initV4L2Controls(); + return; + } +#endif +#if QT_CONFIG(gstreamer_photography) + if (auto *p = photography()) + gst_photography_set_white_balance_mode(p, GST_PHOTOGRAPHY_WB_MODE_AUTO); + QCamera::Features f = QCamera::Feature::ColorTemperature | QCamera::Feature::ExposureCompensation | + QCamera::Feature::IsoSensitivity | QCamera::Feature::ManualExposureTime; + supportedFeaturesChanged(f); +#endif + +} + +#if QT_CONFIG(gstreamer_photography) +GstPhotography *QGstreamerCamera::photography() const +{ + if (!gstCamera.isNull() && GST_IS_PHOTOGRAPHY(gstCamera.element())) + return GST_PHOTOGRAPHY(gstCamera.element()); + return nullptr; +} +#endif + +void QGstreamerCamera::setFocusMode(QCamera::FocusMode mode) +{ + if (mode == focusMode()) + return; + +#if QT_CONFIG(gstreamer_photography) + auto p = photography(); + if (p) { + GstPhotographyFocusMode photographyMode = GST_PHOTOGRAPHY_FOCUS_MODE_CONTINUOUS_NORMAL; + + switch (mode) { + case QCamera::FocusModeAutoNear: + photographyMode = GST_PHOTOGRAPHY_FOCUS_MODE_MACRO; + break; + case QCamera::FocusModeAutoFar: + // not quite, but hey :) + Q_FALLTHROUGH(); + case QCamera::FocusModeHyperfocal: + photographyMode = GST_PHOTOGRAPHY_FOCUS_MODE_HYPERFOCAL; + break; + case QCamera::FocusModeInfinity: + photographyMode = GST_PHOTOGRAPHY_FOCUS_MODE_INFINITY; + break; + 
case QCamera::FocusModeManual: + photographyMode = GST_PHOTOGRAPHY_FOCUS_MODE_MANUAL; + break; + default: // QCamera::FocusModeAuto: + break; + } + + if (gst_photography_set_focus_mode(p, photographyMode)) + focusModeChanged(mode); + } +#endif +} + +bool QGstreamerCamera::isFocusModeSupported(QCamera::FocusMode mode) const +{ +#if QT_CONFIG(gstreamer_photography) + if (photography()) + return true; +#endif + return mode == QCamera::FocusModeAuto; +} + +void QGstreamerCamera::setFlashMode(QCamera::FlashMode mode) +{ + Q_UNUSED(mode); + +#if QT_CONFIG(gstreamer_photography) + if (auto *p = photography()) { + GstPhotographyFlashMode flashMode; + gst_photography_get_flash_mode(p, &flashMode); + + switch (mode) { + case QCamera::FlashAuto: + flashMode = GST_PHOTOGRAPHY_FLASH_MODE_AUTO; + break; + case QCamera::FlashOff: + flashMode = GST_PHOTOGRAPHY_FLASH_MODE_OFF; + break; + case QCamera::FlashOn: + flashMode = GST_PHOTOGRAPHY_FLASH_MODE_ON; + break; + } + + if (gst_photography_set_flash_mode(p, flashMode)) + flashModeChanged(mode); + } +#endif +} + +bool QGstreamerCamera::isFlashModeSupported(QCamera::FlashMode mode) const +{ +#if QT_CONFIG(gstreamer_photography) + if (photography()) + return true; +#endif + + return mode == QCamera::FlashAuto; +} + +bool QGstreamerCamera::isFlashReady() const +{ +#if QT_CONFIG(gstreamer_photography) + if (photography()) + return true; +#endif + + return false; +} + +void QGstreamerCamera::setExposureMode(QCamera::ExposureMode mode) +{ + Q_UNUSED(mode); +#if QT_CONFIG(linux_v4l) + if (isV4L2Camera() && v4l2AutoExposureSupported && v4l2ManualExposureSupported) { + if (mode != QCamera::ExposureAuto && mode != QCamera::ExposureManual) + return; + int value = QCamera::ExposureAuto ? 
V4L2_EXPOSURE_AUTO : V4L2_EXPOSURE_MANUAL; + setV4L2Parameter(V4L2_CID_EXPOSURE_AUTO, value); + exposureModeChanged(mode); + return; + } +#endif + +#if QT_CONFIG(gstreamer_photography) + auto *p = photography(); + if (!p) + return; + + GstPhotographySceneMode sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_AUTO; + + switch (mode) { + case QCamera::ExposureManual: + sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_MANUAL; + break; + case QCamera::ExposurePortrait: + sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_PORTRAIT; + break; + case QCamera::ExposureSports: + sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_SPORT; + break; + case QCamera::ExposureNight: + sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_NIGHT; + break; + case QCamera::ExposureAuto: + sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_AUTO; + break; + case QCamera::ExposureLandscape: + sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_LANDSCAPE; + break; + case QCamera::ExposureSnow: + sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_SNOW; + break; + case QCamera::ExposureBeach: + sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_BEACH; + break; + case QCamera::ExposureAction: + sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_ACTION; + break; + case QCamera::ExposureNightPortrait: + sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_NIGHT_PORTRAIT; + break; + case QCamera::ExposureTheatre: + sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_THEATRE; + break; + case QCamera::ExposureSunset: + sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_SUNSET; + break; + case QCamera::ExposureSteadyPhoto: + sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_STEADY_PHOTO; + break; + case QCamera::ExposureFireworks: + sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_FIREWORKS; + break; + case QCamera::ExposureParty: + sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_PARTY; + break; + case QCamera::ExposureCandlelight: + sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_CANDLELIGHT; + break; + case QCamera::ExposureBarcode: + sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_BARCODE; + break; + default: + return; + } + + if (gst_photography_set_scene_mode(p, sceneMode)) + 
exposureModeChanged(mode); +#endif +} + +bool QGstreamerCamera::isExposureModeSupported(QCamera::ExposureMode mode) const +{ + if (mode == QCamera::ExposureAuto) + return true; +#if QT_CONFIG(linux_v4l) + if (isV4L2Camera() && v4l2ManualExposureSupported && v4l2AutoExposureSupported) + return mode == QCamera::ExposureManual; +#endif +#if QT_CONFIG(gstreamer_photography) + if (photography()) + return true; +#endif + + return false; +} + +void QGstreamerCamera::setExposureCompensation(float compensation) +{ + Q_UNUSED(compensation); +#if QT_CONFIG(linux_v4l) + if (isV4L2Camera() && (v4l2MinExposureAdjustment != 0 || v4l2MaxExposureAdjustment != 0)) { + int value = qBound(v4l2MinExposureAdjustment, (int)(compensation*1000), v4l2MaxExposureAdjustment); + setV4L2Parameter(V4L2_CID_AUTO_EXPOSURE_BIAS, value); + exposureCompensationChanged(value/1000.); + return; + } +#endif + +#if QT_CONFIG(gstreamer_photography) + if (auto *p = photography()) { + if (gst_photography_set_ev_compensation(p, compensation)) + exposureCompensationChanged(compensation); + } +#endif +} + +void QGstreamerCamera::setManualIsoSensitivity(int iso) +{ + Q_UNUSED(iso); +#if QT_CONFIG(linux_v4l) + if (isV4L2Camera()) { + if (!(supportedFeatures() & QCamera::Feature::IsoSensitivity)) + return; + setV4L2Parameter(V4L2_CID_ISO_SENSITIVITY_AUTO, iso <= 0 ? 
V4L2_ISO_SENSITIVITY_AUTO : V4L2_ISO_SENSITIVITY_MANUAL); + if (iso > 0) { + iso = qBound(minIso(), iso, maxIso()); + setV4L2Parameter(V4L2_CID_ISO_SENSITIVITY, iso); + } + return; + } +#endif +#if QT_CONFIG(gstreamer_photography) + if (auto *p = photography()) { + if (gst_photography_set_iso_speed(p, iso)) + isoSensitivityChanged(iso); + } +#endif +} + +int QGstreamerCamera::isoSensitivity() const +{ +#if QT_CONFIG(linux_v4l) + if (isV4L2Camera()) { + if (!(supportedFeatures() & QCamera::Feature::IsoSensitivity)) + return -1; + return getV4L2Parameter(V4L2_CID_ISO_SENSITIVITY); + } +#endif +#if QT_CONFIG(gstreamer_photography) + if (auto *p = photography()) { + guint speed = 0; + if (gst_photography_get_iso_speed(p, &speed)) + return speed; + } +#endif + return 100; +} + +void QGstreamerCamera::setManualExposureTime(float secs) +{ + Q_UNUSED(secs); +#if QT_CONFIG(linux_v4l) + if (isV4L2Camera() && v4l2ManualExposureSupported && v4l2AutoExposureSupported) { + int exposure = qBound(v4l2MinExposure, qRound(secs*10000.), v4l2MaxExposure); + setV4L2Parameter(V4L2_CID_EXPOSURE_ABSOLUTE, exposure); + exposureTimeChanged(exposure/10000.); + return; + } +#endif + +#if QT_CONFIG(gstreamer_photography) + if (auto *p = photography()) { + if (gst_photography_set_exposure(p, guint(secs*1000000))) + exposureTimeChanged(secs); + } +#endif +} + +float QGstreamerCamera::exposureTime() const +{ +#if QT_CONFIG(linux_v4l) + if (isV4L2Camera()) { + return getV4L2Parameter(V4L2_CID_EXPOSURE_ABSOLUTE)/10000.; + } +#endif +#if QT_CONFIG(gstreamer_photography) + if (auto *p = photography()) { + guint32 exposure = 0; + if (gst_photography_get_exposure(p, &exposure)) + return exposure/1000000.; + } +#endif + return -1; +} + +bool QGstreamerCamera::isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const +{ + if (mode == QCamera::WhiteBalanceAuto) + return true; + +#if QT_CONFIG(linux_v4l) + if (isV4L2Camera()) { + if (v4l2AutoWhiteBalanceSupported && v4l2ColorTemperatureSupported) + 
return true; + } +#endif +#if QT_CONFIG(gstreamer_photography) + if (auto *p = photography()) { + Q_UNUSED(p); + switch (mode) { + case QCamera::WhiteBalanceAuto: + case QCamera::WhiteBalanceSunlight: + case QCamera::WhiteBalanceCloudy: + case QCamera::WhiteBalanceShade: + case QCamera::WhiteBalanceSunset: + case QCamera::WhiteBalanceTungsten: + case QCamera::WhiteBalanceFluorescent: + return true; + case QCamera::WhiteBalanceManual: { +#if GST_CHECK_VERSION(1, 18, 0) + GstPhotographyInterface *iface = GST_PHOTOGRAPHY_GET_INTERFACE(p); + if (iface->set_color_temperature && iface->get_color_temperature) + return true; +#endif + break; + } + default: + break; + } + } +#endif + + return mode == QCamera::WhiteBalanceAuto; +} + +void QGstreamerCamera::setWhiteBalanceMode(QCamera::WhiteBalanceMode mode) +{ + Q_ASSERT(isWhiteBalanceModeSupported(mode)); + +#if QT_CONFIG(linux_v4l) + if (isV4L2Camera()) { + int temperature = colorTemperatureForWhiteBalance(mode); + int t = setV4L2ColorTemperature(temperature); + if (t == 0) + mode = QCamera::WhiteBalanceAuto; + whiteBalanceModeChanged(mode); + return; + } +#endif + +#if QT_CONFIG(gstreamer_photography) + if (auto *p = photography()) { + GstPhotographyWhiteBalanceMode gstMode = GST_PHOTOGRAPHY_WB_MODE_AUTO; + switch (mode) { + case QCamera::WhiteBalanceSunlight: + gstMode = GST_PHOTOGRAPHY_WB_MODE_DAYLIGHT; + break; + case QCamera::WhiteBalanceCloudy: + gstMode = GST_PHOTOGRAPHY_WB_MODE_CLOUDY; + break; + case QCamera::WhiteBalanceShade: + gstMode = GST_PHOTOGRAPHY_WB_MODE_SHADE; + break; + case QCamera::WhiteBalanceSunset: + gstMode = GST_PHOTOGRAPHY_WB_MODE_SUNSET; + break; + case QCamera::WhiteBalanceTungsten: + gstMode = GST_PHOTOGRAPHY_WB_MODE_TUNGSTEN; + break; + case QCamera::WhiteBalanceFluorescent: + gstMode = GST_PHOTOGRAPHY_WB_MODE_FLUORESCENT; + break; + case QCamera::WhiteBalanceAuto: + default: + break; + } + if (gst_photography_set_white_balance_mode(p, gstMode)) { + whiteBalanceModeChanged(mode); + return; + 
} + } +#endif +} + +void QGstreamerCamera::setColorTemperature(int temperature) +{ + if (temperature == 0) { + setWhiteBalanceMode(QCamera::WhiteBalanceAuto); + return; + } + + Q_ASSERT(isWhiteBalanceModeSupported(QCamera::WhiteBalanceManual)); + +#if QT_CONFIG(linux_v4l) + if (isV4L2Camera()) { + int t = setV4L2ColorTemperature(temperature); + if (t) + colorTemperatureChanged(t); + return; + } +#endif + +#if QT_CONFIG(gstreamer_photography) && GST_CHECK_VERSION(1, 18, 0) + if (auto *p = photography()) { + GstPhotographyInterface *iface = GST_PHOTOGRAPHY_GET_INTERFACE(p); + Q_ASSERT(iface->set_color_temperature); + iface->set_color_temperature(p, temperature); + return; + } +#endif +} + +#if QT_CONFIG(linux_v4l) +bool QGstreamerCamera::isV4L2Camera() const +{ + return !m_v4l2DevicePath.isEmpty(); +} + +void QGstreamerCamera::initV4L2Controls() +{ + v4l2AutoWhiteBalanceSupported = false; + v4l2ColorTemperatureSupported = false; + QCamera::Features features{}; + + Q_ASSERT(!m_v4l2DevicePath.isEmpty()); + + + withV4L2DeviceFileDescriptor([&](int fd) { + struct v4l2_queryctrl queryControl = {}; + queryControl.id = V4L2_CID_AUTO_WHITE_BALANCE; + + if (::ioctl(fd, VIDIOC_QUERYCTRL, &queryControl) == 0) { + v4l2AutoWhiteBalanceSupported = true; + setV4L2Parameter(V4L2_CID_AUTO_WHITE_BALANCE, true); + } + + queryControl = {}; + queryControl.id = V4L2_CID_WHITE_BALANCE_TEMPERATURE; + if (::ioctl(fd, VIDIOC_QUERYCTRL, &queryControl) == 0) { + v4l2MinColorTemp = queryControl.minimum; + v4l2MaxColorTemp = queryControl.maximum; + v4l2ColorTemperatureSupported = true; + features |= QCamera::Feature::ColorTemperature; + } + + queryControl = {}; + queryControl.id = V4L2_CID_EXPOSURE_AUTO; + if (::ioctl(fd, VIDIOC_QUERYCTRL, &queryControl) == 0) { + v4l2AutoExposureSupported = true; + } + + queryControl = {}; + queryControl.id = V4L2_CID_EXPOSURE_ABSOLUTE; + if (::ioctl(fd, VIDIOC_QUERYCTRL, &queryControl) == 0) { + v4l2ManualExposureSupported = true; + v4l2MinExposure = 
queryControl.minimum; + v4l2MaxExposure = queryControl.maximum; + features |= QCamera::Feature::ManualExposureTime; + } + + queryControl = {}; + queryControl.id = V4L2_CID_AUTO_EXPOSURE_BIAS; + if (::ioctl(fd, VIDIOC_QUERYCTRL, &queryControl) == 0) { + v4l2MinExposureAdjustment = queryControl.minimum; + v4l2MaxExposureAdjustment = queryControl.maximum; + features |= QCamera::Feature::ExposureCompensation; + } + + queryControl = {}; + queryControl.id = V4L2_CID_ISO_SENSITIVITY_AUTO; + if (::ioctl(fd, VIDIOC_QUERYCTRL, &queryControl) == 0) { + queryControl.id = V4L2_CID_ISO_SENSITIVITY; + if (::ioctl(fd, VIDIOC_QUERYCTRL, &queryControl) == 0) { + features |= QCamera::Feature::IsoSensitivity; + minIsoChanged(queryControl.minimum); + maxIsoChanged(queryControl.minimum); + } + } + }); + + supportedFeaturesChanged(features); +} + +int QGstreamerCamera::setV4L2ColorTemperature(int temperature) +{ + if (v4l2AutoWhiteBalanceSupported) { + setV4L2Parameter(V4L2_CID_AUTO_WHITE_BALANCE, temperature == 0 ? 
true : false); + } else if (temperature == 0) { + temperature = 5600; + } + + if (temperature != 0 && v4l2ColorTemperatureSupported) { + temperature = qBound(v4l2MinColorTemp, temperature, v4l2MaxColorTemp); + if (!setV4L2Parameter(V4L2_CID_WHITE_BALANCE_TEMPERATURE, qBound(v4l2MinColorTemp, temperature, v4l2MaxColorTemp))) + temperature = 0; + } else { + temperature = 0; + } + + return temperature; +} + +bool QGstreamerCamera::setV4L2Parameter(quint32 id, qint32 value) +{ + return withV4L2DeviceFileDescriptor([&](int fd) { + v4l2_control control{ id, value }; + if (::ioctl(fd, VIDIOC_S_CTRL, &control) != 0) { + qWarning() << "Unable to set the V4L2 Parameter" << Qt::hex << id << "to" << value + << qt_error_string(errno); + return false; + } + return true; + }); +} + +int QGstreamerCamera::getV4L2Parameter(quint32 id) const +{ + return withV4L2DeviceFileDescriptor([&](int fd) { + v4l2_control control{ id, 0 }; + if (::ioctl(fd, VIDIOC_G_CTRL, &control) != 0) { + qWarning() << "Unable to get the V4L2 Parameter" << Qt::hex << id + << qt_error_string(errno); + return 0; + } + return control.value; + }); +} + +QGstreamerCustomCamera::QGstreamerCustomCamera(QCamera *camera) + : QGstreamerCameraBase{ + camera, + }, + m_userProvidedGstElement{ + false, + } +{ +} + +QGstreamerCustomCamera::QGstreamerCustomCamera(QCamera *camera, QGstElement element) + : QGstreamerCameraBase{ + camera, + }, + gstCamera{ + std::move(element), + }, + m_userProvidedGstElement{ + true, + } +{ +} + +void QGstreamerCustomCamera::setCamera(const QCameraDevice &device) +{ + if (m_userProvidedGstElement) + return; + + gstCamera = QGstBin::createFromPipelineDescription(device.id(), /*name=*/nullptr, + /* ghostUnlinkedPads=*/true); +} + +bool QGstreamerCustomCamera::isActive() const +{ + return m_active; +} + +void QGstreamerCustomCamera::setActive(bool active) +{ + if (m_active == active) + return; + + m_active = active; + + emit activeChanged(active); +} + +#endif + +QT_END_NAMESPACE diff --git 
a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera_p.h b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera_p.h new file mode 100644 index 000000000..f43c01f34 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamercamera_p.h @@ -0,0 +1,152 @@ +// Copyright (C) 2016 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#ifndef QGSTREAMERCAMERACONTROL_H +#define QGSTREAMERCAMERACONTROL_H + +// +// W A R N I N G +// ------------- +// +// This file is not part of the Qt API. It exists purely as an +// implementation detail. This header file may change from version to +// version without notice, or even be removed. +// +// We mean it. +// + +#include <private/qplatformcamera_p.h> +#include <private/qmultimediautils_p.h> + +#include <mediacapture/qgstreamermediacapture_p.h> +#include <common/qgst_p.h> +#include <common/qgstpipeline_p.h> + +QT_BEGIN_NAMESPACE + +class QGstreamerCameraBase : public QPlatformCamera +{ +public: + using QPlatformCamera::QPlatformCamera; + + virtual QGstElement gstElement() const = 0; +}; + +class QGstreamerCamera : public QGstreamerCameraBase +{ +public: + static QMaybe<QPlatformCamera *> create(QCamera *camera); + + virtual ~QGstreamerCamera(); + + bool isActive() const override; + void setActive(bool active) override; + + void setCamera(const QCameraDevice &camera) override; + bool setCameraFormat(const QCameraFormat &format) override; + + QGstElement gstElement() const override { return gstCameraBin; } +#if QT_CONFIG(gstreamer_photography) + GstPhotography *photography() const; +#endif + + void setFocusMode(QCamera::FocusMode mode) override; + bool isFocusModeSupported(QCamera::FocusMode mode) const override; + + void setFlashMode(QCamera::FlashMode mode) override; + bool isFlashModeSupported(QCamera::FlashMode mode) const override; + bool isFlashReady() const override; + + void setExposureMode(QCamera::ExposureMode) 
override; + bool isExposureModeSupported(QCamera::ExposureMode mode) const override; + void setExposureCompensation(float) override; + void setManualIsoSensitivity(int) override; + int isoSensitivity() const override; + void setManualExposureTime(float) override; + float exposureTime() const override; + + bool isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const override; + void setWhiteBalanceMode(QCamera::WhiteBalanceMode mode) override; + void setColorTemperature(int temperature) override; + +private: + QGstreamerCamera(QCamera *camera); + + void updateCameraProperties(); + +#if QT_CONFIG(linux_v4l) + bool isV4L2Camera() const; + void initV4L2Controls(); + int setV4L2ColorTemperature(int temperature); + bool setV4L2Parameter(quint32 id, qint32 value); + int getV4L2Parameter(quint32 id) const; + + bool v4l2AutoWhiteBalanceSupported = false; + bool v4l2ColorTemperatureSupported = false; + bool v4l2AutoExposureSupported = false; + bool v4l2ManualExposureSupported = false; + qint32 v4l2MinColorTemp = 5600; // Daylight... 
+ qint32 v4l2MaxColorTemp = 5600; + qint32 v4l2MinExposure = 0; + qint32 v4l2MaxExposure = 0; + qint32 v4l2MinExposureAdjustment = 0; + qint32 v4l2MaxExposureAdjustment = 0; + + template <typename Functor> + auto withV4L2DeviceFileDescriptor(Functor &&f) const + { + using ReturnType = std::invoke_result_t<Functor, int>; + Q_ASSERT(isV4L2Camera()); + + if (int gstreamerDeviceFd = gstCamera.getInt("device-fd"); gstreamerDeviceFd != -1) + return f(gstreamerDeviceFd); + + auto v4l2FileDescriptor = QFileDescriptorHandle{ + qt_safe_open(m_v4l2DevicePath.toLocal8Bit().constData(), O_RDONLY), + }; + if (!v4l2FileDescriptor) { + qWarning() << "Unable to open the camera" << m_v4l2DevicePath + << "for read to query the parameter info:" << qt_error_string(errno); + if constexpr (std::is_void_v<ReturnType>) + return; + else + return ReturnType{}; + } + return f(v4l2FileDescriptor.get()); + } +#endif + + QCameraDevice m_cameraDevice; + + QGstBin gstCameraBin; + QGstElement gstCamera; + QGstElement gstCapsFilter; + QGstElement gstDecode; + QGstElement gstVideoConvert; + QGstElement gstVideoScale; + + bool m_active = false; + QString m_v4l2DevicePath; +}; + +class QGstreamerCustomCamera : public QGstreamerCameraBase +{ +public: + explicit QGstreamerCustomCamera(QCamera *); + explicit QGstreamerCustomCamera(QCamera *, QGstElement element); + + QGstElement gstElement() const override { return gstCamera; } + void setCamera(const QCameraDevice &) override; + + bool isActive() const override; + void setActive(bool) override; + +private: + QGstElement gstCamera; + bool m_active{}; + const bool m_userProvidedGstElement; +}; + +QT_END_NAMESPACE + +#endif // QGSTREAMERCAMERACONTROL_H diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture.cpp b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture.cpp new file mode 100644 index 000000000..9c21dc083 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture.cpp @@ -0,0 
+1,450 @@ +// Copyright (C) 2016 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#include "qgstreamerimagecapture_p.h" + +#include <QtMultimedia/qvideoframeformat.h> +#include <QtMultimedia/private/qmediastoragelocation_p.h> +#include <QtMultimedia/private/qplatformcamera_p.h> +#include <QtMultimedia/private/qplatformimagecapture_p.h> +#include <QtMultimedia/private/qvideoframe_p.h> +#include <QtCore/qdebug.h> +#include <QtCore/qdir.h> +#include <QtCore/qstandardpaths.h> +#include <QtCore/qcoreapplication.h> +#include <QtCore/qloggingcategory.h> + +#include <common/qgstreamermetadata_p.h> +#include <common/qgstvideobuffer_p.h> +#include <common/qgstutils_p.h> + +#include <utility> + +QT_BEGIN_NAMESPACE + +namespace { +Q_LOGGING_CATEGORY(qLcImageCaptureGst, "qt.multimedia.imageCapture") + +struct ThreadPoolSingleton +{ + QObject m_context; + QMutex m_poolMutex; + QThreadPool *m_instance{}; + bool m_appUnderDestruction = false; + + QThreadPool *get(const QMutexLocker<QMutex> &) + { + if (m_instance) + return m_instance; + if (m_appUnderDestruction || !qApp) + return nullptr; + + using namespace std::chrono; + + m_instance = new QThreadPool(qApp); + m_instance->setMaxThreadCount(1); // 1 thread; + static constexpr auto expiryTimeout = minutes(5); + m_instance->setExpiryTimeout(round<milliseconds>(expiryTimeout).count()); + + QObject::connect(qApp, &QCoreApplication::aboutToQuit, &m_context, [&] { + // we need to make sure that thread-local QRhi is destroyed before the application to + // prevent QTBUG-124189 + QMutexLocker guard(&m_poolMutex); + delete m_instance; + m_instance = {}; + m_appUnderDestruction = true; + }); + + QObject::connect(qApp, &QCoreApplication::destroyed, &m_context, [&] { + m_appUnderDestruction = false; + }); + return m_instance; + } + + template <typename Functor> + QFuture<void> run(Functor &&f) + { + QMutexLocker guard(&m_poolMutex); + QThreadPool *pool = 
get(guard); + if (!pool) + return QFuture<void>{}; + + return QtConcurrent::run(pool, std::forward<Functor>(f)); + } +}; + +ThreadPoolSingleton s_threadPoolSingleton; + +}; // namespace + +QMaybe<QPlatformImageCapture *> QGstreamerImageCapture::create(QImageCapture *parent) +{ + static const auto error = qGstErrorMessageIfElementsNotAvailable( + "queue", "capsfilter", "videoconvert", "jpegenc", "jifmux", "fakesink"); + if (error) + return *error; + + return new QGstreamerImageCapture(parent); +} + +QGstreamerImageCapture::QGstreamerImageCapture(QImageCapture *parent) + : QPlatformImageCapture(parent), + QGstreamerBufferProbe(ProbeBuffers), + bin{ + QGstBin::create("imageCaptureBin"), + }, + queue{ + QGstElement::createFromFactory("queue", "imageCaptureQueue"), + }, + filter{ + QGstElement::createFromFactory("capsfilter", "filter"), + }, + videoConvert{ + QGstElement::createFromFactory("videoconvert", "imageCaptureConvert"), + }, + encoder{ + QGstElement::createFromFactory("jpegenc", "jpegEncoder"), + }, + muxer{ + QGstElement::createFromFactory("jifmux", "jpegMuxer"), + }, + sink{ + QGstElement::createFromFactory("fakesink", "imageCaptureSink"), + } +{ + // configures the queue to be fast, lightweight and non blocking + queue.set("leaky", 2 /*downstream*/); + queue.set("silent", true); + queue.set("max-size-buffers", uint(1)); + queue.set("max-size-bytes", uint(0)); + queue.set("max-size-time", quint64(0)); + + // imageCaptureSink do not wait for a preroll buffer when going READY -> PAUSED + // as no buffer will arrive until capture() is called + sink.set("async", false); + + bin.add(queue, filter, videoConvert, encoder, muxer, sink); + qLinkGstElements(queue, filter, videoConvert, encoder, muxer, sink); + bin.addGhostPad(queue, "sink"); + + addProbeToPad(queue.staticPad("src").pad(), false); + + sink.set("signal-handoffs", true); + m_handoffConnection = sink.connect("handoff", G_CALLBACK(&saveImageFilter), this); +} + 
+QGstreamerImageCapture::~QGstreamerImageCapture()
+{
+    bin.setStateSync(GST_STATE_NULL);
+
+    // wait for pending futures
+    // The pending-future map is moved out under the mutex and waited on
+    // outside of it: the worker lambdas themselves lock m_mutex, so waiting
+    // while holding the lock would deadlock.
+    auto pendingFutures = [&] {
+        QMutexLocker guard(&m_mutex);
+        return std::move(m_pendingFutures);
+    }();
+
+    for (QFuture<void> &pendingImage : pendingFutures)
+        pendingImage.waitForFinished();
+}
+
+// Ready only when a session is attached, no capture is currently passing
+// through the pipeline, and the camera is active.
+bool QGstreamerImageCapture::isReadyForCapture() const
+{
+    QMutexLocker guard(&m_mutex);
+    return m_session && !passImage && cameraActive;
+}
+
+int QGstreamerImageCapture::capture(const QString &fileName)
+{
+    using namespace Qt::Literals;
+    // Resolve the target path against the standard pictures location,
+    // defaulting the extension to "jpg".
+    QString path = QMediaStorageLocation::generateFileName(
+            fileName, QStandardPaths::PicturesLocation, u"jpg"_s);
+    return doCapture(path);
+}
+
+int QGstreamerImageCapture::captureToBuffer()
+{
+    // An empty file name means the captured image is delivered via signals
+    // only and never written to disk.
+    return doCapture(QString());
+}
+
+// Shared implementation for capture()/captureToBuffer().
+// Returns the capture id, or -1 on error; errors are reported
+// asynchronously through invokeDeferred so signal emission never happens
+// while m_mutex is held.
+int QGstreamerImageCapture::doCapture(const QString &fileName)
+{
+    qCDebug(qLcImageCaptureGst) << "do capture";
+
+    {
+        QMutexLocker guard(&m_mutex);
+        if (!m_session) {
+            invokeDeferred([this] {
+                emit error(-1, QImageCapture::ResourceError,
+                           QPlatformImageCapture::msgImageCaptureNotSet());
+            });
+
+            qCDebug(qLcImageCaptureGst) << "error 1";
+            return -1;
+        }
+        if (!m_session->camera()) {
+            invokeDeferred([this] {
+                emit error(-1, QImageCapture::ResourceError, tr("No camera available."));
+            });
+
+            qCDebug(qLcImageCaptureGst) << "error 2";
+            return -1;
+        }
+        if (passImage) {
+            // A previous capture has not cleared the pipeline yet.
+            invokeDeferred([this] {
+                emit error(-1, QImageCapture::NotReadyError,
+                           QPlatformImageCapture::msgCameraNotReady());
+            });
+
+            qCDebug(qLcImageCaptureGst) << "error 3";
+            return -1;
+        }
+        m_lastId++;
+
+        pendingImages.enqueue({ m_lastId, fileName, QMediaMetaData{} });
+        // let one image pass the pipeline
+        passImage = true;
+    }
+
+    emit readyForCaptureChanged(false);
+    return m_lastId;
+}
+
+// Constrains the capture caps filter to the requested resolution.
+void QGstreamerImageCapture::setResolution(const QSize &resolution)
+{
+    QGstCaps padCaps = bin.staticPad("sink").currentCaps();
+    if (padCaps.isNull()) {
+        qDebug() << "Camera not ready";
+        return;
+    }
+    QGstCaps caps
= padCaps.copy(); + if (caps.isNull()) + return; + + gst_caps_set_simple(caps.caps(), "width", G_TYPE_INT, resolution.width(), "height", G_TYPE_INT, + resolution.height(), nullptr); + filter.set("caps", caps); +} + +// HACK: gcc-10 and earlier reject [=,this] when building with c++17 +#if __cplusplus >= 202002L +# define EQ_THIS_CAPTURE =, this +#else +# define EQ_THIS_CAPTURE = +#endif + +bool QGstreamerImageCapture::probeBuffer(GstBuffer *buffer) +{ + QMutexLocker guard(&m_mutex); + + if (!passImage) + return false; + qCDebug(qLcImageCaptureGst) << "probe buffer"; + + QGstBufferHandle bufferHandle{ + buffer, + QGstBufferHandle::NeedsRef, + }; + + passImage = false; + + bool ready = isReadyForCapture(); + invokeDeferred([this, ready] { + emit readyForCaptureChanged(ready); + }); + + QGstCaps caps = bin.staticPad("sink").currentCaps(); + auto memoryFormat = caps.memoryFormat(); + + GstVideoInfo previewInfo; + QVideoFrameFormat fmt; + auto optionalFormatAndVideoInfo = caps.formatAndVideoInfo(); + if (optionalFormatAndVideoInfo) + std::tie(fmt, previewInfo) = std::move(*optionalFormatAndVideoInfo); + + int futureId = futureIDAllocator += 1; + + // ensure QVideoFrame::toImage is executed on a worker thread that is joined before the + // qApplication is destroyed + QFuture<void> future = s_threadPoolSingleton.run([EQ_THIS_CAPTURE]() mutable { + QMutexLocker guard(&m_mutex); + auto scopeExit = qScopeGuard([&] { + m_pendingFutures.remove(futureId); + }); + + if (!m_session) { + qDebug() << "QGstreamerImageCapture::probeBuffer: no session"; + return; + } + + auto *sink = m_session->gstreamerVideoSink(); + auto gstBuffer = std::make_unique<QGstVideoBuffer>(std::move(bufferHandle), previewInfo, + sink, fmt, memoryFormat); + + QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(gstBuffer), fmt); + QImage img = frame.toImage(); + if (img.isNull()) { + qDebug() << "received a null image"; + return; + } + + QMediaMetaData imageMetaData = metaData(); + 
imageMetaData.insert(QMediaMetaData::Resolution, frame.size()); + pendingImages.head().metaData = std::move(imageMetaData); + PendingImage pendingImage = pendingImages.head(); + + invokeDeferred([this, pendingImage = std::move(pendingImage), frame = std::move(frame), + img = std::move(img)]() mutable { + emit imageExposed(pendingImage.id); + qCDebug(qLcImageCaptureGst) << "Image available!"; + emit imageAvailable(pendingImage.id, frame); + emit imageCaptured(pendingImage.id, img); + emit imageMetadataAvailable(pendingImage.id, pendingImage.metaData); + }); + }); + + if (!future.isValid()) // during qApplication shutdown the threadpool becomes unusable + return true; + + m_pendingFutures.insert(futureId, future); + + return true; +} + +#undef EQ_THIS_CAPTURE + +void QGstreamerImageCapture::setCaptureSession(QPlatformMediaCaptureSession *session) +{ + QMutexLocker guard(&m_mutex); + QGstreamerMediaCapture *captureSession = static_cast<QGstreamerMediaCapture *>(session); + if (m_session == captureSession) + return; + + bool readyForCapture = isReadyForCapture(); + if (m_session) { + disconnect(m_session, nullptr, this, nullptr); + m_lastId = 0; + pendingImages.clear(); + passImage = false; + cameraActive = false; + } + + m_session = captureSession; + if (!m_session) { + if (readyForCapture) + emit readyForCaptureChanged(false); + return; + } + + connect(m_session, &QPlatformMediaCaptureSession::cameraChanged, this, + &QGstreamerImageCapture::onCameraChanged); + onCameraChanged(); +} + +void QGstreamerImageCapture::setMetaData(const QMediaMetaData &m) +{ + { + QMutexLocker guard(&m_mutex); + QPlatformImageCapture::setMetaData(m); + } + + // ensure taginject injects this metaData + applyMetaDataToTagSetter(m, muxer); +} + +void QGstreamerImageCapture::cameraActiveChanged(bool active) +{ + qCDebug(qLcImageCaptureGst) << "cameraActiveChanged" << cameraActive << active; + if (cameraActive == active) + return; + cameraActive = active; + qCDebug(qLcImageCaptureGst) << 
"isReady" << isReadyForCapture(); + emit readyForCaptureChanged(isReadyForCapture()); +} + +void QGstreamerImageCapture::onCameraChanged() +{ + QMutexLocker guard(&m_mutex); + if (m_session->camera()) { + cameraActiveChanged(m_session->camera()->isActive()); + connect(m_session->camera(), &QPlatformCamera::activeChanged, this, + &QGstreamerImageCapture::cameraActiveChanged); + } else { + cameraActiveChanged(false); + } +} + +gboolean QGstreamerImageCapture::saveImageFilter(GstElement *, GstBuffer *buffer, GstPad *, + QGstreamerImageCapture *capture) +{ + capture->saveBufferToImage(buffer); + return true; +} + +void QGstreamerImageCapture::saveBufferToImage(GstBuffer *buffer) +{ + QMutexLocker guard(&m_mutex); + passImage = false; + + if (pendingImages.isEmpty()) + return; + + PendingImage imageData = pendingImages.dequeue(); + if (imageData.filename.isEmpty()) + return; + + int id = futureIDAllocator++; + QGstBufferHandle bufferHandle{ + buffer, + QGstBufferHandle::NeedsRef, + }; + + QFuture<void> saveImageFuture = QtConcurrent::run([this, imageData, bufferHandle, + id]() mutable { + auto cleanup = qScopeGuard([&] { + QMutexLocker guard(&m_mutex); + m_pendingFutures.remove(id); + }); + + qCDebug(qLcImageCaptureGst) << "saving image as" << imageData.filename; + + QFile f(imageData.filename); + if (!f.open(QFile::WriteOnly)) { + qCDebug(qLcImageCaptureGst) << " could not open image file for writing"; + return; + } + + GstMapInfo info; + GstBuffer *buffer = bufferHandle.get(); + if (gst_buffer_map(buffer, &info, GST_MAP_READ)) { + f.write(reinterpret_cast<const char *>(info.data), info.size); + gst_buffer_unmap(buffer, &info); + } + f.close(); + + QMetaObject::invokeMethod(this, [this, imageData = std::move(imageData)]() mutable { + emit imageSaved(imageData.id, imageData.filename); + }); + }); + + m_pendingFutures.insert(id, saveImageFuture); +} + +QImageEncoderSettings QGstreamerImageCapture::imageSettings() const +{ + return m_settings; +} + +void 
QGstreamerImageCapture::setImageSettings(const QImageEncoderSettings &settings) +{ + if (m_settings != settings) { + QSize resolution = settings.resolution(); + if (m_settings.resolution() != resolution && !resolution.isEmpty()) + setResolution(resolution); + + m_settings = settings; + } +} + +QT_END_NAMESPACE + +#include "moc_qgstreamerimagecapture_p.cpp" diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture_p.h b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture_p.h new file mode 100644 index 000000000..04a7c00b4 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamerimagecapture_p.h @@ -0,0 +1,109 @@ +// Copyright (C) 2016 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#ifndef QGSTREAMERIMAGECAPTURECONTROL_H +#define QGSTREAMERIMAGECAPTURECONTROL_H + +// +// W A R N I N G +// ------------- +// +// This file is not part of the Qt API. It exists purely as an +// implementation detail. This header file may change from version to +// version without notice, or even be removed. +// +// We mean it. 
+// + +#include <QtMultimedia/private/qplatformimagecapture_p.h> +#include <QtMultimedia/private/qmultimediautils_p.h> + +#include <QtCore/qmutex.h> +#include <QtCore/qqueue.h> +#include <QtConcurrent/QtConcurrentRun> + +#include <common/qgst_p.h> +#include <common/qgstreamerbufferprobe_p.h> +#include <mediacapture/qgstreamermediacapture_p.h> +#include <gst/video/video.h> + +QT_BEGIN_NAMESPACE + +class QGstreamerImageCapture : public QPlatformImageCapture, private QGstreamerBufferProbe +{ + Q_OBJECT + +public: + static QMaybe<QPlatformImageCapture *> create(QImageCapture *parent); + virtual ~QGstreamerImageCapture(); + + bool isReadyForCapture() const override; + int capture(const QString &fileName) override; + int captureToBuffer() override; + + QImageEncoderSettings imageSettings() const override; + void setImageSettings(const QImageEncoderSettings &settings) override; + + bool probeBuffer(GstBuffer *buffer) override; + + void setCaptureSession(QPlatformMediaCaptureSession *session); + + QGstElement gstElement() const { return bin; } + + void setMetaData(const QMediaMetaData &m) override; + +public Q_SLOTS: + void cameraActiveChanged(bool active); + void onCameraChanged(); + +private: + QGstreamerImageCapture(QImageCapture *parent); + + void setResolution(const QSize &resolution); + int doCapture(const QString &fileName); + static gboolean saveImageFilter(GstElement *element, GstBuffer *buffer, GstPad *pad, + QGstreamerImageCapture *capture); + + void saveBufferToImage(GstBuffer *buffer); + + mutable QRecursiveMutex + m_mutex; // guard all elements accessed from probeBuffer/saveBufferToImage + QGstreamerMediaCapture *m_session = nullptr; + int m_lastId = 0; + QImageEncoderSettings m_settings; + + struct PendingImage { + int id; + QString filename; + QMediaMetaData metaData; + }; + + QQueue<PendingImage> pendingImages; + + QGstBin bin; + QGstElement queue; + QGstElement filter; + QGstElement videoConvert; + QGstElement encoder; + QGstElement muxer; + QGstElement 
sink; + QGstPad videoSrcPad; + + bool passImage = false; + bool cameraActive = false; + + QGObjectHandlerScopedConnection m_handoffConnection; + + QMap<int, QFuture<void>> m_pendingFutures; + int futureIDAllocator = 0; + + template <typename Functor> + void invokeDeferred(Functor &&fn) + { + QMetaObject::invokeMethod(this, std::forward<decltype(fn)>(fn), Qt::QueuedConnection); + }; +}; + +QT_END_NAMESPACE + +#endif // QGSTREAMERCAPTURECORNTROL_H diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture.cpp b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture.cpp new file mode 100644 index 000000000..7ecbb07d7 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture.cpp @@ -0,0 +1,326 @@ +// Copyright (C) 2016 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#include <mediacapture/qgstreamermediacapture_p.h> +#include <mediacapture/qgstreamermediaencoder_p.h> +#include <mediacapture/qgstreamerimagecapture_p.h> +#include <mediacapture/qgstreamercamera_p.h> +#include <common/qgstpipeline_p.h> +#include <common/qgstreameraudioinput_p.h> +#include <common/qgstreameraudiooutput_p.h> +#include <common/qgstreamervideooutput_p.h> + +#include <QtCore/qloggingcategory.h> +#include <QtCore/private/quniquehandle_p.h> + +QT_BEGIN_NAMESPACE + +static void linkTeeToPad(QGstElement tee, QGstPad sink) +{ + if (tee.isNull() || sink.isNull()) + return; + + auto source = tee.getRequestPad("src_%u"); + source.link(sink); +} + +QMaybe<QPlatformMediaCaptureSession *> QGstreamerMediaCapture::create() +{ + auto videoOutput = QGstreamerVideoOutput::create(); + if (!videoOutput) + return videoOutput.error(); + + static const auto error = qGstErrorMessageIfElementsNotAvailable("tee", "capsfilter"); + if (error) + return *error; + + return new QGstreamerMediaCapture(videoOutput.value()); +} + 
+QGstreamerMediaCapture::QGstreamerMediaCapture(QGstreamerVideoOutput *videoOutput)
+    : capturePipeline(QGstPipeline::create("mediaCapturePipeline")), gstVideoOutput(videoOutput)
+{
+    gstVideoOutput->setParent(this);
+    gstVideoOutput->setIsPreview();
+    gstVideoOutput->setPipeline(capturePipeline);
+
+    // Use system clock to drive all elements in the pipeline. Otherwise,
+    // the clock is sourced from the elements (e.g. from an audio source).
+    // Since the elements are added and removed dynamically the clock would
+    // also change, causing loss of synchronization in the pipeline.
+    QGstClockHandle systemClock{
+        gst_system_clock_obtain(),
+    };
+    gst_pipeline_use_clock(capturePipeline.pipeline(), systemClock.get());
+
+    // This is the recording pipeline with only live sources, thus the pipeline
+    // will be always in the playing state.
+    capturePipeline.setState(GST_STATE_PLAYING);
+    capturePipeline.setInStoppedState(false);
+
+    capturePipeline.dumpGraph("initial");
+}
+
+QGstreamerMediaCapture::~QGstreamerMediaCapture()
+{
+    // Detach all consumers before tearing the pipeline down.
+    setMediaRecorder(nullptr);
+    setImageCapture(nullptr);
+    setCamera(nullptr);
+    capturePipeline.setStateSync(GST_STATE_NULL);
+}
+
+QPlatformCamera *QGstreamerMediaCapture::camera()
+{
+    return gstCamera;
+}
+
+// Swaps the active camera. The old camera's branch is unlinked from the
+// pipeline (if it was linked via the video tee) before the new one is
+// connected and, if already active, linked in.
+void QGstreamerMediaCapture::setCamera(QPlatformCamera *platformCamera)
+{
+    auto *camera = static_cast<QGstreamerCameraBase *>(platformCamera);
+    if (gstCamera == camera)
+        return;
+
+    if (gstCamera) {
+        QObject::disconnect(gstCameraActiveConnection);
+        if (gstVideoTee)
+            setCameraActive(false);
+    }
+
+    gstCamera = camera;
+
+    if (gstCamera) {
+        gstCameraActiveConnection = QObject::connect(camera, &QPlatformCamera::activeChanged, this,
+                                                     &QGstreamerMediaCapture::setCameraActive);
+        if (gstCamera->isActive())
+            setCameraActive(true);
+    }
+
+    emit cameraChanged();
+}
+
+// Links (activate) or unlinks (deactivate) the camera branch while the
+// pipeline is paused by modifyPipelineWhileNotRunning.
+void QGstreamerMediaCapture::setCameraActive(bool activate)
+{
+    capturePipeline.modifyPipelineWhileNotRunning([&] {
+        if (activate) {
+            QGstElement cameraElement =
gstCamera->gstElement(); + gstVideoTee = QGstElement::createFromFactory("tee", "videotee"); + gstVideoTee.set("allow-not-linked", true); + + capturePipeline.add(gstVideoOutput->gstElement(), cameraElement, gstVideoTee); + + linkTeeToPad(gstVideoTee, encoderVideoSink); + linkTeeToPad(gstVideoTee, gstVideoOutput->gstElement().staticPad("sink")); + linkTeeToPad(gstVideoTee, imageCaptureSink); + + qLinkGstElements(cameraElement, gstVideoTee); + + capturePipeline.syncChildrenState(); + } else { + if (encoderVideoCapsFilter) + qUnlinkGstElements(gstVideoTee, encoderVideoCapsFilter); + if (m_imageCapture) + qUnlinkGstElements(gstVideoTee, m_imageCapture->gstElement()); + + auto camera = gstCamera->gstElement(); + + capturePipeline.stopAndRemoveElements(camera, gstVideoTee, + gstVideoOutput->gstElement()); + + gstVideoTee = {}; + gstCamera->setCaptureSession(nullptr); + } + }); + + capturePipeline.dumpGraph("camera"); +} + +QPlatformImageCapture *QGstreamerMediaCapture::imageCapture() +{ + return m_imageCapture; +} + +void QGstreamerMediaCapture::setImageCapture(QPlatformImageCapture *imageCapture) +{ + QGstreamerImageCapture *control = static_cast<QGstreamerImageCapture *>(imageCapture); + if (m_imageCapture == control) + return; + + capturePipeline.modifyPipelineWhileNotRunning([&] { + if (m_imageCapture) { + qUnlinkGstElements(gstVideoTee, m_imageCapture->gstElement()); + capturePipeline.stopAndRemoveElements(m_imageCapture->gstElement()); + imageCaptureSink = {}; + m_imageCapture->setCaptureSession(nullptr); + } + + m_imageCapture = control; + if (m_imageCapture) { + imageCaptureSink = m_imageCapture->gstElement().staticPad("sink"); + capturePipeline.add(m_imageCapture->gstElement()); + m_imageCapture->gstElement().syncStateWithParent(); + linkTeeToPad(gstVideoTee, imageCaptureSink); + m_imageCapture->setCaptureSession(this); + } + }); + + capturePipeline.dumpGraph("imageCapture"); + + emit imageCaptureChanged(); +} + +void 
QGstreamerMediaCapture::setMediaRecorder(QPlatformMediaRecorder *recorder) +{ + QGstreamerMediaEncoder *control = static_cast<QGstreamerMediaEncoder *>(recorder); + if (m_mediaEncoder == control) + return; + + if (m_mediaEncoder) + m_mediaEncoder->setCaptureSession(nullptr); + m_mediaEncoder = control; + if (m_mediaEncoder) + m_mediaEncoder->setCaptureSession(this); + + emit encoderChanged(); + capturePipeline.dumpGraph("encoder"); +} + +QPlatformMediaRecorder *QGstreamerMediaCapture::mediaRecorder() +{ + return m_mediaEncoder; +} + +void QGstreamerMediaCapture::linkEncoder(QGstPad audioSink, QGstPad videoSink) +{ + capturePipeline.modifyPipelineWhileNotRunning([&] { + if (!gstVideoTee.isNull() && !videoSink.isNull()) { + QGstCaps caps = gstVideoTee.sink().currentCaps(); + + encoderVideoCapsFilter = + QGstElement::createFromFactory("capsfilter", "encoderVideoCapsFilter"); + Q_ASSERT(encoderVideoCapsFilter); + encoderVideoCapsFilter.set("caps", caps); + + capturePipeline.add(encoderVideoCapsFilter); + + encoderVideoCapsFilter.src().link(videoSink); + linkTeeToPad(gstVideoTee, encoderVideoCapsFilter.sink()); + encoderVideoSink = encoderVideoCapsFilter.sink(); + } + + if (!gstAudioTee.isNull() && !audioSink.isNull()) { + QGstCaps caps = gstAudioTee.sink().currentCaps(); + + encoderAudioCapsFilter = + QGstElement::createFromFactory("capsfilter", "encoderAudioCapsFilter"); + Q_ASSERT(encoderAudioCapsFilter); + encoderAudioCapsFilter.set("caps", caps); + + capturePipeline.add(encoderAudioCapsFilter); + + encoderAudioCapsFilter.src().link(audioSink); + linkTeeToPad(gstAudioTee, encoderAudioCapsFilter.sink()); + encoderAudioSink = encoderAudioCapsFilter.sink(); + } + }); +} + +void QGstreamerMediaCapture::unlinkEncoder() +{ + capturePipeline.modifyPipelineWhileNotRunning([&] { + if (encoderVideoCapsFilter) { + qUnlinkGstElements(gstVideoTee, encoderVideoCapsFilter); + capturePipeline.stopAndRemoveElements(encoderVideoCapsFilter); + encoderVideoCapsFilter = {}; + } + + if 
(encoderAudioCapsFilter) { + qUnlinkGstElements(gstAudioTee, encoderAudioCapsFilter); + capturePipeline.stopAndRemoveElements(encoderAudioCapsFilter); + encoderAudioCapsFilter = {}; + } + + encoderAudioSink = {}; + encoderVideoSink = {}; + }); +} + +const QGstPipeline &QGstreamerMediaCapture::pipeline() const +{ + return capturePipeline; +} + +void QGstreamerMediaCapture::setAudioInput(QPlatformAudioInput *input) +{ + if (gstAudioInput == input) + return; + + capturePipeline.modifyPipelineWhileNotRunning([&] { + if (gstAudioInput) { + if (encoderAudioCapsFilter) + qUnlinkGstElements(gstAudioTee, encoderAudioCapsFilter); + + if (gstAudioOutput) { + qUnlinkGstElements(gstAudioTee, gstAudioOutput->gstElement()); + capturePipeline.stopAndRemoveElements(gstAudioOutput->gstElement()); + } + + capturePipeline.stopAndRemoveElements(gstAudioInput->gstElement(), gstAudioTee); + gstAudioTee = {}; + } + + gstAudioInput = static_cast<QGstreamerAudioInput *>(input); + if (gstAudioInput) { + Q_ASSERT(gstAudioTee.isNull()); + gstAudioTee = QGstElement::createFromFactory("tee", "audiotee"); + gstAudioTee.set("allow-not-linked", true); + capturePipeline.add(gstAudioInput->gstElement(), gstAudioTee); + qLinkGstElements(gstAudioInput->gstElement(), gstAudioTee); + + if (gstAudioOutput) { + capturePipeline.add(gstAudioOutput->gstElement()); + gstAudioOutput->gstElement().setState(GST_STATE_PLAYING); + linkTeeToPad(gstAudioTee, gstAudioOutput->gstElement().staticPad("sink")); + } + + capturePipeline.syncChildrenState(); + + linkTeeToPad(gstAudioTee, encoderAudioSink); + } + }); +} + +void QGstreamerMediaCapture::setVideoPreview(QVideoSink *sink) +{ + gstVideoOutput->setVideoSink(sink); +} + +void QGstreamerMediaCapture::setAudioOutput(QPlatformAudioOutput *output) +{ + if (gstAudioOutput == output) + return; + + capturePipeline.modifyPipelineWhileNotRunning([&] { + if (gstAudioOutput && gstAudioInput) { + // If audio input is set, the output is in the pipeline + 
qUnlinkGstElements(gstAudioTee, gstAudioOutput->gstElement()); + capturePipeline.stopAndRemoveElements(gstAudioOutput->gstElement()); + } + + gstAudioOutput = static_cast<QGstreamerAudioOutput *>(output); + if (gstAudioOutput && gstAudioInput) { + capturePipeline.add(gstAudioOutput->gstElement()); + capturePipeline.syncChildrenState(); + linkTeeToPad(gstAudioTee, gstAudioOutput->gstElement().staticPad("sink")); + } + }); +} + +QGstreamerVideoSink *QGstreamerMediaCapture::gstreamerVideoSink() const +{ + return gstVideoOutput ? gstVideoOutput->gstreamerVideoSink() : nullptr; +} + +QT_END_NAMESPACE + +#include "moc_qgstreamermediacapture_p.cpp" diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture_p.h b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture_p.h new file mode 100644 index 000000000..c44e31f0e --- /dev/null +++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediacapture_p.h @@ -0,0 +1,97 @@ +// Copyright (C) 2016 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#ifndef QGSTREAMERCAPTURESERVICE_H +#define QGSTREAMERCAPTURESERVICE_H + +// +// W A R N I N G +// ------------- +// +// This file is not part of the Qt API. It exists purely as an +// implementation detail. This header file may change from version to +// version without notice, or even be removed. +// +// We mean it. 
+// + +#include <private/qplatformmediacapture_p.h> +#include <private/qplatformmediaintegration_p.h> + +#include <common/qgst_p.h> +#include <common/qgstpipeline_p.h> + +#include <qtimer.h> + +QT_BEGIN_NAMESPACE + +class QGstreamerCameraBase; +class QGstreamerImageCapture; +class QGstreamerMediaEncoder; +class QGstreamerAudioInput; +class QGstreamerAudioOutput; +class QGstreamerVideoOutput; +class QGstreamerVideoSink; + +class QGstreamerMediaCapture final : public QPlatformMediaCaptureSession +{ + Q_OBJECT + +public: + static QMaybe<QPlatformMediaCaptureSession *> create(); + virtual ~QGstreamerMediaCapture(); + + QPlatformCamera *camera() override; + void setCamera(QPlatformCamera *camera) override; + + QPlatformImageCapture *imageCapture() override; + void setImageCapture(QPlatformImageCapture *imageCapture) override; + + QPlatformMediaRecorder *mediaRecorder() override; + void setMediaRecorder(QPlatformMediaRecorder *recorder) override; + + void setAudioInput(QPlatformAudioInput *input) override; + QGstreamerAudioInput *audioInput() { return gstAudioInput; } + + void setVideoPreview(QVideoSink *sink) override; + void setAudioOutput(QPlatformAudioOutput *output) override; + + void linkEncoder(QGstPad audioSink, QGstPad videoSink); + void unlinkEncoder(); + + const QGstPipeline &pipeline() const; + + QGstreamerVideoSink *gstreamerVideoSink() const; + +private: + void setCameraActive(bool activate); + + explicit QGstreamerMediaCapture(QGstreamerVideoOutput *videoOutput); + + friend QGstreamerMediaEncoder; + // Gst elements + QGstPipeline capturePipeline; + + QGstreamerAudioInput *gstAudioInput = nullptr; + QGstreamerCameraBase *gstCamera = nullptr; + QMetaObject::Connection gstCameraActiveConnection; + + QGstElement gstAudioTee; + QGstElement gstVideoTee; + QGstElement encoderVideoCapsFilter; + QGstElement encoderAudioCapsFilter; + + QGstPad encoderAudioSink; + QGstPad encoderVideoSink; + QGstPad imageCaptureSink; + + QGstreamerAudioOutput *gstAudioOutput = 
nullptr; + QGstreamerVideoOutput *gstVideoOutput = nullptr; + + QGstreamerMediaEncoder *m_mediaEncoder = nullptr; + QGstreamerImageCapture *m_imageCapture = nullptr; +}; + +QT_END_NAMESPACE + +#endif // QGSTREAMERCAPTURESERVICE_H diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder.cpp b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder.cpp new file mode 100644 index 000000000..4ec10ca84 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder.cpp @@ -0,0 +1,419 @@ +// Copyright (C) 2016 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#include <mediacapture/qgstreamermediaencoder_p.h> +#include <qgstreamerformatinfo_p.h> +#include <common/qgstpipeline_p.h> +#include <common/qgstreamermessage_p.h> +#include <common/qgst_debug_p.h> +#include <qgstreamerintegration_p.h> + +#include <QtMultimedia/private/qmediastoragelocation_p.h> +#include <QtMultimedia/private/qplatformcamera_p.h> +#include <QtMultimedia/qaudiodevice.h> + +#include <QtCore/qdebug.h> +#include <QtCore/qeventloop.h> +#include <QtCore/qstandardpaths.h> +#include <QtCore/qmimetype.h> +#include <QtCore/qloggingcategory.h> + +#include <gst/gsttagsetter.h> +#include <gst/gstversion.h> +#include <gst/video/video.h> +#include <gst/pbutils/encoding-profile.h> + +static Q_LOGGING_CATEGORY(qLcMediaEncoderGst, "qt.multimedia.encoder") + +QT_BEGIN_NAMESPACE + +QGstreamerMediaEncoder::QGstreamerMediaEncoder(QMediaRecorder *parent) + : QPlatformMediaRecorder(parent), + audioPauseControl(*this), + videoPauseControl(*this) +{ + signalDurationChangedTimer.setInterval(100); + signalDurationChangedTimer.callOnTimeout(&signalDurationChangedTimer, [this]() { + durationChanged(duration()); + }); +} + +QGstreamerMediaEncoder::~QGstreamerMediaEncoder() +{ + if (!capturePipeline.isNull()) { + finalize(); + capturePipeline.removeMessageFilter(this); + 
capturePipeline.setStateSync(GST_STATE_NULL); + } +} + +bool QGstreamerMediaEncoder::isLocationWritable(const QUrl &) const +{ + return true; +} + +void QGstreamerMediaEncoder::handleSessionError(QMediaRecorder::Error code, const QString &description) +{ + updateError(code, description); + stop(); +} + +bool QGstreamerMediaEncoder::processBusMessage(const QGstreamerMessage &msg) +{ + constexpr bool traceStateChange = false; + constexpr bool traceAllEvents = false; + + if constexpr (traceAllEvents) + qCDebug(qLcMediaEncoderGst) << "received event:" << msg; + + switch (msg.type()) { + case GST_MESSAGE_ELEMENT: { + QGstStructureView s = msg.structure(); + if (s.name() == "GstBinForwarded") + return processBusMessage(s.getMessage()); + + qCDebug(qLcMediaEncoderGst) + << "received element message from" << msg.source().name() << s.name(); + return false; + } + + case GST_MESSAGE_EOS: { + qCDebug(qLcMediaEncoderGst) << "received EOS from" << msg.source().name(); + finalize(); + return false; + } + + case GST_MESSAGE_ERROR: { + qCDebug(qLcMediaEncoderGst) + << "received error:" << msg.source().name() << QCompactGstMessageAdaptor(msg); + + QUniqueGErrorHandle err; + QGString debug; + gst_message_parse_error(msg.message(), &err, &debug); + updateError(QMediaRecorder::ResourceError, QString::fromUtf8(err.get()->message)); + if (!m_finalizing) + stop(); + finalize(); + return false; + } + + case GST_MESSAGE_STATE_CHANGED: { + if constexpr (traceStateChange) + qCDebug(qLcMediaEncoderGst) + << "received state change" << QCompactGstMessageAdaptor(msg); + + return false; + } + + default: + return false; + }; +} + +qint64 QGstreamerMediaEncoder::duration() const +{ + return std::max(audioPauseControl.duration, videoPauseControl.duration); +} + + +static GstEncodingContainerProfile *createContainerProfile(const QMediaEncoderSettings &settings) +{ + auto *formatInfo = QGstreamerIntegration::instance()->gstFormatsInfo(); + + auto caps = formatInfo->formatCaps(settings.fileFormat()); + 
+ GstEncodingContainerProfile *profile = + (GstEncodingContainerProfile *)gst_encoding_container_profile_new( + "container_profile", (gchar *)"custom container profile", + const_cast<GstCaps *>(caps.caps()), + nullptr); // preset + return profile; +} + +static GstEncodingProfile *createVideoProfile(const QMediaEncoderSettings &settings) +{ + auto *formatInfo = QGstreamerIntegration::instance()->gstFormatsInfo(); + + QGstCaps caps = formatInfo->videoCaps(settings.mediaFormat()); + if (caps.isNull()) + return nullptr; + + QSize videoResolution = settings.videoResolution(); + if (videoResolution.isValid()) + caps.setResolution(videoResolution); + + GstEncodingVideoProfile *profile = + gst_encoding_video_profile_new(const_cast<GstCaps *>(caps.caps()), nullptr, + nullptr, // restriction + 0); // presence + + gst_encoding_video_profile_set_pass(profile, 0); + gst_encoding_video_profile_set_variableframerate(profile, TRUE); + + return (GstEncodingProfile *)profile; +} + +static GstEncodingProfile *createAudioProfile(const QMediaEncoderSettings &settings) +{ + auto *formatInfo = QGstreamerIntegration::instance()->gstFormatsInfo(); + + auto caps = formatInfo->audioCaps(settings.mediaFormat()); + if (caps.isNull()) + return nullptr; + + GstEncodingProfile *profile = + (GstEncodingProfile *)gst_encoding_audio_profile_new(const_cast<GstCaps *>(caps.caps()), + nullptr, // preset + nullptr, // restriction + 0); // presence + + return profile; +} + + +static GstEncodingContainerProfile *createEncodingProfile(const QMediaEncoderSettings &settings) +{ + auto *containerProfile = createContainerProfile(settings); + if (!containerProfile) { + qWarning() << "QGstreamerMediaEncoder: failed to create container profile!"; + return nullptr; + } + + GstEncodingProfile *audioProfile = createAudioProfile(settings); + GstEncodingProfile *videoProfile = nullptr; + if (settings.videoCodec() != QMediaFormat::VideoCodec::Unspecified) + videoProfile = createVideoProfile(settings); +// qDebug() << 
"audio profile" << (audioProfile ? gst_caps_to_string(gst_encoding_profile_get_format(audioProfile)) : "(null)"); +// qDebug() << "video profile" << (videoProfile ? gst_caps_to_string(gst_encoding_profile_get_format(videoProfile)) : "(null)"); +// qDebug() << "conta profile" << gst_caps_to_string(gst_encoding_profile_get_format((GstEncodingProfile *)containerProfile)); + + if (videoProfile) { + if (!gst_encoding_container_profile_add_profile(containerProfile, videoProfile)) { + qWarning() << "QGstreamerMediaEncoder: failed to add video profile!"; + gst_encoding_profile_unref(videoProfile); + } + } + if (audioProfile) { + if (!gst_encoding_container_profile_add_profile(containerProfile, audioProfile)) { + qWarning() << "QGstreamerMediaEncoder: failed to add audio profile!"; + gst_encoding_profile_unref(audioProfile); + } + } + + return containerProfile; +} + +void QGstreamerMediaEncoder::PauseControl::reset() +{ + pauseOffsetPts = 0; + pauseStartPts.reset(); + duration = 0; + firstBufferPts.reset(); +} + +void QGstreamerMediaEncoder::PauseControl::installOn(QGstPad pad) +{ + pad.addProbe<&QGstreamerMediaEncoder::PauseControl::processBuffer>(this, GST_PAD_PROBE_TYPE_BUFFER); +} + +GstPadProbeReturn QGstreamerMediaEncoder::PauseControl::processBuffer(QGstPad, GstPadProbeInfo *info) +{ + auto buffer = GST_PAD_PROBE_INFO_BUFFER(info); + if (!buffer) + return GST_PAD_PROBE_OK; + + buffer = gst_buffer_make_writable(buffer); + + if (!buffer) + return GST_PAD_PROBE_OK; + + GST_PAD_PROBE_INFO_DATA(info) = buffer; + + if (!GST_BUFFER_PTS_IS_VALID(buffer)) + return GST_PAD_PROBE_OK; + + if (!firstBufferPts) + firstBufferPts = GST_BUFFER_PTS(buffer); + + if (encoder.state() == QMediaRecorder::PausedState) { + if (!pauseStartPts) + pauseStartPts = GST_BUFFER_PTS(buffer); + + return GST_PAD_PROBE_DROP; + } + + if (pauseStartPts) { + pauseOffsetPts += GST_BUFFER_PTS(buffer) - *pauseStartPts; + pauseStartPts.reset(); + } + GST_BUFFER_PTS(buffer) -= pauseOffsetPts; + + duration = 
(GST_BUFFER_PTS(buffer) - *firstBufferPts) / GST_MSECOND; + + return GST_PAD_PROBE_OK; +} + +void QGstreamerMediaEncoder::record(QMediaEncoderSettings &settings) +{ + if (!m_session ||m_finalizing || state() != QMediaRecorder::StoppedState) + return; + + const auto hasVideo = m_session->camera() && m_session->camera()->isActive(); + const auto hasAudio = m_session->audioInput() != nullptr; + + if (!hasVideo && !hasAudio) { + updateError(QMediaRecorder::ResourceError, QMediaRecorder::tr("No camera or audio input")); + return; + } + + const auto audioOnly = settings.videoCodec() == QMediaFormat::VideoCodec::Unspecified; + + auto primaryLocation = audioOnly ? QStandardPaths::MusicLocation : QStandardPaths::MoviesLocation; + auto container = settings.mimeType().preferredSuffix(); + auto location = QMediaStorageLocation::generateFileName(outputLocation().toLocalFile(), primaryLocation, container); + + QUrl actualSink = QUrl::fromLocalFile(QDir::currentPath()).resolved(location); + qCDebug(qLcMediaEncoderGst) << "recording new video to" << actualSink; + + Q_ASSERT(!actualSink.isEmpty()); + + gstEncoder = QGstBin::createFromFactory("encodebin", "encodebin"); + Q_ASSERT(gstEncoder); + auto *encodingProfile = createEncodingProfile(settings); + g_object_set (gstEncoder.object(), "profile", encodingProfile, nullptr); + gst_encoding_profile_unref(encodingProfile); + + gstFileSink = QGstElement::createFromFactory("filesink", "filesink"); + Q_ASSERT(gstFileSink); + gstFileSink.set("location", QFile::encodeName(actualSink.toLocalFile()).constData()); + gstFileSink.set("async", false); + + QGstPad audioSink = {}; + QGstPad videoSink = {}; + + audioPauseControl.reset(); + videoPauseControl.reset(); + + if (hasAudio) { + audioSink = gstEncoder.getRequestPad("audio_%u"); + if (audioSink.isNull()) + qWarning() << "Unsupported audio codec"; + else + audioPauseControl.installOn(audioSink); + } + + if (hasVideo) { + videoSink = gstEncoder.getRequestPad("video_%u"); + if 
(videoSink.isNull()) + qWarning() << "Unsupported video codec"; + else + videoPauseControl.installOn(videoSink); + } + + capturePipeline.modifyPipelineWhileNotRunning([&] { + capturePipeline.add(gstEncoder, gstFileSink); + qLinkGstElements(gstEncoder, gstFileSink); + applyMetaDataToTagSetter(m_metaData, gstEncoder); + + m_session->linkEncoder(audioSink, videoSink); + + gstEncoder.syncStateWithParent(); + gstFileSink.syncStateWithParent(); + }); + + signalDurationChangedTimer.start(); + capturePipeline.dumpGraph("recording"); + + durationChanged(0); + stateChanged(QMediaRecorder::RecordingState); + actualLocationChanged(QUrl::fromLocalFile(location)); +} + +void QGstreamerMediaEncoder::pause() +{ + if (!m_session || m_finalizing || state() != QMediaRecorder::RecordingState) + return; + signalDurationChangedTimer.stop(); + durationChanged(duration()); + capturePipeline.dumpGraph("before-pause"); + stateChanged(QMediaRecorder::PausedState); +} + +void QGstreamerMediaEncoder::resume() +{ + capturePipeline.dumpGraph("before-resume"); + if (!m_session || m_finalizing || state() != QMediaRecorder::PausedState) + return; + signalDurationChangedTimer.start(); + stateChanged(QMediaRecorder::RecordingState); +} + +void QGstreamerMediaEncoder::stop() +{ + if (!m_session || m_finalizing || state() == QMediaRecorder::StoppedState) + return; + durationChanged(duration()); + qCDebug(qLcMediaEncoderGst) << "stop"; + m_finalizing = true; + m_session->unlinkEncoder(); + signalDurationChangedTimer.stop(); + + qCDebug(qLcMediaEncoderGst) << ">>>>>>>>>>>>> sending EOS"; + gstEncoder.sendEos(); +} + +void QGstreamerMediaEncoder::finalize() +{ + if (!m_session || gstEncoder.isNull()) + return; + + qCDebug(qLcMediaEncoderGst) << "finalize"; + + capturePipeline.stopAndRemoveElements(gstEncoder, gstFileSink); + gstFileSink = {}; + gstEncoder = {}; + m_finalizing = false; + stateChanged(QMediaRecorder::StoppedState); +} + +void QGstreamerMediaEncoder::setMetaData(const QMediaMetaData 
&metaData) +{ + if (!m_session) + return; + m_metaData = metaData; +} + +QMediaMetaData QGstreamerMediaEncoder::metaData() const +{ + return m_metaData; +} + +void QGstreamerMediaEncoder::setCaptureSession(QPlatformMediaCaptureSession *session) +{ + QGstreamerMediaCapture *captureSession = static_cast<QGstreamerMediaCapture *>(session); + if (m_session == captureSession) + return; + + if (m_session) { + stop(); + if (m_finalizing) { + QEventLoop loop; + QObject::connect(mediaRecorder(), &QMediaRecorder::recorderStateChanged, &loop, + &QEventLoop::quit); + loop.exec(); + } + + capturePipeline.removeMessageFilter(this); + capturePipeline = {}; + } + + m_session = captureSession; + if (!m_session) + return; + + capturePipeline = captureSession->capturePipeline; + capturePipeline.set("message-forward", true); + capturePipeline.installMessageFilter(this); +} + +QT_END_NAMESPACE diff --git a/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder_p.h b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder_p.h new file mode 100644 index 000000000..56e8c193b --- /dev/null +++ b/src/plugins/multimedia/gstreamer/mediacapture/qgstreamermediaencoder_p.h @@ -0,0 +1,91 @@ +// Copyright (C) 2016 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + + +#ifndef QGSTREAMERENCODERCONTROL_H +#define QGSTREAMERENCODERCONTROL_H + +// +// W A R N I N G +// ------------- +// +// This file is not part of the Qt API. It exists purely as an +// implementation detail. This header file may change from version to +// version without notice, or even be removed. +// +// We mean it. 
+// + +#include <mediacapture/qgstreamermediacapture_p.h> +#include <common/qgstreamermetadata_p.h> + +#include <QtMultimedia/private/qplatformmediarecorder_p.h> +#include <QtCore/qurl.h> +#include <QtCore/qdir.h> +#include <QtCore/qelapsedtimer.h> +#include <QtCore/qtimer.h> + +QT_BEGIN_NAMESPACE + +class QMediaMetaData; +class QGstreamerMessage; + +class QGstreamerMediaEncoder : public QPlatformMediaRecorder, QGstreamerBusMessageFilter +{ +public: + explicit QGstreamerMediaEncoder(QMediaRecorder *parent); + virtual ~QGstreamerMediaEncoder(); + + bool isLocationWritable(const QUrl &sink) const override; + + qint64 duration() const override; + + void record(QMediaEncoderSettings &settings) override; + void pause() override; + void resume() override; + void stop() override; + + void setMetaData(const QMediaMetaData &) override; + QMediaMetaData metaData() const override; + + void setCaptureSession(QPlatformMediaCaptureSession *session); + + QGstElement getEncoder() { return gstEncoder; } +private: + bool processBusMessage(const QGstreamerMessage& message) override; + +private: + struct PauseControl { + explicit PauseControl(QPlatformMediaRecorder &encoder) : encoder(encoder) { } + + GstPadProbeReturn processBuffer(QGstPad pad, GstPadProbeInfo *info); + void installOn(QGstPad pad); + void reset(); + + QPlatformMediaRecorder &encoder; + GstClockTime pauseOffsetPts = 0; + std::optional<GstClockTime> pauseStartPts; + std::optional<GstClockTime> firstBufferPts; + qint64 duration = 0; + }; + + PauseControl audioPauseControl; + PauseControl videoPauseControl; + + void handleSessionError(QMediaRecorder::Error code, const QString &description); + void finalize(); + + QGstreamerMediaCapture *m_session = nullptr; + QMediaMetaData m_metaData; + QTimer signalDurationChangedTimer; + + QGstPipeline capturePipeline; + QGstBin gstEncoder; + QGstElement gstFileSink; + + bool m_finalizing = false; +}; + +QT_END_NAMESPACE + +#endif // QGSTREAMERENCODERCONTROL_H diff --git 
a/src/plugins/multimedia/gstreamer/qgstreamerformatinfo.cpp b/src/plugins/multimedia/gstreamer/qgstreamerformatinfo.cpp new file mode 100644 index 000000000..a657fc52f --- /dev/null +++ b/src/plugins/multimedia/gstreamer/qgstreamerformatinfo.cpp @@ -0,0 +1,445 @@ +// Copyright (C) 2021 The Qt Company Ltd. +// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only + +#include <common/qglist_helper_p.h> +#include "qgstreamerformatinfo_p.h" + +#include <gst/gst.h> + +QT_BEGIN_NAMESPACE + +QMediaFormat::AudioCodec QGstreamerFormatInfo::audioCodecForCaps(QGstStructureView structure) +{ + using namespace std::string_view_literals; + const char *name = structure.name().data(); + + if (!name || (strncmp(name, "audio/", 6) != 0)) + return QMediaFormat::AudioCodec::Unspecified; + name += 6; + if (name == "mpeg"sv) { + auto version = structure["mpegversion"].toInt(); + if (version == 1) { + auto layer = structure["layer"]; + if (!layer.isNull()) + return QMediaFormat::AudioCodec::MP3; + } + if (version == 4) + return QMediaFormat::AudioCodec::AAC; + return QMediaFormat::AudioCodec::Unspecified; + } + if (name == "x-ac3"sv) + return QMediaFormat::AudioCodec::AC3; + + if (name == "x-eac3"sv) + return QMediaFormat::AudioCodec::EAC3; + + if (name == "x-flac"sv) + return QMediaFormat::AudioCodec::FLAC; + + if (name == "x-alac"sv) + return QMediaFormat::AudioCodec::ALAC; + + if (name == "x-true-hd"sv) + return QMediaFormat::AudioCodec::DolbyTrueHD; + + if (name == "x-vorbis"sv) + return QMediaFormat::AudioCodec::Vorbis; + + if (name == "x-opus"sv) + return QMediaFormat::AudioCodec::Opus; + + if (name == "x-wav"sv) + return QMediaFormat::AudioCodec::Wave; + + if (name == "x-wma"sv) + return QMediaFormat::AudioCodec::WMA; + + return QMediaFormat::AudioCodec::Unspecified; +} + +QMediaFormat::VideoCodec QGstreamerFormatInfo::videoCodecForCaps(QGstStructureView structure) +{ + using namespace std::string_view_literals; + const char *name = 
structure.name().data(); + + if (!name || (strncmp(name, "video/", 6) != 0)) + return QMediaFormat::VideoCodec::Unspecified; + name += 6; + + if (name == "mpeg"sv) { + auto version = structure["mpegversion"].toInt(); + if (version == 1) + return QMediaFormat::VideoCodec::MPEG1; + if (version == 2) + return QMediaFormat::VideoCodec::MPEG2; + if (version == 4) + return QMediaFormat::VideoCodec::MPEG4; + return QMediaFormat::VideoCodec::Unspecified; + } + if (name == "x-h264"sv) + return QMediaFormat::VideoCodec::H264; + +#if GST_CHECK_VERSION(1, 17, 0) // x265enc seems to be broken on 1.16 at least + if (name == "x-h265"sv) + return QMediaFormat::VideoCodec::H265; +#endif + + if (name == "x-vp8"sv) + return QMediaFormat::VideoCodec::VP8; + + if (name == "x-vp9"sv) + return QMediaFormat::VideoCodec::VP9; + + if (name == "x-av1"sv) + return QMediaFormat::VideoCodec::AV1; + + if (name == "x-theora"sv) + return QMediaFormat::VideoCodec::Theora; + + if (name == "x-jpeg"sv) + return QMediaFormat::VideoCodec::MotionJPEG; + + if (name == "x-wmv"sv) + return QMediaFormat::VideoCodec::WMV; + + return QMediaFormat::VideoCodec::Unspecified; +} + +QMediaFormat::FileFormat QGstreamerFormatInfo::fileFormatForCaps(QGstStructureView structure) +{ + using namespace std::string_view_literals; + const char *name = structure.name().data(); + + if (name == "video/x-ms-asf"sv) + return QMediaFormat::FileFormat::WMV; + + if (name == "video/x-msvideo"sv) + return QMediaFormat::FileFormat::AVI; + + if (name == "video/x-matroska"sv) + return QMediaFormat::FileFormat::Matroska; + + if (name == "video/quicktime"sv) { + const char *variant = structure["variant"].toString(); + if (!variant) + return QMediaFormat::FileFormat::QuickTime; + if (variant == "iso"sv) + return QMediaFormat::FileFormat::MPEG4; + } + if (name == "video/ogg"sv) + return QMediaFormat::FileFormat::Ogg; + + if (name == "video/webm"sv) + return QMediaFormat::FileFormat::WebM; + + if (name == "audio/x-m4a"sv) + return 
QMediaFormat::FileFormat::Mpeg4Audio; + + if (name == "audio/x-wav"sv) + return QMediaFormat::FileFormat::Wave; + + if (name == "audio/mpeg"sv) { + auto mpegversion = structure["mpegversion"].toInt(); + if (mpegversion == 1) { + auto layer = structure["layer"]; + if (!layer.isNull()) + return QMediaFormat::FileFormat::MP3; + } + } + + return QMediaFormat::UnspecifiedFormat; +} + + +QImageCapture::FileFormat QGstreamerFormatInfo::imageFormatForCaps(QGstStructureView structure) +{ + using namespace std::string_view_literals; + const char *name = structure.name().data(); + + if (name == "image/jpeg"sv) + return QImageCapture::JPEG; + + if (name == "image/png"sv) + return QImageCapture::PNG; + + if (name == "image/webp"sv) + return QImageCapture::WebP; + + if (name == "image/tiff"sv) + return QImageCapture::Tiff; + + return QImageCapture::UnspecifiedFormat; +} + +static QPair<QList<QMediaFormat::AudioCodec>, QList<QMediaFormat::VideoCodec>> getCodecsList(bool decode) +{ + QList<QMediaFormat::AudioCodec> audio; + QList<QMediaFormat::VideoCodec> video; + + GstPadDirection padDirection = decode ? GST_PAD_SINK : GST_PAD_SRC; + + GList *elementList = gst_element_factory_list_get_elements(decode ? 
GST_ELEMENT_FACTORY_TYPE_DECODER : GST_ELEMENT_FACTORY_TYPE_ENCODER, + GST_RANK_MARGINAL); + + for (GstElementFactory *factory : + QGstUtils::GListRangeAdaptor<GstElementFactory *>(elementList)) { + for (GstStaticPadTemplate *padTemplate : + QGstUtils::GListRangeAdaptor<GstStaticPadTemplate *>( + gst_element_factory_get_static_pad_templates(factory))) { + if (padTemplate->direction == padDirection) { + auto caps = QGstCaps(gst_static_caps_get(&padTemplate->static_caps), QGstCaps::HasRef); + + for (int i = 0; i < caps.size(); i++) { + QGstStructureView structure = caps.at(i); + auto a = QGstreamerFormatInfo::audioCodecForCaps(structure); + if (a != QMediaFormat::AudioCodec::Unspecified && !audio.contains(a)) + audio.append(a); + auto v = QGstreamerFormatInfo::videoCodecForCaps(structure); + if (v != QMediaFormat::VideoCodec::Unspecified && !video.contains(v)) + video.append(v); + } + } + } + } + gst_plugin_feature_list_free(elementList); + return {audio, video}; +} + + +QList<QGstreamerFormatInfo::CodecMap> QGstreamerFormatInfo::getMuxerList(bool demuxer, + QList<QMediaFormat::AudioCodec> supportedAudioCodecs, + QList<QMediaFormat::VideoCodec> supportedVideoCodecs) +{ + QList<QGstreamerFormatInfo::CodecMap> muxers; + + GstPadDirection padDirection = demuxer ? GST_PAD_SINK : GST_PAD_SRC; + + GList *elementList = gst_element_factory_list_get_elements( + demuxer ? 
GST_ELEMENT_FACTORY_TYPE_DEMUXER : GST_ELEMENT_FACTORY_TYPE_MUXER, + GST_RANK_MARGINAL); + + for (GstElementFactory *factory : + QGstUtils::GListRangeAdaptor<GstElementFactory *>(elementList)) { + QList<QMediaFormat::FileFormat> fileFormats; + + for (GstStaticPadTemplate *padTemplate : + QGstUtils::GListRangeAdaptor<GstStaticPadTemplate *>( + gst_element_factory_get_static_pad_templates(factory))) { + + if (padTemplate->direction == padDirection) { + auto caps = QGstCaps(gst_static_caps_get(&padTemplate->static_caps), QGstCaps::HasRef); + + for (int i = 0; i < caps.size(); i++) { + QGstStructureView structure = caps.at(i); + auto fmt = fileFormatForCaps(structure); + if (fmt != QMediaFormat::UnspecifiedFormat) + fileFormats.append(fmt); + } + } + } + if (fileFormats.isEmpty()) + continue; + + QList<QMediaFormat::AudioCodec> audioCodecs; + QList<QMediaFormat::VideoCodec> videoCodecs; + + for (GstStaticPadTemplate *padTemplate : + QGstUtils::GListRangeAdaptor<GstStaticPadTemplate *>( + gst_element_factory_get_static_pad_templates(factory))) { + + // check the other side for supported inputs/outputs + if (padTemplate->direction != padDirection) { + auto caps = QGstCaps(gst_static_caps_get(&padTemplate->static_caps), QGstCaps::HasRef); + + bool acceptsRawAudio = false; + for (int i = 0; i < caps.size(); i++) { + QGstStructureView structure = caps.at(i); + if (structure.name() == "audio/x-raw") + acceptsRawAudio = true; + auto audio = audioCodecForCaps(structure); + if (audio != QMediaFormat::AudioCodec::Unspecified && supportedAudioCodecs.contains(audio)) + audioCodecs.append(audio); + auto video = videoCodecForCaps(structure); + if (video != QMediaFormat::VideoCodec::Unspecified && supportedVideoCodecs.contains(video)) + videoCodecs.append(video); + } + if (acceptsRawAudio && fileFormats.size() == 1) { + switch (fileFormats.at(0)) { + case QMediaFormat::Mpeg4Audio: + default: + break; + case QMediaFormat::MP3: + audioCodecs.append(QMediaFormat::AudioCodec::MP3); + 
break; + case QMediaFormat::FLAC: + audioCodecs.append(QMediaFormat::AudioCodec::FLAC); + break; + case QMediaFormat::Wave: + audioCodecs.append(QMediaFormat::AudioCodec::Wave); + break; + } + } + } + } + if (!audioCodecs.isEmpty() || !videoCodecs.isEmpty()) { + for (auto f : std::as_const(fileFormats)) { + muxers.append({f, audioCodecs, videoCodecs}); + if (f == QMediaFormat::MPEG4 && !fileFormats.contains(QMediaFormat::Mpeg4Audio)) { + muxers.append({QMediaFormat::Mpeg4Audio, audioCodecs, {}}); + if (audioCodecs.contains(QMediaFormat::AudioCodec::AAC)) + muxers.append({QMediaFormat::AAC, { QMediaFormat::AudioCodec::AAC }, {}}); + } else if (f == QMediaFormat::WMV && !fileFormats.contains(QMediaFormat::WMA)) { + muxers.append({QMediaFormat::WMA, audioCodecs, {}}); + } + } + } + } + gst_plugin_feature_list_free(elementList); + return muxers; +} + +static QList<QImageCapture::FileFormat> getImageFormatList() +{ + QSet<QImageCapture::FileFormat> formats; + + GList *elementList = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_ENCODER, + GST_RANK_MARGINAL); + + for (GstElementFactory *factory : + QGstUtils::GListRangeAdaptor<GstElementFactory *>(elementList)) { + + for (GstStaticPadTemplate *padTemplate : + QGstUtils::GListRangeAdaptor<GstStaticPadTemplate *>( + gst_element_factory_get_static_pad_templates(factory))) { + if (padTemplate->direction == GST_PAD_SRC) { + QGstCaps caps = QGstCaps(gst_static_caps_get(&padTemplate->static_caps), QGstCaps::HasRef); + + for (int i = 0; i < caps.size(); i++) { + QGstStructureView structure = caps.at(i); + auto f = QGstreamerFormatInfo::imageFormatForCaps(structure); + if (f != QImageCapture::UnspecifiedFormat) { +// qDebug() << structure.toString() << f; + formats.insert(f); + } + } + } + } + } + gst_plugin_feature_list_free(elementList); + return formats.values(); +} + +#if 0 +static void dumpAudioCodecs(const QList<QMediaFormat::AudioCodec> &codecList) +{ + qDebug() << "Audio codecs:"; + for (const auto &c : 
codecList)
        qDebug() << " " << QMediaFormat::audioCodecName(c);
}

// Debug helper (compiled out via the surrounding "#if 0"): prints every
// video codec name in the list.
static void dumpVideoCodecs(const QList<QMediaFormat::VideoCodec> &codecList)
{
    qDebug() << "Video codecs:";
    for (const auto &c : codecList)
        qDebug() << " " << QMediaFormat::videoCodecName(c);
}

// Debug helper (compiled out via the surrounding "#if 0"): prints each muxer
// entry with the audio and video codecs it supports.
static void dumpMuxers(const QList<QPlatformMediaFormatInfo::CodecMap> &muxerList)
{
    for (const auto &m : muxerList) {
        qDebug() << " " << QMediaFormat::fileFormatName(m.format);
        qDebug() << " Audio";
        for (const auto &a : m.audio)
            qDebug() << " " << QMediaFormat::audioCodecName(a);
        qDebug() << " Video";
        for (const auto &v : m.video)
            qDebug() << " " << QMediaFormat::videoCodecName(v);
    }

}
#endif

// Queries the GStreamer registry for the available codecs and (de)muxers and
// caches the resulting capability tables (decoders, encoders, imageFormats).
QGstreamerFormatInfo::QGstreamerFormatInfo()
{
    auto codecs = getCodecsList(/*decode = */ true);
    decoders = getMuxerList(true, codecs.first, codecs.second);

    codecs = getCodecsList(/*decode = */ false);
    encoders = getMuxerList(/* demuxer = */false, codecs.first, codecs.second);
// dumpAudioCodecs(codecs.first);
// dumpVideoCodecs(codecs.second);
// dumpMuxers(encoders);

    imageFormats = getImageFormatList();
}

QGstreamerFormatInfo::~QGstreamerFormatInfo() = default;

// Returns the GStreamer caps describing the container of the given media
// format. The format must not be QMediaFormat::UnspecifiedFormat.
QGstCaps QGstreamerFormatInfo::formatCaps(const QMediaFormat &f) const
{
    auto format = f.fileFormat();
    Q_ASSERT(format != QMediaFormat::UnspecifiedFormat);

    // Indexed by QMediaFormat::FileFormat; keep in sync with that enum.
    const char *capsForFormat[QMediaFormat::LastFileFormat + 1] = {
        "video/x-ms-asf", // WMV
        "video/x-msvideo", // AVI
        "video/x-matroska", // Matroska
        "video/quicktime, variant=(string)iso", // MPEG4
        "video/ogg", // Ogg
        "video/quicktime", // QuickTime
        "video/webm", // WebM
        "video/quicktime, variant=(string)iso", // Mpeg4Audio is the same as mp4...
+ "video/quicktime, variant=(string)iso", // AAC is also an MP4 container + "video/x-ms-asf", // WMA, same as WMV + "audio/mpeg, mpegversion=(int)1, layer=(int)3", // MP3 + "audio/x-flac", // FLAC + "audio/x-wav" // Wave + }; + return QGstCaps(gst_caps_from_string(capsForFormat[format]), QGstCaps::HasRef); +} + +QGstCaps QGstreamerFormatInfo::audioCaps(const QMediaFormat &f) const +{ + auto codec = f.audioCodec(); + if (codec == QMediaFormat::AudioCodec::Unspecified) + return {}; + + const char *capsForCodec[(int)QMediaFormat::AudioCodec::LastAudioCodec + 1] = { + "audio/mpeg, mpegversion=(int)1, layer=(int)3", // MP3 + "audio/mpeg, mpegversion=(int)4", // AAC + "audio/x-ac3", // AC3 + "audio/x-eac3", // EAC3 + "audio/x-flac", // FLAC + "audio/x-true-hd", // DolbyTrueHD + "audio/x-opus", // Opus + "audio/x-vorbis", // Vorbis + "audio/x-raw", // WAVE + "audio/x-wma", // WMA + "audio/x-alac", // ALAC + }; + return QGstCaps(gst_caps_from_string(capsForCodec[(int)codec]), QGstCaps::HasRef); +} + +QGstCaps QGstreamerFormatInfo::videoCaps(const QMediaFormat &f) const +{ + auto codec = f.videoCodec(); + if (codec == QMediaFormat::VideoCodec::Unspecified) + return {}; + + const char *capsForCodec[(int)QMediaFormat::VideoCodec::LastVideoCodec + 1] = { + "video/mpeg, mpegversion=(int)1", // MPEG1, + "video/mpeg, mpegversion=(int)2", // MPEG2, + "video/mpeg, mpegversion=(int)4", // MPEG4, + "video/x-h264", // H264, + "video/x-h265", // H265, + "video/x-vp8", // VP8, + "video/x-vp9", // VP9, + "video/x-av1", // AV1, + "video/x-theora", // Theora, + "audio/x-wmv", // WMV + "video/x-jpeg", // MotionJPEG, + }; + return QGstCaps(gst_caps_from_string(capsForCodec[(int)codec]), QGstCaps::HasRef); +} + +QT_END_NAMESPACE diff --git a/src/plugins/multimedia/gstreamer/qgstreamerformatinfo_p.h b/src/plugins/multimedia/gstreamer/qgstreamerformatinfo_p.h new file mode 100644 index 000000000..bba10edb9 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/qgstreamerformatinfo_p.h @@ -0,0 
+1,44 @@
// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#ifndef QGSTREAMERFORMATINFO_H
#define QGSTREAMERFORMATINFO_H

//
//  W A R N I N G
//  -------------
//
// This file is not part of the Qt API. It exists purely as an
// implementation detail. This header file may change from version to
// version without notice, or even be removed.
//
// We mean it.
//

#include <private/qplatformmediaformatinfo_p.h>
#include <qlist.h>
#include <common/qgst_p.h>

QT_BEGIN_NAMESPACE

// Describes which containers, audio codecs and video codecs the installed
// GStreamer plugins can decode and encode, and maps between Qt's format
// enums and GStreamer caps.
class QGstreamerFormatInfo : public QPlatformMediaFormatInfo
{
public:
    QGstreamerFormatInfo();
    ~QGstreamerFormatInfo();

    // Map a QMediaFormat's container / audio codec / video codec to the
    // corresponding GStreamer caps.
    QGstCaps formatCaps(const QMediaFormat &f) const;
    QGstCaps audioCaps(const QMediaFormat &f) const;
    QGstCaps videoCaps(const QMediaFormat &f) const;

    // Reverse mapping: identify the Qt enum value for a caps structure,
    // returning the respective Unspecified value when not recognized.
    static QMediaFormat::AudioCodec audioCodecForCaps(QGstStructureView structure);
    static QMediaFormat::VideoCodec videoCodecForCaps(QGstStructureView structure);
    static QMediaFormat::FileFormat fileFormatForCaps(QGstStructureView structure);
    static QImageCapture::FileFormat imageFormatForCaps(QGstStructureView structure);

    // Queries the GStreamer registry for (de)muxers and intersects their pad
    // caps with the supported codec lists; used to populate decoders/encoders.
    QList<CodecMap> getMuxerList(bool demuxer, QList<QMediaFormat::AudioCodec> audioCodecs, QList<QMediaFormat::VideoCodec> videoCodecs);
};

QT_END_NAMESPACE

#endif
diff --git a/src/plugins/multimedia/gstreamer/qgstreamerintegration.cpp b/src/plugins/multimedia/gstreamer/qgstreamerintegration.cpp new file mode 100644 index 000000000..87c514f2e --- /dev/null +++ b/src/plugins/multimedia/gstreamer/qgstreamerintegration.cpp @@ -0,0 +1,242 @@
// Copyright (C) 2022 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include <qgstreamerintegration_p.h>
#include <qgstreamerformatinfo_p.h>
#include <qgstreamervideodevices_p.h>
#include <audio/qgstreameraudiodevice_p.h>
#include <audio/qgstreameraudiodecoder_p.h>
#include <common/qgstreameraudioinput_p.h>
#include <common/qgstreameraudiooutput_p.h>
#include <common/qgstreamermediaplayer_p.h>
#include <common/qgstreamervideosink_p.h>
#include <mediacapture/qgstreamercamera_p.h>
#include <mediacapture/qgstreamerimagecapture_p.h>
#include <mediacapture/qgstreamermediacapture_p.h>
#include <mediacapture/qgstreamermediaencoder_p.h>

#include <QtCore/qloggingcategory.h>
#include <QtMultimedia/private/qmediaplayer_p.h>
#include <QtMultimedia/private/qmediacapturesession_p.h>
#include <QtMultimedia/private/qcameradevice_p.h>

QT_BEGIN_NAMESPACE

// Handshake with QGstreamerIntegration::createCamera(): the
// makeCustomGStreamerCamera() overloads set these before constructing the
// QCamera, and createCamera() (called from inside the QCamera constructor)
// consumes them to build a custom camera instead of a regular one.
// thread_local so concurrent constructions on different threads don't race.
static thread_local bool inCustomCameraConstruction = false;
static thread_local QGstElement pendingCameraElement{};

QGStreamerPlatformSpecificInterfaceImplementation::
    ~QGStreamerPlatformSpecificInterfaceImplementation() = default;

// Builds a QAudioDevice whose backend is a user-supplied GStreamer pipeline
// description; delegates to the audio-input helper.
QAudioDevice QGStreamerPlatformSpecificInterfaceImplementation::makeCustomGStreamerAudioInput(
    const QByteArray &gstreamerPipeline)
{
    return qMakeCustomGStreamerAudioInput(gstreamerPipeline);
}

// Same as above for audio output.
QAudioDevice QGStreamerPlatformSpecificInterfaceImplementation::makeCustomGStreamerAudioOutput(
    const QByteArray &gstreamerPipeline)
{
    return qMakeCustomGStreamerAudioOutput(gstreamerPipeline);
}

// Creates a QCamera backed by a user-supplied GStreamer pipeline description.
// The pipeline string is smuggled through as the QCameraDevice id; the
// thread_local flag tells createCamera() to treat it as custom.
QCamera *QGStreamerPlatformSpecificInterfaceImplementation::makeCustomGStreamerCamera(
    const QByteArray &gstreamerPipeline, QObject *parent)
{
    QCameraDevicePrivate *info = new QCameraDevicePrivate;
    info->id = gstreamerPipeline;
    QCameraDevice device = info->create();

    inCustomCameraConstruction = true;
    // Reset the flag even if the QCamera constructor throws.
    auto guard = qScopeGuard([] {
        inCustomCameraConstruction = false;
    });

    return new QCamera(device, parent);
}

// Creates a QCamera wrapping an already-constructed GstElement. The element
// is handed over via the thread_local (with an added ref, NeedsRef) and must
// be consumed by createCamera() before the guard runs.
QCamera *
QGStreamerPlatformSpecificInterfaceImplementation::makeCustomGStreamerCamera(GstElement *element,
                                                                             QObject *parent)
{
    QCameraDevicePrivate *info = new QCameraDevicePrivate;
    info->id = "Custom Camera from GstElement";
    QCameraDevice device = info->create();

    pendingCameraElement = QGstElement{
        element,
        QGstElement::NeedsRef,
    };

    inCustomCameraConstruction = true;
    auto guard = qScopeGuard([] {
        inCustomCameraConstruction = false;
        // createCamera() must have taken the element during QCamera construction.
        Q_ASSERT(!pendingCameraElement);
    });

    return new QCamera(device, parent);
}

// Returns the GstPipeline underlying a QMediaPlayer, or nullptr when the
// player is not backed by this GStreamer backend.
GstPipeline *QGStreamerPlatformSpecificInterfaceImplementation::gstPipeline(QMediaPlayer *player)
{
    auto *priv = reinterpret_cast<QMediaPlayerPrivate *>(QMediaPlayerPrivate::get(player));
    if (!priv)
        return nullptr;

    QGstreamerMediaPlayer *gstreamerPlayer = dynamic_cast<QGstreamerMediaPlayer *>(priv->control);
    return gstreamerPlayer ? gstreamerPlayer->pipeline().pipeline() : nullptr;
}

// Returns the GstPipeline underlying a QMediaCaptureSession, or nullptr when
// the session is not backed by this GStreamer backend.
GstPipeline *
QGStreamerPlatformSpecificInterfaceImplementation::gstPipeline(QMediaCaptureSession *session)
{
    auto *priv = QMediaCaptureSessionPrivate::get(session);
    if (!priv)
        return nullptr;

    QGstreamerMediaCapture *gstreamerCapture =
        dynamic_cast<QGstreamerMediaCapture *>(priv->captureSession.get());
    return gstreamerCapture ?
gstreamerCapture->pipeline().pipeline() : nullptr; +} + +Q_LOGGING_CATEGORY(lcGstreamer, "qt.multimedia.gstreamer") + +namespace { + +void rankDownPlugin(GstRegistry *reg, const char *name) +{ + QGstPluginFeatureHandle pluginFeature{ + gst_registry_lookup_feature(reg, name), + QGstPluginFeatureHandle::HasRef, + }; + if (pluginFeature) + gst_plugin_feature_set_rank(pluginFeature.get(), GST_RANK_PRIMARY - 1); +} + +// https://gstreamer.freedesktop.org/documentation/vaapi/index.html +constexpr auto vaapiPluginNames = { + "vaapidecodebin", "vaapih264dec", "vaapih264enc", "vaapih265dec", + "vaapijpegdec", "vaapijpegenc", "vaapimpeg2dec", "vaapipostproc", + "vaapisink", "vaapivp8dec", "vaapivp9dec", +}; + +// https://gstreamer.freedesktop.org/documentation/va/index.html +constexpr auto vaPluginNames = { + "vaav1dec", "vacompositor", "vadeinterlace", "vah264dec", "vah264enc", "vah265dec", + "vajpegdec", "vampeg2dec", "vapostproc", "vavp8dec", "vavp9dec", +}; + +// https://gstreamer.freedesktop.org/documentation/nvcodec/index.html +constexpr auto nvcodecPluginNames = { + "cudaconvert", "cudaconvertscale", "cudadownload", "cudaipcsink", "cudaipcsrc", + "cudascale", "cudaupload", "nvautogpuh264enc", "nvautogpuh265enc", "nvav1dec", + "nvcudah264enc", "nvcudah265enc", "nvd3d11h264enc", "nvd3d11h265enc", "nvh264dec", + "nvh264enc", "nvh265dec", "nvh265enc", "nvjpegdec", "nvjpegenc", + "nvmpeg2videodec", "nvmpeg4videodec", "nvmpegvideodec", "nvvp8dec", "nvvp9dec", +}; + +} // namespace + +QGstreamerIntegration::QGstreamerIntegration() + : QPlatformMediaIntegration(QLatin1String("gstreamer")) +{ + gst_init(nullptr, nullptr); + qCDebug(lcGstreamer) << "Using gstreamer version: " << gst_version_string(); + + GstRegistry *reg = gst_registry_get(); + + if constexpr (!GST_CHECK_VERSION(1, 22, 0)) { + GstRegistry* reg = gst_registry_get(); + for (const char *name : vaapiPluginNames) + rankDownPlugin(reg, name); + } + + if (qEnvironmentVariableIsSet("QT_GSTREAMER_DISABLE_VA")) { + for 
(const char *name : vaPluginNames)
            rankDownPlugin(reg, name);
    }

    if (qEnvironmentVariableIsSet("QT_GSTREAMER_DISABLE_NVCODEC")) {
        for (const char *name : nvcodecPluginNames)
            rankDownPlugin(reg, name);
    }
}

// Factory overrides: each creates the GStreamer-backed implementation of the
// corresponding platform abstraction.

QPlatformMediaFormatInfo *QGstreamerIntegration::createFormatInfo()
{
    return new QGstreamerFormatInfo();
}

QPlatformVideoDevices *QGstreamerIntegration::createVideoDevices()
{
    return new QGstreamerVideoDevices(this);
}

// Convenience accessor returning the format info downcast to its concrete type.
const QGstreamerFormatInfo *QGstreamerIntegration::gstFormatsInfo()
{
    return static_cast<const QGstreamerFormatInfo *>(formatInfo());
}

QMaybe<QPlatformAudioDecoder *> QGstreamerIntegration::createAudioDecoder(QAudioDecoder *decoder)
{
    return QGstreamerAudioDecoder::create(decoder);
}

QMaybe<QPlatformMediaCaptureSession *> QGstreamerIntegration::createCaptureSession()
{
    return QGstreamerMediaCapture::create();
}

QMaybe<QPlatformMediaPlayer *> QGstreamerIntegration::createPlayer(QMediaPlayer *player)
{
    return QGstreamerMediaPlayer::create(player);
}

// Consumes the thread_local handshake set up by
// makeCustomGStreamerCamera(); otherwise creates a regular camera.
QMaybe<QPlatformCamera *> QGstreamerIntegration::createCamera(QCamera *camera)
{
    if (inCustomCameraConstruction) {
        // A null element means the pipeline-description overload was used.
        QGstElement element = std::exchange(pendingCameraElement, {});
        return element ? new QGstreamerCustomCamera{ camera, std::move(element) }
                       : new QGstreamerCustomCamera{ camera };
    }

    return QGstreamerCamera::create(camera);
}

QMaybe<QPlatformMediaRecorder *> QGstreamerIntegration::createRecorder(QMediaRecorder *recorder)
{
    return new QGstreamerMediaEncoder(recorder);
}

QMaybe<QPlatformImageCapture *> QGstreamerIntegration::createImageCapture(QImageCapture *imageCapture)
{
    return QGstreamerImageCapture::create(imageCapture);
}

QMaybe<QPlatformVideoSink *> QGstreamerIntegration::createVideoSink(QVideoSink *sink)
{
    return new QGstreamerVideoSink(sink);
}

QMaybe<QPlatformAudioInput *> QGstreamerIntegration::createAudioInput(QAudioInput *q)
{
    return QGstreamerAudioInput::create(q);
}

QMaybe<QPlatformAudioOutput *> QGstreamerIntegration::createAudioOutput(QAudioOutput *q)
{
    return QGstreamerAudioOutput::create(q);
}

// Looks up a GstDevice by the id assigned in QGstreamerVideoDevices.
GstDevice *QGstreamerIntegration::videoDevice(const QByteArray &id)
{
    const auto devices = videoDevices();
    return devices ? static_cast<QGstreamerVideoDevices *>(devices)->videoDevice(id) : nullptr;
}

QAbstractPlatformSpecificInterface *QGstreamerIntegration::platformSpecificInterface()
{
    return &m_platformSpecificImplementation;
}

QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/qgstreamerintegration_p.h b/src/plugins/multimedia/gstreamer/qgstreamerintegration_p.h new file mode 100644 index 000000000..229bbd48e --- /dev/null +++ b/src/plugins/multimedia/gstreamer/qgstreamerintegration_p.h @@ -0,0 +1,79 @@
// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#ifndef QGSTREAMERINTEGRATION_H
#define QGSTREAMERINTEGRATION_H

//
//  W A R N I N G
//  -------------
//
// This file is not part of the Qt API. It exists purely as an
// implementation detail. This header file may change from version to
// version without notice, or even be removed.
//
// We mean it.
//

#include <QtMultimedia/private/qplatformmediaintegration_p.h>
#include <QtMultimedia/private/qgstreamer_platformspecificinterface_p.h>

#include <gst/gst.h>

QT_BEGIN_NAMESPACE

class QGstreamerFormatInfo;

// Exposes GStreamer-specific entry points (custom pipelines, raw GstPipeline
// access) behind the generic platform-specific interface.
class QGStreamerPlatformSpecificInterfaceImplementation : public QGStreamerPlatformSpecificInterface
{
public:
    ~QGStreamerPlatformSpecificInterfaceImplementation() override;

    QAudioDevice makeCustomGStreamerAudioInput(const QByteArray &gstreamerPipeline) override;
    QAudioDevice makeCustomGStreamerAudioOutput(const QByteArray &gstreamerPipeline) override;
    QCamera *makeCustomGStreamerCamera(const QByteArray &gstreamerPipeline,
                                       QObject *parent) override;

    QCamera *makeCustomGStreamerCamera(GstElement *, QObject *parent) override;

    GstPipeline *gstPipeline(QMediaPlayer *) override;
    GstPipeline *gstPipeline(QMediaCaptureSession *) override;
};

// The GStreamer media backend: creates the GStreamer-backed implementations
// of all platform media abstractions.
class QGstreamerIntegration : public QPlatformMediaIntegration
{
public:
    QGstreamerIntegration();

    static QGstreamerIntegration *instance()
    {
        return static_cast<QGstreamerIntegration *>(QPlatformMediaIntegration::instance());
    }

    QMaybe<QPlatformAudioDecoder *> createAudioDecoder(QAudioDecoder *decoder) override;
    QMaybe<QPlatformMediaCaptureSession *> createCaptureSession() override;
    QMaybe<QPlatformMediaPlayer *> createPlayer(QMediaPlayer *player) override;
    QMaybe<QPlatformCamera *> createCamera(QCamera *) override;
    QMaybe<QPlatformMediaRecorder *> createRecorder(QMediaRecorder *) override;
    QMaybe<QPlatformImageCapture *> createImageCapture(QImageCapture *) override;

    QMaybe<QPlatformVideoSink *> createVideoSink(QVideoSink *sink) override;

    QMaybe<QPlatformAudioInput *> createAudioInput(QAudioInput *) override;
    QMaybe<QPlatformAudioOutput *> createAudioOutput(QAudioOutput *) override;

    const QGstreamerFormatInfo *gstFormatsInfo();
    GstDevice *videoDevice(const QByteArray &id);

    QAbstractPlatformSpecificInterface *platformSpecificInterface() override;

protected:
    QPlatformMediaFormatInfo *createFormatInfo() override;
    QPlatformVideoDevices *createVideoDevices() override;

    QGStreamerPlatformSpecificInterfaceImplementation m_platformSpecificImplementation;
};

QT_END_NAMESPACE

#endif
diff --git a/src/plugins/multimedia/gstreamer/qgstreamerplugin.cpp b/src/plugins/multimedia/gstreamer/qgstreamerplugin.cpp new file mode 100644 index 000000000..66ad7f712 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/qgstreamerplugin.cpp @@ -0,0 +1,28 @@
// Copyright (C) 2024 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include <QtMultimedia/private/qplatformmediaplugin_p.h>

#include <qgstreamerintegration_p.h>

QT_BEGIN_NAMESPACE

// Plugin entry point registered via Q_PLUGIN_METADATA; instantiates the
// GStreamer integration when the "gstreamer" backend is requested.
class QGstreamerMediaPlugin : public QPlatformMediaPlugin
{
    Q_OBJECT
    Q_PLUGIN_METADATA(IID QPlatformMediaPlugin_iid FILE "gstreamer.json")

public:
    QGstreamerMediaPlugin() = default;

    QPlatformMediaIntegration* create(const QString &name) override
    {
        if (name == u"gstreamer")
            return new QGstreamerIntegration;
        return nullptr;
    }
};

QT_END_NAMESPACE

#include "qgstreamerplugin.moc"
diff --git a/src/plugins/multimedia/gstreamer/qgstreamervideodevices.cpp b/src/plugins/multimedia/gstreamer/qgstreamervideodevices.cpp new file mode 100644 index 000000000..78ac16eb4 --- /dev/null +++ b/src/plugins/multimedia/gstreamer/qgstreamervideodevices.cpp @@ -0,0 +1,158 @@
// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include "qgstreamervideodevices_p.h"
#include <QtMultimedia/qmediadevices.h>
#include <QtMultimedia/private/qcameradevice_p.h>

#include <common/qgst_p.h>
#include <common/qgstutils_p.h>
#include <common/qglist_helper_p.h>

QT_BEGIN_NAMESPACE

// Bus watch installed on the device monitor: forwards device-added/removed
// messages to the QGstreamerVideoDevices instance passed as user data.
// Returns G_SOURCE_CONTINUE so the watch stays installed.
static gboolean deviceMonitorCallback(GstBus *, GstMessage *message, gpointer m)
{
    auto *manager = static_cast<QGstreamerVideoDevices *>(m);
    QGstDeviceHandle device;

    switch (GST_MESSAGE_TYPE(message)) {
    case GST_MESSAGE_DEVICE_ADDED:
        gst_message_parse_device_added(message, &device);
        manager->addDevice(std::move(device));
        break;
    case GST_MESSAGE_DEVICE_REMOVED:
        gst_message_parse_device_removed(message, &device);
        manager->removeDevice(std::move(device));
        break;
    default:
        break;
    }

    return G_SOURCE_CONTINUE;
}

// Starts a GstDeviceMonitor filtered to "Video/Source" devices, installs the
// bus watch for hot-plug events, and seeds the list with the devices already
// known to the monitor.
QGstreamerVideoDevices::QGstreamerVideoDevices(QPlatformMediaIntegration *integration)
    : QPlatformVideoDevices(integration),
      m_deviceMonitor{
          gst_device_monitor_new(),
      }
{
    gst_device_monitor_add_filter(m_deviceMonitor.get(), "Video/Source", nullptr);

    QGstBusHandle bus{
        gst_device_monitor_get_bus(m_deviceMonitor.get()),
    };
    gst_bus_add_watch(bus.get(), deviceMonitorCallback, this);
    gst_device_monitor_start(m_deviceMonitor.get());

    GList *devices = gst_device_monitor_get_devices(m_deviceMonitor.get());

    for (GstDevice *device : QGstUtils::GListRangeAdaptor<GstDevice *>(devices)) {
        addDevice(QGstDeviceHandle{
            device,
            QGstDeviceHandle::HasRef,
        });
    }

    g_list_free(devices);
}

QGstreamerVideoDevices::~QGstreamerVideoDevices()
{
    gst_device_monitor_stop(m_deviceMonitor.get());
}

// Builds the QCameraDevice list from the tracked GstDevices. The default
// device (per the "is-default" device property) is placed first.
QList<QCameraDevice> QGstreamerVideoDevices::videoDevices() const
{
    QList<QCameraDevice> devices;

    for (const auto &device : m_videoSources) {
        QCameraDevicePrivate *info = new QCameraDevicePrivate;

        QGString desc{
            gst_device_get_display_name(device.gstDevice.get()),
        };
        info->description = desc.toQString();
        info->id = device.id;

        QUniqueGstStructureHandle properties{
            gst_device_get_properties(device.gstDevice.get()),
        };
        if (properties) {
            QGstStructureView view{ properties };
            auto def = view["is-default"].toBool();
            info->isDefault = def && *def;
        }

        if (info->isDefault)
            devices.prepend(info->create());
        else
            devices.append(info->create());

        // NOTE(review): videoFormats/photoResolutions are assigned to `info`
        // below, after create() was already called — this presumably relies on
        // the created QCameraDevice still sharing the private; confirm.
        auto caps = QGstCaps(gst_device_get_caps(device.gstDevice.get()), QGstCaps::HasRef);
        if (!caps.isNull()) {
            QList<QCameraFormat> formats;
            QSet<QSize> photoResolutions;

            int size = caps.size();
            for (int i = 0; i < size; ++i) {
                auto cap = caps.at(i);

                // Skip caps entries without a fixed, valid resolution.
                QSize resolution = cap.resolution();
                if (!resolution.isValid())
                    continue;

                auto pixelFormat = cap.pixelFormat();
                auto frameRate = cap.frameRateRange();

                auto *f = new QCameraFormatPrivate{ QSharedData(), pixelFormat, resolution,
                                                    frameRate.min, frameRate.max };
                formats << f->create();
                photoResolutions.insert(resolution);
            }
            info->videoFormats = formats;
            // ### sort resolutions?
            info->photoResolutions = photoResolutions.values();
        }
    }
    return devices;
}

// Registers a hot-plugged (or initially discovered) video source. Duplicate
// devices are ignored; each new device gets a monotonically increasing id.
void QGstreamerVideoDevices::addDevice(QGstDeviceHandle device)
{
    Q_ASSERT(gst_device_has_classes(device.get(), "Video/Source"));

    auto it = std::find_if(m_videoSources.begin(), m_videoSources.end(),
                           [&](const QGstRecordDevice &a) { return a.gstDevice == device; });

    if (it != m_videoSources.end())
        return;

    m_videoSources.push_back(QGstRecordDevice{
        std::move(device),
        QByteArray::number(m_idGenerator),
    });
    emit videoInputsChanged();
    m_idGenerator++;
}

// Unregisters a removed video source; no-op when the device is unknown.
void QGstreamerVideoDevices::removeDevice(QGstDeviceHandle device)
{
    auto it = std::find_if(m_videoSources.begin(), m_videoSources.end(),
                           [&](const QGstRecordDevice &a) { return a.gstDevice == device; });

    if (it != m_videoSources.end()) {
        m_videoSources.erase(it);
        emit videoInputsChanged();
    }
}

// Looks up the raw GstDevice for the backend-assigned id, or nullptr.
GstDevice *QGstreamerVideoDevices::videoDevice(const QByteArray &id) const
{
    auto it = std::find_if(m_videoSources.begin(), m_videoSources.end(),
                           [&](const QGstRecordDevice &a) { return a.id == id; });
    return it != m_videoSources.end() ? it->gstDevice.get() : nullptr;
}

QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/gstreamer/qgstreamervideodevices_p.h b/src/plugins/multimedia/gstreamer/qgstreamervideodevices_p.h new file mode 100644 index 000000000..a321ae66b --- /dev/null +++ b/src/plugins/multimedia/gstreamer/qgstreamervideodevices_p.h @@ -0,0 +1,54 @@
// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#ifndef QGSTREAMERMEDIADEVICES_H
#define QGSTREAMERMEDIADEVICES_H

//
//  W A R N I N G
//  -------------
//
// This file is not part of the Qt API. It exists purely as an
// implementation detail. This header file may change from version to
// version without notice, or even be removed.
//
// We mean it.
//

#include <private/qplatformvideodevices_p.h>
#include <gst/gst.h>
#include <qaudiodevice.h>
#include <vector>

#include <common/qgst_handle_types_p.h>

QT_BEGIN_NAMESPACE

// Tracks the system's video capture devices via a GstDeviceMonitor and
// exposes them as QCameraDevices, including hot-plug notifications.
class QGstreamerVideoDevices : public QPlatformVideoDevices
{
public:
    explicit QGstreamerVideoDevices(QPlatformMediaIntegration *integration);
    ~QGstreamerVideoDevices();

    QList<QCameraDevice> videoDevices() const override;
    GstDevice *videoDevice(const QByteArray &id) const;

    void addDevice(QGstDeviceHandle);
    void removeDevice(QGstDeviceHandle);

private:
    // Pairs a monitored GstDevice with the backend-assigned id used as the
    // QCameraDevice id.
    struct QGstRecordDevice
    {
        QGstDeviceHandle gstDevice;
        QByteArray id;
    };

    // Source of the per-device ids handed out by addDevice().
    quint64 m_idGenerator = 0;
    std::vector<QGstRecordDevice> m_videoSources;

    QGstDeviceMonitorHandle m_deviceMonitor;
};

QT_END_NAMESPACE

#endif