summaryrefslogtreecommitdiffstats
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/gsttools/gstvideoconnector.c16
-rw-r--r--src/gsttools/qgstreamerplayercontrol.cpp6
-rw-r--r--src/gsttools/qgstreamerplayersession.cpp32
-rw-r--r--src/gsttools/qgstreamervideowidget.cpp4
-rw-r--r--src/gsttools/qgstutils.cpp2
-rw-r--r--src/gsttools/qgstvideorenderersink.cpp13
-rw-r--r--src/imports/multimedia/Video.qml19
-rw-r--r--src/imports/multimedia/plugins.qmltypes36
-rw-r--r--src/imports/multimedia/qdeclarativecamera.cpp17
-rw-r--r--src/imports/multimedia/qdeclarativecamera_p.h5
-rw-r--r--src/multimedia/audio/qaudiohelpers.cpp6
-rw-r--r--src/multimedia/audio/qsamplecache_p.cpp2
-rw-r--r--src/multimedia/audio/qsoundeffect_pulse_p.cpp19
-rw-r--r--src/multimedia/camera/qcamera.cpp11
-rw-r--r--src/multimedia/camera/qcamera.h3
-rw-r--r--src/multimedia/camera/qcameraimagecapture.cpp9
-rw-r--r--src/multimedia/configure.json19
-rw-r--r--src/multimedia/controls/qmedianetworkaccesscontrol.cpp6
-rw-r--r--src/multimedia/controls/qmedianetworkaccesscontrol.h15
-rw-r--r--src/multimedia/doc/snippets/multimedia-snippets/media.cpp31
-rw-r--r--src/multimedia/doc/src/cameraoverview.qdoc2
-rw-r--r--src/multimedia/doc/src/platform-notes-windows.qdoc5
-rw-r--r--src/multimedia/doc/src/qtmultimedia-index.qdoc3
-rw-r--r--src/multimedia/playback/qmediaplayer.cpp48
-rw-r--r--src/multimedia/playback/qmediaplayer.h30
-rw-r--r--src/multimedia/playback/qplaylistfileparser.cpp2
-rw-r--r--src/multimedia/qmediapluginloader.cpp2
-rw-r--r--src/multimedia/video/qabstractvideobuffer.cpp5
-rw-r--r--src/multimedia/video/qabstractvideobuffer.h1
-rw-r--r--src/multimedia/video/qmemoryvideobuffer.cpp2
-rw-r--r--src/multimediawidgets/multimediawidgets.pro3
-rw-r--r--src/plugins/alsa/qalsaaudiodeviceinfo.h18
-rw-r--r--src/plugins/alsa/qalsaaudioinput.h44
-rw-r--r--src/plugins/alsa/qalsaaudiooutput.cpp2
-rw-r--r--src/plugins/alsa/qalsaaudiooutput.h44
-rw-r--r--src/plugins/android/src/mediacapture/qandroidcapturesession.cpp56
-rw-r--r--src/plugins/android/src/mediacapture/qandroidcapturesession.h3
-rw-r--r--src/plugins/android/src/mediaplayer/qandroidmetadatareadercontrol.cpp8
-rw-r--r--src/plugins/android/src/wrappers/jni/androidcamera.cpp6
-rw-r--r--src/plugins/android/src/wrappers/jni/androidmediaplayer.cpp2
-rw-r--r--src/plugins/android/src/wrappers/jni/androidmediarecorder.cpp26
-rw-r--r--src/plugins/android/src/wrappers/jni/androidmediarecorder.h2
-rw-r--r--src/plugins/avfoundation/camera/avfcameraflashcontrol.mm26
-rw-r--r--src/plugins/avfoundation/camera/avfcamerarenderercontrol.mm7
-rw-r--r--src/plugins/avfoundation/camera/avfcameraservice.h5
-rw-r--r--src/plugins/avfoundation/camera/avfcameraservice.mm29
-rw-r--r--src/plugins/avfoundation/camera/avfcamerasession.h3
-rw-r--r--src/plugins/avfoundation/camera/avfcamerasession.mm33
-rw-r--r--src/plugins/avfoundation/camera/avfcamerautility.h2
-rw-r--r--src/plugins/avfoundation/camera/avfcamerautility.mm7
-rw-r--r--src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm54
-rw-r--r--src/plugins/avfoundation/camera/avfcamerawindowcontrol.h129
-rw-r--r--src/plugins/avfoundation/camera/avfcamerawindowcontrol.mm262
-rw-r--r--src/plugins/avfoundation/camera/avfcapturedestinationcontrol.h63
-rw-r--r--src/plugins/avfoundation/camera/avfcapturedestinationcontrol.mm62
-rw-r--r--src/plugins/avfoundation/camera/avfimagecapturecontrol.mm31
-rw-r--r--src/plugins/avfoundation/camera/camera.pro8
-rw-r--r--src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm47
-rw-r--r--src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.h67
-rw-r--r--src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm330
-rw-r--r--src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.h113
-rw-r--r--src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.mm261
-rw-r--r--src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.h5
-rw-r--r--src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.mm140
-rw-r--r--src/plugins/avfoundation/mediaplayer/avfvideowidget.mm17
-rw-r--r--src/plugins/avfoundation/mediaplayer/avfvideowindowcontrol.mm9
-rw-r--r--src/plugins/avfoundation/mediaplayer/mediaplayer.pro38
-rw-r--r--src/plugins/common/evr/evrcustompresenter.cpp36
-rw-r--r--src/plugins/common/evr/evrcustompresenter.h3
-rw-r--r--src/plugins/common/evr/evrd3dpresentengine.cpp7
-rw-r--r--src/plugins/common/evr/evrd3dpresentengine.h3
-rw-r--r--src/plugins/common/evr/evrvideowindowcontrol.cpp24
-rw-r--r--src/plugins/common/evr/evrvideowindowcontrol.h3
-rw-r--r--src/plugins/coreaudio/coreaudiodeviceinfo.mm13
-rw-r--r--src/plugins/coreaudio/coreaudioinput.mm5
-rw-r--r--src/plugins/coreaudio/coreaudiooutput.mm7
-rw-r--r--src/plugins/coreaudio/coreaudiosessionmanager.mm5
-rw-r--r--src/plugins/directshow/camera/dscamerasession.cpp75
-rw-r--r--src/plugins/directshow/camera/dscamerasession.h2
-rw-r--r--src/plugins/directshow/common/directshowmediatype.cpp9
-rw-r--r--src/plugins/directshow/common/directshowmediatypeenum.cpp3
-rw-r--r--src/plugins/directshow/common/directshowutils.cpp29
-rw-r--r--src/plugins/directshow/common/directshowutils.h2
-rw-r--r--src/plugins/directshow/player/directshowioreader.cpp2
-rw-r--r--src/plugins/directshow/player/directshowmetadatacontrol.cpp2
-rw-r--r--src/plugins/gstreamer/camerabin/camerabinfocus.cpp2
-rw-r--r--src/plugins/gstreamer/camerabin/camerabinimagecapture.cpp5
-rw-r--r--src/plugins/gstreamer/camerabin/camerabinsession.cpp22
-rw-r--r--src/plugins/m3u/qm3uhandler.cpp10
-rw-r--r--src/plugins/m3u/qm3uhandler.h12
-rw-r--r--src/plugins/pulseaudio/qaudiodeviceinfo_pulse.h18
-rw-r--r--src/plugins/pulseaudio/qaudioinput_pulse.cpp8
-rw-r--r--src/plugins/pulseaudio/qaudioinput_pulse.h46
-rw-r--r--src/plugins/pulseaudio/qaudiooutput_pulse.cpp36
-rw-r--r--src/plugins/pulseaudio/qaudiooutput_pulse.h52
-rw-r--r--src/plugins/pulseaudio/qpulseaudioplugin.h10
-rw-r--r--src/plugins/qnx-audio/audio/audio.pro1
-rw-r--r--src/plugins/qnx-audio/audio/qnxaudioinput.cpp2
-rw-r--r--src/plugins/qnx/common/windowgrabber.cpp8
-rw-r--r--src/plugins/videonode/imx6/imx6.pro8
-rw-r--r--src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp23
-rw-r--r--src/plugins/videonode/imx6/qsgvivantevideomaterial.h2
-rw-r--r--src/plugins/videonode/videonode.pro2
-rw-r--r--src/plugins/windowsaudio/qwindowsaudiooutput.cpp2
-rw-r--r--src/plugins/winrt/qwinrtcameracontrol.cpp2
-rw-r--r--src/plugins/wmf/player/mfplayersession.cpp18
-rw-r--r--src/plugins/wmf/player/mftvideo.cpp2
-rw-r--r--src/plugins/wmf/player/mfvideorenderercontrol.cpp24
-rw-r--r--src/plugins/wmf/player/mfvideorenderercontrol.h5
-rw-r--r--src/plugins/wmf/sourceresolver.cpp2
-rw-r--r--src/qtmultimediaquicktools/qdeclarativevideooutput.cpp12
-rw-r--r--src/qtmultimediaquicktools/qsgvideonode_texture.cpp147
-rw-r--r--src/qtmultimediaquicktools/qsgvideonode_texture_p.h2
-rw-r--r--src/qtmultimediaquicktools/qtmultimediaquicktools.qrc5
-rw-r--r--src/qtmultimediaquicktools/shaders/rectsampler.vert10
-rw-r--r--src/qtmultimediaquicktools/shaders/rectsampler_core.vert11
-rw-r--r--src/qtmultimediaquicktools/shaders/rectsampler_rgb.frag8
-rw-r--r--src/qtmultimediaquicktools/shaders/rectsampler_rgb_core.frag10
118 files changed, 2009 insertions, 1081 deletions
diff --git a/src/gsttools/gstvideoconnector.c b/src/gsttools/gstvideoconnector.c
index 7f88a89af..b85f5bdbe 100644
--- a/src/gsttools/gstvideoconnector.c
+++ b/src/gsttools/gstvideoconnector.c
@@ -116,13 +116,13 @@ gst_video_connector_class_init (GstVideoConnectorClass * klass)
gst_video_connector_signals[SIGNAL_RESEND_NEW_SEGMENT] =
g_signal_new ("resend-new-segment", G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
- G_STRUCT_OFFSET (GstVideoConnectorClass, resend_new_segment), nullptr, nullptr,
+ G_STRUCT_OFFSET (GstVideoConnectorClass, resend_new_segment), NULL, NULL,
g_cclosure_marshal_VOID__BOOLEAN, G_TYPE_NONE, 1, G_TYPE_BOOLEAN);
gst_video_connector_signals[SIGNAL_CONNECTION_FAILED] =
g_signal_new ("connection-failed", G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST,
- 0, nullptr, nullptr,
+ 0, NULL, NULL,
g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0);
}
@@ -159,7 +159,7 @@ gst_video_connector_init (GstVideoConnector *element,
element->relinked = FALSE;
element->failedSignalEmited = FALSE;
gst_segment_init (&element->segment, GST_FORMAT_TIME);
- element->latest_buffer = nullptr;
+ element->latest_buffer = NULL;
}
static void
@@ -167,9 +167,9 @@ gst_video_connector_reset (GstVideoConnector * element)
{
element->relinked = FALSE;
element->failedSignalEmited = FALSE;
- if (element->latest_buffer != nullptr) {
+ if (element->latest_buffer != NULL) {
gst_buffer_unref (element->latest_buffer);
- element->latest_buffer = nullptr;
+ element->latest_buffer = NULL;
}
gst_segment_init (&element->segment, GST_FORMAT_UNDEFINED);
}
@@ -196,7 +196,7 @@ gst_video_connector_buffer_alloc (GstPad * pad, guint64 offset, guint size,
if (!buf)
return GST_FLOW_ERROR;
- *buf = nullptr;
+ *buf = NULL;
gboolean isFailed = FALSE;
while (1) {
@@ -265,7 +265,7 @@ gst_video_connector_setcaps (GstPad *pad, GstCaps *caps)
/* forward-negotiate */
gboolean res = gst_pad_set_caps(element->srcpad, caps);
- gchar * debugmsg = nullptr;
+ gchar * debugmsg = NULL;
GST_DEBUG_OBJECT(element, "gst_video_connector_setcaps %s %i", debugmsg = gst_caps_to_string(caps), res);
if (debugmsg)
g_free(debugmsg);
@@ -407,7 +407,7 @@ gst_video_connector_chain (GstPad * pad, GstBuffer * buf)
if (element->latest_buffer) {
gst_buffer_unref (element->latest_buffer);
- element->latest_buffer = nullptr;
+ element->latest_buffer = NULL;
}
element->latest_buffer = gst_buffer_ref(buf);
diff --git a/src/gsttools/qgstreamerplayercontrol.cpp b/src/gsttools/qgstreamerplayercontrol.cpp
index 165978288..689467db8 100644
--- a/src/gsttools/qgstreamerplayercontrol.cpp
+++ b/src/gsttools/qgstreamerplayercontrol.cpp
@@ -439,8 +439,10 @@ void QGstreamerPlayerControl::updateSessionState(QMediaPlayer::State state)
}
m_pendingSeekPosition = -1;
- if (m_currentState == QMediaPlayer::PlayingState)
- m_session->play();
+ if (m_currentState == QMediaPlayer::PlayingState) {
+ if (m_bufferProgress == -1 || m_bufferProgress == 100)
+ m_session->play();
+ }
}
updateMediaStatus();
diff --git a/src/gsttools/qgstreamerplayersession.cpp b/src/gsttools/qgstreamerplayersession.cpp
index c6d2df810..d36f73b2a 100644
--- a/src/gsttools/qgstreamerplayersession.cpp
+++ b/src/gsttools/qgstreamerplayersession.cpp
@@ -162,7 +162,15 @@ void QGstreamerPlayerSession::initPlaybin()
}
#if GST_CHECK_VERSION(1,0,0)
- m_videoIdentity = gst_element_factory_make("identity", nullptr); // floating ref
+ static const auto convDesc = qEnvironmentVariable("QT_GSTREAMER_PLAYBIN_CONVERT");
+ GError *err = nullptr;
+ auto convPipeline = !convDesc.isEmpty() ? convDesc.toLatin1().constData() : "identity";
+ auto convElement = gst_parse_launch(convPipeline, &err);
+ if (err) {
+ qWarning() << "Error:" << convDesc << ":" << QLatin1String(err->message);
+ g_clear_error(&err);
+ }
+ m_videoIdentity = convElement;
#else
m_videoIdentity = GST_ELEMENT(g_object_new(gst_video_connector_get_type(), 0)); // floating ref
g_signal_connect(G_OBJECT(m_videoIdentity), "connection-failed", G_CALLBACK(insertColorSpaceElement), (gpointer)this);
@@ -494,10 +502,12 @@ void QGstreamerPlayerSession::setPlaybackRate(qreal rate)
if (!qFuzzyCompare(m_playbackRate, rate)) {
m_playbackRate = rate;
if (m_pipeline && m_seekable) {
+ qint64 from = rate > 0 ? position() : 0;
+ qint64 to = rate > 0 ? duration() : position();
gst_element_seek(m_pipeline, rate, GST_FORMAT_TIME,
GstSeekFlags(GST_SEEK_FLAG_FLUSH),
- GST_SEEK_TYPE_NONE,0,
- GST_SEEK_TYPE_END, 0);
+ GST_SEEK_TYPE_SET, from * 1000000,
+ GST_SEEK_TYPE_SET, to * 1000000);
}
emit playbackRateChanged(m_playbackRate);
}
@@ -1078,15 +1088,13 @@ bool QGstreamerPlayerSession::seek(qint64 ms)
//seek locks when the video output sink is changing and pad is blocked
if (m_pipeline && !m_pendingVideoSink && m_state != QMediaPlayer::StoppedState && m_seekable) {
ms = qMax(ms,qint64(0));
- gint64 position = ms * 1000000;
- bool isSeeking = gst_element_seek(m_pipeline,
- m_playbackRate,
- GST_FORMAT_TIME,
+ qint64 from = m_playbackRate > 0 ? ms : 0;
+ qint64 to = m_playbackRate > 0 ? duration() : ms;
+
+ bool isSeeking = gst_element_seek(m_pipeline, m_playbackRate, GST_FORMAT_TIME,
GstSeekFlags(GST_SEEK_FLAG_FLUSH),
- GST_SEEK_TYPE_SET,
- position,
- GST_SEEK_TYPE_NONE,
- 0);
+ GST_SEEK_TYPE_SET, from * 1000000,
+ GST_SEEK_TYPE_SET, to * 1000000);
if (isSeeking)
m_lastPosition = ms;
@@ -1579,7 +1587,7 @@ void QGstreamerPlayerSession::updateVideoResolutionTag()
void QGstreamerPlayerSession::updateDuration()
{
gint64 gstDuration = 0;
- int duration = 0;
+ qint64 duration = 0;
if (m_pipeline && qt_gst_element_query_duration(m_pipeline, GST_FORMAT_TIME, &gstDuration))
duration = gstDuration / 1000000;
diff --git a/src/gsttools/qgstreamervideowidget.cpp b/src/gsttools/qgstreamervideowidget.cpp
index 164e62f86..4137aff32 100644
--- a/src/gsttools/qgstreamervideowidget.cpp
+++ b/src/gsttools/qgstreamervideowidget.cpp
@@ -38,6 +38,7 @@
****************************************************************************/
#include "qgstreamervideowidget_p.h"
+#include "qgstutils_p.h"
#include <QtCore/qcoreevent.h>
#include <QtCore/qdebug.h>
@@ -170,7 +171,8 @@ bool QGstreamerVideoWidgetControl::eventFilter(QObject *object, QEvent *e)
if (e->type() == QEvent::Paint) {
// Update overlay by new size if any.
- m_videoOverlay.setRenderRectangle(QRect(0, 0, m_widget->width(), m_widget->height()));
+ if (QGstUtils::useOpenGL())
+ m_videoOverlay.setRenderRectangle(QRect(0, 0, m_widget->width(), m_widget->height()));
if (m_videoOverlay.isActive())
m_videoOverlay.expose(); // triggers a repaint of the last frame
else
diff --git a/src/gsttools/qgstutils.cpp b/src/gsttools/qgstutils.cpp
index f998c4309..3f68244ba 100644
--- a/src/gsttools/qgstutils.cpp
+++ b/src/gsttools/qgstutils.cpp
@@ -866,7 +866,7 @@ QSet<QString> QGstUtils::supportedMimeTypes(bool (*isValidFactory)(GstElementFac
if (value) {
gchar *str = gst_value_serialize(value);
QString versions = QLatin1String(str);
- const QStringList elements = versions.split(QRegularExpression(QLatin1String("\\D+")), QString::SkipEmptyParts);
+ const QStringList elements = versions.split(QRegularExpression(QLatin1String("\\D+")), Qt::SkipEmptyParts);
for (const QString &e : elements)
supportedMimeTypes.insert(nameLowcase + e);
g_free(str);
diff --git a/src/gsttools/qgstvideorenderersink.cpp b/src/gsttools/qgstvideorenderersink.cpp
index 9eb2531a1..4000f2178 100644
--- a/src/gsttools/qgstvideorenderersink.cpp
+++ b/src/gsttools/qgstvideorenderersink.cpp
@@ -141,6 +141,19 @@ bool QGstDefaultVideoRenderer::present(QAbstractVideoSurface *surface, GstBuffer
if (!videoBuffer)
videoBuffer = new QGstVideoBuffer(buffer, m_videoInfo);
+ auto meta = gst_buffer_get_video_crop_meta (buffer);
+ if (meta) {
+ QRect vp(meta->x, meta->y, meta->width, meta->height);
+ if (m_format.viewport() != vp) {
+#ifdef DEBUG_VIDEO_SURFACE_SINK
+ qDebug() << Q_FUNC_INFO << " Update viewport on Metadata: [" << meta->height << "x" << meta->width << " | " << meta->x << "x" << meta->y << "]";
+#endif
+ //Update viewport if data is not the same
+ m_format.setViewport(vp);
+ surface->start(m_format);
+ }
+ }
+
QVideoFrame frame(
videoBuffer,
m_format.frameSize(),
diff --git a/src/imports/multimedia/Video.qml b/src/imports/multimedia/Video.qml
index 19c437869..24fde22e1 100644
--- a/src/imports/multimedia/Video.qml
+++ b/src/imports/multimedia/Video.qml
@@ -38,7 +38,7 @@
****************************************************************************/
import QtQuick 2.0
-import QtMultimedia 5.11
+import QtMultimedia 5.13
/*!
\qmltype Video
@@ -125,6 +125,23 @@ Item {
property alias fillMode: videoOut.fillMode
/*!
+ \qmlproperty enumeration Video::flushMode
+
+ Set this property to define what \c Video should show
+ when playback is finished or stopped.
+
+ \list
+ \li VideoOutput.EmptyFrame - clears video output.
+ \li VideoOutput.FirstFrame - shows the first valid frame.
+ \li VideoOutput.LastFrame - shows the last valid frame.
+ \endlist
+
+ The default flush mode is EmptyFrame.
+ \since 5.15
+ */
+ property alias flushMode: videoOut.flushMode
+
+ /*!
\qmlproperty int Video::orientation
The orientation of the \c Video in degrees. Only multiples of 90
diff --git a/src/imports/multimedia/plugins.qmltypes b/src/imports/multimedia/plugins.qmltypes
index 870544160..06fb8918b 100644
--- a/src/imports/multimedia/plugins.qmltypes
+++ b/src/imports/multimedia/plugins.qmltypes
@@ -4,7 +4,7 @@ import QtQuick.tooling 1.2
// It is used for QML tooling purposes only.
//
// This file was auto-generated by:
-// 'qmlplugindump -nonrelocatable QtMultimedia 5.14'
+// 'qmlplugindump -nonrelocatable QtMultimedia 5.15'
Module {
dependencies: ["QtQuick 2.0"]
@@ -288,6 +288,24 @@ Module {
Property { name: "active"; type: "bool" }
}
Component {
+ name: "QAbstractVideoSurface"
+ prototype: "QObject"
+ Property { name: "nativeResolution"; type: "QSize"; isReadonly: true }
+ Signal {
+ name: "activeChanged"
+ Parameter { name: "active"; type: "bool" }
+ }
+ Signal {
+ name: "surfaceFormatChanged"
+ Parameter { name: "format"; type: "QVideoSurfaceFormat" }
+ }
+ Signal { name: "supportedFormatsChanged" }
+ Signal {
+ name: "nativeResolutionChanged"
+ Parameter { name: "resolution"; type: "QSize" }
+ }
+ }
+ Component {
name: "QCamera"
prototype: "QMediaObject"
Enum {
@@ -427,10 +445,11 @@ Module {
"QtMultimedia/Audio 5.9",
"QtMultimedia/MediaPlayer 5.0",
"QtMultimedia/MediaPlayer 5.11",
+ "QtMultimedia/MediaPlayer 5.15",
"QtMultimedia/MediaPlayer 5.6",
"QtMultimedia/MediaPlayer 5.9"
]
- exportMetaObjectRevisions: [0, 3, 1, 2, 0, 3, 1, 2]
+ exportMetaObjectRevisions: [0, 3, 1, 2, 0, 3, 15, 1, 2]
Enum {
name: "Status"
values: {
@@ -524,6 +543,7 @@ Module {
Property { name: "audioRole"; revision: 1; type: "AudioRole" }
Property { name: "customAudioRole"; revision: 3; type: "string" }
Property { name: "notifyInterval"; revision: 2; type: "int" }
+ Property { name: "videoOutput"; revision: 15; type: "QVariant" }
Signal { name: "playlistChanged"; revision: 1 }
Signal { name: "loopCountChanged" }
Signal { name: "paused" }
@@ -541,6 +561,7 @@ Module {
Parameter { name: "errorString"; type: "string" }
}
Signal { name: "notifyIntervalChanged"; revision: 2 }
+ Signal { name: "videoOutputChanged"; revision: 15 }
Method { name: "play" }
Method { name: "pause" }
Method { name: "stop" }
@@ -2025,9 +2046,10 @@ Module {
exports: [
"QtMultimedia/VideoOutput 5.0",
"QtMultimedia/VideoOutput 5.13",
+ "QtMultimedia/VideoOutput 5.15",
"QtMultimedia/VideoOutput 5.2"
]
- exportMetaObjectRevisions: [0, 13, 2]
+ exportMetaObjectRevisions: [0, 13, 15, 2]
Enum {
name: "FlushMode"
values: {
@@ -2052,6 +2074,13 @@ Module {
Property { name: "contentRect"; type: "QRectF"; isReadonly: true }
Property { name: "filters"; type: "QAbstractVideoFilter"; isList: true; isReadonly: true }
Property { name: "flushMode"; revision: 13; type: "FlushMode" }
+ Property {
+ name: "videoSurface"
+ revision: 15
+ type: "QAbstractVideoSurface"
+ isReadonly: true
+ isPointer: true
+ }
Signal {
name: "fillModeChanged"
Parameter { type: "QDeclarativeVideoOutput::FillMode" }
@@ -2124,6 +2153,7 @@ Module {
Parameter { name: "availability"; type: "QMultimedia::AvailabilityStatus" }
}
}
+ Component { name: "QSGVideoItemSurface"; prototype: "QAbstractVideoSurface" }
Component {
name: "QSoundEffect"
prototype: "QObject"
diff --git a/src/imports/multimedia/qdeclarativecamera.cpp b/src/imports/multimedia/qdeclarativecamera.cpp
index 7730c9900..3a9bc8fa2 100644
--- a/src/imports/multimedia/qdeclarativecamera.cpp
+++ b/src/imports/multimedia/qdeclarativecamera.cpp
@@ -59,9 +59,10 @@
QT_BEGIN_NAMESPACE
-void QDeclarativeCamera::_q_error(QCamera::Error errorCode)
+void QDeclarativeCamera::_q_errorOccurred(QCamera::Error errorCode)
{
emit error(Error(errorCode), errorString());
+ emit errorOccurred(Error(errorCode), errorString());
emit errorChanged();
}
@@ -197,7 +198,7 @@ QDeclarativeCamera::QDeclarativeCamera(QObject *parent) :
this, SIGNAL(lockStatusChanged()));
connect(m_camera, &QCamera::stateChanged, this, &QDeclarativeCamera::_q_updateState);
connect(m_camera, SIGNAL(statusChanged(QCamera::Status)), this, SIGNAL(cameraStatusChanged()));
- connect(m_camera, SIGNAL(error(QCamera::Error)), this, SLOT(_q_error(QCamera::Error)));
+ connect(m_camera, SIGNAL(errorOccurred(QCamera::Error)), this, SLOT(_q_errorOccurred(QCamera::Error)));
connect(m_camera, SIGNAL(availabilityChanged(QMultimedia::AvailabilityStatus)),
this, SLOT(_q_availabilityChanged(QMultimedia::AvailabilityStatus)));
@@ -420,7 +421,7 @@ QDeclarativeCamera::Error QDeclarativeCamera::errorCode() const
This property holds the last error string, if any.
- \sa error, errorCode
+ \sa errorOccurred, errorCode
*/
QString QDeclarativeCamera::errorString() const
{
@@ -752,11 +753,19 @@ void QDeclarativeCamera::setDigitalZoom(qreal value)
\value Camera.NotSupportedFeatureError
The feature is not supported.
- \sa error, errorString
+ \sa errorOccurred, errorString
*/
/*!
\qmlsignal QtMultimedia::Camera::error(errorCode, errorString)
+ \obsolete
+
+ Use errorOccurred() instead.
+*/
+
+/*!
+ \qmlsignal QtMultimedia::Camera::errorOccurred(errorCode, errorString)
+ \since 5.15
This signal is emitted when an error specified by \a errorCode occurs.
A descriptive string value is available in \a errorString.
diff --git a/src/imports/multimedia/qdeclarativecamera_p.h b/src/imports/multimedia/qdeclarativecamera_p.h
index 97afa5b53..41203ac80 100644
--- a/src/imports/multimedia/qdeclarativecamera_p.h
+++ b/src/imports/multimedia/qdeclarativecamera_p.h
@@ -308,7 +308,10 @@ public Q_SLOTS:
Q_SIGNALS:
void errorChanged();
+#if QT_DEPRECATED_SINCE(5,15)
void error(QDeclarativeCamera::Error errorCode, const QString &errorString);
+#endif
+ Q_REVISION(15) void errorOccurred(QDeclarativeCamera::Error errorCode, const QString &errorString);
Q_REVISION(1) void deviceIdChanged();
Q_REVISION(1) void positionChanged();
@@ -332,7 +335,7 @@ Q_SIGNALS:
private Q_SLOTS:
void _q_updateState(QCamera::State);
- void _q_error(QCamera::Error);
+ void _q_errorOccurred(QCamera::Error);
void _q_availabilityChanged(QMultimedia::AvailabilityStatus);
protected:
diff --git a/src/multimedia/audio/qaudiohelpers.cpp b/src/multimedia/audio/qaudiohelpers.cpp
index fae591477..1cd189d6e 100644
--- a/src/multimedia/audio/qaudiohelpers.cpp
+++ b/src/multimedia/audio/qaudiohelpers.cpp
@@ -103,19 +103,19 @@ template<class T> struct signedVersion {};
template<> struct signedVersion<quint8>
{
typedef qint8 TS;
- enum {offset = 0x80};
+ static constexpr int offset = 0x80;
};
template<> struct signedVersion<quint16>
{
typedef qint16 TS;
- enum {offset = 0x8000};
+ static constexpr int offset = 0x8000;
};
template<> struct signedVersion<quint32>
{
typedef qint32 TS;
- enum {offset = 0x80000000};
+ static constexpr uint offset = 0x80000000;
};
template<class T> void adjustUnsignedSamples(qreal factor, const void *src, void *dst, int samples)
diff --git a/src/multimedia/audio/qsamplecache_p.cpp b/src/multimedia/audio/qsamplecache_p.cpp
index 8c4fdc210..b293946cc 100644
--- a/src/multimedia/audio/qsamplecache_p.cpp
+++ b/src/multimedia/audio/qsamplecache_p.cpp
@@ -397,7 +397,7 @@ void QSample::load()
qDebug() << "QSample: load [" << m_url << "]";
#endif
m_stream = m_parent->networkAccessManager().get(QNetworkRequest(m_url));
- connect(m_stream, SIGNAL(error(QNetworkReply::NetworkError)), SLOT(decoderError()));
+ connect(m_stream, SIGNAL(errorOccurred(QNetworkReply::NetworkError)), SLOT(decoderError()));
m_waveDecoder = new QWaveDecoder(m_stream);
connect(m_waveDecoder, SIGNAL(formatKnown()), SLOT(decoderReady()));
connect(m_waveDecoder, SIGNAL(parsingError()), SLOT(decoderError()));
diff --git a/src/multimedia/audio/qsoundeffect_pulse_p.cpp b/src/multimedia/audio/qsoundeffect_pulse_p.cpp
index 0855c1f67..a141f7e36 100644
--- a/src/multimedia/audio/qsoundeffect_pulse_p.cpp
+++ b/src/multimedia/audio/qsoundeffect_pulse_p.cpp
@@ -528,6 +528,21 @@ qreal QSoundEffectPrivate::volume() const
return m_volume;
}
+static void volume_stream_flush_callback(pa_stream *s, int success, void *userdata)
+{
+ Q_UNUSED(s);
+ QSoundEffectRef *ref = reinterpret_cast<QSoundEffectRef *>(userdata);
+ QSoundEffectPrivate *self = ref->soundEffect();
+ ref->release();
+ if (!self)
+ return;
+
+ if (!success)
+ qWarning("QSoundEffect(pulseaudio): failed to drain");
+
+ QMetaObject::invokeMethod(self, "prepare", Qt::QueuedConnection);
+}
+
void QSoundEffectPrivate::setVolume(qreal volume)
{
QMutexLocker locker(&m_volumeLock);
@@ -537,6 +552,10 @@ void QSoundEffectPrivate::setVolume(qreal volume)
m_volume = qBound(qreal(0), volume, qreal(1));
locker.unlock();
+ if (!m_playing && m_pulseStream) {
+ PulseDaemonLocker locker;
+ pa_stream_flush(m_pulseStream, volume_stream_flush_callback, m_ref->getRef());
+ }
emit volumeChanged();
}
diff --git a/src/multimedia/camera/qcamera.cpp b/src/multimedia/camera/qcamera.cpp
index 40441f332..79412b961 100644
--- a/src/multimedia/camera/qcamera.cpp
+++ b/src/multimedia/camera/qcamera.cpp
@@ -109,6 +109,7 @@ void QCameraPrivate::_q_error(int error, const QString &errorString)
this->error = QCamera::Error(error);
this->errorString = errorString;
+ emit q->errorOccurred(this->error);
emit q->error(this->error);
}
@@ -823,7 +824,7 @@ void QCamera::setCaptureMode(QCamera::CaptureModes mode)
Starts the camera.
State is changed to QCamera::ActiveState if camera is started
- successfully, otherwise error() signal is emitted.
+ successfully, otherwise errorOccurred() signal is emitted.
While the camera state is changed to QCamera::ActiveState,
starting the camera service can be asynchronous with the actual
@@ -1271,6 +1272,14 @@ void QCamera::unlock()
/*!
\fn void QCamera::error(QCamera::Error value)
+ \obsolete
+
+ Use errorOccurred() instead.
+*/
+
+/*!
+ \fn void QCamera::errorOccurred(QCamera::Error value)
+ \since 5.15
Signal emitted when error state changes to \a value.
*/
diff --git a/src/multimedia/camera/qcamera.h b/src/multimedia/camera/qcamera.h
index b2582cd19..97cf4509b 100644
--- a/src/multimedia/camera/qcamera.h
+++ b/src/multimedia/camera/qcamera.h
@@ -242,7 +242,10 @@ Q_SIGNALS:
void lockStatusChanged(QCamera::LockStatus status, QCamera::LockChangeReason reason);
void lockStatusChanged(QCamera::LockType lock, QCamera::LockStatus status, QCamera::LockChangeReason reason);
+#if QT_DEPRECATED_SINCE(5,15)
void error(QCamera::Error);
+#endif
+ void errorOccurred(QCamera::Error);
private:
Q_DISABLE_COPY(QCamera)
diff --git a/src/multimedia/camera/qcameraimagecapture.cpp b/src/multimedia/camera/qcameraimagecapture.cpp
index 502812f76..7eb67daed 100644
--- a/src/multimedia/camera/qcameraimagecapture.cpp
+++ b/src/multimedia/camera/qcameraimagecapture.cpp
@@ -618,7 +618,8 @@ void QCameraImageCapture::cancelCapture()
/*!
\fn QCameraImageCapture::imageCaptured(int id, const QImage &preview);
- Signal emitted when the frame with request \a id was captured, but not processed and saved yet.
+ Signal emitted when QAbstractVideoSurface is used as a viewfinder and
+ the frame with request \a id was captured, but not processed and saved yet.
Frame \a preview can be displayed to user.
*/
@@ -634,13 +635,15 @@ void QCameraImageCapture::cancelCapture()
/*!
\fn QCameraImageCapture::imageAvailable(int id, const QVideoFrame &frame)
- Signal emitted when the \a frame with request \a id is available.
+ Signal emitted when QCameraImageCapture::CaptureToBuffer is set and
+ the \a frame with request \a id is available.
*/
/*!
\fn QCameraImageCapture::imageSaved(int id, const QString &fileName)
- Signal emitted when the frame with request \a id was saved to \a fileName.
+ Signal emitted when QCameraImageCapture::CaptureToFile is set and
+ the frame with request \a id was saved to \a fileName.
*/
QT_END_NAMESPACE
diff --git a/src/multimedia/configure.json b/src/multimedia/configure.json
index e9480dfc5..7f9b5d064 100644
--- a/src/multimedia/configure.json
+++ b/src/multimedia/configure.json
@@ -54,7 +54,7 @@
"test": "gstreamer",
"sources": [
{ "type": "pkgConfig",
- "args": "gstreamer-1.0 gstreamer-base-1.0 gstreamer-audio-1.0 gstreamer-video-1.0 gstreamer-pbutils-1.0" },
+ "args": "gstreamer-1.0 gstreamer-base-1.0 gstreamer-audio-1.0 gstreamer-video-1.0 gstreamer-pbutils-1.0 gstreamer-allocators-1.0" },
{ "libs": "-lgstreamer-1.0 -lgstbase-1.0 -lgstaudio-1.0 -lgstvideo-1.0 -lgstpbutils-1.0 -lglib-2.0 -lgobject-2.0",
"condition": "config.win32 || config.macos" },
{ "libs": "", "condition": "config.android && input.gstreamer != ''" }
@@ -109,18 +109,6 @@
{ "type": "pkgConfig", "args": "gstreamer-gl-1.0" }
]
},
- "gstreamer_imxcommon": {
- "label": "GStreamer i.MX common",
- "export": "gstreamer_imxcommon",
- "test": {
- "include": "gst/allocators/imx/phys_mem_meta.h"
- },
- "use": "gstreamer_1_0",
- "sources": [
- { "type": "pkgConfig",
- "args": "gstimxcommon" }
- ]
- },
"libresourceqt5": {
"label": "libresourceqt5",
"test": "resourcepolicy",
@@ -260,11 +248,6 @@
"condition": "features.opengl && features.gstreamer_1_0 && libs.gstreamer_gl_1_0",
"output": [ "privateFeature" ]
},
- "gstreamer_imxcommon": {
- "label": "GStreamer i.MX common",
- "condition": "(features.gstreamer_1_0 && libs.gstreamer_imxcommon)",
- "output": [ "privateFeature" ]
- },
"gpu_vivante": {
"label": "Vivante GPU",
"condition": "features.gui && features.opengles2 && tests.gpu_vivante",
diff --git a/src/multimedia/controls/qmedianetworkaccesscontrol.cpp b/src/multimedia/controls/qmedianetworkaccesscontrol.cpp
index 07714da62..bd289cc29 100644
--- a/src/multimedia/controls/qmedianetworkaccesscontrol.cpp
+++ b/src/multimedia/controls/qmedianetworkaccesscontrol.cpp
@@ -39,12 +39,15 @@
#include "qmedianetworkaccesscontrol.h"
+#ifndef QT_NO_BEARERMANAGEMENT
+
QT_BEGIN_NAMESPACE
/*!
\class QMediaNetworkAccessControl
\obsolete
\brief The QMediaNetworkAccessControl class allows the setting of the Network Access Point for media related activities.
+ \obsolete
\inmodule QtMultimedia
@@ -93,10 +96,13 @@ QMediaNetworkAccessControl::~QMediaNetworkAccessControl()
/*!
\fn QMediaNetworkAccessControl::configurationChanged(const QNetworkConfiguration &configuration)
+
This signal is emitted when the current active network configuration changes
to \a configuration.
*/
QT_END_NAMESPACE
+#endif
+
#include "moc_qmedianetworkaccesscontrol.cpp"
diff --git a/src/multimedia/controls/qmedianetworkaccesscontrol.h b/src/multimedia/controls/qmedianetworkaccesscontrol.h
index daae4f389..681ef61b6 100644
--- a/src/multimedia/controls/qmedianetworkaccesscontrol.h
+++ b/src/multimedia/controls/qmedianetworkaccesscontrol.h
@@ -41,17 +41,25 @@
#ifndef QMEDIANETWORKACCESSCONTROL_H
#define QMEDIANETWORKACCESSCONTROL_H
+#if 0
+#pragma qt_class(QMediaNetworkAccessControl)
+#endif
+
#include <QtMultimedia/qmediacontrol.h>
#include <QtCore/qlist.h>
#include <QtNetwork/qnetworkconfiguration.h>
+#ifndef QT_NO_BEARERMANAGEMENT
+
QT_BEGIN_NAMESPACE
+QT_WARNING_PUSH
+QT_WARNING_DISABLE_DEPRECATED
+
// Required for QDoc workaround
class QString;
-
-class Q_MULTIMEDIA_EXPORT QMediaNetworkAccessControl : public QMediaControl
+class QT_DEPRECATED_VERSION_5_15 Q_MULTIMEDIA_EXPORT QMediaNetworkAccessControl : public QMediaControl
{
Q_OBJECT
public:
@@ -71,7 +79,10 @@ protected:
#define QMediaNetworkAccessControl_iid "org.qt-project.qt.medianetworkaccesscontrol/5.0"
Q_MEDIA_DECLARE_CONTROL(QMediaNetworkAccessControl, QMediaNetworkAccessControl_iid)
+QT_WARNING_POP
+
QT_END_NAMESPACE
+#endif
#endif
diff --git a/src/multimedia/doc/snippets/multimedia-snippets/media.cpp b/src/multimedia/doc/snippets/multimedia-snippets/media.cpp
index 8ec7cb072..7fd6259ea 100644
--- a/src/multimedia/doc/snippets/multimedia-snippets/media.cpp
+++ b/src/multimedia/doc/snippets/multimedia-snippets/media.cpp
@@ -56,6 +56,7 @@
#include "qaudioprobe.h"
#include "qaudiorecorder.h"
#include "qvideoprobe.h"
+#include <QAbstractVideoSurface>
class MediaExample : public QObject {
Q_OBJECT
@@ -197,6 +198,36 @@ void MediaExample::MediaPlayer()
player->play();
//! [Pipeline]
+ //! [Pipeline Surface]
+ class Surface : public QAbstractVideoSurface
+ {
+ public:
+ Surface(QObject *p) : QAbstractVideoSurface(p) { }
+ QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType) const override
+ {
+ // Make sure that the driver supports this pixel format.
+ return QList<QVideoFrame::PixelFormat>() << QVideoFrame::Format_YUYV;
+ }
+
+ // Video frames are handled here.
+ bool present(const QVideoFrame &) override { return true; }
+ };
+
+ player = new QMediaPlayer;
+ player->setVideoOutput(new Surface(player));
+ player->setMedia(QUrl("gst-pipeline: videotestsrc ! qtvideosink"));
+ player->play();
+ //! [Pipeline Surface]
+
+ //! [Pipeline Widget]
+ player = new QMediaPlayer;
+ videoWidget = new QVideoWidget;
+ videoWidget->show();
+ player->setVideoOutput(videoWidget);
+ player->setMedia(QUrl("gst-pipeline: videotestsrc ! xvimagesink name=\"qtvideosink\""));
+ player->play();
+ //! [Pipeline Widget]
+
//! [Pipeline appsrc]
QImage img("images/qt-logo.png");
img = img.convertToFormat(QImage::Format_ARGB32);
diff --git a/src/multimedia/doc/src/cameraoverview.qdoc b/src/multimedia/doc/src/cameraoverview.qdoc
index 6962c2c48..69631fdab 100644
--- a/src/multimedia/doc/src/cameraoverview.qdoc
+++ b/src/multimedia/doc/src/cameraoverview.qdoc
@@ -286,7 +286,7 @@ for all of these, so you shouldn't need to adjust them unless the user wants a s
If you're taking a series of images (for example, to stitch them together for
a panoramic image), you should lock the image processing settings so that all the
-images taken appear similar with \e {QCamera::lock(QCamera::LockWhiteBalance)}/
+images taken appear similar with \e {QCamera::searchAndLock(QCamera::LockWhiteBalance)}/
\section3 Canceling Asynchronous Operations
diff --git a/src/multimedia/doc/src/platform-notes-windows.qdoc b/src/multimedia/doc/src/platform-notes-windows.qdoc
index e86d6a276..a69e96d99 100644
--- a/src/multimedia/doc/src/platform-notes-windows.qdoc
+++ b/src/multimedia/doc/src/platform-notes-windows.qdoc
@@ -42,6 +42,11 @@ was introduced in Windows Vista as a replacement for DirectShow and other
multimedia APIs. Consequently, WMF plugin in Qt is supported only for
Windows Vista and later versions of the operating system.
+The environment variable \c QT_MULTIMEDIA_PREFERRED_PLUGINS can be used to
+control the priority of the plugins. For example, setting it to
+"windowsmediafoundation" or "directshow" will cause the corresponding plugin
+to be the preferred one.
+
\section1 Limitations
The WMF plugin in Qt does not currently provide a camera backend. Instead,
diff --git a/src/multimedia/doc/src/qtmultimedia-index.qdoc b/src/multimedia/doc/src/qtmultimedia-index.qdoc
index c2734f4a9..abd6a62cc 100644
--- a/src/multimedia/doc/src/qtmultimedia-index.qdoc
+++ b/src/multimedia/doc/src/qtmultimedia-index.qdoc
@@ -132,9 +132,6 @@
\li QMediaPlaylist
\li List of media to be played.
\row
- \li QRadioTuner
- \li Access radio device.
- \row
\li QAbstractVideoSurface
\li Base class for video presentation.
\endtable
diff --git a/src/multimedia/playback/qmediaplayer.cpp b/src/multimedia/playback/qmediaplayer.cpp
index 9d26e8d38..382d8b30b 100644
--- a/src/multimedia/playback/qmediaplayer.cpp
+++ b/src/multimedia/playback/qmediaplayer.cpp
@@ -59,6 +59,7 @@
#include <QtCore/qpointer.h>
#include <QtCore/qfileinfo.h>
#include <QtCore/qtemporaryfile.h>
+#include <QDir>
QT_BEGIN_NAMESPACE
@@ -115,7 +116,9 @@ public:
, audioRoleControl(nullptr)
, customAudioRoleControl(nullptr)
, playlist(nullptr)
+#ifndef QT_NO_BEARERMANAGEMENT
, networkAccessControl(nullptr)
+#endif
, state(QMediaPlayer::StoppedState)
, status(QMediaPlayer::UnknownMediaStatus)
, error(QMediaPlayer::NoError)
@@ -132,7 +135,12 @@ public:
QPointer<QObject> videoOutput;
QMediaPlaylist *playlist;
+#ifndef QT_NO_BEARERMANAGEMENT
+QT_WARNING_PUSH
+QT_WARNING_DISABLE_DEPRECATED
QMediaNetworkAccessControl *networkAccessControl;
+QT_WARNING_POP
+#endif
QVideoSurfaceOutput surfaceOutput;
QMediaContent qrcMedia;
QScopedPointer<QFile> qrcFile;
@@ -598,7 +606,12 @@ QMediaPlayer::QMediaPlayer(QObject *parent, QMediaPlayer::Flags flags):
d->error = ServiceMissingError;
} else {
d->control = qobject_cast<QMediaPlayerControl*>(d->service->requestControl(QMediaPlayerControl_iid));
+#ifndef QT_NO_BEARERMANAGEMENT
+QT_WARNING_PUSH
+QT_WARNING_DISABLE_DEPRECATED
d->networkAccessControl = qobject_cast<QMediaNetworkAccessControl*>(d->service->requestControl(QMediaNetworkAccessControl_iid));
+QT_WARNING_POP
+#endif
if (d->control != nullptr) {
connect(d->control, SIGNAL(mediaChanged(QMediaContent)), SLOT(_q_handleMediaChanged(QMediaContent)));
connect(d->control, SIGNAL(stateChanged(QMediaPlayer::State)), SLOT(_q_stateChanged(QMediaPlayer::State)));
@@ -642,10 +655,15 @@ QMediaPlayer::QMediaPlayer(QObject *parent, QMediaPlayer::Flags flags):
}
}
}
+#ifndef QT_NO_BEARERMANAGEMENT
if (d->networkAccessControl != nullptr) {
+QT_WARNING_PUSH
+QT_WARNING_DISABLE_DEPRECATED
connect(d->networkAccessControl, &QMediaNetworkAccessControl::configurationChanged,
this, &QMediaPlayer::networkConfigurationChanged);
+QT_WARNING_POP
}
+#endif
}
}
@@ -730,7 +748,12 @@ void QMediaPlayer::setPlaylist(QMediaPlaylist *playlist)
setMedia(m);
}
+#ifndef QT_NO_BEARERMANAGEMENT
+QT_WARNING_PUSH
+QT_WARNING_DISABLE_DEPRECATED
/*!
+ \obsolete
+
Sets the network access points for remote media playback.
\a configurations contains, in ascending preferential order, a list of
configuration that can be used for network access.
@@ -744,6 +767,8 @@ void QMediaPlayer::setNetworkConfigurations(const QList<QNetworkConfiguration> &
if (d->networkAccessControl)
d->networkAccessControl->setConfigurations(configurations);
}
+QT_WARNING_POP
+#endif
QMediaPlayer::State QMediaPlayer::state() const
{
@@ -869,7 +894,12 @@ QString QMediaPlayer::errorString() const
return d_func()->errorString;
}
+#ifndef QT_NO_BEARERMANAGEMENT
+QT_WARNING_PUSH
+QT_WARNING_DISABLE_DEPRECATED
/*!
+ \obsolete
+
Returns the current network access point in use.
If a default constructed QNetworkConfiguration is returned,
this feature is not available or none of the
@@ -884,6 +914,8 @@ QNetworkConfiguration QMediaPlayer::currentNetworkConfiguration() const
return QNetworkConfiguration();
}
+QT_WARNING_POP
+#endif
//public Q_SLOTS:
/*!
@@ -1023,8 +1055,17 @@ void QMediaPlayer::setPlaybackRate(qreal rate)
\snippet multimedia-snippets/media.cpp Pipeline
- If the pipeline contains a video sink element named \c qtvideosink,
- current QVideoWidget can be used to render the video.
+ If QAbstractVideoSurface is used as the video output,
+ \c qtvideosink can be used as a video sink element directly in the pipeline.
+ After that the surface will receive the video frames in QAbstractVideoSurface::present().
+
+ \snippet multimedia-snippets/media.cpp Pipeline Surface
+
+ If QVideoWidget is used as the video output
+ and the pipeline contains a video sink element named \c qtvideosink,
+ current QVideoWidget will be used to render the video.
+
+ \snippet multimedia-snippets/media.cpp Pipeline Widget
If the pipeline contains appsrc element, it will be used to push data from \a stream.
@@ -1308,7 +1349,7 @@ QStringList QMediaPlayer::supportedCustomAudioRoles() const
Defines the status of a media player's current media.
\value UnknownMediaStatus The status of the media cannot be determined.
- \value NoMedia The is no current media. The player is in the StoppedState.
+ \value NoMedia There is no current media. The player is in the StoppedState.
\value LoadingMedia The current media is being loaded. The player may be in any state.
\value LoadedMedia The current media has been loaded. The player is in the StoppedState.
\value StalledMedia Playback of the current media has stalled due to insufficient buffering or
@@ -1654,6 +1695,7 @@ QStringList QMediaPlayer::supportedCustomAudioRoles() const
/*!
\fn void QMediaPlayer::networkConfigurationChanged(const QNetworkConfiguration &configuration)
+ \obsolete
Signals that the network access point in use has changed to \a configuration; all subsequent network access will use this configuration.
*/
diff --git a/src/multimedia/playback/qmediaplayer.h b/src/multimedia/playback/qmediaplayer.h
index 7ebed84da..c579c7f40 100644
--- a/src/multimedia/playback/qmediaplayer.h
+++ b/src/multimedia/playback/qmediaplayer.h
@@ -40,6 +40,7 @@
#ifndef QMEDIAPLAYER_H
#define QMEDIAPLAYER_H
+#include <QtMultimedia/qtmultimediaglobal.h>
#include <QtMultimedia/qmediaobject.h>
#include <QtMultimedia/qmediacontent.h>
#include <QtMultimedia/qmediaenumdebug.h>
@@ -157,7 +158,12 @@ public:
Error error() const;
QString errorString() const;
- QNetworkConfiguration currentNetworkConfiguration() const;
+#ifndef QT_NO_BEARERMANAGEMENT
+QT_WARNING_PUSH
+QT_WARNING_DISABLE_DEPRECATED
+ QT_DEPRECATED_VERSION_5_15 QNetworkConfiguration currentNetworkConfiguration() const;
+QT_WARNING_POP
+#endif
QMultimedia::AvailabilityStatus availability() const override;
@@ -182,7 +188,16 @@ public Q_SLOTS:
void setMedia(const QMediaContent &media, QIODevice *stream = nullptr);
void setPlaylist(QMediaPlaylist *playlist);
- void setNetworkConfigurations(const QList<QNetworkConfiguration> &configurations);
+#ifndef QT_NO_BEARERMANAGEMENT
+#ifndef Q_MOC_RUN // moc fails to parse the expanded macro
+QT_WARNING_PUSH
+QT_WARNING_DISABLE_DEPRECATED
+#endif
+ QT_DEPRECATED_VERSION_5_15 void setNetworkConfigurations(const QList<QNetworkConfiguration> &configurations);
+#ifndef Q_MOC_RUN // moc fails to parse the expanded macro
+QT_WARNING_POP
+#endif
+#endif
Q_SIGNALS:
void mediaChanged(const QMediaContent &media);
@@ -209,7 +224,16 @@ Q_SIGNALS:
void error(QMediaPlayer::Error error);
- void networkConfigurationChanged(const QNetworkConfiguration &configuration);
+#ifndef QT_NO_BEARERMANAGEMENT
+#ifndef Q_MOC_RUN // moc fails to parse the expanded macro
+QT_WARNING_PUSH
+QT_WARNING_DISABLE_DEPRECATED
+#endif
+ QT_DEPRECATED_VERSION_5_15 void networkConfigurationChanged(const QNetworkConfiguration &configuration);
+#ifndef Q_MOC_RUN // moc fails to parse the expanded macro
+QT_WARNING_POP
+#endif
+#endif
public:
bool bind(QObject *) override;
void unbind(QObject *) override;
diff --git a/src/multimedia/playback/qplaylistfileparser.cpp b/src/multimedia/playback/qplaylistfileparser.cpp
index 9af447032..d3cd60476 100644
--- a/src/multimedia/playback/qplaylistfileparser.cpp
+++ b/src/multimedia/playback/qplaylistfileparser.cpp
@@ -552,7 +552,7 @@ void QPlaylistFileParser::start(const QNetworkRequest& request, const QString &m
d->m_source.reset(d->m_mgr.get(request));
connect(d->m_source.data(), SIGNAL(readyRead()), this, SLOT(handleData()));
connect(d->m_source.data(), SIGNAL(finished()), this, SLOT(handleData()));
- connect(d->m_source.data(), SIGNAL(error(QNetworkReply::NetworkError)), this, SLOT(handleError()));
+ connect(d->m_source.data(), SIGNAL(errorOccurred(QNetworkReply::NetworkError)), this, SLOT(handleError()));
if (url.isLocalFile())
d->handleData();
diff --git a/src/multimedia/qmediapluginloader.cpp b/src/multimedia/qmediapluginloader.cpp
index 88bd591c2..3e9e6cc21 100644
--- a/src/multimedia/qmediapluginloader.cpp
+++ b/src/multimedia/qmediapluginloader.cpp
@@ -104,7 +104,7 @@ QList<QObject*> QMediaPluginLoader::instances(QString const &key)
static const bool showDebug = qEnvironmentVariableIntValue("QT_DEBUG_PLUGINS");
static const QStringList preferredPlugins =
- qEnvironmentVariable("QT_MULTIMEDIA_PREFERRED_PLUGINS").split(QLatin1Char(','), QString::SkipEmptyParts);
+ qEnvironmentVariable("QT_MULTIMEDIA_PREFERRED_PLUGINS").split(QLatin1Char(','), Qt::SkipEmptyParts);
for (int i = preferredPlugins.size() - 1; i >= 0; --i) {
auto name = preferredPlugins[i];
bool found = false;
diff --git a/src/multimedia/video/qabstractvideobuffer.cpp b/src/multimedia/video/qabstractvideobuffer.cpp
index f0dd6d2eb..8762150d0 100644
--- a/src/multimedia/video/qabstractvideobuffer.cpp
+++ b/src/multimedia/video/qabstractvideobuffer.cpp
@@ -96,12 +96,15 @@ int QAbstractVideoBufferPrivate::map(
Identifies the type of a video buffers handle.
\value NoHandle The buffer has no handle, its data can only be accessed by mapping the buffer.
- \value GLTextureHandle The handle of the buffer is an OpenGL texture ID.
+ \value GLTextureHandle The handle of the buffer is an OpenGL texture ID
+ of an undefined and platform dependent target type.
\value XvShmImageHandle The handle contains pointer to shared memory XVideo image.
\value CoreImageHandle The handle contains pointer to \macos CIImage.
\value QPixmapHandle The handle of the buffer is a QPixmap.
\value EGLImageHandle The handle of the buffer is an EGLImageKHR.
\value UserHandle Start value for user defined handle types.
+ \value GLTextureRectangleHandle The handle of the buffer is an OpenGL texture ID
+ of target type \c GL_TEXTURE_RECTANGLE.
\sa handleType()
*/
diff --git a/src/multimedia/video/qabstractvideobuffer.h b/src/multimedia/video/qabstractvideobuffer.h
index 2352c0f3d..a3afc18e4 100644
--- a/src/multimedia/video/qabstractvideobuffer.h
+++ b/src/multimedia/video/qabstractvideobuffer.h
@@ -64,6 +64,7 @@ public:
CoreImageHandle,
QPixmapHandle,
EGLImageHandle,
+ GLTextureRectangleHandle,
UserHandle = 1000
};
diff --git a/src/multimedia/video/qmemoryvideobuffer.cpp b/src/multimedia/video/qmemoryvideobuffer.cpp
index e05210d9d..febcd66c8 100644
--- a/src/multimedia/video/qmemoryvideobuffer.cpp
+++ b/src/multimedia/video/qmemoryvideobuffer.cpp
@@ -101,7 +101,7 @@ uchar *QMemoryVideoBuffer::map(MapMode mode, int *numBytes, int *bytesPerLine)
{
Q_D(QMemoryVideoBuffer);
- if (d->mapMode == NotMapped && d->data.data() && mode != NotMapped) {
+ if (d->mapMode == NotMapped && d->data.size() && mode != NotMapped) {
d->mapMode = mode;
if (numBytes)
diff --git a/src/multimediawidgets/multimediawidgets.pro b/src/multimediawidgets/multimediawidgets.pro
index 57d4194ec..1919e8107 100644
--- a/src/multimediawidgets/multimediawidgets.pro
+++ b/src/multimediawidgets/multimediawidgets.pro
@@ -1,6 +1,7 @@
# distinct from Qt Multimedia
TARGET = QtMultimediaWidgets
-QT = core gui multimedia-private widgets-private
+QT = core gui multimedia widgets-private
+QT_PRIVATE += multimedia-private
qtHaveModule(opengl): \
QT_PRIVATE += opengl
diff --git a/src/plugins/alsa/qalsaaudiodeviceinfo.h b/src/plugins/alsa/qalsaaudiodeviceinfo.h
index 65675df54..cdf08bfab 100644
--- a/src/plugins/alsa/qalsaaudiodeviceinfo.h
+++ b/src/plugins/alsa/qalsaaudiodeviceinfo.h
@@ -79,15 +79,15 @@ public:
bool testSettings(const QAudioFormat& format) const;
void updateLists();
- QAudioFormat preferredFormat() const;
- bool isFormatSupported(const QAudioFormat& format) const;
- QString deviceName() const;
- QStringList supportedCodecs();
- QList<int> supportedSampleRates();
- QList<int> supportedChannelCounts();
- QList<int> supportedSampleSizes();
- QList<QAudioFormat::Endian> supportedByteOrders();
- QList<QAudioFormat::SampleType> supportedSampleTypes();
+ QAudioFormat preferredFormat() const override;
+ bool isFormatSupported(const QAudioFormat& format) const override;
+ QString deviceName() const override;
+ QStringList supportedCodecs() override;
+ QList<int> supportedSampleRates() override;
+ QList<int> supportedChannelCounts() override;
+ QList<int> supportedSampleSizes() override;
+ QList<QAudioFormat::Endian> supportedByteOrders() override;
+ QList<QAudioFormat::SampleType> supportedSampleTypes() override;
static QByteArray defaultDevice(QAudio::Mode mode);
static QList<QByteArray> availableDevices(QAudio::Mode);
static QString deviceFromCardName(const QString &card);
diff --git a/src/plugins/alsa/qalsaaudioinput.h b/src/plugins/alsa/qalsaaudioinput.h
index fa9c954d7..62e1be039 100644
--- a/src/plugins/alsa/qalsaaudioinput.h
+++ b/src/plugins/alsa/qalsaaudioinput.h
@@ -103,26 +103,26 @@ public:
qint64 read(char* data, qint64 len);
- void start(QIODevice* device);
- QIODevice* start();
- void stop();
- void reset();
- void suspend();
- void resume();
- int bytesReady() const;
- int periodSize() const;
- void setBufferSize(int value);
- int bufferSize() const;
- void setNotifyInterval(int milliSeconds);
- int notifyInterval() const;
- qint64 processedUSecs() const;
- qint64 elapsedUSecs() const;
- QAudio::Error error() const;
- QAudio::State state() const;
- void setFormat(const QAudioFormat& fmt);
- QAudioFormat format() const;
- void setVolume(qreal);
- qreal volume() const;
+ void start(QIODevice* device) override;
+ QIODevice* start() override;
+ void stop() override;
+ void reset() override;
+ void suspend() override;
+ void resume() override;
+ int bytesReady() const override;
+ int periodSize() const override;
+ void setBufferSize(int value) override;
+ int bufferSize() const override;
+ void setNotifyInterval(int milliSeconds) override;
+ int notifyInterval() const override;
+ qint64 processedUSecs() const override;
+ qint64 elapsedUSecs() const override;
+ QAudio::Error error() const override;
+ QAudio::State state() const override;
+ void setFormat(const QAudioFormat& fmt) override;
+ QAudioFormat format() const override;
+ void setVolume(qreal) override;
+ qreal volume() const override;
bool resuming;
snd_pcm_t* handle;
qint64 totalTimeValue;
@@ -171,8 +171,8 @@ public:
AlsaInputPrivate(QAlsaAudioInput* audio);
~AlsaInputPrivate();
- qint64 readData( char* data, qint64 len);
- qint64 writeData(const char* data, qint64 len);
+ qint64 readData( char* data, qint64 len) override;
+ qint64 writeData(const char* data, qint64 len) override;
void trigger();
private:
diff --git a/src/plugins/alsa/qalsaaudiooutput.cpp b/src/plugins/alsa/qalsaaudiooutput.cpp
index 5c8ae171c..ee5aee989 100644
--- a/src/plugins/alsa/qalsaaudiooutput.cpp
+++ b/src/plugins/alsa/qalsaaudiooutput.cpp
@@ -707,7 +707,7 @@ bool QAlsaAudioOutput::deviceReady()
if(l > 0) {
// Got some data to output
- if(deviceState != QAudio::ActiveState)
+ if (deviceState != QAudio::ActiveState && deviceState != QAudio::IdleState)
return true;
qint64 bytesWritten = write(audioBuffer,l);
if (bytesWritten != l)
diff --git a/src/plugins/alsa/qalsaaudiooutput.h b/src/plugins/alsa/qalsaaudiooutput.h
index 8002322cb..72b9c2e4c 100644
--- a/src/plugins/alsa/qalsaaudiooutput.h
+++ b/src/plugins/alsa/qalsaaudiooutput.h
@@ -77,26 +77,26 @@ public:
qint64 write( const char *data, qint64 len );
- void start(QIODevice* device);
- QIODevice* start();
- void stop();
- void reset();
- void suspend();
- void resume();
- int bytesFree() const;
- int periodSize() const;
- void setBufferSize(int value);
- int bufferSize() const;
- void setNotifyInterval(int milliSeconds);
- int notifyInterval() const;
- qint64 processedUSecs() const;
- qint64 elapsedUSecs() const;
- QAudio::Error error() const;
- QAudio::State state() const;
- void setFormat(const QAudioFormat& fmt);
- QAudioFormat format() const;
- void setVolume(qreal);
- qreal volume() const;
+ void start(QIODevice* device) override;
+ QIODevice* start() override;
+ void stop() override;
+ void reset() override;
+ void suspend() override;
+ void resume() override;
+ int bytesFree() const override;
+ int periodSize() const override;
+ void setBufferSize(int value) override;
+ int bufferSize() const override;
+ void setNotifyInterval(int milliSeconds) override;
+ int notifyInterval() const override;
+ qint64 processedUSecs() const override;
+ qint64 elapsedUSecs() const override;
+ QAudio::Error error() const override;
+ QAudio::State state() const override;
+ void setFormat(const QAudioFormat& fmt) override;
+ QAudioFormat format() const override;
+ void setVolume(qreal) override;
+ qreal volume() const override;
QIODevice* audioSource;
@@ -151,8 +151,8 @@ public:
AlsaOutputPrivate(QAlsaAudioOutput* audio);
~AlsaOutputPrivate();
- qint64 readData( char* data, qint64 len);
- qint64 writeData(const char* data, qint64 len);
+ qint64 readData( char* data, qint64 len) override;
+ qint64 writeData(const char* data, qint64 len) override;
private:
QAlsaAudioOutput *audioDevice;
diff --git a/src/plugins/android/src/mediacapture/qandroidcapturesession.cpp b/src/plugins/android/src/mediacapture/qandroidcapturesession.cpp
index 7cc3ad619..c0484a139 100644
--- a/src/plugins/android/src/mediacapture/qandroidcapturesession.cpp
+++ b/src/plugins/android/src/mediacapture/qandroidcapturesession.cpp
@@ -188,19 +188,30 @@ void QAndroidCaptureSession::setState(QMediaRecorder::State state)
start();
break;
case QMediaRecorder::PausedState:
- // Not supported by Android API
- qWarning("QMediaRecorder::PausedState is not supported on Android");
+ pause();
break;
}
}
void QAndroidCaptureSession::start()
{
- if (m_state == QMediaRecorder::RecordingState || m_status != QMediaRecorder::LoadedStatus)
+ if (m_state == QMediaRecorder::RecordingState
+ || (m_status != QMediaRecorder::LoadedStatus && m_status != QMediaRecorder::PausedStatus))
return;
setStatus(QMediaRecorder::StartingStatus);
+ if (m_state == QMediaRecorder::PausedState) {
+ if (!m_mediaRecorder || !m_mediaRecorder->resume()) {
+ emit error(QMediaRecorder::FormatError, QLatin1String("Unable to resume the media recorder."));
+ if (m_cameraSession)
+ restartViewfinder();
+ } else {
+ updateStartState();
+ }
+ return;
+ }
+
if (m_mediaRecorder) {
m_mediaRecorder->release();
delete m_mediaRecorder;
@@ -289,7 +300,11 @@ void QAndroidCaptureSession::start()
restartViewfinder();
return;
}
+ updateStartState();
+}
+void QAndroidCaptureSession::updateStartState()
+{
m_elapsedTime.start();
m_notifyTimer.start();
updateDuration();
@@ -302,9 +317,20 @@ void QAndroidCaptureSession::start()
m_cameraSession->camera()->setupPreviewFrameCallback();
}
+ QMediaRecorder::State oldState = m_state;
+ QMediaRecorder::Status oldStatus = m_status;
+
m_state = QMediaRecorder::RecordingState;
- emit stateChanged(m_state);
- setStatus(QMediaRecorder::RecordingStatus);
+ m_status = QMediaRecorder::RecordingStatus;
+
+ m_actualOutputLocation = m_usedOutputLocation;
+ emit actualLocationChanged(m_actualOutputLocation);
+
+ if (m_state != oldState)
+ emit stateChanged(m_state);
+
+ if (m_status != oldStatus)
+ emit statusChanged(m_status);
}
void QAndroidCaptureSession::stop(bool error)
@@ -317,6 +343,7 @@ void QAndroidCaptureSession::stop(bool error)
m_mediaRecorder->stop();
m_notifyTimer.stop();
updateDuration();
+ m_previousElapsedTime = 0;
m_elapsedTime.invalidate();
m_mediaRecorder->release();
delete m_mediaRecorder;
@@ -347,6 +374,23 @@ void QAndroidCaptureSession::stop(bool error)
setStatus(QMediaRecorder::LoadedStatus);
}
+void QAndroidCaptureSession::pause()
+{
+ if (m_state == QMediaRecorder::PausedState || m_mediaRecorder == 0)
+ return;
+
+ setStatus(QMediaRecorder::PausedStatus);
+
+ m_mediaRecorder->pause();
+ m_notifyTimer.stop();
+ updateDuration();
+ m_previousElapsedTime = m_duration;
+ m_elapsedTime.invalidate();
+
+ m_state = QMediaRecorder::PausedState;
+ emit stateChanged(m_state);
+}
+
void QAndroidCaptureSession::setStatus(QMediaRecorder::Status status)
{
if (m_status == status)
@@ -503,7 +547,7 @@ void QAndroidCaptureSession::restartViewfinder()
void QAndroidCaptureSession::updateDuration()
{
if (m_elapsedTime.isValid())
- m_duration = m_elapsedTime.elapsed();
+ m_duration = m_elapsedTime.elapsed() + m_previousElapsedTime;
emit durationChanged(m_duration);
}
diff --git a/src/plugins/android/src/mediacapture/qandroidcapturesession.h b/src/plugins/android/src/mediacapture/qandroidcapturesession.h
index 8cfb9ad2a..cee148a4c 100644
--- a/src/plugins/android/src/mediacapture/qandroidcapturesession.h
+++ b/src/plugins/android/src/mediacapture/qandroidcapturesession.h
@@ -136,7 +136,9 @@ private:
CaptureProfile getProfile(int id);
void start();
+ void updateStartState();
void stop(bool error = false);
+ void pause();
void setStatus(QMediaRecorder::Status status);
@@ -154,6 +156,7 @@ private:
QElapsedTimer m_elapsedTime;
QTimer m_notifyTimer;
qint64 m_duration;
+ qint64 m_previousElapsedTime = 0;
QMediaRecorder::State m_state;
QMediaRecorder::Status m_status;
diff --git a/src/plugins/android/src/mediaplayer/qandroidmetadatareadercontrol.cpp b/src/plugins/android/src/mediaplayer/qandroidmetadatareadercontrol.cpp
index ef86af896..66eafc765 100644
--- a/src/plugins/android/src/mediaplayer/qandroidmetadatareadercontrol.cpp
+++ b/src/plugins/android/src/mediaplayer/qandroidmetadatareadercontrol.cpp
@@ -176,12 +176,12 @@ void QAndroidMetaDataReaderControl::extractMetadata(QAndroidMetaDataReaderContro
if (!string.isNull()) {
metadata.insert(isVideo ? QMediaMetaData::LeadPerformer
: QMediaMetaData::ContributingArtist,
- string.split('/', QString::SkipEmptyParts));
+ string.split('/', Qt::SkipEmptyParts));
}
string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Author);
if (!string.isNull())
- metadata.insert(QMediaMetaData::Author, string.split('/', QString::SkipEmptyParts));
+ metadata.insert(QMediaMetaData::Author, string.split('/', Qt::SkipEmptyParts));
string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Bitrate);
if (!string.isNull()) {
@@ -196,7 +196,7 @@ void QAndroidMetaDataReaderControl::extractMetadata(QAndroidMetaDataReaderContro
string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Composer);
if (!string.isNull())
- metadata.insert(QMediaMetaData::Composer, string.split('/', QString::SkipEmptyParts));
+ metadata.insert(QMediaMetaData::Composer, string.split('/', Qt::SkipEmptyParts));
string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Date);
if (!string.isNull())
@@ -231,7 +231,7 @@ void QAndroidMetaDataReaderControl::extractMetadata(QAndroidMetaDataReaderContro
string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Writer);
if (!string.isNull())
- metadata.insert(QMediaMetaData::Writer, string.split('/', QString::SkipEmptyParts));
+ metadata.insert(QMediaMetaData::Writer, string.split('/', Qt::SkipEmptyParts));
string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Year);
if (!string.isNull())
diff --git a/src/plugins/android/src/wrappers/jni/androidcamera.cpp b/src/plugins/android/src/wrappers/jni/androidcamera.cpp
index 3ea7bc773..50baaed21 100644
--- a/src/plugins/android/src/wrappers/jni/androidcamera.cpp
+++ b/src/plugins/android/src/wrappers/jni/androidcamera.cpp
@@ -797,6 +797,12 @@ void AndroidCamera::getCameraInfo(int id, AndroidCameraInfo *info)
default:
break;
}
+ // Append a number to allow correct access to cameras on systems with two
+ // (or more) front/back cameras
+ if (id > 1) {
+ info->name.append(QByteArray::number(id));
+ info->description.append(QString(" %1").arg(id));
+ }
}
void AndroidCamera::startPreview()
diff --git a/src/plugins/android/src/wrappers/jni/androidmediaplayer.cpp b/src/plugins/android/src/wrappers/jni/androidmediaplayer.cpp
index f899481f0..de8422b86 100644
--- a/src/plugins/android/src/wrappers/jni/androidmediaplayer.cpp
+++ b/src/plugins/android/src/wrappers/jni/androidmediaplayer.cpp
@@ -271,7 +271,7 @@ void AndroidMediaPlayer::setAudioRole(QAudio::Role role)
void AndroidMediaPlayer::setCustomAudioRole(const QString &role)
{
- QStringList roles = role.split(",", QString::SkipEmptyParts);
+ QStringList roles = role.split(",", Qt::SkipEmptyParts);
int type = 0; // CONTENT_TYPE_UNKNOWN
int usage = 0; // USAGE_UNKNOWN
diff --git a/src/plugins/android/src/wrappers/jni/androidmediarecorder.cpp b/src/plugins/android/src/wrappers/jni/androidmediarecorder.cpp
index d607ab806..10508a358 100644
--- a/src/plugins/android/src/wrappers/jni/androidmediarecorder.cpp
+++ b/src/plugins/android/src/wrappers/jni/androidmediarecorder.cpp
@@ -213,6 +213,32 @@ void AndroidMediaRecorder::stop()
}
}
+void AndroidMediaRecorder::pause()
+{
+ QJNIEnvironmentPrivate env;
+ m_mediaRecorder.callMethod<void>("pause");
+ if (env->ExceptionCheck()) {
+#ifdef QT_DEBUG
+ env->ExceptionDescribe();
+#endif
+ env->ExceptionClear();
+ }
+}
+
+bool AndroidMediaRecorder::resume()
+{
+ QJNIEnvironmentPrivate env;
+ m_mediaRecorder.callMethod<void>("resume");
+ if (env->ExceptionCheck()) {
+#ifdef QT_DEBUG
+ env->ExceptionDescribe();
+#endif
+ env->ExceptionClear();
+ return false;
+ }
+ return true;
+}
+
void AndroidMediaRecorder::setAudioChannels(int numChannels)
{
m_mediaRecorder.callMethod<void>("setAudioChannels", "(I)V", numChannels);
diff --git a/src/plugins/android/src/wrappers/jni/androidmediarecorder.h b/src/plugins/android/src/wrappers/jni/androidmediarecorder.h
index e4b3a80ea..55b370cf1 100644
--- a/src/plugins/android/src/wrappers/jni/androidmediarecorder.h
+++ b/src/plugins/android/src/wrappers/jni/androidmediarecorder.h
@@ -138,6 +138,8 @@ public:
bool start();
void stop();
+ void pause();
+ bool resume();
void setAudioChannels(int numChannels);
void setAudioEncoder(AudioEncoder encoder);
diff --git a/src/plugins/avfoundation/camera/avfcameraflashcontrol.mm b/src/plugins/avfoundation/camera/avfcameraflashcontrol.mm
index 42303ce17..1e27dc472 100644
--- a/src/plugins/avfoundation/camera/avfcameraflashcontrol.mm
+++ b/src/plugins/avfoundation/camera/avfcameraflashcontrol.mm
@@ -171,6 +171,20 @@ bool AVFCameraFlashControl::applyFlashSettings()
return false;
}
+ auto setAvTorchModeSafe = [&captureDevice](AVCaptureTorchMode avTorchMode) {
+ if ([captureDevice isTorchModeSupported:avTorchMode])
+ captureDevice.torchMode = avTorchMode;
+ else
+ qDebugCamera() << Q_FUNC_INFO << "Attempt to setup unsupported torch mode " << avTorchMode;
+ };
+
+ auto setAvFlashModeSafe = [&captureDevice](AVCaptureFlashMode avFlashMode) {
+ if ([captureDevice isFlashModeSupported:avFlashMode])
+ captureDevice.flashMode = avFlashMode;
+ else
+ qDebugCamera() << Q_FUNC_INFO << "Attempt to setup unsupported flash mode " << avFlashMode;
+ };
+
if (!isFlashModeSupported(m_flashMode)) {
qDebugCamera() << Q_FUNC_INFO << "unsupported mode" << m_flashMode;
return false;
@@ -192,7 +206,7 @@ bool AVFCameraFlashControl::applyFlashSettings()
return false;
}
#endif
- captureDevice.torchMode = AVCaptureTorchModeOff;
+ setAvTorchModeSafe(AVCaptureTorchModeOff);
}
#ifdef Q_OS_IOS
if (![captureDevice isFlashAvailable]) {
@@ -209,7 +223,7 @@ bool AVFCameraFlashControl::applyFlashSettings()
return false;
}
#endif
- captureDevice.flashMode = AVCaptureFlashModeOff;
+ setAvFlashModeSafe(AVCaptureFlashModeOff);
}
#ifdef Q_OS_IOS
@@ -221,13 +235,13 @@ bool AVFCameraFlashControl::applyFlashSettings()
}
if (m_flashMode == QCameraExposure::FlashOff)
- captureDevice.flashMode = AVCaptureFlashModeOff;
+ setAvFlashModeSafe(AVCaptureFlashModeOff);
else if (m_flashMode == QCameraExposure::FlashOn)
- captureDevice.flashMode = AVCaptureFlashModeOn;
+ setAvFlashModeSafe(AVCaptureFlashModeOn);
else if (m_flashMode == QCameraExposure::FlashAuto)
- captureDevice.flashMode = AVCaptureFlashModeAuto;
+ setAvFlashModeSafe(AVCaptureFlashModeAuto);
else if (m_flashMode == QCameraExposure::FlashVideoLight)
- captureDevice.torchMode = AVCaptureTorchModeOn;
+ setAvTorchModeSafe(AVCaptureTorchModeOn);
return true;
}
diff --git a/src/plugins/avfoundation/camera/avfcamerarenderercontrol.mm b/src/plugins/avfoundation/camera/avfcamerarenderercontrol.mm
index 0359f5d0a..7bf9de071 100644
--- a/src/plugins/avfoundation/camera/avfcamerarenderercontrol.mm
+++ b/src/plugins/avfoundation/camera/avfcamerarenderercontrol.mm
@@ -278,7 +278,8 @@ AVFCameraRendererControl::AVFCameraRendererControl(QObject *parent)
AVFCameraRendererControl::~AVFCameraRendererControl()
{
- [m_cameraSession->captureSession() removeOutput:m_videoDataOutput];
+ if ([m_cameraSession->captureSession().outputs containsObject:m_videoDataOutput])
+ [m_cameraSession->captureSession() removeOutput:m_videoDataOutput];
[m_viewfinderFramesDelegate release];
if (m_delegateQueue)
dispatch_release(m_delegateQueue);
@@ -297,11 +298,9 @@ void AVFCameraRendererControl::setSurface(QAbstractVideoSurface *surface)
{
if (m_surface != surface) {
m_surface = surface;
-#ifdef Q_OS_IOS
m_supportsTextures = m_surface
- ? m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle).contains(QVideoFrame::Format_BGRA32)
+ ? !m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle).isEmpty()
: false;
-#endif
Q_EMIT surfaceChanged(surface);
}
}
diff --git a/src/plugins/avfoundation/camera/avfcameraservice.h b/src/plugins/avfoundation/camera/avfcameraservice.h
index 2969882b0..1397a7dee 100644
--- a/src/plugins/avfoundation/camera/avfcameraservice.h
+++ b/src/plugins/avfoundation/camera/avfcameraservice.h
@@ -70,6 +70,8 @@ class AVFMediaRecorderControlIOS;
class AVFAudioEncoderSettingsControl;
class AVFVideoEncoderSettingsControl;
class AVFMediaContainerControl;
+class AVFCameraWindowControl;
+class AVFCaptureDestinationControl;
class AVFCameraService : public QMediaService
{
@@ -99,6 +101,7 @@ public:
AVFAudioEncoderSettingsControl *audioEncoderSettingsControl() const { return m_audioEncoderSettingsControl; }
AVFVideoEncoderSettingsControl *videoEncoderSettingsControl() const {return m_videoEncoderSettingsControl; }
AVFMediaContainerControl *mediaContainerControl() const { return m_mediaContainerControl; }
+ AVFCaptureDestinationControl *captureDestinationControl() const { return m_captureDestinationControl; }
private:
AVFCameraSession *m_session;
@@ -120,6 +123,8 @@ private:
AVFAudioEncoderSettingsControl *m_audioEncoderSettingsControl;
AVFVideoEncoderSettingsControl *m_videoEncoderSettingsControl;
AVFMediaContainerControl *m_mediaContainerControl;
+ AVFCameraWindowControl *m_captureWindowControl;
+ AVFCaptureDestinationControl *m_captureDestinationControl;
};
QT_END_NAMESPACE
diff --git a/src/plugins/avfoundation/camera/avfcameraservice.mm b/src/plugins/avfoundation/camera/avfcameraservice.mm
index 33b4b72aa..79bf73910 100644
--- a/src/plugins/avfoundation/camera/avfcameraservice.mm
+++ b/src/plugins/avfoundation/camera/avfcameraservice.mm
@@ -61,6 +61,8 @@
#include "avfaudioencodersettingscontrol.h"
#include "avfvideoencodersettingscontrol.h"
#include "avfmediacontainercontrol.h"
+#include "avfcapturedestinationcontrol.h"
+#include "avfcamerawindowcontrol.h"
#ifdef Q_OS_IOS
#include "avfcamerazoomcontrol.h"
@@ -74,7 +76,8 @@ QT_USE_NAMESPACE
AVFCameraService::AVFCameraService(QObject *parent):
QMediaService(parent),
- m_videoOutput(nullptr)
+ m_videoOutput(nullptr),
+ m_captureWindowControl(nullptr)
{
m_session = new AVFCameraSession(this);
m_cameraControl = new AVFCameraControl(this);
@@ -109,6 +112,7 @@ AVFCameraService::AVFCameraService(QObject *parent):
m_audioEncoderSettingsControl = new AVFAudioEncoderSettingsControl(this);
m_videoEncoderSettingsControl = new AVFVideoEncoderSettingsControl(this);
m_mediaContainerControl = new AVFMediaContainerControl(this);
+ m_captureDestinationControl = new AVFCaptureDestinationControl;
}
AVFCameraService::~AVFCameraService()
@@ -119,6 +123,12 @@ AVFCameraService::~AVFCameraService()
delete m_recorderControl;
#endif
+ if (m_captureWindowControl) {
+ m_session->setCapturePreviewOutput(nullptr);
+ delete m_captureWindowControl;
+ m_captureWindowControl = nullptr;
+ }
+
if (m_videoOutput) {
m_session->setVideoOutput(nullptr);
delete m_videoOutput;
@@ -143,6 +153,7 @@ AVFCameraService::~AVFCameraService()
delete m_audioEncoderSettingsControl;
delete m_videoEncoderSettingsControl;
delete m_mediaContainerControl;
+ delete m_captureDestinationControl;
delete m_session;
}
@@ -210,6 +221,17 @@ QMediaControl *AVFCameraService::requestControl(const char *name)
return m_cameraZoomControl;
#endif
+ if (qstrcmp(name, QCameraCaptureDestinationControl_iid) == 0)
+ return m_captureDestinationControl;
+
+ if (!m_captureWindowControl) {
+ if (qstrcmp(name, QVideoWindowControl_iid) == 0) {
+ m_captureWindowControl = new AVFCameraWindowControl(this);
+ m_session->setCapturePreviewOutput(m_captureWindowControl);
+ return m_captureWindowControl;
+ }
+ }
+
if (!m_videoOutput) {
if (qstrcmp(name, QVideoRendererControl_iid) == 0)
m_videoOutput = new AVFCameraRendererControl(this);
@@ -234,6 +256,11 @@ void AVFCameraService::releaseControl(QMediaControl *control)
delete m_videoOutput;
m_videoOutput = nullptr;
}
+ else if (m_captureWindowControl == control) {
+ m_session->setCapturePreviewOutput(nullptr);
+ delete m_captureWindowControl;
+ m_captureWindowControl = nullptr;
+ }
}
diff --git a/src/plugins/avfoundation/camera/avfcamerasession.h b/src/plugins/avfoundation/camera/avfcamerasession.h
index 103ec0e17..a449bb806 100644
--- a/src/plugins/avfoundation/camera/avfcamerasession.h
+++ b/src/plugins/avfoundation/camera/avfcamerasession.h
@@ -54,6 +54,7 @@ class AVFCameraControl;
class AVFCameraService;
class AVFCameraRendererControl;
class AVFMediaVideoProbeControl;
+class AVFCameraWindowControl;
struct AVFCameraInfo
{
@@ -79,6 +80,7 @@ public:
AVFCameraInfo activeCameraInfo() const { return m_activeCameraInfo; }
void setVideoOutput(AVFCameraRendererControl *output);
+ void setCapturePreviewOutput(AVFCameraWindowControl *output);
AVCaptureSession *captureSession() const { return m_captureSession; }
AVCaptureDevice *videoCaptureDevice() const;
@@ -122,6 +124,7 @@ private:
AVFCameraService *m_service;
AVFCameraRendererControl *m_videoOutput;
+ AVFCameraWindowControl *m_capturePreviewWindowOutput;
QCamera::State m_state;
bool m_active;
diff --git a/src/plugins/avfoundation/camera/avfcamerasession.mm b/src/plugins/avfoundation/camera/avfcamerasession.mm
index 3c5f8f09a..6ee9c2636 100644
--- a/src/plugins/avfoundation/camera/avfcamerasession.mm
+++ b/src/plugins/avfoundation/camera/avfcamerasession.mm
@@ -48,6 +48,7 @@
#include "avfcameraviewfindersettingscontrol.h"
#include "avfimageencodercontrol.h"
#include "avfcamerautility.h"
+#include "avfcamerawindowcontrol.h"
#include <CoreFoundation/CoreFoundation.h>
#include <Foundation/Foundation.h>
@@ -146,6 +147,7 @@ QList<AVFCameraInfo> AVFCameraSession::m_cameraDevices;
AVFCameraSession::AVFCameraSession(AVFCameraService *service, QObject *parent)
: QObject(parent)
, m_service(service)
+ , m_capturePreviewWindowOutput(nullptr)
, m_state(QCamera::UnloadedState)
, m_active(false)
, m_videoInput(nil)
@@ -160,6 +162,10 @@ AVFCameraSession::AVFCameraSession(AVFCameraService *service, QObject *parent)
AVFCameraSession::~AVFCameraSession()
{
+ if (m_capturePreviewWindowOutput) {
+ m_capturePreviewWindowOutput->setLayer(nil);
+ }
+
if (m_videoInput) {
[m_captureSession removeInput:m_videoInput];
[m_videoInput release];
@@ -257,6 +263,29 @@ void AVFCameraSession::setVideoOutput(AVFCameraRendererControl *output)
output->configureAVCaptureSession(this);
}
+void AVFCameraSession::setCapturePreviewOutput(AVFCameraWindowControl *output)
+{
+#ifdef QT_DEBUG_AVF
+ qDebug() << Q_FUNC_INFO << output;
+#endif
+
+ if (m_capturePreviewWindowOutput == output)
+ return;
+
+ if (m_capturePreviewWindowOutput)
+ m_capturePreviewWindowOutput->setLayer(nil);
+
+ m_capturePreviewWindowOutput = output;
+
+ if (m_capturePreviewWindowOutput) {
+ AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:m_captureSession];
+ m_capturePreviewWindowOutput->setLayer(previewLayer);
+ if (AVFCameraViewfinderSettingsControl2 *vfControl = m_service->viewfinderSettingsControl2()) {
+ m_capturePreviewWindowOutput->setNativeSize(vfControl->viewfinderSettings().resolution());
+ }
+ }
+}
+
AVCaptureDevice *AVFCameraSession::videoCaptureDevice() const
{
if (m_videoInput)
@@ -409,6 +438,10 @@ bool AVFCameraSession::applyViewfinderSettings()
}
vfControl->applySettings(vfSettings);
+
+ if (m_capturePreviewWindowOutput)
+ m_capturePreviewWindowOutput->setNativeSize(vfControl->viewfinderSettings().resolution());
+
return !vfSettings.isNull();
}
diff --git a/src/plugins/avfoundation/camera/avfcamerautility.h b/src/plugins/avfoundation/camera/avfcamerautility.h
index 370302b1c..33956fc8a 100644
--- a/src/plugins/avfoundation/camera/avfcamerautility.h
+++ b/src/plugins/avfoundation/camera/avfcamerautility.h
@@ -161,7 +161,7 @@ QSize qt_device_format_high_resolution(AVCaptureDeviceFormat *format);
QSize qt_device_format_pixel_aspect_ratio(AVCaptureDeviceFormat *format);
QVector<AVFPSRange> qt_device_format_framerates(AVCaptureDeviceFormat *format);
AVCaptureDeviceFormat *qt_find_best_resolution_match(AVCaptureDevice *captureDevice, const QSize &res,
- FourCharCode preferredFormat);
+ FourCharCode preferredFormat, bool stillImage = true);
AVCaptureDeviceFormat *qt_find_best_framerate_match(AVCaptureDevice *captureDevice,
FourCharCode preferredFormat,
Float64 fps);
diff --git a/src/plugins/avfoundation/camera/avfcamerautility.mm b/src/plugins/avfoundation/camera/avfcamerautility.mm
index 8c743dcbc..25ccc4b01 100644
--- a/src/plugins/avfoundation/camera/avfcamerautility.mm
+++ b/src/plugins/avfoundation/camera/avfcamerautility.mm
@@ -239,7 +239,8 @@ QSize qt_device_format_pixel_aspect_ratio(AVCaptureDeviceFormat *format)
AVCaptureDeviceFormat *qt_find_best_resolution_match(AVCaptureDevice *captureDevice,
const QSize &request,
- FourCharCode filter)
+ FourCharCode filter,
+ bool stillImage)
{
Q_ASSERT(captureDevice);
Q_ASSERT(!request.isNull() && request.isValid());
@@ -254,7 +255,7 @@ AVCaptureDeviceFormat *qt_find_best_resolution_match(AVCaptureDevice *captureDev
if (qt_device_format_resolution(format) == request)
return format;
// iOS only (still images).
- if (qt_device_format_high_resolution(format) == request)
+ if (stillImage && qt_device_format_high_resolution(format) == request)
return format;
}
@@ -272,7 +273,7 @@ AVCaptureDeviceFormat *qt_find_best_resolution_match(AVCaptureDevice *captureDev
if (!res.isNull() && res.isValid() && qt_area_sane(res))
pairs << FormatPair(res, format);
const QSize highRes(qt_device_format_high_resolution(format));
- if (!highRes.isNull() && highRes.isValid() && qt_area_sane(highRes))
+ if (stillImage && !highRes.isNull() && highRes.isValid() && qt_area_sane(highRes))
pairs << FormatPair(highRes, format);
}
diff --git a/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm b/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm
index a77d7de03..dd0393f96 100644
--- a/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm
+++ b/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm
@@ -185,7 +185,16 @@ void AVFCameraViewfinderSettingsControl2::setViewfinderSettings(const QCameraVie
return;
m_settings = settings;
+#if defined(Q_OS_IOS)
+ bool active = m_service->session()->state() == QCamera::ActiveState;
+ if (active)
+ [m_service->session()->captureSession() beginConfiguration];
applySettings(m_settings);
+ if (active)
+ [m_service->session()->captureSession() commitConfiguration];
+#else
+ applySettings(m_settings);
+#endif
}
QVideoFrame::PixelFormat AVFCameraViewfinderSettingsControl2::QtPixelFormatFromCVFormat(unsigned avPixelFormat)
@@ -264,7 +273,7 @@ AVCaptureDeviceFormat *AVFCameraViewfinderSettingsControl2::findBestFormatMatch(
// Either the exact match (including high resolution for images on iOS)
// or a format with a resolution close to the requested one.
return qt_find_best_resolution_match(captureDevice, resolution,
- m_service->session()->defaultCodec());
+ m_service->session()->defaultCodec(), false);
}
// No resolution requested, what about framerates?
@@ -332,7 +341,10 @@ bool AVFCameraViewfinderSettingsControl2::convertPixelFormatIfSupported(QVideoFr
if (m_service->videoOutput()->surface()) {
const QAbstractVideoSurface *surface = m_service->videoOutput()->surface();
- if (!surface->supportedPixelFormats().contains(qtFormat))
+ QAbstractVideoBuffer::HandleType h = m_service->videoOutput()->supportsTextures()
+ ? QAbstractVideoBuffer::GLTextureHandle
+ : QAbstractVideoBuffer::NoHandle;
+ if (!surface->supportedPixelFormats(h).contains(qtFormat))
return false;
}
@@ -380,21 +392,19 @@ bool AVFCameraViewfinderSettingsControl2::applySettings(const QCameraViewfinderS
// format, or if no surface is set, the preferred capture device format
const QVector<QVideoFrame::PixelFormat> deviceFormats = viewfinderPixelFormats();
- QVideoFrame::PixelFormat pickedFormat = deviceFormats.first();
-
QAbstractVideoSurface *surface = m_service->videoOutput()->surface();
+ QVideoFrame::PixelFormat pickedFormat = deviceFormats.first();
if (surface) {
- if (m_service->videoOutput()->supportsTextures()) {
- pickedFormat = QVideoFrame::Format_ARGB32;
- } else {
- QList<QVideoFrame::PixelFormat> surfaceFormats = surface->supportedPixelFormats();
-
- for (int i = 0; i < surfaceFormats.count(); ++i) {
- const QVideoFrame::PixelFormat surfaceFormat = surfaceFormats.at(i);
- if (deviceFormats.contains(surfaceFormat)) {
- pickedFormat = surfaceFormat;
- break;
- }
+ pickedFormat = QVideoFrame::Format_Invalid;
+ QAbstractVideoBuffer::HandleType h = m_service->videoOutput()->supportsTextures()
+ ? QAbstractVideoBuffer::GLTextureHandle
+ : QAbstractVideoBuffer::NoHandle;
+ QList<QVideoFrame::PixelFormat> surfaceFormats = surface->supportedPixelFormats(h);
+ for (int i = 0; i < surfaceFormats.count(); ++i) {
+ const QVideoFrame::PixelFormat surfaceFormat = surfaceFormats.at(i);
+ if (deviceFormats.contains(surfaceFormat)) {
+ pickedFormat = surfaceFormat;
+ break;
}
}
}
@@ -402,13 +412,15 @@ bool AVFCameraViewfinderSettingsControl2::applySettings(const QCameraViewfinderS
CVPixelFormatFromQtFormat(pickedFormat, avfPixelFormat);
}
- if (avfPixelFormat != 0) {
- NSMutableDictionary *videoSettings = [NSMutableDictionary dictionaryWithCapacity:1];
- [videoSettings setObject:[NSNumber numberWithUnsignedInt:avfPixelFormat]
- forKey:(id)kCVPixelBufferPixelFormatTypeKey];
+ NSMutableDictionary *videoSettings = [NSMutableDictionary dictionaryWithCapacity:1];
+ [videoSettings setObject:[NSNumber numberWithUnsignedInt:avfPixelFormat]
+ forKey:(id)kCVPixelBufferPixelFormatTypeKey];
- videoOutput.videoSettings = videoSettings;
- }
+ const AVFConfigurationLock lock(captureDevice);
+ if (!lock)
+ qWarning("Failed to set active format (lock failed)");
+
+ videoOutput.videoSettings = videoSettings;
}
qt_set_framerate_limits(captureDevice, videoConnection(), settings.minimumFrameRate(), settings.maximumFrameRate());
diff --git a/src/plugins/avfoundation/camera/avfcamerawindowcontrol.h b/src/plugins/avfoundation/camera/avfcamerawindowcontrol.h
new file mode 100644
index 000000000..d1a950e38
--- /dev/null
+++ b/src/plugins/avfoundation/camera/avfcamerawindowcontrol.h
@@ -0,0 +1,129 @@
+/****************************************************************************
+**
+** Copyright (C) 2020 The Qt Company Ltd.
+** Contact: https://www.qt.io/licensing/
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and The Qt Company. For licensing terms
+** and conditions see https://www.qt.io/terms-conditions. For further
+** information use the contact form at https://www.qt.io/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 3 as published by the Free Software
+** Foundation and appearing in the file LICENSE.LGPL3 included in the
+** packaging of this file. Please review the following information to
+** ensure the GNU Lesser General Public License version 3 requirements
+** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU
+** General Public License version 2.0 or (at your option) the GNU General
+** Public license version 3 or any later version approved by the KDE Free
+** Qt Foundation. The licenses are as published by the Free Software
+** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
+** included in the packaging of this file. Please review the following
+** information to ensure the GNU General Public License requirements will
+** be met: https://www.gnu.org/licenses/gpl-2.0.html and
+** https://www.gnu.org/licenses/gpl-3.0.html.
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#ifndef AVFCAMERAWINDOWCONTROL_H
+#define AVFCAMERAWINDOWCONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QVideoWindowControl>
+
+@class AVCaptureVideoPreviewLayer;
+#if defined(Q_OS_MACOS)
+@class NSView;
+typedef NSView NativeView;
+#else
+@class UIView;
+typedef UIView NativeView;
+#endif
+
+QT_BEGIN_NAMESPACE
+
+class AVFCameraWindowControl : public QVideoWindowControl
+{
+ Q_OBJECT
+public:
+ AVFCameraWindowControl(QObject *parent = nullptr);
+ virtual ~AVFCameraWindowControl() override;
+
+ // QVideoWindowControl interface
+public:
+ WId winId() const override;
+ void setWinId(WId id) override;
+
+ QRect displayRect() const override;
+ void setDisplayRect(const QRect &rect) override;
+
+ bool isFullScreen() const override;
+ void setFullScreen(bool fullScreen) override;
+
+ void repaint() override;
+
+ QSize nativeSize() const override;
+
+ Qt::AspectRatioMode aspectRatioMode() const override;
+ void setAspectRatioMode(Qt::AspectRatioMode mode) override;
+
+ int brightness() const override;
+ void setBrightness(int brightness) override;
+
+ int contrast() const override;
+ void setContrast(int contrast) override;
+
+ int hue() const override;
+ void setHue(int hue) override;
+
+ int saturation() const override;
+ void setSaturation(int saturation) override;
+
+ // AVF Camera implementation details
+ void setNativeSize(QSize size);
+ void setLayer(AVCaptureVideoPreviewLayer *capturePreviewLayer);
+
+private:
+ void updateAspectRatio();
+ void updateCaptureLayerBounds();
+
+ void retainNativeLayer();
+ void releaseNativeLayer();
+
+ void attachNativeLayer();
+ void detachNativeLayer();
+
+ WId m_winId{0};
+ QRect m_displayRect;
+ bool m_fullscreen{false};
+ Qt::AspectRatioMode m_aspectRatioMode{Qt::IgnoreAspectRatio};
+ QSize m_nativeSize;
+ AVCaptureVideoPreviewLayer *m_captureLayer{nullptr};
+ NativeView *m_nativeView{nullptr};
+};
+
+QT_END_NAMESPACE
+
+#endif // AVFCAMERAWINDOWCONTROL_H
diff --git a/src/plugins/avfoundation/camera/avfcamerawindowcontrol.mm b/src/plugins/avfoundation/camera/avfcamerawindowcontrol.mm
new file mode 100644
index 000000000..5154d0646
--- /dev/null
+++ b/src/plugins/avfoundation/camera/avfcamerawindowcontrol.mm
@@ -0,0 +1,262 @@
+/****************************************************************************
+**
+** Copyright (C) 2020 The Qt Company Ltd and/or its subsidiary(-ies).
+** Contact: https://www.qt.io/licensing/
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and The Qt Company. For licensing terms
+** and conditions see https://www.qt.io/terms-conditions. For further
+** information use the contact form at https://www.qt.io/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 3 as published by the Free Software
+** Foundation and appearing in the file LICENSE.LGPL3 included in the
+** packaging of this file. Please review the following information to
+** ensure the GNU Lesser General Public License version 3 requirements
+** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU
+** General Public License version 2.0 or (at your option) the GNU General
+** Public license version 3 or any later version approved by the KDE Free
+** Qt Foundation. The licenses are as published by the Free Software
+** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
+** included in the packaging of this file. Please review the following
+** information to ensure the GNU General Public License requirements will
+** be met: https://www.gnu.org/licenses/gpl-2.0.html and
+** https://www.gnu.org/licenses/gpl-3.0.html.
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include "avfcamerawindowcontrol.h"
+
+#import <AVFoundation/AVFoundation.h>
+#import <QuartzCore/CATransaction.h>
+
+#if QT_HAS_INCLUDE(<AppKit/AppKit.h>)
+#import <AppKit/AppKit.h>
+#endif
+
+#if QT_HAS_INCLUDE(<UIKit/UIKit.h>)
+#import <UIKit/UIKit.h>
+#endif
+
+QT_USE_NAMESPACE
+
+AVFCameraWindowControl::AVFCameraWindowControl(QObject *parent)
+ : QVideoWindowControl(parent)
+{
+ setObjectName(QStringLiteral("AVFCameraWindowControl"));
+}
+
+AVFCameraWindowControl::~AVFCameraWindowControl()
+{
+ releaseNativeLayer();
+}
+
+WId AVFCameraWindowControl::winId() const
+{
+ return m_winId;
+}
+
+void AVFCameraWindowControl::setWinId(WId id)
+{
+ if (m_winId == id)
+ return;
+
+ m_winId = id;
+
+ detachNativeLayer();
+ m_nativeView = (NativeView*)m_winId;
+ attachNativeLayer();
+}
+
+QRect AVFCameraWindowControl::displayRect() const
+{
+ return m_displayRect;
+}
+
+void AVFCameraWindowControl::setDisplayRect(const QRect &rect)
+{
+ if (m_displayRect != rect) {
+ m_displayRect = rect;
+ updateCaptureLayerBounds();
+ }
+}
+
+bool AVFCameraWindowControl::isFullScreen() const
+{
+ return m_fullscreen;
+}
+
+void AVFCameraWindowControl::setFullScreen(bool fullscreen)
+{
+ if (m_fullscreen != fullscreen) {
+ m_fullscreen = fullscreen;
+ Q_EMIT fullScreenChanged(fullscreen);
+ }
+}
+
+void AVFCameraWindowControl::repaint()
+{
+ if (m_captureLayer)
+ [m_captureLayer setNeedsDisplay];
+}
+
+QSize AVFCameraWindowControl::nativeSize() const
+{
+ return m_nativeSize;
+}
+
+void AVFCameraWindowControl::setNativeSize(QSize size)
+{
+ if (m_nativeSize != size) {
+ m_nativeSize = size;
+ Q_EMIT nativeSizeChanged();
+ }
+}
+
+Qt::AspectRatioMode AVFCameraWindowControl::aspectRatioMode() const
+{
+ return m_aspectRatioMode;
+}
+
+void AVFCameraWindowControl::setAspectRatioMode(Qt::AspectRatioMode mode)
+{
+ if (m_aspectRatioMode != mode) {
+ m_aspectRatioMode = mode;
+ updateAspectRatio();
+ }
+}
+
+int AVFCameraWindowControl::brightness() const
+{
+ return 0;
+}
+
+void AVFCameraWindowControl::setBrightness(int brightness)
+{
+ if (0 != brightness)
+ qWarning("AVFCameraWindowControl doesn't support changing Brightness");
+}
+
+int AVFCameraWindowControl::contrast() const
+{
+ return 0;
+}
+
+void AVFCameraWindowControl::setContrast(int contrast)
+{
+ if (0 != contrast)
+ qWarning("AVFCameraWindowControl doesn't support changing Contrast");
+}
+
+int AVFCameraWindowControl::hue() const
+{
+ return 0;
+}
+
+void AVFCameraWindowControl::setHue(int hue)
+{
+ if (0 != hue)
+ qWarning("AVFCameraWindowControl doesn't support changing Hue");
+}
+
+int AVFCameraWindowControl::saturation() const
+{
+ return 0;
+}
+
+void AVFCameraWindowControl::setSaturation(int saturation)
+{
+ if (0 != saturation)
+ qWarning("AVFCameraWindowControl doesn't support changing Saturation");
+}
+
+void AVFCameraWindowControl::setLayer(AVCaptureVideoPreviewLayer *capturePreviewLayer)
+{
+ if (m_captureLayer == capturePreviewLayer)
+ return;
+
+ releaseNativeLayer();
+
+ m_captureLayer = capturePreviewLayer;
+
+ if (m_captureLayer)
+ retainNativeLayer();
+}
+
+void AVFCameraWindowControl::updateAspectRatio()
+{
+ if (m_captureLayer) {
+ switch (m_aspectRatioMode) {
+ case Qt::IgnoreAspectRatio:
+ [m_captureLayer setVideoGravity:AVLayerVideoGravityResize];
+ break;
+ case Qt::KeepAspectRatio:
+ [m_captureLayer setVideoGravity:AVLayerVideoGravityResizeAspect];
+ break;
+ case Qt::KeepAspectRatioByExpanding:
+ [m_captureLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
+ break;
+ default:
+ break;
+ }
+ }
+}
+
+void AVFCameraWindowControl::updateCaptureLayerBounds()
+{
+ if (m_captureLayer && m_nativeView) {
+ [CATransaction begin];
+ [CATransaction setDisableActions: YES]; // disable implicit animations/flicker
+ m_captureLayer.frame = m_displayRect.toCGRect();
+ [CATransaction commit];
+ }
+}
+
+void AVFCameraWindowControl::retainNativeLayer()
+{
+ [m_captureLayer retain];
+
+ updateAspectRatio();
+ attachNativeLayer();
+}
+
+void AVFCameraWindowControl::releaseNativeLayer()
+{
+ if (m_captureLayer) {
+ detachNativeLayer();
+ [m_captureLayer release];
+ m_captureLayer = nullptr;
+ }
+}
+
+void AVFCameraWindowControl::attachNativeLayer()
+{
+ if (m_captureLayer && m_nativeView) {
+#if defined(Q_OS_MACOS)
+ m_nativeView.wantsLayer = YES;
+#endif
+ CALayer *nativeLayer = m_nativeView.layer;
+ [nativeLayer addSublayer:m_captureLayer];
+ updateCaptureLayerBounds();
+ }
+}
+
+void AVFCameraWindowControl::detachNativeLayer()
+{
+ if (m_captureLayer && m_nativeView)
+ [m_captureLayer removeFromSuperlayer];
+}
+
+#include "moc_avfcamerawindowcontrol.cpp"
diff --git a/src/plugins/avfoundation/camera/avfcapturedestinationcontrol.h b/src/plugins/avfoundation/camera/avfcapturedestinationcontrol.h
new file mode 100644
index 000000000..04493437e
--- /dev/null
+++ b/src/plugins/avfoundation/camera/avfcapturedestinationcontrol.h
@@ -0,0 +1,63 @@
+/****************************************************************************
+**
+** Copyright (C) 2016 The Qt Company Ltd.
+** Contact: https://www.qt.io/licensing/
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and The Qt Company. For licensing terms
+** and conditions see https://www.qt.io/terms-conditions. For further
+** information use the contact form at https://www.qt.io/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 3 as published by the Free Software
+** Foundation and appearing in the file LICENSE.LGPL3 included in the
+** packaging of this file. Please review the following information to
+** ensure the GNU Lesser General Public License version 3 requirements
+** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU
+** General Public License version 2.0 or (at your option) the GNU General
+** Public license version 3 or any later version approved by the KDE Free
+** Qt Foundation. The licenses are as published by the Free Software
+** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
+** included in the packaging of this file. Please review the following
+** information to ensure the GNU General Public License requirements will
+** be met: https://www.gnu.org/licenses/gpl-2.0.html and
+** https://www.gnu.org/licenses/gpl-3.0.html.
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#ifndef AVFCAPTUREDESTINATIONCONTROL_H
+#define AVFCAPTUREDESTINATIONCONTROL_H
+
+#include <qcameracapturedestinationcontrol.h>
+
+QT_BEGIN_NAMESPACE
+
+class AVFCaptureDestinationControl : public QCameraCaptureDestinationControl
+{
+public:
+ AVFCaptureDestinationControl() = default;
+ ~AVFCaptureDestinationControl() = default;
+
+ bool isCaptureDestinationSupported(QCameraImageCapture::CaptureDestinations destination) const override;
+ QCameraImageCapture::CaptureDestinations captureDestination() const override;
+ void setCaptureDestination(QCameraImageCapture::CaptureDestinations destination) override;
+
+private:
+ QCameraImageCapture::CaptureDestinations m_destination = QCameraImageCapture::CaptureToFile;
+};
+
+QT_END_NAMESPACE
+
+#endif // AVFCAPTUREDESTINATIONCONTROL_H
diff --git a/src/plugins/avfoundation/camera/avfcapturedestinationcontrol.mm b/src/plugins/avfoundation/camera/avfcapturedestinationcontrol.mm
new file mode 100644
index 000000000..d0700d69d
--- /dev/null
+++ b/src/plugins/avfoundation/camera/avfcapturedestinationcontrol.mm
@@ -0,0 +1,62 @@
+/****************************************************************************
+**
+** Copyright (C) 2016 The Qt Company Ltd.
+** Contact: https://www.qt.io/licensing/
+**
+** This file is part of the Qt Toolkit.
+**
+** $QT_BEGIN_LICENSE:LGPL$
+** Commercial License Usage
+** Licensees holding valid commercial Qt licenses may use this file in
+** accordance with the commercial license agreement provided with the
+** Software or, alternatively, in accordance with the terms contained in
+** a written agreement between you and The Qt Company. For licensing terms
+** and conditions see https://www.qt.io/terms-conditions. For further
+** information use the contact form at https://www.qt.io/contact-us.
+**
+** GNU Lesser General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU Lesser
+** General Public License version 3 as published by the Free Software
+** Foundation and appearing in the file LICENSE.LGPL3 included in the
+** packaging of this file. Please review the following information to
+** ensure the GNU Lesser General Public License version 3 requirements
+** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
+**
+** GNU General Public License Usage
+** Alternatively, this file may be used under the terms of the GNU
+** General Public License version 2.0 or (at your option) the GNU General
+** Public license version 3 or any later version approved by the KDE Free
+** Qt Foundation. The licenses are as published by the Free Software
+** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
+** included in the packaging of this file. Please review the following
+** information to ensure the GNU General Public License requirements will
+** be met: https://www.gnu.org/licenses/gpl-2.0.html and
+** https://www.gnu.org/licenses/gpl-3.0.html.
+**
+** $QT_END_LICENSE$
+**
+****************************************************************************/
+
+#include "avfcapturedestinationcontrol.h"
+
+QT_BEGIN_NAMESPACE
+
+bool AVFCaptureDestinationControl::isCaptureDestinationSupported(QCameraImageCapture::CaptureDestinations destination) const
+{
+ return destination & (QCameraImageCapture::CaptureToFile | QCameraImageCapture::CaptureToBuffer);
+}
+
+QCameraImageCapture::CaptureDestinations AVFCaptureDestinationControl::captureDestination() const
+{
+ return m_destination;
+}
+
+void AVFCaptureDestinationControl::setCaptureDestination(QCameraImageCapture::CaptureDestinations destination)
+{
+ if (m_destination != destination) {
+ m_destination = destination;
+ Q_EMIT captureDestinationChanged(m_destination);
+ }
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/avfoundation/camera/avfimagecapturecontrol.mm b/src/plugins/avfoundation/camera/avfimagecapturecontrol.mm
index 55a20b1bd..6fca55d29 100644
--- a/src/plugins/avfoundation/camera/avfimagecapturecontrol.mm
+++ b/src/plugins/avfoundation/camera/avfimagecapturecontrol.mm
@@ -42,6 +42,8 @@
#include "avfcameraservice.h"
#include "avfcamerautility.h"
#include "avfcameracontrol.h"
+#include "avfcapturedestinationcontrol.h"
+#include <private/qmemoryvideobuffer_p.h>
#include <QtCore/qurl.h>
#include <QtCore/qfile.h>
@@ -111,12 +113,16 @@ int AVFImageCaptureControl::capture(const QString &fileName)
return m_lastCaptureId;
}
- QString actualFileName = m_storageLocation.generateFileName(fileName,
- QCamera::CaptureStillImage,
- QLatin1String("img_"),
- QLatin1String("jpg"));
+ auto destination = m_service->captureDestinationControl()->captureDestination();
+ QString actualFileName;
+ if (destination & QCameraImageCapture::CaptureToFile) {
+ actualFileName = m_storageLocation.generateFileName(fileName,
+ QCamera::CaptureStillImage,
+ QLatin1String("img_"),
+ QLatin1String("jpg"));
- qDebugCamera() << "Capture image to" << actualFileName;
+ qDebugCamera() << "Capture image to" << actualFileName;
+ }
CaptureRequest request = { m_lastCaptureId, QSharedPointer<QSemaphore>::create()};
m_requestsMutex.lock();
@@ -152,11 +158,24 @@ int AVFImageCaptureControl::capture(const QString &fileName)
// so we cannot reliably check the camera's status. Instead, we wait
// with a timeout and treat a failure to acquire a semaphore as an error.
if (!m_service->videoOutput() || request.previewReady->tryAcquire(1, 1000)) {
- qDebugCamera() << "Image capture completed:" << actualFileName;
+ qDebugCamera() << "Image capture completed";
NSData *nsJpgData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
QByteArray jpgData = QByteArray::fromRawData((const char *)[nsJpgData bytes], [nsJpgData length]);
+ if (destination & QCameraImageCapture::CaptureToBuffer) {
+ QBuffer data(&jpgData);
+ QImageReader reader(&data, "JPEG");
+ QSize size = reader.size();
+ QVideoFrame frame(new QMemoryVideoBuffer(QByteArray(jpgData.constData(), jpgData.size()), -1), size, QVideoFrame::Format_Jpeg);
+ QMetaObject::invokeMethod(this, "imageAvailable", Qt::QueuedConnection,
+ Q_ARG(int, request.captureId),
+ Q_ARG(QVideoFrame, frame));
+ }
+
+ if (!(destination & QCameraImageCapture::CaptureToFile))
+ return;
+
QFile f(actualFileName);
if (f.open(QFile::WriteOnly)) {
if (f.write(jpgData) != -1) {
diff --git a/src/plugins/avfoundation/camera/camera.pro b/src/plugins/avfoundation/camera/camera.pro
index 4b960ed5a..76aa8af85 100644
--- a/src/plugins/avfoundation/camera/camera.pro
+++ b/src/plugins/avfoundation/camera/camera.pro
@@ -42,7 +42,9 @@ HEADERS += \
avfcameraflashcontrol.h \
avfvideoencodersettingscontrol.h \
avfmediacontainercontrol.h \
- avfaudioencodersettingscontrol.h
+ avfaudioencodersettingscontrol.h \
+ avfcamerawindowcontrol.h \
+ avfcapturedestinationcontrol.h
OBJECTIVE_SOURCES += \
avfcameraserviceplugin.mm \
@@ -65,7 +67,9 @@ OBJECTIVE_SOURCES += \
avfcameraflashcontrol.mm \
avfvideoencodersettingscontrol.mm \
avfmediacontainercontrol.mm \
- avfaudioencodersettingscontrol.mm
+ avfaudioencodersettingscontrol.mm \
+ avfcamerawindowcontrol.mm \
+ avfcapturedestinationcontrol.mm
osx {
diff --git a/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm b/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
index 6a2dba6a0..a601bb5ac 100644
--- a/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
+++ b/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
@@ -110,6 +110,12 @@ static void *AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext
self->m_session = session;
self->m_bufferIsLikelyToKeepUp = FALSE;
+
+ m_playerLayer = [AVPlayerLayer playerLayerWithPlayer:nil];
+ [m_playerLayer retain];
+ m_playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
+ m_playerLayer.anchorPoint = CGPointMake(0.0f, 0.0f);
+
return self;
}
@@ -162,6 +168,10 @@ static void *AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext
[[NSNotificationCenter defaultCenter] removeObserver:self
name:AVPlayerItemTimeJumpedNotification
object:m_playerItem];
+ for (AVPlayerItemOutput *output in m_playerItem.outputs) {
+ if ([output isKindOfClass:[AVPlayerItemVideoOutput class]])
+ [m_playerItem removeOutput:output];
+ }
m_playerItem = 0;
}
if (m_player) {
@@ -172,10 +182,6 @@ static void *AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext
[m_player release];
m_player = 0;
}
- if (m_playerLayer) {
- [m_playerLayer release];
- m_playerLayer = 0;
- }
}
- (void) prepareToPlayAsset:(AVURLAsset *)asset
@@ -260,14 +266,8 @@ static void *AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext
[m_player setMuted:m_session->isMuted()];
}
- //Create a new player layer if we don't have one already
- if (!m_playerLayer)
- {
- m_playerLayer = [AVPlayerLayer playerLayerWithPlayer:m_player];
- [m_playerLayer retain];
- m_playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
- m_playerLayer.anchorPoint = CGPointMake(0.0f, 0.0f);
- }
+ //Assign the output layer to the new player
+ m_playerLayer.player = m_player;
//Observe the AVPlayer "currentItem" property to find out when any
//AVPlayer replaceCurrentItemWithPlayerItem: replacement will/did
@@ -413,6 +413,7 @@ static void *AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext
}
[m_mimeType release];
+ [m_playerLayer release];
[super dealloc];
}
@@ -536,16 +537,16 @@ QIODevice *AVFMediaPlayerSession::mediaStream() const
return m_mediaStream;
}
-static void setURL(void *observer, const QString &url, const QString &mimeType = QString())
+static void setURL(void *observer, const QByteArray &url, const QString &mimeType = QString())
{
- NSString *urlString = [NSString stringWithUTF8String:url.toUtf8().constData()];
+ NSString *urlString = [NSString stringWithUTF8String:url.constData()];
NSURL *nsurl = [NSURL URLWithString:urlString];
[static_cast<AVFMediaPlayerSessionObserver*>(observer) setURL:nsurl mimeType:[NSString stringWithUTF8String:mimeType.toLatin1().constData()]];
}
-static void setStreamURL(void *observer, const QString &url)
+static void setStreamURL(void *observer, const QByteArray &url)
{
- setURL(observer, QLatin1String("iodevice://") + url, QFileInfo(url).suffix());
+ setURL(observer, QByteArrayLiteral("iodevice://") + url, QFileInfo(url).suffix());
}
void AVFMediaPlayerSession::setMedia(const QMediaContent &content, QIODevice *stream)
@@ -588,11 +589,11 @@ void AVFMediaPlayerSession::setMedia(const QMediaContent &content, QIODevice *st
// If there is a data, try to load it,
// otherwise wait for readyRead.
if (m_mediaStream->size())
- setStreamURL(m_observer, m_resources.request().url().toString());
+ setStreamURL(m_observer, m_resources.request().url().toEncoded());
} else {
//Load AVURLAsset
//initialize asset using content's URL
- setURL(m_observer, m_resources.request().url().toString());
+ setURL(m_observer, m_resources.request().url().toEncoded());
}
m_state = QMediaPlayer::StoppedState;
@@ -940,9 +941,11 @@ void AVFMediaPlayerSession::processLoadStateChange(QMediaPlayer::State newState)
// Get the native size of the video, and reset the bounds of the player layer
AVPlayerLayer *playerLayer = [static_cast<AVFMediaPlayerSessionObserver*>(m_observer) playerLayer];
if (videoTrack && playerLayer) {
- playerLayer.bounds = CGRectMake(0.0f, 0.0f,
- videoTrack.naturalSize.width,
- videoTrack.naturalSize.height);
+ if (!playerLayer.bounds.size.width || !playerLayer.bounds.size.height) {
+ playerLayer.bounds = CGRectMake(0.0f, 0.0f,
+ videoTrack.naturalSize.width,
+ videoTrack.naturalSize.height);
+ }
if (m_videoOutput && newState != QMediaPlayer::StoppedState) {
m_videoOutput->setLayer(playerLayer);
@@ -1035,7 +1038,7 @@ void AVFMediaPlayerSession::processMediaLoadError()
void AVFMediaPlayerSession::streamReady()
{
- setStreamURL(m_observer, m_resources.request().url().toString());
+ setStreamURL(m_observer, m_resources.request().url().toEncoded());
}
void AVFMediaPlayerSession::streamDestroyed()
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.h b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.h
index 99b6bb0b5..ac67090a5 100644
--- a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.h
+++ b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.h
@@ -43,18 +43,48 @@
#include <QtCore/QObject>
#include <QtGui/QImage>
#include <QtGui/QOpenGLContext>
+#include <QtGui/QOpenGLTextureBlitter>
#include <QtCore/QSize>
-@class CARenderer;
@class AVPlayerLayer;
+@class AVPlayerItemVideoOutput;
QT_BEGIN_NAMESPACE
-class QOpenGLFramebufferObject;
-class QWindow;
class QOpenGLContext;
+class QOpenGLFramebufferObject;
+class QOpenGLShaderProgram;
+class QOffscreenSurface;
class QAbstractVideoSurface;
+typedef struct __CVBuffer *CVBufferRef;
+typedef CVBufferRef CVImageBufferRef;
+typedef CVImageBufferRef CVPixelBufferRef;
+
+#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+ typedef struct __CVOpenGLESTextureCache *CVOpenGLESTextureCacheRef;
+ typedef CVImageBufferRef CVOpenGLESTextureRef;
+ // helpers to avoid boring if def
+ typedef CVOpenGLESTextureCacheRef CVOGLTextureCacheRef;
+ typedef CVOpenGLESTextureRef CVOGLTextureRef;
+ #define CVOGLTextureGetTarget CVOpenGLESTextureGetTarget
+ #define CVOGLTextureGetName CVOpenGLESTextureGetName
+ #define CVOGLTextureCacheCreate CVOpenGLESTextureCacheCreate
+ #define CVOGLTextureCacheCreateTextureFromImage CVOpenGLESTextureCacheCreateTextureFromImage
+ #define CVOGLTextureCacheFlush CVOpenGLESTextureCacheFlush
+#else
+ typedef struct __CVOpenGLTextureCache *CVOpenGLTextureCacheRef;
+ typedef CVImageBufferRef CVOpenGLTextureRef;
+ // helpers to avoid boring if def
+ typedef CVOpenGLTextureCacheRef CVOGLTextureCacheRef;
+ typedef CVOpenGLTextureRef CVOGLTextureRef;
+ #define CVOGLTextureGetTarget CVOpenGLTextureGetTarget
+ #define CVOGLTextureGetName CVOpenGLTextureGetName
+ #define CVOGLTextureCacheCreate CVOpenGLTextureCacheCreate
+ #define CVOGLTextureCacheCreateTextureFromImage CVOpenGLTextureCacheCreateTextureFromImage
+ #define CVOGLTextureCacheFlush CVOpenGLTextureCacheFlush
+#endif
+
class AVFVideoFrameRenderer : public QObject
{
public:
@@ -62,22 +92,31 @@ public:
virtual ~AVFVideoFrameRenderer();
- GLuint renderLayerToTexture(AVPlayerLayer *layer);
- QImage renderLayerToImage(AVPlayerLayer *layer);
+ void setPlayerLayer(AVPlayerLayer *layer);
+
+ CVOGLTextureRef renderLayerToTexture(AVPlayerLayer *layer, QSize *size);
+#ifdef Q_OS_MACOS
+ GLuint renderLayerToFBO(AVPlayerLayer *layer, QSize *size);
+#endif
+ QImage renderLayerToImage(AVPlayerLayer *layer, QSize *size);
private:
- QOpenGLFramebufferObject* initRenderer(AVPlayerLayer *layer);
- void renderLayerToFBO(AVPlayerLayer *layer, QOpenGLFramebufferObject *fbo);
+ void initRenderer();
+ CVPixelBufferRef copyPixelBufferFromLayer(AVPlayerLayer *layer, size_t& width, size_t& height);
+ CVOGLTextureRef createCacheTextureFromLayer(AVPlayerLayer *layer, size_t& width, size_t& height);
- CARenderer *m_videoLayerRenderer;
- QAbstractVideoSurface *m_surface;
- QOpenGLFramebufferObject *m_fbo[2];
- QWindow *m_offscreenSurface;
QOpenGLContext *m_glContext;
- QSize m_targetSize;
-
- uint m_currentBuffer;
+ QOffscreenSurface *m_offscreenSurface;
+ QAbstractVideoSurface *m_surface;
+ CVOGLTextureCacheRef m_textureCache;
+ AVPlayerItemVideoOutput* m_videoOutput;
bool m_isContextShared;
+
+#ifdef Q_OS_MACOS
+ QOpenGLFramebufferObject *m_fbo[2];
+ uint m_currentFBO;
+ QOpenGLTextureBlitter m_blitter;
+#endif
};
QT_END_NAMESPACE
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm
index 51f961729..a22ee2b82 100644
--- a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm
+++ b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm
@@ -41,12 +41,20 @@
#include <QtMultimedia/qabstractvideosurface.h>
#include <QtGui/QOpenGLFramebufferObject>
-#include <QtGui/QWindow>
+#include <QtGui/QOpenGLShaderProgram>
+#include <QtGui/QOffscreenSurface>
+
+#include <QtCore/private/qcore_mac_p.h>
#ifdef QT_DEBUG_AVF
#include <QtCore/qdebug.h>
#endif
+#ifdef Q_OS_MACOS
+#import <AppKit/AppKit.h>
+#include <CoreVideo/CVOpenGLTextureCache.h>
+#endif
+
#import <CoreVideo/CVBase.h>
#import <AVFoundation/AVFoundation.h>
@@ -54,15 +62,23 @@ QT_USE_NAMESPACE
AVFVideoFrameRenderer::AVFVideoFrameRenderer(QAbstractVideoSurface *surface, QObject *parent)
: QObject(parent)
- , m_videoLayerRenderer(nullptr)
- , m_surface(surface)
- , m_offscreenSurface(nullptr)
, m_glContext(nullptr)
- , m_currentBuffer(1)
+ , m_offscreenSurface(nullptr)
+ , m_surface(surface)
+ , m_textureCache(nullptr)
+ , m_videoOutput(nullptr)
, m_isContextShared(true)
{
+ m_videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:@{
+ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
+ (NSString *)kCVPixelBufferOpenGLCompatibilityKey: @YES
+ }];
+ [m_videoOutput setDelegate:nil queue:nil];
+
+#ifdef Q_OS_MACOS
m_fbo[0] = nullptr;
m_fbo[1] = nullptr;
+#endif
}
AVFVideoFrameRenderer::~AVFVideoFrameRenderer()
@@ -71,81 +87,200 @@ AVFVideoFrameRenderer::~AVFVideoFrameRenderer()
qDebug() << Q_FUNC_INFO;
#endif
- [m_videoLayerRenderer release];
- delete m_fbo[0];
- delete m_fbo[1];
+ [m_videoOutput release];
+ if (m_textureCache)
+ CFRelease(m_textureCache);
delete m_offscreenSurface;
delete m_glContext;
+
+#ifdef Q_OS_MACOS
+ delete m_fbo[0];
+ delete m_fbo[1];
+#endif
}
-GLuint AVFVideoFrameRenderer::renderLayerToTexture(AVPlayerLayer *layer)
+#ifdef Q_OS_MACOS
+GLuint AVFVideoFrameRenderer::renderLayerToFBO(AVPlayerLayer *layer, QSize *size)
{
- //Is layer valid
- if (!layer)
+ QCFType<CVOGLTextureRef> texture = renderLayerToTexture(layer, size);
+ if (!texture)
return 0;
- //If the glContext isn't shared, it doesn't make sense to return a texture for us
- if (m_offscreenSurface && !m_isContextShared)
- return 0;
+ Q_ASSERT(size);
- QOpenGLFramebufferObject *fbo = initRenderer(layer);
+ // Do we have FBO's already?
+ if ((!m_fbo[0] && !m_fbo[0]) || (m_fbo[0]->size() != *size)) {
+ delete m_fbo[0];
+ delete m_fbo[1];
+ m_fbo[0] = new QOpenGLFramebufferObject(*size);
+ m_fbo[1] = new QOpenGLFramebufferObject(*size);
+ }
+
+ // Switch buffer target
+ m_currentFBO = !m_currentFBO;
+ QOpenGLFramebufferObject *fbo = m_fbo[m_currentFBO];
- if (!fbo)
+ if (!fbo || !fbo->bind())
return 0;
- renderLayerToFBO(layer, fbo);
- if (m_glContext)
- m_glContext->doneCurrent();
+ glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+ glClear(GL_COLOR_BUFFER_BIT);
+
+ glViewport(0, 0, size->width(), size->height());
+
+ if (!m_blitter.isCreated())
+ m_blitter.create();
+
+ m_blitter.bind(GL_TEXTURE_RECTANGLE);
+ m_blitter.blit(CVOpenGLTextureGetName(texture), QMatrix4x4(), QMatrix3x3());
+ m_blitter.release();
+ glFinish();
+
+ fbo->release();
return fbo->texture();
}
+#endif
+
+CVOGLTextureRef AVFVideoFrameRenderer::renderLayerToTexture(AVPlayerLayer *layer, QSize *size)
+{
+ initRenderer();
+
+ // If the glContext isn't shared, it doesn't make sense to return a texture for us
+ if (!m_isContextShared)
+ return nullptr;
+
+ size_t width = 0, height = 0;
+ auto texture = createCacheTextureFromLayer(layer, width, height);
+ if (size)
+ *size = QSize(width, height);
+ return texture;
+}
-QImage AVFVideoFrameRenderer::renderLayerToImage(AVPlayerLayer *layer)
+CVPixelBufferRef AVFVideoFrameRenderer::copyPixelBufferFromLayer(AVPlayerLayer *layer,
+ size_t& width, size_t& height)
{
//Is layer valid
if (!layer) {
- return QImage();
+#ifdef QT_DEBUG_AVF
+ qWarning("copyPixelBufferFromLayer: invalid layer");
+#endif
+ return nullptr;
}
- QOpenGLFramebufferObject *fbo = initRenderer(layer);
+ AVPlayerItem *item = layer.player.currentItem;
+ if (![item.outputs containsObject:m_videoOutput])
+ [item addOutput:m_videoOutput];
- if (!fbo)
- return QImage();
+ CFTimeInterval currentCAFrameTime = CACurrentMediaTime();
+ CMTime currentCMFrameTime = [m_videoOutput itemTimeForHostTime:currentCAFrameTime];
+
+ // Happens when buffering / loading
+ if (CMTimeCompare(currentCMFrameTime, kCMTimeZero) < 0)
+ return nullptr;
+
+ if (![m_videoOutput hasNewPixelBufferForItemTime:currentCMFrameTime])
+ return nullptr;
+
+ CVPixelBufferRef pixelBuffer = [m_videoOutput copyPixelBufferForItemTime:currentCMFrameTime
+ itemTimeForDisplay:nil];
+ if (!pixelBuffer) {
+#ifdef QT_DEBUG_AVF
+ qWarning("copyPixelBufferForItemTime returned nil");
+ CMTimeShow(currentCMFrameTime);
+#endif
+ return nullptr;
+ }
+
+ width = CVPixelBufferGetWidth(pixelBuffer);
+ height = CVPixelBufferGetHeight(pixelBuffer);
+ return pixelBuffer;
+}
- renderLayerToFBO(layer, fbo);
- QImage fboImage = fbo->toImage();
- if (m_glContext)
- m_glContext->doneCurrent();
+CVOGLTextureRef AVFVideoFrameRenderer::createCacheTextureFromLayer(AVPlayerLayer *layer,
+ size_t& width, size_t& height)
+{
+ CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(layer, width, height);
+
+ if (!pixelBuffer)
+ return nullptr;
+
+ CVOGLTextureCacheFlush(m_textureCache, 0);
+
+ CVOGLTextureRef texture = nullptr;
+#ifdef Q_OS_MACOS
+ CVReturn err = CVOpenGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
+ m_textureCache,
+ pixelBuffer,
+ nil,
+ &texture);
+#else
+ CVReturn err = CVOGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault, m_textureCache, pixelBuffer, nullptr,
+ GL_TEXTURE_2D, GL_RGBA,
+ (GLsizei) width, (GLsizei) height,
+ GL_BGRA, GL_UNSIGNED_BYTE, 0,
+ &texture);
+#endif
+
+ if (!texture || err) {
+ qWarning() << "CVOGLTextureCacheCreateTextureFromImage failed error:" << err << m_textureCache;
+ }
+
+ CVPixelBufferRelease(pixelBuffer);
- return fboImage;
+ return texture;
}
-QOpenGLFramebufferObject *AVFVideoFrameRenderer::initRenderer(AVPlayerLayer *layer)
+QImage AVFVideoFrameRenderer::renderLayerToImage(AVPlayerLayer *layer, QSize *size)
{
+ size_t width = 0;
+ size_t height = 0;
+ CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(layer, width, height);
+ if (size)
+ *size = QSize(width, height);
+
+ if (!pixelBuffer)
+ return QImage();
- //Get size from AVPlayerLayer
- m_targetSize = QSize(layer.bounds.size.width, layer.bounds.size.height);
+ OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
+ if (pixelFormat != kCVPixelFormatType_32BGRA) {
+#ifdef QT_DEBUG_AVF
+ qWarning("CVPixelBuffer format is not BGRA32 (got: %d)", static_cast<quint32>(pixelFormat));
+#endif
+ return QImage();
+ }
- QOpenGLContext *shareContext = !m_glContext && m_surface
- ? qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>())
- : nullptr;
+ CVPixelBufferLockBaseAddress(pixelBuffer, 0);
+ char *data = (char *)CVPixelBufferGetBaseAddress(pixelBuffer);
+ size_t stride = CVPixelBufferGetBytesPerRow(pixelBuffer);
+
+ // format here is not relevant, only using for storage
+ QImage img = QImage(width, height, QImage::Format_ARGB32);
+ for (size_t j = 0; j < height; j++) {
+ memcpy(img.scanLine(j), data, width * 4);
+ data += stride;
+ }
+
+ CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
+ CVPixelBufferRelease(pixelBuffer);
+ return img;
+}
+
+void AVFVideoFrameRenderer::initRenderer()
+{
+ // even for using a texture directly, we need to be able to make a context current,
+ // so we need an offscreen, and we shouldn't assume we can make the surface context
+ // current on that offscreen, so use our own (sharing with it). Slightly
+ // excessive but no performance penalty and makes the QImage path easier to maintain
//Make sure we have an OpenGL context to make current
- if ((shareContext && shareContext != QOpenGLContext::currentContext())
- || (!QOpenGLContext::currentContext() && !m_glContext)) {
-
- //Create Hidden QWindow surface to create context in this thread
- delete m_offscreenSurface;
- m_offscreenSurface = new QWindow();
- m_offscreenSurface->setSurfaceType(QWindow::OpenGLSurface);
- //Needs geometry to be a valid surface, but size is not important
- m_offscreenSurface->setGeometry(0, 0, 1, 1);
- m_offscreenSurface->create();
+ if (!m_glContext) {
+ //Create OpenGL context and set share context from surface
+ QOpenGLContext *shareContext = nullptr;
+ if (m_surface)
+ shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>());
- delete m_glContext;
m_glContext = new QOpenGLContext();
- m_glContext->setFormat(m_offscreenSurface->requestedFormat());
-
if (shareContext) {
m_glContext->setShareContext(shareContext);
m_isContextShared = true;
@@ -156,83 +291,40 @@ QOpenGLFramebufferObject *AVFVideoFrameRenderer::initRenderer(AVPlayerLayer *lay
m_isContextShared = false;
}
if (!m_glContext->create()) {
+#ifdef QT_DEBUG_AVF
qWarning("failed to create QOpenGLContext");
- return nullptr;
- }
-
- // CARenderer must be re-created with different current context, so release it now.
- // See lines below where m_videoLayerRenderer is constructed.
- if (m_videoLayerRenderer) {
- [m_videoLayerRenderer release];
- m_videoLayerRenderer = nullptr;
+#endif
+ return;
}
}
- //Need current context
- if (m_glContext)
- m_glContext->makeCurrent(m_offscreenSurface);
-
- //Create the CARenderer if needed
- if (!m_videoLayerRenderer) {
- m_videoLayerRenderer = [CARenderer rendererWithCGLContext: CGLGetCurrentContext() options: nil];
- [m_videoLayerRenderer retain];
- }
-
- //Set/Change render source if needed
- if (m_videoLayerRenderer.layer != layer) {
- m_videoLayerRenderer.layer = layer;
- m_videoLayerRenderer.bounds = layer.bounds;
- }
-
- //Do we have FBO's already?
- if ((!m_fbo[0] && !m_fbo[0]) || (m_fbo[0]->size() != m_targetSize)) {
- delete m_fbo[0];
- delete m_fbo[1];
- m_fbo[0] = new QOpenGLFramebufferObject(m_targetSize);
- m_fbo[1] = new QOpenGLFramebufferObject(m_targetSize);
+ if (!m_offscreenSurface) {
+ m_offscreenSurface = new QOffscreenSurface();
+ m_offscreenSurface->setFormat(m_glContext->format());
+ m_offscreenSurface->create();
}
- //Switch buffer target
- m_currentBuffer = !m_currentBuffer;
- return m_fbo[m_currentBuffer];
-}
-
-void AVFVideoFrameRenderer::renderLayerToFBO(AVPlayerLayer *layer, QOpenGLFramebufferObject *fbo)
-{
- //Start Rendering
- //NOTE: This rendering method will NOT work on iOS as there is no CARenderer in iOS
- if (!fbo->bind()) {
- qWarning("AVFVideoRender FBO failed to bind");
- return;
+ // Need current context
+ m_glContext->makeCurrent(m_offscreenSurface);
+
+ if (!m_textureCache) {
+#ifdef Q_OS_MACOS
+ auto *currentContext = NSOpenGLContext.currentContext;
+ // Create an OpenGL CoreVideo texture cache from the pixel buffer.
+ auto err = CVOpenGLTextureCacheCreate(
+ kCFAllocatorDefault,
+ nullptr,
+ currentContext.CGLContextObj,
+ currentContext.pixelFormat.CGLPixelFormatObj,
+ nil,
+ &m_textureCache);
+#else
+ CVReturn err = CVOGLTextureCacheCreate(kCFAllocatorDefault, nullptr,
+ [EAGLContext currentContext],
+ nullptr, &m_textureCache);
+#endif
+ if (err)
+ qWarning("Error at CVOGLTextureCacheCreate %d", err);
}
- glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
- glClear(GL_COLOR_BUFFER_BIT);
-
- glViewport(0, 0, m_targetSize.width(), m_targetSize.height());
-
- glMatrixMode(GL_PROJECTION);
- glPushMatrix();
- glLoadIdentity();
-
- //Render to FBO with inverted Y
- glOrtho(0.0, m_targetSize.width(), 0.0, m_targetSize.height(), 0.0, 1.0);
-
- glMatrixMode(GL_MODELVIEW);
- glPushMatrix();
- glLoadIdentity();
-
- [m_videoLayerRenderer beginFrameAtTime:CACurrentMediaTime() timeStamp:NULL];
- [m_videoLayerRenderer addUpdateRect:layer.bounds];
- [m_videoLayerRenderer render];
- [m_videoLayerRenderer endFrame];
-
- glMatrixMode(GL_MODELVIEW);
- glPopMatrix();
- glMatrixMode(GL_PROJECTION);
- glPopMatrix();
-
- glFinish(); //Rendering needs to be done before passing texture to video frame
-
- fbo->release();
}
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.h b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.h
deleted file mode 100644
index d9f6baa7e..000000000
--- a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.h
+++ /dev/null
@@ -1,113 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2016 The Qt Company Ltd.
-** Contact: https://www.qt.io/licensing/
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see https://www.qt.io/terms-conditions. For further
-** information use the contact form at https://www.qt.io/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 3 as published by the Free Software
-** Foundation and appearing in the file LICENSE.LGPL3 included in the
-** packaging of this file. Please review the following information to
-** ensure the GNU Lesser General Public License version 3 requirements
-** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
-**
-** GNU General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU
-** General Public License version 2.0 or (at your option) the GNU General
-** Public license version 3 or any later version approved by the KDE Free
-** Qt Foundation. The licenses are as published by the Free Software
-** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
-** included in the packaging of this file. Please review the following
-** information to ensure the GNU General Public License requirements will
-** be met: https://www.gnu.org/licenses/gpl-2.0.html and
-** https://www.gnu.org/licenses/gpl-3.0.html.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-#ifndef AVFVIDEOFRAMERENDERER_H
-#define AVFVIDEOFRAMERENDERER_H
-
-#include <QtCore/QObject>
-#include <QtGui/QImage>
-#include <QtGui/QOpenGLContext>
-#include <QtCore/QSize>
-
-@class AVPlayerLayer;
-@class AVPlayerItemVideoOutput;
-
-QT_BEGIN_NAMESPACE
-
-class QOpenGLContext;
-class QOpenGLFramebufferObject;
-class QOpenGLShaderProgram;
-class QOffscreenSurface;
-class QAbstractVideoSurface;
-
-typedef struct __CVBuffer *CVBufferRef;
-typedef CVBufferRef CVImageBufferRef;
-typedef CVImageBufferRef CVPixelBufferRef;
-#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
-typedef struct __CVOpenGLESTextureCache *CVOpenGLESTextureCacheRef;
-typedef CVImageBufferRef CVOpenGLESTextureRef;
-// helpers to avoid boring if def
-typedef CVOpenGLESTextureCacheRef CVOGLTextureCacheRef;
-typedef CVOpenGLESTextureRef CVOGLTextureRef;
-#define CVOGLTextureGetTarget CVOpenGLESTextureGetTarget
-#define CVOGLTextureGetName CVOpenGLESTextureGetName
-#define CVOGLTextureCacheCreate CVOpenGLESTextureCacheCreate
-#define CVOGLTextureCacheCreateTextureFromImage CVOpenGLESTextureCacheCreateTextureFromImage
-#define CVOGLTextureCacheFlush CVOpenGLESTextureCacheFlush
-#else
-typedef struct __CVOpenGLTextureCache *CVOpenGLTextureCacheRef;
-typedef CVImageBufferRef CVOpenGLTextureRef;
-// helpers to avoid boring if def
-typedef CVOpenGLTextureCacheRef CVOGLTextureCacheRef;
-typedef CVOpenGLTextureRef CVOGLTextureRef;
-#define CVOGLTextureGetTarget CVOpenGLTextureGetTarget
-#define CVOGLTextureGetName CVOpenGLTextureGetName
-#define CVOGLTextureCacheCreate CVOpenGLTextureCacheCreate
-#define CVOGLTextureCacheCreateTextureFromImage CVOpenGLTextureCacheCreateTextureFromImage
-#define CVOGLTextureCacheFlush CVOpenGLTextureCacheFlush
-#endif
-
-class AVFVideoFrameRenderer : public QObject
-{
-public:
- AVFVideoFrameRenderer(QAbstractVideoSurface *surface, QObject *parent = nullptr);
-
- virtual ~AVFVideoFrameRenderer();
-
- void setPlayerLayer(AVPlayerLayer *layer);
-
- CVOGLTextureRef renderLayerToTexture(AVPlayerLayer *layer);
- QImage renderLayerToImage(AVPlayerLayer *layer);
-
-private:
- void initRenderer();
- CVPixelBufferRef copyPixelBufferFromLayer(AVPlayerLayer *layer, size_t& width, size_t& height);
- CVOGLTextureRef createCacheTextureFromLayer(AVPlayerLayer *layer, size_t& width, size_t& height);
-
- QOpenGLContext *m_glContext;
- QOffscreenSurface *m_offscreenSurface;
- QAbstractVideoSurface *m_surface;
- CVOGLTextureCacheRef m_textureCache;
- AVPlayerItemVideoOutput* m_videoOutput;
- bool m_isContextShared;
-};
-
-QT_END_NAMESPACE
-
-#endif // AVFVIDEOFRAMERENDERER_H
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.mm b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.mm
deleted file mode 100644
index 70e402e6c..000000000
--- a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.mm
+++ /dev/null
@@ -1,261 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
-** Contact: https://www.qt.io/licensing/
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see https://www.qt.io/terms-conditions. For further
-** information use the contact form at https://www.qt.io/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 3 as published by the Free Software
-** Foundation and appearing in the file LICENSE.LGPL3 included in the
-** packaging of this file. Please review the following information to
-** ensure the GNU Lesser General Public License version 3 requirements
-** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
-**
-** GNU General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU
-** General Public License version 2.0 or (at your option) the GNU General
-** Public license version 3 or any later version approved by the KDE Free
-** Qt Foundation. The licenses are as published by the Free Software
-** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
-** included in the packaging of this file. Please review the following
-** information to ensure the GNU General Public License requirements will
-** be met: https://www.gnu.org/licenses/gpl-2.0.html and
-** https://www.gnu.org/licenses/gpl-3.0.html.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-#include "avfvideoframerenderer_ios.h"
-
-#include <QtMultimedia/qabstractvideosurface.h>
-#include <QtGui/QOpenGLFramebufferObject>
-#include <QtGui/QOpenGLShaderProgram>
-#include <QtGui/QOffscreenSurface>
-
-#ifdef QT_DEBUG_AVF
-#include <QtCore/qdebug.h>
-#endif
-
-#import <CoreVideo/CVBase.h>
-#import <AVFoundation/AVFoundation.h>
-QT_USE_NAMESPACE
-
-AVFVideoFrameRenderer::AVFVideoFrameRenderer(QAbstractVideoSurface *surface, QObject *parent)
- : QObject(parent)
- , m_glContext(nullptr)
- , m_offscreenSurface(nullptr)
- , m_surface(surface)
- , m_textureCache(nullptr)
- , m_videoOutput(nullptr)
- , m_isContextShared(true)
-{
-}
-
-AVFVideoFrameRenderer::~AVFVideoFrameRenderer()
-{
-#ifdef QT_DEBUG_AVF
- qDebug() << Q_FUNC_INFO;
-#endif
-
- [m_videoOutput release]; // sending to nil is fine
- if (m_textureCache)
- CFRelease(m_textureCache);
- delete m_offscreenSurface;
- delete m_glContext;
-}
-
-void AVFVideoFrameRenderer::setPlayerLayer(AVPlayerLayer *layer)
-{
- Q_UNUSED(layer)
- if (m_videoOutput) {
- [m_videoOutput release];
- m_videoOutput = nullptr;
- // will be re-created in first call to copyPixelBufferFromLayer
- }
-}
-
-CVOGLTextureRef AVFVideoFrameRenderer::renderLayerToTexture(AVPlayerLayer *layer)
-{
- initRenderer();
-
- // If the glContext isn't shared, it doesn't make sense to return a texture for us
- if (!m_isContextShared)
- return nullptr;
-
- size_t dummyWidth = 0, dummyHeight = 0;
- return createCacheTextureFromLayer(layer, dummyWidth, dummyHeight);
-}
-
-static NSString* const AVF_PIXEL_FORMAT_KEY = (NSString*)kCVPixelBufferPixelFormatTypeKey;
-static NSNumber* const AVF_PIXEL_FORMAT_VALUE = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
-static NSDictionary* const AVF_OUTPUT_SETTINGS = [NSDictionary dictionaryWithObject:AVF_PIXEL_FORMAT_VALUE forKey:AVF_PIXEL_FORMAT_KEY];
-
-
-CVPixelBufferRef AVFVideoFrameRenderer::copyPixelBufferFromLayer(AVPlayerLayer *layer,
- size_t& width, size_t& height)
-{
- //Is layer valid
- if (!layer) {
-#ifdef QT_DEBUG_AVF
- qWarning("copyPixelBufferFromLayer: invalid layer");
-#endif
- return nullptr;
- }
-
- if (!m_videoOutput) {
- m_videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:AVF_OUTPUT_SETTINGS];
- [m_videoOutput setDelegate:nil queue:nil];
- AVPlayerItem * item = [[layer player] currentItem];
- [item addOutput:m_videoOutput];
- }
-
- CFTimeInterval currentCAFrameTime = CACurrentMediaTime();
- CMTime currentCMFrameTime = [m_videoOutput itemTimeForHostTime:currentCAFrameTime];
- // happens when buffering / loading
- if (CMTimeCompare(currentCMFrameTime, kCMTimeZero) < 0) {
- return nullptr;
- }
-
- CVPixelBufferRef pixelBuffer = [m_videoOutput copyPixelBufferForItemTime:currentCMFrameTime
- itemTimeForDisplay:nil];
- if (!pixelBuffer) {
-#ifdef QT_DEBUG_AVF
- qWarning("copyPixelBufferForItemTime returned nil");
- CMTimeShow(currentCMFrameTime);
-#endif
- return nullptr;
- }
-
- width = CVPixelBufferGetWidth(pixelBuffer);
- height = CVPixelBufferGetHeight(pixelBuffer);
- return pixelBuffer;
-}
-
-CVOGLTextureRef AVFVideoFrameRenderer::createCacheTextureFromLayer(AVPlayerLayer *layer,
- size_t& width, size_t& height)
-{
- CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(layer, width, height);
-
- if (!pixelBuffer)
- return nullptr;
-
- CVOGLTextureCacheFlush(m_textureCache, 0);
-
- CVOGLTextureRef texture = nullptr;
- CVReturn err = CVOGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault, m_textureCache, pixelBuffer, nullptr,
- GL_TEXTURE_2D, GL_RGBA,
- (GLsizei) width, (GLsizei) height,
- GL_BGRA, GL_UNSIGNED_BYTE, 0,
- &texture);
-
- if (!texture || err) {
-#ifdef QT_DEBUG_AVF
- qWarning("CVOGLTextureCacheCreateTextureFromImage failed (error: %d)", err);
-#endif
- }
-
- CVPixelBufferRelease(pixelBuffer);
-
- return texture;
-}
-
-QImage AVFVideoFrameRenderer::renderLayerToImage(AVPlayerLayer *layer)
-{
- size_t width = 0;
- size_t height = 0;
- CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(layer, width, height);
-
- if (!pixelBuffer)
- return QImage();
-
- OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
- if (pixelFormat != kCVPixelFormatType_32BGRA) {
-#ifdef QT_DEBUG_AVF
- qWarning("CVPixelBuffer format is not BGRA32 (got: %d)", static_cast<quint32>(pixelFormat));
-#endif
- return QImage();
- }
-
- CVPixelBufferLockBaseAddress(pixelBuffer, 0);
- char *data = (char *)CVPixelBufferGetBaseAddress(pixelBuffer);
- size_t stride = CVPixelBufferGetBytesPerRow(pixelBuffer);
-
- // format here is not relevant, only using for storage
- QImage img = QImage(width, height, QImage::Format_ARGB32);
- for (size_t j = 0; j < height; j++) {
- memcpy(img.scanLine(j), data, width * 4);
- data += stride;
- }
-
- CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
- CVPixelBufferRelease(pixelBuffer);
- return img;
-}
-
-void AVFVideoFrameRenderer::initRenderer()
-{
- // even for using a texture directly, we need to be able to make a context current,
- // so we need an offscreen, and we shouldn't assume we can make the surface context
- // current on that offscreen, so use our own (sharing with it). Slightly
- // excessive but no performance penalty and makes the QImage path easier to maintain
-
- //Make sure we have an OpenGL context to make current
- if (!m_glContext) {
- //Create OpenGL context and set share context from surface
- QOpenGLContext *shareContext = nullptr;
- if (m_surface) {
- shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>());
- }
-
- m_glContext = new QOpenGLContext();
- if (shareContext) {
- m_glContext->setShareContext(shareContext);
- m_isContextShared = true;
- } else {
-#ifdef QT_DEBUG_AVF
- qWarning("failed to get Render Thread context");
-#endif
- m_isContextShared = false;
- }
- if (!m_glContext->create()) {
-#ifdef QT_DEBUG_AVF
- qWarning("failed to create QOpenGLContext");
-#endif
- return;
- }
- }
-
- if (!m_offscreenSurface) {
- m_offscreenSurface = new QOffscreenSurface();
- m_offscreenSurface->setFormat(m_glContext->format());
- m_offscreenSurface->create();
- }
-
- //Need current context
- m_glContext->makeCurrent(m_offscreenSurface);
-
- if (!m_textureCache) {
- // Create a new open gl texture cache
- CVReturn err = CVOGLTextureCacheCreate(kCFAllocatorDefault, nullptr,
- [EAGLContext currentContext],
- nullptr, &m_textureCache);
- if (err) {
- #ifdef QT_DEBUG_AVF
- qWarning("Error at CVOGLTextureCacheCreate %d", err);
- #endif
- }
- }
-
-}
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.h b/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.h
index 85dc19d31..a88573eaa 100644
--- a/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.h
+++ b/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.h
@@ -41,6 +41,8 @@
#define AVFVIDEORENDERERCONTROL_H
#include <QtMultimedia/QVideoRendererControl>
+#include <QtMultimedia/qabstractvideobuffer.h>
+
#include <QtCore/QMutex>
#include <QtCore/QSize>
@@ -82,8 +84,7 @@ private:
AVFVideoFrameRenderer *m_frameRenderer;
AVFDisplayLink *m_displayLink;
- QSize m_nativeSize;
- bool m_enableOpenGL;
+ QAbstractVideoBuffer::HandleType m_surfaceType = QAbstractVideoBuffer::NoHandle;
};
QT_END_NAMESPACE
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.mm b/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.mm
index 63bdee4f5..f299d5f86 100644
--- a/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.mm
+++ b/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.mm
@@ -40,11 +40,7 @@
#include "avfvideorenderercontrol.h"
#include "avfdisplaylink.h"
-#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
-#include "avfvideoframerenderer_ios.h"
-#else
#include "avfvideoframerenderer.h"
-#endif
#include <QtMultimedia/qabstractvideobuffer.h>
#include <QtMultimedia/qabstractvideosurface.h>
@@ -58,69 +54,52 @@
QT_USE_NAMESPACE
-#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
-class TextureCacheVideoBuffer : public QAbstractVideoBuffer
+class TextureVideoBuffer : public QAbstractVideoBuffer
{
public:
- TextureCacheVideoBuffer(CVOGLTextureRef texture)
- : QAbstractVideoBuffer(GLTextureHandle)
+ TextureVideoBuffer(GLuint texture, QAbstractVideoBuffer::HandleType type)
+ : QAbstractVideoBuffer(type)
, m_texture(texture)
{}
- virtual ~TextureCacheVideoBuffer()
- {
- // absolutely critical that we drop this
- // reference of textures will stay in the cache
- CFRelease(m_texture);
- }
-
MapMode mapMode() const { return NotMapped; }
uchar *map(MapMode, int*, int*) { return nullptr; }
void unmap() {}
QVariant handle() const
{
- GLuint texId = CVOGLTextureGetName(m_texture);
- return QVariant::fromValue<unsigned int>(texId);
+ return QVariant::fromValue<unsigned int>(m_texture);
}
private:
- CVOGLTextureRef m_texture;
+ GLuint m_texture;
};
-#else
-class TextureVideoBuffer : public QAbstractVideoBuffer
+
+class CoreVideoTextureVideoBuffer : public TextureVideoBuffer
{
public:
- TextureVideoBuffer(GLuint tex)
- : QAbstractVideoBuffer(GLTextureHandle)
- , m_texture(tex)
+ CoreVideoTextureVideoBuffer(CVOGLTextureRef texture, QAbstractVideoBuffer::HandleType type)
+ : TextureVideoBuffer(CVOGLTextureGetName(texture), type)
+ , m_coreVideoTexture(texture)
{}
- virtual ~TextureVideoBuffer()
+ virtual ~CoreVideoTextureVideoBuffer()
{
- }
-
- MapMode mapMode() const { return NotMapped; }
- uchar *map(MapMode, int*, int*) { return 0; }
- void unmap() {}
-
- QVariant handle() const
- {
- return QVariant::fromValue<unsigned int>(m_texture);
+ // absolutely critical that we drop this
+ // reference of textures will stay in the cache
+ CFRelease(m_coreVideoTexture);
}
private:
- GLuint m_texture;
+ CVOGLTextureRef m_coreVideoTexture;
};
-#endif
+
AVFVideoRendererControl::AVFVideoRendererControl(QObject *parent)
: QVideoRendererControl(parent)
, m_surface(nullptr)
, m_playerLayer(nullptr)
, m_frameRenderer(nullptr)
- , m_enableOpenGL(false)
-
{
m_displayLink = new AVFDisplayLink(this);
connect(m_displayLink, SIGNAL(tick(CVTimeStamp)), SLOT(updateVideoFrame(CVTimeStamp)));
@@ -170,18 +149,26 @@ void AVFVideoRendererControl::setSurface(QAbstractVideoSurface *surface)
//Surface changed, so we need a new frame renderer
m_frameRenderer = new AVFVideoFrameRenderer(m_surface, this);
-#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
- if (m_playerLayer) {
- m_frameRenderer->setPlayerLayer(static_cast<AVPlayerLayer*>(m_playerLayer));
- }
-#endif
- //Check for needed formats to render as OpenGL Texture
- auto handleGlEnabled = [this] {
- m_enableOpenGL = m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle).contains(QVideoFrame::Format_BGR32);
+ auto updateSurfaceType = [this] {
+ auto preferredOpenGLSurfaceTypes = {
+#ifdef Q_OS_MACOS
+ QAbstractVideoBuffer::GLTextureRectangleHandle, // GL_TEXTURE_RECTANGLE
+#endif
+ QAbstractVideoBuffer::GLTextureHandle // GL_TEXTURE_2D
+ };
+
+ for (auto surfaceType : preferredOpenGLSurfaceTypes) {
+ auto supportedFormats = m_surface->supportedPixelFormats(surfaceType);
+ if (supportedFormats.contains(QVideoFrame::Format_BGR32)) {
+ m_surfaceType = surfaceType;
+ return;
+ }
+ m_surfaceType = QAbstractVideoBuffer::NoHandle; // QImage
+ }
};
- handleGlEnabled();
- connect(m_surface, &QAbstractVideoSurface::supportedFormatsChanged, this, handleGlEnabled);
+ updateSurfaceType();
+ connect(m_surface, &QAbstractVideoSurface::supportedFormatsChanged, this, updateSurfaceType);
//If we already have a layer, but changed surfaces start rendering again
if (m_playerLayer && !m_displayLink->isActive()) {
@@ -204,12 +191,6 @@ void AVFVideoRendererControl::setLayer(void *playerLayer)
if (m_surface && m_surface->isActive())
m_surface->stop();
-#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
- if (m_frameRenderer) {
- m_frameRenderer->setPlayerLayer(static_cast<AVPlayerLayer*>(playerLayer));
- }
-#endif
-
//If there is no layer to render, stop scheduling updates
if (m_playerLayer == nullptr) {
m_displayLink->stop();
@@ -238,36 +219,39 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
if (!playerLayer.readyForDisplay)
return;
- if (m_enableOpenGL) {
-#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
- CVOGLTextureRef tex = m_frameRenderer->renderLayerToTexture(playerLayer);
-
- //Make sure we got a valid texture
- if (tex == nullptr)
- return;
-
- QAbstractVideoBuffer *buffer = new TextureCacheVideoBuffer(tex);
+ if (m_surfaceType == QAbstractVideoBuffer::GLTextureHandle
+ || m_surfaceType == QAbstractVideoBuffer::GLTextureRectangleHandle) {
+ QSize size;
+ QAbstractVideoBuffer *buffer = nullptr;
+
+#ifdef Q_OS_MACOS
+ if (m_surfaceType == QAbstractVideoBuffer::GLTextureRectangleHandle) {
+ // Render to GL_TEXTURE_RECTANGLE directly
+ if (CVOGLTextureRef tex = m_frameRenderer->renderLayerToTexture(playerLayer, &size))
+ buffer = new CoreVideoTextureVideoBuffer(tex, m_surfaceType);
+ } else {
+ // Render to GL_TEXTURE_2D via FBO
+ if (GLuint tex = m_frameRenderer->renderLayerToFBO(playerLayer, &size))
+ buffer = new TextureVideoBuffer(tex, m_surfaceType);
+ }
#else
- GLuint tex = m_frameRenderer->renderLayerToTexture(playerLayer);
- //Make sure we got a valid texture
- if (tex == 0)
+ Q_ASSERT(m_surfaceType != QAbstractVideoBuffer::GLTextureRectangleHandle);
+ // Render to GL_TEXTURE_2D directly
+ if (CVOGLTextureRef tex = m_frameRenderer->renderLayerToTexture(playerLayer, &size))
+ buffer = new CoreVideoTextureVideoBuffer(tex, m_surfaceType);
+#endif
+ if (!buffer)
return;
- QAbstractVideoBuffer *buffer = new TextureVideoBuffer(tex);
-#endif
- QVideoFrame frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_BGR32);
+ QVideoFrame frame = QVideoFrame(buffer, size, QVideoFrame::Format_BGR32);
if (m_surface && frame.isValid()) {
if (m_surface->isActive() && m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat())
m_surface->stop();
if (!m_surface->isActive()) {
- QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), QAbstractVideoBuffer::GLTextureHandle);
-#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+ QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), m_surfaceType);
format.setScanLineDirection(QVideoSurfaceFormat::TopToBottom);
-#else
- format.setScanLineDirection(QVideoSurfaceFormat::BottomToTop);
-#endif
if (!m_surface->start(format)) {
//Surface doesn't support GLTextureHandle
qWarning("Failed to activate video surface");
@@ -279,20 +263,21 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
}
} else {
//fallback to rendering frames to QImages
- QImage frameData = m_frameRenderer->renderLayerToImage(playerLayer);
+ QSize size;
+ QImage frameData = m_frameRenderer->renderLayerToImage(playerLayer, &size);
if (frameData.isNull()) {
return;
}
QAbstractVideoBuffer *buffer = new QImageVideoBuffer(frameData);
- QVideoFrame frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_ARGB32);
+ QVideoFrame frame = QVideoFrame(buffer, size, QVideoFrame::Format_ARGB32);
if (m_surface && frame.isValid()) {
if (m_surface->isActive() && m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat())
m_surface->stop();
if (!m_surface->isActive()) {
- QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), QAbstractVideoBuffer::NoHandle);
+ QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), m_surfaceType);
if (!m_surface->start(format)) {
qWarning("Failed to activate video surface");
@@ -308,7 +293,4 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
void AVFVideoRendererControl::setupVideoOutput()
{
- AVPlayerLayer *playerLayer = static_cast<AVPlayerLayer*>(m_playerLayer);
- if (playerLayer)
- m_nativeSize = QSize(playerLayer.bounds.size.width, playerLayer.bounds.size.height);
}
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideowidget.mm b/src/plugins/avfoundation/mediaplayer/avfvideowidget.mm
index be349710c..0987342b4 100644
--- a/src/plugins/avfoundation/mediaplayer/avfvideowidget.mm
+++ b/src/plugins/avfoundation/mediaplayer/avfvideowidget.mm
@@ -38,12 +38,9 @@
****************************************************************************/
#include "avfvideowidget.h"
-#include <QtCore/QDebug>
-#include <AVFoundation/AVFoundation.h>
-#include <QtGui/QResizeEvent>
-#include <QtGui/QPaintEvent>
-#include <QtGui/QPainter>
+#import <AVFoundation/AVFoundation.h>
+#import <QuartzCore/CATransaction.h>
#if defined(Q_OS_MACOS)
#import <AppKit/AppKit.h>
@@ -51,6 +48,11 @@
#import <UIKit/UIKit.h>
#endif
+#include <QtCore/QDebug>
+#include <QtGui/QResizeEvent>
+#include <QtGui/QPaintEvent>
+#include <QtGui/QPainter>
+
QT_USE_NAMESPACE
AVFVideoWidget::AVFVideoWidget(QWidget *parent)
@@ -178,5 +180,8 @@ void AVFVideoWidget::updateAspectRatio()
void AVFVideoWidget::updatePlayerLayerBounds(const QSize &size)
{
- m_playerLayer.bounds = CGRectMake(0.0f, 0.0f, (float)size.width(), (float)size.height());
+ [CATransaction begin];
+ [CATransaction setDisableActions: YES]; // disable animation/flicks
+ m_playerLayer.bounds = QRect(QPoint(0, 0), size).toCGRect();
+ [CATransaction commit];
}
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideowindowcontrol.mm b/src/plugins/avfoundation/mediaplayer/avfvideowindowcontrol.mm
index 7fa41fdc2..d61129ec9 100644
--- a/src/plugins/avfoundation/mediaplayer/avfvideowindowcontrol.mm
+++ b/src/plugins/avfoundation/mediaplayer/avfvideowindowcontrol.mm
@@ -40,6 +40,7 @@
#include "avfvideowindowcontrol.h"
#include <AVFoundation/AVFoundation.h>
+#import <QuartzCore/CATransaction.h>
#if QT_HAS_INCLUDE(<AppKit/AppKit.h>)
#include <AppKit/AppKit.h>
@@ -244,10 +245,10 @@ void AVFVideoWindowControl::updateAspectRatio()
void AVFVideoWindowControl::updatePlayerLayerBounds()
{
if (m_playerLayer) {
- CGRect newBounds = CGRectMake(0, 0,
- m_displayRect.width(), m_displayRect.height());
- m_playerLayer.bounds = newBounds;
- m_playerLayer.position = CGPointMake(m_displayRect.x(), m_displayRect.y());
+ [CATransaction begin];
+ [CATransaction setDisableActions: YES]; // disable animation/flicks
+ m_playerLayer.frame = m_displayRect.toCGRect();
+ [CATransaction commit];
}
}
diff --git a/src/plugins/avfoundation/mediaplayer/mediaplayer.pro b/src/plugins/avfoundation/mediaplayer/mediaplayer.pro
index b60b276e9..f71e0c3b3 100644
--- a/src/plugins/avfoundation/mediaplayer/mediaplayer.pro
+++ b/src/plugins/avfoundation/mediaplayer/mediaplayer.pro
@@ -39,33 +39,23 @@ OBJECTIVE_SOURCES += \
avfvideowidget.mm
}
-ios|tvos {
- qtConfig(opengl) {
- HEADERS += \
- avfvideoframerenderer_ios.h \
- avfvideorenderercontrol.h \
- avfdisplaylink.h
+qtConfig(opengl) {
+ HEADERS += \
+ avfvideoframerenderer.h \
+ avfvideorenderercontrol.h \
+ avfdisplaylink.h
- OBJECTIVE_SOURCES += \
- avfvideoframerenderer_ios.mm \
- avfvideorenderercontrol.mm \
- avfdisplaylink.mm
- }
+ OBJECTIVE_SOURCES += \
+ avfvideoframerenderer.mm \
+ avfvideorenderercontrol.mm \
+ avfdisplaylink.mm
+}
+
+ios|tvos {
LIBS += -framework Foundation
} else {
- LIBS += -framework AppKit
-
- qtConfig(opengl) {
- HEADERS += \
- avfvideoframerenderer.h \
- avfvideorenderercontrol.h \
- avfdisplaylink.h
-
- OBJECTIVE_SOURCES += \
- avfvideoframerenderer.mm \
- avfvideorenderercontrol.mm \
- avfdisplaylink.mm
- }
+ INCLUDEPATH += $$[QT_INSTALL_HEADERS]
+ LIBS += -framework AppKit -framework Metal
}
OTHER_FILES += \
diff --git a/src/plugins/common/evr/evrcustompresenter.cpp b/src/plugins/common/evr/evrcustompresenter.cpp
index b2dd0426c..3405f6373 100644
--- a/src/plugins/common/evr/evrcustompresenter.cpp
+++ b/src/plugins/common/evr/evrcustompresenter.cpp
@@ -1065,6 +1065,13 @@ void EVRCustomPresenter::setSurface(QAbstractVideoSurface *surface)
supportedFormatsChanged();
}
+void EVRCustomPresenter::setCropRect(QRect cropRect)
+{
+ m_mutex.lock();
+ m_cropRect = cropRect;
+ m_mutex.unlock();
+}
+
HRESULT EVRCustomPresenter::configureMixer(IMFTransform *mixer)
{
// Set the zoom rectangle (ie, the source clipping rectangle).
@@ -1355,13 +1362,30 @@ HRESULT EVRCustomPresenter::createOptimalVideoType(IMFMediaType *proposedType, I
hr = proposedType->GetUINT64(MF_MT_FRAME_SIZE, &size);
width = int(HI32(size));
height = int(LO32(size));
- rcOutput.left = 0;
- rcOutput.top = 0;
- rcOutput.right = width;
- rcOutput.bottom = height;
+
+ if (m_cropRect.isValid()) {
+ rcOutput.left = m_cropRect.x();
+ rcOutput.top = m_cropRect.y();
+ rcOutput.right = m_cropRect.x() + m_cropRect.width();
+ rcOutput.bottom = m_cropRect.y() + m_cropRect.height();
+
+ m_sourceRect.left = float(m_cropRect.x()) / width;
+ m_sourceRect.top = float(m_cropRect.y()) / height;
+ m_sourceRect.right = float(m_cropRect.x() + m_cropRect.width()) / width;
+ m_sourceRect.bottom = float(m_cropRect.y() + m_cropRect.height()) / height;
+
+ if (m_mixer)
+ configureMixer(m_mixer);
+ } else {
+ rcOutput.left = 0;
+ rcOutput.top = 0;
+ rcOutput.right = width;
+ rcOutput.bottom = height;
+ }
// Set the geometric aperture, and disable pan/scan.
- displayArea = qt_evr_makeMFArea(0, 0, rcOutput.right, rcOutput.bottom);
+ displayArea = qt_evr_makeMFArea(0, 0, rcOutput.right - rcOutput.left,
+ rcOutput.bottom - rcOutput.top);
hr = mtOptimal->SetUINT32(MF_MT_PAN_SCAN_ENABLED, FALSE);
if (FAILED(hr))
@@ -1427,7 +1451,7 @@ HRESULT EVRCustomPresenter::setMediaType(IMFMediaType *mediaType)
// Initialize the presenter engine with the new media type.
// The presenter engine allocates the samples.
- hr = m_presentEngine->createVideoSamples(mediaType, sampleQueue);
+ hr = m_presentEngine->createVideoSamples(mediaType, sampleQueue, m_cropRect.size());
if (FAILED(hr))
goto done;
diff --git a/src/plugins/common/evr/evrcustompresenter.h b/src/plugins/common/evr/evrcustompresenter.h
index c1c21580e..403158619 100644
--- a/src/plugins/common/evr/evrcustompresenter.h
+++ b/src/plugins/common/evr/evrcustompresenter.h
@@ -44,6 +44,7 @@
#include <qmutex.h>
#include <qqueue.h>
#include <qevent.h>
+#include <qrect.h>
#include <qvideosurfaceformat.h>
#include "evrdefs.h"
@@ -257,6 +258,7 @@ public:
void supportedFormatsChanged();
void setSurface(QAbstractVideoSurface *surface);
+ void setCropRect(QRect cropRect);
void startSurface();
void stopSurface();
@@ -368,6 +370,7 @@ private:
QAbstractVideoSurface *m_surface;
bool m_canRenderToSurface;
qint64 m_positionOffset; // Seek position in microseconds.
+ QRect m_cropRect; // Video crop rectangle
};
bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter);
diff --git a/src/plugins/common/evr/evrd3dpresentengine.cpp b/src/plugins/common/evr/evrd3dpresentengine.cpp
index d8e2da6d3..0bf8a0506 100644
--- a/src/plugins/common/evr/evrd3dpresentengine.cpp
+++ b/src/plugins/common/evr/evrd3dpresentengine.cpp
@@ -520,7 +520,7 @@ void D3DPresentEngine::setHint(Hint hint, bool enable)
m_useTextureRendering = enable && supportsTextureRendering();
}
-HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format, QList<IMFSample*> &videoSampleQueue)
+HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format, QList<IMFSample*> &videoSampleQueue, QSize frameSize)
{
if (!format)
return MF_E_UNEXPECTED;
@@ -537,6 +537,11 @@ HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format, QList<IMFSamp
if (FAILED(hr))
return hr;
+ if (frameSize.isValid() && !frameSize.isEmpty()) {
+ width = frameSize.width();
+ height = frameSize.height();
+ }
+
DWORD d3dFormat = 0;
hr = qt_evr_getFourCC(format, &d3dFormat);
if (FAILED(hr))
diff --git a/src/plugins/common/evr/evrd3dpresentengine.h b/src/plugins/common/evr/evrd3dpresentengine.h
index 8e2a444f3..d7eeacfc0 100644
--- a/src/plugins/common/evr/evrd3dpresentengine.h
+++ b/src/plugins/common/evr/evrd3dpresentengine.h
@@ -41,6 +41,7 @@
#define EVRD3DPRESENTENGINE_H
#include <QMutex>
+#include <QSize>
#include <QVideoSurfaceFormat>
#include <d3d9.h>
@@ -120,7 +121,7 @@ public:
bool supportsTextureRendering() const;
bool isTextureRenderingEnabled() const { return m_useTextureRendering; }
- HRESULT createVideoSamples(IMFMediaType *format, QList<IMFSample*>& videoSampleQueue);
+ HRESULT createVideoSamples(IMFMediaType *format, QList<IMFSample*>& videoSampleQueue, QSize frameSize);
QVideoSurfaceFormat videoSurfaceFormat() const { return m_surfaceFormat; }
QVideoFrame makeVideoFrame(IMFSample* sample);
diff --git a/src/plugins/common/evr/evrvideowindowcontrol.cpp b/src/plugins/common/evr/evrvideowindowcontrol.cpp
index 95f63c2e7..cec263da9 100644
--- a/src/plugins/common/evr/evrvideowindowcontrol.cpp
+++ b/src/plugins/common/evr/evrvideowindowcontrol.cpp
@@ -134,26 +134,29 @@ void EvrVideoWindowControl::setDisplayRect(const QRect &rect)
if (m_displayControl) {
RECT displayRect = { rect.left(), rect.top(), rect.right() + 1, rect.bottom() + 1 };
- QSize sourceSize = nativeSize();
+ QSize fullSize = nativeSize();
- RECT sourceRect = { 0, 0, sourceSize.width(), sourceSize.height() };
+ RECT sourceRect;
+ if (m_cropRect.isValid())
+ sourceRect = { m_cropRect.x(), m_cropRect.y(), m_cropRect.right() + 1, m_cropRect.bottom() + 1 };
+ else
+ sourceRect = { 0, 0, fullSize.width(), fullSize.height() };
if (m_aspectRatioMode == Qt::KeepAspectRatioByExpanding) {
QSize clippedSize = rect.size();
clippedSize.scale(sourceRect.right, sourceRect.bottom, Qt::KeepAspectRatio);
-
sourceRect.left = (sourceRect.right - clippedSize.width()) / 2;
sourceRect.top = (sourceRect.bottom - clippedSize.height()) / 2;
sourceRect.right = sourceRect.left + clippedSize.width();
sourceRect.bottom = sourceRect.top + clippedSize.height();
}
- if (sourceSize.width() > 0 && sourceSize.height() > 0) {
+ if (sourceRect.right - sourceRect.left > 0 && sourceRect.bottom - sourceRect.top > 0) {
MFVideoNormalizedRect sourceNormRect;
- sourceNormRect.left = float(sourceRect.left) / float(sourceRect.right);
- sourceNormRect.top = float(sourceRect.top) / float(sourceRect.bottom);
- sourceNormRect.right = float(sourceRect.right) / float(sourceRect.right);
- sourceNormRect.bottom = float(sourceRect.bottom) / float(sourceRect.bottom);
+ sourceNormRect.left = float(sourceRect.left) / float(fullSize.width());
+ sourceNormRect.top = float(sourceRect.top) / float(fullSize.height());
+ sourceNormRect.right = float(sourceRect.right) / float(fullSize.width());
+ sourceNormRect.bottom = float(sourceRect.bottom) / float(fullSize.height());
m_displayControl->SetVideoPosition(&sourceNormRect, &displayRect);
} else {
m_displayControl->SetVideoPosition(NULL, &displayRect);
@@ -164,6 +167,11 @@ void EvrVideoWindowControl::setDisplayRect(const QRect &rect)
}
}
+void EvrVideoWindowControl::setCropRect(QRect cropRect)
+{
+ m_cropRect = cropRect;
+}
+
bool EvrVideoWindowControl::isFullScreen() const
{
return m_fullScreen;
diff --git a/src/plugins/common/evr/evrvideowindowcontrol.h b/src/plugins/common/evr/evrvideowindowcontrol.h
index ce3b7746f..49d111b99 100644
--- a/src/plugins/common/evr/evrvideowindowcontrol.h
+++ b/src/plugins/common/evr/evrvideowindowcontrol.h
@@ -85,6 +85,8 @@ public:
void applyImageControls();
+ void setCropRect(QRect cropRect);
+
private:
void clear();
DXVA2_Fixed32 scaleProcAmpValue(DWORD prop, int value) const;
@@ -94,6 +96,7 @@ private:
DWORD m_dirtyValues;
Qt::AspectRatioMode m_aspectRatioMode;
QRect m_displayRect;
+ QRect m_cropRect;
int m_brightness;
int m_contrast;
int m_hue;
diff --git a/src/plugins/coreaudio/coreaudiodeviceinfo.mm b/src/plugins/coreaudio/coreaudiodeviceinfo.mm
index 1a79438cb..05d6c613d 100644
--- a/src/plugins/coreaudio/coreaudiodeviceinfo.mm
+++ b/src/plugins/coreaudio/coreaudiodeviceinfo.mm
@@ -60,11 +60,6 @@ CoreAudioDeviceInfo::CoreAudioDeviceInfo(const QByteArray &device, QAudio::Mode
m_deviceId = AudioDeviceID(deviceID);
#else //iOS
m_device = device;
- if (mode == QAudio::AudioInput) {
- if (CoreAudioSessionManager::instance().category() != CoreAudioSessionManager::PlayAndRecord) {
- CoreAudioSessionManager::instance().setCategory(CoreAudioSessionManager::PlayAndRecord);
- }
- }
#endif
}
@@ -336,14 +331,6 @@ QList<QByteArray> CoreAudioDeviceInfo::availableDevices(QAudio::Mode mode)
}
}
#else //iOS
- if (mode == QAudio::AudioInput) {
- if (CoreAudioSessionManager::instance().category() != CoreAudioSessionManager::PlayAndRecord) {
- CoreAudioSessionManager::instance().setCategory(CoreAudioSessionManager::PlayAndRecord);
- }
- }
-
- CoreAudioSessionManager::instance().setActive(true);
-
if (mode == QAudio::AudioOutput)
return CoreAudioSessionManager::instance().outputDevices();
if (mode == QAudio::AudioInput)
diff --git a/src/plugins/coreaudio/coreaudioinput.mm b/src/plugins/coreaudio/coreaudioinput.mm
index f7d511d27..7f305168f 100644
--- a/src/plugins/coreaudio/coreaudioinput.mm
+++ b/src/plugins/coreaudio/coreaudioinput.mm
@@ -483,6 +483,11 @@ CoreAudioInput::~CoreAudioInput()
bool CoreAudioInput::open()
{
+#if defined(Q_OS_IOS)
+ CoreAudioSessionManager::instance().setCategory(CoreAudioSessionManager::PlayAndRecord, CoreAudioSessionManager::MixWithOthers);
+ CoreAudioSessionManager::instance().setActive(true);
+#endif
+
if (m_isOpen)
return true;
diff --git a/src/plugins/coreaudio/coreaudiooutput.mm b/src/plugins/coreaudio/coreaudiooutput.mm
index caa4a1abb..b8addc1cf 100644
--- a/src/plugins/coreaudio/coreaudiooutput.mm
+++ b/src/plugins/coreaudio/coreaudiooutput.mm
@@ -549,6 +549,13 @@ OSStatus CoreAudioOutput::renderCallback(void *inRefCon, AudioUnitRenderActionFl
bool CoreAudioOutput::open()
{
+#if defined(Q_OS_IOS)
+ // Set default category to Ambient (implies MixWithOthers). This makes sure audio stops playing
+ // if the screen is locked or if the Silent switch is toggled.
+ CoreAudioSessionManager::instance().setCategory(CoreAudioSessionManager::Ambient, CoreAudioSessionManager::None);
+ CoreAudioSessionManager::instance().setActive(true);
+#endif
+
if (m_errorCode != QAudio::NoError)
return false;
diff --git a/src/plugins/coreaudio/coreaudiosessionmanager.mm b/src/plugins/coreaudio/coreaudiosessionmanager.mm
index 923a1942f..6c86f0753 100644
--- a/src/plugins/coreaudio/coreaudiosessionmanager.mm
+++ b/src/plugins/coreaudio/coreaudiosessionmanager.mm
@@ -38,7 +38,6 @@
****************************************************************************/
#include "coreaudiosessionmanager.h"
-
#import <AVFoundation/AVAudioSession.h>
#import <Foundation/Foundation.h>
@@ -215,10 +214,6 @@ CoreAudioSessionManager::CoreAudioSessionManager() :
QObject(0)
{
m_sessionObserver = [[CoreAudioSessionObserver alloc] initWithAudioSessionManager:this];
- setActive(true);
- // Set default category to Ambient (implies MixWithOthers). This makes sure audio stops playing
- // if the screen is locked or if the Silent switch is toggled.
- setCategory(CoreAudioSessionManager::Ambient, CoreAudioSessionManager::None);
}
CoreAudioSessionManager::~CoreAudioSessionManager()
diff --git a/src/plugins/directshow/camera/dscamerasession.cpp b/src/plugins/directshow/camera/dscamerasession.cpp
index cee3e9c56..e47142be1 100644
--- a/src/plugins/directshow/camera/dscamerasession.cpp
+++ b/src/plugins/directshow/camera/dscamerasession.cpp
@@ -428,6 +428,7 @@ bool DSCameraSession::unload()
SAFE_RELEASE(m_nullRendererFilter);
SAFE_RELEASE(m_filterGraph);
SAFE_RELEASE(m_graphBuilder);
+ SAFE_RELEASE(m_outputPin);
setStatus(QCamera::UnloadedStatus);
@@ -781,6 +782,9 @@ bool DSCameraSession::createFilterGraph()
goto failed;
}
+ if (!DirectShowUtils::getPin(m_sourceFilter, PINDIR_OUTPUT, PIN_CATEGORY_CAPTURE, &m_outputPin, &hr))
+ qWarning() << "Failed to get the pin for the video control:" << hr;
+
// Sample grabber filter
if (!m_previewSampleGrabber) {
m_previewSampleGrabber = new DirectShowSampleGrabber(this);
@@ -1055,24 +1059,18 @@ void DSCameraSession::updateSourceCapabilities()
reinterpret_cast<void**>(&pVideoControl));
if (FAILED(hr)) {
qWarning() << "Failed to get the video control";
- } else {
- IPin *pPin = nullptr;
- if (!DirectShowUtils::getPin(m_sourceFilter, PINDIR_OUTPUT, &pPin, &hr)) {
- qWarning() << "Failed to get the pin for the video control";
- } else {
- long supportedModes;
- hr = pVideoControl->GetCaps(pPin, &supportedModes);
- if (FAILED(hr)) {
- qWarning() << "Failed to get the supported modes of the video control";
- } else if (supportedModes & VideoControlFlag_FlipHorizontal) {
- long mode;
- hr = pVideoControl->GetMode(pPin, &mode);
- if (FAILED(hr))
- qWarning() << "Failed to get the mode of the video control";
- else if (supportedModes & VideoControlFlag_FlipHorizontal)
- m_needsHorizontalMirroring = (mode & VideoControlFlag_FlipHorizontal);
- }
- pPin->Release();
+ } else if (m_outputPin) {
+ long supportedModes;
+ hr = pVideoControl->GetCaps(m_outputPin, &supportedModes);
+ if (FAILED(hr)) {
+ qWarning() << "Failed to get the supported modes of the video control";
+ } else if (supportedModes & VideoControlFlag_FlipHorizontal) {
+ long mode;
+ hr = pVideoControl->GetMode(m_outputPin, &mode);
+ if (FAILED(hr))
+ qWarning() << "Failed to get the mode of the video control";
+ else if (supportedModes & VideoControlFlag_FlipHorizontal)
+ m_needsHorizontalMirroring = (mode & VideoControlFlag_FlipHorizontal);
}
pVideoControl->Release();
}
@@ -1107,28 +1105,22 @@ void DSCameraSession::updateSourceCapabilities()
QList<QCamera::FrameRateRange> frameRateRanges;
- if (pVideoControl) {
- IPin *pPin = nullptr;
- if (!DirectShowUtils::getPin(m_sourceFilter, PINDIR_OUTPUT, &pPin, &hr)) {
- qWarning() << "Failed to get the pin for the video control";
- } else {
- long listSize = 0;
- LONGLONG *frameRates = nullptr;
- SIZE size = { resolution.width(), resolution.height() };
- hr = pVideoControl->GetFrameRateList(pPin, iIndex, size, &listSize, &frameRates);
- if (hr == S_OK && listSize > 0 && frameRates) {
- for (long i = 0; i < listSize; ++i) {
- qreal fr = qreal(10000000) / frameRates[i];
- frameRateRanges.append(QCamera::FrameRateRange(fr, fr));
- }
-
- // Make sure higher frame rates come first
- std::sort(frameRateRanges.begin(), frameRateRanges.end(), qt_frameRateRangeGreaterThan);
+ if (pVideoControl && m_outputPin) {
+ long listSize = 0;
+ LONGLONG *frameRates = nullptr;
+ SIZE size = { resolution.width(), resolution.height() };
+ hr = pVideoControl->GetFrameRateList(m_outputPin, iIndex, size, &listSize, &frameRates);
+ if (hr == S_OK && listSize > 0 && frameRates) {
+ for (long i = 0; i < listSize; ++i) {
+ qreal fr = qreal(10000000) / frameRates[i];
+ frameRateRanges.append(QCamera::FrameRateRange(fr, fr));
}
- CoTaskMemFree(frameRates);
- pPin->Release();
+ // Make sure higher frame rates come first
+ std::sort(frameRateRanges.begin(), frameRateRanges.end(), qt_frameRateRangeGreaterThan);
}
+
+ CoTaskMemFree(frameRates);
}
if (frameRateRanges.isEmpty()) {
@@ -1146,9 +1138,14 @@ void DSCameraSession::updateSourceCapabilities()
m_supportedViewfinderSettings.append(settings);
m_supportedFormats.append(DirectShowMediaType(*pmt));
}
-
-
+ } else {
+ OLECHAR *guidString = nullptr;
+ StringFromCLSID(pmt->subtype, &guidString);
+ if (guidString)
+ qWarning() << "Unsupported media type:" << QString::fromWCharArray(guidString);
+ ::CoTaskMemFree(guidString);
}
+
DirectShowMediaType::deleteType(pmt);
}
}
diff --git a/src/plugins/directshow/camera/dscamerasession.h b/src/plugins/directshow/camera/dscamerasession.h
index 5e7d026c2..9f88163b9 100644
--- a/src/plugins/directshow/camera/dscamerasession.h
+++ b/src/plugins/directshow/camera/dscamerasession.h
@@ -231,6 +231,8 @@ private:
QMap<QCameraImageProcessingControl::ProcessingParameter, QVariant> m_pendingImageProcessingParametrs;
+ IPin *m_outputPin = nullptr;
+
friend class SampleGrabberCallbackPrivate;
};
diff --git a/src/plugins/directshow/common/directshowmediatype.cpp b/src/plugins/directshow/common/directshowmediatype.cpp
index 103f1ddc1..3429f4848 100644
--- a/src/plugins/directshow/common/directshowmediatype.cpp
+++ b/src/plugins/directshow/common/directshowmediatype.cpp
@@ -40,6 +40,11 @@
#include "directshowmediatype.h"
#include "directshowglobal.h"
+#include <initguid.h>
+
+DEFINE_GUID(MEDIASUBTYPE_Y800, 0x30303859, 0x0000, 0x0010, 0x80, 0x00,
+ 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
+
namespace
{
struct TypeLookup
@@ -66,7 +71,8 @@ namespace
{ QVideoFrame::Format_NV12, MEDIASUBTYPE_NV12 },
{ QVideoFrame::Format_YUV420P, MEDIASUBTYPE_IYUV },
{ QVideoFrame::Format_YUV420P, MEDIASUBTYPE_I420 },
- { QVideoFrame::Format_Jpeg, MEDIASUBTYPE_MJPG }
+ { QVideoFrame::Format_Jpeg, MEDIASUBTYPE_MJPG },
+ { QVideoFrame::Format_Y8, MEDIASUBTYPE_Y800 },
};
}
@@ -294,6 +300,7 @@ int DirectShowMediaType::bytesPerLine(const QVideoSurfaceFormat &format)
return format.frameWidth() * 4;
// 24 bpp packed formats.
case QVideoFrame::Format_RGB24:
+ case QVideoFrame::Format_BGR24:
return PAD_TO_DWORD(format.frameWidth() * 3);
// 16 bpp packed formats.
case QVideoFrame::Format_RGB565:
diff --git a/src/plugins/directshow/common/directshowmediatypeenum.cpp b/src/plugins/directshow/common/directshowmediatypeenum.cpp
index 0ff147fea..02281bb98 100644
--- a/src/plugins/directshow/common/directshowmediatypeenum.cpp
+++ b/src/plugins/directshow/common/directshowmediatypeenum.cpp
@@ -42,7 +42,8 @@
#include "directshowpin.h"
DirectShowMediaTypeEnum::DirectShowMediaTypeEnum(DirectShowPin *pin)
- : m_mediaTypes(pin->supportedMediaTypes())
+ : m_pin(pin)
+ , m_mediaTypes(pin->supportedMediaTypes())
{
m_pin->AddRef();
}
diff --git a/src/plugins/directshow/common/directshowutils.cpp b/src/plugins/directshow/common/directshowutils.cpp
index 1457837ce..9222ad779 100644
--- a/src/plugins/directshow/common/directshowutils.cpp
+++ b/src/plugins/directshow/common/directshowutils.cpp
@@ -93,6 +93,25 @@ bool DirectShowUtils::hasPinDirection(IPin *pin, PIN_DIRECTION direction, HRESUL
return (pinDir == direction);
}
+bool pinMatchesCategory(IPin* pPin, REFGUID category)
+{
+ bool found = false;
+ IKsPropertySet *pKs = nullptr;
+ DirectShowUtils::ScopedSafeRelease<IKsPropertySet> ks_property { &pKs };
+ HRESULT hr = pPin->QueryInterface(IID_PPV_ARGS(&pKs));
+
+ if (SUCCEEDED(hr)) {
+ GUID pin_category;
+ DWORD return_value;
+ hr = pKs->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, NULL, 0,
+ &pin_category, sizeof(pin_category), &return_value);
+ if (SUCCEEDED(hr) && (return_value == sizeof(pin_category)))
+ found = (pin_category == category);
+ }
+
+ return found;
+}
+
/**
* @brief DirectShowUtils::getPin
* @param filter
@@ -101,7 +120,7 @@ bool DirectShowUtils::hasPinDirection(IPin *pin, PIN_DIRECTION direction, HRESUL
* @param hrOut
* @return
*/
-bool DirectShowUtils::getPin(IBaseFilter *filter, PIN_DIRECTION pinDirection, IPin **pin, HRESULT *hrOut)
+bool DirectShowUtils::getPin(IBaseFilter *filter, PIN_DIRECTION pinDirection, REFGUID category, IPin **pin, HRESULT *hrOut)
{
IEnumPins *enumPins = nullptr;
const ScopedSafeRelease<IEnumPins> releaseEnumPins { &enumPins };
@@ -122,9 +141,11 @@ bool DirectShowUtils::getPin(IBaseFilter *filter, PIN_DIRECTION pinDirection, IP
PIN_DIRECTION currentPinDir;
*hrOut = nextPin->QueryDirection(&currentPinDir);
if (currentPinDir == pinDirection) {
- *pin = nextPin;
- (*pin)->AddRef();
- return true;
+ if (category == GUID_NULL || pinMatchesCategory(nextPin, category)) {
+ *pin = nextPin;
+ (*pin)->AddRef();
+ return true;
+ }
}
}
diff --git a/src/plugins/directshow/common/directshowutils.h b/src/plugins/directshow/common/directshowutils.h
index 5f2cfaa23..ec761abe6 100644
--- a/src/plugins/directshow/common/directshowutils.h
+++ b/src/plugins/directshow/common/directshowutils.h
@@ -68,7 +68,7 @@ struct ScopedSafeRelease
}
};
-bool getPin(IBaseFilter *filter, PIN_DIRECTION pinDirection, IPin **pin, HRESULT *hrOut);
+bool getPin(IBaseFilter *filter, PIN_DIRECTION pinDirection, REFGUID category, IPin **pin, HRESULT *hrOut);
bool isPinConnected(IPin *pin, HRESULT *hrOut = nullptr);
bool hasPinDirection(IPin *pin, PIN_DIRECTION direction, HRESULT *hrOut = nullptr);
bool matchPin(IPin *pin, PIN_DIRECTION pinDirection, BOOL shouldBeConnected, HRESULT *hrOut = nullptr);
diff --git a/src/plugins/directshow/player/directshowioreader.cpp b/src/plugins/directshow/player/directshowioreader.cpp
index ced10ea10..3318d57b5 100644
--- a/src/plugins/directshow/player/directshowioreader.cpp
+++ b/src/plugins/directshow/player/directshowioreader.cpp
@@ -155,7 +155,7 @@ HRESULT DirectShowIOReader::Request(IMediaSample *pSample, DWORD_PTR dwUser)
return VFW_E_SAMPLE_TIME_NOT_SET;
}
LONGLONG position = startTime / 10000000;
- LONG length = (endTime - startTime) / 10000000;
+ LONG length = qMin<qint64>((endTime - startTime) / 10000000, m_availableLength);
auto request = new DirectShowSampleRequest(pSample, dwUser, position, length, buffer);
diff --git a/src/plugins/directshow/player/directshowmetadatacontrol.cpp b/src/plugins/directshow/player/directshowmetadatacontrol.cpp
index 46674143e..d9864870a 100644
--- a/src/plugins/directshow/player/directshowmetadatacontrol.cpp
+++ b/src/plugins/directshow/player/directshowmetadatacontrol.cpp
@@ -64,7 +64,7 @@
#endif
#if QT_CONFIG(wshellitem)
-#include <ShlObj.h>
+#include <shlobj.h>
#include <propkeydef.h>
#include <private/qsystemlibrary_p.h>
diff --git a/src/plugins/gstreamer/camerabin/camerabinfocus.cpp b/src/plugins/gstreamer/camerabin/camerabinfocus.cpp
index d4e7fa699..a9cdf8e67 100644
--- a/src/plugins/gstreamer/camerabin/camerabinfocus.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinfocus.cpp
@@ -192,8 +192,6 @@ void CameraBinFocus::setFocusPointMode(QCameraFocus::FocusPointMode mode)
bool CameraBinFocus::isFocusPointModeSupported(QCameraFocus::FocusPointMode mode) const
{
- return mode == QCameraFocus::FocusPointAuto || mode == QCameraFocus::FocusPointCustom;
-
switch (mode) {
case QCameraFocus::FocusPointAuto:
case QCameraFocus::FocusPointCustom:
diff --git a/src/plugins/gstreamer/camerabin/camerabinimagecapture.cpp b/src/plugins/gstreamer/camerabin/camerabinimagecapture.cpp
index 52ec75f44..b164bc31a 100644
--- a/src/plugins/gstreamer/camerabin/camerabinimagecapture.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinimagecapture.cpp
@@ -366,12 +366,9 @@ bool CameraBinImageCapture::processBusMessage(const QGstreamerMessage &message)
#ifdef DEBUG_CAPTURE
qDebug() << Q_FUNC_INFO << "Dropped saving file" << fileName;
#endif
- //camerabin creates an empty file when captured buffer is dropped,
- //let's remove it
QFileInfo info(QString::fromUtf8(fileName));
- if (info.exists() && info.isFile() && info.size() == 0) {
+ if (info.exists() && info.isFile())
QFile(info.absoluteFilePath()).remove();
- }
}
}
}
diff --git a/src/plugins/gstreamer/camerabin/camerabinsession.cpp b/src/plugins/gstreamer/camerabin/camerabinsession.cpp
index 3e505a413..532fb43ec 100644
--- a/src/plugins/gstreamer/camerabin/camerabinsession.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinsession.cpp
@@ -347,10 +347,12 @@ void CameraBinSession::setupCaptureResolution()
// If capture resolution is specified, use it also for the viewfinder to avoid caps negotiation
// to fail.
if (m_usingWrapperCameraBinSrc) {
- if (m_captureMode == QCamera::CaptureStillImage && !imageResolution.isEmpty())
- viewfinderResolution = imageResolution;
- else if (m_captureMode == QCamera::CaptureVideo && !videoResolution.isEmpty())
- viewfinderResolution = videoResolution;
+ if (viewfinderResolution.isEmpty()) {
+ if (m_captureMode == QCamera::CaptureStillImage && !imageResolution.isEmpty())
+ viewfinderResolution = imageResolution;
+ else if (m_captureMode == QCamera::CaptureVideo && !videoResolution.isEmpty())
+ viewfinderResolution = videoResolution;
+ }
// Make sure we don't use incompatible frame rate and pixel format with the new resolution
if (viewfinderResolution != m_viewfinderSettings.resolution() &&
@@ -477,6 +479,15 @@ GstElement *CameraBinSession::buildCameraSource()
m_inputDeviceHasChanged = false;
m_usingWrapperCameraBinSrc = false;
+ if (m_videoSrc) {
+ gst_object_unref(GST_OBJECT(m_videoSrc));
+ m_videoSrc = 0;
+ }
+ if (m_cameraSrc) {
+ gst_object_unref(GST_OBJECT(m_cameraSrc));
+ m_cameraSrc = 0;
+ }
+
GstElement *camSrc = 0;
g_object_get(G_OBJECT(m_camerabin), CAMERA_SOURCE_PROPERTY, &camSrc, NULL);
@@ -1325,6 +1336,7 @@ QList< QPair<int,int> > CameraBinSession::supportedFrameRates(const QSize &frame
g_value_copy(oldRate, &rate);
gst_structure_remove_all_fields(structure);
gst_structure_set_value(structure, "framerate", &rate);
+ g_value_unset(&rate);
}
#if GST_CHECK_VERSION(1,0,0)
caps = gst_caps_simplify(caps);
@@ -1446,6 +1458,8 @@ QList<QSize> CameraBinSession::supportedResolutions(QPair<int,int> rate,
gst_structure_remove_all_fields(structure);
gst_structure_set_value(structure, "width", &w);
gst_structure_set_value(structure, "height", &h);
+ g_value_unset(&w);
+ g_value_unset(&h);
}
#if GST_CHECK_VERSION(1,0,0)
diff --git a/src/plugins/m3u/qm3uhandler.cpp b/src/plugins/m3u/qm3uhandler.cpp
index 5e05994ef..c621a181e 100644
--- a/src/plugins/m3u/qm3uhandler.cpp
+++ b/src/plugins/m3u/qm3uhandler.cpp
@@ -78,14 +78,14 @@ public:
delete m_textStream;
}
- virtual bool atEnd() const
+ bool atEnd() const override
{
//we can't just use m_textStream->atEnd(),
//for files with empty lines/comments at end
return nextResource.isNull();
}
- virtual QMediaContent readItem()
+ QMediaContent readItem() override
{
QMediaContent item;
if (!nextResource.isNull())
@@ -136,7 +136,7 @@ public:
return item;
}
- virtual void close()
+ void close() override
{
}
@@ -161,13 +161,13 @@ public:
delete m_textStream;
}
- virtual bool writeItem(const QMediaContent& item)
+ bool writeItem(const QMediaContent& item) override
{
*m_textStream << item.request().url().toString() << Qt::endl;
return true;
}
- virtual void close()
+ void close() override
{
}
diff --git a/src/plugins/m3u/qm3uhandler.h b/src/plugins/m3u/qm3uhandler.h
index 509ac11d5..1bc0684d3 100644
--- a/src/plugins/m3u/qm3uhandler.h
+++ b/src/plugins/m3u/qm3uhandler.h
@@ -54,15 +54,15 @@ public:
explicit QM3uPlaylistPlugin(QObject *parent = 0);
virtual ~QM3uPlaylistPlugin();
- virtual bool canRead(QIODevice *device, const QByteArray &format = QByteArray() ) const;
- virtual bool canRead(const QUrl& location, const QByteArray &format = QByteArray()) const;
+ bool canRead(QIODevice *device, const QByteArray &format = QByteArray()) const override;
+ bool canRead(const QUrl& location, const QByteArray &format = QByteArray()) const override;
- virtual bool canWrite(QIODevice *device, const QByteArray &format) const;
+ bool canWrite(QIODevice *device, const QByteArray &format) const override;
- virtual QMediaPlaylistReader *createReader(QIODevice *device, const QByteArray &format = QByteArray());
- virtual QMediaPlaylistReader *createReader(const QUrl& location, const QByteArray &format = QByteArray());
+ QMediaPlaylistReader *createReader(QIODevice *device, const QByteArray &format = QByteArray()) override;
+ QMediaPlaylistReader *createReader(const QUrl& location, const QByteArray &format = QByteArray()) override;
- virtual QMediaPlaylistWriter *createWriter(QIODevice *device, const QByteArray &format);
+ QMediaPlaylistWriter *createWriter(QIODevice *device, const QByteArray &format) override;
};
#endif // QM3UHANDLER_H
diff --git a/src/plugins/pulseaudio/qaudiodeviceinfo_pulse.h b/src/plugins/pulseaudio/qaudiodeviceinfo_pulse.h
index 64537229f..1cec772c0 100644
--- a/src/plugins/pulseaudio/qaudiodeviceinfo_pulse.h
+++ b/src/plugins/pulseaudio/qaudiodeviceinfo_pulse.h
@@ -69,15 +69,15 @@ public:
QPulseAudioDeviceInfo(const QByteArray &device, QAudio::Mode mode);
~QPulseAudioDeviceInfo() {}
- QAudioFormat preferredFormat() const;
- bool isFormatSupported(const QAudioFormat &format) const;
- QString deviceName() const;
- QStringList supportedCodecs();
- QList<int> supportedSampleRates();
- QList<int> supportedChannelCounts();
- QList<int> supportedSampleSizes();
- QList<QAudioFormat::Endian> supportedByteOrders();
- QList<QAudioFormat::SampleType> supportedSampleTypes();
+ QAudioFormat preferredFormat() const override;
+ bool isFormatSupported(const QAudioFormat &format) const override;
+ QString deviceName() const override;
+ QStringList supportedCodecs() override;
+ QList<int> supportedSampleRates() override;
+ QList<int> supportedChannelCounts() override;
+ QList<int> supportedSampleSizes() override;
+ QList<QAudioFormat::Endian> supportedByteOrders() override;
+ QList<QAudioFormat::SampleType> supportedSampleTypes() override;
private:
QByteArray m_device;
diff --git a/src/plugins/pulseaudio/qaudioinput_pulse.cpp b/src/plugins/pulseaudio/qaudioinput_pulse.cpp
index 2b5325132..b68b4af1b 100644
--- a/src/plugins/pulseaudio/qaudioinput_pulse.cpp
+++ b/src/plugins/pulseaudio/qaudioinput_pulse.cpp
@@ -402,6 +402,8 @@ int QPulseAudioInput::bytesReady() const
qint64 QPulseAudioInput::read(char *data, qint64 len)
{
+ Q_ASSERT(data != nullptr || len == 0);
+
m_bytesAvailable = checkBytesReady();
setError(QAudio::NoError);
@@ -411,7 +413,8 @@ qint64 QPulseAudioInput::read(char *data, qint64 len)
if (!m_pullMode && !m_tempBuffer.isEmpty()) {
readBytes = qMin(static_cast<int>(len), m_tempBuffer.size());
- memcpy(data, m_tempBuffer.constData(), readBytes);
+ if (readBytes)
+ memcpy(data, m_tempBuffer.constData(), readBytes);
m_totalTimeValue += readBytes;
if (readBytes < m_tempBuffer.size()) {
@@ -502,9 +505,10 @@ qint64 QPulseAudioInput::read(char *data, qint64 len)
void QPulseAudioInput::applyVolume(const void *src, void *dest, int len)
{
+ Q_ASSERT((src && dest) || len == 0);
if (m_volume < 1.f)
QAudioHelperInternal::qMultiplySamples(m_volume, m_format, src, dest, len);
- else
+ else if (len)
memcpy(dest, src, len);
}
diff --git a/src/plugins/pulseaudio/qaudioinput_pulse.h b/src/plugins/pulseaudio/qaudioinput_pulse.h
index 3a6cf03c4..dce212a25 100644
--- a/src/plugins/pulseaudio/qaudioinput_pulse.h
+++ b/src/plugins/pulseaudio/qaudioinput_pulse.h
@@ -78,27 +78,27 @@ public:
qint64 read(char *data, qint64 len);
- void start(QIODevice *device);
- QIODevice *start();
- void stop();
- void reset();
- void suspend();
- void resume();
- int bytesReady() const;
- int periodSize() const;
- void setBufferSize(int value);
- int bufferSize() const;
- void setNotifyInterval(int milliSeconds);
- int notifyInterval() const;
- qint64 processedUSecs() const;
- qint64 elapsedUSecs() const;
- QAudio::Error error() const;
- QAudio::State state() const;
- void setFormat(const QAudioFormat &format);
- QAudioFormat format() const;
-
- void setVolume(qreal volume);
- qreal volume() const;
+ void start(QIODevice *device) override;
+ QIODevice *start() override;
+ void stop() override;
+ void reset() override;
+ void suspend() override;
+ void resume() override;
+ int bytesReady() const override;
+ int periodSize() const override;
+ void setBufferSize(int value) override;
+ int bufferSize() const override;
+ void setNotifyInterval(int milliSeconds) override;
+ int notifyInterval() const override;
+ qint64 processedUSecs() const override;
+ qint64 elapsedUSecs() const override;
+ QAudio::Error error() const override;
+ QAudio::State state() const override;
+ void setFormat(const QAudioFormat &format) override;
+ QAudioFormat format() const override;
+
+ void setVolume(qreal volume) override;
+ qreal volume() const override;
qint64 m_totalTimeValue;
QIODevice *m_audioSource;
@@ -147,8 +147,8 @@ public:
PulseInputPrivate(QPulseAudioInput *audio);
~PulseInputPrivate() {};
- qint64 readData(char *data, qint64 len);
- qint64 writeData(const char *data, qint64 len);
+ qint64 readData(char *data, qint64 len) override;
+ qint64 writeData(const char *data, qint64 len) override;
void trigger();
diff --git a/src/plugins/pulseaudio/qaudiooutput_pulse.cpp b/src/plugins/pulseaudio/qaudiooutput_pulse.cpp
index 19ddac1e5..765efe036 100644
--- a/src/plugins/pulseaudio/qaudiooutput_pulse.cpp
+++ b/src/plugins/pulseaudio/qaudiooutput_pulse.cpp
@@ -506,27 +506,30 @@ qint64 QPulseAudioOutput::write(const char *data, qint64 len)
pulseEngine->lock();
- len = qMin(len, static_cast<qint64>(pa_stream_writable_size(m_stream)));
+ size_t nbytes = len;
+ void *dest = nullptr;
+
+ if (pa_stream_begin_write(m_stream, &dest, &nbytes) < 0) {
+ qWarning("QAudioSink(pulseaudio): pa_stream_begin_write, error = %s",
+ pa_strerror(pa_context_errno(pulseEngine->context())));
+ setError(QAudio::IOError);
+ return 0;
+ }
+
+ len = qMin(len, qint64(nbytes));
if (m_volume < 1.0f) {
// Don't use PulseAudio volume, as it might affect all other streams of the same category
// or even affect the system volume if flat volumes are enabled
- void *dest = NULL;
- size_t nbytes = len;
- if (pa_stream_begin_write(m_stream, &dest, &nbytes) < 0) {
- qWarning("QAudioOutput(pulseaudio): pa_stream_begin_write, error = %s",
- pa_strerror(pa_context_errno(pulseEngine->context())));
- setError(QAudio::IOError);
- return 0;
- }
-
- len = int(nbytes);
QAudioHelperInternal::qMultiplySamples(m_volume, m_format, data, dest, len);
- data = reinterpret_cast<char *>(dest);
+ } else {
+ memcpy(dest, data, len);
}
- if (pa_stream_write(m_stream, data, len, NULL, 0, PA_SEEK_RELATIVE) < 0) {
- qWarning("QAudioOutput(pulseaudio): pa_stream_write, error = %s",
+ data = reinterpret_cast<char *>(dest);
+
+ if ((pa_stream_write(m_stream, data, len, nullptr, 0, PA_SEEK_RELATIVE)) < 0) {
+ qWarning("QAudioSink(pulseaudio): pa_stream_write, error = %s",
pa_strerror(pa_context_errno(pulseEngine->context())));
setError(QAudio::IOError);
return 0;
@@ -683,7 +686,6 @@ qint64 PulseOutputPrivate::readData(char *data, qint64 len)
qint64 PulseOutputPrivate::writeData(const char *data, qint64 len)
{
- int retry = 0;
qint64 written = 0;
if ((m_audioDevice->m_deviceState == QAudio::ActiveState
@@ -691,10 +693,8 @@ qint64 PulseOutputPrivate::writeData(const char *data, qint64 len)
while(written < len) {
int chunk = m_audioDevice->write(data+written, (len-written));
if (chunk <= 0)
- retry++;
- written+=chunk;
- if (retry > 10)
return written;
+ written+=chunk;
}
}
diff --git a/src/plugins/pulseaudio/qaudiooutput_pulse.h b/src/plugins/pulseaudio/qaudiooutput_pulse.h
index 40d052681..e11f2ab2f 100644
--- a/src/plugins/pulseaudio/qaudiooutput_pulse.h
+++ b/src/plugins/pulseaudio/qaudiooutput_pulse.h
@@ -75,30 +75,30 @@ public:
QPulseAudioOutput(const QByteArray &device);
~QPulseAudioOutput();
- void start(QIODevice *device);
- QIODevice *start();
- void stop();
- void reset();
- void suspend();
- void resume();
- int bytesFree() const;
- int periodSize() const;
- void setBufferSize(int value);
- int bufferSize() const;
- void setNotifyInterval(int milliSeconds);
- int notifyInterval() const;
- qint64 processedUSecs() const;
- qint64 elapsedUSecs() const;
- QAudio::Error error() const;
- QAudio::State state() const;
- void setFormat(const QAudioFormat &format);
- QAudioFormat format() const;
-
- void setVolume(qreal volume);
- qreal volume() const;
-
- void setCategory(const QString &category);
- QString category() const;
+ void start(QIODevice *device) override;
+ QIODevice *start() override;
+ void stop() override;
+ void reset() override;
+ void suspend() override;
+ void resume() override;
+ int bytesFree() const override;
+ int periodSize() const override;
+ void setBufferSize(int value) override;
+ int bufferSize() const override;
+ void setNotifyInterval(int milliSeconds) override;
+ int notifyInterval() const override;
+ qint64 processedUSecs() const override;
+ qint64 elapsedUSecs() const override;
+ QAudio::Error error() const override;
+ QAudio::State state() const override;
+ void setFormat(const QAudioFormat &format) override;
+ QAudioFormat format() const override;
+
+ void setVolume(qreal volume) override;
+ qreal volume() const override;
+
+ void setCategory(const QString &category) override;
+ QString category() const override;
public:
void streamUnderflowCallback();
@@ -154,8 +154,8 @@ public:
virtual ~PulseOutputPrivate() {}
protected:
- qint64 readData(char *data, qint64 len);
- qint64 writeData(const char *data, qint64 len);
+ qint64 readData(char *data, qint64 len) override;
+ qint64 writeData(const char *data, qint64 len) override;
private:
QPulseAudioOutput *m_audioDevice;
diff --git a/src/plugins/pulseaudio/qpulseaudioplugin.h b/src/plugins/pulseaudio/qpulseaudioplugin.h
index 120d57df5..7d27cad48 100644
--- a/src/plugins/pulseaudio/qpulseaudioplugin.h
+++ b/src/plugins/pulseaudio/qpulseaudioplugin.h
@@ -58,11 +58,11 @@ public:
QPulseAudioPlugin(QObject *parent = 0);
~QPulseAudioPlugin() {}
- QByteArray defaultDevice(QAudio::Mode mode) const;
- QList<QByteArray> availableDevices(QAudio::Mode mode) const;
- QAbstractAudioInput *createInput(const QByteArray &device);
- QAbstractAudioOutput *createOutput(const QByteArray &device);
- QAbstractAudioDeviceInfo *createDeviceInfo(const QByteArray &device, QAudio::Mode mode);
+ QByteArray defaultDevice(QAudio::Mode mode) const override;
+ QList<QByteArray> availableDevices(QAudio::Mode mode) const override;
+ QAbstractAudioInput *createInput(const QByteArray &device) override;
+ QAbstractAudioOutput *createOutput(const QByteArray &device) override;
+ QAbstractAudioDeviceInfo *createDeviceInfo(const QByteArray &device, QAudio::Mode mode) override;
private:
QPulseAudioEngine *m_pulseEngine;
diff --git a/src/plugins/qnx-audio/audio/audio.pro b/src/plugins/qnx-audio/audio/audio.pro
index bd69dfe1e..d63e58676 100644
--- a/src/plugins/qnx-audio/audio/audio.pro
+++ b/src/plugins/qnx-audio/audio/audio.pro
@@ -19,4 +19,5 @@ SOURCES += qnxaudioplugin.cpp \
OTHER_FILES += qnx_audio.json
PLUGIN_TYPE = audio
+PLUGIN_CLASS_NAME = QnxAudioPlugin
load(qt_plugin)
diff --git a/src/plugins/qnx-audio/audio/qnxaudioinput.cpp b/src/plugins/qnx-audio/audio/qnxaudioinput.cpp
index 70b83390c..9fca8a7e1 100644
--- a/src/plugins/qnx-audio/audio/qnxaudioinput.cpp
+++ b/src/plugins/qnx-audio/audio/qnxaudioinput.cpp
@@ -331,7 +331,7 @@ bool QnxAudioInput::open()
m_pcmNotifier = new QSocketNotifier(snd_pcm_file_descriptor(m_pcmHandle, SND_PCM_CHANNEL_CAPTURE),
QSocketNotifier::Read, this);
- connect(m_pcmNotifier, SIGNAL(activated(int)), SLOT(userFeed()));
+ connect(m_pcmNotifier, SIGNAL(activated(QSocketDescriptor)), SLOT(userFeed()));
return true;
}
diff --git a/src/plugins/qnx/common/windowgrabber.cpp b/src/plugins/qnx/common/windowgrabber.cpp
index 9a65ad3a0..b2470c250 100644
--- a/src/plugins/qnx/common/windowgrabber.cpp
+++ b/src/plugins/qnx/common/windowgrabber.cpp
@@ -393,8 +393,12 @@ GLuint
WindowGrabberImage::getTexture(screen_window_t window, const QSize &size)
{
if (size != m_size) {
- if (!m_glTexture)
- glGenTextures(1, &m_glTexture);
+ // create a brand new texture to be the KHR image sibling, as
+ // previously used textures cannot be reused with new KHR image
+ // sources - note that glDeleteTextures handles nullptr gracefully
+ glDeleteTextures(1, &m_glTexture);
+ glGenTextures(1, &m_glTexture);
+
glBindTexture(GL_TEXTURE_2D, m_glTexture);
if (m_eglImage) {
glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, 0);
diff --git a/src/plugins/videonode/imx6/imx6.pro b/src/plugins/videonode/imx6/imx6.pro
index 43e17e725..0e9ed8b73 100644
--- a/src/plugins/videonode/imx6/imx6.pro
+++ b/src/plugins/videonode/imx6/imx6.pro
@@ -1,12 +1,8 @@
TARGET = imx6vivantevideonode
-QT += multimedia-private qtmultimediaquicktools-private
+QT += multimedia-private qtmultimediaquicktools-private multimediagsttools-private
-qtConfig(gstreamer_imxcommon) {
- QT += multimediagsttools-private
- QMAKE_USE += gstreamer_imxcommon
- DEFINES += GST_USE_UNSTABLE_API
-}
+QMAKE_USE += gstreamer
HEADERS += \
qsgvivantevideonode.h \
diff --git a/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp b/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp
index e1468fe34..c44a03896 100644
--- a/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp
+++ b/src/plugins/videonode/imx6/qsgvivantevideomaterial.cpp
@@ -50,9 +50,9 @@
#include <unistd.h>
#include <QtMultimedia/private/qtmultimediaglobal_p.h>
-#if QT_CONFIG(gstreamer_imxcommon)
#include "private/qgstvideobuffer_p.h"
-#include <gst/allocators/imx/phys_mem_meta.h>
+#if GST_CHECK_VERSION(1,14,0)
+#include <gst/allocators/gstphysmemory.h>
#endif
//#define QT_VIVANTE_VIDEO_DEBUG
@@ -107,7 +107,7 @@ void QSGVivanteVideoMaterial::updateBlending() {
void QSGVivanteVideoMaterial::setCurrentFrame(const QVideoFrame &frame, QSGVideoNode::FrameFlags flags)
{
QMutexLocker lock(&mFrameMutex);
- mNextFrame = frame;
+ mCurrentFrame = frame;
mMappable = mMapError == GL_NO_ERROR && !flags.testFlag(QSGVideoNode::FrameFiltered);
#ifdef QT_VIVANTE_VIDEO_DEBUG
@@ -124,12 +124,8 @@ void QSGVivanteVideoMaterial::bind()
}
QMutexLocker lock(&mFrameMutex);
- if (mNextFrame.isValid()) {
- mCurrentFrame.unmap();
-
- mCurrentFrame = mNextFrame;
- mCurrentTexture = vivanteMapping(mNextFrame);
- }
+ if (mCurrentFrame.isValid())
+ mCurrentTexture = vivanteMapping(mCurrentFrame);
else
glBindTexture(GL_TEXTURE_2D, mCurrentTexture);
}
@@ -229,11 +225,12 @@ GLuint QSGVivanteVideoMaterial::vivanteMapping(QVideoFrame vF)
#endif
GLuint physical = ~0U;
-#if QT_CONFIG(gstreamer_imxcommon)
+#if GST_CHECK_VERSION(1,14,0)
auto buffer = reinterpret_cast<QGstVideoBuffer *>(vF.buffer());
- GstImxPhysMemMeta *meta = GST_IMX_PHYS_MEM_META_GET(buffer->buffer());
- if (meta && meta->phys_addr)
- physical = meta->phys_addr;
+ auto mem = gst_buffer_peek_memory(buffer->buffer(), 0);
+ auto phys_addr = gst_is_phys_memory(mem) ? gst_phys_memory_get_phys_addr(mem) : 0;
+ if (phys_addr)
+ physical = phys_addr;
#endif
glBindTexture(GL_TEXTURE_2D, tmpTexId);
glTexDirectVIVMap_LOCAL(GL_TEXTURE_2D,
diff --git a/src/plugins/videonode/imx6/qsgvivantevideomaterial.h b/src/plugins/videonode/imx6/qsgvivantevideomaterial.h
index adbd960a4..db59e8fc7 100644
--- a/src/plugins/videonode/imx6/qsgvivantevideomaterial.h
+++ b/src/plugins/videonode/imx6/qsgvivantevideomaterial.h
@@ -78,7 +78,7 @@ private:
QVideoFrame::PixelFormat mFormat;
QMap<const uchar*, GLuint> mBitsToTextureMap;
- QVideoFrame mCurrentFrame, mNextFrame;
+ QVideoFrame mCurrentFrame;
GLuint mCurrentTexture;
bool mMappable;
GLenum mMapError = GL_NO_ERROR;
diff --git a/src/plugins/videonode/videonode.pro b/src/plugins/videonode/videonode.pro
index ab7295406..889862c36 100644
--- a/src/plugins/videonode/videonode.pro
+++ b/src/plugins/videonode/videonode.pro
@@ -1,7 +1,7 @@
TEMPLATE = subdirs
QT_FOR_CONFIG += gui-private multimedia-private
-qtConfig(gpu_vivante) {
+qtConfig(gpu_vivante):qtConfig(gstreamer) {
SUBDIRS += imx6
}
diff --git a/src/plugins/windowsaudio/qwindowsaudiooutput.cpp b/src/plugins/windowsaudio/qwindowsaudiooutput.cpp
index 1182647fc..6ccffc8b2 100644
--- a/src/plugins/windowsaudio/qwindowsaudiooutput.cpp
+++ b/src/plugins/windowsaudio/qwindowsaudiooutput.cpp
@@ -630,7 +630,7 @@ qreal QWindowsAudioOutput::volume() const
void QWindowsAudioOutput::reset()
{
- close();
+ stop();
}
OutputPrivate::OutputPrivate(QWindowsAudioOutput* audio)
diff --git a/src/plugins/winrt/qwinrtcameracontrol.cpp b/src/plugins/winrt/qwinrtcameracontrol.cpp
index ede3f6b04..98dd7c2f7 100644
--- a/src/plugins/winrt/qwinrtcameracontrol.cpp
+++ b/src/plugins/winrt/qwinrtcameracontrol.cpp
@@ -54,7 +54,7 @@
#include <functional>
#include <mfapi.h>
-#include <Mferror.h>
+#include <mferror.h>
#include <mfidl.h>
#include <wrl.h>
#include <windows.devices.enumeration.h>
diff --git a/src/plugins/wmf/player/mfplayersession.cpp b/src/plugins/wmf/player/mfplayersession.cpp
index 10ba25998..daf6f801f 100644
--- a/src/plugins/wmf/player/mfplayersession.cpp
+++ b/src/plugins/wmf/player/mfplayersession.cpp
@@ -47,6 +47,7 @@
#include <QtCore/qdebug.h>
#include <QtCore/qfile.h>
#include <QtCore/qbuffer.h>
+#include <QtMultimedia/qmediametadata.h>
#include "mfplayercontrol.h"
#include "mfevrvideowindowcontrol.h"
@@ -56,7 +57,7 @@
#include "mfplayersession.h"
#include "mfplayerservice.h"
#include "mfmetadatacontrol.h"
-#include <Mferror.h>
+#include <mferror.h>
#include <nserror.h>
#include "sourceresolver.h"
#include "samplegrabber.h"
@@ -277,10 +278,13 @@ MFPlayerSession::MediaType MFPlayerSession::getStreamType(IMFStreamDescriptor *s
if (!stream)
return Unknown;
- IMFMediaTypeHandler *typeHandler = NULL;
- if (SUCCEEDED(stream->GetMediaTypeHandler(&typeHandler))) {
+ struct SafeRelease {
+ IMFMediaTypeHandler *ptr = nullptr;
+ ~SafeRelease() { if (ptr) ptr->Release(); }
+ } typeHandler;
+ if (SUCCEEDED(stream->GetMediaTypeHandler(&typeHandler.ptr))) {
GUID guidMajorType;
- if (SUCCEEDED(typeHandler->GetMajorType(&guidMajorType))) {
+ if (SUCCEEDED(typeHandler.ptr->GetMajorType(&guidMajorType))) {
if (guidMajorType == MFMediaType_Audio)
return Audio;
else if (guidMajorType == MFMediaType_Video)
@@ -425,9 +429,15 @@ IMFTopologyNode* MFPlayerSession::addOutputNode(MediaType mediaType, IMFTopology
if (mediaType == Audio) {
activate = m_playerService->audioEndpointControl()->createActivate();
} else if (mediaType == Video) {
+
+ QSize resolution = m_playerService->metaDataControl()->metaData(QMediaMetaData::Resolution).toSize();
+ QRect cropRect = QRect(QPoint(), resolution);
+
if (m_playerService->videoRendererControl()) {
+ m_playerService->videoRendererControl()->setCropRect(cropRect);
activate = m_playerService->videoRendererControl()->createActivate();
} else if (m_playerService->videoWindowControl()) {
+ m_playerService->videoWindowControl()->setCropRect(cropRect);
activate = m_playerService->videoWindowControl()->createActivate();
} else {
qWarning() << "no videoWindowControl or videoRendererControl, unable to add output node for video data";
diff --git a/src/plugins/wmf/player/mftvideo.cpp b/src/plugins/wmf/player/mftvideo.cpp
index 879911d55..9dce654f2 100644
--- a/src/plugins/wmf/player/mftvideo.cpp
+++ b/src/plugins/wmf/player/mftvideo.cpp
@@ -40,7 +40,7 @@
#include "mftvideo.h"
#include "mfvideoprobecontrol.h"
#include <private/qmemoryvideobuffer_p.h>
-#include <Mferror.h>
+#include <mferror.h>
#include <strmif.h>
#include <uuids.h>
#include <InitGuid.h>
diff --git a/src/plugins/wmf/player/mfvideorenderercontrol.cpp b/src/plugins/wmf/player/mfvideorenderercontrol.cpp
index 94d5f68be..38455090b 100644
--- a/src/plugins/wmf/player/mfvideorenderercontrol.cpp
+++ b/src/plugins/wmf/player/mfvideorenderercontrol.cpp
@@ -2239,10 +2239,12 @@ public:
STDMETHODIMP DetachObject();
void setSurface(QAbstractVideoSurface *surface);
+ void setCropRect(QRect cropRect);
private:
EVRCustomPresenter *m_presenter;
QAbstractVideoSurface *m_surface;
+ QRect m_cropRect;
QMutex m_mutex;
};
@@ -2305,6 +2307,14 @@ void MFVideoRendererControl::setSurface(QAbstractVideoSurface *surface)
static_cast<VideoRendererActivate*>(m_currentActivate)->setSurface(m_surface);
}
+void MFVideoRendererControl::setCropRect(QRect cropRect)
+{
+ m_cropRect = cropRect;
+
+ if (m_presenterActivate)
+ m_presenterActivate->setCropRect(cropRect);
+}
+
void MFVideoRendererControl::customEvent(QEvent *event)
{
if (m_presenterActivate)
@@ -2365,6 +2375,7 @@ IMFActivate* MFVideoRendererControl::createActivate()
if (SUCCEEDED(MFCreateVideoRendererActivate(::GetShellWindow(), &m_currentActivate))) {
m_presenterActivate = new EVRCustomPresenterActivate;
m_currentActivate->SetUnknown(MF_ACTIVATE_CUSTOM_VIDEO_PRESENTER_ACTIVATE, m_presenterActivate);
+ m_presenterActivate->setCropRect(m_cropRect);
} else {
m_currentActivate = new VideoRendererActivate(this);
}
@@ -2388,6 +2399,7 @@ HRESULT EVRCustomPresenterActivate::ActivateObject(REFIID riid, void **ppv)
QMutexLocker locker(&m_mutex);
if (!m_presenter) {
m_presenter = new EVRCustomPresenter;
+ m_presenter->setCropRect(m_cropRect);
if (m_surface)
m_presenter->setSurface(m_surface);
}
@@ -2423,5 +2435,17 @@ void EVRCustomPresenterActivate::setSurface(QAbstractVideoSurface *surface)
m_presenter->setSurface(surface);
}
+void EVRCustomPresenterActivate::setCropRect(QRect cropRect)
+{
+ QMutexLocker locker(&m_mutex);
+ if (m_cropRect == cropRect)
+ return;
+
+ m_cropRect = cropRect;
+
+ if (m_presenter)
+ m_presenter->setCropRect(cropRect);
+}
+
#include "moc_mfvideorenderercontrol.cpp"
#include "mfvideorenderercontrol.moc"
diff --git a/src/plugins/wmf/player/mfvideorenderercontrol.h b/src/plugins/wmf/player/mfvideorenderercontrol.h
index da9e97ba9..8eab19b40 100644
--- a/src/plugins/wmf/player/mfvideorenderercontrol.h
+++ b/src/plugins/wmf/player/mfvideorenderercontrol.h
@@ -43,6 +43,7 @@
#include "qvideorenderercontrol.h"
#include <mfapi.h>
#include <mfidl.h>
+#include <qrect.h>
QT_USE_NAMESPACE
@@ -61,6 +62,8 @@ public:
IMFActivate* createActivate();
void releaseActivate();
+ void setCropRect(QRect cropRect);
+
protected:
void customEvent(QEvent *event);
@@ -74,8 +77,8 @@ private:
QAbstractVideoSurface *m_surface;
IMFActivate *m_currentActivate;
IMFSampleGrabberSinkCallback *m_callback;
-
EVRCustomPresenterActivate *m_presenterActivate;
+ QRect m_cropRect;
};
#endif
diff --git a/src/plugins/wmf/sourceresolver.cpp b/src/plugins/wmf/sourceresolver.cpp
index c6f4e8566..15ef6f0ab 100644
--- a/src/plugins/wmf/sourceresolver.cpp
+++ b/src/plugins/wmf/sourceresolver.cpp
@@ -39,7 +39,7 @@
#include "mfstream.h"
#include "sourceresolver.h"
-#include <Mferror.h>
+#include <mferror.h>
#include <nserror.h>
#include <QtCore/qfile.h>
#include <QtCore/qdebug.h>
diff --git a/src/qtmultimediaquicktools/qdeclarativevideooutput.cpp b/src/qtmultimediaquicktools/qdeclarativevideooutput.cpp
index 81dc3fcb3..a948a5218 100644
--- a/src/qtmultimediaquicktools/qdeclarativevideooutput.cpp
+++ b/src/qtmultimediaquicktools/qdeclarativevideooutput.cpp
@@ -156,7 +156,8 @@ QDeclarativeVideoOutput::~QDeclarativeVideoOutput()
to render the video frames to this VideoOutput element.
It is similar to setting a QObject with \c videoSurface property as a source,
where this video surface will be set.
- \sa setSource
+
+ \sa source
*/
QAbstractVideoSurface *QDeclarativeVideoOutput::videoSurface() const
@@ -373,6 +374,8 @@ void QDeclarativeVideoOutput::_q_updateNativeSize()
if (!m_backend)
return;
+ m_geometryDirty = true;
+
QSize size = m_backend->nativeSize();
if (!qIsDefaultAspect(m_orientation)) {
size.transpose();
@@ -381,8 +384,6 @@ void QDeclarativeVideoOutput::_q_updateNativeSize()
if (m_nativeSize != size) {
m_nativeSize = size;
- m_geometryDirty = true;
-
setImplicitWidth(size.width());
setImplicitHeight(size.height());
@@ -420,9 +421,8 @@ void QDeclarativeVideoOutput::_q_updateGeometry()
}
if (m_backend) {
- if (!m_backend->videoSurface() || m_backend->videoSurface()->isActive())
- m_backend->updateGeometry();
- else
+ m_backend->updateGeometry();
+ if (m_backend->videoSurface() && !m_backend->videoSurface()->isActive())
m_geometryDirty = true;
}
diff --git a/src/qtmultimediaquicktools/qsgvideonode_texture.cpp b/src/qtmultimediaquicktools/qsgvideonode_texture.cpp
index 473a4144f..bac143b43 100644
--- a/src/qtmultimediaquicktools/qsgvideonode_texture.cpp
+++ b/src/qtmultimediaquicktools/qsgvideonode_texture.cpp
@@ -53,6 +53,13 @@ QList<QVideoFrame::PixelFormat> QSGVideoNodeFactory_Texture::supportedPixelForma
{
QList<QVideoFrame::PixelFormat> pixelFormats;
+#ifdef Q_OS_MACOS
+ if (handleType == QAbstractVideoBuffer::GLTextureRectangleHandle) {
+ pixelFormats.append(QVideoFrame::Format_BGR32);
+ pixelFormats.append(QVideoFrame::Format_BGRA32);
+ }
+#endif
+
if (handleType == QAbstractVideoBuffer::GLTextureHandle) {
pixelFormats.append(QVideoFrame::Format_RGB565);
pixelFormats.append(QVideoFrame::Format_RGB32);
@@ -82,8 +89,6 @@ public:
QSGVideoMaterialShader_Texture()
: QSGMaterialShader()
{
- setShaderSourceFile(QOpenGLShader::Vertex, QStringLiteral(":/qtmultimediaquicktools/shaders/monoplanarvideo.vert"));
- setShaderSourceFile(QOpenGLShader::Fragment, QStringLiteral(":/qtmultimediaquicktools/shaders/rgbvideo.frag"));
}
void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial) override;
@@ -109,10 +114,20 @@ protected:
int m_id_opacity;
};
-class QSGVideoMaterialShader_Texture_swizzle : public QSGVideoMaterialShader_Texture
+class QSGVideoMaterialShader_Texture_2D : public QSGVideoMaterialShader_Texture
{
public:
- QSGVideoMaterialShader_Texture_swizzle(bool hasAlpha)
+ QSGVideoMaterialShader_Texture_2D()
+ {
+ setShaderSourceFile(QOpenGLShader::Vertex, QStringLiteral(":/qtmultimediaquicktools/shaders/monoplanarvideo.vert"));
+ setShaderSourceFile(QOpenGLShader::Fragment, QStringLiteral(":/qtmultimediaquicktools/shaders/rgbvideo.frag"));
+ }
+};
+
+class QSGVideoMaterialShader_Texture_2D_swizzle : public QSGVideoMaterialShader_Texture_2D
+{
+public:
+ QSGVideoMaterialShader_Texture_2D_swizzle(bool hasAlpha)
: m_hasAlpha(hasAlpha)
{
setShaderSourceFile(QOpenGLShader::Fragment, QStringLiteral(":/qtmultimediaquicktools/shaders/rgbvideo_swizzle.frag"));
@@ -120,14 +135,13 @@ public:
protected:
void initialize() override {
- QSGVideoMaterialShader_Texture::initialize();
+ QSGVideoMaterialShader_Texture_2D::initialize();
program()->setUniformValue(program()->uniformLocation("hasAlpha"), GLboolean(m_hasAlpha));
}
int m_hasAlpha;
};
-
class QSGVideoMaterial_Texture : public QSGMaterial
{
public:
@@ -149,12 +163,6 @@ public:
return needsSwizzling() ? &swizzleType : &normalType;
}
- QSGMaterialShader *createShader() const override {
- const bool hasAlpha = m_format.pixelFormat() == QVideoFrame::Format_ARGB32;
- return needsSwizzling() ? new QSGVideoMaterialShader_Texture_swizzle(hasAlpha)
- : new QSGVideoMaterialShader_Texture;
- }
-
int compare(const QSGMaterial *other) const override {
const QSGVideoMaterial_Texture *m = static_cast<const QSGVideoMaterial_Texture *>(other);
@@ -179,9 +187,42 @@ public:
void setVideoFrame(const QVideoFrame &frame) {
QMutexLocker lock(&m_frameMutex);
m_frame = frame;
+ m_textureSize = frame.size();
+ }
+
+ virtual void bind() = 0;
+
+ QVideoFrame m_frame;
+ QMutex m_frameMutex;
+ QSize m_textureSize;
+ QVideoSurfaceFormat m_format;
+ GLuint m_textureId;
+ qreal m_opacity;
+
+protected:
+ bool needsSwizzling() const {
+ return !QMediaOpenGLHelper::isANGLE()
+ && (m_format.pixelFormat() == QVideoFrame::Format_RGB32
+ || m_format.pixelFormat() == QVideoFrame::Format_ARGB32);
+ }
+};
+
+class QSGVideoMaterial_Texture_2D : public QSGVideoMaterial_Texture
+{
+public:
+ QSGVideoMaterial_Texture_2D(const QVideoSurfaceFormat &format) :
+ QSGVideoMaterial_Texture(format)
+ {
}
- void bind()
+ QSGMaterialShader *createShader() const override
+ {
+ const bool hasAlpha = m_format.pixelFormat() == QVideoFrame::Format_ARGB32;
+ return needsSwizzling() ? new QSGVideoMaterialShader_Texture_2D_swizzle(hasAlpha)
+ : new QSGVideoMaterialShader_Texture_2D;
+ }
+
+ void bind() override
{
QMutexLocker lock(&m_frameMutex);
if (m_frame.isValid()) {
@@ -197,28 +238,84 @@ public:
m_textureId = 0;
}
}
+};
+
+#ifdef Q_OS_MACOS
+class QSGVideoMaterialShader_Texture_Rectangle : public QSGVideoMaterialShader_Texture
+{
+public:
+ QSGVideoMaterialShader_Texture_Rectangle()
+ {
+ setShaderSourceFile(QOpenGLShader::Vertex, QStringLiteral(":/qtmultimediaquicktools/shaders/rectsampler.vert"));
+ setShaderSourceFile(QOpenGLShader::Fragment, QStringLiteral(":/qtmultimediaquicktools/shaders/rectsampler_rgb.frag"));
+ }
- QVideoFrame m_frame;
- QMutex m_frameMutex;
- QSize m_textureSize;
- QVideoSurfaceFormat m_format;
- GLuint m_textureId;
- qreal m_opacity;
+ void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial) override
+ {
+ QSGVideoMaterial_Texture *mat = static_cast<QSGVideoMaterial_Texture *>(newMaterial);
+ QVector2D size(mat->m_textureSize.width(), mat->m_textureSize.height());
+ program()->setUniformValue(m_id_videoSize, size);
-private:
- bool needsSwizzling() const {
- return !QMediaOpenGLHelper::isANGLE()
- && (m_format.pixelFormat() == QVideoFrame::Format_RGB32
- || m_format.pixelFormat() == QVideoFrame::Format_ARGB32);
+ QSGVideoMaterialShader_Texture::updateState(state, newMaterial, oldMaterial);
}
+
+protected:
+ void initialize() override
+ {
+ QSGVideoMaterialShader_Texture::initialize();
+ m_id_videoSize = program()->uniformLocation("qt_videoSize");
+ }
+
+ int m_id_videoSize;
};
+class QSGVideoMaterial_Texture_Rectangle : public QSGVideoMaterial_Texture
+{
+public:
+ QSGVideoMaterial_Texture_Rectangle(const QVideoSurfaceFormat &format) :
+ QSGVideoMaterial_Texture(format)
+ {
+ }
+
+ QSGMaterialShader *createShader() const override
+ {
+ Q_ASSERT(!needsSwizzling());
+ return new QSGVideoMaterialShader_Texture_Rectangle;
+ }
+
+ void bind() override
+ {
+ QMutexLocker lock(&m_frameMutex);
+ if (m_frame.isValid()) {
+ m_textureId = m_frame.handle().toUInt();
+ QOpenGLFunctions *functions = QOpenGLContext::currentContext()->functions();
+ functions->glActiveTexture(GL_TEXTURE0);
+ functions->glBindTexture(GL_TEXTURE_RECTANGLE, m_textureId);
+
+ functions->glTexParameteri(GL_TEXTURE_RECTANGLE, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ functions->glTexParameteri(GL_TEXTURE_RECTANGLE, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ functions->glTexParameteri(GL_TEXTURE_RECTANGLE, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ functions->glTexParameteri(GL_TEXTURE_RECTANGLE, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ } else {
+ m_textureId = 0;
+ }
+ }
+};
+#endif
QSGVideoNode_Texture::QSGVideoNode_Texture(const QVideoSurfaceFormat &format) :
m_format(format)
{
setFlag(QSGNode::OwnsMaterial);
- m_material = new QSGVideoMaterial_Texture(format);
+
+#ifdef Q_OS_MACOS
+ if (format.handleType() == QAbstractVideoBuffer::GLTextureRectangleHandle)
+ m_material = new QSGVideoMaterial_Texture_Rectangle(format);
+#endif
+
+ if (!m_material)
+ m_material = new QSGVideoMaterial_Texture_2D(format);
+
setMaterial(m_material);
}
diff --git a/src/qtmultimediaquicktools/qsgvideonode_texture_p.h b/src/qtmultimediaquicktools/qsgvideonode_texture_p.h
index 12685dd24..21c905bd5 100644
--- a/src/qtmultimediaquicktools/qsgvideonode_texture_p.h
+++ b/src/qtmultimediaquicktools/qsgvideonode_texture_p.h
@@ -74,7 +74,7 @@ public:
private:
QVideoSurfaceFormat m_format;
- QSGVideoMaterial_Texture *m_material;
+ QSGVideoMaterial_Texture *m_material = nullptr;
QVideoFrame m_frame;
};
diff --git a/src/qtmultimediaquicktools/qtmultimediaquicktools.qrc b/src/qtmultimediaquicktools/qtmultimediaquicktools.qrc
index b8180e31f..86523e771 100644
--- a/src/qtmultimediaquicktools/qtmultimediaquicktools.qrc
+++ b/src/qtmultimediaquicktools/qtmultimediaquicktools.qrc
@@ -11,7 +11,6 @@
<file>shaders/triplanaryuvvideo.vert</file>
<file>shaders/uyvyvideo.frag</file>
<file>shaders/yuyvvideo.frag</file>
-
<file>shaders/monoplanarvideo_core.vert</file>
<file>shaders/rgbvideo_core.frag</file>
<file>shaders/rgbvideo_swizzle_core.frag</file>
@@ -23,5 +22,9 @@
<file>shaders/triplanaryuvvideo_core.vert</file>
<file>shaders/uyvyvideo_core.frag</file>
<file>shaders/yuyvvideo_core.frag</file>
+ <file>shaders/rectsampler.vert</file>
+ <file>shaders/rectsampler_rgb.frag</file>
+ <file>shaders/rectsampler_core.vert</file>
+ <file>shaders/rectsampler_rgb_core.frag</file>
</qresource>
</RCC>
diff --git a/src/qtmultimediaquicktools/shaders/rectsampler.vert b/src/qtmultimediaquicktools/shaders/rectsampler.vert
new file mode 100644
index 000000000..762ec7e7e
--- /dev/null
+++ b/src/qtmultimediaquicktools/shaders/rectsampler.vert
@@ -0,0 +1,10 @@
+uniform highp mat4 qt_Matrix;
+uniform highp vec2 qt_videoSize;
+attribute highp vec4 qt_VertexPosition;
+attribute highp vec2 qt_VertexTexCoord;
+varying highp vec2 qt_TexCoord;
+
+void main() {
+ qt_TexCoord = vec2(qt_VertexTexCoord.x * qt_videoSize.x, qt_VertexTexCoord.y * qt_videoSize.y);
+ gl_Position = qt_Matrix * qt_VertexPosition;
+}
diff --git a/src/qtmultimediaquicktools/shaders/rectsampler_core.vert b/src/qtmultimediaquicktools/shaders/rectsampler_core.vert
new file mode 100644
index 000000000..f0fe02349
--- /dev/null
+++ b/src/qtmultimediaquicktools/shaders/rectsampler_core.vert
@@ -0,0 +1,11 @@
+#version 150 core
+uniform highp mat4 qt_Matrix;
+uniform highp vec2 qt_videoSize;
+in highp vec4 qt_VertexPosition;
+in highp vec2 qt_VertexTexCoord;
+out highp vec2 qt_TexCoord;
+
+void main() {
+ qt_TexCoord = vec2(qt_VertexTexCoord.x * qt_videoSize.x, qt_VertexTexCoord.y * qt_videoSize.y);
+ gl_Position = qt_Matrix * qt_VertexPosition;
+}
diff --git a/src/qtmultimediaquicktools/shaders/rectsampler_rgb.frag b/src/qtmultimediaquicktools/shaders/rectsampler_rgb.frag
new file mode 100644
index 000000000..2a30f7c3d
--- /dev/null
+++ b/src/qtmultimediaquicktools/shaders/rectsampler_rgb.frag
@@ -0,0 +1,8 @@
+uniform sampler2DRect rgbTexture;
+uniform lowp float opacity;
+varying highp vec2 qt_TexCoord;
+
+void main()
+{
+ gl_FragColor = texture2DRect(rgbTexture, qt_TexCoord) * opacity;
+}
diff --git a/src/qtmultimediaquicktools/shaders/rectsampler_rgb_core.frag b/src/qtmultimediaquicktools/shaders/rectsampler_rgb_core.frag
new file mode 100644
index 000000000..17f306456
--- /dev/null
+++ b/src/qtmultimediaquicktools/shaders/rectsampler_rgb_core.frag
@@ -0,0 +1,10 @@
+#version 150 core
+uniform sampler2DRect rgbTexture;
+uniform lowp float opacity;
+in highp vec2 qt_TexCoord;
+out vec4 fragColor;
+
+void main()
+{
+ fragColor = texture(rgbTexture, qt_TexCoord) * opacity;
+}