author     Liang Qi <liang.qi@qt.io>    2018-01-18 18:01:58 +0100
committer  Liang Qi <liang.qi@qt.io>    2018-01-18 18:01:58 +0100
commit     2e9d1fa5ef1367e5d2ace88bc47f93b8fe8a039b (patch)
tree       e34abd8ef99ae1abb2730a3772debe3fc9b26144 /src
parent     b8609cd3ffeddd02a514c6b5f9170c8017966e41 (diff)
parent     7638848d2486e2fac9764b60a99de08f50adc8eb (diff)
Merge remote-tracking branch 'origin/5.9' into 5.10
Diffstat (limited to 'src')
-rw-r--r--  src/multimedia/doc/src/qtmultimedia-index.qdoc                 9
-rw-r--r--  src/multimediawidgets/doc/src/qtmultimediawidgets-index.qdoc   9
-rw-r--r--  src/plugins/avfoundation/camera/avfcamerautility.mm            2
-rw-r--r--  src/plugins/avfoundation/mediaplayer/avfmediaplayersession.h   3
-rw-r--r--  src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm  41
-rw-r--r--  src/plugins/directshow/player/directshowplayerservice.cpp      31
-rw-r--r--  src/plugins/directshow/player/directshowplayerservice.h        4
-rw-r--r--  src/qtmultimediaquicktools/qsgvideonode_yuv.cpp                32
-rw-r--r--  src/qtmultimediaquicktools/shaders/uyvyvideo.frag              18
-rw-r--r--  src/qtmultimediaquicktools/shaders/yuyvvideo.frag              18
10 files changed, 86 insertions(+), 81 deletions(-)
diff --git a/src/multimedia/doc/src/qtmultimedia-index.qdoc b/src/multimedia/doc/src/qtmultimedia-index.qdoc
index 269e3d24a..5e4ba3c58 100644
--- a/src/multimedia/doc/src/qtmultimedia-index.qdoc
+++ b/src/multimedia/doc/src/qtmultimedia-index.qdoc
@@ -139,6 +139,15 @@
\li Base class for video presentation.
\endtable
+ \section1 Licenses and Attributions
+
+ The Qt Quick Multimedia module is available under commercial licenses from \l{The Qt Company}.
+ In addition, it is available under free software licenses. Since Qt 5.4,
+ these free software licenses are
+ \l{GNU Lesser General Public License, version 3}, or
+ the \l{GNU General Public License, version 2}.
+ See \l{Qt Licensing} for further details.
+
\section1 Related Information
\section2 Guides
diff --git a/src/multimediawidgets/doc/src/qtmultimediawidgets-index.qdoc b/src/multimediawidgets/doc/src/qtmultimediawidgets-index.qdoc
index 0c2271ef0..fc5bd6df6 100644
--- a/src/multimediawidgets/doc/src/qtmultimediawidgets-index.qdoc
+++ b/src/multimediawidgets/doc/src/qtmultimediawidgets-index.qdoc
@@ -48,6 +48,15 @@ file:
QT += multimediawidgets
\endcode
+\section1 Licenses and Attributions
+
+The Qt Quick Multimedia Widgets module is available under commercial licenses from \l{The Qt Company}.
+In addition, it is available under free software licenses. Since Qt 5.4,
+these free software licenses are
+\l{GNU Lesser General Public License, version 3}, or
+the \l{GNU General Public License, version 2}.
+See \l{Qt Licensing} for further details.
+
\section1 Related Information
\section2 Reference
diff --git a/src/plugins/avfoundation/camera/avfcamerautility.mm b/src/plugins/avfoundation/camera/avfcamerautility.mm
index 22713d613..8a2254c2e 100644
--- a/src/plugins/avfoundation/camera/avfcamerautility.mm
+++ b/src/plugins/avfoundation/camera/avfcamerautility.mm
@@ -540,12 +540,14 @@ void qt_set_framerate_limits(AVCaptureDevice *captureDevice, qreal minFPS, qreal
void qt_set_framerate_limits(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection,
qreal minFPS, qreal maxFPS)
{
+ Q_UNUSED(videoConnection);
Q_ASSERT(captureDevice);
qt_set_framerate_limits(captureDevice, minFPS, maxFPS);
}
AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection)
{
+ Q_UNUSED(videoConnection);
Q_ASSERT(captureDevice);
AVFPSRange fps;
diff --git a/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.h b/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.h
index ef854e23f..9a55b7b74 100644
--- a/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.h
+++ b/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.h
@@ -105,6 +105,9 @@ public Q_SLOTS:
void processLoadStateChange();
void processLoadStateFailure();
+
+ void processDurationChange(qint64 duration);
+
Q_SIGNALS:
void positionChanged(qint64 position);
void durationChanged(qint64 duration);
diff --git a/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm b/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
index f4eb8a4ea..b6fe01053 100644
--- a/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
+++ b/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
@@ -55,12 +55,14 @@ static NSString* const AVF_PLAYABLE_KEY = @"playable";
static NSString* const AVF_STATUS_KEY = @"status";
//AVPlayer keys
-static NSString* const AVF_RATE_KEY = @"rate";
-static NSString* const AVF_CURRENT_ITEM_KEY = @"currentItem";
+static NSString* const AVF_RATE_KEY = @"rate";
+static NSString* const AVF_CURRENT_ITEM_KEY = @"currentItem";
+static NSString* const AVF_CURRENT_ITEM_DURATION_KEY = @"currentItem.duration";
static void *AVFMediaPlayerSessionObserverRateObservationContext = &AVFMediaPlayerSessionObserverRateObservationContext;
static void *AVFMediaPlayerSessionObserverStatusObservationContext = &AVFMediaPlayerSessionObserverStatusObservationContext;
static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMediaPlayerSessionObserverCurrentItemObservationContext;
+static void *AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext = &AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext;
@interface AVFMediaPlayerSessionObserver : NSObject
{
@@ -150,6 +152,7 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
}
if (m_player) {
[m_player setRate:0.0];
+ [m_player removeObserver:self forKeyPath:AVF_CURRENT_ITEM_DURATION_KEY];
[m_player removeObserver:self forKeyPath:AVF_CURRENT_ITEM_KEY];
[m_player removeObserver:self forKeyPath:AVF_RATE_KEY];
[m_player release];
@@ -261,6 +264,12 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
context:AVFMediaPlayerSessionObserverRateObservationContext];
+ //Observe the current item's duration to be notified when it changes
+ [m_player addObserver:self
+ forKeyPath:AVF_CURRENT_ITEM_DURATION_KEY
+ options:0
+ context:AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext];
+
}
-(void) assetFailedToPrepareForPlayback:(NSError *)error
@@ -343,6 +352,13 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
if (m_playerItem != newPlayerItem)
m_playerItem = newPlayerItem;
}
+ else if (context == AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext)
+ {
+ const CMTime time = [m_playerItem duration];
+ const qint64 duration = static_cast<qint64>(float(time.value) / float(time.timescale) * 1000.0f);
+ if (self.session)
+ QMetaObject::invokeMethod(m_session, "processDurationChange", Qt::AutoConnection, Q_ARG(qint64, duration));
+ }
else
{
[super observeValueForKeyPath:path ofObject:object change:change context:context];
@@ -515,13 +531,7 @@ qint64 AVFMediaPlayerSession::duration() const
#ifdef QT_DEBUG_AVF
qDebug() << Q_FUNC_INFO;
#endif
- AVPlayerItem *playerItem = [(AVFMediaPlayerSessionObserver*)m_observer playerItem];
-
- if (!playerItem)
- return 0;
-
- CMTime time = [playerItem duration];
- return static_cast<quint64>(float(time.value) / float(time.timescale) * 1000.0f);
+ return m_duration;
}
int AVFMediaPlayerSession::bufferStatus() const
@@ -856,10 +866,6 @@ void AVFMediaPlayerSession::processLoadStateChange(QMediaPlayer::State newState)
}
}
- qint64 currentDuration = duration();
- if (m_duration != currentDuration)
- Q_EMIT durationChanged(m_duration = currentDuration);
-
if (m_requestedPosition != -1) {
setPosition(m_requestedPosition);
m_requestedPosition = -1;
@@ -892,6 +898,15 @@ void AVFMediaPlayerSession::processLoadStateFailure()
Q_EMIT stateChanged((m_state = QMediaPlayer::StoppedState));
}
+void AVFMediaPlayerSession::processDurationChange(qint64 duration)
+{
+ if (duration == m_duration)
+ return;
+
+ m_duration = duration;
+ Q_EMIT durationChanged(duration);
+}
+
void AVFMediaPlayerSession::processPositionChange()
{
if (m_state == QMediaPlayer::StoppedState)
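The AVFoundation change above stops computing the duration on demand in duration() and instead observes the "currentItem.duration" key path: when AVFoundation reports a new duration, the observer converts the CMTime into milliseconds and forwards it to the session through QMetaObject::invokeMethod(), and processDurationChange() emits durationChanged() only when the value actually changed. Below is a minimal stand-alone C++ sketch of that conversion and change guard; CMTimeLike, DurationTracker and the callback are stand-ins for CoreMedia's CMTime, the session object and the Qt signal, and the code is illustrative rather than taken from the commit.

    #include <cstdint>
    #include <functional>
    #include <iostream>

    // Stand-in for CoreMedia's CMTime: a rational time value (value / timescale seconds).
    struct CMTimeLike {
        int64_t value;
        int32_t timescale;
    };

    // Same conversion the commit uses: seconds * 1000, truncated to a 64-bit integer.
    int64_t toMilliseconds(CMTimeLike time)
    {
        if (time.timescale == 0)              // indefinite/invalid duration
            return 0;
        return static_cast<int64_t>(float(time.value) / float(time.timescale) * 1000.0f);
    }

    // Mirrors AVFMediaPlayerSession::processDurationChange(): cache the value and
    // notify only when it actually changed.
    class DurationTracker
    {
    public:
        explicit DurationTracker(std::function<void(int64_t)> onChanged)
            : m_onChanged(std::move(onChanged)) {}

        void processDurationChange(int64_t duration)
        {
            if (duration == m_duration)
                return;
            m_duration = duration;
            m_onChanged(duration);
        }

    private:
        int64_t m_duration = 0;
        std::function<void(int64_t)> m_onChanged;
    };

    int main()
    {
        DurationTracker tracker([](int64_t ms) { std::cout << "durationChanged: " << ms << " ms\n"; });
        tracker.processDurationChange(toMilliseconds({90000, 600}));  // -> 150000 ms, notifies
        tracker.processDurationChange(toMilliseconds({90000, 600}));  // unchanged, no notification
    }

In the actual code the value is routed through QMetaObject::invokeMethod() with Qt::AutoConnection, which queues the call onto the session's thread if the KVO notification arrives on a different one.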
diff --git a/src/plugins/directshow/player/directshowplayerservice.cpp b/src/plugins/directshow/player/directshowplayerservice.cpp
index 2218ca5ed..8ee5d67a1 100644
--- a/src/plugins/directshow/player/directshowplayerservice.cpp
+++ b/src/plugins/directshow/player/directshowplayerservice.cpp
@@ -318,15 +318,18 @@ void DirectShowPlayerService::load(const QMediaContent &media, QIODevice *stream
m_graphStatus = InvalidMedia;
m_error = QMediaPlayer::ResourceError;
} else {
+ // {36b73882-c2c8-11cf-8b46-00805f6cef60}
+ static const GUID iid_IFilterGraph2 = {
+ 0x36b73882, 0xc2c8, 0x11cf, {0x8b, 0x46, 0x00, 0x80, 0x5f, 0x6c, 0xef, 0x60} };
m_graphStatus = Loading;
+ m_graph = com_new<IFilterGraph2>(CLSID_FilterGraph, iid_IFilterGraph2);
+
if (stream)
m_pendingTasks = SetStreamSource;
else
m_pendingTasks = SetUrlSource;
- m_pendingTasks |= CreateGraph;
-
::SetEvent(m_taskHandle);
}
@@ -337,17 +340,6 @@ void DirectShowPlayerService::load(const QMediaContent &media, QIODevice *stream
updateStatus();
}
-void DirectShowPlayerService::doCreateGraph(QMutexLocker *locker)
-{
- Q_UNUSED(locker);
-
- // {36b73882-c2c8-11cf-8b46-00805f6cef60}
- static const GUID iid_IFilterGraph2 = {
- 0x36b73882, 0xc2c8, 0x11cf, {0x8b, 0x46, 0x00, 0x80, 0x5f, 0x6c, 0xef, 0x60} };
-
- m_graph = com_new<IFilterGraph2>(CLSID_FilterGraphNoThread, iid_IFilterGraph2);
-}
-
void DirectShowPlayerService::doSetUrlSource(QMutexLocker *locker)
{
IBaseFilter *source = 0;
@@ -1694,8 +1686,6 @@ void DirectShowPlayerService::run()
{
QMutexLocker locker(&m_mutex);
- CoInitialize(NULL);
-
for (;;) {
while (m_pendingTasks == 0) {
DWORD result = 0;
@@ -1710,17 +1700,12 @@ void DirectShowPlayerService::run()
}
locker.relock();
- if (m_graph && result == WAIT_OBJECT_0 + 1) {
+ if (result == WAIT_OBJECT_0 + 1) {
graphEvent(&locker);
}
}
- if (m_pendingTasks & CreateGraph) {
- m_pendingTasks ^= CreateGraph;
- m_executingTask = CreateGraph;
-
- doCreateGraph(&locker);
- } else if (m_pendingTasks & ReleaseGraph) {
+ if (m_pendingTasks & ReleaseGraph) {
m_pendingTasks ^= ReleaseGraph;
m_executingTask = ReleaseGraph;
@@ -1813,8 +1798,6 @@ void DirectShowPlayerService::run()
}
m_executingTask = 0;
}
-
- CoUninitialize();
}
QT_END_NAMESPACE
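The DirectShow change above removes the separate CreateGraph task: the worker thread no longer calls CoInitialize()/CoUninitialize() and doCreateGraph() (which used CLSID_FilterGraphNoThread); instead load() creates the graph directly with CLSID_FilterGraph before queuing the source task, and the run() loop no longer needs the m_graph check before handling graph events. For reference, here is a plain-COM sketch of roughly what the com_new<IFilterGraph2>(CLSID_FilterGraph, iid_IFilterGraph2) call amounts to; it is not from the commit and assumes COM has already been initialized on the calling thread.

    #include <dshow.h>
    #include <cstdio>

    // Roughly what com_new<IFilterGraph2>(CLSID_FilterGraph, iid_IFilterGraph2) boils down to.
    // Assumes CoInitialize()/CoInitializeEx() has already been called on this thread.
    // Link against strmiids.lib and ole32.lib.
    IFilterGraph2 *createFilterGraph()
    {
        IFilterGraph2 *graph = nullptr;
        const HRESULT hr = CoCreateInstance(CLSID_FilterGraph, nullptr, CLSCTX_INPROC_SERVER,
                                            IID_IFilterGraph2, reinterpret_cast<void **>(&graph));
        if (FAILED(hr)) {
            std::fprintf(stderr, "CoCreateInstance(CLSID_FilterGraph) failed: 0x%08lx\n",
                         static_cast<unsigned long>(hr));
            return nullptr;
        }
        return graph;  // caller owns the reference; release with graph->Release()
    }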
diff --git a/src/plugins/directshow/player/directshowplayerservice.h b/src/plugins/directshow/player/directshowplayerservice.h
index cc7b4dd3e..01d05449e 100644
--- a/src/plugins/directshow/player/directshowplayerservice.h
+++ b/src/plugins/directshow/player/directshowplayerservice.h
@@ -124,7 +124,6 @@ private:
void run();
- void doCreateGraph(QMutexLocker *locker);
void doSetUrlSource(QMutexLocker *locker);
void doSetStreamSource(QMutexLocker *locker);
void doRender(QMutexLocker *locker);
@@ -170,8 +169,7 @@ private:
ReleaseVideoProbe = 0x40000,
ReleaseFilters = ReleaseGraph | ReleaseAudioOutput
| ReleaseVideoOutput | ReleaseAudioProbe
- | ReleaseVideoProbe,
- CreateGraph = 0x80000
+ | ReleaseVideoProbe
};
enum Event
diff --git a/src/qtmultimediaquicktools/qsgvideonode_yuv.cpp b/src/qtmultimediaquicktools/qsgvideonode_yuv.cpp
index 38b5af943..b04c6b38b 100644
--- a/src/qtmultimediaquicktools/qsgvideonode_yuv.cpp
+++ b/src/qtmultimediaquicktools/qsgvideonode_yuv.cpp
@@ -134,16 +134,16 @@ public:
protected:
void initialize() Q_DECL_OVERRIDE {
m_id_matrix = program()->uniformLocation("qt_Matrix");
- m_id_yuvtexture = program()->uniformLocation("yuvTexture");
- m_id_imageWidth = program()->uniformLocation("imageWidth");
+ m_id_yTexture = program()->uniformLocation("yTexture");
+ m_id_uvTexture = program()->uniformLocation("uvTexture");
m_id_colorMatrix = program()->uniformLocation("colorMatrix");
m_id_opacity = program()->uniformLocation("opacity");
QSGMaterialShader::initialize();
}
int m_id_matrix;
- int m_id_yuvtexture;
- int m_id_imageWidth;
+ int m_id_yTexture;
+ int m_id_uvTexture;
int m_id_colorMatrix;
int m_id_opacity;
};
@@ -291,7 +291,7 @@ QSGVideoMaterial_YUV::QSGVideoMaterial_YUV(const QVideoSurfaceFormat &format) :
case QVideoFrame::Format_UYVY:
case QVideoFrame::Format_YUYV:
default:
- m_planeCount = 1;
+ m_planeCount = 2;
break;
}
@@ -355,12 +355,19 @@ void QSGVideoMaterial_YUV::bind()
if (m_format.pixelFormat() == QVideoFrame::Format_UYVY
|| m_format.pixelFormat() == QVideoFrame::Format_YUYV) {
- int fw = m_frame.width() / 2;
+ int fw = m_frame.width();
m_planeWidth[0] = fw;
-
- functions->glActiveTexture(GL_TEXTURE0);
- bindTexture(m_textureIds[0], fw, m_frame.height(), m_frame.bits(), GL_RGBA);
-
+ // In the packed YUYV/UYVY texture the UV plane appears at 1/2 of the image (Y) width.
+ m_planeWidth[1] = fw / 2;
+ functions->glActiveTexture(GL_TEXTURE1);
+ // Either g,a (YUYV) or r,b (UYVY) values are used as the source of UV.
+ // Additionally, U and V are shared per 2 pixels, hence only 1/2 of the image width is used.
+ // Interpreting this properly in the shaders avoids copying the data or adding conditionals;
+ // only the interpretation of the data changes.
+ bindTexture(m_textureIds[1], m_planeWidth[1], m_frame.height(), m_frame.bits(), GL_RGBA);
+ functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit
+ // Either red (YUYV) or alpha (UYVY) values are used as source of Y
+ bindTexture(m_textureIds[0], m_planeWidth[0], m_frame.height(), m_frame.bits(), GL_LUMINANCE_ALPHA);
} else if (m_format.pixelFormat() == QVideoFrame::Format_NV12
|| m_format.pixelFormat() == QVideoFrame::Format_NV21) {
const int y = 0;
@@ -473,13 +480,12 @@ void QSGVideoMaterialShader_UYVY::updateState(const RenderState &state,
Q_UNUSED(oldMaterial);
QSGVideoMaterial_YUV *mat = static_cast<QSGVideoMaterial_YUV *>(newMaterial);
-
- program()->setUniformValue(m_id_yuvtexture, 0);
+ program()->setUniformValue(m_id_yTexture, 0);
+ program()->setUniformValue(m_id_uvTexture, 1);
mat->bind();
program()->setUniformValue(m_id_colorMatrix, mat->m_colorMatrix);
- program()->setUniformValue(m_id_imageWidth, mat->m_frame.width());
if (state.isOpacityDirty()) {
mat->m_opacity = state.opacity();
diff --git a/src/qtmultimediaquicktools/shaders/uyvyvideo.frag b/src/qtmultimediaquicktools/shaders/uyvyvideo.frag
index 905035703..5c62441c2 100644
--- a/src/qtmultimediaquicktools/shaders/uyvyvideo.frag
+++ b/src/qtmultimediaquicktools/shaders/uyvyvideo.frag
@@ -1,22 +1,12 @@
-uniform sampler2D yuvTexture; // UYVY macropixel texture passed as RGBA format
-uniform mediump float imageWidth; // The UYVY texture appears to the shader with 1/2 the image width since we use the RGBA format to pass UYVY
+uniform sampler2D yTexture; // Y component passed as GL_LUMINANCE_ALPHA, in uyvy Y = a
+uniform sampler2D uvTexture; // UV component passed as RGBA macropixel, in uyvy U = r, V = b
uniform mediump mat4 colorMatrix;
uniform lowp float opacity;
-
varying highp vec2 qt_TexCoord;
void main()
{
- // For U0 Y0 V0 Y1 macropixel, lookup Y0 or Y1 based on whether
- // the original texture x coord is even or odd.
- mediump float Y;
- if (fract(floor(qt_TexCoord.x * imageWidth + 0.5) / 2.0) > 0.0)
- Y = texture2D(yuvTexture, qt_TexCoord).a; // odd so choose Y1
- else
- Y = texture2D(yuvTexture, qt_TexCoord).g; // even so choose Y0
- mediump float Cb = texture2D(yuvTexture, qt_TexCoord).r;
- mediump float Cr = texture2D(yuvTexture, qt_TexCoord).b;
+ mediump vec3 YUV = vec3(texture2D(yTexture, qt_TexCoord).a, texture2D(uvTexture, qt_TexCoord).rb);
- mediump vec4 color = vec4(Y, Cb, Cr, 1.0);
- gl_FragColor = colorMatrix * color * opacity;
+ gl_FragColor = colorMatrix * vec4(YUV, 1.0) * opacity;
}
diff --git a/src/qtmultimediaquicktools/shaders/yuyvvideo.frag b/src/qtmultimediaquicktools/shaders/yuyvvideo.frag
index 72732cd66..5ce8b7366 100644
--- a/src/qtmultimediaquicktools/shaders/yuyvvideo.frag
+++ b/src/qtmultimediaquicktools/shaders/yuyvvideo.frag
@@ -1,22 +1,12 @@
-uniform sampler2D yuvTexture; // YUYV macropixel texture passed as RGBA format
-uniform mediump float imageWidth; // The YUYV texture appears to the shader with 1/2 the image width since we use the RGBA format to pass YUYV
+uniform sampler2D yTexture; // Y component passed as GL_LUMINANCE_ALPHA, in yuyv Y = r
+uniform sampler2D uvTexture; // UV component passed as RGBA macropixel, in yuyv U = g, V = a
uniform mediump mat4 colorMatrix;
uniform lowp float opacity;
-
varying highp vec2 qt_TexCoord;
void main()
{
- // For Y0 U0 Y1 V0 macropixel, lookup Y0 or Y1 based on whether
- // the original texture x coord is even or odd.
- mediump float Y;
- if (fract(floor(qt_TexCoord.x * imageWidth + 0.5) / 2.0) > 0.0)
- Y = texture2D(yuvTexture, qt_TexCoord).b; // odd so choose Y1
- else
- Y = texture2D(yuvTexture, qt_TexCoord).r; // even so choose Y0
- mediump float Cb = texture2D(yuvTexture, qt_TexCoord).g;
- mediump float Cr = texture2D(yuvTexture, qt_TexCoord).a;
+ mediump vec3 YUV = vec3(texture2D(yTexture, qt_TexCoord).r, texture2D(uvTexture, qt_TexCoord).ga);
- mediump vec4 color = vec4(Y, Cb, Cr, 1.0);
- gl_FragColor = colorMatrix * color * opacity;
+ gl_FragColor = colorMatrix * vec4(YUV, 1.0) * opacity;
}
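To illustrate the two-texture trick used by the shader changes above: the same packed bytes are bound twice, once as GL_LUMINANCE_ALPHA at full image width (two bytes per texel, so each texel carries one Y sample, with luminance replicated into r/g/b and the second byte in a) and once as GL_RGBA at half width (four bytes per texel, i.e. one macropixel). The following stand-alone C++ sketch shows how one UYVY macropixel is seen through both views, following the channel assignments documented in the shaders; it is illustrative, not code from the commit.

    #include <array>
    #include <cstdint>
    #include <iostream>

    // One UYVY macropixel: U0 Y0 V0 Y1 (two horizontal pixels share U0/V0).
    struct Macropixel { uint8_t u, y0, v, y1; };

    int main()
    {
        const Macropixel mp{128, 81, 128, 90};

        // View 1: GL_LUMINANCE_ALPHA at full width -> two texels per macropixel,
        // luminance = first byte, alpha = second byte. The UYVY shader reads Y from .a.
        const std::array<std::array<uint8_t, 2>, 2> lumAlpha = {{{mp.u, mp.y0}, {mp.v, mp.y1}}};

        // View 2: GL_RGBA at half width -> one texel per macropixel,
        // r = U, g = Y0, b = V, a = Y1. The UYVY shader reads U from .r and V from .b.
        const std::array<uint8_t, 4> rgba = {mp.u, mp.y0, mp.v, mp.y1};

        for (int px = 0; px < 2; ++px) {
            std::cout << "pixel " << px
                      << ": Y=" << int(lumAlpha[px][1])   // .a of the luminance-alpha view
                      << " U=" << int(rgba[0])            // .r of the RGBA view
                      << " V=" << int(rgba[2]) << '\n';   // .b of the RGBA view
        }
    }

The YUYV case is the same idea with the byte order Y0 U Y1 V, so its shader reads Y from .r and UV from .ga, as noted in yuyvvideo.frag.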