summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--  src/plugins/avfoundation/camera/avfcamerautility.mm          2
-rw-r--r--  src/plugins/avfoundation/mediaplayer/avfmediaplayersession.h   3
-rw-r--r--  src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm 41
-rw-r--r--  src/qtmultimediaquicktools/qsgvideonode_yuv.cpp               32
-rw-r--r--  src/qtmultimediaquicktools/shaders/uyvyvideo.frag             18
-rw-r--r--  src/qtmultimediaquicktools/shaders/yuyvvideo.frag             18
6 files changed, 60 insertions, 54 deletions
diff --git a/src/plugins/avfoundation/camera/avfcamerautility.mm b/src/plugins/avfoundation/camera/avfcamerautility.mm
index 872075e0f..b864162a3 100644
--- a/src/plugins/avfoundation/camera/avfcamerautility.mm
+++ b/src/plugins/avfoundation/camera/avfcamerautility.mm
@@ -543,12 +543,14 @@ void qt_set_framerate_limits(AVCaptureDevice *captureDevice, qreal minFPS, qreal
void qt_set_framerate_limits(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection,
qreal minFPS, qreal maxFPS)
{
+ Q_UNUSED(videoConnection);
Q_ASSERT(captureDevice);
qt_set_framerate_limits(captureDevice, minFPS, maxFPS);
}
AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection)
{
+ Q_UNUSED(videoConnection);
Q_ASSERT(captureDevice);
AVFPSRange fps;
diff --git a/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.h b/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.h
index ef854e23f..9a55b7b74 100644
--- a/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.h
+++ b/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.h
@@ -105,6 +105,9 @@ public Q_SLOTS:
void processLoadStateChange();
void processLoadStateFailure();
+
+ void processDurationChange(qint64 duration);
+
Q_SIGNALS:
void positionChanged(qint64 position);
void durationChanged(qint64 duration);
diff --git a/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm b/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
index f4eb8a4ea..b6fe01053 100644
--- a/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
+++ b/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
@@ -55,12 +55,14 @@ static NSString* const AVF_PLAYABLE_KEY = @"playable";
static NSString* const AVF_STATUS_KEY = @"status";
//AVPlayer keys
-static NSString* const AVF_RATE_KEY = @"rate";
-static NSString* const AVF_CURRENT_ITEM_KEY = @"currentItem";
+static NSString* const AVF_RATE_KEY = @"rate";
+static NSString* const AVF_CURRENT_ITEM_KEY = @"currentItem";
+static NSString* const AVF_CURRENT_ITEM_DURATION_KEY = @"currentItem.duration";
static void *AVFMediaPlayerSessionObserverRateObservationContext = &AVFMediaPlayerSessionObserverRateObservationContext;
static void *AVFMediaPlayerSessionObserverStatusObservationContext = &AVFMediaPlayerSessionObserverStatusObservationContext;
static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMediaPlayerSessionObserverCurrentItemObservationContext;
+static void *AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext = &AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext;
@interface AVFMediaPlayerSessionObserver : NSObject
{
@@ -150,6 +152,7 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
}
if (m_player) {
[m_player setRate:0.0];
+ [m_player removeObserver:self forKeyPath:AVF_CURRENT_ITEM_DURATION_KEY];
[m_player removeObserver:self forKeyPath:AVF_CURRENT_ITEM_KEY];
[m_player removeObserver:self forKeyPath:AVF_RATE_KEY];
[m_player release];
@@ -261,6 +264,12 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
context:AVFMediaPlayerSessionObserverRateObservationContext];
+ //Observe the current item's duration to be notified of duration changes
+ [m_player addObserver:self
+ forKeyPath:AVF_CURRENT_ITEM_DURATION_KEY
+ options:0
+ context:AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext];
+
}
-(void) assetFailedToPrepareForPlayback:(NSError *)error
@@ -343,6 +352,13 @@ static void *AVFMediaPlayerSessionObserverCurrentItemObservationContext = &AVFMe
if (m_playerItem != newPlayerItem)
m_playerItem = newPlayerItem;
}
+ else if (context == AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext)
+ {
+ const CMTime time = [m_playerItem duration];
+ const qint64 duration = static_cast<qint64>(float(time.value) / float(time.timescale) * 1000.0f);
+ if (self.session)
+ QMetaObject::invokeMethod(m_session, "processDurationChange", Qt::AutoConnection, Q_ARG(qint64, duration));
+ }
else
{
[super observeValueForKeyPath:path ofObject:object change:change context:context];
@@ -515,13 +531,7 @@ qint64 AVFMediaPlayerSession::duration() const
#ifdef QT_DEBUG_AVF
qDebug() << Q_FUNC_INFO;
#endif
- AVPlayerItem *playerItem = [(AVFMediaPlayerSessionObserver*)m_observer playerItem];
-
- if (!playerItem)
- return 0;
-
- CMTime time = [playerItem duration];
- return static_cast<quint64>(float(time.value) / float(time.timescale) * 1000.0f);
+ return m_duration;
}
int AVFMediaPlayerSession::bufferStatus() const
@@ -856,10 +866,6 @@ void AVFMediaPlayerSession::processLoadStateChange(QMediaPlayer::State newState)
}
}
- qint64 currentDuration = duration();
- if (m_duration != currentDuration)
- Q_EMIT durationChanged(m_duration = currentDuration);
-
if (m_requestedPosition != -1) {
setPosition(m_requestedPosition);
m_requestedPosition = -1;
@@ -892,6 +898,15 @@ void AVFMediaPlayerSession::processLoadStateFailure()
Q_EMIT stateChanged((m_state = QMediaPlayer::StoppedState));
}
+void AVFMediaPlayerSession::processDurationChange(qint64 duration)
+{
+ if (duration == m_duration)
+ return;
+
+ m_duration = duration;
+ Q_EMIT durationChanged(duration);
+}
+
void AVFMediaPlayerSession::processPositionChange()
{
if (m_state == QMediaPlayer::StoppedState)
diff --git a/src/qtmultimediaquicktools/qsgvideonode_yuv.cpp b/src/qtmultimediaquicktools/qsgvideonode_yuv.cpp
index 38b5af943..b04c6b38b 100644
--- a/src/qtmultimediaquicktools/qsgvideonode_yuv.cpp
+++ b/src/qtmultimediaquicktools/qsgvideonode_yuv.cpp
@@ -134,16 +134,16 @@ public:
protected:
void initialize() Q_DECL_OVERRIDE {
m_id_matrix = program()->uniformLocation("qt_Matrix");
- m_id_yuvtexture = program()->uniformLocation("yuvTexture");
- m_id_imageWidth = program()->uniformLocation("imageWidth");
+ m_id_yTexture = program()->uniformLocation("yTexture");
+ m_id_uvTexture = program()->uniformLocation("uvTexture");
m_id_colorMatrix = program()->uniformLocation("colorMatrix");
m_id_opacity = program()->uniformLocation("opacity");
QSGMaterialShader::initialize();
}
int m_id_matrix;
- int m_id_yuvtexture;
- int m_id_imageWidth;
+ int m_id_yTexture;
+ int m_id_uvTexture;
int m_id_colorMatrix;
int m_id_opacity;
};
@@ -291,7 +291,7 @@ QSGVideoMaterial_YUV::QSGVideoMaterial_YUV(const QVideoSurfaceFormat &format) :
case QVideoFrame::Format_UYVY:
case QVideoFrame::Format_YUYV:
default:
- m_planeCount = 1;
+ m_planeCount = 2;
break;
}
@@ -355,12 +355,19 @@ void QSGVideoMaterial_YUV::bind()
if (m_format.pixelFormat() == QVideoFrame::Format_UYVY
|| m_format.pixelFormat() == QVideoFrame::Format_YUYV) {
- int fw = m_frame.width() / 2;
+ int fw = m_frame.width();
m_planeWidth[0] = fw;
-
- functions->glActiveTexture(GL_TEXTURE0);
- bindTexture(m_textureIds[0], fw, m_frame.height(), m_frame.bits(), GL_RGBA);
-
+ // In the YUYV/UYVY texture the UV plane appears with 1/2 of the image (and Y plane) width.
+ m_planeWidth[1] = fw / 2;
+ functions->glActiveTexture(GL_TEXTURE1);
+ // Either r,b (UYVY) or g,a (YUYV) values are used as the source of UV.
+ // Additionally U and V are shared per 2 pixels, hence only 1/2 of the image width is used.
+ // Interpreting this properly in the shaders avoids copies and conditionals inside the shaders;
+ // only the interpretation of the data changes.
+ bindTexture(m_textureIds[1], m_planeWidth[1], m_frame.height(), m_frame.bits(), GL_RGBA);
+ functions->glActiveTexture(GL_TEXTURE0); // Finish with 0 as default texture unit
+ // Either red (YUYV) or alpha (UYVY) values are used as source of Y
+ bindTexture(m_textureIds[0], m_planeWidth[0], m_frame.height(), m_frame.bits(), GL_LUMINANCE_ALPHA);
} else if (m_format.pixelFormat() == QVideoFrame::Format_NV12
|| m_format.pixelFormat() == QVideoFrame::Format_NV21) {
const int y = 0;
@@ -473,13 +480,12 @@ void QSGVideoMaterialShader_UYVY::updateState(const RenderState &state,
Q_UNUSED(oldMaterial);
QSGVideoMaterial_YUV *mat = static_cast<QSGVideoMaterial_YUV *>(newMaterial);
-
- program()->setUniformValue(m_id_yuvtexture, 0);
+ program()->setUniformValue(m_id_yTexture, 0);
+ program()->setUniformValue(m_id_uvTexture, 1);
mat->bind();
program()->setUniformValue(m_id_colorMatrix, mat->m_colorMatrix);
- program()->setUniformValue(m_id_imageWidth, mat->m_frame.width());
if (state.isOpacityDirty()) {
mat->m_opacity = state.opacity();
diff --git a/src/qtmultimediaquicktools/shaders/uyvyvideo.frag b/src/qtmultimediaquicktools/shaders/uyvyvideo.frag
index 905035703..5c62441c2 100644
--- a/src/qtmultimediaquicktools/shaders/uyvyvideo.frag
+++ b/src/qtmultimediaquicktools/shaders/uyvyvideo.frag
@@ -1,22 +1,12 @@
-uniform sampler2D yuvTexture; // UYVY macropixel texture passed as RGBA format
-uniform mediump float imageWidth; // The UYVY texture appears to the shader with 1/2 the image width since we use the RGBA format to pass UYVY
+uniform sampler2D yTexture; // Y component passed as GL_LUMINANCE_ALPHA, in uyvy Y = a
+uniform sampler2D uvTexture; // UV component passed as RGBA macropixel, in uyvy U = r, V = b
uniform mediump mat4 colorMatrix;
uniform lowp float opacity;
-
varying highp vec2 qt_TexCoord;
void main()
{
- // For U0 Y0 V0 Y1 macropixel, lookup Y0 or Y1 based on whether
- // the original texture x coord is even or odd.
- mediump float Y;
- if (fract(floor(qt_TexCoord.x * imageWidth + 0.5) / 2.0) > 0.0)
- Y = texture2D(yuvTexture, qt_TexCoord).a; // odd so choose Y1
- else
- Y = texture2D(yuvTexture, qt_TexCoord).g; // even so choose Y0
- mediump float Cb = texture2D(yuvTexture, qt_TexCoord).r;
- mediump float Cr = texture2D(yuvTexture, qt_TexCoord).b;
+ mediump vec3 YUV = vec3(texture2D(yTexture, qt_TexCoord).a, texture2D(uvTexture, qt_TexCoord).rb);
- mediump vec4 color = vec4(Y, Cb, Cr, 1.0);
- gl_FragColor = colorMatrix * color * opacity;
+ gl_FragColor = colorMatrix * vec4(YUV, 1.0) * opacity;
}
diff --git a/src/qtmultimediaquicktools/shaders/yuyvvideo.frag b/src/qtmultimediaquicktools/shaders/yuyvvideo.frag
index 72732cd66..5ce8b7366 100644
--- a/src/qtmultimediaquicktools/shaders/yuyvvideo.frag
+++ b/src/qtmultimediaquicktools/shaders/yuyvvideo.frag
@@ -1,22 +1,12 @@
-uniform sampler2D yuvTexture; // YUYV macropixel texture passed as RGBA format
-uniform mediump float imageWidth; // The YUYV texture appears to the shader with 1/2 the image width since we use the RGBA format to pass YUYV
+uniform sampler2D yTexture; // Y component passed as GL_LUMINANCE_ALPHA, in yuyv Y = r
+uniform sampler2D uvTexture; // UV component passed as RGBA macropixel, in yuyv U = g, V = a
uniform mediump mat4 colorMatrix;
uniform lowp float opacity;
-
varying highp vec2 qt_TexCoord;
void main()
{
- // For Y0 U0 Y1 V0 macropixel, lookup Y0 or Y1 based on whether
- // the original texture x coord is even or odd.
- mediump float Y;
- if (fract(floor(qt_TexCoord.x * imageWidth + 0.5) / 2.0) > 0.0)
- Y = texture2D(yuvTexture, qt_TexCoord).b; // odd so choose Y1
- else
- Y = texture2D(yuvTexture, qt_TexCoord).r; // even so choose Y0
- mediump float Cb = texture2D(yuvTexture, qt_TexCoord).g;
- mediump float Cr = texture2D(yuvTexture, qt_TexCoord).a;
+ mediump vec3 YUV = vec3(texture2D(yTexture, qt_TexCoord).r, texture2D(uvTexture, qt_TexCoord).ga);
- mediump vec4 color = vec4(Y, Cb, Cr, 1.0);
- gl_FragColor = colorMatrix * color * opacity;
+ gl_FragColor = colorMatrix * vec4(YUV, 1.0) * opacity;
}