Diffstat (limited to 'src/qtmultimediaquicktools')
-rw-r--r--  src/qtmultimediaquicktools/qdeclarativevideooutput.cpp         | 24
-rw-r--r--  src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp  | 39
-rw-r--r--  src/qtmultimediaquicktools/qdeclarativevideooutput_render_p.h  |  2
3 files changed, 51 insertions(+), 14 deletions(-)
diff --git a/src/qtmultimediaquicktools/qdeclarativevideooutput.cpp b/src/qtmultimediaquicktools/qdeclarativevideooutput.cpp
index 3c6a6f9c5..fccab54ab 100644
--- a/src/qtmultimediaquicktools/qdeclarativevideooutput.cpp
+++ b/src/qtmultimediaquicktools/qdeclarativevideooutput.cpp
@@ -878,4 +878,28 @@ void QDeclarativeVideoOutput::_q_invalidateSceneGraph()
m_backend->invalidateSceneGraph();
}
+/*!
+ \qmlproperty enumeration QtMultimedia::VideoOutput::flushMode
+
+ Set this property to define what \c VideoOutput should show
+ when playback is finished or stopped.
+
+ \list
+ \li EmptyFrame - clears video output.
+ \li FirstFrame - shows the first valid frame.
+ \li LastFrame - shows the last valid frame.
+ \endlist
+
+ The default flush mode is EmptyFrame.
+*/
+
+void QDeclarativeVideoOutput::setFlushMode(FlushMode mode)
+{
+ if (m_flushMode == mode)
+ return;
+
+ m_flushMode = mode;
+ emit flushModeChanged();
+}
+
QT_END_NAMESPACE
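
For context, a minimal QML sketch of how the flushMode property documented above could be used. The import revision and media file name are illustrative assumptions, not part of this change:

    import QtQuick 2.7
    import QtMultimedia 5.13   // assumed: any import revision that ships VideoOutput.flushMode

    Item {
        width: 640; height: 360

        MediaPlayer {
            id: player
            source: "clip.mp4"   // hypothetical local media file
            autoPlay: true
        }

        VideoOutput {
            anchors.fill: parent
            source: player
            // Keep the last valid frame on screen after playback stops,
            // instead of clearing the output (the default EmptyFrame mode).
            flushMode: VideoOutput.LastFrame
        }
    }
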
diff --git a/src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp b/src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp
index c51aec088..5cb4689cc 100644
--- a/src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp
+++ b/src/qtmultimediaquicktools/qdeclarativevideooutput_render.cpp
@@ -46,6 +46,7 @@
#include <QtCore/qloggingcategory.h>
#include <private/qmediapluginloader_p.h>
#include <private/qsgvideonode_p.h>
+#include <private/qvideoframe_p.h>
#include <QtGui/QOpenGLContext>
#include <QtQuick/QQuickWindow>
@@ -210,13 +211,13 @@ void QDeclarativeVideoRendererBackend::releaseControl()
QSize QDeclarativeVideoRendererBackend::nativeSize() const
{
- return m_surface->surfaceFormat().sizeHint();
+ return m_surfaceFormat.sizeHint();
}
void QDeclarativeVideoRendererBackend::updateGeometry()
{
- const QRectF viewport = videoSurface()->surfaceFormat().viewport();
- const QSizeF frameSize = videoSurface()->surfaceFormat().frameSize();
+ const QRectF viewport = m_surfaceFormat.viewport();
+ const QSizeF frameSize = m_surfaceFormat.frameSize();
const QRectF normalizedViewport(viewport.x() / frameSize.width(),
viewport.y() / frameSize.height(),
viewport.width() / frameSize.width(),
@@ -257,13 +258,13 @@ void QDeclarativeVideoRendererBackend::updateGeometry()
}
}
- if (videoSurface()->surfaceFormat().scanLineDirection() == QVideoSurfaceFormat::BottomToTop) {
+ if (m_surfaceFormat.scanLineDirection() == QVideoSurfaceFormat::BottomToTop) {
qreal top = m_sourceTextureRect.top();
m_sourceTextureRect.setTop(m_sourceTextureRect.bottom());
m_sourceTextureRect.setBottom(top);
}
- if (videoSurface()->surfaceFormat().property("mirrored").toBool()) {
+ if (m_surfaceFormat.property("mirrored").toBool()) {
qreal left = m_sourceTextureRect.left();
m_sourceTextureRect.setLeft(m_sourceTextureRect.right());
m_sourceTextureRect.setRight(left);
@@ -294,7 +295,6 @@ QSGNode *QDeclarativeVideoRendererBackend::updatePaintNode(QSGNode *oldNode,
if (m_frameChanged) {
// Run the VideoFilter if there is one. This must be done before potentially changing the videonode below.
if (m_frame.isValid() && !m_filters.isEmpty()) {
- const QVideoSurfaceFormat surfaceFormat = videoSurface()->surfaceFormat();
for (int i = 0; i < m_filters.count(); ++i) {
QAbstractVideoFilter *filter = m_filters[i].filter;
QVideoFilterRunnable *&runnable = m_filters[i].runnable;
@@ -309,7 +309,7 @@ QSGNode *QDeclarativeVideoRendererBackend::updatePaintNode(QSGNode *oldNode,
if (i == m_filters.count() - 1)
flags |= QVideoFilterRunnable::LastInChain;
- QVideoFrame newFrame = runnable->run(&m_frame, surfaceFormat, flags);
+ QVideoFrame newFrame = runnable->run(&m_frame, m_surfaceFormat, flags);
if (newFrame.isValid() && newFrame != m_frame) {
isFrameModified = true;
@@ -336,12 +336,13 @@ QSGNode *QDeclarativeVideoRendererBackend::updatePaintNode(QSGNode *oldNode,
// Get a node that supports our frame. The surface is irrelevant, our
// QSGVideoItemSurface supports (logically) anything.
QVideoSurfaceFormat nodeFormat(m_frame.size(), m_frame.pixelFormat(), m_frame.handleType());
- const QVideoSurfaceFormat surfaceFormat = m_surface->surfaceFormat();
- nodeFormat.setYCbCrColorSpace(surfaceFormat.yCbCrColorSpace());
- nodeFormat.setPixelAspectRatio(surfaceFormat.pixelAspectRatio());
- nodeFormat.setScanLineDirection(surfaceFormat.scanLineDirection());
- nodeFormat.setViewport(surfaceFormat.viewport());
- nodeFormat.setFrameRate(surfaceFormat.frameRate());
+ nodeFormat.setYCbCrColorSpace(m_surfaceFormat.yCbCrColorSpace());
+ nodeFormat.setPixelAspectRatio(m_surfaceFormat.pixelAspectRatio());
+ nodeFormat.setScanLineDirection(m_surfaceFormat.scanLineDirection());
+ nodeFormat.setViewport(m_surfaceFormat.viewport());
+ nodeFormat.setFrameRate(m_surfaceFormat.frameRate());
+ // Update current surface format if something has changed.
+ m_surfaceFormat = nodeFormat;
videoNode = factory->createNode(nodeFormat);
if (videoNode) {
qCDebug(qLcVideo) << "updatePaintNode: Video node created. Handle type:" << m_frame.handleType()
@@ -367,6 +368,14 @@ QSGNode *QDeclarativeVideoRendererBackend::updatePaintNode(QSGNode *oldNode,
if (isFrameModified)
flags |= QSGVideoNode::FrameFiltered;
videoNode->setCurrentFrame(m_frame, flags);
+
+ if ((q->flushMode() == QDeclarativeVideoOutput::FirstFrame && !m_frameOnFlush.isValid())
+ || q->flushMode() == QDeclarativeVideoOutput::LastFrame) {
+ m_frameOnFlush = m_surfaceFormat.handleType() == QAbstractVideoBuffer::NoHandle
+ ? m_frame
+ : qt_imageFromVideoFrame(m_frame);
+ }
+
//don't keep the frame for more than really necessary
m_frameChanged = false;
m_frame = QVideoFrame();
@@ -403,7 +412,7 @@ QOpenGLContext *QDeclarativeVideoRendererBackend::glContext() const
void QDeclarativeVideoRendererBackend::present(const QVideoFrame &frame)
{
m_frameMutex.lock();
- m_frame = frame;
+ m_frame = frame.isValid() ? frame : m_frameOnFlush;
m_frameChanged = true;
m_frameMutex.unlock();
@@ -450,10 +459,12 @@ QList<QVideoFrame::PixelFormat> QSGVideoItemSurface::supportedPixelFormats(
bool QSGVideoItemSurface::start(const QVideoSurfaceFormat &format)
{
qCDebug(qLcVideo) << "Video surface format:" << format << "all supported formats:" << supportedPixelFormats(format.handleType());
+ m_backend->m_frameOnFlush = QVideoFrame();
if (!supportedPixelFormats(format.handleType()).contains(format.pixelFormat()))
return false;
+ m_backend->m_surfaceFormat = format;
return QAbstractVideoSurface::start(format);
}
diff --git a/src/qtmultimediaquicktools/qdeclarativevideooutput_render_p.h b/src/qtmultimediaquicktools/qdeclarativevideooutput_render_p.h
index 4f1a90c8b..c69e74150 100644
--- a/src/qtmultimediaquicktools/qdeclarativevideooutput_render_p.h
+++ b/src/qtmultimediaquicktools/qdeclarativevideooutput_render_p.h
@@ -100,8 +100,10 @@ private:
QPointer<QVideoRendererControl> m_rendererControl;
QList<QSGVideoNodeFactoryInterface*> m_videoNodeFactories;
QSGVideoItemSurface *m_surface;
+ QVideoSurfaceFormat m_surfaceFormat;
QOpenGLContext *m_glContext;
QVideoFrame m_frame;
+ QVideoFrame m_frameOnFlush;
bool m_frameChanged;
QSGVideoNodeFactory_YUV m_i420Factory;
QSGVideoNodeFactory_RGB m_rgbFactory;