author     VaL Doroshchuk <valentyn.doroshchuk@qt.io>  2018-02-27 13:23:27 +0100
committer  VaL Doroshchuk <valentyn.doroshchuk@qt.io>  2019-05-15 11:50:54 +0000
commit     26767a8ebe355f55a7fd5aaa9ec947dea1b6ce29 (patch)
tree       cd625783652c171011da74dab0b9116f75b55f74 /src/gsttools/qgstvideorenderersink.cpp
parent     5ed9d1d830b90c5d5093cc7ca4a5c7cd5b15240c (diff)
Gstreamer: Introduce support for the OpenGL plugin
Since uploading to a GL texture is quite an expensive operation, this introduces integration with the OpenGL plugin, which provides a way to avoid uploading to a texture on every draw on Qt's side. This potentially fixes some performance issues, especially on embedded devices.

The glupload and glcolorconvert GStreamer elements are added to the pipeline. For QML apps, the current OpenGL context is provided to the GStreamer plugin so that OpenGL data can be shared with it; this ultimately allows Qt to simply use GL textures that are created and filled inside GStreamer. For overlay apps, the glimagesink element is added.

Requires gstreamer-gl-1.0 to be installed. Since this plugin is almost always available, enabling it by default would change the behavior of the current pipeline and might introduce side effects. The QT_GSTREAMER_USE_OPENGL_PLUGIN environment variable is therefore provided to explicitly enable the OpenGL features from GStreamer.

After this change, video frames are texture based, but it is still possible to map frames to download their data. If the video surface does not support GL textures, GLMemory is requested anyway, which leads to uploading the data, downloading it, and uploading it again to textures in the scene graph video node.

Task-number: QTBUG-66162
Change-Id: I32044ba0bf0c0cf90434d72f8991ad00927e1380
Reviewed-by: Christian Strømme <christian.stromme@qt.io>
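As a quick illustration of how an application opts in to this path (a minimal sketch, not part of the commit: only the QT_GSTREAMER_USE_OPENGL_PLUGIN variable comes from the message above; the player setup and file path are assumed):

#include <QtGlobal>
#include <QGuiApplication>
#include <QMediaPlayer>
#include <QUrl>

int main(int argc, char *argv[])
{
    // Opt in to the GStreamer OpenGL path; it is off by default so that the
    // existing pipeline behaviour is preserved. Set it before the player
    // (and thus the GStreamer pipeline) is created.
    qputenv("QT_GSTREAMER_USE_OPENGL_PLUGIN", "1");

    QGuiApplication app(argc, argv);

    QMediaPlayer player;
    player.setMedia(QUrl::fromLocalFile("/path/to/video.mp4")); // hypothetical path
    player.play();

    return app.exec();
}

In a real application the frames would be rendered by a QML VideoOutput item (whose video surface exposes the GL context that the sink shares with GStreamer, as described above); it is omitted here to keep the sketch minimal.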
Diffstat (limited to 'src/gsttools/qgstvideorenderersink.cpp')
-rw-r--r--  src/gsttools/qgstvideorenderersink.cpp | 185
1 file changed, 182 insertions(+), 3 deletions(-)
diff --git a/src/gsttools/qgstvideorenderersink.cpp b/src/gsttools/qgstvideorenderersink.cpp
index 09fdd42a6..49016b952 100644
--- a/src/gsttools/qgstvideorenderersink.cpp
+++ b/src/gsttools/qgstvideorenderersink.cpp
@@ -54,12 +54,32 @@
#include "qgstutils_p.h"
+#if QT_CONFIG(gstreamer_gl)
+#include <QOpenGLContext>
+#include <QGuiApplication>
+#include <QWindow>
+#include <qpa/qplatformnativeinterface.h>
+
+#include <gst/gl/gstglconfig.h>
+
+#if GST_GL_HAVE_WINDOW_X11
+# include <gst/gl/x11/gstgldisplay_x11.h>
+#endif
+#if GST_GL_HAVE_PLATFORM_EGL
+# include <gst/gl/egl/gstgldisplay_egl.h>
+#endif
+#if GST_CHECK_VERSION(1,11,1) && GST_GL_HAVE_WINDOW_WAYLAND
+# include <gst/gl/wayland/gstgldisplay_wayland.h>
+#endif
+#endif // #if QT_CONFIG(gstreamer_gl)
+
//#define DEBUG_VIDEO_SURFACE_SINK
QT_BEGIN_NAMESPACE
QGstDefaultVideoRenderer::QGstDefaultVideoRenderer()
: m_flushed(true)
+ , m_handleType(QAbstractVideoBuffer::NoHandle)
{
}
@@ -69,13 +89,33 @@ QGstDefaultVideoRenderer::~QGstDefaultVideoRenderer()
GstCaps *QGstDefaultVideoRenderer::getCaps(QAbstractVideoSurface *surface)
{
- return QGstUtils::capsForFormats(surface->supportedPixelFormats());
+#if QT_CONFIG(gstreamer_gl)
+ if (QGstUtils::useOpenGL()) {
+ m_handleType = QAbstractVideoBuffer::GLTextureHandle;
+ auto formats = surface->supportedPixelFormats(m_handleType);
+ // Even if the surface does not support gl textures,
+ // glupload will be added to the pipeline and GLMemory will be requested.
+ // This will lead to upload data to gl textures
+ // and download it when the buffer will be used within rendering.
+ if (formats.isEmpty()) {
+ m_handleType = QAbstractVideoBuffer::NoHandle;
+ formats = surface->supportedPixelFormats(m_handleType);
+ }
+
+ GstCaps *caps = QGstUtils::capsForFormats(formats);
+ for (guint i = 0; i < gst_caps_get_size(caps); ++i)
+ gst_caps_set_features(caps, i, gst_caps_features_from_string("memory:GLMemory"));
+
+ return caps;
+ }
+#endif
+ return QGstUtils::capsForFormats(surface->supportedPixelFormats(QAbstractVideoBuffer::NoHandle));
}
bool QGstDefaultVideoRenderer::start(QAbstractVideoSurface *surface, GstCaps *caps)
{
m_flushed = true;
- m_format = QGstUtils::formatForCaps(caps, &m_videoInfo);
+ m_format = QGstUtils::formatForCaps(caps, &m_videoInfo, m_handleType);
return m_format.isValid() && surface->start(m_format);
}
@@ -90,8 +130,21 @@ void QGstDefaultVideoRenderer::stop(QAbstractVideoSurface *surface)
bool QGstDefaultVideoRenderer::present(QAbstractVideoSurface *surface, GstBuffer *buffer)
{
m_flushed = false;
+
+ QGstVideoBuffer *videoBuffer = nullptr;
+#if QT_CONFIG(gstreamer_gl)
+ if (m_format.handleType() == QAbstractVideoBuffer::GLTextureHandle) {
+ GstGLMemory *glmem = GST_GL_MEMORY_CAST(gst_buffer_peek_memory(buffer, 0));
+ guint textureId = gst_gl_memory_get_texture_id(glmem);
+ videoBuffer = new QGstVideoBuffer(buffer, m_videoInfo, m_format.handleType(), textureId);
+ }
+#endif
+
+ if (!videoBuffer)
+ videoBuffer = new QGstVideoBuffer(buffer, m_videoInfo);
+
QVideoFrame frame(
- new QGstVideoBuffer(buffer, m_videoInfo),
+ videoBuffer,
m_format.frameSize(),
m_format.pixelFormat());
QGstUtils::setFrameTimeStamps(&frame, buffer);
@@ -145,6 +198,10 @@ QVideoSurfaceGstDelegate::~QVideoSurfaceGstDelegate()
gst_caps_unref(m_surfaceCaps);
if (m_startCaps)
gst_caps_unref(m_startCaps);
+#if QT_CONFIG(gstreamer_gl)
+ if (m_gstGLDisplayContext)
+ gst_object_unref(m_gstGLDisplayContext);
+#endif
}
GstCaps *QVideoSurfaceGstDelegate::caps()
@@ -254,6 +311,118 @@ GstFlowReturn QVideoSurfaceGstDelegate::render(GstBuffer *buffer)
return m_renderReturn;
}
+#if QT_CONFIG(gstreamer_gl)
+static GstGLContext *gstGLDisplayContext(QAbstractVideoSurface *surface)
+{
+ QOpenGLContext *glContext = qobject_cast<QOpenGLContext*>(surface->property("GLContext").value<QObject*>());
+ // Context is not ready yet.
+ if (!glContext)
+ return nullptr;
+
+ GstGLDisplay *display = nullptr;
+ const QString platform = QGuiApplication::platformName();
+ const char *contextName = "eglcontext";
+ GstGLPlatform glPlatform = GST_GL_PLATFORM_EGL;
+ QPlatformNativeInterface *pni = QGuiApplication::platformNativeInterface();
+
+#if GST_GL_HAVE_WINDOW_X11
+ if (platform == QLatin1String("xcb")) {
+ if (QOpenGLContext::openGLModuleType() == QOpenGLContext::LibGL) {
+ contextName = "glxcontext";
+ glPlatform = GST_GL_PLATFORM_GLX;
+ }
+
+ display = (GstGLDisplay *)gst_gl_display_x11_new_with_display(
+ (Display *)pni->nativeResourceForIntegration("display"));
+ }
+#endif
+
+#if GST_GL_HAVE_PLATFORM_EGL
+ if (!display && platform == QLatin1String("eglfs")) {
+ display = (GstGLDisplay *)gst_gl_display_egl_new_with_egl_display(
+ pni->nativeResourceForIntegration("egldisplay"));
+ }
+#endif
+
+#if GST_CHECK_VERSION(1,11,1)
+#if GST_GL_HAVE_WINDOW_WAYLAND
+ if (!display && platform.startsWith(QLatin1String("wayland"))) {
+ const char *displayName = (platform == QLatin1String("wayland"))
+ ? "display" : "egldisplay";
+
+ display = (GstGLDisplay *)gst_gl_display_wayland_new_with_display(
+ (struct wl_display *)pni->nativeResourceForIntegration(displayName));
+ }
+#endif
+#endif
+
+ if (!display) {
+ qWarning() << "Could not create GstGLDisplay";
+ return nullptr;
+ }
+
+ void *nativeContext = pni->nativeResourceForContext(contextName, glContext);
+ if (!nativeContext)
+ qWarning() << "Could not find resource for" << contextName;
+
+ GstGLContext *appContext = gst_gl_context_new_wrapped(display, (guintptr)nativeContext, glPlatform, GST_GL_API_ANY);
+ if (!appContext)
+ qWarning() << "Could not create wrappped context for platform:" << glPlatform;
+
+ GstGLContext *displayContext = nullptr;
+ GError *error = NULL;
+ gst_gl_display_create_context(display, appContext, &displayContext, &error);
+ if (error) {
+ qWarning() << "Could not create display context:" << error->message;
+ g_clear_error(&error);
+ }
+
+ if (appContext)
+ gst_object_unref(appContext);
+
+ gst_object_unref(display);
+
+ return displayContext;
+}
+#endif // #if QT_CONFIG(gstreamer_gl)
+
+bool QVideoSurfaceGstDelegate::query(GstQuery *query)
+{
+#if QT_CONFIG(gstreamer_gl)
+ if (GST_QUERY_TYPE(query) == GST_QUERY_CONTEXT) {
+ const gchar *type;
+ gst_query_parse_context_type(query, &type);
+
+ if (strcmp(type, "gst.gl.local_context") != 0)
+ return false;
+
+ if (!m_gstGLDisplayContext)
+ m_gstGLDisplayContext = gstGLDisplayContext(m_surface);
+
+ // No context yet.
+ if (!m_gstGLDisplayContext)
+ return false;
+
+ GstContext *context = NULL;
+ gst_query_parse_context(query, &context);
+ context = context ? gst_context_copy(context) : gst_context_new(type, FALSE);
+ GstStructure *structure = gst_context_writable_structure(context);
+#if GST_CHECK_VERSION(1,11,1)
+ gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, m_gstGLDisplayContext, NULL);
+#else
+ gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, m_gstGLDisplayContext, NULL);
+#endif
+ gst_query_set_context(query, context);
+ gst_context_unref(context);
+
+ return m_gstGLDisplayContext;
+ }
+#else
+ Q_UNUSED(query);
+#endif
+ return false;
+}
+
bool QVideoSurfaceGstDelegate::event(QEvent *event)
{
if (event->type() == QEvent::UpdateRequest) {
@@ -460,6 +629,7 @@ void QGstVideoRendererSink::class_init(gpointer g_class, gpointer class_data)
base_sink_class->propose_allocation = QGstVideoRendererSink::propose_allocation;
base_sink_class->stop = QGstVideoRendererSink::stop;
base_sink_class->unlock = QGstVideoRendererSink::unlock;
+ base_sink_class->query = QGstVideoRendererSink::query;
GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class);
element_class->change_state = QGstVideoRendererSink::change_state;
@@ -602,4 +772,13 @@ GstFlowReturn QGstVideoRendererSink::show_frame(GstVideoSink *base, GstBuffer *b
return sink->delegate->render(buffer);
}
+gboolean QGstVideoRendererSink::query(GstBaseSink *base, GstQuery *query)
+{
+ VO_SINK(base);
+ if (sink->delegate->query(query))
+ return TRUE;
+
+ return GST_BASE_SINK_CLASS(sink_parent_class)->query(base, query);
+}
+
QT_END_NAMESPACE