From 466a4f29c20b6d797af9d67029a7329b2124b276 Mon Sep 17 00:00:00 2001
From: Lars Knoll
Date: Mon, 29 Nov 2021 15:52:25 +0100
Subject: Fix a memory leak in the camerabinsession

Properly unref the video and camera source objects when changing the
input device.

Fixes: QTBUG-93762
Change-Id: Ibaf08978b6915faa668ef0ab54b3bc0903daafad
Reviewed-by: Piotr Srebrny
Reviewed-by: Lars Knoll
---
 src/plugins/gstreamer/camerabin/camerabinsession.cpp | 9 +++++++++
 1 file changed, 9 insertions(+)

(limited to 'src/plugins')

diff --git a/src/plugins/gstreamer/camerabin/camerabinsession.cpp b/src/plugins/gstreamer/camerabin/camerabinsession.cpp
index 26080f979..dd81240f0 100644
--- a/src/plugins/gstreamer/camerabin/camerabinsession.cpp
+++ b/src/plugins/gstreamer/camerabin/camerabinsession.cpp
@@ -479,6 +479,15 @@ GstElement *CameraBinSession::buildCameraSource()
     m_inputDeviceHasChanged = false;
     m_usingWrapperCameraBinSrc = false;
 
+    if (m_videoSrc) {
+        gst_object_unref(GST_OBJECT(m_videoSrc));
+        m_videoSrc = 0;
+    }
+    if (m_cameraSrc) {
+        gst_object_unref(GST_OBJECT(m_cameraSrc));
+        m_cameraSrc = 0;
+    }
+
     GstElement *camSrc = 0;
     g_object_get(G_OBJECT(m_camerabin), CAMERA_SOURCE_PROPERTY, &camSrc, NULL);
-- 
cgit v1.2.3


From d9cdfeebd9eb6a067b97316daa149c7f58e1c7ec Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tor=20Arne=20Vestb=C3=B8?=
Date: Mon, 29 Nov 2021 14:53:59 +0100
Subject: Use AVPlayerItemVideoOutput to generate video frames

This fixes rendering problems on M1 based Macs. It also unifies the
rendering pipeline between macOS and iOS as much as possible, and
avoids an intermediate copy to an FBO.

Since AVPlayerItemVideoOutput produces GL_TEXTURE_RECTANGLE textures on
macOS a new QAbstractVideoBuffer handle has been added that explicitly
maps to GL_TEXTURE_RECTANGLE. We use this handle type internally in
QSGVideoMaterial_Texture where we know how to blit GL_TEXTURE_RECTANGLE
textures.

To maintain compatibility for QAbstractVideoSurface consumers who expect
GL_TEXTURE_2D textures we blit the rectangle texture to an FBO returned
as QAbstractVideoBuffer::GLTextureHandle.
Fixes: QTBUG-89803
Done-with: Lars Knoll
Change-Id: I36d22eafb63902ecc1097e138705812ef6a8cb71
Reviewed-by: Lars Knoll
Reviewed-by: Doris Verria
---
 .../mediaplayer/avfmediaplayersession.mm     |  21 +-
 .../mediaplayer/avfvideoframerenderer.h      | 102 ++--
 .../mediaplayer/avfvideoframerenderer.mm     | 513 +++++++++------------
 .../mediaplayer/avfvideoframerenderer_ios.h  | 113 -----
 .../mediaplayer/avfvideoframerenderer_ios.mm | 261 -----------
 .../mediaplayer/avfvideorenderercontrol.h    |   5 +-
 .../mediaplayer/avfvideorenderercontrol.mm   | 140 +++---
 .../avfoundation/mediaplayer/mediaplayer.pro |  35 +-
 8 files changed, 344 insertions(+), 846 deletions(-)
 delete mode 100644 src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.h
 delete mode 100644 src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.mm

(limited to 'src/plugins')

diff --git a/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm b/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
index ea54fe6be..ae2234764 100644
--- a/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
+++ b/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
@@ -110,6 +110,12 @@ static void *AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext
     self->m_session = session;
     self->m_bufferIsLikelyToKeepUp = FALSE;
+
+    m_playerLayer = [AVPlayerLayer playerLayerWithPlayer:nil];
+    [m_playerLayer retain];
+    m_playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
+    m_playerLayer.anchorPoint = CGPointMake(0.0f, 0.0f);
+
     return self;
 }
 
@@ -172,10 +178,6 @@ static void *AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext
         [m_player release];
         m_player = 0;
     }
-    if (m_playerLayer) {
-        [m_playerLayer release];
-        m_playerLayer = 0;
-    }
 }
 
 - (void) prepareToPlayAsset:(AVURLAsset *)asset
@@ -260,14 +262,8 @@ static void *AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext
         [m_player setMuted:m_session->isMuted()];
     }
 
-    //Create a new player layer if we don't have one already
-    if (!m_playerLayer)
-    {
-        m_playerLayer = [AVPlayerLayer playerLayerWithPlayer:m_player];
-        [m_playerLayer retain];
-        m_playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
-        m_playerLayer.anchorPoint = CGPointMake(0.0f, 0.0f);
-    }
+    //Assign the output layer to the new player
+    m_playerLayer.player = m_player;
 
     //Observe the AVPlayer "currentItem" property to find out when any
     //AVPlayer replaceCurrentItemWithPlayerItem: replacement will/did
@@ -413,6 +409,7 @@ static void *AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext
     }
 
     [m_mimeType release];
+    [m_playerLayer release];
     [super dealloc];
 }
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.h b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.h
index 28b47ac57..886722744 100644
--- a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.h
+++ b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.h
@@ -43,22 +43,48 @@
 #include
 #include
 #include
+#include
 #include
 
-#import "Metal/Metal.h"
-#import "MetalKit/MetalKit.h"
-
-@class CARenderer;
 @class AVPlayerLayer;
+@class AVPlayerItemVideoOutput;
 
 QT_BEGIN_NAMESPACE
 
+class QOpenGLContext;
 class QOpenGLFramebufferObject;
 class QOpenGLShaderProgram;
-class QWindow;
-class QOpenGLContext;
+class QOffscreenSurface;
 class QAbstractVideoSurface;
 
+typedef struct __CVBuffer *CVBufferRef;
+typedef CVBufferRef CVImageBufferRef;
+typedef CVImageBufferRef CVPixelBufferRef;
+
+#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+    typedef struct
__CVOpenGLESTextureCache *CVOpenGLESTextureCacheRef; + typedef CVImageBufferRef CVOpenGLESTextureRef; + // helpers to avoid boring if def + typedef CVOpenGLESTextureCacheRef CVOGLTextureCacheRef; + typedef CVOpenGLESTextureRef CVOGLTextureRef; + #define CVOGLTextureGetTarget CVOpenGLESTextureGetTarget + #define CVOGLTextureGetName CVOpenGLESTextureGetName + #define CVOGLTextureCacheCreate CVOpenGLESTextureCacheCreate + #define CVOGLTextureCacheCreateTextureFromImage CVOpenGLESTextureCacheCreateTextureFromImage + #define CVOGLTextureCacheFlush CVOpenGLESTextureCacheFlush +#else + typedef struct __CVOpenGLTextureCache *CVOpenGLTextureCacheRef; + typedef CVImageBufferRef CVOpenGLTextureRef; + // helpers to avoid boring if def + typedef CVOpenGLTextureCacheRef CVOGLTextureCacheRef; + typedef CVOpenGLTextureRef CVOGLTextureRef; + #define CVOGLTextureGetTarget CVOpenGLTextureGetTarget + #define CVOGLTextureGetName CVOpenGLTextureGetName + #define CVOGLTextureCacheCreate CVOpenGLTextureCacheCreate + #define CVOGLTextureCacheCreateTextureFromImage CVOpenGLTextureCacheCreateTextureFromImage + #define CVOGLTextureCacheFlush CVOpenGLTextureCacheFlush +#endif + class AVFVideoFrameRenderer : public QObject { public: @@ -66,57 +92,31 @@ public: virtual ~AVFVideoFrameRenderer(); - GLuint renderLayerToTexture(AVPlayerLayer *layer); - QImage renderLayerToImage(AVPlayerLayer *layer); - - static GLuint createGLTexture(CGLContextObj cglContextObj, CGLPixelFormatObj cglPixelFormtObj, - CVOpenGLTextureCacheRef cvglTextureCache, - CVPixelBufferRef cvPixelBufferRef, - CVOpenGLTextureRef cvOpenGLTextureRef); + void setPlayerLayer(AVPlayerLayer *layer); - static id createMetalTexture(id mtlDevice, - CVMetalTextureCacheRef cvMetalTextureCacheRef, - CVPixelBufferRef cvPixelBufferRef, - MTLPixelFormat pixelFormat, size_t width, size_t height, - CVMetalTextureRef cvMetalTextureRef); + CVOGLTextureRef renderLayerToTexture(AVPlayerLayer *layer, QSize *size); +#ifdef Q_OS_MACOS + GLuint renderLayerToFBO(AVPlayerLayer *layer, QSize *size); +#endif + QImage renderLayerToImage(AVPlayerLayer *layer, QSize *size); private: - QOpenGLFramebufferObject* initRenderer(AVPlayerLayer *layer); - void renderLayerToFBO(AVPlayerLayer *layer, QOpenGLFramebufferObject *fbo); - void renderLayerToFBOCoreOpenGL(AVPlayerLayer *layer, QOpenGLFramebufferObject *fbo); + void initRenderer(); + CVPixelBufferRef copyPixelBufferFromLayer(AVPlayerLayer *layer, size_t& width, size_t& height); + CVOGLTextureRef createCacheTextureFromLayer(AVPlayerLayer *layer, size_t& width, size_t& height); - CARenderer *m_videoLayerRenderer; - QAbstractVideoSurface *m_surface; - QOpenGLFramebufferObject *m_fbo[2]; - QOpenGLShaderProgram *m_shader = nullptr; - QWindow *m_offscreenSurface; QOpenGLContext *m_glContext; - QSize m_targetSize; - - bool m_useCoreProfile = false; - - // Shared pixel buffer - CVPixelBufferRef m_CVPixelBuffer; - - // OpenGL Texture - CVOpenGLTextureCacheRef m_CVGLTextureCache; - CVOpenGLTextureRef m_CVGLTexture; - CGLPixelFormatObj m_CGLPixelFormat; - GLuint m_textureName = 0; - - // Metal Texture - CVMetalTextureRef m_CVMTLTexture; - CVMetalTextureCacheRef m_CVMTLTextureCache; - id m_metalDevice = nil; - id m_metalTexture = nil; - - NSOpenGLContext *m_NSGLContext = nullptr; - - GLuint m_quadVao = 0; - GLuint m_quadVbos[2]; - - uint m_currentBuffer; + QOffscreenSurface *m_offscreenSurface; + QAbstractVideoSurface *m_surface; + CVOGLTextureCacheRef m_textureCache; + AVPlayerItemVideoOutput* m_videoOutput; bool m_isContextShared; + +#ifdef 
Q_OS_MACOS + QOpenGLFramebufferObject *m_fbo[2]; + uint m_currentFBO; + QOpenGLTextureBlitter m_blitter; +#endif }; QT_END_NAMESPACE diff --git a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm index 4c7364a11..766764ee3 100644 --- a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm +++ b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm @@ -41,14 +41,20 @@ #include #include -#include -#include -#include +#include +#include + +#include #ifdef QT_DEBUG_AVF #include #endif +#ifdef Q_OS_MACOS +#import +#include +#endif + #import #import @@ -56,15 +62,23 @@ QT_USE_NAMESPACE AVFVideoFrameRenderer::AVFVideoFrameRenderer(QAbstractVideoSurface *surface, QObject *parent) : QObject(parent) - , m_videoLayerRenderer(nullptr) - , m_surface(surface) - , m_offscreenSurface(nullptr) , m_glContext(nullptr) - , m_currentBuffer(1) + , m_offscreenSurface(nullptr) + , m_surface(surface) + , m_textureCache(nullptr) + , m_videoOutput(nullptr) , m_isContextShared(true) { + m_videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:@{ + (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA), + (NSString *)kCVPixelBufferOpenGLCompatibilityKey: @YES + }]; + [m_videoOutput setDelegate:nil queue:nil]; + +#ifdef Q_OS_MACOS m_fbo[0] = nullptr; m_fbo[1] = nullptr; +#endif } AVFVideoFrameRenderer::~AVFVideoFrameRenderer() @@ -73,355 +87,244 @@ AVFVideoFrameRenderer::~AVFVideoFrameRenderer() qDebug() << Q_FUNC_INFO; #endif - [m_videoLayerRenderer release]; - delete m_fbo[0]; - delete m_fbo[1]; + [m_videoOutput release]; + if (m_textureCache) + CFRelease(m_textureCache); delete m_offscreenSurface; delete m_glContext; - if (m_useCoreProfile) { - glDeleteVertexArrays(1, &m_quadVao); - glDeleteBuffers(2, m_quadVbos); - delete m_shader; - } +#ifdef Q_OS_MACOS + delete m_fbo[0]; + delete m_fbo[1]; +#endif } -GLuint AVFVideoFrameRenderer::renderLayerToTexture(AVPlayerLayer *layer) +#ifdef Q_OS_MACOS +GLuint AVFVideoFrameRenderer::renderLayerToFBO(AVPlayerLayer *layer, QSize *size) { - //Is layer valid - if (!layer) + QCFType texture = renderLayerToTexture(layer, size); + if (!texture) return 0; - //If the glContext isn't shared, it doesn't make sense to return a texture for us - if (m_offscreenSurface && !m_isContextShared) - return 0; + Q_ASSERT(size); - QOpenGLFramebufferObject *fbo = initRenderer(layer); + // Do we have FBO's already? 
+ if ((!m_fbo[0] && !m_fbo[0]) || (m_fbo[0]->size() != *size)) { + delete m_fbo[0]; + delete m_fbo[1]; + m_fbo[0] = new QOpenGLFramebufferObject(*size); + m_fbo[1] = new QOpenGLFramebufferObject(*size); + } - if (!fbo) - return 0; + // Switch buffer target + m_currentFBO = !m_currentFBO; + QOpenGLFramebufferObject *fbo = m_fbo[m_currentFBO]; - renderLayerToFBO(layer, fbo); - if (m_glContext) - m_glContext->doneCurrent(); + if (!fbo || !fbo->bind()) + return 0; - return fbo->texture(); -} + glClearColor(0.0f, 0.0f, 0.0f, 1.0f); + glClear(GL_COLOR_BUFFER_BIT); -QImage AVFVideoFrameRenderer::renderLayerToImage(AVPlayerLayer *layer) -{ - //Is layer valid - if (!layer) { - return QImage(); - } + glViewport(0, 0, size->width(), size->height()); - QOpenGLFramebufferObject *fbo = initRenderer(layer); + if (!m_blitter.isCreated()) + m_blitter.create(); - if (!fbo) - return QImage(); + m_blitter.bind(GL_TEXTURE_RECTANGLE); + m_blitter.blit(CVOpenGLTextureGetName(texture), QMatrix4x4(), QMatrix3x3()); + m_blitter.release(); - renderLayerToFBO(layer, fbo); - QImage fboImage = fbo->toImage(); - if (m_glContext) - m_glContext->doneCurrent(); + glFinish(); - return fboImage; + fbo->release(); + return fbo->texture(); } +#endif -QOpenGLFramebufferObject *AVFVideoFrameRenderer::initRenderer(AVPlayerLayer *layer) +CVOGLTextureRef AVFVideoFrameRenderer::renderLayerToTexture(AVPlayerLayer *layer, QSize *size) { + initRenderer(); - //Get size from AVPlayerLayer - m_targetSize = QSize(layer.bounds.size.width, layer.bounds.size.height); - - QOpenGLContext *shareContext = !m_glContext && m_surface - ? qobject_cast(m_surface->property("GLContext").value()) - : nullptr; + // If the glContext isn't shared, it doesn't make sense to return a texture for us + if (!m_isContextShared) + return nullptr; - //Make sure we have an OpenGL context to make current - if ((shareContext && shareContext != QOpenGLContext::currentContext()) - || (!QOpenGLContext::currentContext() && !m_glContext)) { - - //Create Hidden QWindow surface to create context in this thread - delete m_offscreenSurface; - m_offscreenSurface = new QWindow(); - m_offscreenSurface->setSurfaceType(QWindow::OpenGLSurface); - //Needs geometry to be a valid surface, but size is not important - m_offscreenSurface->setGeometry(0, 0, 1, 1); - m_offscreenSurface->create(); - - delete m_glContext; - m_glContext = new QOpenGLContext(); - m_glContext->setFormat(m_offscreenSurface->requestedFormat()); + size_t width = 0, height = 0; + auto texture = createCacheTextureFromLayer(layer, width, height); + if (size) + *size = QSize(width, height); + return texture; +} - if (shareContext) { - m_glContext->setShareContext(shareContext); - m_isContextShared = true; - } else { +CVPixelBufferRef AVFVideoFrameRenderer::copyPixelBufferFromLayer(AVPlayerLayer *layer, + size_t& width, size_t& height) +{ + //Is layer valid + if (!layer) { #ifdef QT_DEBUG_AVF - qWarning("failed to get Render Thread context"); + qWarning("copyPixelBufferFromLayer: invalid layer"); #endif - m_isContextShared = false; - } - if (!m_glContext->create()) { - qWarning("failed to create QOpenGLContext"); - return nullptr; - } - - // CARenderer must be re-created with different current context, so release it now. - // See lines below where m_videoLayerRenderer is constructed. 
- if (m_videoLayerRenderer) { - [m_videoLayerRenderer release]; - m_videoLayerRenderer = nullptr; - } - - if (m_useCoreProfile) { - glDeleteVertexArrays(1, &m_quadVao); - glDeleteBuffers(2, m_quadVbos); - delete m_shader; - m_shader = nullptr; - } + return nullptr; } - //Need current context - if (m_glContext) - m_glContext->makeCurrent(m_offscreenSurface); + AVPlayerItem *item = layer.player.currentItem; + if (![item.outputs containsObject:m_videoOutput]) + [item addOutput:m_videoOutput]; - if (!m_metalDevice) - m_metalDevice = MTLCreateSystemDefaultDevice(); + CFTimeInterval currentCAFrameTime = CACurrentMediaTime(); + CMTime currentCMFrameTime = [m_videoOutput itemTimeForHostTime:currentCAFrameTime]; - if (@available(macOS 10.13, *)) { - m_useCoreProfile = m_metalDevice && (QOpenGLContext::currentContext()->format().profile() == - QSurfaceFormat::CoreProfile); - } else { - m_useCoreProfile = false; - } - - // Create the CARenderer if needed for no Core OpenGL - if (!m_videoLayerRenderer) { - if (!m_useCoreProfile) { - m_videoLayerRenderer = [CARenderer rendererWithCGLContext: CGLGetCurrentContext() - options: nil]; - [m_videoLayerRenderer retain]; - } else if (@available(macOS 10.13, *)) { - // This is always true when m_useCoreProfile is true, but the compiler wants the check - // anyway - // Setup Core OpenGL shader, VAO, VBOs and metal renderer - m_shader = new QOpenGLShaderProgram(); - m_shader->create(); - if (!m_shader->addShaderFromSourceCode(QOpenGLShader::Vertex, R"(#version 150 core - in vec2 qt_VertexPosition; - in vec2 qt_VertexTexCoord; - out vec2 qt_TexCoord; - void main() - { - qt_TexCoord = qt_VertexTexCoord; - gl_Position = vec4(qt_VertexPosition, 0.0f, 1.0f); - })")) { - qCritical() << "Vertex shader compilation failed" << m_shader->log(); - } - if (!m_shader->addShaderFromSourceCode(QOpenGLShader::Fragment, R"(#version 150 core - in vec2 qt_TexCoord; - out vec4 fragColor; - uniform sampler2DRect videoFrame; - void main(void) - { - ivec2 textureDim = textureSize(videoFrame); - fragColor = texture(videoFrame, qt_TexCoord * textureDim); - })")) { - qCritical() << "Fragment shader compilation failed" << m_shader->log(); - } - - // Setup quad where the video frame will be attached - GLfloat vertices[] = { - -1.0f, -1.0f, - 1.0f, -1.0f, - -1.0f, 1.0f, - 1.0f, 1.0f, - }; - - GLfloat uvs[] = { - 0.0f, 0.0f, - 1.0f, 0.0f, - 0.0f, 1.0f, - 1.0f, 1.0f, - }; - - glGenVertexArrays(1, &m_quadVao); - glBindVertexArray(m_quadVao); - - // Create vertex buffer objects for vertices - glGenBuffers(2, m_quadVbos); - - // Setup vertices - glBindBuffer(GL_ARRAY_BUFFER, m_quadVbos[0]); - glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW); - glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(float), nullptr); - glEnableVertexAttribArray(0); - - // Setup uvs - glBindBuffer(GL_ARRAY_BUFFER, m_quadVbos[1]); - glBufferData(GL_ARRAY_BUFFER, sizeof(uvs), uvs, GL_STATIC_DRAW); - glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(float), nullptr); - glEnableVertexAttribArray(1); - - glBindVertexArray(0); - - // Setup shared Metal/OpenGL pixel buffer and textures - m_NSGLContext = static_cast((QOpenGLContext::currentContext()->nativeHandle().data()))->context(); - m_CGLPixelFormat = m_NSGLContext.pixelFormat.CGLPixelFormatObj; - - NSDictionary* cvBufferProperties = @{ - static_cast(kCVPixelBufferOpenGLCompatibilityKey) : @YES, - static_cast(kCVPixelBufferMetalCompatibilityKey): @YES, - }; - - CVPixelBufferCreate(kCFAllocatorDefault, 
static_cast(m_targetSize.width()), - static_cast(m_targetSize.height()), kCVPixelFormatType_32BGRA, - static_cast(cvBufferProperties), &m_CVPixelBuffer); - - m_textureName = createGLTexture(reinterpret_cast(m_NSGLContext.CGLContextObj), - m_CGLPixelFormat, m_CVGLTextureCache, m_CVPixelBuffer, - m_CVGLTexture); - m_metalTexture = createMetalTexture(m_metalDevice, m_CVMTLTextureCache, m_CVPixelBuffer, - MTLPixelFormatBGRA8Unorm, - static_cast(m_targetSize.width()), - static_cast(m_targetSize.height()), - m_CVMTLTexture); - - m_videoLayerRenderer = [CARenderer rendererWithMTLTexture:m_metalTexture options:nil]; - [m_videoLayerRenderer retain]; - } - } + // Happens when buffering / loading + if (CMTimeCompare(currentCMFrameTime, kCMTimeZero) < 0) + return nullptr; - //Set/Change render source if needed - if (m_videoLayerRenderer.layer != layer) { - m_videoLayerRenderer.layer = layer; - m_videoLayerRenderer.bounds = layer.bounds; - } + if (![m_videoOutput hasNewPixelBufferForItemTime:currentCMFrameTime]) + return nullptr; - //Do we have FBO's already? - if ((!m_fbo[0] && !m_fbo[0]) || (m_fbo[0]->size() != m_targetSize)) { - delete m_fbo[0]; - delete m_fbo[1]; - m_fbo[0] = new QOpenGLFramebufferObject(m_targetSize); - m_fbo[1] = new QOpenGLFramebufferObject(m_targetSize); + CVPixelBufferRef pixelBuffer = [m_videoOutput copyPixelBufferForItemTime:currentCMFrameTime + itemTimeForDisplay:nil]; + if (!pixelBuffer) { +#ifdef QT_DEBUG_AVF + qWarning("copyPixelBufferForItemTime returned nil"); + CMTimeShow(currentCMFrameTime); +#endif + return nullptr; } - //Switch buffer target - m_currentBuffer = !m_currentBuffer; - return m_fbo[m_currentBuffer]; + width = CVPixelBufferGetWidth(pixelBuffer); + height = CVPixelBufferGetHeight(pixelBuffer); + return pixelBuffer; } -void AVFVideoFrameRenderer::renderLayerToFBO(AVPlayerLayer *layer, QOpenGLFramebufferObject *fbo) +CVOGLTextureRef AVFVideoFrameRenderer::createCacheTextureFromLayer(AVPlayerLayer *layer, + size_t& width, size_t& height) { - //Start Rendering - //NOTE: This rendering method will NOT work on iOS as there is no CARenderer in iOS - if (!fbo->bind()) { - qWarning("AVFVideoRender FBO failed to bind"); - return; + CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(layer, width, height); + + if (!pixelBuffer) + return nullptr; + + CVOGLTextureCacheFlush(m_textureCache, 0); + + CVOGLTextureRef texture = nullptr; +#ifdef Q_OS_MACOS + CVReturn err = CVOpenGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault, + m_textureCache, + pixelBuffer, + nil, + &texture); +#else + CVReturn err = CVOGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault, m_textureCache, pixelBuffer, nullptr, + GL_TEXTURE_2D, GL_RGBA, + (GLsizei) width, (GLsizei) height, + GL_BGRA, GL_UNSIGNED_BYTE, 0, + &texture); +#endif + + if (!texture || err) { + qWarning() << "CVOGLTextureCacheCreateTextureFromImage failed error:" << err << m_textureCache; } - glClearColor(0.0f, 0.0f, 0.0f, 1.0f); - glClear(GL_COLOR_BUFFER_BIT); + CVPixelBufferRelease(pixelBuffer); - glViewport(0, 0, m_targetSize.width(), m_targetSize.height()); + return texture; +} - if (m_useCoreProfile) { - CGLLockContext(m_NSGLContext.CGLContextObj); - m_shader->bind(); - glBindVertexArray(m_quadVao); - } else { - glMatrixMode(GL_PROJECTION); - glPushMatrix(); - glLoadIdentity(); +QImage AVFVideoFrameRenderer::renderLayerToImage(AVPlayerLayer *layer, QSize *size) +{ + size_t width = 0; + size_t height = 0; + CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(layer, width, height); + if (size) + *size = 
QSize(width, height); - // Render to FBO with inverted Y - glOrtho(0.0, m_targetSize.width(), 0.0, m_targetSize.height(), 0.0, 1.0); + if (!pixelBuffer) + return QImage(); - glMatrixMode(GL_MODELVIEW); - glPushMatrix(); - glLoadIdentity(); + OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer); + if (pixelFormat != kCVPixelFormatType_32BGRA) { +#ifdef QT_DEBUG_AVF + qWarning("CVPixelBuffer format is not BGRA32 (got: %d)", static_cast(pixelFormat)); +#endif + return QImage(); } - [m_videoLayerRenderer beginFrameAtTime:CACurrentMediaTime() timeStamp:nullptr]; - [m_videoLayerRenderer addUpdateRect:layer.bounds]; - [m_videoLayerRenderer render]; - [m_videoLayerRenderer endFrame]; - - if (m_useCoreProfile) { - glActiveTexture(0); - glBindTexture(GL_TEXTURE_RECTANGLE, m_textureName); + CVPixelBufferLockBaseAddress(pixelBuffer, 0); + char *data = (char *)CVPixelBufferGetBaseAddress(pixelBuffer); + size_t stride = CVPixelBufferGetBytesPerRow(pixelBuffer); - glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); + // format here is not relevant, only using for storage + QImage img = QImage(width, height, QImage::Format_ARGB32); + for (size_t j = 0; j < height; j++) { + memcpy(img.scanLine(j), data, width * 4); + data += stride; + } - glBindTexture(GL_TEXTURE_RECTANGLE, 0); + CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); + CVPixelBufferRelease(pixelBuffer); + return img; +} - glBindVertexArray(0); +void AVFVideoFrameRenderer::initRenderer() +{ + // even for using a texture directly, we need to be able to make a context current, + // so we need an offscreen, and we shouldn't assume we can make the surface context + // current on that offscreen, so use our own (sharing with it). Slightly + // excessive but no performance penalty and makes the QImage path easier to maintain - m_shader->release(); + //Make sure we have an OpenGL context to make current + if (!m_glContext) { + //Create OpenGL context and set share context from surface + QOpenGLContext *shareContext = nullptr; + if (m_surface) + shareContext = qobject_cast(m_surface->property("GLContext").value()); - CGLFlushDrawable(m_NSGLContext.CGLContextObj); - CGLUnlockContext(m_NSGLContext.CGLContextObj); - } else { - glMatrixMode(GL_MODELVIEW); - glPopMatrix(); - glMatrixMode(GL_PROJECTION); - glPopMatrix(); + m_glContext = new QOpenGLContext(); + if (shareContext) { + m_glContext->setShareContext(shareContext); + m_isContextShared = true; + } else { +#ifdef QT_DEBUG_AVF + qWarning("failed to get Render Thread context"); +#endif + m_isContextShared = false; + } + if (!m_glContext->create()) { +#ifdef QT_DEBUG_AVF + qWarning("failed to create QOpenGLContext"); +#endif + return; + } } - glFinish(); //Rendering needs to be done before passing texture to video frame - - fbo->release(); -} + if (!m_offscreenSurface) { + m_offscreenSurface = new QOffscreenSurface(); + m_offscreenSurface->setFormat(m_glContext->format()); + m_offscreenSurface->create(); + } -GLuint AVFVideoFrameRenderer::createGLTexture(CGLContextObj cglContextObj, CGLPixelFormatObj cglPixelFormtObj, CVOpenGLTextureCacheRef cvglTextureCache, - CVPixelBufferRef cvPixelBufferRef, CVOpenGLTextureRef cvOpenGLTextureRef) -{ - CVReturn cvret; - // Create an OpenGL CoreVideo texture cache from the pixel buffer. - cvret = CVOpenGLTextureCacheCreate( - kCFAllocatorDefault, - nil, - cglContextObj, - cglPixelFormtObj, - nil, - &cvglTextureCache); - - // Create a CVPixelBuffer-backed OpenGL texture image from the texture cache. 
- cvret = CVOpenGLTextureCacheCreateTextureFromImage( - kCFAllocatorDefault, - cvglTextureCache, - cvPixelBufferRef, - nil, - &cvOpenGLTextureRef); - - // Get an OpenGL texture name from the CVPixelBuffer-backed OpenGL texture image. - return CVOpenGLTextureGetName(cvOpenGLTextureRef); -} + // Need current context + m_glContext->makeCurrent(m_offscreenSurface); + + if (!m_textureCache) { +#ifdef Q_OS_MACOS + auto *currentContext = NSOpenGLContext.currentContext; + // Create an OpenGL CoreVideo texture cache from the pixel buffer. + auto err = CVOpenGLTextureCacheCreate( + kCFAllocatorDefault, + nullptr, + currentContext.CGLContextObj, + currentContext.pixelFormat.CGLPixelFormatObj, + nil, + &m_textureCache); +#else + CVReturn err = CVOGLTextureCacheCreate(kCFAllocatorDefault, nullptr, + [EAGLContext currentContext], + nullptr, &m_textureCache); +#endif + if (err) + qWarning("Error at CVOGLTextureCacheCreate %d", err); + } -id AVFVideoFrameRenderer::createMetalTexture(id mtlDevice, CVMetalTextureCacheRef cvMetalTextureCacheRef, CVPixelBufferRef cvPixelBufferRef, - MTLPixelFormat pixelFormat, size_t width, size_t height, CVMetalTextureRef cvMetalTextureRef) -{ - CVReturn cvret; - // Create a Metal Core Video texture cache from the pixel buffer. - cvret = CVMetalTextureCacheCreate( - kCFAllocatorDefault, - nil, - mtlDevice, - nil, - &cvMetalTextureCacheRef); - - // Create a CoreVideo pixel buffer backed Metal texture image from the texture cache. - cvret = CVMetalTextureCacheCreateTextureFromImage( - kCFAllocatorDefault, - cvMetalTextureCacheRef, - cvPixelBufferRef, nil, - pixelFormat, - width, height, - 0, - &cvMetalTextureRef); - - // Get a Metal texture using the CoreVideo Metal texture reference. - return CVMetalTextureGetTexture(cvMetalTextureRef); } diff --git a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.h b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.h deleted file mode 100644 index 6ad676ad7..000000000 --- a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.h +++ /dev/null @@ -1,113 +0,0 @@ -/**************************************************************************** -** -** Copyright (C) 2021 The Qt Company Ltd. -** Contact: https://www.qt.io/licensing/ -** -** This file is part of the Qt Toolkit. -** -** $QT_BEGIN_LICENSE:COMM$ -** -** Commercial License Usage -** Licensees holding valid commercial Qt licenses may use this file in -** accordance with the commercial license agreement provided with the -** Software or, alternatively, in accordance with the terms contained in -** a written agreement between you and The Qt Company. For licensing terms -** and conditions see https://www.qt.io/terms-conditions. For further -** information use the contact form at https://www.qt.io/contact-us. 
-** -** $QT_END_LICENSE$ -** -** -** -** -** -** -** -** -** -** -** -** -** -** -** -** -** -** -** -****************************************************************************/ - -#ifndef AVFVIDEOFRAMERENDERER_H -#define AVFVIDEOFRAMERENDERER_H - -#include -#include -#include -#include - -@class AVPlayerLayer; -@class AVPlayerItemVideoOutput; - -QT_BEGIN_NAMESPACE - -class QOpenGLContext; -class QOpenGLFramebufferObject; -class QOpenGLShaderProgram; -class QOffscreenSurface; -class QAbstractVideoSurface; - -typedef struct __CVBuffer *CVBufferRef; -typedef CVBufferRef CVImageBufferRef; -typedef CVImageBufferRef CVPixelBufferRef; -#if defined(Q_OS_IOS) || defined(Q_OS_TVOS) -typedef struct __CVOpenGLESTextureCache *CVOpenGLESTextureCacheRef; -typedef CVImageBufferRef CVOpenGLESTextureRef; -// helpers to avoid boring if def -typedef CVOpenGLESTextureCacheRef CVOGLTextureCacheRef; -typedef CVOpenGLESTextureRef CVOGLTextureRef; -#define CVOGLTextureGetTarget CVOpenGLESTextureGetTarget -#define CVOGLTextureGetName CVOpenGLESTextureGetName -#define CVOGLTextureCacheCreate CVOpenGLESTextureCacheCreate -#define CVOGLTextureCacheCreateTextureFromImage CVOpenGLESTextureCacheCreateTextureFromImage -#define CVOGLTextureCacheFlush CVOpenGLESTextureCacheFlush -#else -typedef struct __CVOpenGLTextureCache *CVOpenGLTextureCacheRef; -typedef CVImageBufferRef CVOpenGLTextureRef; -// helpers to avoid boring if def -typedef CVOpenGLTextureCacheRef CVOGLTextureCacheRef; -typedef CVOpenGLTextureRef CVOGLTextureRef; -#define CVOGLTextureGetTarget CVOpenGLTextureGetTarget -#define CVOGLTextureGetName CVOpenGLTextureGetName -#define CVOGLTextureCacheCreate CVOpenGLTextureCacheCreate -#define CVOGLTextureCacheCreateTextureFromImage CVOpenGLTextureCacheCreateTextureFromImage -#define CVOGLTextureCacheFlush CVOpenGLTextureCacheFlush -#endif - -class AVFVideoFrameRenderer : public QObject -{ -public: - AVFVideoFrameRenderer(QAbstractVideoSurface *surface, QObject *parent = nullptr); - - virtual ~AVFVideoFrameRenderer(); - - void setPlayerLayer(AVPlayerLayer *layer); - - CVOGLTextureRef renderLayerToTexture(AVPlayerLayer *layer); - QImage renderLayerToImage(AVPlayerLayer *layer); - -private: - void initRenderer(); - CVPixelBufferRef copyPixelBufferFromLayer(AVPlayerLayer *layer, size_t& width, size_t& height); - CVOGLTextureRef createCacheTextureFromLayer(AVPlayerLayer *layer, size_t& width, size_t& height); - - QOpenGLContext *m_glContext; - QOffscreenSurface *m_offscreenSurface; - QAbstractVideoSurface *m_surface; - CVOGLTextureCacheRef m_textureCache; - AVPlayerItemVideoOutput* m_videoOutput; - bool m_isContextShared; -}; - -QT_END_NAMESPACE - -#endif // AVFVIDEOFRAMERENDERER_H diff --git a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.mm b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.mm deleted file mode 100644 index 078898aa7..000000000 --- a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.mm +++ /dev/null @@ -1,261 +0,0 @@ -/**************************************************************************** -** -** Copyright (C) 2021 The Qt Company Ltd and/or its subsidiary(-ies). -** Contact: https://www.qt.io/licensing/ -** -** This file is part of the Qt Toolkit. 
-** -** $QT_BEGIN_LICENSE:COMM$ -** -** Commercial License Usage -** Licensees holding valid commercial Qt licenses may use this file in -** accordance with the commercial license agreement provided with the -** Software or, alternatively, in accordance with the terms contained in -** a written agreement between you and The Qt Company. For licensing terms -** and conditions see https://www.qt.io/terms-conditions. For further -** information use the contact form at https://www.qt.io/contact-us. -** -** $QT_END_LICENSE$ -** -** -** -** -** -** -** -** -** -** -** -** -** -** -** -** -** -** -** -****************************************************************************/ - -#include "avfvideoframerenderer_ios.h" - -#include -#include -#include -#include - -#ifdef QT_DEBUG_AVF -#include -#endif - -#import -#import -QT_USE_NAMESPACE - -AVFVideoFrameRenderer::AVFVideoFrameRenderer(QAbstractVideoSurface *surface, QObject *parent) - : QObject(parent) - , m_glContext(nullptr) - , m_offscreenSurface(nullptr) - , m_surface(surface) - , m_textureCache(nullptr) - , m_videoOutput(nullptr) - , m_isContextShared(true) -{ -} - -AVFVideoFrameRenderer::~AVFVideoFrameRenderer() -{ -#ifdef QT_DEBUG_AVF - qDebug() << Q_FUNC_INFO; -#endif - - [m_videoOutput release]; // sending to nil is fine - if (m_textureCache) - CFRelease(m_textureCache); - delete m_offscreenSurface; - delete m_glContext; -} - -void AVFVideoFrameRenderer::setPlayerLayer(AVPlayerLayer *layer) -{ - Q_UNUSED(layer) - if (m_videoOutput) { - [m_videoOutput release]; - m_videoOutput = nullptr; - // will be re-created in first call to copyPixelBufferFromLayer - } -} - -CVOGLTextureRef AVFVideoFrameRenderer::renderLayerToTexture(AVPlayerLayer *layer) -{ - initRenderer(); - - // If the glContext isn't shared, it doesn't make sense to return a texture for us - if (!m_isContextShared) - return nullptr; - - size_t dummyWidth = 0, dummyHeight = 0; - return createCacheTextureFromLayer(layer, dummyWidth, dummyHeight); -} - -static NSString* const AVF_PIXEL_FORMAT_KEY = (NSString*)kCVPixelBufferPixelFormatTypeKey; -static NSNumber* const AVF_PIXEL_FORMAT_VALUE = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]; -static NSDictionary* const AVF_OUTPUT_SETTINGS = [NSDictionary dictionaryWithObject:AVF_PIXEL_FORMAT_VALUE forKey:AVF_PIXEL_FORMAT_KEY]; - - -CVPixelBufferRef AVFVideoFrameRenderer::copyPixelBufferFromLayer(AVPlayerLayer *layer, - size_t& width, size_t& height) -{ - //Is layer valid - if (!layer) { -#ifdef QT_DEBUG_AVF - qWarning("copyPixelBufferFromLayer: invalid layer"); -#endif - return nullptr; - } - - if (!m_videoOutput) { - m_videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:AVF_OUTPUT_SETTINGS]; - [m_videoOutput setDelegate:nil queue:nil]; - AVPlayerItem * item = [[layer player] currentItem]; - [item addOutput:m_videoOutput]; - } - - CFTimeInterval currentCAFrameTime = CACurrentMediaTime(); - CMTime currentCMFrameTime = [m_videoOutput itemTimeForHostTime:currentCAFrameTime]; - // happens when buffering / loading - if (CMTimeCompare(currentCMFrameTime, kCMTimeZero) < 0) { - return nullptr; - } - - CVPixelBufferRef pixelBuffer = [m_videoOutput copyPixelBufferForItemTime:currentCMFrameTime - itemTimeForDisplay:nil]; - if (!pixelBuffer) { -#ifdef QT_DEBUG_AVF - qWarning("copyPixelBufferForItemTime returned nil"); - CMTimeShow(currentCMFrameTime); -#endif - return nullptr; - } - - width = CVPixelBufferGetWidth(pixelBuffer); - height = CVPixelBufferGetHeight(pixelBuffer); - return pixelBuffer; -} - 
-CVOGLTextureRef AVFVideoFrameRenderer::createCacheTextureFromLayer(AVPlayerLayer *layer, - size_t& width, size_t& height) -{ - CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(layer, width, height); - - if (!pixelBuffer) - return nullptr; - - CVOGLTextureCacheFlush(m_textureCache, 0); - - CVOGLTextureRef texture = nullptr; - CVReturn err = CVOGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault, m_textureCache, pixelBuffer, nullptr, - GL_TEXTURE_2D, GL_RGBA, - (GLsizei) width, (GLsizei) height, - GL_BGRA, GL_UNSIGNED_BYTE, 0, - &texture); - - if (!texture || err) { -#ifdef QT_DEBUG_AVF - qWarning("CVOGLTextureCacheCreateTextureFromImage failed (error: %d)", err); -#endif - } - - CVPixelBufferRelease(pixelBuffer); - - return texture; -} - -QImage AVFVideoFrameRenderer::renderLayerToImage(AVPlayerLayer *layer) -{ - size_t width = 0; - size_t height = 0; - CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(layer, width, height); - - if (!pixelBuffer) - return QImage(); - - OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer); - if (pixelFormat != kCVPixelFormatType_32BGRA) { -#ifdef QT_DEBUG_AVF - qWarning("CVPixelBuffer format is not BGRA32 (got: %d)", static_cast(pixelFormat)); -#endif - return QImage(); - } - - CVPixelBufferLockBaseAddress(pixelBuffer, 0); - char *data = (char *)CVPixelBufferGetBaseAddress(pixelBuffer); - size_t stride = CVPixelBufferGetBytesPerRow(pixelBuffer); - - // format here is not relevant, only using for storage - QImage img = QImage(width, height, QImage::Format_ARGB32); - for (size_t j = 0; j < height; j++) { - memcpy(img.scanLine(j), data, width * 4); - data += stride; - } - - CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); - CVPixelBufferRelease(pixelBuffer); - return img; -} - -void AVFVideoFrameRenderer::initRenderer() -{ - // even for using a texture directly, we need to be able to make a context current, - // so we need an offscreen, and we shouldn't assume we can make the surface context - // current on that offscreen, so use our own (sharing with it). 
Slightly - // excessive but no performance penalty and makes the QImage path easier to maintain - - //Make sure we have an OpenGL context to make current - if (!m_glContext) { - //Create OpenGL context and set share context from surface - QOpenGLContext *shareContext = nullptr; - if (m_surface) { - shareContext = qobject_cast(m_surface->property("GLContext").value()); - } - - m_glContext = new QOpenGLContext(); - if (shareContext) { - m_glContext->setShareContext(shareContext); - m_isContextShared = true; - } else { -#ifdef QT_DEBUG_AVF - qWarning("failed to get Render Thread context"); -#endif - m_isContextShared = false; - } - if (!m_glContext->create()) { -#ifdef QT_DEBUG_AVF - qWarning("failed to create QOpenGLContext"); -#endif - return; - } - } - - if (!m_offscreenSurface) { - m_offscreenSurface = new QOffscreenSurface(); - m_offscreenSurface->setFormat(m_glContext->format()); - m_offscreenSurface->create(); - } - - //Need current context - m_glContext->makeCurrent(m_offscreenSurface); - - if (!m_textureCache) { - // Create a new open gl texture cache - CVReturn err = CVOGLTextureCacheCreate(kCFAllocatorDefault, nullptr, - [EAGLContext currentContext], - nullptr, &m_textureCache); - if (err) { - #ifdef QT_DEBUG_AVF - qWarning("Error at CVOGLTextureCacheCreate %d", err); - #endif - } - } - -} diff --git a/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.h b/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.h index db675cd96..fa76fb33c 100644 --- a/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.h +++ b/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.h @@ -41,6 +41,8 @@ #define AVFVIDEORENDERERCONTROL_H #include +#include + #include #include @@ -82,8 +84,7 @@ private: AVFVideoFrameRenderer *m_frameRenderer; AVFDisplayLink *m_displayLink; - QSize m_nativeSize; - bool m_enableOpenGL; + QAbstractVideoBuffer::HandleType m_surfaceType = QAbstractVideoBuffer::NoHandle; }; QT_END_NAMESPACE diff --git a/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.mm b/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.mm index 2e68b36f9..b267c1f25 100644 --- a/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.mm +++ b/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.mm @@ -40,11 +40,7 @@ #include "avfvideorenderercontrol.h" #include "avfdisplaylink.h" -#if defined(Q_OS_IOS) || defined(Q_OS_TVOS) -#include "avfvideoframerenderer_ios.h" -#else #include "avfvideoframerenderer.h" -#endif #include #include @@ -58,69 +54,52 @@ QT_USE_NAMESPACE -#if defined(Q_OS_IOS) || defined(Q_OS_TVOS) -class TextureCacheVideoBuffer : public QAbstractVideoBuffer +class TextureVideoBuffer : public QAbstractVideoBuffer { public: - TextureCacheVideoBuffer(CVOGLTextureRef texture) - : QAbstractVideoBuffer(GLTextureHandle) + TextureVideoBuffer(GLuint texture, QAbstractVideoBuffer::HandleType type) + : QAbstractVideoBuffer(type) , m_texture(texture) {} - virtual ~TextureCacheVideoBuffer() - { - // absolutely critical that we drop this - // reference of textures will stay in the cache - CFRelease(m_texture); - } - MapMode mapMode() const { return NotMapped; } uchar *map(MapMode, int*, int*) { return nullptr; } void unmap() {} QVariant handle() const { - GLuint texId = CVOGLTextureGetName(m_texture); - return QVariant::fromValue(texId); + return QVariant::fromValue(m_texture); } private: - CVOGLTextureRef m_texture; + GLuint m_texture; }; -#else -class TextureVideoBuffer : public QAbstractVideoBuffer + +class CoreVideoTextureVideoBuffer : 
public TextureVideoBuffer { public: - TextureVideoBuffer(GLuint tex) - : QAbstractVideoBuffer(GLTextureHandle) - , m_texture(tex) + CoreVideoTextureVideoBuffer(CVOGLTextureRef texture, QAbstractVideoBuffer::HandleType type) + : TextureVideoBuffer(CVOGLTextureGetName(texture), type) + , m_coreVideoTexture(texture) {} - virtual ~TextureVideoBuffer() + virtual ~CoreVideoTextureVideoBuffer() { - } - - MapMode mapMode() const { return NotMapped; } - uchar *map(MapMode, int*, int*) { return 0; } - void unmap() {} - - QVariant handle() const - { - return QVariant::fromValue(m_texture); + // absolutely critical that we drop this + // reference of textures will stay in the cache + CFRelease(m_coreVideoTexture); } private: - GLuint m_texture; + CVOGLTextureRef m_coreVideoTexture; }; -#endif + AVFVideoRendererControl::AVFVideoRendererControl(QObject *parent) : QVideoRendererControl(parent) , m_surface(nullptr) , m_playerLayer(nullptr) , m_frameRenderer(nullptr) - , m_enableOpenGL(false) - { m_displayLink = new AVFDisplayLink(this); connect(m_displayLink, SIGNAL(tick(CVTimeStamp)), SLOT(updateVideoFrame(CVTimeStamp))); @@ -170,18 +149,26 @@ void AVFVideoRendererControl::setSurface(QAbstractVideoSurface *surface) //Surface changed, so we need a new frame renderer m_frameRenderer = new AVFVideoFrameRenderer(m_surface, this); -#if defined(Q_OS_IOS) || defined(Q_OS_TVOS) - if (m_playerLayer) { - m_frameRenderer->setPlayerLayer(static_cast(m_playerLayer)); - } -#endif - //Check for needed formats to render as OpenGL Texture - auto handleGlEnabled = [this] { - m_enableOpenGL = m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle).contains(QVideoFrame::Format_BGR32); + auto updateSurfaceType = [this] { + auto preferredOpenGLSurfaceTypes = { +#ifdef Q_OS_MACOS + QAbstractVideoBuffer::GLTextureRectangleHandle, // GL_TEXTURE_RECTANGLE +#endif + QAbstractVideoBuffer::GLTextureHandle // GL_TEXTURE_2D + }; + + for (auto surfaceType : preferredOpenGLSurfaceTypes) { + auto supportedFormats = m_surface->supportedPixelFormats(surfaceType); + if (supportedFormats.contains(QVideoFrame::Format_BGR32)) { + m_surfaceType = surfaceType; + return; + } + m_surfaceType = QAbstractVideoBuffer::NoHandle; // QImage + } }; - handleGlEnabled(); - connect(m_surface, &QAbstractVideoSurface::supportedFormatsChanged, this, handleGlEnabled); + updateSurfaceType(); + connect(m_surface, &QAbstractVideoSurface::supportedFormatsChanged, this, updateSurfaceType); //If we already have a layer, but changed surfaces start rendering again if (m_playerLayer && !m_displayLink->isActive()) { @@ -204,12 +191,6 @@ void AVFVideoRendererControl::setLayer(void *playerLayer) if (m_surface && m_surface->isActive()) m_surface->stop(); -#if defined(Q_OS_IOS) || defined(Q_OS_TVOS) - if (m_frameRenderer) { - m_frameRenderer->setPlayerLayer(static_cast(playerLayer)); - } -#endif - //If there is no layer to render, stop scheduling updates if (m_playerLayer == nullptr) { m_displayLink->stop(); @@ -238,36 +219,39 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts) if (!playerLayer.readyForDisplay) return; - if (m_enableOpenGL) { -#if defined(Q_OS_IOS) || defined(Q_OS_TVOS) - CVOGLTextureRef tex = m_frameRenderer->renderLayerToTexture(playerLayer); - - //Make sure we got a valid texture - if (tex == nullptr) - return; - - QAbstractVideoBuffer *buffer = new TextureCacheVideoBuffer(tex); + if (m_surfaceType == QAbstractVideoBuffer::GLTextureHandle + || m_surfaceType == QAbstractVideoBuffer::GLTextureRectangleHandle) { + QSize 
size; + QAbstractVideoBuffer *buffer = nullptr; + +#ifdef Q_OS_MACOS + if (m_surfaceType == QAbstractVideoBuffer::GLTextureRectangleHandle) { + // Render to GL_TEXTURE_RECTANGLE directly + if (CVOGLTextureRef tex = m_frameRenderer->renderLayerToTexture(playerLayer, &size)) + buffer = new CoreVideoTextureVideoBuffer(tex, m_surfaceType); + } else { + // Render to GL_TEXTURE_2D via FBO + if (GLuint tex = m_frameRenderer->renderLayerToFBO(playerLayer, &size)) + buffer = new TextureVideoBuffer(tex, m_surfaceType); + } #else - GLuint tex = m_frameRenderer->renderLayerToTexture(playerLayer); - //Make sure we got a valid texture - if (tex == 0) + Q_ASSERT(m_surfaceType != QAbstractVideoBuffer::GLTextureRectangleHandle); + // Render to GL_TEXTURE_2D directly + if (CVOGLTextureRef tex = m_frameRenderer->renderLayerToTexture(playerLayer, &size)) + buffer = new CoreVideoTextureVideoBuffer(tex, m_surfaceType); +#endif + if (!buffer) return; - QAbstractVideoBuffer *buffer = new TextureVideoBuffer(tex); -#endif - QVideoFrame frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_BGR32); + QVideoFrame frame = QVideoFrame(buffer, size, QVideoFrame::Format_BGR32); if (m_surface && frame.isValid()) { if (m_surface->isActive() && m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat()) m_surface->stop(); if (!m_surface->isActive()) { - QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), QAbstractVideoBuffer::GLTextureHandle); -#if defined(Q_OS_IOS) || defined(Q_OS_TVOS) + QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), m_surfaceType); format.setScanLineDirection(QVideoSurfaceFormat::TopToBottom); -#else - format.setScanLineDirection(QVideoSurfaceFormat::BottomToTop); -#endif if (!m_surface->start(format)) { //Surface doesn't support GLTextureHandle qWarning("Failed to activate video surface"); @@ -279,20 +263,21 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts) } } else { //fallback to rendering frames to QImages - QImage frameData = m_frameRenderer->renderLayerToImage(playerLayer); + QSize size; + QImage frameData = m_frameRenderer->renderLayerToImage(playerLayer, &size); if (frameData.isNull()) { return; } QAbstractVideoBuffer *buffer = new QImageVideoBuffer(frameData); - QVideoFrame frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_ARGB32); + QVideoFrame frame = QVideoFrame(buffer, size, QVideoFrame::Format_ARGB32); if (m_surface && frame.isValid()) { if (m_surface->isActive() && m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat()) m_surface->stop(); if (!m_surface->isActive()) { - QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), QAbstractVideoBuffer::NoHandle); + QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), m_surfaceType); if (!m_surface->start(format)) { qWarning("Failed to activate video surface"); @@ -308,7 +293,4 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts) void AVFVideoRendererControl::setupVideoOutput() { - AVPlayerLayer *playerLayer = static_cast(m_playerLayer); - if (playerLayer) - m_nativeSize = QSize(playerLayer.bounds.size.width, playerLayer.bounds.size.height); } diff --git a/src/plugins/avfoundation/mediaplayer/mediaplayer.pro b/src/plugins/avfoundation/mediaplayer/mediaplayer.pro index 174220f37..f71e0c3b3 100644 --- a/src/plugins/avfoundation/mediaplayer/mediaplayer.pro +++ b/src/plugins/avfoundation/mediaplayer/mediaplayer.pro @@ -39,34 +39,23 @@ OBJECTIVE_SOURCES += \ avfvideowidget.mm } -ios|tvos { - qtConfig(opengl) { - HEADERS += \ - 
avfvideoframerenderer_ios.h \
-            avfvideorenderercontrol.h \
-            avfdisplaylink.h
+qtConfig(opengl) {
+    HEADERS += \
+        avfvideoframerenderer.h \
+        avfvideorenderercontrol.h \
+        avfdisplaylink.h
 
-        OBJECTIVE_SOURCES += \
-            avfvideoframerenderer_ios.mm \
-            avfvideorenderercontrol.mm \
-            avfdisplaylink.mm
-    }
+    OBJECTIVE_SOURCES += \
+        avfvideoframerenderer.mm \
+        avfvideorenderercontrol.mm \
+        avfdisplaylink.mm
+}
+
+ios|tvos {
     LIBS += -framework Foundation
 } else {
     INCLUDEPATH += $$[QT_INSTALL_HEADERS]
     LIBS += -framework AppKit -framework Metal
-
-    qtConfig(opengl) {
-        HEADERS += \
-            avfvideoframerenderer.h \
-            avfvideorenderercontrol.h \
-            avfdisplaylink.h
-
-        OBJECTIVE_SOURCES += \
-            avfvideoframerenderer.mm \
-            avfvideorenderercontrol.mm \
-            avfdisplaylink.mm
-    }
 }
 
 OTHER_FILES += \
-- 
cgit v1.2.3


From 8ac4ea8582eed62332e3861240415c7d4fa6b140 Mon Sep 17 00:00:00 2001
From: Doris Verria
Date: Sun, 5 Dec 2021 01:11:19 +0100
Subject: Remove the AVPlayerItemVideoOutput from player item when changing media
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

When updating the player item of the AVPlayer, make sure to remove the
video output (AVPlayerItemVideoOutput) from it. It will be added again
to the updated player item on the next call to copyPixelBufferFromLayer.

This fixes issues where the last frame of the previous video source was
sometimes flashed before rendering the new one.

Fixes: QTBUG-87000
Change-Id: Iec66f0e27efe621d1992a2a0f9f5060aa51f7076
Reviewed-by: Tor Arne Vestbø
---
 src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm | 4 ++++
 1 file changed, 4 insertions(+)

(limited to 'src/plugins')

diff --git a/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm b/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
index ae2234764..9928c665b 100644
--- a/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
+++ b/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
@@ -168,6 +168,10 @@ static void *AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext
         [[NSNotificationCenter defaultCenter] removeObserver:self
                                                         name:AVPlayerItemTimeJumpedNotification
                                                       object:m_playerItem];
+        for (AVPlayerItemOutput *output in m_playerItem.outputs) {
+            if ([output isKindOfClass:[AVPlayerItemVideoOutput class]])
+                [m_playerItem removeOutput:output];
+        }
         m_playerItem = 0;
     }
     if (m_player) {
-- 
cgit v1.2.3
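
Taken together, the AVFoundation patches above switch the renderer to the pull
model of AVPlayerItemVideoOutput: attach an output configured for BGRA buffers
to the current AVPlayerItem, copy a CVPixelBufferRef for the current host time
on each display-link tick, and remove the output again before the player item
is replaced. The sketch below illustrates that sequence with plain AVFoundation
calls; it is not code from the patches, and the helper names
(attachVideoOutput, copyCurrentFrame, detachVideoOutput) are invented for the
example.

// Illustrative sketch only, not part of the patches above.
#import <AVFoundation/AVFoundation.h>
#import <CoreVideo/CoreVideo.h>
#import <QuartzCore/QuartzCore.h>

static AVPlayerItemVideoOutput *attachVideoOutput(AVPlayerItem *item)
{
    // Request BGRA pixel buffers that can be turned into OpenGL textures,
    // mirroring the attributes the new renderer asks for.
    AVPlayerItemVideoOutput *output = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:@{
        (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
        (NSString *)kCVPixelBufferOpenGLCompatibilityKey: @YES
    }];
    [item addOutput:output];
    return output; // caller owns the +1 reference (the plugin is built without ARC)
}

static CVPixelBufferRef copyCurrentFrame(AVPlayerItemVideoOutput *output)
{
    // Map the current host time onto the item's timeline and pull a frame if one is ready.
    CMTime itemTime = [output itemTimeForHostTime:CACurrentMediaTime()];
    if (![output hasNewPixelBufferForItemTime:itemTime])
        return NULL;
    // The caller must CVPixelBufferRelease() the returned buffer.
    return [output copyPixelBufferForItemTime:itemTime itemTimeForDisplay:nil];
}

static void detachVideoOutput(AVPlayerItem *item)
{
    // Remove any video outputs before the player item is replaced, so the next
    // item cannot briefly show the previous source's last frame.
    for (AVPlayerItemOutput *output in item.outputs) {
        if ([output isKindOfClass:[AVPlayerItemVideoOutput class]])
            [item removeOutput:output];
    }
}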