Diffstat (limited to 'src/plugins/avfoundation')
 -rw-r--r--  src/plugins/avfoundation/camera/avfcameraflashcontrol.mm          |  26
 -rw-r--r--  src/plugins/avfoundation/camera/avfcamerarenderercontrol.mm       |   3
 -rw-r--r--  src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm     |  25
 -rw-r--r--  src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.h      |  67
 -rw-r--r--  src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm     | 330
 -rw-r--r--  src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.h  | 113
 -rw-r--r--  src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.mm | 261
 -rw-r--r--  src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.h    |   5
 -rw-r--r--  src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.mm   | 140
 -rw-r--r--  src/plugins/avfoundation/mediaplayer/mediaplayer.pro              |  38
 10 files changed, 377 insertions(+), 631 deletions(-)
diff --git a/src/plugins/avfoundation/camera/avfcameraflashcontrol.mm b/src/plugins/avfoundation/camera/avfcameraflashcontrol.mm
index 42303ce17..1e27dc472 100644
--- a/src/plugins/avfoundation/camera/avfcameraflashcontrol.mm
+++ b/src/plugins/avfoundation/camera/avfcameraflashcontrol.mm
@@ -171,6 +171,20 @@ bool AVFCameraFlashControl::applyFlashSettings()
return false;
}
+ auto setAvTorchModeSafe = [&captureDevice](AVCaptureTorchMode avTorchMode) {
+ if ([captureDevice isTorchModeSupported:avTorchMode])
+ captureDevice.torchMode = avTorchMode;
+ else
+ qDebugCamera() << Q_FUNC_INFO << "Attempt to set unsupported torch mode" << avTorchMode;
+ };
+
+ auto setAvFlashModeSafe = [&captureDevice](AVCaptureFlashMode avFlashMode) {
+ if ([captureDevice isFlashModeSupported:avFlashMode])
+ captureDevice.flashMode = avFlashMode;
+ else
+ qDebugCamera() << Q_FUNC_INFO << "Attempt to set unsupported flash mode" << avFlashMode;
+ };
+
if (!isFlashModeSupported(m_flashMode)) {
qDebugCamera() << Q_FUNC_INFO << "unsupported mode" << m_flashMode;
return false;
@@ -192,7 +206,7 @@ bool AVFCameraFlashControl::applyFlashSettings()
return false;
}
#endif
- captureDevice.torchMode = AVCaptureTorchModeOff;
+ setAvTorchModeSafe(AVCaptureTorchModeOff);
}
#ifdef Q_OS_IOS
if (![captureDevice isFlashAvailable]) {
@@ -209,7 +223,7 @@ bool AVFCameraFlashControl::applyFlashSettings()
return false;
}
#endif
- captureDevice.flashMode = AVCaptureFlashModeOff;
+ setAvFlashModeSafe(AVCaptureFlashModeOff);
}
#ifdef Q_OS_IOS
@@ -221,13 +235,13 @@ bool AVFCameraFlashControl::applyFlashSettings()
}
if (m_flashMode == QCameraExposure::FlashOff)
- captureDevice.flashMode = AVCaptureFlashModeOff;
+ setAvFlashModeSafe(AVCaptureFlashModeOff);
else if (m_flashMode == QCameraExposure::FlashOn)
- captureDevice.flashMode = AVCaptureFlashModeOn;
+ setAvFlashModeSafe(AVCaptureFlashModeOn);
else if (m_flashMode == QCameraExposure::FlashAuto)
- captureDevice.flashMode = AVCaptureFlashModeAuto;
+ setAvFlashModeSafe(AVCaptureFlashModeAuto);
else if (m_flashMode == QCameraExposure::FlashVideoLight)
- captureDevice.torchMode = AVCaptureTorchModeOn;
+ setAvTorchModeSafe(AVCaptureTorchModeOn);
return true;
}
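
Note (not part of the patch): the two lambdas added above follow the usual AVFoundation rule of checking isTorchModeSupported:/isFlashModeSupported: before assigning, since assigning an unsupported mode can raise NSInvalidArgumentException. A minimal standalone sketch of the same pattern, with a hypothetical capture device:

    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    if (device && [device lockForConfiguration:&error]) {
        if ([device isTorchModeSupported:AVCaptureTorchModeOn])
            device.torchMode = AVCaptureTorchModeOn;       // only assign modes the hardware reports
        if ([device isFlashModeSupported:AVCaptureFlashModeAuto])
            device.flashMode = AVCaptureFlashModeAuto;
        [device unlockForConfiguration];
    }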
diff --git a/src/plugins/avfoundation/camera/avfcamerarenderercontrol.mm b/src/plugins/avfoundation/camera/avfcamerarenderercontrol.mm
index 9e1bf3f84..7bf9de071 100644
--- a/src/plugins/avfoundation/camera/avfcamerarenderercontrol.mm
+++ b/src/plugins/avfoundation/camera/avfcamerarenderercontrol.mm
@@ -278,7 +278,8 @@ AVFCameraRendererControl::AVFCameraRendererControl(QObject *parent)
AVFCameraRendererControl::~AVFCameraRendererControl()
{
- [m_cameraSession->captureSession() removeOutput:m_videoDataOutput];
+ if ([m_cameraSession->captureSession().outputs containsObject:m_videoDataOutput])
+ [m_cameraSession->captureSession() removeOutput:m_videoDataOutput];
[m_viewfinderFramesDelegate release];
if (m_delegateQueue)
dispatch_release(m_delegateQueue);
diff --git a/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm b/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
index d5554c51c..a601bb5ac 100644
--- a/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
+++ b/src/plugins/avfoundation/mediaplayer/avfmediaplayersession.mm
@@ -110,6 +110,12 @@ static void *AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext
self->m_session = session;
self->m_bufferIsLikelyToKeepUp = FALSE;
+
+ m_playerLayer = [AVPlayerLayer playerLayerWithPlayer:nil];
+ [m_playerLayer retain];
+ m_playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
+ m_playerLayer.anchorPoint = CGPointMake(0.0f, 0.0f);
+
return self;
}
@@ -162,6 +168,10 @@ static void *AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext
[[NSNotificationCenter defaultCenter] removeObserver:self
name:AVPlayerItemTimeJumpedNotification
object:m_playerItem];
+ for (AVPlayerItemOutput *output in m_playerItem.outputs) {
+ if ([output isKindOfClass:[AVPlayerItemVideoOutput class]])
+ [m_playerItem removeOutput:output];
+ }
m_playerItem = 0;
}
if (m_player) {
@@ -172,10 +182,6 @@ static void *AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext
[m_player release];
m_player = 0;
}
- if (m_playerLayer) {
- [m_playerLayer release];
- m_playerLayer = 0;
- }
}
- (void) prepareToPlayAsset:(AVURLAsset *)asset
@@ -260,14 +266,8 @@ static void *AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext
[m_player setMuted:m_session->isMuted()];
}
- //Create a new player layer if we don't have one already
- if (!m_playerLayer)
- {
- m_playerLayer = [AVPlayerLayer playerLayerWithPlayer:m_player];
- [m_playerLayer retain];
- m_playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
- m_playerLayer.anchorPoint = CGPointMake(0.0f, 0.0f);
- }
+ //Assign the new player to the long-lived output layer
+ m_playerLayer.player = m_player;
//Observe the AVPlayer "currentItem" property to find out when any
//AVPlayer replaceCurrentItemWithPlayerItem: replacement will/did
@@ -413,6 +413,7 @@ static void *AVFMediaPlayerSessionObserverCurrentItemDurationObservationContext
}
[m_mimeType release];
+ [m_playerLayer release];
[super dealloc];
}
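
Note (not part of the patch): the session now creates its AVPlayerLayer once at init time and keeps it until dealloc; loading new media only swaps the layer's player rather than recreating the layer. A condensed sketch of that ownership model, with someURL standing in for the real media source:

    AVPlayerLayer *layer = [[AVPlayerLayer playerLayerWithPlayer:nil] retain];
    layer.videoGravity = AVLayerVideoGravityResizeAspectFill;

    AVPlayer *player = [[AVPlayer alloc] initWithURL:someURL];  // new media source
    layer.player = player;                                      // reuse the long-lived layer

    // teardown: detach and release the player, release the layer last (as in dealloc above)
    layer.player = nil;
    [player release];
    [layer release];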
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.h b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.h
index 99b6bb0b5..ac67090a5 100644
--- a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.h
+++ b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.h
@@ -43,18 +43,48 @@
#include <QtCore/QObject>
#include <QtGui/QImage>
#include <QtGui/QOpenGLContext>
+#include <QtGui/QOpenGLTextureBlitter>
#include <QtCore/QSize>
-@class CARenderer;
@class AVPlayerLayer;
+@class AVPlayerItemVideoOutput;
QT_BEGIN_NAMESPACE
-class QOpenGLFramebufferObject;
-class QWindow;
class QOpenGLContext;
+class QOpenGLFramebufferObject;
+class QOpenGLShaderProgram;
+class QOffscreenSurface;
class QAbstractVideoSurface;
+typedef struct __CVBuffer *CVBufferRef;
+typedef CVBufferRef CVImageBufferRef;
+typedef CVImageBufferRef CVPixelBufferRef;
+
+#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+ typedef struct __CVOpenGLESTextureCache *CVOpenGLESTextureCacheRef;
+ typedef CVImageBufferRef CVOpenGLESTextureRef;
+ // helpers to avoid boring if def
+ typedef CVOpenGLESTextureCacheRef CVOGLTextureCacheRef;
+ typedef CVOpenGLESTextureRef CVOGLTextureRef;
+ #define CVOGLTextureGetTarget CVOpenGLESTextureGetTarget
+ #define CVOGLTextureGetName CVOpenGLESTextureGetName
+ #define CVOGLTextureCacheCreate CVOpenGLESTextureCacheCreate
+ #define CVOGLTextureCacheCreateTextureFromImage CVOpenGLESTextureCacheCreateTextureFromImage
+ #define CVOGLTextureCacheFlush CVOpenGLESTextureCacheFlush
+#else
+ typedef struct __CVOpenGLTextureCache *CVOpenGLTextureCacheRef;
+ typedef CVImageBufferRef CVOpenGLTextureRef;
+ // helpers to avoid boring if def
+ typedef CVOpenGLTextureCacheRef CVOGLTextureCacheRef;
+ typedef CVOpenGLTextureRef CVOGLTextureRef;
+ #define CVOGLTextureGetTarget CVOpenGLTextureGetTarget
+ #define CVOGLTextureGetName CVOpenGLTextureGetName
+ #define CVOGLTextureCacheCreate CVOpenGLTextureCacheCreate
+ #define CVOGLTextureCacheCreateTextureFromImage CVOpenGLTextureCacheCreateTextureFromImage
+ #define CVOGLTextureCacheFlush CVOpenGLTextureCacheFlush
+#endif
+
class AVFVideoFrameRenderer : public QObject
{
public:
@@ -62,22 +92,31 @@ public:
virtual ~AVFVideoFrameRenderer();
- GLuint renderLayerToTexture(AVPlayerLayer *layer);
- QImage renderLayerToImage(AVPlayerLayer *layer);
+ void setPlayerLayer(AVPlayerLayer *layer);
+
+ CVOGLTextureRef renderLayerToTexture(AVPlayerLayer *layer, QSize *size);
+#ifdef Q_OS_MACOS
+ GLuint renderLayerToFBO(AVPlayerLayer *layer, QSize *size);
+#endif
+ QImage renderLayerToImage(AVPlayerLayer *layer, QSize *size);
private:
- QOpenGLFramebufferObject* initRenderer(AVPlayerLayer *layer);
- void renderLayerToFBO(AVPlayerLayer *layer, QOpenGLFramebufferObject *fbo);
+ void initRenderer();
+ CVPixelBufferRef copyPixelBufferFromLayer(AVPlayerLayer *layer, size_t& width, size_t& height);
+ CVOGLTextureRef createCacheTextureFromLayer(AVPlayerLayer *layer, size_t& width, size_t& height);
- CARenderer *m_videoLayerRenderer;
- QAbstractVideoSurface *m_surface;
- QOpenGLFramebufferObject *m_fbo[2];
- QWindow *m_offscreenSurface;
QOpenGLContext *m_glContext;
- QSize m_targetSize;
-
- uint m_currentBuffer;
+ QOffscreenSurface *m_offscreenSurface;
+ QAbstractVideoSurface *m_surface;
+ CVOGLTextureCacheRef m_textureCache;
+ AVPlayerItemVideoOutput* m_videoOutput;
bool m_isContextShared;
+
+#ifdef Q_OS_MACOS
+ QOpenGLFramebufferObject *m_fbo[2];
+ uint m_currentFBO;
+ QOpenGLTextureBlitter m_blitter;
+#endif
};
QT_END_NAMESPACE
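
Note (not part of the patch): the typedefs and CVOGLTexture* macros above give the renderer a single spelling for the CoreVideo texture-cache API, which is named CVOpenGLESTexture* on iOS/tvOS and CVOpenGLTexture* on macOS. A small usage sketch that relies only on those aliases:

    // Bind a CoreVideo-backed texture on either platform; the target is
    // GL_TEXTURE_2D on iOS/tvOS and GL_TEXTURE_RECTANGLE on macOS.
    static void bindCVTexture(CVOGLTextureRef texture)
    {
        glBindTexture(CVOGLTextureGetTarget(texture), CVOGLTextureGetName(texture));
    }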
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm
index 51f961729..a22ee2b82 100644
--- a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm
+++ b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm
@@ -41,12 +41,20 @@
#include <QtMultimedia/qabstractvideosurface.h>
#include <QtGui/QOpenGLFramebufferObject>
-#include <QtGui/QWindow>
+#include <QtGui/QOpenGLShaderProgram>
+#include <QtGui/QOffscreenSurface>
+
+#include <QtCore/private/qcore_mac_p.h>
#ifdef QT_DEBUG_AVF
#include <QtCore/qdebug.h>
#endif
+#ifdef Q_OS_MACOS
+#import <AppKit/AppKit.h>
+#include <CoreVideo/CVOpenGLTextureCache.h>
+#endif
+
#import <CoreVideo/CVBase.h>
#import <AVFoundation/AVFoundation.h>
@@ -54,15 +62,23 @@ QT_USE_NAMESPACE
AVFVideoFrameRenderer::AVFVideoFrameRenderer(QAbstractVideoSurface *surface, QObject *parent)
: QObject(parent)
- , m_videoLayerRenderer(nullptr)
- , m_surface(surface)
- , m_offscreenSurface(nullptr)
, m_glContext(nullptr)
- , m_currentBuffer(1)
+ , m_offscreenSurface(nullptr)
+ , m_surface(surface)
+ , m_textureCache(nullptr)
+ , m_videoOutput(nullptr)
, m_isContextShared(true)
{
+ m_videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:@{
+ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
+ (NSString *)kCVPixelBufferOpenGLCompatibilityKey: @YES
+ }];
+ [m_videoOutput setDelegate:nil queue:nil];
+
+#ifdef Q_OS_MACOS
m_fbo[0] = nullptr;
m_fbo[1] = nullptr;
+#endif
}
AVFVideoFrameRenderer::~AVFVideoFrameRenderer()
@@ -71,81 +87,200 @@ AVFVideoFrameRenderer::~AVFVideoFrameRenderer()
qDebug() << Q_FUNC_INFO;
#endif
- [m_videoLayerRenderer release];
- delete m_fbo[0];
- delete m_fbo[1];
+ [m_videoOutput release];
+ if (m_textureCache)
+ CFRelease(m_textureCache);
delete m_offscreenSurface;
delete m_glContext;
+
+#ifdef Q_OS_MACOS
+ delete m_fbo[0];
+ delete m_fbo[1];
+#endif
}
-GLuint AVFVideoFrameRenderer::renderLayerToTexture(AVPlayerLayer *layer)
+#ifdef Q_OS_MACOS
+GLuint AVFVideoFrameRenderer::renderLayerToFBO(AVPlayerLayer *layer, QSize *size)
{
- //Is layer valid
- if (!layer)
+ QCFType<CVOGLTextureRef> texture = renderLayerToTexture(layer, size);
+ if (!texture)
return 0;
- //If the glContext isn't shared, it doesn't make sense to return a texture for us
- if (m_offscreenSurface && !m_isContextShared)
- return 0;
+ Q_ASSERT(size);
- QOpenGLFramebufferObject *fbo = initRenderer(layer);
+ // Do we have FBO's already?
+ if ((!m_fbo[0] && !m_fbo[1]) || (m_fbo[0]->size() != *size)) {
+ delete m_fbo[0];
+ delete m_fbo[1];
+ m_fbo[0] = new QOpenGLFramebufferObject(*size);
+ m_fbo[1] = new QOpenGLFramebufferObject(*size);
+ }
+
+ // Switch buffer target
+ m_currentFBO = !m_currentFBO;
+ QOpenGLFramebufferObject *fbo = m_fbo[m_currentFBO];
- if (!fbo)
+ if (!fbo || !fbo->bind())
return 0;
- renderLayerToFBO(layer, fbo);
- if (m_glContext)
- m_glContext->doneCurrent();
+ glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+ glClear(GL_COLOR_BUFFER_BIT);
+
+ glViewport(0, 0, size->width(), size->height());
+
+ if (!m_blitter.isCreated())
+ m_blitter.create();
+
+ m_blitter.bind(GL_TEXTURE_RECTANGLE);
+ m_blitter.blit(CVOpenGLTextureGetName(texture), QMatrix4x4(), QMatrix3x3());
+ m_blitter.release();
+ glFinish();
+
+ fbo->release();
return fbo->texture();
}
+#endif
+
+CVOGLTextureRef AVFVideoFrameRenderer::renderLayerToTexture(AVPlayerLayer *layer, QSize *size)
+{
+ initRenderer();
+
+ // If the glContext isn't shared, it doesn't make sense to return a texture for us
+ if (!m_isContextShared)
+ return nullptr;
+
+ size_t width = 0, height = 0;
+ auto texture = createCacheTextureFromLayer(layer, width, height);
+ if (size)
+ *size = QSize(width, height);
+ return texture;
+}
-QImage AVFVideoFrameRenderer::renderLayerToImage(AVPlayerLayer *layer)
+CVPixelBufferRef AVFVideoFrameRenderer::copyPixelBufferFromLayer(AVPlayerLayer *layer,
+ size_t& width, size_t& height)
{
//Is layer valid
if (!layer) {
- return QImage();
+#ifdef QT_DEBUG_AVF
+ qWarning("copyPixelBufferFromLayer: invalid layer");
+#endif
+ return nullptr;
}
- QOpenGLFramebufferObject *fbo = initRenderer(layer);
+ AVPlayerItem *item = layer.player.currentItem;
+ if (![item.outputs containsObject:m_videoOutput])
+ [item addOutput:m_videoOutput];
- if (!fbo)
- return QImage();
+ CFTimeInterval currentCAFrameTime = CACurrentMediaTime();
+ CMTime currentCMFrameTime = [m_videoOutput itemTimeForHostTime:currentCAFrameTime];
+
+ // Happens when buffering / loading
+ if (CMTimeCompare(currentCMFrameTime, kCMTimeZero) < 0)
+ return nullptr;
+
+ if (![m_videoOutput hasNewPixelBufferForItemTime:currentCMFrameTime])
+ return nullptr;
+
+ CVPixelBufferRef pixelBuffer = [m_videoOutput copyPixelBufferForItemTime:currentCMFrameTime
+ itemTimeForDisplay:nil];
+ if (!pixelBuffer) {
+#ifdef QT_DEBUG_AVF
+ qWarning("copyPixelBufferForItemTime returned nil");
+ CMTimeShow(currentCMFrameTime);
+#endif
+ return nullptr;
+ }
+
+ width = CVPixelBufferGetWidth(pixelBuffer);
+ height = CVPixelBufferGetHeight(pixelBuffer);
+ return pixelBuffer;
+}
- renderLayerToFBO(layer, fbo);
- QImage fboImage = fbo->toImage();
- if (m_glContext)
- m_glContext->doneCurrent();
+CVOGLTextureRef AVFVideoFrameRenderer::createCacheTextureFromLayer(AVPlayerLayer *layer,
+ size_t& width, size_t& height)
+{
+ CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(layer, width, height);
+
+ if (!pixelBuffer)
+ return nullptr;
+
+ CVOGLTextureCacheFlush(m_textureCache, 0);
+
+ CVOGLTextureRef texture = nullptr;
+#ifdef Q_OS_MACOS
+ CVReturn err = CVOpenGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
+ m_textureCache,
+ pixelBuffer,
+ nil,
+ &texture);
+#else
+ CVReturn err = CVOGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault, m_textureCache, pixelBuffer, nullptr,
+ GL_TEXTURE_2D, GL_RGBA,
+ (GLsizei) width, (GLsizei) height,
+ GL_BGRA, GL_UNSIGNED_BYTE, 0,
+ &texture);
+#endif
+
+ if (!texture || err) {
+ qWarning() << "CVOGLTextureCacheCreateTextureFromImage failed error:" << err << m_textureCache;
+ }
+
+ CVPixelBufferRelease(pixelBuffer);
- return fboImage;
+ return texture;
}
-QOpenGLFramebufferObject *AVFVideoFrameRenderer::initRenderer(AVPlayerLayer *layer)
+QImage AVFVideoFrameRenderer::renderLayerToImage(AVPlayerLayer *layer, QSize *size)
{
+ size_t width = 0;
+ size_t height = 0;
+ CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(layer, width, height);
+ if (size)
+ *size = QSize(width, height);
+
+ if (!pixelBuffer)
+ return QImage();
- //Get size from AVPlayerLayer
- m_targetSize = QSize(layer.bounds.size.width, layer.bounds.size.height);
+ OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
+ if (pixelFormat != kCVPixelFormatType_32BGRA) {
+#ifdef QT_DEBUG_AVF
+ qWarning("CVPixelBuffer format is not BGRA32 (got: %d)", static_cast<quint32>(pixelFormat));
+#endif
+ return QImage();
+ }
- QOpenGLContext *shareContext = !m_glContext && m_surface
- ? qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>())
- : nullptr;
+ CVPixelBufferLockBaseAddress(pixelBuffer, 0);
+ char *data = (char *)CVPixelBufferGetBaseAddress(pixelBuffer);
+ size_t stride = CVPixelBufferGetBytesPerRow(pixelBuffer);
+
+ // format here is not relevant, only using for storage
+ QImage img = QImage(width, height, QImage::Format_ARGB32);
+ for (size_t j = 0; j < height; j++) {
+ memcpy(img.scanLine(j), data, width * 4);
+ data += stride;
+ }
+
+ CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
+ CVPixelBufferRelease(pixelBuffer);
+ return img;
+}
+
+void AVFVideoFrameRenderer::initRenderer()
+{
+ // even for using a texture directly, we need to be able to make a context current,
+ // so we need an offscreen, and we shouldn't assume we can make the surface context
+ // current on that offscreen, so use our own (sharing with it). Slightly
+ // excessive but no performance penalty and makes the QImage path easier to maintain
//Make sure we have an OpenGL context to make current
- if ((shareContext && shareContext != QOpenGLContext::currentContext())
- || (!QOpenGLContext::currentContext() && !m_glContext)) {
-
- //Create Hidden QWindow surface to create context in this thread
- delete m_offscreenSurface;
- m_offscreenSurface = new QWindow();
- m_offscreenSurface->setSurfaceType(QWindow::OpenGLSurface);
- //Needs geometry to be a valid surface, but size is not important
- m_offscreenSurface->setGeometry(0, 0, 1, 1);
- m_offscreenSurface->create();
+ if (!m_glContext) {
+ //Create OpenGL context and set share context from surface
+ QOpenGLContext *shareContext = nullptr;
+ if (m_surface)
+ shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>());
- delete m_glContext;
m_glContext = new QOpenGLContext();
- m_glContext->setFormat(m_offscreenSurface->requestedFormat());
-
if (shareContext) {
m_glContext->setShareContext(shareContext);
m_isContextShared = true;
@@ -156,83 +291,40 @@ QOpenGLFramebufferObject *AVFVideoFrameRenderer::initRenderer(AVPlayerLayer *lay
m_isContextShared = false;
}
if (!m_glContext->create()) {
+#ifdef QT_DEBUG_AVF
qWarning("failed to create QOpenGLContext");
- return nullptr;
- }
-
- // CARenderer must be re-created with different current context, so release it now.
- // See lines below where m_videoLayerRenderer is constructed.
- if (m_videoLayerRenderer) {
- [m_videoLayerRenderer release];
- m_videoLayerRenderer = nullptr;
+#endif
+ return;
}
}
- //Need current context
- if (m_glContext)
- m_glContext->makeCurrent(m_offscreenSurface);
-
- //Create the CARenderer if needed
- if (!m_videoLayerRenderer) {
- m_videoLayerRenderer = [CARenderer rendererWithCGLContext: CGLGetCurrentContext() options: nil];
- [m_videoLayerRenderer retain];
- }
-
- //Set/Change render source if needed
- if (m_videoLayerRenderer.layer != layer) {
- m_videoLayerRenderer.layer = layer;
- m_videoLayerRenderer.bounds = layer.bounds;
- }
-
- //Do we have FBO's already?
- if ((!m_fbo[0] && !m_fbo[0]) || (m_fbo[0]->size() != m_targetSize)) {
- delete m_fbo[0];
- delete m_fbo[1];
- m_fbo[0] = new QOpenGLFramebufferObject(m_targetSize);
- m_fbo[1] = new QOpenGLFramebufferObject(m_targetSize);
+ if (!m_offscreenSurface) {
+ m_offscreenSurface = new QOffscreenSurface();
+ m_offscreenSurface->setFormat(m_glContext->format());
+ m_offscreenSurface->create();
}
- //Switch buffer target
- m_currentBuffer = !m_currentBuffer;
- return m_fbo[m_currentBuffer];
-}
-
-void AVFVideoFrameRenderer::renderLayerToFBO(AVPlayerLayer *layer, QOpenGLFramebufferObject *fbo)
-{
- //Start Rendering
- //NOTE: This rendering method will NOT work on iOS as there is no CARenderer in iOS
- if (!fbo->bind()) {
- qWarning("AVFVideoRender FBO failed to bind");
- return;
+ // Need current context
+ m_glContext->makeCurrent(m_offscreenSurface);
+
+ if (!m_textureCache) {
+#ifdef Q_OS_MACOS
+ auto *currentContext = NSOpenGLContext.currentContext;
+ // Create an OpenGL CoreVideo texture cache from the pixel buffer.
+ auto err = CVOpenGLTextureCacheCreate(
+ kCFAllocatorDefault,
+ nullptr,
+ currentContext.CGLContextObj,
+ currentContext.pixelFormat.CGLPixelFormatObj,
+ nil,
+ &m_textureCache);
+#else
+ CVReturn err = CVOGLTextureCacheCreate(kCFAllocatorDefault, nullptr,
+ [EAGLContext currentContext],
+ nullptr, &m_textureCache);
+#endif
+ if (err)
+ qWarning("Error at CVOGLTextureCacheCreate %d", err);
}
- glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
- glClear(GL_COLOR_BUFFER_BIT);
-
- glViewport(0, 0, m_targetSize.width(), m_targetSize.height());
-
- glMatrixMode(GL_PROJECTION);
- glPushMatrix();
- glLoadIdentity();
-
- //Render to FBO with inverted Y
- glOrtho(0.0, m_targetSize.width(), 0.0, m_targetSize.height(), 0.0, 1.0);
-
- glMatrixMode(GL_MODELVIEW);
- glPushMatrix();
- glLoadIdentity();
-
- [m_videoLayerRenderer beginFrameAtTime:CACurrentMediaTime() timeStamp:NULL];
- [m_videoLayerRenderer addUpdateRect:layer.bounds];
- [m_videoLayerRenderer render];
- [m_videoLayerRenderer endFrame];
-
- glMatrixMode(GL_MODELVIEW);
- glPopMatrix();
- glMatrixMode(GL_PROJECTION);
- glPopMatrix();
-
- glFinish(); //Rendering needs to be done before passing texture to video frame
-
- fbo->release();
}
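
Note (not part of the patch): the rewritten renderer no longer draws the AVPlayerLayer through CARenderer; it pulls the pixel buffer that should currently be on screen from an AVPlayerItemVideoOutput and feeds it to a CV OpenGL texture cache. A condensed sketch of the pull step, assuming the output is already attached to the player item:

    #import <AVFoundation/AVFoundation.h>
    #import <QuartzCore/QuartzCore.h>   // CACurrentMediaTime()

    static CVPixelBufferRef copyCurrentPixelBuffer(AVPlayerItemVideoOutput *output)
    {
        CMTime itemTime = [output itemTimeForHostTime:CACurrentMediaTime()];
        if (![output hasNewPixelBufferForItemTime:itemTime])
            return NULL;                                    // nothing newer than the last frame
        return [output copyPixelBufferForItemTime:itemTime  // caller must CVPixelBufferRelease()
                               itemTimeForDisplay:NULL];
    }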
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.h b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.h
deleted file mode 100644
index d9f6baa7e..000000000
--- a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.h
+++ /dev/null
@@ -1,113 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2016 The Qt Company Ltd.
-** Contact: https://www.qt.io/licensing/
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see https://www.qt.io/terms-conditions. For further
-** information use the contact form at https://www.qt.io/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 3 as published by the Free Software
-** Foundation and appearing in the file LICENSE.LGPL3 included in the
-** packaging of this file. Please review the following information to
-** ensure the GNU Lesser General Public License version 3 requirements
-** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
-**
-** GNU General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU
-** General Public License version 2.0 or (at your option) the GNU General
-** Public license version 3 or any later version approved by the KDE Free
-** Qt Foundation. The licenses are as published by the Free Software
-** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
-** included in the packaging of this file. Please review the following
-** information to ensure the GNU General Public License requirements will
-** be met: https://www.gnu.org/licenses/gpl-2.0.html and
-** https://www.gnu.org/licenses/gpl-3.0.html.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-#ifndef AVFVIDEOFRAMERENDERER_H
-#define AVFVIDEOFRAMERENDERER_H
-
-#include <QtCore/QObject>
-#include <QtGui/QImage>
-#include <QtGui/QOpenGLContext>
-#include <QtCore/QSize>
-
-@class AVPlayerLayer;
-@class AVPlayerItemVideoOutput;
-
-QT_BEGIN_NAMESPACE
-
-class QOpenGLContext;
-class QOpenGLFramebufferObject;
-class QOpenGLShaderProgram;
-class QOffscreenSurface;
-class QAbstractVideoSurface;
-
-typedef struct __CVBuffer *CVBufferRef;
-typedef CVBufferRef CVImageBufferRef;
-typedef CVImageBufferRef CVPixelBufferRef;
-#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
-typedef struct __CVOpenGLESTextureCache *CVOpenGLESTextureCacheRef;
-typedef CVImageBufferRef CVOpenGLESTextureRef;
-// helpers to avoid boring if def
-typedef CVOpenGLESTextureCacheRef CVOGLTextureCacheRef;
-typedef CVOpenGLESTextureRef CVOGLTextureRef;
-#define CVOGLTextureGetTarget CVOpenGLESTextureGetTarget
-#define CVOGLTextureGetName CVOpenGLESTextureGetName
-#define CVOGLTextureCacheCreate CVOpenGLESTextureCacheCreate
-#define CVOGLTextureCacheCreateTextureFromImage CVOpenGLESTextureCacheCreateTextureFromImage
-#define CVOGLTextureCacheFlush CVOpenGLESTextureCacheFlush
-#else
-typedef struct __CVOpenGLTextureCache *CVOpenGLTextureCacheRef;
-typedef CVImageBufferRef CVOpenGLTextureRef;
-// helpers to avoid boring if def
-typedef CVOpenGLTextureCacheRef CVOGLTextureCacheRef;
-typedef CVOpenGLTextureRef CVOGLTextureRef;
-#define CVOGLTextureGetTarget CVOpenGLTextureGetTarget
-#define CVOGLTextureGetName CVOpenGLTextureGetName
-#define CVOGLTextureCacheCreate CVOpenGLTextureCacheCreate
-#define CVOGLTextureCacheCreateTextureFromImage CVOpenGLTextureCacheCreateTextureFromImage
-#define CVOGLTextureCacheFlush CVOpenGLTextureCacheFlush
-#endif
-
-class AVFVideoFrameRenderer : public QObject
-{
-public:
- AVFVideoFrameRenderer(QAbstractVideoSurface *surface, QObject *parent = nullptr);
-
- virtual ~AVFVideoFrameRenderer();
-
- void setPlayerLayer(AVPlayerLayer *layer);
-
- CVOGLTextureRef renderLayerToTexture(AVPlayerLayer *layer);
- QImage renderLayerToImage(AVPlayerLayer *layer);
-
-private:
- void initRenderer();
- CVPixelBufferRef copyPixelBufferFromLayer(AVPlayerLayer *layer, size_t& width, size_t& height);
- CVOGLTextureRef createCacheTextureFromLayer(AVPlayerLayer *layer, size_t& width, size_t& height);
-
- QOpenGLContext *m_glContext;
- QOffscreenSurface *m_offscreenSurface;
- QAbstractVideoSurface *m_surface;
- CVOGLTextureCacheRef m_textureCache;
- AVPlayerItemVideoOutput* m_videoOutput;
- bool m_isContextShared;
-};
-
-QT_END_NAMESPACE
-
-#endif // AVFVIDEOFRAMERENDERER_H
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.mm b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.mm
deleted file mode 100644
index 70e402e6c..000000000
--- a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer_ios.mm
+++ /dev/null
@@ -1,261 +0,0 @@
-/****************************************************************************
-**
-** Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
-** Contact: https://www.qt.io/licensing/
-**
-** This file is part of the Qt Toolkit.
-**
-** $QT_BEGIN_LICENSE:LGPL$
-** Commercial License Usage
-** Licensees holding valid commercial Qt licenses may use this file in
-** accordance with the commercial license agreement provided with the
-** Software or, alternatively, in accordance with the terms contained in
-** a written agreement between you and The Qt Company. For licensing terms
-** and conditions see https://www.qt.io/terms-conditions. For further
-** information use the contact form at https://www.qt.io/contact-us.
-**
-** GNU Lesser General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU Lesser
-** General Public License version 3 as published by the Free Software
-** Foundation and appearing in the file LICENSE.LGPL3 included in the
-** packaging of this file. Please review the following information to
-** ensure the GNU Lesser General Public License version 3 requirements
-** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
-**
-** GNU General Public License Usage
-** Alternatively, this file may be used under the terms of the GNU
-** General Public License version 2.0 or (at your option) the GNU General
-** Public license version 3 or any later version approved by the KDE Free
-** Qt Foundation. The licenses are as published by the Free Software
-** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
-** included in the packaging of this file. Please review the following
-** information to ensure the GNU General Public License requirements will
-** be met: https://www.gnu.org/licenses/gpl-2.0.html and
-** https://www.gnu.org/licenses/gpl-3.0.html.
-**
-** $QT_END_LICENSE$
-**
-****************************************************************************/
-
-#include "avfvideoframerenderer_ios.h"
-
-#include <QtMultimedia/qabstractvideosurface.h>
-#include <QtGui/QOpenGLFramebufferObject>
-#include <QtGui/QOpenGLShaderProgram>
-#include <QtGui/QOffscreenSurface>
-
-#ifdef QT_DEBUG_AVF
-#include <QtCore/qdebug.h>
-#endif
-
-#import <CoreVideo/CVBase.h>
-#import <AVFoundation/AVFoundation.h>
-QT_USE_NAMESPACE
-
-AVFVideoFrameRenderer::AVFVideoFrameRenderer(QAbstractVideoSurface *surface, QObject *parent)
- : QObject(parent)
- , m_glContext(nullptr)
- , m_offscreenSurface(nullptr)
- , m_surface(surface)
- , m_textureCache(nullptr)
- , m_videoOutput(nullptr)
- , m_isContextShared(true)
-{
-}
-
-AVFVideoFrameRenderer::~AVFVideoFrameRenderer()
-{
-#ifdef QT_DEBUG_AVF
- qDebug() << Q_FUNC_INFO;
-#endif
-
- [m_videoOutput release]; // sending to nil is fine
- if (m_textureCache)
- CFRelease(m_textureCache);
- delete m_offscreenSurface;
- delete m_glContext;
-}
-
-void AVFVideoFrameRenderer::setPlayerLayer(AVPlayerLayer *layer)
-{
- Q_UNUSED(layer)
- if (m_videoOutput) {
- [m_videoOutput release];
- m_videoOutput = nullptr;
- // will be re-created in first call to copyPixelBufferFromLayer
- }
-}
-
-CVOGLTextureRef AVFVideoFrameRenderer::renderLayerToTexture(AVPlayerLayer *layer)
-{
- initRenderer();
-
- // If the glContext isn't shared, it doesn't make sense to return a texture for us
- if (!m_isContextShared)
- return nullptr;
-
- size_t dummyWidth = 0, dummyHeight = 0;
- return createCacheTextureFromLayer(layer, dummyWidth, dummyHeight);
-}
-
-static NSString* const AVF_PIXEL_FORMAT_KEY = (NSString*)kCVPixelBufferPixelFormatTypeKey;
-static NSNumber* const AVF_PIXEL_FORMAT_VALUE = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
-static NSDictionary* const AVF_OUTPUT_SETTINGS = [NSDictionary dictionaryWithObject:AVF_PIXEL_FORMAT_VALUE forKey:AVF_PIXEL_FORMAT_KEY];
-
-
-CVPixelBufferRef AVFVideoFrameRenderer::copyPixelBufferFromLayer(AVPlayerLayer *layer,
- size_t& width, size_t& height)
-{
- //Is layer valid
- if (!layer) {
-#ifdef QT_DEBUG_AVF
- qWarning("copyPixelBufferFromLayer: invalid layer");
-#endif
- return nullptr;
- }
-
- if (!m_videoOutput) {
- m_videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:AVF_OUTPUT_SETTINGS];
- [m_videoOutput setDelegate:nil queue:nil];
- AVPlayerItem * item = [[layer player] currentItem];
- [item addOutput:m_videoOutput];
- }
-
- CFTimeInterval currentCAFrameTime = CACurrentMediaTime();
- CMTime currentCMFrameTime = [m_videoOutput itemTimeForHostTime:currentCAFrameTime];
- // happens when buffering / loading
- if (CMTimeCompare(currentCMFrameTime, kCMTimeZero) < 0) {
- return nullptr;
- }
-
- CVPixelBufferRef pixelBuffer = [m_videoOutput copyPixelBufferForItemTime:currentCMFrameTime
- itemTimeForDisplay:nil];
- if (!pixelBuffer) {
-#ifdef QT_DEBUG_AVF
- qWarning("copyPixelBufferForItemTime returned nil");
- CMTimeShow(currentCMFrameTime);
-#endif
- return nullptr;
- }
-
- width = CVPixelBufferGetWidth(pixelBuffer);
- height = CVPixelBufferGetHeight(pixelBuffer);
- return pixelBuffer;
-}
-
-CVOGLTextureRef AVFVideoFrameRenderer::createCacheTextureFromLayer(AVPlayerLayer *layer,
- size_t& width, size_t& height)
-{
- CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(layer, width, height);
-
- if (!pixelBuffer)
- return nullptr;
-
- CVOGLTextureCacheFlush(m_textureCache, 0);
-
- CVOGLTextureRef texture = nullptr;
- CVReturn err = CVOGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault, m_textureCache, pixelBuffer, nullptr,
- GL_TEXTURE_2D, GL_RGBA,
- (GLsizei) width, (GLsizei) height,
- GL_BGRA, GL_UNSIGNED_BYTE, 0,
- &texture);
-
- if (!texture || err) {
-#ifdef QT_DEBUG_AVF
- qWarning("CVOGLTextureCacheCreateTextureFromImage failed (error: %d)", err);
-#endif
- }
-
- CVPixelBufferRelease(pixelBuffer);
-
- return texture;
-}
-
-QImage AVFVideoFrameRenderer::renderLayerToImage(AVPlayerLayer *layer)
-{
- size_t width = 0;
- size_t height = 0;
- CVPixelBufferRef pixelBuffer = copyPixelBufferFromLayer(layer, width, height);
-
- if (!pixelBuffer)
- return QImage();
-
- OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
- if (pixelFormat != kCVPixelFormatType_32BGRA) {
-#ifdef QT_DEBUG_AVF
- qWarning("CVPixelBuffer format is not BGRA32 (got: %d)", static_cast<quint32>(pixelFormat));
-#endif
- return QImage();
- }
-
- CVPixelBufferLockBaseAddress(pixelBuffer, 0);
- char *data = (char *)CVPixelBufferGetBaseAddress(pixelBuffer);
- size_t stride = CVPixelBufferGetBytesPerRow(pixelBuffer);
-
- // format here is not relevant, only using for storage
- QImage img = QImage(width, height, QImage::Format_ARGB32);
- for (size_t j = 0; j < height; j++) {
- memcpy(img.scanLine(j), data, width * 4);
- data += stride;
- }
-
- CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
- CVPixelBufferRelease(pixelBuffer);
- return img;
-}
-
-void AVFVideoFrameRenderer::initRenderer()
-{
- // even for using a texture directly, we need to be able to make a context current,
- // so we need an offscreen, and we shouldn't assume we can make the surface context
- // current on that offscreen, so use our own (sharing with it). Slightly
- // excessive but no performance penalty and makes the QImage path easier to maintain
-
- //Make sure we have an OpenGL context to make current
- if (!m_glContext) {
- //Create OpenGL context and set share context from surface
- QOpenGLContext *shareContext = nullptr;
- if (m_surface) {
- shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>());
- }
-
- m_glContext = new QOpenGLContext();
- if (shareContext) {
- m_glContext->setShareContext(shareContext);
- m_isContextShared = true;
- } else {
-#ifdef QT_DEBUG_AVF
- qWarning("failed to get Render Thread context");
-#endif
- m_isContextShared = false;
- }
- if (!m_glContext->create()) {
-#ifdef QT_DEBUG_AVF
- qWarning("failed to create QOpenGLContext");
-#endif
- return;
- }
- }
-
- if (!m_offscreenSurface) {
- m_offscreenSurface = new QOffscreenSurface();
- m_offscreenSurface->setFormat(m_glContext->format());
- m_offscreenSurface->create();
- }
-
- //Need current context
- m_glContext->makeCurrent(m_offscreenSurface);
-
- if (!m_textureCache) {
- // Create a new open gl texture cache
- CVReturn err = CVOGLTextureCacheCreate(kCFAllocatorDefault, nullptr,
- [EAGLContext currentContext],
- nullptr, &m_textureCache);
- if (err) {
- #ifdef QT_DEBUG_AVF
- qWarning("Error at CVOGLTextureCacheCreate %d", err);
- #endif
- }
- }
-
-}
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.h b/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.h
index 85dc19d31..a88573eaa 100644
--- a/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.h
+++ b/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.h
@@ -41,6 +41,8 @@
#define AVFVIDEORENDERERCONTROL_H
#include <QtMultimedia/QVideoRendererControl>
+#include <QtMultimedia/qabstractvideobuffer.h>
+
#include <QtCore/QMutex>
#include <QtCore/QSize>
@@ -82,8 +84,7 @@ private:
AVFVideoFrameRenderer *m_frameRenderer;
AVFDisplayLink *m_displayLink;
- QSize m_nativeSize;
- bool m_enableOpenGL;
+ QAbstractVideoBuffer::HandleType m_surfaceType = QAbstractVideoBuffer::NoHandle;
};
QT_END_NAMESPACE
diff --git a/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.mm b/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.mm
index 63bdee4f5..f299d5f86 100644
--- a/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.mm
+++ b/src/plugins/avfoundation/mediaplayer/avfvideorenderercontrol.mm
@@ -40,11 +40,7 @@
#include "avfvideorenderercontrol.h"
#include "avfdisplaylink.h"
-#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
-#include "avfvideoframerenderer_ios.h"
-#else
#include "avfvideoframerenderer.h"
-#endif
#include <QtMultimedia/qabstractvideobuffer.h>
#include <QtMultimedia/qabstractvideosurface.h>
@@ -58,69 +54,52 @@
QT_USE_NAMESPACE
-#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
-class TextureCacheVideoBuffer : public QAbstractVideoBuffer
+class TextureVideoBuffer : public QAbstractVideoBuffer
{
public:
- TextureCacheVideoBuffer(CVOGLTextureRef texture)
- : QAbstractVideoBuffer(GLTextureHandle)
+ TextureVideoBuffer(GLuint texture, QAbstractVideoBuffer::HandleType type)
+ : QAbstractVideoBuffer(type)
, m_texture(texture)
{}
- virtual ~TextureCacheVideoBuffer()
- {
- // absolutely critical that we drop this
- // reference of textures will stay in the cache
- CFRelease(m_texture);
- }
-
MapMode mapMode() const { return NotMapped; }
uchar *map(MapMode, int*, int*) { return nullptr; }
void unmap() {}
QVariant handle() const
{
- GLuint texId = CVOGLTextureGetName(m_texture);
- return QVariant::fromValue<unsigned int>(texId);
+ return QVariant::fromValue<unsigned int>(m_texture);
}
private:
- CVOGLTextureRef m_texture;
+ GLuint m_texture;
};
-#else
-class TextureVideoBuffer : public QAbstractVideoBuffer
+
+class CoreVideoTextureVideoBuffer : public TextureVideoBuffer
{
public:
- TextureVideoBuffer(GLuint tex)
- : QAbstractVideoBuffer(GLTextureHandle)
- , m_texture(tex)
+ CoreVideoTextureVideoBuffer(CVOGLTextureRef texture, QAbstractVideoBuffer::HandleType type)
+ : TextureVideoBuffer(CVOGLTextureGetName(texture), type)
+ , m_coreVideoTexture(texture)
{}
- virtual ~TextureVideoBuffer()
+ virtual ~CoreVideoTextureVideoBuffer()
{
- }
-
- MapMode mapMode() const { return NotMapped; }
- uchar *map(MapMode, int*, int*) { return 0; }
- void unmap() {}
-
- QVariant handle() const
- {
- return QVariant::fromValue<unsigned int>(m_texture);
+ // absolutely critical that we drop this
+ // reference of textures will stay in the cache
+ CFRelease(m_coreVideoTexture);
}
private:
- GLuint m_texture;
+ CVOGLTextureRef m_coreVideoTexture;
};
-#endif
+
AVFVideoRendererControl::AVFVideoRendererControl(QObject *parent)
: QVideoRendererControl(parent)
, m_surface(nullptr)
, m_playerLayer(nullptr)
, m_frameRenderer(nullptr)
- , m_enableOpenGL(false)
-
{
m_displayLink = new AVFDisplayLink(this);
connect(m_displayLink, SIGNAL(tick(CVTimeStamp)), SLOT(updateVideoFrame(CVTimeStamp)));
@@ -170,18 +149,26 @@ void AVFVideoRendererControl::setSurface(QAbstractVideoSurface *surface)
//Surface changed, so we need a new frame renderer
m_frameRenderer = new AVFVideoFrameRenderer(m_surface, this);
-#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
- if (m_playerLayer) {
- m_frameRenderer->setPlayerLayer(static_cast<AVPlayerLayer*>(m_playerLayer));
- }
-#endif
- //Check for needed formats to render as OpenGL Texture
- auto handleGlEnabled = [this] {
- m_enableOpenGL = m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle).contains(QVideoFrame::Format_BGR32);
+ auto updateSurfaceType = [this] {
+ auto preferredOpenGLSurfaceTypes = {
+#ifdef Q_OS_MACOS
+ QAbstractVideoBuffer::GLTextureRectangleHandle, // GL_TEXTURE_RECTANGLE
+#endif
+ QAbstractVideoBuffer::GLTextureHandle // GL_TEXTURE_2D
+ };
+
+ for (auto surfaceType : preferredOpenGLSurfaceTypes) {
+ auto supportedFormats = m_surface->supportedPixelFormats(surfaceType);
+ if (supportedFormats.contains(QVideoFrame::Format_BGR32)) {
+ m_surfaceType = surfaceType;
+ return;
+ }
+ m_surfaceType = QAbstractVideoBuffer::NoHandle; // QImage
+ }
};
- handleGlEnabled();
- connect(m_surface, &QAbstractVideoSurface::supportedFormatsChanged, this, handleGlEnabled);
+ updateSurfaceType();
+ connect(m_surface, &QAbstractVideoSurface::supportedFormatsChanged, this, updateSurfaceType);
//If we already have a layer, but changed surfaces start rendering again
if (m_playerLayer && !m_displayLink->isActive()) {
@@ -204,12 +191,6 @@ void AVFVideoRendererControl::setLayer(void *playerLayer)
if (m_surface && m_surface->isActive())
m_surface->stop();
-#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
- if (m_frameRenderer) {
- m_frameRenderer->setPlayerLayer(static_cast<AVPlayerLayer*>(playerLayer));
- }
-#endif
-
//If there is no layer to render, stop scheduling updates
if (m_playerLayer == nullptr) {
m_displayLink->stop();
@@ -238,36 +219,39 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
if (!playerLayer.readyForDisplay)
return;
- if (m_enableOpenGL) {
-#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
- CVOGLTextureRef tex = m_frameRenderer->renderLayerToTexture(playerLayer);
-
- //Make sure we got a valid texture
- if (tex == nullptr)
- return;
-
- QAbstractVideoBuffer *buffer = new TextureCacheVideoBuffer(tex);
+ if (m_surfaceType == QAbstractVideoBuffer::GLTextureHandle
+ || m_surfaceType == QAbstractVideoBuffer::GLTextureRectangleHandle) {
+ QSize size;
+ QAbstractVideoBuffer *buffer = nullptr;
+
+#ifdef Q_OS_MACOS
+ if (m_surfaceType == QAbstractVideoBuffer::GLTextureRectangleHandle) {
+ // Render to GL_TEXTURE_RECTANGLE directly
+ if (CVOGLTextureRef tex = m_frameRenderer->renderLayerToTexture(playerLayer, &size))
+ buffer = new CoreVideoTextureVideoBuffer(tex, m_surfaceType);
+ } else {
+ // Render to GL_TEXTURE_2D via FBO
+ if (GLuint tex = m_frameRenderer->renderLayerToFBO(playerLayer, &size))
+ buffer = new TextureVideoBuffer(tex, m_surfaceType);
+ }
#else
- GLuint tex = m_frameRenderer->renderLayerToTexture(playerLayer);
- //Make sure we got a valid texture
- if (tex == 0)
+ Q_ASSERT(m_surfaceType != QAbstractVideoBuffer::GLTextureRectangleHandle);
+ // Render to GL_TEXTURE_2D directly
+ if (CVOGLTextureRef tex = m_frameRenderer->renderLayerToTexture(playerLayer, &size))
+ buffer = new CoreVideoTextureVideoBuffer(tex, m_surfaceType);
+#endif
+ if (!buffer)
return;
- QAbstractVideoBuffer *buffer = new TextureVideoBuffer(tex);
-#endif
- QVideoFrame frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_BGR32);
+ QVideoFrame frame = QVideoFrame(buffer, size, QVideoFrame::Format_BGR32);
if (m_surface && frame.isValid()) {
if (m_surface->isActive() && m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat())
m_surface->stop();
if (!m_surface->isActive()) {
- QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), QAbstractVideoBuffer::GLTextureHandle);
-#if defined(Q_OS_IOS) || defined(Q_OS_TVOS)
+ QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), m_surfaceType);
format.setScanLineDirection(QVideoSurfaceFormat::TopToBottom);
-#else
- format.setScanLineDirection(QVideoSurfaceFormat::BottomToTop);
-#endif
if (!m_surface->start(format)) {
//Surface doesn't support GLTextureHandle
qWarning("Failed to activate video surface");
@@ -279,20 +263,21 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
}
} else {
//fallback to rendering frames to QImages
- QImage frameData = m_frameRenderer->renderLayerToImage(playerLayer);
+ QSize size;
+ QImage frameData = m_frameRenderer->renderLayerToImage(playerLayer, &size);
if (frameData.isNull()) {
return;
}
QAbstractVideoBuffer *buffer = new QImageVideoBuffer(frameData);
- QVideoFrame frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_ARGB32);
+ QVideoFrame frame = QVideoFrame(buffer, size, QVideoFrame::Format_ARGB32);
if (m_surface && frame.isValid()) {
if (m_surface->isActive() && m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat())
m_surface->stop();
if (!m_surface->isActive()) {
- QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), QAbstractVideoBuffer::NoHandle);
+ QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), m_surfaceType);
if (!m_surface->start(format)) {
qWarning("Failed to activate video surface");
@@ -308,7 +293,4 @@ void AVFVideoRendererControl::updateVideoFrame(const CVTimeStamp &ts)
void AVFVideoRendererControl::setupVideoOutput()
{
- AVPlayerLayer *playerLayer = static_cast<AVPlayerLayer*>(m_playerLayer);
- if (playerLayer)
- m_nativeSize = QSize(playerLayer.bounds.size.width, playerLayer.bounds.size.height);
}
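
Note (not part of the patch): on macOS the control now prefers GL_TEXTURE_RECTANGLE frames when the surface advertises them, then GL_TEXTURE_2D, and finally falls back to QImage. A sketch of how the chosen handle type ends up in the surface format, with surface and size as hypothetical inputs:

    #include <QtMultimedia/qabstractvideosurface.h>
    #include <QtMultimedia/qvideosurfaceformat.h>

    static bool startRectangleSurface(QAbstractVideoSurface *surface, const QSize &size)
    {
        QVideoSurfaceFormat format(size, QVideoFrame::Format_BGR32,
                                   QAbstractVideoBuffer::GLTextureRectangleHandle);
        format.setScanLineDirection(QVideoSurfaceFormat::TopToBottom);  // CoreVideo frames are top-down
        return surface->start(format);
    }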
diff --git a/src/plugins/avfoundation/mediaplayer/mediaplayer.pro b/src/plugins/avfoundation/mediaplayer/mediaplayer.pro
index b60b276e9..f71e0c3b3 100644
--- a/src/plugins/avfoundation/mediaplayer/mediaplayer.pro
+++ b/src/plugins/avfoundation/mediaplayer/mediaplayer.pro
@@ -39,33 +39,23 @@ OBJECTIVE_SOURCES += \
avfvideowidget.mm
}
-ios|tvos {
- qtConfig(opengl) {
- HEADERS += \
- avfvideoframerenderer_ios.h \
- avfvideorenderercontrol.h \
- avfdisplaylink.h
+qtConfig(opengl) {
+ HEADERS += \
+ avfvideoframerenderer.h \
+ avfvideorenderercontrol.h \
+ avfdisplaylink.h
- OBJECTIVE_SOURCES += \
- avfvideoframerenderer_ios.mm \
- avfvideorenderercontrol.mm \
- avfdisplaylink.mm
- }
+ OBJECTIVE_SOURCES += \
+ avfvideoframerenderer.mm \
+ avfvideorenderercontrol.mm \
+ avfdisplaylink.mm
+}
+
+ios|tvos {
LIBS += -framework Foundation
} else {
- LIBS += -framework AppKit
-
- qtConfig(opengl) {
- HEADERS += \
- avfvideoframerenderer.h \
- avfvideorenderercontrol.h \
- avfdisplaylink.h
-
- OBJECTIVE_SOURCES += \
- avfvideoframerenderer.mm \
- avfvideorenderercontrol.mm \
- avfdisplaylink.mm
- }
+ INCLUDEPATH += $$[QT_INSTALL_HEADERS]
+ LIBS += -framework AppKit -framework Metal
}
OTHER_FILES += \