author    Yoann Lopes <yoann.lopes@theqtcompany.com>  2015-05-29 18:02:26 +0200
committer Yoann Lopes <yoann.lopes@theqtcompany.com>  2015-09-17 13:10:28 +0000
commit    46a83d5b86a792e62f14674e27c5d95695f2b44d (patch)
tree      dde4a796d0e988db7578df5d172456159f7dd7bb /src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm
parent    99f6cf5f282c0d3d7192d5e87b745873e159c93d (diff)
AVFoundation: render camera frames using OpenGL textures on iOS.
OpenGL textures can easily be created from a CVImageBuffer using Apple APIs. This avoids having to map the buffer into main memory and therefore greatly improves rendering performance. We could do the same on OS X, but there the textures are always of the GL_TEXTURE_RECTANGLE target type, and changes would need to be made to the QVideoFrame API and to the video node implementations to support that.

Change-Id: I6dde7e8d7a27460e41523cd474c3c741affc1480
Reviewed-by: James Turner <james.turner@kdab.com>
Reviewed-by: Christian Stromme <christian.stromme@theqtcompany.com>
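Only the pixel-format selection is visible in this file; the texture creation itself lives elsewhere in the plugin. As a rough sketch of the zero-copy path the commit message describes (the helper name textureFromPixelBuffer, the glContext and pixelBuffer parameters, and the error handling are illustrative assumptions, not code from this patch), creating a GL texture from a captured CVPixelBuffer on iOS can look like this:

    // Illustrative sketch only, not code from this patch. Assumed inputs: a current
    // EAGLContext (glContext) and a 32BGRA CVPixelBufferRef (pixelBuffer) coming from
    // an AVCaptureVideoDataOutput callback.
    #import <CoreVideo/CoreVideo.h>
    #import <OpenGLES/EAGL.h>
    #import <OpenGLES/ES2/gl.h>
    #import <OpenGLES/ES2/glext.h>

    static GLuint textureFromPixelBuffer(EAGLContext *glContext, CVPixelBufferRef pixelBuffer)
    {
        // A real implementation would create the cache once and reuse it for every frame.
        CVOpenGLESTextureCacheRef cache = 0;
        if (CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, glContext, NULL, &cache) != kCVReturnSuccess)
            return 0;

        // Wraps the pixel buffer in a GL texture without mapping it to main memory.
        CVOpenGLESTextureRef texture = 0;
        CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(
                    kCFAllocatorDefault, cache, pixelBuffer, NULL,
                    GL_TEXTURE_2D, GL_RGBA,
                    (GLsizei)CVPixelBufferGetWidth(pixelBuffer),
                    (GLsizei)CVPixelBufferGetHeight(pixelBuffer),
                    GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture);
        if (err != kCVReturnSuccess)
            return 0;

        // The texture name is what a QVideoFrame carrying a GLTextureHandle would expose.
        GLuint name = CVOpenGLESTextureGetName(texture);
        glBindTexture(CVOpenGLESTextureGetTarget(texture), name);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

        // 'texture' and 'cache' must stay alive (and eventually be CFRelease'd) while
        // the texture is in use; that bookkeeping is omitted here.
        return name;
    }

Because the GPU samples such a texture directly, the per-frame copy that CVPixelBufferLockBaseAddress-based mapping requires disappears, which is where the performance win described above comes from.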
Diffstat (limited to 'src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm')
-rw-r--r--    src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm    31
1 file changed, 19 insertions(+), 12 deletions(-)
diff --git a/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm b/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm
index b32c0cd63..05f28898d 100644
--- a/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm
+++ b/src/plugins/avfoundation/camera/avfcameraviewfindersettingscontrol.mm
@@ -573,22 +573,29 @@ void AVFCameraViewfinderSettingsControl2::applySettings()
     if (!convertPixelFormatIfSupported(m_settings.pixelFormat(), avfPixelFormat)) {
         // If the the pixel format is not specified or invalid, pick the preferred video surface
         // format, or if no surface is set, the preferred capture device format
-        const QVector<QVideoFrame::PixelFormat> deviceFormats = viewfinderPixelFormats();
-        QList<QVideoFrame::PixelFormat> surfaceFormats;
-        if (m_service->videoOutput() && m_service->videoOutput()->surface())
-            surfaceFormats = m_service->videoOutput()->surface()->supportedPixelFormats();
-
-        QVideoFrame::PixelFormat format = deviceFormats.first();
-        for (int i = 0; i < surfaceFormats.count(); ++i) {
-            const QVideoFrame::PixelFormat surfaceFormat = surfaceFormats.at(i);
-            if (deviceFormats.contains(surfaceFormat)) {
-                format = surfaceFormat;
-                break;
+        const QVector<QVideoFrame::PixelFormat> deviceFormats = viewfinderPixelFormats();
+        QVideoFrame::PixelFormat pickedFormat = deviceFormats.first();
+
+        QAbstractVideoSurface *surface = m_service->videoOutput() ? m_service->videoOutput()->surface()
+                                                                  : 0;
+        if (surface) {
+            if (m_service->videoOutput()->supportsTextures()) {
+                pickedFormat = QVideoFrame::Format_ARGB32;
+            } else {
+                QList<QVideoFrame::PixelFormat> surfaceFormats = m_service->videoOutput()->surface()->supportedPixelFormats();
+
+                for (int i = 0; i < surfaceFormats.count(); ++i) {
+                    const QVideoFrame::PixelFormat surfaceFormat = surfaceFormats.at(i);
+                    if (deviceFormats.contains(surfaceFormat)) {
+                        pickedFormat = surfaceFormat;
+                        break;
+                    }
+                }
             }
         }
-        CVPixelFormatFromQtFormat(format, avfPixelFormat);
+        CVPixelFormatFromQtFormat(pickedFormat, avfPixelFormat);
     }
 
     if (avfPixelFormat != 0) {
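Picking QVideoFrame::Format_ARGB32 whenever the video output supports textures matches what the iOS texture cache expects: Qt's ARGB32 layout is byte-order BGRA on little-endian devices, which the CVPixelFormatFromQtFormat() call above presumably resolves to kCVPixelFormatType_32BGRA. In plain AVFoundation terms (illustrative only, not code from this file), the corresponding capture configuration is:

    // Illustrative only, not code from this file: the AVFoundation-level equivalent of
    // asking the capture output for BGRA buffers that the texture cache can wrap directly.
    #import <AVFoundation/AVFoundation.h>

    AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = @{
        (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)
    };

The fallback branch keeps the old behaviour: when the surface cannot consume textures, the first device format that the surface also supports is picked, and the device's preferred format is used otherwise.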