From 9fb8dd4e76700da8d321a8677e44e771a0213a7e Mon Sep 17 00:00:00 2001
From: Ihor Dutchak
Date: Sat, 16 Jun 2018 14:30:32 +0300
Subject: DirectShow: use supported pixel formats for QCamera video surface

[ChangeLog][DirectShow] Do not convert QCamera video frames to RGB32 if
the provided QAbstractVideoSurface supports the frame pixel format
requested by QCameraViewfinderSettings.

Task-number: QTBUG-68768
Change-Id: I78d0aaf6c22c4bfee2b17f78a3709d7ba9a8b4fa
Reviewed-by: VaL Doroshchuk
---
 src/plugins/directshow/camera/dscamerasession.cpp | 53 ++++++++++++-----------
 1 file changed, 28 insertions(+), 25 deletions(-)

diff --git a/src/plugins/directshow/camera/dscamerasession.cpp b/src/plugins/directshow/camera/dscamerasession.cpp
index 43cc9dbd1..73c97c824 100644
--- a/src/plugins/directshow/camera/dscamerasession.cpp
+++ b/src/plugins/directshow/camera/dscamerasession.cpp
@@ -44,6 +44,7 @@
 #include 
 #include 
 #include 
+#include 
 
 #include "dscamerasession.h"
 #include "dsvideorenderer.h"
@@ -67,6 +68,7 @@ DSCameraSession::DSCameraSession(QObject *parent)
     , m_previewStarted(false)
     , m_surface(nullptr)
     , m_previewPixelFormat(QVideoFrame::Format_Invalid)
+    , m_stride(-1)
     , m_readyForCapture(false)
     , m_imageIdCounter(0)
     , m_currentImageId(-1)
@@ -613,12 +615,9 @@ void DSCameraSession::onFrameAvailable(double time, const QByteArray &data)
     // the device might be potentially unplugged.
     m_deviceLostEventTimer.start(100);
 
-    // (We should be getting only RGB32 data)
-    int stride = m_previewSize.width() * 4;
-
     // In case the source produces frames faster than we can display them,
     // only keep the most recent one
-    m_currentFrame = QVideoFrame(new QMemoryVideoBuffer(data, stride),
+    m_currentFrame = QVideoFrame(new QMemoryVideoBuffer(data, m_stride),
                                  m_previewSize,
                                  m_previewPixelFormat);
 
@@ -657,17 +656,11 @@ void DSCameraSession::presentFrame()
     m_captureMutex.lock();
 
     if (m_capturedFrame.isValid()) {
-        Q_ASSERT(m_previewPixelFormat == QVideoFrame::Format_RGB32);
-
-        m_capturedFrame.map(QAbstractVideoBuffer::ReadOnly);
-        captureImage = QImage(m_capturedFrame.bits(),
-                              m_previewSize.width(), m_previewSize.height(),
-                              QImage::Format_RGB32);
+        captureImage = qt_imageFromVideoFrame(m_capturedFrame);
 
-        captureImage = captureImage.mirrored(m_needsHorizontalMirroring); // also causes a deep copy of the data
-
-        m_capturedFrame.unmap();
+        const bool needsVerticalMirroring = m_previewSurfaceFormat.scanLineDirection() != QVideoSurfaceFormat::TopToBottom;
+        captureImage = captureImage.mirrored(m_needsHorizontalMirroring, needsVerticalMirroring); // also causes a deep copy of the data
 
         QtConcurrent::run(this, &DSCameraSession::processCapturedImage,
                           m_currentImageId, m_captureDestinations, captureImage, m_imageCaptureFileName);
 
@@ -878,22 +871,34 @@ bool DSCameraSession::configurePreviewFormat()
     VIDEOINFOHEADER *videoInfo = reinterpret_cast<VIDEOINFOHEADER *>(m_sourceFormat->pbFormat);
     videoInfo->AvgTimePerFrame = 10000000 / resolvedViewfinderSettings.maximumFrameRate();
 
-    // We only support RGB32, if the capture source doesn't support
-    // that format, the graph builder will automatically insert a
-    // converter.
+    m_previewPixelFormat = resolvedViewfinderSettings.pixelFormat();
+    const AM_MEDIA_TYPE *resolvedGrabberFormat = &m_sourceFormat;
 
-    if (m_surface && !m_surface->supportedPixelFormats(QAbstractVideoBuffer::NoHandle)
-            .contains(QVideoFrame::Format_RGB32)) {
-        qWarning() << "Video surface needs to support RGB32 pixel format";
-        return false;
+    if (m_surface) {
+        const auto surfaceFormats = m_surface->supportedPixelFormats(QAbstractVideoBuffer::NoHandle);
+        if (!surfaceFormats.contains(m_previewPixelFormat)) {
+            if (surfaceFormats.contains(QVideoFrame::Format_RGB32)) {
+                // As a fallback, we support RGB32, if the capture source doesn't support
+                // that format, the graph builder will automatically insert a
+                // converter (when possible).
+
+                static const AM_MEDIA_TYPE rgb32GrabberFormat { MEDIATYPE_Video, MEDIASUBTYPE_ARGB32, 0, 0, 0, FORMAT_VideoInfo, nullptr, 0, nullptr};
+                resolvedGrabberFormat = &rgb32GrabberFormat;
+                m_previewPixelFormat = QVideoFrame::Format_RGB32;
+
+            } else {
+                qWarning() << "Video surface needs to support at least RGB32 pixel format";
+                return false;
+            }
+        }
     }
 
-    m_previewPixelFormat = QVideoFrame::Format_RGB32;
     m_previewSize = resolvedViewfinderSettings.resolution();
     m_previewSurfaceFormat = QVideoSurfaceFormat(m_previewSize,
                                                  m_previewPixelFormat,
                                                  QAbstractVideoBuffer::NoHandle);
-    m_previewSurfaceFormat.setScanLineDirection(QVideoSurfaceFormat::BottomToTop);
+    m_previewSurfaceFormat.setScanLineDirection(DirectShowMediaType::scanLineDirection(m_previewPixelFormat, videoInfo->bmiHeader));
+    m_stride = DirectShowMediaType::bytesPerLine(m_previewSurfaceFormat);
 
     HRESULT hr;
     IAMStreamConfig* pConfig = 0;
@@ -914,9 +919,7 @@ bool DSCameraSession::configurePreviewFormat()
         return false;
     }
 
-    // Set sample grabber format
-    static const AM_MEDIA_TYPE grabberFormat { MEDIATYPE_Video, MEDIASUBTYPE_ARGB32, 0, 0, 0, FORMAT_VideoInfo, nullptr, 0, nullptr};
-    if (!m_previewSampleGrabber->setMediaType(&grabberFormat))
+    if (!m_previewSampleGrabber->setMediaType(resolvedGrabberFormat))
        return false;
 
     m_previewSampleGrabber->start(DirectShowSampleGrabber::CallbackMethod::BufferCB);
-- 
cgit v1.2.3
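
For reference, below is a minimal application-side sketch (not part of the patch) of how the negotiation added in configurePreviewFormat() is exercised: a QAbstractVideoSurface advertises the pixel format that the QCameraViewfinderSettings request, so the DirectShow backend can keep the camera's native format instead of falling back to the RGB32 sample-grabber conversion. The YuyvSurface class, the startCameraPreview() helper and the choice of Format_YUYV are illustrative assumptions; whether the conversion is actually skipped depends on the formats the device exposes.

#include <QCamera>
#include <QCameraViewfinderSettings>
#include <QAbstractVideoSurface>
#include <QVideoFrame>

// Illustrative surface: advertises a non-RGB32 format so the backend does
// not need to insert an RGB32 converter when the camera can deliver YUYV.
class YuyvSurface : public QAbstractVideoSurface
{
public:
    QList<QVideoFrame::PixelFormat> supportedPixelFormats(
            QAbstractVideoBuffer::HandleType type = QAbstractVideoBuffer::NoHandle) const override
    {
        // The backend falls back to RGB32 only if this list does not
        // contain the format requested via QCameraViewfinderSettings.
        if (type == QAbstractVideoBuffer::NoHandle)
            return { QVideoFrame::Format_YUYV, QVideoFrame::Format_RGB32 };
        return {};
    }

    bool present(const QVideoFrame &frame) override
    {
        // Consume the frame in its native format (upload, encode, analyse, ...).
        Q_UNUSED(frame);
        return true;
    }
};

// Usage sketch (assumes a camera mode that can deliver YUYV):
void startCameraPreview(QCamera *camera, YuyvSurface *surface)
{
    camera->setViewfinder(surface);

    QCameraViewfinderSettings settings;
    settings.setPixelFormat(QVideoFrame::Format_YUYV); // request the native format
    camera->setViewfinderSettings(settings);
    camera->start();
}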