author    Yoann Lopes <yoann.lopes@theqtcompany.com>  2015-09-24 16:58:36 +0200
committer Yoann Lopes <yoann.lopes@theqtcompany.com>  2015-12-04 11:41:25 +0000
commit    8debbfbc9b7fe035362afc3838e7cec595efb394 (patch)
tree      8005f427792db96e33b8100fa72e27cdf1a3e26e /src/plugins/android/src/mediacapture/qandroidcamerasession.cpp
parent    ece9005efe62b297ada062411d255200c7ff124e (diff)
Android: support non-OpenGL video surfaces for the camera.
QCamera can now pass raw frame data to its QAbstractVideoSurface. The now deprecated Android camera API we're using doesn't allow getting frame data without also displaying the frames in a SurfaceView or a SurfaceTexture. To work around that, an invisible dummy SurfaceView is used. This allows retrieving frames in the NV21, YV12, YUY2 or RGB565 formats, depending on the Android version and on the device.

Task-number: QTBUG-35416
Change-Id: I77b4f50505c3b91efb4b2288a57f50398922c0db
Reviewed-by: Christian Stromme <christian.stromme@theqtcompany.com>
Reviewed-by: Yoann Lopes <yoann.lopes@theqtcompany.com>
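For context, a minimal sketch (not part of this commit) of how an application could consume the raw frames this change exposes, using the public Qt 5 QCamera / QAbstractVideoSurface API; the class name and the processing placeholder are illustrative:

    #include <QAbstractVideoSurface>
    #include <QCamera>
    #include <QVideoFrame>

    // Illustrative consumer: a surface advertising the raw formats listed above.
    class FrameGrabber : public QAbstractVideoSurface
    {
    public:
        QList<QVideoFrame::PixelFormat> supportedPixelFormats(
                QAbstractVideoBuffer::HandleType type = QAbstractVideoBuffer::NoHandle) const override
        {
            if (type != QAbstractVideoBuffer::NoHandle)
                return QList<QVideoFrame::PixelFormat>();
            return QList<QVideoFrame::PixelFormat>()
                    << QVideoFrame::Format_NV21 << QVideoFrame::Format_YV12
                    << QVideoFrame::Format_YUYV << QVideoFrame::Format_RGB565;
        }

        bool present(const QVideoFrame &frame) override
        {
            QVideoFrame copy(frame);
            if (copy.map(QAbstractVideoBuffer::ReadOnly)) {
                // Access copy.bits() / copy.bytesPerLine() here.
                copy.unmap();
            }
            return true;
        }
    };

    // Usage sketch:
    //     QCamera camera;
    //     FrameGrabber grabber;
    //     camera.setViewfinder(&grabber);
    //     camera.start();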
Diffstat (limited to 'src/plugins/android/src/mediacapture/qandroidcamerasession.cpp')
-rw-r--r--  src/plugins/android/src/mediacapture/qandroidcamerasession.cpp | 140
1 file changed, 55 insertions(+), 85 deletions(-)
diff --git a/src/plugins/android/src/mediacapture/qandroidcamerasession.cpp b/src/plugins/android/src/mediacapture/qandroidcamerasession.cpp
index cf1879eb1..d69e3ce84 100644
--- a/src/plugins/android/src/mediacapture/qandroidcamerasession.cpp
+++ b/src/plugins/android/src/mediacapture/qandroidcamerasession.cpp
@@ -48,42 +48,6 @@
QT_BEGIN_NAMESPACE
-class DataVideoBuffer : public QAbstractVideoBuffer
-{
-public:
-    DataVideoBuffer(const QByteArray &d, int bpl = -1)
-        : QAbstractVideoBuffer(NoHandle)
-        , data(d)
-        , mode(NotMapped)
-        , bytesPerLine(bpl)
-    { }
-
-    MapMode mapMode() const { return mode; }
-
-    uchar *map(MapMode m, int *numBytes, int *bpl)
-    {
-        if (mode != NotMapped || m == NotMapped)
-            return 0;
-
-        mode = m;
-
-        if (numBytes)
-            *numBytes = data.size();
-
-        if (bpl)
-            *bpl = bytesPerLine;
-
-        return reinterpret_cast<uchar *>(data.data());
-    }
-
-    void unmap() { mode = NotMapped; }
-
-private:
-    QByteArray data;
-    MapMode mode;
-    int bytesPerLine;
-};
-
Q_GLOBAL_STATIC(QList<AndroidCameraInfo>, g_availableCameras)
QAndroidCameraSession::QAndroidCameraSession(QObject *parent)
@@ -104,6 +68,7 @@ QAndroidCameraSession::QAndroidCameraSession(QObject *parent)
, m_readyForCapture(false)
, m_captureCanceled(false)
, m_currentImageCaptureId(-1)
+ , m_previewCallback(0)
{
m_mediaStorageLocation.addStorageLocation(
QMediaStorageLocation::Pictures,
@@ -208,10 +173,11 @@ bool QAndroidCameraSession::open()
if (m_camera) {
connect(m_camera, SIGNAL(pictureExposed()), this, SLOT(onCameraPictureExposed()));
- connect(m_camera, SIGNAL(lastPreviewFrameFetched(QByteArray,int,int)),
- this, SLOT(onLastPreviewFrameFetched(QByteArray,int,int)));
- connect(m_camera, SIGNAL(newPreviewFrame(QByteArray,int,int)),
- this, SLOT(onNewPreviewFrame(QByteArray,int,int)),
+ connect(m_camera, SIGNAL(lastPreviewFrameFetched(QVideoFrame)),
+ this, SLOT(onLastPreviewFrameFetched(QVideoFrame)),
+ Qt::DirectConnection);
+ connect(m_camera, SIGNAL(newPreviewFrame(QVideoFrame)),
+ this, SLOT(onNewPreviewFrame(QVideoFrame)),
Qt::DirectConnection);
connect(m_camera, SIGNAL(pictureCaptured(QByteArray)), this, SLOT(onCameraPictureCaptured(QByteArray)));
connect(m_camera, SIGNAL(previewStarted()), this, SLOT(onCameraPreviewStarted()));
@@ -224,7 +190,7 @@ bool QAndroidCameraSession::open()
if (m_camera->getPreviewFormat() != AndroidCamera::NV21)
m_camera->setPreviewFormat(AndroidCamera::NV21);
- m_camera->notifyNewFrames(m_videoProbes.count());
+ m_camera->notifyNewFrames(m_videoProbes.count() || m_previewCallback);
emit opened();
} else {
@@ -259,16 +225,19 @@ void QAndroidCameraSession::close()
emit statusChanged(m_status);
}
-void QAndroidCameraSession::setVideoPreview(QObject *videoOutput)
+void QAndroidCameraSession::setVideoOutput(QAndroidVideoOutput *output)
{
if (m_videoOutput) {
m_videoOutput->stop();
m_videoOutput->reset();
}
- if (videoOutput) {
- connect(videoOutput, SIGNAL(readyChanged(bool)), this, SLOT(onVideoOutputReady(bool)));
- m_videoOutput = qobject_cast<QAndroidVideoOutput *>(videoOutput);
+ if (output) {
+ m_videoOutput = output;
+ if (m_videoOutput->isReady())
+ onVideoOutputReady(true);
+ else
+ connect(m_videoOutput, SIGNAL(readyChanged(bool)), this, SLOT(onVideoOutputReady(bool)));
} else {
m_videoOutput = 0;
}
@@ -336,7 +305,10 @@ bool QAndroidCameraSession::startPreview()
if (!m_videoOutput->isReady())
return true; // delay starting until the video output is ready
- if (!m_camera->setPreviewTexture(m_videoOutput->surfaceTexture()))
+ Q_ASSERT(m_videoOutput->surfaceTexture() || m_videoOutput->surfaceHolder());
+
+ if ((m_videoOutput->surfaceTexture() && !m_camera->setPreviewTexture(m_videoOutput->surfaceTexture()))
+ || (m_videoOutput->surfaceHolder() && !m_camera->setPreviewDisplay(m_videoOutput->surfaceHolder())))
return false;
m_status = QCamera::StartingStatus;
@@ -366,6 +338,7 @@ void QAndroidCameraSession::stopPreview()
m_camera->stopPreview();
m_camera->setPreviewSize(QSize());
m_camera->setPreviewTexture(0);
+ m_camera->setPreviewDisplay(0);
if (m_videoOutput) {
m_videoOutput->stop();
@@ -413,7 +386,7 @@ void QAndroidCameraSession::addProbe(QAndroidMediaVideoProbeControl *probe)
if (probe)
m_videoProbes << probe;
if (m_camera)
- m_camera->notifyNewFrames(m_videoProbes.count());
+ m_camera->notifyNewFrames(m_videoProbes.count() || m_previewCallback);
m_videoProbesMutex.unlock();
}
@@ -422,7 +395,24 @@ void QAndroidCameraSession::removeProbe(QAndroidMediaVideoProbeControl *probe)
m_videoProbesMutex.lock();
m_videoProbes.remove(probe);
if (m_camera)
- m_camera->notifyNewFrames(m_videoProbes.count());
+ m_camera->notifyNewFrames(m_videoProbes.count() || m_previewCallback);
+ m_videoProbesMutex.unlock();
+}
+
+void QAndroidCameraSession::setPreviewFormat(AndroidCamera::ImageFormat format)
+{
+ if (format == AndroidCamera::UnknownImageFormat)
+ return;
+
+ m_camera->setPreviewFormat(format);
+}
+
+void QAndroidCameraSession::setPreviewCallback(PreviewCallback *callback)
+{
+ m_videoProbesMutex.lock();
+ m_previewCallback = callback;
+ if (m_camera)
+ m_camera->notifyNewFrames(m_videoProbes.count() || m_previewCallback);
m_videoProbesMutex.unlock();
}
@@ -565,57 +555,37 @@ void QAndroidCameraSession::onCameraPictureExposed()
m_camera->fetchLastPreviewFrame();
}
-void QAndroidCameraSession::onLastPreviewFrameFetched(const QByteArray &preview, int width, int height)
-{
- if (preview.size()) {
- QtConcurrent::run(this, &QAndroidCameraSession::processPreviewImage,
- m_currentImageCaptureId,
- preview,
- width,
- height,
- m_camera->getRotation());
- }
-}
-
-void QAndroidCameraSession::processPreviewImage(int id, const QByteArray &data, int width, int height, int rotation)
+void QAndroidCameraSession::onLastPreviewFrameFetched(const QVideoFrame &frame)
{
- emit imageCaptured(id, prepareImageFromPreviewData(data, width, height, rotation));
+ QtConcurrent::run(this, &QAndroidCameraSession::processPreviewImage,
+ m_currentImageCaptureId,
+ frame,
+ m_camera->getRotation());
}
-QImage QAndroidCameraSession::prepareImageFromPreviewData(const QByteArray &data, int width, int height, int rotation)
+void QAndroidCameraSession::processPreviewImage(int id, const QVideoFrame &frame, int rotation)
{
- QVideoFrame frame(new QMemoryVideoBuffer(data, width),
- QSize(width, height), QVideoFrame::Format_NV21);
-
- QImage result = qt_imageFromVideoFrame(frame);
-
- QTransform transform;
-
// Preview display of front-facing cameras is flipped horizontally, but the frame data
// we get here is not. Flip it ourselves if the camera is front-facing to match what the user
// sees on the viewfinder.
+ QTransform transform;
if (m_camera->getFacing() == AndroidCamera::CameraFacingFront)
transform.scale(-1, 1);
-
transform.rotate(rotation);
- result = result.transformed(transform);
-
- return result;
+ emit imageCaptured(id, qt_imageFromVideoFrame(frame).transformed(transform));
}
-void QAndroidCameraSession::onNewPreviewFrame(const QByteArray &frame, int width, int height)
+void QAndroidCameraSession::onNewPreviewFrame(const QVideoFrame &frame)
{
m_videoProbesMutex.lock();
- if (frame.size() && m_videoProbes.count()) {
- // Bytes per line should be only for the first plane. For NV21, the Y plane has 8 bits
- // per sample, so bpl == width
- QVideoFrame videoFrame(new DataVideoBuffer(frame, width),
- QSize(width, height),
- QVideoFrame::Format_NV21);
- foreach (QAndroidMediaVideoProbeControl *probe, m_videoProbes)
- probe->newFrameProbed(videoFrame);
- }
+
+ foreach (QAndroidMediaVideoProbeControl *probe, m_videoProbes)
+ probe->newFrameProbed(frame);
+
+ if (m_previewCallback)
+ m_previewCallback->onFrameAvailable(frame);
+
m_videoProbesMutex.unlock();
}
@@ -692,7 +662,7 @@ void QAndroidCameraSession::processCapturedImage(int id,
}
if (dest & QCameraImageCapture::CaptureToBuffer) {
- QVideoFrame frame(new DataVideoBuffer(data), resolution, QVideoFrame::Format_Jpeg);
+ QVideoFrame frame(new QMemoryVideoBuffer(data, -1), resolution, QVideoFrame::Format_Jpeg);
emit imageAvailable(id, frame);
}
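For reference, a hypothetical sketch of a preview-callback consumer. Only the call made in onNewPreviewFrame() above, m_previewCallback->onFrameAvailable(frame), is taken from this diff; the class name, header path, and the exact shape of QAndroidCameraSession::PreviewCallback are assumptions:

    #include <QDebug>
    #include <QVideoFrame>
    #include "qandroidcamerasession.h"   // plugin-internal header (assumed location)

    // Assumed interface: one hook, invoked with each new QVideoFrame while the
    // probes mutex is held, on the camera's thread.
    class LoggingPreviewCallback : public QAndroidCameraSession::PreviewCallback
    {
    public:
        void onFrameAvailable(const QVideoFrame &frame)
        {
            // The frame arrives via a Qt::DirectConnection, so keep this cheap;
            // hand the frame off to another thread for any real processing.
            qDebug() << "preview frame:" << frame.size();
        }
    };

    // session->setPreviewCallback(new LoggingPreviewCallback); // illustrative wiring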
}