author     Samuel Mira <samuel.mira@qt.io>  2023-02-24 19:21:52 +0200
committer  Samuel Mira <samuel.mira@qt.io>  2023-03-09 06:38:52 +0000
commit     5fab664478f55ae2f93c87ca13cd9b0f15b58bff
tree       150a69779ea4272e9333abc172b2a0a8ec70db58
parent     a28a4651db5f3305e2e163f6f6bf55abc6c5fbf0
Android: Integrate HW encoding for FFMpeg backend in Android
Video encoding was previously done in software, which gave below-acceptable
results. This patch adds the changes necessary to use the MediaCodec encoders
recently made available in FFmpeg. For that to happen, the image reader of
QtCamera2 had to be switched from JPEG to YUV420P frames. That also improves
performance, since JPEG decoding is no longer necessary. The resulting
pipeline performs only one copy, done on the FFmpeg side, to upload frames
into the MediaCodec input buffer.

Fixes: QTBUG-102235
Pick-to: 6.5 6.5.0
Change-Id: Ief5421c700dd0a77763a7713f9b452dd01828241
Reviewed-by: Lars Knoll <lars@knoll.priv.no>
 src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java |  81
 src/plugins/multimedia/ffmpeg/CMakeLists.txt                           |   1
 src/plugins/multimedia/ffmpeg/qandroidcamera.cpp                       | 250
 src/plugins/multimedia/ffmpeg/qandroidcamera_p.h                       |  13
 src/plugins/multimedia/ffmpeg/qandroidcameraframe.cpp                  | 197
 src/plugins/multimedia/ffmpeg/qandroidcameraframe_p.h                  |  75
 src/plugins/multimedia/ffmpeg/qandroidvideodevices.cpp                 |  12
 src/plugins/multimedia/ffmpeg/qffmpegencoderoptions.cpp                |  49
 src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp                       |   6
 src/plugins/multimedia/ffmpeg/qffmpeghwaccel_mediacodec.cpp            |   2
 src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp              |  32
 11 files changed, 558 insertions(+), 160 deletions(-)
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java
index 9b93e2f42..bf5e10013 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java
@@ -2,21 +2,27 @@
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
package org.qtproject.qt.android.multimedia;
+import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.Context;
-import android.graphics.ImageFormat;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.media.Image;
import android.media.ImageReader;
+import android.graphics.ImageFormat;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
-
+import android.view.Surface;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+import java.util.ArrayList;
import java.util.List;
@TargetApi(23)
@@ -25,21 +31,15 @@ public class QtCamera2 {
CameraDevice mCameraDevice = null;
HandlerThread mBackgroundThread;
Handler mBackgroundHandler;
- ImageReader mImageReader;
+ ImageReader mImageReader = null;
CameraManager mCameraManager;
CameraCaptureSession mCaptureSession;
CaptureRequest.Builder mPreviewRequestBuilder;
CaptureRequest mPreviewRequest;
String mCameraId;
+ List<Surface> mTargetSurfaces = new ArrayList<>();
- native void onFrameAvailable(String cameraId, Image image);
-
- ImageReader.OnImageAvailableListener mOnImageAvailableListener = new ImageReader.OnImageAvailableListener() {
- @Override
- public void onImageAvailable(ImageReader reader) {
- QtCamera2.this.onFrameAvailable(mCameraId, reader.acquireLatestImage());
- }
- };
+ private static int MaxNumberFrames = 10;
native void onCameraOpened(String cameraId);
native void onCameraDisconnect(String cameraId);
@@ -127,12 +127,9 @@ public class QtCamera2 {
}
}
- public boolean open(String cameraId, int width, int height) {
-
+ @SuppressLint("MissingPermission")
+ public boolean open(String cameraId) {
try {
- mImageReader = ImageReader.newInstance(width, height, ImageFormat.JPEG, /*maxImages*/10);
- mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);
-
mCameraId = cameraId;
mCameraManager.openCamera(cameraId,mStateCallback,mBackgroundHandler);
return true;
@@ -143,17 +140,53 @@ public class QtCamera2 {
return false;
}
+
+ native void onFrameAvailable(String cameraId, Image frame);
+
+ ImageReader.OnImageAvailableListener mOnImageAvailableListener = new ImageReader.OnImageAvailableListener() {
+ @Override
+ public void onImageAvailable(ImageReader reader) {
+ QtCamera2.this.onFrameAvailable(mCameraId, reader.acquireLatestImage());
+ }
+ };
+
+ public boolean addImageReader(int width, int height, int format) {
+
+ if (mImageReader != null)
+ removeSurface(mImageReader.getSurface());
+
+ mImageReader = ImageReader.newInstance(width, height, format, MaxNumberFrames);
+ mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);
+ addSurface(mImageReader.getSurface());
+
+ return true;
+ }
+
+ public boolean addSurface(Surface surface) {
+ if (mTargetSurfaces.contains(surface))
+ return true;
+
+ return mTargetSurfaces.add(surface);
+ }
+
+ public boolean removeSurface(Surface surface) {
+ return mTargetSurfaces.remove(surface);
+ }
+
+ public void clearSurfaces() {
+ mTargetSurfaces.clear();
+ }
+
public boolean createSession() {
if (mCameraDevice == null)
return false;
try {
- mCameraDevice.createCaptureSession(List.of(mImageReader.getSurface()), mCaptureStateCallback, mBackgroundHandler);
+ mCameraDevice.createCaptureSession(mTargetSurfaces, mCaptureStateCallback, mBackgroundHandler);
return true;
} catch (Exception exception) {
Log.w("QtCamera2", "Failed to create a capture session:" + exception);
}
-
return false;
}
@@ -167,8 +200,13 @@ public class QtCamera2 {
try {
mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(template);
- mPreviewRequestBuilder.addTarget(mImageReader.getSurface());
+ for (Surface surface : mTargetSurfaces) {
+ mPreviewRequestBuilder.addTarget(surface);
+ }
+
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
+ mPreviewRequestBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT, CameraMetadata.CONTROL_CAPTURE_INTENT_VIDEO_RECORD);
+
mPreviewRequest = mPreviewRequestBuilder.build();
mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mBackgroundHandler);
return true;
@@ -189,11 +227,8 @@ public class QtCamera2 {
mCameraDevice.close();
mCameraDevice = null;
}
- if (null != mImageReader) {
- mImageReader.close();
- mImageReader = null;
- }
mCameraId = "";
+ mTargetSurfaces.clear();
} catch (Exception exception) {
Log.w("QtCamera2", "Failed to stop and close:" + exception);
}
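The Java changes above replace the fixed JPEG ImageReader with a list of target surfaces that the capture session is built over. A minimal sketch of how the C++ side is expected to drive this reworked API over JNI; the call sequence mirrors the one in qandroidcamera.cpp further down, and cameraId, width, height and imageFormat stand in for the real values:

    // Hedged sketch: driving the reworked QtCamera2 Java API from C++.
    QJniObject jniCamera(QtJniTypes::className<QtJniTypes::QtCamera2>(),
                         QNativeInterface::QAndroidApplication::context());

    // open() no longer takes a size; the ImageReader is attached separately.
    jniCamera.callMethod<jboolean>(
            "open", QJniObject::fromString(cameraId).object<jstring>());

    // Register a YUV_420_888 ImageReader as one of the target surfaces...
    jniCamera.callMethod<jboolean>("addImageReader", jint(width), jint(height),
                                   jint(imageFormat));

    // ...then build the capture session over all registered surfaces.
    jniCamera.callMethod<jboolean>("createSession");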
diff --git a/src/plugins/multimedia/ffmpeg/CMakeLists.txt b/src/plugins/multimedia/ffmpeg/CMakeLists.txt
index 3a33a6f8c..58a66e27f 100644
--- a/src/plugins/multimedia/ffmpeg/CMakeLists.txt
+++ b/src/plugins/multimedia/ffmpeg/CMakeLists.txt
@@ -138,6 +138,7 @@ if (ANDROID)
qffmpeghwaccel_mediacodec.cpp qffmpeghwaccel_mediacodec_p.h
qandroidcamera_p.h qandroidcamera.cpp
qandroidvideodevices.cpp qandroidvideodevices_p.h
+ qandroidcameraframe_p.h qandroidcameraframe.cpp
../android/wrappers/jni/androidsurfacetexture_p.h
../android/wrappers/jni/androidsurfacetexture.cpp
INCLUDE_DIRECTORIES
diff --git a/src/plugins/multimedia/ffmpeg/qandroidcamera.cpp b/src/plugins/multimedia/ffmpeg/qandroidcamera.cpp
index eee50dbfe..a675ce733 100644
--- a/src/plugins/multimedia/ffmpeg/qandroidcamera.cpp
+++ b/src/plugins/multimedia/ffmpeg/qandroidcamera.cpp
@@ -5,110 +5,54 @@
#include <jni.h>
#include <QMediaFormat>
+#include <memory>
+#include <optional>
#include <qmediadevices.h>
#include <qguiapplication.h>
#include <qscreen.h>
+#include <QDebug>
#include <qloggingcategory.h>
-#include <private/qabstractvideobuffer_p.h>
#include <QtCore/qcoreapplication.h>
#include <QtCore/qpermissions.h>
#include <QtCore/private/qandroidextras_p.h>
-#include <private/qmemoryvideobuffer_p.h>
#include <private/qcameradevice_p.h>
#include <QReadWriteLock>
+#include <private/qvideoframeconverter_p.h>
+#include <private/qvideotexturehelper_p.h>
+#include <qffmpegvideobuffer_p.h>
+
+#include <qandroidcameraframe_p.h>
+#include <utility>
+
+extern "C" {
+#include "libavutil/hwcontext.h"
+#include "libavutil/pixfmt.h"
+}
Q_DECLARE_JNI_CLASS(QtCamera2, "org/qtproject/qt/android/multimedia/QtCamera2");
Q_DECLARE_JNI_CLASS(QtVideoDeviceManager,
"org/qtproject/qt/android/multimedia/QtVideoDeviceManager");
+Q_DECLARE_JNI_CLASS(AndroidImageFormat, "android/graphics/ImageFormat");
+
Q_DECLARE_JNI_TYPE(AndroidImage, "Landroid/media/Image;")
Q_DECLARE_JNI_TYPE(AndroidImagePlaneArray, "[Landroid/media/Image$Plane;")
Q_DECLARE_JNI_TYPE(JavaByteBuffer, "Ljava/nio/ByteBuffer;")
QT_BEGIN_NAMESPACE
-static Q_LOGGING_CATEGORY(qLCAndroidCamera, "qt.multimedia.ffmpeg.androidCamera")
+static Q_LOGGING_CATEGORY(qLCAndroidCamera, "qt.multimedia.ffmpeg.androidCamera");
typedef QMap<QString, QAndroidCamera *> QAndroidCameraMap;
Q_GLOBAL_STATIC(QAndroidCameraMap, g_qcameras)
Q_GLOBAL_STATIC(QReadWriteLock, rwLock)
-class JavaImageVideoBuffer : public QAbstractVideoBuffer
-{
-public:
- JavaImageVideoBuffer(const QJniObject &image, const QCameraDevice &device)
- : QAbstractVideoBuffer(QVideoFrame::NoHandle, nullptr),
- m_image(generateImage(image, device)){};
-
- virtual ~JavaImageVideoBuffer() = default;
-
- QVideoFrame::MapMode mapMode() const override { return m_mapMode; }
-
- MapData map(QVideoFrame::MapMode mode) override
- {
- MapData mapData;
- if (m_mapMode == QVideoFrame::NotMapped && mode != QVideoFrame::NotMapped
- && !m_image.isNull()) {
- m_mapMode = mode;
-
- mapData.nPlanes = 1;
- mapData.bytesPerLine[0] = m_image.bytesPerLine();
- mapData.data[0] = m_image.bits();
- mapData.size[0] = m_image.sizeInBytes();
- }
-
- return mapData;
- }
-
- void unmap() override { m_mapMode = QVideoFrame::NotMapped; }
-
- QImage generateImage(const QJniObject &image, const QCameraDevice &device)
- {
- if (!image.isValid())
- return {};
-
- QJniEnvironment jniEnv;
-
- QJniObject planes = image.callMethod<QtJniTypes::AndroidImagePlaneArray>("getPlanes");
- if (!planes.isValid())
- return {};
-
- // this assumes that this image is a JPEG - single plane, that is taken care of in Java
- QJniObject plane = jniEnv->GetObjectArrayElement(planes.object<jobjectArray>(), 0);
- if (jniEnv.checkAndClearExceptions() || !plane.isValid())
- return {};
-
- QJniObject byteBuffer = plane.callMethod<QtJniTypes::JavaByteBuffer>("getBuffer");
- if (!byteBuffer.isValid())
- return {};
-
- // Uses direct access which is garanteed by android to work with ImageReader bytebuffer
- uchar *data =
- reinterpret_cast<uchar *>(jniEnv->GetDirectBufferAddress(byteBuffer.object()));
- if (jniEnv.checkAndClearExceptions())
- return {};
-
- QTransform transform;
- if (device.position() == QCameraDevice::Position::FrontFace)
- transform.scale(-1, 1);
-
- return QImage::fromData(data, byteBuffer.callMethod<jint>("remaining"))
- .transformed(transform);
- }
-
- const QImage &image() { return m_image; }
-
-private:
- QVideoFrame::MapMode m_mapMode = QVideoFrame::NotMapped;
- QImage m_image;
-};
-
namespace {
QCameraFormat getDefaultCameraFormat()
{
// default settings
QCameraFormatPrivate *defaultFormat = new QCameraFormatPrivate{
- .pixelFormat = QVideoFrameFormat::Format_BGRA8888,
+ .pixelFormat = QVideoFrameFormat::Format_YUV420P,
.resolution = { 1920, 1080 },
.minFrameRate = 30,
.maxFrameRate = 60,
@@ -127,16 +71,35 @@ bool checkCameraPermission()
return granted;
}
+int sensorOrientation(QString cameraId)
+{
+ QJniObject deviceManager(QtJniTypes::className<QtJniTypes::QtVideoDeviceManager>(),
+ QNativeInterface::QAndroidApplication::context());
+
+ if (!deviceManager.isValid()) {
+ qCWarning(qLCAndroidCamera) << "Failed to connect to Qt Video Device Manager.";
+ return 0;
+ }
+
+ return deviceManager.callMethod<jint>("getSensorOrientation",
+ QJniObject::fromString(cameraId).object<jstring>());
+}
} // namespace
// QAndroidCamera
+
QAndroidCamera::QAndroidCamera(QCamera *camera) : QPlatformCamera(camera)
{
- m_cameraDevice = (camera ? camera->cameraDevice() : QCameraDevice());
- m_cameraFormat = getDefaultCameraFormat();
+ if (camera) {
+ m_cameraDevice = camera->cameraDevice();
+ m_cameraFormat = !camera->cameraFormat().isNull() ? camera->cameraFormat()
+ : getDefaultCameraFormat();
+ }
m_jniCamera = QJniObject(QtJniTypes::className<QtJniTypes::QtCamera2>(),
QNativeInterface::QAndroidApplication::context());
+
+ m_hwAccel = QFFmpeg::HWAccel::create(AVHWDeviceType::AV_HWDEVICE_TYPE_MEDIACODEC);
};
QAndroidCamera::~QAndroidCamera()
@@ -162,30 +125,71 @@ void QAndroidCamera::setCamera(const QCameraDevice &camera)
setActive(true);
}
-void QAndroidCamera::onFrameAvailable(QJniObject frame)
+std::optional<int> QAndroidCamera::ffmpegHWPixelFormat() const
+{
+ return QFFmpegVideoBuffer::toAVPixelFormat(m_androidFramePixelFormat);
+}
+
+static void deleteFrame(void *opaque, uint8_t *data)
+{
+ Q_UNUSED(data);
+
+ auto frame = reinterpret_cast<QAndroidCameraFrame *>(opaque);
+
+ if (frame)
+ delete frame;
+}
+
+void QAndroidCamera::frameAvailable(QJniObject image)
{
- if (!frame.isValid())
+ if (!(m_state == State::WaitingStart || m_state == State::Started)) {
+ qCWarning(qLCAndroidCamera) << "Received frame when not active... ignoring";
+ qCWarning(qLCAndroidCamera) << "state:" << m_state;
+ image.callMethod<void>("close");
return;
+ }
+
+ auto androidFrame = new QAndroidCameraFrame(image);
+ if (!androidFrame->isParsed()) {
+ qCWarning(qLCAndroidCamera) << "Failed to parse frame.. dropping frame";
+ delete androidFrame;
+ return;
+ }
+
+ int timestamp = androidFrame->timestamp();
+ m_androidFramePixelFormat = androidFrame->format();
+
+ auto avframe = QFFmpeg::makeAVFrame();
+
+ avframe->width = androidFrame->size().width();
+ avframe->height = androidFrame->size().height();
+ avframe->format = QFFmpegVideoBuffer::toAVPixelFormat(androidFrame->format());
+
+ avframe->extended_data = avframe->data;
+ avframe->pts = androidFrame->timestamp();
+
+ for (int planeNumber = 0; planeNumber < androidFrame->numberPlanes(); planeNumber++) {
+ QAndroidCameraFrame::Plane plane = androidFrame->plane(planeNumber);
+ avframe->linesize[planeNumber] = plane.rowStride;
+ avframe->data[planeNumber] = plane.data;
+ }
- long timestamp = frame.callMethod<jlong>("getTimestamp");
- int width = frame.callMethod<jint>("getWidth");
- int height = frame.callMethod<jint>("getHeight");
+ avframe->data[3] = nullptr;
+ avframe->buf[0] = nullptr;
- QVideoFrameFormat::PixelFormat pixelFormat =
- QVideoFrameFormat::PixelFormat::Format_BGRA8888_Premultiplied;
+ avframe->opaque_ref = av_buffer_create(NULL, 1, deleteFrame, androidFrame, 0);
+ avframe->extended_data = avframe->data;
+ avframe->pts = timestamp;
- QVideoFrameFormat format({ width, height }, pixelFormat);
+ QVideoFrameFormat format(androidFrame->size(), androidFrame->format());
- QVideoFrame videoFrame(new JavaImageVideoBuffer(frame, m_cameraDevice), format);
+ QVideoFrame videoFrame(new QFFmpegVideoBuffer(std::move(avframe)), format);
- timestamp = timestamp / 1000000;
if (lastTimestamp == 0)
lastTimestamp = timestamp;
- videoFrame.setRotationAngle(QVideoFrame::RotationAngle(orientation()));
-
- if (m_cameraDevice.position() == QCameraDevice::Position::FrontFace)
- videoFrame.setMirrored(true);
+ videoFrame.setRotationAngle(rotation());
+ videoFrame.setMirrored(m_cameraDevice.position() == QCameraDevice::Position::FrontFace);
videoFrame.setStartTime(lastTimestamp);
videoFrame.setEndTime(timestamp);
@@ -193,24 +197,19 @@ void QAndroidCamera::onFrameAvailable(QJniObject frame)
emit newVideoFrame(videoFrame);
lastTimestamp = timestamp;
-
- // must call close at the end
- frame.callMethod<void>("close");
}
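The frameAvailable() implementation above wraps the Android Image planes into an AVFrame without copying. A condensed restatement of the ownership pattern: the AVFrame borrows the plane buffers, and av_buffer_create() ties the lifetime of the QAndroidCameraFrame (and with it the Java Image) to the frame.

    // The AVFrame borrows the Image plane buffers; nothing is copied here.
    auto *androidFrame = new QAndroidCameraFrame(image);
    auto avframe = QFFmpeg::makeAVFrame();
    for (int i = 0; i < androidFrame->numberPlanes(); ++i) {
        avframe->data[i] = androidFrame->plane(i).data;
        avframe->linesize[i] = androidFrame->plane(i).rowStride;
    }
    // Once the last reference to avframe is dropped, deleteFrame() runs,
    // deleting androidFrame, whose destructor closes the Java Image.
    avframe->opaque_ref = av_buffer_create(nullptr, 1, deleteFrame, androidFrame, 0);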
-// based on https://developer.android.com/training/camera2/camera-preview#relative_rotation
-int QAndroidCamera::orientation()
+QVideoFrame::RotationAngle QAndroidCamera::rotation()
{
- QJniObject deviceManager(QtJniTypes::className<QtJniTypes::QtVideoDeviceManager>(),
- QNativeInterface::QAndroidApplication::context());
-
- QString cameraId = m_cameraDevice.id();
- int sensorOrientation = deviceManager.callMethod<jint>(
- "getSensorOrientation", QJniObject::fromString(cameraId).object<jstring>());
+ auto screen = QGuiApplication::primaryScreen();
+ auto screenOrientation = screen->orientation();
+ if (screenOrientation == Qt::PrimaryOrientation)
+ screenOrientation = screen->primaryOrientation();
int deviceOrientation = 0;
+ bool isFrontCamera = m_cameraDevice.position() == QCameraDevice::Position::FrontFace;
- switch (QGuiApplication::primaryScreen()->orientation()) {
+ switch (screenOrientation) {
case Qt::PrimaryOrientation:
case Qt::PortraitOrientation:
break;
@@ -225,9 +224,15 @@ int QAndroidCamera::orientation()
break;
}
- int sign = m_cameraDevice.position() == QCameraDevice::Position::FrontFace ? 1 : -1;
-
- return (sensorOrientation - deviceOrientation * sign + 360) % 360;
+ int rotation;
+ // subtract natural camera orientation and physical device orientation
+ if (isFrontCamera) {
+ rotation = (sensorOrientation(m_cameraDevice.id()) - deviceOrientation + 360) % 360;
+ rotation = (180 + rotation) % 360; // compensate the mirror
+ } else { // back-facing camera
+ rotation = (sensorOrientation(m_cameraDevice.id()) - deviceOrientation + 360) % 360;
+ }
+ return QVideoFrame::RotationAngle(rotation);
}
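A worked example of the rotation formula above, with illustrative values: for a back camera whose sensorOrientation() is 90 and a device held in landscape (deviceOrientation 270), the frame must be rotated (90 - 270 + 360) % 360 = 180 degrees. For a front camera in the same pose the intermediate result is also 180, and the extra (180 + 180) % 360 = 0 step cancels the rotation to compensate for the mirroring applied to front-facing frames.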
void QAndroidCamera::setActive(bool active)
@@ -251,12 +256,14 @@ void QAndroidCamera::setActive(bool active)
height = m_cameraFormat.resolution().height();
}
+ width = FFALIGN(width, 16);
+ height = FFALIGN(height, 16);
+
setState(State::WaitingOpen);
g_qcameras->insert(m_cameraDevice.id(), this);
bool canOpen = m_jniCamera.callMethod<jboolean>(
- "open", QJniObject::fromString(m_cameraDevice.id()).object<jstring>(), width,
- height);
+ "open", QJniObject::fromString(m_cameraDevice.id()).object<jstring>());
if (!canOpen) {
g_qcameras->remove(m_cameraDevice.id());
@@ -265,8 +272,17 @@ void QAndroidCamera::setActive(bool active)
QString("Failed to start camera: ").append(m_cameraDevice.description()));
}
+    // This should use the camera format, but there are only two fully
+    // supported formats on Android (JPEG and YUV420P), and JPEG is not
+    // supported for encoding in FFmpeg, so it is locked to YUV for now.
+ const static int imageFormat =
+ QJniObject::getStaticField<QtJniTypes::AndroidImageFormat, jint>("YUV_420_888");
+ m_jniCamera.callMethod<jboolean>("addImageReader", jint(width), jint(height),
+ jint(imageFormat));
+
} else {
m_jniCamera.callMethod<void>("stopAndClose");
+ m_jniCamera.callMethod<void>("clearSurfaces");
setState(State::Closed);
}
}
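FFALIGN comes from libavutil and rounds its first argument up to the next multiple of the second; the 16-pixel alignment applied above presumably matches what the MediaCodec encoder input buffers require. For example:

    // #define FFALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))   (libavutil/macros.h)
    int width  = FFALIGN(1920, 16);  // 1920, already a multiple of 16
    int height = FFALIGN(1080, 16);  // 1088, rounded up from 1080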
@@ -305,20 +321,17 @@ void QAndroidCamera::setState(QAndroidCamera::State newState)
bool QAndroidCamera::setCameraFormat(const QCameraFormat &format)
{
- bool wasActive = isActive();
+ if (!format.isNull() && !m_cameraDevice.videoFormats().contains(format))
+ return false;
- setActive(false);
- m_cameraFormat = format;
-
- if (wasActive)
- setActive(true);
+ m_cameraFormat = format.isNull() ? getDefaultCameraFormat() : format;
return true;
}
void QAndroidCamera::onCaptureSessionConfigured()
{
- bool canStart = m_jniCamera.callMethod<jboolean>("start", 5);
+ bool canStart = m_jniCamera.callMethod<jboolean>("start", 3);
setState(canStart ? State::WaitingStart : State::Closed);
}
@@ -329,11 +342,6 @@ void QAndroidCamera::onCaptureSessionConfigureFailed()
void QAndroidCamera::onCameraOpened()
{
- if (m_state == State::WaitingOpen) {
- emit error(QCamera::CameraError, "Camera Open in incorrect state.");
- setState(State::Closed);
- }
-
bool canStart = m_jniCamera.callMethod<jboolean>("createSession");
setState(canStart ? State::WaitingStart : State::Closed);
}
@@ -369,7 +377,6 @@ void QAndroidCamera::onCaptureSessionFailed(int reason, long frameNumber)
QString("Capture session failure with Camera %1. Camera2 Api error code: %2")
.arg(m_cameraDevice.description())
.arg(reason));
- setState(State::Closed);
}
// JNI logic
@@ -390,7 +397,7 @@ static void onFrameAvailable(JNIEnv *env, jobject obj, jstring cameraId,
Q_UNUSED(obj);
GET_CAMERA(cameraId);
- camera->onFrameAvailable(QJniObject(image));
+ camera->frameAvailable(QJniObject(image));
}
Q_DECLARE_JNI_NATIVE_METHOD(onFrameAvailable)
@@ -490,7 +497,6 @@ bool QAndroidCamera::registerNativeMethods()
Q_JNI_NATIVE_METHOD(onFrameAvailable),
Q_JNI_NATIVE_METHOD(onSessionActive),
Q_JNI_NATIVE_METHOD(onSessionClosed),
-
});
}();
return registered;
diff --git a/src/plugins/multimedia/ffmpeg/qandroidcamera_p.h b/src/plugins/multimedia/ffmpeg/qandroidcamera_p.h
index 7a2152ebf..dbbe2abe7 100644
--- a/src/plugins/multimedia/ffmpeg/qandroidcamera_p.h
+++ b/src/plugins/multimedia/ffmpeg/qandroidcamera_p.h
@@ -15,6 +15,7 @@
// We mean it.
//
+#include "qffmpeghwaccel_p.h"
#include <private/qplatformcamera_p.h>
#include <QObject>
#include <QJniObject>
@@ -34,13 +35,15 @@ public:
void setCamera(const QCameraDevice &camera) override;
bool setCameraFormat(const QCameraFormat &format) override;
+ std::optional<int> ffmpegHWPixelFormat() const override;
+
static bool registerNativeMethods();
public slots:
void onCameraOpened();
void onCameraDisconnect();
void onCameraError(int error);
- void onFrameAvailable(QJniObject frame);
+ void frameAvailable(QJniObject image);
void onCaptureSessionConfigured();
void onCaptureSessionConfigureFailed();
void onCaptureSessionFailed(int reason, long frameNumber);
@@ -48,13 +51,19 @@ public slots:
void onSessionClosed();
private:
+ bool isActivating() const { return m_state != State::Closed; }
+
void setState(State newState);
- int orientation();
+ QVideoFrame::RotationAngle rotation();
State m_state = State::Closed;
QCameraDevice m_cameraDevice;
long lastTimestamp = 0;
QJniObject m_jniCamera;
+
+ std::unique_ptr<QFFmpeg::HWAccel> m_hwAccel;
+
+ QVideoFrameFormat::PixelFormat m_androidFramePixelFormat;
};
QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/ffmpeg/qandroidcameraframe.cpp b/src/plugins/multimedia/ffmpeg/qandroidcameraframe.cpp
new file mode 100644
index 000000000..e01f9ea7e
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qandroidcameraframe.cpp
@@ -0,0 +1,197 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidcameraframe_p.h"
+#include <jni.h>
+#include <QDebug>
+#include <QtCore/QLoggingCategory>
+
+Q_DECLARE_JNI_CLASS(AndroidImageFormat, "android/graphics/ImageFormat");
+
+Q_DECLARE_JNI_TYPE(AndroidImage, "Landroid/media/Image;")
+Q_DECLARE_JNI_TYPE(AndroidImagePlaneArray, "[Landroid/media/Image$Plane;")
+Q_DECLARE_JNI_TYPE(JavaByteBuffer, "Ljava/nio/ByteBuffer;")
+
+QT_BEGIN_NAMESPACE
+static Q_LOGGING_CATEGORY(qLCAndroidCameraFrame, "qt.multimedia.ffmpeg.android.camera.frame");
+
+bool QAndroidCameraFrame::parse(const QJniObject &frame)
+{
+ QJniEnvironment jniEnv;
+
+ if (!frame.isValid())
+ return false;
+
+ auto planes = frame.callMethod<QtJniTypes::AndroidImagePlaneArray>("getPlanes");
+ if (!planes.isValid())
+ return false;
+
+ int numberPlanes = jniEnv->GetArrayLength(planes.object<jarray>());
+ // create and populate temporary array structure
+ int pixelStrides[numberPlanes];
+ int rowStrides[numberPlanes];
+ int bufferSize[numberPlanes];
+ uint8_t *buffer[numberPlanes];
+
+ auto resetPlane = [&](int index) {
+ if (index < 0 || index > numberPlanes)
+ return;
+
+ rowStrides[index] = 0;
+ pixelStrides[index] = 0;
+ bufferSize[index] = 0;
+ buffer[index] = nullptr;
+ };
+
+ for (int index = 0; index < numberPlanes; index++) {
+ QJniObject plane = jniEnv->GetObjectArrayElement(planes.object<jobjectArray>(), index);
+ if (jniEnv.checkAndClearExceptions() || !plane.isValid()) {
+ resetPlane(index);
+ continue;
+ }
+
+ rowStrides[index] = plane.callMethod<jint>("getRowStride");
+ pixelStrides[index] = plane.callMethod<jint>("getPixelStride");
+
+ auto byteBuffer = plane.callMethod<QtJniTypes::JavaByteBuffer>("getBuffer");
+ if (!byteBuffer.isValid()) {
+ resetPlane(index);
+ continue;
+ }
+
+        // Uses direct access, which is guaranteed by Android to work with
+        // the ImageReader ByteBuffer
+ buffer[index] = static_cast<uint8_t *>(jniEnv->GetDirectBufferAddress(byteBuffer.object()));
+ bufferSize[index] = byteBuffer.callMethod<jint>("remaining");
+ }
+
+ QVideoFrameFormat::PixelFormat calculedPixelFormat = QVideoFrameFormat::Format_Invalid;
+
+ // finding the image format
+ // the ImageFormats that can happen here are stated here:
+ // https://developer.android.com/reference/android/media/Image#getFormat()
+ int format = frame.callMethod<jint>("getFormat");
+ AndroidImageFormat imageFormat = AndroidImageFormat(format);
+
+ switch (imageFormat) {
+ case AndroidImageFormat::JPEG:
+ calculedPixelFormat = QVideoFrameFormat::Format_Jpeg;
+ break;
+ case AndroidImageFormat::YUV_420_888:
+ if (numberPlanes < 3) {
+ // something went wrong on parsing. YUV_420_888 format must always have 3 planes
+ calculedPixelFormat = QVideoFrameFormat::Format_Invalid;
+ break;
+ }
+ if (pixelStrides[1] == 1)
+ calculedPixelFormat = QVideoFrameFormat::Format_YUV420P;
+ else if (pixelStrides[1] == 2 && abs(buffer[1] - buffer[2]) == 1)
+            // this can be NV21, but it will be converted below
+ calculedPixelFormat = QVideoFrameFormat::Format_NV12;
+ break;
+ case AndroidImageFormat::HEIC:
+ // QImage cannot parse HEIC
+ calculedPixelFormat = QVideoFrameFormat::Format_Invalid;
+ break;
+ case AndroidImageFormat::RAW_PRIVATE:
+ case AndroidImageFormat::RAW_SENSOR:
+ // we cannot know raw formats
+ calculedPixelFormat = QVideoFrameFormat::Format_Invalid;
+ break;
+ case AndroidImageFormat::FLEX_RGBA_8888:
+ case AndroidImageFormat::FLEX_RGB_888:
+ // these formats are only returned by Mediacodec.getOutputImage, they are not used as a
+ // Camera2 Image frame return
+ calculedPixelFormat = QVideoFrameFormat::Format_Invalid;
+ break;
+ case AndroidImageFormat::YUV_422_888:
+ case AndroidImageFormat::YUV_444_888:
+ case AndroidImageFormat::YCBCR_P010:
+ // not dealing with these formats, they require higher API levels than the current Qt min
+ calculedPixelFormat = QVideoFrameFormat::Format_Invalid;
+ break;
+ default:
+ calculedPixelFormat = QVideoFrameFormat::Format_Invalid;
+ break;
+ }
+
+ if (calculedPixelFormat == QVideoFrameFormat::Format_Invalid) {
+ qCWarning(qLCAndroidCameraFrame) << "Cannot determine image format!";
+ return false;
+ }
+
+ auto copyPlane = [&](int mapIndex, int arrayIndex) {
+ if (arrayIndex >= numberPlanes)
+ return;
+
+ m_planes[mapIndex].rowStride = rowStrides[arrayIndex];
+ m_planes[mapIndex].size = bufferSize[arrayIndex];
+ m_planes[mapIndex].data = buffer[arrayIndex];
+ };
+
+ switch (calculedPixelFormat) {
+ case QVideoFrameFormat::Format_YUV420P:
+ m_numberPlanes = 3;
+ copyPlane(0, 0);
+ copyPlane(1, 1);
+ copyPlane(2, 2);
+ m_pixelFormat = QVideoFrameFormat::Format_YUV420P;
+ break;
+ case QVideoFrameFormat::Format_NV12:
+ m_numberPlanes = 2;
+ copyPlane(0, 0);
+ copyPlane(1, 1);
+ m_pixelFormat = QVideoFrameFormat::Format_NV12;
+ break;
+ case QVideoFrameFormat::Format_Jpeg:
+ qCWarning(qLCAndroidCameraFrame)
+ << "FFMpeg HW Mediacodec does not encode other than YCbCr formats";
+ // we still parse it to preview the frame
+ m_image = QImage::fromData(buffer[0], bufferSize[0]);
+ m_planes[0].rowStride = m_image.bytesPerLine();
+ m_planes[0].size = m_image.sizeInBytes();
+ m_planes[0].data = m_image.bits();
+ m_pixelFormat = QVideoFrameFormat::pixelFormatFromImageFormat(m_image.format());
+ break;
+ default:
+ break;
+ }
+
+ long timestamp = frame.callMethod<jlong>("getTimestamp");
+ m_timestamp = timestamp / 1000;
+
+ int width = frame.callMethod<jint>("getWidth");
+ int height = frame.callMethod<jint>("getHeight");
+ m_size = QSize(width, height);
+
+ return true;
+}
+
+QAndroidCameraFrame::QAndroidCameraFrame(QJniObject frame)
+ : m_pixelFormat(QVideoFrameFormat::Format_Invalid), m_parsed(parse(frame))
+{
+ if (isParsed()) {
+ // holding the frame java object
+ QJniEnvironment jniEnv;
+ m_frame = jniEnv->NewGlobalRef(frame.object());
+ jniEnv.checkAndClearExceptions();
+ } else if (frame.isValid()) {
+ frame.callMethod<void>("close");
+ }
+}
+
+QAndroidCameraFrame::~QAndroidCameraFrame()
+{
+ if (!isParsed()) // nothing to clean
+ return;
+
+ QJniObject qFrame(m_frame);
+ if (qFrame.isValid())
+ qFrame.callMethod<void>("close");
+
+ QJniEnvironment jniEnv;
+ if (m_frame)
+ jniEnv->DeleteGlobalRef(m_frame);
+}
+
+QT_END_NAMESPACE
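The YUV_420_888 branch in parse() above distinguishes planar from semi-planar chroma layouts purely from the plane metadata. A schematic restatement of that probe (plane 0 is Y, planes 1 and 2 are chroma):

    if (pixelStrides[1] == 1) {
        // Chroma samples are tightly packed: three separate planes, YUV420P.
    } else if (pixelStrides[1] == 2 && abs(buffer[1] - buffer[2]) == 1) {
        // U and V share one interleaved buffer offset by a single byte:
        // semi-planar layout, handled as NV12 (NV21 merely swaps U and V).
    }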
diff --git a/src/plugins/multimedia/ffmpeg/qandroidcameraframe_p.h b/src/plugins/multimedia/ffmpeg/qandroidcameraframe_p.h
new file mode 100644
index 000000000..23a737f7d
--- /dev/null
+++ b/src/plugins/multimedia/ffmpeg/qandroidcameraframe_p.h
@@ -0,0 +1,75 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDCAMERAFRAME_H
+#define QANDROIDCAMERAFRAME_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QVideoFrameFormat>
+#include <QJniObject>
+
+class QAndroidCameraFrame
+{
+public:
+ struct Plane
+ {
+ int pixelStride = 0;
+ int rowStride = 0;
+ int size = 0;
+ uint8_t *data;
+ };
+
+ QAndroidCameraFrame(QJniObject frame);
+ ~QAndroidCameraFrame();
+
+ QVideoFrameFormat::PixelFormat format() const { return m_pixelFormat; }
+ int numberPlanes() const { return m_numberPlanes; }
+ Plane plane(int index) const
+ {
+ if (index < 0 || index > numberPlanes())
+ return {};
+
+ return m_planes[index];
+ }
+ QSize size() const { return m_size; }
+ long timestamp() const { return m_timestamp; }
+
+ bool isParsed() const { return m_parsed; }
+
+private:
+ bool parse(const QJniObject &frame);
+ QVideoFrameFormat::PixelFormat m_pixelFormat;
+
+ QSize m_size = {};
+ long m_timestamp = 0;
+ int m_numberPlanes = 0;
+ Plane m_planes[3]; // 3 max number planes
+ jobject m_frame = nullptr;
+ bool m_parsed = false;
+ QImage m_image;
+
+ enum AndroidImageFormat {
+ RAW_SENSOR = 32,
+ YUV_420_888 = 35,
+ RAW_PRIVATE = 36,
+ YUV_422_888 = 39,
+ YUV_444_888 = 40,
+ FLEX_RGB_888 = 41,
+ FLEX_RGBA_8888 = 42,
+ YCBCR_P010 = 54,
+ JPEG = 256,
+ HEIC = 1212500294
+ };
+};
+
+#endif // QANDROIDCAMERAFRAME_H
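Putting the header and the .cpp together, a consumer such as QAndroidCamera::frameAvailable() uses the class roughly as follows; image stands for the QJniObject received from Java:

    auto *frame = new QAndroidCameraFrame(image);
    if (!frame->isParsed()) {   // a failed parse already closed the Java Image
        delete frame;
        return;
    }
    for (int i = 0; i < frame->numberPlanes(); ++i) {
        QAndroidCameraFrame::Plane plane = frame->plane(i);
        // plane.data / plane.rowStride feed the AVFrame without copying
    }
    // Ownership is then handed to the AVFrame via av_buffer_create(), whose
    // deleter deletes the frame and thereby closes the android.media.Image.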
diff --git a/src/plugins/multimedia/ffmpeg/qandroidvideodevices.cpp b/src/plugins/multimedia/ffmpeg/qandroidvideodevices.cpp
index 1aa383a60..a8942c799 100644
--- a/src/plugins/multimedia/ffmpeg/qandroidvideodevices.cpp
+++ b/src/plugins/multimedia/ffmpeg/qandroidvideodevices.cpp
@@ -17,6 +17,7 @@ static Q_LOGGING_CATEGORY(qLCAndroidVideoDevices, "qt.multimedia.ffmpeg.android.
Q_DECLARE_JNI_CLASS(QtVideoDeviceManager,
"org/qtproject/qt/android/multimedia/QtVideoDeviceManager");
Q_DECLARE_JNI_TYPE(StringArray, "[Ljava/lang/String;")
+Q_DECLARE_JNI_CLASS(AndroidImageFormat, "android/graphics/ImageFormat");
QCameraFormat createCameraFormat(int width, int height, int fpsMin, int fpsMax)
{
@@ -27,7 +28,7 @@ QCameraFormat createCameraFormat(int width, int height, int fpsMin, int fpsMax)
format->minFrameRate = fpsMin;
format->maxFrameRate = fpsMax;
- format->pixelFormat = QVideoFrameFormat::PixelFormat::Format_BGRA8888;
+ format->pixelFormat = QVideoFrameFormat::PixelFormat::Format_YUV420P;
return format->create();
}
@@ -109,18 +110,17 @@ QList<QCameraDevice> QAndroidVideoDevices::findVideoDevices()
int min = split[0].toInt();
int max = split[1].toInt();
- int distance = max - min;
- int maxDistance = maxFps - minFps;
- if (maxDistance < distance) {
+ if (max > maxFps) {
maxFps = max;
minFps = min;
}
}
- const int IMAGEFORMAT_JPEG = 256;
+ const static int imageFormat =
+ QJniObject::getStaticField<QtJniTypes::AndroidImageFormat, jint>("YUV_420_888");
QJniObject sizesObject = deviceManager.callMethod<QtJniTypes::StringArray>(
- "getStreamConfigurationsSizes", cameraId, IMAGEFORMAT_JPEG);
+ "getStreamConfigurationsSizes", cameraId, imageFormat);
jobjectArray streamSizes = sizesObject.object<jobjectArray>();
int numSizes = jniEnv->GetArrayLength(streamSizes);
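Note that the frame-rate loop above also changed its selection rule: it now keeps the range with the highest maximum rather than the widest span. With illustrative reported ranges {"10 50", "30 60"}, the two rules disagree:

    // old rule (widest span):  picks [10, 50], since span 40 beats span 30
    // new rule (highest max):  picks [30, 60], since 60 > 50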
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegencoderoptions.cpp b/src/plugins/multimedia/ffmpeg/qffmpegencoderoptions.cpp
index f9702adc9..d7bef040a 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegencoderoptions.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegencoderoptions.cpp
@@ -204,6 +204,49 @@ static void apply_mf(const QMediaEncoderSettings &settings, AVCodecContext *code
}
#endif
+#ifdef Q_OS_ANDROID
+static void apply_mediacodec(const QMediaEncoderSettings &settings, AVCodecContext *codec,
+ AVDictionary **opts)
+{
+ codec->bit_rate = settings.videoBitRate();
+
+ const int quality[] = { 25, 50, 75, 90, 100 };
+ codec->global_quality = quality[settings.quality()];
+
+ switch (settings.encodingMode()) {
+ case QMediaRecorder::EncodingMode::AverageBitRateEncoding:
+ av_dict_set(opts, "bitrate_mode", "vbr", 1);
+ break;
+ case QMediaRecorder::EncodingMode::ConstantBitRateEncoding:
+ av_dict_set(opts, "bitrate_mode", "cbr", 1);
+ break;
+ case QMediaRecorder::EncodingMode::ConstantQualityEncoding:
+ // av_dict_set(opts, "bitrate_mode", "cq", 1);
+ av_dict_set(opts, "bitrate_mode", "cbr", 1);
+ break;
+ default:
+ break;
+ }
+
+ switch (settings.videoCodec()) {
+ case QMediaFormat::VideoCodec::H264: {
+ const char *levels[] = { "2.2", "3.2", "4.2", "5.2", "6.2" };
+ av_dict_set(opts, "level", levels[settings.quality()], 1);
+ codec->profile = FF_PROFILE_H264_HIGH;
+ break;
+ }
+ case QMediaFormat::VideoCodec::H265: {
+ const char *levels[] = { "h2.1", "h3.1", "h4.1", "h5.1", "h6.1" };
+ av_dict_set(opts, "level", levels[settings.quality()], 1);
+ codec->profile = FF_PROFILE_HEVC_MAIN;
+ break;
+ }
+ default:
+ break;
+ }
+}
+#endif
+
namespace QFFmpeg {
using ApplyOptions = void (*)(const QMediaEncoderSettings &settings, AVCodecContext *codec, AVDictionary **opts);
@@ -235,7 +278,11 @@ const struct {
{ "hevc_mf", apply_mf },
{ "h264_mf", apply_mf },
#endif
- { nullptr, nullptr } };
+#ifdef Q_OS_ANDROID
+ { "hevc_mediacodec", apply_mediacodec},
+ { "h264_mediacodec", apply_mediacodec },
+#endif
+ { nullptr, nullptr } };
const struct {
const char *name;
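With the two mediacodec entries registered above, the per-encoder option table can be walked by codec name when a recording session configures its codec. A hedged sketch of that lookup (optionTable and the traversal are illustrative names; the real helper lives elsewhere in this file). Note also that the quality and level arrays in apply_mediacodec() assume settings.quality() maps QMediaRecorder::Quality (VeryLowQuality through VeryHighQuality) onto indices 0 to 4.

    // Illustrative traversal: find the entry matching the encoder name and
    // let it fill in codec-context fields and the AVDictionary options.
    for (int i = 0; optionTable[i].name; ++i) {
        if (!strcmp(avCodec->name, optionTable[i].name)) {
            optionTable[i].apply(settings, codecContext, &opts);
            break;
        }
    }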
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp
index f207d11f0..fe51aa877 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel.cpp
@@ -30,7 +30,9 @@ static Q_LOGGING_CATEGORY(qLHWAccel, "qt.multimedia.ffmpeg.hwaccel");
namespace QFFmpeg {
static const std::initializer_list<AVHWDeviceType> preferredHardwareAccelerators = {
-#if defined(Q_OS_LINUX)
+#if defined(Q_OS_ANDROID)
+ AV_HWDEVICE_TYPE_MEDIACODEC,
+#elif defined(Q_OS_LINUX)
AV_HWDEVICE_TYPE_VAAPI,
AV_HWDEVICE_TYPE_VDPAU,
AV_HWDEVICE_TYPE_CUDA,
@@ -38,8 +40,6 @@ static const std::initializer_list<AVHWDeviceType> preferredHardwareAccelerators
AV_HWDEVICE_TYPE_D3D11VA,
#elif defined (Q_OS_DARWIN)
AV_HWDEVICE_TYPE_VIDEOTOOLBOX,
-#elif defined (Q_OS_ANDROID)
- AV_HWDEVICE_TYPE_MEDIACODEC,
#endif
};
diff --git a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_mediacodec.cpp b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_mediacodec.cpp
index fd3b5adea..9f0de62d2 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_mediacodec.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpeghwaccel_mediacodec.cpp
@@ -3,7 +3,7 @@
#include "qffmpeghwaccel_mediacodec_p.h"
-#include <androidsurfacetexture_p.h>
+#include "androidsurfacetexture_p.h"
#include <QtGui/private/qrhi_p.h>
extern "C" {
diff --git a/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp b/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp
index e731a31a6..d86ebcfcf 100644
--- a/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp
+++ b/src/plugins/multimedia/ffmpeg/qffmpegmediaintegration.cpp
@@ -32,7 +32,8 @@
# include "qandroidvideodevices_p.h"
# include "qandroidcamera_p.h"
extern "C" {
-# include <libavcodec/jni.h>
+# include <libavutil/log.h>
+# include <libavcodec/jni.h>
}
#endif
@@ -68,6 +69,30 @@ public:
}
};
+static void qffmpegLogCallback(void *ptr, int level, const char *fmt, va_list vl)
+{
+ Q_UNUSED(ptr)
+
+ // filter logs above the chosen level and AV_LOG_QUIET (negative level)
+ if (level < 0 || level > av_log_get_level())
+ return;
+
+ QString message = QString("FFmpeg log: %1").arg(QString::vasprintf(fmt, vl));
+ if (message.endsWith("\n"))
+ message.removeLast();
+
+ if (level == AV_LOG_DEBUG || level == AV_LOG_TRACE)
+ qDebug() << message;
+ else if (level == AV_LOG_VERBOSE || level == AV_LOG_INFO)
+ qInfo() << message;
+ else if (level == AV_LOG_WARNING)
+ qWarning() << message;
+ else if (level == AV_LOG_ERROR)
+ qCritical() << message;
+ else if (level == AV_LOG_FATAL || level == AV_LOG_PANIC)
+ qFatal() << message;
+}
+
QFFmpegMediaIntegration::QFFmpegMediaIntegration()
{
m_formatsInfo = new QFFmpegMediaFormatInfo();
@@ -83,8 +108,10 @@ QFFmpegMediaIntegration::QFFmpegMediaIntegration()
m_videoDevices = std::make_unique<QWindowsVideoDevices>(this);
#endif
- if (qgetenv("QT_FFMPEG_DEBUG").toInt())
+ if (qEnvironmentVariableIsSet("QT_FFMPEG_DEBUG")) {
av_log_set_level(AV_LOG_DEBUG);
+ av_log_set_callback(&qffmpegLogCallback);
+ }
#ifndef QT_NO_DEBUG
qDebug() << "Available HW decoding frameworks:";
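Since the callback is only installed when QT_FFMPEG_DEBUG is set (any value now suffices, as the check uses qEnvironmentVariableIsSet), routing the full FFmpeg log through Qt's logging from an application is a one-liner:

    // Set before Qt Multimedia initializes, e.g. at the top of main():
    qputenv("QT_FFMPEG_DEBUG", "1");
    // Equivalent from the shell: QT_FFMPEG_DEBUG=1 ./myapp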
@@ -177,6 +204,7 @@ QMaybe<QPlatformAudioInput *> QFFmpegMediaIntegration::createAudioInput(QAudioIn
}
#ifdef Q_OS_ANDROID
+
Q_DECL_EXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void * /*reserved*/)
{
static bool initialized = false;