Diffstat (limited to 'src/plugins/multimedia/android')
-rw-r--r-- src/plugins/multimedia/android/CMakeLists.txt | 62
-rw-r--r-- src/plugins/multimedia/android/android.json | 3
-rw-r--r-- src/plugins/multimedia/android/audio/qandroidaudiodecoder.cpp | 437
-rw-r--r-- src/plugins/multimedia/android/audio/qandroidaudiodecoder_p.h | 118
-rw-r--r-- src/plugins/multimedia/android/common/qandroidaudioinput.cpp | 47
-rw-r--r-- src/plugins/multimedia/android/common/qandroidaudioinput_p.h | 47
-rw-r--r-- src/plugins/multimedia/android/common/qandroidaudiooutput_p.h | 30
-rw-r--r-- src/plugins/multimedia/android/common/qandroidglobal_p.h | 28
-rw-r--r-- src/plugins/multimedia/android/common/qandroidmultimediautils.cpp | 125
-rw-r--r-- src/plugins/multimedia/android/common/qandroidmultimediautils_p.h | 40
-rw-r--r-- src/plugins/multimedia/android/common/qandroidvideooutput.cpp | 468
-rw-r--r-- src/plugins/multimedia/android/common/qandroidvideooutput_p.h | 93
-rw-r--r-- src/plugins/multimedia/android/common/qandroidvideosink.cpp | 35
-rw-r--r-- src/plugins/multimedia/android/common/qandroidvideosink_p.h | 41
-rw-r--r-- src/plugins/multimedia/android/mediacapture/qandroidcamera.cpp | 562
-rw-r--r-- src/plugins/multimedia/android/mediacapture/qandroidcamera_p.h | 99
-rw-r--r-- src/plugins/multimedia/android/mediacapture/qandroidcamerasession.cpp | 808
-rw-r--r-- src/plugins/multimedia/android/mediacapture/qandroidcamerasession_p.h | 166
-rw-r--r-- src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp | 473
-rw-r--r-- src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h | 158
-rw-r--r-- src/plugins/multimedia/android/mediacapture/qandroidimagecapture.cpp | 73
-rw-r--r-- src/plugins/multimedia/android/mediacapture/qandroidimagecapture_p.h | 48
-rw-r--r-- src/plugins/multimedia/android/mediacapture/qandroidmediacapturesession.cpp | 115
-rw-r--r-- src/plugins/multimedia/android/mediacapture/qandroidmediacapturesession_p.h | 66
-rw-r--r-- src/plugins/multimedia/android/mediacapture/qandroidmediaencoder.cpp | 72
-rw-r--r-- src/plugins/multimedia/android/mediacapture/qandroidmediaencoder_p.h | 50
-rw-r--r-- src/plugins/multimedia/android/mediaplayer/qandroidmediaplayer.cpp | 999
-rw-r--r-- src/plugins/multimedia/android/mediaplayer/qandroidmediaplayer_p.h | 127
-rw-r--r-- src/plugins/multimedia/android/mediaplayer/qandroidmetadata.cpp | 163
-rw-r--r-- src/plugins/multimedia/android/mediaplayer/qandroidmetadata_p.h | 47
-rw-r--r-- src/plugins/multimedia/android/qandroidformatsinfo.cpp | 160
-rw-r--r-- src/plugins/multimedia/android/qandroidformatsinfo_p.h | 40
-rw-r--r-- src/plugins/multimedia/android/qandroidintegration.cpp | 136
-rw-r--r-- src/plugins/multimedia/android/qandroidintegration_p.h | 48
-rw-r--r-- src/plugins/multimedia/android/wrappers/jni/androidcamera.cpp | 1797
-rw-r--r-- src/plugins/multimedia/android/wrappers/jni/androidcamera_p.h | 208
-rw-r--r-- src/plugins/multimedia/android/wrappers/jni/androidmediametadataretriever.cpp | 136
-rw-r--r-- src/plugins/multimedia/android/wrappers/jni/androidmediametadataretriever_p.h | 66
-rw-r--r-- src/plugins/multimedia/android/wrappers/jni/androidmediaplayer.cpp | 535
-rw-r--r-- src/plugins/multimedia/android/wrappers/jni/androidmediaplayer_p.h | 135
-rw-r--r-- src/plugins/multimedia/android/wrappers/jni/androidmediarecorder.cpp | 337
-rw-r--r-- src/plugins/multimedia/android/wrappers/jni/androidmediarecorder_p.h | 161
-rw-r--r-- src/plugins/multimedia/android/wrappers/jni/androidmultimediautils.cpp | 43
-rw-r--r-- src/plugins/multimedia/android/wrappers/jni/androidmultimediautils_p.h | 40
-rw-r--r-- src/plugins/multimedia/android/wrappers/jni/androidsurfacetexture.cpp | 152
-rw-r--r-- src/plugins/multimedia/android/wrappers/jni/androidsurfacetexture_p.h | 61
-rw-r--r-- src/plugins/multimedia/android/wrappers/jni/androidsurfaceview.cpp | 152
-rw-r--r-- src/plugins/multimedia/android/wrappers/jni/androidsurfaceview_p.h | 78
48 files changed, 9885 insertions, 0 deletions
diff --git a/src/plugins/multimedia/android/CMakeLists.txt b/src/plugins/multimedia/android/CMakeLists.txt
new file mode 100644
index 000000000..31a94ff4f
--- /dev/null
+++ b/src/plugins/multimedia/android/CMakeLists.txt
@@ -0,0 +1,62 @@
+# Copyright (C) 2022 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+qt_internal_add_plugin(QAndroidMediaPlugin
+ OUTPUT_NAME androidmediaplugin
+ PLUGIN_TYPE multimedia
+ SOURCES
+ common/qandroidaudiooutput_p.h
+ common/qandroidaudioinput.cpp common/qandroidaudioinput_p.h
+ audio/qandroidaudiodecoder.cpp audio/qandroidaudiodecoder_p.h
+ common/qandroidglobal_p.h
+ common/qandroidmultimediautils.cpp common/qandroidmultimediautils_p.h
+ common/qandroidvideosink.cpp common/qandroidvideosink_p.h
+ common/qandroidvideooutput.cpp common/qandroidvideooutput_p.h
+ mediacapture/qandroidcamera.cpp mediacapture/qandroidcamera_p.h
+ mediacapture/qandroidimagecapture.cpp mediacapture/qandroidimagecapture_p.h
+ mediacapture/qandroidcamerasession.cpp mediacapture/qandroidcamerasession_p.h
+ mediacapture/qandroidmediacapturesession.cpp mediacapture/qandroidmediacapturesession_p.h
+ mediacapture/qandroidcapturesession.cpp mediacapture/qandroidcapturesession_p.h
+ mediacapture/qandroidmediaencoder.cpp mediacapture/qandroidmediaencoder_p.h
+ mediaplayer/qandroidmediaplayer.cpp mediaplayer/qandroidmediaplayer_p.h
+ mediaplayer/qandroidmetadata.cpp mediaplayer/qandroidmetadata_p.h
+ qandroidformatsinfo.cpp qandroidformatsinfo_p.h
+ qandroidintegration.cpp qandroidintegration_p.h
+ wrappers/jni/androidcamera.cpp wrappers/jni/androidcamera_p.h
+ wrappers/jni/androidmediametadataretriever.cpp wrappers/jni/androidmediametadataretriever_p.h
+ wrappers/jni/androidmediaplayer.cpp wrappers/jni/androidmediaplayer_p.h
+ wrappers/jni/androidmediarecorder.cpp wrappers/jni/androidmediarecorder_p.h
+ wrappers/jni/androidmultimediautils.cpp wrappers/jni/androidmultimediautils_p.h
+ wrappers/jni/androidsurfacetexture.cpp wrappers/jni/androidsurfacetexture_p.h
+ wrappers/jni/androidsurfaceview.cpp wrappers/jni/androidsurfaceview_p.h
+ NO_UNITY_BUILD_SOURCES
+ # Resolves two problems:
+ # - collision of `rwLock` with wrappers/jni/androidmediaplayer.cpp
+ # - redefinition of `notifyFrameAvailable` with a different signature than
+ # the one in wrappers/jni/androidsurfacetexture.cpp
+ wrappers/jni/androidcamera.cpp
+ INCLUDE_DIRECTORIES
+ audio
+ common
+ mediacapture
+ mediaplayer
+ wrappers/jni
+ ../android
+ LIBRARIES
+ Qt::MultimediaPrivate
+ Qt::CorePrivate
+ OpenSLES
+ mediandk
+)
+
+set_property(TARGET QAndroidMediaPlugin APPEND PROPERTY QT_ANDROID_BUNDLED_JAR_DEPENDENCIES
+ jar/Qt${QtMultimedia_VERSION_MAJOR}AndroidMultimedia.jar:org.qtproject.qt.android.multimedia.QtMultimediaUtils
+)
+set_property(TARGET QAndroidMediaPlugin APPEND PROPERTY QT_ANDROID_LIB_DEPENDENCIES
+ ${INSTALL_PLUGINSDIR}/multimedia/libplugins_multimedia_androidmediaplugin.so
+)
+set_property(TARGET QAndroidMediaPlugin APPEND PROPERTY QT_ANDROID_PERMISSIONS
+ android.permission.CAMERA android.permission.RECORD_AUDIO
+ android.permission.BLUETOOTH
+ android.permission.MODIFY_AUDIO_SETTINGS
+)
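
The QT_ANDROID_PERMISSIONS entries above only declare manifest permissions; on Android 6+ the camera and microphone still need a runtime grant, which this plugin merely checks (see common/qandroidmultimediautils.cpp further down). A minimal sketch, not part of the patch and assuming the Qt 6.5 QPermission API, of how an application would request the grant before activating a camera:

#include <QGuiApplication>
#include <QtCore/qpermissions.h>
#include <QDebug>

// Sketch only: request the runtime camera grant that
// qt_androidCheckCameraPermission() later verifies.
static void requestCameraPermission(QGuiApplication &app)
{
    QCameraPermission camera;
    if (app.checkPermission(camera) == Qt::PermissionStatus::Granted) {
        qDebug() << "camera permission already granted";
        return;
    }
    app.requestPermission(camera, [](const QPermission &result) {
        qDebug() << "camera permission granted:"
                 << (result.status() == Qt::PermissionStatus::Granted);
    });
}
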
diff --git a/src/plugins/multimedia/android/android.json b/src/plugins/multimedia/android/android.json
new file mode 100644
index 000000000..6843bd330
--- /dev/null
+++ b/src/plugins/multimedia/android/android.json
@@ -0,0 +1,3 @@
+{
+ "Keys": [ "android" ]
+}
diff --git a/src/plugins/multimedia/android/audio/qandroidaudiodecoder.cpp b/src/plugins/multimedia/android/audio/qandroidaudiodecoder.cpp
new file mode 100644
index 000000000..d200a72b5
--- /dev/null
+++ b/src/plugins/multimedia/android/audio/qandroidaudiodecoder.cpp
@@ -0,0 +1,437 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#include "qandroidaudiodecoder_p.h"
+
+#include <QtCore/qcoreapplication.h>
+#include <QtCore/qjniobject.h>
+#include <QtCore/qjnienvironment.h>
+#include <QtCore/private/qandroidextras_p.h>
+#include <qloggingcategory.h>
+#include <QTimer>
+#include <QFile>
+#include <QDir>
+
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <unistd.h>
+
+QT_BEGIN_NAMESPACE
+
+static const char tempFile[] = "encoded.wav";
+constexpr int dequeueTimeout = 5000;
+static Q_LOGGING_CATEGORY(adLogger, "QAndroidAudioDecoder")
+
+Decoder::Decoder()
+ : m_format(AMediaFormat_new())
+{}
+
+Decoder::~Decoder()
+{
+ if (m_codec) {
+ AMediaCodec_delete(m_codec);
+ m_codec = nullptr;
+ }
+
+ if (m_extractor) {
+ AMediaExtractor_delete(m_extractor);
+ m_extractor = nullptr;
+ }
+
+ if (m_format) {
+ AMediaFormat_delete(m_format);
+ m_format = nullptr;
+ }
+}
+
+void Decoder::stop()
+{
+ if (!m_codec)
+ return;
+
+ const media_status_t err = AMediaCodec_stop(m_codec);
+ if (err != AMEDIA_OK)
+ qCWarning(adLogger) << "stop() error: " << err;
+}
+
+void Decoder::setSource(const QUrl &source)
+{
+ const QJniObject path = QJniObject::callStaticObjectMethod(
+ "org/qtproject/qt/android/multimedia/QtMultimediaUtils",
+ "getMimeType",
+ "(Landroid/content/Context;Ljava/lang/String;)Ljava/lang/String;",
+ QNativeInterface::QAndroidApplication::context().object(),
+ QJniObject::fromString(source.path()).object());
+
+ const QString mime = path.isValid() ? path.toString() : "";
+
+ if (!mime.isEmpty() && !mime.contains("audio", Qt::CaseInsensitive)) {
+ m_formatError = tr("Cannot set source, invalid mime type for the source provided.");
+ return;
+ }
+
+ if (!m_extractor)
+ m_extractor = AMediaExtractor_new();
+
+ QFile file(source.path());
+ if (!file.open(QFile::ReadOnly)) {
+ emit error(QAudioDecoder::ResourceError, tr("Cannot open the file"));
+ return;
+ }
+
+ const int fd = file.handle();
+
+ if (fd < 0) {
+ emit error(QAudioDecoder::ResourceError, tr("Invalid fileDescriptor for source."));
+ return;
+ }
+ const int size = file.size();
+ media_status_t status = AMediaExtractor_setDataSourceFd(m_extractor, fd, 0,
+ size > 0 ? size : LONG_MAX);
+ close(fd);
+
+ if (status != AMEDIA_OK) {
+ if (m_extractor) {
+ AMediaExtractor_delete(m_extractor);
+ m_extractor = nullptr;
+ }
+ m_formatError = tr("Setting source for Audio Decoder failed.");
+ }
+}
+
+void Decoder::createDecoder()
+{
+ // get encoded format for decoder
+ m_format = AMediaExtractor_getTrackFormat(m_extractor, 0);
+
+ const char *mime;
+ if (!AMediaFormat_getString(m_format, AMEDIAFORMAT_KEY_MIME, &mime)) {
+ if (m_extractor) {
+ AMediaExtractor_delete(m_extractor);
+ m_extractor = nullptr;
+ }
+ emit error(QAudioDecoder::FormatError, tr("Format not supported by Audio Decoder."));
+
+ return;
+ }
+
+ // get audio duration from source
+ int64_t durationUs;
+ AMediaFormat_getInt64(m_format, AMEDIAFORMAT_KEY_DURATION, &durationUs);
+ emit durationChanged(durationUs / 1000);
+
+ // set default output audio format from input file
+ if (!m_outputFormat.isValid()) {
+ int32_t sampleRate;
+ AMediaFormat_getInt32(m_format, AMEDIAFORMAT_KEY_SAMPLE_RATE, &sampleRate);
+ m_outputFormat.setSampleRate(sampleRate);
+ int32_t channelCount;
+ AMediaFormat_getInt32(m_format, AMEDIAFORMAT_KEY_CHANNEL_COUNT, &channelCount);
+ m_outputFormat.setChannelCount(channelCount);
+ m_outputFormat.setSampleFormat(QAudioFormat::Int16);
+ }
+
+ m_codec = AMediaCodec_createDecoderByType(mime);
+}
+
+void Decoder::doDecode()
+{
+ if (!m_formatError.isEmpty()) {
+ emit error(QAudioDecoder::FormatError, m_formatError);
+ return;
+ }
+
+ if (!m_extractor) {
+ emit error(QAudioDecoder::ResourceError, tr("Cannot decode, source not set."));
+ return;
+ }
+
+ createDecoder();
+
+ if (!m_codec) {
+ emit error(QAudioDecoder::ResourceError, tr("Audio Decoder could not be created."));
+ return;
+ }
+
+ media_status_t status = AMediaCodec_configure(m_codec, m_format, nullptr /* surface */,
+ nullptr /* crypto */, 0);
+
+ if (status != AMEDIA_OK) {
+ emit error(QAudioDecoder::ResourceError, tr("Audio Decoder failed configuration."));
+ return;
+ }
+
+ status = AMediaCodec_start(m_codec);
+ if (status != AMEDIA_OK) {
+ emit error(QAudioDecoder::ResourceError, tr("Audio Decoder failed to start."));
+ return;
+ }
+
+ AMediaExtractor_selectTrack(m_extractor, 0);
+
+ emit decodingChanged(true);
+ m_inputEOS = false;
+ while (!m_inputEOS) {
+ // handle input buffer
+ const ssize_t bufferIdx = AMediaCodec_dequeueInputBuffer(m_codec, dequeueTimeout);
+
+ if (bufferIdx >= 0) {
+ size_t bufferSize = {};
+ uint8_t *buffer = AMediaCodec_getInputBuffer(m_codec, bufferIdx, &bufferSize);
+ const int sample = AMediaExtractor_readSampleData(m_extractor, buffer, bufferSize);
+ if (sample < 0) {
+ m_inputEOS = true;
+ break;
+ }
+
+ const int64_t presentationTimeUs = AMediaExtractor_getSampleTime(m_extractor);
+ AMediaCodec_queueInputBuffer(m_codec, bufferIdx, 0, sample, presentationTimeUs,
+ m_inputEOS ? AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM : 0);
+ AMediaExtractor_advance(m_extractor);
+
+ // handle output buffer
+ AMediaCodecBufferInfo info;
+ ssize_t idx = AMediaCodec_dequeueOutputBuffer(m_codec, &info, dequeueTimeout);
+
+ while (idx == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED
+ || idx == AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED) {
+ if (idx == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED)
+ qCWarning(adLogger) << "dequeueOutputBuffer() status: outputFormat changed";
+
+ idx = AMediaCodec_dequeueOutputBuffer(m_codec, &info, dequeueTimeout);
+ }
+
+ if (idx >= 0) {
+ if (info.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM)
+ break;
+
+ if (info.size > 0) {
+ size_t bufferSize;
+ const uint8_t *bufferData = AMediaCodec_getOutputBuffer(m_codec, idx,
+ &bufferSize);
+ const QByteArray data((const char*)(bufferData + info.offset), info.size);
+ auto audioBuffer = QAudioBuffer(data, m_outputFormat, presentationTimeUs);
+ if (presentationTimeUs >= 0)
+ emit positionChanged(std::move(audioBuffer), presentationTimeUs / 1000);
+
+ AMediaCodec_releaseOutputBuffer(m_codec, idx, false);
+ }
+ } else if (idx == AMEDIACODEC_INFO_TRY_AGAIN_LATER) {
+ qCWarning(adLogger) << "dequeueOutputBuffer() status: try again later";
+ break;
+ } else {
+ qCWarning(adLogger) <<
+ "AMediaCodec_dequeueOutputBuffer() status: invalid buffer idx " << idx;
+ }
+ } else {
+ qCWarning(adLogger) << "dequeueInputBuffer() status: invalid buffer idx " << bufferIdx;
+ }
+ }
+ emit finished();
+}
+
+QAndroidAudioDecoder::QAndroidAudioDecoder(QAudioDecoder *parent)
+ : QPlatformAudioDecoder(parent),
+ m_decoder(new Decoder())
+{
+ connect(m_decoder, &Decoder::positionChanged, this, &QAndroidAudioDecoder::positionChanged);
+ connect(m_decoder, &Decoder::durationChanged, this, &QAndroidAudioDecoder::durationChanged);
+ connect(m_decoder, &Decoder::error, this, &QAndroidAudioDecoder::error);
+ connect(m_decoder, &Decoder::finished, this, &QAndroidAudioDecoder::finished);
+ connect(m_decoder, &Decoder::decodingChanged, this, &QPlatformAudioDecoder::setIsDecoding);
+ connect(this, &QAndroidAudioDecoder::setSourceUrl, m_decoder, &Decoder::setSource);
+}
+
+QAndroidAudioDecoder::~QAndroidAudioDecoder()
+{
+ m_decoder->thread()->quit();
+ m_decoder->thread()->wait();
+ delete m_threadDecoder;
+ delete m_decoder;
+}
+
+void QAndroidAudioDecoder::setSource(const QUrl &fileName)
+{
+ if (!requestPermissions())
+ return;
+
+ if (isDecoding())
+ return;
+
+ m_device = nullptr;
+ error(QAudioDecoder::NoError, QStringLiteral(""));
+
+ if (m_source != fileName) {
+ m_source = fileName;
+ emit setSourceUrl(m_source);
+ sourceChanged();
+ }
+}
+
+void QAndroidAudioDecoder::setSourceDevice(QIODevice *device)
+{
+ if (isDecoding())
+ return;
+
+ m_source.clear();
+ if (m_device != device) {
+ m_device = device;
+
+ if (!requestPermissions())
+ return;
+
+ sourceChanged();
+ }
+}
+
+void QAndroidAudioDecoder::start()
+{
+ if (isDecoding())
+ return;
+
+ m_position = -1;
+
+ if (m_device && (!m_device->isOpen() || !m_device->isReadable())) {
+ emit error(QAudioDecoder::ResourceError,
+ QString::fromUtf8("Unable to read from the specified device"));
+ return;
+ }
+
+ if (!m_threadDecoder) {
+ m_threadDecoder = new QThread(this);
+ m_decoder->moveToThread(m_threadDecoder);
+ m_threadDecoder->start();
+ }
+
+ decode();
+}
+
+void QAndroidAudioDecoder::stop()
+{
+ if (!isDecoding() && m_position < 0 && m_duration < 0)
+ return;
+
+ m_decoder->stop();
+ m_audioBuffer.clear();
+ m_position = -1;
+ m_duration = -1;
+ setIsDecoding(false);
+
+ emit bufferAvailableChanged(false);
+ emit QPlatformAudioDecoder::positionChanged(m_position);
+}
+
+QAudioBuffer QAndroidAudioDecoder::read()
+{
+ if (!m_audioBuffer.isEmpty()) {
+ QPair<QAudioBuffer, int> buffer = m_audioBuffer.takeFirst();
+ m_position = buffer.second;
+ emit QPlatformAudioDecoder::positionChanged(buffer.second);
+ return buffer.first;
+ }
+
+ // no buffers available
+ return {};
+}
+
+bool QAndroidAudioDecoder::bufferAvailable() const
+{
+ return m_audioBuffer.size() > 0;
+}
+
+qint64 QAndroidAudioDecoder::position() const
+{
+ return m_position;
+}
+
+qint64 QAndroidAudioDecoder::duration() const
+{
+ return m_duration;
+}
+
+void QAndroidAudioDecoder::positionChanged(QAudioBuffer audioBuffer, qint64 position)
+{
+ m_audioBuffer.append(QPair<QAudioBuffer, int>(audioBuffer, position));
+ m_position = position;
+ emit bufferReady();
+}
+
+void QAndroidAudioDecoder::durationChanged(qint64 duration)
+{
+ m_duration = duration;
+ emit QPlatformAudioDecoder::durationChanged(duration);
+}
+
+void QAndroidAudioDecoder::error(const QAudioDecoder::Error err, const QString &errorString)
+{
+ stop();
+ emit QPlatformAudioDecoder::error(err, errorString);
+}
+
+void QAndroidAudioDecoder::finished()
+{
+ emit bufferAvailableChanged(m_audioBuffer.size() > 0);
+
+ if (m_duration != -1)
+ emit durationChanged(m_duration);
+
+ // remove temp file when decoding is finished
+ QFile(QString(QDir::tempPath()).append(QString::fromUtf8(tempFile))).remove();
+ emit QPlatformAudioDecoder::finished();
+}
+
+bool QAndroidAudioDecoder::requestPermissions()
+{
+ const auto writeRes = QtAndroidPrivate::requestPermission(QStringLiteral("android.permission.WRITE_EXTERNAL_STORAGE"));
+ if (writeRes.result() == QtAndroidPrivate::Authorized)
+ return true;
+
+ return false;
+}
+
+void QAndroidAudioDecoder::decode()
+{
+ if (m_device) {
+ connect(m_device, &QIODevice::readyRead, this, &QAndroidAudioDecoder::readDevice);
+ if (m_device->bytesAvailable())
+ readDevice();
+ } else {
+ QTimer::singleShot(0, m_decoder, &Decoder::doDecode);
+ }
+}
+
+bool QAndroidAudioDecoder::createTempFile()
+{
+ QFile file = QFile(QDir::tempPath().append(QString::fromUtf8(tempFile)), this);
+
+ bool success = file.open(QIODevice::ReadWrite);
+ if (!success)
+ emit error(QAudioDecoder::ResourceError, tr("Error opening temporary file: %1").arg(file.errorString()));
+
+ success &= (file.write(m_deviceBuffer) == m_deviceBuffer.size());
+ if (!success)
+ emit error(QAudioDecoder::ResourceError, tr("Error while writing data to temporary file"));
+
+ file.close();
+ m_deviceBuffer.clear();
+ if (success)
+ m_decoder->setSource(file.fileName());
+
+ return success;
+}
+
+void QAndroidAudioDecoder::readDevice() {
+ m_deviceBuffer.append(m_device->readAll());
+ if (m_device->atEnd()) {
+ disconnect(m_device, &QIODevice::readyRead, this, &QAndroidAudioDecoder::readDevice);
+ if (!createTempFile()) {
+ m_deviceBuffer.clear();
+ stop();
+ return;
+ }
+ QTimer::singleShot(0, m_decoder, &Decoder::doDecode);
+ }
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qandroidaudiodecoder_p.cpp"
diff --git a/src/plugins/multimedia/android/audio/qandroidaudiodecoder_p.h b/src/plugins/multimedia/android/audio/qandroidaudiodecoder_p.h
new file mode 100644
index 000000000..65a0f1855
--- /dev/null
+++ b/src/plugins/multimedia/android/audio/qandroidaudiodecoder_p.h
@@ -0,0 +1,118 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDAUDIODECODER_P_H
+#define QANDROIDAUDIODECODER_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+#include "private/qplatformaudiodecoder_p.h"
+
+#include <QtCore/qurl.h>
+#include <QThread>
+
+#include "media/NdkMediaCodec.h"
+#include "media/NdkMediaExtractor.h"
+#include "media/NdkMediaFormat.h"
+#include "media/NdkMediaError.h"
+
+
+QT_USE_NAMESPACE
+
+class Decoder : public QObject
+{
+ Q_OBJECT
+public:
+ Decoder();
+ ~Decoder();
+
+public slots:
+ void setSource(const QUrl &source);
+ void doDecode();
+ void stop();
+
+signals:
+ void positionChanged(const QAudioBuffer &buffer, qint64 position);
+ void durationChanged(const qint64 duration);
+ void error(const QAudioDecoder::Error error, const QString &errorString);
+ void finished();
+ void decodingChanged(bool decoding);
+
+private:
+ void createDecoder();
+
+ AMediaCodec *m_codec = nullptr;
+ AMediaExtractor *m_extractor = nullptr;
+ AMediaFormat *m_format = nullptr;
+
+ QAudioFormat m_outputFormat;
+ QString m_formatError;
+ bool m_inputEOS;
+};
+
+
+class QAndroidAudioDecoder : public QPlatformAudioDecoder
+{
+ Q_OBJECT
+public:
+ QAndroidAudioDecoder(QAudioDecoder *parent);
+ virtual ~QAndroidAudioDecoder();
+
+ QUrl source() const override { return m_source; }
+ void setSource(const QUrl &fileName) override;
+
+ QIODevice *sourceDevice() const override { return m_device; }
+ void setSourceDevice(QIODevice *device) override;
+
+ void start() override;
+ void stop() override;
+
+ QAudioFormat audioFormat() const override { return {}; }
+ void setAudioFormat(const QAudioFormat &/*format*/) override {}
+
+ QAudioBuffer read() override;
+ bool bufferAvailable() const override;
+
+ qint64 position() const override;
+ qint64 duration() const override;
+
+signals:
+ void setSourceUrl(const QUrl &source);
+
+private slots:
+ void positionChanged(QAudioBuffer audioBuffer, qint64 position);
+ void durationChanged(qint64 duration);
+ void error(const QAudioDecoder::Error error, const QString &errorString);
+ void readDevice();
+ void finished();
+
+private:
+ bool requestPermissions();
+ bool createTempFile();
+ void decode();
+
+ QIODevice *m_device = nullptr;
+ Decoder *m_decoder;
+
+ QList<QPair<QAudioBuffer, int>> m_audioBuffer;
+ QUrl m_source;
+
+ qint64 m_position = -1;
+ qint64 m_duration = -1;
+
+ QByteArray m_deviceBuffer;
+
+ QThread *m_threadDecoder = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDAUDIODECODER_P_H
diff --git a/src/plugins/multimedia/android/common/qandroidaudioinput.cpp b/src/plugins/multimedia/android/common/qandroidaudioinput.cpp
new file mode 100644
index 000000000..a1eb9258b
--- /dev/null
+++ b/src/plugins/multimedia/android/common/qandroidaudioinput.cpp
@@ -0,0 +1,47 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidaudioinput_p.h"
+
+#include <qaudioinput.h>
+
+#include <QtCore/qjniobject.h>
+
+QT_BEGIN_NAMESPACE
+
+QAndroidAudioInput::QAndroidAudioInput(QAudioInput *parent)
+ : QObject(parent),
+ QPlatformAudioInput(parent)
+{
+ m_muted = isMuted();
+}
+
+QAndroidAudioInput::~QAndroidAudioInput()
+{
+ setMuted(m_muted);
+}
+
+void QAndroidAudioInput::setMuted(bool muted)
+{
+ bool isInputMuted = isMuted();
+ if (muted != isInputMuted) {
+ QJniObject::callStaticMethod<void>(
+ "org/qtproject/qt/android/multimedia/QtAudioDeviceManager",
+ "setInputMuted",
+ "(Z)V",
+ muted);
+ emit mutedChanged(muted);
+ }
+}
+
+bool QAndroidAudioInput::isMuted() const
+{
+ return QJniObject::callStaticMethod<jboolean>(
+ "org/qtproject/qt/android/multimedia/QtAudioDeviceManager",
+ "isMicrophoneMute",
+ "()Z");
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qandroidaudioinput_p.cpp"
diff --git a/src/plugins/multimedia/android/common/qandroidaudioinput_p.h b/src/plugins/multimedia/android/common/qandroidaudioinput_p.h
new file mode 100644
index 000000000..ef59da8ec
--- /dev/null
+++ b/src/plugins/multimedia/android/common/qandroidaudioinput_p.h
@@ -0,0 +1,47 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDAUDIOINPUT_H
+#define QANDROIDAUDIOINPUT_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qobject.h>
+
+#include <private/qtmultimediaglobal_p.h>
+#include <private/qplatformaudioinput_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class Q_MULTIMEDIA_EXPORT QAndroidAudioInput : public QObject, public QPlatformAudioInput
+{
+ Q_OBJECT
+
+public:
+ explicit QAndroidAudioInput(QAudioInput *parent);
+ ~QAndroidAudioInput();
+
+ void setMuted(bool muted) override;
+
+ bool isMuted() const;
+
+Q_SIGNALS:
+ void mutedChanged(bool muted);
+
+private:
+ bool m_muted = false;
+
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/android/common/qandroidaudiooutput_p.h b/src/plugins/multimedia/android/common/qandroidaudiooutput_p.h
new file mode 100644
index 000000000..d5d25b458
--- /dev/null
+++ b/src/plugins/multimedia/android/common/qandroidaudiooutput_p.h
@@ -0,0 +1,30 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+#ifndef QANDROIDAUDIOOUTPUT_H
+#define QANDROIDAUDIOOUTPUT_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformaudiooutput_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class Q_MULTIMEDIA_EXPORT QAndroidAudioOutput : public QPlatformAudioOutput
+{
+public:
+ QAndroidAudioOutput(QAudioOutput *qq) : QPlatformAudioOutput(qq) {}
+};
+
+QT_END_NAMESPACE
+
+
+#endif // QANDROIDAUDIOOUTPUT_H
diff --git a/src/plugins/multimedia/android/common/qandroidglobal_p.h b/src/plugins/multimedia/android/common/qandroidglobal_p.h
new file mode 100644
index 000000000..1022fa061
--- /dev/null
+++ b/src/plugins/multimedia/android/common/qandroidglobal_p.h
@@ -0,0 +1,28 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDGLOBAL_H
+#define QANDROIDGLOBAL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qtmultimediaglobal_p.h>
+#include <QtCore/qglobal.h>
+#include <QtCore/qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+Q_DECLARE_LOGGING_CATEGORY(qtAndroidMediaPlugin)
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDGLOBAL_H
diff --git a/src/plugins/multimedia/android/common/qandroidmultimediautils.cpp b/src/plugins/multimedia/android/common/qandroidmultimediautils.cpp
new file mode 100644
index 000000000..6e4b95fe9
--- /dev/null
+++ b/src/plugins/multimedia/android/common/qandroidmultimediautils.cpp
@@ -0,0 +1,125 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidmultimediautils_p.h"
+#include "qandroidglobal_p.h"
+
+#include <qlist.h>
+#include <QtCore/qcoreapplication.h>
+#include <QtCore/qpermissions.h>
+#include <QtCore/private/qandroidextras_p.h>
+
+QT_BEGIN_NAMESPACE
+
+int qt_findClosestValue(const QList<int> &list, int value)
+{
+ if (list.size() < 2)
+ return 0;
+
+ int begin = 0;
+ int end = list.size() - 1;
+ int pivot = begin + (end - begin) / 2;
+ int v = list.at(pivot);
+
+ while (end - begin > 1) {
+ if (value == v)
+ return pivot;
+
+ if (value > v)
+ begin = pivot;
+ else
+ end = pivot;
+
+ pivot = begin + (end - begin) / 2;
+ v = list.at(pivot);
+ }
+
+ return value - v >= list.at(pivot + 1) - value ? pivot + 1 : pivot;
+}
+
+bool qt_sizeLessThan(const QSize &s1, const QSize &s2)
+{
+ return s1.width() * s1.height() < s2.width() * s2.height();
+}
+
+QVideoFrameFormat::PixelFormat qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat f)
+{
+ switch (f) {
+ case AndroidCamera::NV21:
+ return QVideoFrameFormat::Format_NV21;
+ case AndroidCamera::YV12:
+ return QVideoFrameFormat::Format_YV12;
+ case AndroidCamera::YUY2:
+ return QVideoFrameFormat::Format_YUYV;
+ case AndroidCamera::JPEG:
+ return QVideoFrameFormat::Format_Jpeg;
+ default:
+ return QVideoFrameFormat::Format_Invalid;
+ }
+}
+
+AndroidCamera::ImageFormat qt_androidImageFormatFromPixelFormat(QVideoFrameFormat::PixelFormat f)
+{
+ switch (f) {
+ case QVideoFrameFormat::Format_NV21:
+ return AndroidCamera::NV21;
+ case QVideoFrameFormat::Format_YV12:
+ return AndroidCamera::YV12;
+ case QVideoFrameFormat::Format_YUYV:
+ return AndroidCamera::YUY2;
+ case QVideoFrameFormat::Format_Jpeg:
+ return AndroidCamera::JPEG;
+ default:
+ return AndroidCamera::UnknownImageFormat;
+ }
+}
+
+static bool androidRequestPermission(const QString &permission)
+{
+ if (QNativeInterface::QAndroidApplication::sdkVersion() < 23)
+ return true;
+
+ // Permission already granted?
+ if (QtAndroidPrivate::checkPermission(permission).result() == QtAndroidPrivate::Authorized)
+ return true;
+
+ if (QtAndroidPrivate::requestPermission(permission).result() != QtAndroidPrivate::Authorized)
+ return false;
+
+ return true;
+}
+
+static bool androidCheckPermission(const QPermission &permission)
+{
+ return qApp->checkPermission(permission) == Qt::PermissionStatus::Granted;
+}
+
+bool qt_androidCheckCameraPermission()
+{
+ const QCameraPermission permission;
+ const auto granted = androidCheckPermission(permission);
+ if (!granted)
+ qCDebug(qtAndroidMediaPlugin, "Camera permission not granted!");
+ return granted;
+}
+
+bool qt_androidCheckMicrophonePermission()
+{
+ const QMicrophonePermission permission;
+ const auto granted = androidCheckPermission(permission);
+ if (!granted)
+ qCDebug(qtAndroidMediaPlugin, "Microphone permission not granted!");
+ return granted;
+}
+
+bool qt_androidRequestWriteStoragePermission()
+{
+ if (!androidRequestPermission(QStringLiteral("android.permission.WRITE_EXTERNAL_STORAGE"))) {
+ qCDebug(qtAndroidMediaPlugin, "Storage permission denied by user!");
+ return false;
+ }
+
+ return true;
+}
+
+QT_END_NAMESPACE
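
Since qt_findClosestValue() lives in a private header, here is a self-contained sketch (not part of the patch) of the same binary search it performs, shown on a hypothetical sorted list of supported frame rates:

#include <QList>
#include <QDebug>

// Sketch only: mirrors the closest-value lookup in qt_findClosestValue();
// returns the index of the list entry nearest to the requested value.
static int findClosestIndex(const QList<int> &sorted, int value)
{
    if (sorted.size() < 2)
        return 0;

    int begin = 0;
    int end = sorted.size() - 1;
    int pivot = begin + (end - begin) / 2;

    while (end - begin > 1) {
        const int v = sorted.at(pivot);
        if (value == v)
            return pivot;
        if (value > v)
            begin = pivot;
        else
            end = pivot;
        pivot = begin + (end - begin) / 2;
    }
    // pick whichever neighbour is closer to the requested value
    return value - sorted.at(pivot) >= sorted.at(pivot + 1) - value ? pivot + 1 : pivot;
}

int main()
{
    const QList<int> supportedFps { 15, 24, 30, 60 };
    qDebug() << supportedFps.at(findClosestIndex(supportedFps, 28)); // prints 30
    return 0;
}
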
diff --git a/src/plugins/multimedia/android/common/qandroidmultimediautils_p.h b/src/plugins/multimedia/android/common/qandroidmultimediautils_p.h
new file mode 100644
index 000000000..5fe841e8c
--- /dev/null
+++ b/src/plugins/multimedia/android/common/qandroidmultimediautils_p.h
@@ -0,0 +1,40 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDMULTIMEDIAUTILS_H
+#define QANDROIDMULTIMEDIAUTILS_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qglobal.h>
+#include <qsize.h>
+#include "androidcamera_p.h"
+
+QT_BEGIN_NAMESPACE
+
+// return the index of the closest value to <value> in <list>
+// (binary search)
+int qt_findClosestValue(const QList<int> &list, int value);
+
+bool qt_sizeLessThan(const QSize &s1, const QSize &s2);
+
+QVideoFrameFormat::PixelFormat qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat f);
+AndroidCamera::ImageFormat qt_androidImageFormatFromPixelFormat(QVideoFrameFormat::PixelFormat f);
+
+bool qt_androidRequestWriteStoragePermission();
+
+bool qt_androidCheckCameraPermission();
+bool qt_androidCheckMicrophonePermission();
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDMULTIMEDIAUTILS_H
diff --git a/src/plugins/multimedia/android/common/qandroidvideooutput.cpp b/src/plugins/multimedia/android/common/qandroidvideooutput.cpp
new file mode 100644
index 000000000..0724a8359
--- /dev/null
+++ b/src/plugins/multimedia/android/common/qandroidvideooutput.cpp
@@ -0,0 +1,468 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidvideooutput_p.h"
+#include "androidsurfacetexture_p.h"
+
+#include <rhi/qrhi.h>
+#include <QtGui/private/qopenglextensions_p.h>
+#include <private/qhwvideobuffer_p.h>
+#include <private/qvideoframeconverter_p.h>
+#include <private/qplatformvideosink_p.h>
+#include <private/qvideoframe_p.h>
+#include <qvideosink.h>
+#include <qopenglcontext.h>
+#include <qopenglfunctions.h>
+#include <qvideoframeformat.h>
+#include <qthread.h>
+#include <qfile.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAndroidVideoFrameTextures : public QVideoFrameTextures
+{
+public:
+ QAndroidVideoFrameTextures(QRhi *rhi, QSize size, quint64 handle)
+ {
+ m_tex.reset(rhi->newTexture(QRhiTexture::RGBA8, size, 1));
+ m_tex->createFrom({quint64(handle), 0});
+ }
+
+ QRhiTexture *texture(uint plane) const override
+ {
+ return plane == 0 ? m_tex.get() : nullptr;
+ }
+
+private:
+ std::unique_ptr<QRhiTexture> m_tex;
+};
+
+// QRhiWithThreadGuard keeps the QRhi and the QThread that created it alive to allow proper cleanup
+class QRhiWithThreadGuard : public QObject {
+ Q_OBJECT
+public:
+ QRhiWithThreadGuard(std::shared_ptr<QRhi> r, std::shared_ptr<AndroidTextureThread> t)
+ : m_guardRhi(std::move(r)), m_thread(std::move(t)) {}
+ ~QRhiWithThreadGuard();
+protected:
+ std::shared_ptr<QRhi> m_guardRhi;
+private:
+ std::shared_ptr<AndroidTextureThread> m_thread;
+};
+
+class AndroidTextureVideoBuffer : public QRhiWithThreadGuard, public QHwVideoBuffer
+{
+public:
+ AndroidTextureVideoBuffer(std::shared_ptr<QRhi> rhi,
+ std::shared_ptr<AndroidTextureThread> thread,
+ std::unique_ptr<QRhiTexture> tex, const QSize &size)
+ : QRhiWithThreadGuard(std::move(rhi), std::move(thread)),
+ QHwVideoBuffer(QVideoFrame::RhiTextureHandle, m_guardRhi.get()),
+ m_size(size),
+ m_tex(std::move(tex))
+ {}
+
+ MapData map(QtVideo::MapMode mode) override;
+
+ void unmap() override
+ {
+ m_image = {};
+ m_mapMode = QtVideo::MapMode::NotMapped;
+ }
+
+ std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *rhi) override
+ {
+ return std::make_unique<QAndroidVideoFrameTextures>(rhi, m_size, m_tex->nativeTexture().object);
+ }
+
+private:
+ QSize m_size;
+ std::unique_ptr<QRhiTexture> m_tex;
+ QImage m_image;
+ QtVideo::MapMode m_mapMode = QtVideo::MapMode::NotMapped;
+};
+
+class ImageFromVideoFrameHelper : public QHwVideoBuffer
+{
+public:
+ ImageFromVideoFrameHelper(AndroidTextureVideoBuffer &atvb)
+ : QHwVideoBuffer(QVideoFrame::RhiTextureHandle, atvb.rhi()), m_atvb(atvb)
+ {}
+ std::unique_ptr<QVideoFrameTextures> mapTextures(QRhi *rhi) override
+ {
+ return m_atvb.mapTextures(rhi);
+ }
+
+ MapData map(QtVideo::MapMode) override { return {}; }
+ void unmap() override {}
+
+private:
+ AndroidTextureVideoBuffer &m_atvb;
+};
+
+QAbstractVideoBuffer::MapData AndroidTextureVideoBuffer::map(QtVideo::MapMode mode)
+{
+ QAbstractVideoBuffer::MapData mapData;
+
+ if (m_mapMode == QtVideo::MapMode::NotMapped && mode == QtVideo::MapMode::ReadOnly) {
+ m_mapMode = QtVideo::MapMode::ReadOnly;
+ m_image = qImageFromVideoFrame(QVideoFramePrivate::createFrame(
+ std::make_unique<ImageFromVideoFrameHelper>(*this),
+ QVideoFrameFormat(m_size, QVideoFrameFormat::Format_RGBA8888)));
+ mapData.planeCount = 1;
+ mapData.bytesPerLine[0] = m_image.bytesPerLine();
+ mapData.dataSize[0] = static_cast<int>(m_image.sizeInBytes());
+ mapData.data[0] = m_image.bits();
+ }
+
+ return mapData;
+}
+
+static const float g_quad[] = {
+ -1.f, -1.f, 0.f, 0.f,
+ -1.f, 1.f, 0.f, 1.f,
+ 1.f, 1.f, 1.f, 1.f,
+ 1.f, -1.f, 1.f, 0.f
+};
+
+class TextureCopy
+{
+ static QShader getShader(const QString &name)
+ {
+ QFile f(name);
+ if (f.open(QIODevice::ReadOnly))
+ return QShader::fromSerialized(f.readAll());
+ return {};
+ }
+
+public:
+ TextureCopy(QRhi *rhi, QRhiTexture *externalTex)
+ : m_rhi(rhi)
+ {
+ m_vertexBuffer.reset(m_rhi->newBuffer(QRhiBuffer::Immutable, QRhiBuffer::VertexBuffer, sizeof(g_quad)));
+ m_vertexBuffer->create();
+
+ m_uniformBuffer.reset(m_rhi->newBuffer(QRhiBuffer::Dynamic, QRhiBuffer::UniformBuffer, 64 + 64 + 4 + 4));
+ m_uniformBuffer->create();
+
+ m_sampler.reset(m_rhi->newSampler(QRhiSampler::Nearest, QRhiSampler::Nearest, QRhiSampler::None,
+ QRhiSampler::ClampToEdge, QRhiSampler::ClampToEdge));
+ m_sampler->create();
+
+ m_srb.reset(m_rhi->newShaderResourceBindings());
+ m_srb->setBindings({
+ QRhiShaderResourceBinding::uniformBuffer(0, QRhiShaderResourceBinding::VertexStage | QRhiShaderResourceBinding::FragmentStage, m_uniformBuffer.get()),
+ QRhiShaderResourceBinding::sampledTexture(1, QRhiShaderResourceBinding::FragmentStage, externalTex, m_sampler.get())
+ });
+ m_srb->create();
+
+ m_vertexShader = getShader(QStringLiteral(":/qt-project.org/multimedia/shaders/externalsampler.vert.qsb"));
+ Q_ASSERT(m_vertexShader.isValid());
+ m_fragmentShader = getShader(QStringLiteral(":/qt-project.org/multimedia/shaders/externalsampler.frag.qsb"));
+ Q_ASSERT(m_fragmentShader.isValid());
+ }
+
+ std::unique_ptr<QRhiTexture> copyExternalTexture(QSize size, const QMatrix4x4 &externalTexMatrix);
+
+private:
+ QRhi *m_rhi = nullptr;
+ std::unique_ptr<QRhiBuffer> m_vertexBuffer;
+ std::unique_ptr<QRhiBuffer> m_uniformBuffer;
+ std::unique_ptr<QRhiSampler> m_sampler;
+ std::unique_ptr<QRhiShaderResourceBindings> m_srb;
+ QShader m_vertexShader;
+ QShader m_fragmentShader;
+};
+
+static std::unique_ptr<QRhiGraphicsPipeline> newGraphicsPipeline(QRhi *rhi,
+ QRhiShaderResourceBindings *shaderResourceBindings,
+ QRhiRenderPassDescriptor *renderPassDescriptor,
+ QShader vertexShader,
+ QShader fragmentShader)
+{
+ std::unique_ptr<QRhiGraphicsPipeline> gp(rhi->newGraphicsPipeline());
+ gp->setTopology(QRhiGraphicsPipeline::TriangleFan);
+ gp->setShaderStages({
+ { QRhiShaderStage::Vertex, vertexShader },
+ { QRhiShaderStage::Fragment, fragmentShader }
+ });
+ QRhiVertexInputLayout inputLayout;
+ inputLayout.setBindings({
+ { 4 * sizeof(float) }
+ });
+ inputLayout.setAttributes({
+ { 0, 0, QRhiVertexInputAttribute::Float2, 0 },
+ { 0, 1, QRhiVertexInputAttribute::Float2, 2 * sizeof(float) }
+ });
+ gp->setVertexInputLayout(inputLayout);
+ gp->setShaderResourceBindings(shaderResourceBindings);
+ gp->setRenderPassDescriptor(renderPassDescriptor);
+ gp->create();
+
+ return gp;
+}
+
+std::unique_ptr<QRhiTexture> TextureCopy::copyExternalTexture(QSize size, const QMatrix4x4 &externalTexMatrix)
+{
+ std::unique_ptr<QRhiTexture> tex(m_rhi->newTexture(QRhiTexture::RGBA8, size, 1, QRhiTexture::RenderTarget));
+ if (!tex->create()) {
+ qWarning("Failed to create frame texture");
+ return {};
+ }
+
+ std::unique_ptr<QRhiTextureRenderTarget> renderTarget(m_rhi->newTextureRenderTarget({ { tex.get() } }));
+ std::unique_ptr<QRhiRenderPassDescriptor> renderPassDescriptor(renderTarget->newCompatibleRenderPassDescriptor());
+ renderTarget->setRenderPassDescriptor(renderPassDescriptor.get());
+ renderTarget->create();
+
+ QRhiResourceUpdateBatch *rub = m_rhi->nextResourceUpdateBatch();
+ rub->uploadStaticBuffer(m_vertexBuffer.get(), g_quad);
+
+ QMatrix4x4 identity;
+ char *p = m_uniformBuffer->beginFullDynamicBufferUpdateForCurrentFrame();
+ memcpy(p, identity.constData(), 64);
+ memcpy(p + 64, externalTexMatrix.constData(), 64);
+ float opacity = 1.0f;
+ memcpy(p + 64 + 64, &opacity, 4);
+ m_uniformBuffer->endFullDynamicBufferUpdateForCurrentFrame();
+
+ auto graphicsPipeline = newGraphicsPipeline(m_rhi, m_srb.get(), renderPassDescriptor.get(),
+ m_vertexShader, m_fragmentShader);
+
+ const QRhiCommandBuffer::VertexInput vbufBinding(m_vertexBuffer.get(), 0);
+
+ QRhiCommandBuffer *cb = nullptr;
+ if (m_rhi->beginOffscreenFrame(&cb) != QRhi::FrameOpSuccess)
+ return {};
+
+ cb->beginPass(renderTarget.get(), Qt::transparent, { 1.0f, 0 }, rub);
+ cb->setGraphicsPipeline(graphicsPipeline.get());
+ cb->setViewport({0, 0, float(size.width()), float(size.height())});
+ cb->setShaderResources(m_srb.get());
+ cb->setVertexInput(0, 1, &vbufBinding);
+ cb->draw(4);
+ cb->endPass();
+ m_rhi->endOffscreenFrame();
+
+ QOpenGLContext *ctx = QOpenGLContext::currentContext();
+ QOpenGLFunctions *f = ctx->functions();
+ static_cast<QOpenGLExtensions *>(f)->flushShared();
+
+ return tex;
+}
+
+static QMatrix4x4 extTransformMatrix(AndroidSurfaceTexture *surfaceTexture)
+{
+ QMatrix4x4 m = surfaceTexture->getTransformMatrix();
+ // flip it back, see
+ // http://androidxref.com/9.0.0_r3/xref/frameworks/native/libs/gui/GLConsumer.cpp#866
+ // (NB our matrix ctor takes row major)
+ static const QMatrix4x4 flipV(1.0f, 0.0f, 0.0f, 0.0f,
+ 0.0f, -1.0f, 0.0f, 1.0f,
+ 0.0f, 0.0f, 1.0f, 0.0f,
+ 0.0f, 0.0f, 0.0f, 1.0f);
+ m *= flipV;
+ return m;
+}
+
+class AndroidTextureThread : public QThread
+{
+ Q_OBJECT
+public:
+ AndroidTextureThread(QAndroidTextureVideoOutput * vo)
+ : QThread()
+ , m_videoOutput(vo)
+ {
+ }
+
+ ~AndroidTextureThread() {
+ QMetaObject::invokeMethod(this,
+ &AndroidTextureThread::clearSurfaceTexture, Qt::BlockingQueuedConnection);
+ this->quit();
+ this->wait();
+ }
+
+ void start()
+ {
+ QThread::start();
+ moveToThread(this);
+ }
+
+ void initRhi(QOpenGLContext *context)
+ {
+ QRhiGles2InitParams params;
+ params.shareContext = context;
+ params.fallbackSurface = QRhiGles2InitParams::newFallbackSurface();
+ m_rhi.reset(QRhi::create(QRhi::OpenGLES2, &params));
+ }
+
+public slots:
+ void onFrameAvailable(quint64 index)
+ {
+ // Check that 'm_surfaceTexture' has not been reset and that the current index matches the
+ // one used when creating the connection, because there can be pending frames in the queue.
+ if (m_surfaceTexture && m_surfaceTexture->index() == index) {
+ m_surfaceTexture->updateTexImage();
+ auto matrix = extTransformMatrix(m_surfaceTexture.get());
+ auto tex = m_textureCopy->copyExternalTexture(m_size, matrix);
+ auto *buffer = new AndroidTextureVideoBuffer(m_rhi, m_videoOutput->getSurfaceThread(), std::move(tex), m_size);
+ QVideoFrame frame(buffer, QVideoFrameFormat(m_size, QVideoFrameFormat::Format_RGBA8888));
+ emit newFrame(frame);
+ }
+ }
+
+ void clearFrame() { emit newFrame({}); }
+
+ void setFrameSize(QSize size) { m_size = size; }
+
+ void clearSurfaceTexture()
+ {
+ m_surfaceTexture.reset();
+ m_texture.reset();
+ m_textureCopy.reset();
+ m_rhi.reset();
+ }
+
+ AndroidSurfaceTexture *createSurfaceTexture(QRhi *rhi)
+ {
+ if (m_surfaceTexture)
+ return m_surfaceTexture.get();
+
+ QOpenGLContext *ctx = rhi
+ ? static_cast<const QRhiGles2NativeHandles *>(rhi->nativeHandles())->context
+ : nullptr;
+ initRhi(ctx);
+
+ m_texture.reset(m_rhi->newTexture(QRhiTexture::RGBA8, m_size, 1, QRhiTexture::ExternalOES));
+ m_texture->create();
+ m_surfaceTexture = std::make_unique<AndroidSurfaceTexture>(m_texture->nativeTexture().object);
+ if (m_surfaceTexture->surfaceTexture()) {
+ const quint64 index = m_surfaceTexture->index();
+ connect(m_surfaceTexture.get(), &AndroidSurfaceTexture::frameAvailable, this,
+ [this, index] () { this->onFrameAvailable(index); });
+
+ m_textureCopy = std::make_unique<TextureCopy>(m_rhi.get(), m_texture.get());
+
+ } else {
+ m_texture.reset();
+ m_surfaceTexture.reset();
+ }
+
+ return m_surfaceTexture.get();
+ }
+
+signals:
+ void newFrame(const QVideoFrame &);
+
+private:
+ QAndroidTextureVideoOutput * m_videoOutput;
+ std::shared_ptr<QRhi> m_rhi;
+ std::unique_ptr<AndroidSurfaceTexture> m_surfaceTexture;
+ std::unique_ptr<QRhiTexture> m_texture;
+ std::unique_ptr<TextureCopy> m_textureCopy;
+ QSize m_size;
+};
+
+QRhiWithThreadGuard::~QRhiWithThreadGuard() {
+ // Resetting the m_rhi shared_ptr may delete the QRhi (if this is the last reference).
+ // The QRhi needs to be deleted from the thread that created it.
+ QMetaObject::invokeMethod(m_thread.get(), [&]() {m_guardRhi.reset();}, Qt::BlockingQueuedConnection);
+}
+
+QAndroidTextureVideoOutput::QAndroidTextureVideoOutput(QVideoSink *sink, QObject *parent)
+ : QAndroidVideoOutput(parent)
+ , m_sink(sink)
+{
+ if (!m_sink) {
+ qDebug() << "Cannot create QAndroidTextureVideoOutput without a sink.";
+ m_surfaceThread = nullptr;
+ return;
+ }
+
+ startNewSurfaceThread();
+}
+
+void QAndroidTextureVideoOutput::startNewSurfaceThread()
+{
+ m_surfaceThread = std::make_shared<AndroidTextureThread>(this);
+ connect(m_surfaceThread.get(), &AndroidTextureThread::newFrame,
+ this, &QAndroidTextureVideoOutput::newFrame);
+ m_surfaceThread->start();
+}
+
+QAndroidTextureVideoOutput::~QAndroidTextureVideoOutput()
+{
+ // Make sure that no more QVideoFrames will be created by the surface thread
+ QMetaObject::invokeMethod(m_surfaceThread.get(),
+ &AndroidTextureThread::clearSurfaceTexture, Qt::BlockingQueuedConnection);
+}
+
+void QAndroidTextureVideoOutput::setSubtitle(const QString &subtitle)
+{
+ if (m_sink) {
+ auto *sink = m_sink->platformVideoSink();
+ if (sink)
+ sink->setSubtitleText(subtitle);
+ }
+}
+
+bool QAndroidTextureVideoOutput::shouldTextureBeUpdated() const
+{
+ return m_sink->rhi() && m_surfaceCreatedWithoutRhi;
+}
+
+AndroidSurfaceTexture *QAndroidTextureVideoOutput::surfaceTexture()
+{
+ if (!m_sink)
+ return nullptr;
+
+ AndroidSurfaceTexture *surface = nullptr;
+ QMetaObject::invokeMethod(m_surfaceThread.get(), [&]() {
+ auto rhi = m_sink->rhi();
+ if (!rhi) {
+ m_surfaceCreatedWithoutRhi = true;
+ }
+ else if (m_surfaceCreatedWithoutRhi) {
+ m_surfaceThread->clearSurfaceTexture();
+ m_surfaceCreatedWithoutRhi = false;
+ }
+ surface = m_surfaceThread->createSurfaceTexture(rhi);
+ },
+ Qt::BlockingQueuedConnection);
+ return surface;
+}
+
+void QAndroidTextureVideoOutput::setVideoSize(const QSize &size)
+{
+ if (m_nativeSize == size)
+ return;
+
+ m_nativeSize = size;
+ QMetaObject::invokeMethod(m_surfaceThread.get(),
+ [&](){ m_surfaceThread->setFrameSize(size); },
+ Qt::BlockingQueuedConnection);
+}
+
+void QAndroidTextureVideoOutput::stop()
+{
+ m_nativeSize = {};
+ QMetaObject::invokeMethod(m_surfaceThread.get(), [&](){ m_surfaceThread->clearFrame(); });
+}
+
+void QAndroidTextureVideoOutput::reset()
+{
+ if (m_sink)
+ m_sink->platformVideoSink()->setVideoFrame({});
+ QMetaObject::invokeMethod(m_surfaceThread.get(), &AndroidTextureThread::clearSurfaceTexture);
+}
+
+void QAndroidTextureVideoOutput::newFrame(const QVideoFrame &frame)
+{
+ if (m_sink)
+ m_sink->setVideoFrame(frame);
+}
+
+QT_END_NAMESPACE
+
+#include "qandroidvideooutput.moc"
+#include "moc_qandroidvideooutput_p.cpp"
diff --git a/src/plugins/multimedia/android/common/qandroidvideooutput_p.h b/src/plugins/multimedia/android/common/qandroidvideooutput_p.h
new file mode 100644
index 000000000..7c9be5aee
--- /dev/null
+++ b/src/plugins/multimedia/android/common/qandroidvideooutput_p.h
@@ -0,0 +1,93 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDVIDEOOUTPUT_H
+#define QANDROIDVIDEOOUTPUT_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qsize.h>
+#include <qmutex.h>
+#include <qreadwritelock.h>
+#include <qabstractvideobuffer.h>
+#include <qmatrix4x4.h>
+#include <qoffscreensurface.h>
+#include <rhi/qrhi.h>
+
+QT_BEGIN_NAMESPACE
+
+class AndroidSurfaceTexture;
+class AndroidSurfaceHolder;
+class QVideoSink;
+
+class QAndroidVideoOutput : public QObject
+{
+ Q_OBJECT
+public:
+ virtual ~QAndroidVideoOutput() { }
+
+ virtual AndroidSurfaceTexture *surfaceTexture() { return 0; }
+ virtual AndroidSurfaceHolder *surfaceHolder() { return 0; }
+
+ virtual bool isReady() { return true; }
+
+ virtual void setVideoSize(const QSize &) { }
+ virtual void start() { }
+ virtual void stop() { }
+ virtual void reset() { }
+ virtual QSize getVideoSize() const { return QSize(0, 0); }
+
+Q_SIGNALS:
+ void readyChanged(bool);
+
+protected:
+ QAndroidVideoOutput(QObject *parent) : QObject(parent) { }
+};
+
+class AndroidTextureThread;
+class QAndroidTextureVideoOutput : public QAndroidVideoOutput
+{
+ Q_OBJECT
+public:
+ explicit QAndroidTextureVideoOutput(QVideoSink *sink, QObject *parent = 0);
+ ~QAndroidTextureVideoOutput() override;
+
+ QVideoSink *surface() const { return m_sink; }
+ bool shouldTextureBeUpdated() const;
+
+ AndroidSurfaceTexture *surfaceTexture() override;
+
+ void setVideoSize(const QSize &) override;
+ void stop() override;
+ void reset() override;
+ QSize getVideoSize() const override { return m_nativeSize; }
+
+ void setSubtitle(const QString &subtitle);
+ std::shared_ptr<AndroidTextureThread> getSurfaceThread() { return m_surfaceThread; }
+private Q_SLOTS:
+ void newFrame(const QVideoFrame &);
+
+private:
+ void startNewSurfaceThread();
+ QVideoSink *m_sink = nullptr;
+ QSize m_nativeSize;
+ bool m_surfaceCreatedWithoutRhi = false;
+
+ std::shared_ptr<AndroidTextureThread> m_surfaceThread;
+};
+
+QT_END_NAMESPACE
+
+Q_DECLARE_METATYPE(QList<QRhiResource *>)
+Q_DECLARE_METATYPE(QRhi*)
+
+#endif // QANDROIDVIDEOOUTPUT_H
diff --git a/src/plugins/multimedia/android/common/qandroidvideosink.cpp b/src/plugins/multimedia/android/common/qandroidvideosink.cpp
new file mode 100644
index 000000000..3da5eab31
--- /dev/null
+++ b/src/plugins/multimedia/android/common/qandroidvideosink.cpp
@@ -0,0 +1,35 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidvideosink_p.h"
+#include <rhi/qrhi.h>
+
+#include <QtCore/qdebug.h>
+
+#include <QtCore/qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+QAndroidVideoSink::QAndroidVideoSink(QVideoSink *parent)
+ : QPlatformVideoSink(parent)
+{
+}
+
+QAndroidVideoSink::~QAndroidVideoSink()
+{
+}
+
+void QAndroidVideoSink::setRhi(QRhi *rhi)
+{
+ if (rhi && rhi->backend() != QRhi::OpenGLES2)
+ rhi = nullptr;
+ if (m_rhi == rhi)
+ return;
+
+ m_rhi = rhi;
+ emit rhiChanged(rhi);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qandroidvideosink_p.cpp"
diff --git a/src/plugins/multimedia/android/common/qandroidvideosink_p.h b/src/plugins/multimedia/android/common/qandroidvideosink_p.h
new file mode 100644
index 000000000..9afc58f65
--- /dev/null
+++ b/src/plugins/multimedia/android/common/qandroidvideosink_p.h
@@ -0,0 +1,41 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDVIDEOSINK_P_H
+#define QANDROIDVIDEOSINK_P_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qtmultimediaglobal_p.h>
+#include <private/qplatformvideosink_p.h>
+
+#include <qvideosink.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAndroidVideoSink
+ : public QPlatformVideoSink
+{
+ Q_OBJECT
+public:
+ explicit QAndroidVideoSink(QVideoSink *parent = 0);
+ ~QAndroidVideoSink();
+
+ void setRhi(QRhi *rhi) override;
+
+private:
+ QRhi *m_rhi = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidcamera.cpp b/src/plugins/multimedia/android/mediacapture/qandroidcamera.cpp
new file mode 100644
index 000000000..52d2e00f6
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidcamera.cpp
@@ -0,0 +1,562 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidcamera_p.h"
+#include "qandroidcamerasession_p.h"
+#include "qandroidcapturesession_p.h"
+#include "qandroidmediacapturesession_p.h"
+#include <qmediadevices.h>
+#include <qcameradevice.h>
+#include <qtimer.h>
+#include "qandroidmultimediautils_p.h"
+
+QT_BEGIN_NAMESPACE
+
+QAndroidCamera::QAndroidCamera(QCamera *camera)
+ : QPlatformCamera(camera)
+{
+ Q_ASSERT(camera);
+}
+
+QAndroidCamera::~QAndroidCamera()
+{
+}
+
+void QAndroidCamera::setActive(bool active)
+{
+ if (m_cameraSession) {
+ m_cameraSession->setActive(active);
+ } else {
+ isPendingSetActive = active;
+ }
+}
+
+bool QAndroidCamera::isActive() const
+{
+ return m_cameraSession ? m_cameraSession->isActive() : false;
+}
+
+void QAndroidCamera::setCamera(const QCameraDevice &camera)
+{
+ m_cameraDev = camera;
+
+ if (m_cameraSession) {
+ int id = 0;
+ auto cameras = QMediaDevices::videoInputs();
+ for (int i = 0; i < cameras.size(); ++i) {
+ if (cameras.at(i) == camera) {
+ id = i;
+ break;
+ }
+ }
+ if (id != m_cameraSession->getSelectedCameraId()) {
+ m_cameraSession->setSelectedCameraId(id);
+ reactivateCameraSession();
+ }
+ }
+}
+
+void QAndroidCamera::reactivateCameraSession()
+{
+ if (m_cameraSession->isActive()) {
+ if (m_service->captureSession() &&
+ m_service->captureSession()->state() == QMediaRecorder::RecordingState) {
+ m_service->captureSession()->stop();
+ qWarning() << "Changing camera during recording not supported";
+ }
+ m_cameraSession->setActive(false);
+ m_cameraSession->setActive(true);
+ }
+}
+
+bool QAndroidCamera::setCameraFormat(const QCameraFormat &format)
+{
+ m_cameraFormat = format;
+
+ if (m_cameraSession)
+ m_cameraSession->setCameraFormat(m_cameraFormat);
+
+ return true;
+}
+
+void QAndroidCamera::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+ QAndroidMediaCaptureSession *captureSession = static_cast<QAndroidMediaCaptureSession *>(session);
+ if (m_service == captureSession)
+ return;
+
+ m_service = captureSession;
+ if (!m_service) {
+ disconnect(m_cameraSession,nullptr,this,nullptr);
+ m_cameraSession = nullptr;
+ return;
+ }
+
+ m_cameraSession = m_service->cameraSession();
+ Q_ASSERT(m_cameraSession);
+ if (!m_cameraFormat.isNull())
+ m_cameraSession->setCameraFormat(m_cameraFormat);
+
+ setCamera(m_cameraDev);
+
+ connect(m_cameraSession, &QAndroidCameraSession::activeChanged, this, &QAndroidCamera::activeChanged);
+ connect(m_cameraSession, &QAndroidCameraSession::error, this, &QAndroidCamera::error);
+ connect(m_cameraSession, &QAndroidCameraSession::opened, this, &QAndroidCamera::onCameraOpened);
+
+ if (isPendingSetActive) {
+ setActive(true);
+ isPendingSetActive = false;
+ }
+}
+
+void QAndroidCamera::setFocusMode(QCamera::FocusMode mode)
+{
+ if (!m_cameraSession || !m_cameraSession->camera())
+ return;
+
+ if (isFocusModeSupported(mode)) {
+ QString focusMode;
+
+ switch (mode) {
+ case QCamera::FocusModeHyperfocal:
+ focusMode = QLatin1String("edof");
+ break;
+ case QCamera::FocusModeInfinity: // not 100%, but close
+ focusMode = QLatin1String("infinity");
+ break;
+ case QCamera::FocusModeManual:
+ focusMode = QLatin1String("fixed");
+ break;
+ case QCamera::FocusModeAutoNear:
+ focusMode = QLatin1String("macro");
+ break;
+ case QCamera::FocusModeAuto:
+ case QCamera::FocusModeAutoFar:
+ if (1) { // ###?
+ focusMode = QLatin1String("continuous-video");
+ } else {
+ focusMode = QLatin1String("continuous-picture");
+ }
+ break;
+ }
+
+ m_cameraSession->camera()->setFocusMode(focusMode);
+
+ // reset focus position
+ m_cameraSession->camera()->cancelAutoFocus();
+
+ focusModeChanged(mode);
+ }
+}
+
+bool QAndroidCamera::isFocusModeSupported(QCamera::FocusMode mode) const
+{
+ return (m_cameraSession && m_cameraSession->camera()) ? m_supportedFocusModes.contains(mode) : false;
+}
+
+void QAndroidCamera::onCameraOpened()
+{
+ Q_ASSERT(m_cameraSession);
+ connect(m_cameraSession->camera(), &AndroidCamera::previewSizeChanged, this, &QAndroidCamera::setCameraFocusArea);
+
+ m_supportedFocusModes.clear();
+ m_continuousPictureFocusSupported = false;
+ m_continuousVideoFocusSupported = false;
+ m_focusPointSupported = false;
+
+ QStringList focusModes = m_cameraSession->camera()->getSupportedFocusModes();
+ for (int i = 0; i < focusModes.size(); ++i) {
+ const QString &focusMode = focusModes.at(i);
+ if (focusMode == QLatin1String("continuous-picture")) {
+ m_supportedFocusModes << QCamera::FocusModeAuto;
+ m_continuousPictureFocusSupported = true;
+ } else if (focusMode == QLatin1String("continuous-video")) {
+ m_supportedFocusModes << QCamera::FocusModeAuto;
+ m_continuousVideoFocusSupported = true;
+ } else if (focusMode == QLatin1String("edof")) {
+ m_supportedFocusModes << QCamera::FocusModeHyperfocal;
+ } else if (focusMode == QLatin1String("fixed")) {
+ m_supportedFocusModes << QCamera::FocusModeManual;
+ } else if (focusMode == QLatin1String("infinity")) {
+ m_supportedFocusModes << QCamera::FocusModeInfinity;
+ } else if (focusMode == QLatin1String("macro")) {
+ m_supportedFocusModes << QCamera::FocusModeAutoNear;
+ }
+ }
+
+ if (m_cameraSession->camera()->getMaxNumFocusAreas() > 0)
+ m_focusPointSupported = true;
+
+ auto m = focusMode();
+ if (!m_supportedFocusModes.contains(m))
+ m = QCamera::FocusModeAuto;
+
+ setFocusMode(m);
+ setCustomFocusPoint(focusPoint());
+
+ if (m_cameraSession->camera()->isZoomSupported()) {
+ m_zoomRatios = m_cameraSession->camera()->getZoomRatios();
+ qreal maxZoom = m_zoomRatios.last() / qreal(100);
+ maximumZoomFactorChanged(maxZoom);
+ zoomTo(1, -1);
+ } else {
+ m_zoomRatios.clear();
+ maximumZoomFactorChanged(1.0);
+ }
+
+ m_minExposureCompensationIndex = m_cameraSession->camera()->getMinExposureCompensation();
+ m_maxExposureCompensationIndex = m_cameraSession->camera()->getMaxExposureCompensation();
+ m_exposureCompensationStep = m_cameraSession->camera()->getExposureCompensationStep();
+ exposureCompensationRangeChanged(m_minExposureCompensationIndex*m_exposureCompensationStep,
+ m_maxExposureCompensationIndex*m_exposureCompensationStep);
+
+ m_supportedExposureModes.clear();
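+ // Android exposes exposure presets as "scene modes"; map them to QCamera::ExposureMode.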
+ QStringList sceneModes = m_cameraSession->camera()->getSupportedSceneModes();
+ if (!sceneModes.isEmpty()) {
+ for (int i = 0; i < sceneModes.size(); ++i) {
+ const QString &sceneMode = sceneModes.at(i);
+ if (sceneMode == QLatin1String("auto"))
+ m_supportedExposureModes << QCamera::ExposureAuto;
+ else if (sceneMode == QLatin1String("beach"))
+ m_supportedExposureModes << QCamera::ExposureBeach;
+ else if (sceneMode == QLatin1String("night"))
+ m_supportedExposureModes << QCamera::ExposureNight;
+ else if (sceneMode == QLatin1String("portrait"))
+ m_supportedExposureModes << QCamera::ExposurePortrait;
+ else if (sceneMode == QLatin1String("snow"))
+ m_supportedExposureModes << QCamera::ExposureSnow;
+ else if (sceneMode == QLatin1String("sports"))
+ m_supportedExposureModes << QCamera::ExposureSports;
+ else if (sceneMode == QLatin1String("action"))
+ m_supportedExposureModes << QCamera::ExposureAction;
+ else if (sceneMode == QLatin1String("landscape"))
+ m_supportedExposureModes << QCamera::ExposureLandscape;
+ else if (sceneMode == QLatin1String("night-portrait"))
+ m_supportedExposureModes << QCamera::ExposureNightPortrait;
+ else if (sceneMode == QLatin1String("theatre"))
+ m_supportedExposureModes << QCamera::ExposureTheatre;
+ else if (sceneMode == QLatin1String("sunset"))
+ m_supportedExposureModes << QCamera::ExposureSunset;
+ else if (sceneMode == QLatin1String("steadyphoto"))
+ m_supportedExposureModes << QCamera::ExposureSteadyPhoto;
+ else if (sceneMode == QLatin1String("fireworks"))
+ m_supportedExposureModes << QCamera::ExposureFireworks;
+ else if (sceneMode == QLatin1String("party"))
+ m_supportedExposureModes << QCamera::ExposureParty;
+ else if (sceneMode == QLatin1String("candlelight"))
+ m_supportedExposureModes << QCamera::ExposureCandlelight;
+ else if (sceneMode == QLatin1String("barcode"))
+ m_supportedExposureModes << QCamera::ExposureBarcode;
+ }
+ }
+
+ setExposureCompensation(exposureCompensation());
+ setExposureMode(exposureMode());
+
+ isFlashSupported = false;
+ isFlashAutoSupported = false;
+ isTorchSupported = false;
+
+ QStringList flashModes = m_cameraSession->camera()->getSupportedFlashModes();
+ for (int i = 0; i < flashModes.size(); ++i) {
+ const QString &flashMode = flashModes.at(i);
+ if (flashMode == QLatin1String("auto"))
+ isFlashAutoSupported = true;
+ else if (flashMode == QLatin1String("on"))
+ isFlashSupported = true;
+ else if (flashMode == QLatin1String("torch"))
+ isTorchSupported = true;
+ }
+
+ setFlashMode(flashMode());
+
+ m_supportedWhiteBalanceModes.clear();
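+ // Map the Android white-balance strings to QCamera::WhiteBalanceMode, keeping the
+ // native string so setWhiteBalanceMode() can hand it back to the camera.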
+ QStringList whiteBalanceModes = m_cameraSession->camera()->getSupportedWhiteBalance();
+ for (int i = 0; i < whiteBalanceModes.size(); ++i) {
+ const QString &wb = whiteBalanceModes.at(i);
+ if (wb == QLatin1String("auto")) {
+ m_supportedWhiteBalanceModes.insert(QCamera::WhiteBalanceAuto,
+ QStringLiteral("auto"));
+ } else if (wb == QLatin1String("cloudy-daylight")) {
+ m_supportedWhiteBalanceModes.insert(QCamera::WhiteBalanceCloudy,
+ QStringLiteral("cloudy-daylight"));
+ } else if (wb == QLatin1String("daylight")) {
+ m_supportedWhiteBalanceModes.insert(QCamera::WhiteBalanceSunlight,
+ QStringLiteral("daylight"));
+ } else if (wb == QLatin1String("fluorescent")) {
+ m_supportedWhiteBalanceModes.insert(QCamera::WhiteBalanceFluorescent,
+ QStringLiteral("fluorescent"));
+ } else if (wb == QLatin1String("incandescent")) {
+ m_supportedWhiteBalanceModes.insert(QCamera::WhiteBalanceTungsten,
+ QStringLiteral("incandescent"));
+ } else if (wb == QLatin1String("shade")) {
+ m_supportedWhiteBalanceModes.insert(QCamera::WhiteBalanceShade,
+ QStringLiteral("shade"));
+ } else if (wb == QLatin1String("twilight")) {
+ m_supportedWhiteBalanceModes.insert(QCamera::WhiteBalanceSunset,
+ QStringLiteral("twilight"));
+ } else if (wb == QLatin1String("warm-fluorescent")) {
+ m_supportedWhiteBalanceModes.insert(QCamera::WhiteBalanceFlash,
+ QStringLiteral("warm-fluorescent"));
+ }
+ }
+}
+
+//void QAndroidCameraFocusControl::onCameraCaptureModeChanged()
+//{
+// if (m_cameraSession->camera() && m_focusMode == QCamera::FocusModeAuto) {
+// QString focusMode;
+// if ((m_cameraSession->captureMode().testFlag(QCamera::CaptureVideo) && m_continuousVideoFocusSupported)
+// || !m_continuousPictureFocusSupported) {
+// focusMode = QLatin1String("continuous-video");
+// } else {
+// focusMode = QLatin1String("continuous-picture");
+// }
+// m_cameraSession->camera()->setFocusMode(focusMode);
+// m_cameraSession->camera()->cancelAutoFocus();
+// }
+//}
+
+static QRect adjustedArea(const QRectF &area)
+{
+ // Qt maps focus points in the range (0.0, 0.0) -> (1.0, 1.0)
+ // Android maps focus points in the range (-1000, -1000) -> (1000, 1000)
+ // Converts an area in Qt coordinates to Android coordinates
+ return QRect(-1000 + qRound(area.x() * 2000),
+ -1000 + qRound(area.y() * 2000),
+ qRound(area.width() * 2000),
+ qRound(area.height() * 2000))
+ .intersected(QRect(-1000, -1000, 2000, 2000));
+}
+
+void QAndroidCamera::setCameraFocusArea()
+{
+ if (!m_cameraSession)
+ return;
+
+ QList<QRect> areas;
+ auto focusPoint = customFocusPoint();
+ if (QRectF(0., 0., 1., 1.).contains(focusPoint)) {
+ // A custom focus point is set: build a focus area around it. If no valid point
+ // is set, the area list stays empty and the driver chooses the focus point.
+ QSize viewportSize = m_cameraSession->camera()->previewSize();
+
+ if (!viewportSize.isValid())
+ return;
+
+ // Set up a 50x50 pixel focus area around the focal point
+ QSizeF focusSize(50.f / viewportSize.width(), 50.f / viewportSize.height());
+ float x = qBound(qreal(0),
+ focusPoint.x() - (focusSize.width() / 2),
+ 1.f - focusSize.width());
+ float y = qBound(qreal(0),
+ focusPoint.y() - (focusSize.height() / 2),
+ 1.f - focusSize.height());
+
+ QRectF area(QPointF(x, y), focusSize);
+
+ areas.append(adjustedArea(area));
+ }
+ m_cameraSession->camera()->setFocusAreas(areas);
+}
+
+void QAndroidCamera::zoomTo(float factor, float rate)
+{
+ Q_UNUSED(rate);
+
+ if (zoomFactor() == factor)
+ return;
+
+ if (!m_cameraSession || !m_cameraSession->camera())
+ return;
+
+ factor = qBound(qreal(1), factor, maxZoomFactor());
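+ // Android reports zoom ratios as integer percentages (100 == 1x); snap to the
+ // closest supported step.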
+ int validZoomIndex = qt_findClosestValue(m_zoomRatios, qRound(factor * 100));
+ float newZoom = m_zoomRatios.at(validZoomIndex) / qreal(100);
+ m_cameraSession->camera()->setZoom(validZoomIndex);
+ zoomFactorChanged(newZoom);
+}
+
+void QAndroidCamera::setFlashMode(QCamera::FlashMode mode)
+{
+ if (!m_cameraSession || !m_cameraSession->camera())
+ return;
+
+ if (!isFlashModeSupported(mode))
+ return;
+
+ QString flashMode;
+ if (mode == QCamera::FlashAuto)
+ flashMode = QLatin1String("auto");
+ else if (mode == QCamera::FlashOn)
+ flashMode = QLatin1String("on");
+ else // FlashOff
+ flashMode = QLatin1String("off");
+
+ m_cameraSession->camera()->setFlashMode(flashMode);
+ flashModeChanged(mode);
+}
+
+bool QAndroidCamera::isFlashModeSupported(QCamera::FlashMode mode) const
+{
+ if (!m_cameraSession || !m_cameraSession->camera())
+ return false;
+ switch (mode) {
+ case QCamera::FlashOff:
+ return true;
+ case QCamera::FlashOn:
+ return isFlashSupported;
+ case QCamera::FlashAuto:
+ return isFlashAutoSupported;
+ }
+}
+
+bool QAndroidCamera::isFlashReady() const
+{
+ // Android doesn't have an API for that
+ return true;
+}
+
+void QAndroidCamera::setTorchMode(QCamera::TorchMode mode)
+{
+ if (!m_cameraSession)
+ return;
+ auto *camera = m_cameraSession->camera();
+ if (!camera || !isTorchSupported || mode == QCamera::TorchAuto)
+ return;
+
+ if (mode == QCamera::TorchOn) {
+ camera->setFlashMode(QLatin1String("torch"));
+ } else if (mode == QCamera::TorchOff) {
+ // if torch was enabled, it first needs to be turned off before restoring the flash mode
+ camera->setFlashMode(QLatin1String("off"));
+ setFlashMode(flashMode());
+ }
+ torchModeChanged(mode);
+}
+
+bool QAndroidCamera::isTorchModeSupported(QCamera::TorchMode mode) const
+{
+ if (!m_cameraSession || !m_cameraSession->camera())
+ return false;
+ switch (mode) {
+ case QCamera::TorchOff:
+ return true;
+ case QCamera::TorchOn:
+ return isTorchSupported;
+ case QCamera::TorchAuto:
+ return false;
+ }
+}
+
+void QAndroidCamera::setExposureMode(QCamera::ExposureMode mode)
+{
+ if (exposureMode() == mode)
+ return;
+
+ if (!m_cameraSession || !m_cameraSession->camera())
+ return;
+
+ if (!m_supportedExposureModes.contains(mode))
+ return;
+
+ QString sceneMode;
+ switch (mode) {
+ case QCamera::ExposureAuto:
+ sceneMode = QLatin1String("auto");
+ break;
+ case QCamera::ExposureSports:
+ sceneMode = QLatin1String("sports");
+ break;
+ case QCamera::ExposurePortrait:
+ sceneMode = QLatin1String("portrait");
+ break;
+ case QCamera::ExposureBeach:
+ sceneMode = QLatin1String("beach");
+ break;
+ case QCamera::ExposureSnow:
+ sceneMode = QLatin1String("snow");
+ break;
+ case QCamera::ExposureNight:
+ sceneMode = QLatin1String("night");
+ break;
+ case QCamera::ExposureAction:
+ sceneMode = QLatin1String("action");
+ break;
+ case QCamera::ExposureLandscape:
+ sceneMode = QLatin1String("landscape");
+ break;
+ case QCamera::ExposureNightPortrait:
+ sceneMode = QLatin1String("night-portrait");
+ break;
+ case QCamera::ExposureTheatre:
+ sceneMode = QLatin1String("theatre");
+ break;
+ case QCamera::ExposureSunset:
+ sceneMode = QLatin1String("sunset");
+ break;
+ case QCamera::ExposureSteadyPhoto:
+ sceneMode = QLatin1String("steadyphoto");
+ break;
+ case QCamera::ExposureFireworks:
+ sceneMode = QLatin1String("fireworks");
+ break;
+ case QCamera::ExposureParty:
+ sceneMode = QLatin1String("party");
+ break;
+ case QCamera::ExposureCandlelight:
+ sceneMode = QLatin1String("candlelight");
+ break;
+ case QCamera::ExposureBarcode:
+ sceneMode = QLatin1String("barcode");
+ break;
+ default:
+ sceneMode = QLatin1String("auto");
+ mode = QCamera::ExposureAuto;
+ break;
+ }
+
+ m_cameraSession->camera()->setSceneMode(sceneMode);
+ exposureModeChanged(mode);
+}
+
+bool QAndroidCamera::isExposureModeSupported(QCamera::ExposureMode mode) const
+{
+ return m_supportedExposureModes.contains(mode);
+}
+
+void QAndroidCamera::setExposureCompensation(float bias)
+{
+ if (exposureCompensation() == bias || !m_cameraSession || !m_cameraSession->camera())
+ return;
+
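+ // Convert the EV bias into the closest compensation index supported by the camera.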
+ int biasIndex = qRound(bias / m_exposureCompensationStep);
+ biasIndex = qBound(m_minExposureCompensationIndex, biasIndex, m_maxExposureCompensationIndex);
+ float comp = biasIndex * m_exposureCompensationStep;
+ m_cameraSession->camera()->setExposureCompensation(biasIndex);
+ exposureCompensationChanged(comp);
+}
+
+bool QAndroidCamera::isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const
+{
+ return m_supportedWhiteBalanceModes.contains(mode);
+}
+
+void QAndroidCamera::setWhiteBalanceMode(QCamera::WhiteBalanceMode mode)
+{
+ if (!m_cameraSession)
+ return;
+ auto *camera = m_cameraSession->camera();
+ if (!camera)
+ return;
+ QString wb = m_supportedWhiteBalanceModes.value(mode, QString());
+ if (!wb.isEmpty()) {
+ camera->setWhiteBalance(wb);
+ whiteBalanceModeChanged(mode);
+ }
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qandroidcamera_p.cpp"
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidcamera_p.h b/src/plugins/multimedia/android/mediacapture/qandroidcamera_p.h
new file mode 100644
index 000000000..77bbc3133
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidcamera_p.h
@@ -0,0 +1,99 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+
+#ifndef QANDROIDCAMERACONTROL_H
+#define QANDROIDCAMERACONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformcamera_p.h>
+
+#include <qmap.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAndroidCameraSession;
+class QAndroidCameraVideoRendererControl;
+class QAndroidMediaCaptureSession;
+
+class QAndroidCamera : public QPlatformCamera
+{
+ Q_OBJECT
+public:
+ explicit QAndroidCamera(QCamera *camera);
+ virtual ~QAndroidCamera();
+
+ bool isActive() const override;
+ void setActive(bool active) override;
+
+ void setCamera(const QCameraDevice &camera) override;
+ bool setCameraFormat(const QCameraFormat &format) override;
+
+ void setCaptureSession(QPlatformMediaCaptureSession *session) override;
+
+ void setFocusMode(QCamera::FocusMode mode) override;
+ bool isFocusModeSupported(QCamera::FocusMode mode) const override;
+
+ void zoomTo(float factor, float rate) override;
+
+ void setFlashMode(QCamera::FlashMode mode) override;
+ bool isFlashModeSupported(QCamera::FlashMode mode) const override;
+ bool isFlashReady() const override;
+
+ void setTorchMode(QCamera::TorchMode mode) override;
+ bool isTorchModeSupported(QCamera::TorchMode mode) const override;
+
+ void setExposureMode(QCamera::ExposureMode mode) override;
+ bool isExposureModeSupported(QCamera::ExposureMode mode) const override;
+
+ void setExposureCompensation(float bias) override;
+
+ bool isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const override;
+ void setWhiteBalanceMode(QCamera::WhiteBalanceMode mode) override;
+
+private Q_SLOTS:
+ void onCameraOpened();
+ void setCameraFocusArea();
+
+private:
+ void reactivateCameraSession();
+
+ QAndroidCameraSession *m_cameraSession = nullptr;
+ QAndroidMediaCaptureSession *m_service = nullptr;
+
+ QList<QCamera::FocusMode> m_supportedFocusModes;
+ bool m_continuousPictureFocusSupported = false;
+ bool m_continuousVideoFocusSupported = false;
+ bool m_focusPointSupported = false;
+
+ QList<int> m_zoomRatios;
+
+ QList<QCamera::ExposureMode> m_supportedExposureModes;
+ int m_minExposureCompensationIndex;
+ int m_maxExposureCompensationIndex;
+ qreal m_exposureCompensationStep;
+
+ bool isFlashSupported = false;
+ bool isFlashAutoSupported = false;
+ bool isTorchSupported = false;
+ bool isPendingSetActive = false;
+ QCameraDevice m_cameraDev;
+
+ QMap<QCamera::WhiteBalanceMode, QString> m_supportedWhiteBalanceModes;
+ QCameraFormat m_cameraFormat;
+};
+
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDCAMERACONTROL_H
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidcamerasession.cpp b/src/plugins/multimedia/android/mediacapture/qandroidcamerasession.cpp
new file mode 100644
index 000000000..7eda1175f
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidcamerasession.cpp
@@ -0,0 +1,808 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// Copyright (C) 2016 Ruslan Baratov
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidcamerasession_p.h"
+
+#include "androidcamera_p.h"
+#include "androidmultimediautils_p.h"
+#include "qandroidvideooutput_p.h"
+#include "qandroidmultimediautils_p.h"
+#include "androidmediarecorder_p.h"
+#include <qvideosink.h>
+#include <QtConcurrent/qtconcurrentrun.h>
+#include <qfile.h>
+#include <qguiapplication.h>
+#include <qscreen.h>
+#include <qdebug.h>
+#include <qvideoframe.h>
+#include <private/qplatformimagecapture_p.h>
+#include <private/qplatformvideosink_p.h>
+#include <private/qmemoryvideobuffer_p.h>
+#include <private/qcameradevice_p.h>
+#include <private/qmediastoragelocation_p.h>
+#include <private/qvideoframe_p.h>
+#include <QImageWriter>
+
+QT_BEGIN_NAMESPACE
+
+Q_GLOBAL_STATIC(QList<QCameraDevice>, g_availableCameras)
+
+QAndroidCameraSession::QAndroidCameraSession(QObject *parent)
+ : QObject(parent)
+ , m_selectedCamera(0)
+ , m_camera(0)
+ , m_videoOutput(0)
+ , m_savedState(-1)
+ , m_previewStarted(false)
+ , m_readyForCapture(false)
+ , m_currentImageCaptureId(-1)
+ , m_previewCallback(0)
+ , m_keepActive(false)
+{
+ if (qApp) {
+ connect(qApp, &QGuiApplication::applicationStateChanged,
+ this, &QAndroidCameraSession::onApplicationStateChanged);
+
+ auto screen = qApp->primaryScreen();
+ if (screen) {
+ connect(screen, &QScreen::orientationChanged, this,
+ &QAndroidCameraSession::updateOrientation);
+ enableRotation();
+ }
+ }
+}
+
+QAndroidCameraSession::~QAndroidCameraSession()
+{
+ if (m_sink)
+ disconnect(m_retryPreviewConnection);
+ close();
+}
+
+//void QAndroidCameraSession::setCaptureMode(QCamera::CaptureModes mode)
+//{
+// if (m_captureMode == mode || !isCaptureModeSupported(mode))
+// return;
+
+// m_captureMode = mode;
+// emit captureModeChanged(m_captureMode);
+
+// if (m_previewStarted && m_captureMode.testFlag(QCamera::CaptureStillImage))
+// applyResolution(m_actualImageSettings.resolution());
+//}
+
+void QAndroidCameraSession::setActive(bool active)
+{
+ if (m_active == active)
+ return;
+
+ // If the application is inactive, the camera shouldn't be started. Save the desired state
+ // instead and it will be set when the application becomes active.
+ if (active && qApp->applicationState() == Qt::ApplicationInactive) {
+ m_isStateSaved = true;
+ m_savedState = active;
+ return;
+ }
+
+ m_isStateSaved = false;
+ m_active = active;
+ setActiveHelper(m_active);
+ emit activeChanged(m_active);
+}
+
+void QAndroidCameraSession::setActiveHelper(bool active)
+{
+ if (!active) {
+ stopPreview();
+ close();
+ } else {
+ if (!m_camera && !open()) {
+ emit error(QCamera::CameraError, QStringLiteral("Failed to open camera"));
+ return;
+ }
+ startPreview();
+ }
+}
+
+void QAndroidCameraSession::updateAvailableCameras()
+{
+ g_availableCameras->clear();
+
+ const int numCameras = AndroidCamera::getNumberOfCameras();
+ for (int i = 0; i < numCameras; ++i) {
+ QCameraDevicePrivate *info = new QCameraDevicePrivate;
+ AndroidCamera::getCameraInfo(i, info);
+
+ if (!info->id.isEmpty()) {
+ // Add supported picture and video sizes to the camera info
+ AndroidCamera *camera = AndroidCamera::open(i);
+
+ if (camera) {
+ info->videoFormats = camera->getSupportedFormats();
+ info->photoResolutions = camera->getSupportedPictureSizes();
+ }
+
+ delete camera;
+ g_availableCameras->append(info->create());
+ }
+ }
+}
+
+const QList<QCameraDevice> &QAndroidCameraSession::availableCameras()
+{
+ if (g_availableCameras->isEmpty())
+ updateAvailableCameras();
+
+ return *g_availableCameras;
+}
+
+bool QAndroidCameraSession::open()
+{
+ close();
+
+ m_camera = AndroidCamera::open(m_selectedCamera);
+
+ if (m_camera) {
+ connect(m_camera, &AndroidCamera::pictureExposed,
+ this, &QAndroidCameraSession::onCameraPictureExposed);
+ connect(m_camera, &AndroidCamera::lastPreviewFrameFetched,
+ this, &QAndroidCameraSession::onLastPreviewFrameFetched,
+ Qt::DirectConnection);
+ connect(m_camera, &AndroidCamera::newPreviewFrame,
+ this, &QAndroidCameraSession::onNewPreviewFrame,
+ Qt::DirectConnection);
+ connect(m_camera, &AndroidCamera::pictureCaptured,
+ this, &QAndroidCameraSession::onCameraPictureCaptured);
+ connect(m_camera, &AndroidCamera::previewStarted,
+ this, &QAndroidCameraSession::onCameraPreviewStarted);
+ connect(m_camera, &AndroidCamera::previewStopped,
+ this, &QAndroidCameraSession::onCameraPreviewStopped);
+ connect(m_camera, &AndroidCamera::previewFailedToStart,
+ this, &QAndroidCameraSession::onCameraPreviewFailedToStart);
+ connect(m_camera, &AndroidCamera::takePictureFailed,
+ this, &QAndroidCameraSession::onCameraTakePictureFailed);
+
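+ // The legacy Android camera API defaults to NV21 for preview frames, and the
+ // rest of this session assumes that format, so enforce it here.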
+ if (m_camera->getPreviewFormat() != AndroidCamera::NV21)
+ m_camera->setPreviewFormat(AndroidCamera::NV21);
+
+ m_camera->notifyNewFrames(m_previewCallback);
+
+ emit opened();
+ setActive(true);
+ }
+
+ return m_camera != 0;
+}
+
+void QAndroidCameraSession::close()
+{
+ if (!m_camera)
+ return;
+
+ stopPreview();
+
+ m_readyForCapture = false;
+ m_currentImageCaptureId = -1;
+ m_currentImageCaptureFileName.clear();
+ m_actualImageSettings = m_requestedImageSettings;
+
+ m_camera->release();
+ delete m_camera;
+ m_camera = 0;
+
+ setActive(false);
+}
+
+void QAndroidCameraSession::setVideoOutput(QAndroidVideoOutput *output)
+{
+ if (m_videoOutput) {
+ m_videoOutput->stop();
+ m_videoOutput->reset();
+ }
+
+ if (output) {
+ m_videoOutput = output;
+ if (m_videoOutput->isReady()) {
+ onVideoOutputReady(true);
+ } else {
+ connect(m_videoOutput, &QAndroidVideoOutput::readyChanged,
+ this, &QAndroidCameraSession::onVideoOutputReady);
+ }
+ } else {
+ m_videoOutput = 0;
+ }
+}
+
+void QAndroidCameraSession::setCameraFormat(const QCameraFormat &format)
+{
+ m_requestedFpsRange.min = format.minFrameRate();
+ m_requestedFpsRange.max = format.maxFrameRate();
+ m_requestedPixelFromat = AndroidCamera::AndroidImageFormatFromQtPixelFormat(format.pixelFormat());
+
+ m_requestedImageSettings.setResolution(format.resolution());
+ m_actualImageSettings.setResolution(format.resolution());
+ if (m_readyForCapture)
+ applyResolution(m_actualImageSettings.resolution());
+}
+
+void QAndroidCameraSession::applyResolution(const QSize &captureSize, bool restartPreview)
+{
+ if (!m_camera)
+ return;
+
+ const QSize currentViewfinderResolution = m_camera->previewSize();
+ const AndroidCamera::ImageFormat currentPreviewFormat = m_camera->getPreviewFormat();
+ const AndroidCamera::FpsRange currentFpsRange = m_camera->getPreviewFpsRange();
+
+ // -- adjust resolution
+ QSize adjustedViewfinderResolution;
+ const QList<QSize> previewSizes = m_camera->getSupportedPreviewSizes();
+
+ const bool validCaptureSize = captureSize.width() > 0 && captureSize.height() > 0;
+ if (validCaptureSize
+ && m_camera->getPreferredPreviewSizeForVideo().isEmpty()) {
+ // According to the Android doc, if getPreferredPreviewSizeForVideo() returns null, it means
+ // the preview size cannot be different from the capture size
+ adjustedViewfinderResolution = captureSize;
+ } else {
+ qreal captureAspectRatio = 0;
+ if (validCaptureSize)
+ captureAspectRatio = qreal(captureSize.width()) / qreal(captureSize.height());
+
+ if (validCaptureSize) {
+ // search for viewfinder resolution with the same aspect ratio
+ qreal minAspectDiff = 1;
+ QSize closestResolution;
+ for (int i = previewSizes.count() - 1; i >= 0; --i) {
+ const QSize &size = previewSizes.at(i);
+ const qreal sizeAspect = qreal(size.width()) / size.height();
+ if (qFuzzyCompare(captureAspectRatio, sizeAspect)) {
+ adjustedViewfinderResolution = size;
+ break;
+ } else if (minAspectDiff > qAbs(sizeAspect - captureAspectRatio)) {
+ closestResolution = size;
+ minAspectDiff = qAbs(sizeAspect - captureAspectRatio);
+ }
+ }
+ if (!adjustedViewfinderResolution.isValid()) {
+ qWarning("Cannot find a viewfinder resolution matching the capture aspect ratio.");
+ if (closestResolution.isValid()) {
+ adjustedViewfinderResolution = closestResolution;
+ qWarning("Using closest viewfinder resolution.");
+ } else {
+ return;
+ }
+ }
+ } else {
+ adjustedViewfinderResolution = previewSizes.last();
+ }
+ }
+
+ // -- adjust pixel format
+
+ AndroidCamera::ImageFormat adjustedPreviewFormat = m_requestedPixelFromat;
+ if (adjustedPreviewFormat == AndroidCamera::UnknownImageFormat)
+ adjustedPreviewFormat = AndroidCamera::NV21;
+
+ // -- adjust FPS
+
+ AndroidCamera::FpsRange adjustedFps = m_requestedFpsRange;
+ if (adjustedFps.min == 0 || adjustedFps.max == 0)
+ adjustedFps = currentFpsRange;
+
+ // -- Set values on camera
+
+ // fix the resolution of output based on the orientation
+ QSize cameraOutputResolution = adjustedViewfinderResolution;
+ QSize videoOutputResolution = adjustedViewfinderResolution;
+ QSize currentVideoOutputResolution = m_videoOutput ? m_videoOutput->getVideoSize() : QSize(0, 0);
+ const int rotation = currentCameraRotation();
+ // only transpose if it's valid for the preview
+ if (rotation == 90 || rotation == 270) {
+ videoOutputResolution.transpose();
+ if (previewSizes.contains(cameraOutputResolution.transposed()))
+ cameraOutputResolution.transpose();
+ }
+
+ if (currentViewfinderResolution != cameraOutputResolution
+ || (m_videoOutput && currentVideoOutputResolution != videoOutputResolution)
+ || currentPreviewFormat != adjustedPreviewFormat || currentFpsRange.min != adjustedFps.min
+ || currentFpsRange.max != adjustedFps.max) {
+ if (m_videoOutput) {
+ m_videoOutput->setVideoSize(videoOutputResolution);
+ }
+
+ // if preview is started, we have to stop it first before changing its size
+ if (m_previewStarted && restartPreview)
+ m_camera->stopPreview();
+
+ m_camera->setPreviewSize(cameraOutputResolution);
+ m_camera->setPreviewFormat(adjustedPreviewFormat);
+ m_camera->setPreviewFpsRange(adjustedFps);
+
+ // restart preview
+ if (m_previewStarted && restartPreview)
+ m_camera->startPreview();
+ }
+}
+
+QList<QSize> QAndroidCameraSession::getSupportedPreviewSizes() const
+{
+ return m_camera ? m_camera->getSupportedPreviewSizes() : QList<QSize>();
+}
+
+QList<QVideoFrameFormat::PixelFormat> QAndroidCameraSession::getSupportedPixelFormats() const
+{
+ QList<QVideoFrameFormat::PixelFormat> formats;
+
+ if (!m_camera)
+ return formats;
+
+ const QList<AndroidCamera::ImageFormat> nativeFormats = m_camera->getSupportedPreviewFormats();
+
+ formats.reserve(nativeFormats.size());
+
+ for (AndroidCamera::ImageFormat nativeFormat : nativeFormats) {
+ QVideoFrameFormat::PixelFormat format = AndroidCamera::QtPixelFormatFromAndroidImageFormat(nativeFormat);
+ if (format != QVideoFrameFormat::Format_Invalid)
+ formats.append(format);
+ }
+
+ return formats;
+}
+
+QList<AndroidCamera::FpsRange> QAndroidCameraSession::getSupportedPreviewFpsRange() const
+{
+ return m_camera ? m_camera->getSupportedPreviewFpsRange() : QList<AndroidCamera::FpsRange>();
+}
+
+
+bool QAndroidCameraSession::startPreview()
+{
+ if (!m_camera || !m_videoOutput)
+ return false;
+
+ if (m_previewStarted)
+ return true;
+
+ if (!m_videoOutput->isReady())
+ return true; // delay starting until the video output is ready
+
+ Q_ASSERT(m_videoOutput->surfaceTexture() || m_videoOutput->surfaceHolder());
+
+ if ((m_videoOutput->surfaceTexture() && !m_camera->setPreviewTexture(m_videoOutput->surfaceTexture()))
+ || (m_videoOutput->surfaceHolder() && !m_camera->setPreviewDisplay(m_videoOutput->surfaceHolder())))
+ return false;
+
+ applyResolution(m_actualImageSettings.resolution());
+
+ AndroidMultimediaUtils::enableOrientationListener(true);
+
+ updateOrientation();
+ m_camera->startPreview();
+ m_previewStarted = true;
+ m_videoOutput->start();
+
+ return true;
+}
+
+QSize QAndroidCameraSession::getDefaultResolution() const
+{
+ const bool hasHighQualityProfile = AndroidCamcorderProfile::hasProfile(
+ m_camera->cameraId(),
+ AndroidCamcorderProfile::Quality(AndroidCamcorderProfile::QUALITY_HIGH));
+
+ if (hasHighQualityProfile) {
+ const AndroidCamcorderProfile camProfile = AndroidCamcorderProfile::get(
+ m_camera->cameraId(),
+ AndroidCamcorderProfile::Quality(AndroidCamcorderProfile::QUALITY_HIGH));
+
+ return QSize(camProfile.getValue(AndroidCamcorderProfile::videoFrameWidth),
+ camProfile.getValue(AndroidCamcorderProfile::videoFrameHeight));
+ }
+ return QSize();
+}
+
+void QAndroidCameraSession::stopPreview()
+{
+ if (!m_camera || !m_previewStarted)
+ return;
+
+ AndroidMultimediaUtils::enableOrientationListener(false);
+
+ m_camera->stopPreview();
+ m_camera->setPreviewSize(QSize());
+ m_camera->setPreviewTexture(0);
+ m_camera->setPreviewDisplay(0);
+
+ if (m_videoOutput) {
+ m_videoOutput->stop();
+ }
+ m_previewStarted = false;
+}
+
+void QAndroidCameraSession::setImageSettings(const QImageEncoderSettings &settings)
+{
+ if (m_requestedImageSettings == settings)
+ return;
+
+ m_requestedImageSettings = m_actualImageSettings = settings;
+
+ applyImageSettings();
+
+ if (m_readyForCapture)
+ applyResolution(m_actualImageSettings.resolution());
+}
+
+void QAndroidCameraSession::enableRotation()
+{
+ m_rotationEnabled = true;
+}
+
+void QAndroidCameraSession::disableRotation()
+{
+ m_rotationEnabled = false;
+}
+
+void QAndroidCameraSession::updateOrientation()
+{
+ if (!m_camera || !m_rotationEnabled)
+ return;
+
+ m_camera->setDisplayOrientation(currentCameraRotation());
+ applyResolution(m_actualImageSettings.resolution());
+}
+
+
+int QAndroidCameraSession::currentCameraRotation() const
+{
+ if (!m_camera)
+ return 0;
+
+ auto screen = QGuiApplication::primaryScreen();
+ auto screenOrientation = screen->orientation();
+ if (screenOrientation == Qt::PrimaryOrientation)
+ screenOrientation = screen->primaryOrientation();
+
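+ // Translate the screen orientation into a device rotation in degrees.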
+ int deviceOrientation = 0;
+ switch (screenOrientation) {
+ case Qt::PrimaryOrientation:
+ case Qt::PortraitOrientation:
+ break;
+ case Qt::LandscapeOrientation:
+ deviceOrientation = 90;
+ break;
+ case Qt::InvertedPortraitOrientation:
+ deviceOrientation = 180;
+ break;
+ case Qt::InvertedLandscapeOrientation:
+ deviceOrientation = 270;
+ break;
+ }
+
+ int nativeCameraOrientation = m_camera->getNativeOrientation();
+
+ int rotation;
+ // subtract natural camera orientation and physical device orientation
+ if (m_camera->getFacing() == AndroidCamera::CameraFacingFront) {
+ rotation = (nativeCameraOrientation + deviceOrientation) % 360;
+ rotation = (360 - rotation) % 360; // compensate the mirror
+ } else { // back-facing camera
+ rotation = (nativeCameraOrientation - deviceOrientation + 360) % 360;
+ }
+ return rotation;
+}
+
+void QAndroidCameraSession::setPreviewFormat(AndroidCamera::ImageFormat format)
+{
+ if (format == AndroidCamera::UnknownImageFormat)
+ return;
+
+ m_camera->setPreviewFormat(format);
+}
+
+void QAndroidCameraSession::setPreviewCallback(PreviewCallback *callback)
+{
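+ // The mutex guards the callback pointer against onNewPreviewFrame(), which is
+ // connected with Qt::DirectConnection and may run on the camera's thread.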
+ m_videoFrameCallbackMutex.lock();
+ m_previewCallback = callback;
+ if (m_camera)
+ m_camera->notifyNewFrames(m_previewCallback);
+ m_videoFrameCallbackMutex.unlock();
+}
+
+void QAndroidCameraSession::applyImageSettings()
+{
+ if (!m_camera)
+ return;
+
+ // JPEG is the only format supported right now.
+ m_actualImageSettings.setFormat(QImageCapture::JPEG);
+
+ const QSize requestedResolution = m_requestedImageSettings.resolution();
+ const QList<QSize> supportedResolutions = m_camera->getSupportedPictureSizes();
+ if (!requestedResolution.isValid()) {
+ m_actualImageSettings.setResolution(getDefaultResolution());
+ } else if (!supportedResolutions.contains(requestedResolution)) {
+ // if the requested resolution is not supported, find the closest one
+ int reqPixelCount = requestedResolution.width() * requestedResolution.height();
+ QList<int> supportedPixelCounts;
+ for (int i = 0; i < supportedResolutions.size(); ++i) {
+ const QSize &s = supportedResolutions.at(i);
+ supportedPixelCounts.append(s.width() * s.height());
+ }
+ int closestIndex = qt_findClosestValue(supportedPixelCounts, reqPixelCount);
+ m_actualImageSettings.setResolution(supportedResolutions.at(closestIndex));
+ }
+ m_camera->setPictureSize(m_actualImageSettings.resolution());
+
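+ // Map the abstract QImageCapture quality levels to JPEG quality percentages.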
+ int jpegQuality = 100;
+ switch (m_requestedImageSettings.quality()) {
+ case QImageCapture::VeryLowQuality:
+ jpegQuality = 20;
+ break;
+ case QImageCapture::LowQuality:
+ jpegQuality = 40;
+ break;
+ case QImageCapture::NormalQuality:
+ jpegQuality = 60;
+ break;
+ case QImageCapture::HighQuality:
+ jpegQuality = 80;
+ break;
+ case QImageCapture::VeryHighQuality:
+ jpegQuality = 100;
+ break;
+ }
+ m_camera->setJpegQuality(jpegQuality);
+}
+
+bool QAndroidCameraSession::isReadyForCapture() const
+{
+ return isActive() && m_readyForCapture;
+}
+
+void QAndroidCameraSession::setReadyForCapture(bool ready)
+{
+ if (m_readyForCapture == ready)
+ return;
+
+ m_readyForCapture = ready;
+ emit readyForCaptureChanged(ready);
+}
+
+int QAndroidCameraSession::captureImage()
+{
+ const int newImageCaptureId = m_currentImageCaptureId + 1;
+
+ if (!isReadyForCapture()) {
+ emit imageCaptureError(newImageCaptureId, QImageCapture::NotReadyError,
+ QPlatformImageCapture::msgCameraNotReady());
+ return newImageCaptureId;
+ }
+
+ setReadyForCapture(false);
+
+ m_currentImageCaptureId = newImageCaptureId;
+
+ applyResolution(m_actualImageSettings.resolution());
+ m_camera->takePicture();
+
+ return m_currentImageCaptureId;
+}
+
+int QAndroidCameraSession::capture(const QString &fileName)
+{
+ m_currentImageCaptureFileName = fileName;
+ m_imageCaptureToBuffer = false;
+ return captureImage();
+}
+
+int QAndroidCameraSession::captureToBuffer()
+{
+ m_currentImageCaptureFileName.clear();
+ m_imageCaptureToBuffer = true;
+ return captureImage();
+}
+
+void QAndroidCameraSession::onCameraTakePictureFailed()
+{
+ emit imageCaptureError(m_currentImageCaptureId, QImageCapture::ResourceError,
+ tr("Failed to capture image"));
+
+ // The preview needs to be restarted and the preview callback must be set up again
+ m_camera->startPreview();
+}
+
+void QAndroidCameraSession::onCameraPictureExposed()
+{
+ if (!m_camera)
+ return;
+
+ emit imageExposed(m_currentImageCaptureId);
+ m_camera->fetchLastPreviewFrame();
+}
+
+void QAndroidCameraSession::onLastPreviewFrameFetched(const QVideoFrame &frame)
+{
+ if (!m_camera)
+ return;
+
+ updateOrientation();
+
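+ // Converting and transforming the preview frame can be slow, so do it in a worker thread.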
+ (void)QtConcurrent::run(&QAndroidCameraSession::processPreviewImage, this,
+ m_currentImageCaptureId, frame, currentCameraRotation());
+}
+
+void QAndroidCameraSession::processPreviewImage(int id, const QVideoFrame &frame, int rotation)
+{
+ // Preview display of front-facing cameras is flipped horizontally, but the frame data
+ // we get here is not. Flip it ourselves if the camera is front-facing to match what the user
+ // sees on the viewfinder.
+ QTransform transform;
+ transform.rotate(rotation);
+
+ if (m_camera->getFacing() == AndroidCamera::CameraFacingFront)
+ transform.scale(-1, 1);
+
+ emit imageCaptured(id, frame.toImage().transformed(transform));
+}
+
+void QAndroidCameraSession::onNewPreviewFrame(const QVideoFrame &frame)
+{
+ if (!m_camera)
+ return;
+
+ m_videoFrameCallbackMutex.lock();
+
+ if (m_previewCallback)
+ m_previewCallback->onFrameAvailable(frame);
+
+ m_videoFrameCallbackMutex.unlock();
+}
+
+void QAndroidCameraSession::onCameraPictureCaptured(const QByteArray &bytes,
+ QVideoFrameFormat::PixelFormat format, QSize size, int bytesPerLine)
+{
+ if (m_imageCaptureToBuffer) {
+ processCapturedImageToBuffer(m_currentImageCaptureId, bytes, format, size, bytesPerLine);
+ } else {
+ // Loading and saving the captured image can be slow, do it in a separate thread
+ (void)QtConcurrent::run(&QAndroidCameraSession::processCapturedImage, this,
+ m_currentImageCaptureId, bytes, m_currentImageCaptureFileName);
+ }
+
+ // Preview needs to be restarted after taking a picture
+ if (m_camera)
+ m_camera->startPreview();
+}
+
+void QAndroidCameraSession::onCameraPreviewStarted()
+{
+ setReadyForCapture(true);
+}
+
+void QAndroidCameraSession::onCameraPreviewFailedToStart()
+{
+ if (isActive()) {
+ Q_EMIT error(QCamera::CameraError, tr("Camera preview failed to start."));
+
+ AndroidMultimediaUtils::enableOrientationListener(false);
+ m_camera->setPreviewSize(QSize());
+ m_camera->setPreviewTexture(0);
+ if (m_videoOutput) {
+ m_videoOutput->stop();
+ m_videoOutput->reset();
+ }
+ m_previewStarted = false;
+
+ setActive(false);
+ setReadyForCapture(false);
+ }
+}
+
+void QAndroidCameraSession::onCameraPreviewStopped()
+{
+ if (!m_previewStarted)
+ setActive(false);
+ setReadyForCapture(false);
+}
+
+void QAndroidCameraSession::processCapturedImage(int id, const QByteArray &bytes, const QString &fileName)
+{
+ const QString actualFileName = QMediaStorageLocation::generateFileName(
+ fileName, QStandardPaths::PicturesLocation, QLatin1String("jpg"));
+ QFile writer(actualFileName);
+ if (!writer.open(QIODeviceBase::WriteOnly)) {
+ const QString errorMessage = tr("File is not available: %1").arg(writer.errorString());
+ emit imageCaptureError(id, QImageCapture::Error::ResourceError, errorMessage);
+ return;
+ }
+
+ if (writer.write(bytes) < 0) {
+ const QString errorMessage = tr("Could not save to file: %1").arg(writer.errorString());
+ emit imageCaptureError(id, QImageCapture::Error::ResourceError, errorMessage);
+ return;
+ }
+
+ writer.close();
+ if (fileName.isEmpty() || QFileInfo(fileName).isRelative())
+ AndroidMultimediaUtils::registerMediaFile(actualFileName);
+
+ emit imageSaved(id, actualFileName);
+}
+
+void QAndroidCameraSession::processCapturedImageToBuffer(int id, const QByteArray &bytes,
+ QVideoFrameFormat::PixelFormat format, QSize size, int bytesPerLine)
+{
+ QVideoFrame frame = QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(bytes, bytesPerLine),
+ QVideoFrameFormat(size, format));
+ emit imageAvailable(id, frame);
+}
+
+void QAndroidCameraSession::onVideoOutputReady(bool ready)
+{
+ if (ready && m_active)
+ startPreview();
+}
+
+void QAndroidCameraSession::onApplicationStateChanged()
+{
+ switch (QGuiApplication::applicationState()) {
+ case Qt::ApplicationInactive:
+ if (!m_keepActive && m_active) {
+ m_savedState = m_active;
+ setActive(false);
+ m_isStateSaved = true;
+ }
+ break;
+ case Qt::ApplicationActive:
+ if (m_isStateSaved) {
+ setActive(m_savedState);
+ m_isStateSaved = false;
+ }
+ break;
+ default:
+ break;
+ }
+}
+
+void QAndroidCameraSession::setKeepAlive(bool keepAlive)
+{
+ m_keepActive = keepAlive;
+}
+
+void QAndroidCameraSession::setVideoSink(QVideoSink *sink)
+{
+ if (m_sink == sink)
+ return;
+
+ if (m_sink)
+ disconnect(m_retryPreviewConnection);
+
+ m_sink = sink;
+
+ if (m_sink)
+ m_retryPreviewConnection =
+ connect(m_sink->platformVideoSink(), &QPlatformVideoSink::rhiChanged, this, [&]()
+ {
+ if (m_active) {
+ setActive(false);
+ setActive(true);
+ }
+ }, Qt::DirectConnection);
+ if (m_sink) {
+ delete m_textureOutput;
+ m_textureOutput = new QAndroidTextureVideoOutput(m_sink, this);
+ }
+
+ setVideoOutput(m_textureOutput);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qandroidcamerasession_p.cpp"
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidcamerasession_p.h b/src/plugins/multimedia/android/mediacapture/qandroidcamerasession_p.h
new file mode 100644
index 000000000..3b56d9c3b
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidcamerasession_p.h
@@ -0,0 +1,166 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// Copyright (C) 2016 Ruslan Baratov
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDCAMERASESSION_H
+#define QANDROIDCAMERASESSION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qcamera.h>
+#include <QImageCapture>
+#include <QSet>
+#include <QMutex>
+#include <private/qplatformimagecapture_p.h>
+#include "androidcamera_p.h"
+
+QT_BEGIN_NAMESPACE
+
+class QAndroidVideoOutput;
+class QAndroidTextureVideoOutput;
+class QVideoSink;
+
+class QAndroidCameraSession : public QObject
+{
+ Q_OBJECT
+public:
+ explicit QAndroidCameraSession(QObject *parent = 0);
+ ~QAndroidCameraSession();
+
+ static const QList<QCameraDevice> &availableCameras();
+
+ void setSelectedCameraId(int cameraId) { m_selectedCamera = cameraId; }
+ int getSelectedCameraId() { return m_selectedCamera; }
+ AndroidCamera *camera() const { return m_camera; }
+
+ bool isActive() const { return m_active; }
+ void setActive(bool active);
+
+ void applyResolution(const QSize &captureSize = QSize(), bool restartPreview = true);
+
+ QAndroidVideoOutput *videoOutput() const { return m_videoOutput; }
+ void setVideoOutput(QAndroidVideoOutput *output);
+
+ void setCameraFormat(const QCameraFormat &format);
+
+ QList<QSize> getSupportedPreviewSizes() const;
+ QList<QVideoFrameFormat::PixelFormat> getSupportedPixelFormats() const;
+ QList<AndroidCamera::FpsRange> getSupportedPreviewFpsRange() const;
+
+ QImageEncoderSettings imageSettings() const { return m_actualImageSettings; }
+ void setImageSettings(const QImageEncoderSettings &settings);
+
+ bool isReadyForCapture() const;
+ void setReadyForCapture(bool ready);
+ int capture(const QString &fileName);
+ int captureToBuffer();
+
+ int currentCameraRotation() const;
+
+ void setPreviewFormat(AndroidCamera::ImageFormat format);
+
+ struct PreviewCallback
+ {
+ virtual void onFrameAvailable(const QVideoFrame &frame) = 0;
+ };
+ void setPreviewCallback(PreviewCallback *callback);
+
+ void setVideoSink(QVideoSink *surface);
+
+ void disableRotation();
+ void enableRotation();
+
+ void setKeepAlive(bool keepAlive);
+
+Q_SIGNALS:
+ void activeChanged(bool);
+ void error(int error, const QString &errorString);
+ void opened();
+
+ void readyForCaptureChanged(bool);
+ void imageExposed(int id);
+ void imageCaptured(int id, const QImage &preview);
+ void imageMetadataAvailable(int id, const QMediaMetaData &key);
+ void imageAvailable(int id, const QVideoFrame &buffer);
+ void imageSaved(int id, const QString &fileName);
+ void imageCaptureError(int id, int error, const QString &errorString);
+
+private Q_SLOTS:
+ void onVideoOutputReady(bool ready);
+ void updateOrientation();
+
+ void onApplicationStateChanged();
+
+ void onCameraTakePictureFailed();
+ void onCameraPictureExposed();
+ void onCameraPictureCaptured(const QByteArray &bytes, QVideoFrameFormat::PixelFormat format, QSize size, int bytesPerLine);
+ void onLastPreviewFrameFetched(const QVideoFrame &frame);
+ void onNewPreviewFrame(const QVideoFrame &frame);
+ void onCameraPreviewStarted();
+ void onCameraPreviewFailedToStart();
+ void onCameraPreviewStopped();
+
+private:
+ static void updateAvailableCameras();
+
+ bool open();
+ void close();
+
+ bool startPreview();
+ void stopPreview();
+
+ void applyImageSettings();
+
+ void processPreviewImage(int id, const QVideoFrame &frame, int rotation);
+ void processCapturedImage(int id, const QByteArray &bytes, const QString &fileName);
+ void processCapturedImageToBuffer(int id, const QByteArray &bytes,
+ QVideoFrameFormat::PixelFormat format, QSize size, int bytesPerLine);
+
+ void setActiveHelper(bool active);
+
+ int captureImage();
+
+ QSize getDefaultResolution() const;
+
+ int m_selectedCamera;
+ AndroidCamera *m_camera;
+ QAndroidVideoOutput *m_videoOutput;
+
+ bool m_active = false;
+ bool m_isStateSaved = false;
+ bool m_savedState = false;
+ bool m_previewStarted;
+
+ bool m_rotationEnabled = false;
+
+ QVideoSink *m_sink = nullptr;
+ QAndroidTextureVideoOutput *m_textureOutput = nullptr;
+
+ QImageEncoderSettings m_requestedImageSettings;
+ QImageEncoderSettings m_actualImageSettings;
+ AndroidCamera::FpsRange m_requestedFpsRange;
+ AndroidCamera::ImageFormat m_requestedPixelFromat = AndroidCamera::ImageFormat::NV21;
+
+ bool m_readyForCapture;
+ int m_currentImageCaptureId;
+ QString m_currentImageCaptureFileName;
+ bool m_imageCaptureToBuffer;
+
+ QMutex m_videoFrameCallbackMutex;
+ PreviewCallback *m_previewCallback;
+ bool m_keepActive;
+ QMetaObject::Connection m_retryPreviewConnection;
+};
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDCAMERASESSION_H
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp b/src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp
new file mode 100644
index 000000000..3b005e4a5
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp
@@ -0,0 +1,473 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidcapturesession_p.h"
+
+#include "androidcamera_p.h"
+#include "qandroidcamerasession_p.h"
+#include "qaudioinput.h"
+#include "qaudiooutput.h"
+#include "androidmediaplayer_p.h"
+#include "androidmultimediautils_p.h"
+#include "qandroidmultimediautils_p.h"
+#include "qandroidvideooutput_p.h"
+#include "qandroidglobal_p.h"
+#include <private/qplatformaudioinput_p.h>
+#include <private/qplatformaudiooutput_p.h>
+#include <private/qmediarecorder_p.h>
+#include <private/qmediastoragelocation_p.h>
+#include <QtCore/qmimetype.h>
+
+#include <algorithm>
+
+QT_BEGIN_NAMESPACE
+
+QAndroidCaptureSession::QAndroidCaptureSession()
+ : QObject()
+ , m_mediaRecorder(0)
+ , m_cameraSession(0)
+ , m_duration(0)
+ , m_state(QMediaRecorder::StoppedState)
+ , m_outputFormat(AndroidMediaRecorder::DefaultOutputFormat)
+ , m_audioEncoder(AndroidMediaRecorder::DefaultAudioEncoder)
+ , m_videoEncoder(AndroidMediaRecorder::DefaultVideoEncoder)
+{
+ m_notifyTimer.setInterval(1000);
+ connect(&m_notifyTimer, &QTimer::timeout, this, &QAndroidCaptureSession::updateDuration);
+}
+
+QAndroidCaptureSession::~QAndroidCaptureSession()
+{
+ stop();
+ m_mediaRecorder = nullptr;
+ if (m_audioInput && m_audioOutput)
+ AndroidMediaPlayer::stopSoundStreaming();
+}
+
+void QAndroidCaptureSession::setCameraSession(QAndroidCameraSession *cameraSession)
+{
+ if (m_cameraSession) {
+ disconnect(m_connOpenCamera);
+ disconnect(m_connActiveChangedCamera);
+ }
+
+ m_cameraSession = cameraSession;
+ if (m_cameraSession) {
+ m_connOpenCamera = connect(cameraSession, &QAndroidCameraSession::opened,
+ this, &QAndroidCaptureSession::onCameraOpened);
+ m_connActiveChangedCamera = connect(cameraSession, &QAndroidCameraSession::activeChanged,
+ this, [this](bool isActive) {
+ if (!isActive)
+ stop();
+ });
+ }
+}
+
+void QAndroidCaptureSession::setAudioInput(QPlatformAudioInput *input)
+{
+ if (m_audioInput == input)
+ return;
+
+ if (m_audioInput) {
+ disconnect(m_audioInputChanged);
+ }
+
+ m_audioInput = input;
+
+ if (m_audioInput) {
+ m_audioInputChanged = connect(m_audioInput->q, &QAudioInput::deviceChanged, this, [this]() {
+ if (m_state == QMediaRecorder::RecordingState)
+ m_mediaRecorder->setAudioInput(m_audioInput->device.id());
+ updateStreamingState();
+ });
+ }
+ updateStreamingState();
+}
+
+void QAndroidCaptureSession::setAudioOutput(QPlatformAudioOutput *output)
+{
+ if (m_audioOutput == output)
+ return;
+
+ if (m_audioOutput)
+ disconnect(m_audioOutputChanged);
+
+ m_audioOutput = output;
+
+ if (m_audioOutput) {
+ m_audioOutputChanged = connect(m_audioOutput->q, &QAudioOutput::deviceChanged, this,
+ [this] () {
+ AndroidMediaPlayer::setAudioOutput(m_audioOutput->device.id());
+ updateStreamingState();
+ });
+ AndroidMediaPlayer::setAudioOutput(m_audioOutput->device.id());
+ }
+ updateStreamingState();
+}
+
+void QAndroidCaptureSession::updateStreamingState()
+{
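+ // When both an audio input and an audio output are set, route the input device
+ // straight to the output device; otherwise stop any ongoing streaming.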
+ if (m_audioInput && m_audioOutput) {
+ AndroidMediaPlayer::startSoundStreaming(m_audioInput->device.id().toInt(),
+ m_audioOutput->device.id().toInt());
+ } else {
+ AndroidMediaPlayer::stopSoundStreaming();
+ }
+}
+
+QMediaRecorder::RecorderState QAndroidCaptureSession::state() const
+{
+ return m_state;
+}
+
+void QAndroidCaptureSession::setKeepAlive(bool keepAlive)
+{
+ if (m_cameraSession)
+ m_cameraSession->setKeepAlive(keepAlive);
+}
+
+
+void QAndroidCaptureSession::start(QMediaEncoderSettings &settings, const QUrl &outputLocation)
+{
+ if (m_state == QMediaRecorder::RecordingState)
+ return;
+
+ if (!m_cameraSession && !m_audioInput) {
+ updateError(QMediaRecorder::ResourceError, QLatin1String("No devices are set"));
+ return;
+ }
+
+ setKeepAlive(true);
+
+ const bool validCameraSession = m_cameraSession && m_cameraSession->camera();
+
+ if (validCameraSession && !qt_androidCheckCameraPermission()) {
+ updateError(QMediaRecorder::ResourceError, QLatin1String("Camera permission denied."));
+ setKeepAlive(false);
+ return;
+ }
+
+ if (m_audioInput && !qt_androidCheckMicrophonePermission()) {
+ updateError(QMediaRecorder::ResourceError, QLatin1String("Microphone permission denied."));
+ setKeepAlive(false);
+ return;
+ }
+
+ m_mediaRecorder = std::make_shared<AndroidMediaRecorder>();
+ connect(m_mediaRecorder.get(), &AndroidMediaRecorder::error, this,
+ &QAndroidCaptureSession::onError);
+ connect(m_mediaRecorder.get(), &AndroidMediaRecorder::info, this,
+ &QAndroidCaptureSession::onInfo);
+
+ applySettings(settings);
+
+ // Set audio/video sources
+ if (validCameraSession) {
+ m_cameraSession->camera()->stopPreviewSynchronous();
+ m_cameraSession->camera()->unlock();
+
+ m_mediaRecorder->setCamera(m_cameraSession->camera());
+ m_mediaRecorder->setVideoSource(AndroidMediaRecorder::Camera);
+ }
+
+ if (m_audioInput) {
+ m_mediaRecorder->setAudioInput(m_audioInput->device.id());
+ if (!m_mediaRecorder->isAudioSourceSet())
+ m_mediaRecorder->setAudioSource(AndroidMediaRecorder::DefaultAudioSource);
+ }
+
+ // Set output format
+ m_mediaRecorder->setOutputFormat(m_outputFormat);
+
+ // Set video encoder settings
+ if (validCameraSession) {
+ m_mediaRecorder->setVideoSize(settings.videoResolution());
+ m_mediaRecorder->setVideoFrameRate(qRound(settings.videoFrameRate()));
+ m_mediaRecorder->setVideoEncodingBitRate(settings.videoBitRate());
+ m_mediaRecorder->setVideoEncoder(m_videoEncoder);
+
+ // the media recorder also compensates for the mirroring on the front camera
+ auto rotation = m_cameraSession->currentCameraRotation();
+ if (m_cameraSession->camera()->getFacing() == AndroidCamera::CameraFacingFront)
+ rotation = (360 - rotation) % 360; // remove mirror compensation
+
+ m_mediaRecorder->setOrientationHint(rotation);
+ }
+
+ // Set audio encoder settings
+ if (m_audioInput) {
+ m_mediaRecorder->setAudioChannels(settings.audioChannelCount());
+ m_mediaRecorder->setAudioEncodingBitRate(settings.audioBitRate());
+ m_mediaRecorder->setAudioSamplingRate(settings.audioSampleRate());
+ m_mediaRecorder->setAudioEncoder(m_audioEncoder);
+ }
+
+ QString extension = settings.mimeType().preferredSuffix();
+ // Set output file
+ auto location = outputLocation.toString(QUrl::PreferLocalFile);
+ QString filePath = location;
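+ // content: URLs (Android content providers) are passed to the recorder as-is;
+ // plain paths are resolved inside the standard Movies/Music location.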
+ if (QUrl(filePath).scheme() != QLatin1String("content")) {
+ filePath = QMediaStorageLocation::generateFileName(
+ location, m_cameraSession ? QStandardPaths::MoviesLocation
+ : QStandardPaths::MusicLocation, extension);
+ }
+
+ m_usedOutputLocation = QUrl::fromLocalFile(filePath);
+ m_outputLocationIsStandard = location.isEmpty() || QFileInfo(location).isRelative();
+ m_mediaRecorder->setOutputFile(filePath);
+
+ if (validCameraSession) {
+ m_cameraSession->disableRotation();
+ }
+
+ if (!m_mediaRecorder->prepare()) {
+ updateError(QMediaRecorder::FormatError,
+ QLatin1String("Unable to prepare the media recorder."));
+ restartViewfinder();
+
+ return;
+ }
+
+ if (!m_mediaRecorder->start()) {
+ updateError(QMediaRecorder::FormatError, QMediaRecorderPrivate::msgFailedStartRecording());
+ restartViewfinder();
+
+ return;
+ }
+
+ m_elapsedTime.start();
+ m_notifyTimer.start();
+ updateDuration();
+
+ if (validCameraSession) {
+ m_cameraSession->setReadyForCapture(false);
+
+ // Preview frame callback is cleared when setting up the camera with the media recorder.
+ // We need to reset it.
+ m_cameraSession->camera()->setupPreviewFrameCallback();
+ }
+
+ m_state = QMediaRecorder::RecordingState;
+ emit stateChanged(m_state);
+}
+
+void QAndroidCaptureSession::stop(bool error)
+{
+ if (m_state == QMediaRecorder::StoppedState || m_mediaRecorder == nullptr)
+ return;
+
+ m_mediaRecorder->stop();
+ m_notifyTimer.stop();
+ updateDuration();
+ m_elapsedTime.invalidate();
+
+ m_mediaRecorder = nullptr;
+
+ if (m_cameraSession && m_cameraSession->isActive()) {
+ // Viewport needs to be restarted after recording
+ restartViewfinder();
+ }
+
+ if (!error) {
+ // if the media is saved into the standard media location, register it
+ // with the Android media scanner so it appears immediately in apps
+ // such as the gallery.
+ if (m_outputLocationIsStandard)
+ AndroidMultimediaUtils::registerMediaFile(m_usedOutputLocation.toLocalFile());
+
+ emit actualLocationChanged(m_usedOutputLocation);
+ }
+
+ m_state = QMediaRecorder::StoppedState;
+ emit stateChanged(m_state);
+}
+
+qint64 QAndroidCaptureSession::duration() const
+{
+ return m_duration;
+}
+
+void QAndroidCaptureSession::applySettings(QMediaEncoderSettings &settings)
+{
+ // container settings
+ auto fileFormat = settings.mediaFormat().fileFormat();
+ if (!m_cameraSession && fileFormat == QMediaFormat::AAC) {
+ m_outputFormat = AndroidMediaRecorder::AAC_ADTS;
+ } else if (fileFormat == QMediaFormat::Ogg) {
+ m_outputFormat = AndroidMediaRecorder::OGG;
+ } else if (fileFormat == QMediaFormat::WebM) {
+ m_outputFormat = AndroidMediaRecorder::WEBM;
+// } else if (fileFormat == QLatin1String("3gp")) {
+// m_outputFormat = AndroidMediaRecorder::THREE_GPP;
+ } else {
+ // fallback to MP4
+ m_outputFormat = AndroidMediaRecorder::MPEG_4;
+ }
+
+ // audio settings
+ if (settings.audioChannelCount() <= 0)
+ settings.setAudioChannelCount(m_defaultSettings.audioChannels);
+ if (settings.audioBitRate() <= 0)
+ settings.setAudioBitRate(m_defaultSettings.audioBitRate);
+ if (settings.audioSampleRate() <= 0)
+ settings.setAudioSampleRate(m_defaultSettings.audioSampleRate);
+
+ if (settings.audioCodec() == QMediaFormat::AudioCodec::AAC)
+ m_audioEncoder = AndroidMediaRecorder::AAC;
+ else if (settings.audioCodec() == QMediaFormat::AudioCodec::Opus)
+ m_audioEncoder = AndroidMediaRecorder::OPUS;
+ else if (settings.audioCodec() == QMediaFormat::AudioCodec::Vorbis)
+ m_audioEncoder = AndroidMediaRecorder::VORBIS;
+ else
+ m_audioEncoder = m_defaultSettings.audioEncoder;
+
+
+ // video settings
+ if (m_cameraSession && m_cameraSession->camera()) {
+ if (settings.videoResolution().isEmpty()) {
+ settings.setVideoResolution(m_defaultSettings.videoResolution);
+ } else if (!m_supportedResolutions.contains(settings.videoResolution())) {
+ // if the requested resolution is not supported, find the closest one
+ QSize reqSize = settings.videoResolution();
+ int reqPixelCount = reqSize.width() * reqSize.height();
+ QList<int> supportedPixelCounts;
+ for (int i = 0; i < m_supportedResolutions.size(); ++i) {
+ const QSize &s = m_supportedResolutions.at(i);
+ supportedPixelCounts.append(s.width() * s.height());
+ }
+ int closestIndex = qt_findClosestValue(supportedPixelCounts, reqPixelCount);
+ settings.setVideoResolution(m_supportedResolutions.at(closestIndex));
+ }
+
+ if (settings.videoFrameRate() <= 0)
+ settings.setVideoFrameRate(m_defaultSettings.videoFrameRate);
+ if (settings.videoBitRate() <= 0)
+ settings.setVideoBitRate(m_defaultSettings.videoBitRate);
+
+ if (settings.videoCodec() == QMediaFormat::VideoCodec::H264)
+ m_videoEncoder = AndroidMediaRecorder::H264;
+ else if (settings.videoCodec() == QMediaFormat::VideoCodec::H265)
+ m_videoEncoder = AndroidMediaRecorder::HEVC;
+ else if (settings.videoCodec() == QMediaFormat::VideoCodec::MPEG4)
+ m_videoEncoder = AndroidMediaRecorder::MPEG_4_SP;
+ else
+ m_videoEncoder = m_defaultSettings.videoEncoder;
+
+ }
+}
+
+void QAndroidCaptureSession::restartViewfinder()
+{
+ setKeepAlive(false);
+
+ if (!m_cameraSession)
+ return;
+
+ if (m_cameraSession->camera()) {
+ m_cameraSession->camera()->reconnect();
+ m_cameraSession->camera()->stopPreviewSynchronous();
+ m_cameraSession->camera()->startPreview();
+ m_cameraSession->setReadyForCapture(true);
+ m_cameraSession->enableRotation();
+ }
+
+ m_mediaRecorder = nullptr;
+}
+
+void QAndroidCaptureSession::updateDuration()
+{
+ if (m_elapsedTime.isValid())
+ m_duration = m_elapsedTime.elapsed();
+
+ emit durationChanged(m_duration);
+}
+
+void QAndroidCaptureSession::onCameraOpened()
+{
+ m_supportedResolutions.clear();
+ m_supportedFramerates.clear();
+
+ // get supported resolutions from predefined profiles
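+ // Quality ids 0..7 correspond to CamcorderProfile QUALITY_LOW through QUALITY_QVGA.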
+ for (int i = 0; i < 8; ++i) {
+ CaptureProfile profile = getProfile(i);
+ if (!profile.isNull) {
+ if (i == AndroidCamcorderProfile::QUALITY_HIGH)
+ m_defaultSettings = profile;
+
+ if (!m_supportedResolutions.contains(profile.videoResolution))
+ m_supportedResolutions.append(profile.videoResolution);
+ if (!m_supportedFramerates.contains(profile.videoFrameRate))
+ m_supportedFramerates.append(profile.videoFrameRate);
+ }
+ }
+
+ std::sort(m_supportedResolutions.begin(), m_supportedResolutions.end(), qt_sizeLessThan);
+ std::sort(m_supportedFramerates.begin(), m_supportedFramerates.end());
+
+ QMediaEncoderSettings defaultSettings;
+ applySettings(defaultSettings);
+ m_cameraSession->applyResolution(defaultSettings.videoResolution());
+}
+
+QAndroidCaptureSession::CaptureProfile QAndroidCaptureSession::getProfile(int id)
+{
+ CaptureProfile profile;
+ const bool hasProfile = AndroidCamcorderProfile::hasProfile(m_cameraSession->camera()->cameraId(),
+ AndroidCamcorderProfile::Quality(id));
+
+ if (hasProfile) {
+ AndroidCamcorderProfile camProfile = AndroidCamcorderProfile::get(m_cameraSession->camera()->cameraId(),
+ AndroidCamcorderProfile::Quality(id));
+
+ profile.outputFormat = AndroidMediaRecorder::OutputFormat(camProfile.getValue(AndroidCamcorderProfile::fileFormat));
+ profile.audioEncoder = AndroidMediaRecorder::AudioEncoder(camProfile.getValue(AndroidCamcorderProfile::audioCodec));
+ profile.audioBitRate = camProfile.getValue(AndroidCamcorderProfile::audioBitRate);
+ profile.audioChannels = camProfile.getValue(AndroidCamcorderProfile::audioChannels);
+ profile.audioSampleRate = camProfile.getValue(AndroidCamcorderProfile::audioSampleRate);
+ profile.videoEncoder = AndroidMediaRecorder::VideoEncoder(camProfile.getValue(AndroidCamcorderProfile::videoCodec));
+ profile.videoBitRate = camProfile.getValue(AndroidCamcorderProfile::videoBitRate);
+ profile.videoFrameRate = camProfile.getValue(AndroidCamcorderProfile::videoFrameRate);
+ profile.videoResolution = QSize(camProfile.getValue(AndroidCamcorderProfile::videoFrameWidth),
+ camProfile.getValue(AndroidCamcorderProfile::videoFrameHeight));
+
+ if (profile.outputFormat == AndroidMediaRecorder::MPEG_4)
+ profile.outputFileExtension = QStringLiteral("mp4");
+ else if (profile.outputFormat == AndroidMediaRecorder::THREE_GPP)
+ profile.outputFileExtension = QStringLiteral("3gp");
+ else if (profile.outputFormat == AndroidMediaRecorder::AMR_NB_Format)
+ profile.outputFileExtension = QStringLiteral("amr");
+ else if (profile.outputFormat == AndroidMediaRecorder::AMR_WB_Format)
+ profile.outputFileExtension = QStringLiteral("awb");
+
+ profile.isNull = false;
+ }
+
+ return profile;
+}
+
+void QAndroidCaptureSession::onError(int what, int extra)
+{
+ Q_UNUSED(what);
+ Q_UNUSED(extra);
+ stop(true);
+ updateError(QMediaRecorder::ResourceError, QLatin1String("Unknown error."));
+}
+
+void QAndroidCaptureSession::onInfo(int what, int extra)
+{
+ Q_UNUSED(extra);
+ if (what == 800) {
+ // MEDIA_RECORDER_INFO_MAX_DURATION_REACHED
+ stop();
+ updateError(QMediaRecorder::OutOfSpaceError, QLatin1String("Maximum duration reached."));
+ } else if (what == 801) {
+ // MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED
+ stop();
+ updateError(QMediaRecorder::OutOfSpaceError, QLatin1String("Maximum file size reached."));
+ }
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qandroidcapturesession_p.cpp"
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h b/src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h
new file mode 100644
index 000000000..161d47994
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h
@@ -0,0 +1,158 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDCAPTURESESSION_H
+#define QANDROIDCAPTURESESSION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qobject.h>
+#include <qmediarecorder.h>
+#include <qurl.h>
+#include <qelapsedtimer.h>
+#include <qtimer.h>
+#include "androidmediarecorder_p.h"
+#include "qandroidmediaencoder_p.h"
+
+QT_BEGIN_NAMESPACE
+
+class QAudioInput;
+class QAndroidCameraSession;
+
+class QAndroidCaptureSession : public QObject
+{
+ Q_OBJECT
+public:
+ explicit QAndroidCaptureSession();
+ ~QAndroidCaptureSession();
+
+ QList<QSize> supportedResolutions() const { return m_supportedResolutions; }
+ QList<qreal> supportedFrameRates() const { return m_supportedFramerates; }
+
+ void setCameraSession(QAndroidCameraSession *cameraSession = 0);
+ void setAudioInput(QPlatformAudioInput *input);
+ void setAudioOutput(QPlatformAudioOutput *output);
+
+ QMediaRecorder::RecorderState state() const;
+
+ void start(QMediaEncoderSettings &settings, const QUrl &outputLocation);
+ void stop(bool error = false);
+
+ qint64 duration() const;
+
+ QMediaEncoderSettings encoderSettings() { return m_encoderSettings; }
+
+ void setMediaEncoder(QAndroidMediaEncoder *encoder) { m_mediaEncoder = encoder; }
+
+ void stateChanged(QMediaRecorder::RecorderState state) {
+ if (m_mediaEncoder)
+ m_mediaEncoder->stateChanged(state);
+ }
+ void durationChanged(qint64 position)
+ {
+ if (m_mediaEncoder)
+ m_mediaEncoder->durationChanged(position);
+ }
+ void actualLocationChanged(const QUrl &location)
+ {
+ if (m_mediaEncoder)
+ m_mediaEncoder->actualLocationChanged(location);
+ }
+ void updateError(int error, const QString &errorString)
+ {
+ if (m_mediaEncoder)
+ m_mediaEncoder->updateError(QMediaRecorder::Error(error), errorString);
+ }
+
+private Q_SLOTS:
+ void updateDuration();
+ void onCameraOpened();
+
+ void onError(int what, int extra);
+ void onInfo(int what, int extra);
+
+private:
+ void applySettings(QMediaEncoderSettings &settings);
+
+ struct CaptureProfile {
+ AndroidMediaRecorder::OutputFormat outputFormat;
+ QString outputFileExtension;
+
+ AndroidMediaRecorder::AudioEncoder audioEncoder;
+ int audioBitRate;
+ int audioChannels;
+ int audioSampleRate;
+
+ AndroidMediaRecorder::VideoEncoder videoEncoder;
+ int videoBitRate;
+ int videoFrameRate;
+ QSize videoResolution;
+
+ bool isNull;
+
+ CaptureProfile()
+ : outputFormat(AndroidMediaRecorder::MPEG_4)
+ , outputFileExtension(QLatin1String("mp4"))
+ , audioEncoder(AndroidMediaRecorder::DefaultAudioEncoder)
+ , audioBitRate(128000)
+ , audioChannels(2)
+ , audioSampleRate(44100)
+ , videoEncoder(AndroidMediaRecorder::DefaultVideoEncoder)
+ , videoBitRate(1)
+ , videoFrameRate(-1)
+ , videoResolution(1280, 720)
+ , isNull(true)
+ { }
+ };
+
+ CaptureProfile getProfile(int id);
+
+ void restartViewfinder();
+ void updateStreamingState();
+
+ QAndroidMediaEncoder *m_mediaEncoder = nullptr;
+ std::shared_ptr<AndroidMediaRecorder> m_mediaRecorder;
+ QAndroidCameraSession *m_cameraSession;
+
+ QPlatformAudioInput *m_audioInput = nullptr;
+ QPlatformAudioOutput *m_audioOutput = nullptr;
+
+ QElapsedTimer m_elapsedTime;
+ QTimer m_notifyTimer;
+ qint64 m_duration;
+
+ QMediaRecorder::RecorderState m_state;
+ QUrl m_usedOutputLocation;
+ bool m_outputLocationIsStandard = false;
+
+ CaptureProfile m_defaultSettings;
+
+ QMediaEncoderSettings m_encoderSettings;
+ AndroidMediaRecorder::OutputFormat m_outputFormat;
+ AndroidMediaRecorder::AudioEncoder m_audioEncoder;
+ AndroidMediaRecorder::VideoEncoder m_videoEncoder;
+
+ QList<QSize> m_supportedResolutions;
+ QList<qreal> m_supportedFramerates;
+
+ QMetaObject::Connection m_audioInputChanged;
+ QMetaObject::Connection m_audioOutputChanged;
+ QMetaObject::Connection m_connOpenCamera;
+ QMetaObject::Connection m_connActiveChangedCamera;
+
+ void setKeepAlive(bool keepAlive);
+
+};
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDCAPTURESESSION_H
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidimagecapture.cpp b/src/plugins/multimedia/android/mediacapture/qandroidimagecapture.cpp
new file mode 100644
index 000000000..4105851ed
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidimagecapture.cpp
@@ -0,0 +1,73 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidimagecapture_p.h"
+
+#include "qandroidcamerasession_p.h"
+#include "qandroidmediacapturesession_p.h"
+
+QT_BEGIN_NAMESPACE
+
+QAndroidImageCapture::QAndroidImageCapture(QImageCapture *parent)
+ : QPlatformImageCapture(parent)
+{
+}
+
+bool QAndroidImageCapture::isReadyForCapture() const
+{
+ return m_session->isReadyForCapture();
+}
+
+int QAndroidImageCapture::capture(const QString &fileName)
+{
+ return m_session->capture(fileName);
+}
+
+int QAndroidImageCapture::captureToBuffer()
+{
+ return m_session->captureToBuffer();
+}
+
+QImageEncoderSettings QAndroidImageCapture::imageSettings() const
+{
+ return m_session->imageSettings();
+}
+
+void QAndroidImageCapture::setImageSettings(const QImageEncoderSettings &settings)
+{
+ m_session->setImageSettings(settings);
+}
+
+void QAndroidImageCapture::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+ QAndroidMediaCaptureSession *captureSession = static_cast<QAndroidMediaCaptureSession *>(session);
+ if (m_service == captureSession)
+ return;
+
+ m_service = captureSession;
+ if (!m_service) {
+ disconnect(m_session, nullptr, this, nullptr);
+ return;
+ }
+
+ m_session = m_service->cameraSession();
+ Q_ASSERT(m_session);
+
+ connect(m_session, &QAndroidCameraSession::readyForCaptureChanged,
+ this, &QAndroidImageCapture::readyForCaptureChanged);
+ connect(m_session, &QAndroidCameraSession::imageExposed,
+ this, &QAndroidImageCapture::imageExposed);
+ connect(m_session, &QAndroidCameraSession::imageCaptured,
+ this, &QAndroidImageCapture::imageCaptured);
+ connect(m_session, &QAndroidCameraSession::imageMetadataAvailable,
+ this, &QAndroidImageCapture::imageMetadataAvailable);
+ connect(m_session, &QAndroidCameraSession::imageAvailable,
+ this, &QAndroidImageCapture::imageAvailable);
+ connect(m_session, &QAndroidCameraSession::imageSaved,
+ this, &QAndroidImageCapture::imageSaved);
+ connect(m_session, &QAndroidCameraSession::imageCaptureError,
+ this, &QAndroidImageCapture::error);
+}
+QT_END_NAMESPACE
+
+#include "moc_qandroidimagecapture_p.cpp"
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidimagecapture_p.h b/src/plugins/multimedia/android/mediacapture/qandroidimagecapture_p.h
new file mode 100644
index 000000000..ac273c195
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidimagecapture_p.h
@@ -0,0 +1,48 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDCAMERAIMAGECAPTURECONTROL_H
+#define QANDROIDCAMERAIMAGECAPTURECONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformimagecapture_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAndroidCameraSession;
+class QAndroidMediaCaptureSession;
+
+class QAndroidImageCapture : public QPlatformImageCapture
+{
+ Q_OBJECT
+public:
+ explicit QAndroidImageCapture(QImageCapture *parent = nullptr);
+
+ bool isReadyForCapture() const override;
+
+ int capture(const QString &fileName) override;
+ int captureToBuffer() override;
+
+ QImageEncoderSettings imageSettings() const override;
+ void setImageSettings(const QImageEncoderSettings &settings) override;
+
+ void setCaptureSession(QPlatformMediaCaptureSession *session);
+
+private:
+ QAndroidCameraSession *m_session;
+ QAndroidMediaCaptureSession *m_service;
+};
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDCAMERAIMAGECAPTURECONTROL_H
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidmediacapturesession.cpp b/src/plugins/multimedia/android/mediacapture/qandroidmediacapturesession.cpp
new file mode 100644
index 000000000..e2b551d35
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidmediacapturesession.cpp
@@ -0,0 +1,115 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// Copyright (C) 2016 Ruslan Baratov
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidmediacapturesession_p.h"
+
+#include "qandroidmediaencoder_p.h"
+#include "qandroidcapturesession_p.h"
+#include "qandroidcamera_p.h"
+#include "qandroidcamerasession_p.h"
+#include "qandroidimagecapture_p.h"
+#include "qmediadevices.h"
+#include "qaudiodevice.h"
+
+QT_BEGIN_NAMESPACE
+
+QAndroidMediaCaptureSession::QAndroidMediaCaptureSession()
+ : m_captureSession(new QAndroidCaptureSession())
+ , m_cameraSession(new QAndroidCameraSession())
+{
+}
+
+QAndroidMediaCaptureSession::~QAndroidMediaCaptureSession()
+{
+ delete m_captureSession;
+ delete m_cameraSession;
+}
+
+QPlatformCamera *QAndroidMediaCaptureSession::camera()
+{
+ return m_cameraControl;
+}
+
+void QAndroidMediaCaptureSession::setCamera(QPlatformCamera *camera)
+{
+ if (camera) {
+ m_captureSession->setCameraSession(m_cameraSession);
+ } else {
+ m_captureSession->setCameraSession(nullptr);
+ }
+
+ QAndroidCamera *control = static_cast<QAndroidCamera *>(camera);
+ if (m_cameraControl == control)
+ return;
+
+ if (m_cameraControl)
+ m_cameraControl->setCaptureSession(nullptr);
+
+ m_cameraControl = control;
+ if (m_cameraControl)
+ m_cameraControl->setCaptureSession(this);
+
+ emit cameraChanged();
+}
+
+QPlatformImageCapture *QAndroidMediaCaptureSession::imageCapture()
+{
+ return m_imageCaptureControl;
+}
+
+void QAndroidMediaCaptureSession::setImageCapture(QPlatformImageCapture *imageCapture)
+{
+ QAndroidImageCapture *control = static_cast<QAndroidImageCapture *>(imageCapture);
+ if (m_imageCaptureControl == control)
+ return;
+
+ if (m_imageCaptureControl)
+ m_imageCaptureControl->setCaptureSession(nullptr);
+
+ m_imageCaptureControl = control;
+ if (m_imageCaptureControl)
+ m_imageCaptureControl->setCaptureSession(this);
+}
+
+QPlatformMediaRecorder *QAndroidMediaCaptureSession::mediaRecorder()
+{
+ return m_encoder;
+}
+
+void QAndroidMediaCaptureSession::setMediaRecorder(QPlatformMediaRecorder *recorder)
+{
+ QAndroidMediaEncoder *control = static_cast<QAndroidMediaEncoder *>(recorder);
+
+ if (m_encoder == control)
+ return;
+
+ if (m_encoder)
+ m_encoder->setCaptureSession(nullptr);
+
+ m_encoder = control;
+ if (m_encoder)
+ m_encoder->setCaptureSession(this);
+
+ emit encoderChanged();
+}
+
+void QAndroidMediaCaptureSession::setAudioInput(QPlatformAudioInput *input)
+{
+ m_captureSession->setAudioInput(input);
+}
+
+void QAndroidMediaCaptureSession::setAudioOutput(QPlatformAudioOutput *output)
+{
+ m_captureSession->setAudioOutput(output);
+}
+
+void QAndroidMediaCaptureSession::setVideoPreview(QVideoSink *sink)
+{
+ m_cameraSession->setVideoSink(sink);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qandroidmediacapturesession_p.cpp"
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidmediacapturesession_p.h b/src/plugins/multimedia/android/mediacapture/qandroidmediacapturesession_p.h
new file mode 100644
index 000000000..90c792c32
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidmediacapturesession_p.h
@@ -0,0 +1,66 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// Copyright (C) 2016 Ruslan Baratov
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDCAPTURESERVICE_H
+#define QANDROIDCAPTURESERVICE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediacapture_p.h>
+#include <private/qplatformmediaintegration_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAndroidMediaEncoder;
+class QAndroidCaptureSession;
+class QAndroidCamera;
+class QAndroidCameraSession;
+class QAndroidImageCapture;
+
+class QAndroidMediaCaptureSession : public QPlatformMediaCaptureSession
+{
+ Q_OBJECT
+
+public:
+ explicit QAndroidMediaCaptureSession();
+ virtual ~QAndroidMediaCaptureSession();
+
+ QPlatformCamera *camera() override;
+ void setCamera(QPlatformCamera *camera) override;
+
+ QPlatformImageCapture *imageCapture() override;
+ void setImageCapture(QPlatformImageCapture *imageCapture) override;
+
+ QPlatformMediaRecorder *mediaRecorder() override;
+ void setMediaRecorder(QPlatformMediaRecorder *recorder) override;
+
+ void setAudioInput(QPlatformAudioInput *input) override;
+
+ void setVideoPreview(QVideoSink *sink) override;
+
+ void setAudioOutput(QPlatformAudioOutput *output) override;
+
+ QAndroidCaptureSession *captureSession() const { return m_captureSession; }
+ QAndroidCameraSession *cameraSession() const { return m_cameraSession; }
+
+private:
+ QAndroidMediaEncoder *m_encoder = nullptr;
+ QAndroidCaptureSession *m_captureSession = nullptr;
+ QAndroidCamera *m_cameraControl = nullptr;
+ QAndroidCameraSession *m_cameraSession = nullptr;
+ QAndroidImageCapture *m_imageCaptureControl = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDCAPTURESERVICE_H
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidmediaencoder.cpp b/src/plugins/multimedia/android/mediacapture/qandroidmediaencoder.cpp
new file mode 100644
index 000000000..d3449312d
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidmediaencoder.cpp
@@ -0,0 +1,72 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidmediaencoder_p.h"
+#include "qandroidmultimediautils_p.h"
+#include "qandroidcapturesession_p.h"
+#include "qandroidmediacapturesession_p.h"
+
+QT_BEGIN_NAMESPACE
+
+QAndroidMediaEncoder::QAndroidMediaEncoder(QMediaRecorder *parent)
+ : QPlatformMediaRecorder(parent)
+{
+}
+
+bool QAndroidMediaEncoder::isLocationWritable(const QUrl &location) const
+{
+ return location.isValid()
+ && (location.isLocalFile() || location.isRelative());
+}
+
+QMediaRecorder::RecorderState QAndroidMediaEncoder::state() const
+{
+ return m_session ? m_session->state() : QMediaRecorder::StoppedState;
+}
+
+qint64 QAndroidMediaEncoder::duration() const
+{
+ return m_session ? m_session->duration() : 0;
+}
+
+void QAndroidMediaEncoder::record(QMediaEncoderSettings &settings)
+{
+ if (m_session)
+ m_session->start(settings, outputLocation());
+}
+
+void QAndroidMediaEncoder::stop()
+{
+ if (m_session)
+ m_session->stop();
+}
+
+void QAndroidMediaEncoder::setOutputLocation(const QUrl &location)
+{
+ if (location.isLocalFile()) {
+ qt_androidRequestWriteStoragePermission();
+ }
+ QPlatformMediaRecorder::setOutputLocation(location);
+}
+
+void QAndroidMediaEncoder::setCaptureSession(QPlatformMediaCaptureSession *session)
+{
+ QAndroidMediaCaptureSession *captureSession = static_cast<QAndroidMediaCaptureSession *>(session);
+ if (m_service == captureSession)
+ return;
+
+ if (m_service)
+ stop();
+ if (m_session)
+ m_session->setMediaEncoder(nullptr);
+
+ m_service = captureSession;
+ if (!m_service)
+ return;
+ m_session = m_service->captureSession();
+ Q_ASSERT(m_session);
+ m_session->setMediaEncoder(this);
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidmediaencoder_p.h b/src/plugins/multimedia/android/mediacapture/qandroidmediaencoder_p.h
new file mode 100644
index 000000000..b46268449
--- /dev/null
+++ b/src/plugins/multimedia/android/mediacapture/qandroidmediaencoder_p.h
@@ -0,0 +1,50 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDMEDIAENCODER_H
+#define QANDROIDMEDIAENCODER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediarecorder_p.h>
+#include <private/qplatformmediacapture_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAndroidCaptureSession;
+class QAndroidMediaCaptureSession;
+
+class QAndroidMediaEncoder : public QPlatformMediaRecorder
+{
+public:
+ explicit QAndroidMediaEncoder(QMediaRecorder *parent);
+
+ bool isLocationWritable(const QUrl &location) const override;
+ QMediaRecorder::RecorderState state() const override;
+ qint64 duration() const override;
+
+ void setCaptureSession(QPlatformMediaCaptureSession *session);
+
+ void setOutputLocation(const QUrl &location) override;
+ void record(QMediaEncoderSettings &settings) override;
+ void stop() override;
+
+private:
+ friend class QAndroidCaptureSession;
+
+ QAndroidCaptureSession *m_session = nullptr;
+ QAndroidMediaCaptureSession *m_service = nullptr;
+};
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDMEDIAENCODER_H
diff --git a/src/plugins/multimedia/android/mediaplayer/qandroidmediaplayer.cpp b/src/plugins/multimedia/android/mediaplayer/qandroidmediaplayer.cpp
new file mode 100644
index 000000000..b257a8986
--- /dev/null
+++ b/src/plugins/multimedia/android/mediaplayer/qandroidmediaplayer.cpp
@@ -0,0 +1,999 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidmediaplayer_p.h"
+#include "androidmediaplayer_p.h"
+#include "qandroidvideooutput_p.h"
+#include "qandroidmetadata_p.h"
+#include "qandroidaudiooutput_p.h"
+#include "qaudiooutput.h"
+
+#include <private/qplatformvideosink_p.h>
+#include <qloggingcategory.h>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(lcMediaPlayer, "qt.multimedia.mediaplayer.android")
+
+class StateChangeNotifier
+{
+public:
+ StateChangeNotifier(QAndroidMediaPlayer *mp)
+ : mControl(mp)
+ , mPreviousState(mp->state())
+ , mPreviousMediaStatus(mp->mediaStatus())
+ {
+ ++mControl->mActiveStateChangeNotifiers;
+ }
+
+ ~StateChangeNotifier()
+ {
+ if (--mControl->mActiveStateChangeNotifiers)
+ return;
+
+ if (mPreviousMediaStatus != mControl->mediaStatus())
+ Q_EMIT mControl->mediaStatusChanged(mControl->mediaStatus());
+
+ if (mPreviousState != mControl->state())
+ Q_EMIT mControl->stateChanged(mControl->state());
+ }
+
+private:
+ QAndroidMediaPlayer *mControl;
+ QMediaPlayer::PlaybackState mPreviousState;
+ QMediaPlayer::MediaStatus mPreviousMediaStatus;
+};
+
+QAndroidMediaPlayer::QAndroidMediaPlayer(QMediaPlayer *parent)
+ : QPlatformMediaPlayer(parent),
+ mMediaPlayer(new AndroidMediaPlayer),
+ mState(AndroidMediaPlayer::Uninitialized)
+{
+    // Seekable is true by default; it changes when MEDIA_INFO_NOT_SEEKABLE is received.
+ seekableChanged(true);
+ connect(mMediaPlayer, &AndroidMediaPlayer::bufferingChanged, this,
+ &QAndroidMediaPlayer::onBufferingChanged);
+ connect(mMediaPlayer, &AndroidMediaPlayer::info, this, &QAndroidMediaPlayer::onInfo);
+ connect(mMediaPlayer, &AndroidMediaPlayer::error, this, &QAndroidMediaPlayer::onError);
+ connect(mMediaPlayer, &AndroidMediaPlayer::stateChanged, this,
+ &QAndroidMediaPlayer::onStateChanged);
+ connect(mMediaPlayer, &AndroidMediaPlayer::videoSizeChanged, this,
+ &QAndroidMediaPlayer::onVideoSizeChanged);
+ connect(mMediaPlayer, &AndroidMediaPlayer::progressChanged, this,
+ &QAndroidMediaPlayer::positionChanged);
+ connect(mMediaPlayer, &AndroidMediaPlayer::durationChanged, this,
+ &QAndroidMediaPlayer::durationChanged);
+ connect(mMediaPlayer, &AndroidMediaPlayer::tracksInfoChanged, this,
+ &QAndroidMediaPlayer::updateTrackInfo);
+}
+
+QAndroidMediaPlayer::~QAndroidMediaPlayer()
+{
+ if (m_videoSink)
+ disconnect(m_videoSink->platformVideoSink(), nullptr, this, nullptr);
+
+ mMediaPlayer->disconnect();
+ mMediaPlayer->release();
+ delete mMediaPlayer;
+}
+
+qint64 QAndroidMediaPlayer::duration() const
+{
+ if (mediaStatus() == QMediaPlayer::NoMedia)
+ return 0;
+
+ if ((mState & (AndroidMediaPlayer::Prepared
+ | AndroidMediaPlayer::Started
+ | AndroidMediaPlayer::Paused
+ | AndroidMediaPlayer::Stopped
+ | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
+ return 0;
+ }
+
+ return mMediaPlayer->getDuration();
+}
+
+qint64 QAndroidMediaPlayer::position() const
+{
+ if (mediaStatus() == QMediaPlayer::EndOfMedia)
+ return duration();
+
+ if ((mState & (AndroidMediaPlayer::Prepared
+ | AndroidMediaPlayer::Started
+ | AndroidMediaPlayer::Paused
+ | AndroidMediaPlayer::PlaybackCompleted))) {
+ return mMediaPlayer->getCurrentPosition();
+ }
+
+ return (mPendingPosition == -1) ? 0 : mPendingPosition;
+}
+
+void QAndroidMediaPlayer::setPosition(qint64 position)
+{
+ if (!isSeekable())
+ return;
+
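+    // the underlying seek takes a 32-bit millisecond offset, so clamp the position to INT_MAX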
+ const int seekPosition = (position > INT_MAX) ? INT_MAX : position;
+
+ qint64 currentPosition = mMediaPlayer->getCurrentPosition();
+ if (seekPosition == currentPosition) {
+ // update position - will send a new frame of this position
+ // for consistency with other platforms
+ mMediaPlayer->seekTo(seekPosition);
+ return;
+ }
+ StateChangeNotifier notifier(this);
+
+ if (mediaStatus() == QMediaPlayer::EndOfMedia)
+ setMediaStatus(QMediaPlayer::LoadedMedia);
+
+ if ((mState & (AndroidMediaPlayer::Prepared
+ | AndroidMediaPlayer::Started
+ | AndroidMediaPlayer::Paused
+ | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
+ mPendingPosition = seekPosition;
+ } else {
+ mMediaPlayer->seekTo(seekPosition);
+
+ if (mPendingPosition != -1) {
+ mPendingPosition = -1;
+ }
+ }
+
+ Q_EMIT positionChanged(seekPosition);
+}
+
+void QAndroidMediaPlayer::setVolume(float volume)
+{
+ if ((mState & (AndroidMediaPlayer::Idle
+ | AndroidMediaPlayer::Initialized
+ | AndroidMediaPlayer::Stopped
+ | AndroidMediaPlayer::Prepared
+ | AndroidMediaPlayer::Started
+ | AndroidMediaPlayer::Paused
+ | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
+ mPendingVolume = volume;
+ return;
+ }
+
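+    // the wrapper expects the volume as an integer in the 0-100 range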
+ mMediaPlayer->setVolume(qRound(volume*100.));
+ mPendingVolume = -1;
+}
+
+void QAndroidMediaPlayer::setMuted(bool muted)
+{
+ if ((mState & (AndroidMediaPlayer::Idle
+ | AndroidMediaPlayer::Initialized
+ | AndroidMediaPlayer::Stopped
+ | AndroidMediaPlayer::Prepared
+ | AndroidMediaPlayer::Started
+ | AndroidMediaPlayer::Paused
+ | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
+ mPendingMute = muted;
+ return;
+ }
+
+ mMediaPlayer->setMuted(muted);
+ mPendingMute = -1;
+}
+
+QMediaMetaData QAndroidMediaPlayer::metaData() const
+{
+ return QAndroidMetaData::extractMetadata(mMediaContent);
+}
+
+float QAndroidMediaPlayer::bufferProgress() const
+{
+    return mBufferFilled ? 1.f : mBufferPercent / 100.f;
+}
+
+bool QAndroidMediaPlayer::isAudioAvailable() const
+{
+ return mAudioAvailable;
+}
+
+bool QAndroidMediaPlayer::isVideoAvailable() const
+{
+ return mVideoAvailable;
+}
+
+QMediaTimeRange QAndroidMediaPlayer::availablePlaybackRanges() const
+{
+ return mAvailablePlaybackRange;
+}
+
+void QAndroidMediaPlayer::updateAvailablePlaybackRanges()
+{
+ if (mBuffering) {
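+        // mBufferPercent is reported by Android as a value in the 0-100 range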
+ const qint64 pos = position();
+ const qint64 end = (duration() / 100) * mBufferPercent;
+ mAvailablePlaybackRange.addInterval(pos, end);
+ } else if (isSeekable()) {
+ mAvailablePlaybackRange = QMediaTimeRange(0, duration());
+ } else {
+ mAvailablePlaybackRange = QMediaTimeRange();
+ }
+
+// #### Q_EMIT availablePlaybackRangesChanged(mAvailablePlaybackRange);
+}
+
+qreal QAndroidMediaPlayer::playbackRate() const
+{
+ return mCurrentPlaybackRate;
+}
+
+void QAndroidMediaPlayer::setPlaybackRate(qreal rate)
+{
+ if (mState != AndroidMediaPlayer::Started) {
+        // If the video isn't playing, changing the playback rate may start it automatically,
+        // so the change needs to be postponed until playback starts.
+ if (mCurrentPlaybackRate != rate) {
+ mCurrentPlaybackRate = rate;
+ mHasPendingPlaybackRate = true;
+ Q_EMIT playbackRateChanged(rate);
+ }
+ return;
+ }
+
+ if (mMediaPlayer->setPlaybackRate(rate)) {
+ mCurrentPlaybackRate = rate;
+ Q_EMIT playbackRateChanged(rate);
+ }
+}
+
+QUrl QAndroidMediaPlayer::media() const
+{
+ return mMediaContent;
+}
+
+const QIODevice *QAndroidMediaPlayer::mediaStream() const
+{
+ return mMediaStream;
+}
+
+void QAndroidMediaPlayer::setMedia(const QUrl &mediaContent,
+ QIODevice *stream)
+{
+ StateChangeNotifier notifier(this);
+
+ mReloadingMedia = (mMediaContent == mediaContent) && !mPendingSetMedia;
+
+ if (!mReloadingMedia) {
+ mMediaContent = mediaContent;
+ mMediaStream = stream;
+ }
+
+ if (mediaContent.isEmpty()) {
+ setMediaStatus(QMediaPlayer::NoMedia);
+ } else {
+ if (mVideoOutput && !mVideoOutput->isReady()) {
+ // if a video output is set but the video texture is not ready, delay loading the media
+ // since it can cause problems on some hardware
+ mPendingSetMedia = true;
+ return;
+ }
+
+ if (mVideoSize.isValid() && mVideoOutput)
+ mVideoOutput->setVideoSize(mVideoSize);
+
+ if (mVideoOutput &&
+ (mMediaPlayer->display() == 0 || mVideoOutput->shouldTextureBeUpdated()))
+ mMediaPlayer->setDisplay(mVideoOutput->surfaceTexture());
+ mMediaPlayer->setDataSource(QNetworkRequest(mediaContent));
+ mMediaPlayer->prepareAsync();
+
+ if (!mReloadingMedia)
+ setMediaStatus(QMediaPlayer::LoadingMedia);
+ }
+
+ resetBufferingProgress();
+
+ mReloadingMedia = false;
+}
+
+void QAndroidMediaPlayer::setVideoSink(QVideoSink *sink)
+{
+ if (m_videoSink == sink)
+ return;
+
+ if (m_videoSink)
+ disconnect(m_videoSink->platformVideoSink(), nullptr, this, nullptr);
+
+ m_videoSink = sink;
+
+ if (!m_videoSink) {
+ return;
+ }
+
+ if (mVideoOutput) {
+ delete mVideoOutput;
+ mVideoOutput = nullptr;
+ mMediaPlayer->setDisplay(nullptr);
+ }
+
+ mVideoOutput = new QAndroidTextureVideoOutput(sink, this);
+ connect(mVideoOutput, &QAndroidTextureVideoOutput::readyChanged, this,
+ &QAndroidMediaPlayer::onVideoOutputReady);
+ connect(mMediaPlayer, &AndroidMediaPlayer::timedTextChanged, mVideoOutput,
+ &QAndroidTextureVideoOutput::setSubtitle);
+
+ if (mVideoOutput->isReady())
+ mMediaPlayer->setDisplay(mVideoOutput->surfaceTexture());
+
+ connect(m_videoSink->platformVideoSink(), &QPlatformVideoSink::rhiChanged, this, [&]()
+ { mMediaPlayer->setDisplay(mVideoOutput->surfaceTexture()); });
+}
+
+void QAndroidMediaPlayer::setAudioOutput(QPlatformAudioOutput *output)
+{
+ if (m_audioOutput == output)
+ return;
+ if (m_audioOutput)
+ m_audioOutput->q->disconnect(this);
+ m_audioOutput = static_cast<QAndroidAudioOutput *>(output);
+ if (m_audioOutput) {
+ connect(m_audioOutput->q, &QAudioOutput::deviceChanged, this, &QAndroidMediaPlayer::updateAudioDevice);
+ connect(m_audioOutput->q, &QAudioOutput::volumeChanged, this, &QAndroidMediaPlayer::setVolume);
+ connect(m_audioOutput->q, &QAudioOutput::mutedChanged, this, &QAndroidMediaPlayer::setMuted);
+ updateAudioDevice();
+ }
+}
+
+void QAndroidMediaPlayer::updateAudioDevice()
+{
+ if (m_audioOutput)
+ mMediaPlayer->setAudioOutput(m_audioOutput->device.id());
+}
+
+void QAndroidMediaPlayer::play()
+{
+ StateChangeNotifier notifier(this);
+
+ resetCurrentLoop();
+
+ // We need to prepare the mediaplayer again.
+ if ((mState & AndroidMediaPlayer::Stopped) && !mMediaContent.isEmpty()) {
+ setMedia(mMediaContent, mMediaStream);
+ }
+
+ if (!mMediaContent.isEmpty())
+ stateChanged(QMediaPlayer::PlayingState);
+
+ if ((mState & (AndroidMediaPlayer::Prepared
+ | AndroidMediaPlayer::Started
+ | AndroidMediaPlayer::Paused
+ | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
+ mPendingState = QMediaPlayer::PlayingState;
+ return;
+ }
+
+ if (mVideoOutput)
+ mVideoOutput->start();
+
+ updateAudioDevice();
+
+ if (mHasPendingPlaybackRate) {
+ mHasPendingPlaybackRate = false;
+ if (mMediaPlayer->setPlaybackRate(mCurrentPlaybackRate))
+ return;
+ mCurrentPlaybackRate = mMediaPlayer->playbackRate();
+ Q_EMIT playbackRateChanged(mCurrentPlaybackRate);
+ }
+
+ mMediaPlayer->play();
+}
+
+void QAndroidMediaPlayer::pause()
+{
+ // cannot pause without media
+ if (mediaStatus() == QMediaPlayer::NoMedia)
+ return;
+
+ StateChangeNotifier notifier(this);
+
+ stateChanged(QMediaPlayer::PausedState);
+
+ if ((mState & (AndroidMediaPlayer::Started
+ | AndroidMediaPlayer::Paused
+ | AndroidMediaPlayer::PlaybackCompleted
+ | AndroidMediaPlayer::Prepared
+ | AndroidMediaPlayer::Stopped)) == 0) {
+ mPendingState = QMediaPlayer::PausedState;
+ return;
+ }
+
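+    // report the position we paused at so clients get a final position update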
+ const qint64 currentPosition = mMediaPlayer->getCurrentPosition();
+ setPosition(currentPosition);
+
+ mMediaPlayer->pause();
+}
+
+void QAndroidMediaPlayer::stop()
+{
+ StateChangeNotifier notifier(this);
+
+ stateChanged(QMediaPlayer::StoppedState);
+
+ if ((mState & (AndroidMediaPlayer::Prepared
+ | AndroidMediaPlayer::Started
+ | AndroidMediaPlayer::Stopped
+ | AndroidMediaPlayer::Paused
+ | AndroidMediaPlayer::PlaybackCompleted)) == 0) {
+ if ((mState & (AndroidMediaPlayer::Idle | AndroidMediaPlayer::Uninitialized | AndroidMediaPlayer::Error)) == 0)
+ mPendingState = QMediaPlayer::StoppedState;
+ return;
+ }
+
+ if (mCurrentPlaybackRate != 1.)
+        // The playback rate needs to be reapplied on the next start
+ mHasPendingPlaybackRate = true;
+
+ if (mVideoOutput)
+ mVideoOutput->stop();
+
+ mMediaPlayer->stop();
+}
+
+void QAndroidMediaPlayer::onInfo(qint32 what, qint32 extra)
+{
+ StateChangeNotifier notifier(this);
+
+ Q_UNUSED(extra);
+ switch (what) {
+ case AndroidMediaPlayer::MEDIA_INFO_UNKNOWN:
+ break;
+ case AndroidMediaPlayer::MEDIA_INFO_VIDEO_TRACK_LAGGING:
+ // IGNORE
+ break;
+ case AndroidMediaPlayer::MEDIA_INFO_VIDEO_RENDERING_START:
+ break;
+ case AndroidMediaPlayer::MEDIA_INFO_BUFFERING_START:
+ mPendingState = state();
+ stateChanged(QMediaPlayer::PausedState);
+ setMediaStatus(QMediaPlayer::StalledMedia);
+ break;
+ case AndroidMediaPlayer::MEDIA_INFO_BUFFERING_END:
+ if (state() != QMediaPlayer::StoppedState)
+ flushPendingStates();
+ break;
+ case AndroidMediaPlayer::MEDIA_INFO_BAD_INTERLEAVING:
+ break;
+ case AndroidMediaPlayer::MEDIA_INFO_NOT_SEEKABLE:
+ seekableChanged(false);
+ break;
+ case AndroidMediaPlayer::MEDIA_INFO_METADATA_UPDATE:
+ Q_EMIT metaDataChanged();
+ break;
+ }
+}
+
+void QAndroidMediaPlayer::onError(qint32 what, qint32 extra)
+{
+ StateChangeNotifier notifier(this);
+
+ QString errorString;
+ QMediaPlayer::Error error = QMediaPlayer::ResourceError;
+
+ switch (what) {
+ case AndroidMediaPlayer::MEDIA_ERROR_UNKNOWN:
+ errorString = QLatin1String("Error:");
+ break;
+ case AndroidMediaPlayer::MEDIA_ERROR_SERVER_DIED:
+ errorString = QLatin1String("Error: Server died");
+ error = QMediaPlayer::ResourceError;
+ break;
+ case AndroidMediaPlayer::MEDIA_ERROR_INVALID_STATE:
+ errorString = QLatin1String("Error: Invalid state");
+ error = QMediaPlayer::ResourceError;
+ break;
+ }
+
+ switch (extra) {
+ case AndroidMediaPlayer::MEDIA_ERROR_IO: // Network OR file error
+ errorString += QLatin1String(" (I/O operation failed)");
+ error = QMediaPlayer::NetworkError;
+ setMediaStatus(QMediaPlayer::InvalidMedia);
+ break;
+ case AndroidMediaPlayer::MEDIA_ERROR_MALFORMED:
+ errorString += QLatin1String(" (Malformed bitstream)");
+ error = QMediaPlayer::FormatError;
+ setMediaStatus(QMediaPlayer::InvalidMedia);
+ break;
+ case AndroidMediaPlayer::MEDIA_ERROR_UNSUPPORTED:
+ errorString += QLatin1String(" (Unsupported media)");
+ error = QMediaPlayer::FormatError;
+ setMediaStatus(QMediaPlayer::InvalidMedia);
+ break;
+ case AndroidMediaPlayer::MEDIA_ERROR_TIMED_OUT:
+ errorString += QLatin1String(" (Timed out)");
+ break;
+ case AndroidMediaPlayer::MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK:
+        errorString += QLatin1String(" (Unable to start progressive playback)");
+ error = QMediaPlayer::FormatError;
+ setMediaStatus(QMediaPlayer::InvalidMedia);
+ break;
+ case AndroidMediaPlayer::MEDIA_ERROR_BAD_THINGS_ARE_GOING_TO_HAPPEN:
+ errorString += mMediaContent.scheme() == QLatin1String("rtsp")
+ ? QLatin1String(" (Unknown error/Insufficient resources or RTSP may not be supported)")
+ : QLatin1String(" (Unknown error/Insufficient resources)");
+ error = QMediaPlayer::ResourceError;
+ break;
+ }
+
+ Q_EMIT QPlatformMediaPlayer::error(error, errorString);
+}
+
+void QAndroidMediaPlayer::onBufferingChanged(qint32 percent)
+{
+ StateChangeNotifier notifier(this);
+
+ mBuffering = percent != 100;
+ mBufferPercent = percent;
+
+ updateAvailablePlaybackRanges();
+
+ if (state() != QMediaPlayer::StoppedState)
+ setMediaStatus(mBuffering ? QMediaPlayer::BufferingMedia : QMediaPlayer::BufferedMedia);
+
+ updateBufferStatus();
+}
+
+void QAndroidMediaPlayer::onVideoSizeChanged(qint32 width, qint32 height)
+{
+ QSize newSize(width, height);
+
+ if (width == 0 || height == 0 || newSize == mVideoSize)
+ return;
+
+ setVideoAvailable(true);
+ mVideoSize = newSize;
+
+ if (mVideoOutput)
+ mVideoOutput->setVideoSize(mVideoSize);
+}
+
+void QAndroidMediaPlayer::onStateChanged(qint32 state)
+{
+    // While the media is being reloaded (old state Stopped), only report transitions
+    // to Prepared, Error or Uninitialized.
+ if ((mState & AndroidMediaPlayer::Stopped)
+ && (state & (AndroidMediaPlayer::Prepared | AndroidMediaPlayer::Error | AndroidMediaPlayer::Uninitialized)) == 0) {
+ return;
+ }
+
+ StateChangeNotifier notifier(this);
+
+ mState = state;
+ switch (mState) {
+ case AndroidMediaPlayer::Idle:
+ break;
+ case AndroidMediaPlayer::Initialized:
+ break;
+ case AndroidMediaPlayer::Preparing:
+ if (!mReloadingMedia)
+ setMediaStatus(QMediaPlayer::LoadingMedia);
+ break;
+ case AndroidMediaPlayer::Prepared:
+ setMediaStatus(QMediaPlayer::LoadedMedia);
+ if (mBuffering) {
+ setMediaStatus(mBufferPercent == 100 ? QMediaPlayer::BufferedMedia
+ : QMediaPlayer::BufferingMedia);
+ } else {
+ onBufferingChanged(100);
+ }
+ setPosition(0);
+ Q_EMIT metaDataChanged();
+ setAudioAvailable(true);
+ flushPendingStates();
+ break;
+ case AndroidMediaPlayer::Started:
+ stateChanged(QMediaPlayer::PlayingState);
+ if (mBuffering) {
+ setMediaStatus(mBufferPercent == 100 ? QMediaPlayer::BufferedMedia
+ : QMediaPlayer::BufferingMedia);
+ } else {
+ setMediaStatus(QMediaPlayer::BufferedMedia);
+ }
+ Q_EMIT positionChanged(position());
+ break;
+ case AndroidMediaPlayer::Paused:
+ stateChanged(QMediaPlayer::PausedState);
+ if (mediaStatus() == QMediaPlayer::EndOfMedia) {
+ setPosition(0);
+ setMediaStatus(QMediaPlayer::BufferedMedia);
+ } else {
+ Q_EMIT positionChanged(position());
+ }
+ break;
+ case AndroidMediaPlayer::Error:
+ stateChanged(QMediaPlayer::StoppedState);
+ setMediaStatus(QMediaPlayer::InvalidMedia);
+ mMediaPlayer->release();
+ Q_EMIT positionChanged(0);
+ break;
+ case AndroidMediaPlayer::Stopped:
+ stateChanged(QMediaPlayer::StoppedState);
+ setMediaStatus(QMediaPlayer::LoadedMedia);
+ Q_EMIT positionChanged(0);
+ break;
+ case AndroidMediaPlayer::PlaybackCompleted:
+ if (doLoop()) {
+ setPosition(0);
+ mMediaPlayer->play();
+ break;
+ }
+ stateChanged(QMediaPlayer::StoppedState);
+ setMediaStatus(QMediaPlayer::EndOfMedia);
+ break;
+ case AndroidMediaPlayer::Uninitialized:
+ // reset some properties (unless we reload the same media)
+ if (!mReloadingMedia) {
+ resetBufferingProgress();
+ mPendingPosition = -1;
+ mPendingSetMedia = false;
+ mPendingState = -1;
+
+ Q_EMIT durationChanged(0);
+ Q_EMIT positionChanged(0);
+
+ setAudioAvailable(false);
+ setVideoAvailable(false);
+ seekableChanged(true);
+ }
+ break;
+ default:
+ break;
+ }
+
+ if ((mState & (AndroidMediaPlayer::Stopped | AndroidMediaPlayer::Uninitialized)) != 0) {
+ mMediaPlayer->setDisplay(0);
+ if (mVideoOutput) {
+ mVideoOutput->stop();
+ }
+ }
+}
+
+int QAndroidMediaPlayer::trackCount(TrackType trackType)
+{
+ if (!mTracksMetadata.contains(trackType))
+ return -1;
+
+ auto tracks = mTracksMetadata.value(trackType);
+ return tracks.count();
+}
+
+QMediaMetaData QAndroidMediaPlayer::trackMetaData(TrackType trackType, int streamNumber)
+{
+ if (!mTracksMetadata.contains(trackType))
+ return QMediaMetaData();
+
+ auto tracks = mTracksMetadata.value(trackType);
+    if (streamNumber < 0 || streamNumber >= tracks.count())
+ return QMediaMetaData();
+
+ QAndroidMetaData trackInfo = tracks.at(streamNumber);
+ return static_cast<QMediaMetaData>(trackInfo);
+}
+
+QPlatformMediaPlayer::TrackType convertTrackType(AndroidMediaPlayer::TrackType type)
+{
+ switch (type) {
+ case AndroidMediaPlayer::TrackType::Video:
+ return QPlatformMediaPlayer::TrackType::VideoStream;
+ case AndroidMediaPlayer::TrackType::Audio:
+ return QPlatformMediaPlayer::TrackType::AudioStream;
+ case AndroidMediaPlayer::TrackType::TimedText:
+ return QPlatformMediaPlayer::TrackType::SubtitleStream;
+ case AndroidMediaPlayer::TrackType::Subtitle:
+ return QPlatformMediaPlayer::TrackType::SubtitleStream;
+ case AndroidMediaPlayer::TrackType::Unknown:
+ case AndroidMediaPlayer::TrackType::Metadata:
+ return QPlatformMediaPlayer::TrackType::NTrackTypes;
+ }
+
+ return QPlatformMediaPlayer::TrackType::NTrackTypes;
+}
+
+int QAndroidMediaPlayer::convertTrackNumber(int androidTrackNumber)
+{
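+    // The Android player exposes one flat track list; this code assumes it is ordered
+    // video tracks first, then audio, then timed text, then subtitle tracks, and converts
+    // the flat index into an index within the requested track type.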
+ int trackNumber = androidTrackNumber;
+
+ int videoTrackCount = trackCount(QPlatformMediaPlayer::TrackType::VideoStream);
+ if (trackNumber <= videoTrackCount)
+ return trackNumber;
+
+ trackNumber = trackNumber - videoTrackCount;
+
+ int audioTrackCount = trackCount(QPlatformMediaPlayer::TrackType::AudioStream);
+ if (trackNumber <= audioTrackCount)
+ return trackNumber;
+
+ trackNumber = trackNumber - audioTrackCount;
+
+ auto subtitleTracks = mTracksMetadata.value(QPlatformMediaPlayer::TrackType::SubtitleStream);
+ int timedTextCount = 0;
+ int subtitleTextCount = 0;
+ for (const auto &track : subtitleTracks) {
+ if (track.androidTrackType() == 3) // 3 == TimedText
+ timedTextCount++;
+
+ if (track.androidTrackType() == 4) // 4 == Subtitle
+ subtitleTextCount++;
+ }
+
+ if (trackNumber <= timedTextCount)
+ return trackNumber;
+
+ trackNumber = trackNumber - timedTextCount;
+
+ if (trackNumber <= subtitleTextCount)
+ return trackNumber;
+
+ return -1;
+}
+
+int QAndroidMediaPlayer::activeTrack(TrackType trackType)
+{
+ int androidTrackNumber = -1;
+
+ switch (trackType) {
+ case QPlatformMediaPlayer::TrackType::VideoStream: {
+ if (!mIsVideoTrackEnabled)
+ return -1;
+ androidTrackNumber = mMediaPlayer->activeTrack(AndroidMediaPlayer::TrackType::Video);
+        break;
+    }
+ case QPlatformMediaPlayer::TrackType::AudioStream: {
+ if (!mIsAudioTrackEnabled)
+ return -1;
+
+ androidTrackNumber = mMediaPlayer->activeTrack(AndroidMediaPlayer::TrackType::Audio);
+        break;
+    }
+ case QPlatformMediaPlayer::TrackType::SubtitleStream: {
+ int timedTextSelectedTrack =
+ mMediaPlayer->activeTrack(AndroidMediaPlayer::TrackType::TimedText);
+
+ if (timedTextSelectedTrack > -1) {
+ androidTrackNumber = timedTextSelectedTrack;
+ break;
+ }
+
+ int subtitleSelectedTrack =
+ mMediaPlayer->activeTrack(AndroidMediaPlayer::TrackType::Subtitle);
+ if (subtitleSelectedTrack > -1) {
+ androidTrackNumber = subtitleSelectedTrack;
+ break;
+ }
+
+ return -1;
+ }
+ case QPlatformMediaPlayer::TrackType::NTrackTypes:
+ return -1;
+ }
+
+ return convertTrackNumber(androidTrackNumber);
+}
+
+void QAndroidMediaPlayer::disableTrack(TrackType trackType)
+{
+ const auto track = activeTrack(trackType);
+
+ switch (trackType) {
+ case VideoStream: {
+ if (track > -1) {
+ mMediaPlayer->setDisplay(nullptr);
+ mIsVideoTrackEnabled = false;
+ }
+ break;
+ }
+ case AudioStream: {
+ if (track > -1) {
+ mMediaPlayer->setMuted(true);
+ mMediaPlayer->blockAudio();
+ mIsAudioTrackEnabled = false;
+ }
+ break;
+ }
+ case SubtitleStream: {
+ // subtitles and timedtext tracks can be selected at the same time so deselect both
+ int subtitleSelectedTrack =
+ mMediaPlayer->activeTrack(AndroidMediaPlayer::TrackType::Subtitle);
+ if (subtitleSelectedTrack > -1)
+ mMediaPlayer->deselectTrack(subtitleSelectedTrack);
+
+ int timedTextSelectedTrack =
+ mMediaPlayer->activeTrack(AndroidMediaPlayer::TrackType::TimedText);
+ if (timedTextSelectedTrack > -1)
+ mMediaPlayer->deselectTrack(timedTextSelectedTrack);
+
+ break;
+ }
+ case NTrackTypes:
+ break;
+ }
+}
+
+void QAndroidMediaPlayer::setActiveTrack(TrackType trackType, int streamNumber)
+{
+
+ if (!mTracksMetadata.contains(trackType)) {
+ qCWarning(lcMediaPlayer)
+                << "Trying to set an active track for a type that has no available tracks.";
+ return;
+ }
+
+ const auto &tracks = mTracksMetadata.value(trackType);
+    if (streamNumber >= tracks.count()) {
+        qCWarning(lcMediaPlayer) << "Trying to set an active track that does not exist.";
+ return;
+ }
+
+    // a negative stream number deselects the whole track type
+ if (streamNumber < 0) {
+ disableTrack(trackType);
+ return;
+ }
+
+ const auto currentTrack = activeTrack(trackType);
+ if (streamNumber == currentTrack) {
+ return;
+ }
+
+ if (trackType == TrackType::VideoStream && !mIsVideoTrackEnabled) {
+ // enable video stream
+ mMediaPlayer->setDisplay(mVideoOutput->surfaceTexture());
+ mIsVideoTrackEnabled = true;
+ }
+
+ if (trackType == TrackType::AudioStream && !mIsAudioTrackEnabled) {
+ // enable audio stream
+ mMediaPlayer->unblockAudio();
+ mMediaPlayer->setMuted(false);
+ mIsAudioTrackEnabled = true;
+ }
+
+ if (trackType == TrackType::SubtitleStream) {
+ // subtitles and timedtext tracks can be selected at the same time so deselect both before
+ // selecting a new one
+ disableTrack(TrackType::SubtitleStream);
+ }
+
+ const auto &trackInfo = tracks.at(streamNumber);
+ const auto &trackNumber = trackInfo.androidTrackNumber();
+ mMediaPlayer->selectTrack(trackNumber);
+
+ emit activeTracksChanged();
+}
+
+void QAndroidMediaPlayer::positionChanged(qint64 position)
+{
+ QPlatformMediaPlayer::positionChanged(position);
+}
+
+void QAndroidMediaPlayer::durationChanged(qint64 duration)
+{
+ QPlatformMediaPlayer::durationChanged(duration);
+}
+
+void QAndroidMediaPlayer::onVideoOutputReady(bool ready)
+{
+ if ((mMediaPlayer->display() == 0) && mVideoOutput && ready)
+ mMediaPlayer->setDisplay(mVideoOutput->surfaceTexture());
+
+ flushPendingStates();
+}
+
+void QAndroidMediaPlayer::setMediaStatus(QMediaPlayer::MediaStatus status)
+{
+ mediaStatusChanged(status);
+
+ if (status == QMediaPlayer::NoMedia || status == QMediaPlayer::InvalidMedia) {
+ Q_EMIT durationChanged(0);
+ Q_EMIT metaDataChanged();
+ setAudioAvailable(false);
+ setVideoAvailable(false);
+ }
+
+ if (status == QMediaPlayer::EndOfMedia)
+ Q_EMIT positionChanged(position());
+
+ updateBufferStatus();
+}
+
+void QAndroidMediaPlayer::setAudioAvailable(bool available)
+{
+ if (mAudioAvailable == available)
+ return;
+
+ mAudioAvailable = available;
+ Q_EMIT audioAvailableChanged(mAudioAvailable);
+}
+
+void QAndroidMediaPlayer::setVideoAvailable(bool available)
+{
+ if (mVideoAvailable == available)
+ return;
+
+ if (!available)
+ mVideoSize = QSize();
+
+ mVideoAvailable = available;
+ Q_EMIT videoAvailableChanged(mVideoAvailable);
+}
+
+void QAndroidMediaPlayer::resetBufferingProgress()
+{
+ mBuffering = false;
+ mBufferPercent = 0;
+ mAvailablePlaybackRange = QMediaTimeRange();
+}
+
+void QAndroidMediaPlayer::flushPendingStates()
+{
+ if (mPendingSetMedia) {
+ setMedia(mMediaContent, 0);
+ mPendingSetMedia = false;
+ return;
+ }
+
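+    // take and clear the pending state first; play()/pause()/stop() below may set it again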
+ const int newState = mPendingState;
+ mPendingState = -1;
+
+ if (mPendingPosition != -1)
+ setPosition(mPendingPosition);
+ if (mPendingVolume >= 0)
+ setVolume(mPendingVolume);
+ if (mPendingMute != -1)
+ setMuted((mPendingMute == 1));
+
+ switch (newState) {
+ case QMediaPlayer::PlayingState:
+ play();
+ break;
+ case QMediaPlayer::PausedState:
+ pause();
+ break;
+ case QMediaPlayer::StoppedState:
+ stop();
+ break;
+ default:
+ break;
+ }
+}
+
+void QAndroidMediaPlayer::updateBufferStatus()
+{
+ const auto &status = mediaStatus();
+ bool bufferFilled = (status == QMediaPlayer::BufferedMedia || status == QMediaPlayer::BufferingMedia);
+
+    mBufferFilled = bufferFilled;
+
+ emit bufferProgressChanged(bufferProgress());
+}
+
+void QAndroidMediaPlayer::updateTrackInfo()
+{
+ const auto &androidTracksInfo = mMediaPlayer->tracksInfo();
+
+ // prepare mTracksMetadata
+ mTracksMetadata[TrackType::VideoStream] = QList<QAndroidMetaData>();
+ mTracksMetadata[TrackType::AudioStream] = QList<QAndroidMetaData>();
+ mTracksMetadata[TrackType::SubtitleStream] = QList<QAndroidMetaData>();
+ mTracksMetadata[TrackType::NTrackTypes] = QList<QAndroidMetaData>();
+
+ for (const auto &androidTrackInfo : androidTracksInfo) {
+
+ const auto &mediaPlayerType = convertTrackType(androidTrackInfo.trackType);
+ auto &tracks = mTracksMetadata[mediaPlayerType];
+
+ const QAndroidMetaData metadata(mediaPlayerType, androidTrackInfo.trackType,
+ androidTrackInfo.trackNumber, androidTrackInfo.mimeType,
+ androidTrackInfo.language);
+ tracks.append(metadata);
+ }
+
+ emit tracksChanged();
+}
+
+QT_END_NAMESPACE
+
+#include "moc_qandroidmediaplayer_p.cpp"
diff --git a/src/plugins/multimedia/android/mediaplayer/qandroidmediaplayer_p.h b/src/plugins/multimedia/android/mediaplayer/qandroidmediaplayer_p.h
new file mode 100644
index 000000000..dd2a3469d
--- /dev/null
+++ b/src/plugins/multimedia/android/mediaplayer/qandroidmediaplayer_p.h
@@ -0,0 +1,127 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDMEDIAPLAYERCONTROL_H
+#define QANDROIDMEDIAPLAYERCONTROL_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qglobal.h>
+#include <private/qplatformmediaplayer_p.h>
+#include <qandroidmetadata_p.h>
+#include <qmap.h>
+#include <qsize.h>
+#include <qurl.h>
+
+QT_BEGIN_NAMESPACE
+
+class AndroidMediaPlayer;
+class QAndroidTextureVideoOutput;
+class QAndroidMediaPlayerVideoRendererControl;
+class QAndroidAudioOutput;
+
+class QAndroidMediaPlayer : public QObject, public QPlatformMediaPlayer
+{
+ Q_OBJECT
+
+public:
+ explicit QAndroidMediaPlayer(QMediaPlayer *parent = 0);
+ ~QAndroidMediaPlayer() override;
+
+ qint64 duration() const override;
+ qint64 position() const override;
+ float bufferProgress() const override;
+ bool isAudioAvailable() const override;
+ bool isVideoAvailable() const override;
+ QMediaTimeRange availablePlaybackRanges() const override;
+ qreal playbackRate() const override;
+ void setPlaybackRate(qreal rate) override;
+ QUrl media() const override;
+ const QIODevice *mediaStream() const override;
+ void setMedia(const QUrl &mediaContent, QIODevice *stream) override;
+
+ QMediaMetaData metaData() const override;
+
+ void setVideoSink(QVideoSink *surface) override;
+
+ void setAudioOutput(QPlatformAudioOutput *output) override;
+ void updateAudioDevice();
+
+ void setPosition(qint64 position) override;
+ void play() override;
+ void pause() override;
+ void stop() override;
+
+ int trackCount(TrackType trackType) override;
+ QMediaMetaData trackMetaData(TrackType trackType, int streamNumber) override;
+ int activeTrack(TrackType trackType) override;
+ void setActiveTrack(TrackType trackType, int streamNumber) override;
+
+private Q_SLOTS:
+ void setVolume(float volume);
+ void setMuted(bool muted);
+ void onVideoOutputReady(bool ready);
+ void onError(qint32 what, qint32 extra);
+ void onInfo(qint32 what, qint32 extra);
+ void onBufferingChanged(qint32 percent);
+ void onVideoSizeChanged(qint32 width, qint32 height);
+ void onStateChanged(qint32 state);
+ void positionChanged(qint64 position);
+ void durationChanged(qint64 duration);
+
+private:
+ AndroidMediaPlayer *mMediaPlayer = nullptr;
+ QAndroidAudioOutput *m_audioOutput = nullptr;
+ QUrl mMediaContent;
+ QIODevice *mMediaStream = nullptr;
+ QAndroidTextureVideoOutput *mVideoOutput = nullptr;
+ QVideoSink *m_videoSink = nullptr;
+ int mBufferPercent = -1;
+ bool mBufferFilled = false;
+ bool mAudioAvailable = false;
+ bool mVideoAvailable = false;
+ QSize mVideoSize;
+ bool mBuffering = false;
+ QMediaTimeRange mAvailablePlaybackRange;
+ int mState;
+ int mPendingState = -1;
+ qint64 mPendingPosition = -1;
+ bool mPendingSetMedia = false;
+ float mPendingVolume = -1;
+ int mPendingMute = -1;
+ bool mReloadingMedia = false;
+ int mActiveStateChangeNotifiers = 0;
+ qreal mCurrentPlaybackRate = 1.;
+ bool mHasPendingPlaybackRate = false; // we need this because the rate can theoretically be negative
+ QMap<TrackType, QList<QAndroidMetaData>> mTracksMetadata;
+
+ bool mIsVideoTrackEnabled = true;
+ bool mIsAudioTrackEnabled = true;
+
+ void setMediaStatus(QMediaPlayer::MediaStatus status);
+ void setAudioAvailable(bool available);
+ void setVideoAvailable(bool available);
+ void updateAvailablePlaybackRanges();
+ void resetBufferingProgress();
+ void flushPendingStates();
+ void updateBufferStatus();
+ void updateTrackInfo();
+ void setSubtitle(QString subtitle);
+ void disableTrack(TrackType trackType);
+
+ int convertTrackNumber(int androidTrackNumber);
+ friend class StateChangeNotifier;
+};
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDMEDIAPLAYERCONTROL_H
diff --git a/src/plugins/multimedia/android/mediaplayer/qandroidmetadata.cpp b/src/plugins/multimedia/android/mediaplayer/qandroidmetadata.cpp
new file mode 100644
index 000000000..b01845fa7
--- /dev/null
+++ b/src/plugins/multimedia/android/mediaplayer/qandroidmetadata.cpp
@@ -0,0 +1,163 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidmetadata_p.h"
+
+#include "androidmediametadataretriever_p.h"
+#include <QtMultimedia/qmediametadata.h>
+#include <qsize.h>
+#include <QDate>
+#include <QtCore/qlist.h>
+#include <QtConcurrent/qtconcurrentrun.h>
+#include <QLoggingCategory>
+
+QT_BEGIN_NAMESPACE
+
+// Genre name ordered by ID
+// see: http://id3.org/id3v2.3.0#Appendix_A_-_Genre_List_from_ID3v1
+static const char* qt_ID3GenreNames[] =
+{
+ "Blues", "Classic Rock", "Country", "Dance", "Disco", "Funk", "Grunge", "Hip-Hop", "Jazz",
+ "Metal", "New Age", "Oldies", "Other", "Pop", "R&B", "Rap", "Reggae", "Rock", "Techno",
+ "Industrial", "Alternative", "Ska", "Death Metal", "Pranks", "Soundtrack", "Euro-Techno",
+ "Ambient", "Trip-Hop", "Vocal", "Jazz+Funk", "Fusion", "Trance", "Classical", "Instrumental",
+ "Acid", "House", "Game", "Sound Clip", "Gospel", "Noise", "AlternRock", "Bass", "Soul", "Punk",
+ "Space", "Meditative", "Instrumental Pop", "Instrumental Rock", "Ethnic", "Gothic", "Darkwave",
+ "Techno-Industrial", "Electronic", "Pop-Folk", "Eurodance", "Dream", "Southern Rock", "Comedy",
+ "Cult", "Gangsta", "Top 40", "Christian Rap", "Pop/Funk", "Jungle", "Native American",
+ "Cabaret", "New Wave", "Psychadelic", "Rave", "Showtunes", "Trailer", "Lo-Fi", "Tribal",
+ "Acid Punk", "Acid Jazz", "Polka", "Retro", "Musical", "Rock & Roll", "Hard Rock", "Folk",
+ "Folk-Rock", "National Folk", "Swing", "Fast Fusion", "Bebob", "Latin", "Revival", "Celtic",
+ "Bluegrass", "Avantgarde", "Gothic Rock", "Progressive Rock", "Psychedelic Rock",
+ "Symphonic Rock", "Slow Rock", "Big Band", "Chorus", "Easy Listening", "Acoustic", "Humour",
+ "Speech", "Chanson", "Opera", "Chamber Music", "Sonata", "Symphony", "Booty Bass", "Primus",
+ "Porn Groove", "Satire", "Slow Jam", "Club", "Tango", "Samba", "Folklore", "Ballad",
+ "Power Ballad", "Rhythmic Soul", "Freestyle", "Duet", "Punk Rock", "Drum Solo", "A capella",
+ "Euro-House", "Dance Hall"
+};
+
+QMediaMetaData QAndroidMetaData::extractMetadata(const QUrl &url)
+{
+ QMediaMetaData metadata;
+
+ if (!url.isEmpty()) {
+ AndroidMediaMetadataRetriever retriever;
+ if (!retriever.setDataSource(url))
+ return metadata;
+
+ QString mimeType = retriever.extractMetadata(AndroidMediaMetadataRetriever::MimeType);
+ if (!mimeType.isNull())
+ metadata.insert(QMediaMetaData::MediaType, mimeType);
+
+ bool isVideo = !retriever.extractMetadata(AndroidMediaMetadataRetriever::HasVideo).isNull()
+ || mimeType.startsWith(QStringLiteral("video"));
+
+ QString string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Album);
+ if (!string.isNull())
+ metadata.insert(QMediaMetaData::AlbumTitle, string);
+
+ string = retriever.extractMetadata(AndroidMediaMetadataRetriever::AlbumArtist);
+ if (!string.isNull())
+ metadata.insert(QMediaMetaData::AlbumArtist, string);
+
+ string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Artist);
+ if (!string.isNull()) {
+ metadata.insert(isVideo ? QMediaMetaData::LeadPerformer
+ : QMediaMetaData::ContributingArtist,
+ string.split(QLatin1Char('/'), Qt::SkipEmptyParts));
+ }
+
+ string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Author);
+ if (!string.isNull())
+ metadata.insert(QMediaMetaData::Author, string.split(QLatin1Char('/'), Qt::SkipEmptyParts));
+
+ string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Bitrate);
+ if (!string.isNull()) {
+ metadata.insert(isVideo ? QMediaMetaData::VideoBitRate
+ : QMediaMetaData::AudioBitRate,
+ string.toInt());
+ }
+
+ string = retriever.extractMetadata(AndroidMediaMetadataRetriever::CDTrackNumber);
+ if (!string.isNull())
+ metadata.insert(QMediaMetaData::TrackNumber, string.toInt());
+
+ string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Composer);
+ if (!string.isNull())
+ metadata.insert(QMediaMetaData::Composer, string.split(QLatin1Char('/'), Qt::SkipEmptyParts));
+
+ string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Date);
+ if (!string.isNull())
+ metadata.insert(QMediaMetaData::Date, QDateTime::fromString(string, QStringLiteral("yyyyMMddTHHmmss.zzzZ")).date());
+
+ string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Duration);
+ if (!string.isNull())
+ metadata.insert(QMediaMetaData::Duration, string.toLongLong());
+
+ string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Genre);
+ if (!string.isNull()) {
+ // The genre can be returned as an ID3v2 id, get the name for it in that case
+ if (string.startsWith(QLatin1Char('(')) && string.endsWith(QLatin1Char(')'))) {
+ bool ok = false;
+ const int genreId = QStringView{string}.mid(1, string.length() - 2).toInt(&ok);
+ if (ok && genreId >= 0 && genreId <= 125)
+ string = QLatin1String(qt_ID3GenreNames[genreId]);
+ }
+ metadata.insert(QMediaMetaData::Genre, string);
+ }
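+        // Worked example (assuming a typical ID3v1-style tag): a raw genre string of
+        // "(17)" yields genreId == 17, which resolves to "Rock" in the table above.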
+
+ string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Title);
+ if (!string.isNull())
+ metadata.insert(QMediaMetaData::Title, string);
+
+ string = retriever.extractMetadata(AndroidMediaMetadataRetriever::VideoHeight);
+ if (!string.isNull()) {
+ const int height = string.toInt();
+ const int width = retriever.extractMetadata(AndroidMediaMetadataRetriever::VideoWidth).toInt();
+ metadata.insert(QMediaMetaData::Resolution, QSize(width, height));
+ }
+
+// string = retriever.extractMetadata(AndroidMediaMetadataRetriever::Writer);
+// if (!string.isNull())
+// metadata.insert(QMediaMetaData::Writer, string.split('/', Qt::SkipEmptyParts));
+
+ }
+
+ return metadata;
+}
+
+QLocale::Language getLocaleLanguage(const QString &language)
+{
+ // undefined language or uncoded language
+    if (language == QLatin1String("und") || language == QLatin1String("mis"))
+ return QLocale::AnyLanguage;
+
+ return QLocale::codeToLanguage(language, QLocale::ISO639Part2);
+}
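+
+// Usage sketch (illustrative): getLocaleLanguage(QStringLiteral("eng")) returns
+// QLocale::English via the ISO 639-2 mapping, while "und" and "mis" fall back to
+// QLocale::AnyLanguage.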
+
+QAndroidMetaData::QAndroidMetaData(int trackType, int androidTrackType, int androidTrackNumber,
+ const QString &mimeType, const QString &language)
+ : mTrackType(trackType),
+ mAndroidTrackType(androidTrackType),
+ mAndroidTrackNumber(androidTrackNumber)
+{
+ insert(QMediaMetaData::MediaType, mimeType);
+ insert(QMediaMetaData::Language, getLocaleLanguage(language));
+}
+
+int QAndroidMetaData::trackType() const
+{
+ return mTrackType;
+}
+
+int QAndroidMetaData::androidTrackType() const
+{
+ return mAndroidTrackType;
+}
+
+int QAndroidMetaData::androidTrackNumber() const
+{
+ return mAndroidTrackNumber;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/android/mediaplayer/qandroidmetadata_p.h b/src/plugins/multimedia/android/mediaplayer/qandroidmetadata_p.h
new file mode 100644
index 000000000..1bbad92dd
--- /dev/null
+++ b/src/plugins/multimedia/android/mediaplayer/qandroidmetadata_p.h
@@ -0,0 +1,47 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDMETADATA_H
+#define QANDROIDMETADATA_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qmediametadata.h>
+#include <qurl.h>
+#include <QMutex>
+#include <QVariant>
+
+QT_BEGIN_NAMESPACE
+
+class AndroidMediaMetadataRetriever;
+
+class QAndroidMetaData : public QMediaMetaData
+{
+public:
+ static QMediaMetaData extractMetadata(const QUrl &url);
+
+ QAndroidMetaData(int trackType, int androidTrackType, int androidTrackNumber,
+ const QString &mimeType, const QString &language);
+
+ int trackType() const;
+ int androidTrackType() const;
+ int androidTrackNumber() const;
+
+private:
+ int mTrackType;
+ int mAndroidTrackType;
+ int mAndroidTrackNumber;
+};
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDMETADATA_H
diff --git a/src/plugins/multimedia/android/qandroidformatsinfo.cpp b/src/plugins/multimedia/android/qandroidformatsinfo.cpp
new file mode 100644
index 000000000..3b23340ce
--- /dev/null
+++ b/src/plugins/multimedia/android/qandroidformatsinfo.cpp
@@ -0,0 +1,160 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidformatsinfo_p.h"
+
+#include <QtCore/qjnienvironment.h>
+#include <QtCore/qjniobject.h>
+#include <qcoreapplication.h>
+
+static const char encoderFilter[] = ".encoder";
+static const char decoderFilter[] = ".decoder";
+
+QT_BEGIN_NAMESPACE
+
+QAndroidFormatInfo::QAndroidFormatInfo()
+{
+    // Audio/video/image formats and their decoder/encoder support are documented at
+ // https://developer.android.com/guide/topics/media/media-formats
+
+ const QJniObject codecsArrayObject = QJniObject::callStaticObjectMethod(
+ "org/qtproject/qt/android/multimedia/QtMultimediaUtils",
+ "getMediaCodecs",
+ "()[Ljava/lang/String;");
+ QStringList codecs;
+ QJniEnvironment env;
+    const jobjectArray codecsArray = codecsArrayObject.object<jobjectArray>();
+    for (int i = 0; i < env->GetArrayLength(codecsArray); ++i) {
+        const QString codec = QJniObject(env->GetObjectArrayElement(codecsArray, i)).toString();
+ if (codec.contains(QStringLiteral("encoder")))
+ m_supportedEncoders.append(codec);
+ else
+ m_supportedDecoders.append(codec);
+ }
+
+ auto removeUnspecifiedValues = [](QList<CodecMap> &map) {
+ for (CodecMap &codec : map) {
+ codec.audio.removeAll(QMediaFormat::AudioCodec::Unspecified);
+ codec.video.removeAll(QMediaFormat::VideoCodec::Unspecified);
+ }
+ erase_if(map, [](const CodecMap &codec) {
+ return codec.audio.isEmpty() && codec.video.isEmpty();
+ });
+ };
+
+ {
+ const QMediaFormat::AudioCodec aac = hasDecoder(QMediaFormat::AudioCodec::AAC);
+ const QMediaFormat::AudioCodec mp3 = hasDecoder(QMediaFormat::AudioCodec::MP3);
+ const QMediaFormat::AudioCodec flac = hasDecoder(QMediaFormat::AudioCodec::FLAC);
+ const QMediaFormat::AudioCodec opus = hasDecoder(QMediaFormat::AudioCodec::Opus);
+ const QMediaFormat::AudioCodec vorbis = hasDecoder(QMediaFormat::AudioCodec::Vorbis);
+
+ const QMediaFormat::VideoCodec vp8 = hasDecoder(QMediaFormat::VideoCodec::VP8);
+ const QMediaFormat::VideoCodec vp9 = hasDecoder(QMediaFormat::VideoCodec::VP9);
+ const QMediaFormat::VideoCodec h264 = hasDecoder(QMediaFormat::VideoCodec::H264);
+ const QMediaFormat::VideoCodec h265 = hasDecoder(QMediaFormat::VideoCodec::H265);
+ const QMediaFormat::VideoCodec av1 = hasDecoder(QMediaFormat::VideoCodec::AV1);
+
+ decoders = {
+ { QMediaFormat::AAC, {aac}, {} },
+ { QMediaFormat::MP3, {mp3}, {} },
+ { QMediaFormat::FLAC, {flac}, {} },
+ { QMediaFormat::Mpeg4Audio, {mp3, aac, flac, vorbis}, {} },
+ { QMediaFormat::MPEG4, {mp3, aac, flac, vorbis}, {h264, h265, av1} },
+ { QMediaFormat::Ogg, {opus, vorbis, flac}, {} },
+ { QMediaFormat::Matroska, {mp3, opus, vorbis}, {vp8, vp9, h264, h265, av1} },
+ { QMediaFormat::WebM, {opus, vorbis}, {vp8, vp9} }
+ };
+
+ removeUnspecifiedValues(decoders);
+ }
+
+ {
+ const QMediaFormat::AudioCodec aac = hasEncoder(QMediaFormat::AudioCodec::AAC);
+ const QMediaFormat::AudioCodec mp3 = hasEncoder(QMediaFormat::AudioCodec::MP3);
+ const QMediaFormat::AudioCodec opus = hasEncoder(QMediaFormat::AudioCodec::Opus);
+ const QMediaFormat::AudioCodec vorbis = hasEncoder(QMediaFormat::AudioCodec::Vorbis);
+
+ const QMediaFormat::VideoCodec vp8 = hasEncoder(QMediaFormat::VideoCodec::VP8);
+ const QMediaFormat::VideoCodec vp9 = hasEncoder(QMediaFormat::VideoCodec::VP9);
+ const QMediaFormat::VideoCodec h264 = hasEncoder(QMediaFormat::VideoCodec::H264);
+ const QMediaFormat::VideoCodec h265 = hasEncoder(QMediaFormat::VideoCodec::H265);
+ const QMediaFormat::VideoCodec av1 = hasEncoder(QMediaFormat::VideoCodec::AV1);
+
+        // MP3 and Vorbis encoders are not supported by the default Android SDK.
+        // The Opus encoder is only available on Android 10+.
+ encoders = {
+ { QMediaFormat::AAC, {aac}, {} },
+ { QMediaFormat::MP3, {mp3}, {} },
+ // FLAC encoder is not supported by the MediaRecorder used for recording
+ // { QMediaFormat::FLAC, {flac}, {} },
+ { QMediaFormat::Mpeg4Audio, {mp3, aac, vorbis}, {} },
+ { QMediaFormat::MPEG4, {mp3, aac, vorbis}, {h264, h265, av1} },
+ { QMediaFormat::Ogg, {opus, vorbis}, {} },
+ { QMediaFormat::Matroska, {mp3, opus}, {vp8, vp9, h264, h265, av1} },
+            // NOTE: WebM seems to be documented as supported with the VP8 encoder,
+            // but the Camera API doesn't work with it, so keep it commented out for now.
+ // { QMediaFormat::WebM, {vorbis, opus}, {vp8, vp9} }
+ };
+
+ removeUnspecifiedValues(encoders);
+ }
+
+ imageFormats << QImageCapture::JPEG;
+    // NOTE: Add these later if needed; the Camera API doesn't seem to work with them.
+ // imageFormats << QImageCapture::PNG << QImageCapture::WebP;
+}
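+
+// Classification sketch: getMediaCodecs() returns the MediaCodec component names reported
+// by the device. A name containing "encoder" (for example a hypothetical
+// "c2.android.aac.encoder") lands in m_supportedEncoders, everything else in
+// m_supportedDecoders; hasEncoder()/hasDecoder() below then match against these lists.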
+
+QAndroidFormatInfo::~QAndroidFormatInfo()
+{
+}
+
+static QString getVideoCodecName(QMediaFormat::VideoCodec codec)
+{
+ QString codecString = QMediaFormat::videoCodecName(codec);
+ if (codec == QMediaFormat::VideoCodec::H265)
+ codecString = QLatin1String("HEVC");
+ return codecString;
+}
+
+QMediaFormat::AudioCodec QAndroidFormatInfo::hasEncoder(QMediaFormat::AudioCodec codec) const
+{
+ const QString codecString = QMediaFormat::audioCodecName(codec);
+ for (auto str : m_supportedEncoders) {
+ if (str.contains(codecString + QLatin1String(encoderFilter), Qt::CaseInsensitive))
+ return codec;
+ }
+ return QMediaFormat::AudioCodec::Unspecified;
+}
+
+QMediaFormat::VideoCodec QAndroidFormatInfo::hasEncoder(QMediaFormat::VideoCodec codec) const
+{
+ const QString codecString = getVideoCodecName(codec);
+ for (auto str : m_supportedEncoders) {
+ if (str.contains(codecString + QLatin1String(encoderFilter), Qt::CaseInsensitive))
+ return codec;
+ }
+ return QMediaFormat::VideoCodec::Unspecified;
+}
+
+QMediaFormat::AudioCodec QAndroidFormatInfo::hasDecoder(QMediaFormat::AudioCodec codec) const
+{
+ const QString codecString = QMediaFormat::audioCodecName(codec);
+ for (auto str : m_supportedDecoders) {
+ if (str.contains(codecString + QLatin1String(decoderFilter), Qt::CaseInsensitive))
+ return codec;
+ }
+ return QMediaFormat::AudioCodec::Unspecified;
+}
+
+QMediaFormat::VideoCodec QAndroidFormatInfo::hasDecoder(QMediaFormat::VideoCodec codec) const
+{
+ const QString codecString = getVideoCodecName(codec);
+ for (auto str : m_supportedDecoders) {
+ if (str.contains(codecString + QLatin1String(decoderFilter), Qt::CaseInsensitive))
+ return codec;
+ }
+ return QMediaFormat::VideoCodec::Unspecified;
+}
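+
+// Matching example (the device codec name is illustrative): for H265 the lookup string
+// becomes "HEVC" + ".decoder" (see getVideoCodecName() above) and is matched
+// case-insensitively, so a reported name such as "c2.android.hevc.decoder" marks
+// H265 as a supported decoder.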
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/android/qandroidformatsinfo_p.h b/src/plugins/multimedia/android/qandroidformatsinfo_p.h
new file mode 100644
index 000000000..2d14ad181
--- /dev/null
+++ b/src/plugins/multimedia/android/qandroidformatsinfo_p.h
@@ -0,0 +1,40 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDFORMATINFO_H
+#define QANDROIDFORMATINFO_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediaformatinfo_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAndroidFormatInfo : public QPlatformMediaFormatInfo
+{
+public:
+ QAndroidFormatInfo();
+ ~QAndroidFormatInfo();
+
+private:
+ QMediaFormat::AudioCodec hasEncoder(QMediaFormat::AudioCodec codec) const;
+ QMediaFormat::VideoCodec hasEncoder(QMediaFormat::VideoCodec codec) const;
+ QMediaFormat::AudioCodec hasDecoder(QMediaFormat::AudioCodec codec) const;
+ QMediaFormat::VideoCodec hasDecoder(QMediaFormat::VideoCodec codec) const;
+
+ QStringList m_supportedDecoders;
+ QStringList m_supportedEncoders;
+};
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDFORMATINFO_H
diff --git a/src/plugins/multimedia/android/qandroidintegration.cpp b/src/plugins/multimedia/android/qandroidintegration.cpp
new file mode 100644
index 000000000..c7077e49d
--- /dev/null
+++ b/src/plugins/multimedia/android/qandroidintegration.cpp
@@ -0,0 +1,136 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "qandroidintegration_p.h"
+#include "qandroidglobal_p.h"
+#include "qandroidmediacapturesession_p.h"
+#include "androidmediaplayer_p.h"
+#include "qandroidcamerasession_p.h"
+#include "androidsurfacetexture_p.h"
+#include "androidsurfaceview_p.h"
+#include "androidcamera_p.h"
+#include "qandroidcamera_p.h"
+#include "qandroidimagecapture_p.h"
+#include "qandroidmediaencoder_p.h"
+#include "androidmediarecorder_p.h"
+#include "qandroidformatsinfo_p.h"
+#include "qandroidmediaplayer_p.h"
+#include "qandroidaudiooutput_p.h"
+#include "qandroidaudioinput_p.h"
+#include "qandroidvideosink_p.h"
+#include "qandroidaudiodecoder_p.h"
+#include <QtMultimedia/private/qplatformmediaplugin_p.h>
+
+QT_BEGIN_NAMESPACE
+
+Q_LOGGING_CATEGORY(qtAndroidMediaPlugin, "qt.multimedia.android")
+
+class QAndroidMediaPlugin : public QPlatformMediaPlugin
+{
+ Q_OBJECT
+ Q_PLUGIN_METADATA(IID QPlatformMediaPlugin_iid FILE "android.json")
+
+public:
+ QAndroidMediaPlugin()
+ : QPlatformMediaPlugin()
+ {}
+
+ QPlatformMediaIntegration* create(const QString &name) override
+ {
+ if (name == u"android")
+ return new QAndroidIntegration;
+ return nullptr;
+ }
+};
+
+QAndroidIntegration::QAndroidIntegration() : QPlatformMediaIntegration(QLatin1String("android")) { }
+
+QMaybe<QPlatformAudioDecoder *> QAndroidIntegration::createAudioDecoder(QAudioDecoder *decoder)
+{
+ return new QAndroidAudioDecoder(decoder);
+}
+
+QPlatformMediaFormatInfo *QAndroidIntegration::createFormatInfo()
+{
+ return new QAndroidFormatInfo;
+}
+
+QMaybe<QPlatformMediaCaptureSession *> QAndroidIntegration::createCaptureSession()
+{
+ return new QAndroidMediaCaptureSession();
+}
+
+QMaybe<QPlatformMediaPlayer *> QAndroidIntegration::createPlayer(QMediaPlayer *player)
+{
+ return new QAndroidMediaPlayer(player);
+}
+
+QMaybe<QPlatformCamera *> QAndroidIntegration::createCamera(QCamera *camera)
+{
+ return new QAndroidCamera(camera);
+}
+
+QMaybe<QPlatformMediaRecorder *> QAndroidIntegration::createRecorder(QMediaRecorder *recorder)
+{
+ return new QAndroidMediaEncoder(recorder);
+}
+
+QMaybe<QPlatformImageCapture *> QAndroidIntegration::createImageCapture(QImageCapture *imageCapture)
+{
+ return new QAndroidImageCapture(imageCapture);
+}
+
+QMaybe<QPlatformAudioOutput *> QAndroidIntegration::createAudioOutput(QAudioOutput *q)
+{
+ return new QAndroidAudioOutput(q);
+}
+
+QMaybe<QPlatformAudioInput *> QAndroidIntegration::createAudioInput(QAudioInput *audioInput)
+{
+ return new QAndroidAudioInput(audioInput);
+}
+
+QMaybe<QPlatformVideoSink *> QAndroidIntegration::createVideoSink(QVideoSink *sink)
+{
+ return new QAndroidVideoSink(sink);
+}
+
+Q_DECL_EXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void * /*reserved*/)
+{
+ static bool initialized = false;
+ if (initialized)
+ return JNI_VERSION_1_6;
+ initialized = true;
+
+ QT_USE_NAMESPACE
+ typedef union {
+ JNIEnv *nativeEnvironment;
+ void *venv;
+ } UnionJNIEnvToVoid;
+
+ UnionJNIEnvToVoid uenv;
+ uenv.venv = NULL;
+
+ if (vm->GetEnv(&uenv.venv, JNI_VERSION_1_6) != JNI_OK)
+ return JNI_ERR;
+
+ if (!AndroidMediaPlayer::registerNativeMethods()
+ || !AndroidCamera::registerNativeMethods()
+ || !AndroidMediaRecorder::registerNativeMethods()
+ || !AndroidSurfaceHolder::registerNativeMethods()) {
+ return JNI_ERR;
+ }
+
+ AndroidSurfaceTexture::registerNativeMethods();
+
+ return JNI_VERSION_1_6;
+}
+
+QList<QCameraDevice> QAndroidIntegration::videoInputs()
+{
+ return QAndroidCameraSession::availableCameras();
+}
+
+QT_END_NAMESPACE
+
+#include "qandroidintegration.moc"
diff --git a/src/plugins/multimedia/android/qandroidintegration_p.h b/src/plugins/multimedia/android/qandroidintegration_p.h
new file mode 100644
index 000000000..9ef5a3267
--- /dev/null
+++ b/src/plugins/multimedia/android/qandroidintegration_p.h
@@ -0,0 +1,48 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef QANDROIDINTEGRATION_H
+#define QANDROIDINTEGRATION_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <private/qplatformmediaintegration_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QAndroidMediaDevices;
+
+class QAndroidIntegration : public QPlatformMediaIntegration
+{
+public:
+ QAndroidIntegration();
+
+ QMaybe<QPlatformAudioDecoder *> createAudioDecoder(QAudioDecoder *decoder) override;
+ QMaybe<QPlatformMediaCaptureSession *> createCaptureSession() override;
+ QMaybe<QPlatformMediaPlayer *> createPlayer(QMediaPlayer *player) override;
+ QMaybe<QPlatformCamera *> createCamera(QCamera *camera) override;
+ QMaybe<QPlatformMediaRecorder *> createRecorder(QMediaRecorder *recorder) override;
+ QMaybe<QPlatformImageCapture *> createImageCapture(QImageCapture *imageCapture) override;
+
+ QMaybe<QPlatformAudioOutput *> createAudioOutput(QAudioOutput *q) override;
+ QMaybe<QPlatformAudioInput *> createAudioInput(QAudioInput *audioInput) override;
+
+ QMaybe<QPlatformVideoSink *> createVideoSink(QVideoSink *) override;
+ QList<QCameraDevice> videoInputs() override;
+
+protected:
+ QPlatformMediaFormatInfo *createFormatInfo() override;
+};
+
+QT_END_NAMESPACE
+
+#endif // QANDROIDINTEGRATION_H
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidcamera.cpp b/src/plugins/multimedia/android/wrappers/jni/androidcamera.cpp
new file mode 100644
index 000000000..cef36d7ad
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidcamera.cpp
@@ -0,0 +1,1797 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// Copyright (C) 2016 Ruslan Baratov
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "androidcamera_p.h"
+#include "androidsurfacetexture_p.h"
+#include "androidsurfaceview_p.h"
+#include "qandroidmultimediautils_p.h"
+#include "qandroidglobal_p.h"
+
+#include <private/qvideoframe_p.h>
+
+#include <qhash.h>
+#include <qstringlist.h>
+#include <qdebug.h>
+#include <QtCore/qthread.h>
+#include <QtCore/qreadwritelock.h>
+#include <QtCore/qmutex.h>
+#include <QtMultimedia/private/qmemoryvideobuffer_p.h>
+#include <QtCore/qcoreapplication.h>
+
+#include <mutex>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(lcAndroidCamera, "qt.multimedia.android.camera")
+
+static const char QtCameraListenerClassName[] = "org/qtproject/qt/android/multimedia/QtCameraListener";
+
+typedef QHash<int, AndroidCamera *> CameraMap;
+Q_GLOBAL_STATIC(CameraMap, cameras)
+Q_GLOBAL_STATIC(QReadWriteLock, rwLock)
+
+static QRect areaToRect(jobject areaObj)
+{
+ QJniObject area(areaObj);
+ QJniObject rect = area.getObjectField("rect", "Landroid/graphics/Rect;");
+
+ return QRect(rect.getField<jint>("left"),
+ rect.getField<jint>("top"),
+ rect.callMethod<jint>("width"),
+ rect.callMethod<jint>("height"));
+}
+
+static QJniObject rectToArea(const QRect &rect)
+{
+ QJniObject jrect("android/graphics/Rect",
+ "(IIII)V",
+ rect.left(), rect.top(), rect.right(), rect.bottom());
+
+ QJniObject area("android/hardware/Camera$Area",
+ "(Landroid/graphics/Rect;I)V",
+ jrect.object(), 500);
+
+ return area;
+}
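+
+// Note: the second argument to the Camera.Area constructor above is the area weight;
+// Android documents the valid range as 1-1000, so 500 is simply a mid-range default.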
+
+// Native methods for QtCameraListener.java
+static void notifyAutoFocusComplete(JNIEnv* , jobject, int id, jboolean success)
+{
+ QReadLocker locker(rwLock);
+ const auto it = cameras->constFind(id);
+ if (Q_UNLIKELY(it == cameras->cend()))
+ return;
+
+ Q_EMIT (*it)->autoFocusComplete(success);
+}
+
+static void notifyPictureExposed(JNIEnv* , jobject, int id)
+{
+ QReadLocker locker(rwLock);
+ const auto it = cameras->constFind(id);
+ if (Q_UNLIKELY(it == cameras->cend()))
+ return;
+
+ Q_EMIT (*it)->pictureExposed();
+}
+
+static void notifyPictureCaptured(JNIEnv *env, jobject, int id, jbyteArray data)
+{
+ QReadLocker locker(rwLock);
+ const auto it = cameras->constFind(id);
+ if (Q_UNLIKELY(it == cameras->cend())) {
+ qCWarning(lcAndroidCamera) << "Could not obtain camera!";
+ return;
+ }
+
+ AndroidCamera *camera = (*it);
+
+ const int arrayLength = env->GetArrayLength(data);
+ QByteArray bytes(arrayLength, Qt::Uninitialized);
+ env->GetByteArrayRegion(data, 0, arrayLength, reinterpret_cast<jbyte *>(bytes.data()));
+
+ auto parameters = camera->getParametersObject();
+
+ QJniObject size =
+ parameters.callObjectMethod("getPictureSize", "()Landroid/hardware/Camera$Size;");
+
+ if (!size.isValid()) {
+ qCWarning(lcAndroidCamera) << "Picture Size is not valid!";
+ return;
+ }
+
+ QSize pictureSize(size.getField<jint>("width"), size.getField<jint>("height"));
+
+ auto format = AndroidCamera::ImageFormat(parameters.callMethod<jint>("getPictureFormat"));
+
+ if (format == AndroidCamera::ImageFormat::UnknownImageFormat) {
+ qCWarning(lcAndroidCamera) << "Android Camera Image Format is UnknownImageFormat!";
+ return;
+ }
+
+ int bytesPerLine = 0;
+
+ switch (format) {
+ case AndroidCamera::ImageFormat::YV12:
+ bytesPerLine = (pictureSize.width() + 15) & ~15;
+ break;
+ case AndroidCamera::ImageFormat::NV21:
+ bytesPerLine = pictureSize.width();
+ break;
+ case AndroidCamera::ImageFormat::RGB565:
+ case AndroidCamera::ImageFormat::YUY2:
+ bytesPerLine = pictureSize.width() * 2;
+ break;
+ default:
+ bytesPerLine = -1;
+ }
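+
+    // Stride example for the aligned YV12 case: a 1013-pixel-wide picture gets a luma
+    // stride of (1013 + 15) & ~15 == 1024 bytes, i.e. the width rounded up to a multiple of 16.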
+
+ auto pictureFormat = qt_pixelFormatFromAndroidImageFormat(format);
+
+ emit camera->pictureCaptured(bytes, pictureFormat, pictureSize, bytesPerLine);
+}
+
+static void notifyNewPreviewFrame(JNIEnv *env, jobject, int id, jbyteArray data,
+ int width, int height, int format, int bpl)
+{
+ QReadLocker locker(rwLock);
+ const auto it = cameras->constFind(id);
+ if (Q_UNLIKELY(it == cameras->cend()))
+ return;
+
+ const int arrayLength = env->GetArrayLength(data);
+ if (arrayLength == 0)
+ return;
+
+ QByteArray bytes(arrayLength, Qt::Uninitialized);
+ env->GetByteArrayRegion(data, 0, arrayLength, (jbyte*)bytes.data());
+
+ QVideoFrameFormat frameFormat(
+ QSize(width, height),
+ qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat(format)));
+
+ QVideoFrame frame = QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(std::move(bytes), bpl), std::move(frameFormat));
+
+ Q_EMIT (*it)->newPreviewFrame(frame);
+}
+
+static void notifyFrameAvailable(JNIEnv *, jobject, int id)
+{
+ QReadLocker locker(rwLock);
+ const auto it = cameras->constFind(id);
+ if (Q_UNLIKELY(it == cameras->cend()))
+ return;
+
+ (*it)->fetchLastPreviewFrame();
+}
+
+class AndroidCameraPrivate : public QObject
+{
+ Q_OBJECT
+public:
+ AndroidCameraPrivate();
+ ~AndroidCameraPrivate();
+
+ Q_INVOKABLE bool init(int cameraId);
+
+ Q_INVOKABLE void release();
+ Q_INVOKABLE bool lock();
+ Q_INVOKABLE bool unlock();
+ Q_INVOKABLE bool reconnect();
+
+ Q_INVOKABLE AndroidCamera::CameraFacing getFacing();
+ Q_INVOKABLE int getNativeOrientation();
+
+ Q_INVOKABLE QSize getPreferredPreviewSizeForVideo();
+ Q_INVOKABLE QList<QSize> getSupportedPreviewSizes();
+ static QList<QSize> getSupportedPreviewSizes(QJniObject &parameters);
+
+ Q_INVOKABLE QList<AndroidCamera::FpsRange> getSupportedPreviewFpsRange();
+
+ Q_INVOKABLE AndroidCamera::FpsRange getPreviewFpsRange();
+ static AndroidCamera::FpsRange getPreviewFpsRange(QJniObject &parameters);
+ Q_INVOKABLE void setPreviewFpsRange(int min, int max);
+
+ Q_INVOKABLE AndroidCamera::ImageFormat getPreviewFormat();
+ Q_INVOKABLE void setPreviewFormat(AndroidCamera::ImageFormat fmt);
+ Q_INVOKABLE QList<AndroidCamera::ImageFormat> getSupportedPreviewFormats();
+ static QList<AndroidCamera::ImageFormat> getSupportedPreviewFormats(QJniObject &parameters);
+
+ Q_INVOKABLE QSize previewSize() const { return m_previewSize; }
+ Q_INVOKABLE QSize getPreviewSize();
+ Q_INVOKABLE void updatePreviewSize();
+ Q_INVOKABLE bool setPreviewTexture(void *surfaceTexture);
+ Q_INVOKABLE bool setPreviewDisplay(void *surfaceHolder);
+ Q_INVOKABLE void setDisplayOrientation(int degrees);
+
+ Q_INVOKABLE bool isZoomSupported();
+ Q_INVOKABLE int getMaxZoom();
+ Q_INVOKABLE QList<int> getZoomRatios();
+ Q_INVOKABLE int getZoom();
+ Q_INVOKABLE void setZoom(int value);
+
+ Q_INVOKABLE QString getFlashMode();
+ Q_INVOKABLE void setFlashMode(const QString &value);
+
+ Q_INVOKABLE QString getFocusMode();
+ Q_INVOKABLE void setFocusMode(const QString &value);
+
+ Q_INVOKABLE int getMaxNumFocusAreas();
+ Q_INVOKABLE QList<QRect> getFocusAreas();
+ Q_INVOKABLE void setFocusAreas(const QList<QRect> &areas);
+
+ Q_INVOKABLE void autoFocus();
+ Q_INVOKABLE void cancelAutoFocus();
+
+ Q_INVOKABLE bool isAutoExposureLockSupported();
+ Q_INVOKABLE bool getAutoExposureLock();
+ Q_INVOKABLE void setAutoExposureLock(bool toggle);
+
+ Q_INVOKABLE bool isAutoWhiteBalanceLockSupported();
+ Q_INVOKABLE bool getAutoWhiteBalanceLock();
+ Q_INVOKABLE void setAutoWhiteBalanceLock(bool toggle);
+
+ Q_INVOKABLE int getExposureCompensation();
+ Q_INVOKABLE void setExposureCompensation(int value);
+ Q_INVOKABLE float getExposureCompensationStep();
+ Q_INVOKABLE int getMinExposureCompensation();
+ Q_INVOKABLE int getMaxExposureCompensation();
+
+ Q_INVOKABLE QString getSceneMode();
+ Q_INVOKABLE void setSceneMode(const QString &value);
+
+ Q_INVOKABLE QString getWhiteBalance();
+ Q_INVOKABLE void setWhiteBalance(const QString &value);
+
+ Q_INVOKABLE void updateRotation();
+
+ Q_INVOKABLE QList<QSize> getSupportedPictureSizes();
+ Q_INVOKABLE QList<QSize> getSupportedVideoSizes();
+ Q_INVOKABLE void setPictureSize(const QSize &size);
+ Q_INVOKABLE void setJpegQuality(int quality);
+
+ Q_INVOKABLE void startPreview();
+ Q_INVOKABLE void stopPreview();
+
+ Q_INVOKABLE void takePicture();
+
+ Q_INVOKABLE void setupPreviewFrameCallback();
+ Q_INVOKABLE void notifyNewFrames(bool notify);
+ Q_INVOKABLE void fetchLastPreviewFrame();
+
+ Q_INVOKABLE void applyParameters();
+
+ Q_INVOKABLE QStringList callParametersStringListMethod(const QByteArray &methodName);
+
+ int m_cameraId;
+ QRecursiveMutex m_parametersMutex;
+ QSize m_previewSize;
+ int m_rotation;
+ QJniObject m_info;
+ QJniObject m_parameters;
+ QJniObject m_camera;
+ QJniObject m_cameraListener;
+
+Q_SIGNALS:
+ void previewSizeChanged();
+ void previewStarted();
+ void previewFailedToStart();
+ void previewStopped();
+
+ void autoFocusStarted();
+
+ void whiteBalanceChanged();
+
+ void takePictureFailed();
+
+ void lastPreviewFrameFetched(const QVideoFrame &frame);
+};
+
+AndroidCamera::AndroidCamera(AndroidCameraPrivate *d, QThread *worker)
+ : QObject(),
+ d_ptr(d),
+ m_worker(worker)
+{
+ connect(d, &AndroidCameraPrivate::previewSizeChanged, this, &AndroidCamera::previewSizeChanged);
+ connect(d, &AndroidCameraPrivate::previewStarted, this, &AndroidCamera::previewStarted);
+ connect(d, &AndroidCameraPrivate::previewFailedToStart, this, &AndroidCamera::previewFailedToStart);
+ connect(d, &AndroidCameraPrivate::previewStopped, this, &AndroidCamera::previewStopped);
+ connect(d, &AndroidCameraPrivate::autoFocusStarted, this, &AndroidCamera::autoFocusStarted);
+ connect(d, &AndroidCameraPrivate::whiteBalanceChanged, this, &AndroidCamera::whiteBalanceChanged);
+ connect(d, &AndroidCameraPrivate::takePictureFailed, this, &AndroidCamera::takePictureFailed);
+ connect(d, &AndroidCameraPrivate::lastPreviewFrameFetched, this, &AndroidCamera::lastPreviewFrameFetched);
+}
+
+AndroidCamera::~AndroidCamera()
+{
+ Q_D(AndroidCamera);
+ if (d->m_camera.isValid()) {
+ release();
+ QWriteLocker locker(rwLock);
+ cameras->remove(cameraId());
+ }
+
+ m_worker->exit();
+ m_worker->wait(5000);
+}
+
+AndroidCamera *AndroidCamera::open(int cameraId)
+{
+ if (!qt_androidCheckCameraPermission())
+ return nullptr;
+
+ AndroidCameraPrivate *d = new AndroidCameraPrivate();
+ QThread *worker = new QThread;
+ worker->start();
+ d->moveToThread(worker);
+ connect(worker, &QThread::finished, d, &AndroidCameraPrivate::deleteLater);
+ bool ok = true;
+ QMetaObject::invokeMethod(d, "init", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, ok), Q_ARG(int, cameraId));
+ if (!ok) {
+ worker->quit();
+ worker->wait(5000);
+ delete worker;
+        return nullptr;
+ }
+
+ AndroidCamera *q = new AndroidCamera(d, worker);
+ QWriteLocker locker(rwLock);
+ cameras->insert(cameraId, q);
+
+ return q;
+}
+
+int AndroidCamera::cameraId() const
+{
+ Q_D(const AndroidCamera);
+ return d->m_cameraId;
+}
+
+bool AndroidCamera::lock()
+{
+ Q_D(AndroidCamera);
+ bool ok = true;
+ QMetaObject::invokeMethod(d, "lock", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, ok));
+ return ok;
+}
+
+bool AndroidCamera::unlock()
+{
+ Q_D(AndroidCamera);
+ bool ok = true;
+ QMetaObject::invokeMethod(d, "unlock", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, ok));
+ return ok;
+}
+
+bool AndroidCamera::reconnect()
+{
+ Q_D(AndroidCamera);
+ bool ok = true;
+ QMetaObject::invokeMethod(d, "reconnect", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, ok));
+ return ok;
+}
+
+void AndroidCamera::release()
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "release", Qt::BlockingQueuedConnection);
+}
+
+AndroidCamera::CameraFacing AndroidCamera::getFacing()
+{
+ Q_D(AndroidCamera);
+ return d->getFacing();
+}
+
+int AndroidCamera::getNativeOrientation()
+{
+ Q_D(AndroidCamera);
+ return d->getNativeOrientation();
+}
+
+QSize AndroidCamera::getPreferredPreviewSizeForVideo()
+{
+ Q_D(AndroidCamera);
+ return d->getPreferredPreviewSizeForVideo();
+}
+
+QList<QSize> AndroidCamera::getSupportedPreviewSizes()
+{
+ Q_D(AndroidCamera);
+ return d->getSupportedPreviewSizes();
+}
+
+QList<AndroidCamera::FpsRange> AndroidCamera::getSupportedPreviewFpsRange()
+{
+ Q_D(AndroidCamera);
+ return d->getSupportedPreviewFpsRange();
+}
+
+AndroidCamera::FpsRange AndroidCamera::getPreviewFpsRange()
+{
+ Q_D(AndroidCamera);
+ return d->getPreviewFpsRange();
+}
+
+void AndroidCamera::setPreviewFpsRange(FpsRange range)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setPreviewFpsRange", Q_ARG(int, range.min), Q_ARG(int, range.max));
+}
+
+AndroidCamera::ImageFormat AndroidCamera::getPreviewFormat()
+{
+ Q_D(AndroidCamera);
+ return d->getPreviewFormat();
+}
+
+void AndroidCamera::setPreviewFormat(ImageFormat fmt)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setPreviewFormat", Q_ARG(AndroidCamera::ImageFormat, fmt));
+}
+
+QList<AndroidCamera::ImageFormat> AndroidCamera::getSupportedPreviewFormats()
+{
+ Q_D(AndroidCamera);
+ return d->getSupportedPreviewFormats();
+}
+
+QSize AndroidCamera::previewSize() const
+{
+ Q_D(const AndroidCamera);
+ return d->m_previewSize;
+}
+
+QSize AndroidCamera::actualPreviewSize()
+{
+ Q_D(AndroidCamera);
+ return d->getPreviewSize();
+}
+
+void AndroidCamera::setPreviewSize(const QSize &size)
+{
+ Q_D(AndroidCamera);
+ d->m_parametersMutex.lock();
+ bool areParametersValid = d->m_parameters.isValid();
+ d->m_parametersMutex.unlock();
+ if (!areParametersValid)
+ return;
+
+ d->m_previewSize = size;
+ QMetaObject::invokeMethod(d, "updatePreviewSize");
+}
+
+bool AndroidCamera::setPreviewTexture(AndroidSurfaceTexture *surfaceTexture)
+{
+ Q_D(AndroidCamera);
+ bool ok = true;
+ QMetaObject::invokeMethod(d,
+ "setPreviewTexture",
+ Qt::BlockingQueuedConnection,
+ Q_RETURN_ARG(bool, ok),
+ Q_ARG(void *, surfaceTexture ? surfaceTexture->surfaceTexture() : 0));
+ return ok;
+}
+
+bool AndroidCamera::setPreviewDisplay(AndroidSurfaceHolder *surfaceHolder)
+{
+ Q_D(AndroidCamera);
+ bool ok = true;
+ QMetaObject::invokeMethod(d,
+ "setPreviewDisplay",
+ Qt::BlockingQueuedConnection,
+ Q_RETURN_ARG(bool, ok),
+ Q_ARG(void *, surfaceHolder ? surfaceHolder->surfaceHolder() : 0));
+ return ok;
+}
+
+void AndroidCamera::setDisplayOrientation(int degrees)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setDisplayOrientation", Qt::QueuedConnection, Q_ARG(int, degrees));
+}
+
+bool AndroidCamera::isZoomSupported()
+{
+ Q_D(AndroidCamera);
+ return d->isZoomSupported();
+}
+
+int AndroidCamera::getMaxZoom()
+{
+ Q_D(AndroidCamera);
+ return d->getMaxZoom();
+}
+
+QList<int> AndroidCamera::getZoomRatios()
+{
+ Q_D(AndroidCamera);
+ return d->getZoomRatios();
+}
+
+int AndroidCamera::getZoom()
+{
+ Q_D(AndroidCamera);
+ return d->getZoom();
+}
+
+void AndroidCamera::setZoom(int value)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setZoom", Q_ARG(int, value));
+}
+
+QStringList AndroidCamera::getSupportedFlashModes()
+{
+ Q_D(AndroidCamera);
+ return d->callParametersStringListMethod("getSupportedFlashModes");
+}
+
+QString AndroidCamera::getFlashMode()
+{
+ Q_D(AndroidCamera);
+ return d->getFlashMode();
+}
+
+void AndroidCamera::setFlashMode(const QString &value)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setFlashMode", Q_ARG(QString, value));
+}
+
+QStringList AndroidCamera::getSupportedFocusModes()
+{
+ Q_D(AndroidCamera);
+ return d->callParametersStringListMethod("getSupportedFocusModes");
+}
+
+QString AndroidCamera::getFocusMode()
+{
+ Q_D(AndroidCamera);
+ return d->getFocusMode();
+}
+
+void AndroidCamera::setFocusMode(const QString &value)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setFocusMode", Q_ARG(QString, value));
+}
+
+int AndroidCamera::getMaxNumFocusAreas()
+{
+ Q_D(AndroidCamera);
+ return d->getMaxNumFocusAreas();
+}
+
+QList<QRect> AndroidCamera::getFocusAreas()
+{
+ Q_D(AndroidCamera);
+ return d->getFocusAreas();
+}
+
+void AndroidCamera::setFocusAreas(const QList<QRect> &areas)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setFocusAreas", Q_ARG(QList<QRect>, areas));
+}
+
+void AndroidCamera::autoFocus()
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "autoFocus");
+}
+
+void AndroidCamera::cancelAutoFocus()
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "cancelAutoFocus", Qt::QueuedConnection);
+}
+
+bool AndroidCamera::isAutoExposureLockSupported()
+{
+ Q_D(AndroidCamera);
+ return d->isAutoExposureLockSupported();
+}
+
+bool AndroidCamera::getAutoExposureLock()
+{
+ Q_D(AndroidCamera);
+ return d->getAutoExposureLock();
+}
+
+void AndroidCamera::setAutoExposureLock(bool toggle)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setAutoExposureLock", Q_ARG(bool, toggle));
+}
+
+bool AndroidCamera::isAutoWhiteBalanceLockSupported()
+{
+ Q_D(AndroidCamera);
+ return d->isAutoWhiteBalanceLockSupported();
+}
+
+bool AndroidCamera::getAutoWhiteBalanceLock()
+{
+ Q_D(AndroidCamera);
+ return d->getAutoWhiteBalanceLock();
+}
+
+void AndroidCamera::setAutoWhiteBalanceLock(bool toggle)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setAutoWhiteBalanceLock", Q_ARG(bool, toggle));
+}
+
+int AndroidCamera::getExposureCompensation()
+{
+ Q_D(AndroidCamera);
+ return d->getExposureCompensation();
+}
+
+void AndroidCamera::setExposureCompensation(int value)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setExposureCompensation", Q_ARG(int, value));
+}
+
+float AndroidCamera::getExposureCompensationStep()
+{
+ Q_D(AndroidCamera);
+ return d->getExposureCompensationStep();
+}
+
+int AndroidCamera::getMinExposureCompensation()
+{
+ Q_D(AndroidCamera);
+ return d->getMinExposureCompensation();
+}
+
+int AndroidCamera::getMaxExposureCompensation()
+{
+ Q_D(AndroidCamera);
+ return d->getMaxExposureCompensation();
+}
+
+QStringList AndroidCamera::getSupportedSceneModes()
+{
+ Q_D(AndroidCamera);
+ return d->callParametersStringListMethod("getSupportedSceneModes");
+}
+
+QString AndroidCamera::getSceneMode()
+{
+ Q_D(AndroidCamera);
+ return d->getSceneMode();
+}
+
+void AndroidCamera::setSceneMode(const QString &value)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setSceneMode", Q_ARG(QString, value));
+}
+
+QStringList AndroidCamera::getSupportedWhiteBalance()
+{
+ Q_D(AndroidCamera);
+ return d->callParametersStringListMethod("getSupportedWhiteBalance");
+}
+
+QString AndroidCamera::getWhiteBalance()
+{
+ Q_D(AndroidCamera);
+ return d->getWhiteBalance();
+}
+
+void AndroidCamera::setWhiteBalance(const QString &value)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setWhiteBalance", Q_ARG(QString, value));
+}
+
+void AndroidCamera::setRotation(int rotation)
+{
+ Q_D(AndroidCamera);
+    // We need to do it here and not in the worker class because we cache the rotation.
+ d->m_parametersMutex.lock();
+ bool areParametersValid = d->m_parameters.isValid();
+ d->m_parametersMutex.unlock();
+ if (!areParametersValid)
+ return;
+
+ d->m_rotation = rotation;
+ QMetaObject::invokeMethod(d, "updateRotation");
+}
+
+int AndroidCamera::getRotation() const
+{
+ Q_D(const AndroidCamera);
+ return d->m_rotation;
+}
+
+QList<QSize> AndroidCamera::getSupportedPictureSizes()
+{
+ Q_D(AndroidCamera);
+ return d->getSupportedPictureSizes();
+}
+
+QList<QSize> AndroidCamera::getSupportedVideoSizes()
+{
+ Q_D(AndroidCamera);
+ return d->getSupportedVideoSizes();
+}
+
+void AndroidCamera::setPictureSize(const QSize &size)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setPictureSize", Q_ARG(QSize, size));
+}
+
+void AndroidCamera::setJpegQuality(int quality)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setJpegQuality", Q_ARG(int, quality));
+}
+
+void AndroidCamera::takePicture()
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "takePicture", Qt::BlockingQueuedConnection);
+}
+
+void AndroidCamera::setupPreviewFrameCallback()
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "setupPreviewFrameCallback");
+}
+
+void AndroidCamera::notifyNewFrames(bool notify)
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "notifyNewFrames", Q_ARG(bool, notify));
+}
+
+void AndroidCamera::fetchLastPreviewFrame()
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "fetchLastPreviewFrame");
+}
+
+QJniObject AndroidCamera::getCameraObject()
+{
+ Q_D(AndroidCamera);
+ return d->m_camera;
+}
+
+int AndroidCamera::getNumberOfCameras()
+{
+ if (!qt_androidCheckCameraPermission())
+ return 0;
+
+ return QJniObject::callStaticMethod<jint>("android/hardware/Camera",
+ "getNumberOfCameras");
+}
+
+void AndroidCamera::getCameraInfo(int id, QCameraDevicePrivate *info)
+{
+ Q_ASSERT(info);
+
+ QJniObject cameraInfo("android/hardware/Camera$CameraInfo");
+ QJniObject::callStaticMethod<void>("android/hardware/Camera",
+ "getCameraInfo",
+ "(ILandroid/hardware/Camera$CameraInfo;)V",
+ id, cameraInfo.object());
+
+ AndroidCamera::CameraFacing facing = AndroidCamera::CameraFacing(cameraInfo.getField<jint>("facing"));
+    // The orientation provided by Android is counter-clockwise; we need it clockwise.
+ info->orientation = (360 - cameraInfo.getField<jint>("orientation")) % 360;
+
+ switch (facing) {
+ case AndroidCamera::CameraFacingBack:
+ info->id = QByteArray("back");
+ info->description = QStringLiteral("Rear-facing camera");
+ info->position = QCameraDevice::BackFace;
+ info->isDefault = true;
+ break;
+ case AndroidCamera::CameraFacingFront:
+ info->id = QByteArray("front");
+ info->description = QStringLiteral("Front-facing camera");
+ info->position = QCameraDevice::FrontFace;
+ break;
+ default:
+ break;
+ }
+    // Append a number so cameras can be addressed correctly on systems with
+    // two or more front/back cameras.
+ if (id > 1) {
+ info->id.append(QByteArray::number(id));
+ info->description.append(QString(" %1").arg(id));
+ }
+}
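+
+// Worked examples: a back camera reporting an orientation of 90 (counter-clockwise) is
+// stored as (360 - 90) % 360 == 270 (clockwise); a hypothetical second back camera at
+// id 2 gets the id "back2" and the description "Rear-facing camera 2".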
+
+QVideoFrameFormat::PixelFormat AndroidCamera::QtPixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat format)
+{
+ switch (format) {
+ case AndroidCamera::NV21:
+ return QVideoFrameFormat::Format_NV21;
+ case AndroidCamera::YUY2:
+ return QVideoFrameFormat::Format_YUYV;
+ case AndroidCamera::JPEG:
+ return QVideoFrameFormat::Format_Jpeg;
+ case AndroidCamera::YV12:
+ return QVideoFrameFormat::Format_YV12;
+ default:
+ return QVideoFrameFormat::Format_Invalid;
+ }
+}
+
+AndroidCamera::ImageFormat AndroidCamera::AndroidImageFormatFromQtPixelFormat(QVideoFrameFormat::PixelFormat format)
+{
+ switch (format) {
+ case QVideoFrameFormat::Format_NV21:
+ return AndroidCamera::NV21;
+ case QVideoFrameFormat::Format_YUYV:
+ return AndroidCamera::YUY2;
+ case QVideoFrameFormat::Format_Jpeg:
+ return AndroidCamera::JPEG;
+ case QVideoFrameFormat::Format_YV12:
+ return AndroidCamera::YV12;
+ default:
+ return AndroidCamera::UnknownImageFormat;
+ }
+}
+
+QList<QCameraFormat> AndroidCamera::getSupportedFormats()
+{
+ QList<QCameraFormat> formats;
+ AndroidCamera::FpsRange range = getPreviewFpsRange();
+ for (const auto &previewSize : getSupportedVideoSizes()) {
+ for (const auto &previewFormat : getSupportedPreviewFormats()) {
+ QCameraFormatPrivate * format = new QCameraFormatPrivate();
+ format->pixelFormat = QtPixelFormatFromAndroidImageFormat(previewFormat);
+ format->resolution = previewSize;
+ format->minFrameRate = range.min;
+ format->maxFrameRate = range.max;
+ formats.append(format->create());
+ }
+ }
+
+ return formats;
+}
+
+void AndroidCamera::startPreview()
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "startPreview");
+}
+
+void AndroidCamera::stopPreview()
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "stopPreview");
+}
+
+void AndroidCamera::stopPreviewSynchronous()
+{
+ Q_D(AndroidCamera);
+ QMetaObject::invokeMethod(d, "stopPreview", Qt::BlockingQueuedConnection);
+}
+
+QJniObject AndroidCamera::getParametersObject()
+{
+ Q_D(AndroidCamera);
+ return d->m_parameters;
+}
+
+AndroidCameraPrivate::AndroidCameraPrivate()
+ : QObject()
+{
+}
+
+AndroidCameraPrivate::~AndroidCameraPrivate()
+{
+}
+
+static qint32 s_activeCameras = 0;
+
+bool AndroidCameraPrivate::init(int cameraId)
+{
+ m_cameraId = cameraId;
+ QJniEnvironment env;
+
+ const bool opened = s_activeCameras & (1 << cameraId);
+ if (opened)
+ return false;
+
+ m_camera = QJniObject::callStaticObjectMethod("android/hardware/Camera",
+ "open",
+ "(I)Landroid/hardware/Camera;",
+ cameraId);
+ if (!m_camera.isValid())
+ return false;
+
+ m_cameraListener = QJniObject(QtCameraListenerClassName, "(I)V", m_cameraId);
+ m_info = QJniObject("android/hardware/Camera$CameraInfo");
+ m_camera.callStaticMethod<void>("android/hardware/Camera",
+ "getCameraInfo",
+ "(ILandroid/hardware/Camera$CameraInfo;)V",
+ cameraId,
+ m_info.object());
+
+ QJniObject params = m_camera.callObjectMethod("getParameters",
+ "()Landroid/hardware/Camera$Parameters;");
+ m_parameters = QJniObject(params);
+ s_activeCameras |= 1 << cameraId;
+
+ return true;
+}
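+
+// Bitmask example: s_activeCameras keeps one bit per camera id, so with cameras 0 and 2
+// open the mask is (1 << 0) | (1 << 2) == 5, and a second open() of either id makes
+// init() bail out early above.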
+
+void AndroidCameraPrivate::release()
+{
+ m_previewSize = QSize();
+ m_parametersMutex.lock();
+ m_parameters = QJniObject();
+ m_parametersMutex.unlock();
+ if (m_camera.isValid()) {
+ m_camera.callMethod<void>("release");
+ s_activeCameras &= ~(1 << m_cameraId);
+ }
+}
+
+bool AndroidCameraPrivate::lock()
+{
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_camera.objectClass(), "lock", "()V");
+ env->CallVoidMethod(m_camera.object(), methodId);
+
+ if (env.checkAndClearExceptions())
+ return false;
+ return true;
+}
+
+bool AndroidCameraPrivate::unlock()
+{
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_camera.objectClass(), "unlock", "()V");
+ env->CallVoidMethod(m_camera.object(), methodId);
+
+ if (env.checkAndClearExceptions())
+ return false;
+ return true;
+}
+
+bool AndroidCameraPrivate::reconnect()
+{
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_camera.objectClass(), "reconnect", "()V");
+ env->CallVoidMethod(m_camera.object(), methodId);
+
+ if (env.checkAndClearExceptions())
+ return false;
+ return true;
+}
+
+AndroidCamera::CameraFacing AndroidCameraPrivate::getFacing()
+{
+ return AndroidCamera::CameraFacing(m_info.getField<jint>("facing"));
+}
+
+int AndroidCameraPrivate::getNativeOrientation()
+{
+ return m_info.getField<jint>("orientation");
+}
+
+QSize AndroidCameraPrivate::getPreferredPreviewSizeForVideo()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return QSize();
+
+ QJniObject size = m_parameters.callObjectMethod("getPreferredPreviewSizeForVideo",
+ "()Landroid/hardware/Camera$Size;");
+
+ if (!size.isValid())
+ return QSize();
+
+ return QSize(size.getField<jint>("width"), size.getField<jint>("height"));
+}
+
+QList<QSize> AndroidCameraPrivate::getSupportedPreviewSizes()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+ return getSupportedPreviewSizes(m_parameters);
+}
+
+QList<QSize> AndroidCameraPrivate::getSupportedPreviewSizes(QJniObject &parameters)
+{
+ QList<QSize> list;
+
+ if (parameters.isValid()) {
+ QJniObject sizeList = parameters.callObjectMethod("getSupportedPreviewSizes",
+ "()Ljava/util/List;");
+ int count = sizeList.callMethod<jint>("size");
+ for (int i = 0; i < count; ++i) {
+ QJniObject size = sizeList.callObjectMethod("get",
+ "(I)Ljava/lang/Object;",
+ i);
+ list.append(QSize(size.getField<jint>("width"), size.getField<jint>("height")));
+ }
+
+ std::sort(list.begin(), list.end(), qt_sizeLessThan);
+ }
+
+ return list;
+}
+
+QList<AndroidCamera::FpsRange> AndroidCameraPrivate::getSupportedPreviewFpsRange()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ QJniEnvironment env;
+
+ QList<AndroidCamera::FpsRange> rangeList;
+
+ if (m_parameters.isValid()) {
+ QJniObject rangeListNative = m_parameters.callObjectMethod("getSupportedPreviewFpsRange",
+ "()Ljava/util/List;");
+ int count = rangeListNative.callMethod<jint>("size");
+
+ rangeList.reserve(count);
+
+ for (int i = 0; i < count; ++i) {
+ QJniObject range = rangeListNative.callObjectMethod("get",
+ "(I)Ljava/lang/Object;",
+ i);
+
+ jintArray jRange = static_cast<jintArray>(range.object());
+ jint* rangeArray = env->GetIntArrayElements(jRange, 0);
+
+ AndroidCamera::FpsRange fpsRange;
+
+ fpsRange.min = rangeArray[0];
+ fpsRange.max = rangeArray[1];
+
+ env->ReleaseIntArrayElements(jRange, rangeArray, 0);
+
+ rangeList << fpsRange;
+ }
+ }
+
+ return rangeList;
+}
+
+AndroidCamera::FpsRange AndroidCameraPrivate::getPreviewFpsRange()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+ return getPreviewFpsRange(m_parameters);
+}
+
+AndroidCamera::FpsRange AndroidCameraPrivate::getPreviewFpsRange(QJniObject &parameters)
+{
+ QJniEnvironment env;
+
+ AndroidCamera::FpsRange range;
+
+ if (!parameters.isValid())
+ return range;
+
+ jintArray jRangeArray = env->NewIntArray(2);
+ parameters.callMethod<void>("getPreviewFpsRange", "([I)V", jRangeArray);
+
+ jint* jRangeElements = env->GetIntArrayElements(jRangeArray, 0);
+
+ // Android Camera API returns values scaled by 1000, so divide here to report
+ // normal values for Qt
+ range.min = jRangeElements[0] / 1000;
+ range.max = jRangeElements[1] / 1000;
+
+ env->ReleaseIntArrayElements(jRangeArray, jRangeElements, 0);
+ env->DeleteLocalRef(jRangeArray);
+
+ return range;
+}
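+
+// Scaling example: the Android API reports a range such as {30000, 30000}, which the
+// division above turns into an AndroidCamera::FpsRange of {30, 30} for Qt.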
+
+void AndroidCameraPrivate::setPreviewFpsRange(int min, int max)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+    // The Android Camera API works with values scaled by 1000, so multiply here to
+    // give the Android API the scale it expects.
+ m_parameters.callMethod<void>("setPreviewFpsRange", "(II)V", min * 1000, max * 1000);
+}
+
+AndroidCamera::ImageFormat AndroidCameraPrivate::getPreviewFormat()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return AndroidCamera::UnknownImageFormat;
+
+ return AndroidCamera::ImageFormat(m_parameters.callMethod<jint>("getPreviewFormat"));
+}
+
+void AndroidCameraPrivate::setPreviewFormat(AndroidCamera::ImageFormat fmt)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setPreviewFormat", "(I)V", jint(fmt));
+ applyParameters();
+}
+
+QList<AndroidCamera::ImageFormat> AndroidCameraPrivate::getSupportedPreviewFormats()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+ return getSupportedPreviewFormats(m_parameters);
+}
+
+QList<AndroidCamera::ImageFormat> AndroidCameraPrivate::getSupportedPreviewFormats(QJniObject &parameters)
+{
+ QList<AndroidCamera::ImageFormat> list;
+
+ if (parameters.isValid()) {
+ QJniObject formatList = parameters.callObjectMethod("getSupportedPreviewFormats",
+ "()Ljava/util/List;");
+ int count = formatList.callMethod<jint>("size");
+ for (int i = 0; i < count; ++i) {
+ QJniObject format = formatList.callObjectMethod("get",
+ "(I)Ljava/lang/Object;",
+ i);
+ list.append(AndroidCamera::ImageFormat(format.callMethod<jint>("intValue")));
+ }
+ }
+
+ return list;
+}
+
+QSize AndroidCameraPrivate::getPreviewSize()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return QSize();
+
+ QJniObject size = m_parameters.callObjectMethod("getPreviewSize",
+ "()Landroid/hardware/Camera$Size;");
+
+ if (!size.isValid())
+ return QSize();
+
+ return QSize(size.getField<jint>("width"), size.getField<jint>("height"));
+}
+
+void AndroidCameraPrivate::updatePreviewSize()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (m_previewSize.isValid()) {
+ m_parameters.callMethod<void>("setPreviewSize", "(II)V", m_previewSize.width(), m_previewSize.height());
+ applyParameters();
+ }
+
+ emit previewSizeChanged();
+}
+
+bool AndroidCameraPrivate::setPreviewTexture(void *surfaceTexture)
+{
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_camera.objectClass(), "setPreviewTexture",
+ "(Landroid/graphics/SurfaceTexture;)V");
+ env->CallVoidMethod(m_camera.object(), methodId, static_cast<jobject>(surfaceTexture));
+
+ if (env.checkAndClearExceptions())
+ return false;
+ return true;
+}
+
+bool AndroidCameraPrivate::setPreviewDisplay(void *surfaceHolder)
+{
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_camera.objectClass(), "setPreviewDisplay",
+ "(Landroid/view/SurfaceHolder;)V");
+ env->CallVoidMethod(m_camera.object(), methodId, static_cast<jobject>(surfaceHolder));
+
+ if (env.checkAndClearExceptions())
+ return false;
+ return true;
+}
+
+void AndroidCameraPrivate::setDisplayOrientation(int degrees)
+{
+ m_camera.callMethod<void>("setDisplayOrientation", "(I)V", degrees);
+ m_cameraListener.callMethod<void>("setPhotoRotation", "(I)V", degrees);
+}
+
+bool AndroidCameraPrivate::isZoomSupported()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return false;
+
+ return m_parameters.callMethod<jboolean>("isZoomSupported");
+}
+
+int AndroidCameraPrivate::getMaxZoom()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return 0;
+
+ return m_parameters.callMethod<jint>("getMaxZoom");
+}
+
+QList<int> AndroidCameraPrivate::getZoomRatios()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ QList<int> ratios;
+
+ if (m_parameters.isValid()) {
+ QJniObject ratioList = m_parameters.callObjectMethod("getZoomRatios",
+ "()Ljava/util/List;");
+ int count = ratioList.callMethod<jint>("size");
+ for (int i = 0; i < count; ++i) {
+ QJniObject zoomRatio = ratioList.callObjectMethod("get",
+ "(I)Ljava/lang/Object;",
+ i);
+
+ ratios.append(zoomRatio.callMethod<jint>("intValue"));
+ }
+ }
+
+ return ratios;
+}
+
+int AndroidCameraPrivate::getZoom()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return 0;
+
+ return m_parameters.callMethod<jint>("getZoom");
+}
+
+void AndroidCameraPrivate::setZoom(int value)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setZoom", "(I)V", value);
+ applyParameters();
+}
+
+QString AndroidCameraPrivate::getFlashMode()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ QString value;
+
+ if (m_parameters.isValid()) {
+ QJniObject flashMode = m_parameters.callObjectMethod("getFlashMode",
+ "()Ljava/lang/String;");
+ if (flashMode.isValid())
+ value = flashMode.toString();
+ }
+
+ return value;
+}
+
+void AndroidCameraPrivate::setFlashMode(const QString &value)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setFlashMode",
+ "(Ljava/lang/String;)V",
+ QJniObject::fromString(value).object());
+ applyParameters();
+}
+
+QString AndroidCameraPrivate::getFocusMode()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ QString value;
+
+ if (m_parameters.isValid()) {
+ QJniObject focusMode = m_parameters.callObjectMethod("getFocusMode",
+ "()Ljava/lang/String;");
+ if (focusMode.isValid())
+ value = focusMode.toString();
+ }
+
+ return value;
+}
+
+void AndroidCameraPrivate::setFocusMode(const QString &value)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setFocusMode",
+ "(Ljava/lang/String;)V",
+ QJniObject::fromString(value).object());
+ applyParameters();
+}
+
+int AndroidCameraPrivate::getMaxNumFocusAreas()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return 0;
+
+ return m_parameters.callMethod<jint>("getMaxNumFocusAreas");
+}
+
+QList<QRect> AndroidCameraPrivate::getFocusAreas()
+{
+ QList<QRect> areas;
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (m_parameters.isValid()) {
+ QJniObject list = m_parameters.callObjectMethod("getFocusAreas",
+ "()Ljava/util/List;");
+
+ if (list.isValid()) {
+ int count = list.callMethod<jint>("size");
+ for (int i = 0; i < count; ++i) {
+ QJniObject area = list.callObjectMethod("get",
+ "(I)Ljava/lang/Object;",
+ i);
+
+ areas.append(areaToRect(area.object()));
+ }
+ }
+ }
+
+ return areas;
+}
+
+void AndroidCameraPrivate::setFocusAreas(const QList<QRect> &areas)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid() || areas.isEmpty())
+ return;
+
+ QJniObject list;
+
+ if (!areas.isEmpty()) {
+ QJniEnvironment env;
+ QJniObject arrayList("java/util/ArrayList", "(I)V", areas.size());
+ for (int i = 0; i < areas.size(); ++i) {
+ arrayList.callMethod<jboolean>("add",
+ "(Ljava/lang/Object;)Z",
+ rectToArea(areas.at(i)).object());
+ }
+ list = arrayList;
+ }
+
+ m_parameters.callMethod<void>("setFocusAreas", "(Ljava/util/List;)V", list.object());
+
+ applyParameters();
+}
+
+void AndroidCameraPrivate::autoFocus()
+{
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_camera.objectClass(), "autoFocus",
+ "(Landroid/hardware/Camera$AutoFocusCallback;)V");
+ env->CallVoidMethod(m_camera.object(), methodId, m_cameraListener.object());
+
+ if (!env.checkAndClearExceptions())
+ emit autoFocusStarted();
+}
+
+void AndroidCameraPrivate::cancelAutoFocus()
+{
+ QJniEnvironment env;
+ m_camera.callMethod<void>("cancelAutoFocus");
+}
+
+bool AndroidCameraPrivate::isAutoExposureLockSupported()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return false;
+
+ return m_parameters.callMethod<jboolean>("isAutoExposureLockSupported");
+}
+
+bool AndroidCameraPrivate::getAutoExposureLock()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return false;
+
+ return m_parameters.callMethod<jboolean>("getAutoExposureLock");
+}
+
+void AndroidCameraPrivate::setAutoExposureLock(bool toggle)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setAutoExposureLock", "(Z)V", toggle);
+ applyParameters();
+}
+
+bool AndroidCameraPrivate::isAutoWhiteBalanceLockSupported()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return false;
+
+ return m_parameters.callMethod<jboolean>("isAutoWhiteBalanceLockSupported");
+}
+
+bool AndroidCameraPrivate::getAutoWhiteBalanceLock()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return false;
+
+ return m_parameters.callMethod<jboolean>("getAutoWhiteBalanceLock");
+}
+
+void AndroidCameraPrivate::setAutoWhiteBalanceLock(bool toggle)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setAutoWhiteBalanceLock", "(Z)V", toggle);
+ applyParameters();
+}
+
+int AndroidCameraPrivate::getExposureCompensation()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return 0;
+
+ return m_parameters.callMethod<jint>("getExposureCompensation");
+}
+
+void AndroidCameraPrivate::setExposureCompensation(int value)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setExposureCompensation", "(I)V", value);
+ applyParameters();
+}
+
+float AndroidCameraPrivate::getExposureCompensationStep()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return 0;
+
+ return m_parameters.callMethod<jfloat>("getExposureCompensationStep");
+}
+
+int AndroidCameraPrivate::getMinExposureCompensation()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return 0;
+
+ return m_parameters.callMethod<jint>("getMinExposureCompensation");
+}
+
+int AndroidCameraPrivate::getMaxExposureCompensation()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return 0;
+
+ return m_parameters.callMethod<jint>("getMaxExposureCompensation");
+}
+
+QString AndroidCameraPrivate::getSceneMode()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ QString value;
+
+ if (m_parameters.isValid()) {
+ QJniObject sceneMode = m_parameters.callObjectMethod("getSceneMode",
+ "()Ljava/lang/String;");
+ if (sceneMode.isValid())
+ value = sceneMode.toString();
+ }
+
+ return value;
+}
+
+void AndroidCameraPrivate::setSceneMode(const QString &value)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setSceneMode",
+ "(Ljava/lang/String;)V",
+ QJniObject::fromString(value).object());
+ applyParameters();
+}
+
+QString AndroidCameraPrivate::getWhiteBalance()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ QString value;
+
+ if (m_parameters.isValid()) {
+ QJniObject wb = m_parameters.callObjectMethod("getWhiteBalance",
+ "()Ljava/lang/String;");
+ if (wb.isValid())
+ value = wb.toString();
+ }
+
+ return value;
+}
+
+void AndroidCameraPrivate::setWhiteBalance(const QString &value)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setWhiteBalance",
+ "(Ljava/lang/String;)V",
+ QJniObject::fromString(value).object());
+ applyParameters();
+
+ emit whiteBalanceChanged();
+}
+
+void AndroidCameraPrivate::updateRotation()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ m_parameters.callMethod<void>("setRotation", "(I)V", m_rotation);
+ applyParameters();
+}
+
+QList<QSize> AndroidCameraPrivate::getSupportedPictureSizes()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ QList<QSize> list;
+
+ if (m_parameters.isValid()) {
+ QJniObject sizeList = m_parameters.callObjectMethod("getSupportedPictureSizes",
+ "()Ljava/util/List;");
+ int count = sizeList.callMethod<jint>("size");
+ for (int i = 0; i < count; ++i) {
+ QJniObject size = sizeList.callObjectMethod("get",
+ "(I)Ljava/lang/Object;",
+ i);
+ list.append(QSize(size.getField<jint>("width"), size.getField<jint>("height")));
+ }
+
+ std::sort(list.begin(), list.end(), qt_sizeLessThan);
+ }
+
+ return list;
+}
+
+QList<QSize> AndroidCameraPrivate::getSupportedVideoSizes()
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+ QList<QSize> list;
+
+ if (m_parameters.isValid()) {
+ QJniObject sizeList = m_parameters.callObjectMethod("getSupportedVideoSizes",
+ "()Ljava/util/List;");
+ if (!sizeList.isValid())
+ return list;
+
+ int count = sizeList.callMethod<jint>("size");
+ for (int i = 0; i < count; ++i) {
+ const QJniObject size = sizeList.callObjectMethod("get", "(I)Ljava/lang/Object;", i);
+ if (size.isValid())
+ list.append(QSize(size.getField<jint>("width"), size.getField<jint>("height")));
+ }
+ std::sort(list.begin(), list.end(), qt_sizeLessThan);
+ }
+
+ return list;
+}
+
+void AndroidCameraPrivate::setPictureSize(const QSize &size)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setPictureSize", "(II)V", size.width(), size.height());
+ applyParameters();
+}
+
+void AndroidCameraPrivate::setJpegQuality(int quality)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setJpegQuality", "(I)V", quality);
+ applyParameters();
+}
+
+void AndroidCameraPrivate::startPreview()
+{
+ setupPreviewFrameCallback();
+
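+    // startPreview() throws a RuntimeException when the preview cannot be started; map that onto
+    // previewFailedToStart() instead of letting the exception propagate.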
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_camera.objectClass(), "startPreview", "()V");
+ env->CallVoidMethod(m_camera.object(), methodId);
+
+ if (env.checkAndClearExceptions())
+ emit previewFailedToStart();
+ else
+ emit previewStarted();
+}
+
+void AndroidCameraPrivate::stopPreview()
+{
+ // cancel any pending new frame notification
+ m_cameraListener.callMethod<void>("notifyWhenFrameAvailable", "(Z)V", false);
+ m_camera.callMethod<void>("stopPreview");
+ emit previewStopped();
+}
+
+void AndroidCameraPrivate::takePicture()
+{
+ // We must clear the preview callback before calling takePicture(), otherwise the call will
+ // block and the camera server will be frozen until the next device restart...
+ // That problem only happens on some devices and on the emulator
+ m_cameraListener.callMethod<void>("clearPreviewCallback", "(Landroid/hardware/Camera;)V", m_camera.object());
+
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_camera.objectClass(), "takePicture",
+ "(Landroid/hardware/Camera$ShutterCallback;"
+ "Landroid/hardware/Camera$PictureCallback;"
+ "Landroid/hardware/Camera$PictureCallback;)V");
+ env->CallVoidMethod(m_camera.object(), methodId, m_cameraListener.object(),
+ jobject(0), m_cameraListener.object());
+
+ if (env.checkAndClearExceptions())
+ emit takePictureFailed();
+}
+
+void AndroidCameraPrivate::setupPreviewFrameCallback()
+{
+ m_cameraListener.callMethod<void>("setupPreviewCallback", "(Landroid/hardware/Camera;)V", m_camera.object());
+}
+
+void AndroidCameraPrivate::notifyNewFrames(bool notify)
+{
+ m_cameraListener.callMethod<void>("notifyNewFrames", "(Z)V", notify);
+}
+
+void AndroidCameraPrivate::fetchLastPreviewFrame()
+{
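+    // Grab the listener's most recent preview buffer and wrap it in a QVideoFrame; if no buffer
+    // has arrived yet, ask to be notified when the next one is available.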
+ QJniEnvironment env;
+ QJniObject data = m_cameraListener.callObjectMethod("lastPreviewBuffer", "()[B");
+
+ if (!data.isValid()) {
+ // If there's no buffer received yet, retry when the next one arrives
+ m_cameraListener.callMethod<void>("notifyWhenFrameAvailable", "(Z)V", true);
+ return;
+ }
+
+ const int arrayLength = env->GetArrayLength(static_cast<jbyteArray>(data.object()));
+ if (arrayLength == 0)
+ return;
+
+ QByteArray bytes(arrayLength, Qt::Uninitialized);
+ env->GetByteArrayRegion(static_cast<jbyteArray>(data.object()),
+ 0,
+ arrayLength,
+ reinterpret_cast<jbyte *>(bytes.data()));
+
+ const int width = m_cameraListener.callMethod<jint>("previewWidth");
+ const int height = m_cameraListener.callMethod<jint>("previewHeight");
+ const int format = m_cameraListener.callMethod<jint>("previewFormat");
+ const int bpl = m_cameraListener.callMethod<jint>("previewBytesPerLine");
+
+ QVideoFrameFormat frameFormat(
+ QSize(width, height),
+ qt_pixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat(format)));
+
+ QVideoFrame frame = QVideoFramePrivate::createFrame(
+ std::make_unique<QMemoryVideoBuffer>(std::move(bytes), bpl), std::move(frameFormat));
+
+ emit lastPreviewFrameFetched(frame);
+}
+
+void AndroidCameraPrivate::applyParameters()
+{
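+    // Push the locally cached Parameters object back to the Java Camera instance.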
+ QJniEnvironment env;
+ m_camera.callMethod<void>("setParameters",
+ "(Landroid/hardware/Camera$Parameters;)V",
+ m_parameters.object());
+}
+
+QStringList AndroidCameraPrivate::callParametersStringListMethod(const QByteArray &methodName)
+{
+ const std::lock_guard<QRecursiveMutex> locker(m_parametersMutex);
+
+ QStringList stringList;
+
+ if (m_parameters.isValid()) {
+ QJniObject list = m_parameters.callObjectMethod(methodName.constData(),
+ "()Ljava/util/List;");
+
+ if (list.isValid()) {
+ int count = list.callMethod<jint>("size");
+ for (int i = 0; i < count; ++i) {
+ QJniObject string = list.callObjectMethod("get",
+ "(I)Ljava/lang/Object;",
+ i);
+ stringList.append(string.toString());
+ }
+ }
+ }
+
+ return stringList;
+}
+
+bool AndroidCamera::registerNativeMethods()
+{
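+    // Bind the preview/picture callbacks in this file to the native methods of the
+    // QtCameraListener Java class.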
+ static const JNINativeMethod methods[] = {
+ {"notifyAutoFocusComplete", "(IZ)V", (void *)notifyAutoFocusComplete},
+ {"notifyPictureExposed", "(I)V", (void *)notifyPictureExposed},
+ {"notifyPictureCaptured", "(I[B)V", (void *)notifyPictureCaptured},
+ {"notifyNewPreviewFrame", "(I[BIIII)V", (void *)notifyNewPreviewFrame},
+ {"notifyFrameAvailable", "(I)V", (void *)notifyFrameAvailable}
+ };
+
+ const int size = std::size(methods);
+ return QJniEnvironment().registerNativeMethods(QtCameraListenerClassName, methods, size);
+}
+
+QT_END_NAMESPACE
+
+#include "androidcamera.moc"
+#include "moc_androidcamera_p.cpp"
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidcamera_p.h b/src/plugins/multimedia/android/wrappers/jni/androidcamera_p.h
new file mode 100644
index 000000000..8375cf3b1
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidcamera_p.h
@@ -0,0 +1,208 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// Copyright (C) 2016 Ruslan Baratov
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef ANDROIDCAMERA_H
+#define ANDROIDCAMERA_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qobject.h>
+#include <qsize.h>
+#include <qrect.h>
+#include <QtMultimedia/qcamera.h>
+#include <QtCore/qjniobject.h>
+#include <private/qcameradevice_p.h>
+
+QT_BEGIN_NAMESPACE
+
+class QThread;
+
+class AndroidCameraPrivate;
+class AndroidSurfaceTexture;
+class AndroidSurfaceHolder;
+
+class AndroidCamera : public QObject
+{
+ Q_OBJECT
+public:
+ enum CameraFacing {
+ CameraFacingBack = 0,
+ CameraFacingFront = 1
+ };
+ Q_ENUM(CameraFacing)
+
+ enum ImageFormat { // same values as in android.graphics.ImageFormat Java class
+ UnknownImageFormat = 0,
+ RGB565 = 4,
+ NV16 = 16,
+ NV21 = 17,
+ YUY2 = 20,
+ JPEG = 256,
+ YV12 = 842094169
+ };
+ Q_ENUM(ImageFormat)
+
+ // http://developer.android.com/reference/android/hardware/Camera.Parameters.html#getSupportedPreviewFpsRange%28%29
+ // "The values are multiplied by 1000 and represented in integers"
+ struct FpsRange {
+ int min;
+ int max;
+
+ FpsRange(): min(0), max(0) {}
+
+ qreal getMinReal() const { return min / 1000.0; }
+ qreal getMaxReal() const { return max / 1000.0; }
+
+ static FpsRange makeFromQReal(qreal min, qreal max)
+ {
+ FpsRange range;
+ range.min = static_cast<int>(min * 1000.0);
+ range.max = static_cast<int>(max * 1000.0);
+ return range;
+ }
+ };
+
+ ~AndroidCamera();
+
+ static AndroidCamera *open(int cameraId);
+
+ int cameraId() const;
+
+ bool lock();
+ bool unlock();
+ bool reconnect();
+ void release();
+
+ CameraFacing getFacing();
+ int getNativeOrientation();
+
+ QSize getPreferredPreviewSizeForVideo();
+ QList<QSize> getSupportedPreviewSizes();
+
+ QList<FpsRange> getSupportedPreviewFpsRange();
+
+ FpsRange getPreviewFpsRange();
+ void setPreviewFpsRange(FpsRange);
+
+ ImageFormat getPreviewFormat();
+ void setPreviewFormat(ImageFormat fmt);
+ QList<ImageFormat> getSupportedPreviewFormats();
+
+ QSize previewSize() const;
+ QSize actualPreviewSize();
+ void setPreviewSize(const QSize &size);
+ bool setPreviewTexture(AndroidSurfaceTexture *surfaceTexture);
+ bool setPreviewDisplay(AndroidSurfaceHolder *surfaceHolder);
+ void setDisplayOrientation(int degrees);
+
+ bool isZoomSupported();
+ int getMaxZoom();
+ QList<int> getZoomRatios();
+ int getZoom();
+ void setZoom(int value);
+
+ QStringList getSupportedFlashModes();
+ QString getFlashMode();
+ void setFlashMode(const QString &value);
+
+ QStringList getSupportedFocusModes();
+ QString getFocusMode();
+ void setFocusMode(const QString &value);
+
+ int getMaxNumFocusAreas();
+ QList<QRect> getFocusAreas();
+ void setFocusAreas(const QList<QRect> &areas);
+
+ void autoFocus();
+ void cancelAutoFocus();
+
+ bool isAutoExposureLockSupported();
+ bool getAutoExposureLock();
+ void setAutoExposureLock(bool toggle);
+
+ bool isAutoWhiteBalanceLockSupported();
+ bool getAutoWhiteBalanceLock();
+ void setAutoWhiteBalanceLock(bool toggle);
+
+ int getExposureCompensation();
+ void setExposureCompensation(int value);
+ float getExposureCompensationStep();
+ int getMinExposureCompensation();
+ int getMaxExposureCompensation();
+
+ QStringList getSupportedSceneModes();
+ QString getSceneMode();
+ void setSceneMode(const QString &value);
+
+ QStringList getSupportedWhiteBalance();
+ QString getWhiteBalance();
+ void setWhiteBalance(const QString &value);
+
+ void setRotation(int rotation);
+ int getRotation() const;
+
+ QList<QCameraFormat> getSupportedFormats();
+ QList<QSize> getSupportedPictureSizes();
+ QList<QSize> getSupportedVideoSizes();
+ void setPictureSize(const QSize &size);
+ void setJpegQuality(int quality);
+
+ void startPreview();
+ void stopPreview();
+ void stopPreviewSynchronous();
+
+ void takePicture();
+
+ void setupPreviewFrameCallback();
+ void notifyNewFrames(bool notify);
+ void fetchLastPreviewFrame();
+ QJniObject getCameraObject();
+ QJniObject getParametersObject();
+
+ static int getNumberOfCameras();
+ static void getCameraInfo(int id, QCameraDevicePrivate *info);
+ static QVideoFrameFormat::PixelFormat QtPixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat);
+ static AndroidCamera::ImageFormat AndroidImageFormatFromQtPixelFormat(QVideoFrameFormat::PixelFormat);
+ static bool requestCameraPermission();
+
+ static bool registerNativeMethods();
+Q_SIGNALS:
+ void previewSizeChanged();
+ void previewStarted();
+ void previewFailedToStart();
+ void previewStopped();
+
+ void autoFocusStarted();
+ void autoFocusComplete(bool success);
+
+ void whiteBalanceChanged();
+
+ void takePictureFailed();
+ void pictureExposed();
+ void pictureCaptured(const QByteArray &frame, QVideoFrameFormat::PixelFormat format, QSize size, int bytesPerLine);
+ void lastPreviewFrameFetched(const QVideoFrame &frame);
+ void newPreviewFrame(const QVideoFrame &frame);
+
+private:
+ AndroidCamera(AndroidCameraPrivate *d, QThread *worker);
+
+ Q_DECLARE_PRIVATE(AndroidCamera)
+ AndroidCameraPrivate *d_ptr;
+ QScopedPointer<QThread> m_worker;
+};
+
+QT_END_NAMESPACE
+
+Q_DECLARE_METATYPE(AndroidCamera::ImageFormat)
+
+#endif // ANDROIDCAMERA_H
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidmediametadataretriever.cpp b/src/plugins/multimedia/android/wrappers/jni/androidmediametadataretriever.cpp
new file mode 100644
index 000000000..25e1efdb0
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidmediametadataretriever.cpp
@@ -0,0 +1,136 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "androidmediametadataretriever_p.h"
+
+#include <QtCore/QUrl>
+#include <qdebug.h>
+#include <QtCore/qcoreapplication.h>
+
+QT_BEGIN_NAMESPACE
+
+AndroidMediaMetadataRetriever::AndroidMediaMetadataRetriever()
+{
+ m_metadataRetriever = QJniObject("android/media/MediaMetadataRetriever");
+}
+
+AndroidMediaMetadataRetriever::~AndroidMediaMetadataRetriever()
+{
+ release();
+}
+
+QString AndroidMediaMetadataRetriever::extractMetadata(MetadataKey key)
+{
+ QString value;
+
+ QJniObject metadata = m_metadataRetriever.callObjectMethod("extractMetadata",
+ "(I)Ljava/lang/String;",
+ jint(key));
+ if (metadata.isValid())
+ value = metadata.toString();
+
+ return value;
+}
+
+void AndroidMediaMetadataRetriever::release()
+{
+ if (!m_metadataRetriever.isValid())
+ return;
+
+ m_metadataRetriever.callMethod<void>("release");
+}
+
+bool AndroidMediaMetadataRetriever::setDataSource(const QUrl &url)
+{
+ if (!m_metadataRetriever.isValid())
+ return false;
+
+ QJniEnvironment env;
+ if (url.isLocalFile()) { // also includes qrc files (copied to a temp file by QMediaPlayer)
+ QJniObject string = QJniObject::fromString(url.path());
+ QJniObject fileInputStream("java/io/FileInputStream",
+ "(Ljava/lang/String;)V",
+ string.object());
+
+ if (!fileInputStream.isValid())
+ return false;
+
+ QJniObject fd = fileInputStream.callObjectMethod("getFD",
+ "()Ljava/io/FileDescriptor;");
+ if (!fd.isValid()) {
+ fileInputStream.callMethod<void>("close");
+ return false;
+ }
+
+ auto methodId = env->GetMethodID(m_metadataRetriever.objectClass(), "setDataSource",
+ "(Ljava/io/FileDescriptor;)V");
+ env->CallVoidMethod(m_metadataRetriever.object(), methodId, fd.object());
+ bool ok = !env.checkAndClearExceptions();
+ fileInputStream.callMethod<void>("close");
+ if (!ok)
+ return false;
+ } else if (url.scheme() == QLatin1String("assets")) {
+ QJniObject string = QJniObject::fromString(url.path().mid(1)); // remove first '/'
+ QJniObject activity(QNativeInterface::QAndroidApplication::context());
+ QJniObject assetManager = activity.callObjectMethod("getAssets",
+ "()Landroid/content/res/AssetManager;");
+ QJniObject assetFd = assetManager.callObjectMethod("openFd",
+ "(Ljava/lang/String;)Landroid/content/res/AssetFileDescriptor;",
+ string.object());
+ if (!assetFd.isValid())
+ return false;
+
+ QJniObject fd = assetFd.callObjectMethod("getFileDescriptor",
+ "()Ljava/io/FileDescriptor;");
+ if (!fd.isValid()) {
+ assetFd.callMethod<void>("close");
+ return false;
+ }
+
+ auto methodId = env->GetMethodID(m_metadataRetriever.objectClass(), "setDataSource",
+ "(Ljava/io/FileDescriptor;JJ)V");
+ env->CallVoidMethod(m_metadataRetriever.object(), methodId,
+ fd.object(),
+ assetFd.callMethod<jlong>("getStartOffset"),
+ assetFd.callMethod<jlong>("getLength"));
+ bool ok = !env.checkAndClearExceptions();
+ assetFd.callMethod<void>("close");
+
+ if (!ok)
+ return false;
+ } else if (url.scheme() != QLatin1String("content")) {
+        // Since API level 14, setDataSource(String, Map<String, String>) accepts remote media,
+        // so use it for everything that is not a content: URL.
+ QJniObject string = QJniObject::fromString(url.toString(QUrl::FullyEncoded));
+ QJniObject hash("java/util/HashMap");
+
+ auto methodId = env->GetMethodID(m_metadataRetriever.objectClass(), "setDataSource",
+ "(Ljava/lang/String;Ljava/util/Map;)V");
+ env->CallVoidMethod(m_metadataRetriever.object(), methodId,
+ string.object(), hash.object());
+ if (env.checkAndClearExceptions())
+ return false;
+ } else {
+        // content: URIs have to be resolved through the application context, so use
+        // setDataSource(Context, Uri) for them.
+ QJniObject string = QJniObject::fromString(url.toString(QUrl::FullyEncoded));
+ QJniObject uri = m_metadataRetriever.callStaticObjectMethod(
+ "android/net/Uri",
+ "parse",
+ "(Ljava/lang/String;)Landroid/net/Uri;",
+ string.object());
+ if (!uri.isValid())
+ return false;
+
+ auto methodId = env->GetMethodID(m_metadataRetriever.objectClass(), "setDataSource",
+ "(Landroid/content/Context;Landroid/net/Uri;)V");
+ env->CallVoidMethod(m_metadataRetriever.object(), methodId,
+ QNativeInterface::QAndroidApplication::context().object(),
+ uri.object());
+ if (env.checkAndClearExceptions())
+ return false;
+ }
+
+ return true;
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidmediametadataretriever_p.h b/src/plugins/multimedia/android/wrappers/jni/androidmediametadataretriever_p.h
new file mode 100644
index 000000000..68e346336
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidmediametadataretriever_p.h
@@ -0,0 +1,66 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef ANDROIDMEDIAMETADATARETRIEVER_H
+#define ANDROIDMEDIAMETADATARETRIEVER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/private/qglobal_p.h>
+#include <QtCore/qurl.h>
+#include <QtCore/qjniobject.h>
+
+QT_BEGIN_NAMESPACE
+
+class AndroidMediaMetadataRetriever
+{
+public:
+ enum MetadataKey {
+ Album = 1,
+ AlbumArtist = 13,
+ Artist = 2,
+ Author = 3,
+ Bitrate = 20,
+ CDTrackNumber = 0,
+ Compilation = 15,
+ Composer = 4,
+ Date = 5,
+ DiscNumber = 14,
+ Duration = 9,
+ Genre = 6,
+ HasAudio = 16,
+ HasVideo = 17,
+ Location = 23,
+ MimeType = 12,
+ NumTracks = 10,
+ Title = 7,
+ VideoHeight = 19,
+ VideoWidth = 18,
+ VideoRotation = 24,
+ Writer = 11,
+ Year = 8
+ };
+
+ AndroidMediaMetadataRetriever();
+ ~AndroidMediaMetadataRetriever();
+
+ QString extractMetadata(MetadataKey key);
+ bool setDataSource(const QUrl &url);
+
+private:
+ void release();
+ QJniObject m_metadataRetriever;
+};
+
+QT_END_NAMESPACE
+
+#endif // ANDROIDMEDIAMETADATARETRIEVER_H
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidmediaplayer.cpp b/src/plugins/multimedia/android/wrappers/jni/androidmediaplayer.cpp
new file mode 100644
index 000000000..91f489f9e
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidmediaplayer.cpp
@@ -0,0 +1,535 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "androidmediaplayer_p.h"
+#include "androidsurfacetexture_p.h"
+
+#include <QList>
+#include <QReadWriteLock>
+#include <QString>
+#include <QtCore/qcoreapplication.h>
+#include <qloggingcategory.h>
+
+static const char QtAndroidMediaPlayerClassName[] = "org/qtproject/qt/android/multimedia/QtAndroidMediaPlayer";
+typedef QList<AndroidMediaPlayer *> MediaPlayerList;
+Q_GLOBAL_STATIC(MediaPlayerList, mediaPlayers)
+Q_GLOBAL_STATIC(QReadWriteLock, rwLock)
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(lcAudio, "qt.multimedia.audio")
+
+AndroidMediaPlayer::AndroidMediaPlayer()
+ : QObject()
+{
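+    // Create the Java-side QtAndroidMediaPlayer and register this instance so the static
+    // callbacks below can map the jlong id back to it.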
+ QWriteLocker locker(rwLock);
+ auto context = QNativeInterface::QAndroidApplication::context();
+ const jlong id = reinterpret_cast<jlong>(this);
+ mMediaPlayer = QJniObject(QtAndroidMediaPlayerClassName,
+ "(Landroid/content/Context;J)V",
+ context.object(),
+ id);
+ mediaPlayers->append(this);
+}
+
+AndroidMediaPlayer::~AndroidMediaPlayer()
+{
+ QWriteLocker locker(rwLock);
+ const int i = mediaPlayers->indexOf(this);
+ Q_ASSERT(i != -1);
+ mediaPlayers->remove(i);
+}
+
+void AndroidMediaPlayer::release()
+{
+ mMediaPlayer.callMethod<void>("release");
+}
+
+void AndroidMediaPlayer::reset()
+{
+ mMediaPlayer.callMethod<void>("reset");
+}
+
+int AndroidMediaPlayer::getCurrentPosition()
+{
+ return mMediaPlayer.callMethod<jint>("getCurrentPosition");
+}
+
+int AndroidMediaPlayer::getDuration()
+{
+ return mMediaPlayer.callMethod<jint>("getDuration");
+}
+
+bool AndroidMediaPlayer::isPlaying()
+{
+ return mMediaPlayer.callMethod<jboolean>("isPlaying");
+}
+
+int AndroidMediaPlayer::volume()
+{
+ return mMediaPlayer.callMethod<jint>("getVolume");
+}
+
+bool AndroidMediaPlayer::isMuted()
+{
+ return mMediaPlayer.callMethod<jboolean>("isMuted");
+}
+
+qreal AndroidMediaPlayer::playbackRate()
+{
+ qreal rate(1.0);
+
+ if (QNativeInterface::QAndroidApplication::sdkVersion() < 23)
+ return rate;
+
+ QJniObject player = mMediaPlayer.callObjectMethod("getMediaPlayerHandle",
+ "()Landroid/media/MediaPlayer;");
+ if (player.isValid()) {
+ QJniObject playbackParams = player.callObjectMethod("getPlaybackParams",
+ "()Landroid/media/PlaybackParams;");
+ if (playbackParams.isValid()) {
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(playbackParams.objectClass(), "getSpeed", "()F");
+ const qreal speed = env->CallFloatMethod(playbackParams.object(), methodId);
+ if (!env.checkAndClearExceptions())
+ rate = speed;
+ }
+ }
+
+ return rate;
+}
+
+jobject AndroidMediaPlayer::display()
+{
+ return mMediaPlayer.callObjectMethod("display", "()Landroid/view/SurfaceHolder;").object();
+}
+
+AndroidMediaPlayer::TrackInfo convertTrackInfo(int streamNumber, QJniObject androidTrackInfo)
+{
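+    // If the Java TrackInfo cannot be queried, fall back to an Unknown track with an undefined
+    // language and a generic MIME type.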
+ const QLatin1String unknownMimeType("application/octet-stream");
+ const QLatin1String undefinedLanguage("und");
+
+ if (!androidTrackInfo.isValid())
+ return { streamNumber, AndroidMediaPlayer::TrackType::Unknown, undefinedLanguage,
+ unknownMimeType };
+
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(androidTrackInfo.objectClass(), "getType", "()I");
+ const jint type = env->CallIntMethod(androidTrackInfo.object(), methodId);
+ if (env.checkAndClearExceptions())
+ return { streamNumber, AndroidMediaPlayer::TrackType::Unknown, undefinedLanguage,
+ unknownMimeType };
+
+ if (type < 0 || type > 5) {
+ return { streamNumber, AndroidMediaPlayer::TrackType::Unknown, undefinedLanguage,
+ unknownMimeType };
+ }
+
+ AndroidMediaPlayer::TrackType trackType = static_cast<AndroidMediaPlayer::TrackType>(type);
+
+ auto languageObject = androidTrackInfo.callObjectMethod("getLanguage", "()Ljava/lang/String;");
+ QString language = languageObject.isValid() ? languageObject.toString() : undefinedLanguage;
+
+ auto mimeTypeObject = androidTrackInfo.callObjectMethod("getMime", "()Ljava/lang/String;");
+ QString mimeType = mimeTypeObject.isValid() ? mimeTypeObject.toString() : unknownMimeType;
+
+ return { streamNumber, trackType, language, mimeType };
+}
+
+QList<AndroidMediaPlayer::TrackInfo> AndroidMediaPlayer::tracksInfo()
+{
+ auto androidTracksInfoObject = mMediaPlayer.callObjectMethod(
+ "getAllTrackInfo",
+ "()[Lorg/qtproject/qt/android/multimedia/QtAndroidMediaPlayer$TrackInfo;");
+
+ if (!androidTracksInfoObject.isValid())
+ return QList<AndroidMediaPlayer::TrackInfo>();
+
+ auto androidTracksInfo = androidTracksInfoObject.object<jobjectArray>();
+ if (!androidTracksInfo)
+ return QList<AndroidMediaPlayer::TrackInfo>();
+
+ QJniEnvironment environment;
+ auto numberofTracks = environment->GetArrayLength(androidTracksInfo);
+
+ QList<AndroidMediaPlayer::TrackInfo> tracksInformation;
+
+ for (int index = 0; index < numberofTracks; index++) {
+ auto androidTrackInformation = environment->GetObjectArrayElement(androidTracksInfo, index);
+
+ if (environment.checkAndClearExceptions()) {
+ continue;
+ }
+
+ auto trackInfo = convertTrackInfo(index, androidTrackInformation);
+ tracksInformation.insert(index, trackInfo);
+
+ environment->DeleteLocalRef(androidTrackInformation);
+ }
+
+ return tracksInformation;
+}
+
+int AndroidMediaPlayer::activeTrack(TrackType androidTrackType)
+{
+ int type = static_cast<int>(androidTrackType);
+ return mMediaPlayer.callMethod<jint>("getSelectedTrack", "(I)I", type);
+}
+
+void AndroidMediaPlayer::deselectTrack(int trackNumber)
+{
+ mMediaPlayer.callMethod<void>("deselectTrack", "(I)V", trackNumber);
+}
+
+void AndroidMediaPlayer::selectTrack(int trackNumber)
+{
+ mMediaPlayer.callMethod<void>("selectTrack", "(I)V", trackNumber);
+}
+
+void AndroidMediaPlayer::play()
+{
+ mMediaPlayer.callMethod<void>("start");
+}
+
+void AndroidMediaPlayer::pause()
+{
+ mMediaPlayer.callMethod<void>("pause");
+}
+
+void AndroidMediaPlayer::stop()
+{
+ mMediaPlayer.callMethod<void>("stop");
+}
+
+void AndroidMediaPlayer::seekTo(qint32 msec)
+{
+ mMediaPlayer.callMethod<void>("seekTo", "(I)V", jint(msec));
+}
+
+void AndroidMediaPlayer::setMuted(bool mute)
+{
+ if (mAudioBlocked)
+ return;
+
+ mMediaPlayer.callMethod<void>("mute", "(Z)V", jboolean(mute));
+}
+
+void AndroidMediaPlayer::setDataSource(const QNetworkRequest &request)
+{
+ QJniObject string = QJniObject::fromString(request.url().toString(QUrl::FullyEncoded));
+
+ mMediaPlayer.callMethod<void>("initHeaders", "()V");
+ for (auto &header : request.rawHeaderList()) {
+ auto value = request.rawHeader(header);
+ mMediaPlayer.callMethod<void>("setHeader", "(Ljava/lang/String;Ljava/lang/String;)V",
+ QJniObject::fromString(QLatin1String(header)).object(),
+ QJniObject::fromString(QLatin1String(value)).object());
+ }
+
+ mMediaPlayer.callMethod<void>("setDataSource", "(Ljava/lang/String;)V", string.object());
+}
+
+void AndroidMediaPlayer::prepareAsync()
+{
+ mMediaPlayer.callMethod<void>("prepareAsync");
+}
+
+void AndroidMediaPlayer::setVolume(int volume)
+{
+ if (mAudioBlocked)
+ return;
+
+ mMediaPlayer.callMethod<void>("setVolume", "(I)V", jint(volume));
+}
+
+void AndroidMediaPlayer::blockAudio()
+{
+ mAudioBlocked = true;
+}
+
+void AndroidMediaPlayer::unblockAudio()
+{
+ mAudioBlocked = false;
+}
+
+void AndroidMediaPlayer::startSoundStreaming(const int inputId, const int outputId)
+{
+ QJniObject::callStaticMethod<void>("org/qtproject/qt/android/multimedia/QtAudioDeviceManager",
+ "startSoundStreaming",
+ inputId,
+ outputId);
+}
+
+void AndroidMediaPlayer::stopSoundStreaming()
+{
+ QJniObject::callStaticMethod<void>(
+ "org/qtproject/qt/android/multimedia/QtAudioDeviceManager", "stopSoundStreaming");
+}
+
+bool AndroidMediaPlayer::setPlaybackRate(qreal rate)
+{
+ if (QNativeInterface::QAndroidApplication::sdkVersion() < 23) {
+ qWarning() << "Setting the playback rate on a media player requires"
+ << "Android 6.0 (API level 23) or later";
+ return false;
+ }
+
+ return mMediaPlayer.callMethod<jboolean>("setPlaybackRate", jfloat(rate));
+}
+
+void AndroidMediaPlayer::setDisplay(AndroidSurfaceTexture *surfaceTexture)
+{
+ mMediaPlayer.callMethod<void>("setDisplay",
+ "(Landroid/view/SurfaceHolder;)V",
+ surfaceTexture ? surfaceTexture->surfaceHolder() : 0);
+}
+
+bool AndroidMediaPlayer::setAudioOutput(const QByteArray &deviceId)
+{
+ const bool ret = QJniObject::callStaticMethod<jboolean>(
+ "org/qtproject/qt/android/multimedia/QtAudioDeviceManager",
+ "setAudioOutput",
+ "(I)Z",
+ deviceId.toInt());
+
+ if (!ret)
+ qCWarning(lcAudio) << "Output device not set";
+
+ return ret;
+}
+
+#if 0
+void AndroidMediaPlayer::setAudioRole(QAudio::Role role)
+{
+ QString r;
+ switch (role) {
+ case QAudio::MusicRole:
+ r = QLatin1String("CONTENT_TYPE_MUSIC");
+ break;
+ case QAudio::VideoRole:
+ r = QLatin1String("CONTENT_TYPE_MOVIE");
+ break;
+ case QAudio::VoiceCommunicationRole:
+ r = QLatin1String("USAGE_VOICE_COMMUNICATION");
+ break;
+ case QAudio::AlarmRole:
+ r = QLatin1String("USAGE_ALARM");
+ break;
+ case QAudio::NotificationRole:
+ r = QLatin1String("USAGE_NOTIFICATION");
+ break;
+ case QAudio::RingtoneRole:
+ r = QLatin1String("USAGE_NOTIFICATION_RINGTONE");
+ break;
+ case QAudio::AccessibilityRole:
+ r = QLatin1String("USAGE_ASSISTANCE_ACCESSIBILITY");
+ break;
+ case QAudio::SonificationRole:
+ r = QLatin1String("CONTENT_TYPE_SONIFICATION");
+ break;
+ case QAudio::GameRole:
+ r = QLatin1String("USAGE_GAME");
+ break;
+ default:
+ return;
+ }
+
+ int type = 0; // CONTENT_TYPE_UNKNOWN
+ int usage = 0; // USAGE_UNKNOWN
+
+ if (r == QLatin1String("CONTENT_TYPE_MOVIE"))
+ type = 3;
+ else if (r == QLatin1String("CONTENT_TYPE_MUSIC"))
+ type = 2;
+ else if (r == QLatin1String("CONTENT_TYPE_SONIFICATION"))
+ type = 4;
+ else if (r == QLatin1String("CONTENT_TYPE_SPEECH"))
+ type = 1;
+ else if (r == QLatin1String("USAGE_ALARM"))
+ usage = 4;
+ else if (r == QLatin1String("USAGE_ASSISTANCE_ACCESSIBILITY"))
+ usage = 11;
+ else if (r == QLatin1String("USAGE_ASSISTANCE_NAVIGATION_GUIDANCE"))
+ usage = 12;
+ else if (r == QLatin1String("USAGE_ASSISTANCE_SONIFICATION"))
+ usage = 13;
+ else if (r == QLatin1String("USAGE_ASSISTANT"))
+ usage = 16;
+ else if (r == QLatin1String("USAGE_GAME"))
+ usage = 14;
+ else if (r == QLatin1String("USAGE_MEDIA"))
+ usage = 1;
+ else if (r == QLatin1String("USAGE_NOTIFICATION"))
+ usage = 5;
+ else if (r == QLatin1String("USAGE_NOTIFICATION_COMMUNICATION_DELAYED"))
+ usage = 9;
+ else if (r == QLatin1String("USAGE_NOTIFICATION_COMMUNICATION_INSTANT"))
+ usage = 8;
+ else if (r == QLatin1String("USAGE_NOTIFICATION_COMMUNICATION_REQUEST"))
+ usage = 7;
+ else if (r == QLatin1String("USAGE_NOTIFICATION_EVENT"))
+ usage = 10;
+ else if (r == QLatin1String("USAGE_NOTIFICATION_RINGTONE"))
+ usage = 6;
+ else if (r == QLatin1String("USAGE_VOICE_COMMUNICATION"))
+ usage = 2;
+ else if (r == QLatin1String("USAGE_VOICE_COMMUNICATION_SIGNALLING"))
+ usage = 3;
+
+ mMediaPlayer.callMethod<void>("setAudioAttributes", "(II)V", jint(type), jint(usage));
+}
+#endif
+
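+// The callbacks below are invoked from Java; they look up the player by its id under the read
+// lock and forward the event as a Qt signal.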
+static void onErrorNative(JNIEnv *env, jobject thiz, jint what, jint extra, jlong id)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(thiz);
+ QReadLocker locker(rwLock);
+ const int i = mediaPlayers->indexOf(reinterpret_cast<AndroidMediaPlayer *>(id));
+ if (Q_UNLIKELY(i == -1))
+ return;
+
+ Q_EMIT (*mediaPlayers)[i]->error(what, extra);
+}
+
+static void onBufferingUpdateNative(JNIEnv *env, jobject thiz, jint percent, jlong id)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(thiz);
+ QReadLocker locker(rwLock);
+ const int i = mediaPlayers->indexOf(reinterpret_cast<AndroidMediaPlayer *>(id));
+ if (Q_UNLIKELY(i == -1))
+ return;
+
+ Q_EMIT (*mediaPlayers)[i]->bufferingChanged(percent);
+}
+
+static void onProgressUpdateNative(JNIEnv *env, jobject thiz, jint progress, jlong id)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(thiz);
+ QReadLocker locker(rwLock);
+ const int i = mediaPlayers->indexOf(reinterpret_cast<AndroidMediaPlayer *>(id));
+ if (Q_UNLIKELY(i == -1))
+ return;
+
+ Q_EMIT (*mediaPlayers)[i]->progressChanged(progress);
+}
+
+static void onDurationChangedNative(JNIEnv *env, jobject thiz, jint duration, jlong id)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(thiz);
+ QReadLocker locker(rwLock);
+ const int i = mediaPlayers->indexOf(reinterpret_cast<AndroidMediaPlayer *>(id));
+ if (Q_UNLIKELY(i == -1))
+ return;
+
+ Q_EMIT (*mediaPlayers)[i]->durationChanged(duration);
+}
+
+static void onInfoNative(JNIEnv *env, jobject thiz, jint what, jint extra, jlong id)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(thiz);
+ QReadLocker locker(rwLock);
+ const int i = mediaPlayers->indexOf(reinterpret_cast<AndroidMediaPlayer *>(id));
+ if (Q_UNLIKELY(i == -1))
+ return;
+
+ Q_EMIT (*mediaPlayers)[i]->info(what, extra);
+}
+
+static void onStateChangedNative(JNIEnv *env, jobject thiz, jint state, jlong id)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(thiz);
+ QReadLocker locker(rwLock);
+ const int i = mediaPlayers->indexOf(reinterpret_cast<AndroidMediaPlayer *>(id));
+ if (Q_UNLIKELY(i == -1))
+ return;
+
+ Q_EMIT (*mediaPlayers)[i]->stateChanged(state);
+}
+
+static void onVideoSizeChangedNative(JNIEnv *env,
+ jobject thiz,
+ jint width,
+ jint height,
+ jlong id)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(thiz);
+ QReadLocker locker(rwLock);
+ const int i = mediaPlayers->indexOf(reinterpret_cast<AndroidMediaPlayer *>(id));
+ if (Q_UNLIKELY(i == -1))
+ return;
+
+ Q_EMIT (*mediaPlayers)[i]->videoSizeChanged(width, height);
+}
+
+static AndroidMediaPlayer *getMediaPlayer(jlong ptr)
+{
+ auto mediaplayer = reinterpret_cast<AndroidMediaPlayer *>(ptr);
+ if (!mediaplayer || !mediaPlayers->contains(mediaplayer))
+ return nullptr;
+
+ return mediaplayer;
+}
+
+static void onTrackInfoChangedNative(JNIEnv *env, jobject thiz, jlong ptr)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(thiz);
+
+ QReadLocker locker(rwLock);
+ auto mediaplayer = getMediaPlayer(ptr);
+ if (!mediaplayer)
+ return;
+
+ emit mediaplayer->tracksInfoChanged();
+}
+
+static void onTimedTextChangedNative(JNIEnv *env, jobject thiz, jstring timedText, jint time,
+ jlong ptr)
+{
+ Q_UNUSED(env);
+ Q_UNUSED(thiz);
+ Q_UNUSED(time);
+
+ QReadLocker locker(rwLock);
+
+ auto mediaplayer = getMediaPlayer(ptr);
+ if (!mediaplayer)
+ return;
+
+ QString subtitleText;
+    if (timedText != nullptr) {
+        const char *utf8Text = env->GetStringUTFChars(timedText, nullptr);
+        subtitleText = QString::fromUtf8(utf8Text);
+        env->ReleaseStringUTFChars(timedText, utf8Text);
+    }
+
+ emit mediaplayer->timedTextChanged(subtitleText);
+}
+
+bool AndroidMediaPlayer::registerNativeMethods()
+{
+ static const JNINativeMethod methods[] = {
+ { "onErrorNative", "(IIJ)V", reinterpret_cast<void *>(onErrorNative) },
+ { "onBufferingUpdateNative", "(IJ)V", reinterpret_cast<void *>(onBufferingUpdateNative) },
+ { "onProgressUpdateNative", "(IJ)V", reinterpret_cast<void *>(onProgressUpdateNative) },
+ { "onDurationChangedNative", "(IJ)V", reinterpret_cast<void *>(onDurationChangedNative) },
+ { "onInfoNative", "(IIJ)V", reinterpret_cast<void *>(onInfoNative) },
+ { "onVideoSizeChangedNative", "(IIJ)V",
+ reinterpret_cast<void *>(onVideoSizeChangedNative) },
+ { "onStateChangedNative", "(IJ)V", reinterpret_cast<void *>(onStateChangedNative) },
+ { "onTrackInfoChangedNative", "(J)V", reinterpret_cast<void *>(onTrackInfoChangedNative) },
+ { "onTimedTextChangedNative", "(Ljava/lang/String;IJ)V",
+ reinterpret_cast<void *>(onTimedTextChangedNative) }
+ };
+
+ const int size = std::size(methods);
+ return QJniEnvironment().registerNativeMethods(QtAndroidMediaPlayerClassName, methods, size);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_androidmediaplayer_p.cpp"
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidmediaplayer_p.h b/src/plugins/multimedia/android/wrappers/jni/androidmediaplayer_p.h
new file mode 100644
index 000000000..66095b114
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidmediaplayer_p.h
@@ -0,0 +1,135 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef ANDROIDMEDIAPLAYER_H
+#define ANDROIDMEDIAPLAYER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QObject>
+#include <QNetworkRequest>
+#include <QtCore/qjniobject.h>
+#include <QAudio>
+
+QT_BEGIN_NAMESPACE
+
+class AndroidSurfaceTexture;
+
+class AndroidMediaPlayer : public QObject
+{
+ Q_OBJECT
+public:
+ AndroidMediaPlayer();
+ ~AndroidMediaPlayer();
+
+ enum MediaError
+ {
+ // What
+ MEDIA_ERROR_UNKNOWN = 1,
+ MEDIA_ERROR_SERVER_DIED = 100,
+ MEDIA_ERROR_INVALID_STATE = -38, // Undocumented
+ // Extra
+ MEDIA_ERROR_IO = -1004,
+ MEDIA_ERROR_MALFORMED = -1007,
+ MEDIA_ERROR_UNSUPPORTED = -1010,
+ MEDIA_ERROR_TIMED_OUT = -110,
+ MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK = 200,
+ MEDIA_ERROR_BAD_THINGS_ARE_GOING_TO_HAPPEN = -2147483648 // Undocumented
+ };
+
+ enum MediaInfo
+ {
+ MEDIA_INFO_UNKNOWN = 1,
+ MEDIA_INFO_VIDEO_TRACK_LAGGING = 700,
+ MEDIA_INFO_VIDEO_RENDERING_START = 3,
+ MEDIA_INFO_BUFFERING_START = 701,
+ MEDIA_INFO_BUFFERING_END = 702,
+ MEDIA_INFO_BAD_INTERLEAVING = 800,
+ MEDIA_INFO_NOT_SEEKABLE = 801,
+ MEDIA_INFO_METADATA_UPDATE = 802
+ };
+
+ enum MediaPlayerState {
+ Uninitialized = 0x1, /* End */
+ Idle = 0x2,
+ Preparing = 0x4,
+ Prepared = 0x8,
+ Initialized = 0x10,
+ Started = 0x20,
+ Stopped = 0x40,
+ Paused = 0x80,
+ PlaybackCompleted = 0x100,
+ Error = 0x200
+ };
+
+ enum TrackType { Unknown = 0, Video, Audio, TimedText, Subtitle, Metadata };
+
+ struct TrackInfo
+ {
+ int trackNumber;
+ TrackType trackType;
+ QString language;
+ QString mimeType;
+ };
+
+ void release();
+ void reset();
+
+ int getCurrentPosition();
+ int getDuration();
+ bool isPlaying();
+ int volume();
+ bool isMuted();
+ qreal playbackRate();
+ jobject display();
+
+ void play();
+ void pause();
+ void stop();
+ void seekTo(qint32 msec);
+ void setMuted(bool mute);
+ void setDataSource(const QNetworkRequest &request);
+ void prepareAsync();
+ void setVolume(int volume);
+ static void startSoundStreaming(const int inputId, const int outputId);
+ static void stopSoundStreaming();
+ bool setPlaybackRate(qreal rate);
+ void setDisplay(AndroidSurfaceTexture *surfaceTexture);
+ static bool setAudioOutput(const QByteArray &deviceId);
+ QList<TrackInfo> tracksInfo();
+ int activeTrack(TrackType trackType);
+ void deselectTrack(int trackNumber);
+ void selectTrack(int trackNumber);
+
+ static bool registerNativeMethods();
+
+ void blockAudio();
+ void unblockAudio();
+Q_SIGNALS:
+ void error(qint32 what, qint32 extra);
+ void bufferingChanged(qint32 percent);
+ void durationChanged(qint64 duration);
+ void progressChanged(qint64 progress);
+ void stateChanged(qint32 state);
+ void info(qint32 what, qint32 extra);
+ void videoSizeChanged(qint32 width, qint32 height);
+ void timedTextChanged(QString text);
+ void tracksInfoChanged();
+
+private:
+ QJniObject mMediaPlayer;
+ bool mAudioBlocked = false;
+};
+
+QT_END_NAMESPACE
+
+#endif // ANDROIDMEDIAPLAYER_H
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidmediarecorder.cpp b/src/plugins/multimedia/android/wrappers/jni/androidmediarecorder.cpp
new file mode 100644
index 000000000..a3c9f4556
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidmediarecorder.cpp
@@ -0,0 +1,337 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "androidmediarecorder_p.h"
+#include "androidcamera_p.h"
+#include "androidsurfacetexture_p.h"
+#include "androidsurfaceview_p.h"
+#include "qandroidglobal_p.h"
+#include "qandroidmultimediautils_p.h"
+
+#include <qmap.h>
+#include <QtCore/qcoreapplication.h>
+#include <QtCore/qlogging.h>
+#include <QtCore/qurl.h>
+
+QT_BEGIN_NAMESPACE
+
+static Q_LOGGING_CATEGORY(lcMediaRecorder, "qt.multimedia.mediarecorder.android")
+
+typedef QMap<QString, QJniObject> CamcorderProfiles;
+Q_GLOBAL_STATIC(CamcorderProfiles, g_camcorderProfiles)
+
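+// Key template for the camcorder profile cache; filled in with the camera id and the requested
+// quality.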
+static QString profileKey()
+{
+ return QStringLiteral("%1-%2");
+}
+
+bool AndroidCamcorderProfile::hasProfile(jint cameraId, Quality quality)
+{
+ if (g_camcorderProfiles->contains(profileKey().arg(cameraId).arg(quality)))
+ return true;
+
+ return QJniObject::callStaticMethod<jboolean>("android/media/CamcorderProfile",
+ "hasProfile",
+ "(II)Z",
+ cameraId,
+ quality);
+}
+
+AndroidCamcorderProfile AndroidCamcorderProfile::get(jint cameraId, Quality quality)
+{
+ const QString key = profileKey().arg(cameraId).arg(quality);
+ QMap<QString, QJniObject>::const_iterator it = g_camcorderProfiles->constFind(key);
+
+ if (it != g_camcorderProfiles->constEnd())
+ return AndroidCamcorderProfile(*it);
+
+ QJniObject camProfile = QJniObject::callStaticObjectMethod("android/media/CamcorderProfile",
+ "get",
+ "(II)Landroid/media/CamcorderProfile;",
+ cameraId,
+ quality);
+
+ return AndroidCamcorderProfile((*g_camcorderProfiles)[key] = camProfile);
+}
+
+int AndroidCamcorderProfile::getValue(AndroidCamcorderProfile::Field field) const
+{
+ switch (field) {
+ case audioBitRate:
+ return m_camcorderProfile.getField<jint>("audioBitRate");
+ case audioChannels:
+ return m_camcorderProfile.getField<jint>("audioChannels");
+ case audioCodec:
+ return m_camcorderProfile.getField<jint>("audioCodec");
+ case audioSampleRate:
+ return m_camcorderProfile.getField<jint>("audioSampleRate");
+ case duration:
+ return m_camcorderProfile.getField<jint>("duration");
+ case fileFormat:
+ return m_camcorderProfile.getField<jint>("fileFormat");
+ case quality:
+ return m_camcorderProfile.getField<jint>("quality");
+ case videoBitRate:
+ return m_camcorderProfile.getField<jint>("videoBitRate");
+ case videoCodec:
+ return m_camcorderProfile.getField<jint>("videoCodec");
+ case videoFrameHeight:
+ return m_camcorderProfile.getField<jint>("videoFrameHeight");
+ case videoFrameRate:
+ return m_camcorderProfile.getField<jint>("videoFrameRate");
+ case videoFrameWidth:
+ return m_camcorderProfile.getField<jint>("videoFrameWidth");
+ }
+
+ return 0;
+}
+
+AndroidCamcorderProfile::AndroidCamcorderProfile(const QJniObject &camcorderProfile)
+{
+ m_camcorderProfile = camcorderProfile;
+}
+
+static const char QtMediaRecorderListenerClassName[] =
+ "org/qtproject/qt/android/multimedia/QtMediaRecorderListener";
+typedef QMap<jlong, AndroidMediaRecorder*> MediaRecorderMap;
+Q_GLOBAL_STATIC(MediaRecorderMap, mediaRecorders)
+
+static void notifyError(JNIEnv* , jobject, jlong id, jint what, jint extra)
+{
+ AndroidMediaRecorder *obj = mediaRecorders->value(id, 0);
+ if (obj)
+ emit obj->error(what, extra);
+}
+
+static void notifyInfo(JNIEnv* , jobject, jlong id, jint what, jint extra)
+{
+ AndroidMediaRecorder *obj = mediaRecorders->value(id, 0);
+ if (obj)
+ emit obj->info(what, extra);
+}
+
+AndroidMediaRecorder::AndroidMediaRecorder()
+ : QObject()
+ , m_id(reinterpret_cast<jlong>(this))
+{
+ m_mediaRecorder = QJniObject("android/media/MediaRecorder");
+ if (m_mediaRecorder.isValid()) {
+ QJniObject listener(QtMediaRecorderListenerClassName, "(J)V", m_id);
+ m_mediaRecorder.callMethod<void>("setOnErrorListener",
+ "(Landroid/media/MediaRecorder$OnErrorListener;)V",
+ listener.object());
+ m_mediaRecorder.callMethod<void>("setOnInfoListener",
+ "(Landroid/media/MediaRecorder$OnInfoListener;)V",
+ listener.object());
+ mediaRecorders->insert(m_id, this);
+ }
+}
+
+AndroidMediaRecorder::~AndroidMediaRecorder()
+{
+ if (m_isVideoSourceSet || m_isAudioSourceSet)
+ reset();
+
+ release();
+ mediaRecorders->remove(m_id);
+}
+
+void AndroidMediaRecorder::release()
+{
+ m_mediaRecorder.callMethod<void>("release");
+}
+
+bool AndroidMediaRecorder::prepare()
+{
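+    // MediaRecorder.prepare() throws on I/O or state errors; surface that as a bool instead of
+    // an exception.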
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_mediaRecorder.objectClass(), "prepare", "()V");
+ env->CallVoidMethod(m_mediaRecorder.object(), methodId);
+
+ if (env.checkAndClearExceptions())
+ return false;
+ return true;
+}
+
+void AndroidMediaRecorder::reset()
+{
+ m_mediaRecorder.callMethod<void>("reset");
+ m_isAudioSourceSet = false; // Now setAudioSource can be used again.
+ m_isVideoSourceSet = false;
+}
+
+bool AndroidMediaRecorder::start()
+{
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_mediaRecorder.objectClass(), "start", "()V");
+ env->CallVoidMethod(m_mediaRecorder.object(), methodId);
+
+ if (env.checkAndClearExceptions())
+ return false;
+ return true;
+}
+
+void AndroidMediaRecorder::stop()
+{
+ m_mediaRecorder.callMethod<void>("stop");
+}
+
+void AndroidMediaRecorder::setAudioChannels(int numChannels)
+{
+ m_mediaRecorder.callMethod<void>("setAudioChannels", "(I)V", numChannels);
+}
+
+void AndroidMediaRecorder::setAudioEncoder(AudioEncoder encoder)
+{
+ QJniEnvironment env;
+ m_mediaRecorder.callMethod<void>("setAudioEncoder", "(I)V", int(encoder));
+}
+
+void AndroidMediaRecorder::setAudioEncodingBitRate(int bitRate)
+{
+ m_mediaRecorder.callMethod<void>("setAudioEncodingBitRate", "(I)V", bitRate);
+}
+
+void AndroidMediaRecorder::setAudioSamplingRate(int samplingRate)
+{
+ m_mediaRecorder.callMethod<void>("setAudioSamplingRate", "(I)V", samplingRate);
+}
+
+void AndroidMediaRecorder::setAudioSource(AudioSource source)
+{
+ if (!m_isAudioSourceSet) {
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_mediaRecorder.objectClass(), "setAudioSource", "(I)V");
+ env->CallVoidMethod(m_mediaRecorder.object(), methodId, source);
+ if (!env.checkAndClearExceptions())
+ m_isAudioSourceSet = true;
+ } else {
+ qCWarning(lcMediaRecorder) << "Audio source already set. Not setting a new source.";
+ }
+}
+
+bool AndroidMediaRecorder::isAudioSourceSet() const
+{
+ return m_isAudioSourceSet;
+}
+
+bool AndroidMediaRecorder::setAudioInput(const QByteArray &id)
+{
+ const bool ret = QJniObject::callStaticMethod<jboolean>(
+ "org/qtproject/qt/android/multimedia/QtAudioDeviceManager",
+ "setAudioInput",
+ "(Landroid/media/MediaRecorder;I)Z",
+ m_mediaRecorder.object(),
+ id.toInt());
+ if (!ret)
+ qCWarning(lcMediaRecorder) << "No default input device was set.";
+
+ return ret;
+}
+
+void AndroidMediaRecorder::setCamera(AndroidCamera *camera)
+{
+ QJniObject cam = camera->getCameraObject();
+ m_mediaRecorder.callMethod<void>("setCamera", "(Landroid/hardware/Camera;)V", cam.object());
+}
+
+void AndroidMediaRecorder::setVideoEncoder(VideoEncoder encoder)
+{
+ m_mediaRecorder.callMethod<void>("setVideoEncoder", "(I)V", int(encoder));
+}
+
+void AndroidMediaRecorder::setVideoEncodingBitRate(int bitRate)
+{
+ m_mediaRecorder.callMethod<void>("setVideoEncodingBitRate", "(I)V", bitRate);
+}
+
+void AndroidMediaRecorder::setVideoFrameRate(int rate)
+{
+ m_mediaRecorder.callMethod<void>("setVideoFrameRate", "(I)V", rate);
+}
+
+void AndroidMediaRecorder::setVideoSize(const QSize &size)
+{
+ m_mediaRecorder.callMethod<void>("setVideoSize", "(II)V", size.width(), size.height());
+}
+
+void AndroidMediaRecorder::setVideoSource(VideoSource source)
+{
+ QJniEnvironment env;
+
+ auto methodId = env->GetMethodID(m_mediaRecorder.objectClass(), "setVideoSource", "(I)V");
+ env->CallVoidMethod(m_mediaRecorder.object(), methodId, source);
+
+ if (!env.checkAndClearExceptions())
+ m_isVideoSourceSet = true;
+}
+
+void AndroidMediaRecorder::setOrientationHint(int degrees)
+{
+ m_mediaRecorder.callMethod<void>("setOrientationHint", "(I)V", degrees);
+}
+
+void AndroidMediaRecorder::setOutputFormat(OutputFormat format)
+{
+ QJniEnvironment env;
+ auto methodId = env->GetMethodID(m_mediaRecorder.objectClass(), "setOutputFormat", "(I)V");
+ env->CallVoidMethod(m_mediaRecorder.object(), methodId, format);
+    // setAudioSource() can no longer be called once the output format has been set.
+ if (!env.checkAndClearExceptions())
+ m_isAudioSourceSet = true;
+}
+
+void AndroidMediaRecorder::setOutputFile(const QString &path)
+{
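+    // content: URLs cannot be passed as plain file paths; resolve them to a writable file
+    // descriptor through QtNative first.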
+ if (QUrl(path).scheme() == QLatin1String("content")) {
+ const QJniObject fileDescriptor = QJniObject::callStaticObjectMethod(
+ "org/qtproject/qt/android/QtNative",
+ "openFdObjectForContentUrl",
+ "(Landroid/content/Context;Ljava/lang/String;Ljava/lang/String;)Ljava/io/FileDescriptor;",
+ QNativeInterface::QAndroidApplication::context().object(),
+ QJniObject::fromString(path).object(),
+ QJniObject::fromString(QLatin1String("rw")).object());
+
+ m_mediaRecorder.callMethod<void>("setOutputFile",
+ "(Ljava/io/FileDescriptor;)V",
+ fileDescriptor.object());
+ } else {
+ m_mediaRecorder.callMethod<void>("setOutputFile",
+ "(Ljava/lang/String;)V",
+ QJniObject::fromString(path).object());
+ }
+}
+
+void AndroidMediaRecorder::setSurfaceTexture(AndroidSurfaceTexture *texture)
+{
+ m_mediaRecorder.callMethod<void>("setPreviewDisplay",
+ "(Landroid/view/Surface;)V",
+ texture->surface());
+}
+
+void AndroidMediaRecorder::setSurfaceHolder(AndroidSurfaceHolder *holder)
+{
+ QJniObject surfaceHolder(holder->surfaceHolder());
+ QJniObject surface = surfaceHolder.callObjectMethod("getSurface",
+ "()Landroid/view/Surface;");
+ if (!surface.isValid())
+ return;
+
+ m_mediaRecorder.callMethod<void>("setPreviewDisplay",
+ "(Landroid/view/Surface;)V",
+ surface.object());
+}
+
+bool AndroidMediaRecorder::registerNativeMethods()
+{
+ static const JNINativeMethod methods[] = {
+ {"notifyError", "(JII)V", (void *)notifyError},
+ {"notifyInfo", "(JII)V", (void *)notifyInfo}
+ };
+
+ const int size = std::size(methods);
+ return QJniEnvironment().registerNativeMethods(QtMediaRecorderListenerClassName, methods, size);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_androidmediarecorder_p.cpp"
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidmediarecorder_p.h b/src/plugins/multimedia/android/wrappers/jni/androidmediarecorder_p.h
new file mode 100644
index 000000000..ffdbcc149
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidmediarecorder_p.h
@@ -0,0 +1,161 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef ANDROIDMEDIARECORDER_H
+#define ANDROIDMEDIARECORDER_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qobject.h>
+#include <QtCore/qjniobject.h>
+#include <qsize.h>
+
+QT_BEGIN_NAMESPACE
+
+class AndroidCamera;
+class AndroidSurfaceTexture;
+class AndroidSurfaceHolder;
+
+class AndroidCamcorderProfile
+{
+public:
+ enum Quality { // Needs to match CamcorderProfile
+ QUALITY_LOW,
+ QUALITY_HIGH,
+ QUALITY_QCIF,
+ QUALITY_CIF,
+ QUALITY_480P,
+ QUALITY_720P,
+ QUALITY_1080P,
+ QUALITY_QVGA
+ };
+
+ enum Field {
+ audioBitRate,
+ audioChannels,
+ audioCodec,
+ audioSampleRate,
+ duration,
+ fileFormat,
+ quality,
+ videoBitRate,
+ videoCodec,
+ videoFrameHeight,
+ videoFrameRate,
+ videoFrameWidth
+ };
+
+ static bool hasProfile(jint cameraId, Quality quality);
+ static AndroidCamcorderProfile get(jint cameraId, Quality quality);
+ int getValue(Field field) const;
+
+private:
+ AndroidCamcorderProfile(const QJniObject &camcorderProfile);
+ QJniObject m_camcorderProfile;
+};
+
+class AndroidMediaRecorder : public QObject
+{
+ Q_OBJECT
+public:
+ enum AudioEncoder {
+ DefaultAudioEncoder = 0,
+ AMR_NB_Encoder = 1,
+ AMR_WB_Encoder = 2,
+ AAC = 3,
+ OPUS = 7,
+ VORBIS = 6
+ };
+
+ enum AudioSource {
+ DefaultAudioSource = 0,
+ Mic = 1,
+ VoiceUplink = 2,
+ VoiceDownlink = 3,
+ VoiceCall = 4,
+ Camcorder = 5,
+ VoiceRecognition = 6
+ };
+
+ enum VideoEncoder {
+ DefaultVideoEncoder = 0,
+ H263 = 1,
+ H264 = 2,
+ MPEG_4_SP = 3,
+ HEVC = 5
+ };
+
+ enum VideoSource {
+ DefaultVideoSource = 0,
+ Camera = 1
+ };
+
+ enum OutputFormat {
+ DefaultOutputFormat = 0,
+ THREE_GPP = 1,
+ MPEG_4 = 2,
+ AMR_NB_Format = 3,
+ AMR_WB_Format = 4,
+ AAC_ADTS = 6,
+ OGG = 11,
+ WEBM = 9
+ };
+
+ AndroidMediaRecorder();
+ ~AndroidMediaRecorder();
+
+ void release();
+ bool prepare();
+ void reset();
+
+ bool start();
+ void stop();
+
+ void setAudioChannels(int numChannels);
+ void setAudioEncoder(AudioEncoder encoder);
+ void setAudioEncodingBitRate(int bitRate);
+ void setAudioSamplingRate(int samplingRate);
+ void setAudioSource(AudioSource source);
+ bool isAudioSourceSet() const;
+ bool setAudioInput(const QByteArray &id);
+
+ void setCamera(AndroidCamera *camera);
+ void setVideoEncoder(VideoEncoder encoder);
+ void setVideoEncodingBitRate(int bitRate);
+ void setVideoFrameRate(int rate);
+ void setVideoSize(const QSize &size);
+ void setVideoSource(VideoSource source);
+
+ void setOrientationHint(int degrees);
+
+ void setOutputFormat(OutputFormat format);
+ void setOutputFile(const QString &path);
+
+ void setSurfaceTexture(AndroidSurfaceTexture *texture);
+ void setSurfaceHolder(AndroidSurfaceHolder *holder);
+
+ static bool registerNativeMethods();
+
+Q_SIGNALS:
+ void error(int what, int extra);
+ void info(int what, int extra);
+
+private:
+ jlong m_id;
+ QJniObject m_mediaRecorder;
+ bool m_isAudioSourceSet = false;
+ bool m_isVideoSourceSet = false;
+};
+
+QT_END_NAMESPACE
+
+#endif // ANDROIDMEDIARECORDER_H
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidmultimediautils.cpp b/src/plugins/multimedia/android/wrappers/jni/androidmultimediautils.cpp
new file mode 100644
index 000000000..9606bd6bb
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidmultimediautils.cpp
@@ -0,0 +1,43 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "androidmultimediautils_p.h"
+
+#include <QtCore/qjniobject.h>
+
+QT_BEGIN_NAMESPACE
+
+
+void AndroidMultimediaUtils::enableOrientationListener(bool enable)
+{
+ QJniObject::callStaticMethod<void>("org/qtproject/qt/android/multimedia/QtMultimediaUtils",
+ "enableOrientationListener",
+ "(Z)V",
+ enable);
+}
+
+int AndroidMultimediaUtils::getDeviceOrientation()
+{
+ return QJniObject::callStaticMethod<jint>("org/qtproject/qt/android/multimedia/QtMultimediaUtils",
+ "getDeviceOrientation");
+}
+
+QString AndroidMultimediaUtils::getDefaultMediaDirectory(MediaType type)
+{
+ QJniObject path = QJniObject::callStaticObjectMethod(
+ "org/qtproject/qt/android/multimedia/QtMultimediaUtils",
+ "getDefaultMediaDirectory",
+ "(I)Ljava/lang/String;",
+ jint(type));
+ return path.toString();
+}
+
+void AndroidMultimediaUtils::registerMediaFile(const QString &file)
+{
+ QJniObject::callStaticMethod<void>("org/qtproject/qt/android/multimedia/QtMultimediaUtils",
+ "registerMediaFile",
+ "(Ljava/lang/String;)V",
+ QJniObject::fromString(file).object());
+}
+
+QT_END_NAMESPACE
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidmultimediautils_p.h b/src/plugins/multimedia/android/wrappers/jni/androidmultimediautils_p.h
new file mode 100644
index 000000000..ee72c3c61
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidmultimediautils_p.h
@@ -0,0 +1,40 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef ANDROIDMULTIMEDIAUTILS_H
+#define ANDROIDMULTIMEDIAUTILS_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qobject.h>
+
+QT_BEGIN_NAMESPACE
+
+class AndroidMultimediaUtils
+{
+public:
+ enum MediaType {
+ Music = 0,
+ Movies = 1,
+ DCIM = 2,
+ Sounds = 3
+ };
+
+ static void enableOrientationListener(bool enable);
+ static int getDeviceOrientation();
+ static QString getDefaultMediaDirectory(MediaType type);
+ static void registerMediaFile(const QString &file);
+};
+
+QT_END_NAMESPACE
+
+#endif // ANDROIDMULTIMEDIAUTILS_H
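
AndroidMultimediaUtils is a thin static facade over the QtMultimediaUtils Java class. A short sketch of the intended call pattern when saving a capture; the file name is illustrative and whether the returned directory ends with a separator is not checked here:

    // Hedged sketch; the file name is illustrative only.
    const QString dir =
            AndroidMultimediaUtils::getDefaultMediaDirectory(AndroidMultimediaUtils::DCIM);
    const QString path = dir + QStringLiteral("/IMG_0001.jpg");
    // ... write the captured image to 'path' ...
    AndroidMultimediaUtils::registerMediaFile(path); // make it visible to the media scanner

    // Orientation handling around a capture session:
    AndroidMultimediaUtils::enableOrientationListener(true);
    const int degrees = AndroidMultimediaUtils::getDeviceOrientation();
    Q_UNUSED(degrees);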
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidsurfacetexture.cpp b/src/plugins/multimedia/android/wrappers/jni/androidsurfacetexture.cpp
new file mode 100644
index 000000000..c5860b265
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidsurfacetexture.cpp
@@ -0,0 +1,152 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "androidsurfacetexture_p.h"
+#include <QtCore/qmutex.h>
+#include <QtCore/qcoreapplication.h>
+
+QT_BEGIN_NAMESPACE
+
+static const char QtSurfaceTextureListenerClassName[] = "org/qtproject/qt/android/multimedia/QtSurfaceTextureListener";
+typedef QList<jlong> SurfaceTextures;
+Q_GLOBAL_STATIC(SurfaceTextures, g_surfaceTextures);
+Q_GLOBAL_STATIC(QMutex, g_textureMutex);
+
+static QAtomicInteger<quint64> indexCounter = 0u;
+
+// native callback registered on QtSurfaceTextureListener (see registerNativeMethods())
+static void notifyFrameAvailable(JNIEnv* , jobject, jlong id)
+{
+ const QMutexLocker lock(g_textureMutex());
+ const int idx = g_surfaceTextures->indexOf(id);
+ if (idx == -1)
+ return;
+
+ AndroidSurfaceTexture *obj = reinterpret_cast<AndroidSurfaceTexture *>(g_surfaceTextures->at(idx));
+ if (obj)
+ Q_EMIT obj->frameAvailable();
+}
+
+AndroidSurfaceTexture::AndroidSurfaceTexture(quint32 texName)
+ : QObject()
+ , m_index(indexCounter.fetchAndAddRelaxed(1))
+{
+ Q_STATIC_ASSERT(sizeof (jlong) >= sizeof (void *));
+ m_surfaceTexture = QJniObject("android/graphics/SurfaceTexture", "(I)V", jint(texName));
+
+ if (!m_surfaceTexture.isValid())
+ return;
+
+ const QMutexLocker lock(g_textureMutex());
+ g_surfaceTextures->append(jlong(this));
+ QJniObject listener(QtSurfaceTextureListenerClassName, "(J)V", jlong(this));
+ setOnFrameAvailableListener(listener);
+}
+
+AndroidSurfaceTexture::~AndroidSurfaceTexture()
+{
+ if (m_surface.isValid())
+ m_surface.callMethod<void>("release");
+
+ if (m_surfaceTexture.isValid()) {
+ release();
+ const QMutexLocker lock(g_textureMutex());
+ const int idx = g_surfaceTextures->indexOf(jlong(this));
+ if (idx != -1)
+ g_surfaceTextures->remove(idx);
+ }
+}
+
+QMatrix4x4 AndroidSurfaceTexture::getTransformMatrix()
+{
+ QMatrix4x4 matrix;
+ if (!m_surfaceTexture.isValid())
+ return matrix;
+
+ QJniEnvironment env;
+ jfloatArray array = env->NewFloatArray(16);
+ m_surfaceTexture.callMethod<void>("getTransformMatrix", "([F)V", array);
+ env->GetFloatArrayRegion(array, 0, 16, matrix.data());
+ env->DeleteLocalRef(array);
+
+ return matrix;
+}
+
+void AndroidSurfaceTexture::release()
+{
+ m_surfaceTexture.callMethod<void>("release");
+}
+
+void AndroidSurfaceTexture::updateTexImage()
+{
+ if (!m_surfaceTexture.isValid())
+ return;
+
+ m_surfaceTexture.callMethod<void>("updateTexImage");
+}
+
+jobject AndroidSurfaceTexture::surfaceTexture()
+{
+ return m_surfaceTexture.object();
+}
+
+jobject AndroidSurfaceTexture::surface()
+{
+ if (!m_surface.isValid()) {
+ m_surface = QJniObject("android/view/Surface",
+ "(Landroid/graphics/SurfaceTexture;)V",
+ m_surfaceTexture.object());
+ }
+
+ return m_surface.object();
+}
+
+jobject AndroidSurfaceTexture::surfaceHolder()
+{
+ if (!m_surfaceHolder.isValid()) {
+ m_surfaceHolder = QJniObject("org/qtproject/qt/android/multimedia/QtSurfaceTextureHolder",
+ "(Landroid/view/Surface;)V",
+ surface());
+ }
+
+ return m_surfaceHolder.object();
+}
+
+void AndroidSurfaceTexture::attachToGLContext(quint32 texName)
+{
+ if (!m_surfaceTexture.isValid())
+ return;
+
+ m_surfaceTexture.callMethod<void>("attachToGLContext", "(I)V", texName);
+}
+
+void AndroidSurfaceTexture::detachFromGLContext()
+{
+ if (!m_surfaceTexture.isValid())
+ return;
+
+ m_surfaceTexture.callMethod<void>("detachFromGLContext");
+}
+
+bool AndroidSurfaceTexture::registerNativeMethods()
+{
+ static const JNINativeMethod methods[] = {
+ {"notifyFrameAvailable", "(J)V", (void *)notifyFrameAvailable}
+ };
+ const int size = std::size(methods);
+    if (!QJniEnvironment().registerNativeMethods(QtSurfaceTextureListenerClassName, methods, size))
+ return false;
+
+ return true;
+}
+
+void AndroidSurfaceTexture::setOnFrameAvailableListener(const QJniObject &listener)
+{
+ m_surfaceTexture.callMethod<void>("setOnFrameAvailableListener",
+ "(Landroid/graphics/SurfaceTexture$OnFrameAvailableListener;)V",
+ listener.object());
+}
+
+QT_END_NAMESPACE
+
+#include "moc_androidsurfacetexture_p.cpp"
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidsurfacetexture_p.h b/src/plugins/multimedia/android/wrappers/jni/androidsurfacetexture_p.h
new file mode 100644
index 000000000..24581ca8d
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidsurfacetexture_p.h
@@ -0,0 +1,61 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef ANDROIDSURFACETEXTURE_H
+#define ANDROIDSURFACETEXTURE_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <qobject.h>
+#include <QtCore/qjniobject.h>
+
+#include <QMatrix4x4>
+
+QT_BEGIN_NAMESPACE
+
+class AndroidSurfaceTexture : public QObject
+{
+ Q_OBJECT
+public:
+ explicit AndroidSurfaceTexture(quint32 texName);
+ ~AndroidSurfaceTexture();
+
+ jobject surfaceTexture();
+ jobject surface();
+ jobject surfaceHolder();
+ inline bool isValid() const { return m_surfaceTexture.isValid(); }
+
+ QMatrix4x4 getTransformMatrix();
+ void release(); // API level 14
+ void updateTexImage();
+
+ void attachToGLContext(quint32 texName); // API level 16
+ void detachFromGLContext(); // API level 16
+
+ static bool registerNativeMethods();
+
+ quint64 index() const { return m_index; }
+Q_SIGNALS:
+ void frameAvailable();
+
+private:
+ void setOnFrameAvailableListener(const QJniObject &listener);
+
+ QJniObject m_surfaceTexture;
+ QJniObject m_surface;
+ QJniObject m_surfaceHolder;
+ const quint64 m_index = 0;
+};
+
+QT_END_NAMESPACE
+
+#endif // ANDROIDSURFACETEXTURE_H
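
A sketch of how a video output could wire the texture into a GL render path: construct it with an existing OpenGL texture name, react to frameAvailable(), and pull the frame plus its transform on the rendering thread. glTextureId and the threading setup are assumptions:

    // Hedged sketch; 'glTextureId' comes from the renderer and is assumed here.
    auto *texture = new AndroidSurfaceTexture(glTextureId);
    if (texture->isValid()) {
        QObject::connect(texture, &AndroidSurfaceTexture::frameAvailable, texture, [texture] {
            // In real code this must run on the thread that owns the GL context.
            texture->updateTexImage();
            const QMatrix4x4 texMatrix = texture->getTransformMatrix();
            // ... apply texMatrix to the texture coordinates when drawing ...
            Q_UNUSED(texMatrix);
        });
        // The Java-side Surface/SurfaceHolder can then be handed to a producer,
        // e.g. via AndroidMediaRecorder::setSurfaceTexture() (assumed wiring).
    }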
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidsurfaceview.cpp b/src/plugins/multimedia/android/wrappers/jni/androidsurfaceview.cpp
new file mode 100644
index 000000000..dae9516c3
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidsurfaceview.cpp
@@ -0,0 +1,152 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#include "androidsurfaceview_p.h"
+
+#include <QtCore/qcoreapplication.h>
+#include <QtCore/qdebug.h>
+#include <QtCore/qlist.h>
+#include <QtCore/qmutex.h>
+#include <QtGui/qwindow.h>
+
+QT_BEGIN_NAMESPACE
+
+static const char QtSurfaceHolderCallbackClassName[] = "org/qtproject/qt/android/multimedia/QtSurfaceHolderCallback";
+typedef QList<AndroidSurfaceHolder *> SurfaceHolders;
+Q_GLOBAL_STATIC(SurfaceHolders, surfaceHolders)
+Q_GLOBAL_STATIC(QMutex, shLock)
+
+AndroidSurfaceHolder::AndroidSurfaceHolder(QJniObject object)
+ : m_surfaceHolder(object)
+ , m_surfaceCreated(false)
+{
+ if (!m_surfaceHolder.isValid())
+ return;
+
+ {
+ QMutexLocker locker(shLock());
+ surfaceHolders->append(this);
+ }
+
+ QJniObject callback(QtSurfaceHolderCallbackClassName, "(J)V", reinterpret_cast<jlong>(this));
+ m_surfaceHolder.callMethod<void>("addCallback",
+ "(Landroid/view/SurfaceHolder$Callback;)V",
+ callback.object());
+}
+
+AndroidSurfaceHolder::~AndroidSurfaceHolder()
+{
+ QMutexLocker locker(shLock());
+ const int i = surfaceHolders->indexOf(this);
+ if (Q_UNLIKELY(i == -1))
+ return;
+
+ surfaceHolders->remove(i);
+}
+
+jobject AndroidSurfaceHolder::surfaceHolder() const
+{
+ return m_surfaceHolder.object();
+}
+
+bool AndroidSurfaceHolder::isSurfaceCreated() const
+{
+ QMutexLocker locker(shLock());
+ return m_surfaceCreated;
+}
+
+void AndroidSurfaceHolder::handleSurfaceCreated(JNIEnv*, jobject, jlong id)
+{
+ QMutexLocker locker(shLock());
+ const int i = surfaceHolders->indexOf(reinterpret_cast<AndroidSurfaceHolder *>(id));
+ if (Q_UNLIKELY(i == -1))
+ return;
+
+ (*surfaceHolders)[i]->m_surfaceCreated = true;
+ Q_EMIT (*surfaceHolders)[i]->surfaceCreated();
+}
+
+void AndroidSurfaceHolder::handleSurfaceDestroyed(JNIEnv*, jobject, jlong id)
+{
+ QMutexLocker locker(shLock());
+ const int i = surfaceHolders->indexOf(reinterpret_cast<AndroidSurfaceHolder *>(id));
+ if (Q_UNLIKELY(i == -1))
+ return;
+
+ (*surfaceHolders)[i]->m_surfaceCreated = false;
+}
+
+bool AndroidSurfaceHolder::registerNativeMethods()
+{
+ static const JNINativeMethod methods[] = {
+ {"notifySurfaceCreated", "(J)V", (void *)AndroidSurfaceHolder::handleSurfaceCreated},
+ {"notifySurfaceDestroyed", "(J)V", (void *)AndroidSurfaceHolder::handleSurfaceDestroyed}
+ };
+
+ const int size = std::size(methods);
+ return QJniEnvironment().registerNativeMethods(QtSurfaceHolderCallbackClassName, methods, size);
+}
+
+AndroidSurfaceView::AndroidSurfaceView()
+ : m_window(0)
+ , m_surfaceHolder(0)
+ , m_pendingVisible(-1)
+{
+ QNativeInterface::QAndroidApplication::runOnAndroidMainThread([this] {
+ m_surfaceView = QJniObject("android/view/SurfaceView",
+ "(Landroid/content/Context;)V",
+ QNativeInterface::QAndroidApplication::context().object());
+ }).waitForFinished();
+
+ Q_ASSERT(m_surfaceView.isValid());
+
+ QJniObject holder = m_surfaceView.callObjectMethod("getHolder",
+ "()Landroid/view/SurfaceHolder;");
+ if (!holder.isValid()) {
+ m_surfaceView = QJniObject();
+ } else {
+ m_surfaceHolder = new AndroidSurfaceHolder(holder);
+ connect(m_surfaceHolder, &AndroidSurfaceHolder::surfaceCreated,
+ this, &AndroidSurfaceView::surfaceCreated);
+ { // Lock now to avoid a race with handleSurfaceCreated()
+ QMutexLocker locker(shLock());
+ m_window = QWindow::fromWinId(WId(m_surfaceView.object()));
+
+ if (m_pendingVisible != -1)
+ m_window->setVisible(m_pendingVisible);
+ if (m_pendingGeometry.isValid())
+ m_window->setGeometry(m_pendingGeometry);
+ }
+ }
+}
+
+AndroidSurfaceView::~AndroidSurfaceView()
+{
+ delete m_surfaceHolder;
+ delete m_window;
+}
+
+AndroidSurfaceHolder *AndroidSurfaceView::holder() const
+{
+ return m_surfaceHolder;
+}
+
+void AndroidSurfaceView::setVisible(bool v)
+{
+ if (m_window)
+ m_window->setVisible(v);
+ else
+ m_pendingVisible = int(v);
+}
+
+void AndroidSurfaceView::setGeometry(int x, int y, int width, int height)
+{
+ if (m_window)
+ m_window->setGeometry(x, y, width, height);
+ else
+ m_pendingGeometry = QRect(x, y, width, height);
+}
+
+QT_END_NAMESPACE
+
+#include "moc_androidsurfaceview_p.cpp"
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidsurfaceview_p.h b/src/plugins/multimedia/android/wrappers/jni/androidsurfaceview_p.h
new file mode 100644
index 000000000..e6be60ef1
--- /dev/null
+++ b/src/plugins/multimedia/android/wrappers/jni/androidsurfaceview_p.h
@@ -0,0 +1,78 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+#ifndef ANDROIDSURFACEVIEW_H
+#define ANDROIDSURFACEVIEW_H
+
+//
+// W A R N I N G
+// -------------
+//
+// This file is not part of the Qt API. It exists purely as an
+// implementation detail. This header file may change from version to
+// version without notice, or even be removed.
+//
+// We mean it.
+//
+
+#include <QtCore/qjniobject.h>
+#include <qrect.h>
+#include <QtCore/qrunnable.h>
+#include <QtCore/qobject.h>
+
+QT_BEGIN_NAMESPACE
+
+class QWindow;
+
+class AndroidSurfaceHolder : public QObject
+{
+ Q_OBJECT
+public:
+ ~AndroidSurfaceHolder();
+
+ jobject surfaceHolder() const;
+ bool isSurfaceCreated() const;
+
+ static bool registerNativeMethods();
+
+Q_SIGNALS:
+ void surfaceCreated();
+
+private:
+ AndroidSurfaceHolder(QJniObject object);
+
+ static void handleSurfaceCreated(JNIEnv*, jobject, jlong id);
+ static void handleSurfaceDestroyed(JNIEnv*, jobject, jlong id);
+
+ QJniObject m_surfaceHolder;
+ bool m_surfaceCreated;
+
+ friend class AndroidSurfaceView;
+};
+
+class AndroidSurfaceView : public QObject
+{
+ Q_OBJECT
+public:
+ AndroidSurfaceView();
+ ~AndroidSurfaceView();
+
+ AndroidSurfaceHolder *holder() const;
+
+ void setVisible(bool v);
+ void setGeometry(int x, int y, int width, int height);
+
+Q_SIGNALS:
+ void surfaceCreated();
+
+private:
+ QJniObject m_surfaceView;
+ QWindow *m_window;
+ AndroidSurfaceHolder *m_surfaceHolder;
+ int m_pendingVisible;
+ QRect m_pendingGeometry;
+};
+
+QT_END_NAMESPACE
+
+#endif // ANDROIDSURFACEVIEW_H
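
A sketch of the expected AndroidSurfaceView usage: construct it, defer any work on the holder until surfaceCreated() has fired, and drive geometry and visibility from the Qt side. The consumer of the holder is left hypothetical:

    // Hedged sketch; what consumes the holder is hypothetical.
    auto *view = new AndroidSurfaceView;
    QObject::connect(view, &AndroidSurfaceView::surfaceCreated, view, [view] {
        AndroidSurfaceHolder *holder = view->holder();
        if (holder && holder->isSurfaceCreated()) {
            // Hand holder->surfaceHolder() to a producer, e.g.
            // AndroidMediaRecorder::setSurfaceHolder() (assumed wiring).
        }
    });
    view->setGeometry(0, 0, 1280, 720);
    view->setVisible(true);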