summaryrefslogtreecommitdiffstats
path: root/src/android/jar
diff options
context:
space:
mode:
Diffstat (limited to 'src/android/jar')
-rw-r--r--src/android/jar/AndroidManifest.xml6
-rw-r--r--src/android/jar/CMakeLists.txt32
-rw-r--r--src/android/jar/src/org/qtproject/qt/android/multimedia/QtAndroidMediaPlayer.java751
-rw-r--r--src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java361
-rw-r--r--src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java519
-rw-r--r--src/android/jar/src/org/qtproject/qt/android/multimedia/QtCameraListener.java207
-rw-r--r--src/android/jar/src/org/qtproject/qt/android/multimedia/QtExifDataHandler.java51
-rw-r--r--src/android/jar/src/org/qtproject/qt/android/multimedia/QtMediaRecorderListener.java31
-rw-r--r--src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java149
-rw-r--r--src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceHolderCallback.java37
-rw-r--r--src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceTextureHolder.java89
-rw-r--r--src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceTextureListener.java24
-rw-r--r--src/android/jar/src/org/qtproject/qt/android/multimedia/QtVideoDeviceManager.java247
13 files changed, 2504 insertions, 0 deletions
diff --git a/src/android/jar/AndroidManifest.xml b/src/android/jar/AndroidManifest.xml
new file mode 100644
index 000000000..17019fb34
--- /dev/null
+++ b/src/android/jar/AndroidManifest.xml
@@ -0,0 +1,6 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="org.qtproject.qt.android.multimedia"
+ android:versionCode="1"
+ android:versionName="1.0" >
+ <supports-screens android:largeScreens="true" android:normalScreens="true" android:anyDensity="true" android:smallScreens="true"/>
+</manifest>
diff --git a/src/android/jar/CMakeLists.txt b/src/android/jar/CMakeLists.txt
new file mode 100644
index 000000000..01e0e5a08
--- /dev/null
+++ b/src/android/jar/CMakeLists.txt
@@ -0,0 +1,32 @@
+# Copyright (C) 2022 The Qt Company Ltd.
+# SPDX-License-Identifier: BSD-3-Clause
+
+# Generated from jar.pro.
+
+set(java_sources
+ src/org/qtproject/qt/android/multimedia/QtAndroidMediaPlayer.java
+ src/org/qtproject/qt/android/multimedia/QtCameraListener.java
+ src/org/qtproject/qt/android/multimedia/QtCamera2.java
+ src/org/qtproject/qt/android/multimedia/QtExifDataHandler.java
+ src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
+ src/org/qtproject/qt/android/multimedia/QtVideoDeviceManager.java
+ src/org/qtproject/qt/android/multimedia/QtMediaRecorderListener.java
+ src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java
+ src/org/qtproject/qt/android/multimedia/QtSurfaceHolderCallback.java
+ src/org/qtproject/qt/android/multimedia/QtSurfaceTextureHolder.java
+ src/org/qtproject/qt/android/multimedia/QtSurfaceTextureListener.java
+)
+
+qt_internal_add_jar(Qt${QtMultimedia_VERSION_MAJOR}AndroidMultimedia
+ INCLUDE_JARS ${QT_ANDROID_JAR}
+ SOURCES ${java_sources}
+ OUTPUT_DIR "${QT_BUILD_DIR}/jar"
+)
+
+qt_path_join(destination ${INSTALL_DATADIR} "jar")
+
+install_jar(Qt${QtMultimedia_VERSION_MAJOR}AndroidMultimedia
+ DESTINATION ${destination}
+ COMPONENT Devel
+)
+
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAndroidMediaPlayer.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAndroidMediaPlayer.java
new file mode 100644
index 000000000..6ddc64dc2
--- /dev/null
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAndroidMediaPlayer.java
@@ -0,0 +1,751 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+package org.qtproject.qt.android.multimedia;
+
+import java.io.IOException;
+import java.lang.String;
+import java.util.HashMap;
+import java.io.FileInputStream;
+
+// API level is < 9 unless marked otherwise.
+import android.content.Context;
+import android.media.MediaPlayer;
+import android.media.MediaFormat;
+import android.media.PlaybackParams;
+import android.media.AudioAttributes;
+import android.media.TimedText;
+import android.net.Uri;
+import android.util.Log;
+import java.io.FileDescriptor;
+import android.content.res.AssetFileDescriptor;
+import android.content.res.AssetManager;
+import android.view.SurfaceHolder;
+
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.TimeUnit;
+
+public class QtAndroidMediaPlayer
+{
+ // Native callback functions for MediaPlayer
+ native public void onErrorNative(int what, int extra, long id);
+ native public void onBufferingUpdateNative(int percent, long id);
+ native public void onProgressUpdateNative(int progress, long id);
+ native public void onDurationChangedNative(int duration, long id);
+ native public void onInfoNative(int what, int extra, long id);
+ native public void onVideoSizeChangedNative(int width, int height, long id);
+ native public void onStateChangedNative(int state, long id);
+
+ native public void onTrackInfoChangedNative(long id);
+ native public void onTimedTextChangedNative(String text, int time, long id);
+
+ private MediaPlayer mMediaPlayer = null;
+ private AudioAttributes mAudioAttributes = null;
+ private HashMap<String, String> mHeaders = null;
+ private Uri mUri = null;
+ private final long mID;
+ private final Context mContext;
+ private boolean mMuted = false;
+ private int mVolume = 100;
+ private static final String TAG = "Qt MediaPlayer";
+ private SurfaceHolder mSurfaceHolder = null;
+ private ScheduledExecutorService mProgressScheduler = null;
+ private ScheduledFuture mProgressWatcherHandle = null;
+
+ private class State {
+ final static int Uninitialized = 0x1 /* End */;
+ final static int Idle = 0x2;
+ final static int Preparing = 0x4;
+ final static int Prepared = 0x8;
+ final static int Initialized = 0x10;
+ final static int Started = 0x20;
+ final static int Stopped = 0x40;
+ final static int Paused = 0x80;
+ final static int PlaybackCompleted = 0x100;
+ final static int Error = 0x200;
+ }
+
+ public class TrackInfo
+ {
+ private int type;
+ private String mime, language;
+
+ TrackInfo(int type, String mime, String language)
+ {
+ this.type = type;
+ this.mime = mime;
+ this.language = language;
+ }
+
+ int getType() { return this.type; }
+ String getMime() { return this.mime; }
+ String getLanguage() { return this.language; }
+ }
+
+ private volatile int mState = State.Uninitialized;
+
+ /**
+ * MediaPlayer OnErrorListener
+ */
+ private class MediaPlayerErrorListener
+ implements MediaPlayer.OnErrorListener
+ {
+ @Override
+ public boolean onError(final MediaPlayer mp,
+ final int what,
+ final int extra)
+ {
+ setState(State.Error);
+ onErrorNative(what, extra, mID);
+ return true;
+ }
+ }
+
+ /**
+ * MediaPlayer OnBufferingListener
+ */
+ private class MediaPlayerBufferingListener
+ implements MediaPlayer.OnBufferingUpdateListener
+ {
+ private int mBufferPercent = -1;
+ @Override
+ public void onBufferingUpdate(final android.media.MediaPlayer mp,
+ final int percent)
+ {
+ // Avoid updates when percent is unchanged.
+ // E.g., we keep getting updates when percent == 100
+ if (mBufferPercent == percent)
+ return;
+
+ onBufferingUpdateNative((mBufferPercent = percent), mID);
+ }
+
+ }
+
+ /**
+ * MediaPlayer OnCompletionListener
+ */
+ private class MediaPlayerCompletionListener
+ implements MediaPlayer.OnCompletionListener
+ {
+ @Override
+ public void onCompletion(final MediaPlayer mp)
+ {
+ setState(State.PlaybackCompleted);
+ }
+
+ }
+
+ /**
+ * MediaPlayer OnInfoListener
+ */
+ private class MediaPlayerInfoListener
+ implements MediaPlayer.OnInfoListener
+ {
+ @Override
+ public boolean onInfo(final MediaPlayer mp,
+ final int what,
+ final int extra)
+ {
+ onInfoNative(what, extra, mID);
+ return true;
+ }
+
+ }
+
+ /**
+ * MediaPlayer OnPreparedListener
+ */
+ private class MediaPlayerPreparedListener
+ implements MediaPlayer.OnPreparedListener
+ {
+
+ @Override
+ public void onPrepared(final MediaPlayer mp)
+ {
+ setState(State.Prepared);
+ onDurationChangedNative(getDuration(), mID);
+ onTrackInfoChangedNative(mID);
+ }
+
+ }
+
+ /**
+ * MediaPlayer OnSeekCompleteListener
+ */
+ private class MediaPlayerSeekCompleteListener
+ implements MediaPlayer.OnSeekCompleteListener
+ {
+
+ @Override
+ public void onSeekComplete(final MediaPlayer mp)
+ {
+ onProgressUpdateNative(getCurrentPosition(), mID);
+ }
+
+ }
+
+ /**
+ * MediaPlayer OnVideoSizeChangedListener
+ */
+ private class MediaPlayerVideoSizeChangedListener
+ implements MediaPlayer.OnVideoSizeChangedListener
+ {
+
+ @Override
+ public void onVideoSizeChanged(final MediaPlayer mp,
+ final int width,
+ final int height)
+ {
+ onVideoSizeChangedNative(width, height, mID);
+ }
+
+ }
+
+ private class MediaPlayerTimedTextListener implements MediaPlayer.OnTimedTextListener
+ {
+ @Override public void onTimedText(MediaPlayer mp, TimedText text)
+ {
+ onTimedTextChangedNative(text.getText(), mp.getCurrentPosition(), mID);
+ }
+ }
+
+ public QtAndroidMediaPlayer(final Context context, final long id)
+ {
+ mID = id;
+ mContext = context;
+ }
+
+ public MediaPlayer getMediaPlayerHandle()
+ {
+ return mMediaPlayer;
+ }
+
+ private void setState(int state)
+ {
+ if (mState == state)
+ return;
+
+ mState = state;
+
+ onStateChangedNative(mState, mID);
+ }
+
+ private void init()
+ {
+ if (mMediaPlayer != null)
+ return;
+
+ mMediaPlayer = new MediaPlayer();
+ setState(State.Idle);
+ // Make sure the new media player has the volume that was set on the QMediaPlayer
+ setVolumeHelper(mMuted ? 0 : mVolume);
+ setAudioAttributes(mMediaPlayer, mAudioAttributes);
+
+ mMediaPlayer.setOnBufferingUpdateListener(new MediaPlayerBufferingListener());
+ mMediaPlayer.setOnCompletionListener(new MediaPlayerCompletionListener());
+ mMediaPlayer.setOnInfoListener(new MediaPlayerInfoListener());
+ mMediaPlayer.setOnSeekCompleteListener(new MediaPlayerSeekCompleteListener());
+ mMediaPlayer.setOnVideoSizeChangedListener(new MediaPlayerVideoSizeChangedListener());
+ mMediaPlayer.setOnErrorListener(new MediaPlayerErrorListener());
+ mMediaPlayer.setOnPreparedListener(new MediaPlayerPreparedListener());
+ mMediaPlayer.setOnTimedTextListener(new MediaPlayerTimedTextListener());
+ // Report playback position since there is no default listener for that in MediaPlayer
+ mProgressScheduler = Executors.newScheduledThreadPool(1);
+ }
+
+ public void startProgressWatcher()
+ {
+ // if it was shutdown, request new thread (NOTE(review): the null check in the condition below should come before isTerminated() to avoid a potential NPE — confirm)
+ if (mProgressScheduler.isTerminated() || mProgressScheduler == null)
+ mProgressScheduler = Executors.newScheduledThreadPool(1);
+
+ mProgressWatcherHandle = mProgressScheduler.scheduleAtFixedRate(new Runnable() {
+ @Override
+ public void run() {
+ if (isPlaying())
+ onProgressUpdateNative(getCurrentPosition(), mID);
+ }
+ }, 10, 100, TimeUnit.MILLISECONDS);
+ }
+
+ public void cancelProgressWatcher()
+ {
+ if (mProgressScheduler != null)
+ mProgressScheduler.shutdown();
+ }
+
+ public void start()
+ {
+ if ((mState & (State.Prepared
+ | State.Started
+ | State.Paused
+ | State.PlaybackCompleted)) == 0) {
+ return;
+ }
+
+ try {
+ mMediaPlayer.start();
+ setState(State.Started);
+ startProgressWatcher();
+ } catch (final IllegalStateException exception) {
+ Log.w(TAG, exception);
+ }
+ }
+
+ public void pause()
+ {
+ if ((mState & (State.Started | State.Paused | State.PlaybackCompleted)) == 0)
+ return;
+
+ try {
+ mMediaPlayer.pause();
+ setState(State.Paused);
+ } catch (final IllegalStateException exception) {
+ Log.w(TAG, exception);
+ }
+ }
+
+
+ public void stop()
+ {
+ if ((mState & (State.Prepared
+ | State.Started
+ | State.Stopped
+ | State.Paused
+ | State.PlaybackCompleted)) == 0) {
+ return;
+ }
+
+ try {
+ mMediaPlayer.stop();
+ setState(State.Stopped);
+ cancelProgressWatcher();
+ } catch (final IllegalStateException exception) {
+ Log.w(TAG, exception);
+ }
+ }
+
+
+ public void seekTo(final int msec)
+ {
+ if ((mState & (State.Prepared
+ | State.Started
+ | State.Paused
+ | State.PlaybackCompleted)) == 0) {
+ return;
+ }
+
+ try {
+ if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.O) {
+ // seeking to the closest frame of the provided msec is only available on devices
+ // with API level 26 or higher
+ mMediaPlayer.seekTo(msec, MediaPlayer.SEEK_CLOSEST);
+ } else {
+ mMediaPlayer.seekTo(msec);
+ }
+ } catch (final IllegalStateException exception) {
+ Log.w(TAG, exception);
+ }
+ }
+
+ public boolean isPlaying()
+ {
+ boolean playing = false;
+ if ((mState & (State.Idle
+ | State.Initialized
+ | State.Prepared
+ | State.Started
+ | State.Paused
+ | State.Stopped
+ | State.PlaybackCompleted)) == 0) {
+ return playing;
+ }
+
+ try {
+ playing = mMediaPlayer.isPlaying();
+ } catch (final IllegalStateException exception) {
+ Log.w(TAG, exception);
+ }
+
+ return playing;
+ }
+
+ public void prepareAsync()
+ {
+ if ((mState & (State.Initialized | State.Stopped)) == 0)
+ return;
+
+ try {
+ mMediaPlayer.prepareAsync();
+ setState(State.Preparing);
+ } catch (final IllegalStateException exception) {
+ Log.w(TAG, exception);
+ }
+ }
+
+ public void initHeaders()
+ {
+ mHeaders = new HashMap<String, String>();
+ }
+
+ public void setHeader(final String header, final String value)
+ {
+ mHeaders.put(header, value);
+ }
+
+ public void setDataSource(final String path)
+ {
+ if (mState == State.Uninitialized)
+ init();
+
+ if (mState != State.Idle)
+ reset();
+
+ // mediaplayer can only setDataSource if it is in State.Idle
+ if (mState != State.Idle) {
+ Log.w(TAG, "Trying to set data source of a media player that is not idle!");
+ return;
+ }
+
+ if (mSurfaceHolder != null)
+ mMediaPlayer.setDisplay(mSurfaceHolder);
+
+ AssetFileDescriptor afd = null;
+ FileInputStream fis = null;
+ try {
+ mUri = Uri.parse(path);
+ if (mUri.getScheme().compareTo("assets") == 0) {
+ final String asset = mUri.getPath().substring(1 /* Remove first '/' */);
+ final AssetManager am = mContext.getAssets();
+ afd = am.openFd(asset);
+ final long offset = afd.getStartOffset();
+ final long length = afd.getLength();
+ FileDescriptor fd = afd.getFileDescriptor();
+ mMediaPlayer.setDataSource(fd, offset, length);
+ } else if (mUri.getScheme().compareTo("file") == 0) {
+ fis = new FileInputStream(mUri.getPath());
+ FileDescriptor fd = fis.getFD();
+ mMediaPlayer.setDataSource(fd);
+ } else if (mUri.getScheme().compareTo("content") == 0) {
+ mMediaPlayer.setDataSource(mContext, mUri, mHeaders);
+ } else {
+ mMediaPlayer.setDataSource(path);
+ }
+ setState(State.Initialized);
+ } catch (final Exception exception) {
+ Log.w(TAG, exception);
+ } finally {
+ try {
+ if (afd != null)
+ afd.close();
+ if (fis != null)
+ fis.close();
+ } catch (final IOException ioe) { /* Ignore... */ }
+
+ if ((mState & State.Initialized) == 0) {
+ setState(State.Error);
+ onErrorNative(MediaPlayer.MEDIA_ERROR_UNKNOWN,
+ -1004 /*MEDIA_ERROR_IO*/,
+ mID);
+ return;
+ }
+ }
+ }
+
+ private boolean isMediaPlayerPrepared()
+ {
+ int preparedState = (State.Prepared | State.Started | State.Paused | State.Stopped
+ | State.PlaybackCompleted);
+ return ((mState & preparedState) != 0);
+ }
+
+ public TrackInfo[] getAllTrackInfo()
+ {
+ if (!isMediaPlayerPrepared()) {
+ Log.w(TAG, "Trying to get track info of a media player that is not prepared!");
+ return new TrackInfo[0];
+ }
+
+ MediaPlayer.TrackInfo[] tracks = new MediaPlayer.TrackInfo[0];
+
+ try {
+ // media player will ignore this if it is an out-of-bounds index.
+ tracks = mMediaPlayer.getTrackInfo();
+ } catch (final IllegalStateException exception) {
+ Log.w(TAG, exception);
+ }
+
+ int numberOfTracks = tracks.length;
+ TrackInfo[] qtTracksInfo = new TrackInfo[numberOfTracks];
+
+ for (int index = 0; index < numberOfTracks; index++) {
+
+ MediaPlayer.TrackInfo track = tracks[index];
+
+ int type = track.getTrackType();
+ String mimeType = getMimeType(track);
+ String language = track.getLanguage();
+
+ qtTracksInfo[index] = new TrackInfo(type, mimeType, language);
+ }
+
+ return qtTracksInfo;
+ }
+
+ private String getMimeType(MediaPlayer.TrackInfo trackInfo)
+ {
+ // The "octet-stream" subtype is used to indicate that a body contains arbitrary binary
+ // data.
+ String defaultMimeType = "application/octet-stream";
+
+ String mimeType = defaultMimeType;
+
+ MediaFormat mediaFormat = trackInfo.getFormat();
+ if (mediaFormat != null) {
+ mimeType = mediaFormat.getString(MediaFormat.KEY_MIME, defaultMimeType);
+ }
+
+ return mimeType;
+ }
+
+ public void selectTrack(int index)
+ {
+ if (!isMediaPlayerPrepared()) {
+ Log.d(TAG, "Trying to select a track of a media player that is not prepared!");
+ return;
+ }
+ try {
+ // media player will ignore this if it is an out-of-bounds index.
+ mMediaPlayer.selectTrack(index);
+ } catch (final IllegalStateException exception) {
+ Log.w(TAG, exception);
+ }
+ }
+
+ public void deselectTrack(int index)
+ {
+ if (!isMediaPlayerPrepared()) {
+ Log.d(TAG, "Trying to deselect track of a media player that is not prepared!");
+ return;
+ }
+
+ try {
+ // media player will ignore this if it is an out-of-bounds index.
+ mMediaPlayer.deselectTrack(index);
+ } catch (final IllegalStateException exception) {
+ Log.w(TAG, exception);
+ }
+ }
+
+ public int getSelectedTrack(int type)
+ {
+
+ int InvalidTrack = -1;
+ if (!isMediaPlayerPrepared()) {
+ Log.d(TAG, "Trying to get the selected track of a media player that is not prepared!");
+ return InvalidTrack;
+ }
+
+ boolean isVideoTrackType = (type == MediaPlayer.TrackInfo.MEDIA_TRACK_TYPE_VIDEO);
+ boolean isAudioTrackType = (type == MediaPlayer.TrackInfo.MEDIA_TRACK_TYPE_AUDIO);
+ boolean isTimedTextTrackType = (type == MediaPlayer.TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT);
+ boolean isSubtitleTrackType = (type == MediaPlayer.TrackInfo.MEDIA_TRACK_TYPE_SUBTITLE);
+
+ if (!(isVideoTrackType || isAudioTrackType || isSubtitleTrackType
+ || isTimedTextTrackType)) {
+ Log.w(TAG,
+ "Trying to get a selected track of a invalid type"
+ + " Only Video,Audio, TimedText and Subtitle tracks are selectable.");
+ return InvalidTrack;
+ }
+
+ try {
+ return mMediaPlayer.getSelectedTrack(type);
+ } catch (final IllegalStateException exception) {
+ Log.w(TAG, exception);
+ }
+
+ return InvalidTrack;
+ }
+
+ public int getCurrentPosition()
+ {
+ int currentPosition = 0;
+ if ((mState & (State.Idle
+ | State.Initialized
+ | State.Prepared
+ | State.Started
+ | State.Paused
+ | State.Stopped
+ | State.PlaybackCompleted)) == 0) {
+ return currentPosition;
+ }
+
+ try {
+ currentPosition = mMediaPlayer.getCurrentPosition();
+ } catch (final IllegalStateException exception) {
+ Log.w(TAG, exception);
+ }
+
+ return currentPosition;
+ }
+
+
+ public int getDuration()
+ {
+ int duration = 0;
+ if ((mState & (State.Prepared
+ | State.Started
+ | State.Paused
+ | State.Stopped
+ | State.PlaybackCompleted)) == 0) {
+ return duration;
+ }
+
+ try {
+ duration = mMediaPlayer.getDuration();
+ } catch (final IllegalStateException exception) {
+ Log.w(TAG, exception);
+ }
+
+ return duration;
+ }
+
+ public void setVolume(int volume)
+ {
+ if (volume < 0)
+ volume = 0;
+
+ if (volume > 100)
+ volume = 100;
+
+ mVolume = volume;
+
+ if (!mMuted)
+ setVolumeHelper(mVolume);
+ }
+
+ private void setVolumeHelper(int volume)
+ {
+ if ((mState & (State.Idle
+ | State.Initialized
+ | State.Stopped
+ | State.Prepared
+ | State.Started
+ | State.Paused
+ | State.PlaybackCompleted)) == 0) {
+ return;
+ }
+
+ try {
+ float newVolume = (float)volume / 100;
+ mMediaPlayer.setVolume(newVolume, newVolume);
+ } catch (final IllegalStateException exception) {
+ Log.w(TAG, exception);
+ }
+ }
+
+ public SurfaceHolder display()
+ {
+ return mSurfaceHolder;
+ }
+
+ public void setDisplay(SurfaceHolder sh)
+ {
+ mSurfaceHolder = sh;
+
+ if ((mState & State.Uninitialized) != 0)
+ return;
+
+ mMediaPlayer.setDisplay(mSurfaceHolder);
+ }
+
+
+ public int getVolume()
+ {
+ return mVolume;
+ }
+
+ public void mute(final boolean mute)
+ {
+ mMuted = mute;
+ setVolumeHelper(mute ? 0 : mVolume);
+ }
+
+ public boolean isMuted()
+ {
+ return mMuted;
+ }
+
+ public void reset()
+ {
+ if (mState == State.Uninitialized) {
+ return;
+ }
+
+ mMediaPlayer.reset();
+ setState(State.Idle);
+ cancelProgressWatcher();
+ }
+
+ public void release()
+ {
+ if (mMediaPlayer != null) {
+ mMediaPlayer.reset();
+ mMediaPlayer.release();
+ mMediaPlayer = null;
+ }
+
+ setState(State.Uninitialized);
+ cancelProgressWatcher();
+ }
+
+ public void setAudioAttributes(int type, int usage)
+ {
+ mAudioAttributes = new AudioAttributes.Builder()
+ .setUsage(usage)
+ .setContentType(type)
+ .build();
+
+ setAudioAttributes(mMediaPlayer, mAudioAttributes);
+ }
+
+ static private void setAudioAttributes(MediaPlayer player, AudioAttributes attr)
+ {
+ if (player == null || attr == null)
+ return;
+
+ try {
+ player.setAudioAttributes(attr);
+ } catch (final IllegalArgumentException exception) {
+ Log.w(TAG, exception);
+ }
+ }
+
+ public boolean setPlaybackRate(float rate)
+ {
+ PlaybackParams playbackParams = mMediaPlayer.getPlaybackParams();
+ playbackParams.setSpeed(rate);
+ // According to discussion under the patch from QTBUG-61115: At least with DirectShow
+ // and GStreamer, it changes both speed and pitch. (...) need to be consistent
+ if (rate != 0.0)
+ playbackParams.setPitch(Math.abs(rate));
+
+ try {
+ mMediaPlayer.setPlaybackParams(playbackParams);
+ } catch (IllegalStateException | IllegalArgumentException e) {
+ Log.e(TAG, "Cannot set playback rate " + rate + " :" + e.toString());
+ return false;
+ }
+
+ if ((mState & State.Started) == 0 && mMediaPlayer.isPlaying()) {
+ setState(State.Started);
+ startProgressWatcher();
+ }
+
+ return true;
+ }
+}
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
new file mode 100644
index 000000000..2b6fcc2dc
--- /dev/null
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
@@ -0,0 +1,361 @@
+// Copyright (C) 2021 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+package org.qtproject.qt.android.multimedia;
+
+import java.util.ArrayList;
+import android.content.Context;
+import android.media.AudioDeviceCallback;
+import android.media.AudioDeviceInfo;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.media.AudioTrack;
+import android.media.MediaRecorder;
+import android.os.Handler;
+import android.os.Looper;
+import android.util.Log;
+
+public class QtAudioDeviceManager
+{
+ private static final String TAG = "QtAudioDeviceManager";
+ static private AudioManager m_audioManager = null;
+ static private final AudioDevicesReceiver m_audioDevicesReceiver = new AudioDevicesReceiver();
+ static private Handler handler = new Handler(Looper.getMainLooper());
+ static private AudioRecord m_recorder = null;
+ static private AudioTrack m_streamPlayer = null;
+ static private Thread m_streamingThread = null;
+ static private boolean m_isStreaming = false;
+ static private boolean m_useSpeaker = false;
+ static private final int m_sampleRate = 8000;
+ static private final int m_channels = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ static private final int m_audioFormat = AudioFormat.ENCODING_PCM_16BIT;
+ static private final int m_bufferSize = AudioRecord.getMinBufferSize(m_sampleRate, m_channels, m_audioFormat);
+
+ public static native void onAudioInputDevicesUpdated();
+ public static native void onAudioOutputDevicesUpdated();
+
+ static private void updateDeviceList() {
+ onAudioInputDevicesUpdated();
+ onAudioOutputDevicesUpdated();
+ if (m_useSpeaker) {
+ final AudioDeviceInfo[] audioDevices =
+ m_audioManager.getDevices(AudioManager.GET_DEVICES_OUTPUTS);
+ setAudioOutput(getModeForSpeaker(audioDevices), false, true);
+ }
+ }
+
+ private static class AudioDevicesReceiver extends AudioDeviceCallback {
+ @Override
+ public void onAudioDevicesAdded(AudioDeviceInfo[] addedDevices) {
+ updateDeviceList();
+ }
+
+ @Override
+ public void onAudioDevicesRemoved(AudioDeviceInfo[] removedDevices) {
+ updateDeviceList();
+ }
+ }
+
+
+ public static void registerAudioHeadsetStateReceiver()
+ {
+ m_audioManager.registerAudioDeviceCallback(m_audioDevicesReceiver, handler);
+ }
+
+ public static void unregisterAudioHeadsetStateReceiver()
+ {
+ m_audioManager.unregisterAudioDeviceCallback(m_audioDevicesReceiver);
+ }
+
+ static public void setContext(Context context)
+ {
+ m_audioManager = (AudioManager)context.getSystemService(Context.AUDIO_SERVICE);
+ }
+
+ private static String[] getAudioOutputDevices()
+ {
+ return getAudioDevices(AudioManager.GET_DEVICES_OUTPUTS);
+ }
+
+ private static String[] getAudioInputDevices()
+ {
+ return getAudioDevices(AudioManager.GET_DEVICES_INPUTS);
+ }
+
+ private static boolean isBluetoothDevice(AudioDeviceInfo deviceInfo)
+ {
+ switch (deviceInfo.getType()) {
+ case AudioDeviceInfo.TYPE_BLUETOOTH_A2DP:
+ case AudioDeviceInfo.TYPE_BLUETOOTH_SCO:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ private static boolean setAudioInput(MediaRecorder recorder, int id)
+ {
+ if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.P)
+ return false;
+
+ final AudioDeviceInfo[] audioDevices =
+ m_audioManager.getDevices(AudioManager.GET_DEVICES_INPUTS);
+
+ for (AudioDeviceInfo deviceInfo : audioDevices) {
+ if (deviceInfo.getId() != id)
+ continue;
+
+ boolean isPreferred = recorder.setPreferredDevice(deviceInfo);
+ if (isPreferred && isBluetoothDevice(deviceInfo)) {
+ m_audioManager.startBluetoothSco();
+ m_audioManager.setBluetoothScoOn(true);
+ }
+
+ return isPreferred;
+ }
+
+ return false;
+ }
+
+ private static void setInputMuted(boolean mute)
+ {
+ // This method mutes the microphone across the entire platform
+ m_audioManager.setMicrophoneMute(mute);
+ }
+
+ private static boolean isMicrophoneMute()
+ {
+ return m_audioManager.isMicrophoneMute();
+ }
+
+ private static String audioDeviceTypeToString(int type)
+ {
+ // API <= 23 types
+ switch (type)
+ {
+ case AudioDeviceInfo.TYPE_AUX_LINE:
+ return "AUX Line";
+ case AudioDeviceInfo.TYPE_BLUETOOTH_A2DP:
+ case AudioDeviceInfo.TYPE_BLUETOOTH_SCO:
+ return "Bluetooth";
+ case AudioDeviceInfo.TYPE_BUILTIN_EARPIECE:
+ return "Built in earpiece";
+ case AudioDeviceInfo.TYPE_BUILTIN_MIC:
+ return "Built in microphone";
+ case AudioDeviceInfo.TYPE_BUILTIN_SPEAKER:
+ return "Built in speaker";
+ case AudioDeviceInfo.TYPE_DOCK:
+ return "Dock";
+ case AudioDeviceInfo.TYPE_FM:
+ return "FM";
+ case AudioDeviceInfo.TYPE_FM_TUNER:
+ return "FM TUNER";
+ case AudioDeviceInfo.TYPE_HDMI:
+ return "HDMI";
+ case AudioDeviceInfo.TYPE_HDMI_ARC:
+ return "HDMI ARC";
+ case AudioDeviceInfo.TYPE_IP:
+ return "IP";
+ case AudioDeviceInfo.TYPE_LINE_ANALOG:
+ return "Line analog";
+ case AudioDeviceInfo.TYPE_LINE_DIGITAL:
+ return "Line digital";
+ case AudioDeviceInfo.TYPE_TV_TUNER:
+ return "TV tuner";
+ case AudioDeviceInfo.TYPE_USB_ACCESSORY:
+ return "USB accessory";
+ case AudioDeviceInfo.TYPE_WIRED_HEADPHONES:
+ return "Wired headphones";
+ case AudioDeviceInfo.TYPE_WIRED_HEADSET:
+ return "Wired headset";
+ }
+
+ // API 24
+ if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.N) {
+ if (type == AudioDeviceInfo.TYPE_BUS)
+ return "Bus";
+ }
+
+ return "Unknown-Type";
+
+ }
+
+ private static String[] getAudioDevices(int type)
+ {
+ ArrayList<String> devices = new ArrayList<>();
+
+ if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.M) {
+ boolean builtInMicAdded = false;
+ boolean bluetoothDeviceAdded = false;
+ for (AudioDeviceInfo deviceInfo : m_audioManager.getDevices(type)) {
+ String deviceType = audioDeviceTypeToString(deviceInfo.getType());
+
+ if (deviceType.equals(audioDeviceTypeToString(AudioDeviceInfo.TYPE_UNKNOWN))) {
+ // Not supported device type
+ continue;
+ } else if (deviceType.equals(audioDeviceTypeToString(AudioDeviceInfo.TYPE_BUILTIN_MIC))) {
+ if (builtInMicAdded) {
+ // Built in mic already added. Second built in mic is CAMCORDER, but there
+ // is no reliable way of selecting it. AudioSource.MIC usually means the
+ // primary mic. AudioSource.CAMCORDER source might mean the secondary mic,
+ // but there's no guarantee. It depends e.g. on the physical placement
+ // of the mics. That's why we will not add built in microphone twice.
+ // Should we?
+ continue;
+ }
+ builtInMicAdded = true;
+ } else if (isBluetoothDevice(deviceInfo)) {
+ if (bluetoothDeviceAdded) {
+ // Bluetooth device already added. Second device is just a different
+ // technology profile (like A2DP or SCO). We should not add the same
+ // device twice. Should we?
+ continue;
+ }
+ bluetoothDeviceAdded = true;
+ }
+
+ devices.add(deviceInfo.getId() + ":" + deviceType + " ("
+ + deviceInfo.getProductName().toString() +")");
+ }
+ }
+
+ String[] ret = new String[devices.size()];
+ ret = devices.toArray(ret);
+ return ret;
+ }
+
+ private static int getModeForSpeaker(AudioDeviceInfo[] audioDevices)
+ {
+ // If we want to force the device to use the speaker when a Bluetooth or Wired headset is connected,
+ // we need to use MODE_IN_COMMUNICATION. Otherwise the MODE_NORMAL can be used.
+ for (AudioDeviceInfo deviceInfo : audioDevices) {
+ switch (deviceInfo.getType()) {
+ case AudioDeviceInfo.TYPE_BLUETOOTH_A2DP:
+ case AudioDeviceInfo.TYPE_BLUETOOTH_SCO:
+ case AudioDeviceInfo.TYPE_WIRED_HEADSET:
+ case AudioDeviceInfo.TYPE_WIRED_HEADPHONES:
+ return AudioManager.MODE_IN_COMMUNICATION;
+ default: break;
+ }
+ }
+ return AudioManager.MODE_NORMAL;
+ }
+
+
+ private static boolean setAudioOutput(int id)
+ {
+ m_useSpeaker = false;
+ final AudioDeviceInfo[] audioDevices =
+ m_audioManager.getDevices(AudioManager.GET_DEVICES_OUTPUTS);
+ for (AudioDeviceInfo deviceInfo : audioDevices) {
+ if (deviceInfo.getId() == id) {
+ switch (deviceInfo.getType())
+ {
+ case AudioDeviceInfo.TYPE_BLUETOOTH_A2DP:
+ case AudioDeviceInfo.TYPE_BLUETOOTH_SCO:
+ setAudioOutput(AudioManager.MODE_IN_COMMUNICATION, true, false);
+ return true;
+ case AudioDeviceInfo.TYPE_BUILTIN_SPEAKER:
+ m_useSpeaker = true;
+ setAudioOutput(getModeForSpeaker(audioDevices), false, true);
+ return true;
+ case AudioDeviceInfo.TYPE_WIRED_HEADSET:
+ case AudioDeviceInfo.TYPE_WIRED_HEADPHONES:
+ setAudioOutput(AudioManager.MODE_IN_COMMUNICATION, false, false);
+ return true;
+ case AudioDeviceInfo.TYPE_BUILTIN_EARPIECE:
+ // It doesn't work when WIRED HEADPHONES are connected
+ // Earpiece has the lowest priority and setWiredHeadsetOn(boolean)
+ // method to force it is deprecated
+ Log.w(TAG, "Built in Earpiece may not work when "
+ + "Wired Headphones are connected");
+ setAudioOutput(AudioManager.MODE_IN_CALL, false, false);
+ return true;
+ case AudioDeviceInfo.TYPE_HDMI:
+ case AudioDeviceInfo.TYPE_HDMI_ARC:
+ case AudioDeviceInfo.TYPE_HDMI_EARC:
+ setAudioOutput(AudioManager.MODE_NORMAL, false, false);
+ return true;
+ default:
+ return false;
+ }
+ }
+ }
+ return false;
+ }
+
+ private static void setAudioOutput(int mode, boolean bluetoothOn, boolean speakerOn)
+ {
+ m_audioManager.setMode(mode);
+ if (bluetoothOn) {
+ m_audioManager.startBluetoothSco();
+ } else {
+ m_audioManager.stopBluetoothSco();
+ }
+ m_audioManager.setBluetoothScoOn(bluetoothOn);
+ m_audioManager.setSpeakerphoneOn(speakerOn);
+
+ }
+
+ private static void streamSound()
+ {
+ byte data[] = new byte[m_bufferSize];
+ while (m_isStreaming) {
+ m_recorder.read(data, 0, m_bufferSize);
+ m_streamPlayer.play();
+ m_streamPlayer.write(data, 0, m_bufferSize);
+ m_streamPlayer.stop();
+ }
+ }
+
+ private static void startSoundStreaming(int inputId, int outputId)
+ {
+ if (m_isStreaming)
+ stopSoundStreaming();
+
+ m_recorder = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, m_sampleRate, m_channels,
+ m_audioFormat, m_bufferSize);
+ m_streamPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, m_sampleRate, m_channels,
+ m_audioFormat, m_bufferSize, AudioTrack.MODE_STREAM);
+
+ final AudioDeviceInfo[] devices = m_audioManager.getDevices(AudioManager.GET_DEVICES_ALL);
+ for (AudioDeviceInfo deviceInfo : devices) {
+ if (deviceInfo.getId() == outputId) {
+ m_streamPlayer.setPreferredDevice(deviceInfo);
+ } else if (deviceInfo.getId() == inputId) {
+ m_recorder.setPreferredDevice(deviceInfo);
+ }
+ }
+
+ m_recorder.startRecording();
+ m_isStreaming = true;
+
+ m_streamingThread = new Thread(new Runnable() {
+ public void run() {
+ streamSound();
+ }
+ });
+
+ m_streamingThread.start();
+ }
+
+ private static void stopSoundStreaming()
+ {
+ if (!m_isStreaming)
+ return;
+
+ m_isStreaming = false;
+ try {
+ m_streamingThread.join();
+ m_streamingThread = null;
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ m_recorder.stop();
+ m_recorder.release();
+ m_streamPlayer.release();
+ m_streamPlayer = null;
+ m_recorder = null;
+ }
+}
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java
new file mode 100644
index 000000000..a88352bb2
--- /dev/null
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java
@@ -0,0 +1,519 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+package org.qtproject.qt.android.multimedia;
+
+import android.annotation.SuppressLint;
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.graphics.Rect;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CaptureFailure;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.TotalCaptureResult;
+import android.media.Image;
+import android.media.ImageReader;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.util.Log;
+import android.util.Range;
+import android.view.Surface;
+import java.lang.Thread;
+import java.util.ArrayList;
+import java.util.List;
+
@TargetApi(23)
/**
 * JNI bridge around the camera2 API: opens a camera device, configures a
 * capture session feeding two ImageReaders (one for preview/video frames,
 * one for still photos), and forwards device/session/frame events to native
 * code through the declared native methods.
 */
public class QtCamera2 {

    CameraDevice mCameraDevice = null;
    QtVideoDeviceManager mVideoDeviceManager = null;
    HandlerThread mBackgroundThread;          // owns the looper all camera callbacks run on
    Handler mBackgroundHandler;
    ImageReader mImageReader = null;          // receives preview/video frames
    ImageReader mCapturedPhotoReader = null;  // receives still captures
    CameraManager mCameraManager;
    CameraCaptureSession mCaptureSession;
    CaptureRequest.Builder mPreviewRequestBuilder;
    CaptureRequest mPreviewRequest;
    String mCameraId;
    List<Surface> mTargetSurfaces = new ArrayList<>();

    // Still-capture state machine, advanced by mCaptureCallback.process().
    private static final int STATE_PREVIEW = 0;                // normal preview, no capture pending
    private static final int STATE_WAITING_LOCK = 1;           // waiting for autofocus to lock
    private static final int STATE_WAITING_PRECAPTURE = 2;     // waiting for AE precapture to begin
    private static final int STATE_WAITING_NON_PRECAPTURE = 3; // waiting for AE precapture to finish
    private static final int STATE_PICTURE_TAKEN = 4;          // still capture submitted

    private int mState = STATE_PREVIEW;
    private Object mStartMutex = new Object(); // guards mIsStarted and session start/stop/reconfigure
    private boolean mIsStarted = false;
    private static int MaxNumberFrames = 12;   // max images each ImageReader may buffer at once
    private int mFlashMode = CaptureRequest.CONTROL_AE_MODE_ON;
    private int mTorchMode = CameraMetadata.FLASH_MODE_OFF;
    private int mAFMode = CaptureRequest.CONTROL_AF_MODE_OFF;
    private float mZoomFactor = 1.0f;
    private Range<Integer> mFpsRange = null;   // null = leave AE target FPS range at the default
    private QtExifDataHandler mExifDataHandler = null;

    native void onCameraOpened(String cameraId);
    native void onCameraDisconnect(String cameraId);
    native void onCameraError(String cameraId, int error);
    // Device lifecycle: keeps mCameraDevice in sync and notifies native code.
    CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(CameraDevice cameraDevice) {
            // Close any previously held device before adopting the new one.
            if (mCameraDevice != null)
                mCameraDevice.close();
            mCameraDevice = cameraDevice;
            onCameraOpened(mCameraId);
        }
        @Override
        public void onDisconnected(CameraDevice cameraDevice) {
            cameraDevice.close();
            if (mCameraDevice == cameraDevice)
                mCameraDevice = null;
            onCameraDisconnect(mCameraId);
        }
        @Override
        public void onError(CameraDevice cameraDevice, int error) {
            cameraDevice.close();
            if (mCameraDevice == cameraDevice)
                mCameraDevice = null;
            onCameraError(mCameraId, error);
        }
    };

    native void onCaptureSessionConfigured(String cameraId);
    native void onCaptureSessionConfigureFailed(String cameraId);
    // Session lifecycle: stores the configured session and forwards
    // configured/failed/active/closed events to native code.
    CameraCaptureSession.StateCallback mCaptureStateCallback = new CameraCaptureSession.StateCallback() {
        @Override
        public void onConfigured(CameraCaptureSession cameraCaptureSession) {
            mCaptureSession = cameraCaptureSession;
            onCaptureSessionConfigured(mCameraId);
        }

        @Override
        public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
            onCaptureSessionConfigureFailed(mCameraId);
        }

        @Override
        public void onActive(CameraCaptureSession cameraCaptureSession) {
            super.onActive(cameraCaptureSession);
            onSessionActive(mCameraId);
        }

        @Override
        public void onClosed(CameraCaptureSession cameraCaptureSession) {
            super.onClosed(cameraCaptureSession);
            onSessionClosed(mCameraId);
        }
    };

    native void onSessionActive(String cameraId);
    native void onSessionClosed(String cameraId);
    native void onCaptureSessionFailed(String cameraId, int reason, long frameNumber);
    // Per-capture callback: reports failures and drives the still-capture
    // state machine from AF/AE state in each (partial) result.
    CameraCaptureSession.CaptureCallback mCaptureCallback = new CameraCaptureSession.CaptureCallback() {
        public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
            super.onCaptureFailed(session, request, failure);
            onCaptureSessionFailed(mCameraId, failure.getReason(), failure.getFrameNumber());
        }

        // Advances mState based on the AF/AE state of the latest result and
        // triggers the actual still capture when focus/exposure are ready.
        private void process(CaptureResult result) {
            switch (mState) {
                case STATE_WAITING_LOCK: {
                    Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
                    if (afState == null) {
                        // Device reports no AF state; capture immediately.
                        capturePhoto();
                    } else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState ||
                               CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState) {
                        Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                        if (aeState == null ||
                            aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
                            mState = STATE_PICTURE_TAKEN;
                            capturePhoto();
                        } else {
                            // Exposure not converged yet: run an AE precapture
                            // sequence before taking the photo.
                            try {
                                mPreviewRequestBuilder.set(
                                    CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                                    CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
                                mState = STATE_WAITING_PRECAPTURE;
                                mCaptureSession.capture(mPreviewRequestBuilder.build(),
                                                        mCaptureCallback,
                                                        mBackgroundHandler);
                            } catch (CameraAccessException e) {
                                Log.w("QtCamera2", "Cannot get access to the camera: " + e);
                            }
                        }
                    }
                    break;
                }
                case STATE_WAITING_PRECAPTURE: {
                    Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                    if (aeState == null || aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
                        mState = STATE_WAITING_NON_PRECAPTURE;
                    }
                    break;
                }
                case STATE_WAITING_NON_PRECAPTURE: {
                    Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                    if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
                        mState = STATE_PICTURE_TAKEN;
                        capturePhoto();
                    }
                    break;
                }
                default:
                    break;
            }
        }

        @Override
        public void onCaptureProgressed(CameraCaptureSession s, CaptureRequest r, CaptureResult partialResult) {
            process(partialResult);
        }

        @Override
        public void onCaptureCompleted(CameraCaptureSession s, CaptureRequest r, TotalCaptureResult result) {
            process(result);
        }
    };

    public QtCamera2(Context context) {
        mCameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
        mVideoDeviceManager = new QtVideoDeviceManager(context);
        startBackgroundThread();
    }

    // Creates the background thread/handler that receives all camera callbacks.
    void startBackgroundThread() {
        mBackgroundThread = new HandlerThread("CameraBackground");
        mBackgroundThread.start();
        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
    }

    // Quits the background thread and waits for it to finish.
    void stopBackgroundThread() {
        mBackgroundThread.quitSafely();
        try {
            mBackgroundThread.join();
            mBackgroundThread = null;
            mBackgroundHandler = null;
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Asynchronously opens the camera with the given id; success or failure is
     * delivered through mStateCallback. Returns false only if openCamera()
     * itself threw. Camera permission must already have been granted.
     */
    @SuppressLint("MissingPermission")
    public boolean open(String cameraId) {
        try {
            mCameraId = cameraId;
            mCameraManager.openCamera(cameraId,mStateCallback,mBackgroundHandler);
            return true;
        } catch (Exception e){
            Log.w("QtCamera2", "Failed to open camera:" + e);
        }

        return false;
    }

    native void onPhotoAvailable(String cameraId, Image frame);

    // Forwards each captured still photo to native code.
    ImageReader.OnImageAvailableListener mOnPhotoAvailableListener = new ImageReader.OnImageAvailableListener() {
        @Override
        public void onImageAvailable(ImageReader reader) {
            QtCamera2.this.onPhotoAvailable(mCameraId, reader.acquireLatestImage());
        }
    };

    native void onFrameAvailable(String cameraId, Image frame);

    // Forwards preview/video frames to native code, with a retry-then-restart
    // fallback when the consumer holds on to images for too long.
    ImageReader.OnImageAvailableListener mOnImageAvailableListener = new ImageReader.OnImageAvailableListener() {
        @Override
        public void onImageAvailable(ImageReader reader) {
            try {
                Image img = reader.acquireLatestImage();
                if (img != null)
                    QtCamera2.this.onFrameAvailable(mCameraId, img);
            } catch (IllegalStateException e) {
                // It seems that ffmpeg is processing images for too long (and does not close it)
                // Give it a little more time. Restarting the camera session if it doesn't help
                Log.e("QtCamera2", "Image processing taking too long. Let's wait 0,5s more " + e);
                try {
                    Thread.sleep(500);
                    QtCamera2.this.onFrameAvailable(mCameraId, reader.acquireLatestImage());
                } catch (IllegalStateException | InterruptedException e2) {
                    Log.e("QtCamera2", "Will not wait anymore. Restart camera session. " + e2);
                    // Remember current used camera ID, because stopAndClose will clear the value
                    String cameraId = mCameraId;
                    stopAndClose();
                    addImageReader(mImageReader.getWidth(), mImageReader.getHeight(),
                                   mImageReader.getImageFormat());
                    open(cameraId);
                }
            }
        }
    };


    /** Sets up the image readers and the requested frame-rate range. */
    public void prepareCamera(int width, int height, int format, int minFps, int maxFps) {

        addImageReader(width, height, format);
        setFrameRate(minFps, maxFps);
    }

    // (Re)creates the frame and photo ImageReaders and registers their
    // surfaces as capture targets, dropping any previous reader surfaces.
    private void addImageReader(int width, int height, int format) {

        if (mImageReader != null)
            removeSurface(mImageReader.getSurface());

        if (mCapturedPhotoReader != null)
            removeSurface(mCapturedPhotoReader.getSurface());

        mImageReader = ImageReader.newInstance(width, height, format, MaxNumberFrames);
        mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);
        addSurface(mImageReader.getSurface());

        mCapturedPhotoReader = ImageReader.newInstance(width, height, format, MaxNumberFrames);
        mCapturedPhotoReader.setOnImageAvailableListener(mOnPhotoAvailableListener, mBackgroundHandler);
        addSurface(mCapturedPhotoReader.getSurface());
    }

    // Stores the AE target FPS range; non-positive bounds disable the override.
    private void setFrameRate(int minFrameRate, int maxFrameRate) {

        if (minFrameRate <= 0 || maxFrameRate <= 0)
            mFpsRange = null;
        else
            mFpsRange = new Range<>(minFrameRate, maxFrameRate);
    }

    /** Registers a capture target surface (deduplicated). */
    public boolean addSurface(Surface surface) {
        if (mTargetSurfaces.contains(surface))
            return true;

        return mTargetSurfaces.add(surface);
    }

    /** Unregisters a capture target surface. */
    public boolean removeSurface(Surface surface) {
        return mTargetSurfaces.remove(surface);
    }

    /** Drops all registered capture target surfaces. */
    public void clearSurfaces() {
        mTargetSurfaces.clear();
    }

    /**
     * Asynchronously creates a capture session over the registered surfaces;
     * the outcome arrives via mCaptureStateCallback. Requires an open device.
     */
    public boolean createSession() {
        if (mCameraDevice == null)
            return false;

        try {
            mCameraDevice.createCaptureSession(mTargetSurfaces, mCaptureStateCallback, mBackgroundHandler);
            return true;
        } catch (Exception exception) {
            Log.w("QtCamera2", "Failed to create a capture session:" + exception);
        }
        return false;
    }

    /**
     * Builds the repeating preview request from the given template, applying
     * the stored flash/torch/AF/zoom/FPS settings, and starts it. Requires an
     * open device and a configured session.
     */
    public boolean start(int template) {

        if (mCameraDevice == null)
            return false;

        if (mCaptureSession == null)
            return false;

        synchronized (mStartMutex) {
            try {
                mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(template);
                mPreviewRequestBuilder.addTarget(mImageReader.getSurface());
                // Prefer continuous-picture AF when the camera supports it.
                mAFMode = CaptureRequest.CONTROL_AF_MODE_OFF;
                for (int mode : mVideoDeviceManager.getSupportedAfModes(mCameraId)) {
                    if (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE) {
                        mAFMode = mode;
                        break;
                    }
                }

                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, mFlashMode);
                mPreviewRequestBuilder.set(CaptureRequest.FLASH_MODE, mTorchMode);
                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, mAFMode);
                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT, CameraMetadata.CONTROL_CAPTURE_INTENT_VIDEO_RECORD);
                if (mZoomFactor != 1.0f)
                    mPreviewRequestBuilder.set(CaptureRequest.SCALER_CROP_REGION, getScalerCropRegion());
                if (mFpsRange != null)
                    mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, mFpsRange);
                mPreviewRequest = mPreviewRequestBuilder.build();
                mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mBackgroundHandler);
                mIsStarted = true;
                return true;

            } catch (Exception exception) {
                Log.w("QtCamera2", "Failed to start preview:" + exception);
            }
            return false;
        }
    }

    /** Stops the session, closes the device and resets all session state. */
    public void stopAndClose() {
        synchronized (mStartMutex) {
            try {
                if (null != mCaptureSession) {
                    mCaptureSession.close();
                    mCaptureSession = null;
                }
                if (null != mCameraDevice) {
                    mCameraDevice.close();
                    mCameraDevice = null;
                }
                mCameraId = "";
                mTargetSurfaces.clear();
            } catch (Exception exception) {
                Log.w("QtCamera2", "Failed to stop and close:" + exception);
            }
            mIsStarted = false;
        }
    }

    // Submits the actual still-capture request to mCapturedPhotoReader and,
    // once it completes, records Exif data and restores the normal preview.
    private void capturePhoto() {
        try {
            final CaptureRequest.Builder captureBuilder =
                    mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
            captureBuilder.addTarget(mCapturedPhotoReader.getSurface());
            captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, mFlashMode);
            if (mZoomFactor != 1.0f)
                captureBuilder.set(CaptureRequest.SCALER_CROP_REGION, getScalerCropRegion());

            CameraCaptureSession.CaptureCallback captureCallback
                    = new CameraCaptureSession.CaptureCallback() {
                @Override
                public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                                               TotalCaptureResult result) {
                    try {
                        mExifDataHandler = new QtExifDataHandler(result);
                        // Reset the focus/flash and go back to the normal state of preview.
                        mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                                                   CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
                        mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                                                   CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
                        mPreviewRequest = mPreviewRequestBuilder.build();
                        mState = STATE_PREVIEW;
                        mCaptureSession.setRepeatingRequest(mPreviewRequest,
                                                            mCaptureCallback,
                                                            mBackgroundHandler);
                    } catch (CameraAccessException e) {
                        e.printStackTrace();
                    }
                }
            };

            mCaptureSession.capture(captureBuilder.build(), captureCallback, mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /**
     * Takes a still photo. With continuous-picture AF the capture is deferred
     * until the AF/AE state machine (see mCaptureCallback) reports readiness;
     * otherwise the photo is captured immediately.
     */
    public void takePhoto() {
        try {
            if (mAFMode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE) {
                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
                mState = STATE_WAITING_LOCK;
                mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, mBackgroundHandler);
            } else {
                capturePhoto();
            }
        } catch (CameraAccessException e) {
            Log.w("QtCamera2", "Cannot get access to the camera: " + e);
        }
    }

    /** Writes the Exif data collected by the last capture into the given file. */
    public void saveExifToFile(String path)
    {
        if (mExifDataHandler != null)
            mExifDataHandler.save(path);
        else
            Log.e("QtCamera2", "No Exif data that could be saved to " + path);
    }

    // Returns a crop region centered in the sensor's active array whose size
    // is scaled by 1/mZoomFactor, implementing digital zoom.
    private Rect getScalerCropRegion()
    {
        Rect activePixels = mVideoDeviceManager.getActiveArraySize(mCameraId);
        float zoomRatio = 1.0f;
        if (mZoomFactor != 0.0f)
            zoomRatio = 1.0f/mZoomFactor;
        int croppedWidth = activePixels.width() - (int)(activePixels.width() * zoomRatio);
        int croppedHeight = activePixels.height() - (int)(activePixels.height() * zoomRatio);
        return new Rect(croppedWidth/2, croppedHeight/2, activePixels.width() - croppedWidth/2,
                        activePixels.height() - croppedHeight/2);
    }

    /** Applies a digital zoom factor; takes effect only while preview runs. */
    public void zoomTo(float factor)
    {
        synchronized (mStartMutex) {
            mZoomFactor = factor;

            if (!mIsStarted) {
                Log.w("QtCamera2", "Cannot set zoom on invalid camera");
                return;
            }

            mPreviewRequestBuilder.set(CaptureRequest.SCALER_CROP_REGION, getScalerCropRegion());
            mPreviewRequest = mPreviewRequestBuilder.build();

            try {
                mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mBackgroundHandler);
            } catch (Exception exception) {
                Log.w("QtCamera2", "Failed to set zoom:" + exception);
            }
        }
    }
    /**
     * Sets the AE/flash mode by name (mapped through
     * QtVideoDeviceManager.stringToControlAEMode); applied to the running
     * preview immediately when started.
     */
    public void setFlashMode(String flashMode)
    {
        synchronized (mStartMutex) {

            int flashModeValue = mVideoDeviceManager.stringToControlAEMode(flashMode);
            if (flashModeValue < 0) {
                Log.w("QtCamera2", "Unknown flash mode");
                return;
            }
            mFlashMode = flashModeValue;

            if (!mIsStarted)
                return;

            mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, mFlashMode);
            mPreviewRequest = mPreviewRequestBuilder.build();

            try {
                mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mBackgroundHandler);
            } catch (Exception exception) {
                Log.w("QtCamera2", "Failed to set flash mode:" + exception);
            }
        }
    }

    // Maps the boolean torch switch to the camera2 FLASH_MODE constant.
    private int getTorchModeValue(boolean mode)
    {
        return mode ? CameraMetadata.FLASH_MODE_TORCH : CameraMetadata.FLASH_MODE_OFF;
    }

    /** Turns the torch on or off; applied immediately while preview runs. */
    public void setTorchMode(boolean torchMode)
    {
        synchronized (mStartMutex) {
            mTorchMode = getTorchModeValue(torchMode);

            if (mIsStarted) {
                mPreviewRequestBuilder.set(CaptureRequest.FLASH_MODE, mTorchMode);
                mPreviewRequest = mPreviewRequestBuilder.build();

                try {
                    mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mBackgroundHandler);
                } catch (Exception exception) {
                    Log.w("QtCamera2", "Failed to set flash mode:" + exception);
                }
            }
        }
    }
}
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCameraListener.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCameraListener.java
new file mode 100644
index 000000000..3e35eb416
--- /dev/null
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCameraListener.java
@@ -0,0 +1,207 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+package org.qtproject.qt.android.multimedia;
+
+import android.hardware.Camera;
+import android.hardware.Camera.CameraInfo;
+
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.ImageFormat;
+import android.graphics.Matrix;
+import android.graphics.SurfaceTexture;
+import android.util.Log;
+import java.lang.Math;
+import java.io.ByteArrayOutputStream;
+
/**
 * Callback hub for the legacy android.hardware.Camera API: manages the
 * preview-buffer pool and forwards shutter, JPEG-picture, autofocus and
 * preview-frame events to native code via the declared native methods.
 */
public class QtCameraListener implements Camera.ShutterCallback,
                                         Camera.PictureCallback,
                                         Camera.AutoFocusCallback,
                                         Camera.PreviewCallback
{
    private static final String TAG = "Qt Camera";

    // Number of preview buffers queued with the camera at any time.
    private static final int BUFFER_POOL_SIZE = 2;

    private int m_cameraId = -1;

    private boolean m_notifyNewFrames = false;         // push every preview frame to native code
    private boolean m_notifyWhenFrameAvailable = false; // one-shot notification for the next frame
    private byte[][] m_previewBuffers = null;
    private byte[] m_lastPreviewBuffer = null;
    private Camera.Size m_previewSize = null;
    private int m_previewFormat = ImageFormat.NV21; // Default preview format on all devices
    private int m_previewBytesPerLine = -1;
    private int m_rotation = 0;                     // rotation applied to captured photos, degrees

    private QtCameraListener(int id)
    {
        m_cameraId = id;
    }

    /** Enables/disables per-frame notifications to native code. */
    public void notifyNewFrames(boolean notify)
    {
        m_notifyNewFrames = notify;
    }

    /** Arms a one-shot notification for the next available preview frame. */
    public void notifyWhenFrameAvailable(boolean notify)
    {
        m_notifyWhenFrameAvailable = notify;
    }

    /** Returns the most recently delivered preview buffer (may be null). */
    public byte[] lastPreviewBuffer()
    {
        return m_lastPreviewBuffer;
    }

    /** Preview width in pixels, or -1 before setupPreviewCallback() ran. */
    public int previewWidth()
    {
        if (m_previewSize == null)
            return -1;

        return m_previewSize.width;
    }

    /** Preview height in pixels, or -1 before setupPreviewCallback() ran. */
    public int previewHeight()
    {
        if (m_previewSize == null)
            return -1;

        return m_previewSize.height;
    }

    /** ImageFormat constant of the preview frames. */
    public int previewFormat()
    {
        return m_previewFormat;
    }

    /** Stride of the first plane in bytes, or -1 when unknown. */
    public int previewBytesPerLine()
    {
        return m_previewBytesPerLine;
    }

    /** Detaches this listener (also discards the queued callback buffers). */
    public void clearPreviewCallback(Camera camera)
    {
        camera.setPreviewCallbackWithBuffer(null);
    }

    /** Sets the rotation (degrees) applied to photos in onPictureTaken(). */
    public void setPhotoRotation(int rotation)
    {
        m_rotation = rotation;
    }

    /**
     * Reads the camera's current preview size/format, computes the required
     * buffer size and stride, and installs this listener with a freshly
     * allocated buffer pool.
     */
    public void setupPreviewCallback(Camera camera)
    {
        // Clear previous callback (also clears added buffers)
        clearPreviewCallback(camera);
        m_lastPreviewBuffer = null;

        final Camera.Parameters params = camera.getParameters();
        m_previewSize = params.getPreviewSize();
        m_previewFormat = params.getPreviewFormat();

        int bufferSizeNeeded = 0;
        if (m_previewFormat == ImageFormat.YV12) {
            // For YV12, bytes per line must be a multiple of 16
            final int yStride = (int) Math.ceil(m_previewSize.width / 16.0) * 16;
            final int uvStride = (int) Math.ceil((yStride / 2) / 16.0) * 16;
            final int ySize = yStride * m_previewSize.height;
            final int uvSize = uvStride * m_previewSize.height / 2;
            bufferSizeNeeded = ySize + uvSize * 2;

            m_previewBytesPerLine = yStride;

        } else {
            double bytesPerPixel = ImageFormat.getBitsPerPixel(m_previewFormat) / 8.0;
            bufferSizeNeeded = (int) Math.ceil(bytesPerPixel * m_previewSize.width * m_previewSize.height);

            // bytes per line are calculated only for the first plane
            switch (m_previewFormat) {
            case ImageFormat.NV21:
                m_previewBytesPerLine = m_previewSize.width; // 1 byte per sample and tightly packed
                break;
            case ImageFormat.RGB_565:
            case ImageFormat.YUY2:
                m_previewBytesPerLine = m_previewSize.width * 2; // 2 bytes per pixel
                break;
            default:
                m_previewBytesPerLine = -1;
                break;
            }
        }

        // We could keep the same buffers when they are already bigger than the required size
        // but the Android doc says the size must match, so in doubt just replace them.
        if (m_previewBuffers == null || m_previewBuffers[0].length != bufferSizeNeeded)
            m_previewBuffers = new byte[BUFFER_POOL_SIZE][bufferSizeNeeded];

        // Add callback and queue all buffers
        camera.setPreviewCallbackWithBuffer(this);
        for (byte[] buffer : m_previewBuffers)
            camera.addCallbackBuffer(buffer);
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera)
    {
        // Re-enqueue the last buffer
        if (m_lastPreviewBuffer != null)
            camera.addCallbackBuffer(m_lastPreviewBuffer);

        m_lastPreviewBuffer = data;

        if (data != null) {
            if (m_notifyWhenFrameAvailable) {
                m_notifyWhenFrameAvailable = false;
                notifyFrameAvailable(m_cameraId);
            }
            if (m_notifyNewFrames) {
                notifyNewPreviewFrame(m_cameraId, data,
                                      m_previewSize.width, m_previewSize.height,
                                      m_previewFormat,
                                      m_previewBytesPerLine);
            }
        }
    }

    @Override
    public void onShutter()
    {
        notifyPictureExposed(m_cameraId);
    }

    // Rotates the captured JPEG by m_rotation (and mirrors front-camera
    // shots), re-encodes it, and hands the bytes to native code.
    @Override
    public void onPictureTaken(byte[] data, Camera camera)
    {
        Camera.CameraInfo info = new Camera.CameraInfo();
        Camera.getCameraInfo(m_cameraId, info);
        Bitmap source = BitmapFactory.decodeByteArray(data, 0, data.length);
        Matrix matrix = new Matrix();
        matrix.postRotate(m_rotation);
        if (info.facing == CameraInfo.CAMERA_FACING_FRONT) {
            // Mirror horizontally so front-camera photos match the preview.
            matrix.postScale(-1, 1, source.getWidth() / 2.0f, source.getHeight() / 2.0f);
        }
        Bitmap rotatedBitmap = Bitmap.createBitmap(source, 0, 0, source.getWidth(),
                                                   source.getHeight(), matrix, true);
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        rotatedBitmap.compress(Bitmap.CompressFormat.JPEG, 100, outputStream);
        byte[] byteArray = outputStream.toByteArray();
        rotatedBitmap.recycle();
        source.recycle();
        notifyPictureCaptured(m_cameraId, byteArray);
    }

    @Override
    public void onAutoFocus(boolean success, Camera camera)
    {
        notifyAutoFocusComplete(m_cameraId, success);
    }

    private static native void notifyAutoFocusComplete(int id, boolean success);
    private static native void notifyPictureExposed(int id);
    private static native void notifyPictureCaptured(int id, byte[] data);
    private static native void notifyNewPreviewFrame(int id, byte[] data, int width, int height,
                                                     int pixelFormat, int bytesPerLine);
    private static native void notifyFrameAvailable(int id);
}
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtExifDataHandler.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtExifDataHandler.java
new file mode 100644
index 000000000..c2699eb1d
--- /dev/null
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtExifDataHandler.java
@@ -0,0 +1,51 @@
+// Copyright (C) 2023 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+package org.qtproject.qt.android.multimedia;
+
+import android.hardware.camera2.CaptureResult;
+import android.media.ExifInterface;
+import android.os.Build;
+import android.util.Log;
+
+import java.io.IOException;
+
+public class QtExifDataHandler {
+
+ private int mFlashFired = 0;
+ private long mExposureTime = 0L;
+ private float mFocalLength = 0;
+ private static String mModel = Build.MANUFACTURER + " " + Build.MODEL;
+
+ public QtExifDataHandler(CaptureResult r)
+ {
+ Integer flash = r.get(CaptureResult.FLASH_STATE);
+ if (flash != null && flash == CaptureResult.FLASH_STATE_FIRED)
+ mFlashFired = 1;
+
+ Long exposureTime = r.get(CaptureResult.SENSOR_EXPOSURE_TIME);
+ if (exposureTime != null)
+ mExposureTime = exposureTime/1000000000;
+ mFocalLength = r.get(CaptureResult.LENS_FOCAL_LENGTH);
+ }
+
+ public void save(String path)
+ {
+ ExifInterface exif;
+ try {
+ exif = new ExifInterface(path);
+ } catch ( IOException e ) {
+ Log.e("QtExifDataHandler", "Cannot open file: " + path + "\n" + e);
+ return;
+ }
+ exif.setAttribute(ExifInterface.TAG_FLASH, String.valueOf(mFlashFired));
+ exif.setAttribute(ExifInterface.TAG_EXPOSURE_TIME, String.valueOf(mExposureTime));
+ exif.setAttribute(ExifInterface.TAG_FOCAL_LENGTH, String.valueOf(mFocalLength));
+ exif.setAttribute(ExifInterface.TAG_MODEL, mModel);
+
+ try {
+ exif.saveAttributes();
+ } catch ( IOException e ) {
+ Log.e("QtExifDataHandler", "Cannot save file: " + path + "\n" + e);
+ }
+ }
+}
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMediaRecorderListener.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMediaRecorderListener.java
new file mode 100644
index 000000000..97d317119
--- /dev/null
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMediaRecorderListener.java
@@ -0,0 +1,31 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+package org.qtproject.qt.android.multimedia;
+
+import android.media.MediaRecorder;
+
+public class QtMediaRecorderListener implements MediaRecorder.OnErrorListener, MediaRecorder.OnInfoListener
+{
+ private long m_id = -1;
+
+ public QtMediaRecorderListener(long id)
+ {
+ m_id = id;
+ }
+
+ @Override
+ public void onError(MediaRecorder mr, int what, int extra)
+ {
+ notifyError(m_id, what, extra);
+ }
+
+ @Override
+ public void onInfo(MediaRecorder mr, int what, int extra)
+ {
+ notifyInfo(m_id, what, extra);
+ }
+
+ private static native void notifyError(long id, int what, int extra);
+ private static native void notifyInfo(long id, int what, int extra);
+}
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java
new file mode 100644
index 000000000..3c40d32a6
--- /dev/null
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java
@@ -0,0 +1,149 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+package org.qtproject.qt.android.multimedia;
+
+import android.app.Activity;
+import android.content.Context;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecList;
+import android.view.OrientationEventListener;
+import android.webkit.MimeTypeMap;
+import android.net.Uri;
+import android.content.ContentResolver;
+import android.os.Environment;
+import android.media.MediaScannerConnection;
+import java.lang.String;
+import java.io.File;
+import android.util.Log;
+
+public class QtMultimediaUtils
+{
+ static private class OrientationListener extends OrientationEventListener
+ {
+ static public int deviceOrientation = 0;
+
+ public OrientationListener(Context context)
+ {
+ super(context);
+ }
+
+ @Override
+ public void onOrientationChanged(int orientation)
+ {
+ if (orientation == ORIENTATION_UNKNOWN)
+ return;
+
+ deviceOrientation = orientation;
+ }
+ }
+
+ static private Context m_context = null;
+ static private OrientationListener m_orientationListener = null;
+ private static final String QtTAG = "Qt QtMultimediaUtils";
+
+ static public void setActivity(Activity qtMainActivity, Object qtActivityDelegate)
+ {
+ }
+
+ static public void setContext(Context context)
+ {
+ m_context = context;
+ m_orientationListener = new OrientationListener(context);
+ }
+
+ public QtMultimediaUtils()
+ {
+ }
+
+ static void enableOrientationListener(boolean enable)
+ {
+ if (enable)
+ m_orientationListener.enable();
+ else
+ m_orientationListener.disable();
+ }
+
+ static int getDeviceOrientation()
+ {
+ return OrientationListener.deviceOrientation;
+ }
+
+ static String getDefaultMediaDirectory(int type)
+ {
+ String dirType = new String();
+ switch (type) {
+ case 0:
+ dirType = Environment.DIRECTORY_MUSIC;
+ break;
+ case 1:
+ dirType = Environment.DIRECTORY_MOVIES;
+ break;
+ case 2:
+ dirType = Environment.DIRECTORY_DCIM;
+ break;
+ default:
+ break;
+ }
+
+ File path = new File("");
+ if (type == 3) {
+ // There is no API for knowing the standard location for sounds
+ // such as voice recording. Though, it's typically in the 'Sounds'
+ // directory at the root of the external storage
+ path = new File(Environment.getExternalStorageDirectory().getAbsolutePath()
+ + File.separator + "Sounds");
+ } else {
+ path = Environment.getExternalStoragePublicDirectory(dirType);
+ }
+
+ path.mkdirs(); // make sure the directory exists
+
+ return path.getAbsolutePath();
+ }
+
+ static void registerMediaFile(String file)
+ {
+ MediaScannerConnection.scanFile(m_context, new String[] { file }, null, null);
+ }
+
+ static File getCacheDirectory() { return m_context.getCacheDir(); }
+
+ /*
+ The array of codecs is in the form:
+ c2.qti.vp9.decoder
+ c2.android.opus.encoder
+ OMX.google.opus.decoder
+ */
+ private static String[] getMediaCodecs()
+ {
+ final MediaCodecList codecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
+ final MediaCodecInfo[] codecInfoArray = codecList.getCodecInfos();
+ String[] codecs = new String[codecInfoArray.length];
+ for (int i = 0; i < codecInfoArray.length; ++i)
+ codecs[i] = codecInfoArray[i].getName();
+ return codecs;
+ }
+
+ public static String getMimeType(Context context, String url)
+ {
+ Uri parsedUri = Uri.parse(url);
+ String type = null;
+
+ try {
+ String scheme = parsedUri.getScheme();
+ if (scheme != null && scheme.contains("content")) {
+ ContentResolver cR = context.getContentResolver();
+ type = cR.getType(parsedUri);
+ } else {
+ String extension = MimeTypeMap.getFileExtensionFromUrl(url);
+ if (extension != null)
+ type = MimeTypeMap.getSingleton().getMimeTypeFromExtension(extension);
+ }
+ } catch (Exception e) {
+ Log.e(QtTAG, "getMimeType(): " + e.toString());
+ }
+ return type;
+ }
+}
+
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceHolderCallback.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceHolderCallback.java
new file mode 100644
index 000000000..30dad68d5
--- /dev/null
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceHolderCallback.java
@@ -0,0 +1,37 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+package org.qtproject.qt.android.multimedia;
+
+import android.view.SurfaceHolder;
+
+public class QtSurfaceHolderCallback implements SurfaceHolder.Callback
+{
+ private long m_id = -1;
+
+ public QtSurfaceHolderCallback(long id)
+ {
+ m_id = id;
+ }
+
+ @Override
+ public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
+ {
+ }
+
+ @Override
+ public void surfaceCreated(SurfaceHolder holder)
+ {
+ notifySurfaceCreated(m_id);
+ }
+
+ @Override
+ public void surfaceDestroyed(SurfaceHolder holder)
+ {
+ notifySurfaceDestroyed(m_id);
+ }
+
+
+ private static native void notifySurfaceCreated(long id);
+ private static native void notifySurfaceDestroyed(long id);
+}
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceTextureHolder.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceTextureHolder.java
new file mode 100644
index 000000000..59406ca59
--- /dev/null
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceTextureHolder.java
@@ -0,0 +1,89 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+package org.qtproject.qt.android.multimedia;
+
+import android.view.SurfaceHolder;
+import android.view.Surface;
+import android.graphics.Rect;
+import android.graphics.Canvas;
+
+public class QtSurfaceTextureHolder implements SurfaceHolder
+{
+ private Surface surfaceTexture;
+
+ public QtSurfaceTextureHolder(Surface surface)
+ {
+ surfaceTexture = surface;
+ }
+
+ @Override
+ public void addCallback(SurfaceHolder.Callback callback)
+ {
+ }
+
+ @Override
+ public Surface getSurface()
+ {
+ return surfaceTexture;
+ }
+
+ @Override
+ public Rect getSurfaceFrame()
+ {
+ return new Rect();
+ }
+
+ @Override
+ public boolean isCreating()
+ {
+ return false;
+ }
+
+ @Override
+ public Canvas lockCanvas(Rect dirty)
+ {
+ return new Canvas();
+ }
+
+ @Override
+ public Canvas lockCanvas()
+ {
+ return new Canvas();
+ }
+
+ @Override
+ public void removeCallback(SurfaceHolder.Callback callback)
+ {
+ }
+
+ @Override
+ public void setFixedSize(int width, int height)
+ {
+ }
+
+ @Override
+ public void setFormat(int format)
+ {
+ }
+
+ @Override
+ public void setKeepScreenOn(boolean screenOn)
+ {
+ }
+
+ @Override
+ public void setSizeFromLayout()
+ {
+ }
+
+ @Override
+ public void setType(int type)
+ {
+ }
+
+ @Override
+ public void unlockCanvasAndPost(Canvas canvas)
+ {
+ }
+}
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceTextureListener.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceTextureListener.java
new file mode 100644
index 000000000..4974f9301
--- /dev/null
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtSurfaceTextureListener.java
@@ -0,0 +1,24 @@
+// Copyright (C) 2016 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+package org.qtproject.qt.android.multimedia;
+
+import android.graphics.SurfaceTexture;
+
+public class QtSurfaceTextureListener implements SurfaceTexture.OnFrameAvailableListener
+{
+ private final long m_id;
+
+ public QtSurfaceTextureListener(long id)
+ {
+ m_id = id;
+ }
+
+ @Override
+ public void onFrameAvailable(SurfaceTexture surfaceTexture)
+ {
+ notifyFrameAvailable(m_id);
+ }
+
+ private static native void notifyFrameAvailable(long id);
+}
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtVideoDeviceManager.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtVideoDeviceManager.java
new file mode 100644
index 000000000..2e11e62a2
--- /dev/null
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtVideoDeviceManager.java
@@ -0,0 +1,247 @@
+// Copyright (C) 2022 The Qt Company Ltd.
+// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
+
+package org.qtproject.qt.android.multimedia;
+
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.os.Build;
import android.util.Log;
import android.util.Range;
import android.util.Size;

import java.lang.String;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
+
+public class QtVideoDeviceManager {
+
+ CameraManager mCameraManager;
+ Map<String, CameraCharacteristics> cache;
+
+ public QtVideoDeviceManager(Context context) {
+ mCameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+ cache = new WeakHashMap<String, CameraCharacteristics>();
+ }
+
+ public CameraCharacteristics getCameraCharacteristics(String cameraId) {
+
+ if (cache.containsKey(cameraId))
+ return cache.get(cameraId);
+
+ try {
+ CameraCharacteristics cameraCharacteristics = mCameraManager.getCameraCharacteristics(cameraId);
+ cache.put(cameraId, cameraCharacteristics);
+ return cameraCharacteristics;
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ return null;
+ }
+
+ static private boolean isSoftwareCodec(String longCodecName) {
+ longCodecName = longCodecName.toLowerCase();
+ return longCodecName.startsWith("omx.google.") || longCodecName.startsWith("c2.android.")
+ || !(longCodecName.startsWith("omx.") || longCodecName.startsWith("c2."));
+ }
+
+ private enum CODEC {
+ DECODER,
+ ENCODER
+ }
+ static private String[] getHWVideoCodecs(CODEC expectedType) {
+ MediaCodecList mediaCodecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
+ MediaCodecInfo[] mediaCodecInfo = mediaCodecList.getCodecInfos();
+ Set<String> codecs = new HashSet<String>();
+
+ for (MediaCodecInfo codecInfo : mediaCodecInfo) {
+ CODEC currentType = codecInfo.isEncoder() ? CODEC.ENCODER : CODEC.DECODER;
+ if (currentType == expectedType && !isSoftwareCodec(codecInfo.getName())) {
+ String[] supportedTypes = codecInfo.getSupportedTypes();
+ for (String type : supportedTypes) {
+ if (type.startsWith("video/"))
+ codecs.add(type.substring(6));
+ }
+ }
+ }
+ return codecs.toArray(new String[codecs.size()]);
+ }
+
+ static public String[] getHWVideoDecoders() { return getHWVideoCodecs(CODEC.DECODER); }
+ static public String[] getHWVideoEncoders() { return getHWVideoCodecs(CODEC.ENCODER); }
+
+ public String[] getCameraIdList() {
+ try {
+ return mCameraManager.getCameraIdList();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ return null;
+ }
+
+ public int getSensorOrientation(String cameraId) {
+ CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
+ if (characteristics == null)
+ return 0;
+ return characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
+ }
+
+ public int getLensFacing(String cameraId) {
+ CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
+ if (characteristics == null)
+ return 0;
+ return characteristics.get(CameraCharacteristics.LENS_FACING);
+ }
+
+ public String[] getFpsRange(String cameraId) {
+
+ CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
+ if (characteristics == null)
+ return new String[0];
+
+ Range<Integer>[] ranges = characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
+
+ String[] fps = new String[ranges.length];
+
+ for (int index = 0; index < ranges.length; index++) {
+ fps[index] = ranges[index].toString();
+ }
+
+ return fps;
+ }
+
+ public float getMaxZoom(String cameraId) {
+
+ float maxZoom = 1.0f;
+ final CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
+ if (characteristics != null)
+ maxZoom = characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
+ return maxZoom;
+ }
+
+ public Rect getActiveArraySize(String cameraId) {
+ Rect activeArraySize = new Rect();
+ final CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
+ if (characteristics != null)
+ activeArraySize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+ return activeArraySize;
+ }
+
+ static final int maxResolution = 3840*2160; // 4k resolution
+ public String[] getStreamConfigurationsSizes(String cameraId, int imageFormat) {
+
+ CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
+ if (characteristics == null)
+ return new String[0];
+
+ StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+ Size[] sizes = map.getOutputSizes(imageFormat);
+ if (sizes == null)
+ return new String[0];
+
+ ArrayList<String> stream = new ArrayList<>();
+
+ for (int index = 0; index < sizes.length; index++) {
+ if (sizes[index].getWidth() * sizes[index].getHeight() <= maxResolution)
+ stream.add(sizes[index].toString());
+ }
+
+ return stream.toArray(new String[0]);
+ }
+
+ public int stringToControlAEMode(String mode) {
+ switch (mode) {
+ case "off":
+ return CaptureRequest.CONTROL_AE_MODE_ON;
+ case "auto":
+ return CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH;
+ case "on":
+ return CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH;
+ case "redeye":
+ return CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
+ case "external":
+ return CaptureRequest.CONTROL_AE_MODE_ON_EXTERNAL_FLASH;
+ default:
+ return -1;
+ }
+ }
+
+ public String controlAEModeToString(int mode) {
+ switch (mode) {
+ case CaptureRequest.CONTROL_AE_MODE_ON:
+ return "off";
+ case CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH:
+ return "auto";
+ case CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH:
+ return "on";
+ case CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE:
+ return "redeye";
+ case CaptureRequest.CONTROL_AE_MODE_ON_EXTERNAL_FLASH:
+ return "external";
+ case CaptureRequest.CONTROL_AE_MODE_OFF:
+ default:
+ return "unknown";
+ }
+ }
+
+ public int[] getSupportedAfModes(String cameraId) {
+
+ CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
+ if (characteristics == null)
+ return new int[0];
+
+ return characteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
+ }
+
+ public String[] getSupportedFlashModes(String cameraId) {
+
+ CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
+ if (characteristics == null)
+ return new String[0];
+
+ int supportedFlashModes[] = characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES);
+ ArrayList<String> supportedFlashModesList = new ArrayList<String>();
+ for (int index = 0; index < supportedFlashModes.length; index++) {
+ supportedFlashModesList.add(controlAEModeToString(supportedFlashModes[index]));
+ }
+
+ String[] ret = new String[ supportedFlashModesList.size() ];
+ return supportedFlashModesList.toArray(ret);
+ }
+
+ static public boolean isEmulator()
+ {
+ return ((Build.BRAND.startsWith("generic") && Build.DEVICE.startsWith("generic"))
+ || Build.FINGERPRINT.startsWith("generic")
+ || Build.FINGERPRINT.startsWith("unknown")
+ || Build.HARDWARE.contains("goldfish")
+ || Build.HARDWARE.contains("ranchu")
+ || Build.MODEL.contains("google_sdk")
+ || Build.MODEL.contains("Emulator")
+ || Build.MODEL.contains("Android SDK built for x86")
+ || Build.MANUFACTURER.contains("Genymotion")
+ || Build.PRODUCT.contains("sdk")
+ || Build.PRODUCT.contains("vbox86p")
+ || Build.PRODUCT.contains("emulator")
+ || Build.PRODUCT.contains("simulator"));
+ }
+
+ public boolean isTorchModeSupported(String cameraId) {
+ boolean ret = false;
+ final CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
+ if (characteristics != null)
+ ret = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
+ return ret;
+ }
+}