summary | refs | log | tree | commit | diff | stats
diff options
context:
space:
mode:
authorBartlomiej Moskal <bartlomiej.moskal@qt.io>2023-02-10 15:09:45 +0100
committerQt Cherry-pick Bot <cherrypick_bot@qt-project.org>2023-02-15 14:16:42 +0000
commit5663b1201d9f1b0d7beb3f3a0b3c19c8bb8ebdf4 (patch)
tree26458c34f1d7be37dcfe9a1bea2b4db65d718a55
parent2ce2ca9ae74099d051bce723ac605027b9a161f6 (diff)
android-backend: Route audio from input to output device
CaptureSession has the functionality of routing audio to an output device by assigning an AudioOutput object to the audioOutput property. This was missing in the android-backend. This commit adds an implementation for streaming audio from the chosen input device to the chosen output device. Fixes: QTBUG-109659 Change-Id: I07dfe2188dd9e98a740ccaa6188daaf8e34ca0df Reviewed-by: Assam Boudjelthia <assam.boudjelthia@qt.io> (cherry picked from commit b5e0c68b965bd9e6ea99e1c20994d33194324220) Reviewed-by: Qt Cherry-pick Bot <cherrypick_bot@qt-project.org>
-rw-r--r--  src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java  | 72
-rw-r--r--  src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp             | 27
-rw-r--r--  src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h             |  2
-rw-r--r--  src/plugins/multimedia/android/wrappers/jni/androidmediaplayer.cpp                 | 14
-rw-r--r--  src/plugins/multimedia/android/wrappers/jni/androidmediaplayer_p.h                 |  2
5 files changed, 116 insertions, 1 deletion
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
index 98690bfb5..09c80b84f 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
@@ -13,7 +13,10 @@ import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.media.AudioDeviceInfo;
+import android.media.AudioFormat;
import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.media.AudioTrack;
import android.media.MediaRecorder;
import android.util.Log;
@@ -22,6 +25,14 @@ public class QtAudioDeviceManager
private static final String TAG = "QtAudioDeviceManager";
static private AudioManager m_audioManager = null;
static private final AudioDevicesReceiver m_audioDevicesReceiver = new AudioDevicesReceiver();
+ static private AudioRecord m_recorder = null;
+ static private AudioTrack m_streamPlayer = null;
+ static private Thread m_streamingThread = null;
+ static private boolean m_isStreaming = false;
+ static private final int m_sampleRate = 8000;
+ static private final int m_channels = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ static private final int m_audioFormat = AudioFormat.ENCODING_PCM_16BIT;
+ static private final int m_bufferSize = AudioRecord.getMinBufferSize(m_sampleRate, m_channels, m_audioFormat);
public static native void onAudioInputDevicesUpdated();
public static native void onAudioOutputDevicesUpdated();
@@ -251,4 +262,65 @@ public class QtAudioDeviceManager
m_audioManager.setSpeakerphoneOn(speakerOn);
}
+
+ private static void streamSound()
+ {
+ byte data[] = new byte[m_bufferSize];
+ while (m_isStreaming) {
+ m_recorder.read(data, 0, m_bufferSize);
+ m_streamPlayer.play();
+ m_streamPlayer.write(data, 0, m_bufferSize);
+ m_streamPlayer.stop();
+ }
+ }
+
+ private static void startSoundStreaming(int inputId, int outputId)
+ {
+ if (m_isStreaming)
+ stopSoundStreaming();
+
+ m_recorder = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, m_sampleRate, m_channels,
+ m_audioFormat, m_bufferSize);
+ m_streamPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, m_sampleRate, m_channels,
+ m_audioFormat, m_bufferSize, AudioTrack.MODE_STREAM);
+
+ final AudioDeviceInfo[] devices = m_audioManager.getDevices(AudioManager.GET_DEVICES_ALL);
+ for (AudioDeviceInfo deviceInfo : devices) {
+ if (deviceInfo.getId() == outputId) {
+ m_streamPlayer.setPreferredDevice(deviceInfo);
+ } else if (deviceInfo.getId() == inputId) {
+ m_recorder.setPreferredDevice(deviceInfo);
+ }
+ }
+
+ m_recorder.startRecording();
+ m_isStreaming = true;
+
+ m_streamingThread = new Thread(new Runnable() {
+ public void run() {
+ streamSound();
+ }
+ });
+
+ m_streamingThread.start();
+ }
+
+ private static void stopSoundStreaming()
+ {
+ if (!m_isStreaming)
+ return;
+
+ m_isStreaming = false;
+ try {
+ m_streamingThread.join();
+ m_streamingThread = null;
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ m_recorder.stop();
+ m_recorder.release();
+ m_streamPlayer.release();
+ m_streamPlayer = null;
+ m_recorder = null;
+ }
}
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp b/src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp
index afee45e2f..920249135 100644
--- a/src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp
+++ b/src/plugins/multimedia/android/mediacapture/qandroidcapturesession.cpp
@@ -6,6 +6,7 @@
#include "androidcamera_p.h"
#include "qandroidcamerasession_p.h"
#include "qaudioinput.h"
+#include "qaudiooutput.h"
#include "androidmediaplayer_p.h"
#include "androidmultimediautils_p.h"
#include "qandroidmultimediautils_p.h"
@@ -39,6 +40,8 @@ QAndroidCaptureSession::~QAndroidCaptureSession()
{
stop();
m_mediaRecorder = nullptr;
+ if (m_audioInput && m_audioOutput)
+ AndroidMediaPlayer::stopSoundStreaming();
}
void QAndroidCaptureSession::setCameraSession(QAndroidCameraSession *cameraSession)
@@ -75,8 +78,10 @@ void QAndroidCaptureSession::setAudioInput(QPlatformAudioInput *input)
m_audioInputChanged = connect(m_audioInput->q, &QAudioInput::deviceChanged, this, [this]() {
if (m_state == QMediaRecorder::RecordingState)
m_mediaRecorder->setAudioInput(m_audioInput->device.id());
+ updateStreamingState();
});
}
+ updateStreamingState();
}
void QAndroidCaptureSession::setAudioOutput(QPlatformAudioOutput *output)
@@ -84,10 +89,30 @@ void QAndroidCaptureSession::setAudioOutput(QPlatformAudioOutput *output)
if (m_audioOutput == output)
return;
+ if (m_audioOutput)
+ disconnect(m_audioOutputChanged);
+
m_audioOutput = output;
- if (m_audioOutput)
+ if (m_audioOutput) {
+ m_audioOutputChanged = connect(m_audioOutput->q, &QAudioOutput::deviceChanged, this,
+ [this] () {
+ AndroidMediaPlayer::setAudioOutput(m_audioOutput->device.id());
+ updateStreamingState();
+ });
AndroidMediaPlayer::setAudioOutput(m_audioOutput->device.id());
+ }
+ updateStreamingState();
+}
+
+void QAndroidCaptureSession::updateStreamingState()
+{
+ if (m_audioInput && m_audioOutput) {
+ AndroidMediaPlayer::startSoundStreaming(m_audioInput->device.id().toInt(),
+ m_audioOutput->device.id().toInt());
+ } else {
+ AndroidMediaPlayer::stopSoundStreaming();
+ }
}
QMediaRecorder::RecorderState QAndroidCaptureSession::state() const
diff --git a/src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h b/src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h
index 42feb4c1e..ab91fc3ef 100644
--- a/src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h
+++ b/src/plugins/multimedia/android/mediacapture/qandroidcapturesession_p.h
@@ -117,6 +117,7 @@ private:
CaptureProfile getProfile(int id);
void restartViewfinder();
+ void updateStreamingState();
QAndroidMediaEncoder *m_mediaEncoder = nullptr;
std::shared_ptr<AndroidMediaRecorder> m_mediaRecorder;
@@ -144,6 +145,7 @@ private:
QList<qreal> m_supportedFramerates;
QMetaObject::Connection m_audioInputChanged;
+ QMetaObject::Connection m_audioOutputChanged;
QMetaObject::Connection m_connOpenCamera;
QMetaObject::Connection m_connActiveChangedCamera;
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidmediaplayer.cpp b/src/plugins/multimedia/android/wrappers/jni/androidmediaplayer.cpp
index 51e0aa46f..67907dcd7 100644
--- a/src/plugins/multimedia/android/wrappers/jni/androidmediaplayer.cpp
+++ b/src/plugins/multimedia/android/wrappers/jni/androidmediaplayer.cpp
@@ -252,6 +252,20 @@ void AndroidMediaPlayer::unblockAudio()
mAudioBlocked = false;
}
+void AndroidMediaPlayer::startSoundStreaming(const int inputId, const int outputId)
+{
+ QJniObject::callStaticMethod<void>("org/qtproject/qt/android/multimedia/QtAudioDeviceManager",
+ "startSoundStreaming",
+ inputId,
+ outputId);
+}
+
+void AndroidMediaPlayer::stopSoundStreaming()
+{
+ QJniObject::callStaticMethod<void>(
+ "org/qtproject/qt/android/multimedia/QtAudioDeviceManager", "stopSoundStreaming");
+}
+
bool AndroidMediaPlayer::setPlaybackRate(qreal rate)
{
if (QNativeInterface::QAndroidApplication::sdkVersion() < 23) {
diff --git a/src/plugins/multimedia/android/wrappers/jni/androidmediaplayer_p.h b/src/plugins/multimedia/android/wrappers/jni/androidmediaplayer_p.h
index 46cd73688..66095b114 100644
--- a/src/plugins/multimedia/android/wrappers/jni/androidmediaplayer_p.h
+++ b/src/plugins/multimedia/android/wrappers/jni/androidmediaplayer_p.h
@@ -100,6 +100,8 @@ public:
void setDataSource(const QNetworkRequest &request);
void prepareAsync();
void setVolume(int volume);
+ static void startSoundStreaming(const int inputId, const int outputId);
+ static void stopSoundStreaming();
bool setPlaybackRate(qreal rate);
void setDisplay(AndroidSurfaceTexture *surfaceTexture);
static bool setAudioOutput(const QByteArray &deviceId);