Diffstat (limited to 'src/android/jar/src/org/qtproject/qt/android')
-rw-r--r--  src/android/jar/src/org/qtproject/qt/android/multimedia/QtAndroidMediaPlayer.java | 25
-rw-r--r--  src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java | 78
-rw-r--r--  src/android/jar/src/org/qtproject/qt/android/multimedia/QtCameraListener.java     | 16
-rw-r--r--  src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java    | 28
4 files changed, 142 insertions, 5 deletions
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAndroidMediaPlayer.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAndroidMediaPlayer.java
index ce5dd5008..753586f21 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAndroidMediaPlayer.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAndroidMediaPlayer.java
@@ -48,6 +48,7 @@ import java.io.FileInputStream;
import android.content.Context;
import android.media.MediaPlayer;
import android.media.MediaFormat;
+import android.media.PlaybackParams;
import android.media.AudioAttributes;
import android.media.TimedText;
import android.net.Uri;
@@ -759,4 +760,28 @@ public class QtAndroidMediaPlayer
Log.w(TAG, exception);
}
}
+
+ public boolean setPlaybackRate(float rate)
+ {
+ PlaybackParams playbackParams = mMediaPlayer.getPlaybackParams();
+ playbackParams.setSpeed(rate);
+ // According to the discussion under the patch for QTBUG-61115: at least with DirectShow
+ // and GStreamer, it changes both speed and pitch. (...) We need to be consistent, so keep the pitch in step with the speed.
+ if (rate != 0.0)
+ playbackParams.setPitch(Math.abs(rate));
+
+ try {
+ mMediaPlayer.setPlaybackParams(playbackParams);
+ } catch (IllegalStateException | IllegalArgumentException e) {
+ Log.e(TAG, "Cannot set playback rate " + rate + " :" + e.toString());
+ return false;
+ }
+
+ if ((mState & State.Started) == 0 && mMediaPlayer.isPlaying()) {
+ setState(State.Started);
+ startProgressWatcher();
+ }
+
+ return true;
+ }
}
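
For reference, the new setPlaybackRate() is built on android.media.PlaybackParams (available since API level 23). The sketch below shows the same pattern as a standalone helper; the class and method names are illustrative and not part of the patch. Note that setPlaybackParams() with a non-zero speed also resumes a paused player, which is likely why the patch re-checks isPlaying() and moves the state machine to State.Started afterwards.

    import android.media.MediaPlayer;
    import android.media.PlaybackParams;
    import android.util.Log;

    // Illustrative helper, not part of the patch: applies a playback rate to an
    // already prepared MediaPlayer, keeping pitch in step with speed.
    final class PlaybackRateSketch
    {
        private static final String TAG = "PlaybackRateSketch";

        static boolean applyRate(MediaPlayer player, float rate)
        {
            PlaybackParams params = player.getPlaybackParams();
            params.setSpeed(rate);
            if (rate != 0.0f)
                params.setPitch(Math.abs(rate)); // pitch must stay positive

            try {
                // Throws if the player is in an invalid state or the rate is unsupported.
                player.setPlaybackParams(params);
            } catch (IllegalStateException | IllegalArgumentException e) {
                Log.e(TAG, "Cannot set playback rate " + rate + ": " + e);
                return false;
            }
            return true;
        }
    }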
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
index c2a8a0a51..62a2554cf 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
@@ -49,13 +49,26 @@ import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.media.AudioDeviceInfo;
+import android.media.AudioFormat;
import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.media.AudioTrack;
import android.media.MediaRecorder;
+import android.util.Log;
public class QtAudioDeviceManager
{
+ private static final String TAG = "QtAudioDeviceManager";
static private AudioManager m_audioManager = null;
static private final AudioDevicesReceiver m_audioDevicesReceiver = new AudioDevicesReceiver();
+ static private AudioRecord m_recorder = null;
+ static private AudioTrack m_streamPlayer = null;
+ static private Thread m_streamingThread = null;
+ static private boolean m_isStreaming = false;
+ static private final int m_sampleRate = 8000;
+ static private final int m_channels = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ static private final int m_audioFormat = AudioFormat.ENCODING_PCM_16BIT;
+ static private final int m_bufferSize = AudioRecord.getMinBufferSize(m_sampleRate, m_channels, m_audioFormat);
public static native void onAudioInputDevicesUpdated();
public static native void onAudioOutputDevicesUpdated();
@@ -252,7 +265,7 @@ public class QtAudioDeviceManager
setAudioOutput(AudioManager.MODE_IN_COMMUNICATION, true, false);
return true;
case AudioDeviceInfo.TYPE_BUILTIN_SPEAKER:
- setAudioOutput(AudioManager.MODE_NORMAL, false, true);
+ setAudioOutput(AudioManager.STREAM_MUSIC, false, true);
return true;
case AudioDeviceInfo.TYPE_WIRED_HEADSET:
case AudioDeviceInfo.TYPE_WIRED_HEADPHONES:
@@ -262,6 +275,8 @@ public class QtAudioDeviceManager
// It doesn't work when WIRED HEADPHONES are connected
// Earpiece has the lowest priority and setWiredHeadsetOn(boolean)
// method to force it is deprecated
+ Log.w(TAG, "Built in Earpiece may not work when "
+ + "Wired Headphones are connected");
setAudioOutput(AudioManager.MODE_IN_CALL, false, false);
return true;
default:
@@ -284,4 +299,65 @@ public class QtAudioDeviceManager
m_audioManager.setSpeakerphoneOn(speakerOn);
}
+
+ private static void streamSound()
+ {
+ byte data[] = new byte[m_bufferSize];
+ while (m_isStreaming) {
+ m_recorder.read(data, 0, m_bufferSize);
+ m_streamPlayer.play();
+ m_streamPlayer.write(data, 0, m_bufferSize);
+ m_streamPlayer.stop();
+ }
+ }
+
+ private static void startSoundStreaming(int inputId, int outputId)
+ {
+ if (m_isStreaming)
+ stopSoundStreaming();
+
+ m_recorder = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, m_sampleRate, m_channels,
+ m_audioFormat, m_bufferSize);
+ m_streamPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, m_sampleRate, m_channels,
+ m_audioFormat, m_bufferSize, AudioTrack.MODE_STREAM);
+
+ final AudioDeviceInfo[] devices = m_audioManager.getDevices(AudioManager.GET_DEVICES_ALL);
+ for (AudioDeviceInfo deviceInfo : devices) {
+ if (deviceInfo.getId() == outputId) {
+ m_streamPlayer.setPreferredDevice(deviceInfo);
+ } else if (deviceInfo.getId() == inputId) {
+ m_recorder.setPreferredDevice(deviceInfo);
+ }
+ }
+
+ m_recorder.startRecording();
+ m_isStreaming = true;
+
+ m_streamingThread = new Thread(new Runnable() {
+ public void run() {
+ streamSound();
+ }
+ });
+
+ m_streamingThread.start();
+ }
+
+ private static void stopSoundStreaming()
+ {
+ if (!m_isStreaming)
+ return;
+
+ m_isStreaming = false;
+ try {
+ m_streamingThread.join();
+ m_streamingThread = null;
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ m_recorder.stop();
+ m_recorder.release();
+ m_streamPlayer.release();
+ m_streamPlayer = null;
+ m_recorder = null;
+ }
}
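
The new startSoundStreaming()/stopSoundStreaming() helpers wire an AudioRecord capture directly into an AudioTrack. Below is a minimal, self-contained sketch of the same loopback pattern; the class name and the simplified start/stop handling are illustrative rather than taken from the patch, device routing via setPreferredDevice() is omitted, and the current CHANNEL_IN_MONO/CHANNEL_OUT_MONO constants are used instead of the deprecated CHANNEL_CONFIGURATION_MONO. The sketch also calls play() once before the copy loop rather than per buffer, which avoids restarting the track on every iteration.

    import android.media.AudioFormat;
    import android.media.AudioManager;
    import android.media.AudioRecord;
    import android.media.AudioTrack;
    import android.media.MediaRecorder;

    // Illustrative mic-to-speaker loopback, not part of the patch.
    // Requires the RECORD_AUDIO permission.
    final class LoopbackSketch
    {
        private static final int SAMPLE_RATE = 8000;
        private static final int BUFFER_SIZE = AudioRecord.getMinBufferSize(
                SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);

        private volatile boolean m_running = false;
        private Thread m_worker = null;

        void start()
        {
            final AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.DEFAULT,
                    SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
                    BUFFER_SIZE);
            final AudioTrack player = new AudioTrack(AudioManager.STREAM_MUSIC, SAMPLE_RATE,
                    AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
                    BUFFER_SIZE, AudioTrack.MODE_STREAM);

            recorder.startRecording();
            player.play(); // keep the track playing for the whole session
            m_running = true;

            m_worker = new Thread(new Runnable() {
                public void run() {
                    byte[] data = new byte[BUFFER_SIZE];
                    while (m_running) {
                        int read = recorder.read(data, 0, data.length);
                        if (read > 0)
                            player.write(data, 0, read); // forward only the bytes actually read
                    }
                    // release everything once the loop ends
                    recorder.stop();
                    recorder.release();
                    player.stop();
                    player.release();
                }
            });
            m_worker.start();
        }

        void stop() throws InterruptedException
        {
            m_running = false;
            if (m_worker != null) {
                m_worker.join();
                m_worker = null;
            }
        }
    }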
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCameraListener.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCameraListener.java
index a37544916..7f5361e77 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCameraListener.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCameraListener.java
@@ -45,11 +45,11 @@ import android.hardware.Camera.CameraInfo;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.util.Log;
+import java.io.File;
+import java.io.FileOutputStream;
import java.lang.Math;
import android.media.ExifInterface;
-import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
-import java.io.InputStream;
import java.lang.String;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
@@ -208,11 +208,16 @@ public class QtCameraListener implements Camera.ShutterCallback,
@Override
public void onPictureTaken(byte[] data, Camera camera)
{
+ File outputFile = null;
try {
- InputStream stream = new ByteArrayInputStream(data);
+ outputFile = File.createTempFile("pic_", ".jpg", QtMultimediaUtils.getCacheDirectory());
+ FileOutputStream out = new FileOutputStream(outputFile);
- ExifInterface exif = new ExifInterface(stream);
+ // We only want to read the EXIF data here
+ BitmapFactory.decodeByteArray(data, 0, data.length)
+ .compress(Bitmap.CompressFormat.JPEG, 10, out);
+ ExifInterface exif = new ExifInterface(outputFile.getAbsolutePath());
int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_UNDEFINED);
@@ -260,6 +265,9 @@ public class QtCameraListener implements Camera.ShutterCallback,
} catch (Exception e) {
Log.w(TAG, "Error fixing bitmap orientation.");
e.printStackTrace();
+ } finally {
+ if (outputFile != null && outputFile.exists())
+ outputFile.delete();
}
notifyPictureCaptured(m_cameraId, data);
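
The orientation value read back through ExifInterface is, judging by the "Error fixing bitmap orientation." message in the catch block, used by the code elided between these hunks to rotate the captured bitmap. A standalone sketch of the usual mapping is below; the class and method names are illustrative and not taken from the patch.

    import android.graphics.Bitmap;
    import android.graphics.Matrix;
    import android.media.ExifInterface;
    import java.io.IOException;

    // Illustrative helper, not part of the patch: rotates a decoded bitmap
    // according to the EXIF orientation stored in the JPEG file it came from.
    final class ExifRotationSketch
    {
        static Bitmap rotateToUpright(Bitmap bitmap, String jpegPath) throws IOException
        {
            ExifInterface exif = new ExifInterface(jpegPath);
            int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
                                                   ExifInterface.ORIENTATION_UNDEFINED);
            int degrees;
            switch (orientation) {
            case ExifInterface.ORIENTATION_ROTATE_90:  degrees = 90;  break;
            case ExifInterface.ORIENTATION_ROTATE_180: degrees = 180; break;
            case ExifInterface.ORIENTATION_ROTATE_270: degrees = 270; break;
            default:                                   return bitmap; // already upright
            }

            Matrix matrix = new Matrix();
            matrix.postRotate(degrees);
            return Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(),
                                       matrix, true);
        }
    }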
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java
index 68f63067e..cae69e7e8 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java
@@ -44,10 +44,14 @@ import android.content.Context;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.view.OrientationEventListener;
+import android.webkit.MimeTypeMap;
+import android.net.Uri;
+import android.content.ContentResolver;
import android.os.Environment;
import android.media.MediaScannerConnection;
import java.lang.String;
import java.io.File;
+import android.util.Log;
public class QtMultimediaUtils
{
@@ -72,6 +76,7 @@ public class QtMultimediaUtils
static private Context m_context = null;
static private OrientationListener m_orientationListener = null;
+ private static final String QtTAG = "Qt QtMultimediaUtils";
static public void setActivity(Activity qtMainActivity, Object qtActivityDelegate)
{
@@ -138,6 +143,8 @@ public class QtMultimediaUtils
MediaScannerConnection.scanFile(m_context, new String[] { file }, null, null);
}
+ static File getCacheDirectory() { return m_context.getCacheDir(); }
+
/*
The array of codecs is in the form:
c2.qti.vp9.decoder
@@ -153,4 +160,25 @@ public class QtMultimediaUtils
codecs[i] = codecInfoArray[i].getName();
return codecs;
}
+
+ public static String getMimeType(Context context, String url)
+ {
+ Uri parsedUri = Uri.parse(url);
+ String type = null;
+
+ try {
+ String scheme = parsedUri.getScheme();
+ if (scheme != null && scheme.contains("content")) {
+ ContentResolver cR = context.getContentResolver();
+ type = cR.getType(parsedUri);
+ } else {
+ String extension = MimeTypeMap.getFileExtensionFromUrl(url);
+ if (extension != null)
+ type = MimeTypeMap.getSingleton().getMimeTypeFromExtension(extension);
+ }
+ } catch (Exception e) {
+ Log.e(QtTAG, "getMimeType(): " + e.toString());
+ }
+ return type;
+ }
}
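
The new QtMultimediaUtils.getMimeType() is public static, so it can also be exercised directly from Java. A small usage sketch follows; the class name and both example URIs are invented for illustration.

    import android.content.Context;
    import android.util.Log;

    import org.qtproject.qt.android.multimedia.QtMultimediaUtils;

    // Illustrative usage only; the URIs below are made-up examples.
    final class MimeTypeUsageSketch
    {
        private static final String TAG = "MimeTypeUsageSketch";

        static void logTypes(Context context)
        {
            // A content:// URI is resolved through the ContentResolver...
            String fromProvider = QtMultimediaUtils.getMimeType(context,
                    "content://media/external/video/media/42");
            // ...while any other URL falls back to the file-extension map.
            String fromExtension = QtMultimediaUtils.getMimeType(context,
                    "file:///sdcard/Movies/clip.mp4");

            Log.d(TAG, fromProvider + " / " + fromExtension);
        }
    }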