Diffstat (limited to 'src/android/jar/src')
-rw-r--r--  src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java   | 79
-rw-r--r--  src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java              | 24
-rw-r--r--  src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java      | 37
-rw-r--r--  src/android/jar/src/org/qtproject/qt/android/multimedia/QtVideoDeviceManager.java   | 26
4 files changed, 111 insertions, 55 deletions
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
index 83d704838..2b6fcc2dc 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
@@ -4,20 +4,16 @@
package org.qtproject.qt.android.multimedia;
import java.util.ArrayList;
-import android.bluetooth.BluetoothA2dp;
-import android.bluetooth.BluetoothAdapter;
-import android.bluetooth.BluetoothDevice;
-import android.bluetooth.BluetoothHeadset;
-import android.content.BroadcastReceiver;
import android.content.Context;
-import android.content.Intent;
-import android.content.IntentFilter;
+import android.media.AudioDeviceCallback;
import android.media.AudioDeviceInfo;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.media.MediaRecorder;
+import android.os.Handler;
+import android.os.Looper;
import android.util.Log;
public class QtAudioDeviceManager
@@ -25,10 +21,12 @@ public class QtAudioDeviceManager
private static final String TAG = "QtAudioDeviceManager";
static private AudioManager m_audioManager = null;
static private final AudioDevicesReceiver m_audioDevicesReceiver = new AudioDevicesReceiver();
+ static private Handler handler = new Handler(Looper.getMainLooper());
static private AudioRecord m_recorder = null;
static private AudioTrack m_streamPlayer = null;
static private Thread m_streamingThread = null;
static private boolean m_isStreaming = false;
+ static private boolean m_useSpeaker = false;
static private final int m_sampleRate = 8000;
static private final int m_channels = AudioFormat.CHANNEL_CONFIGURATION_MONO;
static private final int m_audioFormat = AudioFormat.ENCODING_PCM_16BIT;
@@ -37,36 +35,37 @@ public class QtAudioDeviceManager
public static native void onAudioInputDevicesUpdated();
public static native void onAudioOutputDevicesUpdated();
- static private class AudioDevicesReceiver extends BroadcastReceiver
- {
+ static private void updateDeviceList() {
+ onAudioInputDevicesUpdated();
+ onAudioOutputDevicesUpdated();
+ if (m_useSpeaker) {
+ final AudioDeviceInfo[] audioDevices =
+ m_audioManager.getDevices(AudioManager.GET_DEVICES_OUTPUTS);
+ setAudioOutput(getModeForSpeaker(audioDevices), false, true);
+ }
+ }
+
+ private static class AudioDevicesReceiver extends AudioDeviceCallback {
+ @Override
+ public void onAudioDevicesAdded(AudioDeviceInfo[] addedDevices) {
+ updateDeviceList();
+ }
+
@Override
- public void onReceive(Context context, Intent intent) {
- onAudioInputDevicesUpdated();
- onAudioOutputDevicesUpdated();
+ public void onAudioDevicesRemoved(AudioDeviceInfo[] removedDevices) {
+ updateDeviceList();
}
}
- public static void registerAudioHeadsetStateReceiver(Context context)
+
+ public static void registerAudioHeadsetStateReceiver()
{
- IntentFilter audioDevicesFilter = new IntentFilter();
- audioDevicesFilter.addAction(AudioManager.ACTION_HEADSET_PLUG);
- audioDevicesFilter.addAction(AudioManager.ACTION_HDMI_AUDIO_PLUG);
- audioDevicesFilter.addAction(BluetoothDevice.ACTION_ACL_CONNECTED);
- audioDevicesFilter.addAction(BluetoothDevice.ACTION_ACL_DISCONNECTED);
- audioDevicesFilter.addAction(BluetoothDevice.ACTION_ACL_DISCONNECT_REQUESTED);
- audioDevicesFilter.addAction(BluetoothAdapter.ACTION_STATE_CHANGED);
- audioDevicesFilter.addAction(BluetoothDevice.ACTION_BOND_STATE_CHANGED);
- audioDevicesFilter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED);
- audioDevicesFilter.addAction(AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED);
- audioDevicesFilter.addAction(BluetoothA2dp.ACTION_CONNECTION_STATE_CHANGED);
- audioDevicesFilter.addAction(BluetoothA2dp.ACTION_PLAYING_STATE_CHANGED);
-
- context.registerReceiver(m_audioDevicesReceiver, audioDevicesFilter);
+ m_audioManager.registerAudioDeviceCallback(m_audioDevicesReceiver, handler);
}
- public static void unregisterAudioHeadsetStateReceiver(Context context)
+ public static void unregisterAudioHeadsetStateReceiver()
{
- context.unregisterReceiver(m_audioDevicesReceiver);
+ m_audioManager.unregisterAudioDeviceCallback(m_audioDevicesReceiver);
}
static public void setContext(Context context)
@@ -226,8 +225,27 @@ public class QtAudioDeviceManager
return ret;
}
+ private static int getModeForSpeaker(AudioDeviceInfo[] audioDevices)
+ {
+ // If we want to force the device to use the speaker while a Bluetooth or wired headset is connected,
+ // we need to use MODE_IN_COMMUNICATION. Otherwise MODE_NORMAL can be used.
+ for (AudioDeviceInfo deviceInfo : audioDevices) {
+ switch (deviceInfo.getType()) {
+ case AudioDeviceInfo.TYPE_BLUETOOTH_A2DP:
+ case AudioDeviceInfo.TYPE_BLUETOOTH_SCO:
+ case AudioDeviceInfo.TYPE_WIRED_HEADSET:
+ case AudioDeviceInfo.TYPE_WIRED_HEADPHONES:
+ return AudioManager.MODE_IN_COMMUNICATION;
+ default: break;
+ }
+ }
+ return AudioManager.MODE_NORMAL;
+ }
+
+
private static boolean setAudioOutput(int id)
{
+ m_useSpeaker = false;
final AudioDeviceInfo[] audioDevices =
m_audioManager.getDevices(AudioManager.GET_DEVICES_OUTPUTS);
for (AudioDeviceInfo deviceInfo : audioDevices) {
@@ -239,7 +257,8 @@ public class QtAudioDeviceManager
setAudioOutput(AudioManager.MODE_IN_COMMUNICATION, true, false);
return true;
case AudioDeviceInfo.TYPE_BUILTIN_SPEAKER:
- setAudioOutput(AudioManager.STREAM_MUSIC, false, true);
+ m_useSpeaker = true;
+ setAudioOutput(getModeForSpeaker(audioDevices), false, true);
return true;
case AudioDeviceInfo.TYPE_WIRED_HEADSET:
case AudioDeviceInfo.TYPE_WIRED_HEADPHONES:
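Note on the hunks above: the broad Bluetooth/headset BroadcastReceiver is replaced with AudioManager's AudioDeviceCallback. A minimal standalone sketch of that pattern follows; the DeviceWatcher class and its field names are illustrative, not part of the patch:

    import android.content.Context;
    import android.media.AudioDeviceCallback;
    import android.media.AudioDeviceInfo;
    import android.media.AudioManager;
    import android.os.Handler;
    import android.os.Looper;

    class DeviceWatcher {
        private final AudioManager audioManager;
        private final AudioDeviceCallback callback = new AudioDeviceCallback() {
            @Override
            public void onAudioDevicesAdded(AudioDeviceInfo[] added) {
                // Refresh the cached device list when an input/output appears.
            }
            @Override
            public void onAudioDevicesRemoved(AudioDeviceInfo[] removed) {
                // Refresh the cached device list when a device disappears.
            }
        };

        DeviceWatcher(Context context) {
            audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
        }

        void start() {
            // Callbacks are delivered on the thread of the supplied Handler (the main
            // looper here), which is why the patch keeps a Handler(Looper.getMainLooper()).
            audioManager.registerAudioDeviceCallback(callback, new Handler(Looper.getMainLooper()));
        }

        void stop() {
            audioManager.unregisterAudioDeviceCallback(callback);
        }
    }

Unlike the removed IntentFilter approach, registration needs no Context, which is why registerAudioHeadsetStateReceiver() and unregisterAudioHeadsetStateReceiver() lose their Context parameter.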
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java
index 39feff6c7..ac8140197 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtCamera2.java
@@ -24,6 +24,7 @@ import android.graphics.ImageFormat;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
+import android.util.Range;
import android.view.Surface;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
@@ -57,11 +58,12 @@ public class QtCamera2 {
private int mState = STATE_PREVIEW;
private Object mStartMutex = new Object();
private boolean mIsStarted = false;
- private static int MaxNumberFrames = 10;
+ private static int MaxNumberFrames = 12;
private int mFlashMode = CaptureRequest.CONTROL_AE_MODE_ON;
private int mTorchMode = CameraMetadata.FLASH_MODE_OFF;
private int mAFMode = CaptureRequest.CONTROL_AF_MODE_OFF;
private float mZoomFactor = 1.0f;
+ private Range<Integer> mFpsRange = null;
private QtExifDataHandler mExifDataHandler = null;
native void onCameraOpened(String cameraId);
@@ -261,7 +263,14 @@ public class QtCamera2 {
}
};
- public boolean addImageReader(int width, int height, int format) {
+
+ public void prepareCamera(int width, int height, int format, int minFps, int maxFps) {
+
+ addImageReader(width, height, format);
+ setFrameRate(minFps, maxFps);
+ }
+
+ private void addImageReader(int width, int height, int format) {
if (mImageReader != null)
removeSurface(mImageReader.getSurface());
@@ -276,8 +285,14 @@ public class QtCamera2 {
mCapturedPhotoReader = ImageReader.newInstance(width, height, format, MaxNumberFrames);
mCapturedPhotoReader.setOnImageAvailableListener(mOnPhotoAvailableListener, mBackgroundHandler);
addSurface(mCapturedPhotoReader.getSurface());
+ }
+
+ private void setFrameRate(int minFrameRate, int maxFrameRate) {
- return true;
+ if (minFrameRate <= 0 || maxFrameRate <= 0)
+ mFpsRange = null;
+ else
+ mFpsRange = new Range<>(minFrameRate, maxFrameRate);
}
public boolean addSurface(Surface surface) {
@@ -335,7 +350,8 @@ public class QtCamera2 {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT, CameraMetadata.CONTROL_CAPTURE_INTENT_VIDEO_RECORD);
if (mZoomFactor != 1.0f)
mPreviewRequestBuilder.set(CaptureRequest.SCALER_CROP_REGION, getScalerCropRegion());
-
+ if (mFpsRange != null)
+ mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, mFpsRange);
mPreviewRequest = mPreviewRequestBuilder.build();
mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mBackgroundHandler);
mIsStarted = true;
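Note on the hunks above: setFrameRate() stores the requested range and start() applies it via CONTROL_AE_TARGET_FPS_RANGE without validation. A hedged sketch of how a caller could pick a range the camera actually advertises before invoking prepareCamera(); the helper class name and preferredMax parameter are assumptions for illustration:

    import android.hardware.camera2.CameraCharacteristics;
    import android.util.Range;

    final class FpsRangeHelper {
        // Returns the advertised AE FPS range whose upper bound is closest to
        // preferredMax, or null to let the camera fall back to its default range.
        static Range<Integer> pickFpsRange(CameraCharacteristics characteristics, int preferredMax) {
            Range<Integer>[] ranges =
                    characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
            if (ranges == null || ranges.length == 0)
                return null;

            Range<Integer> best = ranges[0];
            for (Range<Integer> r : ranges) {
                if (Math.abs(r.getUpper() - preferredMax) < Math.abs(best.getUpper() - preferredMax))
                    best = r;
            }
            return best;
        }
    }

A range that is not listed in CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES may be rejected by the capture session, so filtering like this before the repeating request keeps the request valid.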
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java
index bfac6670f..355308958 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtMultimediaUtils.java
@@ -125,24 +125,25 @@ public class QtMultimediaUtils
return codecs;
}
-public static String getMimeType(Context context, String url)
-{
- Uri parsedUri = Uri.parse(url);
- String type = null;
-
- try {
- String scheme = parsedUri.getScheme();
- if (scheme != null && scheme.contains("content")) {
- ContentResolver cR = context.getContentResolver();
- type = cR.getType(parsedUri);
- } else {
- String extension = MimeTypeMap.getFileExtensionFromUrl(url);
- if (extension != null)
+ public static String getMimeType(Context context, String url)
+ {
+ Uri parsedUri = Uri.parse(url);
+ String type = null;
+
+ try {
+ String scheme = parsedUri.getScheme();
+ if (scheme != null && scheme.contains("content")) {
+ ContentResolver cR = context.getContentResolver();
+ type = cR.getType(parsedUri);
+ } else {
+ String extension = MimeTypeMap.getFileExtensionFromUrl(url);
+ if (extension != null)
type = MimeTypeMap.getSingleton().getMimeTypeFromExtension(extension);
- }
- } catch (Exception e) {
- Log.e(QtTAG, "getMimeType(): " + e.toString());
+ }
+ } catch (Exception e) {
+ Log.e(QtTAG, "getMimeType(): " + e.toString());
+ }
+ return type;
}
- return type;
-}
}
+
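Note on the hunk above: getMimeType() is only re-indented into the class body; its behaviour is unchanged. An illustrative call site, with a made-up file path and an assumed valid Context in scope:

    // Plain file URLs are resolved through MimeTypeMap ("mp4" -> "video/mp4");
    // content:// URIs are resolved through the ContentResolver instead.
    String mime = QtMultimediaUtils.getMimeType(context, "file:///sdcard/Movies/clip.mp4");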
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtVideoDeviceManager.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtVideoDeviceManager.java
index b3ba8f3dc..2e11e62a2 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtVideoDeviceManager.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtVideoDeviceManager.java
@@ -13,6 +13,7 @@ import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaCodecList;
import android.media.MediaCodecInfo;
+import android.os.Build;
import android.util.Range;
import android.util.Size;
import android.util.Log;
@@ -137,6 +138,7 @@ public class QtVideoDeviceManager {
return activeArraySize;
}
+ static final int maxResolution = 3840*2160; // 4k resolution
public String[] getStreamConfigurationsSizes(String cameraId, int imageFormat) {
CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
@@ -148,13 +150,14 @@ public class QtVideoDeviceManager {
if (sizes == null)
return new String[0];
- String[] stream = new String[sizes.length];
+ ArrayList<String> stream = new ArrayList<>();
for (int index = 0; index < sizes.length; index++) {
- stream[index] = sizes[index].toString();
+ if (sizes[index].getWidth() * sizes[index].getHeight() <= maxResolution)
+ stream.add(sizes[index].toString());
}
- return stream;
+ return stream.toArray(new String[0]);
}
public int stringToControlAEMode(String mode) {
@@ -217,6 +220,23 @@ public class QtVideoDeviceManager {
return supportedFlashModesList.toArray(ret);
}
+ static public boolean isEmulator()
+ {
+ return ((Build.BRAND.startsWith("generic") && Build.DEVICE.startsWith("generic"))
+ || Build.FINGERPRINT.startsWith("generic")
+ || Build.FINGERPRINT.startsWith("unknown")
+ || Build.HARDWARE.contains("goldfish")
+ || Build.HARDWARE.contains("ranchu")
+ || Build.MODEL.contains("google_sdk")
+ || Build.MODEL.contains("Emulator")
+ || Build.MODEL.contains("Android SDK built for x86")
+ || Build.MANUFACTURER.contains("Genymotion")
+ || Build.PRODUCT.contains("sdk")
+ || Build.PRODUCT.contains("vbox86p")
+ || Build.PRODUCT.contains("emulator")
+ || Build.PRODUCT.contains("simulator"));
+ }
+
public boolean isTorchModeSupported(String cameraId) {
boolean ret = false;
final CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
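Note on the hunks above: getStreamConfigurationsSizes() now drops modes above the 4K cap (3840*2160 = 8,294,400 pixels), and isEmulator() is a static heuristic over well-known android.os.Build fingerprints. A hypothetical caller-side guard built on it; the class name and log tag are illustrative and assume QtVideoDeviceManager is visible from the same package:

    import android.util.Log;

    final class CameraProbe {
        private static final String TAG = "CameraProbe"; // illustrative tag

        // Emulated cameras (goldfish/ranchu) often lack torch support and high
        // resolutions, so a caller may want to skip those probes entirely.
        static boolean shouldProbeAdvancedFeatures() {
            if (QtVideoDeviceManager.isEmulator()) {
                Log.w(TAG, "Running on an emulator; skipping torch/high-resolution probes");
                return false;
            }
            return true;
        }
    }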