summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorTarja Sundqvist <tarja.sundqvist@qt.io>2024-01-22 22:43:35 +0200
committerTarja Sundqvist <tarja.sundqvist@qt.io>2024-02-22 05:30:53 +0000
commit644e97ff4a0115e9df2579a10367c2649892741f (patch)
treea9c18bc84ebf3f8f0b0098024bcb05b3c44e6bef
parent7d7a5a372afd1e1c9f6407f8fd90b5ee56723367 (diff)
parent92848e037a6ded98a8bc4485cdad4a239685a342 (diff)
Merge remote-tracking branch 'origin/tqtc/lts-6.2.8' into tqtc/lts-6.2-opensource
Conflicts solved in a file: dependencies.yaml Change-Id: I567ea83b244225b29a759a63cdc1658d7734035e
-rw-r--r--.cmake.conf2
-rw-r--r--dependencies.yaml6
-rw-r--r--examples/multimedia/audiosource/CMakeLists.txt1
-rw-r--r--examples/multimedia/audiosource/Info.plist.in44
-rw-r--r--examples/multimedia/devices/CMakeLists.txt4
-rw-r--r--src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java72
-rw-r--r--src/multimedia/camera/qcameradevice.cpp3
-rw-r--r--src/multimedia/doc/snippets/multimedia-snippets/audio.cpp8
-rw-r--r--src/multimedia/doc/snippets/multimedia-snippets/camera.cpp2
-rw-r--r--src/multimedia/platform/android/common/qandroidvideooutput_p.h2
-rw-r--r--src/multimedia/platform/android/mediacapture/qandroidcamera.cpp10
-rw-r--r--src/multimedia/platform/android/mediacapture/qandroidcamera_p.h1
-rw-r--r--src/multimedia/platform/android/mediacapture/qandroidcamerasession.cpp18
-rw-r--r--src/multimedia/platform/android/mediacapture/qandroidcapturesession.cpp27
-rw-r--r--src/multimedia/platform/android/mediacapture/qandroidcapturesession_p.h2
-rw-r--r--src/multimedia/platform/android/mediacapture/qandroidmediacapturesession.cpp4
-rw-r--r--src/multimedia/platform/android/mediaplayer/qandroidmediaplayer.cpp11
-rw-r--r--src/multimedia/platform/android/mediaplayer/qandroidmediaplayer_p.h2
-rw-r--r--src/multimedia/platform/android/mediaplayer/qandroidmetadata.cpp11
-rw-r--r--src/multimedia/platform/android/wrappers/jni/androidcamera.cpp6
-rw-r--r--src/multimedia/platform/android/wrappers/jni/androidmediaplayer.cpp15
-rw-r--r--src/multimedia/platform/android/wrappers/jni/androidmediaplayer_p.h2
-rw-r--r--src/multimedia/platform/darwin/camera/avfcamera.mm2
-rw-r--r--src/multimedia/platform/darwin/camera/avfmediaencoder.mm15
-rw-r--r--src/multimedia/platform/gstreamer/audio/qgstreameraudiodecoder.cpp1
-rw-r--r--src/multimedia/platform/gstreamer/common/qgstreamermediaplayer.cpp34
-rw-r--r--src/multimedia/platform/gstreamer/common/qgstreamermediaplayer_p.h1
-rw-r--r--src/multimedia/platform/windows/player/mfplayersession.cpp9
-rw-r--r--src/multimedia/recording/qmediarecorder.cpp4
-rw-r--r--src/multimedia/video/qvideosink.cpp1
-rw-r--r--src/multimediaquick/qsgvideonode_p.cpp20
-rw-r--r--src/multimediaquick/qsgvideonode_p.h2
32 files changed, 290 insertions, 52 deletions
diff --git a/.cmake.conf b/.cmake.conf
index 07d90b8ad..e62fae5ef 100644
--- a/.cmake.conf
+++ b/.cmake.conf
@@ -1,2 +1,2 @@
-set(QT_REPO_MODULE_VERSION "6.2.7")
+set(QT_REPO_MODULE_VERSION "6.2.8")
set(QT_REPO_MODULE_PRERELEASE_VERSION_SEGMENT "")
diff --git a/dependencies.yaml b/dependencies.yaml
index 7150fa636..9a6286ac6 100644
--- a/dependencies.yaml
+++ b/dependencies.yaml
@@ -1,10 +1,10 @@
dependencies:
../tqtc-qtbase:
- ref: 694575a59b5370afc494fbf700eee8db1d1ec091
+ ref: 67934c103800bae50c2ec1977758d40fa8e4e507
required: true
../tqtc-qtdeclarative:
- ref: 02277e3753613d9e19bbb36367c7d2b1d13d7545
+ ref: 302ab20d46280e11042f3896460c55d8b8146e41
required: false
../tqtc-qtshadertools:
- ref: 5de9eb91f047e8001ac24f0bdf5ffc72adb37236
+ ref: f9868e2b39e539b1bb6917006b789ab7b5fec5eb
required: true
diff --git a/examples/multimedia/audiosource/CMakeLists.txt b/examples/multimedia/audiosource/CMakeLists.txt
index 692a24564..1c9259c91 100644
--- a/examples/multimedia/audiosource/CMakeLists.txt
+++ b/examples/multimedia/audiosource/CMakeLists.txt
@@ -25,6 +25,7 @@ qt_add_executable(audiosource
set_target_properties(audiosource PROPERTIES
WIN32_EXECUTABLE TRUE
MACOSX_BUNDLE TRUE
+ MACOSX_BUNDLE_INFO_PLIST ${CMAKE_CURRENT_SOURCE_DIR}/Info.plist.in
)
# special case begin
target_include_directories(audiosource PUBLIC
diff --git a/examples/multimedia/audiosource/Info.plist.in b/examples/multimedia/audiosource/Info.plist.in
new file mode 100644
index 000000000..43b966509
--- /dev/null
+++ b/examples/multimedia/audiosource/Info.plist.in
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+
+ <key>CFBundleName</key>
+ <string>${MACOSX_BUNDLE_BUNDLE_NAME}</string>
+ <key>CFBundleIdentifier</key>
+ <string>${MACOSX_BUNDLE_GUI_IDENTIFIER}</string>
+ <key>CFBundleExecutable</key>
+ <string>${MACOSX_BUNDLE_EXECUTABLE_NAME}</string>
+
+ <key>CFBundleVersion</key>
+ <string>${MACOSX_BUNDLE_BUNDLE_VERSION}</string>
+ <key>CFBundleShortVersionString</key>
+ <string>${MACOSX_BUNDLE_SHORT_VERSION_STRING}</string>
+ <key>CFBundleLongVersionString</key>
+ <string>${MACOSX_BUNDLE_LONG_VERSION_STRING}</string>
+
+ <key>LSMinimumSystemVersion</key>
+ <string>${CMAKE_OSX_DEPLOYMENT_TARGET}</string>
+
+ <key>CFBundleGetInfoString</key>
+ <string>${MACOSX_BUNDLE_INFO_STRING}</string>
+ <key>NSHumanReadableCopyright</key>
+ <string>${MACOSX_BUNDLE_COPYRIGHT}</string>
+
+ <key>CFBundleIconFile</key>
+ <string>${MACOSX_BUNDLE_ICON_FILE}</string>
+
+ <key>CFBundleDevelopmentRegion</key>
+ <string>English</string>
+
+ <key>NSMicrophoneUsageDescription</key>
+ <string>Qt Multimedia Example</string>
+
+ <key>NSSupportsAutomaticGraphicsSwitching</key>
+ <true/>
+</dict>
+</plist>
diff --git a/examples/multimedia/devices/CMakeLists.txt b/examples/multimedia/devices/CMakeLists.txt
index d75b4f23f..c7e038bb5 100644
--- a/examples/multimedia/devices/CMakeLists.txt
+++ b/examples/multimedia/devices/CMakeLists.txt
@@ -3,6 +3,10 @@
cmake_minimum_required(VERSION 3.16)
project(devices LANGUAGES CXX)
+if(ANDROID OR IOS)
+ message(FATAL_ERROR "This is a commandline tool that is not supported on mobile platforms")
+endif()
+
set(CMAKE_INCLUDE_CURRENT_DIR ON)
set(CMAKE_AUTOMOC ON)
diff --git a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
index a0651053f..62a2554cf 100644
--- a/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
+++ b/src/android/jar/src/org/qtproject/qt/android/multimedia/QtAudioDeviceManager.java
@@ -49,7 +49,10 @@ import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.media.AudioDeviceInfo;
+import android.media.AudioFormat;
import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.media.AudioTrack;
import android.media.MediaRecorder;
import android.util.Log;
@@ -58,6 +61,14 @@ public class QtAudioDeviceManager
private static final String TAG = "QtAudioDeviceManager";
static private AudioManager m_audioManager = null;
static private final AudioDevicesReceiver m_audioDevicesReceiver = new AudioDevicesReceiver();
+ static private AudioRecord m_recorder = null;
+ static private AudioTrack m_streamPlayer = null;
+ static private Thread m_streamingThread = null;
+ static private boolean m_isStreaming = false;
+ static private final int m_sampleRate = 8000;
+ static private final int m_channels = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+ static private final int m_audioFormat = AudioFormat.ENCODING_PCM_16BIT;
+ static private final int m_bufferSize = AudioRecord.getMinBufferSize(m_sampleRate, m_channels, m_audioFormat);
public static native void onAudioInputDevicesUpdated();
public static native void onAudioOutputDevicesUpdated();
@@ -288,4 +299,65 @@ public class QtAudioDeviceManager
m_audioManager.setSpeakerphoneOn(speakerOn);
}
+
+ private static void streamSound()
+ {
+ byte data[] = new byte[m_bufferSize];
+ while (m_isStreaming) {
+ m_recorder.read(data, 0, m_bufferSize);
+ m_streamPlayer.play();
+ m_streamPlayer.write(data, 0, m_bufferSize);
+ m_streamPlayer.stop();
+ }
+ }
+
+ private static void startSoundStreaming(int inputId, int outputId)
+ {
+ if (m_isStreaming)
+ stopSoundStreaming();
+
+ m_recorder = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, m_sampleRate, m_channels,
+ m_audioFormat, m_bufferSize);
+ m_streamPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, m_sampleRate, m_channels,
+ m_audioFormat, m_bufferSize, AudioTrack.MODE_STREAM);
+
+ final AudioDeviceInfo[] devices = m_audioManager.getDevices(AudioManager.GET_DEVICES_ALL);
+ for (AudioDeviceInfo deviceInfo : devices) {
+ if (deviceInfo.getId() == outputId) {
+ m_streamPlayer.setPreferredDevice(deviceInfo);
+ } else if (deviceInfo.getId() == inputId) {
+ m_recorder.setPreferredDevice(deviceInfo);
+ }
+ }
+
+ m_recorder.startRecording();
+ m_isStreaming = true;
+
+ m_streamingThread = new Thread(new Runnable() {
+ public void run() {
+ streamSound();
+ }
+ });
+
+ m_streamingThread.start();
+ }
+
+ private static void stopSoundStreaming()
+ {
+ if (!m_isStreaming)
+ return;
+
+ m_isStreaming = false;
+ try {
+ m_streamingThread.join();
+ m_streamingThread = null;
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ m_recorder.stop();
+ m_recorder.release();
+ m_streamPlayer.release();
+ m_streamPlayer = null;
+ m_recorder = null;
+ }
}
diff --git a/src/multimedia/camera/qcameradevice.cpp b/src/multimedia/camera/qcameradevice.cpp
index 5aadd4b79..426f2a66e 100644
--- a/src/multimedia/camera/qcameradevice.cpp
+++ b/src/multimedia/camera/qcameradevice.cpp
@@ -236,8 +236,7 @@ bool QCameraFormat::operator==(const QCameraFormat &other) const
\snippet multimedia-snippets/camera.cpp Camera selection
You can also use QCameraDevice to get general information about a camera
- device such as description, physical position on the system, or camera sensor
- orientation.
+ device such as description and physical position on the system.
\snippet multimedia-snippets/camera.cpp Camera info
diff --git a/src/multimedia/doc/snippets/multimedia-snippets/audio.cpp b/src/multimedia/doc/snippets/multimedia-snippets/audio.cpp
index 24045adab..701786282 100644
--- a/src/multimedia/doc/snippets/multimedia-snippets/audio.cpp
+++ b/src/multimedia/doc/snippets/multimedia-snippets/audio.cpp
@@ -156,7 +156,7 @@ void AudioOutputExample::setup()
format.setChannelCount(1);
format.setSampleFormat(QAudioFormat::UInt8);
- QAudioDevice info(QAudioDevice::defaultOutputDevice());
+ QAudioDevice info(QMediaDevices::defaultAudioOutput());
if (!info.isFormatSupported(format)) {
qWarning() << "Raw audio format not supported by backend, cannot play audio.";
return;
@@ -203,9 +203,9 @@ void AudioDeviceInfo()
//! [Setting audio format]
//! [Dumping audio formats]
- const auto deviceInfos = QMediaDevices::availableDevices(QAudioDevice::Output);
- for (const QAudioDevice &deviceInfo : deviceInfos)
- qDebug() << "Device: " << deviceInfo.description();
+ const auto devices = QMediaDevices::audioOutputs();
+ for (const QAudioDevice &device : devices)
+ qDebug() << "Device: " << device.description();
//! [Dumping audio formats]
}
diff --git a/src/multimedia/doc/snippets/multimedia-snippets/camera.cpp b/src/multimedia/doc/snippets/multimedia-snippets/camera.cpp
index 7c7b5e5a0..26e3231f1 100644
--- a/src/multimedia/doc/snippets/multimedia-snippets/camera.cpp
+++ b/src/multimedia/doc/snippets/multimedia-snippets/camera.cpp
@@ -201,8 +201,6 @@ void camera_info()
qDebug() << "The camera is on the front face of the hardware system.";
else if (cameraDevice.position() == QCameraDevice::BackFace)
qDebug() << "The camera is on the back face of the hardware system.";
-
- qDebug() << "The camera sensor orientation is " << cameraDevice.orientation() << " degrees.";
//! [Camera info]
}
diff --git a/src/multimedia/platform/android/common/qandroidvideooutput_p.h b/src/multimedia/platform/android/common/qandroidvideooutput_p.h
index 0c9af69f6..d7eedc985 100644
--- a/src/multimedia/platform/android/common/qandroidvideooutput_p.h
+++ b/src/multimedia/platform/android/common/qandroidvideooutput_p.h
@@ -82,6 +82,7 @@ public:
virtual void start() { }
virtual void stop() { }
virtual void reset() { }
+ virtual QSize getVideoSize() const { return QSize(0, 0); }
Q_SIGNALS:
void readyChanged(bool);
@@ -122,6 +123,7 @@ public:
void stop() override;
void reset() override;
void renderFrame();
+ QSize getVideoSize() const override { return m_nativeSize; }
void setSubtitle(const QString &subtitle);
private Q_SLOTS:
diff --git a/src/multimedia/platform/android/mediacapture/qandroidcamera.cpp b/src/multimedia/platform/android/mediacapture/qandroidcamera.cpp
index 3bcc93564..0db9b7bbc 100644
--- a/src/multimedia/platform/android/mediacapture/qandroidcamera.cpp
+++ b/src/multimedia/platform/android/mediacapture/qandroidcamera.cpp
@@ -60,8 +60,11 @@ QAndroidCamera::~QAndroidCamera()
void QAndroidCamera::setActive(bool active)
{
- if (m_cameraSession)
+ if (m_cameraSession) {
m_cameraSession->setActive(active);
+ } else {
+ isPendingSetActive = active;
+ }
}
bool QAndroidCamera::isActive() const
@@ -135,6 +138,11 @@ void QAndroidCamera::setCaptureSession(QPlatformMediaCaptureSession *session)
connect(m_cameraSession, &QAndroidCameraSession::activeChanged, this, &QAndroidCamera::activeChanged);
connect(m_cameraSession, &QAndroidCameraSession::error, this, &QAndroidCamera::error);
connect(m_cameraSession, &QAndroidCameraSession::opened, this, &QAndroidCamera::onCameraOpened);
+
+ if (isPendingSetActive) {
+ setActive(true);
+ isPendingSetActive = false;
+ }
}
void QAndroidCamera::setFocusMode(QCamera::FocusMode mode)
diff --git a/src/multimedia/platform/android/mediacapture/qandroidcamera_p.h b/src/multimedia/platform/android/mediacapture/qandroidcamera_p.h
index e97368698..45e97a4fa 100644
--- a/src/multimedia/platform/android/mediacapture/qandroidcamera_p.h
+++ b/src/multimedia/platform/android/mediacapture/qandroidcamera_p.h
@@ -121,6 +121,7 @@ private:
bool isFlashSupported = false;
bool isFlashAutoSupported = false;
bool isTorchSupported = false;
+ bool isPendingSetActive = false;
QCameraDevice m_cameraDev;
QMap<QCamera::WhiteBalanceMode, QString> m_supportedWhiteBalanceModes;
diff --git a/src/multimedia/platform/android/mediacapture/qandroidcamerasession.cpp b/src/multimedia/platform/android/mediacapture/qandroidcamerasession.cpp
index 2ee2b9a70..19db3ec73 100644
--- a/src/multimedia/platform/android/mediacapture/qandroidcamerasession.cpp
+++ b/src/multimedia/platform/android/mediacapture/qandroidcamerasession.cpp
@@ -326,25 +326,31 @@ void QAndroidCameraSession::applyResolution(const QSize &captureSize, bool resta
// -- Set values on camera
// fix the resolution of output based on the orientation
- QSize outputResolution = adjustedViewfinderResolution;
+ QSize cameraOutputResolution = adjustedViewfinderResolution;
+ QSize videoOutputResolution = adjustedViewfinderResolution;
+ QSize currentVideoOutputResolution = m_videoOutput ? m_videoOutput->getVideoSize() : QSize(0, 0);
const int rotation = currentCameraRotation();
// only transpose if it's valid for the preview
- if ((rotation == 90 || rotation == 270) && previewSizes.contains(outputResolution.transposed()))
- outputResolution.transpose();
+ if (rotation == 90 || rotation == 270) {
+ videoOutputResolution.transpose();
+ if (previewSizes.contains(cameraOutputResolution.transposed()))
+ cameraOutputResolution.transpose();
+ }
- if (currentViewfinderResolution != outputResolution
+ if (currentViewfinderResolution != cameraOutputResolution
+ || (m_videoOutput && currentVideoOutputResolution != videoOutputResolution)
|| currentPreviewFormat != adjustedPreviewFormat || currentFpsRange.min != adjustedFps.min
|| currentFpsRange.max != adjustedFps.max) {
if (m_videoOutput) {
- m_videoOutput->setVideoSize(outputResolution);
+ m_videoOutput->setVideoSize(videoOutputResolution);
}
// if preview is started, we have to stop it first before changing its size
if (m_previewStarted && restartPreview)
m_camera->stopPreview();
- m_camera->setPreviewSize(outputResolution);
+ m_camera->setPreviewSize(cameraOutputResolution);
m_camera->setPreviewFormat(adjustedPreviewFormat);
m_camera->setPreviewFpsRange(adjustedFps);
diff --git a/src/multimedia/platform/android/mediacapture/qandroidcapturesession.cpp b/src/multimedia/platform/android/mediacapture/qandroidcapturesession.cpp
index 075d6a603..ba296e4f7 100644
--- a/src/multimedia/platform/android/mediacapture/qandroidcapturesession.cpp
+++ b/src/multimedia/platform/android/mediacapture/qandroidcapturesession.cpp
@@ -42,6 +42,7 @@
#include "androidcamera_p.h"
#include "qandroidcamerasession_p.h"
#include "qaudioinput.h"
+#include "qaudiooutput.h"
#include "androidmediaplayer_p.h"
#include "androidmultimediautils_p.h"
#include "qandroidmultimediautils_p.h"
@@ -75,6 +76,8 @@ QAndroidCaptureSession::~QAndroidCaptureSession()
{
stop();
m_mediaRecorder = nullptr;
+ if (m_audioInput && m_audioOutput)
+ AndroidMediaPlayer::stopSoundStreaming();
}
void QAndroidCaptureSession::setCameraSession(QAndroidCameraSession *cameraSession)
@@ -111,8 +114,10 @@ void QAndroidCaptureSession::setAudioInput(QPlatformAudioInput *input)
m_audioInputChanged = connect(m_audioInput->q, &QAudioInput::deviceChanged, this, [this]() {
if (m_state == QMediaRecorder::RecordingState)
m_mediaRecorder->setAudioInput(m_audioInput->device.id());
+ updateStreamingState();
});
}
+ updateStreamingState();
}
void QAndroidCaptureSession::setAudioOutput(QPlatformAudioOutput *output)
@@ -120,10 +125,30 @@ void QAndroidCaptureSession::setAudioOutput(QPlatformAudioOutput *output)
if (m_audioOutput == output)
return;
+ if (m_audioOutput)
+ disconnect(m_audioOutputChanged);
+
m_audioOutput = output;
- if (m_audioOutput)
+ if (m_audioOutput) {
+ m_audioOutputChanged = connect(m_audioOutput->q, &QAudioOutput::deviceChanged, this,
+ [this] () {
+ AndroidMediaPlayer::setAudioOutput(m_audioOutput->device.id());
+ updateStreamingState();
+ });
AndroidMediaPlayer::setAudioOutput(m_audioOutput->device.id());
+ }
+ updateStreamingState();
+}
+
+void QAndroidCaptureSession::updateStreamingState()
+{
+ if (m_audioInput && m_audioOutput) {
+ AndroidMediaPlayer::startSoundStreaming(m_audioInput->device.id().toInt(),
+ m_audioOutput->device.id().toInt());
+ } else {
+ AndroidMediaPlayer::stopSoundStreaming();
+ }
}
QMediaRecorder::RecorderState QAndroidCaptureSession::state() const
diff --git a/src/multimedia/platform/android/mediacapture/qandroidcapturesession_p.h b/src/multimedia/platform/android/mediacapture/qandroidcapturesession_p.h
index 2f3a4fb53..c3b4926cb 100644
--- a/src/multimedia/platform/android/mediacapture/qandroidcapturesession_p.h
+++ b/src/multimedia/platform/android/mediacapture/qandroidcapturesession_p.h
@@ -153,6 +153,7 @@ private:
CaptureProfile getProfile(int id);
void restartViewfinder();
+ void updateStreamingState();
QAndroidMediaEncoder *m_mediaEncoder = nullptr;
std::shared_ptr<AndroidMediaRecorder> m_mediaRecorder;
@@ -180,6 +181,7 @@ private:
QList<qreal> m_supportedFramerates;
QMetaObject::Connection m_audioInputChanged;
+ QMetaObject::Connection m_audioOutputChanged;
QMetaObject::Connection m_connOpenCamera;
QMetaObject::Connection m_connActiveChangedCamera;
diff --git a/src/multimedia/platform/android/mediacapture/qandroidmediacapturesession.cpp b/src/multimedia/platform/android/mediacapture/qandroidmediacapturesession.cpp
index ddc690d77..ce7135466 100644
--- a/src/multimedia/platform/android/mediacapture/qandroidmediacapturesession.cpp
+++ b/src/multimedia/platform/android/mediacapture/qandroidmediacapturesession.cpp
@@ -83,10 +83,8 @@ void QAndroidMediaCaptureSession::setCamera(QPlatformCamera *camera)
m_cameraControl->setCaptureSession(nullptr);
m_cameraControl = control;
- if (m_cameraControl) {
+ if (m_cameraControl)
m_cameraControl->setCaptureSession(this);
- m_cameraControl->setActive(true);
- }
emit cameraChanged();
}
diff --git a/src/multimedia/platform/android/mediaplayer/qandroidmediaplayer.cpp b/src/multimedia/platform/android/mediaplayer/qandroidmediaplayer.cpp
index 1df5e393b..cbcc34ca3 100644
--- a/src/multimedia/platform/android/mediaplayer/qandroidmediaplayer.cpp
+++ b/src/multimedia/platform/android/mediaplayer/qandroidmediaplayer.cpp
@@ -84,6 +84,8 @@ QAndroidMediaPlayer::QAndroidMediaPlayer(QMediaPlayer *parent)
mMediaPlayer(new AndroidMediaPlayer),
mState(AndroidMediaPlayer::Uninitialized)
{
+ // Set seekable to True by default. It changes if MEDIA_INFO_NOT_SEEKABLE is received
+ seekableChanged(true);
connect(mMediaPlayer, &AndroidMediaPlayer::bufferingChanged, this,
&QAndroidMediaPlayer::onBufferingChanged);
connect(mMediaPlayer, &AndroidMediaPlayer::info, this, &QAndroidMediaPlayer::onInfo);
@@ -461,11 +463,6 @@ void QAndroidMediaPlayer::stop()
mMediaPlayer->stop();
}
-bool QAndroidMediaPlayer::isSeekable() const
-{
- return true;
-}
-
void QAndroidMediaPlayer::onInfo(qint32 what, qint32 extra)
{
StateChangeNotifier notifier(this);
@@ -545,7 +542,9 @@ void QAndroidMediaPlayer::onError(qint32 what, qint32 extra)
setMediaStatus(QMediaPlayer::InvalidMedia);
break;
case AndroidMediaPlayer::MEDIA_ERROR_BAD_THINGS_ARE_GOING_TO_HAPPEN:
- errorString += QLatin1String(" (Unknown error/Insufficient resources)");
+ errorString += mMediaContent.scheme() == QLatin1String("rtsp")
+ ? QLatin1String(" (Unknown error/Insufficient resources or RTSP may not be supported)")
+ : QLatin1String(" (Unknown error/Insufficient resources)");
error = QMediaPlayer::ResourceError;
break;
}
diff --git a/src/multimedia/platform/android/mediaplayer/qandroidmediaplayer_p.h b/src/multimedia/platform/android/mediaplayer/qandroidmediaplayer_p.h
index 6221e994d..486b0ddb3 100644
--- a/src/multimedia/platform/android/mediaplayer/qandroidmediaplayer_p.h
+++ b/src/multimedia/platform/android/mediaplayer/qandroidmediaplayer_p.h
@@ -96,8 +96,6 @@ public:
void pause() override;
void stop() override;
- bool isSeekable() const override;
-
int trackCount(TrackType trackType) override;
QMediaMetaData trackMetaData(TrackType trackType, int streamNumber) override;
int activeTrack(TrackType trackType) override;
diff --git a/src/multimedia/platform/android/mediaplayer/qandroidmetadata.cpp b/src/multimedia/platform/android/mediaplayer/qandroidmetadata.cpp
index 4765fa0ad..93d3b246d 100644
--- a/src/multimedia/platform/android/mediaplayer/qandroidmetadata.cpp
+++ b/src/multimedia/platform/android/mediaplayer/qandroidmetadata.cpp
@@ -46,6 +46,7 @@
#include <QtCore/qlist.h>
#include <QtConcurrent/qtconcurrentrun.h>
#include <QLoggingCategory>
+#include <private/qiso639_2_p.h>
QT_BEGIN_NAMESPACE
@@ -170,15 +171,7 @@ QLocale::Language getLocaleLanguage(const QString &language)
if (language == QLatin1String("und") || language == QStringLiteral("mis"))
return QLocale::AnyLanguage;
- QLocale locale(language);
-
- if (locale == QLocale::c()) {
- qCWarning(lcaMetadata) << "Could not parse language:" << language
- << ". It is not a valid Unicode CLDR language code.";
- return QLocale::AnyLanguage;
- }
-
- return locale.language();
+ return QtMultimediaPrivate::fromIso639(language.toStdString().c_str());
}
QAndroidMetaData::QAndroidMetaData(int trackType, int androidTrackType, int androidTrackNumber,
diff --git a/src/multimedia/platform/android/wrappers/jni/androidcamera.cpp b/src/multimedia/platform/android/wrappers/jni/androidcamera.cpp
index 80830565e..70a8e52ba 100644
--- a/src/multimedia/platform/android/wrappers/jni/androidcamera.cpp
+++ b/src/multimedia/platform/android/wrappers/jni/androidcamera.cpp
@@ -836,6 +836,12 @@ void AndroidCamera::getCameraInfo(int id, QCameraDevicePrivate *info)
default:
break;
}
+ // Add a number to allow correct access to cameras on systems with two
+ // (and more) front/back cameras
+ if (id > 1) {
+ info->id.append(QByteArray::number(id));
+ info->description.append(QString(" %1").arg(id));
+ }
}
QVideoFrameFormat::PixelFormat AndroidCamera::QtPixelFormatFromAndroidImageFormat(AndroidCamera::ImageFormat format)
diff --git a/src/multimedia/platform/android/wrappers/jni/androidmediaplayer.cpp b/src/multimedia/platform/android/wrappers/jni/androidmediaplayer.cpp
index 079ccb42a..1bdc517d6 100644
--- a/src/multimedia/platform/android/wrappers/jni/androidmediaplayer.cpp
+++ b/src/multimedia/platform/android/wrappers/jni/androidmediaplayer.cpp
@@ -288,6 +288,21 @@ void AndroidMediaPlayer::unblockAudio()
mAudioBlocked = false;
}
+void AndroidMediaPlayer::startSoundStreaming(const int inputId, const int outputId)
+{
+ QJniObject::callStaticMethod<void>("org/qtproject/qt/android/multimedia/QtAudioDeviceManager",
+ "startSoundStreaming",
+ "(II)V",
+ inputId,
+ outputId);
+}
+
+void AndroidMediaPlayer::stopSoundStreaming()
+{
+ QJniObject::callStaticMethod<void>(
+ "org/qtproject/qt/android/multimedia/QtAudioDeviceManager", "stopSoundStreaming");
+}
+
bool AndroidMediaPlayer::setPlaybackRate(qreal rate)
{
if (QNativeInterface::QAndroidApplication::sdkVersion() < 23) {
diff --git a/src/multimedia/platform/android/wrappers/jni/androidmediaplayer_p.h b/src/multimedia/platform/android/wrappers/jni/androidmediaplayer_p.h
index d5cf07f9c..83fb212c7 100644
--- a/src/multimedia/platform/android/wrappers/jni/androidmediaplayer_p.h
+++ b/src/multimedia/platform/android/wrappers/jni/androidmediaplayer_p.h
@@ -136,6 +136,8 @@ public:
void setDataSource(const QNetworkRequest &request);
void prepareAsync();
void setVolume(int volume);
+ static void startSoundStreaming(const int inputId, const int outputId);
+ static void stopSoundStreaming();
bool setPlaybackRate(qreal rate);
void setDisplay(AndroidSurfaceTexture *surfaceTexture);
static bool setAudioOutput(const QByteArray &deviceId);
diff --git a/src/multimedia/platform/darwin/camera/avfcamera.mm b/src/multimedia/platform/darwin/camera/avfcamera.mm
index 0b8790ea4..fe0a37822 100644
--- a/src/multimedia/platform/darwin/camera/avfcamera.mm
+++ b/src/multimedia/platform/darwin/camera/avfcamera.mm
@@ -711,7 +711,7 @@ void AVFCamera::applyFlashSettings()
};
if (mode == QCamera::FlashOff) {
- captureDevice.flashMode = AVCaptureFlashModeOff;
+ setAvFlashModeSafe(AVCaptureFlashModeOff);
} else {
if (isFlashAvailable(captureDevice)) {
if (mode == QCamera::FlashOn)
diff --git a/src/multimedia/platform/darwin/camera/avfmediaencoder.mm b/src/multimedia/platform/darwin/camera/avfmediaencoder.mm
index b669dd960..1cd6ce087 100644
--- a/src/multimedia/platform/darwin/camera/avfmediaencoder.mm
+++ b/src/multimedia/platform/darwin/camera/avfmediaencoder.mm
@@ -52,6 +52,7 @@
#include "private/qmediarecorder_p.h"
#include "private/qdarwinformatsinfo_p.h"
#include "private/qplatformaudiooutput_p.h"
+#include <private/qplatformaudioinput_p.h>
#include <QtCore/qmath.h>
#include <QtCore/qdebug.h>
@@ -135,7 +136,7 @@ void AVFMediaEncoder::updateDuration(qint64 duration)
durationChanged(m_duration);
}
-static NSDictionary *avfAudioSettings(const QMediaEncoderSettings &encoderSettings)
+static NSDictionary *avfAudioSettings(const QMediaEncoderSettings &encoderSettings, const QAudioFormat &format)
{
NSMutableDictionary *settings = [NSMutableDictionary dictionary];
@@ -204,6 +205,10 @@ static NSDictionary *avfAudioSettings(const QMediaEncoderSettings &encoderSettin
// Channels
int channelCount = encoderSettings.audioChannelCount();
+ // if no channel count is set in the encoder settings,
+ // set it to the device's format channel count
+ if (channelCount <= 0)
+ channelCount = format.channelCount();
bool isChannelCountSupported = false;
if (channelCount > 0) {
std::optional<QList<UInt32>> channelCounts = qt_supported_channel_counts_for_format(codecId);
@@ -221,7 +226,7 @@ static NSDictionary *avfAudioSettings(const QMediaEncoderSettings &encoderSettin
}
}
-if (isChannelCountSupported && channelCount > 2) {
+ if (isChannelCountSupported && channelCount > 2) {
AudioChannelLayout channelLayout;
memset(&channelLayout, 0, sizeof(AudioChannelLayout));
auto channelLayoutTags = qt_supported_channel_layout_tags_for_format(codecId, channelCount);
@@ -233,7 +238,7 @@ if (isChannelCountSupported && channelCount > 2) {
}
}
if (!isChannelCountSupported)
- channelCount = 2;
+ channelCount = 1;
[settings setObject:[NSNumber numberWithInt:channelCount] forKey:AVNumberOfChannelsKey];
if (codecId == kAudioFormatAppleLossless)
@@ -421,7 +426,9 @@ void AVFMediaEncoder::applySettings(QMediaEncoderSettings &settings)
AVFCameraSession *session = m_service->session();
// audio settings
- m_audioSettings = avfAudioSettings(settings);
+ const auto audioInput = m_service->audioInput();
+ const QAudioFormat audioFormat = audioInput ? audioInput->device.preferredFormat() : QAudioFormat();
+ m_audioSettings = avfAudioSettings(settings, audioFormat);
if (m_audioSettings)
[m_audioSettings retain];
diff --git a/src/multimedia/platform/gstreamer/audio/qgstreameraudiodecoder.cpp b/src/multimedia/platform/gstreamer/audio/qgstreameraudiodecoder.cpp
index 361bf2207..9f024d268 100644
--- a/src/multimedia/platform/gstreamer/audio/qgstreameraudiodecoder.cpp
+++ b/src/multimedia/platform/gstreamer/audio/qgstreameraudiodecoder.cpp
@@ -188,6 +188,7 @@ bool QGstreamerAudioDecoder::processBusMessage(const QGstreamerMessage &message)
break;
case GST_MESSAGE_EOS:
+ m_playbin.setState(GST_STATE_NULL);
finished();
break;
diff --git a/src/multimedia/platform/gstreamer/common/qgstreamermediaplayer.cpp b/src/multimedia/platform/gstreamer/common/qgstreamermediaplayer.cpp
index e9ac97724..e4bdc6c56 100644
--- a/src/multimedia/platform/gstreamer/common/qgstreamermediaplayer.cpp
+++ b/src/multimedia/platform/gstreamer/common/qgstreamermediaplayer.cpp
@@ -635,6 +635,39 @@ void QGstreamerMediaPlayer::uridecodebinElementAddedCallback(GstElement */*uride
}
}
+void QGstreamerMediaPlayer::sourceSetupCallback(GstElement *uridecodebin, GstElement *source, QGstreamerMediaPlayer *that)
+{
+ Q_UNUSED(uridecodebin)
+ Q_UNUSED(that)
+
+ qCDebug(qLcMediaPlayer) << "Setting up source:" << g_type_name_from_instance((GTypeInstance*)source);
+
+ if (QLatin1String("GstRTSPSrc") == QString::fromUtf8(g_type_name_from_instance((GTypeInstance*)source))) {
+ QGstElement s(source);
+ int latency{40};
+ bool ok{false};
+ int v = QString::fromLocal8Bit(qgetenv("QT_MEDIA_RTSP_LATENCY")).toUInt(&ok);
+ if (ok)
+ latency = v;
+ qCDebug(qLcMediaPlayer) << " -> setting source latency to:" << latency << "ms";
+ s.set("latency", latency);
+
+ bool drop{true};
+ v = QString::fromLocal8Bit(qgetenv("QT_MEDIA_RTSP_DROP_ON_LATENCY")).toUInt(&ok);
+ if (ok && v == 0)
+ drop = false;
+ qCDebug(qLcMediaPlayer) << " -> setting drop-on-latency to:" << drop;
+ s.set("drop-on-latency", drop);
+
+ bool retrans{false};
+ v = QString::fromLocal8Bit(qgetenv("QT_MEDIA_RTSP_DO_RETRANSMISSION")).toUInt(&ok);
+ if (ok && v not_eq 0)
+ retrans = true;
+ qCDebug(qLcMediaPlayer) << " -> setting do-retransmission to:" << retrans;
+ s.set("do-retransmission", retrans);
+ }
+}
+
void QGstreamerMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
{
qCDebug(qLcMediaPlayer) << Q_FUNC_INFO << "setting location to" << content;
@@ -691,6 +724,7 @@ void QGstreamerMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
playerPipeline.add(decoder);
// can't set post-stream-topology to true, as uridecodebin doesn't have the property. Use a hack
decoder.connect("element-added", GCallback(QGstreamerMediaPlayer::uridecodebinElementAddedCallback), this);
+ decoder.connect("source-setup", GCallback(QGstreamerMediaPlayer::sourceSetupCallback), this);
decoder.set("uri", content.toEncoded().constData());
if (m_bufferProgress != 0) {
diff --git a/src/multimedia/platform/gstreamer/common/qgstreamermediaplayer_p.h b/src/multimedia/platform/gstreamer/common/qgstreamermediaplayer_p.h
index 26c3fa362..387c3334a 100644
--- a/src/multimedia/platform/gstreamer/common/qgstreamermediaplayer_p.h
+++ b/src/multimedia/platform/gstreamer/common/qgstreamermediaplayer_p.h
@@ -143,6 +143,7 @@ private:
void decoderPadAdded(const QGstElement &src, const QGstPad &pad);
void decoderPadRemoved(const QGstElement &src, const QGstPad &pad);
static void uridecodebinElementAddedCallback(GstElement *uridecodebin, GstElement *child, QGstreamerMediaPlayer *that);
+ static void sourceSetupCallback(GstElement *uridecodebin, GstElement *source, QGstreamerMediaPlayer *that);
void parseStreamsAndMetadata();
void connectOutput(TrackSelector &ts);
void removeOutput(TrackSelector &ts);
diff --git a/src/multimedia/platform/windows/player/mfplayersession.cpp b/src/multimedia/platform/windows/player/mfplayersession.cpp
index caa0a708e..ebdbff696 100644
--- a/src/multimedia/platform/windows/player/mfplayersession.cpp
+++ b/src/multimedia/platform/windows/player/mfplayersession.cpp
@@ -111,7 +111,7 @@ MFPlayerSession::MFPlayerSession(MFPlayerControl *playerControl)
m_request.rate = 1.0f;
m_audioSampleGrabber = new AudioSampleGrabberCallback;
- m_videoRendererControl = new MFVideoRendererControl;
+ m_videoRendererControl = new MFVideoRendererControl(this);
}
void MFPlayerSession::timeout()
@@ -1593,8 +1593,13 @@ ULONG MFPlayerSession::AddRef(void)
ULONG MFPlayerSession::Release(void)
{
LONG cRef = InterlockedDecrement(&m_cRef);
- if (cRef == 0)
+ if (cRef == 0) {
this->deleteLater();
+
+ // In rare cases the session has queued events that run between deleteLater and the
+ // actual deletion, so we set the parent control to nullptr to prevent crashes in those cases.
+ m_playerControl = nullptr;
+ }
return cRef;
}
diff --git a/src/multimedia/recording/qmediarecorder.cpp b/src/multimedia/recording/qmediarecorder.cpp
index 80c53ee46..7ea934158 100644
--- a/src/multimedia/recording/qmediarecorder.cpp
+++ b/src/multimedia/recording/qmediarecorder.cpp
@@ -106,14 +106,14 @@ QT_BEGIN_NAMESPACE
\qml
CameraButton {
text: "Record"
- visible: recorder.status !== MediaRecorder.RecordingStatus
+ visible: recorder.recorderState !== MediaRecorder.RecordingState
onClicked: recorder.record()
}
CameraButton {
id: stopButton
text: "Stop"
- visible: recorder.status === MediaRecorder.RecordingStatus
+ visible: recorder.recorderState === MediaRecorder.RecordingState
onClicked: recorder.stop()
}
\endqml
diff --git a/src/multimedia/video/qvideosink.cpp b/src/multimedia/video/qvideosink.cpp
index f70763157..19e7093c8 100644
--- a/src/multimedia/video/qvideosink.cpp
+++ b/src/multimedia/video/qvideosink.cpp
@@ -122,6 +122,7 @@ QVideoSink::QVideoSink(QObject *parent)
*/
QVideoSink::~QVideoSink()
{
+ disconnect(this);
d->unregisterSource();
delete d;
}
diff --git a/src/multimediaquick/qsgvideonode_p.cpp b/src/multimediaquick/qsgvideonode_p.cpp
index 000bda7b9..4a6d3113a 100644
--- a/src/multimediaquick/qsgvideonode_p.cpp
+++ b/src/multimediaquick/qsgvideonode_p.cpp
@@ -227,8 +227,10 @@ QSGVideoMaterial::QSGVideoMaterial(const QVideoFrameFormat &format) :
QSGVideoNode::QSGVideoNode(QQuickVideoOutput *parent, const QVideoFrameFormat &format)
: m_parent(parent),
- m_orientation(-1),
- m_format(format)
+ m_orientation(-1),
+ m_frameOrientation(-1),
+ m_frameMirrored(false),
+ m_format(format)
{
setFlag(QSGNode::OwnsMaterial);
setFlag(QSGNode::OwnsGeometry);
@@ -303,12 +305,24 @@ void QSGVideoNode::setSubtitleGeometry()
/* Update the vertices and texture coordinates. Orientation must be in {0,90,180,270} */
void QSGVideoNode::setTexturedRectGeometry(const QRectF &rect, const QRectF &textureRect, int orientation)
{
- if (rect == m_rect && textureRect == m_textureRect && orientation == m_orientation)
+ bool frameChanged = false;
+ if (m_material) {
+ if (m_material->m_currentFrame.rotationAngle() != m_frameOrientation
+ || m_material->m_currentFrame.mirrored() != m_frameMirrored) {
+ frameChanged = true;
+ }
+ }
+ if (rect == m_rect && textureRect == m_textureRect && orientation == m_orientation
+ && !frameChanged)
return;
m_rect = rect;
m_textureRect = textureRect;
m_orientation = orientation;
+ if (m_material) {
+ m_frameOrientation = m_material->m_currentFrame.rotationAngle();
+ m_frameMirrored = m_material->m_currentFrame.mirrored();
+ }
int videoRotation = orientation;
videoRotation += m_material ? m_material->m_currentFrame.rotationAngle() : 0;
videoRotation %= 360;
diff --git a/src/multimediaquick/qsgvideonode_p.h b/src/multimediaquick/qsgvideonode_p.h
index d62248b7c..6508581a7 100644
--- a/src/multimediaquick/qsgvideonode_p.h
+++ b/src/multimediaquick/qsgvideonode_p.h
@@ -86,6 +86,8 @@ private:
QRectF m_rect;
QRectF m_textureRect;
int m_orientation;
+ int m_frameOrientation;
+ bool m_frameMirrored;
QVideoFrameFormat m_format;
QSGVideoMaterial *m_material;